/* e_aria.c */
/*
 * Copyright 2017-2019 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 *
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include "internal/cryptlib.h"

#ifndef OPENSSL_NO_ARIA
# include <openssl/evp.h>
# include <openssl/modes.h>
# include <openssl/rand.h>
# include <openssl/rand_drbg.h>
# include "internal/aria.h"
# include "internal/evp_int.h"
# include "modes_lcl.h"
# include "evp_locl.h"

/* ARIA subkey structure */
typedef struct {
    ARIA_KEY ks;
} EVP_ARIA_KEY;

/* ARIA GCM context */
typedef struct {
    union {
        double align;
        ARIA_KEY ks;
    } ks;                       /* ARIA subkey to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
} EVP_ARIA_GCM_CTX;

/* ARIA CCM context */
typedef struct {
    union {
        double align;
        ARIA_KEY ks;
    } ks;                       /* ARIA key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_ARIA_CCM_CTX;
/* Set up the ARIA key schedule (subkeys) for the requested mode */
static int aria_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                         const unsigned char *iv, int enc)
{
    int ret;
    int mode = EVP_CIPHER_CTX_mode(ctx);

    if (enc || (mode != EVP_CIPH_ECB_MODE && mode != EVP_CIPH_CBC_MODE))
        ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   EVP_CIPHER_CTX_get_cipher_data(ctx));
    else
        ret = aria_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   EVP_CIPHER_CTX_get_cipher_data(ctx));
    if (ret < 0) {
        EVPerr(EVP_F_ARIA_INIT_KEY, EVP_R_ARIA_KEY_SETUP_FAILED);
        return 0;
    }
    return 1;
}
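/*
 * Thin wrappers that adapt the raw ARIA block function to the generic
 * CRYPTO_*128 mode helpers; they are referenced by the cipher tables
 * generated further below.
 */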
static void aria_cbc_encrypt(const unsigned char *in, unsigned char *out,
                             size_t len, const ARIA_KEY *key,
                             unsigned char *ivec, const int enc)
{
    if (enc)
        CRYPTO_cbc128_encrypt(in, out, len, key, ivec,
                              (block128_f) aria_encrypt);
    else
        CRYPTO_cbc128_decrypt(in, out, len, key, ivec,
                              (block128_f) aria_encrypt);
}

static void aria_cfb128_encrypt(const unsigned char *in, unsigned char *out,
                                size_t length, const ARIA_KEY *key,
                                unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_encrypt(in, out, length, key, ivec, num, enc,
                          (block128_f) aria_encrypt);
}

static void aria_cfb1_encrypt(const unsigned char *in, unsigned char *out,
                              size_t length, const ARIA_KEY *key,
                              unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_1_encrypt(in, out, length, key, ivec, num, enc,
                            (block128_f) aria_encrypt);
}

static void aria_cfb8_encrypt(const unsigned char *in, unsigned char *out,
                              size_t length, const ARIA_KEY *key,
                              unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_8_encrypt(in, out, length, key, ivec, num, enc,
                            (block128_f) aria_encrypt);
}

static void aria_ecb_encrypt(const unsigned char *in, unsigned char *out,
                             const ARIA_KEY *key, const int enc)
{
    aria_encrypt(in, out, key);
}

static void aria_ofb128_encrypt(const unsigned char *in, unsigned char *out,
                                size_t length, const ARIA_KEY *key,
                                unsigned char *ivec, int *num)
{
    CRYPTO_ofb128_encrypt(in, out, length, key, ivec, num,
                          (block128_f) aria_encrypt);
}
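/*
 * Generate the EVP_CIPHER definitions and the EVP_aria_128/192/256_*
 * entry points for the basic ECB, CBC, OFB and CFB modes.
 */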
IMPLEMENT_BLOCK_CIPHER(aria_128, ks, aria, EVP_ARIA_KEY,
                       NID_aria_128, 16, 16, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
IMPLEMENT_BLOCK_CIPHER(aria_192, ks, aria, EVP_ARIA_KEY,
                       NID_aria_192, 16, 24, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
IMPLEMENT_BLOCK_CIPHER(aria_256, ks, aria, EVP_ARIA_KEY,
                       NID_aria_256, 16, 32, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)

# define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
                IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
IMPLEMENT_ARIA_CFBR(128,1)
IMPLEMENT_ARIA_CFBR(192,1)
IMPLEMENT_ARIA_CFBR(256,1)
IMPLEMENT_ARIA_CFBR(128,8)
IMPLEMENT_ARIA_CFBR(192,8)
IMPLEMENT_ARIA_CFBR(256,8)

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aria_init_key, \
        aria_##mode##_cipher, \
        NULL, \
        sizeof(EVP_ARIA_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return &aria_##keylen##_##mode; }
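/*
 * CTR mode: the counter lives in the context IV, the current keystream
 * block in the context buffer, and the partial-block offset in "num".
 */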
static int aria_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    unsigned int num = EVP_CIPHER_CTX_num(ctx);
    EVP_ARIA_KEY *dat = EVP_C_DATA(EVP_ARIA_KEY,ctx);

    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx),
                          EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                          (block128_f) aria_encrypt);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

BLOCK_CIPHER_generic(NID_aria, 128, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 192, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 256, 1, 16, ctr, ctr, CTR, 0)
/* Authenticated cipher modes (GCM/CCM) */

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}
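/*
 * Initialise the GCM context. key and/or iv may be NULL: EVP allows the
 * key and IV to be supplied in separate init calls, so each part is only
 * applied when present.
 */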
static int aria_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int ret;
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aria_encrypt);
        if (ret < 0) {
            EVPerr(EVP_F_ARIA_GCM_INIT_KEY, EVP_R_ARIA_KEY_SETUP_FAILED);
            return 0;
        }
        /*
         * If we have an IV we can set it directly, otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is already set, use the IV, otherwise save a copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
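/*
 * Handle EVP_CIPHER_CTX_ctrl() requests for ARIA-GCM: context (re)init,
 * IV length and fixed/invocation IV handling, tag get/set, the TLS AAD
 * fast path and context copying.
 */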
static int aria_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_CTX_iv_length(c);
        gctx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;
    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                EVPerr(EVP_F_ARIA_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;
    case EVP_CTRL_AEAD_SET_TAG:
        if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->taglen = arg;
        return 1;
    case EVP_CTRL_AEAD_GET_TAG:
        if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
        return 1;
    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (EVP_CIPHER_CTX_encrypting(c)
            && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;
    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * The invocation field is at least 8 bytes, so there is no need to
         * check for wrap around or to increment more than the last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;
    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0
            || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;
    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->tls_aad_len = arg;
        {
            unsigned int len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting, correct for the tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;
    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_GCM_CTX *gctx_out = EVP_C_DATA(EVP_ARIA_GCM_CTX,out);

            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == EVP_CIPHER_CTX_iv_noconst(c))
                gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    EVPerr(EVP_F_ARIA_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }
    default:
        return -1;
    }
}
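/*
 * Handle a complete TLS record in one call: the buffer holds the explicit
 * IV, the payload and the tag, and encryption/decryption is done in place
 * using the AAD saved by EVP_CTRL_AEAD_TLS1_AAD.
 */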
static int aria_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
    int rv = -1;

    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        /* Encrypt payload */
        if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
            goto err;
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
            goto err;
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
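/*
 * Generic GCM update/final path: a call with out == NULL supplies AAD, a
 * call with in == NULL finalises the operation (verifying the tag when
 * decrypting, producing it when encrypting).
 */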
static int aria_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aria_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
            if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
                return -1;
        } else {
            if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
                return -1;
        }
        return len;
    }
    if (!EVP_CIPHER_CTX_encrypting(ctx)) {
        if (gctx->taglen < 0)
            return -1;
        if (CRYPTO_gcm128_finish(&gctx->gcm,
                                 EVP_CIPHER_CTX_buf_noconst(ctx),
                                 gctx->taglen) != 0)
            return -1;
        gctx->iv_set = 0;
        return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
    gctx->taglen = 16;
    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
}
static int aria_gcm_cleanup(EVP_CIPHER_CTX *ctx)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);

    if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(ctx))
        OPENSSL_free(gctx->iv);
    return 1;
}
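/*
 * Initialise the CCM context. As with GCM, key and iv may arrive in
 * separate init calls; the CCM nonce occupies the first 15 - L bytes of
 * the context IV.
 */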
static int aria_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int ret;
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aria_encrypt);
        if (ret < 0) {
            EVPerr(EVP_F_ARIA_CCM_INIT_KEY, EVP_R_ARIA_KEY_SETUP_FAILED);
            return 0;
        }
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
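/*
 * Handle EVP_CIPHER_CTX_ctrl() requests for ARIA-CCM, including the L and
 * M (nonce and tag length) parameters, tag get/set, the TLS AAD fast path
 * and context copying.
 */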
static int aria_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;
    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting, correct for the tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;
    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
        return 1;
    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;
    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;
    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;
    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_CCM_CTX *cctx_out = EVP_C_DATA(EVP_ARIA_CCM_CTX,out);

            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }
    default:
        return -1;
    }
}
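/*
 * Handle a complete TLS record in one call: the buffer holds the explicit
 * nonce, the payload and the M-byte tag, processed in place using the AAD
 * saved by EVP_CTRL_AEAD_TLS1_AAD.
 */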
static int aria_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
            : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, cctx->str)
            : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        OPENSSL_cleanse(out, len);
        return -1;
    }
}
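/*
 * Generic CCM path. CCM needs the total message length before any data is
 * processed, so callers must set the length (in == NULL, out == NULL),
 * then supply the AAD (out == NULL), then the payload.
 */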
static int aria_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aria_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;
    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If we have AAD, the message length must already be set */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }
    /* If the length has not been set yet, set it now */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
            : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;

        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}
#define aria_ccm_cleanup NULL

#define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER)

#define BLOCK_CIPHER_aead(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode, \
        blocksize, keylen/8, ivlen, \
        ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
        aria_##mode##_init_key, \
        aria_##mode##_cipher, \
        aria_##mode##_cleanup, \
        sizeof(EVP_ARIA_##MODE##_CTX), \
        NULL,NULL,aria_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return (EVP_CIPHER*)&aria_##keylen##_##mode; }

BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, gcm, gcm, GCM, 0)

BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, ccm, ccm, CCM, 0)

#endif
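/*-
 * Illustrative sketch of how an application typically drives the AEAD
 * ciphers defined above through the generic EVP interface; the key, iv,
 * aad, pt/ct buffers and their lengths are assumed to be supplied by the
 * caller, and error checking is omitted.  Note that the update call with a
 * NULL output buffer supplies the AAD:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char tag[16];
 *     int outl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aria_128_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, ivlen, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);
 *     EVP_EncryptUpdate(c, ct, &outl, pt, ptlen);
 *     EVP_EncryptFinal_ex(c, ct + outl, &outl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
 *     EVP_CIPHER_CTX_free(c);
 */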