/* e_aes.c */
/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ==================================================================== */
#include <string.h>

#include <openssl/aead.h>
#include <openssl/aes.h>
#include <openssl/cipher.h>
#include <openssl/cpu.h>
#include <openssl/err.h>
#include <openssl/mem.h>
#include <openssl/nid.h>
#include <openssl/rand.h>

#include "internal.h"
#include "../../internal.h"
#include "../aes/internal.h"
#include "../modes/internal.h"
#include "../delocate.h"

#if defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
#include <openssl/arm_arch.h>
#endif

OPENSSL_MSVC_PRAGMA(warning(disable: 4702))  // Unreachable code.
// Per-context state for the non-AEAD AES ciphers (CBC, ECB, CTR, OFB).
typedef struct {
  union {
    double align;  // forces suitable alignment for the key schedule
    AES_KEY ks;
  } ks;
  block128_f block;  // single-block encrypt or decrypt function
  union {
    cbc128_f cbc;  // accelerated CBC routine, or NULL if unavailable
    ctr128_f ctr;  // accelerated 32-bit counter routine, or NULL
  } stream;
} EVP_AES_KEY;
// Per-context state for the AES-GCM cipher.
typedef struct {
  union {
    double align;  // forces suitable alignment for the key schedule
    AES_KEY ks;
  } ks;          // AES key schedule to use
  int key_set;   // Set if key initialised
  int iv_set;    // Set if an iv is set
  GCM128_CONTEXT gcm;
  uint8_t *iv;   // Temporary IV store
  int ivlen;     // IV length
  int taglen;    // Tag length, or -1 before a tag is set/computed
  int iv_gen;    // It is OK to generate IVs
  ctr128_f ctr;  // accelerated counter routine, or NULL
} EVP_AES_GCM_CTX;
#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
#define VPAES
// Returns one if the vector-permute (SSSE3-based) AES implementation may be
// used on this CPU.
static char vpaes_capable(void) {
  // Tests bit 41 (SSSE3) of the second cpuid feature word.
  return (OPENSSL_ia32cap_P[1] & (1 << (41 - 32))) != 0;
}

#if defined(OPENSSL_X86_64)
#define BSAES
// The bit-sliced implementation has the same CPU requirement as vpaes here.
static char bsaes_capable(void) {
  return vpaes_capable();
}
#endif

#elif !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64))

#if defined(OPENSSL_ARM) && __ARM_MAX_ARCH__ >= 7
#define BSAES
// On 32-bit ARM, bsaes requires NEON.
static char bsaes_capable(void) {
  return CRYPTO_is_NEON_capable();
}
#endif

#endif

#if defined(BSAES)
// On platforms where BSAES gets defined (just above), then these functions are
// provided by asm.
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]);
#else
static char bsaes_capable(void) {
  return 0;
}

// On other platforms, bsaes_capable() will always return false and so the
// following will never be called.
static void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                              const AES_KEY *key, uint8_t ivec[16], int enc) {
  abort();
}

static void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
                                       size_t len, const AES_KEY *key,
                                       const uint8_t ivec[16]) {
  abort();
}
#endif
#if defined(VPAES)
// On platforms where VPAES gets defined (just above), then these functions are
// provided by asm.
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);
#else
static char vpaes_capable(void) {
  return 0;
}

// On other platforms, vpaes_capable() will always return false and so the
// following will never be called.
static int vpaes_set_encrypt_key(const uint8_t *userKey, int bits,
                                 AES_KEY *key) {
  abort();
}

static int vpaes_set_decrypt_key(const uint8_t *userKey, int bits,
                                 AES_KEY *key) {
  abort();
}

static void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

static void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

static void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                              const AES_KEY *key, uint8_t *ivec, int enc) {
  abort();
}
#endif
#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
// On x86 and x86-64 these functions are provided by asm.
int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int aesni_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aesni_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void aesni_ecb_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);
#else
// On other platforms, aesni_capable() will always return false and so the
// following will never be called. Only the functions reachable from
// non-AES-NI code paths need stubs here.
static void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

static int aesni_set_encrypt_key(const uint8_t *userKey, int bits,
                                 AES_KEY *key) {
  abort();
}

static void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
                                       size_t blocks, const void *key,
                                       const uint8_t *ivec) {
  abort();
}
#endif
// Generic (non-AES-NI) key setup for the CBC/ECB/CTR/OFB ciphers. Selects the
// best implementation the CPU supports and records the single-block function
// and, where available, an accelerated stream function in |dat|. Returns 1 on
// success and 0 if key-schedule setup failed.
static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  // Only ECB and CBC decryption need a decryption key schedule; everything
  // else (including CTR/OFB decryption) uses an encryption schedule.
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (hwaes_capable()) {
      ret = aes_hw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)aes_hw_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = (cbc128_f)aes_hw_cbc_encrypt;
      }
    } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      // bsaes supplies the CBC routine but uses the generic key schedule.
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)vpaes_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
    } else {
      // Portable fallback.
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
    }
  } else if (hwaes_capable()) {
    ret = aes_hw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)aes_hw_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = (cbc128_f)aes_hw_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = (ctr128_f)aes_hw_ctr32_encrypt_blocks;
    }
  } else if (bsaes_capable() && mode == EVP_CIPH_CTR_MODE) {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)vpaes_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
  } else {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}
  253. static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
  254. size_t len) {
  255. EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
  256. if (dat->stream.cbc) {
  257. (*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
  258. } else if (ctx->encrypt) {
  259. CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  260. } else {
  261. CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  262. }
  263. return 1;
  264. }
  265. static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
  266. size_t len) {
  267. size_t bl = ctx->cipher->block_size;
  268. EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
  269. if (len < bl) {
  270. return 1;
  271. }
  272. len -= bl;
  273. for (size_t i = 0; i <= len; i += bl) {
  274. (*dat->block)(in + i, out + i, &dat->ks);
  275. }
  276. return 1;
  277. }
  278. static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
  279. size_t len) {
  280. EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
  281. if (dat->stream.ctr) {
  282. CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, ctx->iv, ctx->buf,
  283. &ctx->num, dat->stream.ctr);
  284. } else {
  285. CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv, ctx->buf, &ctx->num,
  286. dat->block);
  287. }
  288. return 1;
  289. }
  290. static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
  291. size_t len) {
  292. EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
  293. CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num, dat->block);
  294. return 1;
  295. }
static char aesni_capable(void);

// Initialises |aes_key| (and, when non-NULL, |gcm_ctx| and |*out_block|) with
// the fastest AES encryption implementation available on this CPU. Returns an
// accelerated 32-bit counter-mode function, or NULL when only the
// single-block path is available.
ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
                         block128_f *out_block, const uint8_t *key,
                         size_t key_bytes) {
  // Preference order: AES-NI, other hardware AES, bit-sliced, vector-permute,
  // then the portable C implementation.
  if (aesni_capable()) {
    aesni_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (gcm_ctx != NULL) {
      // The trailing 1 flags the key schedule as AES-NI-format.
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aesni_encrypt, 1);
    }
    if (out_block) {
      *out_block = (block128_f) aesni_encrypt;
    }
    return (ctr128_f)aesni_ctr32_encrypt_blocks;
  }

  if (hwaes_capable()) {
    aes_hw_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aes_hw_encrypt, 0);
    }
    if (out_block) {
      *out_block = (block128_f) aes_hw_encrypt;
    }
    return (ctr128_f)aes_hw_ctr32_encrypt_blocks;
  }

  if (bsaes_capable()) {
    // bsaes shares the generic key schedule and block function; only the
    // counter loop is accelerated.
    AES_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt, 0);
    }
    if (out_block) {
      *out_block = (block128_f) AES_encrypt;
    }
    return (ctr128_f)bsaes_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    // vpaes has no counter-mode routine, so NULL is returned.
    vpaes_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (out_block) {
      *out_block = (block128_f) vpaes_encrypt;
    }
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt, 0);
    }
    return NULL;
  }

  // Portable fallback: no accelerated counter routine.
  AES_set_encrypt_key(key, key_bytes * 8, aes_key);
  if (gcm_ctx != NULL) {
    CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt, 0);
  }
  if (out_block) {
    *out_block = (block128_f) AES_encrypt;
  }
  return NULL;
}
// Configures the key and/or IV for AES-GCM via |aes_ctr_set_key|. Either
// |key| or |iv| may be NULL to set just the other; when both are NULL this is
// a no-op. Always returns 1.
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

  if (!iv && !key) {
    return 1;
  }

  if (key) {
    gctx->ctr =
        aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm, NULL, key, ctx->key_len);
    // If we have an iv can set it directly, otherwise use saved IV.
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    // If key set use IV, otherwise copy
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
    } else {
      OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}
// Releases AES-GCM per-context state: scrubs the GCM state (which holds key
// material) and frees the IV buffer when it was heap-allocated rather than
// aliasing the context's built-in |c->iv|.
static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;
  OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  if (gctx->iv != c->iv) {
    OPENSSL_free(gctx->iv);
  }
}
  386. // increment counter (64-bit int) by 1
  387. static void ctr64_inc(uint8_t *counter) {
  388. int n = 8;
  389. uint8_t c;
  390. do {
  391. --n;
  392. c = counter[n];
  393. ++c;
  394. counter[n] = c;
  395. if (c) {
  396. return;
  397. }
  398. } while (n);
  399. }
// Handles EVP_CIPHER_CTX_ctrl() commands for AES-GCM. Returns 1 on success,
// 0 on failure, and -1 for unrecognised commands.
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;
  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;  // default: alias the context's built-in IV buffer
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      // Allocate memory for IV if needed
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_GCM_SET_TAG:
      // Only meaningful when decrypting: stashes the expected tag in |c->buf|.
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_GCM_GET_TAG:
      // Only meaningful when encrypting, after the tag has been computed.
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      OPENSSL_memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
      // Special case: -1 length restores whole IV
      if (arg == -1) {
        OPENSSL_memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }
      // Fixed field must be at least 4 bytes and invocation field
      // at least 8.
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }
      if (arg) {
        OPENSSL_memcpy(gctx->iv, ptr, arg);
      }
      // When encrypting, randomise the invocation portion of the IV.
      if (c->encrypt && !RAND_bytes(gctx->iv + arg, gctx->ivlen - arg)) {
        return 0;
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      // Copy out the last |arg| bytes of the IV for the caller.
      OPENSSL_memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      // Invocation field will be at least 8 bytes in size and
      // so no need to check wrap around or increment more than
      // last 8 bytes.
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      // Decrypt-only: install the invocation field received from the peer.
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      // Give the copied context its own IV storage when ours is
      // heap-allocated.
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
        OPENSSL_memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
      }
      return 1;
    }

    default:
      return -1;
  }
}
// AES-GCM cipher callback. With |out| == NULL, |in| is treated as additional
// authenticated data. A final call with |in| == NULL finishes the operation:
// on decrypt the tag is verified against |ctx->buf|; on encrypt the tag is
// computed into |ctx->buf|. Returns the number of bytes processed, or -1 on
// error.
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

  // If not set up, return error
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (in) {
    if (out == NULL) {
      // AAD-only pass.
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    }
    return len;
  } else {
    if (!ctx->encrypt) {
      // Decryption finish: a tag must have been set and must verify.
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }
    // Encryption finish: compute the full 16-byte tag into |ctx->buf|.
    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;
    // Don't reuse the IV
    gctx->iv_set = 0;
    return 0;
  }
}
// AES-128-CBC using the generic (capability-dispatched) implementation.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_cbc_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_cbc;
  out->block_size = 16;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}
// AES-128-CTR; block_size is 1 because CTR is a stream mode.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ctr_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ctr;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}
// AES-128-ECB; ECB takes no IV, so iv_len is left zero by the memset.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ecb;
  out->block_size = 16;
  out->key_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}
// AES-128-OFB; block_size is 1 because OFB is a stream mode.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ofb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ofb128;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}
// AES-128-GCM; custom-IV/custom-cipher flags route all work through the
// aes_gcm_* callbacks above.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_gcm_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_gcm;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 12;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX);
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}
// AES-192-CBC using the generic (capability-dispatched) implementation.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_cbc_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_cbc;
  out->block_size = 16;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}
// AES-192-CTR; block_size is 1 because CTR is a stream mode.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ctr_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ctr;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}
// AES-192-ECB; ECB takes no IV, so iv_len is left zero by the memset.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ecb;
  out->block_size = 16;
  out->key_len = 24;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}
// AES-192-GCM; custom-IV/custom-cipher flags route all work through the
// aes_gcm_* callbacks above.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_gcm_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_gcm;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 12;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX);
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}
// AES-256-CBC using the generic (capability-dispatched) implementation.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_cbc_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_cbc;
  out->block_size = 16;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}
// AES-256-CTR; block_size is 1 because CTR is a stream mode.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ctr_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ctr;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}
// AES-256-ECB; ECB takes no IV, so iv_len is left zero by the memset.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ecb;
  out->block_size = 16;
  out->key_len = 32;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}
// AES-256-OFB; block_size is 1 because OFB is a stream mode.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ofb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ofb128;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}
// AES-256-GCM; custom-IV/custom-cipher flags route all work through the
// aes_gcm_* callbacks above.
DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_gcm_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_gcm;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 12;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX);
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}
#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))

// AES-NI section.

// Returns one if the AES-NI instruction set extension is available.
static char aesni_capable(void) {
  // Tests bit 57 (AES-NI) of the second cpuid feature word.
  return (OPENSSL_ia32cap_P[1] & (1 << (57 - 32))) != 0;
}
// Installs an AES-NI key schedule in |ctx| according to the cipher's mode.
// ECB/CBC decryption needs the inverse (decrypt) schedule; every other case
// — including CTR and OFB, which only ever encrypt the keystream — uses the
// forward schedule. Returns 1 on success, 0 if key setup fails.
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                          const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    dat->block = (block128_f)aesni_decrypt;
    // aesni_cbc_encrypt handles both directions via its |enc| flag; ECB has
    // no accelerated multi-block path here, so leave stream.cbc NULL for it.
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
  } else {
    ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    dat->block = (block128_f)aesni_encrypt;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    } else {
      // Clearing one member of the stream union clears the whole union.
      dat->stream.cbc = NULL;
    }
  }
  // The assembly key-setup routines signal failure with a negative value.
  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }
  return 1;
}
// One-shot CBC processing of |len| bytes with the AES-NI assembly routine.
// Direction comes from ctx->encrypt; the chaining IV in ctx->iv is updated
// in place by aesni_cbc_encrypt. Always returns 1.
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                            const uint8_t *in, size_t len) {
  aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);
  return 1;
}
  757. static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
  758. const uint8_t *in, size_t len) {
  759. size_t bl = ctx->cipher->block_size;
  760. if (len < bl) {
  761. return 1;
  762. }
  763. aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);
  764. return 1;
  765. }
// (Re)initializes the AES-NI GCM context. |key| and |iv| may each be NULL so
// callers can supply the key and the IV across separate EVP init calls; the
// half that is present is installed and the other half is taken from (or
// stashed in) the context. Always returns 1.
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                              const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  if (!iv && !key) {
    return 1;
  }
  if (key) {
    aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
    CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt, 1);
    gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    // If we have an iv we can set it directly, otherwise use the saved IV
    // from a previous init call.
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    // IV only: if the key is already set, program it into the GCM state now;
    // otherwise stash it until the key arrives.
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
    } else {
      OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}
  798. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_cbc) {
  799. memset(out, 0, sizeof(EVP_CIPHER));
  800. out->nid = NID_aes_128_cbc;
  801. out->block_size = 16;
  802. out->key_len = 16;
  803. out->iv_len = 16;
  804. out->ctx_size = sizeof(EVP_AES_KEY);
  805. out->flags = EVP_CIPH_CBC_MODE;
  806. out->init = aesni_init_key;
  807. out->cipher = aesni_cbc_cipher;
  808. }
  809. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_ctr) {
  810. memset(out, 0, sizeof(EVP_CIPHER));
  811. out->nid = NID_aes_128_ctr;
  812. out->block_size = 1;
  813. out->key_len = 16;
  814. out->iv_len = 16;
  815. out->ctx_size = sizeof(EVP_AES_KEY);
  816. out->flags = EVP_CIPH_CTR_MODE;
  817. out->init = aesni_init_key;
  818. out->cipher = aes_ctr_cipher;
  819. }
  820. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_ecb) {
  821. memset(out, 0, sizeof(EVP_CIPHER));
  822. out->nid = NID_aes_128_ecb;
  823. out->block_size = 16;
  824. out->key_len = 16;
  825. out->ctx_size = sizeof(EVP_AES_KEY);
  826. out->flags = EVP_CIPH_ECB_MODE;
  827. out->init = aesni_init_key;
  828. out->cipher = aesni_ecb_cipher;
  829. }
  830. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_ofb) {
  831. memset(out, 0, sizeof(EVP_CIPHER));
  832. out->nid = NID_aes_128_ofb128;
  833. out->block_size = 1;
  834. out->key_len = 16;
  835. out->iv_len = 16;
  836. out->ctx_size = sizeof(EVP_AES_KEY);
  837. out->flags = EVP_CIPH_OFB_MODE;
  838. out->init = aesni_init_key;
  839. out->cipher = aes_ofb_cipher;
  840. }
  841. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_gcm) {
  842. memset(out, 0, sizeof(EVP_CIPHER));
  843. out->nid = NID_aes_128_gcm;
  844. out->block_size = 1;
  845. out->key_len = 16;
  846. out->iv_len = 12;
  847. out->ctx_size = sizeof(EVP_AES_GCM_CTX);
  848. out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
  849. EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
  850. EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  851. out->init = aesni_gcm_init_key;
  852. out->cipher = aes_gcm_cipher;
  853. out->cleanup = aes_gcm_cleanup;
  854. out->ctrl = aes_gcm_ctrl;
  855. }
  856. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_192_cbc) {
  857. memset(out, 0, sizeof(EVP_CIPHER));
  858. out->nid = NID_aes_192_cbc;
  859. out->block_size = 16;
  860. out->key_len = 24;
  861. out->iv_len = 16;
  862. out->ctx_size = sizeof(EVP_AES_KEY);
  863. out->flags = EVP_CIPH_CBC_MODE;
  864. out->init = aesni_init_key;
  865. out->cipher = aesni_cbc_cipher;
  866. }
  867. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_192_ctr) {
  868. memset(out, 0, sizeof(EVP_CIPHER));
  869. out->nid = NID_aes_192_ctr;
  870. out->block_size = 1;
  871. out->key_len = 24;
  872. out->iv_len = 16;
  873. out->ctx_size = sizeof(EVP_AES_KEY);
  874. out->flags = EVP_CIPH_CTR_MODE;
  875. out->init = aesni_init_key;
  876. out->cipher = aes_ctr_cipher;
  877. }
  878. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_192_ecb) {
  879. memset(out, 0, sizeof(EVP_CIPHER));
  880. out->nid = NID_aes_192_ecb;
  881. out->block_size = 16;
  882. out->key_len = 24;
  883. out->ctx_size = sizeof(EVP_AES_KEY);
  884. out->flags = EVP_CIPH_ECB_MODE;
  885. out->init = aesni_init_key;
  886. out->cipher = aesni_ecb_cipher;
  887. }
  888. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_192_gcm) {
  889. memset(out, 0, sizeof(EVP_CIPHER));
  890. out->nid = NID_aes_192_gcm;
  891. out->block_size = 1;
  892. out->key_len = 24;
  893. out->iv_len = 12;
  894. out->ctx_size = sizeof(EVP_AES_GCM_CTX);
  895. out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
  896. EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
  897. EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  898. out->init = aesni_gcm_init_key;
  899. out->cipher = aes_gcm_cipher;
  900. out->cleanup = aes_gcm_cleanup;
  901. out->ctrl = aes_gcm_ctrl;
  902. }
  903. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_cbc) {
  904. memset(out, 0, sizeof(EVP_CIPHER));
  905. out->nid = NID_aes_256_cbc;
  906. out->block_size = 16;
  907. out->key_len = 32;
  908. out->iv_len = 16;
  909. out->ctx_size = sizeof(EVP_AES_KEY);
  910. out->flags = EVP_CIPH_CBC_MODE;
  911. out->init = aesni_init_key;
  912. out->cipher = aesni_cbc_cipher;
  913. }
  914. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_ctr) {
  915. memset(out, 0, sizeof(EVP_CIPHER));
  916. out->nid = NID_aes_256_ctr;
  917. out->block_size = 1;
  918. out->key_len = 32;
  919. out->iv_len = 16;
  920. out->ctx_size = sizeof(EVP_AES_KEY);
  921. out->flags = EVP_CIPH_CTR_MODE;
  922. out->init = aesni_init_key;
  923. out->cipher = aes_ctr_cipher;
  924. }
  925. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_ecb) {
  926. memset(out, 0, sizeof(EVP_CIPHER));
  927. out->nid = NID_aes_256_ecb;
  928. out->block_size = 16;
  929. out->key_len = 32;
  930. out->ctx_size = sizeof(EVP_AES_KEY);
  931. out->flags = EVP_CIPH_ECB_MODE;
  932. out->init = aesni_init_key;
  933. out->cipher = aesni_ecb_cipher;
  934. }
  935. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_ofb) {
  936. memset(out, 0, sizeof(EVP_CIPHER));
  937. out->nid = NID_aes_256_ofb128;
  938. out->block_size = 1;
  939. out->key_len = 32;
  940. out->iv_len = 16;
  941. out->ctx_size = sizeof(EVP_AES_KEY);
  942. out->flags = EVP_CIPH_OFB_MODE;
  943. out->init = aesni_init_key;
  944. out->cipher = aes_ofb_cipher;
  945. }
  946. DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_gcm) {
  947. memset(out, 0, sizeof(EVP_CIPHER));
  948. out->nid = NID_aes_256_gcm;
  949. out->block_size = 1;
  950. out->key_len = 32;
  951. out->iv_len = 12;
  952. out->ctx_size = sizeof(EVP_AES_GCM_CTX);
  953. out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
  954. EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
  955. EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY |
  956. EVP_CIPH_FLAG_AEAD_CIPHER;
  957. out->init = aesni_gcm_init_key;
  958. out->cipher = aes_gcm_cipher;
  959. out->cleanup = aes_gcm_cleanup;
  960. out->ctrl = aes_gcm_ctrl;
  961. }
// Defines the public EVP_aes_<keybits>_<mode> accessor. On x86/x86-64 it
// dispatches at runtime: the AES-NI-backed table when the CPU supports it,
// otherwise the generic implementation.
#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    if (aesni_capable()) {                             \
      return aesni_##keybits##_##mode();               \
    } else {                                           \
      return aes_##keybits##_##mode##_generic();       \
    }                                                  \
  }
  970. #else // ^^^ OPENSSL_X86_64 || OPENSSL_X86
  971. static char aesni_capable(void) {
  972. return 0;
  973. }
// Without the AES-NI assembly compiled in, the public accessor always
// returns the generic implementation.
#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    return aes_##keybits##_##mode##_generic();         \
  }
  978. #endif
// Instantiate the public EVP_aes_<bits>_<mode> accessors. Note: no
// (192, ofb) instantiation — this file defines no AES-192-OFB tables.
EVP_CIPHER_FUNCTION(128, cbc)
EVP_CIPHER_FUNCTION(128, ctr)
EVP_CIPHER_FUNCTION(128, ecb)
EVP_CIPHER_FUNCTION(128, ofb)
EVP_CIPHER_FUNCTION(128, gcm)
EVP_CIPHER_FUNCTION(192, cbc)
EVP_CIPHER_FUNCTION(192, ctr)
EVP_CIPHER_FUNCTION(192, ecb)
EVP_CIPHER_FUNCTION(192, gcm)
EVP_CIPHER_FUNCTION(256, cbc)
EVP_CIPHER_FUNCTION(256, ctr)
EVP_CIPHER_FUNCTION(256, ecb)
EVP_CIPHER_FUNCTION(256, ofb)
EVP_CIPHER_FUNCTION(256, gcm)
// Maximum (and default) GCM authentication tag length, in bytes.
#define EVP_AEAD_AES_GCM_TAG_LEN 16
// Per-AEAD-context state for AES-GCM.
struct aead_aes_gcm_ctx {
  union {
    double align;  // forces suitable alignment for the key schedule
    AES_KEY ks;
  } ks;
  GCM128_CONTEXT gcm;  // precomputed GHASH state; copied per operation
  // Optimized 32-bit counter-mode routine, or NULL to fall back to the
  // block-at-a-time CRYPTO_gcm128_encrypt/decrypt path.
  ctr128_f ctr;
};
// AES-GCM state plus the nonce-monotonicity bookkeeping required for the
// TLS 1.2 variant of the AEAD.
struct aead_aes_gcm_tls12_ctx {
  struct aead_aes_gcm_ctx gcm_ctx;
  // Smallest counter value acceptable for the next seal; enforces strictly
  // increasing explicit nonces (see aead_aes_gcm_tls12_seal_scatter).
  uint64_t min_next_nonce;
};
  1006. static int aead_aes_gcm_init_impl(struct aead_aes_gcm_ctx *gcm_ctx,
  1007. size_t *out_tag_len, const uint8_t *key,
  1008. size_t key_len, size_t tag_len) {
  1009. const size_t key_bits = key_len * 8;
  1010. if (key_bits != 128 && key_bits != 256) {
  1011. OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
  1012. return 0; // EVP_AEAD_CTX_init should catch this.
  1013. }
  1014. if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
  1015. tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  1016. }
  1017. if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
  1018. OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
  1019. return 0;
  1020. }
  1021. gcm_ctx->ctr =
  1022. aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm, NULL, key, key_len);
  1023. *out_tag_len = tag_len;
  1024. return 1;
  1025. }
  1026. static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
  1027. size_t key_len, size_t requested_tag_len) {
  1028. struct aead_aes_gcm_ctx *gcm_ctx;
  1029. gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_ctx));
  1030. if (gcm_ctx == NULL) {
  1031. return 0;
  1032. }
  1033. size_t actual_tag_len;
  1034. if (!aead_aes_gcm_init_impl(gcm_ctx, &actual_tag_len, key, key_len,
  1035. requested_tag_len)) {
  1036. OPENSSL_free(gcm_ctx);
  1037. return 0;
  1038. }
  1039. ctx->aead_state = gcm_ctx;
  1040. ctx->tag_len = actual_tag_len;
  1041. return 1;
  1042. }
// Frees the heap-allocated GCM state installed by aead_aes_gcm_init.
// NOTE(review): the key schedule is not zeroized before free — confirm
// whether OPENSSL_free scrubs or this is intentional.
static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {
  OPENSSL_free(ctx->aead_state);
}
// Seals |in_len| bytes from |in| into |out| (same length), then writes any
// encrypted |extra_in| bytes followed by the tag into |out_tag|. The
// long-lived GCM state is copied to a stack-local so the context itself
// never absorbs a nonce and stays reusable across operations.
static int aead_aes_gcm_seal_scatter(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                     uint8_t *out_tag, size_t *out_tag_len,
                                     size_t max_out_tag_len,
                                     const uint8_t *nonce, size_t nonce_len,
                                     const uint8_t *in, size_t in_len,
                                     const uint8_t *extra_in,
                                     size_t extra_in_len,
                                     const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  GCM128_CONTEXT gcm;
  // Unsigned-overflow check on extra_in_len + tag_len.
  if (extra_in_len + ctx->tag_len < ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }
  // |out_tag| must hold the encrypted extra input plus the tag.
  if (max_out_tag_len < extra_in_len + ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }
  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }
  const AES_KEY *key = &gcm_ctx->ks.ks;
  // Work on a copy so the shared context is not mutated.
  OPENSSL_memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);
  // AAD must be absorbed before any ciphertext.
  if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }
  // Prefer the optimized ctr32 path when the key setup provided one.
  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_encrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }
  // The extra input continues the same keystream and lands at the front of
  // |out_tag|, ahead of the tag itself.
  if (extra_in_len) {
    if (gcm_ctx->ctr) {
      if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, extra_in, out_tag,
                                       extra_in_len, gcm_ctx->ctr)) {
        return 0;
      }
    } else {
      if (!CRYPTO_gcm128_encrypt(&gcm, key, extra_in, out_tag, extra_in_len)) {
        return 0;
      }
    }
  }
  CRYPTO_gcm128_tag(&gcm, out_tag + extra_in_len, ctx->tag_len);
  *out_tag_len = ctx->tag_len + extra_in_len;
  return 1;
}
// Decrypts |in_len| bytes from |in| into |out| and verifies |in_tag| against
// the recomputed tag (compared with CRYPTO_memcmp). Any mismatch — length or
// value — fails with CIPHER_R_BAD_DECRYPT. Works on a stack copy of the GCM
// state, like seal_scatter.
static int aead_aes_gcm_open_gather(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                    const uint8_t *nonce, size_t nonce_len,
                                    const uint8_t *in, size_t in_len,
                                    const uint8_t *in_tag, size_t in_tag_len,
                                    const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];
  GCM128_CONTEXT gcm;
  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }
  // The caller must present exactly the configured tag length.
  if (in_tag_len != ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }
  const AES_KEY *key = &gcm_ctx->ks.ks;
  // Work on a copy so the shared context is not mutated.
  OPENSSL_memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);
  if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }
  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_decrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }
  CRYPTO_gcm128_tag(&gcm, tag, ctx->tag_len);
  if (CRYPTO_memcmp(tag, in_tag, ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }
  return 1;
}
  1139. DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm) {
  1140. memset(out, 0, sizeof(EVP_AEAD));
  1141. out->key_len = 16;
  1142. out->nonce_len = 12;
  1143. out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  1144. out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  1145. out->seal_scatter_supports_extra_in = 1;
  1146. out->init = aead_aes_gcm_init;
  1147. out->cleanup = aead_aes_gcm_cleanup;
  1148. out->seal_scatter = aead_aes_gcm_seal_scatter;
  1149. out->open_gather = aead_aes_gcm_open_gather;
  1150. }
  1151. DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm) {
  1152. memset(out, 0, sizeof(EVP_AEAD));
  1153. out->key_len = 32;
  1154. out->nonce_len = 12;
  1155. out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  1156. out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  1157. out->seal_scatter_supports_extra_in = 1;
  1158. out->init = aead_aes_gcm_init;
  1159. out->cleanup = aead_aes_gcm_cleanup;
  1160. out->seal_scatter = aead_aes_gcm_seal_scatter;
  1161. out->open_gather = aead_aes_gcm_open_gather;
  1162. }
  1163. static int aead_aes_gcm_tls12_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
  1164. size_t key_len, size_t requested_tag_len) {
  1165. struct aead_aes_gcm_tls12_ctx *gcm_ctx;
  1166. gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_tls12_ctx));
  1167. if (gcm_ctx == NULL) {
  1168. return 0;
  1169. }
  1170. gcm_ctx->min_next_nonce = 0;
  1171. size_t actual_tag_len;
  1172. if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
  1173. requested_tag_len)) {
  1174. OPENSSL_free(gcm_ctx);
  1175. return 0;
  1176. }
  1177. ctx->aead_state = gcm_ctx;
  1178. ctx->tag_len = actual_tag_len;
  1179. return 1;
  1180. }
// Frees the heap-allocated TLS 1.2 GCM state installed by
// aead_aes_gcm_tls12_init.
static void aead_aes_gcm_tls12_cleanup(EVP_AEAD_CTX *ctx) {
  OPENSSL_free(ctx->aead_state);
}
// TLS 1.2 seal: enforces that the trailing 64 bits of each 12-byte nonce form
// a strictly increasing counter (catching nonce reuse by a buggy caller),
// then delegates to the ordinary GCM seal.
static int aead_aes_gcm_tls12_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_tls12_ctx *gcm_ctx = ctx->aead_state;
  if (nonce_len != 12) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }
  // The given nonces must be strictly monotonically increasing. Read the
  // last 8 nonce bytes and byte-swap them into a host-order counter
  // (NOTE(review): assumes a little-endian host for big-endian nonce order —
  // confirm CRYPTO_bswap8 semantics on big-endian targets).
  uint64_t given_counter;
  OPENSSL_memcpy(&given_counter, nonce + nonce_len - sizeof(given_counter),
                 sizeof(given_counter));
  given_counter = CRYPTO_bswap8(given_counter);
  // Reject a wrapped (all-ones) counter and anything not above the floor.
  if (given_counter == UINT64_MAX ||
      given_counter < gcm_ctx->min_next_nonce) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
    return 0;
  }
  gcm_ctx->min_next_nonce = given_counter + 1;
  return aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
                                   max_out_tag_len, nonce, nonce_len, in,
                                   in_len, extra_in, extra_in_len, ad, ad_len);
}
  1209. DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls12) {
  1210. memset(out, 0, sizeof(EVP_AEAD));
  1211. out->key_len = 16;
  1212. out->nonce_len = 12;
  1213. out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  1214. out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  1215. out->seal_scatter_supports_extra_in = 1;
  1216. out->init = aead_aes_gcm_tls12_init;
  1217. out->cleanup = aead_aes_gcm_tls12_cleanup;
  1218. out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  1219. out->open_gather = aead_aes_gcm_open_gather;
  1220. }
  1221. DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls12) {
  1222. memset(out, 0, sizeof(EVP_AEAD));
  1223. out->key_len = 32;
  1224. out->nonce_len = 12;
  1225. out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  1226. out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  1227. out->seal_scatter_supports_extra_in = 1;
  1228. out->init = aead_aes_gcm_tls12_init;
  1229. out->cleanup = aead_aes_gcm_tls12_cleanup;
  1230. out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  1231. out->open_gather = aead_aes_gcm_open_gather;
  1232. }
// Returns one if the CPU has hardware support for both AES and the
// carry-less-multiply primitive used by GHASH (CLMUL on x86, PMULL on ARMv8),
// zero otherwise or on platforms with no such assembly.
int EVP_has_aes_hardware(void) {
#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
  return aesni_capable() && crypto_gcm_clmul_enabled();
#elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
  return hwaes_capable() && CRYPTO_is_ARMv8_PMULL_capable();
#else
  return 0;
#endif
}