/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ==================================================================== */
#include <string.h>

#include <openssl/aead.h>
#include <openssl/aes.h>
#include <openssl/cipher.h>
#include <openssl/cpu.h>
#include <openssl/err.h>
#include <openssl/mem.h>
#include <openssl/nid.h>
#include <openssl/rand.h>
#include <openssl/sha.h>

#include "internal.h"
#include "../internal.h"
#include "../modes/internal.h"

#if defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
#include <openssl/arm_arch.h>
#endif


OPENSSL_MSVC_PRAGMA(warning(disable: 4702)) /* Unreachable code. */

typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;
  block128_f block;
  union {
    cbc128_f cbc;
    ctr128_f ctr;
  } stream;
} EVP_AES_KEY;

typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;        /* AES key schedule to use */
  int key_set; /* Set if key initialised */
  int iv_set;  /* Set if an iv is set */
  GCM128_CONTEXT gcm;
  uint8_t *iv; /* Temporary IV store */
  int ivlen;   /* IV length */
  int taglen;
  int iv_gen;  /* It is OK to generate IVs */
  ctr128_f ctr;
} EVP_AES_GCM_CTX;
#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
#define VPAES
static char vpaes_capable(void) {
  return (OPENSSL_ia32cap_P[1] & (1 << (41 - 32))) != 0;
}

#if defined(OPENSSL_X86_64)
#define BSAES
static char bsaes_capable(void) {
  return vpaes_capable();
}
#endif

#elif !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64))

#if defined(OPENSSL_ARM) && __ARM_MAX_ARCH__ >= 7
#define BSAES
static char bsaes_capable(void) {
  return CRYPTO_is_NEON_capable();
}
#endif

#define HWAES
static int hwaes_capable(void) {
  return CRYPTO_is_ARMv8_AES_capable();
}

#elif !defined(OPENSSL_NO_ASM) && defined(OPENSSL_PPC64LE)
#define HWAES
static int hwaes_capable(void) {
  return CRYPTO_is_PPC64LE_vcrypto_capable();
}
#endif /* OPENSSL_PPC64LE */
#if defined(BSAES)
/* On platforms where BSAES is defined (just above), these functions are
 * provided by asm. */
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]);
#else
static char bsaes_capable(void) {
  return 0;
}

/* On other platforms, bsaes_capable() will always return false and so the
 * following will never be called. */
static void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                              const AES_KEY *key, uint8_t ivec[16], int enc) {
  abort();
}

static void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
                                       size_t len, const AES_KEY *key,
                                       const uint8_t ivec[16]) {
  abort();
}
#endif

#if defined(VPAES)
/* On platforms where VPAES is defined (just above), these functions are
 * provided by asm. */
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);
#else
static char vpaes_capable(void) {
  return 0;
}

/* On other platforms, vpaes_capable() will always return false and so the
 * following will never be called. */
static int vpaes_set_encrypt_key(const uint8_t *userKey, int bits,
                                 AES_KEY *key) {
  abort();
}

static int vpaes_set_decrypt_key(const uint8_t *userKey, int bits,
                                 AES_KEY *key) {
  abort();
}

static void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

static void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

static void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                              const AES_KEY *key, uint8_t *ivec, int enc) {
  abort();
}
#endif
#if defined(HWAES)
int aes_hw_set_encrypt_key(const uint8_t *user_key, const int bits,
                           AES_KEY *key);
int aes_hw_set_decrypt_key(const uint8_t *user_key, const int bits,
                           AES_KEY *key);

void aes_hw_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aes_hw_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void aes_hw_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                        const AES_KEY *key, uint8_t *ivec, const int enc);
void aes_hw_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                 const AES_KEY *key, const uint8_t ivec[16]);
#else
/* If HWAES isn't defined then we provide dummy functions for each of the hwaes
 * functions. */
static int hwaes_capable(void) {
  return 0;
}

static int aes_hw_set_encrypt_key(const uint8_t *user_key, int bits,
                                  AES_KEY *key) {
  abort();
}

static int aes_hw_set_decrypt_key(const uint8_t *user_key, int bits,
                                  AES_KEY *key) {
  abort();
}

static void aes_hw_encrypt(const uint8_t *in, uint8_t *out,
                           const AES_KEY *key) {
  abort();
}

static void aes_hw_decrypt(const uint8_t *in, uint8_t *out,
                           const AES_KEY *key) {
  abort();
}

static void aes_hw_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                               const AES_KEY *key, uint8_t *ivec, int enc) {
  abort();
}

static void aes_hw_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
                                        size_t len, const AES_KEY *key,
                                        const uint8_t ivec[16]) {
  abort();
}
#endif

#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int aesni_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aesni_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void aesni_ecb_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);
#else

/* On other platforms, aesni_capable() will always return false and so the
 * following will never be called. */
static void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

static int aesni_set_encrypt_key(const uint8_t *userKey, int bits,
                                 AES_KEY *key) {
  abort();
}

static void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
                                       size_t blocks, const void *key,
                                       const uint8_t *ivec) {
  abort();
}
#endif
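
/* aes_init_key below dispatches, in priority order, to the dedicated hardware
 * implementation (aes_hw_*), the bit-sliced implementation (bsaes; CTR and
 * CBC-decrypt only), the vector-permutation implementation (vpaes), and
 * finally the generic C implementation, based on the capability checks
 * above. */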
static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (hwaes_capable()) {
      ret = aes_hw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)aes_hw_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = (cbc128_f)aes_hw_cbc_encrypt;
      }
    } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)vpaes_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
    } else {
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
    }
  } else if (hwaes_capable()) {
    ret = aes_hw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)aes_hw_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = (cbc128_f)aes_hw_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = (ctr128_f)aes_hw_ctr32_encrypt_blocks;
    }
  } else if (bsaes_capable() && mode == EVP_CIPH_CTR_MODE) {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)vpaes_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
  } else {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}
static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.cbc) {
    (*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
  } else if (ctx->encrypt) {
    CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  } else {
    CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  }

  return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  size_t bl = ctx->cipher->block_size;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (len < bl) {
    return 1;
  }

  len -= bl;
  for (size_t i = 0; i <= len; i += bl) {
    (*dat->block)(in + i, out + i, &dat->ks);
  }

  return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, ctx->iv, ctx->buf,
                                &ctx->num, dat->stream.ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv, ctx->buf, &ctx->num,
                          dat->block);
  }
  return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num, dat->block);
  return 1;
}
static char aesni_capable(void);

static ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
                                block128_f *out_block, const uint8_t *key,
                                size_t key_len) {
  if (aesni_capable()) {
    aesni_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aesni_encrypt);
    }
    if (out_block) {
      *out_block = (block128_f)aesni_encrypt;
    }
    return (ctr128_f)aesni_ctr32_encrypt_blocks;
  }

  if (hwaes_capable()) {
    aes_hw_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aes_hw_encrypt);
    }
    if (out_block) {
      *out_block = (block128_f)aes_hw_encrypt;
    }
    return (ctr128_f)aes_hw_ctr32_encrypt_blocks;
  }

  if (bsaes_capable()) {
    AES_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
    }
    if (out_block) {
      *out_block = (block128_f)AES_encrypt;
    }
    return (ctr128_f)bsaes_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, key_len * 8, aes_key);
    if (out_block) {
      *out_block = (block128_f)vpaes_encrypt;
    }
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt);
    }
    return NULL;
  }

  AES_set_encrypt_key(key, key_len * 8, aes_key);
  if (gcm_ctx != NULL) {
    CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
  }
  if (out_block) {
    *out_block = (block128_f)AES_encrypt;
  }
  return NULL;
}
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

  if (!iv && !key) {
    return 1;
  }

  if (key) {
    gctx->ctr =
        aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm, NULL, key, ctx->key_len);
    /* If we have an IV, set it directly; otherwise use the saved IV. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* If the key is set, use the IV now; otherwise save a copy for later. */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
    } else {
      OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;
  OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  if (gctx->iv != c->iv) {
    OPENSSL_free(gctx->iv);
  }
}

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(uint8_t *counter) {
  int n = 8;
  uint8_t c;

  do {
    --n;
    c = counter[n];
    ++c;
    counter[n] = c;
    if (c) {
      return;
    }
  } while (n);
}
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;

  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      /* Allocate memory for IV if needed */
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_GCM_SET_TAG:
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_GCM_GET_TAG:
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      OPENSSL_memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
      /* Special case: -1 length restores whole IV */
      if (arg == -1) {
        OPENSSL_memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }

      /* The fixed field must be at least 4 bytes and the invocation field at
       * least 8. */
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }

      if (arg) {
        OPENSSL_memcpy(gctx->iv, ptr, arg);
      }
      if (c->encrypt && !RAND_bytes(gctx->iv + arg, gctx->ivlen - arg)) {
        return 0;
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      OPENSSL_memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      /* The invocation field is at least 8 bytes, so there is no need to
       * check for wraparound or to increment more than the last 8 bytes. */
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
        OPENSSL_memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
      }
      return 1;
    }

    default:
      return -1;
  }
}
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

  /* If not set up, return error */
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (in) {
    if (out == NULL) {
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    }
    return len;
  } else {
    if (!ctx->encrypt) {
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;
    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
  }
}
static const EVP_CIPHER aes_128_cbc = {
    NID_aes_128_cbc, 16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key, aes_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_ctr = {
    NID_aes_128_ctr, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_ecb = {
    NID_aes_128_ecb, 16 /* block_size */, 16 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key, aes_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_ofb = {
    NID_aes_128_ofb128, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aes_init_key, aes_ofb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

static const EVP_CIPHER aes_192_cbc = {
    NID_aes_192_cbc, 16 /* block_size */, 24 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key, aes_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_192_ctr = {
    NID_aes_192_ctr, 1 /* block_size */, 24 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_192_ecb = {
    NID_aes_192_ecb, 16 /* block_size */, 24 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key, aes_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_192_gcm = {
    NID_aes_192_gcm, 1 /* block_size */, 24 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

static const EVP_CIPHER aes_256_cbc = {
    NID_aes_256_cbc, 16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key, aes_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_ctr = {
    NID_aes_256_ctr, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_ecb = {
    NID_aes_256_ecb, 16 /* block_size */, 32 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key, aes_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_ofb = {
    NID_aes_256_ofb128, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aes_init_key, aes_ofb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_gcm = {
    NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};
#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))

/* AES-NI section. */

static char aesni_capable(void) {
  return (OPENSSL_ia32cap_P[1] & (1 << (57 - 32))) != 0;
}

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                          const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    dat->block = (block128_f)aesni_decrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
  } else {
    ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    dat->block = (block128_f)aesni_encrypt;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    } else {
      dat->stream.cbc = NULL;
    }
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                            const uint8_t *in, size_t len) {
  aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);
  return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                            const uint8_t *in, size_t len) {
  size_t bl = ctx->cipher->block_size;

  if (len < bl) {
    return 1;
  }

  aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);
  return 1;
}

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                              const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

  if (!iv && !key) {
    return 1;
  }

  if (key) {
    aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
    CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt);
    gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    /* If we have an IV, set it directly; otherwise use the saved IV. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* If the key is set, use the IV now; otherwise save a copy for later. */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
    } else {
      OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }

  return 1;
}
static const EVP_CIPHER aesni_128_cbc = {
    NID_aes_128_cbc, 16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key, aesni_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ctr = {
    NID_aes_128_ctr, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ecb = {
    NID_aes_128_ecb, 16 /* block_size */, 16 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key, aesni_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ofb = {
    NID_aes_128_ofb128, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aesni_init_key, aes_ofb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

static const EVP_CIPHER aesni_192_cbc = {
    NID_aes_192_cbc, 16 /* block_size */, 24 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key, aesni_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_192_ctr = {
    NID_aes_192_ctr, 1 /* block_size */, 24 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_192_ecb = {
    NID_aes_192_ecb, 16 /* block_size */, 24 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key, aesni_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_192_gcm = {
    NID_aes_192_gcm, 1 /* block_size */, 24 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

static const EVP_CIPHER aesni_256_cbc = {
    NID_aes_256_cbc, 16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key, aesni_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ctr = {
    NID_aes_256_ctr, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ecb = {
    NID_aes_256_ecb, 16 /* block_size */, 32 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key, aesni_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ofb = {
    NID_aes_256_ofb128, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aesni_init_key, aes_ofb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_256_gcm = {
    NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};
#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    if (aesni_capable()) {                             \
      return &aesni_##keybits##_##mode;                \
    } else {                                           \
      return &aes_##keybits##_##mode;                  \
    }                                                  \
  }

#else /* ^^^ OPENSSL_X86_64 || OPENSSL_X86 */

static char aesni_capable(void) {
  return 0;
}

#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    return &aes_##keybits##_##mode;                    \
  }

#endif

EVP_CIPHER_FUNCTION(128, cbc)
EVP_CIPHER_FUNCTION(128, ctr)
EVP_CIPHER_FUNCTION(128, ecb)
EVP_CIPHER_FUNCTION(128, ofb)
EVP_CIPHER_FUNCTION(128, gcm)

EVP_CIPHER_FUNCTION(192, cbc)
EVP_CIPHER_FUNCTION(192, ctr)
EVP_CIPHER_FUNCTION(192, ecb)
EVP_CIPHER_FUNCTION(192, gcm)

EVP_CIPHER_FUNCTION(256, cbc)
EVP_CIPHER_FUNCTION(256, ctr)
EVP_CIPHER_FUNCTION(256, ecb)
EVP_CIPHER_FUNCTION(256, ofb)
EVP_CIPHER_FUNCTION(256, gcm)
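
/* A minimal usage sketch of the getters above, assuming only the public
 * <openssl/cipher.h> API; the function name and buffer sizing are
 * illustrative, and the block is excluded from the build with #if 0.
 * EVP_aes_128_cbc() transparently returns the AES-NI table when
 * aesni_capable() is true. */
#if 0
static int example_aes_128_cbc_encrypt(const uint8_t key[16],
                                       const uint8_t iv[16],
                                       const uint8_t *in, size_t in_len,
                                       uint8_t *out, int *out_len) {
  /* |out| must have room for |in_len| plus one block of PKCS#7 padding. */
  EVP_CIPHER_CTX ctx;
  EVP_CIPHER_CTX_init(&ctx);
  int len, total;
  if (!EVP_EncryptInit_ex(&ctx, EVP_aes_128_cbc(), NULL, key, iv) ||
      !EVP_EncryptUpdate(&ctx, out, &len, in, (int)in_len)) {
    EVP_CIPHER_CTX_cleanup(&ctx);
    return 0;
  }
  total = len;
  if (!EVP_EncryptFinal_ex(&ctx, out + total, &len)) {
    EVP_CIPHER_CTX_cleanup(&ctx);
    return 0;
  }
  *out_len = total + len;
  EVP_CIPHER_CTX_cleanup(&ctx);
  return 1;
}
#endif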
#define EVP_AEAD_AES_GCM_TAG_LEN 16

struct aead_aes_gcm_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  GCM128_CONTEXT gcm;
  ctr128_f ctr;
  uint8_t tag_len;
};

static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                             size_t key_len, size_t tag_len) {
  struct aead_aes_gcm_ctx *gcm_ctx;
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_ctx));
  if (gcm_ctx == NULL) {
    return 0;
  }

  gcm_ctx->ctr =
      aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm, NULL, key, key_len);
  gcm_ctx->tag_len = tag_len;
  ctx->aead_state = gcm_ctx;

  return 1;
}

static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  OPENSSL_cleanse(gcm_ctx, sizeof(struct aead_aes_gcm_ctx));
  OPENSSL_free(gcm_ctx);
}

static int aead_aes_gcm_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                             size_t *out_len, size_t max_out_len,
                             const uint8_t *nonce, size_t nonce_len,
                             const uint8_t *in, size_t in_len,
                             const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  GCM128_CONTEXT gcm;

  if (in_len + gcm_ctx->tag_len < in_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + gcm_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  OPENSSL_memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_encrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, out + in_len, gcm_ctx->tag_len);
  *out_len = in_len + gcm_ctx->tag_len;
  return 1;
}

static int aead_aes_gcm_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                             size_t *out_len, size_t max_out_len,
                             const uint8_t *nonce, size_t nonce_len,
                             const uint8_t *in, size_t in_len,
                             const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];
  size_t plaintext_len;
  GCM128_CONTEXT gcm;

  if (in_len < gcm_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  plaintext_len = in_len - gcm_ctx->tag_len;

  if (max_out_len < plaintext_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  OPENSSL_memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, key, in, out,
                                     in_len - gcm_ctx->tag_len, gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_decrypt(&gcm, key, in, out, in_len - gcm_ctx->tag_len)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, tag, gcm_ctx->tag_len);
  if (CRYPTO_memcmp(tag, in + plaintext_len, gcm_ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  *out_len = plaintext_len;
  return 1;
}

static const EVP_AEAD aead_aes_128_gcm = {
    16,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,
    NULL, /* init_with_direction */
    aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,
    aead_aes_gcm_open,
    NULL, /* get_iv */
};

static const EVP_AEAD aead_aes_256_gcm = {
    32,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,
    NULL, /* init_with_direction */
    aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,
    aead_aes_gcm_open,
    NULL, /* get_iv */
};

const EVP_AEAD *EVP_aead_aes_128_gcm(void) { return &aead_aes_128_gcm; }

const EVP_AEAD *EVP_aead_aes_256_gcm(void) { return &aead_aes_256_gcm; }
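
/* A minimal sealing sketch, assuming only the public <openssl/aead.h> API;
 * the function name and sizes are illustrative, and the block is excluded
 * from the build with #if 0. The 12-byte nonce must be unique for each
 * message under a given key. */
#if 0
static int example_aes_128_gcm_seal(const uint8_t key[16],
                                    const uint8_t nonce[12], const uint8_t *in,
                                    size_t in_len, uint8_t *out,
                                    size_t max_out, size_t *out_len) {
  EVP_AEAD_CTX aead;
  if (!EVP_AEAD_CTX_init(&aead, EVP_aead_aes_128_gcm(), key, 16,
                         EVP_AEAD_DEFAULT_TAG_LENGTH, NULL)) {
    return 0;
  }
  /* |out| needs room for |in_len| plus EVP_AEAD_AES_GCM_TAG_LEN bytes;
   * aead_aes_gcm_seal() above rejects anything smaller. */
  int ok = EVP_AEAD_CTX_seal(&aead, out, out_len, max_out, nonce, 12, in,
                             in_len, NULL /* no AD */, 0);
  EVP_AEAD_CTX_cleanup(&aead);
  return ok;
}
#endif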
#define EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN SHA256_DIGEST_LENGTH
#define EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN 12

struct aead_aes_ctr_hmac_sha256_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  ctr128_f ctr;
  block128_f block;
  SHA256_CTX inner_init_state;
  SHA256_CTX outer_init_state;
  uint8_t tag_len;
};

static void hmac_init(SHA256_CTX *out_inner, SHA256_CTX *out_outer,
                      const uint8_t hmac_key[32]) {
  static const size_t hmac_key_len = 32;
  uint8_t block[SHA256_CBLOCK];
  OPENSSL_memcpy(block, hmac_key, hmac_key_len);
  OPENSSL_memset(block + hmac_key_len, 0x36, sizeof(block) - hmac_key_len);

  unsigned i;
  for (i = 0; i < hmac_key_len; i++) {
    block[i] ^= 0x36;
  }

  SHA256_Init(out_inner);
  SHA256_Update(out_inner, block, sizeof(block));

  OPENSSL_memset(block + hmac_key_len, 0x5c, sizeof(block) - hmac_key_len);
  for (i = 0; i < hmac_key_len; i++) {
    block[i] ^= (0x36 ^ 0x5c);
  }

  SHA256_Init(out_outer);
  SHA256_Update(out_outer, block, sizeof(block));
}
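
/* The two precomputed states above implement the standard HMAC key schedule:
 * |out_inner| is SHA-256 after absorbing (key XOR ipad) and |out_outer| is
 * SHA-256 after absorbing (key XOR opad), so hmac_calculate() below can
 * produce HMAC(k, m) = H((k ^ opad) || H((k ^ ipad) || m)) by copying these
 * states rather than re-hashing the key for every message. The second XOR
 * loop applies 0x36 ^ 0x5c to turn the ipad block into the opad block in
 * place. */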
static int aead_aes_ctr_hmac_sha256_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                         size_t key_len, size_t tag_len) {
  struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx;
  static const size_t hmac_key_len = 32;

  if (key_len < hmac_key_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  const size_t aes_key_len = key_len - hmac_key_len;
  if (aes_key_len != 16 && aes_key_len != 32) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  aes_ctx = OPENSSL_malloc(sizeof(struct aead_aes_ctr_hmac_sha256_ctx));
  if (aes_ctx == NULL) {
    OPENSSL_PUT_ERROR(CIPHER, ERR_R_MALLOC_FAILURE);
    return 0;
  }

  aes_ctx->ctr =
      aes_ctr_set_key(&aes_ctx->ks.ks, NULL, &aes_ctx->block, key, aes_key_len);
  aes_ctx->tag_len = tag_len;
  hmac_init(&aes_ctx->inner_init_state, &aes_ctx->outer_init_state,
            key + aes_key_len);

  ctx->aead_state = aes_ctx;

  return 1;
}

static void aead_aes_ctr_hmac_sha256_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  OPENSSL_cleanse(aes_ctx, sizeof(struct aead_aes_ctr_hmac_sha256_ctx));
  OPENSSL_free(aes_ctx);
}

static void hmac_update_uint64(SHA256_CTX *sha256, uint64_t value) {
  unsigned i;
  uint8_t bytes[8];

  for (i = 0; i < sizeof(bytes); i++) {
    bytes[i] = value & 0xff;
    value >>= 8;
  }
  SHA256_Update(sha256, bytes, sizeof(bytes));
}

static void hmac_calculate(uint8_t out[SHA256_DIGEST_LENGTH],
                           const SHA256_CTX *inner_init_state,
                           const SHA256_CTX *outer_init_state,
                           const uint8_t *ad, size_t ad_len,
                           const uint8_t *nonce, const uint8_t *ciphertext,
                           size_t ciphertext_len) {
  SHA256_CTX sha256;
  OPENSSL_memcpy(&sha256, inner_init_state, sizeof(sha256));
  hmac_update_uint64(&sha256, ad_len);
  hmac_update_uint64(&sha256, ciphertext_len);
  SHA256_Update(&sha256, nonce, EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN);
  SHA256_Update(&sha256, ad, ad_len);

  /* Pad with zeros to the end of the SHA-256 block. */
  const unsigned num_padding =
      (SHA256_CBLOCK - ((sizeof(uint64_t) * 2 +
                         EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN + ad_len) %
                        SHA256_CBLOCK)) %
      SHA256_CBLOCK;
  uint8_t padding[SHA256_CBLOCK];
  OPENSSL_memset(padding, 0, num_padding);
  SHA256_Update(&sha256, padding, num_padding);

  SHA256_Update(&sha256, ciphertext, ciphertext_len);

  uint8_t inner_digest[SHA256_DIGEST_LENGTH];
  SHA256_Final(inner_digest, &sha256);

  OPENSSL_memcpy(&sha256, outer_init_state, sizeof(sha256));
  SHA256_Update(&sha256, inner_digest, sizeof(inner_digest));
  SHA256_Final(out, &sha256);
}

static void aead_aes_ctr_hmac_sha256_crypt(
    const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx, uint8_t *out,
    const uint8_t *in, size_t len, const uint8_t *nonce) {
  /* Since the AEAD operation is one-shot, keeping a buffer of unused keystream
   * bytes is pointless. However, |CRYPTO_ctr128_encrypt| requires it. */
  uint8_t partial_block_buffer[AES_BLOCK_SIZE];
  unsigned partial_block_offset = 0;
  OPENSSL_memset(partial_block_buffer, 0, sizeof(partial_block_buffer));

  uint8_t counter[AES_BLOCK_SIZE];
  OPENSSL_memcpy(counter, nonce, EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN);
  OPENSSL_memset(counter + EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN, 0, 4);

  if (aes_ctx->ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &aes_ctx->ks.ks, counter,
                                partial_block_buffer, &partial_block_offset,
                                aes_ctx->ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &aes_ctx->ks.ks, counter,
                          partial_block_buffer, &partial_block_offset,
                          aes_ctx->block);
  }
}

static int aead_aes_ctr_hmac_sha256_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                         size_t *out_len, size_t max_out_len,
                                         const uint8_t *nonce, size_t nonce_len,
                                         const uint8_t *in, size_t in_len,
                                         const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  const uint64_t in_len_64 = in_len;

  if (in_len + aes_ctx->tag_len < in_len ||
      /* This input is so large it would overflow the 32-bit block counter. */
      in_len_64 >= (UINT64_C(1) << 32) * AES_BLOCK_SIZE) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + aes_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  aead_aes_ctr_hmac_sha256_crypt(aes_ctx, out, in, in_len, nonce);

  uint8_t hmac_result[SHA256_DIGEST_LENGTH];
  hmac_calculate(hmac_result, &aes_ctx->inner_init_state,
                 &aes_ctx->outer_init_state, ad, ad_len, nonce, out, in_len);
  OPENSSL_memcpy(out + in_len, hmac_result, aes_ctx->tag_len);
  *out_len = in_len + aes_ctx->tag_len;

  return 1;
}

static int aead_aes_ctr_hmac_sha256_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                         size_t *out_len, size_t max_out_len,
                                         const uint8_t *nonce, size_t nonce_len,
                                         const uint8_t *in, size_t in_len,
                                         const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  size_t plaintext_len;

  if (in_len < aes_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  plaintext_len = in_len - aes_ctx->tag_len;

  if (max_out_len < plaintext_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  uint8_t hmac_result[SHA256_DIGEST_LENGTH];
  hmac_calculate(hmac_result, &aes_ctx->inner_init_state,
                 &aes_ctx->outer_init_state, ad, ad_len, nonce, in,
                 plaintext_len);
  if (CRYPTO_memcmp(hmac_result, in + plaintext_len, aes_ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  aead_aes_ctr_hmac_sha256_crypt(aes_ctx, out, in, plaintext_len, nonce);

  *out_len = plaintext_len;
  return 1;
}

static const EVP_AEAD aead_aes_128_ctr_hmac_sha256 = {
    16 /* AES key */ + 32 /* HMAC key */,
    12,                                   /* nonce length */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* overhead */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* max tag length */
    aead_aes_ctr_hmac_sha256_init,
    NULL /* init_with_direction */,
    aead_aes_ctr_hmac_sha256_cleanup,
    aead_aes_ctr_hmac_sha256_seal,
    aead_aes_ctr_hmac_sha256_open,
    NULL /* get_iv */,
};

static const EVP_AEAD aead_aes_256_ctr_hmac_sha256 = {
    32 /* AES key */ + 32 /* HMAC key */,
    12,                                   /* nonce length */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* overhead */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* max tag length */
    aead_aes_ctr_hmac_sha256_init,
    NULL /* init_with_direction */,
    aead_aes_ctr_hmac_sha256_cleanup,
    aead_aes_ctr_hmac_sha256_seal,
    aead_aes_ctr_hmac_sha256_open,
    NULL /* get_iv */,
};

const EVP_AEAD *EVP_aead_aes_128_ctr_hmac_sha256(void) {
  return &aead_aes_128_ctr_hmac_sha256;
}

const EVP_AEAD *EVP_aead_aes_256_ctr_hmac_sha256(void) {
  return &aead_aes_256_ctr_hmac_sha256;
}
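
/* A minimal initialisation sketch, assuming only the public <openssl/aead.h>
 * API; the helper name is illustrative, and the block is excluded from the
 * build with #if 0. The key is the AES key followed by the 32-byte HMAC key,
 * matching the split in aead_aes_ctr_hmac_sha256_init() above. */
#if 0
static int example_ctr_hmac_init(EVP_AEAD_CTX *aead,
                                 const uint8_t key[32 + 32]) {
  /* 32-byte AES-256 key || 32-byte HMAC-SHA-256 key. */
  return EVP_AEAD_CTX_init(aead, EVP_aead_aes_256_ctr_hmac_sha256(), key,
                           32 + 32, EVP_AEAD_DEFAULT_TAG_LENGTH, NULL);
}
#endif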
  1237. #if !defined(OPENSSL_SMALL)
  1238. #define EVP_AEAD_AES_GCM_SIV_NONCE_LEN 12
  1239. #define EVP_AEAD_AES_GCM_SIV_TAG_LEN 16
  1240. struct aead_aes_gcm_siv_ctx {
  1241. union {
  1242. double align;
  1243. AES_KEY ks;
  1244. } ks;
  1245. block128_f kgk_block;
  1246. unsigned is_256:1;
  1247. };
static int aead_aes_gcm_siv_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                 size_t key_len, size_t tag_len) {
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_SIV_TAG_LEN;
  }
  if (tag_len != EVP_AEAD_AES_GCM_SIV_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  struct aead_aes_gcm_siv_ctx *gcm_siv_ctx =
      OPENSSL_malloc(sizeof(struct aead_aes_gcm_siv_ctx));
  if (gcm_siv_ctx == NULL) {
    return 0;
  }
  OPENSSL_memset(gcm_siv_ctx, 0, sizeof(struct aead_aes_gcm_siv_ctx));

  if (aesni_capable()) {
    aesni_set_encrypt_key(key, key_len * 8, &gcm_siv_ctx->ks.ks);
    gcm_siv_ctx->kgk_block = (block128_f)aesni_encrypt;
  } else if (hwaes_capable()) {
    aes_hw_set_encrypt_key(key, key_len * 8, &gcm_siv_ctx->ks.ks);
    gcm_siv_ctx->kgk_block = (block128_f)aes_hw_encrypt;
  } else if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, key_len * 8, &gcm_siv_ctx->ks.ks);
    gcm_siv_ctx->kgk_block = (block128_f)vpaes_encrypt;
  } else {
    AES_set_encrypt_key(key, key_len * 8, &gcm_siv_ctx->ks.ks);
    gcm_siv_ctx->kgk_block = (block128_f)AES_encrypt;
  }

  gcm_siv_ctx->is_256 = (key_len == 32);
  ctx->aead_state = gcm_siv_ctx;

  return 1;
}

static void aead_aes_gcm_siv_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_gcm_siv_ctx *gcm_siv_ctx = ctx->aead_state;
  OPENSSL_cleanse(gcm_siv_ctx, sizeof(struct aead_aes_gcm_siv_ctx));
  OPENSSL_free(gcm_siv_ctx);
}
/* gcm_siv_crypt encrypts (or decrypts; the operation is identical) |in_len|
 * bytes from |in| to |out|, using the block function |enc_block| with |key| in
 * counter mode, starting at |initial_counter|. This differs from the
 * traditional counter-mode code in that the counter is handled little-endian,
 * only the first four bytes are used, and the GCM-SIV tweak is applied to the
 * final byte. The |in| and |out| pointers may be equal but otherwise must not
 * alias. */
static void gcm_siv_crypt(uint8_t *out, const uint8_t *in, size_t in_len,
                          const uint8_t initial_counter[AES_BLOCK_SIZE],
                          block128_f enc_block, const AES_KEY *key) {
  union {
    uint32_t w[4];
    uint8_t c[16];
  } counter;

  OPENSSL_memcpy(counter.c, initial_counter, AES_BLOCK_SIZE);
  counter.c[15] |= 0x80;

  for (size_t done = 0; done < in_len;) {
    uint8_t keystream[AES_BLOCK_SIZE];
    enc_block(counter.c, keystream, key);
    counter.w[0]++;

    size_t todo = AES_BLOCK_SIZE;
    if (in_len - done < todo) {
      todo = in_len - done;
    }

    for (size_t i = 0; i < todo; i++) {
      out[done + i] = keystream[i] ^ in[done + i];
    }

    done += todo;
  }
}
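/* Counter-layout sketch (illustrative, restating the code above): the GCM-SIV
 * tweak sets the top bit of byte 15 once, before the loop, and the 32-bit
 * little-endian block counter occupies bytes 0..3. So if the initial counter
 * has low word N, successive keystream blocks are generated from N, N+1,
 * N+2, ... in those four bytes while bytes 4..15 stay fixed. */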
/* gcm_siv_polyval evaluates POLYVAL at |auth_key| on the given plaintext and
 * AD. The result is written to |out_tag|. */
static void gcm_siv_polyval(
    uint8_t out_tag[16], const uint8_t *in, size_t in_len, const uint8_t *ad,
    size_t ad_len, const uint8_t auth_key[16],
    const uint8_t nonce[EVP_AEAD_AES_GCM_SIV_NONCE_LEN]) {
  struct polyval_ctx polyval_ctx;
  CRYPTO_POLYVAL_init(&polyval_ctx, auth_key);

  CRYPTO_POLYVAL_update_blocks(&polyval_ctx, ad, ad_len & ~15);

  uint8_t scratch[16];
  if (ad_len & 15) {
    OPENSSL_memset(scratch, 0, sizeof(scratch));
    OPENSSL_memcpy(scratch, &ad[ad_len & ~15], ad_len & 15);
    CRYPTO_POLYVAL_update_blocks(&polyval_ctx, scratch, sizeof(scratch));
  }

  CRYPTO_POLYVAL_update_blocks(&polyval_ctx, in, in_len & ~15);
  if (in_len & 15) {
    OPENSSL_memset(scratch, 0, sizeof(scratch));
    OPENSSL_memcpy(scratch, &in[in_len & ~15], in_len & 15);
    CRYPTO_POLYVAL_update_blocks(&polyval_ctx, scratch, sizeof(scratch));
  }

  union {
    uint8_t c[16];
    struct {
      uint64_t ad;
      uint64_t in;
    } bitlens;
  } length_block;

  length_block.bitlens.ad = ad_len * 8;
  length_block.bitlens.in = in_len * 8;
  CRYPTO_POLYVAL_update_blocks(&polyval_ctx, length_block.c,
                               sizeof(length_block));

  CRYPTO_POLYVAL_finish(&polyval_ctx, out_tag);
  for (size_t i = 0; i < EVP_AEAD_AES_GCM_SIV_NONCE_LEN; i++) {
    out_tag[i] ^= nonce[i];
  }
  out_tag[15] &= 0x7f;
}
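/* Worked example of the layout above (illustrative): with ad_len = 20 and
 * in_len = 5, POLYVAL is updated with one full 16-byte AD block, then the
 * remaining 4 AD bytes zero-padded to 16, then the 5 plaintext bytes
 * zero-padded to 16, and finally a length block holding the bit counts 160
 * and 40 as two 64-bit values. The result is XORed with the nonce and the
 * most significant bit of its last byte is cleared, as RFC 8452 requires. */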
/* gcm_siv_record_keys contains the keys used for a specific GCM-SIV record. */
struct gcm_siv_record_keys {
  uint8_t auth_key[16];
  union {
    double align;
    AES_KEY ks;
  } enc_key;
  block128_f enc_block;
};
/* gcm_siv_keys calculates the keys for a specific GCM-SIV record with the
 * given nonce and writes them to |*out_keys|. */
static void gcm_siv_keys(
    const struct aead_aes_gcm_siv_ctx *gcm_siv_ctx,
    struct gcm_siv_record_keys *out_keys,
    const uint8_t nonce[EVP_AEAD_AES_GCM_SIV_NONCE_LEN]) {
  const AES_KEY *const key = &gcm_siv_ctx->ks.ks;
  uint8_t key_material[(128 /* POLYVAL key */ + 256 /* max AES key */) / 8];
  const size_t blocks_needed = gcm_siv_ctx->is_256 ? 6 : 4;

  uint8_t counter[AES_BLOCK_SIZE];
  OPENSSL_memset(counter, 0, AES_BLOCK_SIZE - EVP_AEAD_AES_GCM_SIV_NONCE_LEN);
  OPENSSL_memcpy(counter + AES_BLOCK_SIZE - EVP_AEAD_AES_GCM_SIV_NONCE_LEN,
                 nonce, EVP_AEAD_AES_GCM_SIV_NONCE_LEN);
  for (size_t i = 0; i < blocks_needed; i++) {
    counter[0] = i;

    uint8_t ciphertext[AES_BLOCK_SIZE];
    gcm_siv_ctx->kgk_block(counter, ciphertext, key);
    OPENSSL_memcpy(&key_material[i * 8], ciphertext, 8);
  }

  OPENSSL_memcpy(out_keys->auth_key, key_material, 16);
  aes_ctr_set_key(&out_keys->enc_key.ks, NULL, &out_keys->enc_block,
                  key_material + 16, gcm_siv_ctx->is_256 ? 32 : 16);
}
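/* Derivation sketch (restating the loop above, per RFC 8452): each counter
 * block is the 32-bit little-endian block index in bytes 0..3 followed by the
 * nonce in bytes 4..15, and only the first 8 bytes of each AES output are
 * kept. Blocks 0-1 therefore form the 16-byte POLYVAL authentication key, and
 * blocks 2-3 (plus 4-5 for AES-256) form the record-encryption key. */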
static int aead_aes_gcm_siv_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                 size_t *out_len, size_t max_out_len,
                                 const uint8_t *nonce, size_t nonce_len,
                                 const uint8_t *in, size_t in_len,
                                 const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_siv_ctx *gcm_siv_ctx = ctx->aead_state;
  const uint64_t in_len_64 = in_len;
  const uint64_t ad_len_64 = ad_len;

  if (in_len + EVP_AEAD_AES_GCM_SIV_TAG_LEN < in_len ||
      in_len_64 > (UINT64_C(1) << 36) ||
      ad_len_64 >= (UINT64_C(1) << 61)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + EVP_AEAD_AES_GCM_SIV_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_GCM_SIV_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  struct gcm_siv_record_keys keys;
  gcm_siv_keys(gcm_siv_ctx, &keys, nonce);

  uint8_t tag[16];
  gcm_siv_polyval(tag, in, in_len, ad, ad_len, keys.auth_key, nonce);
  keys.enc_block(tag, tag, &keys.enc_key.ks);

  gcm_siv_crypt(out, in, in_len, tag, keys.enc_block, &keys.enc_key.ks);

  OPENSSL_memcpy(&out[in_len], tag, EVP_AEAD_AES_GCM_SIV_TAG_LEN);
  *out_len = in_len + EVP_AEAD_AES_GCM_SIV_TAG_LEN;

  return 1;
}
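/* Layout note (illustrative): on success, |out| holds the ciphertext in
 * out[0..in_len) followed by the 16-byte encrypted POLYVAL tag, and that same
 * tag doubles as the initial counter for the CTR encryption above. This is
 * the SIV construction: the nonce/tag used for encryption is itself derived
 * from the plaintext and AD. */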
static int aead_aes_gcm_siv_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                 size_t *out_len, size_t max_out_len,
                                 const uint8_t *nonce, size_t nonce_len,
                                 const uint8_t *in, size_t in_len,
                                 const uint8_t *ad, size_t ad_len) {
  const uint64_t ad_len_64 = ad_len;
  if (ad_len_64 >= (UINT64_C(1) << 61)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  const uint64_t in_len_64 = in_len;
  if (in_len < EVP_AEAD_AES_GCM_SIV_TAG_LEN ||
      in_len_64 > (UINT64_C(1) << 36) + AES_BLOCK_SIZE) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_GCM_SIV_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  const struct aead_aes_gcm_siv_ctx *gcm_siv_ctx = ctx->aead_state;
  const size_t plaintext_len = in_len - EVP_AEAD_AES_GCM_SIV_TAG_LEN;

  if (max_out_len < plaintext_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  struct gcm_siv_record_keys keys;
  gcm_siv_keys(gcm_siv_ctx, &keys, nonce);

  gcm_siv_crypt(out, in, plaintext_len, &in[plaintext_len], keys.enc_block,
                &keys.enc_key.ks);

  uint8_t expected_tag[EVP_AEAD_AES_GCM_SIV_TAG_LEN];
  gcm_siv_polyval(expected_tag, out, plaintext_len, ad, ad_len, keys.auth_key,
                  nonce);
  keys.enc_block(expected_tag, expected_tag, &keys.enc_key.ks);

  if (CRYPTO_memcmp(expected_tag, &in[plaintext_len], sizeof(expected_tag)) !=
      0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  *out_len = plaintext_len;
  return 1;
}
static const EVP_AEAD aead_aes_128_gcm_siv = {
    16,                             /* key length */
    EVP_AEAD_AES_GCM_SIV_NONCE_LEN, /* nonce length */
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,   /* overhead */
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,   /* max tag length */

    aead_aes_gcm_siv_init,
    NULL /* init_with_direction */,
    aead_aes_gcm_siv_cleanup,
    aead_aes_gcm_siv_seal,
    aead_aes_gcm_siv_open,
    NULL /* get_iv */,
};

static const EVP_AEAD aead_aes_256_gcm_siv = {
    32,                             /* key length */
    EVP_AEAD_AES_GCM_SIV_NONCE_LEN, /* nonce length */
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,   /* overhead */
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,   /* max tag length */

    aead_aes_gcm_siv_init,
    NULL /* init_with_direction */,
    aead_aes_gcm_siv_cleanup,
    aead_aes_gcm_siv_seal,
    aead_aes_gcm_siv_open,
    NULL /* get_iv */,
};

const EVP_AEAD *EVP_aead_aes_128_gcm_siv(void) {
  return &aead_aes_128_gcm_siv;
}

const EVP_AEAD *EVP_aead_aes_256_gcm_siv(void) {
  return &aead_aes_256_gcm_siv;
}
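/* Example (illustrative sketch, not part of this file): a seal/open round
 * trip with AES-128-GCM-SIV through the public EVP_AEAD API. The key, nonce
 * and message values are placeholders and the helper name is invented; the
 * block is compiled out with #if 0 so it cannot affect the build. */
#if 0
static int gcm_siv_example(void) {
  uint8_t key[16] = {0};
  uint8_t nonce[EVP_AEAD_AES_GCM_SIV_NONCE_LEN] = {0};
  const uint8_t plaintext[] = "example";

  EVP_AEAD_CTX ctx;
  if (!EVP_AEAD_CTX_init(&ctx, EVP_aead_aes_128_gcm_siv(), key, sizeof(key),
                         EVP_AEAD_DEFAULT_TAG_LENGTH, NULL)) {
    return 0;
  }

  /* Seal writes ciphertext || 16-byte tag. */
  uint8_t sealed[sizeof(plaintext) + EVP_AEAD_AES_GCM_SIV_TAG_LEN];
  size_t sealed_len;
  if (!EVP_AEAD_CTX_seal(&ctx, sealed, &sealed_len, sizeof(sealed), nonce,
                         sizeof(nonce), plaintext, sizeof(plaintext), NULL,
                         0)) {
    EVP_AEAD_CTX_cleanup(&ctx);
    return 0;
  }

  /* Open verifies the tag and recovers the plaintext. */
  uint8_t opened[sizeof(sealed)];
  size_t opened_len;
  int ok = EVP_AEAD_CTX_open(&ctx, opened, &opened_len, sizeof(opened), nonce,
                             sizeof(nonce), sealed, sealed_len, NULL, 0);
  EVP_AEAD_CTX_cleanup(&ctx);
  return ok;
}
#endif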
#endif /* !OPENSSL_SMALL */

int EVP_has_aes_hardware(void) {
#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
  return aesni_capable() && crypto_gcm_clmul_enabled();
#elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
  return hwaes_capable() && CRYPTO_is_ARMv8_PMULL_capable();
#else
  return 0;
#endif
}
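/* Usage sketch (illustrative, not part of this file): EVP_has_aes_hardware
 * lets callers pick an AEAD based on whether AES and the accompanying GHASH
 * multiply are hardware-accelerated, e.g. preferring AES-GCM on such machines
 * and ChaCha20-Poly1305 elsewhere. The helper name is invented; both AEAD
 * getters are existing BoringSSL functions. */
#if 0
static const EVP_AEAD *pick_aead(void) {
  return EVP_has_aes_hardware() ? EVP_aead_aes_128_gcm()
                                : EVP_aead_chacha20_poly1305();
}
#endif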