e_aesgcmsiv.c

/* Copyright (c) 2017, Google Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
 * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
 * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
 * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */

#include <openssl/aead.h>

#include <assert.h>

#include <openssl/cipher.h>
#include <openssl/cpu.h>
#include <openssl/crypto.h>
#include <openssl/err.h>

#include "../fipsmodule/cipher/internal.h"

#define EVP_AEAD_AES_GCM_SIV_NONCE_LEN 12
#define EVP_AEAD_AES_GCM_SIV_TAG_LEN 16

#if defined(OPENSSL_X86_64) && !defined(OPENSSL_NO_ASM)

// Optimised AES-GCM-SIV

struct aead_aes_gcm_siv_asm_ctx {
  alignas(16) uint8_t key[16 * 15];
  int is_128_bit;
  // ptr contains the original pointer from |OPENSSL_malloc|, which may only be
  // 8-byte aligned. When freeing this structure, actually call |OPENSSL_free|
  // on this pointer.
  void *ptr;
};

// aes128gcmsiv_aes_ks writes an AES-128 key schedule for |key| to
// |out_expanded_key|.
extern void aes128gcmsiv_aes_ks(
    const uint8_t key[16], uint8_t out_expanded_key[16 * 15]);

// aes256gcmsiv_aes_ks writes an AES-256 key schedule for |key| to
// |out_expanded_key|.
extern void aes256gcmsiv_aes_ks(
    const uint8_t key[16], uint8_t out_expanded_key[16 * 15]);

static int aead_aes_gcm_siv_asm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                     size_t key_len, size_t tag_len) {
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0;  // EVP_AEAD_CTX_init should catch this.
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_SIV_TAG_LEN;
  }

  if (tag_len != EVP_AEAD_AES_GCM_SIV_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  char *ptr = OPENSSL_malloc(sizeof(struct aead_aes_gcm_siv_asm_ctx) + 8);
  if (ptr == NULL) {
    return 0;
  }
  assert((((uintptr_t)ptr) & 7) == 0);

  // gcm_siv_ctx needs to be 16-byte aligned in a cross-platform way.
  struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx =
      (struct aead_aes_gcm_siv_asm_ctx *)(ptr + (((uintptr_t)ptr) & 8));
  assert((((uintptr_t)gcm_siv_ctx) & 15) == 0);
  gcm_siv_ctx->ptr = ptr;

  if (key_bits == 128) {
    aes128gcmsiv_aes_ks(key, &gcm_siv_ctx->key[0]);
    gcm_siv_ctx->is_128_bit = 1;
  } else {
    aes256gcmsiv_aes_ks(key, &gcm_siv_ctx->key[0]);
    gcm_siv_ctx->is_128_bit = 0;
  }

  ctx->aead_state = gcm_siv_ctx;
  ctx->tag_len = tag_len;

  return 1;
}

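// A worked example of the alignment fix-up above (illustrative note, not from
// the original file): |OPENSSL_malloc| guarantees only 8-byte alignment, so
// |ptr| sits either 0 or 8 bytes past a 16-byte boundary. Adding |ptr & 8|
// rounds it up to the next 16-byte boundary in both cases:
//
//   ptr % 16 == 0:  ptr + (ptr & 8) == ptr + 0  -> already 16-byte aligned
//   ptr % 16 == 8:  ptr + (ptr & 8) == ptr + 8  -> now 16-byte aligned
//
// The extra 8 bytes requested from |OPENSSL_malloc| leave room for the
// possible shift.
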
static void aead_aes_gcm_siv_asm_cleanup(EVP_AEAD_CTX *ctx) {
  const struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx = ctx->aead_state;
  OPENSSL_free(gcm_siv_ctx->ptr);
}

// aesgcmsiv_polyval_horner updates the POLYVAL value in |in_out_poly| to
// include a number (|in_blocks|) of 16-byte blocks of data from |in|, given
// the POLYVAL key in |key|.
extern void aesgcmsiv_polyval_horner(const uint8_t in_out_poly[16],
                                     const uint8_t key[16], const uint8_t *in,
                                     size_t in_blocks);

// aesgcmsiv_htable_init writes powers 1..8 of |auth_key| to |out_htable|.
extern void aesgcmsiv_htable_init(uint8_t out_htable[16 * 8],
                                  const uint8_t auth_key[16]);

// aesgcmsiv_htable6_init writes powers 1..6 of |auth_key| to |out_htable|.
extern void aesgcmsiv_htable6_init(uint8_t out_htable[16 * 6],
                                   const uint8_t auth_key[16]);

// aesgcmsiv_htable_polyval updates the POLYVAL value in |in_out_poly| to
// include |in_len| bytes of data from |in|. (Where |in_len| must be a multiple
// of 16.) It uses the precomputed powers of the key given in |htable|.
extern void aesgcmsiv_htable_polyval(const uint8_t htable[16 * 8],
                                     const uint8_t *in, size_t in_len,
                                     uint8_t in_out_poly[16]);

// aes128gcmsiv_dec decrypts |in_len| & ~15 bytes from |in| and writes them to
// |out|. (The full value of |in_len| is still used to find the authentication
// tag appended to the ciphertext, however, so must not be pre-masked.)
//
// |in| and |out| may be equal, but must not otherwise overlap.
//
// While decrypting, it updates the POLYVAL value found at the beginning of
// |in_out_calculated_tag_and_scratch| and writes the updated value back before
// return. During execution, it may use the whole of this space for other
// purposes. In order to decrypt and update the POLYVAL value, it uses the
// expanded key from |key| and the table of powers in |htable|.
extern void aes128gcmsiv_dec(const uint8_t *in, uint8_t *out,
                             uint8_t in_out_calculated_tag_and_scratch[16 * 8],
                             const uint8_t htable[16 * 6],
                             const struct aead_aes_gcm_siv_asm_ctx *key,
                             size_t in_len);

// aes256gcmsiv_dec acts like |aes128gcmsiv_dec|, but for AES-256.
extern void aes256gcmsiv_dec(const uint8_t *in, uint8_t *out,
                             uint8_t in_out_calculated_tag_and_scratch[16 * 8],
                             const uint8_t htable[16 * 6],
                             const struct aead_aes_gcm_siv_asm_ctx *key,
                             size_t in_len);

// aes128gcmsiv_kdf performs the AES-GCM-SIV KDF given the expanded key from
// |key_schedule| and the nonce in |nonce|. Note that, while only 12 bytes of
// the nonce are used, 16 bytes are read and so the value must be
// right-padded.
extern void aes128gcmsiv_kdf(const uint8_t nonce[16],
                             uint64_t out_key_material[8],
                             const uint8_t *key_schedule);

// aes256gcmsiv_kdf acts like |aes128gcmsiv_kdf|, but for AES-256.
extern void aes256gcmsiv_kdf(const uint8_t nonce[16],
                             uint64_t out_key_material[12],
                             const uint8_t *key_schedule);

// aes128gcmsiv_aes_ks_enc_x1 performs a key expansion of the AES-128 key in
// |key|, writes the expanded key to |out_expanded_key| and encrypts a single
// block from |in| to |out|.
extern void aes128gcmsiv_aes_ks_enc_x1(const uint8_t in[16], uint8_t out[16],
                                       uint8_t out_expanded_key[16 * 15],
                                       const uint64_t key[2]);

// aes256gcmsiv_aes_ks_enc_x1 acts like |aes128gcmsiv_aes_ks_enc_x1|, but for
// AES-256.
extern void aes256gcmsiv_aes_ks_enc_x1(const uint8_t in[16], uint8_t out[16],
                                       uint8_t out_expanded_key[16 * 15],
                                       const uint64_t key[4]);

// aes128gcmsiv_ecb_enc_block encrypts a single block from |in| to |out| using
// the expanded key in |expanded_key|.
extern void aes128gcmsiv_ecb_enc_block(
    const uint8_t in[16], uint8_t out[16],
    const struct aead_aes_gcm_siv_asm_ctx *expanded_key);

// aes256gcmsiv_ecb_enc_block acts like |aes128gcmsiv_ecb_enc_block|, but for
// AES-256.
extern void aes256gcmsiv_ecb_enc_block(
    const uint8_t in[16], uint8_t out[16],
    const struct aead_aes_gcm_siv_asm_ctx *expanded_key);

// aes128gcmsiv_enc_msg_x4 encrypts |in_len| bytes from |in| to |out| using the
// expanded key from |key|. (The value of |in_len| must be a multiple of 16.)
// The |in| and |out| buffers may be equal but must not otherwise overlap. The
// initial counter is constructed from the given |tag| as required by
// AES-GCM-SIV.
extern void aes128gcmsiv_enc_msg_x4(const uint8_t *in, uint8_t *out,
                                    const uint8_t *tag,
                                    const struct aead_aes_gcm_siv_asm_ctx *key,
                                    size_t in_len);

// aes256gcmsiv_enc_msg_x4 acts like |aes128gcmsiv_enc_msg_x4|, but for
// AES-256.
extern void aes256gcmsiv_enc_msg_x4(const uint8_t *in, uint8_t *out,
                                    const uint8_t *tag,
                                    const struct aead_aes_gcm_siv_asm_ctx *key,
                                    size_t in_len);

// aes128gcmsiv_enc_msg_x8 acts like |aes128gcmsiv_enc_msg_x4|, but is
// optimised for longer messages.
extern void aes128gcmsiv_enc_msg_x8(const uint8_t *in, uint8_t *out,
                                    const uint8_t *tag,
                                    const struct aead_aes_gcm_siv_asm_ctx *key,
                                    size_t in_len);

// aes256gcmsiv_enc_msg_x8 acts like |aes256gcmsiv_enc_msg_x4|, but is
// optimised for longer messages.
extern void aes256gcmsiv_enc_msg_x8(const uint8_t *in, uint8_t *out,
                                    const uint8_t *tag,
                                    const struct aead_aes_gcm_siv_asm_ctx *key,
                                    size_t in_len);

// gcm_siv_asm_polyval evaluates POLYVAL at |auth_key| on the given plaintext
// and AD. The result is written to |out_tag|.
static void gcm_siv_asm_polyval(uint8_t out_tag[16], const uint8_t *in,
                                size_t in_len, const uint8_t *ad, size_t ad_len,
                                const uint8_t auth_key[16],
                                const uint8_t nonce[12]) {
  OPENSSL_memset(out_tag, 0, 16);
  const size_t ad_blocks = ad_len / 16;
  const size_t in_blocks = in_len / 16;
  int htable_init = 0;
  alignas(16) uint8_t htable[16 * 8];

  if (ad_blocks > 8 || in_blocks > 8) {
    htable_init = 1;
    aesgcmsiv_htable_init(htable, auth_key);
  }

  if (htable_init) {
    aesgcmsiv_htable_polyval(htable, ad, ad_len & ~15, out_tag);
  } else {
    aesgcmsiv_polyval_horner(out_tag, auth_key, ad, ad_blocks);
  }

  uint8_t scratch[16];
  if (ad_len & 15) {
    OPENSSL_memset(scratch, 0, sizeof(scratch));
    OPENSSL_memcpy(scratch, &ad[ad_len & ~15], ad_len & 15);
    aesgcmsiv_polyval_horner(out_tag, auth_key, scratch, 1);
  }

  if (htable_init) {
    aesgcmsiv_htable_polyval(htable, in, in_len & ~15, out_tag);
  } else {
    aesgcmsiv_polyval_horner(out_tag, auth_key, in, in_blocks);
  }

  if (in_len & 15) {
    OPENSSL_memset(scratch, 0, sizeof(scratch));
    OPENSSL_memcpy(scratch, &in[in_len & ~15], in_len & 15);
    aesgcmsiv_polyval_horner(out_tag, auth_key, scratch, 1);
  }

  union {
    uint8_t c[16];
    struct {
      uint64_t ad;
      uint64_t in;
    } bitlens;
  } length_block;

  length_block.bitlens.ad = ad_len * 8;
  length_block.bitlens.in = in_len * 8;
  aesgcmsiv_polyval_horner(out_tag, auth_key, length_block.c, 1);

  for (size_t i = 0; i < 12; i++) {
    out_tag[i] ^= nonce[i];
  }

  out_tag[15] &= 0x7f;
}

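// For reference (illustrative note, not from the original file), RFC 8452
// builds the tag as follows, which the function above and its callers mirror:
//
//   S_s = POLYVAL(auth_key, padded AD || padded plaintext || length_block)
//   S_s[0..11] ^= nonce;  S_s[15] &= 0x7f
//   tag = AES-Encrypt(message-encryption key, S_s)
//
// where |length_block| holds the AD and plaintext lengths in *bits* as two
// little-endian 64-bit values, matching |length_block.bitlens| above. The
// final AES encryption of the masked POLYVAL output happens in the callers.
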
// aead_aes_gcm_siv_asm_crypt_last_block handles the encryption/decryption
// (same thing in CTR mode) of the final block of a plaintext/ciphertext. It
// writes |in_len| & 15 bytes at |out| + (|in_len| & ~15), based on an initial
// counter derived from |tag|.
static void aead_aes_gcm_siv_asm_crypt_last_block(
    int is_128_bit, uint8_t *out, const uint8_t *in, size_t in_len,
    const uint8_t tag[16],
    const struct aead_aes_gcm_siv_asm_ctx *enc_key_expanded) {
  alignas(16) union {
    uint8_t c[16];
    uint32_t u32[4];
  } counter;
  OPENSSL_memcpy(&counter, tag, sizeof(counter));
  counter.c[15] |= 0x80;
  counter.u32[0] += in_len / 16;

  if (is_128_bit) {
    aes128gcmsiv_ecb_enc_block(&counter.c[0], &counter.c[0], enc_key_expanded);
  } else {
    aes256gcmsiv_ecb_enc_block(&counter.c[0], &counter.c[0], enc_key_expanded);
  }

  const size_t last_bytes_offset = in_len & ~15;
  const size_t last_bytes_len = in_len & 15;
  uint8_t *last_bytes_out = &out[last_bytes_offset];
  const uint8_t *last_bytes_in = &in[last_bytes_offset];
  for (size_t i = 0; i < last_bytes_len; i++) {
    last_bytes_out[i] = last_bytes_in[i] ^ counter.c[i];
  }
}

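// Worked example (illustrative, not from the original file): for
// |in_len| == 35, the bulk enc_msg functions have already processed
// 35 & ~15 == 32 bytes using the first two counter values. This function
// therefore takes |tag| (with the top bit of its final byte forced on), adds
// 35 / 16 == 2 to the first little-endian 32-bit word, encrypts that block,
// and XORs the keystream into the remaining 35 & 15 == 3 bytes at offset 32.
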
// aead_aes_gcm_siv_kdf calculates the record encryption and authentication
// keys given the |nonce|.
static void aead_aes_gcm_siv_kdf(
    int is_128_bit, const struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx,
    uint64_t out_record_auth_key[2], uint64_t out_record_enc_key[4],
    const uint8_t nonce[12]) {
  alignas(16) uint8_t padded_nonce[16];
  OPENSSL_memcpy(padded_nonce, nonce, 12);

  alignas(16) uint64_t key_material[12];
  if (is_128_bit) {
    aes128gcmsiv_kdf(padded_nonce, key_material, &gcm_siv_ctx->key[0]);
    out_record_enc_key[0] = key_material[4];
    out_record_enc_key[1] = key_material[6];
  } else {
    aes256gcmsiv_kdf(padded_nonce, key_material, &gcm_siv_ctx->key[0]);
    out_record_enc_key[0] = key_material[4];
    out_record_enc_key[1] = key_material[6];
    out_record_enc_key[2] = key_material[8];
    out_record_enc_key[3] = key_material[10];
  }

  out_record_auth_key[0] = key_material[0];
  out_record_auth_key[1] = key_material[2];
}

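// A note on the indexing above (illustrative, not from the original file):
// the RFC 8452 KDF encrypts a series of counter blocks and keeps only the
// first eight bytes of each 16-byte ciphertext. The assembly KDF writes the
// whole blocks, so the useful halves sit at the even indices of
// |key_material|:
//
//   key_material[0], key_material[2]                 -> 16-byte POLYVAL key
//   key_material[4], key_material[6] (, [8], [10])   -> 16- or 32-byte AES key
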
static int aead_aes_gcm_siv_asm_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx = ctx->aead_state;
  const uint64_t in_len_64 = in_len;
  const uint64_t ad_len_64 = ad_len;

  if (in_len_64 > (UINT64_C(1) << 36) ||
      ad_len_64 >= (UINT64_C(1) << 61)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_tag_len < EVP_AEAD_AES_GCM_SIV_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_GCM_SIV_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  alignas(16) uint64_t record_auth_key[2];
  alignas(16) uint64_t record_enc_key[4];
  aead_aes_gcm_siv_kdf(gcm_siv_ctx->is_128_bit, gcm_siv_ctx, record_auth_key,
                       record_enc_key, nonce);

  alignas(16) uint8_t tag[16] = {0};
  gcm_siv_asm_polyval(tag, in, in_len, ad, ad_len,
                      (const uint8_t *)record_auth_key, nonce);

  struct aead_aes_gcm_siv_asm_ctx enc_key_expanded;

  if (gcm_siv_ctx->is_128_bit) {
    aes128gcmsiv_aes_ks_enc_x1(tag, tag, &enc_key_expanded.key[0],
                               record_enc_key);

    if (in_len < 128) {
      aes128gcmsiv_enc_msg_x4(in, out, tag, &enc_key_expanded, in_len & ~15);
    } else {
      aes128gcmsiv_enc_msg_x8(in, out, tag, &enc_key_expanded, in_len & ~15);
    }
  } else {
    aes256gcmsiv_aes_ks_enc_x1(tag, tag, &enc_key_expanded.key[0],
                               record_enc_key);

    if (in_len < 128) {
      aes256gcmsiv_enc_msg_x4(in, out, tag, &enc_key_expanded, in_len & ~15);
    } else {
      aes256gcmsiv_enc_msg_x8(in, out, tag, &enc_key_expanded, in_len & ~15);
    }
  }

  if (in_len & 15) {
    aead_aes_gcm_siv_asm_crypt_last_block(gcm_siv_ctx->is_128_bit, out, in,
                                          in_len, tag, &enc_key_expanded);
  }

  OPENSSL_memcpy(out_tag, tag, sizeof(tag));
  *out_tag_len = EVP_AEAD_AES_GCM_SIV_TAG_LEN;

  return 1;
}

// TODO(martinkr): Add aead_aes_gcm_siv_asm_open_gather. N.B. aes128gcmsiv_dec
// expects ciphertext and tag in a contiguous buffer.
static int aead_aes_gcm_siv_asm_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                     size_t *out_len, size_t max_out_len,
                                     const uint8_t *nonce, size_t nonce_len,
                                     const uint8_t *in, size_t in_len,
                                     const uint8_t *ad, size_t ad_len) {
  const uint64_t ad_len_64 = ad_len;
  if (ad_len_64 >= (UINT64_C(1) << 61)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  const uint64_t in_len_64 = in_len;
  if (in_len < EVP_AEAD_AES_GCM_SIV_TAG_LEN ||
      in_len_64 > (UINT64_C(1) << 36) + AES_BLOCK_SIZE) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  const struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx = ctx->aead_state;
  const size_t plaintext_len = in_len - EVP_AEAD_AES_GCM_SIV_TAG_LEN;
  const uint8_t *const given_tag = in + plaintext_len;

  if (max_out_len < plaintext_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  alignas(16) uint64_t record_auth_key[2];
  alignas(16) uint64_t record_enc_key[4];
  aead_aes_gcm_siv_kdf(gcm_siv_ctx->is_128_bit, gcm_siv_ctx, record_auth_key,
                       record_enc_key, nonce);

  struct aead_aes_gcm_siv_asm_ctx expanded_key;
  if (gcm_siv_ctx->is_128_bit) {
    aes128gcmsiv_aes_ks((const uint8_t *)record_enc_key, &expanded_key.key[0]);
  } else {
    aes256gcmsiv_aes_ks((const uint8_t *)record_enc_key, &expanded_key.key[0]);
  }

  // calculated_tag is 16*8 bytes, rather than 16 bytes, because
  // aes[128|256]gcmsiv_dec uses the extra as scratch space.
  alignas(16) uint8_t calculated_tag[16 * 8] = {0};

  OPENSSL_memset(calculated_tag, 0, EVP_AEAD_AES_GCM_SIV_TAG_LEN);
  const size_t ad_blocks = ad_len / 16;
  aesgcmsiv_polyval_horner(calculated_tag, (const uint8_t *)record_auth_key, ad,
                           ad_blocks);

  uint8_t scratch[16];
  if (ad_len & 15) {
    OPENSSL_memset(scratch, 0, sizeof(scratch));
    OPENSSL_memcpy(scratch, &ad[ad_len & ~15], ad_len & 15);
    aesgcmsiv_polyval_horner(calculated_tag, (const uint8_t *)record_auth_key,
                             scratch, 1);
  }

  alignas(16) uint8_t htable[16 * 6];
  aesgcmsiv_htable6_init(htable, (const uint8_t *)record_auth_key);

  if (gcm_siv_ctx->is_128_bit) {
    aes128gcmsiv_dec(in, out, calculated_tag, htable, &expanded_key,
                     plaintext_len);
  } else {
    aes256gcmsiv_dec(in, out, calculated_tag, htable, &expanded_key,
                     plaintext_len);
  }

  if (plaintext_len & 15) {
    aead_aes_gcm_siv_asm_crypt_last_block(gcm_siv_ctx->is_128_bit, out, in,
                                          plaintext_len, given_tag,
                                          &expanded_key);
    OPENSSL_memset(scratch, 0, sizeof(scratch));
    OPENSSL_memcpy(scratch, out + (plaintext_len & ~15), plaintext_len & 15);
    aesgcmsiv_polyval_horner(calculated_tag, (const uint8_t *)record_auth_key,
                             scratch, 1);
  }

  union {
    uint8_t c[16];
    struct {
      uint64_t ad;
      uint64_t in;
    } bitlens;
  } length_block;

  length_block.bitlens.ad = ad_len * 8;
  length_block.bitlens.in = plaintext_len * 8;
  aesgcmsiv_polyval_horner(calculated_tag, (const uint8_t *)record_auth_key,
                           length_block.c, 1);

  for (size_t i = 0; i < 12; i++) {
    calculated_tag[i] ^= nonce[i];
  }

  calculated_tag[15] &= 0x7f;

  if (gcm_siv_ctx->is_128_bit) {
    aes128gcmsiv_ecb_enc_block(calculated_tag, calculated_tag, &expanded_key);
  } else {
    aes256gcmsiv_ecb_enc_block(calculated_tag, calculated_tag, &expanded_key);
  }

  if (CRYPTO_memcmp(calculated_tag, given_tag, EVP_AEAD_AES_GCM_SIV_TAG_LEN) !=
      0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  *out_len = in_len - EVP_AEAD_AES_GCM_SIV_TAG_LEN;
  return 1;
}

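// Note (illustrative, not from the original file): unlike AES-GCM, the
// AES-GCM-SIV tag is computed over the *plaintext*, so the opener must
// decrypt first and only then recompute and compare the tag, as above. The
// comparison uses |CRYPTO_memcmp| so that it takes constant time regardless
// of where the tags differ.
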
static const EVP_AEAD aead_aes_128_gcm_siv_asm = {
    16,                              // key length
    EVP_AEAD_AES_GCM_SIV_NONCE_LEN,  // nonce length
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,    // overhead
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,    // max tag length
    0,                               // seal_scatter_supports_extra_in

    aead_aes_gcm_siv_asm_init,
    NULL /* init_with_direction */,
    aead_aes_gcm_siv_asm_cleanup,
    aead_aes_gcm_siv_asm_open,
    aead_aes_gcm_siv_asm_seal_scatter,
    NULL /* open_gather */,
    NULL /* get_iv */,
    NULL /* tag_len */,
};

static const EVP_AEAD aead_aes_256_gcm_siv_asm = {
    32,                              // key length
    EVP_AEAD_AES_GCM_SIV_NONCE_LEN,  // nonce length
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,    // overhead
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,    // max tag length
    0,                               // seal_scatter_supports_extra_in

    aead_aes_gcm_siv_asm_init,
    NULL /* init_with_direction */,
    aead_aes_gcm_siv_asm_cleanup,
    aead_aes_gcm_siv_asm_open,
    aead_aes_gcm_siv_asm_seal_scatter,
    NULL /* open_gather */,
    NULL /* get_iv */,
    NULL /* tag_len */,
};

#endif  // X86_64 && !NO_ASM

struct aead_aes_gcm_siv_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  block128_f kgk_block;
  unsigned is_256 : 1;
};

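// Note (illustrative, not from the original file): |kgk_block| is the block
// function for the key-generating key, i.e. the AES key that RFC 8452 uses
// only to derive the per-nonce record keys in |gcm_siv_keys| below.
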
static int aead_aes_gcm_siv_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                 size_t key_len, size_t tag_len) {
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0;  // EVP_AEAD_CTX_init should catch this.
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_SIV_TAG_LEN;
  }
  if (tag_len != EVP_AEAD_AES_GCM_SIV_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  struct aead_aes_gcm_siv_ctx *gcm_siv_ctx =
      OPENSSL_malloc(sizeof(struct aead_aes_gcm_siv_ctx));
  if (gcm_siv_ctx == NULL) {
    return 0;
  }
  OPENSSL_memset(gcm_siv_ctx, 0, sizeof(struct aead_aes_gcm_siv_ctx));

  aes_ctr_set_key(&gcm_siv_ctx->ks.ks, NULL, &gcm_siv_ctx->kgk_block, key,
                  key_len);
  gcm_siv_ctx->is_256 = (key_len == 32);
  ctx->aead_state = gcm_siv_ctx;
  ctx->tag_len = tag_len;

  return 1;
}

static void aead_aes_gcm_siv_cleanup(EVP_AEAD_CTX *ctx) {
  OPENSSL_free(ctx->aead_state);
}

// gcm_siv_crypt encrypts (or decrypts; it is the same operation) |in_len|
// bytes from |in| to |out|, using the block function |enc_block| with |key| in
// counter mode, starting at |initial_counter|. This differs from the
// traditional counter mode code in that the counter is handled little-endian,
// only the first four bytes are used and the GCM-SIV tweak to the final byte
// is applied. The |in| and |out| pointers may be equal but otherwise must not
// alias.
static void gcm_siv_crypt(uint8_t *out, const uint8_t *in, size_t in_len,
                          const uint8_t initial_counter[AES_BLOCK_SIZE],
                          block128_f enc_block, const AES_KEY *key) {
  union {
    uint32_t w[4];
    uint8_t c[16];
  } counter;

  OPENSSL_memcpy(counter.c, initial_counter, AES_BLOCK_SIZE);
  counter.c[15] |= 0x80;

  for (size_t done = 0; done < in_len;) {
    uint8_t keystream[AES_BLOCK_SIZE];
    enc_block(counter.c, keystream, key);
    counter.w[0]++;

    size_t todo = AES_BLOCK_SIZE;
    if (in_len - done < todo) {
      todo = in_len - done;
    }

    for (size_t i = 0; i < todo; i++) {
      out[done + i] = keystream[i] ^ in[done + i];
    }

    done += todo;
  }
}

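// Example (illustrative, not from the original file): with |initial_counter|
// set to the tag and |in_len| == 20, the loop encrypts two counter blocks.
// Block 0 is the tag with the top bit of its final byte forced on; block 1 is
// the same value with the first little-endian 32-bit word incremented. The
// first keystream block is XORed into bytes 0..15 and the second into the
// remaining 4 bytes only.
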
// gcm_siv_polyval evaluates POLYVAL at |auth_key| on the given plaintext and
// AD. The result is written to |out_tag|.
static void gcm_siv_polyval(
    uint8_t out_tag[16], const uint8_t *in, size_t in_len, const uint8_t *ad,
    size_t ad_len, const uint8_t auth_key[16],
    const uint8_t nonce[EVP_AEAD_AES_GCM_SIV_NONCE_LEN]) {
  struct polyval_ctx polyval_ctx;
  CRYPTO_POLYVAL_init(&polyval_ctx, auth_key);

  CRYPTO_POLYVAL_update_blocks(&polyval_ctx, ad, ad_len & ~15);

  uint8_t scratch[16];
  if (ad_len & 15) {
    OPENSSL_memset(scratch, 0, sizeof(scratch));
    OPENSSL_memcpy(scratch, &ad[ad_len & ~15], ad_len & 15);
    CRYPTO_POLYVAL_update_blocks(&polyval_ctx, scratch, sizeof(scratch));
  }

  CRYPTO_POLYVAL_update_blocks(&polyval_ctx, in, in_len & ~15);
  if (in_len & 15) {
    OPENSSL_memset(scratch, 0, sizeof(scratch));
    OPENSSL_memcpy(scratch, &in[in_len & ~15], in_len & 15);
    CRYPTO_POLYVAL_update_blocks(&polyval_ctx, scratch, sizeof(scratch));
  }

  union {
    uint8_t c[16];
    struct {
      uint64_t ad;
      uint64_t in;
    } bitlens;
  } length_block;

  length_block.bitlens.ad = ad_len * 8;
  length_block.bitlens.in = in_len * 8;
  CRYPTO_POLYVAL_update_blocks(&polyval_ctx, length_block.c,
                               sizeof(length_block));

  CRYPTO_POLYVAL_finish(&polyval_ctx, out_tag);
  for (size_t i = 0; i < EVP_AEAD_AES_GCM_SIV_NONCE_LEN; i++) {
    out_tag[i] ^= nonce[i];
  }
  out_tag[15] &= 0x7f;
}

// gcm_siv_record_keys contains the keys used for a specific GCM-SIV record.
struct gcm_siv_record_keys {
  uint8_t auth_key[16];
  union {
    double align;
    AES_KEY ks;
  } enc_key;
  block128_f enc_block;
};

// gcm_siv_keys calculates the keys for a specific GCM-SIV record with the
// given nonce and writes them to |*out_keys|.
static void gcm_siv_keys(
    const struct aead_aes_gcm_siv_ctx *gcm_siv_ctx,
    struct gcm_siv_record_keys *out_keys,
    const uint8_t nonce[EVP_AEAD_AES_GCM_SIV_NONCE_LEN]) {
  const AES_KEY *const key = &gcm_siv_ctx->ks.ks;
  uint8_t key_material[(128 /* POLYVAL key */ + 256 /* max AES key */) / 8];
  const size_t blocks_needed = gcm_siv_ctx->is_256 ? 6 : 4;

  uint8_t counter[AES_BLOCK_SIZE];
  OPENSSL_memset(counter, 0, AES_BLOCK_SIZE - EVP_AEAD_AES_GCM_SIV_NONCE_LEN);
  OPENSSL_memcpy(counter + AES_BLOCK_SIZE - EVP_AEAD_AES_GCM_SIV_NONCE_LEN,
                 nonce, EVP_AEAD_AES_GCM_SIV_NONCE_LEN);
  for (size_t i = 0; i < blocks_needed; i++) {
    counter[0] = i;

    uint8_t ciphertext[AES_BLOCK_SIZE];
    gcm_siv_ctx->kgk_block(counter, ciphertext, key);
    OPENSSL_memcpy(&key_material[i * 8], ciphertext, 8);
  }

  OPENSSL_memcpy(out_keys->auth_key, key_material, 16);
  aes_ctr_set_key(&out_keys->enc_key.ks, NULL, &out_keys->enc_block,
                  key_material + 16, gcm_siv_ctx->is_256 ? 32 : 16);
}

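// For reference (illustrative note, not from the original file): per
// RFC 8452, each derived block is AES(key-generating key, i || nonce) with a
// little-endian counter i, and only its first eight bytes are kept, matching
// the loop above. For AES-128 (|blocks_needed| == 4):
//
//   blocks 0, 1 -> 16-byte POLYVAL (message-authentication) key
//   blocks 2, 3 -> 16-byte AES (message-encryption) key
//
// AES-256 additionally uses blocks 4 and 5 for a 32-byte encryption key.
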
static int aead_aes_gcm_siv_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_siv_ctx *gcm_siv_ctx = ctx->aead_state;
  const uint64_t in_len_64 = in_len;
  const uint64_t ad_len_64 = ad_len;

  if (in_len + EVP_AEAD_AES_GCM_SIV_TAG_LEN < in_len ||
      in_len_64 > (UINT64_C(1) << 36) ||
      ad_len_64 >= (UINT64_C(1) << 61)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_tag_len < EVP_AEAD_AES_GCM_SIV_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_GCM_SIV_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  struct gcm_siv_record_keys keys;
  gcm_siv_keys(gcm_siv_ctx, &keys, nonce);

  uint8_t tag[16];
  gcm_siv_polyval(tag, in, in_len, ad, ad_len, keys.auth_key, nonce);
  keys.enc_block(tag, tag, &keys.enc_key.ks);

  gcm_siv_crypt(out, in, in_len, tag, keys.enc_block, &keys.enc_key.ks);

  OPENSSL_memcpy(out_tag, tag, EVP_AEAD_AES_GCM_SIV_TAG_LEN);
  *out_tag_len = EVP_AEAD_AES_GCM_SIV_TAG_LEN;

  return 1;
}

static int aead_aes_gcm_siv_open_gather(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                        const uint8_t *nonce, size_t nonce_len,
                                        const uint8_t *in, size_t in_len,
                                        const uint8_t *in_tag,
                                        size_t in_tag_len, const uint8_t *ad,
                                        size_t ad_len) {
  const uint64_t ad_len_64 = ad_len;
  if (ad_len_64 >= (UINT64_C(1) << 61)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  const uint64_t in_len_64 = in_len;
  if (in_tag_len != EVP_AEAD_AES_GCM_SIV_TAG_LEN ||
      in_len_64 > (UINT64_C(1) << 36) + AES_BLOCK_SIZE) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_GCM_SIV_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  const struct aead_aes_gcm_siv_ctx *gcm_siv_ctx = ctx->aead_state;

  struct gcm_siv_record_keys keys;
  gcm_siv_keys(gcm_siv_ctx, &keys, nonce);

  gcm_siv_crypt(out, in, in_len, in_tag, keys.enc_block, &keys.enc_key.ks);

  uint8_t expected_tag[EVP_AEAD_AES_GCM_SIV_TAG_LEN];
  gcm_siv_polyval(expected_tag, out, in_len, ad, ad_len, keys.auth_key, nonce);
  keys.enc_block(expected_tag, expected_tag, &keys.enc_key.ks);

  if (CRYPTO_memcmp(expected_tag, in_tag, sizeof(expected_tag)) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  return 1;
}

static const EVP_AEAD aead_aes_128_gcm_siv = {
    16,                              // key length
    EVP_AEAD_AES_GCM_SIV_NONCE_LEN,  // nonce length
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,    // overhead
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,    // max tag length
    0,                               // seal_scatter_supports_extra_in

    aead_aes_gcm_siv_init,
    NULL /* init_with_direction */,
    aead_aes_gcm_siv_cleanup,
    NULL /* open */,
    aead_aes_gcm_siv_seal_scatter,
    aead_aes_gcm_siv_open_gather,
    NULL /* get_iv */,
    NULL /* tag_len */,
};

static const EVP_AEAD aead_aes_256_gcm_siv = {
    32,                              // key length
    EVP_AEAD_AES_GCM_SIV_NONCE_LEN,  // nonce length
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,    // overhead
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,    // max tag length
    0,                               // seal_scatter_supports_extra_in

    aead_aes_gcm_siv_init,
    NULL /* init_with_direction */,
    aead_aes_gcm_siv_cleanup,
    NULL /* open */,
    aead_aes_gcm_siv_seal_scatter,
    aead_aes_gcm_siv_open_gather,
    NULL /* get_iv */,
    NULL /* tag_len */,
};

#if defined(OPENSSL_X86_64) && !defined(OPENSSL_NO_ASM)

static char avx_aesni_capable(void) {
  const uint32_t ecx = OPENSSL_ia32cap_P[1];

  return (ecx & (1 << (57 - 32))) != 0 /* AESNI */ &&
         (ecx & (1 << 28)) != 0 /* AVX */;
}

const EVP_AEAD *EVP_aead_aes_128_gcm_siv(void) {
  if (avx_aesni_capable()) {
    return &aead_aes_128_gcm_siv_asm;
  }
  return &aead_aes_128_gcm_siv;
}

const EVP_AEAD *EVP_aead_aes_256_gcm_siv(void) {
  if (avx_aesni_capable()) {
    return &aead_aes_256_gcm_siv_asm;
  }
  return &aead_aes_256_gcm_siv;
}

#else

const EVP_AEAD *EVP_aead_aes_128_gcm_siv(void) {
  return &aead_aes_128_gcm_siv;
}

const EVP_AEAD *EVP_aead_aes_256_gcm_siv(void) {
  return &aead_aes_256_gcm_siv;
}

#endif  // X86_64 && !NO_ASM
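
// Usage sketch (illustrative, not part of the original file): sealing and
// opening one message through the public |EVP_AEAD| API. Error handling is
// elided; real callers must check every return value, and should not reuse a
// (key, nonce) pair, although AES-GCM-SIV degrades more gracefully than
// AES-GCM if that rule is broken.
//
//   #include <openssl/aead.h>
//
//   uint8_t key[16] = {0};    // use a randomly generated key in practice
//   uint8_t nonce[12] = {0};  // EVP_AEAD_AES_GCM_SIV_NONCE_LEN bytes
//   const uint8_t plaintext[] = "hello";
//
//   EVP_AEAD_CTX ctx;
//   EVP_AEAD_CTX_init(&ctx, EVP_aead_aes_128_gcm_siv(), key, sizeof(key),
//                     EVP_AEAD_DEFAULT_TAG_LENGTH, NULL);
//
//   uint8_t ciphertext[sizeof(plaintext) + EVP_AEAD_MAX_OVERHEAD];
//   size_t ciphertext_len;
//   EVP_AEAD_CTX_seal(&ctx, ciphertext, &ciphertext_len, sizeof(ciphertext),
//                     nonce, sizeof(nonce), plaintext, sizeof(plaintext),
//                     NULL, 0);
//
//   uint8_t decrypted[sizeof(ciphertext)];
//   size_t decrypted_len;
//   EVP_AEAD_CTX_open(&ctx, decrypted, &decrypted_len, sizeof(decrypted),
//                     nonce, sizeof(nonce), ciphertext, ciphertext_len,
//                     NULL, 0);
//
//   EVP_AEAD_CTX_cleanup(&ctx);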