/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ==================================================================== */

#include <string.h>

#include <openssl/aead.h>
#include <openssl/aes.h>
#include <openssl/cipher.h>
#include <openssl/cpu.h>
#include <openssl/err.h>
#include <openssl/mem.h>
#include <openssl/obj.h>
#include <openssl/rand.h>
#include <openssl/sha.h>

#include "internal.h"
#include "../internal.h"
#include "../modes/internal.h"

#if defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
#include <openssl/arm_arch.h>
#endif


typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;
  block128_f block;
  union {
    cbc128_f cbc;
    ctr128_f ctr;
  } stream;
} EVP_AES_KEY;
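
/* Only one member of |stream| is meaningful for a given context: |cbc| when
 * the key was set up for CBC mode, |ctr| for CTR mode, and neither (NULL)
 * when the generic block-at-a-time fallback is in use. */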

typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;        /* AES key schedule to use */
  int key_set; /* Set if key initialised */
  int iv_set;  /* Set if an iv is set */
  GCM128_CONTEXT gcm;
  uint8_t *iv; /* Temporary IV store */
  int ivlen;   /* IV length */
  int taglen;
  int iv_gen;  /* It is OK to generate IVs */
  ctr128_f ctr;
} EVP_AES_GCM_CTX;

#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
#define VPAES
static char vpaes_capable(void) {
  /* Bit 41 of the extended feature word (bit 9 of CPUID.1:ECX) is SSSE3,
   * which vpaes requires. */
  return (OPENSSL_ia32cap_P[1] & (1 << (41 - 32))) != 0;
}

#if defined(OPENSSL_X86_64)
#define BSAES
static char bsaes_capable(void) {
  return vpaes_capable();
}
#endif

#elif !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64))

#if defined(OPENSSL_ARM) && __ARM_MAX_ARCH__ >= 7
#define BSAES
static char bsaes_capable(void) {
  return CRYPTO_is_NEON_capable();
}
#endif

#define HWAES
static int hwaes_capable(void) {
  return CRYPTO_is_ARMv8_AES_capable();
}

int aes_v8_set_encrypt_key(const uint8_t *user_key, const int bits,
                           AES_KEY *key);
int aes_v8_set_decrypt_key(const uint8_t *user_key, const int bits,
                           AES_KEY *key);
void aes_v8_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aes_v8_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aes_v8_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                        const AES_KEY *key, uint8_t *ivec, const int enc);
void aes_v8_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                 const AES_KEY *key, const uint8_t ivec[16]);

#endif  /* OPENSSL_ARM || OPENSSL_AARCH64 */

#if defined(BSAES)
/* On platforms where BSAES gets defined (just above), these functions are
 * provided by asm. */
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]);
#else
static char bsaes_capable(void) {
  return 0;
}

/* On other platforms, bsaes_capable() will always return false and so the
 * following will never be called. */
static void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                              const AES_KEY *key, uint8_t ivec[16], int enc) {
  abort();
}

static void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
                                       size_t len, const AES_KEY *key,
                                       const uint8_t ivec[16]) {
  abort();
}
#endif

#if defined(VPAES)
/* On platforms where VPAES gets defined (just above), these functions are
 * provided by asm. */
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);
#else
static char vpaes_capable(void) {
  return 0;
}

/* On other platforms, vpaes_capable() will always return false and so the
 * following will never be called. */
static int vpaes_set_encrypt_key(const uint8_t *userKey, int bits,
                                 AES_KEY *key) {
  abort();
}
static int vpaes_set_decrypt_key(const uint8_t *userKey, int bits,
                                 AES_KEY *key) {
  abort();
}
static void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}
static void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}
static void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                              const AES_KEY *key, uint8_t *ivec, int enc) {
  abort();
}
#endif

#if !defined(HWAES)
/* If HWAES isn't defined then we provide dummy functions for each of the hwaes
 * functions. */
static int hwaes_capable(void) {
  return 0;
}

static int aes_v8_set_encrypt_key(const uint8_t *user_key, int bits,
                                  AES_KEY *key) {
  abort();
}

static int aes_v8_set_decrypt_key(const uint8_t *user_key, int bits,
                                  AES_KEY *key) {
  abort();
}

static void aes_v8_encrypt(const uint8_t *in, uint8_t *out,
                           const AES_KEY *key) {
  abort();
}

static void aes_v8_decrypt(const uint8_t *in, uint8_t *out,
                           const AES_KEY *key) {
  abort();
}

static void aes_v8_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                               const AES_KEY *key, uint8_t *ivec, int enc) {
  abort();
}

static void aes_v8_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
                                        size_t len, const AES_KEY *key,
                                        const uint8_t ivec[16]) {
  abort();
}
#endif

#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int aesni_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aesni_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void aesni_ecb_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t blocks,
                                const void *key, const uint8_t *ivec);

#if defined(OPENSSL_X86_64)
size_t aesni_gcm_encrypt(const uint8_t *in, uint8_t *out, size_t len,
                         const void *key, uint8_t ivec[16], uint64_t *Xi);
#define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const uint8_t *in, uint8_t *out, size_t len,
                         const void *key, uint8_t ivec[16], uint64_t *Xi);
#define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(uint64_t Xi[2], const u128 Htable[16], const uint8_t *in,
                   size_t len);
#define AES_GCM_ASM(gctx) \
  (gctx->ctr == aesni_ctr32_encrypt_blocks && gctx->gcm.ghash == gcm_ghash_avx)
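/* The AES_GCM_ASM test above requires that both the CTR function and the
 * GHASH function are the AVX "stitched" implementations; only then may the
 * bulk of a GCM operation be handed to AES_gcm_encrypt/AES_gcm_decrypt. */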
#endif  /* OPENSSL_X86_64 */

#else

/* On other platforms, aesni_capable() will always return false and so the
 * following will never be called. */
static void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}
static int aesni_set_encrypt_key(const uint8_t *userKey, int bits,
                                 AES_KEY *key) {
  abort();
}
static void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
                                       size_t blocks, const void *key,
                                       const uint8_t *ivec) {
  abort();
}

#endif

static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc)
                        OPENSSL_SUPPRESS_UNREACHABLE_CODE_WARNINGS {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (hwaes_capable()) {
      ret = aes_v8_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)aes_v8_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = (cbc128_f)aes_v8_cbc_encrypt;
      }
    } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)vpaes_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
    } else {
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
    }
  } else if (hwaes_capable()) {
    ret = aes_v8_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)aes_v8_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = (cbc128_f)aes_v8_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = (ctr128_f)aes_v8_ctr32_encrypt_blocks;
    }
  } else if (bsaes_capable() && mode == EVP_CIPH_CTR_MODE) {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)vpaes_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
  } else {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.cbc) {
    (*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
  } else if (ctx->encrypt) {
    CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  } else {
    CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  }

  return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  size_t bl = ctx->cipher->block_size;
  size_t i;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (len < bl) {
    return 1;
  }

  /* Process each whole block. |len| is reduced by one block so that the
   * bound |i| <= |len| covers exactly the full blocks of the input. */
  for (i = 0, len -= bl; i <= len; i += bl) {
    (*dat->block)(in + i, out + i, &dat->ks);
  }

  return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  unsigned int num = ctx->num;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, ctx->iv, ctx->buf, &num,
                                dat->stream.ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv, ctx->buf, &num,
                          dat->block);
  }
  ctx->num = (size_t)num;
  return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num, dat->block);
  return 1;
}

static char aesni_capable(void);

static ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
                                block128_f *out_block, const uint8_t *key,
                                size_t key_len)
                                OPENSSL_SUPPRESS_UNREACHABLE_CODE_WARNINGS {
  if (aesni_capable()) {
    aesni_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aesni_encrypt);
    }
    if (out_block) {
      *out_block = (block128_f)aesni_encrypt;
    }
    return (ctr128_f)aesni_ctr32_encrypt_blocks;
  }

  if (hwaes_capable()) {
    aes_v8_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aes_v8_encrypt);
    }
    if (out_block) {
      *out_block = (block128_f)aes_v8_encrypt;
    }
    return (ctr128_f)aes_v8_ctr32_encrypt_blocks;
  }

  if (bsaes_capable()) {
    AES_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
    }
    if (out_block) {
      *out_block = (block128_f)AES_encrypt;
    }
    return (ctr128_f)bsaes_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, key_len * 8, aes_key);
    if (out_block) {
      *out_block = (block128_f)vpaes_encrypt;
    }
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt);
    }
    return NULL;
  }

  AES_set_encrypt_key(key, key_len * 8, aes_key);
  if (gcm_ctx != NULL) {
    CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
  }
  if (out_block) {
    *out_block = (block128_f)AES_encrypt;
  }
  return NULL;
}
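
/* aes_ctr_set_key tries implementations in decreasing order of speed: AES-NI,
 * then ARMv8 AES, then bit-sliced AES, then vpaes, then the generic C code.
 * A NULL return is not an error; it means no ctr32 implementation is
 * available and callers must fall back to the installed block function.
 * Illustrative sketch (not compiled; |key_bytes| is a hypothetical 16-byte
 * buffer):
 *
 *   AES_KEY ks;
 *   GCM128_CONTEXT gcm;
 *   ctr128_f ctr = aes_ctr_set_key(&ks, &gcm, NULL, key_bytes, 16);
 *   if (ctr != NULL) {
 *     ... drive CRYPTO_gcm128_encrypt_ctr32 with |ctr| ...
 *   } else {
 *     ... drive CRYPTO_gcm128_encrypt with the installed block function ...
 *   }
 */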

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  if (!iv && !key) {
    return 1;
  }
  if (key) {
    gctx->ctr =
        aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm, NULL, key, ctx->key_len);
    /* If we have an IV, we can set it directly; otherwise use the saved IV. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* If the key is set, use the IV now; otherwise save it for later. */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
    } else {
      memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;
  OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  if (gctx->iv != c->iv) {
    OPENSSL_free(gctx->iv);
  }
}

/* ctr64_inc increments the big-endian, 64-bit counter |counter| by one. */
static void ctr64_inc(uint8_t *counter) {
  int n = 8;
  uint8_t c;

  do {
    --n;
    c = counter[n];
    ++c;
    counter[n] = c;
    if (c) {
      return;
    }
  } while (n);
}
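
/* For example, ctr64_inc turns 00 00 00 00 00 00 00 ff into
 * 00 00 00 00 00 00 01 00, and ff ff ff ff ff ff ff ff wraps to all zeros:
 * each byte overflows to zero and the loop stops once |n| reaches 0. */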

static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;
  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      /* Allocate memory for IV if needed */
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_GCM_SET_TAG:
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_GCM_GET_TAG:
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
      /* Special case: -1 length restores whole IV */
      if (arg == -1) {
        memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }
      /* Fixed field must be at least 4 bytes and invocation field
       * at least 8. */
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }
      if (arg) {
        memcpy(gctx->iv, ptr, arg);
      }
      if (c->encrypt && !RAND_bytes(gctx->iv + arg, gctx->ivlen - arg)) {
        return 0;
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      /* The invocation field will be at least 8 bytes in size, so there is no
       * need to check for wrap around or to increment more than the last 8
       * bytes. */
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
        memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
      }
      return 1;
    }

    default:
      return -1;
  }
}
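
#if 0
/* Illustrative sketch (not compiled): how a TLS-style record layer might
 * drive the ctrl interface above.  |fixed| is the 4-byte implicit part of
 * the nonce; each record then receives a fresh 8-byte explicit part via
 * EVP_CTRL_GCM_IV_GEN, which also increments the stored invocation field.
 * This example function is hypothetical, not part of the implementation. */
static int example_tls_gcm_nonce(EVP_CIPHER_CTX *ctx, const uint8_t fixed[4],
                                 uint8_t out_explicit[8]) {
  if (!EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_SET_IV_FIXED, 4,
                           (void *)fixed)) {
    return 0;
  }
  /* Copies the 8-byte invocation field to |out_explicit| and increments it. */
  return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_IV_GEN, 8, out_explicit);
}
#endif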

static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

  /* If not set up, return error */
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (in) {
    if (out == NULL) {
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        size_t bulk = 0;
#if defined(AES_GCM_ASM)
        if (len >= 32 && AES_GCM_ASM(gctx)) {
          size_t res = (16 - gctx->gcm.mres) % 16;

          if (!CRYPTO_gcm128_encrypt(&gctx->gcm, &gctx->ks.ks, in, out, res)) {
            return -1;
          }

          bulk = AES_gcm_encrypt(in + res, out + res, len - res, &gctx->ks.ks,
                                 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
          gctx->gcm.len.u[1] += bulk;
          bulk += res;
        }
#endif
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in + bulk,
                                         out + bulk, len - bulk, gctx->ctr)) {
          return -1;
        }
      } else {
        size_t bulk = 0;
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, &gctx->ks.ks, in + bulk,
                                   out + bulk, len - bulk)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        size_t bulk = 0;
#if defined(AES_GCM_ASM)
        if (len >= 16 && AES_GCM_ASM(gctx)) {
          size_t res = (16 - gctx->gcm.mres) % 16;

          if (!CRYPTO_gcm128_decrypt(&gctx->gcm, &gctx->ks.ks, in, out, res)) {
            return -1;
          }

          bulk = AES_gcm_decrypt(in + res, out + res, len - res, &gctx->ks.ks,
                                 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
          gctx->gcm.len.u[1] += bulk;
          bulk += res;
        }
#endif
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in + bulk,
                                         out + bulk, len - bulk, gctx->ctr)) {
          return -1;
        }
      } else {
        size_t bulk = 0;
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, &gctx->ks.ks, in + bulk,
                                   out + bulk, len - bulk)) {
          return -1;
        }
      }
    }
    return len;
  } else {
    if (!ctx->encrypt) {
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;
    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
  }
}

static const EVP_CIPHER aes_128_cbc = {
    NID_aes_128_cbc,     16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key,        aes_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_128_ctr = {
    NID_aes_128_ctr,     1 /* block_size */,  16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key,        aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_128_ecb = {
    NID_aes_128_ecb,     16 /* block_size */, 16 /* key_size */,
    0 /* iv_len */,      sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key,        aes_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_128_ofb = {
    NID_aes_128_ofb128,  1 /* block_size */,  16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aes_init_key,        aes_ofb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};


static const EVP_CIPHER aes_192_cbc = {
    NID_aes_192_cbc,     16 /* block_size */, 24 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key,        aes_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_192_ctr = {
    NID_aes_192_ctr,     1 /* block_size */,  24 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key,        aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_192_ecb = {
    NID_aes_192_ecb,     16 /* block_size */, 24 /* key_size */,
    0 /* iv_len */,      sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key,        aes_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_192_gcm = {
    NID_aes_192_gcm, 1 /* block_size */, 24 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};


static const EVP_CIPHER aes_256_cbc = {
    NID_aes_256_cbc,     16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key,        aes_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_256_ctr = {
    NID_aes_256_ctr,     1 /* block_size */,  32 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key,        aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_256_ecb = {
    NID_aes_256_ecb,     16 /* block_size */, 32 /* key_size */,
    0 /* iv_len */,      sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key,        aes_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_256_ofb = {
    NID_aes_256_ofb128,  1 /* block_size */,  32 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aes_init_key,        aes_ofb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_256_gcm = {
    NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))

/* AES-NI section. */

static char aesni_capable(void) {
  /* Bit 57 of the extended feature word (bit 25 of CPUID.1:ECX) is the
   * AES-NI instruction set. */
  return (OPENSSL_ia32cap_P[1] & (1 << (57 - 32))) != 0;
}

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                          const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    dat->block = (block128_f)aesni_decrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
  } else {
    ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    dat->block = (block128_f)aesni_encrypt;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    } else {
      dat->stream.cbc = NULL;
    }
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                            const uint8_t *in, size_t len) {
  aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);

  return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                            const uint8_t *in, size_t len) {
  size_t bl = ctx->cipher->block_size;

  if (len < bl) {
    return 1;
  }

  aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);

  return 1;
}

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                              const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  if (!iv && !key) {
    return 1;
  }
  if (key) {
    aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
    CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt);
    gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    /* If we have an IV, we can set it directly; otherwise use the saved
     * IV. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* If the key is set, use the IV now; otherwise save it for later. */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
    } else {
      memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static const EVP_CIPHER aesni_128_cbc = {
    NID_aes_128_cbc,     16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ctr = {
    NID_aes_128_ctr,     1 /* block_size */,  16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key,      aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ecb = {
    NID_aes_128_ecb,     16 /* block_size */, 16 /* key_size */,
    0 /* iv_len */,      sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ofb = {
    NID_aes_128_ofb128,  1 /* block_size */,  16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aesni_init_key,      aes_ofb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};


static const EVP_CIPHER aesni_192_cbc = {
    NID_aes_192_cbc,     16 /* block_size */, 24 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_192_ctr = {
    NID_aes_192_ctr,     1 /* block_size */,  24 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key,      aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_192_ecb = {
    NID_aes_192_ecb,     16 /* block_size */, 24 /* key_size */,
    0 /* iv_len */,      sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_192_gcm = {
    NID_aes_192_gcm, 1 /* block_size */, 24 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};


static const EVP_CIPHER aesni_256_cbc = {
    NID_aes_256_cbc,     16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ctr = {
    NID_aes_256_ctr,     1 /* block_size */,  32 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key,      aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ecb = {
    NID_aes_256_ecb,     16 /* block_size */, 32 /* key_size */,
    0 /* iv_len */,      sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ofb = {
    NID_aes_256_ofb128,  1 /* block_size */,  32 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aesni_init_key,      aes_ofb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_256_gcm = {
    NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    if (aesni_capable()) {                             \
      return &aesni_##keybits##_##mode;                \
    } else {                                           \
      return &aes_##keybits##_##mode;                  \
    }                                                  \
  }

#else  /* ^^^  OPENSSL_X86_64 || OPENSSL_X86 */

static char aesni_capable(void) {
  return 0;
}

#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    return &aes_##keybits##_##mode;                    \
  }

#endif

EVP_CIPHER_FUNCTION(128, cbc)
EVP_CIPHER_FUNCTION(128, ctr)
EVP_CIPHER_FUNCTION(128, ecb)
EVP_CIPHER_FUNCTION(128, ofb)
EVP_CIPHER_FUNCTION(128, gcm)

EVP_CIPHER_FUNCTION(192, cbc)
EVP_CIPHER_FUNCTION(192, ctr)
EVP_CIPHER_FUNCTION(192, ecb)
EVP_CIPHER_FUNCTION(192, gcm)

EVP_CIPHER_FUNCTION(256, cbc)
EVP_CIPHER_FUNCTION(256, ctr)
EVP_CIPHER_FUNCTION(256, ecb)
EVP_CIPHER_FUNCTION(256, ofb)
EVP_CIPHER_FUNCTION(256, gcm)
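
#if 0
/* Illustrative sketch (not compiled): one-shot AES-128-GCM encryption
 * through the EVP_CIPHER functions generated above.  Error checking is
 * elided; a real caller must test every return value.  This example function
 * is hypothetical, not part of the implementation. */
static void example_evp_gcm_encrypt(const uint8_t key[16],
                                    const uint8_t nonce[12], const uint8_t *pt,
                                    int pt_len, uint8_t *ct, uint8_t tag[16]) {
  EVP_CIPHER_CTX ctx;
  int len;

  EVP_CIPHER_CTX_init(&ctx);
  EVP_EncryptInit_ex(&ctx, EVP_aes_128_gcm(), NULL, key, nonce);
  EVP_EncryptUpdate(&ctx, ct, &len, pt, pt_len);
  EVP_EncryptFinal_ex(&ctx, ct + len, &len);
  /* Retrieve the 16-byte authentication tag computed over the message. */
  EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_GCM_GET_TAG, 16, tag);
  EVP_CIPHER_CTX_cleanup(&ctx);
}
#endif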


#define EVP_AEAD_AES_GCM_TAG_LEN 16

struct aead_aes_gcm_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  GCM128_CONTEXT gcm;
  ctr128_f ctr;
  uint8_t tag_len;
};

static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                             size_t key_len, size_t tag_len) {
  struct aead_aes_gcm_ctx *gcm_ctx;
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_ctx));
  if (gcm_ctx == NULL) {
    return 0;
  }

  gcm_ctx->ctr =
      aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm, NULL, key, key_len);
  gcm_ctx->tag_len = tag_len;
  ctx->aead_state = gcm_ctx;

  return 1;
}

static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  OPENSSL_cleanse(gcm_ctx, sizeof(struct aead_aes_gcm_ctx));
  OPENSSL_free(gcm_ctx);
}

static int aead_aes_gcm_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                             size_t *out_len, size_t max_out_len,
                             const uint8_t *nonce, size_t nonce_len,
                             const uint8_t *in, size_t in_len,
                             const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  GCM128_CONTEXT gcm;

  if (in_len + gcm_ctx->tag_len < in_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + gcm_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_encrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, out + in_len, gcm_ctx->tag_len);
  *out_len = in_len + gcm_ctx->tag_len;
  return 1;
}

static int aead_aes_gcm_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                             size_t *out_len, size_t max_out_len,
                             const uint8_t *nonce, size_t nonce_len,
                             const uint8_t *in, size_t in_len,
                             const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];
  size_t plaintext_len;
  GCM128_CONTEXT gcm;

  if (in_len < gcm_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  plaintext_len = in_len - gcm_ctx->tag_len;

  if (max_out_len < plaintext_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, key, in, out,
                                     in_len - gcm_ctx->tag_len, gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_decrypt(&gcm, key, in, out, in_len - gcm_ctx->tag_len)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, tag, gcm_ctx->tag_len);
  if (CRYPTO_memcmp(tag, in + plaintext_len, gcm_ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  *out_len = plaintext_len;
  return 1;
}

static const EVP_AEAD aead_aes_128_gcm = {
    16,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,
    NULL, /* init_with_direction */
    aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,
    aead_aes_gcm_open,
    NULL, /* get_rc4_state */
    NULL, /* get_iv */
};

static const EVP_AEAD aead_aes_256_gcm = {
    32,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,
    NULL, /* init_with_direction */
    aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,
    aead_aes_gcm_open,
    NULL, /* get_rc4_state */
    NULL, /* get_iv */
};

const EVP_AEAD *EVP_aead_aes_128_gcm(void) { return &aead_aes_128_gcm; }

const EVP_AEAD *EVP_aead_aes_256_gcm(void) { return &aead_aes_256_gcm; }
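
#if 0
/* Illustrative sketch (not compiled): sealing a message with the AEAD
 * interface above.  |out| must provide in_len plus EVP_AEAD_AES_GCM_TAG_LEN
 * bytes of space; identifiers follow <openssl/aead.h>.  This example
 * function is hypothetical, not part of the implementation. */
static int example_aead_seal(const uint8_t key[16], const uint8_t nonce[12],
                             const uint8_t *in, size_t in_len, uint8_t *out,
                             size_t max_out, size_t *out_len) {
  EVP_AEAD_CTX ctx;
  int ok;

  if (!EVP_AEAD_CTX_init(&ctx, EVP_aead_aes_128_gcm(), key, 16,
                         EVP_AEAD_DEFAULT_TAG_LENGTH, NULL)) {
    return 0;
  }
  ok = EVP_AEAD_CTX_seal(&ctx, out, out_len, max_out, nonce, 12, in, in_len,
                         NULL /* no AAD */, 0);
  EVP_AEAD_CTX_cleanup(&ctx);
  return ok;
}
#endif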


/* AES Key Wrap is specified in
 * http://csrc.nist.gov/groups/ST/toolkit/documents/kms/key-wrap.pdf
 * or https://tools.ietf.org/html/rfc3394 */

struct aead_aes_key_wrap_ctx {
  uint8_t key[32];
  unsigned key_bits;
};

static int aead_aes_key_wrap_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                  size_t key_len, size_t tag_len) {
  struct aead_aes_key_wrap_ctx *kw_ctx;
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = 8;
  }

  if (tag_len != 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_TAG_SIZE);
    return 0;
  }

  kw_ctx = OPENSSL_malloc(sizeof(struct aead_aes_key_wrap_ctx));
  if (kw_ctx == NULL) {
    OPENSSL_PUT_ERROR(CIPHER, ERR_R_MALLOC_FAILURE);
    return 0;
  }

  memcpy(kw_ctx->key, key, key_len);
  kw_ctx->key_bits = key_bits;

  ctx->aead_state = kw_ctx;
  return 1;
}

static void aead_aes_key_wrap_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  OPENSSL_cleanse(kw_ctx, sizeof(struct aead_aes_key_wrap_ctx));
  OPENSSL_free(kw_ctx);
}

/* kDefaultAESKeyWrapNonce is the default nonce value given in section
 * 2.2.3.1. */
static const uint8_t kDefaultAESKeyWrapNonce[8] = {0xa6, 0xa6, 0xa6, 0xa6,
                                                   0xa6, 0xa6, 0xa6, 0xa6};


static int aead_aes_key_wrap_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                  size_t *out_len, size_t max_out_len,
                                  const uint8_t *nonce, size_t nonce_len,
                                  const uint8_t *in, size_t in_len,
                                  const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  union {
    double align;
    AES_KEY ks;
  } ks;
  /* Variables in this function match up with the variables in the second half
   * of section 2.2.1. */
  unsigned i, j, n;
  uint8_t A[AES_BLOCK_SIZE];

  if (ad_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_AD_SIZE);
    return 0;
  }

  if (nonce_len == 0) {
    nonce = kDefaultAESKeyWrapNonce;
    nonce_len = sizeof(kDefaultAESKeyWrapNonce);
  }

  if (nonce_len != 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  if (in_len % 8 != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  /* The code below only handles a 32-bit |t|, thus 6*|n| must be less than
   * 2^32, where |n| is |in_len| / 8. So in_len < 4/3 * 2^32 and we
   * conservatively cap it to 2^32-16 to stop 32-bit platforms complaining that
   * a comparison is always true. */
  if (in_len > 0xfffffff0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  n = in_len / 8;

  if (n < 2) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  if (in_len + 8 < in_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (AES_set_encrypt_key(kw_ctx->key, kw_ctx->key_bits, &ks.ks) < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  memmove(out + 8, in, in_len);
  memcpy(A, nonce, 8);

  for (j = 0; j < 6; j++) {
    for (i = 1; i <= n; i++) {
      uint32_t t;

      memcpy(A + 8, out + 8 * i, 8);
      AES_encrypt(A, A, &ks.ks);
      t = n * j + i;
      A[7] ^= t & 0xff;
      A[6] ^= (t >> 8) & 0xff;
      A[5] ^= (t >> 16) & 0xff;
      A[4] ^= (t >> 24) & 0xff;
      memcpy(out + 8 * i, A + 8, 8);
    }
  }

  memcpy(out, A, 8);
  *out_len = in_len + 8;
  return 1;
}
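
/* As a sanity check (this vector is from RFC 3394, section 4.1, quoted here
 * from memory): wrapping the key data 00112233445566778899aabbccddeeff under
 * the KEK 000102030405060708090a0b0c0d0e0f, with the default nonce, should
 * produce 1fa68b0a8112b447aef34bd8fb5a7b829d3e862371d2cfe5. */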

static int aead_aes_key_wrap_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                  size_t *out_len, size_t max_out_len,
                                  const uint8_t *nonce, size_t nonce_len,
                                  const uint8_t *in, size_t in_len,
                                  const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  union {
    double align;
    AES_KEY ks;
  } ks;
  /* Variables in this function match up with the variables in the second half
   * of section 2.2.1. */
  unsigned i, j, n;
  uint8_t A[AES_BLOCK_SIZE];

  if (ad_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_AD_SIZE);
    return 0;
  }

  if (nonce_len == 0) {
    nonce = kDefaultAESKeyWrapNonce;
    nonce_len = sizeof(kDefaultAESKeyWrapNonce);
  }

  if (nonce_len != 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  if (in_len % 8 != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  /* The code below only handles a 32-bit |t|, thus 6*|n| must be less than
   * 2^32, where |n| is |in_len| / 8. So in_len < 4/3 * 2^32 and we
   * conservatively cap it to 2^32-8 to stop 32-bit platforms complaining that
   * a comparison is always true. */
  if (in_len > 0xfffffff8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (in_len < 24) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  n = (in_len / 8) - 1;

  if (max_out_len < in_len - 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (AES_set_decrypt_key(kw_ctx->key, kw_ctx->key_bits, &ks.ks) < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  memcpy(A, in, 8);
  memmove(out, in + 8, in_len - 8);

  /* |j| is unsigned, so this loop runs for j = 5, 4, ..., 0 and exits when
   * the decrement wraps around. */
  for (j = 5; j < 6; j--) {
1434     for (i = n; i > 0; i--) {
1435       uint32_t t;
1436 
1437       t = n * j + i;
1438       A[7] ^= t & 0xff;
1439       A[6] ^= (t >> 8) & 0xff;
1440       A[5] ^= (t >> 16) & 0xff;
1441       A[4] ^= (t >> 24) & 0xff;
1442       memcpy(A + 8, out + 8 * (i - 1), 8);
1443       AES_decrypt(A, A, &ks.ks);
1444       memcpy(out + 8 * (i - 1), A + 8, 8);
1445     }
1446   }
1447 
1448   if (CRYPTO_memcmp(A, nonce, 8) != 0) {
1449     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
1450     return 0;
1451   }
1452 
1453   *out_len = in_len - 8;
1454   return 1;
1455 }
1456 
1457 static const EVP_AEAD aead_aes_128_key_wrap = {
1458     16, /* key len */
1459     8,  /* nonce len */
1460     8,  /* overhead */
1461     8,  /* max tag length */
1462     aead_aes_key_wrap_init,
1463     NULL, /* init_with_direction */
1464     aead_aes_key_wrap_cleanup,
1465     aead_aes_key_wrap_seal,
1466     aead_aes_key_wrap_open,
1467     NULL, /* get_rc4_state */
1468     NULL, /* get_iv */
1469 };
1470 
1471 static const EVP_AEAD aead_aes_256_key_wrap = {
1472     32, /* key len */
1473     8,  /* nonce len */
1474     8,  /* overhead */
1475     8,  /* max tag length */
1476     aead_aes_key_wrap_init,
1477     NULL, /* init_with_direction */
1478     aead_aes_key_wrap_cleanup,
1479     aead_aes_key_wrap_seal,
1480     aead_aes_key_wrap_open,
1481     NULL, /* get_rc4_state */
1482     NULL, /* get_iv */
1483 };
1484 
EVP_aead_aes_128_key_wrap(void)1485 const EVP_AEAD *EVP_aead_aes_128_key_wrap(void) { return &aead_aes_128_key_wrap; }
1486 
EVP_aead_aes_256_key_wrap(void)1487 const EVP_AEAD *EVP_aead_aes_256_key_wrap(void) { return &aead_aes_256_key_wrap; }
1488 
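/* A minimal usage sketch for the key-wrap AEADs, assuming the public AEAD
 * interface from <openssl/aead.h>; the |kek| and |key_to_wrap| buffers are
 * hypothetical and error handling is elided:
 *
 *   EVP_AEAD_CTX wrap_ctx;
 *   uint8_t wrapped[16 + 8];  // 16-byte key plus 8 bytes of overhead
 *   size_t wrapped_len;
 *   EVP_AEAD_CTX_init(&wrap_ctx, EVP_aead_aes_128_key_wrap, kek, 16,
 *                     EVP_AEAD_DEFAULT_TAG_LENGTH, NULL);
 *   // Passing a zero-length nonce selects the default IV, as in
 *   // aead_aes_key_wrap_open above.
 *   EVP_AEAD_CTX_seal(&wrap_ctx, wrapped, &wrapped_len, sizeof(wrapped),
 *                     NULL, 0, key_to_wrap, 16, NULL, 0);
 *   EVP_AEAD_CTX_cleanup(&wrap_ctx);
 */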

#define EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN SHA256_DIGEST_LENGTH
#define EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN 12

struct aead_aes_ctr_hmac_sha256_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  ctr128_f ctr;
  block128_f block;
  SHA256_CTX inner_init_state;
  SHA256_CTX outer_init_state;
  uint8_t tag_len;
};

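/* hmac_init precomputes the SHA-256 states that result from hashing the HMAC
 * ipad and opad blocks, i.e. the fixed one-block prefixes of
 *
 *   HMAC(k, m) = SHA256((k ^ opad) || SHA256((k ^ ipad) || m))
 *
 * so that each seal/open can start from a copied state instead of rehashing
 * the key blocks. */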
static void hmac_init(SHA256_CTX *out_inner, SHA256_CTX *out_outer,
                      const uint8_t hmac_key[32]) {
  static const size_t hmac_key_len = 32;
  uint8_t block[SHA256_CBLOCK];
  memcpy(block, hmac_key, hmac_key_len);
  memset(block + hmac_key_len, 0x36, sizeof(block) - hmac_key_len);

  unsigned i;
  for (i = 0; i < hmac_key_len; i++) {
    block[i] ^= 0x36;
  }

  SHA256_Init(out_inner);
  SHA256_Update(out_inner, block, sizeof(block));

  memset(block + hmac_key_len, 0x5c, sizeof(block) - hmac_key_len);
  for (i = 0; i < hmac_key_len; i++) {
    block[i] ^= (0x36 ^ 0x5c);
  }

  SHA256_Init(out_outer);
  SHA256_Update(out_outer, block, sizeof(block));
}

static int aead_aes_ctr_hmac_sha256_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                         size_t key_len, size_t tag_len) {
  struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx;
  static const size_t hmac_key_len = 32;

  if (key_len < hmac_key_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  const size_t aes_key_len = key_len - hmac_key_len;
  if (aes_key_len != 16 && aes_key_len != 32) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  aes_ctx = OPENSSL_malloc(sizeof(struct aead_aes_ctr_hmac_sha256_ctx));
  if (aes_ctx == NULL) {
    OPENSSL_PUT_ERROR(CIPHER, ERR_R_MALLOC_FAILURE);
    return 0;
  }

  aes_ctx->ctr =
      aes_ctr_set_key(&aes_ctx->ks.ks, NULL, &aes_ctx->block, key, aes_key_len);
  aes_ctx->tag_len = tag_len;
  hmac_init(&aes_ctx->inner_init_state, &aes_ctx->outer_init_state,
            key + aes_key_len);

  ctx->aead_state = aes_ctx;

  return 1;
}

static void aead_aes_ctr_hmac_sha256_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  OPENSSL_cleanse(aes_ctx, sizeof(struct aead_aes_ctr_hmac_sha256_ctx));
  OPENSSL_free(aes_ctx);
}

static void hmac_update_uint64(SHA256_CTX *sha256, uint64_t value) {
  unsigned i;
  uint8_t bytes[8];

  for (i = 0; i < sizeof(bytes); i++) {
    bytes[i] = value & 0xff;
    value >>= 8;
  }
  SHA256_Update(sha256, bytes, sizeof(bytes));
}

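/* hmac_calculate computes HMAC-SHA-256 over the AEAD's authenticated data.
 * The MAC input is |ad_len| and |ciphertext_len| as little-endian 64-bit
 * values, then the nonce and the AD, zero-padded to a SHA-256 block boundary
 * so that the ciphertext starts on a fresh block, and finally the ciphertext
 * itself. */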
static void hmac_calculate(uint8_t out[SHA256_DIGEST_LENGTH],
                           const SHA256_CTX *inner_init_state,
                           const SHA256_CTX *outer_init_state,
                           const uint8_t *ad, size_t ad_len,
                           const uint8_t *nonce, const uint8_t *ciphertext,
                           size_t ciphertext_len) {
  SHA256_CTX sha256;
  memcpy(&sha256, inner_init_state, sizeof(sha256));
  hmac_update_uint64(&sha256, ad_len);
  hmac_update_uint64(&sha256, ciphertext_len);
  SHA256_Update(&sha256, nonce, EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN);
  SHA256_Update(&sha256, ad, ad_len);

  /* Pad with zeros to the end of the SHA-256 block. */
  const unsigned num_padding =
      (SHA256_CBLOCK - ((sizeof(uint64_t) * 2 +
                         EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN + ad_len) %
                        SHA256_CBLOCK)) %
      SHA256_CBLOCK;
  uint8_t padding[SHA256_CBLOCK];
  memset(padding, 0, num_padding);
  SHA256_Update(&sha256, padding, num_padding);

  SHA256_Update(&sha256, ciphertext, ciphertext_len);

  uint8_t inner_digest[SHA256_DIGEST_LENGTH];
  SHA256_Final(inner_digest, &sha256);

  memcpy(&sha256, outer_init_state, sizeof(sha256));
  SHA256_Update(&sha256, inner_digest, sizeof(inner_digest));
  SHA256_Final(out, &sha256);
}

static void aead_aes_ctr_hmac_sha256_crypt(
    const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx, uint8_t *out,
    const uint8_t *in, size_t len, const uint8_t *nonce) {
  /* Since the AEAD operation is one-shot, keeping a buffer of unused keystream
   * bytes is pointless. However, |CRYPTO_ctr128_encrypt| requires it. */
  uint8_t partial_block_buffer[AES_BLOCK_SIZE];
  unsigned partial_block_offset = 0;
  memset(partial_block_buffer, 0, sizeof(partial_block_buffer));

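  /* The CTR counter block is the 96-bit nonce followed by a 32-bit block
   * counter that starts at zero. */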
  uint8_t counter[AES_BLOCK_SIZE];
  memcpy(counter, nonce, EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN);
  memset(counter + EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN, 0, 4);

  if (aes_ctx->ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &aes_ctx->ks.ks, counter,
                                partial_block_buffer, &partial_block_offset,
                                aes_ctx->ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &aes_ctx->ks.ks, counter,
                          partial_block_buffer, &partial_block_offset,
                          aes_ctx->block);
  }
}

static int aead_aes_ctr_hmac_sha256_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                         size_t *out_len, size_t max_out_len,
                                         const uint8_t *nonce, size_t nonce_len,
                                         const uint8_t *in, size_t in_len,
                                         const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  const uint64_t in_len_64 = in_len;

  if (in_len + aes_ctx->tag_len < in_len ||
      /* This input is so large it would overflow the 32-bit block counter. */
      in_len_64 >= (UINT64_C(1) << 32) * AES_BLOCK_SIZE) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + aes_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  aead_aes_ctr_hmac_sha256_crypt(aes_ctx, out, in, in_len, nonce);

  uint8_t hmac_result[SHA256_DIGEST_LENGTH];
  hmac_calculate(hmac_result, &aes_ctx->inner_init_state,
                 &aes_ctx->outer_init_state, ad, ad_len, nonce, out, in_len);
  memcpy(out + in_len, hmac_result, aes_ctx->tag_len);
  *out_len = in_len + aes_ctx->tag_len;

  return 1;
}

static int aead_aes_ctr_hmac_sha256_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                         size_t *out_len, size_t max_out_len,
                                         const uint8_t *nonce, size_t nonce_len,
                                         const uint8_t *in, size_t in_len,
                                         const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  size_t plaintext_len;

  if (in_len < aes_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  plaintext_len = in_len - aes_ctx->tag_len;

  if (max_out_len < plaintext_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  uint8_t hmac_result[SHA256_DIGEST_LENGTH];
  hmac_calculate(hmac_result, &aes_ctx->inner_init_state,
                 &aes_ctx->outer_init_state, ad, ad_len, nonce, in,
                 plaintext_len);
  if (CRYPTO_memcmp(hmac_result, in + plaintext_len, aes_ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

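  /* The tag has been verified in constant time via |CRYPTO_memcmp| above, so
   * decryption only happens for authenticated inputs and no unauthenticated
   * plaintext is ever written to |out|. */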
  aead_aes_ctr_hmac_sha256_crypt(aes_ctx, out, in, plaintext_len, nonce);

  *out_len = plaintext_len;
  return 1;
}

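/* The AES-CTR-HMAC-SHA256 AEADs take a composite key: the raw AES key (16 or
 * 32 bytes) followed by a 32-byte HMAC-SHA-256 key, giving the 48- and
 * 64-byte key lengths declared below. */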
static const EVP_AEAD aead_aes_128_ctr_hmac_sha256 = {
    16 /* AES key */ + 32 /* HMAC key */,
    12,                                   /* nonce length */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* overhead */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* max tag length */

    aead_aes_ctr_hmac_sha256_init,
    NULL /* init_with_direction */,
    aead_aes_ctr_hmac_sha256_cleanup,
    aead_aes_ctr_hmac_sha256_seal,
    aead_aes_ctr_hmac_sha256_open,
    NULL /* get_rc4_state */,
    NULL /* get_iv */,
};

static const EVP_AEAD aead_aes_256_ctr_hmac_sha256 = {
    32 /* AES key */ + 32 /* HMAC key */,
    12,                                   /* nonce length */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* overhead */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* max tag length */

    aead_aes_ctr_hmac_sha256_init,
    NULL /* init_with_direction */,
    aead_aes_ctr_hmac_sha256_cleanup,
    aead_aes_ctr_hmac_sha256_seal,
    aead_aes_ctr_hmac_sha256_open,
    NULL /* get_rc4_state */,
    NULL /* get_iv */,
};

const EVP_AEAD *EVP_aead_aes_128_ctr_hmac_sha256(void) {
  return &aead_aes_128_ctr_hmac_sha256;
}

const EVP_AEAD *EVP_aead_aes_256_ctr_hmac_sha256(void) {
  return &aead_aes_256_ctr_hmac_sha256;
}

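/* EVP_has_aes_hardware returns one if the current CPU provides both hardware
 * AES and carry-less multiplication (CLMUL on x86, PMULL on ARMv8) for GHASH,
 * i.e. the pieces needed for hardware-accelerated AES-GCM. */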
int EVP_has_aes_hardware(void) {
#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
  return aesni_capable() && crypto_gcm_clmul_enabled();
#elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
  return hwaes_capable() && CRYPTO_is_ARMv8_PMULL_capable();
#else
  return 0;
#endif
}