/external/wpa_supplicant_8/src/crypto/ |
D | aes-ccm.c |
    32   u8 aad_buf[2 * AES_BLOCK_SIZE];   in aes_ccm_auth_start()
    33   u8 b[AES_BLOCK_SIZE];   in aes_ccm_auth_start()
    41   WPA_PUT_BE16(&b[AES_BLOCK_SIZE - L], plain_len);   in aes_ccm_auth_start()
    43   wpa_hexdump_key(MSG_EXCESSIVE, "CCM B_0", b, AES_BLOCK_SIZE);   in aes_ccm_auth_start()
    56   if (aad_len > AES_BLOCK_SIZE - 2) {   in aes_ccm_auth_start()
    57   xor_aes_block(&aad_buf[AES_BLOCK_SIZE], x);   in aes_ccm_auth_start()
    59   aes_encrypt(aes, &aad_buf[AES_BLOCK_SIZE], x);   in aes_ccm_auth_start()
    66   size_t last = len % AES_BLOCK_SIZE;   in aes_ccm_auth()
    69   for (i = 0; i < len / AES_BLOCK_SIZE; i++) {   in aes_ccm_auth()
    72   data += AES_BLOCK_SIZE;   in aes_ccm_auth()
    [all …]
|
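The aes-ccm.c hits above build and hash the CCM B_0 block: a flags octet, the nonce, and the plaintext length stored big-endian in the last L octets. Below is a minimal sketch of that B_0 layout per RFC 3610; it assumes L = 2 length octets and a 13-byte nonce (the 802.11 CCMP parameters) and uses illustrative names rather than wpa_supplicant's.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Sketch: build the CCM B_0 block (RFC 3610). Assumes L = 2 length octets,
 * a 15 - L = 13 byte nonce and an M-byte authentication tag. */
static void ccm_build_b0(uint8_t b0[AES_BLOCK_SIZE], const uint8_t nonce[13],
                         size_t plain_len, size_t M, int has_aad)
{
    const size_t L = 2;

    /* Flags octet: [reserved][Adata][M' = (M - 2) / 2][L' = L - 1] */
    b0[0] = (has_aad ? 0x40 : 0x00) | (((M - 2) / 2) << 3) | (L - 1);
    memcpy(&b0[1], nonce, 15 - L);
    /* Plaintext length, big endian, in the last L octets */
    b0[AES_BLOCK_SIZE - 2] = (plain_len >> 8) & 0xff;
    b0[AES_BLOCK_SIZE - 1] = plain_len & 0xff;
}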
D | aes-cbc.c |
    27   u8 cbc[AES_BLOCK_SIZE];   in aes_128_cbc_encrypt()
    37   os_memcpy(cbc, iv, AES_BLOCK_SIZE);   in aes_128_cbc_encrypt()
    39   blocks = data_len / AES_BLOCK_SIZE;   in aes_128_cbc_encrypt()
    41   for (j = 0; j < AES_BLOCK_SIZE; j++)   in aes_128_cbc_encrypt()
    44   os_memcpy(pos, cbc, AES_BLOCK_SIZE);   in aes_128_cbc_encrypt()
    45   pos += AES_BLOCK_SIZE;   in aes_128_cbc_encrypt()
    63   u8 cbc[AES_BLOCK_SIZE], tmp[AES_BLOCK_SIZE];   in aes_128_cbc_decrypt()
    73   os_memcpy(cbc, iv, AES_BLOCK_SIZE);   in aes_128_cbc_decrypt()
    75   blocks = data_len / AES_BLOCK_SIZE;   in aes_128_cbc_decrypt()
    77   os_memcpy(tmp, pos, AES_BLOCK_SIZE);   in aes_128_cbc_decrypt()
    [all …]
|
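aes-cbc.c above is plain CBC: each plaintext block is XORed into the running chaining buffer, encrypted in place, and the result both becomes the ciphertext block and carries forward as the next chaining value. A standalone sketch of that loop follows; aes_encrypt_block() is an assumed stand-in for wpa_supplicant's internal aes_encrypt() primitive.

#include <stddef.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Assumed single-block primitive (wpa_supplicant uses aes_encrypt(ctx, in, out)). */
void aes_encrypt_block(void *ctx, const unsigned char *in, unsigned char *out);

/* Sketch: in-place CBC encryption of a whole number of blocks. */
int cbc_encrypt_sketch(void *aes, const unsigned char *iv,
                       unsigned char *data, size_t data_len)
{
    unsigned char cbc[AES_BLOCK_SIZE];
    unsigned char *pos = data;
    size_t i, j, blocks;

    if (data_len % AES_BLOCK_SIZE)
        return -1;                          /* CBC needs full blocks */
    memcpy(cbc, iv, AES_BLOCK_SIZE);
    blocks = data_len / AES_BLOCK_SIZE;
    for (i = 0; i < blocks; i++) {
        for (j = 0; j < AES_BLOCK_SIZE; j++)
            cbc[j] ^= pos[j];               /* chain previous ciphertext */
        aes_encrypt_block(aes, cbc, cbc);   /* encrypt in place */
        memcpy(pos, cbc, AES_BLOCK_SIZE);   /* emit ciphertext block */
        pos += AES_BLOCK_SIZE;
    }
    return 0;
}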
D | aes-siv.c |
    17   static const u8 zero[AES_BLOCK_SIZE];
    25   for (i = 0; i < AES_BLOCK_SIZE - 1; i++)   in dbl()
    27   pad[AES_BLOCK_SIZE - 1] <<= 1;   in dbl()
    29   pad[AES_BLOCK_SIZE - 1] ^= 0x87;   in dbl()
    37   for (i = 0; i < AES_BLOCK_SIZE; i++)   in xor()
    56   os_memset(pad, 0, AES_BLOCK_SIZE);   in pad_block()
    59   if (len < AES_BLOCK_SIZE)   in pad_block()
    67   u8 tmp[AES_BLOCK_SIZE], tmp2[AES_BLOCK_SIZE];   in aes_s2v()
    76   tmp[AES_BLOCK_SIZE - 1] = 1;   in aes_s2v()
    97   if (len[i] >= AES_BLOCK_SIZE) {   in aes_s2v()
    [all …]
|
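The dbl() hits in aes-siv.c are the S2V "double" operation: shift the 128-bit block left by one bit and, if a bit fell off the top, XOR the reduction constant 0x87 into the last byte. A self-contained sketch of that doubling:

#include <stdint.h>

#define AES_BLOCK_SIZE 16

/* Sketch: doubling in GF(2^128) as used by S2V (and CMAC subkey derivation).
 * Shift the 16-byte block left by one bit; if the top bit carried out,
 * XOR the last byte with 0x87 (x^128 + x^7 + x^2 + x + 1). */
static void gf128_dbl(uint8_t pad[AES_BLOCK_SIZE])
{
    int i, carry = pad[0] & 0x80;

    for (i = 0; i < AES_BLOCK_SIZE - 1; i++)
        pad[i] = (pad[i] << 1) | (pad[i + 1] >> 7);
    pad[AES_BLOCK_SIZE - 1] <<= 1;
    if (carry)
        pad[AES_BLOCK_SIZE - 1] ^= 0x87;
}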
D | aes-omac1.c |
    21   for (i = 0; i < AES_BLOCK_SIZE - 1; i++)   in gf_mulx()
    23   pad[AES_BLOCK_SIZE - 1] <<= 1;   in gf_mulx()
    25   pad[AES_BLOCK_SIZE - 1] ^= 0x87;   in gf_mulx()
    47   u8 cbc[AES_BLOCK_SIZE], pad[AES_BLOCK_SIZE];   in omac1_aes_vector()
    57   os_memset(cbc, 0, AES_BLOCK_SIZE);   in omac1_aes_vector()
    68   while (left >= AES_BLOCK_SIZE) {   in omac1_aes_vector()
    69   for (i = 0; i < AES_BLOCK_SIZE; i++) {   in omac1_aes_vector()
    76   if (i + 1 == AES_BLOCK_SIZE &&   in omac1_aes_vector()
    77   left == AES_BLOCK_SIZE)   in omac1_aes_vector()
    84   if (left > AES_BLOCK_SIZE)   in omac1_aes_vector()
    [all …]
|
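omac1_aes_vector() shows the CMAC/OMAC1 tail handling: a complete final block is XORed with the first subkey, a short one is padded with 0x80 followed by zeros and XORed with the second subkey, and the result is encrypted once more to give the MAC. A sketch of just that last-block preparation (subkey derivation and the earlier CBC-MAC passes are omitted):

#include <stddef.h>
#include <stdint.h>

#define AES_BLOCK_SIZE 16

/* Sketch: OMAC1/CMAC last-block handling. 'cbc' holds the running CBC-MAC
 * state with all full blocks already folded in; 'last' points at the trailing
 * 'left' bytes (left <= AES_BLOCK_SIZE, possibly 0); k1/k2 are the subkeys. */
static void omac1_last_block(uint8_t cbc[AES_BLOCK_SIZE],
                             const uint8_t *last, size_t left,
                             const uint8_t k1[AES_BLOCK_SIZE],
                             const uint8_t k2[AES_BLOCK_SIZE])
{
    size_t i;
    const uint8_t *subkey;

    if (left == AES_BLOCK_SIZE) {
        /* Complete final block: XOR it in and mask with K1. */
        for (i = 0; i < AES_BLOCK_SIZE; i++)
            cbc[i] ^= last[i];
        subkey = k1;
    } else {
        /* Partial final block: XOR the bytes, then the 0x80 pad bit,
         * the implicit zero padding is a no-op; mask with K2. */
        for (i = 0; i < left; i++)
            cbc[i] ^= last[i];
        cbc[left] ^= 0x80;
        subkey = k2;
    }
    for (i = 0; i < AES_BLOCK_SIZE; i++)
        cbc[i] ^= subkey[i];
    /* One final AES encryption of 'cbc' (not shown) yields the MAC. */
}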
D | aes-gcm.c |
    19    val = WPA_GET_BE32(block + AES_BLOCK_SIZE - 4);   in inc32()
    21    WPA_PUT_BE32(block + AES_BLOCK_SIZE - 4, val);   in inc32()
    146   u8 cb[AES_BLOCK_SIZE], tmp[AES_BLOCK_SIZE];   in aes_gctr()
    155   os_memcpy(cb, icb, AES_BLOCK_SIZE);   in aes_gctr()
    160   xpos += AES_BLOCK_SIZE;   in aes_gctr()
    161   ypos += AES_BLOCK_SIZE;   in aes_gctr()
    184   os_memset(H, 0, AES_BLOCK_SIZE);   in aes_gcm_init_hash_subkey()
    187   H, AES_BLOCK_SIZE);   in aes_gcm_init_hash_subkey()
    199   os_memset(J0 + iv_len, 0, AES_BLOCK_SIZE - iv_len);   in aes_gcm_prepare_j0()
    200   J0[AES_BLOCK_SIZE - 1] = 0x01;   in aes_gcm_prepare_j0()
    [all …]
|
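inc32() in aes-gcm.c touches only the trailing 32 bits of the counter block, incrementing them as a big-endian integer while the first 12 bytes (taken from the IV in J_0) stay fixed. A minimal sketch:

#include <stdint.h>

#define AES_BLOCK_SIZE 16

/* Sketch: GCM counter increment. Bump the trailing 32-bit big-endian
 * counter modulo 2^32; the leading 12 bytes are untouched. */
static void gcm_inc32(uint8_t block[AES_BLOCK_SIZE])
{
    uint32_t val;

    val = ((uint32_t) block[12] << 24) | ((uint32_t) block[13] << 16) |
          ((uint32_t) block[14] << 8) | block[15];
    val++;
    block[12] = (uint8_t) (val >> 24);
    block[13] = (uint8_t) (val >> 16);
    block[14] = (uint8_t) (val >> 8);
    block[15] = (uint8_t) val;
}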
D | crypto_internal-cipher.c |
    76    os_memcpy(ctx->u.aes.cbc, iv, AES_BLOCK_SIZE);   in crypto_cipher_init()
    117   if (len % AES_BLOCK_SIZE)   in crypto_cipher_encrypt()
    119   blocks = len / AES_BLOCK_SIZE;   in crypto_cipher_encrypt()
    121   for (j = 0; j < AES_BLOCK_SIZE; j++)   in crypto_cipher_encrypt()
    125   os_memcpy(crypt, ctx->u.aes.cbc, AES_BLOCK_SIZE);   in crypto_cipher_encrypt()
    126   plain += AES_BLOCK_SIZE;   in crypto_cipher_encrypt()
    127   crypt += AES_BLOCK_SIZE;   in crypto_cipher_encrypt()
    181   if (len % AES_BLOCK_SIZE)   in crypto_cipher_decrypt()
    183   blocks = len / AES_BLOCK_SIZE;   in crypto_cipher_decrypt()
    185   os_memcpy(tmp, crypt, AES_BLOCK_SIZE);   in crypto_cipher_decrypt()
    [all …]
|
D | aes-ctr.c |
    32   u8 counter[AES_BLOCK_SIZE], buf[AES_BLOCK_SIZE];   in aes_ctr_encrypt()
    37   os_memcpy(counter, nonce, AES_BLOCK_SIZE);   in aes_ctr_encrypt()
    42   len = (left < AES_BLOCK_SIZE) ? left : AES_BLOCK_SIZE;   in aes_ctr_encrypt()
    48   for (i = AES_BLOCK_SIZE - 1; i >= 0; i--) {   in aes_ctr_encrypt()
|
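aes_ctr_encrypt() treats the whole 16-byte counter as one big-endian integer: after each keystream block it increments the last byte and lets any carry ripple toward the front. A sketch of that increment:

#include <stdint.h>

#define AES_BLOCK_SIZE 16

/* Sketch: full-width CTR counter increment as in aes_ctr_encrypt(). Add one
 * to the 16-byte big-endian counter, propagating the carry from the last
 * byte toward the first. */
static void ctr128_inc(uint8_t counter[AES_BLOCK_SIZE])
{
    int i;

    for (i = AES_BLOCK_SIZE - 1; i >= 0; i--) {
        counter[i]++;
        if (counter[i])         /* stop once a byte did not wrap to zero */
            break;
    }
}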
D | aes-eax.c |
    34    u8 nonce_mac[AES_BLOCK_SIZE], hdr_mac[AES_BLOCK_SIZE],   in aes_128_eax_encrypt()
    35    data_mac[AES_BLOCK_SIZE];   in aes_128_eax_encrypt()
    69    for (i = 0; i < AES_BLOCK_SIZE; i++)   in aes_128_eax_encrypt()
    98    u8 nonce_mac[AES_BLOCK_SIZE], hdr_mac[AES_BLOCK_SIZE],   in aes_128_eax_decrypt()
    99    data_mac[AES_BLOCK_SIZE];   in aes_128_eax_decrypt()
    139   for (i = 0; i < AES_BLOCK_SIZE; i++) {   in aes_128_eax_decrypt()
|
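The aes-eax.c loops combine three OMAC1 values (over the tweaked nonce, the header, and the ciphertext) into the final tag by byte-wise XOR. A sketch of that combination step; computing the three MACs themselves is omitted:

#include <stdint.h>

#define AES_BLOCK_SIZE 16

/* Sketch: EAX tag assembly. The tag is the XOR of OMAC1(0 || nonce),
 * OMAC1(1 || header) and OMAC1(2 || ciphertext), truncated as needed. */
static void eax_combine_tag(const uint8_t nonce_mac[AES_BLOCK_SIZE],
                            const uint8_t hdr_mac[AES_BLOCK_SIZE],
                            const uint8_t data_mac[AES_BLOCK_SIZE],
                            uint8_t tag[AES_BLOCK_SIZE])
{
    int i;

    for (i = 0; i < AES_BLOCK_SIZE; i++)
        tag[i] = nonce_mac[i] ^ hdr_mac[i] ^ data_mac[i];
}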
/external/rust/crates/quiche/deps/boringssl/src/crypto/cmac/ |
D | cmac.c |
    65    uint8_t k1[AES_BLOCK_SIZE];
    66    uint8_t k2[AES_BLOCK_SIZE];
    68    uint8_t block[AES_BLOCK_SIZE];
    131   OPENSSL_memcpy(out->k1, in->k1, AES_BLOCK_SIZE);   in CMAC_CTX_copy()
    132   OPENSSL_memcpy(out->k2, in->k2, AES_BLOCK_SIZE);   in CMAC_CTX_copy()
    133   OPENSSL_memcpy(out->block, in->block, AES_BLOCK_SIZE);   in CMAC_CTX_copy()
    172   static const uint8_t kZeroIV[AES_BLOCK_SIZE] = {0};
    176   uint8_t scratch[AES_BLOCK_SIZE];   in CMAC_Init()
    179   if ((block_size != AES_BLOCK_SIZE && block_size != 8 /* 3-DES */) ||   in CMAC_Init()
    188   if (block_size == AES_BLOCK_SIZE) {   in CMAC_Init()
    [all …]
|
/external/boringssl/src/crypto/cmac/ |
D | cmac.c |
    65    uint8_t k1[AES_BLOCK_SIZE];
    66    uint8_t k2[AES_BLOCK_SIZE];
    68    uint8_t block[AES_BLOCK_SIZE];
    131   OPENSSL_memcpy(out->k1, in->k1, AES_BLOCK_SIZE);   in CMAC_CTX_copy()
    132   OPENSSL_memcpy(out->k2, in->k2, AES_BLOCK_SIZE);   in CMAC_CTX_copy()
    133   OPENSSL_memcpy(out->block, in->block, AES_BLOCK_SIZE);   in CMAC_CTX_copy()
    172   static const uint8_t kZeroIV[AES_BLOCK_SIZE] = {0};
    176   uint8_t scratch[AES_BLOCK_SIZE];   in CMAC_Init()
    179   if ((block_size != AES_BLOCK_SIZE && block_size != 8 /* 3-DES */) ||   in CMAC_Init()
    188   if (block_size == AES_BLOCK_SIZE) {   in CMAC_Init()
    [all …]
|
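The k1/k2 fields in cmac.c hold the two CMAC subkeys, derived by encrypting an all-zero block (kZeroIV above) and doubling it twice in GF(2^128), per SP 800-38B / RFC 4493. A sketch of that derivation using the public AES_set_encrypt_key()/AES_encrypt() API; this is the textbook procedure, not BoringSSL's internal code path:

#include <stdint.h>
#include <string.h>
#include <openssl/aes.h>   /* AES_KEY, AES_BLOCK_SIZE, AES_set_encrypt_key(), AES_encrypt() */

/* Same GF(2^128) doubling as in the aes-siv.c sketch above. */
static void gf128_dbl(uint8_t b[AES_BLOCK_SIZE])
{
    int i, carry = b[0] & 0x80;

    for (i = 0; i < AES_BLOCK_SIZE - 1; i++)
        b[i] = (b[i] << 1) | (b[i + 1] >> 7);
    b[AES_BLOCK_SIZE - 1] <<= 1;
    if (carry)
        b[AES_BLOCK_SIZE - 1] ^= 0x87;
}

/* Sketch: derive the CMAC subkeys K1 = dbl(E_K(0^128)) and K2 = dbl(K1). */
static int cmac_derive_subkeys(const uint8_t *key, size_t key_len,
                               uint8_t k1[AES_BLOCK_SIZE],
                               uint8_t k2[AES_BLOCK_SIZE])
{
    AES_KEY aes;
    static const uint8_t zero[AES_BLOCK_SIZE] = {0};

    if (AES_set_encrypt_key(key, (unsigned) key_len * 8, &aes) != 0)
        return -1;
    AES_encrypt(zero, k1, &aes);            /* L = E_K(0^128) */
    gf128_dbl(k1);                          /* K1 = dbl(L) */
    memcpy(k2, k1, AES_BLOCK_SIZE);
    gf128_dbl(k2);                          /* K2 = dbl(K1) */
    return 0;
}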
/external/openssh/ |
D | cipher-ctr.c |
    40    u_char aes_counter[AES_BLOCK_SIZE];
    64    u_char buf[AES_BLOCK_SIZE];   in ssh_aes_ctr()
    74    ssh_ctr_inc(c->aes_counter, AES_BLOCK_SIZE);   in ssh_aes_ctr()
    77    n = (n + 1) % AES_BLOCK_SIZE;   in ssh_aes_ctr()
    96    memcpy(c->aes_counter, iv, AES_BLOCK_SIZE);   in ssh_aes_ctr_init()
    133   aes_ctr.block_size = AES_BLOCK_SIZE;   in evp_aes_128_ctr()
    134   aes_ctr.iv_len = AES_BLOCK_SIZE;   in evp_aes_128_ctr()
|
D | cipher-aesctr.c |
    65   memcpy(x->ctr, iv, AES_BLOCK_SIZE);   in aesctr_ivsetup()
    72   u8 buf[AES_BLOCK_SIZE];   in aesctr_encrypt_bytes()
    77   aesctr_inc(x->ctr, AES_BLOCK_SIZE);   in aesctr_encrypt_bytes()
    80   n = (n + 1) % AES_BLOCK_SIZE;   in aesctr_encrypt_bytes()
|
D | cipher-aesctr.h |
    23   #define AES_BLOCK_SIZE 16   macro
    28   u8 ctr[AES_BLOCK_SIZE]; /* counter */
|
/external/boringssl/src/crypto/fipsmodule/rand/ |
D | ctrdrbg.c |
    67    OPENSSL_STATIC_ASSERT(CTR_DRBG_ENTROPY_LEN % AES_BLOCK_SIZE == 0,
    87    for (size_t i = 0; i < CTR_DRBG_ENTROPY_LEN; i += AES_BLOCK_SIZE) {   in ctr_drbg_update()
    157   while (out_len >= AES_BLOCK_SIZE) {   in CTR_DRBG_generate()
    163   todo &= ~(AES_BLOCK_SIZE-1);   in CTR_DRBG_generate()
    164   const size_t num_blocks = todo / AES_BLOCK_SIZE;   in CTR_DRBG_generate()
    172   for (size_t i = 0; i < todo; i += AES_BLOCK_SIZE) {   in CTR_DRBG_generate()
    183   uint8_t block[AES_BLOCK_SIZE];   in CTR_DRBG_generate()
|
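CTR_DRBG_generate() first rounds the bulk of the request down to a whole number of AES blocks (todo &= ~(AES_BLOCK_SIZE-1)), writes those keystream blocks straight into the caller's buffer, and serves any remaining tail from a temporary block. A sketch of that output loop under the SP 800-90A model; drbg_block() and ctr128_inc() are assumed stand-ins for the DRBG's AES-256 block encryption and the counter increment (as sketched after aes-ctr.c above), not BoringSSL APIs:

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Assumed primitives, declared only. */
void drbg_block(void *key, const uint8_t in[AES_BLOCK_SIZE],
                uint8_t out[AES_BLOCK_SIZE]);
void ctr128_inc(uint8_t counter[AES_BLOCK_SIZE]);

/* Sketch: CTR-DRBG output generation. 'counter' is V from SP 800-90A and is
 * incremented before every block encryption. */
static void drbg_generate_sketch(void *key, uint8_t counter[AES_BLOCK_SIZE],
                                 uint8_t *out, size_t out_len)
{
    /* Round the bulk portion down to a multiple of the block size. */
    size_t todo = out_len & ~(size_t) (AES_BLOCK_SIZE - 1);
    size_t i;

    for (i = 0; i < todo; i += AES_BLOCK_SIZE) {
        ctr128_inc(counter);
        drbg_block(key, counter, out + i);      /* whole blocks, in place */
    }
    if (todo < out_len) {                       /* partial final block */
        uint8_t block[AES_BLOCK_SIZE];

        ctr128_inc(counter);
        drbg_block(key, counter, block);
        memcpy(out + todo, block, out_len - todo);
    }
}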
/external/rust/crates/quiche/deps/boringssl/src/crypto/fipsmodule/rand/ |
D | ctrdrbg.c |
    67    OPENSSL_STATIC_ASSERT(CTR_DRBG_ENTROPY_LEN % AES_BLOCK_SIZE == 0,
    87    for (size_t i = 0; i < CTR_DRBG_ENTROPY_LEN; i += AES_BLOCK_SIZE) {   in ctr_drbg_update()
    157   while (out_len >= AES_BLOCK_SIZE) {   in CTR_DRBG_generate()
    163   todo &= ~(AES_BLOCK_SIZE-1);   in CTR_DRBG_generate()
    164   const size_t num_blocks = todo / AES_BLOCK_SIZE;   in CTR_DRBG_generate()
    172   for (size_t i = 0; i < todo; i += AES_BLOCK_SIZE) {   in CTR_DRBG_generate()
    183   uint8_t block[AES_BLOCK_SIZE];   in CTR_DRBG_generate()
|
/external/boringssl/src/crypto/fipsmodule/aes/ |
D | key_wrap.c |
    81    uint8_t A[AES_BLOCK_SIZE];   in AES_wrap_key()
    117   uint8_t A[AES_BLOCK_SIZE];   in aes_unwrap_key_inner()
    174   uint8_t block[AES_BLOCK_SIZE];   in AES_wrap_key_padded()
    182   *out_len = AES_BLOCK_SIZE;   in AES_wrap_key_padded()
    205   if (in_len < AES_BLOCK_SIZE || max_out < in_len - 8) {   in AES_unwrap_key_padded()
    210   if (in_len == AES_BLOCK_SIZE) {   in AES_unwrap_key_padded()
    211   uint8_t block[AES_BLOCK_SIZE];   in AES_unwrap_key_padded()
|
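In key_wrap.c the A[] scratch buffer is a full AES_BLOCK_SIZE because every RFC 3394 step encrypts A || R[i] as one AES block: the integrity register occupies the first 8 bytes and the current 64-bit data register the last 8. A hedged sketch of the wrap side using the public AES_set_encrypt_key()/AES_encrypt() API, not BoringSSL's exact implementation:

#include <stdint.h>
#include <string.h>
#include <openssl/aes.h>   /* AES_KEY, AES_BLOCK_SIZE, AES_set_encrypt_key(), AES_encrypt() */

/* Sketch: RFC 3394 AES key wrap. 'in' holds the key data (a multiple of
 * 8 bytes, at least 16); 'out' must have room for in_len + 8 bytes. */
static int aes_wrap_sketch(const uint8_t *kek, size_t kek_len,
                           const uint8_t *in, size_t in_len, uint8_t *out)
{
    AES_KEY key;
    uint8_t A[AES_BLOCK_SIZE];
    size_t n = in_len / 8, i, j, k;
    uint64_t t;

    if (in_len < 16 || in_len % 8 != 0 ||
        AES_set_encrypt_key(kek, (unsigned) kek_len * 8, &key) != 0)
        return -1;

    memset(A, 0xa6, 8);                     /* default IV from RFC 3394 */
    memcpy(out + 8, in, in_len);            /* R[1..n] */

    for (j = 0; j < 6; j++) {
        for (i = 1; i <= n; i++) {
            memcpy(A + 8, out + 8 * i, 8);
            AES_encrypt(A, A, &key);        /* B = AES(K, A || R[i]) */
            t = (uint64_t) (n * j + i);
            for (k = 0; k < 8; k++)         /* A = MSB64(B) ^ t, big endian */
                A[7 - k] ^= (uint8_t) (t >> (8 * k));
            memcpy(out + 8 * i, A + 8, 8);  /* R[i] = LSB64(B) */
        }
    }
    memcpy(out, A, 8);                      /* C[0] = A */
    return 0;
}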
D | aes_test.cc |
    42    ASSERT_EQ(static_cast<unsigned>(AES_BLOCK_SIZE), plaintext.size());   in TestRaw()
    43    ASSERT_EQ(static_cast<unsigned>(AES_BLOCK_SIZE), ciphertext.size());   in TestRaw()
    49    uint8_t block[AES_BLOCK_SIZE];   in TestRaw()
    54    OPENSSL_memcpy(block, plaintext.data(), AES_BLOCK_SIZE);   in TestRaw()
    65    OPENSSL_memcpy(block, ciphertext.data(), AES_BLOCK_SIZE);   in TestRaw()
    280   uint8_t block[AES_BLOCK_SIZE];   in TEST()
    281   uint8_t buf[AES_BLOCK_SIZE * 64] = {0};   in TEST()
    305   CHECK_ABI(bsaes_cbc_encrypt, buf, buf, AES_BLOCK_SIZE * blocks, &key,   in TEST()
    316   CHECK_ABI(vpaes_cbc_encrypt, buf, buf, AES_BLOCK_SIZE * blocks, &key,   in TEST()
    329   CHECK_ABI(vpaes_cbc_encrypt, buf, buf, AES_BLOCK_SIZE * blocks, &key,   in TEST()
    [all …]
|
/external/rust/crates/quiche/deps/boringssl/src/crypto/fipsmodule/aes/ |
D | key_wrap.c |
    81    uint8_t A[AES_BLOCK_SIZE];   in AES_wrap_key()
    117   uint8_t A[AES_BLOCK_SIZE];   in aes_unwrap_key_inner()
    174   uint8_t block[AES_BLOCK_SIZE];   in AES_wrap_key_padded()
    182   *out_len = AES_BLOCK_SIZE;   in AES_wrap_key_padded()
    205   if (in_len < AES_BLOCK_SIZE || max_out < in_len - 8) {   in AES_unwrap_key_padded()
    210   if (in_len == AES_BLOCK_SIZE) {   in AES_unwrap_key_padded()
    211   uint8_t block[AES_BLOCK_SIZE];   in AES_unwrap_key_padded()
|
/external/lzma/CPP/7zip/Crypto/ |
D | MyAes.cpp |
    19   memset(_iv, 0, AES_BLOCK_SIZE);   in CAesCbcCoder()
    35   if (size < AES_BLOCK_SIZE)   in STDMETHODIMP_()
    36   return AES_BLOCK_SIZE;   in STDMETHODIMP_()
    56   if (size != AES_BLOCK_SIZE)   in SetInitVector()
|
/external/wpa_supplicant_8/src/eap_common/ |
D | eap_eke_common.c |
    49    return AES_BLOCK_SIZE + dhlen;   in eap_eke_dhcomp_len()
    102   return AES_BLOCK_SIZE + 16 + mac_len;   in eap_eke_pnonce_len()
    117   return AES_BLOCK_SIZE + 2 * 16 + mac_len;   in eap_eke_pnonce_ps_len()
    347   u8 iv[AES_BLOCK_SIZE];   in eap_eke_dhcomp()
    361   if (random_get_bytes(iv, AES_BLOCK_SIZE))   in eap_eke_dhcomp()
    364   iv, AES_BLOCK_SIZE);   in eap_eke_dhcomp()
    368   os_memcpy(ret_dhcomp, iv, AES_BLOCK_SIZE);   in eap_eke_dhcomp()
    369   os_memcpy(ret_dhcomp + AES_BLOCK_SIZE, pub, dh_len);   in eap_eke_dhcomp()
    371   ret_dhcomp, AES_BLOCK_SIZE + dh_len);   in eap_eke_dhcomp()
    391   os_memcpy(peer_pub, peer_dhcomp + AES_BLOCK_SIZE, dh->prime_len);   in eap_eke_shared_secret()
    [all …]
|
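The eap_eke_common.c lengths suggest the DHComponent layout: a fresh AES_BLOCK_SIZE-byte IV followed by the encrypted DH public value, hence eap_eke_dhcomp_len() = AES_BLOCK_SIZE + dhlen. A sketch of composing that buffer, assuming the AES-128-CBC case; random_get_bytes() and aes_128_cbc_encrypt() are declared here as stand-ins for wpa_supplicant's helpers, and their prototypes are assumptions:

#include <stddef.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

typedef unsigned char u8;

/* Assumed helpers mirroring wpa_supplicant's crypto API. */
int random_get_bytes(void *buf, size_t len);
int aes_128_cbc_encrypt(const u8 *key, const u8 *iv, u8 *data, size_t data_len);

/* Sketch: build a DHComponent as IV || CBC-encrypted DH public value,
 * so the total length is AES_BLOCK_SIZE + dh_len. */
static int eke_dhcomp_sketch(const u8 *key, const u8 *pub, size_t dh_len,
                             u8 *out /* AES_BLOCK_SIZE + dh_len bytes */)
{
    u8 iv[AES_BLOCK_SIZE];

    if (dh_len % AES_BLOCK_SIZE)
        return -1;                          /* CBC needs whole blocks */
    if (random_get_bytes(iv, AES_BLOCK_SIZE))
        return -1;
    memcpy(out, iv, AES_BLOCK_SIZE);
    memcpy(out + AES_BLOCK_SIZE, pub, dh_len);
    return aes_128_cbc_encrypt(key, iv, out + AES_BLOCK_SIZE, dh_len);
}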
/external/rust/crates/quiche/deps/boringssl/src/include/openssl/ |
D | aes.h |
    68    #define AES_BLOCK_SIZE 16   macro
    112   uint8_t ivec[AES_BLOCK_SIZE],
    113   uint8_t ecount_buf[AES_BLOCK_SIZE],
|
/external/boringssl/src/include/openssl/ |
D | aes.h |
    68    #define AES_BLOCK_SIZE 16   macro
    115   uint8_t ivec[AES_BLOCK_SIZE],
    116   uint8_t ecount_buf[AES_BLOCK_SIZE],
|
/external/wpa_supplicant_8/src/common/ |
D | dpp_auth.c |
    68    u8 wrapped_data[4 + DPP_MAX_NONCE_LEN + 4 + 1 + AES_BLOCK_SIZE];   in dpp_auth_build_req()
    210   siv_len += AES_BLOCK_SIZE;   in dpp_auth_build_req()
    246   4 + 4 + DPP_MAX_HASH_LEN + AES_BLOCK_SIZE   in dpp_auth_build_resp()
    248   u8 wrapped_data[DPP_AUTH_RESP_CLEAR_LEN + AES_BLOCK_SIZE];   in dpp_auth_build_resp()
    396   siv_len += AES_BLOCK_SIZE;   in dpp_auth_build_resp()
    424   u8 wrapped_r_auth[4 + DPP_MAX_HASH_LEN + AES_BLOCK_SIZE], *w_r_auth;   in dpp_auth_build_resp_ok()
    519   wrapped_r_auth_len = 4 + auth->curve->hash_len + AES_BLOCK_SIZE;   in dpp_auth_build_resp_ok()
    701   if (!wrapped_data || wrapped_data_len < AES_BLOCK_SIZE) {   in dpp_auth_req_rx()
    803   unwrapped_len = wrapped_data_len - AES_BLOCK_SIZE;   in dpp_auth_req_rx()
    982   4 + i_auth_len + r_nonce_len + AES_BLOCK_SIZE;   in dpp_auth_build_conf()
    [all …]
|
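Throughout dpp_auth.c the wrapped-data buffers are sized as the clear length plus one AES block because AES-SIV (RFC 5297) output is the 16-byte synthetic IV followed by a ciphertext of exactly the plaintext length; unwrapping subtracts the same amount and rejects anything shorter than the tag alone. A small sketch of that arithmetic (names are illustrative):

#include <stddef.h>

#define AES_BLOCK_SIZE 16

/* Sketch: AES-SIV ciphertext is SIV || C, with |C| == |plaintext|. */
static size_t dpp_wrapped_len(size_t clear_len)
{
    return clear_len + AES_BLOCK_SIZE;
}

static int dpp_unwrapped_len(size_t wrapped_len, size_t *clear_len)
{
    if (wrapped_len < AES_BLOCK_SIZE)
        return -1;              /* must at least contain the SIV tag */
    *clear_len = wrapped_len - AES_BLOCK_SIZE;
    return 0;
}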
/external/boringssl/src/crypto/cipher_extra/ |
D | e_aesgcmsiv.c |
    424   in_len_64 > (UINT64_C(1) << 36) + AES_BLOCK_SIZE) {   in aead_aes_gcm_siv_asm_open()
    620   const uint8_t initial_counter[AES_BLOCK_SIZE],   in gcm_siv_crypt() argument
    627   OPENSSL_memcpy(counter.c, initial_counter, AES_BLOCK_SIZE);   in gcm_siv_crypt()
    631   uint8_t keystream[AES_BLOCK_SIZE];   in gcm_siv_crypt()
    635   size_t todo = AES_BLOCK_SIZE;   in gcm_siv_crypt()
    713   uint8_t counter[AES_BLOCK_SIZE];   in gcm_siv_keys()
    714   OPENSSL_memset(counter, 0, AES_BLOCK_SIZE - EVP_AEAD_AES_GCM_SIV_NONCE_LEN);   in gcm_siv_keys()
    715   OPENSSL_memcpy(counter + AES_BLOCK_SIZE - EVP_AEAD_AES_GCM_SIV_NONCE_LEN,   in gcm_siv_keys()
    720   uint8_t ciphertext[AES_BLOCK_SIZE];   in gcm_siv_keys()
    794   in_len_64 > (UINT64_C(1) << 36) + AES_BLOCK_SIZE) {   in aead_aes_gcm_siv_open_gather()
|
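gcm_siv_crypt() runs a CTR layer whose initial counter is the 16-byte tag with the top bit of its last byte forced on, and whose counter value lives in the first four bytes as a little-endian 32-bit integer (RFC 8452). A hedged sketch of that keystream loop using the public AES_encrypt() API rather than BoringSSL's vectorized paths:

#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <openssl/aes.h>   /* AES_KEY, AES_BLOCK_SIZE, AES_encrypt() */

/* Sketch: AES-GCM-SIV CTR layer. Encrypt or decrypt 'data' in place by
 * XORing it with AES(counter), where the counter starts as the tag with its
 * top bit set and bytes 0..3 increment as a little-endian 32-bit integer. */
static void gcm_siv_ctr_sketch(const AES_KEY *key,
                               const uint8_t tag[AES_BLOCK_SIZE],
                               uint8_t *data, size_t len)
{
    uint8_t counter[AES_BLOCK_SIZE], keystream[AES_BLOCK_SIZE];
    size_t done = 0, todo, i;
    uint32_t ctr32;

    memcpy(counter, tag, AES_BLOCK_SIZE);
    counter[AES_BLOCK_SIZE - 1] |= 0x80;

    while (done < len) {
        AES_encrypt(counter, keystream, key);

        todo = len - done;
        if (todo > AES_BLOCK_SIZE)
            todo = AES_BLOCK_SIZE;          /* partial final block */
        for (i = 0; i < todo; i++)
            data[done + i] ^= keystream[i];
        done += todo;

        /* Increment the little-endian 32-bit counter in bytes 0..3. */
        ctr32 = (uint32_t) counter[0] | ((uint32_t) counter[1] << 8) |
                ((uint32_t) counter[2] << 16) | ((uint32_t) counter[3] << 24);
        ctr32++;
        counter[0] = (uint8_t) ctr32;
        counter[1] = (uint8_t) (ctr32 >> 8);
        counter[2] = (uint8_t) (ctr32 >> 16);
        counter[3] = (uint8_t) (ctr32 >> 24);
    }
}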
/external/rust/crates/quiche/deps/boringssl/src/crypto/cipher_extra/ |
D | e_aesgcmsiv.c |
    424   in_len_64 > (UINT64_C(1) << 36) + AES_BLOCK_SIZE) {   in aead_aes_gcm_siv_asm_open()
    620   const uint8_t initial_counter[AES_BLOCK_SIZE],   in gcm_siv_crypt() argument
    627   OPENSSL_memcpy(counter.c, initial_counter, AES_BLOCK_SIZE);   in gcm_siv_crypt()
    631   uint8_t keystream[AES_BLOCK_SIZE];   in gcm_siv_crypt()
    635   size_t todo = AES_BLOCK_SIZE;   in gcm_siv_crypt()
    713   uint8_t counter[AES_BLOCK_SIZE];   in gcm_siv_keys()
    714   OPENSSL_memset(counter, 0, AES_BLOCK_SIZE - EVP_AEAD_AES_GCM_SIV_NONCE_LEN);   in gcm_siv_keys()
    715   OPENSSL_memcpy(counter + AES_BLOCK_SIZE - EVP_AEAD_AES_GCM_SIV_NONCE_LEN,   in gcm_siv_keys()
    720   uint8_t ciphertext[AES_BLOCK_SIZE];   in gcm_siv_keys()
    794   in_len_64 > (UINT64_C(1) << 36) + AES_BLOCK_SIZE) {   in aead_aes_gcm_siv_open_gather()
|