Lines Matching refs: Xi
154 static void gcm_gmult_4bit(uint64_t Xi[2], const u128 Htable[16]) { in gcm_gmult_4bit()
163 nlo = ((const uint8_t *)Xi)[15]; in gcm_gmult_4bit()
187 nlo = ((const uint8_t *)Xi)[cnt]; in gcm_gmult_4bit()
206 Xi[0] = BSWAP8(Z.hi); in gcm_gmult_4bit()
207 Xi[1] = BSWAP8(Z.lo); in gcm_gmult_4bit()
209 uint8_t *p = (uint8_t *)Xi; in gcm_gmult_4bit()
221 Xi[0] = Z.hi; in gcm_gmult_4bit()
222 Xi[1] = Z.lo; in gcm_gmult_4bit()
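
The gcm_gmult_4bit lines above (206-222) show three ways the 128-bit product Z is written back into Xi: a 64-bit byte-swap store, a bytewise store through a uint8_t pointer, and a direct store, presumably for big-endian targets where no swap is needed. A minimal sketch of that store step, with the listing's BSWAP8 stood in by a compiler builtin (an assumption, not the original macro):

#include <stdint.h>

/* Hypothetical stand-ins for this sketch only: a 128-bit value held as two
 * 64-bit halves and a 64-bit byte swap (the listing's BSWAP8). */
typedef struct { uint64_t hi, lo; } u128_sketch;
#define BSWAP8_SKETCH(x) __builtin_bswap64(x)

/* Write the product Z back into Xi in GHASH's big-endian byte order. */
static void store_Xi_sketch(uint64_t Xi[2], u128_sketch Z, int have_bswap8,
                            int big_endian_host) {
  if (big_endian_host) {
    Xi[0] = Z.hi;                    /* cf. lines 221-222: no swap needed */
    Xi[1] = Z.lo;
  } else if (have_bswap8) {
    Xi[0] = BSWAP8_SKETCH(Z.hi);     /* cf. lines 206-207 and 288-289 */
    Xi[1] = BSWAP8_SKETCH(Z.lo);
  } else {
    uint8_t *p = (uint8_t *)Xi;      /* cf. the byte-pointer path at 209/291 */
    for (int i = 0; i < 8; i++) {
      p[i] = (uint8_t)(Z.hi >> (56 - 8 * i));
      p[8 + i] = (uint8_t)(Z.lo >> (56 - 8 * i));
    }
  }
}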
231 static void gcm_ghash_4bit(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp, in gcm_ghash_4bit()
243 nlo = ((const uint8_t *)Xi)[15]; in gcm_ghash_4bit()
268 nlo = ((const uint8_t *)Xi)[cnt]; in gcm_ghash_4bit()
288 Xi[0] = BSWAP8(Z.hi); in gcm_ghash_4bit()
289 Xi[1] = BSWAP8(Z.lo); in gcm_ghash_4bit()
291 uint8_t *p = (uint8_t *)Xi; in gcm_ghash_4bit()
303 Xi[0] = Z.hi; in gcm_ghash_4bit()
304 Xi[1] = Z.lo; in gcm_ghash_4bit()
309 void gcm_gmult_4bit(uint64_t Xi[2], const u128 Htable[16]);
310 void gcm_ghash_4bit(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp,
314 #define GCM_MUL(ctx, Xi) gcm_gmult_4bit(ctx->Xi.u, ctx->Htable) argument
316 #define GHASH(ctx, in, len) gcm_ghash_4bit((ctx)->Xi.u, (ctx)->Htable, in, len)
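
Lines 309-316 wire the generic 4-bit table implementation into the GCM_MUL/GHASH macros, and the calling pattern the rest of the listing follows is: XOR data into ctx->Xi, then multiply the accumulator by H. A minimal sketch of that pattern, assuming a simplified context layout (the real GCM128_CONTEXT is not shown in this listing, only its Xi.u/Xi.c and Htable accessors) and assuming the real 4-bit table routine from gcm.c is linked in:

#include <stdint.h>

typedef struct { uint64_t hi, lo; } u128;

/* Simplified context for this sketch. */
typedef struct {
  union { uint64_t u[2]; uint8_t c[16]; } Xi;  /* running GHASH accumulator */
  u128 Htable[16];                             /* 4-bit multiplication table */
} gcm_ctx_sketch;

/* Prototype as listed at line 309; the definition lives in gcm.c. */
void gcm_gmult_4bit(uint64_t Xi[2], const u128 Htable[16]);

/* Macro as listed at line 314 (its second argument is effectively unused). */
#define GCM_MUL(ctx, Xi) gcm_gmult_4bit(ctx->Xi.u, ctx->Htable)

/* Absorb one 16-byte block: XOR it into Xi, then multiply by H. */
static void ghash_block_sketch(gcm_ctx_sketch *ctx, const uint8_t block[16]) {
  for (int i = 0; i < 16; i++) {
    ctx->Xi.c[i] ^= block[i];        /* same XOR pattern as lines 617/627 */
  }
  GCM_MUL(ctx, Xi);                  /* Xi := (Xi ^ block) * H */
}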
328 void gcm_init_clmul(u128 Htable[16], const uint64_t Xi[2]);
329 void gcm_gmult_clmul(uint64_t Xi[2], const u128 Htable[16]);
330 void gcm_ghash_clmul(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp,
338 void gcm_init_avx(u128 Htable[16], const uint64_t Xi[2]);
339 void gcm_gmult_avx(uint64_t Xi[2], const u128 Htable[16]);
340 void gcm_ghash_avx(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp, size_t len);
345 void gcm_gmult_4bit_mmx(uint64_t Xi[2], const u128 Htable[16]);
346 void gcm_ghash_4bit_mmx(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp,
349 void gcm_gmult_4bit_x86(uint64_t Xi[2], const u128 Htable[16]);
350 void gcm_ghash_4bit_x86(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp,
363 void gcm_init_v8(u128 Htable[16], const uint64_t Xi[2]);
364 void gcm_gmult_v8(uint64_t Xi[2], const u128 Htable[16]);
365 void gcm_ghash_v8(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp,
374 void gcm_init_neon(u128 Htable[16], const uint64_t Xi[2]);
375 void gcm_gmult_neon(uint64_t Xi[2], const u128 Htable[16]);
376 void gcm_ghash_neon(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp,
383 void gcm_init_neon(u128 Htable[16], const uint64_t Xi[2]) { in gcm_init_neon()
386 void gcm_gmult_neon(uint64_t Xi[2], const u128 Htable[16]) { in gcm_gmult_neon()
389 void gcm_ghash_neon(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp, in gcm_ghash_neon()
401 #define GCM_MUL(ctx, Xi) (*gcm_gmult_p)(ctx->Xi.u, ctx->Htable) argument
404 #define GHASH(ctx, in, len) (*gcm_ghash_p)(ctx->Xi.u, ctx->Htable, in, len)
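
Because every backend listed above (4bit, clmul, avx, mmx/x86, v8, neon) shares the same two signatures, lines 401/404 can route GCM_MUL and GHASH through function pointers instead of fixed calls. A sketch of that dispatch under a simplified context; the locals gcm_gmult_p/gcm_ghash_p mirror the loads visible at lines 501, 576-578, 647-649 and onward, and the CPU-capability selection that fills in ctx->gmult/ghash is not part of this listing:

#include <stdint.h>
#include <stddef.h>

typedef struct { uint64_t hi, lo; } u128;

/* Every listed backend matches these two shapes. */
typedef void (*gcm_gmult_f)(uint64_t Xi[2], const u128 Htable[16]);
typedef void (*gcm_ghash_f)(uint64_t Xi[2], const u128 Htable[16],
                            const uint8_t *inp, size_t len);

/* Simplified context for this sketch; gmult/ghash are assumed to be selected
 * once at init time. */
typedef struct {
  union { uint64_t u[2]; uint8_t c[16]; } Xi;
  u128 Htable[16];
  gcm_gmult_f gmult;
  gcm_ghash_f ghash;
} gcm_ptr_ctx_sketch;

/* Macros as listed at lines 401/404: they expect locals named gcm_gmult_p /
 * gcm_ghash_p to be in scope. */
#define GCM_MUL(ctx, Xi) (*gcm_gmult_p)(ctx->Xi.u, ctx->Htable)
#define GHASH(ctx, in, len) (*gcm_ghash_p)(ctx->Xi.u, ctx->Htable, in, len)

/* Each CRYPTO_gcm128_* entry in the listing loads the context's pointers into
 * such locals before using the macros. */
static void absorb_block_sketch(gcm_ptr_ctx_sketch *ctx,
                                const uint8_t block[16]) {
  gcm_gmult_f gcm_gmult_p = ctx->gmult;
  for (int i = 0; i < 16; i++) {
    ctx->Xi.c[i] ^= block[i];
  }
  GCM_MUL(ctx, Xi);
}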
501 void (*gcm_gmult_p)(uint64_t Xi[2], const u128 Htable[16]) = ctx->gmult; in CRYPTO_gcm128_setiv()
506 ctx->Xi.u[0] = 0; in CRYPTO_gcm128_setiv()
507 ctx->Xi.u[1] = 0; in CRYPTO_gcm128_setiv()
576 void (*gcm_gmult_p)(uint64_t Xi[2], const u128 Htable[16]) = ctx->gmult; in CRYPTO_gcm128_aad()
578 void (*gcm_ghash_p)(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp, in CRYPTO_gcm128_aad()
596 ctx->Xi.c[n] ^= *(aad++); in CRYPTO_gcm128_aad()
601 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_aad()
617 ctx->Xi.c[i] ^= aad[i]; in CRYPTO_gcm128_aad()
619 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_aad()
627 ctx->Xi.c[i] ^= aad[i]; in CRYPTO_gcm128_aad()
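
The CRYPTO_gcm128_aad lines (596-627) absorb additional authenticated data in three stages: top up a partially filled block byte by byte, hash whole 16-byte blocks, and XOR in any tail without reducing it yet. A standalone sketch of that structure, with the multiply abstracted behind a hypothetical callback:

#include <stdint.h>
#include <stddef.h>

/* State for this sketch only: the 16-byte accumulator and how many bytes of
 * the current block have already been XORed in. */
typedef struct {
  uint8_t Xi[16];
  unsigned n;
} ghash_acc_sketch;

/* Stands in for "Xi := Xi * H" (GCM_MUL in the listing). */
typedef void (*gmult_sketch_f)(uint8_t Xi[16]);

static void aad_absorb_sketch(ghash_acc_sketch *s, gmult_sketch_f gmult,
                              const uint8_t *aad, size_t len) {
  /* Top up a partially filled block first (cf. lines 596-601). */
  while (s->n != 0 && len > 0) {
    s->Xi[s->n] ^= *aad++;
    len--;
    s->n = (s->n + 1) & 15;
    if (s->n == 0) {
      gmult(s->Xi);
    }
  }
  if (s->n != 0) {
    return;                          /* ran out of data mid-block */
  }
  /* Whole 16-byte blocks (cf. 617-619); the real code can hand these to a
   * bulk GHASH routine instead of one multiply per block. */
  while (len >= 16) {
    for (size_t i = 0; i < 16; i++) s->Xi[i] ^= aad[i];
    gmult(s->Xi);
    aad += 16;
    len -= 16;
  }
  /* Any tail is only XORed in here (cf. line 627); the matching multiply is
   * deferred, as the GCM_MUL calls at 663/823/988/1097/1199 suggest. */
  for (size_t i = 0; i < len; i++) s->Xi[i] ^= aad[i];
  s->n = (unsigned)len;
}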
647 void (*gcm_gmult_p)(uint64_t Xi[2], const u128 Htable[16]) = ctx->gmult; in CRYPTO_gcm128_encrypt()
649 void (*gcm_ghash_p)(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp, in CRYPTO_gcm128_encrypt()
663 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_encrypt()
676 ctx->Xi.c[n] ^= *(out++) = *(in++) ^ ctx->EKi.c[n]; in CRYPTO_gcm128_encrypt()
681 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_encrypt()
698 ctx->Xi.c[n] ^= out[i] = in[i] ^ ctx->EKi.c[n]; in CRYPTO_gcm128_encrypt()
701 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_encrypt()
769 ctx->Xi.t[i] ^= out_t[i] = in_t[i] ^ ctx->EKi.t[i]; in CRYPTO_gcm128_encrypt()
771 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_encrypt()
786 ctx->Xi.c[n] ^= out[n] = in[n] ^ ctx->EKi.c[n]; in CRYPTO_gcm128_encrypt()
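
In CRYPTO_gcm128_encrypt the same keystream byte EKi.c[n] both encrypts the input and feeds the resulting ciphertext into Xi (lines 676, 698, 769, 786). A one-block sketch, with the counter encryption that produces EKi assumed to have happened elsewhere:

#include <stdint.h>
#include <stddef.h>

/* One block of the encrypt path: the keystream block EKi encrypts the input,
 * and the resulting ciphertext is XORed into the hash accumulator Xi. */
static void gcm_encrypt_block_sketch(uint8_t Xi[16], const uint8_t EKi[16],
                                     const uint8_t *in, uint8_t *out) {
  for (size_t n = 0; n < 16; n++) {
    out[n] = in[n] ^ EKi[n];   /* cf. lines 676/698: out = in ^ EKi */
    Xi[n] ^= out[n];           /* ...and Xi absorbs the ciphertext */
  }
  /* followed by Xi := Xi * H once the block is complete (GCM_MUL, 681/701) */
}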
807 void (*gcm_gmult_p)(uint64_t Xi[2], const u128 Htable[16]) = ctx->gmult; in CRYPTO_gcm128_decrypt()
809 void (*gcm_ghash_p)(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp, in CRYPTO_gcm128_decrypt()
823 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_decrypt()
838 ctx->Xi.c[n] ^= c; in CRYPTO_gcm128_decrypt()
843 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_decrypt()
863 ctx->Xi.c[n] ^= c; in CRYPTO_gcm128_decrypt()
866 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_decrypt()
934 ctx->Xi.t[i] ^= c; in CRYPTO_gcm128_decrypt()
936 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_decrypt()
952 ctx->Xi.c[n] ^= c; in CRYPTO_gcm128_decrypt()
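
The decrypt lines (838-952) capture the ciphertext byte first, hash it, and only then produce the plaintext, so the authenticator is computed over what was actually received even when decrypting in place. A one-block sketch of that ordering:

#include <stdint.h>
#include <stddef.h>

/* One block of the decrypt path: the ciphertext byte is hashed before the
 * plaintext overwrites it, so in-place decryption (in == out) still
 * authenticates the received data. */
static void gcm_decrypt_block_sketch(uint8_t Xi[16], const uint8_t EKi[16],
                                     const uint8_t *in, uint8_t *out) {
  for (size_t n = 0; n < 16; n++) {
    uint8_t c = in[n];         /* ciphertext byte */
    Xi[n] ^= c;                /* cf. lines 838/863: hash the ciphertext */
    out[n] = c ^ EKi[n];       /* then decrypt */
  }
  /* followed by GCM_MUL once the block is complete (843/866) */
}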
972 void (*gcm_gmult_p)(uint64_t Xi[2], const u128 Htable[16]) = ctx->gmult; in CRYPTO_gcm128_encrypt_ctr32()
974 void (*gcm_ghash_p)(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp, in CRYPTO_gcm128_encrypt_ctr32()
988 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_encrypt_ctr32()
1001 ctx->Xi.c[n] ^= *(out++) = *(in++) ^ ctx->EKi.c[n]; in CRYPTO_gcm128_encrypt_ctr32()
1006 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_encrypt_ctr32()
1046 ctx->Xi.c[i] ^= out[i]; in CRYPTO_gcm128_encrypt_ctr32()
1048 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_encrypt_ctr32()
1062 ctx->Xi.c[n] ^= out[n] = in[n] ^ ctx->EKi.c[n]; in CRYPTO_gcm128_encrypt_ctr32()
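
In the ctr32 variants the bulk counter-mode routine produces many blocks of ciphertext at once, and the hash is then folded in per 16-byte block (lines 1046-1048) or handed to a bulk GHASH. A sketch of that per-block fold over whole blocks only; the remaining tail bytes are handled separately (cf. line 1062):

#include <stdint.h>
#include <stddef.h>

typedef void (*gmult_sketch_f)(uint8_t Xi[16]);

/* Fold already-produced ciphertext into Xi, one 16-byte block at a time;
 * builds with a bulk GHASH would pass the whole buffer to GHASH instead. */
static void hash_bulk_ciphertext_sketch(uint8_t Xi[16], gmult_sketch_f gmult,
                                        const uint8_t *out, size_t len) {
  while (len >= 16) {
    for (size_t i = 0; i < 16; i++) Xi[i] ^= out[i];
    gmult(Xi);
    out += 16;
    len -= 16;
  }
}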
1081 void (*gcm_gmult_p)(uint64_t Xi[2], const u128 Htable[16]) = ctx->gmult; in CRYPTO_gcm128_decrypt_ctr32()
1083 void (*gcm_ghash_p)(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp, in CRYPTO_gcm128_decrypt_ctr32()
1097 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_decrypt_ctr32()
1112 ctx->Xi.c[n] ^= c; in CRYPTO_gcm128_decrypt_ctr32()
1117 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_decrypt_ctr32()
1148 ctx->Xi.c[k] ^= in[k]; in CRYPTO_gcm128_decrypt_ctr32()
1150 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_decrypt_ctr32()
1177 ctx->Xi.c[n] ^= c; in CRYPTO_gcm128_decrypt_ctr32()
1195 void (*gcm_gmult_p)(uint64_t Xi[2], const u128 Htable[16]) = ctx->gmult; in CRYPTO_gcm128_finish()
1199 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_finish()
1217 ctx->Xi.u[0] ^= alen; in CRYPTO_gcm128_finish()
1218 ctx->Xi.u[1] ^= clen; in CRYPTO_gcm128_finish()
1219 GCM_MUL(ctx, Xi); in CRYPTO_gcm128_finish()
1221 ctx->Xi.u[0] ^= ctx->EK0.u[0]; in CRYPTO_gcm128_finish()
1222 ctx->Xi.u[1] ^= ctx->EK0.u[1]; in CRYPTO_gcm128_finish()
1224 if (tag && len <= sizeof(ctx->Xi)) { in CRYPTO_gcm128_finish()
1225 return CRYPTO_memcmp(ctx->Xi.c, tag, len) == 0; in CRYPTO_gcm128_finish()
1233 memcpy(tag, ctx->Xi.c, len <= sizeof(ctx->Xi.c) ? len : sizeof(ctx->Xi.c)); in CRYPTO_gcm128_tag()
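
CRYPTO_gcm128_finish and CRYPTO_gcm128_tag close the computation: XOR in the AAD and ciphertext bit lengths, multiply once more, mask with the encrypted pre-counter block EK0, then compare or export up to 16 bytes of Xi (lines 1217-1233). A self-contained sketch under those assumptions, with a constant-time comparison standing in for CRYPTO_memcmp and the length encoding assumed to be handled by the caller:

#include <stdint.h>
#include <stddef.h>
#include <string.h>

/* Constant-time compare standing in for CRYPTO_memcmp; returns 0 on match. */
static int ct_memcmp_sketch(const uint8_t *a, const uint8_t *b, size_t len) {
  uint8_t diff = 0;
  for (size_t i = 0; i < len; i++) diff |= a[i] ^ b[i];
  return diff != 0;
}

typedef void (*gmult64_sketch_f)(uint64_t Xi[2]);

/* Finalize the tag: fold in the AAD and ciphertext bit lengths (assumed to be
 * pre-encoded to match Xi's byte order, cf. 1217-1218), multiply once more,
 * mask with EK0 (1221-1222), then verify the caller's tag in constant time
 * (1224-1225). Returns 1 on match. */
static int gcm_finish_sketch(uint64_t Xi[2], const uint64_t EK0[2],
                             gmult64_sketch_f gmult,
                             uint64_t alen, uint64_t clen,
                             const uint8_t *tag, size_t taglen) {
  Xi[0] ^= alen;
  Xi[1] ^= clen;
  gmult(Xi);                         /* GCM_MUL at line 1219 */
  Xi[0] ^= EK0[0];
  Xi[1] ^= EK0[1];
  if (tag != NULL && taglen <= 16) {
    return ct_memcmp_sketch((const uint8_t *)Xi, tag, taglen) == 0;
  }
  return 0;
}

/* CRYPTO_gcm128_tag-style export (cf. line 1233): copy out at most 16 bytes. */
static void gcm_tag_sketch(const uint64_t Xi[2], uint8_t *tag, size_t len) {
  memcpy(tag, Xi, len <= 16 ? len : 16);
}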