Lines matching refs:Xi — a symbol cross-reference for Xi, the GHASH accumulator. Each entry is <source line number> <source line> in <enclosing function>; the indexed file is apparently OpenSSL's crypto/modes/gcm128.c (the GCM/GHASH implementation).

167 static void gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16])  in gcm_gmult_4bit()
174 nlo = ((const u8 *)Xi)[15]; in gcm_gmult_4bit()
196 nlo = ((const u8 *)Xi)[cnt]; in gcm_gmult_4bit()
214 Xi[0] = BSWAP8(Z.hi); in gcm_gmult_4bit()
215 Xi[1] = BSWAP8(Z.lo); in gcm_gmult_4bit()
217 u8 *p = (u8 *)Xi; in gcm_gmult_4bit()
229 Xi[0] = Z.hi; in gcm_gmult_4bit()
230 Xi[1] = Z.lo; in gcm_gmult_4bit()
245 static void gcm_ghash_4bit(u64 Xi[2], const u128 Htable[16], in gcm_ghash_4bit()
255 nlo = ((const u8 *)Xi)[15]; in gcm_ghash_4bit()
278 nlo = ((const u8 *)Xi)[cnt]; in gcm_ghash_4bit()
297 Xi[0] = BSWAP8(Z.hi); in gcm_ghash_4bit()
298 Xi[1] = BSWAP8(Z.lo); in gcm_ghash_4bit()
300 u8 *p = (u8 *)Xi; in gcm_ghash_4bit()
312 Xi[0] = Z.hi; in gcm_ghash_4bit()
313 Xi[1] = Z.lo; in gcm_ghash_4bit()
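
gcm_ghash_4bit is the bulk form of the same operation: for each 16-byte input block it computes Xi <- (Xi XOR block)*H, so one call hashes an arbitrary run of whole blocks. A minimal sketch on top of ghash_mult_ref above (reference code, not the 4-bit table implementation):

    /* GHASH over whole 16-byte blocks: Xi <- (Xi ^ block) * H each. */
    static void ghash_blocks_ref(uint8_t Xi[16], const uint8_t H[16],
                                 const uint8_t *inp, size_t len)
    {
        int j;

        while (len >= 16) {
            for (j = 0; j < 16; j++)
                Xi[j] ^= inp[j];
            ghash_mult_ref(Xi, H);
            inp += 16;
            len -= 16;
        }
    }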
323 void gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16]);
324 void gcm_ghash_4bit(u64 Xi[2], const u128 Htable[16], const u8 *inp,
328 # define GCM_MUL(ctx) ctx->funcs.gmult(ctx->Xi.u,ctx->Htable)
330 # define GHASH(ctx,in,len) ctx->funcs.ghash((ctx)->Xi.u,(ctx)->Htable,in,len)
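
Lines 328 and 330 are the dispatch point: GCM_MUL and GHASH expand to indirect calls through a per-context function table (ctx->funcs), which is how the generic C code above and the assembly backends below share one call shape. A hedged reduction of that pattern — the struct name here is hypothetical, and the real table inside the GCM context carries more members:

    #include <stddef.h>
    #include <stdint.h>

    /* Hypothetical reduction of the table behind GCM_MUL/GHASH;
     * Htable is passed as const void * to keep the sketch neutral
     * about the u128[16] layout. */
    typedef struct {
        void (*gmult)(uint64_t Xi[2], const void *Htable);
        void (*ghash)(uint64_t Xi[2], const void *Htable,
                      const uint8_t *inp, size_t len);
    } gcm_funcs_sketch;

The ghash member may be absent on some backends; callers then fall back to one gmult per 16-byte block, which is exactly the fallback loop visible in ossl_gcm_ghash_4bit further down.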
346 void gcm_init_clmul(u128 Htable[16], const u64 Xi[2]);
347 void gcm_gmult_clmul(u64 Xi[2], const u128 Htable[16]);
348 void gcm_ghash_clmul(u64 Xi[2], const u128 Htable[16], const u8 *inp,
356 void gcm_init_avx(u128 Htable[16], const u64 Xi[2]);
357 void gcm_gmult_avx(u64 Xi[2], const u128 Htable[16]);
358 void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *inp,
364 void gcm_gmult_4bit_mmx(u64 Xi[2], const u128 Htable[16]);
365 void gcm_ghash_4bit_mmx(u64 Xi[2], const u128 Htable[16], const u8 *inp,
368 void gcm_gmult_4bit_x86(u64 Xi[2], const u128 Htable[16]);
369 void gcm_ghash_4bit_x86(u64 Xi[2], const u128 Htable[16], const u8 *inp,
380 void gcm_init_neon(u128 Htable[16], const u64 Xi[2]);
381 void gcm_gmult_neon(u64 Xi[2], const u128 Htable[16]);
382 void gcm_ghash_neon(u64 Xi[2], const u128 Htable[16], const u8 *inp,
384 void gcm_init_v8(u128 Htable[16], const u64 Xi[2]);
385 void gcm_gmult_v8(u64 Xi[2], const u128 Htable[16]);
386 void gcm_ghash_v8(u64 Xi[2], const u128 Htable[16], const u8 *inp,
392 void gcm_init_vis3(u128 Htable[16], const u64 Xi[2]);
393 void gcm_gmult_vis3(u64 Xi[2], const u128 Htable[16]);
394 void gcm_ghash_vis3(u64 Xi[2], const u128 Htable[16], const u8 *inp,
399 void gcm_init_p8(u128 Htable[16], const u64 Xi[2]);
400 void gcm_gmult_p8(u64 Xi[2], const u128 Htable[16]);
401 void gcm_ghash_p8(u64 Xi[2], const u128 Htable[16], const u8 *inp,
407 void gcm_init_rv64i_zbc(u128 Htable[16], const u64 Xi[2]);
408 void gcm_init_rv64i_zbc__zbb(u128 Htable[16], const u64 Xi[2]);
409 void gcm_init_rv64i_zbc__zbkb(u128 Htable[16], const u64 Xi[2]);
410 void gcm_gmult_rv64i_zbc(u64 Xi[2], const u128 Htable[16]);
411 void gcm_gmult_rv64i_zbc__zbkb(u64 Xi[2], const u128 Htable[16]);
412 void gcm_ghash_rv64i_zbc(u64 Xi[2], const u128 Htable[16],
414 void gcm_ghash_rv64i_zbc__zbkb(u64 Xi[2], const u128 Htable[16],
417 void gcm_init_rv64i_zvkb_zvbc(u128 Htable[16], const u64 Xi[2]);
418 void gcm_gmult_rv64i_zvkb_zvbc(u64 Xi[2], const u128 Htable[16]);
419 void gcm_ghash_rv64i_zvkb_zvbc(u64 Xi[2], const u128 Htable[16],
422 void gcm_init_rv64i_zvkg(u128 Htable[16], const u64 Xi[2]);
423 void gcm_init_rv64i_zvkg_zvkb(u128 Htable[16], const u64 Xi[2]);
424 void gcm_gmult_rv64i_zvkg(u64 Xi[2], const u128 Htable[16]);
425 void gcm_ghash_rv64i_zvkg(u64 Xi[2], const u128 Htable[16],
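
Lines 346-425 enumerate the per-ISA backends, and every one exports the same triple with identical signatures: gcm_init_* derives Htable from the hash key H, gcm_gmult_* performs one multiply, gcm_ghash_* hashes a run of blocks. The platforms visible here are x86 PCLMULQDQ (clmul) and its AVX variant, 32-bit x86 with and without MMX, ARM NEON and the ARMv8 PMULL instructions (v8), SPARC VIS3, POWER8 (p8), and RISC-V in scalar carry-less-multiply (Zbc, with Zbb/Zbkb tweaks) and vector (Zvkb+Zvbc, Zvkg) flavors. Because the signatures agree, choosing a backend amounts to filling the function table once at init. A sketch under stated assumptions — the probe and the two extern names are hypothetical stand-ins; the real checks read OPENSSL_ia32cap_P, HWCAP bits, and similar per-platform capability words:

    extern int cpu_has_clmul(void);              /* hypothetical probe */
    extern void gmult_clmul(uint64_t Xi[2], const void *Htable);
                              /* stand-in for gcm_gmult_clmul (asm)    */
    extern void gmult_4bit(uint64_t Xi[2], const void *Htable);
                              /* stand-in for gcm_gmult_4bit (C)       */

    /* Pick the fastest multiply the CPU supports, else generic C. */
    static void gcm_pick_gmult(gcm_funcs_sketch *f)
    {
        f->gmult = cpu_has_clmul() ? gmult_clmul : gmult_4bit;
    }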
573 void ossl_gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16]) in ossl_gcm_gmult_4bit()
578 funcs.gmult(Xi, Htable); in ossl_gcm_gmult_4bit()
581 void ossl_gcm_ghash_4bit(u64 Xi[2], const u128 Htable[16], in ossl_gcm_ghash_4bit()
590 funcs.ghash(Xi, Htable, inp, len); in ossl_gcm_ghash_4bit()
595 Xi[0] ^= tmp[0]; in ossl_gcm_ghash_4bit()
596 Xi[1] ^= tmp[1]; in ossl_gcm_ghash_4bit()
597 funcs.gmult(Xi, Htable); in ossl_gcm_ghash_4bit()
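
The ossl_gcm_* wrappers at 573-597 route through the same function table rather than hard-wiring the C 4-bit code, so callers exercise whatever backend was selected for the CPU. Lines 595-597 are the emulation branch: when the chosen backend supplies no bulk ghash, the wrapper synthesizes it from gmult one block at a time; the memcpy that fills tmp does not appear in this listing because it does not reference Xi, but the tmp[0]/tmp[1] XORs imply it, and it exists to avoid unaligned u64 loads from inp. A sketch of that branch:

    /* Emulate bulk GHASH with per-block gmult, as at lines 595-597.
     * memcpy avoids casting inp to u64 and taking unaligned loads. */
    static void ghash_emulate(uint64_t Xi[2], const void *Htable,
                              void (*gmult)(uint64_t[2], const void *),
                              const uint8_t *inp, size_t len)
    {
        uint64_t tmp[2];
        size_t i;

        for (i = 0; i + 16 <= len; i += 16) {
            memcpy(tmp, inp + i, sizeof(tmp));
            Xi[0] ^= tmp[0];
            Xi[1] ^= tmp[1];
            gmult(Xi, Htable);
        }
    }

Note this XORs raw u64 words, so it operates in the same host byte order the gmult backends already expect for Xi.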
654 ctx->Xi.u[0] = 0; in CRYPTO_gcm128_setiv()
655 ctx->Xi.u[1] = 0; in CRYPTO_gcm128_setiv()
659 ctx->Xi.c[i] ^= iv[i]; in CRYPTO_gcm128_setiv()
666 ctx->Xi.c[i] ^= iv[i]; in CRYPTO_gcm128_setiv()
672 ctx->Xi.u[1] ^= BSWAP8(len0); in CRYPTO_gcm128_setiv()
674 ctx->Xi.c[8] ^= (u8)(len0 >> 56); in CRYPTO_gcm128_setiv()
675 ctx->Xi.c[9] ^= (u8)(len0 >> 48); in CRYPTO_gcm128_setiv()
676 ctx->Xi.c[10] ^= (u8)(len0 >> 40); in CRYPTO_gcm128_setiv()
677 ctx->Xi.c[11] ^= (u8)(len0 >> 32); in CRYPTO_gcm128_setiv()
678 ctx->Xi.c[12] ^= (u8)(len0 >> 24); in CRYPTO_gcm128_setiv()
679 ctx->Xi.c[13] ^= (u8)(len0 >> 16); in CRYPTO_gcm128_setiv()
680 ctx->Xi.c[14] ^= (u8)(len0 >> 8); in CRYPTO_gcm128_setiv()
681 ctx->Xi.c[15] ^= (u8)(len0); in CRYPTO_gcm128_setiv()
684 ctx->Xi.u[1] ^= len0; in CRYPTO_gcm128_setiv()
691 ctr = BSWAP4(ctx->Xi.d[3]); in CRYPTO_gcm128_setiv()
693 ctr = GETU32(ctx->Xi.c + 12); in CRYPTO_gcm128_setiv()
696 ctr = ctx->Xi.d[3]; in CRYPTO_gcm128_setiv()
699 ctx->Yi.u[0] = ctx->Xi.u[0]; in CRYPTO_gcm128_setiv()
700 ctx->Yi.u[1] = ctx->Xi.u[1]; in CRYPTO_gcm128_setiv()
703 ctx->Xi.u[0] = 0; in CRYPTO_gcm128_setiv()
704 ctx->Xi.u[1] = 0; in CRYPTO_gcm128_setiv()
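
The CRYPTO_gcm128_setiv lines (654-704) are the generic-IV path of GCM's J0 derivation: Xi is zeroed, the IV is GHASHed (659/666), the 64-bit IV bit length len0 is folded in — the three branches at 672-684 are one operation in three endianness spellings (64-bit BSWAP8 where available, byte-by-byte stores otherwise, a direct XOR on big-endian) — and after a final multiply the result becomes the initial counter block Yi (699-700), with the starting 32-bit counter pulled from its last four bytes (691-696) and Xi reset for the MAC (703-704). The common 12-byte IV instead gets J0 = IV || 0x00000001 with no hashing at all; that branch never touches Xi, which is why it is absent from this listing. A minimal sketch of the generic path per SP 800-38D, built on the reference helpers above (not the OpenSSL code):

    /* J0 = GHASH_H(IV || 0-pad || 0^64 || [bitlen(IV)]_64) for IVs
     * that are not 96 bits (SP 800-38D). */
    static void gcm_setiv_ref(uint8_t J0[16], const uint8_t H[16],
                              const uint8_t *iv, size_t len)
    {
        uint8_t Xi[16] = { 0 };
        uint64_t bits = (uint64_t)len << 3;
        size_t n, rem = len % 16;

        ghash_blocks_ref(Xi, H, iv, len - rem);   /* whole blocks */
        if (rem) {                                /* zero-padded tail */
            for (n = 0; n < rem; n++)
                Xi[n] ^= iv[len - rem + n];
            ghash_mult_ref(Xi, H);
        }
        for (n = 0; n < 8; n++)                   /* fold in bit length, */
            Xi[15 - n] ^= (uint8_t)(bits >> (8 * n)); /* big-endian      */
        ghash_mult_ref(Xi, H);
        memcpy(J0, Xi, 16);
    }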
736 ctx->Xi.c[n] ^= *(aad++); in CRYPTO_gcm128_aad()
756 ctx->Xi.c[i] ^= aad[i]; in CRYPTO_gcm128_aad()
765 ctx->Xi.c[i] ^= aad[i]; in CRYPTO_gcm128_aad()
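
CRYPTO_gcm128_aad (lines 736-765) absorbs the AAD into the same Xi accumulator: a leftover residue from a previous call is completed byte by byte (736), then whole blocks and the tail are folded in (756/765), with the multiply for a trailing partial block deferred in the context's AAD residue counter until more AAD or the data phase arrives. Ignoring the streaming residue, the net effect is plain GHASH with zero-padding at the AAD boundary — a one-shot sketch on the helpers above:

    /* One-shot AAD absorption: GHASH the AAD, zero-padding the tail
     * (the streaming code defers the tail multiply instead). */
    static void gcm_aad_ref(uint8_t Xi[16], const uint8_t H[16],
                            const uint8_t *aad, size_t len)
    {
        size_t n, rem = len % 16;

        ghash_blocks_ref(Xi, H, aad, len - rem);
        if (rem) {
            for (n = 0; n < rem; n++)
                Xi[n] ^= aad[len - rem + n];
            ghash_mult_ref(Xi, H);
        }
    }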
798 memcpy(ctx->Xn, ctx->Xi.c, sizeof(ctx->Xi)); in CRYPTO_gcm128_encrypt()
799 ctx->Xi.u[0] = 0; in CRYPTO_gcm128_encrypt()
800 ctx->Xi.u[1] = 0; in CRYPTO_gcm128_encrypt()
801 mres = sizeof(ctx->Xi); in CRYPTO_gcm128_encrypt()
837 ctx->Xi.c[n] ^= *(out++) = *(in++) ^ ctx->EKi.c[n]; in CRYPTO_gcm128_encrypt()
928 ctx->Xi.t[i] ^= out_t[i] = in_t[i] ^ ctx->EKi.t[i]; in CRYPTO_gcm128_encrypt()
953 ctx->Xi.c[n] ^= out[n] = in[n] ^ ctx->EKi.c[n]; in CRYPTO_gcm128_encrypt()
986 ctx->Xi.c[n] ^= out[i] = in[i] ^ ctx->EKi.c[n]; in CRYPTO_gcm128_encrypt()
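
In CRYPTO_gcm128_encrypt the hash and the cipher run interleaved: each plaintext byte is XORed with keystream and the resulting ciphertext byte is folded into Xi (the idiom at 837/953/986), because GCM authenticates ciphertext, not plaintext. Lines 798-801 (mirrored at 1023-1026, 1258-1261, and 1412-1415) appear to defer the final AAD multiply on the first data call: the AAD-folded Xi is parked in the ctx->Xn queue, Xi restarts at zero, and mres = sizeof(ctx->Xi) records the 16 queued bytes so a later bulk GHASH can absorb the AAD remainder and the ciphertext in one pass. The per-block core, as a sketch on the reference helpers (EKi stands for the encrypted counter block E_K(Yi), assumed precomputed):

    /* One full block of GCM encryption: CTR keystream XOR, then fold
     * the *ciphertext* into the hash state (same idiom as line 837). */
    static void gcm_encrypt_block_ref(uint8_t Xi[16], const uint8_t H[16],
                                      const uint8_t EKi[16],
                                      const uint8_t in[16], uint8_t out[16])
    {
        int n;

        for (n = 0; n < 16; n++)
            Xi[n] ^= out[n] = in[n] ^ EKi[n];
        ghash_mult_ref(Xi, H);
    }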
1023 memcpy(ctx->Xn, ctx->Xi.c, sizeof(ctx->Xi)); in CRYPTO_gcm128_decrypt()
1024 ctx->Xi.u[0] = 0; in CRYPTO_gcm128_decrypt()
1025 ctx->Xi.u[1] = 0; in CRYPTO_gcm128_decrypt()
1026 mres = sizeof(ctx->Xi); in CRYPTO_gcm128_decrypt()
1064 ctx->Xi.c[n] ^= c; in CRYPTO_gcm128_decrypt()
1155 ctx->Xi.t[i] ^= c; in CRYPTO_gcm128_decrypt()
1182 ctx->Xi.c[n] ^= c; in CRYPTO_gcm128_decrypt()
1219 ctx->Xi.c[n] ^= c; in CRYPTO_gcm128_decrypt()
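
The decrypt loops invert the order: at 1064/1155/1182/1219 the ciphertext byte is captured in a local before the plaintext overwrites it, so Xi is always updated from ciphertext and in-place operation (in == out) stays correct. The same step as a sketch:

    /* One full block of GCM decryption: hash the ciphertext byte
     * before writing plaintext over it, so in == out is safe. */
    static void gcm_decrypt_block_ref(uint8_t Xi[16], const uint8_t H[16],
                                      const uint8_t EKi[16],
                                      const uint8_t in[16], uint8_t out[16])
    {
        int n;

        for (n = 0; n < 16; n++) {
            uint8_t c = in[n];
            out[n] = c ^ EKi[n];
            Xi[n] ^= c;
        }
        ghash_mult_ref(Xi, H);
    }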
1258 memcpy(ctx->Xn, ctx->Xi.c, sizeof(ctx->Xi)); in CRYPTO_gcm128_encrypt_ctr32()
1259 ctx->Xi.u[0] = 0; in CRYPTO_gcm128_encrypt_ctr32()
1260 ctx->Xi.u[1] = 0; in CRYPTO_gcm128_encrypt_ctr32()
1261 mres = sizeof(ctx->Xi); in CRYPTO_gcm128_encrypt_ctr32()
1294 ctx->Xi.c[n] ^= *(out++) = *(in++) ^ ctx->EKi.c[n]; in CRYPTO_gcm128_encrypt_ctr32()
1352 ctx->Xi.c[i] ^= out[i]; in CRYPTO_gcm128_encrypt_ctr32()
1373 ctx->Xi.c[mres++] ^= out[n] = in[n] ^ ctx->EKi.c[n]; in CRYPTO_gcm128_encrypt_ctr32()
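
CRYPTO_gcm128_encrypt_ctr32 (1258-1373) is the same dataflow with the block cipher driven through a caller-supplied bulk counter-mode routine, so hardware AES-CTR and GHASH each run over many blocks per call; line 1352 shows the ciphertext being folded into Xi after such a bulk step. The stream type below is OpenSSL's real ctr128_f typedef from the modes headers; the wrapper around it is only a sketch of the shape — the real code also advances the 32-bit big-endian counter in Yi[12..15] around each call and handles partial blocks:

    /* OpenSSL's bulk-CTR callback type (openssl/modes.h). */
    typedef void (*ctr128_f)(const unsigned char *in, unsigned char *out,
                             size_t blocks, const void *key,
                             const unsigned char ivec[16]);

    /* Sketch of the _ctr32 encrypt shape: bulk CTR, then bulk hash
     * of the produced ciphertext. */
    static void gcm_encrypt_ctr32_ref(uint8_t Xi[16], const uint8_t H[16],
                                      const uint8_t Yi[16], const void *key,
                                      ctr128_f stream, const uint8_t *in,
                                      uint8_t *out, size_t blocks)
    {
        stream(in, out, blocks, key, Yi);        /* does not update Yi */
        ghash_blocks_ref(Xi, H, out, blocks * 16);
    }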
1412 memcpy(ctx->Xn, ctx->Xi.c, sizeof(ctx->Xi)); in CRYPTO_gcm128_decrypt_ctr32()
1413 ctx->Xi.u[0] = 0; in CRYPTO_gcm128_decrypt_ctr32()
1414 ctx->Xi.u[1] = 0; in CRYPTO_gcm128_decrypt_ctr32()
1415 mres = sizeof(ctx->Xi); in CRYPTO_gcm128_decrypt_ctr32()
1450 ctx->Xi.c[n] ^= c; in CRYPTO_gcm128_decrypt_ctr32()
1496 ctx->Xi.c[k] ^= in[k]; in CRYPTO_gcm128_decrypt_ctr32()
1533 ctx->Xi.c[mres++] ^= c; in CRYPTO_gcm128_decrypt_ctr32()
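
The decrypting twin (1412-1533) applies the in-place rule at bulk scale: line 1496 shows ciphertext being hashed (Xi.c[k] ^= in[k]) before the bulk decrypt overwrites the buffer. Sketch, under the same assumptions as the encrypt shape above:

    /* Sketch of the _ctr32 decrypt shape: hash the ciphertext first,
     * then bulk CTR-decrypt it (safe for in == out). */
    static void gcm_decrypt_ctr32_ref(uint8_t Xi[16], const uint8_t H[16],
                                      const uint8_t Yi[16], const void *key,
                                      ctr128_f stream, const uint8_t *in,
                                      uint8_t *out, size_t blocks)
    {
        ghash_blocks_ref(Xi, H, in, blocks * 16);
        stream(in, out, blocks, key, Yi);        /* does not update Yi */
    }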
1595 ctx->Xi.u[0] ^= alen; in CRYPTO_gcm128_finish()
1596 ctx->Xi.u[1] ^= clen; in CRYPTO_gcm128_finish()
1600 ctx->Xi.u[0] ^= ctx->EK0.u[0]; in CRYPTO_gcm128_finish()
1601 ctx->Xi.u[1] ^= ctx->EK0.u[1]; in CRYPTO_gcm128_finish()
1603 if (tag && len <= sizeof(ctx->Xi)) in CRYPTO_gcm128_finish()
1604 return CRYPTO_memcmp(ctx->Xi.c, tag, len); in CRYPTO_gcm128_finish()
1612 memcpy(tag, ctx->Xi.c, in CRYPTO_gcm128_tag()
1613 len <= sizeof(ctx->Xi.c) ? len : sizeof(ctx->Xi.c)); in CRYPTO_gcm128_tag()
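
CRYPTO_gcm128_finish (1595-1604) completes the tag: the 64-bit bit lengths of AAD and ciphertext are XORed into Xi as the len(A)||len(C) block, one last multiply runs, the result is masked with EK0 = E_K(J0), and the caller's tag is checked with CRYPTO_memcmp so the comparison is constant-time. CRYPTO_gcm128_tag (1612-1613) is the emit side, copying at most 16 bytes of Xi out. A sketch of the verify side on the reference helpers — CRYPTO_memcmp is OpenSSL's real constant-time compare from <openssl/crypto.h>, and the alen/clen byte-order plumbing of the listed lines is folded into big-endian stores here:

    #include <openssl/crypto.h>   /* CRYPTO_memcmp */

    /* Finalize and verify: Xi ^= [bitlen(A)]_64 || [bitlen(C)]_64,
     * multiply, mask with EK0, constant-time compare.  Returns 0 on
     * a matching tag, nonzero otherwise. */
    static int gcm_finish_ref(uint8_t Xi[16], const uint8_t H[16],
                              const uint8_t EK0[16],
                              uint64_t aad_bits, uint64_t ct_bits,
                              const uint8_t *tag, size_t taglen)
    {
        size_t n;

        for (n = 0; n < 8; n++) {
            Xi[7 - n]  ^= (uint8_t)(aad_bits >> (8 * n));
            Xi[15 - n] ^= (uint8_t)(ct_bits >> (8 * n));
        }
        ghash_mult_ref(Xi, H);
        for (n = 0; n < 16; n++)
            Xi[n] ^= EK0[n];
        if (tag == NULL || taglen > 16)
            return -1;
        return CRYPTO_memcmp(Xi, tag, taglen);
    }

Truncated tags (len < 16 at lines 1603-1604 and 1612-1613) compare or copy only the leading bytes, matching GCM's defined tag truncation.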