Lines Matching refs:x2
74 __m128i x0, x1, x2, k, shuf_mask; in crc32_pclmul_batch() local
94 x2 = _mm_loadu_si128((__m128i *)(p + 0x10)); in crc32_pclmul_batch()
95 x2 = _mm_shuffle_epi8(x2, shuf_mask); /* endianness swap */ in crc32_pclmul_batch()
107 x6 = _mm_clmulepi64_si128(x2, k, 0x00); in crc32_pclmul_batch()
111 x2 = _mm_clmulepi64_si128(x2, k, 0x11); in crc32_pclmul_batch()
123 x2 = _mm_xor_si128(x2, x6); in crc32_pclmul_batch()
127 x2 = _mm_xor_si128(x2, x10); in crc32_pclmul_batch()
142 x0 = _mm_xor_si128(x0, x2); in crc32_pclmul_batch()
154 x2 = _mm_loadu_si128((__m128i *)(p + 0x00)); in crc32_pclmul_batch()
155 x2 = _mm_shuffle_epi8(x2, shuf_mask); /* endianness swap */ in crc32_pclmul_batch()
158 x0 = _mm_xor_si128(x0, x2); in crc32_pclmul_batch()
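For context, the references above trace x2 through one iteration of the multi-block carry-less folding loop in crc32_pclmul_batch(): a 16-byte block is loaded and byte-swapped (lines 94-95), both 64-bit halves of the accumulator are carry-less multiplied by the folding constants in k (lines 107 and 111), and the two products are XORed back together and with what appears to be the next data block in x10 (lines 123 and 127), before the accumulators are finally combined into x0 (lines 142 and 158). A minimal sketch of that fold step follows; fold128 and its parameter names are illustrative only and do not come from the original file.

#include <immintrin.h>  /* SSSE3 (_mm_shuffle_epi8) and PCLMULQDQ intrinsics */

/* One fold step (illustrative): multiply the accumulator's two 64-bit
 * halves by the folding constants held in k, then XOR the products into
 * the next (byte-swapped) 16-byte block of input. */
static inline __m128i fold128(__m128i acc, __m128i k, __m128i next,
                              __m128i shuf_mask)
{
    next = _mm_shuffle_epi8(next, shuf_mask);             /* cf. lines 94-95   */
    __m128i lo = _mm_clmulepi64_si128(acc, k, 0x00);      /* cf. line 107      */
    __m128i hi = _mm_clmulepi64_si128(acc, k, 0x11);      /* cf. line 111      */
    return _mm_xor_si128(_mm_xor_si128(hi, lo), next);    /* cf. lines 123/127 */
}

This sketch needs SSSE3 and PCLMULQDQ support (e.g. -mssse3 -mpclmul); the choice of folding constants in k is not shown here and depends on how many blocks apart the fold operates.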
191 __m128i x0, x1, x2, k; in crc32_pclmul_reflected_batch() local
205 x2 = _mm_loadu_si128((__m128i *)(p + 0x10)); in crc32_pclmul_reflected_batch()
216 x6 = _mm_clmulepi64_si128(x2, k, 0x00); in crc32_pclmul_reflected_batch()
220 x2 = _mm_clmulepi64_si128(x2, k, 0x11); in crc32_pclmul_reflected_batch()
228 x2 = _mm_xor_si128(x2, x6); in crc32_pclmul_reflected_batch()
232 x2 = _mm_xor_si128(x2, x10); in crc32_pclmul_reflected_batch()
247 x0 = _mm_xor_si128(x0, x2); in crc32_pclmul_reflected_batch()
259 x2 = _mm_loadu_si128((__m128i *)(p + 0x00)); in crc32_pclmul_reflected_batch()
262 x0 = _mm_xor_si128(x0, x2); in crc32_pclmul_reflected_batch()
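The reflected variant referenced above follows the same multiply-and-XOR structure, except the loads at lines 205 and 259 are consumed as-is, with no _mm_shuffle_epi8 byte swap, since a reflected (bit-reversed) CRC32 operates directly on the little-endian byte order of the input. A hedged sketch under that assumption, with fold128_reflected as an invented name:

#include <immintrin.h>  /* PCLMULQDQ intrinsics; compile with -mpclmul */

/* Fold step for the reflected CRC (illustrative): identical multiply/XOR
 * structure to the non-reflected path, but the input block is used in its
 * native order, so no byte swap is needed. */
static inline __m128i fold128_reflected(__m128i acc, __m128i k, __m128i next)
{
    __m128i lo = _mm_clmulepi64_si128(acc, k, 0x00);      /* cf. line 216      */
    __m128i hi = _mm_clmulepi64_si128(acc, k, 0x11);      /* cf. line 220      */
    return _mm_xor_si128(_mm_xor_si128(hi, lo), next);    /* cf. lines 228/232 */
}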