Lines Matching refs:x1

74 __m128i x0, x1, x2, k, shuf_mask; in crc32_pclmul_batch() local
82 x1 = _mm_loadu_si128((__m128i *)(p + 0x00)); in crc32_pclmul_batch()
84 x1 = _mm_shuffle_epi8(x1, shuf_mask); /* endianness swap */ in crc32_pclmul_batch()
85 x0 = _mm_xor_si128(x1, x0); in crc32_pclmul_batch()
92 x1 = _mm_loadu_si128((__m128i *)(p + 0x00)); in crc32_pclmul_batch()
93 x1 = _mm_shuffle_epi8(x1, shuf_mask); /* endianness swap */ in crc32_pclmul_batch()
106 x5 = _mm_clmulepi64_si128(x1, k, 0x00); in crc32_pclmul_batch()
110 x1 = _mm_clmulepi64_si128(x1, k, 0x11); in crc32_pclmul_batch()
122 x1 = _mm_xor_si128(x1, x5); in crc32_pclmul_batch()
126 x1 = _mm_xor_si128(x1, x9); in crc32_pclmul_batch()
138 x0 = _mm_xor_si128(x0, x1); in crc32_pclmul_batch()
156 x1 = _mm_clmulepi64_si128(x0, k, 0x00); in crc32_pclmul_batch()
159 x0 = _mm_xor_si128(x0, x1); in crc32_pclmul_batch()
166 x1 = _mm_clmulepi64_si128(x0, k, 0x11); in crc32_pclmul_batch()
169 x0 = _mm_xor_si128(x0, x1); in crc32_pclmul_batch()
171 x1 = _mm_clmulepi64_si128(x0, k, 0x01); in crc32_pclmul_batch()
172 x0 = _mm_xor_si128(x0, x1); in crc32_pclmul_batch()
176 x1 = _mm_move_epi64(x0); in crc32_pclmul_batch()
177 x1 = _mm_srli_si128(x1, 4); in crc32_pclmul_batch()
178 x1 = _mm_clmulepi64_si128(x1, k, 0x00); in crc32_pclmul_batch()
179 x1 = _mm_srli_si128(x1, 4); in crc32_pclmul_batch()
180 x1 = _mm_clmulepi64_si128(x1, k, 0x10); in crc32_pclmul_batch()
181 x0 = _mm_xor_si128(x1, x0); in crc32_pclmul_batch()
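
The clmul/xor pattern that repeats above (source lines 106, 110, 122, 126) is one 128-bit CRC folding step. Below is a minimal, self-contained sketch of that step; fold_128 and its parameter layout are illustrative assumptions, not the routine's own interface, and in crc32_pclmul_batch() each loaded block is additionally byte-reversed with _mm_shuffle_epi8 (lines 84, 93) before it is folded in.

#include <immintrin.h>  /* SSE2 + PCLMULQDQ intrinsics; build with -mpclmul */

/* One folding step: carry-less multiply the accumulator's two 64-bit halves
 * by the folding constants held in k, then XOR in the next 16 input bytes.
 * The real folding constants are not reproduced here. */
static inline __m128i fold_128(__m128i acc, __m128i k, const unsigned char *p)
{
    __m128i lo = _mm_clmulepi64_si128(acc, k, 0x00); /* acc[63:0]   * k[63:0]   */
    __m128i hi = _mm_clmulepi64_si128(acc, k, 0x11); /* acc[127:64] * k[127:64] */
    __m128i in = _mm_loadu_si128((const __m128i *)p); /* next 16 input bytes */
    return _mm_xor_si128(_mm_xor_si128(lo, hi), in);
}

A driver would seed acc with the incoming CRC, apply this step once per 16-byte block (the x5 and x9 temporaries above suggest the real routine keeps several accumulators in flight), and finish with the 128-to-32-bit reduction seen at source lines 156-181.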
191 __m128i x0, x1, x2, k; in crc32_pclmul_reflected_batch() local
204 x1 = _mm_loadu_si128((__m128i *)(p + 0x00)); in crc32_pclmul_reflected_batch()
215 x5 = _mm_clmulepi64_si128(x1, k, 0x00); in crc32_pclmul_reflected_batch()
219 x1 = _mm_clmulepi64_si128(x1, k, 0x11); in crc32_pclmul_reflected_batch()
227 x1 = _mm_xor_si128(x1, x5); in crc32_pclmul_reflected_batch()
231 x1 = _mm_xor_si128(x1, x9); in crc32_pclmul_reflected_batch()
243 x0 = _mm_xor_si128(x0, x1); in crc32_pclmul_reflected_batch()
260 x1 = _mm_clmulepi64_si128(x0, k, 0x00); in crc32_pclmul_reflected_batch()
263 x0 = _mm_xor_si128(x0, x1); in crc32_pclmul_reflected_batch()
269 x1 = _mm_clmulepi64_si128(x0, k, 0x10); in crc32_pclmul_reflected_batch()
271 x0 = _mm_xor_si128(x0, x1); in crc32_pclmul_reflected_batch()
273 x1 = _mm_shuffle_epi32(x0, 0xfc); in crc32_pclmul_reflected_batch()
276 x1 = _mm_clmulepi64_si128(x1, k, 0x00); in crc32_pclmul_reflected_batch()
277 x0 = _mm_xor_si128(x0, x1); in crc32_pclmul_reflected_batch()
280 x1 = _mm_shuffle_epi32(x0, 0xf3); in crc32_pclmul_reflected_batch()
283 x1 = _mm_clmulepi64_si128(x1, k, 0x00); in crc32_pclmul_reflected_batch()
284 x1 = _mm_clmulepi64_si128(x1, k, 0x10); in crc32_pclmul_reflected_batch()
285 x0 = _mm_xor_si128(x1, x0); in crc32_pclmul_reflected_batch()
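
As a cross-check for either batch routine, a bit-at-a-time scalar CRC is useful. The sketch below assumes the reflected path computes the common zlib-style CRC-32 (reflected polynomial 0xEDB88320, with the usual initial/final inversion); which CRC variant these functions actually implement is not stated in the listing.

#include <stddef.h>
#include <stdint.h>

/* Scalar reference CRC-32 (reflected, polynomial 0xEDB88320).  Assumed
 * variant only; use it to validate the vectorized path on sample inputs. */
static uint32_t crc32_ref(uint32_t crc, const unsigned char *p, size_t len)
{
    crc = ~crc;                      /* initial inversion */
    while (len--) {
        crc ^= *p++;
        for (int i = 0; i < 8; i++)  /* one bit at a time */
            crc = (crc & 1) ? (crc >> 1) ^ 0xEDB88320u : (crc >> 1);
    }
    return ~crc;                     /* final inversion */
}

Comparing crc32_ref(0, buf, n) against the PCLMUL result over random buffers, including lengths that are not multiples of the block size, is a quick way to confirm the folding constants and the tail handling.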