Lines matching refs:k

Each entry below is a cross-reference hit for the variable k: the source line number, the matching line of code, and the enclosing function, with "local" marking the declaration. Lines that touch k only indirectly (data loads, XOR merges, shifts) do not match the query and are absent from the listing.

74 __m128i x0, x1, x2, k, shuf_mask; in crc32_pclmul_batch() local
101 k = _mm_loadu_si128((__m128i *)consts->k1k2); in crc32_pclmul_batch()
105 x4 = _mm_clmulepi64_si128(x0, k, 0x00); in crc32_pclmul_batch()
106 x5 = _mm_clmulepi64_si128(x1, k, 0x00); in crc32_pclmul_batch()
107 x6 = _mm_clmulepi64_si128(x2, k, 0x00); in crc32_pclmul_batch()
108 x7 = _mm_clmulepi64_si128(x3, k, 0x00); in crc32_pclmul_batch()
109 x0 = _mm_clmulepi64_si128(x0, k, 0x11); in crc32_pclmul_batch()
110 x1 = _mm_clmulepi64_si128(x1, k, 0x11); in crc32_pclmul_batch()
111 x2 = _mm_clmulepi64_si128(x2, k, 0x11); in crc32_pclmul_batch()
112 x3 = _mm_clmulepi64_si128(x3, k, 0x11); in crc32_pclmul_batch()
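
Lines 101-112 are the wide folding stage. With four 128-bit lanes in flight the fold distance is 512 bits, and each lane needs two carry-less multiplies because of the split identity (over GF(2), + is XOR):

    (H*x^64 + L) * x^T mod P  =  ( H*(x^(T+64) mod P) + L*(x^T mod P) ) mod P

so k1k2 presumably packs x^(T+64) mod P and x^T mod P for T = 512 (the reflected convention shifts the exponents slightly). Selector 0x00 multiplies the lane's low qword by the low constant, 0x11 the high qword by the high constant; the XOR with the next 64 bytes of input contains no k and is therefore missing from the listing. A minimal sketch of one such round, assuming a hypothetical input pointer `in` and ignoring the byte swap the non-reflected path applies:

    #include <immintrin.h>  /* _mm_clmulepi64_si128; compile with -mpclmul -msse2 */

    /* One 64-byte folding round in the shape of lines 105-112: per lane,
     * low qword times k.lo (0x00), high qword times k.hi (0x11), then
     * XOR both products with the next 16 input bytes. */
    static inline void fold4(__m128i x[4], const __m128i *in, __m128i k)
    {
        for (int i = 0; i < 4; i++) {
            __m128i lo = _mm_clmulepi64_si128(x[i], k, 0x00);
            __m128i hi = _mm_clmulepi64_si128(x[i], k, 0x11);
            x[i] = _mm_xor_si128(_mm_xor_si128(lo, hi),
                                 _mm_loadu_si128(in + i));
        }
    }
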
134 k = _mm_loadu_si128((__m128i *)consts->k3k4); in crc32_pclmul_batch()
136 x4 = _mm_clmulepi64_si128(x0, k, 0x00); in crc32_pclmul_batch()
137 x0 = _mm_clmulepi64_si128(x0, k, 0x11); in crc32_pclmul_batch()
140 x4 = _mm_clmulepi64_si128(x0, k, 0x00); in crc32_pclmul_batch()
141 x0 = _mm_clmulepi64_si128(x0, k, 0x11); in crc32_pclmul_batch()
144 x4 = _mm_clmulepi64_si128(x0, k, 0x00); in crc32_pclmul_batch()
145 x0 = _mm_clmulepi64_si128(x0, k, 0x11); in crc32_pclmul_batch()
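
Lines 134-145 collapse the four accumulators into one. k3k4 holds the constants for a 128-bit fold distance, and the multiply pair is issued three times, each round folding x0 forward before merging in the next lane; the merge XORs contain no k, so their exact placement in this sketch is an assumption:

    #include <immintrin.h>

    /* Fold four accumulators down to one, in the shape of lines 136-145. */
    static inline __m128i fold_lanes(const __m128i x[4], __m128i k)
    {
        __m128i acc = x[0];
        for (int i = 1; i < 4; i++) {
            __m128i lo = _mm_clmulepi64_si128(acc, k, 0x00);
            __m128i hi = _mm_clmulepi64_si128(acc, k, 0x11);
            acc = _mm_xor_si128(_mm_xor_si128(lo, hi), x[i]);
        }
        return acc;
    }
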
150 k = _mm_loadu_si128((__m128i *)consts->k3k4); in crc32_pclmul_batch()
156 x1 = _mm_clmulepi64_si128(x0, k, 0x00); in crc32_pclmul_batch()
157 x0 = _mm_clmulepi64_si128(x0, k, 0x11); in crc32_pclmul_batch()
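
Lines 150-157 are the single-lane tail: k3k4 is reloaded and the same two-multiply fold consumes 16 bytes per iteration, presumably with the shuf_mask byte swap from line 74 applied to each load in the non-reflected path. A sketch with hypothetical `buf`/`len` names, byte swap omitted:

    #include <immintrin.h>
    #include <stddef.h>

    /* 16-bytes-at-a-time folding loop in the shape of lines 150-157. */
    static __m128i fold_tail(__m128i x0, const unsigned char *buf,
                             size_t len, __m128i k)
    {
        while (len >= 16) {
            __m128i lo = _mm_clmulepi64_si128(x0, k, 0x00);
            __m128i hi = _mm_clmulepi64_si128(x0, k, 0x11);
            x0 = _mm_xor_si128(_mm_xor_si128(lo, hi),
                               _mm_loadu_si128((const __m128i *)buf));
            buf += 16;
            len -= 16;
        }
        return x0;
    }
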
165 k = _mm_loadu_si128((__m128i*)consts->k5k6); in crc32_pclmul_batch()
166 x1 = _mm_clmulepi64_si128(x0, k, 0x11); in crc32_pclmul_batch()
171 x1 = _mm_clmulepi64_si128(x0, k, 0x01); in crc32_pclmul_batch()
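
Lines 165-171 shrink the remaining 128-bit value to 64 bits with the k5k6 pair. Both listed multiplies read the high qword of x0 (in PCLMULQDQ's imm8, bit 0 picks the qword of the first operand and bit 4 that of the second, so 0x11 pairs it with k.hi and 0x01 with k.lo); the shift/XOR glue between them updates x0 but contains no k. Both steps are instances of the same split rule, applied with n = 64 and then n = 32:

    R = H*x^n + L   ==>   R mod P = ( H*(x^n mod P) + L ) mod P

Which half of the vector holds k5 versus k6 is an assumption the selectors only hint at.
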
175 k = _mm_loadu_si128((__m128i*)consts->uPx); in crc32_pclmul_batch()
178 x1 = _mm_clmulepi64_si128(x1, k, 0x00); in crc32_pclmul_batch()
180 x1 = _mm_clmulepi64_si128(x1, k, 0x10); in crc32_pclmul_batch()
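
Lines 175-180 are the Barrett reduction that produces the final 32-bit CRC. The uPx vector evidently packs u = floor(x^64 / P(x)) in its low qword (used by 0x00) and the polynomial P(x) in its high qword (used by 0x10, i.e. x1.lo times k.hi), with shift glue between the two multiplies that the listing omits. A portable scalar sketch of the same reduction, handy for checking constant tables (all names here are illustrative, not taken from the source):

    #include <stdint.h>
    #include <stdio.h>

    /* Carry-less multiply, low 64 bits of the product. */
    static uint64_t clmul(uint64_t a, uint64_t b)
    {
        uint64_t r = 0;
        for (int i = 0; i < 64; i++)
            if ((b >> i) & 1)
                r ^= a << i;
        return r;
    }

    /* mu = floor(x^64 / P(x)) by polynomial long division; poly is the
     * full 33-bit polynomial, e.g. 0x104C11DB7 for CRC-32. */
    static uint64_t poly_div_x64(uint64_t poly)
    {
        uint64_t q = 0, rem = 0;
        for (int i = 64; i >= 0; i--) {
            rem = (rem << 1) | (i == 64 ? 1 : 0);  /* numerator is x^64 */
            q <<= 1;
            if (rem & (1ULL << 32)) {              /* degree 32: subtract P */
                rem ^= poly;
                q |= 1;
            }
        }
        return q;
    }

    /* Barrett: t1 = floor(r/x^32)*mu, t2 = floor(t1/x^32)*P, crc = r ^ t2. */
    static uint32_t barrett(uint64_t r, uint64_t poly, uint64_t mu)
    {
        uint64_t t1 = clmul(r >> 32, mu);
        uint64_t t2 = clmul(t1 >> 32, poly);
        return (uint32_t)(r ^ t2);
    }

    int main(void)
    {
        uint64_t P = 0x104C11DB7ULL;   /* CRC-32, normal bit order */
        uint64_t mu = poly_div_x64(P);
        /* spot check: x^33 mod P must equal x * (x^32 mod P) mod P */
        uint32_t a = barrett(1ULL << 33, P, mu);
        uint32_t b = barrett(clmul(barrett(1ULL << 32, P, mu), 2), P, mu);
        printf("%s\n", a == b ? "consistent" : "bug");
        return 0;
    }
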
191 __m128i x0, x1, x2, k; in crc32_pclmul_reflected_batch() local
210 k = _mm_loadu_si128((__m128i *)consts->k1k2); in crc32_pclmul_reflected_batch()
214 x4 = _mm_clmulepi64_si128(x0, k, 0x00); in crc32_pclmul_reflected_batch()
215 x5 = _mm_clmulepi64_si128(x1, k, 0x00); in crc32_pclmul_reflected_batch()
216 x6 = _mm_clmulepi64_si128(x2, k, 0x00); in crc32_pclmul_reflected_batch()
217 x7 = _mm_clmulepi64_si128(x3, k, 0x00); in crc32_pclmul_reflected_batch()
218 x0 = _mm_clmulepi64_si128(x0, k, 0x11); in crc32_pclmul_reflected_batch()
219 x1 = _mm_clmulepi64_si128(x1, k, 0x11); in crc32_pclmul_reflected_batch()
220 x2 = _mm_clmulepi64_si128(x2, k, 0x11); in crc32_pclmul_reflected_batch()
221 x3 = _mm_clmulepi64_si128(x3, k, 0x11); in crc32_pclmul_reflected_batch()
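
Lines 210-221 open crc32_pclmul_reflected_batch with the identical four-lane round; the differences from the non-reflected path live in the constant table (bit-reflected folding constants) and in the absence of a shuf_mask byte swap, since reflected CRCs consume bytes in memory order. The fold-down and tail stages at lines 239-261 mirror lines 134-157 the same way. Because every step leans on PCLMULQDQ's selector encoding, a tiny cross-check of that encoding against a scalar carry-less multiply (arbitrary example values):

    #include <immintrin.h>
    #include <stdint.h>
    #include <stdio.h>

    static uint64_t clmul_scalar(uint64_t a, uint64_t b) /* low 64 bits */
    {
        uint64_t r = 0;
        for (int i = 0; i < 64; i++)
            if ((b >> i) & 1)
                r ^= a << i;
        return r;
    }

    int main(void)
    {
        __m128i a = _mm_set_epi64x(0x1122334455667788LL,
                                   0x0123456789abcdefLL);
        __m128i b = _mm_set_epi64x((int64_t)0xcafebabedeadbeefULL,
                                   0x0f1e2d3c4b5a6978LL);
        /* imm8 bit 0 picks the qword of the first operand, bit 4 the
         * second's, so 0x11 is a.hi * b.hi. */
        uint64_t got = (uint64_t)_mm_cvtsi128_si64(
            _mm_clmulepi64_si128(a, b, 0x11));
        uint64_t want = clmul_scalar(0x1122334455667788ULL,
                                     0xcafebabedeadbeefULL);
        printf("%s\n", got == want ? "match" : "mismatch");
        return 0;
    }
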
239 k = _mm_loadu_si128((__m128i *)consts->k3k4); in crc32_pclmul_reflected_batch()
241 x4 = _mm_clmulepi64_si128(x0, k, 0x00); in crc32_pclmul_reflected_batch()
242 x0 = _mm_clmulepi64_si128(x0, k, 0x11); in crc32_pclmul_reflected_batch()
245 x4 = _mm_clmulepi64_si128(x0, k, 0x00); in crc32_pclmul_reflected_batch()
246 x0 = _mm_clmulepi64_si128(x0, k, 0x11); in crc32_pclmul_reflected_batch()
249 x4 = _mm_clmulepi64_si128(x0, k, 0x00); in crc32_pclmul_reflected_batch()
250 x0 = _mm_clmulepi64_si128(x0, k, 0x11); in crc32_pclmul_reflected_batch()
255 k = _mm_loadu_si128((__m128i *)consts->k3k4); in crc32_pclmul_reflected_batch()
260 x1 = _mm_clmulepi64_si128(x0, k, 0x00); in crc32_pclmul_reflected_batch()
261 x0 = _mm_clmulepi64_si128(x0, k, 0x11); in crc32_pclmul_reflected_batch()
269 x1 = _mm_clmulepi64_si128(x0, k, 0x10); in crc32_pclmul_reflected_batch()
275 k = _mm_loadu_si128((__m128i*)consts->k5k6); in crc32_pclmul_reflected_batch()
276 x1 = _mm_clmulepi64_si128(x1, k, 0x00); in crc32_pclmul_reflected_batch()
282 k = _mm_loadu_si128((__m128i*)consts->uPx); in crc32_pclmul_reflected_batch()
283 x1 = _mm_clmulepi64_si128(x1, k, 0x00); in crc32_pclmul_reflected_batch()
284 x1 = _mm_clmulepi64_si128(x1, k, 0x10); in crc32_pclmul_reflected_batch()
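
Lines 269-284 finish the reflected path: the 0x10 multiply at line 269 folds x0's low qword against the high half of k3k4, the low half of k5k6 (selector 0x00) completes the 64-bit reduction, and the uPx pair performs the reflected Barrett step, again with shift/mask glue hidden by the search. The generic consts argument suggests the routine serves several polynomials; for the common reflected CRC-32 instance it must agree with the plain bitwise loop, which makes a convenient oracle when validating the SIMD path:

    #include <stddef.h>
    #include <stdint.h>

    /* Bitwise reflected CRC-32 (zlib/gzip convention, polynomial
     * 0xEDB88320): slow but dependable for cross-checking the
     * PCLMUL routines on this instance. */
    static uint32_t crc32_bitwise(uint32_t crc, const unsigned char *p,
                                  size_t n)
    {
        crc = ~crc;
        while (n--) {
            crc ^= *p++;
            for (int i = 0; i < 8; i++)
                crc = (crc >> 1) ^ (0xEDB88320u & (0u - (crc & 1u)));
        }
        return ~crc;
    }
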