Lines matching refs:x (every line in the module that references the @x state array)
17 my ($t0,$t1,$t2,$t3,$t4,$t5,$t6,$t7,$t8,$x)=map("\$r$_",(12..21));
105 my @x = ($t0, $t1, $t2, $t3, $t4, $t5, $t6, $t7,
115 add.w @x[$a0],@x[$a0],@x[$b0]
116 xor @x[$d0],@x[$d0],@x[$a0]
117 rotri.w @x[$d0],@x[$d0],16 # rotate left 16 bits
118 add.w @x[$a1],@x[$a1],@x[$b1]
119 xor @x[$d1],@x[$d1],@x[$a1]
120 rotri.w @x[$d1],@x[$d1],16
122 add.w @x[$c0],@x[$c0],@x[$d0]
123 xor @x[$b0],@x[$b0],@x[$c0]
124 rotri.w @x[$b0],@x[$b0],20 # rotate left 12 bits
125 add.w @x[$c1],@x[$c1],@x[$d1]
126 xor @x[$b1],@x[$b1],@x[$c1]
127 rotri.w @x[$b1],@x[$b1],20
129 add.w @x[$a0],@x[$a0],@x[$b0]
130 xor @x[$d0],@x[$d0],@x[$a0]
131 rotri.w @x[$d0],@x[$d0],24 # rotate left 8 bits
132 add.w @x[$a1],@x[$a1],@x[$b1]
133 xor @x[$d1],@x[$d1],@x[$a1]
134 rotri.w @x[$d1],@x[$d1],24
136 add.w @x[$c0],@x[$c0],@x[$d0]
137 xor @x[$b0],@x[$b0],@x[$c0]
138 rotri.w @x[$b0],@x[$b0],25 # rotate left 7 bits
139 add.w @x[$c1],@x[$c1],@x[$d1]
140 xor @x[$b1],@x[$b1],@x[$c1]
141 rotri.w @x[$b1],@x[$b1],25
143 add.w @x[$a2],@x[$a2],@x[$b2]
144 xor @x[$d2],@x[$d2],@x[$a2]
145 rotri.w @x[$d2],@x[$d2],16
146 add.w @x[$a3],@x[$a3],@x[$b3]
147 xor @x[$d3],@x[$d3],@x[$a3]
148 rotri.w @x[$d3],@x[$d3],16
150 add.w @x[$c2],@x[$c2],@x[$d2]
151 xor @x[$b2],@x[$b2],@x[$c2]
152 rotri.w @x[$b2],@x[$b2],20
153 add.w @x[$c3],@x[$c3],@x[$d3]
154 xor @x[$b3],@x[$b3],@x[$c3]
155 rotri.w @x[$b3],@x[$b3],20
157 add.w @x[$a2],@x[$a2],@x[$b2]
158 xor @x[$d2],@x[$d2],@x[$a2]
159 rotri.w @x[$d2],@x[$d2],24
160 add.w @x[$a3],@x[$a3],@x[$b3]
161 xor @x[$d3],@x[$d3],@x[$a3]
162 rotri.w @x[$d3],@x[$d3],24
164 add.w @x[$c2],@x[$c2],@x[$d2]
165 xor @x[$b2],@x[$b2],@x[$c2]
166 rotri.w @x[$b2],@x[$b2],25
167 add.w @x[$c3],@x[$c3],@x[$d3]
168 xor @x[$b3],@x[$b3],@x[$c3]
169 rotri.w @x[$b3],@x[$b3],25
196 ld.w @x[0],$t8,0 # 'expa'
197 ld.w @x[1],$t8,4 # 'nd 3'
198 ld.w @x[2],$t8,8 # '2-by'
199 ld.w @x[3],$t8,12 # 'te k'
202 ld.w @x[4],$key,4*0
203 ld.w @x[5],$key,4*1
204 ld.w @x[6],$key,4*2
205 ld.w @x[7],$key,4*3
206 ld.w @x[8],$key,4*4
207 ld.w @x[9],$key,4*5
208 ld.w @x[10],$key,4*6
209 ld.w @x[11],$key,4*7
212 move @x[12],$s8
215 ld.w @x[13],$counter,4*1
216 ld.w @x[14],$counter,4*2
217 ld.w @x[15],$counter,4*3
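The loads above assemble the 16-word ChaCha state: words 0-3 are the "expand 32-byte k" constant, words 4-11 the 256-bit key, word 12 the 32-bit block counter (moved in from $s8 here), and words 13-15 the nonce/IV words taken from the counter buffer. A hedged C sketch of the equivalent set-up (function and parameter names are illustrative; a little-endian host is assumed, as on LoongArch):

    #include <stdint.h>
    #include <string.h>

    static void chacha_init_state(uint32_t x[16], const uint8_t key[32],
                                  const uint8_t counter[16], uint32_t block)
    {
        /* "expa" "nd 3" "2-by" "te k" as little-endian words */
        static const uint32_t sigma[4] =
            { 0x61707865, 0x3320646e, 0x79622d32, 0x6b206574 };

        memcpy(&x[0],  sigma,       16);   /* words 0..3:   constant      */
        memcpy(&x[4],  key,         32);   /* words 4..11:  256-bit key   */
        x[12] = block;                     /* word 12:      block counter */
        memcpy(&x[13], counter + 4, 12);   /* words 13..15: nonce/IV      */
    }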
239 add.w @x[0],@x[0],$a7
240 add.w @x[1],@x[1],$a6
241 add.w @x[2],@x[2],$a5
243 add.w @x[3],@x[3],$a7
249 add.w @x[4],@x[4],$t8
250 add.w @x[5],@x[5],$a7
251 add.w @x[6],@x[6],$a6
252 add.w @x[7],@x[7],$a5
258 add.w @x[8],@x[8],$t8
259 add.w @x[9],@x[9],$a7
260 add.w @x[10],@x[10],$a6
261 add.w @x[11],@x[11],$a5
263 add.w @x[12],@x[12],$s8
268 add.w @x[13],@x[13],$t8
269 add.w @x[14],@x[14],$a7
270 add.w @x[15],@x[15],$a6
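After the rounds, the original state words are added back (the ChaCha feed-forward). The add.w group above does this in runs of four, presumably reloading the initial words into the scratch registers $t8/$a7/$a6/$a5 between runs (those loads do not reference @x and so are not listed); word 12 gets the counter register $s8 added back directly. A short sketch of the same step (names illustrative):

    #include <stdint.h>

    /* keystream block = rounds(state) + state, word by word */
    static void feed_forward(uint32_t x[16], const uint32_t initial[16])
    {
        for (int i = 0; i < 16; i++)
            x[i] += initial[i];
    }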
280 xor $t8,$t8,@x[0]
281 xor $a7,$a7,@x[1]
282 xor $a6,$a6,@x[2]
283 xor $a5,$a5,@x[3]
293 xor $t8,$t8,@x[4]
294 xor $a7,$a7,@x[5]
295 xor $a6,$a6,@x[6]
296 xor $a5,$a5,@x[7]
306 xor $t8,$t8,@x[8]
307 xor $a7,$a7,@x[9]
308 xor $a6,$a6,@x[10]
309 xor $a5,$a5,@x[11]
319 xor $t8,$t8,@x[12]
320 xor $a7,$a7,@x[13]
321 xor $a6,$a6,@x[14]
322 xor $a5,$a5,@x[15]
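The xor group above applies the keystream: four input words are loaded into $t8/$a7/$a6/$a5 (the loads do not reference @x and so are not listed), XORed with four keystream words, and stored, four times per 64-byte block. A hedged C equivalent (illustrative names; memcpy is used to stay unaligned-safe):

    #include <stdint.h>
    #include <string.h>

    /* out = in XOR keystream, one 64-byte block */
    static void xor_block(uint8_t *out, const uint8_t *in, const uint32_t ks[16])
    {
        for (int i = 0; i < 16; i++) {
            uint32_t w;
            memcpy(&w, in + 4 * i, 4);
            w ^= ks[i];                    /* little-endian words assumed */
            memcpy(out + 4 * i, &w, 4);
        }
    }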
339 st.w @x[0],$a7,4*0
340 st.w @x[1],$a7,4*1
341 st.w @x[2],$a7,4*2
342 st.w @x[3],$a7,4*3
343 st.w @x[4],$a7,4*4
344 st.w @x[5],$a7,4*5
345 st.w @x[6],$a7,4*6
346 st.w @x[7],$a7,4*7
347 st.w @x[8],$a7,4*8
348 st.w @x[9],$a7,4*9
349 st.w @x[10],$a7,4*10
350 st.w @x[11],$a7,4*11
351 st.w @x[12],$a7,4*12
352 st.w @x[13],$a7,4*13
353 st.w @x[14],$a7,4*14
354 st.w @x[15],$a7,4*15
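The st.w group above spills the whole keystream block to memory rather than XORing it in place; that is the usual shape of the final-partial-block path, where fewer than 64 bytes remain and the XOR has to drop to byte granularity. A hedged sketch of that tail path (buffer size and names are illustrative):

    #include <stdint.h>
    #include <string.h>

    /* XOR a partial final block (len < 64) against a spilled keystream block */
    static void xor_tail(uint8_t *out, const uint8_t *in,
                         const uint32_t ks[16], size_t len)
    {
        uint8_t buf[64];
        memcpy(buf, ks, 64);               /* little-endian words assumed */
        for (size_t i = 0; i < len; i++)
            out[i] = in[i] ^ buf[i];
    }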
390 my @x = ($vr0, $vr1, $vr2, $vr3, $vr4, $vr5, $vr6, $vr7,
404 vadd.w @x[$a0],@x[$a0],@x[$b0]
405 vxor.v @x[$d0],@x[$d0],@x[$a0]
406 vrotri.w @x[$d0],@x[$d0],16 # rotate left 16 bits
407 vadd.w @x[$a1],@x[$a1],@x[$b1]
408 vxor.v @x[$d1],@x[$d1],@x[$a1]
409 vrotri.w @x[$d1],@x[$d1],16
411 vadd.w @x[$c0],@x[$c0],@x[$d0]
412 vxor.v @x[$b0],@x[$b0],@x[$c0]
413 vrotri.w @x[$b0],@x[$b0],20 # rotate left 12 bits
414 vadd.w @x[$c1],@x[$c1],@x[$d1]
415 vxor.v @x[$b1],@x[$b1],@x[$c1]
416 vrotri.w @x[$b1],@x[$b1],20
418 vadd.w @x[$a0],@x[$a0],@x[$b0]
419 vxor.v @x[$d0],@x[$d0],@x[$a0]
420 vrotri.w @x[$d0],@x[$d0],24 # rotate left 8 bits
421 vadd.w @x[$a1],@x[$a1],@x[$b1]
422 vxor.v @x[$d1],@x[$d1],@x[$a1]
423 vrotri.w @x[$d1],@x[$d1],24
425 vadd.w @x[$c0],@x[$c0],@x[$d0]
426 vxor.v @x[$b0],@x[$b0],@x[$c0]
427 vrotri.w @x[$b0],@x[$b0],25 # rotate left 7 bits
428 vadd.w @x[$c1],@x[$c1],@x[$d1]
429 vxor.v @x[$b1],@x[$b1],@x[$c1]
430 vrotri.w @x[$b1],@x[$b1],25
432 vadd.w @x[$a2],@x[$a2],@x[$b2]
433 vxor.v @x[$d2],@x[$d2],@x[$a2]
434 vrotri.w @x[$d2],@x[$d2],16
435 vadd.w @x[$a3],@x[$a3],@x[$b3]
436 vxor.v @x[$d3],@x[$d3],@x[$a3]
437 vrotri.w @x[$d3],@x[$d3],16
439 vadd.w @x[$c2],@x[$c2],@x[$d2]
440 vxor.v @x[$b2],@x[$b2],@x[$c2]
441 vrotri.w @x[$b2],@x[$b2],20
442 vadd.w @x[$c3],@x[$c3],@x[$d3]
443 vxor.v @x[$b3],@x[$b3],@x[$c3]
444 vrotri.w @x[$b3],@x[$b3],20
446 vadd.w @x[$a2],@x[$a2],@x[$b2]
447 vxor.v @x[$d2],@x[$d2],@x[$a2]
448 vrotri.w @x[$d2],@x[$d2],24
449 vadd.w @x[$a3],@x[$a3],@x[$b3]
450 vxor.v @x[$d3],@x[$d3],@x[$a3]
451 vrotri.w @x[$d3],@x[$d3],24
453 vadd.w @x[$c2],@x[$c2],@x[$d2]
454 vxor.v @x[$b2],@x[$b2],@x[$c2]
455 vrotri.w @x[$b2],@x[$b2],25
456 vadd.w @x[$c3],@x[$c3],@x[$d3]
457 vxor.v @x[$b3],@x[$b3],@x[$c3]
458 vrotri.w @x[$b3],@x[$b3],25
476 vldrepl.w @x[0],$t8,4*0 # 'expa'
477 vldrepl.w @x[1],$t8,4*1 # 'nd 3'
478 vldrepl.w @x[2],$t8,4*2 # '2-by'
479 vldrepl.w @x[3],$t8,4*3 # 'te k'
482 vldrepl.w @x[4],$key,4*0
483 vldrepl.w @x[5],$key,4*1
484 vldrepl.w @x[6],$key,4*2
485 vldrepl.w @x[7],$key,4*3
486 vldrepl.w @x[8],$key,4*4
487 vldrepl.w @x[9],$key,4*5
488 vldrepl.w @x[10],$key,4*6
489 vldrepl.w @x[11],$key,4*7
492 vreplgr2vr.w @x[12],$t4
495 vldrepl.w @x[13],$counter,4*1
496 vldrepl.w @x[14],$counter,4*2
497 vldrepl.w @x[15],$counter,4*3
502 vadd.w @x[12],@x[12],@y[0]
505 vori.b @y[0],@x[0],0
506 vori.b @y[1],@x[1],0
507 vori.b @y[2],@x[2],0
508 vori.b @y[3],@x[3],0
509 vori.b @y[4],@x[4],0
510 vori.b @y[5],@x[5],0
511 vori.b @y[6],@x[6],0
512 vori.b @y[7],@x[7],0
513 vori.b @y[8],@x[8],0
514 vori.b @y[9],@x[9],0
515 vori.b @y[10],@x[10],0
516 vori.b @y[11],@x[11],0
517 vori.b @y[12],@x[12],0
518 vori.b @y[13],@x[13],0
519 vori.b @y[14],@x[14],0
520 vori.b @y[15],@x[15],0
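In the LSX path each 128-bit $vr register holds one state word for four independent blocks: vldrepl.w broadcasts a word into all four lanes, the counter slot (word 12) is broadcast from a general register with vreplgr2vr.w, and the vadd.w of @x[12] with @y[0] (a vector assumed to hold per-lane counter increments) is what makes the four lanes work on consecutive blocks. The vori.b dst,src,0 run is simply a register-to-register copy, keeping the pre-round state for the later feed-forward adds. A hedged C model of that layout (type and names illustrative):

    #include <stdint.h>

    typedef struct { uint32_t lane[4]; } v4u32;   /* one 128-bit LSX register */

    /* Broadcast the scalar state into 16 vectors and give each lane its own
     * block counter, so lane i computes the block at counter + i. */
    static void broadcast_state(v4u32 x[16], const uint32_t state[16])
    {
        for (int w = 0; w < 16; w++)
            for (int i = 0; i < 4; i++)
                x[w].lane[i] = state[w];          /* vldrepl.w / vreplgr2vr.w */
        for (int i = 0; i < 4; i++)
            x[12].lane[i] += (uint32_t)i;         /* assumed per-lane offsets */
    }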
538 vadd.w @x[0],@x[0],@y[0]
539 vadd.w @x[1],@x[1],@y[1]
540 vadd.w @x[2],@x[2],@y[2]
541 vadd.w @x[3],@x[3],@y[3]
542 vadd.w @x[4],@x[4],@y[4]
543 vadd.w @x[5],@x[5],@y[5]
544 vadd.w @x[6],@x[6],@y[6]
545 vadd.w @x[7],@x[7],@y[7]
546 vadd.w @x[8],@x[8],@y[8]
547 vadd.w @x[9],@x[9],@y[9]
548 vadd.w @x[10],@x[10],@y[10]
549 vadd.w @x[11],@x[11],@y[11]
550 vadd.w @x[12],@x[12],@y[12]
551 vadd.w @x[13],@x[13],@y[13]
552 vadd.w @x[14],@x[14],@y[14]
553 vadd.w @x[15],@x[15],@y[15]
556 vilvl.w @y[0],@x[1],@x[0]
557 vilvh.w @y[1],@x[1],@x[0]
558 vilvl.w @y[2],@x[3],@x[2]
559 vilvh.w @y[3],@x[3],@x[2]
560 vilvl.w @y[4],@x[5],@x[4]
561 vilvh.w @y[5],@x[5],@x[4]
562 vilvl.w @y[6],@x[7],@x[6]
563 vilvh.w @y[7],@x[7],@x[6]
564 vilvl.w @y[8],@x[9],@x[8]
565 vilvh.w @y[9],@x[9],@x[8]
566 vilvl.w @y[10],@x[11],@x[10]
567 vilvh.w @y[11],@x[11],@x[10]
568 vilvl.w @y[12],@x[13],@x[12]
569 vilvh.w @y[13],@x[13],@x[12]
570 vilvl.w @y[14],@x[15],@x[14]
571 vilvh.w @y[15],@x[15],@x[14]
573 vilvl.d @x[0],@y[2],@y[0]
574 vilvh.d @x[1],@y[2],@y[0]
575 vilvl.d @x[2],@y[3],@y[1]
576 vilvh.d @x[3],@y[3],@y[1]
577 vilvl.d @x[4],@y[6],@y[4]
578 vilvh.d @x[5],@y[6],@y[4]
579 vilvl.d @x[6],@y[7],@y[5]
580 vilvh.d @x[7],@y[7],@y[5]
581 vilvl.d @x[8],@y[10],@y[8]
582 vilvh.d @x[9],@y[10],@y[8]
583 vilvl.d @x[10],@y[11],@y[9]
584 vilvh.d @x[11],@y[11],@y[9]
585 vilvl.d @x[12],@y[14],@y[12]
586 vilvh.d @x[13],@y[14],@y[12]
587 vilvl.d @x[14],@y[15],@y[13]
588 vilvh.d @x[15],@y[15],@y[13]
592 @x = (@x[0],@x[4],@x[8],@x[12],@x[1],@x[5],@x[9],@x[13],
593 @x[2],@x[6],@x[10],@x[14],@x[3],@x[7],@x[11],@x[15]);
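After the rounds, lane i of register w still holds word w of block i. The vilvl.w/vilvh.w pairs followed by vilvl.d/vilvh.d perform a 4x4 transpose on each group of four registers, so every register ends up holding four consecutive words of a single block, and the @x reshuffle on the two lines above then orders the registers block by block (words 0-15 of the first block, then the second, and so on). A hedged C sketch of one such 4x4 transpose with the interleave steps spelled out (names illustrative):

    #include <stdint.h>

    /* Transpose four 4-lane vectors in place: on entry lane i of r_w is the
     * w-th word of block i; on exit r_i collects lane i of all four inputs,
     * i.e. four consecutive words of block i. */
    static void transpose_4x4(uint32_t r0[4], uint32_t r1[4],
                              uint32_t r2[4], uint32_t r3[4])
    {
        uint32_t t0[4], t1[4], t2[4], t3[4];

        /* vilvl.w / vilvh.w: interleave low/high word pairs of two vectors */
        t0[0] = r0[0]; t0[1] = r1[0]; t0[2] = r0[1]; t0[3] = r1[1];
        t1[0] = r0[2]; t1[1] = r1[2]; t1[2] = r0[3]; t1[3] = r1[3];
        t2[0] = r2[0]; t2[1] = r3[0]; t2[2] = r2[1]; t2[3] = r3[1];
        t3[0] = r2[2]; t3[1] = r3[2]; t3[2] = r2[3]; t3[3] = r3[3];

        /* vilvl.d / vilvh.d: interleave low/high 64-bit halves */
        r0[0] = t0[0]; r0[1] = t0[1]; r0[2] = t2[0]; r0[3] = t2[1];
        r1[0] = t0[2]; r1[1] = t0[3]; r1[2] = t2[2]; r1[3] = t2[3];
        r2[0] = t1[0]; r2[1] = t1[1]; r2[2] = t3[0]; r2[3] = t3[1];
        r3[0] = t1[2]; r3[1] = t1[3]; r3[2] = t3[2]; r3[3] = t3[3];
    }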
604 vxor.v @y[0],@y[0],@x[0]
605 vxor.v @y[1],@y[1],@x[1]
606 vxor.v @y[2],@y[2],@x[2]
607 vxor.v @y[3],@y[3],@x[3]
617 vxor.v @y[0],@y[0],@x[4]
618 vxor.v @y[1],@y[1],@x[5]
619 vxor.v @y[2],@y[2],@x[6]
620 vxor.v @y[3],@y[3],@x[7]
630 vxor.v @y[0],@y[0],@x[8]
631 vxor.v @y[1],@y[1],@x[9]
632 vxor.v @y[2],@y[2],@x[10]
633 vxor.v @y[3],@y[3],@x[11]
643 vxor.v @y[0],@y[0],@x[12]
644 vxor.v @y[1],@y[1],@x[13]
645 vxor.v @y[2],@y[2],@x[14]
646 vxor.v @y[3],@y[3],@x[15]
668 vst @x[0],$sp,16*0
669 vst @x[1],$sp,16*1
670 vst @x[2],$sp,16*2
671 vst @x[3],$sp,16*3
681 vxor.v @y[0],@y[0],@x[0]
682 vxor.v @y[1],@y[1],@x[1]
683 vxor.v @y[2],@y[2],@x[2]
684 vxor.v @y[3],@y[3],@x[3]
694 vst @x[4],$sp,16*0
695 vst @x[5],$sp,16*1
696 vst @x[6],$sp,16*2
697 vst @x[7],$sp,16*3
707 vxor.v @y[0],@y[0],@x[0]
708 vxor.v @y[1],@y[1],@x[1]
709 vxor.v @y[2],@y[2],@x[2]
710 vxor.v @y[3],@y[3],@x[3]
720 vxor.v @y[0],@y[0],@x[4]
721 vxor.v @y[1],@y[1],@x[5]
722 vxor.v @y[2],@y[2],@x[6]
723 vxor.v @y[3],@y[3],@x[7]
733 vst @x[8],$sp,16*0
734 vst @x[9],$sp,16*1
735 vst @x[10],$sp,16*2
736 vst @x[11],$sp,16*3
746 vxor.v @y[0],@y[0],@x[0]
747 vxor.v @y[1],@y[1],@x[1]
748 vxor.v @y[2],@y[2],@x[2]
749 vxor.v @y[3],@y[3],@x[3]
759 vxor.v @y[0],@y[0],@x[4]
760 vxor.v @y[1],@y[1],@x[5]
761 vxor.v @y[2],@y[2],@x[6]
762 vxor.v @y[3],@y[3],@x[7]
772 vxor.v @y[0],@y[0],@x[8]
773 vxor.v @y[1],@y[1],@x[9]
774 vxor.v @y[2],@y[2],@x[10]
775 vxor.v @y[3],@y[3],@x[11]
785 vst @x[12],$sp,16*0
786 vst @x[13],$sp,16*1
787 vst @x[14],$sp,16*2
788 vst @x[15],$sp,16*3
814 my @x = ($xr0, $xr1, $xr2, $xr3, $xr4, $xr5, $xr6, $xr7,
828 xvadd.w @x[$a0],@x[$a0],@x[$b0]
829 xvxor.v @x[$d0],@x[$d0],@x[$a0]
830 xvrotri.w @x[$d0],@x[$d0],16 # rotate left 16 bits
831 xvadd.w @x[$a1],@x[$a1],@x[$b1]
832 xvxor.v @x[$d1],@x[$d1],@x[$a1]
833 xvrotri.w @x[$d1],@x[$d1],16
835 xvadd.w @x[$c0],@x[$c0],@x[$d0]
836 xvxor.v @x[$b0],@x[$b0],@x[$c0]
837 xvrotri.w @x[$b0],@x[$b0],20 # rotate left 12 bits
838 xvadd.w @x[$c1],@x[$c1],@x[$d1]
839 xvxor.v @x[$b1],@x[$b1],@x[$c1]
840 xvrotri.w @x[$b1],@x[$b1],20
842 xvadd.w @x[$a0],@x[$a0],@x[$b0]
843 xvxor.v @x[$d0],@x[$d0],@x[$a0]
844 xvrotri.w @x[$d0],@x[$d0],24 # rotate left 8 bits
845 xvadd.w @x[$a1],@x[$a1],@x[$b1]
846 xvxor.v @x[$d1],@x[$d1],@x[$a1]
847 xvrotri.w @x[$d1],@x[$d1],24
849 xvadd.w @x[$c0],@x[$c0],@x[$d0]
850 xvxor.v @x[$b0],@x[$b0],@x[$c0]
851 xvrotri.w @x[$b0],@x[$b0],25 # rotate left 7 bits
852 xvadd.w @x[$c1],@x[$c1],@x[$d1]
853 xvxor.v @x[$b1],@x[$b1],@x[$c1]
854 xvrotri.w @x[$b1],@x[$b1],25
856 xvadd.w @x[$a2],@x[$a2],@x[$b2]
857 xvxor.v @x[$d2],@x[$d2],@x[$a2]
858 xvrotri.w @x[$d2],@x[$d2],16
859 xvadd.w @x[$a3],@x[$a3],@x[$b3]
860 xvxor.v @x[$d3],@x[$d3],@x[$a3]
861 xvrotri.w @x[$d3],@x[$d3],16
863 xvadd.w @x[$c2],@x[$c2],@x[$d2]
864 xvxor.v @x[$b2],@x[$b2],@x[$c2]
865 xvrotri.w @x[$b2],@x[$b2],20
866 xvadd.w @x[$c3],@x[$c3],@x[$d3]
867 xvxor.v @x[$b3],@x[$b3],@x[$c3]
868 xvrotri.w @x[$b3],@x[$b3],20
870 xvadd.w @x[$a2],@x[$a2],@x[$b2]
871 xvxor.v @x[$d2],@x[$d2],@x[$a2]
872 xvrotri.w @x[$d2],@x[$d2],24
873 xvadd.w @x[$a3],@x[$a3],@x[$b3]
874 xvxor.v @x[$d3],@x[$d3],@x[$a3]
875 xvrotri.w @x[$d3],@x[$d3],24
877 xvadd.w @x[$c2],@x[$c2],@x[$d2]
878 xvxor.v @x[$b2],@x[$b2],@x[$c2]
879 xvrotri.w @x[$b2],@x[$b2],25
880 xvadd.w @x[$c3],@x[$c3],@x[$d3]
881 xvxor.v @x[$b3],@x[$b3],@x[$c3]
882 xvrotri.w @x[$b3],@x[$b3],25
900 xvldrepl.w @x[0],$t8,4*0 # 'expa'
901 xvldrepl.w @x[1],$t8,4*1 # 'nd 3'
902 xvldrepl.w @x[2],$t8,4*2 # '2-by'
903 xvldrepl.w @x[3],$t8,4*3 # 'te k'
906 xvldrepl.w @x[4],$key,4*0
907 xvldrepl.w @x[5],$key,4*1
908 xvldrepl.w @x[6],$key,4*2
909 xvldrepl.w @x[7],$key,4*3
910 xvldrepl.w @x[8],$key,4*4
911 xvldrepl.w @x[9],$key,4*5
912 xvldrepl.w @x[10],$key,4*6
913 xvldrepl.w @x[11],$key,4*7
916 xvreplgr2vr.w @x[12],$t4
919 xvldrepl.w @x[13],$counter,4*1
920 xvldrepl.w @x[14],$counter,4*2
921 xvldrepl.w @x[15],$counter,4*3
926 xvadd.w @x[12],@x[12],@y[0]
929 xvori.b @y[0],@x[0],0
930 xvori.b @y[1],@x[1],0
931 xvori.b @y[2],@x[2],0
932 xvori.b @y[3],@x[3],0
933 xvori.b @y[4],@x[4],0
934 xvori.b @y[5],@x[5],0
935 xvori.b @y[6],@x[6],0
936 xvori.b @y[7],@x[7],0
937 xvori.b @y[8],@x[8],0
938 xvori.b @y[9],@x[9],0
939 xvori.b @y[10],@x[10],0
940 xvori.b @y[11],@x[11],0
941 xvori.b @y[12],@x[12],0
942 xvori.b @y[13],@x[13],0
943 xvori.b @y[14],@x[14],0
944 xvori.b @y[15],@x[15],0
962 xvadd.w @x[0],@x[0],@y[0]
963 xvadd.w @x[1],@x[1],@y[1]
964 xvadd.w @x[2],@x[2],@y[2]
965 xvadd.w @x[3],@x[3],@y[3]
966 xvadd.w @x[4],@x[4],@y[4]
967 xvadd.w @x[5],@x[5],@y[5]
968 xvadd.w @x[6],@x[6],@y[6]
969 xvadd.w @x[7],@x[7],@y[7]
970 xvadd.w @x[8],@x[8],@y[8]
971 xvadd.w @x[9],@x[9],@y[9]
972 xvadd.w @x[10],@x[10],@y[10]
973 xvadd.w @x[11],@x[11],@y[11]
974 xvadd.w @x[12],@x[12],@y[12]
975 xvadd.w @x[13],@x[13],@y[13]
976 xvadd.w @x[14],@x[14],@y[14]
977 xvadd.w @x[15],@x[15],@y[15]
980 xvilvl.w @y[0],@x[1],@x[0]
981 xvilvh.w @y[1],@x[1],@x[0]
982 xvilvl.w @y[2],@x[3],@x[2]
983 xvilvh.w @y[3],@x[3],@x[2]
984 xvilvl.w @y[4],@x[5],@x[4]
985 xvilvh.w @y[5],@x[5],@x[4]
986 xvilvl.w @y[6],@x[7],@x[6]
987 xvilvh.w @y[7],@x[7],@x[6]
988 xvilvl.w @y[8],@x[9],@x[8]
989 xvilvh.w @y[9],@x[9],@x[8]
990 xvilvl.w @y[10],@x[11],@x[10]
991 xvilvh.w @y[11],@x[11],@x[10]
992 xvilvl.w @y[12],@x[13],@x[12]
993 xvilvh.w @y[13],@x[13],@x[12]
994 xvilvl.w @y[14],@x[15],@x[14]
995 xvilvh.w @y[15],@x[15],@x[14]
997 xvilvl.d @x[0],@y[2],@y[0]
998 xvilvh.d @x[1],@y[2],@y[0]
999 xvilvl.d @x[2],@y[3],@y[1]
1000 xvilvh.d @x[3],@y[3],@y[1]
1001 xvilvl.d @x[4],@y[6],@y[4]
1002 xvilvh.d @x[5],@y[6],@y[4]
1003 xvilvl.d @x[6],@y[7],@y[5]
1004 xvilvh.d @x[7],@y[7],@y[5]
1005 xvilvl.d @x[8],@y[10],@y[8]
1006 xvilvh.d @x[9],@y[10],@y[8]
1007 xvilvl.d @x[10],@y[11],@y[9]
1008 xvilvh.d @x[11],@y[11],@y[9]
1009 xvilvl.d @x[12],@y[14],@y[12]
1010 xvilvh.d @x[13],@y[14],@y[12]
1011 xvilvl.d @x[14],@y[15],@y[13]
1012 xvilvh.d @x[15],@y[15],@y[13]
1014 xvori.b @y[0],@x[4],0
1015 xvpermi.q @y[0],@x[0],0x20
1016 xvori.b @y[1],@x[5],0
1017 xvpermi.q @y[1],@x[1],0x20
1018 xvori.b @y[2],@x[6],0
1019 xvpermi.q @y[2],@x[2],0x20
1020 xvori.b @y[3],@x[7],0
1021 xvpermi.q @y[3],@x[3],0x20
1022 xvori.b @y[4],@x[4],0
1023 xvpermi.q @y[4],@x[0],0x31
1024 xvori.b @y[5],@x[5],0
1025 xvpermi.q @y[5],@x[1],0x31
1026 xvori.b @y[6],@x[6],0
1027 xvpermi.q @y[6],@x[2],0x31
1028 xvori.b @y[7],@x[7],0
1029 xvpermi.q @y[7],@x[3],0x31
1030 xvori.b @y[8],@x[12],0
1031 xvpermi.q @y[8],@x[8],0x20
1032 xvori.b @y[9],@x[13],0
1033 xvpermi.q @y[9],@x[9],0x20
1034 xvori.b @y[10],@x[14],0
1035 xvpermi.q @y[10],@x[10],0x20
1036 xvori.b @y[11],@x[15],0
1037 xvpermi.q @y[11],@x[11],0x20
1038 xvori.b @y[12],@x[12],0
1039 xvpermi.q @y[12],@x[8],0x31
1040 xvori.b @y[13],@x[13],0
1041 xvpermi.q @y[13],@x[9],0x31
1042 xvori.b @y[14],@x[14],0
1043 xvpermi.q @y[14],@x[10],0x31
1044 xvori.b @y[15],@x[15],0
1045 xvpermi.q @y[15],@x[11],0x31
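The LASX path runs eight blocks in parallel, and, like most LASX shuffles, the xvilvl/xvilvh transpose operates independently inside each 128-bit half of a 256-bit $xr register, so afterwards the low half of a register belongs to one block and the high half to another. The xvori.b copy plus xvpermi.q pairs above recombine halves across two registers: with immediate 0x20 the result takes the low 128-bit half of the source operand and the low half of the (pre-copied) destination, with 0x31 the two high halves, so each result holds 32 contiguous bytes of a single block. A hedged C model of just those two immediates as used here (struct and names illustrative):

    #include <stdint.h>

    typedef struct { uint32_t w[8]; } v8u32;      /* one 256-bit LASX register */

    /* xvpermi.q dst,src,0x20: result = { src.low128, dst.low128 } */
    static v8u32 permi_q_0x20(v8u32 dst, v8u32 src)
    {
        v8u32 r;
        for (int i = 0; i < 4; i++) { r.w[i] = src.w[i]; r.w[4 + i] = dst.w[i]; }
        return r;
    }

    /* xvpermi.q dst,src,0x31: result = { src.high128, dst.high128 } */
    static v8u32 permi_q_0x31(v8u32 dst, v8u32 src)
    {
        v8u32 r;
        for (int i = 0; i < 4; i++) { r.w[i] = src.w[4 + i]; r.w[4 + i] = dst.w[4 + i]; }
        return r;
    }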
1058 xvld @x[0],$inp,32*0
1059 xvld @x[1],$inp,32*1
1060 xvld @x[2],$inp,32*2
1061 xvld @x[3],$inp,32*3
1062 xvxor.v @x[0],@x[0],@y[0]
1063 xvxor.v @x[1],@x[1],@y[1]
1064 xvxor.v @x[2],@x[2],@y[2]
1065 xvxor.v @x[3],@x[3],@y[3]
1066 xvst @x[0],$out,32*0
1067 xvst @x[1],$out,32*1
1068 xvst @x[2],$out,32*2
1069 xvst @x[3],$out,32*3
1071 xvld @x[0],$inp,32*4
1072 xvld @x[1],$inp,32*5
1073 xvld @x[2],$inp,32*6
1074 xvld @x[3],$inp,32*7
1075 xvxor.v @x[0],@x[0],@y[4]
1076 xvxor.v @x[1],@x[1],@y[5]
1077 xvxor.v @x[2],@x[2],@y[6]
1078 xvxor.v @x[3],@x[3],@y[7]
1079 xvst @x[0],$out,32*4
1080 xvst @x[1],$out,32*5
1081 xvst @x[2],$out,32*6
1082 xvst @x[3],$out,32*7
1084 xvld @x[0],$inp,32*8
1085 xvld @x[1],$inp,32*9
1086 xvld @x[2],$inp,32*10
1087 xvld @x[3],$inp,32*11
1088 xvxor.v @x[0],@x[0],@y[8]
1089 xvxor.v @x[1],@x[1],@y[9]
1090 xvxor.v @x[2],@x[2],@y[10]
1091 xvxor.v @x[3],@x[3],@y[11]
1092 xvst @x[0],$out,32*8
1093 xvst @x[1],$out,32*9
1094 xvst @x[2],$out,32*10
1095 xvst @x[3],$out,32*11
1097 xvld @x[0],$inp,32*12
1098 xvld @x[1],$inp,32*13
1099 xvld @x[2],$inp,32*14
1100 xvld @x[3],$inp,32*15
1101 xvxor.v @x[0],@x[0],@y[12]
1102 xvxor.v @x[1],@x[1],@y[13]
1103 xvxor.v @x[2],@x[2],@y[14]
1104 xvxor.v @x[3],@x[3],@y[15]
1105 xvst @x[0],$out,32*12
1106 xvst @x[1],$out,32*13
1107 xvst @x[2],$out,32*14
1108 xvst @x[3],$out,32*15
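From here on the listing is the bulk XOR: input is loaded 32 bytes per register, XORed with the corresponding keystream register, and stored, in groups of up to four registers. The shorter groups that follow are the unrolled tail variants, which consume only as many keystream registers as there is data left; in the assembly the tails still work in whole 32-byte registers, and byte-level handling in the sketch below is only to keep it self-contained. A hedged C version of the pattern (chunk size and names illustrative):

    #include <stdint.h>
    #include <string.h>

    /* XOR input with a keystream kept in 32-byte register-sized chunks,
     * stopping after the chunks the remaining length actually needs. */
    static void xor_chunks(uint8_t *out, const uint8_t *in,
                           const uint8_t ks[][32], size_t nchunks, size_t len)
    {
        for (size_t c = 0; c < nchunks && len > 0; c++) {
            size_t n = len < 32 ? len : 32;
            for (size_t i = 0; i < n; i++)
                out[c * 32 + i] = in[c * 32 + i] ^ ks[c][i];
            len -= n;
        }
    }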
1141 xvld @x[0],$inp,32*0
1142 xvld @x[1],$inp,32*1
1143 xvxor.v @x[0],@x[0],@y[0]
1144 xvxor.v @x[1],@x[1],@y[1]
1145 xvst @x[0],$out,32*0
1146 xvst @x[1],$out,32*1
1159 xvld @x[0],$inp,32*0
1160 xvld @x[1],$inp,32*1
1161 xvld @x[2],$inp,32*2
1162 xvld @x[3],$inp,32*3
1163 xvxor.v @x[0],@x[0],@y[0]
1164 xvxor.v @x[1],@x[1],@y[1]
1165 xvxor.v @x[2],@x[2],@y[2]
1166 xvxor.v @x[3],@x[3],@y[3]
1167 xvst @x[0],$out,32*0
1168 xvst @x[1],$out,32*1
1169 xvst @x[2],$out,32*2
1170 xvst @x[3],$out,32*3
1183 xvld @x[0],$inp,32*0
1184 xvld @x[1],$inp,32*1
1185 xvld @x[2],$inp,32*2
1186 xvld @x[3],$inp,32*3
1187 xvxor.v @x[0],@x[0],@y[0]
1188 xvxor.v @x[1],@x[1],@y[1]
1189 xvxor.v @x[2],@x[2],@y[2]
1190 xvxor.v @x[3],@x[3],@y[3]
1191 xvst @x[0],$out,32*0
1192 xvst @x[1],$out,32*1
1193 xvst @x[2],$out,32*2
1194 xvst @x[3],$out,32*3
1196 xvld @x[0],$inp,32*4
1197 xvld @x[1],$inp,32*5
1198 xvxor.v @x[0],@x[0],@y[4]
1199 xvxor.v @x[1],@x[1],@y[5]
1200 xvst @x[0],$out,32*4
1201 xvst @x[1],$out,32*5
1214 xvld @x[0],$inp,32*0
1215 xvld @x[1],$inp,32*1
1216 xvld @x[2],$inp,32*2
1217 xvld @x[3],$inp,32*3
1218 xvxor.v @x[0],@x[0],@y[0]
1219 xvxor.v @x[1],@x[1],@y[1]
1220 xvxor.v @x[2],@x[2],@y[2]
1221 xvxor.v @x[3],@x[3],@y[3]
1222 xvst @x[0],$out,32*0
1223 xvst @x[1],$out,32*1
1224 xvst @x[2],$out,32*2
1225 xvst @x[3],$out,32*3
1227 xvld @x[0],$inp,32*4
1228 xvld @x[1],$inp,32*5
1229 xvld @x[2],$inp,32*6
1230 xvld @x[3],$inp,32*7
1231 xvxor.v @x[0],@x[0],@y[4]
1232 xvxor.v @x[1],@x[1],@y[5]
1233 xvxor.v @x[2],@x[2],@y[6]
1234 xvxor.v @x[3],@x[3],@y[7]
1235 xvst @x[0],$out,32*4
1236 xvst @x[1],$out,32*5
1237 xvst @x[2],$out,32*6
1238 xvst @x[3],$out,32*7
1251 xvld @x[0],$inp,32*0
1252 xvld @x[1],$inp,32*1
1253 xvld @x[2],$inp,32*2
1254 xvld @x[3],$inp,32*3
1255 xvxor.v @x[0],@x[0],@y[0]
1256 xvxor.v @x[1],@x[1],@y[1]
1257 xvxor.v @x[2],@x[2],@y[2]
1258 xvxor.v @x[3],@x[3],@y[3]
1259 xvst @x[0],$out,32*0
1260 xvst @x[1],$out,32*1
1261 xvst @x[2],$out,32*2
1262 xvst @x[3],$out,32*3
1264 xvld @x[0],$inp,32*4
1265 xvld @x[1],$inp,32*5
1266 xvld @x[2],$inp,32*6
1267 xvld @x[3],$inp,32*7
1268 xvxor.v @x[0],@x[0],@y[4]
1269 xvxor.v @x[1],@x[1],@y[5]
1270 xvxor.v @x[2],@x[2],@y[6]
1271 xvxor.v @x[3],@x[3],@y[7]
1272 xvst @x[0],$out,32*4
1273 xvst @x[1],$out,32*5
1274 xvst @x[2],$out,32*6
1275 xvst @x[3],$out,32*7
1277 xvld @x[0],$inp,32*8
1278 xvld @x[1],$inp,32*9
1279 xvxor.v @x[0],@x[0],@y[8]
1280 xvxor.v @x[1],@x[1],@y[9]
1281 xvst @x[0],$out,32*8
1282 xvst @x[1],$out,32*9
1295 xvld @x[0],$inp,32*0
1296 xvld @x[1],$inp,32*1
1297 xvld @x[2],$inp,32*2
1298 xvld @x[3],$inp,32*3
1299 xvxor.v @x[0],@x[0],@y[0]
1300 xvxor.v @x[1],@x[1],@y[1]
1301 xvxor.v @x[2],@x[2],@y[2]
1302 xvxor.v @x[3],@x[3],@y[3]
1303 xvst @x[0],$out,32*0
1304 xvst @x[1],$out,32*1
1305 xvst @x[2],$out,32*2
1306 xvst @x[3],$out,32*3
1308 xvld @x[0],$inp,32*4
1309 xvld @x[1],$inp,32*5
1310 xvld @x[2],$inp,32*6
1311 xvld @x[3],$inp,32*7
1312 xvxor.v @x[0],@x[0],@y[4]
1313 xvxor.v @x[1],@x[1],@y[5]
1314 xvxor.v @x[2],@x[2],@y[6]
1315 xvxor.v @x[3],@x[3],@y[7]
1316 xvst @x[0],$out,32*4
1317 xvst @x[1],$out,32*5
1318 xvst @x[2],$out,32*6
1319 xvst @x[3],$out,32*7
1321 xvld @x[0],$inp,32*8
1322 xvld @x[1],$inp,32*9
1323 xvld @x[2],$inp,32*10
1324 xvld @x[3],$inp,32*11
1325 xvxor.v @x[0],@x[0],@y[8]
1326 xvxor.v @x[1],@x[1],@y[9]
1327 xvxor.v @x[2],@x[2],@y[10]
1328 xvxor.v @x[3],@x[3],@y[11]
1329 xvst @x[0],$out,32*8
1330 xvst @x[1],$out,32*9
1331 xvst @x[2],$out,32*10
1332 xvst @x[3],$out,32*11
1345 xvld @x[0],$inp,32*0
1346 xvld @x[1],$inp,32*1
1347 xvld @x[2],$inp,32*2
1348 xvld @x[3],$inp,32*3
1349 xvxor.v @x[0],@x[0],@y[0]
1350 xvxor.v @x[1],@x[1],@y[1]
1351 xvxor.v @x[2],@x[2],@y[2]
1352 xvxor.v @x[3],@x[3],@y[3]
1353 xvst @x[0],$out,32*0
1354 xvst @x[1],$out,32*1
1355 xvst @x[2],$out,32*2
1356 xvst @x[3],$out,32*3
1358 xvld @x[0],$inp,32*4
1359 xvld @x[1],$inp,32*5
1360 xvld @x[2],$inp,32*6
1361 xvld @x[3],$inp,32*7
1362 xvxor.v @x[0],@x[0],@y[4]
1363 xvxor.v @x[1],@x[1],@y[5]
1364 xvxor.v @x[2],@x[2],@y[6]
1365 xvxor.v @x[3],@x[3],@y[7]
1366 xvst @x[0],$out,32*4
1367 xvst @x[1],$out,32*5
1368 xvst @x[2],$out,32*6
1369 xvst @x[3],$out,32*7
1371 xvld @x[0],$inp,32*8
1372 xvld @x[1],$inp,32*9
1373 xvld @x[2],$inp,32*10
1374 xvld @x[3],$inp,32*11
1375 xvxor.v @x[0],@x[0],@y[8]
1376 xvxor.v @x[1],@x[1],@y[9]
1377 xvxor.v @x[2],@x[2],@y[10]
1378 xvxor.v @x[3],@x[3],@y[11]
1379 xvst @x[0],$out,32*8
1380 xvst @x[1],$out,32*9
1381 xvst @x[2],$out,32*10
1382 xvst @x[3],$out,32*11
1384 xvld @x[0],$inp,32*12
1385 xvld @x[1],$inp,32*13
1386 xvxor.v @x[0],@x[0],@y[12]
1387 xvxor.v @x[1],@x[1],@y[13]
1388 xvst @x[0],$out,32*12
1389 xvst @x[1],$out,32*13