Lines matching refs:x — every line that references the @x state array, covering the scalar, LSX and LASX ChaCha code paths.
17 my ($t0,$t1,$t2,$t3,$t4,$t5,$t6,$t7,$t8,$x)=map("\$r$_",(12..21));
104 my @x = ($t0, $t1, $t2, $t3, $t4, $t5, $t6, $t7,
114 add.w @x[$a0],@x[$a0],@x[$b0]
115 xor @x[$d0],@x[$d0],@x[$a0]
116 rotri.w @x[$d0],@x[$d0],16 # rotate left 16 bits
117 add.w @x[$a1],@x[$a1],@x[$b1]
118 xor @x[$d1],@x[$d1],@x[$a1]
119 rotri.w @x[$d1],@x[$d1],16
121 add.w @x[$c0],@x[$c0],@x[$d0]
122 xor @x[$b0],@x[$b0],@x[$c0]
123 rotri.w @x[$b0],@x[$b0],20 # rotate left 12 bits
124 add.w @x[$c1],@x[$c1],@x[$d1]
125 xor @x[$b1],@x[$b1],@x[$c1]
126 rotri.w @x[$b1],@x[$b1],20
128 add.w @x[$a0],@x[$a0],@x[$b0]
129 xor @x[$d0],@x[$d0],@x[$a0]
130 rotri.w @x[$d0],@x[$d0],24 # rotate left 8 bits
131 add.w @x[$a1],@x[$a1],@x[$b1]
132 xor @x[$d1],@x[$d1],@x[$a1]
133 rotri.w @x[$d1],@x[$d1],24
135 add.w @x[$c0],@x[$c0],@x[$d0]
136 xor @x[$b0],@x[$b0],@x[$c0]
137 rotri.w @x[$b0],@x[$b0],25 # rotate left 7 bits
138 add.w @x[$c1],@x[$c1],@x[$d1]
139 xor @x[$b1],@x[$b1],@x[$c1]
140 rotri.w @x[$b1],@x[$b1],25
142 add.w @x[$a2],@x[$a2],@x[$b2]
143 xor @x[$d2],@x[$d2],@x[$a2]
144 rotri.w @x[$d2],@x[$d2],16
145 add.w @x[$a3],@x[$a3],@x[$b3]
146 xor @x[$d3],@x[$d3],@x[$a3]
147 rotri.w @x[$d3],@x[$d3],16
149 add.w @x[$c2],@x[$c2],@x[$d2]
150 xor @x[$b2],@x[$b2],@x[$c2]
151 rotri.w @x[$b2],@x[$b2],20
152 add.w @x[$c3],@x[$c3],@x[$d3]
153 xor @x[$b3],@x[$b3],@x[$c3]
154 rotri.w @x[$b3],@x[$b3],20
156 add.w @x[$a2],@x[$a2],@x[$b2]
157 xor @x[$d2],@x[$d2],@x[$a2]
158 rotri.w @x[$d2],@x[$d2],24
159 add.w @x[$a3],@x[$a3],@x[$b3]
160 xor @x[$d3],@x[$d3],@x[$a3]
161 rotri.w @x[$d3],@x[$d3],24
163 add.w @x[$c2],@x[$c2],@x[$d2]
164 xor @x[$b2],@x[$b2],@x[$c2]
165 rotri.w @x[$b2],@x[$b2],25
166 add.w @x[$c3],@x[$c3],@x[$d3]
167 xor @x[$b3],@x[$b3],@x[$c3]
168 rotri.w @x[$b3],@x[$b3],25
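
Lines 114-168 are four ChaCha quarter-rounds, computed two at a time so that independent adds, xors and rotates can dual-issue; the $a*/$b*/$c*/$d* indices are parameters, so the same sequence serves both the column and the diagonal rounds. LoongArch has no rotate-left instruction, so the left rotations by 16, 12, 8 and 7 are emitted as rotri.w right rotations by 16, 20, 24 and 25, which is what the comments are pointing out. For reference, the quarter-round in plain C (per RFC 8439):

    #include <stdint.h>

    /* Rotate a 32-bit word left by n; the assembly above gets the same
     * effect with rotri.w (rotate right) by 32-n. */
    static inline uint32_t rotl32(uint32_t v, int n)
    {
        return (v << n) | (v >> (32 - n));
    }

    /* One ChaCha quarter-round on four state words (RFC 8439). */
    static void quarter_round(uint32_t *a, uint32_t *b, uint32_t *c, uint32_t *d)
    {
        *a += *b; *d ^= *a; *d = rotl32(*d, 16);
        *c += *d; *b ^= *c; *b = rotl32(*b, 12);
        *a += *b; *d ^= *a; *d = rotl32(*d, 8);
        *c += *d; *b ^= *c; *b = rotl32(*b, 7);
    }
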
195 ld.w @x[0],$t8,0 # 'expa'
196 ld.w @x[1],$t8,4 # 'nd 3'
197 ld.w @x[2],$t8,8 # '2-by'
198 ld.w @x[3],$t8,12 # 'te k'
201 ld.w @x[4],$key,4*0
202 ld.w @x[5],$key,4*1
203 ld.w @x[6],$key,4*2
204 ld.w @x[7],$key,4*3
205 ld.w @x[8],$key,4*4
206 ld.w @x[9],$key,4*5
207 ld.w @x[10],$key,4*6
208 ld.w @x[11],$key,4*7
211 move @x[12],$s8
214 ld.w @x[13],$counter,4*1
215 ld.w @x[14],$counter,4*2
216 ld.w @x[15],$counter,4*3
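
Lines 195-216 build the 16-word ChaCha state: @x[0..3] get the "expand 32-byte k" constants from the table at $t8, @x[4..11] the eight key words, @x[12] the 32-bit block counter kept in $s8, and @x[13..15] the remaining words of the counter/IV buffer. A minimal C sketch of that layout (RFC 8439 ordering; it assumes a little-endian host, which LoongArch is):

    #include <stdint.h>
    #include <string.h>

    /* Illustrative only: the assembly keeps the low counter word in a
     * register ($s8, line 211) instead of reloading it from memory. */
    static void chacha_state_init(uint32_t x[16], const uint8_t key[32],
                                  const uint32_t counter_iv[4])
    {
        static const uint32_t sigma[4] = {      /* "expa" "nd 3" "2-by" "te k" */
            0x61707865, 0x3320646e, 0x79622d32, 0x6b206574
        };
        memcpy(x,      sigma,      sizeof(sigma)); /* x[0..3]  constants       */
        memcpy(x + 4,  key,        32);            /* x[4..11] 256-bit key     */
        memcpy(x + 12, counter_iv, 16);            /* x[12..15] counter + IV   */
    }
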
238 add.w @x[0],@x[0],$a7
239 add.w @x[1],@x[1],$a6
240 add.w @x[2],@x[2],$a5
242 add.w @x[3],@x[3],$a7
248 add.w @x[4],@x[4],$t8
249 add.w @x[5],@x[5],$a7
250 add.w @x[6],@x[6],$a6
251 add.w @x[7],@x[7],$a5
257 add.w @x[8],@x[8],$t8
258 add.w @x[9],@x[9],$a7
259 add.w @x[10],@x[10],$a6
260 add.w @x[11],@x[11],$a5
262 add.w @x[12],@x[12],$s8
267 add.w @x[13],@x[13],$t8
268 add.w @x[14],@x[14],$a7
269 add.w @x[15],@x[15],$a6
279 xor $t8,$t8,@x[0]
280 xor $a7,$a7,@x[1]
281 xor $a6,$a6,@x[2]
282 xor $a5,$a5,@x[3]
292 xor $t8,$t8,@x[4]
293 xor $a7,$a7,@x[5]
294 xor $a6,$a6,@x[6]
295 xor $a5,$a5,@x[7]
305 xor $t8,$t8,@x[8]
306 xor $a7,$a7,@x[9]
307 xor $a6,$a6,@x[10]
308 xor $a5,$a5,@x[11]
318 xor $t8,$t8,@x[12]
319 xor $a7,$a7,@x[13]
320 xor $a6,$a6,@x[14]
321 xor $a5,$a5,@x[15]
338 st.w @x[0],$a7,4*0
339 st.w @x[1],$a7,4*1
340 st.w @x[2],$a7,4*2
341 st.w @x[3],$a7,4*3
342 st.w @x[4],$a7,4*4
343 st.w @x[5],$a7,4*5
344 st.w @x[6],$a7,4*6
345 st.w @x[7],$a7,4*7
346 st.w @x[8],$a7,4*8
347 st.w @x[9],$a7,4*9
348 st.w @x[10],$a7,4*10
349 st.w @x[11],$a7,4*11
350 st.w @x[12],$a7,4*12
351 st.w @x[13],$a7,4*13
352 st.w @x[14],$a7,4*14
353 st.w @x[15],$a7,4*15
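
Lines 238-269 add the initial state back into the post-round words (the original words arrive a few at a time in scratch registers, loaded by lines that do not reference @x and so are not shown here); lines 279-321 XOR the resulting keystream with message words held in those same scratch registers; and lines 338-353 spill the whole 64-byte keystream block to a scratch buffer, evidently for the short-tail path that finishes byte by byte. A hedged C sketch of the full-block finalization:

    #include <stdint.h>
    #include <stddef.h>
    #include <string.h>

    /* x is the state after the rounds, x0 the saved initial state.
     * Assumes a whole 64-byte block and little-endian byte order. */
    static void chacha_finish_block(uint8_t *out, const uint8_t *in,
                                    uint32_t x[16], const uint32_t x0[16])
    {
        for (size_t i = 0; i < 16; i++) {
            uint32_t m;
            x[i] += x0[i];               /* keystream word */
            memcpy(&m, in + 4 * i, 4);   /* message word   */
            m ^= x[i];
            memcpy(out + 4 * i, &m, 4);
        }
    }
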
389 my @x = ($vr0, $vr1, $vr2, $vr3, $vr4, $vr5, $vr6, $vr7,
403 vadd.w @x[$a0],@x[$a0],@x[$b0]
404 vxor.v @x[$d0],@x[$d0],@x[$a0]
405 vrotri.w @x[$d0],@x[$d0],16 # rotate left 16 bits
406 vadd.w @x[$a1],@x[$a1],@x[$b1]
407 vxor.v @x[$d1],@x[$d1],@x[$a1]
408 vrotri.w @x[$d1],@x[$d1],16
410 vadd.w @x[$c0],@x[$c0],@x[$d0]
411 vxor.v @x[$b0],@x[$b0],@x[$c0]
412 vrotri.w @x[$b0],@x[$b0],20 # rotate left 12 bits
413 vadd.w @x[$c1],@x[$c1],@x[$d1]
414 vxor.v @x[$b1],@x[$b1],@x[$c1]
415 vrotri.w @x[$b1],@x[$b1],20
417 vadd.w @x[$a0],@x[$a0],@x[$b0]
418 vxor.v @x[$d0],@x[$d0],@x[$a0]
419 vrotri.w @x[$d0],@x[$d0],24 # rotate left 8 bits
420 vadd.w @x[$a1],@x[$a1],@x[$b1]
421 vxor.v @x[$d1],@x[$d1],@x[$a1]
422 vrotri.w @x[$d1],@x[$d1],24
424 vadd.w @x[$c0],@x[$c0],@x[$d0]
425 vxor.v @x[$b0],@x[$b0],@x[$c0]
426 vrotri.w @x[$b0],@x[$b0],25 # rotate left 7 bits
427 vadd.w @x[$c1],@x[$c1],@x[$d1]
428 vxor.v @x[$b1],@x[$b1],@x[$c1]
429 vrotri.w @x[$b1],@x[$b1],25
431 vadd.w @x[$a2],@x[$a2],@x[$b2]
432 vxor.v @x[$d2],@x[$d2],@x[$a2]
433 vrotri.w @x[$d2],@x[$d2],16
434 vadd.w @x[$a3],@x[$a3],@x[$b3]
435 vxor.v @x[$d3],@x[$d3],@x[$a3]
436 vrotri.w @x[$d3],@x[$d3],16
438 vadd.w @x[$c2],@x[$c2],@x[$d2]
439 vxor.v @x[$b2],@x[$b2],@x[$c2]
440 vrotri.w @x[$b2],@x[$b2],20
441 vadd.w @x[$c3],@x[$c3],@x[$d3]
442 vxor.v @x[$b3],@x[$b3],@x[$c3]
443 vrotri.w @x[$b3],@x[$b3],20
445 vadd.w @x[$a2],@x[$a2],@x[$b2]
446 vxor.v @x[$d2],@x[$d2],@x[$a2]
447 vrotri.w @x[$d2],@x[$d2],24
448 vadd.w @x[$a3],@x[$a3],@x[$b3]
449 vxor.v @x[$d3],@x[$d3],@x[$a3]
450 vrotri.w @x[$d3],@x[$d3],24
452 vadd.w @x[$c2],@x[$c2],@x[$d2]
453 vxor.v @x[$b2],@x[$b2],@x[$c2]
454 vrotri.w @x[$b2],@x[$b2],25
455 vadd.w @x[$c3],@x[$c3],@x[$d3]
456 vxor.v @x[$b3],@x[$b3],@x[$c3]
457 vrotri.w @x[$b3],@x[$b3],25
475 vldrepl.w @x[0],$t8,4*0 # 'expa'
476 vldrepl.w @x[1],$t8,4*1 # 'nd 3'
477 vldrepl.w @x[2],$t8,4*2 # '2-by'
478 vldrepl.w @x[3],$t8,4*3 # 'te k'
481 vldrepl.w @x[4],$key,4*0
482 vldrepl.w @x[5],$key,4*1
483 vldrepl.w @x[6],$key,4*2
484 vldrepl.w @x[7],$key,4*3
485 vldrepl.w @x[8],$key,4*4
486 vldrepl.w @x[9],$key,4*5
487 vldrepl.w @x[10],$key,4*6
488 vldrepl.w @x[11],$key,4*7
491 vreplgr2vr.w @x[12],$t4
494 vldrepl.w @x[13],$counter,4*1
495 vldrepl.w @x[14],$counter,4*2
496 vldrepl.w @x[15],$counter,4*3
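
Lines 475-496 set up the same state for the LSX path, but word-sliced: vldrepl.w loads one 32-bit word from memory and replicates it into all four lanes of a 128-bit vr register, and vreplgr2vr.w does the same for the counter held in a general register, so register i holds state word i of four blocks at once and the quarter-round code above runs lane-wise on four blocks per pass. A scalar model of one lane-wise step under that layout:

    #include <stdint.h>

    #define LANES 4   /* one 128-bit LSX register = 4 x 32-bit lanes */

    /* x[w][l] models state word w of block l; each row maps onto one vr
     * register. One "a += b; d ^= a; d <<<= 16" step applied to all lanes. */
    static void lanewise_step16(uint32_t a[LANES], uint32_t b[LANES],
                                uint32_t d[LANES])
    {
        for (int l = 0; l < LANES; l++) {
            a[l] += b[l];
            d[l] ^= a[l];
            d[l] = (d[l] << 16) | (d[l] >> 16);   /* vrotri.w ...,16 */
        }
    }
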
501 vadd.w @x[12],@x[12],@y[0]
504 vori.b @y[0],@x[0],0
505 vori.b @y[1],@x[1],0
506 vori.b @y[2],@x[2],0
507 vori.b @y[3],@x[3],0
508 vori.b @y[4],@x[4],0
509 vori.b @y[5],@x[5],0
510 vori.b @y[6],@x[6],0
511 vori.b @y[7],@x[7],0
512 vori.b @y[8],@x[8],0
513 vori.b @y[9],@x[9],0
514 vori.b @y[10],@x[10],0
515 vori.b @y[11],@x[11],0
516 vori.b @y[12],@x[12],0
517 vori.b @y[13],@x[13],0
518 vori.b @y[14],@x[14],0
519 vori.b @y[15],@x[15],0
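
Line 501 adds a per-lane offset vector to the replicated block counter so the four lanes produce consecutive blocks; the offsets ({0,1,2,3} is the natural choice) live in @y[0], which is set up by lines outside this listing, so treat that as an assumption. Lines 504-519 then snapshot the pre-round state with vori.b rd,rs,0, which is simply a vector move. A sketch of both steps:

    #include <stdint.h>
    #include <string.h>

    /* x[w][l]: word w of block l. Assumes per-lane counter offsets 0..3;
     * the actual offset vector is initialized outside this listing. */
    static void lsx_setup_lanes(uint32_t x[16][4], uint32_t saved[16][4])
    {
        for (int l = 0; l < 4; l++)
            x[12][l] += (uint32_t)l;               /* per-lane block counter */
        memcpy(saved, x, sizeof(uint32_t[16][4])); /* vori.b copies, kept for
                                                      the add-back after the
                                                      rounds */
    }
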
537 vadd.w @x[0],@x[0],@y[0]
538 vadd.w @x[1],@x[1],@y[1]
539 vadd.w @x[2],@x[2],@y[2]
540 vadd.w @x[3],@x[3],@y[3]
541 vadd.w @x[4],@x[4],@y[4]
542 vadd.w @x[5],@x[5],@y[5]
543 vadd.w @x[6],@x[6],@y[6]
544 vadd.w @x[7],@x[7],@y[7]
545 vadd.w @x[8],@x[8],@y[8]
546 vadd.w @x[9],@x[9],@y[9]
547 vadd.w @x[10],@x[10],@y[10]
548 vadd.w @x[11],@x[11],@y[11]
549 vadd.w @x[12],@x[12],@y[12]
550 vadd.w @x[13],@x[13],@y[13]
551 vadd.w @x[14],@x[14],@y[14]
552 vadd.w @x[15],@x[15],@y[15]
555 vilvl.w @y[0],@x[1],@x[0]
556 vilvh.w @y[1],@x[1],@x[0]
557 vilvl.w @y[2],@x[3],@x[2]
558 vilvh.w @y[3],@x[3],@x[2]
559 vilvl.w @y[4],@x[5],@x[4]
560 vilvh.w @y[5],@x[5],@x[4]
561 vilvl.w @y[6],@x[7],@x[6]
562 vilvh.w @y[7],@x[7],@x[6]
563 vilvl.w @y[8],@x[9],@x[8]
564 vilvh.w @y[9],@x[9],@x[8]
565 vilvl.w @y[10],@x[11],@x[10]
566 vilvh.w @y[11],@x[11],@x[10]
567 vilvl.w @y[12],@x[13],@x[12]
568 vilvh.w @y[13],@x[13],@x[12]
569 vilvl.w @y[14],@x[15],@x[14]
570 vilvh.w @y[15],@x[15],@x[14]
572 vilvl.d @x[0],@y[2],@y[0]
573 vilvh.d @x[1],@y[2],@y[0]
574 vilvl.d @x[2],@y[3],@y[1]
575 vilvh.d @x[3],@y[3],@y[1]
576 vilvl.d @x[4],@y[6],@y[4]
577 vilvh.d @x[5],@y[6],@y[4]
578 vilvl.d @x[6],@y[7],@y[5]
579 vilvh.d @x[7],@y[7],@y[5]
580 vilvl.d @x[8],@y[10],@y[8]
581 vilvh.d @x[9],@y[10],@y[8]
582 vilvl.d @x[10],@y[11],@y[9]
583 vilvh.d @x[11],@y[11],@y[9]
584 vilvl.d @x[12],@y[14],@y[12]
585 vilvh.d @x[13],@y[14],@y[12]
586 vilvl.d @x[14],@y[15],@y[13]
587 vilvh.d @x[15],@y[15],@y[13]
591 @x = (@x[0],@x[4],@x[8],@x[12],@x[1],@x[5],@x[9],@x[13],
592 @x[2],@x[6],@x[10],@x[14],@x[3],@x[7],@x[11],@x[15]);
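
Lines 537-552 add the saved state back. At that point register i still holds word i of all four blocks, so lines 555-587 transpose back to block order with 32-bit interleaves (vilvl.w/vilvh.w) followed by 64-bit interleaves (vilvl.d/vilvh.d); the Perl-level renaming at 591-592 then just relabels the array so that @x[0..3] name the four quarters of the first block, @x[4..7] the second, and so on. The same interleave pattern written out as a plain 4x4 transpose of 32-bit words:

    #include <stdint.h>
    #include <string.h>

    /* Transpose a 4x4 block of 32-bit words the way the vilvl/vilvh
     * sequence does; afterwards x0..x3 hold what were the columns. */
    static void transpose4x4(uint32_t x0[4], uint32_t x1[4],
                             uint32_t x2[4], uint32_t x3[4])
    {
        uint32_t y0[4] = { x0[0], x1[0], x0[1], x1[1] };  /* vilvl.w y0,x1,x0 */
        uint32_t y1[4] = { x0[2], x1[2], x0[3], x1[3] };  /* vilvh.w y1,x1,x0 */
        uint32_t y2[4] = { x2[0], x3[0], x2[1], x3[1] };  /* vilvl.w y2,x3,x2 */
        uint32_t y3[4] = { x2[2], x3[2], x2[3], x3[3] };  /* vilvh.w y3,x3,x2 */

        uint32_t r0[4] = { y0[0], y0[1], y2[0], y2[1] };  /* vilvl.d x0,y2,y0 */
        uint32_t r1[4] = { y0[2], y0[3], y2[2], y2[3] };  /* vilvh.d x1,y2,y0 */
        uint32_t r2[4] = { y1[0], y1[1], y3[0], y3[1] };  /* vilvl.d x2,y3,y1 */
        uint32_t r3[4] = { y1[2], y1[3], y3[2], y3[3] };  /* vilvh.d x3,y3,y1 */

        memcpy(x0, r0, sizeof r0); memcpy(x1, r1, sizeof r1);
        memcpy(x2, r2, sizeof r2); memcpy(x3, r3, sizeof r3);
    }
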
603 vxor.v @y[0],@y[0],@x[0]
604 vxor.v @y[1],@y[1],@x[1]
605 vxor.v @y[2],@y[2],@x[2]
606 vxor.v @y[3],@y[3],@x[3]
616 vxor.v @y[0],@y[0],@x[4]
617 vxor.v @y[1],@y[1],@x[5]
618 vxor.v @y[2],@y[2],@x[6]
619 vxor.v @y[3],@y[3],@x[7]
629 vxor.v @y[0],@y[0],@x[8]
630 vxor.v @y[1],@y[1],@x[9]
631 vxor.v @y[2],@y[2],@x[10]
632 vxor.v @y[3],@y[3],@x[11]
642 vxor.v @y[0],@y[0],@x[12]
643 vxor.v @y[1],@y[1],@x[13]
644 vxor.v @y[2],@y[2],@x[14]
645 vxor.v @y[3],@y[3],@x[15]
667 vst @x[0],$sp,16*0
668 vst @x[1],$sp,16*1
669 vst @x[2],$sp,16*2
670 vst @x[3],$sp,16*3
680 vxor.v @y[0],@y[0],@x[0]
681 vxor.v @y[1],@y[1],@x[1]
682 vxor.v @y[2],@y[2],@x[2]
683 vxor.v @y[3],@y[3],@x[3]
693 vst @x[4],$sp,16*0
694 vst @x[5],$sp,16*1
695 vst @x[6],$sp,16*2
696 vst @x[7],$sp,16*3
706 vxor.v @y[0],@y[0],@x[0]
707 vxor.v @y[1],@y[1],@x[1]
708 vxor.v @y[2],@y[2],@x[2]
709 vxor.v @y[3],@y[3],@x[3]
719 vxor.v @y[0],@y[0],@x[4]
720 vxor.v @y[1],@y[1],@x[5]
721 vxor.v @y[2],@y[2],@x[6]
722 vxor.v @y[3],@y[3],@x[7]
732 vst @x[8],$sp,16*0
733 vst @x[9],$sp,16*1
734 vst @x[10],$sp,16*2
735 vst @x[11],$sp,16*3
745 vxor.v @y[0],@y[0],@x[0]
746 vxor.v @y[1],@y[1],@x[1]
747 vxor.v @y[2],@y[2],@x[2]
748 vxor.v @y[3],@y[3],@x[3]
758 vxor.v @y[0],@y[0],@x[4]
759 vxor.v @y[1],@y[1],@x[5]
760 vxor.v @y[2],@y[2],@x[6]
761 vxor.v @y[3],@y[3],@x[7]
771 vxor.v @y[0],@y[0],@x[8]
772 vxor.v @y[1],@y[1],@x[9]
773 vxor.v @y[2],@y[2],@x[10]
774 vxor.v @y[3],@y[3],@x[11]
784 vst @x[12],$sp,16*0
785 vst @x[13],$sp,16*1
786 vst @x[14],$sp,16*2
787 vst @x[15],$sp,16*3
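
In this stretch the vxor.v groups (603-645, 680-683, 706-722, 745-774) appear to handle whole remaining 64-byte blocks, while the vst groups that write @x[...] to $sp (667-670, 693-696, 732-735, 784-787) park the keystream on the stack for a tail shorter than a full block; the byte-granular copy/XOR that consumes that buffer works on scratch registers only, so it does not show up in a refs:x listing. The tail idea in C:

    #include <stdint.h>
    #include <stddef.h>

    /* Sketch: spill the keystream to a scratch buffer (the vst ...,$sp
     * stores), then XOR the last 'len' (< 64) bytes one at a time. */
    static void xor_tail(uint8_t *out, const uint8_t *in, size_t len,
                         const uint8_t keystream[64])
    {
        for (size_t i = 0; i < len; i++)
            out[i] = in[i] ^ keystream[i];
    }
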
813 my @x = ($xr0, $xr1, $xr2, $xr3, $xr4, $xr5, $xr6, $xr7,
827 xvadd.w @x[$a0],@x[$a0],@x[$b0]
828 xvxor.v @x[$d0],@x[$d0],@x[$a0]
829 xvrotri.w @x[$d0],@x[$d0],16 # rotate left 16 bits
830 xvadd.w @x[$a1],@x[$a1],@x[$b1]
831 xvxor.v @x[$d1],@x[$d1],@x[$a1]
832 xvrotri.w @x[$d1],@x[$d1],16
834 xvadd.w @x[$c0],@x[$c0],@x[$d0]
835 xvxor.v @x[$b0],@x[$b0],@x[$c0]
836 xvrotri.w @x[$b0],@x[$b0],20 # rotate left 12 bits
837 xvadd.w @x[$c1],@x[$c1],@x[$d1]
838 xvxor.v @x[$b1],@x[$b1],@x[$c1]
839 xvrotri.w @x[$b1],@x[$b1],20
841 xvadd.w @x[$a0],@x[$a0],@x[$b0]
842 xvxor.v @x[$d0],@x[$d0],@x[$a0]
843 xvrotri.w @x[$d0],@x[$d0],24 # rotate left 8 bits
844 xvadd.w @x[$a1],@x[$a1],@x[$b1]
845 xvxor.v @x[$d1],@x[$d1],@x[$a1]
846 xvrotri.w @x[$d1],@x[$d1],24
848 xvadd.w @x[$c0],@x[$c0],@x[$d0]
849 xvxor.v @x[$b0],@x[$b0],@x[$c0]
850 xvrotri.w @x[$b0],@x[$b0],25 # rotate left 7 bits
851 xvadd.w @x[$c1],@x[$c1],@x[$d1]
852 xvxor.v @x[$b1],@x[$b1],@x[$c1]
853 xvrotri.w @x[$b1],@x[$b1],25
855 xvadd.w @x[$a2],@x[$a2],@x[$b2]
856 xvxor.v @x[$d2],@x[$d2],@x[$a2]
857 xvrotri.w @x[$d2],@x[$d2],16
858 xvadd.w @x[$a3],@x[$a3],@x[$b3]
859 xvxor.v @x[$d3],@x[$d3],@x[$a3]
860 xvrotri.w @x[$d3],@x[$d3],16
862 xvadd.w @x[$c2],@x[$c2],@x[$d2]
863 xvxor.v @x[$b2],@x[$b2],@x[$c2]
864 xvrotri.w @x[$b2],@x[$b2],20
865 xvadd.w @x[$c3],@x[$c3],@x[$d3]
866 xvxor.v @x[$b3],@x[$b3],@x[$c3]
867 xvrotri.w @x[$b3],@x[$b3],20
869 xvadd.w @x[$a2],@x[$a2],@x[$b2]
870 xvxor.v @x[$d2],@x[$d2],@x[$a2]
871 xvrotri.w @x[$d2],@x[$d2],24
872 xvadd.w @x[$a3],@x[$a3],@x[$b3]
873 xvxor.v @x[$d3],@x[$d3],@x[$a3]
874 xvrotri.w @x[$d3],@x[$d3],24
876 xvadd.w @x[$c2],@x[$c2],@x[$d2]
877 xvxor.v @x[$b2],@x[$b2],@x[$c2]
878 xvrotri.w @x[$b2],@x[$b2],25
879 xvadd.w @x[$c3],@x[$c3],@x[$d3]
880 xvxor.v @x[$b3],@x[$b3],@x[$c3]
881 xvrotri.w @x[$b3],@x[$b3],25
899 xvldrepl.w @x[0],$t8,4*0 # 'expa'
900 xvldrepl.w @x[1],$t8,4*1 # 'nd 3'
901 xvldrepl.w @x[2],$t8,4*2 # '2-by'
902 xvldrepl.w @x[3],$t8,4*3 # 'te k'
905 xvldrepl.w @x[4],$key,4*0
906 xvldrepl.w @x[5],$key,4*1
907 xvldrepl.w @x[6],$key,4*2
908 xvldrepl.w @x[7],$key,4*3
909 xvldrepl.w @x[8],$key,4*4
910 xvldrepl.w @x[9],$key,4*5
911 xvldrepl.w @x[10],$key,4*6
912 xvldrepl.w @x[11],$key,4*7
915 xvreplgr2vr.w @x[12],$t4
918 xvldrepl.w @x[13],$counter,4*1
919 xvldrepl.w @x[14],$counter,4*2
920 xvldrepl.w @x[15],$counter,4*3
925 xvadd.w @x[12],@x[12],@y[0]
928 xvori.b @y[0],@x[0],0
929 xvori.b @y[1],@x[1],0
930 xvori.b @y[2],@x[2],0
931 xvori.b @y[3],@x[3],0
932 xvori.b @y[4],@x[4],0
933 xvori.b @y[5],@x[5],0
934 xvori.b @y[6],@x[6],0
935 xvori.b @y[7],@x[7],0
936 xvori.b @y[8],@x[8],0
937 xvori.b @y[9],@x[9],0
938 xvori.b @y[10],@x[10],0
939 xvori.b @y[11],@x[11],0
940 xvori.b @y[12],@x[12],0
941 xvori.b @y[13],@x[13],0
942 xvori.b @y[14],@x[14],0
943 xvori.b @y[15],@x[15],0
961 xvadd.w @x[0],@x[0],@y[0]
962 xvadd.w @x[1],@x[1],@y[1]
963 xvadd.w @x[2],@x[2],@y[2]
964 xvadd.w @x[3],@x[3],@y[3]
965 xvadd.w @x[4],@x[4],@y[4]
966 xvadd.w @x[5],@x[5],@y[5]
967 xvadd.w @x[6],@x[6],@y[6]
968 xvadd.w @x[7],@x[7],@y[7]
969 xvadd.w @x[8],@x[8],@y[8]
970 xvadd.w @x[9],@x[9],@y[9]
971 xvadd.w @x[10],@x[10],@y[10]
972 xvadd.w @x[11],@x[11],@y[11]
973 xvadd.w @x[12],@x[12],@y[12]
974 xvadd.w @x[13],@x[13],@y[13]
975 xvadd.w @x[14],@x[14],@y[14]
976 xvadd.w @x[15],@x[15],@y[15]
979 xvilvl.w @y[0],@x[1],@x[0]
980 xvilvh.w @y[1],@x[1],@x[0]
981 xvilvl.w @y[2],@x[3],@x[2]
982 xvilvh.w @y[3],@x[3],@x[2]
983 xvilvl.w @y[4],@x[5],@x[4]
984 xvilvh.w @y[5],@x[5],@x[4]
985 xvilvl.w @y[6],@x[7],@x[6]
986 xvilvh.w @y[7],@x[7],@x[6]
987 xvilvl.w @y[8],@x[9],@x[8]
988 xvilvh.w @y[9],@x[9],@x[8]
989 xvilvl.w @y[10],@x[11],@x[10]
990 xvilvh.w @y[11],@x[11],@x[10]
991 xvilvl.w @y[12],@x[13],@x[12]
992 xvilvh.w @y[13],@x[13],@x[12]
993 xvilvl.w @y[14],@x[15],@x[14]
994 xvilvh.w @y[15],@x[15],@x[14]
996 xvilvl.d @x[0],@y[2],@y[0]
997 xvilvh.d @x[1],@y[2],@y[0]
998 xvilvl.d @x[2],@y[3],@y[1]
999 xvilvh.d @x[3],@y[3],@y[1]
1000 xvilvl.d @x[4],@y[6],@y[4]
1001 xvilvh.d @x[5],@y[6],@y[4]
1002 xvilvl.d @x[6],@y[7],@y[5]
1003 xvilvh.d @x[7],@y[7],@y[5]
1004 xvilvl.d @x[8],@y[10],@y[8]
1005 xvilvh.d @x[9],@y[10],@y[8]
1006 xvilvl.d @x[10],@y[11],@y[9]
1007 xvilvh.d @x[11],@y[11],@y[9]
1008 xvilvl.d @x[12],@y[14],@y[12]
1009 xvilvh.d @x[13],@y[14],@y[12]
1010 xvilvl.d @x[14],@y[15],@y[13]
1011 xvilvh.d @x[15],@y[15],@y[13]
1013 xvori.b @y[0],@x[4],0
1014 xvpermi.q @y[0],@x[0],0x20
1015 xvori.b @y[1],@x[5],0
1016 xvpermi.q @y[1],@x[1],0x20
1017 xvori.b @y[2],@x[6],0
1018 xvpermi.q @y[2],@x[2],0x20
1019 xvori.b @y[3],@x[7],0
1020 xvpermi.q @y[3],@x[3],0x20
1021 xvori.b @y[4],@x[4],0
1022 xvpermi.q @y[4],@x[0],0x31
1023 xvori.b @y[5],@x[5],0
1024 xvpermi.q @y[5],@x[1],0x31
1025 xvori.b @y[6],@x[6],0
1026 xvpermi.q @y[6],@x[2],0x31
1027 xvori.b @y[7],@x[7],0
1028 xvpermi.q @y[7],@x[3],0x31
1029 xvori.b @y[8],@x[12],0
1030 xvpermi.q @y[8],@x[8],0x20
1031 xvori.b @y[9],@x[13],0
1032 xvpermi.q @y[9],@x[9],0x20
1033 xvori.b @y[10],@x[14],0
1034 xvpermi.q @y[10],@x[10],0x20
1035 xvori.b @y[11],@x[15],0
1036 xvpermi.q @y[11],@x[11],0x20
1037 xvori.b @y[12],@x[12],0
1038 xvpermi.q @y[12],@x[8],0x31
1039 xvori.b @y[13],@x[13],0
1040 xvpermi.q @y[13],@x[9],0x31
1041 xvori.b @y[14],@x[14],0
1042 xvpermi.q @y[14],@x[10],0x31
1043 xvori.b @y[15],@x[15],0
1044 xvpermi.q @y[15],@x[11],0x31
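
Lines 1013-1044 handle a LASX-specific wrinkle: the 32- and 64-bit interleaves above shuffle each 128-bit half of a 256-bit xr register independently, so one more step has to pair up halves across registers before the data can be stored linearly. xvori.b with immediate 0 is a plain register copy, and xvpermi.q then builds a 256-bit value out of one 128-bit half of each of its two operands; the 0x20/0x31 selectors appear to pick the low-half pair and the high-half pair respectively (the same convention as vperm2i128 on x86), though that, and any further reordering done purely on @y registers, cannot be confirmed from a refs:x listing. A rough model of the half-pairing:

    #include <stdint.h>
    #include <string.h>

    /* A 256-bit vector as two 128-bit halves. */
    typedef struct { uint8_t half[2][16]; } v256;

    /* Assumed model of the xvori.b copy + xvpermi.q combine: take the
     * selected half (0 = low, 1 = high) of each input and concatenate.
     * Operand-to-result ordering is an assumption, not from the source. */
    static v256 pair_halves(const v256 *a, const v256 *b, int hi)
    {
        v256 r;
        memcpy(r.half[0], a->half[hi], 16);
        memcpy(r.half[1], b->half[hi], 16);
        return r;
    }
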
1057 xvld @x[0],$inp,32*0
1058 xvld @x[1],$inp,32*1
1059 xvld @x[2],$inp,32*2
1060 xvld @x[3],$inp,32*3
1061 xvxor.v @x[0],@x[0],@y[0]
1062 xvxor.v @x[1],@x[1],@y[1]
1063 xvxor.v @x[2],@x[2],@y[2]
1064 xvxor.v @x[3],@x[3],@y[3]
1065 xvst @x[0],$out,32*0
1066 xvst @x[1],$out,32*1
1067 xvst @x[2],$out,32*2
1068 xvst @x[3],$out,32*3
1070 xvld @x[0],$inp,32*4
1071 xvld @x[1],$inp,32*5
1072 xvld @x[2],$inp,32*6
1073 xvld @x[3],$inp,32*7
1074 xvxor.v @x[0],@x[0],@y[4]
1075 xvxor.v @x[1],@x[1],@y[5]
1076 xvxor.v @x[2],@x[2],@y[6]
1077 xvxor.v @x[3],@x[3],@y[7]
1078 xvst @x[0],$out,32*4
1079 xvst @x[1],$out,32*5
1080 xvst @x[2],$out,32*6
1081 xvst @x[3],$out,32*7
1083 xvld @x[0],$inp,32*8
1084 xvld @x[1],$inp,32*9
1085 xvld @x[2],$inp,32*10
1086 xvld @x[3],$inp,32*11
1087 xvxor.v @x[0],@x[0],@y[8]
1088 xvxor.v @x[1],@x[1],@y[9]
1089 xvxor.v @x[2],@x[2],@y[10]
1090 xvxor.v @x[3],@x[3],@y[11]
1091 xvst @x[0],$out,32*8
1092 xvst @x[1],$out,32*9
1093 xvst @x[2],$out,32*10
1094 xvst @x[3],$out,32*11
1096 xvld @x[0],$inp,32*12
1097 xvld @x[1],$inp,32*13
1098 xvld @x[2],$inp,32*14
1099 xvld @x[3],$inp,32*15
1100 xvxor.v @x[0],@x[0],@y[12]
1101 xvxor.v @x[1],@x[1],@y[13]
1102 xvxor.v @x[2],@x[2],@y[14]
1103 xvxor.v @x[3],@x[3],@y[15]
1104 xvst @x[0],$out,32*12
1105 xvst @x[1],$out,32*13
1106 xvst @x[2],$out,32*14
1107 xvst @x[3],$out,32*15
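
Lines 1057-1107 are the LASX bulk path: with eight 64-byte blocks of keystream sitting in @y[0..15] as sixteen 32-byte chunks, 512 bytes of input are loaded, XORed and stored in one pass. Functionally the whole stretch reduces to:

    #include <stdint.h>
    #include <stddef.h>

    /* 512 bytes (eight ChaCha blocks) per pass of the LASX bulk path. */
    static void xor_512(uint8_t *out, const uint8_t *in,
                        const uint8_t keystream[512])
    {
        for (size_t i = 0; i < 512; i++)
            out[i] = in[i] ^ keystream[i];
    }

The rest of the listing (lines 1140 onward) is the same load/XOR/store pattern applied to shorter leftovers of 2, 4, 6, 8, 10, 12 and 14 vectors (64 to 448 bytes).
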
1140 xvld @x[0],$inp,32*0
1141 xvld @x[1],$inp,32*1
1142 xvxor.v @x[0],@x[0],@y[0]
1143 xvxor.v @x[1],@x[1],@y[1]
1144 xvst @x[0],$out,32*0
1145 xvst @x[1],$out,32*1
1158 xvld @x[0],$inp,32*0
1159 xvld @x[1],$inp,32*1
1160 xvld @x[2],$inp,32*2
1161 xvld @x[3],$inp,32*3
1162 xvxor.v @x[0],@x[0],@y[0]
1163 xvxor.v @x[1],@x[1],@y[1]
1164 xvxor.v @x[2],@x[2],@y[2]
1165 xvxor.v @x[3],@x[3],@y[3]
1166 xvst @x[0],$out,32*0
1167 xvst @x[1],$out,32*1
1168 xvst @x[2],$out,32*2
1169 xvst @x[3],$out,32*3
1182 xvld @x[0],$inp,32*0
1183 xvld @x[1],$inp,32*1
1184 xvld @x[2],$inp,32*2
1185 xvld @x[3],$inp,32*3
1186 xvxor.v @x[0],@x[0],@y[0]
1187 xvxor.v @x[1],@x[1],@y[1]
1188 xvxor.v @x[2],@x[2],@y[2]
1189 xvxor.v @x[3],@x[3],@y[3]
1190 xvst @x[0],$out,32*0
1191 xvst @x[1],$out,32*1
1192 xvst @x[2],$out,32*2
1193 xvst @x[3],$out,32*3
1195 xvld @x[0],$inp,32*4
1196 xvld @x[1],$inp,32*5
1197 xvxor.v @x[0],@x[0],@y[4]
1198 xvxor.v @x[1],@x[1],@y[5]
1199 xvst @x[0],$out,32*4
1200 xvst @x[1],$out,32*5
1213 xvld @x[0],$inp,32*0
1214 xvld @x[1],$inp,32*1
1215 xvld @x[2],$inp,32*2
1216 xvld @x[3],$inp,32*3
1217 xvxor.v @x[0],@x[0],@y[0]
1218 xvxor.v @x[1],@x[1],@y[1]
1219 xvxor.v @x[2],@x[2],@y[2]
1220 xvxor.v @x[3],@x[3],@y[3]
1221 xvst @x[0],$out,32*0
1222 xvst @x[1],$out,32*1
1223 xvst @x[2],$out,32*2
1224 xvst @x[3],$out,32*3
1226 xvld @x[0],$inp,32*4
1227 xvld @x[1],$inp,32*5
1228 xvld @x[2],$inp,32*6
1229 xvld @x[3],$inp,32*7
1230 xvxor.v @x[0],@x[0],@y[4]
1231 xvxor.v @x[1],@x[1],@y[5]
1232 xvxor.v @x[2],@x[2],@y[6]
1233 xvxor.v @x[3],@x[3],@y[7]
1234 xvst @x[0],$out,32*4
1235 xvst @x[1],$out,32*5
1236 xvst @x[2],$out,32*6
1237 xvst @x[3],$out,32*7
1250 xvld @x[0],$inp,32*0
1251 xvld @x[1],$inp,32*1
1252 xvld @x[2],$inp,32*2
1253 xvld @x[3],$inp,32*3
1254 xvxor.v @x[0],@x[0],@y[0]
1255 xvxor.v @x[1],@x[1],@y[1]
1256 xvxor.v @x[2],@x[2],@y[2]
1257 xvxor.v @x[3],@x[3],@y[3]
1258 xvst @x[0],$out,32*0
1259 xvst @x[1],$out,32*1
1260 xvst @x[2],$out,32*2
1261 xvst @x[3],$out,32*3
1263 xvld @x[0],$inp,32*4
1264 xvld @x[1],$inp,32*5
1265 xvld @x[2],$inp,32*6
1266 xvld @x[3],$inp,32*7
1267 xvxor.v @x[0],@x[0],@y[4]
1268 xvxor.v @x[1],@x[1],@y[5]
1269 xvxor.v @x[2],@x[2],@y[6]
1270 xvxor.v @x[3],@x[3],@y[7]
1271 xvst @x[0],$out,32*4
1272 xvst @x[1],$out,32*5
1273 xvst @x[2],$out,32*6
1274 xvst @x[3],$out,32*7
1276 xvld @x[0],$inp,32*8
1277 xvld @x[1],$inp,32*9
1278 xvxor.v @x[0],@x[0],@y[8]
1279 xvxor.v @x[1],@x[1],@y[9]
1280 xvst @x[0],$out,32*8
1281 xvst @x[1],$out,32*9
1294 xvld @x[0],$inp,32*0
1295 xvld @x[1],$inp,32*1
1296 xvld @x[2],$inp,32*2
1297 xvld @x[3],$inp,32*3
1298 xvxor.v @x[0],@x[0],@y[0]
1299 xvxor.v @x[1],@x[1],@y[1]
1300 xvxor.v @x[2],@x[2],@y[2]
1301 xvxor.v @x[3],@x[3],@y[3]
1302 xvst @x[0],$out,32*0
1303 xvst @x[1],$out,32*1
1304 xvst @x[2],$out,32*2
1305 xvst @x[3],$out,32*3
1307 xvld @x[0],$inp,32*4
1308 xvld @x[1],$inp,32*5
1309 xvld @x[2],$inp,32*6
1310 xvld @x[3],$inp,32*7
1311 xvxor.v @x[0],@x[0],@y[4]
1312 xvxor.v @x[1],@x[1],@y[5]
1313 xvxor.v @x[2],@x[2],@y[6]
1314 xvxor.v @x[3],@x[3],@y[7]
1315 xvst @x[0],$out,32*4
1316 xvst @x[1],$out,32*5
1317 xvst @x[2],$out,32*6
1318 xvst @x[3],$out,32*7
1320 xvld @x[0],$inp,32*8
1321 xvld @x[1],$inp,32*9
1322 xvld @x[2],$inp,32*10
1323 xvld @x[3],$inp,32*11
1324 xvxor.v @x[0],@x[0],@y[8]
1325 xvxor.v @x[1],@x[1],@y[9]
1326 xvxor.v @x[2],@x[2],@y[10]
1327 xvxor.v @x[3],@x[3],@y[11]
1328 xvst @x[0],$out,32*8
1329 xvst @x[1],$out,32*9
1330 xvst @x[2],$out,32*10
1331 xvst @x[3],$out,32*11
1344 xvld @x[0],$inp,32*0
1345 xvld @x[1],$inp,32*1
1346 xvld @x[2],$inp,32*2
1347 xvld @x[3],$inp,32*3
1348 xvxor.v @x[0],@x[0],@y[0]
1349 xvxor.v @x[1],@x[1],@y[1]
1350 xvxor.v @x[2],@x[2],@y[2]
1351 xvxor.v @x[3],@x[3],@y[3]
1352 xvst @x[0],$out,32*0
1353 xvst @x[1],$out,32*1
1354 xvst @x[2],$out,32*2
1355 xvst @x[3],$out,32*3
1357 xvld @x[0],$inp,32*4
1358 xvld @x[1],$inp,32*5
1359 xvld @x[2],$inp,32*6
1360 xvld @x[3],$inp,32*7
1361 xvxor.v @x[0],@x[0],@y[4]
1362 xvxor.v @x[1],@x[1],@y[5]
1363 xvxor.v @x[2],@x[2],@y[6]
1364 xvxor.v @x[3],@x[3],@y[7]
1365 xvst @x[0],$out,32*4
1366 xvst @x[1],$out,32*5
1367 xvst @x[2],$out,32*6
1368 xvst @x[3],$out,32*7
1370 xvld @x[0],$inp,32*8
1371 xvld @x[1],$inp,32*9
1372 xvld @x[2],$inp,32*10
1373 xvld @x[3],$inp,32*11
1374 xvxor.v @x[0],@x[0],@y[8]
1375 xvxor.v @x[1],@x[1],@y[9]
1376 xvxor.v @x[2],@x[2],@y[10]
1377 xvxor.v @x[3],@x[3],@y[11]
1378 xvst @x[0],$out,32*8
1379 xvst @x[1],$out,32*9
1380 xvst @x[2],$out,32*10
1381 xvst @x[3],$out,32*11
1383 xvld @x[0],$inp,32*12
1384 xvld @x[1],$inp,32*13
1385 xvxor.v @x[0],@x[0],@y[12]
1386 xvxor.v @x[1],@x[1],@y[13]
1387 xvst @x[0],$out,32*12
1388 xvst @x[1],$out,32*13