Lines Matching refs:y0

225 !  y0 = vis_fand(div,DC3);
229 ! y0 = vis_fpsub32(dtmp0, y0);
230 ! dtmp0 = div0 * y0;
232 ! y0 *= dtmp0;
233 ! dtmp1 = div0 * y0;
235 ! y0 *= dtmp1;
245 ! xx *= y0;
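
The pseudocode above lists only the statements that reference y0 (this is a refs:y0 match listing), so the intermediate steps between them, most likely of the form dtmp0 = 2.0 - dtmp0, are not shown. Taken together, lines 225-245 describe a division-free reciprocal: vis_fand/vis_fpsub32 build a crude bit-level estimate of 1/div, two Newton-Raphson refinements sharpen it, and the final multiply applies it to the numerator (xx *= y0). Below is a minimal portable C sketch of that sequence, written under the assumption that the elided steps are the standard y = y*(2 - d*y) update. The actual values of DC3/dconst3 and of the constant held in %f0 are not visible in this listing, so RCP_MAGIC is an illustrative seed, not the value used by the assembly, and the sketch only yields an approximate quotient (roughly 15-16 significant bits after two iterations).

#include <stdint.h>
#include <string.h>

/*
 * Sketch of the reciprocal sequence outlined in lines 225-245 above.
 * RCP_MAGIC is an illustrative seed constant, not the value the assembly
 * keeps in %f0 or masks with DC3; only the shape of the computation matches.
 */
#define RCP_MAGIC 0x7FDE000000000000ULL

static double recip_seed(double d)          /* ~ vis_fpsub32(dtmp0, vis_fand(div, DC3)) */
{
    uint64_t bits;
    memcpy(&bits, &d, sizeof bits);
    bits = RCP_MAGIC - bits;                /* integer subtract gives a rough 1/d */
    memcpy(&d, &bits, sizeof d);
    return d;
}

static double mul_by_recip(double xx, double div)
{
    double y0 = recip_seed(div);            /* 225, 229: seed estimate of 1/div            */
    double dtmp0 = div * y0;                /* 230: dtmp0 = div0 * y0                      */
    y0 *= 2.0 - dtmp0;                      /* 232: y0 *= dtmp0 (after 2 - dtmp0, assumed) */
    double dtmp1 = div * y0;                /* 233: dtmp1 = div0 * y0                      */
    y0 *= 2.0 - dtmp1;                      /* 235: y0 *= dtmp1 (after 2 - dtmp1, assumed) */
    return xx * y0;                         /* 245: xx *= y0                               */
}
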
388 fand %f32,DC3,%f24 ! (0_0) y0 = vis_fand(div,dconst3);
401 fpsub32 %f0,%f24,%f40 ! (0_0) y0 = vis_fpsub32(dtmp0, y0);
412 fmuld %f32,%f40,%f42 ! (0_0) dtmp0 = div0 * y0;
424 fand %f30,DC3,%f24 ! (1_0) y0 = vis_fand(div,dconst3);
438 fpsub32 %f0,%f24,%f38 ! (1_0) y0 = vis_fpsub32(dtmp0, y0);
441 fmuld %f40,%f44,%f40 ! (0_0) y0 *= dtmp0;
450 fmuld %f30,%f38,%f42 ! (1_0) dtmp0 = div0 * y0;
459 fmuld %f32,%f40,%f32 ! (0_0) dtmp1 = div0 * y0;
463 fand %f28,DC3,%f24 ! (2_0) y0 = vis_fand(div,dconst3);
478 fmuld %f40,%f46,%f26 ! (0_0) y0 *= dtmp1;
480 fpsub32 %f0,%f24,%f40 ! (2_0) y0 = vis_fpsub32(dtmp0, y0);
482 fmuld %f38,%f44,%f38 ! (1_0) y0 *= dtmp0;
491 fmuld %f28,%f40,%f42 ! (2_0) dtmp0 = div0 * y0;
496 fmuld %f22,%f26,%f22 ! (0_0) xx *= y0;
501 fmuld %f30,%f38,%f30 ! (1_0) dtmp1 = div0 * y0;
505 fand %f34,DC3,%f24 ! (3_0) y0 = vis_fand(div,dconst3);
526 fmuld %f38,%f46,%f26 ! (1_0) y0 *= dtmp1;
529 fpsub32 %f0,%f24,%f38 ! (3_0) y0 = vis_fpsub32(dtmp0, y0);
531 fmuld %f40,%f44,%f40 ! (2_0) y0 *= dtmp0;
540 fmuld %f34,%f38,%f42 ! (3_0) dtmp0 = div0 * y0;
545 fmuld %f20,%f26,%f20 ! (1_0) xx *= y0;
550 fmuld %f28,%f40,%f28 ! (2_0) dtmp1 = div0 * y0;
562 fand %f32,DC3,%f24 ! (4_0) y0 = vis_fand(div,dconst3);
595 fmuld %f40,%f46,%f26 ! (2_1) y0 *= dtmp1;
598 fpsub32 %f0,%f24,%f40 ! (4_1) y0 = vis_fpsub32(dtmp0, y0);
600 fmuld %f38,%f44,%f38 ! (3_1) y0 *= dtmp0;
610 fmuld %f32,%f40,%f42 ! (4_1) dtmp0 = div0 * y0;
615 fmuld %f18,%f26,%f18 ! (2_1) xx *= y0;
620 fmuld %f34,%f38,%f34 ! (3_1) dtmp1 = div0 * y0;
633 fand %f30,DC3,%f24 ! (5_1) y0 = vis_fand(div,dconst3);
660 fmuld %f38,%f46,%f26 ! (3_1) y0 *= dtmp1;
663 fpsub32 %f0,%f24,%f38 ! (5_1) y0 = vis_fpsub32(dtmp0, y0);
665 fmuld %f40,%f44,%f40 ! (4_1) y0 *= dtmp0;
675 fmuld %f30,%f38,%f42 ! (5_1) dtmp0 = div0 * y0;
680 fmuld %f16,%f26,%f16 ! (3_1) xx *= y0;
685 fmuld %f32,%f40,%f32 ! (4_1) dtmp1 = div0 * y0;
698 fand %f28,DC3,%f24 ! (6_1) y0 = vis_fand(div,dconst3);
725 fmuld %f40,%f46,%f26 ! (4_1) y0 *= dtmp1;
728 fpsub32 %f0,%f24,%f40 ! (6_1) y0 = vis_fpsub32(dtmp0, y0);
730 fmuld %f38,%f44,%f38 ! (5_1) y0 *= dtmp0;
740 fmuld %f28,%f40,%f42 ! (6_1) dtmp0 = div0 * y0;
745 fmuld %f14,%f26,%f14 ! (4_1) xx *= y0;
750 fmuld %f30,%f38,%f30 ! (5_1) dtmp1 = div0 * y0;
763 fand %f34,DC3,%f24 ! (7_1) y0 = vis_fand(div,dconst3);
790 fmuld %f38,%f46,%f26 ! (5_1) y0 *= dtmp1;
793 fpsub32 %f0,%f24,%f38 ! (7_1) y0 = vis_fpsub32(dtmp0, y0);
795 fmuld %f40,%f44,%f40 ! (6_1) y0 *= dtmp0;
805 fmuld %f34,%f38,%f42 ! (7_1) dtmp0 = div0 * y0;
810 fmuld %f36,%f26,%f36 ! (5_1) xx *= y0;
815 fmuld %f28,%f40,%f28 ! (6_1) dtmp1 = div0 * y0;
828 fand %f32,DC3,%f24 ! (0_0) y0 = vis_fand(div,dconst3);
855 fmuld %f40,%f46,%f26 ! (6_1) y0 *= dtmp1;
858 fpsub32 %f0,%f24,%f40 ! (0_0) y0 = vis_fpsub32(dtmp0, y0);
860 fmuld %f38,%f44,%f38 ! (7_1) y0 *= dtmp0;
870 fmuld %f32,%f40,%f42 ! (0_0) dtmp0 = div0 * y0;
875 fmuld %f10,%f26,%f10 ! (6_1) xx *= y0;
880 fmuld %f34,%f38,%f34 ! (7_1) dtmp1 = div0 * y0;
893 fand %f30,DC3,%f24 ! (1_0) y0 = vis_fand(div,dconst3);
920 fmuld %f38,%f46,%f26 ! (7_1) y0 *= dtmp1;
923 fpsub32 %f0,%f24,%f38 ! (1_0) y0 = vis_fpsub32(dtmp0, y0);
925 fmuld %f40,%f44,%f40 ! (0_0) y0 *= dtmp0;
935 fmuld %f30,%f38,%f42 ! (1_0) dtmp0 = div0 * y0;
940 fmuld %f8,%f26,%f8 ! (7_1) xx *= y0;
945 fmuld %f32,%f40,%f32 ! (0_0) dtmp1 = div0 * y0;
958 fand %f28,DC3,%f24 ! (2_0) y0 = vis_fand(div,dconst3);
985 fmuld %f40,%f46,%f26 ! (0_0) y0 *= dtmp1;
988 fpsub32 %f0,%f24,%f40 ! (2_0) y0 = vis_fpsub32(dtmp0, y0);
990 fmuld %f38,%f44,%f38 ! (1_0) y0 *= dtmp0;
1000 fmuld %f28,%f40,%f42 ! (2_0) dtmp0 = div0 * y0;
1005 fmuld %f22,%f26,%f22 ! (0_0) xx *= y0;
1010 fmuld %f30,%f38,%f30 ! (1_0) dtmp1 = div0 * y0;
1023 fand %f34,DC3,%f24 ! (3_0) y0 = vis_fand(div,dconst3);
1050 fmuld %f38,%f46,%f26 ! (1_0) y0 *= dtmp1;
1053 fpsub32 %f0,%f24,%f38 ! (3_0) y0 = vis_fpsub32(dtmp0, y0);
1055 fmuld %f40,%f44,%f40 ! (2_0) y0 *= dtmp0;
1065 fmuld %f34,%f38,%f42 ! (3_0) dtmp0 = div0 * y0;
1070 fmuld %f20,%f26,%f20 ! (1_0) xx *= y0;
1075 fmuld %f28,%f40,%f28 ! (2_0) dtmp1 = div0 * y0;
1088 fand %f32,DC3,%f24 ! (4_0) y0 = vis_fand(div,dconst3);
1122 fmuld %f40,%f46,%f26 ! (2_1) y0 *= dtmp1;
1125 fpsub32 %f0,%f24,%f40 ! (4_1) y0 = vis_fpsub32(dtmp0, y0);
1127 fmuld %f38,%f44,%f38 ! (3_1) y0 *= dtmp0;
1132 fmuld %f32,%f40,%f42 ! (4_1) dtmp0 = div0 * y0;
1135 fmuld %f18,%f26,%f18 ! (2_1) xx *= y0;
1139 fmuld %f34,%f38,%f34 ! (3_1) dtmp1 = div0 * y0;
1148 fand %f30,DC3,%f24 ! (5_1) y0 = vis_fand(div,dconst3);
1175 fmuld %f38,%f46,%f26 ! (3_1) y0 *= dtmp1;
1178 fpsub32 %f0,%f24,%f38 ! (5_1) y0 = vis_fpsub32(dtmp0, y0);
1180 fmuld %f40,%f44,%f40 ! (4_1) y0 *= dtmp0;
1184 fmuld %f30,%f38,%f42 ! (5_1) dtmp0 = div0 * y0;
1187 fmuld %f16,%f26,%f16 ! (3_1) xx *= y0;
1190 fmuld %f32,%f40,%f32 ! (4_1) dtmp1 = div0 * y0;
1199 fand %f28,DC3,%f24 ! (6_1) y0 = vis_fand(div,dconst3);
1226 fmuld %f40,%f46,%f26 ! (4_1) y0 *= dtmp1;
1229 fpsub32 %f0,%f24,%f40 ! (6_1) y0 = vis_fpsub32(dtmp0, y0);
1231 fmuld %f38,%f44,%f38 ! (5_1) y0 *= dtmp0;
1235 fmuld %f28,%f40,%f42 ! (6_1) dtmp0 = div0 * y0;
1238 fmuld %f14,%f26,%f14 ! (4_1) xx *= y0;
1241 fmuld %f30,%f38,%f30 ! (5_1) dtmp1 = div0 * y0;
1272 fmuld %f38,%f46,%f26 ! (5_1) y0 *= dtmp1;
1276 fmuld %f40,%f44,%f40 ! (6_1) y0 *= dtmp0;
1282 fmuld %f36,%f26,%f36 ! (5_1) xx *= y0;
1285 fmuld %f28,%f40,%f28 ! (6_1) dtmp1 = div0 * y0;
1315 fmuld %f40,%f46,%f26 ! (6_1) y0 *= dtmp1;
1323 fmuld %f10,%f26,%f10 ! (6_1) xx *= y0;
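
In the instruction listing above, the (0_0) through (7_1) tags on the comments appear to indicate which unrolled element and loop iteration each instruction belongs to: the same fand / fpsub32 / fmuld chain seems to be issued for eight elements per trip and software-pipelined across iterations, so each long-latency fmuld result is consumed many instructions later. Functionally this reduces to the scalar chain applied element-wise; a hedged sketch of the equivalent loop, reusing the hypothetical mul_by_recip() helper from the sketch above (the function and parameter names are illustrative, not taken from the source):

/*
 * Hedged sketch: the (k_n) tags suggest an 8-way unrolled, software-pipelined
 * loop; element-wise it reduces to the chain from lines 225-245.
 */
static void vdiv_chain(int n, const double *xx, const double *div, double *res)
{
    for (int i = 0; i < n; i++)
        res[i] = mul_by_recip(xx[i], div[i]);
}
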