/*
 * Copyright (c) 2003, 2007-11 Matteo Frigo
 * Copyright (c) 2003, 2007-11 Massachusetts Institute of Technology
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

/* This file was automatically generated --- DO NOT EDIT */
/* Generated on Sun Nov 25 07:37:57 EST 2012 */

#include "codelet-dft.h"

#ifdef HAVE_FMA

/* Generated by: ../../../genfft/gen_notw.native -fma -reorder-insns -schedule-for-pipeline -simd -compact -variables 4 -pipeline-latency 8 -n 64 -name n2sv_64 -with-ostride 1 -include n2s.h -store-multiple 4 */

/*
 * This function contains 912 FP additions, 392 FP multiplications,
 * (or, 520 additions, 0 multiplications, 392 fused multiply/add),
 * 310 stack variables, 15 constants, and 288 memory accesses
 */
#include "n2s.h"

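/*
 * n2sv_64 computes a size-64 complex DFT with no twiddle factors
 * (gen_notw) over split-format arrays: ri/ii point to the real and
 * imaginary parts of the input, ro/io to those of the output.  The
 * strides is/os step within one transform, ivs/ovs step between
 * successive transforms, and each pass through the main loop below
 * consumes 2*VL transforms (VL being the SIMD vector length).
 */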
static void n2sv_64(const R *ri, const R *ii, R *ro, R *io, stride is, stride os, INT v, INT ivs, INT ovs)
{
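	/* Trigonometric constants used in the butterflies, e.g. KP707106781 =
	   1/sqrt(2), KP923879532 = cos(pi/8), KP414213562 = tan(pi/8). */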
	DVK(KP881921264, +0.881921264348355029712756863660388349508442621);
	DVK(KP956940335, +0.956940335732208864935797886980269969482849206);
	DVK(KP534511135, +0.534511135950791641089685961295362908582039528);
	DVK(KP303346683, +0.303346683607342391675883946941299872384187453);
	DVK(KP773010453, +0.773010453362736960810906609758469800971041293);
	DVK(KP995184726, +0.995184726672196886244836953109479921575474869);
	DVK(KP820678790, +0.820678790828660330972281985331011598767386482);
	DVK(KP098491403, +0.098491403357164253077197521291327432293052451);
	DVK(KP831469612, +0.831469612302545237078788377617905756738560812);
	DVK(KP980785280, +0.980785280403230449126182236134239036973933731);
	DVK(KP668178637, +0.668178637919298919997757686523080761552472251);
	DVK(KP198912367, +0.198912367379658006911597622644676228597850501);
	DVK(KP923879532, +0.923879532511286756128183189396788286822416626);
	DVK(KP707106781, +0.707106781186547524400844362104849039284835938);
	DVK(KP414213562, +0.414213562373095048801688724209698078569671875);
	{
		INT i;
		for (i = v; i > 0; i = i - (2 * VL), ri = ri + ((2 * VL) * ivs), ii = ii + ((2 * VL) * ivs), ro = ro + ((2 * VL) * ovs), io = io + ((2 * VL) * ovs), MAKE_VOLATILE_STRIDE(256, is), MAKE_VOLATILE_STRIDE(256, os)) {
			V TeJ, TeK, TeP, TeQ, TfH, TfI, TfJ, TfK, Tgj, Tgk, Tgv, Tgw, T9a, T99, T9e;
			V T9b;
			{
				V T7B, T37, T5Z, T8F, TbB, TcB, Tf, Td9, T62, T7C, T2i, TdH, Tcb, Tah, T8G;
				V T3e, Tak, TbC, T65, T3m, TdI, Tu, Tda, T2x, TbD, Tan, T8I, T7G, T8J, T7J;
				V T64, T3t, Tas, Tce, TK, Tdd, Tav, Tcf, Tdc, T2N, T3G, T6G, T9k, T7O, T9l;
				V T7R, T6H, T3N, T1L, TdA, Tdx, Teo, Tbs, Tct, T5Q, T6V, T8y, T9z, T5j, T6Y;
				V Tbb, Tcw, T8n, T9C, Tch, Taz, Tdf, TZ, Tdg, T32, Tci, TaC, T6J, T3Z, T9n;
				V T7V, T9o, T7Y, T6K, T46, Tdp, T1g, Tej, Tdm, Tcm, Tb1, Tcp, TaK, T6O, T4X;
				V T9s, T8f, T6R, T4q, T9v, T84, Tdn, T1v, Tek, Tds, Tcn, TaV, Tcq, Tb4, T9t;
				V T8b, T9w, T8i, T6S, T50, T6P, T4N, T5k, T1V, T1S, TdB, Tbi, T5s, Tbt, Tbg;
				V T5F, T5R, T5p, T1Y, Tbj, T5n, T8z, T8q;
				{
					V Tba, T57, T8l, Tb7, T5M, T8w, T8m, T5P, T8x, T5i;
					{
						V T2p, T7F, T7E, Tal, T2w, Tam, T3s, T7H, T7I, T3p, T3d, T3a;
						{
							V T8, T35, T3, T5Y, T26, T5X, T6, T36, T29, T9, T2b, T2c, Tb, Tc, T2e;
							V T2f;
							{
								V T1, T2, T24, T25, T4, T5, T27, T28;
								T1 = LD(&(ri[0]), ivs, &(ri[0]));
								T2 = LD(&(ri[WS(is, 32)]), ivs, &(ri[0]));
								T24 = LD(&(ii[0]), ivs, &(ii[0]));
								T25 = LD(&(ii[WS(is, 32)]), ivs, &(ii[0]));
								T4 = LD(&(ri[WS(is, 16)]), ivs, &(ri[0]));
								T5 = LD(&(ri[WS(is, 48)]), ivs, &(ri[0]));
								T27 = LD(&(ii[WS(is, 16)]), ivs, &(ii[0]));
								T28 = LD(&(ii[WS(is, 48)]), ivs, &(ii[0]));
								T8 = LD(&(ri[WS(is, 8)]), ivs, &(ri[0]));
								T35 = VSUB(T1, T2);
								T3 = VADD(T1, T2);
								T5Y = VSUB(T24, T25);
								T26 = VADD(T24, T25);
								T5X = VSUB(T4, T5);
								T6 = VADD(T4, T5);
								T36 = VSUB(T27, T28);
								T29 = VADD(T27, T28);
								T9 = LD(&(ri[WS(is, 40)]), ivs, &(ri[0]));
								T2b = LD(&(ii[WS(is, 8)]), ivs, &(ii[0]));
								T2c = LD(&(ii[WS(is, 40)]), ivs, &(ii[0]));
								Tb = LD(&(ri[WS(is, 56)]), ivs, &(ri[0]));
								Tc = LD(&(ri[WS(is, 24)]), ivs, &(ri[0]));
								T2e = LD(&(ii[WS(is, 56)]), ivs, &(ii[0]));
								T2f = LD(&(ii[WS(is, 24)]), ivs, &(ii[0]));
							}
							{
								V T39, Ta, T38, T2d, T3b, Td, T3c, T2g, Taf, T7;
								T7B = VADD(T35, T36);
								T37 = VSUB(T35, T36);
								T39 = VSUB(T8, T9);
								Ta = VADD(T8, T9);
								T38 = VSUB(T2b, T2c);
								T2d = VADD(T2b, T2c);
								T3b = VSUB(Tb, Tc);
								Td = VADD(Tb, Tc);
								T3c = VSUB(T2e, T2f);
								T2g = VADD(T2e, T2f);
								T5Z = VADD(T5X, T5Y);
								T8F = VSUB(T5Y, T5X);
								Taf = VSUB(T3, T6);
								T7 = VADD(T3, T6);
								{
									V TbA, T2a, Te, Tbz, T60, T61, T2h, Tag;
									TbA = VSUB(T26, T29);
									T2a = VADD(T26, T29);
									Te = VADD(Ta, Td);
									Tbz = VSUB(Td, Ta);
									T3d = VADD(T3b, T3c);
									T60 = VSUB(T3b, T3c);
									T61 = VADD(T39, T38);
									T3a = VSUB(T38, T39);
									T2h = VADD(T2d, T2g);
									Tag = VSUB(T2d, T2g);
									TbB = VADD(Tbz, TbA);
									TcB = VSUB(TbA, Tbz);
									Tf = VADD(T7, Te);
									Td9 = VSUB(T7, Te);
									T62 = VSUB(T60, T61);
									T7C = VADD(T61, T60);
									T2i = VADD(T2a, T2h);
									TdH = VSUB(T2a, T2h);
									Tcb = VSUB(Taf, Tag);
									Tah = VADD(Taf, Tag);
								}
							}
						}
						{
							V T3j, Ti, T3h, T2l, T3g, Tl, T2t, T3k, T2o, T3q, Tp, T3o, T2s, T3n, Ts;
							V T2u, T2m, T2n;
							{
								V Tg, Th, T2j, T2k, Tj, Tk;
								Tg = LD(&(ri[WS(is, 4)]), ivs, &(ri[0]));
								Th = LD(&(ri[WS(is, 36)]), ivs, &(ri[0]));
								T2j = LD(&(ii[WS(is, 4)]), ivs, &(ii[0]));
								T2k = LD(&(ii[WS(is, 36)]), ivs, &(ii[0]));
								Tj = LD(&(ri[WS(is, 20)]), ivs, &(ri[0]));
								Tk = LD(&(ri[WS(is, 52)]), ivs, &(ri[0]));
								T2m = LD(&(ii[WS(is, 20)]), ivs, &(ii[0]));
								T8G = VADD(T3a, T3d);
								T3e = VSUB(T3a, T3d);
								T3j = VSUB(Tg, Th);
								Ti = VADD(Tg, Th);
								T3h = VSUB(T2j, T2k);
								T2l = VADD(T2j, T2k);
								T3g = VSUB(Tj, Tk);
								Tl = VADD(Tj, Tk);
								T2n = LD(&(ii[WS(is, 52)]), ivs, &(ii[0]));
							}
							{
								V Tn, To, T2q, T2r, Tq, Tr;
								Tn = LD(&(ri[WS(is, 60)]), ivs, &(ri[0]));
								To = LD(&(ri[WS(is, 28)]), ivs, &(ri[0]));
								T2q = LD(&(ii[WS(is, 60)]), ivs, &(ii[0]));
								T2r = LD(&(ii[WS(is, 28)]), ivs, &(ii[0]));
								Tq = LD(&(ri[WS(is, 12)]), ivs, &(ri[0]));
								Tr = LD(&(ri[WS(is, 44)]), ivs, &(ri[0]));
								T2t = LD(&(ii[WS(is, 12)]), ivs, &(ii[0]));
								T3k = VSUB(T2m, T2n);
								T2o = VADD(T2m, T2n);
								T3q = VSUB(Tn, To);
								Tp = VADD(Tn, To);
								T3o = VSUB(T2q, T2r);
								T2s = VADD(T2q, T2r);
								T3n = VSUB(Tq, Tr);
								Ts = VADD(Tq, Tr);
								T2u = LD(&(ii[WS(is, 44)]), ivs, &(ii[0]));
							}
							{
								V Tai, Tm, Taj, T3r;
								Tai = VSUB(Ti, Tl);
								Tm = VADD(Ti, Tl);
								T2p = VADD(T2l, T2o);
								Taj = VSUB(T2l, T2o);
								{
									V T3i, T3l, Tt, T2v;
									T7F = VSUB(T3h, T3g);
									T3i = VADD(T3g, T3h);
									T3l = VSUB(T3j, T3k);
									T7E = VADD(T3j, T3k);
									Tt = VADD(Tp, Ts);
									Tal = VSUB(Tp, Ts);
									T2v = VADD(T2t, T2u);
									T3r = VSUB(T2t, T2u);
									Tak = VADD(Tai, Taj);
									TbC = VSUB(Taj, Tai);
									T65 = VFNMS(LDK(KP414213562), T3i, T3l);
									T3m = VFMA(LDK(KP414213562), T3l, T3i);
									TdI = VSUB(Tt, Tm);
									Tu = VADD(Tm, Tt);
									T2w = VADD(T2s, T2v);
									Tam = VSUB(T2s, T2v);
								}
								T3s = VSUB(T3q, T3r);
								T7H = VADD(T3q, T3r);
								T7I = VSUB(T3o, T3n);
								T3p = VADD(T3n, T3o);
							}
						}
						{
							V T7M, T7Q, T7N, T3M, T3J, T7P;
							{
								V TG, T3H, Ty, T3x, T2B, T3w, TB, T3I, T2E, TH, T2J, T2K, TD, TE, T2G;
								V T2H;
								{
									V Tw, Tx, T2z, T2A, Tz, TA, T2C, T2D;
									Tw = LD(&(ri[WS(is, 2)]), ivs, &(ri[0]));
									Tda = VSUB(T2p, T2w);
									T2x = VADD(T2p, T2w);
									TbD = VADD(Tal, Tam);
									Tan = VSUB(Tal, Tam);
									T8I = VFNMS(LDK(KP414213562), T7E, T7F);
									T7G = VFMA(LDK(KP414213562), T7F, T7E);
									T8J = VFMA(LDK(KP414213562), T7H, T7I);
									T7J = VFNMS(LDK(KP414213562), T7I, T7H);
									T64 = VFMA(LDK(KP414213562), T3p, T3s);
									T3t = VFNMS(LDK(KP414213562), T3s, T3p);
									Tx = LD(&(ri[WS(is, 34)]), ivs, &(ri[0]));
									T2z = LD(&(ii[WS(is, 2)]), ivs, &(ii[0]));
									T2A = LD(&(ii[WS(is, 34)]), ivs, &(ii[0]));
									Tz = LD(&(ri[WS(is, 18)]), ivs, &(ri[0]));
									TA = LD(&(ri[WS(is, 50)]), ivs, &(ri[0]));
									T2C = LD(&(ii[WS(is, 18)]), ivs, &(ii[0]));
									T2D = LD(&(ii[WS(is, 50)]), ivs, &(ii[0]));
									TG = LD(&(ri[WS(is, 58)]), ivs, &(ri[0]));
									T3H = VSUB(Tw, Tx);
									Ty = VADD(Tw, Tx);
									T3x = VSUB(T2z, T2A);
									T2B = VADD(T2z, T2A);
									T3w = VSUB(Tz, TA);
									TB = VADD(Tz, TA);
									T3I = VSUB(T2C, T2D);
									T2E = VADD(T2C, T2D);
									TH = LD(&(ri[WS(is, 26)]), ivs, &(ri[0]));
									T2J = LD(&(ii[WS(is, 58)]), ivs, &(ii[0]));
									T2K = LD(&(ii[WS(is, 26)]), ivs, &(ii[0]));
									TD = LD(&(ri[WS(is, 10)]), ivs, &(ri[0]));
									TE = LD(&(ri[WS(is, 42)]), ivs, &(ri[0]));
									T2G = LD(&(ii[WS(is, 10)]), ivs, &(ii[0]));
									T2H = LD(&(ii[WS(is, 42)]), ivs, &(ii[0]));
								}
								{
									V Tat, TC, Tar, T2F, T3K, T3E, TJ, Taq, T2M, Tau, T3B, T3L, T3y, T3F;
									{
										V TI, T3C, T2L, T3D, TF, T3z, T2I, T3A;
										Tat = VSUB(Ty, TB);
										TC = VADD(Ty, TB);
										TI = VADD(TG, TH);
										T3C = VSUB(TG, TH);
										T2L = VADD(T2J, T2K);
										T3D = VSUB(T2J, T2K);
										TF = VADD(TD, TE);
										T3z = VSUB(TD, TE);
										T2I = VADD(T2G, T2H);
										T3A = VSUB(T2G, T2H);
										Tar = VSUB(T2B, T2E);
										T2F = VADD(T2B, T2E);
										T3K = VADD(T3C, T3D);
										T3E = VSUB(T3C, T3D);
										TJ = VADD(TF, TI);
										Taq = VSUB(TI, TF);
										T2M = VADD(T2I, T2L);
										Tau = VSUB(T2I, T2L);
										T3B = VADD(T3z, T3A);
										T3L = VSUB(T3A, T3z);
									}
									T7M = VSUB(T3x, T3w);
									T3y = VADD(T3w, T3x);
									Tas = VADD(Taq, Tar);
									Tce = VSUB(Tar, Taq);
									TK = VADD(TC, TJ);
									Tdd = VSUB(TC, TJ);
									Tav = VADD(Tat, Tau);
									Tcf = VSUB(Tat, Tau);
									T7Q = VADD(T3B, T3E);
									T3F = VSUB(T3B, T3E);
									Tdc = VSUB(T2F, T2M);
									T2N = VADD(T2F, T2M);
									T7N = VADD(T3L, T3K);
									T3M = VSUB(T3K, T3L);
									T3J = VSUB(T3H, T3I);
									T7P = VADD(T3H, T3I);
									T3G = VFNMS(LDK(KP707106781), T3F, T3y);
									T6G = VFMA(LDK(KP707106781), T3F, T3y);
								}
							}
							{
								V T1H, T5I, T1z, Tb8, T56, T53, T1C, Tb9, T5L, T1I, T5e, T5f, T1E, T1F, T59;
								V T5a;
								{
									V T1x, T1y, T54, T55, T1A, T1B, T5J, T5K;
									T1x = LD(&(ri[WS(is, 63)]), ivs, &(ri[WS(is, 1)]));
									T9k = VFNMS(LDK(KP707106781), T7N, T7M);
									T7O = VFMA(LDK(KP707106781), T7N, T7M);
									T9l = VFNMS(LDK(KP707106781), T7Q, T7P);
									T7R = VFMA(LDK(KP707106781), T7Q, T7P);
									T6H = VFMA(LDK(KP707106781), T3M, T3J);
									T3N = VFNMS(LDK(KP707106781), T3M, T3J);
									T1y = LD(&(ri[WS(is, 31)]), ivs, &(ri[WS(is, 1)]));
									T54 = LD(&(ii[WS(is, 63)]), ivs, &(ii[WS(is, 1)]));
									T55 = LD(&(ii[WS(is, 31)]), ivs, &(ii[WS(is, 1)]));
									T1A = LD(&(ri[WS(is, 15)]), ivs, &(ri[WS(is, 1)]));
									T1B = LD(&(ri[WS(is, 47)]), ivs, &(ri[WS(is, 1)]));
									T5J = LD(&(ii[WS(is, 15)]), ivs, &(ii[WS(is, 1)]));
									T5K = LD(&(ii[WS(is, 47)]), ivs, &(ii[WS(is, 1)]));
									T1H = LD(&(ri[WS(is, 55)]), ivs, &(ri[WS(is, 1)]));
									T5I = VSUB(T1x, T1y);
									T1z = VADD(T1x, T1y);
									Tb8 = VADD(T54, T55);
									T56 = VSUB(T54, T55);
									T53 = VSUB(T1A, T1B);
									T1C = VADD(T1A, T1B);
									Tb9 = VADD(T5J, T5K);
									T5L = VSUB(T5J, T5K);
									T1I = LD(&(ri[WS(is, 23)]), ivs, &(ri[WS(is, 1)]));
									T5e = LD(&(ii[WS(is, 55)]), ivs, &(ii[WS(is, 1)]));
									T5f = LD(&(ii[WS(is, 23)]), ivs, &(ii[WS(is, 1)]));
									T1E = LD(&(ri[WS(is, 7)]), ivs, &(ri[WS(is, 1)]));
									T1F = LD(&(ri[WS(is, 39)]), ivs, &(ri[WS(is, 1)]));
									T59 = LD(&(ii[WS(is, 7)]), ivs, &(ii[WS(is, 1)]));
									T5a = LD(&(ii[WS(is, 39)]), ivs, &(ii[WS(is, 1)]));
								}
								{
									V Tbo, T1D, Tdv, T5h, T5N, T1K, Tdw, Tbr, T5O, T5c;
									{
										V T1J, T5d, Tbq, T5g, T1G, T58, Tbp, T5b;
										Tbo = VSUB(T1z, T1C);
										T1D = VADD(T1z, T1C);
										T1J = VADD(T1H, T1I);
										T5d = VSUB(T1H, T1I);
										Tbq = VADD(T5e, T5f);
										T5g = VSUB(T5e, T5f);
										T1G = VADD(T1E, T1F);
										T58 = VSUB(T1E, T1F);
										Tbp = VADD(T59, T5a);
										T5b = VSUB(T59, T5a);
										Tba = VSUB(Tb8, Tb9);
										Tdv = VADD(Tb8, Tb9);
										T57 = VADD(T53, T56);
										T8l = VSUB(T56, T53);
										T5h = VSUB(T5d, T5g);
										T5N = VADD(T5d, T5g);
										Tb7 = VSUB(T1J, T1G);
										T1K = VADD(T1G, T1J);
										Tdw = VADD(Tbp, Tbq);
										Tbr = VSUB(Tbp, Tbq);
										T5O = VSUB(T5b, T58);
										T5c = VADD(T58, T5b);
									}
									T5M = VSUB(T5I, T5L);
									T8w = VADD(T5I, T5L);
									T1L = VADD(T1D, T1K);
									TdA = VSUB(T1D, T1K);
									Tdx = VSUB(Tdv, Tdw);
									Teo = VADD(Tdv, Tdw);
									Tbs = VADD(Tbo, Tbr);
									Tct = VSUB(Tbo, Tbr);
									T8m = VADD(T5O, T5N);
									T5P = VSUB(T5N, T5O);
									T8x = VADD(T5c, T5h);
									T5i = VSUB(T5c, T5h);
								}
							}
						}
					}
					{
						V T4e, T82, T8d, T4T, T4W, T83, T4p, T8e;
						{
							V T7T, T3R, T42, T7W, T3Y, T7X, T45, T7U;
							{
								V T40, TN, T2Y, T3Q, T2Q, T3P, TQ, T41, T2T, T3V, TX, T2Z, TS, TT, T2V;
								V T2W;
								{
									V T2O, T2P, TO, TP, TL, TM;
									TL = LD(&(ri[WS(is, 62)]), ivs, &(ri[0]));
									TM = LD(&(ri[WS(is, 30)]), ivs, &(ri[0]));
									T5Q = VFNMS(LDK(KP707106781), T5P, T5M);
									T6V = VFMA(LDK(KP707106781), T5P, T5M);
									T8y = VFMA(LDK(KP707106781), T8x, T8w);
									T9z = VFNMS(LDK(KP707106781), T8x, T8w);
									T5j = VFNMS(LDK(KP707106781), T5i, T57);
									T6Y = VFMA(LDK(KP707106781), T5i, T57);
									Tbb = VADD(Tb7, Tba);
									Tcw = VSUB(Tba, Tb7);
									T8n = VFMA(LDK(KP707106781), T8m, T8l);
									T9C = VFNMS(LDK(KP707106781), T8m, T8l);
									T40 = VSUB(TL, TM);
									TN = VADD(TL, TM);
									T2O = LD(&(ii[WS(is, 62)]), ivs, &(ii[0]));
									T2P = LD(&(ii[WS(is, 30)]), ivs, &(ii[0]));
									TO = LD(&(ri[WS(is, 14)]), ivs, &(ri[0]));
									TP = LD(&(ri[WS(is, 46)]), ivs, &(ri[0]));
									{
										V T2R, T2S, TV, TW;
										T2R = LD(&(ii[WS(is, 14)]), ivs, &(ii[0]));
										T2S = LD(&(ii[WS(is, 46)]), ivs, &(ii[0]));
										TV = LD(&(ri[WS(is, 54)]), ivs, &(ri[0]));
										TW = LD(&(ri[WS(is, 22)]), ivs, &(ri[0]));
										T2Y = LD(&(ii[WS(is, 54)]), ivs, &(ii[0]));
										T3Q = VSUB(T2O, T2P);
										T2Q = VADD(T2O, T2P);
										T3P = VSUB(TO, TP);
										TQ = VADD(TO, TP);
										T41 = VSUB(T2R, T2S);
										T2T = VADD(T2R, T2S);
										T3V = VSUB(TV, TW);
										TX = VADD(TV, TW);
										T2Z = LD(&(ii[WS(is, 22)]), ivs, &(ii[0]));
										TS = LD(&(ri[WS(is, 6)]), ivs, &(ri[0]));
										TT = LD(&(ri[WS(is, 38)]), ivs, &(ri[0]));
										T2V = LD(&(ii[WS(is, 6)]), ivs, &(ii[0]));
										T2W = LD(&(ii[WS(is, 38)]), ivs, &(ii[0]));
									}
								}
								{
									V TaA, TR, Tay, T2U, T3W, T30, TU, T3S, T2X, T3T;
									TaA = VSUB(TN, TQ);
									TR = VADD(TN, TQ);
									Tay = VSUB(T2Q, T2T);
									T2U = VADD(T2Q, T2T);
									T3W = VSUB(T2Y, T2Z);
									T30 = VADD(T2Y, T2Z);
									TU = VADD(TS, TT);
									T3S = VSUB(TS, TT);
									T2X = VADD(T2V, T2W);
									T3T = VSUB(T2V, T2W);
									{
										V T3X, T43, Tax, TY, T31, TaB, T3U, T44;
										T7T = VSUB(T3Q, T3P);
										T3R = VADD(T3P, T3Q);
										T3X = VSUB(T3V, T3W);
										T43 = VADD(T3V, T3W);
										Tax = VSUB(TX, TU);
										TY = VADD(TU, TX);
										T31 = VADD(T2X, T30);
										TaB = VSUB(T2X, T30);
										T3U = VADD(T3S, T3T);
										T44 = VSUB(T3T, T3S);
										T42 = VSUB(T40, T41);
										T7W = VADD(T40, T41);
										Tch = VSUB(Tay, Tax);
										Taz = VADD(Tax, Tay);
										Tdf = VSUB(TR, TY);
										TZ = VADD(TR, TY);
										Tdg = VSUB(T2U, T31);
										T32 = VADD(T2U, T31);
										Tci = VSUB(TaA, TaB);
										TaC = VADD(TaA, TaB);
										T3Y = VSUB(T3U, T3X);
										T7X = VADD(T3U, T3X);
										T45 = VSUB(T43, T44);
										T7U = VADD(T44, T43);
									}
								}
							}
							{
								V T4P, T14, T4l, TaH, T4d, T4a, T17, TaI, T4S, T4k, T1e, T4m, T19, T1a, T4g;
								V T4h;
								{
									V T4b, T4c, T15, T16, T12, T13;
									T12 = LD(&(ri[WS(is, 1)]), ivs, &(ri[WS(is, 1)]));
									T13 = LD(&(ri[WS(is, 33)]), ivs, &(ri[WS(is, 1)]));
									T4b = LD(&(ii[WS(is, 1)]), ivs, &(ii[WS(is, 1)]));
									T6J = VFMA(LDK(KP707106781), T3Y, T3R);
									T3Z = VFNMS(LDK(KP707106781), T3Y, T3R);
									T9n = VFNMS(LDK(KP707106781), T7U, T7T);
									T7V = VFMA(LDK(KP707106781), T7U, T7T);
									T9o = VFNMS(LDK(KP707106781), T7X, T7W);
									T7Y = VFMA(LDK(KP707106781), T7X, T7W);
									T6K = VFMA(LDK(KP707106781), T45, T42);
									T46 = VFNMS(LDK(KP707106781), T45, T42);
									T4P = VSUB(T12, T13);
									T14 = VADD(T12, T13);
									T4c = LD(&(ii[WS(is, 33)]), ivs, &(ii[WS(is, 1)]));
									T15 = LD(&(ri[WS(is, 17)]), ivs, &(ri[WS(is, 1)]));
									T16 = LD(&(ri[WS(is, 49)]), ivs, &(ri[WS(is, 1)]));
									{
										V T4Q, T4R, T1c, T1d;
										T4Q = LD(&(ii[WS(is, 17)]), ivs, &(ii[WS(is, 1)]));
										T4R = LD(&(ii[WS(is, 49)]), ivs, &(ii[WS(is, 1)]));
										T1c = LD(&(ri[WS(is, 57)]), ivs, &(ri[WS(is, 1)]));
										T1d = LD(&(ri[WS(is, 25)]), ivs, &(ri[WS(is, 1)]));
										T4l = LD(&(ii[WS(is, 57)]), ivs, &(ii[WS(is, 1)]));
										TaH = VADD(T4b, T4c);
										T4d = VSUB(T4b, T4c);
										T4a = VSUB(T15, T16);
										T17 = VADD(T15, T16);
										TaI = VADD(T4Q, T4R);
										T4S = VSUB(T4Q, T4R);
										T4k = VSUB(T1c, T1d);
										T1e = VADD(T1c, T1d);
										T4m = LD(&(ii[WS(is, 25)]), ivs, &(ii[WS(is, 1)]));
										T19 = LD(&(ri[WS(is, 9)]), ivs, &(ri[WS(is, 1)]));
										T1a = LD(&(ri[WS(is, 41)]), ivs, &(ri[WS(is, 1)]));
										T4g = LD(&(ii[WS(is, 9)]), ivs, &(ii[WS(is, 1)]));
										T4h = LD(&(ii[WS(is, 41)]), ivs, &(ii[WS(is, 1)]));
									}
								}
								{
									V TaX, T18, T4n, TaZ, TaJ, Tdk, T1b, T4f, TaY, T4i;
									TaX = VSUB(T14, T17);
									T18 = VADD(T14, T17);
									T4n = VSUB(T4l, T4m);
									TaZ = VADD(T4l, T4m);
									TaJ = VSUB(TaH, TaI);
									Tdk = VADD(TaH, TaI);
									T1b = VADD(T19, T1a);
									T4f = VSUB(T19, T1a);
									TaY = VADD(T4g, T4h);
									T4i = VSUB(T4g, T4h);
									T4e = VADD(T4a, T4d);
									T82 = VSUB(T4d, T4a);
									{
										V T4U, T4o, T1f, TaG, Tdl, Tb0, T4V, T4j;
										T8d = VADD(T4P, T4S);
										T4T = VSUB(T4P, T4S);
										T4U = VADD(T4k, T4n);
										T4o = VSUB(T4k, T4n);
										T1f = VADD(T1b, T1e);
										TaG = VSUB(T1e, T1b);
										Tdl = VADD(TaY, TaZ);
										Tb0 = VSUB(TaY, TaZ);
										T4V = VSUB(T4i, T4f);
										T4j = VADD(T4f, T4i);
										Tdp = VSUB(T18, T1f);
										T1g = VADD(T18, T1f);
										Tej = VADD(Tdk, Tdl);
										Tdm = VSUB(Tdk, Tdl);
										Tcm = VSUB(TaX, Tb0);
										Tb1 = VADD(TaX, Tb0);
										T4W = VSUB(T4U, T4V);
										T83 = VADD(T4V, T4U);
										T4p = VSUB(T4j, T4o);
										T8e = VADD(T4j, T4o);
										Tcp = VSUB(TaJ, TaG);
										TaK = VADD(TaG, TaJ);
									}
								}
							}
						}
						{
							V T1n, Tdq, T4r, T1q, TaR, T4z, Tb2, TaP, T4M, T4Y, T4w, T1t, TaS, T4u, T8g;
							V T87;
							{
								V T1r, T85, T4L, TaO, TaN, T86, T4G, T1s, T4s, T4t;
								{
									V T1h, T1i, T4I, T4J, T1k, T1l, T4D, T4E;
									T1h = LD(&(ri[WS(is, 5)]), ivs, &(ri[WS(is, 1)]));
									T6O = VFMA(LDK(KP707106781), T4W, T4T);
									T4X = VFNMS(LDK(KP707106781), T4W, T4T);
									T9s = VFNMS(LDK(KP707106781), T8e, T8d);
									T8f = VFMA(LDK(KP707106781), T8e, T8d);
									T6R = VFMA(LDK(KP707106781), T4p, T4e);
									T4q = VFNMS(LDK(KP707106781), T4p, T4e);
									T9v = VFNMS(LDK(KP707106781), T83, T82);
									T84 = VFMA(LDK(KP707106781), T83, T82);
									T1i = LD(&(ri[WS(is, 37)]), ivs, &(ri[WS(is, 1)]));
									T4I = LD(&(ii[WS(is, 5)]), ivs, &(ii[WS(is, 1)]));
									T4J = LD(&(ii[WS(is, 37)]), ivs, &(ii[WS(is, 1)]));
									T1k = LD(&(ri[WS(is, 21)]), ivs, &(ri[WS(is, 1)]));
									T1l = LD(&(ri[WS(is, 53)]), ivs, &(ri[WS(is, 1)]));
									T4D = LD(&(ii[WS(is, 21)]), ivs, &(ii[WS(is, 1)]));
									T4E = LD(&(ii[WS(is, 53)]), ivs, &(ii[WS(is, 1)]));
									{
										V T1o, T4C, T1j, TaL, T4K, T4H, T1m, TaM, T4F, T1p, T4x, T4y;
										T1o = LD(&(ri[WS(is, 61)]), ivs, &(ri[WS(is, 1)]));
										T4C = VSUB(T1h, T1i);
										T1j = VADD(T1h, T1i);
										TaL = VADD(T4I, T4J);
										T4K = VSUB(T4I, T4J);
										T4H = VSUB(T1k, T1l);
										T1m = VADD(T1k, T1l);
										TaM = VADD(T4D, T4E);
										T4F = VSUB(T4D, T4E);
										T1p = LD(&(ri[WS(is, 29)]), ivs, &(ri[WS(is, 1)]));
										T4x = LD(&(ii[WS(is, 61)]), ivs, &(ii[WS(is, 1)]));
										T4y = LD(&(ii[WS(is, 29)]), ivs, &(ii[WS(is, 1)]));
										T1r = LD(&(ri[WS(is, 13)]), ivs, &(ri[WS(is, 1)]));
										T85 = VSUB(T4K, T4H);
										T4L = VADD(T4H, T4K);
										TaO = VSUB(T1j, T1m);
										T1n = VADD(T1j, T1m);
										Tdq = VADD(TaL, TaM);
										TaN = VSUB(TaL, TaM);
										T86 = VADD(T4C, T4F);
										T4G = VSUB(T4C, T4F);
										T4r = VSUB(T1o, T1p);
										T1q = VADD(T1o, T1p);
										TaR = VADD(T4x, T4y);
										T4z = VSUB(T4x, T4y);
										T1s = LD(&(ri[WS(is, 45)]), ivs, &(ri[WS(is, 1)]));
										T4s = LD(&(ii[WS(is, 13)]), ivs, &(ii[WS(is, 1)]));
										T4t = LD(&(ii[WS(is, 45)]), ivs, &(ii[WS(is, 1)]));
									}
								}
								Tb2 = VADD(TaO, TaN);
								TaP = VSUB(TaN, TaO);
								T4M = VFNMS(LDK(KP414213562), T4L, T4G);
								T4Y = VFMA(LDK(KP414213562), T4G, T4L);
								T4w = VSUB(T1r, T1s);
								T1t = VADD(T1r, T1s);
								TaS = VADD(T4s, T4t);
								T4u = VSUB(T4s, T4t);
								T8g = VFMA(LDK(KP414213562), T85, T86);
								T87 = VFNMS(LDK(KP414213562), T86, T85);
							}
							{
								V T1W, T8o, T5E, Tbf, Tbe, T8p, T5z, T1X, T5l, T5m;
								{
									V T5B, T5v, T1O, T5C, T1P, T1Q, T5w, T5x;
									{
										V T1M, T88, T4A, T1u, TaQ, Tdr, TaT, T89, T4v, T1N, TaU, Tb3;
										T1M = LD(&(ri[WS(is, 3)]), ivs, &(ri[WS(is, 1)]));
										T88 = VSUB(T4z, T4w);
										T4A = VADD(T4w, T4z);
										T1u = VADD(T1q, T1t);
										TaQ = VSUB(T1q, T1t);
										Tdr = VADD(TaR, TaS);
										TaT = VSUB(TaR, TaS);
										T89 = VADD(T4r, T4u);
										T4v = VSUB(T4r, T4u);
										T1N = LD(&(ri[WS(is, 35)]), ivs, &(ri[WS(is, 1)]));
										T5B = LD(&(ii[WS(is, 3)]), ivs, &(ii[WS(is, 1)]));
										Tdn = VSUB(T1u, T1n);
										T1v = VADD(T1n, T1u);
										Tek = VADD(Tdq, Tdr);
										Tds = VSUB(Tdq, Tdr);
										TaU = VADD(TaQ, TaT);
										Tb3 = VSUB(TaQ, TaT);
										{
											V T8a, T8h, T4Z, T4B;
											T8a = VFMA(LDK(KP414213562), T89, T88);
											T8h = VFNMS(LDK(KP414213562), T88, T89);
											T4Z = VFNMS(LDK(KP414213562), T4v, T4A);
											T4B = VFMA(LDK(KP414213562), T4A, T4v);
											T5v = VSUB(T1M, T1N);
											T1O = VADD(T1M, T1N);
											Tcn = VSUB(TaU, TaP);
											TaV = VADD(TaP, TaU);
											Tcq = VSUB(Tb2, Tb3);
											Tb4 = VADD(Tb2, Tb3);
											T9t = VSUB(T8a, T87);
											T8b = VADD(T87, T8a);
											T9w = VSUB(T8g, T8h);
											T8i = VADD(T8g, T8h);
											T6S = VADD(T4Y, T4Z);
											T50 = VSUB(T4Y, T4Z);
											T6P = VADD(T4M, T4B);
											T4N = VSUB(T4B, T4M);
											T5C = LD(&(ii[WS(is, 35)]), ivs, &(ii[WS(is, 1)]));
										}
									}
									T1P = LD(&(ri[WS(is, 19)]), ivs, &(ri[WS(is, 1)]));
									T1Q = LD(&(ri[WS(is, 51)]), ivs, &(ri[WS(is, 1)]));
									T5w = LD(&(ii[WS(is, 19)]), ivs, &(ii[WS(is, 1)]));
									T5x = LD(&(ii[WS(is, 51)]), ivs, &(ii[WS(is, 1)]));
									{
										V T5q, Tbc, T5D, T5A, T1R, Tbd, T5y, T5r, T1T, T1U;
										T1T = LD(&(ri[WS(is, 59)]), ivs, &(ri[WS(is, 1)]));
										T1U = LD(&(ri[WS(is, 27)]), ivs, &(ri[WS(is, 1)]));
										T5q = LD(&(ii[WS(is, 59)]), ivs, &(ii[WS(is, 1)]));
										Tbc = VADD(T5B, T5C);
										T5D = VSUB(T5B, T5C);
										T5A = VSUB(T1P, T1Q);
										T1R = VADD(T1P, T1Q);
										Tbd = VADD(T5w, T5x);
										T5y = VSUB(T5w, T5x);
										T5k = VSUB(T1T, T1U);
										T1V = VADD(T1T, T1U);
										T5r = LD(&(ii[WS(is, 27)]), ivs, &(ii[WS(is, 1)]));
										T1W = LD(&(ri[WS(is, 11)]), ivs, &(ri[WS(is, 1)]));
										T8o = VSUB(T5D, T5A);
										T5E = VADD(T5A, T5D);
										Tbf = VSUB(T1O, T1R);
										T1S = VADD(T1O, T1R);
										TdB = VADD(Tbc, Tbd);
										Tbe = VSUB(Tbc, Tbd);
										T8p = VADD(T5v, T5y);
										T5z = VSUB(T5v, T5y);
										Tbi = VADD(T5q, T5r);
										T5s = VSUB(T5q, T5r);
										T1X = LD(&(ri[WS(is, 43)]), ivs, &(ri[WS(is, 1)]));
										T5l = LD(&(ii[WS(is, 11)]), ivs, &(ii[WS(is, 1)]));
										T5m = LD(&(ii[WS(is, 43)]), ivs, &(ii[WS(is, 1)]));
									}
								}
								Tbt = VADD(Tbf, Tbe);
								Tbg = VSUB(Tbe, Tbf);
								T5F = VFNMS(LDK(KP414213562), T5E, T5z);
								T5R = VFMA(LDK(KP414213562), T5z, T5E);
								T5p = VSUB(T1W, T1X);
								T1Y = VADD(T1W, T1X);
								Tbj = VADD(T5l, T5m);
								T5n = VSUB(T5l, T5m);
								T8z = VFMA(LDK(KP414213562), T8o, T8p);
								T8q = VFNMS(LDK(KP414213562), T8p, T8o);
							}
						}
					}
				}
				{
					V Tbm, Tbv, T9A, T8u, T9D, T8B, T6Z, T5T, T6W, T5G, TeL, TeM, TeN, TeO, TeR;
					V TeS, TeT, TeU, TeV, TeW, TeX, TeY, TeZ, Tf0, Tf1, Tf2, Tf3, Tf4, Tf5, Tf6;
					V Tf7, Tf8, Tf9, Tfa, Tfb, Tfc, TbE, Tao, Tfd, Tfe, Td7, Td8, Tff, Tfg, Tfh;
					V Tfi, Tfj, Tfk, Tfl, Tfm, Tfn, Tfo, Tfp, Tfq, Tfr, Tfs;
					{
						V Tel, Tdy, TdD, Tcu, Tcx, Teq, Tei, Ten, Tex, Teh, TeB, Tev, Te9, Tec;
						{
							V Tef, Teu, TeE, TeD, T11, TeF, T1w, T21, Tet, T2y, T33, Teg, T20;
							{
								V Tv, T8r, T5t, T1Z, Tbh, TdC, Tbk, T8s, T5o, T10, Tep, Tbl, Tbu;
								Tef = VSUB(Tf, Tu);
								Tv = VADD(Tf, Tu);
								T8r = VSUB(T5s, T5p);
								T5t = VADD(T5p, T5s);
								T1Z = VADD(T1V, T1Y);
								Tbh = VSUB(T1V, T1Y);
								TdC = VADD(Tbi, Tbj);
								Tbk = VSUB(Tbi, Tbj);
								T8s = VADD(T5k, T5n);
								T5o = VSUB(T5k, T5n);
								T10 = VADD(TK, TZ);
								Teu = VSUB(TZ, TK);
								Tel = VSUB(Tej, Tek);
								TeE = VADD(Tej, Tek);
								Tdy = VSUB(T1Z, T1S);
								T20 = VADD(T1S, T1Z);
								Tep = VADD(TdB, TdC);
								TdD = VSUB(TdB, TdC);
								Tbl = VADD(Tbh, Tbk);
								Tbu = VSUB(Tbh, Tbk);
								{
									V T8t, T8A, T5S, T5u;
									T8t = VFMA(LDK(KP414213562), T8s, T8r);
									T8A = VFNMS(LDK(KP414213562), T8r, T8s);
									T5S = VFNMS(LDK(KP414213562), T5o, T5t);
									T5u = VFMA(LDK(KP414213562), T5t, T5o);
									TeD = VSUB(Tv, T10);
									T11 = VADD(Tv, T10);
									Tcu = VSUB(Tbl, Tbg);
									Tbm = VADD(Tbg, Tbl);
									Tcx = VSUB(Tbt, Tbu);
									Tbv = VADD(Tbt, Tbu);
									T9A = VSUB(T8t, T8q);
									T8u = VADD(T8q, T8t);
									T9D = VSUB(T8z, T8A);
									T8B = VADD(T8z, T8A);
									T6Z = VADD(T5R, T5S);
									T5T = VSUB(T5R, T5S);
									T6W = VADD(T5F, T5u);
									T5G = VSUB(T5u, T5F);
									TeF = VADD(Teo, Tep);
									Teq = VSUB(Teo, Tep);
								}
							}
							Tei = VSUB(T1g, T1v);
							T1w = VADD(T1g, T1v);
							T21 = VADD(T1L, T20);
							Ten = VSUB(T1L, T20);
							Tet = VSUB(T2i, T2x);
							T2y = VADD(T2i, T2x);
							T33 = VADD(T2N, T32);
							Teg = VSUB(T2N, T32);
							{
								V TeI, TeG, T23, T22, TeH, T34;
								TeI = VADD(TeE, TeF);
								TeG = VSUB(TeE, TeF);
								T23 = VSUB(T21, T1w);
								T22 = VADD(T1w, T21);
								TeH = VADD(T2y, T33);
								T34 = VSUB(T2y, T33);
								Tex = VSUB(Tef, Teg);
								Teh = VADD(Tef, Teg);
								TeJ = VSUB(TeD, TeG);
								STM4(&(ro[48]), TeJ, ovs, &(ro[0]));
								TeK = VADD(TeD, TeG);
								STM4(&(ro[16]), TeK, ovs, &(ro[0]));
								TeL = VADD(T11, T22);
								STM4(&(ro[0]), TeL, ovs, &(ro[0]));
								TeM = VSUB(T11, T22);
								STM4(&(ro[32]), TeM, ovs, &(ro[0]));
								TeN = VADD(TeH, TeI);
								STM4(&(io[0]), TeN, ovs, &(io[0]));
								TeO = VSUB(TeH, TeI);
								STM4(&(io[32]), TeO, ovs, &(io[0]));
								TeP = VSUB(T34, T23);
								STM4(&(io[48]), TeP, ovs, &(io[0]));
								TeQ = VADD(T23, T34);
								STM4(&(io[16]), TeQ, ovs, &(io[0]));
								TeB = VADD(Teu, Tet);
								Tev = VSUB(Tet, Teu);
							}
						}
						{
							V TdV, Tdb, TdJ, Te5, TdE, Tdz, TdZ, Tdo, Te6, Tdi, Teb, Te3, TdW, TdM, Tdt;
							V TdY;
							{
								V TdL, Tde, Tey, Tem, Tez, Ter, Tdh, TdK, Te1, Te2;
								TdV = VADD(Td9, Tda);
								Tdb = VSUB(Td9, Tda);
								TdJ = VSUB(TdH, TdI);
								Te5 = VADD(TdI, TdH);
								TdL = VADD(Tdd, Tdc);
								Tde = VSUB(Tdc, Tdd);
								Tey = VSUB(Tel, Tei);
								Tem = VADD(Tei, Tel);
								Tez = VADD(Ten, Teq);
								Ter = VSUB(Ten, Teq);
								Tdh = VADD(Tdf, Tdg);
								TdK = VSUB(Tdf, Tdg);
								TdE = VSUB(TdA, TdD);
								Te1 = VADD(TdA, TdD);
								Te2 = VADD(Tdy, Tdx);
								Tdz = VSUB(Tdx, Tdy);
								TdZ = VADD(Tdn, Tdm);
								Tdo = VSUB(Tdm, Tdn);
								{
									V TeA, TeC, Tew, Tes;
									TeA = VSUB(Tey, Tez);
									TeC = VADD(Tey, Tez);
									Tew = VSUB(Ter, Tem);
									Tes = VADD(Tem, Ter);
									Te6 = VADD(Tde, Tdh);
									Tdi = VSUB(Tde, Tdh);
									Teb = VFMA(LDK(KP414213562), Te1, Te2);
									Te3 = VFNMS(LDK(KP414213562), Te2, Te1);
									TdW = VADD(TdL, TdK);
									TdM = VSUB(TdK, TdL);
									TeR = VFMA(LDK(KP707106781), TeA, Tex);
									STM4(&(ro[24]), TeR, ovs, &(ro[0]));
									TeS = VFNMS(LDK(KP707106781), TeA, Tex);
									STM4(&(ro[56]), TeS, ovs, &(ro[0]));
									TeT = VFMA(LDK(KP707106781), TeC, TeB);
									STM4(&(io[8]), TeT, ovs, &(io[0]));
									TeU = VFNMS(LDK(KP707106781), TeC, TeB);
									STM4(&(io[40]), TeU, ovs, &(io[0]));
									TeV = VFMA(LDK(KP707106781), Tew, Tev);
									STM4(&(io[24]), TeV, ovs, &(io[0]));
									TeW = VFNMS(LDK(KP707106781), Tew, Tev);
									STM4(&(io[56]), TeW, ovs, &(io[0]));
									TeX = VFMA(LDK(KP707106781), Tes, Teh);
									STM4(&(ro[8]), TeX, ovs, &(ro[0]));
									TeY = VFNMS(LDK(KP707106781), Tes, Teh);
									STM4(&(ro[40]), TeY, ovs, &(ro[0]));
									Tdt = VSUB(Tdp, Tds);
									TdY = VADD(Tdp, Tds);
								}
							}
							{
								V TdT, Tdj, TdP, TdN, TdR, Tdu, Tea, Te0, TdQ, TdF, TdX, Ted, Te7;
								TdT = VFNMS(LDK(KP707106781), Tdi, Tdb);
								Tdj = VFMA(LDK(KP707106781), Tdi, Tdb);
								TdP = VFMA(LDK(KP707106781), TdM, TdJ);
								TdN = VFNMS(LDK(KP707106781), TdM, TdJ);
								TdR = VFNMS(LDK(KP414213562), Tdo, Tdt);
								Tdu = VFMA(LDK(KP414213562), Tdt, Tdo);
								Tea = VFNMS(LDK(KP414213562), TdY, TdZ);
								Te0 = VFMA(LDK(KP414213562), TdZ, TdY);
								TdQ = VFMA(LDK(KP414213562), Tdz, TdE);
								TdF = VFNMS(LDK(KP414213562), TdE, Tdz);
								Te9 = VFNMS(LDK(KP707106781), TdW, TdV);
								TdX = VFMA(LDK(KP707106781), TdW, TdV);
								Ted = VFMA(LDK(KP707106781), Te6, Te5);
								Te7 = VFNMS(LDK(KP707106781), Te6, Te5);
								{
									V Tee, Te8, Te4, TdU, TdS, TdO, TdG;
									Tee = VADD(Tea, Teb);
									Tec = VSUB(Tea, Teb);
									Te8 = VSUB(Te3, Te0);
									Te4 = VADD(Te0, Te3);
									TdU = VADD(TdR, TdQ);
									TdS = VSUB(TdQ, TdR);
									TdO = VADD(Tdu, TdF);
									TdG = VSUB(Tdu, TdF);
									TeZ = VFMA(LDK(KP923879532), Tee, Ted);
									STM4(&(io[4]), TeZ, ovs, &(io[0]));
									Tf0 = VFNMS(LDK(KP923879532), Tee, Ted);
									STM4(&(io[36]), Tf0, ovs, &(io[0]));
									Tf1 = VFMA(LDK(KP923879532), Te4, TdX);
									STM4(&(ro[4]), Tf1, ovs, &(ro[0]));
									Tf2 = VFNMS(LDK(KP923879532), Te4, TdX);
									STM4(&(ro[36]), Tf2, ovs, &(ro[0]));
									Tf3 = VFMA(LDK(KP923879532), TdU, TdT);
									STM4(&(ro[60]), Tf3, ovs, &(ro[0]));
									Tf4 = VFNMS(LDK(KP923879532), TdU, TdT);
									STM4(&(ro[28]), Tf4, ovs, &(ro[0]));
									Tf5 = VFMA(LDK(KP923879532), TdS, TdP);
									STM4(&(io[12]), Tf5, ovs, &(io[0]));
									Tf6 = VFNMS(LDK(KP923879532), TdS, TdP);
									STM4(&(io[44]), Tf6, ovs, &(io[0]));
									Tf7 = VFMA(LDK(KP923879532), TdO, TdN);
									STM4(&(io[60]), Tf7, ovs, &(io[0]));
									Tf8 = VFNMS(LDK(KP923879532), TdO, TdN);
									STM4(&(io[28]), Tf8, ovs, &(io[0]));
									Tf9 = VFMA(LDK(KP923879532), TdG, Tdj);
									STM4(&(ro[12]), Tf9, ovs, &(ro[0]));
									Tfa = VFNMS(LDK(KP923879532), TdG, Tdj);
									STM4(&(ro[44]), Tfa, ovs, &(ro[0]));
									Tfb = VFMA(LDK(KP923879532), Te8, Te7);
									STM4(&(io[20]), Tfb, ovs, &(io[0]));
									Tfc = VFNMS(LDK(KP923879532), Te8, Te7);
									STM4(&(io[52]), Tfc, ovs, &(io[0]));
								}
							}
						}
						{
							V TcF, TcE, Tcy, Tcv, TcT, Tco, TcP, Tcd, TcZ, TcD, Td0, Tck, Td4, TcX, Tcr;
							V TcS;
							{
								V Tcc, TcC, Tcg, Tcj, TcV, TcW;
								TbE = VADD(TbC, TbD);
								Tcc = VSUB(TbC, TbD);
								TcC = VSUB(Tan, Tak);
								Tao = VADD(Tak, Tan);
								TcF = VFNMS(LDK(KP414213562), Tce, Tcf);
								Tcg = VFMA(LDK(KP414213562), Tcf, Tce);
								Tcj = VFNMS(LDK(KP414213562), Tci, Tch);
								TcE = VFMA(LDK(KP414213562), Tch, Tci);
								Tcy = VFNMS(LDK(KP707106781), Tcx, Tcw);
								TcV = VFMA(LDK(KP707106781), Tcx, Tcw);
								TcW = VFMA(LDK(KP707106781), Tcu, Tct);
								Tcv = VFNMS(LDK(KP707106781), Tcu, Tct);
								TcT = VFMA(LDK(KP707106781), Tcn, Tcm);
								Tco = VFNMS(LDK(KP707106781), Tcn, Tcm);
								Tfd = VFMA(LDK(KP923879532), Tec, Te9);
								STM4(&(ro[20]), Tfd, ovs, &(ro[0]));
								Tfe = VFNMS(LDK(KP923879532), Tec, Te9);
								STM4(&(ro[52]), Tfe, ovs, &(ro[0]));
								TcP = VFNMS(LDK(KP707106781), Tcc, Tcb);
								Tcd = VFMA(LDK(KP707106781), Tcc, Tcb);
								TcZ = VFNMS(LDK(KP707106781), TcC, TcB);
								TcD = VFMA(LDK(KP707106781), TcC, TcB);
								Td0 = VADD(Tcg, Tcj);
								Tck = VSUB(Tcg, Tcj);
								Td4 = VFMA(LDK(KP198912367), TcV, TcW);
								TcX = VFNMS(LDK(KP198912367), TcW, TcV);
								Tcr = VFNMS(LDK(KP707106781), Tcq, Tcp);
								TcS = VFMA(LDK(KP707106781), Tcq, Tcp);
							}
							{
								V TcJ, Tcl, TcK, Tcs, TcQ, TcG, Td5, TcU, TcL, Tcz;
								TcJ = VFNMS(LDK(KP923879532), Tck, Tcd);
								Tcl = VFMA(LDK(KP923879532), Tck, Tcd);
								TcK = VFNMS(LDK(KP668178637), Tco, Tcr);
								Tcs = VFMA(LDK(KP668178637), Tcr, Tco);
								TcQ = VADD(TcF, TcE);
								TcG = VSUB(TcE, TcF);
								Td5 = VFNMS(LDK(KP198912367), TcS, TcT);
								TcU = VFMA(LDK(KP198912367), TcT, TcS);
								TcL = VFMA(LDK(KP668178637), Tcv, Tcy);
								Tcz = VFNMS(LDK(KP668178637), Tcy, Tcv);
								{
									V Td1, Td3, TcR, TcN, TcH, Td2, TcY, TcM, TcO, TcI, TcA, Td6;
									Td1 = VFMA(LDK(KP923879532), Td0, TcZ);
									Td3 = VFNMS(LDK(KP923879532), Td0, TcZ);
									TcR = VFNMS(LDK(KP923879532), TcQ, TcP);
									Td7 = VFMA(LDK(KP923879532), TcQ, TcP);
									TcN = VFMA(LDK(KP923879532), TcG, TcD);
									TcH = VFNMS(LDK(KP923879532), TcG, TcD);
									Td2 = VADD(TcU, TcX);
									TcY = VSUB(TcU, TcX);
									TcM = VSUB(TcK, TcL);
									TcO = VADD(TcK, TcL);
									TcI = VSUB(Tcz, Tcs);
									TcA = VADD(Tcs, Tcz);
									Td6 = VSUB(Td4, Td5);
									Td8 = VADD(Td5, Td4);
									Tff = VFMA(LDK(KP980785280), TcY, TcR);
									STM4(&(ro[14]), Tff, ovs, &(ro[0]));
									Tfg = VFNMS(LDK(KP980785280), TcY, TcR);
									STM4(&(ro[46]), Tfg, ovs, &(ro[0]));
									Tfh = VFMA(LDK(KP831469612), TcM, TcJ);
									STM4(&(ro[22]), Tfh, ovs, &(ro[0]));
									Tfi = VFNMS(LDK(KP831469612), TcM, TcJ);
									STM4(&(ro[54]), Tfi, ovs, &(ro[0]));
									Tfj = VFMA(LDK(KP831469612), TcO, TcN);
									STM4(&(io[6]), Tfj, ovs, &(io[0]));
									Tfk = VFNMS(LDK(KP831469612), TcO, TcN);
									STM4(&(io[38]), Tfk, ovs, &(io[0]));
									Tfl = VFMA(LDK(KP831469612), TcI, TcH);
									STM4(&(io[22]), Tfl, ovs, &(io[0]));
									Tfm = VFNMS(LDK(KP831469612), TcI, TcH);
									STM4(&(io[54]), Tfm, ovs, &(io[0]));
									Tfn = VFMA(LDK(KP831469612), TcA, Tcl);
									STM4(&(ro[6]), Tfn, ovs, &(ro[0]));
									Tfo = VFNMS(LDK(KP831469612), TcA, Tcl);
									STM4(&(ro[38]), Tfo, ovs, &(ro[0]));
									Tfp = VFMA(LDK(KP980785280), Td6, Td3);
									STM4(&(io[14]), Tfp, ovs, &(io[0]));
									Tfq = VFNMS(LDK(KP980785280), Td6, Td3);
									STM4(&(io[46]), Tfq, ovs, &(io[0]));
									Tfr = VFNMS(LDK(KP980785280), Td2, Td1);
									STM4(&(io[30]), Tfr, ovs, &(io[0]));
									Tfs = VFMA(LDK(KP980785280), Td2, Td1);
									STM4(&(io[62]), Tfs, ovs, &(io[0]));
								}
							}
						}
					}
					{
						V Tft, Tfu, Tfv, Tfw, Tfx, Tfy, Tfz, TfA, TfB, TfC, TfD, TfE, TfF, TfG, T3f;
						V T66, T63, T3u, TfL, TfM, TfN, TfO, TfP, TfQ, TfR, TfS, TfT, TfU, TfV, TfW;
						V TfX, TfY, TfZ, Tg0, Tc5, Tc8;
						{
							V TbH, TbG, Tbw, Tbn, TbV, TaW, TbR, Tap, Tc1, TbF, Tc2, TaE, Tc7, TbZ, Tb5;
							V TbU;
							{
								V Taw, TaD, TbX, TbY;
								TbH = VFMA(LDK(KP414213562), Tas, Tav);
								Taw = VFNMS(LDK(KP414213562), Tav, Tas);
								TaD = VFMA(LDK(KP414213562), TaC, Taz);
								TbG = VFNMS(LDK(KP414213562), Taz, TaC);
								Tbw = VFNMS(LDK(KP707106781), Tbv, Tbs);
								TbX = VFMA(LDK(KP707106781), Tbv, Tbs);
								TbY = VFMA(LDK(KP707106781), Tbm, Tbb);
								Tbn = VFNMS(LDK(KP707106781), Tbm, Tbb);
								TbV = VFMA(LDK(KP707106781), TaV, TaK);
								TaW = VFNMS(LDK(KP707106781), TaV, TaK);
								Tft = VFMA(LDK(KP980785280), Td8, Td7);
								STM4(&(ro[62]), Tft, ovs, &(ro[0]));
								Tfu = VFNMS(LDK(KP980785280), Td8, Td7);
								STM4(&(ro[30]), Tfu, ovs, &(ro[0]));
								TbR = VFMA(LDK(KP707106781), Tao, Tah);
								Tap = VFNMS(LDK(KP707106781), Tao, Tah);
								Tc1 = VFMA(LDK(KP707106781), TbE, TbB);
								TbF = VFNMS(LDK(KP707106781), TbE, TbB);
								Tc2 = VADD(Taw, TaD);
								TaE = VSUB(Taw, TaD);
								Tc7 = VFMA(LDK(KP198912367), TbX, TbY);
								TbZ = VFNMS(LDK(KP198912367), TbY, TbX);
								Tb5 = VFNMS(LDK(KP707106781), Tb4, Tb1);
								TbU = VFMA(LDK(KP707106781), Tb4, Tb1);
							}
							{
								V TbP, TaF, TbN, Tb6, TbS, TbI, Tc6, TbW, TbM, Tbx;
								TbP = VFNMS(LDK(KP923879532), TaE, Tap);
								TaF = VFMA(LDK(KP923879532), TaE, Tap);
								TbN = VFNMS(LDK(KP668178637), TaW, Tb5);
								Tb6 = VFMA(LDK(KP668178637), Tb5, TaW);
								TbS = VADD(TbH, TbG);
								TbI = VSUB(TbG, TbH);
								Tc6 = VFNMS(LDK(KP198912367), TbU, TbV);
								TbW = VFMA(LDK(KP198912367), TbV, TbU);
								TbM = VFMA(LDK(KP668178637), Tbn, Tbw);
								Tbx = VFNMS(LDK(KP668178637), Tbw, Tbn);
								{
									V Tc3, Tc9, TbT, TbL, TbJ, Tc4, Tc0, TbQ, TbO, TbK, Tby, Tca;
									Tc3 = VFNMS(LDK(KP923879532), Tc2, Tc1);
									Tc9 = VFMA(LDK(KP923879532), Tc2, Tc1);
									TbT = VFMA(LDK(KP923879532), TbS, TbR);
									Tc5 = VFNMS(LDK(KP923879532), TbS, TbR);
									TbL = VFMA(LDK(KP923879532), TbI, TbF);
									TbJ = VFNMS(LDK(KP923879532), TbI, TbF);
									Tc4 = VSUB(TbZ, TbW);
									Tc0 = VADD(TbW, TbZ);
									TbQ = VADD(TbN, TbM);
									TbO = VSUB(TbM, TbN);
									TbK = VADD(Tb6, Tbx);
									Tby = VSUB(Tb6, Tbx);
									Tca = VADD(Tc6, Tc7);
									Tc8 = VSUB(Tc6, Tc7);
									Tfv = VFMA(LDK(KP980785280), Tc0, TbT);
									STM4(&(ro[2]), Tfv, ovs, &(ro[0]));
									Tfw = VFNMS(LDK(KP980785280), Tc0, TbT);
									STM4(&(ro[34]), Tfw, ovs, &(ro[0]));
									Tfx = VFMA(LDK(KP831469612), TbQ, TbP);
									STM4(&(ro[58]), Tfx, ovs, &(ro[0]));
									Tfy = VFNMS(LDK(KP831469612), TbQ, TbP);
									STM4(&(ro[26]), Tfy, ovs, &(ro[0]));
									Tfz = VFMA(LDK(KP831469612), TbO, TbL);
									STM4(&(io[10]), Tfz, ovs, &(io[0]));
									TfA = VFNMS(LDK(KP831469612), TbO, TbL);
									STM4(&(io[42]), TfA, ovs, &(io[0]));
									TfB = VFMA(LDK(KP831469612), TbK, TbJ);
									STM4(&(io[58]), TfB, ovs, &(io[0]));
									TfC = VFNMS(LDK(KP831469612), TbK, TbJ);
									STM4(&(io[26]), TfC, ovs, &(io[0]));
									TfD = VFMA(LDK(KP831469612), Tby, TaF);
									STM4(&(ro[10]), TfD, ovs, &(ro[0]));
									TfE = VFNMS(LDK(KP831469612), Tby, TaF);
									STM4(&(ro[42]), TfE, ovs, &(ro[0]));
									TfF = VFMA(LDK(KP980785280), Tca, Tc9);
									STM4(&(io[2]), TfF, ovs, &(io[0]));
									TfG = VFNMS(LDK(KP980785280), Tca, Tc9);
									STM4(&(io[34]), TfG, ovs, &(io[0]));
									TfH = VFNMS(LDK(KP980785280), Tc4, Tc3);
									STM4(&(io[50]), TfH, ovs, &(io[0]));
									TfI = VFMA(LDK(KP980785280), Tc4, Tc3);
									STM4(&(io[18]), TfI, ovs, &(io[0]));
								}
							}
						}
						{
							V T70, T6X, T7h, T6F, T7x, T7m, T7w, T7p, T7s, T6M, T7c, T6U, T7r, T75, T7i;
							V T78, T7b, T6N;
							{
								V T6T, T6Q, T77, T6I, T6L, T76, T73, T74;
								{
									V T6D, T6E, T7k, T7l, T7n, T7o;
									T3f = VFMA(LDK(KP707106781), T3e, T37);
									T6D = VFNMS(LDK(KP707106781), T3e, T37);
									T6E = VADD(T65, T64);
									T66 = VSUB(T64, T65);
									T6T = VFNMS(LDK(KP923879532), T6S, T6R);
									T7k = VFMA(LDK(KP923879532), T6S, T6R);
									T7l = VFMA(LDK(KP923879532), T6P, T6O);
									T6Q = VFNMS(LDK(KP923879532), T6P, T6O);
									T70 = VFNMS(LDK(KP923879532), T6Z, T6Y);
									T7n = VFMA(LDK(KP923879532), T6Z, T6Y);
									T7o = VFMA(LDK(KP923879532), T6W, T6V);
									T6X = VFNMS(LDK(KP923879532), T6W, T6V);
									T77 = VFNMS(LDK(KP198912367), T6G, T6H);
									T6I = VFMA(LDK(KP198912367), T6H, T6G);
									TfJ = VFMA(LDK(KP980785280), Tc8, Tc5);
									STM4(&(ro[18]), TfJ, ovs, &(ro[0]));
									TfK = VFNMS(LDK(KP980785280), Tc8, Tc5);
									STM4(&(ro[50]), TfK, ovs, &(ro[0]));
									T7h = VFMA(LDK(KP923879532), T6E, T6D);
									T6F = VFNMS(LDK(KP923879532), T6E, T6D);
									T7x = VFNMS(LDK(KP098491403), T7k, T7l);
									T7m = VFMA(LDK(KP098491403), T7l, T7k);
									T7w = VFMA(LDK(KP098491403), T7n, T7o);
									T7p = VFNMS(LDK(KP098491403), T7o, T7n);
									T6L = VFNMS(LDK(KP198912367), T6K, T6J);
									T76 = VFMA(LDK(KP198912367), T6J, T6K);
								}
								T63 = VFMA(LDK(KP707106781), T62, T5Z);
								T73 = VFNMS(LDK(KP707106781), T62, T5Z);
								T74 = VADD(T3m, T3t);
								T3u = VSUB(T3m, T3t);
								T7s = VADD(T6I, T6L);
								T6M = VSUB(T6I, T6L);
								T7c = VFNMS(LDK(KP820678790), T6Q, T6T);
								T6U = VFMA(LDK(KP820678790), T6T, T6Q);
								T7r = VFMA(LDK(KP923879532), T74, T73);
								T75 = VFNMS(LDK(KP923879532), T74, T73);
								T7i = VADD(T77, T76);
								T78 = VSUB(T76, T77);
							}
							T7b = VFNMS(LDK(KP980785280), T6M, T6F);
							T6N = VFMA(LDK(KP980785280), T6M, T6F);
							{
								V T7u, T7q, T7v, T7t, T7A, T7y, T7j, T7z, T7f, T79, T71, T7d;
								T7u = VADD(T7m, T7p);
								T7q = VSUB(T7m, T7p);
								T7v = VFNMS(LDK(KP980785280), T7s, T7r);
								T7t = VFMA(LDK(KP980785280), T7s, T7r);
								T7A = VADD(T7x, T7w);
								T7y = VSUB(T7w, T7x);
								T7j = VFNMS(LDK(KP980785280), T7i, T7h);
								T7z = VFMA(LDK(KP980785280), T7i, T7h);
								T7f = VFMA(LDK(KP980785280), T78, T75);
								T79 = VFNMS(LDK(KP980785280), T78, T75);
								T71 = VFNMS(LDK(KP820678790), T70, T6X);
								T7d = VFMA(LDK(KP820678790), T6X, T70);
								{
									V T7g, T7e, T72, T7a;
									TfL = VFMA(LDK(KP995184726), T7y, T7v);
									STM4(&(io[15]), TfL, ovs, &(io[1]));
									TfM = VFNMS(LDK(KP995184726), T7y, T7v);
									STM4(&(io[47]), TfM, ovs, &(io[1]));
									TfN = VFMA(LDK(KP995184726), T7q, T7j);
									STM4(&(ro[15]), TfN, ovs, &(ro[1]));
									TfO = VFNMS(LDK(KP995184726), T7q, T7j);
									STM4(&(ro[47]), TfO, ovs, &(ro[1]));
									T7g = VADD(T7c, T7d);
									T7e = VSUB(T7c, T7d);
									T72 = VADD(T6U, T71);
									T7a = VSUB(T71, T6U);
									TfP = VFNMS(LDK(KP995184726), T7u, T7t);
									STM4(&(io[31]), TfP, ovs, &(io[1]));
									TfQ = VFMA(LDK(KP995184726), T7u, T7t);
									STM4(&(io[63]), TfQ, ovs, &(io[1]));
									TfR = VFMA(LDK(KP773010453), T7e, T7b);
									STM4(&(ro[23]), TfR, ovs, &(ro[1]));
									TfS = VFNMS(LDK(KP773010453), T7e, T7b);
									STM4(&(ro[55]), TfS, ovs, &(ro[1]));
									TfT = VFMA(LDK(KP773010453), T7g, T7f);
									STM4(&(io[7]), TfT, ovs, &(io[1]));
									TfU = VFNMS(LDK(KP773010453), T7g, T7f);
									STM4(&(io[39]), TfU, ovs, &(io[1]));
									TfV = VFMA(LDK(KP773010453), T7a, T79);
									STM4(&(io[23]), TfV, ovs, &(io[1]));
									TfW = VFNMS(LDK(KP773010453), T7a, T79);
									STM4(&(io[55]), TfW, ovs, &(io[1]));
									TfX = VFMA(LDK(KP773010453), T72, T6N);
									STM4(&(ro[7]), TfX, ovs, &(ro[1]));
									TfY = VFNMS(LDK(KP773010453), T72, T6N);
									STM4(&(ro[39]), TfY, ovs, &(ro[1]));
									TfZ = VFNMS(LDK(KP995184726), T7A, T7z);
									STM4(&(ro[31]), TfZ, ovs, &(ro[1]));
									Tg0 = VFMA(LDK(KP995184726), T7A, T7z);
									STM4(&(ro[63]), Tg0, ovs, &(ro[1]));
								}
							}
						}
						{
							V T7D, T8K, T8H, T7K, Ta8, Ta7, Tae, Tad;
							{
								V T9x, T9u, T9E, T9B, T9L, T9K, T9V, T9j, Tab, Ta0, Taa, Ta3, Ta6, T9q, T9H;
								V T9I;
								{
									V T9h, T9i, T9Y, T9Z, Ta1, Ta2, T9m, T9p;
									T7D = VFMA(LDK(KP707106781), T7C, T7B);
									T9h = VFNMS(LDK(KP707106781), T7C, T7B);
									T9i = VSUB(T8I, T8J);
									T8K = VADD(T8I, T8J);
									T9x = VFNMS(LDK(KP923879532), T9w, T9v);
									T9Y = VFMA(LDK(KP923879532), T9w, T9v);
									T9Z = VFMA(LDK(KP923879532), T9t, T9s);
									T9u = VFNMS(LDK(KP923879532), T9t, T9s);
									T9E = VFNMS(LDK(KP923879532), T9D, T9C);
									Ta1 = VFMA(LDK(KP923879532), T9D, T9C);
									Ta2 = VFMA(LDK(KP923879532), T9A, T9z);
									T9B = VFNMS(LDK(KP923879532), T9A, T9z);
									T9L = VFNMS(LDK(KP668178637), T9k, T9l);
									T9m = VFMA(LDK(KP668178637), T9l, T9k);
									T9p = VFNMS(LDK(KP668178637), T9o, T9n);
									T9K = VFMA(LDK(KP668178637), T9n, T9o);
									T9V = VFNMS(LDK(KP923879532), T9i, T9h);
									T9j = VFMA(LDK(KP923879532), T9i, T9h);
									Tab = VFNMS(LDK(KP303346683), T9Y, T9Z);
									Ta0 = VFMA(LDK(KP303346683), T9Z, T9Y);
									Taa = VFMA(LDK(KP303346683), Ta1, Ta2);
									Ta3 = VFNMS(LDK(KP303346683), Ta2, Ta1);
									Ta6 = VADD(T9m, T9p);
									T9q = VSUB(T9m, T9p);
									T8H = VFMA(LDK(KP707106781), T8G, T8F);
									T9H = VFNMS(LDK(KP707106781), T8G, T8F);
									T9I = VSUB(T7J, T7G);
									T7K = VADD(T7G, T7J);
								}
								{
									V T9P, T9r, T9Q, T9y, Ta5, T9J, T9W, T9M, T9R, T9F;
									T9P = VFNMS(LDK(KP831469612), T9q, T9j);
									T9r = VFMA(LDK(KP831469612), T9q, T9j);
									T9Q = VFNMS(LDK(KP534511135), T9u, T9x);
									T9y = VFMA(LDK(KP534511135), T9x, T9u);
									Ta5 = VFNMS(LDK(KP923879532), T9I, T9H);
									T9J = VFMA(LDK(KP923879532), T9I, T9H);
									T9W = VADD(T9L, T9K);
									T9M = VSUB(T9K, T9L);
									T9R = VFMA(LDK(KP534511135), T9B, T9E);
									T9F = VFNMS(LDK(KP534511135), T9E, T9B);
									{
										V T9T, T9N, T9U, T9S, T9G, T9O;
										{
											V Ta4, Ta9, Tac, T9X;
											Ta8 = VADD(Ta0, Ta3);
											Ta4 = VSUB(Ta0, Ta3);
											Ta9 = VFNMS(LDK(KP831469612), Ta6, Ta5);
											Ta7 = VFMA(LDK(KP831469612), Ta6, Ta5);
											Tae = VADD(Tab, Taa);
											Tac = VSUB(Taa, Tab);
											T9X = VFNMS(LDK(KP831469612), T9W, T9V);
											Tad = VFMA(LDK(KP831469612), T9W, T9V);
|
Chris@10
|
1273 T9T = VFMA(LDK(KP831469612), T9M, T9J);
|
Chris@10
|
1274 T9N = VFNMS(LDK(KP831469612), T9M, T9J);
|
Chris@10
|
1275 T9U = VADD(T9Q, T9R);
|
Chris@10
|
1276 T9S = VSUB(T9Q, T9R);
|
Chris@10
|
1277 T9G = VADD(T9y, T9F);
|
Chris@10
|
1278 T9O = VSUB(T9F, T9y);
|
Chris@10
|
1279 {
|
Chris@10
|
1280 V Tg1, Tg2, Tg3, Tg4;
|
Chris@10
|
1281 Tg1 = VFNMS(LDK(KP956940335), Tac, Ta9);
|
Chris@10
|
1282 STM4(&(io[45]), Tg1, ovs, &(io[1]));
|
Chris@10
|
1283 STN4(&(io[44]), Tf6, Tg1, Tfq, TfM, ovs);
|
Chris@10
|
1284 Tg2 = VFMA(LDK(KP956940335), Ta4, T9X);
|
Chris@10
|
1285 STM4(&(ro[13]), Tg2, ovs, &(ro[1]));
|
Chris@10
|
1286 STN4(&(ro[12]), Tf9, Tg2, Tff, TfN, ovs);
|
Chris@10
|
1287 Tg3 = VFNMS(LDK(KP956940335), Ta4, T9X);
|
Chris@10
|
1288 STM4(&(ro[45]), Tg3, ovs, &(ro[1]));
|
Chris@10
|
1289 STN4(&(ro[44]), Tfa, Tg3, Tfg, TfO, ovs);
|
Chris@10
|
1290 Tg4 = VFMA(LDK(KP956940335), Tac, Ta9);
|
Chris@10
|
1291 STM4(&(io[13]), Tg4, ovs, &(io[1]));
|
Chris@10
|
1292 STN4(&(io[12]), Tf5, Tg4, Tfp, TfL, ovs);
|
Chris@10
|
1293 }
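/* Note: the STM4/STN4 pairs realize the -store-multiple 4 option from the
   genfft command line: STM4 emits the individual strided stores and the
   matching STN4 calls coalesce four unit-stride outputs (ostride 1) into
   one grouped write on SIMD targets that support it. */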
}
{
V Tg5, Tg6, Tg7, Tg8;
Tg5 = VFMA(LDK(KP881921264), T9S, T9P);
STM4(&(ro[21]), Tg5, ovs, &(ro[1]));
STN4(&(ro[20]), Tfd, Tg5, Tfh, TfR, ovs);
Tg6 = VFNMS(LDK(KP881921264), T9S, T9P);
STM4(&(ro[53]), Tg6, ovs, &(ro[1]));
STN4(&(ro[52]), Tfe, Tg6, Tfi, TfS, ovs);
Tg7 = VFMA(LDK(KP881921264), T9U, T9T);
STM4(&(io[5]), Tg7, ovs, &(io[1]));
STN4(&(io[4]), TeZ, Tg7, Tfj, TfT, ovs);
Tg8 = VFNMS(LDK(KP881921264), T9U, T9T);
STM4(&(io[37]), Tg8, ovs, &(io[1]));
STN4(&(io[36]), Tf0, Tg8, Tfk, TfU, ovs);
{
V Tg9, Tga, Tgb, Tgc;
Tg9 = VFMA(LDK(KP881921264), T9O, T9N);
STM4(&(io[21]), Tg9, ovs, &(io[1]));
STN4(&(io[20]), Tfb, Tg9, Tfl, TfV, ovs);
Tga = VFNMS(LDK(KP881921264), T9O, T9N);
STM4(&(io[53]), Tga, ovs, &(io[1]));
STN4(&(io[52]), Tfc, Tga, Tfm, TfW, ovs);
Tgb = VFMA(LDK(KP881921264), T9G, T9r);
STM4(&(ro[5]), Tgb, ovs, &(ro[1]));
STN4(&(ro[4]), Tf1, Tgb, Tfn, TfX, ovs);
Tgc = VFNMS(LDK(KP881921264), T9G, T9r);
STM4(&(ro[37]), Tgc, ovs, &(ro[1]));
STN4(&(ro[36]), Tf2, Tgc, Tfo, TfY, ovs);
}
}
}
}
}
{
V Tgh, Tgi, Tgl, Tgm, Tgn, Tgo, Tgp, Tgq, Tgr, Tgs, Tgt, Tgu;
{
V T5U, T6j, T3v, T6y, T6o, T5H, T69, T68, T6z, T6r, T6u, T48, T6f, T52, T6t;
V T67, T6h, T49;
{
V T51, T4O, T6p, T6q, T3O, T47, T6m, T6n;
T51 = VFNMS(LDK(KP923879532), T50, T4X);
T6m = VFMA(LDK(KP923879532), T50, T4X);
T6n = VFMA(LDK(KP923879532), T4N, T4q);
T4O = VFNMS(LDK(KP923879532), T4N, T4q);
T5U = VFNMS(LDK(KP923879532), T5T, T5Q);
T6p = VFMA(LDK(KP923879532), T5T, T5Q);
{
V Tgd, Tge, Tgf, Tgg;
Tgd = VFMA(LDK(KP956940335), Ta8, Ta7);
STM4(&(io[61]), Tgd, ovs, &(io[1]));
STN4(&(io[60]), Tf7, Tgd, Tfs, TfQ, ovs);
Tge = VFNMS(LDK(KP956940335), Ta8, Ta7);
STM4(&(io[29]), Tge, ovs, &(io[1]));
STN4(&(io[28]), Tf8, Tge, Tfr, TfP, ovs);
Tgf = VFMA(LDK(KP956940335), Tae, Tad);
STM4(&(ro[61]), Tgf, ovs, &(ro[1]));
STN4(&(ro[60]), Tf3, Tgf, Tft, Tg0, ovs);
Tgg = VFNMS(LDK(KP956940335), Tae, Tad);
STM4(&(ro[29]), Tgg, ovs, &(ro[1]));
STN4(&(ro[28]), Tf4, Tgg, Tfu, TfZ, ovs);
T6j = VFMA(LDK(KP923879532), T3u, T3f);
T3v = VFNMS(LDK(KP923879532), T3u, T3f);
T6y = VFNMS(LDK(KP303346683), T6m, T6n);
T6o = VFMA(LDK(KP303346683), T6n, T6m);
T6q = VFMA(LDK(KP923879532), T5G, T5j);
T5H = VFNMS(LDK(KP923879532), T5G, T5j);
}
T69 = VFMA(LDK(KP668178637), T3G, T3N);
T3O = VFNMS(LDK(KP668178637), T3N, T3G);
T47 = VFMA(LDK(KP668178637), T46, T3Z);
T68 = VFNMS(LDK(KP668178637), T3Z, T46);
T6z = VFMA(LDK(KP303346683), T6p, T6q);
T6r = VFNMS(LDK(KP303346683), T6q, T6p);
T6u = VADD(T3O, T47);
T48 = VSUB(T3O, T47);
T6f = VFNMS(LDK(KP534511135), T4O, T51);
T52 = VFMA(LDK(KP534511135), T51, T4O);
T6t = VFMA(LDK(KP923879532), T66, T63);
T67 = VFNMS(LDK(KP923879532), T66, T63);
}
T6h = VFNMS(LDK(KP831469612), T48, T3v);
T49 = VFMA(LDK(KP831469612), T48, T3v);
{
V T6w, T6s, T6B, T6v, T6A, T6C, T6k, T6a, T6e, T5V;
T6w = VSUB(T6r, T6o);
T6s = VADD(T6o, T6r);
T6B = VFMA(LDK(KP831469612), T6u, T6t);
T6v = VFNMS(LDK(KP831469612), T6u, T6t);
T6A = VSUB(T6y, T6z);
T6C = VADD(T6y, T6z);
T6k = VADD(T69, T68);
T6a = VSUB(T68, T69);
T6e = VFMA(LDK(KP534511135), T5H, T5U);
T5V = VFNMS(LDK(KP534511135), T5U, T5H);
Tgh = VFMA(LDK(KP956940335), T6C, T6B);
STM4(&(io[3]), Tgh, ovs, &(io[1]));
Tgi = VFNMS(LDK(KP956940335), T6C, T6B);
STM4(&(io[35]), Tgi, ovs, &(io[1]));
{
V T6l, T6x, T6d, T6b;
T6l = VFMA(LDK(KP831469612), T6k, T6j);
T6x = VFNMS(LDK(KP831469612), T6k, T6j);
T6d = VFMA(LDK(KP831469612), T6a, T67);
T6b = VFNMS(LDK(KP831469612), T6a, T67);
{
V T6g, T6i, T5W, T6c;
T6g = VSUB(T6e, T6f);
T6i = VADD(T6f, T6e);
T5W = VSUB(T52, T5V);
T6c = VADD(T52, T5V);
Tgj = VFMA(LDK(KP956940335), T6w, T6v);
STM4(&(io[19]), Tgj, ovs, &(io[1]));
Tgk = VFNMS(LDK(KP956940335), T6w, T6v);
STM4(&(io[51]), Tgk, ovs, &(io[1]));
Tgl = VFMA(LDK(KP956940335), T6s, T6l);
STM4(&(ro[3]), Tgl, ovs, &(ro[1]));
Tgm = VFNMS(LDK(KP956940335), T6s, T6l);
STM4(&(ro[35]), Tgm, ovs, &(ro[1]));
Tgn = VFMA(LDK(KP881921264), T6i, T6h);
STM4(&(ro[59]), Tgn, ovs, &(ro[1]));
Tgo = VFNMS(LDK(KP881921264), T6i, T6h);
STM4(&(ro[27]), Tgo, ovs, &(ro[1]));
Tgp = VFMA(LDK(KP881921264), T6g, T6d);
STM4(&(io[11]), Tgp, ovs, &(io[1]));
Tgq = VFNMS(LDK(KP881921264), T6g, T6d);
STM4(&(io[43]), Tgq, ovs, &(io[1]));
Tgr = VFMA(LDK(KP881921264), T6c, T6b);
STM4(&(io[59]), Tgr, ovs, &(io[1]));
Tgs = VFNMS(LDK(KP881921264), T6c, T6b);
STM4(&(io[27]), Tgs, ovs, &(io[1]));
Tgt = VFMA(LDK(KP881921264), T5W, T49);
STM4(&(ro[11]), Tgt, ovs, &(ro[1]));
Tgu = VFNMS(LDK(KP881921264), T5W, T49);
STM4(&(ro[43]), Tgu, ovs, &(ro[1]));
Tgv = VFNMS(LDK(KP956940335), T6A, T6x);
STM4(&(ro[51]), Tgv, ovs, &(ro[1]));
Tgw = VFMA(LDK(KP956940335), T6A, T6x);
STM4(&(ro[19]), Tgw, ovs, &(ro[1]));
}
}
}
}
{
V T8j, T8c, T8C, T8v, T8N, T8M, T8X, T7L, T9c, T92, T9d, T95, T98, T80;
{
V T90, T91, T93, T94, T7S, T7Z;
T8j = VFNMS(LDK(KP923879532), T8i, T8f);
T90 = VFMA(LDK(KP923879532), T8i, T8f);
T91 = VFMA(LDK(KP923879532), T8b, T84);
T8c = VFNMS(LDK(KP923879532), T8b, T84);
T8C = VFNMS(LDK(KP923879532), T8B, T8y);
T93 = VFMA(LDK(KP923879532), T8B, T8y);
T94 = VFMA(LDK(KP923879532), T8u, T8n);
T8v = VFNMS(LDK(KP923879532), T8u, T8n);
T8N = VFMA(LDK(KP198912367), T7O, T7R);
T7S = VFNMS(LDK(KP198912367), T7R, T7O);
T7Z = VFMA(LDK(KP198912367), T7Y, T7V);
T8M = VFNMS(LDK(KP198912367), T7V, T7Y);
T8X = VFMA(LDK(KP923879532), T7K, T7D);
T7L = VFNMS(LDK(KP923879532), T7K, T7D);
T9c = VFNMS(LDK(KP098491403), T90, T91);
T92 = VFMA(LDK(KP098491403), T91, T90);
T9d = VFMA(LDK(KP098491403), T93, T94);
T95 = VFNMS(LDK(KP098491403), T94, T93);
T98 = VADD(T7S, T7Z);
T80 = VSUB(T7S, T7Z);
}
{
V T8V, T81, T8T, T8k, T97, T8L, T8Y, T8O, T8S, T8D;
T8V = VFNMS(LDK(KP980785280), T80, T7L);
T81 = VFMA(LDK(KP980785280), T80, T7L);
T8T = VFNMS(LDK(KP820678790), T8c, T8j);
T8k = VFMA(LDK(KP820678790), T8j, T8c);
T97 = VFMA(LDK(KP923879532), T8K, T8H);
T8L = VFNMS(LDK(KP923879532), T8K, T8H);
T8Y = VADD(T8N, T8M);
T8O = VSUB(T8M, T8N);
T8S = VFMA(LDK(KP820678790), T8v, T8C);
T8D = VFNMS(LDK(KP820678790), T8C, T8v);
{
V T8R, T8P, T8U, T8W, T8E, T8Q;
{
V T96, T9f, T9g, T8Z;
T9a = VSUB(T95, T92);
T96 = VADD(T92, T95);
T9f = VFMA(LDK(KP980785280), T98, T97);
T99 = VFNMS(LDK(KP980785280), T98, T97);
T9e = VSUB(T9c, T9d);
T9g = VADD(T9c, T9d);
T8Z = VFMA(LDK(KP980785280), T8Y, T8X);
T9b = VFNMS(LDK(KP980785280), T8Y, T8X);
T8R = VFMA(LDK(KP980785280), T8O, T8L);
T8P = VFNMS(LDK(KP980785280), T8O, T8L);
T8U = VSUB(T8S, T8T);
T8W = VADD(T8T, T8S);
T8E = VSUB(T8k, T8D);
T8Q = VADD(T8k, T8D);
{
V Tgx, Tgy, Tgz, TgA;
Tgx = VFNMS(LDK(KP995184726), T9g, T9f);
STM4(&(io[33]), Tgx, ovs, &(io[1]));
STN4(&(io[32]), TeO, Tgx, TfG, Tgi, ovs);
Tgy = VFMA(LDK(KP995184726), T96, T8Z);
STM4(&(ro[1]), Tgy, ovs, &(ro[1]));
STN4(&(ro[0]), TeL, Tgy, Tfv, Tgl, ovs);
Tgz = VFNMS(LDK(KP995184726), T96, T8Z);
STM4(&(ro[33]), Tgz, ovs, &(ro[1]));
STN4(&(ro[32]), TeM, Tgz, Tfw, Tgm, ovs);
TgA = VFMA(LDK(KP995184726), T9g, T9f);
STM4(&(io[1]), TgA, ovs, &(io[1]));
STN4(&(io[0]), TeN, TgA, TfF, Tgh, ovs);
}
}
{
V TgB, TgC, TgD, TgE;
TgB = VFMA(LDK(KP773010453), T8W, T8V);
STM4(&(ro[57]), TgB, ovs, &(ro[1]));
STN4(&(ro[56]), TeS, TgB, Tfx, Tgn, ovs);
TgC = VFNMS(LDK(KP773010453), T8W, T8V);
STM4(&(ro[25]), TgC, ovs, &(ro[1]));
STN4(&(ro[24]), TeR, TgC, Tfy, Tgo, ovs);
TgD = VFMA(LDK(KP773010453), T8U, T8R);
STM4(&(io[9]), TgD, ovs, &(io[1]));
STN4(&(io[8]), TeT, TgD, Tfz, Tgp, ovs);
TgE = VFNMS(LDK(KP773010453), T8U, T8R);
STM4(&(io[41]), TgE, ovs, &(io[1]));
STN4(&(io[40]), TeU, TgE, TfA, Tgq, ovs);
{
V TgF, TgG, TgH, TgI;
TgF = VFMA(LDK(KP773010453), T8Q, T8P);
STM4(&(io[57]), TgF, ovs, &(io[1]));
STN4(&(io[56]), TeW, TgF, TfB, Tgr, ovs);
TgG = VFNMS(LDK(KP773010453), T8Q, T8P);
STM4(&(io[25]), TgG, ovs, &(io[1]));
STN4(&(io[24]), TeV, TgG, TfC, Tgs, ovs);
TgH = VFMA(LDK(KP773010453), T8E, T81);
STM4(&(ro[9]), TgH, ovs, &(ro[1]));
STN4(&(ro[8]), TeX, TgH, TfD, Tgt, ovs);
TgI = VFNMS(LDK(KP773010453), T8E, T81);
STM4(&(ro[41]), TgI, ovs, &(ro[1]));
STN4(&(ro[40]), TeY, TgI, TfE, Tgu, ovs);
}
}
}
}
}
}
}
}
}
}
{
V TgJ, TgK, TgL, TgM;
TgJ = VFMA(LDK(KP995184726), T9a, T99);
STM4(&(io[17]), TgJ, ovs, &(io[1]));
STN4(&(io[16]), TeQ, TgJ, TfI, Tgj, ovs);
TgK = VFNMS(LDK(KP995184726), T9a, T99);
STM4(&(io[49]), TgK, ovs, &(io[1]));
STN4(&(io[48]), TeP, TgK, TfH, Tgk, ovs);
TgL = VFMA(LDK(KP995184726), T9e, T9b);
STM4(&(ro[17]), TgL, ovs, &(ro[1]));
STN4(&(ro[16]), TeK, TgL, TfJ, Tgw, ovs);
TgM = VFNMS(LDK(KP995184726), T9e, T9b);
STM4(&(ro[49]), TgM, ovs, &(ro[1]));
STN4(&(ro[48]), TeJ, TgM, TfK, Tgv, ovs);
}
}
}
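/* VLEAVE() is the SIMD epilogue hook; on most ISAs it expands to nothing
   (assumption: on AVX-style targets it issues the upper-register cleanup). */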
VLEAVE();
}

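/* Codelet descriptor: a size-64 DFT kernel named "n2sv_64". The opcount
   initializer {520, 0, 392, 0} mirrors the header comment above: 520
   additions, 0 multiplications, 392 fused multiply/adds. */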
static const kdft_desc desc = { 64, XSIMD_STRING("n2sv_64"), {520, 0, 392, 0}, &GENUS, 0, 1, 0, 0 };

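/* Registration entry point: hands the kernel and its descriptor to the
   planner via the generic kdft registration helper. */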
void XSIMD(codelet_n2sv_64) (planner *p) {
X(kdft_register) (p, n2sv_64, &desc);
}

#else /* HAVE_FMA */

/* Generated by: ../../../genfft/gen_notw.native -simd -compact -variables 4 -pipeline-latency 8 -n 64 -name n2sv_64 -with-ostride 1 -include n2s.h -store-multiple 4 */

/*
 * This function contains 912 FP additions, 248 FP multiplications,
 * (or, 808 additions, 144 multiplications, 104 fused multiply/add),
 * 260 stack variables, 15 constants, and 288 memory accesses
 */
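/* Non-FMA variant of the same transform: multiplications are kept as
   separate VMULs combined with VFMA/VFNMS where profitable, hence the
   different operation mix relative to the HAVE_FMA version above. */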
#include "n2s.h"

static void n2sv_64(const R *ri, const R *ii, R *ro, R *io, stride is, stride os, INT v, INT ivs, INT ovs)
{
DVK(KP773010453, +0.773010453362736960810906609758469800971041293);
DVK(KP634393284, +0.634393284163645498215171613225493370675687095);
DVK(KP098017140, +0.098017140329560601994195563888641845861136673);
DVK(KP995184726, +0.995184726672196886244836953109479921575474869);
DVK(KP881921264, +0.881921264348355029712756863660388349508442621);
DVK(KP471396736, +0.471396736825997648556387625905254377657460319);
DVK(KP290284677, +0.290284677254462367636192375817395274691476278);
DVK(KP956940335, +0.956940335732208864935797886980269969482849206);
DVK(KP831469612, +0.831469612302545237078788377617905756738560812);
DVK(KP555570233, +0.555570233019602224742830813948532874374937191);
DVK(KP195090322, +0.195090322016128267848284868477022240927691618);
DVK(KP980785280, +0.980785280403230449126182236134239036973933731);
DVK(KP923879532, +0.923879532511286756128183189396788286822416626);
DVK(KP382683432, +0.382683432365089771728459984030398866761344562);
DVK(KP707106781, +0.707106781186547524400844362104849039284835938);
{
INT i;
for (i = v; i > 0; i = i - (2 * VL), ri = ri + ((2 * VL) * ivs), ii = ii + ((2 * VL) * ivs), ro = ro + ((2 * VL) * ovs), io = io + ((2 * VL) * ovs), MAKE_VOLATILE_STRIDE(256, is), MAKE_VOLATILE_STRIDE(256, os)) {
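/* Each pass of this loop transforms 2*VL independent problems at once,
   advancing the input and output pointers by 2*VL vector strides. */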
V T37, T7B, T8F, T5Z, Tf, Td9, TbB, TcB, T62, T7C, T2i, TdH, Tah, Tcb, T3e;
V T8G, Tu, TdI, Tak, TbD, Tan, TbC, T2x, Tda, T3m, T65, T7G, T8J, T7J, T8I;
V T3t, T64, TK, Tdd, Tas, Tce, Tav, Tcf, T2N, Tdc, T3G, T6G, T7O, T9k, T7R;
V T9l, T3N, T6H, T1L, Tdv, Tbs, Tcw, TdC, Teo, T5j, T6V, T5Q, T6Y, T8y, T9C;
V Tbb, Tct, T8n, T9z, TZ, Tdf, Taz, Tch, TaC, Tci, T32, Tdg, T3Z, T6J, T7V;
V T9n, T7Y, T9o, T46, T6K, T1g, Tdp, Tb1, Tcm, Tdm, Tej, T4q, T6R, T4X, T6O;
V T8f, T9s, TaK, Tcp, T84, T9v, T1v, Tdn, Tb4, Tcq, Tds, Tek, T4N, T6P, T50;
V T6S, T8i, T9w, TaV, Tcn, T8b, T9t, T20, TdD, Tbv, Tcu, Tdy, Tep, T5G, T6Z;
V T5T, T6W, T8B, T9A, Tbm, Tcx, T8u, T9D;
{
V T3, T35, T26, T5Y, T6, T5X, T29, T36, Ta, T39, T2d, T38, Td, T3b, T2g;
V T3c;
{
V T1, T2, T24, T25;
T1 = LD(&(ri[0]), ivs, &(ri[0]));
T2 = LD(&(ri[WS(is, 32)]), ivs, &(ri[0]));
T3 = VADD(T1, T2);
T35 = VSUB(T1, T2);
T24 = LD(&(ii[0]), ivs, &(ii[0]));
T25 = LD(&(ii[WS(is, 32)]), ivs, &(ii[0]));
T26 = VADD(T24, T25);
T5Y = VSUB(T24, T25);
}
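/* T3/T35 and T26/T5Y above are the first radix-2 butterflies: sums and
   differences of elements 0 and 32 of the real and imaginary inputs. */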
{
V T4, T5, T27, T28;
T4 = LD(&(ri[WS(is, 16)]), ivs, &(ri[0]));
T5 = LD(&(ri[WS(is, 48)]), ivs, &(ri[0]));
T6 = VADD(T4, T5);
T5X = VSUB(T4, T5);
T27 = LD(&(ii[WS(is, 16)]), ivs, &(ii[0]));
T28 = LD(&(ii[WS(is, 48)]), ivs, &(ii[0]));
T29 = VADD(T27, T28);
T36 = VSUB(T27, T28);
}
{
V T8, T9, T2b, T2c;
T8 = LD(&(ri[WS(is, 8)]), ivs, &(ri[0]));
T9 = LD(&(ri[WS(is, 40)]), ivs, &(ri[0]));
Ta = VADD(T8, T9);
T39 = VSUB(T8, T9);
T2b = LD(&(ii[WS(is, 8)]), ivs, &(ii[0]));
T2c = LD(&(ii[WS(is, 40)]), ivs, &(ii[0]));
T2d = VADD(T2b, T2c);
T38 = VSUB(T2b, T2c);
}
{
V Tb, Tc, T2e, T2f;
Tb = LD(&(ri[WS(is, 56)]), ivs, &(ri[0]));
Tc = LD(&(ri[WS(is, 24)]), ivs, &(ri[0]));
Td = VADD(Tb, Tc);
T3b = VSUB(Tb, Tc);
T2e = LD(&(ii[WS(is, 56)]), ivs, &(ii[0]));
T2f = LD(&(ii[WS(is, 24)]), ivs, &(ii[0]));
T2g = VADD(T2e, T2f);
T3c = VSUB(T2e, T2f);
}
{
V T7, Te, T2a, T2h;
T37 = VSUB(T35, T36);
T7B = VADD(T35, T36);
T8F = VSUB(T5Y, T5X);
T5Z = VADD(T5X, T5Y);
T7 = VADD(T3, T6);
Te = VADD(Ta, Td);
Tf = VADD(T7, Te);
Td9 = VSUB(T7, Te);
{
V Tbz, TbA, T60, T61;
Tbz = VSUB(T26, T29);
TbA = VSUB(Td, Ta);
TbB = VSUB(Tbz, TbA);
TcB = VADD(TbA, Tbz);
T60 = VSUB(T3b, T3c);
T61 = VADD(T39, T38);
T62 = VMUL(LDK(KP707106781), VSUB(T60, T61));
T7C = VMUL(LDK(KP707106781), VADD(T61, T60));
}
T2a = VADD(T26, T29);
T2h = VADD(T2d, T2g);
T2i = VADD(T2a, T2h);
TdH = VSUB(T2a, T2h);
{
V Taf, Tag, T3a, T3d;
Taf = VSUB(T3, T6);
Tag = VSUB(T2d, T2g);
Tah = VSUB(Taf, Tag);
Tcb = VADD(Taf, Tag);
T3a = VSUB(T38, T39);
T3d = VADD(T3b, T3c);
T3e = VMUL(LDK(KP707106781), VSUB(T3a, T3d));
T8G = VMUL(LDK(KP707106781), VADD(T3a, T3d));
}
}
}
{
V Ti, T3j, T2l, T3h, Tl, T3g, T2o, T3k, Tp, T3q, T2s, T3o, Ts, T3n, T2v;
V T3r;
{
V Tg, Th, T2j, T2k;
Tg = LD(&(ri[WS(is, 4)]), ivs, &(ri[0]));
Th = LD(&(ri[WS(is, 36)]), ivs, &(ri[0]));
Ti = VADD(Tg, Th);
T3j = VSUB(Tg, Th);
T2j = LD(&(ii[WS(is, 4)]), ivs, &(ii[0]));
T2k = LD(&(ii[WS(is, 36)]), ivs, &(ii[0]));
T2l = VADD(T2j, T2k);
T3h = VSUB(T2j, T2k);
}
{
V Tj, Tk, T2m, T2n;
Tj = LD(&(ri[WS(is, 20)]), ivs, &(ri[0]));
Tk = LD(&(ri[WS(is, 52)]), ivs, &(ri[0]));
Tl = VADD(Tj, Tk);
T3g = VSUB(Tj, Tk);
T2m = LD(&(ii[WS(is, 20)]), ivs, &(ii[0]));
T2n = LD(&(ii[WS(is, 52)]), ivs, &(ii[0]));
T2o = VADD(T2m, T2n);
T3k = VSUB(T2m, T2n);
}
{
V Tn, To, T2q, T2r;
Tn = LD(&(ri[WS(is, 60)]), ivs, &(ri[0]));
To = LD(&(ri[WS(is, 28)]), ivs, &(ri[0]));
Tp = VADD(Tn, To);
T3q = VSUB(Tn, To);
T2q = LD(&(ii[WS(is, 60)]), ivs, &(ii[0]));
T2r = LD(&(ii[WS(is, 28)]), ivs, &(ii[0]));
T2s = VADD(T2q, T2r);
T3o = VSUB(T2q, T2r);
}
{
V Tq, Tr, T2t, T2u;
Tq = LD(&(ri[WS(is, 12)]), ivs, &(ri[0]));
Tr = LD(&(ri[WS(is, 44)]), ivs, &(ri[0]));
Ts = VADD(Tq, Tr);
T3n = VSUB(Tq, Tr);
T2t = LD(&(ii[WS(is, 12)]), ivs, &(ii[0]));
T2u = LD(&(ii[WS(is, 44)]), ivs, &(ii[0]));
T2v = VADD(T2t, T2u);
T3r = VSUB(T2t, T2u);
}
{
V Tm, Tt, Tai, Taj;
Tm = VADD(Ti, Tl);
Tt = VADD(Tp, Ts);
Tu = VADD(Tm, Tt);
TdI = VSUB(Tt, Tm);
Tai = VSUB(T2l, T2o);
Taj = VSUB(Ti, Tl);
Tak = VSUB(Tai, Taj);
TbD = VADD(Taj, Tai);
}
{
V Tal, Tam, T2p, T2w;
Tal = VSUB(Tp, Ts);
Tam = VSUB(T2s, T2v);
Tan = VADD(Tal, Tam);
TbC = VSUB(Tal, Tam);
T2p = VADD(T2l, T2o);
T2w = VADD(T2s, T2v);
T2x = VADD(T2p, T2w);
Tda = VSUB(T2p, T2w);
}
{
V T3i, T3l, T7E, T7F;
T3i = VADD(T3g, T3h);
T3l = VSUB(T3j, T3k);
T3m = VFNMS(LDK(KP923879532), T3l, VMUL(LDK(KP382683432), T3i));
T65 = VFMA(LDK(KP923879532), T3i, VMUL(LDK(KP382683432), T3l));
T7E = VSUB(T3h, T3g);
T7F = VADD(T3j, T3k);
T7G = VFNMS(LDK(KP382683432), T7F, VMUL(LDK(KP923879532), T7E));
T8J = VFMA(LDK(KP382683432), T7E, VMUL(LDK(KP923879532), T7F));
}
{
V T7H, T7I, T3p, T3s;
T7H = VSUB(T3o, T3n);
T7I = VADD(T3q, T3r);
T7J = VFMA(LDK(KP923879532), T7H, VMUL(LDK(KP382683432), T7I));
T8I = VFNMS(LDK(KP382683432), T7H, VMUL(LDK(KP923879532), T7I));
T3p = VADD(T3n, T3o);
T3s = VSUB(T3q, T3r);
T3t = VFMA(LDK(KP382683432), T3p, VMUL(LDK(KP923879532), T3s));
T64 = VFNMS(LDK(KP923879532), T3p, VMUL(LDK(KP382683432), T3s));
}
}
{
V Ty, T3H, T2B, T3x, TB, T3w, T2E, T3I, TI, T3L, T2L, T3B, TF, T3K, T2I;
V T3E;
{
V Tw, Tx, T2C, T2D;
Tw = LD(&(ri[WS(is, 2)]), ivs, &(ri[0]));
Tx = LD(&(ri[WS(is, 34)]), ivs, &(ri[0]));
Ty = VADD(Tw, Tx);
T3H = VSUB(Tw, Tx);
{
V T2z, T2A, Tz, TA;
T2z = LD(&(ii[WS(is, 2)]), ivs, &(ii[0]));
T2A = LD(&(ii[WS(is, 34)]), ivs, &(ii[0]));
T2B = VADD(T2z, T2A);
T3x = VSUB(T2z, T2A);
Tz = LD(&(ri[WS(is, 18)]), ivs, &(ri[0]));
TA = LD(&(ri[WS(is, 50)]), ivs, &(ri[0]));
TB = VADD(Tz, TA);
T3w = VSUB(Tz, TA);
}
T2C = LD(&(ii[WS(is, 18)]), ivs, &(ii[0]));
T2D = LD(&(ii[WS(is, 50)]), ivs, &(ii[0]));
T2E = VADD(T2C, T2D);
T3I = VSUB(T2C, T2D);
{
V TG, TH, T3z, T2J, T2K, T3A;
TG = LD(&(ri[WS(is, 58)]), ivs, &(ri[0]));
TH = LD(&(ri[WS(is, 26)]), ivs, &(ri[0]));
T3z = VSUB(TG, TH);
T2J = LD(&(ii[WS(is, 58)]), ivs, &(ii[0]));
T2K = LD(&(ii[WS(is, 26)]), ivs, &(ii[0]));
T3A = VSUB(T2J, T2K);
TI = VADD(TG, TH);
T3L = VADD(T3z, T3A);
T2L = VADD(T2J, T2K);
T3B = VSUB(T3z, T3A);
}
{
V TD, TE, T3C, T2G, T2H, T3D;
TD = LD(&(ri[WS(is, 10)]), ivs, &(ri[0]));
TE = LD(&(ri[WS(is, 42)]), ivs, &(ri[0]));
T3C = VSUB(TD, TE);
T2G = LD(&(ii[WS(is, 10)]), ivs, &(ii[0]));
T2H = LD(&(ii[WS(is, 42)]), ivs, &(ii[0]));
T3D = VSUB(T2G, T2H);
TF = VADD(TD, TE);
T3K = VSUB(T3D, T3C);
T2I = VADD(T2G, T2H);
T3E = VADD(T3C, T3D);
}
}
{
V TC, TJ, Taq, Tar;
TC = VADD(Ty, TB);
TJ = VADD(TF, TI);
TK = VADD(TC, TJ);
Tdd = VSUB(TC, TJ);
Taq = VSUB(T2B, T2E);
Tar = VSUB(TI, TF);
Tas = VSUB(Taq, Tar);
Tce = VADD(Tar, Taq);
}
{
V Tat, Tau, T2F, T2M;
Tat = VSUB(Ty, TB);
Tau = VSUB(T2I, T2L);
Tav = VSUB(Tat, Tau);
Tcf = VADD(Tat, Tau);
T2F = VADD(T2B, T2E);
T2M = VADD(T2I, T2L);
T2N = VADD(T2F, T2M);
Tdc = VSUB(T2F, T2M);
}
{
V T3y, T3F, T7M, T7N;
T3y = VADD(T3w, T3x);
T3F = VMUL(LDK(KP707106781), VSUB(T3B, T3E));
T3G = VSUB(T3y, T3F);
T6G = VADD(T3y, T3F);
T7M = VSUB(T3x, T3w);
T7N = VMUL(LDK(KP707106781), VADD(T3K, T3L));
T7O = VSUB(T7M, T7N);
T9k = VADD(T7M, T7N);
}
{
V T7P, T7Q, T3J, T3M;
T7P = VADD(T3H, T3I);
T7Q = VMUL(LDK(KP707106781), VADD(T3E, T3B));
T7R = VSUB(T7P, T7Q);
T9l = VADD(T7P, T7Q);
T3J = VSUB(T3H, T3I);
T3M = VMUL(LDK(KP707106781), VSUB(T3K, T3L));
T3N = VSUB(T3J, T3M);
T6H = VADD(T3J, T3M);
}
}
{
V T1z, T53, T5L, Tbo, T1C, T5I, T56, Tbp, T1J, Tb9, T5h, T5N, T1G, Tb8, T5c;
V T5O;
{
V T1x, T1y, T54, T55;
T1x = LD(&(ri[WS(is, 63)]), ivs, &(ri[WS(is, 1)]));
T1y = LD(&(ri[WS(is, 31)]), ivs, &(ri[WS(is, 1)]));
T1z = VADD(T1x, T1y);
T53 = VSUB(T1x, T1y);
{
V T5J, T5K, T1A, T1B;
T5J = LD(&(ii[WS(is, 63)]), ivs, &(ii[WS(is, 1)]));
T5K = LD(&(ii[WS(is, 31)]), ivs, &(ii[WS(is, 1)]));
T5L = VSUB(T5J, T5K);
Tbo = VADD(T5J, T5K);
T1A = LD(&(ri[WS(is, 15)]), ivs, &(ri[WS(is, 1)]));
T1B = LD(&(ri[WS(is, 47)]), ivs, &(ri[WS(is, 1)]));
T1C = VADD(T1A, T1B);
T5I = VSUB(T1A, T1B);
}
T54 = LD(&(ii[WS(is, 15)]), ivs, &(ii[WS(is, 1)]));
T55 = LD(&(ii[WS(is, 47)]), ivs, &(ii[WS(is, 1)]));
T56 = VSUB(T54, T55);
Tbp = VADD(T54, T55);
{
V T1H, T1I, T5d, T5e, T5f, T5g;
T1H = LD(&(ri[WS(is, 55)]), ivs, &(ri[WS(is, 1)]));
T1I = LD(&(ri[WS(is, 23)]), ivs, &(ri[WS(is, 1)]));
T5d = VSUB(T1H, T1I);
T5e = LD(&(ii[WS(is, 55)]), ivs, &(ii[WS(is, 1)]));
T5f = LD(&(ii[WS(is, 23)]), ivs, &(ii[WS(is, 1)]));
T5g = VSUB(T5e, T5f);
T1J = VADD(T1H, T1I);
Tb9 = VADD(T5e, T5f);
T5h = VADD(T5d, T5g);
T5N = VSUB(T5d, T5g);
}
{
V T1E, T1F, T5b, T58, T59, T5a;
T1E = LD(&(ri[WS(is, 7)]), ivs, &(ri[WS(is, 1)]));
T1F = LD(&(ri[WS(is, 39)]), ivs, &(ri[WS(is, 1)]));
T5b = VSUB(T1E, T1F);
T58 = LD(&(ii[WS(is, 7)]), ivs, &(ii[WS(is, 1)]));
T59 = LD(&(ii[WS(is, 39)]), ivs, &(ii[WS(is, 1)]));
T5a = VSUB(T58, T59);
T1G = VADD(T1E, T1F);
Tb8 = VADD(T58, T59);
T5c = VSUB(T5a, T5b);
T5O = VADD(T5b, T5a);
}
}
{
V T1D, T1K, Tbq, Tbr;
T1D = VADD(T1z, T1C);
T1K = VADD(T1G, T1J);
T1L = VADD(T1D, T1K);
Tdv = VSUB(T1D, T1K);
Tbq = VSUB(Tbo, Tbp);
Tbr = VSUB(T1J, T1G);
Tbs = VSUB(Tbq, Tbr);
Tcw = VADD(Tbr, Tbq);
}
{
V TdA, TdB, T57, T5i;
TdA = VADD(Tbo, Tbp);
TdB = VADD(Tb8, Tb9);
TdC = VSUB(TdA, TdB);
Teo = VADD(TdA, TdB);
T57 = VSUB(T53, T56);
T5i = VMUL(LDK(KP707106781), VSUB(T5c, T5h));
T5j = VSUB(T57, T5i);
T6V = VADD(T57, T5i);
}
{
V T5M, T5P, T8w, T8x;
T5M = VADD(T5I, T5L);
T5P = VMUL(LDK(KP707106781), VSUB(T5N, T5O));
T5Q = VSUB(T5M, T5P);
T6Y = VADD(T5M, T5P);
T8w = VSUB(T5L, T5I);
T8x = VMUL(LDK(KP707106781), VADD(T5c, T5h));
T8y = VSUB(T8w, T8x);
T9C = VADD(T8w, T8x);
}
{
V Tb7, Tba, T8l, T8m;
Tb7 = VSUB(T1z, T1C);
Tba = VSUB(Tb8, Tb9);
Tbb = VSUB(Tb7, Tba);
Tct = VADD(Tb7, Tba);
T8l = VADD(T53, T56);
T8m = VMUL(LDK(KP707106781), VADD(T5O, T5N));
T8n = VSUB(T8l, T8m);
T9z = VADD(T8l, T8m);
}
}
{
V TN, T40, T2Q, T3Q, TQ, T3P, T2T, T41, TX, T44, T30, T3U, TU, T43, T2X;
V T3X;
{
V TL, TM, T2R, T2S;
TL = LD(&(ri[WS(is, 62)]), ivs, &(ri[0]));
TM = LD(&(ri[WS(is, 30)]), ivs, &(ri[0]));
TN = VADD(TL, TM);
T40 = VSUB(TL, TM);
{
V T2O, T2P, TO, TP;
T2O = LD(&(ii[WS(is, 62)]), ivs, &(ii[0]));
T2P = LD(&(ii[WS(is, 30)]), ivs, &(ii[0]));
T2Q = VADD(T2O, T2P);
T3Q = VSUB(T2O, T2P);
TO = LD(&(ri[WS(is, 14)]), ivs, &(ri[0]));
TP = LD(&(ri[WS(is, 46)]), ivs, &(ri[0]));
TQ = VADD(TO, TP);
T3P = VSUB(TO, TP);
}
T2R = LD(&(ii[WS(is, 14)]), ivs, &(ii[0]));
T2S = LD(&(ii[WS(is, 46)]), ivs, &(ii[0]));
T2T = VADD(T2R, T2S);
T41 = VSUB(T2R, T2S);
{
V TV, TW, T3S, T2Y, T2Z, T3T;
TV = LD(&(ri[WS(is, 54)]), ivs, &(ri[0]));
TW = LD(&(ri[WS(is, 22)]), ivs, &(ri[0]));
T3S = VSUB(TV, TW);
T2Y = LD(&(ii[WS(is, 54)]), ivs, &(ii[0]));
T2Z = LD(&(ii[WS(is, 22)]), ivs, &(ii[0]));
T3T = VSUB(T2Y, T2Z);
TX = VADD(TV, TW);
T44 = VADD(T3S, T3T);
T30 = VADD(T2Y, T2Z);
T3U = VSUB(T3S, T3T);
}
{
V TS, TT, T3V, T2V, T2W, T3W;
TS = LD(&(ri[WS(is, 6)]), ivs, &(ri[0]));
TT = LD(&(ri[WS(is, 38)]), ivs, &(ri[0]));
T3V = VSUB(TS, TT);
T2V = LD(&(ii[WS(is, 6)]), ivs, &(ii[0]));
T2W = LD(&(ii[WS(is, 38)]), ivs, &(ii[0]));
T3W = VSUB(T2V, T2W);
TU = VADD(TS, TT);
T43 = VSUB(T3W, T3V);
T2X = VADD(T2V, T2W);
T3X = VADD(T3V, T3W);
}
}
{
V TR, TY, Tax, Tay;
TR = VADD(TN, TQ);
TY = VADD(TU, TX);
TZ = VADD(TR, TY);
Tdf = VSUB(TR, TY);
Tax = VSUB(T2Q, T2T);
Tay = VSUB(TX, TU);
Taz = VSUB(Tax, Tay);
Tch = VADD(Tay, Tax);
}
{
V TaA, TaB, T2U, T31;
TaA = VSUB(TN, TQ);
TaB = VSUB(T2X, T30);
TaC = VSUB(TaA, TaB);
Tci = VADD(TaA, TaB);
T2U = VADD(T2Q, T2T);
T31 = VADD(T2X, T30);
T32 = VADD(T2U, T31);
Tdg = VSUB(T2U, T31);
}
{
V T3R, T3Y, T7T, T7U;
T3R = VADD(T3P, T3Q);
T3Y = VMUL(LDK(KP707106781), VSUB(T3U, T3X));
T3Z = VSUB(T3R, T3Y);
T6J = VADD(T3R, T3Y);
T7T = VADD(T40, T41);
T7U = VMUL(LDK(KP707106781), VADD(T3X, T3U));
T7V = VSUB(T7T, T7U);
T9n = VADD(T7T, T7U);
}
{
V T7W, T7X, T42, T45;
T7W = VSUB(T3Q, T3P);
T7X = VMUL(LDK(KP707106781), VADD(T43, T44));
T7Y = VSUB(T7W, T7X);
T9o = VADD(T7W, T7X);
T42 = VSUB(T40, T41);
T45 = VMUL(LDK(KP707106781), VSUB(T43, T44));
T46 = VSUB(T42, T45);
T6K = VADD(T42, T45);
}
}
{
V T14, T4P, T4d, TaG, T17, T4a, T4S, TaH, T1e, TaZ, T4j, T4V, T1b, TaY, T4o;
V T4U;
{
V T12, T13, T4Q, T4R;
T12 = LD(&(ri[WS(is, 1)]), ivs, &(ri[WS(is, 1)]));
T13 = LD(&(ri[WS(is, 33)]), ivs, &(ri[WS(is, 1)]));
T14 = VADD(T12, T13);
T4P = VSUB(T12, T13);
{
V T4b, T4c, T15, T16;
T4b = LD(&(ii[WS(is, 1)]), ivs, &(ii[WS(is, 1)]));
T4c = LD(&(ii[WS(is, 33)]), ivs, &(ii[WS(is, 1)]));
T4d = VSUB(T4b, T4c);
TaG = VADD(T4b, T4c);
T15 = LD(&(ri[WS(is, 17)]), ivs, &(ri[WS(is, 1)]));
T16 = LD(&(ri[WS(is, 49)]), ivs, &(ri[WS(is, 1)]));
T17 = VADD(T15, T16);
T4a = VSUB(T15, T16);
}
T4Q = LD(&(ii[WS(is, 17)]), ivs, &(ii[WS(is, 1)]));
T4R = LD(&(ii[WS(is, 49)]), ivs, &(ii[WS(is, 1)]));
T4S = VSUB(T4Q, T4R);
TaH = VADD(T4Q, T4R);
{
V T1c, T1d, T4f, T4g, T4h, T4i;
T1c = LD(&(ri[WS(is, 57)]), ivs, &(ri[WS(is, 1)]));
T1d = LD(&(ri[WS(is, 25)]), ivs, &(ri[WS(is, 1)]));
T4f = VSUB(T1c, T1d);
T4g = LD(&(ii[WS(is, 57)]), ivs, &(ii[WS(is, 1)]));
T4h = LD(&(ii[WS(is, 25)]), ivs, &(ii[WS(is, 1)]));
T4i = VSUB(T4g, T4h);
T1e = VADD(T1c, T1d);
TaZ = VADD(T4g, T4h);
T4j = VSUB(T4f, T4i);
T4V = VADD(T4f, T4i);
}
{
V T19, T1a, T4k, T4l, T4m, T4n;
T19 = LD(&(ri[WS(is, 9)]), ivs, &(ri[WS(is, 1)]));
T1a = LD(&(ri[WS(is, 41)]), ivs, &(ri[WS(is, 1)]));
T4k = VSUB(T19, T1a);
T4l = LD(&(ii[WS(is, 9)]), ivs, &(ii[WS(is, 1)]));
T4m = LD(&(ii[WS(is, 41)]), ivs, &(ii[WS(is, 1)]));
T4n = VSUB(T4l, T4m);
T1b = VADD(T19, T1a);
TaY = VADD(T4l, T4m);
T4o = VADD(T4k, T4n);
T4U = VSUB(T4n, T4k);
}
}
{
V T18, T1f, TaX, Tb0;
T18 = VADD(T14, T17);
T1f = VADD(T1b, T1e);
T1g = VADD(T18, T1f);
Tdp = VSUB(T18, T1f);
TaX = VSUB(T14, T17);
Tb0 = VSUB(TaY, TaZ);
Tb1 = VSUB(TaX, Tb0);
Tcm = VADD(TaX, Tb0);
}
{
V Tdk, Tdl, T4e, T4p;
Tdk = VADD(TaG, TaH);
Tdl = VADD(TaY, TaZ);
Tdm = VSUB(Tdk, Tdl);
Tej = VADD(Tdk, Tdl);
T4e = VADD(T4a, T4d);
T4p = VMUL(LDK(KP707106781), VSUB(T4j, T4o));
T4q = VSUB(T4e, T4p);
T6R = VADD(T4e, T4p);
}
{
V T4T, T4W, T8d, T8e;
T4T = VSUB(T4P, T4S);
T4W = VMUL(LDK(KP707106781), VSUB(T4U, T4V));
T4X = VSUB(T4T, T4W);
T6O = VADD(T4T, T4W);
T8d = VADD(T4P, T4S);
T8e = VMUL(LDK(KP707106781), VADD(T4o, T4j));
T8f = VSUB(T8d, T8e);
T9s = VADD(T8d, T8e);
}
{
V TaI, TaJ, T82, T83;
TaI = VSUB(TaG, TaH);
TaJ = VSUB(T1e, T1b);
TaK = VSUB(TaI, TaJ);
Tcp = VADD(TaJ, TaI);
T82 = VSUB(T4d, T4a);
T83 = VMUL(LDK(KP707106781), VADD(T4U, T4V));
T84 = VSUB(T82, T83);
T9v = VADD(T82, T83);
}
}
{
V T1j, TaR, T1m, TaS, T4G, T4L, TaT, TaQ, T89, T88, T1q, TaM, T1t, TaN, T4v;
V T4A, TaO, TaL, T86, T85;
{
V T4H, T4F, T4C, T4K;
{
V T1h, T1i, T4D, T4E;
T1h = LD(&(ri[WS(is, 5)]), ivs, &(ri[WS(is, 1)]));
T1i = LD(&(ri[WS(is, 37)]), ivs, &(ri[WS(is, 1)]));
T1j = VADD(T1h, T1i);
T4H = VSUB(T1h, T1i);
T4D = LD(&(ii[WS(is, 5)]), ivs, &(ii[WS(is, 1)]));
T4E = LD(&(ii[WS(is, 37)]), ivs, &(ii[WS(is, 1)]));
T4F = VSUB(T4D, T4E);
TaR = VADD(T4D, T4E);
}
{
V T1k, T1l, T4I, T4J;
T1k = LD(&(ri[WS(is, 21)]), ivs, &(ri[WS(is, 1)]));
T1l = LD(&(ri[WS(is, 53)]), ivs, &(ri[WS(is, 1)]));
T1m = VADD(T1k, T1l);
T4C = VSUB(T1k, T1l);
T4I = LD(&(ii[WS(is, 21)]), ivs, &(ii[WS(is, 1)]));
T4J = LD(&(ii[WS(is, 53)]), ivs, &(ii[WS(is, 1)]));
T4K = VSUB(T4I, T4J);
TaS = VADD(T4I, T4J);
}
T4G = VADD(T4C, T4F);
T4L = VSUB(T4H, T4K);
TaT = VSUB(TaR, TaS);
TaQ = VSUB(T1j, T1m);
T89 = VADD(T4H, T4K);
T88 = VSUB(T4F, T4C);
}
{
V T4r, T4z, T4w, T4u;
{
V T1o, T1p, T4x, T4y;
T1o = LD(&(ri[WS(is, 61)]), ivs, &(ri[WS(is, 1)]));
T1p = LD(&(ri[WS(is, 29)]), ivs, &(ri[WS(is, 1)]));
T1q = VADD(T1o, T1p);
T4r = VSUB(T1o, T1p);
T4x = LD(&(ii[WS(is, 61)]), ivs, &(ii[WS(is, 1)]));
T4y = LD(&(ii[WS(is, 29)]), ivs, &(ii[WS(is, 1)]));
T4z = VSUB(T4x, T4y);
TaM = VADD(T4x, T4y);
}
{
V T1r, T1s, T4s, T4t;
T1r = LD(&(ri[WS(is, 13)]), ivs, &(ri[WS(is, 1)]));
T1s = LD(&(ri[WS(is, 45)]), ivs, &(ri[WS(is, 1)]));
T1t = VADD(T1r, T1s);
T4w = VSUB(T1r, T1s);
T4s = LD(&(ii[WS(is, 13)]), ivs, &(ii[WS(is, 1)]));
T4t = LD(&(ii[WS(is, 45)]), ivs, &(ii[WS(is, 1)]));
T4u = VSUB(T4s, T4t);
TaN = VADD(T4s, T4t);
}
T4v = VSUB(T4r, T4u);
T4A = VADD(T4w, T4z);
TaO = VSUB(TaM, TaN);
TaL = VSUB(T1q, T1t);
T86 = VSUB(T4z, T4w);
T85 = VADD(T4r, T4u);
}
{
V T1n, T1u, Tb2, Tb3;
T1n = VADD(T1j, T1m);
T1u = VADD(T1q, T1t);
T1v = VADD(T1n, T1u);
Tdn = VSUB(T1u, T1n);
Tb2 = VSUB(TaT, TaQ);
Tb3 = VADD(TaL, TaO);
Tb4 = VMUL(LDK(KP707106781), VSUB(Tb2, Tb3));
Tcq = VMUL(LDK(KP707106781), VADD(Tb2, Tb3));
}
{
V Tdq, Tdr, T4B, T4M;
Tdq = VADD(TaR, TaS);
Tdr = VADD(TaM, TaN);
Tds = VSUB(Tdq, Tdr);
Tek = VADD(Tdq, Tdr);
T4B = VFNMS(LDK(KP923879532), T4A, VMUL(LDK(KP382683432), T4v));
T4M = VFMA(LDK(KP923879532), T4G, VMUL(LDK(KP382683432), T4L));
T4N = VSUB(T4B, T4M);
T6P = VADD(T4M, T4B);
}
{
V T4Y, T4Z, T8g, T8h;
T4Y = VFNMS(LDK(KP923879532), T4L, VMUL(LDK(KP382683432), T4G));
T4Z = VFMA(LDK(KP382683432), T4A, VMUL(LDK(KP923879532), T4v));
T50 = VSUB(T4Y, T4Z);
T6S = VADD(T4Y, T4Z);
T8g = VFNMS(LDK(KP382683432), T89, VMUL(LDK(KP923879532), T88));
T8h = VFMA(LDK(KP923879532), T86, VMUL(LDK(KP382683432), T85));
T8i = VSUB(T8g, T8h);
T9w = VADD(T8g, T8h);
}
{
V TaP, TaU, T87, T8a;
TaP = VSUB(TaL, TaO);
TaU = VADD(TaQ, TaT);
TaV = VMUL(LDK(KP707106781), VSUB(TaP, TaU));
Tcn = VMUL(LDK(KP707106781), VADD(TaU, TaP));
T87 = VFNMS(LDK(KP382683432), T86, VMUL(LDK(KP923879532), T85));
T8a = VFMA(LDK(KP382683432), T88, VMUL(LDK(KP923879532), T89));
T8b = VSUB(T87, T8a);
T9t = VADD(T8a, T87);
}
}
{
V T1O, Tbc, T1R, Tbd, T5o, T5t, Tbf, Tbe, T8p, T8o, T1V, Tbi, T1Y, Tbj, T5z;
V T5E, Tbk, Tbh, T8s, T8r;
{
V T5p, T5n, T5k, T5s;
{
V T1M, T1N, T5l, T5m;
T1M = LD(&(ri[WS(is, 3)]), ivs, &(ri[WS(is, 1)]));
T1N = LD(&(ri[WS(is, 35)]), ivs, &(ri[WS(is, 1)]));
T1O = VADD(T1M, T1N);
T5p = VSUB(T1M, T1N);
T5l = LD(&(ii[WS(is, 3)]), ivs, &(ii[WS(is, 1)]));
T5m = LD(&(ii[WS(is, 35)]), ivs, &(ii[WS(is, 1)]));
T5n = VSUB(T5l, T5m);
Tbc = VADD(T5l, T5m);
}
{
V T1P, T1Q, T5q, T5r;
T1P = LD(&(ri[WS(is, 19)]), ivs, &(ri[WS(is, 1)]));
T1Q = LD(&(ri[WS(is, 51)]), ivs, &(ri[WS(is, 1)]));
T1R = VADD(T1P, T1Q);
T5k = VSUB(T1P, T1Q);
T5q = LD(&(ii[WS(is, 19)]), ivs, &(ii[WS(is, 1)]));
T5r = LD(&(ii[WS(is, 51)]), ivs, &(ii[WS(is, 1)]));
T5s = VSUB(T5q, T5r);
Tbd = VADD(T5q, T5r);
}
T5o = VADD(T5k, T5n);
T5t = VSUB(T5p, T5s);
Tbf = VSUB(T1O, T1R);
Tbe = VSUB(Tbc, Tbd);
T8p = VADD(T5p, T5s);
T8o = VSUB(T5n, T5k);
}
{
V T5A, T5y, T5v, T5D;
{
V T1T, T1U, T5w, T5x;
T1T = LD(&(ri[WS(is, 59)]), ivs, &(ri[WS(is, 1)]));
T1U = LD(&(ri[WS(is, 27)]), ivs, &(ri[WS(is, 1)]));
T1V = VADD(T1T, T1U);
T5A = VSUB(T1T, T1U);
T5w = LD(&(ii[WS(is, 59)]), ivs, &(ii[WS(is, 1)]));
T5x = LD(&(ii[WS(is, 27)]), ivs, &(ii[WS(is, 1)]));
T5y = VSUB(T5w, T5x);
Tbi = VADD(T5w, T5x);
}
{
V T1W, T1X, T5B, T5C;
T1W = LD(&(ri[WS(is, 11)]), ivs, &(ri[WS(is, 1)]));
T1X = LD(&(ri[WS(is, 43)]), ivs, &(ri[WS(is, 1)]));
T1Y = VADD(T1W, T1X);
T5v = VSUB(T1W, T1X);
T5B = LD(&(ii[WS(is, 11)]), ivs, &(ii[WS(is, 1)]));
T5C = LD(&(ii[WS(is, 43)]), ivs, &(ii[WS(is, 1)]));
T5D = VSUB(T5B, T5C);
Tbj = VADD(T5B, T5C);
}
T5z = VADD(T5v, T5y);
T5E = VSUB(T5A, T5D);
Tbk = VSUB(Tbi, Tbj);
Tbh = VSUB(T1V, T1Y);
T8s = VADD(T5A, T5D);
T8r = VSUB(T5y, T5v);
}
{
V T1S, T1Z, Tbt, Tbu;
T1S = VADD(T1O, T1R);
T1Z = VADD(T1V, T1Y);
T20 = VADD(T1S, T1Z);
TdD = VSUB(T1Z, T1S);
Tbt = VSUB(Tbh, Tbk);
Tbu = VADD(Tbf, Tbe);
Tbv = VMUL(LDK(KP707106781), VSUB(Tbt, Tbu));
Tcu = VMUL(LDK(KP707106781), VADD(Tbu, Tbt));
}
{
V Tdw, Tdx, T5u, T5F;
Tdw = VADD(Tbc, Tbd);
Tdx = VADD(Tbi, Tbj);
Tdy = VSUB(Tdw, Tdx);
Tep = VADD(Tdw, Tdx);
T5u = VFNMS(LDK(KP923879532), T5t, VMUL(LDK(KP382683432), T5o));
T5F = VFMA(LDK(KP382683432), T5z, VMUL(LDK(KP923879532), T5E));
T5G = VSUB(T5u, T5F);
T6Z = VADD(T5u, T5F);
}
{
V T5R, T5S, T8z, T8A;
T5R = VFNMS(LDK(KP923879532), T5z, VMUL(LDK(KP382683432), T5E));
T5S = VFMA(LDK(KP923879532), T5o, VMUL(LDK(KP382683432), T5t));
T5T = VSUB(T5R, T5S);
T6W = VADD(T5S, T5R);
T8z = VFNMS(LDK(KP382683432), T8r, VMUL(LDK(KP923879532), T8s));
T8A = VFMA(LDK(KP382683432), T8o, VMUL(LDK(KP923879532), T8p));
T8B = VSUB(T8z, T8A);
T9A = VADD(T8A, T8z);
}
{
V Tbg, Tbl, T8q, T8t;
Tbg = VSUB(Tbe, Tbf);
Tbl = VADD(Tbh, Tbk);
Tbm = VMUL(LDK(KP707106781), VSUB(Tbg, Tbl));
Tcx = VMUL(LDK(KP707106781), VADD(Tbg, Tbl));
T8q = VFNMS(LDK(KP382683432), T8p, VMUL(LDK(KP923879532), T8o));
T8t = VFMA(LDK(KP923879532), T8r, VMUL(LDK(KP382683432), T8s));
T8u = VSUB(T8q, T8t);
T9D = VADD(T8q, T8t);
}
}
               {
                    V TeJ, TeK, TeL, TeM, TeN, TeO, TeP, TeQ, TeR, TeS, TeT, TeU, TeV, TeW, TeX;
                    V TeY, TeZ, Tf0, Tf1, Tf2, Tf3, Tf4, Tf5, Tf6, Tf7, Tf8, Tf9, Tfa, Tfb, Tfc;
                    V Tfd, Tfe, Tff, Tfg, Tfh, Tfi, Tfj, Tfk, Tfl, Tfm, Tfn, Tfo, Tfp, Tfq, Tfr;
                    V Tfs, Tft, Tfu;
                    {
                         V T11, TeD, TeG, TeI, T22, T23, T34, TeH;
                         {
                              V Tv, T10, TeE, TeF;
                              Tv = VADD(Tf, Tu);
                              T10 = VADD(TK, TZ);
                              T11 = VADD(Tv, T10);
                              TeD = VSUB(Tv, T10);
                              TeE = VADD(Tej, Tek);
                              TeF = VADD(Teo, Tep);
                              TeG = VSUB(TeE, TeF);
                              TeI = VADD(TeE, TeF);
                         }
                         {
                              V T1w, T21, T2y, T33;
                              T1w = VADD(T1g, T1v);
                              T21 = VADD(T1L, T20);
                              T22 = VADD(T1w, T21);
                              T23 = VSUB(T21, T1w);
                              T2y = VADD(T2i, T2x);
                              T33 = VADD(T2N, T32);
                              T34 = VSUB(T2y, T33);
                              TeH = VADD(T2y, T33);
                         }
                         TeJ = VSUB(T11, T22);
                         STM4(&(ro[32]), TeJ, ovs, &(ro[0]));
                         TeK = VSUB(TeH, TeI);
                         STM4(&(io[32]), TeK, ovs, &(io[0]));
                         TeL = VADD(T11, T22);
                         STM4(&(ro[0]), TeL, ovs, &(ro[0]));
                         TeM = VADD(TeH, TeI);
                         STM4(&(io[0]), TeM, ovs, &(io[0]));
                         TeN = VADD(T23, T34);
                         STM4(&(io[16]), TeN, ovs, &(io[0]));
                         TeO = VADD(TeD, TeG);
                         STM4(&(ro[16]), TeO, ovs, &(ro[0]));
                         TeP = VSUB(T34, T23);
                         STM4(&(io[48]), TeP, ovs, &(io[0]));
                         TeQ = VSUB(TeD, TeG);
                         STM4(&(ro[48]), TeQ, ovs, &(ro[0]));
                    }
                    {
                         V Teh, Tex, Tev, TeB, Tem, Tey, Ter, Tez;
                         {
                              V Tef, Teg, Tet, Teu;
                              Tef = VSUB(Tf, Tu);
                              Teg = VSUB(T2N, T32);
                              Teh = VADD(Tef, Teg);
                              Tex = VSUB(Tef, Teg);
                              Tet = VSUB(T2i, T2x);
                              Teu = VSUB(TZ, TK);
                              Tev = VSUB(Tet, Teu);
                              TeB = VADD(Teu, Tet);
                         }
                         {
                              V Tei, Tel, Ten, Teq;
                              Tei = VSUB(T1g, T1v);
                              Tel = VSUB(Tej, Tek);
                              Tem = VADD(Tei, Tel);
                              Tey = VSUB(Tel, Tei);
                              Ten = VSUB(T1L, T20);
                              Teq = VSUB(Teo, Tep);
                              Ter = VSUB(Ten, Teq);
                              Tez = VADD(Ten, Teq);
                         }
                         {
                              V Tes, TeC, Tew, TeA;
                              Tes = VMUL(LDK(KP707106781), VADD(Tem, Ter));
                              TeR = VSUB(Teh, Tes);
                              STM4(&(ro[40]), TeR, ovs, &(ro[0]));
                              TeS = VADD(Teh, Tes);
                              STM4(&(ro[8]), TeS, ovs, &(ro[0]));
                              TeC = VMUL(LDK(KP707106781), VADD(Tey, Tez));
                              TeT = VSUB(TeB, TeC);
                              STM4(&(io[40]), TeT, ovs, &(io[0]));
                              TeU = VADD(TeB, TeC);
                              STM4(&(io[8]), TeU, ovs, &(io[0]));
                              Tew = VMUL(LDK(KP707106781), VSUB(Ter, Tem));
                              TeV = VSUB(Tev, Tew);
                              STM4(&(io[56]), TeV, ovs, &(io[0]));
                              TeW = VADD(Tev, Tew);
                              STM4(&(io[24]), TeW, ovs, &(io[0]));
                              TeA = VMUL(LDK(KP707106781), VSUB(Tey, Tez));
                              TeX = VSUB(Tex, TeA);
                              STM4(&(ro[56]), TeX, ovs, &(ro[0]));
                              TeY = VADD(Tex, TeA);
                              STM4(&(ro[24]), TeY, ovs, &(ro[0]));
                         }
                    }
                    {
                         V Tdb, TdV, Te5, TdJ, Tdi, Te6, Te3, Teb, TdM, TdW, Tdu, TdQ, Te0, Tea, TdF;
                         V TdR;
                         {
                              V Tde, Tdh, Tdo, Tdt;
                              Tdb = VSUB(Td9, Tda);
                              TdV = VADD(Td9, Tda);
                              Te5 = VADD(TdI, TdH);
                              TdJ = VSUB(TdH, TdI);
                              Tde = VSUB(Tdc, Tdd);
                              Tdh = VADD(Tdf, Tdg);
                              Tdi = VMUL(LDK(KP707106781), VSUB(Tde, Tdh));
                              Te6 = VMUL(LDK(KP707106781), VADD(Tde, Tdh));
                              {
                                   V Te1, Te2, TdK, TdL;
                                   Te1 = VADD(Tdv, Tdy);
                                   Te2 = VADD(TdD, TdC);
                                   Te3 = VFNMS(LDK(KP382683432), Te2, VMUL(LDK(KP923879532), Te1));
                                   Teb = VFMA(LDK(KP923879532), Te2, VMUL(LDK(KP382683432), Te1));
                                   TdK = VSUB(Tdf, Tdg);
                                   TdL = VADD(Tdd, Tdc);
                                   TdM = VMUL(LDK(KP707106781), VSUB(TdK, TdL));
                                   TdW = VMUL(LDK(KP707106781), VADD(TdL, TdK));
                              }
                              Tdo = VSUB(Tdm, Tdn);
                              Tdt = VSUB(Tdp, Tds);
                              Tdu = VFMA(LDK(KP923879532), Tdo, VMUL(LDK(KP382683432), Tdt));
                              TdQ = VFNMS(LDK(KP923879532), Tdt, VMUL(LDK(KP382683432), Tdo));
                              {
                                   V TdY, TdZ, Tdz, TdE;
                                   TdY = VADD(Tdn, Tdm);
                                   TdZ = VADD(Tdp, Tds);
                                   Te0 = VFMA(LDK(KP382683432), TdY, VMUL(LDK(KP923879532), TdZ));
                                   Tea = VFNMS(LDK(KP382683432), TdZ, VMUL(LDK(KP923879532), TdY));
                                   Tdz = VSUB(Tdv, Tdy);
                                   TdE = VSUB(TdC, TdD);
                                   TdF = VFNMS(LDK(KP923879532), TdE, VMUL(LDK(KP382683432), Tdz));
                                   TdR = VFMA(LDK(KP382683432), TdE, VMUL(LDK(KP923879532), Tdz));
                              }
                         }
                         {
                              V Tdj, TdG, TdT, TdU;
                              Tdj = VADD(Tdb, Tdi);
                              TdG = VADD(Tdu, TdF);
                              TeZ = VSUB(Tdj, TdG);
                              STM4(&(ro[44]), TeZ, ovs, &(ro[0]));
                              Tf0 = VADD(Tdj, TdG);
                              STM4(&(ro[12]), Tf0, ovs, &(ro[0]));
                              TdT = VADD(TdJ, TdM);
                              TdU = VADD(TdQ, TdR);
                              Tf1 = VSUB(TdT, TdU);
                              STM4(&(io[44]), Tf1, ovs, &(io[0]));
                              Tf2 = VADD(TdT, TdU);
                              STM4(&(io[12]), Tf2, ovs, &(io[0]));
                         }
                         {
                              V TdN, TdO, TdP, TdS;
                              TdN = VSUB(TdJ, TdM);
                              TdO = VSUB(TdF, Tdu);
                              Tf3 = VSUB(TdN, TdO);
                              STM4(&(io[60]), Tf3, ovs, &(io[0]));
                              Tf4 = VADD(TdN, TdO);
                              STM4(&(io[28]), Tf4, ovs, &(io[0]));
                              TdP = VSUB(Tdb, Tdi);
                              TdS = VSUB(TdQ, TdR);
                              Tf5 = VSUB(TdP, TdS);
                              STM4(&(ro[60]), Tf5, ovs, &(ro[0]));
                              Tf6 = VADD(TdP, TdS);
                              STM4(&(ro[28]), Tf6, ovs, &(ro[0]));
                         }
                         {
                              V TdX, Te4, Ted, Tee;
                              TdX = VADD(TdV, TdW);
                              Te4 = VADD(Te0, Te3);
                              Tf7 = VSUB(TdX, Te4);
                              STM4(&(ro[36]), Tf7, ovs, &(ro[0]));
                              Tf8 = VADD(TdX, Te4);
                              STM4(&(ro[4]), Tf8, ovs, &(ro[0]));
                              Ted = VADD(Te5, Te6);
                              Tee = VADD(Tea, Teb);
                              Tf9 = VSUB(Ted, Tee);
                              STM4(&(io[36]), Tf9, ovs, &(io[0]));
                              Tfa = VADD(Ted, Tee);
                              STM4(&(io[4]), Tfa, ovs, &(io[0]));
                         }
                         {
                              V Te7, Te8, Te9, Tec;
                              Te7 = VSUB(Te5, Te6);
                              Te8 = VSUB(Te3, Te0);
                              Tfb = VSUB(Te7, Te8);
                              STM4(&(io[52]), Tfb, ovs, &(io[0]));
                              Tfc = VADD(Te7, Te8);
                              STM4(&(io[20]), Tfc, ovs, &(io[0]));
                              Te9 = VSUB(TdV, TdW);
                              Tec = VSUB(Tea, Teb);
                              Tfd = VSUB(Te9, Tec);
                              STM4(&(ro[52]), Tfd, ovs, &(ro[0]));
                              Tfe = VADD(Te9, Tec);
                              STM4(&(ro[20]), Tfe, ovs, &(ro[0]));
                         }
                    }
                    {
                         V Tcd, TcP, TcD, TcZ, Tck, Td0, TcX, Td5, Tcs, TcK, TcG, TcQ, TcU, Td4, Tcz;
                         V TcL, Tcc, TcC;
                         Tcc = VMUL(LDK(KP707106781), VADD(TbD, TbC));
                         Tcd = VSUB(Tcb, Tcc);
                         TcP = VADD(Tcb, Tcc);
                         TcC = VMUL(LDK(KP707106781), VADD(Tak, Tan));
                         TcD = VSUB(TcB, TcC);
                         TcZ = VADD(TcB, TcC);
                         {
                              V Tcg, Tcj, TcV, TcW;
                              Tcg = VFNMS(LDK(KP382683432), Tcf, VMUL(LDK(KP923879532), Tce));
                              Tcj = VFMA(LDK(KP923879532), Tch, VMUL(LDK(KP382683432), Tci));
                              Tck = VSUB(Tcg, Tcj);
                              Td0 = VADD(Tcg, Tcj);
                              TcV = VADD(Tct, Tcu);
                              TcW = VADD(Tcw, Tcx);
                              TcX = VFNMS(LDK(KP195090322), TcW, VMUL(LDK(KP980785280), TcV));
                              Td5 = VFMA(LDK(KP195090322), TcV, VMUL(LDK(KP980785280), TcW));
                         }
                         {
                              V Tco, Tcr, TcE, TcF;
                              Tco = VSUB(Tcm, Tcn);
                              Tcr = VSUB(Tcp, Tcq);
                              Tcs = VFMA(LDK(KP555570233), Tco, VMUL(LDK(KP831469612), Tcr));
                              TcK = VFNMS(LDK(KP831469612), Tco, VMUL(LDK(KP555570233), Tcr));
                              TcE = VFNMS(LDK(KP382683432), Tch, VMUL(LDK(KP923879532), Tci));
                              TcF = VFMA(LDK(KP382683432), Tce, VMUL(LDK(KP923879532), Tcf));
                              TcG = VSUB(TcE, TcF);
                              TcQ = VADD(TcF, TcE);
                         }
                         {
                              V TcS, TcT, Tcv, Tcy;
                              TcS = VADD(Tcm, Tcn);
                              TcT = VADD(Tcp, Tcq);
                              TcU = VFMA(LDK(KP980785280), TcS, VMUL(LDK(KP195090322), TcT));
                              Td4 = VFNMS(LDK(KP195090322), TcS, VMUL(LDK(KP980785280), TcT));
                              Tcv = VSUB(Tct, Tcu);
                              Tcy = VSUB(Tcw, Tcx);
                              Tcz = VFNMS(LDK(KP831469612), Tcy, VMUL(LDK(KP555570233), Tcv));
                              TcL = VFMA(LDK(KP831469612), Tcv, VMUL(LDK(KP555570233), Tcy));
                         }
                         {
                              V Tcl, TcA, TcN, TcO;
                              Tcl = VADD(Tcd, Tck);
                              TcA = VADD(Tcs, Tcz);
                              Tff = VSUB(Tcl, TcA);
                              STM4(&(ro[42]), Tff, ovs, &(ro[0]));
                              Tfg = VADD(Tcl, TcA);
                              STM4(&(ro[10]), Tfg, ovs, &(ro[0]));
                              TcN = VADD(TcD, TcG);
                              TcO = VADD(TcK, TcL);
                              Tfh = VSUB(TcN, TcO);
                              STM4(&(io[42]), Tfh, ovs, &(io[0]));
                              Tfi = VADD(TcN, TcO);
                              STM4(&(io[10]), Tfi, ovs, &(io[0]));
                         }
                         {
                              V TcH, TcI, TcJ, TcM;
                              TcH = VSUB(TcD, TcG);
                              TcI = VSUB(Tcz, Tcs);
                              Tfj = VSUB(TcH, TcI);
                              STM4(&(io[58]), Tfj, ovs, &(io[0]));
                              Tfk = VADD(TcH, TcI);
                              STM4(&(io[26]), Tfk, ovs, &(io[0]));
                              TcJ = VSUB(Tcd, Tck);
                              TcM = VSUB(TcK, TcL);
                              Tfl = VSUB(TcJ, TcM);
                              STM4(&(ro[58]), Tfl, ovs, &(ro[0]));
                              Tfm = VADD(TcJ, TcM);
                              STM4(&(ro[26]), Tfm, ovs, &(ro[0]));
                         }
                         {
                              V TcR, TcY, Td7, Td8;
                              TcR = VADD(TcP, TcQ);
                              TcY = VADD(TcU, TcX);
                              Tfn = VSUB(TcR, TcY);
                              STM4(&(ro[34]), Tfn, ovs, &(ro[0]));
                              Tfo = VADD(TcR, TcY);
                              STM4(&(ro[2]), Tfo, ovs, &(ro[0]));
                              Td7 = VADD(TcZ, Td0);
                              Td8 = VADD(Td4, Td5);
                              Tfp = VSUB(Td7, Td8);
                              STM4(&(io[34]), Tfp, ovs, &(io[0]));
                              Tfq = VADD(Td7, Td8);
                              STM4(&(io[2]), Tfq, ovs, &(io[0]));
                         }
                         {
                              V Td1, Td2, Td3, Td6;
                              Td1 = VSUB(TcZ, Td0);
                              Td2 = VSUB(TcX, TcU);
                              Tfr = VSUB(Td1, Td2);
                              STM4(&(io[50]), Tfr, ovs, &(io[0]));
                              Tfs = VADD(Td1, Td2);
                              STM4(&(io[18]), Tfs, ovs, &(io[0]));
                              Td3 = VSUB(TcP, TcQ);
                              Td6 = VSUB(Td4, Td5);
                              Tft = VSUB(Td3, Td6);
                              STM4(&(ro[50]), Tft, ovs, &(ro[0]));
                              Tfu = VADD(Td3, Td6);
                              STM4(&(ro[18]), Tfu, ovs, &(ro[0]));
                         }
                    }
                    {
                         V Tfv, Tfw, Tfx, Tfy, Tfz, TfA, TfB, TfC, TfD, TfE, TfF, TfG, TfH, TfI, TfJ;
                         V TfK, TfL, TfM, TfN, TfO, TfP, TfQ, TfR, TfS, TfT, TfU, TfV, TfW, TfX, TfY;
                         V TfZ, Tg0;
                         {
                              V Tap, TbR, TbF, Tc1, TaE, Tc2, TbZ, Tc7, Tb6, TbM, TbI, TbS, TbW, Tc6, Tbx;
                              V TbN, Tao, TbE;
                              Tao = VMUL(LDK(KP707106781), VSUB(Tak, Tan));
                              Tap = VSUB(Tah, Tao);
                              TbR = VADD(Tah, Tao);
                              TbE = VMUL(LDK(KP707106781), VSUB(TbC, TbD));
                              TbF = VSUB(TbB, TbE);
                              Tc1 = VADD(TbB, TbE);
                              {
                                   V Taw, TaD, TbX, TbY;
                                   Taw = VFNMS(LDK(KP923879532), Tav, VMUL(LDK(KP382683432), Tas));
                                   TaD = VFMA(LDK(KP382683432), Taz, VMUL(LDK(KP923879532), TaC));
                                   TaE = VSUB(Taw, TaD);
                                   Tc2 = VADD(Taw, TaD);
                                   TbX = VADD(Tbb, Tbm);
                                   TbY = VADD(Tbs, Tbv);
                                   TbZ = VFNMS(LDK(KP555570233), TbY, VMUL(LDK(KP831469612), TbX));
                                   Tc7 = VFMA(LDK(KP831469612), TbY, VMUL(LDK(KP555570233), TbX));
                              }
                              {
                                   V TaW, Tb5, TbG, TbH;
                                   TaW = VSUB(TaK, TaV);
                                   Tb5 = VSUB(Tb1, Tb4);
                                   Tb6 = VFMA(LDK(KP980785280), TaW, VMUL(LDK(KP195090322), Tb5));
                                   TbM = VFNMS(LDK(KP980785280), Tb5, VMUL(LDK(KP195090322), TaW));
                                   TbG = VFNMS(LDK(KP923879532), Taz, VMUL(LDK(KP382683432), TaC));
                                   TbH = VFMA(LDK(KP923879532), Tas, VMUL(LDK(KP382683432), Tav));
                                   TbI = VSUB(TbG, TbH);
                                   TbS = VADD(TbH, TbG);
                              }
                              {
                                   V TbU, TbV, Tbn, Tbw;
                                   TbU = VADD(TaK, TaV);
                                   TbV = VADD(Tb1, Tb4);
                                   TbW = VFMA(LDK(KP555570233), TbU, VMUL(LDK(KP831469612), TbV));
                                   Tc6 = VFNMS(LDK(KP555570233), TbV, VMUL(LDK(KP831469612), TbU));
                                   Tbn = VSUB(Tbb, Tbm);
                                   Tbw = VSUB(Tbs, Tbv);
                                   Tbx = VFNMS(LDK(KP980785280), Tbw, VMUL(LDK(KP195090322), Tbn));
                                   TbN = VFMA(LDK(KP195090322), Tbw, VMUL(LDK(KP980785280), Tbn));
                              }
                              {
                                   V TaF, Tby, TbP, TbQ;
                                   TaF = VADD(Tap, TaE);
                                   Tby = VADD(Tb6, Tbx);
                                   Tfv = VSUB(TaF, Tby);
                                   STM4(&(ro[46]), Tfv, ovs, &(ro[0]));
                                   Tfw = VADD(TaF, Tby);
                                   STM4(&(ro[14]), Tfw, ovs, &(ro[0]));
                                   TbP = VADD(TbF, TbI);
                                   TbQ = VADD(TbM, TbN);
                                   Tfx = VSUB(TbP, TbQ);
                                   STM4(&(io[46]), Tfx, ovs, &(io[0]));
                                   Tfy = VADD(TbP, TbQ);
                                   STM4(&(io[14]), Tfy, ovs, &(io[0]));
                              }
                              {
                                   V TbJ, TbK, TbL, TbO;
                                   TbJ = VSUB(TbF, TbI);
                                   TbK = VSUB(Tbx, Tb6);
                                   Tfz = VSUB(TbJ, TbK);
                                   STM4(&(io[62]), Tfz, ovs, &(io[0]));
                                   TfA = VADD(TbJ, TbK);
                                   STM4(&(io[30]), TfA, ovs, &(io[0]));
                                   TbL = VSUB(Tap, TaE);
                                   TbO = VSUB(TbM, TbN);
                                   TfB = VSUB(TbL, TbO);
                                   STM4(&(ro[62]), TfB, ovs, &(ro[0]));
                                   TfC = VADD(TbL, TbO);
                                   STM4(&(ro[30]), TfC, ovs, &(ro[0]));
                              }
                              {
                                   V TbT, Tc0, Tc9, Tca;
                                   TbT = VADD(TbR, TbS);
                                   Tc0 = VADD(TbW, TbZ);
                                   TfD = VSUB(TbT, Tc0);
                                   STM4(&(ro[38]), TfD, ovs, &(ro[0]));
                                   TfE = VADD(TbT, Tc0);
                                   STM4(&(ro[6]), TfE, ovs, &(ro[0]));
                                   Tc9 = VADD(Tc1, Tc2);
                                   Tca = VADD(Tc6, Tc7);
                                   TfF = VSUB(Tc9, Tca);
                                   STM4(&(io[38]), TfF, ovs, &(io[0]));
                                   TfG = VADD(Tc9, Tca);
                                   STM4(&(io[6]), TfG, ovs, &(io[0]));
                              }
                              {
                                   V Tc3, Tc4, Tc5, Tc8;
                                   Tc3 = VSUB(Tc1, Tc2);
                                   Tc4 = VSUB(TbZ, TbW);
                                   TfH = VSUB(Tc3, Tc4);
                                   STM4(&(io[54]), TfH, ovs, &(io[0]));
                                   TfI = VADD(Tc3, Tc4);
                                   STM4(&(io[22]), TfI, ovs, &(io[0]));
                                   Tc5 = VSUB(TbR, TbS);
                                   Tc8 = VSUB(Tc6, Tc7);
                                   TfJ = VSUB(Tc5, Tc8);
                                   STM4(&(ro[54]), TfJ, ovs, &(ro[0]));
                                   TfK = VADD(Tc5, Tc8);
                                   STM4(&(ro[22]), TfK, ovs, &(ro[0]));
                              }
                         }
                         {
                              V T6F, T7h, T7m, T7w, T7p, T7x, T6M, T7s, T6U, T7c, T75, T7r, T78, T7i, T71;
                              V T7d;
                              {
                                   V T6D, T6E, T7k, T7l;
                                   T6D = VADD(T37, T3e);
                                   T6E = VADD(T65, T64);
                                   T6F = VSUB(T6D, T6E);
                                   T7h = VADD(T6D, T6E);
                                   T7k = VADD(T6O, T6P);
                                   T7l = VADD(T6R, T6S);
                                   T7m = VFMA(LDK(KP956940335), T7k, VMUL(LDK(KP290284677), T7l));
                                   T7w = VFNMS(LDK(KP290284677), T7k, VMUL(LDK(KP956940335), T7l));
                              }
                              {
                                   V T7n, T7o, T6I, T6L;
                                   T7n = VADD(T6V, T6W);
                                   T7o = VADD(T6Y, T6Z);
                                   T7p = VFNMS(LDK(KP290284677), T7o, VMUL(LDK(KP956940335), T7n));
                                   T7x = VFMA(LDK(KP290284677), T7n, VMUL(LDK(KP956940335), T7o));
                                   T6I = VFNMS(LDK(KP555570233), T6H, VMUL(LDK(KP831469612), T6G));
                                   T6L = VFMA(LDK(KP831469612), T6J, VMUL(LDK(KP555570233), T6K));
                                   T6M = VSUB(T6I, T6L);
                                   T7s = VADD(T6I, T6L);
                              }
                              {
                                   V T6Q, T6T, T73, T74;
                                   T6Q = VSUB(T6O, T6P);
                                   T6T = VSUB(T6R, T6S);
                                   T6U = VFMA(LDK(KP471396736), T6Q, VMUL(LDK(KP881921264), T6T));
                                   T7c = VFNMS(LDK(KP881921264), T6Q, VMUL(LDK(KP471396736), T6T));
                                   T73 = VADD(T5Z, T62);
                                   T74 = VADD(T3m, T3t);
                                   T75 = VSUB(T73, T74);
                                   T7r = VADD(T73, T74);
                              }
                              {
                                   V T76, T77, T6X, T70;
                                   T76 = VFNMS(LDK(KP555570233), T6J, VMUL(LDK(KP831469612), T6K));
                                   T77 = VFMA(LDK(KP555570233), T6G, VMUL(LDK(KP831469612), T6H));
                                   T78 = VSUB(T76, T77);
                                   T7i = VADD(T77, T76);
                                   T6X = VSUB(T6V, T6W);
                                   T70 = VSUB(T6Y, T6Z);
                                   T71 = VFNMS(LDK(KP881921264), T70, VMUL(LDK(KP471396736), T6X));
                                   T7d = VFMA(LDK(KP881921264), T6X, VMUL(LDK(KP471396736), T70));
                              }
                              {
                                   V T6N, T72, T7f, T7g;
                                   T6N = VADD(T6F, T6M);
                                   T72 = VADD(T6U, T71);
                                   TfL = VSUB(T6N, T72);
                                   STM4(&(ro[43]), TfL, ovs, &(ro[1]));
                                   TfM = VADD(T6N, T72);
                                   STM4(&(ro[11]), TfM, ovs, &(ro[1]));
                                   T7f = VADD(T75, T78);
                                   T7g = VADD(T7c, T7d);
                                   TfN = VSUB(T7f, T7g);
                                   STM4(&(io[43]), TfN, ovs, &(io[1]));
                                   TfO = VADD(T7f, T7g);
                                   STM4(&(io[11]), TfO, ovs, &(io[1]));
                              }
                              {
                                   V T79, T7a, T7b, T7e;
                                   T79 = VSUB(T75, T78);
                                   T7a = VSUB(T71, T6U);
                                   TfP = VSUB(T79, T7a);
                                   STM4(&(io[59]), TfP, ovs, &(io[1]));
                                   TfQ = VADD(T79, T7a);
                                   STM4(&(io[27]), TfQ, ovs, &(io[1]));
                                   T7b = VSUB(T6F, T6M);
                                   T7e = VSUB(T7c, T7d);
                                   TfR = VSUB(T7b, T7e);
                                   STM4(&(ro[59]), TfR, ovs, &(ro[1]));
                                   TfS = VADD(T7b, T7e);
                                   STM4(&(ro[27]), TfS, ovs, &(ro[1]));
                              }
                              {
                                   V T7j, T7q, T7z, T7A;
                                   T7j = VADD(T7h, T7i);
                                   T7q = VADD(T7m, T7p);
                                   TfT = VSUB(T7j, T7q);
                                   STM4(&(ro[35]), TfT, ovs, &(ro[1]));
                                   TfU = VADD(T7j, T7q);
                                   STM4(&(ro[3]), TfU, ovs, &(ro[1]));
                                   T7z = VADD(T7r, T7s);
                                   T7A = VADD(T7w, T7x);
                                   TfV = VSUB(T7z, T7A);
                                   STM4(&(io[35]), TfV, ovs, &(io[1]));
                                   TfW = VADD(T7z, T7A);
                                   STM4(&(io[3]), TfW, ovs, &(io[1]));
                              }
                              {
                                   V T7t, T7u, T7v, T7y;
                                   T7t = VSUB(T7r, T7s);
                                   T7u = VSUB(T7p, T7m);
                                   TfX = VSUB(T7t, T7u);
                                   STM4(&(io[51]), TfX, ovs, &(io[1]));
                                   TfY = VADD(T7t, T7u);
                                   STM4(&(io[19]), TfY, ovs, &(io[1]));
                                   T7v = VSUB(T7h, T7i);
                                   T7y = VSUB(T7w, T7x);
                                   TfZ = VSUB(T7v, T7y);
                                   STM4(&(ro[51]), TfZ, ovs, &(ro[1]));
                                   Tg0 = VADD(T7v, T7y);
                                   STM4(&(ro[19]), Tg0, ovs, &(ro[1]));
                              }
                         }
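                         /* From here on, whenever the last of four adjacent
                            outputs becomes ready, an STN4() merges the four
                            pending STM4() stores (e.g. ro[40..43]) into a
                            single contiguous write. */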
                         {
                              V T9j, T9V, Ta0, Taa, Ta3, Tab, T9q, Ta6, T9y, T9Q, T9J, Ta5, T9M, T9W, T9F;
                              V T9R;
                              {
                                   V T9h, T9i, T9Y, T9Z;
                                   T9h = VADD(T7B, T7C);
                                   T9i = VADD(T8J, T8I);
                                   T9j = VSUB(T9h, T9i);
                                   T9V = VADD(T9h, T9i);
                                   T9Y = VADD(T9s, T9t);
                                   T9Z = VADD(T9v, T9w);
                                   Ta0 = VFMA(LDK(KP995184726), T9Y, VMUL(LDK(KP098017140), T9Z));
                                   Taa = VFNMS(LDK(KP098017140), T9Y, VMUL(LDK(KP995184726), T9Z));
                              }
                              {
                                   V Ta1, Ta2, T9m, T9p;
                                   Ta1 = VADD(T9z, T9A);
                                   Ta2 = VADD(T9C, T9D);
                                   Ta3 = VFNMS(LDK(KP098017140), Ta2, VMUL(LDK(KP995184726), Ta1));
                                   Tab = VFMA(LDK(KP098017140), Ta1, VMUL(LDK(KP995184726), Ta2));
                                   T9m = VFNMS(LDK(KP195090322), T9l, VMUL(LDK(KP980785280), T9k));
                                   T9p = VFMA(LDK(KP195090322), T9n, VMUL(LDK(KP980785280), T9o));
                                   T9q = VSUB(T9m, T9p);
                                   Ta6 = VADD(T9m, T9p);
                              }
                              {
                                   V T9u, T9x, T9H, T9I;
                                   T9u = VSUB(T9s, T9t);
                                   T9x = VSUB(T9v, T9w);
                                   T9y = VFMA(LDK(KP634393284), T9u, VMUL(LDK(KP773010453), T9x));
                                   T9Q = VFNMS(LDK(KP773010453), T9u, VMUL(LDK(KP634393284), T9x));
                                   T9H = VADD(T8F, T8G);
                                   T9I = VADD(T7G, T7J);
                                   T9J = VSUB(T9H, T9I);
                                   Ta5 = VADD(T9H, T9I);
                              }
                              {
                                   V T9K, T9L, T9B, T9E;
                                   T9K = VFNMS(LDK(KP195090322), T9o, VMUL(LDK(KP980785280), T9n));
                                   T9L = VFMA(LDK(KP980785280), T9l, VMUL(LDK(KP195090322), T9k));
                                   T9M = VSUB(T9K, T9L);
                                   T9W = VADD(T9L, T9K);
                                   T9B = VSUB(T9z, T9A);
                                   T9E = VSUB(T9C, T9D);
                                   T9F = VFNMS(LDK(KP773010453), T9E, VMUL(LDK(KP634393284), T9B));
                                   T9R = VFMA(LDK(KP773010453), T9B, VMUL(LDK(KP634393284), T9E));
                              }
                              {
                                   V T9r, T9G, Tg1, Tg2;
                                   T9r = VADD(T9j, T9q);
                                   T9G = VADD(T9y, T9F);
                                   Tg1 = VSUB(T9r, T9G);
                                   STM4(&(ro[41]), Tg1, ovs, &(ro[1]));
                                   STN4(&(ro[40]), TeR, Tg1, Tff, TfL, ovs);
                                   Tg2 = VADD(T9r, T9G);
                                   STM4(&(ro[9]), Tg2, ovs, &(ro[1]));
                                   STN4(&(ro[8]), TeS, Tg2, Tfg, TfM, ovs);
                              }
                              {
                                   V T9T, T9U, Tg3, Tg4;
                                   T9T = VADD(T9J, T9M);
                                   T9U = VADD(T9Q, T9R);
                                   Tg3 = VSUB(T9T, T9U);
                                   STM4(&(io[41]), Tg3, ovs, &(io[1]));
                                   STN4(&(io[40]), TeT, Tg3, Tfh, TfN, ovs);
                                   Tg4 = VADD(T9T, T9U);
                                   STM4(&(io[9]), Tg4, ovs, &(io[1]));
                                   STN4(&(io[8]), TeU, Tg4, Tfi, TfO, ovs);
                              }
                              {
                                   V T9N, T9O, Tg5, Tg6;
                                   T9N = VSUB(T9J, T9M);
                                   T9O = VSUB(T9F, T9y);
                                   Tg5 = VSUB(T9N, T9O);
                                   STM4(&(io[57]), Tg5, ovs, &(io[1]));
                                   STN4(&(io[56]), TeV, Tg5, Tfj, TfP, ovs);
                                   Tg6 = VADD(T9N, T9O);
                                   STM4(&(io[25]), Tg6, ovs, &(io[1]));
                                   STN4(&(io[24]), TeW, Tg6, Tfk, TfQ, ovs);
                              }
                              {
                                   V T9P, T9S, Tg7, Tg8;
                                   T9P = VSUB(T9j, T9q);
                                   T9S = VSUB(T9Q, T9R);
                                   Tg7 = VSUB(T9P, T9S);
                                   STM4(&(ro[57]), Tg7, ovs, &(ro[1]));
                                   STN4(&(ro[56]), TeX, Tg7, Tfl, TfR, ovs);
                                   Tg8 = VADD(T9P, T9S);
                                   STM4(&(ro[25]), Tg8, ovs, &(ro[1]));
                                   STN4(&(ro[24]), TeY, Tg8, Tfm, TfS, ovs);
                              }
                              {
                                   V T9X, Ta4, Tg9, Tga;
                                   T9X = VADD(T9V, T9W);
                                   Ta4 = VADD(Ta0, Ta3);
                                   Tg9 = VSUB(T9X, Ta4);
                                   STM4(&(ro[33]), Tg9, ovs, &(ro[1]));
                                   STN4(&(ro[32]), TeJ, Tg9, Tfn, TfT, ovs);
                                   Tga = VADD(T9X, Ta4);
                                   STM4(&(ro[1]), Tga, ovs, &(ro[1]));
                                   STN4(&(ro[0]), TeL, Tga, Tfo, TfU, ovs);
                              }
                              {
                                   V Tad, Tae, Tgb, Tgc;
                                   Tad = VADD(Ta5, Ta6);
                                   Tae = VADD(Taa, Tab);
                                   Tgb = VSUB(Tad, Tae);
                                   STM4(&(io[33]), Tgb, ovs, &(io[1]));
                                   STN4(&(io[32]), TeK, Tgb, Tfp, TfV, ovs);
                                   Tgc = VADD(Tad, Tae);
                                   STM4(&(io[1]), Tgc, ovs, &(io[1]));
                                   STN4(&(io[0]), TeM, Tgc, Tfq, TfW, ovs);
                              }
                              {
                                   V Ta7, Ta8, Tgd, Tge;
                                   Ta7 = VSUB(Ta5, Ta6);
                                   Ta8 = VSUB(Ta3, Ta0);
                                   Tgd = VSUB(Ta7, Ta8);
                                   STM4(&(io[49]), Tgd, ovs, &(io[1]));
                                   STN4(&(io[48]), TeP, Tgd, Tfr, TfX, ovs);
                                   Tge = VADD(Ta7, Ta8);
                                   STM4(&(io[17]), Tge, ovs, &(io[1]));
                                   STN4(&(io[16]), TeN, Tge, Tfs, TfY, ovs);
                              }
                              {
                                   V Ta9, Tac, Tgf, Tgg;
                                   Ta9 = VSUB(T9V, T9W);
                                   Tac = VSUB(Taa, Tab);
                                   Tgf = VSUB(Ta9, Tac);
                                   STM4(&(ro[49]), Tgf, ovs, &(ro[1]));
                                   STN4(&(ro[48]), TeQ, Tgf, Tft, TfZ, ovs);
                                   Tgg = VADD(Ta9, Tac);
                                   STM4(&(ro[17]), Tgg, ovs, &(ro[1]));
                                   STN4(&(ro[16]), TeO, Tgg, Tfu, Tg0, ovs);
                              }
                         }
                         {
                              V Tgh, Tgi, Tgj, Tgk, Tgl, Tgm, Tgn, Tgo, Tgp, Tgq, Tgr, Tgs, Tgt, Tgu, Tgv;
                              V Tgw;
                              {
                                   V T3v, T6j, T6o, T6y, T6r, T6z, T48, T6u, T52, T6e, T67, T6t, T6a, T6k, T5V;
                                   V T6f;
                                   {
                                        V T3f, T3u, T6m, T6n;
                                        T3f = VSUB(T37, T3e);
                                        T3u = VSUB(T3m, T3t);
                                        T3v = VSUB(T3f, T3u);
                                        T6j = VADD(T3f, T3u);
                                        T6m = VADD(T4q, T4N);
                                        T6n = VADD(T4X, T50);
                                        T6o = VFMA(LDK(KP634393284), T6m, VMUL(LDK(KP773010453), T6n));
                                        T6y = VFNMS(LDK(KP634393284), T6n, VMUL(LDK(KP773010453), T6m));
                                   }
                                   {
                                        V T6p, T6q, T3O, T47;
                                        T6p = VADD(T5j, T5G);
                                        T6q = VADD(T5Q, T5T);
                                        T6r = VFNMS(LDK(KP634393284), T6q, VMUL(LDK(KP773010453), T6p));
                                        T6z = VFMA(LDK(KP773010453), T6q, VMUL(LDK(KP634393284), T6p));
                                        T3O = VFNMS(LDK(KP980785280), T3N, VMUL(LDK(KP195090322), T3G));
                                        T47 = VFMA(LDK(KP195090322), T3Z, VMUL(LDK(KP980785280), T46));
                                        T48 = VSUB(T3O, T47);
                                        T6u = VADD(T3O, T47);
                                   }
                                   {
                                        V T4O, T51, T63, T66;
                                        T4O = VSUB(T4q, T4N);
                                        T51 = VSUB(T4X, T50);
                                        T52 = VFMA(LDK(KP995184726), T4O, VMUL(LDK(KP098017140), T51));
                                        T6e = VFNMS(LDK(KP995184726), T51, VMUL(LDK(KP098017140), T4O));
                                        T63 = VSUB(T5Z, T62);
                                        T66 = VSUB(T64, T65);
                                        T67 = VSUB(T63, T66);
                                        T6t = VADD(T63, T66);
                                   }
                                   {
                                        V T68, T69, T5H, T5U;
                                        T68 = VFNMS(LDK(KP980785280), T3Z, VMUL(LDK(KP195090322), T46));
                                        T69 = VFMA(LDK(KP980785280), T3G, VMUL(LDK(KP195090322), T3N));
                                        T6a = VSUB(T68, T69);
                                        T6k = VADD(T69, T68);
                                        T5H = VSUB(T5j, T5G);
                                        T5U = VSUB(T5Q, T5T);
                                        T5V = VFNMS(LDK(KP995184726), T5U, VMUL(LDK(KP098017140), T5H));
                                        T6f = VFMA(LDK(KP098017140), T5U, VMUL(LDK(KP995184726), T5H));
                                   }
                                   {
                                        V T49, T5W, T6h, T6i;
                                        T49 = VADD(T3v, T48);
                                        T5W = VADD(T52, T5V);
                                        Tgh = VSUB(T49, T5W);
                                        STM4(&(ro[47]), Tgh, ovs, &(ro[1]));
                                        Tgi = VADD(T49, T5W);
                                        STM4(&(ro[15]), Tgi, ovs, &(ro[1]));
                                        T6h = VADD(T67, T6a);
                                        T6i = VADD(T6e, T6f);
                                        Tgj = VSUB(T6h, T6i);
                                        STM4(&(io[47]), Tgj, ovs, &(io[1]));
                                        Tgk = VADD(T6h, T6i);
                                        STM4(&(io[15]), Tgk, ovs, &(io[1]));
                                   }
                                   {
                                        V T6b, T6c, T6d, T6g;
                                        T6b = VSUB(T67, T6a);
                                        T6c = VSUB(T5V, T52);
                                        Tgl = VSUB(T6b, T6c);
                                        STM4(&(io[63]), Tgl, ovs, &(io[1]));
                                        Tgm = VADD(T6b, T6c);
                                        STM4(&(io[31]), Tgm, ovs, &(io[1]));
                                        T6d = VSUB(T3v, T48);
                                        T6g = VSUB(T6e, T6f);
                                        Tgn = VSUB(T6d, T6g);
                                        STM4(&(ro[63]), Tgn, ovs, &(ro[1]));
                                        Tgo = VADD(T6d, T6g);
                                        STM4(&(ro[31]), Tgo, ovs, &(ro[1]));
                                   }
                                   {
                                        V T6l, T6s, T6B, T6C;
                                        T6l = VADD(T6j, T6k);
                                        T6s = VADD(T6o, T6r);
                                        Tgp = VSUB(T6l, T6s);
                                        STM4(&(ro[39]), Tgp, ovs, &(ro[1]));
                                        Tgq = VADD(T6l, T6s);
                                        STM4(&(ro[7]), Tgq, ovs, &(ro[1]));
                                        T6B = VADD(T6t, T6u);
                                        T6C = VADD(T6y, T6z);
                                        Tgr = VSUB(T6B, T6C);
                                        STM4(&(io[39]), Tgr, ovs, &(io[1]));
                                        Tgs = VADD(T6B, T6C);
                                        STM4(&(io[7]), Tgs, ovs, &(io[1]));
                                   }
                                   {
                                        V T6v, T6w, T6x, T6A;
                                        T6v = VSUB(T6t, T6u);
                                        T6w = VSUB(T6r, T6o);
                                        Tgt = VSUB(T6v, T6w);
                                        STM4(&(io[55]), Tgt, ovs, &(io[1]));
                                        Tgu = VADD(T6v, T6w);
                                        STM4(&(io[23]), Tgu, ovs, &(io[1]));
                                        T6x = VSUB(T6j, T6k);
                                        T6A = VSUB(T6y, T6z);
                                        Tgv = VSUB(T6x, T6A);
                                        STM4(&(ro[55]), Tgv, ovs, &(ro[1]));
                                        Tgw = VADD(T6x, T6A);
                                        STM4(&(ro[23]), Tgw, ovs, &(ro[1]));
                                   }
                              }
                              {
                                   V T7L, T8X, T92, T9c, T95, T9d, T80, T98, T8k, T8S, T8L, T97, T8O, T8Y, T8D;
                                   V T8T;
                                   {
                                        V T7D, T7K, T90, T91;
                                        T7D = VSUB(T7B, T7C);
                                        T7K = VSUB(T7G, T7J);
                                        T7L = VSUB(T7D, T7K);
                                        T8X = VADD(T7D, T7K);
                                        T90 = VADD(T84, T8b);
                                        T91 = VADD(T8f, T8i);
                                        T92 = VFMA(LDK(KP471396736), T90, VMUL(LDK(KP881921264), T91));
                                        T9c = VFNMS(LDK(KP471396736), T91, VMUL(LDK(KP881921264), T90));
                                   }
                                   {
                                        V T93, T94, T7S, T7Z;
                                        T93 = VADD(T8n, T8u);
                                        T94 = VADD(T8y, T8B);
                                        T95 = VFNMS(LDK(KP471396736), T94, VMUL(LDK(KP881921264), T93));
                                        T9d = VFMA(LDK(KP881921264), T94, VMUL(LDK(KP471396736), T93));
                                        T7S = VFNMS(LDK(KP831469612), T7R, VMUL(LDK(KP555570233), T7O));
                                        T7Z = VFMA(LDK(KP831469612), T7V, VMUL(LDK(KP555570233), T7Y));
                                        T80 = VSUB(T7S, T7Z);
                                        T98 = VADD(T7S, T7Z);
                                   }
                                   {
                                        V T8c, T8j, T8H, T8K;
                                        T8c = VSUB(T84, T8b);
                                        T8j = VSUB(T8f, T8i);
                                        T8k = VFMA(LDK(KP956940335), T8c, VMUL(LDK(KP290284677), T8j));
                                        T8S = VFNMS(LDK(KP956940335), T8j, VMUL(LDK(KP290284677), T8c));
                                        T8H = VSUB(T8F, T8G);
                                        T8K = VSUB(T8I, T8J);
                                        T8L = VSUB(T8H, T8K);
                                        T97 = VADD(T8H, T8K);
                                   }
                                   {
                                        V T8M, T8N, T8v, T8C;
                                        T8M = VFNMS(LDK(KP831469612), T7Y, VMUL(LDK(KP555570233), T7V));
                                        T8N = VFMA(LDK(KP555570233), T7R, VMUL(LDK(KP831469612), T7O));
                                        T8O = VSUB(T8M, T8N);
                                        T8Y = VADD(T8N, T8M);
                                        T8v = VSUB(T8n, T8u);
                                        T8C = VSUB(T8y, T8B);
                                        T8D = VFNMS(LDK(KP956940335), T8C, VMUL(LDK(KP290284677), T8v));
                                        T8T = VFMA(LDK(KP290284677), T8C, VMUL(LDK(KP956940335), T8v));
                                   }
                                   {
                                        V T81, T8E, Tgx, Tgy;
                                        T81 = VADD(T7L, T80);
                                        T8E = VADD(T8k, T8D);
                                        Tgx = VSUB(T81, T8E);
                                        STM4(&(ro[45]), Tgx, ovs, &(ro[1]));
                                        STN4(&(ro[44]), TeZ, Tgx, Tfv, Tgh, ovs);
                                        Tgy = VADD(T81, T8E);
                                        STM4(&(ro[13]), Tgy, ovs, &(ro[1]));
                                        STN4(&(ro[12]), Tf0, Tgy, Tfw, Tgi, ovs);
                                   }
                                   {
                                        V T8V, T8W, Tgz, TgA;
                                        T8V = VADD(T8L, T8O);
                                        T8W = VADD(T8S, T8T);
                                        Tgz = VSUB(T8V, T8W);
                                        STM4(&(io[45]), Tgz, ovs, &(io[1]));
                                        STN4(&(io[44]), Tf1, Tgz, Tfx, Tgj, ovs);
                                        TgA = VADD(T8V, T8W);
                                        STM4(&(io[13]), TgA, ovs, &(io[1]));
                                        STN4(&(io[12]), Tf2, TgA, Tfy, Tgk, ovs);
                                   }
                                   {
                                        V T8P, T8Q, TgB, TgC;
                                        T8P = VSUB(T8L, T8O);
                                        T8Q = VSUB(T8D, T8k);
                                        TgB = VSUB(T8P, T8Q);
                                        STM4(&(io[61]), TgB, ovs, &(io[1]));
                                        STN4(&(io[60]), Tf3, TgB, Tfz, Tgl, ovs);
                                        TgC = VADD(T8P, T8Q);
                                        STM4(&(io[29]), TgC, ovs, &(io[1]));
                                        STN4(&(io[28]), Tf4, TgC, TfA, Tgm, ovs);
                                   }
                                   {
                                        V T8R, T8U, TgD, TgE;
                                        T8R = VSUB(T7L, T80);
                                        T8U = VSUB(T8S, T8T);
                                        TgD = VSUB(T8R, T8U);
                                        STM4(&(ro[61]), TgD, ovs, &(ro[1]));
                                        STN4(&(ro[60]), Tf5, TgD, TfB, Tgn, ovs);
                                        TgE = VADD(T8R, T8U);
                                        STM4(&(ro[29]), TgE, ovs, &(ro[1]));
                                        STN4(&(ro[28]), Tf6, TgE, TfC, Tgo, ovs);
                                   }
                                   {
                                        V T8Z, T96, TgF, TgG;
                                        T8Z = VADD(T8X, T8Y);
                                        T96 = VADD(T92, T95);
                                        TgF = VSUB(T8Z, T96);
                                        STM4(&(ro[37]), TgF, ovs, &(ro[1]));
                                        STN4(&(ro[36]), Tf7, TgF, TfD, Tgp, ovs);
                                        TgG = VADD(T8Z, T96);
                                        STM4(&(ro[5]), TgG, ovs, &(ro[1]));
                                        STN4(&(ro[4]), Tf8, TgG, TfE, Tgq, ovs);
                                   }
                                   {
                                        V T9f, T9g, TgH, TgI;
                                        T9f = VADD(T97, T98);
                                        T9g = VADD(T9c, T9d);
                                        TgH = VSUB(T9f, T9g);
                                        STM4(&(io[37]), TgH, ovs, &(io[1]));
                                        STN4(&(io[36]), Tf9, TgH, TfF, Tgr, ovs);
                                        TgI = VADD(T9f, T9g);
                                        STM4(&(io[5]), TgI, ovs, &(io[1]));
                                        STN4(&(io[4]), Tfa, TgI, TfG, Tgs, ovs);
                                   }
                                   {
                                        V T99, T9a, TgJ, TgK;
                                        T99 = VSUB(T97, T98);
                                        T9a = VSUB(T95, T92);
                                        TgJ = VSUB(T99, T9a);
                                        STM4(&(io[53]), TgJ, ovs, &(io[1]));
                                        STN4(&(io[52]), Tfb, TgJ, TfH, Tgt, ovs);
                                        TgK = VADD(T99, T9a);
                                        STM4(&(io[21]), TgK, ovs, &(io[1]));
                                        STN4(&(io[20]), Tfc, TgK, TfI, Tgu, ovs);
                                   }
                                   {
                                        V T9b, T9e, TgL, TgM;
                                        T9b = VSUB(T8X, T8Y);
                                        T9e = VSUB(T9c, T9d);
                                        TgL = VSUB(T9b, T9e);
                                        STM4(&(ro[53]), TgL, ovs, &(ro[1]));
                                        STN4(&(ro[52]), Tfd, TgL, TfJ, Tgv, ovs);
                                        TgM = VADD(T9b, T9e);
                                        STM4(&(ro[21]), TgM, ovs, &(ro[1]));
                                        STN4(&(ro[20]), Tfe, TgM, TfK, Tgw, ovs);
                                   }
                              }
                         }
                    }
               }
          }
     }
     VLEAVE();
}
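
/* Generated codelet boilerplate: the descriptor's {808, 144, 104, 0} entry
   appears to record the operation counts (additions, multiplications, fused
   multiply-adds, other) used by the planner's cost model; the registration
   routine below makes the codelet available to the FFTW planner. */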
static const kdft_desc desc = { 64, XSIMD_STRING("n2sv_64"), {808, 144, 104, 0}, &GENUS, 0, 1, 0, 0 };

void XSIMD(codelet_n2sv_64) (planner *p) {
     X(kdft_register) (p, n2sv_64, &desc);
}

#endif /* HAVE_FMA */