annotate src/fftw-3.3.8/dft/simd/common/n2sv_64.c @ 84:08ae793730bd

Add null config files
author Chris Cannam
date Mon, 02 Mar 2020 14:03:47 +0000
parents d0c2a83c1364
children
rev   line source
Chris@82 1 /*
Chris@82 2 * Copyright (c) 2003, 2007-14 Matteo Frigo
Chris@82 3 * Copyright (c) 2003, 2007-14 Massachusetts Institute of Technology
Chris@82 4 *
Chris@82 5 * This program is free software; you can redistribute it and/or modify
Chris@82 6 * it under the terms of the GNU General Public License as published by
Chris@82 7 * the Free Software Foundation; either version 2 of the License, or
Chris@82 8 * (at your option) any later version.
Chris@82 9 *
Chris@82 10 * This program is distributed in the hope that it will be useful,
Chris@82 11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
Chris@82 12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
Chris@82 13 * GNU General Public License for more details.
Chris@82 14 *
Chris@82 15 * You should have received a copy of the GNU General Public License
Chris@82 16 * along with this program; if not, write to the Free Software
Chris@82 17 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Chris@82 18 *
Chris@82 19 */
Chris@82 20
Chris@82 21 /* This file was automatically generated --- DO NOT EDIT */
Chris@82 22 /* Generated on Thu May 24 08:05:24 EDT 2018 */
Chris@82 23
Chris@82 24 #include "dft/codelet-dft.h"
Chris@82 25
Chris@82 26 #if defined(ARCH_PREFERS_FMA) || defined(ISA_EXTENSION_PREFERS_FMA)
Chris@82 27
Chris@82 28 /* Generated by: ../../../genfft/gen_notw.native -fma -simd -compact -variables 4 -pipeline-latency 8 -n 64 -name n2sv_64 -with-ostride 1 -include dft/simd/n2s.h -store-multiple 4 */
Chris@82 29
Chris@82 30 /*
Chris@82 31 * This function contains 912 FP additions, 392 FP multiplications,
Chris@82 32 * (or, 520 additions, 0 multiplications, 392 fused multiply/add),
Chris@82 33 * 260 stack variables, 15 constants, and 288 memory accesses
Chris@82 34 */
Chris@82 35 #include "dft/simd/n2s.h"
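/* The V-prefixed operations used below come from the SIMD support headers
   included above (not restated in this file): LD loads a vector, STM4/STN4
   store results in the -store-multiple 4 output scheme, VADD/VSUB are vector
   add/subtract, and the fused forms follow FFTW's convention
   VFMA(a,b,c) = a*b + c and VFNMS(a,b,c) = c - a*b (assumed from those
   headers). */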
Chris@82 36
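/* Parameter roles, inferred from FFTW's generated-codelet conventions rather
   than stated here: ri/ii are the real and imaginary input arrays and ro/io
   the corresponding outputs; is/os are the element strides within a single
   transform; v is the number of transforms to compute; ivs/ovs are the
   strides between consecutive transforms in the input and output arrays. */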
Chris@82 37 static void n2sv_64(const R *ri, const R *ii, R *ro, R *io, stride is, stride os, INT v, INT ivs, INT ovs)
Chris@82 38 {
Chris@82 39 DVK(KP956940335, +0.956940335732208864935797886980269969482849206);
Chris@82 40 DVK(KP881921264, +0.881921264348355029712756863660388349508442621);
Chris@82 41 DVK(KP534511135, +0.534511135950791641089685961295362908582039528);
Chris@82 42 DVK(KP303346683, +0.303346683607342391675883946941299872384187453);
Chris@82 43 DVK(KP995184726, +0.995184726672196886244836953109479921575474869);
Chris@82 44 DVK(KP773010453, +0.773010453362736960810906609758469800971041293);
Chris@82 45 DVK(KP820678790, +0.820678790828660330972281985331011598767386482);
Chris@82 46 DVK(KP098491403, +0.098491403357164253077197521291327432293052451);
Chris@82 47 DVK(KP980785280, +0.980785280403230449126182236134239036973933731);
Chris@82 48 DVK(KP831469612, +0.831469612302545237078788377617905756738560812);
Chris@82 49 DVK(KP668178637, +0.668178637919298919997757686523080761552472251);
Chris@82 50 DVK(KP198912367, +0.198912367379658006911597622644676228597850501);
Chris@82 51 DVK(KP923879532, +0.923879532511286756128183189396788286822416626);
Chris@82 52 DVK(KP707106781, +0.707106781186547524400844362104849039284835938);
Chris@82 53 DVK(KP414213562, +0.414213562373095048801688724209698078569671875);
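/* The DVK constants above are the trigonometric values a size-64 DFT needs:
   cosines and tangents of multiples of pi/32, e.g. KP707106781 = cos(pi/4),
   KP923879532 = cos(pi/8), KP414213562 = tan(pi/8) = sqrt(2)-1.  DVK defines
   a vector-wide constant and LDK loads it for the FMA macros used below. */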
Chris@82 54 {
Chris@82 55 INT i;
Chris@82 56 for (i = v; i > 0; i = i - (2 * VL), ri = ri + ((2 * VL) * ivs), ii = ii + ((2 * VL) * ivs), ro = ro + ((2 * VL) * ovs), io = io + ((2 * VL) * ovs), MAKE_VOLATILE_STRIDE(256, is), MAKE_VOLATILE_STRIDE(256, os)) {
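/* Each iteration appears to process 2*VL independent transforms packed into
   SIMD vectors (VL being the vector length), which is why the pointers
   advance by 2*VL input/output vector strides per pass; MAKE_VOLATILE_STRIDE
   looks like FFTW's usual hint to keep the compiler from over-optimising the
   stride arithmetic. */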
Chris@82 57 V T37, T7B, T8F, T5Z, Tf, Td9, TbB, TcB, T62, T7C, T2i, TdH, Tah, Tcb, T3e;
Chris@82 58 V T8G, Tu, TdI, Tak, TbC, Tan, TbD, T2x, Tda, T3m, T65, T7G, T8I, T7J, T8J;
Chris@82 59 V T3t, T64, TK, Tdd, Tas, Tce, Tav, Tcf, T2N, Tdc, T3G, T6G, T7O, T9k, T7R;
Chris@82 60 V T9l, T3N, T6H, T1L, TdA, Tbs, Tct, Tdx, Teo, T5j, T6Y, T5Q, T6V, T8y, T9z;
Chris@82 61 V Tbb, Tcw, T8n, T9C, TZ, Tdf, Taz, Tch, TaC, Tci, T32, Tdg, T3Z, T6J, T7V;
Chris@82 62 V T9n, T7Y, T9o, T46, T6K, T1g, Tdp, Tb1, Tcm, Tdm, Tej, T4q, T6R, T4X, T6O;
Chris@82 63 V T8f, T9s, TaK, Tcp, T84, T9v, T1v, Tdn, Tb4, Tcq, Tds, Tek, T4N, T6P, T50;
Chris@82 64 V T6S, T8i, T9w, TaV, Tcn, T8b, T9t, T20, Tdy, Tbv, Tcx, TdD, Tep, T5G, T6W;
Chris@82 65 V T5T, T6Z, T8B, T9D, Tbm, Tcu, T8u, T9A;
Chris@82 66 {
Chris@82 67 V T3, T35, T26, T5Y, T6, T5X, T29, T36, Ta, T39, T2d, T38, Td, T3b, T2g;
Chris@82 68 V T3c;
Chris@82 69 {
Chris@82 70 V T1, T2, T24, T25;
Chris@82 71 T1 = LD(&(ri[0]), ivs, &(ri[0]));
Chris@82 72 T2 = LD(&(ri[WS(is, 32)]), ivs, &(ri[0]));
Chris@82 73 T3 = VADD(T1, T2);
Chris@82 74 T35 = VSUB(T1, T2);
Chris@82 75 T24 = LD(&(ii[0]), ivs, &(ii[0]));
Chris@82 76 T25 = LD(&(ii[WS(is, 32)]), ivs, &(ii[0]));
Chris@82 77 T26 = VADD(T24, T25);
Chris@82 78 T5Y = VSUB(T24, T25);
Chris@82 79 }
Chris@82 80 {
Chris@82 81 V T4, T5, T27, T28;
Chris@82 82 T4 = LD(&(ri[WS(is, 16)]), ivs, &(ri[0]));
Chris@82 83 T5 = LD(&(ri[WS(is, 48)]), ivs, &(ri[0]));
Chris@82 84 T6 = VADD(T4, T5);
Chris@82 85 T5X = VSUB(T4, T5);
Chris@82 86 T27 = LD(&(ii[WS(is, 16)]), ivs, &(ii[0]));
Chris@82 87 T28 = LD(&(ii[WS(is, 48)]), ivs, &(ii[0]));
Chris@82 88 T29 = VADD(T27, T28);
Chris@82 89 T36 = VSUB(T27, T28);
Chris@82 90 }
Chris@82 91 {
Chris@82 92 V T8, T9, T2b, T2c;
Chris@82 93 T8 = LD(&(ri[WS(is, 8)]), ivs, &(ri[0]));
Chris@82 94 T9 = LD(&(ri[WS(is, 40)]), ivs, &(ri[0]));
Chris@82 95 Ta = VADD(T8, T9);
Chris@82 96 T39 = VSUB(T8, T9);
Chris@82 97 T2b = LD(&(ii[WS(is, 8)]), ivs, &(ii[0]));
Chris@82 98 T2c = LD(&(ii[WS(is, 40)]), ivs, &(ii[0]));
Chris@82 99 T2d = VADD(T2b, T2c);
Chris@82 100 T38 = VSUB(T2b, T2c);
Chris@82 101 }
Chris@82 102 {
Chris@82 103 V Tb, Tc, T2e, T2f;
Chris@82 104 Tb = LD(&(ri[WS(is, 56)]), ivs, &(ri[0]));
Chris@82 105 Tc = LD(&(ri[WS(is, 24)]), ivs, &(ri[0]));
Chris@82 106 Td = VADD(Tb, Tc);
Chris@82 107 T3b = VSUB(Tb, Tc);
Chris@82 108 T2e = LD(&(ii[WS(is, 56)]), ivs, &(ii[0]));
Chris@82 109 T2f = LD(&(ii[WS(is, 24)]), ivs, &(ii[0]));
Chris@82 110 T2g = VADD(T2e, T2f);
Chris@82 111 T3c = VSUB(T2e, T2f);
Chris@82 112 }
Chris@82 113 {
Chris@82 114 V T7, Te, T2a, T2h;
Chris@82 115 T37 = VSUB(T35, T36);
Chris@82 116 T7B = VADD(T35, T36);
Chris@82 117 T8F = VSUB(T5Y, T5X);
Chris@82 118 T5Z = VADD(T5X, T5Y);
Chris@82 119 T7 = VADD(T3, T6);
Chris@82 120 Te = VADD(Ta, Td);
Chris@82 121 Tf = VADD(T7, Te);
Chris@82 122 Td9 = VSUB(T7, Te);
Chris@82 123 {
Chris@82 124 V Tbz, TbA, T60, T61;
Chris@82 125 Tbz = VSUB(Td, Ta);
Chris@82 126 TbA = VSUB(T26, T29);
Chris@82 127 TbB = VADD(Tbz, TbA);
Chris@82 128 TcB = VSUB(TbA, Tbz);
Chris@82 129 T60 = VSUB(T3b, T3c);
Chris@82 130 T61 = VADD(T39, T38);
Chris@82 131 T62 = VSUB(T60, T61);
Chris@82 132 T7C = VADD(T61, T60);
Chris@82 133 }
Chris@82 134 T2a = VADD(T26, T29);
Chris@82 135 T2h = VADD(T2d, T2g);
Chris@82 136 T2i = VADD(T2a, T2h);
Chris@82 137 TdH = VSUB(T2a, T2h);
Chris@82 138 {
Chris@82 139 V Taf, Tag, T3a, T3d;
Chris@82 140 Taf = VSUB(T3, T6);
Chris@82 141 Tag = VSUB(T2d, T2g);
Chris@82 142 Tah = VADD(Taf, Tag);
Chris@82 143 Tcb = VSUB(Taf, Tag);
Chris@82 144 T3a = VSUB(T38, T39);
Chris@82 145 T3d = VADD(T3b, T3c);
Chris@82 146 T3e = VSUB(T3a, T3d);
Chris@82 147 T8G = VADD(T3a, T3d);
Chris@82 148 }
Chris@82 149 }
Chris@82 150 }
Chris@82 151 {
Chris@82 152 V Ti, T3j, T2l, T3h, Tl, T3g, T2o, T3k, Tp, T3q, T2s, T3o, Ts, T3n, T2v;
Chris@82 153 V T3r;
Chris@82 154 {
Chris@82 155 V Tg, Th, T2j, T2k;
Chris@82 156 Tg = LD(&(ri[WS(is, 4)]), ivs, &(ri[0]));
Chris@82 157 Th = LD(&(ri[WS(is, 36)]), ivs, &(ri[0]));
Chris@82 158 Ti = VADD(Tg, Th);
Chris@82 159 T3j = VSUB(Tg, Th);
Chris@82 160 T2j = LD(&(ii[WS(is, 4)]), ivs, &(ii[0]));
Chris@82 161 T2k = LD(&(ii[WS(is, 36)]), ivs, &(ii[0]));
Chris@82 162 T2l = VADD(T2j, T2k);
Chris@82 163 T3h = VSUB(T2j, T2k);
Chris@82 164 }
Chris@82 165 {
Chris@82 166 V Tj, Tk, T2m, T2n;
Chris@82 167 Tj = LD(&(ri[WS(is, 20)]), ivs, &(ri[0]));
Chris@82 168 Tk = LD(&(ri[WS(is, 52)]), ivs, &(ri[0]));
Chris@82 169 Tl = VADD(Tj, Tk);
Chris@82 170 T3g = VSUB(Tj, Tk);
Chris@82 171 T2m = LD(&(ii[WS(is, 20)]), ivs, &(ii[0]));
Chris@82 172 T2n = LD(&(ii[WS(is, 52)]), ivs, &(ii[0]));
Chris@82 173 T2o = VADD(T2m, T2n);
Chris@82 174 T3k = VSUB(T2m, T2n);
Chris@82 175 }
Chris@82 176 {
Chris@82 177 V Tn, To, T2q, T2r;
Chris@82 178 Tn = LD(&(ri[WS(is, 60)]), ivs, &(ri[0]));
Chris@82 179 To = LD(&(ri[WS(is, 28)]), ivs, &(ri[0]));
Chris@82 180 Tp = VADD(Tn, To);
Chris@82 181 T3q = VSUB(Tn, To);
Chris@82 182 T2q = LD(&(ii[WS(is, 60)]), ivs, &(ii[0]));
Chris@82 183 T2r = LD(&(ii[WS(is, 28)]), ivs, &(ii[0]));
Chris@82 184 T2s = VADD(T2q, T2r);
Chris@82 185 T3o = VSUB(T2q, T2r);
Chris@82 186 }
Chris@82 187 {
Chris@82 188 V Tq, Tr, T2t, T2u;
Chris@82 189 Tq = LD(&(ri[WS(is, 12)]), ivs, &(ri[0]));
Chris@82 190 Tr = LD(&(ri[WS(is, 44)]), ivs, &(ri[0]));
Chris@82 191 Ts = VADD(Tq, Tr);
Chris@82 192 T3n = VSUB(Tq, Tr);
Chris@82 193 T2t = LD(&(ii[WS(is, 12)]), ivs, &(ii[0]));
Chris@82 194 T2u = LD(&(ii[WS(is, 44)]), ivs, &(ii[0]));
Chris@82 195 T2v = VADD(T2t, T2u);
Chris@82 196 T3r = VSUB(T2t, T2u);
Chris@82 197 }
Chris@82 198 {
Chris@82 199 V Tm, Tt, Tai, Taj;
Chris@82 200 Tm = VADD(Ti, Tl);
Chris@82 201 Tt = VADD(Tp, Ts);
Chris@82 202 Tu = VADD(Tm, Tt);
Chris@82 203 TdI = VSUB(Tt, Tm);
Chris@82 204 Tai = VSUB(Ti, Tl);
Chris@82 205 Taj = VSUB(T2l, T2o);
Chris@82 206 Tak = VADD(Tai, Taj);
Chris@82 207 TbC = VSUB(Taj, Tai);
Chris@82 208 }
Chris@82 209 {
Chris@82 210 V Tal, Tam, T2p, T2w;
Chris@82 211 Tal = VSUB(Tp, Ts);
Chris@82 212 Tam = VSUB(T2s, T2v);
Chris@82 213 Tan = VSUB(Tal, Tam);
Chris@82 214 TbD = VADD(Tal, Tam);
Chris@82 215 T2p = VADD(T2l, T2o);
Chris@82 216 T2w = VADD(T2s, T2v);
Chris@82 217 T2x = VADD(T2p, T2w);
Chris@82 218 Tda = VSUB(T2p, T2w);
Chris@82 219 }
Chris@82 220 {
Chris@82 221 V T3i, T3l, T7E, T7F;
Chris@82 222 T3i = VADD(T3g, T3h);
Chris@82 223 T3l = VSUB(T3j, T3k);
Chris@82 224 T3m = VFMA(LDK(KP414213562), T3l, T3i);
Chris@82 225 T65 = VFNMS(LDK(KP414213562), T3i, T3l);
Chris@82 226 T7E = VADD(T3j, T3k);
Chris@82 227 T7F = VSUB(T3h, T3g);
Chris@82 228 T7G = VFMA(LDK(KP414213562), T7F, T7E);
Chris@82 229 T8I = VFNMS(LDK(KP414213562), T7E, T7F);
Chris@82 230 }
Chris@82 231 {
Chris@82 232 V T7H, T7I, T3p, T3s;
Chris@82 233 T7H = VADD(T3q, T3r);
Chris@82 234 T7I = VSUB(T3o, T3n);
Chris@82 235 T7J = VFNMS(LDK(KP414213562), T7I, T7H);
Chris@82 236 T8J = VFMA(LDK(KP414213562), T7H, T7I);
Chris@82 237 T3p = VADD(T3n, T3o);
Chris@82 238 T3s = VSUB(T3q, T3r);
Chris@82 239 T3t = VFNMS(LDK(KP414213562), T3s, T3p);
Chris@82 240 T64 = VFMA(LDK(KP414213562), T3p, T3s);
Chris@82 241 }
Chris@82 242 }
Chris@82 243 {
Chris@82 244 V Ty, T3H, T2B, T3x, TB, T3w, T2E, T3I, TI, T3K, T2L, T3E, TF, T3L, T2I;
Chris@82 245 V T3B;
Chris@82 246 {
Chris@82 247 V Tw, Tx, T2C, T2D;
Chris@82 248 Tw = LD(&(ri[WS(is, 2)]), ivs, &(ri[0]));
Chris@82 249 Tx = LD(&(ri[WS(is, 34)]), ivs, &(ri[0]));
Chris@82 250 Ty = VADD(Tw, Tx);
Chris@82 251 T3H = VSUB(Tw, Tx);
Chris@82 252 {
Chris@82 253 V T2z, T2A, Tz, TA;
Chris@82 254 T2z = LD(&(ii[WS(is, 2)]), ivs, &(ii[0]));
Chris@82 255 T2A = LD(&(ii[WS(is, 34)]), ivs, &(ii[0]));
Chris@82 256 T2B = VADD(T2z, T2A);
Chris@82 257 T3x = VSUB(T2z, T2A);
Chris@82 258 Tz = LD(&(ri[WS(is, 18)]), ivs, &(ri[0]));
Chris@82 259 TA = LD(&(ri[WS(is, 50)]), ivs, &(ri[0]));
Chris@82 260 TB = VADD(Tz, TA);
Chris@82 261 T3w = VSUB(Tz, TA);
Chris@82 262 }
Chris@82 263 T2C = LD(&(ii[WS(is, 18)]), ivs, &(ii[0]));
Chris@82 264 T2D = LD(&(ii[WS(is, 50)]), ivs, &(ii[0]));
Chris@82 265 T2E = VADD(T2C, T2D);
Chris@82 266 T3I = VSUB(T2C, T2D);
Chris@82 267 {
Chris@82 268 V TG, TH, T3C, T2J, T2K, T3D;
Chris@82 269 TG = LD(&(ri[WS(is, 58)]), ivs, &(ri[0]));
Chris@82 270 TH = LD(&(ri[WS(is, 26)]), ivs, &(ri[0]));
Chris@82 271 T3C = VSUB(TG, TH);
Chris@82 272 T2J = LD(&(ii[WS(is, 58)]), ivs, &(ii[0]));
Chris@82 273 T2K = LD(&(ii[WS(is, 26)]), ivs, &(ii[0]));
Chris@82 274 T3D = VSUB(T2J, T2K);
Chris@82 275 TI = VADD(TG, TH);
Chris@82 276 T3K = VADD(T3C, T3D);
Chris@82 277 T2L = VADD(T2J, T2K);
Chris@82 278 T3E = VSUB(T3C, T3D);
Chris@82 279 }
Chris@82 280 {
Chris@82 281 V TD, TE, T3z, T2G, T2H, T3A;
Chris@82 282 TD = LD(&(ri[WS(is, 10)]), ivs, &(ri[0]));
Chris@82 283 TE = LD(&(ri[WS(is, 42)]), ivs, &(ri[0]));
Chris@82 284 T3z = VSUB(TD, TE);
Chris@82 285 T2G = LD(&(ii[WS(is, 10)]), ivs, &(ii[0]));
Chris@82 286 T2H = LD(&(ii[WS(is, 42)]), ivs, &(ii[0]));
Chris@82 287 T3A = VSUB(T2G, T2H);
Chris@82 288 TF = VADD(TD, TE);
Chris@82 289 T3L = VSUB(T3A, T3z);
Chris@82 290 T2I = VADD(T2G, T2H);
Chris@82 291 T3B = VADD(T3z, T3A);
Chris@82 292 }
Chris@82 293 }
Chris@82 294 {
Chris@82 295 V TC, TJ, Taq, Tar;
Chris@82 296 TC = VADD(Ty, TB);
Chris@82 297 TJ = VADD(TF, TI);
Chris@82 298 TK = VADD(TC, TJ);
Chris@82 299 Tdd = VSUB(TC, TJ);
Chris@82 300 Taq = VSUB(TI, TF);
Chris@82 301 Tar = VSUB(T2B, T2E);
Chris@82 302 Tas = VADD(Taq, Tar);
Chris@82 303 Tce = VSUB(Tar, Taq);
Chris@82 304 }
Chris@82 305 {
Chris@82 306 V Tat, Tau, T2F, T2M;
Chris@82 307 Tat = VSUB(Ty, TB);
Chris@82 308 Tau = VSUB(T2I, T2L);
Chris@82 309 Tav = VADD(Tat, Tau);
Chris@82 310 Tcf = VSUB(Tat, Tau);
Chris@82 311 T2F = VADD(T2B, T2E);
Chris@82 312 T2M = VADD(T2I, T2L);
Chris@82 313 T2N = VADD(T2F, T2M);
Chris@82 314 Tdc = VSUB(T2F, T2M);
Chris@82 315 }
Chris@82 316 {
Chris@82 317 V T3y, T3F, T7M, T7N;
Chris@82 318 T3y = VADD(T3w, T3x);
Chris@82 319 T3F = VSUB(T3B, T3E);
Chris@82 320 T3G = VFNMS(LDK(KP707106781), T3F, T3y);
Chris@82 321 T6G = VFMA(LDK(KP707106781), T3F, T3y);
Chris@82 322 T7M = VSUB(T3x, T3w);
Chris@82 323 T7N = VADD(T3L, T3K);
Chris@82 324 T7O = VFMA(LDK(KP707106781), T7N, T7M);
Chris@82 325 T9k = VFNMS(LDK(KP707106781), T7N, T7M);
Chris@82 326 }
Chris@82 327 {
Chris@82 328 V T7P, T7Q, T3J, T3M;
Chris@82 329 T7P = VADD(T3H, T3I);
Chris@82 330 T7Q = VADD(T3B, T3E);
Chris@82 331 T7R = VFMA(LDK(KP707106781), T7Q, T7P);
Chris@82 332 T9l = VFNMS(LDK(KP707106781), T7Q, T7P);
Chris@82 333 T3J = VSUB(T3H, T3I);
Chris@82 334 T3M = VSUB(T3K, T3L);
Chris@82 335 T3N = VFNMS(LDK(KP707106781), T3M, T3J);
Chris@82 336 T6H = VFMA(LDK(KP707106781), T3M, T3J);
Chris@82 337 }
Chris@82 338 }
Chris@82 339 {
Chris@82 340 V T1z, T5I, T56, Tb8, T1C, T53, T5L, Tb9, T1J, Tbq, T5h, T5N, T1G, Tbp, T5c;
Chris@82 341 V T5O;
Chris@82 342 {
Chris@82 343 V T1x, T1y, T5J, T5K;
Chris@82 344 T1x = LD(&(ri[WS(is, 63)]), ivs, &(ri[WS(is, 1)]));
Chris@82 345 T1y = LD(&(ri[WS(is, 31)]), ivs, &(ri[WS(is, 1)]));
Chris@82 346 T1z = VADD(T1x, T1y);
Chris@82 347 T5I = VSUB(T1x, T1y);
Chris@82 348 {
Chris@82 349 V T54, T55, T1A, T1B;
Chris@82 350 T54 = LD(&(ii[WS(is, 63)]), ivs, &(ii[WS(is, 1)]));
Chris@82 351 T55 = LD(&(ii[WS(is, 31)]), ivs, &(ii[WS(is, 1)]));
Chris@82 352 T56 = VSUB(T54, T55);
Chris@82 353 Tb8 = VADD(T54, T55);
Chris@82 354 T1A = LD(&(ri[WS(is, 15)]), ivs, &(ri[WS(is, 1)]));
Chris@82 355 T1B = LD(&(ri[WS(is, 47)]), ivs, &(ri[WS(is, 1)]));
Chris@82 356 T1C = VADD(T1A, T1B);
Chris@82 357 T53 = VSUB(T1A, T1B);
Chris@82 358 }
Chris@82 359 T5J = LD(&(ii[WS(is, 15)]), ivs, &(ii[WS(is, 1)]));
Chris@82 360 T5K = LD(&(ii[WS(is, 47)]), ivs, &(ii[WS(is, 1)]));
Chris@82 361 T5L = VSUB(T5J, T5K);
Chris@82 362 Tb9 = VADD(T5J, T5K);
Chris@82 363 {
Chris@82 364 V T1H, T1I, T5d, T5e, T5f, T5g;
Chris@82 365 T1H = LD(&(ri[WS(is, 55)]), ivs, &(ri[WS(is, 1)]));
Chris@82 366 T1I = LD(&(ri[WS(is, 23)]), ivs, &(ri[WS(is, 1)]));
Chris@82 367 T5d = VSUB(T1H, T1I);
Chris@82 368 T5e = LD(&(ii[WS(is, 55)]), ivs, &(ii[WS(is, 1)]));
Chris@82 369 T5f = LD(&(ii[WS(is, 23)]), ivs, &(ii[WS(is, 1)]));
Chris@82 370 T5g = VSUB(T5e, T5f);
Chris@82 371 T1J = VADD(T1H, T1I);
Chris@82 372 Tbq = VADD(T5e, T5f);
Chris@82 373 T5h = VSUB(T5d, T5g);
Chris@82 374 T5N = VADD(T5d, T5g);
Chris@82 375 }
Chris@82 376 {
Chris@82 377 V T1E, T1F, T58, T59, T5a, T5b;
Chris@82 378 T1E = LD(&(ri[WS(is, 7)]), ivs, &(ri[WS(is, 1)]));
Chris@82 379 T1F = LD(&(ri[WS(is, 39)]), ivs, &(ri[WS(is, 1)]));
Chris@82 380 T58 = VSUB(T1E, T1F);
Chris@82 381 T59 = LD(&(ii[WS(is, 7)]), ivs, &(ii[WS(is, 1)]));
Chris@82 382 T5a = LD(&(ii[WS(is, 39)]), ivs, &(ii[WS(is, 1)]));
Chris@82 383 T5b = VSUB(T59, T5a);
Chris@82 384 T1G = VADD(T1E, T1F);
Chris@82 385 Tbp = VADD(T59, T5a);
Chris@82 386 T5c = VADD(T58, T5b);
Chris@82 387 T5O = VSUB(T5b, T58);
Chris@82 388 }
Chris@82 389 }
Chris@82 390 {
Chris@82 391 V T1D, T1K, Tbo, Tbr;
Chris@82 392 T1D = VADD(T1z, T1C);
Chris@82 393 T1K = VADD(T1G, T1J);
Chris@82 394 T1L = VADD(T1D, T1K);
Chris@82 395 TdA = VSUB(T1D, T1K);
Chris@82 396 Tbo = VSUB(T1z, T1C);
Chris@82 397 Tbr = VSUB(Tbp, Tbq);
Chris@82 398 Tbs = VADD(Tbo, Tbr);
Chris@82 399 Tct = VSUB(Tbo, Tbr);
Chris@82 400 }
Chris@82 401 {
Chris@82 402 V Tdv, Tdw, T57, T5i;
Chris@82 403 Tdv = VADD(Tb8, Tb9);
Chris@82 404 Tdw = VADD(Tbp, Tbq);
Chris@82 405 Tdx = VSUB(Tdv, Tdw);
Chris@82 406 Teo = VADD(Tdv, Tdw);
Chris@82 407 T57 = VADD(T53, T56);
Chris@82 408 T5i = VSUB(T5c, T5h);
Chris@82 409 T5j = VFNMS(LDK(KP707106781), T5i, T57);
Chris@82 410 T6Y = VFMA(LDK(KP707106781), T5i, T57);
Chris@82 411 }
Chris@82 412 {
Chris@82 413 V T5M, T5P, T8w, T8x;
Chris@82 414 T5M = VSUB(T5I, T5L);
Chris@82 415 T5P = VSUB(T5N, T5O);
Chris@82 416 T5Q = VFNMS(LDK(KP707106781), T5P, T5M);
Chris@82 417 T6V = VFMA(LDK(KP707106781), T5P, T5M);
Chris@82 418 T8w = VADD(T5I, T5L);
Chris@82 419 T8x = VADD(T5c, T5h);
Chris@82 420 T8y = VFMA(LDK(KP707106781), T8x, T8w);
Chris@82 421 T9z = VFNMS(LDK(KP707106781), T8x, T8w);
Chris@82 422 }
Chris@82 423 {
Chris@82 424 V Tb7, Tba, T8l, T8m;
Chris@82 425 Tb7 = VSUB(T1J, T1G);
Chris@82 426 Tba = VSUB(Tb8, Tb9);
Chris@82 427 Tbb = VADD(Tb7, Tba);
Chris@82 428 Tcw = VSUB(Tba, Tb7);
Chris@82 429 T8l = VSUB(T56, T53);
Chris@82 430 T8m = VADD(T5O, T5N);
Chris@82 431 T8n = VFMA(LDK(KP707106781), T8m, T8l);
Chris@82 432 T9C = VFNMS(LDK(KP707106781), T8m, T8l);
Chris@82 433 }
Chris@82 434 }
Chris@82 435 {
Chris@82 436 V TN, T40, T2Q, T3Q, TQ, T3P, T2T, T41, TX, T43, T30, T3X, TU, T44, T2X;
Chris@82 437 V T3U;
Chris@82 438 {
Chris@82 439 V TL, TM, T2R, T2S;
Chris@82 440 TL = LD(&(ri[WS(is, 62)]), ivs, &(ri[0]));
Chris@82 441 TM = LD(&(ri[WS(is, 30)]), ivs, &(ri[0]));
Chris@82 442 TN = VADD(TL, TM);
Chris@82 443 T40 = VSUB(TL, TM);
Chris@82 444 {
Chris@82 445 V T2O, T2P, TO, TP;
Chris@82 446 T2O = LD(&(ii[WS(is, 62)]), ivs, &(ii[0]));
Chris@82 447 T2P = LD(&(ii[WS(is, 30)]), ivs, &(ii[0]));
Chris@82 448 T2Q = VADD(T2O, T2P);
Chris@82 449 T3Q = VSUB(T2O, T2P);
Chris@82 450 TO = LD(&(ri[WS(is, 14)]), ivs, &(ri[0]));
Chris@82 451 TP = LD(&(ri[WS(is, 46)]), ivs, &(ri[0]));
Chris@82 452 TQ = VADD(TO, TP);
Chris@82 453 T3P = VSUB(TO, TP);
Chris@82 454 }
Chris@82 455 T2R = LD(&(ii[WS(is, 14)]), ivs, &(ii[0]));
Chris@82 456 T2S = LD(&(ii[WS(is, 46)]), ivs, &(ii[0]));
Chris@82 457 T2T = VADD(T2R, T2S);
Chris@82 458 T41 = VSUB(T2R, T2S);
Chris@82 459 {
Chris@82 460 V TV, TW, T3V, T2Y, T2Z, T3W;
Chris@82 461 TV = LD(&(ri[WS(is, 54)]), ivs, &(ri[0]));
Chris@82 462 TW = LD(&(ri[WS(is, 22)]), ivs, &(ri[0]));
Chris@82 463 T3V = VSUB(TV, TW);
Chris@82 464 T2Y = LD(&(ii[WS(is, 54)]), ivs, &(ii[0]));
Chris@82 465 T2Z = LD(&(ii[WS(is, 22)]), ivs, &(ii[0]));
Chris@82 466 T3W = VSUB(T2Y, T2Z);
Chris@82 467 TX = VADD(TV, TW);
Chris@82 468 T43 = VADD(T3V, T3W);
Chris@82 469 T30 = VADD(T2Y, T2Z);
Chris@82 470 T3X = VSUB(T3V, T3W);
Chris@82 471 }
Chris@82 472 {
Chris@82 473 V TS, TT, T3S, T2V, T2W, T3T;
Chris@82 474 TS = LD(&(ri[WS(is, 6)]), ivs, &(ri[0]));
Chris@82 475 TT = LD(&(ri[WS(is, 38)]), ivs, &(ri[0]));
Chris@82 476 T3S = VSUB(TS, TT);
Chris@82 477 T2V = LD(&(ii[WS(is, 6)]), ivs, &(ii[0]));
Chris@82 478 T2W = LD(&(ii[WS(is, 38)]), ivs, &(ii[0]));
Chris@82 479 T3T = VSUB(T2V, T2W);
Chris@82 480 TU = VADD(TS, TT);
Chris@82 481 T44 = VSUB(T3T, T3S);
Chris@82 482 T2X = VADD(T2V, T2W);
Chris@82 483 T3U = VADD(T3S, T3T);
Chris@82 484 }
Chris@82 485 }
Chris@82 486 {
Chris@82 487 V TR, TY, Tax, Tay;
Chris@82 488 TR = VADD(TN, TQ);
Chris@82 489 TY = VADD(TU, TX);
Chris@82 490 TZ = VADD(TR, TY);
Chris@82 491 Tdf = VSUB(TR, TY);
Chris@82 492 Tax = VSUB(TX, TU);
Chris@82 493 Tay = VSUB(T2Q, T2T);
Chris@82 494 Taz = VADD(Tax, Tay);
Chris@82 495 Tch = VSUB(Tay, Tax);
Chris@82 496 }
Chris@82 497 {
Chris@82 498 V TaA, TaB, T2U, T31;
Chris@82 499 TaA = VSUB(TN, TQ);
Chris@82 500 TaB = VSUB(T2X, T30);
Chris@82 501 TaC = VADD(TaA, TaB);
Chris@82 502 Tci = VSUB(TaA, TaB);
Chris@82 503 T2U = VADD(T2Q, T2T);
Chris@82 504 T31 = VADD(T2X, T30);
Chris@82 505 T32 = VADD(T2U, T31);
Chris@82 506 Tdg = VSUB(T2U, T31);
Chris@82 507 }
Chris@82 508 {
Chris@82 509 V T3R, T3Y, T7T, T7U;
Chris@82 510 T3R = VADD(T3P, T3Q);
Chris@82 511 T3Y = VSUB(T3U, T3X);
Chris@82 512 T3Z = VFNMS(LDK(KP707106781), T3Y, T3R);
Chris@82 513 T6J = VFMA(LDK(KP707106781), T3Y, T3R);
Chris@82 514 T7T = VSUB(T3Q, T3P);
Chris@82 515 T7U = VADD(T44, T43);
Chris@82 516 T7V = VFMA(LDK(KP707106781), T7U, T7T);
Chris@82 517 T9n = VFNMS(LDK(KP707106781), T7U, T7T);
Chris@82 518 }
Chris@82 519 {
Chris@82 520 V T7W, T7X, T42, T45;
Chris@82 521 T7W = VADD(T40, T41);
Chris@82 522 T7X = VADD(T3U, T3X);
Chris@82 523 T7Y = VFMA(LDK(KP707106781), T7X, T7W);
Chris@82 524 T9o = VFNMS(LDK(KP707106781), T7X, T7W);
Chris@82 525 T42 = VSUB(T40, T41);
Chris@82 526 T45 = VSUB(T43, T44);
Chris@82 527 T46 = VFNMS(LDK(KP707106781), T45, T42);
Chris@82 528 T6K = VFMA(LDK(KP707106781), T45, T42);
Chris@82 529 }
Chris@82 530 }
Chris@82 531 {
Chris@82 532 V T14, T4P, T4d, TaH, T17, T4a, T4S, TaI, T1e, TaZ, T4o, T4U, T1b, TaY, T4j;
Chris@82 533 V T4V;
Chris@82 534 {
Chris@82 535 V T12, T13, T4Q, T4R;
Chris@82 536 T12 = LD(&(ri[WS(is, 1)]), ivs, &(ri[WS(is, 1)]));
Chris@82 537 T13 = LD(&(ri[WS(is, 33)]), ivs, &(ri[WS(is, 1)]));
Chris@82 538 T14 = VADD(T12, T13);
Chris@82 539 T4P = VSUB(T12, T13);
Chris@82 540 {
Chris@82 541 V T4b, T4c, T15, T16;
Chris@82 542 T4b = LD(&(ii[WS(is, 1)]), ivs, &(ii[WS(is, 1)]));
Chris@82 543 T4c = LD(&(ii[WS(is, 33)]), ivs, &(ii[WS(is, 1)]));
Chris@82 544 T4d = VSUB(T4b, T4c);
Chris@82 545 TaH = VADD(T4b, T4c);
Chris@82 546 T15 = LD(&(ri[WS(is, 17)]), ivs, &(ri[WS(is, 1)]));
Chris@82 547 T16 = LD(&(ri[WS(is, 49)]), ivs, &(ri[WS(is, 1)]));
Chris@82 548 T17 = VADD(T15, T16);
Chris@82 549 T4a = VSUB(T15, T16);
Chris@82 550 }
Chris@82 551 T4Q = LD(&(ii[WS(is, 17)]), ivs, &(ii[WS(is, 1)]));
Chris@82 552 T4R = LD(&(ii[WS(is, 49)]), ivs, &(ii[WS(is, 1)]));
Chris@82 553 T4S = VSUB(T4Q, T4R);
Chris@82 554 TaI = VADD(T4Q, T4R);
Chris@82 555 {
Chris@82 556 V T1c, T1d, T4k, T4l, T4m, T4n;
Chris@82 557 T1c = LD(&(ri[WS(is, 57)]), ivs, &(ri[WS(is, 1)]));
Chris@82 558 T1d = LD(&(ri[WS(is, 25)]), ivs, &(ri[WS(is, 1)]));
Chris@82 559 T4k = VSUB(T1c, T1d);
Chris@82 560 T4l = LD(&(ii[WS(is, 57)]), ivs, &(ii[WS(is, 1)]));
Chris@82 561 T4m = LD(&(ii[WS(is, 25)]), ivs, &(ii[WS(is, 1)]));
Chris@82 562 T4n = VSUB(T4l, T4m);
Chris@82 563 T1e = VADD(T1c, T1d);
Chris@82 564 TaZ = VADD(T4l, T4m);
Chris@82 565 T4o = VSUB(T4k, T4n);
Chris@82 566 T4U = VADD(T4k, T4n);
Chris@82 567 }
Chris@82 568 {
Chris@82 569 V T19, T1a, T4f, T4g, T4h, T4i;
Chris@82 570 T19 = LD(&(ri[WS(is, 9)]), ivs, &(ri[WS(is, 1)]));
Chris@82 571 T1a = LD(&(ri[WS(is, 41)]), ivs, &(ri[WS(is, 1)]));
Chris@82 572 T4f = VSUB(T19, T1a);
Chris@82 573 T4g = LD(&(ii[WS(is, 9)]), ivs, &(ii[WS(is, 1)]));
Chris@82 574 T4h = LD(&(ii[WS(is, 41)]), ivs, &(ii[WS(is, 1)]));
Chris@82 575 T4i = VSUB(T4g, T4h);
Chris@82 576 T1b = VADD(T19, T1a);
Chris@82 577 TaY = VADD(T4g, T4h);
Chris@82 578 T4j = VADD(T4f, T4i);
Chris@82 579 T4V = VSUB(T4i, T4f);
Chris@82 580 }
Chris@82 581 }
Chris@82 582 {
Chris@82 583 V T18, T1f, TaX, Tb0;
Chris@82 584 T18 = VADD(T14, T17);
Chris@82 585 T1f = VADD(T1b, T1e);
Chris@82 586 T1g = VADD(T18, T1f);
Chris@82 587 Tdp = VSUB(T18, T1f);
Chris@82 588 TaX = VSUB(T14, T17);
Chris@82 589 Tb0 = VSUB(TaY, TaZ);
Chris@82 590 Tb1 = VADD(TaX, Tb0);
Chris@82 591 Tcm = VSUB(TaX, Tb0);
Chris@82 592 }
Chris@82 593 {
Chris@82 594 V Tdk, Tdl, T4e, T4p;
Chris@82 595 Tdk = VADD(TaH, TaI);
Chris@82 596 Tdl = VADD(TaY, TaZ);
Chris@82 597 Tdm = VSUB(Tdk, Tdl);
Chris@82 598 Tej = VADD(Tdk, Tdl);
Chris@82 599 T4e = VADD(T4a, T4d);
Chris@82 600 T4p = VSUB(T4j, T4o);
Chris@82 601 T4q = VFNMS(LDK(KP707106781), T4p, T4e);
Chris@82 602 T6R = VFMA(LDK(KP707106781), T4p, T4e);
Chris@82 603 }
Chris@82 604 {
Chris@82 605 V T4T, T4W, T8d, T8e;
Chris@82 606 T4T = VSUB(T4P, T4S);
Chris@82 607 T4W = VSUB(T4U, T4V);
Chris@82 608 T4X = VFNMS(LDK(KP707106781), T4W, T4T);
Chris@82 609 T6O = VFMA(LDK(KP707106781), T4W, T4T);
Chris@82 610 T8d = VADD(T4P, T4S);
Chris@82 611 T8e = VADD(T4j, T4o);
Chris@82 612 T8f = VFMA(LDK(KP707106781), T8e, T8d);
Chris@82 613 T9s = VFNMS(LDK(KP707106781), T8e, T8d);
Chris@82 614 }
Chris@82 615 {
Chris@82 616 V TaG, TaJ, T82, T83;
Chris@82 617 TaG = VSUB(T1e, T1b);
Chris@82 618 TaJ = VSUB(TaH, TaI);
Chris@82 619 TaK = VADD(TaG, TaJ);
Chris@82 620 Tcp = VSUB(TaJ, TaG);
Chris@82 621 T82 = VSUB(T4d, T4a);
Chris@82 622 T83 = VADD(T4V, T4U);
Chris@82 623 T84 = VFMA(LDK(KP707106781), T83, T82);
Chris@82 624 T9v = VFNMS(LDK(KP707106781), T83, T82);
Chris@82 625 }
Chris@82 626 }
Chris@82 627 {
Chris@82 628 V T1j, TaL, T1m, TaM, T4G, T4L, TaO, TaN, T86, T85, T1q, TaR, T1t, TaS, T4v;
Chris@82 629 V T4A, TaT, TaQ, T89, T88;
Chris@82 630 {
Chris@82 631 V T4C, T4K, T4H, T4F;
Chris@82 632 {
Chris@82 633 V T1h, T1i, T4I, T4J;
Chris@82 634 T1h = LD(&(ri[WS(is, 5)]), ivs, &(ri[WS(is, 1)]));
Chris@82 635 T1i = LD(&(ri[WS(is, 37)]), ivs, &(ri[WS(is, 1)]));
Chris@82 636 T1j = VADD(T1h, T1i);
Chris@82 637 T4C = VSUB(T1h, T1i);
Chris@82 638 T4I = LD(&(ii[WS(is, 5)]), ivs, &(ii[WS(is, 1)]));
Chris@82 639 T4J = LD(&(ii[WS(is, 37)]), ivs, &(ii[WS(is, 1)]));
Chris@82 640 T4K = VSUB(T4I, T4J);
Chris@82 641 TaL = VADD(T4I, T4J);
Chris@82 642 }
Chris@82 643 {
Chris@82 644 V T1k, T1l, T4D, T4E;
Chris@82 645 T1k = LD(&(ri[WS(is, 21)]), ivs, &(ri[WS(is, 1)]));
Chris@82 646 T1l = LD(&(ri[WS(is, 53)]), ivs, &(ri[WS(is, 1)]));
Chris@82 647 T1m = VADD(T1k, T1l);
Chris@82 648 T4H = VSUB(T1k, T1l);
Chris@82 649 T4D = LD(&(ii[WS(is, 21)]), ivs, &(ii[WS(is, 1)]));
Chris@82 650 T4E = LD(&(ii[WS(is, 53)]), ivs, &(ii[WS(is, 1)]));
Chris@82 651 T4F = VSUB(T4D, T4E);
Chris@82 652 TaM = VADD(T4D, T4E);
Chris@82 653 }
Chris@82 654 T4G = VSUB(T4C, T4F);
Chris@82 655 T4L = VADD(T4H, T4K);
Chris@82 656 TaO = VSUB(T1j, T1m);
Chris@82 657 TaN = VSUB(TaL, TaM);
Chris@82 658 T86 = VADD(T4C, T4F);
Chris@82 659 T85 = VSUB(T4K, T4H);
Chris@82 660 }
Chris@82 661 {
Chris@82 662 V T4r, T4z, T4w, T4u;
Chris@82 663 {
Chris@82 664 V T1o, T1p, T4x, T4y;
Chris@82 665 T1o = LD(&(ri[WS(is, 61)]), ivs, &(ri[WS(is, 1)]));
Chris@82 666 T1p = LD(&(ri[WS(is, 29)]), ivs, &(ri[WS(is, 1)]));
Chris@82 667 T1q = VADD(T1o, T1p);
Chris@82 668 T4r = VSUB(T1o, T1p);
Chris@82 669 T4x = LD(&(ii[WS(is, 61)]), ivs, &(ii[WS(is, 1)]));
Chris@82 670 T4y = LD(&(ii[WS(is, 29)]), ivs, &(ii[WS(is, 1)]));
Chris@82 671 T4z = VSUB(T4x, T4y);
Chris@82 672 TaR = VADD(T4x, T4y);
Chris@82 673 }
Chris@82 674 {
Chris@82 675 V T1r, T1s, T4s, T4t;
Chris@82 676 T1r = LD(&(ri[WS(is, 13)]), ivs, &(ri[WS(is, 1)]));
Chris@82 677 T1s = LD(&(ri[WS(is, 45)]), ivs, &(ri[WS(is, 1)]));
Chris@82 678 T1t = VADD(T1r, T1s);
Chris@82 679 T4w = VSUB(T1r, T1s);
Chris@82 680 T4s = LD(&(ii[WS(is, 13)]), ivs, &(ii[WS(is, 1)]));
Chris@82 681 T4t = LD(&(ii[WS(is, 45)]), ivs, &(ii[WS(is, 1)]));
Chris@82 682 T4u = VSUB(T4s, T4t);
Chris@82 683 TaS = VADD(T4s, T4t);
Chris@82 684 }
Chris@82 685 T4v = VSUB(T4r, T4u);
Chris@82 686 T4A = VADD(T4w, T4z);
Chris@82 687 TaT = VSUB(TaR, TaS);
Chris@82 688 TaQ = VSUB(T1q, T1t);
Chris@82 689 T89 = VADD(T4r, T4u);
Chris@82 690 T88 = VSUB(T4z, T4w);
Chris@82 691 }
Chris@82 692 {
Chris@82 693 V T1n, T1u, Tb2, Tb3;
Chris@82 694 T1n = VADD(T1j, T1m);
Chris@82 695 T1u = VADD(T1q, T1t);
Chris@82 696 T1v = VADD(T1n, T1u);
Chris@82 697 Tdn = VSUB(T1u, T1n);
Chris@82 698 Tb2 = VADD(TaO, TaN);
Chris@82 699 Tb3 = VSUB(TaQ, TaT);
Chris@82 700 Tb4 = VADD(Tb2, Tb3);
Chris@82 701 Tcq = VSUB(Tb2, Tb3);
Chris@82 702 }
Chris@82 703 {
Chris@82 704 V Tdq, Tdr, T4B, T4M;
Chris@82 705 Tdq = VADD(TaL, TaM);
Chris@82 706 Tdr = VADD(TaR, TaS);
Chris@82 707 Tds = VSUB(Tdq, Tdr);
Chris@82 708 Tek = VADD(Tdq, Tdr);
Chris@82 709 T4B = VFMA(LDK(KP414213562), T4A, T4v);
Chris@82 710 T4M = VFNMS(LDK(KP414213562), T4L, T4G);
Chris@82 711 T4N = VSUB(T4B, T4M);
Chris@82 712 T6P = VADD(T4M, T4B);
Chris@82 713 }
Chris@82 714 {
Chris@82 715 V T4Y, T4Z, T8g, T8h;
Chris@82 716 T4Y = VFMA(LDK(KP414213562), T4G, T4L);
Chris@82 717 T4Z = VFNMS(LDK(KP414213562), T4v, T4A);
Chris@82 718 T50 = VSUB(T4Y, T4Z);
Chris@82 719 T6S = VADD(T4Y, T4Z);
Chris@82 720 T8g = VFMA(LDK(KP414213562), T85, T86);
Chris@82 721 T8h = VFNMS(LDK(KP414213562), T88, T89);
Chris@82 722 T8i = VADD(T8g, T8h);
Chris@82 723 T9w = VSUB(T8g, T8h);
Chris@82 724 }
Chris@82 725 {
Chris@82 726 V TaP, TaU, T87, T8a;
Chris@82 727 TaP = VSUB(TaN, TaO);
Chris@82 728 TaU = VADD(TaQ, TaT);
Chris@82 729 TaV = VADD(TaP, TaU);
Chris@82 730 Tcn = VSUB(TaU, TaP);
Chris@82 731 T87 = VFNMS(LDK(KP414213562), T86, T85);
Chris@82 732 T8a = VFMA(LDK(KP414213562), T89, T88);
Chris@82 733 T8b = VADD(T87, T8a);
Chris@82 734 T9t = VSUB(T8a, T87);
Chris@82 735 }
Chris@82 736 }
Chris@82 737 {
Chris@82 738 V T1O, Tbc, T1R, Tbd, T5z, T5E, Tbf, Tbe, T8p, T8o, T1V, Tbi, T1Y, Tbj, T5o;
Chris@82 739 V T5t, Tbk, Tbh, T8s, T8r;
Chris@82 740 {
Chris@82 741 V T5v, T5D, T5A, T5y;
Chris@82 742 {
Chris@82 743 V T1M, T1N, T5B, T5C;
Chris@82 744 T1M = LD(&(ri[WS(is, 3)]), ivs, &(ri[WS(is, 1)]));
Chris@82 745 T1N = LD(&(ri[WS(is, 35)]), ivs, &(ri[WS(is, 1)]));
Chris@82 746 T1O = VADD(T1M, T1N);
Chris@82 747 T5v = VSUB(T1M, T1N);
Chris@82 748 T5B = LD(&(ii[WS(is, 3)]), ivs, &(ii[WS(is, 1)]));
Chris@82 749 T5C = LD(&(ii[WS(is, 35)]), ivs, &(ii[WS(is, 1)]));
Chris@82 750 T5D = VSUB(T5B, T5C);
Chris@82 751 Tbc = VADD(T5B, T5C);
Chris@82 752 }
Chris@82 753 {
Chris@82 754 V T1P, T1Q, T5w, T5x;
Chris@82 755 T1P = LD(&(ri[WS(is, 19)]), ivs, &(ri[WS(is, 1)]));
Chris@82 756 T1Q = LD(&(ri[WS(is, 51)]), ivs, &(ri[WS(is, 1)]));
Chris@82 757 T1R = VADD(T1P, T1Q);
Chris@82 758 T5A = VSUB(T1P, T1Q);
Chris@82 759 T5w = LD(&(ii[WS(is, 19)]), ivs, &(ii[WS(is, 1)]));
Chris@82 760 T5x = LD(&(ii[WS(is, 51)]), ivs, &(ii[WS(is, 1)]));
Chris@82 761 T5y = VSUB(T5w, T5x);
Chris@82 762 Tbd = VADD(T5w, T5x);
Chris@82 763 }
Chris@82 764 T5z = VSUB(T5v, T5y);
Chris@82 765 T5E = VADD(T5A, T5D);
Chris@82 766 Tbf = VSUB(T1O, T1R);
Chris@82 767 Tbe = VSUB(Tbc, Tbd);
Chris@82 768 T8p = VADD(T5v, T5y);
Chris@82 769 T8o = VSUB(T5D, T5A);
Chris@82 770 }
Chris@82 771 {
Chris@82 772 V T5k, T5s, T5p, T5n;
Chris@82 773 {
Chris@82 774 V T1T, T1U, T5q, T5r;
Chris@82 775 T1T = LD(&(ri[WS(is, 59)]), ivs, &(ri[WS(is, 1)]));
Chris@82 776 T1U = LD(&(ri[WS(is, 27)]), ivs, &(ri[WS(is, 1)]));
Chris@82 777 T1V = VADD(T1T, T1U);
Chris@82 778 T5k = VSUB(T1T, T1U);
Chris@82 779 T5q = LD(&(ii[WS(is, 59)]), ivs, &(ii[WS(is, 1)]));
Chris@82 780 T5r = LD(&(ii[WS(is, 27)]), ivs, &(ii[WS(is, 1)]));
Chris@82 781 T5s = VSUB(T5q, T5r);
Chris@82 782 Tbi = VADD(T5q, T5r);
Chris@82 783 }
Chris@82 784 {
Chris@82 785 V T1W, T1X, T5l, T5m;
Chris@82 786 T1W = LD(&(ri[WS(is, 11)]), ivs, &(ri[WS(is, 1)]));
Chris@82 787 T1X = LD(&(ri[WS(is, 43)]), ivs, &(ri[WS(is, 1)]));
Chris@82 788 T1Y = VADD(T1W, T1X);
Chris@82 789 T5p = VSUB(T1W, T1X);
Chris@82 790 T5l = LD(&(ii[WS(is, 11)]), ivs, &(ii[WS(is, 1)]));
Chris@82 791 T5m = LD(&(ii[WS(is, 43)]), ivs, &(ii[WS(is, 1)]));
Chris@82 792 T5n = VSUB(T5l, T5m);
Chris@82 793 Tbj = VADD(T5l, T5m);
Chris@82 794 }
Chris@82 795 T5o = VSUB(T5k, T5n);
Chris@82 796 T5t = VADD(T5p, T5s);
Chris@82 797 Tbk = VSUB(Tbi, Tbj);
Chris@82 798 Tbh = VSUB(T1V, T1Y);
Chris@82 799 T8s = VADD(T5k, T5n);
Chris@82 800 T8r = VSUB(T5s, T5p);
Chris@82 801 }
Chris@82 802 {
Chris@82 803 V T1S, T1Z, Tbt, Tbu;
Chris@82 804 T1S = VADD(T1O, T1R);
Chris@82 805 T1Z = VADD(T1V, T1Y);
Chris@82 806 T20 = VADD(T1S, T1Z);
Chris@82 807 Tdy = VSUB(T1Z, T1S);
Chris@82 808 Tbt = VADD(Tbf, Tbe);
Chris@82 809 Tbu = VSUB(Tbh, Tbk);
Chris@82 810 Tbv = VADD(Tbt, Tbu);
Chris@82 811 Tcx = VSUB(Tbt, Tbu);
Chris@82 812 }
Chris@82 813 {
Chris@82 814 V TdB, TdC, T5u, T5F;
Chris@82 815 TdB = VADD(Tbc, Tbd);
Chris@82 816 TdC = VADD(Tbi, Tbj);
Chris@82 817 TdD = VSUB(TdB, TdC);
Chris@82 818 Tep = VADD(TdB, TdC);
Chris@82 819 T5u = VFMA(LDK(KP414213562), T5t, T5o);
Chris@82 820 T5F = VFNMS(LDK(KP414213562), T5E, T5z);
Chris@82 821 T5G = VSUB(T5u, T5F);
Chris@82 822 T6W = VADD(T5F, T5u);
Chris@82 823 }
Chris@82 824 {
Chris@82 825 V T5R, T5S, T8z, T8A;
Chris@82 826 T5R = VFMA(LDK(KP414213562), T5z, T5E);
Chris@82 827 T5S = VFNMS(LDK(KP414213562), T5o, T5t);
Chris@82 828 T5T = VSUB(T5R, T5S);
Chris@82 829 T6Z = VADD(T5R, T5S);
Chris@82 830 T8z = VFMA(LDK(KP414213562), T8o, T8p);
Chris@82 831 T8A = VFNMS(LDK(KP414213562), T8r, T8s);
Chris@82 832 T8B = VADD(T8z, T8A);
Chris@82 833 T9D = VSUB(T8z, T8A);
Chris@82 834 }
Chris@82 835 {
Chris@82 836 V Tbg, Tbl, T8q, T8t;
Chris@82 837 Tbg = VSUB(Tbe, Tbf);
Chris@82 838 Tbl = VADD(Tbh, Tbk);
Chris@82 839 Tbm = VADD(Tbg, Tbl);
Chris@82 840 Tcu = VSUB(Tbl, Tbg);
Chris@82 841 T8q = VFNMS(LDK(KP414213562), T8p, T8o);
Chris@82 842 T8t = VFMA(LDK(KP414213562), T8s, T8r);
Chris@82 843 T8u = VADD(T8q, T8t);
Chris@82 844 T9A = VSUB(T8t, T8q);
Chris@82 845 }
Chris@82 846 }
Chris@82 847 {
Chris@82 848 V TeJ, TeK, TeL, TeM, TeN, TeO, TeP, TeQ, TeR, TeS, TeT, TeU, TeV, TeW, TeX;
Chris@82 849 V TeY, TeZ, Tf0, Tf1, Tf2, Tf3, Tf4, Tf5, Tf6, Tf7, Tf8, Tf9, Tfa, Tfb, Tfc;
Chris@82 850 V Tfd, Tfe, Tff, Tfg, Tfh, Tfi, Tfj, Tfk, Tfl, Tfm, Tfn, Tfo, Tfp, Tfq, Tfr;
Chris@82 851 V Tfs, Tft, Tfu;
Chris@82 852 {
Chris@82 853 V T11, TeD, TeG, TeI, T22, T23, T34, TeH;
Chris@82 854 {
Chris@82 855 V Tv, T10, TeE, TeF;
Chris@82 856 Tv = VADD(Tf, Tu);
Chris@82 857 T10 = VADD(TK, TZ);
Chris@82 858 T11 = VADD(Tv, T10);
Chris@82 859 TeD = VSUB(Tv, T10);
Chris@82 860 TeE = VADD(Tej, Tek);
Chris@82 861 TeF = VADD(Teo, Tep);
Chris@82 862 TeG = VSUB(TeE, TeF);
Chris@82 863 TeI = VADD(TeE, TeF);
Chris@82 864 }
Chris@82 865 {
Chris@82 866 V T1w, T21, T2y, T33;
Chris@82 867 T1w = VADD(T1g, T1v);
Chris@82 868 T21 = VADD(T1L, T20);
Chris@82 869 T22 = VADD(T1w, T21);
Chris@82 870 T23 = VSUB(T21, T1w);
Chris@82 871 T2y = VADD(T2i, T2x);
Chris@82 872 T33 = VADD(T2N, T32);
Chris@82 873 T34 = VSUB(T2y, T33);
Chris@82 874 TeH = VADD(T2y, T33);
Chris@82 875 }
Chris@82 876 TeJ = VSUB(T11, T22);
Chris@82 877 STM4(&(ro[32]), TeJ, ovs, &(ro[0]));
Chris@82 878 TeK = VSUB(TeH, TeI);
Chris@82 879 STM4(&(io[32]), TeK, ovs, &(io[0]));
Chris@82 880 TeL = VADD(T11, T22);
Chris@82 881 STM4(&(ro[0]), TeL, ovs, &(ro[0]));
Chris@82 882 TeM = VADD(TeH, TeI);
Chris@82 883 STM4(&(io[0]), TeM, ovs, &(io[0]));
Chris@82 884 TeN = VADD(T23, T34);
Chris@82 885 STM4(&(io[16]), TeN, ovs, &(io[0]));
Chris@82 886 TeO = VADD(TeD, TeG);
Chris@82 887 STM4(&(ro[16]), TeO, ovs, &(ro[0]));
Chris@82 888 TeP = VSUB(T34, T23);
Chris@82 889 STM4(&(io[48]), TeP, ovs, &(io[0]));
Chris@82 890 TeQ = VSUB(TeD, TeG);
Chris@82 891 STM4(&(ro[48]), TeQ, ovs, &(ro[0]));
Chris@82 892 }
Chris@82 893 {
Chris@82 894 V Teh, Tex, Tev, TeB, Tem, Tey, Ter, Tez;
Chris@82 895 {
Chris@82 896 V Tef, Teg, Tet, Teu;
Chris@82 897 Tef = VSUB(Tf, Tu);
Chris@82 898 Teg = VSUB(T2N, T32);
Chris@82 899 Teh = VADD(Tef, Teg);
Chris@82 900 Tex = VSUB(Tef, Teg);
Chris@82 901 Tet = VSUB(T2i, T2x);
Chris@82 902 Teu = VSUB(TZ, TK);
Chris@82 903 Tev = VSUB(Tet, Teu);
Chris@82 904 TeB = VADD(Teu, Tet);
Chris@82 905 }
Chris@82 906 {
Chris@82 907 V Tei, Tel, Ten, Teq;
Chris@82 908 Tei = VSUB(T1g, T1v);
Chris@82 909 Tel = VSUB(Tej, Tek);
Chris@82 910 Tem = VADD(Tei, Tel);
Chris@82 911 Tey = VSUB(Tel, Tei);
Chris@82 912 Ten = VSUB(T1L, T20);
Chris@82 913 Teq = VSUB(Teo, Tep);
Chris@82 914 Ter = VSUB(Ten, Teq);
Chris@82 915 Tez = VADD(Ten, Teq);
Chris@82 916 }
Chris@82 917 {
Chris@82 918 V Tes, TeC, Tew, TeA;
Chris@82 919 Tes = VADD(Tem, Ter);
Chris@82 920 TeR = VFNMS(LDK(KP707106781), Tes, Teh);
Chris@82 921 STM4(&(ro[40]), TeR, ovs, &(ro[0]));
Chris@82 922 TeS = VFMA(LDK(KP707106781), Tes, Teh);
Chris@82 923 STM4(&(ro[8]), TeS, ovs, &(ro[0]));
Chris@82 924 TeC = VADD(Tey, Tez);
Chris@82 925 TeT = VFNMS(LDK(KP707106781), TeC, TeB);
Chris@82 926 STM4(&(io[40]), TeT, ovs, &(io[0]));
Chris@82 927 TeU = VFMA(LDK(KP707106781), TeC, TeB);
Chris@82 928 STM4(&(io[8]), TeU, ovs, &(io[0]));
Chris@82 929 Tew = VSUB(Ter, Tem);
Chris@82 930 TeV = VFNMS(LDK(KP707106781), Tew, Tev);
Chris@82 931 STM4(&(io[56]), TeV, ovs, &(io[0]));
Chris@82 932 TeW = VFMA(LDK(KP707106781), Tew, Tev);
Chris@82 933 STM4(&(io[24]), TeW, ovs, &(io[0]));
Chris@82 934 TeA = VSUB(Tey, Tez);
Chris@82 935 TeX = VFNMS(LDK(KP707106781), TeA, Tex);
Chris@82 936 STM4(&(ro[56]), TeX, ovs, &(ro[0]));
Chris@82 937 TeY = VFMA(LDK(KP707106781), TeA, Tex);
Chris@82 938 STM4(&(ro[24]), TeY, ovs, &(ro[0]));
Chris@82 939 }
Chris@82 940 }
Chris@82 941 {
Chris@82 942 V Tdb, TdV, Te5, TdJ, Tdi, Te6, Te3, Teb, TdM, TdW, Tdu, TdR, Te0, Tea, TdF;
Chris@82 943 V TdQ;
Chris@82 944 {
Chris@82 945 V Tde, Tdh, Tdo, Tdt;
Chris@82 946 Tdb = VSUB(Td9, Tda);
Chris@82 947 TdV = VADD(Td9, Tda);
Chris@82 948 Te5 = VADD(TdI, TdH);
Chris@82 949 TdJ = VSUB(TdH, TdI);
Chris@82 950 Tde = VSUB(Tdc, Tdd);
Chris@82 951 Tdh = VADD(Tdf, Tdg);
Chris@82 952 Tdi = VSUB(Tde, Tdh);
Chris@82 953 Te6 = VADD(Tde, Tdh);
Chris@82 954 {
Chris@82 955 V Te1, Te2, TdK, TdL;
Chris@82 956 Te1 = VADD(TdA, TdD);
Chris@82 957 Te2 = VADD(Tdy, Tdx);
Chris@82 958 Te3 = VFNMS(LDK(KP414213562), Te2, Te1);
Chris@82 959 Teb = VFMA(LDK(KP414213562), Te1, Te2);
Chris@82 960 TdK = VSUB(Tdf, Tdg);
Chris@82 961 TdL = VADD(Tdd, Tdc);
Chris@82 962 TdM = VSUB(TdK, TdL);
Chris@82 963 TdW = VADD(TdL, TdK);
Chris@82 964 }
Chris@82 965 Tdo = VSUB(Tdm, Tdn);
Chris@82 966 Tdt = VSUB(Tdp, Tds);
Chris@82 967 Tdu = VFMA(LDK(KP414213562), Tdt, Tdo);
Chris@82 968 TdR = VFNMS(LDK(KP414213562), Tdo, Tdt);
Chris@82 969 {
Chris@82 970 V TdY, TdZ, Tdz, TdE;
Chris@82 971 TdY = VADD(Tdp, Tds);
Chris@82 972 TdZ = VADD(Tdn, Tdm);
Chris@82 973 Te0 = VFMA(LDK(KP414213562), TdZ, TdY);
Chris@82 974 Tea = VFNMS(LDK(KP414213562), TdY, TdZ);
Chris@82 975 Tdz = VSUB(Tdx, Tdy);
Chris@82 976 TdE = VSUB(TdA, TdD);
Chris@82 977 TdF = VFNMS(LDK(KP414213562), TdE, Tdz);
Chris@82 978 TdQ = VFMA(LDK(KP414213562), Tdz, TdE);
Chris@82 979 }
Chris@82 980 }
Chris@82 981 {
Chris@82 982 V Tdj, TdG, TdP, TdS;
Chris@82 983 Tdj = VFMA(LDK(KP707106781), Tdi, Tdb);
Chris@82 984 TdG = VSUB(Tdu, TdF);
Chris@82 985 TeZ = VFNMS(LDK(KP923879532), TdG, Tdj);
Chris@82 986 STM4(&(ro[44]), TeZ, ovs, &(ro[0]));
Chris@82 987 Tf0 = VFMA(LDK(KP923879532), TdG, Tdj);
Chris@82 988 STM4(&(ro[12]), Tf0, ovs, &(ro[0]));
Chris@82 989 TdP = VFMA(LDK(KP707106781), TdM, TdJ);
Chris@82 990 TdS = VSUB(TdQ, TdR);
Chris@82 991 Tf1 = VFNMS(LDK(KP923879532), TdS, TdP);
Chris@82 992 STM4(&(io[44]), Tf1, ovs, &(io[0]));
Chris@82 993 Tf2 = VFMA(LDK(KP923879532), TdS, TdP);
Chris@82 994 STM4(&(io[12]), Tf2, ovs, &(io[0]));
Chris@82 995 }
Chris@82 996 {
Chris@82 997 V TdN, TdO, TdT, TdU;
Chris@82 998 TdN = VFNMS(LDK(KP707106781), TdM, TdJ);
Chris@82 999 TdO = VADD(Tdu, TdF);
Chris@82 1000 Tf3 = VFNMS(LDK(KP923879532), TdO, TdN);
Chris@82 1001 STM4(&(io[28]), Tf3, ovs, &(io[0]));
Chris@82 1002 Tf4 = VFMA(LDK(KP923879532), TdO, TdN);
Chris@82 1003 STM4(&(io[60]), Tf4, ovs, &(io[0]));
Chris@82 1004 TdT = VFNMS(LDK(KP707106781), Tdi, Tdb);
Chris@82 1005 TdU = VADD(TdR, TdQ);
Chris@82 1006 Tf5 = VFNMS(LDK(KP923879532), TdU, TdT);
Chris@82 1007 STM4(&(ro[28]), Tf5, ovs, &(ro[0]));
Chris@82 1008 Tf6 = VFMA(LDK(KP923879532), TdU, TdT);
Chris@82 1009 STM4(&(ro[60]), Tf6, ovs, &(ro[0]));
Chris@82 1010 }
Chris@82 1011 {
Chris@82 1012 V TdX, Te4, Ted, Tee;
Chris@82 1013 TdX = VFMA(LDK(KP707106781), TdW, TdV);
Chris@82 1014 Te4 = VADD(Te0, Te3);
Chris@82 1015 Tf7 = VFNMS(LDK(KP923879532), Te4, TdX);
Chris@82 1016 STM4(&(ro[36]), Tf7, ovs, &(ro[0]));
Chris@82 1017 Tf8 = VFMA(LDK(KP923879532), Te4, TdX);
Chris@82 1018 STM4(&(ro[4]), Tf8, ovs, &(ro[0]));
Chris@82 1019 Ted = VFMA(LDK(KP707106781), Te6, Te5);
Chris@82 1020 Tee = VADD(Tea, Teb);
Chris@82 1021 Tf9 = VFNMS(LDK(KP923879532), Tee, Ted);
Chris@82 1022 STM4(&(io[36]), Tf9, ovs, &(io[0]));
Chris@82 1023 Tfa = VFMA(LDK(KP923879532), Tee, Ted);
Chris@82 1024 STM4(&(io[4]), Tfa, ovs, &(io[0]));
Chris@82 1025 }
Chris@82 1026 {
Chris@82 1027 V Te7, Te8, Te9, Tec;
Chris@82 1028 Te7 = VFNMS(LDK(KP707106781), Te6, Te5);
Chris@82 1029 Te8 = VSUB(Te3, Te0);
Chris@82 1030 Tfb = VFNMS(LDK(KP923879532), Te8, Te7);
Chris@82 1031 STM4(&(io[52]), Tfb, ovs, &(io[0]));
Chris@82 1032 Tfc = VFMA(LDK(KP923879532), Te8, Te7);
Chris@82 1033 STM4(&(io[20]), Tfc, ovs, &(io[0]));
Chris@82 1034 Te9 = VFNMS(LDK(KP707106781), TdW, TdV);
Chris@82 1035 Tec = VSUB(Tea, Teb);
Chris@82 1036 Tfd = VFNMS(LDK(KP923879532), Tec, Te9);
Chris@82 1037 STM4(&(ro[52]), Tfd, ovs, &(ro[0]));
Chris@82 1038 Tfe = VFMA(LDK(KP923879532), Tec, Te9);
Chris@82 1039 STM4(&(ro[20]), Tfe, ovs, &(ro[0]));
Chris@82 1040 }
Chris@82 1041 }
Chris@82 1042 {
Chris@82 1043 V Tcd, TcP, TcD, TcZ, Tck, Td0, TcX, Td4, Tcs, TcK, TcG, TcQ, TcU, Td5, Tcz;
Chris@82 1044 V TcL, Tcc, TcC;
Chris@82 1045 Tcc = VSUB(TbC, TbD);
Chris@82 1046 Tcd = VFMA(LDK(KP707106781), Tcc, Tcb);
Chris@82 1047 TcP = VFNMS(LDK(KP707106781), Tcc, Tcb);
Chris@82 1048 TcC = VSUB(Tan, Tak);
Chris@82 1049 TcD = VFMA(LDK(KP707106781), TcC, TcB);
Chris@82 1050 TcZ = VFNMS(LDK(KP707106781), TcC, TcB);
Chris@82 1051 {
Chris@82 1052 V Tcg, Tcj, TcV, TcW;
Chris@82 1053 Tcg = VFMA(LDK(KP414213562), Tcf, Tce);
Chris@82 1054 Tcj = VFNMS(LDK(KP414213562), Tci, Tch);
Chris@82 1055 Tck = VSUB(Tcg, Tcj);
Chris@82 1056 Td0 = VADD(Tcg, Tcj);
Chris@82 1057 TcV = VFMA(LDK(KP707106781), Tcx, Tcw);
Chris@82 1058 TcW = VFMA(LDK(KP707106781), Tcu, Tct);
Chris@82 1059 TcX = VFNMS(LDK(KP198912367), TcW, TcV);
Chris@82 1060 Td4 = VFMA(LDK(KP198912367), TcV, TcW);
Chris@82 1061 }
Chris@82 1062 {
Chris@82 1063 V Tco, Tcr, TcE, TcF;
Chris@82 1064 Tco = VFNMS(LDK(KP707106781), Tcn, Tcm);
Chris@82 1065 Tcr = VFNMS(LDK(KP707106781), Tcq, Tcp);
Chris@82 1066 Tcs = VFMA(LDK(KP668178637), Tcr, Tco);
Chris@82 1067 TcK = VFNMS(LDK(KP668178637), Tco, Tcr);
Chris@82 1068 TcE = VFMA(LDK(KP414213562), Tch, Tci);
Chris@82 1069 TcF = VFNMS(LDK(KP414213562), Tce, Tcf);
Chris@82 1070 TcG = VSUB(TcE, TcF);
Chris@82 1071 TcQ = VADD(TcF, TcE);
Chris@82 1072 }
Chris@82 1073 {
Chris@82 1074 V TcS, TcT, Tcv, Tcy;
Chris@82 1075 TcS = VFMA(LDK(KP707106781), Tcq, Tcp);
Chris@82 1076 TcT = VFMA(LDK(KP707106781), Tcn, Tcm);
Chris@82 1077 TcU = VFMA(LDK(KP198912367), TcT, TcS);
Chris@82 1078 Td5 = VFNMS(LDK(KP198912367), TcS, TcT);
Chris@82 1079 Tcv = VFNMS(LDK(KP707106781), Tcu, Tct);
Chris@82 1080 Tcy = VFNMS(LDK(KP707106781), Tcx, Tcw);
Chris@82 1081 Tcz = VFNMS(LDK(KP668178637), Tcy, Tcv);
Chris@82 1082 TcL = VFMA(LDK(KP668178637), Tcv, Tcy);
Chris@82 1083 }
Chris@82 1084 {
Chris@82 1085 V Tcl, TcA, TcN, TcO;
Chris@82 1086 Tcl = VFMA(LDK(KP923879532), Tck, Tcd);
Chris@82 1087 TcA = VADD(Tcs, Tcz);
Chris@82 1088 Tff = VFNMS(LDK(KP831469612), TcA, Tcl);
Chris@82 1089 STM4(&(ro[38]), Tff, ovs, &(ro[0]));
Chris@82 1090 Tfg = VFMA(LDK(KP831469612), TcA, Tcl);
Chris@82 1091 STM4(&(ro[6]), Tfg, ovs, &(ro[0]));
Chris@82 1092 TcN = VFMA(LDK(KP923879532), TcG, TcD);
Chris@82 1093 TcO = VADD(TcK, TcL);
Chris@82 1094 Tfh = VFNMS(LDK(KP831469612), TcO, TcN);
Chris@82 1095 STM4(&(io[38]), Tfh, ovs, &(io[0]));
Chris@82 1096 Tfi = VFMA(LDK(KP831469612), TcO, TcN);
Chris@82 1097 STM4(&(io[6]), Tfi, ovs, &(io[0]));
Chris@82 1098 }
Chris@82 1099 {
Chris@82 1100 V TcH, TcI, TcJ, TcM;
Chris@82 1101 TcH = VFNMS(LDK(KP923879532), TcG, TcD);
Chris@82 1102 TcI = VSUB(Tcz, Tcs);
Chris@82 1103 Tfj = VFNMS(LDK(KP831469612), TcI, TcH);
Chris@82 1104 STM4(&(io[54]), Tfj, ovs, &(io[0]));
Chris@82 1105 Tfk = VFMA(LDK(KP831469612), TcI, TcH);
Chris@82 1106 STM4(&(io[22]), Tfk, ovs, &(io[0]));
Chris@82 1107 TcJ = VFNMS(LDK(KP923879532), Tck, Tcd);
Chris@82 1108 TcM = VSUB(TcK, TcL);
Chris@82 1109 Tfl = VFNMS(LDK(KP831469612), TcM, TcJ);
Chris@82 1110 STM4(&(ro[54]), Tfl, ovs, &(ro[0]));
Chris@82 1111 Tfm = VFMA(LDK(KP831469612), TcM, TcJ);
Chris@82 1112 STM4(&(ro[22]), Tfm, ovs, &(ro[0]));
Chris@82 1113 }
Chris@82 1114 {
Chris@82 1115 V TcR, TcY, Td3, Td6;
Chris@82 1116 TcR = VFNMS(LDK(KP923879532), TcQ, TcP);
Chris@82 1117 TcY = VSUB(TcU, TcX);
Chris@82 1118 Tfn = VFNMS(LDK(KP980785280), TcY, TcR);
Chris@82 1119 STM4(&(ro[46]), Tfn, ovs, &(ro[0]));
Chris@82 1120 Tfo = VFMA(LDK(KP980785280), TcY, TcR);
Chris@82 1121 STM4(&(ro[14]), Tfo, ovs, &(ro[0]));
Chris@82 1122 Td3 = VFNMS(LDK(KP923879532), Td0, TcZ);
Chris@82 1123 Td6 = VSUB(Td4, Td5);
Chris@82 1124 Tfp = VFNMS(LDK(KP980785280), Td6, Td3);
Chris@82 1125 STM4(&(io[46]), Tfp, ovs, &(io[0]));
Chris@82 1126 Tfq = VFMA(LDK(KP980785280), Td6, Td3);
Chris@82 1127 STM4(&(io[14]), Tfq, ovs, &(io[0]));
Chris@82 1128 }
Chris@82 1129 {
Chris@82 1130 V Td1, Td2, Td7, Td8;
Chris@82 1131 Td1 = VFMA(LDK(KP923879532), Td0, TcZ);
Chris@82 1132 Td2 = VADD(TcU, TcX);
Chris@82 1133 Tfr = VFNMS(LDK(KP980785280), Td2, Td1);
Chris@82 1134 STM4(&(io[30]), Tfr, ovs, &(io[0]));
Chris@82 1135 Tfs = VFMA(LDK(KP980785280), Td2, Td1);
Chris@82 1136 STM4(&(io[62]), Tfs, ovs, &(io[0]));
Chris@82 1137 Td7 = VFMA(LDK(KP923879532), TcQ, TcP);
Chris@82 1138 Td8 = VADD(Td5, Td4);
Chris@82 1139 Tft = VFNMS(LDK(KP980785280), Td8, Td7);
Chris@82 1140 STM4(&(ro[30]), Tft, ovs, &(ro[0]));
Chris@82 1141 Tfu = VFMA(LDK(KP980785280), Td8, Td7);
Chris@82 1142 STM4(&(ro[62]), Tfu, ovs, &(ro[0]));
Chris@82 1143 }
Chris@82 1144 }
Chris@82 1145 {
Chris@82 1146 V Tfv, Tfw, Tfx, Tfy, Tfz, TfA, TfB, TfC, TfD, TfE, TfF, TfG, TfH, TfI, TfJ;
Chris@82 1147 V TfK, TfL, TfM, TfN, TfO, TfP, TfQ, TfR, TfS, TfT, TfU, TfV, TfW, TfX, TfY;
Chris@82 1148 V TfZ, Tg0;
Chris@82 1149 {
Chris@82 1150 V Tap, TbR, TbF, Tc1, TaE, Tc2, TbZ, Tc7, Tb6, TbN, TbI, TbS, TbW, Tc6, Tbx;
Chris@82 1151 V TbM, Tao, TbE;
Chris@82 1152 Tao = VADD(Tak, Tan);
Chris@82 1153 Tap = VFNMS(LDK(KP707106781), Tao, Tah);
Chris@82 1154 TbR = VFMA(LDK(KP707106781), Tao, Tah);
Chris@82 1155 TbE = VADD(TbC, TbD);
Chris@82 1156 TbF = VFNMS(LDK(KP707106781), TbE, TbB);
Chris@82 1157 Tc1 = VFMA(LDK(KP707106781), TbE, TbB);
Chris@82 1158 {
Chris@82 1159 V Taw, TaD, TbX, TbY;
Chris@82 1160 Taw = VFNMS(LDK(KP414213562), Tav, Tas);
Chris@82 1161 TaD = VFMA(LDK(KP414213562), TaC, Taz);
Chris@82 1162 TaE = VSUB(Taw, TaD);
Chris@82 1163 Tc2 = VADD(Taw, TaD);
Chris@82 1164 TbX = VFMA(LDK(KP707106781), Tbv, Tbs);
Chris@82 1165 TbY = VFMA(LDK(KP707106781), Tbm, Tbb);
Chris@82 1166 TbZ = VFNMS(LDK(KP198912367), TbY, TbX);
Chris@82 1167 Tc7 = VFMA(LDK(KP198912367), TbX, TbY);
Chris@82 1168 }
Chris@82 1169 {
Chris@82 1170 V TaW, Tb5, TbG, TbH;
Chris@82 1171 TaW = VFNMS(LDK(KP707106781), TaV, TaK);
Chris@82 1172 Tb5 = VFNMS(LDK(KP707106781), Tb4, Tb1);
Chris@82 1173 Tb6 = VFMA(LDK(KP668178637), Tb5, TaW);
Chris@82 1174 TbN = VFNMS(LDK(KP668178637), TaW, Tb5);
Chris@82 1175 TbG = VFNMS(LDK(KP414213562), Taz, TaC);
Chris@82 1176 TbH = VFMA(LDK(KP414213562), Tas, Tav);
Chris@82 1177 TbI = VSUB(TbG, TbH);
Chris@82 1178 TbS = VADD(TbH, TbG);
Chris@82 1179 }
Chris@82 1180 {
Chris@82 1181 V TbU, TbV, Tbn, Tbw;
Chris@82 1182 TbU = VFMA(LDK(KP707106781), Tb4, Tb1);
Chris@82 1183 TbV = VFMA(LDK(KP707106781), TaV, TaK);
Chris@82 1184 TbW = VFMA(LDK(KP198912367), TbV, TbU);
Chris@82 1185 Tc6 = VFNMS(LDK(KP198912367), TbU, TbV);
Chris@82 1186 Tbn = VFNMS(LDK(KP707106781), Tbm, Tbb);
Chris@82 1187 Tbw = VFNMS(LDK(KP707106781), Tbv, Tbs);
Chris@82 1188 Tbx = VFNMS(LDK(KP668178637), Tbw, Tbn);
Chris@82 1189 TbM = VFMA(LDK(KP668178637), Tbn, Tbw);
Chris@82 1190 }
Chris@82 1191 {
Chris@82 1192 V TaF, Tby, TbL, TbO;
Chris@82 1193 TaF = VFMA(LDK(KP923879532), TaE, Tap);
Chris@82 1194 Tby = VSUB(Tb6, Tbx);
Chris@82 1195 Tfv = VFNMS(LDK(KP831469612), Tby, TaF);
Chris@82 1196 STM4(&(ro[42]), Tfv, ovs, &(ro[0]));
Chris@82 1197 Tfw = VFMA(LDK(KP831469612), Tby, TaF);
Chris@82 1198 STM4(&(ro[10]), Tfw, ovs, &(ro[0]));
Chris@82 1199 TbL = VFMA(LDK(KP923879532), TbI, TbF);
Chris@82 1200 TbO = VSUB(TbM, TbN);
Chris@82 1201 Tfx = VFNMS(LDK(KP831469612), TbO, TbL);
Chris@82 1202 STM4(&(io[42]), Tfx, ovs, &(io[0]));
Chris@82 1203 Tfy = VFMA(LDK(KP831469612), TbO, TbL);
Chris@82 1204 STM4(&(io[10]), Tfy, ovs, &(io[0]));
Chris@82 1205 }
Chris@82 1206 {
Chris@82 1207 V TbJ, TbK, TbP, TbQ;
Chris@82 1208 TbJ = VFNMS(LDK(KP923879532), TbI, TbF);
Chris@82 1209 TbK = VADD(Tb6, Tbx);
Chris@82 1210 Tfz = VFNMS(LDK(KP831469612), TbK, TbJ);
Chris@82 1211 STM4(&(io[26]), Tfz, ovs, &(io[0]));
Chris@82 1212 TfA = VFMA(LDK(KP831469612), TbK, TbJ);
Chris@82 1213 STM4(&(io[58]), TfA, ovs, &(io[0]));
Chris@82 1214 TbP = VFNMS(LDK(KP923879532), TaE, Tap);
Chris@82 1215 TbQ = VADD(TbN, TbM);
Chris@82 1216 TfB = VFNMS(LDK(KP831469612), TbQ, TbP);
Chris@82 1217 STM4(&(ro[26]), TfB, ovs, &(ro[0]));
Chris@82 1218 TfC = VFMA(LDK(KP831469612), TbQ, TbP);
Chris@82 1219 STM4(&(ro[58]), TfC, ovs, &(ro[0]));
Chris@82 1220 }
Chris@82 1221 {
Chris@82 1222 V TbT, Tc0, Tc9, Tca;
Chris@82 1223 TbT = VFMA(LDK(KP923879532), TbS, TbR);
Chris@82 1224 Tc0 = VADD(TbW, TbZ);
Chris@82 1225 TfD = VFNMS(LDK(KP980785280), Tc0, TbT);
Chris@82 1226 STM4(&(ro[34]), TfD, ovs, &(ro[0]));
Chris@82 1227 TfE = VFMA(LDK(KP980785280), Tc0, TbT);
Chris@82 1228 STM4(&(ro[2]), TfE, ovs, &(ro[0]));
Chris@82 1229 Tc9 = VFMA(LDK(KP923879532), Tc2, Tc1);
Chris@82 1230 Tca = VADD(Tc6, Tc7);
Chris@82 1231 TfF = VFNMS(LDK(KP980785280), Tca, Tc9);
Chris@82 1232 STM4(&(io[34]), TfF, ovs, &(io[0]));
Chris@82 1233 TfG = VFMA(LDK(KP980785280), Tca, Tc9);
Chris@82 1234 STM4(&(io[2]), TfG, ovs, &(io[0]));
Chris@82 1235 }
Chris@82 1236 {
Chris@82 1237 V Tc3, Tc4, Tc5, Tc8;
Chris@82 1238 Tc3 = VFNMS(LDK(KP923879532), Tc2, Tc1);
Chris@82 1239 Tc4 = VSUB(TbZ, TbW);
Chris@82 1240 TfH = VFNMS(LDK(KP980785280), Tc4, Tc3);
Chris@82 1241 STM4(&(io[50]), TfH, ovs, &(io[0]));
Chris@82 1242 TfI = VFMA(LDK(KP980785280), Tc4, Tc3);
Chris@82 1243 STM4(&(io[18]), TfI, ovs, &(io[0]));
Chris@82 1244 Tc5 = VFNMS(LDK(KP923879532), TbS, TbR);
Chris@82 1245 Tc8 = VSUB(Tc6, Tc7);
Chris@82 1246 TfJ = VFNMS(LDK(KP980785280), Tc8, Tc5);
Chris@82 1247 STM4(&(ro[50]), TfJ, ovs, &(ro[0]));
Chris@82 1248 TfK = VFMA(LDK(KP980785280), Tc8, Tc5);
Chris@82 1249 STM4(&(ro[18]), TfK, ovs, &(ro[0]));
Chris@82 1250 }
Chris@82 1251 }
Chris@82 1252 {
Chris@82 1253 V T6F, T7h, T7m, T7x, T7p, T7w, T6M, T7s, T6U, T7c, T75, T7r, T78, T7i, T71;
Chris@82 1254 V T7d;
Chris@82 1255 {
Chris@82 1256 V T6D, T6E, T7k, T7l;
Chris@82 1257 T6D = VFNMS(LDK(KP707106781), T3e, T37);
Chris@82 1258 T6E = VADD(T65, T64);
Chris@82 1259 T6F = VFNMS(LDK(KP923879532), T6E, T6D);
Chris@82 1260 T7h = VFMA(LDK(KP923879532), T6E, T6D);
Chris@82 1261 T7k = VFMA(LDK(KP923879532), T6S, T6R);
Chris@82 1262 T7l = VFMA(LDK(KP923879532), T6P, T6O);
Chris@82 1263 T7m = VFMA(LDK(KP098491403), T7l, T7k);
Chris@82 1264 T7x = VFNMS(LDK(KP098491403), T7k, T7l);
Chris@82 1265 }
Chris@82 1266 {
Chris@82 1267 V T7n, T7o, T6I, T6L;
Chris@82 1268 T7n = VFMA(LDK(KP923879532), T6Z, T6Y);
Chris@82 1269 T7o = VFMA(LDK(KP923879532), T6W, T6V);
Chris@82 1270 T7p = VFNMS(LDK(KP098491403), T7o, T7n);
Chris@82 1271 T7w = VFMA(LDK(KP098491403), T7n, T7o);
Chris@82 1272 T6I = VFMA(LDK(KP198912367), T6H, T6G);
Chris@82 1273 T6L = VFNMS(LDK(KP198912367), T6K, T6J);
Chris@82 1274 T6M = VSUB(T6I, T6L);
Chris@82 1275 T7s = VADD(T6I, T6L);
Chris@82 1276 }
Chris@82 1277 {
Chris@82 1278 V T6Q, T6T, T73, T74;
Chris@82 1279 T6Q = VFNMS(LDK(KP923879532), T6P, T6O);
Chris@82 1280 T6T = VFNMS(LDK(KP923879532), T6S, T6R);
Chris@82 1281 T6U = VFMA(LDK(KP820678790), T6T, T6Q);
Chris@82 1282 T7c = VFNMS(LDK(KP820678790), T6Q, T6T);
Chris@82 1283 T73 = VFNMS(LDK(KP707106781), T62, T5Z);
Chris@82 1284 T74 = VADD(T3m, T3t);
Chris@82 1285 T75 = VFNMS(LDK(KP923879532), T74, T73);
Chris@82 1286 T7r = VFMA(LDK(KP923879532), T74, T73);
Chris@82 1287 }
Chris@82 1288 {
Chris@82 1289 V T76, T77, T6X, T70;
Chris@82 1290 T76 = VFMA(LDK(KP198912367), T6J, T6K);
Chris@82 1291 T77 = VFNMS(LDK(KP198912367), T6G, T6H);
Chris@82 1292 T78 = VSUB(T76, T77);
Chris@82 1293 T7i = VADD(T77, T76);
Chris@82 1294 T6X = VFNMS(LDK(KP923879532), T6W, T6V);
Chris@82 1295 T70 = VFNMS(LDK(KP923879532), T6Z, T6Y);
Chris@82 1296 T71 = VFNMS(LDK(KP820678790), T70, T6X);
Chris@82 1297 T7d = VFMA(LDK(KP820678790), T6X, T70);
Chris@82 1298 }
Chris@82 1299 {
Chris@82 1300 V T6N, T72, T7f, T7g;
Chris@82 1301 T6N = VFMA(LDK(KP980785280), T6M, T6F);
Chris@82 1302 T72 = VADD(T6U, T71);
Chris@82 1303 TfL = VFNMS(LDK(KP773010453), T72, T6N);
Chris@82 1304 STM4(&(ro[39]), TfL, ovs, &(ro[1]));
Chris@82 1305 TfM = VFMA(LDK(KP773010453), T72, T6N);
Chris@82 1306 STM4(&(ro[7]), TfM, ovs, &(ro[1]));
Chris@82 1307 T7f = VFMA(LDK(KP980785280), T78, T75);
Chris@82 1308 T7g = VADD(T7c, T7d);
Chris@82 1309 TfN = VFNMS(LDK(KP773010453), T7g, T7f);
Chris@82 1310 STM4(&(io[39]), TfN, ovs, &(io[1]));
Chris@82 1311 TfO = VFMA(LDK(KP773010453), T7g, T7f);
Chris@82 1312 STM4(&(io[7]), TfO, ovs, &(io[1]));
Chris@82 1313 }
Chris@82 1314 {
Chris@82 1315 V T79, T7a, T7b, T7e;
Chris@82 1316 T79 = VFNMS(LDK(KP980785280), T78, T75);
Chris@82 1317 T7a = VSUB(T71, T6U);
Chris@82 1318 TfP = VFNMS(LDK(KP773010453), T7a, T79);
Chris@82 1319 STM4(&(io[55]), TfP, ovs, &(io[1]));
Chris@82 1320 TfQ = VFMA(LDK(KP773010453), T7a, T79);
Chris@82 1321 STM4(&(io[23]), TfQ, ovs, &(io[1]));
Chris@82 1322 T7b = VFNMS(LDK(KP980785280), T6M, T6F);
Chris@82 1323 T7e = VSUB(T7c, T7d);
Chris@82 1324 TfR = VFNMS(LDK(KP773010453), T7e, T7b);
Chris@82 1325 STM4(&(ro[55]), TfR, ovs, &(ro[1]));
Chris@82 1326 TfS = VFMA(LDK(KP773010453), T7e, T7b);
Chris@82 1327 STM4(&(ro[23]), TfS, ovs, &(ro[1]));
Chris@82 1328 }
Chris@82 1329 {
Chris@82 1330 V T7j, T7q, T7v, T7y;
Chris@82 1331 T7j = VFNMS(LDK(KP980785280), T7i, T7h);
Chris@82 1332 T7q = VSUB(T7m, T7p);
Chris@82 1333 TfT = VFNMS(LDK(KP995184726), T7q, T7j);
Chris@82 1334 STM4(&(ro[47]), TfT, ovs, &(ro[1]));
Chris@82 1335 TfU = VFMA(LDK(KP995184726), T7q, T7j);
Chris@82 1336 STM4(&(ro[15]), TfU, ovs, &(ro[1]));
Chris@82 1337 T7v = VFNMS(LDK(KP980785280), T7s, T7r);
Chris@82 1338 T7y = VSUB(T7w, T7x);
Chris@82 1339 TfV = VFNMS(LDK(KP995184726), T7y, T7v);
Chris@82 1340 STM4(&(io[47]), TfV, ovs, &(io[1]));
Chris@82 1341 TfW = VFMA(LDK(KP995184726), T7y, T7v);
Chris@82 1342 STM4(&(io[15]), TfW, ovs, &(io[1]));
Chris@82 1343 }
Chris@82 1344 {
Chris@82 1345 V T7t, T7u, T7z, T7A;
Chris@82 1346 T7t = VFMA(LDK(KP980785280), T7s, T7r);
Chris@82 1347 T7u = VADD(T7m, T7p);
Chris@82 1348 TfX = VFNMS(LDK(KP995184726), T7u, T7t);
Chris@82 1349 STM4(&(io[31]), TfX, ovs, &(io[1]));
Chris@82 1350 TfY = VFMA(LDK(KP995184726), T7u, T7t);
Chris@82 1351 STM4(&(io[63]), TfY, ovs, &(io[1]));
Chris@82 1352 T7z = VFMA(LDK(KP980785280), T7i, T7h);
Chris@82 1353 T7A = VADD(T7x, T7w);
Chris@82 1354 TfZ = VFNMS(LDK(KP995184726), T7A, T7z);
Chris@82 1355 STM4(&(ro[31]), TfZ, ovs, &(ro[1]));
Chris@82 1356 Tg0 = VFMA(LDK(KP995184726), T7A, T7z);
Chris@82 1357 STM4(&(ro[63]), Tg0, ovs, &(ro[1]));
Chris@82 1358 }
Chris@82 1359 }
Chris@82 1360 {
Chris@82 1361 V T9j, T9V, Ta0, Tab, Ta3, Taa, T9q, Ta6, T9y, T9Q, T9J, Ta5, T9M, T9W, T9F;
Chris@82 1362 V T9R;
Chris@82 1363 {
Chris@82 1364 V T9h, T9i, T9Y, T9Z;
Chris@82 1365 T9h = VFNMS(LDK(KP707106781), T7C, T7B);
Chris@82 1366 T9i = VSUB(T8I, T8J);
Chris@82 1367 T9j = VFMA(LDK(KP923879532), T9i, T9h);
Chris@82 1368 T9V = VFNMS(LDK(KP923879532), T9i, T9h);
Chris@82 1369 T9Y = VFMA(LDK(KP923879532), T9w, T9v);
Chris@82 1370 T9Z = VFMA(LDK(KP923879532), T9t, T9s);
Chris@82 1371 Ta0 = VFMA(LDK(KP303346683), T9Z, T9Y);
Chris@82 1372 Tab = VFNMS(LDK(KP303346683), T9Y, T9Z);
Chris@82 1373 }
Chris@82 1374 {
Chris@82 1375 V Ta1, Ta2, T9m, T9p;
Chris@82 1376 Ta1 = VFMA(LDK(KP923879532), T9D, T9C);
Chris@82 1377 Ta2 = VFMA(LDK(KP923879532), T9A, T9z);
Chris@82 1378 Ta3 = VFNMS(LDK(KP303346683), Ta2, Ta1);
Chris@82 1379 Taa = VFMA(LDK(KP303346683), Ta1, Ta2);
Chris@82 1380 T9m = VFMA(LDK(KP668178637), T9l, T9k);
Chris@82 1381 T9p = VFNMS(LDK(KP668178637), T9o, T9n);
Chris@82 1382 T9q = VSUB(T9m, T9p);
Chris@82 1383 Ta6 = VADD(T9m, T9p);
Chris@82 1384 }
Chris@82 1385 {
Chris@82 1386 V T9u, T9x, T9H, T9I;
Chris@82 1387 T9u = VFNMS(LDK(KP923879532), T9t, T9s);
Chris@82 1388 T9x = VFNMS(LDK(KP923879532), T9w, T9v);
Chris@82 1389 T9y = VFMA(LDK(KP534511135), T9x, T9u);
Chris@82 1390 T9Q = VFNMS(LDK(KP534511135), T9u, T9x);
Chris@82 1391 T9H = VFNMS(LDK(KP707106781), T8G, T8F);
Chris@82 1392 T9I = VSUB(T7J, T7G);
Chris@82 1393 T9J = VFMA(LDK(KP923879532), T9I, T9H);
Chris@82 1394 Ta5 = VFNMS(LDK(KP923879532), T9I, T9H);
Chris@82 1395 }
Chris@82 1396 {
Chris@82 1397 V T9K, T9L, T9B, T9E;
Chris@82 1398 T9K = VFMA(LDK(KP668178637), T9n, T9o);
Chris@82 1399 T9L = VFNMS(LDK(KP668178637), T9k, T9l);
Chris@82 1400 T9M = VSUB(T9K, T9L);
Chris@82 1401 T9W = VADD(T9L, T9K);
Chris@82 1402 T9B = VFNMS(LDK(KP923879532), T9A, T9z);
Chris@82 1403 T9E = VFNMS(LDK(KP923879532), T9D, T9C);
Chris@82 1404 T9F = VFNMS(LDK(KP534511135), T9E, T9B);
Chris@82 1405 T9R = VFMA(LDK(KP534511135), T9B, T9E);
Chris@82 1406 }
Chris@82 1407 {
Chris@82 1408 V T9r, T9G, Tg1, Tg2;
Chris@82 1409 T9r = VFMA(LDK(KP831469612), T9q, T9j);
Chris@82 1410 T9G = VADD(T9y, T9F);
Chris@82 1411 Tg1 = VFNMS(LDK(KP881921264), T9G, T9r);
Chris@82 1412 STM4(&(ro[37]), Tg1, ovs, &(ro[1]));
Chris@82 1413 STN4(&(ro[36]), Tf7, Tg1, Tff, TfL, ovs);
Chris@82 1414 Tg2 = VFMA(LDK(KP881921264), T9G, T9r);
Chris@82 1415 STM4(&(ro[5]), Tg2, ovs, &(ro[1]));
Chris@82 1416 STN4(&(ro[4]), Tf8, Tg2, Tfg, TfM, ovs);
Chris@82 1417 }
Chris@82 1418 {
Chris@82 1419 V T9T, T9U, Tg3, Tg4;
Chris@82 1420 T9T = VFMA(LDK(KP831469612), T9M, T9J);
Chris@82 1421 T9U = VADD(T9Q, T9R);
Chris@82 1422 Tg3 = VFNMS(LDK(KP881921264), T9U, T9T);
Chris@82 1423 STM4(&(io[37]), Tg3, ovs, &(io[1]));
Chris@82 1424 STN4(&(io[36]), Tf9, Tg3, Tfh, TfN, ovs);
Chris@82 1425 Tg4 = VFMA(LDK(KP881921264), T9U, T9T);
Chris@82 1426 STM4(&(io[5]), Tg4, ovs, &(io[1]));
Chris@82 1427 STN4(&(io[4]), Tfa, Tg4, Tfi, TfO, ovs);
Chris@82 1428 }
Chris@82 1429 {
Chris@82 1430 V T9N, T9O, Tg5, Tg6;
Chris@82 1431 T9N = VFNMS(LDK(KP831469612), T9M, T9J);
Chris@82 1432 T9O = VSUB(T9F, T9y);
Chris@82 1433 Tg5 = VFNMS(LDK(KP881921264), T9O, T9N);
Chris@82 1434 STM4(&(io[53]), Tg5, ovs, &(io[1]));
Chris@82 1435 STN4(&(io[52]), Tfb, Tg5, Tfj, TfP, ovs);
Chris@82 1436 Tg6 = VFMA(LDK(KP881921264), T9O, T9N);
Chris@82 1437 STM4(&(io[21]), Tg6, ovs, &(io[1]));
Chris@82 1438 STN4(&(io[20]), Tfc, Tg6, Tfk, TfQ, ovs);
Chris@82 1439 }
Chris@82 1440 {
Chris@82 1441 V T9P, T9S, Tg7, Tg8;
Chris@82 1442 T9P = VFNMS(LDK(KP831469612), T9q, T9j);
Chris@82 1443 T9S = VSUB(T9Q, T9R);
Chris@82 1444 Tg7 = VFNMS(LDK(KP881921264), T9S, T9P);
Chris@82 1445 STM4(&(ro[53]), Tg7, ovs, &(ro[1]));
Chris@82 1446 STN4(&(ro[52]), Tfd, Tg7, Tfl, TfR, ovs);
Chris@82 1447 Tg8 = VFMA(LDK(KP881921264), T9S, T9P);
Chris@82 1448 STM4(&(ro[21]), Tg8, ovs, &(ro[1]));
Chris@82 1449 STN4(&(ro[20]), Tfe, Tg8, Tfm, TfS, ovs);
Chris@82 1450 }
Chris@82 1451 {
Chris@82 1452 V T9X, Ta4, Tg9, Tga;
Chris@82 1453 T9X = VFNMS(LDK(KP831469612), T9W, T9V);
Chris@82 1454 Ta4 = VSUB(Ta0, Ta3);
Chris@82 1455 Tg9 = VFNMS(LDK(KP956940335), Ta4, T9X);
Chris@82 1456 STM4(&(ro[45]), Tg9, ovs, &(ro[1]));
Chris@82 1457 STN4(&(ro[44]), TeZ, Tg9, Tfn, TfT, ovs);
Chris@82 1458 Tga = VFMA(LDK(KP956940335), Ta4, T9X);
Chris@82 1459 STM4(&(ro[13]), Tga, ovs, &(ro[1]));
Chris@82 1460 STN4(&(ro[12]), Tf0, Tga, Tfo, TfU, ovs);
Chris@82 1461 }
Chris@82 1462 {
Chris@82 1463 V Ta9, Tac, Tgb, Tgc;
Chris@82 1464 Ta9 = VFNMS(LDK(KP831469612), Ta6, Ta5);
Chris@82 1465 Tac = VSUB(Taa, Tab);
Chris@82 1466 Tgb = VFNMS(LDK(KP956940335), Tac, Ta9);
Chris@82 1467 STM4(&(io[45]), Tgb, ovs, &(io[1]));
Chris@82 1468 STN4(&(io[44]), Tf1, Tgb, Tfp, TfV, ovs);
Chris@82 1469 Tgc = VFMA(LDK(KP956940335), Tac, Ta9);
Chris@82 1470 STM4(&(io[13]), Tgc, ovs, &(io[1]));
Chris@82 1471 STN4(&(io[12]), Tf2, Tgc, Tfq, TfW, ovs);
Chris@82 1472 }
Chris@82 1473 {
Chris@82 1474 V Ta7, Ta8, Tgd, Tge;
Chris@82 1475 Ta7 = VFMA(LDK(KP831469612), Ta6, Ta5);
Chris@82 1476 Ta8 = VADD(Ta0, Ta3);
Chris@82 1477 Tgd = VFNMS(LDK(KP956940335), Ta8, Ta7);
Chris@82 1478 STM4(&(io[29]), Tgd, ovs, &(io[1]));
Chris@82 1479 STN4(&(io[28]), Tf3, Tgd, Tfr, TfX, ovs);
Chris@82 1480 Tge = VFMA(LDK(KP956940335), Ta8, Ta7);
Chris@82 1481 STM4(&(io[61]), Tge, ovs, &(io[1]));
Chris@82 1482 STN4(&(io[60]), Tf4, Tge, Tfs, TfY, ovs);
Chris@82 1483 }
Chris@82 1484 {
Chris@82 1485 V Tad, Tae, Tgf, Tgg;
Chris@82 1486 Tad = VFMA(LDK(KP831469612), T9W, T9V);
Chris@82 1487 Tae = VADD(Tab, Taa);
Chris@82 1488 Tgf = VFNMS(LDK(KP956940335), Tae, Tad);
Chris@82 1489 STM4(&(ro[29]), Tgf, ovs, &(ro[1]));
Chris@82 1490 STN4(&(ro[28]), Tf5, Tgf, Tft, TfZ, ovs);
Chris@82 1491 Tgg = VFMA(LDK(KP956940335), Tae, Tad);
Chris@82 1492 STM4(&(ro[61]), Tgg, ovs, &(ro[1]));
Chris@82 1493 STN4(&(ro[60]), Tf6, Tgg, Tfu, Tg0, ovs);
Chris@82 1494 }
Chris@82 1495 }
Chris@82 1496 {
Chris@82 1497 V Tgh, Tgi, Tgj, Tgk, Tgl, Tgm, Tgn, Tgo, Tgp, Tgq, Tgr, Tgs, Tgt, Tgu, Tgv;
Chris@82 1498 V Tgw;
Chris@82 1499 {
Chris@82 1500 V T3v, T6j, T6o, T6y, T6r, T6z, T48, T6u, T52, T6f, T67, T6t, T6a, T6k, T5V;
Chris@82 1501 V T6e;
Chris@82 1502 {
Chris@82 1503 V T3f, T3u, T6m, T6n;
Chris@82 1504 T3f = VFMA(LDK(KP707106781), T3e, T37);
Chris@82 1505 T3u = VSUB(T3m, T3t);
Chris@82 1506 T3v = VFNMS(LDK(KP923879532), T3u, T3f);
Chris@82 1507 T6j = VFMA(LDK(KP923879532), T3u, T3f);
Chris@82 1508 T6m = VFMA(LDK(KP923879532), T50, T4X);
Chris@82 1509 T6n = VFMA(LDK(KP923879532), T4N, T4q);
Chris@82 1510 T6o = VFMA(LDK(KP303346683), T6n, T6m);
Chris@82 1511 T6y = VFNMS(LDK(KP303346683), T6m, T6n);
Chris@82 1512 }
Chris@82 1513 {
Chris@82 1514 V T6p, T6q, T3O, T47;
Chris@82 1515 T6p = VFMA(LDK(KP923879532), T5T, T5Q);
Chris@82 1516 T6q = VFMA(LDK(KP923879532), T5G, T5j);
Chris@82 1517 T6r = VFNMS(LDK(KP303346683), T6q, T6p);
Chris@82 1518 T6z = VFMA(LDK(KP303346683), T6p, T6q);
Chris@82 1519 T3O = VFNMS(LDK(KP668178637), T3N, T3G);
Chris@82 1520 T47 = VFMA(LDK(KP668178637), T46, T3Z);
Chris@82 1521 T48 = VSUB(T3O, T47);
Chris@82 1522 T6u = VADD(T3O, T47);
Chris@82 1523 }
Chris@82 1524 {
Chris@82 1525 V T4O, T51, T63, T66;
Chris@82 1526 T4O = VFNMS(LDK(KP923879532), T4N, T4q);
Chris@82 1527 T51 = VFNMS(LDK(KP923879532), T50, T4X);
Chris@82 1528 T52 = VFMA(LDK(KP534511135), T51, T4O);
Chris@82 1529 T6f = VFNMS(LDK(KP534511135), T4O, T51);
Chris@82 1530 T63 = VFMA(LDK(KP707106781), T62, T5Z);
Chris@82 1531 T66 = VSUB(T64, T65);
Chris@82 1532 T67 = VFNMS(LDK(KP923879532), T66, T63);
Chris@82 1533 T6t = VFMA(LDK(KP923879532), T66, T63);
Chris@82 1534 }
Chris@82 1535 {
Chris@82 1536 V T68, T69, T5H, T5U;
Chris@82 1537 T68 = VFNMS(LDK(KP668178637), T3Z, T46);
Chris@82 1538 T69 = VFMA(LDK(KP668178637), T3G, T3N);
Chris@82 1539 T6a = VSUB(T68, T69);
Chris@82 1540 T6k = VADD(T69, T68);
Chris@82 1541 T5H = VFNMS(LDK(KP923879532), T5G, T5j);
Chris@82 1542 T5U = VFNMS(LDK(KP923879532), T5T, T5Q);
Chris@82 1543 T5V = VFNMS(LDK(KP534511135), T5U, T5H);
Chris@82 1544 T6e = VFMA(LDK(KP534511135), T5H, T5U);
Chris@82 1545 }
Chris@82 1546 {
Chris@82 1547 V T49, T5W, T6d, T6g;
Chris@82 1548 T49 = VFMA(LDK(KP831469612), T48, T3v);
Chris@82 1549 T5W = VSUB(T52, T5V);
Chris@82 1550 Tgh = VFNMS(LDK(KP881921264), T5W, T49);
Chris@82 1551 STM4(&(ro[43]), Tgh, ovs, &(ro[1]));
Chris@82 1552 Tgi = VFMA(LDK(KP881921264), T5W, T49);
Chris@82 1553 STM4(&(ro[11]), Tgi, ovs, &(ro[1]));
Chris@82 1554 T6d = VFMA(LDK(KP831469612), T6a, T67);
Chris@82 1555 T6g = VSUB(T6e, T6f);
Chris@82 1556 Tgj = VFNMS(LDK(KP881921264), T6g, T6d);
Chris@82 1557 STM4(&(io[43]), Tgj, ovs, &(io[1]));
Chris@82 1558 Tgk = VFMA(LDK(KP881921264), T6g, T6d);
Chris@82 1559 STM4(&(io[11]), Tgk, ovs, &(io[1]));
Chris@82 1560 }
Chris@82 1561 {
Chris@82 1562 V T6b, T6c, T6h, T6i;
Chris@82 1563 T6b = VFNMS(LDK(KP831469612), T6a, T67);
Chris@82 1564 T6c = VADD(T52, T5V);
Chris@82 1565 Tgl = VFNMS(LDK(KP881921264), T6c, T6b);
Chris@82 1566 STM4(&(io[27]), Tgl, ovs, &(io[1]));
Chris@82 1567 Tgm = VFMA(LDK(KP881921264), T6c, T6b);
Chris@82 1568 STM4(&(io[59]), Tgm, ovs, &(io[1]));
Chris@82 1569 T6h = VFNMS(LDK(KP831469612), T48, T3v);
Chris@82 1570 T6i = VADD(T6f, T6e);
Chris@82 1571 Tgn = VFNMS(LDK(KP881921264), T6i, T6h);
Chris@82 1572 STM4(&(ro[27]), Tgn, ovs, &(ro[1]));
Chris@82 1573 Tgo = VFMA(LDK(KP881921264), T6i, T6h);
Chris@82 1574 STM4(&(ro[59]), Tgo, ovs, &(ro[1]));
Chris@82 1575 }
Chris@82 1576 {
Chris@82 1577 V T6l, T6s, T6B, T6C;
Chris@82 1578 T6l = VFMA(LDK(KP831469612), T6k, T6j);
Chris@82 1579 T6s = VADD(T6o, T6r);
Chris@82 1580 Tgp = VFNMS(LDK(KP956940335), T6s, T6l);
Chris@82 1581 STM4(&(ro[35]), Tgp, ovs, &(ro[1]));
Chris@82 1582 Tgq = VFMA(LDK(KP956940335), T6s, T6l);
Chris@82 1583 STM4(&(ro[3]), Tgq, ovs, &(ro[1]));
Chris@82 1584 T6B = VFMA(LDK(KP831469612), T6u, T6t);
Chris@82 1585 T6C = VADD(T6y, T6z);
Chris@82 1586 Tgr = VFNMS(LDK(KP956940335), T6C, T6B);
Chris@82 1587 STM4(&(io[35]), Tgr, ovs, &(io[1]));
Chris@82 1588 Tgs = VFMA(LDK(KP956940335), T6C, T6B);
Chris@82 1589 STM4(&(io[3]), Tgs, ovs, &(io[1]));
Chris@82 1590 }
Chris@82 1591 {
Chris@82 1592 V T6v, T6w, T6x, T6A;
Chris@82 1593 T6v = VFNMS(LDK(KP831469612), T6u, T6t);
Chris@82 1594 T6w = VSUB(T6r, T6o);
Chris@82 1595 Tgt = VFNMS(LDK(KP956940335), T6w, T6v);
Chris@82 1596 STM4(&(io[51]), Tgt, ovs, &(io[1]));
Chris@82 1597 Tgu = VFMA(LDK(KP956940335), T6w, T6v);
Chris@82 1598 STM4(&(io[19]), Tgu, ovs, &(io[1]));
Chris@82 1599 T6x = VFNMS(LDK(KP831469612), T6k, T6j);
Chris@82 1600 T6A = VSUB(T6y, T6z);
Chris@82 1601 Tgv = VFNMS(LDK(KP956940335), T6A, T6x);
Chris@82 1602 STM4(&(ro[51]), Tgv, ovs, &(ro[1]));
Chris@82 1603 Tgw = VFMA(LDK(KP956940335), T6A, T6x);
Chris@82 1604 STM4(&(ro[19]), Tgw, ovs, &(ro[1]));
Chris@82 1605 }
Chris@82 1606 }
Chris@82 1607 {
Chris@82 1608 V T7L, T8X, T92, T9c, T95, T9d, T80, T98, T8k, T8T, T8L, T97, T8O, T8Y, T8D;
Chris@82 1609 V T8S;
Chris@82 1610 {
Chris@82 1611 V T7D, T7K, T90, T91;
Chris@82 1612 T7D = VFMA(LDK(KP707106781), T7C, T7B);
Chris@82 1613 T7K = VADD(T7G, T7J);
Chris@82 1614 T7L = VFNMS(LDK(KP923879532), T7K, T7D);
Chris@82 1615 T8X = VFMA(LDK(KP923879532), T7K, T7D);
Chris@82 1616 T90 = VFMA(LDK(KP923879532), T8i, T8f);
Chris@82 1617 T91 = VFMA(LDK(KP923879532), T8b, T84);
Chris@82 1618 T92 = VFMA(LDK(KP098491403), T91, T90);
Chris@82 1619 T9c = VFNMS(LDK(KP098491403), T90, T91);
Chris@82 1620 }
Chris@82 1621 {
Chris@82 1622 V T93, T94, T7S, T7Z;
Chris@82 1623 T93 = VFMA(LDK(KP923879532), T8B, T8y);
Chris@82 1624 T94 = VFMA(LDK(KP923879532), T8u, T8n);
Chris@82 1625 T95 = VFNMS(LDK(KP098491403), T94, T93);
Chris@82 1626 T9d = VFMA(LDK(KP098491403), T93, T94);
Chris@82 1627 T7S = VFNMS(LDK(KP198912367), T7R, T7O);
Chris@82 1628 T7Z = VFMA(LDK(KP198912367), T7Y, T7V);
Chris@82 1629 T80 = VSUB(T7S, T7Z);
Chris@82 1630 T98 = VADD(T7S, T7Z);
Chris@82 1631 }
Chris@82 1632 {
Chris@82 1633 V T8c, T8j, T8H, T8K;
Chris@82 1634 T8c = VFNMS(LDK(KP923879532), T8b, T84);
Chris@82 1635 T8j = VFNMS(LDK(KP923879532), T8i, T8f);
Chris@82 1636 T8k = VFMA(LDK(KP820678790), T8j, T8c);
Chris@82 1637 T8T = VFNMS(LDK(KP820678790), T8c, T8j);
Chris@82 1638 T8H = VFMA(LDK(KP707106781), T8G, T8F);
Chris@82 1639 T8K = VADD(T8I, T8J);
Chris@82 1640 T8L = VFNMS(LDK(KP923879532), T8K, T8H);
Chris@82 1641 T97 = VFMA(LDK(KP923879532), T8K, T8H);
Chris@82 1642 }
Chris@82 1643 {
Chris@82 1644 V T8M, T8N, T8v, T8C;
Chris@82 1645 T8M = VFNMS(LDK(KP198912367), T7V, T7Y);
Chris@82 1646 T8N = VFMA(LDK(KP198912367), T7O, T7R);
Chris@82 1647 T8O = VSUB(T8M, T8N);
Chris@82 1648 T8Y = VADD(T8N, T8M);
Chris@82 1649 T8v = VFNMS(LDK(KP923879532), T8u, T8n);
Chris@82 1650 T8C = VFNMS(LDK(KP923879532), T8B, T8y);
Chris@82 1651 T8D = VFNMS(LDK(KP820678790), T8C, T8v);
Chris@82 1652 T8S = VFMA(LDK(KP820678790), T8v, T8C);
Chris@82 1653 }
Chris@82 1654 {
Chris@82 1655 V T81, T8E, Tgx, Tgy;
Chris@82 1656 T81 = VFMA(LDK(KP980785280), T80, T7L);
Chris@82 1657 T8E = VSUB(T8k, T8D);
Chris@82 1658 Tgx = VFNMS(LDK(KP773010453), T8E, T81);
Chris@82 1659 STM4(&(ro[41]), Tgx, ovs, &(ro[1]));
Chris@82 1660 STN4(&(ro[40]), TeR, Tgx, Tfv, Tgh, ovs);
Chris@82 1661 Tgy = VFMA(LDK(KP773010453), T8E, T81);
Chris@82 1662 STM4(&(ro[9]), Tgy, ovs, &(ro[1]));
Chris@82 1663 STN4(&(ro[8]), TeS, Tgy, Tfw, Tgi, ovs);
Chris@82 1664 }
Chris@82 1665 {
Chris@82 1666 V T8R, T8U, Tgz, TgA;
Chris@82 1667 T8R = VFMA(LDK(KP980785280), T8O, T8L);
Chris@82 1668 T8U = VSUB(T8S, T8T);
Chris@82 1669 Tgz = VFNMS(LDK(KP773010453), T8U, T8R);
Chris@82 1670 STM4(&(io[41]), Tgz, ovs, &(io[1]));
Chris@82 1671 STN4(&(io[40]), TeT, Tgz, Tfx, Tgj, ovs);
Chris@82 1672 TgA = VFMA(LDK(KP773010453), T8U, T8R);
Chris@82 1673 STM4(&(io[9]), TgA, ovs, &(io[1]));
Chris@82 1674 STN4(&(io[8]), TeU, TgA, Tfy, Tgk, ovs);
Chris@82 1675 }
Chris@82 1676 {
Chris@82 1677 V T8P, T8Q, TgB, TgC;
Chris@82 1678 T8P = VFNMS(LDK(KP980785280), T8O, T8L);
Chris@82 1679 T8Q = VADD(T8k, T8D);
Chris@82 1680 TgB = VFNMS(LDK(KP773010453), T8Q, T8P);
Chris@82 1681 STM4(&(io[25]), TgB, ovs, &(io[1]));
Chris@82 1682 STN4(&(io[24]), TeW, TgB, Tfz, Tgl, ovs);
Chris@82 1683 TgC = VFMA(LDK(KP773010453), T8Q, T8P);
Chris@82 1684 STM4(&(io[57]), TgC, ovs, &(io[1]));
Chris@82 1685 STN4(&(io[56]), TeV, TgC, TfA, Tgm, ovs);
Chris@82 1686 }
Chris@82 1687 {
Chris@82 1688 V T8V, T8W, TgD, TgE;
Chris@82 1689 T8V = VFNMS(LDK(KP980785280), T80, T7L);
Chris@82 1690 T8W = VADD(T8T, T8S);
Chris@82 1691 TgD = VFNMS(LDK(KP773010453), T8W, T8V);
Chris@82 1692 STM4(&(ro[25]), TgD, ovs, &(ro[1]));
Chris@82 1693 STN4(&(ro[24]), TeY, TgD, TfB, Tgn, ovs);
Chris@82 1694 TgE = VFMA(LDK(KP773010453), T8W, T8V);
Chris@82 1695 STM4(&(ro[57]), TgE, ovs, &(ro[1]));
Chris@82 1696 STN4(&(ro[56]), TeX, TgE, TfC, Tgo, ovs);
Chris@82 1697 }
Chris@82 1698 {
Chris@82 1699 V T8Z, T96, TgF, TgG;
Chris@82 1700 T8Z = VFMA(LDK(KP980785280), T8Y, T8X);
Chris@82 1701 T96 = VADD(T92, T95);
Chris@82 1702 TgF = VFNMS(LDK(KP995184726), T96, T8Z);
Chris@82 1703 STM4(&(ro[33]), TgF, ovs, &(ro[1]));
Chris@82 1704 STN4(&(ro[32]), TeJ, TgF, TfD, Tgp, ovs);
Chris@82 1705 TgG = VFMA(LDK(KP995184726), T96, T8Z);
Chris@82 1706 STM4(&(ro[1]), TgG, ovs, &(ro[1]));
Chris@82 1707 STN4(&(ro[0]), TeL, TgG, TfE, Tgq, ovs);
Chris@82 1708 }
Chris@82 1709 {
Chris@82 1710 V T9f, T9g, TgH, TgI;
Chris@82 1711 T9f = VFMA(LDK(KP980785280), T98, T97);
Chris@82 1712 T9g = VADD(T9c, T9d);
Chris@82 1713 TgH = VFNMS(LDK(KP995184726), T9g, T9f);
Chris@82 1714 STM4(&(io[33]), TgH, ovs, &(io[1]));
Chris@82 1715 STN4(&(io[32]), TeK, TgH, TfF, Tgr, ovs);
Chris@82 1716 TgI = VFMA(LDK(KP995184726), T9g, T9f);
Chris@82 1717 STM4(&(io[1]), TgI, ovs, &(io[1]));
Chris@82 1718 STN4(&(io[0]), TeM, TgI, TfG, Tgs, ovs);
Chris@82 1719 }
Chris@82 1720 {
Chris@82 1721 V T99, T9a, TgJ, TgK;
Chris@82 1722 T99 = VFNMS(LDK(KP980785280), T98, T97);
Chris@82 1723 T9a = VSUB(T95, T92);
Chris@82 1724 TgJ = VFNMS(LDK(KP995184726), T9a, T99);
Chris@82 1725 STM4(&(io[49]), TgJ, ovs, &(io[1]));
Chris@82 1726 STN4(&(io[48]), TeP, TgJ, TfH, Tgt, ovs);
Chris@82 1727 TgK = VFMA(LDK(KP995184726), T9a, T99);
Chris@82 1728 STM4(&(io[17]), TgK, ovs, &(io[1]));
Chris@82 1729 STN4(&(io[16]), TeN, TgK, TfI, Tgu, ovs);
Chris@82 1730 }
Chris@82 1731 {
Chris@82 1732 V T9b, T9e, TgL, TgM;
Chris@82 1733 T9b = VFNMS(LDK(KP980785280), T8Y, T8X);
Chris@82 1734 T9e = VSUB(T9c, T9d);
Chris@82 1735 TgL = VFNMS(LDK(KP995184726), T9e, T9b);
Chris@82 1736 STM4(&(ro[49]), TgL, ovs, &(ro[1]));
Chris@82 1737 STN4(&(ro[48]), TeQ, TgL, TfJ, Tgv, ovs);
Chris@82 1738 TgM = VFMA(LDK(KP995184726), T9e, T9b);
Chris@82 1739 STM4(&(ro[17]), TgM, ovs, &(ro[1]));
Chris@82 1740 STN4(&(ro[16]), TeO, TgM, TfK, Tgw, ovs);
Chris@82 1741 }
Chris@82 1742 }
Chris@82 1743 }
Chris@82 1744 }
Chris@82 1745 }
Chris@82 1746 }
Chris@82 1747 }
Chris@82 1748 VLEAVE();
Chris@82 1749 }
Chris@82 1750
Chris@82 1751 static const kdft_desc desc = { 64, XSIMD_STRING("n2sv_64"), {520, 0, 392, 0}, &GENUS, 0, 1, 0, 0 };
Chris@82 1752
Chris@82 1753 void XSIMD(codelet_n2sv_64) (planner *p) {
Chris@82 1754 X(kdft_register) (p, n2sv_64, &desc);
Chris@82 1755 }
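/*
 * Registration sketch (hedged, not part of the generated source): the
 * XSIMD(codelet_n2sv_64) entry point above hands this size-64 codelet to the
 * planner via X(kdft_register), with `desc` describing it (size 64, name
 * "n2sv_64").  The {520, 0, 392, 0} initializer appears to be the operation
 * count {additions, multiplications, fused multiply-adds, other}; that field
 * ordering is an assumption inferred from the opcount comments in this file.
 * A caller that already holds a planner would invoke it roughly as below;
 * the surrounding setup is assumed, not taken from this file:
 *
 *     planner *plnr = ...;            // obtained from FFTW internals
 *     XSIMD(codelet_n2sv_64)(plnr);   // makes n2sv_64 available to the planner
 */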
Chris@82 1756
Chris@82 1757 #else
Chris@82 1758
Chris@82 1759 /* Generated by: ../../../genfft/gen_notw.native -simd -compact -variables 4 -pipeline-latency 8 -n 64 -name n2sv_64 -with-ostride 1 -include dft/simd/n2s.h -store-multiple 4 */
Chris@82 1760
Chris@82 1761 /*
Chris@82 1762 * This function contains 912 FP additions, 248 FP multiplications,
Chris@82 1763 * (or, 808 additions, 144 multiplications, 104 fused multiply/add),
Chris@82 1764 * 260 stack variables, 15 constants, and 288 memory accesses
Chris@82 1765 */
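/*
 * What follows is the variant compiled when neither FMA-preferring macro is
 * defined (see the #else above).  It was generated without the -fma flag, so
 * rotation products are written as explicit VMUL/VFMA/VFNMS combinations on
 * separate sine/cosine constants, which is why the operation counts above
 * include nonzero plain multiplications.  A minimal sketch of the recurring
 * pattern, assuming VFNMS(a, b, c) computes c - a*b and using hypothetical
 * inputs a and b:
 *
 *     t = VFNMS(LDK(KP923879532), b, VMUL(LDK(KP382683432), a));
 *         // t = KP382683432*a - KP923879532*b, a rotation component built
 *         // from sin(pi/8) and cos(pi/8)
 */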
Chris@82 1766 #include "dft/simd/n2s.h"
Chris@82 1767
Chris@82 1768 static void n2sv_64(const R *ri, const R *ii, R *ro, R *io, stride is, stride os, INT v, INT ivs, INT ovs)
Chris@82 1769 {
Chris@82 1770 DVK(KP773010453, +0.773010453362736960810906609758469800971041293);
Chris@82 1771 DVK(KP634393284, +0.634393284163645498215171613225493370675687095);
Chris@82 1772 DVK(KP098017140, +0.098017140329560601994195563888641845861136673);
Chris@82 1773 DVK(KP995184726, +0.995184726672196886244836953109479921575474869);
Chris@82 1774 DVK(KP881921264, +0.881921264348355029712756863660388349508442621);
Chris@82 1775 DVK(KP471396736, +0.471396736825997648556387625905254377657460319);
Chris@82 1776 DVK(KP290284677, +0.290284677254462367636192375817395274691476278);
Chris@82 1777 DVK(KP956940335, +0.956940335732208864935797886980269969482849206);
Chris@82 1778 DVK(KP831469612, +0.831469612302545237078788377617905756738560812);
Chris@82 1779 DVK(KP555570233, +0.555570233019602224742830813948532874374937191);
Chris@82 1780 DVK(KP195090322, +0.195090322016128267848284868477022240927691618);
Chris@82 1781 DVK(KP980785280, +0.980785280403230449126182236134239036973933731);
Chris@82 1782 DVK(KP923879532, +0.923879532511286756128183189396788286822416626);
Chris@82 1783 DVK(KP382683432, +0.382683432365089771728459984030398866761344562);
Chris@82 1784 DVK(KP707106781, +0.707106781186547524400844362104849039284835938);
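/*
 * The DVK constants above are high-precision cosines and sines of multiples
 * of pi/32, the roots of unity a size-64 DFT needs -- for example
 * KP707106781 = cos(pi/4), KP923879532 = cos(pi/8), KP382683432 = sin(pi/8),
 * KP980785280 = cos(pi/16), KP195090322 = sin(pi/16),
 * KP995184726 = cos(pi/32), and KP098017140 = sin(pi/32).  This
 * identification is inferred from the numeric values; it is not stated in
 * the generated source.
 */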
Chris@82 1785 {
Chris@82 1786 INT i;
Chris@82 1787 for (i = v; i > 0; i = i - (2 * VL), ri = ri + ((2 * VL) * ivs), ii = ii + ((2 * VL) * ivs), ro = ro + ((2 * VL) * ovs), io = io + ((2 * VL) * ovs), MAKE_VOLATILE_STRIDE(256, is), MAKE_VOLATILE_STRIDE(256, os)) {
Chris@82 1788 V T37, T7B, T8F, T5Z, Tf, Td9, TbB, TcB, T62, T7C, T2i, TdH, Tah, Tcb, T3e;
Chris@82 1789 V T8G, Tu, TdI, Tak, TbD, Tan, TbC, T2x, Tda, T3m, T65, T7G, T8J, T7J, T8I;
Chris@82 1790 V T3t, T64, TK, Tdd, Tas, Tce, Tav, Tcf, T2N, Tdc, T3G, T6G, T7O, T9k, T7R;
Chris@82 1791 V T9l, T3N, T6H, T1L, Tdv, Tbs, Tcw, TdC, Teo, T5j, T6V, T5Q, T6Y, T8y, T9C;
Chris@82 1792 V Tbb, Tct, T8n, T9z, TZ, Tdf, Taz, Tch, TaC, Tci, T32, Tdg, T3Z, T6J, T7V;
Chris@82 1793 V T9n, T7Y, T9o, T46, T6K, T1g, Tdp, Tb1, Tcm, Tdm, Tej, T4q, T6R, T4X, T6O;
Chris@82 1794 V T8f, T9s, TaK, Tcp, T84, T9v, T1v, Tdn, Tb4, Tcq, Tds, Tek, T4N, T6P, T50;
Chris@82 1795 V T6S, T8i, T9w, TaV, Tcn, T8b, T9t, T20, TdD, Tbv, Tcu, Tdy, Tep, T5G, T6Z;
Chris@82 1796 V T5T, T6W, T8B, T9A, Tbm, Tcx, T8u, T9D;
Chris@82 1797 {
Chris@82 1798 V T3, T35, T26, T5Y, T6, T5X, T29, T36, Ta, T39, T2d, T38, Td, T3b, T2g;
Chris@82 1799 V T3c;
Chris@82 1800 {
Chris@82 1801 V T1, T2, T24, T25;
Chris@82 1802 T1 = LD(&(ri[0]), ivs, &(ri[0]));
Chris@82 1803 T2 = LD(&(ri[WS(is, 32)]), ivs, &(ri[0]));
Chris@82 1804 T3 = VADD(T1, T2);
Chris@82 1805 T35 = VSUB(T1, T2);
Chris@82 1806 T24 = LD(&(ii[0]), ivs, &(ii[0]));
Chris@82 1807 T25 = LD(&(ii[WS(is, 32)]), ivs, &(ii[0]));
Chris@82 1808 T26 = VADD(T24, T25);
Chris@82 1809 T5Y = VSUB(T24, T25);
Chris@82 1810 }
Chris@82 1811 {
Chris@82 1812 V T4, T5, T27, T28;
Chris@82 1813 T4 = LD(&(ri[WS(is, 16)]), ivs, &(ri[0]));
Chris@82 1814 T5 = LD(&(ri[WS(is, 48)]), ivs, &(ri[0]));
Chris@82 1815 T6 = VADD(T4, T5);
Chris@82 1816 T5X = VSUB(T4, T5);
Chris@82 1817 T27 = LD(&(ii[WS(is, 16)]), ivs, &(ii[0]));
Chris@82 1818 T28 = LD(&(ii[WS(is, 48)]), ivs, &(ii[0]));
Chris@82 1819 T29 = VADD(T27, T28);
Chris@82 1820 T36 = VSUB(T27, T28);
Chris@82 1821 }
Chris@82 1822 {
Chris@82 1823 V T8, T9, T2b, T2c;
Chris@82 1824 T8 = LD(&(ri[WS(is, 8)]), ivs, &(ri[0]));
Chris@82 1825 T9 = LD(&(ri[WS(is, 40)]), ivs, &(ri[0]));
Chris@82 1826 Ta = VADD(T8, T9);
Chris@82 1827 T39 = VSUB(T8, T9);
Chris@82 1828 T2b = LD(&(ii[WS(is, 8)]), ivs, &(ii[0]));
Chris@82 1829 T2c = LD(&(ii[WS(is, 40)]), ivs, &(ii[0]));
Chris@82 1830 T2d = VADD(T2b, T2c);
Chris@82 1831 T38 = VSUB(T2b, T2c);
Chris@82 1832 }
Chris@82 1833 {
Chris@82 1834 V Tb, Tc, T2e, T2f;
Chris@82 1835 Tb = LD(&(ri[WS(is, 56)]), ivs, &(ri[0]));
Chris@82 1836 Tc = LD(&(ri[WS(is, 24)]), ivs, &(ri[0]));
Chris@82 1837 Td = VADD(Tb, Tc);
Chris@82 1838 T3b = VSUB(Tb, Tc);
Chris@82 1839 T2e = LD(&(ii[WS(is, 56)]), ivs, &(ii[0]));
Chris@82 1840 T2f = LD(&(ii[WS(is, 24)]), ivs, &(ii[0]));
Chris@82 1841 T2g = VADD(T2e, T2f);
Chris@82 1842 T3c = VSUB(T2e, T2f);
Chris@82 1843 }
Chris@82 1844 {
Chris@82 1845 V T7, Te, T2a, T2h;
Chris@82 1846 T37 = VSUB(T35, T36);
Chris@82 1847 T7B = VADD(T35, T36);
Chris@82 1848 T8F = VSUB(T5Y, T5X);
Chris@82 1849 T5Z = VADD(T5X, T5Y);
Chris@82 1850 T7 = VADD(T3, T6);
Chris@82 1851 Te = VADD(Ta, Td);
Chris@82 1852 Tf = VADD(T7, Te);
Chris@82 1853 Td9 = VSUB(T7, Te);
Chris@82 1854 {
Chris@82 1855 V Tbz, TbA, T60, T61;
Chris@82 1856 Tbz = VSUB(T26, T29);
Chris@82 1857 TbA = VSUB(Td, Ta);
Chris@82 1858 TbB = VSUB(Tbz, TbA);
Chris@82 1859 TcB = VADD(TbA, Tbz);
Chris@82 1860 T60 = VSUB(T3b, T3c);
Chris@82 1861 T61 = VADD(T39, T38);
Chris@82 1862 T62 = VMUL(LDK(KP707106781), VSUB(T60, T61));
Chris@82 1863 T7C = VMUL(LDK(KP707106781), VADD(T61, T60));
Chris@82 1864 }
Chris@82 1865 T2a = VADD(T26, T29);
Chris@82 1866 T2h = VADD(T2d, T2g);
Chris@82 1867 T2i = VADD(T2a, T2h);
Chris@82 1868 TdH = VSUB(T2a, T2h);
Chris@82 1869 {
Chris@82 1870 V Taf, Tag, T3a, T3d;
Chris@82 1871 Taf = VSUB(T3, T6);
Chris@82 1872 Tag = VSUB(T2d, T2g);
Chris@82 1873 Tah = VSUB(Taf, Tag);
Chris@82 1874 Tcb = VADD(Taf, Tag);
Chris@82 1875 T3a = VSUB(T38, T39);
Chris@82 1876 T3d = VADD(T3b, T3c);
Chris@82 1877 T3e = VMUL(LDK(KP707106781), VSUB(T3a, T3d));
Chris@82 1878 T8G = VMUL(LDK(KP707106781), VADD(T3a, T3d));
Chris@82 1879 }
Chris@82 1880 }
Chris@82 1881 }
Chris@82 1882 {
Chris@82 1883 V Ti, T3j, T2l, T3h, Tl, T3g, T2o, T3k, Tp, T3q, T2s, T3o, Ts, T3n, T2v;
Chris@82 1884 V T3r;
Chris@82 1885 {
Chris@82 1886 V Tg, Th, T2j, T2k;
Chris@82 1887 Tg = LD(&(ri[WS(is, 4)]), ivs, &(ri[0]));
Chris@82 1888 Th = LD(&(ri[WS(is, 36)]), ivs, &(ri[0]));
Chris@82 1889 Ti = VADD(Tg, Th);
Chris@82 1890 T3j = VSUB(Tg, Th);
Chris@82 1891 T2j = LD(&(ii[WS(is, 4)]), ivs, &(ii[0]));
Chris@82 1892 T2k = LD(&(ii[WS(is, 36)]), ivs, &(ii[0]));
Chris@82 1893 T2l = VADD(T2j, T2k);
Chris@82 1894 T3h = VSUB(T2j, T2k);
Chris@82 1895 }
Chris@82 1896 {
Chris@82 1897 V Tj, Tk, T2m, T2n;
Chris@82 1898 Tj = LD(&(ri[WS(is, 20)]), ivs, &(ri[0]));
Chris@82 1899 Tk = LD(&(ri[WS(is, 52)]), ivs, &(ri[0]));
Chris@82 1900 Tl = VADD(Tj, Tk);
Chris@82 1901 T3g = VSUB(Tj, Tk);
Chris@82 1902 T2m = LD(&(ii[WS(is, 20)]), ivs, &(ii[0]));
Chris@82 1903 T2n = LD(&(ii[WS(is, 52)]), ivs, &(ii[0]));
Chris@82 1904 T2o = VADD(T2m, T2n);
Chris@82 1905 T3k = VSUB(T2m, T2n);
Chris@82 1906 }
Chris@82 1907 {
Chris@82 1908 V Tn, To, T2q, T2r;
Chris@82 1909 Tn = LD(&(ri[WS(is, 60)]), ivs, &(ri[0]));
Chris@82 1910 To = LD(&(ri[WS(is, 28)]), ivs, &(ri[0]));
Chris@82 1911 Tp = VADD(Tn, To);
Chris@82 1912 T3q = VSUB(Tn, To);
Chris@82 1913 T2q = LD(&(ii[WS(is, 60)]), ivs, &(ii[0]));
Chris@82 1914 T2r = LD(&(ii[WS(is, 28)]), ivs, &(ii[0]));
Chris@82 1915 T2s = VADD(T2q, T2r);
Chris@82 1916 T3o = VSUB(T2q, T2r);
Chris@82 1917 }
Chris@82 1918 {
Chris@82 1919 V Tq, Tr, T2t, T2u;
Chris@82 1920 Tq = LD(&(ri[WS(is, 12)]), ivs, &(ri[0]));
Chris@82 1921 Tr = LD(&(ri[WS(is, 44)]), ivs, &(ri[0]));
Chris@82 1922 Ts = VADD(Tq, Tr);
Chris@82 1923 T3n = VSUB(Tq, Tr);
Chris@82 1924 T2t = LD(&(ii[WS(is, 12)]), ivs, &(ii[0]));
Chris@82 1925 T2u = LD(&(ii[WS(is, 44)]), ivs, &(ii[0]));
Chris@82 1926 T2v = VADD(T2t, T2u);
Chris@82 1927 T3r = VSUB(T2t, T2u);
Chris@82 1928 }
Chris@82 1929 {
Chris@82 1930 V Tm, Tt, Tai, Taj;
Chris@82 1931 Tm = VADD(Ti, Tl);
Chris@82 1932 Tt = VADD(Tp, Ts);
Chris@82 1933 Tu = VADD(Tm, Tt);
Chris@82 1934 TdI = VSUB(Tt, Tm);
Chris@82 1935 Tai = VSUB(T2l, T2o);
Chris@82 1936 Taj = VSUB(Ti, Tl);
Chris@82 1937 Tak = VSUB(Tai, Taj);
Chris@82 1938 TbD = VADD(Taj, Tai);
Chris@82 1939 }
Chris@82 1940 {
Chris@82 1941 V Tal, Tam, T2p, T2w;
Chris@82 1942 Tal = VSUB(Tp, Ts);
Chris@82 1943 Tam = VSUB(T2s, T2v);
Chris@82 1944 Tan = VADD(Tal, Tam);
Chris@82 1945 TbC = VSUB(Tal, Tam);
Chris@82 1946 T2p = VADD(T2l, T2o);
Chris@82 1947 T2w = VADD(T2s, T2v);
Chris@82 1948 T2x = VADD(T2p, T2w);
Chris@82 1949 Tda = VSUB(T2p, T2w);
Chris@82 1950 }
Chris@82 1951 {
Chris@82 1952 V T3i, T3l, T7E, T7F;
Chris@82 1953 T3i = VADD(T3g, T3h);
Chris@82 1954 T3l = VSUB(T3j, T3k);
Chris@82 1955 T3m = VFNMS(LDK(KP923879532), T3l, VMUL(LDK(KP382683432), T3i));
Chris@82 1956 T65 = VFMA(LDK(KP923879532), T3i, VMUL(LDK(KP382683432), T3l));
Chris@82 1957 T7E = VSUB(T3h, T3g);
Chris@82 1958 T7F = VADD(T3j, T3k);
Chris@82 1959 T7G = VFNMS(LDK(KP382683432), T7F, VMUL(LDK(KP923879532), T7E));
Chris@82 1960 T8J = VFMA(LDK(KP382683432), T7E, VMUL(LDK(KP923879532), T7F));
Chris@82 1961 }
Chris@82 1962 {
Chris@82 1963 V T7H, T7I, T3p, T3s;
Chris@82 1964 T7H = VSUB(T3o, T3n);
Chris@82 1965 T7I = VADD(T3q, T3r);
Chris@82 1966 T7J = VFMA(LDK(KP923879532), T7H, VMUL(LDK(KP382683432), T7I));
Chris@82 1967 T8I = VFNMS(LDK(KP382683432), T7H, VMUL(LDK(KP923879532), T7I));
Chris@82 1968 T3p = VADD(T3n, T3o);
Chris@82 1969 T3s = VSUB(T3q, T3r);
Chris@82 1970 T3t = VFMA(LDK(KP382683432), T3p, VMUL(LDK(KP923879532), T3s));
Chris@82 1971 T64 = VFNMS(LDK(KP923879532), T3p, VMUL(LDK(KP382683432), T3s));
Chris@82 1972 }
Chris@82 1973 }
Chris@82 1974 {
Chris@82 1975 V Ty, T3H, T2B, T3x, TB, T3w, T2E, T3I, TI, T3L, T2L, T3B, TF, T3K, T2I;
Chris@82 1976 V T3E;
Chris@82 1977 {
Chris@82 1978 V Tw, Tx, T2C, T2D;
Chris@82 1979 Tw = LD(&(ri[WS(is, 2)]), ivs, &(ri[0]));
Chris@82 1980 Tx = LD(&(ri[WS(is, 34)]), ivs, &(ri[0]));
Chris@82 1981 Ty = VADD(Tw, Tx);
Chris@82 1982 T3H = VSUB(Tw, Tx);
Chris@82 1983 {
Chris@82 1984 V T2z, T2A, Tz, TA;
Chris@82 1985 T2z = LD(&(ii[WS(is, 2)]), ivs, &(ii[0]));
Chris@82 1986 T2A = LD(&(ii[WS(is, 34)]), ivs, &(ii[0]));
Chris@82 1987 T2B = VADD(T2z, T2A);
Chris@82 1988 T3x = VSUB(T2z, T2A);
Chris@82 1989 Tz = LD(&(ri[WS(is, 18)]), ivs, &(ri[0]));
Chris@82 1990 TA = LD(&(ri[WS(is, 50)]), ivs, &(ri[0]));
Chris@82 1991 TB = VADD(Tz, TA);
Chris@82 1992 T3w = VSUB(Tz, TA);
Chris@82 1993 }
Chris@82 1994 T2C = LD(&(ii[WS(is, 18)]), ivs, &(ii[0]));
Chris@82 1995 T2D = LD(&(ii[WS(is, 50)]), ivs, &(ii[0]));
Chris@82 1996 T2E = VADD(T2C, T2D);
Chris@82 1997 T3I = VSUB(T2C, T2D);
Chris@82 1998 {
Chris@82 1999 V TG, TH, T3z, T2J, T2K, T3A;
Chris@82 2000 TG = LD(&(ri[WS(is, 58)]), ivs, &(ri[0]));
Chris@82 2001 TH = LD(&(ri[WS(is, 26)]), ivs, &(ri[0]));
Chris@82 2002 T3z = VSUB(TG, TH);
Chris@82 2003 T2J = LD(&(ii[WS(is, 58)]), ivs, &(ii[0]));
Chris@82 2004 T2K = LD(&(ii[WS(is, 26)]), ivs, &(ii[0]));
Chris@82 2005 T3A = VSUB(T2J, T2K);
Chris@82 2006 TI = VADD(TG, TH);
Chris@82 2007 T3L = VADD(T3z, T3A);
Chris@82 2008 T2L = VADD(T2J, T2K);
Chris@82 2009 T3B = VSUB(T3z, T3A);
Chris@82 2010 }
Chris@82 2011 {
Chris@82 2012 V TD, TE, T3C, T2G, T2H, T3D;
Chris@82 2013 TD = LD(&(ri[WS(is, 10)]), ivs, &(ri[0]));
Chris@82 2014 TE = LD(&(ri[WS(is, 42)]), ivs, &(ri[0]));
Chris@82 2015 T3C = VSUB(TD, TE);
Chris@82 2016 T2G = LD(&(ii[WS(is, 10)]), ivs, &(ii[0]));
Chris@82 2017 T2H = LD(&(ii[WS(is, 42)]), ivs, &(ii[0]));
Chris@82 2018 T3D = VSUB(T2G, T2H);
Chris@82 2019 TF = VADD(TD, TE);
Chris@82 2020 T3K = VSUB(T3D, T3C);
Chris@82 2021 T2I = VADD(T2G, T2H);
Chris@82 2022 T3E = VADD(T3C, T3D);
Chris@82 2023 }
Chris@82 2024 }
Chris@82 2025 {
Chris@82 2026 V TC, TJ, Taq, Tar;
Chris@82 2027 TC = VADD(Ty, TB);
Chris@82 2028 TJ = VADD(TF, TI);
Chris@82 2029 TK = VADD(TC, TJ);
Chris@82 2030 Tdd = VSUB(TC, TJ);
Chris@82 2031 Taq = VSUB(T2B, T2E);
Chris@82 2032 Tar = VSUB(TI, TF);
Chris@82 2033 Tas = VSUB(Taq, Tar);
Chris@82 2034 Tce = VADD(Tar, Taq);
Chris@82 2035 }
Chris@82 2036 {
Chris@82 2037 V Tat, Tau, T2F, T2M;
Chris@82 2038 Tat = VSUB(Ty, TB);
Chris@82 2039 Tau = VSUB(T2I, T2L);
Chris@82 2040 Tav = VSUB(Tat, Tau);
Chris@82 2041 Tcf = VADD(Tat, Tau);
Chris@82 2042 T2F = VADD(T2B, T2E);
Chris@82 2043 T2M = VADD(T2I, T2L);
Chris@82 2044 T2N = VADD(T2F, T2M);
Chris@82 2045 Tdc = VSUB(T2F, T2M);
Chris@82 2046 }
Chris@82 2047 {
Chris@82 2048 V T3y, T3F, T7M, T7N;
Chris@82 2049 T3y = VADD(T3w, T3x);
Chris@82 2050 T3F = VMUL(LDK(KP707106781), VSUB(T3B, T3E));
Chris@82 2051 T3G = VSUB(T3y, T3F);
Chris@82 2052 T6G = VADD(T3y, T3F);
Chris@82 2053 T7M = VSUB(T3x, T3w);
Chris@82 2054 T7N = VMUL(LDK(KP707106781), VADD(T3K, T3L));
Chris@82 2055 T7O = VSUB(T7M, T7N);
Chris@82 2056 T9k = VADD(T7M, T7N);
Chris@82 2057 }
Chris@82 2058 {
Chris@82 2059 V T7P, T7Q, T3J, T3M;
Chris@82 2060 T7P = VADD(T3H, T3I);
Chris@82 2061 T7Q = VMUL(LDK(KP707106781), VADD(T3E, T3B));
Chris@82 2062 T7R = VSUB(T7P, T7Q);
Chris@82 2063 T9l = VADD(T7P, T7Q);
Chris@82 2064 T3J = VSUB(T3H, T3I);
Chris@82 2065 T3M = VMUL(LDK(KP707106781), VSUB(T3K, T3L));
Chris@82 2066 T3N = VSUB(T3J, T3M);
Chris@82 2067 T6H = VADD(T3J, T3M);
Chris@82 2068 }
Chris@82 2069 }
Chris@82 2070 {
Chris@82 2071 V T1z, T53, T5L, Tbo, T1C, T5I, T56, Tbp, T1J, Tb9, T5h, T5N, T1G, Tb8, T5c;
Chris@82 2072 V T5O;
Chris@82 2073 {
Chris@82 2074 V T1x, T1y, T54, T55;
Chris@82 2075 T1x = LD(&(ri[WS(is, 63)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2076 T1y = LD(&(ri[WS(is, 31)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2077 T1z = VADD(T1x, T1y);
Chris@82 2078 T53 = VSUB(T1x, T1y);
Chris@82 2079 {
Chris@82 2080 V T5J, T5K, T1A, T1B;
Chris@82 2081 T5J = LD(&(ii[WS(is, 63)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2082 T5K = LD(&(ii[WS(is, 31)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2083 T5L = VSUB(T5J, T5K);
Chris@82 2084 Tbo = VADD(T5J, T5K);
Chris@82 2085 T1A = LD(&(ri[WS(is, 15)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2086 T1B = LD(&(ri[WS(is, 47)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2087 T1C = VADD(T1A, T1B);
Chris@82 2088 T5I = VSUB(T1A, T1B);
Chris@82 2089 }
Chris@82 2090 T54 = LD(&(ii[WS(is, 15)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2091 T55 = LD(&(ii[WS(is, 47)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2092 T56 = VSUB(T54, T55);
Chris@82 2093 Tbp = VADD(T54, T55);
Chris@82 2094 {
Chris@82 2095 V T1H, T1I, T5d, T5e, T5f, T5g;
Chris@82 2096 T1H = LD(&(ri[WS(is, 55)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2097 T1I = LD(&(ri[WS(is, 23)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2098 T5d = VSUB(T1H, T1I);
Chris@82 2099 T5e = LD(&(ii[WS(is, 55)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2100 T5f = LD(&(ii[WS(is, 23)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2101 T5g = VSUB(T5e, T5f);
Chris@82 2102 T1J = VADD(T1H, T1I);
Chris@82 2103 Tb9 = VADD(T5e, T5f);
Chris@82 2104 T5h = VADD(T5d, T5g);
Chris@82 2105 T5N = VSUB(T5d, T5g);
Chris@82 2106 }
Chris@82 2107 {
Chris@82 2108 V T1E, T1F, T5b, T58, T59, T5a;
Chris@82 2109 T1E = LD(&(ri[WS(is, 7)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2110 T1F = LD(&(ri[WS(is, 39)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2111 T5b = VSUB(T1E, T1F);
Chris@82 2112 T58 = LD(&(ii[WS(is, 7)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2113 T59 = LD(&(ii[WS(is, 39)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2114 T5a = VSUB(T58, T59);
Chris@82 2115 T1G = VADD(T1E, T1F);
Chris@82 2116 Tb8 = VADD(T58, T59);
Chris@82 2117 T5c = VSUB(T5a, T5b);
Chris@82 2118 T5O = VADD(T5b, T5a);
Chris@82 2119 }
Chris@82 2120 }
Chris@82 2121 {
Chris@82 2122 V T1D, T1K, Tbq, Tbr;
Chris@82 2123 T1D = VADD(T1z, T1C);
Chris@82 2124 T1K = VADD(T1G, T1J);
Chris@82 2125 T1L = VADD(T1D, T1K);
Chris@82 2126 Tdv = VSUB(T1D, T1K);
Chris@82 2127 Tbq = VSUB(Tbo, Tbp);
Chris@82 2128 Tbr = VSUB(T1J, T1G);
Chris@82 2129 Tbs = VSUB(Tbq, Tbr);
Chris@82 2130 Tcw = VADD(Tbr, Tbq);
Chris@82 2131 }
Chris@82 2132 {
Chris@82 2133 V TdA, TdB, T57, T5i;
Chris@82 2134 TdA = VADD(Tbo, Tbp);
Chris@82 2135 TdB = VADD(Tb8, Tb9);
Chris@82 2136 TdC = VSUB(TdA, TdB);
Chris@82 2137 Teo = VADD(TdA, TdB);
Chris@82 2138 T57 = VSUB(T53, T56);
Chris@82 2139 T5i = VMUL(LDK(KP707106781), VSUB(T5c, T5h));
Chris@82 2140 T5j = VSUB(T57, T5i);
Chris@82 2141 T6V = VADD(T57, T5i);
Chris@82 2142 }
Chris@82 2143 {
Chris@82 2144 V T5M, T5P, T8w, T8x;
Chris@82 2145 T5M = VADD(T5I, T5L);
Chris@82 2146 T5P = VMUL(LDK(KP707106781), VSUB(T5N, T5O));
Chris@82 2147 T5Q = VSUB(T5M, T5P);
Chris@82 2148 T6Y = VADD(T5M, T5P);
Chris@82 2149 T8w = VSUB(T5L, T5I);
Chris@82 2150 T8x = VMUL(LDK(KP707106781), VADD(T5c, T5h));
Chris@82 2151 T8y = VSUB(T8w, T8x);
Chris@82 2152 T9C = VADD(T8w, T8x);
Chris@82 2153 }
Chris@82 2154 {
Chris@82 2155 V Tb7, Tba, T8l, T8m;
Chris@82 2156 Tb7 = VSUB(T1z, T1C);
Chris@82 2157 Tba = VSUB(Tb8, Tb9);
Chris@82 2158 Tbb = VSUB(Tb7, Tba);
Chris@82 2159 Tct = VADD(Tb7, Tba);
Chris@82 2160 T8l = VADD(T53, T56);
Chris@82 2161 T8m = VMUL(LDK(KP707106781), VADD(T5O, T5N));
Chris@82 2162 T8n = VSUB(T8l, T8m);
Chris@82 2163 T9z = VADD(T8l, T8m);
Chris@82 2164 }
Chris@82 2165 }
Chris@82 2166 {
Chris@82 2167 V TN, T40, T2Q, T3Q, TQ, T3P, T2T, T41, TX, T44, T30, T3U, TU, T43, T2X;
Chris@82 2168 V T3X;
Chris@82 2169 {
Chris@82 2170 V TL, TM, T2R, T2S;
Chris@82 2171 TL = LD(&(ri[WS(is, 62)]), ivs, &(ri[0]));
Chris@82 2172 TM = LD(&(ri[WS(is, 30)]), ivs, &(ri[0]));
Chris@82 2173 TN = VADD(TL, TM);
Chris@82 2174 T40 = VSUB(TL, TM);
Chris@82 2175 {
Chris@82 2176 V T2O, T2P, TO, TP;
Chris@82 2177 T2O = LD(&(ii[WS(is, 62)]), ivs, &(ii[0]));
Chris@82 2178 T2P = LD(&(ii[WS(is, 30)]), ivs, &(ii[0]));
Chris@82 2179 T2Q = VADD(T2O, T2P);
Chris@82 2180 T3Q = VSUB(T2O, T2P);
Chris@82 2181 TO = LD(&(ri[WS(is, 14)]), ivs, &(ri[0]));
Chris@82 2182 TP = LD(&(ri[WS(is, 46)]), ivs, &(ri[0]));
Chris@82 2183 TQ = VADD(TO, TP);
Chris@82 2184 T3P = VSUB(TO, TP);
Chris@82 2185 }
Chris@82 2186 T2R = LD(&(ii[WS(is, 14)]), ivs, &(ii[0]));
Chris@82 2187 T2S = LD(&(ii[WS(is, 46)]), ivs, &(ii[0]));
Chris@82 2188 T2T = VADD(T2R, T2S);
Chris@82 2189 T41 = VSUB(T2R, T2S);
Chris@82 2190 {
Chris@82 2191 V TV, TW, T3S, T2Y, T2Z, T3T;
Chris@82 2192 TV = LD(&(ri[WS(is, 54)]), ivs, &(ri[0]));
Chris@82 2193 TW = LD(&(ri[WS(is, 22)]), ivs, &(ri[0]));
Chris@82 2194 T3S = VSUB(TV, TW);
Chris@82 2195 T2Y = LD(&(ii[WS(is, 54)]), ivs, &(ii[0]));
Chris@82 2196 T2Z = LD(&(ii[WS(is, 22)]), ivs, &(ii[0]));
Chris@82 2197 T3T = VSUB(T2Y, T2Z);
Chris@82 2198 TX = VADD(TV, TW);
Chris@82 2199 T44 = VADD(T3S, T3T);
Chris@82 2200 T30 = VADD(T2Y, T2Z);
Chris@82 2201 T3U = VSUB(T3S, T3T);
Chris@82 2202 }
Chris@82 2203 {
Chris@82 2204 V TS, TT, T3V, T2V, T2W, T3W;
Chris@82 2205 TS = LD(&(ri[WS(is, 6)]), ivs, &(ri[0]));
Chris@82 2206 TT = LD(&(ri[WS(is, 38)]), ivs, &(ri[0]));
Chris@82 2207 T3V = VSUB(TS, TT);
Chris@82 2208 T2V = LD(&(ii[WS(is, 6)]), ivs, &(ii[0]));
Chris@82 2209 T2W = LD(&(ii[WS(is, 38)]), ivs, &(ii[0]));
Chris@82 2210 T3W = VSUB(T2V, T2W);
Chris@82 2211 TU = VADD(TS, TT);
Chris@82 2212 T43 = VSUB(T3W, T3V);
Chris@82 2213 T2X = VADD(T2V, T2W);
Chris@82 2214 T3X = VADD(T3V, T3W);
Chris@82 2215 }
Chris@82 2216 }
Chris@82 2217 {
Chris@82 2218 V TR, TY, Tax, Tay;
Chris@82 2219 TR = VADD(TN, TQ);
Chris@82 2220 TY = VADD(TU, TX);
Chris@82 2221 TZ = VADD(TR, TY);
Chris@82 2222 Tdf = VSUB(TR, TY);
Chris@82 2223 Tax = VSUB(T2Q, T2T);
Chris@82 2224 Tay = VSUB(TX, TU);
Chris@82 2225 Taz = VSUB(Tax, Tay);
Chris@82 2226 Tch = VADD(Tay, Tax);
Chris@82 2227 }
Chris@82 2228 {
Chris@82 2229 V TaA, TaB, T2U, T31;
Chris@82 2230 TaA = VSUB(TN, TQ);
Chris@82 2231 TaB = VSUB(T2X, T30);
Chris@82 2232 TaC = VSUB(TaA, TaB);
Chris@82 2233 Tci = VADD(TaA, TaB);
Chris@82 2234 T2U = VADD(T2Q, T2T);
Chris@82 2235 T31 = VADD(T2X, T30);
Chris@82 2236 T32 = VADD(T2U, T31);
Chris@82 2237 Tdg = VSUB(T2U, T31);
Chris@82 2238 }
Chris@82 2239 {
Chris@82 2240 V T3R, T3Y, T7T, T7U;
Chris@82 2241 T3R = VADD(T3P, T3Q);
Chris@82 2242 T3Y = VMUL(LDK(KP707106781), VSUB(T3U, T3X));
Chris@82 2243 T3Z = VSUB(T3R, T3Y);
Chris@82 2244 T6J = VADD(T3R, T3Y);
Chris@82 2245 T7T = VADD(T40, T41);
Chris@82 2246 T7U = VMUL(LDK(KP707106781), VADD(T3X, T3U));
Chris@82 2247 T7V = VSUB(T7T, T7U);
Chris@82 2248 T9n = VADD(T7T, T7U);
Chris@82 2249 }
Chris@82 2250 {
Chris@82 2251 V T7W, T7X, T42, T45;
Chris@82 2252 T7W = VSUB(T3Q, T3P);
Chris@82 2253 T7X = VMUL(LDK(KP707106781), VADD(T43, T44));
Chris@82 2254 T7Y = VSUB(T7W, T7X);
Chris@82 2255 T9o = VADD(T7W, T7X);
Chris@82 2256 T42 = VSUB(T40, T41);
Chris@82 2257 T45 = VMUL(LDK(KP707106781), VSUB(T43, T44));
Chris@82 2258 T46 = VSUB(T42, T45);
Chris@82 2259 T6K = VADD(T42, T45);
Chris@82 2260 }
Chris@82 2261 }
Chris@82 2262 {
Chris@82 2263 V T14, T4P, T4d, TaG, T17, T4a, T4S, TaH, T1e, TaZ, T4j, T4V, T1b, TaY, T4o;
Chris@82 2264 V T4U;
Chris@82 2265 {
Chris@82 2266 V T12, T13, T4Q, T4R;
Chris@82 2267 T12 = LD(&(ri[WS(is, 1)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2268 T13 = LD(&(ri[WS(is, 33)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2269 T14 = VADD(T12, T13);
Chris@82 2270 T4P = VSUB(T12, T13);
Chris@82 2271 {
Chris@82 2272 V T4b, T4c, T15, T16;
Chris@82 2273 T4b = LD(&(ii[WS(is, 1)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2274 T4c = LD(&(ii[WS(is, 33)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2275 T4d = VSUB(T4b, T4c);
Chris@82 2276 TaG = VADD(T4b, T4c);
Chris@82 2277 T15 = LD(&(ri[WS(is, 17)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2278 T16 = LD(&(ri[WS(is, 49)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2279 T17 = VADD(T15, T16);
Chris@82 2280 T4a = VSUB(T15, T16);
Chris@82 2281 }
Chris@82 2282 T4Q = LD(&(ii[WS(is, 17)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2283 T4R = LD(&(ii[WS(is, 49)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2284 T4S = VSUB(T4Q, T4R);
Chris@82 2285 TaH = VADD(T4Q, T4R);
Chris@82 2286 {
Chris@82 2287 V T1c, T1d, T4f, T4g, T4h, T4i;
Chris@82 2288 T1c = LD(&(ri[WS(is, 57)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2289 T1d = LD(&(ri[WS(is, 25)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2290 T4f = VSUB(T1c, T1d);
Chris@82 2291 T4g = LD(&(ii[WS(is, 57)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2292 T4h = LD(&(ii[WS(is, 25)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2293 T4i = VSUB(T4g, T4h);
Chris@82 2294 T1e = VADD(T1c, T1d);
Chris@82 2295 TaZ = VADD(T4g, T4h);
Chris@82 2296 T4j = VSUB(T4f, T4i);
Chris@82 2297 T4V = VADD(T4f, T4i);
Chris@82 2298 }
Chris@82 2299 {
Chris@82 2300 V T19, T1a, T4k, T4l, T4m, T4n;
Chris@82 2301 T19 = LD(&(ri[WS(is, 9)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2302 T1a = LD(&(ri[WS(is, 41)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2303 T4k = VSUB(T19, T1a);
Chris@82 2304 T4l = LD(&(ii[WS(is, 9)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2305 T4m = LD(&(ii[WS(is, 41)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2306 T4n = VSUB(T4l, T4m);
Chris@82 2307 T1b = VADD(T19, T1a);
Chris@82 2308 TaY = VADD(T4l, T4m);
Chris@82 2309 T4o = VADD(T4k, T4n);
Chris@82 2310 T4U = VSUB(T4n, T4k);
Chris@82 2311 }
Chris@82 2312 }
Chris@82 2313 {
Chris@82 2314 V T18, T1f, TaX, Tb0;
Chris@82 2315 T18 = VADD(T14, T17);
Chris@82 2316 T1f = VADD(T1b, T1e);
Chris@82 2317 T1g = VADD(T18, T1f);
Chris@82 2318 Tdp = VSUB(T18, T1f);
Chris@82 2319 TaX = VSUB(T14, T17);
Chris@82 2320 Tb0 = VSUB(TaY, TaZ);
Chris@82 2321 Tb1 = VSUB(TaX, Tb0);
Chris@82 2322 Tcm = VADD(TaX, Tb0);
Chris@82 2323 }
Chris@82 2324 {
Chris@82 2325 V Tdk, Tdl, T4e, T4p;
Chris@82 2326 Tdk = VADD(TaG, TaH);
Chris@82 2327 Tdl = VADD(TaY, TaZ);
Chris@82 2328 Tdm = VSUB(Tdk, Tdl);
Chris@82 2329 Tej = VADD(Tdk, Tdl);
Chris@82 2330 T4e = VADD(T4a, T4d);
Chris@82 2331 T4p = VMUL(LDK(KP707106781), VSUB(T4j, T4o));
Chris@82 2332 T4q = VSUB(T4e, T4p);
Chris@82 2333 T6R = VADD(T4e, T4p);
Chris@82 2334 }
Chris@82 2335 {
Chris@82 2336 V T4T, T4W, T8d, T8e;
Chris@82 2337 T4T = VSUB(T4P, T4S);
Chris@82 2338 T4W = VMUL(LDK(KP707106781), VSUB(T4U, T4V));
Chris@82 2339 T4X = VSUB(T4T, T4W);
Chris@82 2340 T6O = VADD(T4T, T4W);
Chris@82 2341 T8d = VADD(T4P, T4S);
Chris@82 2342 T8e = VMUL(LDK(KP707106781), VADD(T4o, T4j));
Chris@82 2343 T8f = VSUB(T8d, T8e);
Chris@82 2344 T9s = VADD(T8d, T8e);
Chris@82 2345 }
Chris@82 2346 {
Chris@82 2347 V TaI, TaJ, T82, T83;
Chris@82 2348 TaI = VSUB(TaG, TaH);
Chris@82 2349 TaJ = VSUB(T1e, T1b);
Chris@82 2350 TaK = VSUB(TaI, TaJ);
Chris@82 2351 Tcp = VADD(TaJ, TaI);
Chris@82 2352 T82 = VSUB(T4d, T4a);
Chris@82 2353 T83 = VMUL(LDK(KP707106781), VADD(T4U, T4V));
Chris@82 2354 T84 = VSUB(T82, T83);
Chris@82 2355 T9v = VADD(T82, T83);
Chris@82 2356 }
Chris@82 2357 }
Chris@82 2358 {
Chris@82 2359 V T1j, TaR, T1m, TaS, T4G, T4L, TaT, TaQ, T89, T88, T1q, TaM, T1t, TaN, T4v;
Chris@82 2360 V T4A, TaO, TaL, T86, T85;
Chris@82 2361 {
Chris@82 2362 V T4H, T4F, T4C, T4K;
Chris@82 2363 {
Chris@82 2364 V T1h, T1i, T4D, T4E;
Chris@82 2365 T1h = LD(&(ri[WS(is, 5)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2366 T1i = LD(&(ri[WS(is, 37)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2367 T1j = VADD(T1h, T1i);
Chris@82 2368 T4H = VSUB(T1h, T1i);
Chris@82 2369 T4D = LD(&(ii[WS(is, 5)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2370 T4E = LD(&(ii[WS(is, 37)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2371 T4F = VSUB(T4D, T4E);
Chris@82 2372 TaR = VADD(T4D, T4E);
Chris@82 2373 }
Chris@82 2374 {
Chris@82 2375 V T1k, T1l, T4I, T4J;
Chris@82 2376 T1k = LD(&(ri[WS(is, 21)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2377 T1l = LD(&(ri[WS(is, 53)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2378 T1m = VADD(T1k, T1l);
Chris@82 2379 T4C = VSUB(T1k, T1l);
Chris@82 2380 T4I = LD(&(ii[WS(is, 21)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2381 T4J = LD(&(ii[WS(is, 53)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2382 T4K = VSUB(T4I, T4J);
Chris@82 2383 TaS = VADD(T4I, T4J);
Chris@82 2384 }
Chris@82 2385 T4G = VADD(T4C, T4F);
Chris@82 2386 T4L = VSUB(T4H, T4K);
Chris@82 2387 TaT = VSUB(TaR, TaS);
Chris@82 2388 TaQ = VSUB(T1j, T1m);
Chris@82 2389 T89 = VADD(T4H, T4K);
Chris@82 2390 T88 = VSUB(T4F, T4C);
Chris@82 2391 }
Chris@82 2392 {
Chris@82 2393 V T4r, T4z, T4w, T4u;
Chris@82 2394 {
Chris@82 2395 V T1o, T1p, T4x, T4y;
Chris@82 2396 T1o = LD(&(ri[WS(is, 61)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2397 T1p = LD(&(ri[WS(is, 29)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2398 T1q = VADD(T1o, T1p);
Chris@82 2399 T4r = VSUB(T1o, T1p);
Chris@82 2400 T4x = LD(&(ii[WS(is, 61)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2401 T4y = LD(&(ii[WS(is, 29)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2402 T4z = VSUB(T4x, T4y);
Chris@82 2403 TaM = VADD(T4x, T4y);
Chris@82 2404 }
Chris@82 2405 {
Chris@82 2406 V T1r, T1s, T4s, T4t;
Chris@82 2407 T1r = LD(&(ri[WS(is, 13)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2408 T1s = LD(&(ri[WS(is, 45)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2409 T1t = VADD(T1r, T1s);
Chris@82 2410 T4w = VSUB(T1r, T1s);
Chris@82 2411 T4s = LD(&(ii[WS(is, 13)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2412 T4t = LD(&(ii[WS(is, 45)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2413 T4u = VSUB(T4s, T4t);
Chris@82 2414 TaN = VADD(T4s, T4t);
Chris@82 2415 }
Chris@82 2416 T4v = VSUB(T4r, T4u);
Chris@82 2417 T4A = VADD(T4w, T4z);
Chris@82 2418 TaO = VSUB(TaM, TaN);
Chris@82 2419 TaL = VSUB(T1q, T1t);
Chris@82 2420 T86 = VSUB(T4z, T4w);
Chris@82 2421 T85 = VADD(T4r, T4u);
Chris@82 2422 }
Chris@82 2423 {
Chris@82 2424 V T1n, T1u, Tb2, Tb3;
Chris@82 2425 T1n = VADD(T1j, T1m);
Chris@82 2426 T1u = VADD(T1q, T1t);
Chris@82 2427 T1v = VADD(T1n, T1u);
Chris@82 2428 Tdn = VSUB(T1u, T1n);
Chris@82 2429 Tb2 = VSUB(TaT, TaQ);
Chris@82 2430 Tb3 = VADD(TaL, TaO);
Chris@82 2431 Tb4 = VMUL(LDK(KP707106781), VSUB(Tb2, Tb3));
Chris@82 2432 Tcq = VMUL(LDK(KP707106781), VADD(Tb2, Tb3));
Chris@82 2433 }
Chris@82 2434 {
Chris@82 2435 V Tdq, Tdr, T4B, T4M;
Chris@82 2436 Tdq = VADD(TaR, TaS);
Chris@82 2437 Tdr = VADD(TaM, TaN);
Chris@82 2438 Tds = VSUB(Tdq, Tdr);
Chris@82 2439 Tek = VADD(Tdq, Tdr);
Chris@82 2440 T4B = VFNMS(LDK(KP923879532), T4A, VMUL(LDK(KP382683432), T4v));
Chris@82 2441 T4M = VFMA(LDK(KP923879532), T4G, VMUL(LDK(KP382683432), T4L));
Chris@82 2442 T4N = VSUB(T4B, T4M);
Chris@82 2443 T6P = VADD(T4M, T4B);
Chris@82 2444 }
Chris@82 2445 {
Chris@82 2446 V T4Y, T4Z, T8g, T8h;
Chris@82 2447 T4Y = VFNMS(LDK(KP923879532), T4L, VMUL(LDK(KP382683432), T4G));
Chris@82 2448 T4Z = VFMA(LDK(KP382683432), T4A, VMUL(LDK(KP923879532), T4v));
Chris@82 2449 T50 = VSUB(T4Y, T4Z);
Chris@82 2450 T6S = VADD(T4Y, T4Z);
Chris@82 2451 T8g = VFNMS(LDK(KP382683432), T89, VMUL(LDK(KP923879532), T88));
Chris@82 2452 T8h = VFMA(LDK(KP923879532), T86, VMUL(LDK(KP382683432), T85));
Chris@82 2453 T8i = VSUB(T8g, T8h);
Chris@82 2454 T9w = VADD(T8g, T8h);
Chris@82 2455 }
Chris@82 2456 {
Chris@82 2457 V TaP, TaU, T87, T8a;
Chris@82 2458 TaP = VSUB(TaL, TaO);
Chris@82 2459 TaU = VADD(TaQ, TaT);
Chris@82 2460 TaV = VMUL(LDK(KP707106781), VSUB(TaP, TaU));
Chris@82 2461 Tcn = VMUL(LDK(KP707106781), VADD(TaU, TaP));
Chris@82 2462 T87 = VFNMS(LDK(KP382683432), T86, VMUL(LDK(KP923879532), T85));
Chris@82 2463 T8a = VFMA(LDK(KP382683432), T88, VMUL(LDK(KP923879532), T89));
Chris@82 2464 T8b = VSUB(T87, T8a);
Chris@82 2465 T9t = VADD(T8a, T87);
Chris@82 2466 }
Chris@82 2467 }
Chris@82 2468 {
Chris@82 2469 V T1O, Tbc, T1R, Tbd, T5o, T5t, Tbf, Tbe, T8p, T8o, T1V, Tbi, T1Y, Tbj, T5z;
Chris@82 2470 V T5E, Tbk, Tbh, T8s, T8r;
Chris@82 2471 {
Chris@82 2472 V T5p, T5n, T5k, T5s;
Chris@82 2473 {
Chris@82 2474 V T1M, T1N, T5l, T5m;
Chris@82 2475 T1M = LD(&(ri[WS(is, 3)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2476 T1N = LD(&(ri[WS(is, 35)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2477 T1O = VADD(T1M, T1N);
Chris@82 2478 T5p = VSUB(T1M, T1N);
Chris@82 2479 T5l = LD(&(ii[WS(is, 3)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2480 T5m = LD(&(ii[WS(is, 35)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2481 T5n = VSUB(T5l, T5m);
Chris@82 2482 Tbc = VADD(T5l, T5m);
Chris@82 2483 }
Chris@82 2484 {
Chris@82 2485 V T1P, T1Q, T5q, T5r;
Chris@82 2486 T1P = LD(&(ri[WS(is, 19)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2487 T1Q = LD(&(ri[WS(is, 51)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2488 T1R = VADD(T1P, T1Q);
Chris@82 2489 T5k = VSUB(T1P, T1Q);
Chris@82 2490 T5q = LD(&(ii[WS(is, 19)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2491 T5r = LD(&(ii[WS(is, 51)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2492 T5s = VSUB(T5q, T5r);
Chris@82 2493 Tbd = VADD(T5q, T5r);
Chris@82 2494 }
Chris@82 2495 T5o = VADD(T5k, T5n);
Chris@82 2496 T5t = VSUB(T5p, T5s);
Chris@82 2497 Tbf = VSUB(T1O, T1R);
Chris@82 2498 Tbe = VSUB(Tbc, Tbd);
Chris@82 2499 T8p = VADD(T5p, T5s);
Chris@82 2500 T8o = VSUB(T5n, T5k);
Chris@82 2501 }
Chris@82 2502 {
Chris@82 2503 V T5A, T5y, T5v, T5D;
Chris@82 2504 {
Chris@82 2505 V T1T, T1U, T5w, T5x;
Chris@82 2506 T1T = LD(&(ri[WS(is, 59)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2507 T1U = LD(&(ri[WS(is, 27)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2508 T1V = VADD(T1T, T1U);
Chris@82 2509 T5A = VSUB(T1T, T1U);
Chris@82 2510 T5w = LD(&(ii[WS(is, 59)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2511 T5x = LD(&(ii[WS(is, 27)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2512 T5y = VSUB(T5w, T5x);
Chris@82 2513 Tbi = VADD(T5w, T5x);
Chris@82 2514 }
Chris@82 2515 {
Chris@82 2516 V T1W, T1X, T5B, T5C;
Chris@82 2517 T1W = LD(&(ri[WS(is, 11)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2518 T1X = LD(&(ri[WS(is, 43)]), ivs, &(ri[WS(is, 1)]));
Chris@82 2519 T1Y = VADD(T1W, T1X);
Chris@82 2520 T5v = VSUB(T1W, T1X);
Chris@82 2521 T5B = LD(&(ii[WS(is, 11)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2522 T5C = LD(&(ii[WS(is, 43)]), ivs, &(ii[WS(is, 1)]));
Chris@82 2523 T5D = VSUB(T5B, T5C);
Chris@82 2524 Tbj = VADD(T5B, T5C);
Chris@82 2525 }
Chris@82 2526 T5z = VADD(T5v, T5y);
Chris@82 2527 T5E = VSUB(T5A, T5D);
Chris@82 2528 Tbk = VSUB(Tbi, Tbj);
Chris@82 2529 Tbh = VSUB(T1V, T1Y);
Chris@82 2530 T8s = VADD(T5A, T5D);
Chris@82 2531 T8r = VSUB(T5y, T5v);
Chris@82 2532 }
Chris@82 2533 {
Chris@82 2534 V T1S, T1Z, Tbt, Tbu;
Chris@82 2535 T1S = VADD(T1O, T1R);
Chris@82 2536 T1Z = VADD(T1V, T1Y);
Chris@82 2537 T20 = VADD(T1S, T1Z);
Chris@82 2538 TdD = VSUB(T1Z, T1S);
Chris@82 2539 Tbt = VSUB(Tbh, Tbk);
Chris@82 2540 Tbu = VADD(Tbf, Tbe);
Chris@82 2541 Tbv = VMUL(LDK(KP707106781), VSUB(Tbt, Tbu));
Chris@82 2542 Tcu = VMUL(LDK(KP707106781), VADD(Tbu, Tbt));
Chris@82 2543 }
Chris@82 2544 {
Chris@82 2545 V Tdw, Tdx, T5u, T5F;
Chris@82 2546 Tdw = VADD(Tbc, Tbd);
Chris@82 2547 Tdx = VADD(Tbi, Tbj);
Chris@82 2548 Tdy = VSUB(Tdw, Tdx);
Chris@82 2549 Tep = VADD(Tdw, Tdx);
Chris@82 2550 T5u = VFNMS(LDK(KP923879532), T5t, VMUL(LDK(KP382683432), T5o));
Chris@82 2551 T5F = VFMA(LDK(KP382683432), T5z, VMUL(LDK(KP923879532), T5E));
Chris@82 2552 T5G = VSUB(T5u, T5F);
Chris@82 2553 T6Z = VADD(T5u, T5F);
Chris@82 2554 }
Chris@82 2555 {
Chris@82 2556 V T5R, T5S, T8z, T8A;
Chris@82 2557 T5R = VFNMS(LDK(KP923879532), T5z, VMUL(LDK(KP382683432), T5E));
Chris@82 2558 T5S = VFMA(LDK(KP923879532), T5o, VMUL(LDK(KP382683432), T5t));
Chris@82 2559 T5T = VSUB(T5R, T5S);
Chris@82 2560 T6W = VADD(T5S, T5R);
Chris@82 2561 T8z = VFNMS(LDK(KP382683432), T8r, VMUL(LDK(KP923879532), T8s));
Chris@82 2562 T8A = VFMA(LDK(KP382683432), T8o, VMUL(LDK(KP923879532), T8p));
Chris@82 2563 T8B = VSUB(T8z, T8A);
Chris@82 2564 T9A = VADD(T8A, T8z);
Chris@82 2565 }
Chris@82 2566 {
Chris@82 2567 V Tbg, Tbl, T8q, T8t;
Chris@82 2568 Tbg = VSUB(Tbe, Tbf);
Chris@82 2569 Tbl = VADD(Tbh, Tbk);
Chris@82 2570 Tbm = VMUL(LDK(KP707106781), VSUB(Tbg, Tbl));
Chris@82 2571 Tcx = VMUL(LDK(KP707106781), VADD(Tbg, Tbl));
Chris@82 2572 T8q = VFNMS(LDK(KP382683432), T8p, VMUL(LDK(KP923879532), T8o));
Chris@82 2573 T8t = VFMA(LDK(KP923879532), T8r, VMUL(LDK(KP382683432), T8s));
Chris@82 2574 T8u = VSUB(T8q, T8t);
Chris@82 2575 T9D = VADD(T8q, T8t);
Chris@82 2576 }
Chris@82 2577 }
Chris@82 2578 {
Chris@82 2579 V TeJ, TeK, TeL, TeM, TeN, TeO, TeP, TeQ, TeR, TeS, TeT, TeU, TeV, TeW, TeX;
Chris@82 2580 V TeY, TeZ, Tf0, Tf1, Tf2, Tf3, Tf4, Tf5, Tf6, Tf7, Tf8, Tf9, Tfa, Tfb, Tfc;
Chris@82 2581 V Tfd, Tfe, Tff, Tfg, Tfh, Tfi, Tfj, Tfk, Tfl, Tfm, Tfn, Tfo, Tfp, Tfq, Tfr;
Chris@82 2582 V Tfs, Tft, Tfu;
Chris@82 2583 {
Chris@82 2584 V T11, TeD, TeG, TeI, T22, T23, T34, TeH;
Chris@82 2585 {
Chris@82 2586 V Tv, T10, TeE, TeF;
Chris@82 2587 Tv = VADD(Tf, Tu);
Chris@82 2588 T10 = VADD(TK, TZ);
Chris@82 2589 T11 = VADD(Tv, T10);
Chris@82 2590 TeD = VSUB(Tv, T10);
Chris@82 2591 TeE = VADD(Tej, Tek);
Chris@82 2592 TeF = VADD(Teo, Tep);
Chris@82 2593 TeG = VSUB(TeE, TeF);
Chris@82 2594 TeI = VADD(TeE, TeF);
Chris@82 2595 }
Chris@82 2596 {
Chris@82 2597 V T1w, T21, T2y, T33;
Chris@82 2598 T1w = VADD(T1g, T1v);
Chris@82 2599 T21 = VADD(T1L, T20);
Chris@82 2600 T22 = VADD(T1w, T21);
Chris@82 2601 T23 = VSUB(T21, T1w);
Chris@82 2602 T2y = VADD(T2i, T2x);
Chris@82 2603 T33 = VADD(T2N, T32);
Chris@82 2604 T34 = VSUB(T2y, T33);
Chris@82 2605 TeH = VADD(T2y, T33);
Chris@82 2606 }
Chris@82 2607 TeJ = VSUB(T11, T22);
Chris@82 2608 STM4(&(ro[32]), TeJ, ovs, &(ro[0]));
Chris@82 2609 TeK = VSUB(TeH, TeI);
Chris@82 2610 STM4(&(io[32]), TeK, ovs, &(io[0]));
Chris@82 2611 TeL = VADD(T11, T22);
Chris@82 2612 STM4(&(ro[0]), TeL, ovs, &(ro[0]));
Chris@82 2613 TeM = VADD(TeH, TeI);
Chris@82 2614 STM4(&(io[0]), TeM, ovs, &(io[0]));
Chris@82 2615 TeN = VADD(T23, T34);
Chris@82 2616 STM4(&(io[16]), TeN, ovs, &(io[0]));
Chris@82 2617 TeO = VADD(TeD, TeG);
Chris@82 2618 STM4(&(ro[16]), TeO, ovs, &(ro[0]));
Chris@82 2619 TeP = VSUB(T34, T23);
Chris@82 2620 STM4(&(io[48]), TeP, ovs, &(io[0]));
Chris@82 2621 TeQ = VSUB(TeD, TeG);
Chris@82 2622 STM4(&(ro[48]), TeQ, ovs, &(ro[0]));
Chris@82 2623 }
Chris@82 2624 {
Chris@82 2625 V Teh, Tex, Tev, TeB, Tem, Tey, Ter, Tez;
Chris@82 2626 {
Chris@82 2627 V Tef, Teg, Tet, Teu;
Chris@82 2628 Tef = VSUB(Tf, Tu);
Chris@82 2629 Teg = VSUB(T2N, T32);
Chris@82 2630 Teh = VADD(Tef, Teg);
Chris@82 2631 Tex = VSUB(Tef, Teg);
Chris@82 2632 Tet = VSUB(T2i, T2x);
Chris@82 2633 Teu = VSUB(TZ, TK);
Chris@82 2634 Tev = VSUB(Tet, Teu);
Chris@82 2635 TeB = VADD(Teu, Tet);
Chris@82 2636 }
Chris@82 2637 {
Chris@82 2638 V Tei, Tel, Ten, Teq;
Chris@82 2639 Tei = VSUB(T1g, T1v);
Chris@82 2640 Tel = VSUB(Tej, Tek);
Chris@82 2641 Tem = VADD(Tei, Tel);
Chris@82 2642 Tey = VSUB(Tel, Tei);
Chris@82 2643 Ten = VSUB(T1L, T20);
Chris@82 2644 Teq = VSUB(Teo, Tep);
Chris@82 2645 Ter = VSUB(Ten, Teq);
Chris@82 2646 Tez = VADD(Ten, Teq);
Chris@82 2647 }
Chris@82 2648 {
Chris@82 2649 V Tes, TeC, Tew, TeA;
Chris@82 2650 Tes = VMUL(LDK(KP707106781), VADD(Tem, Ter));
Chris@82 2651 TeR = VSUB(Teh, Tes);
Chris@82 2652 STM4(&(ro[40]), TeR, ovs, &(ro[0]));
Chris@82 2653 TeS = VADD(Teh, Tes);
Chris@82 2654 STM4(&(ro[8]), TeS, ovs, &(ro[0]));
Chris@82 2655 TeC = VMUL(LDK(KP707106781), VADD(Tey, Tez));
Chris@82 2656 TeT = VSUB(TeB, TeC);
Chris@82 2657 STM4(&(io[40]), TeT, ovs, &(io[0]));
Chris@82 2658 TeU = VADD(TeB, TeC);
Chris@82 2659 STM4(&(io[8]), TeU, ovs, &(io[0]));
Chris@82 2660 Tew = VMUL(LDK(KP707106781), VSUB(Ter, Tem));
Chris@82 2661 TeV = VSUB(Tev, Tew);
Chris@82 2662 STM4(&(io[56]), TeV, ovs, &(io[0]));
Chris@82 2663 TeW = VADD(Tev, Tew);
Chris@82 2664 STM4(&(io[24]), TeW, ovs, &(io[0]));
Chris@82 2665 TeA = VMUL(LDK(KP707106781), VSUB(Tey, Tez));
Chris@82 2666 TeX = VSUB(Tex, TeA);
Chris@82 2667 STM4(&(ro[56]), TeX, ovs, &(ro[0]));
Chris@82 2668 TeY = VADD(Tex, TeA);
Chris@82 2669 STM4(&(ro[24]), TeY, ovs, &(ro[0]));
Chris@82 2670 }
Chris@82 2671 }
Chris@82 2672 {
Chris@82 2673 V Tdb, TdV, Te5, TdJ, Tdi, Te6, Te3, Teb, TdM, TdW, Tdu, TdQ, Te0, Tea, TdF;
Chris@82 2674 V TdR;
Chris@82 2675 {
Chris@82 2676 V Tde, Tdh, Tdo, Tdt;
Chris@82 2677 Tdb = VSUB(Td9, Tda);
Chris@82 2678 TdV = VADD(Td9, Tda);
Chris@82 2679 Te5 = VADD(TdI, TdH);
Chris@82 2680 TdJ = VSUB(TdH, TdI);
Chris@82 2681 Tde = VSUB(Tdc, Tdd);
Chris@82 2682 Tdh = VADD(Tdf, Tdg);
Chris@82 2683 Tdi = VMUL(LDK(KP707106781), VSUB(Tde, Tdh));
Chris@82 2684 Te6 = VMUL(LDK(KP707106781), VADD(Tde, Tdh));
Chris@82 2685 {
Chris@82 2686 V Te1, Te2, TdK, TdL;
Chris@82 2687 Te1 = VADD(Tdv, Tdy);
Chris@82 2688 Te2 = VADD(TdD, TdC);
Chris@82 2689 Te3 = VFNMS(LDK(KP382683432), Te2, VMUL(LDK(KP923879532), Te1));
Chris@82 2690 Teb = VFMA(LDK(KP923879532), Te2, VMUL(LDK(KP382683432), Te1));
Chris@82 2691 TdK = VSUB(Tdf, Tdg);
Chris@82 2692 TdL = VADD(Tdd, Tdc);
Chris@82 2693 TdM = VMUL(LDK(KP707106781), VSUB(TdK, TdL));
Chris@82 2694 TdW = VMUL(LDK(KP707106781), VADD(TdL, TdK));
Chris@82 2695 }
Chris@82 2696 Tdo = VSUB(Tdm, Tdn);
Chris@82 2697 Tdt = VSUB(Tdp, Tds);
Chris@82 2698 Tdu = VFMA(LDK(KP923879532), Tdo, VMUL(LDK(KP382683432), Tdt));
Chris@82 2699 TdQ = VFNMS(LDK(KP923879532), Tdt, VMUL(LDK(KP382683432), Tdo));
Chris@82 2700 {
Chris@82 2701 V TdY, TdZ, Tdz, TdE;
Chris@82 2702 TdY = VADD(Tdn, Tdm);
Chris@82 2703 TdZ = VADD(Tdp, Tds);
Chris@82 2704 Te0 = VFMA(LDK(KP382683432), TdY, VMUL(LDK(KP923879532), TdZ));
Chris@82 2705 Tea = VFNMS(LDK(KP382683432), TdZ, VMUL(LDK(KP923879532), TdY));
Chris@82 2706 Tdz = VSUB(Tdv, Tdy);
Chris@82 2707 TdE = VSUB(TdC, TdD);
Chris@82 2708 TdF = VFNMS(LDK(KP923879532), TdE, VMUL(LDK(KP382683432), Tdz));
Chris@82 2709 TdR = VFMA(LDK(KP382683432), TdE, VMUL(LDK(KP923879532), Tdz));
Chris@82 2710 }
Chris@82 2711 }
Chris@82 2712 {
Chris@82 2713 V Tdj, TdG, TdT, TdU;
Chris@82 2714 Tdj = VADD(Tdb, Tdi);
Chris@82 2715 TdG = VADD(Tdu, TdF);
Chris@82 2716 TeZ = VSUB(Tdj, TdG);
Chris@82 2717 STM4(&(ro[44]), TeZ, ovs, &(ro[0]));
Chris@82 2718 Tf0 = VADD(Tdj, TdG);
Chris@82 2719 STM4(&(ro[12]), Tf0, ovs, &(ro[0]));
Chris@82 2720 TdT = VADD(TdJ, TdM);
Chris@82 2721 TdU = VADD(TdQ, TdR);
Chris@82 2722 Tf1 = VSUB(TdT, TdU);
Chris@82 2723 STM4(&(io[44]), Tf1, ovs, &(io[0]));
Chris@82 2724 Tf2 = VADD(TdT, TdU);
Chris@82 2725 STM4(&(io[12]), Tf2, ovs, &(io[0]));
Chris@82 2726 }
Chris@82 2727 {
Chris@82 2728 V TdN, TdO, TdP, TdS;
Chris@82 2729 TdN = VSUB(TdJ, TdM);
Chris@82 2730 TdO = VSUB(TdF, Tdu);
Chris@82 2731 Tf3 = VSUB(TdN, TdO);
Chris@82 2732 STM4(&(io[60]), Tf3, ovs, &(io[0]));
Chris@82 2733 Tf4 = VADD(TdN, TdO);
Chris@82 2734 STM4(&(io[28]), Tf4, ovs, &(io[0]));
Chris@82 2735 TdP = VSUB(Tdb, Tdi);
Chris@82 2736 TdS = VSUB(TdQ, TdR);
Chris@82 2737 Tf5 = VSUB(TdP, TdS);
Chris@82 2738 STM4(&(ro[60]), Tf5, ovs, &(ro[0]));
Chris@82 2739 Tf6 = VADD(TdP, TdS);
Chris@82 2740 STM4(&(ro[28]), Tf6, ovs, &(ro[0]));
Chris@82 2741 }
Chris@82 2742 {
Chris@82 2743 V TdX, Te4, Ted, Tee;
Chris@82 2744 TdX = VADD(TdV, TdW);
Chris@82 2745 Te4 = VADD(Te0, Te3);
Chris@82 2746 Tf7 = VSUB(TdX, Te4);
Chris@82 2747 STM4(&(ro[36]), Tf7, ovs, &(ro[0]));
Chris@82 2748 Tf8 = VADD(TdX, Te4);
Chris@82 2749 STM4(&(ro[4]), Tf8, ovs, &(ro[0]));
Chris@82 2750 Ted = VADD(Te5, Te6);
Chris@82 2751 Tee = VADD(Tea, Teb);
Chris@82 2752 Tf9 = VSUB(Ted, Tee);
Chris@82 2753 STM4(&(io[36]), Tf9, ovs, &(io[0]));
Chris@82 2754 Tfa = VADD(Ted, Tee);
Chris@82 2755 STM4(&(io[4]), Tfa, ovs, &(io[0]));
Chris@82 2756 }
Chris@82 2757 {
Chris@82 2758 V Te7, Te8, Te9, Tec;
Chris@82 2759 Te7 = VSUB(Te5, Te6);
Chris@82 2760 Te8 = VSUB(Te3, Te0);
Chris@82 2761 Tfb = VSUB(Te7, Te8);
Chris@82 2762 STM4(&(io[52]), Tfb, ovs, &(io[0]));
Chris@82 2763 Tfc = VADD(Te7, Te8);
Chris@82 2764 STM4(&(io[20]), Tfc, ovs, &(io[0]));
Chris@82 2765 Te9 = VSUB(TdV, TdW);
Chris@82 2766 Tec = VSUB(Tea, Teb);
Chris@82 2767 Tfd = VSUB(Te9, Tec);
Chris@82 2768 STM4(&(ro[52]), Tfd, ovs, &(ro[0]));
Chris@82 2769 Tfe = VADD(Te9, Tec);
Chris@82 2770 STM4(&(ro[20]), Tfe, ovs, &(ro[0]));
Chris@82 2771 }
Chris@82 2772 }
Chris@82 2773 {
Chris@82 2774 V Tcd, TcP, TcD, TcZ, Tck, Td0, TcX, Td5, Tcs, TcK, TcG, TcQ, TcU, Td4, Tcz;
Chris@82 2775 V TcL, Tcc, TcC;
Chris@82 2776 Tcc = VMUL(LDK(KP707106781), VADD(TbD, TbC));
Chris@82 2777 Tcd = VSUB(Tcb, Tcc);
Chris@82 2778 TcP = VADD(Tcb, Tcc);
Chris@82 2779 TcC = VMUL(LDK(KP707106781), VADD(Tak, Tan));
Chris@82 2780 TcD = VSUB(TcB, TcC);
Chris@82 2781 TcZ = VADD(TcB, TcC);
Chris@82 2782 {
Chris@82 2783 V Tcg, Tcj, TcV, TcW;
Chris@82 2784 Tcg = VFNMS(LDK(KP382683432), Tcf, VMUL(LDK(KP923879532), Tce));
Chris@82 2785 Tcj = VFMA(LDK(KP923879532), Tch, VMUL(LDK(KP382683432), Tci));
Chris@82 2786 Tck = VSUB(Tcg, Tcj);
Chris@82 2787 Td0 = VADD(Tcg, Tcj);
Chris@82 2788 TcV = VADD(Tct, Tcu);
Chris@82 2789 TcW = VADD(Tcw, Tcx);
Chris@82 2790 TcX = VFNMS(LDK(KP195090322), TcW, VMUL(LDK(KP980785280), TcV));
Chris@82 2791 Td5 = VFMA(LDK(KP195090322), TcV, VMUL(LDK(KP980785280), TcW));
Chris@82 2792 }
Chris@82 2793 {
Chris@82 2794 V Tco, Tcr, TcE, TcF;
Chris@82 2795 Tco = VSUB(Tcm, Tcn);
Chris@82 2796 Tcr = VSUB(Tcp, Tcq);
Chris@82 2797 Tcs = VFMA(LDK(KP555570233), Tco, VMUL(LDK(KP831469612), Tcr));
Chris@82 2798 TcK = VFNMS(LDK(KP831469612), Tco, VMUL(LDK(KP555570233), Tcr));
Chris@82 2799 TcE = VFNMS(LDK(KP382683432), Tch, VMUL(LDK(KP923879532), Tci));
Chris@82 2800 TcF = VFMA(LDK(KP382683432), Tce, VMUL(LDK(KP923879532), Tcf));
Chris@82 2801 TcG = VSUB(TcE, TcF);
Chris@82 2802 TcQ = VADD(TcF, TcE);
Chris@82 2803 }
Chris@82 2804 {
Chris@82 2805 V TcS, TcT, Tcv, Tcy;
Chris@82 2806 TcS = VADD(Tcm, Tcn);
Chris@82 2807 TcT = VADD(Tcp, Tcq);
Chris@82 2808 TcU = VFMA(LDK(KP980785280), TcS, VMUL(LDK(KP195090322), TcT));
Chris@82 2809 Td4 = VFNMS(LDK(KP195090322), TcS, VMUL(LDK(KP980785280), TcT));
Chris@82 2810 Tcv = VSUB(Tct, Tcu);
Chris@82 2811 Tcy = VSUB(Tcw, Tcx);
Chris@82 2812 Tcz = VFNMS(LDK(KP831469612), Tcy, VMUL(LDK(KP555570233), Tcv));
Chris@82 2813 TcL = VFMA(LDK(KP831469612), Tcv, VMUL(LDK(KP555570233), Tcy));
Chris@82 2814 }
Chris@82 2815 {
Chris@82 2816 V Tcl, TcA, TcN, TcO;
Chris@82 2817 Tcl = VADD(Tcd, Tck);
Chris@82 2818 TcA = VADD(Tcs, Tcz);
Chris@82 2819 Tff = VSUB(Tcl, TcA);
Chris@82 2820 STM4(&(ro[42]), Tff, ovs, &(ro[0]));
Chris@82 2821 Tfg = VADD(Tcl, TcA);
Chris@82 2822 STM4(&(ro[10]), Tfg, ovs, &(ro[0]));
Chris@82 2823 TcN = VADD(TcD, TcG);
Chris@82 2824 TcO = VADD(TcK, TcL);
Chris@82 2825 Tfh = VSUB(TcN, TcO);
Chris@82 2826 STM4(&(io[42]), Tfh, ovs, &(io[0]));
Chris@82 2827 Tfi = VADD(TcN, TcO);
Chris@82 2828 STM4(&(io[10]), Tfi, ovs, &(io[0]));
Chris@82 2829 }
Chris@82 2830 {
Chris@82 2831 V TcH, TcI, TcJ, TcM;
Chris@82 2832 TcH = VSUB(TcD, TcG);
Chris@82 2833 TcI = VSUB(Tcz, Tcs);
Chris@82 2834 Tfj = VSUB(TcH, TcI);
Chris@82 2835 STM4(&(io[58]), Tfj, ovs, &(io[0]));
Chris@82 2836 Tfk = VADD(TcH, TcI);
Chris@82 2837 STM4(&(io[26]), Tfk, ovs, &(io[0]));
Chris@82 2838 TcJ = VSUB(Tcd, Tck);
Chris@82 2839 TcM = VSUB(TcK, TcL);
Chris@82 2840 Tfl = VSUB(TcJ, TcM);
Chris@82 2841 STM4(&(ro[58]), Tfl, ovs, &(ro[0]));
Chris@82 2842 Tfm = VADD(TcJ, TcM);
Chris@82 2843 STM4(&(ro[26]), Tfm, ovs, &(ro[0]));
Chris@82 2844 }
Chris@82 2845 {
Chris@82 2846 V TcR, TcY, Td7, Td8;
Chris@82 2847 TcR = VADD(TcP, TcQ);
Chris@82 2848 TcY = VADD(TcU, TcX);
Chris@82 2849 Tfn = VSUB(TcR, TcY);
Chris@82 2850 STM4(&(ro[34]), Tfn, ovs, &(ro[0]));
Chris@82 2851 Tfo = VADD(TcR, TcY);
Chris@82 2852 STM4(&(ro[2]), Tfo, ovs, &(ro[0]));
Chris@82 2853 Td7 = VADD(TcZ, Td0);
Chris@82 2854 Td8 = VADD(Td4, Td5);
Chris@82 2855 Tfp = VSUB(Td7, Td8);
Chris@82 2856 STM4(&(io[34]), Tfp, ovs, &(io[0]));
Chris@82 2857 Tfq = VADD(Td7, Td8);
Chris@82 2858 STM4(&(io[2]), Tfq, ovs, &(io[0]));
Chris@82 2859 }
Chris@82 2860 {
Chris@82 2861 V Td1, Td2, Td3, Td6;
Chris@82 2862 Td1 = VSUB(TcZ, Td0);
Chris@82 2863 Td2 = VSUB(TcX, TcU);
Chris@82 2864 Tfr = VSUB(Td1, Td2);
Chris@82 2865 STM4(&(io[50]), Tfr, ovs, &(io[0]));
Chris@82 2866 Tfs = VADD(Td1, Td2);
Chris@82 2867 STM4(&(io[18]), Tfs, ovs, &(io[0]));
Chris@82 2868 Td3 = VSUB(TcP, TcQ);
Chris@82 2869 Td6 = VSUB(Td4, Td5);
Chris@82 2870 Tft = VSUB(Td3, Td6);
Chris@82 2871 STM4(&(ro[50]), Tft, ovs, &(ro[0]));
Chris@82 2872 Tfu = VADD(Td3, Td6);
Chris@82 2873 STM4(&(ro[18]), Tfu, ovs, &(ro[0]));
Chris@82 2874 }
Chris@82 2875 }
Chris@82 2876 {
Chris@82 2877 V Tfv, Tfw, Tfx, Tfy, Tfz, TfA, TfB, TfC, TfD, TfE, TfF, TfG, TfH, TfI, TfJ;
Chris@82 2878 V TfK, TfL, TfM, TfN, TfO, TfP, TfQ, TfR, TfS, TfT, TfU, TfV, TfW, TfX, TfY;
Chris@82 2879 V TfZ, Tg0;
Chris@82 2880 {
Chris@82 2881 V Tap, TbR, TbF, Tc1, TaE, Tc2, TbZ, Tc7, Tb6, TbM, TbI, TbS, TbW, Tc6, Tbx;
Chris@82 2882 V TbN, Tao, TbE;
Chris@82 2883 Tao = VMUL(LDK(KP707106781), VSUB(Tak, Tan));
Chris@82 2884 Tap = VSUB(Tah, Tao);
Chris@82 2885 TbR = VADD(Tah, Tao);
Chris@82 2886 TbE = VMUL(LDK(KP707106781), VSUB(TbC, TbD));
Chris@82 2887 TbF = VSUB(TbB, TbE);
Chris@82 2888 Tc1 = VADD(TbB, TbE);
Chris@82 2889 {
Chris@82 2890 V Taw, TaD, TbX, TbY;
Chris@82 2891 Taw = VFNMS(LDK(KP923879532), Tav, VMUL(LDK(KP382683432), Tas));
Chris@82 2892 TaD = VFMA(LDK(KP382683432), Taz, VMUL(LDK(KP923879532), TaC));
Chris@82 2893 TaE = VSUB(Taw, TaD);
Chris@82 2894 Tc2 = VADD(Taw, TaD);
Chris@82 2895 TbX = VADD(Tbb, Tbm);
Chris@82 2896 TbY = VADD(Tbs, Tbv);
Chris@82 2897 TbZ = VFNMS(LDK(KP555570233), TbY, VMUL(LDK(KP831469612), TbX));
Chris@82 2898 Tc7 = VFMA(LDK(KP831469612), TbY, VMUL(LDK(KP555570233), TbX));
Chris@82 2899 }
Chris@82 2900 {
Chris@82 2901 V TaW, Tb5, TbG, TbH;
Chris@82 2902 TaW = VSUB(TaK, TaV);
Chris@82 2903 Tb5 = VSUB(Tb1, Tb4);
Chris@82 2904 Tb6 = VFMA(LDK(KP980785280), TaW, VMUL(LDK(KP195090322), Tb5));
Chris@82 2905 TbM = VFNMS(LDK(KP980785280), Tb5, VMUL(LDK(KP195090322), TaW));
Chris@82 2906 TbG = VFNMS(LDK(KP923879532), Taz, VMUL(LDK(KP382683432), TaC));
Chris@82 2907 TbH = VFMA(LDK(KP923879532), Tas, VMUL(LDK(KP382683432), Tav));
Chris@82 2908 TbI = VSUB(TbG, TbH);
Chris@82 2909 TbS = VADD(TbH, TbG);
Chris@82 2910 }
Chris@82 2911 {
Chris@82 2912 V TbU, TbV, Tbn, Tbw;
Chris@82 2913 TbU = VADD(TaK, TaV);
Chris@82 2914 TbV = VADD(Tb1, Tb4);
Chris@82 2915 TbW = VFMA(LDK(KP555570233), TbU, VMUL(LDK(KP831469612), TbV));
Chris@82 2916 Tc6 = VFNMS(LDK(KP555570233), TbV, VMUL(LDK(KP831469612), TbU));
Chris@82 2917 Tbn = VSUB(Tbb, Tbm);
Chris@82 2918 Tbw = VSUB(Tbs, Tbv);
Chris@82 2919 Tbx = VFNMS(LDK(KP980785280), Tbw, VMUL(LDK(KP195090322), Tbn));
Chris@82 2920 TbN = VFMA(LDK(KP195090322), Tbw, VMUL(LDK(KP980785280), Tbn));
Chris@82 2921 }
Chris@82 2922 {
Chris@82 2923 V TaF, Tby, TbP, TbQ;
Chris@82 2924 TaF = VADD(Tap, TaE);
Chris@82 2925 Tby = VADD(Tb6, Tbx);
Chris@82 2926 Tfv = VSUB(TaF, Tby);
Chris@82 2927 STM4(&(ro[46]), Tfv, ovs, &(ro[0]));
Chris@82 2928 Tfw = VADD(TaF, Tby);
Chris@82 2929 STM4(&(ro[14]), Tfw, ovs, &(ro[0]));
Chris@82 2930 TbP = VADD(TbF, TbI);
Chris@82 2931 TbQ = VADD(TbM, TbN);
Chris@82 2932 Tfx = VSUB(TbP, TbQ);
Chris@82 2933 STM4(&(io[46]), Tfx, ovs, &(io[0]));
Chris@82 2934 Tfy = VADD(TbP, TbQ);
Chris@82 2935 STM4(&(io[14]), Tfy, ovs, &(io[0]));
Chris@82 2936 }
Chris@82 2937 {
Chris@82 2938 V TbJ, TbK, TbL, TbO;
Chris@82 2939 TbJ = VSUB(TbF, TbI);
Chris@82 2940 TbK = VSUB(Tbx, Tb6);
Chris@82 2941 Tfz = VSUB(TbJ, TbK);
Chris@82 2942 STM4(&(io[62]), Tfz, ovs, &(io[0]));
Chris@82 2943 TfA = VADD(TbJ, TbK);
Chris@82 2944 STM4(&(io[30]), TfA, ovs, &(io[0]));
Chris@82 2945 TbL = VSUB(Tap, TaE);
Chris@82 2946 TbO = VSUB(TbM, TbN);
Chris@82 2947 TfB = VSUB(TbL, TbO);
Chris@82 2948 STM4(&(ro[62]), TfB, ovs, &(ro[0]));
Chris@82 2949 TfC = VADD(TbL, TbO);
Chris@82 2950 STM4(&(ro[30]), TfC, ovs, &(ro[0]));
Chris@82 2951 }
Chris@82 2952 {
Chris@82 2953 V TbT, Tc0, Tc9, Tca;
Chris@82 2954 TbT = VADD(TbR, TbS);
Chris@82 2955 Tc0 = VADD(TbW, TbZ);
Chris@82 2956 TfD = VSUB(TbT, Tc0);
Chris@82 2957 STM4(&(ro[38]), TfD, ovs, &(ro[0]));
Chris@82 2958 TfE = VADD(TbT, Tc0);
Chris@82 2959 STM4(&(ro[6]), TfE, ovs, &(ro[0]));
Chris@82 2960 Tc9 = VADD(Tc1, Tc2);
Chris@82 2961 Tca = VADD(Tc6, Tc7);
Chris@82 2962 TfF = VSUB(Tc9, Tca);
Chris@82 2963 STM4(&(io[38]), TfF, ovs, &(io[0]));
Chris@82 2964 TfG = VADD(Tc9, Tca);
Chris@82 2965 STM4(&(io[6]), TfG, ovs, &(io[0]));
Chris@82 2966 }
Chris@82 2967 {
Chris@82 2968 V Tc3, Tc4, Tc5, Tc8;
Chris@82 2969 Tc3 = VSUB(Tc1, Tc2);
Chris@82 2970 Tc4 = VSUB(TbZ, TbW);
Chris@82 2971 TfH = VSUB(Tc3, Tc4);
Chris@82 2972 STM4(&(io[54]), TfH, ovs, &(io[0]));
Chris@82 2973 TfI = VADD(Tc3, Tc4);
Chris@82 2974 STM4(&(io[22]), TfI, ovs, &(io[0]));
Chris@82 2975 Tc5 = VSUB(TbR, TbS);
Chris@82 2976 Tc8 = VSUB(Tc6, Tc7);
Chris@82 2977 TfJ = VSUB(Tc5, Tc8);
Chris@82 2978 STM4(&(ro[54]), TfJ, ovs, &(ro[0]));
Chris@82 2979 TfK = VADD(Tc5, Tc8);
Chris@82 2980 STM4(&(ro[22]), TfK, ovs, &(ro[0]));
Chris@82 2981 }
Chris@82 2982 }
Chris@82 2983 {
Chris@82 2984 V T6F, T7h, T7m, T7w, T7p, T7x, T6M, T7s, T6U, T7c, T75, T7r, T78, T7i, T71;
Chris@82 2985 V T7d;
Chris@82 2986 {
Chris@82 2987 V T6D, T6E, T7k, T7l;
Chris@82 2988 T6D = VADD(T37, T3e);
Chris@82 2989 T6E = VADD(T65, T64);
Chris@82 2990 T6F = VSUB(T6D, T6E);
Chris@82 2991 T7h = VADD(T6D, T6E);
Chris@82 2992 T7k = VADD(T6O, T6P);
Chris@82 2993 T7l = VADD(T6R, T6S);
Chris@82 2994 T7m = VFMA(LDK(KP956940335), T7k, VMUL(LDK(KP290284677), T7l));
Chris@82 2995 T7w = VFNMS(LDK(KP290284677), T7k, VMUL(LDK(KP956940335), T7l));
Chris@82 2996 }
Chris@82 2997 {
Chris@82 2998 V T7n, T7o, T6I, T6L;
Chris@82 2999 T7n = VADD(T6V, T6W);
Chris@82 3000 T7o = VADD(T6Y, T6Z);
Chris@82 3001 T7p = VFNMS(LDK(KP290284677), T7o, VMUL(LDK(KP956940335), T7n));
Chris@82 3002 T7x = VFMA(LDK(KP290284677), T7n, VMUL(LDK(KP956940335), T7o));
Chris@82 3003 T6I = VFNMS(LDK(KP555570233), T6H, VMUL(LDK(KP831469612), T6G));
Chris@82 3004 T6L = VFMA(LDK(KP831469612), T6J, VMUL(LDK(KP555570233), T6K));
Chris@82 3005 T6M = VSUB(T6I, T6L);
Chris@82 3006 T7s = VADD(T6I, T6L);
Chris@82 3007 }
Chris@82 3008 {
Chris@82 3009 V T6Q, T6T, T73, T74;
Chris@82 3010 T6Q = VSUB(T6O, T6P);
Chris@82 3011 T6T = VSUB(T6R, T6S);
Chris@82 3012 T6U = VFMA(LDK(KP471396736), T6Q, VMUL(LDK(KP881921264), T6T));
Chris@82 3013 T7c = VFNMS(LDK(KP881921264), T6Q, VMUL(LDK(KP471396736), T6T));
Chris@82 3014 T73 = VADD(T5Z, T62);
Chris@82 3015 T74 = VADD(T3m, T3t);
Chris@82 3016 T75 = VSUB(T73, T74);
Chris@82 3017 T7r = VADD(T73, T74);
Chris@82 3018 }
Chris@82 3019 {
Chris@82 3020 V T76, T77, T6X, T70;
Chris@82 3021 T76 = VFNMS(LDK(KP555570233), T6J, VMUL(LDK(KP831469612), T6K));
Chris@82 3022 T77 = VFMA(LDK(KP555570233), T6G, VMUL(LDK(KP831469612), T6H));
Chris@82 3023 T78 = VSUB(T76, T77);
Chris@82 3024 T7i = VADD(T77, T76);
Chris@82 3025 T6X = VSUB(T6V, T6W);
Chris@82 3026 T70 = VSUB(T6Y, T6Z);
Chris@82 3027 T71 = VFNMS(LDK(KP881921264), T70, VMUL(LDK(KP471396736), T6X));
Chris@82 3028 T7d = VFMA(LDK(KP881921264), T6X, VMUL(LDK(KP471396736), T70));
Chris@82 3029 }
Chris@82 3030 {
Chris@82 3031 V T6N, T72, T7f, T7g;
Chris@82 3032 T6N = VADD(T6F, T6M);
Chris@82 3033 T72 = VADD(T6U, T71);
Chris@82 3034 TfL = VSUB(T6N, T72);
Chris@82 3035 STM4(&(ro[43]), TfL, ovs, &(ro[1]));
Chris@82 3036 TfM = VADD(T6N, T72);
Chris@82 3037 STM4(&(ro[11]), TfM, ovs, &(ro[1]));
Chris@82 3038 T7f = VADD(T75, T78);
Chris@82 3039 T7g = VADD(T7c, T7d);
Chris@82 3040 TfN = VSUB(T7f, T7g);
Chris@82 3041 STM4(&(io[43]), TfN, ovs, &(io[1]));
Chris@82 3042 TfO = VADD(T7f, T7g);
Chris@82 3043 STM4(&(io[11]), TfO, ovs, &(io[1]));
Chris@82 3044 }
Chris@82 3045 {
Chris@82 3046 V T79, T7a, T7b, T7e;
Chris@82 3047 T79 = VSUB(T75, T78);
Chris@82 3048 T7a = VSUB(T71, T6U);
Chris@82 3049 TfP = VSUB(T79, T7a);
Chris@82 3050 STM4(&(io[59]), TfP, ovs, &(io[1]));
Chris@82 3051 TfQ = VADD(T79, T7a);
Chris@82 3052 STM4(&(io[27]), TfQ, ovs, &(io[1]));
Chris@82 3053 T7b = VSUB(T6F, T6M);
Chris@82 3054 T7e = VSUB(T7c, T7d);
Chris@82 3055 TfR = VSUB(T7b, T7e);
Chris@82 3056 STM4(&(ro[59]), TfR, ovs, &(ro[1]));
Chris@82 3057 TfS = VADD(T7b, T7e);
Chris@82 3058 STM4(&(ro[27]), TfS, ovs, &(ro[1]));
Chris@82 3059 }
Chris@82 3060 {
Chris@82 3061 V T7j, T7q, T7z, T7A;
Chris@82 3062 T7j = VADD(T7h, T7i);
Chris@82 3063 T7q = VADD(T7m, T7p);
Chris@82 3064 TfT = VSUB(T7j, T7q);
Chris@82 3065 STM4(&(ro[35]), TfT, ovs, &(ro[1]));
Chris@82 3066 TfU = VADD(T7j, T7q);
Chris@82 3067 STM4(&(ro[3]), TfU, ovs, &(ro[1]));
Chris@82 3068 T7z = VADD(T7r, T7s);
Chris@82 3069 T7A = VADD(T7w, T7x);
Chris@82 3070 TfV = VSUB(T7z, T7A);
Chris@82 3071 STM4(&(io[35]), TfV, ovs, &(io[1]));
Chris@82 3072 TfW = VADD(T7z, T7A);
Chris@82 3073 STM4(&(io[3]), TfW, ovs, &(io[1]));
Chris@82 3074 }
Chris@82 3075 {
Chris@82 3076 V T7t, T7u, T7v, T7y;
Chris@82 3077 T7t = VSUB(T7r, T7s);
Chris@82 3078 T7u = VSUB(T7p, T7m);
Chris@82 3079 TfX = VSUB(T7t, T7u);
Chris@82 3080 STM4(&(io[51]), TfX, ovs, &(io[1]));
Chris@82 3081 TfY = VADD(T7t, T7u);
Chris@82 3082 STM4(&(io[19]), TfY, ovs, &(io[1]));
Chris@82 3083 T7v = VSUB(T7h, T7i);
Chris@82 3084 T7y = VSUB(T7w, T7x);
Chris@82 3085 TfZ = VSUB(T7v, T7y);
Chris@82 3086 STM4(&(ro[51]), TfZ, ovs, &(ro[1]));
Chris@82 3087 Tg0 = VADD(T7v, T7y);
Chris@82 3088 STM4(&(ro[19]), Tg0, ovs, &(ro[1]));
Chris@82 3089 }
Chris@82 3090 }
Chris@82 3091 {
Chris@82 3092 V T9j, T9V, Ta0, Taa, Ta3, Tab, T9q, Ta6, T9y, T9Q, T9J, Ta5, T9M, T9W, T9F;
Chris@82 3093 V T9R;
Chris@82 3094 {
Chris@82 3095 V T9h, T9i, T9Y, T9Z;
Chris@82 3096 T9h = VADD(T7B, T7C);
Chris@82 3097 T9i = VADD(T8J, T8I);
Chris@82 3098 T9j = VSUB(T9h, T9i);
Chris@82 3099 T9V = VADD(T9h, T9i);
Chris@82 3100 T9Y = VADD(T9s, T9t);
Chris@82 3101 T9Z = VADD(T9v, T9w);
Chris@82 3102 Ta0 = VFMA(LDK(KP995184726), T9Y, VMUL(LDK(KP098017140), T9Z));
Chris@82 3103 Taa = VFNMS(LDK(KP098017140), T9Y, VMUL(LDK(KP995184726), T9Z));
Chris@82 3104 }
Chris@82 3105 {
Chris@82 3106 V Ta1, Ta2, T9m, T9p;
Chris@82 3107 Ta1 = VADD(T9z, T9A);
Chris@82 3108 Ta2 = VADD(T9C, T9D);
Chris@82 3109 Ta3 = VFNMS(LDK(KP098017140), Ta2, VMUL(LDK(KP995184726), Ta1));
Chris@82 3110 Tab = VFMA(LDK(KP098017140), Ta1, VMUL(LDK(KP995184726), Ta2));
Chris@82 3111 T9m = VFNMS(LDK(KP195090322), T9l, VMUL(LDK(KP980785280), T9k));
Chris@82 3112 T9p = VFMA(LDK(KP195090322), T9n, VMUL(LDK(KP980785280), T9o));
Chris@82 3113 T9q = VSUB(T9m, T9p);
Chris@82 3114 Ta6 = VADD(T9m, T9p);
Chris@82 3115 }
Chris@82 3116 {
Chris@82 3117 V T9u, T9x, T9H, T9I;
Chris@82 3118 T9u = VSUB(T9s, T9t);
Chris@82 3119 T9x = VSUB(T9v, T9w);
Chris@82 3120 T9y = VFMA(LDK(KP634393284), T9u, VMUL(LDK(KP773010453), T9x));
Chris@82 3121 T9Q = VFNMS(LDK(KP773010453), T9u, VMUL(LDK(KP634393284), T9x));
Chris@82 3122 T9H = VADD(T8F, T8G);
Chris@82 3123 T9I = VADD(T7G, T7J);
Chris@82 3124 T9J = VSUB(T9H, T9I);
Chris@82 3125 Ta5 = VADD(T9H, T9I);
Chris@82 3126 }
Chris@82 3127 {
Chris@82 3128 V T9K, T9L, T9B, T9E;
Chris@82 3129 T9K = VFNMS(LDK(KP195090322), T9o, VMUL(LDK(KP980785280), T9n));
Chris@82 3130 T9L = VFMA(LDK(KP980785280), T9l, VMUL(LDK(KP195090322), T9k));
Chris@82 3131 T9M = VSUB(T9K, T9L);
Chris@82 3132 T9W = VADD(T9L, T9K);
Chris@82 3133 T9B = VSUB(T9z, T9A);
Chris@82 3134 T9E = VSUB(T9C, T9D);
Chris@82 3135 T9F = VFNMS(LDK(KP773010453), T9E, VMUL(LDK(KP634393284), T9B));
Chris@82 3136 T9R = VFMA(LDK(KP773010453), T9B, VMUL(LDK(KP634393284), T9E));
Chris@82 3137 }
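	      /* From here on, STN4 calls regroup four result vectors previously
	         emitted with STM4 (e.g. ro[40..43]) into a single store of four
	         consecutive outputs; whether STN4 does real work or is a no-op
	         depends on the SIMD target's store-multiple support (see the
	         simd-support headers). */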
Chris@82 3138 {
Chris@82 3139 V T9r, T9G, Tg1, Tg2;
Chris@82 3140 T9r = VADD(T9j, T9q);
Chris@82 3141 T9G = VADD(T9y, T9F);
Chris@82 3142 Tg1 = VSUB(T9r, T9G);
Chris@82 3143 STM4(&(ro[41]), Tg1, ovs, &(ro[1]));
Chris@82 3144 STN4(&(ro[40]), TeR, Tg1, Tff, TfL, ovs);
Chris@82 3145 Tg2 = VADD(T9r, T9G);
Chris@82 3146 STM4(&(ro[9]), Tg2, ovs, &(ro[1]));
Chris@82 3147 STN4(&(ro[8]), TeS, Tg2, Tfg, TfM, ovs);
Chris@82 3148 }
Chris@82 3149 {
Chris@82 3150 V T9T, T9U, Tg3, Tg4;
Chris@82 3151 T9T = VADD(T9J, T9M);
Chris@82 3152 T9U = VADD(T9Q, T9R);
Chris@82 3153 Tg3 = VSUB(T9T, T9U);
Chris@82 3154 STM4(&(io[41]), Tg3, ovs, &(io[1]));
Chris@82 3155 STN4(&(io[40]), TeT, Tg3, Tfh, TfN, ovs);
Chris@82 3156 Tg4 = VADD(T9T, T9U);
Chris@82 3157 STM4(&(io[9]), Tg4, ovs, &(io[1]));
Chris@82 3158 STN4(&(io[8]), TeU, Tg4, Tfi, TfO, ovs);
Chris@82 3159 }
Chris@82 3160 {
Chris@82 3161 V T9N, T9O, Tg5, Tg6;
Chris@82 3162 T9N = VSUB(T9J, T9M);
Chris@82 3163 T9O = VSUB(T9F, T9y);
Chris@82 3164 Tg5 = VSUB(T9N, T9O);
Chris@82 3165 STM4(&(io[57]), Tg5, ovs, &(io[1]));
Chris@82 3166 STN4(&(io[56]), TeV, Tg5, Tfj, TfP, ovs);
Chris@82 3167 Tg6 = VADD(T9N, T9O);
Chris@82 3168 STM4(&(io[25]), Tg6, ovs, &(io[1]));
Chris@82 3169 STN4(&(io[24]), TeW, Tg6, Tfk, TfQ, ovs);
Chris@82 3170 }
Chris@82 3171 {
Chris@82 3172 V T9P, T9S, Tg7, Tg8;
Chris@82 3173 T9P = VSUB(T9j, T9q);
Chris@82 3174 T9S = VSUB(T9Q, T9R);
Chris@82 3175 Tg7 = VSUB(T9P, T9S);
Chris@82 3176 STM4(&(ro[57]), Tg7, ovs, &(ro[1]));
Chris@82 3177 STN4(&(ro[56]), TeX, Tg7, Tfl, TfR, ovs);
Chris@82 3178 Tg8 = VADD(T9P, T9S);
Chris@82 3179 STM4(&(ro[25]), Tg8, ovs, &(ro[1]));
Chris@82 3180 STN4(&(ro[24]), TeY, Tg8, Tfm, TfS, ovs);
Chris@82 3181 }
Chris@82 3182 {
Chris@82 3183 V T9X, Ta4, Tg9, Tga;
Chris@82 3184 T9X = VADD(T9V, T9W);
Chris@82 3185 Ta4 = VADD(Ta0, Ta3);
Chris@82 3186 Tg9 = VSUB(T9X, Ta4);
Chris@82 3187 STM4(&(ro[33]), Tg9, ovs, &(ro[1]));
Chris@82 3188 STN4(&(ro[32]), TeJ, Tg9, Tfn, TfT, ovs);
Chris@82 3189 Tga = VADD(T9X, Ta4);
Chris@82 3190 STM4(&(ro[1]), Tga, ovs, &(ro[1]));
Chris@82 3191 STN4(&(ro[0]), TeL, Tga, Tfo, TfU, ovs);
Chris@82 3192 }
Chris@82 3193 {
Chris@82 3194 V Tad, Tae, Tgb, Tgc;
Chris@82 3195 Tad = VADD(Ta5, Ta6);
Chris@82 3196 Tae = VADD(Taa, Tab);
Chris@82 3197 Tgb = VSUB(Tad, Tae);
Chris@82 3198 STM4(&(io[33]), Tgb, ovs, &(io[1]));
Chris@82 3199 STN4(&(io[32]), TeK, Tgb, Tfp, TfV, ovs);
Chris@82 3200 Tgc = VADD(Tad, Tae);
Chris@82 3201 STM4(&(io[1]), Tgc, ovs, &(io[1]));
Chris@82 3202 STN4(&(io[0]), TeM, Tgc, Tfq, TfW, ovs);
Chris@82 3203 }
Chris@82 3204 {
Chris@82 3205 V Ta7, Ta8, Tgd, Tge;
Chris@82 3206 Ta7 = VSUB(Ta5, Ta6);
Chris@82 3207 Ta8 = VSUB(Ta3, Ta0);
Chris@82 3208 Tgd = VSUB(Ta7, Ta8);
Chris@82 3209 STM4(&(io[49]), Tgd, ovs, &(io[1]));
Chris@82 3210 STN4(&(io[48]), TeP, Tgd, Tfr, TfX, ovs);
Chris@82 3211 Tge = VADD(Ta7, Ta8);
Chris@82 3212 STM4(&(io[17]), Tge, ovs, &(io[1]));
Chris@82 3213 STN4(&(io[16]), TeN, Tge, Tfs, TfY, ovs);
Chris@82 3214 }
Chris@82 3215 {
Chris@82 3216 V Ta9, Tac, Tgf, Tgg;
Chris@82 3217 Ta9 = VSUB(T9V, T9W);
Chris@82 3218 Tac = VSUB(Taa, Tab);
Chris@82 3219 Tgf = VSUB(Ta9, Tac);
Chris@82 3220 STM4(&(ro[49]), Tgf, ovs, &(ro[1]));
Chris@82 3221 STN4(&(ro[48]), TeQ, Tgf, Tft, TfZ, ovs);
Chris@82 3222 Tgg = VADD(Ta9, Tac);
Chris@82 3223 STM4(&(ro[17]), Tgg, ovs, &(ro[1]));
Chris@82 3224 STN4(&(ro[16]), TeO, Tgg, Tfu, Tg0, ovs);
Chris@82 3225 }
Chris@82 3226 }
Chris@82 3227 {
Chris@82 3228 V Tgh, Tgi, Tgj, Tgk, Tgl, Tgm, Tgn, Tgo, Tgp, Tgq, Tgr, Tgs, Tgt, Tgu, Tgv;
Chris@82 3229 V Tgw;
Chris@82 3230 {
Chris@82 3231 V T3v, T6j, T6o, T6y, T6r, T6z, T48, T6u, T52, T6e, T67, T6t, T6a, T6k, T5V;
Chris@82 3232 V T6f;
Chris@82 3233 {
Chris@82 3234 V T3f, T3u, T6m, T6n;
Chris@82 3235 T3f = VSUB(T37, T3e);
Chris@82 3236 T3u = VSUB(T3m, T3t);
Chris@82 3237 T3v = VSUB(T3f, T3u);
Chris@82 3238 T6j = VADD(T3f, T3u);
Chris@82 3239 T6m = VADD(T4q, T4N);
Chris@82 3240 T6n = VADD(T4X, T50);
Chris@82 3241 T6o = VFMA(LDK(KP634393284), T6m, VMUL(LDK(KP773010453), T6n));
Chris@82 3242 T6y = VFNMS(LDK(KP634393284), T6n, VMUL(LDK(KP773010453), T6m));
Chris@82 3243 }
Chris@82 3244 {
Chris@82 3245 V T6p, T6q, T3O, T47;
Chris@82 3246 T6p = VADD(T5j, T5G);
Chris@82 3247 T6q = VADD(T5Q, T5T);
Chris@82 3248 T6r = VFNMS(LDK(KP634393284), T6q, VMUL(LDK(KP773010453), T6p));
Chris@82 3249 T6z = VFMA(LDK(KP773010453), T6q, VMUL(LDK(KP634393284), T6p));
Chris@82 3250 T3O = VFNMS(LDK(KP980785280), T3N, VMUL(LDK(KP195090322), T3G));
Chris@82 3251 T47 = VFMA(LDK(KP195090322), T3Z, VMUL(LDK(KP980785280), T46));
Chris@82 3252 T48 = VSUB(T3O, T47);
Chris@82 3253 T6u = VADD(T3O, T47);
Chris@82 3254 }
Chris@82 3255 {
Chris@82 3256 V T4O, T51, T63, T66;
Chris@82 3257 T4O = VSUB(T4q, T4N);
Chris@82 3258 T51 = VSUB(T4X, T50);
Chris@82 3259 T52 = VFMA(LDK(KP995184726), T4O, VMUL(LDK(KP098017140), T51));
Chris@82 3260 T6e = VFNMS(LDK(KP995184726), T51, VMUL(LDK(KP098017140), T4O));
Chris@82 3261 T63 = VSUB(T5Z, T62);
Chris@82 3262 T66 = VSUB(T64, T65);
Chris@82 3263 T67 = VSUB(T63, T66);
Chris@82 3264 T6t = VADD(T63, T66);
Chris@82 3265 }
Chris@82 3266 {
Chris@82 3267 V T68, T69, T5H, T5U;
Chris@82 3268 T68 = VFNMS(LDK(KP980785280), T3Z, VMUL(LDK(KP195090322), T46));
Chris@82 3269 T69 = VFMA(LDK(KP980785280), T3G, VMUL(LDK(KP195090322), T3N));
Chris@82 3270 T6a = VSUB(T68, T69);
Chris@82 3271 T6k = VADD(T69, T68);
Chris@82 3272 T5H = VSUB(T5j, T5G);
Chris@82 3273 T5U = VSUB(T5Q, T5T);
Chris@82 3274 T5V = VFNMS(LDK(KP995184726), T5U, VMUL(LDK(KP098017140), T5H));
Chris@82 3275 T6f = VFMA(LDK(KP098017140), T5U, VMUL(LDK(KP995184726), T5H));
Chris@82 3276 }
Chris@82 3277 {
Chris@82 3278 V T49, T5W, T6h, T6i;
Chris@82 3279 T49 = VADD(T3v, T48);
Chris@82 3280 T5W = VADD(T52, T5V);
Chris@82 3281 Tgh = VSUB(T49, T5W);
Chris@82 3282 STM4(&(ro[47]), Tgh, ovs, &(ro[1]));
Chris@82 3283 Tgi = VADD(T49, T5W);
Chris@82 3284 STM4(&(ro[15]), Tgi, ovs, &(ro[1]));
Chris@82 3285 T6h = VADD(T67, T6a);
Chris@82 3286 T6i = VADD(T6e, T6f);
Chris@82 3287 Tgj = VSUB(T6h, T6i);
Chris@82 3288 STM4(&(io[47]), Tgj, ovs, &(io[1]));
Chris@82 3289 Tgk = VADD(T6h, T6i);
Chris@82 3290 STM4(&(io[15]), Tgk, ovs, &(io[1]));
Chris@82 3291 }
Chris@82 3292 {
Chris@82 3293 V T6b, T6c, T6d, T6g;
Chris@82 3294 T6b = VSUB(T67, T6a);
Chris@82 3295 T6c = VSUB(T5V, T52);
Chris@82 3296 Tgl = VSUB(T6b, T6c);
Chris@82 3297 STM4(&(io[63]), Tgl, ovs, &(io[1]));
Chris@82 3298 Tgm = VADD(T6b, T6c);
Chris@82 3299 STM4(&(io[31]), Tgm, ovs, &(io[1]));
Chris@82 3300 T6d = VSUB(T3v, T48);
Chris@82 3301 T6g = VSUB(T6e, T6f);
Chris@82 3302 Tgn = VSUB(T6d, T6g);
Chris@82 3303 STM4(&(ro[63]), Tgn, ovs, &(ro[1]));
Chris@82 3304 Tgo = VADD(T6d, T6g);
Chris@82 3305 STM4(&(ro[31]), Tgo, ovs, &(ro[1]));
Chris@82 3306 }
Chris@82 3307 {
Chris@82 3308 V T6l, T6s, T6B, T6C;
Chris@82 3309 T6l = VADD(T6j, T6k);
Chris@82 3310 T6s = VADD(T6o, T6r);
Chris@82 3311 Tgp = VSUB(T6l, T6s);
Chris@82 3312 STM4(&(ro[39]), Tgp, ovs, &(ro[1]));
Chris@82 3313 Tgq = VADD(T6l, T6s);
Chris@82 3314 STM4(&(ro[7]), Tgq, ovs, &(ro[1]));
Chris@82 3315 T6B = VADD(T6t, T6u);
Chris@82 3316 T6C = VADD(T6y, T6z);
Chris@82 3317 Tgr = VSUB(T6B, T6C);
Chris@82 3318 STM4(&(io[39]), Tgr, ovs, &(io[1]));
Chris@82 3319 Tgs = VADD(T6B, T6C);
Chris@82 3320 STM4(&(io[7]), Tgs, ovs, &(io[1]));
Chris@82 3321 }
Chris@82 3322 {
Chris@82 3323 V T6v, T6w, T6x, T6A;
Chris@82 3324 T6v = VSUB(T6t, T6u);
Chris@82 3325 T6w = VSUB(T6r, T6o);
Chris@82 3326 Tgt = VSUB(T6v, T6w);
Chris@82 3327 STM4(&(io[55]), Tgt, ovs, &(io[1]));
Chris@82 3328 Tgu = VADD(T6v, T6w);
Chris@82 3329 STM4(&(io[23]), Tgu, ovs, &(io[1]));
Chris@82 3330 T6x = VSUB(T6j, T6k);
Chris@82 3331 T6A = VSUB(T6y, T6z);
Chris@82 3332 Tgv = VSUB(T6x, T6A);
Chris@82 3333 STM4(&(ro[55]), Tgv, ovs, &(ro[1]));
Chris@82 3334 Tgw = VADD(T6x, T6A);
Chris@82 3335 STM4(&(ro[23]), Tgw, ovs, &(ro[1]));
Chris@82 3336 }
Chris@82 3337 }
Chris@82 3338 {
Chris@82 3339 V T7L, T8X, T92, T9c, T95, T9d, T80, T98, T8k, T8S, T8L, T97, T8O, T8Y, T8D;
Chris@82 3340 V T8T;
Chris@82 3341 {
Chris@82 3342 V T7D, T7K, T90, T91;
Chris@82 3343 T7D = VSUB(T7B, T7C);
Chris@82 3344 T7K = VSUB(T7G, T7J);
Chris@82 3345 T7L = VSUB(T7D, T7K);
Chris@82 3346 T8X = VADD(T7D, T7K);
Chris@82 3347 T90 = VADD(T84, T8b);
Chris@82 3348 T91 = VADD(T8f, T8i);
Chris@82 3349 T92 = VFMA(LDK(KP471396736), T90, VMUL(LDK(KP881921264), T91));
Chris@82 3350 T9c = VFNMS(LDK(KP471396736), T91, VMUL(LDK(KP881921264), T90));
Chris@82 3351 }
Chris@82 3352 {
Chris@82 3353 V T93, T94, T7S, T7Z;
Chris@82 3354 T93 = VADD(T8n, T8u);
Chris@82 3355 T94 = VADD(T8y, T8B);
Chris@82 3356 T95 = VFNMS(LDK(KP471396736), T94, VMUL(LDK(KP881921264), T93));
Chris@82 3357 T9d = VFMA(LDK(KP881921264), T94, VMUL(LDK(KP471396736), T93));
Chris@82 3358 T7S = VFNMS(LDK(KP831469612), T7R, VMUL(LDK(KP555570233), T7O));
Chris@82 3359 T7Z = VFMA(LDK(KP831469612), T7V, VMUL(LDK(KP555570233), T7Y));
Chris@82 3360 T80 = VSUB(T7S, T7Z);
Chris@82 3361 T98 = VADD(T7S, T7Z);
Chris@82 3362 }
Chris@82 3363 {
Chris@82 3364 V T8c, T8j, T8H, T8K;
Chris@82 3365 T8c = VSUB(T84, T8b);
Chris@82 3366 T8j = VSUB(T8f, T8i);
Chris@82 3367 T8k = VFMA(LDK(KP956940335), T8c, VMUL(LDK(KP290284677), T8j));
Chris@82 3368 T8S = VFNMS(LDK(KP956940335), T8j, VMUL(LDK(KP290284677), T8c));
Chris@82 3369 T8H = VSUB(T8F, T8G);
Chris@82 3370 T8K = VSUB(T8I, T8J);
Chris@82 3371 T8L = VSUB(T8H, T8K);
Chris@82 3372 T97 = VADD(T8H, T8K);
Chris@82 3373 }
Chris@82 3374 {
Chris@82 3375 V T8M, T8N, T8v, T8C;
Chris@82 3376 T8M = VFNMS(LDK(KP831469612), T7Y, VMUL(LDK(KP555570233), T7V));
Chris@82 3377 T8N = VFMA(LDK(KP555570233), T7R, VMUL(LDK(KP831469612), T7O));
Chris@82 3378 T8O = VSUB(T8M, T8N);
Chris@82 3379 T8Y = VADD(T8N, T8M);
Chris@82 3380 T8v = VSUB(T8n, T8u);
Chris@82 3381 T8C = VSUB(T8y, T8B);
Chris@82 3382 T8D = VFNMS(LDK(KP956940335), T8C, VMUL(LDK(KP290284677), T8v));
Chris@82 3383 T8T = VFMA(LDK(KP290284677), T8C, VMUL(LDK(KP956940335), T8v));
Chris@82 3384 }
Chris@82 3385 {
Chris@82 3386 V T81, T8E, Tgx, Tgy;
Chris@82 3387 T81 = VADD(T7L, T80);
Chris@82 3388 T8E = VADD(T8k, T8D);
Chris@82 3389 Tgx = VSUB(T81, T8E);
Chris@82 3390 STM4(&(ro[45]), Tgx, ovs, &(ro[1]));
Chris@82 3391 STN4(&(ro[44]), TeZ, Tgx, Tfv, Tgh, ovs);
Chris@82 3392 Tgy = VADD(T81, T8E);
Chris@82 3393 STM4(&(ro[13]), Tgy, ovs, &(ro[1]));
Chris@82 3394 STN4(&(ro[12]), Tf0, Tgy, Tfw, Tgi, ovs);
Chris@82 3395 }
Chris@82 3396 {
Chris@82 3397 V T8V, T8W, Tgz, TgA;
Chris@82 3398 T8V = VADD(T8L, T8O);
Chris@82 3399 T8W = VADD(T8S, T8T);
Chris@82 3400 Tgz = VSUB(T8V, T8W);
Chris@82 3401 STM4(&(io[45]), Tgz, ovs, &(io[1]));
Chris@82 3402 STN4(&(io[44]), Tf1, Tgz, Tfx, Tgj, ovs);
Chris@82 3403 TgA = VADD(T8V, T8W);
Chris@82 3404 STM4(&(io[13]), TgA, ovs, &(io[1]));
Chris@82 3405 STN4(&(io[12]), Tf2, TgA, Tfy, Tgk, ovs);
Chris@82 3406 }
Chris@82 3407 {
Chris@82 3408 V T8P, T8Q, TgB, TgC;
Chris@82 3409 T8P = VSUB(T8L, T8O);
Chris@82 3410 T8Q = VSUB(T8D, T8k);
Chris@82 3411 TgB = VSUB(T8P, T8Q);
Chris@82 3412 STM4(&(io[61]), TgB, ovs, &(io[1]));
Chris@82 3413 STN4(&(io[60]), Tf3, TgB, Tfz, Tgl, ovs);
Chris@82 3414 TgC = VADD(T8P, T8Q);
Chris@82 3415 STM4(&(io[29]), TgC, ovs, &(io[1]));
Chris@82 3416 STN4(&(io[28]), Tf4, TgC, TfA, Tgm, ovs);
Chris@82 3417 }
Chris@82 3418 {
Chris@82 3419 V T8R, T8U, TgD, TgE;
Chris@82 3420 T8R = VSUB(T7L, T80);
Chris@82 3421 T8U = VSUB(T8S, T8T);
Chris@82 3422 TgD = VSUB(T8R, T8U);
Chris@82 3423 STM4(&(ro[61]), TgD, ovs, &(ro[1]));
Chris@82 3424 STN4(&(ro[60]), Tf5, TgD, TfB, Tgn, ovs);
Chris@82 3425 TgE = VADD(T8R, T8U);
Chris@82 3426 STM4(&(ro[29]), TgE, ovs, &(ro[1]));
Chris@82 3427 STN4(&(ro[28]), Tf6, TgE, TfC, Tgo, ovs);
Chris@82 3428 }
Chris@82 3429 {
Chris@82 3430 V T8Z, T96, TgF, TgG;
Chris@82 3431 T8Z = VADD(T8X, T8Y);
Chris@82 3432 T96 = VADD(T92, T95);
Chris@82 3433 TgF = VSUB(T8Z, T96);
Chris@82 3434 STM4(&(ro[37]), TgF, ovs, &(ro[1]));
Chris@82 3435 STN4(&(ro[36]), Tf7, TgF, TfD, Tgp, ovs);
Chris@82 3436 TgG = VADD(T8Z, T96);
Chris@82 3437 STM4(&(ro[5]), TgG, ovs, &(ro[1]));
Chris@82 3438 STN4(&(ro[4]), Tf8, TgG, TfE, Tgq, ovs);
Chris@82 3439 }
Chris@82 3440 {
Chris@82 3441 V T9f, T9g, TgH, TgI;
Chris@82 3442 T9f = VADD(T97, T98);
Chris@82 3443 T9g = VADD(T9c, T9d);
Chris@82 3444 TgH = VSUB(T9f, T9g);
Chris@82 3445 STM4(&(io[37]), TgH, ovs, &(io[1]));
Chris@82 3446 STN4(&(io[36]), Tf9, TgH, TfF, Tgr, ovs);
Chris@82 3447 TgI = VADD(T9f, T9g);
Chris@82 3448 STM4(&(io[5]), TgI, ovs, &(io[1]));
Chris@82 3449 STN4(&(io[4]), Tfa, TgI, TfG, Tgs, ovs);
Chris@82 3450 }
Chris@82 3451 {
Chris@82 3452 V T99, T9a, TgJ, TgK;
Chris@82 3453 T99 = VSUB(T97, T98);
Chris@82 3454 T9a = VSUB(T95, T92);
Chris@82 3455 TgJ = VSUB(T99, T9a);
Chris@82 3456 STM4(&(io[53]), TgJ, ovs, &(io[1]));
Chris@82 3457 STN4(&(io[52]), Tfb, TgJ, TfH, Tgt, ovs);
Chris@82 3458 TgK = VADD(T99, T9a);
Chris@82 3459 STM4(&(io[21]), TgK, ovs, &(io[1]));
Chris@82 3460 STN4(&(io[20]), Tfc, TgK, TfI, Tgu, ovs);
Chris@82 3461 }
Chris@82 3462 {
Chris@82 3463 V T9b, T9e, TgL, TgM;
Chris@82 3464 T9b = VSUB(T8X, T8Y);
Chris@82 3465 T9e = VSUB(T9c, T9d);
Chris@82 3466 TgL = VSUB(T9b, T9e);
Chris@82 3467 STM4(&(ro[53]), TgL, ovs, &(ro[1]));
Chris@82 3468 STN4(&(ro[52]), Tfd, TgL, TfJ, Tgv, ovs);
Chris@82 3469 TgM = VADD(T9b, T9e);
Chris@82 3470 STM4(&(ro[21]), TgM, ovs, &(ro[1]));
Chris@82 3471 STN4(&(ro[20]), Tfe, TgM, TfK, Tgw, ovs);
Chris@82 3472 }
Chris@82 3473 }
Chris@82 3474 }
Chris@82 3475 }
Chris@82 3476 }
Chris@82 3477 }
Chris@82 3478 }
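     /* VLEAVE() closes the vectorized region; on AVX-family targets it is
        defined to issue vzeroupper to avoid SSE/AVX transition penalties,
        and is a no-op elsewhere (per the simd-support headers). */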
Chris@82 3479 VLEAVE();
Chris@82 3480 }
Chris@82 3481
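/* Codelet descriptor and registration: sz = 64 (transform length), the codelet
   name, the operation counts {adds, muls, fmas, other} = {808, 144, 104, 0},
   the codelet genus, and trailing stride-constraint fields (apparently pinning
   the output stride to 1; the exact field order follows kdft_desc in
   dft/codelet-dft.h). XSIMD(codelet_n2sv_64) hands the kernel and this
   descriptor to the planner via X(kdft_register), making it selectable as a
   size-64 no-twiddle SIMD DFT. */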
Chris@82 3482 static const kdft_desc desc = { 64, XSIMD_STRING("n2sv_64"), {808, 144, 104, 0}, &GENUS, 0, 1, 0, 0 };
Chris@82 3483
Chris@82 3484 void XSIMD(codelet_n2sv_64) (planner *p) {
Chris@82 3485 X(kdft_register) (p, n2sv_64, &desc);
Chris@82 3486 }
Chris@82 3487
Chris@82 3488 #endif