annotate fft/fftw/fftw-3.3.4/dft/simd/common/n2sv_32.c @ 40:223f770b5341 kissfft-double tip

Try a double-precision kissfft
author Chris Cannam
date Wed, 07 Sep 2016 10:40:32 +0100
parents 26056e866c29
children
rev   line source
Chris@19 1 /*
Chris@19 2 * Copyright (c) 2003, 2007-14 Matteo Frigo
Chris@19 3 * Copyright (c) 2003, 2007-14 Massachusetts Institute of Technology
Chris@19 4 *
Chris@19 5 * This program is free software; you can redistribute it and/or modify
Chris@19 6 * it under the terms of the GNU General Public License as published by
Chris@19 7 * the Free Software Foundation; either version 2 of the License, or
Chris@19 8 * (at your option) any later version.
Chris@19 9 *
Chris@19 10 * This program is distributed in the hope that it will be useful,
Chris@19 11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
Chris@19 12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
Chris@19 13 * GNU General Public License for more details.
Chris@19 14 *
Chris@19 15 * You should have received a copy of the GNU General Public License
Chris@19 16 * along with this program; if not, write to the Free Software
Chris@19 17 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Chris@19 18 *
Chris@19 19 */
Chris@19 20
Chris@19 21 /* This file was automatically generated --- DO NOT EDIT */
Chris@19 22 /* Generated on Tue Mar 4 13:47:05 EST 2014 */
Chris@19 23
Chris@19 24 #include "codelet-dft.h"
Chris@19 25
Chris@19 26 #ifdef HAVE_FMA
Chris@19 27
Chris@19 28 /* Generated by: ../../../genfft/gen_notw.native -fma -reorder-insns -schedule-for-pipeline -simd -compact -variables 4 -pipeline-latency 8 -n 32 -name n2sv_32 -with-ostride 1 -include n2s.h -store-multiple 4 */
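/*
 * Editorial note (not part of the generated output): a hedged reading of the
 * gen_notw.native flags above, stated as assumptions where the code below does
 * not confirm them -- "-n 32" is the transform size and "-name n2sv_32" the
 * emitted symbol (both visible in this file), "-fma" requests fused
 * multiply-add arithmetic (hence the VFMA/VFNMS calls), "-simd" vector code
 * over the V type, and "-store-multiple 4" the grouped STM4/STN4 stores;
 * "-reorder-insns", "-schedule-for-pipeline", "-pipeline-latency 8",
 * "-compact" and "-variables 4" appear to be generator scheduling and
 * formatting knobs.
 */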
Chris@19 29
Chris@19 30 /*
Chris@19 31 * This function contains 372 FP additions, 136 FP multiplications,
Chris@19 32 * (or, 236 additions, 0 multiplications, 136 fused multiply/add),
Chris@19 33 * 194 stack variables, 7 constants, and 144 memory accesses
Chris@19 34 */
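/*
 * Consistency check on the counts above (an inference from the stated numbers,
 * not from the generator): if each fused multiply/add is counted as one
 * addition and one multiplication, then 236 + 136 = 372 FP additions and
 * 0 + 136 = 136 FP multiplications, matching the totals on the first line and
 * the {236, 0, 136, 0} entry in the kdft_desc near the end of this branch.
 */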
Chris@19 35 #include "n2s.h"
Chris@19 36
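/*
 * Sketch of the calling convention, inferred from the loop below and hedged
 * accordingly: ri/ii and ro/io are split real/imaginary input and output
 * arrays, is/os the input/output element strides (the output stride is pinned
 * to 1 by "-with-ostride 1", which is why the stores index ro/io directly),
 * v the number of transforms to perform, and ivs/ovs the strides between
 * consecutive transforms.  Each pass of the loop handles 2 * VL transforms
 * (VL being the SIMD vector length from n2s.h), which is why the induction
 * variable steps by 2 * VL and the pointers advance by 2 * VL * ivs / ovs.
 */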
Chris@19 37 static void n2sv_32(const R *ri, const R *ii, R *ro, R *io, stride is, stride os, INT v, INT ivs, INT ovs)
Chris@19 38 {
Chris@19 39 DVK(KP980785280, +0.980785280403230449126182236134239036973933731);
Chris@19 40 DVK(KP198912367, +0.198912367379658006911597622644676228597850501);
Chris@19 41 DVK(KP831469612, +0.831469612302545237078788377617905756738560812);
Chris@19 42 DVK(KP668178637, +0.668178637919298919997757686523080761552472251);
Chris@19 43 DVK(KP923879532, +0.923879532511286756128183189396788286822416626);
Chris@19 44 DVK(KP707106781, +0.707106781186547524400844362104849039284835938);
Chris@19 45 DVK(KP414213562, +0.414213562373095048801688724209698078569671875);
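/*
 * The DVK constants above appear to be precomputed twiddle values whose names
 * encode their leading digits: KP707106781 = cos(pi/4), KP923879532 = cos(pi/8),
 * KP414213562 = tan(pi/8), KP980785280 = cos(pi/16), KP198912367 = tan(pi/16),
 * KP831469612 = cos(3*pi/16) and KP668178637 = tan(3*pi/16).  A standalone
 * numerical check is sketched after the #endif at the bottom of the file.
 */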
Chris@19 46 {
Chris@19 47 INT i;
Chris@19 48 for (i = v; i > 0; i = i - (2 * VL), ri = ri + ((2 * VL) * ivs), ii = ii + ((2 * VL) * ivs), ro = ro + ((2 * VL) * ovs), io = io + ((2 * VL) * ovs), MAKE_VOLATILE_STRIDE(128, is), MAKE_VOLATILE_STRIDE(128, os)) {
Chris@19 49 V T61, T62, T63, T64, T65, T66, T67, T68, T69, T6a, T6b, T6c, T6d, T6e, T6f;
Chris@19 50 V T6g, T6h, T6i, T6j, T6k, T6l, T6m, T6n, T6o, T6p, T6q, T6r, T6s, T6t, T6u;
Chris@19 51 V T6v, T6w, T3g, T3f, T6x, T6y, T6z, T6A, T6B, T6C, T6D, T6E, T4p, T49, T4l;
Chris@19 52 V T4j, T6F, T6G, T6H, T6I, T6J, T6K, T6L, T6M, T3n, T3b, T3r, T3l, T3o, T3e;
Chris@19 53 V T4q, T4o, T4k, T4g, T3h, T3p;
Chris@19 54 {
Chris@19 55 V T2T, T3T, T4r, T7, T3t, T1z, T18, T4Z, Te, T50, T1f, T4s, T1G, T3U, T2W;
Chris@19 56 V T3u, Tm, T1n, T3X, T3y, T2Z, T1O, T53, T4w, Tt, T1u, T3W, T3B, T2Y, T1V;
Chris@19 57 V T52, T4z, T3O, T2t, T3L, T2K, TZ, T5F, T4R, T5k, T5j, T4W, T5I, T5X, T2E;
Chris@19 58 V T3M, T2N, T3P, T3H, T22, T3E, T2j, T4G, T5h, TK, T5A, T5D, T5W, T2d, T3F;
Chris@19 59 V T4L, T5g, T3I, T2m;
Chris@19 60 {
Chris@19 61 V T1L, T1j, T1k, T1l, T4v, T1K, T3w;
Chris@19 62 {
Chris@19 63 V T1, T2, T12, T13, T4, T5, T15, T16;
Chris@19 64 T1 = LD(&(ri[0]), ivs, &(ri[0]));
Chris@19 65 T2 = LD(&(ri[WS(is, 16)]), ivs, &(ri[0]));
Chris@19 66 T12 = LD(&(ii[0]), ivs, &(ii[0]));
Chris@19 67 T13 = LD(&(ii[WS(is, 16)]), ivs, &(ii[0]));
Chris@19 68 T4 = LD(&(ri[WS(is, 8)]), ivs, &(ri[0]));
Chris@19 69 T5 = LD(&(ri[WS(is, 24)]), ivs, &(ri[0]));
Chris@19 70 T15 = LD(&(ii[WS(is, 8)]), ivs, &(ii[0]));
Chris@19 71 T16 = LD(&(ii[WS(is, 24)]), ivs, &(ii[0]));
Chris@19 72 {
Chris@19 73 V Tb, T1A, Ta, T1B, T1b, Tc, T1c, T1d;
Chris@19 74 {
Chris@19 75 V T8, T1x, T3, T2R, T14, T2S, T6, T1y, T17, T9, T19, T1a;
Chris@19 76 T8 = LD(&(ri[WS(is, 4)]), ivs, &(ri[0]));
Chris@19 77 T1x = VSUB(T1, T2);
Chris@19 78 T3 = VADD(T1, T2);
Chris@19 79 T2R = VSUB(T12, T13);
Chris@19 80 T14 = VADD(T12, T13);
Chris@19 81 T2S = VSUB(T4, T5);
Chris@19 82 T6 = VADD(T4, T5);
Chris@19 83 T1y = VSUB(T15, T16);
Chris@19 84 T17 = VADD(T15, T16);
Chris@19 85 T9 = LD(&(ri[WS(is, 20)]), ivs, &(ri[0]));
Chris@19 86 T19 = LD(&(ii[WS(is, 4)]), ivs, &(ii[0]));
Chris@19 87 T1a = LD(&(ii[WS(is, 20)]), ivs, &(ii[0]));
Chris@19 88 Tb = LD(&(ri[WS(is, 28)]), ivs, &(ri[0]));
Chris@19 89 T2T = VSUB(T2R, T2S);
Chris@19 90 T3T = VADD(T2S, T2R);
Chris@19 91 T4r = VSUB(T3, T6);
Chris@19 92 T7 = VADD(T3, T6);
Chris@19 93 T3t = VSUB(T1x, T1y);
Chris@19 94 T1z = VADD(T1x, T1y);
Chris@19 95 T18 = VADD(T14, T17);
Chris@19 96 T4Z = VSUB(T14, T17);
Chris@19 97 T1A = VSUB(T8, T9);
Chris@19 98 Ta = VADD(T8, T9);
Chris@19 99 T1B = VSUB(T19, T1a);
Chris@19 100 T1b = VADD(T19, T1a);
Chris@19 101 Tc = LD(&(ri[WS(is, 12)]), ivs, &(ri[0]));
Chris@19 102 T1c = LD(&(ii[WS(is, 28)]), ivs, &(ii[0]));
Chris@19 103 T1d = LD(&(ii[WS(is, 12)]), ivs, &(ii[0]));
Chris@19 104 }
Chris@19 105 {
Chris@19 106 V Ti, T1I, T1J, Tl;
Chris@19 107 {
Chris@19 108 V T1h, T1C, T2U, T1D, Td, T1E, T1e, T1i, Tg, Th;
Chris@19 109 Tg = LD(&(ri[WS(is, 2)]), ivs, &(ri[0]));
Chris@19 110 Th = LD(&(ri[WS(is, 18)]), ivs, &(ri[0]));
Chris@19 111 T1h = LD(&(ii[WS(is, 2)]), ivs, &(ii[0]));
Chris@19 112 T1C = VADD(T1A, T1B);
Chris@19 113 T2U = VSUB(T1B, T1A);
Chris@19 114 T1D = VSUB(Tb, Tc);
Chris@19 115 Td = VADD(Tb, Tc);
Chris@19 116 T1E = VSUB(T1c, T1d);
Chris@19 117 T1e = VADD(T1c, T1d);
Chris@19 118 T1L = VSUB(Tg, Th);
Chris@19 119 Ti = VADD(Tg, Th);
Chris@19 120 T1i = LD(&(ii[WS(is, 18)]), ivs, &(ii[0]));
Chris@19 121 {
Chris@19 122 V T2V, T1F, Tj, Tk;
Chris@19 123 Tj = LD(&(ri[WS(is, 10)]), ivs, &(ri[0]));
Chris@19 124 Tk = LD(&(ri[WS(is, 26)]), ivs, &(ri[0]));
Chris@19 125 Te = VADD(Ta, Td);
Chris@19 126 T50 = VSUB(Td, Ta);
Chris@19 127 T2V = VADD(T1D, T1E);
Chris@19 128 T1F = VSUB(T1D, T1E);
Chris@19 129 T1f = VADD(T1b, T1e);
Chris@19 130 T4s = VSUB(T1b, T1e);
Chris@19 131 T1j = VADD(T1h, T1i);
Chris@19 132 T1I = VSUB(T1h, T1i);
Chris@19 133 T1J = VSUB(Tj, Tk);
Chris@19 134 Tl = VADD(Tj, Tk);
Chris@19 135 T1G = VADD(T1C, T1F);
Chris@19 136 T3U = VSUB(T1F, T1C);
Chris@19 137 T2W = VADD(T2U, T2V);
Chris@19 138 T3u = VSUB(T2U, T2V);
Chris@19 139 T1k = LD(&(ii[WS(is, 10)]), ivs, &(ii[0]));
Chris@19 140 T1l = LD(&(ii[WS(is, 26)]), ivs, &(ii[0]));
Chris@19 141 }
Chris@19 142 }
Chris@19 143 T4v = VSUB(Ti, Tl);
Chris@19 144 Tm = VADD(Ti, Tl);
Chris@19 145 T1K = VSUB(T1I, T1J);
Chris@19 146 T3w = VADD(T1J, T1I);
Chris@19 147 }
Chris@19 148 }
Chris@19 149 }
Chris@19 150 {
Chris@19 151 V T1r, T1S, T1q, T1s, T4x, T1R, T3z;
Chris@19 152 {
Chris@19 153 V Tp, T1P, T1Q, Ts;
Chris@19 154 {
Chris@19 155 V Tn, To, T1o, T1M, T1m, T1p;
Chris@19 156 Tn = LD(&(ri[WS(is, 30)]), ivs, &(ri[0]));
Chris@19 157 To = LD(&(ri[WS(is, 14)]), ivs, &(ri[0]));
Chris@19 158 T1o = LD(&(ii[WS(is, 30)]), ivs, &(ii[0]));
Chris@19 159 T1M = VSUB(T1k, T1l);
Chris@19 160 T1m = VADD(T1k, T1l);
Chris@19 161 T1p = LD(&(ii[WS(is, 14)]), ivs, &(ii[0]));
Chris@19 162 {
Chris@19 163 V Tq, Tr, T3x, T1N, T4u;
Chris@19 164 Tq = LD(&(ri[WS(is, 6)]), ivs, &(ri[0]));
Chris@19 165 Tr = LD(&(ri[WS(is, 22)]), ivs, &(ri[0]));
Chris@19 166 T1r = LD(&(ii[WS(is, 6)]), ivs, &(ii[0]));
Chris@19 167 T1S = VSUB(Tn, To);
Chris@19 168 Tp = VADD(Tn, To);
Chris@19 169 T3x = VSUB(T1L, T1M);
Chris@19 170 T1N = VADD(T1L, T1M);
Chris@19 171 T4u = VSUB(T1j, T1m);
Chris@19 172 T1n = VADD(T1j, T1m);
Chris@19 173 T1P = VSUB(T1o, T1p);
Chris@19 174 T1q = VADD(T1o, T1p);
Chris@19 175 T1Q = VSUB(Tq, Tr);
Chris@19 176 Ts = VADD(Tq, Tr);
Chris@19 177 T3X = VFNMS(LDK(KP414213562), T3w, T3x);
Chris@19 178 T3y = VFMA(LDK(KP414213562), T3x, T3w);
Chris@19 179 T2Z = VFMA(LDK(KP414213562), T1K, T1N);
Chris@19 180 T1O = VFNMS(LDK(KP414213562), T1N, T1K);
Chris@19 181 T53 = VADD(T4v, T4u);
Chris@19 182 T4w = VSUB(T4u, T4v);
Chris@19 183 T1s = LD(&(ii[WS(is, 22)]), ivs, &(ii[0]));
Chris@19 184 }
Chris@19 185 }
Chris@19 186 T4x = VSUB(Tp, Ts);
Chris@19 187 Tt = VADD(Tp, Ts);
Chris@19 188 T1R = VSUB(T1P, T1Q);
Chris@19 189 T3z = VADD(T1Q, T1P);
Chris@19 190 }
Chris@19 191 {
Chris@19 192 V T4S, T5G, T2y, T2L, T4V, T5H, T2D, T2M;
Chris@19 193 {
Chris@19 194 V T2G, TN, T4N, T2r, T2s, TQ, T2A, T4O, T2J, T2x, TU, T4T, T2w, T2z, TX;
Chris@19 195 V T2B, T2H, T2I, TR;
Chris@19 196 {
Chris@19 197 V TL, TM, T2p, T1T, T1t, T2q;
Chris@19 198 TL = LD(&(ri[WS(is, 31)]), ivs, &(ri[WS(is, 1)]));
Chris@19 199 TM = LD(&(ri[WS(is, 15)]), ivs, &(ri[WS(is, 1)]));
Chris@19 200 T2p = LD(&(ii[WS(is, 31)]), ivs, &(ii[WS(is, 1)]));
Chris@19 201 T1T = VSUB(T1r, T1s);
Chris@19 202 T1t = VADD(T1r, T1s);
Chris@19 203 T2q = LD(&(ii[WS(is, 15)]), ivs, &(ii[WS(is, 1)]));
Chris@19 204 {
Chris@19 205 V TO, TP, T3A, T1U, T4y;
Chris@19 206 TO = LD(&(ri[WS(is, 7)]), ivs, &(ri[WS(is, 1)]));
Chris@19 207 TP = LD(&(ri[WS(is, 23)]), ivs, &(ri[WS(is, 1)]));
Chris@19 208 T2H = LD(&(ii[WS(is, 7)]), ivs, &(ii[WS(is, 1)]));
Chris@19 209 T2G = VSUB(TL, TM);
Chris@19 210 TN = VADD(TL, TM);
Chris@19 211 T3A = VSUB(T1S, T1T);
Chris@19 212 T1U = VADD(T1S, T1T);
Chris@19 213 T4y = VSUB(T1q, T1t);
Chris@19 214 T1u = VADD(T1q, T1t);
Chris@19 215 T4N = VADD(T2p, T2q);
Chris@19 216 T2r = VSUB(T2p, T2q);
Chris@19 217 T2s = VSUB(TO, TP);
Chris@19 218 TQ = VADD(TO, TP);
Chris@19 219 T3W = VFMA(LDK(KP414213562), T3z, T3A);
Chris@19 220 T3B = VFNMS(LDK(KP414213562), T3A, T3z);
Chris@19 221 T2Y = VFNMS(LDK(KP414213562), T1R, T1U);
Chris@19 222 T1V = VFMA(LDK(KP414213562), T1U, T1R);
Chris@19 223 T52 = VSUB(T4x, T4y);
Chris@19 224 T4z = VADD(T4x, T4y);
Chris@19 225 T2I = LD(&(ii[WS(is, 23)]), ivs, &(ii[WS(is, 1)]));
Chris@19 226 }
Chris@19 227 }
Chris@19 228 {
Chris@19 229 V TS, TT, T2u, T2v, TV, TW;
Chris@19 230 TS = LD(&(ri[WS(is, 3)]), ivs, &(ri[WS(is, 1)]));
Chris@19 231 TT = LD(&(ri[WS(is, 19)]), ivs, &(ri[WS(is, 1)]));
Chris@19 232 T2u = LD(&(ii[WS(is, 3)]), ivs, &(ii[WS(is, 1)]));
Chris@19 233 T2v = LD(&(ii[WS(is, 19)]), ivs, &(ii[WS(is, 1)]));
Chris@19 234 TV = LD(&(ri[WS(is, 27)]), ivs, &(ri[WS(is, 1)]));
Chris@19 235 TW = LD(&(ri[WS(is, 11)]), ivs, &(ri[WS(is, 1)]));
Chris@19 236 T2A = LD(&(ii[WS(is, 27)]), ivs, &(ii[WS(is, 1)]));
Chris@19 237 T4O = VADD(T2H, T2I);
Chris@19 238 T2J = VSUB(T2H, T2I);
Chris@19 239 T2x = VSUB(TS, TT);
Chris@19 240 TU = VADD(TS, TT);
Chris@19 241 T4T = VADD(T2u, T2v);
Chris@19 242 T2w = VSUB(T2u, T2v);
Chris@19 243 T2z = VSUB(TV, TW);
Chris@19 244 TX = VADD(TV, TW);
Chris@19 245 T2B = LD(&(ii[WS(is, 11)]), ivs, &(ii[WS(is, 1)]));
Chris@19 246 }
Chris@19 247 T3O = VADD(T2s, T2r);
Chris@19 248 T2t = VSUB(T2r, T2s);
Chris@19 249 T3L = VSUB(T2G, T2J);
Chris@19 250 T2K = VADD(T2G, T2J);
Chris@19 251 T4S = VSUB(TN, TQ);
Chris@19 252 TR = VADD(TN, TQ);
Chris@19 253 {
Chris@19 254 V T4P, T4Q, TY, T4U, T2C;
Chris@19 255 T5G = VADD(T4N, T4O);
Chris@19 256 T4P = VSUB(T4N, T4O);
Chris@19 257 T4Q = VSUB(TX, TU);
Chris@19 258 TY = VADD(TU, TX);
Chris@19 259 T4U = VADD(T2A, T2B);
Chris@19 260 T2C = VSUB(T2A, T2B);
Chris@19 261 T2y = VSUB(T2w, T2x);
Chris@19 262 T2L = VADD(T2x, T2w);
Chris@19 263 TZ = VADD(TR, TY);
Chris@19 264 T5F = VSUB(TR, TY);
Chris@19 265 T4V = VSUB(T4T, T4U);
Chris@19 266 T5H = VADD(T4T, T4U);
Chris@19 267 T2D = VADD(T2z, T2C);
Chris@19 268 T2M = VSUB(T2z, T2C);
Chris@19 269 T4R = VSUB(T4P, T4Q);
Chris@19 270 T5k = VADD(T4Q, T4P);
Chris@19 271 }
Chris@19 272 }
Chris@19 273 {
Chris@19 274 V T2f, Ty, T23, T4C, T20, T21, TB, T4D, T2i, T26, TF, T24, TG, TH, T29;
Chris@19 275 V T2a;
Chris@19 276 {
Chris@19 277 V T1Y, T1Z, Tz, TA, T2g, T2h, Tw, Tx, TD, TE;
Chris@19 278 Tw = LD(&(ri[WS(is, 1)]), ivs, &(ri[WS(is, 1)]));
Chris@19 279 Tx = LD(&(ri[WS(is, 17)]), ivs, &(ri[WS(is, 1)]));
Chris@19 280 T5j = VADD(T4S, T4V);
Chris@19 281 T4W = VSUB(T4S, T4V);
Chris@19 282 T5I = VSUB(T5G, T5H);
Chris@19 283 T5X = VADD(T5G, T5H);
Chris@19 284 T2E = VADD(T2y, T2D);
Chris@19 285 T3M = VSUB(T2D, T2y);
Chris@19 286 T2N = VADD(T2L, T2M);
Chris@19 287 T3P = VSUB(T2L, T2M);
Chris@19 288 T2f = VSUB(Tw, Tx);
Chris@19 289 Ty = VADD(Tw, Tx);
Chris@19 290 T1Y = LD(&(ii[WS(is, 1)]), ivs, &(ii[WS(is, 1)]));
Chris@19 291 T1Z = LD(&(ii[WS(is, 17)]), ivs, &(ii[WS(is, 1)]));
Chris@19 292 Tz = LD(&(ri[WS(is, 9)]), ivs, &(ri[WS(is, 1)]));
Chris@19 293 TA = LD(&(ri[WS(is, 25)]), ivs, &(ri[WS(is, 1)]));
Chris@19 294 T2g = LD(&(ii[WS(is, 9)]), ivs, &(ii[WS(is, 1)]));
Chris@19 295 T2h = LD(&(ii[WS(is, 25)]), ivs, &(ii[WS(is, 1)]));
Chris@19 296 TD = LD(&(ri[WS(is, 5)]), ivs, &(ri[WS(is, 1)]));
Chris@19 297 TE = LD(&(ri[WS(is, 21)]), ivs, &(ri[WS(is, 1)]));
Chris@19 298 T23 = LD(&(ii[WS(is, 5)]), ivs, &(ii[WS(is, 1)]));
Chris@19 299 T4C = VADD(T1Y, T1Z);
Chris@19 300 T20 = VSUB(T1Y, T1Z);
Chris@19 301 T21 = VSUB(Tz, TA);
Chris@19 302 TB = VADD(Tz, TA);
Chris@19 303 T4D = VADD(T2g, T2h);
Chris@19 304 T2i = VSUB(T2g, T2h);
Chris@19 305 T26 = VSUB(TD, TE);
Chris@19 306 TF = VADD(TD, TE);
Chris@19 307 T24 = LD(&(ii[WS(is, 21)]), ivs, &(ii[WS(is, 1)]));
Chris@19 308 TG = LD(&(ri[WS(is, 29)]), ivs, &(ri[WS(is, 1)]));
Chris@19 309 TH = LD(&(ri[WS(is, 13)]), ivs, &(ri[WS(is, 1)]));
Chris@19 310 T29 = LD(&(ii[WS(is, 29)]), ivs, &(ii[WS(is, 1)]));
Chris@19 311 T2a = LD(&(ii[WS(is, 13)]), ivs, &(ii[WS(is, 1)]));
Chris@19 312 }
Chris@19 313 {
Chris@19 314 V T4I, T25, T28, TI, T4J, T2b, T4H, TC, T5B, T4E;
Chris@19 315 T3H = VADD(T21, T20);
Chris@19 316 T22 = VSUB(T20, T21);
Chris@19 317 T3E = VSUB(T2f, T2i);
Chris@19 318 T2j = VADD(T2f, T2i);
Chris@19 319 T4I = VADD(T23, T24);
Chris@19 320 T25 = VSUB(T23, T24);
Chris@19 321 T28 = VSUB(TG, TH);
Chris@19 322 TI = VADD(TG, TH);
Chris@19 323 T4J = VADD(T29, T2a);
Chris@19 324 T2b = VSUB(T29, T2a);
Chris@19 325 T4H = VSUB(Ty, TB);
Chris@19 326 TC = VADD(Ty, TB);
Chris@19 327 T5B = VADD(T4C, T4D);
Chris@19 328 T4E = VSUB(T4C, T4D);
Chris@19 329 {
Chris@19 330 V T27, T2k, TJ, T4F, T4K, T5C, T2c, T2l;
Chris@19 331 T27 = VSUB(T25, T26);
Chris@19 332 T2k = VADD(T26, T25);
Chris@19 333 TJ = VADD(TF, TI);
Chris@19 334 T4F = VSUB(TI, TF);
Chris@19 335 T4K = VSUB(T4I, T4J);
Chris@19 336 T5C = VADD(T4I, T4J);
Chris@19 337 T2c = VADD(T28, T2b);
Chris@19 338 T2l = VSUB(T28, T2b);
Chris@19 339 T4G = VSUB(T4E, T4F);
Chris@19 340 T5h = VADD(T4F, T4E);
Chris@19 341 TK = VADD(TC, TJ);
Chris@19 342 T5A = VSUB(TC, TJ);
Chris@19 343 T5D = VSUB(T5B, T5C);
Chris@19 344 T5W = VADD(T5B, T5C);
Chris@19 345 T2d = VADD(T27, T2c);
Chris@19 346 T3F = VSUB(T2c, T27);
Chris@19 347 T4L = VSUB(T4H, T4K);
Chris@19 348 T5g = VADD(T4H, T4K);
Chris@19 349 T3I = VSUB(T2k, T2l);
Chris@19 350 T2m = VADD(T2k, T2l);
Chris@19 351 }
Chris@19 352 }
Chris@19 353 }
Chris@19 354 }
Chris@19 355 }
Chris@19 356 }
Chris@19 357 {
Chris@19 358 V T1v, T1g, T5V, Tv, T60, T5Y, T11, T10;
Chris@19 359 {
Chris@19 360 V T5o, T5n, T5i, T5r, T5f, T5l, T5w, T5u;
Chris@19 361 {
Chris@19 362 V T5d, T4t, T4A, T4X, T58, T51, T4M, T59, T54, T5e, T5b, T4B;
Chris@19 363 T5d = VADD(T4r, T4s);
Chris@19 364 T4t = VSUB(T4r, T4s);
Chris@19 365 T4A = VSUB(T4w, T4z);
Chris@19 366 T5o = VADD(T4w, T4z);
Chris@19 367 T4X = VFNMS(LDK(KP414213562), T4W, T4R);
Chris@19 368 T58 = VFMA(LDK(KP414213562), T4R, T4W);
Chris@19 369 T5n = VADD(T50, T4Z);
Chris@19 370 T51 = VSUB(T4Z, T50);
Chris@19 371 T4M = VFMA(LDK(KP414213562), T4L, T4G);
Chris@19 372 T59 = VFNMS(LDK(KP414213562), T4G, T4L);
Chris@19 373 T54 = VSUB(T52, T53);
Chris@19 374 T5e = VADD(T53, T52);
Chris@19 375 T5b = VFNMS(LDK(KP707106781), T4A, T4t);
Chris@19 376 T4B = VFMA(LDK(KP707106781), T4A, T4t);
Chris@19 377 {
Chris@19 378 V T5s, T56, T4Y, T5c, T5a, T57, T55, T5t;
Chris@19 379 T5i = VFMA(LDK(KP414213562), T5h, T5g);
Chris@19 380 T5s = VFNMS(LDK(KP414213562), T5g, T5h);
Chris@19 381 T56 = VADD(T4M, T4X);
Chris@19 382 T4Y = VSUB(T4M, T4X);
Chris@19 383 T5c = VADD(T59, T58);
Chris@19 384 T5a = VSUB(T58, T59);
Chris@19 385 T57 = VFMA(LDK(KP707106781), T54, T51);
Chris@19 386 T55 = VFNMS(LDK(KP707106781), T54, T51);
Chris@19 387 T5r = VFNMS(LDK(KP707106781), T5e, T5d);
Chris@19 388 T5f = VFMA(LDK(KP707106781), T5e, T5d);
Chris@19 389 T5t = VFMA(LDK(KP414213562), T5j, T5k);
Chris@19 390 T5l = VFNMS(LDK(KP414213562), T5k, T5j);
Chris@19 391 T61 = VFMA(LDK(KP923879532), T4Y, T4B);
Chris@19 392 STM4(&(ro[6]), T61, ovs, &(ro[0]));
Chris@19 393 T62 = VFNMS(LDK(KP923879532), T4Y, T4B);
Chris@19 394 STM4(&(ro[22]), T62, ovs, &(ro[0]));
Chris@19 395 T63 = VFMA(LDK(KP923879532), T5c, T5b);
Chris@19 396 STM4(&(ro[30]), T63, ovs, &(ro[0]));
Chris@19 397 T64 = VFNMS(LDK(KP923879532), T5c, T5b);
Chris@19 398 STM4(&(ro[14]), T64, ovs, &(ro[0]));
Chris@19 399 T65 = VFMA(LDK(KP923879532), T5a, T57);
Chris@19 400 STM4(&(io[6]), T65, ovs, &(io[0]));
Chris@19 401 T66 = VFNMS(LDK(KP923879532), T5a, T57);
Chris@19 402 STM4(&(io[22]), T66, ovs, &(io[0]));
Chris@19 403 T67 = VFMA(LDK(KP923879532), T56, T55);
Chris@19 404 STM4(&(io[30]), T67, ovs, &(io[0]));
Chris@19 405 T68 = VFNMS(LDK(KP923879532), T56, T55);
Chris@19 406 STM4(&(io[14]), T68, ovs, &(io[0]));
Chris@19 407 T5w = VADD(T5s, T5t);
Chris@19 408 T5u = VSUB(T5s, T5t);
Chris@19 409 }
Chris@19 410 }
Chris@19 411 {
Chris@19 412 V Tf, T5P, T5z, T5S, T5U, T5O, T5K, T5L, T5M, Tu, T5T, T5N;
Chris@19 413 {
Chris@19 414 V T5E, T5Q, T5q, T5m, T5v, T5p, T5R, T5J, T5x, T5y;
Chris@19 415 Tf = VADD(T7, Te);
Chris@19 416 T5x = VSUB(T7, Te);
Chris@19 417 T5y = VSUB(T1n, T1u);
Chris@19 418 T1v = VADD(T1n, T1u);
Chris@19 419 T69 = VFMA(LDK(KP923879532), T5u, T5r);
Chris@19 420 STM4(&(ro[10]), T69, ovs, &(ro[0]));
Chris@19 421 T6a = VFNMS(LDK(KP923879532), T5u, T5r);
Chris@19 422 STM4(&(ro[26]), T6a, ovs, &(ro[0]));
Chris@19 423 T5E = VADD(T5A, T5D);
Chris@19 424 T5Q = VSUB(T5D, T5A);
Chris@19 425 T5q = VSUB(T5l, T5i);
Chris@19 426 T5m = VADD(T5i, T5l);
Chris@19 427 T5v = VFMA(LDK(KP707106781), T5o, T5n);
Chris@19 428 T5p = VFNMS(LDK(KP707106781), T5o, T5n);
Chris@19 429 T5P = VSUB(T5x, T5y);
Chris@19 430 T5z = VADD(T5x, T5y);
Chris@19 431 T5R = VADD(T5F, T5I);
Chris@19 432 T5J = VSUB(T5F, T5I);
Chris@19 433 T6b = VFMA(LDK(KP923879532), T5m, T5f);
Chris@19 434 STM4(&(ro[2]), T6b, ovs, &(ro[0]));
Chris@19 435 T6c = VFNMS(LDK(KP923879532), T5m, T5f);
Chris@19 436 STM4(&(ro[18]), T6c, ovs, &(ro[0]));
Chris@19 437 T6d = VFMA(LDK(KP923879532), T5w, T5v);
Chris@19 438 STM4(&(io[2]), T6d, ovs, &(io[0]));
Chris@19 439 T6e = VFNMS(LDK(KP923879532), T5w, T5v);
Chris@19 440 STM4(&(io[18]), T6e, ovs, &(io[0]));
Chris@19 441 T6f = VFMA(LDK(KP923879532), T5q, T5p);
Chris@19 442 STM4(&(io[10]), T6f, ovs, &(io[0]));
Chris@19 443 T6g = VFNMS(LDK(KP923879532), T5q, T5p);
Chris@19 444 STM4(&(io[26]), T6g, ovs, &(io[0]));
Chris@19 445 T5S = VSUB(T5Q, T5R);
Chris@19 446 T5U = VADD(T5Q, T5R);
Chris@19 447 T5O = VSUB(T5J, T5E);
Chris@19 448 T5K = VADD(T5E, T5J);
Chris@19 449 T1g = VADD(T18, T1f);
Chris@19 450 T5L = VSUB(T18, T1f);
Chris@19 451 T5M = VSUB(Tt, Tm);
Chris@19 452 Tu = VADD(Tm, Tt);
Chris@19 453 }
Chris@19 454 T6h = VFMA(LDK(KP707106781), T5S, T5P);
Chris@19 455 STM4(&(ro[12]), T6h, ovs, &(ro[0]));
Chris@19 456 T6i = VFNMS(LDK(KP707106781), T5S, T5P);
Chris@19 457 STM4(&(ro[28]), T6i, ovs, &(ro[0]));
Chris@19 458 T6j = VFMA(LDK(KP707106781), T5K, T5z);
Chris@19 459 STM4(&(ro[4]), T6j, ovs, &(ro[0]));
Chris@19 460 T6k = VFNMS(LDK(KP707106781), T5K, T5z);
Chris@19 461 STM4(&(ro[20]), T6k, ovs, &(ro[0]));
Chris@19 462 T5T = VADD(T5M, T5L);
Chris@19 463 T5N = VSUB(T5L, T5M);
Chris@19 464 T5V = VSUB(Tf, Tu);
Chris@19 465 Tv = VADD(Tf, Tu);
Chris@19 466 T6l = VFMA(LDK(KP707106781), T5U, T5T);
Chris@19 467 STM4(&(io[4]), T6l, ovs, &(io[0]));
Chris@19 468 T6m = VFNMS(LDK(KP707106781), T5U, T5T);
Chris@19 469 STM4(&(io[20]), T6m, ovs, &(io[0]));
Chris@19 470 T6n = VFMA(LDK(KP707106781), T5O, T5N);
Chris@19 471 STM4(&(io[12]), T6n, ovs, &(io[0]));
Chris@19 472 T6o = VFNMS(LDK(KP707106781), T5O, T5N);
Chris@19 473 STM4(&(io[28]), T6o, ovs, &(io[0]));
Chris@19 474 T60 = VADD(T5W, T5X);
Chris@19 475 T5Y = VSUB(T5W, T5X);
Chris@19 476 T11 = VSUB(TZ, TK);
Chris@19 477 T10 = VADD(TK, TZ);
Chris@19 478 }
Chris@19 479 }
Chris@19 480 {
Chris@19 481 V T39, T3k, T3j, T3a, T1X, T37, T33, T31, T3d, T3c, T47, T4i, T4h, T48, T4b;
Chris@19 482 V T4a, T4e, T3N, T41, T3D, T45, T3Z, T38, T36, T32, T2Q, T42, T3K, T3Q, T4d;
Chris@19 483 {
Chris@19 484 V T2e, T2n, T2F, T2O, T1w, T5Z;
Chris@19 485 {
Chris@19 486 V T1H, T1W, T2X, T30;
Chris@19 487 T39 = VFMA(LDK(KP707106781), T1G, T1z);
Chris@19 488 T1H = VFNMS(LDK(KP707106781), T1G, T1z);
Chris@19 489 T1W = VSUB(T1O, T1V);
Chris@19 490 T3k = VADD(T1O, T1V);
Chris@19 491 T3j = VFMA(LDK(KP707106781), T2W, T2T);
Chris@19 492 T2X = VFNMS(LDK(KP707106781), T2W, T2T);
Chris@19 493 T30 = VSUB(T2Y, T2Z);
Chris@19 494 T3a = VADD(T2Z, T2Y);
Chris@19 495 T6p = VSUB(T5V, T5Y);
Chris@19 496 STM4(&(ro[24]), T6p, ovs, &(ro[0]));
Chris@19 497 T6q = VADD(T5V, T5Y);
Chris@19 498 STM4(&(ro[8]), T6q, ovs, &(ro[0]));
Chris@19 499 T6r = VADD(Tv, T10);
Chris@19 500 STM4(&(ro[0]), T6r, ovs, &(ro[0]));
Chris@19 501 T6s = VSUB(Tv, T10);
Chris@19 502 STM4(&(ro[16]), T6s, ovs, &(ro[0]));
Chris@19 503 T1w = VSUB(T1g, T1v);
Chris@19 504 T5Z = VADD(T1g, T1v);
Chris@19 505 T1X = VFMA(LDK(KP923879532), T1W, T1H);
Chris@19 506 T37 = VFNMS(LDK(KP923879532), T1W, T1H);
Chris@19 507 T33 = VFMA(LDK(KP923879532), T30, T2X);
Chris@19 508 T31 = VFNMS(LDK(KP923879532), T30, T2X);
Chris@19 509 }
Chris@19 510 T3d = VFMA(LDK(KP707106781), T2d, T22);
Chris@19 511 T2e = VFNMS(LDK(KP707106781), T2d, T22);
Chris@19 512 T2n = VFNMS(LDK(KP707106781), T2m, T2j);
Chris@19 513 T3c = VFMA(LDK(KP707106781), T2m, T2j);
Chris@19 514 T6t = VADD(T5Z, T60);
Chris@19 515 STM4(&(io[0]), T6t, ovs, &(io[0]));
Chris@19 516 T6u = VSUB(T5Z, T60);
Chris@19 517 STM4(&(io[16]), T6u, ovs, &(io[0]));
Chris@19 518 T6v = VSUB(T1w, T11);
Chris@19 519 STM4(&(io[24]), T6v, ovs, &(io[0]));
Chris@19 520 T6w = VADD(T11, T1w);
Chris@19 521 STM4(&(io[8]), T6w, ovs, &(io[0]));
Chris@19 522 T3g = VFMA(LDK(KP707106781), T2E, T2t);
Chris@19 523 T2F = VFNMS(LDK(KP707106781), T2E, T2t);
Chris@19 524 T2O = VFNMS(LDK(KP707106781), T2N, T2K);
Chris@19 525 T3f = VFMA(LDK(KP707106781), T2N, T2K);
Chris@19 526 {
Chris@19 527 V T3v, T35, T2o, T3C, T3V, T3Y;
Chris@19 528 T47 = VFNMS(LDK(KP707106781), T3u, T3t);
Chris@19 529 T3v = VFMA(LDK(KP707106781), T3u, T3t);
Chris@19 530 T35 = VFNMS(LDK(KP668178637), T2e, T2n);
Chris@19 531 T2o = VFMA(LDK(KP668178637), T2n, T2e);
Chris@19 532 T3C = VSUB(T3y, T3B);
Chris@19 533 T4i = VADD(T3y, T3B);
Chris@19 534 T4h = VFNMS(LDK(KP707106781), T3U, T3T);
Chris@19 535 T3V = VFMA(LDK(KP707106781), T3U, T3T);
Chris@19 536 T3Y = VSUB(T3W, T3X);
Chris@19 537 T48 = VADD(T3X, T3W);
Chris@19 538 {
Chris@19 539 V T3G, T34, T2P, T3J;
Chris@19 540 T4b = VFMA(LDK(KP707106781), T3F, T3E);
Chris@19 541 T3G = VFNMS(LDK(KP707106781), T3F, T3E);
Chris@19 542 T34 = VFMA(LDK(KP668178637), T2F, T2O);
Chris@19 543 T2P = VFNMS(LDK(KP668178637), T2O, T2F);
Chris@19 544 T3J = VFNMS(LDK(KP707106781), T3I, T3H);
Chris@19 545 T4a = VFMA(LDK(KP707106781), T3I, T3H);
Chris@19 546 T4e = VFMA(LDK(KP707106781), T3M, T3L);
Chris@19 547 T3N = VFNMS(LDK(KP707106781), T3M, T3L);
Chris@19 548 T41 = VFNMS(LDK(KP923879532), T3C, T3v);
Chris@19 549 T3D = VFMA(LDK(KP923879532), T3C, T3v);
Chris@19 550 T45 = VFMA(LDK(KP923879532), T3Y, T3V);
Chris@19 551 T3Z = VFNMS(LDK(KP923879532), T3Y, T3V);
Chris@19 552 T38 = VADD(T35, T34);
Chris@19 553 T36 = VSUB(T34, T35);
Chris@19 554 T32 = VADD(T2o, T2P);
Chris@19 555 T2Q = VSUB(T2o, T2P);
Chris@19 556 T42 = VFNMS(LDK(KP668178637), T3G, T3J);
Chris@19 557 T3K = VFMA(LDK(KP668178637), T3J, T3G);
Chris@19 558 T3Q = VFNMS(LDK(KP707106781), T3P, T3O);
Chris@19 559 T4d = VFMA(LDK(KP707106781), T3P, T3O);
Chris@19 560 }
Chris@19 561 }
Chris@19 562 }
Chris@19 563 {
Chris@19 564 V T4n, T4c, T43, T3R, T4m, T4f;
Chris@19 565 T6x = VFMA(LDK(KP831469612), T38, T37);
Chris@19 566 STM4(&(ro[29]), T6x, ovs, &(ro[1]));
Chris@19 567 T6y = VFNMS(LDK(KP831469612), T38, T37);
Chris@19 568 STM4(&(ro[13]), T6y, ovs, &(ro[1]));
Chris@19 569 T6z = VFMA(LDK(KP831469612), T36, T33);
Chris@19 570 STM4(&(io[5]), T6z, ovs, &(io[1]));
Chris@19 571 T6A = VFNMS(LDK(KP831469612), T36, T33);
Chris@19 572 STM4(&(io[21]), T6A, ovs, &(io[1]));
Chris@19 573 T6B = VFMA(LDK(KP831469612), T32, T31);
Chris@19 574 STM4(&(io[29]), T6B, ovs, &(io[1]));
Chris@19 575 T6C = VFNMS(LDK(KP831469612), T32, T31);
Chris@19 576 STM4(&(io[13]), T6C, ovs, &(io[1]));
Chris@19 577 T6D = VFMA(LDK(KP831469612), T2Q, T1X);
Chris@19 578 STM4(&(ro[5]), T6D, ovs, &(ro[1]));
Chris@19 579 T6E = VFNMS(LDK(KP831469612), T2Q, T1X);
Chris@19 580 STM4(&(ro[21]), T6E, ovs, &(ro[1]));
Chris@19 581 T43 = VFMA(LDK(KP668178637), T3N, T3Q);
Chris@19 582 T3R = VFNMS(LDK(KP668178637), T3Q, T3N);
Chris@19 583 {
Chris@19 584 V T44, T46, T40, T3S;
Chris@19 585 T44 = VSUB(T42, T43);
Chris@19 586 T46 = VADD(T42, T43);
Chris@19 587 T40 = VSUB(T3R, T3K);
Chris@19 588 T3S = VADD(T3K, T3R);
Chris@19 589 T4p = VFMA(LDK(KP923879532), T48, T47);
Chris@19 590 T49 = VFNMS(LDK(KP923879532), T48, T47);
Chris@19 591 T4l = VFNMS(LDK(KP923879532), T4i, T4h);
Chris@19 592 T4j = VFMA(LDK(KP923879532), T4i, T4h);
Chris@19 593 T4n = VFNMS(LDK(KP198912367), T4a, T4b);
Chris@19 594 T4c = VFMA(LDK(KP198912367), T4b, T4a);
Chris@19 595 T6F = VFMA(LDK(KP831469612), T44, T41);
Chris@19 596 STM4(&(ro[11]), T6F, ovs, &(ro[1]));
Chris@19 597 T6G = VFNMS(LDK(KP831469612), T44, T41);
Chris@19 598 STM4(&(ro[27]), T6G, ovs, &(ro[1]));
Chris@19 599 T6H = VFMA(LDK(KP831469612), T46, T45);
Chris@19 600 STM4(&(io[3]), T6H, ovs, &(io[1]));
Chris@19 601 T6I = VFNMS(LDK(KP831469612), T46, T45);
Chris@19 602 STM4(&(io[19]), T6I, ovs, &(io[1]));
Chris@19 603 T6J = VFMA(LDK(KP831469612), T40, T3Z);
Chris@19 604 STM4(&(io[11]), T6J, ovs, &(io[1]));
Chris@19 605 T6K = VFNMS(LDK(KP831469612), T40, T3Z);
Chris@19 606 STM4(&(io[27]), T6K, ovs, &(io[1]));
Chris@19 607 T6L = VFMA(LDK(KP831469612), T3S, T3D);
Chris@19 608 STM4(&(ro[3]), T6L, ovs, &(ro[1]));
Chris@19 609 T6M = VFNMS(LDK(KP831469612), T3S, T3D);
Chris@19 610 STM4(&(ro[19]), T6M, ovs, &(ro[1]));
Chris@19 611 }
Chris@19 612 T4m = VFMA(LDK(KP198912367), T4d, T4e);
Chris@19 613 T4f = VFNMS(LDK(KP198912367), T4e, T4d);
Chris@19 614 T3n = VFNMS(LDK(KP923879532), T3a, T39);
Chris@19 615 T3b = VFMA(LDK(KP923879532), T3a, T39);
Chris@19 616 T3r = VFMA(LDK(KP923879532), T3k, T3j);
Chris@19 617 T3l = VFNMS(LDK(KP923879532), T3k, T3j);
Chris@19 618 T3o = VFNMS(LDK(KP198912367), T3c, T3d);
Chris@19 619 T3e = VFMA(LDK(KP198912367), T3d, T3c);
Chris@19 620 T4q = VADD(T4n, T4m);
Chris@19 621 T4o = VSUB(T4m, T4n);
Chris@19 622 T4k = VADD(T4c, T4f);
Chris@19 623 T4g = VSUB(T4c, T4f);
Chris@19 624 }
Chris@19 625 }
Chris@19 626 }
Chris@19 627 }
Chris@19 628 {
Chris@19 629 V T6N, T6O, T6P, T6Q;
Chris@19 630 T6N = VFMA(LDK(KP980785280), T4q, T4p);
Chris@19 631 STM4(&(ro[31]), T6N, ovs, &(ro[1]));
Chris@19 632 STN4(&(ro[28]), T6i, T6x, T63, T6N, ovs);
Chris@19 633 T6O = VFNMS(LDK(KP980785280), T4q, T4p);
Chris@19 634 STM4(&(ro[15]), T6O, ovs, &(ro[1]));
Chris@19 635 STN4(&(ro[12]), T6h, T6y, T64, T6O, ovs);
Chris@19 636 T6P = VFMA(LDK(KP980785280), T4o, T4l);
Chris@19 637 STM4(&(io[7]), T6P, ovs, &(io[1]));
Chris@19 638 STN4(&(io[4]), T6l, T6z, T65, T6P, ovs);
Chris@19 639 T6Q = VFNMS(LDK(KP980785280), T4o, T4l);
Chris@19 640 STM4(&(io[23]), T6Q, ovs, &(io[1]));
Chris@19 641 STN4(&(io[20]), T6m, T6A, T66, T6Q, ovs);
Chris@19 642 {
Chris@19 643 V T6R, T6S, T6T, T6U;
Chris@19 644 T6R = VFMA(LDK(KP980785280), T4k, T4j);
Chris@19 645 STM4(&(io[31]), T6R, ovs, &(io[1]));
Chris@19 646 STN4(&(io[28]), T6o, T6B, T67, T6R, ovs);
Chris@19 647 T6S = VFNMS(LDK(KP980785280), T4k, T4j);
Chris@19 648 STM4(&(io[15]), T6S, ovs, &(io[1]));
Chris@19 649 STN4(&(io[12]), T6n, T6C, T68, T6S, ovs);
Chris@19 650 T6T = VFMA(LDK(KP980785280), T4g, T49);
Chris@19 651 STM4(&(ro[7]), T6T, ovs, &(ro[1]));
Chris@19 652 STN4(&(ro[4]), T6j, T6D, T61, T6T, ovs);
Chris@19 653 T6U = VFNMS(LDK(KP980785280), T4g, T49);
Chris@19 654 STM4(&(ro[23]), T6U, ovs, &(ro[1]));
Chris@19 655 STN4(&(ro[20]), T6k, T6E, T62, T6U, ovs);
Chris@19 656 T3h = VFNMS(LDK(KP198912367), T3g, T3f);
Chris@19 657 T3p = VFMA(LDK(KP198912367), T3f, T3g);
Chris@19 658 }
Chris@19 659 }
Chris@19 660 {
Chris@19 661 V T3s, T3q, T3i, T3m;
Chris@19 662 T3s = VADD(T3o, T3p);
Chris@19 663 T3q = VSUB(T3o, T3p);
Chris@19 664 T3i = VADD(T3e, T3h);
Chris@19 665 T3m = VSUB(T3h, T3e);
Chris@19 666 {
Chris@19 667 V T6V, T6W, T6X, T6Y;
Chris@19 668 T6V = VFMA(LDK(KP980785280), T3q, T3n);
Chris@19 669 STM4(&(ro[9]), T6V, ovs, &(ro[1]));
Chris@19 670 STN4(&(ro[8]), T6q, T6V, T69, T6F, ovs);
Chris@19 671 T6W = VFNMS(LDK(KP980785280), T3q, T3n);
Chris@19 672 STM4(&(ro[25]), T6W, ovs, &(ro[1]));
Chris@19 673 STN4(&(ro[24]), T6p, T6W, T6a, T6G, ovs);
Chris@19 674 T6X = VFMA(LDK(KP980785280), T3s, T3r);
Chris@19 675 STM4(&(io[1]), T6X, ovs, &(io[1]));
Chris@19 676 STN4(&(io[0]), T6t, T6X, T6d, T6H, ovs);
Chris@19 677 T6Y = VFNMS(LDK(KP980785280), T3s, T3r);
Chris@19 678 STM4(&(io[17]), T6Y, ovs, &(io[1]));
Chris@19 679 STN4(&(io[16]), T6u, T6Y, T6e, T6I, ovs);
Chris@19 680 {
Chris@19 681 V T6Z, T70, T71, T72;
Chris@19 682 T6Z = VFMA(LDK(KP980785280), T3m, T3l);
Chris@19 683 STM4(&(io[9]), T6Z, ovs, &(io[1]));
Chris@19 684 STN4(&(io[8]), T6w, T6Z, T6f, T6J, ovs);
Chris@19 685 T70 = VFNMS(LDK(KP980785280), T3m, T3l);
Chris@19 686 STM4(&(io[25]), T70, ovs, &(io[1]));
Chris@19 687 STN4(&(io[24]), T6v, T70, T6g, T6K, ovs);
Chris@19 688 T71 = VFMA(LDK(KP980785280), T3i, T3b);
Chris@19 689 STM4(&(ro[1]), T71, ovs, &(ro[1]));
Chris@19 690 STN4(&(ro[0]), T6r, T71, T6b, T6L, ovs);
Chris@19 691 T72 = VFNMS(LDK(KP980785280), T3i, T3b);
Chris@19 692 STM4(&(ro[17]), T72, ovs, &(ro[1]));
Chris@19 693 STN4(&(ro[16]), T6s, T72, T6c, T6M, ovs);
Chris@19 694 }
Chris@19 695 }
Chris@19 696 }
Chris@19 697 }
Chris@19 698 }
Chris@19 699 VLEAVE();
Chris@19 700 }
Chris@19 701
Chris@19 702 static const kdft_desc desc = { 32, XSIMD_STRING("n2sv_32"), {236, 0, 136, 0}, &GENUS, 0, 1, 0, 0 };
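/*
 * Hedged reading of the descriptor: 32 is the DFT size, the string the codelet
 * name, and {236, 0, 136, 0} the operation counts (additions, multiplications,
 * fused multiply/adds, other) quoted in the comment block at the top of this
 * branch; the remaining fields are planner metadata about which this file
 * gives no further information.
 */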
Chris@19 703
Chris@19 704 void XSIMD(codelet_n2sv_32) (planner *p) {
Chris@19 705 X(kdft_register) (p, n2sv_32, &desc);
Chris@19 706 }
Chris@19 707
Chris@19 708 #else /* HAVE_FMA */
Chris@19 709
Chris@19 710 /* Generated by: ../../../genfft/gen_notw.native -simd -compact -variables 4 -pipeline-latency 8 -n 32 -name n2sv_32 -with-ostride 1 -include n2s.h -store-multiple 4 */
Chris@19 711
Chris@19 712 /*
Chris@19 713 * This function contains 372 FP additions, 84 FP multiplications,
Chris@19 714 * (or, 340 additions, 52 multiplications, 32 fused multiply/add),
Chris@19 715 * 130 stack variables, 7 constants, and 144 memory accesses
Chris@19 716 */
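/*
 * As with the HAVE_FMA branch (again an inference from the stated numbers):
 * counting each fused multiply/add as one addition and one multiplication
 * gives 340 + 32 = 372 FP additions and 52 + 32 = 84 FP multiplications,
 * matching the totals above and the {340, 52, 32, 0} entry in this branch's
 * kdft_desc.
 */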
Chris@19 717 #include "n2s.h"
Chris@19 718
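/*
 * This fallback computes the same size-32 split-format DFT as the HAVE_FMA
 * variant above, but expresses the butterflies through explicit
 * VMUL(LDK(...), ...) products combined with VFMA/VFNMS, using the
 * sine/cosine pairs KP382683432/KP923879532, KP195090322/KP980785280 and
 * KP555570233/KP831469612 in place of the tangent-form constants of the FMA
 * branch (a reading of the code, not a statement from the generator).
 */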
Chris@19 719 static void n2sv_32(const R *ri, const R *ii, R *ro, R *io, stride is, stride os, INT v, INT ivs, INT ovs)
Chris@19 720 {
Chris@19 721 DVK(KP831469612, +0.831469612302545237078788377617905756738560812);
Chris@19 722 DVK(KP555570233, +0.555570233019602224742830813948532874374937191);
Chris@19 723 DVK(KP195090322, +0.195090322016128267848284868477022240927691618);
Chris@19 724 DVK(KP980785280, +0.980785280403230449126182236134239036973933731);
Chris@19 725 DVK(KP923879532, +0.923879532511286756128183189396788286822416626);
Chris@19 726 DVK(KP382683432, +0.382683432365089771728459984030398866761344562);
Chris@19 727 DVK(KP707106781, +0.707106781186547524400844362104849039284835938);
Chris@19 728 {
Chris@19 729 INT i;
Chris@19 730 for (i = v; i > 0; i = i - (2 * VL), ri = ri + ((2 * VL) * ivs), ii = ii + ((2 * VL) * ivs), ro = ro + ((2 * VL) * ovs), io = io + ((2 * VL) * ovs), MAKE_VOLATILE_STRIDE(128, is), MAKE_VOLATILE_STRIDE(128, os)) {
Chris@19 731 V T7, T4r, T4Z, T18, T1z, T3t, T3T, T2T, Te, T1f, T50, T4s, T2W, T3u, T1G;
Chris@19 732 V T3U, Tm, T1n, T1O, T2Z, T3y, T3X, T4w, T53, Tt, T1u, T1V, T2Y, T3B, T3W;
Chris@19 733 V T4z, T52, T2t, T3L, T3O, T2K, TR, TY, T5F, T5G, T5H, T5I, T4R, T5j, T2E;
Chris@19 734 V T3P, T4W, T5k, T2N, T3M, T22, T3E, T3H, T2j, TC, TJ, T5A, T5B, T5C, T5D;
Chris@19 735 V T4G, T5g, T2d, T3F, T4L, T5h, T2m, T3I;
Chris@19 736 {
Chris@19 737 V T3, T1x, T14, T2S, T6, T2R, T17, T1y;
Chris@19 738 {
Chris@19 739 V T1, T2, T12, T13;
Chris@19 740 T1 = LD(&(ri[0]), ivs, &(ri[0]));
Chris@19 741 T2 = LD(&(ri[WS(is, 16)]), ivs, &(ri[0]));
Chris@19 742 T3 = VADD(T1, T2);
Chris@19 743 T1x = VSUB(T1, T2);
Chris@19 744 T12 = LD(&(ii[0]), ivs, &(ii[0]));
Chris@19 745 T13 = LD(&(ii[WS(is, 16)]), ivs, &(ii[0]));
Chris@19 746 T14 = VADD(T12, T13);
Chris@19 747 T2S = VSUB(T12, T13);
Chris@19 748 }
Chris@19 749 {
Chris@19 750 V T4, T5, T15, T16;
Chris@19 751 T4 = LD(&(ri[WS(is, 8)]), ivs, &(ri[0]));
Chris@19 752 T5 = LD(&(ri[WS(is, 24)]), ivs, &(ri[0]));
Chris@19 753 T6 = VADD(T4, T5);
Chris@19 754 T2R = VSUB(T4, T5);
Chris@19 755 T15 = LD(&(ii[WS(is, 8)]), ivs, &(ii[0]));
Chris@19 756 T16 = LD(&(ii[WS(is, 24)]), ivs, &(ii[0]));
Chris@19 757 T17 = VADD(T15, T16);
Chris@19 758 T1y = VSUB(T15, T16);
Chris@19 759 }
Chris@19 760 T7 = VADD(T3, T6);
Chris@19 761 T4r = VSUB(T3, T6);
Chris@19 762 T4Z = VSUB(T14, T17);
Chris@19 763 T18 = VADD(T14, T17);
Chris@19 764 T1z = VSUB(T1x, T1y);
Chris@19 765 T3t = VADD(T1x, T1y);
Chris@19 766 T3T = VSUB(T2S, T2R);
Chris@19 767 T2T = VADD(T2R, T2S);
Chris@19 768 }
Chris@19 769 {
Chris@19 770 V Ta, T1B, T1b, T1A, Td, T1D, T1e, T1E;
Chris@19 771 {
Chris@19 772 V T8, T9, T19, T1a;
Chris@19 773 T8 = LD(&(ri[WS(is, 4)]), ivs, &(ri[0]));
Chris@19 774 T9 = LD(&(ri[WS(is, 20)]), ivs, &(ri[0]));
Chris@19 775 Ta = VADD(T8, T9);
Chris@19 776 T1B = VSUB(T8, T9);
Chris@19 777 T19 = LD(&(ii[WS(is, 4)]), ivs, &(ii[0]));
Chris@19 778 T1a = LD(&(ii[WS(is, 20)]), ivs, &(ii[0]));
Chris@19 779 T1b = VADD(T19, T1a);
Chris@19 780 T1A = VSUB(T19, T1a);
Chris@19 781 }
Chris@19 782 {
Chris@19 783 V Tb, Tc, T1c, T1d;
Chris@19 784 Tb = LD(&(ri[WS(is, 28)]), ivs, &(ri[0]));
Chris@19 785 Tc = LD(&(ri[WS(is, 12)]), ivs, &(ri[0]));
Chris@19 786 Td = VADD(Tb, Tc);
Chris@19 787 T1D = VSUB(Tb, Tc);
Chris@19 788 T1c = LD(&(ii[WS(is, 28)]), ivs, &(ii[0]));
Chris@19 789 T1d = LD(&(ii[WS(is, 12)]), ivs, &(ii[0]));
Chris@19 790 T1e = VADD(T1c, T1d);
Chris@19 791 T1E = VSUB(T1c, T1d);
Chris@19 792 }
Chris@19 793 Te = VADD(Ta, Td);
Chris@19 794 T1f = VADD(T1b, T1e);
Chris@19 795 T50 = VSUB(Td, Ta);
Chris@19 796 T4s = VSUB(T1b, T1e);
Chris@19 797 {
Chris@19 798 V T2U, T2V, T1C, T1F;
Chris@19 799 T2U = VSUB(T1D, T1E);
Chris@19 800 T2V = VADD(T1B, T1A);
Chris@19 801 T2W = VMUL(LDK(KP707106781), VSUB(T2U, T2V));
Chris@19 802 T3u = VMUL(LDK(KP707106781), VADD(T2V, T2U));
Chris@19 803 T1C = VSUB(T1A, T1B);
Chris@19 804 T1F = VADD(T1D, T1E);
Chris@19 805 T1G = VMUL(LDK(KP707106781), VSUB(T1C, T1F));
Chris@19 806 T3U = VMUL(LDK(KP707106781), VADD(T1C, T1F));
Chris@19 807 }
Chris@19 808 }
Chris@19 809 {
Chris@19 810 V Ti, T1L, T1j, T1J, Tl, T1I, T1m, T1M, T1K, T1N;
Chris@19 811 {
Chris@19 812 V Tg, Th, T1h, T1i;
Chris@19 813 Tg = LD(&(ri[WS(is, 2)]), ivs, &(ri[0]));
Chris@19 814 Th = LD(&(ri[WS(is, 18)]), ivs, &(ri[0]));
Chris@19 815 Ti = VADD(Tg, Th);
Chris@19 816 T1L = VSUB(Tg, Th);
Chris@19 817 T1h = LD(&(ii[WS(is, 2)]), ivs, &(ii[0]));
Chris@19 818 T1i = LD(&(ii[WS(is, 18)]), ivs, &(ii[0]));
Chris@19 819 T1j = VADD(T1h, T1i);
Chris@19 820 T1J = VSUB(T1h, T1i);
Chris@19 821 }
Chris@19 822 {
Chris@19 823 V Tj, Tk, T1k, T1l;
Chris@19 824 Tj = LD(&(ri[WS(is, 10)]), ivs, &(ri[0]));
Chris@19 825 Tk = LD(&(ri[WS(is, 26)]), ivs, &(ri[0]));
Chris@19 826 Tl = VADD(Tj, Tk);
Chris@19 827 T1I = VSUB(Tj, Tk);
Chris@19 828 T1k = LD(&(ii[WS(is, 10)]), ivs, &(ii[0]));
Chris@19 829 T1l = LD(&(ii[WS(is, 26)]), ivs, &(ii[0]));
Chris@19 830 T1m = VADD(T1k, T1l);
Chris@19 831 T1M = VSUB(T1k, T1l);
Chris@19 832 }
Chris@19 833 Tm = VADD(Ti, Tl);
Chris@19 834 T1n = VADD(T1j, T1m);
Chris@19 835 T1K = VADD(T1I, T1J);
Chris@19 836 T1N = VSUB(T1L, T1M);
Chris@19 837 T1O = VFNMS(LDK(KP923879532), T1N, VMUL(LDK(KP382683432), T1K));
Chris@19 838 T2Z = VFMA(LDK(KP923879532), T1K, VMUL(LDK(KP382683432), T1N));
Chris@19 839 {
Chris@19 840 V T3w, T3x, T4u, T4v;
Chris@19 841 T3w = VSUB(T1J, T1I);
Chris@19 842 T3x = VADD(T1L, T1M);
Chris@19 843 T3y = VFNMS(LDK(KP382683432), T3x, VMUL(LDK(KP923879532), T3w));
Chris@19 844 T3X = VFMA(LDK(KP382683432), T3w, VMUL(LDK(KP923879532), T3x));
Chris@19 845 T4u = VSUB(T1j, T1m);
Chris@19 846 T4v = VSUB(Ti, Tl);
Chris@19 847 T4w = VSUB(T4u, T4v);
Chris@19 848 T53 = VADD(T4v, T4u);
Chris@19 849 }
Chris@19 850 }
Chris@19 851 {
Chris@19 852 V Tp, T1S, T1q, T1Q, Ts, T1P, T1t, T1T, T1R, T1U;
Chris@19 853 {
Chris@19 854 V Tn, To, T1o, T1p;
Chris@19 855 Tn = LD(&(ri[WS(is, 30)]), ivs, &(ri[0]));
Chris@19 856 To = LD(&(ri[WS(is, 14)]), ivs, &(ri[0]));
Chris@19 857 Tp = VADD(Tn, To);
Chris@19 858 T1S = VSUB(Tn, To);
Chris@19 859 T1o = LD(&(ii[WS(is, 30)]), ivs, &(ii[0]));
Chris@19 860 T1p = LD(&(ii[WS(is, 14)]), ivs, &(ii[0]));
Chris@19 861 T1q = VADD(T1o, T1p);
Chris@19 862 T1Q = VSUB(T1o, T1p);
Chris@19 863 }
Chris@19 864 {
Chris@19 865 V Tq, Tr, T1r, T1s;
Chris@19 866 Tq = LD(&(ri[WS(is, 6)]), ivs, &(ri[0]));
Chris@19 867 Tr = LD(&(ri[WS(is, 22)]), ivs, &(ri[0]));
Chris@19 868 Ts = VADD(Tq, Tr);
Chris@19 869 T1P = VSUB(Tq, Tr);
Chris@19 870 T1r = LD(&(ii[WS(is, 6)]), ivs, &(ii[0]));
Chris@19 871 T1s = LD(&(ii[WS(is, 22)]), ivs, &(ii[0]));
Chris@19 872 T1t = VADD(T1r, T1s);
Chris@19 873 T1T = VSUB(T1r, T1s);
Chris@19 874 }
Chris@19 875 Tt = VADD(Tp, Ts);
Chris@19 876 T1u = VADD(T1q, T1t);
Chris@19 877 T1R = VADD(T1P, T1Q);
Chris@19 878 T1U = VSUB(T1S, T1T);
Chris@19 879 T1V = VFMA(LDK(KP382683432), T1R, VMUL(LDK(KP923879532), T1U));
Chris@19 880 T2Y = VFNMS(LDK(KP923879532), T1R, VMUL(LDK(KP382683432), T1U));
Chris@19 881 {
Chris@19 882 V T3z, T3A, T4x, T4y;
Chris@19 883 T3z = VSUB(T1Q, T1P);
Chris@19 884 T3A = VADD(T1S, T1T);
Chris@19 885 T3B = VFMA(LDK(KP923879532), T3z, VMUL(LDK(KP382683432), T3A));
Chris@19 886 T3W = VFNMS(LDK(KP382683432), T3z, VMUL(LDK(KP923879532), T3A));
Chris@19 887 T4x = VSUB(Tp, Ts);
Chris@19 888 T4y = VSUB(T1q, T1t);
Chris@19 889 T4z = VADD(T4x, T4y);
Chris@19 890 T52 = VSUB(T4x, T4y);
Chris@19 891 }
Chris@19 892 }
Chris@19 893 {
Chris@19 894 V TN, T2p, T2J, T4S, TQ, T2G, T2s, T4T, TU, T2x, T2w, T4O, TX, T2z, T2C;
Chris@19 895 V T4P;
Chris@19 896 {
Chris@19 897 V TL, TM, T2H, T2I;
Chris@19 898 TL = LD(&(ri[WS(is, 31)]), ivs, &(ri[WS(is, 1)]));
Chris@19 899 TM = LD(&(ri[WS(is, 15)]), ivs, &(ri[WS(is, 1)]));
Chris@19 900 TN = VADD(TL, TM);
Chris@19 901 T2p = VSUB(TL, TM);
Chris@19 902 T2H = LD(&(ii[WS(is, 31)]), ivs, &(ii[WS(is, 1)]));
Chris@19 903 T2I = LD(&(ii[WS(is, 15)]), ivs, &(ii[WS(is, 1)]));
Chris@19 904 T2J = VSUB(T2H, T2I);
Chris@19 905 T4S = VADD(T2H, T2I);
Chris@19 906 }
Chris@19 907 {
Chris@19 908 V TO, TP, T2q, T2r;
Chris@19 909 TO = LD(&(ri[WS(is, 7)]), ivs, &(ri[WS(is, 1)]));
Chris@19 910 TP = LD(&(ri[WS(is, 23)]), ivs, &(ri[WS(is, 1)]));
Chris@19 911 TQ = VADD(TO, TP);
Chris@19 912 T2G = VSUB(TO, TP);
Chris@19 913 T2q = LD(&(ii[WS(is, 7)]), ivs, &(ii[WS(is, 1)]));
Chris@19 914 T2r = LD(&(ii[WS(is, 23)]), ivs, &(ii[WS(is, 1)]));
Chris@19 915 T2s = VSUB(T2q, T2r);
Chris@19 916 T4T = VADD(T2q, T2r);
Chris@19 917 }
Chris@19 918 {
Chris@19 919 V TS, TT, T2u, T2v;
Chris@19 920 TS = LD(&(ri[WS(is, 3)]), ivs, &(ri[WS(is, 1)]));
Chris@19 921 TT = LD(&(ri[WS(is, 19)]), ivs, &(ri[WS(is, 1)]));
Chris@19 922 TU = VADD(TS, TT);
Chris@19 923 T2x = VSUB(TS, TT);
Chris@19 924 T2u = LD(&(ii[WS(is, 3)]), ivs, &(ii[WS(is, 1)]));
Chris@19 925 T2v = LD(&(ii[WS(is, 19)]), ivs, &(ii[WS(is, 1)]));
Chris@19 926 T2w = VSUB(T2u, T2v);
Chris@19 927 T4O = VADD(T2u, T2v);
Chris@19 928 }
Chris@19 929 {
Chris@19 930 V TV, TW, T2A, T2B;
Chris@19 931 TV = LD(&(ri[WS(is, 27)]), ivs, &(ri[WS(is, 1)]));
Chris@19 932 TW = LD(&(ri[WS(is, 11)]), ivs, &(ri[WS(is, 1)]));
Chris@19 933 TX = VADD(TV, TW);
Chris@19 934 T2z = VSUB(TV, TW);
Chris@19 935 T2A = LD(&(ii[WS(is, 27)]), ivs, &(ii[WS(is, 1)]));
Chris@19 936 T2B = LD(&(ii[WS(is, 11)]), ivs, &(ii[WS(is, 1)]));
Chris@19 937 T2C = VSUB(T2A, T2B);
Chris@19 938 T4P = VADD(T2A, T2B);
Chris@19 939 }
Chris@19 940 T2t = VSUB(T2p, T2s);
Chris@19 941 T3L = VADD(T2p, T2s);
Chris@19 942 T3O = VSUB(T2J, T2G);
Chris@19 943 T2K = VADD(T2G, T2J);
Chris@19 944 TR = VADD(TN, TQ);
Chris@19 945 TY = VADD(TU, TX);
Chris@19 946 T5F = VSUB(TR, TY);
Chris@19 947 {
Chris@19 948 V T4N, T4Q, T2y, T2D;
Chris@19 949 T5G = VADD(T4S, T4T);
Chris@19 950 T5H = VADD(T4O, T4P);
Chris@19 951 T5I = VSUB(T5G, T5H);
Chris@19 952 T4N = VSUB(TN, TQ);
Chris@19 953 T4Q = VSUB(T4O, T4P);
Chris@19 954 T4R = VSUB(T4N, T4Q);
Chris@19 955 T5j = VADD(T4N, T4Q);
Chris@19 956 T2y = VSUB(T2w, T2x);
Chris@19 957 T2D = VADD(T2z, T2C);
Chris@19 958 T2E = VMUL(LDK(KP707106781), VSUB(T2y, T2D));
Chris@19 959 T3P = VMUL(LDK(KP707106781), VADD(T2y, T2D));
Chris@19 960 {
Chris@19 961 V T4U, T4V, T2L, T2M;
Chris@19 962 T4U = VSUB(T4S, T4T);
Chris@19 963 T4V = VSUB(TX, TU);
Chris@19 964 T4W = VSUB(T4U, T4V);
Chris@19 965 T5k = VADD(T4V, T4U);
Chris@19 966 T2L = VSUB(T2z, T2C);
Chris@19 967 T2M = VADD(T2x, T2w);
Chris@19 968 T2N = VMUL(LDK(KP707106781), VSUB(T2L, T2M));
Chris@19 969 T3M = VMUL(LDK(KP707106781), VADD(T2M, T2L));
Chris@19 970 }
Chris@19 971 }
Chris@19 972 }
Chris@19 973 {
Chris@19 974 V Ty, T2f, T21, T4C, TB, T1Y, T2i, T4D, TF, T28, T2b, T4I, TI, T23, T26;
Chris@19 975 V T4J;
Chris@19 976 {
Chris@19 977 V Tw, Tx, T1Z, T20;
Chris@19 978 Tw = LD(&(ri[WS(is, 1)]), ivs, &(ri[WS(is, 1)]));
Chris@19 979 Tx = LD(&(ri[WS(is, 17)]), ivs, &(ri[WS(is, 1)]));
Chris@19 980 Ty = VADD(Tw, Tx);
Chris@19 981 T2f = VSUB(Tw, Tx);
Chris@19 982 T1Z = LD(&(ii[WS(is, 1)]), ivs, &(ii[WS(is, 1)]));
Chris@19 983 T20 = LD(&(ii[WS(is, 17)]), ivs, &(ii[WS(is, 1)]));
Chris@19 984 T21 = VSUB(T1Z, T20);
Chris@19 985 T4C = VADD(T1Z, T20);
Chris@19 986 }
Chris@19 987 {
Chris@19 988 V Tz, TA, T2g, T2h;
Chris@19 989 Tz = LD(&(ri[WS(is, 9)]), ivs, &(ri[WS(is, 1)]));
Chris@19 990 TA = LD(&(ri[WS(is, 25)]), ivs, &(ri[WS(is, 1)]));
Chris@19 991 TB = VADD(Tz, TA);
Chris@19 992 T1Y = VSUB(Tz, TA);
Chris@19 993 T2g = LD(&(ii[WS(is, 9)]), ivs, &(ii[WS(is, 1)]));
Chris@19 994 T2h = LD(&(ii[WS(is, 25)]), ivs, &(ii[WS(is, 1)]));
Chris@19 995 T2i = VSUB(T2g, T2h);
Chris@19 996 T4D = VADD(T2g, T2h);
Chris@19 997 }
Chris@19 998 {
Chris@19 999 V TD, TE, T29, T2a;
Chris@19 1000 TD = LD(&(ri[WS(is, 5)]), ivs, &(ri[WS(is, 1)]));
Chris@19 1001 TE = LD(&(ri[WS(is, 21)]), ivs, &(ri[WS(is, 1)]));
Chris@19 1002 TF = VADD(TD, TE);
Chris@19 1003 T28 = VSUB(TD, TE);
Chris@19 1004 T29 = LD(&(ii[WS(is, 5)]), ivs, &(ii[WS(is, 1)]));
Chris@19 1005 T2a = LD(&(ii[WS(is, 21)]), ivs, &(ii[WS(is, 1)]));
Chris@19 1006 T2b = VSUB(T29, T2a);
Chris@19 1007 T4I = VADD(T29, T2a);
Chris@19 1008 }
Chris@19 1009 {
Chris@19 1010 V TG, TH, T24, T25;
Chris@19 1011 TG = LD(&(ri[WS(is, 29)]), ivs, &(ri[WS(is, 1)]));
Chris@19 1012 TH = LD(&(ri[WS(is, 13)]), ivs, &(ri[WS(is, 1)]));
Chris@19 1013 TI = VADD(TG, TH);
Chris@19 1014 T23 = VSUB(TG, TH);
Chris@19 1015 T24 = LD(&(ii[WS(is, 29)]), ivs, &(ii[WS(is, 1)]));
Chris@19 1016 T25 = LD(&(ii[WS(is, 13)]), ivs, &(ii[WS(is, 1)]));
Chris@19 1017 T26 = VSUB(T24, T25);
Chris@19 1018 T4J = VADD(T24, T25);
Chris@19 1019 }
Chris@19 1020 T22 = VADD(T1Y, T21);
Chris@19 1021 T3E = VADD(T2f, T2i);
Chris@19 1022 T3H = VSUB(T21, T1Y);
Chris@19 1023 T2j = VSUB(T2f, T2i);
Chris@19 1024 TC = VADD(Ty, TB);
Chris@19 1025 TJ = VADD(TF, TI);
Chris@19 1026 T5A = VSUB(TC, TJ);
Chris@19 1027 {
Chris@19 1028 V T4E, T4F, T27, T2c;
Chris@19 1029 T5B = VADD(T4C, T4D);
Chris@19 1030 T5C = VADD(T4I, T4J);
Chris@19 1031 T5D = VSUB(T5B, T5C);
Chris@19 1032 T4E = VSUB(T4C, T4D);
Chris@19 1033 T4F = VSUB(TI, TF);
Chris@19 1034 T4G = VSUB(T4E, T4F);
Chris@19 1035 T5g = VADD(T4F, T4E);
Chris@19 1036 T27 = VSUB(T23, T26);
Chris@19 1037 T2c = VADD(T28, T2b);
Chris@19 1038 T2d = VMUL(LDK(KP707106781), VSUB(T27, T2c));
Chris@19 1039 T3F = VMUL(LDK(KP707106781), VADD(T2c, T27));
Chris@19 1040 {
Chris@19 1041 V T4H, T4K, T2k, T2l;
Chris@19 1042 T4H = VSUB(Ty, TB);
Chris@19 1043 T4K = VSUB(T4I, T4J);
Chris@19 1044 T4L = VSUB(T4H, T4K);
Chris@19 1045 T5h = VADD(T4H, T4K);
Chris@19 1046 T2k = VSUB(T2b, T28);
Chris@19 1047 T2l = VADD(T23, T26);
Chris@19 1048 T2m = VMUL(LDK(KP707106781), VSUB(T2k, T2l));
Chris@19 1049 T3I = VMUL(LDK(KP707106781), VADD(T2k, T2l));
Chris@19 1050 }
Chris@19 1051 }
Chris@19 1052 }
Chris@19 1053 {
Chris@19 1054 V T61, T62, T63, T64, T65, T66, T67, T68, T69, T6a, T6b, T6c, T6d, T6e, T6f;
Chris@19 1055 V T6g, T6h, T6i, T6j, T6k, T6l, T6m, T6n, T6o, T6p, T6q, T6r, T6s, T6t, T6u;
Chris@19 1056 V T6v, T6w;
Chris@19 1057 {
Chris@19 1058 V T4B, T57, T5a, T5c, T4Y, T56, T55, T5b;
Chris@19 1059 {
Chris@19 1060 V T4t, T4A, T58, T59;
Chris@19 1061 T4t = VSUB(T4r, T4s);
Chris@19 1062 T4A = VMUL(LDK(KP707106781), VSUB(T4w, T4z));
Chris@19 1063 T4B = VADD(T4t, T4A);
Chris@19 1064 T57 = VSUB(T4t, T4A);
Chris@19 1065 T58 = VFNMS(LDK(KP923879532), T4L, VMUL(LDK(KP382683432), T4G));
Chris@19 1066 T59 = VFMA(LDK(KP382683432), T4W, VMUL(LDK(KP923879532), T4R));
Chris@19 1067 T5a = VSUB(T58, T59);
Chris@19 1068 T5c = VADD(T58, T59);
Chris@19 1069 }
Chris@19 1070 {
Chris@19 1071 V T4M, T4X, T51, T54;
Chris@19 1072 T4M = VFMA(LDK(KP923879532), T4G, VMUL(LDK(KP382683432), T4L));
Chris@19 1073 T4X = VFNMS(LDK(KP923879532), T4W, VMUL(LDK(KP382683432), T4R));
Chris@19 1074 T4Y = VADD(T4M, T4X);
Chris@19 1075 T56 = VSUB(T4X, T4M);
Chris@19 1076 T51 = VSUB(T4Z, T50);
Chris@19 1077 T54 = VMUL(LDK(KP707106781), VSUB(T52, T53));
Chris@19 1078 T55 = VSUB(T51, T54);
Chris@19 1079 T5b = VADD(T51, T54);
Chris@19 1080 }
Chris@19 1081 T61 = VSUB(T4B, T4Y);
Chris@19 1082 STM4(&(ro[22]), T61, ovs, &(ro[0]));
Chris@19 1083 T62 = VSUB(T5b, T5c);
Chris@19 1084 STM4(&(io[22]), T62, ovs, &(io[0]));
Chris@19 1085 T63 = VADD(T4B, T4Y);
Chris@19 1086 STM4(&(ro[6]), T63, ovs, &(ro[0]));
Chris@19 1087 T64 = VADD(T5b, T5c);
Chris@19 1088 STM4(&(io[6]), T64, ovs, &(io[0]));
Chris@19 1089 T65 = VSUB(T55, T56);
Chris@19 1090 STM4(&(io[30]), T65, ovs, &(io[0]));
Chris@19 1091 T66 = VSUB(T57, T5a);
Chris@19 1092 STM4(&(ro[30]), T66, ovs, &(ro[0]));
Chris@19 1093 T67 = VADD(T55, T56);
Chris@19 1094 STM4(&(io[14]), T67, ovs, &(io[0]));
Chris@19 1095 T68 = VADD(T57, T5a);
Chris@19 1096 STM4(&(ro[14]), T68, ovs, &(ro[0]));
Chris@19 1097 }
Chris@19 1098 {
Chris@19 1099 V T5f, T5r, T5u, T5w, T5m, T5q, T5p, T5v;
Chris@19 1100 {
Chris@19 1101 V T5d, T5e, T5s, T5t;
Chris@19 1102 T5d = VADD(T4r, T4s);
Chris@19 1103 T5e = VMUL(LDK(KP707106781), VADD(T53, T52));
Chris@19 1104 T5f = VADD(T5d, T5e);
Chris@19 1105 T5r = VSUB(T5d, T5e);
Chris@19 1106 T5s = VFNMS(LDK(KP382683432), T5h, VMUL(LDK(KP923879532), T5g));
Chris@19 1107 T5t = VFMA(LDK(KP923879532), T5k, VMUL(LDK(KP382683432), T5j));
Chris@19 1108 T5u = VSUB(T5s, T5t);
Chris@19 1109 T5w = VADD(T5s, T5t);
Chris@19 1110 }
Chris@19 1111 {
Chris@19 1112 V T5i, T5l, T5n, T5o;
Chris@19 1113 T5i = VFMA(LDK(KP382683432), T5g, VMUL(LDK(KP923879532), T5h));
Chris@19 1114 T5l = VFNMS(LDK(KP382683432), T5k, VMUL(LDK(KP923879532), T5j));
Chris@19 1115 T5m = VADD(T5i, T5l);
Chris@19 1116 T5q = VSUB(T5l, T5i);
Chris@19 1117 T5n = VADD(T50, T4Z);
Chris@19 1118 T5o = VMUL(LDK(KP707106781), VADD(T4w, T4z));
Chris@19 1119 T5p = VSUB(T5n, T5o);
Chris@19 1120 T5v = VADD(T5n, T5o);
Chris@19 1121 }
Chris@19 1122 T69 = VSUB(T5f, T5m);
Chris@19 1123 STM4(&(ro[18]), T69, ovs, &(ro[0]));
Chris@19 1124 T6a = VSUB(T5v, T5w);
Chris@19 1125 STM4(&(io[18]), T6a, ovs, &(io[0]));
Chris@19 1126 T6b = VADD(T5f, T5m);
Chris@19 1127 STM4(&(ro[2]), T6b, ovs, &(ro[0]));
Chris@19 1128 T6c = VADD(T5v, T5w);
Chris@19 1129 STM4(&(io[2]), T6c, ovs, &(io[0]));
Chris@19 1130 T6d = VSUB(T5p, T5q);
Chris@19 1131 STM4(&(io[26]), T6d, ovs, &(io[0]));
Chris@19 1132 T6e = VSUB(T5r, T5u);
Chris@19 1133 STM4(&(ro[26]), T6e, ovs, &(ro[0]));
Chris@19 1134 T6f = VADD(T5p, T5q);
Chris@19 1135 STM4(&(io[10]), T6f, ovs, &(io[0]));
Chris@19 1136 T6g = VADD(T5r, T5u);
Chris@19 1137 STM4(&(ro[10]), T6g, ovs, &(ro[0]));
Chris@19 1138 }
Chris@19 1139 {
Chris@19 1140 V T5z, T5P, T5S, T5U, T5K, T5O, T5N, T5T;
Chris@19 1141 {
Chris@19 1142 V T5x, T5y, T5Q, T5R;
Chris@19 1143 T5x = VSUB(T7, Te);
Chris@19 1144 T5y = VSUB(T1n, T1u);
Chris@19 1145 T5z = VADD(T5x, T5y);
Chris@19 1146 T5P = VSUB(T5x, T5y);
Chris@19 1147 T5Q = VSUB(T5D, T5A);
Chris@19 1148 T5R = VADD(T5F, T5I);
Chris@19 1149 T5S = VMUL(LDK(KP707106781), VSUB(T5Q, T5R));
Chris@19 1150 T5U = VMUL(LDK(KP707106781), VADD(T5Q, T5R));
Chris@19 1151 }
Chris@19 1152 {
Chris@19 1153 V T5E, T5J, T5L, T5M;
Chris@19 1154 T5E = VADD(T5A, T5D);
Chris@19 1155 T5J = VSUB(T5F, T5I);
Chris@19 1156 T5K = VMUL(LDK(KP707106781), VADD(T5E, T5J));
Chris@19 1157 T5O = VMUL(LDK(KP707106781), VSUB(T5J, T5E));
Chris@19 1158 T5L = VSUB(T18, T1f);
Chris@19 1159 T5M = VSUB(Tt, Tm);
Chris@19 1160 T5N = VSUB(T5L, T5M);
Chris@19 1161 T5T = VADD(T5M, T5L);
Chris@19 1162 }
Chris@19 1163 T6h = VSUB(T5z, T5K);
Chris@19 1164 STM4(&(ro[20]), T6h, ovs, &(ro[0]));
Chris@19 1165 T6i = VSUB(T5T, T5U);
Chris@19 1166 STM4(&(io[20]), T6i, ovs, &(io[0]));
Chris@19 1167 T6j = VADD(T5z, T5K);
Chris@19 1168 STM4(&(ro[4]), T6j, ovs, &(ro[0]));
Chris@19 1169 T6k = VADD(T5T, T5U);
Chris@19 1170 STM4(&(io[4]), T6k, ovs, &(io[0]));
Chris@19 1171 T6l = VSUB(T5N, T5O);
Chris@19 1172 STM4(&(io[28]), T6l, ovs, &(io[0]));
Chris@19 1173 T6m = VSUB(T5P, T5S);
Chris@19 1174 STM4(&(ro[28]), T6m, ovs, &(ro[0]));
Chris@19 1175 T6n = VADD(T5N, T5O);
Chris@19 1176 STM4(&(io[12]), T6n, ovs, &(io[0]));
Chris@19 1177 T6o = VADD(T5P, T5S);
Chris@19 1178 STM4(&(ro[12]), T6o, ovs, &(ro[0]));
Chris@19 1179 }
Chris@19 1180 {
Chris@19 1181 V Tv, T5V, T5Y, T60, T10, T11, T1w, T5Z;
Chris@19 1182 {
Chris@19 1183 V Tf, Tu, T5W, T5X;
Chris@19 1184 Tf = VADD(T7, Te);
Chris@19 1185 Tu = VADD(Tm, Tt);
Chris@19 1186 Tv = VADD(Tf, Tu);
Chris@19 1187 T5V = VSUB(Tf, Tu);
Chris@19 1188 T5W = VADD(T5B, T5C);
Chris@19 1189 T5X = VADD(T5G, T5H);
Chris@19 1190 T5Y = VSUB(T5W, T5X);
Chris@19 1191 T60 = VADD(T5W, T5X);
Chris@19 1192 }
Chris@19 1193 {
Chris@19 1194 V TK, TZ, T1g, T1v;
Chris@19 1195 TK = VADD(TC, TJ);
Chris@19 1196 TZ = VADD(TR, TY);
Chris@19 1197 T10 = VADD(TK, TZ);
Chris@19 1198 T11 = VSUB(TZ, TK);
Chris@19 1199 T1g = VADD(T18, T1f);
Chris@19 1200 T1v = VADD(T1n, T1u);
Chris@19 1201 T1w = VSUB(T1g, T1v);
Chris@19 1202 T5Z = VADD(T1g, T1v);
Chris@19 1203 }
Chris@19 1204 T6p = VSUB(Tv, T10);
Chris@19 1205 STM4(&(ro[16]), T6p, ovs, &(ro[0]));
Chris@19 1206 T6q = VSUB(T5Z, T60);
Chris@19 1207 STM4(&(io[16]), T6q, ovs, &(io[0]));
Chris@19 1208 T6r = VADD(Tv, T10);
Chris@19 1209 STM4(&(ro[0]), T6r, ovs, &(ro[0]));
Chris@19 1210 T6s = VADD(T5Z, T60);
Chris@19 1211 STM4(&(io[0]), T6s, ovs, &(io[0]));
Chris@19 1212 T6t = VADD(T11, T1w);
Chris@19 1213 STM4(&(io[8]), T6t, ovs, &(io[0]));
Chris@19 1214 T6u = VADD(T5V, T5Y);
Chris@19 1215 STM4(&(ro[8]), T6u, ovs, &(ro[0]));
Chris@19 1216 T6v = VSUB(T1w, T11);
Chris@19 1217 STM4(&(io[24]), T6v, ovs, &(io[0]));
Chris@19 1218 T6w = VSUB(T5V, T5Y);
Chris@19 1219 STM4(&(ro[24]), T6w, ovs, &(ro[0]));
Chris@19 1220 }
Chris@19 1221 {
Chris@19 1222 V T6x, T6y, T6z, T6A, T6B, T6C, T6D, T6E;
Chris@19 1223 {
Chris@19 1224 V T1X, T33, T31, T37, T2o, T34, T2P, T35;
Chris@19 1225 {
Chris@19 1226 V T1H, T1W, T2X, T30;
Chris@19 1227 T1H = VSUB(T1z, T1G);
Chris@19 1228 T1W = VSUB(T1O, T1V);
Chris@19 1229 T1X = VADD(T1H, T1W);
Chris@19 1230 T33 = VSUB(T1H, T1W);
Chris@19 1231 T2X = VSUB(T2T, T2W);
Chris@19 1232 T30 = VSUB(T2Y, T2Z);
Chris@19 1233 T31 = VSUB(T2X, T30);
Chris@19 1234 T37 = VADD(T2X, T30);
Chris@19 1235 }
Chris@19 1236 {
Chris@19 1237 V T2e, T2n, T2F, T2O;
Chris@19 1238 T2e = VSUB(T22, T2d);
Chris@19 1239 T2n = VSUB(T2j, T2m);
Chris@19 1240 T2o = VFMA(LDK(KP980785280), T2e, VMUL(LDK(KP195090322), T2n));
Chris@19 1241 T34 = VFNMS(LDK(KP980785280), T2n, VMUL(LDK(KP195090322), T2e));
Chris@19 1242 T2F = VSUB(T2t, T2E);
Chris@19 1243 T2O = VSUB(T2K, T2N);
Chris@19 1244 T2P = VFNMS(LDK(KP980785280), T2O, VMUL(LDK(KP195090322), T2F));
Chris@19 1245 T35 = VFMA(LDK(KP195090322), T2O, VMUL(LDK(KP980785280), T2F));
Chris@19 1246 }
Chris@19 1247 {
Chris@19 1248 V T2Q, T38, T32, T36;
Chris@19 1249 T2Q = VADD(T2o, T2P);
Chris@19 1250 T6x = VSUB(T1X, T2Q);
Chris@19 1251 STM4(&(ro[23]), T6x, ovs, &(ro[1]));
Chris@19 1252 T6y = VADD(T1X, T2Q);
Chris@19 1253 STM4(&(ro[7]), T6y, ovs, &(ro[1]));
Chris@19 1254 T38 = VADD(T34, T35);
Chris@19 1255 T6z = VSUB(T37, T38);
Chris@19 1256 STM4(&(io[23]), T6z, ovs, &(io[1]));
Chris@19 1257 T6A = VADD(T37, T38);
Chris@19 1258 STM4(&(io[7]), T6A, ovs, &(io[1]));
Chris@19 1259 T32 = VSUB(T2P, T2o);
Chris@19 1260 T6B = VSUB(T31, T32);
Chris@19 1261 STM4(&(io[31]), T6B, ovs, &(io[1]));
Chris@19 1262 T6C = VADD(T31, T32);
Chris@19 1263 STM4(&(io[15]), T6C, ovs, &(io[1]));
Chris@19 1264 T36 = VSUB(T34, T35);
Chris@19 1265 T6D = VSUB(T33, T36);
Chris@19 1266 STM4(&(ro[31]), T6D, ovs, &(ro[1]));
Chris@19 1267 T6E = VADD(T33, T36);
Chris@19 1268 STM4(&(ro[15]), T6E, ovs, &(ro[1]));
Chris@19 1269 }
Chris@19 1270 }
Chris@19 1271 {
Chris@19 1272 V T3D, T41, T3Z, T45, T3K, T42, T3R, T43;
Chris@19 1273 {
Chris@19 1274 V T3v, T3C, T3V, T3Y;
Chris@19 1275 T3v = VSUB(T3t, T3u);
Chris@19 1276 T3C = VSUB(T3y, T3B);
Chris@19 1277 T3D = VADD(T3v, T3C);
Chris@19 1278 T41 = VSUB(T3v, T3C);
Chris@19 1279 T3V = VSUB(T3T, T3U);
Chris@19 1280 T3Y = VSUB(T3W, T3X);
Chris@19 1281 T3Z = VSUB(T3V, T3Y);
Chris@19 1282 T45 = VADD(T3V, T3Y);
Chris@19 1283 }
Chris@19 1284 {
Chris@19 1285 V T3G, T3J, T3N, T3Q;
Chris@19 1286 T3G = VSUB(T3E, T3F);
Chris@19 1287 T3J = VSUB(T3H, T3I);
Chris@19 1288 T3K = VFMA(LDK(KP555570233), T3G, VMUL(LDK(KP831469612), T3J));
Chris@19 1289 T42 = VFNMS(LDK(KP831469612), T3G, VMUL(LDK(KP555570233), T3J));
Chris@19 1290 T3N = VSUB(T3L, T3M);
Chris@19 1291 T3Q = VSUB(T3O, T3P);
Chris@19 1292 T3R = VFNMS(LDK(KP831469612), T3Q, VMUL(LDK(KP555570233), T3N));
Chris@19 1293 T43 = VFMA(LDK(KP831469612), T3N, VMUL(LDK(KP555570233), T3Q));
Chris@19 1294 }
Chris@19 1295 {
Chris@19 1296 V T3S, T6F, T6G, T46, T6H, T6I;
Chris@19 1297 T3S = VADD(T3K, T3R);
Chris@19 1298 T6F = VSUB(T3D, T3S);
Chris@19 1299 STM4(&(ro[21]), T6F, ovs, &(ro[1]));
Chris@19 1300 STN4(&(ro[20]), T6h, T6F, T61, T6x, ovs);
Chris@19 1301 T6G = VADD(T3D, T3S);
Chris@19 1302 STM4(&(ro[5]), T6G, ovs, &(ro[1]));
Chris@19 1303 STN4(&(ro[4]), T6j, T6G, T63, T6y, ovs);
Chris@19 1304 T46 = VADD(T42, T43);
Chris@19 1305 T6H = VSUB(T45, T46);
Chris@19 1306 STM4(&(io[21]), T6H, ovs, &(io[1]));
Chris@19 1307 STN4(&(io[20]), T6i, T6H, T62, T6z, ovs);
Chris@19 1308 T6I = VADD(T45, T46);
Chris@19 1309 STM4(&(io[5]), T6I, ovs, &(io[1]));
Chris@19 1310 STN4(&(io[4]), T6k, T6I, T64, T6A, ovs);
Chris@19 1311 }
Chris@19 1312 {
Chris@19 1313 V T40, T6J, T6K, T44, T6L, T6M;
Chris@19 1314 T40 = VSUB(T3R, T3K);
Chris@19 1315 T6J = VSUB(T3Z, T40);
Chris@19 1316 STM4(&(io[29]), T6J, ovs, &(io[1]));
Chris@19 1317 STN4(&(io[28]), T6l, T6J, T65, T6B, ovs);
Chris@19 1318 T6K = VADD(T3Z, T40);
Chris@19 1319 STM4(&(io[13]), T6K, ovs, &(io[1]));
Chris@19 1320 STN4(&(io[12]), T6n, T6K, T67, T6C, ovs);
Chris@19 1321 T44 = VSUB(T42, T43);
Chris@19 1322 T6L = VSUB(T41, T44);
Chris@19 1323 STM4(&(ro[29]), T6L, ovs, &(ro[1]));
Chris@19 1324 STN4(&(ro[28]), T6m, T6L, T66, T6D, ovs);
Chris@19 1325 T6M = VADD(T41, T44);
Chris@19 1326 STM4(&(ro[13]), T6M, ovs, &(ro[1]));
Chris@19 1327 STN4(&(ro[12]), T6o, T6M, T68, T6E, ovs);
Chris@19 1328 }
Chris@19 1329 }
Chris@19 1330 }
Chris@19 1331 {
Chris@19 1332 V T6N, T6O, T6P, T6Q, T6R, T6S, T6T, T6U;
Chris@19 1333 {
Chris@19 1334 V T49, T4l, T4j, T4p, T4c, T4m, T4f, T4n;
Chris@19 1335 {
Chris@19 1336 V T47, T48, T4h, T4i;
Chris@19 1337 T47 = VADD(T3t, T3u);
Chris@19 1338 T48 = VADD(T3X, T3W);
Chris@19 1339 T49 = VADD(T47, T48);
Chris@19 1340 T4l = VSUB(T47, T48);
Chris@19 1341 T4h = VADD(T3T, T3U);
Chris@19 1342 T4i = VADD(T3y, T3B);
Chris@19 1343 T4j = VSUB(T4h, T4i);
Chris@19 1344 T4p = VADD(T4h, T4i);
Chris@19 1345 }
Chris@19 1346 {
Chris@19 1347 V T4a, T4b, T4d, T4e;
Chris@19 1348 T4a = VADD(T3E, T3F);
Chris@19 1349 T4b = VADD(T3H, T3I);
Chris@19 1350 T4c = VFMA(LDK(KP980785280), T4a, VMUL(LDK(KP195090322), T4b));
Chris@19 1351 T4m = VFNMS(LDK(KP195090322), T4a, VMUL(LDK(KP980785280), T4b));
Chris@19 1352 T4d = VADD(T3L, T3M);
Chris@19 1353 T4e = VADD(T3O, T3P);
Chris@19 1354 T4f = VFNMS(LDK(KP195090322), T4e, VMUL(LDK(KP980785280), T4d));
Chris@19 1355 T4n = VFMA(LDK(KP195090322), T4d, VMUL(LDK(KP980785280), T4e));
Chris@19 1356 }
Chris@19 1357 {
Chris@19 1358 V T4g, T4q, T4k, T4o;
Chris@19 1359 T4g = VADD(T4c, T4f);
Chris@19 1360 T6N = VSUB(T49, T4g);
Chris@19 1361 STM4(&(ro[17]), T6N, ovs, &(ro[1]));
Chris@19 1362 T6O = VADD(T49, T4g);
Chris@19 1363 STM4(&(ro[1]), T6O, ovs, &(ro[1]));
Chris@19 1364 T4q = VADD(T4m, T4n);
Chris@19 1365 T6P = VSUB(T4p, T4q);
Chris@19 1366 STM4(&(io[17]), T6P, ovs, &(io[1]));
Chris@19 1367 T6Q = VADD(T4p, T4q);
Chris@19 1368 STM4(&(io[1]), T6Q, ovs, &(io[1]));
Chris@19 1369 T4k = VSUB(T4f, T4c);
Chris@19 1370 T6R = VSUB(T4j, T4k);
Chris@19 1371 STM4(&(io[25]), T6R, ovs, &(io[1]));
Chris@19 1372 T6S = VADD(T4j, T4k);
Chris@19 1373 STM4(&(io[9]), T6S, ovs, &(io[1]));
Chris@19 1374 T4o = VSUB(T4m, T4n);
Chris@19 1375 T6T = VSUB(T4l, T4o);
Chris@19 1376 STM4(&(ro[25]), T6T, ovs, &(ro[1]));
Chris@19 1377 T6U = VADD(T4l, T4o);
Chris@19 1378 STM4(&(ro[9]), T6U, ovs, &(ro[1]));
Chris@19 1379 }
Chris@19 1380 }
Chris@19 1381 {
Chris@19 1382 V T3b, T3n, T3l, T3r, T3e, T3o, T3h, T3p;
Chris@19 1383 {
Chris@19 1384 V T39, T3a, T3j, T3k;
Chris@19 1385 T39 = VADD(T1z, T1G);
Chris@19 1386 T3a = VADD(T2Z, T2Y);
Chris@19 1387 T3b = VADD(T39, T3a);
Chris@19 1388 T3n = VSUB(T39, T3a);
Chris@19 1389 T3j = VADD(T2T, T2W);
Chris@19 1390 T3k = VADD(T1O, T1V);
Chris@19 1391 T3l = VSUB(T3j, T3k);
Chris@19 1392 T3r = VADD(T3j, T3k);
Chris@19 1393 }
Chris@19 1394 {
Chris@19 1395 V T3c, T3d, T3f, T3g;
Chris@19 1396 T3c = VADD(T22, T2d);
Chris@19 1397 T3d = VADD(T2j, T2m);
Chris@19 1398 T3e = VFMA(LDK(KP555570233), T3c, VMUL(LDK(KP831469612), T3d));
Chris@19 1399 T3o = VFNMS(LDK(KP555570233), T3d, VMUL(LDK(KP831469612), T3c));
Chris@19 1400 T3f = VADD(T2t, T2E);
Chris@19 1401 T3g = VADD(T2K, T2N);
Chris@19 1402 T3h = VFNMS(LDK(KP555570233), T3g, VMUL(LDK(KP831469612), T3f));
Chris@19 1403 T3p = VFMA(LDK(KP831469612), T3g, VMUL(LDK(KP555570233), T3f));
Chris@19 1404 }
Chris@19 1405 {
Chris@19 1406 V T3i, T6V, T6W, T3s, T6X, T6Y;
Chris@19 1407 T3i = VADD(T3e, T3h);
Chris@19 1408 T6V = VSUB(T3b, T3i);
Chris@19 1409 STM4(&(ro[19]), T6V, ovs, &(ro[1]));
Chris@19 1410 STN4(&(ro[16]), T6p, T6N, T69, T6V, ovs);
Chris@19 1411 T6W = VADD(T3b, T3i);
Chris@19 1412 STM4(&(ro[3]), T6W, ovs, &(ro[1]));
Chris@19 1413 STN4(&(ro[0]), T6r, T6O, T6b, T6W, ovs);
Chris@19 1414 T3s = VADD(T3o, T3p);
Chris@19 1415 T6X = VSUB(T3r, T3s);
Chris@19 1416 STM4(&(io[19]), T6X, ovs, &(io[1]));
Chris@19 1417 STN4(&(io[16]), T6q, T6P, T6a, T6X, ovs);
Chris@19 1418 T6Y = VADD(T3r, T3s);
Chris@19 1419 STM4(&(io[3]), T6Y, ovs, &(io[1]));
Chris@19 1420 STN4(&(io[0]), T6s, T6Q, T6c, T6Y, ovs);
Chris@19 1421 }
Chris@19 1422 {
Chris@19 1423 V T3m, T6Z, T70, T3q, T71, T72;
Chris@19 1424 T3m = VSUB(T3h, T3e);
Chris@19 1425 T6Z = VSUB(T3l, T3m);
Chris@19 1426 STM4(&(io[27]), T6Z, ovs, &(io[1]));
Chris@19 1427 STN4(&(io[24]), T6v, T6R, T6d, T6Z, ovs);
Chris@19 1428 T70 = VADD(T3l, T3m);
Chris@19 1429 STM4(&(io[11]), T70, ovs, &(io[1]));
Chris@19 1430 STN4(&(io[8]), T6t, T6S, T6f, T70, ovs);
Chris@19 1431 T3q = VSUB(T3o, T3p);
Chris@19 1432 T71 = VSUB(T3n, T3q);
Chris@19 1433 STM4(&(ro[27]), T71, ovs, &(ro[1]));
Chris@19 1434 STN4(&(ro[24]), T6w, T6T, T6e, T71, ovs);
Chris@19 1435 T72 = VADD(T3n, T3q);
Chris@19 1436 STM4(&(ro[11]), T72, ovs, &(ro[1]));
Chris@19 1437 STN4(&(ro[8]), T6u, T6U, T6g, T72, ovs);
Chris@19 1438 }
Chris@19 1439 }
Chris@19 1440 }
Chris@19 1441 }
Chris@19 1442 }
Chris@19 1443 }
Chris@19 1444 VLEAVE();
Chris@19 1445 }
Chris@19 1446
Chris@19 1447 static const kdft_desc desc = { 32, XSIMD_STRING("n2sv_32"), {340, 52, 32, 0}, &GENUS, 0, 1, 0, 0 };
Chris@19 1448
Chris@19 1449 void XSIMD(codelet_n2sv_32) (planner *p) {
Chris@19 1450 X(kdft_register) (p, n2sv_32, &desc);
Chris@19 1451 }
Chris@19 1452
Chris@19 1453 #endif /* HAVE_FMA */
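/*
 * Appendix (editorial, hypothetical, and not part of FFTW or of this generated
 * file): a minimal standalone program that checks the DVK literals used by
 * both branches against the trigonometric values their names suggest.  The
 * only assumption is that each literal is meant to equal the expression listed
 * beside it; the program is illustrative and independent of the codelet
 * machinery above.
 */
#include <math.h>
#include <stdio.h>

int main(void)
{
    const double pi = 3.14159265358979323846;
    const struct { const char *name; double literal, expected; } k[] = {
        { "KP707106781", 0.707106781186547524400844362104849039284835938, cos(pi / 4) },
        { "KP923879532", 0.923879532511286756128183189396788286822416626, cos(pi / 8) },
        { "KP382683432", 0.382683432365089771728459984030398866761344562, sin(pi / 8) },
        { "KP414213562", 0.414213562373095048801688724209698078569671875, tan(pi / 8) },
        { "KP980785280", 0.980785280403230449126182236134239036973933731, cos(pi / 16) },
        { "KP195090322", 0.195090322016128267848284868477022240927691618, sin(pi / 16) },
        { "KP198912367", 0.198912367379658006911597622644676228597850501, tan(pi / 16) },
        { "KP831469612", 0.831469612302545237078788377617905756738560812, cos(3 * pi / 16) },
        { "KP555570233", 0.555570233019602224742830813948532874374937191, sin(3 * pi / 16) },
        { "KP668178637", 0.668178637919298919997757686523080761552472251, tan(3 * pi / 16) },
    };
    int i, n = (int)(sizeof k / sizeof k[0]);

    /* Each difference should be on the order of 1e-16 or smaller. */
    for (i = 0; i < n; i++)
        printf("%-12s literal - expected = %+.3e\n",
               k[i].name, k[i].literal - k[i].expected);
    return 0;
}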