/*
 * Copyright (c) 2003, 2007-11 Matteo Frigo
 * Copyright (c) 2003, 2007-11 Massachusetts Institute of Technology
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

/* This file was automatically generated --- DO NOT EDIT */
/* Generated on Sun Nov 25 07:37:57 EST 2012 */

#include "codelet-dft.h"

#ifdef HAVE_FMA

/* Generated by: ../../../genfft/gen_notw.native -fma -reorder-insns -schedule-for-pipeline -simd -compact -variables 4 -pipeline-latency 8 -n 64 -name n2sv_64 -with-ostride 1 -include n2s.h -store-multiple 4 */

/*
 * This function contains 912 FP additions, 392 FP multiplications,
 * (or, 520 additions, 0 multiplications, 392 fused multiply/add),
 * 310 stack variables, 15 constants, and 288 memory accesses
 */
#include "n2s.h"

static void n2sv_64(const R *ri, const R *ii, R *ro, R *io, stride is, stride os, INT v, INT ivs, INT ovs)
{
     DVK(KP881921264, +0.881921264348355029712756863660388349508442621);
     DVK(KP956940335, +0.956940335732208864935797886980269969482849206);
     DVK(KP534511135, +0.534511135950791641089685961295362908582039528);
     DVK(KP303346683, +0.303346683607342391675883946941299872384187453);
     DVK(KP773010453, +0.773010453362736960810906609758469800971041293);
     DVK(KP995184726, +0.995184726672196886244836953109479921575474869);
     DVK(KP820678790, +0.820678790828660330972281985331011598767386482);
     DVK(KP098491403, +0.098491403357164253077197521291327432293052451);
     DVK(KP831469612, +0.831469612302545237078788377617905756738560812);
     DVK(KP980785280, +0.980785280403230449126182236134239036973933731);
     DVK(KP668178637, +0.668178637919298919997757686523080761552472251);
     DVK(KP198912367, +0.198912367379658006911597622644676228597850501);
     DVK(KP923879532, +0.923879532511286756128183189396788286822416626);
     DVK(KP707106781, +0.707106781186547524400844362104849039284835938);
     DVK(KP414213562, +0.414213562373095048801688724209698078569671875);
     {
          INT i;
          for (i = v; i > 0; i = i - (2 * VL), ri = ri + ((2 * VL) * ivs), ii = ii + ((2 * VL) * ivs), ro = ro + ((2 * VL) * ovs), io = io + ((2 * VL) * ovs), MAKE_VOLATILE_STRIDE(256, is), MAKE_VOLATILE_STRIDE(256, os)) {
               V TeJ, TeK, TeP, TeQ, TfH, TfI, TfJ, TfK, Tgj, Tgk, Tgv, Tgw, T9a, T99, T9e;
               V T9b;
               {
                    V T7B, T37, T5Z, T8F, TbB, TcB, Tf, Td9, T62, T7C, T2i, TdH, Tcb, Tah, T8G;
                    V T3e, Tak, TbC, T65, T3m,
TdI, Tu, Tda, T2x, TbD, Tan, T8I, T7G, T8J, T7J; Chris@10: V T64, T3t, Tas, Tce, TK, Tdd, Tav, Tcf, Tdc, T2N, T3G, T6G, T9k, T7O, T9l; Chris@10: V T7R, T6H, T3N, T1L, TdA, Tdx, Teo, Tbs, Tct, T5Q, T6V, T8y, T9z, T5j, T6Y; Chris@10: V Tbb, Tcw, T8n, T9C, Tch, Taz, Tdf, TZ, Tdg, T32, Tci, TaC, T6J, T3Z, T9n; Chris@10: V T7V, T9o, T7Y, T6K, T46, Tdp, T1g, Tej, Tdm, Tcm, Tb1, Tcp, TaK, T6O, T4X; Chris@10: V T9s, T8f, T6R, T4q, T9v, T84, Tdn, T1v, Tek, Tds, Tcn, TaV, Tcq, Tb4, T9t; Chris@10: V T8b, T9w, T8i, T6S, T50, T6P, T4N, T5k, T1V, T1S, TdB, Tbi, T5s, Tbt, Tbg; Chris@10: V T5F, T5R, T5p, T1Y, Tbj, T5n, T8z, T8q; Chris@10: { Chris@10: V Tba, T57, T8l, Tb7, T5M, T8w, T8m, T5P, T8x, T5i; Chris@10: { Chris@10: V T2p, T7F, T7E, Tal, T2w, Tam, T3s, T7H, T7I, T3p, T3d, T3a; Chris@10: { Chris@10: V T8, T35, T3, T5Y, T26, T5X, T6, T36, T29, T9, T2b, T2c, Tb, Tc, T2e; Chris@10: V T2f; Chris@10: { Chris@10: V T1, T2, T24, T25, T4, T5, T27, T28; Chris@10: T1 = LD(&(ri[0]), ivs, &(ri[0])); Chris@10: T2 = LD(&(ri[WS(is, 32)]), ivs, &(ri[0])); Chris@10: T24 = LD(&(ii[0]), ivs, &(ii[0])); Chris@10: T25 = LD(&(ii[WS(is, 32)]), ivs, &(ii[0])); Chris@10: T4 = LD(&(ri[WS(is, 16)]), ivs, &(ri[0])); Chris@10: T5 = LD(&(ri[WS(is, 48)]), ivs, &(ri[0])); Chris@10: T27 = LD(&(ii[WS(is, 16)]), ivs, &(ii[0])); Chris@10: T28 = LD(&(ii[WS(is, 48)]), ivs, &(ii[0])); Chris@10: T8 = LD(&(ri[WS(is, 8)]), ivs, &(ri[0])); Chris@10: T35 = VSUB(T1, T2); Chris@10: T3 = VADD(T1, T2); Chris@10: T5Y = VSUB(T24, T25); Chris@10: T26 = VADD(T24, T25); Chris@10: T5X = VSUB(T4, T5); Chris@10: T6 = VADD(T4, T5); Chris@10: T36 = VSUB(T27, T28); Chris@10: T29 = VADD(T27, T28); Chris@10: T9 = LD(&(ri[WS(is, 40)]), ivs, &(ri[0])); Chris@10: T2b = LD(&(ii[WS(is, 8)]), ivs, &(ii[0])); Chris@10: T2c = LD(&(ii[WS(is, 40)]), ivs, &(ii[0])); Chris@10: Tb = LD(&(ri[WS(is, 56)]), ivs, &(ri[0])); Chris@10: Tc = LD(&(ri[WS(is, 24)]), ivs, &(ri[0])); Chris@10: T2e = LD(&(ii[WS(is, 56)]), ivs, &(ii[0])); Chris@10: T2f = LD(&(ii[WS(is, 24)]), ivs, &(ii[0])); Chris@10: } Chris@10: { Chris@10: V T39, Ta, T38, T2d, T3b, Td, T3c, T2g, Taf, T7; Chris@10: T7B = VADD(T35, T36); Chris@10: T37 = VSUB(T35, T36); Chris@10: T39 = VSUB(T8, T9); Chris@10: Ta = VADD(T8, T9); Chris@10: T38 = VSUB(T2b, T2c); Chris@10: T2d = VADD(T2b, T2c); Chris@10: T3b = VSUB(Tb, Tc); Chris@10: Td = VADD(Tb, Tc); Chris@10: T3c = VSUB(T2e, T2f); Chris@10: T2g = VADD(T2e, T2f); Chris@10: T5Z = VADD(T5X, T5Y); Chris@10: T8F = VSUB(T5Y, T5X); Chris@10: Taf = VSUB(T3, T6); Chris@10: T7 = VADD(T3, T6); Chris@10: { Chris@10: V TbA, T2a, Te, Tbz, T60, T61, T2h, Tag; Chris@10: TbA = VSUB(T26, T29); Chris@10: T2a = VADD(T26, T29); Chris@10: Te = VADD(Ta, Td); Chris@10: Tbz = VSUB(Td, Ta); Chris@10: T3d = VADD(T3b, T3c); Chris@10: T60 = VSUB(T3b, T3c); Chris@10: T61 = VADD(T39, T38); Chris@10: T3a = VSUB(T38, T39); Chris@10: T2h = VADD(T2d, T2g); Chris@10: Tag = VSUB(T2d, T2g); Chris@10: TbB = VADD(Tbz, TbA); Chris@10: TcB = VSUB(TbA, Tbz); Chris@10: Tf = VADD(T7, Te); Chris@10: Td9 = VSUB(T7, Te); Chris@10: T62 = VSUB(T60, T61); Chris@10: T7C = VADD(T61, T60); Chris@10: T2i = VADD(T2a, T2h); Chris@10: TdH = VSUB(T2a, T2h); Chris@10: Tcb = VSUB(Taf, Tag); Chris@10: Tah = VADD(Taf, Tag); Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V T3j, Ti, T3h, T2l, T3g, Tl, T2t, T3k, T2o, T3q, Tp, T3o, T2s, T3n, Ts; Chris@10: V T2u, T2m, T2n; Chris@10: { Chris@10: V Tg, Th, T2j, T2k, Tj, Tk; Chris@10: Tg = LD(&(ri[WS(is, 4)]), ivs, &(ri[0])); Chris@10: Th = LD(&(ri[WS(is, 36)]), ivs, &(ri[0])); 
Chris@10: T2j = LD(&(ii[WS(is, 4)]), ivs, &(ii[0])); Chris@10: T2k = LD(&(ii[WS(is, 36)]), ivs, &(ii[0])); Chris@10: Tj = LD(&(ri[WS(is, 20)]), ivs, &(ri[0])); Chris@10: Tk = LD(&(ri[WS(is, 52)]), ivs, &(ri[0])); Chris@10: T2m = LD(&(ii[WS(is, 20)]), ivs, &(ii[0])); Chris@10: T8G = VADD(T3a, T3d); Chris@10: T3e = VSUB(T3a, T3d); Chris@10: T3j = VSUB(Tg, Th); Chris@10: Ti = VADD(Tg, Th); Chris@10: T3h = VSUB(T2j, T2k); Chris@10: T2l = VADD(T2j, T2k); Chris@10: T3g = VSUB(Tj, Tk); Chris@10: Tl = VADD(Tj, Tk); Chris@10: T2n = LD(&(ii[WS(is, 52)]), ivs, &(ii[0])); Chris@10: } Chris@10: { Chris@10: V Tn, To, T2q, T2r, Tq, Tr; Chris@10: Tn = LD(&(ri[WS(is, 60)]), ivs, &(ri[0])); Chris@10: To = LD(&(ri[WS(is, 28)]), ivs, &(ri[0])); Chris@10: T2q = LD(&(ii[WS(is, 60)]), ivs, &(ii[0])); Chris@10: T2r = LD(&(ii[WS(is, 28)]), ivs, &(ii[0])); Chris@10: Tq = LD(&(ri[WS(is, 12)]), ivs, &(ri[0])); Chris@10: Tr = LD(&(ri[WS(is, 44)]), ivs, &(ri[0])); Chris@10: T2t = LD(&(ii[WS(is, 12)]), ivs, &(ii[0])); Chris@10: T3k = VSUB(T2m, T2n); Chris@10: T2o = VADD(T2m, T2n); Chris@10: T3q = VSUB(Tn, To); Chris@10: Tp = VADD(Tn, To); Chris@10: T3o = VSUB(T2q, T2r); Chris@10: T2s = VADD(T2q, T2r); Chris@10: T3n = VSUB(Tq, Tr); Chris@10: Ts = VADD(Tq, Tr); Chris@10: T2u = LD(&(ii[WS(is, 44)]), ivs, &(ii[0])); Chris@10: } Chris@10: { Chris@10: V Tai, Tm, Taj, T3r; Chris@10: Tai = VSUB(Ti, Tl); Chris@10: Tm = VADD(Ti, Tl); Chris@10: T2p = VADD(T2l, T2o); Chris@10: Taj = VSUB(T2l, T2o); Chris@10: { Chris@10: V T3i, T3l, Tt, T2v; Chris@10: T7F = VSUB(T3h, T3g); Chris@10: T3i = VADD(T3g, T3h); Chris@10: T3l = VSUB(T3j, T3k); Chris@10: T7E = VADD(T3j, T3k); Chris@10: Tt = VADD(Tp, Ts); Chris@10: Tal = VSUB(Tp, Ts); Chris@10: T2v = VADD(T2t, T2u); Chris@10: T3r = VSUB(T2t, T2u); Chris@10: Tak = VADD(Tai, Taj); Chris@10: TbC = VSUB(Taj, Tai); Chris@10: T65 = VFNMS(LDK(KP414213562), T3i, T3l); Chris@10: T3m = VFMA(LDK(KP414213562), T3l, T3i); Chris@10: TdI = VSUB(Tt, Tm); Chris@10: Tu = VADD(Tm, Tt); Chris@10: T2w = VADD(T2s, T2v); Chris@10: Tam = VSUB(T2s, T2v); Chris@10: } Chris@10: T3s = VSUB(T3q, T3r); Chris@10: T7H = VADD(T3q, T3r); Chris@10: T7I = VSUB(T3o, T3n); Chris@10: T3p = VADD(T3n, T3o); Chris@10: } Chris@10: } Chris@10: { Chris@10: V T7M, T7Q, T7N, T3M, T3J, T7P; Chris@10: { Chris@10: V TG, T3H, Ty, T3x, T2B, T3w, TB, T3I, T2E, TH, T2J, T2K, TD, TE, T2G; Chris@10: V T2H; Chris@10: { Chris@10: V Tw, Tx, T2z, T2A, Tz, TA, T2C, T2D; Chris@10: Tw = LD(&(ri[WS(is, 2)]), ivs, &(ri[0])); Chris@10: Tda = VSUB(T2p, T2w); Chris@10: T2x = VADD(T2p, T2w); Chris@10: TbD = VADD(Tal, Tam); Chris@10: Tan = VSUB(Tal, Tam); Chris@10: T8I = VFNMS(LDK(KP414213562), T7E, T7F); Chris@10: T7G = VFMA(LDK(KP414213562), T7F, T7E); Chris@10: T8J = VFMA(LDK(KP414213562), T7H, T7I); Chris@10: T7J = VFNMS(LDK(KP414213562), T7I, T7H); Chris@10: T64 = VFMA(LDK(KP414213562), T3p, T3s); Chris@10: T3t = VFNMS(LDK(KP414213562), T3s, T3p); Chris@10: Tx = LD(&(ri[WS(is, 34)]), ivs, &(ri[0])); Chris@10: T2z = LD(&(ii[WS(is, 2)]), ivs, &(ii[0])); Chris@10: T2A = LD(&(ii[WS(is, 34)]), ivs, &(ii[0])); Chris@10: Tz = LD(&(ri[WS(is, 18)]), ivs, &(ri[0])); Chris@10: TA = LD(&(ri[WS(is, 50)]), ivs, &(ri[0])); Chris@10: T2C = LD(&(ii[WS(is, 18)]), ivs, &(ii[0])); Chris@10: T2D = LD(&(ii[WS(is, 50)]), ivs, &(ii[0])); Chris@10: TG = LD(&(ri[WS(is, 58)]), ivs, &(ri[0])); Chris@10: T3H = VSUB(Tw, Tx); Chris@10: Ty = VADD(Tw, Tx); Chris@10: T3x = VSUB(T2z, T2A); Chris@10: T2B = VADD(T2z, T2A); Chris@10: T3w = VSUB(Tz, TA); Chris@10: TB = VADD(Tz, TA); Chris@10: 
T3I = VSUB(T2C, T2D); Chris@10: T2E = VADD(T2C, T2D); Chris@10: TH = LD(&(ri[WS(is, 26)]), ivs, &(ri[0])); Chris@10: T2J = LD(&(ii[WS(is, 58)]), ivs, &(ii[0])); Chris@10: T2K = LD(&(ii[WS(is, 26)]), ivs, &(ii[0])); Chris@10: TD = LD(&(ri[WS(is, 10)]), ivs, &(ri[0])); Chris@10: TE = LD(&(ri[WS(is, 42)]), ivs, &(ri[0])); Chris@10: T2G = LD(&(ii[WS(is, 10)]), ivs, &(ii[0])); Chris@10: T2H = LD(&(ii[WS(is, 42)]), ivs, &(ii[0])); Chris@10: } Chris@10: { Chris@10: V Tat, TC, Tar, T2F, T3K, T3E, TJ, Taq, T2M, Tau, T3B, T3L, T3y, T3F; Chris@10: { Chris@10: V TI, T3C, T2L, T3D, TF, T3z, T2I, T3A; Chris@10: Tat = VSUB(Ty, TB); Chris@10: TC = VADD(Ty, TB); Chris@10: TI = VADD(TG, TH); Chris@10: T3C = VSUB(TG, TH); Chris@10: T2L = VADD(T2J, T2K); Chris@10: T3D = VSUB(T2J, T2K); Chris@10: TF = VADD(TD, TE); Chris@10: T3z = VSUB(TD, TE); Chris@10: T2I = VADD(T2G, T2H); Chris@10: T3A = VSUB(T2G, T2H); Chris@10: Tar = VSUB(T2B, T2E); Chris@10: T2F = VADD(T2B, T2E); Chris@10: T3K = VADD(T3C, T3D); Chris@10: T3E = VSUB(T3C, T3D); Chris@10: TJ = VADD(TF, TI); Chris@10: Taq = VSUB(TI, TF); Chris@10: T2M = VADD(T2I, T2L); Chris@10: Tau = VSUB(T2I, T2L); Chris@10: T3B = VADD(T3z, T3A); Chris@10: T3L = VSUB(T3A, T3z); Chris@10: } Chris@10: T7M = VSUB(T3x, T3w); Chris@10: T3y = VADD(T3w, T3x); Chris@10: Tas = VADD(Taq, Tar); Chris@10: Tce = VSUB(Tar, Taq); Chris@10: TK = VADD(TC, TJ); Chris@10: Tdd = VSUB(TC, TJ); Chris@10: Tav = VADD(Tat, Tau); Chris@10: Tcf = VSUB(Tat, Tau); Chris@10: T7Q = VADD(T3B, T3E); Chris@10: T3F = VSUB(T3B, T3E); Chris@10: Tdc = VSUB(T2F, T2M); Chris@10: T2N = VADD(T2F, T2M); Chris@10: T7N = VADD(T3L, T3K); Chris@10: T3M = VSUB(T3K, T3L); Chris@10: T3J = VSUB(T3H, T3I); Chris@10: T7P = VADD(T3H, T3I); Chris@10: T3G = VFNMS(LDK(KP707106781), T3F, T3y); Chris@10: T6G = VFMA(LDK(KP707106781), T3F, T3y); Chris@10: } Chris@10: } Chris@10: { Chris@10: V T1H, T5I, T1z, Tb8, T56, T53, T1C, Tb9, T5L, T1I, T5e, T5f, T1E, T1F, T59; Chris@10: V T5a; Chris@10: { Chris@10: V T1x, T1y, T54, T55, T1A, T1B, T5J, T5K; Chris@10: T1x = LD(&(ri[WS(is, 63)]), ivs, &(ri[WS(is, 1)])); Chris@10: T9k = VFNMS(LDK(KP707106781), T7N, T7M); Chris@10: T7O = VFMA(LDK(KP707106781), T7N, T7M); Chris@10: T9l = VFNMS(LDK(KP707106781), T7Q, T7P); Chris@10: T7R = VFMA(LDK(KP707106781), T7Q, T7P); Chris@10: T6H = VFMA(LDK(KP707106781), T3M, T3J); Chris@10: T3N = VFNMS(LDK(KP707106781), T3M, T3J); Chris@10: T1y = LD(&(ri[WS(is, 31)]), ivs, &(ri[WS(is, 1)])); Chris@10: T54 = LD(&(ii[WS(is, 63)]), ivs, &(ii[WS(is, 1)])); Chris@10: T55 = LD(&(ii[WS(is, 31)]), ivs, &(ii[WS(is, 1)])); Chris@10: T1A = LD(&(ri[WS(is, 15)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1B = LD(&(ri[WS(is, 47)]), ivs, &(ri[WS(is, 1)])); Chris@10: T5J = LD(&(ii[WS(is, 15)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5K = LD(&(ii[WS(is, 47)]), ivs, &(ii[WS(is, 1)])); Chris@10: T1H = LD(&(ri[WS(is, 55)]), ivs, &(ri[WS(is, 1)])); Chris@10: T5I = VSUB(T1x, T1y); Chris@10: T1z = VADD(T1x, T1y); Chris@10: Tb8 = VADD(T54, T55); Chris@10: T56 = VSUB(T54, T55); Chris@10: T53 = VSUB(T1A, T1B); Chris@10: T1C = VADD(T1A, T1B); Chris@10: Tb9 = VADD(T5J, T5K); Chris@10: T5L = VSUB(T5J, T5K); Chris@10: T1I = LD(&(ri[WS(is, 23)]), ivs, &(ri[WS(is, 1)])); Chris@10: T5e = LD(&(ii[WS(is, 55)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5f = LD(&(ii[WS(is, 23)]), ivs, &(ii[WS(is, 1)])); Chris@10: T1E = LD(&(ri[WS(is, 7)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1F = LD(&(ri[WS(is, 39)]), ivs, &(ri[WS(is, 1)])); Chris@10: T59 = LD(&(ii[WS(is, 7)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5a = 
LD(&(ii[WS(is, 39)]), ivs, &(ii[WS(is, 1)])); Chris@10: } Chris@10: { Chris@10: V Tbo, T1D, Tdv, T5h, T5N, T1K, Tdw, Tbr, T5O, T5c; Chris@10: { Chris@10: V T1J, T5d, Tbq, T5g, T1G, T58, Tbp, T5b; Chris@10: Tbo = VSUB(T1z, T1C); Chris@10: T1D = VADD(T1z, T1C); Chris@10: T1J = VADD(T1H, T1I); Chris@10: T5d = VSUB(T1H, T1I); Chris@10: Tbq = VADD(T5e, T5f); Chris@10: T5g = VSUB(T5e, T5f); Chris@10: T1G = VADD(T1E, T1F); Chris@10: T58 = VSUB(T1E, T1F); Chris@10: Tbp = VADD(T59, T5a); Chris@10: T5b = VSUB(T59, T5a); Chris@10: Tba = VSUB(Tb8, Tb9); Chris@10: Tdv = VADD(Tb8, Tb9); Chris@10: T57 = VADD(T53, T56); Chris@10: T8l = VSUB(T56, T53); Chris@10: T5h = VSUB(T5d, T5g); Chris@10: T5N = VADD(T5d, T5g); Chris@10: Tb7 = VSUB(T1J, T1G); Chris@10: T1K = VADD(T1G, T1J); Chris@10: Tdw = VADD(Tbp, Tbq); Chris@10: Tbr = VSUB(Tbp, Tbq); Chris@10: T5O = VSUB(T5b, T58); Chris@10: T5c = VADD(T58, T5b); Chris@10: } Chris@10: T5M = VSUB(T5I, T5L); Chris@10: T8w = VADD(T5I, T5L); Chris@10: T1L = VADD(T1D, T1K); Chris@10: TdA = VSUB(T1D, T1K); Chris@10: Tdx = VSUB(Tdv, Tdw); Chris@10: Teo = VADD(Tdv, Tdw); Chris@10: Tbs = VADD(Tbo, Tbr); Chris@10: Tct = VSUB(Tbo, Tbr); Chris@10: T8m = VADD(T5O, T5N); Chris@10: T5P = VSUB(T5N, T5O); Chris@10: T8x = VADD(T5c, T5h); Chris@10: T5i = VSUB(T5c, T5h); Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V T4e, T82, T8d, T4T, T4W, T83, T4p, T8e; Chris@10: { Chris@10: V T7T, T3R, T42, T7W, T3Y, T7X, T45, T7U; Chris@10: { Chris@10: V T40, TN, T2Y, T3Q, T2Q, T3P, TQ, T41, T2T, T3V, TX, T2Z, TS, TT, T2V; Chris@10: V T2W; Chris@10: { Chris@10: V T2O, T2P, TO, TP, TL, TM; Chris@10: TL = LD(&(ri[WS(is, 62)]), ivs, &(ri[0])); Chris@10: TM = LD(&(ri[WS(is, 30)]), ivs, &(ri[0])); Chris@10: T5Q = VFNMS(LDK(KP707106781), T5P, T5M); Chris@10: T6V = VFMA(LDK(KP707106781), T5P, T5M); Chris@10: T8y = VFMA(LDK(KP707106781), T8x, T8w); Chris@10: T9z = VFNMS(LDK(KP707106781), T8x, T8w); Chris@10: T5j = VFNMS(LDK(KP707106781), T5i, T57); Chris@10: T6Y = VFMA(LDK(KP707106781), T5i, T57); Chris@10: Tbb = VADD(Tb7, Tba); Chris@10: Tcw = VSUB(Tba, Tb7); Chris@10: T8n = VFMA(LDK(KP707106781), T8m, T8l); Chris@10: T9C = VFNMS(LDK(KP707106781), T8m, T8l); Chris@10: T40 = VSUB(TL, TM); Chris@10: TN = VADD(TL, TM); Chris@10: T2O = LD(&(ii[WS(is, 62)]), ivs, &(ii[0])); Chris@10: T2P = LD(&(ii[WS(is, 30)]), ivs, &(ii[0])); Chris@10: TO = LD(&(ri[WS(is, 14)]), ivs, &(ri[0])); Chris@10: TP = LD(&(ri[WS(is, 46)]), ivs, &(ri[0])); Chris@10: { Chris@10: V T2R, T2S, TV, TW; Chris@10: T2R = LD(&(ii[WS(is, 14)]), ivs, &(ii[0])); Chris@10: T2S = LD(&(ii[WS(is, 46)]), ivs, &(ii[0])); Chris@10: TV = LD(&(ri[WS(is, 54)]), ivs, &(ri[0])); Chris@10: TW = LD(&(ri[WS(is, 22)]), ivs, &(ri[0])); Chris@10: T2Y = LD(&(ii[WS(is, 54)]), ivs, &(ii[0])); Chris@10: T3Q = VSUB(T2O, T2P); Chris@10: T2Q = VADD(T2O, T2P); Chris@10: T3P = VSUB(TO, TP); Chris@10: TQ = VADD(TO, TP); Chris@10: T41 = VSUB(T2R, T2S); Chris@10: T2T = VADD(T2R, T2S); Chris@10: T3V = VSUB(TV, TW); Chris@10: TX = VADD(TV, TW); Chris@10: T2Z = LD(&(ii[WS(is, 22)]), ivs, &(ii[0])); Chris@10: TS = LD(&(ri[WS(is, 6)]), ivs, &(ri[0])); Chris@10: TT = LD(&(ri[WS(is, 38)]), ivs, &(ri[0])); Chris@10: T2V = LD(&(ii[WS(is, 6)]), ivs, &(ii[0])); Chris@10: T2W = LD(&(ii[WS(is, 38)]), ivs, &(ii[0])); Chris@10: } Chris@10: } Chris@10: { Chris@10: V TaA, TR, Tay, T2U, T3W, T30, TU, T3S, T2X, T3T; Chris@10: TaA = VSUB(TN, TQ); Chris@10: TR = VADD(TN, TQ); Chris@10: Tay = VSUB(T2Q, T2T); Chris@10: T2U = VADD(T2Q, T2T); Chris@10: T3W = VSUB(T2Y, T2Z); 
Chris@10: T30 = VADD(T2Y, T2Z); Chris@10: TU = VADD(TS, TT); Chris@10: T3S = VSUB(TS, TT); Chris@10: T2X = VADD(T2V, T2W); Chris@10: T3T = VSUB(T2V, T2W); Chris@10: { Chris@10: V T3X, T43, Tax, TY, T31, TaB, T3U, T44; Chris@10: T7T = VSUB(T3Q, T3P); Chris@10: T3R = VADD(T3P, T3Q); Chris@10: T3X = VSUB(T3V, T3W); Chris@10: T43 = VADD(T3V, T3W); Chris@10: Tax = VSUB(TX, TU); Chris@10: TY = VADD(TU, TX); Chris@10: T31 = VADD(T2X, T30); Chris@10: TaB = VSUB(T2X, T30); Chris@10: T3U = VADD(T3S, T3T); Chris@10: T44 = VSUB(T3T, T3S); Chris@10: T42 = VSUB(T40, T41); Chris@10: T7W = VADD(T40, T41); Chris@10: Tch = VSUB(Tay, Tax); Chris@10: Taz = VADD(Tax, Tay); Chris@10: Tdf = VSUB(TR, TY); Chris@10: TZ = VADD(TR, TY); Chris@10: Tdg = VSUB(T2U, T31); Chris@10: T32 = VADD(T2U, T31); Chris@10: Tci = VSUB(TaA, TaB); Chris@10: TaC = VADD(TaA, TaB); Chris@10: T3Y = VSUB(T3U, T3X); Chris@10: T7X = VADD(T3U, T3X); Chris@10: T45 = VSUB(T43, T44); Chris@10: T7U = VADD(T44, T43); Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V T4P, T14, T4l, TaH, T4d, T4a, T17, TaI, T4S, T4k, T1e, T4m, T19, T1a, T4g; Chris@10: V T4h; Chris@10: { Chris@10: V T4b, T4c, T15, T16, T12, T13; Chris@10: T12 = LD(&(ri[WS(is, 1)]), ivs, &(ri[WS(is, 1)])); Chris@10: T13 = LD(&(ri[WS(is, 33)]), ivs, &(ri[WS(is, 1)])); Chris@10: T4b = LD(&(ii[WS(is, 1)]), ivs, &(ii[WS(is, 1)])); Chris@10: T6J = VFMA(LDK(KP707106781), T3Y, T3R); Chris@10: T3Z = VFNMS(LDK(KP707106781), T3Y, T3R); Chris@10: T9n = VFNMS(LDK(KP707106781), T7U, T7T); Chris@10: T7V = VFMA(LDK(KP707106781), T7U, T7T); Chris@10: T9o = VFNMS(LDK(KP707106781), T7X, T7W); Chris@10: T7Y = VFMA(LDK(KP707106781), T7X, T7W); Chris@10: T6K = VFMA(LDK(KP707106781), T45, T42); Chris@10: T46 = VFNMS(LDK(KP707106781), T45, T42); Chris@10: T4P = VSUB(T12, T13); Chris@10: T14 = VADD(T12, T13); Chris@10: T4c = LD(&(ii[WS(is, 33)]), ivs, &(ii[WS(is, 1)])); Chris@10: T15 = LD(&(ri[WS(is, 17)]), ivs, &(ri[WS(is, 1)])); Chris@10: T16 = LD(&(ri[WS(is, 49)]), ivs, &(ri[WS(is, 1)])); Chris@10: { Chris@10: V T4Q, T4R, T1c, T1d; Chris@10: T4Q = LD(&(ii[WS(is, 17)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4R = LD(&(ii[WS(is, 49)]), ivs, &(ii[WS(is, 1)])); Chris@10: T1c = LD(&(ri[WS(is, 57)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1d = LD(&(ri[WS(is, 25)]), ivs, &(ri[WS(is, 1)])); Chris@10: T4l = LD(&(ii[WS(is, 57)]), ivs, &(ii[WS(is, 1)])); Chris@10: TaH = VADD(T4b, T4c); Chris@10: T4d = VSUB(T4b, T4c); Chris@10: T4a = VSUB(T15, T16); Chris@10: T17 = VADD(T15, T16); Chris@10: TaI = VADD(T4Q, T4R); Chris@10: T4S = VSUB(T4Q, T4R); Chris@10: T4k = VSUB(T1c, T1d); Chris@10: T1e = VADD(T1c, T1d); Chris@10: T4m = LD(&(ii[WS(is, 25)]), ivs, &(ii[WS(is, 1)])); Chris@10: T19 = LD(&(ri[WS(is, 9)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1a = LD(&(ri[WS(is, 41)]), ivs, &(ri[WS(is, 1)])); Chris@10: T4g = LD(&(ii[WS(is, 9)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4h = LD(&(ii[WS(is, 41)]), ivs, &(ii[WS(is, 1)])); Chris@10: } Chris@10: } Chris@10: { Chris@10: V TaX, T18, T4n, TaZ, TaJ, Tdk, T1b, T4f, TaY, T4i; Chris@10: TaX = VSUB(T14, T17); Chris@10: T18 = VADD(T14, T17); Chris@10: T4n = VSUB(T4l, T4m); Chris@10: TaZ = VADD(T4l, T4m); Chris@10: TaJ = VSUB(TaH, TaI); Chris@10: Tdk = VADD(TaH, TaI); Chris@10: T1b = VADD(T19, T1a); Chris@10: T4f = VSUB(T19, T1a); Chris@10: TaY = VADD(T4g, T4h); Chris@10: T4i = VSUB(T4g, T4h); Chris@10: T4e = VADD(T4a, T4d); Chris@10: T82 = VSUB(T4d, T4a); Chris@10: { Chris@10: V T4U, T4o, T1f, TaG, Tdl, Tb0, T4V, T4j; Chris@10: T8d = VADD(T4P, T4S); Chris@10: T4T = VSUB(T4P, T4S); 
Chris@10: T4U = VADD(T4k, T4n); Chris@10: T4o = VSUB(T4k, T4n); Chris@10: T1f = VADD(T1b, T1e); Chris@10: TaG = VSUB(T1e, T1b); Chris@10: Tdl = VADD(TaY, TaZ); Chris@10: Tb0 = VSUB(TaY, TaZ); Chris@10: T4V = VSUB(T4i, T4f); Chris@10: T4j = VADD(T4f, T4i); Chris@10: Tdp = VSUB(T18, T1f); Chris@10: T1g = VADD(T18, T1f); Chris@10: Tej = VADD(Tdk, Tdl); Chris@10: Tdm = VSUB(Tdk, Tdl); Chris@10: Tcm = VSUB(TaX, Tb0); Chris@10: Tb1 = VADD(TaX, Tb0); Chris@10: T4W = VSUB(T4U, T4V); Chris@10: T83 = VADD(T4V, T4U); Chris@10: T4p = VSUB(T4j, T4o); Chris@10: T8e = VADD(T4j, T4o); Chris@10: Tcp = VSUB(TaJ, TaG); Chris@10: TaK = VADD(TaG, TaJ); Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V T1n, Tdq, T4r, T1q, TaR, T4z, Tb2, TaP, T4M, T4Y, T4w, T1t, TaS, T4u, T8g; Chris@10: V T87; Chris@10: { Chris@10: V T1r, T85, T4L, TaO, TaN, T86, T4G, T1s, T4s, T4t; Chris@10: { Chris@10: V T1h, T1i, T4I, T4J, T1k, T1l, T4D, T4E; Chris@10: T1h = LD(&(ri[WS(is, 5)]), ivs, &(ri[WS(is, 1)])); Chris@10: T6O = VFMA(LDK(KP707106781), T4W, T4T); Chris@10: T4X = VFNMS(LDK(KP707106781), T4W, T4T); Chris@10: T9s = VFNMS(LDK(KP707106781), T8e, T8d); Chris@10: T8f = VFMA(LDK(KP707106781), T8e, T8d); Chris@10: T6R = VFMA(LDK(KP707106781), T4p, T4e); Chris@10: T4q = VFNMS(LDK(KP707106781), T4p, T4e); Chris@10: T9v = VFNMS(LDK(KP707106781), T83, T82); Chris@10: T84 = VFMA(LDK(KP707106781), T83, T82); Chris@10: T1i = LD(&(ri[WS(is, 37)]), ivs, &(ri[WS(is, 1)])); Chris@10: T4I = LD(&(ii[WS(is, 5)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4J = LD(&(ii[WS(is, 37)]), ivs, &(ii[WS(is, 1)])); Chris@10: T1k = LD(&(ri[WS(is, 21)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1l = LD(&(ri[WS(is, 53)]), ivs, &(ri[WS(is, 1)])); Chris@10: T4D = LD(&(ii[WS(is, 21)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4E = LD(&(ii[WS(is, 53)]), ivs, &(ii[WS(is, 1)])); Chris@10: { Chris@10: V T1o, T4C, T1j, TaL, T4K, T4H, T1m, TaM, T4F, T1p, T4x, T4y; Chris@10: T1o = LD(&(ri[WS(is, 61)]), ivs, &(ri[WS(is, 1)])); Chris@10: T4C = VSUB(T1h, T1i); Chris@10: T1j = VADD(T1h, T1i); Chris@10: TaL = VADD(T4I, T4J); Chris@10: T4K = VSUB(T4I, T4J); Chris@10: T4H = VSUB(T1k, T1l); Chris@10: T1m = VADD(T1k, T1l); Chris@10: TaM = VADD(T4D, T4E); Chris@10: T4F = VSUB(T4D, T4E); Chris@10: T1p = LD(&(ri[WS(is, 29)]), ivs, &(ri[WS(is, 1)])); Chris@10: T4x = LD(&(ii[WS(is, 61)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4y = LD(&(ii[WS(is, 29)]), ivs, &(ii[WS(is, 1)])); Chris@10: T1r = LD(&(ri[WS(is, 13)]), ivs, &(ri[WS(is, 1)])); Chris@10: T85 = VSUB(T4K, T4H); Chris@10: T4L = VADD(T4H, T4K); Chris@10: TaO = VSUB(T1j, T1m); Chris@10: T1n = VADD(T1j, T1m); Chris@10: Tdq = VADD(TaL, TaM); Chris@10: TaN = VSUB(TaL, TaM); Chris@10: T86 = VADD(T4C, T4F); Chris@10: T4G = VSUB(T4C, T4F); Chris@10: T4r = VSUB(T1o, T1p); Chris@10: T1q = VADD(T1o, T1p); Chris@10: TaR = VADD(T4x, T4y); Chris@10: T4z = VSUB(T4x, T4y); Chris@10: T1s = LD(&(ri[WS(is, 45)]), ivs, &(ri[WS(is, 1)])); Chris@10: T4s = LD(&(ii[WS(is, 13)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4t = LD(&(ii[WS(is, 45)]), ivs, &(ii[WS(is, 1)])); Chris@10: } Chris@10: } Chris@10: Tb2 = VADD(TaO, TaN); Chris@10: TaP = VSUB(TaN, TaO); Chris@10: T4M = VFNMS(LDK(KP414213562), T4L, T4G); Chris@10: T4Y = VFMA(LDK(KP414213562), T4G, T4L); Chris@10: T4w = VSUB(T1r, T1s); Chris@10: T1t = VADD(T1r, T1s); Chris@10: TaS = VADD(T4s, T4t); Chris@10: T4u = VSUB(T4s, T4t); Chris@10: T8g = VFMA(LDK(KP414213562), T85, T86); Chris@10: T87 = VFNMS(LDK(KP414213562), T86, T85); Chris@10: } Chris@10: { Chris@10: V T1W, T8o, T5E, Tbf, Tbe, T8p, T5z, T1X, 
T5l, T5m; Chris@10: { Chris@10: V T5B, T5v, T1O, T5C, T1P, T1Q, T5w, T5x; Chris@10: { Chris@10: V T1M, T88, T4A, T1u, TaQ, Tdr, TaT, T89, T4v, T1N, TaU, Tb3; Chris@10: T1M = LD(&(ri[WS(is, 3)]), ivs, &(ri[WS(is, 1)])); Chris@10: T88 = VSUB(T4z, T4w); Chris@10: T4A = VADD(T4w, T4z); Chris@10: T1u = VADD(T1q, T1t); Chris@10: TaQ = VSUB(T1q, T1t); Chris@10: Tdr = VADD(TaR, TaS); Chris@10: TaT = VSUB(TaR, TaS); Chris@10: T89 = VADD(T4r, T4u); Chris@10: T4v = VSUB(T4r, T4u); Chris@10: T1N = LD(&(ri[WS(is, 35)]), ivs, &(ri[WS(is, 1)])); Chris@10: T5B = LD(&(ii[WS(is, 3)]), ivs, &(ii[WS(is, 1)])); Chris@10: Tdn = VSUB(T1u, T1n); Chris@10: T1v = VADD(T1n, T1u); Chris@10: Tek = VADD(Tdq, Tdr); Chris@10: Tds = VSUB(Tdq, Tdr); Chris@10: TaU = VADD(TaQ, TaT); Chris@10: Tb3 = VSUB(TaQ, TaT); Chris@10: { Chris@10: V T8a, T8h, T4Z, T4B; Chris@10: T8a = VFMA(LDK(KP414213562), T89, T88); Chris@10: T8h = VFNMS(LDK(KP414213562), T88, T89); Chris@10: T4Z = VFNMS(LDK(KP414213562), T4v, T4A); Chris@10: T4B = VFMA(LDK(KP414213562), T4A, T4v); Chris@10: T5v = VSUB(T1M, T1N); Chris@10: T1O = VADD(T1M, T1N); Chris@10: Tcn = VSUB(TaU, TaP); Chris@10: TaV = VADD(TaP, TaU); Chris@10: Tcq = VSUB(Tb2, Tb3); Chris@10: Tb4 = VADD(Tb2, Tb3); Chris@10: T9t = VSUB(T8a, T87); Chris@10: T8b = VADD(T87, T8a); Chris@10: T9w = VSUB(T8g, T8h); Chris@10: T8i = VADD(T8g, T8h); Chris@10: T6S = VADD(T4Y, T4Z); Chris@10: T50 = VSUB(T4Y, T4Z); Chris@10: T6P = VADD(T4M, T4B); Chris@10: T4N = VSUB(T4B, T4M); Chris@10: T5C = LD(&(ii[WS(is, 35)]), ivs, &(ii[WS(is, 1)])); Chris@10: } Chris@10: } Chris@10: T1P = LD(&(ri[WS(is, 19)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1Q = LD(&(ri[WS(is, 51)]), ivs, &(ri[WS(is, 1)])); Chris@10: T5w = LD(&(ii[WS(is, 19)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5x = LD(&(ii[WS(is, 51)]), ivs, &(ii[WS(is, 1)])); Chris@10: { Chris@10: V T5q, Tbc, T5D, T5A, T1R, Tbd, T5y, T5r, T1T, T1U; Chris@10: T1T = LD(&(ri[WS(is, 59)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1U = LD(&(ri[WS(is, 27)]), ivs, &(ri[WS(is, 1)])); Chris@10: T5q = LD(&(ii[WS(is, 59)]), ivs, &(ii[WS(is, 1)])); Chris@10: Tbc = VADD(T5B, T5C); Chris@10: T5D = VSUB(T5B, T5C); Chris@10: T5A = VSUB(T1P, T1Q); Chris@10: T1R = VADD(T1P, T1Q); Chris@10: Tbd = VADD(T5w, T5x); Chris@10: T5y = VSUB(T5w, T5x); Chris@10: T5k = VSUB(T1T, T1U); Chris@10: T1V = VADD(T1T, T1U); Chris@10: T5r = LD(&(ii[WS(is, 27)]), ivs, &(ii[WS(is, 1)])); Chris@10: T1W = LD(&(ri[WS(is, 11)]), ivs, &(ri[WS(is, 1)])); Chris@10: T8o = VSUB(T5D, T5A); Chris@10: T5E = VADD(T5A, T5D); Chris@10: Tbf = VSUB(T1O, T1R); Chris@10: T1S = VADD(T1O, T1R); Chris@10: TdB = VADD(Tbc, Tbd); Chris@10: Tbe = VSUB(Tbc, Tbd); Chris@10: T8p = VADD(T5v, T5y); Chris@10: T5z = VSUB(T5v, T5y); Chris@10: Tbi = VADD(T5q, T5r); Chris@10: T5s = VSUB(T5q, T5r); Chris@10: T1X = LD(&(ri[WS(is, 43)]), ivs, &(ri[WS(is, 1)])); Chris@10: T5l = LD(&(ii[WS(is, 11)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5m = LD(&(ii[WS(is, 43)]), ivs, &(ii[WS(is, 1)])); Chris@10: } Chris@10: } Chris@10: Tbt = VADD(Tbf, Tbe); Chris@10: Tbg = VSUB(Tbe, Tbf); Chris@10: T5F = VFNMS(LDK(KP414213562), T5E, T5z); Chris@10: T5R = VFMA(LDK(KP414213562), T5z, T5E); Chris@10: T5p = VSUB(T1W, T1X); Chris@10: T1Y = VADD(T1W, T1X); Chris@10: Tbj = VADD(T5l, T5m); Chris@10: T5n = VSUB(T5l, T5m); Chris@10: T8z = VFMA(LDK(KP414213562), T8o, T8p); Chris@10: T8q = VFNMS(LDK(KP414213562), T8p, T8o); Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V Tbm, Tbv, T9A, T8u, T9D, T8B, T6Z, T5T, T6W, T5G, TeL, TeM, TeN, TeO, TeR; Chris@10: V TeS, TeT, 
TeU, TeV, TeW, TeX, TeY, TeZ, Tf0, Tf1, Tf2, Tf3, Tf4, Tf5, Tf6; Chris@10: V Tf7, Tf8, Tf9, Tfa, Tfb, Tfc, TbE, Tao, Tfd, Tfe, Td7, Td8, Tff, Tfg, Tfh; Chris@10: V Tfi, Tfj, Tfk, Tfl, Tfm, Tfn, Tfo, Tfp, Tfq, Tfr, Tfs; Chris@10: { Chris@10: V Tel, Tdy, TdD, Tcu, Tcx, Teq, Tei, Ten, Tex, Teh, TeB, Tev, Te9, Tec; Chris@10: { Chris@10: V Tef, Teu, TeE, TeD, T11, TeF, T1w, T21, Tet, T2y, T33, Teg, T20; Chris@10: { Chris@10: V Tv, T8r, T5t, T1Z, Tbh, TdC, Tbk, T8s, T5o, T10, Tep, Tbl, Tbu; Chris@10: Tef = VSUB(Tf, Tu); Chris@10: Tv = VADD(Tf, Tu); Chris@10: T8r = VSUB(T5s, T5p); Chris@10: T5t = VADD(T5p, T5s); Chris@10: T1Z = VADD(T1V, T1Y); Chris@10: Tbh = VSUB(T1V, T1Y); Chris@10: TdC = VADD(Tbi, Tbj); Chris@10: Tbk = VSUB(Tbi, Tbj); Chris@10: T8s = VADD(T5k, T5n); Chris@10: T5o = VSUB(T5k, T5n); Chris@10: T10 = VADD(TK, TZ); Chris@10: Teu = VSUB(TZ, TK); Chris@10: Tel = VSUB(Tej, Tek); Chris@10: TeE = VADD(Tej, Tek); Chris@10: Tdy = VSUB(T1Z, T1S); Chris@10: T20 = VADD(T1S, T1Z); Chris@10: Tep = VADD(TdB, TdC); Chris@10: TdD = VSUB(TdB, TdC); Chris@10: Tbl = VADD(Tbh, Tbk); Chris@10: Tbu = VSUB(Tbh, Tbk); Chris@10: { Chris@10: V T8t, T8A, T5S, T5u; Chris@10: T8t = VFMA(LDK(KP414213562), T8s, T8r); Chris@10: T8A = VFNMS(LDK(KP414213562), T8r, T8s); Chris@10: T5S = VFNMS(LDK(KP414213562), T5o, T5t); Chris@10: T5u = VFMA(LDK(KP414213562), T5t, T5o); Chris@10: TeD = VSUB(Tv, T10); Chris@10: T11 = VADD(Tv, T10); Chris@10: Tcu = VSUB(Tbl, Tbg); Chris@10: Tbm = VADD(Tbg, Tbl); Chris@10: Tcx = VSUB(Tbt, Tbu); Chris@10: Tbv = VADD(Tbt, Tbu); Chris@10: T9A = VSUB(T8t, T8q); Chris@10: T8u = VADD(T8q, T8t); Chris@10: T9D = VSUB(T8z, T8A); Chris@10: T8B = VADD(T8z, T8A); Chris@10: T6Z = VADD(T5R, T5S); Chris@10: T5T = VSUB(T5R, T5S); Chris@10: T6W = VADD(T5F, T5u); Chris@10: T5G = VSUB(T5u, T5F); Chris@10: TeF = VADD(Teo, Tep); Chris@10: Teq = VSUB(Teo, Tep); Chris@10: } Chris@10: } Chris@10: Tei = VSUB(T1g, T1v); Chris@10: T1w = VADD(T1g, T1v); Chris@10: T21 = VADD(T1L, T20); Chris@10: Ten = VSUB(T1L, T20); Chris@10: Tet = VSUB(T2i, T2x); Chris@10: T2y = VADD(T2i, T2x); Chris@10: T33 = VADD(T2N, T32); Chris@10: Teg = VSUB(T2N, T32); Chris@10: { Chris@10: V TeI, TeG, T23, T22, TeH, T34; Chris@10: TeI = VADD(TeE, TeF); Chris@10: TeG = VSUB(TeE, TeF); Chris@10: T23 = VSUB(T21, T1w); Chris@10: T22 = VADD(T1w, T21); Chris@10: TeH = VADD(T2y, T33); Chris@10: T34 = VSUB(T2y, T33); Chris@10: Tex = VSUB(Tef, Teg); Chris@10: Teh = VADD(Tef, Teg); Chris@10: TeJ = VSUB(TeD, TeG); Chris@10: STM4(&(ro[48]), TeJ, ovs, &(ro[0])); Chris@10: TeK = VADD(TeD, TeG); Chris@10: STM4(&(ro[16]), TeK, ovs, &(ro[0])); Chris@10: TeL = VADD(T11, T22); Chris@10: STM4(&(ro[0]), TeL, ovs, &(ro[0])); Chris@10: TeM = VSUB(T11, T22); Chris@10: STM4(&(ro[32]), TeM, ovs, &(ro[0])); Chris@10: TeN = VADD(TeH, TeI); Chris@10: STM4(&(io[0]), TeN, ovs, &(io[0])); Chris@10: TeO = VSUB(TeH, TeI); Chris@10: STM4(&(io[32]), TeO, ovs, &(io[0])); Chris@10: TeP = VSUB(T34, T23); Chris@10: STM4(&(io[48]), TeP, ovs, &(io[0])); Chris@10: TeQ = VADD(T23, T34); Chris@10: STM4(&(io[16]), TeQ, ovs, &(io[0])); Chris@10: TeB = VADD(Teu, Tet); Chris@10: Tev = VSUB(Tet, Teu); Chris@10: } Chris@10: } Chris@10: { Chris@10: V TdV, Tdb, TdJ, Te5, TdE, Tdz, TdZ, Tdo, Te6, Tdi, Teb, Te3, TdW, TdM, Tdt; Chris@10: V TdY; Chris@10: { Chris@10: V TdL, Tde, Tey, Tem, Tez, Ter, Tdh, TdK, Te1, Te2; Chris@10: TdV = VADD(Td9, Tda); Chris@10: Tdb = VSUB(Td9, Tda); Chris@10: TdJ = VSUB(TdH, TdI); Chris@10: Te5 = VADD(TdI, TdH); Chris@10: TdL = VADD(Tdd, Tdc); Chris@10: Tde = 
VSUB(Tdc, Tdd); Chris@10: Tey = VSUB(Tel, Tei); Chris@10: Tem = VADD(Tei, Tel); Chris@10: Tez = VADD(Ten, Teq); Chris@10: Ter = VSUB(Ten, Teq); Chris@10: Tdh = VADD(Tdf, Tdg); Chris@10: TdK = VSUB(Tdf, Tdg); Chris@10: TdE = VSUB(TdA, TdD); Chris@10: Te1 = VADD(TdA, TdD); Chris@10: Te2 = VADD(Tdy, Tdx); Chris@10: Tdz = VSUB(Tdx, Tdy); Chris@10: TdZ = VADD(Tdn, Tdm); Chris@10: Tdo = VSUB(Tdm, Tdn); Chris@10: { Chris@10: V TeA, TeC, Tew, Tes; Chris@10: TeA = VSUB(Tey, Tez); Chris@10: TeC = VADD(Tey, Tez); Chris@10: Tew = VSUB(Ter, Tem); Chris@10: Tes = VADD(Tem, Ter); Chris@10: Te6 = VADD(Tde, Tdh); Chris@10: Tdi = VSUB(Tde, Tdh); Chris@10: Teb = VFMA(LDK(KP414213562), Te1, Te2); Chris@10: Te3 = VFNMS(LDK(KP414213562), Te2, Te1); Chris@10: TdW = VADD(TdL, TdK); Chris@10: TdM = VSUB(TdK, TdL); Chris@10: TeR = VFMA(LDK(KP707106781), TeA, Tex); Chris@10: STM4(&(ro[24]), TeR, ovs, &(ro[0])); Chris@10: TeS = VFNMS(LDK(KP707106781), TeA, Tex); Chris@10: STM4(&(ro[56]), TeS, ovs, &(ro[0])); Chris@10: TeT = VFMA(LDK(KP707106781), TeC, TeB); Chris@10: STM4(&(io[8]), TeT, ovs, &(io[0])); Chris@10: TeU = VFNMS(LDK(KP707106781), TeC, TeB); Chris@10: STM4(&(io[40]), TeU, ovs, &(io[0])); Chris@10: TeV = VFMA(LDK(KP707106781), Tew, Tev); Chris@10: STM4(&(io[24]), TeV, ovs, &(io[0])); Chris@10: TeW = VFNMS(LDK(KP707106781), Tew, Tev); Chris@10: STM4(&(io[56]), TeW, ovs, &(io[0])); Chris@10: TeX = VFMA(LDK(KP707106781), Tes, Teh); Chris@10: STM4(&(ro[8]), TeX, ovs, &(ro[0])); Chris@10: TeY = VFNMS(LDK(KP707106781), Tes, Teh); Chris@10: STM4(&(ro[40]), TeY, ovs, &(ro[0])); Chris@10: Tdt = VSUB(Tdp, Tds); Chris@10: TdY = VADD(Tdp, Tds); Chris@10: } Chris@10: } Chris@10: { Chris@10: V TdT, Tdj, TdP, TdN, TdR, Tdu, Tea, Te0, TdQ, TdF, TdX, Ted, Te7; Chris@10: TdT = VFNMS(LDK(KP707106781), Tdi, Tdb); Chris@10: Tdj = VFMA(LDK(KP707106781), Tdi, Tdb); Chris@10: TdP = VFMA(LDK(KP707106781), TdM, TdJ); Chris@10: TdN = VFNMS(LDK(KP707106781), TdM, TdJ); Chris@10: TdR = VFNMS(LDK(KP414213562), Tdo, Tdt); Chris@10: Tdu = VFMA(LDK(KP414213562), Tdt, Tdo); Chris@10: Tea = VFNMS(LDK(KP414213562), TdY, TdZ); Chris@10: Te0 = VFMA(LDK(KP414213562), TdZ, TdY); Chris@10: TdQ = VFMA(LDK(KP414213562), Tdz, TdE); Chris@10: TdF = VFNMS(LDK(KP414213562), TdE, Tdz); Chris@10: Te9 = VFNMS(LDK(KP707106781), TdW, TdV); Chris@10: TdX = VFMA(LDK(KP707106781), TdW, TdV); Chris@10: Ted = VFMA(LDK(KP707106781), Te6, Te5); Chris@10: Te7 = VFNMS(LDK(KP707106781), Te6, Te5); Chris@10: { Chris@10: V Tee, Te8, Te4, TdU, TdS, TdO, TdG; Chris@10: Tee = VADD(Tea, Teb); Chris@10: Tec = VSUB(Tea, Teb); Chris@10: Te8 = VSUB(Te3, Te0); Chris@10: Te4 = VADD(Te0, Te3); Chris@10: TdU = VADD(TdR, TdQ); Chris@10: TdS = VSUB(TdQ, TdR); Chris@10: TdO = VADD(Tdu, TdF); Chris@10: TdG = VSUB(Tdu, TdF); Chris@10: TeZ = VFMA(LDK(KP923879532), Tee, Ted); Chris@10: STM4(&(io[4]), TeZ, ovs, &(io[0])); Chris@10: Tf0 = VFNMS(LDK(KP923879532), Tee, Ted); Chris@10: STM4(&(io[36]), Tf0, ovs, &(io[0])); Chris@10: Tf1 = VFMA(LDK(KP923879532), Te4, TdX); Chris@10: STM4(&(ro[4]), Tf1, ovs, &(ro[0])); Chris@10: Tf2 = VFNMS(LDK(KP923879532), Te4, TdX); Chris@10: STM4(&(ro[36]), Tf2, ovs, &(ro[0])); Chris@10: Tf3 = VFMA(LDK(KP923879532), TdU, TdT); Chris@10: STM4(&(ro[60]), Tf3, ovs, &(ro[0])); Chris@10: Tf4 = VFNMS(LDK(KP923879532), TdU, TdT); Chris@10: STM4(&(ro[28]), Tf4, ovs, &(ro[0])); Chris@10: Tf5 = VFMA(LDK(KP923879532), TdS, TdP); Chris@10: STM4(&(io[12]), Tf5, ovs, &(io[0])); Chris@10: Tf6 = VFNMS(LDK(KP923879532), TdS, TdP); Chris@10: STM4(&(io[44]), Tf6, ovs, 
&(io[0])); Chris@10: Tf7 = VFMA(LDK(KP923879532), TdO, TdN); Chris@10: STM4(&(io[60]), Tf7, ovs, &(io[0])); Chris@10: Tf8 = VFNMS(LDK(KP923879532), TdO, TdN); Chris@10: STM4(&(io[28]), Tf8, ovs, &(io[0])); Chris@10: Tf9 = VFMA(LDK(KP923879532), TdG, Tdj); Chris@10: STM4(&(ro[12]), Tf9, ovs, &(ro[0])); Chris@10: Tfa = VFNMS(LDK(KP923879532), TdG, Tdj); Chris@10: STM4(&(ro[44]), Tfa, ovs, &(ro[0])); Chris@10: Tfb = VFMA(LDK(KP923879532), Te8, Te7); Chris@10: STM4(&(io[20]), Tfb, ovs, &(io[0])); Chris@10: Tfc = VFNMS(LDK(KP923879532), Te8, Te7); Chris@10: STM4(&(io[52]), Tfc, ovs, &(io[0])); Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V TcF, TcE, Tcy, Tcv, TcT, Tco, TcP, Tcd, TcZ, TcD, Td0, Tck, Td4, TcX, Tcr; Chris@10: V TcS; Chris@10: { Chris@10: V Tcc, TcC, Tcg, Tcj, TcV, TcW; Chris@10: TbE = VADD(TbC, TbD); Chris@10: Tcc = VSUB(TbC, TbD); Chris@10: TcC = VSUB(Tan, Tak); Chris@10: Tao = VADD(Tak, Tan); Chris@10: TcF = VFNMS(LDK(KP414213562), Tce, Tcf); Chris@10: Tcg = VFMA(LDK(KP414213562), Tcf, Tce); Chris@10: Tcj = VFNMS(LDK(KP414213562), Tci, Tch); Chris@10: TcE = VFMA(LDK(KP414213562), Tch, Tci); Chris@10: Tcy = VFNMS(LDK(KP707106781), Tcx, Tcw); Chris@10: TcV = VFMA(LDK(KP707106781), Tcx, Tcw); Chris@10: TcW = VFMA(LDK(KP707106781), Tcu, Tct); Chris@10: Tcv = VFNMS(LDK(KP707106781), Tcu, Tct); Chris@10: TcT = VFMA(LDK(KP707106781), Tcn, Tcm); Chris@10: Tco = VFNMS(LDK(KP707106781), Tcn, Tcm); Chris@10: Tfd = VFMA(LDK(KP923879532), Tec, Te9); Chris@10: STM4(&(ro[20]), Tfd, ovs, &(ro[0])); Chris@10: Tfe = VFNMS(LDK(KP923879532), Tec, Te9); Chris@10: STM4(&(ro[52]), Tfe, ovs, &(ro[0])); Chris@10: TcP = VFNMS(LDK(KP707106781), Tcc, Tcb); Chris@10: Tcd = VFMA(LDK(KP707106781), Tcc, Tcb); Chris@10: TcZ = VFNMS(LDK(KP707106781), TcC, TcB); Chris@10: TcD = VFMA(LDK(KP707106781), TcC, TcB); Chris@10: Td0 = VADD(Tcg, Tcj); Chris@10: Tck = VSUB(Tcg, Tcj); Chris@10: Td4 = VFMA(LDK(KP198912367), TcV, TcW); Chris@10: TcX = VFNMS(LDK(KP198912367), TcW, TcV); Chris@10: Tcr = VFNMS(LDK(KP707106781), Tcq, Tcp); Chris@10: TcS = VFMA(LDK(KP707106781), Tcq, Tcp); Chris@10: } Chris@10: { Chris@10: V TcJ, Tcl, TcK, Tcs, TcQ, TcG, Td5, TcU, TcL, Tcz; Chris@10: TcJ = VFNMS(LDK(KP923879532), Tck, Tcd); Chris@10: Tcl = VFMA(LDK(KP923879532), Tck, Tcd); Chris@10: TcK = VFNMS(LDK(KP668178637), Tco, Tcr); Chris@10: Tcs = VFMA(LDK(KP668178637), Tcr, Tco); Chris@10: TcQ = VADD(TcF, TcE); Chris@10: TcG = VSUB(TcE, TcF); Chris@10: Td5 = VFNMS(LDK(KP198912367), TcS, TcT); Chris@10: TcU = VFMA(LDK(KP198912367), TcT, TcS); Chris@10: TcL = VFMA(LDK(KP668178637), Tcv, Tcy); Chris@10: Tcz = VFNMS(LDK(KP668178637), Tcy, Tcv); Chris@10: { Chris@10: V Td1, Td3, TcR, TcN, TcH, Td2, TcY, TcM, TcO, TcI, TcA, Td6; Chris@10: Td1 = VFMA(LDK(KP923879532), Td0, TcZ); Chris@10: Td3 = VFNMS(LDK(KP923879532), Td0, TcZ); Chris@10: TcR = VFNMS(LDK(KP923879532), TcQ, TcP); Chris@10: Td7 = VFMA(LDK(KP923879532), TcQ, TcP); Chris@10: TcN = VFMA(LDK(KP923879532), TcG, TcD); Chris@10: TcH = VFNMS(LDK(KP923879532), TcG, TcD); Chris@10: Td2 = VADD(TcU, TcX); Chris@10: TcY = VSUB(TcU, TcX); Chris@10: TcM = VSUB(TcK, TcL); Chris@10: TcO = VADD(TcK, TcL); Chris@10: TcI = VSUB(Tcz, Tcs); Chris@10: TcA = VADD(Tcs, Tcz); Chris@10: Td6 = VSUB(Td4, Td5); Chris@10: Td8 = VADD(Td5, Td4); Chris@10: Tff = VFMA(LDK(KP980785280), TcY, TcR); Chris@10: STM4(&(ro[14]), Tff, ovs, &(ro[0])); Chris@10: Tfg = VFNMS(LDK(KP980785280), TcY, TcR); Chris@10: STM4(&(ro[46]), Tfg, ovs, &(ro[0])); Chris@10: Tfh = VFMA(LDK(KP831469612), TcM, TcJ); Chris@10: 
STM4(&(ro[22]), Tfh, ovs, &(ro[0])); Chris@10: Tfi = VFNMS(LDK(KP831469612), TcM, TcJ); Chris@10: STM4(&(ro[54]), Tfi, ovs, &(ro[0])); Chris@10: Tfj = VFMA(LDK(KP831469612), TcO, TcN); Chris@10: STM4(&(io[6]), Tfj, ovs, &(io[0])); Chris@10: Tfk = VFNMS(LDK(KP831469612), TcO, TcN); Chris@10: STM4(&(io[38]), Tfk, ovs, &(io[0])); Chris@10: Tfl = VFMA(LDK(KP831469612), TcI, TcH); Chris@10: STM4(&(io[22]), Tfl, ovs, &(io[0])); Chris@10: Tfm = VFNMS(LDK(KP831469612), TcI, TcH); Chris@10: STM4(&(io[54]), Tfm, ovs, &(io[0])); Chris@10: Tfn = VFMA(LDK(KP831469612), TcA, Tcl); Chris@10: STM4(&(ro[6]), Tfn, ovs, &(ro[0])); Chris@10: Tfo = VFNMS(LDK(KP831469612), TcA, Tcl); Chris@10: STM4(&(ro[38]), Tfo, ovs, &(ro[0])); Chris@10: Tfp = VFMA(LDK(KP980785280), Td6, Td3); Chris@10: STM4(&(io[14]), Tfp, ovs, &(io[0])); Chris@10: Tfq = VFNMS(LDK(KP980785280), Td6, Td3); Chris@10: STM4(&(io[46]), Tfq, ovs, &(io[0])); Chris@10: Tfr = VFNMS(LDK(KP980785280), Td2, Td1); Chris@10: STM4(&(io[30]), Tfr, ovs, &(io[0])); Chris@10: Tfs = VFMA(LDK(KP980785280), Td2, Td1); Chris@10: STM4(&(io[62]), Tfs, ovs, &(io[0])); Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V Tft, Tfu, Tfv, Tfw, Tfx, Tfy, Tfz, TfA, TfB, TfC, TfD, TfE, TfF, TfG, T3f; Chris@10: V T66, T63, T3u, TfL, TfM, TfN, TfO, TfP, TfQ, TfR, TfS, TfT, TfU, TfV, TfW; Chris@10: V TfX, TfY, TfZ, Tg0, Tc5, Tc8; Chris@10: { Chris@10: V TbH, TbG, Tbw, Tbn, TbV, TaW, TbR, Tap, Tc1, TbF, Tc2, TaE, Tc7, TbZ, Tb5; Chris@10: V TbU; Chris@10: { Chris@10: V Taw, TaD, TbX, TbY; Chris@10: TbH = VFMA(LDK(KP414213562), Tas, Tav); Chris@10: Taw = VFNMS(LDK(KP414213562), Tav, Tas); Chris@10: TaD = VFMA(LDK(KP414213562), TaC, Taz); Chris@10: TbG = VFNMS(LDK(KP414213562), Taz, TaC); Chris@10: Tbw = VFNMS(LDK(KP707106781), Tbv, Tbs); Chris@10: TbX = VFMA(LDK(KP707106781), Tbv, Tbs); Chris@10: TbY = VFMA(LDK(KP707106781), Tbm, Tbb); Chris@10: Tbn = VFNMS(LDK(KP707106781), Tbm, Tbb); Chris@10: TbV = VFMA(LDK(KP707106781), TaV, TaK); Chris@10: TaW = VFNMS(LDK(KP707106781), TaV, TaK); Chris@10: Tft = VFMA(LDK(KP980785280), Td8, Td7); Chris@10: STM4(&(ro[62]), Tft, ovs, &(ro[0])); Chris@10: Tfu = VFNMS(LDK(KP980785280), Td8, Td7); Chris@10: STM4(&(ro[30]), Tfu, ovs, &(ro[0])); Chris@10: TbR = VFMA(LDK(KP707106781), Tao, Tah); Chris@10: Tap = VFNMS(LDK(KP707106781), Tao, Tah); Chris@10: Tc1 = VFMA(LDK(KP707106781), TbE, TbB); Chris@10: TbF = VFNMS(LDK(KP707106781), TbE, TbB); Chris@10: Tc2 = VADD(Taw, TaD); Chris@10: TaE = VSUB(Taw, TaD); Chris@10: Tc7 = VFMA(LDK(KP198912367), TbX, TbY); Chris@10: TbZ = VFNMS(LDK(KP198912367), TbY, TbX); Chris@10: Tb5 = VFNMS(LDK(KP707106781), Tb4, Tb1); Chris@10: TbU = VFMA(LDK(KP707106781), Tb4, Tb1); Chris@10: } Chris@10: { Chris@10: V TbP, TaF, TbN, Tb6, TbS, TbI, Tc6, TbW, TbM, Tbx; Chris@10: TbP = VFNMS(LDK(KP923879532), TaE, Tap); Chris@10: TaF = VFMA(LDK(KP923879532), TaE, Tap); Chris@10: TbN = VFNMS(LDK(KP668178637), TaW, Tb5); Chris@10: Tb6 = VFMA(LDK(KP668178637), Tb5, TaW); Chris@10: TbS = VADD(TbH, TbG); Chris@10: TbI = VSUB(TbG, TbH); Chris@10: Tc6 = VFNMS(LDK(KP198912367), TbU, TbV); Chris@10: TbW = VFMA(LDK(KP198912367), TbV, TbU); Chris@10: TbM = VFMA(LDK(KP668178637), Tbn, Tbw); Chris@10: Tbx = VFNMS(LDK(KP668178637), Tbw, Tbn); Chris@10: { Chris@10: V Tc3, Tc9, TbT, TbL, TbJ, Tc4, Tc0, TbQ, TbO, TbK, Tby, Tca; Chris@10: Tc3 = VFNMS(LDK(KP923879532), Tc2, Tc1); Chris@10: Tc9 = VFMA(LDK(KP923879532), Tc2, Tc1); Chris@10: TbT = VFMA(LDK(KP923879532), TbS, TbR); Chris@10: Tc5 = VFNMS(LDK(KP923879532), TbS, TbR); 
Chris@10: TbL = VFMA(LDK(KP923879532), TbI, TbF); Chris@10: TbJ = VFNMS(LDK(KP923879532), TbI, TbF); Chris@10: Tc4 = VSUB(TbZ, TbW); Chris@10: Tc0 = VADD(TbW, TbZ); Chris@10: TbQ = VADD(TbN, TbM); Chris@10: TbO = VSUB(TbM, TbN); Chris@10: TbK = VADD(Tb6, Tbx); Chris@10: Tby = VSUB(Tb6, Tbx); Chris@10: Tca = VADD(Tc6, Tc7); Chris@10: Tc8 = VSUB(Tc6, Tc7); Chris@10: Tfv = VFMA(LDK(KP980785280), Tc0, TbT); Chris@10: STM4(&(ro[2]), Tfv, ovs, &(ro[0])); Chris@10: Tfw = VFNMS(LDK(KP980785280), Tc0, TbT); Chris@10: STM4(&(ro[34]), Tfw, ovs, &(ro[0])); Chris@10: Tfx = VFMA(LDK(KP831469612), TbQ, TbP); Chris@10: STM4(&(ro[58]), Tfx, ovs, &(ro[0])); Chris@10: Tfy = VFNMS(LDK(KP831469612), TbQ, TbP); Chris@10: STM4(&(ro[26]), Tfy, ovs, &(ro[0])); Chris@10: Tfz = VFMA(LDK(KP831469612), TbO, TbL); Chris@10: STM4(&(io[10]), Tfz, ovs, &(io[0])); Chris@10: TfA = VFNMS(LDK(KP831469612), TbO, TbL); Chris@10: STM4(&(io[42]), TfA, ovs, &(io[0])); Chris@10: TfB = VFMA(LDK(KP831469612), TbK, TbJ); Chris@10: STM4(&(io[58]), TfB, ovs, &(io[0])); Chris@10: TfC = VFNMS(LDK(KP831469612), TbK, TbJ); Chris@10: STM4(&(io[26]), TfC, ovs, &(io[0])); Chris@10: TfD = VFMA(LDK(KP831469612), Tby, TaF); Chris@10: STM4(&(ro[10]), TfD, ovs, &(ro[0])); Chris@10: TfE = VFNMS(LDK(KP831469612), Tby, TaF); Chris@10: STM4(&(ro[42]), TfE, ovs, &(ro[0])); Chris@10: TfF = VFMA(LDK(KP980785280), Tca, Tc9); Chris@10: STM4(&(io[2]), TfF, ovs, &(io[0])); Chris@10: TfG = VFNMS(LDK(KP980785280), Tca, Tc9); Chris@10: STM4(&(io[34]), TfG, ovs, &(io[0])); Chris@10: TfH = VFNMS(LDK(KP980785280), Tc4, Tc3); Chris@10: STM4(&(io[50]), TfH, ovs, &(io[0])); Chris@10: TfI = VFMA(LDK(KP980785280), Tc4, Tc3); Chris@10: STM4(&(io[18]), TfI, ovs, &(io[0])); Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V T70, T6X, T7h, T6F, T7x, T7m, T7w, T7p, T7s, T6M, T7c, T6U, T7r, T75, T7i; Chris@10: V T78, T7b, T6N; Chris@10: { Chris@10: V T6T, T6Q, T77, T6I, T6L, T76, T73, T74; Chris@10: { Chris@10: V T6D, T6E, T7k, T7l, T7n, T7o; Chris@10: T3f = VFMA(LDK(KP707106781), T3e, T37); Chris@10: T6D = VFNMS(LDK(KP707106781), T3e, T37); Chris@10: T6E = VADD(T65, T64); Chris@10: T66 = VSUB(T64, T65); Chris@10: T6T = VFNMS(LDK(KP923879532), T6S, T6R); Chris@10: T7k = VFMA(LDK(KP923879532), T6S, T6R); Chris@10: T7l = VFMA(LDK(KP923879532), T6P, T6O); Chris@10: T6Q = VFNMS(LDK(KP923879532), T6P, T6O); Chris@10: T70 = VFNMS(LDK(KP923879532), T6Z, T6Y); Chris@10: T7n = VFMA(LDK(KP923879532), T6Z, T6Y); Chris@10: T7o = VFMA(LDK(KP923879532), T6W, T6V); Chris@10: T6X = VFNMS(LDK(KP923879532), T6W, T6V); Chris@10: T77 = VFNMS(LDK(KP198912367), T6G, T6H); Chris@10: T6I = VFMA(LDK(KP198912367), T6H, T6G); Chris@10: TfJ = VFMA(LDK(KP980785280), Tc8, Tc5); Chris@10: STM4(&(ro[18]), TfJ, ovs, &(ro[0])); Chris@10: TfK = VFNMS(LDK(KP980785280), Tc8, Tc5); Chris@10: STM4(&(ro[50]), TfK, ovs, &(ro[0])); Chris@10: T7h = VFMA(LDK(KP923879532), T6E, T6D); Chris@10: T6F = VFNMS(LDK(KP923879532), T6E, T6D); Chris@10: T7x = VFNMS(LDK(KP098491403), T7k, T7l); Chris@10: T7m = VFMA(LDK(KP098491403), T7l, T7k); Chris@10: T7w = VFMA(LDK(KP098491403), T7n, T7o); Chris@10: T7p = VFNMS(LDK(KP098491403), T7o, T7n); Chris@10: T6L = VFNMS(LDK(KP198912367), T6K, T6J); Chris@10: T76 = VFMA(LDK(KP198912367), T6J, T6K); Chris@10: } Chris@10: T63 = VFMA(LDK(KP707106781), T62, T5Z); Chris@10: T73 = VFNMS(LDK(KP707106781), T62, T5Z); Chris@10: T74 = VADD(T3m, T3t); Chris@10: T3u = VSUB(T3m, T3t); Chris@10: T7s = VADD(T6I, T6L); Chris@10: T6M = VSUB(T6I, T6L); Chris@10: T7c = VFNMS(LDK(KP820678790), T6Q, 
T6T); Chris@10: T6U = VFMA(LDK(KP820678790), T6T, T6Q); Chris@10: T7r = VFMA(LDK(KP923879532), T74, T73); Chris@10: T75 = VFNMS(LDK(KP923879532), T74, T73); Chris@10: T7i = VADD(T77, T76); Chris@10: T78 = VSUB(T76, T77); Chris@10: } Chris@10: T7b = VFNMS(LDK(KP980785280), T6M, T6F); Chris@10: T6N = VFMA(LDK(KP980785280), T6M, T6F); Chris@10: { Chris@10: V T7u, T7q, T7v, T7t, T7A, T7y, T7j, T7z, T7f, T79, T71, T7d; Chris@10: T7u = VADD(T7m, T7p); Chris@10: T7q = VSUB(T7m, T7p); Chris@10: T7v = VFNMS(LDK(KP980785280), T7s, T7r); Chris@10: T7t = VFMA(LDK(KP980785280), T7s, T7r); Chris@10: T7A = VADD(T7x, T7w); Chris@10: T7y = VSUB(T7w, T7x); Chris@10: T7j = VFNMS(LDK(KP980785280), T7i, T7h); Chris@10: T7z = VFMA(LDK(KP980785280), T7i, T7h); Chris@10: T7f = VFMA(LDK(KP980785280), T78, T75); Chris@10: T79 = VFNMS(LDK(KP980785280), T78, T75); Chris@10: T71 = VFNMS(LDK(KP820678790), T70, T6X); Chris@10: T7d = VFMA(LDK(KP820678790), T6X, T70); Chris@10: { Chris@10: V T7g, T7e, T72, T7a; Chris@10: TfL = VFMA(LDK(KP995184726), T7y, T7v); Chris@10: STM4(&(io[15]), TfL, ovs, &(io[1])); Chris@10: TfM = VFNMS(LDK(KP995184726), T7y, T7v); Chris@10: STM4(&(io[47]), TfM, ovs, &(io[1])); Chris@10: TfN = VFMA(LDK(KP995184726), T7q, T7j); Chris@10: STM4(&(ro[15]), TfN, ovs, &(ro[1])); Chris@10: TfO = VFNMS(LDK(KP995184726), T7q, T7j); Chris@10: STM4(&(ro[47]), TfO, ovs, &(ro[1])); Chris@10: T7g = VADD(T7c, T7d); Chris@10: T7e = VSUB(T7c, T7d); Chris@10: T72 = VADD(T6U, T71); Chris@10: T7a = VSUB(T71, T6U); Chris@10: TfP = VFNMS(LDK(KP995184726), T7u, T7t); Chris@10: STM4(&(io[31]), TfP, ovs, &(io[1])); Chris@10: TfQ = VFMA(LDK(KP995184726), T7u, T7t); Chris@10: STM4(&(io[63]), TfQ, ovs, &(io[1])); Chris@10: TfR = VFMA(LDK(KP773010453), T7e, T7b); Chris@10: STM4(&(ro[23]), TfR, ovs, &(ro[1])); Chris@10: TfS = VFNMS(LDK(KP773010453), T7e, T7b); Chris@10: STM4(&(ro[55]), TfS, ovs, &(ro[1])); Chris@10: TfT = VFMA(LDK(KP773010453), T7g, T7f); Chris@10: STM4(&(io[7]), TfT, ovs, &(io[1])); Chris@10: TfU = VFNMS(LDK(KP773010453), T7g, T7f); Chris@10: STM4(&(io[39]), TfU, ovs, &(io[1])); Chris@10: TfV = VFMA(LDK(KP773010453), T7a, T79); Chris@10: STM4(&(io[23]), TfV, ovs, &(io[1])); Chris@10: TfW = VFNMS(LDK(KP773010453), T7a, T79); Chris@10: STM4(&(io[55]), TfW, ovs, &(io[1])); Chris@10: TfX = VFMA(LDK(KP773010453), T72, T6N); Chris@10: STM4(&(ro[7]), TfX, ovs, &(ro[1])); Chris@10: TfY = VFNMS(LDK(KP773010453), T72, T6N); Chris@10: STM4(&(ro[39]), TfY, ovs, &(ro[1])); Chris@10: TfZ = VFNMS(LDK(KP995184726), T7A, T7z); Chris@10: STM4(&(ro[31]), TfZ, ovs, &(ro[1])); Chris@10: Tg0 = VFMA(LDK(KP995184726), T7A, T7z); Chris@10: STM4(&(ro[63]), Tg0, ovs, &(ro[1])); Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V T7D, T8K, T8H, T7K, Ta8, Ta7, Tae, Tad; Chris@10: { Chris@10: V T9x, T9u, T9E, T9B, T9L, T9K, T9V, T9j, Tab, Ta0, Taa, Ta3, Ta6, T9q, T9H; Chris@10: V T9I; Chris@10: { Chris@10: V T9h, T9i, T9Y, T9Z, Ta1, Ta2, T9m, T9p; Chris@10: T7D = VFMA(LDK(KP707106781), T7C, T7B); Chris@10: T9h = VFNMS(LDK(KP707106781), T7C, T7B); Chris@10: T9i = VSUB(T8I, T8J); Chris@10: T8K = VADD(T8I, T8J); Chris@10: T9x = VFNMS(LDK(KP923879532), T9w, T9v); Chris@10: T9Y = VFMA(LDK(KP923879532), T9w, T9v); Chris@10: T9Z = VFMA(LDK(KP923879532), T9t, T9s); Chris@10: T9u = VFNMS(LDK(KP923879532), T9t, T9s); Chris@10: T9E = VFNMS(LDK(KP923879532), T9D, T9C); Chris@10: Ta1 = VFMA(LDK(KP923879532), T9D, T9C); Chris@10: Ta2 = VFMA(LDK(KP923879532), T9A, T9z); Chris@10: T9B = VFNMS(LDK(KP923879532), T9A, T9z); Chris@10: T9L = 
VFNMS(LDK(KP668178637), T9k, T9l); Chris@10: T9m = VFMA(LDK(KP668178637), T9l, T9k); Chris@10: T9p = VFNMS(LDK(KP668178637), T9o, T9n); Chris@10: T9K = VFMA(LDK(KP668178637), T9n, T9o); Chris@10: T9V = VFNMS(LDK(KP923879532), T9i, T9h); Chris@10: T9j = VFMA(LDK(KP923879532), T9i, T9h); Chris@10: Tab = VFNMS(LDK(KP303346683), T9Y, T9Z); Chris@10: Ta0 = VFMA(LDK(KP303346683), T9Z, T9Y); Chris@10: Taa = VFMA(LDK(KP303346683), Ta1, Ta2); Chris@10: Ta3 = VFNMS(LDK(KP303346683), Ta2, Ta1); Chris@10: Ta6 = VADD(T9m, T9p); Chris@10: T9q = VSUB(T9m, T9p); Chris@10: T8H = VFMA(LDK(KP707106781), T8G, T8F); Chris@10: T9H = VFNMS(LDK(KP707106781), T8G, T8F); Chris@10: T9I = VSUB(T7J, T7G); Chris@10: T7K = VADD(T7G, T7J); Chris@10: } Chris@10: { Chris@10: V T9P, T9r, T9Q, T9y, Ta5, T9J, T9W, T9M, T9R, T9F; Chris@10: T9P = VFNMS(LDK(KP831469612), T9q, T9j); Chris@10: T9r = VFMA(LDK(KP831469612), T9q, T9j); Chris@10: T9Q = VFNMS(LDK(KP534511135), T9u, T9x); Chris@10: T9y = VFMA(LDK(KP534511135), T9x, T9u); Chris@10: Ta5 = VFNMS(LDK(KP923879532), T9I, T9H); Chris@10: T9J = VFMA(LDK(KP923879532), T9I, T9H); Chris@10: T9W = VADD(T9L, T9K); Chris@10: T9M = VSUB(T9K, T9L); Chris@10: T9R = VFMA(LDK(KP534511135), T9B, T9E); Chris@10: T9F = VFNMS(LDK(KP534511135), T9E, T9B); Chris@10: { Chris@10: V T9T, T9N, T9U, T9S, T9G, T9O; Chris@10: { Chris@10: V Ta4, Ta9, Tac, T9X; Chris@10: Ta8 = VADD(Ta0, Ta3); Chris@10: Ta4 = VSUB(Ta0, Ta3); Chris@10: Ta9 = VFNMS(LDK(KP831469612), Ta6, Ta5); Chris@10: Ta7 = VFMA(LDK(KP831469612), Ta6, Ta5); Chris@10: Tae = VADD(Tab, Taa); Chris@10: Tac = VSUB(Taa, Tab); Chris@10: T9X = VFNMS(LDK(KP831469612), T9W, T9V); Chris@10: Tad = VFMA(LDK(KP831469612), T9W, T9V); Chris@10: T9T = VFMA(LDK(KP831469612), T9M, T9J); Chris@10: T9N = VFNMS(LDK(KP831469612), T9M, T9J); Chris@10: T9U = VADD(T9Q, T9R); Chris@10: T9S = VSUB(T9Q, T9R); Chris@10: T9G = VADD(T9y, T9F); Chris@10: T9O = VSUB(T9F, T9y); Chris@10: { Chris@10: V Tg1, Tg2, Tg3, Tg4; Chris@10: Tg1 = VFNMS(LDK(KP956940335), Tac, Ta9); Chris@10: STM4(&(io[45]), Tg1, ovs, &(io[1])); Chris@10: STN4(&(io[44]), Tf6, Tg1, Tfq, TfM, ovs); Chris@10: Tg2 = VFMA(LDK(KP956940335), Ta4, T9X); Chris@10: STM4(&(ro[13]), Tg2, ovs, &(ro[1])); Chris@10: STN4(&(ro[12]), Tf9, Tg2, Tff, TfN, ovs); Chris@10: Tg3 = VFNMS(LDK(KP956940335), Ta4, T9X); Chris@10: STM4(&(ro[45]), Tg3, ovs, &(ro[1])); Chris@10: STN4(&(ro[44]), Tfa, Tg3, Tfg, TfO, ovs); Chris@10: Tg4 = VFMA(LDK(KP956940335), Tac, Ta9); Chris@10: STM4(&(io[13]), Tg4, ovs, &(io[1])); Chris@10: STN4(&(io[12]), Tf5, Tg4, Tfp, TfL, ovs); Chris@10: } Chris@10: } Chris@10: { Chris@10: V Tg5, Tg6, Tg7, Tg8; Chris@10: Tg5 = VFMA(LDK(KP881921264), T9S, T9P); Chris@10: STM4(&(ro[21]), Tg5, ovs, &(ro[1])); Chris@10: STN4(&(ro[20]), Tfd, Tg5, Tfh, TfR, ovs); Chris@10: Tg6 = VFNMS(LDK(KP881921264), T9S, T9P); Chris@10: STM4(&(ro[53]), Tg6, ovs, &(ro[1])); Chris@10: STN4(&(ro[52]), Tfe, Tg6, Tfi, TfS, ovs); Chris@10: Tg7 = VFMA(LDK(KP881921264), T9U, T9T); Chris@10: STM4(&(io[5]), Tg7, ovs, &(io[1])); Chris@10: STN4(&(io[4]), TeZ, Tg7, Tfj, TfT, ovs); Chris@10: Tg8 = VFNMS(LDK(KP881921264), T9U, T9T); Chris@10: STM4(&(io[37]), Tg8, ovs, &(io[1])); Chris@10: STN4(&(io[36]), Tf0, Tg8, Tfk, TfU, ovs); Chris@10: { Chris@10: V Tg9, Tga, Tgb, Tgc; Chris@10: Tg9 = VFMA(LDK(KP881921264), T9O, T9N); Chris@10: STM4(&(io[21]), Tg9, ovs, &(io[1])); Chris@10: STN4(&(io[20]), Tfb, Tg9, Tfl, TfV, ovs); Chris@10: Tga = VFNMS(LDK(KP881921264), T9O, T9N); Chris@10: STM4(&(io[53]), Tga, ovs, &(io[1])); Chris@10: STN4(&(io[52]), 
Tfc, Tga, Tfm, TfW, ovs); Chris@10: Tgb = VFMA(LDK(KP881921264), T9G, T9r); Chris@10: STM4(&(ro[5]), Tgb, ovs, &(ro[1])); Chris@10: STN4(&(ro[4]), Tf1, Tgb, Tfn, TfX, ovs); Chris@10: Tgc = VFNMS(LDK(KP881921264), T9G, T9r); Chris@10: STM4(&(ro[37]), Tgc, ovs, &(ro[1])); Chris@10: STN4(&(ro[36]), Tf2, Tgc, Tfo, TfY, ovs); Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V Tgh, Tgi, Tgl, Tgm, Tgn, Tgo, Tgp, Tgq, Tgr, Tgs, Tgt, Tgu; Chris@10: { Chris@10: V T5U, T6j, T3v, T6y, T6o, T5H, T69, T68, T6z, T6r, T6u, T48, T6f, T52, T6t; Chris@10: V T67, T6h, T49; Chris@10: { Chris@10: V T51, T4O, T6p, T6q, T3O, T47, T6m, T6n; Chris@10: T51 = VFNMS(LDK(KP923879532), T50, T4X); Chris@10: T6m = VFMA(LDK(KP923879532), T50, T4X); Chris@10: T6n = VFMA(LDK(KP923879532), T4N, T4q); Chris@10: T4O = VFNMS(LDK(KP923879532), T4N, T4q); Chris@10: T5U = VFNMS(LDK(KP923879532), T5T, T5Q); Chris@10: T6p = VFMA(LDK(KP923879532), T5T, T5Q); Chris@10: { Chris@10: V Tgd, Tge, Tgf, Tgg; Chris@10: Tgd = VFMA(LDK(KP956940335), Ta8, Ta7); Chris@10: STM4(&(io[61]), Tgd, ovs, &(io[1])); Chris@10: STN4(&(io[60]), Tf7, Tgd, Tfs, TfQ, ovs); Chris@10: Tge = VFNMS(LDK(KP956940335), Ta8, Ta7); Chris@10: STM4(&(io[29]), Tge, ovs, &(io[1])); Chris@10: STN4(&(io[28]), Tf8, Tge, Tfr, TfP, ovs); Chris@10: Tgf = VFMA(LDK(KP956940335), Tae, Tad); Chris@10: STM4(&(ro[61]), Tgf, ovs, &(ro[1])); Chris@10: STN4(&(ro[60]), Tf3, Tgf, Tft, Tg0, ovs); Chris@10: Tgg = VFNMS(LDK(KP956940335), Tae, Tad); Chris@10: STM4(&(ro[29]), Tgg, ovs, &(ro[1])); Chris@10: STN4(&(ro[28]), Tf4, Tgg, Tfu, TfZ, ovs); Chris@10: T6j = VFMA(LDK(KP923879532), T3u, T3f); Chris@10: T3v = VFNMS(LDK(KP923879532), T3u, T3f); Chris@10: T6y = VFNMS(LDK(KP303346683), T6m, T6n); Chris@10: T6o = VFMA(LDK(KP303346683), T6n, T6m); Chris@10: T6q = VFMA(LDK(KP923879532), T5G, T5j); Chris@10: T5H = VFNMS(LDK(KP923879532), T5G, T5j); Chris@10: } Chris@10: T69 = VFMA(LDK(KP668178637), T3G, T3N); Chris@10: T3O = VFNMS(LDK(KP668178637), T3N, T3G); Chris@10: T47 = VFMA(LDK(KP668178637), T46, T3Z); Chris@10: T68 = VFNMS(LDK(KP668178637), T3Z, T46); Chris@10: T6z = VFMA(LDK(KP303346683), T6p, T6q); Chris@10: T6r = VFNMS(LDK(KP303346683), T6q, T6p); Chris@10: T6u = VADD(T3O, T47); Chris@10: T48 = VSUB(T3O, T47); Chris@10: T6f = VFNMS(LDK(KP534511135), T4O, T51); Chris@10: T52 = VFMA(LDK(KP534511135), T51, T4O); Chris@10: T6t = VFMA(LDK(KP923879532), T66, T63); Chris@10: T67 = VFNMS(LDK(KP923879532), T66, T63); Chris@10: } Chris@10: T6h = VFNMS(LDK(KP831469612), T48, T3v); Chris@10: T49 = VFMA(LDK(KP831469612), T48, T3v); Chris@10: { Chris@10: V T6w, T6s, T6B, T6v, T6A, T6C, T6k, T6a, T6e, T5V; Chris@10: T6w = VSUB(T6r, T6o); Chris@10: T6s = VADD(T6o, T6r); Chris@10: T6B = VFMA(LDK(KP831469612), T6u, T6t); Chris@10: T6v = VFNMS(LDK(KP831469612), T6u, T6t); Chris@10: T6A = VSUB(T6y, T6z); Chris@10: T6C = VADD(T6y, T6z); Chris@10: T6k = VADD(T69, T68); Chris@10: T6a = VSUB(T68, T69); Chris@10: T6e = VFMA(LDK(KP534511135), T5H, T5U); Chris@10: T5V = VFNMS(LDK(KP534511135), T5U, T5H); Chris@10: Tgh = VFMA(LDK(KP956940335), T6C, T6B); Chris@10: STM4(&(io[3]), Tgh, ovs, &(io[1])); Chris@10: Tgi = VFNMS(LDK(KP956940335), T6C, T6B); Chris@10: STM4(&(io[35]), Tgi, ovs, &(io[1])); Chris@10: { Chris@10: V T6l, T6x, T6d, T6b; Chris@10: T6l = VFMA(LDK(KP831469612), T6k, T6j); Chris@10: T6x = VFNMS(LDK(KP831469612), T6k, T6j); Chris@10: T6d = VFMA(LDK(KP831469612), T6a, T67); Chris@10: T6b = VFNMS(LDK(KP831469612), T6a, T67); Chris@10: { Chris@10: V T6g, T6i, T5W, T6c; 
Chris@10: T6g = VSUB(T6e, T6f); Chris@10: T6i = VADD(T6f, T6e); Chris@10: T5W = VSUB(T52, T5V); Chris@10: T6c = VADD(T52, T5V); Chris@10: Tgj = VFMA(LDK(KP956940335), T6w, T6v); Chris@10: STM4(&(io[19]), Tgj, ovs, &(io[1])); Chris@10: Tgk = VFNMS(LDK(KP956940335), T6w, T6v); Chris@10: STM4(&(io[51]), Tgk, ovs, &(io[1])); Chris@10: Tgl = VFMA(LDK(KP956940335), T6s, T6l); Chris@10: STM4(&(ro[3]), Tgl, ovs, &(ro[1])); Chris@10: Tgm = VFNMS(LDK(KP956940335), T6s, T6l); Chris@10: STM4(&(ro[35]), Tgm, ovs, &(ro[1])); Chris@10: Tgn = VFMA(LDK(KP881921264), T6i, T6h); Chris@10: STM4(&(ro[59]), Tgn, ovs, &(ro[1])); Chris@10: Tgo = VFNMS(LDK(KP881921264), T6i, T6h); Chris@10: STM4(&(ro[27]), Tgo, ovs, &(ro[1])); Chris@10: Tgp = VFMA(LDK(KP881921264), T6g, T6d); Chris@10: STM4(&(io[11]), Tgp, ovs, &(io[1])); Chris@10: Tgq = VFNMS(LDK(KP881921264), T6g, T6d); Chris@10: STM4(&(io[43]), Tgq, ovs, &(io[1])); Chris@10: Tgr = VFMA(LDK(KP881921264), T6c, T6b); Chris@10: STM4(&(io[59]), Tgr, ovs, &(io[1])); Chris@10: Tgs = VFNMS(LDK(KP881921264), T6c, T6b); Chris@10: STM4(&(io[27]), Tgs, ovs, &(io[1])); Chris@10: Tgt = VFMA(LDK(KP881921264), T5W, T49); Chris@10: STM4(&(ro[11]), Tgt, ovs, &(ro[1])); Chris@10: Tgu = VFNMS(LDK(KP881921264), T5W, T49); Chris@10: STM4(&(ro[43]), Tgu, ovs, &(ro[1])); Chris@10: Tgv = VFNMS(LDK(KP956940335), T6A, T6x); Chris@10: STM4(&(ro[51]), Tgv, ovs, &(ro[1])); Chris@10: Tgw = VFMA(LDK(KP956940335), T6A, T6x); Chris@10: STM4(&(ro[19]), Tgw, ovs, &(ro[1])); Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V T8j, T8c, T8C, T8v, T8N, T8M, T8X, T7L, T9c, T92, T9d, T95, T98, T80; Chris@10: { Chris@10: V T90, T91, T93, T94, T7S, T7Z; Chris@10: T8j = VFNMS(LDK(KP923879532), T8i, T8f); Chris@10: T90 = VFMA(LDK(KP923879532), T8i, T8f); Chris@10: T91 = VFMA(LDK(KP923879532), T8b, T84); Chris@10: T8c = VFNMS(LDK(KP923879532), T8b, T84); Chris@10: T8C = VFNMS(LDK(KP923879532), T8B, T8y); Chris@10: T93 = VFMA(LDK(KP923879532), T8B, T8y); Chris@10: T94 = VFMA(LDK(KP923879532), T8u, T8n); Chris@10: T8v = VFNMS(LDK(KP923879532), T8u, T8n); Chris@10: T8N = VFMA(LDK(KP198912367), T7O, T7R); Chris@10: T7S = VFNMS(LDK(KP198912367), T7R, T7O); Chris@10: T7Z = VFMA(LDK(KP198912367), T7Y, T7V); Chris@10: T8M = VFNMS(LDK(KP198912367), T7V, T7Y); Chris@10: T8X = VFMA(LDK(KP923879532), T7K, T7D); Chris@10: T7L = VFNMS(LDK(KP923879532), T7K, T7D); Chris@10: T9c = VFNMS(LDK(KP098491403), T90, T91); Chris@10: T92 = VFMA(LDK(KP098491403), T91, T90); Chris@10: T9d = VFMA(LDK(KP098491403), T93, T94); Chris@10: T95 = VFNMS(LDK(KP098491403), T94, T93); Chris@10: T98 = VADD(T7S, T7Z); Chris@10: T80 = VSUB(T7S, T7Z); Chris@10: } Chris@10: { Chris@10: V T8V, T81, T8T, T8k, T97, T8L, T8Y, T8O, T8S, T8D; Chris@10: T8V = VFNMS(LDK(KP980785280), T80, T7L); Chris@10: T81 = VFMA(LDK(KP980785280), T80, T7L); Chris@10: T8T = VFNMS(LDK(KP820678790), T8c, T8j); Chris@10: T8k = VFMA(LDK(KP820678790), T8j, T8c); Chris@10: T97 = VFMA(LDK(KP923879532), T8K, T8H); Chris@10: T8L = VFNMS(LDK(KP923879532), T8K, T8H); Chris@10: T8Y = VADD(T8N, T8M); Chris@10: T8O = VSUB(T8M, T8N); Chris@10: T8S = VFMA(LDK(KP820678790), T8v, T8C); Chris@10: T8D = VFNMS(LDK(KP820678790), T8C, T8v); Chris@10: { Chris@10: V T8R, T8P, T8U, T8W, T8E, T8Q; Chris@10: { Chris@10: V T96, T9f, T9g, T8Z; Chris@10: T9a = VSUB(T95, T92); Chris@10: T96 = VADD(T92, T95); Chris@10: T9f = VFMA(LDK(KP980785280), T98, T97); Chris@10: T99 = VFNMS(LDK(KP980785280), T98, T97); Chris@10: T9e = VSUB(T9c, T9d); Chris@10: T9g = VADD(T9c, T9d); Chris@10: 
T8Z = VFMA(LDK(KP980785280), T8Y, T8X); Chris@10: T9b = VFNMS(LDK(KP980785280), T8Y, T8X); Chris@10: T8R = VFMA(LDK(KP980785280), T8O, T8L); Chris@10: T8P = VFNMS(LDK(KP980785280), T8O, T8L); Chris@10: T8U = VSUB(T8S, T8T); Chris@10: T8W = VADD(T8T, T8S); Chris@10: T8E = VSUB(T8k, T8D); Chris@10: T8Q = VADD(T8k, T8D); Chris@10: { Chris@10: V Tgx, Tgy, Tgz, TgA; Chris@10: Tgx = VFNMS(LDK(KP995184726), T9g, T9f); Chris@10: STM4(&(io[33]), Tgx, ovs, &(io[1])); Chris@10: STN4(&(io[32]), TeO, Tgx, TfG, Tgi, ovs); Chris@10: Tgy = VFMA(LDK(KP995184726), T96, T8Z); Chris@10: STM4(&(ro[1]), Tgy, ovs, &(ro[1])); Chris@10: STN4(&(ro[0]), TeL, Tgy, Tfv, Tgl, ovs); Chris@10: Tgz = VFNMS(LDK(KP995184726), T96, T8Z); Chris@10: STM4(&(ro[33]), Tgz, ovs, &(ro[1])); Chris@10: STN4(&(ro[32]), TeM, Tgz, Tfw, Tgm, ovs); Chris@10: TgA = VFMA(LDK(KP995184726), T9g, T9f); Chris@10: STM4(&(io[1]), TgA, ovs, &(io[1])); Chris@10: STN4(&(io[0]), TeN, TgA, TfF, Tgh, ovs); Chris@10: } Chris@10: } Chris@10: { Chris@10: V TgB, TgC, TgD, TgE; Chris@10: TgB = VFMA(LDK(KP773010453), T8W, T8V); Chris@10: STM4(&(ro[57]), TgB, ovs, &(ro[1])); Chris@10: STN4(&(ro[56]), TeS, TgB, Tfx, Tgn, ovs); Chris@10: TgC = VFNMS(LDK(KP773010453), T8W, T8V); Chris@10: STM4(&(ro[25]), TgC, ovs, &(ro[1])); Chris@10: STN4(&(ro[24]), TeR, TgC, Tfy, Tgo, ovs); Chris@10: TgD = VFMA(LDK(KP773010453), T8U, T8R); Chris@10: STM4(&(io[9]), TgD, ovs, &(io[1])); Chris@10: STN4(&(io[8]), TeT, TgD, Tfz, Tgp, ovs); Chris@10: TgE = VFNMS(LDK(KP773010453), T8U, T8R); Chris@10: STM4(&(io[41]), TgE, ovs, &(io[1])); Chris@10: STN4(&(io[40]), TeU, TgE, TfA, Tgq, ovs); Chris@10: { Chris@10: V TgF, TgG, TgH, TgI; Chris@10: TgF = VFMA(LDK(KP773010453), T8Q, T8P); Chris@10: STM4(&(io[57]), TgF, ovs, &(io[1])); Chris@10: STN4(&(io[56]), TeW, TgF, TfB, Tgr, ovs); Chris@10: TgG = VFNMS(LDK(KP773010453), T8Q, T8P); Chris@10: STM4(&(io[25]), TgG, ovs, &(io[1])); Chris@10: STN4(&(io[24]), TeV, TgG, TfC, Tgs, ovs); Chris@10: TgH = VFMA(LDK(KP773010453), T8E, T81); Chris@10: STM4(&(ro[9]), TgH, ovs, &(ro[1])); Chris@10: STN4(&(ro[8]), TeX, TgH, TfD, Tgt, ovs); Chris@10: TgI = VFNMS(LDK(KP773010453), T8E, T81); Chris@10: STM4(&(ro[41]), TgI, ovs, &(ro[1])); Chris@10: STN4(&(ro[40]), TeY, TgI, TfE, Tgu, ovs); Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V TgJ, TgK, TgL, TgM; Chris@10: TgJ = VFMA(LDK(KP995184726), T9a, T99); Chris@10: STM4(&(io[17]), TgJ, ovs, &(io[1])); Chris@10: STN4(&(io[16]), TeQ, TgJ, TfI, Tgj, ovs); Chris@10: TgK = VFNMS(LDK(KP995184726), T9a, T99); Chris@10: STM4(&(io[49]), TgK, ovs, &(io[1])); Chris@10: STN4(&(io[48]), TeP, TgK, TfH, Tgk, ovs); Chris@10: TgL = VFMA(LDK(KP995184726), T9e, T9b); Chris@10: STM4(&(ro[17]), TgL, ovs, &(ro[1])); Chris@10: STN4(&(ro[16]), TeK, TgL, TfJ, Tgw, ovs); Chris@10: TgM = VFNMS(LDK(KP995184726), T9e, T9b); Chris@10: STM4(&(ro[49]), TgM, ovs, &(ro[1])); Chris@10: STN4(&(ro[48]), TeJ, TgM, TfK, Tgv, ovs); Chris@10: } Chris@10: } Chris@10: } Chris@10: VLEAVE(); Chris@10: } Chris@10: Chris@10: static const kdft_desc desc = { 64, XSIMD_STRING("n2sv_64"), {520, 0, 392, 0}, &GENUS, 0, 1, 0, 0 }; Chris@10: Chris@10: void XSIMD(codelet_n2sv_64) (planner *p) { Chris@10: X(kdft_register) (p, n2sv_64, &desc); Chris@10: } Chris@10: Chris@10: #else /* HAVE_FMA */ Chris@10: Chris@10: /* Generated by: ../../../genfft/gen_notw.native -simd -compact -variables 4 -pipeline-latency 8 -n 64 -name n2sv_64 -with-ostride 1 -include n2s.h 
-store-multiple 4 */ Chris@10: Chris@10: /* Chris@10: * This function contains 912 FP additions, 248 FP multiplications, Chris@10: * (or, 808 additions, 144 multiplications, 104 fused multiply/add), Chris@10: * 260 stack variables, 15 constants, and 288 memory accesses Chris@10: */ Chris@10: #include "n2s.h" Chris@10: Chris@10: static void n2sv_64(const R *ri, const R *ii, R *ro, R *io, stride is, stride os, INT v, INT ivs, INT ovs) Chris@10: { Chris@10: DVK(KP773010453, +0.773010453362736960810906609758469800971041293); Chris@10: DVK(KP634393284, +0.634393284163645498215171613225493370675687095); Chris@10: DVK(KP098017140, +0.098017140329560601994195563888641845861136673); Chris@10: DVK(KP995184726, +0.995184726672196886244836953109479921575474869); Chris@10: DVK(KP881921264, +0.881921264348355029712756863660388349508442621); Chris@10: DVK(KP471396736, +0.471396736825997648556387625905254377657460319); Chris@10: DVK(KP290284677, +0.290284677254462367636192375817395274691476278); Chris@10: DVK(KP956940335, +0.956940335732208864935797886980269969482849206); Chris@10: DVK(KP831469612, +0.831469612302545237078788377617905756738560812); Chris@10: DVK(KP555570233, +0.555570233019602224742830813948532874374937191); Chris@10: DVK(KP195090322, +0.195090322016128267848284868477022240927691618); Chris@10: DVK(KP980785280, +0.980785280403230449126182236134239036973933731); Chris@10: DVK(KP923879532, +0.923879532511286756128183189396788286822416626); Chris@10: DVK(KP382683432, +0.382683432365089771728459984030398866761344562); Chris@10: DVK(KP707106781, +0.707106781186547524400844362104849039284835938); Chris@10: { Chris@10: INT i; Chris@10: for (i = v; i > 0; i = i - (2 * VL), ri = ri + ((2 * VL) * ivs), ii = ii + ((2 * VL) * ivs), ro = ro + ((2 * VL) * ovs), io = io + ((2 * VL) * ovs), MAKE_VOLATILE_STRIDE(256, is), MAKE_VOLATILE_STRIDE(256, os)) { Chris@10: V T37, T7B, T8F, T5Z, Tf, Td9, TbB, TcB, T62, T7C, T2i, TdH, Tah, Tcb, T3e; Chris@10: V T8G, Tu, TdI, Tak, TbD, Tan, TbC, T2x, Tda, T3m, T65, T7G, T8J, T7J, T8I; Chris@10: V T3t, T64, TK, Tdd, Tas, Tce, Tav, Tcf, T2N, Tdc, T3G, T6G, T7O, T9k, T7R; Chris@10: V T9l, T3N, T6H, T1L, Tdv, Tbs, Tcw, TdC, Teo, T5j, T6V, T5Q, T6Y, T8y, T9C; Chris@10: V Tbb, Tct, T8n, T9z, TZ, Tdf, Taz, Tch, TaC, Tci, T32, Tdg, T3Z, T6J, T7V; Chris@10: V T9n, T7Y, T9o, T46, T6K, T1g, Tdp, Tb1, Tcm, Tdm, Tej, T4q, T6R, T4X, T6O; Chris@10: V T8f, T9s, TaK, Tcp, T84, T9v, T1v, Tdn, Tb4, Tcq, Tds, Tek, T4N, T6P, T50; Chris@10: V T6S, T8i, T9w, TaV, Tcn, T8b, T9t, T20, TdD, Tbv, Tcu, Tdy, Tep, T5G, T6Z; Chris@10: V T5T, T6W, T8B, T9A, Tbm, Tcx, T8u, T9D; Chris@10: { Chris@10: V T3, T35, T26, T5Y, T6, T5X, T29, T36, Ta, T39, T2d, T38, Td, T3b, T2g; Chris@10: V T3c; Chris@10: { Chris@10: V T1, T2, T24, T25; Chris@10: T1 = LD(&(ri[0]), ivs, &(ri[0])); Chris@10: T2 = LD(&(ri[WS(is, 32)]), ivs, &(ri[0])); Chris@10: T3 = VADD(T1, T2); Chris@10: T35 = VSUB(T1, T2); Chris@10: T24 = LD(&(ii[0]), ivs, &(ii[0])); Chris@10: T25 = LD(&(ii[WS(is, 32)]), ivs, &(ii[0])); Chris@10: T26 = VADD(T24, T25); Chris@10: T5Y = VSUB(T24, T25); Chris@10: } Chris@10: { Chris@10: V T4, T5, T27, T28; Chris@10: T4 = LD(&(ri[WS(is, 16)]), ivs, &(ri[0])); Chris@10: T5 = LD(&(ri[WS(is, 48)]), ivs, &(ri[0])); Chris@10: T6 = VADD(T4, T5); Chris@10: T5X = VSUB(T4, T5); Chris@10: T27 = LD(&(ii[WS(is, 16)]), ivs, &(ii[0])); Chris@10: T28 = LD(&(ii[WS(is, 48)]), ivs, &(ii[0])); Chris@10: T29 = VADD(T27, T28); Chris@10: T36 = VSUB(T27, T28); Chris@10: } Chris@10: { Chris@10: V T8, T9, T2b, T2c; Chris@10: T8 = 
LD(&(ri[WS(is, 8)]), ivs, &(ri[0])); Chris@10: T9 = LD(&(ri[WS(is, 40)]), ivs, &(ri[0])); Chris@10: Ta = VADD(T8, T9); Chris@10: T39 = VSUB(T8, T9); Chris@10: T2b = LD(&(ii[WS(is, 8)]), ivs, &(ii[0])); Chris@10: T2c = LD(&(ii[WS(is, 40)]), ivs, &(ii[0])); Chris@10: T2d = VADD(T2b, T2c); Chris@10: T38 = VSUB(T2b, T2c); Chris@10: } Chris@10: { Chris@10: V Tb, Tc, T2e, T2f; Chris@10: Tb = LD(&(ri[WS(is, 56)]), ivs, &(ri[0])); Chris@10: Tc = LD(&(ri[WS(is, 24)]), ivs, &(ri[0])); Chris@10: Td = VADD(Tb, Tc); Chris@10: T3b = VSUB(Tb, Tc); Chris@10: T2e = LD(&(ii[WS(is, 56)]), ivs, &(ii[0])); Chris@10: T2f = LD(&(ii[WS(is, 24)]), ivs, &(ii[0])); Chris@10: T2g = VADD(T2e, T2f); Chris@10: T3c = VSUB(T2e, T2f); Chris@10: } Chris@10: { Chris@10: V T7, Te, T2a, T2h; Chris@10: T37 = VSUB(T35, T36); Chris@10: T7B = VADD(T35, T36); Chris@10: T8F = VSUB(T5Y, T5X); Chris@10: T5Z = VADD(T5X, T5Y); Chris@10: T7 = VADD(T3, T6); Chris@10: Te = VADD(Ta, Td); Chris@10: Tf = VADD(T7, Te); Chris@10: Td9 = VSUB(T7, Te); Chris@10: { Chris@10: V Tbz, TbA, T60, T61; Chris@10: Tbz = VSUB(T26, T29); Chris@10: TbA = VSUB(Td, Ta); Chris@10: TbB = VSUB(Tbz, TbA); Chris@10: TcB = VADD(TbA, Tbz); Chris@10: T60 = VSUB(T3b, T3c); Chris@10: T61 = VADD(T39, T38); Chris@10: T62 = VMUL(LDK(KP707106781), VSUB(T60, T61)); Chris@10: T7C = VMUL(LDK(KP707106781), VADD(T61, T60)); Chris@10: } Chris@10: T2a = VADD(T26, T29); Chris@10: T2h = VADD(T2d, T2g); Chris@10: T2i = VADD(T2a, T2h); Chris@10: TdH = VSUB(T2a, T2h); Chris@10: { Chris@10: V Taf, Tag, T3a, T3d; Chris@10: Taf = VSUB(T3, T6); Chris@10: Tag = VSUB(T2d, T2g); Chris@10: Tah = VSUB(Taf, Tag); Chris@10: Tcb = VADD(Taf, Tag); Chris@10: T3a = VSUB(T38, T39); Chris@10: T3d = VADD(T3b, T3c); Chris@10: T3e = VMUL(LDK(KP707106781), VSUB(T3a, T3d)); Chris@10: T8G = VMUL(LDK(KP707106781), VADD(T3a, T3d)); Chris@10: } Chris@10: } Chris@10: } Chris@10: { Chris@10: V Ti, T3j, T2l, T3h, Tl, T3g, T2o, T3k, Tp, T3q, T2s, T3o, Ts, T3n, T2v; Chris@10: V T3r; Chris@10: { Chris@10: V Tg, Th, T2j, T2k; Chris@10: Tg = LD(&(ri[WS(is, 4)]), ivs, &(ri[0])); Chris@10: Th = LD(&(ri[WS(is, 36)]), ivs, &(ri[0])); Chris@10: Ti = VADD(Tg, Th); Chris@10: T3j = VSUB(Tg, Th); Chris@10: T2j = LD(&(ii[WS(is, 4)]), ivs, &(ii[0])); Chris@10: T2k = LD(&(ii[WS(is, 36)]), ivs, &(ii[0])); Chris@10: T2l = VADD(T2j, T2k); Chris@10: T3h = VSUB(T2j, T2k); Chris@10: } Chris@10: { Chris@10: V Tj, Tk, T2m, T2n; Chris@10: Tj = LD(&(ri[WS(is, 20)]), ivs, &(ri[0])); Chris@10: Tk = LD(&(ri[WS(is, 52)]), ivs, &(ri[0])); Chris@10: Tl = VADD(Tj, Tk); Chris@10: T3g = VSUB(Tj, Tk); Chris@10: T2m = LD(&(ii[WS(is, 20)]), ivs, &(ii[0])); Chris@10: T2n = LD(&(ii[WS(is, 52)]), ivs, &(ii[0])); Chris@10: T2o = VADD(T2m, T2n); Chris@10: T3k = VSUB(T2m, T2n); Chris@10: } Chris@10: { Chris@10: V Tn, To, T2q, T2r; Chris@10: Tn = LD(&(ri[WS(is, 60)]), ivs, &(ri[0])); Chris@10: To = LD(&(ri[WS(is, 28)]), ivs, &(ri[0])); Chris@10: Tp = VADD(Tn, To); Chris@10: T3q = VSUB(Tn, To); Chris@10: T2q = LD(&(ii[WS(is, 60)]), ivs, &(ii[0])); Chris@10: T2r = LD(&(ii[WS(is, 28)]), ivs, &(ii[0])); Chris@10: T2s = VADD(T2q, T2r); Chris@10: T3o = VSUB(T2q, T2r); Chris@10: } Chris@10: { Chris@10: V Tq, Tr, T2t, T2u; Chris@10: Tq = LD(&(ri[WS(is, 12)]), ivs, &(ri[0])); Chris@10: Tr = LD(&(ri[WS(is, 44)]), ivs, &(ri[0])); Chris@10: Ts = VADD(Tq, Tr); Chris@10: T3n = VSUB(Tq, Tr); Chris@10: T2t = LD(&(ii[WS(is, 12)]), ivs, &(ii[0])); Chris@10: T2u = LD(&(ii[WS(is, 44)]), ivs, &(ii[0])); Chris@10: T2v = VADD(T2t, T2u); Chris@10: T3r = VSUB(T2t, T2u); Chris@10: } 
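/* The load blocks above gather one decimated input sequence per group: this
   group reads samples 4, 36, 20, 52, 60, 28, 12 and 44 (indices congruent to
   4 mod 8), just as the previous group read the indices congruent to 0 mod 8.
   The sums and differences formed next presumably feed the eight 8-point
   sub-transforms of the 64-point DFT; the previous group used
   KP707106781 = cos(pi/4) = 1/sqrt(2) for its 45-degree rotations, while this
   one uses the cos/sin(pi/8) pair KP923879532/KP382683432. */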
Chris@10: { Chris@10: V Tm, Tt, Tai, Taj; Chris@10: Tm = VADD(Ti, Tl); Chris@10: Tt = VADD(Tp, Ts); Chris@10: Tu = VADD(Tm, Tt); Chris@10: TdI = VSUB(Tt, Tm); Chris@10: Tai = VSUB(T2l, T2o); Chris@10: Taj = VSUB(Ti, Tl); Chris@10: Tak = VSUB(Tai, Taj); Chris@10: TbD = VADD(Taj, Tai); Chris@10: } Chris@10: { Chris@10: V Tal, Tam, T2p, T2w; Chris@10: Tal = VSUB(Tp, Ts); Chris@10: Tam = VSUB(T2s, T2v); Chris@10: Tan = VADD(Tal, Tam); Chris@10: TbC = VSUB(Tal, Tam); Chris@10: T2p = VADD(T2l, T2o); Chris@10: T2w = VADD(T2s, T2v); Chris@10: T2x = VADD(T2p, T2w); Chris@10: Tda = VSUB(T2p, T2w); Chris@10: } Chris@10: { Chris@10: V T3i, T3l, T7E, T7F; Chris@10: T3i = VADD(T3g, T3h); Chris@10: T3l = VSUB(T3j, T3k); Chris@10: T3m = VFNMS(LDK(KP923879532), T3l, VMUL(LDK(KP382683432), T3i)); Chris@10: T65 = VFMA(LDK(KP923879532), T3i, VMUL(LDK(KP382683432), T3l)); Chris@10: T7E = VSUB(T3h, T3g); Chris@10: T7F = VADD(T3j, T3k); Chris@10: T7G = VFNMS(LDK(KP382683432), T7F, VMUL(LDK(KP923879532), T7E)); Chris@10: T8J = VFMA(LDK(KP382683432), T7E, VMUL(LDK(KP923879532), T7F)); Chris@10: } Chris@10: { Chris@10: V T7H, T7I, T3p, T3s; Chris@10: T7H = VSUB(T3o, T3n); Chris@10: T7I = VADD(T3q, T3r); Chris@10: T7J = VFMA(LDK(KP923879532), T7H, VMUL(LDK(KP382683432), T7I)); Chris@10: T8I = VFNMS(LDK(KP382683432), T7H, VMUL(LDK(KP923879532), T7I)); Chris@10: T3p = VADD(T3n, T3o); Chris@10: T3s = VSUB(T3q, T3r); Chris@10: T3t = VFMA(LDK(KP382683432), T3p, VMUL(LDK(KP923879532), T3s)); Chris@10: T64 = VFNMS(LDK(KP923879532), T3p, VMUL(LDK(KP382683432), T3s)); Chris@10: } Chris@10: } Chris@10: { Chris@10: V Ty, T3H, T2B, T3x, TB, T3w, T2E, T3I, TI, T3L, T2L, T3B, TF, T3K, T2I; Chris@10: V T3E; Chris@10: { Chris@10: V Tw, Tx, T2C, T2D; Chris@10: Tw = LD(&(ri[WS(is, 2)]), ivs, &(ri[0])); Chris@10: Tx = LD(&(ri[WS(is, 34)]), ivs, &(ri[0])); Chris@10: Ty = VADD(Tw, Tx); Chris@10: T3H = VSUB(Tw, Tx); Chris@10: { Chris@10: V T2z, T2A, Tz, TA; Chris@10: T2z = LD(&(ii[WS(is, 2)]), ivs, &(ii[0])); Chris@10: T2A = LD(&(ii[WS(is, 34)]), ivs, &(ii[0])); Chris@10: T2B = VADD(T2z, T2A); Chris@10: T3x = VSUB(T2z, T2A); Chris@10: Tz = LD(&(ri[WS(is, 18)]), ivs, &(ri[0])); Chris@10: TA = LD(&(ri[WS(is, 50)]), ivs, &(ri[0])); Chris@10: TB = VADD(Tz, TA); Chris@10: T3w = VSUB(Tz, TA); Chris@10: } Chris@10: T2C = LD(&(ii[WS(is, 18)]), ivs, &(ii[0])); Chris@10: T2D = LD(&(ii[WS(is, 50)]), ivs, &(ii[0])); Chris@10: T2E = VADD(T2C, T2D); Chris@10: T3I = VSUB(T2C, T2D); Chris@10: { Chris@10: V TG, TH, T3z, T2J, T2K, T3A; Chris@10: TG = LD(&(ri[WS(is, 58)]), ivs, &(ri[0])); Chris@10: TH = LD(&(ri[WS(is, 26)]), ivs, &(ri[0])); Chris@10: T3z = VSUB(TG, TH); Chris@10: T2J = LD(&(ii[WS(is, 58)]), ivs, &(ii[0])); Chris@10: T2K = LD(&(ii[WS(is, 26)]), ivs, &(ii[0])); Chris@10: T3A = VSUB(T2J, T2K); Chris@10: TI = VADD(TG, TH); Chris@10: T3L = VADD(T3z, T3A); Chris@10: T2L = VADD(T2J, T2K); Chris@10: T3B = VSUB(T3z, T3A); Chris@10: } Chris@10: { Chris@10: V TD, TE, T3C, T2G, T2H, T3D; Chris@10: TD = LD(&(ri[WS(is, 10)]), ivs, &(ri[0])); Chris@10: TE = LD(&(ri[WS(is, 42)]), ivs, &(ri[0])); Chris@10: T3C = VSUB(TD, TE); Chris@10: T2G = LD(&(ii[WS(is, 10)]), ivs, &(ii[0])); Chris@10: T2H = LD(&(ii[WS(is, 42)]), ivs, &(ii[0])); Chris@10: T3D = VSUB(T2G, T2H); Chris@10: TF = VADD(TD, TE); Chris@10: T3K = VSUB(T3D, T3C); Chris@10: T2I = VADD(T2G, T2H); Chris@10: T3E = VADD(T3C, T3D); Chris@10: } Chris@10: } Chris@10: { Chris@10: V TC, TJ, Taq, Tar; Chris@10: TC = VADD(Ty, TB); Chris@10: TJ = VADD(TF, TI); Chris@10: TK = VADD(TC, TJ); Chris@10: Tdd = 
VSUB(TC, TJ); Chris@10: Taq = VSUB(T2B, T2E); Chris@10: Tar = VSUB(TI, TF); Chris@10: Tas = VSUB(Taq, Tar); Chris@10: Tce = VADD(Tar, Taq); Chris@10: } Chris@10: { Chris@10: V Tat, Tau, T2F, T2M; Chris@10: Tat = VSUB(Ty, TB); Chris@10: Tau = VSUB(T2I, T2L); Chris@10: Tav = VSUB(Tat, Tau); Chris@10: Tcf = VADD(Tat, Tau); Chris@10: T2F = VADD(T2B, T2E); Chris@10: T2M = VADD(T2I, T2L); Chris@10: T2N = VADD(T2F, T2M); Chris@10: Tdc = VSUB(T2F, T2M); Chris@10: } Chris@10: { Chris@10: V T3y, T3F, T7M, T7N; Chris@10: T3y = VADD(T3w, T3x); Chris@10: T3F = VMUL(LDK(KP707106781), VSUB(T3B, T3E)); Chris@10: T3G = VSUB(T3y, T3F); Chris@10: T6G = VADD(T3y, T3F); Chris@10: T7M = VSUB(T3x, T3w); Chris@10: T7N = VMUL(LDK(KP707106781), VADD(T3K, T3L)); Chris@10: T7O = VSUB(T7M, T7N); Chris@10: T9k = VADD(T7M, T7N); Chris@10: } Chris@10: { Chris@10: V T7P, T7Q, T3J, T3M; Chris@10: T7P = VADD(T3H, T3I); Chris@10: T7Q = VMUL(LDK(KP707106781), VADD(T3E, T3B)); Chris@10: T7R = VSUB(T7P, T7Q); Chris@10: T9l = VADD(T7P, T7Q); Chris@10: T3J = VSUB(T3H, T3I); Chris@10: T3M = VMUL(LDK(KP707106781), VSUB(T3K, T3L)); Chris@10: T3N = VSUB(T3J, T3M); Chris@10: T6H = VADD(T3J, T3M); Chris@10: } Chris@10: } Chris@10: { Chris@10: V T1z, T53, T5L, Tbo, T1C, T5I, T56, Tbp, T1J, Tb9, T5h, T5N, T1G, Tb8, T5c; Chris@10: V T5O; Chris@10: { Chris@10: V T1x, T1y, T54, T55; Chris@10: T1x = LD(&(ri[WS(is, 63)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1y = LD(&(ri[WS(is, 31)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1z = VADD(T1x, T1y); Chris@10: T53 = VSUB(T1x, T1y); Chris@10: { Chris@10: V T5J, T5K, T1A, T1B; Chris@10: T5J = LD(&(ii[WS(is, 63)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5K = LD(&(ii[WS(is, 31)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5L = VSUB(T5J, T5K); Chris@10: Tbo = VADD(T5J, T5K); Chris@10: T1A = LD(&(ri[WS(is, 15)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1B = LD(&(ri[WS(is, 47)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1C = VADD(T1A, T1B); Chris@10: T5I = VSUB(T1A, T1B); Chris@10: } Chris@10: T54 = LD(&(ii[WS(is, 15)]), ivs, &(ii[WS(is, 1)])); Chris@10: T55 = LD(&(ii[WS(is, 47)]), ivs, &(ii[WS(is, 1)])); Chris@10: T56 = VSUB(T54, T55); Chris@10: Tbp = VADD(T54, T55); Chris@10: { Chris@10: V T1H, T1I, T5d, T5e, T5f, T5g; Chris@10: T1H = LD(&(ri[WS(is, 55)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1I = LD(&(ri[WS(is, 23)]), ivs, &(ri[WS(is, 1)])); Chris@10: T5d = VSUB(T1H, T1I); Chris@10: T5e = LD(&(ii[WS(is, 55)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5f = LD(&(ii[WS(is, 23)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5g = VSUB(T5e, T5f); Chris@10: T1J = VADD(T1H, T1I); Chris@10: Tb9 = VADD(T5e, T5f); Chris@10: T5h = VADD(T5d, T5g); Chris@10: T5N = VSUB(T5d, T5g); Chris@10: } Chris@10: { Chris@10: V T1E, T1F, T5b, T58, T59, T5a; Chris@10: T1E = LD(&(ri[WS(is, 7)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1F = LD(&(ri[WS(is, 39)]), ivs, &(ri[WS(is, 1)])); Chris@10: T5b = VSUB(T1E, T1F); Chris@10: T58 = LD(&(ii[WS(is, 7)]), ivs, &(ii[WS(is, 1)])); Chris@10: T59 = LD(&(ii[WS(is, 39)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5a = VSUB(T58, T59); Chris@10: T1G = VADD(T1E, T1F); Chris@10: Tb8 = VADD(T58, T59); Chris@10: T5c = VSUB(T5a, T5b); Chris@10: T5O = VADD(T5b, T5a); Chris@10: } Chris@10: } Chris@10: { Chris@10: V T1D, T1K, Tbq, Tbr; Chris@10: T1D = VADD(T1z, T1C); Chris@10: T1K = VADD(T1G, T1J); Chris@10: T1L = VADD(T1D, T1K); Chris@10: Tdv = VSUB(T1D, T1K); Chris@10: Tbq = VSUB(Tbo, Tbp); Chris@10: Tbr = VSUB(T1J, T1G); Chris@10: Tbs = VSUB(Tbq, Tbr); Chris@10: Tcw = VADD(Tbr, Tbq); Chris@10: } Chris@10: { Chris@10: V TdA, TdB, T57, T5i; Chris@10: TdA 
= VADD(Tbo, Tbp); Chris@10: TdB = VADD(Tb8, Tb9); Chris@10: TdC = VSUB(TdA, TdB); Chris@10: Teo = VADD(TdA, TdB); Chris@10: T57 = VSUB(T53, T56); Chris@10: T5i = VMUL(LDK(KP707106781), VSUB(T5c, T5h)); Chris@10: T5j = VSUB(T57, T5i); Chris@10: T6V = VADD(T57, T5i); Chris@10: } Chris@10: { Chris@10: V T5M, T5P, T8w, T8x; Chris@10: T5M = VADD(T5I, T5L); Chris@10: T5P = VMUL(LDK(KP707106781), VSUB(T5N, T5O)); Chris@10: T5Q = VSUB(T5M, T5P); Chris@10: T6Y = VADD(T5M, T5P); Chris@10: T8w = VSUB(T5L, T5I); Chris@10: T8x = VMUL(LDK(KP707106781), VADD(T5c, T5h)); Chris@10: T8y = VSUB(T8w, T8x); Chris@10: T9C = VADD(T8w, T8x); Chris@10: } Chris@10: { Chris@10: V Tb7, Tba, T8l, T8m; Chris@10: Tb7 = VSUB(T1z, T1C); Chris@10: Tba = VSUB(Tb8, Tb9); Chris@10: Tbb = VSUB(Tb7, Tba); Chris@10: Tct = VADD(Tb7, Tba); Chris@10: T8l = VADD(T53, T56); Chris@10: T8m = VMUL(LDK(KP707106781), VADD(T5O, T5N)); Chris@10: T8n = VSUB(T8l, T8m); Chris@10: T9z = VADD(T8l, T8m); Chris@10: } Chris@10: } Chris@10: { Chris@10: V TN, T40, T2Q, T3Q, TQ, T3P, T2T, T41, TX, T44, T30, T3U, TU, T43, T2X; Chris@10: V T3X; Chris@10: { Chris@10: V TL, TM, T2R, T2S; Chris@10: TL = LD(&(ri[WS(is, 62)]), ivs, &(ri[0])); Chris@10: TM = LD(&(ri[WS(is, 30)]), ivs, &(ri[0])); Chris@10: TN = VADD(TL, TM); Chris@10: T40 = VSUB(TL, TM); Chris@10: { Chris@10: V T2O, T2P, TO, TP; Chris@10: T2O = LD(&(ii[WS(is, 62)]), ivs, &(ii[0])); Chris@10: T2P = LD(&(ii[WS(is, 30)]), ivs, &(ii[0])); Chris@10: T2Q = VADD(T2O, T2P); Chris@10: T3Q = VSUB(T2O, T2P); Chris@10: TO = LD(&(ri[WS(is, 14)]), ivs, &(ri[0])); Chris@10: TP = LD(&(ri[WS(is, 46)]), ivs, &(ri[0])); Chris@10: TQ = VADD(TO, TP); Chris@10: T3P = VSUB(TO, TP); Chris@10: } Chris@10: T2R = LD(&(ii[WS(is, 14)]), ivs, &(ii[0])); Chris@10: T2S = LD(&(ii[WS(is, 46)]), ivs, &(ii[0])); Chris@10: T2T = VADD(T2R, T2S); Chris@10: T41 = VSUB(T2R, T2S); Chris@10: { Chris@10: V TV, TW, T3S, T2Y, T2Z, T3T; Chris@10: TV = LD(&(ri[WS(is, 54)]), ivs, &(ri[0])); Chris@10: TW = LD(&(ri[WS(is, 22)]), ivs, &(ri[0])); Chris@10: T3S = VSUB(TV, TW); Chris@10: T2Y = LD(&(ii[WS(is, 54)]), ivs, &(ii[0])); Chris@10: T2Z = LD(&(ii[WS(is, 22)]), ivs, &(ii[0])); Chris@10: T3T = VSUB(T2Y, T2Z); Chris@10: TX = VADD(TV, TW); Chris@10: T44 = VADD(T3S, T3T); Chris@10: T30 = VADD(T2Y, T2Z); Chris@10: T3U = VSUB(T3S, T3T); Chris@10: } Chris@10: { Chris@10: V TS, TT, T3V, T2V, T2W, T3W; Chris@10: TS = LD(&(ri[WS(is, 6)]), ivs, &(ri[0])); Chris@10: TT = LD(&(ri[WS(is, 38)]), ivs, &(ri[0])); Chris@10: T3V = VSUB(TS, TT); Chris@10: T2V = LD(&(ii[WS(is, 6)]), ivs, &(ii[0])); Chris@10: T2W = LD(&(ii[WS(is, 38)]), ivs, &(ii[0])); Chris@10: T3W = VSUB(T2V, T2W); Chris@10: TU = VADD(TS, TT); Chris@10: T43 = VSUB(T3W, T3V); Chris@10: T2X = VADD(T2V, T2W); Chris@10: T3X = VADD(T3V, T3W); Chris@10: } Chris@10: } Chris@10: { Chris@10: V TR, TY, Tax, Tay; Chris@10: TR = VADD(TN, TQ); Chris@10: TY = VADD(TU, TX); Chris@10: TZ = VADD(TR, TY); Chris@10: Tdf = VSUB(TR, TY); Chris@10: Tax = VSUB(T2Q, T2T); Chris@10: Tay = VSUB(TX, TU); Chris@10: Taz = VSUB(Tax, Tay); Chris@10: Tch = VADD(Tay, Tax); Chris@10: } Chris@10: { Chris@10: V TaA, TaB, T2U, T31; Chris@10: TaA = VSUB(TN, TQ); Chris@10: TaB = VSUB(T2X, T30); Chris@10: TaC = VSUB(TaA, TaB); Chris@10: Tci = VADD(TaA, TaB); Chris@10: T2U = VADD(T2Q, T2T); Chris@10: T31 = VADD(T2X, T30); Chris@10: T32 = VADD(T2U, T31); Chris@10: Tdg = VSUB(T2U, T31); Chris@10: } Chris@10: { Chris@10: V T3R, T3Y, T7T, T7U; Chris@10: T3R = VADD(T3P, T3Q); Chris@10: T3Y = VMUL(LDK(KP707106781), VSUB(T3U, T3X)); 
Chris@10: T3Z = VSUB(T3R, T3Y); Chris@10: T6J = VADD(T3R, T3Y); Chris@10: T7T = VADD(T40, T41); Chris@10: T7U = VMUL(LDK(KP707106781), VADD(T3X, T3U)); Chris@10: T7V = VSUB(T7T, T7U); Chris@10: T9n = VADD(T7T, T7U); Chris@10: } Chris@10: { Chris@10: V T7W, T7X, T42, T45; Chris@10: T7W = VSUB(T3Q, T3P); Chris@10: T7X = VMUL(LDK(KP707106781), VADD(T43, T44)); Chris@10: T7Y = VSUB(T7W, T7X); Chris@10: T9o = VADD(T7W, T7X); Chris@10: T42 = VSUB(T40, T41); Chris@10: T45 = VMUL(LDK(KP707106781), VSUB(T43, T44)); Chris@10: T46 = VSUB(T42, T45); Chris@10: T6K = VADD(T42, T45); Chris@10: } Chris@10: } Chris@10: { Chris@10: V T14, T4P, T4d, TaG, T17, T4a, T4S, TaH, T1e, TaZ, T4j, T4V, T1b, TaY, T4o; Chris@10: V T4U; Chris@10: { Chris@10: V T12, T13, T4Q, T4R; Chris@10: T12 = LD(&(ri[WS(is, 1)]), ivs, &(ri[WS(is, 1)])); Chris@10: T13 = LD(&(ri[WS(is, 33)]), ivs, &(ri[WS(is, 1)])); Chris@10: T14 = VADD(T12, T13); Chris@10: T4P = VSUB(T12, T13); Chris@10: { Chris@10: V T4b, T4c, T15, T16; Chris@10: T4b = LD(&(ii[WS(is, 1)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4c = LD(&(ii[WS(is, 33)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4d = VSUB(T4b, T4c); Chris@10: TaG = VADD(T4b, T4c); Chris@10: T15 = LD(&(ri[WS(is, 17)]), ivs, &(ri[WS(is, 1)])); Chris@10: T16 = LD(&(ri[WS(is, 49)]), ivs, &(ri[WS(is, 1)])); Chris@10: T17 = VADD(T15, T16); Chris@10: T4a = VSUB(T15, T16); Chris@10: } Chris@10: T4Q = LD(&(ii[WS(is, 17)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4R = LD(&(ii[WS(is, 49)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4S = VSUB(T4Q, T4R); Chris@10: TaH = VADD(T4Q, T4R); Chris@10: { Chris@10: V T1c, T1d, T4f, T4g, T4h, T4i; Chris@10: T1c = LD(&(ri[WS(is, 57)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1d = LD(&(ri[WS(is, 25)]), ivs, &(ri[WS(is, 1)])); Chris@10: T4f = VSUB(T1c, T1d); Chris@10: T4g = LD(&(ii[WS(is, 57)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4h = LD(&(ii[WS(is, 25)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4i = VSUB(T4g, T4h); Chris@10: T1e = VADD(T1c, T1d); Chris@10: TaZ = VADD(T4g, T4h); Chris@10: T4j = VSUB(T4f, T4i); Chris@10: T4V = VADD(T4f, T4i); Chris@10: } Chris@10: { Chris@10: V T19, T1a, T4k, T4l, T4m, T4n; Chris@10: T19 = LD(&(ri[WS(is, 9)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1a = LD(&(ri[WS(is, 41)]), ivs, &(ri[WS(is, 1)])); Chris@10: T4k = VSUB(T19, T1a); Chris@10: T4l = LD(&(ii[WS(is, 9)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4m = LD(&(ii[WS(is, 41)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4n = VSUB(T4l, T4m); Chris@10: T1b = VADD(T19, T1a); Chris@10: TaY = VADD(T4l, T4m); Chris@10: T4o = VADD(T4k, T4n); Chris@10: T4U = VSUB(T4n, T4k); Chris@10: } Chris@10: } Chris@10: { Chris@10: V T18, T1f, TaX, Tb0; Chris@10: T18 = VADD(T14, T17); Chris@10: T1f = VADD(T1b, T1e); Chris@10: T1g = VADD(T18, T1f); Chris@10: Tdp = VSUB(T18, T1f); Chris@10: TaX = VSUB(T14, T17); Chris@10: Tb0 = VSUB(TaY, TaZ); Chris@10: Tb1 = VSUB(TaX, Tb0); Chris@10: Tcm = VADD(TaX, Tb0); Chris@10: } Chris@10: { Chris@10: V Tdk, Tdl, T4e, T4p; Chris@10: Tdk = VADD(TaG, TaH); Chris@10: Tdl = VADD(TaY, TaZ); Chris@10: Tdm = VSUB(Tdk, Tdl); Chris@10: Tej = VADD(Tdk, Tdl); Chris@10: T4e = VADD(T4a, T4d); Chris@10: T4p = VMUL(LDK(KP707106781), VSUB(T4j, T4o)); Chris@10: T4q = VSUB(T4e, T4p); Chris@10: T6R = VADD(T4e, T4p); Chris@10: } Chris@10: { Chris@10: V T4T, T4W, T8d, T8e; Chris@10: T4T = VSUB(T4P, T4S); Chris@10: T4W = VMUL(LDK(KP707106781), VSUB(T4U, T4V)); Chris@10: T4X = VSUB(T4T, T4W); Chris@10: T6O = VADD(T4T, T4W); Chris@10: T8d = VADD(T4P, T4S); Chris@10: T8e = VMUL(LDK(KP707106781), VADD(T4o, T4j)); Chris@10: T8f = VSUB(T8d, T8e); 
Chris@10: T9s = VADD(T8d, T8e); Chris@10: } Chris@10: { Chris@10: V TaI, TaJ, T82, T83; Chris@10: TaI = VSUB(TaG, TaH); Chris@10: TaJ = VSUB(T1e, T1b); Chris@10: TaK = VSUB(TaI, TaJ); Chris@10: Tcp = VADD(TaJ, TaI); Chris@10: T82 = VSUB(T4d, T4a); Chris@10: T83 = VMUL(LDK(KP707106781), VADD(T4U, T4V)); Chris@10: T84 = VSUB(T82, T83); Chris@10: T9v = VADD(T82, T83); Chris@10: } Chris@10: } Chris@10: { Chris@10: V T1j, TaR, T1m, TaS, T4G, T4L, TaT, TaQ, T89, T88, T1q, TaM, T1t, TaN, T4v; Chris@10: V T4A, TaO, TaL, T86, T85; Chris@10: { Chris@10: V T4H, T4F, T4C, T4K; Chris@10: { Chris@10: V T1h, T1i, T4D, T4E; Chris@10: T1h = LD(&(ri[WS(is, 5)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1i = LD(&(ri[WS(is, 37)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1j = VADD(T1h, T1i); Chris@10: T4H = VSUB(T1h, T1i); Chris@10: T4D = LD(&(ii[WS(is, 5)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4E = LD(&(ii[WS(is, 37)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4F = VSUB(T4D, T4E); Chris@10: TaR = VADD(T4D, T4E); Chris@10: } Chris@10: { Chris@10: V T1k, T1l, T4I, T4J; Chris@10: T1k = LD(&(ri[WS(is, 21)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1l = LD(&(ri[WS(is, 53)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1m = VADD(T1k, T1l); Chris@10: T4C = VSUB(T1k, T1l); Chris@10: T4I = LD(&(ii[WS(is, 21)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4J = LD(&(ii[WS(is, 53)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4K = VSUB(T4I, T4J); Chris@10: TaS = VADD(T4I, T4J); Chris@10: } Chris@10: T4G = VADD(T4C, T4F); Chris@10: T4L = VSUB(T4H, T4K); Chris@10: TaT = VSUB(TaR, TaS); Chris@10: TaQ = VSUB(T1j, T1m); Chris@10: T89 = VADD(T4H, T4K); Chris@10: T88 = VSUB(T4F, T4C); Chris@10: } Chris@10: { Chris@10: V T4r, T4z, T4w, T4u; Chris@10: { Chris@10: V T1o, T1p, T4x, T4y; Chris@10: T1o = LD(&(ri[WS(is, 61)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1p = LD(&(ri[WS(is, 29)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1q = VADD(T1o, T1p); Chris@10: T4r = VSUB(T1o, T1p); Chris@10: T4x = LD(&(ii[WS(is, 61)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4y = LD(&(ii[WS(is, 29)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4z = VSUB(T4x, T4y); Chris@10: TaM = VADD(T4x, T4y); Chris@10: } Chris@10: { Chris@10: V T1r, T1s, T4s, T4t; Chris@10: T1r = LD(&(ri[WS(is, 13)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1s = LD(&(ri[WS(is, 45)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1t = VADD(T1r, T1s); Chris@10: T4w = VSUB(T1r, T1s); Chris@10: T4s = LD(&(ii[WS(is, 13)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4t = LD(&(ii[WS(is, 45)]), ivs, &(ii[WS(is, 1)])); Chris@10: T4u = VSUB(T4s, T4t); Chris@10: TaN = VADD(T4s, T4t); Chris@10: } Chris@10: T4v = VSUB(T4r, T4u); Chris@10: T4A = VADD(T4w, T4z); Chris@10: TaO = VSUB(TaM, TaN); Chris@10: TaL = VSUB(T1q, T1t); Chris@10: T86 = VSUB(T4z, T4w); Chris@10: T85 = VADD(T4r, T4u); Chris@10: } Chris@10: { Chris@10: V T1n, T1u, Tb2, Tb3; Chris@10: T1n = VADD(T1j, T1m); Chris@10: T1u = VADD(T1q, T1t); Chris@10: T1v = VADD(T1n, T1u); Chris@10: Tdn = VSUB(T1u, T1n); Chris@10: Tb2 = VSUB(TaT, TaQ); Chris@10: Tb3 = VADD(TaL, TaO); Chris@10: Tb4 = VMUL(LDK(KP707106781), VSUB(Tb2, Tb3)); Chris@10: Tcq = VMUL(LDK(KP707106781), VADD(Tb2, Tb3)); Chris@10: } Chris@10: { Chris@10: V Tdq, Tdr, T4B, T4M; Chris@10: Tdq = VADD(TaR, TaS); Chris@10: Tdr = VADD(TaM, TaN); Chris@10: Tds = VSUB(Tdq, Tdr); Chris@10: Tek = VADD(Tdq, Tdr); Chris@10: T4B = VFNMS(LDK(KP923879532), T4A, VMUL(LDK(KP382683432), T4v)); Chris@10: T4M = VFMA(LDK(KP923879532), T4G, VMUL(LDK(KP382683432), T4L)); Chris@10: T4N = VSUB(T4B, T4M); Chris@10: T6P = VADD(T4M, T4B); Chris@10: } Chris@10: { Chris@10: V T4Y, T4Z, T8g, T8h; 
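/* FFTW's fused-multiply macros as used below: VFMA(a, b, c) = a*b + c and
   VFNMS(a, b, c) = c - a*b.  For example,
   VFNMS(LDK(KP923879532), T4L, VMUL(LDK(KP382683432), T4G)) evaluates to
   0.382683*T4G - 0.923879*T4L, one component of a rotation through pi/8
   (KP923879532 = cos(pi/8), KP382683432 = sin(pi/8)). */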
Chris@10: T4Y = VFNMS(LDK(KP923879532), T4L, VMUL(LDK(KP382683432), T4G)); Chris@10: T4Z = VFMA(LDK(KP382683432), T4A, VMUL(LDK(KP923879532), T4v)); Chris@10: T50 = VSUB(T4Y, T4Z); Chris@10: T6S = VADD(T4Y, T4Z); Chris@10: T8g = VFNMS(LDK(KP382683432), T89, VMUL(LDK(KP923879532), T88)); Chris@10: T8h = VFMA(LDK(KP923879532), T86, VMUL(LDK(KP382683432), T85)); Chris@10: T8i = VSUB(T8g, T8h); Chris@10: T9w = VADD(T8g, T8h); Chris@10: } Chris@10: { Chris@10: V TaP, TaU, T87, T8a; Chris@10: TaP = VSUB(TaL, TaO); Chris@10: TaU = VADD(TaQ, TaT); Chris@10: TaV = VMUL(LDK(KP707106781), VSUB(TaP, TaU)); Chris@10: Tcn = VMUL(LDK(KP707106781), VADD(TaU, TaP)); Chris@10: T87 = VFNMS(LDK(KP382683432), T86, VMUL(LDK(KP923879532), T85)); Chris@10: T8a = VFMA(LDK(KP382683432), T88, VMUL(LDK(KP923879532), T89)); Chris@10: T8b = VSUB(T87, T8a); Chris@10: T9t = VADD(T8a, T87); Chris@10: } Chris@10: } Chris@10: { Chris@10: V T1O, Tbc, T1R, Tbd, T5o, T5t, Tbf, Tbe, T8p, T8o, T1V, Tbi, T1Y, Tbj, T5z; Chris@10: V T5E, Tbk, Tbh, T8s, T8r; Chris@10: { Chris@10: V T5p, T5n, T5k, T5s; Chris@10: { Chris@10: V T1M, T1N, T5l, T5m; Chris@10: T1M = LD(&(ri[WS(is, 3)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1N = LD(&(ri[WS(is, 35)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1O = VADD(T1M, T1N); Chris@10: T5p = VSUB(T1M, T1N); Chris@10: T5l = LD(&(ii[WS(is, 3)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5m = LD(&(ii[WS(is, 35)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5n = VSUB(T5l, T5m); Chris@10: Tbc = VADD(T5l, T5m); Chris@10: } Chris@10: { Chris@10: V T1P, T1Q, T5q, T5r; Chris@10: T1P = LD(&(ri[WS(is, 19)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1Q = LD(&(ri[WS(is, 51)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1R = VADD(T1P, T1Q); Chris@10: T5k = VSUB(T1P, T1Q); Chris@10: T5q = LD(&(ii[WS(is, 19)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5r = LD(&(ii[WS(is, 51)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5s = VSUB(T5q, T5r); Chris@10: Tbd = VADD(T5q, T5r); Chris@10: } Chris@10: T5o = VADD(T5k, T5n); Chris@10: T5t = VSUB(T5p, T5s); Chris@10: Tbf = VSUB(T1O, T1R); Chris@10: Tbe = VSUB(Tbc, Tbd); Chris@10: T8p = VADD(T5p, T5s); Chris@10: T8o = VSUB(T5n, T5k); Chris@10: } Chris@10: { Chris@10: V T5A, T5y, T5v, T5D; Chris@10: { Chris@10: V T1T, T1U, T5w, T5x; Chris@10: T1T = LD(&(ri[WS(is, 59)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1U = LD(&(ri[WS(is, 27)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1V = VADD(T1T, T1U); Chris@10: T5A = VSUB(T1T, T1U); Chris@10: T5w = LD(&(ii[WS(is, 59)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5x = LD(&(ii[WS(is, 27)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5y = VSUB(T5w, T5x); Chris@10: Tbi = VADD(T5w, T5x); Chris@10: } Chris@10: { Chris@10: V T1W, T1X, T5B, T5C; Chris@10: T1W = LD(&(ri[WS(is, 11)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1X = LD(&(ri[WS(is, 43)]), ivs, &(ri[WS(is, 1)])); Chris@10: T1Y = VADD(T1W, T1X); Chris@10: T5v = VSUB(T1W, T1X); Chris@10: T5B = LD(&(ii[WS(is, 11)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5C = LD(&(ii[WS(is, 43)]), ivs, &(ii[WS(is, 1)])); Chris@10: T5D = VSUB(T5B, T5C); Chris@10: Tbj = VADD(T5B, T5C); Chris@10: } Chris@10: T5z = VADD(T5v, T5y); Chris@10: T5E = VSUB(T5A, T5D); Chris@10: Tbk = VSUB(Tbi, Tbj); Chris@10: Tbh = VSUB(T1V, T1Y); Chris@10: T8s = VADD(T5A, T5D); Chris@10: T8r = VSUB(T5y, T5v); Chris@10: } Chris@10: { Chris@10: V T1S, T1Z, Tbt, Tbu; Chris@10: T1S = VADD(T1O, T1R); Chris@10: T1Z = VADD(T1V, T1Y); Chris@10: T20 = VADD(T1S, T1Z); Chris@10: TdD = VSUB(T1Z, T1S); Chris@10: Tbt = VSUB(Tbh, Tbk); Chris@10: Tbu = VADD(Tbf, Tbe); Chris@10: Tbv = VMUL(LDK(KP707106781), VSUB(Tbt, Tbu)); Chris@10: 
Tcu = VMUL(LDK(KP707106781), VADD(Tbu, Tbt)); Chris@10: } Chris@10: { Chris@10: V Tdw, Tdx, T5u, T5F; Chris@10: Tdw = VADD(Tbc, Tbd); Chris@10: Tdx = VADD(Tbi, Tbj); Chris@10: Tdy = VSUB(Tdw, Tdx); Chris@10: Tep = VADD(Tdw, Tdx); Chris@10: T5u = VFNMS(LDK(KP923879532), T5t, VMUL(LDK(KP382683432), T5o)); Chris@10: T5F = VFMA(LDK(KP382683432), T5z, VMUL(LDK(KP923879532), T5E)); Chris@10: T5G = VSUB(T5u, T5F); Chris@10: T6Z = VADD(T5u, T5F); Chris@10: } Chris@10: { Chris@10: V T5R, T5S, T8z, T8A; Chris@10: T5R = VFNMS(LDK(KP923879532), T5z, VMUL(LDK(KP382683432), T5E)); Chris@10: T5S = VFMA(LDK(KP923879532), T5o, VMUL(LDK(KP382683432), T5t)); Chris@10: T5T = VSUB(T5R, T5S); Chris@10: T6W = VADD(T5S, T5R); Chris@10: T8z = VFNMS(LDK(KP382683432), T8r, VMUL(LDK(KP923879532), T8s)); Chris@10: T8A = VFMA(LDK(KP382683432), T8o, VMUL(LDK(KP923879532), T8p)); Chris@10: T8B = VSUB(T8z, T8A); Chris@10: T9A = VADD(T8A, T8z); Chris@10: } Chris@10: { Chris@10: V Tbg, Tbl, T8q, T8t; Chris@10: Tbg = VSUB(Tbe, Tbf); Chris@10: Tbl = VADD(Tbh, Tbk); Chris@10: Tbm = VMUL(LDK(KP707106781), VSUB(Tbg, Tbl)); Chris@10: Tcx = VMUL(LDK(KP707106781), VADD(Tbg, Tbl)); Chris@10: T8q = VFNMS(LDK(KP382683432), T8p, VMUL(LDK(KP923879532), T8o)); Chris@10: T8t = VFMA(LDK(KP923879532), T8r, VMUL(LDK(KP382683432), T8s)); Chris@10: T8u = VSUB(T8q, T8t); Chris@10: T9D = VADD(T8q, T8t); Chris@10: } Chris@10: } Chris@10: { Chris@10: V TeJ, TeK, TeL, TeM, TeN, TeO, TeP, TeQ, TeR, TeS, TeT, TeU, TeV, TeW, TeX; Chris@10: V TeY, TeZ, Tf0, Tf1, Tf2, Tf3, Tf4, Tf5, Tf6, Tf7, Tf8, Tf9, Tfa, Tfb, Tfc; Chris@10: V Tfd, Tfe, Tff, Tfg, Tfh, Tfi, Tfj, Tfk, Tfl, Tfm, Tfn, Tfo, Tfp, Tfq, Tfr; Chris@10: V Tfs, Tft, Tfu; Chris@10: { Chris@10: V T11, TeD, TeG, TeI, T22, T23, T34, TeH; Chris@10: { Chris@10: V Tv, T10, TeE, TeF; Chris@10: Tv = VADD(Tf, Tu); Chris@10: T10 = VADD(TK, TZ); Chris@10: T11 = VADD(Tv, T10); Chris@10: TeD = VSUB(Tv, T10); Chris@10: TeE = VADD(Tej, Tek); Chris@10: TeF = VADD(Teo, Tep); Chris@10: TeG = VSUB(TeE, TeF); Chris@10: TeI = VADD(TeE, TeF); Chris@10: } Chris@10: { Chris@10: V T1w, T21, T2y, T33; Chris@10: T1w = VADD(T1g, T1v); Chris@10: T21 = VADD(T1L, T20); Chris@10: T22 = VADD(T1w, T21); Chris@10: T23 = VSUB(T21, T1w); Chris@10: T2y = VADD(T2i, T2x); Chris@10: T33 = VADD(T2N, T32); Chris@10: T34 = VSUB(T2y, T33); Chris@10: TeH = VADD(T2y, T33); Chris@10: } Chris@10: TeJ = VSUB(T11, T22); Chris@10: STM4(&(ro[32]), TeJ, ovs, &(ro[0])); Chris@10: TeK = VSUB(TeH, TeI); Chris@10: STM4(&(io[32]), TeK, ovs, &(io[0])); Chris@10: TeL = VADD(T11, T22); Chris@10: STM4(&(ro[0]), TeL, ovs, &(ro[0])); Chris@10: TeM = VADD(TeH, TeI); Chris@10: STM4(&(io[0]), TeM, ovs, &(io[0])); Chris@10: TeN = VADD(T23, T34); Chris@10: STM4(&(io[16]), TeN, ovs, &(io[0])); Chris@10: TeO = VADD(TeD, TeG); Chris@10: STM4(&(ro[16]), TeO, ovs, &(ro[0])); Chris@10: TeP = VSUB(T34, T23); Chris@10: STM4(&(io[48]), TeP, ovs, &(io[0])); Chris@10: TeQ = VSUB(TeD, TeG); Chris@10: STM4(&(ro[48]), TeQ, ovs, &(ro[0])); Chris@10: } Chris@10: { Chris@10: V Teh, Tex, Tev, TeB, Tem, Tey, Ter, Tez; Chris@10: { Chris@10: V Tef, Teg, Tet, Teu; Chris@10: Tef = VSUB(Tf, Tu); Chris@10: Teg = VSUB(T2N, T32); Chris@10: Teh = VADD(Tef, Teg); Chris@10: Tex = VSUB(Tef, Teg); Chris@10: Tet = VSUB(T2i, T2x); Chris@10: Teu = VSUB(TZ, TK); Chris@10: Tev = VSUB(Tet, Teu); Chris@10: TeB = VADD(Teu, Tet); Chris@10: } Chris@10: { Chris@10: V Tei, Tel, Ten, Teq; Chris@10: Tei = VSUB(T1g, T1v); Chris@10: Tel = VSUB(Tej, Tek); Chris@10: Tem = VADD(Tei, Tel); Chris@10: Tey = VSUB(Tel, 
Tei); Chris@10: Ten = VSUB(T1L, T20); Chris@10: Teq = VSUB(Teo, Tep); Chris@10: Ter = VSUB(Ten, Teq); Chris@10: Tez = VADD(Ten, Teq); Chris@10: } Chris@10: { Chris@10: V Tes, TeC, Tew, TeA; Chris@10: Tes = VMUL(LDK(KP707106781), VADD(Tem, Ter)); Chris@10: TeR = VSUB(Teh, Tes); Chris@10: STM4(&(ro[40]), TeR, ovs, &(ro[0])); Chris@10: TeS = VADD(Teh, Tes); Chris@10: STM4(&(ro[8]), TeS, ovs, &(ro[0])); Chris@10: TeC = VMUL(LDK(KP707106781), VADD(Tey, Tez)); Chris@10: TeT = VSUB(TeB, TeC); Chris@10: STM4(&(io[40]), TeT, ovs, &(io[0])); Chris@10: TeU = VADD(TeB, TeC); Chris@10: STM4(&(io[8]), TeU, ovs, &(io[0])); Chris@10: Tew = VMUL(LDK(KP707106781), VSUB(Ter, Tem)); Chris@10: TeV = VSUB(Tev, Tew); Chris@10: STM4(&(io[56]), TeV, ovs, &(io[0])); Chris@10: TeW = VADD(Tev, Tew); Chris@10: STM4(&(io[24]), TeW, ovs, &(io[0])); Chris@10: TeA = VMUL(LDK(KP707106781), VSUB(Tey, Tez)); Chris@10: TeX = VSUB(Tex, TeA); Chris@10: STM4(&(ro[56]), TeX, ovs, &(ro[0])); Chris@10: TeY = VADD(Tex, TeA); Chris@10: STM4(&(ro[24]), TeY, ovs, &(ro[0])); Chris@10: } Chris@10: } Chris@10: { Chris@10: V Tdb, TdV, Te5, TdJ, Tdi, Te6, Te3, Teb, TdM, TdW, Tdu, TdQ, Te0, Tea, TdF; Chris@10: V TdR; Chris@10: { Chris@10: V Tde, Tdh, Tdo, Tdt; Chris@10: Tdb = VSUB(Td9, Tda); Chris@10: TdV = VADD(Td9, Tda); Chris@10: Te5 = VADD(TdI, TdH); Chris@10: TdJ = VSUB(TdH, TdI); Chris@10: Tde = VSUB(Tdc, Tdd); Chris@10: Tdh = VADD(Tdf, Tdg); Chris@10: Tdi = VMUL(LDK(KP707106781), VSUB(Tde, Tdh)); Chris@10: Te6 = VMUL(LDK(KP707106781), VADD(Tde, Tdh)); Chris@10: { Chris@10: V Te1, Te2, TdK, TdL; Chris@10: Te1 = VADD(Tdv, Tdy); Chris@10: Te2 = VADD(TdD, TdC); Chris@10: Te3 = VFNMS(LDK(KP382683432), Te2, VMUL(LDK(KP923879532), Te1)); Chris@10: Teb = VFMA(LDK(KP923879532), Te2, VMUL(LDK(KP382683432), Te1)); Chris@10: TdK = VSUB(Tdf, Tdg); Chris@10: TdL = VADD(Tdd, Tdc); Chris@10: TdM = VMUL(LDK(KP707106781), VSUB(TdK, TdL)); Chris@10: TdW = VMUL(LDK(KP707106781), VADD(TdL, TdK)); Chris@10: } Chris@10: Tdo = VSUB(Tdm, Tdn); Chris@10: Tdt = VSUB(Tdp, Tds); Chris@10: Tdu = VFMA(LDK(KP923879532), Tdo, VMUL(LDK(KP382683432), Tdt)); Chris@10: TdQ = VFNMS(LDK(KP923879532), Tdt, VMUL(LDK(KP382683432), Tdo)); Chris@10: { Chris@10: V TdY, TdZ, Tdz, TdE; Chris@10: TdY = VADD(Tdn, Tdm); Chris@10: TdZ = VADD(Tdp, Tds); Chris@10: Te0 = VFMA(LDK(KP382683432), TdY, VMUL(LDK(KP923879532), TdZ)); Chris@10: Tea = VFNMS(LDK(KP382683432), TdZ, VMUL(LDK(KP923879532), TdY)); Chris@10: Tdz = VSUB(Tdv, Tdy); Chris@10: TdE = VSUB(TdC, TdD); Chris@10: TdF = VFNMS(LDK(KP923879532), TdE, VMUL(LDK(KP382683432), Tdz)); Chris@10: TdR = VFMA(LDK(KP382683432), TdE, VMUL(LDK(KP923879532), Tdz)); Chris@10: } Chris@10: } Chris@10: { Chris@10: V Tdj, TdG, TdT, TdU; Chris@10: Tdj = VADD(Tdb, Tdi); Chris@10: TdG = VADD(Tdu, TdF); Chris@10: TeZ = VSUB(Tdj, TdG); Chris@10: STM4(&(ro[44]), TeZ, ovs, &(ro[0])); Chris@10: Tf0 = VADD(Tdj, TdG); Chris@10: STM4(&(ro[12]), Tf0, ovs, &(ro[0])); Chris@10: TdT = VADD(TdJ, TdM); Chris@10: TdU = VADD(TdQ, TdR); Chris@10: Tf1 = VSUB(TdT, TdU); Chris@10: STM4(&(io[44]), Tf1, ovs, &(io[0])); Chris@10: Tf2 = VADD(TdT, TdU); Chris@10: STM4(&(io[12]), Tf2, ovs, &(io[0])); Chris@10: } Chris@10: { Chris@10: V TdN, TdO, TdP, TdS; Chris@10: TdN = VSUB(TdJ, TdM); Chris@10: TdO = VSUB(TdF, Tdu); Chris@10: Tf3 = VSUB(TdN, TdO); Chris@10: STM4(&(io[60]), Tf3, ovs, &(io[0])); Chris@10: Tf4 = VADD(TdN, TdO); Chris@10: STM4(&(io[28]), Tf4, ovs, &(io[0])); Chris@10: TdP = VSUB(Tdb, Tdi); Chris@10: TdS = VSUB(TdQ, TdR); Chris@10: Tf5 = VSUB(TdP, TdS); 
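/* Store pattern in this output section: each VADD/VSUB pair of the same two
   operands is the closing radix-2 butterfly, and its two results go to output
   indices k and k + 32 (e.g. ro[0]/ro[32] and ro[12]/ro[44] above), with the
   VADD half stored at the lower index. */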
Chris@10: STM4(&(ro[60]), Tf5, ovs, &(ro[0])); Chris@10: Tf6 = VADD(TdP, TdS); Chris@10: STM4(&(ro[28]), Tf6, ovs, &(ro[0])); Chris@10: } Chris@10: { Chris@10: V TdX, Te4, Ted, Tee; Chris@10: TdX = VADD(TdV, TdW); Chris@10: Te4 = VADD(Te0, Te3); Chris@10: Tf7 = VSUB(TdX, Te4); Chris@10: STM4(&(ro[36]), Tf7, ovs, &(ro[0])); Chris@10: Tf8 = VADD(TdX, Te4); Chris@10: STM4(&(ro[4]), Tf8, ovs, &(ro[0])); Chris@10: Ted = VADD(Te5, Te6); Chris@10: Tee = VADD(Tea, Teb); Chris@10: Tf9 = VSUB(Ted, Tee); Chris@10: STM4(&(io[36]), Tf9, ovs, &(io[0])); Chris@10: Tfa = VADD(Ted, Tee); Chris@10: STM4(&(io[4]), Tfa, ovs, &(io[0])); Chris@10: } Chris@10: { Chris@10: V Te7, Te8, Te9, Tec; Chris@10: Te7 = VSUB(Te5, Te6); Chris@10: Te8 = VSUB(Te3, Te0); Chris@10: Tfb = VSUB(Te7, Te8); Chris@10: STM4(&(io[52]), Tfb, ovs, &(io[0])); Chris@10: Tfc = VADD(Te7, Te8); Chris@10: STM4(&(io[20]), Tfc, ovs, &(io[0])); Chris@10: Te9 = VSUB(TdV, TdW); Chris@10: Tec = VSUB(Tea, Teb); Chris@10: Tfd = VSUB(Te9, Tec); Chris@10: STM4(&(ro[52]), Tfd, ovs, &(ro[0])); Chris@10: Tfe = VADD(Te9, Tec); Chris@10: STM4(&(ro[20]), Tfe, ovs, &(ro[0])); Chris@10: } Chris@10: } Chris@10: { Chris@10: V Tcd, TcP, TcD, TcZ, Tck, Td0, TcX, Td5, Tcs, TcK, TcG, TcQ, TcU, Td4, Tcz; Chris@10: V TcL, Tcc, TcC; Chris@10: Tcc = VMUL(LDK(KP707106781), VADD(TbD, TbC)); Chris@10: Tcd = VSUB(Tcb, Tcc); Chris@10: TcP = VADD(Tcb, Tcc); Chris@10: TcC = VMUL(LDK(KP707106781), VADD(Tak, Tan)); Chris@10: TcD = VSUB(TcB, TcC); Chris@10: TcZ = VADD(TcB, TcC); Chris@10: { Chris@10: V Tcg, Tcj, TcV, TcW; Chris@10: Tcg = VFNMS(LDK(KP382683432), Tcf, VMUL(LDK(KP923879532), Tce)); Chris@10: Tcj = VFMA(LDK(KP923879532), Tch, VMUL(LDK(KP382683432), Tci)); Chris@10: Tck = VSUB(Tcg, Tcj); Chris@10: Td0 = VADD(Tcg, Tcj); Chris@10: TcV = VADD(Tct, Tcu); Chris@10: TcW = VADD(Tcw, Tcx); Chris@10: TcX = VFNMS(LDK(KP195090322), TcW, VMUL(LDK(KP980785280), TcV)); Chris@10: Td5 = VFMA(LDK(KP195090322), TcV, VMUL(LDK(KP980785280), TcW)); Chris@10: } Chris@10: { Chris@10: V Tco, Tcr, TcE, TcF; Chris@10: Tco = VSUB(Tcm, Tcn); Chris@10: Tcr = VSUB(Tcp, Tcq); Chris@10: Tcs = VFMA(LDK(KP555570233), Tco, VMUL(LDK(KP831469612), Tcr)); Chris@10: TcK = VFNMS(LDK(KP831469612), Tco, VMUL(LDK(KP555570233), Tcr)); Chris@10: TcE = VFNMS(LDK(KP382683432), Tch, VMUL(LDK(KP923879532), Tci)); Chris@10: TcF = VFMA(LDK(KP382683432), Tce, VMUL(LDK(KP923879532), Tcf)); Chris@10: TcG = VSUB(TcE, TcF); Chris@10: TcQ = VADD(TcF, TcE); Chris@10: } Chris@10: { Chris@10: V TcS, TcT, Tcv, Tcy; Chris@10: TcS = VADD(Tcm, Tcn); Chris@10: TcT = VADD(Tcp, Tcq); Chris@10: TcU = VFMA(LDK(KP980785280), TcS, VMUL(LDK(KP195090322), TcT)); Chris@10: Td4 = VFNMS(LDK(KP195090322), TcS, VMUL(LDK(KP980785280), TcT)); Chris@10: Tcv = VSUB(Tct, Tcu); Chris@10: Tcy = VSUB(Tcw, Tcx); Chris@10: Tcz = VFNMS(LDK(KP831469612), Tcy, VMUL(LDK(KP555570233), Tcv)); Chris@10: TcL = VFMA(LDK(KP831469612), Tcv, VMUL(LDK(KP555570233), Tcy)); Chris@10: } Chris@10: { Chris@10: V Tcl, TcA, TcN, TcO; Chris@10: Tcl = VADD(Tcd, Tck); Chris@10: TcA = VADD(Tcs, Tcz); Chris@10: Tff = VSUB(Tcl, TcA); Chris@10: STM4(&(ro[42]), Tff, ovs, &(ro[0])); Chris@10: Tfg = VADD(Tcl, TcA); Chris@10: STM4(&(ro[10]), Tfg, ovs, &(ro[0])); Chris@10: TcN = VADD(TcD, TcG); Chris@10: TcO = VADD(TcK, TcL); Chris@10: Tfh = VSUB(TcN, TcO); Chris@10: STM4(&(io[42]), Tfh, ovs, &(io[0])); Chris@10: Tfi = VADD(TcN, TcO); Chris@10: STM4(&(io[10]), Tfi, ovs, &(io[0])); Chris@10: } Chris@10: { Chris@10: V TcH, TcI, TcJ, TcM; Chris@10: TcH = VSUB(TcD, TcG); Chris@10: TcI = 
VSUB(Tcz, Tcs); Chris@10: Tfj = VSUB(TcH, TcI); Chris@10: STM4(&(io[58]), Tfj, ovs, &(io[0])); Chris@10: Tfk = VADD(TcH, TcI); Chris@10: STM4(&(io[26]), Tfk, ovs, &(io[0])); Chris@10: TcJ = VSUB(Tcd, Tck); Chris@10: TcM = VSUB(TcK, TcL); Chris@10: Tfl = VSUB(TcJ, TcM); Chris@10: STM4(&(ro[58]), Tfl, ovs, &(ro[0])); Chris@10: Tfm = VADD(TcJ, TcM); Chris@10: STM4(&(ro[26]), Tfm, ovs, &(ro[0])); Chris@10: } Chris@10: { Chris@10: V TcR, TcY, Td7, Td8; Chris@10: TcR = VADD(TcP, TcQ); Chris@10: TcY = VADD(TcU, TcX); Chris@10: Tfn = VSUB(TcR, TcY); Chris@10: STM4(&(ro[34]), Tfn, ovs, &(ro[0])); Chris@10: Tfo = VADD(TcR, TcY); Chris@10: STM4(&(ro[2]), Tfo, ovs, &(ro[0])); Chris@10: Td7 = VADD(TcZ, Td0); Chris@10: Td8 = VADD(Td4, Td5); Chris@10: Tfp = VSUB(Td7, Td8); Chris@10: STM4(&(io[34]), Tfp, ovs, &(io[0])); Chris@10: Tfq = VADD(Td7, Td8); Chris@10: STM4(&(io[2]), Tfq, ovs, &(io[0])); Chris@10: } Chris@10: { Chris@10: V Td1, Td2, Td3, Td6; Chris@10: Td1 = VSUB(TcZ, Td0); Chris@10: Td2 = VSUB(TcX, TcU); Chris@10: Tfr = VSUB(Td1, Td2); Chris@10: STM4(&(io[50]), Tfr, ovs, &(io[0])); Chris@10: Tfs = VADD(Td1, Td2); Chris@10: STM4(&(io[18]), Tfs, ovs, &(io[0])); Chris@10: Td3 = VSUB(TcP, TcQ); Chris@10: Td6 = VSUB(Td4, Td5); Chris@10: Tft = VSUB(Td3, Td6); Chris@10: STM4(&(ro[50]), Tft, ovs, &(ro[0])); Chris@10: Tfu = VADD(Td3, Td6); Chris@10: STM4(&(ro[18]), Tfu, ovs, &(ro[0])); Chris@10: } Chris@10: } Chris@10: { Chris@10: V Tfv, Tfw, Tfx, Tfy, Tfz, TfA, TfB, TfC, TfD, TfE, TfF, TfG, TfH, TfI, TfJ; Chris@10: V TfK, TfL, TfM, TfN, TfO, TfP, TfQ, TfR, TfS, TfT, TfU, TfV, TfW, TfX, TfY; Chris@10: V TfZ, Tg0; Chris@10: { Chris@10: V Tap, TbR, TbF, Tc1, TaE, Tc2, TbZ, Tc7, Tb6, TbM, TbI, TbS, TbW, Tc6, Tbx; Chris@10: V TbN, Tao, TbE; Chris@10: Tao = VMUL(LDK(KP707106781), VSUB(Tak, Tan)); Chris@10: Tap = VSUB(Tah, Tao); Chris@10: TbR = VADD(Tah, Tao); Chris@10: TbE = VMUL(LDK(KP707106781), VSUB(TbC, TbD)); Chris@10: TbF = VSUB(TbB, TbE); Chris@10: Tc1 = VADD(TbB, TbE); Chris@10: { Chris@10: V Taw, TaD, TbX, TbY; Chris@10: Taw = VFNMS(LDK(KP923879532), Tav, VMUL(LDK(KP382683432), Tas)); Chris@10: TaD = VFMA(LDK(KP382683432), Taz, VMUL(LDK(KP923879532), TaC)); Chris@10: TaE = VSUB(Taw, TaD); Chris@10: Tc2 = VADD(Taw, TaD); Chris@10: TbX = VADD(Tbb, Tbm); Chris@10: TbY = VADD(Tbs, Tbv); Chris@10: TbZ = VFNMS(LDK(KP555570233), TbY, VMUL(LDK(KP831469612), TbX)); Chris@10: Tc7 = VFMA(LDK(KP831469612), TbY, VMUL(LDK(KP555570233), TbX)); Chris@10: } Chris@10: { Chris@10: V TaW, Tb5, TbG, TbH; Chris@10: TaW = VSUB(TaK, TaV); Chris@10: Tb5 = VSUB(Tb1, Tb4); Chris@10: Tb6 = VFMA(LDK(KP980785280), TaW, VMUL(LDK(KP195090322), Tb5)); Chris@10: TbM = VFNMS(LDK(KP980785280), Tb5, VMUL(LDK(KP195090322), TaW)); Chris@10: TbG = VFNMS(LDK(KP923879532), Taz, VMUL(LDK(KP382683432), TaC)); Chris@10: TbH = VFMA(LDK(KP923879532), Tas, VMUL(LDK(KP382683432), Tav)); Chris@10: TbI = VSUB(TbG, TbH); Chris@10: TbS = VADD(TbH, TbG); Chris@10: } Chris@10: { Chris@10: V TbU, TbV, Tbn, Tbw; Chris@10: TbU = VADD(TaK, TaV); Chris@10: TbV = VADD(Tb1, Tb4); Chris@10: TbW = VFMA(LDK(KP555570233), TbU, VMUL(LDK(KP831469612), TbV)); Chris@10: Tc6 = VFNMS(LDK(KP555570233), TbV, VMUL(LDK(KP831469612), TbU)); Chris@10: Tbn = VSUB(Tbb, Tbm); Chris@10: Tbw = VSUB(Tbs, Tbv); Chris@10: Tbx = VFNMS(LDK(KP980785280), Tbw, VMUL(LDK(KP195090322), Tbn)); Chris@10: TbN = VFMA(LDK(KP195090322), Tbw, VMUL(LDK(KP980785280), Tbn)); Chris@10: } Chris@10: { Chris@10: V TaF, Tby, TbP, TbQ; Chris@10: TaF = VADD(Tap, TaE); Chris@10: Tby = VADD(Tb6, Tbx); 
Chris@10: Tfv = VSUB(TaF, Tby); Chris@10: STM4(&(ro[46]), Tfv, ovs, &(ro[0])); Chris@10: Tfw = VADD(TaF, Tby); Chris@10: STM4(&(ro[14]), Tfw, ovs, &(ro[0])); Chris@10: TbP = VADD(TbF, TbI); Chris@10: TbQ = VADD(TbM, TbN); Chris@10: Tfx = VSUB(TbP, TbQ); Chris@10: STM4(&(io[46]), Tfx, ovs, &(io[0])); Chris@10: Tfy = VADD(TbP, TbQ); Chris@10: STM4(&(io[14]), Tfy, ovs, &(io[0])); Chris@10: } Chris@10: { Chris@10: V TbJ, TbK, TbL, TbO; Chris@10: TbJ = VSUB(TbF, TbI); Chris@10: TbK = VSUB(Tbx, Tb6); Chris@10: Tfz = VSUB(TbJ, TbK); Chris@10: STM4(&(io[62]), Tfz, ovs, &(io[0])); Chris@10: TfA = VADD(TbJ, TbK); Chris@10: STM4(&(io[30]), TfA, ovs, &(io[0])); Chris@10: TbL = VSUB(Tap, TaE); Chris@10: TbO = VSUB(TbM, TbN); Chris@10: TfB = VSUB(TbL, TbO); Chris@10: STM4(&(ro[62]), TfB, ovs, &(ro[0])); Chris@10: TfC = VADD(TbL, TbO); Chris@10: STM4(&(ro[30]), TfC, ovs, &(ro[0])); Chris@10: } Chris@10: { Chris@10: V TbT, Tc0, Tc9, Tca; Chris@10: TbT = VADD(TbR, TbS); Chris@10: Tc0 = VADD(TbW, TbZ); Chris@10: TfD = VSUB(TbT, Tc0); Chris@10: STM4(&(ro[38]), TfD, ovs, &(ro[0])); Chris@10: TfE = VADD(TbT, Tc0); Chris@10: STM4(&(ro[6]), TfE, ovs, &(ro[0])); Chris@10: Tc9 = VADD(Tc1, Tc2); Chris@10: Tca = VADD(Tc6, Tc7); Chris@10: TfF = VSUB(Tc9, Tca); Chris@10: STM4(&(io[38]), TfF, ovs, &(io[0])); Chris@10: TfG = VADD(Tc9, Tca); Chris@10: STM4(&(io[6]), TfG, ovs, &(io[0])); Chris@10: } Chris@10: { Chris@10: V Tc3, Tc4, Tc5, Tc8; Chris@10: Tc3 = VSUB(Tc1, Tc2); Chris@10: Tc4 = VSUB(TbZ, TbW); Chris@10: TfH = VSUB(Tc3, Tc4); Chris@10: STM4(&(io[54]), TfH, ovs, &(io[0])); Chris@10: TfI = VADD(Tc3, Tc4); Chris@10: STM4(&(io[22]), TfI, ovs, &(io[0])); Chris@10: Tc5 = VSUB(TbR, TbS); Chris@10: Tc8 = VSUB(Tc6, Tc7); Chris@10: TfJ = VSUB(Tc5, Tc8); Chris@10: STM4(&(ro[54]), TfJ, ovs, &(ro[0])); Chris@10: TfK = VADD(Tc5, Tc8); Chris@10: STM4(&(ro[22]), TfK, ovs, &(ro[0])); Chris@10: } Chris@10: } Chris@10: { Chris@10: V T6F, T7h, T7m, T7w, T7p, T7x, T6M, T7s, T6U, T7c, T75, T7r, T78, T7i, T71; Chris@10: V T7d; Chris@10: { Chris@10: V T6D, T6E, T7k, T7l; Chris@10: T6D = VADD(T37, T3e); Chris@10: T6E = VADD(T65, T64); Chris@10: T6F = VSUB(T6D, T6E); Chris@10: T7h = VADD(T6D, T6E); Chris@10: T7k = VADD(T6O, T6P); Chris@10: T7l = VADD(T6R, T6S); Chris@10: T7m = VFMA(LDK(KP956940335), T7k, VMUL(LDK(KP290284677), T7l)); Chris@10: T7w = VFNMS(LDK(KP290284677), T7k, VMUL(LDK(KP956940335), T7l)); Chris@10: } Chris@10: { Chris@10: V T7n, T7o, T6I, T6L; Chris@10: T7n = VADD(T6V, T6W); Chris@10: T7o = VADD(T6Y, T6Z); Chris@10: T7p = VFNMS(LDK(KP290284677), T7o, VMUL(LDK(KP956940335), T7n)); Chris@10: T7x = VFMA(LDK(KP290284677), T7n, VMUL(LDK(KP956940335), T7o)); Chris@10: T6I = VFNMS(LDK(KP555570233), T6H, VMUL(LDK(KP831469612), T6G)); Chris@10: T6L = VFMA(LDK(KP831469612), T6J, VMUL(LDK(KP555570233), T6K)); Chris@10: T6M = VSUB(T6I, T6L); Chris@10: T7s = VADD(T6I, T6L); Chris@10: } Chris@10: { Chris@10: V T6Q, T6T, T73, T74; Chris@10: T6Q = VSUB(T6O, T6P); Chris@10: T6T = VSUB(T6R, T6S); Chris@10: T6U = VFMA(LDK(KP471396736), T6Q, VMUL(LDK(KP881921264), T6T)); Chris@10: T7c = VFNMS(LDK(KP881921264), T6Q, VMUL(LDK(KP471396736), T6T)); Chris@10: T73 = VADD(T5Z, T62); Chris@10: T74 = VADD(T3m, T3t); Chris@10: T75 = VSUB(T73, T74); Chris@10: T7r = VADD(T73, T74); Chris@10: } Chris@10: { Chris@10: V T76, T77, T6X, T70; Chris@10: T76 = VFNMS(LDK(KP555570233), T6J, VMUL(LDK(KP831469612), T6K)); Chris@10: T77 = VFMA(LDK(KP555570233), T6G, VMUL(LDK(KP831469612), T6H)); Chris@10: T78 = VSUB(T76, T77); Chris@10: T7i = VADD(T77, T76); 
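/* The cos/sin pairs used in this block and the next are the odd twiddle
   angles 2*pi*k/64 for k = 1, 3, 5, 7:
   KP995184726/KP098017140 = cos/sin(pi/32),
   KP956940335/KP290284677 = cos/sin(3*pi/32),
   KP881921264/KP471396736 = cos/sin(5*pi/32),
   KP773010453/KP634393284 = cos/sin(7*pi/32). */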
Chris@10: T6X = VSUB(T6V, T6W); Chris@10: T70 = VSUB(T6Y, T6Z); Chris@10: T71 = VFNMS(LDK(KP881921264), T70, VMUL(LDK(KP471396736), T6X)); Chris@10: T7d = VFMA(LDK(KP881921264), T6X, VMUL(LDK(KP471396736), T70)); Chris@10: } Chris@10: { Chris@10: V T6N, T72, T7f, T7g; Chris@10: T6N = VADD(T6F, T6M); Chris@10: T72 = VADD(T6U, T71); Chris@10: TfL = VSUB(T6N, T72); Chris@10: STM4(&(ro[43]), TfL, ovs, &(ro[1])); Chris@10: TfM = VADD(T6N, T72); Chris@10: STM4(&(ro[11]), TfM, ovs, &(ro[1])); Chris@10: T7f = VADD(T75, T78); Chris@10: T7g = VADD(T7c, T7d); Chris@10: TfN = VSUB(T7f, T7g); Chris@10: STM4(&(io[43]), TfN, ovs, &(io[1])); Chris@10: TfO = VADD(T7f, T7g); Chris@10: STM4(&(io[11]), TfO, ovs, &(io[1])); Chris@10: } Chris@10: { Chris@10: V T79, T7a, T7b, T7e; Chris@10: T79 = VSUB(T75, T78); Chris@10: T7a = VSUB(T71, T6U); Chris@10: TfP = VSUB(T79, T7a); Chris@10: STM4(&(io[59]), TfP, ovs, &(io[1])); Chris@10: TfQ = VADD(T79, T7a); Chris@10: STM4(&(io[27]), TfQ, ovs, &(io[1])); Chris@10: T7b = VSUB(T6F, T6M); Chris@10: T7e = VSUB(T7c, T7d); Chris@10: TfR = VSUB(T7b, T7e); Chris@10: STM4(&(ro[59]), TfR, ovs, &(ro[1])); Chris@10: TfS = VADD(T7b, T7e); Chris@10: STM4(&(ro[27]), TfS, ovs, &(ro[1])); Chris@10: } Chris@10: { Chris@10: V T7j, T7q, T7z, T7A; Chris@10: T7j = VADD(T7h, T7i); Chris@10: T7q = VADD(T7m, T7p); Chris@10: TfT = VSUB(T7j, T7q); Chris@10: STM4(&(ro[35]), TfT, ovs, &(ro[1])); Chris@10: TfU = VADD(T7j, T7q); Chris@10: STM4(&(ro[3]), TfU, ovs, &(ro[1])); Chris@10: T7z = VADD(T7r, T7s); Chris@10: T7A = VADD(T7w, T7x); Chris@10: TfV = VSUB(T7z, T7A); Chris@10: STM4(&(io[35]), TfV, ovs, &(io[1])); Chris@10: TfW = VADD(T7z, T7A); Chris@10: STM4(&(io[3]), TfW, ovs, &(io[1])); Chris@10: } Chris@10: { Chris@10: V T7t, T7u, T7v, T7y; Chris@10: T7t = VSUB(T7r, T7s); Chris@10: T7u = VSUB(T7p, T7m); Chris@10: TfX = VSUB(T7t, T7u); Chris@10: STM4(&(io[51]), TfX, ovs, &(io[1])); Chris@10: TfY = VADD(T7t, T7u); Chris@10: STM4(&(io[19]), TfY, ovs, &(io[1])); Chris@10: T7v = VSUB(T7h, T7i); Chris@10: T7y = VSUB(T7w, T7x); Chris@10: TfZ = VSUB(T7v, T7y); Chris@10: STM4(&(ro[51]), TfZ, ovs, &(ro[1])); Chris@10: Tg0 = VADD(T7v, T7y); Chris@10: STM4(&(ro[19]), Tg0, ovs, &(ro[1])); Chris@10: } Chris@10: } Chris@10: { Chris@10: V T9j, T9V, Ta0, Taa, Ta3, Tab, T9q, Ta6, T9y, T9Q, T9J, Ta5, T9M, T9W, T9F; Chris@10: V T9R; Chris@10: { Chris@10: V T9h, T9i, T9Y, T9Z; Chris@10: T9h = VADD(T7B, T7C); Chris@10: T9i = VADD(T8J, T8I); Chris@10: T9j = VSUB(T9h, T9i); Chris@10: T9V = VADD(T9h, T9i); Chris@10: T9Y = VADD(T9s, T9t); Chris@10: T9Z = VADD(T9v, T9w); Chris@10: Ta0 = VFMA(LDK(KP995184726), T9Y, VMUL(LDK(KP098017140), T9Z)); Chris@10: Taa = VFNMS(LDK(KP098017140), T9Y, VMUL(LDK(KP995184726), T9Z)); Chris@10: } Chris@10: { Chris@10: V Ta1, Ta2, T9m, T9p; Chris@10: Ta1 = VADD(T9z, T9A); Chris@10: Ta2 = VADD(T9C, T9D); Chris@10: Ta3 = VFNMS(LDK(KP098017140), Ta2, VMUL(LDK(KP995184726), Ta1)); Chris@10: Tab = VFMA(LDK(KP098017140), Ta1, VMUL(LDK(KP995184726), Ta2)); Chris@10: T9m = VFNMS(LDK(KP195090322), T9l, VMUL(LDK(KP980785280), T9k)); Chris@10: T9p = VFMA(LDK(KP195090322), T9n, VMUL(LDK(KP980785280), T9o)); Chris@10: T9q = VSUB(T9m, T9p); Chris@10: Ta6 = VADD(T9m, T9p); Chris@10: } Chris@10: { Chris@10: V T9u, T9x, T9H, T9I; Chris@10: T9u = VSUB(T9s, T9t); Chris@10: T9x = VSUB(T9v, T9w); Chris@10: T9y = VFMA(LDK(KP634393284), T9u, VMUL(LDK(KP773010453), T9x)); Chris@10: T9Q = VFNMS(LDK(KP773010453), T9u, VMUL(LDK(KP634393284), T9x)); Chris@10: T9H = VADD(T8F, T8G); Chris@10: T9I = VADD(T7G, T7J); 
Chris@10: T9J = VSUB(T9H, T9I); Chris@10: Ta5 = VADD(T9H, T9I); Chris@10: } Chris@10: { Chris@10: V T9K, T9L, T9B, T9E; Chris@10: T9K = VFNMS(LDK(KP195090322), T9o, VMUL(LDK(KP980785280), T9n)); Chris@10: T9L = VFMA(LDK(KP980785280), T9l, VMUL(LDK(KP195090322), T9k)); Chris@10: T9M = VSUB(T9K, T9L); Chris@10: T9W = VADD(T9L, T9K); Chris@10: T9B = VSUB(T9z, T9A); Chris@10: T9E = VSUB(T9C, T9D); Chris@10: T9F = VFNMS(LDK(KP773010453), T9E, VMUL(LDK(KP634393284), T9B)); Chris@10: T9R = VFMA(LDK(KP773010453), T9B, VMUL(LDK(KP634393284), T9E)); Chris@10: } Chris@10: { Chris@10: V T9r, T9G, Tg1, Tg2; Chris@10: T9r = VADD(T9j, T9q); Chris@10: T9G = VADD(T9y, T9F); Chris@10: Tg1 = VSUB(T9r, T9G); Chris@10: STM4(&(ro[41]), Tg1, ovs, &(ro[1])); Chris@10: STN4(&(ro[40]), TeR, Tg1, Tff, TfL, ovs); Chris@10: Tg2 = VADD(T9r, T9G); Chris@10: STM4(&(ro[9]), Tg2, ovs, &(ro[1])); Chris@10: STN4(&(ro[8]), TeS, Tg2, Tfg, TfM, ovs); Chris@10: } Chris@10: { Chris@10: V T9T, T9U, Tg3, Tg4; Chris@10: T9T = VADD(T9J, T9M); Chris@10: T9U = VADD(T9Q, T9R); Chris@10: Tg3 = VSUB(T9T, T9U); Chris@10: STM4(&(io[41]), Tg3, ovs, &(io[1])); Chris@10: STN4(&(io[40]), TeT, Tg3, Tfh, TfN, ovs); Chris@10: Tg4 = VADD(T9T, T9U); Chris@10: STM4(&(io[9]), Tg4, ovs, &(io[1])); Chris@10: STN4(&(io[8]), TeU, Tg4, Tfi, TfO, ovs); Chris@10: } Chris@10: { Chris@10: V T9N, T9O, Tg5, Tg6; Chris@10: T9N = VSUB(T9J, T9M); Chris@10: T9O = VSUB(T9F, T9y); Chris@10: Tg5 = VSUB(T9N, T9O); Chris@10: STM4(&(io[57]), Tg5, ovs, &(io[1])); Chris@10: STN4(&(io[56]), TeV, Tg5, Tfj, TfP, ovs); Chris@10: Tg6 = VADD(T9N, T9O); Chris@10: STM4(&(io[25]), Tg6, ovs, &(io[1])); Chris@10: STN4(&(io[24]), TeW, Tg6, Tfk, TfQ, ovs); Chris@10: } Chris@10: { Chris@10: V T9P, T9S, Tg7, Tg8; Chris@10: T9P = VSUB(T9j, T9q); Chris@10: T9S = VSUB(T9Q, T9R); Chris@10: Tg7 = VSUB(T9P, T9S); Chris@10: STM4(&(ro[57]), Tg7, ovs, &(ro[1])); Chris@10: STN4(&(ro[56]), TeX, Tg7, Tfl, TfR, ovs); Chris@10: Tg8 = VADD(T9P, T9S); Chris@10: STM4(&(ro[25]), Tg8, ovs, &(ro[1])); Chris@10: STN4(&(ro[24]), TeY, Tg8, Tfm, TfS, ovs); Chris@10: } Chris@10: { Chris@10: V T9X, Ta4, Tg9, Tga; Chris@10: T9X = VADD(T9V, T9W); Chris@10: Ta4 = VADD(Ta0, Ta3); Chris@10: Tg9 = VSUB(T9X, Ta4); Chris@10: STM4(&(ro[33]), Tg9, ovs, &(ro[1])); Chris@10: STN4(&(ro[32]), TeJ, Tg9, Tfn, TfT, ovs); Chris@10: Tga = VADD(T9X, Ta4); Chris@10: STM4(&(ro[1]), Tga, ovs, &(ro[1])); Chris@10: STN4(&(ro[0]), TeL, Tga, Tfo, TfU, ovs); Chris@10: } Chris@10: { Chris@10: V Tad, Tae, Tgb, Tgc; Chris@10: Tad = VADD(Ta5, Ta6); Chris@10: Tae = VADD(Taa, Tab); Chris@10: Tgb = VSUB(Tad, Tae); Chris@10: STM4(&(io[33]), Tgb, ovs, &(io[1])); Chris@10: STN4(&(io[32]), TeK, Tgb, Tfp, TfV, ovs); Chris@10: Tgc = VADD(Tad, Tae); Chris@10: STM4(&(io[1]), Tgc, ovs, &(io[1])); Chris@10: STN4(&(io[0]), TeM, Tgc, Tfq, TfW, ovs); Chris@10: } Chris@10: { Chris@10: V Ta7, Ta8, Tgd, Tge; Chris@10: Ta7 = VSUB(Ta5, Ta6); Chris@10: Ta8 = VSUB(Ta3, Ta0); Chris@10: Tgd = VSUB(Ta7, Ta8); Chris@10: STM4(&(io[49]), Tgd, ovs, &(io[1])); Chris@10: STN4(&(io[48]), TeP, Tgd, Tfr, TfX, ovs); Chris@10: Tge = VADD(Ta7, Ta8); Chris@10: STM4(&(io[17]), Tge, ovs, &(io[1])); Chris@10: STN4(&(io[16]), TeN, Tge, Tfs, TfY, ovs); Chris@10: } Chris@10: { Chris@10: V Ta9, Tac, Tgf, Tgg; Chris@10: Ta9 = VSUB(T9V, T9W); Chris@10: Tac = VSUB(Taa, Tab); Chris@10: Tgf = VSUB(Ta9, Tac); Chris@10: STM4(&(ro[49]), Tgf, ovs, &(ro[1])); Chris@10: STN4(&(ro[48]), TeQ, Tgf, Tft, TfZ, ovs); Chris@10: Tgg = VADD(Ta9, Tac); Chris@10: STM4(&(ro[17]), Tgg, ovs, &(ro[1])); Chris@10: 
STN4(&(ro[16]), TeO, Tgg, Tfu, Tg0, ovs); Chris@10: } Chris@10: } Chris@10: { Chris@10: V Tgh, Tgi, Tgj, Tgk, Tgl, Tgm, Tgn, Tgo, Tgp, Tgq, Tgr, Tgs, Tgt, Tgu, Tgv; Chris@10: V Tgw; Chris@10: { Chris@10: V T3v, T6j, T6o, T6y, T6r, T6z, T48, T6u, T52, T6e, T67, T6t, T6a, T6k, T5V; Chris@10: V T6f; Chris@10: { Chris@10: V T3f, T3u, T6m, T6n; Chris@10: T3f = VSUB(T37, T3e); Chris@10: T3u = VSUB(T3m, T3t); Chris@10: T3v = VSUB(T3f, T3u); Chris@10: T6j = VADD(T3f, T3u); Chris@10: T6m = VADD(T4q, T4N); Chris@10: T6n = VADD(T4X, T50); Chris@10: T6o = VFMA(LDK(KP634393284), T6m, VMUL(LDK(KP773010453), T6n)); Chris@10: T6y = VFNMS(LDK(KP634393284), T6n, VMUL(LDK(KP773010453), T6m)); Chris@10: } Chris@10: { Chris@10: V T6p, T6q, T3O, T47; Chris@10: T6p = VADD(T5j, T5G); Chris@10: T6q = VADD(T5Q, T5T); Chris@10: T6r = VFNMS(LDK(KP634393284), T6q, VMUL(LDK(KP773010453), T6p)); Chris@10: T6z = VFMA(LDK(KP773010453), T6q, VMUL(LDK(KP634393284), T6p)); Chris@10: T3O = VFNMS(LDK(KP980785280), T3N, VMUL(LDK(KP195090322), T3G)); Chris@10: T47 = VFMA(LDK(KP195090322), T3Z, VMUL(LDK(KP980785280), T46)); Chris@10: T48 = VSUB(T3O, T47); Chris@10: T6u = VADD(T3O, T47); Chris@10: } Chris@10: { Chris@10: V T4O, T51, T63, T66; Chris@10: T4O = VSUB(T4q, T4N); Chris@10: T51 = VSUB(T4X, T50); Chris@10: T52 = VFMA(LDK(KP995184726), T4O, VMUL(LDK(KP098017140), T51)); Chris@10: T6e = VFNMS(LDK(KP995184726), T51, VMUL(LDK(KP098017140), T4O)); Chris@10: T63 = VSUB(T5Z, T62); Chris@10: T66 = VSUB(T64, T65); Chris@10: T67 = VSUB(T63, T66); Chris@10: T6t = VADD(T63, T66); Chris@10: } Chris@10: { Chris@10: V T68, T69, T5H, T5U; Chris@10: T68 = VFNMS(LDK(KP980785280), T3Z, VMUL(LDK(KP195090322), T46)); Chris@10: T69 = VFMA(LDK(KP980785280), T3G, VMUL(LDK(KP195090322), T3N)); Chris@10: T6a = VSUB(T68, T69); Chris@10: T6k = VADD(T69, T68); Chris@10: T5H = VSUB(T5j, T5G); Chris@10: T5U = VSUB(T5Q, T5T); Chris@10: T5V = VFNMS(LDK(KP995184726), T5U, VMUL(LDK(KP098017140), T5H)); Chris@10: T6f = VFMA(LDK(KP098017140), T5U, VMUL(LDK(KP995184726), T5H)); Chris@10: } Chris@10: { Chris@10: V T49, T5W, T6h, T6i; Chris@10: T49 = VADD(T3v, T48); Chris@10: T5W = VADD(T52, T5V); Chris@10: Tgh = VSUB(T49, T5W); Chris@10: STM4(&(ro[47]), Tgh, ovs, &(ro[1])); Chris@10: Tgi = VADD(T49, T5W); Chris@10: STM4(&(ro[15]), Tgi, ovs, &(ro[1])); Chris@10: T6h = VADD(T67, T6a); Chris@10: T6i = VADD(T6e, T6f); Chris@10: Tgj = VSUB(T6h, T6i); Chris@10: STM4(&(io[47]), Tgj, ovs, &(io[1])); Chris@10: Tgk = VADD(T6h, T6i); Chris@10: STM4(&(io[15]), Tgk, ovs, &(io[1])); Chris@10: } Chris@10: { Chris@10: V T6b, T6c, T6d, T6g; Chris@10: T6b = VSUB(T67, T6a); Chris@10: T6c = VSUB(T5V, T52); Chris@10: Tgl = VSUB(T6b, T6c); Chris@10: STM4(&(io[63]), Tgl, ovs, &(io[1])); Chris@10: Tgm = VADD(T6b, T6c); Chris@10: STM4(&(io[31]), Tgm, ovs, &(io[1])); Chris@10: T6d = VSUB(T3v, T48); Chris@10: T6g = VSUB(T6e, T6f); Chris@10: Tgn = VSUB(T6d, T6g); Chris@10: STM4(&(ro[63]), Tgn, ovs, &(ro[1])); Chris@10: Tgo = VADD(T6d, T6g); Chris@10: STM4(&(ro[31]), Tgo, ovs, &(ro[1])); Chris@10: } Chris@10: { Chris@10: V T6l, T6s, T6B, T6C; Chris@10: T6l = VADD(T6j, T6k); Chris@10: T6s = VADD(T6o, T6r); Chris@10: Tgp = VSUB(T6l, T6s); Chris@10: STM4(&(ro[39]), Tgp, ovs, &(ro[1])); Chris@10: Tgq = VADD(T6l, T6s); Chris@10: STM4(&(ro[7]), Tgq, ovs, &(ro[1])); Chris@10: T6B = VADD(T6t, T6u); Chris@10: T6C = VADD(T6y, T6z); Chris@10: Tgr = VSUB(T6B, T6C); Chris@10: STM4(&(io[39]), Tgr, ovs, &(io[1])); Chris@10: Tgs = VADD(T6B, T6C); Chris@10: STM4(&(io[7]), Tgs, ovs, &(io[1])); Chris@10: } 
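/* A reading of the -store-multiple 4 scheme, hedged: each STM4 writes one SIMD
   vector of outputs at stride ovs, and each STN4 then names the four vectors
   previously written to four consecutive outputs -- e.g. the
   STN4(&(ro[16]), TeO, Tgg, Tfu, Tg0, ovs) just above groups ro[16..19] -- so
   that, depending on the SIMD support header, the four stores can presumably be
   committed as one transposed store while the other macro expands to nothing. */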
                                   {
                                        V T6v, T6w, T6x, T6A;
                                        T6v = VSUB(T6t, T6u);
                                        T6w = VSUB(T6r, T6o);
                                        Tgt = VSUB(T6v, T6w);
                                        STM4(&(io[55]), Tgt, ovs, &(io[1]));
                                        Tgu = VADD(T6v, T6w);
                                        STM4(&(io[23]), Tgu, ovs, &(io[1]));
                                        T6x = VSUB(T6j, T6k);
                                        T6A = VSUB(T6y, T6z);
                                        Tgv = VSUB(T6x, T6A);
                                        STM4(&(ro[55]), Tgv, ovs, &(ro[1]));
                                        Tgw = VADD(T6x, T6A);
                                        STM4(&(ro[23]), Tgw, ovs, &(ro[1]));
                                   }
                              }
                              {
                                   V T7L, T8X, T92, T9c, T95, T9d, T80, T98, T8k, T8S, T8L, T97, T8O, T8Y, T8D;
                                   V T8T;
                                   {
                                        V T7D, T7K, T90, T91;
                                        T7D = VSUB(T7B, T7C);
                                        T7K = VSUB(T7G, T7J);
                                        T7L = VSUB(T7D, T7K);
                                        T8X = VADD(T7D, T7K);
                                        T90 = VADD(T84, T8b);
                                        T91 = VADD(T8f, T8i);
                                        T92 = VFMA(LDK(KP471396736), T90, VMUL(LDK(KP881921264), T91));
                                        T9c = VFNMS(LDK(KP471396736), T91, VMUL(LDK(KP881921264), T90));
                                   }
                                   {
                                        V T93, T94, T7S, T7Z;
                                        T93 = VADD(T8n, T8u);
                                        T94 = VADD(T8y, T8B);
                                        T95 = VFNMS(LDK(KP471396736), T94, VMUL(LDK(KP881921264), T93));
                                        T9d = VFMA(LDK(KP881921264), T94, VMUL(LDK(KP471396736), T93));
                                        T7S = VFNMS(LDK(KP831469612), T7R, VMUL(LDK(KP555570233), T7O));
                                        T7Z = VFMA(LDK(KP831469612), T7V, VMUL(LDK(KP555570233), T7Y));
                                        T80 = VSUB(T7S, T7Z);
                                        T98 = VADD(T7S, T7Z);
                                   }
                                   {
                                        V T8c, T8j, T8H, T8K;
                                        T8c = VSUB(T84, T8b);
                                        T8j = VSUB(T8f, T8i);
                                        T8k = VFMA(LDK(KP956940335), T8c, VMUL(LDK(KP290284677), T8j));
                                        T8S = VFNMS(LDK(KP956940335), T8j, VMUL(LDK(KP290284677), T8c));
                                        T8H = VSUB(T8F, T8G);
                                        T8K = VSUB(T8I, T8J);
                                        T8L = VSUB(T8H, T8K);
                                        T97 = VADD(T8H, T8K);
                                   }
                                   {
                                        V T8M, T8N, T8v, T8C;
                                        T8M = VFNMS(LDK(KP831469612), T7Y, VMUL(LDK(KP555570233), T7V));
                                        T8N = VFMA(LDK(KP555570233), T7R, VMUL(LDK(KP831469612), T7O));
                                        T8O = VSUB(T8M, T8N);
                                        T8Y = VADD(T8N, T8M);
                                        T8v = VSUB(T8n, T8u);
                                        T8C = VSUB(T8y, T8B);
                                        T8D = VFNMS(LDK(KP956940335), T8C, VMUL(LDK(KP290284677), T8v));
                                        T8T = VFMA(LDK(KP290284677), T8C, VMUL(LDK(KP956940335), T8v));
                                   }
                                   {
                                        V T81, T8E, Tgx, Tgy;
                                        T81 = VADD(T7L, T80);
                                        T8E = VADD(T8k, T8D);
                                        Tgx = VSUB(T81, T8E);
                                        STM4(&(ro[45]), Tgx, ovs, &(ro[1]));
                                        STN4(&(ro[44]), TeZ, Tgx, Tfv, Tgh, ovs);
                                        Tgy = VADD(T81, T8E);
                                        STM4(&(ro[13]), Tgy, ovs, &(ro[1]));
                                        STN4(&(ro[12]), Tf0, Tgy, Tfw, Tgi, ovs);
                                   }
                                   {
                                        V T8V, T8W, Tgz, TgA;
                                        T8V = VADD(T8L, T8O);
                                        T8W = VADD(T8S, T8T);
                                        Tgz = VSUB(T8V, T8W);
                                        STM4(&(io[45]), Tgz, ovs, &(io[1]));
                                        STN4(&(io[44]), Tf1, Tgz, Tfx, Tgj, ovs);
                                        TgA = VADD(T8V, T8W);
                                        STM4(&(io[13]), TgA, ovs, &(io[1]));
                                        STN4(&(io[12]), Tf2, TgA, Tfy, Tgk, ovs);
                                   }
                                   {
                                        V T8P, T8Q, TgB, TgC;
                                        T8P = VSUB(T8L, T8O);
                                        T8Q = VSUB(T8D, T8k);
                                        TgB = VSUB(T8P, T8Q);
                                        STM4(&(io[61]), TgB, ovs, &(io[1]));
                                        STN4(&(io[60]), Tf3, TgB, Tfz, Tgl, ovs);
                                        TgC = VADD(T8P, T8Q);
                                        STM4(&(io[29]), TgC, ovs, &(io[1]));
                                        STN4(&(io[28]), Tf4, TgC, TfA, Tgm, ovs);
                                   }
                                   {
                                        V T8R, T8U, TgD, TgE;
                                        T8R = VSUB(T7L, T80);
                                        T8U = VSUB(T8S, T8T);
                                        TgD = VSUB(T8R, T8U);
                                        STM4(&(ro[61]), TgD, ovs, &(ro[1]));
                                        STN4(&(ro[60]), Tf5, TgD, TfB, Tgn, ovs);
                                        TgE = VADD(T8R, T8U);
                                        STM4(&(ro[29]), TgE, ovs, &(ro[1]));
                                        STN4(&(ro[28]), Tf6, TgE, TfC, Tgo, ovs);
                                   }
                                   {
                                        V T8Z, T96, TgF, TgG;
                                        T8Z = VADD(T8X, T8Y);
                                        T96 = VADD(T92, T95);
                                        TgF = VSUB(T8Z, T96);
                                        STM4(&(ro[37]), TgF, ovs, &(ro[1]));
                                        STN4(&(ro[36]), Tf7, TgF, TfD, Tgp, ovs);
                                        TgG = VADD(T8Z, T96);
                                        STM4(&(ro[5]), TgG, ovs, &(ro[1]));
                                        STN4(&(ro[4]), Tf8, TgG, TfE, Tgq, ovs);
                                   }
                                   {
                                        V T9f, T9g, TgH, TgI;
                                        T9f = VADD(T97, T98);
                                        T9g = VADD(T9c, T9d);
                                        TgH = VSUB(T9f, T9g);
                                        STM4(&(io[37]), TgH, ovs, &(io[1]));
                                        STN4(&(io[36]), Tf9, TgH, TfF, Tgr, ovs);
                                        TgI = VADD(T9f, T9g);
                                        STM4(&(io[5]), TgI, ovs, &(io[1]));
                                        STN4(&(io[4]), Tfa, TgI, TfG, Tgs, ovs);
                                   }
                                   {
                                        V T99, T9a, TgJ, TgK;
                                        T99 = VSUB(T97, T98);
                                        T9a = VSUB(T95, T92);
                                        TgJ = VSUB(T99, T9a);
                                        STM4(&(io[53]), TgJ, ovs, &(io[1]));
                                        STN4(&(io[52]), Tfb, TgJ, TfH, Tgt, ovs);
                                        TgK = VADD(T99, T9a);
                                        STM4(&(io[21]), TgK, ovs, &(io[1]));
                                        STN4(&(io[20]), Tfc, TgK, TfI, Tgu, ovs);
                                   }
                                   {
                                        V T9b, T9e, TgL, TgM;
                                        T9b = VSUB(T8X, T8Y);
                                        T9e = VSUB(T9c, T9d);
                                        TgL = VSUB(T9b, T9e);
                                        STM4(&(ro[53]), TgL, ovs, &(ro[1]));
                                        STN4(&(ro[52]), Tfd, TgL, TfJ, Tgv, ovs);
                                        TgM = VADD(T9b, T9e);
                                        STM4(&(ro[21]), TgM, ovs, &(ro[1]));
                                        STN4(&(ro[20]), Tfe, TgM, TfK, Tgw, ovs);
                                   }
                              }
                         }
                    }
               }
          }
     }
     VLEAVE();
}

static const kdft_desc desc = { 64, XSIMD_STRING("n2sv_64"), {808, 144, 104, 0}, &GENUS, 0, 1, 0, 0 };

void XSIMD(codelet_n2sv_64) (planner *p) {
     X(kdft_register) (p, n2sv_64, &desc);
}

#endif /* HAVE_FMA */
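
/*
 * Usage sketch (illustrative; assumes only the standard FFTW3 public API in
 * <fftw3.h>).  Applications never call n2sv_64 directly: the registration
 * hook above only adds the codelet to the planner's table of DFT solvers,
 * and the planner decides whether a SIMD size-64 codelet is used for a
 * given plan.  A 64-point transform that such a codelet can serve looks
 * like this:
 *
 *   #include <fftw3.h>
 *
 *   int main(void)
 *   {
 *        fftw_complex *in = fftw_malloc(64 * sizeof(fftw_complex));
 *        fftw_complex *out = fftw_malloc(64 * sizeof(fftw_complex));
 *        fftw_plan p = fftw_plan_dft_1d(64, in, out, FFTW_FORWARD, FFTW_ESTIMATE);
 *
 *        // fill in[] with data, then:
 *        fftw_execute(p);
 *
 *        fftw_destroy_plan(p);
 *        fftw_free(in);
 *        fftw_free(out);
 *        return 0;
 *   }
 */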