/*
 * Copyright (c) 2003, 2007-8 Matteo Frigo
 * Copyright (c) 2003, 2007-8 Massachusetts Institute of Technology
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *
 */
/* Generated by: ../../genfft/gen_twiddle_c -standalone -fma -reorder-insns -simd -compact -variables 100000 -include fftw-spu.h -trivial-stores -n 8 -name X(spu_t1fv_8) */

/*
 * This function contains 33 FP additions, 24 FP multiplications,
 * (or, 23 additions, 14 multiplications, 10 fused multiply/add),
 * 44 stack variables, 1 constants, and 16 memory accesses
 */
#include "fftw-spu.h"

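/*
 * Overview: X(spu_t1fv_8) is the size-8 SIMD "twiddle" codelet produced by
 * gen_twiddle_c for the SPU backend (as the -include fftw-spu.h option above
 * suggests).  Each loop iteration handles VL transforms at once: the inputs
 * at offsets 1..7 are multiplied by the precomputed twiddle factors in W
 * (via BYTWJ) and an 8-point decimation-in-time DFT is then computed in
 * place on x (= ri; the ii argument is not referenced in this vectorized
 * variant).  KP707106781 is 1/sqrt(2) = cos(pi/4), the only nontrivial
 * constant a size-8 DFT needs.
 */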
void X(spu_t1fv_8) (R *ri, R *ii, const R *W, stride rs, INT mb, INT me, INT ms) {
     DVK(KP707106781, +0.707106781186547524400844362104849039284835938);
     INT m;
     R *x;
     x = ri;
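     /* Each size-8 transform consumes 7 complex twiddle factors (14 real
        values), hence the factor of 14 in the W offsets and strides below. */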
     for (m = mb, W = W + (mb * ((TWVL / VL) * 14)); m < me; m = m + VL, x = x + (VL * ms), W = W + (TWVL * 14), MAKE_VOLATILE_STRIDE(rs)) {
          V T4, Tu, Tl, Tv, Tx, Ty, Tf, Tm, T1, T3, T2, Ti, Tk, Th, Tj;
          V T6, Td, T8, Tb, T5, Tc, T7, Ta, T9, Te, Tq, Tr, Tw, Tz, To;
          V Tp, Tg, Tn, TE, TF, TC, TD, Ts, Tt, TA, TB;
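          /* Even-indexed inputs: load x[0], x[2], x[4], x[6], apply the
             twiddle factors to x[2], x[4], x[6] (BYTWJ), and form the
             radix-2 butterfly sums (Tu, Tv) and differences (T4, Tl). */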
          T1 = LD(&(x[0]), ms, &(x[0]));
          T2 = LD(&(x[WS(rs, 4)]), ms, &(x[0]));
          T3 = BYTWJ(&(W[TWVL * 6]), T2);
          T4 = VSUB(T1, T3);
          Tu = VADD(T1, T3);
          Th = LD(&(x[WS(rs, 2)]), ms, &(x[0]));
          Ti = BYTWJ(&(W[TWVL * 2]), Th);
          Tj = LD(&(x[WS(rs, 6)]), ms, &(x[0]));
          Tk = BYTWJ(&(W[TWVL * 10]), Tj);
          Tl = VSUB(Ti, Tk);
          Tv = VADD(Ti, Tk);
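          /* Odd-indexed inputs: load x[1], x[3], x[5], x[7], apply their
             twiddle factors, and combine them into the sums (Tx, Ty, Tf)
             and differences (T9, Te, Tm) used by the output stage. */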
          T5 = LD(&(x[WS(rs, 1)]), ms, &(x[WS(rs, 1)]));
          T6 = BYTWJ(&(W[0]), T5);
          Tc = LD(&(x[WS(rs, 3)]), ms, &(x[WS(rs, 1)]));
          Td = BYTWJ(&(W[TWVL * 4]), Tc);
          T7 = LD(&(x[WS(rs, 5)]), ms, &(x[WS(rs, 1)]));
          T8 = BYTWJ(&(W[TWVL * 8]), T7);
          Ta = LD(&(x[WS(rs, 7)]), ms, &(x[WS(rs, 1)]));
          Tb = BYTWJ(&(W[TWVL * 12]), Ta);
          Tx = VADD(T6, T8);
          T9 = VSUB(T6, T8);
          Te = VSUB(Tb, Td);
          Ty = VADD(Tb, Td);
          Tf = VADD(T9, Te);
          Tm = VSUB(Te, T9);
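          /* Recombine and store: the fused multiply-adds scale by 1/sqrt(2)
             (KP707106781), and VFMAI/VFNMSI effectively add or subtract i
             times their first operand; the eight results are written back
             to x in natural order. */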
          Tg = VFMA(LDK(KP707106781), Tf, T4);
          Tq = VFNMS(LDK(KP707106781), Tf, T4);
          Tr = VFMA(LDK(KP707106781), Tm, Tl);
          Tn = VFNMS(LDK(KP707106781), Tm, Tl);
          To = VFNMSI(Tn, Tg);
          Tp = VFMAI(Tn, Tg);
          ST(&(x[WS(rs, 1)]), To, ms, &(x[WS(rs, 1)]));
          ST(&(x[WS(rs, 7)]), Tp, ms, &(x[WS(rs, 1)]));
          TC = VSUB(Tu, Tv);
          Tw = VADD(Tu, Tv);
          Tz = VADD(Tx, Ty);
          TD = VSUB(Ty, Tx);
          TE = VFNMSI(TD, TC);
          TF = VFMAI(TD, TC);
          ST(&(x[WS(rs, 6)]), TE, ms, &(x[0]));
          ST(&(x[WS(rs, 2)]), TF, ms, &(x[0]));
          Ts = VFNMSI(Tr, Tq);
          Tt = VFMAI(Tr, Tq);
          ST(&(x[WS(rs, 5)]), Ts, ms, &(x[WS(rs, 1)]));
          ST(&(x[WS(rs, 3)]), Tt, ms, &(x[WS(rs, 1)]));
          TA = VSUB(Tw, Tz);
          TB = VADD(Tw, Tz);
          ST(&(x[WS(rs, 4)]), TA, ms, &(x[0]));
          ST(&(x[0]), TB, ms, &(x[0]));
     }
}