#define BITS_INV_ACC    5                           // 4 or 5 for IEEE
#define SHIFT_INV_ROW   (16 - BITS_INV_ACC)         //11
#define SHIFT_INV_COL   (1 + BITS_INV_ACC)          //6
#define RND_INV_ROW     (1024 * (6 - BITS_INV_ACC))
#define RND_INV_COL     (16 * (BITS_INV_ACC - 3))
#define RND_INV_CORR    (RND_INV_COL - 1)

#define BITS_FRW_ACC    3                           // 2 or 3 for accuracy
#define SHIFT_FRW_COL   BITS_FRW_ACC
#define SHIFT_FRW_ROW   (BITS_FRW_ACC + 17)
#define RND_FRW_ROW     (262144 * (BITS_FRW_ACC - 1))

// fixed-point tangent/cosine constants used by the column pass (DCT_8_INV_COL)
DECLARE_ALIGNED(8, static const int16_t, tg_1_16)[4*4] = {
  13036,13036,13036,13036,        // tg_1_16: tan(pi/16) * 2^16
  27146,27146,27146,27146,        // tg_2_16: tan(2*pi/16) * 2^16
  -21746,-21746,-21746,-21746,    // tg_3_16: (tan(3*pi/16) - 1) * 2^16
  23170,23170,23170,23170};       // cos_4_16: cos(4*pi/16) * 2^15
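The four qword constants above are plain fixed-point trigonometric values, each broadcast across four 16-bit lanes. A minimal verification sketch (not part of the source; assumes the usual AP-922 encoding and a POSIX math.h; build with -lm):

#include <math.h>
#include <stdio.h>

int main(void)
{
    /* Each printed value should match one lane of the table above. */
    printf("%ld\n", lrint(tan(1 * M_PI / 16) * 65536));        /* 13036  */
    printf("%ld\n", lrint(tan(2 * M_PI / 16) * 65536));        /* 27146  */
    printf("%ld\n", lrint((tan(3 * M_PI / 16) - 1) * 65536));  /* -21746 */
    printf("%ld\n", lrint(cos(4 * M_PI / 16) * 32768));        /* 23170  */
    return 0;
}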
// Row-pass coefficient table for the MMX row macro. Each row group below is a
// 32-entry (64-byte) sub-table, selected per row by the DCT_8_INV_ROW_MMX
// invocations further down.
DECLARE_ALIGNED(8, static const int16_t, tab_i_04_mmx)[32*4] = {
  // rows 0 and 4 - constants multiplied by cos_4_16
  16384,16384,16384,-16384,
  21407,8867,8867,-21407,
  16384,-16384,16384,16384,
  -8867,21407,-21407,-8867,
  22725,12873,19266,-22725,
  19266,4520,-4520,-12873,
  12873,4520,4520,19266,
  -22725,19266,-12873,-22725,
  // rows 1 and 7 - constants multiplied by cos_1_16
  22725,22725,22725,-22725,
  29692,12299,12299,-29692,
  22725,-22725,22725,22725,
  -12299,29692,-29692,-12299,
  31521,17855,26722,-31521,
  26722,6270,-6270,-17855,
  17855,6270,6270,26722,
  -31521,26722,-17855,-31521,
  // rows 2 and 6 - constants multiplied by cos_2_16
  21407,21407,21407,-21407,
  27969,11585,11585,-27969,
  21407,-21407,21407,21407,
  -11585,27969,-27969,-11585,
  29692,16819,25172,-29692,
  25172,5906,-5906,-16819,
  16819,5906,5906,25172,
  -29692,25172,-16819,-29692,
  // rows 3 and 5 - constants multiplied by cos_3_16
  19266,19266,19266,-19266,
  25172,10426,10426,-25172,
  19266,-19266,19266,19266,
  -10426,25172,-25172,-10426,
  26722,15137,22654,-26722,
  22654,5315,-5315,-15137,
  15137,5315,5315,22654,
  -26722,22654,-15137,-26722};
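Rows 0 and 4 share a sub-table, as do rows 1 and 7, 2 and 6, and 3 and 5; the byte offsets 64*0, 64*1, 64*2, 64*3, 64*0, 64*3, 64*2, 64*1 in the row-macro invocations below encode exactly that. A hypothetical helper (not in the source) making the selection explicit:

#include <stdint.h>

static const int16_t *row_table(const int16_t *tab, int row)
{
    /* sub-table index per row, mirroring the invocation pattern below */
    static const int sub[8] = { 0, 1, 2, 3, 0, 3, 2, 1 };
    return tab + 32 * sub[row];   /* 32 int16_t == 64 bytes per sub-table */
}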
// Row-pass coefficient table for the pshufw (MMXEXT) row macro; same four
// row groups as above, packed in the order DCT_8_INV_ROW_XMM expects.
DECLARE_ALIGNED(8, static const int16_t, tab_i_04_xmm)[32*4] = {
  // rows 0 and 4 - constants multiplied by cos_4_16
  16384,21407,16384,8867,
  16384,8867,-16384,-21407,
  16384,-8867,16384,-21407,
  -16384,21407,16384,-8867,
  22725,19266,19266,-4520,
  12873,4520,-22725,-12873,
  12873,-22725,4520,-12873,
  4520,19266,19266,-22725,
  // rows 1 and 7 - constants multiplied by cos_1_16
  22725,29692,22725,12299,
  22725,12299,-22725,-29692,
  22725,-12299,22725,-29692,
  -22725,29692,22725,-12299,
  31521,26722,26722,-6270,
  17855,6270,-31521,-17855,
  17855,-31521,6270,-17855,
  6270,26722,26722,-31521,
  // rows 2 and 6 - constants multiplied by cos_2_16
  21407,27969,21407,11585,
  21407,11585,-21407,-27969,
  21407,-11585,21407,-27969,
  -21407,27969,21407,-11585,
  29692,25172,25172,-5906,
  16819,5906,-29692,-16819,
  16819,-29692,5906,-16819,
  5906,25172,25172,-29692,
  // rows 3 and 5 - constants multiplied by cos_3_16
  19266,25172,19266,10426,
  19266,10426,-19266,-25172,
  19266,-10426,19266,-25172,
  -19266,25172,19266,-10426,
  26722,22654,22654,-5315,
  15137,5315,-26722,-15137,
  15137,-26722,5315,-15137,
  5315,22654,22654,-26722};
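For reference, the arithmetic a single row pass performs is an 8-point matrix multiply with a per-row rounder and a final arithmetic shift by SHIFT_INV_ROW (the psrad $11 in the macros below). A hedged scalar sketch; the real tables interleave the weights for pmaddwd, but it is treated here as a plain row-major 8x8 matrix for clarity:

#include <stdint.h>

static void idct_row_ref(int16_t out[8], const int16_t in[8],
                         const int16_t w[8][8], int32_t rnd)
{
    for (int i = 0; i < 8; i++) {
        int32_t acc = rnd;                 /* per-row rounder (rounder_0) */
        for (int j = 0; j < 8; j++)
            acc += (int32_t)w[i][j] * in[j];
        out[i] = (int16_t)(acc >> 11);     /* SHIFT_INV_ROW == 16 - BITS_INV_ACC */
    }
}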
// One row of the inverse DCT: pmaddwd against the 64-byte sub-table A3,
// rounder A4 added in, result shifted right by SHIFT_INV_ROW (11).
#define DCT_8_INV_ROW_MMX(A1,A2,A3,A4)\
    "movq " #A1 ",%%mm0 \n\t"\
    "movq 8+" #A1 ",%%mm1 \n\t"\
    "movq %%mm0,%%mm2 \n\t"\
    "movq " #A3 ",%%mm3 \n\t"\
    "punpcklwd %%mm1,%%mm0 \n\t"\
    "movq %%mm0,%%mm5 \n\t"\
    "punpckldq %%mm0,%%mm0 \n\t"\
    "movq 8+" #A3 ",%%mm4 \n\t"\
    "punpckhwd %%mm1,%%mm2 \n\t"\
    "pmaddwd %%mm0,%%mm3 \n\t"\
    "movq %%mm2,%%mm6 \n\t"\
    "movq 32+" #A3 ",%%mm1 \n\t"\
    "punpckldq %%mm2,%%mm2 \n\t"\
    "pmaddwd %%mm2,%%mm4 \n\t"\
    "punpckhdq %%mm5,%%mm5 \n\t"\
    "pmaddwd 16+" #A3 ",%%mm0 \n\t"\
    "punpckhdq %%mm6,%%mm6 \n\t"\
    "movq 40+" #A3 ",%%mm7 \n\t"\
    "pmaddwd %%mm5,%%mm1 \n\t"\
    "paddd " #A4 ",%%mm3 \n\t"\
    "pmaddwd %%mm6,%%mm7 \n\t"\
    "pmaddwd 24+" #A3 ",%%mm2 \n\t"\
    "paddd %%mm4,%%mm3 \n\t"\
    "pmaddwd 48+" #A3 ",%%mm5 \n\t"\
    "movq %%mm3,%%mm4 \n\t"\
    "pmaddwd 56+" #A3 ",%%mm6 \n\t"\
    "paddd %%mm7,%%mm1 \n\t"\
    "paddd " #A4 ",%%mm0 \n\t"\
    "psubd %%mm1,%%mm3 \n\t"\
    "psrad $11,%%mm3 \n\t"\
    "paddd %%mm4,%%mm1 \n\t"\
    "paddd %%mm2,%%mm0 \n\t"\
    "psrad $11,%%mm1 \n\t"\
    "paddd %%mm6,%%mm5 \n\t"\
    "movq %%mm0,%%mm4 \n\t"\
    "paddd %%mm5,%%mm0 \n\t"\
    "psubd %%mm5,%%mm4 \n\t"\
    "psrad $11,%%mm0 \n\t"\
    "psrad $11,%%mm4 \n\t"\
    "packssdw %%mm0,%%mm1 \n\t"\
    "packssdw %%mm3,%%mm4 \n\t"\
    "movq %%mm4,%%mm7 \n\t"\
    "psrld $16,%%mm4 \n\t"\
    "pslld $16,%%mm7 \n\t"\
    "movq %%mm1," #A2 " \n\t"\
    "por %%mm4,%%mm7 \n\t"\
    "movq %%mm7,8+" #A2 " \n\t"

// Same row pass with the punpck/unpack shuffles replaced by pshufw,
// which requires MMXEXT/SSE (hence the separate _xmm table and function).
#define DCT_8_INV_ROW_XMM(A1,A2,A3,A4)\
    "movq " #A1 ",%%mm0 \n\t"\
    "movq 8+" #A1 ",%%mm1 \n\t"\
    "movq %%mm0,%%mm2 \n\t"\
    "movq " #A3 ",%%mm3 \n\t"\
    "pshufw $0x88,%%mm0,%%mm0 \n\t"\
    "movq 8+" #A3 ",%%mm4 \n\t"\
    "movq %%mm1,%%mm5 \n\t"\
    "pmaddwd %%mm0,%%mm3 \n\t"\
    "movq 32+" #A3 ",%%mm6 \n\t"\
    "pshufw $0x88,%%mm1,%%mm1 \n\t"\
    "pmaddwd %%mm1,%%mm4 \n\t"\
    "movq 40+" #A3 ",%%mm7 \n\t"\
    "pshufw $0xdd,%%mm2,%%mm2 \n\t"\
    "pmaddwd %%mm2,%%mm6 \n\t"\
    "pshufw $0xdd,%%mm5,%%mm5 \n\t"\
    "pmaddwd %%mm5,%%mm7 \n\t"\
    "paddd " #A4 ",%%mm3 \n\t"\
    "pmaddwd 16+" #A3 ",%%mm0 \n\t"\
    "paddd %%mm4,%%mm3 \n\t"\
    "pmaddwd 24+" #A3 ",%%mm1 \n\t"\
    "movq %%mm3,%%mm4 \n\t"\
    "pmaddwd 48+" #A3 ",%%mm2 \n\t"\
    "paddd %%mm7,%%mm6 \n\t"\
    "pmaddwd 56+" #A3 ",%%mm5 \n\t"\
    "paddd %%mm6,%%mm3 \n\t"\
    "paddd " #A4 ",%%mm0 \n\t"\
    "psrad $11,%%mm3 \n\t"\
    "paddd %%mm1,%%mm0 \n\t"\
    "psubd %%mm6,%%mm4 \n\t"\
    "movq %%mm0,%%mm7 \n\t"\
    "paddd %%mm5,%%mm2 \n\t"\
    "paddd %%mm2,%%mm0 \n\t"\
    "psrad $11,%%mm4 \n\t"\
    "psubd %%mm2,%%mm7 \n\t"\
    "psrad $11,%%mm0 \n\t"\
    "psrad $11,%%mm7 \n\t"\
    "packssdw %%mm0,%%mm3 \n\t"\
    "packssdw %%mm4,%%mm7 \n\t"\
    "movq %%mm3," #A2 " \n\t"\
    "pshufw $0xb1,%%mm7,%%mm7 \n\t"\
    "movq %%mm7,8+" #A2 " \n\t"

// Column pass over one 4-column half of the block; %3 must point at the
// tg_1_16 table above, and the final shift is SHIFT_INV_COL (psraw $6).
#define DCT_8_INV_COL(A1,A2)\
    "movq 2*8(%3),%%mm0\n\t"\
    "movq 16*3+" #A1 ",%%mm3\n\t"\
    "movq %%mm0,%%mm1 \n\t"\
    "movq 16*5+" #A1 ",%%mm5\n\t"\
    "pmulhw %%mm3,%%mm0 \n\t"\
    "movq (%3),%%mm4\n\t"\
    "pmulhw %%mm5,%%mm1 \n\t"\
    "movq 16*7+" #A1 ",%%mm7\n\t"\
    "movq %%mm4,%%mm2 \n\t"\
    "movq 16*1+" #A1 ",%%mm6\n\t"\
    "pmulhw %%mm7,%%mm4 \n\t"\
    "paddsw %%mm3,%%mm0 \n\t"\
    "pmulhw %%mm6,%%mm2 \n\t"\
    "paddsw %%mm3,%%mm1 \n\t"\
    "psubsw %%mm5,%%mm0 \n\t"\
    "movq 3*8(%3),%%mm3\n\t"\
    "paddsw %%mm5,%%mm1 \n\t"\
    "paddsw %%mm6,%%mm4 \n\t"\
    "psubsw %%mm7,%%mm2 \n\t"\
    "movq %%mm4,%%mm5 \n\t"\
"movq %%mm2,%%mm6 \n\t"\ 420 "paddsw %%mm1,%%mm5 \n\t"\ 421 "psubsw %%mm0,%%mm6 \n\t"\ 422 "psubsw %%mm1,%%mm4 \n\t"\ 423 "paddsw %%mm0,%%mm2 \n\t"\ 424 "movq 1*8(%3),%%mm7\n\t"\ 425 "movq %%mm4,%%mm1 \n\t"\ 426 "movq %%mm5,3*16 +" #A2 "\n\t"\ 427 "paddsw %%mm2,%%mm1 \n\t"\ 428 "movq %%mm6,5*16 +" #A2 "\n\t"\ 429 "psubsw %%mm2,%%mm4 \n\t"\ 430 "movq 2*16+" #A1 ",%%mm5\n\t"\ 431 "movq %%mm7,%%mm0 \n\t"\ 432 "movq 6*16+" #A1 ",%%mm6\n\t"\ 433 "pmulhw %%mm5,%%mm0 \n\t"\ 434 "pmulhw %%mm6,%%mm7 \n\t"\ 435 "pmulhw %%mm3,%%mm1 \n\t"\ 436 "movq 0*16+" #A1 ",%%mm2\n\t"\ 437 "pmulhw %%mm3,%%mm4 \n\t"\ 438 "psubsw %%mm6,%%mm0 \n\t"\ 439 "movq %%mm2,%%mm3 \n\t"\ 440 "movq 4*16+" #A1 ",%%mm6\n\t"\ 441 "paddsw %%mm5,%%mm7 \n\t"\ 442 "paddsw %%mm6,%%mm2 \n\t"\ 443 "psubsw %%mm6,%%mm3 \n\t"\ 444 "movq %%mm2,%%mm5 \n\t"\ 445 "movq %%mm3,%%mm6 \n\t"\ 446 "psubsw %%mm7,%%mm2 \n\t"\ 447 "paddsw %%mm0,%%mm3 \n\t"\ 448 "paddsw %%mm1,%%mm1 \n\t"\ 449 "paddsw %%mm4,%%mm4 \n\t"\ 450 "paddsw %%mm7,%%mm5 \n\t"\ 451 "psubsw %%mm0,%%mm6 \n\t"\ 452 "movq %%mm3,%%mm7 \n\t"\ 453 "movq %%mm6,%%mm0 \n\t"\ 454 "paddsw %%mm1,%%mm3 \n\t"\ 455 "paddsw %%mm4,%%mm6 \n\t"\ 456 "psraw $6,%%mm3 \n\t"\ 457 "psubsw %%mm1,%%mm7 \n\t"\ 458 "psraw $6,%%mm6 \n\t"\ 459 "psubsw %%mm4,%%mm0 \n\t"\ 460 "movq 3*16+" #A2 ",%%mm1 \n\t"\ 461 "psraw $6,%%mm7 \n\t"\ 462 "movq %%mm5,%%mm4 \n\t"\ 463 "psraw $6,%%mm0 \n\t"\ 464 "movq %%mm3,1*16+" #A2 "\n\t"\ 465 "paddsw %%mm1,%%mm5 \n\t"\ 466 "movq %%mm6,2*16+" #A2 "\n\t"\ 467 "psubsw %%mm1,%%mm4 \n\t"\ 468 "movq 5*16+" #A2 ",%%mm3 \n\t"\ 469 "psraw $6,%%mm5 \n\t"\ 470 "movq %%mm2,%%mm6 \n\t"\ 471 "psraw $6,%%mm4 \n\t"\ 472 "movq %%mm0,5*16+" #A2 "\n\t"\ 473 "paddsw %%mm3,%%mm2 \n\t"\ 474 "movq %%mm7,6*16+" #A2 "\n\t"\ 475 "psubsw %%mm3,%%mm6 \n\t"\ 476 "movq %%mm5,0*16+" #A2 "\n\t"\ 477 "psraw $6,%%mm2 \n\t"\ 478 "movq %%mm4,7*16+" #A2 "\n\t"\ 479 "psraw $6,%%mm6 \n\t"\ 480 "movq %%mm2,3*16+" #A2 "\n\t"\ 481 "movq %%mm6,4*16+" #A2 "\n\t" 495 DCT_8_INV_ROW_MMX(0*16(%0), 0*16(%0), 64*0(%2), 8*0(%1))
void ff_idct_xvid_mmx(short *block)
{
    __asm__ volatile(
        // rows: sub-tables follow the 0,1,2,3,0,3,2,1 pattern noted above
        DCT_8_INV_ROW_MMX(0*16(%0), 0*16(%0), 64*0(%2), 8*0(%1))
        DCT_8_INV_ROW_MMX(1*16(%0), 1*16(%0), 64*1(%2), 8*1(%1))
        DCT_8_INV_ROW_MMX(2*16(%0), 2*16(%0), 64*2(%2), 8*2(%1))
        DCT_8_INV_ROW_MMX(3*16(%0), 3*16(%0), 64*3(%2), 8*3(%1))
        DCT_8_INV_ROW_MMX(4*16(%0), 4*16(%0), 64*0(%2), 8*4(%1))
        DCT_8_INV_ROW_MMX(5*16(%0), 5*16(%0), 64*3(%2), 8*5(%1))
        DCT_8_INV_ROW_MMX(6*16(%0), 6*16(%0), 64*2(%2), 8*6(%1))
        DCT_8_INV_ROW_MMX(7*16(%0), 7*16(%0), 64*1(%2), 8*7(%1))
        // columns: left (byte offset 0) and right (offset 8) 4-column halves
        DCT_8_INV_COL(0(%0), 0(%0))
        DCT_8_INV_COL(8(%0), 8(%0))
        :: "r"(block), "r"(rounder_0), "r"(tab_i_04_mmx), "r"(tg_1_16));
}
void ff_idct_xvid_mmxext(short *block)
{
    __asm__ volatile(
        // same row order and sub-table pattern as the MMX version
        DCT_8_INV_ROW_XMM(0*16(%0), 0*16(%0), 64*0(%2), 8*0(%1))
        DCT_8_INV_ROW_XMM(1*16(%0), 1*16(%0), 64*1(%2), 8*1(%1))
        DCT_8_INV_ROW_XMM(2*16(%0), 2*16(%0), 64*2(%2), 8*2(%1))
        DCT_8_INV_ROW_XMM(3*16(%0), 3*16(%0), 64*3(%2), 8*3(%1))
        DCT_8_INV_ROW_XMM(4*16(%0), 4*16(%0), 64*0(%2), 8*4(%1))
        DCT_8_INV_ROW_XMM(5*16(%0), 5*16(%0), 64*3(%2), 8*5(%1))
        DCT_8_INV_ROW_XMM(6*16(%0), 6*16(%0), 64*2(%2), 8*6(%1))
        DCT_8_INV_ROW_XMM(7*16(%0), 7*16(%0), 64*1(%2), 8*7(%1))
        DCT_8_INV_COL(0(%0), 0(%0))
        DCT_8_INV_COL(8(%0), 8(%0))
        :: "r"(block), "r"(rounder_0), "r"(tab_i_04_xmm), "r"(tg_1_16));
}
memory handling functions:
#define DECLARE_ALIGNED(n, t, v)

header for Xvid IDCT functions:
void ff_idct_xvid_mmx(short *block);
void ff_idct_xvid_mmxext(short *block);

clamped pixel helpers and the IDCT put/add wrappers built on them:
void ff_put_pixels_clamped_mmx(const int16_t *block, uint8_t *pixels, int line_size);
void ff_add_pixels_clamped_mmx(const int16_t *block, uint8_t *pixels, int line_size);
void ff_idct_xvid_mmx_put(uint8_t *dest, int line_size, int16_t *block);
void ff_idct_xvid_mmx_add(uint8_t *dest, int line_size, int16_t *block);
void ff_idct_xvid_mmxext_put(uint8_t *dest, int line_size, int16_t *block);
void ff_idct_xvid_mmxext_add(uint8_t *dest, int line_size, int16_t *block);
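A usage sketch for the put/add wrappers, assuming block holds dequantized coefficients and is clobbered by the call: _put overwrites the destination with the clamped IDCT result, while _add sums it onto an existing prediction, clamped to 0..255. line_size is the destination stride in bytes. The function and parameter names are from the prototypes above; reconstruct_block itself is hypothetical.

#include <stdint.h>

void ff_idct_xvid_mmxext_put(uint8_t *dest, int line_size, int16_t *block);
void ff_idct_xvid_mmxext_add(uint8_t *dest, int line_size, int16_t *block);

static void reconstruct_block(uint8_t *dest, int stride,
                              int16_t coeffs[64], int intra)
{
    if (intra)
        ff_idct_xvid_mmxext_put(dest, stride, coeffs);  /* dest = idct(coeffs) */
    else
        ff_idct_xvid_mmxext_add(dest, stride, coeffs);  /* dest += idct(coeffs) */
}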