#ifndef AVCODEC_X86_DCA_H
#define AVCODEC_X86_DCA_H

#include "config.h"

#if ARCH_X86_64 && HAVE_SSE2_INLINE

#include "libavutil/mem.h"      /* DECLARE_ALIGNED() */
#include "libavutil/x86/asm.h"  /* XMM_CLOBBERS_ONLY() */
#include "libavcodec/dcadsp.h"  /* DCADSPContext */

/* Defining int8x8_fmul_int32 signals that an arch-specific implementation
 * is available, so the generic C fallback is not compiled in. */
# define int8x8_fmul_int32 int8x8_fmul_int32
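/*
 * SSE2 inline-asm implementation of int8x8_fmul_int32(): convert 8 signed
 * 8-bit samples to floats scaled by scale / 16 and store them at dst, which
 * must be 16-byte aligned.  A rough scalar equivalent, for illustration only:
 *
 *     for (i = 0; i < 8; i++)
 *         dst[i] = src[i] * (scale / 16.0f);
 */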
static inline void int8x8_fmul_int32(av_unused DCADSPContext *dsp,
                                     float *dst, const int8_t *src, int scale)
{
    /* 1/16, kept in aligned static storage so it can be used as an "m" operand */
    DECLARE_ALIGNED(16, static const float, inverse16) = 1.0 / 16.0;
    __asm__ volatile (
37 "cvtsi2ss %2, %%xmm0 \n\t"
38 "mulss %3, %%xmm0 \n\t"
39 "movq (%1), %%xmm1 \n\t"
40 "punpcklbw %%xmm1, %%xmm1 \n\t"
41 "movaps %%xmm1, %%xmm2 \n\t"
42 "punpcklwd %%xmm1, %%xmm1 \n\t"
43 "punpckhwd %%xmm2, %%xmm2 \n\t"
44 "psrad $24, %%xmm1 \n\t"
45 "psrad $24, %%xmm2 \n\t"
46 "shufps $0, %%xmm0, %%xmm0 \n\t"
47 "cvtdq2ps %%xmm1, %%xmm1 \n\t"
48 "cvtdq2ps %%xmm2, %%xmm2 \n\t"
49 "mulps %%xmm0, %%xmm1 \n\t"
50 "mulps %%xmm0, %%xmm2 \n\t"
51 "movaps %%xmm1, 0(%0) \n\t"
52 "movaps %%xmm2, 16(%0) \n\t"
        :: "r"(dst), "r"(src), "m"(scale), "m"(inverse16)
        /* clobber list, emitted only on builds that accept XMM register names */
        XMM_CLOBBERS_ONLY("%xmm0", "%xmm1", "%xmm2")
    );
}

#endif /* ARCH_X86_64 && HAVE_SSE2_INLINE */

#endif /* AVCODEC_X86_DCA_H */