// Generic alpha renderers for all YUV modes and RGB depths.
// Optimized by Nick and Michael
// Code from Michael Niedermayer (michaelni@gmx.at) is under GPL

#undef PREFETCH
#undef EMMS
#undef PREFETCHW
#undef PAVGB

#ifdef HAVE_3DNOW
#define PREFETCH  "prefetch"
#define PREFETCHW "prefetchw"
#define PAVGB     "pavgusb"
#elif defined ( HAVE_MMX2 )
#define PREFETCH  "prefetchnta"
#define PREFETCHW "prefetcht0"
#define PAVGB     "pavgb"
#else
#define PREFETCH  "/nop"
#define PREFETCHW "/nop"
#endif

#ifdef HAVE_3DNOW
/* On K6, femms is faster than emms. On K7, femms is directly mapped to emms. */
#define EMMS "femms"
#else
#define EMMS "emms"
#endif
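
/*
 * Note added for orientation (not part of the original file): the RENAME()'d
 * functions below form a template that is compiled once per CPU flavour by an
 * including file which defines RENAME() first. A minimal sketch of that
 * pattern, with the file name and the _MMX suffix as illustrative assumptions:
 *
 *     #define RENAME(a) a ## _MMX   // vo_draw_alpha_yv12 becomes vo_draw_alpha_yv12_MMX, etc.
 *     #include "osd_template.c"
 *
 * Each instantiation then uses the PREFETCH/PREFETCHW/PAVGB/EMMS variants
 * selected above according to HAVE_3DNOW / HAVE_MMX2.
 */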

static inline void RENAME(vo_draw_alpha_yv12)(int w,int h, unsigned char* src, unsigned char *srca, int srcstride, unsigned char* dstbase,int dststride){
    int y;
#if defined(FAST_OSD) && !defined(HAVE_MMX)
    w=w>>1;
#endif
    PROFILE_START();
    for(y=0;y<h;y++){
        register int x;
#ifdef HAVE_MMX
        asm volatile(
            PREFETCHW" %0\n\t"
            PREFETCH" %1\n\t"
            PREFETCH" %2\n\t"
//          "pxor %%mm7, %%mm7\n\t"
            "pcmpeqb %%mm5, %%mm5\n\t" // F..F
            "movq %%mm5, %%mm4\n\t"
            "psllw $8, %%mm5\n\t" //FF00FF00FF00FF00
            "psrlw $8, %%mm4\n\t" //00FF00FF00FF00FF
            ::"m"(*dstbase),"m"(*srca),"m"(*src):"memory");
        // 8 luma bytes per iteration; groups whose 8 alpha bytes are all zero are skipped
        for(x=0;x<w;x+=8){
            asm volatile(
                "movl %1, %%eax\n\t"
                "orl 4%1, %%eax\n\t"
                " jz 1f\n\t"
                PREFETCHW" 32%0\n\t"
                PREFETCH" 32%1\n\t"
                PREFETCH" 32%2\n\t"
                "movq %0, %%mm0\n\t" // dstbase
                "movq %%mm0, %%mm1\n\t"
                "pand %%mm4, %%mm0\n\t"    //0Y0Y0Y0Y
                "psrlw $8, %%mm1\n\t"      //0Y0Y0Y0Y
                "movq %1, %%mm2\n\t"       //srca HGFEDCBA
                "paddb "MANGLE(bFF)", %%mm2\n\t"
                "movq %%mm2, %%mm3\n\t"
                "pand %%mm4, %%mm2\n\t"    //0G0E0C0A
                "psrlw $8, %%mm3\n\t"      //0H0F0D0B
                "pmullw %%mm2, %%mm0\n\t"
                "pmullw %%mm3, %%mm1\n\t"
                "psrlw $8, %%mm0\n\t"
                "pand %%mm5, %%mm1\n\t"
                "por %%mm1, %%mm0\n\t"
                "paddb %2, %%mm0\n\t"
                "movq %%mm0, %0\n\t"
                "1:\n\t"
                :: "m" (dstbase[x]), "m" (srca[x]), "m" (src[x])
                : "%eax");
        }
#else
        for(x=0;x<w;x++){
#ifdef FAST_OSD
            if(srca[2*x+0]) dstbase[2*x+0]=src[2*x+0];
            if(srca[2*x+1]) dstbase[2*x+1]=src[2*x+1];
#else
            if(srca[x]) dstbase[x]=((dstbase[x]*srca[x])>>8)+src[x];
#endif
        }
#endif
        src+=srcstride;
        srca+=srcstride;
        dstbase+=dststride;
    }
#ifdef HAVE_MMX
    asm volatile(EMMS:::"memory");
#endif
    PROFILE_END("vo_draw_alpha_yv12");
    return;
}
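
/*
 * Reference note added for clarity (not part of the original code): the MMX
 * path above and the C fallback compute essentially the same per-pixel blend,
 *
 *     dst = ((dst * alpha) >> 8) + src;   // alpha == 0 leaves the pixel alone
 *
 * e.g. dst = 100, alpha = 128, src = 60 gives (100*128)>>8 + 60 = 50 + 60 = 110.
 * The MMX version does this for 8 luma bytes at a time: mm4/mm5 hold the
 * 0x00FF/0xFF00 word masks used to split even and odd bytes into 16-bit lanes
 * for pmullw, the bFF constant biases the alpha bytes, and 8-pixel groups
 * whose alpha bytes are all zero are skipped entirely.
 */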

static inline void RENAME(vo_draw_alpha_yuy2)(int w,int h, unsigned char* src, unsigned char *srca, int srcstride, unsigned char* dstbase,int dststride){
    int y;
#if defined(FAST_OSD) && !defined(HAVE_MMX)
    w=w>>1;
#endif
    PROFILE_START();
    for(y=0;y<h;y++){
        register int x;
#ifdef HAVE_MMX
        asm volatile(
            PREFETCHW" %0\n\t"
            PREFETCH" %1\n\t"
            PREFETCH" %2\n\t"
            "pxor %%mm7, %%mm7\n\t"
            "pcmpeqb %%mm5, %%mm5\n\t" // F..F
            "movq %%mm5, %%mm4\n\t"
            "psllw $8, %%mm5\n\t" //FF00FF00FF00FF00
            "psrlw $8, %%mm4\n\t" //00FF00FF00FF00FF
            ::"m"(*dstbase),"m"(*srca),"m"(*src));
        // 4 YUY2 pixels (8 dst bytes) per iteration; groups whose 4 alpha bytes are all zero are skipped
        for(x=0;x<w;x+=4){
            asm volatile(
                "movl %1, %%eax\n\t"
                "orl %%eax, %%eax\n\t"
                " jz 1f\n\t"
                PREFETCHW" 32%0\n\t"
                PREFETCH" 32%1\n\t"
                PREFETCH" 32%2\n\t"
                "movq %0, %%mm0\n\t" // dstbase
                "movq %%mm0, %%mm1\n\t"
                "pand %%mm4, %%mm0\n\t"       //0Y0Y0Y0Y
                "movd %%eax, %%mm2\n\t"       //srca 0000DCBA
                "paddb "MANGLE(bFF)", %%mm2\n\t"
                "punpcklbw %%mm7, %%mm2\n\t"  //srca 0D0C0B0A
                "pmullw %%mm2, %%mm0\n\t"
                "psrlw $8, %%mm0\n\t"
                "pand %%mm5, %%mm1\n\t"       //U0V0U0V0
                "movd %2, %%mm2\n\t"          //src 0000DCBA
                "punpcklbw %%mm7, %%mm2\n\t"  //src 0D0C0B0A
                "por %%mm1, %%mm0\n\t"
                "paddb %%mm2, %%mm0\n\t"
                "movq %%mm0, %0\n\t"
                "1:\n\t"
                :: "m" (dstbase[x*2]), "m" (srca[x]), "m" (src[x])
                : "%eax");
        }
#else
        for(x=0;x<w;x++){
#ifdef FAST_OSD
            if(srca[2*x+0]) dstbase[4*x+0]=src[2*x+0];
            if(srca[2*x+1]) dstbase[4*x+2]=src[2*x+1];
#else
            if(srca[x]) {
                dstbase[2*x]=((dstbase[2*x]*srca[x])>>8)+src[x];
                dstbase[2*x+1]=((((signed)dstbase[2*x+1]-128)*srca[x])>>8)+128;
            }
#endif
        }
#endif
        src+=srcstride;
        srca+=srcstride;
        dstbase+=dststride;
    }
#ifdef HAVE_MMX
    asm volatile(EMMS:::"memory");
#endif
    PROFILE_END("vo_draw_alpha_yuy2");
    return;
}
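
/*
 * Note added for clarity (not part of the original code): in the packed-YUY2
 * C fallback above, luma is blended toward the OSD value while chroma is
 * blended toward neutral grey (128), e.g. dst chroma 200 with alpha 64 gives
 * ((200-128)*64)>>8 + 128 = 18 + 128 = 146, i.e. pulled part-way toward 128.
 * The MMX path, by contrast, masks the chroma bytes with mm5 and writes them
 * back unchanged, blending only the luma bytes.
 */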

static inline void RENAME(vo_draw_alpha_rgb24)(int w,int h, unsigned char* src, unsigned char *srca, int srcstride, unsigned char* dstbase,int dststride){
    int y;
    for(y=0;y<h;y++){
        register unsigned char *dst = dstbase;
        register int x;
#ifdef ARCH_X86
#ifdef HAVE_MMX
        asm volatile(
            PREFETCHW" %0\n\t"
            PREFETCH" %1\n\t"
            PREFETCH" %2\n\t"
            "pxor %%mm7, %%mm7\n\t"
            "pcmpeqb %%mm6, %%mm6\n\t" // F..F
            ::"m"(*dst),"m"(*srca),"m"(*src):"memory");
        // 2 RGB24 pixels (6 bytes) per iteration; pairs where both alpha bytes are zero are skipped
        for(x=0;x<w;x+=2){
            if(srca[x] || srca[x+1])
                asm volatile(
                    PREFETCHW" 32%0\n\t"
                    PREFETCH" 32%1\n\t"
                    PREFETCH" 32%2\n\t"
                    "movq %0, %%mm0\n\t" // dstbase
                    "movq %%mm0, %%mm1\n\t"
                    "movq %%mm0, %%mm5\n\t"
                    "punpcklbw %%mm7, %%mm0\n\t"
                    "punpckhbw %%mm7, %%mm1\n\t"
                    "movd %1, %%mm2\n\t" // srca ABCD0000
                    "paddb %%mm6, %%mm2\n\t"
                    "punpcklbw %%mm2, %%mm2\n\t" // srca AABBCCDD
                    "punpcklbw %%mm2, %%mm2\n\t" // srca AAAABBBB
                    "movq %%mm2, %%mm3\n\t"
                    "punpcklbw %%mm7, %%mm2\n\t" // srca 0A0A0A0A
                    "punpckhbw %%mm7, %%mm3\n\t" // srca 0B0B0B0B
                    "pmullw %%mm2, %%mm0\n\t"
                    "pmullw %%mm3, %%mm1\n\t"
                    "psrlw $8, %%mm0\n\t"
                    "psrlw $8, %%mm1\n\t"
                    "packuswb %%mm1, %%mm0\n\t"
                    "movd %2, %%mm2 \n\t" // src ABCD0000
                    "punpcklbw %%mm2, %%mm2\n\t" // src AABBCCDD
                    "punpcklbw %%mm2, %%mm2\n\t" // src AAAABBBB
                    "paddb %%mm2, %%mm0\n\t"
                    "pand %4, %%mm5\n\t"
                    "pand %3, %%mm0\n\t"
                    "por %%mm0, %%mm5\n\t"
                    "movq %%mm5, %0\n\t"
                    :: "m" (dst[0]), "m" (srca[x]), "m" (src[x]), "m"(mask24hl), "m"(mask24lh));
            dst += 6;
        }
#else /* HAVE_MMX */
        for(x=0;x<w;x++){
            if(srca[x]){
                // per channel: %%ch/%%ah/%%dh pick bits 8..15 of dst*alpha + (src<<8),
                // i.e. effectively ((dst*alpha)>>8)+src
                asm volatile(
                    "movzbl (%0), %%ecx\n\t"
                    "movzbl 1(%0), %%eax\n\t"
                    "movzbl 2(%0), %%edx\n\t"

                    "imull %1, %%ecx\n\t"
                    "imull %1, %%eax\n\t"
                    "imull %1, %%edx\n\t"

                    "addl %2, %%ecx\n\t"
                    "addl %2, %%eax\n\t"
                    "addl %2, %%edx\n\t"

                    "movb %%ch, (%0)\n\t"
                    "movb %%ah, 1(%0)\n\t"
                    "movb %%dh, 2(%0)\n\t"

                    :
                    :"r" (dst),
                     "r" ((unsigned)srca[x]),
                     "r" (((unsigned)src[x])<<8)
                    :"%eax", "%ecx", "%edx"
                    );
            }
            dst += 3;
        }
#endif /* HAVE_MMX */
#else /*non x86 arch*/
        for(x=0;x<w;x++){
            if(srca[x]){
#ifdef FAST_OSD
                dst[0]=dst[1]=dst[2]=src[x];
#else
                dst[0]=((dst[0]*srca[x])>>8)+src[x];
                dst[1]=((dst[1]*srca[x])>>8)+src[x];
                dst[2]=((dst[2]*srca[x])>>8)+src[x];
#endif
            }
            dst+=3; // 24bpp
        }
#endif /* arch_x86 */
        src+=srcstride;
        srca+=srcstride;
        dstbase+=dststride;
    }
#ifdef HAVE_MMX
    asm volatile(EMMS:::"memory");
#endif
    return;
}

static inline void RENAME(vo_draw_alpha_rgb32)(int w,int h, unsigned char* src, unsigned char *srca, int srcstride, unsigned char* dstbase,int dststride){
    int y;
    PROFILE_START();
    for(y=0;y<h;y++){
        register int x;
#ifdef ARCH_X86
#ifdef HAVE_MMX
#ifdef HAVE_3DNOW
        asm volatile(
            PREFETCHW" %0\n\t"
            PREFETCH" %1\n\t"
            PREFETCH" %2\n\t"
            "pxor %%mm7, %%mm7\n\t"
            "pcmpeqb %%mm6, %%mm6\n\t" // F..F
            ::"m"(*dstbase),"m"(*srca),"m"(*src):"memory");
        // 2 RGB32 pixels per iteration; pairs where both alpha bytes are zero are skipped
        for(x=0;x<w;x+=2){
            if(srca[x] || srca[x+1])
                asm volatile(
                    PREFETCHW" 32%0\n\t"
                    PREFETCH" 32%1\n\t"
                    PREFETCH" 32%2\n\t"
                    "movq %0, %%mm0\n\t" // dstbase
                    "movq %%mm0, %%mm1\n\t"
                    "punpcklbw %%mm7, %%mm0\n\t"
                    "punpckhbw %%mm7, %%mm1\n\t"
                    "movd %1, %%mm2\n\t" // srca ABCD0000
                    "paddb %%mm6, %%mm2\n\t"
                    "punpcklbw %%mm2, %%mm2\n\t" // srca AABBCCDD
                    "punpcklbw %%mm2, %%mm2\n\t" // srca AAAABBBB
                    "movq %%mm2, %%mm3\n\t"
                    "punpcklbw %%mm7, %%mm2\n\t" // srca 0A0A0A0A
                    "punpckhbw %%mm7, %%mm3\n\t" // srca 0B0B0B0B
                    "pmullw %%mm2, %%mm0\n\t"
                    "pmullw %%mm3, %%mm1\n\t"
                    "psrlw $8, %%mm0\n\t"
                    "psrlw $8, %%mm1\n\t"
                    "packuswb %%mm1, %%mm0\n\t"
                    "movd %2, %%mm2 \n\t" // src ABCD0000
                    "punpcklbw %%mm2, %%mm2\n\t" // src AABBCCDD
                    "punpcklbw %%mm2, %%mm2\n\t" // src AAAABBBB
                    "paddb %%mm2, %%mm0\n\t"
                    "movq %%mm0, %0\n\t"
                    :: "m" (dstbase[4*x]), "m" (srca[x]), "m" (src[x]));
        }
#else // this version is faster on Intel CPUs
        asm volatile(
            PREFETCHW" %0\n\t"
            PREFETCH" %1\n\t"
            PREFETCH" %2\n\t"
            "pxor %%mm7, %%mm7\n\t"
            "pcmpeqb %%mm5, %%mm5\n\t" // F..F
            "movq %%mm5, %%mm4\n\t"
            "psllw $8, %%mm5\n\t" //FF00FF00FF00FF00
            "psrlw $8, %%mm4\n\t" //00FF00FF00FF00FF
            ::"m"(*dstbase),"m"(*srca),"m"(*src):"memory");
        // 4 RGB32 pixels (16 dst bytes) per iteration; groups whose 4 alpha bytes are all zero are skipped
        for(x=0;x<w;x+=4){
            asm volatile(
                "movl %1, %%eax\n\t"
                "orl %%eax, %%eax\n\t"
                " jz 1f\n\t"
                PREFETCHW" 32%0\n\t"
                PREFETCH" 32%1\n\t"
                PREFETCH" 32%2\n\t"
                "movq %0, %%mm0\n\t" // dstbase
                "movq %%mm0, %%mm1\n\t"
                "pand %%mm4, %%mm0\n\t"       //0R0B0R0B
                "psrlw $8, %%mm1\n\t"         //0?0G0?0G
                "movd %%eax, %%mm2\n\t"       //srca 0000DCBA
                "paddb "MANGLE(bFF)", %%mm2\n\t"
                "punpcklbw %%mm2, %%mm2\n\t"  //srca DDCCBBAA
                "movq %%mm2, %%mm3\n\t"
                "punpcklbw %%mm7, %%mm2\n\t"  //srca 0B0B0A0A
                "pmullw %%mm2, %%mm0\n\t"
                "pmullw %%mm2, %%mm1\n\t"
                "psrlw $8, %%mm0\n\t"
                "pand %%mm5, %%mm1\n\t"
                "por %%mm1, %%mm0\n\t"
                "movd %2, %%mm2 \n\t"         //src 0000DCBA
                "punpcklbw %%mm2, %%mm2\n\t"  //src DDCCBBAA
                "movq %%mm2, %%mm6\n\t"
                "punpcklbw %%mm2, %%mm2\n\t"  //src BBBBAAAA
                "paddb %%mm2, %%mm0\n\t"
                "movq %%mm0, %0\n\t"

                "movq 8%0, %%mm0\n\t" // dstbase
                "movq %%mm0, %%mm1\n\t"
                "pand %%mm4, %%mm0\n\t"       //0R0B0R0B
                "psrlw $8, %%mm1\n\t"         //0?0G0?0G
                "punpckhbw %%mm7, %%mm3\n\t"  //srca 0D0D0C0C
                "pmullw %%mm3, %%mm0\n\t"
                "pmullw %%mm3, %%mm1\n\t"
                "psrlw $8, %%mm0\n\t"
                "pand %%mm5, %%mm1\n\t"
                "por %%mm1, %%mm0\n\t"
                "punpckhbw %%mm6, %%mm6\n\t"  //src DDDDCCCC
                "paddb %%mm6, %%mm0\n\t"
                "movq %%mm0, 8%0\n\t"
                "1:\n\t"
                :: "m" (dstbase[4*x]), "m" (srca[x]), "m" (src[x])
                : "%eax");
        }
#endif
#else /* HAVE_MMX */
        for(x=0;x<w;x++){
            if(srca[x]){
                // per channel: %%ch/%%ah/%%dh pick bits 8..15 of dst*alpha + (src<<8),
                // i.e. effectively ((dst*alpha)>>8)+src
                asm volatile(
                    "movzbl (%0), %%ecx\n\t"
                    "movzbl 1(%0), %%eax\n\t"
                    "movzbl 2(%0), %%edx\n\t"

                    "imull %1, %%ecx\n\t"
                    "imull %1, %%eax\n\t"
                    "imull %1, %%edx\n\t"

                    "addl %2, %%ecx\n\t"
                    "addl %2, %%eax\n\t"
                    "addl %2, %%edx\n\t"

                    "movb %%ch, (%0)\n\t"
                    "movb %%ah, 1(%0)\n\t"
                    "movb %%dh, 2(%0)\n\t"

                    :
                    :"r" (&dstbase[4*x]),
                     "r" ((unsigned)srca[x]),
                     "r" (((unsigned)src[x])<<8)
                    :"%eax", "%ecx", "%edx"
                    );
            }
        }
#endif /* HAVE_MMX */
#else /*non x86 arch*/
        for(x=0;x<w;x++){
            if(srca[x]){
#ifdef FAST_OSD
                dstbase[4*x+0]=dstbase[4*x+1]=dstbase[4*x+2]=src[x];
#else
                dstbase[4*x+0]=((dstbase[4*x+0]*srca[x])>>8)+src[x];
                dstbase[4*x+1]=((dstbase[4*x+1]*srca[x])>>8)+src[x];
                dstbase[4*x+2]=((dstbase[4*x+2]*srca[x])>>8)+src[x];
#endif
            }
        }
#endif /* arch_x86 */
        src+=srcstride;
        srca+=srcstride;
        dstbase+=dststride;
    }
#ifdef HAVE_MMX
    asm volatile(EMMS:::"memory");
#endif
    PROFILE_END("vo_draw_alpha_rgb32");
    return;
}
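
/*
 * Usage sketch (illustrative only; the buffer names and the _MMX suffix are
 * assumptions, not part of this file): drawing an osd_w x osd_h OSD bitmap
 * onto a 4-bytes-per-pixel RGB32 frame at pixel position (x0, y0).
 *
 *     unsigned char *dst = image + y0*image_stride + 4*x0;
 *     vo_draw_alpha_rgb32_MMX(osd_w, osd_h,
 *                             osd_src, osd_alpha, osd_stride,
 *                             dst, image_stride);
 *
 * src carries the OSD luminance and srca the per-pixel alpha (0 means the
 * frame pixel is left untouched); both advance by srcstride per line while
 * the destination advances by dststride.
 */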