From e5c2794a7162e485eefd3133af5b98fd31386aeb Mon Sep 17 00:00:00 2001
From: Janne Grunau
Date: Sun, 24 Mar 2013 19:44:23 +0100
Subject: [PATCH] x86: consistently use unaligned movs in the unaligned bswap

Fixes fate errors in asv1, ffvhuff and huffyuv on x86_32.
---
 libavcodec/x86/dsputil.asm | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/libavcodec/x86/dsputil.asm b/libavcodec/x86/dsputil.asm
index a1aaaf5d40..078f58c8d4 100644
--- a/libavcodec/x86/dsputil.asm
+++ b/libavcodec/x86/dsputil.asm
@@ -552,8 +552,8 @@ VECTOR_CLIP_INT32 6, 1, 0, 0
 %if cpuflag(ssse3)
     pshufb  m0, m2
     pshufb  m1, m2
-    mova    [r0 +  0], m0
-    mova    [r0 + 16], m1
+    mov%1   [r0 +  0], m0
+    mov%1   [r0 + 16], m1
 %else
     pshuflw m0, m0, 10110001b
     pshuflw m1, m1, 10110001b
@@ -567,8 +567,8 @@ VECTOR_CLIP_INT32 6, 1, 0, 0
     psrlw   m3, 8
     por     m2, m0
     por     m3, m1
-    mova    [r0 +  0], m2
-    mova    [r0 + 16], m3
+    mov%1   [r0 +  0], m2
+    mov%1   [r0 + 16], m3
 %endif
     add     r0, 32
     add     r1, 32
@@ -581,7 +581,7 @@ VECTOR_CLIP_INT32 6, 1, 0, 0
     mov%1   m0, [r1]
 %if cpuflag(ssse3)
     pshufb  m0, m2
-    mova    [r0], m0
+    mov%1   [r0], m0
 %else
     pshuflw m0, m0, 10110001b
     pshufhw m0, m0, 10110001b
@@ -589,7 +589,7 @@ VECTOR_CLIP_INT32 6, 1, 0, 0
     psllw   m0, 8
     psrlw   m2, 8
     por     m2, m0
-    mova    [r0], m2
+    mov%1   [r0], m2
 %endif
     add     r1, 16
     add     r0, 16
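
Note (not part of the commit): a minimal sketch of why the change matters, assuming the surrounding loop macro takes an alignment suffix as %1 (`a` for aligned, `u` for unaligned), as the visible `mov%1 m0, [r1]` load suggests. The macro name and structure below are illustrative, not copied from dsputil.asm.

    ; Illustrative macro only: %1 selects the alignment suffix, so mov%1
    ; expands to mova or movu (movdqa/movdqu for xmm registers) for both
    ; the load and, after this patch, the store as well.
    ; m2 is assumed to already hold the SSSE3 byte-swap shuffle mask.
    %macro BSWAP_SKETCH 1          ; %1 = a (aligned) or u (unaligned)
        mov%1  m0, [r1]            ; load 16 bytes with matching alignment
        pshufb m0, m2              ; byte-swap each dword via the mask in m2
        mov%1  [r0], m0            ; store with the same alignment assumption
    %endmacro

    BSWAP_SKETCH u                 ; unaligned variant: movu loads *and* stores
    BSWAP_SKETCH a                 ; aligned variant: mova throughout

Before the patch, the unaligned instantiation still used mova for the stores, so a destination buffer that is not 16-byte aligned faults; this is the presumed source of the fate failures on x86_32, where buffers are less likely to end up 16-byte aligned.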