fix runtime-cpu-detect bug in swscale

With runtime CPU detection enabled, swscale never actually performs any
runtime detection: it simply uses whatever flags are passed via -sws_flags,
which may contain no CPU capability bits at all, so swscale falls back to
the C implementation.
Darren Horrocks, 2011-04-05 02:34:26 +02:00, committed by Michael Niedermayer
parent 434f248723
commit 30c6fefd01
1 changed file with 7 additions and 0 deletions
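
For illustration, a minimal sketch (not part of this commit; the call site is hypothetical) of how the bug surfaces from the public API: a caller that requests only a scaling algorithm passes no SWS_CPU_CAPS_* bits, so under CONFIG_RUNTIME_CPUDETECT the pre-fix ff_getSwsFunc() saw no CPU capability in c->flags and picked the C scaler even on MMX2-capable hardware. The fix in the diff below ORs the detected CPU flags in before dispatching.

    #include <libswscale/swscale.h>

    /* Hypothetical caller, for illustration only: only a scaling algorithm is
     * requested, the equivalent of leaving -sws_flags empty.  'flags' then
     * carries no SWS_CPU_CAPS_* bit, so before this fix the runtime CPU
     * detection path in ff_getSwsFunc() still ended up on the C code. */
    static struct SwsContext *open_scaler(void)
    {
        return sws_getContext(1920, 1080, PIX_FMT_YUV420P,  /* source      */
                              1280,  720, PIX_FMT_YUV420P,  /* destination */
                              SWS_BILINEAR,                  /* no CPU caps */
                              NULL, NULL, NULL);             /* filters, params */
    }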

@@ -62,6 +62,7 @@ untested special converters
 #include "rgb2rgb.h"
 #include "libavutil/intreadwrite.h"
 #include "libavutil/x86_cpu.h"
+#include "libavutil/cpu.h"
 #include "libavutil/avutil.h"
 #include "libavutil/mathematics.h"
 #include "libavutil/bswap.h"
@@ -1315,6 +1316,12 @@ SwsFunc ff_getSwsFunc(SwsContext *c)
 #if CONFIG_RUNTIME_CPUDETECT
     int flags = c->flags;
+    int cpuflags = av_get_cpu_flags();
+
+    flags |= (cpuflags & AV_CPU_FLAG_MMX ? SWS_CPU_CAPS_MMX : 0);
+    flags |= (cpuflags & AV_CPU_FLAG_MMX2 ? SWS_CPU_CAPS_MMX2 : 0);
+    flags |= (cpuflags & AV_CPU_FLAG_3DNOW ? SWS_CPU_CAPS_3DNOW : 0);
+
 #if ARCH_X86
     // ordered per speed fastest first
     if (flags & SWS_CPU_CAPS_MMX2) {