vo_opengl: disable alpha by default

This reverts the default behavior introduced in commit 93feffad. Way too
often, libavcodec returns RGB data whose pixel format declares an alpha
channel, but whose alpha channel actually contains garbage.

On the other hand, this change will render garbage color values in e.g.
PNG files: for pixels with alpha==0, the color value should essentially be
ignored, which is what the old alpha blend mode did.
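
For context, here is a minimal C sketch (not mpv code; the struct and
function names are invented for illustration) of what the ``blend`` and
``no`` modes amount to for a single 8-bit RGBA pixel, assuming a black
background:

    #include <stdint.h>

    struct rgba { uint8_t r, g, b, a; };

    /* "blend": composite against a black background. Pixels with a == 0
     * get their color multiplied by 0, so garbage color values are
     * effectively ignored. */
    static struct rgba alpha_blend_black(struct rgba in)
    {
        return (struct rgba){
            .r = (uint8_t)(in.r * in.a / 255),
            .g = (uint8_t)(in.g * in.a / 255),
            .b = (uint8_t)(in.b * in.a / 255),
            .a = 255,
        };
    }

    /* "no": drop the alpha channel and display the RGB values as-is,
     * which is why garbage in fully transparent regions becomes visible. */
    static struct rgba alpha_ignore(struct rgba in)
    {
        return (struct rgba){ .r = in.r, .g = in.g, .b = in.b, .a = 255 };
    }

With garbage alpha (for instance, all zeros), ``blend`` would black out the
whole frame, while ``no`` at least shows the color data.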

This "fixes" #1528, which is probably a decoder bug (or far less likely,
a broken file).
Author: wm4
Date:   2015-02-03 21:00:21 +01:00
Parent: e6664e94a1
Commit: a33b46194c
2 changed files with 3 additions and 3 deletions

Changed file 1 of 2:

@@ -557,7 +557,7 @@ Available video output drivers are:
 Sizes must be a power of two, and 512 at most.
 ``alpha=<blend|yes|no>``
-Decides what to do if the input has an alpha component (default: blend).
+Decides what to do if the input has an alpha component (default: no).
 blend
     Blend the frame against a black background.

Changed file 2 of 2:

@@ -323,7 +323,7 @@ const struct gl_video_opts gl_video_opts_def = {
     .scalers = { "bilinear", "bilinear" },
     .scaler_params = {{NAN, NAN}, {NAN, NAN}},
     .scaler_radius = {3, 3},
-    .alpha_mode = 2,
+    .alpha_mode = 0,
     .background = {0, 0, 0, 255},
     .gamma = 1.0f,
 };
@@ -341,7 +341,7 @@ const struct gl_video_opts gl_video_opts_hq_def = {
     .dscaler = "mitchell",
     .scaler_params = {{NAN, NAN}, {NAN, NAN}},
     .scaler_radius = {3, 3},
-    .alpha_mode = 2,
+    .alpha_mode = 0,
     .background = {0, 0, 0, 255},
     .gamma = 1.0f,
 };