d3d11_fmt: allow selecting a format with any alpha value

The decoder may end up using a format with alpha even if it doesn't actually use the alpha channel.
commit dac3cffcd8
parent 6f115bf359
Author: Steve Lhomme
Date:   2024-03-27 10:49:11 +01:00

2 changed files with 3 additions and 3 deletions

modules/video_chroma/d3d11_fmt.c

@@ -744,7 +744,7 @@ const d3d_format_t *(FindD3D11Format)(vlc_object_t *o,
                                        uint8_t bits_per_channel,
                                        uint8_t widthDenominator,
                                        uint8_t heightDenominator,
-                                       uint8_t alpha_bits,
+                                       int alpha_bits,
                                        int cpu_gpu,
                                        UINT supportFlags)
 {
@@ -766,7 +766,7 @@ const d3d_format_t *(FindD3D11Format)(vlc_object_t *o,
             continue;
         if (heightDenominator && heightDenominator < output_format->heightDenominator)
             continue;
-        if (alpha_bits && output_format->bitsForAlpha < alpha_bits)
+        if (alpha_bits > 0 && output_format->bitsForAlpha < alpha_bits)
             continue;
         if (alpha_bits == 0 && output_format->bitsForAlpha != 0)
             continue;
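
With alpha_bits widened to a signed int, the parameter becomes a three-way selector rather than a plain bit count: a negative value means any alpha layout is acceptable, zero requires an alpha-free format, and a positive value requires at least that many alpha bits. The following is a minimal standalone sketch of that test, mirroring the loop body above; d3d_format_sketch_t and alpha_matches are hypothetical names for illustration, not VLC API.

/* Sketch of the three alpha_bits regimes implemented by the loop above.
 *   alpha_bits  < 0 -> any alpha layout is acceptable
 *   alpha_bits == 0 -> only formats without an alpha channel match
 *   alpha_bits  > 0 -> the format must carry at least that many alpha bits
 */
#include <stdbool.h>
#include <stdint.h>

typedef struct {
    uint8_t bitsForAlpha; /* alpha bits carried by this candidate format */
} d3d_format_sketch_t;

static bool alpha_matches(int alpha_bits, const d3d_format_sketch_t *fmt)
{
    if (alpha_bits > 0 && fmt->bitsForAlpha < alpha_bits)
        return false; /* alpha requested, format has too few bits */
    if (alpha_bits == 0 && fmt->bitsForAlpha != 0)
        return false; /* no alpha requested, format carries some */
    return true;      /* alpha_bits < 0: alpha does not matter */
}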

modules/video_chroma/d3d11_fmt.h

@@ -197,7 +197,7 @@ const d3d_format_t *FindD3D11Format(vlc_object_t *,
                                     uint8_t bits_per_channel,
                                     uint8_t widthDenominator,
                                     uint8_t heightDenominator,
-                                    uint8_t alpha_bits,
+                                    int alpha_bits,
                                     int cpu_gpu,
                                     UINT supportFlags);
 #define FindD3D11Format(a,b,c,d,e,f,g,h,i,j) \
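
For illustration, exercising the earlier sketch under each regime; the format values are made up, and this is not VLC code:

#include <stdio.h>

int main(void)
{
    const d3d_format_sketch_t rgba8 = { .bitsForAlpha = 8 }; /* e.g. an RGBA format */
    const d3d_format_sketch_t rgbx8 = { .bitsForAlpha = 0 }; /* e.g. an RGBX format */

    printf("%d\n", alpha_matches(-1, &rgba8)); /* 1: any alpha accepted */
    printf("%d\n", alpha_matches( 0, &rgba8)); /* 0: alpha forbidden */
    printf("%d\n", alpha_matches( 8, &rgbx8)); /* 0: 8 alpha bits required */
    printf("%d\n", alpha_matches( 0, &rgbx8)); /* 1: alpha-free as required */
    return 0;
}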