lavu/hwcontext_qsv: Add support for AV_PIX_FMT_BGRA.

RGB32 (which maps to AV_PIX_FMT_BGRA on Intel platforms) may be used as an
overlay with alpha blending, so add support for the AV_PIX_FMT_BGRA format.

One example of alpha blending overlay: ffmpeg -hwaccel qsv -c:v h264_qsv -i BA1_Sony_D.jsv
-filter_complex 'movie=lena-rgba.png,hwupload=extra_hw_frames=16[a];[0:v][a]overlay_qsv=x=10:y=10'
-c:v h264_qsv -y out.mp4

Rename RGB32 to BGRA to make the code clearer, as suggested by Mark Thompson.

V2: Add P010 format support, otherwise a regression would be introduced for
    HEVC 10-bit encoding. Thanks to Linjie for discovering this.

Signed-off-by: Zhong Li <zhong.li@intel.com>
Verified-by: Fu, Linjie <linjie.fu@intel.com>
This commit is contained in:
Zhong Li 2018-05-21 17:58:52 +08:00
parent 7a6d88ee62
commit a5e1cb9e96
2 changed files with 35 additions and 11 deletions

View File

@ -142,7 +142,7 @@ static int pix_fmt_to_mfx_fourcc(int format)
return MFX_FOURCC_NV12;
case AV_PIX_FMT_YUYV422:
return MFX_FOURCC_YUY2;
case AV_PIX_FMT_RGB32:
case AV_PIX_FMT_BGRA:
return MFX_FOURCC_RGB4;
}

View File

@ -100,6 +100,7 @@ static const struct {
uint32_t fourcc;
} supported_pixel_formats[] = {
{ AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
{ AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
{ AV_PIX_FMT_P010, MFX_FOURCC_P010 },
{ AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
};
@ -751,6 +752,37 @@ static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
return ret;
}
/**
 * Point an mfxFrameSurface1's data pointers at the planes of an AVFrame.
 *
 * Only the plane pointers, pitch and timestamp are filled in; the caller is
 * responsible for setting up surface->Info and must keep the frame's buffers
 * alive for as long as the surface is in use.
 *
 * @param frame   source/destination system-memory frame
 * @param surface surface whose Data fields are populated
 * @return 0 on success, MFX_ERR_UNSUPPORTED if the frame's pixel format
 *         has no mapping here
 */
static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
{
    uint8_t *const *planes = frame->data;
    int fmt = frame->format;

    if (fmt == AV_PIX_FMT_NV12 || fmt == AV_PIX_FMT_P010) {
        /* Semi-planar: luma plane plus interleaved chroma plane. */
        surface->Data.Y  = planes[0];
        surface->Data.UV = planes[1];
    } else if (fmt == AV_PIX_FMT_YUV420P) {
        /* Fully planar: separate Y, U and V planes. */
        surface->Data.Y = planes[0];
        surface->Data.U = planes[1];
        surface->Data.V = planes[2];
    } else if (fmt == AV_PIX_FMT_BGRA) {
        /* Packed BGRA: one interleaved plane, each component pointer offset
         * into the same buffer (B at +0, G at +1, R at +2, A at +3). */
        surface->Data.B = planes[0];
        surface->Data.G = planes[0] + 1;
        surface->Data.R = planes[0] + 2;
        surface->Data.A = planes[0] + 3;
    } else {
        return MFX_ERR_UNSUPPORTED;
    }

    /* Pitch of the first plane; packed formats have only one plane and the
     * chroma pitch of the planar formats is derived by the SDK. */
    surface->Data.Pitch     = frame->linesize[0];
    surface->Data.TimeStamp = frame->pts;

    return 0;
}
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
const AVFrame *src)
{
@ -796,11 +828,7 @@ static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
}
out.Info = in->Info;
out.Data.PitchLow = dst->linesize[0];
out.Data.Y = dst->data[0];
out.Data.U = dst->data[1];
out.Data.V = dst->data[2];
out.Data.A = dst->data[3];
map_frame_to_surface(dst, &out);
do {
err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
@ -868,11 +896,7 @@ static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
}
in.Info = out->Info;
in.Data.PitchLow = src->linesize[0];
in.Data.Y = src->data[0];
in.Data.U = src->data[1];
in.Data.V = src->data[2];
in.Data.A = src->data[3];
map_frame_to_surface(src, &in);
do {
err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);