libhb: clean up qsv (#6958)

Removed all the unused parts from the previous implementation
and refactored many checks to avoid duplication.
This commit is contained in:
Damiano Galassi 2025-06-12 18:59:56 +02:00 committed by GitHub
parent c3978fd321
commit 4955ef8553
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
25 changed files with 332 additions and 1494 deletions

View File

@ -4567,13 +4567,7 @@ static void job_setup(hb_job_t * job, hb_title_t * title)
job->metadata = hb_metadata_copy( title->metadata ); job->metadata = hb_metadata_copy( title->metadata );
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
job->qsv.ctx = NULL; job->qsv_ctx = hb_qsv_context_init();
if (!job->indepth_scan)
{
job->qsv.ctx = hb_qsv_context_init();
}
job->qsv.decode = !!(title->video_decode_support &
HB_DECODE_SUPPORT_QSV);
#endif #endif
} }
@ -4670,6 +4664,11 @@ static void job_clean( hb_job_t * job )
// clean up metadata // clean up metadata
hb_metadata_close( &job->metadata ); hb_metadata_close( &job->metadata );
#if HB_PROJECT_FEATURE_QSV
// cleanup qsv specific data
hb_qsv_context_close(&job->qsv_ctx);
#endif
} }
} }
@ -7021,17 +7020,16 @@ int hb_get_best_pix_fmt(hb_job_t * job)
static int pix_hw_fmt_is_supported(hb_job_t *job, int pix_fmt) static int pix_hw_fmt_is_supported(hb_job_t *job, int pix_fmt)
{ {
if (pix_fmt == AV_PIX_FMT_QSV) if (hb_hwaccel_is_full_hardware_pipeline_enabled(job))
{ {
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
if (hb_qsv_full_path_is_enabled(job) && hb_qsv_get_memory_type(job) == MFX_IOPATTERN_OUT_VIDEO_MEMORY) if (pix_fmt == AV_PIX_FMT_QSV &&
job->hw_decode & HB_DECODE_SUPPORT_QSV &&
hb_qsv_get_memory_type(job) == MFX_IOPATTERN_OUT_VIDEO_MEMORY)
{ {
return 1; return 1;
} }
#endif #endif
}
else if (hb_hwaccel_is_full_hardware_pipeline_enabled(job))
{
if (pix_fmt == AV_PIX_FMT_CUDA && if (pix_fmt == AV_PIX_FMT_CUDA &&
job->hw_decode & HB_DECODE_SUPPORT_NVDEC) job->hw_decode & HB_DECODE_SUPPORT_NVDEC)
{ {

View File

@ -109,21 +109,21 @@ static int crop_scale_init(hb_filter_object_t * filter, hb_filter_init_t * init)
hb_dict_set_int(avsettings, "w", width); hb_dict_set_int(avsettings, "w", width);
hb_dict_set_int(avsettings, "h", height); hb_dict_set_int(avsettings, "h", height);
hb_dict_set_int(avsettings, "async_depth", init->job->qsv.async_depth); hb_dict_set_int(avsettings, "async_depth", init->job->qsv_ctx->async_depth);
int hw_generation = hb_qsv_hardware_generation(hb_qsv_get_platform(hb_qsv_get_adapter_index())); int hw_generation = hb_qsv_hardware_generation(hb_qsv_get_platform(hb_qsv_get_adapter_index()));
if (init->job->qsv.ctx->vpp_scale_mode) if (init->job->qsv_ctx->vpp_scale_mode)
{ {
hb_dict_set_string(avsettings, "scale_mode", init->job->qsv.ctx->vpp_scale_mode); hb_dict_set_string(avsettings, "scale_mode", init->job->qsv_ctx->vpp_scale_mode);
hb_log("qsv: scaling filter mode %s", init->job->qsv.ctx->vpp_scale_mode); hb_log("qsv: scaling filter mode %s", init->job->qsv_ctx->vpp_scale_mode);
} }
else if (hw_generation >= QSV_G8) else if (hw_generation >= QSV_G8)
{ {
hb_dict_set_string(avsettings, "scale_mode", "compute"); hb_dict_set_string(avsettings, "scale_mode", "compute");
hb_log("qsv: scaling filter mode %s", "compute"); hb_log("qsv: scaling filter mode %s", "compute");
} }
if (init->job->qsv.ctx->vpp_interpolation_method) if (init->job->qsv_ctx->vpp_interpolation_method)
{ {
hb_dict_set_string(avsettings, "method", init->job->qsv.ctx->vpp_interpolation_method); hb_dict_set_string(avsettings, "method", init->job->qsv_ctx->vpp_interpolation_method);
} }
hb_dict_set(avfilter, "vpp_qsv", avsettings); hb_dict_set(avfilter, "vpp_qsv", avsettings);
} }

View File

@ -54,7 +54,6 @@
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
#include "handbrake/qsv_common.h" #include "handbrake/qsv_common.h"
#include "handbrake/qsv_libav.h"
#endif #endif
static void compute_frame_duration( hb_work_private_t *pv ); static void compute_frame_duration( hb_work_private_t *pv );
@ -145,16 +144,6 @@ struct hb_work_private_s
int drop_samples; int drop_samples;
uint64_t downmix_mask; uint64_t downmix_mask;
#if HB_PROJECT_FEATURE_QSV
// QSV-specific settings
struct
{
int decode;
hb_qsv_config config;
const char * codec_name;
} qsv;
#endif
AVFrame * hw_frame; AVFrame * hw_frame;
enum AVPixelFormat hw_pix_fmt; enum AVPixelFormat hw_pix_fmt;
@ -1456,7 +1445,7 @@ int reinit_video_filters(hb_work_private_t * pv)
{ {
settings = hb_dict_init(); settings = hb_dict_init();
#if HB_PROJECT_FEATURE_QSV && (defined( _WIN32 ) || defined( __MINGW32__ )) #if HB_PROJECT_FEATURE_QSV && (defined( _WIN32 ) || defined( __MINGW32__ ))
if (hb_qsv_full_path_is_enabled(pv->job)) if (pv->frame->hw_frames_ctx && pv->job->hw_pix_fmt == AV_PIX_FMT_QSV)
{ {
hb_dict_set(settings, "w", hb_value_int(orig_width)); hb_dict_set(settings, "w", hb_value_int(orig_width));
hb_dict_set(settings, "h", hb_value_int(orig_height)); hb_dict_set(settings, "h", hb_value_int(orig_height));
@ -1512,7 +1501,7 @@ int reinit_video_filters(hb_work_private_t * pv)
if (pv->title->rotation != HB_ROTATION_0) if (pv->title->rotation != HB_ROTATION_0)
{ {
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
if (hb_qsv_full_path_is_enabled(pv->job)) if (pv->frame->hw_frames_ctx && pv->job->hw_pix_fmt == AV_PIX_FMT_QSV)
{ {
switch (pv->title->rotation) switch (pv->title->rotation)
{ {
@ -1842,60 +1831,17 @@ static int decavcodecvInit( hb_work_object_t * w, hb_job_t * job )
pv->next_pts = 0; pv->next_pts = 0;
hb_buffer_list_clear(&pv->list); hb_buffer_list_clear(&pv->list);
#if HB_PROJECT_FEATURE_QSV
pv->qsv.decode = hb_qsv_decode_is_enabled(job);
if (pv->qsv.decode)
{
pv->qsv.codec_name = hb_qsv_decode_get_codec_name(w->codec_param);
pv->qsv.config.io_pattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
if(hb_qsv_get_memory_type(job) == MFX_IOPATTERN_OUT_VIDEO_MEMORY)
{
hb_qsv_info_t *info = hb_qsv_encoder_info_get(hb_qsv_get_adapter_index(), job->vcodec);
if (info != NULL)
{
// setup the QSV configuration
pv->qsv.config.io_pattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
pv->qsv.config.impl_requested = info->implementation;
pv->qsv.config.async_depth = job->qsv.async_depth;
pv->qsv.config.sync_need = 0;
pv->qsv.config.usage_threaded = 1;
pv->qsv.config.additional_buffers = 64; // FIFO_LARGE
if (info->capabilities & HB_QSV_CAP_RATECONTROL_LA)
{
// more surfaces may be needed for the lookahead
pv->qsv.config.additional_buffers = 160;
}
if (!pv->job->qsv.ctx)
{
hb_error( "decavcodecvInit: no context" );
return 1;
}
pv->job->qsv.ctx->full_path_is_enabled = 1;
if (!pv->job->qsv.ctx->dec_space)
{
pv->job->qsv.ctx->dec_space = av_mallocz(sizeof(hb_qsv_space));
if(!pv->job->qsv.ctx->dec_space)
{
hb_error( "decavcodecvInit: dec_space alloc failed" );
return 1;
}
pv->job->qsv.ctx->dec_space->is_init_done = 1;
}
}
}
}
#endif
if( pv->job && pv->job->title && !pv->job->title->has_resolution_change ) if( pv->job && pv->job->title && !pv->job->title->has_resolution_change )
{ {
pv->threads = HB_FFMPEG_THREADS_AUTO; pv->threads = HB_FFMPEG_THREADS_AUTO;
} }
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
if (pv->qsv.decode) if (hb_hwaccel_decode_is_enabled(job) &&
pv->job->hw_decode & HB_DECODE_SUPPORT_QSV)
{ {
pv->codec = avcodec_find_decoder_by_name(pv->qsv.codec_name); const char *codec_name = hb_qsv_decode_get_codec_name(w->codec_param);
pv->codec = avcodec_find_decoder_by_name(codec_name);
} }
else else
#endif #endif
@ -1941,15 +1887,18 @@ static int decavcodecvInit( hb_work_object_t * w, hb_job_t * job )
} }
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
if (pv->qsv.decode) if (hb_hwaccel_decode_is_enabled(job) &&
{ pv->job->hw_decode & HB_DECODE_SUPPORT_QSV)
{
if (hb_hwaccel_is_full_hardware_pipeline_enabled(pv->job)) if (hb_hwaccel_is_full_hardware_pipeline_enabled(pv->job))
{ {
hb_hwaccel_hwframes_ctx_init(pv->context, job); hb_hwaccel_hwframes_ctx_init(pv->context, job);
job->qsv.ctx->hb_ffmpeg_qsv_hw_frames_ctx = av_buffer_ref(pv->context->hw_frames_ctx); job->qsv_ctx->hw_frames_ctx = av_buffer_ref(pv->context->hw_frames_ctx);
} }
if (pv->context->codec_id == AV_CODEC_ID_HEVC) if (pv->context->codec_id == AV_CODEC_ID_HEVC)
{
av_dict_set( &av_opts, "load_plugin", "hevc_hw", 0 ); av_dict_set( &av_opts, "load_plugin", "hevc_hw", 0 );
}
} }
#endif #endif
@ -2487,7 +2436,8 @@ static int decavcodecvInfo( hb_work_object_t *w, hb_work_info_t *info )
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
if (hb_qsv_available()) if (hb_qsv_available())
{ {
if (hb_qsv_decode_is_codec_supported(hb_qsv_get_adapter_index(), pv->context->codec_id, pv->context->pix_fmt, pv->context->width, pv->context->height)) if (hb_qsv_decode_is_codec_supported(hb_qsv_get_adapter_index(), pv->context->codec_id,
pv->context->pix_fmt, pv->context->width, pv->context->height))
{ {
info->video_decode_support |= HB_DECODE_SUPPORT_QSV; info->video_decode_support |= HB_DECODE_SUPPORT_QSV;
} }

View File

@ -933,8 +933,8 @@ int encavcodecInit( hb_work_object_t * w, hb_job_t * job )
} }
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
if (hb_hwaccel_is_full_hardware_pipeline_enabled(pv->job) && if (hb_hwaccel_is_full_hardware_pipeline_enabled(job) &&
hb_qsv_decode_is_enabled(job)) job->hw_pix_fmt == AV_PIX_FMT_QSV)
{ {
pv->context = context; pv->context = context;
pv->qsv_data.codec = codec; pv->qsv_data.codec = codec;
@ -1311,10 +1311,12 @@ int encavcodecWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
// postponed encoder initialization, reused code from encavcodecInit() // postponed encoder initialization, reused code from encavcodecInit()
if (hb_hwaccel_is_full_hardware_pipeline_enabled(pv->job) && if (hb_hwaccel_is_full_hardware_pipeline_enabled(pv->job) &&
hb_qsv_decode_is_enabled(pv->job) && pv->context->hw_frames_ctx == NULL && pv->job->qsv.ctx->hb_ffmpeg_qsv_hw_frames_ctx != NULL) pv->job->hw_pix_fmt == AV_PIX_FMT_QSV &&
pv->context->hw_frames_ctx == NULL &&
pv->job->qsv_ctx->hw_frames_ctx != NULL)
{ {
// use the same hw frames context as for decoder or filter graph hw frames context // use the same hw frames context as for decoder or filter graph hw frames context
pv->context->hw_frames_ctx = pv->job->qsv.ctx->hb_ffmpeg_qsv_hw_frames_ctx; pv->context->hw_frames_ctx = av_buffer_ref(pv->job->qsv_ctx->hw_frames_ctx);
int open_ret = 0; int open_ret = 0;
if ((open_ret = hb_avcodec_open(pv->context, pv->qsv_data.codec, &pv->qsv_data.av_opts, HB_FFMPEG_THREADS_AUTO))) if ((open_ret = hb_avcodec_open(pv->context, pv->qsv_data.codec, &pv->qsv_data.av_opts, HB_FFMPEG_THREADS_AUTO)))
{ {
@ -1322,14 +1324,6 @@ int encavcodecWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
return HB_WORK_ERROR; return HB_WORK_ERROR;
} }
/*
* Reload colorimetry settings in case custom
* values were set in the encoder_options string.
*/
pv->job->color_prim_override = pv->context->color_primaries;
pv->job->color_transfer_override = pv->context->color_trc;
pv->job->color_matrix_override = pv->context->colorspace;
// avcodec_open populates the opts dictionary with the // avcodec_open populates the opts dictionary with the
// things it didn't recognize. // things it didn't recognize.
AVDictionaryEntry *t = NULL; AVDictionaryEntry *t = NULL;

View File

@ -59,14 +59,14 @@ static int format_init(hb_filter_object_t *filter, hb_filter_init_t *init)
hb_dict_t *avsettings = hb_dict_init(); hb_dict_t *avsettings = hb_dict_init();
#if HB_PROJECT_FEATURE_QSV && (defined( _WIN32 ) || defined( __MINGW32__ )) #if HB_PROJECT_FEATURE_QSV && (defined( _WIN32 ) || defined( __MINGW32__ ))
if (hb_qsv_full_path_is_enabled(init->job)) if (init->hw_pix_fmt == AV_PIX_FMT_QSV)
{ {
hb_dict_set_string(avsettings, "format", format); hb_dict_set_string(avsettings, "format", format);
hb_dict_set_int(avsettings, "async_depth", init->job->qsv.async_depth); hb_dict_set_int(avsettings, "async_depth", init->job->qsv_ctx->async_depth);
init->pix_fmt = av_get_pix_fmt(format); init->pix_fmt = av_get_pix_fmt(format);
if (init->job->qsv.ctx->out_range != AVCOL_RANGE_UNSPECIFIED) if (init->job->qsv_ctx->out_range != AVCOL_RANGE_UNSPECIFIED)
hb_dict_set_string(avsettings, "out_range", (init->job->qsv.ctx->out_range == AVCOL_RANGE_JPEG) ? "full" : "limited"); hb_dict_set_string(avsettings, "out_range", (init->job->qsv_ctx->out_range == AVCOL_RANGE_JPEG) ? "full" : "limited");
hb_dict_set(avfilter, "vpp_qsv", avsettings); hb_dict_set(avfilter, "vpp_qsv", avsettings);
} }

View File

@ -104,7 +104,7 @@ typedef enum
#include "libavutil/channel_layout.h" #include "libavutil/channel_layout.h"
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
#include "qsv_libav.h" #include "qsv_common.h"
#endif #endif
#ifdef __LIBHB__ #ifdef __LIBHB__
@ -873,20 +873,16 @@ struct hb_job_s
// initially (for frame accurate positioning // initially (for frame accurate positioning
// to non-I frames). // to non-I frames).
// QSV-specific settings
struct
{
int decode;
int async_depth;
#if HB_PROJECT_FEATURE_QSV
hb_qsv_context *ctx;
#endif
} qsv;
int hw_decode; int hw_decode;
int keep_duplicate_titles; int keep_duplicate_titles;
#ifdef __LIBHB__ #ifdef __LIBHB__
#if HB_PROJECT_FEATURE_QSV
// QSV-specific settings
hb_qsv_context_t *qsv_ctx;
#endif
/* Internal data */ /* Internal data */
hb_handle_t * h; hb_handle_t * h;
volatile hb_error_code * done_error; volatile hb_error_code * done_error;

View File

@ -29,6 +29,7 @@ AVBufferRef *hb_hwaccel_init_hw_frames_ctx(AVBufferRef *hw_device_ctx,
int hb_hwaccel_hwframe_init(hb_job_t *job, struct AVFrame **frame); int hb_hwaccel_hwframe_init(hb_job_t *job, struct AVFrame **frame);
hb_buffer_t * hb_hwaccel_copy_video_buffer_to_hw_video_buffer(hb_job_t *job, hb_buffer_t **buf); hb_buffer_t * hb_hwaccel_copy_video_buffer_to_hw_video_buffer(hb_job_t *job, hb_buffer_t **buf);
const char * hb_hwaccel_get_name(int hw_decode);
int hb_hwaccel_available(int codec_id, const char *device_name); int hb_hwaccel_available(int codec_id, const char *device_name);
int hb_hwaccel_decode_is_enabled(hb_job_t *job); int hb_hwaccel_decode_is_enabled(hb_job_t *job);
int hb_hwaccel_is_full_hardware_pipeline_enabled(hb_job_t *job); int hb_hwaccel_is_full_hardware_pipeline_enabled(hb_job_t *job);

View File

@ -60,10 +60,6 @@ void hb_job_setup_passes(hb_handle_t *h, hb_job_t *job, hb_list_t *list_pass);
*/ */
typedef struct hb_buffer_s hb_buffer_t; typedef struct hb_buffer_s hb_buffer_t;
#if HB_PROJECT_FEATURE_QSV
#include "handbrake/qsv_libav.h"
#endif
struct hb_buffer_settings_s struct hb_buffer_settings_s
{ {
enum { OTHER_BUF, AUDIO_BUF, VIDEO_BUF, SUBTITLE_BUF, FRAME_BUF } type; enum { OTHER_BUF, AUDIO_BUF, VIDEO_BUF, SUBTITLE_BUF, FRAME_BUF } type;
@ -151,14 +147,6 @@ struct hb_buffer_s
int size; int size;
} plane[4]; // 3 Color components + alpha } plane[4]; // 3 Color components + alpha
#if HB_PROJECT_FEATURE_QSV
struct qsv
{
void * qsv_atom;
hb_qsv_context * ctx;
} qsv_details;
#endif
void *storage; void *storage;
enum { STANDARD, AVFRAME, COREMEDIA } storage_type; enum { STANDARD, AVFRAME, COREMEDIA } storage_type;

View File

@ -10,27 +10,45 @@
#ifndef HANDBRAKE_QSV_COMMON_H #ifndef HANDBRAKE_QSV_COMMON_H
#define HANDBRAKE_QSV_COMMON_H #define HANDBRAKE_QSV_COMMON_H
int hb_qsv_available(); int hb_qsv_available();
#include "handbrake/project.h" #include "handbrake/project.h"
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
// Public API // Public API
int hb_qsv_impl_set_preferred(const char *name); int hb_qsv_impl_set_preferred(const char *name);
#ifdef __LIBHB__ #ifdef __LIBHB__
// Private API // Private API
#include "vpl/mfxvideo.h"
#include "handbrake/hb_dict.h" #include "handbrake/hb_dict.h"
#include "handbrake/qsv_libav.h"
#include "libavutil/hwcontext_qsv.h" #include "libavutil/hwcontext_qsv.h"
#include "libavcodec/avcodec.h"
typedef struct hb_qsv_context_s
{
int async_depth;
int la_is_enabled;
int memory_type;
int out_range;
int dx_index;
const char *vpp_scale_mode;
const char *vpp_interpolation_method;
AVBufferRef *hw_frames_ctx;
} hb_qsv_context_t;
// version of MSDK/QSV API currently used
#define HB_QSV_MSDK_VERSION_MAJOR 1
#define HB_QSV_MSDK_VERSION_MINOR 3
/* Minimum Intel Media SDK version (currently 1.3, for Sandy Bridge support) */ /* Minimum Intel Media SDK version (currently 1.3, for Sandy Bridge support) */
#define HB_QSV_MINVERSION_MAJOR HB_QSV_MSDK_VERSION_MAJOR #define HB_QSV_MINVERSION_MAJOR HB_QSV_MSDK_VERSION_MAJOR
#define HB_QSV_MINVERSION_MINOR HB_QSV_MSDK_VERSION_MINOR #define HB_QSV_MINVERSION_MINOR HB_QSV_MSDK_VERSION_MINOR
#define HB_QSV_FFMPEG_INITIAL_POOL_SIZE (0)
#define HB_QSV_FFMPEG_EXTRA_HW_FRAMES (60)
static const char * const hb_qsv_h264_level_names[] = static const char * const hb_qsv_h264_level_names[] =
{ {
"auto", "1.0", "1b", "1.1", "1.2", "1.3", "2.0", "2.1", "2.2", "3.0", "auto", "1.0", "1b", "1.1", "1.2", "1.3", "2.0", "2.1", "2.2", "3.0",
@ -202,7 +220,6 @@ int hb_qsv_implementation_is_hardware(mfxIMPL implementation);
/* Intel Quick Sync Video DECODE utilities */ /* Intel Quick Sync Video DECODE utilities */
const char* hb_qsv_decode_get_codec_name(enum AVCodecID codec_id); const char* hb_qsv_decode_get_codec_name(enum AVCodecID codec_id);
int hb_qsv_decode_is_enabled(hb_job_t *job);
/* Media SDK parameters handling */ /* Media SDK parameters handling */
enum enum
@ -349,15 +366,13 @@ mfxIMPL hb_qsv_dx_index_to_impl(int dx_index);
/* QSV pipeline helpers */ /* QSV pipeline helpers */
const char * hb_map_qsv_preset_name(const char * preset); const char * hb_map_qsv_preset_name(const char * preset);
int hb_qsv_apply_encoder_options(qsv_data_t * qsv_data, hb_job_t * job, AVDictionary** av_opts); int hb_qsv_apply_encoder_options(qsv_data_t * qsv_data, hb_job_t * job, AVDictionary** av_opts);
int hb_qsv_is_enabled(hb_job_t *job); hb_qsv_context_t * hb_qsv_context_init();
hb_qsv_context* hb_qsv_context_init(); hb_qsv_context_t * hb_qsv_context_dup(const hb_qsv_context_t *src);
void hb_qsv_context_uninit(hb_job_t *job); void hb_qsv_context_close(hb_qsv_context_t **_ctx);
int hb_qsv_are_filters_supported(hb_job_t *job); int hb_qsv_are_filters_supported(hb_job_t *job);
int hb_qsv_get_memory_type(hb_job_t *job); int hb_qsv_get_memory_type(hb_job_t *job);
int hb_qsv_full_path_is_enabled(hb_job_t *job); int hb_qsv_full_path_is_enabled(hb_job_t *job);
int hb_qsv_get_buffer(AVCodecContext *s, AVFrame *frame, int flags);
enum AVPixelFormat hb_qsv_get_format(AVCodecContext *s, const enum AVPixelFormat *pix_fmts); enum AVPixelFormat hb_qsv_get_format(AVCodecContext *s, const enum AVPixelFormat *pix_fmts);
void hb_qsv_uninit_enc(hb_job_t *job);
int hb_qsv_setup_job(hb_job_t *job); int hb_qsv_setup_job(hb_job_t *job);
int hb_qsv_decode_h264_is_supported(int adapter_index); int hb_qsv_decode_h264_is_supported(int adapter_index);
int hb_qsv_decode_h265_is_supported(int adapter_index); int hb_qsv_decode_h265_is_supported(int adapter_index);

View File

@ -1,527 +0,0 @@
/* ********************************************************************* *\
Copyright (C) 2013 Intel Corporation. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of Intel Corporation nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\* ********************************************************************* */
#ifndef HANDBRAKE_QSV_LIBAV_H
#define HANDBRAKE_QSV_LIBAV_H
/**
* @file
* @ingroup lavc_codec_hwaccel_qsv
* Common header for QSV/MediaSDK acceleration
*/
/**
* @defgroup lavc_codec_hwaccel_qsv QSV/MediaSDK based Decode/Encode and VPP
* @ingroup lavc_codec_hwaccel
*
* As Intel Quick Sync Video (QSV) can decode/preprocess/encode with HW
* acceleration.
*
* Supported features:
* - access:
* - format AV_PIX_FMT_QSV_H264, AVCodec decoder based implementation
* - name "h264_qsv", avcodec_find_decoder_by_name( "h264_qsv")
* - IO Pattern:
* - Opaque memory: MFX_IOPATTERN_OUT_OPAQUE_MEMORY // Video memory is
* MFX_IMPL_HARDWARE or MFX_IMPL_AUTO and runtime support,
* otherwise: System Memory
* - System memory: MFX_IOPATTERN_OUT_SYSTEM_MEMORY
* - Allocators:
* - default allocator for System memory: MFX_MEMTYPE_SYSTEM_MEMORY
* - details:
* implementation as "per frame"
*
* TODO list:
* - access:
* - format AV_PIX_FMT_QSV_MPEG2
* - format AV_PIX_FMT_QSV_VC1
* - format AV_PIX_FMT_QSV, see "details" below
* - IO Pattern:
* - VIDEO_MEMORY // MFX_IOPATTERN_OUT_VIDEO_MEMORY
* - Allocators:
* - Video memory: MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET /
* MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET
* - details:
* "per slice" support: AV_PIX_FMT_QSV with AVHWAccel based implementation
*
* Note hb_qsv_config struct required to fill in via
* AVCodecContext.hwaccel_context
*
* As per frame, note AVFrame.data[2] (qsv_atom) used for frame atom id,
* data/linesize should be used together with SYSTEM_MEMORY and tested
*
* Note: Compilation would require:
* - Intel MediaSDK headers, Full SDK is available from the original web site:
* http://software.intel.com/en-us/vcsource/tools/media-SDK
* Will be referenced as mfx*.h (mfxdefs.h, mfxstructures.h, ... )
* and
* - Final application has to link against Intel MediaSDK dispatcher, available
* at MediaSDK as well
*
* Target OS: as per available dispatcher and driver support
*
* Implementation details:
* Provided struct hb_qsv_context contain several struct hb_qsv_space(s) for decode,
* VPP and encode.
* hb_qsv_space just contain needed environment for the appropriate action.
* Based on this - pipeline (see pipes) will be build to pass details such as
* mfxFrameSurface1* and mfxSyncPoint* from one action to the next.
*
* Resources re-usage (hb_qsv_flush_stages):
* hb_qsv_context *qsv = (hb_qsv_context *)video_codec_ctx->priv_data;
* hb_qsv_list *pipe = (hb_qsv_list *)video_frame->data[2];
* hb_qsv_flush_stages( qsv->pipes, &pipe );
*
* DTS re-usage:
* hb_qsv_dts_pop(qsv);
*
* for video,DX9/11 memory it has to be Unlock'ed as well
*
* Implementation is thread aware and uses synchronization point(s) from MediaSDK
* as per configuration.
*
* For the details of MediaSDK usage and options available - please refer to the
* available documentation at MediaSDK.
*
* Feature set used from MSDK is defined by HB_QSV_MSDK_VERSION_MAJOR and
* HB_QSV_MSDK_VERSION_MINOR
*
* @{
*/
#include <stdint.h>
#include <string.h>
#include "vpl/mfxvideo.h"
#include "vpl/mfxdispatcher.h"
#include "libavutil/mem.h"
#include "libavutil/time.h"
#include "libavcodec/avcodec.h"
#if defined (__GNUC__)
#include <pthread.h>
#define ff_qsv_atomic_inc(ptr) __sync_add_and_fetch(ptr,1)
#define ff_qsv_atomic_dec(ptr) __sync_sub_and_fetch (ptr,1)
#elif HAVE_WINDOWS_H // MSVC case
#include <windows.h>
#if HAVE_PTHREADS
#include <pthread.h>
#elif HAVE_W32THREADS
#include "w32pthreads.h"
#endif
#define ff_qsv_atomic_inc(ptr) InterlockedIncrement(ptr)
#define ff_qsv_atomic_dec(ptr) InterlockedDecrement (ptr)
#endif
// sleep is defined in milliseconds
#define hb_qsv_sleep(x) av_usleep((x)*1000)
#define HB_QSV_ZERO_MEMORY(VAR) {memset(&VAR, 0, sizeof(VAR));}
#define HB_QSV_ALIGN32(X) (((mfxU32)((X)+31)) & (~ (mfxU32)31))
#define HB_QSV_ALIGN16(value) (((value + 15) >> 4) << 4)
#ifndef HB_QSV_PRINT_RET_MSG
#define HB_QSV_PRINT_RET_MSG(ERR) { fprintf(stderr, "Error code %d,\t%s\t%d\n", ERR, __FUNCTION__, __LINE__); }
#endif
#ifndef HB_QSV_DEBUG_ASSERT
#define HB_QSV_DEBUG_ASSERT(x,y) { if ((x)) { fprintf(stderr, "\nASSERT: %s\n", y); } }
#endif
#define HB_QSV_CHECK_RET(P, X, ERR) {if ((X) > (P)) {HB_QSV_PRINT_RET_MSG(ERR); return;}}
#define HB_QSV_CHECK_RESULT(P, X, ERR) {if ((X) > (P)) {HB_QSV_PRINT_RET_MSG(ERR); return ERR;}}
#define HB_QSV_CHECK_POINTER(P, ERR) {if (!(P)) {HB_QSV_PRINT_RET_MSG(ERR); return ERR;}}
#define HB_QSV_IGNORE_MFX_STS(P, X) {if ((X) == (P)) {P = MFX_ERR_NONE;}}
#define HB_QSV_ID_BUFFER MFX_MAKEFOURCC('B','U','F','F')
#define HB_QSV_ID_FRAME MFX_MAKEFOURCC('F','R','M','E')
#define HB_QSV_SURFACE_NUM 80
#define HB_QSV_SYNC_NUM HB_QSV_SURFACE_NUM*3/4
#define HB_QSV_JOB_SIZE_DEFAULT 10
#define HB_QSV_SYNC_TIME_DEFAULT 10000
// see hb_qsv_get_free_sync, hb_qsv_get_free_surface , 100 if usleep(10*1000)(10ms) == 1 sec
#define HB_QSV_REPEAT_NUM_DEFAULT 100
#define HB_QSV_ASYNC_DEPTH_DEFAULT 4
#define HB_QSV_AVC_DECODER_WIDTH_MAX 4096
#define HB_QSV_AVC_DECODER_HEIGHT_MAX 4096
// version of MSDK/QSV API currently used
#define HB_QSV_MSDK_VERSION_MAJOR 1
#define HB_QSV_MSDK_VERSION_MINOR 3
typedef enum HB_QSV_STAGE_TYPE {
#define HB_QSV_DECODE_MASK 0x001
HB_QSV_DECODE = 0x001,
#define HB_QSV_VPP_MASK 0x0F0
// "Mandatory VPP filter" , might be with "Hint-based VPP filters"
HB_QSV_VPP_DEFAULT = 0x010,
// "User Modules" etc
HB_QSV_VPP_USER = 0x020,
#define av_QSV_ENCODE_MASK 0x100
HB_QSV_ENCODE = 0x100
#define HB_QSV_ANY_MASK 0xFFF
} HB_QSV_STAGE_TYPE;
typedef struct QSVMid {
AVBufferRef *hw_frames_ref;
mfxHDLPair *handle_pair;
AVFrame *locked_frame;
AVFrame *hw_frame;
mfxFrameSurface1 surf;
} QSVMid;
typedef struct QSVFrame {
AVFrame *frame;
mfxFrameSurface1 surface;
mfxEncodeCtrl enc_ctrl;
mfxExtDecodedFrameInfo dec_info;
mfxExtBuffer *ext_param;
int queued;
int used;
struct QSVFrame *next;
} QSVFrame;
#define HB_QSV_FFMPEG_INITIAL_POOL_SIZE (0)
#define HB_QSV_FFMPEG_EXTRA_HW_FRAMES (60)
typedef struct HBQSVFramesContext {
AVBufferRef *hw_frames_ctx;
} HBQSVFramesContext;
typedef struct hb_qsv_list {
// practically pthread_mutex_t
void *mutex;
pthread_mutexattr_t mta;
void **items;
int items_alloc;
int items_count;
} hb_qsv_list;
typedef struct hb_qsv_sync {
mfxSyncPoint* p_sync;
int in_use;
} hb_qsv_sync;
typedef struct hb_qsv_stage {
HB_QSV_STAGE_TYPE type;
struct {
mfxBitstream *p_bs;
mfxFrameSurface1 *p_surface;
HBQSVFramesContext *p_frames_ctx;
} in;
struct {
mfxBitstream *p_bs;
mfxFrameSurface1 *p_surface;
hb_qsv_sync *sync;
} out;
hb_qsv_list *pending;
} hb_qsv_stage;
typedef struct hb_qsv_task {
mfxBitstream *bs;
hb_qsv_stage *stage;
} hb_qsv_task;
typedef struct hb_qsv_space {
uint8_t is_init_done;
HB_QSV_STAGE_TYPE type;
mfxVideoParam m_mfxVideoParam;
mfxFrameAllocResponse response;
mfxFrameAllocRequest request[2]; // [0] - in, [1] - out, if needed
mfxExtBuffer **p_ext_params;
uint16_t p_ext_param_num;
uint16_t surface_num_max_used;
uint16_t surface_num;
mfxFrameSurface1 *p_surfaces[HB_QSV_SURFACE_NUM];
uint16_t sync_num_max_used;
uint16_t sync_num;
hb_qsv_sync *p_syncp[HB_QSV_SYNC_NUM];
mfxBitstream bs;
uint8_t *p_buf;
size_t p_buf_max_size;
// only for encode and tasks
hb_qsv_list *tasks;
hb_qsv_list *pending;
// storage for allocations/mfxMemId*
mfxMemId *mids;
} hb_qsv_space;
typedef struct hb_qsv_context {
volatile int is_context_active;
mfxIMPL impl;
mfxSession mfx_session;
mfxVersion ver;
// decode
hb_qsv_space *dec_space;
// encode
hb_qsv_space *enc_space;
// vpp
hb_qsv_list *vpp_space;
hb_qsv_list *pipes;
// MediaSDK starting from API version 1.6 includes DecodeTimeStamp
// in addition to TimeStamp
// see also HB_QSV_MSDK_VERSION_MINOR , HB_QSV_MSDK_VERSION_MAJOR
hb_qsv_list *dts_seq;
// practically pthread_mutex_t
void *qts_seq_mutex;
int is_anex;
void *qsv_config;
int la_is_enabled;
int qsv_hw_filters_via_video_memory_are_enabled;
int qsv_hw_filters_via_system_memory_are_enabled;
int memory_type;
int out_range;
int full_path_is_enabled;
const char *vpp_scale_mode;
const char *vpp_interpolation_method;
int dx_index;
AVBufferRef *hb_hw_device_ctx;
AVBufferRef *hb_ffmpeg_qsv_hw_frames_ctx;
mfxHDL device_manager_handle;
mfxHandleType device_manager_handle_type;
void *device_context;
hb_display_t *display;
} hb_qsv_context;
typedef enum {
QSV_PART_ANY = 0,
QSV_PART_LOWER,
QSV_PART_UPPER
} hb_qsv_split;
typedef struct {
int64_t dts;
} hb_qsv_dts;
typedef struct hb_qsv_alloc_frame {
mfxU32 id;
mfxFrameInfo info;
} hb_qsv_alloc_frame;
typedef struct hb_qsv_alloc_buffer {
mfxU32 id;
mfxU32 nbytes;
mfxU16 type;
} hb_qsv_alloc_buffer;
typedef struct hb_qsv_allocators_space {
hb_qsv_space *space;
mfxFrameAllocator frame_alloc;
} hb_qsv_allocators_space;
typedef struct hb_qsv_config {
/**
* Set asynch depth of processing with QSV
* Format: 0 and more
*
* - encoding: Set by user.
* - decoding: Set by user.
*/
int async_depth;
/**
* Range of numbers that indicate trade-offs between quality and speed.
* Format: from 1/MFX_TARGETUSAGE_BEST_QUALITY to 7/MFX_TARGETUSAGE_BEST_SPEED inclusive
*
* - encoding: Set by user.
* - decoding: unused
*/
int target_usage;
/**
* Number of reference frames; if NumRefFrame = 0, this parameter is not specified.
* Format: 0 and more
*
* - encoding: Set by user.
* - decoding: unused
*/
int num_ref_frame;
/**
* Distance between I- or P- key frames; if it is zero, the GOP structure is unspecified.
* Note: If GopRefDist = 1, there are no B-frames used.
*
* - encoding: Set by user.
* - decoding: unused
*/
int gop_ref_dist;
/**
* Number of pictures within the current GOP (Group of Pictures); if GopPicSize=0,
* then the GOP size is unspecified. If GopPicSize=1, only I-frames are used.
*
* - encoding: Set by user.
* - decoding: unused
*/
int gop_pic_size;
/**
* Set type of surfaces used with QSV
* Format: "IOPattern enum" of Media SDK
*
* - encoding: Set by user.
* - decoding: Set by user.
*/
int io_pattern;
/**
* Set amount of additional surfaces might be needed
* Format: amount of additional buffers(surfaces+syncs)
* to allocate in advance
*
* - encoding: Set by user.
* - decoding: Set by user.
*/
int additional_buffers;
/**
* If pipeline should be sync.
* Format: wait time in milliseconds,
* HB_QSV_SYNC_TIME_DEFAULT/10000 might be a good value
*
* - encoding: Set by user.
* - decoding: Set by user.
*/
int sync_need;
/**
* Type of implementation needed
*
* - encoding: Set by user.
* - decoding: Set by user.
*/
int impl_requested;
/**
* if QSV usage is multithreaded.
* Format: Yes/No, 1/0
*
* - encoding: Set by user.
* - decoding: Set by user.
*/
int usage_threaded;
/**
* if QSV use an external allocation (valid per session/mfxSession)
* Format: pointer to allocators, if default: 0
*
* note that:
* System Memory: can be used without provided and external allocator,
* meaning MediaSDK will use an internal one
* Video Memory: in this case - we must provide an external allocator
* Also, Media SDK session doesn't require external allocator if the application
* uses opaque memory
*
* Calls SetFrameAllocator/SetBufferAllocator
* (MFXVideoCORE_SetFrameAllocator/MFXVideoCORE_SetBufferAllocator)
* are to pass allocators to Media SDK
*
* - encoding: Set by user.
* - decoding: Set by user.
*/
hb_qsv_allocators_space *allocators;
} hb_qsv_config;
// Annex B start-code prefix detection state (presumably used when parsing
// H.264/H.265 elementary streams — TODO confirm against the consumer of
// these values, which is outside this header)
#define ANEX_UNKNOWN 0
#define ANEX_PREFIX 1
#define ANEX_NO_PREFIX 2

// 4-byte NAL start-code prefix 0x00000001
static const uint8_t ff_prefix_code[] = { 0x00, 0x00, 0x00, 0x01 };

// Pool helpers: locate a free sync point / surface / encode-task slot in
// the given hb_qsv_space (retry/blocking behavior lives in the .c file)
int hb_qsv_get_free_sync(hb_qsv_space *, hb_qsv_context *);
int hb_qsv_get_free_surface(hb_qsv_space *, hb_qsv_context *, mfxFrameInfo *,
                            hb_qsv_split);
int hb_qsv_get_free_encode_task(hb_qsv_list *);
int hb_qsv_wait_on_sync(hb_qsv_context *, hb_qsv_stage *);
// Context reference counting and pipeline (list-of-stages) management
void hb_qsv_add_context_usage(hb_qsv_context *, int);
void hb_qsv_pipe_list_create(hb_qsv_list **, int);
void hb_qsv_pipe_list_clean(hb_qsv_list **);
void hb_qsv_add_stagee(hb_qsv_list **, hb_qsv_stage *, int);
hb_qsv_stage *hb_qsv_get_last_stage(hb_qsv_list *);
hb_qsv_list *hb_qsv_pipe_by_stage(hb_qsv_list *, hb_qsv_stage *);
void hb_qsv_flush_stages(hb_qsv_list *, hb_qsv_list **, int);
// DTS queue bookkeeping (ordered insert / pop of timestamps)
void hb_qsv_dts_ordered_insert(hb_qsv_context *, int, int, int64_t, int);
void hb_qsv_dts_pop(hb_qsv_context *);
// Stage and context lifecycle
hb_qsv_stage *hb_qsv_stage_init(void);
void hb_qsv_stage_clean(hb_qsv_stage **, int);
int hb_qsv_context_clean(hb_qsv_context *, int);
// Queries: is this sync point / surface still referenced by any pipeline?
int ff_qsv_is_sync_in_pipe(mfxSyncPoint *, hb_qsv_context *);
int ff_qsv_is_surface_in_pipe(mfxFrameSurface1 *, hb_qsv_context *);
// Generic list container (optionally mutex-protected when created threaded)
hb_qsv_list *hb_qsv_list_init(int);
int hb_qsv_list_lock(hb_qsv_list *);
int hb_qsv_list_unlock(hb_qsv_list *);
int hb_qsv_list_add(hb_qsv_list *, void *);
void hb_qsv_list_rem(hb_qsv_list *, void *);
void hb_qsv_list_insert(hb_qsv_list *, int, void *);
void hb_qsv_list_close(hb_qsv_list **);
int hb_qsv_list_count(hb_qsv_list *);
void *hb_qsv_list_item(hb_qsv_list *, int);
/* @} */
#endif // HANDBRAKE_QSV_LIBAV_H

View File

@ -1793,6 +1793,10 @@ static void hb_add_internal( hb_handle_t * h, hb_job_t * job, hb_list_t *list_pa
job_copy->list_attachment = NULL; job_copy->list_attachment = NULL;
job_copy->metadata = NULL; job_copy->metadata = NULL;
#if HB_PROJECT_FEATURE_QSV
job_copy->qsv_ctx = hb_qsv_context_dup(job->qsv_ctx);
#endif
/* If we're doing Foreign Audio Search, copy all subtitles matching the /* If we're doing Foreign Audio Search, copy all subtitles matching the
* first audio track language we find in the audio list. * first audio track language we find in the audio list.
* *

View File

@ -594,10 +594,13 @@ hb_dict_t* hb_job_to_dict( const hb_job_t * job )
int subtitle_search_burn; int subtitle_search_burn;
int ii; int ii;
int adapter_index = 0; int adapter_index = 0;
int async_depth = 0;
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
if (job->qsv.ctx){ if (job->qsv_ctx)
adapter_index = job->qsv.ctx->dx_index; {
async_depth = job->qsv_ctx->async_depth;
adapter_index = job->qsv_ctx->dx_index;
} }
#endif #endif
@ -619,8 +622,8 @@ hb_dict_t* hb_job_to_dict( const hb_job_t * job )
"s:{s:o, s:o, s:o, s:o, s:o}," "s:{s:o, s:o, s:o, s:o, s:o},"
// PAR {Num, Den} // PAR {Num, Den}
"s:{s:o, s:o}," "s:{s:o, s:o},"
// Video {Encoder, HardwareDecode, QSV {Decode, AsyncDepth, AdapterIndex}} // Video {Encoder, HardwareDecode, QSV {AsyncDepth, AdapterIndex}}
"s:{s:o, s:o, s:{s:o, s:o, s:o}}," "s:{s:o, s:o, s:{s:o, s:o}},"
// Audio {CopyMask, FallbackEncoder, AudioList []} // Audio {CopyMask, FallbackEncoder, AudioList []}
"s:{s:[], s:o, s:[]}," "s:{s:[], s:o, s:[]},"
// Subtitles {Search {Enable, Forced, Default, Burn}, SubtitleList []} // Subtitles {Search {Enable, Forced, Default, Burn}, SubtitleList []}
@ -650,8 +653,7 @@ hb_dict_t* hb_job_to_dict( const hb_job_t * job )
"Encoder", hb_value_int(job->vcodec), "Encoder", hb_value_int(job->vcodec),
"HardwareDecode", hb_value_int(job->hw_decode), "HardwareDecode", hb_value_int(job->hw_decode),
"QSV", "QSV",
"Decode", hb_value_bool(job->qsv.decode), "AsyncDepth", hb_value_int(async_depth),
"AsyncDepth", hb_value_int(job->qsv.async_depth),
"AdapterIndex", hb_value_int(adapter_index), "AdapterIndex", hb_value_int(adapter_index),
"Audio", "Audio",
"CopyMask", "CopyMask",
@ -1176,6 +1178,7 @@ hb_job_t* hb_dict_to_job( hb_handle_t * h, hb_dict_t *dict )
int vbitrate = -1; int vbitrate = -1;
double vquality = HB_INVALID_VIDEO_QUALITY; double vquality = HB_INVALID_VIDEO_QUALITY;
int adapter_index = -1; int adapter_index = -1;
int async_depth = -1;
hb_dict_t * meta_dict = NULL; hb_dict_t * meta_dict = NULL;
hb_value_array_t * art_array = NULL; hb_value_array_t * art_array = NULL;
@ -1200,7 +1203,7 @@ hb_job_t* hb_dict_to_job( hb_handle_t * h, hb_dict_t *dict )
// DolbyVisionConfigurationRecord // DolbyVisionConfigurationRecord
// ColorPrimariesOverride, ColorTransferOverride, ColorMatrixOverride, // ColorPrimariesOverride, ColorTransferOverride, ColorMatrixOverride,
// HardwareDecode // HardwareDecode
// QSV {Decode, AsyncDepth, AdapterIndex}} // QSV {AsyncDepth, AdapterIndex}}
"s:{s:o, s?F, s?i, s?s, s?s, s?s, s?s, s?s," "s:{s:o, s?F, s?i, s?s, s?s, s?s, s?s, s?s,"
" s?b, s?b, s?i," " s?b, s?b, s?i,"
" s?i, s?i, s?i," " s?i, s?i, s?i,"
@ -1210,7 +1213,7 @@ hb_job_t* hb_dict_to_job( hb_handle_t * h, hb_dict_t *dict )
" s?o," " s?o,"
" s?i, s?i, s?i," " s?i, s?i, s?i,"
" s?i," " s?i,"
" s?{s?b, s?i, s?i}}," " s?{s?i, s?i}},"
// Audio {CopyMask, FallbackEncoder, AudioList} // Audio {CopyMask, FallbackEncoder, AudioList}
"s?{s?o, s?o, s?o}," "s?{s?o, s?o, s?o},"
// Subtitle {Search {Enable, Forced, Default, Burn, ExternalFilename}, SubtitleList} // Subtitle {Search {Enable, Forced, Default, Burn, ExternalFilename}, SubtitleList}
@ -1271,8 +1274,7 @@ hb_job_t* hb_dict_to_job( hb_handle_t * h, hb_dict_t *dict )
"ColorMatrixOverride", unpack_i(&job->color_matrix_override), "ColorMatrixOverride", unpack_i(&job->color_matrix_override),
"HardwareDecode", unpack_i(&job->hw_decode), "HardwareDecode", unpack_i(&job->hw_decode),
"QSV", "QSV",
"Decode", unpack_b(&job->qsv.decode), "AsyncDepth", unpack_i(&async_depth),
"AsyncDepth", unpack_i(&job->qsv.async_depth),
"AdapterIndex", unpack_i(&adapter_index), "AdapterIndex", unpack_i(&adapter_index),
"Audio", "Audio",
"CopyMask", unpack_o(&acodec_copy_mask), "CopyMask", unpack_o(&acodec_copy_mask),
@ -1405,11 +1407,14 @@ hb_job_t* hb_dict_to_job( hb_handle_t * h, hb_dict_t *dict )
hb_job_set_encoder_options(job, video_options); hb_job_set_encoder_options(job, video_options);
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
if (job->qsv.ctx) { if (job->qsv_ctx)
job->qsv.ctx->dx_index = adapter_index; {
job->qsv_ctx->async_depth = async_depth;
job->qsv_ctx->dx_index = adapter_index;
} }
// Prefer to use QSV decode when QSV encoder is enabled // Prefer to use QSV decode when QSV encoder is enabled
if (!job->hw_decode && job->qsv.decode && hb_qsv_encoder_info_get(hb_qsv_get_adapter_index(), job->vcodec)) { if (job->hw_decode && hb_qsv_encoder_info_get(hb_qsv_get_adapter_index(), job->vcodec))
{
job->hw_decode = HB_DECODE_SUPPORT_QSV; job->hw_decode = HB_DECODE_SUPPORT_QSV;
} }
#endif #endif

View File

@ -16,11 +16,6 @@
#include "handbrake/avfilter_priv.h" #include "handbrake/avfilter_priv.h"
#include "handbrake/hwaccel.h" #include "handbrake/hwaccel.h"
#if HB_PROJECT_FEATURE_QSV
#include "handbrake/qsv_common.h"
#endif
struct hb_avfilter_graph_s struct hb_avfilter_graph_s
{ {
AVFilterGraph * avgraph; AVFilterGraph * avgraph;
@ -282,31 +277,41 @@ int hb_avfilter_add_buf(hb_avfilter_graph_t * graph, hb_buffer_t ** buf_in)
return ret; return ret;
} }
#if HB_PROJECT_FEATURE_QSV
static void set_qsv_hw_frames_ctx(hb_avfilter_graph_t * graph)
{
if (graph->job->hw_pix_fmt == AV_PIX_FMT_QSV)
{
AVBufferRef *hw_frames_ctx = av_buffersink_get_hw_frames_ctx(graph->output);
if (!hw_frames_ctx)
{
hb_error("hb_avfilter_get_buf: failed to get hw_frames_ctx from sink");
}
else
{
// copy hw frame ctx from filter graph for future encoder initialization
if (graph->job->qsv_ctx->hw_frames_ctx->buffer != hw_frames_ctx->buffer)
{
if (graph->job->qsv_ctx->hw_frames_ctx)
{
av_buffer_unref(&graph->job->qsv_ctx->hw_frames_ctx);
}
graph->job->qsv_ctx->hw_frames_ctx = av_buffer_ref(hw_frames_ctx);
}
}
}
}
#endif
hb_buffer_t * hb_avfilter_get_buf(hb_avfilter_graph_t * graph) hb_buffer_t * hb_avfilter_get_buf(hb_avfilter_graph_t * graph)
{ {
int result; int result = av_buffersink_get_frame(graph->output, graph->frame);
result = av_buffersink_get_frame(graph->output, graph->frame);
if (result >= 0) if (result >= 0)
{ {
hb_buffer_t * buf;
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
if (hb_hwaccel_is_full_hardware_pipeline_enabled(graph->job) && set_qsv_hw_frames_ctx(graph);
hb_qsv_decode_is_enabled(graph->job)) #endif
{ hb_buffer_t *buf = hb_avframe_to_video_buffer(graph->frame, graph->out_time_base);
AVBufferRef *hw_frames_ctx = av_buffersink_get_hw_frames_ctx(graph->output);
if (!hw_frames_ctx)
{
hb_error("hb_avfilter_get_buf: failed to get hw_frames_ctx from sink");
}
else
{
// copy hw frame ctx from filter graph for future encoder initialization
graph->job->qsv.ctx->hb_ffmpeg_qsv_hw_frames_ctx = av_buffer_ref(hw_frames_ctx);
}
}
#endif
buf = hb_avframe_to_video_buffer(graph->frame, graph->out_time_base);
av_frame_unref(graph->frame); av_frame_unref(graph->frame);
return buf; return buf;
} }

View File

@ -15,30 +15,60 @@
#include "platform/macosx/vt_common.h" #include "platform/macosx/vt_common.h"
#endif #endif
static int is_encoder_supported(int encoder_id) static int is_encoder_supported(int hw_decode, int encoder_id)
{ {
switch (encoder_id) int ret = 0;
#ifdef __APPLE__
if (hw_decode & HB_DECODE_SUPPORT_VIDEOTOOLBOX)
{ {
case HB_VCODEC_FFMPEG_NVENC_H264: switch (encoder_id)
case HB_VCODEC_FFMPEG_NVENC_H265: {
case HB_VCODEC_FFMPEG_NVENC_H265_10BIT: case HB_VCODEC_VT_H264:
case HB_VCODEC_FFMPEG_NVENC_AV1: case HB_VCODEC_VT_H265:
case HB_VCODEC_FFMPEG_NVENC_AV1_10BIT: case HB_VCODEC_VT_H265_10BIT:
case HB_VCODEC_VT_H264: ret = 1;
case HB_VCODEC_VT_H265: break;
case HB_VCODEC_VT_H265_10BIT: default:
case HB_VCODEC_FFMPEG_QSV_H264: ret = 0;
case HB_VCODEC_FFMPEG_QSV_H265: }
case HB_VCODEC_FFMPEG_QSV_H265_10BIT:
case HB_VCODEC_FFMPEG_QSV_AV1:
case HB_VCODEC_FFMPEG_QSV_AV1_10BIT:
return 1;
default:
return 0;
} }
#endif
if (hw_decode & HB_DECODE_SUPPORT_NVDEC)
{
switch (encoder_id)
{
case HB_VCODEC_FFMPEG_NVENC_H264:
case HB_VCODEC_FFMPEG_NVENC_H265:
case HB_VCODEC_FFMPEG_NVENC_H265_10BIT:
case HB_VCODEC_FFMPEG_NVENC_AV1:
case HB_VCODEC_FFMPEG_NVENC_AV1_10BIT:
ret = 1;
break;
default:
ret = 0;
}
}
#if HB_PROJECT_FEATURE_QSV
if (hw_decode & HB_DECODE_SUPPORT_QSV)
{
switch (encoder_id)
{
case HB_VCODEC_FFMPEG_QSV_H264:
case HB_VCODEC_FFMPEG_QSV_H265:
case HB_VCODEC_FFMPEG_QSV_H265_10BIT:
case HB_VCODEC_FFMPEG_QSV_AV1:
case HB_VCODEC_FFMPEG_QSV_AV1_10BIT:
ret = 1;
break;
default:
ret = 0;
}
}
#endif
return ret;
} }
static int are_filters_supported(hb_job_t * job) static int are_filters_supported(hb_job_t *job)
{ {
int ret = 0; int ret = 0;
#ifdef __APPLE__ #ifdef __APPLE__
@ -63,22 +93,15 @@ static int are_filters_supported(hb_job_t * job)
int hb_hwaccel_is_enabled(hb_job_t *job) int hb_hwaccel_is_enabled(hb_job_t *job)
{ {
return job != NULL && return job != NULL &&
( (job->title->video_decode_support & HB_DECODE_SUPPORT_HWACCEL) &&
( (job->hw_decode & HB_DECODE_SUPPORT_HWACCEL);
(job->title->video_decode_support & HB_DECODE_SUPPORT_HWACCEL) &&
(job->hw_decode & HB_DECODE_SUPPORT_HWACCEL)
)
#if HB_PROJECT_FEATURE_QSV
|| hb_qsv_decode_is_enabled(job)
#endif
);
} }
int hb_hwaccel_is_full_hardware_pipeline_enabled(hb_job_t *job) int hb_hwaccel_is_full_hardware_pipeline_enabled(hb_job_t *job)
{ {
return hb_hwaccel_is_enabled(job) && return hb_hwaccel_is_enabled(job) &&
are_filters_supported(job) && are_filters_supported(job) &&
is_encoder_supported(job->vcodec); is_encoder_supported(job->hw_decode, job->vcodec);
} }
int hb_hwaccel_decode_is_enabled(hb_job_t *job) int hb_hwaccel_decode_is_enabled(hb_job_t *job)
@ -100,6 +123,26 @@ int hb_hwaccel_decode_is_enabled(hb_job_t *job)
} }
} }
const char * hb_hwaccel_get_name(int hw_decode)
{
if (hw_decode & HB_DECODE_SUPPORT_VIDEOTOOLBOX)
{
return "videotoolbox";
}
if (hw_decode & HB_DECODE_SUPPORT_NVDEC)
{
return "nvdec";
}
if (hw_decode & HB_DECODE_SUPPORT_QSV)
{
return "qsv";
}
if (hw_decode & HB_DECODE_SUPPORT_MF)
{
return "mf";
}
}
enum AVHWDeviceType hb_hwaccel_available(int codec_id, const char *hwdevice_name) enum AVHWDeviceType hb_hwaccel_available(int codec_id, const char *hwdevice_name)
{ {
if (is_hardware_disabled()) if (is_hardware_disabled())
@ -148,10 +191,10 @@ enum AVPixelFormat hw_hwaccel_get_hw_format(AVCodecContext *ctx, const enum AVPi
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
if (*p == AV_PIX_FMT_QSV) if (*p == AV_PIX_FMT_QSV)
{ {
if (job->qsv.ctx->hb_ffmpeg_qsv_hw_frames_ctx) if (job->qsv_ctx->hw_frames_ctx)
{ {
// in case if decoder and encoder have the same size // in case if decoder and encoder have the same size
ctx->hw_frames_ctx = av_buffer_ref(job->qsv.ctx->hb_ffmpeg_qsv_hw_frames_ctx); ctx->hw_frames_ctx = av_buffer_ref(job->qsv_ctx->hw_frames_ctx);
} }
} }
#endif #endif
@ -259,7 +302,7 @@ int hb_hwaccel_hwframes_ctx_init(AVCodecContext *ctx, hb_job_t *job)
ctx->pix_fmt = job->hw_pix_fmt; ctx->pix_fmt = job->hw_pix_fmt;
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
if (hb_hwaccel_is_full_hardware_pipeline_enabled(job) && if (hb_hwaccel_is_full_hardware_pipeline_enabled(job) &&
hb_qsv_decode_is_enabled(job)) job->hw_pix_fmt == AV_PIX_FMT_QSV)
{ {
ctx->extra_hw_frames = HB_QSV_FFMPEG_EXTRA_HW_FRAMES; ctx->extra_hw_frames = HB_QSV_FFMPEG_EXTRA_HW_FRAMES;
ctx->sw_pix_fmt = job->input_pix_fmt; ctx->sw_pix_fmt = job->input_pix_fmt;
@ -276,7 +319,7 @@ int hb_hwaccel_hwframes_ctx_init(AVCodecContext *ctx, hb_job_t *job)
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
if (hb_hwaccel_is_full_hardware_pipeline_enabled(job) && if (hb_hwaccel_is_full_hardware_pipeline_enabled(job) &&
hb_qsv_decode_is_enabled(job)) job->hw_pix_fmt == AV_PIX_FMT_QSV)
{ {
// Use input pix format for decoder and filters frame pools, output frame pools are created by FFmpeg // Use input pix format for decoder and filters frame pools, output frame pools are created by FFmpeg
frames_ctx->sw_format = job->input_pix_fmt; frames_ctx->sw_format = job->input_pix_fmt;

View File

@ -2102,11 +2102,6 @@ int hb_preset_apply_video(const hb_dict_t *preset, hb_dict_t *job_dict)
hb_dict_set(video_dict, "QSV", qsv); hb_dict_set(video_dict, "QSV", qsv);
qsv = hb_dict_get(video_dict, "QSV"); qsv = hb_dict_get(video_dict, "QSV");
} }
if ((value = hb_dict_get(preset, "VideoQSVDecode")) != NULL)
{
hb_dict_set(qsv, "Decode",
hb_value_xform(value, HB_VALUE_TYPE_BOOL));
}
if ((value = hb_dict_get(preset, "VideoQSVAsyncDepth")) != NULL) if ((value = hb_dict_get(preset, "VideoQSVAsyncDepth")) != NULL)
{ {
hb_dict_set(qsv, "AsyncDepth", hb_dict_set(qsv, "AsyncDepth",

View File

@ -15,6 +15,9 @@
#include <stdio.h> #include <stdio.h>
#include <string.h> #include <string.h>
#include "vpl/mfxvideo.h"
#include "vpl/mfxdispatcher.h"
#include "handbrake/handbrake.h" #include "handbrake/handbrake.h"
#include "handbrake/ports.h" #include "handbrake/ports.h"
#include "handbrake/common.h" #include "handbrake/common.h"
@ -25,6 +28,24 @@
#include "handbrake/av1_common.h" #include "handbrake/av1_common.h"
#include "handbrake/hbffmpeg.h" #include "handbrake/hbffmpeg.h"
#ifndef HB_QSV_PRINT_RET_MSG
#define HB_QSV_PRINT_RET_MSG(ERR) { fprintf(stderr, "Error code %d,\t%s\t%d\n", ERR, __FUNCTION__, __LINE__); }
#endif
#ifndef HB_QSV_DEBUG_ASSERT
#define HB_QSV_DEBUG_ASSERT(x,y) { if ((x)) { fprintf(stderr, "\nASSERT: %s\n", y); } }
#endif
#define HB_QSV_CHECK_RET(P, X, ERR) {if ((X) > (P)) {HB_QSV_PRINT_RET_MSG(ERR); return;}}
#define HB_QSV_CHECK_RESULT(P, X, ERR) {if ((X) > (P)) {HB_QSV_PRINT_RET_MSG(ERR); return ERR;}}
#define HB_QSV_CHECK_POINTER(P, ERR) {if (!(P)) {HB_QSV_PRINT_RET_MSG(ERR); return ERR;}}
#define HB_QSV_IGNORE_MFX_STS(P, X) {if ((X) == (P)) {P = MFX_ERR_NONE;}}
#define HB_QSV_ASYNC_DEPTH_DEFAULT 4
#define HB_QSV_AVC_DECODER_WIDTH_MAX 4096
#define HB_QSV_AVC_DECODER_HEIGHT_MAX 4096
typedef struct hb_qsv_adapter_details typedef struct hb_qsv_adapter_details
{ {
// DirectX index // DirectX index
@ -653,7 +674,7 @@ static int hb_qsv_make_adapters_list(hb_list_t **qsv_adapters_list, hb_list_t **
* and collect GPU adapters capabilities. * and collect GPU adapters capabilities.
* *
* @returns encoder codec mask supported by QSV implementation, * @returns encoder codec mask supported by QSV implementation,
* 0 if QSV is not avalable, -1 if HB_PROJECT_FEATURE_QSV is not enabled * 0 if QSV is not available, -1 if HB_PROJECT_FEATURE_QSV is not enabled
*/ */
int hb_qsv_available() int hb_qsv_available()
{ {
@ -2170,7 +2191,7 @@ static int hb_qsv_parse_options(hb_job_t *job)
free(str); free(str);
if (!err) if (!err)
{ {
job->qsv.async_depth = async_depth; job->qsv_ctx->async_depth = async_depth;
} }
} }
else if (!strcasecmp(key, "memory-type")) else if (!strcasecmp(key, "memory-type"))
@ -2183,7 +2204,7 @@ static int hb_qsv_parse_options(hb_job_t *job)
} }
else else
{ {
job->qsv.ctx->memory_type = mode->value; job->qsv_ctx->memory_type = mode->value;
} }
} }
else if (!strcasecmp(key, "out_range")) else if (!strcasecmp(key, "out_range"))
@ -2196,7 +2217,7 @@ static int hb_qsv_parse_options(hb_job_t *job)
} }
else else
{ {
job->qsv.ctx->out_range = mode->value; job->qsv_ctx->out_range = mode->value;
} }
} }
} }
@ -2207,51 +2228,51 @@ static int hb_qsv_parse_options(hb_job_t *job)
int hb_qsv_setup_job(hb_job_t *job) int hb_qsv_setup_job(hb_job_t *job)
{ {
// parse the json parameter if (job->qsv_ctx == NULL)
if (job->qsv.ctx && job->qsv.ctx->dx_index >= -1)
{ {
hb_qsv_param_parse_dx_index(job, job->qsv.ctx->dx_index); return 1;
} }
// parse the advanced options parameter // Parse the json parameter
if (job->qsv_ctx->dx_index >= -1)
{
hb_qsv_param_parse_dx_index(job, job->qsv_ctx->dx_index);
}
// Parse the advanced options parameter
hb_qsv_parse_options(job); hb_qsv_parse_options(job);
int async_depth_default = hb_qsv_param_default_async_depth(); int async_depth_default = hb_qsv_param_default_async_depth();
if (job->qsv.async_depth <= 0 || job->qsv.async_depth > async_depth_default) if (job->qsv_ctx->async_depth <= 0 || job->qsv_ctx->async_depth > async_depth_default)
{ {
job->qsv.async_depth = async_depth_default; job->qsv_ctx->async_depth = async_depth_default;
} }
// Make sure QSV Decode is only True if the selected QSV adapter supports decode.
job->qsv.decode = job->qsv.decode && hb_qsv_available(); // Make sure QSV Decode is only True if the selected QSV adapter supports decode
if (job->hw_decode & HB_DECODE_SUPPORT_QSV)
{
int is_codec_supported = hb_qsv_decode_is_codec_supported(hb_qsv_get_adapter_index(),
job->title->video_codec_param, job->input_pix_fmt,
job->title->geometry.width, job->title->geometry.height);
if (is_codec_supported == 0)
{
job->hw_decode &= ~HB_DECODE_SUPPORT_QSV;
}
}
return 0; return 0;
} }
int hb_qsv_decode_is_enabled(hb_job_t *job)
{
if (!job)
return 0;
int qsv_decode_is_codec_supported = hb_qsv_decode_is_codec_supported(hb_qsv_get_adapter_index(),
job->title->video_codec_param, job->input_pix_fmt, job->title->geometry.width, job->title->geometry.height);
return ((job->qsv.decode) && (job->title->video_decode_support & HB_DECODE_SUPPORT_QSV)) &&
qsv_decode_is_codec_supported;
}
int hb_qsv_is_enabled(hb_job_t *job)
{
return hb_qsv_decode_is_enabled(job) || hb_qsv_encoder_info_get(hb_qsv_get_adapter_index(), job->vcodec);
}
int hb_qsv_get_memory_type(hb_job_t *job) int hb_qsv_get_memory_type(hb_job_t *job)
{ {
int qsv_full_path_is_enabled = hb_qsv_full_path_is_enabled(job); int qsv_full_path_is_enabled = hb_qsv_full_path_is_enabled(job);
if (qsv_full_path_is_enabled) if (qsv_full_path_is_enabled)
{ {
if (job->qsv.ctx->memory_type == MFX_IOPATTERN_OUT_VIDEO_MEMORY) if (job->qsv_ctx->memory_type == MFX_IOPATTERN_OUT_VIDEO_MEMORY)
return MFX_IOPATTERN_OUT_VIDEO_MEMORY; return MFX_IOPATTERN_OUT_VIDEO_MEMORY;
else if (job->qsv.ctx->memory_type == MFX_IOPATTERN_OUT_SYSTEM_MEMORY) else if (job->qsv_ctx->memory_type == MFX_IOPATTERN_OUT_SYSTEM_MEMORY)
return MFX_IOPATTERN_OUT_SYSTEM_MEMORY; return MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
} }
@ -2261,20 +2282,16 @@ int hb_qsv_get_memory_type(hb_job_t *job)
int hb_qsv_full_path_is_enabled(hb_job_t *job) int hb_qsv_full_path_is_enabled(hb_job_t *job)
{ {
int qsv_full_path_is_enabled = 0; int qsv_full_path_is_enabled = 0;
if (!job || !job->qsv.ctx) if (!job || !job->qsv_ctx)
{
return 0;
}
if (hb_get_bit_depth(job->title->pix_fmt) == -1)
{ {
return 0; return 0;
} }
#if defined(_WIN32) || defined(__MINGW32__) #if defined(_WIN32) || defined(__MINGW32__)
hb_qsv_info_t *info = hb_qsv_encoder_info_get(hb_qsv_get_adapter_index(), job->vcodec); hb_qsv_info_t *info = hb_qsv_encoder_info_get(hb_qsv_get_adapter_index(), job->vcodec);
qsv_full_path_is_enabled = (hb_qsv_decode_is_enabled(job) && qsv_full_path_is_enabled = (job->hw_decode & HB_DECODE_SUPPORT_QSV &&
info && hb_qsv_implementation_is_hardware(info->implementation) && info && hb_qsv_implementation_is_hardware(info->implementation) &&
job->qsv.ctx && hb_qsv_are_filters_supported(job)); job->qsv_ctx && hb_qsv_are_filters_supported(job));
#endif #endif
return qsv_full_path_is_enabled; return qsv_full_path_is_enabled;
} }
@ -2404,9 +2421,9 @@ int hb_qsv_select_ffmpeg_options(qsv_data_t * qsv_data, hb_job_t *job, AVDiction
param->rc.lookahead = param->rc.lookahead && (param->rc.icq || job->vquality <= HB_INVALID_VIDEO_QUALITY); param->rc.lookahead = param->rc.lookahead && (param->rc.icq || job->vquality <= HB_INVALID_VIDEO_QUALITY);
} }
if (job->qsv.ctx != NULL) if (job->qsv_ctx != NULL)
{ {
job->qsv.ctx->la_is_enabled = param->rc.lookahead ? 1 : 0; job->qsv_ctx->la_is_enabled = param->rc.lookahead ? 1 : 0;
} }
// libmfx BRC parameters are 16 bits thus maybe overflow, then BRCParamMultiplier is needed // libmfx BRC parameters are 16 bits thus maybe overflow, then BRCParamMultiplier is needed
@ -3211,14 +3228,14 @@ int hb_qsv_param_parse(AVDictionary** av_opts, hb_qsv_param_t *param, hb_qsv_inf
else if (!strcasecmp(key, "memory-type")) else if (!strcasecmp(key, "memory-type"))
{ {
// Check if was parsed already in decoder initialization // Check if was parsed already in decoder initialization
if (job->qsv.ctx && !job->qsv.ctx->memory_type) if (job->qsv_ctx && !job->qsv_ctx->memory_type)
{ {
hb_triplet_t* mode = NULL; hb_triplet_t* mode = NULL;
mode = hb_triplet4key(hb_qsv_memory_types, value); mode = hb_triplet4key(hb_qsv_memory_types, value);
if (!mode) if (!mode)
error = HB_QSV_PARAM_BAD_VALUE; error = HB_QSV_PARAM_BAD_VALUE;
else else
job->qsv.ctx->memory_type = mode->value; job->qsv_ctx->memory_type = mode->value;
} }
} }
else if (!strcasecmp(key, "out_range")) else if (!strcasecmp(key, "out_range"))
@ -3893,54 +3910,27 @@ int hb_qsv_param_parse_dx_index(hb_job_t *job, const int dx_index)
// find DirectX adapter with given index in list of QSV adapters // find DirectX adapter with given index in list of QSV adapters
if (details && (details->index == dx_index)) if (details && (details->index == dx_index))
{ {
job->qsv.ctx->dx_index = details->index; job->qsv_ctx->dx_index = details->index;
hb_log("qsv: %s qsv adapter with index %u has been selected", hb_qsv_get_adapter_type(details), details->index); hb_log("qsv: %s qsv adapter with index %u has been selected", hb_qsv_get_adapter_type(details), details->index);
hb_qsv_set_adapter_index(details->index); hb_qsv_set_adapter_index(details->index);
return 0; return 0;
} }
} }
job->qsv.ctx->dx_index = hb_qsv_get_adapter_index(); job->qsv_ctx->dx_index = hb_qsv_get_adapter_index();
return -1; return -1;
} }
#if defined(_WIN32) || defined(__MINGW32__) #if defined(_WIN32) || defined(__MINGW32__)
static int qsv_get_buffer(AVCodecContext *s, AVFrame *frame, int flags)
{
int ret = -1;
if(s->hw_frames_ctx)
{
ret = av_hwframe_get_buffer(s->hw_frames_ctx, frame, 0);
}
return ret;
}
void hb_qsv_uninit_enc(hb_job_t *job)
{
if(job->qsv.ctx && job->qsv.ctx->hb_ffmpeg_qsv_hw_frames_ctx)
{
if (job->qsv.ctx->hb_ffmpeg_qsv_hw_frames_ctx)
av_buffer_unref(&job->qsv.ctx->hb_ffmpeg_qsv_hw_frames_ctx);
av_free(job->qsv.ctx->hb_ffmpeg_qsv_hw_frames_ctx);
job->qsv.ctx->hb_ffmpeg_qsv_hw_frames_ctx = NULL;
}
if (job->qsv.ctx && job->qsv.ctx->hb_hw_device_ctx)
{
av_buffer_unref(&job->qsv.ctx->hb_hw_device_ctx);
job->qsv.ctx->hb_hw_device_ctx = NULL;
}
job->qsv.ctx->device_manager_handle = NULL;
}
static int hb_qsv_ffmpeg_set_options(hb_job_t *job, AVDictionary** dict) static int hb_qsv_ffmpeg_set_options(hb_job_t *job, AVDictionary** dict)
{ {
int err; int err;
AVDictionary* out_dict = *dict; AVDictionary* out_dict = *dict;
if (job->qsv.ctx && job->qsv.ctx->dx_index >= 0) if (job->qsv_ctx && job->qsv_ctx->dx_index >= 0)
{ {
char device[32]; char device[32];
snprintf(device, 32, "%u", job->qsv.ctx->dx_index); snprintf(device, 32, "%u", job->qsv_ctx->dx_index);
err = av_dict_set(&out_dict, "child_device", device, 0); err = av_dict_set(&out_dict, "child_device", device, 0);
if (err < 0) if (err < 0)
{ {
@ -3987,18 +3977,9 @@ err_out:
return err; return err;
} }
int hb_qsv_get_buffer(AVCodecContext *s, AVFrame *frame, int flags)
{
if (frame->format == AV_PIX_FMT_QSV)
return qsv_get_buffer(s, frame, flags);
return avcodec_default_get_buffer2(s, frame, flags);
}
int hb_qsv_are_filters_supported(hb_job_t *job) int hb_qsv_are_filters_supported(hb_job_t *job)
{ {
int num_sw_filters = 0; int num_sw_filters = 0;
int num_hw_filters = 0;
if (job->list_filter != NULL && hb_list_count(job->list_filter) > 0) if (job->list_filter != NULL && hb_list_count(job->list_filter) > 0)
{ {
for (int i = 0; i < hb_list_count(job->list_filter); i++) for (int i = 0; i < hb_list_count(job->list_filter); i++)
@ -4008,14 +3989,10 @@ int hb_qsv_are_filters_supported(hb_job_t *job)
{ {
// pixel format conversion is done via VPP filter // pixel format conversion is done via VPP filter
case HB_FILTER_FORMAT: case HB_FILTER_FORMAT:
num_hw_filters++;
break;
// cropping and scaling always done via VPP filter // cropping and scaling always done via VPP filter
case HB_FILTER_CROP_SCALE: case HB_FILTER_CROP_SCALE:
num_hw_filters++;
break;
case HB_FILTER_ROTATE: case HB_FILTER_ROTATE:
num_hw_filters++; case HB_FILTER_AVFILTER:
break; break;
case HB_FILTER_VFR: case HB_FILTER_VFR:
{ {
@ -4026,9 +4003,6 @@ int hb_qsv_are_filters_supported(hb_job_t *job)
break; break;
} }
} }
case HB_FILTER_AVFILTER:
num_hw_filters++;
break;
default: default:
// count only filters with access to frame data // count only filters with access to frame data
num_sw_filters++; num_sw_filters++;
@ -4061,48 +4035,55 @@ enum AVPixelFormat hb_qsv_get_format(AVCodecContext *s, const enum AVPixelFormat
return AV_PIX_FMT_NONE; return AV_PIX_FMT_NONE;
} }
int hb_qsv_get_buffer(AVCodecContext *s, AVFrame *frame, int flags)
{
return -1;
}
void hb_qsv_uninit_enc(hb_job_t *job)
{
}
#endif #endif
hb_qsv_context* hb_qsv_context_init() hb_qsv_context_t * hb_qsv_context_init()
{ {
if (!hb_qsv_available()) { if (!hb_qsv_available())
{
return 0; return 0;
} }
hb_qsv_context *ctx; hb_qsv_context_t *ctx = av_mallocz(sizeof(hb_qsv_context_t));
ctx = av_mallocz(sizeof(hb_qsv_context));
if (!ctx) if (!ctx)
{ {
hb_error( "hb_qsv_context_init: qsv ctx alloc failed" ); hb_error("hb_qsv_context_init: qsv ctx alloc failed");
return NULL; return NULL;
} }
ctx->dx_index = hb_qsv_get_default_adapter_index(); ctx->dx_index = hb_qsv_get_default_adapter_index();
ctx->out_range = AVCOL_RANGE_UNSPECIFIED; ctx->out_range = AVCOL_RANGE_UNSPECIFIED;
hb_qsv_add_context_usage(ctx, 0);
return ctx; return ctx;
} }
void hb_qsv_context_uninit(hb_job_t *job) hb_qsv_context_t * hb_qsv_context_dup(const hb_qsv_context_t *src)
{ {
hb_qsv_context *ctx = job->qsv.ctx; if (src == NULL)
if ( ctx == NULL ) {
return NULL;
}
hb_qsv_context_t *ctx = hb_qsv_context_init();
if (ctx)
{
memcpy(ctx, src, sizeof(hb_qsv_context_t));
ctx->hw_frames_ctx = NULL;
}
return ctx;
}
void hb_qsv_context_close(hb_qsv_context_t **_ctx)
{
hb_qsv_context_t *ctx = *_ctx;
if (ctx == NULL)
{ {
hb_error( "hb_qsv_context_uninit: ctx is NULL" );
return; return;
} }
/* QSV context cleanup and MFXClose */
hb_qsv_context_clean(ctx, hb_qsv_full_path_is_enabled(job)); if (ctx->hw_frames_ctx)
av_free(ctx); {
job->qsv.ctx = NULL; av_buffer_unref(&ctx->hw_frames_ctx);
}
av_freep(_ctx);
// restore adapter index after user preferences // restore adapter index after user preferences
g_adapter_index = hb_qsv_get_default_adapter_index(); g_adapter_index = hb_qsv_get_default_adapter_index();

View File

@ -1,608 +0,0 @@
/* ********************************************************************* *\
Copyright (C) 2013 Intel Corporation. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of Intel Corporation nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\* ********************************************************************* */
#include "handbrake/project.h"
#if HB_PROJECT_FEATURE_QSV
#include "handbrake/hbffmpeg.h"
#include "handbrake/qsv_libav.h"
/*
 * Find the first encode task whose output sync point is no longer pending,
 * i.e. a slot that can be reused for a new encode submission.
 *
 * Returns the task index, or MFX_ERR_NOT_FOUND when every slot is busy
 * (or when `tasks` is NULL).
 */
int hb_qsv_get_free_encode_task(hb_qsv_list * tasks)
{
    if (!tasks)
    {
        return MFX_ERR_NOT_FOUND;
    }

    for (int idx = 0; idx < hb_qsv_list_count(tasks); idx++)
    {
        hb_qsv_task *task = hb_qsv_list_item(tasks, idx);
        // A slot is free when its sync object exists and its sync point
        // is not set (no operation in flight)
        if (task->stage && task->stage->out.sync &&
            !(*task->stage->out.sync->p_sync))
        {
            return idx;
        }
    }
    return MFX_ERR_NOT_FOUND;
}
/*
 * Claim a free sync point from the space's pool.
 *
 * A slot is free when its sync point is not set and its in_use counter is
 * zero; the winner's in_use counter is incremented atomically and the slot
 * index returned. When no slot is available the scan is retried (sleeping
 * 5 ms between passes) up to HB_QSV_REPEAT_NUM_DEFAULT times before giving
 * up with -1.
 */
int hb_qsv_get_free_sync(hb_qsv_space * space, hb_qsv_context * qsv)
{
    for (int attempt = 1; ; attempt++)
    {
        for (int idx = 0; idx < space->sync_num; idx++)
        {
            // Skip slots that are pending or already claimed
            if (*(space->p_syncp[idx]->p_sync) ||
                space->p_syncp[idx]->in_use != 0)
            {
                continue;
            }
            // Track the high-water mark of slots actually used
            if (idx > space->sync_num_max_used)
            {
                space->sync_num_max_used = idx;
            }
            ff_qsv_atomic_inc(&space->p_syncp[idx]->in_use);
            return idx;
        }
        if (attempt >= HB_QSV_REPEAT_NUM_DEFAULT)
        {
            hb_error("QSV: not enough to have %d sync point(s) allocated", space->sync_num);
            return -1;
        }
        hb_qsv_sleep(5);
    }
}
/*
 * Claim a free (unlocked) frame surface from the space's pool.
 *
 * `part` restricts the scan to the lower or upper half of the pool
 * (QSV_PART_LOWER / QSV_PART_UPPER), allowing the pool to be split
 * between producers. The chosen surface's Info is overwritten with the
 * caller-supplied mfxFrameInfo. Retries with a 5 ms sleep up to
 * HB_QSV_REPEAT_NUM_DEFAULT times; returns the surface index or -1.
 */
int hb_qsv_get_free_surface(hb_qsv_space * space, hb_qsv_context * qsv,
                            mfxFrameInfo * info, hb_qsv_split part)
{
    for (int attempt = 1; ; attempt++)
    {
        // Compute the scan window for the requested pool partition
        int begin = 0;
        int end = space->surface_num;
        if (part == QSV_PART_LOWER)
        {
            end /= 2;
        }
        if (part == QSV_PART_UPPER)
        {
            begin = end / 2;
        }

        for (int idx = begin; idx < end; idx++)
        {
            // Locked == 0 means the hardware is done with this surface
            if (space->p_surfaces[idx]->Data.Locked == 0)
            {
                memcpy(&(space->p_surfaces[idx]->Info), info,
                       sizeof(mfxFrameInfo));
                if (idx > space->surface_num_max_used)
                {
                    space->surface_num_max_used = idx;
                }
                return idx;
            }
        }
        if (attempt >= HB_QSV_REPEAT_NUM_DEFAULT)
        {
            hb_error("QSV: not enough to have %d surface(s) allocated", end);
            return -1;
        }
        hb_qsv_sleep(5);
    }
}
/*
 * Check whether a surface is still referenced by any stage of any pipe.
 *
 * Returns 0 when the surface is unreferenced (or on NULL input); otherwise
 * an encoded hit: (stage->type << 16) | 2 for an output reference,
 * (stage->type << 16) | 1 for an input reference.
 */
int ff_qsv_is_surface_in_pipe(mfxFrameSurface1 * p_surface, hb_qsv_context * qsv)
{
    if (!p_surface)
    {
        return 0;
    }
    if (!qsv->pipes)
    {
        return 0;
    }

    for (int pipe_idx = 0; pipe_idx < hb_qsv_list_count(qsv->pipes); pipe_idx++)
    {
        hb_qsv_list *pipe = hb_qsv_list_item(qsv->pipes, pipe_idx);
        for (int stage_idx = 0; stage_idx < hb_qsv_list_count(pipe); stage_idx++)
        {
            hb_qsv_stage *stage = hb_qsv_list_item(pipe, stage_idx);
            if (p_surface == stage->out.p_surface)
            {
                return (stage->type << 16) | 2;
            }
            if (p_surface == stage->in.p_surface)
            {
                return (stage->type << 16) | 1;
            }
        }
    }
    return 0;
}
/*
 * Check whether a sync point is still referenced as the output sync of any
 * stage in any pipe. Returns 1 on the first match, 0 otherwise (including
 * NULL input or no pipes).
 */
int ff_qsv_is_sync_in_pipe(mfxSyncPoint * sync, hb_qsv_context * qsv)
{
    if (!sync)
    {
        return 0;
    }
    if (!qsv->pipes)
    {
        return 0;
    }

    for (int pipe_idx = 0; pipe_idx < hb_qsv_list_count(qsv->pipes); pipe_idx++)
    {
        hb_qsv_list *pipe = hb_qsv_list_item(qsv->pipes, pipe_idx);
        for (int stage_idx = 0; stage_idx < hb_qsv_list_count(pipe); stage_idx++)
        {
            hb_qsv_stage *stage = hb_qsv_list_item(pipe, stage_idx);
            if (sync == stage->out.sync->p_sync)
            {
                return 1;
            }
        }
    }
    return 0;
}
// Allocate a new, zero-initialized pipeline stage (all surface/sync
// pointers start out NULL). Returns NULL on allocation failure.
hb_qsv_stage *hb_qsv_stage_init(void)
{
    return av_mallocz(sizeof(hb_qsv_stage));
}
// Free a stage and NULL the caller's pointer. When 'is_clean_content' is
// set, also release the stage's sync point reference and drop its
// surface pointers (the surfaces themselves are owned by the space).
void hb_qsv_stage_clean(hb_qsv_stage ** stage, int is_clean_content)
{
    hb_qsv_stage *s = *stage;

    if (is_clean_content)
    {
        if (s->out.sync)
        {
            // reset the sync point and give back our usage reference
            if (s->out.sync->p_sync)
            {
                *s->out.sync->p_sync = 0;
            }
            if (s->out.sync->in_use > 0)
            {
                ff_qsv_atomic_dec(&s->out.sync->in_use);
            }
            s->out.sync = 0;
        }
        if (s->out.p_surface)
        {
            s->out.p_surface = 0;
        }
        if (s->in.p_surface)
        {
            s->in.p_surface = 0;
        }
    }
    av_freep(stage);
}
// Register one more user of the shared context. The first user performs
// the one-time setup: clears the session handle, creates the pipe list
// and the DTS queue, and (when threaded) allocates the DTS queue mutex.
void hb_qsv_add_context_usage(hb_qsv_context * qsv, int is_threaded)
{
    // only the first increment does the initialization
    if (ff_qsv_atomic_inc(&qsv->is_context_active) != 1)
    {
        return;
    }

    memset(&qsv->mfx_session, 0, sizeof(mfxSession));
    hb_qsv_pipe_list_create(&qsv->pipes, is_threaded);
    qsv->dts_seq = hb_qsv_list_init(is_threaded);

    qsv->qts_seq_mutex = 0;
    if (is_threaded)
    {
        qsv->qts_seq_mutex = av_mallocz(sizeof(pthread_mutex_t));
        if (qsv->qts_seq_mutex)
        {
            int rc = pthread_mutex_init(qsv->qts_seq_mutex, NULL);
            if (rc)
                hb_log("QSV: pthread_mutex_init issue[%d] at %s", rc, __FUNCTION__);
        }
    }
}
// Drop one user of the shared context; the last user tears down the
// common state: the DTS queue and its mutex, the pipe list, and (unless
// 'full_job' indicates the session is owned elsewhere) the MFX session
// and its display. Returns 0 (HB_QSV_CHECK_RESULT may return an MFX
// status on MFXClose failure — verify the macro's behavior).
int hb_qsv_context_clean(hb_qsv_context * qsv, int full_job)
{
    int is_active = 0;
    mfxStatus sts = MFX_ERR_NONE;
    int mut_ret = 0;
    is_active = ff_qsv_atomic_dec(&qsv->is_context_active);
    // spaces would have to be cleaned on the own,
    // here we care about the rest, common stuff
    if (is_active == 0) {
        if (qsv->dts_seq) {
            // drain all queued DTS entries before closing the list
            while (hb_qsv_list_count(qsv->dts_seq))
                hb_qsv_dts_pop(qsv);
            hb_qsv_list_close(&qsv->dts_seq);
        }
        if (qsv->qts_seq_mutex) {
            mut_ret = pthread_mutex_destroy(qsv->qts_seq_mutex);
            if(mut_ret)
                hb_log("QSV: pthread_mutex_destroy issue[%d] at %s", mut_ret, __FUNCTION__);
            // NOTE(review): the mutex storage itself is not freed here —
            // presumably owned/freed elsewhere; confirm
            qsv->qts_seq_mutex = 0;
        }
        if (qsv->pipes)
            hb_qsv_pipe_list_clean(&qsv->pipes);
        // only close the session when this context owns it (!full_job)
        if (qsv->mfx_session && !full_job) {
            sts = MFXClose(qsv->mfx_session);
            HB_QSV_CHECK_RESULT(sts, MFX_ERR_NONE, sts);
            qsv->mfx_session = 0;
            // display must be closed after MFXClose
            hb_display_close(&qsv->display);
            qsv->display = NULL;
        }
    }
    return 0;
}
// Lazily allocate the pipe list; a no-op when it already exists.
void hb_qsv_pipe_list_create(hb_qsv_list ** list, int is_threaded)
{
    if (*list == NULL)
    {
        *list = hb_qsv_list_init(is_threaded);
    }
}
// Flush every remaining pipe (without cleaning stage contents), then
// close the pipe list itself and NULL the caller's pointer.
void hb_qsv_pipe_list_clean(hb_qsv_list ** list)
{
    if (!*list)
    {
        return;
    }
    // iterate back-to-front: flushing removes each pipe from the list
    for (int i = hb_qsv_list_count(*list); i > 0; i--)
    {
        hb_qsv_list *pipe = hb_qsv_list_item(*list, i - 1);
        hb_qsv_flush_stages(*list, &pipe, 0);
    }
    hb_qsv_list_close(list);
}
// Append a stage to a pipe, creating the pipe list on first use.
void hb_qsv_add_stagee(hb_qsv_list ** list, hb_qsv_stage * stage, int is_threaded)
{
    if (*list == NULL)
    {
        *list = hb_qsv_list_init(is_threaded);
    }
    hb_qsv_list_add(*list, stage);
}
// Return the most recently added stage of a pipe, or NULL when empty.
// Holds the list lock across the count/item pair for a consistent read.
hb_qsv_stage *hb_qsv_get_last_stage(hb_qsv_list * list)
{
    hb_qsv_stage *last = NULL;

    hb_qsv_list_lock(list);
    int count = hb_qsv_list_count(list);
    if (count > 0)
    {
        last = hb_qsv_list_item(list, count - 1);
    }
    hb_qsv_list_unlock(list);

    return last;
}
// Tear down one pipe ('item'): clean each of its stages (optionally
// including their contents), remove the pipe from 'list', and then
// recursively flush any pipes that were pending on those stages.
void hb_qsv_flush_stages(hb_qsv_list * list, hb_qsv_list ** item, int is_flush_content)
{
    int i = 0;
    int x = 0;
    hb_qsv_stage *stage = 0;
    hb_qsv_list *to_remove_list = 0;
    hb_qsv_list *to_remove_atom_list = 0;
    hb_qsv_list *to_remove_atom = 0;
    for (i = 0; i < hb_qsv_list_count(*item); i++) {
        stage = hb_qsv_list_item(*item, i);
        // collect pending pipe lists first; they are flushed after the
        // current pipe has been fully dismantled
        if(stage->pending){
            if(!to_remove_list)
                to_remove_list = hb_qsv_list_init(0);
            hb_qsv_list_add(to_remove_list, stage->pending);
        }
        hb_qsv_stage_clean(&stage, is_flush_content);
        // should actually remove from the list but ok...
    }
    hb_qsv_list_rem(list, *item);
    hb_qsv_list_close(item);
    // now recurse into the deferred pending pipes, newest first
    if(to_remove_list){
        for (i = hb_qsv_list_count(to_remove_list); i > 0; i--){
            to_remove_atom_list = hb_qsv_list_item(to_remove_list, i-1);
            for (x = hb_qsv_list_count(to_remove_atom_list); x > 0; x--){
                to_remove_atom = hb_qsv_list_item(to_remove_atom_list, x-1);
                hb_qsv_flush_stages(list, &to_remove_atom, is_flush_content);
            }
        }
        hb_qsv_list_close(&to_remove_list);
    }
}
// Linear search over all pipes for the one that contains 'stage'.
// Returns the containing pipe, or 0 when the stage is in no pipe.
hb_qsv_list *hb_qsv_pipe_by_stage(hb_qsv_list * list, hb_qsv_stage * stage)
{
    for (int i = 0; i < hb_qsv_list_count(list); i++)
    {
        hb_qsv_list *pipe = hb_qsv_list_item(list, i);
        for (int j = 0; j < hb_qsv_list_count(pipe); j++)
        {
            hb_qsv_stage *candidate = hb_qsv_list_item(pipe, j);
            if (candidate == stage)
            {
                return pipe;
            }
        }
    }
    return 0;
}
// no duplicate of the same value, if end == 0 : working over full length
// Insert 'dts' into the context's DTS queue keeping ascending order and
// skipping duplicates. Scans the [start, end) window from the back; when
// 'end' is 0 the whole queue is considered. The queue mutex is taken
// only on the outermost call (iter == 0).
void hb_qsv_dts_ordered_insert(hb_qsv_context * qsv, int start, int end,
                               int64_t dts, int iter)
{
    hb_qsv_dts *cur_dts = 0;
    hb_qsv_dts *new_dts = 0;
    int i = 0;
    int mut_ret = 0;
    if (iter == 0 && qsv->qts_seq_mutex){
        mut_ret = pthread_mutex_lock(qsv->qts_seq_mutex);
        if(mut_ret)
            hb_log("QSV: pthread_mutex_lock issue[%d] at %s", mut_ret, __FUNCTION__);
    }
    if (end == 0)
        end = hb_qsv_list_count(qsv->dts_seq);
    if (end <= start) {
        // empty window: append at the tail
        new_dts = av_mallocz(sizeof(hb_qsv_dts));
        if( new_dts ) {
            new_dts->dts = dts;
            hb_qsv_list_add(qsv->dts_seq, new_dts);
        }
    } else
        // scan backwards for the first smaller entry and insert after it;
        // an equal entry means 'dts' is already queued, so do nothing
        for (i = end; i > start; i--) {
            cur_dts = hb_qsv_list_item(qsv->dts_seq, i - 1);
            if (cur_dts->dts < dts) {
                new_dts = av_mallocz(sizeof(hb_qsv_dts));
                if( new_dts ) {
                    new_dts->dts = dts;
                    hb_qsv_list_insert(qsv->dts_seq, i, new_dts);
                }
                break;
            } else if (cur_dts->dts == dts)
                break;
        }
    if (iter == 0 && qsv->qts_seq_mutex){
        mut_ret = pthread_mutex_unlock(qsv->qts_seq_mutex);
        if(mut_ret)
            hb_log("QSV: pthread_mutex_unlock issue[%d] at %s", mut_ret, __FUNCTION__);
    }
}
// Remove and free the oldest entry of the context's DTS queue, holding
// the queue mutex when one exists. A NULL context is a no-op.
void hb_qsv_dts_pop(hb_qsv_context * qsv)
{
    hb_qsv_dts *item = 0;
    int mut_ret = 0;
    // the original guarded only the mutex paths against NULL but then
    // dereferenced qsv unconditionally; bail out early instead
    if (!qsv)
    {
        return;
    }
    if (qsv->qts_seq_mutex){
        mut_ret = pthread_mutex_lock(qsv->qts_seq_mutex);
        if(mut_ret)
            hb_log("QSV: pthread_mutex_lock issue[%d] at %s", mut_ret, __FUNCTION__);
    }
    if (hb_qsv_list_count(qsv->dts_seq)) {
        item = hb_qsv_list_item(qsv->dts_seq, 0);
        hb_qsv_list_rem(qsv->dts_seq, item);
        av_free(item);
    }
    if (qsv->qts_seq_mutex){
        mut_ret = pthread_mutex_unlock(qsv->qts_seq_mutex);
        if(mut_ret)
            // was mislabeled "pthread_mutex_lock" in the log message
            hb_log("QSV: pthread_mutex_unlock issue[%d] at %s", mut_ret, __FUNCTION__);
    }
}
// Allocate a new list with HB_QSV_JOB_SIZE_DEFAULT slots. When
// 'is_threaded' is set, a recursive mutex guards all list operations.
// Returns NULL on allocation failure.
hb_qsv_list *hb_qsv_list_init(int is_threaded)
{
    hb_qsv_list *l;
    int mut_ret;
    l = av_mallocz(sizeof(hb_qsv_list));
    if (!l)
        return 0;
    l->items = av_mallocz(HB_QSV_JOB_SIZE_DEFAULT * sizeof(void *));
    if (!l->items)
    {
        // don't leak the list header when the item array allocation fails
        av_freep(&l);
        return 0;
    }
    l->items_alloc = HB_QSV_JOB_SIZE_DEFAULT;
    if (is_threaded) {
        l->mutex = av_mallocz(sizeof(pthread_mutex_t));
        if (l->mutex){
            // recursive so nested lock/count/item sequences are safe
            mut_ret = pthread_mutexattr_init(&l->mta);
            if( mut_ret )
                hb_log("QSV: pthread_mutexattr_init issue[%d] at %s", mut_ret, __FUNCTION__);
            mut_ret = pthread_mutexattr_settype(&l->mta, PTHREAD_MUTEX_RECURSIVE /*PTHREAD_MUTEX_ERRORCHECK*/);
            if( mut_ret )
                hb_log("QSV: pthread_mutexattr_settype issue[%d] at %s", mut_ret, __FUNCTION__);
            mut_ret = pthread_mutex_init(l->mutex, &l->mta);
            if( mut_ret )
                hb_log("QSV: pthread_mutex_init issue[%d] at %s", mut_ret, __FUNCTION__);
        }
    } else
        l->mutex = 0;
    return l;
}
// Return the number of items currently in the list (lock-protected read).
int hb_qsv_list_count(hb_qsv_list * l)
{
    hb_qsv_list_lock(l);
    int count = l->items_count;
    hb_qsv_list_unlock(l);
    return count;
}
// Append 'p' to the list, growing the backing array when full.
// Returns the position of the new item, or -1 when 'p' is NULL or the
// array could not be grown.
int hb_qsv_list_add(hb_qsv_list * l, void *p)
{
    int pos = -1;
    if (!p) {
        return pos;
    }
    hb_qsv_list_lock(l);
    if (l->items_count == l->items_alloc) {
        /* We need a bigger boat */
        // grow via a temporary so the old array survives a failed realloc
        void **items = av_realloc(l->items,
                                  (l->items_alloc + HB_QSV_JOB_SIZE_DEFAULT) * sizeof(void *));
        if (!items) {
            hb_qsv_list_unlock(l);
            return pos;
        }
        l->items = items;
        l->items_alloc += HB_QSV_JOB_SIZE_DEFAULT;
    }
    l->items[l->items_count] = p;
    pos = (l->items_count);
    l->items_count++;
    hb_qsv_list_unlock(l);
    return pos;
}
// Remove the first occurrence of 'p' from the list, shifting the
// remaining items down. Silently does nothing if 'p' is not present.
void hb_qsv_list_rem(hb_qsv_list * l, void *p)
{
    hb_qsv_list_lock(l);
    for (int idx = 0; idx < l->items_count; idx++)
    {
        if (l->items[idx] != p)
        {
            continue;
        }
        /* Shift all items after it sizeof( void * ) bytes earlier */
        memmove(&l->items[idx], &l->items[idx + 1],
                (l->items_count - idx - 1) * sizeof(void *));
        l->items_count--;
        break;
    }
    hb_qsv_list_unlock(l);
}
// Return item 'i' of the list, or NULL when the index is out of range.
void *hb_qsv_list_item(hb_qsv_list * l, int i)
{
    if (i < 0)
    {
        return NULL;
    }
    hb_qsv_list_lock(l);
    void *item = (i < l->items_count) ? l->items[i] : NULL;
    hb_qsv_list_unlock(l);
    return item;
}
// Insert 'p' at position 'pos', shifting later items up by one slot and
// growing the backing array when full. NULL items are ignored.
void hb_qsv_list_insert(hb_qsv_list * l, int pos, void *p)
{
    if (!p)
        return;
    hb_qsv_list_lock(l);
    if (l->items_count == l->items_alloc) {
        // grow via a temporary so the old array survives a failed realloc
        void **items = av_realloc(l->items,
                                  (l->items_alloc + HB_QSV_JOB_SIZE_DEFAULT) * sizeof(void *));
        if (!items) {
            hb_qsv_list_unlock(l);
            return;
        }
        l->items = items;
        l->items_alloc += HB_QSV_JOB_SIZE_DEFAULT;
    }
    if (l->items_count != pos) {
        memmove(&l->items[pos + 1], &l->items[pos],
                (l->items_count - pos) * sizeof(void *));
    }
    l->items[pos] = p;
    // BUG FIX: the count must grow after an insert; it was decremented,
    // which corrupted the list length
    l->items_count++;
    hb_qsv_list_unlock(l);
}
// Destroy a list: free the item array, tear down the mutex (if any) and
// free the list itself, NULLing the caller's pointer. The list must not
// be used concurrently while being closed.
void hb_qsv_list_close(hb_qsv_list ** _l)
{
    hb_qsv_list *l = *_l;
    int mut_ret;
    hb_qsv_list_lock(l);
    av_free(l->items);
    if (l->mutex){
        // release the lock taken above before destroying the mutex;
        // destroying a locked mutex is undefined behavior
        mut_ret = pthread_mutex_unlock(l->mutex);
        if (mut_ret)
            hb_log("QSV: pthread_mutex_unlock issue[%d] at %s", mut_ret, __FUNCTION__);
        mut_ret = pthread_mutex_destroy(l->mutex);
        if (mut_ret)
            hb_log("QSV: pthread_mutex_destroy issue[%d] at %s", mut_ret, __FUNCTION__);
        mut_ret = pthread_mutexattr_destroy(&l->mta);
        if (mut_ret)
            hb_log("QSV: pthread_mutexattr_destroy issue[%d] at %s", mut_ret, __FUNCTION__);
        // NOTE(review): l->mutex storage itself appears to be freed only
        // via av_freep(_l) freeing the list, not the mutex allocation —
        // possible leak; confirm against hb_qsv_list_init
    }
    av_freep(_l);
}
// Acquire the list mutex when threaded; no-op otherwise.
// Returns the pthread_mutex_lock result (0 on success).
int hb_qsv_list_lock(hb_qsv_list *l)
{
    if (!l->mutex)
    {
        return 0;
    }
    int rc = pthread_mutex_lock(l->mutex);
    if (rc)
    {
        hb_log("QSV: pthread_mutex_lock issue[%d] at %s", rc, __FUNCTION__);
    }
    return rc;
}
// Release the list mutex when threaded; no-op otherwise.
// Returns the pthread_mutex_unlock result (0 on success).
int hb_qsv_list_unlock(hb_qsv_list *l)
{
    if (!l->mutex)
    {
        return 0;
    }
    int rc = pthread_mutex_unlock(l->mutex);
    if (rc)
    {
        hb_log("QSV: pthread_mutex_unlock issue[%d] at %s", rc, __FUNCTION__);
    }
    return rc;
}
// Block until the stage's output sync point completes, retrying while
// MFX reports the operation is still executing. Returns 0 (the
// HB_QSV_CHECK_RESULT macro handles non-success statuses).
int hb_qsv_wait_on_sync(hb_qsv_context *qsv, hb_qsv_stage *stage)
{
    mfxStatus sts = MFX_ERR_NONE;
    int tries = 0;

    if (stage && *stage->out.sync->p_sync)
    {
        for (;;)
        {
            tries++;
            sts = MFXVideoCORE_SyncOperation(qsv->mfx_session,
                                             *stage->out.sync->p_sync,
                                             HB_QSV_SYNC_TIME_DEFAULT);
            if (sts == MFX_WRN_IN_EXECUTION)
            {
                // flag a stuck sync after repeated retries, then back off
                if (tries > 20)
                    HB_QSV_DEBUG_ASSERT(1, "Sync failed");
                hb_qsv_sleep(10);
                continue;
            }
            HB_QSV_CHECK_RESULT(sts, MFX_ERR_NONE, sts);
            break;
        }
    }
    return 0;
}
#endif // HB_PROJECT_FEATURE_QSV

View File

@ -104,7 +104,7 @@ static int qsv_rotate_init(hb_filter_private_t * pv, hb_filter_init_t * init, in
{ {
hb_dict_set(avsettings, "transpose", hb_value_string(trans)); hb_dict_set(avsettings, "transpose", hb_value_string(trans));
} }
hb_dict_set_int(avsettings, "async_depth", init->job->qsv.async_depth); hb_dict_set_int(avsettings, "async_depth", init->job->qsv_ctx->async_depth);
hb_dict_set(avfilter, "vpp_qsv", avsettings); hb_dict_set(avfilter, "vpp_qsv", avsettings);
pv->avfilters = avfilter; pv->avfilters = avfilter;
} }
@ -118,7 +118,7 @@ static int qsv_rotate_init(hb_filter_private_t * pv, hb_filter_init_t * init, in
avfilter = hb_dict_init(); avfilter = hb_dict_init();
hb_dict_set(avsettings, "transpose", hb_value_string("vflip")); hb_dict_set(avsettings, "transpose", hb_value_string("vflip"));
hb_dict_set_int(avsettings, "async_depth", init->job->qsv.async_depth); hb_dict_set_int(avsettings, "async_depth", init->job->qsv_ctx->async_depth);
hb_dict_set(avfilter, "vpp_qsv", avsettings); hb_dict_set(avfilter, "vpp_qsv", avsettings);
pv->avfilters = avfilter; pv->avfilters = avfilter;
} }
@ -127,7 +127,7 @@ static int qsv_rotate_init(hb_filter_private_t * pv, hb_filter_init_t * init, in
avfilter = hb_dict_init(); avfilter = hb_dict_init();
hb_dict_set(avsettings, "transpose", hb_value_string("hflip")); hb_dict_set(avsettings, "transpose", hb_value_string("hflip"));
hb_dict_set_int(avsettings, "async_depth", init->job->qsv.async_depth); hb_dict_set_int(avsettings, "async_depth", init->job->qsv_ctx->async_depth);
hb_dict_set(avfilter, "vpp_qsv", avsettings); hb_dict_set(avfilter, "vpp_qsv", avsettings);
pv->avfilters = avfilter; pv->avfilters = avfilter;
} }
@ -184,8 +184,7 @@ static int rotate_init(hb_filter_object_t * filter, hb_filter_init_t * init)
} }
#if HB_PROJECT_FEATURE_QSV && (defined( _WIN32 ) || defined( __MINGW32__ )) #if HB_PROJECT_FEATURE_QSV && (defined( _WIN32 ) || defined( __MINGW32__ ))
if (hb_hwaccel_is_full_hardware_pipeline_enabled(init->job) && if (init->hw_pix_fmt == AV_PIX_FMT_QSV)
hb_qsv_decode_is_enabled(init->job))
{ {
qsv_rotate_init(pv, init, angle, flip); qsv_rotate_init(pv, init, angle, flip);
return 0; return 0;

View File

@ -2937,11 +2937,13 @@ static int syncVideoWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
// Fix of LA case allowing use of LA up to 40 in full encode path, // Fix of LA case allowing use of LA up to 40 in full encode path,
// as currently for such support we cannot allocate >64 slices per texture // as currently for such support we cannot allocate >64 slices per texture
// due to MSFT limitation, not impacting other cases // due to MSFT limitation, not impacting other cases
if (pv->common->job->qsv.ctx && (pv->common->job->qsv.ctx->la_is_enabled == 1) hb_job_t *job = pv->common->job;
&& pv->common->job->qsv.ctx->full_path_is_enabled) if (job->hw_pix_fmt == AV_PIX_FMT_QSV &&
job->qsv_ctx->la_is_enabled == 1 &&
hb_qsv_get_memory_type(job) == MFX_IOPATTERN_OUT_VIDEO_MEMORY)
{ {
pv->stream->max_len = SYNC_MIN_VIDEO_QUEUE_LEN; pv->stream->max_len = SYNC_MIN_VIDEO_QUEUE_LEN;
pv->common->job->qsv.ctx->la_is_enabled++; pv->common->job->qsv_ctx->la_is_enabled++;
} }
#endif #endif

View File

@ -148,12 +148,6 @@ static void work_func( void * _work )
hb_job_close(&job); hb_job_close(&job);
job = new_job; job = new_job;
} }
#if HB_PROJECT_FEATURE_QSV
if (hb_qsv_available())
{
hb_qsv_setup_job(job);
}
#endif
hb_job_setup_passes(job->h, job, passes); hb_job_setup_passes(job->h, job, passes);
hb_job_close(&job); hb_job_close(&job);
@ -505,17 +499,14 @@ void hb_display_job_info(hb_job_t *job)
hb_log(" * video track"); hb_log(" * video track");
#if HB_PROJECT_FEATURE_QSV
if (hb_qsv_decode_is_enabled(job))
{
hb_log(" + decoder: %s %d-bit (%s)",
hb_qsv_decode_get_codec_name(title->video_codec_param), hb_get_bit_depth(job->input_pix_fmt), av_get_pix_fmt_name(job->input_pix_fmt));
} else
#endif
if (hb_hwaccel_decode_is_enabled(job)) if (hb_hwaccel_decode_is_enabled(job))
{ {
hb_log(" + decoder: %s %d-bit hwaccel (%s, %s)", hb_log(" + decoder: %s %d-bit %s hwaccel (%s, %s)",
title->video_codec_name, hb_get_bit_depth(job->input_pix_fmt), av_get_pix_fmt_name(job->input_pix_fmt), av_get_pix_fmt_name(job->hw_pix_fmt)); title->video_codec_name,
hb_get_bit_depth(job->input_pix_fmt),
hb_hwaccel_get_name(job->hw_decode),
av_get_pix_fmt_name(job->input_pix_fmt),
av_get_pix_fmt_name(job->hw_pix_fmt));
} }
else else
{ {
@ -1729,7 +1720,7 @@ static void sanitize_dynamic_hdr_metadata_passthru(hb_job_t *job)
// the dynamic hdr side data // the dynamic hdr side data
if (job->passthru_dynamic_hdr_metadata) if (job->passthru_dynamic_hdr_metadata)
{ {
job->qsv.decode = 0; job->hw_decode &= ~HB_DECODE_SUPPORT_QSV;
} }
#endif #endif
} }
@ -1788,10 +1779,16 @@ static void do_job(hb_job_t *job)
{ {
job->hw_decode = 0; job->hw_decode = 0;
} }
if (job->hw_decode == HB_DECODE_SUPPORT_MF) if (job->hw_decode & HB_DECODE_SUPPORT_MF)
{ {
job->hw_decode |= HB_DECODE_SUPPORT_FORCE_HW; job->hw_decode |= HB_DECODE_SUPPORT_FORCE_HW;
} }
else if (job->hw_decode & HB_DECODE_SUPPORT_QSV)
{
#if HB_PROJECT_FEATURE_QSV
hb_qsv_setup_job(job);
#endif
}
// This must be performed before initializing filters because // This must be performed before initializing filters because
// it can add the subtitle render filter. // it can add the subtitle render filter.
@ -1809,13 +1806,12 @@ static void do_job(hb_job_t *job)
hb_filter_init_t init; hb_filter_init_t init;
sanitize_filter_list_pre(job, title->geometry); sanitize_filter_list_pre(job, title->geometry);
sanitize_dynamic_hdr_metadata_passthru(job);
// Select the optimal pixel formats for the pipeline // Select the optimal pixel formats for the pipeline
job->hw_pix_fmt = hb_get_best_hw_pix_fmt(job); job->hw_pix_fmt = hb_get_best_hw_pix_fmt(job);
job->input_pix_fmt = hb_get_best_pix_fmt(job); job->input_pix_fmt = hb_get_best_pix_fmt(job);
sanitize_dynamic_hdr_metadata_passthru(job);
// Init hwaccel context if needed // Init hwaccel context if needed
if (hb_hwaccel_decode_is_enabled(job)) if (hb_hwaccel_decode_is_enabled(job))
{ {

View File

@ -3270,7 +3270,6 @@ static int ParseOptions( int argc, char ** argv )
#if HB_PROJECT_FEATURE_QSV #if HB_PROJECT_FEATURE_QSV
else if (!strcmp(optarg, "qsv")) else if (!strcmp(optarg, "qsv"))
{ {
qsv_decode = 1;
hw_decode = HB_DECODE_SUPPORT_QSV; hw_decode = HB_DECODE_SUPPORT_QSV;
} }
#endif #endif
@ -3282,6 +3281,10 @@ static int ParseOptions( int argc, char ** argv )
{ {
hw_decode = 0; hw_decode = 0;
} }
if (hw_decode)
{
hw_decode |= HB_DECODE_SUPPORT_FORCE_HW;
}
} break; } break;
case KEEP_DUPLICATE_TITLES: case KEEP_DUPLICATE_TITLES:
keep_duplicate_titles = 1; keep_duplicate_titles = 1;
@ -4470,7 +4473,6 @@ static hb_dict_t * PreparePreset(const char *preset_name)
} }
if (qsv_decode != -1) if (qsv_decode != -1)
{ {
hb_dict_set(preset, "VideoQSVDecode", hb_value_int(qsv_decode));
hw_decode = qsv_decode ? HB_DECODE_SUPPORT_QSV : 0; hw_decode = qsv_decode ? HB_DECODE_SUPPORT_QSV : 0;
} }
#endif #endif

View File

@ -74,5 +74,7 @@ namespace HandBrake.Interop.Interop.HbLib
public const uint HB_DECODE_SUPPORT_MF = 0x10; public const uint HB_DECODE_SUPPORT_MF = 0x10;
public const uint HB_DECODE_SUPPORT_HWACCEL = (HB_DECODE_SUPPORT_NVDEC | HB_DECODE_SUPPORT_VIDEOTOOLBOX | HB_DECODE_SUPPORT_QSV | HB_DECODE_SUPPORT_MF); public const uint HB_DECODE_SUPPORT_HWACCEL = (HB_DECODE_SUPPORT_NVDEC | HB_DECODE_SUPPORT_VIDEOTOOLBOX | HB_DECODE_SUPPORT_QSV | HB_DECODE_SUPPORT_MF);
public const uint HB_DECODE_SUPPORT_FORCE_HW = 0x80000000;
} }
} }

View File

@ -14,11 +14,6 @@ namespace HandBrake.Interop.Interop.Json.Encode
/// </summary> /// </summary>
public class QSV public class QSV
{ {
/// <summary>
/// Gets or sets a value indicating whether decode.
/// </summary>
public bool Decode { get; set; }
/// <summary> /// <summary>
/// Gets or sets the async depth. /// Gets or sets the async depth.
/// </summary> /// </summary>

View File

@ -85,6 +85,6 @@ namespace HandBrake.Interop.Interop.Json.Encode
/// <summary> /// <summary>
/// HB_DECODE_SUPPORT constants in common.h /// HB_DECODE_SUPPORT constants in common.h
/// </summary> /// </summary>
public int HardwareDecode { get; set; } public uint HardwareDecode { get; set; }
} }
} }

View File

@ -290,13 +290,15 @@ namespace HandBrakeWPF.Services.Encode.Factories
if (this.isEncodePath && (job.VideoEncoder?.IsQuickSync ?? false)) if (this.isEncodePath && (job.VideoEncoder?.IsQuickSync ?? false))
{ {
video.QSV.Decode = HandBrakeHardwareEncoderHelper.IsQsvAvailable && enableQuickSyncDecoding; video.HardwareDecode = HandBrakeHardwareEncoderHelper.IsQsvAvailable && enableQuickSyncDecoding ?
NativeConstants.HB_DECODE_SUPPORT_QSV : 0 ;
} }
// Allow use of the QSV decoder is configurable for non QSV encoders. // Allow use of the QSV decoder is configurable for non QSV encoders.
if (this.isEncodePath && job.VideoEncoder != null && !job.VideoEncoder.IsHardwareEncoder && useQSVDecodeForNonQSVEnc && enableQuickSyncDecoding) if (this.isEncodePath && job.VideoEncoder != null && !job.VideoEncoder.IsHardwareEncoder && useQSVDecodeForNonQSVEnc && enableQuickSyncDecoding)
{ {
video.QSV.Decode = HandBrakeHardwareEncoderHelper.IsQsvAvailable && useQSVDecodeForNonQSVEnc; video.HardwareDecode = HandBrakeHardwareEncoderHelper.IsQsvAvailable && useQSVDecodeForNonQSVEnc ?
NativeConstants.HB_DECODE_SUPPORT_QSV | NativeConstants.HB_DECODE_SUPPORT_FORCE_HW : 0;
} }
if (this.isEncodePath && HandBrakeHardwareEncoderHelper.IsQsvAvailable && (HandBrakeHardwareEncoderHelper.QsvHardwareGeneration > 6) && (job.VideoEncoder?.IsQuickSync ?? false)) if (this.isEncodePath && HandBrakeHardwareEncoderHelper.IsQsvAvailable && (HandBrakeHardwareEncoderHelper.QsvHardwareGeneration > 6) && (job.VideoEncoder?.IsQuickSync ?? false))
@ -313,12 +315,12 @@ namespace HandBrakeWPF.Services.Encode.Factories
if (this.isEncodePath && HandBrakeHardwareEncoderHelper.IsNVDecAvailable && this.userSettingService.GetUserSetting<bool>(UserSettingConstants.EnableNvDecSupport) && job.VideoEncoder.IsNVEnc) if (this.isEncodePath && HandBrakeHardwareEncoderHelper.IsNVDecAvailable && this.userSettingService.GetUserSetting<bool>(UserSettingConstants.EnableNvDecSupport) && job.VideoEncoder.IsNVEnc)
{ {
video.HardwareDecode = (int)NativeConstants.HB_DECODE_SUPPORT_NVDEC; video.HardwareDecode = NativeConstants.HB_DECODE_SUPPORT_NVDEC;
} }
if (HandBrakeHardwareEncoderHelper.IsDirectXAvailable && this.userSettingService.GetUserSetting<bool>(UserSettingConstants.EnableDirectXDecoding)) if (HandBrakeHardwareEncoderHelper.IsDirectXAvailable && this.userSettingService.GetUserSetting<bool>(UserSettingConstants.EnableDirectXDecoding))
{ {
video.HardwareDecode = (int)NativeConstants.HB_DECODE_SUPPORT_MF; video.HardwareDecode = NativeConstants.HB_DECODE_SUPPORT_MF;
} }