Ticket #13311: 20180830_1146_latest_master.patch
File 20180830_1146_latest_master.patch, 67.2 KB (added by , 7 years ago)
mythtv/configure
diff --git a/mythtv/configure b/mythtv/configure
index f7cfc291b8..2f50f4536c 100755
@@ -134 +134 @@ Advanced options (experts only):
   --disable-vdpau        disable NVidia VDPAU hardware acceleration.
   --disable-crystalhd    disable Broadcom CrystalHD hardware decoder support
   --disable-vaapi        disable VAAPI hardware accelerated video decoding
+  --disable-vaapi2       disable VAAPI2 hardware accelerated video decoding
   --disable-openmax      disable OpenMAX hardware accelerated video decoding
   --disable-dxva2        disable hardware accelerated decoding on windows
   --disable-mediacodec   disable hardware accelerated decoding on android
@@ -1401 +1402 @@ HWACCEL_AUTODETECT_LIBRARY_LIST="
     crystalhd
     dxva2
     vaapi
+    vaapi2
     vda
     vdpau
 "
@@ -2040 +2042 @@ USING_LIST='
     opengl
     opengles
     vaapi
+    vaapi2
     vdpau
     openmax
     mediacodec
@@ -7272 +7275 @@ if enabled x11 ; then
   echo "xv support                ${xv-no}"
   echo "VDPAU support             ${vdpau-no}"
   echo "VAAPI support             ${vaapi-no}"
+  echo "VAAPI2 support            ${vaapi2-no}"
   echo "CrystalHD support         ${crystalhd-no}"
   echo "OpenMAX support           ${openmax-no}"
   if enabled openmax ; then
mythtv/libs/libmythtv/avformatdecoder.cpp
diff --git a/mythtv/libs/libmythtv/avformatdecoder.cpp b/mythtv/libs/libmythtv/avformatdecoder.cpp index 5b5fce95e8..1e1ba28edc 100644
a b extern "C" { 70 70 #include <QtAndroidExtras> 71 71 #endif 72 72 73 #ifdef USING_VAAPI2 74 #include "vaapi2context.h" 75 #endif 76 73 77 extern "C" { 74 78 #include "libavutil/avutil.h" 75 79 #include "libavutil/error.h" … … int get_avf_buffer_dxva2(struct AVCodecContext *c, AVFrame *pic, int flags); 182 186 #ifdef USING_VAAPI 183 187 int get_avf_buffer_vaapi(struct AVCodecContext *c, AVFrame *pic, int flags); 184 188 #endif 189 #ifdef USING_VAAPI2 190 int get_avf_buffer_vaapi2(struct AVCodecContext *c, AVFrame *pic, int flags); 191 #endif 185 192 186 193 static int determinable_frame_size(struct AVCodecContext *avctx) 187 194 { … … static int has_codec_parameters(AVStream *st) 248 255 return 1; 249 256 } 250 257 251 static bool force_sw_decode(AVCodecContext * avctx)258 static bool force_sw_decode(AVCodecContext * /*avctx*/) 252 259 { 253 switch (avctx->codec_id) 254 { 255 case AV_CODEC_ID_H264: 256 switch (avctx->profile) 257 { 258 case FF_PROFILE_H264_HIGH_10: 259 case FF_PROFILE_H264_HIGH_10_INTRA: 260 case FF_PROFILE_H264_HIGH_422: 261 case FF_PROFILE_H264_HIGH_422_INTRA: 262 case FF_PROFILE_H264_HIGH_444_PREDICTIVE: 263 case FF_PROFILE_H264_HIGH_444_INTRA: 264 case FF_PROFILE_H264_CAVLC_444: 265 return true; 266 default: 267 break; 268 } 269 break; 270 default: 271 break; 272 } 260 // This is nonsense... 261 // switch (avctx->codec_id) 262 // { 263 // case AV_CODEC_ID_H264: 264 // switch (avctx->profile) 265 // { 266 // case FF_PROFILE_H264_HIGH_10: 267 // case FF_PROFILE_H264_HIGH_10_INTRA: 268 // case FF_PROFILE_H264_HIGH_422: 269 // case FF_PROFILE_H264_HIGH_422_INTRA: 270 // case FF_PROFILE_H264_HIGH_444_PREDICTIVE: 271 // case FF_PROFILE_H264_HIGH_444_INTRA: 272 // case FF_PROFILE_H264_CAVLC_444: 273 // return true; 274 // default: 275 // break; 276 // } 277 // break; 278 // default: 279 // break; 280 // } 273 281 return false; 274 282 } 275 283 … … void AvFormatDecoder::GetDecoders(render_opts &opts) 385 393 opts.decoders->append("vaapi"); 386 394 (*opts.equiv_decoders)["vaapi"].append("dummy"); 387 395 #endif 396 #ifdef USING_VAAPI2 397 opts.decoders->append("vaapi2"); 398 (*opts.equiv_decoders)["vaapi2"].append("dummy"); 399 #endif 388 400 #ifdef USING_MEDIACODEC 389 401 opts.decoders->append("mediacodec"); 390 402 (*opts.equiv_decoders)["mediacodec"].append("dummy"); … … enum AVPixelFormat get_format_dxva2(struct AVCodecContext *avctx, 1515 1527 } 1516 1528 #endif 1517 1529 1518 #ifdef USING_VAAPI 1530 1519 1531 static bool IS_VAAPI_PIX_FMT(enum AVPixelFormat fmt) 1520 1532 { 1521 1533 return fmt == AV_PIX_FMT_VAAPI_MOCO || … … static bool IS_VAAPI_PIX_FMT(enum AVPixelFormat fmt) 1523 1535 fmt == AV_PIX_FMT_VAAPI_VLD; 1524 1536 } 1525 1537 1538 #ifdef USING_VAAPI 1539 1526 1540 // Declared separately to allow attribute 1527 1541 static enum AVPixelFormat get_format_vaapi(struct AVCodecContext *, 1528 1542 const enum AVPixelFormat *) MUNUSED; … … enum AVPixelFormat get_format_vaapi(struct AVCodecContext *avctx, 1549 1563 } 1550 1564 #endif 1551 1565 1566 #ifdef USING_VAAPI2 1567 static enum AVPixelFormat get_format_vaapi2(struct AVCodecContext *avctx, 1568 const enum AVPixelFormat *valid_fmts) 1569 { 1570 enum AVPixelFormat ret = AV_PIX_FMT_NONE; 1571 while (*valid_fmts != AV_PIX_FMT_NONE) { 1572 if (IS_VAAPI_PIX_FMT(*valid_fmts)) 1573 { 1574 ret = *valid_fmts; 1575 avctx->pix_fmt = ret; 1576 // Vaapi2Context::SetHwframeCtx(avctx, 20); 1577 break; 1578 } 1579 valid_fmts++; 1580 } 1581 1582 // AVBufferRef* dev_ctx = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VAAPI); 1583 // 
AVBufferRef* frame_ctx = av_hwframe_ctx_alloc(dev_ctx); 1584 // avctx->hw_frames_ctx = frame_ctx; 1585 // avctx->hwaccel_context = 1586 return ret; 1587 } 1588 #endif 1589 1552 1590 #ifdef USING_MEDIACODEC 1553 1591 static enum AVPixelFormat get_format_mediacodec(struct AVCodecContext *avctx, 1554 1592 const enum AVPixelFormat *valid_fmts) … … void AvFormatDecoder::InitVideoCodec(AVStream *stream, AVCodecContext *enc, 1629 1667 else 1630 1668 #endif 1631 1669 #ifdef USING_VAAPI 1632 if (CODEC_IS_VAAPI(codec, enc) )1670 if (CODEC_IS_VAAPI(codec, enc) && codec_is_vaapi(video_codec_id)) 1633 1671 { 1634 1672 enc->get_buffer2 = get_avf_buffer_vaapi; 1635 1673 enc->get_format = get_format_vaapi; … … void AvFormatDecoder::InitVideoCodec(AVStream *stream, AVCodecContext *enc, 1644 1682 enc->slice_flags = SLICE_FLAG_CODED_ORDER | SLICE_FLAG_ALLOW_FIELD; 1645 1683 } 1646 1684 else 1685 #endif 1686 #ifdef USING_VAAPI2 1687 if (codec_is_vaapi2(video_codec_id)) 1688 { 1689 enc->get_buffer2 = get_avf_buffer_vaapi2; 1690 enc->get_format = get_format_vaapi2; 1691 } 1692 else 1647 1693 #endif 1648 1694 if (codec && codec->capabilities & AV_CODEC_CAP_DR1) 1649 1695 { … … void AvFormatDecoder::InitVideoCodec(AVStream *stream, AVCodecContext *enc, 1659 1705 .arg(ff_codec_id_string(enc->codec_id))); 1660 1706 } 1661 1707 1708 MythCodecContext *mctx = gCodecMap->getMythCodecContext(stream, video_codec_id); 1709 if (mctx) 1710 { 1711 mctx->setPlayer(m_parent); 1712 int ret = mctx->HwDecoderInit(enc); 1713 if (ret < 0) 1714 { 1715 char error[AV_ERROR_MAX_STRING_SIZE]; 1716 if (ret < 0) 1717 { 1718 LOG(VB_GENERAL, LOG_ERR, LOC + 1719 QString("HwDecoderInit unable to initialize hardware decoder: %1 (%2)") 1720 .arg(av_make_error_string(error, sizeof(error), ret)) 1721 .arg(ret)); 1722 // force it to switch to software decoding 1723 averror_count = SEQ_PKT_ERR_MAX + 1; 1724 m_streams_changed = true; 1725 } 1726 } 1727 } 1728 1662 1729 if (FlagIsSet(kDecodeLowRes) || FlagIsSet(kDecodeSingleThreaded) || 1663 1730 FlagIsSet(kDecodeFewBlocks) || FlagIsSet(kDecodeNoLoopFilter) || 1664 1731 FlagIsSet(kDecodeNoDecode)) … … int AvFormatDecoder::ScanStreams(bool novideo) 2535 2602 foundgpudecoder = true; 2536 2603 } 2537 2604 } 2605 #endif // USING_MEDIACODEC 2606 #ifdef USING_VAAPI2 2607 if (!foundgpudecoder) 2608 { 2609 MythCodecID vaapi2_mcid; 2610 AVPixelFormat pix_fmt = AV_PIX_FMT_YUV420P; 2611 vaapi2_mcid = Vaapi2Context::GetBestSupportedCodec( 2612 &codec, dec, mpeg_version(enc->codec_id), 2613 pix_fmt); 2614 2615 if (codec_is_vaapi2(vaapi2_mcid)) 2616 { 2617 gCodecMap->freeCodecContext(ic->streams[selTrack]); 2618 enc = gCodecMap->getCodecContext(ic->streams[selTrack], codec); 2619 video_codec_id = vaapi2_mcid; 2620 foundgpudecoder = true; 2621 } 2622 } 2538 2623 #endif // USING_MEDIACODEC 2539 2624 } 2540 2625 // default to mpeg2 … … int AvFormatDecoder::ScanStreams(bool novideo) 2556 2641 2557 2642 use_frame_timing = false; 2558 2643 if (! 
private_dec 2559 && (codec_is_std(video_codec_id) || codec_is_mediacodec(video_codec_id))) 2644 && (codec_is_std(video_codec_id) 2645 || codec_is_mediacodec(video_codec_id) 2646 || codec_is_vaapi2(video_codec_id))) 2560 2647 use_frame_timing = true; 2561 2648 2562 2649 if (FlagIsSet(kDecodeSingleThreaded)) … … int get_avf_buffer_vaapi(struct AVCodecContext *c, AVFrame *pic, int /*flags*/) 3056 3143 } 3057 3144 #endif 3058 3145 3146 #ifdef USING_VAAPI2 3147 int get_avf_buffer_vaapi2(struct AVCodecContext *c, AVFrame *pic, int flags) 3148 { 3149 AvFormatDecoder *nd = (AvFormatDecoder *)(c->opaque); 3150 3151 nd->directrendering = false; 3152 return avcodec_default_get_buffer2(c, pic, flags); 3153 } 3154 #endif 3155 3059 3156 void AvFormatDecoder::DecodeDTVCC(const uint8_t *buf, uint len, bool scte) 3060 3157 { 3061 3158 if (!len) … … bool AvFormatDecoder::ProcessVideoPacket(AVStream *curstream, AVPacket *pkt) 3591 3688 int retryCount = 0; 3592 3689 int ret = 0, gotpicture = 0; 3593 3690 AVCodecContext *context = gCodecMap->getCodecContext(curstream); 3691 MythCodecContext *mctx = gCodecMap->getMythCodecContext(curstream); 3692 mctx->setPlayer(m_parent); 3594 3693 MythAVFrame mpa_pic; 3595 3694 if (!mpa_pic) 3596 3695 { … … bool AvFormatDecoder::ProcessVideoPacket(AVStream *curstream, AVPacket *pkt) 3630 3729 // into separate routines or separate threads. 3631 3730 // Also now that it always consumes a whole buffer some code 3632 3731 // in the caller may be able to be optimized. 3633 ret = avcodec_receive_frame(context, mpa_pic); 3732 3733 // ret = avcodec_receive_frame(context, mpa_pic); 3734 // FilteredReceiveFrame will apply any codec-dependent filtering 3735 ret = mctx->FilteredReceiveFrame(context, mpa_pic); 3634 3736 3635 3737 if (ret == 0) 3636 3738 gotpicture = 1; … … bool AvFormatDecoder::ProcessVideoPacket(AVStream *curstream, AVPacket *pkt) 3671 3773 QString("video avcodec_send_packet error: %1 (%2) gotpicture:%3") 3672 3774 .arg(av_make_error_string(error, sizeof(error), ret2)) 3673 3775 .arg(ret2).arg(gotpicture)); 3674 if ( ret == AVERROR_INVALIDDATA || ret2 == AVERROR_INVALIDDATA)3776 if (++averror_count > SEQ_PKT_ERR_MAX) 3675 3777 { 3676 if (++averror_count > SEQ_PKT_ERR_MAX) 3677 { 3678 // If erroring on GPU assist, try switching to software decode 3679 if (codec_is_std(video_codec_id)) 3680 m_parent->SetErrored(QObject::tr("Video Decode Error")); 3681 else 3682 m_streams_changed = true; 3683 } 3778 // If erroring on GPU assist, try switching to software decode 3779 if (codec_is_std(video_codec_id)) 3780 m_parent->SetErrored(QObject::tr("Video Decode Error")); 3781 else 3782 m_streams_changed = true; 3684 3783 } 3685 3784 if (ret == AVERROR_EXTERNAL || ret2 == AVERROR_EXTERNAL) 3686 3785 m_streams_changed = true; … … bool AvFormatDecoder::ProcessVideoFrame(AVStream *stream, AVFrame *mpa_pic) 3832 3931 3833 3932 VideoFrame *picframe = (VideoFrame *)(mpa_pic->opaque); 3834 3933 3934 // if (IS_VAAPI_PIX_FMT((AVPixelFormat)mpa_pic->format)) 3935 // directrendering=false; 3936 3835 3937 if (FlagIsSet(kDecodeNoDecode)) 3836 3938 { 3837 3939 // Do nothing, we just want the pts, captions, subtites, etc. 
… … bool AvFormatDecoder::ProcessVideoFrame(AVStream *stream, AVFrame *mpa_pic) 3840 3942 } 3841 3943 else if (!directrendering) 3842 3944 { 3945 AVFrame *tmp_frame = NULL; 3946 AVFrame *use_frame = NULL; 3947 3948 if (IS_VAAPI_PIX_FMT((AVPixelFormat)mpa_pic->format)) 3949 { 3950 int ret = 0; 3951 tmp_frame = av_frame_alloc(); 3952 use_frame = tmp_frame; 3953 /* retrieve data from GPU to CPU */ 3954 if ((ret = av_hwframe_transfer_data(use_frame, mpa_pic, 0)) < 0) { 3955 LOG(VB_GENERAL, LOG_ERR, LOC 3956 + QString("Error %1 transferring the data to system memory") 3957 .arg(ret)); 3958 av_frame_free(&use_frame); 3959 return false; 3960 } 3961 } 3962 else 3963 use_frame = mpa_pic; 3964 3843 3965 AVFrame tmppicture; 3844 3966 3845 3967 VideoFrame *xf = picframe; … … bool AvFormatDecoder::ProcessVideoFrame(AVStream *stream, AVFrame *mpa_pic) 3847 3969 3848 3970 unsigned char *buf = picframe->buf; 3849 3971 av_image_fill_arrays(tmppicture.data, tmppicture.linesize, 3850 buf, AV_PIX_FMT_YUV420P, context->width,3851 context->height, IMAGE_ALIGN);3972 buf, AV_PIX_FMT_YUV420P, use_frame->width, 3973 use_frame->height, IMAGE_ALIGN); 3852 3974 tmppicture.data[0] = buf + picframe->offsets[0]; 3853 3975 tmppicture.data[1] = buf + picframe->offsets[1]; 3854 3976 tmppicture.data[2] = buf + picframe->offsets[2]; … … bool AvFormatDecoder::ProcessVideoFrame(AVStream *stream, AVFrame *mpa_pic) 3857 3979 tmppicture.linesize[2] = picframe->pitches[2]; 3858 3980 3859 3981 QSize dim = get_video_dim(*context); 3860 sws_ctx = sws_getCachedContext(sws_ctx, context->width,3861 context->height, context->pix_fmt,3862 context->width, context->height,3982 sws_ctx = sws_getCachedContext(sws_ctx, use_frame->width, 3983 use_frame->height, (AVPixelFormat)use_frame->format, 3984 use_frame->width, use_frame->height, 3863 3985 AV_PIX_FMT_YUV420P, SWS_FAST_BILINEAR, 3864 3986 NULL, NULL, NULL); 3865 3987 if (!sws_ctx) … … bool AvFormatDecoder::ProcessVideoFrame(AVStream *stream, AVFrame *mpa_pic) 3867 3989 LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to allocate sws context"); 3868 3990 return false; 3869 3991 } 3870 sws_scale(sws_ctx, mpa_pic->data, mpa_pic->linesize, 0, dim.height(),3992 sws_scale(sws_ctx, use_frame->data, use_frame->linesize, 0, dim.height(), 3871 3993 tmppicture.data, tmppicture.linesize); 3872 3994 3873 3995 if (xf) … … bool AvFormatDecoder::ProcessVideoFrame(AVStream *stream, AVFrame *mpa_pic) 3880 4002 xf->aspect = current_aspect; 3881 4003 m_parent->DiscardVideoFrame(xf); 3882 4004 } 4005 if (tmp_frame) 4006 av_frame_free(&tmp_frame); 3883 4007 } 3884 4008 else if (!picframe) 3885 4009 { -
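A note on the software-fallback path added to ProcessVideoFrame() above: when the decoded frame is still a VAAPI surface, the patch downloads it with av_hwframe_transfer_data() before handing it to libswscale. The following is a minimal standalone sketch of that FFmpeg call pattern, not MythTV code; the helper name copy_to_system_memory is illustrative only, and it assumes the input frame really does live in GPU memory (e.g. AV_PIX_FMT_VAAPI).

extern "C" {
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>
}

// Returns a newly allocated software frame on success, nullptr on failure.
static AVFrame *copy_to_system_memory(const AVFrame *hw_frame)
{
    AVFrame *sw_frame = av_frame_alloc();
    if (!sw_frame)
        return nullptr;

    // av_hwframe_transfer_data() downloads the surface to system memory;
    // the destination pixel format is chosen by the hw frames context
    // (NV12 for VAAPI by default), so the caller should look at
    // sw_frame->format rather than assuming YUV420P.
    if (av_hwframe_transfer_data(sw_frame, hw_frame, 0) < 0)
    {
        av_frame_free(&sw_frame);
        return nullptr;
    }

    // Timestamps and other metadata are not copied automatically.
    av_frame_copy_props(sw_frame, hw_frame);
    return sw_frame;
}

This is also why the hunk above switches sws_getCachedContext() from context->pix_fmt to use_frame->format: the downloaded frame is typically NV12 rather than the codec context's hardware pixel format.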
mythtv/libs/libmythtv/avformatdecoder.h
diff --git a/mythtv/libs/libmythtv/avformatdecoder.h b/mythtv/libs/libmythtv/avformatdecoder.h
index e4dded2157..1ea3785b7d 100644
@@ -206 +206 @@ class AvFormatDecoder : public DecoderBase
 
     friend int get_avf_buffer(struct AVCodecContext *c, AVFrame *pic,
                               int flags);
+    friend int get_avf_buffer_vaapi2(struct AVCodecContext *c, AVFrame *pic,
+                                     int flags);
     friend void release_avf_buffer(void *opaque, uint8_t *data);
 
     friend int open_avf(URLContext *h, const char *filename, int flags);
mythtv/libs/libmythtv/libmythtv.pro
diff --git a/mythtv/libs/libmythtv/libmythtv.pro b/mythtv/libs/libmythtv/libmythtv.pro
index 939ca20984..8ffe3b8266 100644
@@ -366 +366 @@ using_frontend {
     HEADERS += decoderbase.h
     HEADERS += nuppeldecoder.h avformatdecoder.h
     HEADERS += privatedecoder.h
+    HEADERS += mythcodeccontext.h
     SOURCES += decoderbase.cpp
     SOURCES += nuppeldecoder.cpp avformatdecoder.cpp
     SOURCES += privatedecoder.cpp
+    SOURCES += mythcodeccontext.cpp
 
     using_crystalhd {
         DEFINES += USING_CRYSTALHD
@@ -505 +507 @@ using_frontend {
         using_opengl_video:DEFINES += USING_GLVAAPI
     }
 
+    using_vaapi2 {
+        DEFINES += USING_VAAPI2
+        HEADERS += vaapi2context.h
+        SOURCES += vaapi2context.cpp
+    }
+
     using_mediacodec {
         DEFINES += USING_MEDIACODEC
         HEADERS += mediacodeccontext.h
mythtv/libs/libmythtv/mythavutil.cpp
diff --git a/mythtv/libs/libmythtv/mythavutil.cpp b/mythtv/libs/libmythtv/mythavutil.cpp
index c21aec4456..06eb918bd0 100644
@@ -10 +10 @@
 #include "mythavutil.h"
 #include "mythcorecontext.h"
 #include "mythconfig.h"
+#include "vaapi2context.h"
 extern "C" {
 #include "libswscale/swscale.h"
 #include "libavfilter/avfilter.h"
@@ -388 +389 @@ AVCodecContext *MythCodecMap::getCodecContext(const AVStream *stream,
     const AVCodec *pCodec, bool nullCodec)
 {
     QMutexLocker lock(&mapLock);
-    AVCodecContext *avctx = streamMap.value(stream, NULL);
+    AVCodecContext *avctx = streamAvMap.value(stream, NULL);
     if (!avctx)
     {
         if (stream == NULL || stream->codecpar == NULL)
@@ -411 +412 @@ AVCodecContext *MythCodecMap::getCodecContext(const AVStream *stream,
             avcodec_free_context(&avctx);
         if (avctx)
         {
-            av_codec_set_pkt_timebase(avctx, stream->time_base);
-            streamMap.insert(stream, avctx);
+            avctx->pkt_timebase = stream->time_base;
+            streamAvMap.insert(stream, avctx);
         }
     }
     return avctx;
 }
 
+MythCodecContext *MythCodecMap::getMythCodecContext(AVStream* stream, MythCodecID codec)
+{
+    QMutexLocker lock(&mapLock);
+    MythCodecContext *mctx = streamMythMap.value(stream, NULL);
+    if (!mctx && codec)
+    {
+        if (codec_is_vaapi2(codec))
+            mctx = new Vaapi2Context(stream);
+        if (!mctx)
+            mctx = new MythCodecContext(stream);
+        streamMythMap.insert(stream, mctx);
+    }
+    return mctx;
+}
+
 AVCodecContext *MythCodecMap::hasCodecContext(const AVStream *stream)
 {
-    return streamMap.value(stream, NULL);
+    return streamAvMap.value(stream, NULL);
 }
 
 void MythCodecMap::freeCodecContext(const AVStream *stream)
 {
     QMutexLocker lock(&mapLock);
-    AVCodecContext *avctx = streamMap.take(stream);
+    AVCodecContext *avctx = streamAvMap.take(stream);
     if (avctx)
         avcodec_free_context(&avctx);
+    MythCodecContext *mctx = streamMythMap.take(stream);
+    if (mctx)
+        delete mctx;
 }
 
 void MythCodecMap::freeAllCodecContexts()
 {
     QMutexLocker lock(&mapLock);
-    QMap<const AVStream*, AVCodecContext*>::iterator i = streamMap.begin();
-    while (i != streamMap.end()) {
+    QMap<const AVStream*, AVCodecContext*>::iterator i = streamAvMap.begin();
+    while (i != streamAvMap.end()) {
         const AVStream *stream = i.key();
         ++i;
         freeCodecContext(stream);
mythtv/libs/libmythtv/mythavutil.h
diff --git a/mythtv/libs/libmythtv/mythavutil.h b/mythtv/libs/libmythtv/mythavutil.h
index 1357fe2c54..b14ad3ddd2 100644
@@ -11 +11 @@
 
 #include "mythtvexp.h" // for MUNUSED
 #include "mythframe.h"
+#include "mythcodecid.h"
+
 extern "C" {
 #include "libavcodec/avcodec.h"
 }
@@ -86 +88 @@ private:
  * This is a singeton class - only 1 instance gets created.
  */
 
+class MythCodecContext;
+
 class MTV_PUBLIC MythCodecMap
 {
   public:
@@ -95 +99 @@ class MTV_PUBLIC MythCodecMap
     AVCodecContext *getCodecContext(const AVStream*,
         const AVCodec *pCodec = NULL, bool nullCodec = false);
     AVCodecContext *hasCodecContext(const AVStream*);
+    MythCodecContext *getMythCodecContext(AVStream*,
+        const MythCodecID codec = kCodec_NONE);
     void freeCodecContext(const AVStream*);
     void freeAllCodecContexts();
   protected:
-    QMap<const AVStream*, AVCodecContext*> streamMap;
+    QMap<const AVStream*, AVCodecContext*> streamAvMap;
+    QMap<const AVStream*, MythCodecContext*> streamMythMap;
     QMutex mapLock;
 };
 
new file mythtv/libs/libmythtv/mythcodeccontext.cpp
diff --git a/mythtv/libs/libmythtv/mythcodeccontext.cpp b/mythtv/libs/libmythtv/mythcodeccontext.cpp new file mode 100644 index 0000000000..1038c81f08
- + 1 ////////////////////////////////////////////////////////////////////////////// 2 // Copyright (c) 2017 MythTV Developers <mythtv-dev@mythtv.org> 3 // 4 // This is part of MythTV (https://www.mythtv.org) 5 // 6 // This program is free software; you can redistribute it and/or modify 7 // it under the terms of the GNU General Public License as published by 8 // the Free Software Foundation; either version 2 of the License, or 9 // (at your option) any later version. 10 // 11 // This program is distributed in the hope that it will be useful, 12 // but WITHOUT ANY WARRANTY; without even the implied warranty of 13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 // GNU General Public License for more details. 15 // 16 // You should have received a copy of the GNU General Public License 17 // along with this program; if not, write to the Free Software 18 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 19 // 20 // You should have received a copy of the GNU General Public License 21 // along with this program. If not, see <http://www.gnu.org/licenses/>. 22 // 23 ////////////////////////////////////////////////////////////////////////////// 24 25 #include "mythcorecontext.h" 26 #include "mythlogging.h" 27 #include "mythcodeccontext.h" 28 #include "videooutbase.h" 29 #include "mythplayer.h" 30 31 extern "C" { 32 #include "libavutil/pixfmt.h" 33 #include "libavutil/hwcontext.h" 34 #include "libavcodec/avcodec.h" 35 #include "libavfilter/avfilter.h" 36 #include "libavfilter/buffersink.h" 37 #include "libavfilter/buffersrc.h" 38 #include "libavformat/avformat.h" 39 #include "libavutil/opt.h" 40 #include "libavutil/buffer.h" 41 } 42 43 #define LOC QString("MythCodecContext: ") 44 45 MythCodecContext::MythCodecContext(AVStream* initStream) : 46 stream(initStream), 47 buffersink_ctx(NULL), 48 buffersrc_ctx(NULL), 49 filter_graph(NULL), 50 filtersInitialized(false), 51 hw_frames_ctx(0), 52 player(NULL), 53 ptsUsed(0) 54 { 55 priorPts[0] = 0; 56 priorPts[1] = 0; 57 } 58 59 MythCodecContext::~MythCodecContext() 60 { 61 CloseFilters(); 62 } 63 64 QStringList MythCodecContext::MythCodecContext::GetDeinterlacers(QString decodername) 65 { 66 QStringList ret; 67 #ifdef USING_VAAPI2 68 if (decodername == "vaapi2") 69 { 70 ret.append("vaapi2default"); 71 ret.append("vaapi2bob"); 72 ret.append("vaapi2weave"); 73 ret.append("vaapi2motion_adaptive"); 74 ret.append("vaapi2motion_compensated"); 75 ret.append("vaapi2doubleratedefault"); 76 ret.append("vaapi2doubleratebob"); 77 ret.append("vaapi2doublerateweave"); 78 ret.append("vaapi2doubleratemotion_adaptive"); 79 ret.append("vaapi2doubleratemotion_compensated"); 80 81 /* 82 "mode", "Deinterlacing mode", 83 "default", "Use the highest-numbered (and therefore possibly most advanced) deinterlacing algorithm", 84 "bob", "Use the bob deinterlacing algorithm", 85 "weave", "Use the weave deinterlacing algorithm", 86 "motion_adaptive", "Use the motion adaptive deinterlacing algorithm", 87 "motion_compensated", "Use the motion compensated deinterlacing algorithm", 88 89 "rate", "Generate output at frame rate or field rate", 90 "frame", "Output at frame rate (one frame of output for each field-pair)", 91 "field", "Output at field rate (one frame of output for each field)", 92 93 "auto", "Only deinterlace fields, passing frames through unchanged", 94 1 = enabled 95 0 = disabled 96 */ 97 98 } 99 #endif 100 return ret; 101 } 102 103 104 // Currently this will only set up the filter after an interlaced frame. 
105 // If we need other filters apart from deinterlace filters we will 106 // need to make a change here. 107 108 int MythCodecContext::FilteredReceiveFrame(AVCodecContext *ctx, AVFrame *frame) 109 { 110 int ret = 0; 111 112 while (1) 113 { 114 if (filter_graph) 115 { 116 ret = av_buffersink_get_frame(buffersink_ctx, frame); 117 if (ret >= 0) 118 { 119 if (priorPts[0] && ptsUsed == priorPts[1]) 120 { 121 frame->pts = priorPts[1] + (priorPts[1] - priorPts[0])/2; 122 frame->scte_cc_len = 0; 123 frame->atsc_cc_len = 0; 124 av_frame_remove_side_data(frame, AV_FRAME_DATA_A53_CC); 125 } 126 else 127 { 128 frame->pts = priorPts[1]; 129 ptsUsed = priorPts[1]; 130 } 131 // {const char *msg = QString("filter pts=%1 interlaced=%2") 132 // .arg(frame->pts).arg(frame->interlaced_frame).toLocal8Bit(); 133 // fprintf(stderr,"%s\n", msg);} 134 } 135 if (ret != AVERROR(EAGAIN)) 136 break; 137 } 138 139 // EAGAIN or no filter graph 140 ret = avcodec_receive_frame(ctx, frame); 141 if (ret < 0) 142 break; 143 // {const char *msg = QString("codec pts=%1 interlaced=%2") 144 // .arg(frame->pts).arg(frame->interlaced_frame).toLocal8Bit(); 145 // fprintf(stderr,"%s\n", msg);} 146 priorPts[0]=priorPts[1]; 147 priorPts[1]=frame->pts; 148 if (frame->interlaced_frame || filter_graph) 149 { 150 if (!filtersInitialized 151 || width != frame->width 152 || height != frame->height) 153 { 154 // bypass any frame of unknown format 155 if (frame->format < 0) 156 break; 157 ret = InitFilters(ctx, frame); 158 filtersInitialized = true; 159 if (ret < 0) 160 { 161 LOG(VB_GENERAL, LOG_ERR, LOC + "InitFilters failed - continue without filters"); 162 break; 163 } 164 } 165 if (filter_graph) 166 { 167 ret = av_buffersrc_add_frame(buffersrc_ctx, frame); 168 if (ret < 0) 169 break; 170 } 171 else 172 break; 173 } 174 else 175 break; 176 } 177 178 return ret; 179 } 180 181 182 int MythCodecContext::InitFilters(AVCodecContext *ctx, AVFrame *frame) 183 { 184 char args[512]; 185 int ret = 0; 186 CloseFilters(); 187 width = frame->width; 188 height = frame->height; 189 if (!player) 190 return -1; 191 VideoOutput *vo = player->GetVideoOutput(); 192 VideoDisplayProfile *vdisp_profile = vo->GetProfile(); 193 QString filtername = vdisp_profile->GetFilteredDeint(QString()); 194 QString filters = GetDeinterlaceFilter(filtername); 195 196 if (filters.isEmpty()) 197 return ret; 198 199 const AVFilter *buffersrc = avfilter_get_by_name("buffer"); 200 const AVFilter *buffersink = avfilter_get_by_name("buffersink"); 201 AVFilterInOut *outputs = avfilter_inout_alloc(); 202 AVFilterInOut *inputs = avfilter_inout_alloc(); 203 AVRational time_base = stream->time_base; 204 // enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE }; 205 // enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_VAAPI_VLD, AV_PIX_FMT_NONE }; 206 AVBufferSrcParameters* params = NULL; 207 208 filter_graph = avfilter_graph_alloc(); 209 if (!outputs || !inputs || !filter_graph) 210 { 211 ret = AVERROR(ENOMEM); 212 goto end; 213 } 214 215 /* buffer video source: the decoded frames from the decoder will be inserted here. 
*/ 216 snprintf(args, sizeof(args), 217 "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d", 218 frame->width, frame->height, frame->format, // ctx->pix_fmt, 219 time_base.num, time_base.den, 220 ctx->sample_aspect_ratio.num, ctx->sample_aspect_ratio.den); 221 222 // isInterlaced = frame->interlaced_frame; 223 224 ret = avfilter_graph_create_filter(&buffersrc_ctx, buffersrc, "in", 225 args, NULL, filter_graph); 226 if (ret < 0) 227 { 228 LOG(VB_GENERAL, LOG_ERR, LOC + "avfilter_graph_create_filter failed for buffer source"); 229 goto end; 230 } 231 232 params = av_buffersrc_parameters_alloc(); 233 // params->format = frame->format; 234 // params->time_base.num = stream->time_base.num; 235 // params->time_base.den = stream->time_base.den; 236 // params->width = frame->width; 237 // params->height = frame->height; 238 // params->sample_aspect_ratio.num = ctx->sample_aspect_ratio.num; 239 // params->sample_aspect_ratio.den = ctx->sample_aspect_ratio.den; 240 if (hw_frames_ctx) 241 av_buffer_unref(&hw_frames_ctx); 242 hw_frames_ctx = av_buffer_ref(frame->hw_frames_ctx); 243 params->hw_frames_ctx = hw_frames_ctx; 244 245 ret = av_buffersrc_parameters_set(buffersrc_ctx, params); 246 247 if (ret < 0) 248 { 249 LOG(VB_GENERAL, LOG_ERR, LOC + "av_buffersrc_parameters_set failed"); 250 goto end; 251 } 252 253 av_freep(¶ms); 254 255 /* buffer video sink: to terminate the filter chain. */ 256 ret = avfilter_graph_create_filter(&buffersink_ctx, buffersink, "out", 257 NULL, NULL, filter_graph); 258 if (ret < 0) 259 { 260 LOG(VB_GENERAL, LOG_ERR, LOC + "avfilter_graph_create_filter failed for buffer sink"); 261 goto end; 262 } 263 264 // ret = av_opt_set_int_list(buffersink_ctx, "pix_fmts", pix_fmts, 265 // AV_PIX_FMT_NONE, AV_OPT_SEARCH_CHILDREN); 266 // if (ret < 0) 267 // { 268 // LOG(VB_GENERAL, LOG_ERR, LOC + "av_opt_set_int_list pix_fmts failed"); 269 // goto end; 270 // } 271 272 /* 273 * Set the endpoints for the filter graph. The filter_graph will 274 * be linked to the graph described by filters_descr. 275 */ 276 277 /* 278 * The buffer source output must be connected to the input pad of 279 * the first filter described by filters_descr; since the first 280 * filter input label is not specified, it is set to "in" by 281 * default. 282 */ 283 outputs->name = av_strdup("in"); 284 outputs->filter_ctx = buffersrc_ctx; 285 outputs->pad_idx = 0; 286 outputs->next = NULL; 287 288 /* 289 * The buffer sink input must be connected to the output pad of 290 * the last filter described by filters_descr; since the last 291 * filter output label is not specified, it is set to "out" by 292 * default. 
293 */ 294 inputs->name = av_strdup("out"); 295 inputs->filter_ctx = buffersink_ctx; 296 inputs->pad_idx = 0; 297 inputs->next = NULL; 298 299 if ((ret = avfilter_graph_parse_ptr(filter_graph, filters.toLocal8Bit(), 300 &inputs, &outputs,0)) < 0) 301 { 302 LOG(VB_GENERAL, LOG_ERR, LOC 303 + QString("avfilter_graph_parse_ptr failed for %1").arg(filters)); 304 goto end; 305 } 306 307 if ((ret = avfilter_graph_config(filter_graph, NULL)) < 0) 308 { 309 LOG(VB_GENERAL, LOG_ERR, LOC 310 + QString("avfilter_graph_config failed")); 311 goto end; 312 } 313 314 // send the first packet to the filters 315 // frame->pts = frame->best_effort_timestamp; 316 // ret = av_buffersrc_add_frame(buffersrc_ctx, frame); 317 // av_frame_unref(frame); 318 319 // if (ret < 0) 320 // { 321 // LOG(VB_GENERAL, LOG_ERR, LOC 322 // + QString("av_buffersrc_add_frame first time failed")); 323 // goto end; 324 // } 325 LOG(VB_PLAYBACK, LOG_INFO, LOC + 326 QString("Using hardware decoder based deinterlace filter <%1>.") 327 .arg(filters)); 328 end: 329 if (ret < 0) 330 { 331 avfilter_graph_free(&filter_graph); 332 filter_graph = NULL; 333 } 334 avfilter_inout_free(&inputs); 335 avfilter_inout_free(&outputs); 336 337 return ret; 338 } 339 340 void MythCodecContext::CloseFilters() 341 { 342 avfilter_graph_free(&filter_graph); 343 filter_graph = NULL; 344 buffersink_ctx = NULL; 345 buffersrc_ctx = NULL; 346 filtersInitialized = false; 347 ptsUsed = 0; 348 priorPts[0] = 0; 349 priorPts[1] = 0; 350 // isInterlaced = 0; 351 width = 0; 352 height = 0; 353 354 if (hw_frames_ctx) 355 av_buffer_unref(&hw_frames_ctx); 356 } 357 No newline at end of file -
new file mythtv/libs/libmythtv/mythcodeccontext.h
diff --git a/mythtv/libs/libmythtv/mythcodeccontext.h b/mythtv/libs/libmythtv/mythcodeccontext.h new file mode 100644 index 0000000000..95cb1858d6
- + 1 ////////////////////////////////////////////////////////////////////////////// 2 // Copyright (c) 2017 MythTV Developers <mythtv-dev@mythtv.org> 3 // 4 // This is part of MythTV (https://www.mythtv.org) 5 // 6 // This program is free software; you can redistribute it and/or modify 7 // it under the terms of the GNU General Public License as published by 8 // the Free Software Foundation; either version 2 of the License, or 9 // (at your option) any later version. 10 // 11 // This program is distributed in the hope that it will be useful, 12 // but WITHOUT ANY WARRANTY; without even the implied warranty of 13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 // GNU General Public License for more details. 15 // 16 // You should have received a copy of the GNU General Public License 17 // along with this program; if not, write to the Free Software 18 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 19 // 20 // You should have received a copy of the GNU General Public License 21 // along with this program. If not, see <http://www.gnu.org/licenses/>. 22 // 23 ////////////////////////////////////////////////////////////////////////////// 24 25 26 #ifndef MYTHCODECONTEXT_H 27 #define MYTHCODECCONTEXT_H 28 29 struct AVCodecContext; 30 struct AVFrame; 31 struct AVStream; 32 struct AVFilterContext; 33 struct AVFilterGraph; 34 struct AVBufferRef; 35 class MythPlayer; 36 37 #include "mythtvexp.h" 38 39 class MTV_PUBLIC MythCodecContext 40 { 41 public: 42 MythCodecContext(AVStream* initStream); 43 virtual ~MythCodecContext(); 44 virtual int HwDecoderInit(AVCodecContext * /*ctx*/) { return 0; } 45 virtual int FilteredReceiveFrame(AVCodecContext *ctx, AVFrame *frame); 46 int InitFilters(AVCodecContext *ctx, AVFrame *frame); 47 void CloseFilters(); 48 static QStringList GetDeinterlacers(QString decodername); 49 virtual QString GetDeinterlaceFilter(QString /*filtername*/) { return QString(); } 50 void setPlayer(MythPlayer *tPlayer) { player = tPlayer; } 51 protected: 52 AVStream* stream; 53 AVFilterContext *buffersink_ctx; 54 AVFilterContext *buffersrc_ctx; 55 AVFilterGraph *filter_graph; 56 bool filtersInitialized; 57 AVBufferRef *hw_frames_ctx; 58 MythPlayer *player; 59 int64_t priorPts[2]; 60 int64_t ptsUsed; 61 // bool isInterlaced; 62 int width; 63 int height; 64 }; 65 66 #endif // MYTHCODECCONTEXT_H 67 No newline at end of file -
mythtv/libs/libmythtv/mythcodecid.cpp
diff --git a/mythtv/libs/libmythtv/mythcodecid.cpp b/mythtv/libs/libmythtv/mythcodecid.cpp index b2023512b5..cff4ba0358 100644
a b QString toString(MythCodecID codecid) 123 123 case kCodec_HEVC_MEDIACODEC: 124 124 return "HEVC MEDIACODEC"; 125 125 126 case kCodec_MPEG1_VAAPI2: 127 return "MPEG1 VAAPI2"; 128 case kCodec_MPEG2_VAAPI2: 129 return "MPEG2 VAAPI2"; 130 case kCodec_H263_VAAPI2: 131 return "H.263 VAAPI2"; 132 case kCodec_MPEG4_VAAPI2: 133 return "MPEG4 VAAPI2"; 134 case kCodec_H264_VAAPI2: 135 return "H.264 VAAPI2"; 136 case kCodec_VC1_VAAPI2: 137 return "VC1 VAAPI2"; 138 case kCodec_WMV3_VAAPI2: 139 return "WMV3 VAAPI2"; 140 case kCodec_VP8_VAAPI2: 141 return "VP8 VAAPI2"; 142 case kCodec_VP9_VAAPI2: 143 return "VP9 VAAPI2"; 144 case kCodec_HEVC_VAAPI2: 145 return "HEVC VAAPI2"; 146 126 147 default: 127 148 break; 128 149 } … … int myth2av_codecid(MythCodecID codec_id, bool &vdpau) 305 326 ret = AV_CODEC_ID_HEVC; 306 327 break; 307 328 329 case kCodec_MPEG1_VAAPI2: 330 ret = AV_CODEC_ID_MPEG1VIDEO; 331 break; 332 case kCodec_MPEG2_VAAPI2: 333 ret = AV_CODEC_ID_MPEG2VIDEO; 334 break; 335 case kCodec_H263_VAAPI2: 336 ret = AV_CODEC_ID_H263; 337 break; 338 case kCodec_MPEG4_VAAPI2: 339 ret = AV_CODEC_ID_MPEG4; 340 break; 341 case kCodec_H264_VAAPI2: 342 ret = AV_CODEC_ID_H264; 343 break; 344 case kCodec_VC1_VAAPI2: 345 ret = AV_CODEC_ID_VC1; 346 break; 347 case kCodec_WMV3_VAAPI2: 348 ret = AV_CODEC_ID_WMV3; 349 break; 350 case kCodec_VP8_VAAPI2: 351 ret = AV_CODEC_ID_VP8; 352 break; 353 case kCodec_VP9_VAAPI2: 354 ret = AV_CODEC_ID_VP9; 355 break; 356 case kCodec_HEVC_VAAPI2: 357 ret = AV_CODEC_ID_HEVC; 358 break; 359 308 360 default: 309 361 LOG(VB_GENERAL, LOG_ERR, 310 362 QString("Error: MythCodecID %1 has not been " … … QString get_encoding_type(MythCodecID codecid) 356 408 case kCodec_MPEG1_VAAPI: 357 409 case kCodec_MPEG1_DXVA2: 358 410 case kCodec_MPEG1_MEDIACODEC: 411 case kCodec_MPEG1_VAAPI2: 359 412 case kCodec_MPEG2: 360 413 case kCodec_MPEG2_VDPAU: 361 414 case kCodec_MPEG2_VAAPI: 362 415 case kCodec_MPEG2_DXVA2: 363 416 case kCodec_MPEG2_MEDIACODEC: 417 case kCodec_MPEG2_VAAPI2: 364 418 return "MPEG-2"; 365 419 366 420 case kCodec_H263: … … QString get_encoding_type(MythCodecID codecid) 368 422 case kCodec_H263_VAAPI: 369 423 case kCodec_H263_DXVA2: 370 424 case kCodec_H263_MEDIACODEC: 425 case kCodec_H263_VAAPI2: 371 426 return "H.263"; 372 427 373 428 case kCodec_NUV_MPEG4: … … QString get_encoding_type(MythCodecID codecid) 376 431 case kCodec_MPEG4_VAAPI: 377 432 case kCodec_MPEG4_DXVA2: 378 433 case kCodec_MPEG4_MEDIACODEC: 434 case kCodec_MPEG4_VAAPI2: 379 435 return "MPEG-4"; 380 436 381 437 case kCodec_H264: … … QString get_encoding_type(MythCodecID codecid) 383 439 case kCodec_H264_VAAPI: 384 440 case kCodec_H264_DXVA2: 385 441 case kCodec_H264_MEDIACODEC: 442 case kCodec_H264_VAAPI2: 386 443 return "H.264"; 387 444 388 445 case kCodec_VC1: … … QString get_encoding_type(MythCodecID codecid) 390 447 case kCodec_VC1_VAAPI: 391 448 case kCodec_VC1_DXVA2: 392 449 case kCodec_VC1_MEDIACODEC: 450 case kCodec_VC1_VAAPI2: 393 451 return "VC-1"; 394 452 395 453 case kCodec_WMV3: … … QString get_encoding_type(MythCodecID codecid) 397 455 case kCodec_WMV3_VAAPI: 398 456 case kCodec_WMV3_DXVA2: 399 457 case kCodec_WMV3_MEDIACODEC: 458 case kCodec_WMV3_VAAPI2: 400 459 return "WMV3"; 401 460 402 461 case kCodec_VP8: … … QString get_encoding_type(MythCodecID codecid) 404 463 case kCodec_VP8_VAAPI: 405 464 case kCodec_VP8_DXVA2: 406 465 case kCodec_VP8_MEDIACODEC: 466 case kCodec_VP8_VAAPI2: 407 467 return "VP8"; 408 468 409 469 case kCodec_VP9: … … QString get_encoding_type(MythCodecID codecid) 411 
471 case kCodec_VP9_VAAPI: 412 472 case kCodec_VP9_DXVA2: 413 473 case kCodec_VP9_MEDIACODEC: 474 case kCodec_VP9_VAAPI2: 414 475 return "VP8"; 415 476 416 477 case kCodec_HEVC: … … QString get_encoding_type(MythCodecID codecid) 418 479 case kCodec_HEVC_VAAPI: 419 480 case kCodec_HEVC_DXVA2: 420 481 case kCodec_HEVC_MEDIACODEC: 482 case kCodec_HEVC_VAAPI2: 421 483 return "HEVC"; 422 484 423 485 case kCodec_NONE: … … QString get_encoding_type(MythCodecID codecid) 426 488 case kCodec_VAAPI_END: 427 489 case kCodec_DXVA2_END: 428 490 case kCodec_MEDIACODEC_END: 491 case kCodec_VAAPI2_END: 429 492 return QString(); 430 493 } 431 494 … … QString get_decoder_name(MythCodecID codec_id) 446 509 if (codec_is_mediacodec(codec_id)) 447 510 return "mediacodec"; 448 511 512 if (codec_is_vaapi2(codec_id)) 513 return "vaapi2"; 514 449 515 return "ffmpeg"; 450 516 } -
mythtv/libs/libmythtv/mythcodecid.h
diff --git a/mythtv/libs/libmythtv/mythcodecid.h b/mythtv/libs/libmythtv/mythcodecid.h
index ce3620467d..f1d225d4be 100644
@@ -91 +91 @@ typedef enum
 
     kCodec_MEDIACODEC_END,
 
+    kCodec_VAAPI2_BEGIN = kCodec_MEDIACODEC_END,
+
+    kCodec_MPEG1_VAAPI2,
+    kCodec_MPEG2_VAAPI2,
+    kCodec_H263_VAAPI2,
+    kCodec_MPEG4_VAAPI2,
+    kCodec_H264_VAAPI2,
+    kCodec_VC1_VAAPI2,
+    kCodec_WMV3_VAAPI2,
+    kCodec_VP8_VAAPI2,
+    kCodec_VP9_VAAPI2,
+    kCodec_HEVC_VAAPI2,
+
+    kCodec_VAAPI2_END,
+
 } MythCodecID;
 
 // MythCodecID convenience functions
@@ -113 +128 @@ typedef enum
                                  (id == kCodec_VC1_DXVA2)))
 #define codec_is_mediacodec(id) ((id > kCodec_MEDIACODEC_BEGIN) && \
                                  (id < kCodec_MEDIACODEC_END))
+#define codec_is_vaapi2(id)     ((id > kCodec_VAAPI2_BEGIN) && \
+                                 (id < kCodec_VAAPI2_END))
 
-#define codec_sw_copy(id) (codec_is_std(id) || codec_is_mediacodec(id))
+#define codec_sw_copy(id) (codec_is_std(id) || codec_is_mediacodec(id) || codec_is_vaapi2(id))
 
 QString get_encoding_type(MythCodecID codecid);
 QString get_decoder_name(MythCodecID codec_id);
@@ -156 +173 @@ int mpeg_version(int codec_id);
 #define CODEC_IS_MEDIACODEC(codec) (0)
 #endif
 
-#define CODEC_IS_HWACCEL(codec, enc) (CODEC_IS_VDPAU(codec) || \
-                                      CODEC_IS_VAAPI(codec, enc) || \
-                                      CODEC_IS_DXVA2(codec, enc) || \
-                                      CODEC_IS_MEDIACODEC(codec))
-
 #endif // _MYTH_CODEC_ID_H_
mythtv/libs/libmythtv/mythplayer.cpp
diff --git a/mythtv/libs/libmythtv/mythplayer.cpp b/mythtv/libs/libmythtv/mythplayer.cpp
index e4e0bd83ea..9ce7962160 100644
@@ -2182 +2182 @@ void MythPlayer::SetBuffering(bool new_buffering)
     }
 }
 
+// For debugging playback set this to increase the timeout so that
+// playback does not fail if stepping through code.
+// Set PREBUFFERDEBUG to any value and you will get 30 minutes.
+static char *preBufferDebug = getenv("PREBUFFERDEBUG");
+
 bool MythPlayer::PrebufferEnoughFrames(int min_buffers)
 {
     if (!videoOutput)
@@ -2247 +2252 @@ bool MythPlayer::PrebufferEnoughFrames(int min_buffers)
             audio.Pause(false);
         }
     }
-    if ((waited_for > 500) && !videoOutput->EnoughFreeFrames())
+    int msecs = 500;
+    if (preBufferDebug)
+        msecs = 1800000;
+    if ((waited_for > msecs /*500*/) && !videoOutput->EnoughFreeFrames())
     {
         LOG(VB_GENERAL, LOG_NOTICE, LOC +
             "Timed out waiting for frames, and"
@@ -2257 +2265 @@ bool MythPlayer::PrebufferEnoughFrames(int min_buffers)
         // to recover from serious problems if frames get leaked.
         DiscardVideoFrames(true);
     }
-    if (waited_for > 30000) // 30 seconds for internet streamed media
+    msecs = 30000;
+    if (preBufferDebug)
+        msecs = 1800000;
+    if (waited_for > msecs /*30000*/) // 30 seconds for internet streamed media
     {
         LOG(VB_GENERAL, LOG_ERR, LOC +
             "Waited too long for decoder to fill video buffers. Exiting.."
new file mythtv/libs/libmythtv/vaapi2context.cpp
diff --git a/mythtv/libs/libmythtv/vaapi2context.cpp b/mythtv/libs/libmythtv/vaapi2context.cpp new file mode 100644 index 0000000000..21e9dec4cf
- + 1 ////////////////////////////////////////////////////////////////////////////// 2 // Copyright (c) 2017 MythTV Developers <mythtv-dev@mythtv.org> 3 // 4 // This is part of MythTV (https://www.mythtv.org) 5 // 6 // This program is free software; you can redistribute it and/or modify 7 // it under the terms of the GNU General Public License as published by 8 // the Free Software Foundation; either version 2 of the License, or 9 // (at your option) any later version. 10 // 11 // This program is distributed in the hope that it will be useful, 12 // but WITHOUT ANY WARRANTY; without even the implied warranty of 13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 // GNU General Public License for more details. 15 // 16 // You should have received a copy of the GNU General Public License 17 // along with this program; if not, write to the Free Software 18 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 19 // 20 // You should have received a copy of the GNU General Public License 21 // along with this program. If not, see <http://www.gnu.org/licenses/>. 22 // 23 ////////////////////////////////////////////////////////////////////////////// 24 25 #include "mythcorecontext.h" 26 #include "mythlogging.h" 27 #include "vaapi2context.h" 28 #include "videooutbase.h" 29 #include "mythplayer.h" 30 31 extern "C" { 32 #include "libavutil/pixfmt.h" 33 #include "libavutil/hwcontext.h" 34 #include "libavcodec/avcodec.h" 35 } 36 37 #define LOC QString("VAAPI2: ") 38 39 Vaapi2Context::Vaapi2Context(AVStream* initStream) : 40 MythCodecContext(initStream) 41 { 42 43 } 44 45 MythCodecID Vaapi2Context::GetBestSupportedCodec( 46 AVCodec **ppCodec, 47 const QString &decoder, 48 uint stream_type, 49 AVPixelFormat &pix_fmt) 50 { 51 enum AVHWDeviceType type = AV_HWDEVICE_TYPE_VAAPI; 52 53 AVPixelFormat fmt = AV_PIX_FMT_NONE; 54 if (decoder == "vaapi2") 55 { 56 for (int i = 0;; i++) { 57 const AVCodecHWConfig *config = avcodec_get_hw_config(*ppCodec, i); 58 if (!config) { 59 LOG(VB_PLAYBACK, LOG_INFO, LOC + 60 QString("Decoder %1 does not support device type %2.") 61 .arg((*ppCodec)->name).arg(av_hwdevice_get_type_name(type))); 62 break; 63 } 64 if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX && 65 config->device_type == type) { 66 fmt = config->pix_fmt; 67 break; 68 } 69 } 70 } 71 if (fmt == AV_PIX_FMT_NONE) 72 return (MythCodecID)(kCodec_MPEG1 + (stream_type - 1)); 73 else 74 { 75 LOG(VB_PLAYBACK, LOG_INFO, LOC + 76 QString("Decoder %1 supports device type %2.") 77 .arg((*ppCodec)->name).arg(av_hwdevice_get_type_name(type))); 78 pix_fmt = fmt; 79 return (MythCodecID)(kCodec_MPEG1_VAAPI2 + (stream_type - 1)); 80 } 81 } 82 83 // const char *filter_descr = "scale=78:24,transpose=cclock"; 84 /* other way: 85 scale=78:24 [scl]; [scl] transpose=cclock // assumes "[in]" and "[out]" to be input output pads respectively 86 */ 87 88 int Vaapi2Context::HwDecoderInit(AVCodecContext *ctx) 89 { 90 int ret = 0; 91 AVBufferRef *hw_device_ctx = NULL; 92 93 const char *device = NULL; 94 QString vaapiDevice = gCoreContext->GetSetting("VAAPIDevice"); 95 if (!vaapiDevice.isEmpty()) 96 { 97 device = vaapiDevice.toLocal8Bit().constData(); 98 } 99 100 ret = av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, 101 device, NULL, 0); 102 if (ret < 0) 103 { 104 char error[AV_ERROR_MAX_STRING_SIZE]; 105 LOG(VB_GENERAL, LOG_ERR, LOC + 106 QString("av_hwdevice_ctx_create Device = <%3> error: %1 (%2)") 107 .arg(av_make_error_string(error, sizeof(error), ret)) 108 
.arg(ret).arg(vaapiDevice)); 109 } 110 else 111 { 112 ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx); 113 av_buffer_unref(&hw_device_ctx); 114 } 115 116 return ret; 117 } 118 119 120 QString Vaapi2Context::GetDeinterlaceFilter(QString filtername) 121 { 122 // example filter - deinterlace_vaapi=mode=default:rate=frame:auto=1 123 // example filtername - vaapi2doubleratemotion_compensated 124 QString ret; 125 QString rate="frame"; 126 if (!filtername.startsWith("vaapi2")) 127 return ret; 128 filtername.remove(0,6); //remove "vaapi2" 129 if (filtername.startsWith("doublerate")) 130 { 131 rate="field"; 132 filtername.remove(0,10); // remove "doublerate" 133 } 134 ret=QString("deinterlace_vaapi=mode=%1:rate=%2:auto=1") 135 .arg(filtername).arg(rate); 136 137 return ret; 138 } 139 140 141 142 // If we find that it needs more buffers than the default 143 // we can uncomment this and call it from get_format_vaapi2 144 // int Vaapi2Context::SetHwframeCtx(AVCodecContext *ctx, int poolsize) 145 // { 146 // AVBufferRef *hw_frames_ref; 147 // AVHWFramesContext *frames_ctx = NULL; 148 // int err = 0; 149 150 // if (!(hw_frames_ref = av_hwframe_ctx_alloc(ctx->hw_device_ctx))) { 151 // fprintf(stderr, "Failed to create VAAPI frame context.\n"); 152 // return -1; 153 // } 154 // frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data); 155 // frames_ctx->format = AV_PIX_FMT_VAAPI; 156 // frames_ctx->sw_format = AV_PIX_FMT_NV12; 157 // frames_ctx->width = ctx->width; 158 // frames_ctx->height = ctx->height; 159 // frames_ctx->initial_pool_size = poolsize; 160 // if ((err = av_hwframe_ctx_init(hw_frames_ref)) < 0) { 161 // fprintf(stderr, "Failed to initialize VAAPI frame context."); 162 // av_buffer_unref(&hw_frames_ref); 163 // return err; 164 // } 165 // ctx->hw_frames_ctx = av_buffer_ref(hw_frames_ref); 166 // if (!ctx->hw_frames_ctx) 167 // err = AVERROR(ENOMEM); 168 169 // av_buffer_unref(&hw_frames_ref); 170 // return err; 171 // } -
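Vaapi2Context::GetBestSupportedCodec() above probes libavcodec with avcodec_get_hw_config() to see whether the chosen decoder supports a VAAPI hardware device context, and HwDecoderInit() then creates that device with av_hwdevice_ctx_create(), using the VAAPIDevice setting as the optional device path. Below is a condensed standalone sketch of that pattern; the helper name open_vaapi_device and the literal "/dev/dri/renderD128" are illustrative only (pass NULL to let libva pick a device). One detail worth noting in HwDecoderInit() as written: the const char* taken from vaapiDevice.toLocal8Bit().constData() points into a temporary QByteArray, so the byte array should be kept in a local variable for the duration of the av_hwdevice_ctx_create() call.

#include <cerrno>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>
}

static int open_vaapi_device(AVCodecContext *ctx, const AVCodec *codec)
{
    // 1. Check that this decoder advertises VAAPI support via hw_device_ctx.
    bool supported = false;
    for (int i = 0;; i++)
    {
        const AVCodecHWConfig *cfg = avcodec_get_hw_config(codec, i);
        if (!cfg)
            break;
        if ((cfg->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) &&
            cfg->device_type == AV_HWDEVICE_TYPE_VAAPI)
        {
            supported = true;   // cfg->pix_fmt is the hardware format (AV_PIX_FMT_VAAPI)
            break;
        }
    }
    if (!supported)
        return AVERROR(ENOSYS);

    // 2. Create the VAAPI device and attach it to the codec context.
    AVBufferRef *device = NULL;
    int ret = av_hwdevice_ctx_create(&device, AV_HWDEVICE_TYPE_VAAPI,
                                     "/dev/dri/renderD128", NULL, 0);
    if (ret < 0)
        return ret;

    ctx->hw_device_ctx = av_buffer_ref(device);  // the codec context keeps its own ref
    av_buffer_unref(&device);
    return 0;
}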
new file mythtv/libs/libmythtv/vaapi2context.h
diff --git a/mythtv/libs/libmythtv/vaapi2context.h b/mythtv/libs/libmythtv/vaapi2context.h new file mode 100644 index 0000000000..fcd84b5833
- + 1 ////////////////////////////////////////////////////////////////////////////// 2 // Copyright (c) 2017 MythTV Developers <mythtv-dev@mythtv.org> 3 // 4 // This is part of MythTV (https://www.mythtv.org) 5 // 6 // This program is free software; you can redistribute it and/or modify 7 // it under the terms of the GNU General Public License as published by 8 // the Free Software Foundation; either version 2 of the License, or 9 // (at your option) any later version. 10 // 11 // This program is distributed in the hope that it will be useful, 12 // but WITHOUT ANY WARRANTY; without even the implied warranty of 13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 // GNU General Public License for more details. 15 // 16 // You should have received a copy of the GNU General Public License 17 // along with this program; if not, write to the Free Software 18 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 19 // 20 // You should have received a copy of the GNU General Public License 21 // along with this program. If not, see <http://www.gnu.org/licenses/>. 22 // 23 ////////////////////////////////////////////////////////////////////////////// 24 25 26 #ifndef VAAPI2CONTEXT_H 27 #define VAAPI2CONTEXT_H 28 29 #include "mythtvexp.h" 30 #include "mythcodecid.h" 31 #include "mythcodeccontext.h" 32 33 extern "C" { 34 #include "libavcodec/avcodec.h" 35 } 36 37 class MTV_PUBLIC Vaapi2Context : public MythCodecContext 38 { 39 public: 40 Vaapi2Context(AVStream* initStream); 41 static MythCodecID GetBestSupportedCodec(AVCodec **ppCodec, 42 const QString &decoder, 43 uint stream_type, 44 AVPixelFormat &pix_fmt); 45 int HwDecoderInit(AVCodecContext *ctx); 46 virtual QString GetDeinterlaceFilter(QString filtername); 47 }; 48 49 #endif // VAAPI2CONTEXT_H 50 No newline at end of file -
mythtv/libs/libmythtv/videodisplayprofile.cpp
diff --git a/mythtv/libs/libmythtv/videodisplayprofile.cpp b/mythtv/libs/libmythtv/videodisplayprofile.cpp index 700ee3bb99..990b1ae6f7 100644
a b using namespace std; 8 8 #include "mythlogging.h" 9 9 #include "videooutbase.h" 10 10 #include "avformatdecoder.h" 11 #include "mythcodeccontext.h" 11 12 12 13 13 14 // options are NNN NNN-MMM 0-MMM NNN-99999 >NNN >=NNN <MMM <=MMM or blank … … bool ProfileItem::IsValid(QString *reason) const 258 259 } 259 260 260 261 QStringList deints = VideoDisplayProfile::GetDeinterlacers(renderer); 262 QStringList decoderdeints = MythCodecContext::GetDeinterlacers(decoder); 263 deints.append(decoderdeints); 261 264 QString deint0 = Get("pref_deint0"); 262 265 QString deint1 = Get("pref_deint1"); 263 266 if (!deint0.isEmpty() && !deints.contains(deint0)) … … QString VideoDisplayProfile::GetDecoderName(const QString &decoder) 852 855 dec_name["dxva2"] = QObject::tr("Windows hardware acceleration"); 853 856 dec_name["vda"] = QObject::tr("Mac VDA hardware acceleration"); 854 857 dec_name["mediacodec"] = QObject::tr("Android MediaCodec decoder"); 858 dec_name["vaapi2"] = QObject::tr("VAAPI2 acceleration"); 855 859 } 856 860 857 861 QString ret = decoder; … … QString VideoDisplayProfile::GetDecoderHelp(QString decoder) 913 917 "Mediacodec will use the graphics hardware to " 914 918 "accelerate video decoding on Android. "); 915 919 920 if (decoder == "vaapi2") 921 msg += QObject::tr( 922 "VAAPI2 is a new implementation of VAAPI to will use the graphics hardware to " 923 "accelerate video decoding on Intel CPUs. "); 924 916 925 return msg; 917 926 } 918 927 … … QString VideoDisplayProfile::GetDeinterlacerName(const QString &short_name) 941 950 else if ("fieldorderdoubleprocessdeint" == short_name) 942 951 return QObject::tr("Interlaced (2x)"); 943 952 else if ("opengllinearblend" == short_name) 944 return QObject::tr("Linear blend (HW )");953 return QObject::tr("Linear blend (HW-GL)"); 945 954 else if ("openglkerneldeint" == short_name) 946 return QObject::tr("Kernel (HW )");955 return QObject::tr("Kernel (HW-GL)"); 947 956 else if ("openglbobdeint" == short_name) 948 return QObject::tr("Bob (2x, HW )");957 return QObject::tr("Bob (2x, HW-GL)"); 949 958 else if ("openglonefield" == short_name) 950 return QObject::tr("One field (HW )");959 return QObject::tr("One field (HW-GL)"); 951 960 else if ("opengldoubleratekerneldeint" == short_name) 952 return QObject::tr("Kernel (2x, HW )");961 return QObject::tr("Kernel (2x, HW-GL)"); 953 962 else if ("opengldoubleratelinearblend" == short_name) 954 return QObject::tr("Linear blend (2x, HW )");963 return QObject::tr("Linear blend (2x, HW-GL)"); 955 964 else if ("opengldoubleratefieldorder" == short_name) 956 return QObject::tr("Interlaced (2x, HW )");965 return QObject::tr("Interlaced (2x, HW-GL)"); 957 966 else if ("vdpauonefield" == short_name) 958 967 return QObject::tr("One Field (1x, HW)"); 959 968 else if ("vdpaubobdeint" == short_name) … … QString VideoDisplayProfile::GetDeinterlacerName(const QString &short_name) 978 987 else if ("openmaxlinedouble" == short_name) 979 988 return QObject::tr("Line double (HW)"); 980 989 #endif // def USING_OPENMAX 990 #ifdef USING_VAAPI2 991 else if ("vaapi2default" == short_name) 992 return QObject::tr("Advanced (HW-VA)"); 993 else if ("vaapi2bob" == short_name) 994 return QObject::tr("Bob (HW-VA)"); 995 else if ("vaapi2weave" == short_name) 996 return QObject::tr("Weave (HW-VA)"); 997 else if ("vaapi2motion_adaptive" == short_name) 998 return QObject::tr("Motion Adaptive (HW-VA)"); 999 else if ("vaapi2motion_compensated" == short_name) 1000 return QObject::tr("Motion Compensated (HW-VA)"); 1001 else if 
("vaapi2doubleratedefault" == short_name) 1002 return QObject::tr("Advanced (2x, HW-VA)"); 1003 else if ("vaapi2doubleratebob" == short_name) 1004 return QObject::tr("Bob (2x, HW-VA)"); 1005 else if ("vaapi2doublerateweave" == short_name) 1006 return QObject::tr("Weave (2x, HW-VA)"); 1007 else if ("vaapi2doubleratemotion_adaptive" == short_name) 1008 return QObject::tr("Motion Adaptive (2x, HW-VA)"); 1009 else if ("vaapi2doubleratemotion_compensated" == short_name) 1010 return QObject::tr("Motion Compensated (2x, HW-VA)"); 1011 #endif 981 1012 982 1013 return ""; 983 1014 } … … void VideoDisplayProfile::CreateProfiles(const QString &hostname) 1468 1499 CreateProfile(groupid, 1, "", "", "", 1469 1500 "mediacodec", 4, true, "opengl", 1470 1501 "opengl2", true, 1471 "none", "none", 1502 "opengldoubleratelinearblend", "opengllinearblend", 1503 ""); 1504 } 1505 #endif 1506 1507 #ifdef USING_VAAPI2 1508 if (!profiles.contains("VAAPI2 Normal")) { 1509 (void) QObject::tr("VAAPI2 Normal", 1510 "Sample: VAAPI2 Normal"); 1511 groupid = CreateProfileGroup("VAAPI2 Normal", hostname); 1512 CreateProfile(groupid, 1, "", "", "", 1513 "vaapi2", 4, true, "opengl", 1514 "opengl2", true, 1515 "vaapi2doubleratedefault", "vaapi2default", 1472 1516 ""); 1473 1517 } 1474 1518 #endif … … QString VideoDisplayProfile::GetDeinterlacerHelp(const QString &deint) 1615 1659 1616 1660 QString kUsingGPU = QObject::tr("(Hardware Accelerated)"); 1617 1661 1662 QString kUsingVA = QObject::tr("(VAAPI Hardware Accelerated)"); 1663 1664 QString kUsingGL = QObject::tr("(OpenGL Hardware Accelerated)"); 1665 1618 1666 QString kGreedyHMsg = QObject::tr( 1619 1667 "This deinterlacer uses several fields to reduce motion blur. " 1620 1668 "It has increased CPU requirements."); … … QString VideoDisplayProfile::GetDeinterlacerHelp(const QString &deint) 1635 1683 "This deinterlacer uses multiple fields to reduce motion blur " 1636 1684 "and smooth edges. "); 1637 1685 1686 QString kMostAdvMsg = QObject::tr( 1687 "Use the most advanced hardware deinterlacing algorithm available. "); 1688 1689 QString kWeaveMsg = QObject::tr( 1690 "Use the weave deinterlacing algorithm. "); 1691 1692 QString kMAMsg = QObject::tr( 1693 "Use the motion adaptive deinterlacing algorithm. "); 1694 1695 QString kMCMsg = QObject::tr( 1696 "Use the motion compensated deinterlacing algorithm. 
"); 1697 1638 1698 if (deint == "none") 1639 1699 msg = kNoneMsg; 1640 1700 else if (deint == "onefield") … … QString VideoDisplayProfile::GetDeinterlacerHelp(const QString &deint) 1648 1708 else if (deint == "kerneldoubleprocessdeint") 1649 1709 msg = kKernelMsg + " " + kDoubleRateMsg; 1650 1710 else if (deint == "openglonefield") 1651 msg = kOneFieldMsg + " " + kUsingG PU;1711 msg = kOneFieldMsg + " " + kUsingGL; 1652 1712 else if (deint == "openglbobdeint") 1653 msg = kBobMsg + " " + kUsingG PU;1713 msg = kBobMsg + " " + kUsingGL; 1654 1714 else if (deint == "opengllinearblend") 1655 msg = kLinearBlendMsg + " " + kUsingG PU;1715 msg = kLinearBlendMsg + " " + kUsingGL; 1656 1716 else if (deint == "openglkerneldeint") 1657 msg = kKernelMsg + " " + kUsingG PU;1717 msg = kKernelMsg + " " + kUsingGL; 1658 1718 else if (deint == "opengldoubleratelinearblend") 1659 msg = kLinearBlendMsg + " " + kDoubleRateMsg + " " + kUsingG PU;1719 msg = kLinearBlendMsg + " " + kDoubleRateMsg + " " + kUsingGL; 1660 1720 else if (deint == "opengldoubleratekerneldeint") 1661 msg = kKernelMsg + " " + kDoubleRateMsg + " " + kUsingG PU;1721 msg = kKernelMsg + " " + kDoubleRateMsg + " " + kUsingGL; 1662 1722 else if (deint == "opengldoubleratefieldorder") 1663 msg = kFieldOrderMsg + " " + kDoubleRateMsg + " " + kUsingG PU;1723 msg = kFieldOrderMsg + " " + kDoubleRateMsg + " " + kUsingGL; 1664 1724 else if (deint == "greedyhdeint") 1665 1725 msg = kGreedyHMsg; 1666 1726 else if (deint == "greedyhdoubleprocessdeint") … … QString VideoDisplayProfile::GetDeinterlacerHelp(const QString &deint) 1687 1747 msg = kOneFieldMsg + " " + kUsingGPU; 1688 1748 else if (deint == "vaapibobdeint") 1689 1749 msg = kBobMsg + " " + kUsingGPU; 1750 1751 else if (deint == "vaapi2default") 1752 msg = kMostAdvMsg + " " + kUsingVA; 1753 else if (deint == "vaapi2bob") 1754 msg = kBobMsg + " " + kUsingVA; 1755 else if (deint == "vaapi2weave") 1756 msg = kWeaveMsg + " " + kUsingVA; 1757 else if (deint == "vaapi2motion_adaptive") 1758 msg = kMAMsg + " " + kUsingVA; 1759 else if (deint == "vaapi2motion_compensated") 1760 msg = kMCMsg + " " + kUsingVA; 1761 else if (deint == "vaapi2doubleratedefault") 1762 msg = kMostAdvMsg + " " + kDoubleRateMsg + " " + kUsingVA; 1763 else if (deint == "vaapi2doubleratebob") 1764 msg = kBobMsg + " " + kDoubleRateMsg + " " + kUsingVA; 1765 else if (deint == "vaapi2doublerateweave") 1766 msg = kWeaveMsg + " " + kDoubleRateMsg + " " + kUsingVA; 1767 else if (deint == "vaapi2doubleratemotion_adaptive") 1768 msg = kMAMsg + " " + kDoubleRateMsg + " " + kUsingVA; 1769 else if (deint == "vaapi2doubleratemotion_compensated") 1770 msg = kMCMsg + " " + kDoubleRateMsg + " " + kUsingVA; 1690 1771 else 1691 1772 msg = QObject::tr("'%1' has not been documented yet.").arg(deint); 1692 1773 -
mythtv/libs/libmythtv/videoout_opengl.cpp
diff --git a/mythtv/libs/libmythtv/videoout_opengl.cpp b/mythtv/libs/libmythtv/videoout_opengl.cpp
index b689947d73..51b62527ec 100644
@@ -40 +40 @@ void VideoOutputOpenGL::GetRenderOptions(render_opts &opts,
         (*opts.safe_renderers)["openmax"].append("opengl");
     if (opts.decoders->contains("mediacodec"))
         (*opts.safe_renderers)["mediacodec"].append("opengl");
+    if (opts.decoders->contains("vaapi2"))
+        (*opts.safe_renderers)["vaapi2"].append("opengl");
     opts.priorities->insert("opengl", 65);
 
     // lite profile - no colourspace control, GPU deinterlacing
@@ -270 +272 @@ bool VideoOutputOpenGL::InputChanged(const QSize &video_dim_buf,
         StopEmbedding();
     }
 
-    if (!codec_is_std(av_codec_id) && !codec_is_mediacodec(av_codec_id))
+    if (!codec_is_std(av_codec_id)
+        && !codec_is_mediacodec(av_codec_id)
+        && !codec_is_vaapi2(av_codec_id))
     {
         LOG(VB_GENERAL, LOG_ERR, LOC + "New video codec is not supported.");
         errorState = kError_Unknown;
@@ -741 +745 @@ QStringList VideoOutputOpenGL::GetAllowedRenderers(
     {
         list << "opengl" << "opengl-lite";
     }
-    else if (codec_is_mediacodec(myth_codec_id) && !getenv("NO_OPENGL"))
+    else if ((codec_is_mediacodec(myth_codec_id) || codec_is_vaapi2(myth_codec_id))
+             && !getenv("NO_OPENGL"))
     {
         list << "opengl";
     }
@@ -812 +817 @@ bool VideoOutputOpenGL::SetupDeinterlace(
     if (db_vdisp_profile)
         m_deintfiltername = db_vdisp_profile->GetFilteredDeint(overridefilter);
 
+    if (m_deintfiltername.startsWith("vaapi2"))
+        return false;
+
     if (!m_deintfiltername.contains("opengl"))
     {
         gl_videochain->SetDeinterlacing(false);
mythtv/libs/libmythtv/videoout_xv.cpp
diff --git a/mythtv/libs/libmythtv/videoout_xv.cpp b/mythtv/libs/libmythtv/videoout_xv.cpp
index ccc756ace7..1074b1ea4c 100644
@@ -122 +122 @@ void VideoOutputXv::GetRenderOptions(render_opts &opts,
         (*opts.safe_renderers)["crystalhd"].append("xshm");
         (*opts.safe_renderers)["crystalhd"].append("xv-blit");
     }
+
+    // These could work but needs some debugging so disable for now
+    // if (opts.decoders->contains("vaapi2"))
+    // {
+    //     (*opts.safe_renderers)["vaapi2"].append("xlib");
+    //     (*opts.safe_renderers)["vaapi2"].append("xshm");
+    //     (*opts.safe_renderers)["vaapi2"].append("xv-blit");
+    // }
+
 }
 
 /** \class  VideoOutputXv
@@ -2090 +2099 @@ static QStringList allowed_video_renderers(
 
     QStringList list;
     if (codec_is_std(myth_codec_id))
+        // this needs some work
+        // || codec_is_vaapi2(myth_codec_id))
     {
         if (xv)
             list += "xv-blit";
mythtv/libs/libmythtv/videooutbase.cpp
diff --git a/mythtv/libs/libmythtv/videooutbase.cpp b/mythtv/libs/libmythtv/videooutbase.cpp
index 60b2d4fa24..dc892c4647 100644
@@ -607 +607 @@ bool VideoOutput::SetupDeinterlace(bool interlaced,
     else
         m_deintfiltername = "";
 
-    m_deintFiltMan = new FilterManager;
     m_deintFilter = NULL;
+    m_deintFiltMan = NULL;
+
+    if (m_deintfiltername.startsWith("vaapi2"))
+    {
+        m_deinterlacing = false;
+        return false;
+    }
+
+    m_deintFiltMan = new FilterManager;
 
     if (!m_deintfiltername.isEmpty())
     {
mythtv/libs/libmythtv/videooutbase.h
diff --git a/mythtv/libs/libmythtv/videooutbase.h b/mythtv/libs/libmythtv/videooutbase.h
index 0c11bd60ce..35a5f2877a 100644
@@ -76 +76 @@ class VideoOutput
     virtual void PrepareFrame(VideoFrame *buffer, FrameScanType,
                               OSD *osd) = 0;
     virtual void Show(FrameScanType) = 0;
+    VideoDisplayProfile *GetProfile() { return db_vdisp_profile; }
+
 
     virtual void WindowResized(const QSize &) {}
 
mythtv/programs/mythfrontend/globalsettings.cpp
diff --git a/mythtv/programs/mythfrontend/globalsettings.cpp b/mythtv/programs/mythfrontend/globalsettings.cpp index 7f87ba55ca..c169aab6a5 100644
a b 38 38 #include "mythuihelper.h" 39 39 #include "mythuidefines.h" 40 40 #include "langsettings.h" 41 #include "mythcodeccontext.h" 41 42 42 43 #ifdef USING_AIRPLAY 43 44 #include "AirPlay/mythraopconnection.h" … … static HostSpinBoxSetting *AudioReadAhead() 66 67 return gc; 67 68 } 68 69 70 #ifdef USING_VAAPI2 71 static HostTextEditSetting *VAAPIDevice() 72 { 73 HostTextEditSetting *ge = new HostTextEditSetting("VAAPIDevice"); 74 75 ge->setLabel(MainGeneralSettings::tr("Decoder Device for VAAPI2 hardware decoding")); 76 77 ge->setValue(""); 78 79 QString help = MainGeneralSettings::tr( 80 "Use this if your system does not detect the VAAPI device. " 81 "Example: '/dev/dri/renderD128'."); 82 83 ge->setHelpText(help); 84 85 return ge; 86 } 87 #endif 88 69 89 #if CONFIG_DEBUGTYPE 70 90 static HostCheckBoxSetting *FFmpegDemuxer() 71 91 { … … void PlaybackProfileItemConfig::decoderChanged(const QString &dec) 937 957 938 958 decoder->setHelpText(VideoDisplayProfile::GetDecoderHelp(dec)); 939 959 960 QString vrenderer2 = vidrend->getValue(); 961 vrenderChanged(vrenderer2); 962 940 963 InitLabel(); 941 964 } 942 965 … … void PlaybackProfileItemConfig::vrenderChanged(const QString &renderer) 944 967 { 945 968 QStringList osds = VideoDisplayProfile::GetOSDs(renderer); 946 969 QStringList deints = VideoDisplayProfile::GetDeinterlacers(renderer); 970 QString decodername = decoder->getValue(); 971 QStringList decoderdeints = MythCodecContext::GetDeinterlacers(decodername); 972 deints.append(decoderdeints); 947 973 QString losd = osdrend->getValue(); 948 974 QString ldeint0 = deint0->getValue(); 949 975 QString ldeint1 = deint1->getValue(); … … void PlaybackSettings::Load(void) 3958 3984 GroupSetting* general = new GroupSetting(); 3959 3985 general->setLabel(tr("General Playback")); 3960 3986 general->addChild(RealtimePriority()); 3987 #ifdef USING_VAAPI2 3988 general->addChild(VAAPIDevice()); 3989 #endif 3961 3990 general->addChild(AudioReadAhead()); 3962 3991 general->addChild(JumpToProgramOSD()); 3963 3992 general->addChild(ClearSavedPosition()); -
mythtv/programs/mythfrontend/mythfrontend.pro
diff --git a/mythtv/programs/mythfrontend/mythfrontend.pro b/mythtv/programs/mythfrontend/mythfrontend.pro
index 580bc606af..83517acfef 100644
@@ -116 +116 @@ using_opengl:DEFINES += USING_OPENGL
 using_opengl_video:DEFINES += USING_OPENGL_VIDEO
 using_vdpau:DEFINES += USING_VDPAU
 using_vaapi:using_opengl_video:DEFINES += USING_GLVAAPI
+using_vaapi2:DEFINES += USING_VAAPI2
 
 using_pulse:DEFINES += USING_PULSE
 using_pulseoutput: DEFINES += USING_PULSEOUTPUT