#include <QtCore/qloggingcategory.h>
#include <QtCore/private/qexpected_p.h>

extern "C" {
#include "libavutil/display.h"
#include "libavutil/pixdesc.h"
}
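// Note: this excerpt assumes the qLcVideoFrameEncoder logging category used throughout
// the file is declared/defined elsewhere in this translation unit.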
AVCodecID avCodecID(const QMediaEncoderSettings &settings)
{
    const QMediaFormat::VideoCodec qVideoCodec = settings.videoCodec();
    return QFFmpegMediaFormatInfo::codecIdForVideoCodec(qVideoCodec);
}
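// VideoFrameEncoder::create() selects and opens an encoder in two passes: it first tries
// codecs that can be paired with one of the available hardware acceleration device types,
// and only falls back to a pure software encoder if that fails. Rough usage sketch
// (names are placeholders for whatever the recording pipeline passes in):
//
//     VideoFrameEncoderUPtr encoder =
//             VideoFrameEncoder::create(sourceParams, encoderSettings, formatContext);
//     if (!encoder)
//         ; // no usable video codec was found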
VideoFrameEncoderUPtr VideoFrameEncoder::create(const SourceParams &sourceParams,
                                                const QMediaEncoderSettings &encoderSettings,
                                                AVFormatContext *formatContext)
{
    Q_ASSERT(isSwPixelFormat(sourceParams.swFormat));
    Q_ASSERT(isHwPixelFormat(sourceParams.format) || sourceParams.swFormat == sourceParams.format);

    AVStream *stream = createStream(sourceParams, formatContext);
    if (!stream)
        return nullptr;

    CreationResult result;
    auto findAndOpenAVEncoder = [&](const auto &scoresGetter, const auto &creator) {
        auto createWithTargetFormatFallback = [&](const Codec &codec) {
            result = creator(codec, AVPixelFormatSet{});

            // If creation failed and the chosen target format was a 4:2:0 format other
            // than NV12, retry once with that format prohibited so another one can be picked.
            if (!result.encoder) {
                const auto targetFormatDesc = av_pix_fmt_desc_get(result.targetFormat);
                const bool is420TargetFormat = targetFormatDesc
                        && targetFormatDesc->log2_chroma_h == 1
                        && targetFormatDesc->log2_chroma_w == 1;
                if (is420TargetFormat && result.targetFormat != AV_PIX_FMT_NV12)
                    result = creator(codec, AVPixelFormatSet{ result.targetFormat });
            }

            return result.encoder != nullptr;
        };

        return QFFmpeg::findAndOpenAVEncoder(avCodecID(encoderSettings), scoresGetter,
                                             createWithTargetFormatFallback);
    };
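    // First pass: hardware encoders. Candidate codecs are scored by how early their
    // matching device type appears in HWAccel::encodingDeviceTypes().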
    const auto &deviceTypes = HWAccel::encodingDeviceTypes();

    auto findDeviceType = [&](const Codec &codec) {
        std::optional<AVPixelFormat> pixelFormat = findAVPixelFormat(codec, &isHwPixelFormat);
        if (!pixelFormat)
            return deviceTypes.end();

        return std::find_if(deviceTypes.begin(), deviceTypes.end(),
                            [pixelFormat](AVHWDeviceType deviceType) {
                                return pixelFormatForHwDevice(deviceType) == pixelFormat;
                            });
    };

    findAndOpenAVEncoder(
            [&](const Codec &codec) {
                const auto found = findDeviceType(codec);
                if (found == deviceTypes.end())
                    return NotSuitableAVScore;

                return DefaultAVScore - static_cast<AVScore>(found - deviceTypes.begin());
            },
            [&](const Codec &codec, const AVPixelFormatSet &prohibitedTargetFormats) {
                HWAccelUPtr hwAccel = HWAccel::create(*findDeviceType(codec));
                if (!hwAccel)
                    return CreationResult{};
                if (!hwAccel->matchesSizeContraints(encoderSettings.videoResolution()))
                    return CreationResult{};
                return create(stream, codec, std::move(hwAccel), sourceParams, encoderSettings,
                              prohibitedTargetFormats);
            });
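    // Second pass: if no hardware encoder could be created, fall back to a software
    // encoder, scoring codecs by how well they support the source's software pixel format.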
    if (!result.encoder) {
        findAndOpenAVEncoder(
                [&](const Codec &codec) {
                    return findSWFormatScores(codec, sourceParams.swFormat);
                },
                [&](const Codec &codec, const AVPixelFormatSet &prohibitedTargetFormats) {
                    return create(stream, codec, nullptr, sourceParams, encoderSettings,
                                  prohibitedTargetFormats);
                });
    }
    if (auto &encoder = result.encoder)
        qCDebug(qLcVideoFrameEncoder)
                << "found" << (encoder->m_accel ? "hw" : "sw") << "encoder"
                << encoder->m_codec.name() << "for id" << encoder->m_codec.id();
    else
        qCWarning(qLcVideoFrameEncoder) << "No valid video codecs found";

    return std::move(result.encoder);
}
VideoFrameEncoder::VideoFrameEncoder(AVStream *stream, const Codec &codec, HWAccelUPtr hwAccel,
                                     const SourceParams &sourceParams,
                                     const QMediaEncoderSettings &encoderSettings)
    : m_settings(encoderSettings),
      m_stream(stream),
      m_codec(codec),
      m_accel(std::move(hwAccel)),
      m_sourceSize(sourceParams.size),
      m_sourceFormat(sourceParams.format),
      m_sourceSWFormat(sourceParams.swFormat)
{
}
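// createStream() fills in the stream-level (container) parameters: codec type, color
// metadata, and, when the source is rotated or mirrored, an AV_PKT_DATA_DISPLAYMATRIX
// side-data entry so that players can restore the original orientation.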
AVStream *VideoFrameEncoder::createStream(const SourceParams &sourceParams,
                                          AVFormatContext *formatContext)
{
    AVStream *stream = avformat_new_stream(formatContext, nullptr);
    if (!stream)
        return nullptr;

    stream->id = formatContext->nb_streams - 1;
    stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;

    stream->codecpar->color_trc = sourceParams.colorTransfer;
    stream->codecpar->color_space = sourceParams.colorSpace;
    stream->codecpar->color_range = sourceParams.colorRange;

    if (sourceParams.transform.rotation != QtVideo::Rotation::None
        || sourceParams.transform.mirroredHorizontallyAfterRotation) {
        constexpr auto displayMatrixSize = sizeof(int32_t) * 9;
        AVPacketSideData sideData = { reinterpret_cast<uint8_t *>(av_malloc(displayMatrixSize)),
                                      displayMatrixSize, AV_PKT_DATA_DISPLAYMATRIX };
        int32_t *matrix = reinterpret_cast<int32_t *>(sideData.data);
        av_display_rotation_set(matrix, static_cast<double>(sourceParams.transform.rotation));
        if (sourceParams.transform.mirroredHorizontallyAfterRotation)
            av_display_matrix_flip(matrix, sourceParams.transform.mirroredHorizontallyAfterRotation,
                                   false);

        addStreamSideData(stream, sideData);
    }

    return stream;
}
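// The per-codec create() overload runs the initialization pipeline in a fixed order:
// target size -> frame rate -> target formats -> stream parameters -> codec context ->
// open -> conversion setup. On failure it still reports the chosen target format so the
// caller can retry the same codec with that format prohibited.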
VideoFrameEncoder::CreationResult
VideoFrameEncoder::create(AVStream *stream, const Codec &codec, HWAccelUPtr hwAccel,
                          const SourceParams &sourceParams,
                          const QMediaEncoderSettings &encoderSettings,
                          const AVPixelFormatSet &prohibitedTargetFormats)
{
    VideoFrameEncoderUPtr frameEncoder(new VideoFrameEncoder(stream, codec, std::move(hwAccel),
                                                             sourceParams, encoderSettings));
    frameEncoder->initTargetSize();

    frameEncoder->initCodecFrameRate();

    if (!frameEncoder->initTargetFormats(prohibitedTargetFormats))
        return {};

    frameEncoder->initStream();

    const AVPixelFormat targetFormat = frameEncoder->m_targetFormat;

    if (!frameEncoder->initCodecContext())
        return { nullptr, targetFormat };

    if (!frameEncoder->open())
        return { nullptr, targetFormat };

    frameEncoder->updateConversions();

    return { std::move(frameEncoder), targetFormat };
}
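// Target size is the requested resolution adjusted to the codec's constraints. The
// h264_mf (Media Foundation) encoder additionally does not accept odd dimensions, so
// they are rounded down to even values.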
void VideoFrameEncoder::initTargetSize()
{
    m_targetSize = adjustVideoResolution(m_codec, m_settings.videoResolution());

    if (m_codec.name() == u"h264_mf") {
        auto makeEven = [](int size) { return size & ~1; };
        const QSize fixedSize(makeEven(m_targetSize.width()), makeEven(m_targetSize.height()));
        if (fixedSize != m_targetSize) {
            qCDebug(qLcVideoFrameEncoder) << "Fix odd video resolution for codec" << m_codec.name()
                                          << ":" << m_targetSize << "->" << fixedSize;
            m_targetSize = fixedSize;
        }
    }
}
void VideoFrameEncoder::initCodecFrameRate()
{
    const auto frameRates = m_codec.frameRates();
    if (qLcVideoFrameEncoder().isEnabled(QtDebugMsg))
        for (AVRational rate : frameRates)
            qCDebug(qLcVideoFrameEncoder) << "supported frame rate:" << rate;

    m_codecFrameRate = adjustFrameRate(frameRates, m_settings.videoFrameRate());
    qCDebug(qLcVideoFrameEncoder) << "Adjusted frame rate:" << m_codecFrameRate;
}
bool VideoFrameEncoder::initTargetFormats(const AVPixelFormatSet &prohibitedTargetFormats)
{
    const auto format = findTargetFormat(m_sourceFormat, m_sourceSWFormat, m_codec, m_accel.get(),
                                         prohibitedTargetFormats);
    if (!format) {
        qWarning() << "Could not find target format for codecId" << m_codec.id();
        return false;
    }

    m_targetFormat = *format;

    if (isHwPixelFormat(m_targetFormat)) {
        Q_ASSERT(m_accel);

        const auto swFormat = findTargetSWFormat(m_sourceSWFormat, m_codec, *m_accel);
        if (!swFormat) {
            qWarning() << "Cannot find software target format. sourceSWFormat:" << m_sourceSWFormat
                       << "targetFormat:" << m_targetFormat;
            return false;
        }

        m_targetSWFormat = *swFormat;

        m_accel->createFramesContext(m_targetSWFormat, m_targetSize);
        if (!m_accel->hwFramesContextAsBuffer())
            return false;
    } else {
        m_targetSWFormat = m_targetFormat;
    }

    return true;
}
void VideoFrameEncoder::initStream()
{
    m_stream->codecpar->codec_id = m_codec.id();

    // Use the 'hvc1' tag for HEVC; some players (notably Apple's) don't accept the default 'hev1'.
    if (m_codec.id() == AV_CODEC_ID_HEVC)
        m_stream->codecpar->codec_tag = MKTAG('h', 'v', 'c', '1');
    else
        m_stream->codecpar->codec_tag = 0;

    m_stream->codecpar->format = m_targetFormat;
    m_stream->codecpar->width = m_targetSize.width();
    m_stream->codecpar->height = m_targetSize.height();
    m_stream->codecpar->sample_aspect_ratio = AVRational{ 1, 1 };
#if QT_CODEC_PARAMETERS_HAVE_FRAMERATE
    m_stream->codecpar->framerate = m_codecFrameRate;
#endif

    const auto frameRates = m_codec.frameRates();
    m_stream->time_base = adjustFrameTimeBase(frameRates, m_codecFrameRate);
}
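// The codec context is initialized from the stream parameters; for hardware encoding the
// device context (and, if present, the frames context) is attached before opening.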
bool VideoFrameEncoder::initCodecContext()
{
    Q_ASSERT(m_stream->codecpar->codec_id);

    m_codecContext.reset(avcodec_alloc_context3(m_codec.get()));
    if (!m_codecContext) {
        qWarning() << "Could not allocate codec context";
        return false;
    }

    avcodec_parameters_to_context(m_codecContext.get(), m_stream->codecpar);
#if !QT_CODEC_PARAMETERS_HAVE_FRAMERATE
    m_codecContext->framerate = m_codecFrameRate;
#endif
    m_codecContext->time_base = m_stream->time_base;
    qCDebug(qLcVideoFrameEncoder) << "codecContext time base" << m_codecContext->time_base.num
                                  << m_codecContext->time_base.den;

    if (m_accel) {
        auto deviceContext = m_accel->hwDeviceContextAsBuffer();
        Q_ASSERT(deviceContext);
        m_codecContext->hw_device_ctx = av_buffer_ref(deviceContext);

        if (auto framesContext = m_accel->hwFramesContextAsBuffer())
            m_codecContext->hw_frames_ctx = av_buffer_ref(framesContext);
    }

    avcodec_parameters_from_context(m_stream->codecpar, m_codecContext.get());
    return true;
}
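// open() applies the user/encoder options to the codec context and calls avcodec_open2().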
bool VideoFrameEncoder::open()
{
    Q_ASSERT(m_codecContext);

    AVDictionaryHolder opts;
    applyVideoEncoderOptions(m_settings, QByteArray{ m_codec.name() }, m_codecContext.get(), opts);
    applyExperimentalCodecOptions(m_codec, opts);

    qCDebug(qLcVideoFrameEncoder) << "Opening encoder" << m_codec.name() << "with" << opts;

    const int res = avcodec_open2(m_codecContext.get(), m_codec.get(), opts);
    if (res < 0) {
        qCWarning(qLcVideoFrameEncoder)
                << "Couldn't open video encoder" << m_codec.name() << "; result:" << AVError(res);
        return false;
    }
    qCDebug(qLcVideoFrameEncoder) << "video codec opened" << res << "time base"
                                  << m_codecContext->time_base;
    return true;
}
qreal VideoFrameEncoder::codecFrameRate() const
{
    return m_codecFrameRate.den ? qreal(m_codecFrameRate.num) / m_codecFrameRate.den : 0.;
}

qint64 VideoFrameEncoder::getPts(qint64 us) const
{
    qint64 div = 1'000'000 * m_stream->time_base.num;
    return div != 0 ? (us * m_stream->time_base.den + div / 2) / div : 0;
}

const AVRational &VideoFrameEncoder::getTimeBase() const
{
    return m_stream->time_base;
}
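// FrameConverter is a small helper that chains the per-frame conversions needed before
// sending a frame to the encoder: download from a hardware surface, scale/convert with
// libswscale, and upload to the encoder's hardware frames context. Each step replaces the
// "current" frame, and takeResultFrame() hands back the final frame (or the untouched
// input if no conversion was applied).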
class FrameConverter
{
public:
    FrameConverter(AVFrameUPtr inputFrame) : m_inputFrame{ std::move(inputFrame) } { }

    int downloadFromHw()
    {
        AVFrameUPtr cpuFrame = makeAVFrame();

        int err = av_hwframe_transfer_data(cpuFrame.get(), currentFrame(), 0);
        if (err < 0) {
            qCDebug(qLcVideoFrameEncoder)
                    << "Error transferring frame data to surface." << AVError(err);
            return err;
        }

        setFrame(std::move(cpuFrame));
        return 0;
    }
    void convert(SwsContext *scaleContext, AVPixelFormat format, const QSize &size)
    {
        AVFrameUPtr scaledFrame = makeAVFrame();

        scaledFrame->format = format;
        scaledFrame->width = size.width();
        scaledFrame->height = size.height();

        av_frame_get_buffer(scaledFrame.get(), 0);

        const AVFrame *srcFrame = currentFrame();

        const auto scaledHeight =
                sws_scale(scaleContext, srcFrame->data, srcFrame->linesize, 0, srcFrame->height,
                          scaledFrame->data, scaledFrame->linesize);

        if (scaledHeight != scaledFrame->height)
            qCWarning(qLcVideoFrameEncoder)
                    << "Scaled height" << scaledHeight << "!=" << scaledFrame->height;

        setFrame(std::move(scaledFrame));
    }
    int uploadToHw(HWAccel *accel)
    {
        auto *hwFramesContext = accel->hwFramesContextAsBuffer();
        Q_ASSERT(hwFramesContext);

        AVFrameUPtr hwFrame = makeAVFrame();
        if (!hwFrame)
            return AVERROR(ENOMEM);

        int err = av_hwframe_get_buffer(hwFramesContext, hwFrame.get(), 0);
        if (err < 0) {
            qCDebug(qLcVideoFrameEncoder) << "Error getting HW buffer" << AVError(err);
            return err;
        }
        qCDebug(qLcVideoFrameEncoder) << "got HW buffer";

        if (!hwFrame->hw_frames_ctx) {
            qCDebug(qLcVideoFrameEncoder) << "no hw frames context";
            return AVERROR(ENOMEM);
        }

        err = av_hwframe_transfer_data(hwFrame.get(), currentFrame(), 0);
        if (err < 0) {
            qCDebug(qLcVideoFrameEncoder)
                    << "Error transferring frame data to surface." << AVError(err);
            return err;
        }

        setFrame(std::move(hwFrame));
        return 0;
    }
    q23::expected<AVFrameUPtr, int> takeResultFrame()
    {
        AVFrameUPtr converted = std::move(m_convertedFrame);
        AVFrameUPtr input = std::move(m_inputFrame);

        // If no conversion step ran, hand back the input frame untouched.
        if (!converted)
            return input;

        // Carry over the input frame's metadata (pts, color info, ...) to the converted frame.
        const int status = av_frame_copy_props(converted.get(), input.get());
        if (status != 0)
            return q23::unexpected{ status };

        return converted;
    }
private:
    void setFrame(AVFrameUPtr frame) { m_convertedFrame = std::move(frame); }

    AVFrame *currentFrame() const
    {
        if (m_convertedFrame)
            return m_convertedFrame.get();
        return m_inputFrame.get();
    }

    AVFrameUPtr m_inputFrame;
    AVFrameUPtr m_convertedFrame;
};
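// sendFrame() is the per-frame entry point: a null frame flushes the encoder; otherwise
// the source format/size is (re)checked, the frame goes through FrameConverter according
// to the flags computed in updateConversions(), and the result is handed to
// avcodec_send_frame().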
int VideoFrameEncoder::sendFrame(AVFrameUPtr inputFrame)
{
    if (!m_codecContext) {
        qWarning() << "codec context is not initialized!";
        return AVERROR(EINVAL);
    }

    if (!inputFrame)
        return avcodec_send_frame(m_codecContext.get(), nullptr); // flush

    if (!updateSourceFormatAndSize(inputFrame.get()))
        return AVERROR(EINVAL);

    FrameConverter converter{ std::move(inputFrame) };

    if (m_downloadFromHW) {
        const int status = converter.downloadFromHw();
        if (status != 0)
            return status;
    }

    if (m_scaleContext)
        converter.convert(m_scaleContext.get(), m_targetSWFormat, m_targetSize);

    if (m_uploadToHW) {
        const int status = converter.uploadToHw(m_accel.get());
        if (status != 0)
            return status;
    }

    const q23::expected<AVFrameUPtr, int> resultFrame = converter.takeResultFrame();
    if (!resultFrame)
        return resultFrame.error();

    AVRational timeBase{};
    qint64 pts = 0;
    getAVFrameTime(*resultFrame.value(), pts, timeBase);
    qCDebug(qLcVideoFrameEncoder) << "sending frame" << pts << "*" << timeBase;

    return avcodec_send_frame(m_codecContext.get(), resultFrame.value().get());
}
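// Some encoders leave the packet duration at 0; estimate it from the frame rate for the
// very first packet and from the pts delta to the previous packet afterwards.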
qint64 VideoFrameEncoder::estimateDuration(const AVPacket &packet, bool isFirstPacket)
{
    qint64 duration = 0;
    if (isFirstPacket) {
        // No previous packet to compare with; derive the duration from the frame rate.
        const AVRational frameDuration = av_inv_q(m_codecContext->framerate);
        duration = av_rescale_q(1, frameDuration, m_stream->time_base);
    } else {
        duration = packet.pts - m_lastPacketTime;
    }
    return duration;
}
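// retrievePacket() drains one packet from the encoder, stamps its stream index and
// duration, and repairs decreasing dts values before returning it to the caller.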
AVPacketUPtr VideoFrameEncoder::retrievePacket()
{
    if (!m_codecContext)
        return nullptr;

    auto getPacket = [&]() {
        AVPacketUPtr packet(av_packet_alloc());
        const int ret = avcodec_receive_packet(m_codecContext.get(), packet.get());
        if (ret < 0) {
            if (ret != AVERROR(EOF) && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF)
                qCDebug(qLcVideoFrameEncoder) << "Error receiving packet" << ret << AVError(ret);
            return AVPacketUPtr{};
        }
        auto ts = timeStampMs(packet->pts, m_stream->time_base);

        qCDebug(qLcVideoFrameEncoder)
                << "got a packet" << packet->pts << packet->dts << (ts ? *ts : 0);

        packet->stream_index = m_stream->id;

        if (packet->duration == 0) {
            const bool firstFrame = m_lastPacketTime == AV_NOPTS_VALUE;
            packet->duration = estimateDuration(*packet, firstFrame);
        }

        m_lastPacketTime = packet->pts;

        return packet;
    };
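    // Some encoders produce dts > pts or non-monotonic dts; keep a running offset so that
    // dts never exceeds pts and never decreases, and drop the packet if that cannot be fixed.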
    auto fixPacketDts = [&](AVPacket &packet) {
        // If dts is not set, leave the packet as is.
        if (packet.dts == AV_NOPTS_VALUE)
            return true;

        packet.dts -= m_packetDtsOffset;

        if (packet.pts != AV_NOPTS_VALUE && packet.pts < packet.dts) {
            m_packetDtsOffset += packet.dts - packet.pts;
            packet.dts = packet.pts;
        }

        if (m_prevPacketDts != AV_NOPTS_VALUE && packet.dts < m_prevPacketDts) {
            qCWarning(qLcVideoFrameEncoder)
                    << "Skip packet; failed to fix dts:" << packet.dts << m_prevPacketDts;
            return false;
        }

        m_prevPacketDts = packet.dts;

        return true;
    };

    while (auto packet = getPacket()) {
        if (fixPacketDts(*packet))
            return packet;
    }

    return nullptr;
}
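// The source may change resolution or pixel format mid-stream (for example, a camera
// reconfiguration); updateSourceFormatAndSize() detects this, re-derives the software
// format, and re-initializes the conversion pipeline when needed.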
bool VideoFrameEncoder::updateSourceFormatAndSize(const AVFrame *frame)
{
    const QSize frameSize(frame->width, frame->height);
    const AVPixelFormat frameFormat = static_cast<AVPixelFormat>(frame->format);

    if (frameSize == m_sourceSize && frameFormat == m_sourceFormat)
        return true;

    auto applySourceFormatAndSize = [&](AVPixelFormat swFormat) {
        m_sourceSize = frameSize;
        m_sourceFormat = frameFormat;
        m_sourceSWFormat = swFormat;
        updateConversions();
        return true;
    };

    if (frameFormat == m_sourceFormat)
        return applySourceFormatAndSize(m_sourceSWFormat);

    if (frameFormat == AV_PIX_FMT_NONE) {
        qWarning() << "Got a frame with invalid pixel format";
        return false;
    }

    if (isSwPixelFormat(frameFormat))
        return applySourceFormatAndSize(frameFormat);

    auto framesCtx = frame->hw_frames_ctx
            ? reinterpret_cast<const AVHWFramesContext *>(frame->hw_frames_ctx->data)
            : nullptr;
    if (!framesCtx || framesCtx->sw_format == AV_PIX_FMT_NONE) {
        qWarning() << "Cannot update conversions as hw frame has invalid framesCtx" << framesCtx;
        return false;
    }

    return applySourceFormatAndSize(framesCtx->sw_format);
}
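// updateConversions() decides which per-frame steps sendFrame() needs: nothing for
// zero-copy (same format, same size), otherwise hardware download/upload flags plus an
// SwsContext when the software formats or sizes differ.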
void VideoFrameEncoder::updateConversions()
{
    const bool needToScale = m_sourceSize != m_targetSize;
    const bool zeroCopy = m_sourceFormat == m_targetFormat && !needToScale;

    m_scaleContext.reset();

    if (zeroCopy) {
        m_downloadFromHW = false;
        m_uploadToHW = false;

        qCDebug(qLcVideoFrameEncoder) << "zero copy encoding, format" << m_targetFormat;
        // No converters are needed.
        return;
    }

    m_downloadFromHW = m_sourceFormat != m_sourceSWFormat;
    m_uploadToHW = m_targetFormat != m_targetSWFormat;

    if (m_sourceSWFormat != m_targetSWFormat || needToScale) {
        qCDebug(qLcVideoFrameEncoder)
                << "video source and encoder use different formats:" << m_sourceSWFormat
                << m_targetSWFormat << "or sizes:" << m_sourceSize << m_targetSize;

        const SwsFlags conversionType = getScaleConversionType(m_sourceSize, m_targetSize);

        m_scaleContext = createSwsContext(m_sourceSize, m_sourceSWFormat, m_targetSize,
                                          m_targetSWFormat, conversionType);
    }

    qCDebug(qLcVideoFrameEncoder) << "VideoFrameEncoder conversions initialized:"
                                  << "sourceFormat:" << m_sourceFormat
                                  << (isHwPixelFormat(m_sourceFormat) ? "(hw)" : "(sw)")
                                  << "targetFormat:" << m_targetFormat
                                  << (isHwPixelFormat(m_targetFormat) ? "(hw)" : "(sw)")
                                  << "sourceSWFormat:" << m_sourceSWFormat
                                  << "targetSWFormat:" << m_targetSWFormat
                                  << "scaleContext:" << m_scaleContext.get();
}