Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qavfsamplebufferdelegate.mm
Go to the documentation of this file.
1// Copyright (C) 2022 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include <QtFFmpegMediaPluginImpl/private/qavfsamplebufferdelegate_p.h>
5
6#include <QtMultimedia/private/qavfhelpers_p.h>
7#include <QtMultimedia/private/qvideoframe_p.h>
8
9#define AVMediaType XAVMediaType
10#include <QtFFmpegMediaPluginImpl/private/qffmpegvideobuffer_p.h>
11#include <QtFFmpegMediaPluginImpl/private/qffmpeghwaccel_p.h>
12#undef AVMediaType
13
14#include <optional>
15
17
// av_buffer_create() free-callback: drops the CVPixelBuffer reference that
// allocHWFrame() retained when wrapping the buffer into an AVFrame.
static void releaseHwFrame(void * /*opaque*/, uint8_t *data)
{
    CVPixelBufferRelease(reinterpret_cast<CVPixelBufferRef>(data));
}
22
namespace {

// QAbstractVideoBuffer implementation backed by a CoreVideo image buffer.
// Retains the CVImageBuffer for the lifetime of the video buffer and maps
// pixel data through the CVPixelBuffer base-address locking API.
class CVImageVideoBuffer : public QAbstractVideoBuffer
{
public:
    CVImageVideoBuffer(CVImageBufferRef imageBuffer) : m_imageBuffer(imageBuffer)
    {
        CVPixelBufferRetain(imageBuffer);
    }

    ~CVImageVideoBuffer()
    {
        // The owner must unmap before releasing the last reference.
        Q_ASSERT(m_mapMode == QVideoFrame::NotMapped);
        CVPixelBufferRelease(m_imageBuffer);
    }

    MapData map(QVideoFrame::MapMode mode) override
    {
        // Lock the base address only on the first map; subsequent calls
        // reuse the mode established by that first lock.
        if (m_mapMode == QVideoFrame::NotMapped) {
            CVPixelBufferLockBaseAddress(
                    m_imageBuffer,
                    mode == QVideoFrame::ReadOnly ? kCVPixelBufferLock_ReadOnly : 0);
            m_mapMode = mode;
        }

        MapData result;
        result.planeCount = CVPixelBufferGetPlaneCount(m_imageBuffer);
        Q_ASSERT(result.planeCount <= 3);

        if (result.planeCount) {
            // Bi-/tri-planar format: fill stride, size and base address per plane.
            for (int plane = 0; plane < result.planeCount; ++plane) {
                result.bytesPerLine[plane] =
                        CVPixelBufferGetBytesPerRowOfPlane(m_imageBuffer, plane);
                result.dataSize[plane] = result.bytesPerLine[plane]
                        * CVPixelBufferGetHeightOfPlane(m_imageBuffer, plane);
                result.data[plane] = static_cast<uchar *>(
                        CVPixelBufferGetBaseAddressOfPlane(m_imageBuffer, plane));
            }
            return result;
        }

        // Packed (single-plane) format: the whole buffer is plane 0.
        result.bytesPerLine[0] = CVPixelBufferGetBytesPerRow(m_imageBuffer);
        result.data[0] = static_cast<uchar *>(CVPixelBufferGetBaseAddress(m_imageBuffer));
        result.dataSize[0] = CVPixelBufferGetDataSize(m_imageBuffer);
        result.planeCount = result.data[0] ? 1 : 0;
        return result;
    }

    void unmap() override
    {
        if (m_mapMode == QVideoFrame::NotMapped)
            return;

        // Unlock with the same read-only flag that was used for locking.
        CVPixelBufferUnlockBaseAddress(
                m_imageBuffer,
                m_mapMode == QVideoFrame::ReadOnly ? kCVPixelBufferLock_ReadOnly : 0);
        m_mapMode = QVideoFrame::NotMapped;
    }

    QVideoFrameFormat format() const override { return {}; }

private:
    CVImageBufferRef m_imageBuffer;
    QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
};

} // namespace
88
// Wraps a CVPixelBuffer into a videotoolbox AVFrame belonging to the given
// hardware frames context, without copying pixel data. Returns nullptr when
// the buffer's dimensions don't match the context (can happen while the
// camera format is changing) or when the wrapping AVBufferRef can't be
// allocated.
// Make sure this is compatible with the layout used in ffmpeg's hwcontext_videotoolbox
static QFFmpeg::AVFrameUPtr allocHWFrame(AVBufferRef *hwContext, const CVPixelBufferRef &pixbuf)
{
    AVHWFramesContext *ctx = (AVHWFramesContext *)hwContext->data;

    // Validate dimensions before taking any references so a mismatch costs
    // nothing (previously the check ran after retain/av_buffer_create).
    if (ctx->width != (int)CVPixelBufferGetWidth(pixbuf)
        || ctx->height != (int)CVPixelBufferGetHeight(pixbuf)) {
        return nullptr;
    }

    auto frame = QFFmpeg::makeAVFrame();
    frame->hw_frames_ctx = av_buffer_ref(hwContext);
    frame->extended_data = frame->data;

    // The AVBufferRef owns one CVPixelBuffer reference; releaseHwFrame()
    // drops it when the last AVFrame reference goes away.
    frame->buf[0] = av_buffer_create((uint8_t *)pixbuf, 1, releaseHwFrame, NULL, 0);
    if (!frame->buf[0]) {
        // Allocation failure: bail out before retaining, otherwise the
        // reference would leak (no free callback is registered to drop it).
        return nullptr;
    }
    CVPixelBufferRetain(pixbuf);

    frame->data[3] = (uint8_t *)pixbuf;
    frame->width = ctx->width;
    frame->height = ctx->height;
    frame->format = AV_PIX_FMT_VIDEOTOOLBOX;
    return frame;
}
111
@implementation QAVFSampleBufferDelegate {
@private
    // Sink for converted frames; cleared by -discardFutureSamples, after
    // which the capture callback becomes a no-op.
    std::function<void(const QVideoFrame &)> frameHandler;
    // Optional callback mapping an AVCaptureConnection to surface and
    // presentation transforms (rotation/mirroring).
    QFFmpeg::QAVFSampleBufferDelegateTransformProvider transformationProvider;
    // NOTE(review): only ever set to nullptr in init; never read in this
    // file — confirm whether it is still needed.
    AVBufferRef *hwFramesContext;
    // Hardware-acceleration context enabling zero-copy frames; may be null.
    std::unique_ptr<QFFmpeg::HWAccel> m_accel;
    // Presentation time (microseconds) of the previously delivered sample.
    qint64 startTime;
    // Presentation time (microseconds) of the first sample; empty until the
    // first sample arrives.
    std::optional<qint64> baseTime;
    // Stream frame rate stamped onto each outgoing QVideoFrameFormat.
    qreal frameRate;
}
122
// Builds a zero-copy QVideoFrame wrapping the CVImageBuffer through the
// FFmpeg videotoolbox hwaccel. Returns a default (invalid) frame when the
// delegate has no HW accel configured or when the buffer can't be wrapped,
// so the caller can fall back to a CPU-mapped buffer.
static QVideoFrame createHwVideoFrame(QAVFSampleBufferDelegate &delegate,
                                      CVImageBufferRef imageBuffer, QVideoFrameFormat format)
{
    // The caller must have anchored the time base (first sample seen).
    Q_ASSERT(delegate.baseTime);

    if (!delegate.m_accel)
        return {};

    // allocHWFrame() returns nullptr on a size mismatch, which can happen
    // while the camera format is changing.
    auto avFrame = allocHWFrame(delegate.m_accel->hwFramesContextAsBuffer(), imageBuffer);
    if (!avFrame)
        return {};

#ifdef USE_SW_FRAMES
    // Optional compile-time path: download the frame to system memory; on
    // transfer failure keep the original HW frame (best effort).
    {
        auto swFrame = QFFmpeg::makeAVFrame();
        /* retrieve data from GPU to CPU */
        const int ret = av_hwframe_transfer_data(swFrame.get(), avFrame.get(), 0);
        if (ret < 0) {
            qWarning() << "Error transferring the data to system memory:" << ret;
        } else {
            avFrame = std::move(swFrame);
        }
    }
#endif

    // pts in microseconds relative to the first sample's timestamp.
    avFrame->pts = delegate.startTime - *delegate.baseTime;

    return QVideoFramePrivate::createFrame(std::make_unique<QFFmpegVideoBuffer>(std::move(avFrame)),
                                           format);
}
153
// Designated initializer: stores the (required) frame handler and resets
// the timing and frame-rate state.
- (instancetype)initWithFrameHandler:(std::function<void(const QVideoFrame &)>)handler
{
    self = [super init];
    if (!self)
        return nil;

    Q_ASSERT(handler);

    frameHandler = std::move(handler);
    hwFramesContext = nullptr;
    startTime = 0;
    frameRate = 0.;
    return self;
}
167
// Stops frame delivery: with the handler cleared, the capture callback
// returns immediately for all subsequent samples.
- (void)discardFutureSamples
{
    frameHandler = {};
}
172
// Installs the callback used to derive surface/presentation transforms from
// the capture connection.
- (void)setTransformationProvider:
        (const QFFmpeg::QAVFSampleBufferDelegateTransformProvider &)provider
{
    // Plain copy: std::move() on a const reference would silently copy
    // anyway (clang-tidy performance-move-const-arg), so don't pretend to move.
    transformationProvider = provider;
}
178
// AVCaptureVideoDataOutputSampleBufferDelegate callback. Converts each
// captured sample buffer into a QVideoFrame — zero-copy HW frame when
// possible, CPU-mappable CVImageVideoBuffer otherwise — applies the
// optional transforms, stamps start/end times relative to the first sample,
// and forwards the frame to the handler.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
        didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection
{
    Q_UNUSED(captureOutput);

    // Delivery was stopped via -discardFutureSamples.
    if (!frameHandler)
        return;

    // NB: on iOS captureOutput/connection can be nil (when recording a video -
    // avfmediaassetwriter).

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    if (!imageBuffer) {
        qWarning() << "Cannot get image buffer from sample buffer";
        return;
    }

    // Presentation timestamp converted to microseconds; 0 when the timescale
    // is invalid.
    const CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    const qint64 frameTime = time.timescale ? time.value * 1000000 / time.timescale : 0;
    if (!baseTime) {
        // First sample: anchor the stream's time base.
        baseTime = frameTime;
        startTime = frameTime;
    }

    QVideoFrameFormat format = QAVFHelpers::videoFormatForImageBuffer(imageBuffer);
    if (!format.isValid()) {
        // Fixed log-message typo ("get get" -> "get").
        qWarning() << "Cannot get video format for image buffer"
                   << CVPixelBufferGetWidth(imageBuffer) << 'x'
                   << CVPixelBufferGetHeight(imageBuffer);
        return;
    }

    // Surface transform applies to the format before frame creation.
    std::optional<QFFmpeg::QAVFSampleBufferDelegateTransform> transform;
    if (transformationProvider) {
        transform = transformationProvider(connection);
        const VideoTransformation &surfaceTransform = transform.value().surfaceTransform;
        format.setRotation(surfaceTransform.rotation);
        format.setMirrored(surfaceTransform.mirroredHorizontallyAfterRotation);
    }

    format.setStreamFrameRate(frameRate);

    // Prefer a zero-copy HW frame; fall back to wrapping the CVImageBuffer
    // with a CPU-mappable buffer when HW frame creation fails.
    auto frame = createHwVideoFrame(*self, imageBuffer, format);
    if (!frame.isValid())
        frame = QVideoFramePrivate::createFrame(std::make_unique<CVImageVideoBuffer>(imageBuffer),
                                                std::move(format));

    // Presentation transform applies to the created frame.
    if (transform.has_value()) {
        const VideoTransformation &presentationTransform = transform.value().presentationTransform;
        frame.setRotation(presentationTransform.rotation);
        frame.setMirrored(presentationTransform.mirroredHorizontallyAfterRotation);
    }

    // The frame spans [previous sample time, current sample time), expressed
    // relative to the first sample's timestamp.
    frame.setStartTime(startTime - *baseTime);
    frame.setEndTime(frameTime - *baseTime);
    startTime = frameTime;

    frameHandler(frame);
}
240
// Takes ownership of the hardware-acceleration context used by
// createHwVideoFrame() for zero-copy frames.
- (void)setHWAccel:(std::unique_ptr<QFFmpeg::HWAccel> &&)accel
{
    m_accel = std::move(accel);
}
245
// Records the frame rate that will be stamped onto every outgoing frame
// format in the capture callback.
- (void)setVideoFormatFrameRate:(qreal)rate
{
    frameRate = rate;
}
250
251@end
// (Documentation-site cross-reference artifacts: the scraper appended
// duplicated/garbled declarations of allocHWFrame and releaseHwFrame after
// @end; they are defined above and are not part of the source file.)