static QFFmpeg::AVFrameUPtr allocHWFrame(AVBufferRef *hwContext,
                                         QAVFHelpers::QSharedCVPixelBuffer sharedPixBuf)
{
    Q_ASSERT(sharedPixBuf);
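
    // Build an AVFrame that refers to the VideoToolbox hardware frames context.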
    AVHWFramesContext *ctx = (AVHWFramesContext *)hwContext->data;
    auto frame = QFFmpeg::makeAVFrame();
    frame->hw_frames_ctx = av_buffer_ref(hwContext);
    frame->extended_data = frame->data;
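
    // Take ownership of the pixel buffer; the AVBufferRef's free callback releases
    // it once FFmpeg drops its last reference to the frame.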
    CVPixelBufferRef pixbuf = sharedPixBuf.release();
    auto releasePixBufFn = [](void *opaquePtr, uint8_t *) {
        CVPixelBufferRelease(static_cast<CVPixelBufferRef>(opaquePtr));
    };
    frame->buf[0] = av_buffer_create(nullptr, 0, releasePixBufFn, pixbuf, 0);
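
    // FFmpeg's AV_PIX_FMT_VIDEOTOOLBOX convention: data[3] carries the CVPixelBufferRef.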
    frame->data[3] = (uint8_t *)pixbuf;
    frame->width = ctx->width;
    frame->height = ctx->height;
    frame->format = AV_PIX_FMT_VIDEOTOOLBOX;
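
    // The frames context was configured for a fixed resolution; a mismatch can
    // occur, e.g. while the camera format is being changed.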
    if (frame->width != (int)CVPixelBufferGetWidth(pixbuf)
        || frame->height != (int)CVPixelBufferGetHeight(pixbuf)) {
        return nullptr;
    }

    return frame;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
        didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection
{
    Q_UNUSED(captureOutput);
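
    // The sample buffer must wrap a CVPixelBuffer; other image buffer types are not handled here.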
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer || CFGetTypeID(imageBuffer) != CVPixelBufferGetTypeID()) {
        qWarning() << "Cannot get image buffer from sample buffer";
        return;
    }

    auto pixelBuffer = QAVFHelpers::QSharedCVPixelBuffer(
            CVPixelBufferRef(imageBuffer),
            QAVFHelpers::QSharedCVPixelBuffer::RefMode::NeedsRef);
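
    // Derive a microsecond timestamp from the presentation time; the first frame
    // (no baseTime yet) establishes the time base for the stream.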
    const CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    const qint64 frameTime = time.timescale ? time.value * 1000000 / time.timescale : 0;
    if (!baseTime) {
        baseTime = frameTime;
        startTime = frameTime;
    }
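
    // Map the pixel buffer's format and size to a QVideoFrameFormat; bail out if that fails.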
    QVideoFrameFormat format = QAVFHelpers::videoFormatForImageBuffer(pixelBuffer.get());
    if (!format.isValid()) {
        qWarning() << "Cannot get video format for image buffer"
                   << CVPixelBufferGetWidth(pixelBuffer.get()) << 'x'
                   << CVPixelBufferGetHeight(pixelBuffer.get());
        return;
    }
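
    // If a transformation provider is set, apply the surface transform
    // (rotation/mirroring) to the format before the frame is created.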
    std::optional<QFFmpeg::QAVFSampleBufferDelegateTransform> transform;
    if (transformationProvider) {
        transform = transformationProvider(connection);
        const VideoTransformation &surfaceTransform = transform.value().surfaceTransform;
        format.setRotation(surfaceTransform.rotation);
        format.setMirrored(surfaceTransform.mirroredHorizontallyAfterRotation);
    }

    format.setStreamFrameRate(frameRate);
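
    // Prefer a hardware-backed (VideoToolbox) frame; if that is not possible,
    // wrap the pixel buffer in a CVImageVideoBuffer instead.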
    auto frame = createHwVideoFrame(*self, pixelBuffer, format);
    if (!frame.isValid())
        frame = QVideoFramePrivate::createFrame(
                std::make_unique<QFFmpeg::CVImageVideoBuffer>(std::move(pixelBuffer)),
                std::move(format));
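
    // The presentation transform applies to the finished frame rather than to the surface format.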
    if (transform.has_value()) {
        const VideoTransformation &presentationTransform = transform.value().presentationTransform;
        frame.setRotation(presentationTransform.rotation);
        frame.setMirrored(presentationTransform.mirroredHorizontallyAfterRotation);
    }
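
    // Timestamps are reported relative to the first frame's base time; startTime
    // carries this frame's time over to the next frame's start.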
    frame.setStartTime(startTime - *baseTime);
    frame.setEndTime(frameTime - *baseTime);
    startTime = frameTime;