blob: 9c385a79d88eb66cd464f2813ecb10771cfd57a1
1 | /* |
2 | * Intel MediaSDK QSV encoder utility functions |
3 | * |
4 | * copyright (c) 2013 Yukinori Yamazoe |
5 | * copyright (c) 2015 Anton Khirnov |
6 | * |
7 | * This file is part of FFmpeg. |
8 | * |
9 | * FFmpeg is free software; you can redistribute it and/or |
10 | * modify it under the terms of the GNU Lesser General Public |
11 | * License as published by the Free Software Foundation; either |
12 | * version 2.1 of the License, or (at your option) any later version. |
13 | * |
14 | * FFmpeg is distributed in the hope that it will be useful, |
15 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
16 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
17 | * Lesser General Public License for more details. |
18 | * |
19 | * You should have received a copy of the GNU Lesser General Public |
20 | * License along with FFmpeg; if not, write to the Free Software |
21 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
22 | */ |
23 | |
24 | #include <string.h> |
25 | #include <sys/types.h> |
26 | #include <mfx/mfxvideo.h> |
27 | |
28 | #include "libavutil/common.h" |
29 | #include "libavutil/hwcontext.h" |
30 | #include "libavutil/hwcontext_qsv.h" |
31 | #include "libavutil/mem.h" |
32 | #include "libavutil/log.h" |
33 | #include "libavutil/time.h" |
34 | #include "libavutil/imgutils.h" |
35 | #include "libavcodec/bytestream.h" |
36 | |
37 | #include "avcodec.h" |
38 | #include "internal.h" |
39 | #include "qsv.h" |
40 | #include "qsv_internal.h" |
41 | #include "qsvenc.h" |
42 | |
/* Mapping of mfx numeric codec-profile values to human-readable names,
 * used only for verbose logging (see print_profile()).
 * Entries for different codecs may share a name (e.g. "main"); lookup is
 * by the numeric value only. Entries requiring newer SDK headers are
 * compiled in conditionally. */
static const struct {
    mfxU16 profile;
    const char *name;
} profile_names[] = {
    { MFX_PROFILE_AVC_BASELINE,                 "baseline"              },
    { MFX_PROFILE_AVC_MAIN,                     "main"                  },
    { MFX_PROFILE_AVC_EXTENDED,                 "extended"              },
    { MFX_PROFILE_AVC_HIGH,                     "high"                  },
#if QSV_VERSION_ATLEAST(1, 15)
    { MFX_PROFILE_AVC_HIGH_422,                 "high 422"              },
#endif
#if QSV_VERSION_ATLEAST(1, 4)
    { MFX_PROFILE_AVC_CONSTRAINED_BASELINE,     "constrained baseline"  },
    { MFX_PROFILE_AVC_CONSTRAINED_HIGH,         "constrained high"      },
    { MFX_PROFILE_AVC_PROGRESSIVE_HIGH,         "progressive high"      },
#endif
    { MFX_PROFILE_MPEG2_SIMPLE,                 "simple"                },
    { MFX_PROFILE_MPEG2_MAIN,                   "main"                  },
    { MFX_PROFILE_MPEG2_HIGH,                   "high"                  },
    { MFX_PROFILE_VC1_SIMPLE,                   "simple"                },
    { MFX_PROFILE_VC1_MAIN,                     "main"                  },
    { MFX_PROFILE_VC1_ADVANCED,                 "advanced"              },
#if QSV_VERSION_ATLEAST(1, 8)
    { MFX_PROFILE_HEVC_MAIN,                    "main"                  },
    { MFX_PROFILE_HEVC_MAIN10,                  "main10"                },
    { MFX_PROFILE_HEVC_MAINSP,                  "mainsp"                },
#endif
};
71 | |
72 | static const char *print_profile(mfxU16 profile) |
73 | { |
74 | int i; |
75 | for (i = 0; i < FF_ARRAY_ELEMS(profile_names); i++) |
76 | if (profile == profile_names[i].profile) |
77 | return profile_names[i].name; |
78 | return "unknown"; |
79 | } |
80 | |
/* Mapping of mfx rate-control mode values to their short names, used only
 * for verbose logging (see print_ratecontrol()). Modes that require a
 * newer SDK are compiled in conditionally via the QSV_HAVE_* /
 * QSV_VERSION_ATLEAST feature macros. */
static const struct {
    mfxU16 rc_mode;
    const char *name;
} rc_names[] = {
    { MFX_RATECONTROL_CBR,     "CBR" },
    { MFX_RATECONTROL_VBR,     "VBR" },
    { MFX_RATECONTROL_CQP,     "CQP" },
    { MFX_RATECONTROL_AVBR,    "AVBR" },
#if QSV_HAVE_LA
    { MFX_RATECONTROL_LA,      "LA" },
#endif
#if QSV_HAVE_ICQ
    { MFX_RATECONTROL_ICQ,     "ICQ" },
    { MFX_RATECONTROL_LA_ICQ,  "LA_ICQ" },
#endif
#if QSV_HAVE_VCM
    { MFX_RATECONTROL_VCM,     "VCM" },
#endif
#if QSV_VERSION_ATLEAST(1, 10)
    { MFX_RATECONTROL_LA_EXT,  "LA_EXT" },
#endif
#if QSV_HAVE_LA_HRD
    { MFX_RATECONTROL_LA_HRD,  "LA_HRD" },
#endif
#if QSV_HAVE_QVBR
    { MFX_RATECONTROL_QVBR,    "QVBR" },
#endif
};
109 | |
110 | static const char *print_ratecontrol(mfxU16 rc_mode) |
111 | { |
112 | int i; |
113 | for (i = 0; i < FF_ARRAY_ELEMS(rc_names); i++) |
114 | if (rc_mode == rc_names[i].rc_mode) |
115 | return rc_names[i].name; |
116 | return "unknown"; |
117 | } |
118 | |
119 | static const char *print_threestate(mfxU16 val) |
120 | { |
121 | if (val == MFX_CODINGOPTION_ON) |
122 | return "ON"; |
123 | else if (val == MFX_CODINGOPTION_OFF) |
124 | return "OFF"; |
125 | return "unknown"; |
126 | } |
127 | |
/* Log (at AV_LOG_VERBOSE) the encoding parameters actually selected by the
 * runtime after initialization.
 *
 * coding_opts[0] must point to a filled mfxExtCodingOption; when compiled
 * against an SDK with mfxExtCodingOption2 support (QSV_HAVE_CO2),
 * coding_opts[1] must point to a filled mfxExtCodingOption2.
 * Purely informational; has no effect on encoding. */
static void dump_video_param(AVCodecContext *avctx, QSVEncContext *q,
                             mfxExtBuffer **coding_opts)
{
    mfxInfoMFX *info = &q->param.mfx;

    mfxExtCodingOption *co = (mfxExtCodingOption*)coding_opts[0];
#if QSV_HAVE_CO2
    mfxExtCodingOption2 *co2 = (mfxExtCodingOption2*)coding_opts[1];
#endif

    av_log(avctx, AV_LOG_VERBOSE, "profile: %s; level: %"PRIu16"\n",
           print_profile(info->CodecProfile), info->CodecLevel);

    av_log(avctx, AV_LOG_VERBOSE, "GopPicSize: %"PRIu16"; GopRefDist: %"PRIu16"; GopOptFlag: ",
           info->GopPicSize, info->GopRefDist);
    if (info->GopOptFlag & MFX_GOP_CLOSED)
        av_log(avctx, AV_LOG_VERBOSE, "closed ");
    if (info->GopOptFlag & MFX_GOP_STRICT)
        av_log(avctx, AV_LOG_VERBOSE, "strict ");
    av_log(avctx, AV_LOG_VERBOSE, "; IdrInterval: %"PRIu16"\n", info->IdrInterval);

    av_log(avctx, AV_LOG_VERBOSE, "TargetUsage: %"PRIu16"; RateControlMethod: %s\n",
           info->TargetUsage, print_ratecontrol(info->RateControlMethod));

    /* Which mfx fields are meaningful depends on the rate-control mode,
     * hence the per-mode branches below. */
    if (info->RateControlMethod == MFX_RATECONTROL_CBR ||
        info->RateControlMethod == MFX_RATECONTROL_VBR
#if QSV_HAVE_VCM
        || info->RateControlMethod == MFX_RATECONTROL_VCM
#endif
        ) {
        av_log(avctx, AV_LOG_VERBOSE,
               "InitialDelayInKB: %"PRIu16"; TargetKbps: %"PRIu16"; MaxKbps: %"PRIu16"\n",
               info->InitialDelayInKB, info->TargetKbps, info->MaxKbps);
    } else if (info->RateControlMethod == MFX_RATECONTROL_CQP) {
        av_log(avctx, AV_LOG_VERBOSE, "QPI: %"PRIu16"; QPP: %"PRIu16"; QPB: %"PRIu16"\n",
               info->QPI, info->QPP, info->QPB);
    } else if (info->RateControlMethod == MFX_RATECONTROL_AVBR) {
        av_log(avctx, AV_LOG_VERBOSE,
               "TargetKbps: %"PRIu16"; Accuracy: %"PRIu16"; Convergence: %"PRIu16"\n",
               info->TargetKbps, info->Accuracy, info->Convergence);
    }
#if QSV_HAVE_LA
    else if (info->RateControlMethod == MFX_RATECONTROL_LA
#if QSV_HAVE_LA_HRD
             || info->RateControlMethod == MFX_RATECONTROL_LA_HRD
#endif
             ) {
        av_log(avctx, AV_LOG_VERBOSE,
               "TargetKbps: %"PRIu16"; LookAheadDepth: %"PRIu16"\n",
               info->TargetKbps, co2->LookAheadDepth);
    }
#endif
#if QSV_HAVE_ICQ
    else if (info->RateControlMethod == MFX_RATECONTROL_ICQ) {
        av_log(avctx, AV_LOG_VERBOSE, "ICQQuality: %"PRIu16"\n", info->ICQQuality);
    } else if (info->RateControlMethod == MFX_RATECONTROL_LA_ICQ) {
        av_log(avctx, AV_LOG_VERBOSE, "ICQQuality: %"PRIu16"; LookAheadDepth: %"PRIu16"\n",
               info->ICQQuality, co2->LookAheadDepth);
    }
#endif

    av_log(avctx, AV_LOG_VERBOSE, "NumSlice: %"PRIu16"; NumRefFrame: %"PRIu16"\n",
           info->NumSlice, info->NumRefFrame);
    av_log(avctx, AV_LOG_VERBOSE, "RateDistortionOpt: %s\n",
           print_threestate(co->RateDistortionOpt));

#if QSV_HAVE_CO2
    av_log(avctx, AV_LOG_VERBOSE,
           "RecoveryPointSEI: %s IntRefType: %"PRIu16"; IntRefCycleSize: %"PRIu16"; IntRefQPDelta: %"PRId16"\n",
           print_threestate(co->RecoveryPointSEI), co2->IntRefType, co2->IntRefCycleSize, co2->IntRefQPDelta);

    av_log(avctx, AV_LOG_VERBOSE, "MaxFrameSize: %"PRIu16"; ", co2->MaxFrameSize);
#if QSV_HAVE_MAX_SLICE_SIZE
    av_log(avctx, AV_LOG_VERBOSE, "MaxSliceSize: %"PRIu16"; ", co2->MaxSliceSize);
#endif
    av_log(avctx, AV_LOG_VERBOSE, "\n");

    av_log(avctx, AV_LOG_VERBOSE,
           "BitrateLimit: %s; MBBRC: %s; ExtBRC: %s\n",
           print_threestate(co2->BitrateLimit), print_threestate(co2->MBBRC),
           print_threestate(co2->ExtBRC));

#if QSV_HAVE_TRELLIS
    /* Trellis is a bitmask: OFF wins outright, zero means "auto",
     * otherwise list the frame types it is enabled for. */
    av_log(avctx, AV_LOG_VERBOSE, "Trellis: ");
    if (co2->Trellis & MFX_TRELLIS_OFF) {
        av_log(avctx, AV_LOG_VERBOSE, "off");
    } else if (!co2->Trellis) {
        av_log(avctx, AV_LOG_VERBOSE, "auto");
    } else {
        if (co2->Trellis & MFX_TRELLIS_I) av_log(avctx, AV_LOG_VERBOSE, "I");
        if (co2->Trellis & MFX_TRELLIS_P) av_log(avctx, AV_LOG_VERBOSE, "P");
        if (co2->Trellis & MFX_TRELLIS_B) av_log(avctx, AV_LOG_VERBOSE, "B");
    }
    av_log(avctx, AV_LOG_VERBOSE, "\n");
#endif

#if QSV_VERSION_ATLEAST(1, 8)
    av_log(avctx, AV_LOG_VERBOSE,
           "RepeatPPS: %s; NumMbPerSlice: %"PRIu16"; LookAheadDS: ",
           print_threestate(co2->RepeatPPS), co2->NumMbPerSlice);
    switch (co2->LookAheadDS) {
    case MFX_LOOKAHEAD_DS_OFF: av_log(avctx, AV_LOG_VERBOSE, "off");     break;
    case MFX_LOOKAHEAD_DS_2x:  av_log(avctx, AV_LOG_VERBOSE, "2x");      break;
    case MFX_LOOKAHEAD_DS_4x:  av_log(avctx, AV_LOG_VERBOSE, "4x");      break;
    default:                   av_log(avctx, AV_LOG_VERBOSE, "unknown"); break;
    }
    av_log(avctx, AV_LOG_VERBOSE, "\n");

    av_log(avctx, AV_LOG_VERBOSE, "AdaptiveI: %s; AdaptiveB: %s; BRefType: ",
           print_threestate(co2->AdaptiveI), print_threestate(co2->AdaptiveB));
    switch (co2->BRefType) {
    case MFX_B_REF_OFF:     av_log(avctx, AV_LOG_VERBOSE, "off");     break;
    case MFX_B_REF_PYRAMID: av_log(avctx, AV_LOG_VERBOSE, "pyramid"); break;
    default:                av_log(avctx, AV_LOG_VERBOSE, "auto");    break;
    }
    av_log(avctx, AV_LOG_VERBOSE, "\n");
#endif

#if QSV_VERSION_ATLEAST(1, 9)
    av_log(avctx, AV_LOG_VERBOSE,
           "MinQPI: %"PRIu8"; MaxQPI: %"PRIu8"; MinQPP: %"PRIu8"; MaxQPP: %"PRIu8"; MinQPB: %"PRIu8"; MaxQPB: %"PRIu8"\n",
           co2->MinQPI, co2->MaxQPI, co2->MinQPP, co2->MaxQPP, co2->MinQPB, co2->MaxQPB);
#endif
#endif

    if (avctx->codec_id == AV_CODEC_ID_H264) {
        av_log(avctx, AV_LOG_VERBOSE, "Entropy coding: %s; MaxDecFrameBuffering: %"PRIu16"\n",
               co->CAVLC == MFX_CODINGOPTION_ON ? "CAVLC" : "CABAC", co->MaxDecFrameBuffering);
        av_log(avctx, AV_LOG_VERBOSE,
               "NalHrdConformance: %s; SingleSeiNalUnit: %s; VuiVclHrdParameters: %s VuiNalHrdParameters: %s\n",
               print_threestate(co->NalHrdConformance), print_threestate(co->SingleSeiNalUnit),
               print_threestate(co->VuiVclHrdParameters), print_threestate(co->VuiNalHrdParameters));
    }
}
262 | |
/* Choose the mfx rate-control mode from the user's codec options and
 * store it in q->param.mfx.RateControlMethod.
 *
 * Priority (first match wins): CQP (qscale flag) > VCM > LA (upgraded to
 * LA_ICQ if global_quality is set) > ICQ > CBR/AVBR/VBR based on the
 * bitrate settings. The order of the #if'd else-branches below encodes
 * this priority — do not reorder.
 *
 * Returns 0 on success, AVERROR(ENOSYS) if the requested mode is not
 * available in this SDK build, AVERROR(EINVAL) on conflicting requests. */
static int select_rc_mode(AVCodecContext *avctx, QSVEncContext *q)
{
    const char *rc_desc;
    mfxU16      rc_mode;

    int want_la     = q->look_ahead;
    int want_qscale = !!(avctx->flags & AV_CODEC_FLAG_QSCALE);
    int want_vcm    = q->vcm;

    if (want_la && !QSV_HAVE_LA) {
        av_log(avctx, AV_LOG_ERROR,
               "Lookahead ratecontrol mode requested, but is not supported by this SDK version\n");
        return AVERROR(ENOSYS);
    }
    if (want_vcm && !QSV_HAVE_VCM) {
        av_log(avctx, AV_LOG_ERROR,
               "VCM ratecontrol mode requested, but is not supported by this SDK version\n");
        return AVERROR(ENOSYS);
    }

    /* These three modes are mutually exclusive. */
    if (want_la + want_qscale + want_vcm > 1) {
        av_log(avctx, AV_LOG_ERROR,
               "More than one of: { constant qscale, lookahead, VCM } requested, "
               "only one of them can be used at a time.\n");
        return AVERROR(EINVAL);
    }

    if (want_qscale) {
        rc_mode = MFX_RATECONTROL_CQP;
        rc_desc = "constant quantization parameter (CQP)";
    }
#if QSV_HAVE_VCM
    else if (want_vcm) {
        rc_mode = MFX_RATECONTROL_VCM;
        rc_desc = "video conferencing mode (VCM)";
    }
#endif
#if QSV_HAVE_LA
    else if (want_la) {
        rc_mode = MFX_RATECONTROL_LA;
        rc_desc = "VBR with lookahead (LA)";

#if QSV_HAVE_ICQ
        /* A positive global_quality upgrades plain lookahead to LA_ICQ. */
        if (avctx->global_quality > 0) {
            rc_mode = MFX_RATECONTROL_LA_ICQ;
            rc_desc = "intelligent constant quality with lookahead (LA_ICQ)";
        }
#endif
    }
#endif
#if QSV_HAVE_ICQ
    else if (avctx->global_quality > 0) {
        rc_mode = MFX_RATECONTROL_ICQ;
        rc_desc = "intelligent constant quality (ICQ)";
    }
#endif
    /* No explicit mode: infer from the bitrate constraints. */
    else if (avctx->rc_max_rate == avctx->bit_rate) {
        rc_mode = MFX_RATECONTROL_CBR;
        rc_desc = "constant bitrate (CBR)";
    } else if (!avctx->rc_max_rate) {
        rc_mode = MFX_RATECONTROL_AVBR;
        rc_desc = "average variable bitrate (AVBR)";
    } else {
        rc_mode = MFX_RATECONTROL_VBR;
        rc_desc = "variable bitrate (VBR)";
    }

    q->param.mfx.RateControlMethod = rc_mode;
    av_log(avctx, AV_LOG_VERBOSE, "Using the %s ratecontrol method\n", rc_desc);

    return 0;
}
335 | |
336 | static int rc_supported(QSVEncContext *q) |
337 | { |
338 | mfxVideoParam param_out = { .mfx.CodecId = q->param.mfx.CodecId }; |
339 | mfxStatus ret; |
340 | |
341 | ret = MFXVideoENCODE_Query(q->session, &q->param, ¶m_out); |
342 | if (ret < 0 || |
343 | param_out.mfx.RateControlMethod != q->param.mfx.RateControlMethod) |
344 | return 0; |
345 | return 1; |
346 | } |
347 | |
/* Fill q->param (the mfxVideoParam passed to MFXVideoENCODE_Init) from the
 * AVCodecContext and the encoder's private options: codec id/profile/level,
 * GOP structure, frame geometry and rate, rate-control parameters, and the
 * mfxExtCodingOption/mfxExtCodingOption2 extension buffers.
 *
 * Returns 0 on success, a negative AVERROR on failure (AVERROR_BUG for an
 * unmappable codec/pixel format, AVERROR(ENOSYS) if the runtime rejects
 * the selected rate-control mode). */
static int init_video_param(AVCodecContext *avctx, QSVEncContext *q)
{
    /* For hw (AV_PIX_FMT_QSV) input the real layout is in sw_pix_fmt. */
    enum AVPixelFormat sw_format = avctx->pix_fmt == AV_PIX_FMT_QSV ?
                                   avctx->sw_pix_fmt : avctx->pix_fmt;
    const AVPixFmtDescriptor *desc;
    float quant;
    int ret;

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return AVERROR_BUG;
    q->param.mfx.CodecId = ret;

    /* HEVC needs 32-aligned widths; everything else 16. */
    q->width_align = avctx->codec_id == AV_CODEC_ID_HEVC ? 32 : 16;

    if (avctx->level > 0)
        q->param.mfx.CodecLevel = avctx->level;

    q->param.mfx.CodecProfile       = q->profile;
    q->param.mfx.TargetUsage        = q->preset;
    q->param.mfx.GopPicSize         = FFMAX(0, avctx->gop_size);
    /* mfx GopRefDist counts the B-frame run plus the terminating P/I. */
    q->param.mfx.GopRefDist         = FFMAX(-1, avctx->max_b_frames) + 1;
    q->param.mfx.GopOptFlag         = avctx->flags & AV_CODEC_FLAG_CLOSED_GOP ?
                                      MFX_GOP_CLOSED : 0;
    q->param.mfx.IdrInterval        = q->idr_interval;
    q->param.mfx.NumSlice           = avctx->slices;
    q->param.mfx.NumRefFrame        = FFMAX(0, avctx->refs);
    q->param.mfx.EncodedOrder       = 0;
    /* 0 lets the runtime pick the bitstream buffer size; read back later. */
    q->param.mfx.BufferSizeInKB     = 0;

    desc = av_pix_fmt_desc_get(sw_format);
    if (!desc)
        return AVERROR_BUG;

    ff_qsv_map_pixfmt(sw_format, &q->param.mfx.FrameInfo.FourCC);

    /* Width/Height are the padded surface dimensions; Crop* carry the
     * actual picture size. */
    q->param.mfx.FrameInfo.Width          = FFALIGN(avctx->width, q->width_align);
    q->param.mfx.FrameInfo.Height         = FFALIGN(avctx->height, 32);
    q->param.mfx.FrameInfo.CropX          = 0;
    q->param.mfx.FrameInfo.CropY          = 0;
    q->param.mfx.FrameInfo.CropW          = avctx->width;
    q->param.mfx.FrameInfo.CropH          = avctx->height;
    q->param.mfx.FrameInfo.AspectRatioW   = avctx->sample_aspect_ratio.num;
    q->param.mfx.FrameInfo.AspectRatioH   = avctx->sample_aspect_ratio.den;
    q->param.mfx.FrameInfo.PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;
    q->param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;
    q->param.mfx.FrameInfo.BitDepthLuma   = desc->comp[0].depth;
    q->param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
    /* >8-bit data is stored MSB-aligned in 16-bit samples. */
    q->param.mfx.FrameInfo.Shift          = desc->comp[0].depth > 8;

    if (avctx->hw_frames_ctx) {
        /* With hw frames the surface dimensions must match the pool. */
        AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
        q->param.mfx.FrameInfo.Width  = frames_hwctx->surfaces[0].Info.Width;
        q->param.mfx.FrameInfo.Height = frames_hwctx->surfaces[0].Info.Height;
    }

    if (avctx->framerate.den > 0 && avctx->framerate.num > 0) {
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->framerate.num;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->framerate.den;
    } else {
        /* Fall back to the inverse of the time base as the frame rate. */
        q->param.mfx.FrameInfo.FrameRateExtN  = avctx->time_base.den;
        q->param.mfx.FrameInfo.FrameRateExtD  = avctx->time_base.num;
    }

    ret = select_rc_mode(avctx, q);
    if (ret < 0)
        return ret;

    /* Fill the mode-specific rate-control fields. */
    switch (q->param.mfx.RateControlMethod) {
    case MFX_RATECONTROL_CBR:
    case MFX_RATECONTROL_VBR:
#if QSV_HAVE_VCM
    case MFX_RATECONTROL_VCM:
#endif
        q->param.mfx.InitialDelayInKB = avctx->rc_initial_buffer_occupancy / 1000;
        q->param.mfx.TargetKbps       = avctx->bit_rate / 1000;
        q->param.mfx.MaxKbps          = avctx->rc_max_rate / 1000;
        break;
    case MFX_RATECONTROL_CQP:
        /* NOTE(review): integer division (both operands int) before the
         * float assignment — presumably intentional truncation; confirm. */
        quant = avctx->global_quality / FF_QP2LAMBDA;

        q->param.mfx.QPI = av_clip(quant * fabs(avctx->i_quant_factor) + avctx->i_quant_offset, 0, 51);
        q->param.mfx.QPP = av_clip(quant, 0, 51);
        q->param.mfx.QPB = av_clip(quant * fabs(avctx->b_quant_factor) + avctx->b_quant_offset, 0, 51);

        break;
    case MFX_RATECONTROL_AVBR:
        q->param.mfx.TargetKbps  = avctx->bit_rate / 1000;
        q->param.mfx.Convergence = q->avbr_convergence;
        q->param.mfx.Accuracy    = q->avbr_accuracy;
        break;
#if QSV_HAVE_LA
    case MFX_RATECONTROL_LA:
        q->param.mfx.TargetKbps  = avctx->bit_rate / 1000;
        q->extco2.LookAheadDepth = q->look_ahead_depth;
        break;
#if QSV_HAVE_ICQ
    case MFX_RATECONTROL_LA_ICQ:
        q->extco2.LookAheadDepth = q->look_ahead_depth;
        /* fallthrough: LA_ICQ also needs ICQQuality */
    case MFX_RATECONTROL_ICQ:
        q->param.mfx.ICQQuality  = avctx->global_quality;
        break;
#endif
#endif
    }

    // the HEVC encoder plugin currently fails if coding options
    // are provided
    if (avctx->codec_id != AV_CODEC_ID_HEVC) {
        q->extco.Header.BufferId      = MFX_EXTBUFF_CODING_OPTION;
        q->extco.Header.BufferSz      = sizeof(q->extco);
#if FF_API_CODER_TYPE
FF_DISABLE_DEPRECATION_WARNINGS
        /* Legacy coder_type option overrides the private cavlc option. */
        if (avctx->coder_type != 0)
            q->cavlc = avctx->coder_type == FF_CODER_TYPE_VLC;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
        q->extco.CAVLC                = q->cavlc ? MFX_CODINGOPTION_ON
                                                : MFX_CODINGOPTION_UNKNOWN;

        q->extco.PicTimingSEI         = q->pic_timing_sei ?
                                        MFX_CODINGOPTION_ON : MFX_CODINGOPTION_UNKNOWN;

        if (q->rdo >= 0)
            q->extco.RateDistortionOpt = q->rdo > 0 ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;

        if (avctx->codec_id == AV_CODEC_ID_H264) {
            if (avctx->strict_std_compliance != FF_COMPLIANCE_NORMAL)
                q->extco.NalHrdConformance = avctx->strict_std_compliance > FF_COMPLIANCE_NORMAL ?
                                             MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;

            if (q->single_sei_nal_unit >= 0)
                q->extco.SingleSeiNalUnit = q->single_sei_nal_unit ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            if (q->recovery_point_sei >= 0)
                q->extco.RecoveryPointSEI = q->recovery_point_sei ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            q->extco.MaxDecFrameBuffering = q->max_dec_frame_buffering;
        }

        q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extco;

#if QSV_HAVE_CO2
        if (avctx->codec_id == AV_CODEC_ID_H264) {
            q->extco2.Header.BufferId     = MFX_EXTBUFF_CODING_OPTION2;
            q->extco2.Header.BufferSz     = sizeof(q->extco2);

            /* Negative option values mean "leave to the runtime default". */
            if (q->int_ref_type >= 0)
                q->extco2.IntRefType = q->int_ref_type;
            if (q->int_ref_cycle_size >= 0)
                q->extco2.IntRefCycleSize = q->int_ref_cycle_size;
            if (q->int_ref_qp_delta != INT16_MIN)
                q->extco2.IntRefQPDelta = q->int_ref_qp_delta;

            if (q->bitrate_limit >= 0)
                q->extco2.BitrateLimit = q->bitrate_limit ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            if (q->mbbrc >= 0)
                q->extco2.MBBRC = q->mbbrc ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            if (q->extbrc >= 0)
                q->extco2.ExtBRC = q->extbrc ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;

            if (q->max_frame_size >= 0)
                q->extco2.MaxFrameSize = q->max_frame_size;
#if QSV_HAVE_MAX_SLICE_SIZE
            if (q->max_slice_size >= 0)
                q->extco2.MaxSliceSize = q->max_slice_size;
#endif

#if QSV_HAVE_TRELLIS
            q->extco2.Trellis = q->trellis;
#endif

#if QSV_HAVE_BREF_TYPE
#if FF_API_PRIVATE_OPT
FF_DISABLE_DEPRECATION_WARNINGS
            /* Legacy b_frame_strategy option overrides the private one. */
            if (avctx->b_frame_strategy >= 0)
                q->b_strategy = avctx->b_frame_strategy;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
            if (q->b_strategy >= 0)
                q->extco2.BRefType = q->b_strategy ? MFX_B_REF_PYRAMID : MFX_B_REF_OFF;
            if (q->adaptive_i >= 0)
                q->extco2.AdaptiveI = q->adaptive_i ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            if (q->adaptive_b >= 0)
                q->extco2.AdaptiveB = q->adaptive_b ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
#endif

            q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extco2;

#if QSV_HAVE_LA_DS
            q->extco2.LookAheadDS = q->look_ahead_downsampling;
#endif
        }
#endif
    }

    if (!rc_supported(q)) {
        av_log(avctx, AV_LOG_ERROR,
               "Selected ratecontrol mode is not supported by the QSV "
               "runtime. Choose a different mode.\n");
        return AVERROR(ENOSYS);
    }

    return 0;
}
552 | |
/* After MFXVideoENCODE_Init(), read back the parameters the runtime
 * actually chose: the bitstream buffer size (-> q->packet_size), the
 * SPS/PPS headers (-> avctx->extradata, caller/avcodec owns the buffer),
 * and CPB properties as packet side data. Also dumps the final coding
 * options via dump_video_param().
 *
 * Returns 0 on success or a negative AVERROR. */
static int qsv_retrieve_enc_params(AVCodecContext *avctx, QSVEncContext *q)
{
    AVCPBProperties *cpb_props;

    /* NOTE(review): 128 bytes assumed sufficient for each of SPS and PPS;
     * GetVideoParam fails if they do not fit — confirm against streams
     * with large SPS (e.g. many reference frames / HDR metadata). */
    uint8_t sps_buf[128];
    uint8_t pps_buf[128];

    mfxExtCodingOptionSPSPPS extradata = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION_SPSPPS,
        .Header.BufferSz = sizeof(extradata),
        .SPSBuffer = sps_buf, .SPSBufSize = sizeof(sps_buf),
        .PPSBuffer = pps_buf, .PPSBufSize = sizeof(pps_buf)
    };

    mfxExtCodingOption co = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION,
        .Header.BufferSz = sizeof(co),
    };
#if QSV_HAVE_CO2
    mfxExtCodingOption2 co2 = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION2,
        .Header.BufferSz = sizeof(co2),
    };
#endif

    /* Order matters: extradata first, then the coding options — the tail
     * of this array is handed to dump_video_param() below. */
    mfxExtBuffer *ext_buffers[] = {
        (mfxExtBuffer*)&extradata,
        (mfxExtBuffer*)&co,
#if QSV_HAVE_CO2
        (mfxExtBuffer*)&co2,
#endif
    };

    /* MPEG-2 carries no separate PPS. */
    int need_pps = avctx->codec_id != AV_CODEC_ID_MPEG2VIDEO;
    int ret;

    q->param.ExtParam    = ext_buffers;
    q->param.NumExtParam = FF_ARRAY_ELEMS(ext_buffers);

    ret = MFXVideoENCODE_GetVideoParam(q->session, &q->param);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error calling GetVideoParam");

    q->packet_size = q->param.mfx.BufferSizeInKB * 1000;

    if (!extradata.SPSBufSize || (need_pps && !extradata.PPSBufSize)) {
        av_log(avctx, AV_LOG_ERROR, "No extradata returned from libmfx.\n");
        return AVERROR_UNKNOWN;
    }

    /* extradata = SPS (+ PPS), zero-padded as avcodec requires. */
    avctx->extradata = av_malloc(extradata.SPSBufSize + need_pps * extradata.PPSBufSize +
                                 AV_INPUT_BUFFER_PADDING_SIZE);
    if (!avctx->extradata)
        return AVERROR(ENOMEM);

    memcpy(avctx->extradata,                        sps_buf, extradata.SPSBufSize);
    if (need_pps)
        memcpy(avctx->extradata + extradata.SPSBufSize, pps_buf, extradata.PPSBufSize);
    avctx->extradata_size = extradata.SPSBufSize + need_pps * extradata.PPSBufSize;
    memset(avctx->extradata + avctx->extradata_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

    cpb_props = ff_add_cpb_side_data(avctx);
    if (!cpb_props)
        return AVERROR(ENOMEM);
    cpb_props->max_bitrate = avctx->rc_max_rate;
    cpb_props->min_bitrate = avctx->rc_min_rate;
    cpb_props->avg_bitrate = avctx->bit_rate;
    cpb_props->buffer_size = avctx->rc_buffer_size;

    /* Skip the extradata entry; pass only the coding-option buffers. */
    dump_video_param(avctx, q, ext_buffers + 1);

    return 0;
}
627 | |
628 | static int qsv_init_opaque_alloc(AVCodecContext *avctx, QSVEncContext *q) |
629 | { |
630 | AVQSVContext *qsv = avctx->hwaccel_context; |
631 | mfxFrameSurface1 *surfaces; |
632 | int nb_surfaces, i; |
633 | |
634 | nb_surfaces = qsv->nb_opaque_surfaces + q->req.NumFrameSuggested + q->async_depth; |
635 | |
636 | q->opaque_alloc_buf = av_buffer_allocz(sizeof(*surfaces) * nb_surfaces); |
637 | if (!q->opaque_alloc_buf) |
638 | return AVERROR(ENOMEM); |
639 | |
640 | q->opaque_surfaces = av_malloc_array(nb_surfaces, sizeof(*q->opaque_surfaces)); |
641 | if (!q->opaque_surfaces) |
642 | return AVERROR(ENOMEM); |
643 | |
644 | surfaces = (mfxFrameSurface1*)q->opaque_alloc_buf->data; |
645 | for (i = 0; i < nb_surfaces; i++) { |
646 | surfaces[i].Info = q->req.Info; |
647 | q->opaque_surfaces[i] = surfaces + i; |
648 | } |
649 | |
650 | q->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION; |
651 | q->opaque_alloc.Header.BufferSz = sizeof(q->opaque_alloc); |
652 | q->opaque_alloc.In.Surfaces = q->opaque_surfaces; |
653 | q->opaque_alloc.In.NumSurface = nb_surfaces; |
654 | q->opaque_alloc.In.Type = q->req.Type; |
655 | |
656 | q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->opaque_alloc; |
657 | |
658 | qsv->nb_opaque_surfaces = nb_surfaces; |
659 | qsv->opaque_surfaces = q->opaque_alloc_buf; |
660 | qsv->opaque_alloc_type = q->req.Type; |
661 | |
662 | return 0; |
663 | } |
664 | |
665 | static int qsvenc_init_session(AVCodecContext *avctx, QSVEncContext *q) |
666 | { |
667 | int ret; |
668 | |
669 | if (avctx->hwaccel_context) { |
670 | AVQSVContext *qsv = avctx->hwaccel_context; |
671 | q->session = qsv->session; |
672 | } else if (avctx->hw_frames_ctx) { |
673 | q->frames_ctx.hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx); |
674 | if (!q->frames_ctx.hw_frames_ctx) |
675 | return AVERROR(ENOMEM); |
676 | |
677 | ret = ff_qsv_init_session_hwcontext(avctx, &q->internal_session, |
678 | &q->frames_ctx, q->load_plugins, |
679 | q->param.IOPattern == MFX_IOPATTERN_IN_OPAQUE_MEMORY); |
680 | if (ret < 0) { |
681 | av_buffer_unref(&q->frames_ctx.hw_frames_ctx); |
682 | return ret; |
683 | } |
684 | |
685 | q->session = q->internal_session; |
686 | } else { |
687 | ret = ff_qsv_init_internal_session(avctx, &q->internal_session, |
688 | q->load_plugins); |
689 | if (ret < 0) |
690 | return ret; |
691 | |
692 | q->session = q->internal_session; |
693 | } |
694 | |
695 | return 0; |
696 | } |
697 | |
/* Public encoder init entry point: allocate the async FIFO, pick the
 * surface IO pattern, create/attach the MFX session, fill the video
 * parameters, merge user-supplied and internal ext buffers (user buffers
 * with the same BufferId take precedence), initialize the runtime encoder
 * and read back the final parameters.
 * Returns 0 on success or a negative AVERROR. */
int ff_qsv_enc_init(AVCodecContext *avctx, QSVEncContext *q)
{
    int iopattern = 0;
    int opaque_alloc = 0;
    int ret;

    q->param.AsyncDepth = q->async_depth;

    /* One (packet, syncpoint, bitstream) triple per in-flight frame. */
    q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
                                  (sizeof(AVPacket) + sizeof(mfxSyncPoint*) + sizeof(mfxBitstream*)));
    if (!q->async_fifo)
        return AVERROR(ENOMEM);

    if (avctx->hwaccel_context) {
        AVQSVContext *qsv = avctx->hwaccel_context;

        iopattern    = qsv->iopattern;
        opaque_alloc = qsv->opaque_alloc;
    }

    if (avctx->hw_frames_ctx) {
        AVHWFramesContext    *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;

        /* Derive the IO pattern from the frame pool's memory type unless
         * the user already chose one. */
        if (!iopattern) {
            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
                iopattern = MFX_IOPATTERN_IN_OPAQUE_MEMORY;
            else if (frames_hwctx->frame_type &
                     (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
                iopattern = MFX_IOPATTERN_IN_VIDEO_MEMORY;
        }
    }

    if (!iopattern)
        iopattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    q->param.IOPattern = iopattern;

    ret = qsvenc_init_session(avctx, q);
    if (ret < 0)
        return ret;

    ret = init_video_param(avctx, q);
    if (ret < 0)
        return ret;

    ret = MFXVideoENCODE_QueryIOSurf(q->session, &q->param, &q->req);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error querying the encoding parameters");

    if (opaque_alloc) {
        ret = qsv_init_opaque_alloc(avctx, q);
        if (ret < 0)
            return ret;
    }

    if (avctx->hwaccel_context) {
        AVQSVContext *qsv = avctx->hwaccel_context;
        int i, j;

        q->extparam = av_mallocz_array(qsv->nb_ext_buffers + q->nb_extparam_internal,
                                       sizeof(*q->extparam));
        if (!q->extparam)
            return AVERROR(ENOMEM);

        q->param.ExtParam = q->extparam;
        for (i = 0; i < qsv->nb_ext_buffers; i++)
            q->param.ExtParam[i] = qsv->ext_buffers[i];
        q->param.NumExtParam = qsv->nb_ext_buffers;

        /* Append internal ext buffers, skipping any BufferId the user
         * already supplied (user buffers win). */
        for (i = 0; i < q->nb_extparam_internal; i++) {
            for (j = 0; j < qsv->nb_ext_buffers; j++) {
                if (qsv->ext_buffers[j]->BufferId == q->extparam_internal[i]->BufferId)
                    break;
            }
            if (j < qsv->nb_ext_buffers)
                continue;

            q->param.ExtParam[q->param.NumExtParam++] = q->extparam_internal[i];
        }
    } else {
        q->param.ExtParam    = q->extparam_internal;
        q->param.NumExtParam = q->nb_extparam_internal;
    }

    /* A positive status is a warning, not an error. */
    ret = MFXVideoENCODE_Init(q->session, &q->param);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error initializing the encoder");
    else if (ret > 0)
        ff_qsv_print_warning(avctx, ret,
                             "Warning in encoder initialization");

    ret = qsv_retrieve_enc_params(avctx, q);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error retrieving encoding parameters.\n");
        return ret;
    }

    q->avctx = avctx;

    return 0;
}
801 | |
802 | static void free_encoder_ctrl_payloads(mfxEncodeCtrl* enc_ctrl) |
803 | { |
804 | if (enc_ctrl) { |
805 | int i; |
806 | for (i = 0; i < enc_ctrl->NumPayload && i < QSV_MAX_ENC_PAYLOAD; i++) { |
807 | av_free(enc_ctrl->Payload[i]); |
808 | } |
809 | enc_ctrl->NumPayload = 0; |
810 | } |
811 | } |
812 | |
813 | static void clear_unused_frames(QSVEncContext *q) |
814 | { |
815 | QSVFrame *cur = q->work_frames; |
816 | while (cur) { |
817 | if (cur->used && !cur->surface.Data.Locked) { |
818 | free_encoder_ctrl_payloads(&cur->enc_ctrl); |
819 | av_frame_unref(cur->frame); |
820 | cur->used = 0; |
821 | } |
822 | cur = cur->next; |
823 | } |
824 | } |
825 | |
826 | static int get_free_frame(QSVEncContext *q, QSVFrame **f) |
827 | { |
828 | QSVFrame *frame, **last; |
829 | |
830 | clear_unused_frames(q); |
831 | |
832 | frame = q->work_frames; |
833 | last = &q->work_frames; |
834 | while (frame) { |
835 | if (!frame->used) { |
836 | *f = frame; |
837 | frame->used = 1; |
838 | return 0; |
839 | } |
840 | |
841 | last = &frame->next; |
842 | frame = frame->next; |
843 | } |
844 | |
845 | frame = av_mallocz(sizeof(*frame)); |
846 | if (!frame) |
847 | return AVERROR(ENOMEM); |
848 | frame->frame = av_frame_alloc(); |
849 | if (!frame->frame) { |
850 | av_freep(&frame); |
851 | return AVERROR(ENOMEM); |
852 | } |
853 | frame->enc_ctrl.Payload = av_mallocz(sizeof(mfxPayload*) * QSV_MAX_ENC_PAYLOAD); |
854 | if (!frame->enc_ctrl.Payload) { |
855 | av_freep(&frame); |
856 | return AVERROR(ENOMEM); |
857 | } |
858 | *last = frame; |
859 | |
860 | *f = frame; |
861 | frame->used = 1; |
862 | |
863 | return 0; |
864 | } |
865 | |
/**
 * Wrap an input AVFrame in a QSVFrame whose mfxFrameSurface1 libmfx can
 * consume.
 *
 * For AV_PIX_FMT_QSV input the existing hardware surface is referenced
 * (and its MemId remapped via the frames context when mids are present).
 * For system-memory input the surface info/pointers are filled from the
 * frame data, copying into a freshly padded buffer first when the input
 * does not meet libmfx's alignment requirements.
 *
 * @param q          encoder context
 * @param frame      input picture (must be non-NULL; callers pass NULL
 *                   only to encode_frame, which skips this function)
 * @param new_frame  on success, set to the prepared QSVFrame
 * @return 0 on success, a negative AVERROR code on failure
 */
static int submit_frame(QSVEncContext *q, const AVFrame *frame,
                        QSVFrame **new_frame)
{
    QSVFrame *qf;
    int ret;

    ret = get_free_frame(q, &qf);
    if (ret < 0)
        return ret;

    if (frame->format == AV_PIX_FMT_QSV) {
        ret = av_frame_ref(qf->frame, frame);
        if (ret < 0)
            return ret;

        /* the hw frame carries its mfxFrameSurface1 in data[3] */
        qf->surface = *(mfxFrameSurface1*)qf->frame->data[3];

        if (q->frames_ctx.mids) {
            ret = ff_qsv_find_surface_idx(&q->frames_ctx, qf);
            if (ret < 0)
                return ret;

            /* point the surface at our own mid for this index */
            qf->surface.Data.MemId = &q->frames_ctx.mids[ret];
        }
    } else {
        /* make a copy if the input is not padded as libmfx requires */
        if (frame->height & 31 || frame->linesize[0] & (q->width_align - 1)) {
            /* allocate with padded dimensions, then restore the real ones
             * so the copy below only touches the visible area */
            qf->frame->height = FFALIGN(frame->height, 32);
            qf->frame->width  = FFALIGN(frame->width, q->width_align);

            ret = ff_get_buffer(q->avctx, qf->frame, AV_GET_BUFFER_FLAG_REF);
            if (ret < 0)
                return ret;

            qf->frame->height = frame->height;
            qf->frame->width  = frame->width;
            ret = av_frame_copy(qf->frame, frame);
            if (ret < 0) {
                av_frame_unref(qf->frame);
                return ret;
            }
        } else {
            ret = av_frame_ref(qf->frame, frame);
            if (ret < 0)
                return ret;
        }

        qf->surface.Info = q->param.mfx.FrameInfo;

        /* translate AVFrame interlacing flags to the MFX PicStruct */
        qf->surface.Info.PicStruct =
            !frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
            frame->top_field_first   ? MFX_PICSTRUCT_FIELD_TFF   :
                                       MFX_PICSTRUCT_FIELD_BFF;
        if (frame->repeat_pict == 1)
            qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
        else if (frame->repeat_pict == 2)
            qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
        else if (frame->repeat_pict == 4)
            qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

        /* NOTE(review): only plane 0/1 pointers are set — assumes an
         * NV12-style layout; confirm against the supported input formats */
        qf->surface.Data.PitchLow  = qf->frame->linesize[0];
        qf->surface.Data.Y         = qf->frame->data[0];
        qf->surface.Data.UV        = qf->frame->data[1];
    }

    /* libmfx timestamps are in a 90 kHz clock */
    qf->surface.Data.TimeStamp = av_rescale_q(frame->pts, q->avctx->time_base, (AVRational){1, 90000});

    *new_frame = qf;

    return 0;
}
937 | |
938 | static void print_interlace_msg(AVCodecContext *avctx, QSVEncContext *q) |
939 | { |
940 | if (q->param.mfx.CodecId == MFX_CODEC_AVC) { |
941 | if (q->param.mfx.CodecProfile == MFX_PROFILE_AVC_BASELINE || |
942 | q->param.mfx.CodecLevel < MFX_LEVEL_AVC_21 || |
943 | q->param.mfx.CodecLevel > MFX_LEVEL_AVC_41) |
944 | av_log(avctx, AV_LOG_WARNING, |
945 | "Interlaced coding is supported" |
946 | " at Main/High Profile Level 2.1-4.1\n"); |
947 | } |
948 | } |
949 | |
950 | static int encode_frame(AVCodecContext *avctx, QSVEncContext *q, |
951 | const AVFrame *frame) |
952 | { |
953 | AVPacket new_pkt = { 0 }; |
954 | mfxBitstream *bs; |
955 | |
956 | mfxFrameSurface1 *surf = NULL; |
957 | mfxSyncPoint *sync = NULL; |
958 | QSVFrame *qsv_frame = NULL; |
959 | mfxEncodeCtrl* enc_ctrl = NULL; |
960 | int ret; |
961 | |
962 | if (frame) { |
963 | ret = submit_frame(q, frame, &qsv_frame); |
964 | if (ret < 0) { |
965 | av_log(avctx, AV_LOG_ERROR, "Error submitting the frame for encoding.\n"); |
966 | return ret; |
967 | } |
968 | } |
969 | if (qsv_frame) { |
970 | surf = &qsv_frame->surface; |
971 | enc_ctrl = &qsv_frame->enc_ctrl; |
972 | } |
973 | |
974 | ret = av_new_packet(&new_pkt, q->packet_size); |
975 | if (ret < 0) { |
976 | av_log(avctx, AV_LOG_ERROR, "Error allocating the output packet\n"); |
977 | return ret; |
978 | } |
979 | |
980 | bs = av_mallocz(sizeof(*bs)); |
981 | if (!bs) { |
982 | av_packet_unref(&new_pkt); |
983 | return AVERROR(ENOMEM); |
984 | } |
985 | bs->Data = new_pkt.data; |
986 | bs->MaxLength = new_pkt.size; |
987 | |
988 | if (q->set_encode_ctrl_cb) { |
989 | q->set_encode_ctrl_cb(avctx, frame, &qsv_frame->enc_ctrl); |
990 | } |
991 | |
992 | sync = av_mallocz(sizeof(*sync)); |
993 | if (!sync) { |
994 | av_freep(&bs); |
995 | av_packet_unref(&new_pkt); |
996 | return AVERROR(ENOMEM); |
997 | } |
998 | |
999 | do { |
1000 | ret = MFXVideoENCODE_EncodeFrameAsync(q->session, enc_ctrl, surf, bs, sync); |
1001 | if (ret == MFX_WRN_DEVICE_BUSY) |
1002 | av_usleep(500); |
1003 | } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_WRN_IN_EXECUTION); |
1004 | |
1005 | if (ret > 0) |
1006 | ff_qsv_print_warning(avctx, ret, "Warning during encoding"); |
1007 | |
1008 | if (ret < 0) { |
1009 | av_packet_unref(&new_pkt); |
1010 | av_freep(&bs); |
1011 | av_freep(&sync); |
1012 | return (ret == MFX_ERR_MORE_DATA) ? |
1013 | 0 : ff_qsv_print_error(avctx, ret, "Error during encoding"); |
1014 | } |
1015 | |
1016 | if (ret == MFX_WRN_INCOMPATIBLE_VIDEO_PARAM && frame->interlaced_frame) |
1017 | print_interlace_msg(avctx, q); |
1018 | |
1019 | if (*sync) { |
1020 | av_fifo_generic_write(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL); |
1021 | av_fifo_generic_write(q->async_fifo, &sync, sizeof(sync), NULL); |
1022 | av_fifo_generic_write(q->async_fifo, &bs, sizeof(bs), NULL); |
1023 | } else { |
1024 | av_freep(&sync); |
1025 | av_packet_unref(&new_pkt); |
1026 | av_freep(&bs); |
1027 | } |
1028 | |
1029 | return 0; |
1030 | } |
1031 | |
/**
 * Encode one frame (or flush with frame == NULL) and, once the async
 * pipeline is full — or on flush while output remains queued — sync and
 * return the oldest completed packet.
 *
 * @param avctx      codec context
 * @param q          encoder context
 * @param pkt        output packet; if pkt->data is preset, the encoded
 *                   data is copied into the caller's buffer
 * @param frame      input picture, or NULL to drain
 * @param got_packet set to 1 when a packet is returned
 * @return 0 on success, a negative AVERROR code on failure
 */
int ff_qsv_encode(AVCodecContext *avctx, QSVEncContext *q,
                  AVPacket *pkt, const AVFrame *frame, int *got_packet)
{
    int ret;

    ret = encode_frame(avctx, q, frame);
    if (ret < 0)
        return ret;

    /* drain one entry when the fifo is full, or on flush while any
     * queued output remains */
    if (!av_fifo_space(q->async_fifo) ||
        (!frame && av_fifo_size(q->async_fifo))) {
        AVPacket new_pkt;
        mfxBitstream *bs;
        mfxSyncPoint *sync;

        /* reads must mirror the write order in encode_frame():
         * packet, sync point, bitstream */
        av_fifo_generic_read(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
        av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
        av_fifo_generic_read(q->async_fifo, &bs, sizeof(bs), NULL);

        /* wait for the encode operation to finish */
        do {
            ret = MFXVideoCORE_SyncOperation(q->session, *sync, 1000);
        } while (ret == MFX_WRN_IN_EXECUTION);

        /* MFX timestamps are on a 90 kHz clock; convert back to the
         * codec time base */
        new_pkt.dts  = av_rescale_q(bs->DecodeTimeStamp, (AVRational){1, 90000}, avctx->time_base);
        new_pkt.pts  = av_rescale_q(bs->TimeStamp,       (AVRational){1, 90000}, avctx->time_base);
        new_pkt.size = bs->DataLength;

        if (bs->FrameType & MFX_FRAMETYPE_IDR ||
            bs->FrameType & MFX_FRAMETYPE_xIDR)
            new_pkt.flags |= AV_PKT_FLAG_KEY;

#if FF_API_CODED_FRAME
FF_DISABLE_DEPRECATION_WARNINGS
        /* deprecated coded_frame bookkeeping; x-variants cover the
         * second field of interlaced content */
        if (bs->FrameType & MFX_FRAMETYPE_I || bs->FrameType & MFX_FRAMETYPE_xI)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
        else if (bs->FrameType & MFX_FRAMETYPE_P || bs->FrameType & MFX_FRAMETYPE_xP)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P;
        else if (bs->FrameType & MFX_FRAMETYPE_B || bs->FrameType & MFX_FRAMETYPE_xB)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B;
FF_ENABLE_DEPRECATION_WARNINGS
#endif

        av_freep(&bs);
        av_freep(&sync);

        if (pkt->data) {
            /* caller supplied its own buffer: copy data and props into it */
            if (pkt->size < new_pkt.size) {
                av_log(avctx, AV_LOG_ERROR, "Submitted buffer not large enough: %d < %d\n",
                       pkt->size, new_pkt.size);
                av_packet_unref(&new_pkt);
                return AVERROR(EINVAL);
            }

            memcpy(pkt->data, new_pkt.data, new_pkt.size);
            pkt->size = new_pkt.size;

            ret = av_packet_copy_props(pkt, &new_pkt);
            av_packet_unref(&new_pkt);
            if (ret < 0)
                return ret;
        } else
            *pkt = new_pkt;

        *got_packet = 1;
    }

    return 0;
}
1100 | |
1101 | int ff_qsv_enc_close(AVCodecContext *avctx, QSVEncContext *q) |
1102 | { |
1103 | QSVFrame *cur; |
1104 | |
1105 | if (q->session) |
1106 | MFXVideoENCODE_Close(q->session); |
1107 | if (q->internal_session) |
1108 | MFXClose(q->internal_session); |
1109 | q->session = NULL; |
1110 | q->internal_session = NULL; |
1111 | |
1112 | av_buffer_unref(&q->frames_ctx.hw_frames_ctx); |
1113 | av_buffer_unref(&q->frames_ctx.mids_buf); |
1114 | |
1115 | cur = q->work_frames; |
1116 | while (cur) { |
1117 | q->work_frames = cur->next; |
1118 | av_frame_free(&cur->frame); |
1119 | av_free(cur->enc_ctrl.Payload); |
1120 | av_freep(&cur); |
1121 | cur = q->work_frames; |
1122 | } |
1123 | |
1124 | while (q->async_fifo && av_fifo_size(q->async_fifo)) { |
1125 | AVPacket pkt; |
1126 | mfxSyncPoint *sync; |
1127 | mfxBitstream *bs; |
1128 | |
1129 | av_fifo_generic_read(q->async_fifo, &pkt, sizeof(pkt), NULL); |
1130 | av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL); |
1131 | av_fifo_generic_read(q->async_fifo, &bs, sizeof(bs), NULL); |
1132 | |
1133 | av_freep(&sync); |
1134 | av_freep(&bs); |
1135 | av_packet_unref(&pkt); |
1136 | } |
1137 | av_fifo_free(q->async_fifo); |
1138 | q->async_fifo = NULL; |
1139 | |
1140 | av_freep(&q->opaque_surfaces); |
1141 | av_buffer_unref(&q->opaque_alloc_buf); |
1142 | |
1143 | av_freep(&q->extparam); |
1144 | |
1145 | return 0; |
1146 | } |
1147 |