blob: 9b8f83964740e634cd54f0f242388cc1a7332910
1 | /* |
2 | * This file is part of FFmpeg. |
3 | * |
4 | * FFmpeg is free software; you can redistribute it and/or |
5 | * modify it under the terms of the GNU Lesser General Public |
6 | * License as published by the Free Software Foundation; either |
7 | * version 2.1 of the License, or (at your option) any later version. |
8 | * |
9 | * FFmpeg is distributed in the hope that it will be useful, |
10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
12 | * Lesser General Public License for more details. |
13 | * |
14 | * You should have received a copy of the GNU Lesser General Public |
15 | * License along with FFmpeg; if not, write to the Free Software |
16 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
17 | */ |
18 | |
19 | #include "config.h" |
20 | |
21 | #include <stdint.h> |
22 | #include <string.h> |
23 | |
24 | #include <vdpau/vdpau.h> |
25 | |
26 | #include "buffer.h" |
27 | #include "common.h" |
28 | #include "hwcontext.h" |
29 | #include "hwcontext_internal.h" |
30 | #include "hwcontext_vdpau.h" |
31 | #include "mem.h" |
32 | #include "pixfmt.h" |
33 | #include "pixdesc.h" |
34 | |
/* Per-device private state, allocated by the generic hwcontext code. */
typedef struct VDPAUDeviceContext {
    /* VDPAU entry points resolved in vdpau_device_init() via get_proc_address(). */
    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpVideoSurfaceCreate *surf_create;
    VdpVideoSurfaceDestroy *surf_destroy;

    /* For each entry of vdpau_pix_fmts (one slot per chroma type): the
     * AV_PIX_FMT_NONE-terminated list of transferable pixel formats and its
     * length including the terminator. Built in vdpau_init_pixmfts(), freed
     * in vdpau_device_uninit(). */
    enum AVPixelFormat *pix_fmts[3];
    int nb_pix_fmts[3];
} VDPAUDeviceContext;
45 | |
/* Per-frames-context private state, filled in vdpau_frames_init(). */
typedef struct VDPAUFramesContext {
    /* Transfer callbacks copied from the device context. */
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    /* VDPAU chroma type matching the frames context's sw_format. */
    VdpChromaType chroma_type;
    /* Index of chroma_type within the vdpau_pix_fmts[] table. */
    int chroma_idx;

    /* Borrowed pointer into the device context's pix_fmts[] list (not owned,
     * must not be freed here); nb_pix_fmts includes the NONE terminator. */
    const enum AVPixelFormat *pix_fmts;
    int nb_pix_fmts;
} VDPAUFramesContext;
55 | |
/* One entry mapping a VDPAU YCbCr surface format to an FFmpeg pixel format. */
typedef struct VDPAUPixFmtMap {
    VdpYCbCrFormat vdpau_fmt;
    enum AVPixelFormat pix_fmt;
} VDPAUPixFmtMap;
60 | |
/* Candidate transfer formats for 4:2:0 surfaces; AV_PIX_FMT_NONE-terminated. */
static const VDPAUPixFmtMap pix_fmts_420[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
    { 0,                     AV_PIX_FMT_NONE,   },
};
66 | |
/* Candidate transfer formats for 4:2:2 surfaces; AV_PIX_FMT_NONE-terminated. */
static const VDPAUPixFmtMap pix_fmts_422[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
    { 0,                     AV_PIX_FMT_NONE,   },
};
74 | |
/* Candidate transfer formats for 4:4:4 surfaces; AV_PIX_FMT_NONE-terminated. */
static const VDPAUPixFmtMap pix_fmts_444[] = {
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV444P },
    { 0,                     AV_PIX_FMT_NONE,   },
};
79 | |
/* All supported chroma types with their candidate format maps. The array
 * order defines the indices used by VDPAUDeviceContext.pix_fmts[] and
 * VDPAUFramesContext.chroma_idx. */
static const struct {
    VdpChromaType chroma_type;
    const VDPAUPixFmtMap *map;
} vdpau_pix_fmts[] = {
    { VDP_CHROMA_TYPE_420, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444, pix_fmts_444 },
};
88 | |
89 | static int count_pixfmts(const VDPAUPixFmtMap *map) |
90 | { |
91 | int count = 0; |
92 | while (map->pix_fmt != AV_PIX_FMT_NONE) { |
93 | map++; |
94 | count++; |
95 | } |
96 | return count; |
97 | } |
98 | |
99 | static int vdpau_init_pixmfts(AVHWDeviceContext *ctx) |
100 | { |
101 | AVVDPAUDeviceContext *hwctx = ctx->hwctx; |
102 | VDPAUDeviceContext *priv = ctx->internal->priv; |
103 | int i; |
104 | |
105 | for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) { |
106 | const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map; |
107 | int nb_pix_fmts; |
108 | |
109 | nb_pix_fmts = count_pixfmts(map); |
110 | priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i])); |
111 | if (!priv->pix_fmts[i]) |
112 | return AVERROR(ENOMEM); |
113 | |
114 | nb_pix_fmts = 0; |
115 | while (map->pix_fmt != AV_PIX_FMT_NONE) { |
116 | VdpBool supported; |
117 | VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type, |
118 | map->vdpau_fmt, &supported); |
119 | if (err == VDP_STATUS_OK && supported) |
120 | priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt; |
121 | map++; |
122 | } |
123 | priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE; |
124 | priv->nb_pix_fmts[i] = nb_pix_fmts; |
125 | } |
126 | |
127 | return 0; |
128 | } |
129 | |
/* Resolve the VDPAU function with the given id through hwctx->get_proc_address
 * and store it in 'result'. On failure, logs and returns AVERROR_UNKNOWN from
 * the enclosing function; requires 'ctx', 'hwctx' and a VdpStatus 'err' in
 * scope at the expansion site. */
#define GET_CALLBACK(id, result)                                                \
do {                                                                            \
    void *tmp;                                                                  \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                     \
    if (err != VDP_STATUS_OK) {                                                 \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n");     \
        return AVERROR_UNKNOWN;                                                 \
    }                                                                           \
    result = tmp;                                                               \
} while (0)
140 | |
/* Device init hook: resolve the VDPAU surface entry points used by this file
 * and probe the transferable pixel formats for each chroma type. */
static int vdpau_device_init(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext *priv = ctx->internal->priv;
    VdpStatus err;
    int ret;

    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
                 priv->get_transfer_caps);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE,           priv->surf_create);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,          priv->surf_destroy);

    ret = vdpau_init_pixmfts(ctx);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
        return ret;
    }

    return 0;
}
163 | |
164 | static void vdpau_device_uninit(AVHWDeviceContext *ctx) |
165 | { |
166 | VDPAUDeviceContext *priv = ctx->internal->priv; |
167 | int i; |
168 | |
169 | for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) |
170 | av_freep(&priv->pix_fmts[i]); |
171 | } |
172 | |
/* AVBuffer free callback for pooled surfaces: the buffer's data pointer is
 * not memory but the VdpVideoSurface handle itself, smuggled through the
 * uint8_t* (see vdpau_pool_alloc). */
static void vdpau_buffer_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext    *ctx         = opaque;
    VDPAUDeviceContext   *device_priv = ctx->device_ctx->internal->priv;
    VdpVideoSurface       surf        = (VdpVideoSurface)(uintptr_t)data;

    device_priv->surf_destroy(surf);
}
181 | |
182 | static AVBufferRef *vdpau_pool_alloc(void *opaque, int size) |
183 | { |
184 | AVHWFramesContext *ctx = opaque; |
185 | VDPAUFramesContext *priv = ctx->internal->priv; |
186 | AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx; |
187 | VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv; |
188 | |
189 | AVBufferRef *ret; |
190 | VdpVideoSurface surf; |
191 | VdpStatus err; |
192 | |
193 | err = device_priv->surf_create(device_hwctx->device, priv->chroma_type, |
194 | ctx->width, ctx->height, &surf); |
195 | if (err != VDP_STATUS_OK) { |
196 | av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n"); |
197 | return NULL; |
198 | } |
199 | |
200 | ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf), |
201 | vdpau_buffer_free, ctx, AV_BUFFER_FLAG_READONLY); |
202 | if (!ret) { |
203 | device_priv->surf_destroy(surf); |
204 | return NULL; |
205 | } |
206 | |
207 | return ret; |
208 | } |
209 | |
/* Frames init hook: map sw_format to a VDPAU chroma type, attach the
 * transfer-format list probed at device init, and create an internal surface
 * pool when the caller did not supply one. */
static int vdpau_frames_init(AVHWFramesContext *ctx)
{
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VDPAUFramesContext *priv = ctx->internal->priv;

    int i;

    switch (ctx->sw_format) {
    case AV_PIX_FMT_YUV420P: priv->chroma_type = VDP_CHROMA_TYPE_420; break;
    case AV_PIX_FMT_YUV422P: priv->chroma_type = VDP_CHROMA_TYPE_422; break;
    case AV_PIX_FMT_YUV444P: priv->chroma_type = VDP_CHROMA_TYPE_444; break;
    default:
        av_log(ctx, AV_LOG_ERROR, "Unsupported data layout: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(ENOSYS);
    }

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (vdpau_pix_fmts[i].chroma_type == priv->chroma_type) {
            priv->chroma_idx = i;
            priv->pix_fmts = device_priv->pix_fmts[i];
            priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
            break;
        }
    }
    /* NOTE(review): this check relies on priv->pix_fmts being NULL when the
     * loop above found no match — presumably priv is zero-initialized by the
     * generic hwcontext code; confirm against hwcontext.c. */
    if (!priv->pix_fmts) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported chroma type: %d\n", priv->chroma_type);
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        /* The pool buffers carry VdpVideoSurface handles, not memory. */
        ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
                                                            vdpau_pool_alloc, NULL);
        if (!ctx->internal->pool_internal)
            return AVERROR(ENOMEM);
    }

    priv->get_data = device_priv->get_data;
    priv->put_data = device_priv->put_data;

    return 0;
}
252 | |
253 | static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame) |
254 | { |
255 | frame->buf[0] = av_buffer_pool_get(ctx->pool); |
256 | if (!frame->buf[0]) |
257 | return AVERROR(ENOMEM); |
258 | |
259 | frame->data[3] = frame->buf[0]->data; |
260 | frame->format = AV_PIX_FMT_VDPAU; |
261 | frame->width = ctx->width; |
262 | frame->height = ctx->height; |
263 | |
264 | return 0; |
265 | } |
266 | |
267 | static int vdpau_transfer_get_formats(AVHWFramesContext *ctx, |
268 | enum AVHWFrameTransferDirection dir, |
269 | enum AVPixelFormat **formats) |
270 | { |
271 | VDPAUFramesContext *priv = ctx->internal->priv; |
272 | |
273 | enum AVPixelFormat *fmts; |
274 | |
275 | if (priv->nb_pix_fmts == 1) { |
276 | av_log(ctx, AV_LOG_ERROR, |
277 | "No target formats are supported for this chroma type\n"); |
278 | return AVERROR(ENOSYS); |
279 | } |
280 | |
281 | fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts)); |
282 | if (!fmts) |
283 | return AVERROR(ENOMEM); |
284 | |
285 | memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts)); |
286 | *formats = fmts; |
287 | |
288 | return 0; |
289 | } |
290 | |
/**
 * Download the contents of a VDPAU surface (src, AV_PIX_FMT_VDPAU) into a
 * software frame (dst).
 *
 * dst->format must be one of the formats advertised for this context's
 * chroma type, otherwise AVERROR(EINVAL) is returned.
 */
static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                    const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    /* For AV_PIX_FMT_VDPAU frames the surface handle lives in data[3]. */
    VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)src->data[3];

    void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    /* VDPAU takes uint32_t linesizes; reject values that do not fit. */
    for (i = 0; i< FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
        data[i] = dst->data[i];
        if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   dst->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = dst->linesize[i];
    }

    /* Find the VDPAU YCbCr format matching the destination pixel format. */
    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == dst->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    /* The loop left map[i] at either the match or the NONE sentinel; the
     * latter means dst->format is not transferable for this chroma type. */
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported target pixel format: %s\n",
               av_get_pix_fmt_name(dst->format));
        return AVERROR(EINVAL);
    }

    /* YV12 stores the chroma planes in the opposite order from FFmpeg's
     * planar YUV formats, so swap the two chroma plane pointers. */
    if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
        FFSWAP(void*, data[1], data[2]);

    err = priv->get_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}
341 | |
/**
 * Upload the contents of a software frame (src) into a VDPAU surface
 * (dst, AV_PIX_FMT_VDPAU).
 *
 * src->format must be one of the formats advertised for this context's
 * chroma type, otherwise AVERROR(EINVAL) is returned.
 */
static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    /* For AV_PIX_FMT_VDPAU frames the surface handle lives in data[3]. */
    VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)dst->data[3];

    const void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    /* VDPAU takes uint32_t linesizes; reject values that do not fit. */
    for (i = 0; i< FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
        data[i] = src->data[i];
        if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   src->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = src->linesize[i];
    }

    /* Find the VDPAU YCbCr format matching the source pixel format. */
    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == src->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    /* The loop left map[i] at either the match or the NONE sentinel; the
     * latter means src->format is not transferable for this chroma type. */
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported source pixel format: %s\n",
               av_get_pix_fmt_name(src->format));
        return AVERROR(EINVAL);
    }

    /* YV12 stores the chroma planes in the opposite order from FFmpeg's
     * planar YUV formats, so swap the two chroma plane pointers. */
    if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
        FFSWAP(const void*, data[1], data[2]);

    err = priv->put_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}
392 | |
393 | #if HAVE_VDPAU_X11 |
394 | #include <vdpau/vdpau_x11.h> |
395 | #include <X11/Xlib.h> |
396 | |
/* State owned by vdpau_device_create() and released in vdpau_device_free(). */
typedef struct VDPAUDevicePriv {
    VdpDeviceDestroy *device_destroy;  /* resolved at device creation; may be NULL */
    Display *dpy;                      /* X11 display owning the VDPAU device */
} VDPAUDevicePriv;
401 | |
/* Free hook for devices created by vdpau_device_create(). Each member is
 * guarded because creation may have failed partway through. */
static void vdpau_device_free(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDevicePriv *priv = ctx->user_opaque;

    if (priv->device_destroy)
        priv->device_destroy(hwctx->device);
    if (priv->dpy)
        XCloseDisplay(priv->dpy);
    av_freep(&priv);
}
413 | |
/**
 * Create a VDPAU device on an X11 display.
 *
 * @param device X11 display name to open, or NULL for the default ($DISPLAY).
 * @param opts   unused.
 * @param flags  unused.
 * @return 0 on success, a negative AVERROR code on failure.
 */
static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
                               AVDictionary *opts, int flags)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;

    VDPAUDevicePriv *priv;
    VdpStatus err;
    VdpGetInformationString *get_information_string;
    const char *display, *vendor;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    /* Install the free callback before anything can fail so that partial
     * state (priv, the display, the device) is always cleaned up. */
    ctx->user_opaque = priv;
    ctx->free        = vdpau_device_free;

    priv->dpy = XOpenDisplay(device);
    if (!priv->dpy) {
        av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
               XDisplayName(device));
        return AVERROR_UNKNOWN;
    }
    display = XDisplayString(priv->dpy);

    err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
                                &hwctx->device, &hwctx->get_proc_address);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
               display);
        return AVERROR_UNKNOWN;
    }

    /* device_destroy is needed by vdpau_device_free(); the information
     * string is only for the log message below. */
    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY,         priv->device_destroy);

    get_information_string(&vendor);
    av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
           "X11 display %s\n", vendor, display);

    return 0;
}
456 | #endif |
457 | |
/* HWContextType descriptor registering VDPAU with the generic hwcontext
 * machinery. device_create is only available when X11 support is compiled
 * in; devices may still be imported by the user otherwise. */
const HWContextType ff_hwcontext_type_vdpau = {
    .type                 = AV_HWDEVICE_TYPE_VDPAU,
    .name                 = "VDPAU",

    .device_hwctx_size    = sizeof(AVVDPAUDeviceContext),
    .device_priv_size     = sizeof(VDPAUDeviceContext),
    .frames_priv_size     = sizeof(VDPAUFramesContext),

#if HAVE_VDPAU_X11
    .device_create        = vdpau_device_create,
#endif
    .device_init          = vdpau_device_init,
    .device_uninit        = vdpau_device_uninit,
    .frames_init          = vdpau_frames_init,
    .frames_get_buffer    = vdpau_get_buffer,
    .transfer_get_formats = vdpau_transfer_get_formats,
    .transfer_data_to     = vdpau_transfer_data_to,
    .transfer_data_from   = vdpau_transfer_data_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
};
479 |