blob: 2e0b5a7ff615ba22422e0870b2a126682698738e
1 | /* |
2 | * drivers/amlogic/amports/vvp9.c |
3 | * |
4 | * Copyright (C) 2015 Amlogic, Inc. All rights reserved. |
5 | * |
6 | * This program is free software; you can redistribute it and/or modify |
7 | * it under the terms of the GNU General Public License as published by |
8 | * the Free Software Foundation; either version 2 of the License, or |
9 | * (at your option) any later version. |
10 | * |
11 | * This program is distributed in the hope that it will be useful, but WITHOUT |
12 | * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for |
14 | * more details. |
15 | * |
16 | */ |
17 | #define DEBUG |
18 | #include <linux/kernel.h> |
19 | #include <linux/module.h> |
20 | #include <linux/types.h> |
21 | #include <linux/errno.h> |
22 | #include <linux/interrupt.h> |
23 | #include <linux/semaphore.h> |
24 | #include <linux/delay.h> |
25 | #include <linux/timer.h> |
26 | #include <linux/kfifo.h> |
27 | #include <linux/kthread.h> |
28 | #include <linux/spinlock.h> |
29 | #include <linux/platform_device.h> |
30 | #include <linux/amlogic/media/vfm/vframe.h> |
31 | #include <linux/amlogic/media/utils/amstream.h> |
32 | #include <linux/amlogic/media/utils/vformat.h> |
33 | #include <linux/amlogic/media/frame_sync/ptsserv.h> |
34 | #include <linux/amlogic/media/canvas/canvas.h> |
35 | #include <linux/amlogic/media/vfm/vframe_provider.h> |
36 | #include <linux/amlogic/media/vfm/vframe_receiver.h> |
37 | #include <linux/dma-mapping.h> |
38 | #include <linux/dma-contiguous.h> |
39 | #include <linux/slab.h> |
40 | #include <linux/amlogic/tee.h> |
41 | #include "../../../stream_input/amports/amports_priv.h" |
42 | #include <linux/amlogic/media/codec_mm/codec_mm.h> |
43 | #include "../utils/decoder_mmu_box.h" |
44 | #include "../utils/decoder_bmmu_box.h" |
45 | |
46 | #define MEM_NAME "codec_vp9" |
47 | /* #include <mach/am_regs.h> */ |
48 | #include <linux/amlogic/media/utils/vdec_reg.h> |
49 | #include "../utils/vdec.h" |
50 | #include "../utils/amvdec.h" |
51 | #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC |
52 | #include "../utils/vdec_profile.h" |
53 | #endif |
54 | |
55 | #include <linux/amlogic/media/video_sink/video.h> |
56 | #include <linux/amlogic/media/codec_mm/configs.h> |
57 | #include "../utils/config_parser.h" |
58 | #include "../utils/firmware.h" |
59 | #include "../../../common/chips/decoder_cpu_ver_info.h" |
60 | #include "../utils/vdec_v4l2_buffer_ops.h" |
61 | #include <media/v4l2-mem2mem.h> |
62 | |
63 | #define MIX_STREAM_SUPPORT |
64 | |
65 | #include "vvp9.h" |
66 | |
67 | |
68 | /*#define SUPPORT_FB_DECODING*/ |
69 | /*#define FB_DECODING_TEST_SCHEDULE*/ |
70 | |
71 | |
72 | #define HW_MASK_FRONT 0x1 |
73 | #define HW_MASK_BACK 0x2 |
74 | |
75 | #define VP9D_MPP_REFINFO_TBL_ACCCONFIG 0x3442 |
76 | #define VP9D_MPP_REFINFO_DATA 0x3443 |
77 | #define VP9D_MPP_REF_SCALE_ENBL 0x3441 |
78 | #define HEVC_MPRED_CTRL4 0x324c |
79 | #define HEVC_CM_HEADER_START_ADDR 0x3628 |
80 | #define HEVC_DBLK_CFGB 0x350b |
81 | #define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464 |
82 | #define HEVC_SAO_MMU_VH1_ADDR 0x363b |
83 | #define HEVC_SAO_MMU_VH0_ADDR 0x363a |
84 | |
85 | #define HEVC_MV_INFO 0x310d |
86 | #define HEVC_QP_INFO 0x3137 |
87 | #define HEVC_SKIP_INFO 0x3136 |
88 | |
89 | #define VP9_10B_DEC_IDLE 0 |
90 | #define VP9_10B_DEC_FRAME_HEADER 1 |
91 | #define VP9_10B_DEC_SLICE_SEGMENT 2 |
92 | #define VP9_10B_DECODE_SLICE 5 |
93 | #define VP9_10B_DISCARD_NAL 6 |
94 | #define VP9_DUMP_LMEM 7 |
95 | #define HEVC_DECPIC_DATA_DONE 0xa |
96 | #define HEVC_DECPIC_DATA_ERROR 0xb |
97 | #define HEVC_NAL_DECODE_DONE 0xe |
98 | #define HEVC_DECODE_BUFEMPTY 0x20 |
99 | #define HEVC_DECODE_TIMEOUT 0x21 |
100 | #define HEVC_SEARCH_BUFEMPTY 0x22 |
101 | #define HEVC_DECODE_OVER_SIZE 0x23 |
102 | #define HEVC_S2_DECODING_DONE 0x50 |
103 | #define VP9_HEAD_PARSER_DONE 0xf0 |
104 | #define VP9_HEAD_SEARCH_DONE 0xf1 |
105 | #define VP9_EOS 0xf2 |
106 | #define HEVC_ACTION_DONE 0xff |
107 | |
108 | #define VF_POOL_SIZE 32 |
109 | |
/*
 * NOTE(review): redefining pr_info to raw printk drops the implicit
 * KERN_* log level and any pr_fmt() prefix; kept as-is for legacy
 * log-output behavior.
 */
#undef pr_info
#define pr_info printk
112 | |
113 | #define DECODE_MODE_SINGLE ((0x80 << 24) | 0) |
114 | #define DECODE_MODE_MULTI_STREAMBASE ((0x80 << 24) | 1) |
115 | #define DECODE_MODE_MULTI_FRAMEBASE ((0x80 << 24) | 2) |
116 | #define DECODE_MODE_SINGLE_LOW_LATENCY ((0x80 << 24) | 3) |
117 | #define DECODE_MODE_MULTI_FRAMEBASE_NOHEAD ((0x80 << 24) | 4) |
118 | |
119 | #define VP9_TRIGGER_FRAME_DONE 0x100 |
120 | #define VP9_TRIGGER_FRAME_ENABLE 0x200 |
121 | |
122 | #define MV_MEM_UNIT 0x240 |
123 | /*--------------------------------------------------- |
124 | * Include "parser_cmd.h" |
125 | *--------------------------------------------------- |
126 | */ |
127 | #define PARSER_CMD_SKIP_CFG_0 0x0000090b |
128 | |
129 | #define PARSER_CMD_SKIP_CFG_1 0x1b14140f |
130 | |
131 | #define PARSER_CMD_SKIP_CFG_2 0x001b1910 |
132 | |
133 | #define PARSER_CMD_NUMBER 37 |
134 | |
135 | /*#define HEVC_PIC_STRUCT_SUPPORT*/ |
136 | /* to remove, fix build error */ |
137 | |
138 | /*#define CODEC_MM_FLAGS_FOR_VDECODER 0*/ |
139 | |
140 | #define MULTI_INSTANCE_SUPPORT |
141 | #define SUPPORT_10BIT |
142 | /* #define ERROR_HANDLE_DEBUG */ |
143 | |
144 | #ifndef STAT_KTHREAD |
145 | #define STAT_KTHREAD 0x40 |
146 | #endif |
147 | |
148 | #ifdef MULTI_INSTANCE_SUPPORT |
149 | #define MAX_DECODE_INSTANCE_NUM 9 |
150 | #define MULTI_DRIVER_NAME "ammvdec_vp9" |
151 | static unsigned int max_decode_instance_num |
152 | = MAX_DECODE_INSTANCE_NUM; |
153 | static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM]; |
154 | static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM]; |
155 | static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM]; |
156 | static unsigned int run_count[MAX_DECODE_INSTANCE_NUM]; |
157 | static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM]; |
158 | static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM]; |
159 | |
160 | static u32 decode_timeout_val = 200; |
161 | static int start_decode_buf_level = 0x8000; |
162 | static u32 work_buf_size; |
163 | |
164 | static u32 force_pts_unstable; |
165 | |
166 | static u32 mv_buf_margin; |
167 | |
168 | /* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */ |
169 | /* double_write_mode: |
170 | * 0, no double write; |
171 | * 1, 1:1 ratio; |
172 | * 2, (1/4):(1/4) ratio; |
173 | * 3, (1/4):(1/4) ratio, with both compressed frame included |
174 | * 4, (1/2):(1/2) ratio; |
175 | * 0x10, double write only |
176 | * 0x100, if > 1080p,use mode 4,else use mode 1; |
177 | * 0x200, if > 1080p,use mode 2,else use mode 1; |
178 | * 0x300, if > 720p, use mode 4, else use mode 1; |
179 | */ |
180 | static u32 double_write_mode; |
181 | |
182 | #define DRIVER_NAME "amvdec_vp9" |
183 | #define MODULE_NAME "amvdec_vp9" |
184 | #define DRIVER_HEADER_NAME "amvdec_vp9_header" |
185 | |
186 | |
187 | #define PUT_INTERVAL (HZ/100) |
188 | #define ERROR_SYSTEM_RESET_COUNT 200 |
189 | |
190 | #define PTS_NORMAL 0 |
191 | #define PTS_NONE_REF_USE_DURATION 1 |
192 | |
193 | #define PTS_MODE_SWITCHING_THRESHOLD 3 |
194 | #define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3 |
195 | |
196 | #define DUR2PTS(x) ((x)*90/96) |
197 | |
198 | struct VP9Decoder_s; |
199 | static int vvp9_vf_states(struct vframe_states *states, void *); |
200 | static struct vframe_s *vvp9_vf_peek(void *); |
201 | static struct vframe_s *vvp9_vf_get(void *); |
202 | static void vvp9_vf_put(struct vframe_s *, void *); |
203 | static int vvp9_event_cb(int type, void *data, void *private_data); |
204 | |
205 | static int vvp9_stop(struct VP9Decoder_s *pbi); |
206 | #ifdef MULTI_INSTANCE_SUPPORT |
207 | static s32 vvp9_init(struct vdec_s *vdec); |
208 | #else |
209 | static s32 vvp9_init(struct VP9Decoder_s *pbi); |
210 | #endif |
211 | static void vvp9_prot_init(struct VP9Decoder_s *pbi, u32 mask); |
212 | static int vvp9_local_init(struct VP9Decoder_s *pbi); |
213 | static void vvp9_put_timer_func(unsigned long arg); |
214 | static void dump_data(struct VP9Decoder_s *pbi, int size); |
215 | static unsigned char get_data_check_sum |
216 | (struct VP9Decoder_s *pbi, int size); |
217 | static void dump_pic_list(struct VP9Decoder_s *pbi); |
218 | static int vp9_alloc_mmu( |
219 | struct VP9Decoder_s *pbi, |
220 | int cur_buf_idx, |
221 | int pic_width, |
222 | int pic_height, |
223 | unsigned short bit_depth, |
224 | unsigned int *mmu_index_adr); |
225 | |
226 | |
227 | static const char vvp9_dec_id[] = "vvp9-dev"; |
228 | |
229 | #define PROVIDER_NAME "decoder.vp9" |
230 | #define MULTI_INSTANCE_PROVIDER_NAME "vdec.vp9" |
231 | |
232 | static const struct vframe_operations_s vvp9_vf_provider = { |
233 | .peek = vvp9_vf_peek, |
234 | .get = vvp9_vf_get, |
235 | .put = vvp9_vf_put, |
236 | .event_cb = vvp9_event_cb, |
237 | .vf_states = vvp9_vf_states, |
238 | }; |
239 | |
240 | static struct vframe_provider_s vvp9_vf_prov; |
241 | |
242 | static u32 bit_depth_luma; |
243 | static u32 bit_depth_chroma; |
244 | static u32 frame_width; |
245 | static u32 frame_height; |
246 | static u32 video_signal_type; |
247 | |
248 | static u32 on_no_keyframe_skiped; |
249 | |
250 | #define PROB_SIZE (496 * 2 * 4) |
251 | #define PROB_BUF_SIZE (0x5000) |
252 | #define COUNT_BUF_SIZE (0x300 * 4 * 4) |
253 | /*compute_losless_comp_body_size(4096, 2304, 1) = 18874368(0x1200000)*/ |
254 | #define MAX_FRAME_4K_NUM 0x1200 |
255 | #define MAX_FRAME_8K_NUM 0x4800 |
256 | |
257 | #define HEVC_ASSIST_MMU_MAP_ADDR 0x3009 |
258 | |
259 | #ifdef SUPPORT_FB_DECODING |
260 | /* register define */ |
261 | #define HEVC_ASSIST_HED_FB_W_CTL 0x3006 |
262 | #define HEVC_ASSIST_HED_FB_R_CTL 0x3007 |
263 | #define HEVC_ASSIST_HED_FB_ADDR 0x3008 |
264 | #define HEVC_ASSIST_FB_MMU_MAP_ADDR 0x300a |
265 | #define HEVC_ASSIST_FBD_MMU_MAP_ADDR 0x300b |
266 | |
267 | |
268 | #define MAX_STAGE_PAGE_NUM 0x1200 |
269 | #define STAGE_MMU_MAP_SIZE (MAX_STAGE_PAGE_NUM * 4) |
270 | #endif |
static inline int div_r32(int64_t m, int n)
{
	/*
	 * Truncating integer division, i.e. (int)(m / n).
	 * 32-bit ARM kernels have no native 64-bit divide instruction,
	 * so the div_s64() kernel helper must be used there; arm64 can
	 * divide a 64-bit value directly.
	 */
#ifndef CONFIG_ARM64
	return (int)div_s64(m, n);
#else
	return (int)(m / n);
#endif
}
284 | |
/*USE_BUF_BLOCK*/
/*
 * Per-output-buffer bookkeeping: where the buffer sits in physical
 * memory and how its header / luma / chroma regions are laid out.
 */
struct BUF_s {
	int index;			/* slot index in the decoder's buffer array */
	unsigned int alloc_flag;	/* non-zero once backing memory is allocated */
	/*buffer */
	unsigned int cma_page_count;	/* page count of the (CMA) allocation */
	unsigned long alloc_addr;	/* address returned by the allocator */
	unsigned long start_adr;	/* usable start address of the buffer */
	unsigned int size;		/* total size in bytes */

	unsigned int free_start_adr;	/* first unused address within the buffer */
	ulong v4l_ref_buf_addr;		/* V4L2 reference-buffer address/handle */
	ulong header_addr;		/* compressed-header region address */
	u32 header_size;		/* compressed-header region size */
	u32 luma_size;			/* luma plane size */
	ulong chroma_addr;		/* chroma plane address */
	u32 chroma_size;		/* chroma plane size */
} /*BUF_t */;
303 | |
/* Motion-vector working-buffer descriptor. */
struct MVBUF_s {
	unsigned long start_adr;	/* start address of the MV buffer */
	unsigned int size;		/* buffer size in bytes */
	int used_flag;			/* non-zero while the buffer is in use */
} /*MVBUF_t */;
309 | |
310 | /* #undef BUFMGR_ONLY to enable hardware configuration */ |
311 | |
312 | /*#define TEST_WR_PTR_INC*/ |
313 | /*#define WR_PTR_INC_NUM 128*/ |
314 | #define WR_PTR_INC_NUM 1 |
315 | |
316 | #define SIMULATION |
317 | #define DOS_PROJECT |
318 | #undef MEMORY_MAP_IN_REAL_CHIP |
319 | |
320 | /*#undef DOS_PROJECT*/ |
321 | /*#define MEMORY_MAP_IN_REAL_CHIP*/ |
322 | |
323 | /*#define BUFFER_MGR_ONLY*/ |
324 | /*#define CONFIG_HEVC_CLK_FORCED_ON*/ |
325 | /*#define ENABLE_SWAP_TEST*/ |
326 | #define MCRCC_ENABLE |
327 | |
328 | #define VP9_LPF_LVL_UPDATE |
329 | /*#define DBG_LF_PRINT*/ |
330 | |
331 | #ifdef VP9_10B_NV21 |
332 | #else |
333 | #define LOSLESS_COMPRESS_MODE |
334 | #endif |
335 | |
336 | #define DOUBLE_WRITE_YSTART_TEMP 0x02000000 |
337 | #define DOUBLE_WRITE_CSTART_TEMP 0x02900000 |
338 | |
339 | |
340 | |
/*
 * NOTE(review): u32/u16 are already provided by <linux/types.h>; these
 * local typedefs are redundant and candidates for removal.
 */
typedef unsigned int u32;
typedef unsigned short u16;
343 | |
344 | #define VP9_DEBUG_BUFMGR 0x01 |
345 | #define VP9_DEBUG_BUFMGR_MORE 0x02 |
346 | #define VP9_DEBUG_BUFMGR_DETAIL 0x04 |
347 | #define VP9_DEBUG_OUT_PTS 0x10 |
348 | #define VP9_DEBUG_SEND_PARAM_WITH_REG 0x100 |
349 | #define VP9_DEBUG_MERGE 0x200 |
350 | #define VP9_DEBUG_DBG_LF_PRINT 0x400 |
351 | #define VP9_DEBUG_REG 0x800 |
352 | #define VP9_DEBUG_2_STAGE 0x1000 |
353 | #define VP9_DEBUG_2_STAGE_MORE 0x2000 |
354 | #define VP9_DEBUG_QOS_INFO 0x4000 |
355 | #define VP9_DEBUG_DIS_LOC_ERROR_PROC 0x10000 |
356 | #define VP9_DEBUG_DIS_SYS_ERROR_PROC 0x20000 |
357 | #define VP9_DEBUG_DUMP_PIC_LIST 0x40000 |
358 | #define VP9_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000 |
359 | #define VP9_DEBUG_NO_TRIGGER_FRAME 0x100000 |
360 | #define VP9_DEBUG_LOAD_UCODE_FROM_FILE 0x200000 |
361 | #define VP9_DEBUG_FORCE_SEND_AGAIN 0x400000 |
362 | #define VP9_DEBUG_DUMP_DATA 0x800000 |
363 | #define VP9_DEBUG_CACHE 0x1000000 |
364 | #define VP9_DEBUG_CACHE_HIT_RATE 0x2000000 |
365 | #define IGNORE_PARAM_FROM_CONFIG 0x8000000 |
366 | #ifdef MULTI_INSTANCE_SUPPORT |
367 | #define PRINT_FLAG_ERROR 0x0 |
368 | #define PRINT_FLAG_V4L_DETAIL 0x10000000 |
369 | #define PRINT_FLAG_VDEC_STATUS 0x20000000 |
370 | #define PRINT_FLAG_VDEC_DETAIL 0x40000000 |
371 | #define PRINT_FLAG_VDEC_DATA 0x80000000 |
372 | #endif |
373 | |
374 | static u32 debug; |
375 | static bool is_reset; |
376 | /*for debug*/ |
377 | /* |
378 | udebug_flag: |
379 | bit 0, enable ucode print |
380 | bit 1, enable ucode detail print |
381 | bit [31:16] not 0, pos to dump lmem |
382 | bit 2, pop bits to lmem |
383 | bit [11:8], pre-pop bits for alignment (when bit 2 is 1) |
384 | */ |
385 | static u32 udebug_flag; |
386 | /* |
387 | when udebug_flag[1:0] is not 0 |
388 | udebug_pause_pos not 0, |
389 | pause position |
390 | */ |
391 | static u32 udebug_pause_pos; |
392 | /* |
393 | when udebug_flag[1:0] is not 0 |
394 | and udebug_pause_pos is not 0, |
395 | pause only when DEBUG_REG2 is equal to this val |
396 | */ |
397 | static u32 udebug_pause_val; |
398 | |
399 | static u32 udebug_pause_decode_idx; |
400 | |
401 | static u32 without_display_mode; |
402 | |
403 | /* |
404 | *[3:0] 0: default use config from omx. |
405 | * 1: force enable fence. |
406 | * 2: disable fence. |
407 | *[7:4] 0: fence use for driver. |
408 | * 1: fence fd use for app. |
409 | */ |
410 | static u32 force_config_fence; |
411 | |
412 | #define DEBUG_REG |
413 | #ifdef DEBUG_REG |
414 | void WRITE_VREG_DBG2(unsigned int adr, unsigned int val) |
415 | { |
416 | if (debug & VP9_DEBUG_REG) |
417 | pr_info("%s(%x, %x)\n", __func__, adr, val); |
418 | if (adr != 0) |
419 | WRITE_VREG(adr, val); |
420 | } |
421 | |
422 | #undef WRITE_VREG |
423 | #define WRITE_VREG WRITE_VREG_DBG2 |
424 | #endif |
425 | |
426 | #define FRAME_CNT_WINDOW_SIZE 59 |
427 | #define RATE_CORRECTION_THRESHOLD 5 |
428 | /************************************************** |
429 | |
430 | VP9 buffer management start |
431 | |
432 | ***************************************************/ |
433 | |
434 | #define MMU_COMPRESS_HEADER_SIZE 0x48000 |
435 | #define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4) |
436 | #define MAX_SIZE_8K (8192 * 4608) |
437 | #define MAX_SIZE_4K (4096 * 2304) |
438 | #define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K) |
439 | |
440 | #define INVALID_IDX -1 /* Invalid buffer index.*/ |
441 | |
442 | #define RPM_BEGIN 0x200 |
443 | #define RPM_END 0x280 |
444 | |
/*
 * Frame-header parameters exchanged with the decoder ucode through the
 * RPM/lmem window [RPM_BEGIN, RPM_END): .l gives raw word access, .p
 * overlays the named fields. The layout is fixed by the ucode -- never
 * reorder or resize these members.
 */
union param_u {
	struct {
		unsigned short data[RPM_END - RPM_BEGIN];
	} l;
	struct {
		/* from ucode lmem, do not change this struct */
		unsigned short profile;
		unsigned short show_existing_frame;
		unsigned short frame_to_show_idx;
		unsigned short frame_type; /*1 bit*/
		unsigned short show_frame; /*1 bit*/
		unsigned short error_resilient_mode; /*1 bit*/
		unsigned short intra_only; /*1 bit*/
		unsigned short display_size_present; /*1 bit*/
		unsigned short reset_frame_context;
		unsigned short refresh_frame_flags;
		unsigned short width;
		unsigned short height;
		unsigned short display_width;
		unsigned short display_height;
		/*
		 *bit[11:8] - ref_frame_info_0 (ref(3-bits), ref_frame_sign_bias(1-bit))
		 *bit[7:4] - ref_frame_info_1 (ref(3-bits), ref_frame_sign_bias(1-bit))
		 *bit[3:0] - ref_frame_info_2 (ref(3-bits), ref_frame_sign_bias(1-bit))
		 */
		unsigned short ref_info;
		/*
		 *bit[2]: same_frame_size0
		 *bit[1]: same_frame_size1
		 *bit[0]: same_frame_size2
		 */
		unsigned short same_frame_size;

		unsigned short mode_ref_delta_enabled;
		unsigned short ref_deltas[4];
		unsigned short mode_deltas[2];
		unsigned short filter_level;
		unsigned short sharpness_level;
		unsigned short bit_depth;
		unsigned short seg_quant_info[8];
		unsigned short seg_enabled;
		unsigned short seg_abs_delta;
		/* bit 15: feature enabled; bit 8, sign; bit[5:0], data */
		unsigned short seg_lf_info[8];
	} p;
};
491 | |
492 | |
/* External frame-buffer handle, mirroring libvpx's vpx_codec_frame_buffer_t. */
struct vpx_codec_frame_buffer_s {
	uint8_t *data; /**< Pointer to the data buffer */
	size_t size; /**< Size of data in bytes */
	void *priv; /**< Frame's private data */
};
498 | |
/* Color space of the coded stream, mirroring libvpx's vpx_color_space_t. */
enum vpx_color_space_t {
	VPX_CS_UNKNOWN = 0, /**< Unknown */
	VPX_CS_BT_601 = 1, /**< BT.601 */
	VPX_CS_BT_709 = 2, /**< BT.709 */
	VPX_CS_SMPTE_170 = 3, /**< SMPTE.170 */
	VPX_CS_SMPTE_240 = 4, /**< SMPTE.240 */
	VPX_CS_BT_2020 = 5, /**< BT.2020 */
	VPX_CS_RESERVED = 6, /**< Reserved */
	VPX_CS_SRGB = 7 /**< sRGB */
}; /**< alias for enum vpx_color_space */

/* Coded bit depth, mirroring libvpx's vpx_bit_depth_t. */
enum vpx_bit_depth_t {
	VPX_BITS_8 = 8, /**< 8 bits */
	VPX_BITS_10 = 10, /**< 10 bits */
	VPX_BITS_12 = 12, /**< 12 bits */
};
515 | |
#define MAX_SLICE_NUM 1024
/*
 * Per-picture state, modeled on libvpx's YV12_BUFFER_CONFIG plus the
 * hardware-specific bookkeeping this driver needs (canvas indices,
 * compressed-header address, MV write area, QoS statistics, fence).
 */
struct PIC_BUFFER_CONFIG_s {
	int index;		/* frame-buffer slot index */
	int BUF_index;		/* index into the BUF_s array backing this picture */
	int mv_buf_index;	/* index into the MVBUF_s array */
	int comp_body_size;	/* compressed frame body size */
	int buf_size;
	int vf_ref;		/* reference count for vframes handed out */
	int y_canvas_index;
	int uv_canvas_index;
#ifdef MULTI_INSTANCE_SUPPORT
	struct canvas_config_s canvas_config[2];
#endif
	int decode_idx;		/* decode-order counter */
	int slice_type;
	int stream_offset;
	u32 pts;		/* presentation timestamp */
	u64 pts64;
	u64 timestamp;
	uint8_t error_mark;	/* set when the picture decoded with errors */
	/**/
	int slice_idx;
	/*buffer*/
	unsigned long header_adr;		/* compressed-header address */
	unsigned long mpred_mv_wr_start_addr;	/* MV write area for this picture */
	/*unsigned long mc_y_adr;
	 *unsigned long mc_u_v_adr;
	 */
	unsigned int dw_y_adr;		/* double-write luma address */
	unsigned int dw_u_v_adr;	/* double-write chroma address */
	int mc_canvas_y;
	int mc_canvas_u_v;

	int lcu_total;		/* number of LCUs in the picture */
	/**/
	int y_width;
	int y_height;
	int y_crop_width;
	int y_crop_height;
	int y_stride;

	int uv_width;
	int uv_height;
	int uv_crop_width;
	int uv_crop_height;
	int uv_stride;

	int alpha_width;
	int alpha_height;
	int alpha_stride;

	uint8_t *y_buffer;
	uint8_t *u_buffer;
	uint8_t *v_buffer;
	uint8_t *alpha_buffer;

	uint8_t *buffer_alloc;
	int buffer_alloc_sz;
	int border;
	int frame_size;
	int subsampling_x;
	int subsampling_y;
	unsigned int bit_depth;
	enum vpx_color_space_t color_space;

	int corrupted;
	int flags;
	unsigned long cma_alloc_addr;

	int double_write_mode;

	/* picture qos information */
	int max_qp;
	int avg_qp;
	int min_qp;
	int max_skip;
	int avg_skip;
	int min_skip;
	int max_mv;
	int min_mv;
	int avg_mv;

	u32 hw_decode_time;
	u32 frame_size2; /* For frame base mode */
	bool vframe_bound;

	/* vdec sync. */
	struct fence *fence;
} PIC_BUFFER_CONFIG; /* NOTE(review): declares a global variable, not a typedef */
605 | |
/* VP9 bitstream profiles. */
enum BITSTREAM_PROFILE {
	PROFILE_0,
	PROFILE_1,
	PROFILE_2,
	PROFILE_3,
	MAX_PROFILES
};

/* Frame coding types. */
enum FRAME_TYPE {
	KEY_FRAME = 0,
	INTER_FRAME = 1,
	FRAME_TYPES,
};

/* How inter prediction combines reference frames. */
enum REFERENCE_MODE {
	SINGLE_REFERENCE = 0,
	COMPOUND_REFERENCE = 1,
	REFERENCE_MODE_SELECT = 2,
	REFERENCE_MODES = 3,
};
626 | |
627 | #define NONE -1 |
628 | #define INTRA_FRAME 0 |
629 | #define LAST_FRAME 1 |
630 | #define GOLDEN_FRAME 2 |
631 | #define ALTREF_FRAME 3 |
632 | #define MAX_REF_FRAMES 4 |
633 | |
634 | #define REFS_PER_FRAME 3 |
635 | |
636 | #define REF_FRAMES_LOG2 3 |
637 | #define REF_FRAMES (1 << REF_FRAMES_LOG2) |
638 | #define REF_FRAMES_4K (6) |
639 | |
640 | /*4 scratch frames for the new frames to support a maximum of 4 cores decoding |
641 | *in parallel, 3 for scaled references on the encoder. |
642 | *TODO(hkuang): Add ondemand frame buffers instead of hardcoding the number |
643 | * // of framebuffers. |
644 | *TODO(jkoleszar): These 3 extra references could probably come from the |
645 | *normal reference pool. |
646 | */ |
647 | #define FRAME_BUFFERS (REF_FRAMES + 16) |
648 | #define HEADER_FRAME_BUFFERS (FRAME_BUFFERS) |
649 | #define MAX_BUF_NUM (FRAME_BUFFERS) |
650 | #define MV_BUFFER_NUM FRAME_BUFFERS |
651 | #ifdef SUPPORT_FB_DECODING |
652 | #define STAGE_MAX_BUFFERS 16 |
653 | #else |
654 | #define STAGE_MAX_BUFFERS 0 |
655 | #endif |
656 | |
657 | #define FRAME_CONTEXTS_LOG2 2 |
658 | #define FRAME_CONTEXTS (1 << FRAME_CONTEXTS_LOG2) |
659 | /*buffer + header buffer + workspace*/ |
660 | #ifdef MV_USE_FIXED_BUF |
661 | #define MAX_BMMU_BUFFER_NUM (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + 1) |
662 | #define VF_BUFFER_IDX(n) (n) |
663 | #define HEADER_BUFFER_IDX(n) (FRAME_BUFFERS + n) |
664 | #define WORK_SPACE_BUF_ID (FRAME_BUFFERS + HEADER_FRAME_BUFFERS) |
665 | #else |
666 | #define MAX_BMMU_BUFFER_NUM \ |
667 | (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + MV_BUFFER_NUM + 1) |
668 | #define VF_BUFFER_IDX(n) (n) |
669 | #define HEADER_BUFFER_IDX(n) (FRAME_BUFFERS + n) |
670 | #define MV_BUFFER_IDX(n) (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + n) |
671 | #define WORK_SPACE_BUF_ID \ |
672 | (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + MV_BUFFER_NUM) |
673 | #endif |
674 | |
/* A reference-counted frame buffer in the shared buffer pool. */
struct RefCntBuffer_s {
	int ref_count;	/* outstanding references; 0 means reusable */
	/*MV_REF *mvs;*/
	int mi_rows;
	int mi_cols;
	struct vpx_codec_frame_buffer_s raw_frame_buffer;
	struct PIC_BUFFER_CONFIG_s buf;

	/*The Following variables will only be used in frame parallel decode.
	 *
	 *frame_worker_owner indicates which FrameWorker owns this buffer. NULL means
	 *that no FrameWorker owns, or is decoding, this buffer.
	 *VP9Worker *frame_worker_owner;
	 *
	 *row and col indicate which position frame has been decoded to in real
	 *pixel unit. They are reset to -1 when decoding begins and set to INT_MAX
	 *when the frame is fully decoded.
	 */
	int row;
	int col;
} RefCntBuffer; /* NOTE(review): global variable, likely meant as a typedef */
696 | |
/* One active reference: pool index plus the picture it resolves to. */
struct RefBuffer_s {
	/*TODO(dkovalev): idx is not really required and should be removed, now it
	 *is used in vp9_onyxd_if.c
	 */
	int idx;
	struct PIC_BUFFER_CONFIG_s *buf;
	/*struct scale_factors sf;*/
} RefBuffer; /* NOTE(review): global variable, likely meant as a typedef */
705 | |
/* A codec-internal frame buffer and its in-use flag. */
struct InternalFrameBuffer_s {
	uint8_t *data;
	size_t size;
	int in_use;	/* cleared by vp9_release_frame_buffer() */
} InternalFrameBuffer; /* NOTE(review): global variable, likely meant as a typedef */

/* The list of internally allocated frame buffers. */
struct InternalFrameBufferList_s {
	int num_internal_frame_buffers;
	struct InternalFrameBuffer_s *int_fb;
} InternalFrameBufferList; /* NOTE(review): global variable, likely meant as a typedef */
716 | |
/*
 * Pool of reference-counted frame buffers shared across the decode
 * pipeline; guarded by .lock (see lock_buffer_pool()/unlock_buffer_pool()).
 */
struct BufferPool_s {
	/*Protect BufferPool from being accessed by several FrameWorkers at
	 *the same time during frame parallel decode.
	 *TODO(hkuang): Try to use atomic variable instead of locking the whole pool.
	 *
	 *Private data associated with the frame buffer callbacks.
	 *void *cb_priv;
	 *
	 *vpx_get_frame_buffer_cb_fn_t get_fb_cb;
	 *vpx_release_frame_buffer_cb_fn_t release_fb_cb;
	 */

	struct RefCntBuffer_s frame_bufs[FRAME_BUFFERS];

	/*Frame buffers allocated internally by the codec.*/
	struct InternalFrameBufferList_s int_frame_buffers;
	unsigned long flags;	/* saved IRQ flags for the spinlock */
	spinlock_t lock;

} BufferPool; /* NOTE(review): global variable, likely meant as a typedef */
737 | |
/*
 * IRQ-safe buffer-pool locking helpers. The macro argument is
 * parenthesized so expressions such as lock_buffer_pool(a ? b : c, f)
 * expand correctly; `flags` must remain an lvalue for spin_lock_irqsave.
 */
#define lock_buffer_pool(pool, flags) \
		spin_lock_irqsave(&(pool)->lock, flags)

#define unlock_buffer_pool(pool, flags) \
		spin_unlock_irqrestore(&(pool)->lock, flags)
743 | |
/*
 * Global decoder state for one stream, ported from libvpx's VP9_COMMON:
 * coded/display geometry, the reference-frame maps, the shared buffer
 * pool, and the per-frame header flags parsed from the bitstream.
 */
struct VP9_Common_s {
	enum vpx_color_space_t color_space;
	int width;		/* coded width */
	int height;		/* coded height */
	int display_width;
	int display_height;
	int last_width;		/* geometry of the previous frame */
	int last_height;

	int subsampling_x;
	int subsampling_y;

	int use_highbitdepth;/*Marks if we need to use 16bit frame buffers.*/

	struct PIC_BUFFER_CONFIG_s *frame_to_show;
	struct RefCntBuffer_s *prev_frame;

	/*TODO(hkuang): Combine this with cur_buf in macroblockd.*/
	struct RefCntBuffer_s *cur_frame;

	int ref_frame_map[REF_FRAMES]; /* maps fb_idx to reference slot */

	/*Prepare ref_frame_map for the next frame.
	 *Only used in frame parallel decode.
	 */
	int next_ref_frame_map[REF_FRAMES];

	/* TODO(jkoleszar): could expand active_ref_idx to 4,
	 *with 0 as intra, and roll new_fb_idx into it.
	 */

	/*Each frame can reference REFS_PER_FRAME buffers*/
	struct RefBuffer_s frame_refs[REFS_PER_FRAME];

	int prev_fb_idx;
	int new_fb_idx;		/* pool index of the frame being decoded */
	int cur_fb_idx_mmu;
	/*last frame's frame type for motion search*/
	enum FRAME_TYPE last_frame_type;
	enum FRAME_TYPE frame_type;

	int show_frame;
	int last_show_frame;
	int show_existing_frame;

	/*Flag signaling that the frame is encoded using only INTRA modes.*/
	uint8_t intra_only;
	uint8_t last_intra_only;

	int allow_high_precision_mv;

	/*Flag signaling that the frame context should be reset to default
	 *values. 0 or 1 implies don't reset, 2 reset just the context
	 *specified in the frame header, 3 reset all contexts.
	 */
	int reset_frame_context;

	/*MBs, mb_rows/cols is in 16-pixel units; mi_rows/cols is in
	 * MODE_INFO (8-pixel) units.
	 */
	int MBs;
	int mb_rows, mi_rows;
	int mb_cols, mi_cols;
	int mi_stride;

	/*Whether to use previous frame's motion vectors for prediction.*/
	int use_prev_frame_mvs;

	int refresh_frame_context; /* Two state 0 = NO, 1 = YES */

	int ref_frame_sign_bias[MAX_REF_FRAMES]; /* Two state 0, 1 */

	/*struct loopfilter lf;*/
	/*struct segmentation seg;*/

	/*TODO(hkuang):Remove this as it is the same as frame_parallel_decode*/
	/* in pbi.*/
	int frame_parallel_decode; /* frame-based threading.*/

	/*Context probabilities for reference frame prediction*/
	/*MV_REFERENCE_FRAME comp_fixed_ref;*/
	/*MV_REFERENCE_FRAME comp_var_ref[2];*/
	enum REFERENCE_MODE reference_mode;

	/*FRAME_CONTEXT *fc; */ /* this frame entropy */
	/*FRAME_CONTEXT *frame_contexts; */ /*FRAME_CONTEXTS*/
	/*unsigned int frame_context_idx; *//* Context to use/update */
	/*FRAME_COUNTS counts;*/

	unsigned int current_video_frame;
	enum BITSTREAM_PROFILE profile;

	enum vpx_bit_depth_t bit_depth;

	int error_resilient_mode;
	int frame_parallel_decoding_mode;

	int byte_alignment;
	int skip_loop_filter;

	/*External BufferPool passed from outside.*/
	struct BufferPool_s *buffer_pool;

	int above_context_alloc_cols;

};
850 | |
851 | static void set_canvas(struct VP9Decoder_s *pbi, |
852 | struct PIC_BUFFER_CONFIG_s *pic_config); |
853 | static int prepare_display_buf(struct VP9Decoder_s *pbi, |
854 | struct PIC_BUFFER_CONFIG_s *pic_config); |
855 | |
856 | static void fill_frame_info(struct VP9Decoder_s *pbi, |
857 | struct PIC_BUFFER_CONFIG_s *frame, |
858 | unsigned int framesize, |
859 | unsigned int pts); |
860 | |
861 | static struct PIC_BUFFER_CONFIG_s *get_frame_new_buffer(struct VP9_Common_s *cm) |
862 | { |
863 | return &cm->buffer_pool->frame_bufs[cm->new_fb_idx].buf; |
864 | } |
865 | |
866 | static void ref_cnt_fb(struct RefCntBuffer_s *bufs, int *idx, int new_idx) |
867 | { |
868 | const int ref_index = *idx; |
869 | |
870 | if (ref_index >= 0 && bufs[ref_index].ref_count > 0) { |
871 | bufs[ref_index].ref_count--; |
872 | /*pr_info("[MMU DEBUG 2] dec ref_count[%d] : %d\r\n", |
873 | * ref_index, bufs[ref_index].ref_count); |
874 | */ |
875 | } |
876 | |
877 | *idx = new_idx; |
878 | |
879 | bufs[new_idx].ref_count++; |
880 | /*pr_info("[MMU DEBUG 3] inc ref_count[%d] : %d\r\n", |
881 | * new_idx, bufs[new_idx].ref_count); |
882 | */ |
883 | } |
884 | |
885 | int vp9_release_frame_buffer(struct vpx_codec_frame_buffer_s *fb) |
886 | { |
887 | struct InternalFrameBuffer_s *const int_fb = |
888 | (struct InternalFrameBuffer_s *)fb->priv; |
889 | if (int_fb) |
890 | int_fb->in_use = 0; |
891 | return 0; |
892 | } |
893 | |
894 | static int compute_losless_comp_body_size(int width, int height, |
895 | uint8_t is_bit_depth_10); |
896 | |
897 | static void setup_display_size(struct VP9_Common_s *cm, union param_u *params, |
898 | int print_header_info) |
899 | { |
900 | cm->display_width = cm->width; |
901 | cm->display_height = cm->height; |
902 | if (params->p.display_size_present) { |
903 | if (print_header_info) |
904 | pr_info(" * 1-bit display_size_present read : 1\n"); |
905 | cm->display_width = params->p.display_width; |
906 | cm->display_height = params->p.display_height; |
907 | /*vp9_read_frame_size(rb, &cm->display_width, |
908 | * &cm->display_height); |
909 | */ |
910 | } else { |
911 | if (print_header_info) |
912 | pr_info(" * 1-bit display_size_present read : 0\n"); |
913 | } |
914 | } |
915 | |
916 | |
uint8_t print_header_info = 0;	/* verbose frame-header parsing when non-zero */

/* A simple sub-range [buf_start, buf_end) of the decoder work space. */
struct buff_s {
	u32 buf_start;
	u32 buf_size;
	u32 buf_end;
} buff_t; /* NOTE(review): global variable, likely meant as a typedef */
924 | |
/*
 * Layout of the decoder work space: one buff_s per hardware working
 * area, all carved out of the [start_adr, end_adr) region.
 */
struct BuffInfo_s {
	u32 max_width;		/* stream geometry this layout supports */
	u32 max_height;
	u32 start_adr;		/* work-space region bounds */
	u32 end_adr;
	struct buff_s ipp;
	struct buff_s sao_abv;
	struct buff_s sao_vb;
	struct buff_s short_term_rps;
	struct buff_s vps;
	struct buff_s sps;
	struct buff_s pps;
	struct buff_s sao_up;
	struct buff_s swap_buf;
	struct buff_s swap_buf2;
	struct buff_s scalelut;
	struct buff_s dblk_para;
	struct buff_s dblk_data;
	struct buff_s seg_map;
	struct buff_s mmu_vbh;
	struct buff_s cm_header;
	struct buff_s mpred_above;
#ifdef MV_USE_FIXED_BUF
	struct buff_s mpred_mv;
#endif
	struct buff_s rpm;
	struct buff_s lmem;
} BuffInfo_t; /* NOTE(review): global variable, likely meant as a typedef */
#ifdef MULTI_INSTANCE_SUPPORT
/* dec_result codes handed from the ISR/timeout paths to vp9_work() */
#define DEC_RESULT_NONE 0
#define DEC_RESULT_DONE 1
#define DEC_RESULT_AGAIN 2
#define DEC_RESULT_CONFIG_PARAM 3
#define DEC_RESULT_ERROR 4
#define DEC_INIT_PICLIST 5
#define DEC_UNINIT_PICLIST 6
#define DEC_RESULT_GET_DATA 7
#define DEC_RESULT_GET_DATA_RETRY 8
#define DEC_RESULT_EOS 9
#define DEC_RESULT_FORCE_EXIT 10
#define DEC_RESULT_NEED_MORE_BUFFER 11
#define DEC_V4L2_CONTINUE_DECODING 18

/* stage-1 (front-end) results for two-stage decoding */
#define DEC_S1_RESULT_NONE 0
#define DEC_S1_RESULT_DONE 1
#define DEC_S1_RESULT_FORCE_EXIT 2
#define DEC_S1_RESULT_TEST_TRIGGER_DONE 0xf0

#ifdef FB_DECODING_TEST_SCHEDULE
/* synthetic states used only by the test scheduler */
#define TEST_SET_NONE 0
#define TEST_SET_PIC_DONE 1
#define TEST_SET_S2_DONE 2
#endif

static void vp9_work(struct work_struct *work);
#endif
struct loop_filter_info_n;
struct loopfilter;
struct segmentation;

#ifdef SUPPORT_FB_DECODING
static void mpred_process(struct VP9Decoder_s *pbi);
static void vp9_s1_work(struct work_struct *work);

/* snapshot of the RPM parameter region captured by stage-1 parsing */
struct stage_buf_s {
	int index;
	unsigned short rpm[RPM_END - RPM_BEGIN];
};

/* per-instance counters for the two-stage (front/back) scheduler */
static unsigned int not_run2_ready[MAX_DECODE_INSTANCE_NUM];

static unsigned int run2_count[MAX_DECODE_INSTANCE_NUM];

/* number of stage buffers in use; zero disables two-stage decoding */
#ifdef FB_DECODING_TEST_SCHEDULE
u32 stage_buf_num; /* = 16;*/
#else
u32 stage_buf_num;
#endif
#endif
1004 | |
/*
 * Per-instance state for one VP9 decoder.  Bundles scheduler/work-queue
 * state, buffer management (bmmu/mmu boxes, MV buffers, frame pool),
 * PTS tracking, the vframe queues handed to the display path, and the
 * optional two-stage (front/back) decoding state.
 */
struct VP9Decoder_s {
#ifdef MULTI_INSTANCE_SUPPORT
	unsigned char index;

	struct device *cma_dev;
	struct platform_device *platform_dev;
	/* scheduler callback invoked by trigger_schedule() */
	void (*vdec_cb)(struct vdec_s *, void *);
	void *vdec_cb_arg;
	struct vframe_chunk_s *chunk;
	int dec_result;
	struct work_struct work;
	struct work_struct recycle_mmu_work;
	struct work_struct set_clk_work;
	u32 start_shift_bytes;

	struct BuffInfo_s work_space_buf_store;
	unsigned long buf_start;
	u32 buf_size;
	u32 cma_alloc_count;
	unsigned long cma_alloc_addr;
	uint8_t eos;
	/* decode-timeout watchdog state (see start/reset_process_time) */
	unsigned long int start_process_time;
	unsigned last_lcu_idx;
	int decode_timeout_count;
	unsigned timeout_num;
	int save_buffer_mode;

	int double_write_mode;
#endif
	long used_4k_num;

	unsigned char m_ins_flag;
	char *provider_name;
	union param_u param;
	int frame_count;
	int pic_count;
	u32 stat;
	struct timer_list timer;
	u32 frame_dur;
	u32 frame_ar;
	int fatal_error;
	uint8_t init_flag;
	uint8_t first_sc_checked;
	uint8_t process_busy;
#define PROC_STATE_INIT 0
#define PROC_STATE_DECODESLICE 1
#define PROC_STATE_SENDAGAIN 2
	uint8_t process_state;
	u32 ucode_pause_pos;

	int show_frame_num;
	struct buff_s mc_buf_spec;
	struct dec_sysinfo vvp9_amstream_dec_info;
	/* DMA-coherent areas shared with the ucode (RPM / LMEM mailboxes) */
	void *rpm_addr;
	void *lmem_addr;
	dma_addr_t rpm_phy_addr;
	dma_addr_t lmem_phy_addr;
	unsigned short *lmem_ptr;
	unsigned short *debug_ptr;

	void *prob_buffer_addr;
	void *count_buffer_addr;
	dma_addr_t prob_buffer_phy_addr;
	dma_addr_t count_buffer_phy_addr;

	void *frame_mmu_map_addr;
	dma_addr_t frame_mmu_map_phy_addr;

	unsigned int use_cma_flag;

	/* frame buffers and colocated-MV buffers */
	struct BUF_s m_BUF[MAX_BUF_NUM];
	struct MVBUF_s m_mv_BUF[MV_BUFFER_NUM];
	u32 used_buf_num;
	/* vframe flow: newframe -> (decode) -> pending/display -> recycled */
	DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
	DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
	DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
	struct vframe_s vfpool[VF_POOL_SIZE];
	u32 vf_pre_count;
	u32 vf_get_count;
	u32 vf_put_count;
	int buf_num;
	int pic_num;
	int lcu_size_log2;
	unsigned int losless_comp_body_size;

	u32 video_signal_type;

	/* PTS bookkeeping and pts-mode switching heuristics */
	int pts_mode;
	int last_lookup_pts;
	int last_pts;
	u64 last_lookup_pts_us64;
	u64 last_pts_us64;
	u64 shift_byte_count;

	u32 pts_unstable;
	u32 frame_cnt_window;
	u32 pts1, pts2;
	u32 last_duration;
	u32 duration_from_pts_done;
	bool vp9_first_pts_ready;

	u32 shift_byte_count_lo;
	u32 shift_byte_count_hi;
	int pts_mode_switching_count;
	int pts_mode_recovery_count;

	bool get_frame_dur;
	u32 saved_resolution;

	/* libvpx-derived buffer-manager state */
	struct VP9_Common_s common;
	struct RefCntBuffer_s *cur_buf;
	int refresh_frame_flags;
	uint8_t need_resync;
	uint8_t hold_ref_buf;
	uint8_t ready_for_new_data;
	struct BufferPool_s vp9_buffer_pool;

	struct BuffInfo_s *work_space_buf;

	struct buff_s *mc_buf;

	unsigned int frame_width;
	unsigned int frame_height;

	unsigned short *rpm_ptr;
	int init_pic_w;
	int init_pic_h;
	int lcu_total;
	int lcu_size;

	int slice_type;

	int skip_flag;
	int decode_idx;
	int slice_idx;
	uint8_t has_keyframe;
	uint8_t wait_buf;
	uint8_t error_flag;

	/* bit 0, for decoding; bit 1, for displaying */
	uint8_t ignore_bufmgr_error;
	int PB_skip_mode;
	int PB_skip_count_after_decoding;
	/*hw*/

	/*lf*/
	int default_filt_lvl;
	struct loop_filter_info_n *lfi;
	struct loopfilter *lf;
	struct segmentation *seg_4lf;
	/**/
	struct vdec_info *gvs;

	u32 pre_stream_offset;

	unsigned int dec_status;
	u32 last_put_idx;
	int new_frame_displayed;
	void *mmu_box;
	void *bmmu_box;
	int mmu_enable;
	struct vframe_master_display_colour_s vf_dp;
	struct firmware_s *fw;
	int max_pic_w;
	int max_pic_h;
#ifdef SUPPORT_FB_DECODING
	/* two-stage decoding: stage-1 parse results consumed by stage-2 */
	int dec_s1_result;
	int s1_test_cmd;
	struct work_struct s1_work;
	int used_stage_buf_num;
	int s1_pos;
	int s2_pos;
	void *stage_mmu_map_addr;
	dma_addr_t stage_mmu_map_phy_addr;
	struct stage_buf_s *s1_buf;
	struct stage_buf_s *s2_buf;
	struct stage_buf_s *stage_bufs
		[STAGE_MAX_BUFFERS];
	unsigned char run2_busy;

	int s1_mv_buf_index;
	int s1_mv_buf_index_pre;
	int s1_mv_buf_index_pre_pre;
	unsigned long s1_mpred_mv_wr_start_addr;
	unsigned long s1_mpred_mv_wr_start_addr_pre;
	unsigned short s1_intra_only;
	unsigned short s1_frame_type;
	unsigned short s1_width;
	unsigned short s1_height;
	unsigned short s1_last_show_frame;
	union param_u s1_param;
	u8 back_not_run_ready;
#endif
	int need_cache_size;
	u64 sc_start_time;
	bool postproc_done;
	int low_latency_flag;
	bool no_head;
	bool pic_list_init_done;
	bool pic_list_init_done2;
	/* v4l2 stateless-decoder integration */
	bool is_used_v4l;
	void *v4l2_ctx;
	bool v4l_params_parsed;
	int frameinfo_enable;
	struct vframe_qos_s vframe_qos;
	u32 mem_map_mode;
	u32 dynamic_buf_num_margin;
	struct vframe_s vframe_dummy;
	unsigned int res_ch_flag;
	/*struct VP9Decoder_s vp9_decoder;*/
	union param_u vp9_param;
	int sidebind_type;
	int sidebind_channel_id;
	bool enable_fence;
	int fence_usage;
	u32 frame_mode_pts_save[FRAME_BUFFERS];
	u64 frame_mode_pts64_save[FRAME_BUFFERS];
	int run_ready_min_buf_num;
	int one_package_frame_cnt;
};
1226 | |
1227 | static int vp9_print(struct VP9Decoder_s *pbi, |
1228 | int flag, const char *fmt, ...) |
1229 | { |
1230 | #define HEVC_PRINT_BUF 256 |
1231 | unsigned char buf[HEVC_PRINT_BUF]; |
1232 | int len = 0; |
1233 | |
1234 | if (pbi == NULL || |
1235 | (flag == 0) || |
1236 | (debug & flag)) { |
1237 | va_list args; |
1238 | |
1239 | va_start(args, fmt); |
1240 | if (pbi) |
1241 | len = sprintf(buf, "[%d]", pbi->index); |
1242 | vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args); |
1243 | pr_debug("%s", buf); |
1244 | va_end(args); |
1245 | } |
1246 | return 0; |
1247 | } |
1248 | |
1249 | static int is_oversize(int w, int h) |
1250 | { |
1251 | int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)? |
1252 | MAX_SIZE_8K : MAX_SIZE_4K; |
1253 | |
1254 | if (w <= 0 || h <= 0) |
1255 | return true; |
1256 | |
1257 | if (h != 0 && (w > max / h)) |
1258 | return true; |
1259 | |
1260 | return false; |
1261 | } |
1262 | |
1263 | static int v4l_alloc_and_config_pic(struct VP9Decoder_s *pbi, |
1264 | struct PIC_BUFFER_CONFIG_s *pic); |
1265 | |
1266 | static void resize_context_buffers(struct VP9Decoder_s *pbi, |
1267 | struct VP9_Common_s *cm, int width, int height) |
1268 | { |
1269 | if (cm->width != width || cm->height != height) { |
1270 | /* to do ..*/ |
1271 | if (pbi != NULL) { |
1272 | pbi->vp9_first_pts_ready = 0; |
1273 | pbi->duration_from_pts_done = 0; |
1274 | } |
1275 | pr_info("%s (%d,%d)=>(%d,%d)\r\n", __func__, cm->width, |
1276 | cm->height, width, height); |
1277 | cm->width = width; |
1278 | cm->height = height; |
1279 | } |
1280 | /* |
1281 | *if (cm->cur_frame->mvs == NULL || |
1282 | * cm->mi_rows > cm->cur_frame->mi_rows || |
1283 | * cm->mi_cols > cm->cur_frame->mi_cols) { |
1284 | * resize_mv_buffer(cm); |
1285 | *} |
1286 | */ |
1287 | } |
1288 | |
/*
 * A reference frame is usable when the new frame is at most 2x smaller
 * and at most 16x larger than it in each dimension.
 */
static int valid_ref_frame_size(int ref_width, int ref_height,
	int this_width, int this_height) {
	if (ref_width > 2 * this_width || ref_height > 2 * this_height)
		return 0;
	if (this_width > 16 * ref_width || this_height > 16 * ref_height)
		return 0;
	return 1;
}
1296 | |
1297 | /* |
1298 | *static int valid_ref_frame_img_fmt(enum vpx_bit_depth_t ref_bit_depth, |
1299 | * int ref_xss, int ref_yss, |
1300 | * enum vpx_bit_depth_t this_bit_depth, |
1301 | * int this_xss, int this_yss) { |
1302 | * return ref_bit_depth == this_bit_depth && ref_xss == this_xss && |
1303 | * ref_yss == this_yss; |
1304 | *} |
1305 | */ |
1306 | |
1307 | |
/*
 * Parse the coded frame size from the ucode-reported parameters and
 * program it into the decoder: writes the parser picture-size register,
 * allocates compressed-frame MMU pages for the new frame buffer when
 * MMU mode is active, and propagates the size into the common state and
 * the new frame-buffer entry.
 * Returns 0 on success, -1 on bad size / missing buffer, or the
 * vp9_alloc_mmu() error code.
 */
static int setup_frame_size(
	struct VP9Decoder_s *pbi,
	struct VP9_Common_s *cm, union param_u *params,
	unsigned int *mmu_index_adr,
	int print_header_info) {
	int width, height;
	struct BufferPool_s * const pool = cm->buffer_pool;
	struct PIC_BUFFER_CONFIG_s *ybf;
	int ret = 0;

	width = params->p.width;
	height = params->p.height;
	if (is_oversize(width, height)) {
		vp9_print(pbi, 0, "%s, Error: Invalid frame size\n", __func__);
		return -1;
	}

	/*vp9_read_frame_size(rb, &width, &height);*/
	if (print_header_info)
		pr_info(" * 16-bits w read : %d (width : %d)\n", width, height);
	if (print_header_info)
		pr_info
		(" * 16-bits h read : %d (height : %d)\n", width, height);

	/* height in the upper 16 bits, width in the lower */
	WRITE_VREG(HEVC_PARSER_PICTURE_SIZE, (height << 16) | width);
#ifdef VP9_10B_HED_FB
	WRITE_VREG(HEVC_ASSIST_PIC_SIZE_FB_READ, (height << 16) | width);
#endif
	/* dw mode 0x10 means double-write only: no compressed-frame MMU */
	if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) {
		ret = vp9_alloc_mmu(pbi,
			cm->new_fb_idx,
			params->p.width,
			params->p.height,
			params->p.bit_depth,
			mmu_index_adr);
		if (ret != 0) {
			pr_err("can't alloc need mmu1,idx %d ret =%d\n",
				cm->new_fb_idx,
				ret);
			return ret;
		}
		cm->cur_fb_idx_mmu = cm->new_fb_idx;
	}

	resize_context_buffers(pbi, cm, width, height);
	setup_display_size(cm, params, print_header_info);
#if 0
	lock_buffer_pool(pool);
	if (vp9_realloc_frame_buffer(
		get_frame_new_buffer(cm), cm->width, cm->height,
		cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
		cm->use_highbitdepth,
#endif
		VP9_DEC_BORDER_IN_PIXELS,
		cm->byte_alignment,
		&pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer,
		pool->get_fb_cb, pool->cb_priv)) {
		unlock_buffer_pool(pool);
		vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
			"Failed to allocate frame buffer");
	}
	unlock_buffer_pool(pool);
#else
	/* porting: hardware owns the pixels, only record the geometry */
	ybf = get_frame_new_buffer(cm);
	if (!ybf)
		return -1;

	ybf->y_crop_width = width;
	ybf->y_crop_height = height;
	ybf->bit_depth = params->p.bit_depth;
#endif
	pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
	pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
	pool->frame_bufs[cm->new_fb_idx].buf.bit_depth =
		(unsigned int)cm->bit_depth;
	pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
	return ret;
}
1388 | |
/*
 * Like setup_frame_size(), but for inter frames: when a same_frame_size
 * bit is set for one of the REFS_PER_FRAME references, the size is
 * inherited from that reference frame instead of being read from the
 * header.  Also validates that at least one reference has dimensions
 * compatible with the new size (per the VP9 spec scaling limits checked
 * by valid_ref_frame_size()).
 * Returns 0 on success, -1 on invalid sizes, or the vp9_alloc_mmu()
 * error code.
 */
static int setup_frame_size_with_refs(
	struct VP9Decoder_s *pbi,
	struct VP9_Common_s *cm,
	union param_u *params,
	unsigned int *mmu_index_adr,
	int print_header_info) {

	int width, height;
	int found = 0, i;
	int has_valid_ref_frame = 0;
	struct PIC_BUFFER_CONFIG_s *ybf;
	struct BufferPool_s * const pool = cm->buffer_pool;
	int ret = 0;

	/* same_frame_size bit i (MSB-first) selects reference i's size */
	for (i = 0; i < REFS_PER_FRAME; ++i) {
		if ((params->p.same_frame_size >>
			(REFS_PER_FRAME - i - 1)) & 0x1) {
			struct PIC_BUFFER_CONFIG_s *const buf =
				cm->frame_refs[i].buf;
			/*if (print_header_info)
			 *	pr_info
			 *	("1-bit same_frame_size[%d] read : 1\n", i);
			 */
			width = buf->y_crop_width;
			height = buf->y_crop_height;
			/*if (print_header_info)
			 *	pr_info
			 *	(" - same_frame_size width : %d\n", width);
			 */
			/*if (print_header_info)
			 *	pr_info
			 *	(" - same_frame_size height : %d\n", height);
			 */
			found = 1;
			break;
		} else {
			/*if (print_header_info)
			 *	pr_info
			 *	("1-bit same_frame_size[%d] read : 0\n", i);
			 */
		}
	}

	if (!found) {
		/* explicit size from the header parameters */
		/*vp9_read_frame_size(rb, &width, &height);*/
		width = params->p.width;
		height = params->p.height;
		/*if (print_header_info)
		 *	pr_info
		 *	(" * 16-bits w read : %d (width : %d)\n",
		 *	width, height);
		 *if (print_header_info)
		 *	pr_info
		 *	(" * 16-bits h read : %d (height : %d)\n",
		 *	width, height);
		 */
	}

	if (is_oversize(width, height)) {
		vp9_print(pbi, 0, "%s, Error: Invalid frame size\n", __func__);
		return -1;
	}

	/* write the resolved size back so later stages see one source */
	params->p.width = width;
	params->p.height = height;

	WRITE_VREG(HEVC_PARSER_PICTURE_SIZE, (height << 16) | width);
	if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) {
		/*if(cm->prev_fb_idx >= 0) release_unused_4k(cm->prev_fb_idx);
		 *cm->prev_fb_idx = cm->new_fb_idx;
		 */
		/* pr_info
		 *	("[DEBUG DEBUG]Before alloc_mmu,
		 *	prev_fb_idx : %d, new_fb_idx : %d\r\n",
		 *	cm->prev_fb_idx, cm->new_fb_idx);
		 */
		ret = vp9_alloc_mmu(pbi, cm->new_fb_idx,
			params->p.width, params->p.height,
			params->p.bit_depth, mmu_index_adr);
		if (ret != 0) {
			pr_err("can't alloc need mmu,idx %d\r\n",
				cm->new_fb_idx);
			return ret;
		}
		cm->cur_fb_idx_mmu = cm->new_fb_idx;
	}

	/*Check to make sure at least one of frames that this frame references
	 *has valid dimensions.
	 */
	for (i = 0; i < REFS_PER_FRAME; ++i) {
		struct RefBuffer_s * const ref_frame = &cm->frame_refs[i];

		has_valid_ref_frame |=
			valid_ref_frame_size(ref_frame->buf->y_crop_width,
				ref_frame->buf->y_crop_height,
				width, height);
	}
	if (!has_valid_ref_frame) {
		pr_err("Error: Referenced frame has invalid size\r\n");
		return -1;
	}
#if 0
	/*
	 * NOTE(review): if this block were ever enabled, the un-braced
	 * "return -1;" after pr_err executes unconditionally on the first
	 * iteration — it needs braces around the if body before use.
	 */
	for (i = 0; i < REFS_PER_FRAME; ++i) {
		struct RefBuffer_s * const ref_frame =
			&cm->frame_refs[i];
		if (!valid_ref_frame_img_fmt(
			ref_frame->buf->bit_depth,
			ref_frame->buf->subsampling_x,
			ref_frame->buf->subsampling_y,
			cm->bit_depth,
			cm->subsampling_x,
			cm->subsampling_y))
			pr_err
			("Referenced frame incompatible color fmt\r\n");
			return -1;
	}
#endif
	resize_context_buffers(pbi, cm, width, height);
	setup_display_size(cm, params, print_header_info);

#if 0
	lock_buffer_pool(pool);
	if (vp9_realloc_frame_buffer(
		get_frame_new_buffer(cm), cm->width, cm->height,
		cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
		cm->use_highbitdepth,
#endif
		VP9_DEC_BORDER_IN_PIXELS,
		cm->byte_alignment,
		&pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer,
		pool->get_fb_cb,
		pool->cb_priv)) {
		unlock_buffer_pool(pool);
		vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
			"Failed to allocate frame buffer");
	}
	unlock_buffer_pool(pool);
#else
	/* porting: hardware owns the pixels, only record the geometry */
	ybf = get_frame_new_buffer(cm);
	if (!ybf)
		return -1;

	ybf->y_crop_width = width;
	ybf->y_crop_height = height;
	ybf->bit_depth = params->p.bit_depth;
#endif
	pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
	pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
	pool->frame_bufs[cm->new_fb_idx].buf.bit_depth =
		(unsigned int)cm->bit_depth;
	pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
	return ret;
}
1545 | |
/* true when a and b differ by strictly less than margin m */
static inline bool close_to(int a, int b, int m)
{
	return abs(a - b) < m;
}
1550 | |
1551 | #ifdef MULTI_INSTANCE_SUPPORT |
1552 | static int vp9_print_cont(struct VP9Decoder_s *pbi, |
1553 | int flag, const char *fmt, ...) |
1554 | { |
1555 | unsigned char buf[HEVC_PRINT_BUF]; |
1556 | int len = 0; |
1557 | |
1558 | if (pbi == NULL || |
1559 | (flag == 0) || |
1560 | (debug & flag)) { |
1561 | va_list args; |
1562 | |
1563 | va_start(args, fmt); |
1564 | vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args); |
1565 | pr_debug("%s", buf); |
1566 | va_end(args); |
1567 | } |
1568 | return 0; |
1569 | } |
1570 | |
/*
 * Ask the vdec core to schedule this instance again via the registered
 * callback.  In v4l2 mode, first issue a frame-sync write toward the
 * v4l layer if ucode-supplied parameter sets have not been parsed yet.
 */
static void trigger_schedule(struct VP9Decoder_s *pbi)
{
	if (pbi->is_used_v4l) {
		struct aml_vcodec_ctx *ctx =
			(struct aml_vcodec_ctx *)(pbi->v4l2_ctx);

		if (ctx->param_sets_from_ucode &&
			!pbi->v4l_params_parsed)
			vdec_v4l_write_frame_sync(ctx);
	}

	if (pbi->vdec_cb)
		pbi->vdec_cb(hw_to_vdec(pbi), pbi->vdec_cb_arg);
}
1585 | |
1586 | static void reset_process_time(struct VP9Decoder_s *pbi) |
1587 | { |
1588 | if (pbi->start_process_time) { |
1589 | unsigned process_time = |
1590 | 1000 * (jiffies - pbi->start_process_time) / HZ; |
1591 | pbi->start_process_time = 0; |
1592 | if (process_time > max_process_time[pbi->index]) |
1593 | max_process_time[pbi->index] = process_time; |
1594 | } |
1595 | } |
1596 | |
1597 | static void start_process_time(struct VP9Decoder_s *pbi) |
1598 | { |
1599 | pbi->start_process_time = jiffies; |
1600 | pbi->decode_timeout_count = 0; |
1601 | pbi->last_lcu_idx = 0; |
1602 | } |
1603 | |
/*
 * Watchdog handler for a stalled decode: stop the HEVC/VP9 core, mark
 * the current picture done (DEC_RESULT_DONE), clear the stopwatch and
 * schedule vp9_work() to finish the frame on the work queue.
 */
static void timeout_process(struct VP9Decoder_s *pbi)
{
	pbi->timeout_num++;
	amhevc_stop();
	vp9_print(pbi,
		0, "%s decoder timeout\n", __func__);

	pbi->dec_result = DEC_RESULT_DONE;
	reset_process_time(pbi);
	vdec_schedule_work(&pbi->work);
}
1615 | |
1616 | static u32 get_valid_double_write_mode(struct VP9Decoder_s *pbi) |
1617 | { |
1618 | return ((double_write_mode & 0x80000000) == 0) ? |
1619 | pbi->double_write_mode : |
1620 | (double_write_mode & 0x7fffffff); |
1621 | } |
1622 | |
1623 | static int v4l_parser_get_double_write_mode(struct VP9Decoder_s *pbi) |
1624 | { |
1625 | u32 valid_dw_mode = get_valid_double_write_mode(pbi); |
1626 | u32 dw; |
1627 | int w, h; |
1628 | |
1629 | /* mask for supporting double write value bigger than 0x100 */ |
1630 | if (valid_dw_mode & 0xffffff00) { |
1631 | w = pbi->frame_width; |
1632 | h = pbi->frame_height; |
1633 | |
1634 | dw = 0x1; /*1:1*/ |
1635 | switch (valid_dw_mode) { |
1636 | case 0x100: |
1637 | if (w > 1920 && h > 1088) |
1638 | dw = 0x4; /*1:2*/ |
1639 | break; |
1640 | case 0x200: |
1641 | if (w > 1920 && h > 1088) |
1642 | dw = 0x2; /*1:4*/ |
1643 | break; |
1644 | case 0x300: |
1645 | if (w > 1280 && h > 720) |
1646 | dw = 0x4; /*1:2*/ |
1647 | break; |
1648 | default: |
1649 | break; |
1650 | } |
1651 | return dw; |
1652 | } |
1653 | |
1654 | return valid_dw_mode; |
1655 | } |
1656 | |
1657 | |
/*
 * Effective double-write mode for the current frame.  Values below
 * 0x100 are used directly; the auto modes 0x100/0x200/0x300 pick the
 * down-scale ratio from the current frame's cropped size.  Falls back
 * to 1 (1:1) when no current frame exists yet.
 */
static int get_double_write_mode(struct VP9Decoder_s *pbi)
{
	u32 valid_dw_mode = get_valid_double_write_mode(pbi);
	u32 dw;
	int w, h;
	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *cur_pic_config;

	/* mask for supporting double write value bigger than 0x100 */
	if (valid_dw_mode & 0xffffff00) {
		if (!cm->cur_frame)
			return 1;/*no valid frame,*/
		cur_pic_config = &cm->cur_frame->buf;
		w = cur_pic_config->y_crop_width;
		h = cur_pic_config->y_crop_height;

		dw = 0x1; /*1:1*/
		switch (valid_dw_mode) {
		case 0x100:
			/* 1:2 down-scale only above 1080p */
			if (w > 1920 && h > 1088)
				dw = 0x4; /*1:2*/
			break;
		case 0x200:
			/* 1:4 down-scale only above 1080p */
			if (w > 1920 && h > 1088)
				dw = 0x2; /*1:4*/
			break;
		case 0x300:
			/* 1:2 down-scale only above 720p */
			if (w > 1280 && h > 720)
				dw = 0x4; /*1:2*/
			break;
		default:
			break;
		}
		return dw;
	}

	return valid_dw_mode;
}
1696 | |
1697 | /* for double write buf alloc */ |
1698 | static int get_double_write_mode_init(struct VP9Decoder_s *pbi) |
1699 | { |
1700 | u32 valid_dw_mode = get_valid_double_write_mode(pbi); |
1701 | u32 dw; |
1702 | int w = pbi->init_pic_w; |
1703 | int h = pbi->init_pic_h; |
1704 | |
1705 | dw = 0x1; /*1:1*/ |
1706 | switch (valid_dw_mode) { |
1707 | case 0x100: |
1708 | if (w > 1920 && h > 1088) |
1709 | dw = 0x4; /*1:2*/ |
1710 | break; |
1711 | case 0x200: |
1712 | if (w > 1920 && h > 1088) |
1713 | dw = 0x2; /*1:4*/ |
1714 | break; |
1715 | case 0x300: |
1716 | if (w > 1280 && h > 720) |
1717 | dw = 0x4; /*1:2*/ |
1718 | break; |
1719 | default: |
1720 | dw = valid_dw_mode; |
1721 | break; |
1722 | } |
1723 | return dw; |
1724 | } |
1725 | #endif |
1726 | |
/*
 * Down-scale ratio implied by a double-write mode: modes 2/3 write at
 * 1:4, mode 4 at 1:2, anything else at 1:1.  pbi is not used; it is
 * kept for interface compatibility with callers.
 */
static int get_double_write_ratio(struct VP9Decoder_s *pbi,
	int dw_mode)
{
	switch (dw_mode) {
	case 2:
	case 3:
		return 4;
	case 4:
		return 2;
	default:
		return 1;
	}
}
1738 | |
1739 | //#define MAX_4K_NUM 0x1200 |
1740 | |
/*
 * Allocate the 4KB MMU pages that back the compressed (loss-less)
 * reference frame cur_buf_idx.  The page count is derived from the
 * compressed body size for the given resolution and bit depth, and is
 * bounded by the platform maximum (8K-class on SM1+, 4K-class before).
 * No-op (returns 0) in pure double-write mode (0x10).
 * Returns 0 on success, -1 on error (also sets fatal_error for
 * unsupported 12-bit streams), or the mmu-box allocation result.
 */
int vp9_alloc_mmu(
	struct VP9Decoder_s *pbi,
	int cur_buf_idx,
	int pic_width,
	int pic_height,
	unsigned short bit_depth,
	unsigned int *mmu_index_adr)
{
	int bit_depth_10 = (bit_depth == VPX_BITS_10);
	int picture_size;
	int cur_mmu_4k_number, max_frame_num;
	if (!pbi->mmu_box) {
		pr_err("error no mmu box!\n");
		return -1;
	}
	if (get_double_write_mode(pbi) == 0x10)
		return 0;
	if (bit_depth >= VPX_BITS_12) {
		pbi->fatal_error = DECODER_FATAL_ERROR_SIZE_OVERFLOW;
		pr_err("fatal_error, un support bit depth 12!\n\n");
		return -1;
	}
	picture_size = compute_losless_comp_body_size(pic_width, pic_height,
		bit_depth_10);
	/* round the body size up to whole 4KB pages */
	cur_mmu_4k_number = ((picture_size + (1 << 12) - 1) >> 12);

	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
		max_frame_num = MAX_FRAME_8K_NUM;
	else
		max_frame_num = MAX_FRAME_4K_NUM;

	if (cur_mmu_4k_number > max_frame_num) {
		pr_err("over max !! cur_mmu_4k_number 0x%x width %d height %d\n",
			cur_mmu_4k_number, pic_width, pic_height);
		return -1;
	}

	return decoder_mmu_box_alloc_idx(
		pbi->mmu_box,
		cur_buf_idx,
		cur_mmu_4k_number,
		mmu_index_adr);
}
1784 | |
1785 | |
1786 | #ifndef MV_USE_FIXED_BUF |
1787 | static void dealloc_mv_bufs(struct VP9Decoder_s *pbi) |
1788 | { |
1789 | int i; |
1790 | for (i = 0; i < MV_BUFFER_NUM; i++) { |
1791 | if (pbi->m_mv_BUF[i].start_adr) { |
1792 | if (debug) |
1793 | pr_info( |
1794 | "dealloc mv buf(%d) adr %ld size 0x%x used_flag %d\n", |
1795 | i, pbi->m_mv_BUF[i].start_adr, |
1796 | pbi->m_mv_BUF[i].size, |
1797 | pbi->m_mv_BUF[i].used_flag); |
1798 | decoder_bmmu_box_free_idx( |
1799 | pbi->bmmu_box, |
1800 | MV_BUFFER_IDX(i)); |
1801 | pbi->m_mv_BUF[i].start_adr = 0; |
1802 | pbi->m_mv_BUF[i].size = 0; |
1803 | pbi->m_mv_BUF[i].used_flag = 0; |
1804 | } |
1805 | } |
1806 | } |
1807 | |
/*
 * Ensure colocated-MV buffer i is at least "size" bytes.  If an
 * existing buffer is too small, ALL MV buffers are dropped first
 * (dealloc_mv_bufs) before re-allocating this one; if it is already
 * large enough it is reused as-is.
 * Returns 0 on success, -1 when the bmmu allocation fails.
 */
static int alloc_mv_buf(struct VP9Decoder_s *pbi,
	int i, int size)
{
	int ret = 0;

	if (pbi->m_mv_BUF[i].start_adr &&
		size > pbi->m_mv_BUF[i].size) {
		dealloc_mv_bufs(pbi);
	} else if (pbi->m_mv_BUF[i].start_adr)
		return 0;

	if (decoder_bmmu_box_alloc_buf_phy
		(pbi->bmmu_box,
		MV_BUFFER_IDX(i), size,
		DRIVER_NAME,
		&pbi->m_mv_BUF[i].start_adr) < 0) {
		pbi->m_mv_BUF[i].start_adr = 0;
		ret = -1;
	} else {
		pbi->m_mv_BUF[i].size = size;
		pbi->m_mv_BUF[i].used_flag = 0;
		ret = 0;
		if (debug) {
			pr_info(
			"MV Buffer %d: start_adr %p size %x\n",
			i,
			(void *)pbi->m_mv_BUF[i].start_adr,
			pbi->m_mv_BUF[i].size);
		}
	}
	return ret;
}
1840 | |
/*
 * Pre-allocate the pool of colocated-MV buffers for the initial picture
 * size.  Each buffer covers one frame's worth of 64x64 LCUs
 * (MV_MEM_UNIT bytes per LCU, rounded up to 64KB); the buffer count is
 * REF_FRAMES (+margin), bumped to the 4K variant above 2048x1088.
 * Returns 0 on success, -1 as soon as one allocation fails.
 */
static int init_mv_buf_list(struct VP9Decoder_s *pbi)
{
	int i;
	int ret = 0;
	int count = MV_BUFFER_NUM;
	int pic_width = pbi->init_pic_w;
	int pic_height = pbi->init_pic_h;
	int lcu_size = 64; /*fixed 64*/
	int pic_width_64 = (pic_width + 63) & (~0x3f);
	int pic_height_32 = (pic_height + 31) & (~0x1f);
	/* number of 64-wide LCU columns after 64-pixel width alignment */
	int pic_width_lcu = (pic_width_64 % lcu_size) ?
		pic_width_64 / lcu_size + 1
		: pic_width_64 / lcu_size;
	int pic_height_lcu = (pic_height_32 % lcu_size) ?
		pic_height_32 / lcu_size + 1
		: pic_height_32 / lcu_size;
	int lcu_total = pic_width_lcu * pic_height_lcu;
	/* round each buffer up to a 64KB boundary */
	int size = ((lcu_total * MV_MEM_UNIT) + 0xffff) &
		(~0xffff);
	if (mv_buf_margin > 0)
		count = REF_FRAMES + mv_buf_margin;

	if (pbi->init_pic_w > 2048 && pbi->init_pic_h > 1088)
		count = REF_FRAMES_4K + mv_buf_margin;

	if (debug) {
		pr_info("%s w:%d, h:%d, count: %d\n",
		__func__, pbi->init_pic_w, pbi->init_pic_h, count);
	}

	for (i = 0;
		i < count && i < MV_BUFFER_NUM; i++) {
		if (alloc_mv_buf(pbi, i, size) < 0) {
			ret = -1;
			break;
		}
	}
	return ret;
}
1880 | |
1881 | static int get_mv_buf(struct VP9Decoder_s *pbi, |
1882 | int *mv_buf_index, |
1883 | unsigned long *mpred_mv_wr_start_addr) |
1884 | { |
1885 | int i; |
1886 | int ret = -1; |
1887 | for (i = 0; i < MV_BUFFER_NUM; i++) { |
1888 | if (pbi->m_mv_BUF[i].start_adr && |
1889 | pbi->m_mv_BUF[i].used_flag == 0) { |
1890 | pbi->m_mv_BUF[i].used_flag = 1; |
1891 | ret = i; |
1892 | break; |
1893 | } |
1894 | } |
1895 | |
1896 | if (ret >= 0) { |
1897 | *mv_buf_index = ret; |
1898 | *mpred_mv_wr_start_addr = |
1899 | (pbi->m_mv_BUF[ret].start_adr + 0xffff) & |
1900 | (~0xffff); |
1901 | if (debug & VP9_DEBUG_BUFMGR_MORE) |
1902 | pr_info( |
1903 | "%s => %d (%ld) size 0x%x\n", |
1904 | __func__, ret, |
1905 | *mpred_mv_wr_start_addr, |
1906 | pbi->m_mv_BUF[ret].size); |
1907 | } else { |
1908 | pr_info( |
1909 | "%s: Error, mv buf is not enough\n", |
1910 | __func__); |
1911 | } |
1912 | return ret; |
1913 | } |
1914 | |
1915 | static void put_mv_buf(struct VP9Decoder_s *pbi, |
1916 | int *mv_buf_index) |
1917 | { |
1918 | int i = *mv_buf_index; |
1919 | if (i >= MV_BUFFER_NUM) { |
1920 | if (debug & VP9_DEBUG_BUFMGR_MORE) |
1921 | pr_info( |
1922 | "%s: index %d beyond range\n", |
1923 | __func__, i); |
1924 | return; |
1925 | } |
1926 | if (debug & VP9_DEBUG_BUFMGR_MORE) |
1927 | pr_info( |
1928 | "%s(%d): used_flag(%d)\n", |
1929 | __func__, i, |
1930 | pbi->m_mv_BUF[i].used_flag); |
1931 | |
1932 | *mv_buf_index = -1; |
1933 | if (pbi->m_mv_BUF[i].start_adr && |
1934 | pbi->m_mv_BUF[i].used_flag) |
1935 | pbi->m_mv_BUF[i].used_flag = 0; |
1936 | } |
1937 | |
1938 | static void put_un_used_mv_bufs(struct VP9Decoder_s *pbi) |
1939 | { |
1940 | struct VP9_Common_s *const cm = &pbi->common; |
1941 | struct RefCntBuffer_s *const frame_bufs = cm->buffer_pool->frame_bufs; |
1942 | int i; |
1943 | for (i = 0; i < pbi->used_buf_num; ++i) { |
1944 | if ((frame_bufs[i].ref_count == 0) && |
1945 | (frame_bufs[i].buf.index != -1) && |
1946 | (frame_bufs[i].buf.mv_buf_index >= 0) |
1947 | ) |
1948 | put_mv_buf(pbi, &frame_bufs[i].buf.mv_buf_index); |
1949 | } |
1950 | } |
1951 | |
1952 | #ifdef SUPPORT_FB_DECODING |
1953 | static bool mv_buf_available(struct VP9Decoder_s *pbi) |
1954 | { |
1955 | int i; |
1956 | bool ret = 0; |
1957 | for (i = 0; i < MV_BUFFER_NUM; i++) { |
1958 | if (pbi->m_mv_BUF[i].start_adr && |
1959 | pbi->m_mv_BUF[i].used_flag == 0) { |
1960 | ret = 1; |
1961 | break; |
1962 | } |
1963 | } |
1964 | return ret; |
1965 | } |
1966 | #endif |
1967 | #endif |
1968 | |
1969 | #ifdef SUPPORT_FB_DECODING |
1970 | static void init_stage_buf(struct VP9Decoder_s *pbi) |
1971 | { |
1972 | uint i; |
1973 | for (i = 0; i < STAGE_MAX_BUFFERS |
1974 | && i < stage_buf_num; i++) { |
1975 | pbi->stage_bufs[i] = |
1976 | vmalloc(sizeof(struct stage_buf_s)); |
1977 | if (pbi->stage_bufs[i] == NULL) { |
1978 | vp9_print(pbi, |
1979 | 0, "%s vmalloc fail\n", __func__); |
1980 | break; |
1981 | } |
1982 | pbi->stage_bufs[i]->index = i; |
1983 | } |
1984 | pbi->used_stage_buf_num = i; |
1985 | pbi->s1_pos = 0; |
1986 | pbi->s2_pos = 0; |
1987 | pbi->s1_buf = NULL; |
1988 | pbi->s2_buf = NULL; |
1989 | pbi->s1_mv_buf_index = FRAME_BUFFERS; |
1990 | pbi->s1_mv_buf_index_pre = FRAME_BUFFERS; |
1991 | pbi->s1_mv_buf_index_pre_pre = FRAME_BUFFERS; |
1992 | |
1993 | if (pbi->used_stage_buf_num > 0) |
1994 | vp9_print(pbi, |
1995 | 0, "%s 2 stage decoding buf %d\n", |
1996 | __func__, |
1997 | pbi->used_stage_buf_num); |
1998 | } |
1999 | |
2000 | static void uninit_stage_buf(struct VP9Decoder_s *pbi) |
2001 | { |
2002 | int i; |
2003 | for (i = 0; i < pbi->used_stage_buf_num; i++) { |
2004 | if (pbi->stage_bufs[i]) |
2005 | vfree(pbi->stage_bufs[i]); |
2006 | pbi->stage_bufs[i] = NULL; |
2007 | } |
2008 | pbi->used_stage_buf_num = 0; |
2009 | pbi->s1_pos = 0; |
2010 | pbi->s2_pos = 0; |
2011 | pbi->s1_buf = NULL; |
2012 | pbi->s2_buf = NULL; |
2013 | } |
2014 | |
/*
 * Claim the next stage-1 (front-end/parser output) buffer from the
 * stage ring.  One ring slot is always kept empty so a full ring can
 * be told apart from an empty one: if advancing s1 would land on
 * s2_pos, the ring is full and we fail (return -1, pbi->s1_buf NULL).
 * On success an MMU mapping of MAX_STAGE_PAGE_NUM pages is allocated
 * for the buffer and pbi->s1_buf points at it; returns the
 * decoder_mmu_box_alloc_idx() result.
 *
 * NOTE(review): allocation here uses box index buf->index, while the
 * frees in inc_s1_pos()/inc_s2_pos() use FRAME_BUFFERS + buf->index —
 * confirm the intended index namespace with the mmu_box setup.
 */
static int get_s1_buf(
	struct VP9Decoder_s *pbi)
{
	struct stage_buf_s *buf = NULL;
	int ret = -1;
	int buf_page_num = MAX_STAGE_PAGE_NUM;
	int next_s1_pos = pbi->s1_pos + 1;

	if (next_s1_pos >= pbi->used_stage_buf_num)
		next_s1_pos = 0;
	if (next_s1_pos == pbi->s2_pos) {
		/* ring full */
		pbi->s1_buf = NULL;
		return ret;
	}

	buf = pbi->stage_bufs[pbi->s1_pos];
	ret = decoder_mmu_box_alloc_idx(
		pbi->mmu_box,
		buf->index,
		buf_page_num,
		pbi->stage_mmu_map_addr);
	if (ret < 0) {
		vp9_print(pbi, 0,
			"%s decoder_mmu_box_alloc fail for index %d (s1_pos %d s2_pos %d)\n",
			__func__, buf->index,
			pbi->s1_pos, pbi->s2_pos);
		buf = NULL;
	} else {
		vp9_print(pbi, VP9_DEBUG_2_STAGE,
			"%s decoder_mmu_box_alloc %d page for index %d (s1_pos %d s2_pos %d)\n",
			__func__, buf_page_num, buf->index,
			pbi->s1_pos, pbi->s2_pos);
	}
	pbi->s1_buf = buf;
	return ret;
}
2051 | |
/*
 * Stage-1 parsing of the current frame finished: shrink the MMU
 * mapping of the just-filled stage buffer to the pages the hardware
 * actually wrote, then advance the s1 ring position.
 */
static void inc_s1_pos(struct VP9Decoder_s *pbi)
{
	struct stage_buf_s *buf =
		pbi->stage_bufs[pbi->s1_pos];

	/* pages consumed, taken from the HED FB write pointer; the
	 * test-schedule build fakes half of the maximum instead
	 */
	int used_page_num =
#ifdef FB_DECODING_TEST_SCHEDULE
		MAX_STAGE_PAGE_NUM/2;
#else
		(READ_VREG(HEVC_ASSIST_HED_FB_W_CTL) >> 16);
#endif
	decoder_mmu_box_free_idx_tail(pbi->mmu_box,
		FRAME_BUFFERS + buf->index, used_page_num);

	pbi->s1_pos++;
	if (pbi->s1_pos >= pbi->used_stage_buf_num)
		pbi->s1_pos = 0;

	vp9_print(pbi, VP9_DEBUG_2_STAGE,
		"%s (used_page_num %d) for index %d (s1_pos %d s2_pos %d)\n",
		__func__, used_page_num, buf->index,
		pbi->s1_pos, pbi->s2_pos);
}
2075 | |
/* true while at least one filled stage buffer awaits stage-2 decoding */
#define s2_buf_available(pbi) (pbi->s1_pos != pbi->s2_pos)
2077 | |
2078 | static int get_s2_buf( |
2079 | struct VP9Decoder_s *pbi) |
2080 | { |
2081 | int ret = -1; |
2082 | struct stage_buf_s *buf = NULL; |
2083 | if (s2_buf_available(pbi)) { |
2084 | buf = pbi->stage_bufs[pbi->s2_pos]; |
2085 | vp9_print(pbi, VP9_DEBUG_2_STAGE, |
2086 | "%s for index %d (s1_pos %d s2_pos %d)\n", |
2087 | __func__, buf->index, |
2088 | pbi->s1_pos, pbi->s2_pos); |
2089 | pbi->s2_buf = buf; |
2090 | ret = 0; |
2091 | } |
2092 | return ret; |
2093 | } |
2094 | |
/*
 * Stage-2 decoding of the current stage buffer finished: release its
 * whole MMU mapping and advance the s2 ring position, making the slot
 * reusable by stage-1.
 */
static void inc_s2_pos(struct VP9Decoder_s *pbi)
{
	struct stage_buf_s *buf =
		pbi->stage_bufs[pbi->s2_pos];
	decoder_mmu_box_free_idx(pbi->mmu_box,
		FRAME_BUFFERS + buf->index);
	pbi->s2_pos++;
	if (pbi->s2_pos >= pbi->used_stage_buf_num)
		pbi->s2_pos = 0;
	vp9_print(pbi, VP9_DEBUG_2_STAGE,
		"%s for index %d (s1_pos %d s2_pos %d)\n",
		__func__, buf->index,
		pbi->s1_pos, pbi->s2_pos);
}
2109 | |
2110 | static int get_free_stage_buf_num(struct VP9Decoder_s *pbi) |
2111 | { |
2112 | int num; |
2113 | if (pbi->s1_pos >= pbi->s2_pos) |
2114 | num = pbi->used_stage_buf_num - |
2115 | (pbi->s1_pos - pbi->s2_pos) - 1; |
2116 | else |
2117 | num = (pbi->s2_pos - pbi->s1_pos) - 1; |
2118 | return num; |
2119 | } |
2120 | |
#ifndef FB_DECODING_TEST_SCHEDULE
/* serializes DMC request-control register updates in fb_reset_core() */
static DEFINE_SPINLOCK(fb_core_spin_lock);

/* Placeholder: always reports the back-end as finished until the
 * VLSI team defines the real completion check.
 */
static u8 is_s2_decoding_finished(struct VP9Decoder_s *pbi)
{
	/* to do: VLSI review
	completion of last LCU decoding in BACK
	*/
	return 1;
}
2131 | |
/* Placeholder: intentionally empty until the VLSI team defines how
 * to kick back-end LCU decoding after the parser pass.
 */
static void start_s1_decoding(struct VP9Decoder_s *pbi)
{
	/* to do: VLSI review
	after parser, how to start LCU decoding in BACK
	*/
}
2138 | |
/*
 * Reset the HEVC front-end and/or back-end cores selected by @mask.
 * Sequence: stop the stream fetcher, disconnect the DOS master from
 * the DMC bus (and busy-wait until the channel goes idle), pulse the
 * selected DOS_SW_RESET3 bits, then reconnect the DMC port.
 *
 * NOTE(review): when mask has both HW_MASK_FRONT and HW_MASK_BACK set,
 * the second assignment overwrites reset_bits instead of OR-ing, so
 * only the BACK bits are pulsed — confirm whether that is intended.
 * NOTE(review): HEVC_STREAM_CONTROL is written to 0 twice for the
 * FRONT mask (once unconditionally, once for frame-based input).
 */
static void fb_reset_core(struct vdec_s *vdec, u32 mask)
{
	/* to do: VLSI review
	1. how to disconnect DMC for FRONT and BACK
	2. reset bit 13, 24, FRONT or BACK ??
	*/

	unsigned long flags;
	u32 reset_bits = 0;
	if (mask & HW_MASK_FRONT)
		WRITE_VREG(HEVC_STREAM_CONTROL, 0);
	spin_lock_irqsave(&fb_core_spin_lock, flags);
	/* drop the DOS request-enable bit on the DMC bus */
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & (~(1 << 4)));
	spin_unlock_irqrestore(&fb_core_spin_lock, flags);

	/* busy-wait until the DMC channel reports idle */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& (1 << 4)))
		;

	if ((mask & HW_MASK_FRONT) &&
		input_frame_based(vdec))
		WRITE_VREG(HEVC_STREAM_CONTROL, 0);

		/*
	 * 2: assist
	 * 3: parser
	 * 4: parser_state
	 * 8: dblk
	 * 11:mcpu
	 * 12:ccpu
	 * 13:ddr
	 * 14:iqit
	 * 15:ipp
	 * 17:qdct
	 * 18:mpred
	 * 19:sao
	 * 24:hevc_afifo
	 */
	if (mask & HW_MASK_FRONT) {
		reset_bits =
		(1<<3)|(1<<4)|(1<<11)|
		(1<<12)|(1<<18);
	}
	if (mask & HW_MASK_BACK) {
		reset_bits =
		(1<<8)|(1<<13)|(1<<14)|(1<<15)|
		(1<<17)|(1<<19)|(1<<24);
	}
	WRITE_VREG(DOS_SW_RESET3, reset_bits);
#if 0
		(1<<3)|(1<<4)|(1<<8)|(1<<11)|
		(1<<12)|(1<<13)|(1<<14)|(1<<15)|
		(1<<17)|(1<<18)|(1<<19)|(1<<24);
#endif
	WRITE_VREG(DOS_SW_RESET3, 0);


	spin_lock_irqsave(&fb_core_spin_lock, flags);
	/* re-enable the DOS request bit on the DMC bus */
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | (1 << 4));
	spin_unlock_irqrestore(&fb_core_spin_lock, flags);

}
2203 | #endif |
2204 | |
2205 | #endif |
2206 | |
2207 | static void init_pic_list_hw(struct VP9Decoder_s *pbi); |
2208 | |
/*
 * Find a frame buffer that is free for decoding: not referenced by the
 * decoder (ref_count == 0), not held by the display path (vf_ref == 0)
 * and valid (index != -1).  On success the buffer's ref_count is set
 * to 1 before the pool lock is dropped; returns the buffer index, or
 * INVALID_IDX when none is free.
 */
static int get_free_fb(struct VP9Decoder_s *pbi)
{
	struct VP9_Common_s *const cm = &pbi->common;
	struct RefCntBuffer_s *const frame_bufs = cm->buffer_pool->frame_bufs;
	int i;
	unsigned long flags;

	lock_buffer_pool(cm->buffer_pool, flags);
	if (debug & VP9_DEBUG_BUFMGR_MORE) {
		for (i = 0; i < pbi->used_buf_num; ++i) {
			pr_info("%s:%d, ref_count %d vf_ref %d index %d\r\n",
			__func__, i, frame_bufs[i].ref_count,
			frame_bufs[i].buf.vf_ref,
			frame_bufs[i].buf.index);
		}
	}
	for (i = 0; i < pbi->used_buf_num; ++i) {
		if ((frame_bufs[i].ref_count == 0) &&
			(frame_bufs[i].buf.vf_ref == 0) &&
			(frame_bufs[i].buf.index != -1)
			)
			break;
	}
	if (i != pbi->used_buf_num) {
		/* claim it while still holding the pool lock */
		frame_bufs[i].ref_count = 1;
		/*pr_info("[MMU DEBUG 1] set ref_count[%d] : %d\r\n",
			i, frame_bufs[i].ref_count);*/
	} else {
		/* Reset i to be INVALID_IDX to indicate
			no free buffer found*/
		i = INVALID_IDX;
	}

	unlock_buffer_pool(cm->buffer_pool, flags);
	return i;
}
2245 | |
/*
 * v4l2 variant of get_free_fb(): walk the capture-buffer pool queue.
 * A buffer already owned by the decoder (IN_DEC) is reused when fully
 * unreferenced and its CMA backing is present; a buffer newly handed
 * over from m2m (IN_M2M) is configured (size, canvas, HW pic list)
 * before use.  The chosen buffer's ref_count is set to 1 under the
 * pool lock.  Returns the frame-buffer index or INVALID_IDX.
 *
 * NOTE(review): the IN_DEC branch indexes frame_bufs[i] (queue slot)
 * while IN_M2M uses frame_bufs[index] (decoded id from pool->seq) —
 * confirm the two index spaces really coincide for IN_DEC entries.
 */
static int v4l_get_free_fb(struct VP9Decoder_s *pbi)
{
	struct VP9_Common_s *const cm = &pbi->common;
	struct RefCntBuffer_s *const frame_bufs = cm->buffer_pool->frame_bufs;
	struct aml_vcodec_ctx * v4l = pbi->v4l2_ctx;
	struct v4l_buff_pool *pool = &v4l->cap_pool;
	struct PIC_BUFFER_CONFIG_s *pic = NULL;
	int i, idx = INVALID_IDX;
	ulong flags;

	lock_buffer_pool(cm->buffer_pool, flags);

	for (i = 0; i < pool->in; ++i) {
		/* pool->seq packs the state in the high 16 bits and the
		 * buffer index in the low 16 bits
		 */
		u32 state = (pool->seq[i] >> 16);
		u32 index = (pool->seq[i] & 0xffff);

		switch (state) {
		case V4L_CAP_BUFF_IN_DEC:
			pic = &frame_bufs[i].buf;
			if ((frame_bufs[i].ref_count == 0) &&
				(pic->vf_ref == 0) &&
				(pic->index != -1) &&
				 pic->cma_alloc_addr) {
				idx = i;
			}
			break;
		case V4L_CAP_BUFF_IN_M2M:
			pic = &frame_bufs[index].buf;
			pic->y_crop_width = pbi->frame_width;
			pic->y_crop_height = pbi->frame_height;
			if (!v4l_alloc_and_config_pic(pbi, pic)) {
				set_canvas(pbi, pic);
				init_pic_list_hw(pbi);
				idx = index;
			}
			break;
		default:
			pr_err("v4l buffer state err %d.\n", state);
			break;
		}

		if (idx != INVALID_IDX) {
			/* claim before dropping the pool lock */
			frame_bufs[idx].ref_count = 1;
			break;
		}
	}

	unlock_buffer_pool(cm->buffer_pool, flags);

	return idx;
}
2297 | |
2298 | static int get_free_buf_count(struct VP9Decoder_s *pbi) |
2299 | { |
2300 | struct VP9_Common_s *const cm = &pbi->common; |
2301 | struct RefCntBuffer_s *const frame_bufs = cm->buffer_pool->frame_bufs; |
2302 | int i; |
2303 | int free_buf_count = 0; |
2304 | for (i = 0; i < pbi->used_buf_num; ++i) |
2305 | if ((frame_bufs[i].ref_count == 0) && |
2306 | (frame_bufs[i].buf.vf_ref == 0) && |
2307 | (frame_bufs[i].buf.index != -1) |
2308 | ) |
2309 | free_buf_count++; |
2310 | return free_buf_count; |
2311 | } |
2312 | |
2313 | static void decrease_ref_count(int idx, struct RefCntBuffer_s *const frame_bufs, |
2314 | struct BufferPool_s *const pool) |
2315 | { |
2316 | if (idx >= 0) { |
2317 | --frame_bufs[idx].ref_count; |
2318 | /*pr_info("[MMU DEBUG 7] dec ref_count[%d] : %d\r\n", idx, |
2319 | * frame_bufs[idx].ref_count); |
2320 | */ |
2321 | /*A worker may only get a free framebuffer index when |
2322 | *calling get_free_fb. But the private buffer is not set up |
2323 | *until finish decoding header. So any error happens during |
2324 | *decoding header, the frame_bufs will not have valid priv |
2325 | *buffer. |
2326 | */ |
2327 | |
2328 | if (frame_bufs[idx].ref_count == 0 && |
2329 | frame_bufs[idx].raw_frame_buffer.priv) |
2330 | vp9_release_frame_buffer |
2331 | (&frame_bufs[idx].raw_frame_buffer); |
2332 | } |
2333 | } |
2334 | |
/*
 * Build cm->next_ref_frame_map for the frame being decoded, taking
 * references under the pool lock.  Slots selected by
 * refresh_frame_flags will point at the new frame (new_fb_idx gets
 * one reference per slot); all other slots keep their current buffer.
 * Every still-valid current slot also gets an extra reference so this
 * thread holds it until refresh_ref_frames() commits the map and
 * releases the temporaries.
 */
static void generate_next_ref_frames(struct VP9Decoder_s *pbi)
{
	struct VP9_Common_s *const cm = &pbi->common;
	struct RefCntBuffer_s *frame_bufs = cm->buffer_pool->frame_bufs;
	struct BufferPool_s *const pool = cm->buffer_pool;
	int mask, ref_index = 0;
	unsigned long flags;

	/* Generate next_ref_frame_map.*/
	lock_buffer_pool(pool, flags);
	for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
		if (mask & 1) {
			cm->next_ref_frame_map[ref_index] = cm->new_fb_idx;
			++frame_bufs[cm->new_fb_idx].ref_count;
			/*pr_info("[MMU DEBUG 4] inc ref_count[%d] : %d\r\n",
			*cm->new_fb_idx, frame_bufs[cm->new_fb_idx].ref_count);
			*/
		} else
			cm->next_ref_frame_map[ref_index] =
				cm->ref_frame_map[ref_index];
		/* Current thread holds the reference frame.*/
		if (cm->ref_frame_map[ref_index] >= 0) {
			++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
			/*pr_info
			*("[MMU DEBUG 5] inc ref_count[%d] : %d\r\n",
			*cm->ref_frame_map[ref_index],
			*frame_bufs[cm->ref_frame_map[ref_index]].ref_count);
			*/
		}
		++ref_index;
	}

	/* remaining slots (beyond the highest refresh bit) carry over */
	for (; ref_index < REF_FRAMES; ++ref_index) {
		cm->next_ref_frame_map[ref_index] =
			cm->ref_frame_map[ref_index];
		/* Current thread holds the reference frame.*/
		if (cm->ref_frame_map[ref_index] >= 0) {
			++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
			/*pr_info("[MMU DEBUG 6] inc ref_count[%d] : %d\r\n",
			*cm->ref_frame_map[ref_index],
			*frame_bufs[cm->ref_frame_map[ref_index]].ref_count);
			*/
		}
	}
	unlock_buffer_pool(pool, flags);
	return;
}
2382 | |
/*
 * Commit next_ref_frame_map into ref_frame_map after the frame has
 * been decoded, dropping (under the pool lock) the temporary
 * references taken in generate_next_ref_frames(): one per slot for
 * this thread's hold, plus one more for each slot being replaced by
 * the new frame.  For show_existing_frame the tail slots keep their
 * extra hold (the loop is skipped) since no map was regenerated.
 */
static void refresh_ref_frames(struct VP9Decoder_s *pbi)

{
	struct VP9_Common_s *const cm = &pbi->common;
	struct BufferPool_s *pool = cm->buffer_pool;
	struct RefCntBuffer_s *frame_bufs = cm->buffer_pool->frame_bufs;
	int mask, ref_index = 0;
	unsigned long flags;

	lock_buffer_pool(pool, flags);
	for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
		const int old_idx = cm->ref_frame_map[ref_index];
		/*Current thread releases the holding of reference frame.*/
		decrease_ref_count(old_idx, frame_bufs, pool);

		/*Release the reference frame in reference map.*/
		if ((mask & 1) && old_idx >= 0)
			decrease_ref_count(old_idx, frame_bufs, pool);
		cm->ref_frame_map[ref_index] =
			cm->next_ref_frame_map[ref_index];
		++ref_index;
	}

	/*Current thread releases the holding of reference frame.*/
	for (; ref_index < REF_FRAMES && !cm->show_existing_frame;
		++ref_index) {
		const int old_idx = cm->ref_frame_map[ref_index];

		decrease_ref_count(old_idx, frame_bufs, pool);
		cm->ref_frame_map[ref_index] =
			cm->next_ref_frame_map[ref_index];
	}
	unlock_buffer_pool(pool, flags);
	return;
}
2418 | |
/*
 * Run the buffer-manager side of one frame's uncompressed header
 * (parameters come from the HW parser via @params): wait for the
 * first keyframe, pick a free frame buffer (and mv buffer), handle
 * show_existing_frame, set up frame sizes and reference slots, and
 * pre-compute next_ref_frame_map.
 *
 * Returns:
 *   0  - frame accepted, proceed with decoding
 *   1  - show_existing_frame: nothing to decode, search next start code
 *  -1  - error (bad profile/size, no free buffer, resync required, ...)
 *  -2  - skipped while waiting for the first keyframe
 */
int vp9_bufmgr_process(struct VP9Decoder_s *pbi, union param_u *params)
{
	struct VP9_Common_s *const cm = &pbi->common;
	struct BufferPool_s *pool = cm->buffer_pool;
	struct RefCntBuffer_s *frame_bufs = cm->buffer_pool->frame_bufs;
	struct PIC_BUFFER_CONFIG_s *pic = NULL;
	int i;
	int ret;

	pbi->ready_for_new_data = 0;

	/* drop every frame until the first keyframe arrives */
	if (pbi->has_keyframe == 0 &&
		params->p.frame_type != KEY_FRAME){
		on_no_keyframe_skiped++;
		return -2;
	}
	pbi->has_keyframe = 1;
	on_no_keyframe_skiped = 0;
#if 0
	if (pbi->mmu_enable) {
		if (!pbi->m_ins_flag)
			pbi->used_4k_num = (READ_VREG(HEVC_SAO_MMU_STATUS) >> 16);
		if (cm->prev_fb_idx >= 0) {
			decoder_mmu_box_free_idx_tail(pbi->mmu_box,
				cm->prev_fb_idx, pbi->used_4k_num);
		}
	}
#endif
	/* the previous new frame never got referenced (e.g. header
	 * error): release its raw buffer before picking a new one
	 */
	if (cm->new_fb_idx >= 0
		&& frame_bufs[cm->new_fb_idx].ref_count == 0){
		vp9_release_frame_buffer
			(&frame_bufs[cm->new_fb_idx].raw_frame_buffer);
	}
	/*pr_info("Before get_free_fb, prev_fb_idx : %d, new_fb_idx : %d\r\n",
		cm->prev_fb_idx, cm->new_fb_idx);*/
#ifndef MV_USE_FIXED_BUF
	put_un_used_mv_bufs(pbi);
	if (debug & VP9_DEBUG_BUFMGR_DETAIL)
		dump_pic_list(pbi);
#endif
	cm->new_fb_idx = pbi->is_used_v4l ?
		v4l_get_free_fb(pbi) :
		get_free_fb(pbi);
	if (cm->new_fb_idx == INVALID_IDX) {
		pr_info("get_free_fb error\r\n");
		return -1;
	}

	/* attach an mv buffer, unless 2-stage decoding manages its own */
#ifndef MV_USE_FIXED_BUF
#ifdef SUPPORT_FB_DECODING
	if (pbi->used_stage_buf_num == 0) {
#endif
		if (get_mv_buf(pbi,
			&pool->frame_bufs[cm->new_fb_idx].
			buf.mv_buf_index,
			&pool->frame_bufs[cm->new_fb_idx].
			buf.mpred_mv_wr_start_addr
			) < 0) {
			pr_info("get_mv_buf fail\r\n");
			return -1;
		}
		if (debug & VP9_DEBUG_BUFMGR_DETAIL)
			dump_pic_list(pbi);
#ifdef SUPPORT_FB_DECODING
	}
#endif
#endif
	cm->cur_frame = &pool->frame_bufs[cm->new_fb_idx];
	/*if (debug & VP9_DEBUG_BUFMGR)
		pr_info("[VP9 DEBUG]%s(get_free_fb): %d\r\n", __func__,
		cm->new_fb_idx);*/

	pbi->cur_buf = &frame_bufs[cm->new_fb_idx];
	if (pbi->mmu_enable) {
		/* moved to after picture size ready
		*alloc_mmu(cm, params->p.width, params->p.height,
		*params->p.bit_depth, pbi->frame_mmu_map_addr);
		*/
		cm->prev_fb_idx = cm->new_fb_idx;
	}
	/*read_uncompressed_header()*/
	cm->last_frame_type = cm->frame_type;
	cm->last_intra_only = cm->intra_only;
	cm->profile = params->p.profile;
	if (cm->profile >= MAX_PROFILES) {
		pr_err("Error: Unsupported profile %d\r\n", cm->profile);
		return -1;
	}
	cm->show_existing_frame = params->p.show_existing_frame;
	if (cm->show_existing_frame) {
		/* Show an existing frame directly.*/
		int frame_to_show_idx = params->p.frame_to_show_idx;
		int frame_to_show;
		unsigned long flags;
		if (frame_to_show_idx >= REF_FRAMES) {
			pr_info("frame_to_show_idx %d exceed max index\r\n",
					frame_to_show_idx);
			return -1;
		}

		frame_to_show = cm->ref_frame_map[frame_to_show_idx];
		/*pr_info("frame_to_show %d\r\n", frame_to_show);*/
		lock_buffer_pool(pool, flags);
		if (frame_to_show < 0 ||
			frame_bufs[frame_to_show].ref_count < 1) {
			unlock_buffer_pool(pool, flags);
			pr_err
			("Error:Buffer %d does not contain a decoded frame",
			frame_to_show);
			return -1;
		}

		/* move new_fb_idx's reference onto the shown frame */
		ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
		unlock_buffer_pool(pool, flags);
		pbi->refresh_frame_flags = 0;
		/*cm->lf.filter_level = 0;*/
		cm->show_frame = 1;

		/*
		 *if (pbi->frame_parallel_decode) {
		 *	for (i = 0; i < REF_FRAMES; ++i)
		 *		cm->next_ref_frame_map[i] =
		 *		cm->ref_frame_map[i];
		 *}
		 */
		/* do not decode, search next start code */
		return 1;
	}
	cm->frame_type = params->p.frame_type;
	cm->show_frame = params->p.show_frame;
	cm->bit_depth = params->p.bit_depth;
	cm->error_resilient_mode = params->p.error_resilient_mode;


	if (cm->frame_type == KEY_FRAME) {
		/* a keyframe refreshes every reference slot */
		pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1;

		for (i = 0; i < REFS_PER_FRAME; ++i) {
			cm->frame_refs[i].idx = INVALID_IDX;
			cm->frame_refs[i].buf = NULL;
		}

		ret = setup_frame_size(pbi,
			cm, params, pbi->frame_mmu_map_addr,
			print_header_info);
		if (ret)
			return -1;
		if (pbi->need_resync) {
			memset(&cm->ref_frame_map, -1,
				sizeof(cm->ref_frame_map));
			pbi->need_resync = 0;
		}
	} else {
		cm->intra_only = cm->show_frame ? 0 : params->p.intra_only;
		/*if (print_header_info) {
		 *	if (cm->show_frame)
		 *		pr_info
		 *		("intra_only set to 0 because of show_frame\n");
		 *	else
		 *		pr_info
		 *		("1-bit intra_only read: %d\n", cm->intra_only);
		 *}
		 */


		cm->reset_frame_context = cm->error_resilient_mode ?
			0 : params->p.reset_frame_context;
		if (print_header_info) {
			if (cm->error_resilient_mode)
				pr_info
				("reset to 0 error_resilient_mode\n");
		else
			pr_info
			(" * 2-bits reset_frame_context read : %d\n",
			cm->reset_frame_context);
		}

		if (cm->intra_only) {
			if (cm->profile > PROFILE_0) {
				/*read_bitdepth_colorspace_sampling(cm,
				 * rb, print_header_info);
				 */
			} else {
				/*NOTE: The intra-only frame header
				 *does not include the specification
				 *of either the color format or
				 *color sub-sampling
				 *in profile 0. VP9 specifies that the default
				 *color format should be YUV 4:2:0 in this
				 *case (normative).
				 */
				cm->color_space = VPX_CS_BT_601;
				cm->subsampling_y = cm->subsampling_x = 1;
				cm->bit_depth = VPX_BITS_8;
				cm->use_highbitdepth = 0;
			}

			pbi->refresh_frame_flags =
				params->p.refresh_frame_flags;
			/*if (print_header_info)
			 *	pr_info("*%d-bits refresh_frame read:0x%x\n",
			 *	REF_FRAMES, pbi->refresh_frame_flags);
			 */
			ret = setup_frame_size(pbi,
				cm,
				params,
				pbi->frame_mmu_map_addr,
				print_header_info);
			if (ret)
				return -1;
			if (pbi->need_resync) {
				memset(&cm->ref_frame_map, -1,
					sizeof(cm->ref_frame_map));
				pbi->need_resync = 0;
			}
		} else if (pbi->need_resync != 1) {  /* Skip if need resync */
			pbi->refresh_frame_flags =
				params->p.refresh_frame_flags;
			if (print_header_info)
				pr_info
				("*%d-bits refresh_frame read:0x%x\n",
				REF_FRAMES, pbi->refresh_frame_flags);
			/* unpack the per-ref index (3 bits) and sign bias
			 * (1 bit) fields packed 4 bits apart in ref_info
			 */
			for (i = 0; i < REFS_PER_FRAME; ++i) {
				const int ref =
					(params->p.ref_info >>
					(((REFS_PER_FRAME-i-1)*4)+1))
					& 0x7;
				const int idx =
					cm->ref_frame_map[ref];
				struct RefBuffer_s * const ref_frame =
					&cm->frame_refs[i];
				if (print_header_info)
					pr_info("*%d-bits ref[%d]read:%d\n",
					REF_FRAMES_LOG2, i, ref);
				ref_frame->idx = idx;
				ref_frame->buf = &frame_bufs[idx].buf;
				cm->ref_frame_sign_bias[LAST_FRAME + i]
					= (params->p.ref_info >>
					((REFS_PER_FRAME-i-1)*4)) & 0x1;
				if (print_header_info)
					pr_info("1bit ref_frame_sign_bias");
				/*pr_info
				 *("%dread: %d\n",
				 *LAST_FRAME+i,
				 *cm->ref_frame_sign_bias
				 *[LAST_FRAME + i]);
				 */
				/*pr_info
				 *("[VP9 DEBUG]%s(get ref):%d\r\n",
				 *__func__, ref_frame->idx);
				 */

			}

			ret = setup_frame_size_with_refs(
				pbi,
				cm,
				params,
				pbi->frame_mmu_map_addr,
				print_header_info);
			if (ret)
				return -1;
			for (i = 0; i < REFS_PER_FRAME; ++i) {
				/*struct RefBuffer_s *const ref_buf =
				 *&cm->frame_refs[i];
				 */
				/* to do:
				 *vp9_setup_scale_factors_for_frame
				 */
			}
		}
	}

	pic = get_frame_new_buffer(cm);
	if (!pic)
		return -1;

	pic->bit_depth = cm->bit_depth;
	pic->color_space = cm->color_space;
	pic->slice_type = cm->frame_type;

	if (pbi->need_resync) {
		pr_err
		("Error: Keyframe/intra-only frame required to reset\r\n");
		return -1;
	}
	generate_next_ref_frames(pbi);
	pbi->hold_ref_buf = 1;

#if 0
	if (frame_is_intra_only(cm) || cm->error_resilient_mode)
		vp9_setup_past_independence(cm);
	setup_loopfilter(&cm->lf, rb, print_header_info);
	setup_quantization(cm, &pbi->mb, rb, print_header_info);
	setup_segmentation(&cm->seg, rb, print_header_info);
	setup_segmentation_dequant(cm, print_header_info);

	setup_tile_info(cm, rb, print_header_info);
	sz = vp9_rb_read_literal(rb, 16);
	if (print_header_info)
		pr_info(" * 16-bits size read : %d (0x%x)\n", sz, sz);

	if (sz == 0)
		vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
				"Invalid header size");
#endif
	/*end read_uncompressed_header()*/
	/* previous-frame MVs are only usable when nothing about the
	 * previous frame invalidates them (same size, shown, inter)
	 */
	cm->use_prev_frame_mvs = !cm->error_resilient_mode &&
			cm->width == cm->last_width &&
			cm->height == cm->last_height &&
			!cm->last_intra_only &&
			cm->last_show_frame &&
			(cm->last_frame_type != KEY_FRAME);

	/*pr_info
	 *("set use_prev_frame_mvs to %d (last_width %d last_height %d",
	 *cm->use_prev_frame_mvs, cm->last_width, cm->last_height);
	 *pr_info
	 *(" last_intra_only %d last_show_frame %d last_frame_type %d)\n",
	 *cm->last_intra_only, cm->last_show_frame, cm->last_frame_type);
	 */

	if (pbi->enable_fence && cm->show_frame) {
		/* NOTE: this inner 'pic' intentionally shadows the
		 * outer one; it points at the current frame's buf
		 */
		struct PIC_BUFFER_CONFIG_s *pic = &cm->cur_frame->buf;
		struct vdec_s *vdec = hw_to_vdec(pbi);

		/* create fence for each buffers. */
		ret = vdec_timeline_create_fence(&vdec->sync);
		if (ret < 0)
			return ret;

		pic->fence = vdec->sync.fence;
		pic->bit_depth = cm->bit_depth;
		pic->slice_type = cm->frame_type;
		pic->stream_offset = pbi->pre_stream_offset;

		if (pbi->chunk) {
			pic->pts = pbi->chunk->pts;
			pic->pts64 = pbi->chunk->pts64;
			pic->timestamp = pbi->chunk->timestamp;
		}

		/* post video vframe. */
		prepare_display_buf(pbi, pic);
	}

	return 0;
}
2767 | |
2768 | |
/*
 * Finish the current frame's buffer bookkeeping: commit
 * next_ref_frame_map via refresh_ref_frames(), latch the frame to be
 * shown, drop the decoder's own reference on the new frame buffer
 * (under the pool lock), and invalidate the per-frame ref slots
 * until the next frame header is parsed.
 */
void swap_frame_buffers(struct VP9Decoder_s *pbi)
{
	int ref_index = 0;
	struct VP9_Common_s *const cm = &pbi->common;
	struct BufferPool_s *const pool = cm->buffer_pool;
	struct RefCntBuffer_s *const frame_bufs = cm->buffer_pool->frame_bufs;
	unsigned long flags;
	refresh_ref_frames(pbi);
	pbi->hold_ref_buf = 0;
	cm->frame_to_show = get_frame_new_buffer(cm);

	if (cm->frame_to_show) {
		/*if (!pbi->frame_parallel_decode || !cm->show_frame) {*/
		lock_buffer_pool(pool, flags);
		--frame_bufs[cm->new_fb_idx].ref_count;
		/*pr_info("[MMU DEBUG 8] dec ref_count[%d] : %d\r\n", cm->new_fb_idx,
		 *	frame_bufs[cm->new_fb_idx].ref_count);
		 */
		unlock_buffer_pool(pool, flags);
		/*}*/
	}

	/*Invalidate these references until the next frame starts.*/
	for (ref_index = 0; ref_index < 3; ref_index++)
		cm->frame_refs[ref_index].idx = -1;
}
2795 | |
#if 0
/* Disabled: libvpx userspace resync helper kept for reference only;
 * vpx_codec_alg_priv_t does not exist in this kernel driver.
 */
static void check_resync(vpx_codec_alg_priv_t *const ctx,
				const struct VP9Decoder_s *const pbi)
{
	/* Clear resync flag if worker got a key frame or intra only frame.*/
	if (ctx->need_resync == 1 && pbi->need_resync == 0 &&
		(pbi->common.intra_only || pbi->common.frame_type == KEY_FRAME))
		ctx->need_resync = 0;
}
#endif
2806 | |
2807 | int vp9_get_raw_frame(struct VP9Decoder_s *pbi, struct PIC_BUFFER_CONFIG_s *sd) |
2808 | { |
2809 | struct VP9_Common_s *const cm = &pbi->common; |
2810 | int ret = -1; |
2811 | |
2812 | if (pbi->ready_for_new_data == 1) |
2813 | return ret; |
2814 | |
2815 | pbi->ready_for_new_data = 1; |
2816 | |
2817 | /* no raw frame to show!!! */ |
2818 | if (!cm->show_frame) |
2819 | return ret; |
2820 | |
2821 | /* may not be get buff in v4l2 */ |
2822 | if (!cm->frame_to_show) |
2823 | return ret; |
2824 | |
2825 | pbi->ready_for_new_data = 1; |
2826 | |
2827 | *sd = *cm->frame_to_show; |
2828 | ret = 0; |
2829 | |
2830 | return ret; |
2831 | } |
2832 | |
/*
 * One-time initialization of the buffer manager state for a decoder
 * instance: hook up the buffer pool, invalidate all frame/ref indices,
 * store the workspace/mc buffer descriptors and zero the pts and
 * counter bookkeeping.  Always returns 0.
 */
int vp9_bufmgr_init(struct VP9Decoder_s *pbi, struct BuffInfo_s *buf_spec_i,
		struct buff_s *mc_buf_i) {
	struct VP9_Common_s *cm = &pbi->common;

	/*memset(pbi, 0, sizeof(struct VP9Decoder_s));*/
	pbi->frame_count = 0;
	pbi->pic_count = 0;
	pbi->pre_stream_offset = 0;
	cm->buffer_pool = &pbi->vp9_buffer_pool;
	spin_lock_init(&cm->buffer_pool->lock);
	cm->prev_fb_idx = INVALID_IDX;
	cm->new_fb_idx = INVALID_IDX;
	pbi->used_4k_num = -1;
	cm->cur_fb_idx_mmu = INVALID_IDX;
	pr_debug
	("After vp9_bufmgr_init, prev_fb_idx : %d, new_fb_idx : %d\r\n",
		cm->prev_fb_idx, cm->new_fb_idx);
	/* first accepted frame must be a keyframe/intra-only frame */
	pbi->need_resync = 1;
	/* Initialize the references to not point to any frame buffers.*/
	memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
	memset(&cm->next_ref_frame_map, -1, sizeof(cm->next_ref_frame_map));
	cm->current_video_frame = 0;
	pbi->ready_for_new_data = 1;

	/* private init */
	pbi->work_space_buf = buf_spec_i;
	/* mc_buf is only needed when the MMU is not used */
	if (!pbi->mmu_enable)
		pbi->mc_buf = mc_buf_i;

	pbi->rpm_addr = NULL;
	pbi->lmem_addr = NULL;

	pbi->use_cma_flag = 0;
	pbi->decode_idx = 0;
	pbi->slice_idx = 0;
	/*int m_uiMaxCUWidth = 1<<7;*/
	/*int m_uiMaxCUHeight = 1<<7;*/
	pbi->has_keyframe = 0;
	pbi->skip_flag = 0;
	pbi->wait_buf = 0;
	pbi->error_flag = 0;

	pbi->pts_mode = PTS_NORMAL;
	pbi->last_pts = 0;
	pbi->last_lookup_pts = 0;
	pbi->last_pts_us64 = 0;
	pbi->last_lookup_pts_us64 = 0;
	pbi->shift_byte_count = 0;
	pbi->shift_byte_count_lo = 0;
	pbi->shift_byte_count_hi = 0;
	pbi->pts_mode_switching_count = 0;
	pbi->pts_mode_recovery_count = 0;

	pbi->buf_num = 0;
	pbi->pic_num = 0;

	return 0;
}
2891 | |
/*
 * Post-decode processing for the current frame (runs at most once per
 * frame, guarded by postproc_done): swap/commit frame buffers, update
 * the last-frame state used for prev-MV decisions, and hand the frame
 * to the display path (fence signal or prepare_display_buf).
 * Always returns 0.
 */
int vp9_bufmgr_postproc(struct VP9Decoder_s *pbi)
{
	struct vdec_s *vdec = hw_to_vdec(pbi);
	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s sd;

	if (pbi->postproc_done)
		return 0;
	pbi->postproc_done = 1;
	swap_frame_buffers(pbi);
	if (!cm->show_existing_frame) {
		cm->last_show_frame = cm->show_frame;
		cm->prev_frame = cm->cur_frame;
#if 0
		if (cm->seg.enabled && !pbi->frame_parallel_decode)
			vp9_swap_current_and_last_seg_map(cm);
#endif
	}
	cm->last_width = cm->width;
	cm->last_height = cm->height;
	if (cm->show_frame)
		cm->current_video_frame++;

	if (vp9_get_raw_frame(pbi, &sd) == 0) {
		/*pr_info("Display frame index %d\r\n", sd.index);*/
		sd.stream_offset = pbi->pre_stream_offset;

		if (pbi->enable_fence) {
			/* notify signal to wake up wq of fence. */
			vdec_timeline_increase(&vdec->sync, 1);
		} else {
			/* fence-less path: queue the vframe directly */
			prepare_display_buf(pbi, &sd);
		}

		pbi->pre_stream_offset = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
	}

	/* else
	 *	pr_info
	 *	("Not display this frame,ready_for_new_data%d show_frame%d\r\n",
	 *	pbi->ready_for_new_data, cm->show_frame);
	 */
	return 0;
}
2936 | |
2937 | /************************************************** |
2938 | * |
2939 | *VP9 buffer management end |
2940 | * |
2941 | *************************************************** |
2942 | */ |
2943 | |
2944 | |
2945 | #define HEVC_CM_BODY_START_ADDR 0x3626 |
2946 | #define HEVC_CM_BODY_LENGTH 0x3627 |
2947 | #define HEVC_CM_HEADER_LENGTH 0x3629 |
2948 | #define HEVC_CM_HEADER_OFFSET 0x362b |
2949 | |
2950 | #define LOSLESS_COMPRESS_MODE |
2951 | |
2952 | /*#define DECOMP_HEADR_SURGENT*/ |
2953 | #ifdef VP9_10B_NV21 |
2954 | static u32 mem_map_mode = 2 /* 0:linear 1:32x32 2:64x32*/ |
2955 | #else |
2956 | static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */ |
2957 | #endif |
static u32 enable_mem_saving = 1;
static u32 force_w_h;

static u32 force_fps;


const u32 vp9_version = 201602101;
/* debug: bitmask of the VP9_DEBUG_* trace categories used throughout */
static u32 debug;
static u32 radr;
static u32 rval;
static u32 pop_shorts;
static u32 dbg_cmd;
static u32 dbg_skip_decode_index;
static u32 endian = 0xff0;
#ifdef ERROR_HANDLE_DEBUG
static u32 dbg_nal_skip_flag;
/* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
static u32 dbg_nal_skip_count;
#endif
/*for debug*/
static u32 decode_pic_begin;
static uint slice_parse_begin;
static u32 step;
#ifdef MIX_STREAM_SUPPORT
static u32 buf_alloc_width = 4096;
static u32 buf_alloc_height = 2304;
static u32 vp9_max_pic_w = 4096;
static u32 vp9_max_pic_h = 2304;

static u32 dynamic_buf_num_margin;
#else
static u32 buf_alloc_width;
static u32 buf_alloc_height;
static u32 dynamic_buf_num_margin = 7;
#endif
static u32 buf_alloc_depth = 10;
static u32 buf_alloc_size;
/*
 *bit[0]: 0,
 * bit[1]: 0, always release cma buffer when stop
 * bit[1]: 1, never release cma buffer when stop
 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
 *do not release cma buffer if blackout is not 1
 *
 *bit[2]: 0, when start decoding, check current displayed buffer
 *	 (only for buffer decoded by vp9) if blackout is 0
 *	 1, do not check current displayed buffer
 *
 *bit[3]: 1, if blackout is not 1, do not release current
 *	 displayed cma buffer always.
 */
/* set to 1 for fast play;
 * set to 8 for other case of "keep last frame"
 */
static u32 buffer_mode = 1;
/* buffer_mode_dbg: debug only*/
static u32 buffer_mode_dbg = 0xffff0000;
/**/

/*
 *bit 0, 1: only display I picture;
 *bit 1, 1: only decode I picture;
 */
static u32 i_only_flag;

static u32 low_latency_flag;

static u32 no_head;

static u32 max_decoding_time;
/*
 *error handling
 */
/*error_handle_policy:
 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
 *1, skip error_skip_nal_count nals before error recovery;
 *bit 1 (valid only when bit0 == 1):
 *1, wait vps/sps/pps after error recovery;
 *bit 2 (valid only when bit0 == 0):
 *0, auto search after error recovery (vp9_recover() called);
 *1, manual search after error recovery
 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
 *
 *bit 4: 0, set error_mark after reset/recover
 *	1, do not set error_mark after reset/recover
 *bit 5: 0, check total lcu for every picture
 *	1, do not check total lcu
 *
 */

static u32 error_handle_policy;
/*static u32 parser_sei_enable = 1;*/
/* frame-buffer pool sizing limits */
#define MAX_BUF_NUM_NORMAL 12
#define MAX_BUF_NUM_LESS 10
static u32 max_buf_num = MAX_BUF_NUM_NORMAL;
#define MAX_BUF_NUM_SAVE_BUF 8

/* minimum free buffers required before a decode run is scheduled */
static u32 run_ready_min_buf_num = 2;


static DEFINE_MUTEX(vvp9_mutex);
#ifndef MULTI_INSTANCE_SUPPORT
static struct device *cma_dev;
#endif
3062 | |
/* Mapping of the HEVC_ASSIST scratch registers to the roles this driver
 * and the decoder ucode agree on (status, buffer addresses, debug).
 */
#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
#define VP9_ADAPT_PROB_REG HEVC_ASSIST_SCRATCH_3
#define VP9_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_4
#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
#define VP9_PROB_SWAP_BUFFER HEVC_ASSIST_SCRATCH_9
#define VP9_COUNT_SWAP_BUFFER HEVC_ASSIST_SCRATCH_A
#define VP9_SEG_MAP_BUFFER HEVC_ASSIST_SCRATCH_B
#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
/* note: LMEM_DUMP_ADR intentionally shares SCRATCH_F with RPM_CMD_REG */
#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
#ifdef MULTI_INSTANCE_SUPPORT
#define HEVC_DECODE_COUNT HEVC_ASSIST_SCRATCH_M
#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
#else
#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M
#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N
#endif
#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H


/*
 *ucode parser/search control
 *bit 0: 0, header auto parse; 1, header manual parse
 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
 *bit [3:2]: valid when bit1==0;
 *0, auto skip nal before first vps/sps/pps/idr;
 *1, auto skip nal before first vps/sps/pps
 *2, auto skip nal before first vps/sps/pps,
 *	and not decode until the first I slice (with slice address of 0)
 *
 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
 *	0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
 *bit [17]: for NAL_SEI when bit0 is 0:
 *	0, do not parse SEI in ucode; 1, parse SEI in ucode
 *bit [31:20]: used by ucode for debug purpose
 */
#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
/*[31:24] chip feature
31: 0, use MBOX1; 1, use MBOX0
*/
#define DECODE_MODE HEVC_ASSIST_SCRATCH_J
#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K

#ifdef MULTI_INSTANCE_SUPPORT
#define RPM_BUF_SIZE (0x400 * 2)
#else
#define RPM_BUF_SIZE (0x80*2)
#endif
#define LMEM_BUF_SIZE (0x400 * 2)
3122 | |
#define WORK_BUF_SPEC_NUM 3
/* Per-resolution workspace layouts; init_buff_spec() packs these
 * sub-buffers back to back starting at each spec's start_adr.
 */
static struct BuffInfo_s amvvp9_workbuff_spec[WORK_BUF_SPEC_NUM] = {
	{
		/* spec[0]: streams up to 1920x1088 — ~8M bytes total */
		.max_width = 1920,
		.max_height = 1088,
		.ipp = {
			/* IPP work space calculation :
			 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
			 */
			.buf_size = 0x4000,
		},
		.sao_abv = {
			.buf_size = 0x30000,
		},
		.sao_vb = {
			.buf_size = 0x30000,
		},
		.short_term_rps = {
			/* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
			 * total 64x16x2 = 2048 bytes (0x800)
			 */
			.buf_size = 0x800,
		},
		.vps = {
			/* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
			 * total 0x0800 bytes
			 */
			.buf_size = 0x800,
		},
		.sps = {
			/* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
			 * total 0x0800 bytes
			 */
			.buf_size = 0x800,
		},
		.pps = {
			/* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
			 * total 0x2000 bytes
			 */
			.buf_size = 0x2000,
		},
		.sao_up = {
			/* SAO UP STORE AREA - Max 640(10240/16) LCU,
			 * each has 16 bytes total 0x2800 bytes
			 */
			.buf_size = 0x2800,
		},
		.swap_buf = {
			/* 256cyclex64bit = 2K bytes 0x800
			 * (only 144 cycles valid)
			 */
			.buf_size = 0x800,
		},
		.swap_buf2 = {
			.buf_size = 0x800,
		},
		.scalelut = {
			/* support up to 32 SCALELUT 1024x32 =
			 * 32Kbytes (0x8000)
			 */
			.buf_size = 0x8000,
		},
		.dblk_para = {
			/* DBLK -> Max 256(4096/16) LCU,
			 *each para 1024bytes(total:0x40000),
			 *data 1024bytes(total:0x40000)
			 */
			.buf_size = 0x80000,
		},
		.dblk_data = {
			.buf_size = 0x80000,
		},
		.seg_map = {
			/*4096x2304/64/64 *24 = 0xd800 Bytes*/
			.buf_size = 0xd800,
		},
		.mmu_vbh = {
			.buf_size = 0x5000, /*2*16*(more than 2304)/4, 4K*/
		},
#if 0
		.cm_header = {
			/*add one for keeper.*/
			.buf_size = MMU_COMPRESS_HEADER_SIZE *
				(FRAME_BUFFERS + 1),
			/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8) */
		},
#endif
		.mpred_above = {
			.buf_size = 0x10000, /* 2 * size of hevc*/
		},
#ifdef MV_USE_FIXED_BUF
		.mpred_mv = {/* 1080p, 0x40000 per buffer */
			.buf_size = 0x40000 * FRAME_BUFFERS,
		},
#endif
		.rpm = {
			.buf_size = RPM_BUF_SIZE,
		},
		.lmem = {
			.buf_size = 0x400 * 2,
		}
	},
	{
		/* spec[1]: streams up to 4096x2304 (4K) */
		.max_width = 4096,
		.max_height = 2304,
		.ipp = {
			/* IPP work space calculation :
			 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
			 */
			.buf_size = 0x4000,
		},
		.sao_abv = {
			.buf_size = 0x30000,
		},
		.sao_vb = {
			.buf_size = 0x30000,
		},
		.short_term_rps = {
			/* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
			 * total 64x16x2 = 2048 bytes (0x800)
			 */
			.buf_size = 0x800,
		},
		.vps = {
			/* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
			 * total 0x0800 bytes
			 */
			.buf_size = 0x800,
		},
		.sps = {
			/* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
			 * total 0x0800 bytes
			 */
			.buf_size = 0x800,
		},
		.pps = {
			/* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
			 * total 0x2000 bytes
			 */
			.buf_size = 0x2000,
		},
		.sao_up = {
			/* SAO UP STORE AREA - Max 640(10240/16) LCU,
			 * each has 16 bytes total 0x2800 bytes
			 */
			.buf_size = 0x2800,
		},
		.swap_buf = {
			/* 256cyclex64bit = 2K bytes 0x800
			 * (only 144 cycles valid)
			 */
			.buf_size = 0x800,
		},
		.swap_buf2 = {
			.buf_size = 0x800,
		},
		.scalelut = {
			/* support up to 32 SCALELUT 1024x32 = 32Kbytes
			 * (0x8000)
			 */
			.buf_size = 0x8000,
		},
		.dblk_para = {
			/* DBLK -> Max 256(4096/16) LCU,
			 *each para 1024bytes(total:0x40000),
			 *data 1024bytes(total:0x40000)
			 */
			.buf_size = 0x80000,
		},
		.dblk_data = {
			.buf_size = 0x80000,
		},
		.seg_map = {
			/*4096x2304/64/64 *24 = 0xd800 Bytes*/
			.buf_size = 0xd800,
		},
		.mmu_vbh = {
			.buf_size = 0x5000,/*2*16*(more than 2304)/4, 4K*/
		},
#if 0
		.cm_header = {
			/*add one for keeper.*/
			.buf_size = MMU_COMPRESS_HEADER_SIZE *
				(FRAME_BUFFERS + 1),
			/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8) */
		},
#endif
		.mpred_above = {
			.buf_size = 0x10000, /* 2 * size of hevc*/
		},
#ifdef MV_USE_FIXED_BUF
		.mpred_mv = {
			/* .buf_size = 0x100000*16,
			 * //4k2k , 0x100000 per buffer
			 */
			/* 4096x2304 , 0x120000 per buffer */
			.buf_size = 0x120000 * FRAME_BUFFERS,
		},
#endif
		.rpm = {
			.buf_size = RPM_BUF_SIZE,
		},
		.lmem = {
			.buf_size = 0x400 * 2,
		}
	},
	{
		/* spec[2]: streams up to 8192x4608 (8K) */
		.max_width = 4096*2,
		.max_height = 2304*2,
		.ipp = {
			/* IPP work space calculation :
			 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
			 */
			.buf_size = 0x4000*2,
		},
		.sao_abv = {
			.buf_size = 0x30000*2,
		},
		.sao_vb = {
			.buf_size = 0x30000*2,
		},
		.short_term_rps = {
			/* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
			 * total 64x16x2 = 2048 bytes (0x800)
			 */
			.buf_size = 0x800,
		},
		.vps = {
			/* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
			 * total 0x0800 bytes
			 */
			.buf_size = 0x800,
		},
		.sps = {
			/* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
			 * total 0x0800 bytes
			 */
			.buf_size = 0x800,
		},
		.pps = {
			/* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
			 * total 0x2000 bytes
			 */
			.buf_size = 0x2000,
		},
		.sao_up = {
			/* SAO UP STORE AREA - Max 640(10240/16) LCU,
			 * each has 16 bytes total 0x2800 bytes
			 */
			.buf_size = 0x2800*2,
		},
		.swap_buf = {
			/* 256cyclex64bit = 2K bytes 0x800
			 * (only 144 cycles valid)
			 */
			.buf_size = 0x800,
		},
		.swap_buf2 = {
			.buf_size = 0x800,
		},
		.scalelut = {
			/* support up to 32 SCALELUT 1024x32 = 32Kbytes
			 * (0x8000)
			 */
			.buf_size = 0x8000*2,
		},
		.dblk_para = {
			/* DBLK -> Max 256(4096/16) LCU,
			 *each para 1024bytes(total:0x40000),
			 *data 1024bytes(total:0x40000)
			 */
			.buf_size = 0x80000*2,
		},
		.dblk_data = {
			.buf_size = 0x80000*2,
		},
		.seg_map = {
			/*4096x2304/64/64 *24 = 0xd800 Bytes*/
			.buf_size = 0xd800*4,
		},
		.mmu_vbh = {
			.buf_size = 0x5000*2, /*2*16*(more than 2304)/4, 4K*/
		},
#if 0
		.cm_header = {
			/*.buf_size = MMU_COMPRESS_HEADER_SIZE*8,
			 * 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
			 */
			.buf_size = MMU_COMPRESS_HEADER_SIZE*16,
			/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8) */
		},
#endif
		.mpred_above = {
			.buf_size = 0x10000*2, /* 2 * size of hevc*/
		},
#ifdef MV_USE_FIXED_BUF
		.mpred_mv = {
			/* 4k2k , 0x100000 per buffer */
			/* 4096x2304 , 0x120000 per buffer */
			.buf_size = 0x120000 * FRAME_BUFFERS * 4,
		},
#endif
		.rpm = {
			.buf_size = RPM_BUF_SIZE,
		},
		.lmem = {
			.buf_size = 0x400 * 2,
		}
	}
};
3412 | |
3413 | |
/* Lossless compression body buffer size: 4K per 64x32 block (jt) */
3415 | int compute_losless_comp_body_size(int width, int height, |
3416 | uint8_t is_bit_depth_10) |
3417 | { |
3418 | int width_x64; |
3419 | int height_x32; |
3420 | int bsize; |
3421 | |
3422 | width_x64 = width + 63; |
3423 | width_x64 >>= 6; |
3424 | height_x32 = height + 31; |
3425 | height_x32 >>= 5; |
3426 | bsize = (is_bit_depth_10?4096:3200)*width_x64*height_x32; |
3427 | if (debug & VP9_DEBUG_BUFMGR_MORE) |
3428 | pr_info("%s(%d,%d,%d)=>%d\n", |
3429 | __func__, width, height, |
3430 | is_bit_depth_10, bsize); |
3431 | |
3432 | return bsize; |
3433 | } |
3434 | |
/* Lossless compression header buffer size: 32 bytes per 128x64 block (jt) */
3436 | static int compute_losless_comp_header_size(int width, int height) |
3437 | { |
3438 | int width_x128; |
3439 | int height_x64; |
3440 | int hsize; |
3441 | |
3442 | width_x128 = width + 127; |
3443 | width_x128 >>= 7; |
3444 | height_x64 = height + 63; |
3445 | height_x64 >>= 6; |
3446 | |
3447 | hsize = 32 * width_x128 * height_x64; |
3448 | if (debug & VP9_DEBUG_BUFMGR_MORE) |
3449 | pr_info("%s(%d,%d)=>%d\n", |
3450 | __func__, width, height, |
3451 | hsize); |
3452 | |
3453 | return hsize; |
3454 | } |
3455 | |
3456 | static void init_buff_spec(struct VP9Decoder_s *pbi, |
3457 | struct BuffInfo_s *buf_spec) |
3458 | { |
3459 | void *mem_start_virt; |
3460 | |
3461 | buf_spec->ipp.buf_start = buf_spec->start_adr; |
3462 | buf_spec->sao_abv.buf_start = |
3463 | buf_spec->ipp.buf_start + buf_spec->ipp.buf_size; |
3464 | |
3465 | buf_spec->sao_vb.buf_start = |
3466 | buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size; |
3467 | buf_spec->short_term_rps.buf_start = |
3468 | buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size; |
3469 | buf_spec->vps.buf_start = |
3470 | buf_spec->short_term_rps.buf_start + |
3471 | buf_spec->short_term_rps.buf_size; |
3472 | buf_spec->sps.buf_start = |
3473 | buf_spec->vps.buf_start + buf_spec->vps.buf_size; |
3474 | buf_spec->pps.buf_start = |
3475 | buf_spec->sps.buf_start + buf_spec->sps.buf_size; |
3476 | buf_spec->sao_up.buf_start = |
3477 | buf_spec->pps.buf_start + buf_spec->pps.buf_size; |
3478 | buf_spec->swap_buf.buf_start = |
3479 | buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size; |
3480 | buf_spec->swap_buf2.buf_start = |
3481 | buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size; |
3482 | buf_spec->scalelut.buf_start = |
3483 | buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size; |
3484 | buf_spec->dblk_para.buf_start = |
3485 | buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size; |
3486 | buf_spec->dblk_data.buf_start = |
3487 | buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size; |
3488 | buf_spec->seg_map.buf_start = |
3489 | buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size; |
3490 | if (pbi == NULL || pbi->mmu_enable) { |
3491 | buf_spec->mmu_vbh.buf_start = |
3492 | buf_spec->seg_map.buf_start + |
3493 | buf_spec->seg_map.buf_size; |
3494 | buf_spec->mpred_above.buf_start = |
3495 | buf_spec->mmu_vbh.buf_start + |
3496 | buf_spec->mmu_vbh.buf_size; |
3497 | } else { |
3498 | buf_spec->mpred_above.buf_start = |
3499 | buf_spec->seg_map.buf_start + buf_spec->seg_map.buf_size; |
3500 | } |
3501 | #ifdef MV_USE_FIXED_BUF |
3502 | buf_spec->mpred_mv.buf_start = |
3503 | buf_spec->mpred_above.buf_start + |
3504 | buf_spec->mpred_above.buf_size; |
3505 | |
3506 | buf_spec->rpm.buf_start = |
3507 | buf_spec->mpred_mv.buf_start + |
3508 | buf_spec->mpred_mv.buf_size; |
3509 | #else |
3510 | buf_spec->rpm.buf_start = |
3511 | buf_spec->mpred_above.buf_start + |
3512 | buf_spec->mpred_above.buf_size; |
3513 | |
3514 | #endif |
3515 | buf_spec->lmem.buf_start = |
3516 | buf_spec->rpm.buf_start + |
3517 | buf_spec->rpm.buf_size; |
3518 | buf_spec->end_adr = |
3519 | buf_spec->lmem.buf_start + |
3520 | buf_spec->lmem.buf_size; |
3521 | |
3522 | if (!pbi) |
3523 | return; |
3524 | |
3525 | if (!vdec_secure(hw_to_vdec(pbi))) { |
3526 | mem_start_virt = |
3527 | codec_mm_phys_to_virt(buf_spec->dblk_para.buf_start); |
3528 | if (mem_start_virt) { |
3529 | memset(mem_start_virt, 0, |
3530 | buf_spec->dblk_para.buf_size); |
3531 | codec_mm_dma_flush(mem_start_virt, |
3532 | buf_spec->dblk_para.buf_size, |
3533 | DMA_TO_DEVICE); |
3534 | } else { |
3535 | mem_start_virt = codec_mm_vmap( |
3536 | buf_spec->dblk_para.buf_start, |
3537 | buf_spec->dblk_para.buf_size); |
3538 | if (mem_start_virt) { |
3539 | memset(mem_start_virt, 0, |
3540 | buf_spec->dblk_para.buf_size); |
3541 | codec_mm_dma_flush(mem_start_virt, |
3542 | buf_spec->dblk_para.buf_size, |
3543 | DMA_TO_DEVICE); |
3544 | codec_mm_unmap_phyaddr(mem_start_virt); |
3545 | } else { |
3546 | /*not virt for tvp playing, |
3547 | may need clear on ucode.*/ |
3548 | pr_err("mem_start_virt failed\n"); |
3549 | } |
3550 | } |
3551 | } |
3552 | |
3553 | if (debug) { |
3554 | pr_info("%s workspace (%x %x) size = %x\n", __func__, |
3555 | buf_spec->start_adr, buf_spec->end_adr, |
3556 | buf_spec->end_adr - buf_spec->start_adr); |
3557 | } |
3558 | |
3559 | if (debug) { |
3560 | pr_info("ipp.buf_start :%x\n", |
3561 | buf_spec->ipp.buf_start); |
3562 | pr_info("sao_abv.buf_start :%x\n", |
3563 | buf_spec->sao_abv.buf_start); |
3564 | pr_info("sao_vb.buf_start :%x\n", |
3565 | buf_spec->sao_vb.buf_start); |
3566 | pr_info("short_term_rps.buf_start :%x\n", |
3567 | buf_spec->short_term_rps.buf_start); |
3568 | pr_info("vps.buf_start :%x\n", |
3569 | buf_spec->vps.buf_start); |
3570 | pr_info("sps.buf_start :%x\n", |
3571 | buf_spec->sps.buf_start); |
3572 | pr_info("pps.buf_start :%x\n", |
3573 | buf_spec->pps.buf_start); |
3574 | pr_info("sao_up.buf_start :%x\n", |
3575 | buf_spec->sao_up.buf_start); |
3576 | pr_info("swap_buf.buf_start :%x\n", |
3577 | buf_spec->swap_buf.buf_start); |
3578 | pr_info("swap_buf2.buf_start :%x\n", |
3579 | buf_spec->swap_buf2.buf_start); |
3580 | pr_info("scalelut.buf_start :%x\n", |
3581 | buf_spec->scalelut.buf_start); |
3582 | pr_info("dblk_para.buf_start :%x\n", |
3583 | buf_spec->dblk_para.buf_start); |
3584 | pr_info("dblk_data.buf_start :%x\n", |
3585 | buf_spec->dblk_data.buf_start); |
3586 | pr_info("seg_map.buf_start :%x\n", |
3587 | buf_spec->seg_map.buf_start); |
3588 | if (pbi->mmu_enable) { |
3589 | pr_info("mmu_vbh.buf_start :%x\n", |
3590 | buf_spec->mmu_vbh.buf_start); |
3591 | } |
3592 | pr_info("mpred_above.buf_start :%x\n", |
3593 | buf_spec->mpred_above.buf_start); |
3594 | #ifdef MV_USE_FIXED_BUF |
3595 | pr_info("mpred_mv.buf_start :%x\n", |
3596 | buf_spec->mpred_mv.buf_start); |
3597 | #endif |
3598 | if ((debug & VP9_DEBUG_SEND_PARAM_WITH_REG) == 0) { |
3599 | pr_info("rpm.buf_start :%x\n", |
3600 | buf_spec->rpm.buf_start); |
3601 | } |
3602 | } |
3603 | } |
3604 | |
/* cache_util.c */
#define THODIYIL_MCRCC_CANVAS_ALGX 4

/* NOTE(review): presumably selects the MCRCC canvas caching algorithm;
 * not referenced within this chunk — confirm against the config code.
 */
static u32 mcrcc_cache_alg_flag = THODIYIL_MCRCC_CANVAS_ALGX;
3609 | |
3610 | static void mcrcc_perfcount_reset(void) |
3611 | { |
3612 | if (debug & VP9_DEBUG_CACHE) |
3613 | pr_info("[cache_util.c] Entered mcrcc_perfcount_reset...\n"); |
3614 | WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)0x1); |
3615 | WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)0x0); |
3616 | return; |
3617 | } |
3618 | |
/* Counter snapshots from the previous mcrcc_get_hitrate() call;
 * used there to compute per-interval deltas.
 */
static unsigned raw_mcr_cnt_total_prev;
static unsigned hit_mcr_0_cnt_total_prev;
static unsigned hit_mcr_1_cnt_total_prev;
static unsigned byp_mcr_cnt_nchcanv_total_prev;
static unsigned byp_mcr_cnt_nchoutwin_total_prev;
3624 | |
/*
 * Read the MCRCC performance counters and, when VP9_DEBUG_CACHE is set
 * in 'debug', print running totals plus hit/bypass percentages.  Deltas
 * are taken against the *_total_prev snapshots above; pass a non-zero
 * reset_pre to zero those snapshots first.  Each counter is read by
 * writing its selector (shifted left by one) to PERFMON_CTL and then
 * reading PERFMON_DATA, so the write/read ordering below matters.
 */
static void mcrcc_get_hitrate(unsigned reset_pre)
{
	unsigned delta_hit_mcr_0_cnt;
	unsigned delta_hit_mcr_1_cnt;
	unsigned delta_raw_mcr_cnt;
	unsigned delta_mcr_cnt_nchcanv;
	unsigned delta_mcr_cnt_nchoutwin;

	unsigned tmp;
	unsigned raw_mcr_cnt;
	unsigned hit_mcr_cnt;
	unsigned byp_mcr_cnt_nchoutwin;
	unsigned byp_mcr_cnt_nchcanv;
	int hitrate;
	if (reset_pre) {
		/* restart interval measurement from zero */
		raw_mcr_cnt_total_prev = 0;
		hit_mcr_0_cnt_total_prev = 0;
		hit_mcr_1_cnt_total_prev = 0;
		byp_mcr_cnt_nchcanv_total_prev = 0;
		byp_mcr_cnt_nchoutwin_total_prev = 0;
	}
	if (debug & VP9_DEBUG_CACHE)
		pr_info("[cache_util.c] Entered mcrcc_get_hitrate...\n");
	/* selectors 0-3: raw, hit, bypass(out-of-window), bypass(canvas) */
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x0<<1));
	raw_mcr_cnt = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x1<<1));
	hit_mcr_cnt = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x2<<1));
	byp_mcr_cnt_nchoutwin = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x3<<1));
	byp_mcr_cnt_nchcanv = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);

	if (debug & VP9_DEBUG_CACHE)
		pr_info("raw_mcr_cnt_total: %d\n",
			raw_mcr_cnt);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("hit_mcr_cnt_total: %d\n",
			hit_mcr_cnt);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("byp_mcr_cnt_nchoutwin_total: %d\n",
			byp_mcr_cnt_nchoutwin);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("byp_mcr_cnt_nchcanv_total: %d\n",
			byp_mcr_cnt_nchcanv);

	/* per-interval deltas, then refresh the snapshots */
	delta_raw_mcr_cnt = raw_mcr_cnt -
		raw_mcr_cnt_total_prev;
	delta_mcr_cnt_nchcanv = byp_mcr_cnt_nchcanv -
		byp_mcr_cnt_nchcanv_total_prev;
	delta_mcr_cnt_nchoutwin = byp_mcr_cnt_nchoutwin -
		byp_mcr_cnt_nchoutwin_total_prev;
	raw_mcr_cnt_total_prev = raw_mcr_cnt;
	byp_mcr_cnt_nchcanv_total_prev = byp_mcr_cnt_nchcanv;
	byp_mcr_cnt_nchoutwin_total_prev = byp_mcr_cnt_nchoutwin;

	/* selectors 4-7: per-canvas miss/hit counters */
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x4<<1));
	tmp = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("miss_mcr_0_cnt_total: %d\n", tmp);
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x5<<1));
	tmp = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("miss_mcr_1_cnt_total: %d\n", tmp);
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x6<<1));
	tmp = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("hit_mcr_0_cnt_total: %d\n", tmp);
	delta_hit_mcr_0_cnt = tmp - hit_mcr_0_cnt_total_prev;
	hit_mcr_0_cnt_total_prev = tmp;
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x7<<1));
	tmp = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("hit_mcr_1_cnt_total: %d\n", tmp);
	delta_hit_mcr_1_cnt = tmp - hit_mcr_1_cnt_total_prev;
	hit_mcr_1_cnt_total_prev = tmp;

	/* interval rates (percent) */
	if (delta_raw_mcr_cnt != 0) {
		hitrate = 100 * delta_hit_mcr_0_cnt
			/ delta_raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("CANV0_HIT_RATE : %d\n", hitrate);
		hitrate = 100 * delta_hit_mcr_1_cnt
			/ delta_raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("CANV1_HIT_RATE : %d\n", hitrate);
		hitrate = 100 * delta_mcr_cnt_nchcanv
			/ delta_raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("NONCACH_CANV_BYP_RATE : %d\n", hitrate);
		hitrate = 100 * delta_mcr_cnt_nchoutwin
			/ delta_raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("CACHE_OUTWIN_BYP_RATE : %d\n", hitrate);
	}


	/* cumulative rates (percent) */
	if (raw_mcr_cnt != 0) {
		hitrate = 100 * hit_mcr_cnt / raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("MCRCC_HIT_RATE : %d\n", hitrate);
		hitrate = 100 * (byp_mcr_cnt_nchoutwin + byp_mcr_cnt_nchcanv)
			/ raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("MCRCC_BYP_RATE : %d\n", hitrate);
	} else {
		if (debug & VP9_DEBUG_CACHE)
			pr_info("MCRCC_HIT_RATE : na\n");
		if (debug & VP9_DEBUG_CACHE)
			pr_info("MCRCC_BYP_RATE : na\n");
	}
	return;
}
3737 | |
3738 | |
3739 | static void decomp_perfcount_reset(void) |
3740 | { |
3741 | if (debug & VP9_DEBUG_CACHE) |
3742 | pr_info("[cache_util.c] Entered decomp_perfcount_reset...\n"); |
3743 | WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)0x1); |
3744 | WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)0x0); |
3745 | return; |
3746 | } |
3747 | |
3748 | static void decomp_get_hitrate(void) |
3749 | { |
3750 | unsigned raw_mcr_cnt; |
3751 | unsigned hit_mcr_cnt; |
3752 | int hitrate; |
3753 | if (debug & VP9_DEBUG_CACHE) |
3754 | pr_info("[cache_util.c] Entered decomp_get_hitrate...\n"); |
3755 | WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x0<<1)); |
3756 | raw_mcr_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA); |
3757 | WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x1<<1)); |
3758 | hit_mcr_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA); |
3759 | |
3760 | if (debug & VP9_DEBUG_CACHE) |
3761 | pr_info("hcache_raw_cnt_total: %d\n", raw_mcr_cnt); |
3762 | if (debug & VP9_DEBUG_CACHE) |
3763 | pr_info("hcache_hit_cnt_total: %d\n", hit_mcr_cnt); |
3764 | |
3765 | if (raw_mcr_cnt != 0) { |
3766 | hitrate = hit_mcr_cnt * 100 / raw_mcr_cnt; |
3767 | if (debug & VP9_DEBUG_CACHE) |
3768 | pr_info("DECOMP_HCACHE_HIT_RATE : %d\n", hitrate); |
3769 | } else { |
3770 | if (debug & VP9_DEBUG_CACHE) |
3771 | pr_info("DECOMP_HCACHE_HIT_RATE : na\n"); |
3772 | } |
3773 | WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x2<<1)); |
3774 | raw_mcr_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA); |
3775 | WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x3<<1)); |
3776 | hit_mcr_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA); |
3777 | |
3778 | if (debug & VP9_DEBUG_CACHE) |
3779 | pr_info("dcache_raw_cnt_total: %d\n", raw_mcr_cnt); |
3780 | if (debug & VP9_DEBUG_CACHE) |
3781 | pr_info("dcache_hit_cnt_total: %d\n", hit_mcr_cnt); |
3782 | |
3783 | if (raw_mcr_cnt != 0) { |
3784 | hitrate = hit_mcr_cnt * 100 / raw_mcr_cnt; |
3785 | if (debug & VP9_DEBUG_CACHE) |
3786 | pr_info("DECOMP_DCACHE_HIT_RATE : %d\n", hitrate); |
3787 | } else { |
3788 | if (debug & VP9_DEBUG_CACHE) |
3789 | pr_info("DECOMP_DCACHE_HIT_RATE : na\n"); |
3790 | } |
3791 | return; |
3792 | } |
3793 | |
3794 | static void decomp_get_comprate(void) |
3795 | { |
3796 | unsigned raw_ucomp_cnt; |
3797 | unsigned fast_comp_cnt; |
3798 | unsigned slow_comp_cnt; |
3799 | int comprate; |
3800 | |
3801 | if (debug & VP9_DEBUG_CACHE) |
3802 | pr_info("[cache_util.c] Entered decomp_get_comprate...\n"); |
3803 | WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x4<<1)); |
3804 | fast_comp_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA); |
3805 | WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x5<<1)); |
3806 | slow_comp_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA); |
3807 | WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x6<<1)); |
3808 | raw_ucomp_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA); |
3809 | |
3810 | if (debug & VP9_DEBUG_CACHE) |
3811 | pr_info("decomp_fast_comp_total: %d\n", fast_comp_cnt); |
3812 | if (debug & VP9_DEBUG_CACHE) |
3813 | pr_info("decomp_slow_comp_total: %d\n", slow_comp_cnt); |
3814 | if (debug & VP9_DEBUG_CACHE) |
3815 | pr_info("decomp_raw_uncomp_total: %d\n", raw_ucomp_cnt); |
3816 | |
3817 | if (raw_ucomp_cnt != 0) { |
3818 | comprate = (fast_comp_cnt + slow_comp_cnt) |
3819 | * 100 / raw_ucomp_cnt; |
3820 | if (debug & VP9_DEBUG_CACHE) |
3821 | pr_info("DECOMP_COMP_RATIO : %d\n", comprate); |
3822 | } else { |
3823 | if (debug & VP9_DEBUG_CACHE) |
3824 | pr_info("DECOMP_COMP_RATIO : na\n"); |
3825 | } |
3826 | return; |
3827 | } |
3828 | /* cache_util.c end */ |
3829 | |
3830 | /*==================================================== |
3831 | *======================================================================== |
3832 | *vp9_prob define |
3833 | *======================================================================== |
3834 | */ |
/*
 * Node offsets into the probability buffer shared with the hardware.
 * Each tree node occupies one 8-bit probability; adapt_coef_probs()
 * addresses node N as word N/4*2, byte (N & 3), so these appear to be
 * byte offsets with only every other 32-bit word populated —
 * NOTE(review): confirm against the HW buffer layout.
 */
#define VP9_PARTITION_START 0
#define VP9_PARTITION_SIZE_STEP (3 * 4)
#define VP9_PARTITION_ONE_SIZE (4 * VP9_PARTITION_SIZE_STEP)
#define VP9_PARTITION_KEY_START 0
#define VP9_PARTITION_P_START VP9_PARTITION_ONE_SIZE
#define VP9_PARTITION_SIZE (2 * VP9_PARTITION_ONE_SIZE)
#define VP9_SKIP_START (VP9_PARTITION_START + VP9_PARTITION_SIZE)
#define VP9_SKIP_SIZE 4 /* only use 3*/
#define VP9_TX_MODE_START (VP9_SKIP_START+VP9_SKIP_SIZE)
#define VP9_TX_MODE_8_0_OFFSET 0
#define VP9_TX_MODE_8_1_OFFSET 1
#define VP9_TX_MODE_16_0_OFFSET 2
#define VP9_TX_MODE_16_1_OFFSET 4
#define VP9_TX_MODE_32_0_OFFSET 6
#define VP9_TX_MODE_32_1_OFFSET 9
#define VP9_TX_MODE_SIZE 12
#define VP9_COEF_START (VP9_TX_MODE_START+VP9_TX_MODE_SIZE)
#define VP9_COEF_BAND_0_OFFSET 0
#define VP9_COEF_BAND_1_OFFSET (VP9_COEF_BAND_0_OFFSET + 3 * 3 + 1)
#define VP9_COEF_BAND_2_OFFSET (VP9_COEF_BAND_1_OFFSET + 6 * 3)
#define VP9_COEF_BAND_3_OFFSET (VP9_COEF_BAND_2_OFFSET + 6 * 3)
#define VP9_COEF_BAND_4_OFFSET (VP9_COEF_BAND_3_OFFSET + 6 * 3)
#define VP9_COEF_BAND_5_OFFSET (VP9_COEF_BAND_4_OFFSET + 6 * 3)
#define VP9_COEF_SIZE_ONE_SET 100 /* ((3 +5*6)*3 + 1 padding)*/
#define VP9_COEF_4X4_START (VP9_COEF_START + 0 * VP9_COEF_SIZE_ONE_SET)
#define VP9_COEF_8X8_START (VP9_COEF_START + 4 * VP9_COEF_SIZE_ONE_SET)
#define VP9_COEF_16X16_START (VP9_COEF_START + 8 * VP9_COEF_SIZE_ONE_SET)
#define VP9_COEF_32X32_START (VP9_COEF_START + 12 * VP9_COEF_SIZE_ONE_SET)
#define VP9_COEF_SIZE_PLANE (2 * VP9_COEF_SIZE_ONE_SET)
#define VP9_COEF_SIZE (4 * 2 * 2 * VP9_COEF_SIZE_ONE_SET)
#define VP9_INTER_MODE_START (VP9_COEF_START+VP9_COEF_SIZE)
#define VP9_INTER_MODE_SIZE 24 /* only use 21 ( #*7)*/
#define VP9_INTERP_START (VP9_INTER_MODE_START+VP9_INTER_MODE_SIZE)
#define VP9_INTERP_SIZE 8
#define VP9_INTRA_INTER_START (VP9_INTERP_START+VP9_INTERP_SIZE)
#define VP9_INTRA_INTER_SIZE 4
#define VP9_INTERP_INTRA_INTER_START VP9_INTERP_START
#define VP9_INTERP_INTRA_INTER_SIZE (VP9_INTERP_SIZE + VP9_INTRA_INTER_SIZE)
#define VP9_COMP_INTER_START \
	(VP9_INTERP_INTRA_INTER_START+VP9_INTERP_INTRA_INTER_SIZE)
#define VP9_COMP_INTER_SIZE 5
#define VP9_COMP_REF_START (VP9_COMP_INTER_START+VP9_COMP_INTER_SIZE)
#define VP9_COMP_REF_SIZE 5
#define VP9_SINGLE_REF_START (VP9_COMP_REF_START+VP9_COMP_REF_SIZE)
#define VP9_SINGLE_REF_SIZE 10
#define VP9_REF_MODE_START VP9_COMP_INTER_START
#define VP9_REF_MODE_SIZE \
	(VP9_COMP_INTER_SIZE+VP9_COMP_REF_SIZE+VP9_SINGLE_REF_SIZE)
#define VP9_IF_Y_MODE_START (VP9_REF_MODE_START+VP9_REF_MODE_SIZE)
#define VP9_IF_Y_MODE_SIZE 36
#define VP9_IF_UV_MODE_START (VP9_IF_Y_MODE_START+VP9_IF_Y_MODE_SIZE)
#define VP9_IF_UV_MODE_SIZE 92 /* only use 90*/
/* Motion-vector probability sections (sign/classes/class0/bits are
 * duplicated for the two MV components, suffixes _0 and _1).
 */
#define VP9_MV_JOINTS_START (VP9_IF_UV_MODE_START+VP9_IF_UV_MODE_SIZE)
#define VP9_MV_JOINTS_SIZE 3
#define VP9_MV_SIGN_0_START (VP9_MV_JOINTS_START+VP9_MV_JOINTS_SIZE)
#define VP9_MV_SIGN_0_SIZE 1
#define VP9_MV_CLASSES_0_START (VP9_MV_SIGN_0_START+VP9_MV_SIGN_0_SIZE)
#define VP9_MV_CLASSES_0_SIZE 10
#define VP9_MV_CLASS0_0_START (VP9_MV_CLASSES_0_START+VP9_MV_CLASSES_0_SIZE)
#define VP9_MV_CLASS0_0_SIZE 1
#define VP9_MV_BITS_0_START (VP9_MV_CLASS0_0_START+VP9_MV_CLASS0_0_SIZE)
#define VP9_MV_BITS_0_SIZE 10
#define VP9_MV_SIGN_1_START (VP9_MV_BITS_0_START+VP9_MV_BITS_0_SIZE)
#define VP9_MV_SIGN_1_SIZE 1
#define VP9_MV_CLASSES_1_START \
	(VP9_MV_SIGN_1_START+VP9_MV_SIGN_1_SIZE)
#define VP9_MV_CLASSES_1_SIZE 10
#define VP9_MV_CLASS0_1_START \
	(VP9_MV_CLASSES_1_START+VP9_MV_CLASSES_1_SIZE)
#define VP9_MV_CLASS0_1_SIZE 1
#define VP9_MV_BITS_1_START \
	(VP9_MV_CLASS0_1_START+VP9_MV_CLASS0_1_SIZE)
#define VP9_MV_BITS_1_SIZE 10
#define VP9_MV_CLASS0_FP_0_START \
	(VP9_MV_BITS_1_START+VP9_MV_BITS_1_SIZE)
#define VP9_MV_CLASS0_FP_0_SIZE 9
#define VP9_MV_CLASS0_FP_1_START \
	(VP9_MV_CLASS0_FP_0_START+VP9_MV_CLASS0_FP_0_SIZE)
#define VP9_MV_CLASS0_FP_1_SIZE 9
#define VP9_MV_CLASS0_HP_0_START \
	(VP9_MV_CLASS0_FP_1_START+VP9_MV_CLASS0_FP_1_SIZE)
#define VP9_MV_CLASS0_HP_0_SIZE 2
#define VP9_MV_CLASS0_HP_1_START \
	(VP9_MV_CLASS0_HP_0_START+VP9_MV_CLASS0_HP_0_SIZE)
#define VP9_MV_CLASS0_HP_1_SIZE 2
#define VP9_MV_START VP9_MV_JOINTS_START
#define VP9_MV_SIZE 72 /*only use 69*/

#define VP9_TOTAL_SIZE (VP9_MV_START + VP9_MV_SIZE)
3924 | |
3925 | |
3926 | /*======================================================================== |
3927 | * vp9_count_mem define |
3928 | *======================================================================== |
3929 | */ |
3930 | #define VP9_COEF_COUNT_START 0 |
3931 | #define VP9_COEF_COUNT_BAND_0_OFFSET 0 |
3932 | #define VP9_COEF_COUNT_BAND_1_OFFSET \ |
3933 | (VP9_COEF_COUNT_BAND_0_OFFSET + 3*5) |
3934 | #define VP9_COEF_COUNT_BAND_2_OFFSET \ |
3935 | (VP9_COEF_COUNT_BAND_1_OFFSET + 6*5) |
3936 | #define VP9_COEF_COUNT_BAND_3_OFFSET \ |
3937 | (VP9_COEF_COUNT_BAND_2_OFFSET + 6*5) |
3938 | #define VP9_COEF_COUNT_BAND_4_OFFSET \ |
3939 | (VP9_COEF_COUNT_BAND_3_OFFSET + 6*5) |
3940 | #define VP9_COEF_COUNT_BAND_5_OFFSET \ |
3941 | (VP9_COEF_COUNT_BAND_4_OFFSET + 6*5) |
3942 | #define VP9_COEF_COUNT_SIZE_ONE_SET 165 /* ((3 +5*6)*5 */ |
3943 | #define VP9_COEF_COUNT_4X4_START \ |
3944 | (VP9_COEF_COUNT_START + 0*VP9_COEF_COUNT_SIZE_ONE_SET) |
3945 | #define VP9_COEF_COUNT_8X8_START \ |
3946 | (VP9_COEF_COUNT_START + 4*VP9_COEF_COUNT_SIZE_ONE_SET) |
3947 | #define VP9_COEF_COUNT_16X16_START \ |
3948 | (VP9_COEF_COUNT_START + 8*VP9_COEF_COUNT_SIZE_ONE_SET) |
3949 | #define VP9_COEF_COUNT_32X32_START \ |
3950 | (VP9_COEF_COUNT_START + 12*VP9_COEF_COUNT_SIZE_ONE_SET) |
3951 | #define VP9_COEF_COUNT_SIZE_PLANE (2 * VP9_COEF_COUNT_SIZE_ONE_SET) |
3952 | #define VP9_COEF_COUNT_SIZE (4 * 2 * 2 * VP9_COEF_COUNT_SIZE_ONE_SET) |
3953 | |
3954 | #define VP9_INTRA_INTER_COUNT_START \ |
3955 | (VP9_COEF_COUNT_START+VP9_COEF_COUNT_SIZE) |
3956 | #define VP9_INTRA_INTER_COUNT_SIZE (4*2) |
3957 | #define VP9_COMP_INTER_COUNT_START \ |
3958 | (VP9_INTRA_INTER_COUNT_START+VP9_INTRA_INTER_COUNT_SIZE) |
3959 | #define VP9_COMP_INTER_COUNT_SIZE (5*2) |
3960 | #define VP9_COMP_REF_COUNT_START \ |
3961 | (VP9_COMP_INTER_COUNT_START+VP9_COMP_INTER_COUNT_SIZE) |
3962 | #define VP9_COMP_REF_COUNT_SIZE (5*2) |
3963 | #define VP9_SINGLE_REF_COUNT_START \ |
3964 | (VP9_COMP_REF_COUNT_START+VP9_COMP_REF_COUNT_SIZE) |
3965 | #define VP9_SINGLE_REF_COUNT_SIZE (10*2) |
3966 | #define VP9_TX_MODE_COUNT_START \ |
3967 | (VP9_SINGLE_REF_COUNT_START+VP9_SINGLE_REF_COUNT_SIZE) |
3968 | #define VP9_TX_MODE_COUNT_SIZE (12*2) |
3969 | #define VP9_SKIP_COUNT_START \ |
3970 | (VP9_TX_MODE_COUNT_START+VP9_TX_MODE_COUNT_SIZE) |
3971 | #define VP9_SKIP_COUNT_SIZE (3*2) |
3972 | #define VP9_MV_SIGN_0_COUNT_START \ |
3973 | (VP9_SKIP_COUNT_START+VP9_SKIP_COUNT_SIZE) |
3974 | #define VP9_MV_SIGN_0_COUNT_SIZE (1*2) |
3975 | #define VP9_MV_SIGN_1_COUNT_START \ |
3976 | (VP9_MV_SIGN_0_COUNT_START+VP9_MV_SIGN_0_COUNT_SIZE) |
3977 | #define VP9_MV_SIGN_1_COUNT_SIZE (1*2) |
3978 | #define VP9_MV_BITS_0_COUNT_START \ |
3979 | (VP9_MV_SIGN_1_COUNT_START+VP9_MV_SIGN_1_COUNT_SIZE) |
3980 | #define VP9_MV_BITS_0_COUNT_SIZE (10*2) |
3981 | #define VP9_MV_BITS_1_COUNT_START \ |
3982 | (VP9_MV_BITS_0_COUNT_START+VP9_MV_BITS_0_COUNT_SIZE) |
3983 | #define VP9_MV_BITS_1_COUNT_SIZE (10*2) |
3984 | #define VP9_MV_CLASS0_HP_0_COUNT_START \ |
3985 | (VP9_MV_BITS_1_COUNT_START+VP9_MV_BITS_1_COUNT_SIZE) |
3986 | #define VP9_MV_CLASS0_HP_0_COUNT_SIZE (2*2) |
3987 | #define VP9_MV_CLASS0_HP_1_COUNT_START \ |
3988 | (VP9_MV_CLASS0_HP_0_COUNT_START+VP9_MV_CLASS0_HP_0_COUNT_SIZE) |
3989 | #define VP9_MV_CLASS0_HP_1_COUNT_SIZE (2*2) |
3990 | /* Start merge_tree*/ |
3991 | #define VP9_INTER_MODE_COUNT_START \ |
3992 | (VP9_MV_CLASS0_HP_1_COUNT_START+VP9_MV_CLASS0_HP_1_COUNT_SIZE) |
3993 | #define VP9_INTER_MODE_COUNT_SIZE (7*4) |
3994 | #define VP9_IF_Y_MODE_COUNT_START \ |
3995 | (VP9_INTER_MODE_COUNT_START+VP9_INTER_MODE_COUNT_SIZE) |
3996 | #define VP9_IF_Y_MODE_COUNT_SIZE (10*4) |
3997 | #define VP9_IF_UV_MODE_COUNT_START \ |
3998 | (VP9_IF_Y_MODE_COUNT_START+VP9_IF_Y_MODE_COUNT_SIZE) |
3999 | #define VP9_IF_UV_MODE_COUNT_SIZE (10*10) |
4000 | #define VP9_PARTITION_P_COUNT_START \ |
4001 | (VP9_IF_UV_MODE_COUNT_START+VP9_IF_UV_MODE_COUNT_SIZE) |
4002 | #define VP9_PARTITION_P_COUNT_SIZE (4*4*4) |
4003 | #define VP9_INTERP_COUNT_START \ |
4004 | (VP9_PARTITION_P_COUNT_START+VP9_PARTITION_P_COUNT_SIZE) |
4005 | #define VP9_INTERP_COUNT_SIZE (4*3) |
4006 | #define VP9_MV_JOINTS_COUNT_START \ |
4007 | (VP9_INTERP_COUNT_START+VP9_INTERP_COUNT_SIZE) |
4008 | #define VP9_MV_JOINTS_COUNT_SIZE (1 * 4) |
4009 | #define VP9_MV_CLASSES_0_COUNT_START \ |
4010 | (VP9_MV_JOINTS_COUNT_START+VP9_MV_JOINTS_COUNT_SIZE) |
4011 | #define VP9_MV_CLASSES_0_COUNT_SIZE (1*11) |
4012 | #define VP9_MV_CLASS0_0_COUNT_START \ |
4013 | (VP9_MV_CLASSES_0_COUNT_START+VP9_MV_CLASSES_0_COUNT_SIZE) |
4014 | #define VP9_MV_CLASS0_0_COUNT_SIZE (1*2) |
4015 | #define VP9_MV_CLASSES_1_COUNT_START \ |
4016 | (VP9_MV_CLASS0_0_COUNT_START+VP9_MV_CLASS0_0_COUNT_SIZE) |
4017 | #define VP9_MV_CLASSES_1_COUNT_SIZE (1*11) |
4018 | #define VP9_MV_CLASS0_1_COUNT_START \ |
4019 | (VP9_MV_CLASSES_1_COUNT_START+VP9_MV_CLASSES_1_COUNT_SIZE) |
4020 | #define VP9_MV_CLASS0_1_COUNT_SIZE (1*2) |
4021 | #define VP9_MV_CLASS0_FP_0_COUNT_START \ |
4022 | (VP9_MV_CLASS0_1_COUNT_START+VP9_MV_CLASS0_1_COUNT_SIZE) |
4023 | #define VP9_MV_CLASS0_FP_0_COUNT_SIZE (3*4) |
4024 | #define VP9_MV_CLASS0_FP_1_COUNT_START \ |
4025 | (VP9_MV_CLASS0_FP_0_COUNT_START+VP9_MV_CLASS0_FP_0_COUNT_SIZE) |
4026 | #define VP9_MV_CLASS0_FP_1_COUNT_SIZE (3*4) |
4027 | |
4028 | |
/*
 * VP9 intra prediction modes.  Used below as per-mode offsets into the
 * intra-mode counter slots (10 counters per context), so the numeric
 * values must match the hardware counter ordering.
 */
#define DC_PRED 0	/* Average of above and left pixels*/
#define V_PRED 1	/* Vertical*/
#define H_PRED 2	/* Horizontal*/
#define D45_PRED 3	/*Directional 45 deg = round(arctan(1/1) * 180/pi)*/
#define D135_PRED 4	/* Directional 135 deg = 180 - 45*/
#define D117_PRED 5	/* Directional 117 deg = 180 - 63*/
#define D153_PRED 6	/* Directional 153 deg = 180 - 27*/
#define D207_PRED 7	/* Directional 207 deg = 180 + 27*/
#define D63_PRED 8	/*Directional 63 deg = round(arctan(2/1) * 180/pi)*/
#define TM_PRED 9	/*True-motion*/
4039 | |
/* Clamp a probability value into the valid VP9 range [1, 255]. */
int clip_prob(int p)
{
	if (p < 1)
		return 1;
	if (p > 255)
		return 255;
	return p;
}
4044 | |
/* Round (value / 2^n) to the nearest integer by adding half the divisor. */
#define ROUND_POWER_OF_TWO(value, n) \
	(((value) + (1 << ((n) - 1))) >> (n))

/* Saturation limit applied to mode/MV branch counts before lookup below. */
#define MODE_MV_COUNT_SAT 20
/*
 * Maps a saturated branch count [0..MODE_MV_COUNT_SAT] to the weight
 * (out of 256) given to the newly estimated probability when it is
 * blended with the previous frame's probability; 0 counts keep the old
 * probability unchanged, saturated counts weight the estimate by 128/256.
 */
static const int count_to_update_factor[MODE_MV_COUNT_SAT + 1] = {
	0, 6, 12, 19, 25, 32, 38, 44, 51, 57, 64,
	70, 76, 83, 89, 96, 102, 108, 115, 121, 128
};
4053 | |
4054 | void vp9_tree_merge_probs(unsigned int *prev_prob, unsigned int *cur_prob, |
4055 | int coef_node_start, int tree_left, int tree_right, int tree_i, |
4056 | int node) { |
4057 | |
4058 | int prob_32, prob_res, prob_shift; |
4059 | int pre_prob, new_prob; |
4060 | int den, m_count, get_prob, factor; |
4061 | |
4062 | prob_32 = prev_prob[coef_node_start / 4 * 2]; |
4063 | prob_res = coef_node_start & 3; |
4064 | prob_shift = prob_res * 8; |
4065 | pre_prob = (prob_32 >> prob_shift) & 0xff; |
4066 | |
4067 | den = tree_left + tree_right; |
4068 | |
4069 | if (den == 0) |
4070 | new_prob = pre_prob; |
4071 | else { |
4072 | m_count = (den < MODE_MV_COUNT_SAT) ? |
4073 | den : MODE_MV_COUNT_SAT; |
4074 | get_prob = clip_prob( |
4075 | div_r32(((int64_t)tree_left * 256 + (den >> 1)), |
4076 | den)); |
4077 | /*weighted_prob*/ |
4078 | factor = count_to_update_factor[m_count]; |
4079 | new_prob = ROUND_POWER_OF_TWO(pre_prob * (256 - factor) |
4080 | + get_prob * factor, 8); |
4081 | } |
4082 | cur_prob[coef_node_start / 4 * 2] = (cur_prob[coef_node_start / 4 * 2] |
4083 | & (~(0xff << prob_shift))) | (new_prob << prob_shift); |
4084 | |
4085 | /*pr_info(" - [%d][%d] 0x%02X --> 0x%02X (0x%X 0x%X) (%X)\n", |
4086 | *tree_i, node, pre_prob, new_prob, tree_left, tree_right, |
4087 | *cur_prob[coef_node_start/4*2]); |
4088 | */ |
4089 | } |
4090 | |
4091 | |
/*void adapt_coef_probs(void)*/
/*
 * adapt_coef_probs() - backward adaptation of the VP9 probability tables.
 *
 * Reads the hardware-maintained symbol counters (@count) and the previous
 * probability table (@prev_prob), and writes adapted probabilities into
 * @cur_prob.  Probabilities are packed one 8-bit value per tree node:
 * node at byte offset N lives in 32-bit word N/4*2 at bit (N & 3) * 8,
 * i.e. only every other word of the buffer is used.
 *
 * @pic_count: decoded picture counter (unused here; referenced only by a
 *             commented-out variant of update_factor).
 * @prev_kf:   non-zero if the previous frame was a key frame; selects the
 *             larger coefficient update factor (128 instead of 112).
 * @cur_kf:    non-zero if the current frame is a key frame; when set,
 *             only the coefficient probabilities are adapted and all the
 *             inter-frame trees below are skipped.
 * @pre_fc:    previous frame-context index (debug print only).
 * @prev_prob: probability table the counters were gathered against.
 * @cur_prob:  probability table updated in place.
 * @count:     counter memory laid out per the VP9_*_COUNT_* offsets.
 */
void adapt_coef_probs(int pic_count, int prev_kf, int cur_kf, int pre_fc,
	unsigned int *prev_prob, unsigned int *cur_prob, unsigned int *count)
{
	/* 80 * 64bits = 0xF00 ( use 0x1000 4K bytes)
	 *unsigned int prev_prob[496*2];
	 *unsigned int cur_prob[496*2];
	 *0x300 * 128bits = 0x3000 (32K Bytes)
	 *unsigned int count[0x300*4];
	 */

	int tx_size, coef_tx_size_start, coef_count_tx_size_start;
	int plane, coef_plane_start, coef_count_plane_start;
	int type, coef_type_start, coef_count_type_start;
	int band, coef_band_start, coef_count_band_start;
	int cxt_num;
	int cxt, coef_cxt_start, coef_count_cxt_start;
	int node, coef_node_start, coef_count_node_start;

	int tree_i, tree_left, tree_right;
	int mvd_i;

	int count_sat = 24;
	/*int update_factor = 112;*/ /*If COEF_MAX_UPDATE_FACTOR_AFTER_KEY,
	 *use 128
	 */
	/* If COEF_MAX_UPDATE_FACTOR_AFTER_KEY, use 128*/
	/*int update_factor = (pic_count == 1) ? 128 : 112;*/
	int update_factor = cur_kf ? 112 :
		prev_kf ? 128 : 112;

	int prob_32;
	int prob_res;
	int prob_shift;
	int pre_prob;

	int num, den;
	int get_prob;
	int m_count;
	int factor;

	int new_prob;

	if (debug & VP9_DEBUG_MERGE)
		pr_info
		("\n ##adapt_coef_probs (pre_fc : %d ,prev_kf : %d,cur_kf : %d)##\n\n",
		pre_fc, prev_kf, cur_kf);

	/*adapt_coef_probs*/
	/*
	 * Coefficient probabilities: 4 tx sizes x 2 planes x 2 types x
	 * 6 bands; band 0 has 3 contexts, the others 6.  Each context is a
	 * 3-node binary tree whose branch counts are derived from the five
	 * raw counters {n0, n1, n2, neob, nneob}.
	 */
	for (tx_size = 0; tx_size < 4; tx_size++) {
		coef_tx_size_start = VP9_COEF_START
			+ tx_size * 4 * VP9_COEF_SIZE_ONE_SET;
		coef_count_tx_size_start = VP9_COEF_COUNT_START
			+ tx_size * 4 * VP9_COEF_COUNT_SIZE_ONE_SET;
		coef_plane_start = coef_tx_size_start;
		coef_count_plane_start = coef_count_tx_size_start;
		for (plane = 0; plane < 2; plane++) {
			coef_type_start = coef_plane_start;
			coef_count_type_start = coef_count_plane_start;
			for (type = 0; type < 2; type++) {
				coef_band_start = coef_type_start;
				coef_count_band_start = coef_count_type_start;
				for (band = 0; band < 6; band++) {
					if (band == 0)
						cxt_num = 3;
					else
						cxt_num = 6;
					coef_cxt_start = coef_band_start;
					coef_count_cxt_start =
						coef_count_band_start;
					for (cxt = 0; cxt < cxt_num; cxt++) {
						const int n0 =
							count[coef_count_cxt_start];
						const int n1 =
							count[coef_count_cxt_start + 1];
						const int n2 =
							count[coef_count_cxt_start + 2];
						const int neob =
							count[coef_count_cxt_start + 3];
						const int nneob =
							count[coef_count_cxt_start + 4];
						/* {left, right} counts for the
						 * 3 tree nodes
						 */
						const unsigned int
						branch_ct[3][2] = {
							{ neob, nneob },
							{ n0, n1 + n2 },
							{ n1, n2 }
						};
						coef_node_start =
							coef_cxt_start;
						for
						(node = 0; node < 3; node++) {
							prob_32 =
								prev_prob[
								coef_node_start
								/ 4 * 2];
							prob_res =
								coef_node_start & 3;
							prob_shift =
								prob_res * 8;
							pre_prob =
								(prob_32 >> prob_shift)
								& 0xff;

							/*get_binary_prob*/
							num =
								branch_ct[node][0];
							den =
								branch_ct[node][0] +
								branch_ct[node][1];
							m_count = (den <
								count_sat)
								? den : count_sat;

							get_prob =
								(den == 0) ? 128u :
								clip_prob(
								div_r32(((int64_t)
								num * 256
								+ (den >> 1)),
								den));

							/* blend old and new,
							 * scaled by the
							 * saturated count
							 */
							factor =
								update_factor * m_count
								/ count_sat;
							new_prob =
								ROUND_POWER_OF_TWO
								(pre_prob *
								(256 - factor) +
								get_prob * factor, 8);

							cur_prob[coef_node_start
								/ 4 * 2] =
								(cur_prob
								[coef_node_start
								/ 4 * 2] & (~(0xff <<
								prob_shift))) |
								(new_prob <<
								prob_shift);

							coef_node_start += 1;
						}

						coef_cxt_start =
							coef_cxt_start + 3;
						coef_count_cxt_start =
							coef_count_cxt_start
							+ 5;
					}
					if (band == 0) {
						coef_band_start += 10;
						coef_count_band_start += 15;
					} else {
						coef_band_start += 18;
						coef_count_band_start += 30;
					}
				}
				coef_type_start += VP9_COEF_SIZE_ONE_SET;
				coef_count_type_start +=
					VP9_COEF_COUNT_SIZE_ONE_SET;
			}
			coef_plane_start += 2 * VP9_COEF_SIZE_ONE_SET;
			coef_count_plane_start +=
				2 * VP9_COEF_COUNT_SIZE_ONE_SET;
		}
	}

	/* Non-key frames only: adapt the mode / reference / MV trees. */
	if (cur_kf == 0) {
		/*mode_mv_merge_probs - merge_intra_inter_prob*/
		/*
		 * Linear pass over all the binary (two-counter) sections.
		 * Counts advance by 2 per node, coef_node_start by 1; it is
		 * re-seeded whenever a section's *_COUNT_START is reached.
		 * COMP_REF and SINGLE_REF follow COMP_INTER contiguously in
		 * both buffers, so no re-seed is needed for them (hence the
		 * commented-out branches below).
		 */
		for (coef_count_node_start = VP9_INTRA_INTER_COUNT_START;
			coef_count_node_start < (VP9_MV_CLASS0_HP_1_COUNT_START +
			VP9_MV_CLASS0_HP_1_COUNT_SIZE); coef_count_node_start += 2) {

			if (coef_count_node_start ==
				VP9_INTRA_INTER_COUNT_START) {
				if (debug & VP9_DEBUG_MERGE)
					pr_info(" # merge_intra_inter_prob\n");
				coef_node_start = VP9_INTRA_INTER_START;
			} else if (coef_count_node_start ==
				VP9_COMP_INTER_COUNT_START) {
				if (debug & VP9_DEBUG_MERGE)
					pr_info(" # merge_comp_inter_prob\n");
				coef_node_start = VP9_COMP_INTER_START;
			}
			/*
			 *else if (coef_count_node_start ==
			 *	VP9_COMP_REF_COUNT_START) {
			 *	pr_info(" # merge_comp_inter_prob\n");
			 *	coef_node_start = VP9_COMP_REF_START;
			 *}
			 *else if (coef_count_node_start ==
			 *	VP9_SINGLE_REF_COUNT_START) {
			 *	pr_info(" # merge_comp_inter_prob\n");
			 *	coef_node_start = VP9_SINGLE_REF_START;
			 *}
			 */
			else if (coef_count_node_start ==
				VP9_TX_MODE_COUNT_START) {
				if (debug & VP9_DEBUG_MERGE)
					pr_info(" # merge_tx_mode_probs\n");
				coef_node_start = VP9_TX_MODE_START;
			} else if (coef_count_node_start ==
				VP9_SKIP_COUNT_START) {
				if (debug & VP9_DEBUG_MERGE)
					pr_info(" # merge_skip_probs\n");
				coef_node_start = VP9_SKIP_START;
			} else if (coef_count_node_start ==
				VP9_MV_SIGN_0_COUNT_START) {
				if (debug & VP9_DEBUG_MERGE)
					pr_info(" # merge_sign_0\n");
				coef_node_start = VP9_MV_SIGN_0_START;
			} else if (coef_count_node_start ==
				VP9_MV_SIGN_1_COUNT_START) {
				if (debug & VP9_DEBUG_MERGE)
					pr_info(" # merge_sign_1\n");
				coef_node_start = VP9_MV_SIGN_1_START;
			} else if (coef_count_node_start ==
				VP9_MV_BITS_0_COUNT_START) {
				if (debug & VP9_DEBUG_MERGE)
					pr_info(" # merge_bits_0\n");
				coef_node_start = VP9_MV_BITS_0_START;
			} else if (coef_count_node_start ==
				VP9_MV_BITS_1_COUNT_START) {
				if (debug & VP9_DEBUG_MERGE)
					pr_info(" # merge_bits_1\n");
				coef_node_start = VP9_MV_BITS_1_START;
			} else if (coef_count_node_start ==
				VP9_MV_CLASS0_HP_0_COUNT_START) {
				if (debug & VP9_DEBUG_MERGE)
					pr_info(" # merge_class0_hp\n");
				coef_node_start = VP9_MV_CLASS0_HP_0_START;
			}


			den = count[coef_count_node_start] +
				count[coef_count_node_start + 1];

			prob_32 = prev_prob[coef_node_start / 4 * 2];
			prob_res = coef_node_start & 3;
			prob_shift = prob_res * 8;
			pre_prob = (prob_32 >> prob_shift) & 0xff;

			if (den == 0)
				new_prob = pre_prob;
			else {
				m_count = (den < MODE_MV_COUNT_SAT) ?
					den : MODE_MV_COUNT_SAT;
				get_prob =
					clip_prob(
					div_r32(((int64_t)count[coef_count_node_start]
					* 256 + (den >> 1)),
					den));
				/*weighted_prob*/
				factor = count_to_update_factor[m_count];
				new_prob =
					ROUND_POWER_OF_TWO(pre_prob * (256 - factor)
					+ get_prob * factor, 8);
			}
			cur_prob[coef_node_start / 4 * 2] =
				(cur_prob[coef_node_start / 4 * 2] &
				(~(0xff << prob_shift)))
				| (new_prob << prob_shift);

			coef_node_start = coef_node_start + 1;
		}
		if (debug & VP9_DEBUG_MERGE)
			pr_info(" # merge_vp9_inter_mode_tree\n");
		/* 7 contexts, 3-node tree over 4 leaf counters each */
		coef_node_start = VP9_INTER_MODE_START;
		coef_count_node_start = VP9_INTER_MODE_COUNT_START;
		for (tree_i = 0; tree_i < 7; tree_i++) {
			for (node = 0; node < 3; node++) {
				switch (node) {
				case 2:
					tree_left =
						count[coef_count_node_start + 1];
					tree_right =
						count[coef_count_node_start + 3];
					break;
				case 1:
					tree_left =
						count[coef_count_node_start + 0];
					tree_right =
						count[coef_count_node_start + 1]
						+ count[coef_count_node_start + 3];
					break;
				default:
					tree_left =
						count[coef_count_node_start + 2];
					tree_right =
						count[coef_count_node_start + 0]
						+ count[coef_count_node_start + 1]
						+ count[coef_count_node_start + 3];
					break;

				}

				vp9_tree_merge_probs(prev_prob, cur_prob,
					coef_node_start, tree_left, tree_right,
					tree_i, node);

				coef_node_start = coef_node_start + 1;
			}
			coef_count_node_start = coef_count_node_start + 4;
		}
		if (debug & VP9_DEBUG_MERGE)
			pr_info(" # merge_vp9_intra_mode_tree\n");
		/* 14 contexts, 9-node tree over the 10 intra-mode counters */
		coef_node_start = VP9_IF_Y_MODE_START;
		coef_count_node_start = VP9_IF_Y_MODE_COUNT_START;
		for (tree_i = 0; tree_i < 14; tree_i++) {
			for (node = 0; node < 9; node++) {
				switch (node) {
				case 8:
					tree_left =
						count[coef_count_node_start+D153_PRED];
					tree_right =
						count[coef_count_node_start+D207_PRED];
					break;
				case 7:
					tree_left =
						count[coef_count_node_start+D63_PRED];
					tree_right =
						count[coef_count_node_start+D207_PRED] +
						count[coef_count_node_start+D153_PRED];
					break;
				case 6:
					tree_left =
						count[coef_count_node_start + D45_PRED];
					tree_right =
						count[coef_count_node_start+D207_PRED] +
						count[coef_count_node_start+D153_PRED] +
						count[coef_count_node_start+D63_PRED];
					break;
				case 5:
					tree_left =
						count[coef_count_node_start+D135_PRED];
					tree_right =
						count[coef_count_node_start+D117_PRED];
					break;
				case 4:
					tree_left =
						count[coef_count_node_start+H_PRED];
					tree_right =
						count[coef_count_node_start+D117_PRED] +
						count[coef_count_node_start+D135_PRED];
					break;
				case 3:
					tree_left =
						count[coef_count_node_start+H_PRED] +
						count[coef_count_node_start+D117_PRED] +
						count[coef_count_node_start+D135_PRED];
					tree_right =
						count[coef_count_node_start+D45_PRED] +
						count[coef_count_node_start+D207_PRED] +
						count[coef_count_node_start+D153_PRED] +
						count[coef_count_node_start+D63_PRED];
					break;
				case 2:
					tree_left =
						count[coef_count_node_start+V_PRED];
					tree_right =
						count[coef_count_node_start+H_PRED] +
						count[coef_count_node_start+D117_PRED] +
						count[coef_count_node_start+D135_PRED] +
						count[coef_count_node_start+D45_PRED] +
						count[coef_count_node_start+D207_PRED] +
						count[coef_count_node_start+D153_PRED] +
						count[coef_count_node_start+D63_PRED];
					break;
				case 1:
					tree_left =
						count[coef_count_node_start+TM_PRED];
					tree_right =
						count[coef_count_node_start+V_PRED] +
						count[coef_count_node_start+H_PRED] +
						count[coef_count_node_start+D117_PRED] +
						count[coef_count_node_start+D135_PRED] +
						count[coef_count_node_start+D45_PRED] +
						count[coef_count_node_start+D207_PRED] +
						count[coef_count_node_start+D153_PRED] +
						count[coef_count_node_start+D63_PRED];
					break;
				default:
					tree_left =
						count[coef_count_node_start+DC_PRED];
					tree_right =
						count[coef_count_node_start+TM_PRED] +
						count[coef_count_node_start+V_PRED] +
						count[coef_count_node_start+H_PRED] +
						count[coef_count_node_start+D117_PRED] +
						count[coef_count_node_start+D135_PRED] +
						count[coef_count_node_start+D45_PRED] +
						count[coef_count_node_start+D207_PRED] +
						count[coef_count_node_start+D153_PRED] +
						count[coef_count_node_start+D63_PRED];
					break;

				}

				vp9_tree_merge_probs(prev_prob, cur_prob,
					coef_node_start, tree_left, tree_right,
					tree_i, node);

				coef_node_start = coef_node_start + 1;
			}
			coef_count_node_start = coef_count_node_start + 10;
		}

		if (debug & VP9_DEBUG_MERGE)
			pr_info(" # merge_vp9_partition_tree\n");
		/* 16 contexts, 3-node tree over 4 partition counters */
		coef_node_start = VP9_PARTITION_P_START;
		coef_count_node_start = VP9_PARTITION_P_COUNT_START;
		for (tree_i = 0; tree_i < 16; tree_i++) {
			for (node = 0; node < 3; node++) {
				switch (node) {
				case 2:
					tree_left =
						count[coef_count_node_start + 2];
					tree_right =
						count[coef_count_node_start + 3];
					break;
				case 1:
					tree_left =
						count[coef_count_node_start + 1];
					tree_right =
						count[coef_count_node_start + 2] +
						count[coef_count_node_start + 3];
					break;
				default:
					tree_left =
						count[coef_count_node_start + 0];
					tree_right =
						count[coef_count_node_start + 1] +
						count[coef_count_node_start + 2] +
						count[coef_count_node_start + 3];
					break;

				}

				vp9_tree_merge_probs(prev_prob, cur_prob,
					coef_node_start,
					tree_left, tree_right, tree_i, node);

				coef_node_start = coef_node_start + 1;
			}
			coef_count_node_start = coef_count_node_start + 4;
		}

		if (debug & VP9_DEBUG_MERGE)
			pr_info(" # merge_vp9_switchable_interp_tree\n");
		/* 4 contexts, 2-node tree over 3 filter counters */
		coef_node_start = VP9_INTERP_START;
		coef_count_node_start = VP9_INTERP_COUNT_START;
		for (tree_i = 0; tree_i < 4; tree_i++) {
			for (node = 0; node < 2; node++) {
				switch (node) {
				case 1:
					tree_left =
						count[coef_count_node_start + 1];
					tree_right =
						count[coef_count_node_start + 2];
					break;
				default:
					tree_left =
						count[coef_count_node_start + 0];
					tree_right =
						count[coef_count_node_start + 1] +
						count[coef_count_node_start + 2];
					break;

				}

				vp9_tree_merge_probs(prev_prob, cur_prob,
					coef_node_start,
					tree_left, tree_right, tree_i, node);

				coef_node_start = coef_node_start + 1;
			}
			coef_count_node_start = coef_count_node_start + 3;
		}

		if (debug & VP9_DEBUG_MERGE)
			pr_info("# merge_vp9_mv_joint_tree\n");
		/* single context, 3-node tree over 4 joint counters */
		coef_node_start = VP9_MV_JOINTS_START;
		coef_count_node_start = VP9_MV_JOINTS_COUNT_START;
		for (tree_i = 0; tree_i < 1; tree_i++) {
			for (node = 0; node < 3; node++) {
				switch (node) {
				case 2:
					tree_left =
						count[coef_count_node_start + 2];
					tree_right =
						count[coef_count_node_start + 3];
					break;
				case 1:
					tree_left =
						count[coef_count_node_start + 1];
					tree_right =
						count[coef_count_node_start + 2] +
						count[coef_count_node_start + 3];
					break;
				default:
					tree_left =
						count[coef_count_node_start + 0];
					tree_right =
						count[coef_count_node_start + 1] +
						count[coef_count_node_start + 2] +
						count[coef_count_node_start + 3];
					break;
				}

				vp9_tree_merge_probs(prev_prob, cur_prob,
					coef_node_start,
					tree_left, tree_right, tree_i, node);

				coef_node_start = coef_node_start + 1;
			}
			coef_count_node_start = coef_count_node_start + 4;
		}

		/* Per MV component (mvd_i = 0/1): class tree, class0 bit,
		 * and class0 fractional-pel tree.
		 */
		for (mvd_i = 0; mvd_i < 2; mvd_i++) {
			if (debug & VP9_DEBUG_MERGE)
				pr_info(" # merge_vp9_mv_class_tree [%d] -\n", mvd_i);
			coef_node_start =
				mvd_i ? VP9_MV_CLASSES_1_START : VP9_MV_CLASSES_0_START;
			coef_count_node_start =
				mvd_i ? VP9_MV_CLASSES_1_COUNT_START
				: VP9_MV_CLASSES_0_COUNT_START;
			tree_i = 0;
			for (node = 0; node < 10; node++) {
				switch (node) {
				case 9:
					tree_left =
						count[coef_count_node_start + 9];
					tree_right =
						count[coef_count_node_start + 10];
					break;
				case 8:
					tree_left =
						count[coef_count_node_start + 7];
					tree_right =
						count[coef_count_node_start + 8];
					break;
				case 7:
					tree_left =
						count[coef_count_node_start + 7] +
						count[coef_count_node_start + 8];
					tree_right =
						count[coef_count_node_start + 9] +
						count[coef_count_node_start + 10];
					break;
				case 6:
					tree_left =
						count[coef_count_node_start + 6];
					tree_right =
						count[coef_count_node_start + 7] +
						count[coef_count_node_start + 8] +
						count[coef_count_node_start + 9] +
						count[coef_count_node_start + 10];
					break;
				case 5:
					tree_left =
						count[coef_count_node_start + 4];
					tree_right =
						count[coef_count_node_start + 5];
					break;
				case 4:
					tree_left =
						count[coef_count_node_start + 4] +
						count[coef_count_node_start + 5];
					tree_right =
						count[coef_count_node_start + 6] +
						count[coef_count_node_start + 7] +
						count[coef_count_node_start + 8] +
						count[coef_count_node_start + 9] +
						count[coef_count_node_start + 10];
					break;
				case 3:
					tree_left =
						count[coef_count_node_start + 2];
					tree_right =
						count[coef_count_node_start + 3];
					break;
				case 2:
					tree_left =
						count[coef_count_node_start + 2] +
						count[coef_count_node_start + 3];
					tree_right =
						count[coef_count_node_start + 4] +
						count[coef_count_node_start + 5] +
						count[coef_count_node_start + 6] +
						count[coef_count_node_start + 7] +
						count[coef_count_node_start + 8] +
						count[coef_count_node_start + 9] +
						count[coef_count_node_start + 10];
					break;
				case 1:
					tree_left =
						count[coef_count_node_start + 1];
					tree_right =
						count[coef_count_node_start + 2] +
						count[coef_count_node_start + 3] +
						count[coef_count_node_start + 4] +
						count[coef_count_node_start + 5] +
						count[coef_count_node_start + 6] +
						count[coef_count_node_start + 7] +
						count[coef_count_node_start + 8] +
						count[coef_count_node_start + 9] +
						count[coef_count_node_start + 10];
					break;
				default:
					tree_left =
						count[coef_count_node_start + 0];
					tree_right =
						count[coef_count_node_start + 1] +
						count[coef_count_node_start + 2] +
						count[coef_count_node_start + 3] +
						count[coef_count_node_start + 4] +
						count[coef_count_node_start + 5] +
						count[coef_count_node_start + 6] +
						count[coef_count_node_start + 7] +
						count[coef_count_node_start + 8] +
						count[coef_count_node_start + 9] +
						count[coef_count_node_start + 10];
					break;

				}

				vp9_tree_merge_probs(prev_prob, cur_prob,
					coef_node_start, tree_left, tree_right,
					tree_i, node);

				coef_node_start = coef_node_start + 1;
			}

			if (debug & VP9_DEBUG_MERGE)
				pr_info(" # merge_vp9_mv_class0_tree [%d] -\n", mvd_i);
			coef_node_start =
				mvd_i ? VP9_MV_CLASS0_1_START : VP9_MV_CLASS0_0_START;
			coef_count_node_start =
				mvd_i ? VP9_MV_CLASS0_1_COUNT_START :
				VP9_MV_CLASS0_0_COUNT_START;
			tree_i = 0;
			node = 0;
			tree_left = count[coef_count_node_start + 0];
			tree_right = count[coef_count_node_start + 1];

			vp9_tree_merge_probs(prev_prob, cur_prob, coef_node_start,
				tree_left, tree_right, tree_i, node);
			if (debug & VP9_DEBUG_MERGE)
				pr_info(" # merge_vp9_mv_fp_tree_class0_fp [%d] -\n",
					mvd_i);
			coef_node_start =
				mvd_i ? VP9_MV_CLASS0_FP_1_START :
				VP9_MV_CLASS0_FP_0_START;
			coef_count_node_start =
				mvd_i ? VP9_MV_CLASS0_FP_1_COUNT_START :
				VP9_MV_CLASS0_FP_0_COUNT_START;
			for (tree_i = 0; tree_i < 3; tree_i++) {
				for (node = 0; node < 3; node++) {
					switch (node) {
					case 2:
						tree_left =
							count[coef_count_node_start + 2];
						tree_right =
							count[coef_count_node_start + 3];
						break;
					case 1:
						tree_left =
							count[coef_count_node_start + 1];
						tree_right =
							count[coef_count_node_start + 2]
							+ count[coef_count_node_start + 3];
						break;
					default:
						tree_left =
							count[coef_count_node_start + 0];
						tree_right =
							count[coef_count_node_start + 1]
							+ count[coef_count_node_start + 2]
							+ count[coef_count_node_start + 3];
						break;

					}

					vp9_tree_merge_probs(prev_prob, cur_prob,
						coef_node_start, tree_left, tree_right,
						tree_i, node);

					coef_node_start = coef_node_start + 1;
				}
				coef_count_node_start = coef_count_node_start + 4;
			}

		} /* for mvd_i (mvd_y or mvd_x)*/
	}

}
4787 | |
4788 | static bool v4l_is_there_vframe_bound(struct VP9Decoder_s *pbi) |
4789 | { |
4790 | int i; |
4791 | struct VP9_Common_s *const cm = &pbi->common; |
4792 | struct RefCntBuffer_s *frame_bufs = cm->buffer_pool->frame_bufs; |
4793 | |
4794 | for (i = 0; i < pbi->used_buf_num; ++i) { |
4795 | if (frame_bufs[i].buf.vframe_bound) |
4796 | return true; |
4797 | } |
4798 | |
4799 | return false; |
4800 | } |
4801 | |
4802 | static void v4l_mmu_buffer_release(struct VP9Decoder_s *pbi) |
4803 | { |
4804 | struct VP9_Common_s *const cm = &pbi->common; |
4805 | struct RefCntBuffer_s *frame_bufs = cm->buffer_pool->frame_bufs; |
4806 | int i; |
4807 | |
4808 | /* release workspace */ |
4809 | if (pbi->bmmu_box) |
4810 | decoder_bmmu_box_free_idx(pbi->bmmu_box, |
4811 | WORK_SPACE_BUF_ID); |
4812 | /* |
4813 | * it's only when vframe get back to driver, right now we can be sure |
4814 | * that vframe and fd are related. if the playback exits, the capture |
4815 | * requires the upper app to release when the fd is closed, and others |
4816 | * buffers drivers are released by driver. |
4817 | */ |
4818 | for (i = 0; i < pbi->used_buf_num; ++i) { |
4819 | if (!frame_bufs[i].buf.vframe_bound) { |
4820 | if (pbi->bmmu_box) |
4821 | decoder_bmmu_box_free_idx(pbi->bmmu_box, |
4822 | HEADER_BUFFER_IDX(i)); |
4823 | if (pbi->mmu_box) |
4824 | decoder_mmu_box_free_idx(pbi->mmu_box, i); |
4825 | |
4826 | vp9_print(pbi, PRINT_FLAG_V4L_DETAIL, |
4827 | "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n", |
4828 | __func__, i, pbi->bmmu_box, pbi->mmu_box); |
4829 | } |
4830 | } |
4831 | } |
4832 | |
4833 | static void uninit_mmu_buffers(struct VP9Decoder_s *pbi) |
4834 | { |
4835 | #ifndef MV_USE_FIXED_BUF |
4836 | dealloc_mv_bufs(pbi); |
4837 | #endif |
4838 | if (pbi->is_used_v4l && |
4839 | v4l_is_there_vframe_bound(pbi)) { |
4840 | if (get_double_write_mode(pbi) != 0x10) { |
4841 | v4l_mmu_buffer_release(pbi); |
4842 | return; |
4843 | } |
4844 | } |
4845 | |
4846 | if (pbi->mmu_box) |
4847 | decoder_mmu_box_free(pbi->mmu_box); |
4848 | pbi->mmu_box = NULL; |
4849 | |
4850 | if (pbi->bmmu_box) |
4851 | decoder_bmmu_box_free(pbi->bmmu_box); |
4852 | pbi->bmmu_box = NULL; |
4853 | } |
4854 | |
4855 | static int calc_luc_quantity(u32 w, u32 h) |
4856 | { |
4857 | int lcu_size = 64; /*fixed 64*/ |
4858 | int pic_width_64 = (w + 63) & (~0x3f); |
4859 | int pic_height_32 = (h + 31) & (~0x1f); |
4860 | int pic_width_lcu = (pic_width_64 % lcu_size) ? |
4861 | pic_width_64 / lcu_size + 1 : pic_width_64 / lcu_size; |
4862 | int pic_height_lcu = (pic_height_32 % lcu_size) ? |
4863 | pic_height_32 / lcu_size + 1 : pic_height_32 / lcu_size; |
4864 | |
4865 | return pic_width_lcu * pic_height_lcu; |
4866 | } |
4867 | |
/*
 * Bind a v4l2 output buffer to the picture slot @pic and fill in its
 * addresses (double-write luma/chroma, compressed header, and - with
 * MV_USE_FIXED_BUF - the MV write region carved from the workspace).
 *
 * Returns 0 on success, negative on failure (invalid index or no v4l
 * buffer available).  NOTE(review): with MV_USE_FIXED_BUF, if the MV
 * region would overflow the workspace the body is skipped and the
 * value returned is whatever vdec_v4l_get_buffer() returned - confirm
 * callers treat that as failure.
 */
static int v4l_alloc_and_config_pic(struct VP9Decoder_s *pbi,
	struct PIC_BUFFER_CONFIG_s *pic)
{
	int ret = -1;
	int i = pic->index;
	int dw_mode = get_double_write_mode_init(pbi);
	int lcu_total = calc_luc_quantity(pbi->frame_width, pbi->frame_height);
#ifdef MV_USE_FIXED_BUF
	/* End of the fixed MV area; used to bound the per-picture slice. */
	u32 mpred_mv_end = pbi->work_space_buf->mpred_mv.buf_start +
		pbi->work_space_buf->mpred_mv.buf_size;
#endif
	struct vdec_v4l2_buffer *fb = NULL;

	if (i < 0)
		return ret;

	/* Pull a capture buffer from the v4l2 context. */
	ret = vdec_v4l_get_buffer(pbi->v4l2_ctx, &fb);
	if (ret < 0) {
		vp9_print(pbi, 0, "[%d] VP9 get buffer fail.\n",
			((struct aml_vcodec_ctx *) (pbi->v4l2_ctx))->id);
		return ret;
	}

	/* Compressed-frame header was pre-allocated in init_pic_list(). */
	if (pbi->mmu_enable) {
		pbi->m_BUF[i].header_addr = decoder_bmmu_box_get_phy_addr(
			pbi->bmmu_box, HEADER_BUFFER_IDX(i));
		if (debug & VP9_DEBUG_BUFMGR_MORE) {
			pr_info("MMU header_adr %d: %ld\n",
				i, pbi->m_BUF[i].header_addr);
		}
	}

#ifdef MV_USE_FIXED_BUF
	/* Only proceed if picture i's MV slice fits inside the region. */
	if ((pbi->work_space_buf->mpred_mv.buf_start +
		(((i + 1) * lcu_total) * MV_MEM_UNIT))
		<= mpred_mv_end) {
#endif
	pbi->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
	pic->cma_alloc_addr = fb->m.mem[0].addr;
	/* Single-plane: chroma follows luma inside one allocation. */
	if (fb->num_planes == 1) {
		pbi->m_BUF[i].start_adr = fb->m.mem[0].addr;
		pbi->m_BUF[i].luma_size = fb->m.mem[0].offset;
		pbi->m_BUF[i].size = fb->m.mem[0].size;
		fb->m.mem[0].bytes_used = fb->m.mem[0].size;
		pic->dw_y_adr = pbi->m_BUF[i].start_adr;
		pic->dw_u_v_adr = pic->dw_y_adr + pbi->m_BUF[i].luma_size;
	} else if (fb->num_planes == 2) {
		/* Two planes: separate luma and chroma allocations. */
		pbi->m_BUF[i].start_adr = fb->m.mem[0].addr;
		pbi->m_BUF[i].size = fb->m.mem[0].size;
		pbi->m_BUF[i].chroma_addr = fb->m.mem[1].addr;
		pbi->m_BUF[i].chroma_size = fb->m.mem[1].size;
		fb->m.mem[0].bytes_used = fb->m.mem[0].size;
		fb->m.mem[1].bytes_used = fb->m.mem[1].size;
		pic->dw_y_adr = pbi->m_BUF[i].start_adr;
		pic->dw_u_v_adr = pbi->m_BUF[i].chroma_addr;
	}

	/* config frame buffer */
	if (pbi->mmu_enable)
		pic->header_adr = pbi->m_BUF[i].header_addr;

	pic->BUF_index = i;
	pic->lcu_total = lcu_total;
	pic->mc_canvas_y = pic->index;
	pic->mc_canvas_u_v = pic->index;

	/* dw mode 0x10: separate canvases for Y and UV (2*i, 2*i+1). */
	if (dw_mode & 0x10) {
		pic->mc_canvas_y = (pic->index << 1);
		pic->mc_canvas_u_v = (pic->index << 1) + 1;
	}

#ifdef MV_USE_FIXED_BUF
	/* Picture i writes its MVs at slice i of the fixed region. */
	pic->mpred_mv_wr_start_addr =
		pbi->work_space_buf->mpred_mv.buf_start +
		((pic->index * lcu_total) * MV_MEM_UNIT);
#endif
	if (debug) {
		pr_info("%s index %d BUF_index %d ",
			__func__, pic->index,
			pic->BUF_index);
		pr_info("comp_body_size %x comp_buf_size %x ",
			pic->comp_body_size,
			pic->buf_size);
		pr_info("mpred_mv_wr_start_adr %ld\n",
			pic->mpred_mv_wr_start_addr);
		pr_info("dw_y_adr %d, pic_config->dw_u_v_adr =%d\n",
			pic->dw_y_adr,
			pic->dw_u_v_adr);
	}
#ifdef MV_USE_FIXED_BUF
	}
#endif
	return ret;
}
4962 | |
/*
 * Allocate (non-v4l path) and configure the physical memory for one
 * picture buffer: the compressed reference (losless comp body+header,
 * via MMU header or a bmmu allocation), the optional double-write
 * YUV surface, canvas indices, and - with MV_USE_FIXED_BUF - the MV
 * write region inside the workspace buffer.
 *
 * Returns 0 on success, negative on allocation failure.  If the fixed
 * MV region cannot hold this picture's slice, the body is skipped and
 * the initial -1 is returned.
 */
static int config_pic(struct VP9Decoder_s *pbi,
				struct PIC_BUFFER_CONFIG_s *pic_config)
{
	int ret = -1;
	int i;
	int pic_width = pbi->init_pic_w;
	int pic_height = pbi->init_pic_h;
	int lcu_size = 64; /*fixed 64*/
	/* Align width up to 64 and height up to 32, then count LCUs
	 * per axis with round-up (same math as calc_luc_quantity()).
	 */
	int pic_width_64 = (pic_width + 63) & (~0x3f);
	int pic_height_32 = (pic_height + 31) & (~0x1f);
	int pic_width_lcu = (pic_width_64 % lcu_size) ?
				pic_width_64 / lcu_size + 1
				: pic_width_64 / lcu_size;
	int pic_height_lcu = (pic_height_32 % lcu_size) ?
				pic_height_32 / lcu_size + 1
				: pic_height_32 / lcu_size;
	int lcu_total = pic_width_lcu * pic_height_lcu;
#ifdef MV_USE_FIXED_BUF
	/* End of the fixed MV area; bounds the per-picture slice. */
	u32 mpred_mv_end = pbi->work_space_buf->mpred_mv.buf_start +
			pbi->work_space_buf->mpred_mv.buf_size;
#endif
	u32 y_adr = 0;
	int buf_size = 0;

	int losless_comp_header_size =
			compute_losless_comp_header_size(pic_width,
			pic_height);
	int losless_comp_body_size = compute_losless_comp_body_size(pic_width,
			pic_height, buf_alloc_depth == 10);
	int mc_buffer_size = losless_comp_header_size + losless_comp_body_size;
	int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
	int mc_buffer_size_u_v = 0;
	int mc_buffer_size_u_v_h = 0;
	int dw_mode = get_double_write_mode_init(pbi);

	pbi->lcu_total = lcu_total;

	if (dw_mode) {
		/* Double-write surface may be down-scaled; size it from
		 * the scaled dimensions (Y + UV = 3 * half-LCU units).
		 */
		int pic_width_dw = pic_width /
			get_double_write_ratio(pbi, dw_mode);
		int pic_height_dw = pic_height /
			get_double_write_ratio(pbi, dw_mode);

		int pic_width_64_dw = (pic_width_dw + 63) & (~0x3f);
		int pic_height_32_dw = (pic_height_dw + 31) & (~0x1f);
		int pic_width_lcu_dw = (pic_width_64_dw % lcu_size) ?
					pic_width_64_dw / lcu_size + 1
					: pic_width_64_dw / lcu_size;
		int pic_height_lcu_dw = (pic_height_32_dw % lcu_size) ?
					pic_height_32_dw / lcu_size + 1
					: pic_height_32_dw / lcu_size;
		int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
		mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
		mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
		/*64k alignment*/
		buf_size = ((mc_buffer_size_u_v_h << 16) * 3);
		buf_size = ((buf_size + 0xffff) >> 16) << 16;
	}

	if (mc_buffer_size & 0xffff) /*64k alignment*/
		mc_buffer_size_h += 1;
	/* Without MMU and without dw-only mode, the compressed reference
	 * lives in the same linear allocation as the dw surface.
	 */
	if ((!pbi->mmu_enable) && ((dw_mode & 0x10) == 0))
		buf_size += (mc_buffer_size_h << 16);

	if (pbi->mmu_enable) {
		/* Header was pre-allocated per index in init_pic_list(). */
		pic_config->header_adr = decoder_bmmu_box_get_phy_addr(
			pbi->bmmu_box, HEADER_BUFFER_IDX(pic_config->index));

		if (debug & VP9_DEBUG_BUFMGR_MORE) {
			pr_info("MMU header_adr %d: %ld\n",
				pic_config->index, pic_config->header_adr);
		}
	}

	i = pic_config->index;
#ifdef MV_USE_FIXED_BUF
	/* Only proceed if picture i's MV slice fits inside the region. */
	if ((pbi->work_space_buf->mpred_mv.buf_start +
		(((i + 1) * lcu_total) * MV_MEM_UNIT))
		<= mpred_mv_end
	) {
#endif
		if (buf_size > 0) {
			ret = decoder_bmmu_box_alloc_buf_phy(pbi->bmmu_box,
					VF_BUFFER_IDX(i),
					buf_size, DRIVER_NAME,
					&pic_config->cma_alloc_addr);
			if (ret < 0) {
				pr_info(
					"decoder_bmmu_box_alloc_buf_phy idx %d size %d fail\n",
					VF_BUFFER_IDX(i),
					buf_size
					);
				return ret;
			}

			if (pic_config->cma_alloc_addr)
				y_adr = pic_config->cma_alloc_addr;
			else {
				pr_info(
					"decoder_bmmu_box_alloc_buf_phy idx %d size %d return null\n",
					VF_BUFFER_IDX(i),
					buf_size
					);
				return -1;
			}
		}
		{
			/*ensure get_pic_by_POC()
			not get the buffer not decoded*/
			pic_config->BUF_index = i;
			pic_config->lcu_total = lcu_total;

			pic_config->comp_body_size = losless_comp_body_size;
			pic_config->buf_size = buf_size;

			pic_config->mc_canvas_y = pic_config->index;
			pic_config->mc_canvas_u_v = pic_config->index;
			if (dw_mode & 0x10) {
				/* dw-only: whole allocation is the YUV
				 * surface; split canvases for Y and UV.
				 */
				pic_config->dw_y_adr = y_adr;
				pic_config->dw_u_v_adr = y_adr +
					((mc_buffer_size_u_v_h << 16) << 1);

				pic_config->mc_canvas_y =
					(pic_config->index << 1);
				pic_config->mc_canvas_u_v =
					(pic_config->index << 1) + 1;
			} else if (dw_mode) {
				pic_config->dw_y_adr = y_adr;
				pic_config->dw_u_v_adr = pic_config->dw_y_adr +
					((mc_buffer_size_u_v_h << 16) << 1);
			}
#ifdef MV_USE_FIXED_BUF
			/* Picture i writes MVs at slice i of the region. */
			pic_config->mpred_mv_wr_start_addr =
				pbi->work_space_buf->mpred_mv.buf_start +
					((pic_config->index * lcu_total)
					* MV_MEM_UNIT);
#endif
			if (debug) {
				pr_info
				("%s index %d BUF_index %d ",
				__func__, pic_config->index,
				pic_config->BUF_index);
				pr_info
				("comp_body_size %x comp_buf_size %x ",
				pic_config->comp_body_size,
				pic_config->buf_size);
				pr_info
				("mpred_mv_wr_start_adr %ld\n",
				pic_config->mpred_mv_wr_start_addr);
				pr_info("dw_y_adr %d, pic_config->dw_u_v_adr =%d\n",
					pic_config->dw_y_adr,
					pic_config->dw_u_v_adr);
			}
			ret = 0;
		}
#ifdef MV_USE_FIXED_BUF
	}
#endif
	return ret;
}
5123 | |
5124 | static int vvp9_mmu_compress_header_size(struct VP9Decoder_s *pbi) |
5125 | { |
5126 | if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) && |
5127 | IS_8K_SIZE(pbi->max_pic_w, pbi->max_pic_h)) |
5128 | return (MMU_COMPRESS_8K_HEADER_SIZE); |
5129 | |
5130 | return (MMU_COMPRESS_HEADER_SIZE); |
5131 | } |
5132 | |
5133 | /*#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)*/ |
5134 | static int vvp9_frame_mmu_map_size(struct VP9Decoder_s *pbi) |
5135 | { |
5136 | if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) && |
5137 | IS_8K_SIZE(pbi->max_pic_w, pbi->max_pic_h)) |
5138 | return (MAX_FRAME_8K_NUM * 4); |
5139 | |
5140 | return (MAX_FRAME_4K_NUM * 4); |
5141 | } |
5142 | |
/*
 * Initialize every picture slot in the frame-buffer pool.
 *
 * When the MMU is used (and dw mode is not 0x10), first pre-allocate a
 * compression header per buffer; a failure there sets fatal_error and
 * aborts.  Then each slot gets its index, crop size and dw mode; on the
 * non-v4l path the physical buffers are allocated via config_pic() and
 * canvases set up.  If config_pic() fails for slot i, the first loop
 * breaks and the second loop (which deliberately continues from the
 * same i) marks all remaining slots as unused (index = -1).
 */
static void init_pic_list(struct VP9Decoder_s *pbi)
{
	int i;
	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *pic_config;
	u32 header_size;
	struct vdec_s *vdec = hw_to_vdec(pbi);

	if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) {
		header_size = vvp9_mmu_compress_header_size(pbi);
		/*alloc VP9 compress header first*/
		for (i = 0; i < pbi->used_buf_num; i++) {
			unsigned long buf_addr;
			if (decoder_bmmu_box_alloc_buf_phy
				(pbi->bmmu_box,
				HEADER_BUFFER_IDX(i), header_size,
				DRIVER_HEADER_NAME,
				&buf_addr) < 0) {
				pr_info("%s malloc compress header failed %d\n",
				DRIVER_HEADER_NAME, i);
				pbi->fatal_error |= DECODER_FATAL_ERROR_NO_MEM;
				return;
			}
		}
	}
	for (i = 0; i < pbi->used_buf_num; i++) {
		pic_config = &cm->buffer_pool->frame_bufs[i].buf;
		pic_config->index = i;
		pic_config->BUF_index = -1;
		pic_config->mv_buf_index = -1;
		if (vdec->parallel_dec == 1) {
			pic_config->y_canvas_index = -1;
			pic_config->uv_canvas_index = -1;
		}
		pic_config->y_crop_width = pbi->init_pic_w;
		pic_config->y_crop_height = pbi->init_pic_h;
		pic_config->double_write_mode = get_double_write_mode(pbi);

		/* v4l buffers are bound later via v4l_alloc_and_config_pic */
		if (!pbi->is_used_v4l) {
			if (config_pic(pbi, pic_config) < 0) {
				if (debug)
					pr_info("Config_pic %d fail\n",
						pic_config->index);
				pic_config->index = -1;
				break;
			}

			if (pic_config->double_write_mode) {
				set_canvas(pbi, pic_config);
			}
		}
	}
	/* Mark any slots left over after an allocation failure as unused;
	 * i intentionally carries over from the loop above.
	 */
	for (; i < pbi->used_buf_num; i++) {
		pic_config = &cm->buffer_pool->frame_bufs[i].buf;
		pic_config->index = -1;
		pic_config->BUF_index = -1;
		pic_config->mv_buf_index = -1;
		if (vdec->parallel_dec == 1) {
			pic_config->y_canvas_index = -1;
			pic_config->uv_canvas_index = -1;
		}
	}
	pr_info("%s ok, used_buf_num = %d\n",
		__func__, pbi->used_buf_num);
}
5208 | |
/*
 * Program the hardware's frame-buffer address table
 * (HEVCD_MPP_ANC2AXI_TBL_*) with one entry per in-use picture: the
 * compressed header address when the MMU is active (and the picture is
 * not dw-only), otherwise the double-write luma address.  Addresses
 * are written shifted right by 5 (32-byte units).  Finally the IPP
 * canvas registers are zeroed.
 */
static void init_pic_list_hw(struct VP9Decoder_s *pbi)
{
	int i;
	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *pic_config;
	/*WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);*/
	/* Auto-increment table write mode. */
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
		(0x1 << 1) | (0x1 << 2));


	for (i = 0; i < pbi->used_buf_num; i++) {
		pic_config = &cm->buffer_pool->frame_bufs[i].buf;
		/* index < 0 marks the first unused slot; stop there. */
		if (pic_config->index < 0)
			break;

		if (pbi->mmu_enable && ((pic_config->double_write_mode & 0x10) == 0)) {

			WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
				pic_config->header_adr >> 5);
		} else {
			/*WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
			 * pic_config->mc_y_adr
			 * | (pic_config->mc_canvas_y << 8) | 0x1);
			 */
			WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
				pic_config->dw_y_adr >> 5);
		}
#ifndef LOSLESS_COMPRESS_MODE
		/*WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
		 * pic_config->mc_u_v_adr
		 * | (pic_config->mc_canvas_u_v << 8)| 0x1);
		 */
		WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
			pic_config->header_adr >> 5);
#else
		/* dw-only mode needs the separate chroma entry as well. */
		if (pic_config->double_write_mode & 0x10) {
			WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
				pic_config->dw_u_v_adr >> 5);
		}
#endif
	}
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);

	/*Zero out canvas registers in IPP -- avoid simulation X*/
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
		(0 << 8) | (0 << 1) | 1);
	for (i = 0; i < 32; i++)
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
}
5258 | |
5259 | |
/*
 * Debug helper: print one line per frame-buffer slot (all
 * FRAME_BUFFERS of them, not just used_buf_num) with its indices,
 * reference counts, dimensions and allocation address.
 */
static void dump_pic_list(struct VP9Decoder_s *pbi)
{
	struct VP9_Common_s *const cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *pic_config;
	int i;
	for (i = 0; i < FRAME_BUFFERS; i++) {
		pic_config = &cm->buffer_pool->frame_bufs[i].buf;
		vp9_print(pbi, 0,
			"Buf(%d) index %d mv_buf_index %d ref_count %d vf_ref %d dec_idx %d slice_type %d w/h %d/%d adr%ld\n",
			i,
			pic_config->index,
#ifndef MV_USE_FIXED_BUF
			pic_config->mv_buf_index,
#else
			/* no per-picture MV buffer with a fixed MV region */
			-1,
#endif
			cm->buffer_pool->
			frame_bufs[i].ref_count,
			pic_config->vf_ref,
			pic_config->decode_idx,
			pic_config->slice_type,
			pic_config->y_crop_width,
			pic_config->y_crop_height,
			pic_config->cma_alloc_addr
			);
	}
	return;
}
5288 | |
/*
 * Configure the hardware for the current frame's dimensions and bit
 * depth: records size/depth/dw-mode into the current picture config,
 * computes the losless compression body/header sizes, and programs the
 * SAO and decompression control registers accordingly.  Also updates
 * the file-scope frame_width/frame_height globals.
 *
 * Always returns 0.
 */
static int config_pic_size(struct VP9Decoder_s *pbi, unsigned short bit_depth)
{
#ifdef LOSLESS_COMPRESS_MODE
	unsigned int data32;
#endif
	int losless_comp_header_size, losless_comp_body_size;
	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *cur_pic_config = &cm->cur_frame->buf;

	frame_width = cur_pic_config->y_crop_width;
	frame_height = cur_pic_config->y_crop_height;
	cur_pic_config->bit_depth = bit_depth;
	cur_pic_config->double_write_mode = get_double_write_mode(pbi);
	losless_comp_header_size =
		compute_losless_comp_header_size(cur_pic_config->y_crop_width,
		cur_pic_config->y_crop_height);
	losless_comp_body_size =
		compute_losless_comp_body_size(cur_pic_config->y_crop_width,
		cur_pic_config->y_crop_height, (bit_depth == VPX_BITS_10));
	cur_pic_config->comp_body_size = losless_comp_body_size;
#ifdef LOSLESS_COMPRESS_MODE
	/* SAO_CTRL5 bit 9: cleared for 10-bit, set for 8-bit. */
	data32 = READ_VREG(HEVC_SAO_CTRL5);
	if (bit_depth == VPX_BITS_10)
		data32 &= ~(1 << 9);
	else
		data32 |= (1 << 9);

	WRITE_VREG(HEVC_SAO_CTRL5, data32);

	if (pbi->mmu_enable) {
		/*bit[4] : paged_mem_mode*/
		WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
	} else {
		/*bit[3] smem mdoe*/
		if (bit_depth == VPX_BITS_10)
			WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0 << 3));
		else
			WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (1 << 3));
	}
	if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_SM1)
		WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
	/*WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);*/
	WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
	WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
	WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
	/* dw-only: bypass the compressed-frame path entirely. */
	if (get_double_write_mode(pbi) & 0x10)
		WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x1 << 31);
#else
	WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x1 << 31);
#endif
	return 0;
}
5341 | |
/*
 * Program motion-compensation reference information for the current
 * frame: canvas indices for each of the REFS_PER_FRAME references
 * (written twice, at table offsets 0 and 16), then per-reference
 * width/height, 14-bit fixed-point scaling ratios relative to the
 * current picture, and the compressed body size (0 when the MMU is
 * used).  A reference whose dimensions differ from the current frame
 * enables scaling for that slot via VP9D_MPP_REF_SCALE_ENBL.
 *
 * Always returns 0.
 */
static int config_mc_buffer(struct VP9Decoder_s *pbi, unsigned short bit_depth)
{
	int i;
	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *cur_pic_config = &cm->cur_frame->buf;
	uint8_t scale_enable = 0;

	if (debug&VP9_DEBUG_BUFMGR_MORE)
		pr_info("config_mc_buffer entered .....\n");

	/* Canvas table, auto-increment from index 0. */
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
		(0 << 8) | (0 << 1) | 1);
	for (i = 0; i < REFS_PER_FRAME; ++i) {
		struct PIC_BUFFER_CONFIG_s *pic_config = cm->frame_refs[i].buf;
		if (!pic_config)
			continue;
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
			(pic_config->mc_canvas_u_v << 16)
			| (pic_config->mc_canvas_u_v << 8)
			| pic_config->mc_canvas_y);
		if (debug & VP9_DEBUG_BUFMGR_MORE)
			pr_info("refid %x mc_canvas_u_v %x mc_canvas_y %x\n",
				i, pic_config->mc_canvas_u_v,
				pic_config->mc_canvas_y);
	}
	/* Same canvas entries again starting at table index 16. */
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
		(16 << 8) | (0 << 1) | 1);
	for (i = 0; i < REFS_PER_FRAME; ++i) {
		struct PIC_BUFFER_CONFIG_s *pic_config = cm->frame_refs[i].buf;
		if (!pic_config)
			continue;
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
			(pic_config->mc_canvas_u_v << 16)
			| (pic_config->mc_canvas_u_v << 8)
			| pic_config->mc_canvas_y);
	}

	/*auto_inc start index:0 field:0*/
	WRITE_VREG(VP9D_MPP_REFINFO_TBL_ACCCONFIG, 0x1 << 2);
	/*index 0:last 1:golden 2:altref*/
	for (i = 0; i < REFS_PER_FRAME; i++) {
		int ref_pic_body_size;
		struct PIC_BUFFER_CONFIG_s *pic_config = cm->frame_refs[i].buf;
		if (!pic_config)
			continue;
		WRITE_VREG(VP9D_MPP_REFINFO_DATA, pic_config->y_crop_width);
		WRITE_VREG(VP9D_MPP_REFINFO_DATA, pic_config->y_crop_height);

		/* Reference smaller/larger than current frame -> scale. */
		if (pic_config->y_crop_width != cur_pic_config->y_crop_width ||
			pic_config->y_crop_height != cur_pic_config->y_crop_height) {
			scale_enable |= (1 << i);
		}
		ref_pic_body_size =
			compute_losless_comp_body_size(pic_config->y_crop_width,
			pic_config->y_crop_height, (bit_depth == VPX_BITS_10));
		/* Scale ratios in 14-bit fixed point (ref << 14 / cur). */
		WRITE_VREG(VP9D_MPP_REFINFO_DATA,
			(pic_config->y_crop_width << 14)
			/ cur_pic_config->y_crop_width);
		WRITE_VREG(VP9D_MPP_REFINFO_DATA,
			(pic_config->y_crop_height << 14)
			/ cur_pic_config->y_crop_height);
		if (pbi->mmu_enable)
			WRITE_VREG(VP9D_MPP_REFINFO_DATA, 0);
		else
			WRITE_VREG(VP9D_MPP_REFINFO_DATA, ref_pic_body_size >> 5);
	}
	WRITE_VREG(VP9D_MPP_REF_SCALE_ENBL, scale_enable);
	return 0;
}
5411 | |
5412 | static void clear_mpred_hw(struct VP9Decoder_s *pbi) |
5413 | { |
5414 | unsigned int data32; |
5415 | |
5416 | data32 = READ_VREG(HEVC_MPRED_CTRL4); |
5417 | data32 &= (~(1 << 6)); |
5418 | WRITE_VREG(HEVC_MPRED_CTRL4, data32); |
5419 | } |
5420 | |
/*
 * Configure the motion-prediction block: the current picture's MV
 * write region, the previous (co-located) picture's MV read region and
 * its end address, the above-line buffer from the workspace, and the
 * use_prev_frame_mvs enable bit in HEVC_MPRED_CTRL4.
 *
 * NOTE(review): cm->prev_frame is dereferenced unconditionally here -
 * presumably callers guarantee it is valid when this runs; confirm.
 */
static void config_mpred_hw(struct VP9Decoder_s *pbi)
{
	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *cur_pic_config = &cm->cur_frame->buf;
	struct PIC_BUFFER_CONFIG_s *last_frame_pic_config =
						&cm->prev_frame->buf;

	unsigned int data32;
	int mpred_curr_lcu_x;
	int mpred_curr_lcu_y;
	int mpred_mv_rd_end_addr;


	/* Read region ends after the co-located frame's per-LCU MV data. */
	mpred_mv_rd_end_addr = last_frame_pic_config->mpred_mv_wr_start_addr
			+ (last_frame_pic_config->lcu_total * MV_MEM_UNIT);

	data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
	mpred_curr_lcu_x = data32 & 0xffff;
	mpred_curr_lcu_y = (data32 >> 16) & 0xffff;

	if (debug & VP9_DEBUG_BUFMGR)
		pr_info("cur pic_config index %d col pic_config index %d\n",
			cur_pic_config->index, last_frame_pic_config->index);
	WRITE_VREG(HEVC_MPRED_CTRL3, 0x24122412);
	WRITE_VREG(HEVC_MPRED_ABV_START_ADDR,
			pbi->work_space_buf->mpred_above.buf_start);

	data32 = READ_VREG(HEVC_MPRED_CTRL4);

	/* Bit 6 reflects whether previous-frame MVs may be used. */
	data32 &= (~(1 << 6));
	data32 |= (cm->use_prev_frame_mvs << 6);
	WRITE_VREG(HEVC_MPRED_CTRL4, data32);

	WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
			cur_pic_config->mpred_mv_wr_start_addr);
	WRITE_VREG(HEVC_MPRED_MV_WPTR, cur_pic_config->mpred_mv_wr_start_addr);

	WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR,
			last_frame_pic_config->mpred_mv_wr_start_addr);
	WRITE_VREG(HEVC_MPRED_MV_RPTR,
			last_frame_pic_config->mpred_mv_wr_start_addr);
	/*data32 = ((pbi->lcu_x_num - pbi->tile_width_lcu)*MV_MEM_UNIT);*/
	/*WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP,data32);*/
	/*WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP,data32);*/
	WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);

}
5468 | |
/*
 * Configure the SAO / output stage for the current picture: the
 * double-write Y/C output addresses (or 0xffffffff sentinels when dw
 * is off), the compressed header start, plane lengths, memory mapping
 * mode, endianness, double-write/compressed-output enables, and - on
 * the v4l path - NV12/NV21 UV order taken from the negotiated capture
 * format.  The three compile-time variants (VP9_10B_NV21 with/without
 * DOS_PROJECT, and the default path) differ only in register layout.
 * @params is currently unread in all variants.
 */
static void config_sao_hw(struct VP9Decoder_s *pbi, union param_u *params)
{
	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *pic_config = &cm->cur_frame->buf;

	unsigned int data32;
	int lcu_size = 64;
	int mc_buffer_size_u_v =
		pic_config->lcu_total * lcu_size*lcu_size/2;
	int mc_buffer_size_u_v_h =
		(mc_buffer_size_u_v + 0xffff) >> 16;/*64k alignment*/
	struct aml_vcodec_ctx * v4l2_ctx = pbi->v4l2_ctx;

	if (get_double_write_mode(pbi)) {
		WRITE_VREG(HEVC_SAO_Y_START_ADDR, pic_config->dw_y_adr);
		WRITE_VREG(HEVC_SAO_C_START_ADDR, pic_config->dw_u_v_adr);
		WRITE_VREG(HEVC_SAO_Y_WPTR, pic_config->dw_y_adr);
		WRITE_VREG(HEVC_SAO_C_WPTR, pic_config->dw_u_v_adr);
	} else {
		/* No double-write surface: park the addresses. */
		WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
		WRITE_VREG(HEVC_SAO_C_START_ADDR, 0xffffffff);
	}
	if (pbi->mmu_enable)
		WRITE_VREG(HEVC_CM_HEADER_START_ADDR, pic_config->header_adr);

	/* Luma plane is twice the 64k-aligned half-LCU unit count. */
	data32 = (mc_buffer_size_u_v_h << 16) << 1;
	/*pr_info("data32=%x,mc_buffer_size_u_v_h=%x,lcu_total=%x\n",
	 * data32, mc_buffer_size_u_v_h, pic_config->lcu_total);
	 */
	WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);

	data32 = (mc_buffer_size_u_v_h << 16);
	WRITE_VREG(HEVC_SAO_C_LENGTH, data32);

#ifdef VP9_10B_NV21
#ifdef DOS_PROJECT
	data32 = READ_VREG(HEVC_SAO_CTRL1);
	data32 &= (~0x3000);
	/*[13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32*/
	data32 |= (pbi->mem_map_mode << 12);
	data32 &= (~0x3);
	data32 |= 0x1; /* [1]:dw_disable [0]:cm_disable*/
	WRITE_VREG(HEVC_SAO_CTRL1, data32);
	/*[23:22] dw_v1_ctrl [21:20] dw_v0_ctrl [19:18] dw_h1_ctrl
	 * [17:16] dw_h0_ctrl
	 */
	data32 = READ_VREG(HEVC_SAO_CTRL5);
	/*set them all 0 for H265_NV21 (no down-scale)*/
	data32 &= ~(0xff << 16);
	WRITE_VREG(HEVC_SAO_CTRL5, data32);
	data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
	data32 &= (~0x30);
	/*[5:4] address_format 00:linear 01:32x32 10:64x32*/
	data32 |= (pbi->mem_map_mode << 4);
	WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
#else
	/*m8baby test1902*/
	data32 = READ_VREG(HEVC_SAO_CTRL1);
	data32 &= (~0x3000);
	/*[13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32*/
	data32 |= (pbi->mem_map_mode << 12);
	data32 &= (~0xff0);
	/*data32 |= 0x670;*/ /*Big-Endian per 64-bit*/
	data32 |= 0x880; /*.Big-Endian per 64-bit */
	data32 &= (~0x3);
	data32 |= 0x1; /*[1]:dw_disable [0]:cm_disable*/
	WRITE_VREG(HEVC_SAO_CTRL1, data32);
	/* [23:22] dw_v1_ctrl [21:20] dw_v0_ctrl
	 *[19:18] dw_h1_ctrl [17:16] dw_h0_ctrl
	 */
	data32 = READ_VREG(HEVC_SAO_CTRL5);
	/* set them all 0 for H265_NV21 (no down-scale)*/
	data32 &= ~(0xff << 16);
	WRITE_VREG(HEVC_SAO_CTRL5, data32);

	data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
	data32 &= (~0x30);
	/*[5:4] address_format 00:linear 01:32x32 10:64x32*/
	data32 |= (pbi->mem_map_mode << 4);
	data32 &= (~0xF);
	data32 |= 0x8; /*Big-Endian per 64-bit*/
	WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
#endif
#else
	data32 = READ_VREG(HEVC_SAO_CTRL1);
	data32 &= (~0x3000);
	data32 |= (pbi->mem_map_mode <<
			   12);

/* [13:12] axi_aformat, 0-Linear,
 *				   1-32x32, 2-64x32
 */
	data32 &= (~0xff0);
	/* data32 |= 0x670; // Big-Endian per 64-bit */
	/* endian is a module-scope (file) setting, not computed here. */
	data32 |= endian;	/* Big-Endian per 64-bit */
	data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
	if (get_double_write_mode(pbi) == 0)
		data32 |= 0x2; /*disable double write*/
	else if (get_double_write_mode(pbi) & 0x10)
		data32 |= 0x1; /*disable cm*/
	 if  (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) { /* >= G12A dw write control */
		unsigned int data;
		data = READ_VREG(HEVC_DBLK_CFGB);
		data &= (~0x300); /*[8]:first write enable (compress)  [9]:double write enable (uncompress)*/
		if (get_double_write_mode(pbi) == 0)
			data |= (0x1 << 8); /*enable first write*/
		else if (get_double_write_mode(pbi) & 0x10)
			data |= (0x1 << 9); /*double write only*/
		else
			data |= ((0x1 << 8)  |(0x1 << 9));
		WRITE_VREG(HEVC_DBLK_CFGB, data);
	}

	/* swap uv */
	if (pbi->is_used_v4l) {
		if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
			(v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
			data32 &= ~(1 << 8); /* NV21 */
		else
			data32 |= (1 << 8); /* NV12 */
	}

	/*
	*  [31:24] ar_fifo1_axi_thred
	*  [23:16] ar_fifo0_axi_thred
	*  [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
	*  [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
	*  [11:08] axi_lendian_C
	*  [07:04] axi_lendian_Y
	*  [3]     reserved
	*  [2]     clk_forceon
	*  [1]     dw_disable:disable double write output
	*  [0]     cm_disable:disable compress output
	*/
	WRITE_VREG(HEVC_SAO_CTRL1, data32);

	if (get_double_write_mode(pbi) & 0x10) {
		/* [23:22] dw_v1_ctrl
		 *[21:20] dw_v0_ctrl
		 *[19:18] dw_h1_ctrl
		 *[17:16] dw_h0_ctrl
		 */
		data32 = READ_VREG(HEVC_SAO_CTRL5);
		/*set them all 0 for H265_NV21 (no down-scale)*/
		data32 &= ~(0xff << 16);
		WRITE_VREG(HEVC_SAO_CTRL5, data32);
	} else {
		data32 = READ_VREG(HEVC_SAO_CTRL5);
		data32 &= (~(0xff << 16));
		/* Down-scale control per dw mode (2/3: 1/4, 4: mixed). */
		if (get_double_write_mode(pbi) == 2 ||
			get_double_write_mode(pbi) == 3)
			data32 |= (0xff<<16);
		else if (get_double_write_mode(pbi) == 4)
			data32 |= (0x33<<16);
		WRITE_VREG(HEVC_SAO_CTRL5, data32);
	}

	data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
	data32 &= (~0x30);
	/* [5:4]    -- address_format 00:linear 01:32x32 10:64x32 */
	data32 |= (pbi->mem_map_mode <<
			   4);
	data32 &= (~0xF);
	data32 |= 0xf;  /* valid only when double write only */
		/*data32 |= 0x8;*/		/* Big-Endian per 64-bit */

	/* swap uv */
	if (pbi->is_used_v4l) {
		if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
			(v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
			data32 |= (1 << 12); /* NV21 */
		else
			data32 &= ~(1 << 12); /* NV12 */
	}

	/*
	* [3:0]   little_endian
	* [5:4]   address_format 00:linear 01:32x32 10:64x32
	* [7:6]   reserved
	* [9:8]   Linear_LineAlignment 00:16byte 01:32byte 10:64byte
	* [11:10] reserved
	* [12]    CbCr_byte_swap
	* [31:13] reserved
	*/
	WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
#endif
}
5656 | |
/*
 * Program the decoder's work-space buffer base addresses into hardware.
 *
 * @pbi:  decoder instance; pbi->work_space_buf describes the physical
 *        layout of every auxiliary buffer used by the HEVC/VP9 core
 * @mask: HW_MASK_FRONT and/or HW_MASK_BACK — selects whether the stream
 *        parser front-end registers, the pixel-pipeline back-end
 *        registers, or both are written
 */
static void vp9_config_work_space_hw(struct VP9Decoder_s *pbi, u32 mask)
{
	struct BuffInfo_s *buf_spec = pbi->work_space_buf;
	unsigned int data32;

	/* Dump the buffer layout once, before the first init completes. */
	if (debug && pbi->init_flag == 0)
		pr_info("%s %x %x %x %x %x %x %x %x %x %x %x %x\n",
			__func__,
			buf_spec->ipp.buf_start,
			buf_spec->start_adr,
			buf_spec->short_term_rps.buf_start,
			buf_spec->vps.buf_start,
			buf_spec->sps.buf_start,
			buf_spec->pps.buf_start,
			buf_spec->sao_up.buf_start,
			buf_spec->swap_buf.buf_start,
			buf_spec->swap_buf2.buf_start,
			buf_spec->scalelut.buf_start,
			buf_spec->dblk_para.buf_start,
			buf_spec->dblk_data.buf_start);

	if (mask & HW_MASK_FRONT) {
		/* RPM buffer only used when parameters are sent via DMA,
		 * not read back through registers.
		 */
		if ((debug & VP9_DEBUG_SEND_PARAM_WITH_REG) == 0)
			WRITE_VREG(HEVC_RPM_BUFFER, (u32)pbi->rpm_phy_addr);

		WRITE_VREG(HEVC_SHORT_TERM_RPS,
			buf_spec->short_term_rps.buf_start);
		/*WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);*/
		/*WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);*/
		WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
		WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
			buf_spec->swap_buf.buf_start);
		WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2,
			buf_spec->swap_buf2.buf_start);
		WRITE_VREG(LMEM_DUMP_ADR, (u32)pbi->lmem_phy_addr);

	}

	if (mask & HW_MASK_BACK) {
#ifdef LOSLESS_COMPRESS_MODE
		/* Sizes of the compressed (AFBC-style) reference frame
		 * header/body regions, derived from the initial picture
		 * dimensions and bit depth.
		 */
		int losless_comp_header_size =
			compute_losless_comp_header_size(pbi->init_pic_w,
			pbi->init_pic_h);
		int losless_comp_body_size =
			compute_losless_comp_body_size(pbi->init_pic_w,
			pbi->init_pic_h, buf_alloc_depth == 10);
#endif
		WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE,
			buf_spec->ipp.buf_start);
		WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
		WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
			/* cfg_addr_adp*/
			WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_para.buf_start);
			if (debug & VP9_DEBUG_BUFMGR_MORE)
				pr_info("Write HEVC_DBLK_CFGE\n");
		}
		/* cfg_p_addr */
		WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
		/* cfg_d_addr */
		WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);

		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
			/*
			 * data32 = (READ_VREG(P_HEVC_DBLK_CFG3)>>8) & 0xff; // xio left offset, default is 0x40
			 * data32 = data32 * 2;
			 * data32 = (READ_VREG(P_HEVC_DBLK_CFG3)>>16) & 0xff; // adp left offset, default is 0x040
			 * data32 = data32 * 2;
			 */
			WRITE_VREG(HEVC_DBLK_CFG3, 0x808010); // make left storage 2 x 4k
		}
#ifdef LOSLESS_COMPRESS_MODE
		if (pbi->mmu_enable) {
			/*bit[4] : paged_mem_mode*/
			WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
			if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_SM1)
				WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0);
		} else {
			/*if(cur_pic_config->bit_depth == VPX_BITS_10)
			 * WRITE_VREG(P_HEVCD_MPP_DECOMP_CTL1, (0<<3));
			 */
			/*bit[3] smem mode*/
			/*else WRITE_VREG(P_HEVCD_MPP_DECOMP_CTL1, (1<<3));*/
			/*bit[3] smem mode*/
			WRITE_VREG(HEVCD_MPP_DECOMP_CTL2,
				(losless_comp_body_size >> 5));
		}
		/*WRITE_VREG(HEVCD_MPP_DECOMP_CTL2,
			(losless_comp_body_size >> 5));*/
		/*WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,
			(0xff<<20) | (0xff<<10) | 0xff);*/
		/*8-bit mode */
		WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
		/* header region starts immediately after the body region */
		WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
		WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
		if (get_double_write_mode(pbi) & 0x10)
			WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x1 << 31);
#else
		WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x1 << 31);
#endif

		if (pbi->mmu_enable) {
			/* split the vertical-boundary-header buffer in two
			 * halves for the two MMU channels
			 */
			WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
			WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR, buf_spec->mmu_vbh.buf_start
					+ buf_spec->mmu_vbh.buf_size/2);
			/*data32 = READ_VREG(P_HEVC_SAO_CTRL9);*/
			/*data32 |= 0x1;*/
			/*WRITE_VREG(P_HEVC_SAO_CTRL9, data32);*/

			/* use HEVC_CM_HEADER_START_ADDR */
			data32 = READ_VREG(HEVC_SAO_CTRL5);
			data32 |= (1<<10);
			WRITE_VREG(HEVC_SAO_CTRL5, data32);
		}

		WRITE_VREG(VP9_SEG_MAP_BUFFER, buf_spec->seg_map.buf_start);

		WRITE_VREG(LMEM_DUMP_ADR, (u32)pbi->lmem_phy_addr);
		/**/
		WRITE_VREG(VP9_PROB_SWAP_BUFFER, pbi->prob_buffer_phy_addr);
		WRITE_VREG(VP9_COUNT_SWAP_BUFFER, pbi->count_buffer_phy_addr);
		if (pbi->mmu_enable) {
			/* G12A and later use the ASSIST mailbox register for
			 * the frame MMU map; older SoCs use a dedicated one
			 */
			if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
				WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, pbi->frame_mmu_map_phy_addr);
			else
				WRITE_VREG(VP9_MMU_MAP_BUFFER, pbi->frame_mmu_map_phy_addr);
		}
	}
}
5786 | |
5787 | |
5788 | #ifdef VP9_LPF_LVL_UPDATE |
5789 | /* |
5790 | * Defines, declarations, sub-functions for vp9 de-block loop |
5791 | filter Thr/Lvl table update |
5792 | * - struct segmentation is for loop filter only (removed something) |
5793 | * - function "vp9_loop_filter_init" and "vp9_loop_filter_frame_init" will |
5794 | be instantiated in C_Entry |
5795 | * - vp9_loop_filter_init run once before decoding start |
5796 | * - vp9_loop_filter_frame_init run before every frame decoding start |
5797 | * - set video format to VP9 is in vp9_loop_filter_init |
5798 | */ |
5799 | #define MAX_LOOP_FILTER 63 |
5800 | #define MAX_REF_LF_DELTAS 4 |
5801 | #define MAX_MODE_LF_DELTAS 2 |
5802 | /*#define INTRA_FRAME 0*/ |
5803 | /*#define LAST_FRAME 1*/ |
5804 | /*#define MAX_REF_FRAMES 4*/ |
5805 | #define SEGMENT_DELTADATA 0 |
5806 | #define SEGMENT_ABSDATA 1 |
5807 | #define MAX_SEGMENTS 8 |
/*#define SEG_TREE_PROBS (MAX_SEGMENTS-1)*/
5809 | /*no use for loop filter, if this struct for common use, pls add it back*/ |
5810 | /*#define PREDICTION_PROBS 3*/ |
5811 | /* no use for loop filter, if this struct for common use, pls add it back*/ |
5812 | |
/* Per-segment features that may be activated in the VP9 segmentation map;
 * only SEG_LVL_ALT_LF is consumed by this loop-filter code.
 */
enum SEG_LVL_FEATURES {
	SEG_LVL_ALT_Q = 0, /*Use alternate Quantizer ....*/
	SEG_LVL_ALT_LF = 1, /*Use alternate loop filter value...*/
	SEG_LVL_REF_FRAME = 2, /*Optional Segment reference frame*/
	SEG_LVL_SKIP = 3, /*Optional Segment (0,0) + skip mode*/
	SEG_LVL_MAX = 4 /*Number of features supported*/
};
5820 | |
/* Trimmed-down copy of the VP9 segmentation state: only the fields needed
 * by the loop-filter level computation are kept (see note below about the
 * removed probability tables).
 */
struct segmentation {
	uint8_t enabled;          /* segmentation active for this frame */
	uint8_t update_map;
	uint8_t update_data;
	uint8_t abs_delta;        /* SEGMENT_ABSDATA or SEGMENT_DELTADATA */
	uint8_t temporal_update;

	/*no use for loop filter, if this struct
	 *for common use, pls add it back
	 */
	/*vp9_prob tree_probs[SEG_TREE_PROBS]; */
	/* no use for loop filter, if this struct
	 * for common use, pls add it back
	 */
	/*vp9_prob pred_probs[PREDICTION_PROBS];*/

	/* raw per-segment values, indexed by SEG_LVL_FEATURES */
	int16_t feature_data[MAX_SEGMENTS][SEG_LVL_MAX];
	/* bit i set => feature i active for that segment */
	unsigned int feature_mask[MAX_SEGMENTS];
};
5840 | |
/* Thresholds for one loop-filter level: macroblock-edge limit, inside
 * limit, and high-edge-variance threshold.
 */
struct loop_filter_thresh {
	uint8_t mblim;
	uint8_t lim;
	uint8_t hev_thr;
};
5846 | |
/* Full loop-filter tables: thresholds per level plus the resolved filter
 * level per (segment, reference frame, mode-delta) combination.
 */
struct loop_filter_info_n {
	struct loop_filter_thresh lfthr[MAX_LOOP_FILTER + 1];
	uint8_t lvl[MAX_SEGMENTS][MAX_REF_FRAMES][MAX_MODE_LF_DELTAS];
};
5851 | |
/* Frame-level loop-filter parameters parsed from the VP9 frame header.
 * last_* fields cache the previous frame's values so tables are only
 * rebuilt when something changed.
 */
struct loopfilter {
	int filter_level;

	int sharpness_level;
	int last_sharpness_level;

	uint8_t mode_ref_delta_enabled;
	uint8_t mode_ref_delta_update;

	/*0 = Intra, Last, GF, ARF*/
	signed char ref_deltas[MAX_REF_LF_DELTAS];
	signed char last_ref_deltas[MAX_REF_LF_DELTAS];

	/*0 = ZERO_MV, MV*/
	signed char mode_deltas[MAX_MODE_LF_DELTAS];
	signed char last_mode_deltas[MAX_MODE_LF_DELTAS];
};
5869 | |
/* Clamp @value into the inclusive range [@low, @high]. */
static int vp9_clamp(int value, int low, int high)
{
	if (value < low)
		return low;
	if (value > high)
		return high;
	return value;
}
5874 | |
5875 | int segfeature_active(struct segmentation *seg, |
5876 | int segment_id, |
5877 | enum SEG_LVL_FEATURES feature_id) { |
5878 | return seg->enabled && |
5879 | (seg->feature_mask[segment_id] & (1 << feature_id)); |
5880 | } |
5881 | |
5882 | int get_segdata(struct segmentation *seg, int segment_id, |
5883 | enum SEG_LVL_FEATURES feature_id) { |
5884 | return seg->feature_data[segment_id][feature_id]; |
5885 | } |
5886 | |
5887 | static void vp9_update_sharpness(struct loop_filter_info_n *lfi, |
5888 | int sharpness_lvl) |
5889 | { |
5890 | int lvl; |
5891 | /*For each possible value for the loop filter fill out limits*/ |
5892 | for (lvl = 0; lvl <= MAX_LOOP_FILTER; lvl++) { |
5893 | /*Set loop filter parameters that control sharpness.*/ |
5894 | int block_inside_limit = lvl >> ((sharpness_lvl > 0) + |
5895 | (sharpness_lvl > 4)); |
5896 | |
5897 | if (sharpness_lvl > 0) { |
5898 | if (block_inside_limit > (9 - sharpness_lvl)) |
5899 | block_inside_limit = (9 - sharpness_lvl); |
5900 | } |
5901 | |
5902 | if (block_inside_limit < 1) |
5903 | block_inside_limit = 1; |
5904 | |
5905 | lfi->lfthr[lvl].lim = (uint8_t)block_inside_limit; |
5906 | lfi->lfthr[lvl].mblim = (uint8_t)(2 * (lvl + 2) + |
5907 | block_inside_limit); |
5908 | } |
5909 | } |
5910 | |
5911 | /*instantiate this function once when decode is started*/ |
/*instantiate this function once when decode is started*/
/*
 * Reset all loop-filter state (tables, frame parameters, segmentation
 * copy) and program the deblock hardware: threshold table via
 * HEVC_DBLK_CFG9 and video-format/write-enable config via HEVC_DBLK_CFGB.
 */
void vp9_loop_filter_init(struct VP9Decoder_s *pbi)
{
	struct loop_filter_info_n *lfi = pbi->lfi;
	struct loopfilter *lf = pbi->lf;
	struct segmentation *seg_4lf = pbi->seg_4lf;
	int i;
	unsigned int data32;

	memset(lfi, 0, sizeof(struct loop_filter_info_n));
	memset(lf, 0, sizeof(struct loopfilter));
	memset(seg_4lf, 0, sizeof(struct segmentation));
	lf->sharpness_level = 0; /*init to 0 */
	/*init limits for given sharpness*/
	vp9_update_sharpness(lfi, lf->sharpness_level);
	lf->last_sharpness_level = lf->sharpness_level;
	/*init hev threshold const vectors (actually no use)
	 *for (i = 0; i <= MAX_LOOP_FILTER; i++)
	 *	lfi->lfthr[i].hev_thr = (uint8_t)(i >> 4);
	 */

	/*Write to register*/
	/* 32 writes, two levels packed per 32-bit word; the register
	 * presumably auto-increments through the 64-entry table —
	 * NOTE(review): confirm against HEVC_DBLK_CFG9 documentation.
	 */
	for (i = 0; i < 32; i++) {
		unsigned int thr;

		thr = ((lfi->lfthr[i * 2 + 1].lim & 0x3f)<<8) |
			(lfi->lfthr[i * 2 + 1].mblim & 0xff);
		thr = (thr<<16) | ((lfi->lfthr[i*2].lim & 0x3f)<<8) |
			(lfi->lfthr[i * 2].mblim & 0xff);
		WRITE_VREG(HEVC_DBLK_CFG9, thr);
	}

	/*video format is VP9*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
		data32 = (0x3 << 14) | // (dw fifo thres r and b)
			(0x3 << 12) | // (dw fifo thres r or b)
			(0x3 << 10) | // (dw fifo thres not r/b)
			(0x3 << 8) | // 1st/2nd write both enable
			(0x1 << 0); // vp9 video format
		if (get_double_write_mode(pbi) == 0x10)
			data32 &= (~0x100); /* double-write only: drop 1st write */
	} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
		data32 = (0x57 << 8) |  /*1st/2nd write both enable*/
			(0x1 << 0); /*vp9 video format*/
		if (get_double_write_mode(pbi) == 0x10)
			data32 &= (~0x100);
	} else
		data32 = 0x40400001;

	WRITE_VREG(HEVC_DBLK_CFGB, data32);
	if (debug & VP9_DEBUG_BUFMGR_MORE)
		pr_info("[DBLK DEBUG] CFGB : 0x%x\n", data32);
}
5963 | } |
5964 | /* perform this function per frame*/ |
5965 | void vp9_loop_filter_frame_init(struct segmentation *seg, |
5966 | struct loop_filter_info_n *lfi, struct loopfilter *lf, |
5967 | int default_filt_lvl) { |
5968 | int i; |
5969 | int seg_id; |
5970 | /*n_shift is the multiplier for lf_deltas |
5971 | *the multiplier is 1 for when filter_lvl is between 0 and 31; |
5972 | *2 when filter_lvl is between 32 and 63 |
5973 | */ |
5974 | const int scale = 1 << (default_filt_lvl >> 5); |
5975 | |
5976 | /*update limits if sharpness has changed*/ |
5977 | if (lf->last_sharpness_level != lf->sharpness_level) { |
5978 | vp9_update_sharpness(lfi, lf->sharpness_level); |
5979 | lf->last_sharpness_level = lf->sharpness_level; |
5980 | |
5981 | /*Write to register*/ |
5982 | for (i = 0; i < 32; i++) { |
5983 | unsigned int thr; |
5984 | |
5985 | thr = ((lfi->lfthr[i * 2 + 1].lim & 0x3f) << 8) |
5986 | | (lfi->lfthr[i * 2 + 1].mblim & 0xff); |
5987 | thr = (thr << 16) | ((lfi->lfthr[i * 2].lim & 0x3f) << 8) |
5988 | | (lfi->lfthr[i * 2].mblim & 0xff); |
5989 | WRITE_VREG(HEVC_DBLK_CFG9, thr); |
5990 | } |
5991 | } |
5992 | |
5993 | for (seg_id = 0; seg_id < MAX_SEGMENTS; seg_id++) {/*MAX_SEGMENTS = 8*/ |
5994 | int lvl_seg = default_filt_lvl; |
5995 | |
5996 | if (segfeature_active(seg, seg_id, SEG_LVL_ALT_LF)) { |
5997 | const int data = get_segdata(seg, seg_id, |
5998 | SEG_LVL_ALT_LF); |
5999 | lvl_seg = vp9_clamp(seg->abs_delta == SEGMENT_ABSDATA ? |
6000 | data : default_filt_lvl + data, |
6001 | 0, MAX_LOOP_FILTER); |
6002 | #ifdef DBG_LF_PRINT |
6003 | pr_info("segfeature_active!!!seg_id=%d,lvl_seg=%d\n", seg_id, lvl_seg); |
6004 | #endif |
6005 | } |
6006 | |
6007 | if (!lf->mode_ref_delta_enabled) { |
6008 | /*we could get rid of this if we assume that deltas are set to |
6009 | *zero when not in use; encoder always uses deltas |
6010 | */ |
6011 | memset(lfi->lvl[seg_id], lvl_seg, sizeof(lfi->lvl[seg_id])); |
6012 | } else { |
6013 | int ref, mode; |
6014 | const int intra_lvl = lvl_seg + lf->ref_deltas[INTRA_FRAME] |
6015 | * scale; |
6016 | #ifdef DBG_LF_PRINT |
6017 | pr_info("LF_PRINT:vp9_loop_filter_frame_init,seg_id=%d\n", seg_id); |
6018 | pr_info("ref_deltas[INTRA_FRAME]=%d\n", lf->ref_deltas[INTRA_FRAME]); |
6019 | #endif |
6020 | lfi->lvl[seg_id][INTRA_FRAME][0] = |
6021 | vp9_clamp(intra_lvl, 0, MAX_LOOP_FILTER); |
6022 | |
6023 | for (ref = LAST_FRAME; ref < MAX_REF_FRAMES; ++ref) { |
6024 | /* LAST_FRAME = 1, MAX_REF_FRAMES = 4*/ |
6025 | for (mode = 0; mode < MAX_MODE_LF_DELTAS; ++mode) { |
6026 | /*MAX_MODE_LF_DELTAS = 2*/ |
6027 | const int inter_lvl = |
6028 | lvl_seg + lf->ref_deltas[ref] * scale |
6029 | + lf->mode_deltas[mode] * scale; |
6030 | #ifdef DBG_LF_PRINT |
6031 | #endif |
6032 | lfi->lvl[seg_id][ref][mode] = |
6033 | vp9_clamp(inter_lvl, 0, |
6034 | MAX_LOOP_FILTER); |
6035 | } |
6036 | } |
6037 | } |
6038 | } |
6039 | |
6040 | #ifdef DBG_LF_PRINT |
6041 | /*print out thr/lvl table per frame*/ |
6042 | for (i = 0; i <= MAX_LOOP_FILTER; i++) { |
6043 | pr_info("LF_PRINT:(%d)thr=%d,blim=%d,lim=%d\n", |
6044 | i, lfi->lfthr[i].hev_thr, lfi->lfthr[i].mblim, |
6045 | lfi->lfthr[i].lim); |
6046 | } |
6047 | for (seg_id = 0; seg_id < MAX_SEGMENTS; seg_id++) { |
6048 | pr_info("LF_PRINT:lvl(seg_id=%d)(mode=0,%d,%d,%d,%d)\n", |
6049 | seg_id, lfi->lvl[seg_id][0][0], |
6050 | lfi->lvl[seg_id][1][0], lfi->lvl[seg_id][2][0], |
6051 | lfi->lvl[seg_id][3][0]); |
6052 | pr_info("i(mode=1,%d,%d,%d,%d)\n", lfi->lvl[seg_id][0][1], |
6053 | lfi->lvl[seg_id][1][1], lfi->lvl[seg_id][2][1], |
6054 | lfi->lvl[seg_id][3][1]); |
6055 | } |
6056 | #endif |
6057 | |
6058 | /*Write to register */ |
6059 | for (i = 0; i < 16; i++) { |
6060 | unsigned int level; |
6061 | |
6062 | level = ((lfi->lvl[i >> 1][3][i & 1] & 0x3f) << 24) | |
6063 | ((lfi->lvl[i >> 1][2][i & 1] & 0x3f) << 16) | |
6064 | ((lfi->lvl[i >> 1][1][i & 1] & 0x3f) << 8) | |
6065 | (lfi->lvl[i >> 1][0][i & 1] & 0x3f); |
6066 | if (!default_filt_lvl) |
6067 | level = 0; |
6068 | WRITE_VREG(HEVC_DBLK_CFGA, level); |
6069 | } |
6070 | } |
6071 | /* VP9_LPF_LVL_UPDATE */ |
6072 | #endif |
6073 | |
/*
 * One-time hardware initialization of the decoder front-end (stream
 * parser, interrupts, decode mode, parser microcode commands) and
 * back-end (scale LUT clear, IPP reset, NV21 reference mode, cache
 * performance counters), selected by @mask (HW_MASK_FRONT/HW_MASK_BACK).
 */
static void vp9_init_decoder_hw(struct VP9Decoder_s *pbi, u32 mask)
{
	unsigned int data32;
	int i;
	/* opcode sequence fed to the parser microcode below */
	const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
		0x0401,	0x8401,	0x0800,	0x0402,	0x9002,	0x1423,
		0x8CC3,	0x1423,	0x8804,	0x9825,	0x0800,	0x04FE,
		0x8406,	0x8411,	0x1800,	0x8408,	0x8409,	0x8C2A,
		0x9C2B,	0x1C00,	0x840F,	0x8407,	0x8000,	0x8408,
		0x2000,	0xA800,	0x8410,	0x04DE,	0x840C,	0x840D,
		0xAC00,	0xA000,	0x08C0,	0x08E0,	0xA40E,	0xFC00,
		0x7C00
	};
#if 0
	if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
		/* Set MCR fetch priorities*/
		data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
			(24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
		WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
	}
#endif
	/*if (debug & VP9_DEBUG_BUFMGR_MORE)
		pr_info("%s\n", __func__);*/
	if (mask & HW_MASK_FRONT) {
		data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
#if 1
		/* set bit 31~29 to 3 if HEVC_STREAM_FIFO_CTL[29] is 1 */
		data32 &= ~(7 << 29);
		data32 |= (3 << 29);
#endif
		data32 = data32 |
		(1 << 24) |/*stream_buffer_empty_int_amrisc_enable*/
		(1 << 22) |/*stream_fifo_empty_int_amrisc_enable*/
		(1 << 7) |/*dec_done_int_cpu_enable*/
		(1 << 4) |/*startcode_found_int_cpu_enable*/
		(0 << 3) |/*startcode_found_int_amrisc_enable*/
		(1 << 0)    /*parser_int_enable*/
		;
#ifdef SUPPORT_FB_DECODING
#ifndef FB_DECODING_TEST_SCHEDULE
		/*fed_fb_slice_done_int_cpu_enable*/
		if (pbi->used_stage_buf_num > 0)
			data32 |= (1 << 10);
#endif
#endif
		WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);

		data32 = READ_VREG(HEVC_SHIFT_STATUS);
		data32 = data32 |
		(0 << 1) |/*emulation_check_off VP9
			do not have emulation*/
		(1 << 0)/*startcode_check_on*/
		;
		WRITE_VREG(HEVC_SHIFT_STATUS, data32);
		WRITE_VREG(HEVC_SHIFT_CONTROL,
		(0 << 14) | /*disable_start_code_protect*/
		(1 << 10) | /*length_zero_startcode_en for VP9*/
		(1 << 9) | /*length_valid_startcode_en for VP9*/
		(3 << 6) | /*sft_valid_wr_position*/
		(2 << 4) | /*emulate_code_length_sub_1*/
		(3 << 1) | /*start_code_length_sub_1
			VP9 use 0x00000001 as startcode (4 Bytes)*/
		(1 << 0)   /*stream_shift_enable*/
		);

		WRITE_VREG(HEVC_CABAC_CONTROL,
			(1 << 0)/*cabac_enable*/
		);

		WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
			(1 << 0)/* hevc_parser_core_clk_en*/
		);


		WRITE_VREG(HEVC_DEC_STATUS_REG, 0);

	}

	if (mask & HW_MASK_BACK) {
		/*Initial IQIT_SCALELUT memory
		-- just to avoid X in simulation*/

		WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0);/*cfg_p_addr*/
		for (i = 0; i < 1024; i++)
			WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
	}

	if (mask & HW_MASK_FRONT) {
		u32 decode_mode;
#ifdef ENABLE_SWAP_TEST
		WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
#else
		WRITE_VREG(HEVC_STREAM_SWAP_TEST, 0);
#endif
#ifdef MULTI_INSTANCE_SUPPORT
		/* pick decode mode from instance type (single vs multi)
		 * and input style (frame-based vs stream-based)
		 */
		if (!pbi->m_ins_flag) {
			if (pbi->low_latency_flag)
				decode_mode = DECODE_MODE_SINGLE_LOW_LATENCY;
			else
				decode_mode = DECODE_MODE_SINGLE;
		} else if (vdec_frame_based(hw_to_vdec(pbi)))
			decode_mode = pbi->no_head ?
				DECODE_MODE_MULTI_FRAMEBASE_NOHEAD :
				DECODE_MODE_MULTI_FRAMEBASE;
		else
			decode_mode = DECODE_MODE_MULTI_STREAMBASE;
#ifdef SUPPORT_FB_DECODING
#ifndef FB_DECODING_TEST_SCHEDULE
		if (pbi->used_stage_buf_num > 0)
			decode_mode |= (0x01 << 24);
#endif
#endif
		WRITE_VREG(DECODE_MODE, decode_mode);
		WRITE_VREG(HEVC_DECODE_SIZE, 0);
		WRITE_VREG(HEVC_DECODE_COUNT, 0);
#else
		WRITE_VREG(DECODE_MODE, DECODE_MODE_SINGLE);
		WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);
		WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0x7fffffff); /*to remove*/
#endif
		/*Send parser_cmd*/
		WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
		for (i = 0; i < PARSER_CMD_NUMBER; i++)
			WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
		WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
		WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
		WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);


		WRITE_VREG(HEVC_PARSER_IF_CONTROL,
			/*  (1 << 8) |*/ /*sao_sw_pred_enable*/
			(1 << 5) | /*parser_sao_if_en*/
			(1 << 2) | /*parser_mpred_if_en*/
			(1 << 0) /*parser_scaler_if_en*/
		);
	}

	if (mask & HW_MASK_BACK) {
		/*Changed to Start MPRED in microcode*/
		/*
		pr_info("[test.c] Start MPRED\n");
		WRITE_VREG(HEVC_MPRED_INT_STATUS,
			(1<<31)
		);
		*/
		/* pulse a software reset of IPP and MPP, then enable IPP */
		WRITE_VREG(HEVCD_IPP_TOP_CNTL,
			(0 << 1) | /*enable ipp*/
			(1 << 0)   /*software reset ipp and mpp*/
		);
		WRITE_VREG(HEVCD_IPP_TOP_CNTL,
			(1 << 1) | /*enable ipp*/
			(0 << 0)   /*software reset ipp and mpp*/
		);
		if (get_double_write_mode(pbi) & 0x10) {
			/*Enable NV21 reference read mode for MC*/
			WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x1 << 31);
		}

		/*Initialize mcrcc and decomp perf counters*/
		if (mcrcc_cache_alg_flag &&
			pbi->init_flag == 0) {
			mcrcc_perfcount_reset();
			decomp_perfcount_reset();
		}
	}
	return;
}
6241 | |
6242 | |
6243 | #ifdef CONFIG_HEVC_CLK_FORCED_ON |
6244 | static void config_vp9_clk_forced_on(void) |
6245 | { |
6246 | unsigned int rdata32; |
6247 | /*IQIT*/ |
6248 | rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL); |
6249 | WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2)); |
6250 | |
6251 | /* DBLK*/ |
6252 | rdata32 = READ_VREG(HEVC_DBLK_CFG0); |
6253 | WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2)); |
6254 | |
6255 | /* SAO*/ |
6256 | rdata32 = READ_VREG(HEVC_SAO_CTRL1); |
6257 | WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2)); |
6258 | |
6259 | /*MPRED*/ |
6260 | rdata32 = READ_VREG(HEVC_MPRED_CTRL1); |
6261 | WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24)); |
6262 | |
6263 | /* PARSER*/ |
6264 | rdata32 = READ_VREG(HEVC_STREAM_CONTROL); |
6265 | WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15)); |
6266 | rdata32 = READ_VREG(HEVC_SHIFT_CONTROL); |
6267 | WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15)); |
6268 | rdata32 = READ_VREG(HEVC_CABAC_CONTROL); |
6269 | WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13)); |
6270 | rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL); |
6271 | WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15)); |
6272 | rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL); |
6273 | WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15)); |
6274 | rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL); |
6275 | WRITE_VREG(HEVC_PARSER_IF_CONTROL, |
6276 | rdata32 | (0x1 << 6) | (0x1 << 3) | (0x1 << 1)); |
6277 | |
6278 | /*IPP*/ |
6279 | rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG); |
6280 | WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff); |
6281 | |
6282 | /* MCRCC*/ |
6283 | rdata32 = READ_VREG(HEVCD_MCRCC_CTL1); |
6284 | WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3)); |
6285 | } |
6286 | #endif |
6287 | |
6288 | |
6289 | #ifdef MCRCC_ENABLE |
6290 | static void dump_hit_rate(struct VP9Decoder_s *pbi) |
6291 | { |
6292 | if (debug & VP9_DEBUG_CACHE_HIT_RATE) { |
6293 | mcrcc_get_hitrate(pbi->m_ins_flag); |
6294 | decomp_get_hitrate(); |
6295 | decomp_get_comprate(); |
6296 | } |
6297 | } |
6298 | |
/*
 * Configure the motion-compensation reference cache (MCRCC) for the
 * current frame. Intra frames leave the cache disabled; inter frames
 * program the LAST/GOLDEN reference canvases into CTL2/CTL3 and enable
 * progressive mode. Note each read of HEVCD_MPP_ANC_CANVAS_DATA_ADDR
 * appears to advance to the next canvas entry — the two identical-looking
 * reads below fetch canvas0 then canvas1.
 */
static void config_mcrcc_axi_hw(struct VP9Decoder_s *pbi)
{
	unsigned int rdata32;
	unsigned short is_inter;
	/*pr_info("Entered config_mcrcc_axi_hw...\n");*/
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2);/* reset mcrcc*/
	is_inter = ((pbi->common.frame_type != KEY_FRAME) &&
			(!pbi->common.intra_only)) ? 1 : 0;
	if (!is_inter) { /* I-PIC*/
		/*remove reset -- disables clock*/
		WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
		return;
	}

	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
		mcrcc_get_hitrate(pbi->m_ins_flag);
		decomp_get_hitrate();
		decomp_get_comprate();
	}

	/* select canvas index 0, auto-increment read pointer */
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
			(0 << 8) | (1 << 1) | 0);
	rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
	rdata32 = rdata32 & 0xffff;
	rdata32 = rdata32 | (rdata32 << 16);
	WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
	/*Programme canvas1 */
	rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
	rdata32 = rdata32 & 0xffff;
	rdata32 = rdata32 | (rdata32 << 16);
	WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
	/*enable mcrcc progressive-mode*/
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
}
6333 | |
6334 | static void config_mcrcc_axi_hw_new(struct VP9Decoder_s *pbi) |
6335 | { |
6336 | u32 curr_picnum = -1; |
6337 | u32 lastref_picnum = -1; |
6338 | u32 goldenref_picnum = -1; |
6339 | u32 altref_picnum = -1; |
6340 | |
6341 | u32 lastref_delta_picnum; |
6342 | u32 goldenref_delta_picnum; |
6343 | u32 altref_delta_picnum; |
6344 | |
6345 | u32 rdata32; |
6346 | |
6347 | u32 lastcanvas; |
6348 | u32 goldencanvas; |
6349 | u32 altrefcanvas; |
6350 | |
6351 | u16 is_inter; |
6352 | u16 lastref_inref; |
6353 | u16 goldenref_inref; |
6354 | u16 altref_inref; |
6355 | |
6356 | u32 refcanvas_array[3], utmp; |
6357 | int deltapicnum_array[3], tmp; |
6358 | |
6359 | struct VP9_Common_s *cm = &pbi->common; |
6360 | struct PIC_BUFFER_CONFIG_s *cur_pic_config |
6361 | = &cm->cur_frame->buf; |
6362 | curr_picnum = cur_pic_config->decode_idx; |
6363 | if (cm->frame_refs[0].buf) |
6364 | lastref_picnum = cm->frame_refs[0].buf->decode_idx; |
6365 | if (cm->frame_refs[1].buf) |
6366 | goldenref_picnum = cm->frame_refs[1].buf->decode_idx; |
6367 | if (cm->frame_refs[2].buf) |
6368 | altref_picnum = cm->frame_refs[2].buf->decode_idx; |
6369 | |
6370 | lastref_delta_picnum = (lastref_picnum >= curr_picnum) ? |
6371 | (lastref_picnum - curr_picnum) : (curr_picnum - lastref_picnum); |
6372 | goldenref_delta_picnum = (goldenref_picnum >= curr_picnum) ? |
6373 | (goldenref_picnum - curr_picnum) : |
6374 | (curr_picnum - goldenref_picnum); |
6375 | altref_delta_picnum = |
6376 | (altref_picnum >= curr_picnum) ? |
6377 | (altref_picnum - curr_picnum) : (curr_picnum - altref_picnum); |
6378 | |
6379 | lastref_inref = (cm->frame_refs[0].idx != INVALID_IDX) ? 1 : 0; |
6380 | goldenref_inref = (cm->frame_refs[1].idx != INVALID_IDX) ? 1 : 0; |
6381 | altref_inref = (cm->frame_refs[2].idx != INVALID_IDX) ? 1 : 0; |
6382 | |
6383 | if (debug & VP9_DEBUG_CACHE) |
6384 | pr_info("%s--0--lastref_inref:%d goldenref_inref:%d altref_inref:%d\n", |
6385 | __func__, lastref_inref, goldenref_inref, altref_inref); |
6386 | |
6387 | WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */ |
6388 | |
6389 | is_inter = ((pbi->common.frame_type != KEY_FRAME) |
6390 | && (!pbi->common.intra_only)) ? 1 : 0; |
6391 | |
6392 | if (!is_inter) { /* I-PIC */ |
6393 | /* remove reset -- disables clock */ |
6394 | WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0); |
6395 | return; |
6396 | } |
6397 | |
6398 | if (!pbi->m_ins_flag) |
6399 | dump_hit_rate(pbi); |
6400 | |
6401 | WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR, (0 << 8) | (1<<1) | 0); |
6402 | lastcanvas = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR); |
6403 | goldencanvas = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR); |
6404 | altrefcanvas = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR); |
6405 | |
6406 | if (debug & VP9_DEBUG_CACHE) |
6407 | pr_info("[test.c] lastref_canv:%x goldenref_canv:%x altref_canv:%x\n", |
6408 | lastcanvas, goldencanvas, altrefcanvas); |
6409 | |
6410 | altref_inref = ((altref_inref == 1) && |
6411 | (altrefcanvas != (goldenref_inref |
6412 | ? goldencanvas : 0xffffffff)) && |
6413 | (altrefcanvas != (lastref_inref ? |
6414 | lastcanvas : 0xffffffff))) ? 1 : 0; |
6415 | goldenref_inref = ((goldenref_inref == 1) && |
6416 | (goldencanvas != (lastref_inref ? |
6417 | lastcanvas : 0xffffffff))) ? 1 : 0; |
6418 | if (debug & VP9_DEBUG_CACHE) |
6419 | pr_info("[test.c]--1--lastref_inref:%d goldenref_inref:%d altref_inref:%d\n", |
6420 | lastref_inref, goldenref_inref, altref_inref); |
6421 | |
6422 | altref_delta_picnum = altref_inref ? altref_delta_picnum : 0x7fffffff; |
6423 | goldenref_delta_picnum = goldenref_inref ? |
6424 | goldenref_delta_picnum : 0x7fffffff; |
6425 | lastref_delta_picnum = lastref_inref ? |
6426 | lastref_delta_picnum : 0x7fffffff; |
6427 | if (debug & VP9_DEBUG_CACHE) |
6428 | pr_info("[test.c]--1--lastref_delta_picnum:%d goldenref_delta_picnum:%d altref_delta_picnum:%d\n", |
6429 | lastref_delta_picnum, goldenref_delta_picnum, |
6430 | altref_delta_picnum); |
6431 | /*ARRAY SORT HERE DELTA/CANVAS ARRAY SORT -- use DELTA*/ |
6432 | |
6433 | refcanvas_array[0] = lastcanvas; |
6434 | refcanvas_array[1] = goldencanvas; |
6435 | refcanvas_array[2] = altrefcanvas; |
6436 | |
6437 | deltapicnum_array[0] = lastref_delta_picnum; |
6438 | deltapicnum_array[1] = goldenref_delta_picnum; |
6439 | deltapicnum_array[2] = altref_delta_picnum; |
6440 | |
6441 | /* sort0 : 2-to-1 */ |
6442 | if (deltapicnum_array[2] < deltapicnum_array[1]) { |
6443 | utmp = refcanvas_array[2]; |
6444 | refcanvas_array[2] = refcanvas_array[1]; |
6445 | refcanvas_array[1] = utmp; |
6446 | tmp = deltapicnum_array[2]; |
6447 | deltapicnum_array[2] = deltapicnum_array[1]; |
6448 | deltapicnum_array[1] = tmp; |
6449 | } |
6450 | /* sort1 : 1-to-0 */ |
6451 | if (deltapicnum_array[1] < deltapicnum_array[0]) { |
6452 | utmp = refcanvas_array[1]; |
6453 | refcanvas_array[1] = refcanvas_array[0]; |
6454 | refcanvas_array[0] = utmp; |
6455 | tmp = deltapicnum_array[1]; |
6456 | deltapicnum_array[1] = deltapicnum_array[0]; |
6457 | deltapicnum_array[0] = tmp; |
6458 | } |
6459 | /* sort2 : 2-to-1 */ |
6460 | if (deltapicnum_array[2] < deltapicnum_array[1]) { |
6461 | utmp = refcanvas_array[2]; refcanvas_array[2] = |
6462 | refcanvas_array[1]; refcanvas_array[1] = utmp; |
6463 | tmp = deltapicnum_array[2]; deltapicnum_array[2] = |
6464 | deltapicnum_array[1]; deltapicnum_array[1] = tmp; |
6465 | } |
6466 | if (mcrcc_cache_alg_flag == |
6467 | THODIYIL_MCRCC_CANVAS_ALGX) { /*09/15/2017*/ |
6468 | /* lowest delta_picnum */ |
6469 | rdata32 = refcanvas_array[0]; |
6470 | rdata32 = rdata32 & 0xffff; |
6471 | rdata32 = rdata32 | (rdata32 << 16); |
6472 | WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32); |
6473 | |
6474 | /* 2nd-lowest delta_picnum */ |
6475 | rdata32 = refcanvas_array[1]; |
6476 | rdata32 = rdata32 & 0xffff; |
6477 | rdata32 = rdata32 | (rdata32 << 16); |
6478 | WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32); |
6479 | } else { |
6480 | /* previous version -- LAST/GOLDEN ALWAYS -- before 09/13/2017*/ |
6481 | WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR, |
6482 | (0 << 8) | (1<<1) | 0); |
6483 | rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR); |
6484 | rdata32 = rdata32 & 0xffff; |
6485 | rdata32 = rdata32 | (rdata32 << 16); |
6486 | WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32); |
6487 | |
6488 | /* Programme canvas1 */ |
6489 | rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR); |
6490 | rdata32 = rdata32 & 0xffff; |
6491 | rdata32 = rdata32 | (rdata32 << 16); |
6492 | WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32); |
6493 | } |
6494 | |
6495 | WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0); /* enable mcrcc progressive-mode */ |
6496 | return; |
6497 | } |
6498 | |
6499 | #endif |
6500 | |
6501 | |
6502 | static void free_lf_buf(struct VP9Decoder_s *pbi) |
6503 | { |
6504 | if (pbi->lfi) |
6505 | vfree(pbi->lfi); |
6506 | if (pbi->lf) |
6507 | vfree(pbi->lf); |
6508 | if (pbi->seg_4lf) |
6509 | vfree(pbi->seg_4lf); |
6510 | pbi->lfi = NULL; |
6511 | pbi->lf = NULL; |
6512 | pbi->seg_4lf = NULL; |
6513 | } |
6514 | |
6515 | static int alloc_lf_buf(struct VP9Decoder_s *pbi) |
6516 | { |
6517 | pbi->lfi = vmalloc(sizeof(struct loop_filter_info_n)); |
6518 | pbi->lf = vmalloc(sizeof(struct loopfilter)); |
6519 | pbi->seg_4lf = vmalloc(sizeof(struct segmentation)); |
6520 | if (pbi->lfi == NULL || pbi->lf == NULL || pbi->seg_4lf == NULL) { |
6521 | free_lf_buf(pbi); |
6522 | pr_err("[test.c] vp9_loop_filter init malloc error!!!\n"); |
6523 | return -1; |
6524 | } |
6525 | return 0; |
6526 | } |
6527 | |
/*
 * Tear down all per-instance buffers allocated by vp9_local_init():
 * the coherent DMA buffers (rpm/lmem/prob/count/frame mmu map, and
 * the stage mmu map when frame-based decoding support is built in),
 * the loop-filter structures and the decoder status block.
 *
 * Every release is guarded by a pointer check, so this is safe to
 * call on a partially-initialised instance (e.g. after an allocation
 * failure in vp9_local_init()).
 */
static void vp9_local_uninit(struct VP9Decoder_s *pbi)
{
	/* drop the CPU-side aliases first; they point into the
	 * coherent buffers freed below */
	pbi->rpm_ptr = NULL;
	pbi->lmem_ptr = NULL;
	if (pbi->rpm_addr) {
		dma_free_coherent(amports_get_dma_device(),
					RPM_BUF_SIZE,
					pbi->rpm_addr,
					pbi->rpm_phy_addr);
		pbi->rpm_addr = NULL;
	}
	if (pbi->lmem_addr) {
		if (pbi->lmem_phy_addr)
			dma_free_coherent(amports_get_dma_device(),
				LMEM_BUF_SIZE, pbi->lmem_addr,
				pbi->lmem_phy_addr);
		pbi->lmem_addr = NULL;
	}
	if (pbi->prob_buffer_addr) {
		if (pbi->prob_buffer_phy_addr)
			dma_free_coherent(amports_get_dma_device(),
				PROB_BUF_SIZE, pbi->prob_buffer_addr,
				pbi->prob_buffer_phy_addr);

		pbi->prob_buffer_addr = NULL;
	}
	if (pbi->count_buffer_addr) {
		if (pbi->count_buffer_phy_addr)
			dma_free_coherent(amports_get_dma_device(),
				COUNT_BUF_SIZE, pbi->count_buffer_addr,
				pbi->count_buffer_phy_addr);

		pbi->count_buffer_addr = NULL;
	}
	if (pbi->mmu_enable) {
		/* size must match what vp9_local_init() allocated */
		u32 mmu_map_size = vvp9_frame_mmu_map_size(pbi);
		if (pbi->frame_mmu_map_addr) {
			if (pbi->frame_mmu_map_phy_addr)
				dma_free_coherent(amports_get_dma_device(),
					mmu_map_size,
					pbi->frame_mmu_map_addr,
					pbi->frame_mmu_map_phy_addr);
			pbi->frame_mmu_map_addr = NULL;
		}
	}
#ifdef SUPPORT_FB_DECODING
	if (pbi->stage_mmu_map_addr) {
		if (pbi->stage_mmu_map_phy_addr)
			dma_free_coherent(amports_get_dma_device(),
				STAGE_MMU_MAP_SIZE * STAGE_MAX_BUFFERS,
				pbi->stage_mmu_map_addr,
				pbi->stage_mmu_map_phy_addr);
		pbi->stage_mmu_map_addr = NULL;
	}

	uninit_stage_buf(pbi);
#endif

#ifdef VP9_LPF_LVL_UPDATE
	free_lf_buf(pbi);
#endif
	if (pbi->gvs)
		vfree(pbi->gvs);
	pbi->gvs = NULL;
}
6593 | |
6594 | static int vp9_local_init(struct VP9Decoder_s *pbi) |
6595 | { |
6596 | int ret = -1; |
6597 | /*int losless_comp_header_size, losless_comp_body_size;*/ |
6598 | |
6599 | struct BuffInfo_s *cur_buf_info = NULL; |
6600 | |
6601 | memset(&pbi->param, 0, sizeof(union param_u)); |
6602 | memset(&pbi->common, 0, sizeof(struct VP9_Common_s)); |
6603 | #ifdef MULTI_INSTANCE_SUPPORT |
6604 | cur_buf_info = &pbi->work_space_buf_store; |
6605 | |
6606 | if (vdec_is_support_4k()) { |
6607 | if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) { |
6608 | memcpy(cur_buf_info, &amvvp9_workbuff_spec[2], /* 8k */ |
6609 | sizeof(struct BuffInfo_s)); |
6610 | } else |
6611 | memcpy(cur_buf_info, &amvvp9_workbuff_spec[1], /* 4k */ |
6612 | sizeof(struct BuffInfo_s)); |
6613 | } else |
6614 | memcpy(cur_buf_info, &amvvp9_workbuff_spec[0],/* 1080p */ |
6615 | sizeof(struct BuffInfo_s)); |
6616 | |
6617 | cur_buf_info->start_adr = pbi->buf_start; |
6618 | if (!pbi->mmu_enable) |
6619 | pbi->mc_buf_spec.buf_end = pbi->buf_start + pbi->buf_size; |
6620 | |
6621 | #else |
6622 | /*! MULTI_INSTANCE_SUPPORT*/ |
6623 | if (vdec_is_support_4k()) { |
6624 | if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) |
6625 | cur_buf_info = &amvvp9_workbuff_spec[2];/* 8k work space */ |
6626 | else |
6627 | cur_buf_info = &amvvp9_workbuff_spec[1];/* 4k2k work space */ |
6628 | } else |
6629 | cur_buf_info = &amvvp9_workbuff_spec[0];/* 1080p work space */ |
6630 | |
6631 | #endif |
6632 | |
6633 | init_buff_spec(pbi, cur_buf_info); |
6634 | vp9_bufmgr_init(pbi, cur_buf_info, NULL); |
6635 | |
6636 | if (!vdec_is_support_4k() |
6637 | && (buf_alloc_width > 1920 && buf_alloc_height > 1088)) { |
6638 | buf_alloc_width = 1920; |
6639 | buf_alloc_height = 1088; |
6640 | if (pbi->max_pic_w > 1920 && pbi->max_pic_h > 1088) { |
6641 | pbi->max_pic_w = 1920; |
6642 | pbi->max_pic_h = 1088; |
6643 | } |
6644 | } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) { |
6645 | buf_alloc_width = 8192; |
6646 | buf_alloc_height = 4608; |
6647 | } |
6648 | pbi->init_pic_w = pbi->max_pic_w ? pbi->max_pic_w : |
6649 | (buf_alloc_width ? buf_alloc_width : |
6650 | (pbi->vvp9_amstream_dec_info.width ? |
6651 | pbi->vvp9_amstream_dec_info.width : |
6652 | pbi->work_space_buf->max_width)); |
6653 | pbi->init_pic_h = pbi->max_pic_h ? pbi->max_pic_h : |
6654 | (buf_alloc_height ? buf_alloc_height : |
6655 | (pbi->vvp9_amstream_dec_info.height ? |
6656 | pbi->vvp9_amstream_dec_info.height : |
6657 | pbi->work_space_buf->max_height)); |
6658 | |
6659 | /* video is not support unaligned with 64 in tl1 |
6660 | ** vdec canvas mode will be linear when dump yuv is set |
6661 | */ |
6662 | if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) && |
6663 | (pbi->double_write_mode != 0) && |
6664 | (((pbi->max_pic_w % 64) != 0) || |
6665 | (pbi->vvp9_amstream_dec_info.width % 64) != 0)) { |
6666 | if (hw_to_vdec(pbi)->canvas_mode != |
6667 | CANVAS_BLKMODE_LINEAR) |
6668 | pbi->mem_map_mode = 2; |
6669 | else { |
6670 | pbi->mem_map_mode = 0; |
6671 | pr_info("vdec blkmod linear, force mem_map_mode 0\n"); |
6672 | } |
6673 | } |
6674 | |
6675 | #ifndef MV_USE_FIXED_BUF |
6676 | if (init_mv_buf_list(pbi) < 0) { |
6677 | pr_err("%s: init_mv_buf_list fail\n", __func__); |
6678 | return -1; |
6679 | } |
6680 | #endif |
6681 | if (pbi->save_buffer_mode) |
6682 | pbi->used_buf_num = MAX_BUF_NUM_SAVE_BUF; |
6683 | else { |
6684 | if (pbi->is_used_v4l) |
6685 | pbi->used_buf_num = 5 + pbi->dynamic_buf_num_margin; |
6686 | else |
6687 | pbi->used_buf_num = max_buf_num; |
6688 | } |
6689 | |
6690 | if (pbi->used_buf_num > MAX_BUF_NUM) |
6691 | pbi->used_buf_num = MAX_BUF_NUM; |
6692 | if (pbi->used_buf_num > FRAME_BUFFERS) |
6693 | pbi->used_buf_num = FRAME_BUFFERS; |
6694 | |
6695 | pbi->pts_unstable = ((unsigned long)(pbi->vvp9_amstream_dec_info.param) |
6696 | & 0x40) >> 6; |
6697 | |
6698 | if ((debug & VP9_DEBUG_SEND_PARAM_WITH_REG) == 0) { |
6699 | pbi->rpm_addr = dma_alloc_coherent(amports_get_dma_device(), |
6700 | RPM_BUF_SIZE, |
6701 | &pbi->rpm_phy_addr, GFP_KERNEL); |
6702 | if (pbi->rpm_addr == NULL) { |
6703 | pr_err("%s: failed to alloc rpm buffer\n", __func__); |
6704 | return -1; |
6705 | } |
6706 | |
6707 | pbi->rpm_ptr = pbi->rpm_addr; |
6708 | } |
6709 | |
6710 | pbi->lmem_addr = dma_alloc_coherent(amports_get_dma_device(), |
6711 | LMEM_BUF_SIZE, |
6712 | &pbi->lmem_phy_addr, GFP_KERNEL); |
6713 | if (pbi->lmem_addr == NULL) { |
6714 | pr_err("%s: failed to alloc lmem buffer\n", __func__); |
6715 | return -1; |
6716 | } |
6717 | pbi->lmem_ptr = pbi->lmem_addr; |
6718 | |
6719 | pbi->prob_buffer_addr = dma_alloc_coherent(amports_get_dma_device(), |
6720 | PROB_BUF_SIZE, |
6721 | &pbi->prob_buffer_phy_addr, GFP_KERNEL); |
6722 | if (pbi->prob_buffer_addr == NULL) { |
6723 | pr_err("%s: failed to alloc prob_buffer\n", __func__); |
6724 | return -1; |
6725 | } |
6726 | memset(pbi->prob_buffer_addr, 0, PROB_BUF_SIZE); |
6727 | pbi->count_buffer_addr = dma_alloc_coherent(amports_get_dma_device(), |
6728 | COUNT_BUF_SIZE, |
6729 | &pbi->count_buffer_phy_addr, GFP_KERNEL); |
6730 | if (pbi->count_buffer_addr == NULL) { |
6731 | pr_err("%s: failed to alloc count_buffer\n", __func__); |
6732 | return -1; |
6733 | } |
6734 | memset(pbi->count_buffer_addr, 0, COUNT_BUF_SIZE); |
6735 | |
6736 | if (pbi->mmu_enable) { |
6737 | u32 mmu_map_size = vvp9_frame_mmu_map_size(pbi); |
6738 | pbi->frame_mmu_map_addr = |
6739 | dma_alloc_coherent(amports_get_dma_device(), |
6740 | mmu_map_size, |
6741 | &pbi->frame_mmu_map_phy_addr, GFP_KERNEL); |
6742 | if (pbi->frame_mmu_map_addr == NULL) { |
6743 | pr_err("%s: failed to alloc count_buffer\n", __func__); |
6744 | return -1; |
6745 | } |
6746 | memset(pbi->frame_mmu_map_addr, 0, COUNT_BUF_SIZE); |
6747 | } |
6748 | #ifdef SUPPORT_FB_DECODING |
6749 | if (pbi->m_ins_flag && stage_buf_num > 0) { |
6750 | pbi->stage_mmu_map_addr = |
6751 | dma_alloc_coherent(amports_get_dma_device(), |
6752 | STAGE_MMU_MAP_SIZE * STAGE_MAX_BUFFERS, |
6753 | &pbi->stage_mmu_map_phy_addr, GFP_KERNEL); |
6754 | if (pbi->stage_mmu_map_addr == NULL) { |
6755 | pr_err("%s: failed to alloc count_buffer\n", __func__); |
6756 | return -1; |
6757 | } |
6758 | memset(pbi->stage_mmu_map_addr, |
6759 | 0, STAGE_MMU_MAP_SIZE * STAGE_MAX_BUFFERS); |
6760 | |
6761 | init_stage_buf(pbi); |
6762 | } |
6763 | #endif |
6764 | |
6765 | ret = 0; |
6766 | return ret; |
6767 | } |
6768 | |
6769 | /******************************************** |
6770 | * Mailbox command |
6771 | ********************************************/ |
6772 | #define CMD_FINISHED 0 |
6773 | #define CMD_ALLOC_VIEW 1 |
6774 | #define CMD_FRAME_DISPLAY 3 |
6775 | #define CMD_DEBUG 10 |
6776 | |
6777 | |
6778 | #define DECODE_BUFFER_NUM_MAX 32 |
6779 | #define DISPLAY_BUFFER_NUM 6 |
6780 | |
6781 | #define video_domain_addr(adr) (adr&0x7fffffff) |
6782 | #define DECODER_WORK_SPACE_SIZE 0x800000 |
6783 | |
6784 | #define spec2canvas(x) \ |
6785 | (((x)->uv_canvas_index << 16) | \ |
6786 | ((x)->uv_canvas_index << 8) | \ |
6787 | ((x)->y_canvas_index << 0)) |
6788 | |
6789 | |
/*
 * Program the display canvases for one picture buffer.
 *
 * Only does work when double_write_mode is enabled for the picture:
 * the whole body is inside that branch, so for pure compressed-output
 * pictures this is a no-op.  Canvas indices come either from the
 * vdec core (parallel decode) or from a fixed 128+2*index scheme.
 * Under MULTI_INSTANCE_SUPPORT the same geometry is mirrored into
 * pic_config->canvas_config[] for per-instance canvas handling.
 */
static void set_canvas(struct VP9Decoder_s *pbi,
	struct PIC_BUFFER_CONFIG_s *pic_config)
{
	struct vdec_s *vdec = hw_to_vdec(pbi);
	/* default geometry; overwritten below when double-write is on */
	int canvas_w = ALIGN(pic_config->y_crop_width, 64)/4;
	int canvas_h = ALIGN(pic_config->y_crop_height, 32)/4;
	int blkmode = pbi->mem_map_mode;
	/*CANVAS_BLKMODE_64X32*/
	if (pic_config->double_write_mode) {
		/* scale the crop size down by the double-write ratio */
		canvas_w = pic_config->y_crop_width /
			get_double_write_ratio(pbi,
			pic_config->double_write_mode);
		canvas_h = pic_config->y_crop_height /
			get_double_write_ratio(pbi,
			pic_config->double_write_mode);

		/* linear mode (mem_map_mode 0) needs 32-byte aligned
		 * stride, block modes need 64 */
		if (pbi->mem_map_mode == 0)
			canvas_w = ALIGN(canvas_w, 32);
		else
			canvas_w = ALIGN(canvas_w, 64);
		canvas_h = ALIGN(canvas_h, 32);

		if (vdec->parallel_dec == 1) {
			/* canvases handed out by the vdec core on demand */
			if (pic_config->y_canvas_index == -1)
				pic_config->y_canvas_index =
					vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
			if (pic_config->uv_canvas_index == -1)
				pic_config->uv_canvas_index =
					vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
		} else {
			/* fixed mapping: two canvases per picture index */
			pic_config->y_canvas_index = 128 + pic_config->index * 2;
			pic_config->uv_canvas_index = 128 + pic_config->index * 2 + 1;
		}

		/* endian 7 for the legacy path, 0 for v4l buffers */
		canvas_config_ex(pic_config->y_canvas_index,
			pic_config->dw_y_adr, canvas_w, canvas_h,
			CANVAS_ADDR_NOWRAP, blkmode, pbi->is_used_v4l ? 0 : 7);
		canvas_config_ex(pic_config->uv_canvas_index,
			pic_config->dw_u_v_adr, canvas_w, canvas_h,
			CANVAS_ADDR_NOWRAP, blkmode, pbi->is_used_v4l ? 0 : 7);

#ifdef MULTI_INSTANCE_SUPPORT
		/* mirror the canvas geometry for multi-instance use */
		pic_config->canvas_config[0].phy_addr =
				pic_config->dw_y_adr;
		pic_config->canvas_config[0].width =
				canvas_w;
		pic_config->canvas_config[0].height =
				canvas_h;
		pic_config->canvas_config[0].block_mode =
				blkmode;
		pic_config->canvas_config[0].endian = pbi->is_used_v4l ? 0 : 7;

		pic_config->canvas_config[1].phy_addr =
				pic_config->dw_u_v_adr;
		pic_config->canvas_config[1].width =
				canvas_w;
		pic_config->canvas_config[1].height =
				canvas_h;
		pic_config->canvas_config[1].block_mode =
				blkmode;
		pic_config->canvas_config[1].endian = pbi->is_used_v4l ? 0 : 7;
#endif
	}
}
6854 | |
6855 | |
6856 | static void set_frame_info(struct VP9Decoder_s *pbi, struct vframe_s *vf) |
6857 | { |
6858 | unsigned int ar; |
6859 | vf->duration = pbi->frame_dur; |
6860 | vf->duration_pulldown = 0; |
6861 | vf->flag = 0; |
6862 | vf->prop.master_display_colour = pbi->vf_dp; |
6863 | vf->signal_type = pbi->video_signal_type; |
6864 | if (vf->compWidth && vf->compHeight) |
6865 | pbi->frame_ar = vf->compHeight * 0x100 / vf->compWidth; |
6866 | ar = min_t(u32, pbi->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX); |
6867 | vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT); |
6868 | |
6869 | if (pbi->is_used_v4l && pbi->vf_dp.present_flag) { |
6870 | struct aml_vdec_hdr_infos hdr; |
6871 | struct aml_vcodec_ctx *ctx = |
6872 | (struct aml_vcodec_ctx *)(pbi->v4l2_ctx); |
6873 | |
6874 | memset(&hdr, 0, sizeof(hdr)); |
6875 | hdr.signal_type = vf->signal_type; |
6876 | hdr.color_parms = pbi->vf_dp; |
6877 | vdec_v4l_set_hdr_infos(ctx, &hdr); |
6878 | } |
6879 | |
6880 | vf->sidebind_type = pbi->sidebind_type; |
6881 | vf->sidebind_channel_id = pbi->sidebind_channel_id; |
6882 | } |
6883 | |
6884 | static int vvp9_vf_states(struct vframe_states *states, void *op_arg) |
6885 | { |
6886 | struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)op_arg; |
6887 | |
6888 | states->vf_pool_size = VF_POOL_SIZE; |
6889 | states->buf_free_num = kfifo_len(&pbi->newframe_q); |
6890 | states->buf_avail_num = kfifo_len(&pbi->display_q); |
6891 | |
6892 | if (step == 2) |
6893 | states->buf_avail_num = 0; |
6894 | return 0; |
6895 | } |
6896 | |
6897 | static struct vframe_s *vvp9_vf_peek(void *op_arg) |
6898 | { |
6899 | struct vframe_s *vf[2] = {0, 0}; |
6900 | struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)op_arg; |
6901 | |
6902 | if (step == 2) |
6903 | return NULL; |
6904 | |
6905 | if (kfifo_out_peek(&pbi->display_q, (void *)&vf, 2)) { |
6906 | if (vf[1]) { |
6907 | vf[0]->next_vf_pts_valid = true; |
6908 | vf[0]->next_vf_pts = vf[1]->pts; |
6909 | } else |
6910 | vf[0]->next_vf_pts_valid = false; |
6911 | return vf[0]; |
6912 | } |
6913 | |
6914 | return NULL; |
6915 | } |
6916 | |
/*
 * vframe provider "get": pop the next decoded frame from the display
 * queue and hand it to the receiver.
 *
 * Single-step debug support: step == 1 delivers exactly one frame and
 * then latches step = 2, which blocks all further delivery.  The pts
 * of the following queued frame is pre-published as next_vf_pts so
 * the output path can pace frames accurately.
 */
static struct vframe_s *vvp9_vf_get(void *op_arg)
{
	struct vframe_s *vf;
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)op_arg;

	if (step == 2)
		return NULL;
	else if (step == 1)
		step = 2;

	if (kfifo_get(&pbi->display_q, &vf)) {
		struct vframe_s *next_vf;
		/* low byte of vf->index is the picture buffer index
		 * (high byte is the 0xff00 provider tag) */
		uint8_t index = vf->index & 0xff;
		if (index < pbi->used_buf_num ||
			(vf->type & VIDTYPE_V4L_EOS)) {
			vf->index_disp = pbi->vf_get_count;
			pbi->vf_get_count++;
			if (debug & VP9_DEBUG_BUFMGR)
				pr_info("%s type 0x%x w/h %d/%d, pts %d, %lld\n",
					__func__, vf->type,
					vf->width, vf->height,
					vf->pts,
					vf->pts_us64);

			/* peek (without popping) the frame that follows */
			if (kfifo_peek(&pbi->display_q, &next_vf)) {
				vf->next_vf_pts_valid = true;
				vf->next_vf_pts = next_vf->pts;
			} else
				vf->next_vf_pts_valid = false;

			return vf;
		}
		/* NOTE(review): a popped vf failing the index check is
		 * dropped without being returned to newframe_q —
		 * confirm this cannot leak vframe slots */
	}
	return NULL;
}
6952 | |
/*
 * vframe provider "put": receiver returns a displayed frame.
 *
 * Recycles the vframe into newframe_q, drops the buffer-pool vf_ref
 * under the pool lock, and kicks the decoder (mailbox IRQ) if it was
 * stalled waiting for a free buffer.  The dummy vframe used for EOS
 * signalling is ignored.
 */
static void vvp9_vf_put(struct vframe_s *vf, void *op_arg)
{
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)op_arg;
	/* low byte of vf->index is the picture buffer index */
	uint8_t index = vf->index & 0xff;

	if (vf == (&pbi->vframe_dummy))
		return;

	/* release the display fence, if any, before recycling */
	if (pbi->enable_fence && vf->fence) {
		vdec_fence_put(vf->fence);
		vf->fence = NULL;
	}

	kfifo_put(&pbi->newframe_q, (const struct vframe_s *)vf);
	pbi->vf_put_count++;
	if (index < pbi->used_buf_num) {
		struct VP9_Common_s *cm = &pbi->common;
		struct BufferPool_s *pool = cm->buffer_pool;
		unsigned long flags;

		/* vf_ref and wait_buf are shared with the decode path;
		 * protect with the buffer-pool lock */
		lock_buffer_pool(pool, flags);
		if (pool->frame_bufs[index].buf.vf_ref > 0)
			pool->frame_bufs[index].buf.vf_ref--;

		if (pbi->is_used_v4l)
			pool->frame_bufs[index].buf.vframe_bound = true;

		/* wake the decoder if it stalled waiting for a buffer */
		if (pbi->wait_buf)
			WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
						0x1);
		pbi->last_put_idx = index;
		pbi->new_frame_displayed++;
		unlock_buffer_pool(pool, flags);
#ifdef SUPPORT_FB_DECODING
		if (pbi->used_stage_buf_num > 0 &&
			pbi->back_not_run_ready)
			trigger_schedule(pbi);
#endif
	}

}
6994 | |
/*
 * vframe provider event callback.
 *
 * The legacy RECEIVER_RESET handling that used to live here was
 * permanently compiled out with "#if 0" and referenced a `pbi`
 * variable that is not even in scope, so it could never be enabled
 * as-is; the dead code has been removed.  All events are accepted
 * and ignored.
 *
 * Always returns 0.
 */
static int vvp9_event_cb(int type, void *data, void *private_data)
{
	return 0;
}
7018 | |
7019 | void inc_vf_ref(struct VP9Decoder_s *pbi, int index) |
7020 | { |
7021 | struct VP9_Common_s *cm = &pbi->common; |
7022 | |
7023 | cm->buffer_pool->frame_bufs[index].buf.vf_ref++; |
7024 | |
7025 | if (debug & VP9_DEBUG_BUFMGR_MORE) |
7026 | pr_info("%s index = %d new vf_ref = %d\r\n", |
7027 | __func__, index, |
7028 | cm->buffer_pool->frame_bufs[index].buf.vf_ref); |
7029 | } |
7030 | |
/*
 * Estimate the real frame duration from observed pts values when the
 * stream's nominal duration is unreliable (pts_unstable).
 *
 * Measures the average pts spacing over a window of frames
 * (FRAME_CNT_WINDOW_SIZE) between anchor pts1 and the current pts,
 * and adopts the measured duration when it is close enough to the
 * current/last estimates.  Sets duration_from_pts_done once the
 * estimate has converged.
 *
 * @valid: non-zero when vf->pts came from a real pts lookup.
 * Returns true once a first valid pts has been latched (or the
 * duration is already known), false while still waiting for one.
 */
static int frame_duration_adapt(struct VP9Decoder_s *pbi, struct vframe_s *vf, u32 valid)
{
	u32 old_duration, pts_duration = 0;
	u32 pts = vf->pts;

	/* nothing to do once the duration is authoritative */
	if (pbi->get_frame_dur == true)
		return true;

	pbi->frame_cnt_window++;
	if (!(pbi->vp9_first_pts_ready == 1)) {
		/* latch the first valid pts as the window anchor */
		if (valid) {
			pbi->pts1 = pts;
			pbi->frame_cnt_window = 0;
			pbi->duration_from_pts_done = 0;
			pbi->vp9_first_pts_ready = 1;
		} else {
			return false;
		}
	} else {
		/* pts went backwards (seek/loop): restart the window
		 * once enough frames have elapsed */
		if (pts < pbi->pts1) {
			if (pbi->frame_cnt_window > FRAME_CNT_WINDOW_SIZE) {
				pbi->pts1 = pts;
				pbi->frame_cnt_window = 0;
			}
		}

		if (valid && (pbi->frame_cnt_window > FRAME_CNT_WINDOW_SIZE) &&
			(pts > pbi->pts1) && (pbi->duration_from_pts_done == 0)) {
			old_duration = pbi->frame_dur;
			pbi->pts2 = pts;
			/* average spacing over the window; the 16/15
			 * factor converts the pts delta to the 96 kHz
			 * duration unit */
			pts_duration = (((pbi->pts2 - pbi->pts1) * 16) /
					(pbi->frame_cnt_window * 15));

			/* adopt the measurement if near the current value */
			if (close_to(pts_duration, old_duration, 2000)) {
				pbi->frame_dur = pts_duration;
				if ((debug & VP9_DEBUG_OUT_PTS) != 0)
					pr_info("use calc duration %d\n", pts_duration);
			}

			if (pbi->duration_from_pts_done == 0) {
				if (close_to(pts_duration, old_duration, RATE_CORRECTION_THRESHOLD)) {
					/* converged */
					pbi->duration_from_pts_done = 1;
				} else {
					if (!close_to(pts_duration,
						old_duration, 1000) &&
						!close_to(pts_duration,
						pbi->frame_dur, 1000) &&
						close_to(pts_duration,
						pbi->last_duration, 200)) {
						/* two consecutive windows agree
						 * but differ from frame_dur, so
						 * frame_dur must be wrong —
						 * recover it from the measurement
						 */
						pbi->frame_dur = pts_duration;
					}
					/* slide the window and keep measuring */
					pbi->pts1 = pbi->pts2;
					pbi->frame_cnt_window = 0;
					pbi->duration_from_pts_done = 0;
				}
			}
			pbi->last_duration = pts_duration;
		}
	}
	return true;
}
7095 | |
/*
 * Attach the backing-memory handles to a vframe so downstream users
 * can keep the underlying allocations alive:
 *  - invalid picture (index < 0): no handles;
 *  - scatter (MMU) frames: mmu box pages + bmmu header buffer, plus
 *    the double-write buffer when double_write_mode == 3;
 *  - linear frames: the single bmmu vframe buffer.
 */
static void update_vf_memhandle(struct VP9Decoder_s *pbi,
	struct vframe_s *vf, struct PIC_BUFFER_CONFIG_s *pic)
{
	if (pic->index < 0) {
		vf->mem_handle = NULL;
		vf->mem_head_handle = NULL;
		vf->mem_dw_handle = NULL;
	} else if (vf->type & VIDTYPE_SCATTER) {
		vf->mem_handle =
			decoder_mmu_box_get_mem_handle(
				pbi->mmu_box, pic->index);
		vf->mem_head_handle =
			decoder_bmmu_box_get_mem_handle(
				pbi->bmmu_box,
				HEADER_BUFFER_IDX(pic->BUF_index));
		if (pbi->double_write_mode == 3)
			vf->mem_dw_handle =
				decoder_bmmu_box_get_mem_handle(
					pbi->bmmu_box,
					VF_BUFFER_IDX(pic->BUF_index));
		else
			vf->mem_dw_handle = NULL;
	} else {
		vf->mem_handle =
			decoder_bmmu_box_get_mem_handle(
				pbi->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
		vf->mem_head_handle = NULL;
		vf->mem_dw_handle = NULL;
		/*vf->mem_head_handle =
		 *decoder_bmmu_box_get_mem_handle(
		 *hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));
		 */
	}
}
7130 | |
7131 | static inline void pbi_update_gvs(struct VP9Decoder_s *pbi) |
7132 | { |
7133 | if (pbi->gvs->frame_height != frame_height) { |
7134 | pbi->gvs->frame_width = frame_width; |
7135 | pbi->gvs->frame_height = frame_height; |
7136 | } |
7137 | if (pbi->gvs->frame_dur != pbi->frame_dur) { |
7138 | pbi->gvs->frame_dur = pbi->frame_dur; |
7139 | if (pbi->frame_dur != 0) |
7140 | pbi->gvs->frame_rate = 96000 / pbi->frame_dur; |
7141 | else |
7142 | pbi->gvs->frame_rate = -1; |
7143 | } |
7144 | pbi->gvs->status = pbi->stat | pbi->fatal_error; |
7145 | } |
7146 | |
7147 | static int prepare_display_buf(struct VP9Decoder_s *pbi, |
7148 | struct PIC_BUFFER_CONFIG_s *pic_config) |
7149 | { |
7150 | struct vframe_s *vf = NULL; |
7151 | struct vdec_s *pvdec = hw_to_vdec(pbi); |
7152 | int stream_offset = pic_config->stream_offset; |
7153 | unsigned short slice_type = pic_config->slice_type; |
7154 | struct aml_vcodec_ctx * v4l2_ctx = pbi->v4l2_ctx; |
7155 | ulong nv_order = VIDTYPE_VIU_NV21; |
7156 | u32 pts_valid = 0, pts_us64_valid = 0; |
7157 | u32 pts_save; |
7158 | u64 pts_us64_save; |
7159 | u32 frame_size = 0; |
7160 | int i = 0; |
7161 | |
7162 | |
7163 | if (debug & VP9_DEBUG_BUFMGR) |
7164 | pr_info("%s index = %d\r\n", __func__, pic_config->index); |
7165 | if (kfifo_get(&pbi->newframe_q, &vf) == 0) { |
7166 | pr_info("fatal error, no available buffer slot."); |
7167 | return -1; |
7168 | } |
7169 | |
7170 | /* swap uv */ |
7171 | if (pbi->is_used_v4l) { |
7172 | if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV12) || |
7173 | (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV12M)) |
7174 | nv_order = VIDTYPE_VIU_NV12; |
7175 | } |
7176 | |
7177 | if (pic_config->double_write_mode) |
7178 | set_canvas(pbi, pic_config); |
7179 | |
7180 | display_frame_count[pbi->index]++; |
7181 | if (vf) { |
7182 | if (!force_pts_unstable) { |
7183 | if ((pic_config->pts == 0) || (pic_config->pts <= pbi->last_pts)) { |
7184 | for (i = (FRAME_BUFFERS - 1); i > 0; i--) { |
7185 | if ((pbi->last_pts == pbi->frame_mode_pts_save[i]) || |
7186 | (pbi->last_pts_us64 == pbi->frame_mode_pts64_save[i])) { |
7187 | pic_config->pts = pbi->frame_mode_pts_save[i - 1]; |
7188 | pic_config->pts64 = pbi->frame_mode_pts64_save[i - 1]; |
7189 | break; |
7190 | } |
7191 | } |
7192 | if ((i == 0) || (pic_config->pts <= pbi->last_pts)) { |
7193 | vp9_print(pbi, VP9_DEBUG_OUT_PTS, |
7194 | "no found pts %d, set 0. %d, %d\n", |
7195 | i, pic_config->pts, pbi->last_pts); |
7196 | pic_config->pts = 0; |
7197 | pic_config->pts64 = 0; |
7198 | } |
7199 | } |
7200 | } |
7201 | |
7202 | if (pbi->is_used_v4l) { |
7203 | vf->v4l_mem_handle |
7204 | = pbi->m_BUF[pic_config->BUF_index].v4l_ref_buf_addr; |
7205 | if (pbi->mmu_enable) { |
7206 | vf->mm_box.bmmu_box = pbi->bmmu_box; |
7207 | vf->mm_box.bmmu_idx = HEADER_BUFFER_IDX(pic_config->BUF_index); |
7208 | vf->mm_box.mmu_box = pbi->mmu_box; |
7209 | vf->mm_box.mmu_idx = pic_config->index; |
7210 | } |
7211 | } |
7212 | |
7213 | if (pbi->enable_fence) { |
7214 | /* fill fence information. */ |
7215 | if (pbi->fence_usage == FENCE_USE_FOR_DRIVER) |
7216 | vf->fence = pic_config->fence; |
7217 | } |
7218 | |
7219 | #ifdef MULTI_INSTANCE_SUPPORT |
7220 | if (vdec_frame_based(pvdec)) { |
7221 | vf->pts = pic_config->pts; |
7222 | vf->pts_us64 = pic_config->pts64; |
7223 | vf->timestamp = pic_config->timestamp; |
7224 | if (vf->pts != 0 || vf->pts_us64 != 0) { |
7225 | pts_valid = 1; |
7226 | pts_us64_valid = 1; |
7227 | } else { |
7228 | pts_valid = 0; |
7229 | pts_us64_valid = 0; |
7230 | } |
7231 | } else |
7232 | #endif |
7233 | /* if (pts_lookup_offset(PTS_TYPE_VIDEO, |
7234 | * stream_offset, &vf->pts, 0) != 0) { |
7235 | */ |
7236 | if (pts_lookup_offset_us64 |
7237 | (PTS_TYPE_VIDEO, stream_offset, &vf->pts, |
7238 | &frame_size, 0, |
7239 | &vf->pts_us64) != 0) { |
7240 | #ifdef DEBUG_PTS |
7241 | pbi->pts_missed++; |
7242 | #endif |
7243 | vf->pts = 0; |
7244 | vf->pts_us64 = 0; |
7245 | pts_valid = 0; |
7246 | pts_us64_valid = 0; |
7247 | } else { |
7248 | #ifdef DEBUG_PTS |
7249 | pbi->pts_hit++; |
7250 | #endif |
7251 | pts_valid = 1; |
7252 | pts_us64_valid = 1; |
7253 | } |
7254 | |
7255 | fill_frame_info(pbi, pic_config, frame_size, vf->pts); |
7256 | |
7257 | pts_save = vf->pts; |
7258 | pts_us64_save = vf->pts_us64; |
7259 | if (pbi->pts_unstable) { |
7260 | frame_duration_adapt(pbi, vf, pts_valid); |
7261 | if (pbi->duration_from_pts_done) { |
7262 | pbi->pts_mode = PTS_NONE_REF_USE_DURATION; |
7263 | } else { |
7264 | if (pts_valid || pts_us64_valid) |
7265 | pbi->pts_mode = PTS_NORMAL; |
7266 | } |
7267 | } |
7268 | |
7269 | if ((pbi->pts_mode == PTS_NORMAL) && (vf->pts != 0) |
7270 | && pbi->get_frame_dur) { |
7271 | int pts_diff = (int)vf->pts - pbi->last_lookup_pts; |
7272 | |
7273 | if (pts_diff < 0) { |
7274 | pbi->pts_mode_switching_count++; |
7275 | pbi->pts_mode_recovery_count = 0; |
7276 | |
7277 | if (pbi->pts_mode_switching_count >= |
7278 | PTS_MODE_SWITCHING_THRESHOLD) { |
7279 | pbi->pts_mode = |
7280 | PTS_NONE_REF_USE_DURATION; |
7281 | pr_info |
7282 | ("HEVC: switch to n_d mode.\n"); |
7283 | } |
7284 | |
7285 | } else { |
7286 | int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD; |
7287 | |
7288 | pbi->pts_mode_recovery_count++; |
7289 | if (pbi->pts_mode_recovery_count > p) { |
7290 | pbi->pts_mode_switching_count = 0; |
7291 | pbi->pts_mode_recovery_count = 0; |
7292 | } |
7293 | } |
7294 | } |
7295 | |
7296 | if (vf->pts != 0) |
7297 | pbi->last_lookup_pts = vf->pts; |
7298 | |
7299 | if ((pbi->pts_mode == PTS_NONE_REF_USE_DURATION) |
7300 | && (slice_type != KEY_FRAME)) |
7301 | vf->pts = pbi->last_pts + DUR2PTS(pbi->frame_dur); |
7302 | pbi->last_pts = vf->pts; |
7303 | |
7304 | if (vf->pts_us64 != 0) |
7305 | pbi->last_lookup_pts_us64 = vf->pts_us64; |
7306 | |
7307 | if ((pbi->pts_mode == PTS_NONE_REF_USE_DURATION) |
7308 | && (slice_type != KEY_FRAME)) { |
7309 | vf->pts_us64 = |
7310 | pbi->last_pts_us64 + |
7311 | (DUR2PTS(pbi->frame_dur) * 100 / 9); |
7312 | } |
7313 | pbi->last_pts_us64 = vf->pts_us64; |
7314 | if ((debug & VP9_DEBUG_OUT_PTS) != 0) { |
7315 | pr_info |
7316 | ("VP9 dec out pts: pts_mode=%d,dur=%d,pts(%d,%lld)(%d,%lld)\n", |
7317 | pbi->pts_mode, pbi->frame_dur, vf->pts, |
7318 | vf->pts_us64, pts_save, pts_us64_save); |
7319 | } |
7320 | |
7321 | if (pbi->pts_mode == PTS_NONE_REF_USE_DURATION) { |
7322 | vf->disp_pts = vf->pts; |
7323 | vf->disp_pts_us64 = vf->pts_us64; |
7324 | vf->pts = pts_save; |
7325 | vf->pts_us64 = pts_us64_save; |
7326 | } else { |
7327 | vf->disp_pts = 0; |
7328 | vf->disp_pts_us64 = 0; |
7329 | } |
7330 | |
7331 | vf->index = 0xff00 | pic_config->index; |
7332 | |
7333 | if (pic_config->double_write_mode & 0x10) { |
7334 | /* double write only */ |
7335 | vf->compBodyAddr = 0; |
7336 | vf->compHeadAddr = 0; |
7337 | } else { |
7338 | if (pbi->mmu_enable) { |
7339 | vf->compBodyAddr = 0; |
7340 | vf->compHeadAddr = pic_config->header_adr; |
7341 | } else { |
7342 | /*vf->compBodyAddr = pic_config->mc_y_adr; |
7343 | *vf->compHeadAddr = pic_config->mc_y_adr + |
7344 | *pic_config->comp_body_size; */ |
7345 | /*head adr*/ |
7346 | } |
7347 | vf->canvas0Addr = vf->canvas1Addr = 0; |
7348 | } |
7349 | if (pic_config->double_write_mode) { |
7350 | vf->type = VIDTYPE_PROGRESSIVE | |
7351 | VIDTYPE_VIU_FIELD; |
7352 | vf->type |= nv_order; |
7353 | if ((pic_config->double_write_mode == 3) && |
7354 | (!IS_8K_SIZE(pic_config->y_crop_width, |
7355 | pic_config->y_crop_height))) { |
7356 | vf->type |= VIDTYPE_COMPRESS; |
7357 | if (pbi->mmu_enable) |
7358 | vf->type |= VIDTYPE_SCATTER; |
7359 | } |
7360 | #ifdef MULTI_INSTANCE_SUPPORT |
7361 | if (pbi->m_ins_flag) { |
7362 | vf->canvas0Addr = vf->canvas1Addr = -1; |
7363 | vf->plane_num = 2; |
7364 | vf->canvas0_config[0] = |
7365 | pic_config->canvas_config[0]; |
7366 | vf->canvas0_config[1] = |
7367 | pic_config->canvas_config[1]; |
7368 | vf->canvas1_config[0] = |
7369 | pic_config->canvas_config[0]; |
7370 | vf->canvas1_config[1] = |
7371 | pic_config->canvas_config[1]; |
7372 | |
7373 | } else |
7374 | #endif |
7375 | vf->canvas0Addr = vf->canvas1Addr = |
7376 | spec2canvas(pic_config); |
7377 | } else { |
7378 | vf->canvas0Addr = vf->canvas1Addr = 0; |
7379 | vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD; |
7380 | if (pbi->mmu_enable) |
7381 | vf->type |= VIDTYPE_SCATTER; |
7382 | } |
7383 | |
7384 | switch (pic_config->bit_depth) { |
7385 | case VPX_BITS_8: |
7386 | vf->bitdepth = BITDEPTH_Y8 | |
7387 | BITDEPTH_U8 | BITDEPTH_V8; |
7388 | break; |
7389 | case VPX_BITS_10: |
7390 | case VPX_BITS_12: |
7391 | vf->bitdepth = BITDEPTH_Y10 | |
7392 | BITDEPTH_U10 | BITDEPTH_V10; |
7393 | break; |
7394 | default: |
7395 | vf->bitdepth = BITDEPTH_Y10 | |
7396 | BITDEPTH_U10 | BITDEPTH_V10; |
7397 | break; |
7398 | } |
7399 | if ((vf->type & VIDTYPE_COMPRESS) == 0) |
7400 | vf->bitdepth = |
7401 | BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8; |
7402 | if (pic_config->bit_depth == VPX_BITS_8) |
7403 | vf->bitdepth |= BITDEPTH_SAVING_MODE; |
7404 | |
7405 | /* if((vf->width!=pic_config->width)| |
7406 | * (vf->height!=pic_config->height)) |
7407 | */ |
7408 | /* pr_info("aaa: %d/%d, %d/%d\n", |
7409 | vf->width,vf->height, pic_config->width, |
7410 | pic_config->height); */ |
7411 | vf->width = pic_config->y_crop_width / |
7412 | get_double_write_ratio(pbi, |
7413 | pic_config->double_write_mode); |
7414 | vf->height = pic_config->y_crop_height / |
7415 | get_double_write_ratio(pbi, |
7416 | pic_config->double_write_mode); |
7417 | if (force_w_h != 0) { |
7418 | vf->width = (force_w_h >> 16) & 0xffff; |
7419 | vf->height = force_w_h & 0xffff; |
7420 | } |
7421 | vf->compWidth = pic_config->y_crop_width; |
7422 | vf->compHeight = pic_config->y_crop_height; |
7423 | set_frame_info(pbi, vf); |
7424 | if (force_fps & 0x100) { |
7425 | u32 rate = force_fps & 0xff; |
7426 | |
7427 | if (rate) |
7428 | vf->duration = 96000/rate; |
7429 | else |
7430 | vf->duration = 0; |
7431 | } |
7432 | update_vf_memhandle(pbi, vf, pic_config); |
7433 | |
7434 | if (!(pic_config->y_crop_width == 196 |
7435 | && pic_config->y_crop_height == 196 |
7436 | && (debug & VP9_DEBUG_NO_TRIGGER_FRAME) == 0 |
7437 | && (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_TXLX))) { |
7438 | struct vdec_info tmp4x; |
7439 | |
7440 | inc_vf_ref(pbi, pic_config->index); |
7441 | decoder_do_frame_check(pvdec, vf); |
7442 | kfifo_put(&pbi->display_q, (const struct vframe_s *)vf); |
7443 | ATRACE_COUNTER(MODULE_NAME, vf->pts); |
7444 | pbi->vf_pre_count++; |
7445 | pbi_update_gvs(pbi); |
7446 | /*count info*/ |
7447 | vdec_count_info(pbi->gvs, 0, stream_offset); |
7448 | memcpy(&tmp4x, pbi->gvs, sizeof(struct vdec_info)); |
7449 | tmp4x.bit_depth_luma = pbi->vp9_param.p.bit_depth; |
7450 | tmp4x.bit_depth_chroma = pbi->vp9_param.p.bit_depth; |
7451 | tmp4x.double_write_mode = get_double_write_mode(pbi); |
7452 | vdec_fill_vdec_frame(pvdec, &pbi->vframe_qos, &tmp4x, |
7453 | vf, pic_config->hw_decode_time); |
7454 | pvdec->vdec_fps_detec(pvdec->id); |
7455 | if (without_display_mode == 0) { |
7456 | vf_notify_receiver(pbi->provider_name, |
7457 | VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL); |
7458 | } else |
7459 | vvp9_vf_put(vvp9_vf_get(pbi), pbi); |
7460 | } else { |
7461 | pbi->stat |= VP9_TRIGGER_FRAME_DONE; |
7462 | hevc_source_changed(VFORMAT_VP9, 196, 196, 30); |
7463 | pr_debug("[%s %d] drop trigger frame width %d height %d state 0x%x\n", |
7464 | __func__, __LINE__, vf->width, |
7465 | vf->height, pbi->stat); |
7466 | } |
7467 | } |
7468 | |
7469 | return 0; |
7470 | } |
7471 | |
/*
 * notify_v4l_eos() - push a dummy EOS vframe to the display queue so the
 * v4l2 receiver learns that the stream has ended.
 *
 * Only acts when hw->eos is already set.  In v4l mode it first tries to
 * obtain a free capture buffer to carry the EOS marker; otherwise the
 * dummy frame is queued with whatever handle was resolved.
 *
 * Return: 0 on success (or nothing to do), -1 if no buffer could be
 * obtained in v4l mode.
 */
static int notify_v4l_eos(struct vdec_s *vdec)
{
	struct VP9Decoder_s *hw = (struct VP9Decoder_s *)vdec->private;
	struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
	struct vframe_s *vf = &hw->vframe_dummy;
	struct vdec_v4l2_buffer *fb = NULL;
	int index = INVALID_IDX;
	ulong expires;

	if (hw->eos) {
		if (hw->is_used_v4l) {
			/*
			 * Poll up to 2 s for a free frame buffer.
			 * NOTE(review): this is a tight loop with no
			 * sleep/schedule call — verify it cannot hog the
			 * CPU for the full timeout.  It also breaks out
			 * early if the m2m dst queue already has buffers
			 * ready, falling through to the fallback below.
			 */
			expires = jiffies + msecs_to_jiffies(2000);
			while (INVALID_IDX == (index = v4l_get_free_fb(hw))) {
				if (time_after(jiffies, expires) ||
					v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx))
					break;
			}

			/* Fallback: ask the v4l core directly for a buffer. */
			if (index == INVALID_IDX) {
				if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb) < 0) {
					pr_err("[%d] EOS get free buff fail.\n", ctx->id);
					return -1;
				}
			}
		}

		/* Mark the dummy frame as an empty EOS carrier. */
		vf->type |= VIDTYPE_V4L_EOS;
		vf->timestamp = ULONG_MAX;
		vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
		/* Attach either the fallback fb or the free slot's handle. */
		vf->v4l_mem_handle = (index == INVALID_IDX) ? (ulong)fb :
					hw->m_BUF[index].v4l_ref_buf_addr;

		kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
		vf_notify_receiver(vdec->vf_provider_name,
			VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);

		pr_info("[%d] VP9 EOS notify.\n", (hw->is_used_v4l)?ctx->id:vdec->id);
	}

	return 0;
}
7513 | |
7514 | static void get_rpm_param(union param_u *params) |
7515 | { |
7516 | int i; |
7517 | unsigned int data32; |
7518 | |
7519 | if (debug & VP9_DEBUG_BUFMGR) |
7520 | pr_info("enter %s\r\n", __func__); |
7521 | for (i = 0; i < 128; i++) { |
7522 | do { |
7523 | data32 = READ_VREG(RPM_CMD_REG); |
7524 | /*pr_info("%x\n", data32);*/ |
7525 | } while ((data32 & 0x10000) == 0); |
7526 | params->l.data[i] = data32&0xffff; |
7527 | /*pr_info("%x\n", data32);*/ |
7528 | WRITE_VREG(RPM_CMD_REG, 0); |
7529 | } |
7530 | if (debug & VP9_DEBUG_BUFMGR) |
7531 | pr_info("leave %s\r\n", __func__); |
7532 | } |
7533 | static void debug_buffer_mgr_more(struct VP9Decoder_s *pbi) |
7534 | { |
7535 | int i; |
7536 | |
7537 | if (!(debug & VP9_DEBUG_BUFMGR_MORE)) |
7538 | return; |
7539 | pr_info("vp9_param: (%d)\n", pbi->slice_idx); |
7540 | for (i = 0; i < (RPM_END-RPM_BEGIN); i++) { |
7541 | pr_info("%04x ", pbi->vp9_param.l.data[i]); |
7542 | if (((i + 1) & 0xf) == 0) |
7543 | pr_info("\n"); |
7544 | } |
7545 | pr_info("=============param==========\r\n"); |
7546 | pr_info("profile %x\r\n", pbi->vp9_param.p.profile); |
7547 | pr_info("show_existing_frame %x\r\n", |
7548 | pbi->vp9_param.p.show_existing_frame); |
7549 | pr_info("frame_to_show_idx %x\r\n", |
7550 | pbi->vp9_param.p.frame_to_show_idx); |
7551 | pr_info("frame_type %x\r\n", pbi->vp9_param.p.frame_type); |
7552 | pr_info("show_frame %x\r\n", pbi->vp9_param.p.show_frame); |
7553 | pr_info("e.r.r.o.r_resilient_mode %x\r\n", |
7554 | pbi->vp9_param.p.error_resilient_mode); |
7555 | pr_info("intra_only %x\r\n", pbi->vp9_param.p.intra_only); |
7556 | pr_info("display_size_present %x\r\n", |
7557 | pbi->vp9_param.p.display_size_present); |
7558 | pr_info("reset_frame_context %x\r\n", |
7559 | pbi->vp9_param.p.reset_frame_context); |
7560 | pr_info("refresh_frame_flags %x\r\n", |
7561 | pbi->vp9_param.p.refresh_frame_flags); |
7562 | pr_info("bit_depth %x\r\n", pbi->vp9_param.p.bit_depth); |
7563 | pr_info("width %x\r\n", pbi->vp9_param.p.width); |
7564 | pr_info("height %x\r\n", pbi->vp9_param.p.height); |
7565 | pr_info("display_width %x\r\n", pbi->vp9_param.p.display_width); |
7566 | pr_info("display_height %x\r\n", pbi->vp9_param.p.display_height); |
7567 | pr_info("ref_info %x\r\n", pbi->vp9_param.p.ref_info); |
7568 | pr_info("same_frame_size %x\r\n", pbi->vp9_param.p.same_frame_size); |
7569 | if (!(debug & VP9_DEBUG_DBG_LF_PRINT)) |
7570 | return; |
7571 | pr_info("mode_ref_delta_enabled: 0x%x\r\n", |
7572 | pbi->vp9_param.p.mode_ref_delta_enabled); |
7573 | pr_info("sharpness_level: 0x%x\r\n", |
7574 | pbi->vp9_param.p.sharpness_level); |
7575 | pr_info("ref_deltas: 0x%x, 0x%x, 0x%x, 0x%x\r\n", |
7576 | pbi->vp9_param.p.ref_deltas[0], pbi->vp9_param.p.ref_deltas[1], |
7577 | pbi->vp9_param.p.ref_deltas[2], pbi->vp9_param.p.ref_deltas[3]); |
7578 | pr_info("mode_deltas: 0x%x, 0x%x\r\n", pbi->vp9_param.p.mode_deltas[0], |
7579 | pbi->vp9_param.p.mode_deltas[1]); |
7580 | pr_info("filter_level: 0x%x\r\n", pbi->vp9_param.p.filter_level); |
7581 | pr_info("seg_enabled: 0x%x\r\n", pbi->vp9_param.p.seg_enabled); |
7582 | pr_info("seg_abs_delta: 0x%x\r\n", pbi->vp9_param.p.seg_abs_delta); |
7583 | pr_info("seg_lf_feature_enabled: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\r\n", |
7584 | (pbi->vp9_param.p.seg_lf_info[0]>>15 & 1), |
7585 | (pbi->vp9_param.p.seg_lf_info[1]>>15 & 1), |
7586 | (pbi->vp9_param.p.seg_lf_info[2]>>15 & 1), |
7587 | (pbi->vp9_param.p.seg_lf_info[3]>>15 & 1), |
7588 | (pbi->vp9_param.p.seg_lf_info[4]>>15 & 1), |
7589 | (pbi->vp9_param.p.seg_lf_info[5]>>15 & 1), |
7590 | (pbi->vp9_param.p.seg_lf_info[6]>>15 & 1), |
7591 | (pbi->vp9_param.p.seg_lf_info[7]>>15 & 1)); |
7592 | pr_info("seg_lf_feature_data: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\r\n", |
7593 | (pbi->vp9_param.p.seg_lf_info[0] & 0x13f), |
7594 | (pbi->vp9_param.p.seg_lf_info[1] & 0x13f), |
7595 | (pbi->vp9_param.p.seg_lf_info[2] & 0x13f), |
7596 | (pbi->vp9_param.p.seg_lf_info[3] & 0x13f), |
7597 | (pbi->vp9_param.p.seg_lf_info[4] & 0x13f), |
7598 | (pbi->vp9_param.p.seg_lf_info[5] & 0x13f), |
7599 | (pbi->vp9_param.p.seg_lf_info[6] & 0x13f), |
7600 | (pbi->vp9_param.p.seg_lf_info[7] & 0x13f)); |
7601 | |
7602 | } |
7603 | |
7604 | |
7605 | static void vp9_recycle_mmu_buf_tail(struct VP9Decoder_s *pbi) |
7606 | { |
7607 | struct VP9_Common_s *const cm = &pbi->common; |
7608 | if (pbi->double_write_mode & 0x10) |
7609 | return; |
7610 | if (cm->cur_fb_idx_mmu != INVALID_IDX) { |
7611 | if (pbi->used_4k_num == -1) { |
7612 | pbi->used_4k_num = |
7613 | (READ_VREG(HEVC_SAO_MMU_STATUS) >> 16); |
7614 | if (pbi->m_ins_flag) |
7615 | hevc_mmu_dma_check(hw_to_vdec(pbi)); |
7616 | } |
7617 | decoder_mmu_box_free_idx_tail(pbi->mmu_box, |
7618 | cm->cur_fb_idx_mmu, pbi->used_4k_num); |
7619 | cm->cur_fb_idx_mmu = INVALID_IDX; |
7620 | pbi->used_4k_num = -1; |
7621 | } |
7622 | } |
7623 | |
7624 | #ifdef MULTI_INSTANCE_SUPPORT |
7625 | static void vp9_recycle_mmu_buf(struct VP9Decoder_s *pbi) |
7626 | { |
7627 | struct VP9_Common_s *const cm = &pbi->common; |
7628 | if (pbi->double_write_mode & 0x10) |
7629 | return; |
7630 | if (cm->cur_fb_idx_mmu != INVALID_IDX) { |
7631 | decoder_mmu_box_free_idx(pbi->mmu_box, |
7632 | cm->cur_fb_idx_mmu); |
7633 | |
7634 | cm->cur_fb_idx_mmu = INVALID_IDX; |
7635 | pbi->used_4k_num = -1; |
7636 | } |
7637 | } |
7638 | |
7639 | void vp9_recycle_mmu_work(struct work_struct *work) |
7640 | { |
7641 | struct VP9Decoder_s *pbi = container_of(work, |
7642 | struct VP9Decoder_s, recycle_mmu_work); |
7643 | |
7644 | vp9_recycle_mmu_buf(pbi); |
7645 | } |
7646 | #endif |
7647 | |
7648 | |
7649 | static void dec_again_process(struct VP9Decoder_s *pbi) |
7650 | { |
7651 | amhevc_stop(); |
7652 | pbi->dec_result = DEC_RESULT_AGAIN; |
7653 | if (pbi->process_state == |
7654 | PROC_STATE_DECODESLICE) { |
7655 | pbi->process_state = |
7656 | PROC_STATE_SENDAGAIN; |
7657 | if (pbi->mmu_enable) { |
7658 | /* |
7659 | * Because vp9_recycle_mmu_buf has sleep function,we can't |
7660 | * call it directly. Use a recycle_mmu_work to substitude it. |
7661 | */ |
7662 | vdec_schedule_work(&pbi->recycle_mmu_work); |
7663 | } |
7664 | } |
7665 | reset_process_time(pbi); |
7666 | vdec_schedule_work(&pbi->work); |
7667 | } |
7668 | |
/*
 * continue_decoding() - run the buffer manager for the parsed frame
 * header and program the decode hardware for the next slice.
 *
 * Called after the frame-header parameters have been fetched into
 * pbi->vp9_param.  Drives vp9_bufmgr_process() (or, on a SENDAGAIN
 * retry, just re-allocates the MMU mapping), then configures picture
 * size, MC reference buffers, MPRED, MCRCC, SAO and the loop filter
 * before kicking the core with VP9_10B_DECODE_SLICE.
 *
 * Return: <0 on bufmgr failure (frame discarded), 0 when a slice decode
 * was started, >0 when the frame is skipped (search next start code).
 */
int continue_decoding(struct VP9Decoder_s *pbi)
{
	int ret;
	int i;
	struct VP9_Common_s *const cm = &pbi->common;
	struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(pbi->v4l2_ctx);
	debug_buffer_mgr_more(pbi);

	if (pbi->is_used_v4l && ctx->param_sets_from_ucode)
		pbi->res_ch_flag = 0;
	bit_depth_luma = pbi->vp9_param.p.bit_depth;
	bit_depth_chroma = pbi->vp9_param.p.bit_depth;

	/* >8-bit content cannot be decoded in double-write-only mode. */
	if ((pbi->vp9_param.p.bit_depth >= VPX_BITS_10) &&
		(get_double_write_mode(pbi) == 0x10)) {
		pbi->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
		pr_err("fatal err, bit_depth %d, unsupport dw 0x10\n",
			pbi->vp9_param.p.bit_depth);
		return -1;
	}

	if (pbi->process_state != PROC_STATE_SENDAGAIN) {
		/* Normal path: full buffer-manager pass for this header. */
		ret = vp9_bufmgr_process(pbi, &pbi->vp9_param);
		if (!pbi->m_ins_flag)
			pbi->slice_idx++;
	} else {
		/* Retry path: only the MMU mapping must be re-established. */
		union param_u *params = &pbi->vp9_param;
		if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) {
			ret = vp9_alloc_mmu(pbi,
				cm->new_fb_idx,
				params->p.width,
				params->p.height,
				params->p.bit_depth,
				pbi->frame_mmu_map_addr);
			if (ret >= 0)
				cm->cur_fb_idx_mmu = cm->new_fb_idx;
			else
				pr_err("can't alloc need mmu1,idx %d ret =%d\n",
					cm->new_fb_idx,
					ret);
		} else {
			ret = 0;
		}
		WRITE_VREG(HEVC_PARSER_PICTURE_SIZE,
			(params->p.height << 16) | params->p.width);
	}
	if (ret < 0) {
		/* Bufmgr/MMU failure: discard the NAL and bail out. */
		pr_info("vp9_bufmgr_process=> %d, VP9_10B_DISCARD_NAL\r\n",
			ret);
		WRITE_VREG(HEVC_DEC_STATUS_REG, VP9_10B_DISCARD_NAL);
		cm->show_frame = 0;
		if (pbi->mmu_enable)
			vp9_recycle_mmu_buf(pbi);
#ifdef MULTI_INSTANCE_SUPPORT
		if (pbi->m_ins_flag) {
			pbi->dec_result = DEC_RESULT_DONE;
#ifdef SUPPORT_FB_DECODING
			if (pbi->used_stage_buf_num == 0)
#endif
				amhevc_stop();
			vdec_schedule_work(&pbi->work);
		}
#endif
		return ret;
	} else if (ret == 0) {
		/* Frame accepted: program the hardware for slice decode. */
		struct PIC_BUFFER_CONFIG_s *cur_pic_config
			= &cm->cur_frame->buf;
		cur_pic_config->decode_idx = pbi->frame_count;

		if (pbi->process_state != PROC_STATE_SENDAGAIN) {
			if (!pbi->m_ins_flag) {
				pbi->frame_count++;
				decode_frame_count[pbi->index]
					= pbi->frame_count;
			}
#ifdef MULTI_INSTANCE_SUPPORT
			/* Carry the input chunk's timestamps to the pic. */
			if (pbi->chunk) {
				cur_pic_config->pts = pbi->chunk->pts;
				cur_pic_config->pts64 = pbi->chunk->pts64;
				cur_pic_config->timestamp = pbi->chunk->timestamp;
			}
#endif
		}
		/*pr_info("Decode Frame Data %d\n", pbi->frame_count);*/
		config_pic_size(pbi, pbi->vp9_param.p.bit_depth);

		/* Inter frames need MC reference buffers and MPRED. */
		if ((pbi->common.frame_type != KEY_FRAME)
			&& (!pbi->common.intra_only)) {
			config_mc_buffer(pbi, pbi->vp9_param.p.bit_depth);
#ifdef SUPPORT_FB_DECODING
			if (pbi->used_stage_buf_num == 0)
#endif
				config_mpred_hw(pbi);
		} else {
#ifdef SUPPORT_FB_DECODING
			if (pbi->used_stage_buf_num == 0)
#endif
				clear_mpred_hw(pbi);
		}
#ifdef MCRCC_ENABLE
		if (mcrcc_cache_alg_flag)
			config_mcrcc_axi_hw_new(pbi);
		else
			config_mcrcc_axi_hw(pbi);
#endif
		config_sao_hw(pbi, &pbi->vp9_param);

#ifdef VP9_LPF_LVL_UPDATE
		/*
		 * Get loop filter related picture level parameters from Parser
		 */
		pbi->lf->mode_ref_delta_enabled = pbi->vp9_param.p.mode_ref_delta_enabled;
		pbi->lf->sharpness_level = pbi->vp9_param.p.sharpness_level;
		for (i = 0; i < 4; i++)
			pbi->lf->ref_deltas[i] = pbi->vp9_param.p.ref_deltas[i];
		for (i = 0; i < 2; i++)
			pbi->lf->mode_deltas[i] = pbi->vp9_param.p.mode_deltas[i];
		pbi->default_filt_lvl = pbi->vp9_param.p.filter_level;
		pbi->seg_4lf->enabled = pbi->vp9_param.p.seg_enabled;
		pbi->seg_4lf->abs_delta = pbi->vp9_param.p.seg_abs_delta;
		/* Bit 15 of seg_lf_info = per-segment alt-LF feature enable. */
		for (i = 0; i < MAX_SEGMENTS; i++)
			pbi->seg_4lf->feature_mask[i] = (pbi->vp9_param.p.seg_lf_info[i] &
			0x8000) ? (1 << SEG_LVL_ALT_LF) : 0;
		/* Bit 8 is the sign, bits 5:0 the magnitude of the delta. */
		for (i = 0; i < MAX_SEGMENTS; i++)
			pbi->seg_4lf->feature_data[i][SEG_LVL_ALT_LF]
			= (pbi->vp9_param.p.seg_lf_info[i]
			& 0x100) ? -(pbi->vp9_param.p.seg_lf_info[i]
			& 0x3f) : (pbi->vp9_param.p.seg_lf_info[i] & 0x3f);
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
			/*Set pipeline mode*/
			uint32_t lpf_data32 = READ_VREG(HEVC_DBLK_CFGB);
			/*dblk pipeline mode=1 for performance*/
			/*
			 * NOTE(review): set uses mask 0x1<<4 but clear uses
			 * 0x3<<4 (two bits) — confirm against the DBLK_CFGB
			 * register layout that this asymmetry is intended.
			 */
			if (pbi->vp9_param.p.width >= 1280)
				lpf_data32 |= (0x1 << 4);
			else
				lpf_data32 &= ~(0x3 << 4);
			WRITE_VREG(HEVC_DBLK_CFGB, lpf_data32);
		}
		/*
		 * Update loop filter Thr/Lvl table for every frame
		 */
		vp9_loop_filter_frame_init(pbi->seg_4lf,
			pbi->lfi, pbi->lf, pbi->default_filt_lvl);
#endif
		/* Kick the core into slice decoding. */
		WRITE_VREG(HEVC_DEC_STATUS_REG, VP9_10B_DECODE_SLICE);
	} else {
		pr_info("Skip search next start code\n");
		cm->prev_fb_idx = INVALID_IDX;
		/*skip, search next start code*/
		WRITE_VREG(HEVC_DEC_STATUS_REG, VP9_10B_DECODE_SLICE);
	}
	pbi->process_state = PROC_STATE_DECODESLICE;
	/* Opportunistically free the MMU pages of the last released buffer. */
	if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) {
		if (pbi->last_put_idx < pbi->used_buf_num) {
			struct RefCntBuffer_s *frame_bufs =
				cm->buffer_pool->frame_bufs;
			int i = pbi->last_put_idx;
			/*free not used buffers.*/
			if ((frame_bufs[i].ref_count == 0) &&
				(frame_bufs[i].buf.vf_ref == 0) &&
				(frame_bufs[i].buf.index != -1)) {
				decoder_mmu_box_free_idx(pbi->mmu_box, i);
			}
			pbi->last_put_idx = -1;
		}
	}
	return ret;
}
7840 | |
7841 | static void fill_frame_info(struct VP9Decoder_s *pbi, |
7842 | struct PIC_BUFFER_CONFIG_s *frame, |
7843 | unsigned int framesize, |
7844 | unsigned int pts) |
7845 | { |
7846 | struct vframe_qos_s *vframe_qos = &pbi->vframe_qos; |
7847 | |
7848 | if (frame->slice_type == KEY_FRAME) |
7849 | vframe_qos->type = 1; |
7850 | else if (frame->slice_type == INTER_FRAME) |
7851 | vframe_qos->type = 2; |
7852 | /* |
7853 | #define SHOW_QOS_INFO |
7854 | */ |
7855 | if (input_frame_based(hw_to_vdec(pbi))) |
7856 | vframe_qos->size = frame->frame_size2; |
7857 | else |
7858 | vframe_qos->size = framesize; |
7859 | vframe_qos->pts = pts; |
7860 | #ifdef SHOW_QOS_INFO |
7861 | vp9_print(pbi, 0, "slice:%d\n", frame->slice_type); |
7862 | #endif |
7863 | vframe_qos->max_mv = frame->max_mv; |
7864 | vframe_qos->avg_mv = frame->avg_mv; |
7865 | vframe_qos->min_mv = frame->min_mv; |
7866 | #ifdef SHOW_QOS_INFO |
7867 | vp9_print(pbi, 0, "mv: max:%d, avg:%d, min:%d\n", |
7868 | vframe_qos->max_mv, |
7869 | vframe_qos->avg_mv, |
7870 | vframe_qos->min_mv); |
7871 | #endif |
7872 | vframe_qos->max_qp = frame->max_qp; |
7873 | vframe_qos->avg_qp = frame->avg_qp; |
7874 | vframe_qos->min_qp = frame->min_qp; |
7875 | #ifdef SHOW_QOS_INFO |
7876 | vp9_print(pbi, 0, "qp: max:%d, avg:%d, min:%d\n", |
7877 | vframe_qos->max_qp, |
7878 | vframe_qos->avg_qp, |
7879 | vframe_qos->min_qp); |
7880 | #endif |
7881 | vframe_qos->max_skip = frame->max_skip; |
7882 | vframe_qos->avg_skip = frame->avg_skip; |
7883 | vframe_qos->min_skip = frame->min_skip; |
7884 | #ifdef SHOW_QOS_INFO |
7885 | vp9_print(pbi, 0, "skip: max:%d, avg:%d, min:%d\n", |
7886 | vframe_qos->max_skip, |
7887 | vframe_qos->avg_skip, |
7888 | vframe_qos->min_skip); |
7889 | #endif |
7890 | vframe_qos->num++; |
7891 | } |
7892 | |
/*
 * get_picture_qos_info() - read the per-picture quality statistics
 * (MV, QP, skip-rate) from the decode hardware into the current
 * picture's buffer config.
 *
 * Only valid after a full field/frame has been decoded.  Pre-G12A SoCs
 * expose packed min/avg/max bytes in HEVC_MV_INFO / HEVC_QP_INFO /
 * HEVC_SKIP_INFO; G12A+ expose a sequential read-out FIFO behind
 * HEVC_PIC_QUALITY_CTRL/DATA, so the READ_VREG order below is
 * load-bearing and must not be changed.
 */
static void get_picture_qos_info(struct VP9Decoder_s *pbi)
{
	struct PIC_BUFFER_CONFIG_s *frame = &pbi->cur_buf->buf;
	struct vdec_s *vdec = hw_to_vdec(pbi);

	if (!frame)
		return;
	if (vdec->mvfrm) {
		frame->frame_size2 = vdec->mvfrm->frame_size;
		frame->hw_decode_time =
		local_clock() - vdec->mvfrm->hw_decode_start;
	}

	if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
		unsigned char a[3];
		unsigned char i, j, t;
		unsigned long data;

		data = READ_VREG(HEVC_MV_INFO);
		if (frame->slice_type == KEY_FRAME)
			data = 0;
		a[0] = data & 0xff;
		a[1] = (data >> 8) & 0xff;
		a[2] = (data >> 16) & 0xff;

		/*
		 * Sort the three bytes ascending so a[0]/a[1]/a[2] become
		 * min/avg/max.  NOTE(review): the equal-value branch bumps
		 * a[i] before swapping, apparently to force distinct
		 * values — confirm this tie-breaking is intentional.
		 */
		for (i = 0; i < 3; i++) {
			for (j = i+1; j < 3; j++) {
				if (a[j] < a[i]) {
					t = a[j];
					a[j] = a[i];
					a[i] = t;
				} else if (a[j] == a[i]) {
					a[i]++;
					t = a[j];
					a[j] = a[i];
					a[i] = t;
				}
			}
		}
		frame->max_mv = a[2];
		frame->avg_mv = a[1];
		frame->min_mv = a[0];

		/*
		 * NOTE(review): 'data' is unsigned long but printed with
		 * %x — a format mismatch on 64-bit builds; should be %lx
		 * or cast to unsigned int (applies to all three dumps).
		 */
		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"mv data %x  a[0]= %x a[1]= %x a[2]= %x\n",
			data, a[0], a[1], a[2]);

		data = READ_VREG(HEVC_QP_INFO);
		a[0] = data & 0x1f;
		a[1] = (data >> 8) & 0x3f;
		a[2] = (data >> 16) & 0x7f;

		/* Same min/avg/max ordering as above. */
		for (i = 0; i < 3; i++) {
			for (j = i+1; j < 3; j++) {
				if (a[j] < a[i]) {
					t = a[j];
					a[j] = a[i];
					a[i] = t;
				} else if (a[j] == a[i]) {
					a[i]++;
					t = a[j];
					a[j] = a[i];
					a[i] = t;
				}
			}
		}
		frame->max_qp = a[2];
		frame->avg_qp = a[1];
		frame->min_qp = a[0];

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"qp data %x  a[0]= %x a[1]= %x a[2]= %x\n",
			data, a[0], a[1], a[2]);

		data = READ_VREG(HEVC_SKIP_INFO);
		a[0] = data & 0x1f;
		a[1] = (data >> 8) & 0x3f;
		a[2] = (data >> 16) & 0x7f;

		/* Same min/avg/max ordering as above. */
		for (i = 0; i < 3; i++) {
			for (j = i+1; j < 3; j++) {
				if (a[j] < a[i]) {
					t = a[j];
					a[j] = a[i];
					a[i] = t;
				} else if (a[j] == a[i]) {
					a[i]++;
					t = a[j];
					a[j] = a[i];
					a[i] = t;
				}
			}
		}
		frame->max_skip = a[2];
		frame->avg_skip = a[1];
		frame->min_skip = a[0];

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"skip data %x  a[0]= %x a[1]= %x a[2]= %x\n",
			data, a[0], a[1], a[2]);
	} else {
		uint32_t blk88_y_count;
		uint32_t blk88_c_count;
		uint32_t blk22_mv_count;
		uint32_t rdata32;
		int32_t mv_hi;
		int32_t mv_lo;
		uint32_t rdata32_l;
		uint32_t mvx_L0_hi;
		uint32_t mvy_L0_hi;
		uint32_t mvx_L1_hi;
		uint32_t mvy_L1_hi;
		int64_t value;
		uint64_t temp_value;
		int pic_number = frame->decode_idx;

		frame->max_mv = 0;
		frame->avg_mv = 0;
		frame->min_mv = 0;

		frame->max_skip = 0;
		frame->avg_skip = 0;
		frame->min_skip = 0;

		frame->max_qp = 0;
		frame->avg_qp = 0;
		frame->min_qp = 0;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO, "slice_type:%d, poc:%d\n",
			frame->slice_type,
			pic_number);

		/* set rd_idx to 0 */
		WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);

		blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
		if (blk88_y_count == 0) {

			vp9_print(pbi, VP9_DEBUG_QOS_INFO,
				"[Picture %d Quality] NO Data yet.\n",
				pic_number);

			/* reset all counts */
			WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
			return;
		}
		/* qp_y_sum */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
			pic_number, rdata32/blk88_y_count,
			rdata32, blk88_y_count);

		frame->avg_qp = rdata32/blk88_y_count;
		/* intra_y_count */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] Y intra rate : %d%c (%d)\n",
			pic_number, rdata32*100/blk88_y_count,
			'%', rdata32);

		/* skipped_y_count */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
			pic_number, rdata32*100/blk88_y_count,
			'%', rdata32);

		frame->avg_skip = rdata32*100/blk88_y_count;
		/* coeff_non_zero_y_count */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
			pic_number, (100 - rdata32*100/(blk88_y_count*1)),
			'%', rdata32);

		/* blk66_c_count */
		blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
		if (blk88_c_count == 0) {
			vp9_print(pbi, VP9_DEBUG_QOS_INFO,
				"[Picture %d Quality] NO Data yet.\n",
				pic_number);
			/* reset all counts */
			WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
			return;
		}
		/* qp_c_sum */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
			pic_number, rdata32/blk88_c_count,
			rdata32, blk88_c_count);

		/* intra_c_count */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] C intra rate : %d%c (%d)\n",
			pic_number, rdata32*100/blk88_c_count,
			'%', rdata32);

		/* skipped_cu_c_count */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] C skipped rate : %d%c (%d)\n",
			pic_number, rdata32*100/blk88_c_count,
			'%', rdata32);

		/* coeff_non_zero_c_count */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
			pic_number, (100 - rdata32*100/(blk88_c_count*1)),
			'%', rdata32);

		/* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
		1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] Y QP min : %d\n",
			pic_number, (rdata32>>0)&0xff);

		frame->min_qp = (rdata32>>0)&0xff;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] Y QP max : %d\n",
			pic_number, (rdata32>>8)&0xff);

		frame->max_qp = (rdata32>>8)&0xff;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] C QP min : %d\n",
			pic_number, (rdata32>>16)&0xff);
		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] C QP max : %d\n",
			pic_number, (rdata32>>24)&0xff);

		/* blk22_mv_count */
		blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
		if (blk22_mv_count == 0) {
			vp9_print(pbi, VP9_DEBUG_QOS_INFO,
				"[Picture %d Quality] NO MV Data yet.\n",
				pic_number);
			/* reset all counts */
			WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
			return;
		}
		/* mvy_L1_count[39:32], mvx_L1_count[39:32],
		mvy_L0_count[39:32], mvx_L0_count[39:32] */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
		/* should all be 0x00 or 0xff */
		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MV AVG High Bits: 0x%X\n",
			pic_number, rdata32);

		mvx_L0_hi = ((rdata32>>0)&0xff);
		mvy_L0_hi = ((rdata32>>8)&0xff);
		mvx_L1_hi = ((rdata32>>16)&0xff);
		mvy_L1_hi = ((rdata32>>24)&0xff);

		/* mvx_L0_count[31:0] */
		rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
		temp_value = mvx_L0_hi;
		temp_value = (temp_value << 32) | rdata32_l;

		/*
		 * Sign-extend the 40-bit counter when the high byte is
		 * negative.  NOTE(review): the mask 0xFFFFFFF000000000
		 * overlaps bits 39:36 of temp_value; confirm it matches
		 * the intended 40-bit sign extension.
		 */
		if (mvx_L0_hi & 0x80)
			value = 0xFFFFFFF000000000 | temp_value;
		else
			value = temp_value;

		value = div_s64(value, blk22_mv_count);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
			pic_number, (int)value,
			value, blk22_mv_count);

		frame->avg_mv = value;

		/* mvy_L0_count[31:0] */
		rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
		temp_value = mvy_L0_hi;
		temp_value = (temp_value << 32) | rdata32_l;

		if (mvy_L0_hi & 0x80)
			value = 0xFFFFFFF000000000 | temp_value;
		else
			value = temp_value;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
			pic_number, rdata32_l/blk22_mv_count,
			value, blk22_mv_count);

		/* mvx_L1_count[31:0] */
		rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
		temp_value = mvx_L1_hi;
		temp_value = (temp_value << 32) | rdata32_l;
		if (mvx_L1_hi & 0x80)
			value = 0xFFFFFFF000000000 | temp_value;
		else
			value = temp_value;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
			pic_number, rdata32_l/blk22_mv_count,
			value, blk22_mv_count);

		/* mvy_L1_count[31:0] */
		rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
		temp_value = mvy_L1_hi;
		temp_value = (temp_value << 32) | rdata32_l;
		if (mvy_L1_hi & 0x80)
			value = 0xFFFFFFF000000000 | temp_value;
		else
			value = temp_value;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
			pic_number, rdata32_l/blk22_mv_count,
			value, blk22_mv_count);

		/* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
		mv_hi = (rdata32>>16)&0xffff;
		if (mv_hi & 0x8000)
			mv_hi = 0x8000 - mv_hi;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVX_L0 MAX : %d\n",
			pic_number, mv_hi);

		frame->max_mv = mv_hi;

		mv_lo = (rdata32>>0)&0xffff;
		if (mv_lo & 0x8000)
			mv_lo = 0x8000 - mv_lo;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVX_L0 MIN : %d\n",
			pic_number, mv_lo);

		frame->min_mv = mv_lo;

		/* {mvy_L0_max, mvy_L0_min} */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
		mv_hi = (rdata32>>16)&0xffff;
		if (mv_hi & 0x8000)
			mv_hi = 0x8000 - mv_hi;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVY_L0 MAX : %d\n",
			pic_number, mv_hi);

		mv_lo = (rdata32>>0)&0xffff;
		if (mv_lo & 0x8000)
			mv_lo = 0x8000 - mv_lo;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVY_L0 MIN : %d\n",
			pic_number, mv_lo);

		/* {mvx_L1_max, mvx_L1_min} */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
		mv_hi = (rdata32>>16)&0xffff;
		if (mv_hi & 0x8000)
			mv_hi = 0x8000 - mv_hi;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVX_L1 MAX : %d\n",
			pic_number, mv_hi);

		mv_lo = (rdata32>>0)&0xffff;
		if (mv_lo & 0x8000)
			mv_lo = 0x8000 - mv_lo;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVX_L1 MIN : %d\n",
			pic_number, mv_lo);

		/* {mvy_L1_max, mvy_L1_min} */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
		mv_hi = (rdata32>>16)&0xffff;
		if (mv_hi & 0x8000)
			mv_hi = 0x8000 - mv_hi;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVY_L1 MAX : %d\n",
			pic_number, mv_hi);

		mv_lo = (rdata32>>0)&0xffff;
		if (mv_lo & 0x8000)
			mv_lo = 0x8000 - mv_lo;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVY_L1 MIN : %d\n",
			pic_number, mv_lo);

		rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
			pic_number, rdata32);

		/* reset all counts */
		WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
	}
}
8311 | |
8312 | static int vvp9_get_ps_info(struct VP9Decoder_s *pbi, struct aml_vdec_ps_infos *ps) |
8313 | { |
8314 | int dw_mode = v4l_parser_get_double_write_mode(pbi); |
8315 | |
8316 | ps->visible_width = pbi->frame_width / get_double_write_ratio(pbi, dw_mode); |
8317 | ps->visible_height = pbi->frame_height / get_double_write_ratio(pbi, dw_mode); |
8318 | ps->coded_width = ALIGN(pbi->frame_width, 32) / get_double_write_ratio(pbi, dw_mode); |
8319 | ps->coded_height = ALIGN(pbi->frame_height, 32) / get_double_write_ratio(pbi, dw_mode); |
8320 | ps->dpb_size = pbi->used_buf_num; |
8321 | |
8322 | return 0; |
8323 | } |
8324 | |
8325 | |
8326 | static int v4l_res_change(struct VP9Decoder_s *pbi) |
8327 | { |
8328 | struct aml_vcodec_ctx *ctx = |
8329 | (struct aml_vcodec_ctx *)(pbi->v4l2_ctx); |
8330 | struct VP9_Common_s *const cm = &pbi->common; |
8331 | int ret = 0; |
8332 | |
8333 | if (ctx->param_sets_from_ucode && |
8334 | pbi->res_ch_flag == 0) { |
8335 | struct aml_vdec_ps_infos ps; |
8336 | if ((cm->width != 0 && |
8337 | cm->height != 0) && |
8338 | (pbi->frame_width != cm->width || |
8339 | pbi->frame_height != cm->height)) { |
8340 | |
8341 | vp9_print(pbi, 0, "%s (%d,%d)=>(%d,%d)\r\n", __func__, cm->width, |
8342 | cm->height, pbi->frame_width, pbi->frame_height); |
8343 | vvp9_get_ps_info(pbi, &ps); |
8344 | vdec_v4l_set_ps_infos(ctx, &ps); |
8345 | vdec_v4l_res_ch_event(ctx); |
8346 | pbi->v4l_params_parsed = false; |
8347 | pbi->res_ch_flag = 1; |
8348 | pbi->eos = 1; |
8349 | vp9_bufmgr_postproc(pbi); |
8350 | //del_timer_sync(&pbi->timer); |
8351 | notify_v4l_eos(hw_to_vdec(pbi)); |
8352 | ret = 1; |
8353 | } |
8354 | } |
8355 | |
8356 | return ret; |
8357 | } |
8358 | |
8359 | |
/*
 * Threaded half of the decoder interrupt: dispatch on the ucode status
 * latched by vvp9_isr() into pbi->dec_status.
 *
 * Main cases handled:
 *  - HEVC_S2_DECODING_DONE (front/back split decode): stage-2 finished.
 *  - NAL done / buffer-empty statuses: input underrun, request more data.
 *  - HEVC_DECPIC_DATA_DONE: one picture decoded, schedule work/postproc.
 *  - VP9_EOS: flush the buffer manager and discard remaining NAL data.
 *  - HEVC_DECODE_OVER_SIZE: fatal oversize, disable error processing.
 *  - VP9_HEAD_PARSER_DONE: frame header parsed — read the RPM parameters,
 *    handle v4l2 resolution/parameter events, then continue_decoding().
 *
 * pbi->process_busy was set by the hard-IRQ half; every exit path that
 * finishes processing must clear it so the next interrupt is accepted.
 */
static irqreturn_t vvp9_isr_thread_fn(int irq, void *data)
{
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)data;
	unsigned int dec_status = pbi->dec_status;
	int i;

	/*if (pbi->wait_buf)
	 *	pr_info("set wait_buf to 0\r\n");
	 */
	if (pbi->eos)
		return IRQ_HANDLED;
	pbi->wait_buf = 0;
#ifdef MULTI_INSTANCE_SUPPORT
#ifdef SUPPORT_FB_DECODING
#ifdef FB_DECODING_TEST_SCHEDULE
	/* Test hook: fake status transitions to exercise the two-stage
	 * (front/back) decoding schedule. */
	if (pbi->s1_test_cmd == TEST_SET_PIC_DONE)
		dec_status = HEVC_DECPIC_DATA_DONE;
	else if (pbi->s1_test_cmd == TEST_SET_S2_DONE
		&& dec_status == HEVC_DECPIC_DATA_DONE)
		dec_status = HEVC_S2_DECODING_DONE;
	pbi->s1_test_cmd = TEST_SET_NONE;
#else
	/*if (irq != VDEC_IRQ_0)
		dec_status = HEVC_S2_DECODING_DONE;*/
#endif
	if (dec_status == HEVC_S2_DECODING_DONE) {
		/* Stage-2 (back-end) decode complete. */
		pbi->dec_result = DEC_RESULT_DONE;
		vdec_schedule_work(&pbi->work);
#ifdef FB_DECODING_TEST_SCHEDULE
		amhevc_stop();
		pbi->dec_s1_result = DEC_S1_RESULT_DONE;
		vdec_schedule_work(&pbi->s1_work);
#endif
	} else
#endif
	if ((dec_status == HEVC_NAL_DECODE_DONE) ||
			(dec_status == HEVC_SEARCH_BUFEMPTY) ||
			(dec_status == HEVC_DECODE_BUFEMPTY)
		) {
		/* Input exhausted mid-frame: in stream mode retry later,
		 * in frame mode ask the core for more data. */
		if (pbi->m_ins_flag) {
			reset_process_time(pbi);
			if (!vdec_frame_based(hw_to_vdec(pbi)))
				dec_again_process(pbi);
			else {
				pbi->dec_result = DEC_RESULT_GET_DATA;
				vdec_schedule_work(&pbi->work);
			}
		}
		pbi->process_busy = 0;
		return IRQ_HANDLED;
	} else if (dec_status == HEVC_DECPIC_DATA_DONE) {
		/* One picture fully decoded. */
		if (pbi->m_ins_flag) {
			get_picture_qos_info(pbi);
#ifdef SUPPORT_FB_DECODING
			if (pbi->used_stage_buf_num > 0) {
				/* Two-stage mode: advance stage-1 position
				 * and let the scheduler run stage 2. */
				reset_process_time(pbi);
				inc_s1_pos(pbi);
				trigger_schedule(pbi);
#ifdef FB_DECODING_TEST_SCHEDULE
				pbi->s1_test_cmd = TEST_SET_S2_DONE;
#else
				amhevc_stop();
				pbi->dec_s1_result = DEC_S1_RESULT_DONE;
				vdec_schedule_work(&pbi->s1_work);
#endif
			} else
#endif
			{
				reset_process_time(pbi);
				/* Low-latency (or first frame): push the
				 * frame out immediately from IRQ context. */
				if (pbi->vf_pre_count == 0 || pbi->low_latency_flag)
					vp9_bufmgr_postproc(pbi);

				pbi->dec_result = DEC_RESULT_DONE;
				amhevc_stop();
				if (mcrcc_cache_alg_flag)
					dump_hit_rate(pbi);
				vdec_schedule_work(&pbi->work);
			}
		} else {
			if (pbi->low_latency_flag) {
				vp9_bufmgr_postproc(pbi);
				WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
				vdec_profile(hw_to_vdec(pbi), VDEC_PROFILE_EVENT_CB);
				if (debug & PRINT_FLAG_VDEC_DETAIL)
					pr_info("%s VP9 frame done \n", __func__);
#endif
			}
		}

		pbi->process_busy = 0;
		return IRQ_HANDLED;
	}
#endif

	if (dec_status == VP9_EOS) {
#ifdef MULTI_INSTANCE_SUPPORT
		if (pbi->m_ins_flag)
			reset_process_time(pbi);
#endif

		pr_info("VP9_EOS, flush buffer\r\n");

		vp9_bufmgr_postproc(pbi);

		pr_info("send VP9_10B_DISCARD_NAL\r\n");
		WRITE_VREG(HEVC_DEC_STATUS_REG, VP9_10B_DISCARD_NAL);
		pbi->process_busy = 0;
#ifdef MULTI_INSTANCE_SUPPORT
		if (pbi->m_ins_flag) {
			pbi->dec_result = DEC_RESULT_DONE;
			amhevc_stop();
			vdec_schedule_work(&pbi->work);
		}
#endif
		return IRQ_HANDLED;
	} else if (dec_status == HEVC_DECODE_OVER_SIZE) {
		/* NOTE(review): this path returns without clearing
		 * pbi->process_busy — presumably intentional since the
		 * fatal_error flag stops further decoding; confirm. */
		pr_info("vp9  decode oversize !!\n");
		debug |= (VP9_DEBUG_DIS_LOC_ERROR_PROC |
			VP9_DEBUG_DIS_SYS_ERROR_PROC);
		pbi->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
#ifdef MULTI_INSTANCE_SUPPORT
		if (pbi->m_ins_flag)
			reset_process_time(pbi);
#endif
		return IRQ_HANDLED;
	}

	/* Everything below handles VP9_HEAD_PARSER_DONE only. */
	if (dec_status != VP9_HEAD_PARSER_DONE) {
		pbi->process_busy = 0;
		return IRQ_HANDLED;
	}

	/* No free output buffer: raise the run-ready threshold and defer. */
	if (pbi->m_ins_flag &&
			!get_free_buf_count(pbi)) {
		pbi->run_ready_min_buf_num = pbi->one_package_frame_cnt + 1;
		pr_err("need buffer, one package frame count = %d\n", pbi->one_package_frame_cnt + 1);
		pbi->dec_result = DEC_RESULT_NEED_MORE_BUFFER;
		vdec_schedule_work(&pbi->work);
		return IRQ_HANDLED;
	}

	pbi->one_package_frame_cnt++;

#ifdef MULTI_INSTANCE_SUPPORT
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	if (pbi->m_ins_flag ==0 && pbi->low_latency_flag) {
		vdec_profile(hw_to_vdec(pbi), VDEC_PROFILE_EVENT_RUN);
		if (debug & PRINT_FLAG_VDEC_DETAIL)
			pr_info("%s VP9 frame header found \n", __func__);
	}
#endif
	if (pbi->m_ins_flag)
		reset_process_time(pbi);
#endif
	/* Unless we are re-sending the previous slice, recycle MMU pages
	 * and post-process the frame decoded before this header. */
	if (pbi->process_state != PROC_STATE_SENDAGAIN
#ifdef SUPPORT_FB_DECODING
		&& pbi->used_stage_buf_num == 0
#endif
		) {
		if (pbi->mmu_enable)
			vp9_recycle_mmu_buf_tail(pbi);


		if (pbi->frame_count > 0)
			vp9_bufmgr_postproc(pbi);
	}

	if (debug & VP9_DEBUG_SEND_PARAM_WITH_REG) {
		get_rpm_param(&pbi->vp9_param);
	} else {
#ifdef SUPPORT_FB_DECODING
		if (pbi->used_stage_buf_num > 0) {
			/* Two-stage mode: snapshot the RPM parameters into
			 * the stage-1 buffer and kick off stage-1 decode. */
			reset_process_time(pbi);
			get_s1_buf(pbi);

			if (get_mv_buf(pbi,
				&pbi->s1_mv_buf_index,
				&pbi->s1_mpred_mv_wr_start_addr
				) < 0) {
				vp9_print(pbi, 0,
					"%s: Error get_mv_buf fail\n",
					__func__);
			}

			if (pbi->s1_buf == NULL) {
				vp9_print(pbi, 0,
					"%s: Error get_s1_buf fail\n",
					__func__);
				pbi->process_busy = 0;
				return IRQ_HANDLED;
			}

			/* RPM words arrive byte-swapped in groups of 4;
			 * the "3 - ii" indexing undoes that for s1_param. */
			for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
				int ii;
				for (ii = 0; ii < 4; ii++) {
					pbi->s1_buf->rpm[i + 3 - ii] =
						pbi->rpm_ptr[i + 3 - ii];
					pbi->s1_param.l.data[i + ii] =
						pbi->rpm_ptr[i + 3 - ii];
				}
			}

			mpred_process(pbi);
#ifdef FB_DECODING_TEST_SCHEDULE
			pbi->dec_s1_result =
				DEC_S1_RESULT_TEST_TRIGGER_DONE;
			vdec_schedule_work(&pbi->s1_work);
#else
			WRITE_VREG(HEVC_ASSIST_FB_MMU_MAP_ADDR,
				pbi->stage_mmu_map_phy_addr +
				pbi->s1_buf->index * STAGE_MMU_MAP_SIZE);

			start_s1_decoding(pbi);
#endif
			start_process_time(pbi);
			pbi->process_busy = 0;
			return IRQ_HANDLED;
		} else
#endif
		{
			/* Single-stage mode: copy the RPM parameter block,
			 * undoing the per-4-word byte order. */
			for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
				int ii;
				for (ii = 0; ii < 4; ii++)
					pbi->vp9_param.l.data[i + ii] =
						pbi->rpm_ptr[i + 3 - ii];
			}
		}
	}

	if (pbi->is_used_v4l) {
		struct aml_vcodec_ctx *ctx =
			(struct aml_vcodec_ctx *)(pbi->v4l2_ctx);

		pbi->frame_width = pbi->vp9_param.p.width;
		pbi->frame_height = pbi->vp9_param.p.height;

		if (!v4l_res_change(pbi)) {
			if (ctx->param_sets_from_ucode && !pbi->v4l_params_parsed) {
				struct aml_vdec_ps_infos ps;

				pr_debug("set ucode parse\n");
				vvp9_get_ps_info(pbi, &ps);
				/*notice the v4l2 codec.*/
				vdec_v4l_set_ps_infos(ctx, &ps);
				pbi->v4l_params_parsed = true;
				pbi->postproc_done = 0;
				pbi->process_busy = 0;
				/* Re-run this frame after v4l2 has
				 * allocated buffers for the new params. */
				dec_again_process(pbi);
				return IRQ_HANDLED;
			}
		} else {
			/* Resolution change in progress: retry later. */
			pbi->postproc_done = 0;
			pbi->process_busy = 0;
			dec_again_process(pbi);
			return IRQ_HANDLED;
		}
	}

	continue_decoding(pbi);
	pbi->postproc_done = 0;
	pbi->process_busy = 0;

#ifdef MULTI_INSTANCE_SUPPORT
	if (pbi->m_ins_flag)
		start_process_time(pbi);
#endif

	return IRQ_HANDLED;
}
8630 | |
/*
 * Hard-IRQ half of the decoder interrupt.
 *
 * Acknowledges the mailbox, latches the ucode status into
 * pbi->dec_status and sets pbi->process_busy, then either handles the
 * request inline (ucode debug traps, probability adaptation) or returns
 * IRQ_WAKE_THREAD to defer to vvp9_isr_thread_fn().
 */
static irqreturn_t vvp9_isr(int irq, void *data)
{
	int i;
	unsigned int dec_status;
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)data;
	unsigned int adapt_prob_status;
	struct VP9_Common_s *const cm = &pbi->common;
	uint debug_tag;

	/* Clear the mailbox interrupt first. */
	WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);

	dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
	adapt_prob_status = READ_VREG(VP9_ADAPT_PROB_REG);
	if (!pbi)
		return IRQ_HANDLED;
	if (pbi->init_flag == 0)
		return IRQ_HANDLED;
	if (pbi->process_busy)/*on process.*/
		return IRQ_HANDLED;
	pbi->dec_status = dec_status;
	pbi->process_busy = 1;
	if (debug & VP9_DEBUG_BUFMGR)
		pr_info("vp9 isr (%d) dec status = 0x%x, lcu 0x%x shiftbyte 0x%x (%x %x lev %x, wr %x, rd %x)\n",
			irq,
			dec_status, READ_VREG(HEVC_PARSER_LCU_START),
			READ_VREG(HEVC_SHIFT_BYTE_COUNT),
			READ_VREG(HEVC_STREAM_START_ADDR),
			READ_VREG(HEVC_STREAM_END_ADDR),
			READ_VREG(HEVC_STREAM_LEVEL),
			READ_VREG(HEVC_STREAM_WR_PTR),
			READ_VREG(HEVC_STREAM_RD_PTR)
		);
#ifdef SUPPORT_FB_DECODING
	/*if (irq != VDEC_IRQ_0)
		return IRQ_WAKE_THREAD;*/
#endif

	/* Ucode debug trap: bit 16 set means "dump LMEM and maybe pause". */
	debug_tag = READ_HREG(DEBUG_REG1);
	if (debug_tag & 0x10000) {
		pr_info("LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
		for (i = 0; i < 0x400; i += 4) {
			int ii;
			if ((i & 0xf) == 0)
				pr_info("%03x: ", i);
			for (ii = 0; ii < 4; ii++) {
				pr_info("%04x ",
					   pbi->lmem_ptr[i + 3 - ii]);
			}
			if (((i + ii) & 0xf) == 0)
				pr_info("\n");
		}

		/* Pause the ucode at the configured position/decode index
		 * by leaving DEBUG_REG1 set; writing 0 resumes it. */
		if ((udebug_pause_pos == (debug_tag & 0xffff)) &&
			(udebug_pause_decode_idx == 0 ||
			udebug_pause_decode_idx == pbi->slice_idx) &&
			(udebug_pause_val == 0 ||
			udebug_pause_val == READ_HREG(DEBUG_REG2)))
			pbi->ucode_pause_pos = udebug_pause_pos;
		else if (debug_tag & 0x20000)
			pbi->ucode_pause_pos = 0xffffffff;
		if (pbi->ucode_pause_pos)
			reset_process_time(pbi);
		else
			WRITE_HREG(DEBUG_REG1, 0);
	} else if (debug_tag != 0) {
		pr_info(
			"dbg%x: %x lcu %x\n", READ_HREG(DEBUG_REG1),
			   READ_HREG(DEBUG_REG2),
			   READ_VREG(HEVC_PARSER_LCU_START));
		if ((udebug_pause_pos == (debug_tag & 0xffff)) &&
			(udebug_pause_decode_idx == 0 ||
			udebug_pause_decode_idx == pbi->slice_idx) &&
			(udebug_pause_val == 0 ||
			udebug_pause_val == READ_HREG(DEBUG_REG2)))
			pbi->ucode_pause_pos = udebug_pause_pos;
		if (pbi->ucode_pause_pos)
			reset_process_time(pbi);
		else
			WRITE_HREG(DEBUG_REG1, 0);
		pbi->process_busy = 0;
		return IRQ_HANDLED;
	}

#ifdef MULTI_INSTANCE_SUPPORT
	if (!pbi->m_ins_flag) {
#endif
		/* Single-instance mode: error_flag handshake and output
		 * buffer availability are checked here, in hard IRQ. */
		if (pbi->error_flag == 1) {
			pbi->error_flag = 2;
			pbi->process_busy = 0;
			return IRQ_HANDLED;
		} else if (pbi->error_flag == 3) {
			pbi->process_busy = 0;
			return IRQ_HANDLED;
		}

		if (get_free_buf_count(pbi) <= 0) {
			/*
			if (pbi->wait_buf == 0)
				pr_info("set wait_buf to 1\r\n");
			*/
			pbi->wait_buf = 1;
			pbi->process_busy = 0;
			return IRQ_HANDLED;
		}
#ifdef MULTI_INSTANCE_SUPPORT
	}
#endif
	if ((adapt_prob_status & 0xff) == 0xfd) {
		/*VP9_REQ_ADAPT_PROB*/
		/* Ucode asks software to adapt the coefficient
		 * probabilities; the prob bank index is in bits 8+. */
		int pre_fc = (cm->frame_type == KEY_FRAME) ? 1 : 0;
		uint8_t *prev_prob_b =
		((uint8_t *)pbi->prob_buffer_addr) +
		((adapt_prob_status >> 8) * 0x1000);
		uint8_t *cur_prob_b =
		((uint8_t *)pbi->prob_buffer_addr) + 0x4000;
		uint8_t *count_b = (uint8_t *)pbi->count_buffer_addr;
#ifdef MULTI_INSTANCE_SUPPORT
		if (pbi->m_ins_flag)
			reset_process_time(pbi);
#endif
		adapt_coef_probs(pbi->pic_count,
			(cm->last_frame_type == KEY_FRAME),
			pre_fc, (adapt_prob_status >> 8),
			(unsigned int *)prev_prob_b,
			(unsigned int *)cur_prob_b, (unsigned int *)count_b);

		memcpy(prev_prob_b, cur_prob_b, PROB_SIZE);
		/* Signal the ucode that adaptation is complete. */
		WRITE_VREG(VP9_ADAPT_PROB_REG, 0);
		pbi->pic_count += 1;
#ifdef MULTI_INSTANCE_SUPPORT
		if (pbi->m_ins_flag)
			start_process_time(pbi);
#endif

		/*return IRQ_HANDLED;*/
	}
	return IRQ_WAKE_THREAD;
}
8769 | |
8770 | static void vp9_set_clk(struct work_struct *work) |
8771 | { |
8772 | struct VP9Decoder_s *pbi = container_of(work, |
8773 | struct VP9Decoder_s, set_clk_work); |
8774 | int fps = 96000 / pbi->frame_dur; |
8775 | |
8776 | if (hevc_source_changed(VFORMAT_VP9, |
8777 | frame_width, frame_height, fps) > 0) |
8778 | pbi->saved_resolution = frame_width * |
8779 | frame_height * fps; |
8780 | } |
8781 | |
/*
 * Periodic timer (every PUT_INTERVAL jiffies): decoder watchdog and
 * debug-command dispatcher.
 *
 * Single-instance mode: if the receiver is inactive while the stream
 * FIFO still holds data, re-trigger the decoder interrupt to recover.
 * Multi-instance mode: detect a stalled decode (LCU index not advancing
 * past decode_timeout_val ms) and run timeout/again processing.
 * Also services the module-parameter debug hooks (radr/rval register
 * poke, pop_shorts, dbg_cmd, dump flags) and reschedules the clock
 * worker when the effective resolution/fps changed.
 */
static void vvp9_put_timer_func(unsigned long arg)
{
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)arg;
	struct timer_list *timer = &pbi->timer;
	uint8_t empty_flag;
	unsigned int buf_level;

	enum receviver_start_e state = RECEIVER_INACTIVE;

	if (pbi->m_ins_flag) {
		if (hw_to_vdec(pbi)->next_status
			== VDEC_STATUS_DISCONNECTED &&
			!pbi->is_used_v4l) {
#ifdef SUPPORT_FB_DECODING
			if (pbi->run2_busy)
				return;

			pbi->dec_s1_result = DEC_S1_RESULT_FORCE_EXIT;
			vdec_schedule_work(&pbi->s1_work);
#endif
			/* Core wants this instance gone: force the work
			 * handler to exit and stop rearming the timer. */
			pbi->dec_result = DEC_RESULT_FORCE_EXIT;
			vdec_schedule_work(&pbi->work);
			pr_debug(
				"vdec requested to be disconnected\n");
			return;
		}
	}
	if (pbi->init_flag == 0) {
		/* Not initialized yet: just rearm if the timer is armed. */
		if (pbi->stat & STAT_TIMER_ARM) {
			timer->expires = jiffies + PUT_INTERVAL;
			add_timer(&pbi->timer);
		}
		return;
	}
	if (pbi->m_ins_flag == 0) {
		if (vf_get_receiver(pbi->provider_name)) {
			state =
				vf_notify_receiver(pbi->provider_name,
					VFRAME_EVENT_PROVIDER_QUREY_STATE,
					NULL);
			if ((state == RECEIVER_STATE_NULL)
				|| (state == RECEIVER_STATE_NONE))
				state = RECEIVER_INACTIVE;
		} else
			state = RECEIVER_INACTIVE;

		empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
		/* error watchdog */
		if (empty_flag == 0) {
			/* decoder has input */
			if ((debug & VP9_DEBUG_DIS_LOC_ERROR_PROC) == 0) {

				buf_level = READ_VREG(HEVC_STREAM_LEVEL);
				/* receiver has no buffer to recycle */
				if ((state == RECEIVER_INACTIVE) &&
					(kfifo_is_empty(&pbi->display_q) &&
					buf_level > 0x200)
					) {
					/* Kick the ISR to make progress. */
					WRITE_VREG
					(HEVC_ASSIST_MBOX0_IRQ_REG,
					0x1);
				}
			}

			if ((debug & VP9_DEBUG_DIS_SYS_ERROR_PROC) == 0) {
				/* receiver has no buffer to recycle */
				/*if ((state == RECEIVER_INACTIVE) &&
				 *	(kfifo_is_empty(&pbi->display_q))) {
				 *pr_info("vp9 something error,need reset\n");
				 *}
				 */
			}
		}
	}
#ifdef MULTI_INSTANCE_SUPPORT
	else {
		if (
			(decode_timeout_val > 0) &&
			(pbi->start_process_time > 0) &&
			((1000 * (jiffies - pbi->start_process_time) / HZ)
				> decode_timeout_val)
		) {
			int current_lcu_idx =
				READ_VREG(HEVC_PARSER_LCU_START)
				& 0xffffff;
			/* If decode made no progress since last tick,
			 * count down before declaring a timeout. */
			if (pbi->last_lcu_idx == current_lcu_idx) {
				if (pbi->decode_timeout_count > 0)
					pbi->decode_timeout_count--;
				if (pbi->decode_timeout_count == 0) {
					if (input_frame_based(
						hw_to_vdec(pbi)) ||
					(READ_VREG(HEVC_STREAM_LEVEL) > 0x200))
						timeout_process(pbi);
					else {
						vp9_print(pbi, 0,
							"timeout & empty, again\n");
						dec_again_process(pbi);
					}
				}
			} else {
				start_process_time(pbi);
				pbi->last_lcu_idx = current_lcu_idx;
			}
		}
	}
#endif

	/* Release a ucode debug pause once its position is deconfigured. */
	if ((pbi->ucode_pause_pos != 0) &&
		(pbi->ucode_pause_pos != 0xffffffff) &&
		udebug_pause_pos != pbi->ucode_pause_pos) {
		pbi->ucode_pause_pos = 0;
		WRITE_HREG(DEBUG_REG1, 0);
	}
#ifdef MULTI_INSTANCE_SUPPORT
	if (debug & VP9_DEBUG_FORCE_SEND_AGAIN) {
		pr_info(
		"Force Send Again\r\n");
		debug &= ~VP9_DEBUG_FORCE_SEND_AGAIN;
		reset_process_time(pbi);
		pbi->dec_result = DEC_RESULT_AGAIN;
		if (pbi->process_state ==
			PROC_STATE_DECODESLICE) {
			if (pbi->mmu_enable)
				vp9_recycle_mmu_buf(pbi);
			pbi->process_state =
			PROC_STATE_SENDAGAIN;
		}
		amhevc_stop();

		vdec_schedule_work(&pbi->work);
	}

	if (debug & VP9_DEBUG_DUMP_DATA) {
		debug &= ~VP9_DEBUG_DUMP_DATA;
		vp9_print(pbi, 0,
			"%s: chunk size 0x%x off 0x%x sum 0x%x\n",
			__func__,
			pbi->chunk->size,
			pbi->chunk->offset,
			get_data_check_sum(pbi, pbi->chunk->size)
			);
		dump_data(pbi, pbi->chunk->size);
	}
#endif
	if (debug & VP9_DEBUG_DUMP_PIC_LIST) {
		dump_pic_list(pbi);
		debug &= ~VP9_DEBUG_DUMP_PIC_LIST;
	}
	if (debug & VP9_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
		WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
		debug &= ~VP9_DEBUG_TRIG_SLICE_SEGMENT_PROC;
	}
	/*if (debug & VP9_DEBUG_HW_RESET) {
	}*/

	/* Debug register poke/peek via module params radr/rval. */
	if (radr != 0) {
		if (rval != 0) {
			WRITE_VREG(radr, rval);
			pr_info("WRITE_VREG(%x,%x)\n", radr, rval);
		} else
			pr_info("READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
		rval = 0;
		radr = 0;
	}
	if (pop_shorts != 0) {
		int i;
		u32 sum = 0;

		pr_info("pop stream 0x%x shorts\r\n", pop_shorts);
		for (i = 0; i < pop_shorts; i++) {
			u32 data =
			(READ_HREG(HEVC_SHIFTED_DATA) >> 16);
			WRITE_HREG(HEVC_SHIFT_COMMAND,
			(1<<7)|16);
			if ((i & 0xf) == 0)
				pr_info("%04x:", i);
			pr_info("%04x ", data);
			if (((i + 1) & 0xf) == 0)
				pr_info("\r\n");
			sum += data;
		}
		pr_info("\r\nsum = %x\r\n", sum);
		pop_shorts = 0;
	}
	if (dbg_cmd != 0) {
		if (dbg_cmd == 1) {
			u32 disp_laddr;

			if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
				get_double_write_mode(pbi) == 0) {
				disp_laddr =
					READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
			} else {
				struct canvas_s cur_canvas;

				canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
					& 0xff), &cur_canvas);
				disp_laddr = cur_canvas.addr;
			}
			pr_info("current displayed buffer address %x\r\n",
				disp_laddr);
		}
		dbg_cmd = 0;
	}
	/*don't changed at start.*/
	if (pbi->get_frame_dur && pbi->show_frame_num > 60 &&
		pbi->frame_dur > 0 && pbi->saved_resolution !=
		frame_width * frame_height *
			(96000 / pbi->frame_dur))
		vdec_schedule_work(&pbi->set_clk_work);

	timer->expires = jiffies + PUT_INTERVAL;
	add_timer(timer);
}
8996 | |
8997 | |
8998 | int vvp9_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus) |
8999 | { |
9000 | struct VP9Decoder_s *vp9 = |
9001 | (struct VP9Decoder_s *)vdec->private; |
9002 | |
9003 | if (!vp9) |
9004 | return -1; |
9005 | |
9006 | vstatus->frame_width = frame_width; |
9007 | vstatus->frame_height = frame_height; |
9008 | if (vp9->frame_dur != 0) |
9009 | vstatus->frame_rate = 96000 / vp9->frame_dur; |
9010 | else |
9011 | vstatus->frame_rate = -1; |
9012 | vstatus->error_count = 0; |
9013 | vstatus->status = vp9->stat | vp9->fatal_error; |
9014 | vstatus->frame_dur = vp9->frame_dur; |
9015 | vstatus->bit_rate = vp9->gvs->bit_rate; |
9016 | vstatus->frame_data = vp9->gvs->frame_data; |
9017 | vstatus->total_data = vp9->gvs->total_data; |
9018 | vstatus->frame_count = vp9->gvs->frame_count; |
9019 | vstatus->error_frame_count = vp9->gvs->error_frame_count; |
9020 | vstatus->drop_frame_count = vp9->gvs->drop_frame_count; |
9021 | vstatus->total_data = vp9->gvs->total_data; |
9022 | vstatus->samp_cnt = vp9->gvs->samp_cnt; |
9023 | vstatus->offset = vp9->gvs->offset; |
9024 | snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name), |
9025 | "%s", DRIVER_NAME); |
9026 | return 0; |
9027 | } |
9028 | |
/* vdec callback: latch the reset state requested by the core into the
 * module-level is_reset flag (consumed elsewhere in this driver). */
int vvp9_set_isreset(struct vdec_s *vdec, int isreset)
{
	is_reset = isreset;
	return 0;
}
9034 | |
#if 0
/* Legacy (compiled-out) power-up sequence for the HEVC core; kept for
 * reference. Current code relies on amhevc_enable()/vvp9_prot_init(). */
static void VP9_DECODE_INIT(void)
{
	/* enable vp9 clocks */
	WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
	/* *************************************************************** */
	/* Power ON HEVC */
	/* *************************************************************** */
	/* Powerup HEVC */
	WRITE_VREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_VREG(AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
	WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
	/* Pulse-reset the HEVC DOS block. */
	WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
	WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
	/* remove isolations */
	WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
			READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));

}
#endif
9055 | |
/*
 * Program the AMRISC-side decoder protocol registers.
 *
 * @mask selects which pipeline half to initialize: HW_MASK_BACK
 * configures the picture list / loop filter (back end); HW_MASK_FRONT
 * configures stream fetch, start-code detection, the mailbox interrupt
 * and the ucode debug hooks (front end). Both may be or'ed together.
 */
static void vvp9_prot_init(struct VP9Decoder_s *pbi, u32 mask)
{
	unsigned int data32;
	/* VP9_DECODE_INIT(); */
	vp9_config_work_space_hw(pbi, mask);
	if (mask & HW_MASK_BACK)
		init_pic_list_hw(pbi);

	vp9_init_decoder_hw(pbi, mask);

#ifdef VP9_LPF_LVL_UPDATE
	if (mask & HW_MASK_BACK)
		vp9_loop_filter_init(pbi);
#endif

	/* Everything below is front-end only. */
	if ((mask & HW_MASK_FRONT) == 0)
		return;
#if 1
	if (debug & VP9_DEBUG_BUFMGR_MORE)
		pr_info("%s\n", __func__);
	data32 = READ_VREG(HEVC_STREAM_CONTROL);
	data32 = data32 |
		(1 << 0)/*stream_fetch_enable*/
		;
	WRITE_VREG(HEVC_STREAM_CONTROL, data32);

	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
		if (debug & VP9_DEBUG_BUFMGR)
			pr_info("[test.c] Config STREAM_FIFO_CTL\n");
		data32 = READ_VREG(HEVC_STREAM_FIFO_CTL);
		data32 = data32 |
			(1 << 29) // stream_fifo_hole
			;
		WRITE_VREG(HEVC_STREAM_FIFO_CTL, data32);
	}
#if 0
	/* Disabled self-test of the shift start/emulate code registers. */
	data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
	if (data32 != 0x00000100) {
		pr_info("vp9 prot init error %d\n", __LINE__);
		return;
	}
	data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
	if (data32 != 0x00000300) {
		pr_info("vp9 prot init error %d\n", __LINE__);
		return;
	}
	WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
	WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
	data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
	if (data32 != 0x12345678) {
		pr_info("vp9 prot init error %d\n", __LINE__);
		return;
	}
	data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
	if (data32 != 0x9abcdef0) {
		pr_info("vp9 prot init error %d\n", __LINE__);
		return;
	}
#endif
	WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x000000001);
	WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
#endif



	WRITE_VREG(HEVC_WAIT_FLAG, 1);

	/* WRITE_VREG(HEVC_MPSR, 1); */

	/* clear mailbox interrupt */
	WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);

	/* enable mailbox interrupt */
	WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);

	/* disable PSCALE for hardware sharing */
	WRITE_VREG(HEVC_PSCALE_CTRL, 0);

	WRITE_VREG(DEBUG_REG1, 0x0);
	/*check vps/sps/pps/i-slice in ucode*/
	WRITE_VREG(NAL_SEARCH_CTL, 0x8);

	WRITE_VREG(DECODE_STOP_POS, udebug_flag);
#ifdef SUPPORT_FB_DECODING
#ifndef FB_DECODING_TEST_SCHEDULE
	/* Two-stage decode: route the header-engine FIFO between the
	 * front (writer) and back (reader) halves. */
	if (pbi->used_stage_buf_num > 0) {
		if (mask & HW_MASK_FRONT) {
			data32 = READ_VREG(
				HEVC_ASSIST_HED_FB_W_CTL);
			data32 = data32 |
				(1 << 0) /*hed_fb_wr_en*/
				;
			WRITE_VREG(HEVC_ASSIST_HED_FB_W_CTL,
				data32);
		}
		if (mask & HW_MASK_BACK) {
			data32 = READ_VREG(
				HEVC_ASSIST_HED_FB_R_CTL);
			while (data32 & (1 << 7)) {
				/*wait finish*/
				data32 = READ_VREG(
					HEVC_ASSIST_HED_FB_R_CTL);
			}
			data32 &= (~(0x1 << 0));
			/*hed_fb_rd_addr_auto_rd*/
			data32 &= (~(0x1 << 1));
			/*rd_id = 0, hed_rd_map_auto_halt_num,
			after wr 2 ready, then start reading*/
			data32 |= (0x2 << 16);
			WRITE_VREG(HEVC_ASSIST_HED_FB_R_CTL,
				data32);

			data32 |= (0x1 << 11); /*hed_rd_map_auto_halt_en*/
			data32 |= (0x1 << 1); /*hed_fb_rd_addr_auto_rd*/
			data32 |= (0x1 << 0); /*hed_fb_rd_en*/
			WRITE_VREG(HEVC_ASSIST_HED_FB_R_CTL,
				data32);
		}

	}
#endif
#endif
}
9179 | |
/*
 * Software-side initialization of the decoder instance: loop-filter
 * buffers, the gvs statistics block, default timing/state fields, the
 * display/newframe vframe FIFOs, and finally the core vp9_local_init().
 *
 * Returns the vp9_local_init() result, or -1 on allocation failure.
 */
static int vvp9_local_init(struct VP9Decoder_s *pbi)
{
	int i;
	int ret;
	int width, height;
	if (alloc_lf_buf(pbi) < 0)
		return -1;

	pbi->gvs = vzalloc(sizeof(struct vdec_info));
	if (NULL == pbi->gvs) {
		pr_info("the struct of vdec status malloc failed.\n");
		return -1;
	}
	vdec_set_vframe_comm(hw_to_vdec(pbi), DRIVER_NAME);
#ifdef DEBUG_PTS
	pbi->pts_missed = 0;
	pbi->pts_hit = 0;
#endif
	pbi->new_frame_displayed = 0;
	pbi->last_put_idx = -1;
	pbi->saved_resolution = 0;
	pbi->get_frame_dur = false;
	on_no_keyframe_skiped = 0;
	pbi->duration_from_pts_done = 0;
	pbi->vp9_first_pts_ready = 0;
	pbi->frame_cnt_window = 0;
	width = pbi->vvp9_amstream_dec_info.width;
	height = pbi->vvp9_amstream_dec_info.height;
	/* frame_dur is in 1/96000 s units; 3200 (= 96000/30) is the
	 * default when the stream info carries no rate. */
	pbi->frame_dur =
		(pbi->vvp9_amstream_dec_info.rate ==
		 0) ? 3200 : pbi->vvp9_amstream_dec_info.rate;
	if (width && height)
		pbi->frame_ar = height * 0x100 / width;
/*
 *TODO:FOR VERSION
 */
	pr_info("vp9: ver (%d,%d) decinfo: %dx%d rate=%d\n", vp9_version,
		   0, width, height, pbi->frame_dur);

	if (pbi->frame_dur == 0)
		pbi->frame_dur = 96000 / 24;

	INIT_KFIFO(pbi->display_q);
	INIT_KFIFO(pbi->newframe_q);


	/* Pre-fill the newframe FIFO with every pool entry. */
	for (i = 0; i < VF_POOL_SIZE; i++) {
		const struct vframe_s *vf = &pbi->vfpool[i];

		pbi->vfpool[i].index = -1;
		kfifo_put(&pbi->newframe_q, vf);
	}


	ret = vp9_local_init(pbi);

	/* A stream with no declared rate gets PTS-unstable handling. */
	if (!pbi->pts_unstable) {
		pbi->pts_unstable =
		(pbi->vvp9_amstream_dec_info.rate == 0)?1:0;
		pr_info("set pts unstable\n");
	}

	return ret;
}
9244 | |
9245 | |
/*
 * vvp9_init() - software/firmware initialization of a VP9 decoder instance.
 *
 * Loads the VP9 firmware image into a vmalloc'ed scratch buffer and
 * prepares the put timer and work items.  In the multi-instance path
 * (pbi->m_ins_flag) hardware bring-up is deferred: the firmware is kept
 * in pbi->fw for reload on each run, and picture-list initialization is
 * scheduled through vp9_work().  In the single-instance path the HEVC
 * core is enabled and loaded immediately, the ISR is registered, the
 * vframe provider is hooked up and the hardware is started.
 *
 * Returns 0 on success, a negative errno (or -1 on firmware fetch
 * failure) otherwise.
 */
#ifdef MULTI_INSTANCE_SUPPORT
static s32 vvp9_init(struct vdec_s *vdec)
{
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)vdec->private;
#else
static s32 vvp9_init(struct VP9Decoder_s *pbi)
{
#endif
	int ret;
	/* 16 pages (64KB) for the firmware payload */
	int fw_size = 0x1000 * 16;
	struct firmware_s *fw = NULL;

	pbi->stat |= STAT_TIMER_INIT;

	if (vvp9_local_init(pbi) < 0)
		return -EBUSY;

	fw = vmalloc(sizeof(struct firmware_s) + fw_size);
	if (IS_ERR_OR_NULL(fw))
		return -ENOMEM;

	if (get_firmware_data(VIDEO_DEC_VP9_MMU, fw->data) < 0) {
		pr_err("get firmware fail.\n");
		vfree(fw);
		return -1;
	}

	fw->len = fw_size;

	INIT_WORK(&pbi->set_clk_work, vp9_set_clk);
	init_timer(&pbi->timer);

#ifdef MULTI_INSTANCE_SUPPORT
	if (pbi->m_ins_flag) {
		/* timer is armed later, on the first run */
		pbi->timer.data = (ulong) pbi;
		pbi->timer.function = vvp9_put_timer_func;
		pbi->timer.expires = jiffies + PUT_INTERVAL;

		/*add_timer(&pbi->timer);

		pbi->stat |= STAT_TIMER_ARM;
		pbi->stat |= STAT_ISR_REG;*/

		INIT_WORK(&pbi->work, vp9_work);
		INIT_WORK(&pbi->recycle_mmu_work, vp9_recycle_mmu_work);
#ifdef SUPPORT_FB_DECODING
		if (pbi->used_stage_buf_num > 0)
			INIT_WORK(&pbi->s1_work, vp9_s1_work);
#endif
		/* keep firmware around: run_front() reloads it per run */
		pbi->fw = fw;

		/* picture list init.*/
		pbi->dec_result = DEC_INIT_PICLIST;
		vdec_schedule_work(&pbi->work);

		return 0;
	}
#endif
	amhevc_enable();

	init_pic_list(pbi);

	ret = amhevc_loadmc_ex(VFORMAT_VP9, NULL, fw->data);
	if (ret < 0) {
		amhevc_disable();
		vfree(fw);
		pr_err("VP9: the %s fw loading failed, err: %x\n",
			tee_enabled() ? "TEE" : "local", ret);
		return -EBUSY;
	}

	/* single-instance path: firmware already in the core, drop the copy */
	vfree(fw);

	pbi->stat |= STAT_MC_LOAD;

	/* enable AMRISC side protocol */
	vvp9_prot_init(pbi, HW_MASK_FRONT | HW_MASK_BACK);

	if (vdec_request_threaded_irq(VDEC_IRQ_0,
				vvp9_isr,
				vvp9_isr_thread_fn,
				IRQF_ONESHOT,/*run thread on this irq disabled*/
				"vvp9-irq", (void *)pbi)) {
		pr_info("vvp9 irq register error.\n");
		amhevc_disable();
		return -ENOENT;
	}

	pbi->stat |= STAT_ISR_REG;

	pbi->provider_name = PROVIDER_NAME;
#ifdef MULTI_INSTANCE_SUPPORT
	vf_provider_init(&vvp9_vf_prov, PROVIDER_NAME,
				&vvp9_vf_provider, pbi);
	vf_reg_provider(&vvp9_vf_prov);
	vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
	if (pbi->frame_dur != 0) {
		if (!is_reset)
			vf_notify_receiver(pbi->provider_name,
					VFRAME_EVENT_PROVIDER_FR_HINT,
					(void *)
					((unsigned long)pbi->frame_dur));
	}
#else
	vf_provider_init(&vvp9_vf_prov, PROVIDER_NAME, &vvp9_vf_provider,
					 pbi);
	vf_reg_provider(&vvp9_vf_prov);
	vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
	if (!is_reset)
		vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_FR_HINT,
		(void *)((unsigned long)pbi->frame_dur));
#endif
	pbi->stat |= STAT_VF_HOOK;

	pbi->timer.data = (ulong)pbi;
	pbi->timer.function = vvp9_put_timer_func;
	pbi->timer.expires = jiffies + PUT_INTERVAL;

	pbi->stat |= STAT_VDEC_RUN;

	add_timer(&pbi->timer);

	pbi->stat |= STAT_TIMER_ARM;

	amhevc_start();

	pbi->init_flag = 1;
	pbi->process_busy = 0;
	pr_info("%d, vvp9_init, RP=0x%x\n",
		__LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
	return 0;
}
9378 | |
/*
 * vmvp9_stop() - multi-instance teardown of a decoder instance.
 *
 * Tears down in the reverse order of bring-up: stop the core, free the
 * IRQ, kill the timer, unhook the vframe provider, then release local
 * state, pending work items, MMU buffers and the cached firmware copy.
 * The statement order matters: the HW must be stopped and the ISR gone
 * before buffers are released.
 */
static int vmvp9_stop(struct VP9Decoder_s *pbi)
{
	pbi->init_flag = 0;

	if (pbi->stat & STAT_VDEC_RUN) {
		amhevc_stop();
		pbi->stat &= ~STAT_VDEC_RUN;
	}
	if (pbi->stat & STAT_ISR_REG) {
		vdec_free_irq(VDEC_IRQ_0, (void *)pbi);
		pbi->stat &= ~STAT_ISR_REG;
	}
	if (pbi->stat & STAT_TIMER_ARM) {
		del_timer_sync(&pbi->timer);
		pbi->stat &= ~STAT_TIMER_ARM;
	}

	if (pbi->stat & STAT_VF_HOOK) {
		/* tell the receiver the frame-rate hint no longer applies */
		if (!is_reset)
			vf_notify_receiver(pbi->provider_name,
					VFRAME_EVENT_PROVIDER_FR_END_HINT,
					NULL);

		vf_unreg_provider(&vvp9_vf_prov);
		pbi->stat &= ~STAT_VF_HOOK;
	}
	vp9_local_uninit(pbi);
	reset_process_time(pbi);
	cancel_work_sync(&pbi->work);
	cancel_work_sync(&pbi->recycle_mmu_work);
#ifdef SUPPORT_FB_DECODING
	if (pbi->used_stage_buf_num > 0)
		cancel_work_sync(&pbi->s1_work);
#endif
	cancel_work_sync(&pbi->set_clk_work);
	uninit_mmu_buffers(pbi);
	/* firmware copy kept since vvp9_init() in the m_ins path */
	if (pbi->fw)
		vfree(pbi->fw);
	pbi->fw = NULL;
	return 0;
}
9420 | |
/*
 * vvp9_stop() - single-instance teardown (also used from remove()).
 *
 * Mirrors vmvp9_stop() but additionally masks the mailbox interrupt in
 * the non-multi-instance case before freeing the IRQ, and only disables
 * the HEVC core when this instance owns it (non-m_ins path).  Teardown
 * order is significant and must not be rearranged.
 */
static int vvp9_stop(struct VP9Decoder_s *pbi)
{

	pbi->init_flag = 0;
	pbi->first_sc_checked = 0;
	if (pbi->stat & STAT_VDEC_RUN) {
		amhevc_stop();
		pbi->stat &= ~STAT_VDEC_RUN;
	}

	if (pbi->stat & STAT_ISR_REG) {
#ifdef MULTI_INSTANCE_SUPPORT
		if (!pbi->m_ins_flag)
#endif
			/* mask the mailbox so no late interrupt fires */
			WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
		vdec_free_irq(VDEC_IRQ_0, (void *)pbi);
		pbi->stat &= ~STAT_ISR_REG;
	}

	if (pbi->stat & STAT_TIMER_ARM) {
		del_timer_sync(&pbi->timer);
		pbi->stat &= ~STAT_TIMER_ARM;
	}

	if (pbi->stat & STAT_VF_HOOK) {
		if (!is_reset)
			vf_notify_receiver(pbi->provider_name,
					VFRAME_EVENT_PROVIDER_FR_END_HINT,
					NULL);

		vf_unreg_provider(&vvp9_vf_prov);
		pbi->stat &= ~STAT_VF_HOOK;
	}
	vp9_local_uninit(pbi);

	cancel_work_sync(&pbi->set_clk_work);
#ifdef MULTI_INSTANCE_SUPPORT
	if (pbi->m_ins_flag) {
#ifdef SUPPORT_FB_DECODING
		if (pbi->used_stage_buf_num > 0)
			cancel_work_sync(&pbi->s1_work);
#endif
		cancel_work_sync(&pbi->work);
		cancel_work_sync(&pbi->recycle_mmu_work);
	} else
		amhevc_disable();
#else
	amhevc_disable();
#endif
	uninit_mmu_buffers(pbi);

	/* vfree(NULL) is a no-op, so no guard needed here */
	vfree(pbi->fw);
	pbi->fw = NULL;
	return 0;
}
9476 | static int amvdec_vp9_mmu_init(struct VP9Decoder_s *pbi) |
9477 | { |
9478 | int tvp_flag = vdec_secure(hw_to_vdec(pbi)) ? |
9479 | CODEC_MM_FLAGS_TVP : 0; |
9480 | int buf_size = 48; |
9481 | |
9482 | if ((pbi->max_pic_w * pbi->max_pic_h > 1280*736) && |
9483 | (pbi->max_pic_w * pbi->max_pic_h <= 1920*1088)) { |
9484 | buf_size = 12; |
9485 | } else if ((pbi->max_pic_w * pbi->max_pic_h > 0) && |
9486 | (pbi->max_pic_w * pbi->max_pic_h <= 1280*736)) { |
9487 | buf_size = 4; |
9488 | } |
9489 | pbi->need_cache_size = buf_size * SZ_1M; |
9490 | pbi->sc_start_time = get_jiffies_64(); |
9491 | if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) { |
9492 | pbi->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME, |
9493 | pbi->index, FRAME_BUFFERS, |
9494 | pbi->need_cache_size, |
9495 | tvp_flag |
9496 | ); |
9497 | if (!pbi->mmu_box) { |
9498 | pr_err("vp9 alloc mmu box failed!!\n"); |
9499 | return -1; |
9500 | } |
9501 | } |
9502 | pbi->bmmu_box = decoder_bmmu_box_alloc_box( |
9503 | DRIVER_NAME, |
9504 | pbi->index, |
9505 | MAX_BMMU_BUFFER_NUM, |
9506 | 4 + PAGE_SHIFT, |
9507 | CODEC_MM_FLAGS_CMA_CLEAR | |
9508 | CODEC_MM_FLAGS_FOR_VDECODER | |
9509 | tvp_flag); |
9510 | if (!pbi->bmmu_box) { |
9511 | pr_err("vp9 alloc bmmu box failed!!\n"); |
9512 | return -1; |
9513 | } |
9514 | return 0; |
9515 | } |
9516 | |
/* Single global decoder context used by the legacy (non-multi-instance)
 * probe/remove path; set in amvdec_vp9_probe(), read in amvdec_vp9_remove().
 */
static struct VP9Decoder_s *gHevc;
9518 | |
/*
 * amvdec_vp9_probe() - platform probe for the legacy single-instance path.
 *
 * Allocates the decoder context, sets up MMU/BMMU boxes and the shared
 * work buffer, copies stream info from the platform data and runs
 * vvp9_init().  All failure paths unwind their allocations and release
 * vvp9_mutex before returning.
 */
static int amvdec_vp9_probe(struct platform_device *pdev)
{
	struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
	struct BUF_s BUF[MAX_BUF_NUM];
	struct VP9Decoder_s *pbi;
	int ret;
#ifndef MULTI_INSTANCE_SUPPORT
	int i;
#endif
	pr_debug("%s\n", __func__);

	mutex_lock(&vvp9_mutex);
	pbi = vmalloc(sizeof(struct VP9Decoder_s));
	if (pbi == NULL) {
		pr_info("\namvdec_vp9 device data allocation failed\n");
		mutex_unlock(&vvp9_mutex);
		return -ENOMEM;
	}

	gHevc = pbi;
	/* NOTE(review): pbi was just vmalloc'ed, so m_BUF holds
	 * uninitialized data here; this save/restore around the memset
	 * looks like a leftover from when the context was static --
	 * verify before relying on m_BUF contents.
	 */
	memcpy(&BUF[0], &pbi->m_BUF[0], sizeof(struct BUF_s) * MAX_BUF_NUM);
	memset(pbi, 0, sizeof(struct VP9Decoder_s));
	memcpy(&pbi->m_BUF[0], &BUF[0], sizeof(struct BUF_s) * MAX_BUF_NUM);

	pbi->init_flag = 0;
	pbi->first_sc_checked= 0;
	/* SM1 and later can decode up to 8K */
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
		vp9_max_pic_w = 8192;
		vp9_max_pic_h = 4608;
	}
	pbi->max_pic_w = vp9_max_pic_w;
	pbi->max_pic_h = vp9_max_pic_h;

#ifdef MULTI_INSTANCE_SUPPORT
	pbi->eos = 0;
	pbi->start_process_time = 0;
	pbi->timeout_num = 0;
#endif
	pbi->fatal_error = 0;
	pbi->show_frame_num = 0;
	if (pdata == NULL) {
		pr_info("\namvdec_vp9 memory resource undefined.\n");
		vfree(pbi);
		mutex_unlock(&vvp9_mutex);
		return -EFAULT;
	}
	/* this probe path is always single-instance */
	pbi->m_ins_flag = 0;
#ifdef MULTI_INSTANCE_SUPPORT
	pbi->platform_dev = pdev;
	platform_set_drvdata(pdev, pdata);
#endif
	pbi->double_write_mode = double_write_mode;
	pbi->mmu_enable = 1;
	if (amvdec_vp9_mmu_init(pbi) < 0) {
		vfree(pbi);
		mutex_unlock(&vvp9_mutex);
		pr_err("vp9 alloc bmmu box failed!!\n");
		return -1;
	}

	ret = decoder_bmmu_box_alloc_buf_phy(pbi->bmmu_box, WORK_SPACE_BUF_ID,
			work_buf_size, DRIVER_NAME, &pdata->mem_start);
	if (ret < 0) {
		uninit_mmu_buffers(pbi);
		vfree(pbi);
		mutex_unlock(&vvp9_mutex);
		return ret;
	}
	pbi->buf_size = work_buf_size;

#ifdef MULTI_INSTANCE_SUPPORT
	pbi->buf_start = pdata->mem_start;
#else
	if (!pbi->mmu_enable)
		pbi->mc_buf_spec.buf_end = pdata->mem_start + pbi->buf_size;

	for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
		amvvp9_workbuff_spec[i].start_adr = pdata->mem_start;
#endif


	if (debug) {
		pr_info("===VP9 decoder mem resource 0x%lx size 0x%x\n",
			   pdata->mem_start, pbi->buf_size);
	}

	/* fall back to sane defaults when no stream info was provided */
	if (pdata->sys_info)
		pbi->vvp9_amstream_dec_info = *pdata->sys_info;
	else {
		pbi->vvp9_amstream_dec_info.width = 0;
		pbi->vvp9_amstream_dec_info.height = 0;
		pbi->vvp9_amstream_dec_info.rate = 30;
	}
	pbi->no_head = no_head;
#ifdef MULTI_INSTANCE_SUPPORT
	pbi->cma_dev = pdata->cma_dev;
#else
	cma_dev = pdata->cma_dev;
#endif

#ifdef MULTI_INSTANCE_SUPPORT
	pdata->private = pbi;
	pdata->dec_status = vvp9_dec_status;
	pdata->set_isreset = vvp9_set_isreset;
	is_reset = 0;
	if (vvp9_init(pdata) < 0) {
#else
	if (vvp9_init(pbi) < 0) {
#endif
		pr_info("\namvdec_vp9 init failed.\n");
		vp9_local_uninit(pbi);
		uninit_mmu_buffers(pbi);
		vfree(pbi);
		pdata->dec_status = NULL;
		mutex_unlock(&vvp9_mutex);
		return -ENODEV;
	}
	/*set the max clk for smooth playing...*/
	hevc_source_changed(VFORMAT_VP9,
			4096, 2048, 60);
	mutex_unlock(&vvp9_mutex);

	return 0;
}
9643 | |
/*
 * vdec_fence_release() - force-signal and drop every outstanding fence.
 *
 * Called from the remove path (process context, under vvp9_mutex).
 * Advances the timeline far enough to wake all VF_POOL_SIZE fences,
 * waits up to 2 seconds for them to be signaled, puts each vframe's
 * fence reference, and finally drops the timeline refcount.
 *
 * Fix: the original polled check_objs_all_signaled() in a tight loop,
 * burning a full CPU for up to 2 seconds.  Sleep briefly between polls
 * instead; this context may sleep.
 */
static void vdec_fence_release(struct VP9Decoder_s *pbi,
			       struct vdec_sync *sync)
{
	ulong expires;
	int i;

	/* notify signal to wake up all fences. */
	vdec_timeline_increase(sync, VF_POOL_SIZE);

	expires = jiffies + msecs_to_jiffies(2000);
	while (!check_objs_all_signaled(sync)) {
		if (time_after(jiffies, expires)) {
			pr_err("wait fence signaled timeout.\n");
			break;
		}
		/* yield the CPU between polls instead of busy-spinning */
		usleep_range(1000, 2000);
	}

	for (i = 0; i < VF_POOL_SIZE; i++) {
		struct vframe_s *vf = &pbi->vfpool[i];

		if (vf->fence) {
			vdec_fence_put(vf->fence);
			vf->fence = NULL;
		}
	}

	/* decreases refcnt of timeline. */
	vdec_timeline_put(sync);
}
9673 | |
/*
 * amvdec_vp9_remove() - platform remove for the single-instance path.
 *
 * Stops the decoder, releases clock demand, frees per-frame canvases
 * (parallel-dec mode only), releases any pending fences and finally
 * frees the global context allocated in probe.
 */
static int amvdec_vp9_remove(struct platform_device *pdev)
{
	struct VP9Decoder_s *pbi = gHevc;
	struct vdec_s *vdec = hw_to_vdec(pbi);
	int i;

	if (debug)
		pr_info("amvdec_vp9_remove\n");

	mutex_lock(&vvp9_mutex);

	vvp9_stop(pbi);

	/* zero resolution/rate releases the clock demand */
	hevc_source_changed(VFORMAT_VP9, 0, 0, 0);

	if (vdec->parallel_dec == 1) {
		for (i = 0; i < FRAME_BUFFERS; i++) {
			vdec->free_canvas_ex(pbi->common.buffer_pool->
				frame_bufs[i].buf.y_canvas_index, vdec->id);
			vdec->free_canvas_ex(pbi->common.buffer_pool->
				frame_bufs[i].buf.uv_canvas_index, vdec->id);
		}
	}

#ifdef DEBUG_PTS
	pr_info("pts missed %ld, pts hit %ld, duration %d\n",
		   pbi->pts_missed, pbi->pts_hit, pbi->frame_dur);
#endif
	mem_map_mode = 0;

	if (pbi->enable_fence)
		vdec_fence_release(pbi, &vdec->sync);

	vfree(pbi);
	mutex_unlock(&vvp9_mutex);

	return 0;
}
9712 | |
9713 | /****************************************/ |
9714 | #ifdef CONFIG_PM |
/* System-sleep suspend hook: delegates to the shared HEVC core helper. */
static int vp9_suspend(struct device *dev)
{
	amhevc_suspend(to_platform_device(dev), dev->power.power_state);
	return 0;
}
9720 | |
/* System-sleep resume hook: delegates to the shared HEVC core helper. */
static int vp9_resume(struct device *dev)
{
	amhevc_resume(to_platform_device(dev));
	return 0;
}
9726 | |
/* Power-management callbacks wired into the platform driver below. */
static const struct dev_pm_ops vp9_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(vp9_suspend, vp9_resume)
};
9730 | #endif |
9731 | |
/* Platform driver for the legacy single-instance VP9 decoder device. */
static struct platform_driver amvdec_vp9_driver = {
	.probe = amvdec_vp9_probe,
	.remove = amvdec_vp9_remove,
	.driver = {
		.name = DRIVER_NAME,
#ifdef CONFIG_PM
		.pm = &vp9_pm_ops,
#endif
	}
};
9742 | |
/* Codec capability descriptor registered with the codec profile list;
 * the profile string is filled in elsewhere based on chip capability.
 */
static struct codec_profile_t amvdec_vp9_profile = {
	.name = "vp9",
	.profile = ""
};

/* Separate profile entry for the multi-instance ("mvp9") decoder. */
static struct codec_profile_t amvdec_vp9_profile_mult;
9749 | |
9750 | static unsigned char get_data_check_sum |
9751 | (struct VP9Decoder_s *pbi, int size) |
9752 | { |
9753 | int jj; |
9754 | int sum = 0; |
9755 | u8 *data = NULL; |
9756 | |
9757 | if (!pbi->chunk->block->is_mapped) |
9758 | data = codec_mm_vmap(pbi->chunk->block->start + |
9759 | pbi->chunk->offset, size); |
9760 | else |
9761 | data = ((u8 *)pbi->chunk->block->start_virt) + |
9762 | pbi->chunk->offset; |
9763 | |
9764 | for (jj = 0; jj < size; jj++) |
9765 | sum += data[jj]; |
9766 | |
9767 | if (!pbi->chunk->block->is_mapped) |
9768 | codec_mm_unmap_phyaddr(data); |
9769 | return sum; |
9770 | } |
9771 | |
9772 | static void dump_data(struct VP9Decoder_s *pbi, int size) |
9773 | { |
9774 | int jj; |
9775 | u8 *data = NULL; |
9776 | int padding_size = pbi->chunk->offset & |
9777 | (VDEC_FIFO_ALIGN - 1); |
9778 | |
9779 | if (!pbi->chunk->block->is_mapped) |
9780 | data = codec_mm_vmap(pbi->chunk->block->start + |
9781 | pbi->chunk->offset, size); |
9782 | else |
9783 | data = ((u8 *)pbi->chunk->block->start_virt) + |
9784 | pbi->chunk->offset; |
9785 | |
9786 | vp9_print(pbi, 0, "padding: "); |
9787 | for (jj = padding_size; jj > 0; jj--) |
9788 | vp9_print_cont(pbi, |
9789 | 0, |
9790 | "%02x ", *(data - jj)); |
9791 | vp9_print_cont(pbi, 0, "data adr %p\n", |
9792 | data); |
9793 | |
9794 | for (jj = 0; jj < size; jj++) { |
9795 | if ((jj & 0xf) == 0) |
9796 | vp9_print(pbi, |
9797 | 0, |
9798 | "%06x:", jj); |
9799 | vp9_print_cont(pbi, |
9800 | 0, |
9801 | "%02x ", data[jj]); |
9802 | if (((jj + 1) & 0xf) == 0) |
9803 | vp9_print(pbi, |
9804 | 0, |
9805 | "\n"); |
9806 | } |
9807 | vp9_print(pbi, |
9808 | 0, |
9809 | "\n"); |
9810 | |
9811 | if (!pbi->chunk->block->is_mapped) |
9812 | codec_mm_unmap_phyaddr(data); |
9813 | } |
9814 | |
/*
 * vp9_work() - bottom-half state machine, dispatched on pbi->dec_result.
 *
 * Runs after each decode attempt (or for deferred init) and decides how
 * to hand control back to the vdec core:
 *   DEC_INIT_PICLIST            - deferred picture-list init, then done.
 *   DEC_RESULT_NEED_MORE_BUFFER - wait for a free frame buffer, then
 *                                 re-latch RPM params and resume.
 *   DEC_RESULT_GET_DATA(_RETRY) - frame-based input: feed the next chunk
 *                                 or reschedule until data arrives.
 *   DEC_RESULT_DONE             - frame finished; bump counters, mark
 *                                 the chunk consumed.
 *   DEC_RESULT_AGAIN            - input underrun; EOS if no more input.
 *   DEC_RESULT_EOS              - flush buffer manager, notify v4l EOS.
 *   DEC_RESULT_FORCE_EXIT       - emergency stop: halt core, drop ISR.
 * On every path except the early returns it finally stops the core,
 * disarms the timer, releases the scheduler core mask and triggers the
 * next schedule.
 */
static void vp9_work(struct work_struct *work)
{
	struct VP9Decoder_s *pbi = container_of(work,
		struct VP9Decoder_s, work);
	struct vdec_s *vdec = hw_to_vdec(pbi);
	/* finished decoding one frame or error,
	 * notify vdec core to switch context
	 */
	vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL,
		"%s dec_result %d %x %x %x\n",
		__func__,
		pbi->dec_result,
		READ_VREG(HEVC_STREAM_LEVEL),
		READ_VREG(HEVC_STREAM_WR_PTR),
		READ_VREG(HEVC_STREAM_RD_PTR));

	if (pbi->dec_result == DEC_INIT_PICLIST) {
		init_pic_list(pbi);
		pbi->pic_list_init_done = true;
		return;
	}

	if (pbi->dec_result == DEC_RESULT_NEED_MORE_BUFFER) {
		reset_process_time(pbi);
		if (!get_free_buf_count(pbi)) {
			/* still no buffer; poll again via the work queue */
			pbi->dec_result = DEC_RESULT_NEED_MORE_BUFFER;
			vdec_schedule_work(&pbi->work);
		} else {
			int i;

			if (pbi->mmu_enable)
				vp9_recycle_mmu_buf_tail(pbi);

			if (pbi->frame_count > 0)
				vp9_bufmgr_postproc(pbi);

			/* re-latch RPM parameters; HW writes them in
			 * 64-bit big-endian groups, hence the byte swap
			 * within each group of 4
			 */
			for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
				int ii;
				for (ii = 0; ii < 4; ii++)
					pbi->vp9_param.l.data[i + ii] =
						pbi->rpm_ptr[i + 3 - ii];
			}
			continue_decoding(pbi);
			pbi->postproc_done = 0;
			pbi->process_busy = 0;

			start_process_time(pbi);
		}
		return;
	}

	if (((pbi->dec_result == DEC_RESULT_GET_DATA) ||
		(pbi->dec_result == DEC_RESULT_GET_DATA_RETRY))
		&& (hw_to_vdec(pbi)->next_status !=
		VDEC_STATUS_DISCONNECTED)) {
		if (!vdec_has_more_input(vdec)) {
			pbi->dec_result = DEC_RESULT_EOS;
			vdec_schedule_work(&pbi->work);
			return;
		}

		if (pbi->dec_result == DEC_RESULT_GET_DATA) {
			vp9_print(pbi, PRINT_FLAG_VDEC_STATUS,
				"%s DEC_RESULT_GET_DATA %x %x %x\n",
				__func__,
				READ_VREG(HEVC_STREAM_LEVEL),
				READ_VREG(HEVC_STREAM_WR_PTR),
				READ_VREG(HEVC_STREAM_RD_PTR));
			vdec_vframe_dirty(vdec, pbi->chunk);
			vdec_clean_input(vdec);
		}

		if (get_free_buf_count(pbi) >=
			pbi->run_ready_min_buf_num) {
			int r;
			int decode_size;
			r = vdec_prepare_input(vdec, &pbi->chunk);
			if (r < 0) {
				pbi->dec_result = DEC_RESULT_GET_DATA_RETRY;

				vp9_print(pbi,
					PRINT_FLAG_VDEC_DETAIL,
					"amvdec_vh265: Insufficient data\n");

				vdec_schedule_work(&pbi->work);
				return;
			}
			pbi->dec_result = DEC_RESULT_NONE;
			vp9_print(pbi, PRINT_FLAG_VDEC_STATUS,
				"%s: chunk size 0x%x sum 0x%x\n",
				__func__, r,
				(debug & PRINT_FLAG_VDEC_STATUS) ?
				get_data_check_sum(pbi, r) : 0
				);

			if (debug & PRINT_FLAG_VDEC_DATA)
				dump_data(pbi, pbi->chunk->size);

			/* include the FIFO-alignment padding bytes */
			decode_size = pbi->chunk->size +
				(pbi->chunk->offset & (VDEC_FIFO_ALIGN - 1));

			WRITE_VREG(HEVC_DECODE_SIZE,
				READ_VREG(HEVC_DECODE_SIZE) + decode_size);

			vdec_enable_input(vdec);

			/* kick the core to continue decoding */
			WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);

			start_process_time(pbi);

		} else{
			pbi->dec_result = DEC_RESULT_GET_DATA_RETRY;

			vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL,
				"amvdec_vh265: Insufficient data\n");

			vdec_schedule_work(&pbi->work);
		}
		return;
	} else if (pbi->dec_result == DEC_RESULT_DONE) {
#ifdef SUPPORT_FB_DECODING
		if (pbi->used_stage_buf_num > 0) {
#ifndef FB_DECODING_TEST_SCHEDULE
			/* NOTE(review): this reschedules the work but
			 * still falls through to inc_s2_pos() below --
			 * a missing return? verify intent.
			 */
			if (!is_s2_decoding_finished(pbi)) {
				vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL,
					"s2 decoding not done, check again later\n");
				vdec_schedule_work(&pbi->work);
			}
#endif
			inc_s2_pos(pbi);
			if (mcrcc_cache_alg_flag)
				dump_hit_rate(pbi);
		}
#endif
		/* if (!pbi->ctx_valid)
			pbi->ctx_valid = 1; */
		pbi->slice_idx++;
		pbi->frame_count++;
		pbi->process_state = PROC_STATE_INIT;
		decode_frame_count[pbi->index] = pbi->frame_count;

		if (pbi->mmu_enable)
			pbi->used_4k_num =
				(READ_VREG(HEVC_SAO_MMU_STATUS) >> 16);
		vp9_print(pbi, PRINT_FLAG_VDEC_STATUS,
			"%s (===> %d) dec_result %d %x %x %x shiftbytes 0x%x decbytes 0x%x\n",
			__func__,
			pbi->frame_count,
			pbi->dec_result,
			READ_VREG(HEVC_STREAM_LEVEL),
			READ_VREG(HEVC_STREAM_WR_PTR),
			READ_VREG(HEVC_STREAM_RD_PTR),
			READ_VREG(HEVC_SHIFT_BYTE_COUNT),
			READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
			pbi->start_shift_bytes
			);
		vdec_vframe_dirty(hw_to_vdec(pbi), pbi->chunk);
	} else if (pbi->dec_result == DEC_RESULT_AGAIN) {
		/*
			stream base: stream buf empty or timeout
			frame base: vdec_prepare_input fail
		*/
		if (!vdec_has_more_input(vdec)) {
			pbi->dec_result = DEC_RESULT_EOS;
			vdec_schedule_work(&pbi->work);
			return;
		}
	} else if (pbi->dec_result == DEC_RESULT_EOS) {
		vp9_print(pbi, PRINT_FLAG_VDEC_STATUS,
			"%s: end of stream\n",
			__func__);
		pbi->eos = 1;
		vp9_bufmgr_postproc(pbi);

		notify_v4l_eos(hw_to_vdec(pbi));

		vdec_vframe_dirty(hw_to_vdec(pbi), pbi->chunk);
	} else if (pbi->dec_result == DEC_RESULT_FORCE_EXIT) {
		vp9_print(pbi, PRINT_FLAG_VDEC_STATUS,
			"%s: force exit\n",
			__func__);
		if (pbi->stat & STAT_VDEC_RUN) {
			amhevc_stop();
			pbi->stat &= ~STAT_VDEC_RUN;
		}

		if (pbi->stat & STAT_ISR_REG) {
#ifdef MULTI_INSTANCE_SUPPORT
			if (!pbi->m_ins_flag)
#endif
				WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
			vdec_free_irq(VDEC_IRQ_0, (void *)pbi);
			pbi->stat &= ~STAT_ISR_REG;
		}
	}
	if (pbi->stat & STAT_VDEC_RUN) {
		amhevc_stop();
		pbi->stat &= ~STAT_VDEC_RUN;
	}

	if (pbi->stat & STAT_TIMER_ARM) {
		del_timer_sync(&pbi->timer);
		pbi->stat &= ~STAT_TIMER_ARM;
	}
	/* mark itself has all HW resource released and input released */
#ifdef SUPPORT_FB_DECODING
	if (pbi->used_stage_buf_num > 0)
		vdec_core_finish_run(hw_to_vdec(pbi), CORE_MASK_HEVC_BACK);
	else
		vdec_core_finish_run(hw_to_vdec(pbi), CORE_MASK_VDEC_1
					| CORE_MASK_HEVC
					| CORE_MASK_HEVC_FRONT
					| CORE_MASK_HEVC_BACK
					);
#else
	if (vdec->parallel_dec == 1)
		vdec_core_finish_run(vdec, CORE_MASK_HEVC);
	else
		vdec_core_finish_run(hw_to_vdec(pbi), CORE_MASK_VDEC_1
				| CORE_MASK_HEVC);
#endif
	trigger_schedule(pbi);
}
10038 | |
/*
 * vp9_hw_ctx_restore() - re-program HW protocol registers before a run.
 *
 * With frame-parallel (FB) decoding enabled and stage buffers in use,
 * only the front-end mask is initialized here (the back end is handled
 * by the s2 path); otherwise both front and back masks are programmed.
 */
static int vp9_hw_ctx_restore(struct VP9Decoder_s *pbi)
{
	/* new to do ... */
#if (!defined SUPPORT_FB_DECODING)
	vvp9_prot_init(pbi, HW_MASK_FRONT | HW_MASK_BACK);
#elif (defined FB_DECODING_TEST_SCHEDULE)
	vvp9_prot_init(pbi, HW_MASK_FRONT | HW_MASK_BACK);
#else
	if (pbi->used_stage_buf_num > 0)
		vvp9_prot_init(pbi, HW_MASK_FRONT);
	else
		vvp9_prot_init(pbi, HW_MASK_FRONT | HW_MASK_BACK);
#endif
	return 0;
}
/*
 * run_ready() - tell the vdec scheduler which cores this instance can use.
 *
 * Returns a mask of CORE_MASK_* bits, or 0 when the instance is not
 * ready (picture list not initialized, EOS reached, or not enough free
 * frame buffers).  In FB-decoding mode front and back readiness are
 * evaluated independently; in v4l mode the capture-buffer pool state is
 * also consulted.  The first call additionally pre-checks the scatter
 * cache size.
 */
static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
{
	struct VP9Decoder_s *pbi =
		(struct VP9Decoder_s *)vdec->private;
	int tvp = vdec_secure(hw_to_vdec(pbi)) ?
		CODEC_MM_FLAGS_TVP : 0;
	unsigned long ret = 0;

	if (!(pbi->pic_list_init_done && pbi->pic_list_init_done2) || pbi->eos)
		return ret;
	if (!pbi->first_sc_checked && pbi->mmu_enable) {
		/* one-time scatter-cache availability check */
		int size = decoder_mmu_box_sc_check(pbi->mmu_box, tvp);
		pbi->first_sc_checked = 1;
		vp9_print(pbi, 0, "vp9 cached=%d  need_size=%d speed= %d ms\n",
			size, (pbi->need_cache_size >> PAGE_SHIFT),
			(int)(get_jiffies_64() - pbi->sc_start_time) * 1000/HZ);
	}

#ifdef SUPPORT_FB_DECODING
	if (pbi->used_stage_buf_num > 0) {
		if (mask & CORE_MASK_HEVC_FRONT) {
			if (get_free_stage_buf_num(pbi) > 0
				&& mv_buf_available(pbi))
				ret |= CORE_MASK_HEVC_FRONT;
		}
		if (mask & CORE_MASK_HEVC_BACK) {
			if (s2_buf_available(pbi) &&
				(get_free_buf_count(pbi) >=
				pbi->run_ready_min_buf_num)) {
				ret |= CORE_MASK_HEVC_BACK;
				pbi->back_not_run_ready = 0;
			} else
				pbi->back_not_run_ready = 1;
#if 0
			if (get_free_buf_count(pbi) <
				run_ready_min_buf_num)
				dump_pic_list(pbi);
#endif
		}
	} else if (get_free_buf_count(pbi) >=
		pbi->run_ready_min_buf_num)
		ret = CORE_MASK_VDEC_1 | CORE_MASK_HEVC
			| CORE_MASK_HEVC_FRONT
			| CORE_MASK_HEVC_BACK;

	if (ret & CORE_MASK_HEVC_FRONT)
		not_run_ready[pbi->index] = 0;
	else
		not_run_ready[pbi->index]++;

	if (ret & CORE_MASK_HEVC_BACK)
		not_run2_ready[pbi->index] = 0;
	else
		not_run2_ready[pbi->index]++;

	vp9_print(pbi,
		PRINT_FLAG_VDEC_DETAIL, "%s mask %lx=>%lx (%d %d %d %d)\r\n",
		__func__, mask, ret,
		get_free_stage_buf_num(pbi),
		mv_buf_available(pbi),
		s2_buf_available(pbi),
		get_free_buf_count(pbi)
		);

	return ret;

#else
	if (get_free_buf_count(pbi) >=
		pbi->run_ready_min_buf_num) {
		if (vdec->parallel_dec == 1)
			ret = CORE_MASK_HEVC;
		else
			ret = CORE_MASK_VDEC_1 | CORE_MASK_HEVC;
	}

	if (pbi->is_used_v4l) {
		struct aml_vcodec_ctx *ctx =
			(struct aml_vcodec_ctx *)(pbi->v4l2_ctx);

		if (ctx->param_sets_from_ucode) {
			if (pbi->v4l_params_parsed) {
				/* not ready while capture buffers are scarce */
				if ((ctx->cap_pool.in < pbi->used_buf_num) &&
				v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
				pbi->run_ready_min_buf_num)
					ret = 0;
			} else {
				if ((pbi->res_ch_flag == 1) &&
					((ctx->state <= AML_STATE_INIT) ||
					(ctx->state >= AML_STATE_FLUSHING)))
					ret = 0;
			}
		} else if (ctx->cap_pool.in < ctx->dpb_size) {
			if (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
				pbi->run_ready_min_buf_num)
				ret = 0;
		}
	}

	if (ret)
		not_run_ready[pbi->index] = 0;
	else
		not_run_ready[pbi->index]++;

	vp9_print(pbi,
		PRINT_FLAG_VDEC_DETAIL, "%s mask %lx=>%lx\r\n",
		__func__, mask, ret);
	return ret;
#endif
}
10163 | |
10164 | static void vp9_frame_mode_pts_save(struct VP9Decoder_s *pbi) |
10165 | { |
10166 | int i = 0; |
10167 | |
10168 | if (pbi->chunk == NULL) |
10169 | return; |
10170 | vp9_print(pbi, VP9_DEBUG_OUT_PTS, |
10171 | "run front: pts %d, pts64 %lld\n", pbi->chunk->pts, pbi->chunk->pts64); |
10172 | for (i = (FRAME_BUFFERS - 1); i > 0; i--) { |
10173 | pbi->frame_mode_pts_save[i] = pbi->frame_mode_pts_save[i - 1]; |
10174 | pbi->frame_mode_pts64_save[i] = pbi->frame_mode_pts64_save[i - 1]; |
10175 | } |
10176 | pbi->frame_mode_pts_save[0] = pbi->chunk->pts; |
10177 | pbi->frame_mode_pts64_save[0] = pbi->chunk->pts64; |
10178 | } |
10179 | |
/*
 * run_front() - start one front-end decode pass on the HEVC core.
 *
 * Resets the core, pulls the next input chunk (rescheduling the work
 * with DEC_RESULT_AGAIN on underrun), saves frame-mode timestamps,
 * optionally dumps input bytes for debug, reloads the microcode if a
 * different codec ran last, restores the HW protocol context, programs
 * the decode size, and finally arms the watchdog timer and starts the
 * core.
 */
static void run_front(struct vdec_s *vdec)
{
	struct VP9Decoder_s *pbi =
		(struct VP9Decoder_s *)vdec->private;
	int ret, size;

	run_count[pbi->index]++;
	/* pbi->chunk = vdec_prepare_input(vdec); */
#if (!defined SUPPORT_FB_DECODING)
	hevc_reset_core(vdec);
#elif (defined FB_DECODING_TEST_SCHEDULE)
	hevc_reset_core(vdec);
#else
	if (pbi->used_stage_buf_num > 0)
		fb_reset_core(vdec, HW_MASK_FRONT);
	else
		hevc_reset_core(vdec);
#endif

	size = vdec_prepare_input(vdec, &pbi->chunk);
	if (size < 0) {
		input_empty[pbi->index]++;

		pbi->dec_result = DEC_RESULT_AGAIN;

		vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL,
			"ammvdec_vh265: Insufficient data\n");

		vdec_schedule_work(&pbi->work);
		return;
	}

	input_empty[pbi->index] = 0;
	pbi->dec_result = DEC_RESULT_NONE;
	pbi->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);

	vp9_frame_mode_pts_save(pbi);

	if (debug & PRINT_FLAG_VDEC_STATUS) {
		int ii;
		vp9_print(pbi, 0,
			"%s (%d): size 0x%x (0x%x 0x%x) sum 0x%x (%x %x %x %x %x) bytes 0x%x",
			__func__,
			pbi->frame_count, size,
			pbi->chunk ? pbi->chunk->size : 0,
			pbi->chunk ? pbi->chunk->offset : 0,
			pbi->chunk ? ((vdec_frame_based(vdec) &&
			(debug & PRINT_FLAG_VDEC_STATUS)) ?
			get_data_check_sum(pbi, size) : 0) : 0,
		READ_VREG(HEVC_STREAM_START_ADDR),
		READ_VREG(HEVC_STREAM_END_ADDR),
		READ_VREG(HEVC_STREAM_LEVEL),
		READ_VREG(HEVC_STREAM_WR_PTR),
		READ_VREG(HEVC_STREAM_RD_PTR),
		pbi->start_shift_bytes);
		if (vdec_frame_based(vdec) && pbi->chunk) {
			u8 *data = NULL;

			if (!pbi->chunk->block->is_mapped)
				data = codec_mm_vmap(pbi->chunk->block->start +
					pbi->chunk->offset, 8);
			else
				data = ((u8 *)pbi->chunk->block->start_virt) +
					pbi->chunk->offset;

			vp9_print_cont(pbi, 0, "data adr %p:",
				data);
			for (ii = 0; ii < 8; ii++)
				vp9_print_cont(pbi, 0, "%02x ",
					data[ii]);

			if (!pbi->chunk->block->is_mapped)
				codec_mm_unmap_phyaddr(data);
		}
		vp9_print_cont(pbi, 0, "\r\n");
	}
	if (vdec->mc_loaded) {
	/*firmware have load before,
	  and not changes to another.
	  ignore reload.
	*/
	} else {
		ret = amhevc_loadmc_ex(VFORMAT_VP9, NULL, pbi->fw->data);
		if (ret < 0) {
			amhevc_disable();
			vp9_print(pbi, PRINT_FLAG_ERROR,
				"VP9: the %s fw loading failed, err: %x\n",
				tee_enabled() ? "TEE" : "local", ret);
			pbi->dec_result = DEC_RESULT_FORCE_EXIT;
			vdec_schedule_work(&pbi->work);
			return;
		}
		vdec->mc_loaded = 1;
		vdec->mc_type = VFORMAT_VP9;
	}

	if (vp9_hw_ctx_restore(pbi) < 0) {
		vdec_schedule_work(&pbi->work);
		return;
	}

	vdec_enable_input(vdec);

	WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);

	if (vdec_frame_based(vdec)) {
		if (debug & PRINT_FLAG_VDEC_DATA)
			dump_data(pbi, pbi->chunk->size);

		WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
		/* include the FIFO-alignment padding in the decode size */
		size = pbi->chunk->size +
			(pbi->chunk->offset & (VDEC_FIFO_ALIGN - 1));
		if (vdec->mvfrm)
			vdec->mvfrm->frame_size = pbi->chunk->size;
	}
	WRITE_VREG(HEVC_DECODE_SIZE, size);
	WRITE_VREG(HEVC_DECODE_COUNT, pbi->slice_idx);
	pbi->init_flag = 1;

	vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL,
		"%s: start hevc (%x %x %x)\n",
		__func__,
		READ_VREG(HEVC_DEC_STATUS_REG),
		READ_VREG(HEVC_MPC_E),
		READ_VREG(HEVC_MPSR));

	start_process_time(pbi);
	mod_timer(&pbi->timer, jiffies);
	pbi->stat |= STAT_TIMER_ARM;
	pbi->stat |= STAT_ISR_REG;
	amhevc_start();
	pbi->stat |= STAT_VDEC_RUN;
}
10313 | |
10314 | #ifdef SUPPORT_FB_DECODING |
/*
 * Program the motion-prediction (mpred) hardware for the frame whose
 * uncompressed header was just parsed by the stage-1 front end
 * (dual-core "fb" decoding path only).
 *
 * Decides whether the previous frame's motion vectors may be reused,
 * programs the MV read/write buffer windows accordingly, and rotates
 * the per-frame MV-buffer bookkeeping (current -> pre -> pre_pre).
 */
static void mpred_process(struct VP9Decoder_s *pbi)
{
	union param_u *params = &pbi->s1_param;
	/*
	 * Previous-frame MVs are reusable only when: the stream is not
	 * error-resilient, dimensions match the previous stage-1 frame,
	 * the previous frame was not intra-only and was shown, and this
	 * frame is not a key frame.
	 */
	unsigned char use_prev_frame_mvs =
		!params->p.error_resilient_mode &&
		params->p.width == pbi->s1_width &&
		params->p.height == pbi->s1_height &&
		!pbi->s1_intra_only &&
		pbi->s1_last_show_frame &&
		(pbi->s1_frame_type != KEY_FRAME);
	/* Record this frame's geometry/type for the next invocation. */
	pbi->s1_width = params->p.width;
	pbi->s1_height = params->p.height;
	pbi->s1_frame_type = params->p.frame_type;
	pbi->s1_intra_only =
		(params->p.show_frame ||
		params->p.show_existing_frame)
		? 0 : params->p.intra_only;
	/* Inter frames get full mpred setup; key/intra frames clear it. */
	if ((pbi->s1_frame_type != KEY_FRAME)
		&& (!pbi->s1_intra_only)) {
		unsigned int data32;
		int mpred_mv_rd_end_addr;

		/* End of the previous frame's MV region (read window). */
		mpred_mv_rd_end_addr =
			pbi->s1_mpred_mv_wr_start_addr_pre
			+ (pbi->lcu_total * MV_MEM_UNIT);

		WRITE_VREG(HEVC_MPRED_CTRL3, 0x24122412);
		WRITE_VREG(HEVC_MPRED_ABV_START_ADDR,
			pbi->work_space_buf->
			mpred_above.buf_start);

		/* Read-modify-write: only bit 6 (use_prev_frame_mvs). */
		data32 = READ_VREG(HEVC_MPRED_CTRL4);

		data32 &= (~(1 << 6));
		data32 |= (use_prev_frame_mvs << 6);
		WRITE_VREG(HEVC_MPRED_CTRL4, data32);

		/* Write side: this frame's MV buffer. */
		WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
			pbi->s1_mpred_mv_wr_start_addr);
		WRITE_VREG(HEVC_MPRED_MV_WPTR,
			pbi->s1_mpred_mv_wr_start_addr);

		/* Read side: previous frame's MV buffer. */
		WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR,
			pbi->s1_mpred_mv_wr_start_addr_pre);
		WRITE_VREG(HEVC_MPRED_MV_RPTR,
			pbi->s1_mpred_mv_wr_start_addr_pre);

		WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR,
			mpred_mv_rd_end_addr);

	} else
		clear_mpred_hw(pbi);

	/*
	 * Rotate MV-buffer ownership unless this frame merely re-shows an
	 * existing one. The oldest buffer (pre_pre) is returned to the
	 * pool; MV_BUFFER_NUM is the "no buffer" sentinel.
	 */
	if (!params->p.show_existing_frame) {
		pbi->s1_mpred_mv_wr_start_addr_pre =
			pbi->s1_mpred_mv_wr_start_addr;
		pbi->s1_last_show_frame =
			params->p.show_frame;
		if (pbi->s1_mv_buf_index_pre_pre != MV_BUFFER_NUM)
			put_mv_buf(pbi, &pbi->s1_mv_buf_index_pre_pre);
		pbi->s1_mv_buf_index_pre_pre =
			pbi->s1_mv_buf_index_pre;
		pbi->s1_mv_buf_index_pre = pbi->s1_mv_buf_index;
	} else
		/* show_existing_frame: no new MVs; recycle immediately. */
		put_mv_buf(pbi, &pbi->s1_mv_buf_index);
}
10381 | |
10382 | static void vp9_s1_work(struct work_struct *s1_work) |
10383 | { |
10384 | struct VP9Decoder_s *pbi = container_of(s1_work, |
10385 | struct VP9Decoder_s, s1_work); |
10386 | vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL, |
10387 | "%s dec_s1_result %d\n", |
10388 | __func__, |
10389 | pbi->dec_s1_result); |
10390 | |
10391 | #ifdef FB_DECODING_TEST_SCHEDULE |
10392 | if (pbi->dec_s1_result == |
10393 | DEC_S1_RESULT_TEST_TRIGGER_DONE) { |
10394 | pbi->s1_test_cmd = TEST_SET_PIC_DONE; |
10395 | WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1); |
10396 | } |
10397 | #endif |
10398 | if (pbi->dec_s1_result == DEC_S1_RESULT_DONE || |
10399 | pbi->dec_s1_result == DEC_S1_RESULT_FORCE_EXIT) { |
10400 | |
10401 | vdec_core_finish_run(hw_to_vdec(pbi), |
10402 | CORE_MASK_HEVC_FRONT); |
10403 | |
10404 | trigger_schedule(pbi); |
10405 | /*pbi->dec_s1_result = DEC_S1_RESULT_NONE;*/ |
10406 | } |
10407 | |
10408 | } |
10409 | |
/*
 * Stage-2 (back-end) run for dual-core fb decoding: reset and
 * re-initialise the back core, recycle MMU buffers, post-process the
 * previous frame, then copy the stage-1 RPM parameters into vp9_param
 * and kick the back-end decode via continue_decoding().
 */
static void run_back(struct vdec_s *vdec)
{
	struct VP9Decoder_s *pbi =
		(struct VP9Decoder_s *)vdec->private;
	int i;
	run2_count[pbi->index]++;
	if (debug & PRINT_FLAG_VDEC_STATUS) {
		vp9_print(pbi, 0,
			"%s", __func__);
	}
	/* Guard: back-end busy until this run completes. */
	pbi->run2_busy = 1;
#ifndef FB_DECODING_TEST_SCHEDULE
	fb_reset_core(vdec, HW_MASK_BACK);

	vvp9_prot_init(pbi, HW_MASK_BACK);
#endif
	vp9_recycle_mmu_buf_tail(pbi);

	if (pbi->frame_count > 0)
		vp9_bufmgr_postproc(pbi);

	if (get_s2_buf(pbi) >= 0) {
		/*
		 * Copy the RPM parameter block captured by stage 1.
		 * Each group of 4 words is stored in reverse order in
		 * the s2 buffer, hence the [i + 3 - ii] swizzle.
		 */
		for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
			int ii;
			for (ii = 0; ii < 4; ii++)
				pbi->vp9_param.l.data[i + ii] =
					pbi->s2_buf->rpm[i + 3 - ii];
		}
#ifndef FB_DECODING_TEST_SCHEDULE
		/* Point the back core at this s2 buffer's MMU map. */
		WRITE_VREG(HEVC_ASSIST_FBD_MMU_MAP_ADDR,
			pbi->stage_mmu_map_phy_addr +
			pbi->s2_buf->index * STAGE_MMU_MAP_SIZE);
#endif
		continue_decoding(pbi);
	}
	pbi->run2_busy = 0;
}
10447 | #endif |
10448 | |
10449 | static void run(struct vdec_s *vdec, unsigned long mask, |
10450 | void (*callback)(struct vdec_s *, void *), void *arg) |
10451 | { |
10452 | struct VP9Decoder_s *pbi = |
10453 | (struct VP9Decoder_s *)vdec->private; |
10454 | |
10455 | vp9_print(pbi, |
10456 | PRINT_FLAG_VDEC_DETAIL, "%s mask %lx\r\n", |
10457 | __func__, mask); |
10458 | |
10459 | if (vdec->mvfrm) |
10460 | vdec->mvfrm->hw_decode_start = local_clock(); |
10461 | run_count[pbi->index]++; |
10462 | pbi->vdec_cb_arg = arg; |
10463 | pbi->vdec_cb = callback; |
10464 | pbi->one_package_frame_cnt = 0; |
10465 | #ifdef SUPPORT_FB_DECODING |
10466 | if ((mask & CORE_MASK_HEVC) || |
10467 | (mask & CORE_MASK_HEVC_FRONT)) |
10468 | run_front(vdec); |
10469 | |
10470 | if ((pbi->used_stage_buf_num > 0) |
10471 | && (mask & CORE_MASK_HEVC_BACK)) |
10472 | run_back(vdec); |
10473 | #else |
10474 | run_front(vdec); |
10475 | #endif |
10476 | |
10477 | } |
10478 | |
10479 | static void init_frame_bufs(struct VP9Decoder_s *pbi) |
10480 | { |
10481 | struct vdec_s *vdec = hw_to_vdec(pbi); |
10482 | struct VP9_Common_s *const cm = &pbi->common; |
10483 | struct RefCntBuffer_s *const frame_bufs = cm->buffer_pool->frame_bufs; |
10484 | int i; |
10485 | |
10486 | for (i = 0; i < pbi->used_buf_num; ++i) { |
10487 | frame_bufs[i].ref_count = 0; |
10488 | frame_bufs[i].buf.vf_ref = 0; |
10489 | frame_bufs[i].buf.decode_idx = 0; |
10490 | frame_bufs[i].buf.cma_alloc_addr = 0; |
10491 | frame_bufs[i].buf.index = i; |
10492 | frame_bufs[i].buf.vframe_bound = 0; |
10493 | } |
10494 | |
10495 | if (vdec->parallel_dec == 1) { |
10496 | for (i = 0; i < FRAME_BUFFERS; i++) { |
10497 | vdec->free_canvas_ex |
10498 | (pbi->common.buffer_pool->frame_bufs[i].buf.y_canvas_index, |
10499 | vdec->id); |
10500 | vdec->free_canvas_ex |
10501 | (pbi->common.buffer_pool->frame_bufs[i].buf.uv_canvas_index, |
10502 | vdec->id); |
10503 | } |
10504 | } |
10505 | } |
10506 | |
/*
 * vdec "reset" callback: stop all in-flight decode activity and rebuild
 * the instance's software state so decoding can restart cleanly.
 * Order matters: pending work is flushed first, then the HEVC core is
 * stopped and the watchdog timer disarmed, and only then is local state
 * torn down and re-initialised.
 */
static void reset(struct vdec_s *vdec)
{
	struct VP9Decoder_s *pbi =
		(struct VP9Decoder_s *)vdec->private;

	/* Flush the work handler before touching shared state. */
	cancel_work_sync(&pbi->work);
	if (pbi->stat & STAT_VDEC_RUN) {
		amhevc_stop();
		pbi->stat &= ~STAT_VDEC_RUN;
	}

	if (pbi->stat & STAT_TIMER_ARM) {
		del_timer_sync(&pbi->timer);
		pbi->stat &= ~STAT_TIMER_ARM;
	}
	pbi->dec_result = DEC_RESULT_NONE;
	reset_process_time(pbi);
	/* Rebuild local buffers/state; failure is logged, not fatal here. */
	vp9_local_uninit(pbi);
	if (vvp9_local_init(pbi) < 0)
		vp9_print(pbi, 0, "%s local_init failed \r\n", __func__);
	init_frame_bufs(pbi);

	pbi->eos = 0;

	vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
}
10533 | |
10534 | static irqreturn_t vp9_irq_cb(struct vdec_s *vdec, int irq) |
10535 | { |
10536 | struct VP9Decoder_s *pbi = |
10537 | (struct VP9Decoder_s *)vdec->private; |
10538 | return vvp9_isr(0, pbi); |
10539 | } |
10540 | |
10541 | static irqreturn_t vp9_threaded_irq_cb(struct vdec_s *vdec, int irq) |
10542 | { |
10543 | struct VP9Decoder_s *pbi = |
10544 | (struct VP9Decoder_s *)vdec->private; |
10545 | return vvp9_isr_thread_fn(0, pbi); |
10546 | } |
10547 | |
/*
 * vdec "dump_state" callback: print a diagnostic snapshot of this
 * instance — geometry, counters, queue levels, MV buffers, key HEVC
 * registers, and (optionally) a hex dump of the current input chunk.
 * Output goes through vp9_print()/vp9_print_cont(); format strings are
 * part of the debug interface and must not change.
 */
static void vp9_dump_state(struct vdec_s *vdec)
{
	struct VP9Decoder_s *pbi =
		(struct VP9Decoder_s *)vdec->private;
	struct VP9_Common_s *const cm = &pbi->common;
	int i;
	vp9_print(pbi, 0, "====== %s\n", __func__);

	vp9_print(pbi, 0,
		"width/height (%d/%d), used_buf_num %d\n",
		cm->width,
		cm->height,
		pbi->used_buf_num
		);

	vp9_print(pbi, 0,
		"is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d low_latency %d no_head %d \n",
		input_frame_based(vdec),
		pbi->eos,
		pbi->dec_result,
		decode_frame_count[pbi->index],
		display_frame_count[pbi->index],
		run_count[pbi->index],
		not_run_ready[pbi->index],
		input_empty[pbi->index],
		pbi->low_latency_flag,
		pbi->no_head
		);

	/* Downstream receiver state, if one is attached. */
	if (vf_get_receiver(vdec->vf_provider_name)) {
		enum receviver_start_e state =
			vf_notify_receiver(vdec->vf_provider_name,
				VFRAME_EVENT_PROVIDER_QUREY_STATE,
				NULL);
		vp9_print(pbi, 0,
			"\nreceiver(%s) state %d\n",
			vdec->vf_provider_name,
			state);
	}

	vp9_print(pbi, 0,
		"%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), free_buf_count %d (min %d for run_ready)\n",
		__func__,
		kfifo_len(&pbi->newframe_q),
		VF_POOL_SIZE,
		kfifo_len(&pbi->display_q),
		VF_POOL_SIZE,
		pbi->vf_pre_count,
		pbi->vf_get_count,
		pbi->vf_put_count,
		get_free_buf_count(pbi),
		pbi->run_ready_min_buf_num
		);

	dump_pic_list(pbi);

	/* Motion-vector buffer pool usage. */
	for (i = 0; i < MAX_BUF_NUM; i++) {
		vp9_print(pbi, 0,
			"mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
			i,
			pbi->m_mv_BUF[i].start_adr,
			pbi->m_mv_BUF[i].size,
			pbi->m_mv_BUF[i].used_flag);
	}

	/* HEVC core / stream FIFO register snapshot. */
	vp9_print(pbi, 0,
		"HEVC_DEC_STATUS_REG=0x%x\n",
		READ_VREG(HEVC_DEC_STATUS_REG));
	vp9_print(pbi, 0,
		"HEVC_MPC_E=0x%x\n",
		READ_VREG(HEVC_MPC_E));
	vp9_print(pbi, 0,
		"DECODE_MODE=0x%x\n",
		READ_VREG(DECODE_MODE));
	vp9_print(pbi, 0,
		"NAL_SEARCH_CTL=0x%x\n",
		READ_VREG(NAL_SEARCH_CTL));
	vp9_print(pbi, 0,
		"HEVC_PARSER_LCU_START=0x%x\n",
		READ_VREG(HEVC_PARSER_LCU_START));
	vp9_print(pbi, 0,
		"HEVC_DECODE_SIZE=0x%x\n",
		READ_VREG(HEVC_DECODE_SIZE));
	vp9_print(pbi, 0,
		"HEVC_SHIFT_BYTE_COUNT=0x%x\n",
		READ_VREG(HEVC_SHIFT_BYTE_COUNT));
	vp9_print(pbi, 0,
		"HEVC_STREAM_START_ADDR=0x%x\n",
		READ_VREG(HEVC_STREAM_START_ADDR));
	vp9_print(pbi, 0,
		"HEVC_STREAM_END_ADDR=0x%x\n",
		READ_VREG(HEVC_STREAM_END_ADDR));
	vp9_print(pbi, 0,
		"HEVC_STREAM_LEVEL=0x%x\n",
		READ_VREG(HEVC_STREAM_LEVEL));
	vp9_print(pbi, 0,
		"HEVC_STREAM_WR_PTR=0x%x\n",
		READ_VREG(HEVC_STREAM_WR_PTR));
	vp9_print(pbi, 0,
		"HEVC_STREAM_RD_PTR=0x%x\n",
		READ_VREG(HEVC_STREAM_RD_PTR));
	vp9_print(pbi, 0,
		"PARSER_VIDEO_RP=0x%x\n",
		STBUF_READ(&vdec->vbuf, get_rp));
	vp9_print(pbi, 0,
		"PARSER_VIDEO_WP=0x%x\n",
		STBUF_READ(&vdec->vbuf, get_wp));

	/*
	 * Optional hex dump of the current frame-based input chunk.
	 * Non-mapped blocks are vmapped for the duration of the dump
	 * and unmapped again afterwards.
	 */
	if (input_frame_based(vdec) &&
		(debug & PRINT_FLAG_VDEC_DATA)
		) {
		int jj;
		if (pbi->chunk && pbi->chunk->block &&
			pbi->chunk->size > 0) {
			u8 *data = NULL;

			if (!pbi->chunk->block->is_mapped)
				data = codec_mm_vmap(
					pbi->chunk->block->start +
					pbi->chunk->offset,
					pbi->chunk->size);
			else
				data = ((u8 *)pbi->chunk->block->start_virt)
					+ pbi->chunk->offset;
			vp9_print(pbi, 0,
				"frame data size 0x%x\n",
				pbi->chunk->size);
			for (jj = 0; jj < pbi->chunk->size; jj++) {
				if ((jj & 0xf) == 0)
					vp9_print(pbi, 0,
						"%06x:", jj);
				vp9_print_cont(pbi, 0,
					"%02x ", data[jj]);
				if (((jj + 1) & 0xf) == 0)
					vp9_print_cont(pbi, 0,
						"\n");
			}

			if (!pbi->chunk->block->is_mapped)
				codec_mm_unmap_phyaddr(data);
		}
	}

}
10692 | |
10693 | static int ammvdec_vp9_probe(struct platform_device *pdev) |
10694 | { |
10695 | struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data; |
10696 | int ret; |
10697 | int config_val; |
10698 | struct vframe_content_light_level_s content_light_level; |
10699 | struct vframe_master_display_colour_s vf_dp; |
10700 | |
10701 | struct BUF_s BUF[MAX_BUF_NUM]; |
10702 | struct VP9Decoder_s *pbi = NULL; |
10703 | pr_debug("%s\n", __func__); |
10704 | |
10705 | if (pdata == NULL) { |
10706 | pr_info("\nammvdec_vp9 memory resource undefined.\n"); |
10707 | return -EFAULT; |
10708 | } |
10709 | /*pbi = (struct VP9Decoder_s *)devm_kzalloc(&pdev->dev, |
10710 | sizeof(struct VP9Decoder_s), GFP_KERNEL);*/ |
10711 | memset(&vf_dp, 0, sizeof(struct vframe_master_display_colour_s)); |
10712 | pbi = vmalloc(sizeof(struct VP9Decoder_s)); |
10713 | if (pbi == NULL) { |
10714 | pr_info("\nammvdec_vp9 device data allocation failed\n"); |
10715 | return -ENOMEM; |
10716 | } |
10717 | memset(pbi, 0, sizeof(struct VP9Decoder_s)); |
10718 | |
10719 | /* the ctx from v4l2 driver. */ |
10720 | pbi->v4l2_ctx = pdata->private; |
10721 | |
10722 | pdata->private = pbi; |
10723 | pdata->dec_status = vvp9_dec_status; |
10724 | /* pdata->set_trickmode = set_trickmode; */ |
10725 | pdata->run_ready = run_ready; |
10726 | pdata->run = run; |
10727 | pdata->reset = reset; |
10728 | pdata->irq_handler = vp9_irq_cb; |
10729 | pdata->threaded_irq_handler = vp9_threaded_irq_cb; |
10730 | pdata->dump_state = vp9_dump_state; |
10731 | |
10732 | memcpy(&BUF[0], &pbi->m_BUF[0], sizeof(struct BUF_s) * MAX_BUF_NUM); |
10733 | memcpy(&pbi->m_BUF[0], &BUF[0], sizeof(struct BUF_s) * MAX_BUF_NUM); |
10734 | |
10735 | pbi->index = pdev->id; |
10736 | |
10737 | if (pdata->use_vfm_path) |
10738 | snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE, |
10739 | VFM_DEC_PROVIDER_NAME); |
10740 | else |
10741 | snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE, |
10742 | MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff); |
10743 | |
10744 | vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name, |
10745 | &vvp9_vf_provider, pbi); |
10746 | |
10747 | pbi->provider_name = pdata->vf_provider_name; |
10748 | platform_set_drvdata(pdev, pdata); |
10749 | |
10750 | pbi->platform_dev = pdev; |
10751 | pbi->video_signal_type = 0; |
10752 | pbi->m_ins_flag = 1; |
10753 | if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_TXLX) |
10754 | pbi->stat |= VP9_TRIGGER_FRAME_ENABLE; |
10755 | |
10756 | if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) { |
10757 | pbi->max_pic_w = 8192; |
10758 | pbi->max_pic_h = 4608; |
10759 | } else { |
10760 | pbi->max_pic_w = 4096; |
10761 | pbi->max_pic_h = 2304; |
10762 | } |
10763 | #if 1 |
10764 | if ((debug & IGNORE_PARAM_FROM_CONFIG) == 0 && |
10765 | pdata->config_len) { |
10766 | #ifdef MULTI_INSTANCE_SUPPORT |
10767 | int vp9_buf_width = 0; |
10768 | int vp9_buf_height = 0; |
10769 | /*use ptr config for doubel_write_mode, etc*/ |
10770 | vp9_print(pbi, 0, "pdata->config=%s\n", pdata->config); |
10771 | if (get_config_int(pdata->config, "vp9_double_write_mode", |
10772 | &config_val) == 0) |
10773 | pbi->double_write_mode = config_val; |
10774 | else |
10775 | pbi->double_write_mode = double_write_mode; |
10776 | |
10777 | if (get_config_int(pdata->config, "save_buffer_mode", |
10778 | &config_val) == 0) |
10779 | pbi->save_buffer_mode = config_val; |
10780 | else |
10781 | pbi->save_buffer_mode = 0; |
10782 | if (get_config_int(pdata->config, "vp9_buf_width", |
10783 | &config_val) == 0) { |
10784 | vp9_buf_width = config_val; |
10785 | } |
10786 | if (get_config_int(pdata->config, "vp9_buf_height", |
10787 | &config_val) == 0) { |
10788 | vp9_buf_height = config_val; |
10789 | } |
10790 | |
10791 | if (get_config_int(pdata->config, "no_head", |
10792 | &config_val) == 0) |
10793 | pbi->no_head = config_val; |
10794 | else |
10795 | pbi->no_head = no_head; |
10796 | |
10797 | /*use ptr config for max_pic_w, etc*/ |
10798 | if (get_config_int(pdata->config, "vp9_max_pic_w", |
10799 | &config_val) == 0) { |
10800 | pbi->max_pic_w = config_val; |
10801 | } |
10802 | if (get_config_int(pdata->config, "vp9_max_pic_h", |
10803 | &config_val) == 0) { |
10804 | pbi->max_pic_h = config_val; |
10805 | } |
10806 | if ((pbi->max_pic_w * pbi->max_pic_h) |
10807 | < (vp9_buf_width * vp9_buf_height)) { |
10808 | pbi->max_pic_w = vp9_buf_width; |
10809 | pbi->max_pic_h = vp9_buf_height; |
10810 | vp9_print(pbi, 0, "use buf resolution\n"); |
10811 | } |
10812 | |
10813 | if (get_config_int(pdata->config, "sidebind_type", |
10814 | &config_val) == 0) |
10815 | pbi->sidebind_type = config_val; |
10816 | |
10817 | if (get_config_int(pdata->config, "sidebind_channel_id", |
10818 | &config_val) == 0) |
10819 | pbi->sidebind_channel_id = config_val; |
10820 | |
10821 | if (get_config_int(pdata->config, |
10822 | "parm_v4l_codec_enable", |
10823 | &config_val) == 0) |
10824 | pbi->is_used_v4l = config_val; |
10825 | |
10826 | if (get_config_int(pdata->config, |
10827 | "parm_v4l_buffer_margin", |
10828 | &config_val) == 0) |
10829 | pbi->dynamic_buf_num_margin = config_val; |
10830 | |
10831 | if (get_config_int(pdata->config, |
10832 | "parm_v4l_canvas_mem_mode", |
10833 | &config_val) == 0) |
10834 | pbi->mem_map_mode = config_val; |
10835 | |
10836 | if (get_config_int(pdata->config, |
10837 | "parm_enable_fence", |
10838 | &config_val) == 0) |
10839 | pbi->enable_fence = config_val; |
10840 | |
10841 | if (get_config_int(pdata->config, |
10842 | "parm_fence_usage", |
10843 | &config_val) == 0) |
10844 | pbi->fence_usage = config_val; |
10845 | #endif |
10846 | if (get_config_int(pdata->config, "HDRStaticInfo", |
10847 | &vf_dp.present_flag) == 0 |
10848 | && vf_dp.present_flag == 1) { |
10849 | get_config_int(pdata->config, "mG.x", |
10850 | &vf_dp.primaries[0][0]); |
10851 | get_config_int(pdata->config, "mG.y", |
10852 | &vf_dp.primaries[0][1]); |
10853 | get_config_int(pdata->config, "mB.x", |
10854 | &vf_dp.primaries[1][0]); |
10855 | get_config_int(pdata->config, "mB.y", |
10856 | &vf_dp.primaries[1][1]); |
10857 | get_config_int(pdata->config, "mR.x", |
10858 | &vf_dp.primaries[2][0]); |
10859 | get_config_int(pdata->config, "mR.y", |
10860 | &vf_dp.primaries[2][1]); |
10861 | get_config_int(pdata->config, "mW.x", |
10862 | &vf_dp.white_point[0]); |
10863 | get_config_int(pdata->config, "mW.y", |
10864 | &vf_dp.white_point[1]); |
10865 | get_config_int(pdata->config, "mMaxDL", |
10866 | &vf_dp.luminance[0]); |
10867 | get_config_int(pdata->config, "mMinDL", |
10868 | &vf_dp.luminance[1]); |
10869 | vf_dp.content_light_level.present_flag = 1; |
10870 | get_config_int(pdata->config, "mMaxCLL", |
10871 | &content_light_level.max_content); |
10872 | get_config_int(pdata->config, "mMaxFALL", |
10873 | &content_light_level.max_pic_average); |
10874 | vf_dp.content_light_level = content_light_level; |
10875 | pbi->video_signal_type = (1 << 29) |
10876 | | (5 << 26) /* unspecified */ |
10877 | | (0 << 25) /* limit */ |
10878 | | (1 << 24) /* color available */ |
10879 | | (9 << 16) /* 2020 */ |
10880 | | (16 << 8) /* 2084 */ |
10881 | | (9 << 0); /* 2020 */ |
10882 | } |
10883 | pbi->vf_dp = vf_dp; |
10884 | } else |
10885 | #endif |
10886 | { |
10887 | /*pbi->vvp9_amstream_dec_info.width = 0; |
10888 | pbi->vvp9_amstream_dec_info.height = 0; |
10889 | pbi->vvp9_amstream_dec_info.rate = 30;*/ |
10890 | pbi->double_write_mode = double_write_mode; |
10891 | } |
10892 | |
10893 | if (no_head & 0x10) { |
10894 | pbi->no_head = (no_head & 0xf); |
10895 | } |
10896 | |
10897 | if (!pbi->is_used_v4l) { |
10898 | pbi->mem_map_mode = mem_map_mode; |
10899 | } |
10900 | pbi->run_ready_min_buf_num = run_ready_min_buf_num; |
10901 | if (is_oversize(pbi->max_pic_w, pbi->max_pic_h)) { |
10902 | pr_err("over size: %dx%d, probe failed\n", |
10903 | pbi->max_pic_w, pbi->max_pic_h); |
10904 | return -1; |
10905 | } |
10906 | |
10907 | if (force_config_fence) { |
10908 | pbi->enable_fence = true; |
10909 | pbi->fence_usage = |
10910 | (force_config_fence >> 4) & 0xf; |
10911 | if (force_config_fence & 0x2) |
10912 | pbi->enable_fence = false; |
10913 | vp9_print(pbi, 0, "enable fence: %d, fence usage: %d\n", |
10914 | pbi->enable_fence, pbi->fence_usage); |
10915 | } |
10916 | |
10917 | if (pbi->enable_fence) |
10918 | pdata->sync.usage = pbi->fence_usage; |
10919 | |
10920 | pbi->mmu_enable = 1; |
10921 | video_signal_type = pbi->video_signal_type; |
10922 | |
10923 | if (pdata->sys_info) { |
10924 | pbi->vvp9_amstream_dec_info = *pdata->sys_info; |
10925 | } else { |
10926 | pbi->vvp9_amstream_dec_info.width = 0; |
10927 | pbi->vvp9_amstream_dec_info.height = 0; |
10928 | pbi->vvp9_amstream_dec_info.rate = 30; |
10929 | } |
10930 | pbi->low_latency_flag = 1; |
10931 | |
10932 | vp9_print(pbi, 0, |
10933 | "no_head %d low_latency %d\n", |
10934 | pbi->no_head, pbi->low_latency_flag); |
10935 | #if 0 |
10936 | pbi->buf_start = pdata->mem_start; |
10937 | pbi->buf_size = pdata->mem_end - pdata->mem_start + 1; |
10938 | #else |
10939 | if (amvdec_vp9_mmu_init(pbi) < 0) { |
10940 | pr_err("vp9 alloc bmmu box failed!!\n"); |
10941 | /* devm_kfree(&pdev->dev, (void *)pbi); */ |
10942 | vfree((void *)pbi); |
10943 | pdata->dec_status = NULL; |
10944 | return -1; |
10945 | } |
10946 | |
10947 | pbi->cma_alloc_count = PAGE_ALIGN(work_buf_size) / PAGE_SIZE; |
10948 | ret = decoder_bmmu_box_alloc_buf_phy(pbi->bmmu_box, WORK_SPACE_BUF_ID, |
10949 | pbi->cma_alloc_count * PAGE_SIZE, DRIVER_NAME, |
10950 | &pbi->cma_alloc_addr); |
10951 | if (ret < 0) { |
10952 | uninit_mmu_buffers(pbi); |
10953 | /* devm_kfree(&pdev->dev, (void *)pbi); */ |
10954 | vfree((void *)pbi); |
10955 | pdata->dec_status = NULL; |
10956 | return ret; |
10957 | } |
10958 | pbi->buf_start = pbi->cma_alloc_addr; |
10959 | pbi->buf_size = work_buf_size; |
10960 | #endif |
10961 | |
10962 | pbi->init_flag = 0; |
10963 | pbi->first_sc_checked = 0; |
10964 | pbi->fatal_error = 0; |
10965 | pbi->show_frame_num = 0; |
10966 | |
10967 | if (debug) { |
10968 | pr_info("===VP9 decoder mem resource 0x%lx size 0x%x\n", |
10969 | pbi->buf_start, |
10970 | pbi->buf_size); |
10971 | } |
10972 | |
10973 | pbi->cma_dev = pdata->cma_dev; |
10974 | if (vvp9_init(pdata) < 0) { |
10975 | pr_info("\namvdec_vp9 init failed.\n"); |
10976 | vp9_local_uninit(pbi); |
10977 | uninit_mmu_buffers(pbi); |
10978 | /* devm_kfree(&pdev->dev, (void *)pbi); */ |
10979 | vfree((void *)pbi); |
10980 | pdata->dec_status = NULL; |
10981 | return -ENODEV; |
10982 | } |
10983 | vdec_set_prepare_level(pdata, start_decode_buf_level); |
10984 | hevc_source_changed(VFORMAT_VP9, |
10985 | 4096, 2048, 60); |
10986 | #ifdef SUPPORT_FB_DECODING |
10987 | if (pbi->used_stage_buf_num > 0) |
10988 | vdec_core_request(pdata, |
10989 | CORE_MASK_HEVC_FRONT | CORE_MASK_HEVC_BACK); |
10990 | else |
10991 | vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC |
10992 | | CORE_MASK_HEVC_FRONT | CORE_MASK_HEVC_BACK |
10993 | | CORE_MASK_COMBINE); |
10994 | #else |
10995 | if (pdata->parallel_dec == 1) |
10996 | vdec_core_request(pdata, CORE_MASK_HEVC); |
10997 | else |
10998 | vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC |
10999 | | CORE_MASK_COMBINE); |
11000 | #endif |
11001 | pbi->pic_list_init_done2 = true; |
11002 | |
11003 | if (pbi->enable_fence) { |
11004 | /* creat timeline. */ |
11005 | vdec_timeline_create(&pdata->sync, DRIVER_NAME); |
11006 | } |
11007 | |
11008 | return 0; |
11009 | } |
11010 | |
/*
 * Platform-device remove for the multi-instance decoder: stop the
 * decoder, release the decoder cores back to the scheduler, free
 * per-instance canvases (parallel mode), release fence resources and
 * finally free the instance itself. Mirrors ammvdec_vp9_probe().
 */
static int ammvdec_vp9_remove(struct platform_device *pdev)
{
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)
		(((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
	struct vdec_s *vdec = hw_to_vdec(pbi);
	int i;
	if (debug)
		pr_info("amvdec_vp9_remove\n");

	vmvp9_stop(pbi);

#ifdef SUPPORT_FB_DECODING
	vdec_core_release(hw_to_vdec(pbi), CORE_MASK_VDEC_1 | CORE_MASK_HEVC
		| CORE_MASK_HEVC_FRONT | CORE_MASK_HEVC_BACK
		);
#else
	if (vdec->parallel_dec == 1)
		vdec_core_release(hw_to_vdec(pbi), CORE_MASK_HEVC);
	else
		vdec_core_release(hw_to_vdec(pbi), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
#endif
	vdec_set_status(hw_to_vdec(pbi), VDEC_STATUS_DISCONNECTED);

	/* Parallel mode: each instance owns canvases; return Y and UV. */
	if (vdec->parallel_dec == 1) {
		for (i = 0; i < FRAME_BUFFERS; i++) {
			vdec->free_canvas_ex
				(pbi->common.buffer_pool->frame_bufs[i].buf.y_canvas_index,
				vdec->id);
			vdec->free_canvas_ex
				(pbi->common.buffer_pool->frame_bufs[i].buf.uv_canvas_index,
				vdec->id);
		}
	}

	if (pbi->enable_fence)
		vdec_fence_release(pbi, &vdec->sync);

#ifdef DEBUG_PTS
	pr_info("pts missed %ld, pts hit %ld, duration %d\n",
		   pbi->pts_missed, pbi->pts_hit, pbi->frame_dur);
#endif
	mem_map_mode = 0;

	/* devm_kfree(&pdev->dev, (void *)pbi); */
	vfree((void *)pbi);
	return 0;
}
11058 | |
/* Platform driver for the multi-instance ("mvp9") decoder devices. */
static struct platform_driver ammvdec_vp9_driver = {
	.probe = ammvdec_vp9_probe,
	.remove = ammvdec_vp9_remove,
	.driver = {
		.name = MULTI_DRIVER_NAME,
#ifdef CONFIG_PM
		.pm = &vp9_pm_ops,
#endif
	}
};
11069 | #endif |
/*
 * Runtime-tunable configuration entries registered under
 * "media.decoder"/"vp9" via INIT_REG_NODE_CONFIGS() at module init.
 * Each entry exposes one of the module parameters declared below.
 */
static struct mconfig vp9_configs[] = {
	MC_PU32("bit_depth_luma", &bit_depth_luma),
	MC_PU32("bit_depth_chroma", &bit_depth_chroma),
	MC_PU32("frame_width", &frame_width),
	MC_PU32("frame_height", &frame_height),
	MC_PU32("debug", &debug),
	MC_PU32("radr", &radr),
	MC_PU32("rval", &rval),
	MC_PU32("pop_shorts", &pop_shorts),
	MC_PU32("dbg_cmd", &dbg_cmd),
	MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
	MC_PU32("endian", &endian),
	MC_PU32("step", &step),
	MC_PU32("udebug_flag", &udebug_flag),
	MC_PU32("decode_pic_begin", &decode_pic_begin),
	MC_PU32("slice_parse_begin", &slice_parse_begin),
	MC_PU32("i_only_flag", &i_only_flag),
	MC_PU32("error_handle_policy", &error_handle_policy),
	MC_PU32("buf_alloc_width", &buf_alloc_width),
	MC_PU32("buf_alloc_height", &buf_alloc_height),
	MC_PU32("buf_alloc_depth", &buf_alloc_depth),
	MC_PU32("buf_alloc_size", &buf_alloc_size),
	MC_PU32("buffer_mode", &buffer_mode),
	MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
	MC_PU32("max_buf_num", &max_buf_num),
	MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
	MC_PU32("mem_map_mode", &mem_map_mode),
	MC_PU32("double_write_mode", &double_write_mode),
	MC_PU32("enable_mem_saving", &enable_mem_saving),
	MC_PU32("force_w_h", &force_w_h),
	MC_PU32("force_fps", &force_fps),
	MC_PU32("max_decoding_time", &max_decoding_time),
	MC_PU32("on_no_keyframe_skiped", &on_no_keyframe_skiped),
	MC_PU32("start_decode_buf_level", &start_decode_buf_level),
	MC_PU32("decode_timeout_val", &decode_timeout_val),
	MC_PU32("vp9_max_pic_w", &vp9_max_pic_w),
	MC_PU32("vp9_max_pic_h", &vp9_max_pic_h),
};
/* Node handle for the config entries above. */
static struct mconfig_node vp9_node;
11109 | |
11110 | static int __init amvdec_vp9_driver_init_module(void) |
11111 | { |
11112 | |
11113 | struct BuffInfo_s *p_buf_info; |
11114 | |
11115 | if (vdec_is_support_4k()) { |
11116 | if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) |
11117 | p_buf_info = &amvvp9_workbuff_spec[2]; |
11118 | else |
11119 | p_buf_info = &amvvp9_workbuff_spec[1]; |
11120 | } else |
11121 | p_buf_info = &amvvp9_workbuff_spec[0]; |
11122 | |
11123 | init_buff_spec(NULL, p_buf_info); |
11124 | work_buf_size = |
11125 | (p_buf_info->end_adr - p_buf_info->start_adr |
11126 | + 0xffff) & (~0xffff); |
11127 | |
11128 | pr_debug("amvdec_vp9 module init\n"); |
11129 | |
11130 | error_handle_policy = 0; |
11131 | |
11132 | #ifdef ERROR_HANDLE_DEBUG |
11133 | dbg_nal_skip_flag = 0; |
11134 | dbg_nal_skip_count = 0; |
11135 | #endif |
11136 | udebug_flag = 0; |
11137 | decode_pic_begin = 0; |
11138 | slice_parse_begin = 0; |
11139 | step = 0; |
11140 | buf_alloc_size = 0; |
11141 | #ifdef MULTI_INSTANCE_SUPPORT |
11142 | if (platform_driver_register(&ammvdec_vp9_driver)) |
11143 | pr_err("failed to register ammvdec_vp9 driver\n"); |
11144 | |
11145 | #endif |
11146 | if (platform_driver_register(&amvdec_vp9_driver)) { |
11147 | pr_err("failed to register amvdec_vp9 driver\n"); |
11148 | return -ENODEV; |
11149 | } |
11150 | |
11151 | if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) { |
11152 | amvdec_vp9_profile.profile = |
11153 | "8k, 10bit, dwrite, compressed"; |
11154 | } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL |
11155 | /*&& get_cpu_major_id() != MESON_CPU_MAJOR_ID_GXLX*/ |
11156 | && get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TXL) { |
11157 | if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_TXLX) { |
11158 | if (vdec_is_support_4k()) |
11159 | amvdec_vp9_profile.profile = |
11160 | "4k, 10bit, dwrite, compressed"; |
11161 | else |
11162 | amvdec_vp9_profile.profile = |
11163 | "10bit, dwrite, compressed"; |
11164 | } else { |
11165 | if (vdec_is_support_4k()) |
11166 | amvdec_vp9_profile.profile = |
11167 | "4k, 10bit, dwrite, compressed"; |
11168 | else |
11169 | amvdec_vp9_profile.profile = |
11170 | "10bit, dwrite, compressed"; |
11171 | } |
11172 | |
11173 | } else { |
11174 | amvdec_vp9_profile.name = "vp9_unsupport"; |
11175 | } |
11176 | |
11177 | if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) |
11178 | max_buf_num = MAX_BUF_NUM_LESS; |
11179 | |
11180 | vcodec_profile_register(&amvdec_vp9_profile); |
11181 | amvdec_vp9_profile_mult = amvdec_vp9_profile; |
11182 | amvdec_vp9_profile_mult.name = "mvp9"; |
11183 | vcodec_profile_register(&amvdec_vp9_profile_mult); |
11184 | INIT_REG_NODE_CONFIGS("media.decoder", &vp9_node, |
11185 | "vp9", vp9_configs, CONFIG_FOR_RW); |
11186 | |
11187 | return 0; |
11188 | } |
11189 | |
/* Module exit: unregister the platform driver(s) registered at init. */
static void __exit amvdec_vp9_driver_remove_module(void)
{
	pr_debug("amvdec_vp9 module remove.\n");
#ifdef MULTI_INSTANCE_SUPPORT
	platform_driver_unregister(&ammvdec_vp9_driver);
#endif
	platform_driver_unregister(&amvdec_vp9_driver);
}
11198 | |
11199 | /****************************************/ |
11200 | |
/*
 * Module parameters (mode 0664: user-writable via sysfs). These mirror
 * the vp9_configs[] entries registered above.
 */
module_param(bit_depth_luma, uint, 0664);
MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_vp9 bit_depth_luma\n");

module_param(bit_depth_chroma, uint, 0664);
MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_vp9 bit_depth_chroma\n");

module_param(frame_width, uint, 0664);
MODULE_PARM_DESC(frame_width, "\n amvdec_vp9 frame_width\n");

module_param(frame_height, uint, 0664);
MODULE_PARM_DESC(frame_height, "\n amvdec_vp9 frame_height\n");

module_param(debug, uint, 0664);
MODULE_PARM_DESC(debug, "\n amvdec_vp9 debug\n");

module_param(radr, uint, 0664);
MODULE_PARM_DESC(radr, "\n radr\n");

module_param(rval, uint, 0664);
MODULE_PARM_DESC(rval, "\n rval\n");

module_param(pop_shorts, uint, 0664);
MODULE_PARM_DESC(pop_shorts, "\n rval\n");

module_param(dbg_cmd, uint, 0664);
MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");

module_param(dbg_skip_decode_index, uint, 0664);
MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");

module_param(endian, uint, 0664);
MODULE_PARM_DESC(endian, "\n rval\n");

module_param(step, uint, 0664);
MODULE_PARM_DESC(step, "\n amvdec_vp9 step\n");

module_param(decode_pic_begin, uint, 0664);
MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_vp9 decode_pic_begin\n");

module_param(slice_parse_begin, uint, 0664);
MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_vp9 slice_parse_begin\n");

module_param(i_only_flag, uint, 0664);
MODULE_PARM_DESC(i_only_flag, "\n amvdec_vp9 i_only_flag\n");

module_param(low_latency_flag, uint, 0664);
MODULE_PARM_DESC(low_latency_flag, "\n amvdec_vp9 low_latency_flag\n");

module_param(no_head, uint, 0664);
MODULE_PARM_DESC(no_head, "\n amvdec_vp9 no_head\n");

module_param(error_handle_policy, uint, 0664);
MODULE_PARM_DESC(error_handle_policy, "\n amvdec_vp9 error_handle_policy\n");
11254 | |
11255 | module_param(buf_alloc_width, uint, 0664); |
11256 | MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n"); |
11257 | |
11258 | module_param(buf_alloc_height, uint, 0664); |
11259 | MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n"); |
11260 | |
11261 | module_param(buf_alloc_depth, uint, 0664); |
11262 | MODULE_PARM_DESC(buf_alloc_depth, "\n buf_alloc_depth\n"); |
11263 | |
11264 | module_param(buf_alloc_size, uint, 0664); |
11265 | MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n"); |
11266 | |
11267 | module_param(buffer_mode, uint, 0664); |
11268 | MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n"); |
11269 | |
11270 | module_param(buffer_mode_dbg, uint, 0664); |
11271 | MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n"); |
11272 | /*USE_BUF_BLOCK*/ |
11273 | module_param(max_buf_num, uint, 0664); |
11274 | MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n"); |
11275 | |
11276 | module_param(dynamic_buf_num_margin, uint, 0664); |
11277 | MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n"); |
11278 | |
11279 | module_param(mv_buf_margin, uint, 0664); |
11280 | MODULE_PARM_DESC(mv_buf_margin, "\n mv_buf_margin\n"); |
11281 | |
11282 | module_param(run_ready_min_buf_num, uint, 0664); |
11283 | MODULE_PARM_DESC(run_ready_min_buf_num, "\n run_ready_min_buf_num\n"); |
11284 | |
11285 | /**/ |
11286 | |
11287 | module_param(mem_map_mode, uint, 0664); |
11288 | MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n"); |
11289 | |
11290 | #ifdef SUPPORT_10BIT |
11291 | module_param(double_write_mode, uint, 0664); |
11292 | MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n"); |
11293 | |
11294 | module_param(enable_mem_saving, uint, 0664); |
11295 | MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n"); |
11296 | |
11297 | module_param(force_w_h, uint, 0664); |
11298 | MODULE_PARM_DESC(force_w_h, "\n force_w_h\n"); |
11299 | #endif |
11300 | |
11301 | module_param(force_fps, uint, 0664); |
11302 | MODULE_PARM_DESC(force_fps, "\n force_fps\n"); |
11303 | |
11304 | module_param(max_decoding_time, uint, 0664); |
11305 | MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n"); |
11306 | |
11307 | module_param(on_no_keyframe_skiped, uint, 0664); |
11308 | MODULE_PARM_DESC(on_no_keyframe_skiped, "\n on_no_keyframe_skiped\n"); |
11309 | |
11310 | module_param(mcrcc_cache_alg_flag, uint, 0664); |
11311 | MODULE_PARM_DESC(mcrcc_cache_alg_flag, "\n mcrcc_cache_alg_flag\n"); |
11312 | |
#ifdef MULTI_INSTANCE_SUPPORT
/* Multi-instance-only parameters and per-instance statistics arrays
 * (each array is sized/limited by max_decode_instance_num).
 */
module_param(start_decode_buf_level, int, 0664);
MODULE_PARM_DESC(start_decode_buf_level,
		"\n vp9 start_decode_buf_level\n");

module_param(decode_timeout_val, uint, 0664);
MODULE_PARM_DESC(decode_timeout_val,
	"\n vp9 decode_timeout_val\n");

module_param(vp9_max_pic_w, uint, 0664);
MODULE_PARM_DESC(vp9_max_pic_w, "\n vp9_max_pic_w\n");

module_param(vp9_max_pic_h, uint, 0664);
MODULE_PARM_DESC(vp9_max_pic_h, "\n vp9_max_pic_h\n");

module_param_array(decode_frame_count, uint,
	&max_decode_instance_num, 0664);

module_param_array(display_frame_count, uint,
	&max_decode_instance_num, 0664);

module_param_array(max_process_time, uint,
	&max_decode_instance_num, 0664);

module_param_array(run_count, uint,
	&max_decode_instance_num, 0664);

module_param_array(input_empty, uint,
	&max_decode_instance_num, 0664);

module_param_array(not_run_ready, uint,
	&max_decode_instance_num, 0664);
#endif
11346 | |
#ifdef SUPPORT_FB_DECODING
/* Frame-based (two-stage) decoding statistics and tuning knobs. */
module_param_array(not_run2_ready, uint,
	&max_decode_instance_num, 0664);

module_param_array(run2_count, uint,
	&max_decode_instance_num, 0664);

module_param(stage_buf_num, uint, 0664);
/* Description previously said "amvdec_h265" — copy-paste from the
 * H.265 decoder; this is the VP9 driver.
 */
MODULE_PARM_DESC(stage_buf_num, "\n amvdec_vp9 stage_buf_num\n");
#endif
11357 | |
11358 | module_param(udebug_flag, uint, 0664); |
11359 | MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n"); |
11360 | |
11361 | module_param(udebug_pause_pos, uint, 0664); |
11362 | MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n"); |
11363 | |
11364 | module_param(udebug_pause_val, uint, 0664); |
11365 | MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n"); |
11366 | |
11367 | module_param(udebug_pause_decode_idx, uint, 0664); |
11368 | MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n"); |
11369 | |
11370 | module_param(without_display_mode, uint, 0664); |
11371 | MODULE_PARM_DESC(without_display_mode, "\n without_display_mode\n"); |
11372 | |
11373 | module_param(force_config_fence, uint, 0664); |
11374 | MODULE_PARM_DESC(force_config_fence, "\n force enable fence\n"); |
11375 | |
11376 | module_param(force_pts_unstable, uint, 0664); |
11377 | MODULE_PARM_DESC(force_pts_unstable, "\n force_pts_unstable\n"); |
11378 | |
/* Module entry/exit points and metadata (shown by modinfo). */
module_init(amvdec_vp9_driver_init_module);
module_exit(amvdec_vp9_driver_remove_module);

MODULE_DESCRIPTION("AMLOGIC vp9 Video Decoder Driver");
MODULE_LICENSE("GPL");
11384 | |
11385 |