path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
blob: 8c4a9ab69fac552248cb79a882ccb31bdb30347a
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50#include <media/v4l2-mem2mem.h>
51
52#define CONSTRAIN_MAX_BUF_NUM
53
54#define SWAP_HEVC_UCODE
55#define DETREFILL_ENABLE
56
57#define AGAIN_HAS_THRESHOLD
58/*#define TEST_NO_BUF*/
59#define HEVC_PIC_STRUCT_SUPPORT
60#define MULTI_INSTANCE_SUPPORT
61#define USE_UNINIT_SEMA
62
 63 /* previous setting: .buf_size = 0x100000*16
 64  * (4k2k, 0x100000 per buffer) */
 65 /* 4096x2304, 0x120000 per buffer */
66#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
67#define MPRED_4K_MV_BUF_SIZE (0x120000)
68#define MPRED_MV_BUF_SIZE (0x40000)
69
70#define MMU_COMPRESS_HEADER_SIZE 0x48000
71#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
72
73#define MAX_FRAME_4K_NUM 0x1200
74#define MAX_FRAME_8K_NUM (0x1200*4)
75
76//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
77#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
78
79#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
80
81#define HEVC_CM_HEADER_START_ADDR 0x3628
82#define HEVC_SAO_MMU_VH1_ADDR 0x363b
83#define HEVC_SAO_MMU_VH0_ADDR 0x363a
84
85#define HEVC_DBLK_CFGB 0x350b
86#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
87#define SWAP_HEVC_OFFSET (3 * 0x1000)
88
89#define MEM_NAME "codec_265"
90/* #include <mach/am_regs.h> */
91#include <linux/amlogic/media/utils/vdec_reg.h>
92
93#include "../utils/vdec.h"
94#include "../utils/amvdec.h"
95#include <linux/amlogic/media/video_sink/video.h>
96#include <linux/amlogic/media/codec_mm/configs.h>
97
98#define SEND_LMEM_WITH_RPM
99#define SUPPORT_10BIT
100/* #define ERROR_HANDLE_DEBUG */
101
102#ifndef STAT_KTHREAD
103#define STAT_KTHREAD 0x40
104#endif
105
106#ifdef MULTI_INSTANCE_SUPPORT
107#define MAX_DECODE_INSTANCE_NUM 9
108#define MULTI_DRIVER_NAME "ammvdec_h265"
109#endif
110#define DRIVER_NAME "amvdec_h265"
111#define MODULE_NAME "amvdec_h265"
112#define DRIVER_HEADER_NAME "amvdec_h265_header"
113
114#define PUT_INTERVAL (HZ/100)
115#define ERROR_SYSTEM_RESET_COUNT 200
116
117#define PTS_NORMAL 0
118#define PTS_NONE_REF_USE_DURATION 1
119
120#define PTS_MODE_SWITCHING_THRESHOLD 3
121#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
122
123#define DUR2PTS(x) ((x)*90/96)
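/*
 * Worked example (informal, assuming frame_dur is in 1/96000 s units as
 * used by the amstream PTS server): a 25 fps stream has
 * frame_dur = 96000 / 25 = 3840, and DUR2PTS(3840) = 3840 * 90 / 96 = 3600,
 * i.e. exactly one frame period in 90 kHz PTS ticks (90000 / 25 = 3600).
 */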
124
125#define MAX_SIZE_8K (8192 * 4608)
126#define MAX_SIZE_4K (4096 * 2304)
127
128#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
129#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
130
131#define SEI_UserDataITU_T_T35 4
132#define INVALID_IDX -1 /* Invalid buffer index.*/
133
134static struct semaphore h265_sema;
135
136struct hevc_state_s;
137static int hevc_print(struct hevc_state_s *hevc,
138 int debug_flag, const char *fmt, ...);
139static int hevc_print_cont(struct hevc_state_s *hevc,
140 int debug_flag, const char *fmt, ...);
141static int vh265_vf_states(struct vframe_states *states, void *);
142static struct vframe_s *vh265_vf_peek(void *);
143static struct vframe_s *vh265_vf_get(void *);
144static void vh265_vf_put(struct vframe_s *, void *);
145static int vh265_event_cb(int type, void *data, void *private_data);
146
147static int vh265_stop(struct hevc_state_s *hevc);
148#ifdef MULTI_INSTANCE_SUPPORT
149static int vmh265_stop(struct hevc_state_s *hevc);
150static s32 vh265_init(struct vdec_s *vdec);
151static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
152static void reset_process_time(struct hevc_state_s *hevc);
153static void start_process_time(struct hevc_state_s *hevc);
154static void restart_process_time(struct hevc_state_s *hevc);
155static void timeout_process(struct hevc_state_s *hevc);
156#else
157static s32 vh265_init(struct hevc_state_s *hevc);
158#endif
159static void vh265_prot_init(struct hevc_state_s *hevc);
160static int vh265_local_init(struct hevc_state_s *hevc);
161static void vh265_check_timer_func(unsigned long arg);
162static void config_decode_mode(struct hevc_state_s *hevc);
163
164static const char vh265_dec_id[] = "vh265-dev";
165
166#define PROVIDER_NAME "decoder.h265"
167#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
168
169static const struct vframe_operations_s vh265_vf_provider = {
170 .peek = vh265_vf_peek,
171 .get = vh265_vf_get,
172 .put = vh265_vf_put,
173 .event_cb = vh265_event_cb,
174 .vf_states = vh265_vf_states,
175};
176
177static struct vframe_provider_s vh265_vf_prov;
178
179static u32 bit_depth_luma;
180static u32 bit_depth_chroma;
181static u32 video_signal_type;
182
183static int start_decode_buf_level = 0x8000;
184
185static unsigned int decode_timeout_val = 200;
186
187static u32 run_ready_min_buf_num = 2;
188
 189/* data_resend_policy:
 190 * bit 0: in stream-based mode, resend the data when the decoding buffer is empty
 191 */
192static u32 data_resend_policy = 1;
193
194#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
195/*
196static const char * const video_format_names[] = {
197 "component", "PAL", "NTSC", "SECAM",
198 "MAC", "unspecified", "unspecified", "unspecified"
199};
200
201static const char * const color_primaries_names[] = {
202 "unknown", "bt709", "undef", "unknown",
203 "bt470m", "bt470bg", "smpte170m", "smpte240m",
204 "film", "bt2020"
205};
206
207static const char * const transfer_characteristics_names[] = {
208 "unknown", "bt709", "undef", "unknown",
209 "bt470m", "bt470bg", "smpte170m", "smpte240m",
210 "linear", "log100", "log316", "iec61966-2-4",
211 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
212 "smpte-st-2084", "smpte-st-428"
213};
214
215static const char * const matrix_coeffs_names[] = {
216 "GBR", "bt709", "undef", "unknown",
217 "fcc", "bt470bg", "smpte170m", "smpte240m",
218 "YCgCo", "bt2020nc", "bt2020c"
219};
220*/
221#ifdef SUPPORT_10BIT
222#define HEVC_CM_BODY_START_ADDR 0x3626
223#define HEVC_CM_BODY_LENGTH 0x3627
224#define HEVC_CM_HEADER_LENGTH 0x3629
225#define HEVC_CM_HEADER_OFFSET 0x362b
226#define HEVC_SAO_CTRL9 0x362d
227#define LOSLESS_COMPRESS_MODE
228/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
229/* double_write_mode:
230 * 0, no double write;
231 * 1, 1:1 ratio;
232 * 2, (1/4):(1/4) ratio;
 233 * 3, (1/4):(1/4) ratio, with the compressed frame also included;
 234 * 4, (1/2):(1/2) ratio;
 235 * 0x10, double write only
 236 * 0x100, if > 1080p, use mode 4, else use mode 1;
 237 * 0x200, if > 1080p, use mode 2, else use mode 1;
 238 * 0x300, if > 720p, use mode 4, else use mode 1;
239 */
240static u32 double_write_mode;
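/*
 * Informal example of how the adaptive settings above resolve (see
 * get_double_write_mode() later in this file): with double_write_mode =
 * 0x100, a 3840x2160 stream is "> 1080p" so the effective mode is 4
 * ((1/2):(1/2)), while a 1920x1080 stream falls back to mode 1 (1:1).
 */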
241
242/*#define DECOMP_HEADR_SURGENT*/
243
244static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
245static u32 enable_mem_saving = 1;
246static u32 workaround_enable;
247static u32 force_w_h;
248#endif
249static u32 force_fps;
250static u32 pts_unstable;
251#define H265_DEBUG_BUFMGR 0x01
252#define H265_DEBUG_BUFMGR_MORE 0x02
253#define H265_DEBUG_DETAIL 0x04
254#define H265_DEBUG_REG 0x08
255#define H265_DEBUG_MAN_SEARCH_NAL 0x10
256#define H265_DEBUG_MAN_SKIP_NAL 0x20
257#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
258#define H265_DEBUG_FORCE_CLK 0x80
259#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
260#define H265_DEBUG_NO_DISPLAY 0x200
261#define H265_DEBUG_DISCARD_NAL 0x400
262#define H265_DEBUG_OUT_PTS 0x800
263#define H265_DEBUG_DUMP_PIC_LIST 0x1000
264#define H265_DEBUG_PRINT_SEI 0x2000
265#define H265_DEBUG_PIC_STRUCT 0x4000
266#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
267#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
268#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
269#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
270#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
271#define H265_DEBUG_HW_RESET 0x100000
272#define H265_CFG_CANVAS_IN_DECODE 0x200000
273#define H265_DEBUG_DV 0x400000
274#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
275#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
276#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
277#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
278#ifdef MULTI_INSTANCE_SUPPORT
279#define PRINT_FLAG_ERROR 0x0
280#define IGNORE_PARAM_FROM_CONFIG 0x08000000
281#define PRINT_FRAMEBASE_DATA 0x10000000
282#define PRINT_FLAG_VDEC_STATUS 0x20000000
283#define PRINT_FLAG_VDEC_DETAIL 0x40000000
284#define PRINT_FLAG_V4L_DETAIL 0x80000000
285#endif
286
287#define BUF_POOL_SIZE 32
288#define MAX_BUF_NUM 24
289#define MAX_REF_PIC_NUM 24
290#define MAX_REF_ACTIVE 16
291
292#ifdef MV_USE_FIXED_BUF
293#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
294#define VF_BUFFER_IDX(n) (n)
295#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
296#else
297#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
298#define VF_BUFFER_IDX(n) (n)
299#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
300#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
301#endif
302
303#define HEVC_MV_INFO 0x310d
304#define HEVC_QP_INFO 0x3137
305#define HEVC_SKIP_INFO 0x3136
306
307const u32 h265_version = 201602101;
308static u32 debug_mask = 0xffffffff;
309static u32 log_mask;
310static u32 debug;
311static u32 radr;
312static u32 rval;
313static u32 dbg_cmd;
314static u32 dump_nal;
315static u32 dbg_skip_decode_index;
316static u32 endian = 0xff0;
317#ifdef ERROR_HANDLE_DEBUG
318static u32 dbg_nal_skip_flag;
319 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
320static u32 dbg_nal_skip_count;
321#endif
322/*for debug*/
323/*
324 udebug_flag:
325 bit 0, enable ucode print
326 bit 1, enable ucode detail print
327 bit [31:16] not 0, pos to dump lmem
328 bit 2, pop bits to lmem
329 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
330*/
331static u32 udebug_flag;
 332/*
 333 when udebug_flag[1:0] is not 0 and
 334 udebug_pause_pos is not 0,
 335 pause at this position
 336*/
337static u32 udebug_pause_pos;
338/*
339 when udebug_flag[1:0] is not 0
340 and udebug_pause_pos is not 0,
341 pause only when DEBUG_REG2 is equal to this val
342*/
343static u32 udebug_pause_val;
344
345static u32 udebug_pause_decode_idx;
346
347static u32 decode_pic_begin;
348static uint slice_parse_begin;
349static u32 step;
350static bool is_reset;
351
352#ifdef CONSTRAIN_MAX_BUF_NUM
353static u32 run_ready_max_vf_only_num;
354static u32 run_ready_display_q_num;
 355 /* 0: do not check
 356  * 0xff: use work_pic_num as the limit
 357  */
358static u32 run_ready_max_buf_num = 0xff;
359#endif
360
361static u32 dynamic_buf_num_margin = 7;
362static u32 buf_alloc_width;
363static u32 buf_alloc_height;
364
365static u32 max_buf_num = 16;
366static u32 buf_alloc_size;
367/*static u32 re_config_pic_flag;*/
368/*
369 *bit[0]: 0,
370 *bit[1]: 0, always release cma buffer when stop
371 *bit[1]: 1, never release cma buffer when stop
372 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
 373 *do not release cma buffer if blackout is not 1
374 *
375 *bit[2]: 0, when start decoding, check current displayed buffer
376 * (only for buffer decoded by h265) if blackout is 0
377 * 1, do not check current displayed buffer
378 *
379 *bit[3]: 1, if blackout is not 1, do not release current
380 * displayed cma buffer always.
381 */
382/* set to 1 for fast play;
383 * set to 8 for other case of "keep last frame"
384 */
385static u32 buffer_mode = 1;
386
387/* buffer_mode_dbg: debug only*/
388static u32 buffer_mode_dbg = 0xffff0000;
389/**/
390/*
 391 *bit[1:0] PB_skip_mode: 0, start decoding from the beginning;
 392 *1, start decoding after the first I picture;
 393 *2, only decode and display error-free pictures;
 394 *3, start decoding and displaying after an IDR, etc.
395 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
396 *only for mode 0 and 1.
397 */
398static u32 nal_skip_policy = 2;
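/*
 * Example reading of the bit fields above: the default value 2 selects
 * PB_skip_mode 2 (only decode and display error-free pictures).
 * A value such as 0x00050001 would select mode 1 (start decoding after
 * the first I picture) and, via bit[31:16], decode but not display the
 * first 5 pictures after that point.
 */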
399
400/*
401 *bit 0, 1: only display I picture;
402 *bit 1, 1: only decode I picture;
403 */
404static u32 i_only_flag;
405
406/*
407bit 0, fast output first I picture
408*/
409static u32 fast_output_enable = 1;
410
411static u32 frmbase_cont_bitlevel = 0x60;
412
413/*
 414 use_cma: 1, use both reserved memory and cma for buffers
 415 2, use only cma for buffers
416*/
417static u32 use_cma = 2;
418
419#define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
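/* e.g. AUX_BUF_ALIGN(0x1001) = (0x1001 + 0xf) & ~0xf = 0x1010:
 * aux buffer addresses are rounded up to the next 16-byte boundary.
 */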
420static u32 prefix_aux_buf_size = (16 * 1024);
421static u32 suffix_aux_buf_size;
422
423static u32 max_decoding_time;
424/*
425 *error handling
426 */
427/*error_handle_policy:
428 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
429 *1, skip error_skip_nal_count nals before error recovery;
430 *bit 1 (valid only when bit0 == 1):
431 *1, wait vps/sps/pps after error recovery;
432 *bit 2 (valid only when bit0 == 0):
433 *0, auto search after error recovery (hevc_recover() called);
434 *1, manual search after error recovery
435 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
436 *
437 *bit 4: 0, set error_mark after reset/recover
438 * 1, do not set error_mark after reset/recover
439 *bit 5: 0, check total lcu for every picture
440 * 1, do not check total lcu
441 *bit 6: 0, do not check head error
442 * 1, check head error
443 *
444 */
445
446static u32 error_handle_policy;
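/*
 * Example reading of the policy bits above: error_handle_policy = 0x44
 * keeps bit 0 at 0 (auto skip), sets bit 2 (manual search after error
 * recovery, switching back to auto search once an IDR is seen) and
 * bit 6 (check head error).
 */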
447static u32 error_skip_nal_count = 6;
448static u32 error_handle_threshold = 30;
449static u32 error_handle_nal_skip_threshold = 10;
450static u32 error_handle_system_threshold = 30;
451static u32 interlace_enable = 1;
452static u32 fr_hint_status;
453
454 /*
455 *parser_sei_enable:
456 * bit 0, sei;
457 * bit 1, sei_suffix (fill aux buf)
458 * bit 2, fill sei to aux buf (when bit 0 is 1)
459 * bit 8, debug flag
460 */
461static u32 parser_sei_enable;
462#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
463static u32 parser_dolby_vision_enable = 1;
464static u32 dolby_meta_with_el;
465static u32 dolby_el_flush_th = 2;
466#endif
467/* this is only for h265 mmu enable */
468
469static u32 mmu_enable = 1;
470static u32 mmu_enable_force;
471static u32 work_buf_size;
472static unsigned int force_disp_pic_index;
473static unsigned int disp_vframe_valve_level;
474static int pre_decode_buf_level = 0x1000;
475static unsigned int pic_list_debug;
476
477
478#ifdef MULTI_INSTANCE_SUPPORT
479static unsigned int max_decode_instance_num
480 = MAX_DECODE_INSTANCE_NUM;
481static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
482static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
483static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
484static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
485static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
486static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
487static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
488static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
489{1, 1, 1, 1, 1, 1, 1, 1, 1};
490
491#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
492static unsigned char get_idx(struct hevc_state_s *hevc);
493#endif
494
495#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
496static u32 dv_toggle_prov_name;
497
498static u32 dv_debug;
499
500static u32 force_bypass_dvenl;
501#endif
502#endif
503
504
505#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
506#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
507#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
508#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
509#else
510#define get_dbg_flag(hevc) debug
511#define get_dbg_flag2(hevc) debug
512#define is_log_enable(hevc) (log_mask ? 1 : 0)
513#define get_valid_double_write_mode(hevc) double_write_mode
514#define get_buf_alloc_width(hevc) buf_alloc_width
515#define get_buf_alloc_height(hevc) buf_alloc_height
516#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
517#endif
518#define get_buffer_mode(hevc) buffer_mode
519
520
521DEFINE_SPINLOCK(lock);
522struct task_struct *h265_task = NULL;
523#undef DEBUG_REG
524#ifdef DEBUG_REG
525void WRITE_VREG_DBG(unsigned adr, unsigned val)
526{
527 if (debug & H265_DEBUG_REG)
528 pr_info("%s(%x, %x)\n", __func__, adr, val);
529 WRITE_VREG(adr, val);
530}
531
532#undef WRITE_VREG
533#define WRITE_VREG WRITE_VREG_DBG
534#endif
535
536static DEFINE_MUTEX(vh265_mutex);
537
538static DEFINE_MUTEX(vh265_log_mutex);
539
540static struct vdec_info *gvs;
541
542static u32 without_display_mode;
543
544/**************************************************
545 *
546 *h265 buffer management include
547 *
548 ***************************************************
549 */
550enum NalUnitType {
551 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
552 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
553
554 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
555 /* Current name in the spec: TSA_R */
556 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
557
558 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
559 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
560
561 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
562 /* Current name in the spec: RADL_R */
563 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
564
565 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
566 /* Current name in the spec: RASL_R */
567 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
568
569 NAL_UNIT_RESERVED_10,
570 NAL_UNIT_RESERVED_11,
571 NAL_UNIT_RESERVED_12,
572 NAL_UNIT_RESERVED_13,
573 NAL_UNIT_RESERVED_14,
574 NAL_UNIT_RESERVED_15,
575
576 /* Current name in the spec: BLA_W_LP */
577 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
578 /* Current name in the spec: BLA_W_DLP */
579 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
580 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
581 /* Current name in the spec: IDR_W_DLP */
582 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
583 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
584 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
585 NAL_UNIT_RESERVED_22,
586 NAL_UNIT_RESERVED_23,
587
588 NAL_UNIT_RESERVED_24,
589 NAL_UNIT_RESERVED_25,
590 NAL_UNIT_RESERVED_26,
591 NAL_UNIT_RESERVED_27,
592 NAL_UNIT_RESERVED_28,
593 NAL_UNIT_RESERVED_29,
594 NAL_UNIT_RESERVED_30,
595 NAL_UNIT_RESERVED_31,
596
597 NAL_UNIT_VPS, /* 32 */
598 NAL_UNIT_SPS, /* 33 */
599 NAL_UNIT_PPS, /* 34 */
600 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
601 NAL_UNIT_EOS, /* 36 */
602 NAL_UNIT_EOB, /* 37 */
603 NAL_UNIT_FILLER_DATA, /* 38 */
604 NAL_UNIT_SEI, /* 39 Prefix SEI */
605 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
606 NAL_UNIT_RESERVED_41,
607 NAL_UNIT_RESERVED_42,
608 NAL_UNIT_RESERVED_43,
609 NAL_UNIT_RESERVED_44,
610 NAL_UNIT_RESERVED_45,
611 NAL_UNIT_RESERVED_46,
612 NAL_UNIT_RESERVED_47,
613 NAL_UNIT_UNSPECIFIED_48,
614 NAL_UNIT_UNSPECIFIED_49,
615 NAL_UNIT_UNSPECIFIED_50,
616 NAL_UNIT_UNSPECIFIED_51,
617 NAL_UNIT_UNSPECIFIED_52,
618 NAL_UNIT_UNSPECIFIED_53,
619 NAL_UNIT_UNSPECIFIED_54,
620 NAL_UNIT_UNSPECIFIED_55,
621 NAL_UNIT_UNSPECIFIED_56,
622 NAL_UNIT_UNSPECIFIED_57,
623 NAL_UNIT_UNSPECIFIED_58,
624 NAL_UNIT_UNSPECIFIED_59,
625 NAL_UNIT_UNSPECIFIED_60,
626 NAL_UNIT_UNSPECIFIED_61,
627 NAL_UNIT_UNSPECIFIED_62,
628 NAL_UNIT_UNSPECIFIED_63,
629 NAL_UNIT_INVALID,
630};
631
632/* --------------------------------------------------- */
633/* Amrisc Software Interrupt */
634/* --------------------------------------------------- */
635#define AMRISC_STREAM_EMPTY_REQ 0x01
636#define AMRISC_PARSER_REQ 0x02
637#define AMRISC_MAIN_REQ 0x04
638
639/* --------------------------------------------------- */
640/* HEVC_DEC_STATUS define */
641/* --------------------------------------------------- */
642#define HEVC_DEC_IDLE 0x0
643#define HEVC_NAL_UNIT_VPS 0x1
644#define HEVC_NAL_UNIT_SPS 0x2
645#define HEVC_NAL_UNIT_PPS 0x3
646#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
647#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
648#define HEVC_SLICE_DECODING 0x6
649#define HEVC_NAL_UNIT_SEI 0x7
650#define HEVC_SLICE_SEGMENT_DONE 0x8
651#define HEVC_NAL_SEARCH_DONE 0x9
652#define HEVC_DECPIC_DATA_DONE 0xa
653#define HEVC_DECPIC_DATA_ERROR 0xb
654#define HEVC_SEI_DAT 0xc
655#define HEVC_SEI_DAT_DONE 0xd
656#define HEVC_NAL_DECODE_DONE 0xe
657#define HEVC_OVER_DECODE 0xf
658
659#define HEVC_DATA_REQUEST 0x12
660
661#define HEVC_DECODE_BUFEMPTY 0x20
662#define HEVC_DECODE_TIMEOUT 0x21
663#define HEVC_SEARCH_BUFEMPTY 0x22
664#define HEVC_DECODE_OVER_SIZE 0x23
665#define HEVC_DECODE_BUFEMPTY2 0x24
666#define HEVC_FIND_NEXT_PIC_NAL 0x50
667#define HEVC_FIND_NEXT_DVEL_NAL 0x51
668
669#define HEVC_DUMP_LMEM 0x30
670
671#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
672#define HEVC_DISCARD_NAL 0xf0
673#define HEVC_ACTION_DEC_CONT 0xfd
674#define HEVC_ACTION_ERROR 0xfe
675#define HEVC_ACTION_DONE 0xff
676
677/* --------------------------------------------------- */
678/* Include "parser_cmd.h" */
679/* --------------------------------------------------- */
680#define PARSER_CMD_SKIP_CFG_0 0x0000090b
681
682#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
683
684#define PARSER_CMD_SKIP_CFG_2 0x001b1910
685
686#define PARSER_CMD_NUMBER 37
687
688/**************************************************
689 *
690 *h265 buffer management
691 *
692 ***************************************************
693 */
694/* #define BUFFER_MGR_ONLY */
695/* #define CONFIG_HEVC_CLK_FORCED_ON */
696/* #define ENABLE_SWAP_TEST */
697#define MCRCC_ENABLE
698#define INVALID_POC 0x80000000
699
700#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
701#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
702#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
703#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
704#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
705#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
706#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
707#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
708#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
709#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
710#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
711#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
712#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
713#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
714#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
715#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
716#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
717#ifdef ENABLE_SWAP_TEST
718#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
719#endif
720
721/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
722/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
723#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
724 /*do not define ENABLE_SWAP_TEST*/
725#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
726#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
727
728#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
729#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
730/*
731 *ucode parser/search control
732 *bit 0: 0, header auto parse; 1, header manual parse
 733 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
734 *bit [3:2]: valid when bit1==0;
735 *0, auto skip nal before first vps/sps/pps/idr;
736 *1, auto skip nal before first vps/sps/pps
737 *2, auto skip nal before first vps/sps/pps,
738 * and not decode until the first I slice (with slice address of 0)
739 *
740 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
741 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
742 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
743 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
744 *bit [17]: for NAL_SEI when bit0 is 0:
745 * 0, do not parse/fetch SEI in ucode;
746 * 1, parse/fetch SEI in ucode
747 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
748 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
 749 * 1, fetch NAL_SEI_SUFFIX data to aux buf
750 *bit [19]:
751 * 0, parse NAL_SEI in ucode
752 * 1, fetch NAL_SEI to aux buf
753 *bit [20]: for DOLBY_VISION_META
754 * 0, do not fetch DOLBY_VISION_META to aux buf
755 * 1, fetch DOLBY_VISION_META to aux buf
756 */
757#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
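/*
 * Informal example of the bit fields described above: writing 0x8 sets
 * bit 0 = 0 (auto header parse) and bit[3:2] = 2, i.e. auto skip NALs
 * until the first vps/sps/pps and do not decode until the first I
 * slice with slice address 0.
 */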
758 /*read only*/
759#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
760 /*
761 [15 : 8] rps_set_id
762 [7 : 0] start_decoding_flag
763 */
764#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
765 /*set before start decoder*/
766#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
767#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
768#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
769
770#define DECODE_MODE_SINGLE 0x0
771#define DECODE_MODE_MULTI_FRAMEBASE 0x1
772#define DECODE_MODE_MULTI_STREAMBASE 0x2
773#define DECODE_MODE_MULTI_DVBAL 0x3
774#define DECODE_MODE_MULTI_DVENL 0x4
775
776#define MAX_INT 0x7FFFFFFF
777
778#define RPM_BEGIN 0x100
779#define modification_list_cur 0x148
780#define RPM_END 0x180
781
782#define RPS_USED_BIT 14
783/* MISC_FLAG0 */
784#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
785#define PCM_ENABLE_FLAG_BIT 1
786#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
787#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
788#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
789#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
790#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
791#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
792#define SLICE_SAO_LUMA_FLAG_BIT 8
793#define SLICE_SAO_CHROMA_FLAG_BIT 9
794#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
795
796union param_u {
797 struct {
798 unsigned short data[RPM_END - RPM_BEGIN];
799 } l;
800 struct {
801 /* from ucode lmem, do not change this struct */
802 unsigned short CUR_RPS[0x10];
803 unsigned short num_ref_idx_l0_active;
804 unsigned short num_ref_idx_l1_active;
805 unsigned short slice_type;
806 unsigned short slice_temporal_mvp_enable_flag;
807 unsigned short dependent_slice_segment_flag;
808 unsigned short slice_segment_address;
809 unsigned short num_title_rows_minus1;
810 unsigned short pic_width_in_luma_samples;
811 unsigned short pic_height_in_luma_samples;
812 unsigned short log2_min_coding_block_size_minus3;
813 unsigned short log2_diff_max_min_coding_block_size;
814 unsigned short log2_max_pic_order_cnt_lsb_minus4;
815 unsigned short POClsb;
816 unsigned short collocated_from_l0_flag;
817 unsigned short collocated_ref_idx;
818 unsigned short log2_parallel_merge_level;
819 unsigned short five_minus_max_num_merge_cand;
820 unsigned short sps_num_reorder_pics_0;
821 unsigned short modification_flag;
822 unsigned short tiles_enabled_flag;
823 unsigned short num_tile_columns_minus1;
824 unsigned short num_tile_rows_minus1;
825 unsigned short tile_width[8];
826 unsigned short tile_height[8];
827 unsigned short misc_flag0;
828 unsigned short pps_beta_offset_div2;
829 unsigned short pps_tc_offset_div2;
830 unsigned short slice_beta_offset_div2;
831 unsigned short slice_tc_offset_div2;
832 unsigned short pps_cb_qp_offset;
833 unsigned short pps_cr_qp_offset;
834 unsigned short first_slice_segment_in_pic_flag;
835 unsigned short m_temporalId;
836 unsigned short m_nalUnitType;
837
838 unsigned short vui_num_units_in_tick_hi;
839 unsigned short vui_num_units_in_tick_lo;
840 unsigned short vui_time_scale_hi;
841 unsigned short vui_time_scale_lo;
842 unsigned short bit_depth;
843 unsigned short profile_etc;
844 unsigned short sei_frame_field_info;
845 unsigned short video_signal_type;
846 unsigned short modification_list[0x20];
847 unsigned short conformance_window_flag;
848 unsigned short conf_win_left_offset;
849 unsigned short conf_win_right_offset;
850 unsigned short conf_win_top_offset;
851 unsigned short conf_win_bottom_offset;
852 unsigned short chroma_format_idc;
853 unsigned short color_description;
854 unsigned short aspect_ratio_idc;
855 unsigned short sar_width;
856 unsigned short sar_height;
857 unsigned short sps_max_dec_pic_buffering_minus1_0;
858 } p;
859};
860
861#define RPM_BUF_SIZE (0x80*2)
862/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
863#define LMEM_BUF_SIZE (0x500 * 2)
864
865struct buff_s {
866 u32 buf_start;
867 u32 buf_size;
868 u32 buf_end;
869};
870
871struct BuffInfo_s {
872 u32 max_width;
873 u32 max_height;
874 unsigned int start_adr;
875 unsigned int end_adr;
876 struct buff_s ipp;
877 struct buff_s sao_abv;
878 struct buff_s sao_vb;
879 struct buff_s short_term_rps;
880 struct buff_s vps;
881 struct buff_s sps;
882 struct buff_s pps;
883 struct buff_s sao_up;
884 struct buff_s swap_buf;
885 struct buff_s swap_buf2;
886 struct buff_s scalelut;
887 struct buff_s dblk_para;
888 struct buff_s dblk_data;
889 struct buff_s dblk_data2;
890 struct buff_s mmu_vbh;
891 struct buff_s cm_header;
892 struct buff_s mpred_above;
893#ifdef MV_USE_FIXED_BUF
894 struct buff_s mpred_mv;
895#endif
896 struct buff_s rpm;
897 struct buff_s lmem;
898};
899#define WORK_BUF_SPEC_NUM 3
900static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
901 {
902 /* 8M bytes */
903 .max_width = 1920,
904 .max_height = 1088,
905 .ipp = {
906 /* IPP work space calculation :
907 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
908 */
909 .buf_size = 0x4000,
910 },
911 .sao_abv = {
912 .buf_size = 0x30000,
913 },
914 .sao_vb = {
915 .buf_size = 0x30000,
916 },
917 .short_term_rps = {
918 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
919 * total 64x16x2 = 2048 bytes (0x800)
920 */
921 .buf_size = 0x800,
922 },
923 .vps = {
924 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
925 * total 0x0800 bytes
926 */
927 .buf_size = 0x800,
928 },
929 .sps = {
930 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
931 * total 0x0800 bytes
932 */
933 .buf_size = 0x800,
934 },
935 .pps = {
936 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
937 * total 0x2000 bytes
938 */
939 .buf_size = 0x2000,
940 },
941 .sao_up = {
942 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
943 * each has 16 bytes total 0x2800 bytes
944 */
945 .buf_size = 0x2800,
946 },
947 .swap_buf = {
948 /* 256cyclex64bit = 2K bytes 0x800
949 * (only 144 cycles valid)
950 */
951 .buf_size = 0x800,
952 },
953 .swap_buf2 = {
954 .buf_size = 0x800,
955 },
956 .scalelut = {
957 /* support up to 32 SCALELUT 1024x32 =
958 * 32Kbytes (0x8000)
959 */
960 .buf_size = 0x8000,
961 },
962 .dblk_para = {
963#ifdef SUPPORT_10BIT
964 .buf_size = 0x40000,
965#else
966 /* DBLK -> Max 256(4096/16) LCU, each para
967 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
968 */
969 .buf_size = 0x20000,
970#endif
971 },
972 .dblk_data = {
973 .buf_size = 0x40000,
974 },
975 .dblk_data2 = {
976 .buf_size = 0x40000,
977 }, /*dblk data for adapter*/
978 .mmu_vbh = {
979 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
980 },
981#if 0
982 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
983 .buf_size = MMU_COMPRESS_HEADER_SIZE *
984 (MAX_REF_PIC_NUM + 1),
985 },
986#endif
987 .mpred_above = {
988 .buf_size = 0x8000,
989 },
990#ifdef MV_USE_FIXED_BUF
991 .mpred_mv = {/* 1080p, 0x40000 per buffer */
992 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
993 },
994#endif
995 .rpm = {
996 .buf_size = RPM_BUF_SIZE,
997 },
998 .lmem = {
999 .buf_size = 0x500 * 2,
1000 }
1001 },
1002 {
1003 .max_width = 4096,
1004 .max_height = 2048,
1005 .ipp = {
1006 /* IPP work space calculation :
1007 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1008 */
1009 .buf_size = 0x4000,
1010 },
1011 .sao_abv = {
1012 .buf_size = 0x30000,
1013 },
1014 .sao_vb = {
1015 .buf_size = 0x30000,
1016 },
1017 .short_term_rps = {
1018 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1019 * total 64x16x2 = 2048 bytes (0x800)
1020 */
1021 .buf_size = 0x800,
1022 },
1023 .vps = {
1024 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1025 * total 0x0800 bytes
1026 */
1027 .buf_size = 0x800,
1028 },
1029 .sps = {
1030 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1031 * total 0x0800 bytes
1032 */
1033 .buf_size = 0x800,
1034 },
1035 .pps = {
1036 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1037 * total 0x2000 bytes
1038 */
1039 .buf_size = 0x2000,
1040 },
1041 .sao_up = {
1042 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1043 * each has 16 bytes total 0x2800 bytes
1044 */
1045 .buf_size = 0x2800,
1046 },
1047 .swap_buf = {
1048 /* 256cyclex64bit = 2K bytes 0x800
1049 * (only 144 cycles valid)
1050 */
1051 .buf_size = 0x800,
1052 },
1053 .swap_buf2 = {
1054 .buf_size = 0x800,
1055 },
1056 .scalelut = {
1057 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1058 * (0x8000)
1059 */
1060 .buf_size = 0x8000,
1061 },
1062 .dblk_para = {
1063 /* DBLK -> Max 256(4096/16) LCU, each para
1064 * 512bytes(total:0x20000),
1065 * data 1024bytes(total:0x40000)
1066 */
1067 .buf_size = 0x20000,
1068 },
1069 .dblk_data = {
1070 .buf_size = 0x80000,
1071 },
1072 .dblk_data2 = {
1073 .buf_size = 0x80000,
1074 }, /*dblk data for adapter*/
1075 .mmu_vbh = {
1076 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1077 },
1078#if 0
1079 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1080 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1081 (MAX_REF_PIC_NUM + 1),
1082 },
1083#endif
1084 .mpred_above = {
1085 .buf_size = 0x8000,
1086 },
1087#ifdef MV_USE_FIXED_BUF
1088 .mpred_mv = {
 1089 /* previous setting: .buf_size = 0x100000*16
 1090  * (4k2k, 0x100000 per buffer) */
 1091 /* 4096x2304, 0x120000 per buffer */
1092 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1093 },
1094#endif
1095 .rpm = {
1096 .buf_size = RPM_BUF_SIZE,
1097 },
1098 .lmem = {
1099 .buf_size = 0x500 * 2,
1100 }
1101 },
1102
1103 {
1104 .max_width = 4096*2,
1105 .max_height = 2048*2,
1106 .ipp = {
1107 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1108 .buf_size = 0x4000*2,
1109 },
1110 .sao_abv = {
1111 .buf_size = 0x30000*2,
1112 },
1113 .sao_vb = {
1114 .buf_size = 0x30000*2,
1115 },
1116 .short_term_rps = {
1117 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1118 .buf_size = 0x800,
1119 },
1120 .vps = {
1121 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1122 .buf_size = 0x800,
1123 },
1124 .sps = {
1125 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1126 .buf_size = 0x800,
1127 },
1128 .pps = {
1129 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1130 .buf_size = 0x2000,
1131 },
1132 .sao_up = {
1133 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1134 .buf_size = 0x2800*2,
1135 },
1136 .swap_buf = {
1137 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1138 .buf_size = 0x800,
1139 },
1140 .swap_buf2 = {
1141 .buf_size = 0x800,
1142 },
1143 .scalelut = {
1144 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1145 .buf_size = 0x8000*2,
1146 },
1147 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1148 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1149 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1150 .mmu_vbh = {
1151 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1152 },
1153#if 0
1154 .cm_header = {
1155 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1156 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1157 },
1158#endif
1159 .mpred_above = {
1160 .buf_size = 0x8000*2,
1161 },
1162#ifdef MV_USE_FIXED_BUF
1163 .mpred_mv = {
1164 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1165 },
1166#endif
1167 .rpm = {
1168 .buf_size = RPM_BUF_SIZE,
1169 },
1170 .lmem = {
1171 .buf_size = 0x500 * 2,
1172 },
1173 }
1174};
1175
1176static void init_buff_spec(struct hevc_state_s *hevc,
1177 struct BuffInfo_s *buf_spec)
1178{
1179 buf_spec->ipp.buf_start = buf_spec->start_adr;
1180 buf_spec->sao_abv.buf_start =
1181 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1182
1183 buf_spec->sao_vb.buf_start =
1184 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1185 buf_spec->short_term_rps.buf_start =
1186 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1187 buf_spec->vps.buf_start =
1188 buf_spec->short_term_rps.buf_start +
1189 buf_spec->short_term_rps.buf_size;
1190 buf_spec->sps.buf_start =
1191 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1192 buf_spec->pps.buf_start =
1193 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1194 buf_spec->sao_up.buf_start =
1195 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1196 buf_spec->swap_buf.buf_start =
1197 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1198 buf_spec->swap_buf2.buf_start =
1199 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1200 buf_spec->scalelut.buf_start =
1201 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1202 buf_spec->dblk_para.buf_start =
1203 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1204 buf_spec->dblk_data.buf_start =
1205 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1206 buf_spec->dblk_data2.buf_start =
1207 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1208 buf_spec->mmu_vbh.buf_start =
1209 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1210 buf_spec->mpred_above.buf_start =
1211 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1212#ifdef MV_USE_FIXED_BUF
1213 buf_spec->mpred_mv.buf_start =
1214 buf_spec->mpred_above.buf_start +
1215 buf_spec->mpred_above.buf_size;
1216
1217 buf_spec->rpm.buf_start =
1218 buf_spec->mpred_mv.buf_start +
1219 buf_spec->mpred_mv.buf_size;
1220#else
1221 buf_spec->rpm.buf_start =
1222 buf_spec->mpred_above.buf_start +
1223 buf_spec->mpred_above.buf_size;
1224#endif
1225 buf_spec->lmem.buf_start =
1226 buf_spec->rpm.buf_start +
1227 buf_spec->rpm.buf_size;
1228 buf_spec->end_adr =
1229 buf_spec->lmem.buf_start +
1230 buf_spec->lmem.buf_size;
1231
1232 if (hevc && get_dbg_flag2(hevc)) {
1233 hevc_print(hevc, 0,
1234 "%s workspace (%x %x) size = %x\n", __func__,
1235 buf_spec->start_adr, buf_spec->end_adr,
1236 buf_spec->end_adr - buf_spec->start_adr);
1237
1238 hevc_print(hevc, 0,
1239 "ipp.buf_start :%x\n",
1240 buf_spec->ipp.buf_start);
1241 hevc_print(hevc, 0,
1242 "sao_abv.buf_start :%x\n",
1243 buf_spec->sao_abv.buf_start);
1244 hevc_print(hevc, 0,
1245 "sao_vb.buf_start :%x\n",
1246 buf_spec->sao_vb.buf_start);
1247 hevc_print(hevc, 0,
1248 "short_term_rps.buf_start :%x\n",
1249 buf_spec->short_term_rps.buf_start);
1250 hevc_print(hevc, 0,
1251 "vps.buf_start :%x\n",
1252 buf_spec->vps.buf_start);
1253 hevc_print(hevc, 0,
1254 "sps.buf_start :%x\n",
1255 buf_spec->sps.buf_start);
1256 hevc_print(hevc, 0,
1257 "pps.buf_start :%x\n",
1258 buf_spec->pps.buf_start);
1259 hevc_print(hevc, 0,
1260 "sao_up.buf_start :%x\n",
1261 buf_spec->sao_up.buf_start);
1262 hevc_print(hevc, 0,
1263 "swap_buf.buf_start :%x\n",
1264 buf_spec->swap_buf.buf_start);
1265 hevc_print(hevc, 0,
1266 "swap_buf2.buf_start :%x\n",
1267 buf_spec->swap_buf2.buf_start);
1268 hevc_print(hevc, 0,
1269 "scalelut.buf_start :%x\n",
1270 buf_spec->scalelut.buf_start);
1271 hevc_print(hevc, 0,
1272 "dblk_para.buf_start :%x\n",
1273 buf_spec->dblk_para.buf_start);
1274 hevc_print(hevc, 0,
1275 "dblk_data.buf_start :%x\n",
1276 buf_spec->dblk_data.buf_start);
1277 hevc_print(hevc, 0,
1278 "dblk_data2.buf_start :%x\n",
1279 buf_spec->dblk_data2.buf_start);
1280 hevc_print(hevc, 0,
1281 "mpred_above.buf_start :%x\n",
1282 buf_spec->mpred_above.buf_start);
1283#ifdef MV_USE_FIXED_BUF
1284 hevc_print(hevc, 0,
1285 "mpred_mv.buf_start :%x\n",
1286 buf_spec->mpred_mv.buf_start);
1287#endif
1288 if ((get_dbg_flag2(hevc)
1289 &
1290 H265_DEBUG_SEND_PARAM_WITH_REG)
1291 == 0) {
1292 hevc_print(hevc, 0,
1293 "rpm.buf_start :%x\n",
1294 buf_spec->rpm.buf_start);
1295 }
1296 }
1297
1298}
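/*
 * Layout note: the workspace is carved up by simple pointer bumping, so
 * for every adjacent pair of sub-buffers
 *   next.buf_start == prev.buf_start + prev.buf_size
 * and end_adr - start_adr equals the sum of all the buf_size fields of
 * the selected BuffInfo_s spec.
 */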
1299
1300enum SliceType {
1301 B_SLICE,
1302 P_SLICE,
1303 I_SLICE
1304};
1305
1306/*USE_BUF_BLOCK*/
1307struct BUF_s {
1308 ulong start_adr;
1309 u32 size;
1310 u32 luma_size;
1311 ulong header_addr;
1312 u32 header_size;
1313 int used_flag;
1314 ulong v4l_ref_buf_addr;
1315} /*BUF_t */;
1316
 1317/* for level 6 and 6.1 the maximum slice number is 800; otherwise it is 200 */
1318#define MAX_SLICE_NUM 800
1319struct PIC_s {
1320 int index;
1321 int scatter_alloc;
1322 int BUF_index;
1323 int mv_buf_index;
1324 int POC;
1325 int decode_idx;
1326 int slice_type;
1327 int RefNum_L0;
1328 int RefNum_L1;
1329 int num_reorder_pic;
1330 int stream_offset;
1331 unsigned char referenced;
1332 unsigned char output_mark;
1333 unsigned char recon_mark;
1334 unsigned char output_ready;
1335 unsigned char error_mark;
1336 //dis_mark = 0:discard mark,dis_mark = 1:no discard mark
1337 unsigned char dis_mark;
1338 /**/ int slice_idx;
1339 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1340 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1341 /*buffer */
1342 unsigned int header_adr;
1343#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1344 unsigned char dv_enhance_exist;
1345#endif
1346 char *aux_data_buf;
1347 int aux_data_size;
1348 unsigned long cma_alloc_addr;
1349 struct page *alloc_pages;
1350 unsigned int mpred_mv_wr_start_addr;
1351 unsigned int mc_y_adr;
1352 unsigned int mc_u_v_adr;
1353#ifdef SUPPORT_10BIT
1354 /*unsigned int comp_body_size;*/
1355 unsigned int dw_y_adr;
1356 unsigned int dw_u_v_adr;
1357#endif
1358 int mc_canvas_y;
1359 int mc_canvas_u_v;
1360 int width;
1361 int height;
1362
1363 int y_canvas_index;
1364 int uv_canvas_index;
1365#ifdef MULTI_INSTANCE_SUPPORT
1366 struct canvas_config_s canvas_config[2];
1367#endif
1368#ifdef SUPPORT_10BIT
1369 int mem_saving_mode;
1370 u32 bit_depth_luma;
1371 u32 bit_depth_chroma;
1372#endif
1373#ifdef LOSLESS_COMPRESS_MODE
1374 unsigned int losless_comp_body_size;
1375#endif
1376 unsigned char pic_struct;
1377 int vf_ref;
1378
1379 u32 pts;
1380 u64 pts64;
1381 u64 timestamp;
1382
1383 u32 aspect_ratio_idc;
1384 u32 sar_width;
1385 u32 sar_height;
1386 u32 double_write_mode;
1387 u32 video_signal_type;
1388 unsigned short conformance_window_flag;
1389 unsigned short conf_win_left_offset;
1390 unsigned short conf_win_right_offset;
1391 unsigned short conf_win_top_offset;
1392 unsigned short conf_win_bottom_offset;
1393 unsigned short chroma_format_idc;
1394
 1395 /* picture QoS information */
1396 int max_qp;
1397 int avg_qp;
1398 int min_qp;
1399 int max_skip;
1400 int avg_skip;
1401 int min_skip;
1402 int max_mv;
1403 int min_mv;
1404 int avg_mv;
1405
1406 bool vframe_bound;
1407} /*PIC_t */;
1408
1409#define MAX_TILE_COL_NUM 10
1410#define MAX_TILE_ROW_NUM 20
1411struct tile_s {
1412 int width;
1413 int height;
1414 int start_cu_x;
1415 int start_cu_y;
1416
1417 unsigned int sao_vb_start_addr;
1418 unsigned int sao_abv_start_addr;
1419};
1420
1421#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1422#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1423#define SEI_HDR10PLUS_MASK 0x00000004
1424
1425#define VF_POOL_SIZE 32
1426
1427#ifdef MULTI_INSTANCE_SUPPORT
1428#define DEC_RESULT_NONE 0
1429#define DEC_RESULT_DONE 1
1430#define DEC_RESULT_AGAIN 2
1431#define DEC_RESULT_CONFIG_PARAM 3
1432#define DEC_RESULT_ERROR 4
1433#define DEC_INIT_PICLIST 5
1434#define DEC_UNINIT_PICLIST 6
1435#define DEC_RESULT_GET_DATA 7
1436#define DEC_RESULT_GET_DATA_RETRY 8
1437#define DEC_RESULT_EOS 9
1438#define DEC_RESULT_FORCE_EXIT 10
1439#define DEC_RESULT_FREE_CANVAS 11
1440
1441static void vh265_work(struct work_struct *work);
1442static void vh265_timeout_work(struct work_struct *work);
1443static void vh265_notify_work(struct work_struct *work);
1444
1445#endif
1446
1447struct debug_log_s {
1448 struct list_head list;
1449 uint8_t data; /*will alloc more size*/
1450};
1451
1452struct hevc_state_s {
1453#ifdef MULTI_INSTANCE_SUPPORT
1454 struct platform_device *platform_dev;
1455 void (*vdec_cb)(struct vdec_s *, void *);
1456 void *vdec_cb_arg;
1457 struct vframe_chunk_s *chunk;
1458 int dec_result;
1459 struct work_struct work;
1460 struct work_struct timeout_work;
1461 struct work_struct notify_work;
1462 struct work_struct set_clk_work;
1463 /* timeout handle */
1464 unsigned long int start_process_time;
1465 unsigned int last_lcu_idx;
1466 unsigned int decode_timeout_count;
1467 unsigned int timeout_num;
1468#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1469 unsigned char switch_dvlayer_flag;
1470 unsigned char no_switch_dvlayer_count;
1471 unsigned char bypass_dvenl_enable;
1472 unsigned char bypass_dvenl;
1473#endif
1474 unsigned char start_parser_type;
1475 /*start_decoding_flag:
1476 vps/pps/sps/idr info from ucode*/
1477 unsigned char start_decoding_flag;
1478 unsigned char rps_set_id;
1479 unsigned char eos;
1480 int pic_decoded_lcu_idx;
1481 u8 over_decode;
1482 u8 empty_flag;
1483#endif
1484 struct vframe_s vframe_dummy;
1485 char *provider_name;
1486 int index;
1487 struct device *cma_dev;
1488 unsigned char m_ins_flag;
1489 unsigned char dolby_enhance_flag;
1490 unsigned long buf_start;
1491 u32 buf_size;
1492 u32 mv_buf_size;
1493
1494 struct BuffInfo_s work_space_buf_store;
1495 struct BuffInfo_s *work_space_buf;
1496
1497 u8 aux_data_dirty;
1498 u32 prefix_aux_size;
1499 u32 suffix_aux_size;
1500 void *aux_addr;
1501 void *rpm_addr;
1502 void *lmem_addr;
1503 dma_addr_t aux_phy_addr;
1504 dma_addr_t rpm_phy_addr;
1505 dma_addr_t lmem_phy_addr;
1506
1507 unsigned int pic_list_init_flag;
1508 unsigned int use_cma_flag;
1509
1510 unsigned short *rpm_ptr;
1511 unsigned short *lmem_ptr;
1512 unsigned short *debug_ptr;
1513 int debug_ptr_size;
1514 int pic_w;
1515 int pic_h;
1516 int lcu_x_num;
1517 int lcu_y_num;
1518 int lcu_total;
1519 int lcu_size;
1520 int lcu_size_log2;
1521 int lcu_x_num_pre;
1522 int lcu_y_num_pre;
1523 int first_pic_after_recover;
1524
1525 int num_tile_col;
1526 int num_tile_row;
1527 int tile_enabled;
1528 int tile_x;
1529 int tile_y;
1530 int tile_y_x;
1531 int tile_start_lcu_x;
1532 int tile_start_lcu_y;
1533 int tile_width_lcu;
1534 int tile_height_lcu;
1535
1536 int slice_type;
1537 unsigned int slice_addr;
1538 unsigned int slice_segment_addr;
1539
1540 unsigned char interlace_flag;
1541 unsigned char curr_pic_struct;
1542 unsigned char frame_field_info_present_flag;
1543
1544 unsigned short sps_num_reorder_pics_0;
1545 unsigned short misc_flag0;
1546 int m_temporalId;
1547 int m_nalUnitType;
1548 int TMVPFlag;
1549 int isNextSliceSegment;
1550 int LDCFlag;
1551 int m_pocRandomAccess;
1552 int plevel;
1553 int MaxNumMergeCand;
1554
1555 int new_pic;
1556 int new_tile;
1557 int curr_POC;
1558 int iPrevPOC;
1559#ifdef MULTI_INSTANCE_SUPPORT
1560 int decoded_poc;
1561 struct PIC_s *decoding_pic;
1562#endif
1563 int iPrevTid0POC;
1564 int list_no;
1565 int RefNum_L0;
1566 int RefNum_L1;
1567 int ColFromL0Flag;
1568 int LongTerm_Curr;
1569 int LongTerm_Col;
1570 int Col_POC;
1571 int LongTerm_Ref;
1572#ifdef MULTI_INSTANCE_SUPPORT
1573 int m_pocRandomAccess_bak;
1574 int curr_POC_bak;
1575 int iPrevPOC_bak;
1576 int iPrevTid0POC_bak;
1577 unsigned char start_parser_type_bak;
1578 unsigned char start_decoding_flag_bak;
1579 unsigned char rps_set_id_bak;
1580 int pic_decoded_lcu_idx_bak;
1581 int decode_idx_bak;
1582#endif
1583 struct PIC_s *cur_pic;
1584 struct PIC_s *col_pic;
1585 int skip_flag;
1586 int decode_idx;
1587 int slice_idx;
1588 unsigned char have_vps;
1589 unsigned char have_sps;
1590 unsigned char have_pps;
1591 unsigned char have_valid_start_slice;
1592 unsigned char wait_buf;
1593 unsigned char error_flag;
1594 unsigned int error_skip_nal_count;
1595 long used_4k_num;
1596
1597 unsigned char
1598 ignore_bufmgr_error; /* bit 0, for decoding;
1599 bit 1, for displaying
1600 bit 1 must be set if bit 0 is 1*/
1601 int PB_skip_mode;
1602 int PB_skip_count_after_decoding;
1603#ifdef SUPPORT_10BIT
1604 int mem_saving_mode;
1605#endif
1606#ifdef LOSLESS_COMPRESS_MODE
1607 unsigned int losless_comp_body_size;
1608#endif
1609 int pts_mode;
1610 int last_lookup_pts;
1611 int last_pts;
1612 u64 last_lookup_pts_us64;
1613 u64 last_pts_us64;
1614 u32 shift_byte_count_lo;
1615 u32 shift_byte_count_hi;
1616 int pts_mode_switching_count;
1617 int pts_mode_recovery_count;
1618
1619 int pic_num;
1620
1621 /**/
1622 union param_u param;
1623
1624 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1625
1626 struct timer_list timer;
1627 struct BUF_s m_BUF[BUF_POOL_SIZE];
1628 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1629 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1630
1631 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1632 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1633 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1634 struct vframe_s vfpool[VF_POOL_SIZE];
1635
1636 u32 stat;
1637 u32 frame_width;
1638 u32 frame_height;
1639 u32 frame_dur;
1640 u32 frame_ar;
1641 u32 bit_depth_luma;
1642 u32 bit_depth_chroma;
1643 u32 video_signal_type;
1644 u32 video_signal_type_debug;
1645 u32 saved_resolution;
1646 bool get_frame_dur;
1647 u32 error_watchdog_count;
1648 u32 error_skip_nal_wt_cnt;
1649 u32 error_system_watchdog_count;
1650
1651#ifdef DEBUG_PTS
1652 unsigned long pts_missed;
1653 unsigned long pts_hit;
1654#endif
1655 struct dec_sysinfo vh265_amstream_dec_info;
1656 unsigned char init_flag;
1657 unsigned char first_sc_checked;
1658 unsigned char uninit_list;
1659 u32 start_decoding_time;
1660
1661 int show_frame_num;
1662#ifdef USE_UNINIT_SEMA
1663 struct semaphore h265_uninit_done_sema;
1664#endif
1665 int fatal_error;
1666
1667
1668 u32 sei_present_flag;
1669 void *frame_mmu_map_addr;
1670 dma_addr_t frame_mmu_map_phy_addr;
1671 unsigned int mmu_mc_buf_start;
1672 unsigned int mmu_mc_buf_end;
1673 unsigned int mmu_mc_start_4k_adr;
1674 void *mmu_box;
1675 void *bmmu_box;
1676 int mmu_enable;
1677
1678 unsigned int dec_status;
1679
1680 /* data for SEI_MASTER_DISPLAY_COLOR */
1681 unsigned int primaries[3][2];
1682 unsigned int white_point[2];
1683 unsigned int luminance[2];
1684 /* data for SEI_CONTENT_LIGHT_LEVEL */
1685 unsigned int content_light_level[2];
1686
1687 struct PIC_s *pre_top_pic;
1688 struct PIC_s *pre_bot_pic;
1689
1690#ifdef MULTI_INSTANCE_SUPPORT
1691 int double_write_mode;
1692 int dynamic_buf_num_margin;
1693 int start_action;
1694 int save_buffer_mode;
1695#endif
1696 u32 i_only;
1697 struct list_head log_list;
1698 u32 ucode_pause_pos;
1699 u32 start_shift_bytes;
1700
1701 u32 vf_pre_count;
1702 u32 vf_get_count;
1703 u32 vf_put_count;
1704#ifdef SWAP_HEVC_UCODE
1705 dma_addr_t mc_dma_handle;
1706 void *mc_cpu_addr;
1707 int swap_size;
1708 ulong swap_addr;
1709#endif
1710#ifdef DETREFILL_ENABLE
1711 dma_addr_t detbuf_adr;
1712 u16 *detbuf_adr_virt;
1713 u8 delrefill_check;
1714#endif
1715 u8 head_error_flag;
1716 int valve_count;
1717 struct firmware_s *fw;
1718 int max_pic_w;
1719 int max_pic_h;
1720#ifdef AGAIN_HAS_THRESHOLD
1721 u8 next_again_flag;
1722 u32 pre_parser_wr_ptr;
1723#endif
1724 u32 ratio_control;
1725 u32 first_pic_flag;
1726 u32 decode_size;
1727 struct mutex chunks_mutex;
1728 int need_cache_size;
1729 u64 sc_start_time;
1730 u32 skip_first_nal;
1731 bool is_swap;
1732 bool is_4k;
1733 int frameinfo_enable;
1734 struct vframe_qos_s vframe_qos;
1735 bool is_used_v4l;
1736 void *v4l2_ctx;
1737 bool v4l_params_parsed;
1738 u32 mem_map_mode;
1739} /*hevc_stru_t */;
1740
1741#ifdef AGAIN_HAS_THRESHOLD
1742u32 again_threshold;
1743#endif
1744#ifdef SEND_LMEM_WITH_RPM
1745#define get_lmem_params(hevc, ladr) \
1746 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
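/*
 * The macro reverses the 16-bit entry order within each aligned group
 * of 4 lmem words: offset k inside a group maps to (3 - k), e.g.
 * ladr = 0x41 (group base 0x40, k = 1) reads lmem_ptr[0x42],
 * presumably matching how the ucode dumps lmem 64 bits at a time.
 */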
1747
1748
1749static int get_frame_mmu_map_size(void)
1750{
1751 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1752 return (MAX_FRAME_8K_NUM * 4);
1753
1754 return (MAX_FRAME_4K_NUM * 4);
1755}
1756
1757static int is_oversize(int w, int h)
1758{
1759 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1760 MAX_SIZE_8K : MAX_SIZE_4K;
1761
1762 if (w < 0 || h < 0)
1763 return true;
1764
1765 if (h != 0 && (w > max / h))
1766 return true;
1767
1768 return false;
1769}
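/*
 * Dividing (w > max / h) instead of multiplying avoids overflow for
 * arbitrarily large w and h. Example: on SM1 and later chips
 * is_oversize(8192, 4608) is false (8192 * 4608 == MAX_SIZE_8K), while
 * on earlier chips the 4K limit applies and the same size is rejected.
 */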
1770
1771void check_head_error(struct hevc_state_s *hevc)
1772{
1773#define pcm_enabled_flag 0x040
1774#define pcm_sample_bit_depth_luma 0x041
1775#define pcm_sample_bit_depth_chroma 0x042
1776 hevc->head_error_flag = 0;
1777 if ((error_handle_policy & 0x40) == 0)
1778 return;
1779 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1780 uint16_t pcm_depth_luma = get_lmem_params(
1781 hevc, pcm_sample_bit_depth_luma);
1782 uint16_t pcm_sample_chroma = get_lmem_params(
1783 hevc, pcm_sample_bit_depth_chroma);
1784 if (pcm_depth_luma >
1785 hevc->bit_depth_luma ||
1786 pcm_sample_chroma >
1787 hevc->bit_depth_chroma) {
1788 hevc_print(hevc, 0,
1789 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1790 pcm_depth_luma,
1791 pcm_sample_chroma,
1792 hevc->bit_depth_luma,
1793 hevc->bit_depth_chroma);
1794 hevc->head_error_flag = 1;
1795 }
1796 }
1797}
1798#endif
1799
1800#ifdef SUPPORT_10BIT
 1801/* Lossless compression body buffer size: 4K per 64x32 block (jt) */
1802static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1803 int width, int height, int mem_saving_mode)
1804{
1805 int width_x64;
1806 int height_x32;
1807 int bsize;
1808
1809 width_x64 = width + 63;
1810 width_x64 >>= 6;
1811
1812 height_x32 = height + 31;
1813 height_x32 >>= 5;
1814 if (mem_saving_mode == 1 && hevc->mmu_enable)
1815 bsize = 3200 * width_x64 * height_x32;
1816 else if (mem_saving_mode == 1)
1817 bsize = 3072 * width_x64 * height_x32;
1818 else
1819 bsize = 4096 * width_x64 * height_x32;
1820
1821 return bsize;
1822}
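/*
 * Worked example with mem_saving_mode == 0: 3840x2160 gives
 * width_x64 = 60 and height_x32 = 68, so
 * bsize = 4096 * 60 * 68 = 16711680 bytes (about 16 MB).
 */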
1823
 1824/* Lossless compression header buffer size: 32 bytes per 128x64 block (jt) */
1825static int compute_losless_comp_header_size(int width, int height)
1826{
1827 int width_x128;
1828 int height_x64;
1829 int hsize;
1830
1831 width_x128 = width + 127;
1832 width_x128 >>= 7;
1833
1834 height_x64 = height + 63;
1835 height_x64 >>= 6;
1836
1837 hsize = 32*width_x128*height_x64;
1838
1839 return hsize;
1840}
1841#endif
1842
1843static int add_log(struct hevc_state_s *hevc,
1844 const char *fmt, ...)
1845{
1846#define HEVC_LOG_BUF 196
1847 struct debug_log_s *log_item;
1848 unsigned char buf[HEVC_LOG_BUF];
1849 int len = 0;
1850 va_list args;
1851 mutex_lock(&vh265_log_mutex);
1852 va_start(args, fmt);
1853 len = sprintf(buf, "<%ld> <%05d> ",
1854 jiffies, hevc->decode_idx);
1855 len += vsnprintf(buf + len,
1856 HEVC_LOG_BUF - len, fmt, args);
1857 va_end(args);
1858 log_item = kmalloc(
1859 sizeof(struct debug_log_s) + len,
1860 GFP_KERNEL);
1861 if (log_item) {
1862 INIT_LIST_HEAD(&log_item->list);
1863 strcpy(&log_item->data, buf);
1864 list_add_tail(&log_item->list,
1865 &hevc->log_list);
1866 }
1867 mutex_unlock(&vh265_log_mutex);
1868 return 0;
1869}
1870
1871static void dump_log(struct hevc_state_s *hevc)
1872{
1873 int i = 0;
1874 struct debug_log_s *log_item, *tmp;
1875 mutex_lock(&vh265_log_mutex);
1876 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1877 hevc_print(hevc, 0,
1878 "[LOG%04d]%s\n",
1879 i++,
1880 &log_item->data);
1881 list_del(&log_item->list);
1882 kfree(log_item);
1883 }
1884 mutex_unlock(&vh265_log_mutex);
1885}
1886
1887static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1888 struct PIC_s *pic)
1889{
1890 if (pic->error_mark
1891 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1892 return 1;
1893 return 0;
1894}
1895
1896static int get_pic_poc(struct hevc_state_s *hevc,
1897 unsigned int idx)
1898{
1899 if (idx != 0xff
1900 && idx < MAX_REF_PIC_NUM
1901 && hevc->m_PIC[idx])
1902 return hevc->m_PIC[idx]->POC;
1903 return INVALID_POC;
1904}
1905
1906#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1907static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1908{
1909 return (hevc->m_ins_flag &&
1910 ((double_write_mode & 0x80000000) == 0)) ?
1911 hevc->double_write_mode :
1912 (double_write_mode & 0x7fffffff);
1913}
1914
1915static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1916{
1917 return (hevc->m_ins_flag &&
1918 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1919 hevc->dynamic_buf_num_margin :
1920 (dynamic_buf_num_margin & 0x7fffffff);
1921}
1922#endif
1923
1924static int get_double_write_mode(struct hevc_state_s *hevc)
1925{
1926 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1927 int w = hevc->pic_w;
1928 int h = hevc->pic_h;
1929 u32 dw = 0x1; /*1:1*/
1930 switch (valid_dw_mode) {
1931 case 0x100:
1932 if (w > 1920 && h > 1088)
1933 dw = 0x4; /*1:2*/
1934 break;
1935 case 0x200:
1936 if (w > 1920 && h > 1088)
1937 dw = 0x2; /*1:4*/
1938 break;
1939 case 0x300:
1940 if (w > 1280 && h > 720)
1941 dw = 0x4; /*1:2*/
1942 break;
1943 default:
1944 dw = valid_dw_mode;
1945 break;
1946 }
1947 return dw;
1948}
1949
1950static int get_double_write_ratio(struct hevc_state_s *hevc,
1951 int dw_mode)
1952{
1953 int ratio = 1;
1954 if ((dw_mode == 2) ||
1955 (dw_mode == 3))
1956 ratio = 4;
1957 else if (dw_mode == 4)
1958 ratio = 2;
1959 return ratio;
1960}
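/*
 * Double write mode values as handled in this file:
 *   0x0               - no double write, compressed reference only
 *   0x1               - full-size (1:1) double write
 *   0x2, 0x3          - double write with width/height divided by 4 (ratio 4)
 *   0x4               - double write with width/height divided by 2 (ratio 2)
 *   0x10              - double write only; the uncompressed buffer is used
 *                       directly as the reference, no compressed/MMU reference
 *   0x100/0x200/0x300 - pick one of the scaled modes above automatically once
 *                       the stream exceeds 1080p (0x100/0x200) or 720p (0x300)
 */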
1961#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1962static unsigned char get_idx(struct hevc_state_s *hevc)
1963{
1964 return hevc->index;
1965}
1966#endif
1967
1968#undef pr_info
1969#define pr_info printk
1970static int hevc_print(struct hevc_state_s *hevc,
1971 int flag, const char *fmt, ...)
1972{
1973#define HEVC_PRINT_BUF 256
1974 unsigned char buf[HEVC_PRINT_BUF];
1975 int len = 0;
1976#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1977 if (hevc == NULL ||
1978 (flag == 0) ||
1979 ((debug_mask &
1980 (1 << hevc->index))
1981 && (debug & flag))) {
1982#endif
1983 va_list args;
1984
1985 va_start(args, fmt);
1986 if (hevc)
1987 len = sprintf(buf, "[%d]", hevc->index);
1988 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1989 pr_debug("%s", buf);
1990 va_end(args);
1991#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1992 }
1993#endif
1994 return 0;
1995}
1996
1997static int hevc_print_cont(struct hevc_state_s *hevc,
1998 int flag, const char *fmt, ...)
1999{
2000 unsigned char buf[HEVC_PRINT_BUF];
2001 int len = 0;
2002#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2003 if (hevc == NULL ||
2004 (flag == 0) ||
2005 ((debug_mask &
2006 (1 << hevc->index))
2007 && (debug & flag))) {
2008#endif
2009 va_list args;
2010
2011 va_start(args, fmt);
2012 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2013 pr_info("%s", buf);
2014 va_end(args);
2015#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2016 }
2017#endif
2018 return 0;
2019}
2020
2021static void put_mv_buf(struct hevc_state_s *hevc,
2022 struct PIC_s *pic);
2023
2024static void update_vf_memhandle(struct hevc_state_s *hevc,
2025 struct vframe_s *vf, struct PIC_s *pic);
2026
2027static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2028
2029static void release_aux_data(struct hevc_state_s *hevc,
2030 struct PIC_s *pic);
2031static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2032
2033#ifdef MULTI_INSTANCE_SUPPORT
2034static void backup_decode_state(struct hevc_state_s *hevc)
2035{
2036 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2037 hevc->curr_POC_bak = hevc->curr_POC;
2038 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2039 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2040 hevc->start_parser_type_bak = hevc->start_parser_type;
2041 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2042 hevc->rps_set_id_bak = hevc->rps_set_id;
2043 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2044 hevc->decode_idx_bak = hevc->decode_idx;
2045
2046}
2047
2048static void restore_decode_state(struct hevc_state_s *hevc)
2049{
2050 struct vdec_s *vdec = hw_to_vdec(hevc);
2051 if (!vdec_has_more_input(vdec)) {
2052 hevc->pic_decoded_lcu_idx =
2053 READ_VREG(HEVC_PARSER_LCU_START)
2054 & 0xffffff;
2055 return;
2056 }
2057 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2058 "%s: discard pic index 0x%x\n",
2059 __func__, hevc->decoding_pic ?
2060 hevc->decoding_pic->index : 0xff);
2061 if (hevc->decoding_pic) {
2062 hevc->decoding_pic->error_mark = 0;
2063 hevc->decoding_pic->output_ready = 0;
2064 hevc->decoding_pic->output_mark = 0;
2065 hevc->decoding_pic->referenced = 0;
2066 hevc->decoding_pic->POC = INVALID_POC;
2067 put_mv_buf(hevc, hevc->decoding_pic);
2068 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2069 release_aux_data(hevc, hevc->decoding_pic);
2070 hevc->decoding_pic = NULL;
2071 }
2072 hevc->decode_idx = hevc->decode_idx_bak;
2073 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2074 hevc->curr_POC = hevc->curr_POC_bak;
2075 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2076 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2077 hevc->start_parser_type = hevc->start_parser_type_bak;
2078 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2079 hevc->rps_set_id = hevc->rps_set_id_bak;
2080 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2081
2082 if (hevc->pic_list_init_flag == 1)
2083 hevc->pic_list_init_flag = 0;
2084 /*if (hevc->decode_idx == 0)
2085 hevc->start_decoding_flag = 0;*/
2086
2087 hevc->slice_idx = 0;
2088 hevc->used_4k_num = -1;
2089}
2090#endif
2091
2092static void hevc_init_stru(struct hevc_state_s *hevc,
2093 struct BuffInfo_s *buf_spec_i)
2094{
2095 int i;
2096 INIT_LIST_HEAD(&hevc->log_list);
2097 hevc->work_space_buf = buf_spec_i;
2098 hevc->prefix_aux_size = 0;
2099 hevc->suffix_aux_size = 0;
2100 hevc->aux_addr = NULL;
2101 hevc->rpm_addr = NULL;
2102 hevc->lmem_addr = NULL;
2103
2104 hevc->curr_POC = INVALID_POC;
2105
2106 hevc->pic_list_init_flag = 0;
2107 hevc->use_cma_flag = 0;
2108 hevc->decode_idx = 0;
2109 hevc->slice_idx = 0;
2110 hevc->new_pic = 0;
2111 hevc->new_tile = 0;
2112 hevc->iPrevPOC = 0;
2113 hevc->list_no = 0;
2114 /* int m_uiMaxCUWidth = 1<<7; */
2115 /* int m_uiMaxCUHeight = 1<<7; */
2116 hevc->m_pocRandomAccess = MAX_INT;
2117 hevc->tile_enabled = 0;
2118 hevc->tile_x = 0;
2119 hevc->tile_y = 0;
2120 hevc->iPrevTid0POC = 0;
2121 hevc->slice_addr = 0;
2122 hevc->slice_segment_addr = 0;
2123 hevc->skip_flag = 0;
2124 hevc->misc_flag0 = 0;
2125
2126 hevc->cur_pic = NULL;
2127 hevc->col_pic = NULL;
2128 hevc->wait_buf = 0;
2129 hevc->error_flag = 0;
2130 hevc->head_error_flag = 0;
2131 hevc->error_skip_nal_count = 0;
2132 hevc->have_vps = 0;
2133 hevc->have_sps = 0;
2134 hevc->have_pps = 0;
2135 hevc->have_valid_start_slice = 0;
2136
2137 hevc->pts_mode = PTS_NORMAL;
2138 hevc->last_pts = 0;
2139 hevc->last_lookup_pts = 0;
2140 hevc->last_pts_us64 = 0;
2141 hevc->last_lookup_pts_us64 = 0;
2142 hevc->pts_mode_switching_count = 0;
2143 hevc->pts_mode_recovery_count = 0;
2144
2145 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2146 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2147 if (hevc->PB_skip_mode == 0)
2148 hevc->ignore_bufmgr_error = 0x1;
2149 else
2150 hevc->ignore_bufmgr_error = 0x0;
2151
2152 if (hevc->is_used_v4l) {
2153 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2154 if (hevc->m_PIC[i] != NULL) {
2155				memset(hevc->m_PIC[i], 0, sizeof(struct PIC_s));
2156 hevc->m_PIC[i]->index = i;
2157 }
2158 }
2159 }
2160
2161 hevc->pic_num = 0;
2162 hevc->lcu_x_num_pre = 0;
2163 hevc->lcu_y_num_pre = 0;
2164 hevc->first_pic_after_recover = 0;
2165
2166 hevc->pre_top_pic = NULL;
2167 hevc->pre_bot_pic = NULL;
2168
2169 hevc->sei_present_flag = 0;
2170 hevc->valve_count = 0;
2171 hevc->first_pic_flag = 0;
2172#ifdef MULTI_INSTANCE_SUPPORT
2173 hevc->decoded_poc = INVALID_POC;
2174 hevc->start_process_time = 0;
2175 hevc->last_lcu_idx = 0;
2176 hevc->decode_timeout_count = 0;
2177 hevc->timeout_num = 0;
2178 hevc->eos = 0;
2179 hevc->pic_decoded_lcu_idx = -1;
2180 hevc->over_decode = 0;
2181 hevc->used_4k_num = -1;
2182 hevc->start_decoding_flag = 0;
2183 hevc->rps_set_id = 0;
2184 backup_decode_state(hevc);
2185#endif
2186#ifdef DETREFILL_ENABLE
2187 hevc->detbuf_adr = 0;
2188 hevc->detbuf_adr_virt = NULL;
2189#endif
2190}
2191
2192static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2193static int H265_alloc_mmu(struct hevc_state_s *hevc,
2194 struct PIC_s *new_pic, unsigned short bit_depth,
2195 unsigned int *mmu_index_adr);
2196
2197#ifdef DETREFILL_ENABLE
2198#define DETREFILL_BUF_SIZE (4 * 0x4000)
2199#define HEVC_SAO_DBG_MODE0 0x361e
2200#define HEVC_SAO_DBG_MODE1 0x361f
2201#define HEVC_SAO_CTRL10 0x362e
2202#define HEVC_SAO_CTRL11 0x362f
2203static int init_detrefill_buf(struct hevc_state_s *hevc)
2204{
2205 if (hevc->detbuf_adr_virt)
2206 return 0;
2207
2208 hevc->detbuf_adr_virt =
2209 (void *)dma_alloc_coherent(amports_get_dma_device(),
2210 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2211 GFP_KERNEL);
2212
2213 if (hevc->detbuf_adr_virt == NULL) {
2214		pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2215 return -1;
2216 }
2217 return 0;
2218}
2219
2220static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2221{
2222 if (hevc->detbuf_adr_virt) {
2223 dma_free_coherent(amports_get_dma_device(),
2224 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2225 hevc->detbuf_adr);
2226
2227 hevc->detbuf_adr_virt = NULL;
2228 hevc->detbuf_adr = 0;
2229 }
2230}
2231
2232/*
2233 * convert an uncompressed 8x4 block of frame buffer data from/to DDR
2234 */
2235static void convUnc8x4blk(uint16_t* blk8x4Luma,
2236 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2237{
2238 if (direction == 0) {
2239 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2240 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2241 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2242 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2243 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2244 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2245 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2246 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2247 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2248 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2249 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2250 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2251 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2252 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2253 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2254 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2255 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2256 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2257
2258 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2259 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2260 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2261 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2262 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2263 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2264 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2265 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2266 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2267 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2268 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2269 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2270 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2271 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2272 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2273 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2274 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2275 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2276
2277 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2278 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2279 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2280 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2281 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2282 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2283 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2284 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2285 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2286 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2287 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2288 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2289 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2290 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2291 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2292 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2293 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2294 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2295
2296 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2297 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2298 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2299 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2300 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2301 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2302 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2303 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2304 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2305 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2306 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2307 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2308 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2309 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2310 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2311 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2312 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2313 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2314 } else {
2315 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2316 blk8x4Luma[3 + 0 * 8];
2317 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2318 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2319 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2320 (blk8x4Luma[3 + 3 * 8] >> 2);
2321 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2322 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2323 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2324 (blk8x4Luma[7 + 2 * 8] >>4);
2325 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2326 blk8x4Cb[0 + 0 * 4];
2327 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2328 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2329 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2330
2331 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2332 blk8x4Luma[0 + 0 * 8];
2333 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2334 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2335 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2336 (blk8x4Luma[0 + 1 * 8] >> 2);
2337 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2338 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2339 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2340 (blk8x4Luma[0 + 2 * 8] >>4);
2341 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2342 blk8x4Luma[2 + 2 * 8];
2343 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2344 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2345 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2346
2347 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2348 blk8x4Luma[4 + 0 * 8];
2349 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2350 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2351 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2352 (blk8x4Luma[4 + 1 * 8] >> 2);
2353 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2354 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2355 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2356 (blk8x4Luma[4 + 2 * 8] >>4);
2357 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2358 blk8x4Luma[6 + 2 * 8];
2359 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2360 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2361 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2362
2363 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2364 blk8x4Cb[1 + 0 * 4];
2365 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2366 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2367 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2368 (blk8x4Cr[2 + 0 * 4] >> 2);
2369 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2370 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2371 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2372 (blk8x4Cb[1 + 1 * 4] >>4);
2373 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2374 blk8x4Cb[2 + 1 * 4];
2375 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2376 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2377 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2378 }
2379}
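/*
 * Packing note for convUnc8x4blk(): each group of eight 16-bit cmBodyBuf
 * words (128 bits) carries twelve 10-bit samples (8 luma + 4 chroma, or 12
 * luma, or 12 chroma), the remaining bits being padding. direction 0 unpacks
 * DDR body words into 10-bit samples; direction 1 packs the samples back
 * into the DDR word layout.
 */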
2380
2381static void corrRefillWithAmrisc (
2382 struct hevc_state_s *hevc,
2383 uint32_t cmHeaderBaseAddr,
2384 uint32_t picWidth,
2385 uint32_t ctuPosition)
2386{
2387 int32_t i;
2388 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2389 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2390 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2391
2392 uint16_t cmBodyBuf[32 * 18];
2393
2394 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2395 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2396 uint32_t stride64x64 = pic_width_x64 * 128;
2397 uint32_t addr_offset64x64_abv = stride64x64 *
2398 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2399 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2400 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2401 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2402 unsigned int tmpData32;
2403
2404 uint16_t blkBuf0Y[32];
2405 uint16_t blkBuf0Cb[8];
2406 uint16_t blkBuf0Cr[8];
2407 uint16_t blkBuf1Y[32];
2408 uint16_t blkBuf1Cb[8];
2409 uint16_t blkBuf1Cr[8];
2410 int32_t blkBufCnt = 0;
2411
2412 int32_t blkIdx;
2413
2414 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2415 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2416 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2417 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2418
2419 for (i = 0; i < 32 * 18; i++)
2420 cmBodyBuf[i] = 0;
2421
2422 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2423 "%s, %d\n", __func__, __LINE__);
2424 do {
2425 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2426 } while (tmpData32);
2427 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2428 "%s, %d\n", __func__, __LINE__);
2429
2430 hevc_print(hevc, H265_DEBUG_DETAIL,
2431 "cmBodyBuf from detbuf:\n");
2432 for (i = 0; i < 32 * 18; i++) {
2433 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2434 if (get_dbg_flag(hevc) &
2435 H265_DEBUG_DETAIL) {
2436 if ((i & 0xf) == 0)
2437 hevc_print_cont(hevc, 0, "\n");
2438 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2439 }
2440 }
2441 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2442
2443 for (i = 0; i < 32; i++)
2444 blkBuf0Y[i] = 0;
2445 for (i = 0; i < 8; i++)
2446 blkBuf0Cb[i] = 0;
2447 for (i = 0; i < 8; i++)
2448 blkBuf0Cr[i] = 0;
2449 for (i = 0; i < 32; i++)
2450 blkBuf1Y[i] = 0;
2451 for (i = 0; i < 8; i++)
2452 blkBuf1Cb[i] = 0;
2453 for (i = 0; i < 8; i++)
2454 blkBuf1Cr[i] = 0;
2455
2456 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2457 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2458 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2459 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2460 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2461 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2462 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2463
2464 if (!aboveCtuAvailable && inAboveCtu)
2465 continue;
2466
2467 /* detRefillBuf --> 8x4block*/
2468 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2469
2470 if (restoreEnable) {
2471 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2472 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2473 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2474 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2475 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2476 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2477 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2478 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2479 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2480 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2481 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2482 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2483 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2484 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2485 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2486 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2487 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2488 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2489 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2490 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2491 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2492 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2493 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2494 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2495 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2496 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2497 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2498 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2499 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2500 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2501 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2502 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2503
2504 /*Store data back to DDR*/
2505 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2506 }
2507
2508 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2509 }
2510
2511 hevc_print(hevc, H265_DEBUG_DETAIL,
2512 "cmBodyBuf to detbuf:\n");
2513 for (i = 0; i < 32 * 18; i++) {
2514 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2515 if (get_dbg_flag(hevc) &
2516 H265_DEBUG_DETAIL) {
2517 if ((i & 0xf) == 0)
2518 hevc_print_cont(hevc, 0, "\n");
2519 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2520 }
2521 }
2522 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2523
2524 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2525 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2526 "%s, %d\n", __func__, __LINE__);
2527 do {
2528 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2529 } while (tmpData32);
2530 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2531 "%s, %d\n", __func__, __LINE__);
2532}
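/*
 * AMRISC handshake used above (as far as this code shows): HEVC_SAO_DBG_MODE0
 * is loaded with detbuf_adr, then writing 2 to HEVC_SAO_DBG_MODE1 asks the
 * ucode to copy the 8x4 blocks around the broken CTU into the detrefill
 * buffer, and writing 3 asks it to store the repaired blocks back to DDR; in
 * both cases the register is polled until the ucode clears it to 0.
 */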
2533
2534static void delrefill(struct hevc_state_s *hevc)
2535{
2536 /*
2537 * corrRefill
2538 */
2539	/* HEVC_SAO_DBG_MODE0: picGlobalVariable
2540	 * [31:30] error number
2541	 * [29:20] error2 ([9:7] tilex, [6:0] ctuy)
2542	 * [19:10] error1, [9:0] error0 */
2543 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2544 uint32_t errorIdx;
2545 uint32_t errorNum = (detResult>>30);
2546
2547 if (detResult) {
2548 hevc_print(hevc, H265_DEBUG_BUFMGR,
2549 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2550 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2551 uint32_t errorPos = errorIdx * 10;
2552 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2553 uint32_t tilex = (errorResult >> 7) - 1;
2554 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2555 + hevc->m_tile[0][tilex].width - 1;
2556 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2557 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2558 hevc_print(hevc, H265_DEBUG_BUFMGR,
2559 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2560 errorIdx,tilex,ctux,ctux, ctuy,ctuy);
2561 corrRefillWithAmrisc(
2562 hevc,
2563 (uint32_t)hevc->cur_pic->header_adr,
2564 hevc->pic_w,
2565 ctuPosition);
2566 }
2567
2568 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2569 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2570 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2571 }
2572}
2573#endif
2574
2575static void get_rpm_param(union param_u *params)
2576{
2577 int i;
2578 unsigned int data32;
2579
2580 for (i = 0; i < 128; i++) {
2581 do {
2582 data32 = READ_VREG(RPM_CMD_REG);
2583 /* hevc_print(hevc, 0, "%x\n", data32); */
2584 } while ((data32 & 0x10000) == 0);
2585 params->l.data[i] = data32 & 0xffff;
2586 /* hevc_print(hevc, 0, "%x\n", data32); */
2587 WRITE_VREG(RPM_CMD_REG, 0);
2588 }
2589}
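/*
 * RPM handshake: for each of the 128 parameter words, bit 16 of RPM_CMD_REG
 * signals that a value is ready, the low 16 bits carry the payload, and
 * writing 0 back acknowledges the word so the next one can be delivered.
 */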
2590
2591static int get_free_buf_idx(struct hevc_state_s *hevc)
2592{
2593 int index = INVALID_IDX;
2594 struct PIC_s *pic;
2595 int i;
2596
2597 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2598 pic = hevc->m_PIC[i];
2599 if (pic == NULL ||
2600 pic->index == -1 ||
2601 pic->BUF_index == -1)
2602 continue;
2603
2604 if (pic->output_mark == 0 &&
2605 pic->referenced == 0 &&
2606 pic->output_ready == 0 &&
2607 pic->cma_alloc_addr) {
2608 pic->output_ready = 1;
2609 index = i;
2610 break;
2611 }
2612 }
2613
2614 return index;
2615}
2616
2617static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2618{
2619 int i;
2620 struct PIC_s *pic;
2621 struct PIC_s *ret_pic = NULL;
2622 if (POC == INVALID_POC)
2623 return NULL;
2624 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2625 pic = hevc->m_PIC[i];
2626 if (pic == NULL || pic->index == -1 ||
2627 pic->BUF_index == -1)
2628 continue;
2629 if (pic->POC == POC) {
2630 if (ret_pic == NULL)
2631 ret_pic = pic;
2632 else {
2633 if (pic->decode_idx > ret_pic->decode_idx)
2634 ret_pic = pic;
2635 }
2636 }
2637 }
2638 return ret_pic;
2639}
2640
2641static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2642{
2643 int i;
2644 struct PIC_s *pic;
2645 struct PIC_s *ret_pic = NULL;
2646
2647 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2648 pic = hevc->m_PIC[i];
2649 if (pic == NULL || pic->index == -1 ||
2650 pic->BUF_index == -1)
2651 continue;
2652 if ((pic->POC == POC) && (pic->referenced)) {
2653 if (ret_pic == NULL)
2654 ret_pic = pic;
2655 else {
2656 if (pic->decode_idx > ret_pic->decode_idx)
2657 ret_pic = pic;
2658 }
2659 }
2660 }
2661
2662 if (ret_pic == NULL) {
2663 if (get_dbg_flag(hevc)) {
2664 hevc_print(hevc, 0,
2665 "Wrong, POC of %d is not in referenced list\n",
2666 POC);
2667 }
2668 ret_pic = get_pic_by_POC(hevc, POC);
2669 }
2670 return ret_pic;
2671}
2672
2673static unsigned int log2i(unsigned int val)
2674{
2675 unsigned int ret = -1;
2676
2677 while (val != 0) {
2678 val >>= 1;
2679 ret++;
2680 }
2681 return ret;
2682}
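/*
 * log2i() returns floor(log2(val)), e.g. log2i(64) = 6; note that val == 0
 * yields (unsigned int)-1 since ret starts at -1 and the loop never runs.
 */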
2683
2684static int init_buf_spec(struct hevc_state_s *hevc);
2685
2686static bool v4l_is_there_vframe_bound(struct hevc_state_s *hevc)
2687{
2688 int i;
2689
2690 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2691 struct PIC_s *pic = hevc->m_PIC[i];
2692
2693 if (pic && pic->vframe_bound)
2694 return true;
2695 }
2696
2697 return false;
2698}
2699
2700static void v4l_mmu_buffer_release(struct hevc_state_s *hevc)
2701{
2702 int i;
2703
2704 /* release workspace */
2705 if (hevc->bmmu_box)
2706 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2707 BMMU_WORKSPACE_ID);
2708	/*
2709	 * Only when a vframe comes back to the driver can we be sure that the
2710	 * vframe and its fd are still related. If playback exits, buffers still
2711	 * bound to a vframe must be released by the upper-layer app when the fd
2712	 * is closed; the other buffers are released by the driver here.
2713	 */
2714 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2715 struct PIC_s *pic = hevc->m_PIC[i];
2716
2717 if (pic && !pic->vframe_bound) {
2718 if (hevc->bmmu_box)
2719 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2720 VF_BUFFER_IDX(i));
2721 if (hevc->mmu_box)
2722 decoder_mmu_box_free_idx(hevc->mmu_box, i);
2723
2724 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
2725 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
2726 __func__, i, hevc->bmmu_box, hevc->mmu_box);
2727 }
2728 }
2729}
2730
2731static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2732{
2733 if (hevc->is_used_v4l &&
2734 v4l_is_there_vframe_bound(hevc)) {
2735 if (get_double_write_mode(hevc) != 0x10) {
2736 v4l_mmu_buffer_release(hevc);
2737 return;
2738 }
2739 }
2740
2741 if (hevc->mmu_box)
2742 decoder_mmu_box_free(hevc->mmu_box);
2743 hevc->mmu_box = NULL;
2744
2745 if (hevc->bmmu_box)
2746 decoder_bmmu_box_free(hevc->bmmu_box);
2747 hevc->bmmu_box = NULL;
2748}
2749static int init_mmu_buffers(struct hevc_state_s *hevc)
2750{
2751 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2752 CODEC_MM_FLAGS_TVP : 0;
2753 int buf_size = 64;
2754
2755 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2756 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2757 buf_size = 24;
2758 }
2759
2760 if (get_dbg_flag(hevc)) {
2761 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2762 __func__, hevc->max_pic_w, hevc->max_pic_h);
2763 }
2764
2765 hevc->need_cache_size = buf_size * SZ_1M;
2766 hevc->sc_start_time = get_jiffies_64();
2767 if (hevc->mmu_enable
2768 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2769 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2770 hevc->index,
2771 MAX_REF_PIC_NUM,
2772 buf_size * SZ_1M,
2773 tvp_flag
2774 );
2775 if (!hevc->mmu_box) {
2776 pr_err("h265 alloc mmu box failed!!\n");
2777 return -1;
2778 }
2779 }
2780
2781 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2782 hevc->index,
2783 BMMU_MAX_BUFFERS,
2784 4 + PAGE_SHIFT,
2785 CODEC_MM_FLAGS_CMA_CLEAR |
2786 CODEC_MM_FLAGS_FOR_VDECODER |
2787 tvp_flag);
2788 if (!hevc->bmmu_box) {
2789 if (hevc->mmu_box)
2790 decoder_mmu_box_free(hevc->mmu_box);
2791 hevc->mmu_box = NULL;
2792		pr_err("h265 alloc bmmu box failed!!\n");
2793 return -1;
2794 }
2795 return 0;
2796}
2797
2798struct buf_stru_s
2799{
2800 int lcu_total;
2801 int mc_buffer_size_h;
2802 int mc_buffer_size_u_v_h;
2803};
2804
2805#ifndef MV_USE_FIXED_BUF
2806static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2807{
2808 int i;
2809 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2810 if (hevc->m_mv_BUF[i].start_adr) {
2811 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2812 hevc_print(hevc, 0,
2813 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2814 i, hevc->m_mv_BUF[i].start_adr,
2815 hevc->m_mv_BUF[i].size,
2816 hevc->m_mv_BUF[i].used_flag);
2817 decoder_bmmu_box_free_idx(
2818 hevc->bmmu_box,
2819 MV_BUFFER_IDX(i));
2820 hevc->m_mv_BUF[i].start_adr = 0;
2821 hevc->m_mv_BUF[i].size = 0;
2822 hevc->m_mv_BUF[i].used_flag = 0;
2823 }
2824 }
2825 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2826 if (hevc->m_PIC[i] != NULL)
2827 hevc->m_PIC[i]->mv_buf_index = -1;
2828 }
2829}
2830
2831static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2832{
2833 int ret = 0;
2834 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2835 if (decoder_bmmu_box_alloc_buf_phy
2836 (hevc->bmmu_box,
2837 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2838 DRIVER_NAME,
2839 &hevc->m_mv_BUF[i].start_adr) < 0) {
2840 hevc->m_mv_BUF[i].start_adr = 0;
2841 ret = -1;
2842 } else {
2843 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2844 hevc->m_mv_BUF[i].used_flag = 0;
2845 ret = 0;
2846 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2847 hevc_print(hevc, 0,
2848 "MV Buffer %d: start_adr %p size %x\n",
2849 i,
2850 (void *)hevc->m_mv_BUF[i].start_adr,
2851 hevc->m_mv_BUF[i].size);
2852 }
2853 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2854 void *mem_start_virt;
2855 mem_start_virt =
2856 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2857 if (mem_start_virt) {
2858 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2859 codec_mm_dma_flush(mem_start_virt,
2860 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2861 } else {
2862 mem_start_virt = codec_mm_vmap(
2863 hevc->m_mv_BUF[i].start_adr,
2864 hevc->m_mv_BUF[i].size);
2865 if (mem_start_virt) {
2866 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2867 codec_mm_dma_flush(mem_start_virt,
2868 hevc->m_mv_BUF[i].size,
2869 DMA_TO_DEVICE);
2870 codec_mm_unmap_phyaddr(mem_start_virt);
2871 } else {
2872					/* no virtual address in TVP (secure) playback;
2873					   clearing may need to be done by the ucode. */
2874 pr_err("ref %s mem_start_virt failed\n", __func__);
2875 }
2876 }
2877 }
2878 }
2879 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2880 return ret;
2881}
2882#endif
2883
2884static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2885{
2886#ifdef MV_USE_FIXED_BUF
2887 if (pic && pic->index >= 0) {
2888 if (IS_8K_SIZE(pic->width, pic->height)) {
2889 pic->mpred_mv_wr_start_addr =
2890 hevc->work_space_buf->mpred_mv.buf_start
2891 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2892 } else {
2893 pic->mpred_mv_wr_start_addr =
2894 hevc->work_space_buf->mpred_mv.buf_start
2895 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2896 }
2897 }
2898 return 0;
2899#else
2900 int i;
2901 int ret = -1;
2902 int new_size;
2903 if (IS_8K_SIZE(pic->width, pic->height))
2904 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2905 else if (IS_4K_SIZE(pic->width, pic->height))
2906 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2907 else
2908 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2909 if (new_size != hevc->mv_buf_size) {
2910 dealloc_mv_bufs(hevc);
2911 hevc->mv_buf_size = new_size;
2912 }
2913 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2914 if (hevc->m_mv_BUF[i].start_adr &&
2915 hevc->m_mv_BUF[i].used_flag == 0) {
2916 hevc->m_mv_BUF[i].used_flag = 1;
2917 ret = i;
2918 break;
2919 }
2920 }
2921 if (ret < 0) {
2922 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2923 if (hevc->m_mv_BUF[i].start_adr == 0) {
2924 if (alloc_mv_buf(hevc, i) >= 0) {
2925 hevc->m_mv_BUF[i].used_flag = 1;
2926 ret = i;
2927 }
2928 break;
2929 }
2930 }
2931 }
2932
2933 if (ret >= 0) {
2934 pic->mv_buf_index = ret;
2935 pic->mpred_mv_wr_start_addr =
2936 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2937 (~0xffff);
2938 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2939 "%s => %d (0x%x) size 0x%x\n",
2940 __func__, ret,
2941 pic->mpred_mv_wr_start_addr,
2942 hevc->m_mv_BUF[ret].size);
2943
2944 } else {
2945 hevc_print(hevc, 0,
2946 "%s: Error, mv buf is not enough\n",
2947 __func__);
2948 }
2949 return ret;
2950
2951#endif
2952}
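/*
 * When MV_USE_FIXED_BUF is not defined, each MV buffer is allocated 0x10000
 * bytes larger than the nominal MPRED_*_MV_BUF_SIZE so that
 * mpred_mv_wr_start_addr can be rounded up to the next 64 KB boundary; a
 * change of the required size triggers dealloc_mv_bufs() and reallocation at
 * the new size.
 */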
2953
2954static void put_mv_buf(struct hevc_state_s *hevc,
2955 struct PIC_s *pic)
2956{
2957#ifndef MV_USE_FIXED_BUF
2958 int i = pic->mv_buf_index;
2959 if (i < 0 || i >= MAX_REF_PIC_NUM) {
2960 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2961 "%s: index %d beyond range\n",
2962 __func__, i);
2963 return;
2964 }
2965 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2966 "%s(%d): used_flag(%d)\n",
2967 __func__, i,
2968 hevc->m_mv_BUF[i].used_flag);
2969
2970 if (hevc->m_mv_BUF[i].start_adr &&
2971 hevc->m_mv_BUF[i].used_flag)
2972 hevc->m_mv_BUF[i].used_flag = 0;
2973 pic->mv_buf_index = -1;
2974#endif
2975}
2976
2977static int cal_current_buf_size(struct hevc_state_s *hevc,
2978 struct buf_stru_s *buf_stru)
2979{
2980
2981 int buf_size;
2982 int pic_width = hevc->pic_w;
2983 int pic_height = hevc->pic_h;
2984 int lcu_size = hevc->lcu_size;
2985 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
2986 1 : pic_width / lcu_size;
2987 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
2988 1 : pic_height / lcu_size;
2989 /*SUPPORT_10BIT*/
2990 int losless_comp_header_size = compute_losless_comp_header_size
2991 (pic_width, pic_height);
2992 /*always alloc buf for 10bit*/
2993 int losless_comp_body_size = compute_losless_comp_body_size
2994 (hevc, pic_width, pic_height, 0);
2995 int mc_buffer_size = losless_comp_header_size
2996 + losless_comp_body_size;
2997 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
2998 int mc_buffer_size_u_v_h = 0;
2999
3000 int dw_mode = get_double_write_mode(hevc);
3001
3002 if (hevc->mmu_enable) {
3003 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3004 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3005 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
3006 << 16;
3007 else
3008 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
3009 << 16;
3010 } else
3011 buf_size = 0;
3012
3013 if (dw_mode) {
3014 int pic_width_dw = pic_width /
3015 get_double_write_ratio(hevc, dw_mode);
3016 int pic_height_dw = pic_height /
3017 get_double_write_ratio(hevc, dw_mode);
3018
3019 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
3020 pic_width_dw / lcu_size + 1 :
3021 pic_width_dw / lcu_size;
3022 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
3023 pic_height_dw / lcu_size + 1 :
3024 pic_height_dw / lcu_size;
3025 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
3026
3027 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
3028 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
3029 /*64k alignment*/
3030 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
3031 }
3032
3033 if ((!hevc->mmu_enable) &&
3034 ((dw_mode & 0x10) == 0)) {
3035		/* compressed mode is used without the MMU,
3036		   so a buffer for the compressed reference data is needed */
3037 buf_size += (mc_buffer_size_h << 16);
3038 }
3039
3040	/* in case the start address is not 64 KB aligned */
3041 if (buf_size > 0)
3042 buf_size += 0x10000;
3043
3044 if (buf_stru) {
3045 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
3046 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
3047 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
3048 }
3049
3050	hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
3051 pic_width, pic_height, losless_comp_header_size,
3052 losless_comp_body_size, mc_buffer_size_h,
3053 mc_buffer_size_u_v_h, buf_size);
3054
3055 return buf_size;
3056}
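/*
 * Per-picture buffer layout computed above (each piece rounded up to 64 KB):
 *   [compressed frame header, only when the MMU is enabled]
 *   [3 x 64 KB-aligned chroma-plane size for the double-write output, i.e.
 *    one full YUV 4:2:0 frame, only when double write is enabled]
 *   [compressed header + body, only when compression is used without the MMU]
 *   + 0x10000 of slack in case the start address is not 64 KB aligned
 */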
3057
3058static int v4l_alloc_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
3059{
3060 int ret = -1;
3061 int i = pic->index;
3062 struct vdec_v4l2_buffer *fb = NULL;
3063
3064 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3065 return ret;
3066
3067 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3068 if (ret < 0) {
3069 hevc_print(hevc, 0, "[%d] H265 get buffer fail.\n",
3070 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id);
3071 return ret;
3072 }
3073
3074 if (hevc->mmu_enable) {
3075 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3076 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3077 hevc->m_BUF[i].header_size =
3078 ALIGN(MMU_COMPRESS_8K_HEADER_SIZE, 0x10000);
3079 else
3080 hevc->m_BUF[i].header_size =
3081 ALIGN(MMU_COMPRESS_HEADER_SIZE, 0x10000);
3082
3083 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
3084 VF_BUFFER_IDX(i), hevc->m_BUF[i].header_size,
3085 DRIVER_NAME, &hevc->m_BUF[i].header_addr);
3086 if (ret < 0) {
3087 hevc_print(hevc, PRINT_FLAG_ERROR,
3088 "%s[%d], header size: %d, no mem fatal err\n",
3089 __func__, i, hevc->m_BUF[i].header_size);
3090 return ret;
3091 }
3092 }
3093
3094 hevc->m_BUF[i].used_flag = 0;
3095 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3096 pic->cma_alloc_addr = hevc->m_BUF[i].v4l_ref_buf_addr;
3097 if (fb->num_planes == 1) {
3098 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3099 hevc->m_BUF[i].size = fb->m.mem[0].size;
3100 hevc->m_BUF[i].luma_size = fb->m.mem[0].offset;
3101 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3102 } else if (fb->num_planes == 2) {
3103 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3104 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3105 hevc->m_BUF[i].luma_size = fb->m.mem[0].size;
3106 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3107 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3108 }
3109
3110 return ret;
3111}
3112
3113static int alloc_buf(struct hevc_state_s *hevc)
3114{
3115 int i;
3116 int ret = -1;
3117 int buf_size = cal_current_buf_size(hevc, NULL);
3118
3119 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3120 return ret;
3121
3122 for (i = 0; i < BUF_POOL_SIZE; i++) {
3123 if (hevc->m_BUF[i].start_adr == 0)
3124 break;
3125 }
3126 if (i < BUF_POOL_SIZE) {
3127 if (buf_size > 0) {
3128 ret = decoder_bmmu_box_alloc_buf_phy
3129 (hevc->bmmu_box,
3130 VF_BUFFER_IDX(i), buf_size,
3131 DRIVER_NAME,
3132 &hevc->m_BUF[i].start_adr);
3133 if (ret < 0) {
3134 hevc->m_BUF[i].start_adr = 0;
3135 if (i <= 8) {
3136 hevc->fatal_error |=
3137 DECODER_FATAL_ERROR_NO_MEM;
3138 hevc_print(hevc, PRINT_FLAG_ERROR,
3139 "%s[%d], size: %d, no mem fatal err\n",
3140 __func__, i, buf_size);
3141 }
3142 }
3143
3144 if (ret >= 0) {
3145 hevc->m_BUF[i].size = buf_size;
3146 hevc->m_BUF[i].used_flag = 0;
3147 ret = 0;
3148
3149 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3150 hevc_print(hevc, 0,
3151 "Buffer %d: start_adr %p size %x\n",
3152 i,
3153 (void *)hevc->m_BUF[i].start_adr,
3154 hevc->m_BUF[i].size);
3155 }
3156			/* flush the buffer to make sure no cache lines are dirty */
3157 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3158 void *mem_start_virt;
3159 mem_start_virt =
3160 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3161 if (mem_start_virt) {
3162 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3163 codec_mm_dma_flush(mem_start_virt,
3164 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3165 } else {
3166 mem_start_virt = codec_mm_vmap(
3167 hevc->m_BUF[i].start_adr,
3168 hevc->m_BUF[i].size);
3169 if (mem_start_virt) {
3170 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3171 codec_mm_dma_flush(mem_start_virt,
3172 hevc->m_BUF[i].size,
3173 DMA_TO_DEVICE);
3174 codec_mm_unmap_phyaddr(mem_start_virt);
3175 } else {
3176						/* no virtual address in TVP (secure) playback;
3177						   clearing may need to be done by the ucode. */
3178 pr_err("ref %s mem_start_virt failed\n", __func__);
3179 }
3180 }
3181 }
3182 }
3183 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3184 } else
3185 ret = 0;
3186 }
3187
3188 if (ret >= 0) {
3189 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3190 hevc_print(hevc, 0,
3191			"alloc buf(%d) for %d/%d (size 0x%x) => %p\n",
3192 i, hevc->pic_w, hevc->pic_h,
3193 buf_size,
3194 hevc->m_BUF[i].start_adr);
3195 }
3196 } else {
3197 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3198 hevc_print(hevc, 0,
3199			"alloc buf(%d) for %d/%d (size 0x%x) => Fail!!!\n",
3200 i, hevc->pic_w, hevc->pic_h,
3201 buf_size);
3202 }
3203 }
3204 return ret;
3205}
3206
3207static void set_buf_unused(struct hevc_state_s *hevc, int i)
3208{
3209 if (i >= 0 && i < BUF_POOL_SIZE)
3210 hevc->m_BUF[i].used_flag = 0;
3211}
3212
3213static void dealloc_unused_buf(struct hevc_state_s *hevc)
3214{
3215 int i;
3216 for (i = 0; i < BUF_POOL_SIZE; i++) {
3217 if (hevc->m_BUF[i].start_adr &&
3218 hevc->m_BUF[i].used_flag == 0) {
3219 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3220 hevc_print(hevc, 0,
3221 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3222 i, hevc->m_BUF[i].start_adr,
3223 hevc->m_BUF[i].size);
3224 }
3225 if (!hevc->is_used_v4l)
3226 decoder_bmmu_box_free_idx(
3227 hevc->bmmu_box,
3228 VF_BUFFER_IDX(i));
3229 hevc->m_BUF[i].start_adr = 0;
3230 hevc->m_BUF[i].size = 0;
3231 }
3232 }
3233}
3234
3235static void dealloc_pic_buf(struct hevc_state_s *hevc,
3236 struct PIC_s *pic)
3237{
3238 int i = pic->BUF_index;
3239 pic->BUF_index = -1;
3240 if (i >= 0 &&
3241 i < BUF_POOL_SIZE &&
3242 hevc->m_BUF[i].start_adr) {
3243 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3244 hevc_print(hevc, 0,
3245 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3246 i, hevc->m_BUF[i].start_adr,
3247 hevc->m_BUF[i].size);
3248 }
3249
3250 if (!hevc->is_used_v4l)
3251 decoder_bmmu_box_free_idx(
3252 hevc->bmmu_box,
3253 VF_BUFFER_IDX(i));
3254 hevc->m_BUF[i].used_flag = 0;
3255 hevc->m_BUF[i].start_adr = 0;
3256 hevc->m_BUF[i].size = 0;
3257 }
3258}
3259
3260static int get_work_pic_num(struct hevc_state_s *hevc)
3261{
3262 int used_buf_num = 0;
3263 int sps_pic_buf_diff = 0;
3264
3265 if (get_dynamic_buf_num_margin(hevc) > 0) {
3266 if ((!hevc->sps_num_reorder_pics_0) &&
3267 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3268 /* the range of sps_num_reorder_pics_0 is in
3269 [0, sps_max_dec_pic_buffering_minus1_0] */
3270 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3271 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3272 } else
3273 used_buf_num = hevc->sps_num_reorder_pics_0
3274 + get_dynamic_buf_num_margin(hevc);
3275
3276 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3277 - hevc->sps_num_reorder_pics_0;
3278#ifdef MULTI_INSTANCE_SUPPORT
3279		/*
3280		 * need one more for multi-instance, as
3281		 * apply_ref_pic_set() has no chance to run
3282		 * to clear the referenced flag in some cases
3283		 */
3284 if (hevc->m_ins_flag)
3285 used_buf_num++;
3286#endif
3287 } else
3288 used_buf_num = max_buf_num;
3289
3290 if (hevc->save_buffer_mode)
3291 hevc_print(hevc, 0,
3292			"save buf mode: dynamic_buf_num_margin %d ----> %d\n",
3293 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3294
3295	if (sps_pic_buf_diff >= 4)
3296		used_buf_num += 1;
3299
3300 if (used_buf_num > MAX_BUF_NUM)
3301 used_buf_num = MAX_BUF_NUM;
3302 return used_buf_num;
3303}
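/*
 * Example (hypothetical stream): with sps_num_reorder_pics_0 = 3,
 * sps_max_dec_pic_buffering_minus1_0 = 5 and a dynamic buffer margin of 7,
 * used_buf_num = 3 + 7 = 10, plus 1 in multi-instance mode; sps_pic_buf_diff
 * is 5 - 3 = 2 (< 4), so no extra buffer is added, and the final value is
 * clamped to MAX_BUF_NUM.
 */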
3304
3305static int get_alloc_pic_count(struct hevc_state_s *hevc)
3306{
3307 int alloc_pic_count = 0;
3308 int i;
3309 struct PIC_s *pic;
3310 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3311 pic = hevc->m_PIC[i];
3312 if (pic && pic->index >= 0)
3313 alloc_pic_count++;
3314 }
3315 return alloc_pic_count;
3316}
3317
3318static int v4l_config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3319{
3320 int i = pic->index;
3321 int dw_mode = get_double_write_mode(hevc);
3322
3323 if (hevc->mmu_enable)
3324 pic->header_adr = hevc->m_BUF[i].header_addr;
3325
3326 pic->BUF_index = i;
3327 pic->POC = INVALID_POC;
3328 pic->mc_canvas_y = pic->index;
3329 pic->mc_canvas_u_v = pic->index;
3330
3331 if (dw_mode & 0x10) {
3332 pic->mc_y_adr = hevc->m_BUF[i].start_adr;
3333 pic->mc_u_v_adr = pic->mc_y_adr + hevc->m_BUF[i].luma_size;
3334 pic->mc_canvas_y = (pic->index << 1);
3335 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3336
3337 pic->dw_y_adr = pic->mc_y_adr;
3338 pic->dw_u_v_adr = pic->mc_u_v_adr;
3339 } else if (dw_mode) {
3340 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3341 pic->dw_u_v_adr = pic->dw_y_adr + hevc->m_BUF[i].luma_size;
3342 }
3343
3344 return 0;
3345}
3346
3347static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3348{
3349 int ret = -1;
3350 int i;
3351 /*int lcu_size_log2 = hevc->lcu_size_log2;
3352 int MV_MEM_UNIT=lcu_size_log2==
3353 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3354 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3355 5 ? 0x80 : 0x20;
3356 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3357 hevc->work_space_buf->mpred_mv.buf_size;*/
3358 unsigned int y_adr = 0;
3359 struct buf_stru_s buf_stru;
3360 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3361 int dw_mode = get_double_write_mode(hevc);
3362
3363 for (i = 0; i < BUF_POOL_SIZE; i++) {
3364 if (hevc->m_BUF[i].start_adr != 0 &&
3365 hevc->m_BUF[i].used_flag == 0 &&
3366 buf_size <= hevc->m_BUF[i].size) {
3367 hevc->m_BUF[i].used_flag = 1;
3368 break;
3369 }
3370 }
3371
3372 if (i >= BUF_POOL_SIZE)
3373 return -1;
3374
3375 if (hevc->mmu_enable) {
3376 pic->header_adr = hevc->m_BUF[i].start_adr;
3377 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3378 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3379 y_adr = hevc->m_BUF[i].start_adr +
3380 MMU_COMPRESS_8K_HEADER_SIZE;
3381 else
3382 y_adr = hevc->m_BUF[i].start_adr +
3383 MMU_COMPRESS_HEADER_SIZE;
3384 } else
3385 y_adr = hevc->m_BUF[i].start_adr;
3386
3387 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3388
3389 pic->POC = INVALID_POC;
3390	/* ensure get_pic_by_POC() does not
3391	   return a buffer that has not been decoded yet */
3392 pic->BUF_index = i;
3393
3394 if ((!hevc->mmu_enable) &&
3395 ((dw_mode & 0x10) == 0)
3396 ) {
3397 pic->mc_y_adr = y_adr;
3398 y_adr += (buf_stru.mc_buffer_size_h << 16);
3399 }
3400 pic->mc_canvas_y = pic->index;
3401 pic->mc_canvas_u_v = pic->index;
3402 if (dw_mode & 0x10) {
3403 pic->mc_y_adr = y_adr;
3404 pic->mc_u_v_adr = y_adr +
3405 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3406 pic->mc_canvas_y = (pic->index << 1);
3407 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3408
3409 pic->dw_y_adr = pic->mc_y_adr;
3410 pic->dw_u_v_adr = pic->mc_u_v_adr;
3411 } else if (dw_mode) {
3412 pic->dw_y_adr = y_adr;
3413 pic->dw_u_v_adr = pic->dw_y_adr +
3414 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3415 }
3416
3417 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3418 hevc_print(hevc, 0,
3419 "%s index %d BUF_index %d mc_y_adr %x\n",
3420 __func__, pic->index,
3421 pic->BUF_index, pic->mc_y_adr);
3422 if (hevc->mmu_enable &&
3423 dw_mode)
3424 hevc_print(hevc, 0,
3425 "mmu double write adr %ld\n",
3426 pic->cma_alloc_addr);
3427 }
3428 ret = 0;
3429
3430 return ret;
3431}
3432
3433static void init_pic_list(struct hevc_state_s *hevc)
3434{
3435 int i;
3436 int init_buf_num = get_work_pic_num(hevc);
3437 int dw_mode = get_double_write_mode(hevc);
3438 struct vdec_s *vdec = hw_to_vdec(hevc);
3439	/* allocation of the decoder buffers is delayed when working in V4L mode */
3440 if (!hevc->is_used_v4l) {
3441 for (i = 0; i < init_buf_num; i++) {
3442 if (alloc_buf(hevc) < 0) {
3443 if (i <= 8) {
3444				/* if (i + 1) >= 9 buffers were already
3445				   allocated, don't report a fatal error */
3446 hevc->fatal_error |=
3447 DECODER_FATAL_ERROR_NO_MEM;
3448 }
3449 break;
3450 }
3451 }
3452 }
3453
3454 for (i = 0; i < init_buf_num; i++) {
3455 struct PIC_s *pic = hevc->m_PIC[i];
3456
3457 if (!pic) {
3458 pic = vmalloc(sizeof(struct PIC_s));
3459 if (pic == NULL) {
3460 hevc_print(hevc, 0,
3461 "%s: alloc pic %d fail!!!\n",
3462 __func__, i);
3463 break;
3464 }
3465 hevc->m_PIC[i] = pic;
3466 }
3467 memset(pic, 0, sizeof(struct PIC_s));
3468
3469 pic->index = i;
3470 pic->BUF_index = -1;
3471 pic->mv_buf_index = -1;
3472 if (vdec->parallel_dec == 1) {
3473 pic->y_canvas_index = -1;
3474 pic->uv_canvas_index = -1;
3475 }
3476
3477 pic->width = hevc->pic_w;
3478 pic->height = hevc->pic_h;
3479 pic->double_write_mode = dw_mode;
3480
3481		/* canvas configuration is delayed when working in V4L mode */
3482 if (!hevc->is_used_v4l) {
3483 if (config_pic(hevc, pic) < 0) {
3484 if (get_dbg_flag(hevc))
3485 hevc_print(hevc, 0,
3486 "Config_pic %d fail\n", pic->index);
3487 pic->index = -1;
3488 i++;
3489 break;
3490 }
3491
3492 if (pic->double_write_mode)
3493 set_canvas(hevc, pic);
3494 }
3495 }
3496
3497 for (; i < MAX_REF_PIC_NUM; i++) {
3498 struct PIC_s *pic = hevc->m_PIC[i];
3499
3500 if (!pic) {
3501 pic = vmalloc(sizeof(struct PIC_s));
3502 if (pic == NULL) {
3503 hevc_print(hevc, 0,
3504 "%s: alloc pic %d fail!!!\n",
3505 __func__, i);
3506 break;
3507 }
3508 hevc->m_PIC[i] = pic;
3509 }
3510 memset(pic, 0, sizeof(struct PIC_s));
3511
3512 pic->index = -1;
3513 pic->BUF_index = -1;
3514 if (vdec->parallel_dec == 1) {
3515 pic->y_canvas_index = -1;
3516 pic->uv_canvas_index = -1;
3517 }
3518 }
3519
3520}
3521
3522static void uninit_pic_list(struct hevc_state_s *hevc)
3523{
3524 struct vdec_s *vdec = hw_to_vdec(hevc);
3525 int i;
3526#ifndef MV_USE_FIXED_BUF
3527 dealloc_mv_bufs(hevc);
3528#endif
3529 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3530 struct PIC_s *pic = hevc->m_PIC[i];
3531
3532 if (pic) {
3533 if (vdec->parallel_dec == 1) {
3534 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3535 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3536 }
3537 release_aux_data(hevc, pic);
3538 vfree(pic);
3539 hevc->m_PIC[i] = NULL;
3540 }
3541 }
3542}
3543
3544#ifdef LOSLESS_COMPRESS_MODE
3545static void init_decode_head_hw(struct hevc_state_s *hevc)
3546{
3547
3548 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3549 unsigned int data32;
3550
3551 int losless_comp_header_size =
3552 compute_losless_comp_header_size(hevc->pic_w,
3553 hevc->pic_h);
3554 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3555 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3556
3557 hevc->losless_comp_body_size = losless_comp_body_size;
3558
3559
3560 if (hevc->mmu_enable) {
3561 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3562 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3563 } else {
3564 if (hevc->mem_saving_mode == 1)
3565 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3566 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3567 else
3568 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3569 ((workaround_enable & 2) ? 1 : 0));
3570 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3571 /*
3572 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3573 * //8-bit mode
3574 */
3575 }
3576 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3577 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3578 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3579
3580 if (hevc->mmu_enable) {
3581 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3582 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3583 buf_spec->mmu_vbh.buf_start +
3584 buf_spec->mmu_vbh.buf_size/2);
3585 data32 = READ_VREG(HEVC_SAO_CTRL9);
3586 data32 |= 0x1;
3587 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3588
3589 /* use HEVC_CM_HEADER_START_ADDR */
3590 data32 = READ_VREG(HEVC_SAO_CTRL5);
3591 data32 |= (1<<10);
3592 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3593 }
3594
3595 if (!hevc->m_ins_flag)
3596 hevc_print(hevc, 0,
3597 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3598 __func__, hevc->pic_w, hevc->pic_h,
3599 losless_comp_body_size, losless_comp_header_size);
3600
3601}
3602#endif
3603#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3604
3605static void init_pic_list_hw(struct hevc_state_s *hevc)
3606{
3607 int i;
3608 int cur_pic_num = MAX_REF_PIC_NUM;
3609 int dw_mode = get_double_write_mode(hevc);
3610 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3611 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3612 (0x1 << 1) | (0x1 << 2));
3613 else
3614 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3615
3616 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3617 if (hevc->m_PIC[i] == NULL ||
3618 hevc->m_PIC[i]->index == -1) {
3619 cur_pic_num = i;
3620 break;
3621 }
3622 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3623 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3624 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3625 hevc->m_PIC[i]->header_adr>>5);
3626 else
3627 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3628 hevc->m_PIC[i]->mc_y_adr >> 5);
3629 } else
3630 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3631 hevc->m_PIC[i]->mc_y_adr |
3632 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3633 if (dw_mode & 0x10) {
3634 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3635 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3636 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3637 }
3638 else
3639 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3640 hevc->m_PIC[i]->mc_u_v_adr |
3641 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3642 | 0x1);
3643 }
3644 }
3645 if (cur_pic_num == 0)
3646 return;
3647 for (; i < MAX_REF_PIC_NUM; i++) {
3648 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3649 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3650 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3651 hevc->m_PIC[cur_pic_num-1]->header_adr>>5);
3652 else
3653 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3654 hevc->m_PIC[cur_pic_num-1]->mc_y_adr >> 5);
3655#ifndef LOSLESS_COMPRESS_MODE
3656 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3657 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr >> 5);
3658#endif
3659 } else {
3660 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3661 hevc->m_PIC[cur_pic_num-1]->mc_y_adr|
3662 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_y<<8)
3663 | 0x1);
3664#ifndef LOSLESS_COMPRESS_MODE
3665 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3666 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr|
3667 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_u_v<<8)
3668 | 0x1);
3669#endif
3670 }
3671 }
3672
3673 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3674
3675 /* Zero out canvas registers in IPP -- avoid simulation X */
3676 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3677 (0 << 8) | (0 << 1) | 1);
3678 for (i = 0; i < 32; i++)
3679 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3680
3681#ifdef LOSLESS_COMPRESS_MODE
3682 if ((dw_mode & 0x10) == 0)
3683 init_decode_head_hw(hevc);
3684#endif
3685
3686}
3687
3688
3689static void dump_pic_list(struct hevc_state_s *hevc)
3690{
3691 int i;
3692 struct PIC_s *pic;
3693
3694 hevc_print(hevc, 0,
3695 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3696 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3697 pic = hevc->m_PIC[i];
3698 if (pic == NULL || pic->index == -1)
3699 continue;
3700 hevc_print_cont(hevc, 0,
3701 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3702 pic->index, pic->BUF_index,
3703#ifndef MV_USE_FIXED_BUF
3704 pic->mv_buf_index,
3705#else
3706 -1,
3707#endif
3708 pic->decode_idx, pic->POC, pic->referenced);
3709 hevc_print_cont(hevc, 0,
3710			"num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d, ",
3711 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3712 pic->width, pic->height);
3713 hevc_print_cont(hevc, 0,
3714 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3715 pic->output_ready, pic->mpred_mv_wr_start_addr,
3716 pic->vf_ref);
3717 }
3718}
3719
3720static void clear_referenced_flag(struct hevc_state_s *hevc)
3721{
3722 int i;
3723 struct PIC_s *pic;
3724 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3725 pic = hevc->m_PIC[i];
3726 if (pic == NULL || pic->index == -1)
3727 continue;
3728 if (pic->referenced) {
3729 pic->referenced = 0;
3730 put_mv_buf(hevc, pic);
3731 }
3732 }
3733}
3734
3735static void clear_poc_flag(struct hevc_state_s *hevc)
3736{
3737 int i;
3738 struct PIC_s *pic;
3739 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3740 pic = hevc->m_PIC[i];
3741 if (pic == NULL || pic->index == -1)
3742 continue;
3743 pic->POC = INVALID_POC;
3744 }
3745}
3746
3747static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3748 unsigned char flush_flag)
3749{
3750 int num_pic_not_yet_display = 0;
3751 int i;
3752 struct PIC_s *pic;
3753 struct PIC_s *pic_display = NULL;
3754 struct vdec_s *vdec = hw_to_vdec(hevc);
3755
3756 if (hevc->i_only & 0x4) {
3757 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3758 pic = hevc->m_PIC[i];
3759 if (pic == NULL ||
3760 (pic->index == -1) ||
3761 (pic->BUF_index == -1) ||
3762 (pic->POC == INVALID_POC))
3763 continue;
3764 if (pic->output_mark) {
3765 if (pic_display) {
3766 if (pic->decode_idx <
3767 pic_display->decode_idx)
3768 pic_display = pic;
3769
3770 } else
3771 pic_display = pic;
3772
3773 }
3774 }
3775 if (pic_display) {
3776 pic_display->output_mark = 0;
3777 pic_display->recon_mark = 0;
3778 pic_display->output_ready = 1;
3779 pic_display->referenced = 0;
3780 put_mv_buf(hevc, pic_display);
3781 }
3782 } else {
3783 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3784 pic = hevc->m_PIC[i];
3785 if (pic == NULL ||
3786 (pic->index == -1) ||
3787 (pic->BUF_index == -1) ||
3788 (pic->POC == INVALID_POC))
3789 continue;
3790 if (pic->output_mark)
3791 num_pic_not_yet_display++;
3792 if (pic->slice_type == 2 &&
3793 hevc->vf_pre_count == 0 &&
3794 fast_output_enable & 0x1) {
3795 /*fast output for first I picture*/
3796 pic->num_reorder_pic = 0;
3797 if (vdec->master || vdec->slave)
3798 pic_display = pic;
3799 hevc_print(hevc, 0, "VH265: output first frame\n");
3800 }
3801 }
3802
3803 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3804 pic = hevc->m_PIC[i];
3805 if (pic == NULL ||
3806 (pic->index == -1) ||
3807 (pic->BUF_index == -1) ||
3808 (pic->POC == INVALID_POC))
3809 continue;
3810 if (pic->output_mark) {
3811 if (pic_display) {
3812 if (pic->POC < pic_display->POC)
3813 pic_display = pic;
3814 else if ((pic->POC == pic_display->POC)
3815 && (pic->decode_idx <
3816 pic_display->
3817 decode_idx))
3818 pic_display
3819 = pic;
3820 } else
3821 pic_display = pic;
3822 }
3823 }
3824 if (pic_display) {
3825 if ((num_pic_not_yet_display >
3826 pic_display->num_reorder_pic)
3827 || flush_flag) {
3828 pic_display->output_mark = 0;
3829 pic_display->recon_mark = 0;
3830 pic_display->output_ready = 1;
3831 } else if (num_pic_not_yet_display >=
3832 (MAX_REF_PIC_NUM - 1)) {
3833 pic_display->output_mark = 0;
3834 pic_display->recon_mark = 0;
3835 pic_display->output_ready = 1;
3836 hevc_print(hevc, 0,
3837					"Warning, num_reorder_pic %d is beyond buf num\n",
3838 pic_display->num_reorder_pic);
3839 } else
3840 pic_display = NULL;
3841 }
3842 }
3843
3844 if (pic_display && (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3845 pic_display = NULL;
3846 hevc->first_pic_flag = 0;
3847 }
3848 return pic_display;
3849}
3850
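/*
 * Program the motion-compensation reference canvases for the current
 * picture: list0 for P/B slices, plus list1 for B slices.  A reference
 * that is missing, error-marked or size-mismatched propagates
 * error_mark to the current picture.
 */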
3851static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3852{
3853 int i;
3854 struct PIC_s *pic;
3855
3856 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3857 hevc_print(hevc, 0,
3858 "config_mc_buffer entered .....\n");
3859 if (cur_pic->slice_type != 2) { /* P and B pic */
3860 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3861 (0 << 8) | (0 << 1) | 1);
3862 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3863 pic =
3864 get_ref_pic_by_POC(hevc,
3865 cur_pic->
3866 m_aiRefPOCList0[cur_pic->
3867 slice_idx][i]);
3868 if (pic) {
3869 if ((pic->width != hevc->pic_w) ||
3870 (pic->height != hevc->pic_h)) {
3871 hevc_print(hevc, 0,
3872 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3873 __func__, pic->POC,
3874 pic->width, pic->height);
3875 cur_pic->error_mark = 1;
3876 }
3877 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3878 cur_pic->error_mark = 1;
3879 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3880 (pic->mc_canvas_u_v << 16)
3881 | (pic->mc_canvas_u_v
3882 << 8) |
3883 pic->mc_canvas_y);
3884 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3885 hevc_print_cont(hevc, 0,
3886 "refid %x mc_canvas_u_v %x",
3887 i, pic->mc_canvas_u_v);
3888 hevc_print_cont(hevc, 0,
3889 " mc_canvas_y %x\n",
3890 pic->mc_canvas_y);
3891 }
3892 } else
3893 cur_pic->error_mark = 1;
3894
3895 if (pic == NULL || pic->error_mark) {
3896 hevc_print(hevc, 0,
3897 "Error %s, %dth poc (%d) %s",
3898 __func__, i,
3899 cur_pic->m_aiRefPOCList0[cur_pic->
3900 slice_idx][i],
3901 pic ? "has error" :
3902 "not in list0");
3903 }
3904 }
3905 }
3906 if (cur_pic->slice_type == 0) { /* B pic */
3907 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3908 hevc_print(hevc, 0,
3909 "config_mc_buffer RefNum_L1\n");
3910 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3911 (16 << 8) | (0 << 1) | 1);
3912
3913 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3914 pic =
3915 get_ref_pic_by_POC(hevc,
3916 cur_pic->
3917 m_aiRefPOCList1[cur_pic->
3918 slice_idx][i]);
3919 if (pic) {
3920 if ((pic->width != hevc->pic_w) ||
3921 (pic->height != hevc->pic_h)) {
3922 hevc_print(hevc, 0,
3923 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3924 __func__, pic->POC,
3925 pic->width, pic->height);
3926 cur_pic->error_mark = 1;
3927 }
3928
3929 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3930 cur_pic->error_mark = 1;
3931 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3932 (pic->mc_canvas_u_v << 16)
3933 | (pic->mc_canvas_u_v
3934 << 8) |
3935 pic->mc_canvas_y);
3936 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3937 hevc_print_cont(hevc, 0,
3938 "refid %x mc_canvas_u_v %x",
3939 i, pic->mc_canvas_u_v);
3940 hevc_print_cont(hevc, 0,
3941 " mc_canvas_y %x\n",
3942 pic->mc_canvas_y);
3943 }
3944 } else
3945 cur_pic->error_mark = 1;
3946
3947 if (pic == NULL || pic->error_mark) {
3948 hevc_print(hevc, 0,
3949 "Error %s, %dth poc (%d) %s",
3950 __func__, i,
3951 cur_pic->m_aiRefPOCList1[cur_pic->
3952 slice_idx][i],
3953 pic ? "has error" :
3954 "not in list1");
3955 }
3956 }
3957 }
3958 return 0;
3959}
3960
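/*
 * Clear the referenced flag (and release the MV buffer) of every DPB
 * picture that is not covered by the current slice's short-term RPS.
 * Each CUR_RPS[] entry carries a delta-POC in its low bits and a sign
 * in bit (RPS_USED_BIT - 1); 0x8000 terminates the list.  Purely for
 * illustration, if RPS_USED_BIT were 14, delta would be entry & 0x1fff
 * and the referenced POC either cur_poc - (0x2000 - delta) or
 * cur_poc + delta.
 */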
3961static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
3962 union param_u *params)
3963{
3964 int ii, i;
3965 int poc_tmp;
3966 struct PIC_s *pic;
3967 unsigned char is_referenced;
3968 /* hevc_print(hevc, 0,
3969 "%s cur_poc %d\n", __func__, cur_poc); */
3970 if (pic_list_debug & 0x2) {
3971 pr_err("cur poc %d\n", cur_poc);
3972 }
3973 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
3974 pic = hevc->m_PIC[ii];
3975 if (pic == NULL ||
3976 pic->index == -1 ||
3977 pic->BUF_index == -1
3978 )
3979 continue;
3980
3981 if ((pic->referenced == 0 || pic->POC == cur_poc))
3982 continue;
3983 is_referenced = 0;
3984 for (i = 0; i < 16; i++) {
3985 int delt;
3986
3987 if (params->p.CUR_RPS[i] & 0x8000)
3988 break;
3989 delt =
3990 params->p.CUR_RPS[i] &
3991 ((1 << (RPS_USED_BIT - 1)) - 1);
3992 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
3993 poc_tmp =
3994 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
3995 delt);
3996 } else
3997 poc_tmp = cur_poc + delt;
3998 if (poc_tmp == pic->POC) {
3999 is_referenced = 1;
4000 /* hevc_print(hevc, 0, "i is %d\n", i); */
4001 break;
4002 }
4003 }
4004 if (is_referenced == 0) {
4005 pic->referenced = 0;
4006 put_mv_buf(hevc, pic);
4007 /* hevc_print(hevc, 0,
4008 "set poc %d reference to 0\n", pic->POC); */
4009 if (pic_list_debug & 0x2) {
4010 pr_err("set poc %d reference to 0\n", pic->POC);
4011 }
4012 }
4013 }
4014
4015}
4016
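/*
 * Build the reference POC lists for the current slice: negative and
 * positive delta-POC entries from CUR_RPS fill RefPicSetStCurr0/1,
 * which are then copied, optionally through the ref-list modification
 * indices, into pic->m_aiRefPOCList0 and, for B slices,
 * pic->m_aiRefPOCList1.
 */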
4017static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
4018{
4019 struct PIC_s *pic = hevc->cur_pic;
4020 int i, rIdx;
4021 int num_neg = 0;
4022 int num_pos = 0;
4023 int total_num;
4024 int num_ref_idx_l0_active =
4025 (params->p.num_ref_idx_l0_active >
4026 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4027 params->p.num_ref_idx_l0_active;
4028 int num_ref_idx_l1_active =
4029 (params->p.num_ref_idx_l1_active >
4030 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4031 params->p.num_ref_idx_l1_active;
4032
4033 int RefPicSetStCurr0[16];
4034 int RefPicSetStCurr1[16];
4035
4036 for (i = 0; i < 16; i++) {
4037 RefPicSetStCurr0[i] = 0;
4038 RefPicSetStCurr1[i] = 0;
4039 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
4040 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
4041 }
4042 for (i = 0; i < 16; i++) {
4043 if (params->p.CUR_RPS[i] & 0x8000)
4044 break;
4045 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
4046 int delt =
4047 params->p.CUR_RPS[i] &
4048 ((1 << (RPS_USED_BIT - 1)) - 1);
4049
4050 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
4051 RefPicSetStCurr0[num_neg] =
4052 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
4053 delt);
4054 /* hevc_print(hevc, 0,
4055 * "RefPicSetStCurr0 %x %x %x\n",
4056 * RefPicSetStCurr0[num_neg], pic->POC,
4057 * (0x800-(params[i]&0x7ff)));
4058 */
4059 num_neg++;
4060 } else {
4061 RefPicSetStCurr1[num_pos] = pic->POC + delt;
4062 /* hevc_print(hevc, 0,
4063 * "RefPicSetStCurr1 %d\n",
4064 * RefPicSetStCurr1[num_pos]);
4065 */
4066 num_pos++;
4067 }
4068 }
4069 }
4070 total_num = num_neg + num_pos;
4071 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4072 hevc_print(hevc, 0,
4073 "%s: curpoc %d slice_type %d, total %d ",
4074 __func__, pic->POC, params->p.slice_type, total_num);
4075 hevc_print_cont(hevc, 0,
4076 "num_neg %d num_list0 %d num_list1 %d\n",
4077 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
4078 }
4079
4080 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4081 hevc_print(hevc, 0,
4082 "HEVC Stream buf start ");
4083 hevc_print_cont(hevc, 0,
4084 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
4085 READ_VREG(HEVC_STREAM_START_ADDR),
4086 READ_VREG(HEVC_STREAM_END_ADDR),
4087 READ_VREG(HEVC_STREAM_WR_PTR),
4088 READ_VREG(HEVC_STREAM_RD_PTR),
4089 READ_VREG(HEVC_STREAM_LEVEL),
4090 READ_VREG(HEVC_STREAM_FIFO_CTL),
4091 READ_VREG(HEVC_PARSER_INT_CONTROL));
4092 }
4093
4094 if (total_num > 0) {
4095 if (params->p.modification_flag & 0x1) {
4096 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4097 hevc_print(hevc, 0, "ref0 POC (modification):");
4098 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4099 int cIdx = params->p.modification_list[rIdx];
4100
4101 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4102 cIdx >=
4103 num_neg ? RefPicSetStCurr1[cIdx -
4104 num_neg] :
4105 RefPicSetStCurr0[cIdx];
4106 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4107 hevc_print_cont(hevc, 0, "%d ",
4108 pic->m_aiRefPOCList0[pic->
4109 slice_idx]
4110 [rIdx]);
4111 }
4112 }
4113 } else {
4114 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4115 hevc_print(hevc, 0, "ref0 POC:");
4116 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4117 int cIdx = rIdx % total_num;
4118
4119 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4120 cIdx >=
4121 num_neg ? RefPicSetStCurr1[cIdx -
4122 num_neg] :
4123 RefPicSetStCurr0[cIdx];
4124 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4125 hevc_print_cont(hevc, 0, "%d ",
4126 pic->m_aiRefPOCList0[pic->
4127 slice_idx]
4128 [rIdx]);
4129 }
4130 }
4131 }
4132 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4133 hevc_print_cont(hevc, 0, "\n");
4134 if (params->p.slice_type == B_SLICE) {
4135 if (params->p.modification_flag & 0x2) {
4136 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4137 hevc_print(hevc, 0,
4138 "ref1 POC (modification):");
4139 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4140 rIdx++) {
4141 int cIdx;
4142
4143 if (params->p.modification_flag & 0x1) {
4144 cIdx =
4145 params->p.
4146 modification_list
4147 [num_ref_idx_l0_active +
4148 rIdx];
4149 } else {
4150 cIdx =
4151 params->p.
4152 modification_list[rIdx];
4153 }
4154 pic->m_aiRefPOCList1[pic->
4155 slice_idx][rIdx] =
4156 cIdx >=
4157 num_pos ?
4158 RefPicSetStCurr0[cIdx - num_pos]
4159 : RefPicSetStCurr1[cIdx];
4160 if (get_dbg_flag(hevc) &
4161 H265_DEBUG_BUFMGR) {
4162 hevc_print_cont(hevc, 0, "%d ",
4163 pic->
4164 m_aiRefPOCList1[pic->
4165 slice_idx]
4166 [rIdx]);
4167 }
4168 }
4169 } else {
4170 if (get_dbg_flag(hevc) &
4171 H265_DEBUG_BUFMGR)
4172 hevc_print(hevc, 0, "ref1 POC:");
4173 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4174 rIdx++) {
4175 int cIdx = rIdx % total_num;
4176
4177 pic->m_aiRefPOCList1[pic->
4178 slice_idx][rIdx] =
4179 cIdx >=
4180 num_pos ?
4181 RefPicSetStCurr0[cIdx -
4182 num_pos]
4183 : RefPicSetStCurr1[cIdx];
4184 if (get_dbg_flag(hevc) &
4185 H265_DEBUG_BUFMGR) {
4186 hevc_print_cont(hevc, 0, "%d ",
4187 pic->
4188 m_aiRefPOCList1[pic->
4189 slice_idx]
4190 [rIdx]);
4191 }
4192 }
4193 }
4194 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4195 hevc_print_cont(hevc, 0, "\n");
4196 }
4197 }
4198 /*set m_PIC */
4199 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4200 (params->p.slice_type == P_SLICE) ? 1 :
4201 (params->p.slice_type == B_SLICE) ? 0 : 3;
4202 pic->RefNum_L0 = num_ref_idx_l0_active;
4203 pic->RefNum_L1 = num_ref_idx_l1_active;
4204}
4205
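/*
 * Rebuild hevc->m_tile[][] from the PPS tile syntax: uniform spacing
 * when bit 1 of tiles_enabled_flag is set, otherwise the explicit
 * tile_width/tile_height arrays.  Each tile is also assigned its SAO
 * vertical and above line-buffer start addresses inside the work
 * space.
 */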
4206static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4207 int pic_height_cu, int sao_mem_unit,
4208 union param_u *params)
4209{
4210 int i, j;
4211 int start_cu_x, start_cu_y;
4212 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4213 int sao_abv_size = sao_mem_unit * pic_width_cu;
4214#ifdef DETREFILL_ENABLE
4215 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4216 int tmpRefillLcuSize = 1 <<
4217 (params->p.log2_min_coding_block_size_minus3 +
4218 3 + params->p.log2_diff_max_min_coding_block_size);
4219 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4220 "%x, %x, %x, %x\n",
4221 params->p.slice_segment_address,
4222 params->p.bit_depth,
4223 params->p.tiles_enabled_flag,
4224 tmpRefillLcuSize);
4225 if (params->p.slice_segment_address == 0 &&
4226 params->p.bit_depth != 0 &&
4227 (params->p.tiles_enabled_flag & 1) &&
4228 tmpRefillLcuSize == 64)
4229 hevc->delrefill_check = 1;
4230 else
4231 hevc->delrefill_check = 0;
4232 }
4233#endif
4234
4235 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4236 if (params->p.tiles_enabled_flag & 1) {
4237 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4238 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4239
4240 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4241 || hevc->num_tile_row <= 0) {
4242 hevc->num_tile_row = 1;
4243 hevc_print(hevc, 0,
4244 "%s: num_tile_rows_minus1 (%d) error!!\n",
4245 __func__, params->p.num_tile_rows_minus1);
4246 }
4247 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4248 || hevc->num_tile_col <= 0) {
4249 hevc->num_tile_col = 1;
4250 hevc_print(hevc, 0,
4251 "%s: num_tile_columns_minus1 (%d) error!!\n",
4252 __func__, params->p.num_tile_columns_minus1);
4253 }
4254 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4255 hevc_print(hevc, 0,
4256 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4257 __func__, pic_width_cu, pic_height_cu);
4258 hevc_print_cont(hevc, 0,
4259 "num_tile_col %d num_tile_row %d:\n",
4260 hevc->num_tile_col, hevc->num_tile_row);
4261 }
4262
4263 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4264 int w = pic_width_cu / hevc->num_tile_col;
4265 int h = pic_height_cu / hevc->num_tile_row;
4266
4267 start_cu_y = 0;
4268 for (i = 0; i < hevc->num_tile_row; i++) {
4269 start_cu_x = 0;
4270 for (j = 0; j < hevc->num_tile_col; j++) {
4271 if (j == (hevc->num_tile_col - 1)) {
4272 hevc->m_tile[i][j].width =
4273 pic_width_cu -
4274 start_cu_x;
4275 } else
4276 hevc->m_tile[i][j].width = w;
4277 if (i == (hevc->num_tile_row - 1)) {
4278 hevc->m_tile[i][j].height =
4279 pic_height_cu -
4280 start_cu_y;
4281 } else
4282 hevc->m_tile[i][j].height = h;
4283 hevc->m_tile[i][j].start_cu_x
4284 = start_cu_x;
4285 hevc->m_tile[i][j].start_cu_y
4286 = start_cu_y;
4287 hevc->m_tile[i][j].sao_vb_start_addr =
4288 hevc->work_space_buf->sao_vb.
4289 buf_start + j * sao_vb_size;
4290 hevc->m_tile[i][j].sao_abv_start_addr =
4291 hevc->work_space_buf->sao_abv.
4292 buf_start + i * sao_abv_size;
4293 if (get_dbg_flag(hevc) &
4294 H265_DEBUG_BUFMGR) {
4295 hevc_print_cont(hevc, 0,
4296 "{y=%d, x=%d w %d h %d ",
4297 i, j, hevc->m_tile[i][j].width,
4298 hevc->m_tile[i][j].height);
4299 hevc_print_cont(hevc, 0,
4300 "start_x %d start_y %d ",
4301 hevc->m_tile[i][j].start_cu_x,
4302 hevc->m_tile[i][j].start_cu_y);
4303 hevc_print_cont(hevc, 0,
4304 "sao_vb_start 0x%x ",
4305 hevc->m_tile[i][j].
4306 sao_vb_start_addr);
4307 hevc_print_cont(hevc, 0,
4308 "sao_abv_start 0x%x}\n",
4309 hevc->m_tile[i][j].
4310 sao_abv_start_addr);
4311 }
4312 start_cu_x += hevc->m_tile[i][j].width;
4313
4314 }
4315 start_cu_y += hevc->m_tile[i][0].height;
4316 }
4317 } else {
4318 start_cu_y = 0;
4319 for (i = 0; i < hevc->num_tile_row; i++) {
4320 start_cu_x = 0;
4321 for (j = 0; j < hevc->num_tile_col; j++) {
4322 if (j == (hevc->num_tile_col - 1)) {
4323 hevc->m_tile[i][j].width =
4324 pic_width_cu -
4325 start_cu_x;
4326 } else {
4327 hevc->m_tile[i][j].width =
4328 params->p.tile_width[j];
4329 }
4330 if (i == (hevc->num_tile_row - 1)) {
4331 hevc->m_tile[i][j].height =
4332 pic_height_cu -
4333 start_cu_y;
4334 } else {
4335 hevc->m_tile[i][j].height =
4336 params->
4337 p.tile_height[i];
4338 }
4339 hevc->m_tile[i][j].start_cu_x
4340 = start_cu_x;
4341 hevc->m_tile[i][j].start_cu_y
4342 = start_cu_y;
4343 hevc->m_tile[i][j].sao_vb_start_addr =
4344 hevc->work_space_buf->sao_vb.
4345 buf_start + j * sao_vb_size;
4346 hevc->m_tile[i][j].sao_abv_start_addr =
4347 hevc->work_space_buf->sao_abv.
4348 buf_start + i * sao_abv_size;
4349 if (get_dbg_flag(hevc) &
4350 H265_DEBUG_BUFMGR) {
4351 hevc_print_cont(hevc, 0,
4352 "{y=%d, x=%d w %d h %d ",
4353 i, j, hevc->m_tile[i][j].width,
4354 hevc->m_tile[i][j].height);
4355 hevc_print_cont(hevc, 0,
4356 "start_x %d start_y %d ",
4357 hevc->m_tile[i][j].start_cu_x,
4358 hevc->m_tile[i][j].start_cu_y);
4359 hevc_print_cont(hevc, 0,
4360 "sao_vb_start 0x%x ",
4361 hevc->m_tile[i][j].
4362 sao_vb_start_addr);
4363 hevc_print_cont(hevc, 0,
4364 "sao_abv_start 0x%x}\n",
4365 hevc->m_tile[i][j].
4366 sao_abv_start_addr);
4367
4368 }
4369 start_cu_x += hevc->m_tile[i][j].width;
4370 }
4371 start_cu_y += hevc->m_tile[i][0].height;
4372 }
4373 }
4374 } else {
4375 hevc->num_tile_col = 1;
4376 hevc->num_tile_row = 1;
4377 hevc->m_tile[0][0].width = pic_width_cu;
4378 hevc->m_tile[0][0].height = pic_height_cu;
4379 hevc->m_tile[0][0].start_cu_x = 0;
4380 hevc->m_tile[0][0].start_cu_y = 0;
4381 hevc->m_tile[0][0].sao_vb_start_addr =
4382 hevc->work_space_buf->sao_vb.buf_start;
4383 hevc->m_tile[0][0].sao_abv_start_addr =
4384 hevc->work_space_buf->sao_abv.buf_start;
4385 }
4386}
4387
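/*
 * Map an LCU address to tile coordinates.  The return value packs
 * tile_x in the low byte and tile_y at bit 8; -1 is returned when
 * pic_width_lcu is 0 (corrupt stream).
 */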
4388static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4389 int pic_width_lcu)
4390{
4391 int cu_x;
4392 int cu_y;
4393 int tile_x = 0;
4394 int tile_y = 0;
4395 int i;
4396
4397 if (pic_width_lcu == 0) {
4398 if (get_dbg_flag(hevc)) {
4399 hevc_print(hevc, 0,
4400 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4401 __func__, hevc->pic_w, hevc->pic_h);
4402 }
4403 return -1;
4404 }
4405 cu_x = cu_adr % pic_width_lcu;
4406 cu_y = cu_adr / pic_width_lcu;
4407 if (hevc->tile_enabled) {
4408 for (i = 0; i < hevc->num_tile_col; i++) {
4409 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4410 tile_x = i;
4411 else
4412 break;
4413 }
4414 for (i = 0; i < hevc->num_tile_row; i++) {
4415 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4416 tile_y = i;
4417 else
4418 break;
4419 }
4420 }
4421 return (tile_x) | (tile_y << 8);
4422}
4423
4424static void print_scratch_error(int error_num)
4425{
4426#if 0
4427 if (get_dbg_flag(hevc)) {
4428 hevc_print(hevc, 0,
4429 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4430 error_num);
4431 }
4432#endif
4433}
4434
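/*
 * Point the decoder at the per-instance work-space buffers (IPP line
 * buffer, RPM, short-term RPS, VPS/SPS/PPS, SAO, scale LUT, deblock
 * data) and, when the MMU is in use, at the frame MMU map buffer.
 */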
4435static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4436{
4437 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4438
4439 if (get_dbg_flag(hevc))
4440 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4441 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4442 __func__,
4443 buf_spec->ipp.buf_start,
4444 buf_spec->start_adr,
4445 buf_spec->short_term_rps.buf_start,
4446 buf_spec->vps.buf_start,
4447 buf_spec->sps.buf_start,
4448 buf_spec->pps.buf_start,
4449 buf_spec->sao_up.buf_start,
4450 buf_spec->swap_buf.buf_start,
4451 buf_spec->swap_buf2.buf_start,
4452 buf_spec->scalelut.buf_start,
4453 buf_spec->dblk_para.buf_start,
4454 buf_spec->dblk_data.buf_start,
4455 buf_spec->dblk_data2.buf_start);
4456 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4457 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4458 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4459 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4460 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4461 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4462 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4463 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4464 if (hevc->mmu_enable) {
4465 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4466 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4467 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4468 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4469 } else
4470 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4471 } /*else
4472 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4473 buf_spec->swap_buf.buf_start);
4474 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4475 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4476 /* cfg_p_addr */
4477 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4478 /* cfg_d_addr */
4479 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4480
4481 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4482
4483 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4484}
4485
4486static void parser_cmd_write(void)
4487{
4488 u32 i;
4489 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4490 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4491 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4492 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4493 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4494 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4495 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4496 0x7C00
4497 };
4498 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4499 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4500}
4501
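/*
 * One-time HEVC core bring-up for a decode session: verify the parser
 * scratch registers, reset IQIT, enable stream fetch and shift,
 * program the parser interrupt mask and parser command list, clear the
 * scale LUT and soft-reset IPP/MPP.
 */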
4502static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4503 int decode_pic_begin, int decode_pic_num)
4504{
4505 unsigned int data32;
4506 int i;
4507#if 0
4508 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4509 /* Set MCR fetch priorities*/
4510 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4511 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4512 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4513 }
4514#endif
4515#if 1
4516 /* m8baby test1902 */
4517 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4518 hevc_print(hevc, 0,
4519 "%s\n", __func__);
4520 data32 = READ_VREG(HEVC_PARSER_VERSION);
4521 if (data32 != 0x00010001) {
4522 print_scratch_error(25);
4523 return;
4524 }
4525 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4526 data32 = READ_VREG(HEVC_PARSER_VERSION);
4527 if (data32 != 0x5a5a55aa) {
4528 print_scratch_error(26);
4529 return;
4530 }
4531#if 0
4532 /* test Parser Reset */
4533 /* reset iqit to start mem init again */
4534 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4535 (1 << 3) /* reset_whole parser */
4536 );
4537 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4538 data32 = READ_VREG(HEVC_PARSER_VERSION);
4539 if (data32 != 0x00010001)
4540 hevc_print(hevc, 0,
4541 "Test Parser Fatal Error\n");
4542#endif
4543 /* reset iqit to start mem init again */
4544 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4545 );
4546 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4547 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4548
4549#endif
4550 if (!hevc->m_ins_flag) {
4551 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4552 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4553 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4554 data32 |= (0xf << 25); /*arwlen_axi_max*/
4555 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4556 }
4557 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4558 if (data32 != 0x00000100) {
4559 print_scratch_error(29);
4560 return;
4561 }
4562 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4563 if (data32 != 0x00000300) {
4564 print_scratch_error(30);
4565 return;
4566 }
4567 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4568 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4569 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4570 if (data32 != 0x12345678) {
4571 print_scratch_error(31);
4572 return;
4573 }
4574 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4575 if (data32 != 0x9abcdef0) {
4576 print_scratch_error(32);
4577 return;
4578 }
4579 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4580 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4581
4582 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4583 data32 &= 0x03ffffff;
4584 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4585 | /* stream_buffer_empty_int_amrisc_enable */
4586 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4587 (1 << 7) | /* dec_done_int_cpu_enable */
4588 (1 << 4) | /* startcode_found_int_cpu_enable */
4589 (0 << 3) | /* startcode_found_int_amrisc_enable */
4590 (1 << 0) /* parser_int_enable */
4591 ;
4592 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4593
4594 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4595 data32 = data32 | (1 << 1) | /* emulation_check_on */
4596 (1 << 0) /* startcode_check_on */
4597 ;
4598 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4599
4600 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4601 (2 << 4) | /* emulate_code_length_sub_1 */
4602 (2 << 1) | /* start_code_length_sub_1 */
4603 (1 << 0) /* stream_shift_enable */
4604 );
4605
4606 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4607 );
4608 /* hevc_parser_core_clk_en */
4609 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4610 );
4611
4612 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4613
4614	/* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4615 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4616 for (i = 0; i < 1024; i++)
4617 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4618
4619#ifdef ENABLE_SWAP_TEST
4620 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4621#endif
4622
4623 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4624 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4625 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4626 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4627 /* Send parser_cmd */
4628 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4629
4630 parser_cmd_write();
4631
4632 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4633 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4634 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4635
4636 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4637 /* (1 << 8) | // sao_sw_pred_enable */
4638 (1 << 5) | /* parser_sao_if_en */
4639 (1 << 2) | /* parser_mpred_if_en */
4640 (1 << 0) /* parser_scaler_if_en */
4641 );
4642
4643 /* Changed to Start MPRED in microcode */
4644 /*
4645 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4646 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4647 * (1<<31)
4648 * );
4649 */
4650
4651 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4652 (1 << 0) /* software reset ipp and mpp */
4653 );
4654 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4655 (0 << 0) /* software reset ipp and mpp */
4656 );
4657
4658 if (get_double_write_mode(hevc) & 0x10)
4659 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4660			0x1 << 31 /* Enable NV21 reference read mode for MC */
4661 );
4662
4663}
4664
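/*
 * Lightweight re-initialization between decodes: repeats the
 * parser/IQIT/IPP setup done in hevc_init_decoder_hw(), minus the
 * HEVC_PARSER_VERSION handshake.
 */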
4665static void decoder_hw_reset(void)
4666{
4667 int i;
4668 unsigned int data32;
4669 /* reset iqit to start mem init again */
4670 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4671 );
4672 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4673 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4674
4675 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4676 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4677 ;
4678 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4679
4680 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4681 if (data32 != 0x00000100) {
4682 print_scratch_error(29);
4683 return;
4684 }
4685 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4686 if (data32 != 0x00000300) {
4687 print_scratch_error(30);
4688 return;
4689 }
4690 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4691 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4692 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4693 if (data32 != 0x12345678) {
4694 print_scratch_error(31);
4695 return;
4696 }
4697 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4698 if (data32 != 0x9abcdef0) {
4699 print_scratch_error(32);
4700 return;
4701 }
4702 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4703 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4704
4705 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4706 data32 &= 0x03ffffff;
4707 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4708 | /* stream_buffer_empty_int_amrisc_enable */
4709 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4710 (1 << 7) | /* dec_done_int_cpu_enable */
4711 (1 << 4) | /* startcode_found_int_cpu_enable */
4712 (0 << 3) | /* startcode_found_int_amrisc_enable */
4713 (1 << 0) /* parser_int_enable */
4714 ;
4715 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4716
4717 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4718 data32 = data32 | (1 << 1) | /* emulation_check_on */
4719 (1 << 0) /* startcode_check_on */
4720 ;
4721 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4722
4723 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4724 (2 << 4) | /* emulate_code_length_sub_1 */
4725 (2 << 1) | /* start_code_length_sub_1 */
4726 (1 << 0) /* stream_shift_enable */
4727 );
4728
4729 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4730 );
4731 /* hevc_parser_core_clk_en */
4732 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4733 );
4734
4735	/* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4736 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4737 for (i = 0; i < 1024; i++)
4738 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4739
4740 /* Send parser_cmd */
4741 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4742
4743 parser_cmd_write();
4744
4745 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4746 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4747 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4748
4749 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4750 /* (1 << 8) | // sao_sw_pred_enable */
4751 (1 << 5) | /* parser_sao_if_en */
4752 (1 << 2) | /* parser_mpred_if_en */
4753 (1 << 0) /* parser_scaler_if_en */
4754 );
4755
4756 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4757 (1 << 0) /* software reset ipp and mpp */
4758 );
4759 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4760 (0 << 0) /* software reset ipp and mpp */
4761 );
4762}
4763
4764#ifdef CONFIG_HEVC_CLK_FORCED_ON
4765static void config_hevc_clk_forced_on(void)
4766{
4767 unsigned int rdata32;
4768 /* IQIT */
4769 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4770 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4771
4772 /* DBLK */
4773 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4774 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4775
4776 /* SAO */
4777 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4778 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4779
4780 /* MPRED */
4781 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4782 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4783
4784 /* PARSER */
4785 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4786 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4787 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4788 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4789 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4790 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4791 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4792 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4793 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4794 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4795 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4796 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4797 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4798
4799 /* IPP */
4800 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4801 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4802
4803 /* MCRCC */
4804 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4805 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4806}
4807#endif
4808
4809#ifdef MCRCC_ENABLE
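/*
 * Seed the motion-compensation reference cache with the canvas IDs of
 * the first reference frame(s): disabled for I pictures, canvas0 plus
 * canvas1 for B pictures, and canvas0 (duplicated when only one L0
 * reference exists) for P pictures.
 */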
4810static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4811{
4812 unsigned int rdata32;
4813 unsigned int rdata32_2;
4814 int l0_cnt = 0;
4815 int l1_cnt = 0x7fff;
4816
4817 if (get_double_write_mode(hevc) & 0x10) {
4818 l0_cnt = hevc->cur_pic->RefNum_L0;
4819 l1_cnt = hevc->cur_pic->RefNum_L1;
4820 }
4821
4822 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4823
4824 if (slice_type == 2) { /* I-PIC */
4825 /* remove reset -- disables clock */
4826 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4827 return;
4828 }
4829
4830 if (slice_type == 0) { /* B-PIC */
4831 /* Programme canvas0 */
4832 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4833 (0 << 8) | (0 << 1) | 0);
4834 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4835 rdata32 = rdata32 & 0xffff;
4836 rdata32 = rdata32 | (rdata32 << 16);
4837 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4838
4839 /* Programme canvas1 */
4840 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4841 (16 << 8) | (1 << 1) | 0);
4842 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4843 rdata32_2 = rdata32_2 & 0xffff;
4844 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4845 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4846 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4847 rdata32_2 = rdata32_2 & 0xffff;
4848 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4849 }
4850 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4851 } else { /* P-PIC */
4852 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4853 (0 << 8) | (1 << 1) | 0);
4854 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4855 rdata32 = rdata32 & 0xffff;
4856 rdata32 = rdata32 | (rdata32 << 16);
4857 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4858
4859 if (l0_cnt == 1) {
4860 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4861 } else {
4862 /* Programme canvas1 */
4863 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4864 rdata32 = rdata32 & 0xffff;
4865 rdata32 = rdata32 | (rdata32 << 16);
4866 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4867 }
4868 }
4869 /* enable mcrcc progressive-mode */
4870 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4871}
4872#endif
4873
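/*
 * Program the SAO above/vertical line-buffer bases and unit sizes used
 * by the tile logic ("title" here is presumably a typo for "tile").
 */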
4874static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4875 int sao_mem_unit)
4876{
4877 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4878 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4879 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4880 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4881}
4882
4883static u32 init_aux_size;
4884static int aux_data_is_avaible(struct hevc_state_s *hevc)
4885{
4886 u32 reg_val;
4887
4888 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4889 if (reg_val != 0 && reg_val != init_aux_size)
4890 return 1;
4891 else
4892 return 0;
4893}
4894
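/*
 * Hand the prefix/suffix aux (e.g. SEI) buffer to the firmware.  The
 * size register packs (prefix_aux_size >> 4) in the high 16 bits and
 * (suffix_aux_size >> 4) in the low 16 bits; aux_data_is_avaible()
 * detects new data by comparing the register against this initial
 * value.
 */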
4895static void config_aux_buf(struct hevc_state_s *hevc)
4896{
4897 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4898 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4899 (hevc->suffix_aux_size >> 4);
4900 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4901}
4902
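/*
 * Program the motion-prediction block for the current slice: MV
 * read/write buffers (per-LCU units of 0x200, 0x80 or 0x20 bytes for
 * 64, 32 or 16 LCU sizes respectively), tile geometry, reference
 * counts and the full L0/L1 reference POC table.
 */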
4903static void config_mpred_hw(struct hevc_state_s *hevc)
4904{
4905 int i;
4906 unsigned int data32;
4907 struct PIC_s *cur_pic = hevc->cur_pic;
4908 struct PIC_s *col_pic = hevc->col_pic;
4909 int AMVP_MAX_NUM_CANDS_MEM = 3;
4910 int AMVP_MAX_NUM_CANDS = 2;
4911 int NUM_CHROMA_MODE = 5;
4912 int DM_CHROMA_IDX = 36;
4913 int above_ptr_ctrl = 0;
4914 int buffer_linear = 1;
4915 int cu_size_log2 = 3;
4916
4917 int mpred_mv_rd_start_addr;
4918 int mpred_curr_lcu_x;
4919 int mpred_curr_lcu_y;
4920 int mpred_above_buf_start;
4921 int mpred_mv_rd_ptr;
4922 int mpred_mv_rd_ptr_p1;
4923 int mpred_mv_rd_end_addr;
4924 int MV_MEM_UNIT;
4925 int mpred_mv_wr_ptr;
4926 int *ref_poc_L0, *ref_poc_L1;
4927
4928 int above_en;
4929 int mv_wr_en;
4930 int mv_rd_en;
4931 int col_isIntra;
4932
4933 if (hevc->slice_type != 2) {
4934 above_en = 1;
4935 mv_wr_en = 1;
4936 mv_rd_en = 1;
4937 col_isIntra = 0;
4938 } else {
4939 above_en = 1;
4940 mv_wr_en = 1;
4941 mv_rd_en = 0;
4942 col_isIntra = 0;
4943 }
4944
4945 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
4946 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
4947 mpred_curr_lcu_x = data32 & 0xffff;
4948 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
4949
4950 MV_MEM_UNIT =
4951 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
4952 5 ? 0x80 : 0x20;
4953 mpred_mv_rd_ptr =
4954 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
4955
4956 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
4957 mpred_mv_rd_end_addr =
4958 mpred_mv_rd_start_addr +
4959 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
4960
4961 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
4962
4963 mpred_mv_wr_ptr =
4964 cur_pic->mpred_mv_wr_start_addr +
4965 (hevc->slice_addr * MV_MEM_UNIT);
4966
4967 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4968 hevc_print(hevc, 0,
4969 "cur pic index %d col pic index %d\n", cur_pic->index,
4970 col_pic->index);
4971 }
4972
4973 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
4974 cur_pic->mpred_mv_wr_start_addr);
4975 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
4976
4977 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
4978 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
4979 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
4980
4981 data32 = READ_VREG(HEVC_MPRED_CTRL0);
4982 data32 = (hevc->slice_type |
4983 hevc->new_pic << 2 |
4984 hevc->new_tile << 3 |
4985 hevc->isNextSliceSegment << 4 |
4986 hevc->TMVPFlag << 5 |
4987 hevc->LDCFlag << 6 |
4988 hevc->ColFromL0Flag << 7 |
4989 above_ptr_ctrl << 8 |
4990 above_en << 9 |
4991 mv_wr_en << 10 |
4992 mv_rd_en << 11 |
4993 col_isIntra << 12 |
4994 buffer_linear << 13 |
4995 hevc->LongTerm_Curr << 14 |
4996 hevc->LongTerm_Col << 15 |
4997 hevc->lcu_size_log2 << 16 |
4998 cu_size_log2 << 20 | hevc->plevel << 24);
4999 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
5000
5001 data32 = READ_VREG(HEVC_MPRED_CTRL1);
5002 data32 = (
5003#if 0
5004 /* no set in m8baby test1902 */
5005 /* Don't override clk_forced_on , */
5006 (data32 & (0x1 << 24)) |
5007#endif
5008 hevc->MaxNumMergeCand |
5009 AMVP_MAX_NUM_CANDS << 4 |
5010 AMVP_MAX_NUM_CANDS_MEM << 8 |
5011 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
5012 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
5013
5014 data32 = (hevc->pic_w | hevc->pic_h << 16);
5015 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
5016
5017 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5018 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
5019
5020 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
5021 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
5022
5023 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
5024 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
5025
5026 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
5027 /* col_RefNum_L0<<16| */
5028 /* col_RefNum_L1<<24 */
5029 );
5030 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
5031
5032 data32 = (hevc->LongTerm_Ref);
5033 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
5034
5035 data32 = 0;
5036 for (i = 0; i < hevc->RefNum_L0; i++)
5037 data32 = data32 | (1 << i);
5038 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
5039
5040 data32 = 0;
5041 for (i = 0; i < hevc->RefNum_L1; i++)
5042 data32 = data32 | (1 << i);
5043 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
5044
5045 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
5046 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
5047
5048 /* below MPRED Ref_POC_xx_Lx registers must follow Ref_POC_xx_L0 ->
5049 * Ref_POC_xx_L1 in pair write order!!!
5050 */
5051 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
5052 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
5053
5054 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
5055 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
5056
5057 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
5058 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
5059
5060 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
5061 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
5062
5063 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
5064 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
5065
5066 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
5067 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
5068
5069 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
5070 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
5071
5072 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
5073 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
5074
5075 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
5076 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
5077
5078 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
5079 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
5080
5081 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
5082 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
5083
5084 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
5085 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
5086
5087 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
5088 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
5089
5090 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
5091 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
5092
5093 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
5094 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
5095
5096 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
5097 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
5098
5099 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
5100 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
5101
5102 if (hevc->new_pic) {
5103 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
5104 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
5105 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
5106 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
5107 } else if (!hevc->isNextSliceSegment) {
5108 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
5109 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
5110 }
5111
5112 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
5113}
5114
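/*
 * Configure the SAO/deblock output path for the current picture:
 * compressed body/header addresses, optional double-write (NV12/NV21)
 * addresses, endian and memory-map mode, plus the deblocking-filter
 * offsets taken from the slice or the PPS depending on the override
 * flags.
 */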
5115static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
5116{
5117 unsigned int data32, data32_2;
5118 int misc_flag0 = hevc->misc_flag0;
5119 int slice_deblocking_filter_disabled_flag = 0;
5120
5121 int mc_buffer_size_u_v =
5122 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
5123 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
5124 struct PIC_s *cur_pic = hevc->cur_pic;
5125 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
5126
5127 data32 = READ_VREG(HEVC_SAO_CTRL0);
5128 data32 &= (~0xf);
5129 data32 |= hevc->lcu_size_log2;
5130 WRITE_VREG(HEVC_SAO_CTRL0, data32);
5131
5132 data32 = (hevc->pic_w | hevc->pic_h << 16);
5133 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5134
5135 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5136 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5137
5138 if (hevc->new_pic)
5139 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5140#ifdef LOSLESS_COMPRESS_MODE
5141/*SUPPORT_10BIT*/
5142 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5143 data32 = READ_VREG(HEVC_SAO_CTRL5);
5144 data32 &= (~(0xff << 16));
5145
5146 if (get_double_write_mode(hevc) == 2 ||
5147 get_double_write_mode(hevc) == 3)
5148 data32 |= (0xff<<16);
5149 else if (get_double_write_mode(hevc) == 4)
5150 data32 |= (0x33<<16);
5151
5152 if (hevc->mem_saving_mode == 1)
5153 data32 |= (1 << 9);
5154 else
5155 data32 &= ~(1 << 9);
5156 if (workaround_enable & 1)
5157 data32 |= (1 << 7);
5158 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5159 }
5160 data32 = cur_pic->mc_y_adr;
5161 if (get_double_write_mode(hevc))
5162 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5163
5164 if ((get_double_write_mode(hevc) & 0x10) == 0)
5165 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5166
5167 if (hevc->mmu_enable)
5168 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5169#else
5170 data32 = cur_pic->mc_y_adr;
5171 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5172#endif
5173 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5174 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5175
5176#ifdef LOSLESS_COMPRESS_MODE
5177/*SUPPORT_10BIT*/
5178 if (get_double_write_mode(hevc))
5179 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5180#else
5181 data32 = cur_pic->mc_u_v_adr;
5182 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5183#endif
5184 data32 = (mc_buffer_size_u_v_h << 16);
5185 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5186
5187#ifdef LOSLESS_COMPRESS_MODE
5188/*SUPPORT_10BIT*/
5189 if (get_double_write_mode(hevc)) {
5190 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5191 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5192 }
5193#else
5194 /* multi tile to do... */
5195 data32 = cur_pic->mc_y_adr;
5196 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5197
5198 data32 = cur_pic->mc_u_v_adr;
5199 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5200#endif
5201 /* DBLK CONFIG HERE */
5202 if (hevc->new_pic) {
5203 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5204 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5205 data32 = (0xff << 8) | (0x0 << 0);
5206 else
5207 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5208 (0x0 << 0); /* h265 video format*/
5209
5210 if (hevc->pic_w >= 1280)
5211 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5212 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5213 if (get_double_write_mode(hevc) == 0)
5214 data32 |= (0x1 << 8); /*enable first write*/
5215 else if (get_double_write_mode(hevc) == 0x10)
5216 data32 |= (0x1 << 9); /*double write only*/
5217 else
5218 data32 |= ((0x1 << 8) |(0x1 << 9));
5219
5220 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5221 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5222 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5223 }
5224 data32 = (hevc->pic_w | hevc->pic_h << 16);
5225 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5226
5227 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5228 data32 =
5229 ((misc_flag0 >>
5230 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5231 0x1) << 3;
5232 } else
5233 data32 = 0;
5234 data32 |=
5235 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5236 ((params->p.pps_cr_qp_offset
5237 & 0x1f) <<
5238 9));
5239 data32 |=
5240 (hevc->lcu_size ==
5241 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5242
5243 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5244
5245 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5246 /*if (debug & 0x80) {*/
5247 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5248 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5249 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5250 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5251 data32);
5252 /*}*/
5253 }
5254 }
5255#if 0
5256 data32 = READ_VREG(HEVC_SAO_CTRL1);
5257 data32 &= (~0x3000);
5258 data32 |= (hevc->mem_map_mode <<
5259 12);
5260
5261/* [13:12] axi_aformat,
5262 * 0-Linear, 1-32x32, 2-64x32
5263 */
5264 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5265
5266 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5267 data32 &= (~0x30);
5268 data32 |= (hevc->mem_map_mode <<
5269 4);
5270
5271/* [5:4] -- address_format
5272 * 00:linear 01:32x32 10:64x32
5273 */
5274 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5275#else
5276 /* m8baby test1902 */
5277 data32 = READ_VREG(HEVC_SAO_CTRL1);
5278 data32 &= (~0x3000);
5279 data32 |= (hevc->mem_map_mode <<
5280 12);
5281
5282/* [13:12] axi_aformat, 0-Linear,
5283 * 1-32x32, 2-64x32
5284 */
5285 data32 &= (~0xff0);
5286 /* data32 |= 0x670; // Big-Endian per 64-bit */
5287 data32 |= endian; /* Big-Endian per 64-bit */
5288 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5289 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5290 if (get_double_write_mode(hevc) == 0)
5291 data32 |= 0x2; /*disable double write*/
5292 else if (get_double_write_mode(hevc) & 0x10)
5293 data32 |= 0x1; /*disable cm*/
5294 } else {
5295 unsigned int data;
5296 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5297 (0x0 << 0); /* h265 video format*/
5298 if (hevc->pic_w >= 1280)
5299 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5300 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5301 if (get_double_write_mode(hevc) == 0)
5302 data |= (0x1 << 8); /*enable first write*/
5303 else if (get_double_write_mode(hevc) & 0x10)
5304 data |= (0x1 << 9); /*double write only*/
5305 else
5306 data |= ((0x1 << 8) |(0x1 << 9));
5307
5308 WRITE_VREG(HEVC_DBLK_CFGB, data);
5309 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5310 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5311 }
5312
5313 /* swap uv */
5314 if (hevc->is_used_v4l) {
5315 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5316 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5317 data32 &= ~(1 << 8); /* NV21 */
5318 else
5319 data32 |= (1 << 8); /* NV12 */
5320 }
5321
5322 /*
5323 * [31:24] ar_fifo1_axi_thred
5324 * [23:16] ar_fifo0_axi_thred
5325 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5326 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5327 * [11:08] axi_lendian_C
5328 * [07:04] axi_lendian_Y
5329 * [3] reserved
5330 * [2] clk_forceon
5331 * [1] dw_disable:disable double write output
5332 * [0] cm_disable:disable compress output
5333 */
5334 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5335 if (get_double_write_mode(hevc) & 0x10) {
5336 /* [23:22] dw_v1_ctrl
5337 *[21:20] dw_v0_ctrl
5338 *[19:18] dw_h1_ctrl
5339 *[17:16] dw_h0_ctrl
5340 */
5341 data32 = READ_VREG(HEVC_SAO_CTRL5);
5342 /*set them all 0 for H265_NV21 (no down-scale)*/
5343 data32 &= ~(0xff << 16);
5344 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5345 }
5346
5347 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5348 data32 &= (~0x30);
5349 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5350 data32 |= (hevc->mem_map_mode <<
5351 4);
5352 data32 &= (~0xF);
5353	data32 |= 0xf; /* valid only in double-write-only mode */
5354 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5355
5356 /* swap uv */
5357 if (hevc->is_used_v4l) {
5358 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5359 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5360 data32 |= (1 << 12); /* NV21 */
5361 else
5362 data32 &= ~(1 << 12); /* NV12 */
5363 }
5364
5365 /*
5366 * [3:0] little_endian
5367 * [5:4] address_format 00:linear 01:32x32 10:64x32
5368 * [7:6] reserved
5369 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5370 * [11:10] reserved
5371 * [12] CbCr_byte_swap
5372 * [31:13] reserved
5373 */
5374 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5375#endif
5376 data32 = 0;
5377 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5378 data32_2 &= (~0x300);
5379 /* slice_deblocking_filter_disabled_flag = 0;
5380	 * ucode has handled it, so read it from ucode directly
5381 */
5382 if (hevc->tile_enabled) {
5383 data32 |=
5384 ((misc_flag0 >>
5385 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5386 0x1) << 0;
5387 data32_2 |=
5388 ((misc_flag0 >>
5389 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5390 0x1) << 8;
5391 }
5392 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5393 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5394		0x1; /* ucode has handled it, so read it from ucode directly */
5395 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5396 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5397 /* slice_deblocking_filter_disabled_flag =
5398 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5399	 * //ucode has handled it, so read it from ucode directly
5400 */
5401 data32 |= slice_deblocking_filter_disabled_flag << 2;
5402 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5403 hevc_print_cont(hevc, 0,
5404 "(1,%x)", data32);
5405 if (!slice_deblocking_filter_disabled_flag) {
5406 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5407 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5408 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5409 hevc_print_cont(hevc, 0,
5410 "(2,%x)", data32);
5411 }
5412 } else {
5413 data32 |=
5414 ((misc_flag0 >>
5415 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5416 0x1) << 2;
5417 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5418 hevc_print_cont(hevc, 0,
5419 "(3,%x)", data32);
5420 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5421 0x1) == 0) {
5422 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5423 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5424 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5425 hevc_print_cont(hevc, 0,
5426 "(4,%x)", data32);
5427 }
5428 }
5429 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5430 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5431 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5432 || (!slice_deblocking_filter_disabled_flag))) {
5433 data32 |=
5434 ((misc_flag0 >>
5435 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5436 & 0x1) << 1;
5437 data32_2 |=
5438 ((misc_flag0 >>
5439 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5440 & 0x1) << 9;
5441 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5442 hevc_print_cont(hevc, 0,
5443 "(5,%x)\n", data32);
5444 } else {
5445 data32 |=
5446 ((misc_flag0 >>
5447 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5448 & 0x1) << 1;
5449 data32_2 |=
5450 ((misc_flag0 >>
5451 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5452 & 0x1) << 9;
5453 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5454 hevc_print_cont(hevc, 0,
5455 "(6,%x)\n", data32);
5456 }
5457 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5458 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5459}
5460
5461#ifdef TEST_NO_BUF
5462static unsigned char test_flag = 1;
5463#endif
5464
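/*
 * Re-fit the picture list after a size change: unused pictures whose
 * width/height no longer match are detached from their buffers (or
 * dropped entirely once more than work_pic_num pictures exist), and
 * free slots are re-armed with the new dimensions before unused
 * buffers are released.
 */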
5465static void pic_list_process(struct hevc_state_s *hevc)
5466{
5467 int work_pic_num = get_work_pic_num(hevc);
5468 int alloc_pic_count = 0;
5469 int i;
5470 struct PIC_s *pic;
5471 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5472 pic = hevc->m_PIC[i];
5473 if (pic == NULL || pic->index == -1)
5474 continue;
5475 alloc_pic_count++;
5476 if (pic->output_mark == 0 && pic->referenced == 0
5477 && pic->output_ready == 0
5478 && (pic->width != hevc->pic_w ||
5479 pic->height != hevc->pic_h)
5480 ) {
5481 set_buf_unused(hevc, pic->BUF_index);
5482 pic->BUF_index = -1;
5483 if (alloc_pic_count > work_pic_num) {
5484 pic->width = 0;
5485 pic->height = 0;
5486 pic->index = -1;
5487 } else {
5488 pic->width = hevc->pic_w;
5489 pic->height = hevc->pic_h;
5490 }
5491 }
5492 }
5493 if (alloc_pic_count < work_pic_num) {
5494 int new_count = alloc_pic_count;
5495 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5496 pic = hevc->m_PIC[i];
5497 if (pic && pic->index == -1) {
5498 pic->index = i;
5499 pic->BUF_index = -1;
5500 pic->width = hevc->pic_w;
5501 pic->height = hevc->pic_h;
5502 new_count++;
5503 if (new_count >=
5504 work_pic_num)
5505 break;
5506 }
5507 }
5508
5509 }
5510 dealloc_unused_buf(hevc);
5511 if (get_alloc_pic_count(hevc)
5512 != alloc_pic_count) {
5513 hevc_print_cont(hevc, 0,
5514 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5515 __func__,
5516 work_pic_num,
5517 alloc_pic_count,
5518 get_alloc_pic_count(hevc));
5519 }
5520}
5521
5522static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5523{
5524 int i;
5525 struct PIC_s *pic;
5526 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5527 pic = hevc->m_PIC[i];
5528 if (pic == NULL || pic->index == -1)
5529 continue;
5530
5531 if (pic->output_mark == 0 && pic->referenced == 0
5532 && pic->output_ready == 0
5533 && pic->scatter_alloc
5534 )
5535 release_pic_mmu_buf(hevc, pic);
5536 }
5537
5538}
5539
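/*
 * Pick (or lazily allocate) a free DPB picture for the frame about to
 * be decoded, preferring never-used (INVALID_POC) slots, then the
 * smallest POC; attach the frame buffer, canvas, MV buffer and (when
 * the MMU is enabled) MMU pages, and latch the per-frame parameters
 * from the RPM block.
 */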
5540static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5541 union param_u *rpm_param)
5542{
5543 struct PIC_s *new_pic = NULL;
5544 struct PIC_s *pic;
5545 int i;
5546 int ret;
5547
5548 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5549 pic = hevc->m_PIC[i];
5550 if (pic == NULL || pic->index == -1)
5551 continue;
5552
5553 if (pic->output_mark == 0 && pic->referenced == 0
5554 && pic->output_ready == 0
5555 && pic->width == hevc->pic_w
5556 && pic->height == hevc->pic_h
5557 ) {
5558 if (new_pic) {
5559 if (new_pic->POC != INVALID_POC) {
5560 if (pic->POC == INVALID_POC ||
5561 pic->POC < new_pic->POC)
5562 new_pic = pic;
5563 }
5564 } else
5565 new_pic = pic;
5566 }
5567 }
5568
5569 if (new_pic == NULL)
5570 return NULL;
5571
5572 if (new_pic->BUF_index < 0) {
5573 if (alloc_buf(hevc) < 0)
5574 return NULL;
5575 else {
5576 if (config_pic(hevc, new_pic) < 0) {
5577 dealloc_pic_buf(hevc, new_pic);
5578 return NULL;
5579 }
5580 }
5581 new_pic->width = hevc->pic_w;
5582 new_pic->height = hevc->pic_h;
5583 set_canvas(hevc, new_pic);
5584
5585 init_pic_list_hw(hevc);
5586 }
5587
5588 if (new_pic) {
5589 new_pic->double_write_mode =
5590 get_double_write_mode(hevc);
5591 if (new_pic->double_write_mode)
5592 set_canvas(hevc, new_pic);
5593
5594#ifdef TEST_NO_BUF
5595 if (test_flag) {
5596 test_flag = 0;
5597 return NULL;
5598 } else
5599 test_flag = 1;
5600#endif
5601 if (get_mv_buf(hevc, new_pic) < 0)
5602 return NULL;
5603
5604 if (hevc->mmu_enable) {
5605 ret = H265_alloc_mmu(hevc, new_pic,
5606 rpm_param->p.bit_depth,
5607 hevc->frame_mmu_map_addr);
5608 if (ret != 0) {
5609 put_mv_buf(hevc, new_pic);
5610 hevc_print(hevc, 0,
5611					"can't alloc needed mmu buffer, idx %d ret=%d\n",
5612 new_pic->decode_idx,
5613 ret);
5614 return NULL;
5615 }
5616 }
5617 new_pic->referenced = 1;
5618 new_pic->decode_idx = hevc->decode_idx;
5619 new_pic->slice_idx = 0;
5620 new_pic->referenced = 1;
5621 new_pic->output_mark = 0;
5622 new_pic->recon_mark = 0;
5623 new_pic->error_mark = 0;
5624 new_pic->dis_mark = 0;
5625 /* new_pic->output_ready = 0; */
5626 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5627 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5628 new_pic->POC = hevc->curr_POC;
5629 new_pic->pic_struct = hevc->curr_pic_struct;
5630 if (new_pic->aux_data_buf)
5631 release_aux_data(hevc, new_pic);
5632 new_pic->mem_saving_mode =
5633 hevc->mem_saving_mode;
5634 new_pic->bit_depth_luma =
5635 hevc->bit_depth_luma;
5636 new_pic->bit_depth_chroma =
5637 hevc->bit_depth_chroma;
5638 new_pic->video_signal_type =
5639 hevc->video_signal_type;
5640
5641 new_pic->conformance_window_flag =
5642 hevc->param.p.conformance_window_flag;
5643 new_pic->conf_win_left_offset =
5644 hevc->param.p.conf_win_left_offset;
5645 new_pic->conf_win_right_offset =
5646 hevc->param.p.conf_win_right_offset;
5647 new_pic->conf_win_top_offset =
5648 hevc->param.p.conf_win_top_offset;
5649 new_pic->conf_win_bottom_offset =
5650 hevc->param.p.conf_win_bottom_offset;
5651 new_pic->chroma_format_idc =
5652 hevc->param.p.chroma_format_idc;
5653
5654 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5655 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5656 __func__, new_pic->index,
5657 new_pic->BUF_index, new_pic->decode_idx,
5658 new_pic->POC);
5659
5660 }
5661 if (pic_list_debug & 0x1) {
5662 dump_pic_list(hevc);
5663 pr_err("\n*******************************************\n");
5664 }
5665
5666 return new_pic;
5667}
5668
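/*
 * V4L2 variant of get_new_pic(): walk the capture-buffer pool and either
 * reuse a buffer the decoder already owns (V4L_CAP_BUFF_IN_DEC) when it is
 * idle and matches the current size, or take a buffer still held by the
 * v4l2 m2m queue (V4L_CAP_BUFF_IN_M2M) and allocate/configure it on first
 * use.
 */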
5669static struct PIC_s *v4l_get_new_pic(struct hevc_state_s *hevc,
5670 union param_u *rpm_param)
5671{
5672 int ret;
5673 int used_buf_num = get_work_pic_num(hevc);
5674 struct aml_vcodec_ctx *v4l = hevc->v4l2_ctx;
5675 struct PIC_s *new_pic = NULL;
5676 struct PIC_s *pic = NULL;
5677 int i;
5678
5679 for (i = 0; i < used_buf_num; ++i) {
5680 struct v4l_buff_pool *pool = &v4l->cap_pool;
5681 u32 state = (pool->seq[i] >> 16);
5682 u32 index = (pool->seq[i] & 0xffff);
5683
5684 switch (state) {
5685 case V4L_CAP_BUFF_IN_DEC:
5686 pic = hevc->m_PIC[i];
5687 if (pic && (pic->index != -1) &&
5688 (pic->output_mark == 0) &&
5689 (pic->referenced == 0) &&
5690 (pic->output_ready == 0) &&
5691 (pic->width == hevc->pic_w) &&
5692 (pic->height == hevc->pic_h) &&
5693 pic->cma_alloc_addr) {
5694 new_pic = pic;
5695 }
5696 break;
5697 case V4L_CAP_BUFF_IN_M2M:
5698 pic = hevc->m_PIC[index];
5699 pic->width = hevc->pic_w;
5700 pic->height = hevc->pic_h;
5701 if ((pic->index != -1) &&
5702 !v4l_alloc_buf(hevc, pic)) {
5703 v4l_config_pic(hevc, pic);
5704 init_pic_list_hw(hevc);
5705 new_pic = pic;
5706 }
5707 break;
5708 default:
5709 pr_err("v4l buffer state err %d.\n", state);
5710 break;
5711 }
5712
5713 if (new_pic)
5714 break;
5715 }
5716
5717 if (new_pic == NULL)
5718 return NULL;
5719
5720 new_pic->double_write_mode = get_double_write_mode(hevc);
5721 if (new_pic->double_write_mode)
5722 set_canvas(hevc, new_pic);
5723
5724 if (get_mv_buf(hevc, new_pic) < 0)
5725 return NULL;
5726
5727 if (hevc->mmu_enable) {
5728 ret = H265_alloc_mmu(hevc, new_pic,
5729 rpm_param->p.bit_depth,
5730 hevc->frame_mmu_map_addr);
5731 if (ret != 0) {
5732 put_mv_buf(hevc, new_pic);
5733 hevc_print(hevc, 0,
5734 "can't alloc needed mmu buffer, idx %d ret = %d\n",
5735 new_pic->decode_idx, ret);
5736 return NULL;
5737 }
5738 }
5739
5740 new_pic->referenced = 1;
5741 new_pic->decode_idx = hevc->decode_idx;
5742 new_pic->slice_idx = 0;
5744 new_pic->output_mark = 0;
5745 new_pic->recon_mark = 0;
5746 new_pic->error_mark = 0;
5747 new_pic->dis_mark = 0;
5748 /* new_pic->output_ready = 0; */
5749 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5750 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5751 new_pic->POC = hevc->curr_POC;
5752 new_pic->pic_struct = hevc->curr_pic_struct;
5753
5754 if (new_pic->aux_data_buf)
5755 release_aux_data(hevc, new_pic);
5756 new_pic->mem_saving_mode =
5757 hevc->mem_saving_mode;
5758 new_pic->bit_depth_luma =
5759 hevc->bit_depth_luma;
5760 new_pic->bit_depth_chroma =
5761 hevc->bit_depth_chroma;
5762 new_pic->video_signal_type =
5763 hevc->video_signal_type;
5764
5765 new_pic->conformance_window_flag =
5766 hevc->param.p.conformance_window_flag;
5767 new_pic->conf_win_left_offset =
5768 hevc->param.p.conf_win_left_offset;
5769 new_pic->conf_win_right_offset =
5770 hevc->param.p.conf_win_right_offset;
5771 new_pic->conf_win_top_offset =
5772 hevc->param.p.conf_win_top_offset;
5773 new_pic->conf_win_bottom_offset =
5774 hevc->param.p.conf_win_bottom_offset;
5775 new_pic->chroma_format_idc =
5776 hevc->param.p.chroma_format_idc;
5777
5778 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5779 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5780 __func__, new_pic->index,
5781 new_pic->BUF_index, new_pic->decode_idx,
5782 new_pic->POC);
5783
5784 return new_pic;
5785}
5786
5787static int get_display_pic_num(struct hevc_state_s *hevc)
5788{
5789 int i;
5790 struct PIC_s *pic;
5791 int num = 0;
5792
5793 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5794 pic = hevc->m_PIC[i];
5795 if (pic == NULL ||
5796 pic->index == -1)
5797 continue;
5798
5799 if (pic->output_ready == 1)
5800 num++;
5801 }
5802 return num;
5803}
5804
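/*
 * Flush the DPB: mark the passed picture (if any) for output, then drain
 * every picture output_pic() still returns, either recycling it (error and
 * debug cases, or non-I frames in i_only mode) or handing it to
 * prepare_display_buf(), and finally clear all reference flags.
 */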
5805static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5806{
5807 struct PIC_s *pic_display;
5808
5809 if (pic) {
5810 /*PB skip control */
5811 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5812 /* start decoding after first I */
5813 hevc->ignore_bufmgr_error |= 0x1;
5814 }
5815 if (hevc->ignore_bufmgr_error & 1) {
5816 if (hevc->PB_skip_count_after_decoding > 0)
5817 hevc->PB_skip_count_after_decoding--;
5818 else {
5819 /* start displaying */
5820 hevc->ignore_bufmgr_error |= 0x2;
5821 }
5822 }
5823 /**/
5824 if (pic->POC != INVALID_POC) {
5825 pic->output_mark = 1;
5826 pic->recon_mark = 1;
5827 }
5828 pic->recon_mark = 1;
5829 }
5830 do {
5831 pic_display = output_pic(hevc, 1);
5832
5833 if (pic_display) {
5834 pic_display->referenced = 0;
5835 put_mv_buf(hevc, pic_display);
5836 if ((pic_display->error_mark
5837 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5838 || (get_dbg_flag(hevc) &
5839 H265_DEBUG_DISPLAY_CUR_FRAME)
5840 || (get_dbg_flag(hevc) &
5841 H265_DEBUG_NO_DISPLAY)) {
5842 pic_display->output_ready = 0;
5843 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5844 hevc_print(hevc, 0,
5845 "[BM] Display: POC %d, ",
5846 pic_display->POC);
5847 hevc_print_cont(hevc, 0,
5848 "decoding index %d ==> ",
5849 pic_display->decode_idx);
5850 hevc_print_cont(hevc, 0,
5851 "Debug mode or error, recycle it\n");
5852 }
5853 } else {
5854 if (hevc->i_only & 0x1
5855 && pic_display->slice_type != 2) {
5856 pic_display->output_ready = 0;
5857 } else {
5858 prepare_display_buf(hevc, pic_display);
5859 if (get_dbg_flag(hevc)
5860 & H265_DEBUG_BUFMGR) {
5861 hevc_print(hevc, 0,
5862 "[BM] flush Display: POC %d, ",
5863 pic_display->POC);
5864 hevc_print_cont(hevc, 0,
5865 "decoding index %d\n",
5866 pic_display->decode_idx);
5867 }
5868 }
5869 }
5870 }
5871 } while (pic_display);
5872 clear_referenced_flag(hevc);
5873}
5874
5875/*
5876* dv_meta_flag: 1, dolby meta only; 2, not include dolby meta
5877*/
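/*
 * Each call appends records to pic->aux_data_buf. Record layout, as
 * produced by the packing loop below:
 *   byte 0..3  payload length (big endian)
 *   byte 4     tag from the aux stream (0x1 = dolby vision metadata)
 *   byte 5     reserved, written as 0
 *   byte 6..7  padding length (big endian, currently always 0)
 *   byte 8..   payload: low byte of each 16-bit aux word, words taken in
 *              reverse order within each group of four
 */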
5878static void set_aux_data(struct hevc_state_s *hevc,
5879 struct PIC_s *pic, unsigned char suffix_flag,
5880 unsigned char dv_meta_flag)
5881{
5882 int i;
5883 unsigned short *aux_adr;
5884 unsigned int size_reg_val =
5885 READ_VREG(HEVC_AUX_DATA_SIZE);
5886 unsigned int aux_count = 0;
5887 int aux_size = 0;
5888 if (pic == NULL || !aux_data_is_avaible(hevc))
5889 return;
5890
5891 if (hevc->aux_data_dirty ||
5892 hevc->m_ins_flag == 0) {
5893
5894 hevc->aux_data_dirty = 0;
5895 }
5896
5897 if (suffix_flag) {
5898 aux_adr = (unsigned short *)
5899 (hevc->aux_addr +
5900 hevc->prefix_aux_size);
5901 aux_count =
5902 ((size_reg_val & 0xffff) << 4)
5903 >> 1;
5904 aux_size =
5905 hevc->suffix_aux_size;
5906 } else {
5907 aux_adr =
5908 (unsigned short *)hevc->aux_addr;
5909 aux_count =
5910 ((size_reg_val >> 16) << 4)
5911 >> 1;
5912 aux_size =
5913 hevc->prefix_aux_size;
5914 }
5915 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5916 hevc_print(hevc, 0,
5917 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
5918 __func__, pic, pic->aux_data_size,
5919 aux_count, suffix_flag, dv_meta_flag);
5920 }
5921 if (aux_size > 0 && aux_count > 0) {
5922 int heads_size = 0;
5923 int new_size;
5924 char *new_buf;
5925
5926 for (i = 0; i < aux_count; i++) {
5927 unsigned char tag = aux_adr[i] >> 8;
5928 if (tag != 0 && tag != 0xff) {
5929 if (dv_meta_flag == 0)
5930 heads_size += 8;
5931 else if (dv_meta_flag == 1 && tag == 0x1)
5932 heads_size += 8;
5933 else if (dv_meta_flag == 2 && tag != 0x1)
5934 heads_size += 8;
5935 }
5936 }
5937 new_size = pic->aux_data_size + aux_count + heads_size;
5938 new_buf = vmalloc(new_size);
5939 if (new_buf) {
5940 unsigned char valid_tag = 0;
5941 unsigned char *h =
5942 new_buf +
5943 pic->aux_data_size;
5944 unsigned char *p = h + 8;
5945 int len = 0;
5946 int padding_len = 0;
5947 if (pic->aux_data_buf)
5948 memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
5949 vfree(pic->aux_data_buf); /* vfree(NULL) is a no-op */
5950 pic->aux_data_buf = new_buf;
5951 for (i = 0; i < aux_count; i += 4) {
5952 int ii;
5953 unsigned char tag = aux_adr[i + 3] >> 8;
5954 if (tag != 0 && tag != 0xff) {
5955 if (dv_meta_flag == 0)
5956 valid_tag = 1;
5957 else if (dv_meta_flag == 1
5958 && tag == 0x1)
5959 valid_tag = 1;
5960 else if (dv_meta_flag == 2
5961 && tag != 0x1)
5962 valid_tag = 1;
5963 else
5964 valid_tag = 0;
5965 if (valid_tag && len > 0) {
5966 pic->aux_data_size +=
5967 (len + 8);
5968 h[0] = (len >> 24)
5969 & 0xff;
5970 h[1] = (len >> 16)
5971 & 0xff;
5972 h[2] = (len >> 8)
5973 & 0xff;
5974 h[3] = (len >> 0)
5975 & 0xff;
5976 h[6] =
5977 (padding_len >> 8)
5978 & 0xff;
5979 h[7] = (padding_len)
5980 & 0xff;
5981 h += (len + 8);
5982 p += 8;
5983 len = 0;
5984 padding_len = 0;
5985 }
5986 if (valid_tag) {
5987 h[4] = tag;
5988 h[5] = 0;
5989 h[6] = 0;
5990 h[7] = 0;
5991 }
5992 }
5993 if (valid_tag) {
5994 for (ii = 0; ii < 4; ii++) {
5995 unsigned short aa =
5996 aux_adr[i + 3
5997 - ii];
5998 *p = aa & 0xff;
5999 p++;
6000 len++;
6001 /*if ((aa >> 8) == 0xff)
6002 padding_len++;*/
6003 }
6004 }
6005 }
6006 if (len > 0) {
6007 pic->aux_data_size += (len + 8);
6008 h[0] = (len >> 24) & 0xff;
6009 h[1] = (len >> 16) & 0xff;
6010 h[2] = (len >> 8) & 0xff;
6011 h[3] = (len >> 0) & 0xff;
6012 h[6] = (padding_len >> 8) & 0xff;
6013 h[7] = (padding_len) & 0xff;
6014 }
6015 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
6016 hevc_print(hevc, 0,
6017 "aux: (size %d) suffix_flag %d\n",
6018 pic->aux_data_size, suffix_flag);
6019 for (i = 0; i < pic->aux_data_size; i++) {
6020 hevc_print_cont(hevc, 0,
6021 "%02x ", pic->aux_data_buf[i]);
6022 if (((i + 1) & 0xf) == 0)
6023 hevc_print_cont(hevc, 0, "\n");
6024 }
6025 hevc_print_cont(hevc, 0, "\n");
6026 }
6027
6028 } else {
6029 hevc_print(hevc, 0, "new buf alloc failed\n");
6030 if (pic->aux_data_buf)
6031 vfree(pic->aux_data_buf);
6032 pic->aux_data_buf = NULL;
6033 pic->aux_data_size = 0;
6034 }
6035 }
6036
6037}
6038
6039static void release_aux_data(struct hevc_state_s *hevc,
6040 struct PIC_s *pic)
6041{
6042 if (pic->aux_data_buf)
6043 vfree(pic->aux_data_buf);
6044 pic->aux_data_buf = NULL;
6045 pic->aux_data_size = 0;
6046}
6047
6048static inline void hevc_pre_pic(struct hevc_state_s *hevc,
6049 struct PIC_s *pic)
6050{
6051
6052 /* prev pic */
6053 /*if (hevc->curr_POC != 0) {*/
6054 int decoded_poc = hevc->iPrevPOC;
6055#ifdef MULTI_INSTANCE_SUPPORT
6056 if (hevc->m_ins_flag) {
6057 decoded_poc = hevc->decoded_poc;
6058 hevc->decoded_poc = INVALID_POC;
6059 }
6060#endif
6061 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
6062 && hevc->m_nalUnitType !=
6063 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6064 struct PIC_s *pic_display;
6065
6066 pic = get_pic_by_POC(hevc, decoded_poc);
6067 if (pic && (pic->POC != INVALID_POC)) {
6068 /*PB skip control */
6069 if (pic->error_mark == 0
6070 && hevc->PB_skip_mode == 1) {
6071 /* start decoding after
6072 * first I
6073 */
6074 hevc->ignore_bufmgr_error |= 0x1;
6075 }
6076 if (hevc->ignore_bufmgr_error & 1) {
6077 if (hevc->PB_skip_count_after_decoding > 0) {
6078 hevc->PB_skip_count_after_decoding--;
6079 } else {
6080 /* start displaying */
6081 hevc->ignore_bufmgr_error |= 0x2;
6082 }
6083 }
6084 if (hevc->mmu_enable
6085 && ((hevc->double_write_mode & 0x10) == 0)) {
6086 if (!hevc->m_ins_flag) {
6087 hevc->used_4k_num =
6088 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
6089
6090 if ((!is_skip_decoding(hevc, pic)) &&
6091 (hevc->used_4k_num >= 0) &&
6092 (hevc->cur_pic->scatter_alloc
6093 == 1)) {
6094 hevc_print(hevc,
6095 H265_DEBUG_BUFMGR_MORE,
6096 "%s pic index %d scatter_alloc %d page_start %d\n",
6097 "decoder_mmu_box_free_idx_tail",
6098 hevc->cur_pic->index,
6099 hevc->cur_pic->scatter_alloc,
6100 hevc->used_4k_num);
6101 hevc_mmu_dma_check(hw_to_vdec(hevc));
6102 decoder_mmu_box_free_idx_tail(
6103 hevc->mmu_box,
6104 hevc->cur_pic->index,
6105 hevc->used_4k_num);
6106 hevc->cur_pic->scatter_alloc
6107 = 2;
6108 }
6109 hevc->used_4k_num = -1;
6110 }
6111 }
6112
6113 pic->output_mark = 1;
6114 pic->recon_mark = 1;
6115 pic->dis_mark = 1;
6116 }
6117 do {
6118 pic_display = output_pic(hevc, 0);
6119
6120 if (pic_display) {
6121 if ((pic_display->error_mark &&
6122 ((hevc->ignore_bufmgr_error &
6123 0x2) == 0))
6124 || (get_dbg_flag(hevc) &
6125 H265_DEBUG_DISPLAY_CUR_FRAME)
6126 || (get_dbg_flag(hevc) &
6127 H265_DEBUG_NO_DISPLAY)) {
6128 pic_display->output_ready = 0;
6129 if (get_dbg_flag(hevc) &
6130 H265_DEBUG_BUFMGR) {
6131 hevc_print(hevc, 0,
6132 "[BM] Display: POC %d, ",
6133 pic_display->POC);
6134 hevc_print_cont(hevc, 0,
6135 "decoding index %d ==> ",
6136 pic_display->
6137 decode_idx);
6138 hevc_print_cont(hevc, 0,
6139 "Debug or err, recycle it\n");
6140 }
6141 } else {
6142 if (hevc->i_only & 0x1
6143 && pic_display->
6144 slice_type != 2) {
6145 pic_display->output_ready = 0;
6146 } else {
6147 prepare_display_buf
6148 (hevc,
6149 pic_display);
6150 if (get_dbg_flag(hevc) &
6151 H265_DEBUG_BUFMGR) {
6152 hevc_print(hevc, 0,
6153 "[BM] Display: POC %d, ",
6154 pic_display->POC);
6155 hevc_print_cont(hevc, 0,
6156 "decoding index %d\n",
6157 pic_display->
6158 decode_idx);
6159 }
6160 }
6161 }
6162 }
6163 } while (pic_display);
6164 } else {
6165 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6166 hevc_print(hevc, 0,
6167 "[BM] current pic is IDR, ");
6168 hevc_print(hevc, 0,
6169 "clear referenced flag of all buffers\n");
6170 }
6171 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6172 dump_pic_list(hevc);
6173 pic = get_pic_by_POC(hevc, decoded_poc);
6174 flush_output(hevc, pic);
6175 }
6176
6177}
6178
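/*
 * Error detection for the picture that has just finished: when the parser's
 * decoded LCU count does not reach the last LCU of the previous picture
 * (lcu_x_num_pre * lcu_y_num_pre), that picture was only partially decoded
 * and gets error_mark set.  Bit 0x20 of error_handle_policy disables this
 * check and bit 0x80 disables the over-decode check; check_pic_decoded_error()
 * below does the same using the current picture's LCU grid.
 */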
6179static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
6180 int decoded_lcu)
6181{
6182 int current_lcu_idx = decoded_lcu;
6183 if (decoded_lcu < 0)
6184 return;
6185
6186 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6187 hevc_print(hevc, 0,
6188 "cur lcu idx = %d, (total %d)\n",
6189 current_lcu_idx, hevc->lcu_total);
6190 }
6191 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6192 if (hevc->first_pic_after_recover) {
6193 if (current_lcu_idx !=
6194 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6195 hevc->cur_pic->error_mark = 1;
6196 } else {
6197 if (hevc->lcu_x_num_pre != 0
6198 && hevc->lcu_y_num_pre != 0
6199 && current_lcu_idx != 0
6200 && current_lcu_idx <
6201 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6202 hevc->cur_pic->error_mark = 1;
6203 }
6204 if (hevc->cur_pic->error_mark) {
6205 hevc_print(hevc, 0,
6206 "cur lcu idx = %d, (total %d), set error_mark\n",
6207 current_lcu_idx,
6208 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
6209 if (is_log_enable(hevc))
6210 add_log(hevc,
6211 "cur lcu idx = %d, (total %d), set error_mark",
6212 current_lcu_idx,
6213 hevc->lcu_x_num_pre *
6214 hevc->lcu_y_num_pre);
6215
6216 }
6217
6218 }
6219 if (hevc->cur_pic && hevc->head_error_flag) {
6220 hevc->cur_pic->error_mark = 1;
6221 hevc_print(hevc, 0,
6222 "head has error, set error_mark\n");
6223 }
6224
6225 if ((error_handle_policy & 0x80) == 0) {
6226 if (hevc->over_decode && hevc->cur_pic) {
6227 hevc_print(hevc, 0,
6228 "over decode, set error_mark\n");
6229 hevc->cur_pic->error_mark = 1;
6230 }
6231 }
6232
6233 hevc->lcu_x_num_pre = hevc->lcu_x_num;
6234 hevc->lcu_y_num_pre = hevc->lcu_y_num;
6235}
6236
6237static void check_pic_decoded_error(struct hevc_state_s *hevc,
6238 int decoded_lcu)
6239{
6240 int current_lcu_idx = decoded_lcu;
6241 if (decoded_lcu < 0)
6242 return;
6243
6244 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6245 hevc_print(hevc, 0,
6246 "cur lcu idx = %d, (total %d)\n",
6247 current_lcu_idx, hevc->lcu_total);
6248 }
6249 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6250 if (hevc->lcu_x_num != 0
6251 && hevc->lcu_y_num != 0
6252 && current_lcu_idx != 0
6253 && current_lcu_idx <
6254 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6255 hevc->cur_pic->error_mark = 1;
6256 if (hevc->cur_pic->error_mark) {
6257 hevc_print(hevc, 0,
6258 "cur lcu idx = %d, (total %d), set error_mark\n",
6259 current_lcu_idx,
6260 hevc->lcu_x_num*hevc->lcu_y_num);
6261 if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6262 && ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6263 hevc_print(hevc, 0,
6264 "Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6265 hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6266 flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6267 }
6268 if (is_log_enable(hevc))
6269 add_log(hevc,
6270 "cur lcu idx = %d, (total %d), set error_mark",
6271 current_lcu_idx,
6272 hevc->lcu_x_num *
6273 hevc->lcu_y_num);
6274
6275 }
6276
6277 }
6278 if (hevc->cur_pic && hevc->head_error_flag) {
6279 hevc->cur_pic->error_mark = 1;
6280 hevc_print(hevc, 0,
6281 "head has error, set error_mark\n");
6282 }
6283
6284 if ((error_handle_policy & 0x80) == 0) {
6285 if (hevc->over_decode && hevc->cur_pic) {
6286 hevc_print(hevc, 0,
6287 "over decode, set error_mark\n");
6288 hevc->cur_pic->error_mark = 1;
6289 }
6290 }
6291}
6292
6293/* Only after one field or one frame has been decoded
6294 * can this function be called to get QoS info. */
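/*
 * Two register layouts are handled: before G12A the decoder packs three
 * per-picture statistics (used as min/avg/max) into HEVC_MV_INFO,
 * HEVC_QP_INFO and HEVC_SKIP_INFO; from G12A on the statistics are read
 * sequentially from HEVC_PIC_QUALITY_DATA after resetting the read index
 * through HEVC_PIC_QUALITY_CTRL, and writing bit 8 of that register clears
 * the counters for the next picture.
 */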
6295static void get_picture_qos_info(struct hevc_state_s *hevc)
6296{
6297 struct PIC_s *picture = hevc->cur_pic;
6298
6299/*
6300#define DEBUG_QOS
6301*/
6302
6303 if (!hevc->cur_pic)
6304 return;
6305
6306 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6307 unsigned char a[3];
6308 unsigned char i, j, t;
6309 unsigned long data;
6310
6311 data = READ_VREG(HEVC_MV_INFO);
6312 if (picture->slice_type == I_SLICE)
6313 data = 0;
6314 a[0] = data & 0xff;
6315 a[1] = (data >> 8) & 0xff;
6316 a[2] = (data >> 16) & 0xff;
6317
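/*
 * Order the three packed values ascending so a[0]/a[1]/a[2] can be used
 * as min/avg/max; equal entries are nudged one apart so the three results
 * stay distinct.
 */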
6318 for (i = 0; i < 3; i++)
6319 for (j = i+1; j < 3; j++) {
6320 if (a[j] < a[i]) {
6321 t = a[j];
6322 a[j] = a[i];
6323 a[i] = t;
6324 } else if (a[j] == a[i]) {
6325 a[i]++;
6326 t = a[j];
6327 a[j] = a[i];
6328 a[i] = t;
6329 }
6330 }
6331 picture->max_mv = a[2];
6332 picture->avg_mv = a[1];
6333 picture->min_mv = a[0];
6334#ifdef DEBUG_QOS
6335 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6336 data, a[0], a[1], a[2]);
6337#endif
6338
6339 data = READ_VREG(HEVC_QP_INFO);
6340 a[0] = data & 0x1f;
6341 a[1] = (data >> 8) & 0x3f;
6342 a[2] = (data >> 16) & 0x7f;
6343
6344 for (i = 0; i < 3; i++)
6345 for (j = i+1; j < 3; j++) {
6346 if (a[j] < a[i]) {
6347 t = a[j];
6348 a[j] = a[i];
6349 a[i] = t;
6350 } else if (a[j] == a[i]) {
6351 a[i]++;
6352 t = a[j];
6353 a[j] = a[i];
6354 a[i] = t;
6355 }
6356 }
6357 picture->max_qp = a[2];
6358 picture->avg_qp = a[1];
6359 picture->min_qp = a[0];
6360#ifdef DEBUG_QOS
6361 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6362 data, a[0], a[1], a[2]);
6363#endif
6364
6365 data = READ_VREG(HEVC_SKIP_INFO);
6366 a[0] = data & 0x1f;
6367 a[1] = (data >> 8) & 0x3f;
6368 a[2] = (data >> 16) & 0x7f;
6369
6370 for (i = 0; i < 3; i++)
6371 for (j = i+1; j < 3; j++) {
6372 if (a[j] < a[i]) {
6373 t = a[j];
6374 a[j] = a[i];
6375 a[i] = t;
6376 } else if (a[j] == a[i]) {
6377 a[i]++;
6378 t = a[j];
6379 a[j] = a[i];
6380 a[i] = t;
6381 }
6382 }
6383 picture->max_skip = a[2];
6384 picture->avg_skip = a[1];
6385 picture->min_skip = a[0];
6386
6387#ifdef DEBUG_QOS
6388 hevc_print(hevc, 0,
6389 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6390 data, a[0], a[1], a[2]);
6391#endif
6392 } else {
6393 uint32_t blk88_y_count;
6394 uint32_t blk88_c_count;
6395 uint32_t blk22_mv_count;
6396 uint32_t rdata32;
6397 int32_t mv_hi;
6398 int32_t mv_lo;
6399 uint32_t rdata32_l;
6400 uint32_t mvx_L0_hi;
6401 uint32_t mvy_L0_hi;
6402 uint32_t mvx_L1_hi;
6403 uint32_t mvy_L1_hi;
6404 int64_t value;
6405 uint64_t temp_value;
6406#ifdef DEBUG_QOS
6407 int pic_number = picture->POC;
6408#endif
6409
6410 picture->max_mv = 0;
6411 picture->avg_mv = 0;
6412 picture->min_mv = 0;
6413
6414 picture->max_skip = 0;
6415 picture->avg_skip = 0;
6416 picture->min_skip = 0;
6417
6418 picture->max_qp = 0;
6419 picture->avg_qp = 0;
6420 picture->min_qp = 0;
6421
6422
6423
6424#ifdef DEBUG_QOS
6425 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6426 picture->slice_type,
6427 picture->POC);
6428#endif
6429 /* set rd_idx to 0 */
6430 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6431
6432 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6433 if (blk88_y_count == 0) {
6434#ifdef DEBUG_QOS
6435 hevc_print(hevc, 0,
6436 "[Picture %d Quality] NO Data yet.\n",
6437 pic_number);
6438#endif
6439 /* reset all counts */
6440 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6441 return;
6442 }
6443 /* qp_y_sum */
6444 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6445#ifdef DEBUG_QOS
6446 hevc_print(hevc, 0,
6447 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6448 pic_number, rdata32/blk88_y_count,
6449 rdata32, blk88_y_count);
6450#endif
6451 picture->avg_qp = rdata32/blk88_y_count;
6452 /* intra_y_count */
6453 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6454#ifdef DEBUG_QOS
6455 hevc_print(hevc, 0,
6456 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6457 pic_number, rdata32*100/blk88_y_count,
6458 '%', rdata32);
6459#endif
6460 /* skipped_y_count */
6461 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6462#ifdef DEBUG_QOS
6463 hevc_print(hevc, 0,
6464 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6465 pic_number, rdata32*100/blk88_y_count,
6466 '%', rdata32);
6467#endif
6468 picture->avg_skip = rdata32*100/blk88_y_count;
6469 /* coeff_non_zero_y_count */
6470 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6471#ifdef DEBUG_QOS
6472 hevc_print(hevc, 0,
6473 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6474 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6475 '%', rdata32);
6476#endif
6477 /* blk66_c_count */
6478 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6479 if (blk88_c_count == 0) {
6480#ifdef DEBUG_QOS
6481 hevc_print(hevc, 0,
6482 "[Picture %d Quality] NO Data yet.\n",
6483 pic_number);
6484#endif
6485 /* reset all counts */
6486 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6487 return;
6488 }
6489 /* qp_c_sum */
6490 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6491#ifdef DEBUG_QOS
6492 hevc_print(hevc, 0,
6493 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6494 pic_number, rdata32/blk88_c_count,
6495 rdata32, blk88_c_count);
6496#endif
6497 /* intra_c_count */
6498 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6499#ifdef DEBUG_QOS
6500 hevc_print(hevc, 0,
6501 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6502 pic_number, rdata32*100/blk88_c_count,
6503 '%', rdata32);
6504#endif
6505 /* skipped_cu_c_count */
6506 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6507#ifdef DEBUG_QOS
6508 hevc_print(hevc, 0,
6509 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6510 pic_number, rdata32*100/blk88_c_count,
6511 '%', rdata32);
6512#endif
6513 /* coeff_non_zero_c_count */
6514 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6515#ifdef DEBUG_QOS
6516 hevc_print(hevc, 0,
6517 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6518 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6519 '%', rdata32);
6520#endif
6521
6522 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6523 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6524 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6525#ifdef DEBUG_QOS
6526 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6527 pic_number, (rdata32>>0)&0xff);
6528#endif
6529 picture->min_qp = (rdata32>>0)&0xff;
6530
6531#ifdef DEBUG_QOS
6532 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6533 pic_number, (rdata32>>8)&0xff);
6534#endif
6535 picture->max_qp = (rdata32>>8)&0xff;
6536
6537#ifdef DEBUG_QOS
6538 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6539 pic_number, (rdata32>>16)&0xff);
6540 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6541 pic_number, (rdata32>>24)&0xff);
6542#endif
6543
6544 /* blk22_mv_count */
6545 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6546 if (blk22_mv_count == 0) {
6547#ifdef DEBUG_QOS
6548 hevc_print(hevc, 0,
6549 "[Picture %d Quality] NO MV Data yet.\n",
6550 pic_number);
6551#endif
6552 /* reset all counts */
6553 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6554 return;
6555 }
6556 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6557 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6558 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6559 /* should all be 0x00 or 0xff */
6560#ifdef DEBUG_QOS
6561 hevc_print(hevc, 0,
6562 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6563 pic_number, rdata32);
6564#endif
6565 mvx_L0_hi = ((rdata32>>0)&0xff);
6566 mvy_L0_hi = ((rdata32>>8)&0xff);
6567 mvx_L1_hi = ((rdata32>>16)&0xff);
6568 mvy_L1_hi = ((rdata32>>24)&0xff);
6569
6570 /* mvx_L0_count[31:0] */
6571 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6572 temp_value = mvx_L0_hi;
6573 temp_value = (temp_value << 32) | rdata32_l;
6574
6575 if (mvx_L0_hi & 0x80)
6576 value = 0xFFFFFFF000000000 | temp_value;
6577 else
6578 value = temp_value;
6579 value = div_s64(value, blk22_mv_count);
6580#ifdef DEBUG_QOS
6581 hevc_print(hevc, 0,
6582 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6583 pic_number, (int)value,
6584 value, blk22_mv_count);
6585#endif
6586 picture->avg_mv = value;
6587
6588 /* mvy_L0_count[31:0] */
6589 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6590 temp_value = mvy_L0_hi;
6591 temp_value = (temp_value << 32) | rdata32_l;
6592
6593 if (mvy_L0_hi & 0x80)
6594 value = 0xFFFFFFF000000000 | temp_value;
6595 else
6596 value = temp_value;
6597#ifdef DEBUG_QOS
6598 hevc_print(hevc, 0,
6599 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6600 pic_number, rdata32_l/blk22_mv_count,
6601 value, blk22_mv_count);
6602#endif
6603
6604 /* mvx_L1_count[31:0] */
6605 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6606 temp_value = mvx_L1_hi;
6607 temp_value = (temp_value << 32) | rdata32_l;
6608 if (mvx_L1_hi & 0x80)
6609 value = 0xFFFFFFF000000000 | temp_value;
6610 else
6611 value = temp_value;
6612#ifdef DEBUG_QOS
6613 hevc_print(hevc, 0,
6614 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6615 pic_number, rdata32_l/blk22_mv_count,
6616 value, blk22_mv_count);
6617#endif
6618
6619 /* mvy_L1_count[31:0] */
6620 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6621 temp_value = mvy_L1_hi;
6622 temp_value = (temp_value << 32) | rdata32_l;
6623 if (mvy_L1_hi & 0x80)
6624 value = 0xFFFFFFF000000000 | temp_value;
6625 else
6626 value = temp_value;
6627#ifdef DEBUG_QOS
6628 hevc_print(hevc, 0,
6629 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6630 pic_number, rdata32_l/blk22_mv_count,
6631 value, blk22_mv_count);
6632#endif
6633
6634 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6635 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6636 mv_hi = (rdata32>>16)&0xffff;
6637 if (mv_hi & 0x8000)
6638 mv_hi = 0x8000 - mv_hi;
6639#ifdef DEBUG_QOS
6640 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6641 pic_number, mv_hi);
6642#endif
6643 picture->max_mv = mv_hi;
6644
6645 mv_lo = (rdata32>>0)&0xffff;
6646 if (mv_lo & 0x8000)
6647 mv_lo = 0x8000 - mv_lo;
6648#ifdef DEBUG_QOS
6649 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6650 pic_number, mv_lo);
6651#endif
6652 picture->min_mv = mv_lo;
6653
6654 /* {mvy_L0_max, mvy_L0_min} */
6655 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6656 mv_hi = (rdata32>>16)&0xffff;
6657 if (mv_hi & 0x8000)
6658 mv_hi = 0x8000 - mv_hi;
6659#ifdef DEBUG_QOS
6660 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6661 pic_number, mv_hi);
6662#endif
6663
6664 mv_lo = (rdata32>>0)&0xffff;
6665 if (mv_lo & 0x8000)
6666 mv_lo = 0x8000 - mv_lo;
6667#ifdef DEBUG_QOS
6668 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6669 pic_number, mv_lo);
6670#endif
6671
6672 /* {mvx_L1_max, mvx_L1_min} */
6673 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6674 mv_hi = (rdata32>>16)&0xffff;
6675 if (mv_hi & 0x8000)
6676 mv_hi = 0x8000 - mv_hi;
6677#ifdef DEBUG_QOS
6678 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6679 pic_number, mv_hi);
6680#endif
6681
6682 mv_lo = (rdata32>>0)&0xffff;
6683 if (mv_lo & 0x8000)
6684 mv_lo = 0x8000 - mv_lo;
6685#ifdef DEBUG_QOS
6686 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6687 pic_number, mv_lo);
6688#endif
6689
6690 /* {mvy_L1_max, mvy_L1_min} */
6691 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6692 mv_hi = (rdata32>>16)&0xffff;
6693 if (mv_hi & 0x8000)
6694 mv_hi = 0x8000 - mv_hi;
6695#ifdef DEBUG_QOS
6696 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6697 pic_number, mv_hi);
6698#endif
6699 mv_lo = (rdata32>>0)&0xffff;
6700 if (mv_lo & 0x8000)
6701 mv_lo = 0x8000 - mv_lo;
6702#ifdef DEBUG_QOS
6703 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6704 pic_number, mv_lo);
6705#endif
6706
6707 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6708#ifdef DEBUG_QOS
6709 hevc_print(hevc, 0,
6710 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6711 pic_number, rdata32);
6712#endif
6713 /* reset all counts */
6714 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6715 }
6716}
6717
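/*
 * Parse the slice segment header parameters delivered through the RPM area
 * and set up the hardware for the slice.  Return values used below:
 *   0    slice accepted, hardware configured
 *   1    RASL picture after CRA/BLA, skipped
 *   2    picture is skipped (error / PB-skip policy)
 *   3    parameter error (oversize, zero lcu_size, bad POC lsb, ...)
 *   4    unsupported bit depth
 *   0xf  header-only processing (BUFFER_MGR_ONLY, debug skip, or non-I
 *        slice in i_only mode)
 *   -1   no free picture buffer yet, wait_buf is set
 */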
6718static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6719 union param_u *rpm_param,
6720 int decode_pic_begin)
6721{
6722#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6723 struct vdec_s *vdec = hw_to_vdec(hevc);
6724#endif
6725 int i;
6726 int lcu_x_num_div;
6727 int lcu_y_num_div;
6728 int Col_ref;
6729 int dbg_skip_flag = 0;
6730
6731 if (hevc->wait_buf == 0) {
6732 hevc->sps_num_reorder_pics_0 =
6733 rpm_param->p.sps_num_reorder_pics_0;
6734 hevc->m_temporalId = rpm_param->p.m_temporalId;
6735 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6736 hevc->interlace_flag =
6737 (rpm_param->p.profile_etc >> 2) & 0x1;
6738 hevc->curr_pic_struct =
6739 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6740 if (parser_sei_enable & 0x4) {
6741 hevc->frame_field_info_present_flag =
6742 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6743 }
6744
6745 if (interlace_enable == 0 || hevc->m_ins_flag)
6746 hevc->interlace_flag = 0;
6747 if (interlace_enable & 0x100)
6748 hevc->interlace_flag = interlace_enable & 0x1;
6749 if (hevc->interlace_flag == 0)
6750 hevc->curr_pic_struct = 0;
6751 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6752 /*
6753 *hevc->m_pocRandomAccess = MAX_INT;
6754 * //add to fix RAP_B_Bossen_1
6755 */
6756 /* } */
6757 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6758 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6759 hevc->slice_segment_addr =
6760 rpm_param->p.slice_segment_address;
6761 if (!rpm_param->p.dependent_slice_segment_flag)
6762 hevc->slice_addr = hevc->slice_segment_addr;
6763 } else {
6764 hevc->slice_segment_addr = 0;
6765 hevc->slice_addr = 0;
6766 }
6767
6768 hevc->iPrevPOC = hevc->curr_POC;
6769 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6770 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6771 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6772 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6773 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6774 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6775 hevc->isNextSliceSegment =
6776 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6777 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6778 || hevc->pic_h !=
6779 rpm_param->p.pic_height_in_luma_samples) {
6780 hevc_print(hevc, 0,
6781 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6782 hevc->pic_w, hevc->pic_h,
6783 rpm_param->p.pic_width_in_luma_samples,
6784 rpm_param->p.pic_height_in_luma_samples,
6785 hevc->interlace_flag);
6786
6787 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6788 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6789 hevc->frame_width = hevc->pic_w;
6790 hevc->frame_height = hevc->pic_h;
6791#ifdef LOSLESS_COMPRESS_MODE
6792 if (/*re_config_pic_flag == 0 &&*/
6793 (get_double_write_mode(hevc) & 0x10) == 0)
6794 init_decode_head_hw(hevc);
6795#endif
6796 }
6797
6798 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6799 hevc_print(hevc, 0, "over size : %u x %u.\n",
6800 hevc->pic_w, hevc->pic_h);
6801 if ((!hevc->m_ins_flag) &&
6802 ((debug &
6803 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6804 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6805 H265_DEBUG_DIS_SYS_ERROR_PROC);
6806 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6807 return 3;
6808 }
6809 if (hevc->bit_depth_chroma > 10 ||
6810 hevc->bit_depth_luma > 10) {
6811 hevc_print(hevc, 0, "unsupported bit depth: %u,%u\n",
6812 hevc->bit_depth_chroma,
6813 hevc->bit_depth_luma);
6814 if (!hevc->m_ins_flag)
6815 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6816 H265_DEBUG_DIS_SYS_ERROR_PROC);
6817 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6818 return 4;
6819 }
6820
6821 /* it will cause divide 0 error */
6822 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6823 if (get_dbg_flag(hevc)) {
6824 hevc_print(hevc, 0,
6825 "Fatal Error, pic_w = %d, pic_h = %d\n",
6826 hevc->pic_w, hevc->pic_h);
6827 }
6828 return 3;
6829 }
6830 pic_list_process(hevc);
6831
6832 hevc->lcu_size =
6833 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6834 3 + rpm_param->
6835 p.log2_diff_max_min_coding_block_size);
6836 if (hevc->lcu_size == 0) {
6837 hevc_print(hevc, 0,
6838 "Error, lcu_size = 0 (%d,%d)\n",
6839 rpm_param->p.
6840 log2_min_coding_block_size_minus3,
6841 rpm_param->p.
6842 log2_diff_max_min_coding_block_size);
6843 return 3;
6844 }
6845 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6846 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6847 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6848 hevc->lcu_x_num =
6849 ((hevc->pic_w % hevc->lcu_size) ==
6850 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6851 hevc->lcu_y_num =
6852 ((hevc->pic_h % hevc->lcu_size) ==
6853 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6854 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
6855
6856 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6857 || hevc->m_nalUnitType ==
6858 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6859 hevc->curr_POC = 0;
6860 if ((hevc->m_temporalId - 1) == 0)
6861 hevc->iPrevTid0POC = hevc->curr_POC;
6862 } else {
6863 int iMaxPOClsb =
6864 1 << (rpm_param->p.
6865 log2_max_pic_order_cnt_lsb_minus4 + 4);
6866 int iPrevPOClsb;
6867 int iPrevPOCmsb;
6868 int iPOCmsb;
6869 int iPOClsb = rpm_param->p.POClsb;
6870
6871 if (iMaxPOClsb == 0) {
6872 hevc_print(hevc, 0,
6873 "error iMaxPOClsb is 0\n");
6874 return 3;
6875 }
6876
6877 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
6878 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
6879
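/*
 * PicOrderCntMsb wraparound handling from the HEVC spec; for example with
 * iMaxPOClsb = 256, iPrevTid0POC = 250 (msb 0, lsb 250) and a new
 * iPOClsb = 5: 250 - 5 >= 128, so iPOCmsb becomes 256 and curr_POC = 261.
 */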
6880 if ((iPOClsb < iPrevPOClsb)
6881 && ((iPrevPOClsb - iPOClsb) >=
6882 (iMaxPOClsb / 2)))
6883 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
6884 else if ((iPOClsb > iPrevPOClsb)
6885 && ((iPOClsb - iPrevPOClsb) >
6886 (iMaxPOClsb / 2)))
6887 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
6888 else
6889 iPOCmsb = iPrevPOCmsb;
6890 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6891 hevc_print(hevc, 0,
6892 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
6893 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
6894 iPOClsb);
6895 }
6896 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6897 || hevc->m_nalUnitType ==
6898 NAL_UNIT_CODED_SLICE_BLANT
6899 || hevc->m_nalUnitType ==
6900 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
6901 /* For BLA picture types, POCmsb is set to 0. */
6902 iPOCmsb = 0;
6903 }
6904 hevc->curr_POC = (iPOCmsb + iPOClsb);
6905 if ((hevc->m_temporalId - 1) == 0)
6906 hevc->iPrevTid0POC = hevc->curr_POC;
6907 else {
6908 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6909 hevc_print(hevc, 0,
6910 "m_temporalID is %d\n",
6911 hevc->m_temporalId);
6912 }
6913 }
6914 }
6915 hevc->RefNum_L0 =
6916 (rpm_param->p.num_ref_idx_l0_active >
6917 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6918 num_ref_idx_l0_active;
6919 hevc->RefNum_L1 =
6920 (rpm_param->p.num_ref_idx_l1_active >
6921 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6922 num_ref_idx_l1_active;
6923
6924 /* if(curr_POC==0x10) dump_lmem(); */
6925
6926 /* skip RASL pictures after CRA/BLA pictures */
6927 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
6928 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
6929 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6930 || hevc->m_nalUnitType ==
6931 NAL_UNIT_CODED_SLICE_BLANT
6932 || hevc->m_nalUnitType ==
6933 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6934 hevc->m_pocRandomAccess = hevc->curr_POC;
6935 else
6936 hevc->m_pocRandomAccess = -MAX_INT;
6937 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6938 || hevc->m_nalUnitType ==
6939 NAL_UNIT_CODED_SLICE_BLANT
6940 || hevc->m_nalUnitType ==
6941 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6942 hevc->m_pocRandomAccess = hevc->curr_POC;
6943 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
6944 (nal_skip_policy >= 3) &&
6945 (hevc->m_nalUnitType ==
6946 NAL_UNIT_CODED_SLICE_RASL_N ||
6947 hevc->m_nalUnitType ==
6948 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
6949 if (get_dbg_flag(hevc)) {
6950 hevc_print(hevc, 0,
6951 "RASL picture with POC %d < %d ",
6952 hevc->curr_POC, hevc->m_pocRandomAccess);
6953 hevc_print(hevc, 0,
6954 "(RandomAccess point POC), skip it\n");
6955 }
6956 return 1;
6957 }
6958
6959 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
6960 hevc->skip_flag = 0;
6961 /**/
6962 /* if((iPrevPOC != curr_POC)){ */
6963 if (rpm_param->p.slice_segment_address == 0) {
6964 struct PIC_s *pic;
6965
6966 hevc->new_pic = 1;
6967#ifdef MULTI_INSTANCE_SUPPORT
6968 if (!hevc->m_ins_flag)
6969#endif
6970 check_pic_decoded_error_pre(hevc,
6971 READ_VREG(HEVC_PARSER_LCU_START)
6972 & 0xffffff);
6973 /**/ if (use_cma == 0) {
6974 if (hevc->pic_list_init_flag == 0) {
6975 init_pic_list(hevc);
6976 init_pic_list_hw(hevc);
6977 init_buf_spec(hevc);
6978 hevc->pic_list_init_flag = 3;
6979 }
6980 }
6981 if (!hevc->m_ins_flag) {
6982 if (hevc->cur_pic)
6983 get_picture_qos_info(hevc);
6984 }
6985 hevc->first_pic_after_recover = 0;
6986 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6987 dump_pic_list(hevc);
6988 /* prev pic */
6989 hevc_pre_pic(hevc, pic);
6990 /*
6991 *update referenced of old pictures
6992 *(cur_pic->referenced is 1 and not updated)
6993 */
6994 apply_ref_pic_set(hevc, hevc->curr_POC,
6995 rpm_param);
6996
6997 if (hevc->mmu_enable)
6998 recycle_mmu_bufs(hevc);
6999
7000#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7001 if (vdec->master) {
7002 struct hevc_state_s *hevc_ba =
7003 (struct hevc_state_s *)
7004 vdec->master->private;
7005 if (hevc_ba->cur_pic != NULL) {
7006 hevc_ba->cur_pic->dv_enhance_exist = 1;
7007 hevc_print(hevc, H265_DEBUG_DV,
7008 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
7009 hevc->curr_POC, hevc_ba->cur_pic->POC);
7010 }
7011 }
7012 if (vdec->master == NULL &&
7013 vdec->slave == NULL)
7014 set_aux_data(hevc,
7015 hevc->cur_pic, 1, 0); /*suffix*/
7016 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7017 set_aux_data(hevc,
7018 hevc->cur_pic, 0, 1); /*dv meta only*/
7019#else
7020 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7021#endif
7022 /* new pic */
7023 hevc->cur_pic = hevc->is_used_v4l ?
7024 v4l_get_new_pic(hevc, rpm_param) :
7025 get_new_pic(hevc, rpm_param);
7026 if (hevc->cur_pic == NULL) {
7027 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
7028 dump_pic_list(hevc);
7029 hevc->wait_buf = 1;
7030 return -1;
7031 }
7032#ifdef MULTI_INSTANCE_SUPPORT
7033 hevc->decoding_pic = hevc->cur_pic;
7034 if (!hevc->m_ins_flag)
7035 hevc->over_decode = 0;
7036#endif
7037#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7038 hevc->cur_pic->dv_enhance_exist = 0;
7039 if (vdec->slave)
7040 hevc_print(hevc, H265_DEBUG_DV,
7041 "Clear bl (poc %d) dv_enhance_exist flag\n",
7042 hevc->curr_POC);
7043 if (vdec->master == NULL &&
7044 vdec->slave == NULL)
7045 set_aux_data(hevc,
7046 hevc->cur_pic, 0, 0); /*prefix*/
7047
7048 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7049 set_aux_data(hevc,
7050 hevc->cur_pic, 0, 2); /*pre sei only*/
7051#else
7052 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7053#endif
7054 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
7055 hevc->cur_pic->output_ready = 1;
7056 hevc->cur_pic->stream_offset =
7057 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
7058 prepare_display_buf(hevc, hevc->cur_pic);
7059 hevc->wait_buf = 2;
7060 return -1;
7061 }
7062 } else {
7063 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
7064#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7065 if (vdec->master == NULL &&
7066 vdec->slave == NULL) {
7067 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7068 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7069 }
7070#else
7071 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7072 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7073#endif
7074 }
7075 if (hevc->pic_list_init_flag != 3
7076 || hevc->cur_pic == NULL) {
7077 /* make it dec from the first slice segment */
7078 return 3;
7079 }
7080 hevc->cur_pic->slice_idx++;
7081 hevc->new_pic = 0;
7082 }
7083 } else {
7084 if (hevc->wait_buf == 1) {
7085 pic_list_process(hevc);
7086 hevc->cur_pic = hevc->is_used_v4l ?
7087 v4l_get_new_pic(hevc, rpm_param) :
7088 get_new_pic(hevc, rpm_param);
7089 if (hevc->cur_pic == NULL)
7090 return -1;
7091
7092 if (!hevc->m_ins_flag)
7093 hevc->over_decode = 0;
7094
7095#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7096 hevc->cur_pic->dv_enhance_exist = 0;
7097 if (vdec->master == NULL &&
7098 vdec->slave == NULL)
7099 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7100#else
7101 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7102#endif
7103 hevc->wait_buf = 0;
7104 } else if (hevc->wait_buf == 2) {
7106 if (get_display_pic_num(hevc) > 1)
7108 return -1;
7109 hevc->wait_buf = 0;
7110 }
7111 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7112 dump_pic_list(hevc);
7113 }
7114
7115 if (hevc->new_pic) {
7116#if 1
7117 /*SUPPORT_10BIT*/
7118 int sao_mem_unit =
7119 (hevc->lcu_size == 16 ? 9 :
7120 hevc->lcu_size ==
7121 32 ? 14 : 24) << 4;
7122#else
7123 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
7124#endif
7125 int pic_height_cu =
7126 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
7127 int pic_width_cu =
7128 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
7129 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
7130
7131 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
7132 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7133 hevc_print(hevc, 0,
7134 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
7135 __func__,
7136 hevc->decode_idx,
7137 hevc->curr_pic_struct,
7138 hevc->interlace_flag,
7139 hevc->cur_pic->index);
7140 }
7141 if (dbg_skip_decode_index != 0 &&
7142 hevc->decode_idx == dbg_skip_decode_index)
7143 dbg_skip_flag = 1;
7144
7145 hevc->decode_idx++;
7146 update_tile_info(hevc, pic_width_cu, pic_height_cu,
7147 sao_mem_unit, rpm_param);
7148
7149 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
7150 }
7151
7152 if (hevc->iPrevPOC != hevc->curr_POC) {
7153 hevc->new_tile = 1;
7154 hevc->tile_x = 0;
7155 hevc->tile_y = 0;
7156 hevc->tile_y_x = 0;
7157 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7158 hevc_print(hevc, 0,
7159 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
7160 hevc->tile_x, hevc->tile_y);
7161 }
7162 } else if (hevc->tile_enabled) {
7163 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7164 hevc_print(hevc, 0,
7165 "slice_segment_address is %d\n",
7166 rpm_param->p.slice_segment_address);
7167 }
7168 hevc->tile_y_x =
7169 get_tile_index(hevc, rpm_param->p.slice_segment_address,
7170 (hevc->pic_w +
7171 hevc->lcu_size -
7172 1) / hevc->lcu_size);
7173 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
7174 && (hevc->tile_y_x != -1)) {
7175 hevc->new_tile = 1;
7176 hevc->tile_x = hevc->tile_y_x & 0xff;
7177 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
7178 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7179 hevc_print(hevc, 0,
7180 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
7181 rpm_param->p.slice_segment_address,
7182 hevc->tile_x, hevc->tile_y);
7183 }
7184 } else
7185 hevc->new_tile = 0;
7186 } else
7187 hevc->new_tile = 0;
7188
7189 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
7190 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
7191 hevc->new_tile = 0;
7192
7193 if (hevc->new_tile) {
7194 hevc->tile_start_lcu_x =
7195 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
7196 hevc->tile_start_lcu_y =
7197 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
7198 hevc->tile_width_lcu =
7199 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
7200 hevc->tile_height_lcu =
7201 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
7202 }
7203
7204 set_ref_pic_list(hevc, rpm_param);
7205
7206 Col_ref = rpm_param->p.collocated_ref_idx;
7207
7208 hevc->LDCFlag = 0;
7209 if (rpm_param->p.slice_type != I_SLICE) {
7210 hevc->LDCFlag = 1;
7211 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
7212 if (hevc->cur_pic->
7213 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
7214 hevc->curr_POC)
7215 hevc->LDCFlag = 0;
7216 }
7217 if (rpm_param->p.slice_type == B_SLICE) {
7218 for (i = 0; (i < hevc->RefNum_L1)
7219 && hevc->LDCFlag; i++) {
7220 if (hevc->cur_pic->
7221 m_aiRefPOCList1[hevc->cur_pic->
7222 slice_idx][i] >
7223 hevc->curr_POC)
7224 hevc->LDCFlag = 0;
7225 }
7226 }
7227 }
7228
7229 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
7230
7231 hevc->plevel =
7232 rpm_param->p.log2_parallel_merge_level;
7233 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
7234
7235 hevc->LongTerm_Curr = 0; /* to do ... */
7236 hevc->LongTerm_Col = 0; /* to do ... */
7237
7238 hevc->list_no = 0;
7239 if (rpm_param->p.slice_type == B_SLICE)
7240 hevc->list_no = 1 - hevc->ColFromL0Flag;
7241 if (hevc->list_no == 0) {
7242 if (Col_ref < hevc->RefNum_L0) {
7243 hevc->Col_POC =
7244 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
7245 slice_idx][Col_ref];
7246 } else
7247 hevc->Col_POC = INVALID_POC;
7248 } else {
7249 if (Col_ref < hevc->RefNum_L1) {
7250 hevc->Col_POC =
7251 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
7252 slice_idx][Col_ref];
7253 } else
7254 hevc->Col_POC = INVALID_POC;
7255 }
7256
7257 hevc->LongTerm_Ref = 0; /* to do ... */
7258
7259 if (hevc->slice_type != 2) {
7260 /* if(hevc->i_only==1){ */
7261 /* return 0xf; */
7262 /* } */
7263
7264 if (hevc->Col_POC != INVALID_POC) {
7265 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7266 if (hevc->col_pic == NULL) {
7267 hevc->cur_pic->error_mark = 1;
7268 if (get_dbg_flag(hevc)) {
7269 hevc_print(hevc, 0,
7270 "WRONG, fail to get the pic Col_POC\n");
7271 }
7272 if (is_log_enable(hevc))
7273 add_log(hevc,
7274 "WRONG, fail to get the pic Col_POC");
7275 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7276 hevc->cur_pic->error_mark = 1;
7277 if (get_dbg_flag(hevc)) {
7278 hevc_print(hevc, 0,
7279 "WRONG, Col_POC error_mark is 1\n");
7280 }
7281 if (is_log_enable(hevc))
7282 add_log(hevc,
7283 "WRONG, Col_POC error_mark is 1");
7284 } else {
7285 if ((hevc->col_pic->width
7286 != hevc->pic_w) ||
7287 (hevc->col_pic->height
7288 != hevc->pic_h)) {
7289 hevc_print(hevc, 0,
7290 "Wrong reference pic (poc %d) width/height %d/%d\n",
7291 hevc->col_pic->POC,
7292 hevc->col_pic->width,
7293 hevc->col_pic->height);
7294 hevc->cur_pic->error_mark = 1;
7295 }
7296
7297 }
7298
7299 if (hevc->cur_pic->error_mark
7300 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7301#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7302 /*count info*/
7303 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7304 hevc->cur_pic->stream_offset);
7305#endif
7306 }
7307
7308 if (is_skip_decoding(hevc,
7309 hevc->cur_pic)) {
7310 return 2;
7311 }
7312 } else
7313 hevc->col_pic = hevc->cur_pic;
7314 } /* */
7315 if (hevc->col_pic == NULL)
7316 hevc->col_pic = hevc->cur_pic;
7317#ifdef BUFFER_MGR_ONLY
7318 return 0xf;
7319#else
7320 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7321 || (dbg_skip_flag))
7322 return 0xf;
7323#endif
7324
7325 config_mc_buffer(hevc, hevc->cur_pic);
7326
7327 if (is_skip_decoding(hevc,
7328 hevc->cur_pic)) {
7329 if (get_dbg_flag(hevc))
7330 hevc_print(hevc, 0,
7331 "Discard this picture index %d\n",
7332 hevc->cur_pic->index);
7333#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7334 /*count info*/
7335 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7336 hevc->cur_pic->stream_offset);
7337#endif
7338 return 2;
7339 }
7340#ifdef MCRCC_ENABLE
7341 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7342#endif
7343 config_mpred_hw(hevc);
7344
7345 config_sao_hw(hevc, rpm_param);
7346
7347 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7348 return 0xf;
7349
7350 return 0;
7351}
7352
7353
7354
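/*
 * Allocate the scatter MMU pages backing one compressed frame: any previous
 * scatter allocation for this index is released, the compressed body size is
 * rounded up to 4KB pages, and that many entries are requested from the
 * decoder_mmu_box, capped at MAX_FRAME_4K_NUM (MAX_FRAME_8K_NUM from SM1 on).
 * Nothing is allocated when only the double-write output is used
 * (double_write_mode & 0x10).
 */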
7355static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7356 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7357 int cur_buf_idx = new_pic->index;
7358 int bit_depth_10 = (bit_depth != 0x00);
7359 int picture_size;
7360 int cur_mmu_4k_number;
7361 int ret, max_frame_num;
7362 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7363 new_pic->height, !bit_depth_10);
7364 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7365 if (hevc->double_write_mode & 0x10)
7366 return 0;
7367 /*hevc_print(hevc, 0,
7368 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7369 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7370 if (new_pic->scatter_alloc) {
7371 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7372 new_pic->scatter_alloc = 0;
7373 }
7374 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7375 max_frame_num = MAX_FRAME_8K_NUM;
7376 else
7377 max_frame_num = MAX_FRAME_4K_NUM;
7378 if (cur_mmu_4k_number > max_frame_num) {
7379 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7380 cur_mmu_4k_number,
7381 new_pic->width,
7382 new_pic->height);
7383 return -1;
7384 }
7385 ret = decoder_mmu_box_alloc_idx(
7386 hevc->mmu_box,
7387 cur_buf_idx,
7388 cur_mmu_4k_number,
7389 mmu_index_adr);
7390 if (ret == 0)
7391 new_pic->scatter_alloc = 1;
7392
7393 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7394 "%s pic index %d page count(%d) ret =%d\n",
7395 __func__, cur_buf_idx,
7396 cur_mmu_4k_number, ret);
7397 return ret;
7398}
7399
7400
7401static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7402 struct PIC_s *pic)
7403{
7404 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7405 "%s pic index %d scatter_alloc %d\n",
7406 __func__, pic->index,
7407 pic->scatter_alloc);
7408
7409 if (hevc->mmu_enable
7410 && ((hevc->double_write_mode & 0x10) == 0)
7411 && pic->scatter_alloc)
7412 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7413 pic->scatter_alloc = 0;
7414}
7415
7416/*
7417 *************************************************
7418 *
7419 *h265 buffer management end
7420 *
7421 **************************************************
7422 */
7423static struct hevc_state_s *gHevc;
7424
7425static void hevc_local_uninit(struct hevc_state_s *hevc)
7426{
7427 hevc->rpm_ptr = NULL;
7428 hevc->lmem_ptr = NULL;
7429
7430#ifdef SWAP_HEVC_UCODE
7431 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7432 if (hevc->mc_cpu_addr != NULL) {
7433 dma_free_coherent(amports_get_dma_device(),
7434 hevc->swap_size, hevc->mc_cpu_addr,
7435 hevc->mc_dma_handle);
7436 hevc->mc_cpu_addr = NULL;
7437 }
7438
7439 }
7440#endif
7441#ifdef DETREFILL_ENABLE
7442 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7443 uninit_detrefill_buf(hevc);
7444#endif
7445 if (hevc->aux_addr) {
7446 dma_free_coherent(amports_get_dma_device(),
7447 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7448 hevc->aux_phy_addr);
7449 hevc->aux_addr = NULL;
7450 }
7451 if (hevc->rpm_addr) {
7452 dma_free_coherent(amports_get_dma_device(),
7453 RPM_BUF_SIZE, hevc->rpm_addr,
7454 hevc->rpm_phy_addr);
7455 hevc->rpm_addr = NULL;
7456 }
7457 if (hevc->lmem_addr) {
7458 dma_free_coherent(amports_get_dma_device(),
7459 LMEM_BUF_SIZE, hevc->lmem_addr,
7460 hevc->lmem_phy_addr);
7461 hevc->lmem_addr = NULL;
7462 }
7463
7464 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7465 if (hevc->frame_mmu_map_phy_addr)
7466 dma_free_coherent(amports_get_dma_device(),
7467 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7468 hevc->frame_mmu_map_phy_addr);
7469
7470 hevc->frame_mmu_map_addr = NULL;
7471 }
7472
7473 kfree(gvs);
7474 gvs = NULL;
7475}
7476
7477static int hevc_local_init(struct hevc_state_s *hevc)
7478{
7479 int ret = -1;
7480 struct BuffInfo_s *cur_buf_info = NULL;
7481
7482 memset(&hevc->param, 0, sizeof(union param_u));
7483
7484 cur_buf_info = &hevc->work_space_buf_store;
7485
7486 if (vdec_is_support_4k()) {
7487 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7488 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7489 sizeof(struct BuffInfo_s));
7490 else
7491 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7492 sizeof(struct BuffInfo_s));
7493 } else
7494 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7495 sizeof(struct BuffInfo_s));
7496
7497 cur_buf_info->start_adr = hevc->buf_start;
7498 init_buff_spec(hevc, cur_buf_info);
7499
7500 hevc_init_stru(hevc, cur_buf_info);
7501
7502 hevc->bit_depth_luma = 8;
7503 hevc->bit_depth_chroma = 8;
7504 hevc->video_signal_type = 0;
7505 hevc->video_signal_type_debug = 0;
7506 bit_depth_luma = hevc->bit_depth_luma;
7507 bit_depth_chroma = hevc->bit_depth_chroma;
7508 video_signal_type = hevc->video_signal_type;
7509
7510 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7511 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7512 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7513 if (hevc->rpm_addr == NULL) {
7514 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7515 return -1;
7516 }
7517 hevc->rpm_ptr = hevc->rpm_addr;
7518 }
7519
7520 if (prefix_aux_buf_size > 0 ||
7521 suffix_aux_buf_size > 0) {
7522 u32 aux_buf_size;
7523
7524 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7525 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7526 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7527 hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7528 aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7529 if (hevc->aux_addr == NULL) {
7530 pr_err("%s: failed to alloc aux buffer\n", __func__);
7531 return -1;
7532 }
7533 }
7534
7535 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7536 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7537 if (hevc->lmem_addr == NULL) {
7538 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7539 return -1;
7540 }
7541 hevc->lmem_ptr = hevc->lmem_addr;
7542
7543 if (hevc->mmu_enable) {
7544 hevc->frame_mmu_map_addr =
7545 dma_alloc_coherent(amports_get_dma_device(),
7546 get_frame_mmu_map_size(),
7547 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7548 if (hevc->frame_mmu_map_addr == NULL) {
7549 pr_err("%s: failed to alloc frame_mmu_map buffer\n", __func__);
7550 return -1;
7551 }
7552 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7553 }
7554 ret = 0;
7555 return ret;
7556}
7557
7558/*
7559 *******************************************
7560 * Mailbox command
7561 *******************************************
7562 */
7563#define CMD_FINISHED 0
7564#define CMD_ALLOC_VIEW 1
7565#define CMD_FRAME_DISPLAY 3
7566#define CMD_DEBUG 10
7567
7568
7569#define DECODE_BUFFER_NUM_MAX 32
7570#define DISPLAY_BUFFER_NUM 6
7571
7572#define video_domain_addr(adr) (adr&0x7fffffff)
7573#define video_domain_addr(adr) ((adr) & 0x7fffffff)
7574
7575#define spec2canvas(x) \
7576 (((x)->uv_canvas_index << 16) | \
7577 ((x)->uv_canvas_index << 8) | \
7578 ((x)->y_canvas_index << 0))
7579
7580
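/*
 * Bind the picture's output buffers to display canvases.  With double write
 * enabled the canvas covers the down-scaled double-write buffer (decoded
 * size divided by get_double_write_ratio()); without double write a canvas
 * is configured only when the compressed MMU path is not in use.
 */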
7581static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7582{
7583 struct vdec_s *vdec = hw_to_vdec(hevc);
7584 int canvas_w = ALIGN(pic->width, 64)/4;
7585 int canvas_h = ALIGN(pic->height, 32)/4;
7586 int blkmode = hevc->mem_map_mode;
7587
7588 /*CANVAS_BLKMODE_64X32*/
7589#ifdef SUPPORT_10BIT
7590 if (pic->double_write_mode) {
7591 canvas_w = pic->width /
7592 get_double_write_ratio(hevc, pic->double_write_mode);
7593 canvas_h = pic->height /
7594 get_double_write_ratio(hevc, pic->double_write_mode);
7595
7596 if (hevc->mem_map_mode == 0)
7597 canvas_w = ALIGN(canvas_w, 32);
7598 else
7599 canvas_w = ALIGN(canvas_w, 64);
7600 canvas_h = ALIGN(canvas_h, 32);
7601
7602 if (vdec->parallel_dec == 1) {
7603 if (pic->y_canvas_index == -1)
7604 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7605 if (pic->uv_canvas_index == -1)
7606 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7607 } else {
7608 pic->y_canvas_index = 128 + pic->index * 2;
7609 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7610 }
7611
7612 canvas_config_ex(pic->y_canvas_index,
7613 pic->dw_y_adr, canvas_w, canvas_h,
7614 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7615 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7616 canvas_w, canvas_h,
7617 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7618#ifdef MULTI_INSTANCE_SUPPORT
7619 pic->canvas_config[0].phy_addr =
7620 pic->dw_y_adr;
7621 pic->canvas_config[0].width =
7622 canvas_w;
7623 pic->canvas_config[0].height =
7624 canvas_h;
7625 pic->canvas_config[0].block_mode =
7626 blkmode;
7627 pic->canvas_config[0].endian = hevc->is_used_v4l ? 0 : 7;
7628
7629 pic->canvas_config[1].phy_addr =
7630 pic->dw_u_v_adr;
7631 pic->canvas_config[1].width =
7632 canvas_w;
7633 pic->canvas_config[1].height =
7634 canvas_h;
7635 pic->canvas_config[1].block_mode =
7636 blkmode;
7637 pic->canvas_config[1].endian = hevc->is_used_v4l ? 0 : 7;
7638#endif
7639 } else {
7640 if (!hevc->mmu_enable) {
7641 /* to change after 10bit VPU is ready ... */
7642 if (vdec->parallel_dec == 1) {
7643 if (pic->y_canvas_index == -1)
7644 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7645 pic->uv_canvas_index = pic->y_canvas_index;
7646 } else {
7647 pic->y_canvas_index = 128 + pic->index;
7648 pic->uv_canvas_index = 128 + pic->index;
7649 }
7650
7651 canvas_config_ex(pic->y_canvas_index,
7652 pic->mc_y_adr, canvas_w, canvas_h,
7653 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7654 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7655 canvas_w, canvas_h,
7656 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7657 }
7658 }
7659#else
7660 if (vdec->parallel_dec == 1) {
7661 if (pic->y_canvas_index == -1)
7662 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7663 if (pic->uv_canvas_index == -1)
7664 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7665 } else {
7666 pic->y_canvas_index = 128 + pic->index * 2;
7667 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7668 }
7669
7670
7671 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7672 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7673 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7674 canvas_w, canvas_h,
7675 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7676#endif
7677}
7678
7679static int init_buf_spec(struct hevc_state_s *hevc)
7680{
7681 int pic_width = hevc->pic_w;
7682 int pic_height = hevc->pic_h;
7683
7684 /* hevc_print(hevc, 0,
7685 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7686 */
7687 hevc_print(hevc, 0,
7688 "%s2 %d %d\n", __func__, pic_width, pic_height);
7689 /* pic_width = hevc->pic_w; */
7690 /* pic_height = hevc->pic_h; */
7691
7692 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7693 hevc->frame_width = pic_width;
7694 hevc->frame_height = pic_height;
7695
7696 }
7697
7698 return 0;
7699}
7700
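/*
 * Parse one SEI NAL from the aux data buffer: walk the payload_type/
 * payload_size pairs and latch picture timing (pic_struct), HDR10+ ITU-T
 * T.35 user data presence, mastering display colour volume and content
 * light level into the hevc state, setting the matching bits in
 * hevc->sei_present_flag.
 */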
7701static int parse_sei(struct hevc_state_s *hevc,
7702 struct PIC_s *pic, char *sei_buf, uint32_t size)
7703{
7704 char *p = sei_buf;
7705 char *p_sei;
7706 uint16_t header;
7707 uint8_t nal_unit_type;
7708 uint8_t payload_type, payload_size;
7709 int i, j;
7710
7711 if (size < 2)
7712 return 0;
7713 header = *p++;
7714 header <<= 8;
7715 header += *p++;
7716 nal_unit_type = header >> 9;
7717 if ((nal_unit_type != NAL_UNIT_SEI)
7718 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7719 return 0;
7720 while (p+2 <= sei_buf+size) {
7721 payload_type = *p++;
7722 payload_size = *p++;
7723 if (p+payload_size <= sei_buf+size) {
7724 switch (payload_type) {
7725 case SEI_PicTiming:
7726 if ((parser_sei_enable & 0x4) &&
7727 hevc->frame_field_info_present_flag) {
7728 p_sei = p;
7729 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7730 pic->pic_struct = hevc->curr_pic_struct;
7731 if (get_dbg_flag(hevc) &
7732 H265_DEBUG_PIC_STRUCT) {
7733 hevc_print(hevc, 0,
7734 "parse result pic_struct = %d\n",
7735 hevc->curr_pic_struct);
7736 }
7737 }
7738 break;
7739 case SEI_UserDataITU_T_T35:
7740 p_sei = p;
7741 if (p_sei[0] == 0xB5
7742 && p_sei[1] == 0x00
7743 && p_sei[2] == 0x3C
7744 && p_sei[3] == 0x00
7745 && p_sei[4] == 0x01
7746 && p_sei[5] == 0x04)
7747 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7748
7749 break;
7750 case SEI_MasteringDisplayColorVolume:
7751 /*hevc_print(hevc, 0,
7752 "sei type: primary display color volume %d, size %d\n",
7753 payload_type,
7754 payload_size);*/
7755 /* master_display_colour */
7756 p_sei = p;
7757 for (i = 0; i < 3; i++) {
7758 for (j = 0; j < 2; j++) {
7759 hevc->primaries[i][j]
7760 = (*p_sei<<8)
7761 | *(p_sei+1);
7762 p_sei += 2;
7763 }
7764 }
7765 for (i = 0; i < 2; i++) {
7766 hevc->white_point[i]
7767 = (*p_sei<<8)
7768 | *(p_sei+1);
7769 p_sei += 2;
7770 }
7771 for (i = 0; i < 2; i++) {
7772 hevc->luminance[i]
7773 = (*p_sei<<24)
7774 | (*(p_sei+1)<<16)
7775 | (*(p_sei+2)<<8)
7776 | *(p_sei+3);
7777 p_sei += 4;
7778 }
7779 hevc->sei_present_flag |=
7780 SEI_MASTER_DISPLAY_COLOR_MASK;
7781 /*for (i = 0; i < 3; i++)
7782 for (j = 0; j < 2; j++)
7783 hevc_print(hevc, 0,
7784 "\tprimaries[%1d][%1d] = %04x\n",
7785 i, j,
7786 hevc->primaries[i][j]);
7787 hevc_print(hevc, 0,
7788 "\twhite_point = (%04x, %04x)\n",
7789 hevc->white_point[0],
7790 hevc->white_point[1]);
7791 hevc_print(hevc, 0,
7792 "\tmax,min luminance = %08x, %08x\n",
7793 hevc->luminance[0],
7794 hevc->luminance[1]);*/
7795 break;
7796 case SEI_ContentLightLevel:
7797 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7798 hevc_print(hevc, 0,
7799 "sei type: max content light level %d, size %d\n",
7800 payload_type, payload_size);
7801 /* content_light_level */
7802 p_sei = p;
7803 hevc->content_light_level[0]
7804 = (*p_sei<<8) | *(p_sei+1);
7805 p_sei += 2;
7806 hevc->content_light_level[1]
7807 = (*p_sei<<8) | *(p_sei+1);
7808 p_sei += 2;
7809 hevc->sei_present_flag |=
7810 SEI_CONTENT_LIGHT_LEVEL_MASK;
7811 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7812 hevc_print(hevc, 0,
7813 "\tmax cll = %04x, max_pa_cll = %04x\n",
7814 hevc->content_light_level[0],
7815 hevc->content_light_level[1]);
7816 break;
7817 default:
7818 break;
7819 }
7820 }
7821 p += payload_size;
7822 }
7823 return 0;
7824}
7825
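/*
 * Map the VUI aspect_ratio_idc (or the explicit SAR when idc is 255) to
 * the 8.8 fixed-point height/width ratio used for vf->ratio_control; the
 * case table corresponds to the spec's sample aspect ratio list.
 */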
7826static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7827 unsigned w, unsigned h)
7828{
7829 unsigned ar;
7830
7831 if (idc == 255) {
7832 ar = div_u64(256ULL * sar_h * h,
7833 sar_w * w);
7834 } else {
7835 switch (idc) {
7836 case 1:
7837 ar = 0x100 * h / w;
7838 break;
7839 case 2:
7840 ar = 0x100 * h * 11 / (w * 12);
7841 break;
7842 case 3:
7843 ar = 0x100 * h * 11 / (w * 10);
7844 break;
7845 case 4:
7846 ar = 0x100 * h * 11 / (w * 16);
7847 break;
7848 case 5:
7849 ar = 0x100 * h * 33 / (w * 40);
7850 break;
7851 case 6:
7852 ar = 0x100 * h * 11 / (w * 24);
7853 break;
7854 case 7:
7855 ar = 0x100 * h * 11 / (w * 20);
7856 break;
7857 case 8:
7858 ar = 0x100 * h * 11 / (w * 32);
7859 break;
7860 case 9:
7861 ar = 0x100 * h * 33 / (w * 80);
7862 break;
7863 case 10:
7864 ar = 0x100 * h * 11 / (w * 18);
7865 break;
7866 case 11:
7867 ar = 0x100 * h * 11 / (w * 15);
7868 break;
7869 case 12:
7870 ar = 0x100 * h * 33 / (w * 64);
7871 break;
7872 case 13:
7873 ar = 0x100 * h * 99 / (w * 160);
7874 break;
7875 case 14:
7876 ar = 0x100 * h * 3 / (w * 4);
7877 break;
7878 case 15:
7879 ar = 0x100 * h * 2 / (w * 3);
7880 break;
7881 case 16:
7882 ar = 0x100 * h * 1 / (w * 2);
7883 break;
7884 default:
7885 ar = h * 0x100 / w;
7886 break;
7887 }
7888 }
7889
7890 return ar;
7891}
7892
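/*
 * Fill the vframe from the decoded picture: geometry scaled by the
 * double-write ratio, duration, aspect-ratio control word, SEI parsed from
 * the picture's aux data (payload type 0x02000000), signal type and HDR
 * metadata (mastering display colour, content light level), which are also
 * forwarded to the v4l2 context when running in v4l mode.
 */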
7893static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
7894 struct PIC_s *pic)
7895{
7896 unsigned int ar;
7897 int i, j;
7898 char *p;
7899 unsigned size = 0;
7900 unsigned type = 0;
7901 struct vframe_master_display_colour_s *vf_dp
7902 = &vf->prop.master_display_colour;
7903
7904 vf->width = pic->width /
7905 get_double_write_ratio(hevc, pic->double_write_mode);
7906 vf->height = pic->height /
7907 get_double_write_ratio(hevc, pic->double_write_mode);
7908
7909 vf->duration = hevc->frame_dur;
7910 vf->duration_pulldown = 0;
7911 vf->flag = 0;
7912
7913 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
7914 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7915
7916
7917 if (((pic->aspect_ratio_idc == 255) &&
7918 pic->sar_width &&
7919 pic->sar_height) ||
7920 ((pic->aspect_ratio_idc != 255) &&
7921 (pic->width))) {
7922 ar = min_t(u32,
7923 calc_ar(pic->aspect_ratio_idc,
7924 pic->sar_width,
7925 pic->sar_height,
7926 pic->width,
7927 pic->height),
7928 DISP_RATIO_ASPECT_RATIO_MAX);
7929 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7930 }
7931 hevc->ratio_control = vf->ratio_control;
7932 if (pic->aux_data_buf
7933 && pic->aux_data_size) {
7934		/* parse sei */
7935 p = pic->aux_data_buf;
7936 while (p < pic->aux_data_buf
7937 + pic->aux_data_size - 8) {
7938 size = *p++;
7939 size = (size << 8) | *p++;
7940 size = (size << 8) | *p++;
7941 size = (size << 8) | *p++;
7942 type = *p++;
7943 type = (type << 8) | *p++;
7944 type = (type << 8) | *p++;
7945 type = (type << 8) | *p++;
7946 if (type == 0x02000000) {
7947 /* hevc_print(hevc, 0,
7948 "sei(%d)\n", size); */
7949 parse_sei(hevc, pic, p, size);
7950 }
7951 p += size;
7952 }
7953 }
7954 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
7955 vf->signal_type = pic->video_signal_type;
7956 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
7957 u32 data;
7958 data = vf->signal_type;
7959 data = data & 0xFFFF00FF;
7960 data = data | (0x30<<8);
7961 vf->signal_type = data;
7962 }
7963 }
7964 else
7965 vf->signal_type = 0;
7966 hevc->video_signal_type_debug = vf->signal_type;
7967
7968 /* master_display_colour */
7969 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
7970 for (i = 0; i < 3; i++)
7971 for (j = 0; j < 2; j++)
7972 vf_dp->primaries[i][j] = hevc->primaries[i][j];
7973 for (i = 0; i < 2; i++) {
7974 vf_dp->white_point[i] = hevc->white_point[i];
7975 vf_dp->luminance[i]
7976 = hevc->luminance[i];
7977 }
7978 vf_dp->present_flag = 1;
7979 } else
7980 vf_dp->present_flag = 0;
7981
7982 /* content_light_level */
7983 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
7984 vf_dp->content_light_level.max_content
7985 = hevc->content_light_level[0];
7986 vf_dp->content_light_level.max_pic_average
7987 = hevc->content_light_level[1];
7988 vf_dp->content_light_level.present_flag = 1;
7989 } else
7990 vf_dp->content_light_level.present_flag = 0;
7991
7992 if (hevc->is_used_v4l &&
7993 ((hevc->sei_present_flag & SEI_HDR10PLUS_MASK) ||
7994 (vf_dp->present_flag) ||
7995 (vf_dp->content_light_level.present_flag))) {
7996 struct aml_vdec_hdr_infos hdr;
7997 struct aml_vcodec_ctx *ctx =
7998 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
7999
8000 memset(&hdr, 0, sizeof(hdr));
8001 hdr.signal_type = vf->signal_type;
8002 hdr.color_parms = *vf_dp;
8003 vdec_v4l_set_hdr_infos(ctx, &hdr);
8004 }
8005}
8006
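/* vframe provider .vf_states: report the pool size and the fill levels of
 * newframe_q (free) and display_q (ready) under the lock; in single-step
 * mode (step == 2) no frame is reported as available.
 */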
8007static int vh265_vf_states(struct vframe_states *states, void *op_arg)
8008{
8009 unsigned long flags;
8010#ifdef MULTI_INSTANCE_SUPPORT
8011 struct vdec_s *vdec = op_arg;
8012 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8013#else
8014 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8015#endif
8016
8017 spin_lock_irqsave(&lock, flags);
8018
8019 states->vf_pool_size = VF_POOL_SIZE;
8020 states->buf_free_num = kfifo_len(&hevc->newframe_q);
8021 states->buf_avail_num = kfifo_len(&hevc->display_q);
8022
8023 if (step == 2)
8024 states->buf_avail_num = 0;
8025 spin_unlock_irqrestore(&lock, flags);
8026 return 0;
8027}
8028
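/* vframe provider .peek: return the head of display_q without consuming it
 * and pre-fill next_vf_pts from the following queued frame when present.
 */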
8029static struct vframe_s *vh265_vf_peek(void *op_arg)
8030{
8031 struct vframe_s *vf[2] = {0, 0};
8032#ifdef MULTI_INSTANCE_SUPPORT
8033 struct vdec_s *vdec = op_arg;
8034 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8035#else
8036 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8037#endif
8038
8039 if (step == 2)
8040 return NULL;
8041
8042 if (force_disp_pic_index & 0x100) {
8043 if (force_disp_pic_index & 0x200)
8044 return NULL;
8045 return &hevc->vframe_dummy;
8046 }
8047
8048
8049 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
8050 if (vf[1]) {
8051 vf[0]->next_vf_pts_valid = true;
8052 vf[0]->next_vf_pts = vf[1]->pts;
8053 } else
8054 vf[0]->next_vf_pts_valid = false;
8055 return vf[0];
8056 }
8057
8058 return NULL;
8059}
8060
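/* vframe provider .get: pop the next frame from display_q, update the
 * get/show counters and record the following frame's pts in next_vf_pts
 * for the receiver.
 */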
8061static struct vframe_s *vh265_vf_get(void *op_arg)
8062{
8063 struct vframe_s *vf;
8064#ifdef MULTI_INSTANCE_SUPPORT
8065 struct vdec_s *vdec = op_arg;
8066 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8067#else
8068 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8069#endif
8070
8071 if (step == 2)
8072 return NULL;
8073 else if (step == 1)
8074 step = 2;
8075
8076#if 0
8077 if (force_disp_pic_index & 0x100) {
8078 int buffer_index = force_disp_pic_index & 0xff;
8079 struct PIC_s *pic = NULL;
8080 if (buffer_index >= 0
8081 && buffer_index < MAX_REF_PIC_NUM)
8082 pic = hevc->m_PIC[buffer_index];
8083 if (pic == NULL)
8084 return NULL;
8085 if (force_disp_pic_index & 0x200)
8086 return NULL;
8087
8088 vf = &hevc->vframe_dummy;
8089 if (get_double_write_mode(hevc)) {
8090 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
8091 VIDTYPE_VIU_NV21;
8092 if (hevc->m_ins_flag) {
8093 vf->canvas0Addr = vf->canvas1Addr = -1;
8094 vf->plane_num = 2;
8095 vf->canvas0_config[0] =
8096 pic->canvas_config[0];
8097 vf->canvas0_config[1] =
8098 pic->canvas_config[1];
8099
8100 vf->canvas1_config[0] =
8101 pic->canvas_config[0];
8102 vf->canvas1_config[1] =
8103 pic->canvas_config[1];
8104 } else {
8105 vf->canvas0Addr = vf->canvas1Addr
8106 = spec2canvas(pic);
8107 }
8108 } else {
8109 vf->canvas0Addr = vf->canvas1Addr = 0;
8110 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8111 if (hevc->mmu_enable)
8112 vf->type |= VIDTYPE_SCATTER;
8113 }
8114 vf->compWidth = pic->width;
8115 vf->compHeight = pic->height;
8116 update_vf_memhandle(hevc, vf, pic);
8117 switch (hevc->bit_depth_luma) {
8118 case 9:
8119 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
8120 break;
8121 case 10:
8122 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
8123 | BITDEPTH_V10;
8124 break;
8125 default:
8126 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8127 break;
8128 }
8129 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8130 vf->bitdepth =
8131 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8132 if (hevc->mem_saving_mode == 1)
8133 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8134 vf->duration_pulldown = 0;
8135 vf->pts = 0;
8136 vf->pts_us64 = 0;
8137 set_frame_info(hevc, vf);
8138
8139 vf->width = pic->width /
8140 get_double_write_ratio(hevc, pic->double_write_mode);
8141 vf->height = pic->height /
8142 get_double_write_ratio(hevc, pic->double_write_mode);
8143
8144 force_disp_pic_index |= 0x200;
8145 return vf;
8146 }
8147#endif
8148
8149 if (kfifo_get(&hevc->display_q, &vf)) {
8150 struct vframe_s *next_vf;
8151 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8152 hevc_print(hevc, 0,
8153 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8154 __func__, vf, vf->type, vf->index,
8155 get_pic_poc(hevc, vf->index & 0xff),
8156 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8157 vf->pts, vf->pts_us64,
8158 vf->duration);
8159#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8160 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
8161 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
8162 if (pic->aux_data_buf && pic->aux_data_size > 0) {
8163 int i;
8164 struct PIC_s *pic =
8165 hevc->m_PIC[vf->index & 0xff];
8166 hevc_print(hevc, 0,
8167 "pic 0x%p aux size %d:\n",
8168 pic, pic->aux_data_size);
8169 for (i = 0; i < pic->aux_data_size; i++) {
8170 hevc_print_cont(hevc, 0,
8171 "%02x ", pic->aux_data_buf[i]);
8172 if (((i + 1) & 0xf) == 0)
8173 hevc_print_cont(hevc, 0, "\n");
8174 }
8175 hevc_print_cont(hevc, 0, "\n");
8176 }
8177 }
8178#endif
8179 hevc->show_frame_num++;
8180 vf->index_disp = hevc->vf_get_count;
8181 hevc->vf_get_count++;
8182
8183 if (kfifo_peek(&hevc->display_q, &next_vf)) {
8184 vf->next_vf_pts_valid = true;
8185 vf->next_vf_pts = next_vf->pts;
8186 } else
8187 vf->next_vf_pts_valid = false;
8188
8189 return vf;
8190 }
8191
8192 return NULL;
8193}
8194static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
8195 int i;
8196 for (i = 0; i < VF_POOL_SIZE; i++) {
8197 if (vf == &hevc->vfpool[i])
8198 return true;
8199 }
8200 pr_info(" h265 invalid vf been put, vf = %p\n", vf);
8201 for (i = 0; i < VF_POOL_SIZE; i++) {
8202 pr_info("www valid vf[%d]= %p \n", i, &hevc->vfpool[i]);
8203 }
8204 return false;
8205}
8206
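/* vframe provider .put: recycle the vframe into newframe_q and drop the
 * vf_ref of the top/bottom pictures encoded in vf->index; when a picture's
 * last reference goes away its buffer is released (output_ready = 0) and
 * the decoder is kicked via the mailbox IRQ if it was waiting for a buffer.
 */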
8207static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
8208{
8209 unsigned long flags;
8210#ifdef MULTI_INSTANCE_SUPPORT
8211 struct vdec_s *vdec = op_arg;
8212 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8213#else
8214 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8215#endif
8216 unsigned char index_top;
8217 unsigned char index_bot;
8218
8219 if (vf && (vf_valid_check(vf, hevc) == false))
8220 return;
8221 if (vf == (&hevc->vframe_dummy))
8222 return;
8223 index_top = vf->index & 0xff;
8224 index_bot = (vf->index >> 8) & 0xff;
8225 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8226 hevc_print(hevc, 0,
8227 "%s(type %d index 0x%x)\n",
8228 __func__, vf->type, vf->index);
8229 hevc->vf_put_count++;
8230 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8231 spin_lock_irqsave(&lock, flags);
8232
8233 if (index_top != 0xff
8234 && index_top < MAX_REF_PIC_NUM
8235 && hevc->m_PIC[index_top]) {
8236 if (hevc->is_used_v4l)
8237 hevc->m_PIC[index_top]->vframe_bound = true;
8238 if (hevc->m_PIC[index_top]->vf_ref > 0) {
8239 hevc->m_PIC[index_top]->vf_ref--;
8240
8241 if (hevc->m_PIC[index_top]->vf_ref == 0) {
8242 hevc->m_PIC[index_top]->output_ready = 0;
8243
8244 if (hevc->wait_buf != 0)
8245 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8246 0x1);
8247 }
8248 }
8249 }
8250
8251 if (index_bot != 0xff
8252 && index_bot < MAX_REF_PIC_NUM
8253 && hevc->m_PIC[index_bot]) {
8254 if (hevc->is_used_v4l)
8255 hevc->m_PIC[index_bot]->vframe_bound = true;
8256 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
8257 hevc->m_PIC[index_bot]->vf_ref--;
8258
8259 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
8260 hevc->m_PIC[index_bot]->output_ready = 0;
8261 if (hevc->wait_buf != 0)
8262 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8263 0x1);
8264 }
8265 }
8266 }
8267 spin_unlock_irqrestore(&lock, flags);
8268}
8269
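/* Receiver event callback: GET_AUX_DATA returns the aux (SEI / Dolby Vision
 * metadata) buffer of the picture referenced by the given vframe;
 * DOLBY_BYPASS_EL enables bypassing of the enhancement layer.
 */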
8270static int vh265_event_cb(int type, void *data, void *op_arg)
8271{
8272 unsigned long flags;
8273#ifdef MULTI_INSTANCE_SUPPORT
8274 struct vdec_s *vdec = op_arg;
8275 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8276#else
8277 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8278#endif
8279 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8280#if 0
8281 amhevc_stop();
8282#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8283 vf_light_unreg_provider(&vh265_vf_prov);
8284#endif
8285 spin_lock_irqsave(&hevc->lock, flags);
8286 vh265_local_init();
8287 vh265_prot_init();
8288 spin_unlock_irqrestore(&hevc->lock, flags);
8289#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8290 vf_reg_provider(&vh265_vf_prov);
8291#endif
8292 amhevc_start();
8293#endif
8294 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8295 struct provider_aux_req_s *req =
8296 (struct provider_aux_req_s *)data;
8297 unsigned char index;
8298
8299 if (!req->vf) {
8300 req->aux_size = hevc->vf_put_count;
8301 return 0;
8302 }
8303 spin_lock_irqsave(&lock, flags);
8304 index = req->vf->index & 0xff;
8305 req->aux_buf = NULL;
8306 req->aux_size = 0;
8307 if (req->bot_flag)
8308 index = (req->vf->index >> 8) & 0xff;
8309 if (index != 0xff
8310 && index < MAX_REF_PIC_NUM
8311 && hevc->m_PIC[index]) {
8312 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8313 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8314#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8315 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8316 req->dv_enhance_exist = false;
8317 else
8318 req->dv_enhance_exist =
8319 hevc->m_PIC[index]->dv_enhance_exist;
8320 hevc_print(hevc, H265_DEBUG_DV,
8321 "query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux sizd 0x%x\n",
8322 req->vf,
8323 hevc->m_PIC[index]->POC, index,
8324 req->dv_enhance_exist, req->aux_size);
8325#else
8326 req->dv_enhance_exist = 0;
8327#endif
8328 }
8329 spin_unlock_irqrestore(&lock, flags);
8330
8331 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8332 hevc_print(hevc, 0,
8333 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8334 __func__, type, index, req->aux_size);
8335#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8336 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8337 if ((force_bypass_dvenl & 0x80000000) == 0) {
8338 hevc_print(hevc, 0,
8339 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8340 __func__);
8341 hevc->bypass_dvenl_enable = 1;
8342 }
8343
8344#endif
8345 }
8346 return 0;
8347}
8348
8349#ifdef HEVC_PIC_STRUCT_SUPPORT
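/*
 * Interlaced stream support: pair a previously pended field vframe with the
 * current picture. If more than one vframe is pending, the oldest one is
 * flushed to display_q; otherwise the pending vframe is either sent out
 * alone (when the pair picture was already recycled) or completed with
 * pair_pic as the missing top/bottom field (index, canvas address, vf_ref)
 * before being queued to display_q.
 */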
8350static int process_pending_vframe(struct hevc_state_s *hevc,
8351 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8352{
8353 struct vframe_s *vf;
8354
8355 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8356 hevc_print(hevc, 0,
8357 "%s: pair_pic index 0x%x %s\n",
8358 __func__, pair_pic->index,
8359 pair_frame_top_flag ?
8360 "top" : "bot");
8361
8362 if (kfifo_len(&hevc->pending_q) > 1) {
8363 unsigned long flags;
8364		/* do not keep more than 1 frame pending */
8365 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8366 hevc_print(hevc, 0,
8367 "fatal error, no available buffer slot.");
8368 return -1;
8369 }
8370 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8371 hevc_print(hevc, 0,
8372 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8373 __func__, vf->index);
8374 if ((hevc->double_write_mode == 3) &&
8375 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8376 vf->type |= VIDTYPE_COMPRESS;
8377 if (hevc->mmu_enable)
8378 vf->type |= VIDTYPE_SCATTER;
8379 }
8380 hevc->vf_pre_count++;
8381 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8382 spin_lock_irqsave(&lock, flags);
8383 vf->index &= 0xff;
8384 hevc->m_PIC[vf->index]->output_ready = 0;
8385 if (hevc->wait_buf != 0)
8386 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8387 0x1);
8388 spin_unlock_irqrestore(&lock, flags);
8389
8390 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8391 }
8392
8393 if (kfifo_peek(&hevc->pending_q, &vf)) {
8394 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8395 /*
8396 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8397 *do not use it
8398 */
8399 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8400 hevc_print(hevc, 0,
8401 "fatal error, no available buffer slot.");
8402 return -1;
8403 }
8404 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8405 hevc_print(hevc, 0,
8406 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8407 __func__, vf->index);
8408 if (vf) {
8409 if ((hevc->double_write_mode == 3) &&
8410 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8411 vf->type |= VIDTYPE_COMPRESS;
8412 if (hevc->mmu_enable)
8413 vf->type |= VIDTYPE_SCATTER;
8414 }
8415 hevc->vf_pre_count++;
8416 kfifo_put(&hevc->display_q,
8417 (const struct vframe_s *)vf);
8418 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8419 }
8420 } else if ((!pair_frame_top_flag) &&
8421 (((vf->index >> 8) & 0xff) == 0xff)) {
8422 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8423 hevc_print(hevc, 0,
8424 "fatal error, no available buffer slot.");
8425 return -1;
8426 }
8427 if (vf) {
8428 if ((hevc->double_write_mode == 3) &&
8429 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8430 vf->type |= VIDTYPE_COMPRESS;
8431 if (hevc->mmu_enable)
8432 vf->type |= VIDTYPE_SCATTER;
8433 }
8434 vf->index &= 0xff;
8435 vf->index |= (pair_pic->index << 8);
8436 vf->canvas1Addr = spec2canvas(pair_pic);
8437 pair_pic->vf_ref++;
8438 kfifo_put(&hevc->display_q,
8439 (const struct vframe_s *)vf);
8440 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8441 hevc->vf_pre_count++;
8442 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8443 hevc_print(hevc, 0,
8444 "%s vf => display_q: (index 0x%x)\n",
8445 __func__, vf->index);
8446 }
8447 } else if (pair_frame_top_flag &&
8448 ((vf->index & 0xff) == 0xff)) {
8449 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8450 hevc_print(hevc, 0,
8451 "fatal error, no available buffer slot.");
8452 return -1;
8453 }
8454 if (vf) {
8455 if ((hevc->double_write_mode == 3) &&
8456 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8457 vf->type |= VIDTYPE_COMPRESS;
8458 if (hevc->mmu_enable)
8459 vf->type |= VIDTYPE_SCATTER;
8460 }
8461 vf->index &= 0xff00;
8462 vf->index |= pair_pic->index;
8463 vf->canvas0Addr = spec2canvas(pair_pic);
8464 pair_pic->vf_ref++;
8465 kfifo_put(&hevc->display_q,
8466 (const struct vframe_s *)vf);
8467 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8468 hevc->vf_pre_count++;
8469 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8470 hevc_print(hevc, 0,
8471 "%s vf => display_q: (index 0x%x)\n",
8472 __func__, vf->index);
8473 }
8474 }
8475 }
8476 return 0;
8477}
8478#endif
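/* Attach the mmu/bmmu box memory handles of the picture's buffers to the
 * vframe so the buffer lifetime follows the vframe downstream.
 */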
8479static void update_vf_memhandle(struct hevc_state_s *hevc,
8480 struct vframe_s *vf, struct PIC_s *pic)
8481{
8482 if (pic->index < 0) {
8483 vf->mem_handle = NULL;
8484 vf->mem_head_handle = NULL;
8485 } else if (vf->type & VIDTYPE_SCATTER) {
8486 vf->mem_handle =
8487 decoder_mmu_box_get_mem_handle(
8488 hevc->mmu_box, pic->index);
8489 vf->mem_head_handle =
8490 decoder_bmmu_box_get_mem_handle(
8491 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8492 } else {
8493 vf->mem_handle =
8494 decoder_bmmu_box_get_mem_handle(
8495 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8496 vf->mem_head_handle = NULL;
8497 /*vf->mem_head_handle =
8498 decoder_bmmu_box_get_mem_handle(
8499 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8500 }
8501 return;
8502}
8503
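/* Collect per-picture QoS statistics (frame type, size, pts, min/avg/max MV,
 * QP and skip values) into hevc->vframe_qos and report them through
 * vdec_fill_frame_info() when frameinfo_enable is set.
 */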
8504static void fill_frame_info(struct hevc_state_s *hevc,
8505 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8506{
8507 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8508 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8509 vframe_qos->type = 4;
8510 else if (pic->slice_type == I_SLICE)
8511 vframe_qos->type = 1;
8512 else if (pic->slice_type == P_SLICE)
8513 vframe_qos->type = 2;
8514 else if (pic->slice_type == B_SLICE)
8515 vframe_qos->type = 3;
8516/*
8517#define SHOW_QOS_INFO
8518*/
8519 vframe_qos->size = framesize;
8520 vframe_qos->pts = pts;
8521#ifdef SHOW_QOS_INFO
8522 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8523#endif
8524
8525
8526 vframe_qos->max_mv = pic->max_mv;
8527 vframe_qos->avg_mv = pic->avg_mv;
8528 vframe_qos->min_mv = pic->min_mv;
8529#ifdef SHOW_QOS_INFO
8530 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8531 vframe_qos->max_mv,
8532 vframe_qos->avg_mv,
8533 vframe_qos->min_mv);
8534#endif
8535
8536 vframe_qos->max_qp = pic->max_qp;
8537 vframe_qos->avg_qp = pic->avg_qp;
8538 vframe_qos->min_qp = pic->min_qp;
8539#ifdef SHOW_QOS_INFO
8540 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8541 vframe_qos->max_qp,
8542 vframe_qos->avg_qp,
8543 vframe_qos->min_qp);
8544#endif
8545
8546 vframe_qos->max_skip = pic->max_skip;
8547 vframe_qos->avg_skip = pic->avg_skip;
8548 vframe_qos->min_skip = pic->min_skip;
8549#ifdef SHOW_QOS_INFO
8550 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8551 vframe_qos->max_skip,
8552 vframe_qos->avg_skip,
8553 vframe_qos->min_skip);
8554#endif
8555
8556 vframe_qos->num++;
8557
8558 if (hevc->frameinfo_enable)
8559 vdec_fill_frame_info(vframe_qos, 1);
8560}
8561
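/*
 * Turn a decoded picture into one or more vframes: take a free vframe from
 * newframe_q, resolve its pts (chunk timestamps for frame-based input, PTS
 * lookup or duration-based extrapolation for stream-based input), fill the
 * compressed/double-write buffer addresses, bit depth and conformance-window
 * crop, split into fields for interlaced pic_struct values, then queue to
 * display_q and notify the receiver.
 */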
8562static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8563{
8564#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8565 struct vdec_s *vdec = hw_to_vdec(hevc);
8566#endif
8567 struct vframe_s *vf = NULL;
8568 int stream_offset = pic->stream_offset;
8569 unsigned short slice_type = pic->slice_type;
8570 u32 frame_size;
8571
8572 if (force_disp_pic_index & 0x100) {
8573 /*recycle directly*/
8574 pic->output_ready = 0;
8575 return -1;
8576 }
8577 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8578 hevc_print(hevc, 0,
8579 "fatal error, no available buffer slot.");
8580 return -1;
8581 }
8582 display_frame_count[hevc->index]++;
8583 if (vf) {
8584 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8585 "%s: pic index 0x%x\n",
8586 __func__, pic->index);*/
8587
8588 if (hevc->is_used_v4l) {
8589 vf->v4l_mem_handle
8590 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8591 if (hevc->mmu_enable) {
8592 vf->mm_box.bmmu_box = hevc->bmmu_box;
8593 vf->mm_box.bmmu_idx = VF_BUFFER_IDX(pic->BUF_index);
8594 vf->mm_box.mmu_box = hevc->mmu_box;
8595 vf->mm_box.mmu_idx = pic->index;
8596 }
8597 }
8598
8599#ifdef MULTI_INSTANCE_SUPPORT
8600 if (vdec_frame_based(hw_to_vdec(hevc))) {
8601 vf->pts = pic->pts;
8602 vf->pts_us64 = pic->pts64;
8603 vf->timestamp = pic->timestamp;
8604 }
8605 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8606 stream_offset, &vf->pts, 0) != 0) { */
8607#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8608 else if (vdec->master == NULL) {
8609#else
8610 else {
8611#endif
8612#endif
8613 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8614 "call pts_lookup_offset_us64(0x%x)\n",
8615 stream_offset);
8616 if (pts_lookup_offset_us64
8617 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8618 &frame_size, 0,
8619 &vf->pts_us64) != 0) {
8620#ifdef DEBUG_PTS
8621 hevc->pts_missed++;
8622#endif
8623 vf->pts = 0;
8624 vf->pts_us64 = 0;
8625 }
8626#ifdef DEBUG_PTS
8627 else
8628 hevc->pts_hit++;
8629#endif
8630#ifdef MULTI_INSTANCE_SUPPORT
8631#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8632 } else {
8633 vf->pts = 0;
8634 vf->pts_us64 = 0;
8635 }
8636#else
8637 }
8638#endif
8639#endif
8640 if (pts_unstable && (hevc->frame_dur > 0))
8641 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8642
8643 fill_frame_info(hevc, pic, frame_size, vf->pts);
8644
8645 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8646 && hevc->get_frame_dur) {
8647 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8648
8649 if (pts_diff < 0) {
8650 hevc->pts_mode_switching_count++;
8651 hevc->pts_mode_recovery_count = 0;
8652
8653 if (hevc->pts_mode_switching_count >=
8654 PTS_MODE_SWITCHING_THRESHOLD) {
8655 hevc->pts_mode =
8656 PTS_NONE_REF_USE_DURATION;
8657 hevc_print(hevc, 0,
8658 "HEVC: switch to n_d mode.\n");
8659 }
8660
8661 } else {
8662 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8663
8664 hevc->pts_mode_recovery_count++;
8665 if (hevc->pts_mode_recovery_count > p) {
8666 hevc->pts_mode_switching_count = 0;
8667 hevc->pts_mode_recovery_count = 0;
8668 }
8669 }
8670 }
8671
8672 if (vf->pts != 0)
8673 hevc->last_lookup_pts = vf->pts;
8674
8675 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8676 && (slice_type != 2))
8677 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8678 hevc->last_pts = vf->pts;
8679
8680 if (vf->pts_us64 != 0)
8681 hevc->last_lookup_pts_us64 = vf->pts_us64;
8682
8683 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8684 && (slice_type != 2)) {
8685 vf->pts_us64 =
8686 hevc->last_pts_us64 +
8687 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8688 }
8689 hevc->last_pts_us64 = vf->pts_us64;
8690 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8691 hevc_print(hevc, 0,
8692 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8693 vf->pts, vf->pts_us64);
8694 }
8695
8696 /*
8697 *vf->index:
8698 *(1) vf->type is VIDTYPE_PROGRESSIVE
8699 * and vf->canvas0Addr != vf->canvas1Addr,
8700 * vf->index[7:0] is the index of top pic
8701 * vf->index[15:8] is the index of bot pic
8702 *(2) other cases,
8703 * only vf->index[7:0] is used
8704 * vf->index[15:8] == 0xff
8705 */
8706 vf->index = 0xff00 | pic->index;
8707#if 1
8708/*SUPPORT_10BIT*/
8709 if (pic->double_write_mode & 0x10) {
8710 /* double write only */
8711 vf->compBodyAddr = 0;
8712 vf->compHeadAddr = 0;
8713 } else {
8714
8715 if (hevc->mmu_enable) {
8716 vf->compBodyAddr = 0;
8717 vf->compHeadAddr = pic->header_adr;
8718 } else {
8719 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8720 vf->compHeadAddr = pic->mc_y_adr +
8721 pic->losless_comp_body_size;
8722 vf->mem_head_handle = NULL;
8723 }
8724
8725 /*head adr*/
8726 vf->canvas0Addr = vf->canvas1Addr = 0;
8727 }
8728 if (pic->double_write_mode) {
8729 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8730 vf->type |= VIDTYPE_VIU_NV21;
8731
8732 if ((pic->double_write_mode == 3) &&
8733 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8734 vf->type |= VIDTYPE_COMPRESS;
8735 if (hevc->mmu_enable)
8736 vf->type |= VIDTYPE_SCATTER;
8737 }
8738#ifdef MULTI_INSTANCE_SUPPORT
8739 if (hevc->m_ins_flag &&
8740 (get_dbg_flag(hevc)
8741 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8742 vf->canvas0Addr = vf->canvas1Addr = -1;
8743 vf->plane_num = 2;
8744 vf->canvas0_config[0] =
8745 pic->canvas_config[0];
8746 vf->canvas0_config[1] =
8747 pic->canvas_config[1];
8748
8749 vf->canvas1_config[0] =
8750 pic->canvas_config[0];
8751 vf->canvas1_config[1] =
8752 pic->canvas_config[1];
8753
8754 } else
8755#endif
8756 vf->canvas0Addr = vf->canvas1Addr
8757 = spec2canvas(pic);
8758 } else {
8759 vf->canvas0Addr = vf->canvas1Addr = 0;
8760 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8761 if (hevc->mmu_enable)
8762 vf->type |= VIDTYPE_SCATTER;
8763 }
8764 vf->compWidth = pic->width;
8765 vf->compHeight = pic->height;
8766 update_vf_memhandle(hevc, vf, pic);
8767 switch (pic->bit_depth_luma) {
8768 case 9:
8769 vf->bitdepth = BITDEPTH_Y9;
8770 break;
8771 case 10:
8772 vf->bitdepth = BITDEPTH_Y10;
8773 break;
8774 default:
8775 vf->bitdepth = BITDEPTH_Y8;
8776 break;
8777 }
8778 switch (pic->bit_depth_chroma) {
8779 case 9:
8780 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8781 break;
8782 case 10:
8783 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8784 break;
8785 default:
8786 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8787 break;
8788 }
8789 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8790 vf->bitdepth =
8791 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8792 if (pic->mem_saving_mode == 1)
8793 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8794#else
8795 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8796 vf->type |= VIDTYPE_VIU_NV21;
8797 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8798#endif
8799 set_frame_info(hevc, vf, pic);
8800 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8801 /* hevc_print(hevc, 0,
8802 "aaa: %d/%d, %d/%d\n",
8803 vf->width,vf->height, pic->width, pic->height); */
8804 vf->width = pic->width;
8805 vf->height = pic->height;
8806
8807 if (force_w_h != 0) {
8808 vf->width = (force_w_h >> 16) & 0xffff;
8809 vf->height = force_w_h & 0xffff;
8810 }
8811 if (force_fps & 0x100) {
8812 u32 rate = force_fps & 0xff;
8813
8814 if (rate)
8815 vf->duration = 96000/rate;
8816 else
8817 vf->duration = 0;
8818 }
8819 if (force_fps & 0x200) {
8820 vf->pts = 0;
8821 vf->pts_us64 = 0;
8822 }
8823 /*
8824 * !!! to do ...
8825	 * need to move the code below to get_new_pic();
8826	 * hevc->xxx can only be used by the currently decoded pic
8827 */
8828 if (pic->conformance_window_flag &&
8829 (get_dbg_flag(hevc) &
8830 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
8831 unsigned int SubWidthC, SubHeightC;
8832
8833 switch (pic->chroma_format_idc) {
8834 case 1:
8835 SubWidthC = 2;
8836 SubHeightC = 2;
8837 break;
8838 case 2:
8839 SubWidthC = 2;
8840 SubHeightC = 1;
8841 break;
8842 default:
8843 SubWidthC = 1;
8844 SubHeightC = 1;
8845 break;
8846 }
8847 vf->width -= SubWidthC *
8848 (pic->conf_win_left_offset +
8849 pic->conf_win_right_offset);
8850 vf->height -= SubHeightC *
8851 (pic->conf_win_top_offset +
8852 pic->conf_win_bottom_offset);
8853
8854 vf->compWidth -= SubWidthC *
8855 (pic->conf_win_left_offset +
8856 pic->conf_win_right_offset);
8857 vf->compHeight -= SubHeightC *
8858 (pic->conf_win_top_offset +
8859 pic->conf_win_bottom_offset);
8860
8861 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
8862 hevc_print(hevc, 0,
8863 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
8864 pic->chroma_format_idc,
8865 pic->conf_win_left_offset,
8866 pic->conf_win_right_offset,
8867 pic->conf_win_top_offset,
8868 pic->conf_win_bottom_offset,
8869 vf->width, vf->height, vf->compWidth, vf->compHeight);
8870 }
8871
8872 vf->width = vf->width /
8873 get_double_write_ratio(hevc, pic->double_write_mode);
8874 vf->height = vf->height /
8875 get_double_write_ratio(hevc, pic->double_write_mode);
8876#ifdef HEVC_PIC_STRUCT_SUPPORT
8877 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
8878 struct vframe_s *vf2;
8879
8880 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8881 hevc_print(hevc, 0,
8882 "pic_struct = %d index 0x%x\n",
8883 pic->pic_struct,
8884 pic->index);
8885
8886 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8887 hevc_print(hevc, 0,
8888 "fatal error, no available buffer slot.");
8889 return -1;
8890 }
8891 pic->vf_ref = 2;
8892 vf->duration = vf->duration>>1;
8893 memcpy(vf2, vf, sizeof(struct vframe_s));
8894
8895 if (pic->pic_struct == 3) {
8896 vf->type = VIDTYPE_INTERLACE_TOP
8897 | VIDTYPE_VIU_NV21;
8898 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8899 | VIDTYPE_VIU_NV21;
8900 } else {
8901 vf->type = VIDTYPE_INTERLACE_BOTTOM
8902 | VIDTYPE_VIU_NV21;
8903 vf2->type = VIDTYPE_INTERLACE_TOP
8904 | VIDTYPE_VIU_NV21;
8905 }
8906 hevc->vf_pre_count++;
8907 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8908 kfifo_put(&hevc->display_q,
8909 (const struct vframe_s *)vf);
8910 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8911 hevc->vf_pre_count++;
8912 kfifo_put(&hevc->display_q,
8913 (const struct vframe_s *)vf2);
8914 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8915 } else if (pic->pic_struct == 5
8916 || pic->pic_struct == 6) {
8917 struct vframe_s *vf2, *vf3;
8918
8919 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8920 hevc_print(hevc, 0,
8921 "pic_struct = %d index 0x%x\n",
8922 pic->pic_struct,
8923 pic->index);
8924
8925 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8926 hevc_print(hevc, 0,
8927 "fatal error, no available buffer slot.");
8928 return -1;
8929 }
8930 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
8931 hevc_print(hevc, 0,
8932 "fatal error, no available buffer slot.");
8933 return -1;
8934 }
8935 pic->vf_ref = 3;
8936 vf->duration = vf->duration/3;
8937 memcpy(vf2, vf, sizeof(struct vframe_s));
8938 memcpy(vf3, vf, sizeof(struct vframe_s));
8939
8940 if (pic->pic_struct == 5) {
8941 vf->type = VIDTYPE_INTERLACE_TOP
8942 | VIDTYPE_VIU_NV21;
8943 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8944 | VIDTYPE_VIU_NV21;
8945 vf3->type = VIDTYPE_INTERLACE_TOP
8946 | VIDTYPE_VIU_NV21;
8947 } else {
8948 vf->type = VIDTYPE_INTERLACE_BOTTOM
8949 | VIDTYPE_VIU_NV21;
8950 vf2->type = VIDTYPE_INTERLACE_TOP
8951 | VIDTYPE_VIU_NV21;
8952 vf3->type = VIDTYPE_INTERLACE_BOTTOM
8953 | VIDTYPE_VIU_NV21;
8954 }
8955 hevc->vf_pre_count++;
8956 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8957 kfifo_put(&hevc->display_q,
8958 (const struct vframe_s *)vf);
8959 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8960 hevc->vf_pre_count++;
8961 kfifo_put(&hevc->display_q,
8962 (const struct vframe_s *)vf2);
8963 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8964 hevc->vf_pre_count++;
8965 kfifo_put(&hevc->display_q,
8966 (const struct vframe_s *)vf3);
8967 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
8968
8969 } else if (pic->pic_struct == 9
8970 || pic->pic_struct == 10) {
8971 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8972 hevc_print(hevc, 0,
8973 "pic_struct = %d index 0x%x\n",
8974 pic->pic_struct,
8975 pic->index);
8976
8977 pic->vf_ref = 1;
8978 /* process previous pending vf*/
8979 process_pending_vframe(hevc,
8980 pic, (pic->pic_struct == 9));
8981
8982 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8983 /* process current vf */
8984 kfifo_put(&hevc->pending_q,
8985 (const struct vframe_s *)vf);
8986 vf->height <<= 1;
8987 if (pic->pic_struct == 9) {
8988 vf->type = VIDTYPE_INTERLACE_TOP
8989 | VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8990 process_pending_vframe(hevc,
8991 hevc->pre_bot_pic, 0);
8992 } else {
8993 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8994 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8995 vf->index = (pic->index << 8) | 0xff;
8996 process_pending_vframe(hevc,
8997 hevc->pre_top_pic, 1);
8998 }
8999
9000 if (hevc->vf_pre_count == 0)
9001 hevc->vf_pre_count++;
9002
9003 /**/
9004 if (pic->pic_struct == 9)
9005 hevc->pre_top_pic = pic;
9006 else
9007 hevc->pre_bot_pic = pic;
9008
9009 } else if (pic->pic_struct == 11
9010 || pic->pic_struct == 12) {
9011 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9012 hevc_print(hevc, 0,
9013 "pic_struct = %d index 0x%x\n",
9014 pic->pic_struct,
9015 pic->index);
9016 pic->vf_ref = 1;
9017 /* process previous pending vf*/
9018 process_pending_vframe(hevc, pic,
9019 (pic->pic_struct == 11));
9020
9021 /* put current into pending q */
9022 vf->height <<= 1;
9023 if (pic->pic_struct == 11)
9024 vf->type = VIDTYPE_INTERLACE_TOP |
9025 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
9026 else {
9027 vf->type = VIDTYPE_INTERLACE_BOTTOM |
9028 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
9029 vf->index = (pic->index << 8) | 0xff;
9030 }
9031 decoder_do_frame_check(hw_to_vdec(hevc), vf);
9032 kfifo_put(&hevc->pending_q,
9033 (const struct vframe_s *)vf);
9034 if (hevc->vf_pre_count == 0)
9035 hevc->vf_pre_count++;
9036
9037 /**/
9038 if (pic->pic_struct == 11)
9039 hevc->pre_top_pic = pic;
9040 else
9041 hevc->pre_bot_pic = pic;
9042
9043 } else {
9044 pic->vf_ref = 1;
9045
9046 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9047 hevc_print(hevc, 0,
9048 "pic_struct = %d index 0x%x\n",
9049 pic->pic_struct,
9050 pic->index);
9051
9052 switch (pic->pic_struct) {
9053 case 7:
9054 vf->duration <<= 1;
9055 break;
9056 case 8:
9057 vf->duration = vf->duration * 3;
9058 break;
9059 case 1:
9060 vf->height <<= 1;
9061 vf->type = VIDTYPE_INTERLACE_TOP |
9062 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
9063 process_pending_vframe(hevc, pic, 1);
9064 hevc->pre_top_pic = pic;
9065 break;
9066 case 2:
9067 vf->height <<= 1;
9068 vf->type = VIDTYPE_INTERLACE_BOTTOM
9069 | VIDTYPE_VIU_NV21
9070 | VIDTYPE_VIU_FIELD;
9071 process_pending_vframe(hevc, pic, 0);
9072 hevc->pre_bot_pic = pic;
9073 break;
9074 }
9075 hevc->vf_pre_count++;
9076 decoder_do_frame_check(hw_to_vdec(hevc), vf);
9077 kfifo_put(&hevc->display_q,
9078 (const struct vframe_s *)vf);
9079 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9080 }
9081#else
9082 vf->type_original = vf->type;
9083 pic->vf_ref = 1;
9084 hevc->vf_pre_count++;
9085 decoder_do_frame_check(hw_to_vdec(hevc), vf);
9086 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
9087 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9088
9089 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9090 hevc_print(hevc, 0,
9091 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
9092 __func__, vf->type, vf->index,
9093 get_pic_poc(hevc, vf->index & 0xff),
9094 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
9095 vf->pts, vf->pts_us64,
9096 vf->duration);
9097#endif
9098#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
9099 /*count info*/
9100 vdec_count_info(gvs, 0, stream_offset);
9101#endif
9102 hw_to_vdec(hevc)->vdec_fps_detec(hw_to_vdec(hevc)->id);
9103 if (without_display_mode == 0) {
9104 vf_notify_receiver(hevc->provider_name,
9105 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9106 }
9107 else
9108 vh265_vf_put(vh265_vf_get(vdec), vdec);
9109 }
9110
9111 return 0;
9112}
9113
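/* v4l2 path: on EOS, wait (up to 2s) for a free capture buffer or request
 * one from the v4l2 context, tag the dummy vframe as an empty EOS frame and
 * queue it so the receiver sees end of stream.
 */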
9114static int notify_v4l_eos(struct vdec_s *vdec)
9115{
9116 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
9117 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
9118 struct vframe_s *vf = &hw->vframe_dummy;
9119 struct vdec_v4l2_buffer *fb = NULL;
9120 int index = INVALID_IDX;
9121 ulong expires;
9122
9123 if (hw->is_used_v4l && hw->eos) {
9124 expires = jiffies + msecs_to_jiffies(2000);
9125 while (INVALID_IDX == (index = get_free_buf_idx(hw))) {
9126 if (time_after(jiffies, expires))
9127 break;
9128 }
9129
9130 if (index == INVALID_IDX) {
9131 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb) < 0) {
9132 pr_err("[%d] EOS get free buff fail.\n", ctx->id);
9133 return -1;
9134 }
9135 }
9136
9137 vf->type |= VIDTYPE_V4L_EOS;
9138 vf->timestamp = ULONG_MAX;
9139 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
9140 vf->v4l_mem_handle = (index == INVALID_IDX) ? (ulong)fb :
9141 hw->m_BUF[index].v4l_ref_buf_addr;
9142 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
9143 vf_notify_receiver(vdec->vf_provider_name,
9144 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9145
9146 pr_info("[%d] H265 EOS notify.\n", ctx->id);
9147 }
9148
9149 return 0;
9150}
9151
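/*
 * Parse an SEI payload directly through the HEVC shift-register interface.
 * For payload type 137 (mastering display colour volume) the primaries,
 * white point and luminance are read 16 bits at a time via
 * HEVC_SHIFTED_DATA/HEVC_SHIFT_COMMAND and SEI_MASTER_DISPLAY_COLOR_MASK is
 * set; any remaining payload bytes are skipped.
 */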
9152static void process_nal_sei(struct hevc_state_s *hevc,
9153 int payload_type, int payload_size)
9154{
9155 unsigned short data;
9156
9157 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9158 hevc_print(hevc, 0,
9159 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
9160 payload_type, payload_size);
9161
9162 if (payload_type == 137) {
9163 int i, j;
9164 /* MASTERING_DISPLAY_COLOUR_VOLUME */
9165 if (payload_size >= 24) {
9166 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9167 hevc_print(hevc, 0,
9168 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
9169 for (i = 0; i < 3; i++) {
9170 for (j = 0; j < 2; j++) {
9171 data =
9172 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9173 hevc->primaries[i][j] = data;
9174 WRITE_HREG(HEVC_SHIFT_COMMAND,
9175 (1<<7)|16);
9176 if (get_dbg_flag(hevc) &
9177 H265_DEBUG_PRINT_SEI)
9178 hevc_print(hevc, 0,
9179 "\t\tprimaries[%1d][%1d] = %04x\n",
9180 i, j, hevc->primaries[i][j]);
9181 }
9182 }
9183 for (i = 0; i < 2; i++) {
9184 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9185 hevc->white_point[i] = data;
9186 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
9187 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9188 hevc_print(hevc, 0,
9189 "\t\twhite_point[%1d] = %04x\n",
9190 i, hevc->white_point[i]);
9191 }
9192 for (i = 0; i < 2; i++) {
9193 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9194 hevc->luminance[i] = data << 16;
9195 WRITE_HREG(HEVC_SHIFT_COMMAND,
9196 (1<<7)|16);
9197 data =
9198 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9199 hevc->luminance[i] |= data;
9200 WRITE_HREG(HEVC_SHIFT_COMMAND,
9201 (1<<7)|16);
9202 if (get_dbg_flag(hevc) &
9203 H265_DEBUG_PRINT_SEI)
9204 hevc_print(hevc, 0,
9205 "\t\tluminance[%1d] = %08x\n",
9206 i, hevc->luminance[i]);
9207 }
9208 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
9209 }
9210 payload_size -= 24;
9211 while (payload_size > 0) {
9212 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
9213 payload_size--;
9214 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
9215 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
9216 }
9217 }
9218}
9219
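/*
 * Error recovery for the non-multi-instance path: stop the HEVC core,
 * re-derive HEVC_SHIFT_BYTE_COUNT from the current stream read pointer,
 * soft-reset the decoder blocks via DOS_SW_RESET3, restore the stream
 * buffer registers, reprogram the workspace and NAL search mode according
 * to error_handle_policy, then restart the firmware and skip to the next
 * start code.
 */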
9220static int hevc_recover(struct hevc_state_s *hevc)
9221{
9222 int ret = -1;
9223 u32 rem;
9224 u64 shift_byte_count64;
9225 unsigned int hevc_shift_byte_count;
9226 unsigned int hevc_stream_start_addr;
9227 unsigned int hevc_stream_end_addr;
9228 unsigned int hevc_stream_rd_ptr;
9229 unsigned int hevc_stream_wr_ptr;
9230 unsigned int hevc_stream_control;
9231 unsigned int hevc_stream_fifo_ctl;
9232 unsigned int hevc_stream_buf_size;
9233
9234 mutex_lock(&vh265_mutex);
9235#if 0
9236 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9237 int ii;
9238
9239 for (ii = 0; ii < 4; ii++)
9240 hevc_print(hevc, 0,
9241 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9242 if (((i + ii) & 0xf) == 0)
9243 hevc_print(hevc, 0, "\n");
9244 }
9245#endif
9246#define ES_VID_MAN_RD_PTR (1<<0)
9247 if (!hevc->init_flag) {
9248 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
9249 mutex_unlock(&vh265_mutex);
9250 return ret;
9251 }
9252 amhevc_stop();
9253 msleep(20);
9254 ret = 0;
9255 /* reset */
9256 WRITE_PARSER_REG(PARSER_VIDEO_RP, READ_VREG(HEVC_STREAM_RD_PTR));
9257 SET_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9258
9259 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
9260 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
9261 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
9262 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
9263 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
9264 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
9265 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
9266
9267	/* The HEVC stream buffer will be reset and restarted
9268	 * from the current hevc_stream_rd_ptr position
9269 */
9270 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
9271 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9272 if ((hevc->shift_byte_count_lo & (1 << 31))
9273 && ((hevc_shift_byte_count & (1 << 31)) == 0))
9274 hevc->shift_byte_count_hi++;
9275
9276 hevc->shift_byte_count_lo = hevc_shift_byte_count;
9277 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9278 hevc->shift_byte_count_lo;
9279 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9280 shift_byte_count64 -= rem;
9281 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9282
9283 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9284 shift_byte_count64 += hevc_stream_buf_size;
9285
9286 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9287 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
9288
9289 WRITE_VREG(DOS_SW_RESET3,
9290 /* (1<<2)| */
9291 (1 << 3) | (1 << 4) | (1 << 8) |
9292 (1 << 11) | (1 << 12) | (1 << 14)
9293 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9294 WRITE_VREG(DOS_SW_RESET3, 0);
9295
9296 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9297 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9298 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9299 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9300 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9301 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9302 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9303
9304 hevc_config_work_space_hw(hevc);
9305 decoder_hw_reset();
9306
9307 hevc->have_vps = 0;
9308 hevc->have_sps = 0;
9309 hevc->have_pps = 0;
9310
9311 hevc->have_valid_start_slice = 0;
9312
9313 if (get_double_write_mode(hevc) & 0x10)
9314 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9315 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
9316 );
9317
9318 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9319 /* clear mailbox interrupt */
9320 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9321 /* enable mailbox interrupt */
9322 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9323 /* disable PSCALE for hardware sharing */
9324 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9325
9326 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9327
9328 WRITE_VREG(DEBUG_REG1, 0x0);
9329
9330 if ((error_handle_policy & 1) == 0) {
9331 if ((error_handle_policy & 4) == 0) {
9332 /* ucode auto mode, and do not check vps/sps/pps/idr */
9333 WRITE_VREG(NAL_SEARCH_CTL,
9334 0xc);
9335 } else {
9336			WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manually parse NAL */
9337 }
9338 } else {
9339		WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manually parse NAL */
9340 }
9341
9342 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9343 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9344 WRITE_VREG(NAL_SEARCH_CTL,
9345 READ_VREG(NAL_SEARCH_CTL)
9346 | ((parser_sei_enable & 0x7) << 17));
9347#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9348 WRITE_VREG(NAL_SEARCH_CTL,
9349 READ_VREG(NAL_SEARCH_CTL) |
9350 ((parser_dolby_vision_enable & 0x1) << 20));
9351#endif
9352 config_decode_mode(hevc);
9353 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9354
9355 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9356 /* amhevc_disable(); */
9357 /* return -EBUSY; */
9358 /* } */
9359#if 0
9360 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9361 int ii;
9362
9363 for (ii = 0; ii < 4; ii++) {
9364 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9365 hevc_print(hevc, 0,
9366 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9367 }
9368 if (((i + ii) & 0xf) == 0)
9369 hevc_print(hevc, 0, "\n");
9370 }
9371#endif
9372 init_pic_list_hw(hevc);
9373
9374 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9375 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9376
9377#ifdef SWAP_HEVC_UCODE
9378 if (!tee_enabled() && hevc->is_swap &&
9379 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9380 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9381 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9382 }
9383#endif
9384 amhevc_start();
9385
9386 /* skip, search next start code */
9387 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9388 hevc->skip_flag = 1;
9389#ifdef ERROR_HANDLE_DEBUG
9390 if (dbg_nal_skip_count & 0x20000) {
9391 dbg_nal_skip_count &= ~0x20000;
9392 mutex_unlock(&vh265_mutex);
9393 return ret;
9394 }
9395#endif
9396 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9397	/* Interrupt Amrisc to execute */
9398 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9399#ifdef MULTI_INSTANCE_SUPPORT
9400 if (!hevc->m_ins_flag)
9401#endif
9402 hevc->first_pic_after_recover = 1;
9403 mutex_unlock(&vh265_mutex);
9404 return ret;
9405}
9406
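/* Debug helper: hex-dump the prefix and suffix aux (SEI/metadata) buffers;
 * their sizes are taken from HEVC_AUX_DATA_SIZE.
 */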
9407static void dump_aux_buf(struct hevc_state_s *hevc)
9408{
9409 int i;
9410 unsigned short *aux_adr =
9411 (unsigned short *)
9412 hevc->aux_addr;
9413 unsigned int aux_size =
9414 (READ_VREG(HEVC_AUX_DATA_SIZE)
9415 >> 16) << 4;
9416
9417 if (hevc->prefix_aux_size > 0) {
9418 hevc_print(hevc, 0,
9419 "prefix aux: (size %d)\n",
9420 aux_size);
9421 for (i = 0; i <
9422 (aux_size >> 1); i++) {
9423 hevc_print_cont(hevc, 0,
9424 "%04x ",
9425 *(aux_adr + i));
9426 if (((i + 1) & 0xf)
9427 == 0)
9428 hevc_print_cont(hevc,
9429 0, "\n");
9430 }
9431 }
9432 if (hevc->suffix_aux_size > 0) {
9433 aux_adr = (unsigned short *)
9434 (hevc->aux_addr +
9435 hevc->prefix_aux_size);
9436 aux_size =
9437 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9438 << 4;
9439 hevc_print(hevc, 0,
9440 "suffix aux: (size %d)\n",
9441 aux_size);
9442 for (i = 0; i <
9443 (aux_size >> 1); i++) {
9444 hevc_print_cont(hevc, 0,
9445 "%04x ", *(aux_adr + i));
9446 if (((i + 1) & 0xf) == 0)
9447 hevc_print_cont(hevc, 0, "\n");
9448 }
9449 }
9450}
9451
9452#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9453static void dolby_get_meta(struct hevc_state_s *hevc)
9454{
9455 struct vdec_s *vdec = hw_to_vdec(hevc);
9456
9457 if (get_dbg_flag(hevc) &
9458 H265_DEBUG_BUFMGR_MORE)
9459 dump_aux_buf(hevc);
9460 if (vdec->dolby_meta_with_el || vdec->slave) {
9461 set_aux_data(hevc,
9462 hevc->cur_pic, 0, 0);
9463 } else if (vdec->master) {
9464 struct hevc_state_s *hevc_ba =
9465 (struct hevc_state_s *)
9466 vdec->master->private;
9467 /*do not use hevc_ba*/
9468 set_aux_data(hevc,
9469 hevc_ba->cur_pic,
9470 0, 1);
9471 set_aux_data(hevc,
9472 hevc->cur_pic, 0, 2);
9473 }
9474}
9475#endif
9476
9477static void read_decode_info(struct hevc_state_s *hevc)
9478{
9479 uint32_t decode_info =
9480 READ_HREG(HEVC_DECODE_INFO);
9481 hevc->start_decoding_flag |=
9482 (decode_info & 0xff);
9483 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9484}
9485
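/*
 * Threaded half of the decoder interrupt: apply the error_handle_policy
 * recovery paths, track HEVC_SHIFT_BYTE_COUNT wrap-around, latch the chunk
 * timestamps onto the current picture, and dispatch on dec_status
 * (buffer/search empty -> AGAIN or GET_DATA work, DECPIC_DATA_DONE -> the
 * picture-done path, including the multi-picture-per-packet case).
 */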
9486static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9487{
9488 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9489 unsigned int dec_status = hevc->dec_status;
9490 int i, ret;
9491
9492#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9493 struct vdec_s *vdec = hw_to_vdec(hevc);
9494#endif
9495
9496 if (hevc->eos)
9497 return IRQ_HANDLED;
9498 if (
9499#ifdef MULTI_INSTANCE_SUPPORT
9500 (!hevc->m_ins_flag) &&
9501#endif
9502 hevc->error_flag == 1) {
9503 if ((error_handle_policy & 0x10) == 0) {
9504 if (hevc->cur_pic) {
9505 int current_lcu_idx =
9506 READ_VREG(HEVC_PARSER_LCU_START)
9507 & 0xffffff;
9508 if (current_lcu_idx <
9509 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9510 hevc->cur_pic->error_mark = 1;
9511
9512 }
9513 }
9514 if ((error_handle_policy & 1) == 0) {
9515 hevc->error_skip_nal_count = 1;
9516			/* manually search for NALs, skip error_skip_nal_count
9517			 * NALs and trigger the HEVC_NAL_SEARCH_DONE irq
9518 */
9519 WRITE_VREG(NAL_SEARCH_CTL,
9520 (error_skip_nal_count << 4) | 0x1);
9521 } else {
9522 hevc->error_skip_nal_count = error_skip_nal_count;
9523			WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manually parse NAL */
9524 }
9525 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9526#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9527 || vdec->master
9528 || vdec->slave
9529#endif
9530 ) {
9531 WRITE_VREG(NAL_SEARCH_CTL,
9532 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9533 }
9534 WRITE_VREG(NAL_SEARCH_CTL,
9535 READ_VREG(NAL_SEARCH_CTL)
9536 | ((parser_sei_enable & 0x7) << 17));
9537#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9538 WRITE_VREG(NAL_SEARCH_CTL,
9539 READ_VREG(NAL_SEARCH_CTL) |
9540 ((parser_dolby_vision_enable & 0x1) << 20));
9541#endif
9542 config_decode_mode(hevc);
9543 /* search new nal */
9544 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9545		/* Interrupt Amrisc to execute */
9546 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9547
9548 /* hevc_print(hevc, 0,
9549 *"%s: error handle\n", __func__);
9550 */
9551 hevc->error_flag = 2;
9552 return IRQ_HANDLED;
9553 } else if (
9554#ifdef MULTI_INSTANCE_SUPPORT
9555 (!hevc->m_ins_flag) &&
9556#endif
9557 hevc->error_flag == 3) {
9558 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9559 hevc_recover(hevc);
9560 hevc->error_flag = 0;
9561
9562 if ((error_handle_policy & 0x10) == 0) {
9563 if (hevc->cur_pic) {
9564 int current_lcu_idx =
9565 READ_VREG(HEVC_PARSER_LCU_START)
9566 & 0xffffff;
9567 if (current_lcu_idx <
9568 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9569 hevc->cur_pic->error_mark = 1;
9570
9571 }
9572 }
9573 if ((error_handle_policy & 1) == 0) {
9574			/* need to skip some data when
9575			 * error_flag 3 is triggered,
9576			 */
9577			/* to avoid hevc_recover() being called
9578			 * many times at the same bitstream position
9579 */
9580 hevc->error_skip_nal_count = 1;
9581 /* manually search NALs: skip error_skip_nal_count
9582 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9583 */
9584 WRITE_VREG(NAL_SEARCH_CTL,
9585 (error_skip_nal_count << 4) | 0x1);
9586 }
9587
9588 if ((error_handle_policy & 0x2) == 0) {
9589 hevc->have_vps = 1;
9590 hevc->have_sps = 1;
9591 hevc->have_pps = 1;
9592 }
9593 return IRQ_HANDLED;
9594 }
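 /*
  * Maintain a 64-bit stream byte count: HEVC_SHIFT_BYTE_COUNT is only
  * 32 bits wide, so bump the high word whenever bit 31 wraps from 1 to 0.
  */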
9595 if (!hevc->m_ins_flag) {
9596 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9597 if ((hevc->shift_byte_count_lo & (1 << 31))
9598 && ((i & (1 << 31)) == 0))
9599 hevc->shift_byte_count_hi++;
9600 hevc->shift_byte_count_lo = i;
9601 }
9602#ifdef MULTI_INSTANCE_SUPPORT
9603 mutex_lock(&hevc->chunks_mutex);
9604 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9605 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9606 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9607 && (hevc->chunk)) {
9608 hevc->cur_pic->pts = hevc->chunk->pts;
9609 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9610 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9611 }
9612 mutex_unlock(&hevc->chunks_mutex);
9613
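 /*
  * Input ran dry (HEVC_DECODE_BUFEMPTY/BUFEMPTY2).  Frame-based input is
  * treated as an (empty) picture-done; stream-based input either resends
  * the chunk (DEC_RESULT_AGAIN, for Dolby dual-layer or when
  * data_resend_policy bit 0 is set) or asks the core for more data
  * (DEC_RESULT_GET_DATA).
  */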
9614 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9615 dec_status == HEVC_DECODE_BUFEMPTY2) {
9616 if (hevc->m_ins_flag) {
9617 read_decode_info(hevc);
9618 if (vdec_frame_based(hw_to_vdec(hevc))) {
9619 hevc->empty_flag = 1;
9620 goto pic_done;
9621 } else {
9622 if (
9623#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9624 vdec->master ||
9625 vdec->slave ||
9626#endif
9627 (data_resend_policy & 0x1)) {
9628 hevc->dec_result = DEC_RESULT_AGAIN;
9629 amhevc_stop();
9630 restore_decode_state(hevc);
9631 } else
9632 hevc->dec_result = DEC_RESULT_GET_DATA;
9633 }
9634 reset_process_time(hevc);
9635 vdec_schedule_work(&hevc->work);
9636 }
9637 return IRQ_HANDLED;
9638 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9639 (dec_status == HEVC_NAL_DECODE_DONE)
9640 ) {
9641 if (hevc->m_ins_flag) {
9642 read_decode_info(hevc);
9643 if (vdec_frame_based(hw_to_vdec(hevc))) {
9644 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9645 hevc->empty_flag = 1;
9646 goto pic_done;
9647 } else {
9648 hevc->dec_result = DEC_RESULT_AGAIN;
9649 amhevc_stop();
9650 restore_decode_state(hevc);
9651 }
9652
9653 reset_process_time(hevc);
9654 vdec_schedule_work(&hevc->work);
9655 }
9656
9657 return IRQ_HANDLED;
9658 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9659 if (hevc->m_ins_flag) {
9660 struct PIC_s *pic;
9661 struct PIC_s *pic_display;
9662 int decoded_poc;
9663#ifdef DETREFILL_ENABLE
9664 if (hevc->is_swap &&
9665 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9666 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9667 && READ_VREG(HEVC_SAO_DBG_MODE0))
9668 hevc->delrefill_check = 2;
9669 }
9670#endif
9671 hevc->empty_flag = 0;
9672pic_done:
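 /*
  * Picture done.  For frame-based input, if the packet still holds more
  * than frmbase_cont_bitlevel bytes past HEVC_SHIFT_BYTE_COUNT, restart
  * the ucode to decode the next picture of the same packet instead of
  * finishing the work item.
  */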
9673 if (input_frame_based(hw_to_vdec(hevc)) &&
9674 frmbase_cont_bitlevel != 0 &&
9675 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9676 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9677 > frmbase_cont_bitlevel)) {
9678 /*handle the case: multi pictures in one packet*/
9679 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9680 "%s has more data index= %d, size=0x%x shiftcnt=0x%x)\n",
9681 __func__,
9682 hevc->decode_idx, hevc->decode_size,
9683 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9684 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9685 start_process_time(hevc);
9686 return IRQ_HANDLED;
9687 }
9688
9689 read_decode_info(hevc);
9690 get_picture_qos_info(hevc);
9691#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9692 hevc->start_parser_type = 0;
9693 hevc->switch_dvlayer_flag = 0;
9694#endif
9695 hevc->decoded_poc = hevc->curr_POC;
9696 hevc->decoding_pic = NULL;
9697 hevc->dec_result = DEC_RESULT_DONE;
9698#ifdef DETREFILL_ENABLE
9699 if (hevc->is_swap &&
9700 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9701 if (hevc->delrefill_check != 2)
9702#endif
9703
9704 amhevc_stop();
9705
9706 reset_process_time(hevc);
9707
9708 if (hevc->vf_pre_count == 0) {
9709 decoded_poc = hevc->curr_POC;
9710 pic = get_pic_by_POC(hevc, decoded_poc);
9711 if (pic && (pic->POC != INVALID_POC)) {
9712 /*PB skip control */
9713 if (pic->error_mark == 0
9714 && hevc->PB_skip_mode == 1) {
9715 /* start decoding after
9716 * first I
9717 */
9718 hevc->ignore_bufmgr_error |= 0x1;
9719 }
9720 if (hevc->ignore_bufmgr_error & 1) {
9721 if (hevc->PB_skip_count_after_decoding > 0) {
9722 hevc->PB_skip_count_after_decoding--;
9723 } else {
9724 /* start displaying */
9725 hevc->ignore_bufmgr_error |= 0x2;
9726 }
9727 }
9728 if (hevc->mmu_enable
9729 && ((hevc->double_write_mode & 0x10) == 0)) {
9730 if (!hevc->m_ins_flag) {
9731 hevc->used_4k_num =
9732 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9733
9734 if ((!is_skip_decoding(hevc, pic)) &&
9735 (hevc->used_4k_num >= 0) &&
9736 (hevc->cur_pic->scatter_alloc
9737 == 1)) {
9738 hevc_print(hevc,
9739 H265_DEBUG_BUFMGR_MORE,
9740 "%s pic index %d scatter_alloc %d page_start %d\n",
9741 "decoder_mmu_box_free_idx_tail",
9742 hevc->cur_pic->index,
9743 hevc->cur_pic->scatter_alloc,
9744 hevc->used_4k_num);
9745 decoder_mmu_box_free_idx_tail(
9746 hevc->mmu_box,
9747 hevc->cur_pic->index,
9748 hevc->used_4k_num);
9749 hevc->cur_pic->scatter_alloc
9750 = 2;
9751 }
9752 hevc->used_4k_num = -1;
9753 }
9754 }
9755
9756 pic->output_mark = 1;
9757 pic->recon_mark = 1;
9758 }
9759 check_pic_decoded_error(hevc,
9760 READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff);
9761 if (hevc->cur_pic != NULL &&
9762 (READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff) == 0
9763 && (hevc->lcu_x_num * hevc->lcu_y_num != 1))
9764 hevc->cur_pic->error_mark = 1;
9765force_output:
9766 pic_display = output_pic(hevc, 1);
9767 if (pic_display) {
9768 if ((pic_display->error_mark &&
9769 ((hevc->ignore_bufmgr_error &
9770 0x2) == 0))
9771 || (get_dbg_flag(hevc) &
9772 H265_DEBUG_DISPLAY_CUR_FRAME)
9773 || (get_dbg_flag(hevc) &
9774 H265_DEBUG_NO_DISPLAY)) {
9775 pic_display->output_ready = 0;
9776 if (get_dbg_flag(hevc) &
9777 H265_DEBUG_BUFMGR) {
9778 hevc_print(hevc, 0,
9779 "[BM] Display: POC %d, ",
9780 pic_display->POC);
9781 hevc_print_cont(hevc, 0,
9782 "decoding index %d ==> ",
9783 pic_display->
9784 decode_idx);
9785 hevc_print_cont(hevc, 0,
9786 "Debug or err,recycle it\n");
9787 }
9788 } else {
9789 if (pic_display->
9790 slice_type != 2) {
9791 pic_display->output_ready = 0;
9792 } else {
9793 prepare_display_buf
9794 (hevc,
9795 pic_display);
9796 hevc->first_pic_flag = 1;
9797 }
9798 }
9799 }
9800 }
9801
9802 vdec_schedule_work(&hevc->work);
9803 }
9804
9805 return IRQ_HANDLED;
9806#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9807 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9808 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9809 if (hevc->m_ins_flag) {
9810 unsigned char next_parser_type =
9811 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
9812 read_decode_info(hevc);
9813
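 /*
  * Dolby Vision dual-layer: the ucode found the first NAL of the other
  * layer.  Hand the parser type over to the master/slave instance and
  * set switch_dvlayer_flag; if no layer switch happens for more than
  * dolby_el_flush_th pictures, flush the enhancement-layer instance.
  */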
9814 if (vdec->slave &&
9815 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9816 /*cur is base, found enhance*/
9817 struct hevc_state_s *hevc_el =
9818 (struct hevc_state_s *)
9819 vdec->slave->private;
9820 hevc->switch_dvlayer_flag = 1;
9821 hevc->no_switch_dvlayer_count = 0;
9822 hevc_el->start_parser_type =
9823 next_parser_type;
9824 hevc_print(hevc, H265_DEBUG_DV,
9825 "switch (poc %d) to el\n",
9826 hevc->cur_pic ?
9827 hevc->cur_pic->POC :
9828 INVALID_POC);
9829 } else if (vdec->master &&
9830 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
9831 /*cur is enhance, found base*/
9832 struct hevc_state_s *hevc_ba =
9833 (struct hevc_state_s *)
9834 vdec->master->private;
9835 hevc->switch_dvlayer_flag = 1;
9836 hevc->no_switch_dvlayer_count = 0;
9837 hevc_ba->start_parser_type =
9838 next_parser_type;
9839 hevc_print(hevc, H265_DEBUG_DV,
9840 "switch (poc %d) to bl\n",
9841 hevc->cur_pic ?
9842 hevc->cur_pic->POC :
9843 INVALID_POC);
9844 } else {
9845 hevc->switch_dvlayer_flag = 0;
9846 hevc->start_parser_type =
9847 next_parser_type;
9848 hevc->no_switch_dvlayer_count++;
9849 hevc_print(hevc, H265_DEBUG_DV,
9850 "%s: no_switch_dvlayer_count = %d\n",
9851 vdec->master ? "el" : "bl",
9852 hevc->no_switch_dvlayer_count);
9853 if (vdec->slave &&
9854 dolby_el_flush_th != 0 &&
9855 hevc->no_switch_dvlayer_count >
9856 dolby_el_flush_th) {
9857 struct hevc_state_s *hevc_el =
9858 (struct hevc_state_s *)
9859 vdec->slave->private;
9860 struct PIC_s *el_pic;
9861 check_pic_decoded_error(hevc_el,
9862 hevc_el->pic_decoded_lcu_idx);
9863 el_pic = get_pic_by_POC(hevc_el,
9864 hevc_el->curr_POC);
9865 hevc_el->curr_POC = INVALID_POC;
9866 hevc_el->m_pocRandomAccess = MAX_INT;
9867 flush_output(hevc_el, el_pic);
9868 hevc_el->decoded_poc = INVALID_POC; /*
9869 flush_output has already been called */
9870 hevc_el->decoding_pic = NULL;
9871 hevc->no_switch_dvlayer_count = 0;
9872 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
9873 hevc_print(hevc, 0,
9874 "no el anymore, flush_output el\n");
9875 }
9876 }
9877 hevc->decoded_poc = hevc->curr_POC;
9878 hevc->decoding_pic = NULL;
9879 hevc->dec_result = DEC_RESULT_DONE;
9880 amhevc_stop();
9881 reset_process_time(hevc);
9882 if (aux_data_is_avaible(hevc))
9883 dolby_get_meta(hevc);
9884 if (hevc->cur_pic->slice_type == 2 &&
9885 hevc->vf_pre_count == 0) {
9886 hevc_print(hevc, 0,
9887 "first slice_type %x no_switch_dvlayer_count %x\n",
9888 hevc->cur_pic->slice_type,
9889 hevc->no_switch_dvlayer_count);
9890 goto force_output;
9891 }
9892 vdec_schedule_work(&hevc->work);
9893 }
9894
9895 return IRQ_HANDLED;
9896#endif
9897 }
9898
9899#endif
9900
9901 if (dec_status == HEVC_SEI_DAT) {
9902 if (!hevc->m_ins_flag) {
9903 int payload_type =
9904 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
9905 int payload_size =
9906 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
9907 process_nal_sei(hevc,
9908 payload_type, payload_size);
9909 }
9910 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
9911 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
9912 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
9913 int parse_type = HEVC_DISCARD_NAL;
9914
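 /*
  * HEVC_NAL_SEARCH_DONE: classify the NAL the ucode stopped on.  Slices
  * are only accepted once VPS/SPS/PPS and a valid start slice have been
  * seen; anything else (or NALs skipped for error recovery / debugging)
  * stays HEVC_DISCARD_NAL.
  */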
9915 hevc->error_watchdog_count = 0;
9916 hevc->error_skip_nal_wt_cnt = 0;
9917#ifdef MULTI_INSTANCE_SUPPORT
9918 if (hevc->m_ins_flag)
9919 reset_process_time(hevc);
9920#endif
9921 if (slice_parse_begin > 0 &&
9922 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
9923 hevc_print(hevc, 0,
9924 "nal type %d, discard %d\n", naltype,
9925 slice_parse_begin);
9926 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
9927 slice_parse_begin--;
9928 }
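 /*
  * NAL_UNIT_EOS: flush every pending output picture, reset the POC
  * tracking state and tell the ucode to discard this NAL.
  */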
9929 if (naltype == NAL_UNIT_EOS) {
9930 struct PIC_s *pic;
9931
9932 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
9933#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9934 if ((vdec->master || vdec->slave) &&
9935 aux_data_is_avaible(hevc)) {
9936 if (hevc->decoding_pic)
9937 dolby_get_meta(hevc);
9938 }
9939#endif
9940 check_pic_decoded_error(hevc,
9941 hevc->pic_decoded_lcu_idx);
9942 pic = get_pic_by_POC(hevc, hevc->curr_POC);
9943 hevc->curr_POC = INVALID_POC;
9944 /* add to fix RAP_B_Bossen_1 */
9945 hevc->m_pocRandomAccess = MAX_INT;
9946 flush_output(hevc, pic);
9947 clear_poc_flag(hevc);
9948 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
9949 /* Interrupt Amrisc to execute */
9950 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9951#ifdef MULTI_INSTANCE_SUPPORT
9952 if (hevc->m_ins_flag) {
9953 hevc->decoded_poc = INVALID_POC; /*
9954 flush_output has already been called */
9955 hevc->decoding_pic = NULL;
9956 hevc->dec_result = DEC_RESULT_DONE;
9957 amhevc_stop();
9958
9959 vdec_schedule_work(&hevc->work);
9960 }
9961#endif
9962 return IRQ_HANDLED;
9963 }
9964
9965 if (
9966#ifdef MULTI_INSTANCE_SUPPORT
9967 (!hevc->m_ins_flag) &&
9968#endif
9969 hevc->error_skip_nal_count > 0) {
9970 hevc_print(hevc, 0,
9971 "nal type %d, discard %d\n", naltype,
9972 hevc->error_skip_nal_count);
9973 hevc->error_skip_nal_count--;
9974 if (hevc->error_skip_nal_count == 0) {
9975 hevc_recover(hevc);
9976 hevc->error_flag = 0;
9977 if ((error_handle_policy & 0x2) == 0) {
9978 hevc->have_vps = 1;
9979 hevc->have_sps = 1;
9980 hevc->have_pps = 1;
9981 }
9982 return IRQ_HANDLED;
9983 }
9984 } else if (naltype == NAL_UNIT_VPS) {
9985 parse_type = HEVC_NAL_UNIT_VPS;
9986 hevc->have_vps = 1;
9987#ifdef ERROR_HANDLE_DEBUG
9988 if (dbg_nal_skip_flag & 1)
9989 parse_type = HEVC_DISCARD_NAL;
9990#endif
9991 } else if (hevc->have_vps) {
9992 if (naltype == NAL_UNIT_SPS) {
9993 parse_type = HEVC_NAL_UNIT_SPS;
9994 hevc->have_sps = 1;
9995#ifdef ERROR_HANDLE_DEBUG
9996 if (dbg_nal_skip_flag & 2)
9997 parse_type = HEVC_DISCARD_NAL;
9998#endif
9999 } else if (naltype == NAL_UNIT_PPS) {
10000 parse_type = HEVC_NAL_UNIT_PPS;
10001 hevc->have_pps = 1;
10002#ifdef ERROR_HANDLE_DEBUG
10003 if (dbg_nal_skip_flag & 4)
10004 parse_type = HEVC_DISCARD_NAL;
10005#endif
10006 } else if (hevc->have_sps && hevc->have_pps) {
10007 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
10008
10009 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
10010 (naltype ==
10011 NAL_UNIT_CODED_SLICE_IDR_N_LP)
10012 || (naltype ==
10013 NAL_UNIT_CODED_SLICE_CRA)
10014 || (naltype ==
10015 NAL_UNIT_CODED_SLICE_BLA)
10016 || (naltype ==
10017 NAL_UNIT_CODED_SLICE_BLANT)
10018 || (naltype ==
10019 NAL_UNIT_CODED_SLICE_BLA_N_LP)
10020 ) {
10021 if (slice_parse_begin > 0) {
10022 hevc_print(hevc, 0,
10023 "discard %d, for debugging\n",
10024 slice_parse_begin);
10025 slice_parse_begin--;
10026 } else {
10027 parse_type = seg;
10028 }
10029 hevc->have_valid_start_slice = 1;
10030 } else if (naltype <=
10031 NAL_UNIT_CODED_SLICE_CRA
10032 && (hevc->have_valid_start_slice
10033 || (hevc->PB_skip_mode != 3))) {
10034 if (slice_parse_begin > 0) {
10035 hevc_print(hevc, 0,
10036 "discard %d, dd\n",
10037 slice_parse_begin);
10038 slice_parse_begin--;
10039 } else
10040 parse_type = seg;
10041
10042 }
10043 }
10044 }
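 /*
  * All parameter sets plus a valid start slice have been seen with no
  * pending error: switch the ucode to automatic NAL parsing (unless
  * manual-search debugging is enabled) and re-apply the SEI / Dolby
  * Vision parsing bits.
  */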
10045 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
10046 && hevc->have_valid_start_slice &&
10047 hevc->error_flag == 0) {
10048 if ((get_dbg_flag(hevc) &
10049 H265_DEBUG_MAN_SEARCH_NAL) == 0
10050 /* && (!hevc->m_ins_flag)*/) {
10051 /* auto parse NAL; do not check
10052 * vps/sps/pps/idr
10053 */
10054 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
10055 }
10056
10057 if ((get_dbg_flag(hevc) &
10058 H265_DEBUG_NO_EOS_SEARCH_DONE)
10059#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10060 || vdec->master
10061 || vdec->slave
10062#endif
10063 ) {
10064 WRITE_VREG(NAL_SEARCH_CTL,
10065 READ_VREG(NAL_SEARCH_CTL) |
10066 0x10000);
10067 }
10068 WRITE_VREG(NAL_SEARCH_CTL,
10069 READ_VREG(NAL_SEARCH_CTL)
10070 | ((parser_sei_enable & 0x7) << 17));
10071#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10072 WRITE_VREG(NAL_SEARCH_CTL,
10073 READ_VREG(NAL_SEARCH_CTL) |
10074 ((parser_dolby_vision_enable & 0x1) << 20));
10075#endif
10076 config_decode_mode(hevc);
10077 }
10078
10079 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
10080 hevc_print(hevc, 0,
10081 "naltype = %d parse_type %d\n %d %d %d %d\n",
10082 naltype, parse_type, hevc->have_vps,
10083 hevc->have_sps, hevc->have_pps,
10084 hevc->have_valid_start_slice);
10085 }
10086
10087 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
10088 /* Interrupt Amrisc to execute */
10089 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10090#ifdef MULTI_INSTANCE_SUPPORT
10091 if (hevc->m_ins_flag)
10092 start_process_time(hevc);
10093#endif
10094 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
10095#ifdef MULTI_INSTANCE_SUPPORT
10096 if (hevc->m_ins_flag) {
10097 reset_process_time(hevc);
10098 read_decode_info(hevc);
10099
10100 }
10101#endif
10102 if (hevc->start_decoding_time > 0) {
10103 u32 process_time = 1000*
10104 (jiffies - hevc->start_decoding_time)/HZ;
10105 if (process_time > max_decoding_time)
10106 max_decoding_time = process_time;
10107 }
10108
10109 hevc->error_watchdog_count = 0;
10110 if (hevc->pic_list_init_flag == 2) {
10111 hevc->pic_list_init_flag = 3;
10112 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
10113 } else if (hevc->wait_buf == 0) {
10114 u32 vui_time_scale;
10115 u32 vui_num_units_in_tick;
10116 unsigned char reconfig_flag = 0;
10117
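 /*
  * Fetch the ucode's rpm parameter block, either via registers
  * (H265_DEBUG_SEND_PARAM_WITH_REG) or from the lmem copy.  The 16-bit
  * words of each 64-bit group arrive in reverse order, hence the
  * i + 3 - ii swizzle below.
  */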
10118 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
10119 get_rpm_param(&hevc->param);
10120 else {
10121
10122 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
10123 int ii;
10124
10125 for (ii = 0; ii < 4; ii++) {
10126 hevc->param.l.data[i + ii] =
10127 hevc->rpm_ptr[i + 3
10128 - ii];
10129 }
10130 }
10131#ifdef SEND_LMEM_WITH_RPM
10132 check_head_error(hevc);
10133#endif
10134 }
10135 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
10136 hevc_print(hevc, 0,
10137 "rpm_param: (%d)\n", hevc->slice_idx);
10138 hevc->slice_idx++;
10139 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
10140 hevc_print_cont(hevc, 0,
10141 "%04x ", hevc->param.l.data[i]);
10142 if (((i + 1) & 0xf) == 0)
10143 hevc_print_cont(hevc, 0, "\n");
10144 }
10145
10146 hevc_print(hevc, 0,
10147 "vui_timing_info: %x, %x, %x, %x\n",
10148 hevc->param.p.vui_num_units_in_tick_hi,
10149 hevc->param.p.vui_num_units_in_tick_lo,
10150 hevc->param.p.vui_time_scale_hi,
10151 hevc->param.p.vui_time_scale_lo);
10152 }
10153
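 /*
  * v4l2 path: on the first parsed parameter set report the visible and
  * 32-aligned coded dimensions plus the required DPB size, so the v4l2
  * layer can allocate capture buffers.
  */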
10154 if (hevc->is_used_v4l) {
10155 struct aml_vcodec_ctx *ctx =
10156 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
10157
10158 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
10159 struct aml_vdec_ps_infos ps;
10160
10161 hevc->frame_width = hevc->param.p.pic_width_in_luma_samples;
10162 hevc->frame_height = hevc->param.p.pic_height_in_luma_samples;
10163 ps.visible_width = hevc->frame_width;
10164 ps.visible_height = hevc->frame_height;
10165 ps.coded_width = ALIGN(hevc->frame_width, 32);
10166 ps.coded_height = ALIGN(hevc->frame_height, 32);
10167 ps.dpb_size = get_work_pic_num(hevc);
10168 hevc->v4l_params_parsed = true;
10169 /* notify the v4l2 codec. */
10170 vdec_v4l_set_ps_infos(ctx, &ps);
10171 }
10172 }
10173
10174 if (
10175#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10176 vdec->master == NULL &&
10177 vdec->slave == NULL &&
10178#endif
10179 aux_data_is_avaible(hevc)
10180 ) {
10181
10182 if (get_dbg_flag(hevc) &
10183 H265_DEBUG_BUFMGR_MORE)
10184 dump_aux_buf(hevc);
10185 }
10186
10187 vui_time_scale =
10188 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
10189 hevc->param.p.vui_time_scale_lo;
10190 vui_num_units_in_tick =
10191 (u32)(hevc->param.
10192 p.vui_num_units_in_tick_hi << 16) |
10193 hevc->param.
10194 p.vui_num_units_in_tick_lo;
10195 if (hevc->bit_depth_luma !=
10196 ((hevc->param.p.bit_depth & 0xf) + 8)) {
10197 reconfig_flag = 1;
10198 hevc_print(hevc, 0, "Bit depth luma = %d\n",
10199 (hevc->param.p.bit_depth & 0xf) + 8);
10200 }
10201 if (hevc->bit_depth_chroma !=
10202 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
10203 reconfig_flag = 1;
10204 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
10205 ((hevc->param.p.bit_depth >> 4) &
10206 0xf) + 8);
10207 }
10208 hevc->bit_depth_luma =
10209 (hevc->param.p.bit_depth & 0xf) + 8;
10210 hevc->bit_depth_chroma =
10211 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
10212 bit_depth_luma = hevc->bit_depth_luma;
10213 bit_depth_chroma = hevc->bit_depth_chroma;
10214#ifdef SUPPORT_10BIT
10215 if (hevc->bit_depth_luma == 8 &&
10216 hevc->bit_depth_chroma == 8 &&
10217 enable_mem_saving)
10218 hevc->mem_saving_mode = 1;
10219 else
10220 hevc->mem_saving_mode = 0;
10221#endif
10222 if (reconfig_flag &&
10223 (get_double_write_mode(hevc) & 0x10) == 0)
10224 init_decode_head_hw(hevc);
10225
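 /*
  * Derive frame duration (in 1/96000 s units) from the VUI timing info:
  *   frame_dur = 96000 * vui_num_units_in_tick / vui_time_scale
  * e.g. num_units_in_tick = 1001, time_scale = 60000 (59.94 fps)
  * gives frame_dur = 1601.
  */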
10226 if ((vui_time_scale != 0)
10227 && (vui_num_units_in_tick != 0)) {
10228 hevc->frame_dur =
10229 div_u64(96000ULL *
10230 vui_num_units_in_tick,
10231 vui_time_scale);
10232 if (hevc->get_frame_dur != true)
10233 vdec_schedule_work(
10234 &hevc->notify_work);
10235
10236 hevc->get_frame_dur = true;
10237#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10238 gvs->frame_dur = hevc->frame_dur;
10239#endif
10240 }
10241
10242 if (hevc->video_signal_type !=
10243 ((hevc->param.p.video_signal_type << 16)
10244 | hevc->param.p.color_description)) {
10245 u32 v = hevc->param.p.video_signal_type;
10246 u32 c = hevc->param.p.color_description;
10247#if 0
10248 if (v & 0x2000) {
10249 hevc_print(hevc, 0,
10250 "video_signal_type present:\n");
10251 hevc_print(hevc, 0, " %s %s\n",
10252 video_format_names[(v >> 10) & 7],
10253 ((v >> 9) & 1) ?
10254 "full_range" : "limited");
10255 if (v & 0x100) {
10256 hevc_print(hevc, 0,
10257 " color_description present:\n");
10258 hevc_print(hevc, 0,
10259 " color_primarie = %s\n",
10260 color_primaries_names
10261 [v & 0xff]);
10262 hevc_print(hevc, 0,
10263 " transfer_characteristic = %s\n",
10264 transfer_characteristics_names
10265 [(c >> 8) & 0xff]);
10266 hevc_print(hevc, 0,
10267 " matrix_coefficient = %s\n",
10268 matrix_coeffs_names[c & 0xff]);
10269 }
10270 }
10271#endif
10272 hevc->video_signal_type = (v << 16) | c;
10273 video_signal_type = hevc->video_signal_type;
10274 }
10275
10276 if (use_cma &&
10277 (hevc->param.p.slice_segment_address == 0)
10278 && (hevc->pic_list_init_flag == 0)) {
10279 int log = hevc->param.p.log2_min_coding_block_size_minus3;
10280 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
10281
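 /*
  * CTU/LCU size from the SPS: minimum CB size is
  * 1 << (log2_min_coding_block_size_minus3 + 3), and the LCU is that
  * shifted up by log2_diff_max_min_coding_block_size,
  * i.e. 1 << (log + 3 + log_s).
  */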
10282 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10283 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
10284 hevc->lcu_size = 1 << (log + 3 + log_s);
10285 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
10286 if (hevc->pic_w == 0 || hevc->pic_h == 0
10287 || hevc->lcu_size == 0
10288 || is_oversize(hevc->pic_w, hevc->pic_h)
10289 || (!hevc->skip_first_nal &&
10290 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
10291 /* skip search next start code */
10292 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10293 & (~0x2));
10294 if (!hevc->skip_first_nal &&
10295 (hevc->pic_h == 96) && (hevc->pic_w == 160))
10296 hevc->skip_first_nal = 1;
10297 hevc->skip_flag = 1;
10298 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10299 /* Interrupt Amrisc to execute */
10300 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10301#ifdef MULTI_INSTANCE_SUPPORT
10302 if (hevc->m_ins_flag)
10303 start_process_time(hevc);
10304#endif
10305 } else {
10306 hevc->sps_num_reorder_pics_0 =
10307 hevc->param.p.sps_num_reorder_pics_0;
10308 hevc->pic_list_init_flag = 1;
10309#ifdef MULTI_INSTANCE_SUPPORT
10310 if (hevc->m_ins_flag) {
10311 vdec_schedule_work(&hevc->work);
10312 } else
10313#endif
10314 up(&h265_sema);
10315 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10316 }
10317 return IRQ_HANDLED;
10318 }
10319
10320}
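 /*
  * Process the slice header through the buffer manager.  On a negative
  * return the instance backs off and retries (DEC_RESULT_AGAIN); on 0
  * the hardware is started on this slice segment; otherwise the slice
  * is skipped and the ucode searches for the next start code.
  */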
10321 ret =
10322 hevc_slice_segment_header_process(hevc,
10323 &hevc->param, decode_pic_begin);
10324 if (ret < 0) {
10325#ifdef MULTI_INSTANCE_SUPPORT
10326 if (hevc->m_ins_flag) {
10327 hevc->wait_buf = 0;
10328 hevc->dec_result = DEC_RESULT_AGAIN;
10329 amhevc_stop();
10330 restore_decode_state(hevc);
10331 reset_process_time(hevc);
10332 vdec_schedule_work(&hevc->work);
10333 return IRQ_HANDLED;
10334 }
10335#else
10336 ;
10337#endif
10338 } else if (ret == 0) {
10339 if ((hevc->new_pic) && (hevc->cur_pic)) {
10340 hevc->cur_pic->stream_offset =
10341 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10342 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10343 "read stream_offset = 0x%x\n",
10344 hevc->cur_pic->stream_offset);
10345 hevc->cur_pic->aspect_ratio_idc =
10346 hevc->param.p.aspect_ratio_idc;
10347 hevc->cur_pic->sar_width =
10348 hevc->param.p.sar_width;
10349 hevc->cur_pic->sar_height =
10350 hevc->param.p.sar_height;
10351 }
10352
10353 WRITE_VREG(HEVC_DEC_STATUS_REG,
10354 HEVC_CODED_SLICE_SEGMENT_DAT);
10355 /* Interrupt Amrisc to execute */
10356 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10357
10358 hevc->start_decoding_time = jiffies;
10359#ifdef MULTI_INSTANCE_SUPPORT
10360 if (hevc->m_ins_flag)
10361 start_process_time(hevc);
10362#endif
10363#if 1
10364 /*to do..., copy aux data to hevc->cur_pic*/
10365#endif
10366#ifdef MULTI_INSTANCE_SUPPORT
10367 } else if (hevc->m_ins_flag) {
10368 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10369 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10370 __func__, ret);
10371 hevc->decoded_poc = INVALID_POC;
10372 hevc->decoding_pic = NULL;
10373 hevc->dec_result = DEC_RESULT_DONE;
10374 amhevc_stop();
10375 reset_process_time(hevc);
10376 vdec_schedule_work(&hevc->work);
10377#endif
10378 } else {
10379 /* skip, search next start code */
10380#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10381 gvs->drop_frame_count++;
10382#endif
10383 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10384 hevc->skip_flag = 1;
10385 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10386 /* Interrupt Amrisc to execute */
10387 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10388 }
10389
10390 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10391 hevc_print(hevc, 0 , "hevc decode oversize !!\n");
10392#ifdef MULTI_INSTANCE_SUPPORT
10393 if (!hevc->m_ins_flag)
10394 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10395 H265_DEBUG_DIS_SYS_ERROR_PROC);
10396#endif
10397 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10398 }
10399 return IRQ_HANDLED;
10400}
10401
10402static void wait_hevc_search_done(struct hevc_state_s *hevc)
10403{
10404 int count = 0;
10405 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10406 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10407 msleep(20);
10408 count++;
10409 if (count > 100) {
10410 hevc_print(hevc, 0, "%s timeout\n", __func__);
10411 break;
10412 }
10413 }
10414}
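/*
 * Top half of the decoder interrupt: latch HEVC_DEC_STATUS_REG, service
 * the ucode debug hooks (DEBUG_REG1/DEBUG_REG2, udebug_pause_*), handle
 * the non-multi-instance over-decode case, and defer the real work to
 * vh265_isr_thread_fn().
 */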
10415static irqreturn_t vh265_isr(int irq, void *data)
10416{
10417 int i, temp;
10418 unsigned int dec_status;
10419 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10420 u32 debug_tag;
10421 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10422
10423 if (hevc->init_flag == 0)
10424 return IRQ_HANDLED;
10425 hevc->dec_status = dec_status;
10426 if (is_log_enable(hevc))
10427 add_log(hevc,
10428 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10429 dec_status, READ_HREG(HEVC_DECODE_INFO),
10430 READ_VREG(HEVC_MPRED_CURR_LCU),
10431 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10432 READ_VREG(HEVC_SHIFT_STATUS));
10433
10434 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10435 hevc_print(hevc, 0,
10436 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10437 dec_status, READ_HREG(HEVC_DECODE_INFO),
10438 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10439 READ_VREG(HEVC_SHIFT_STATUS));
10440
10441 debug_tag = READ_HREG(DEBUG_REG1);
10442 if (debug_tag & 0x10000) {
10443 hevc_print(hevc, 0,
10444 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10445
10446 if (hevc->mmu_enable)
10447 temp = 0x500;
10448 else
10449 temp = 0x400;
10450 for (i = 0; i < temp; i += 4) {
10451 int ii;
10452 if ((i & 0xf) == 0)
10453 hevc_print_cont(hevc, 0, "%03x: ", i);
10454 for (ii = 0; ii < 4; ii++) {
10455 hevc_print_cont(hevc, 0, "%04x ",
10456 hevc->lmem_ptr[i + 3 - ii]);
10457 }
10458 if (((i + ii) & 0xf) == 0)
10459 hevc_print_cont(hevc, 0, "\n");
10460 }
10461
10462 if (((udebug_pause_pos & 0xffff)
10463 == (debug_tag & 0xffff)) &&
10464 (udebug_pause_decode_idx == 0 ||
10465 udebug_pause_decode_idx == hevc->decode_idx) &&
10466 (udebug_pause_val == 0 ||
10467 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10468 udebug_pause_pos &= 0xffff;
10469 hevc->ucode_pause_pos = udebug_pause_pos;
10470 }
10471 else if (debug_tag & 0x20000)
10472 hevc->ucode_pause_pos = 0xffffffff;
10473 if (hevc->ucode_pause_pos)
10474 reset_process_time(hevc);
10475 else
10476 WRITE_HREG(DEBUG_REG1, 0);
10477 } else if (debug_tag != 0) {
10478 hevc_print(hevc, 0,
10479 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10480 READ_HREG(DEBUG_REG2),
10481 READ_VREG(HEVC_STREAM_LEVEL),
10482 READ_VREG(HEVC_STREAM_WR_PTR),
10483 READ_VREG(HEVC_STREAM_RD_PTR));
10484 if (((udebug_pause_pos & 0xffff)
10485 == (debug_tag & 0xffff)) &&
10486 (udebug_pause_decode_idx == 0 ||
10487 udebug_pause_decode_idx == hevc->decode_idx) &&
10488 (udebug_pause_val == 0 ||
10489 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10490 udebug_pause_pos &= 0xffff;
10491 hevc->ucode_pause_pos = udebug_pause_pos;
10492 }
10493 if (hevc->ucode_pause_pos)
10494 reset_process_time(hevc);
10495 else
10496 WRITE_HREG(DEBUG_REG1, 0);
10497 return IRQ_HANDLED;
10498 }
10499
10500
10501 if (hevc->pic_list_init_flag == 1)
10502 return IRQ_HANDLED;
10503
10504 if (!hevc->m_ins_flag) {
10505 if (dec_status == HEVC_OVER_DECODE) {
10506 hevc->over_decode = 1;
10507 hevc_print(hevc, 0,
10508 "isr: over decode\n"),
10509 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10510 return IRQ_HANDLED;
10511 }
10512 }
10513
10514 return IRQ_WAKE_THREAD;
10515
10516}
10517
10518static void vh265_set_clk(struct work_struct *work)
10519{
10520 struct hevc_state_s *hevc = container_of(work,
10521 struct hevc_state_s, set_clk_work);
10522
10523 int fps = 96000 / hevc->frame_dur;
10524
10525 if (hevc_source_changed(VFORMAT_HEVC,
10526 hevc->frame_width, hevc->frame_height, fps) > 0)
10527 hevc->saved_resolution = hevc->frame_width *
10528 hevc->frame_height * fps;
10529}
10530
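/*
 * Periodic watchdog (every PUT_INTERVAL jiffies).  In multi-instance
 * mode it detects stalled decodes by watching the current LCU index
 * against decode_timeout_val; in single-instance mode it drives the
 * error_flag recovery state machine when the receiver is inactive while
 * the stream buffer still holds data.  It also services the debug hooks
 * (radr/rval, dbg_cmd) and triggers clock rescaling.
 */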
10531static void vh265_check_timer_func(unsigned long arg)
10532{
10533 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10534 struct timer_list *timer = &hevc->timer;
10535 unsigned char empty_flag;
10536 unsigned int buf_level;
10537
10538 enum receviver_start_e state = RECEIVER_INACTIVE;
10539
10540 if (hevc->init_flag == 0) {
10541 if (hevc->stat & STAT_TIMER_ARM) {
10542 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10543 }
10544 return;
10545 }
10546#ifdef MULTI_INSTANCE_SUPPORT
10547 if (hevc->m_ins_flag &&
10548 (get_dbg_flag(hevc) &
10549 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10550 hw_to_vdec(hevc)->next_status ==
10551 VDEC_STATUS_DISCONNECTED) {
10552 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10553 vdec_schedule_work(&hevc->work);
10554 hevc_print(hevc,
10555 0, "vdec requested to be disconnected\n");
10556 return;
10557 }
10558
10559 if (hevc->m_ins_flag) {
10560 if ((input_frame_based(hw_to_vdec(hevc)) ||
10561 (READ_VREG(HEVC_STREAM_LEVEL) > 0xb0)) &&
10562 ((get_dbg_flag(hevc) &
10563 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10564 (decode_timeout_val > 0) &&
10565 (hevc->start_process_time > 0) &&
10566 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10567 > decode_timeout_val)
10568 ) {
10569 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10570 int current_lcu_idx =
10571 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10572 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10573 if (hevc->last_lcu_idx == current_lcu_idx) {
10574 if (hevc->decode_timeout_count > 0)
10575 hevc->decode_timeout_count--;
10576 if (hevc->decode_timeout_count == 0)
10577 timeout_process(hevc);
10578 } else
10579 restart_process_time(hevc);
10580 hevc->last_lcu_idx = current_lcu_idx;
10581 } else {
10582 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10583 timeout_process(hevc);
10584 }
10585 }
10586 } else {
10587#endif
10588 if (hevc->m_ins_flag == 0 &&
10589 vf_get_receiver(hevc->provider_name)) {
10590 state =
10591 vf_notify_receiver(hevc->provider_name,
10592 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10593 NULL);
10594 if ((state == RECEIVER_STATE_NULL)
10595 || (state == RECEIVER_STATE_NONE))
10596 state = RECEIVER_INACTIVE;
10597 } else
10598 state = RECEIVER_INACTIVE;
10599
10600 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10601 /* error watchdog */
10602 if (hevc->m_ins_flag == 0 &&
10603 (empty_flag == 0)
10604 && (hevc->pic_list_init_flag == 0
10605 || hevc->pic_list_init_flag
10606 == 3)) {
10607 /* decoder has input */
10608 if ((get_dbg_flag(hevc) &
10609 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10610
10611 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10612 /* receiver has no buffer to recycle */
10613 if ((state == RECEIVER_INACTIVE) &&
10614 (kfifo_is_empty(&hevc->display_q) &&
10615 buf_level > 0x200)
10616 ) {
10617 if (hevc->error_flag == 0) {
10618 hevc->error_watchdog_count++;
10619 if (hevc->error_watchdog_count ==
10620 error_handle_threshold) {
10621 hevc_print(hevc, 0,
10622 "H265 dec err local reset.\n");
10623 hevc->error_flag = 1;
10624 hevc->error_watchdog_count = 0;
10625 hevc->error_skip_nal_wt_cnt = 0;
10626 hevc->
10627 error_system_watchdog_count++;
10628 WRITE_VREG
10629 (HEVC_ASSIST_MBOX0_IRQ_REG,
10630 0x1);
10631 }
10632 } else if (hevc->error_flag == 2) {
10633 int th =
10634 error_handle_nal_skip_threshold;
10635 hevc->error_skip_nal_wt_cnt++;
10636 if (hevc->error_skip_nal_wt_cnt
10637 == th) {
10638 hevc->error_flag = 3;
10639 hevc->error_watchdog_count = 0;
10640 hevc->
10641 error_skip_nal_wt_cnt = 0;
10642 WRITE_VREG
10643 (HEVC_ASSIST_MBOX0_IRQ_REG,
10644 0x1);
10645 }
10646 }
10647 }
10648 }
10649
10650 if ((get_dbg_flag(hevc)
10651 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10652 /* receiver has no buffer to recycle */
10653 if ((state == RECEIVER_INACTIVE) &&
10654 (kfifo_is_empty(&hevc->display_q))
10655 ) { /* no buffer to recycle */
10656 if ((get_dbg_flag(hevc) &
10657 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10658 0)
10659 hevc->error_system_watchdog_count++;
10660 if (hevc->error_system_watchdog_count ==
10661 error_handle_system_threshold) {
10662 /* and it lasts for a while */
10663 hevc_print(hevc, 0,
10664 "H265 dec fatal error watchdog.\n");
10665 hevc->
10666 error_system_watchdog_count = 0;
10667 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10668 }
10669 }
10670 } else {
10671 hevc->error_watchdog_count = 0;
10672 hevc->error_system_watchdog_count = 0;
10673 }
10674#ifdef MULTI_INSTANCE_SUPPORT
10675 }
10676#endif
10677 if ((hevc->ucode_pause_pos != 0) &&
10678 (hevc->ucode_pause_pos != 0xffffffff) &&
10679 udebug_pause_pos != hevc->ucode_pause_pos) {
10680 hevc->ucode_pause_pos = 0;
10681 WRITE_HREG(DEBUG_REG1, 0);
10682 }
10683
10684 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10685 dump_pic_list(hevc);
10686 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10687 }
10688 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10689 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10690 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10691 }
10692#ifdef TEST_NO_BUF
10693 if (hevc->wait_buf)
10694 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10695#endif
10696 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
10697 hevc->error_skip_nal_count = error_skip_nal_count;
10698 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10699
10700 debug &= ~H265_DEBUG_HW_RESET;
10701 }
10702
10703#ifdef ERROR_HANDLE_DEBUG
10704 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
10705 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
10706 dbg_nal_skip_count &= ~0x10000;
10707 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10708 }
10709#endif
10710
10711 if (radr != 0) {
10712 if (rval != 0) {
10713 WRITE_VREG(radr, rval);
10714 hevc_print(hevc, 0,
10715 "WRITE_VREG(%x,%x)\n", radr, rval);
10716 } else
10717 hevc_print(hevc, 0,
10718 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
10719 rval = 0;
10720 radr = 0;
10721 }
10722 if (dbg_cmd != 0) {
10723 if (dbg_cmd == 1) {
10724 u32 disp_laddr;
10725
10726 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
10727 get_double_write_mode(hevc) == 0) {
10728 disp_laddr =
10729 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
10730 } else {
10731 struct canvas_s cur_canvas;
10732
10733 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
10734 & 0xff), &cur_canvas);
10735 disp_laddr = cur_canvas.addr;
10736 }
10737 hevc_print(hevc, 0,
10738 "current displayed buffer address %x\r\n",
10739 disp_laddr);
10740 }
10741 dbg_cmd = 0;
10742 }
10743 /* don't change the clock at startup. */
10744 if (hevc->m_ins_flag == 0 &&
10745 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
10746 hevc->frame_dur > 0 && hevc->saved_resolution !=
10747 hevc->frame_width * hevc->frame_height *
10748 (96000 / hevc->frame_dur))
10749 vdec_schedule_work(&hevc->set_clk_work);
10750
10751 mod_timer(timer, jiffies + PUT_INTERVAL);
10752}
10753
10754static int h265_task_handle(void *data)
10755{
10756 int ret = 0;
10757 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10758
10759 set_user_nice(current, -10);
10760 while (1) {
10761 if (use_cma == 0) {
10762 hevc_print(hevc, 0,
10763 "ERROR: use_cma can not be changed dynamically\n");
10764 }
10765 ret = down_interruptible(&h265_sema);
10766 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
10767 init_pic_list(hevc);
10768 init_pic_list_hw(hevc);
10769 init_buf_spec(hevc);
10770 hevc->pic_list_init_flag = 2;
10771 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
10772
10773 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10774
10775 }
10776
10777 if (hevc->uninit_list) {
10778 /*USE_BUF_BLOCK*/
10779 uninit_pic_list(hevc);
10780 hevc_print(hevc, 0, "uninit list\n");
10781 hevc->uninit_list = 0;
10782#ifdef USE_UNINIT_SEMA
10783 if (use_cma) {
10784 up(&hevc->h265_uninit_done_sema);
10785 while (!kthread_should_stop())
10786 msleep(1);
10787 break;
10788 }
10789#endif
10790 }
10791 }
10792
10793 return 0;
10794}
10795
10796void vh265_free_cmabuf(void)
10797{
10798 struct hevc_state_s *hevc = gHevc;
10799
10800 mutex_lock(&vh265_mutex);
10801
10802 if (hevc->init_flag) {
10803 mutex_unlock(&vh265_mutex);
10804 return;
10805 }
10806
10807 mutex_unlock(&vh265_mutex);
10808}
10809
10810#ifdef MULTI_INSTANCE_SUPPORT
10811int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
10812#else
10813int vh265_dec_status(struct vdec_info *vstatus)
10814#endif
10815{
10816#ifdef MULTI_INSTANCE_SUPPORT
10817 struct hevc_state_s *hevc =
10818 (struct hevc_state_s *)vdec->private;
10819#else
10820 struct hevc_state_s *hevc = gHevc;
10821#endif
10822 if (!hevc)
10823 return -1;
10824
10825 vstatus->frame_width = hevc->frame_width;
10826 vstatus->frame_height = hevc->frame_height;
10827 if (hevc->frame_dur != 0)
10828 vstatus->frame_rate = 96000 / hevc->frame_dur;
10829 else
10830 vstatus->frame_rate = -1;
10831 vstatus->error_count = 0;
10832 vstatus->status = hevc->stat | hevc->fatal_error;
10833#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10835 vstatus->frame_dur = hevc->frame_dur;
10836 if (gvs) {
10837 vstatus->bit_rate = gvs->bit_rate;
10838 vstatus->frame_data = gvs->frame_data;
10839 vstatus->total_data = gvs->total_data;
10840 vstatus->frame_count = gvs->frame_count;
10841 vstatus->error_frame_count = gvs->error_frame_count;
10842 vstatus->drop_frame_count = gvs->drop_frame_count;
10844 vstatus->samp_cnt = gvs->samp_cnt;
10845 vstatus->offset = gvs->offset;
10846 }
10847 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
10848 "%s", DRIVER_NAME);
10849#endif
10850 vstatus->ratio_control = hevc->ratio_control;
10851 return 0;
10852}
10853
10854int vh265_set_isreset(struct vdec_s *vdec, int isreset)
10855{
10856 is_reset = isreset;
10857 return 0;
10858}
10859
10860static int vh265_vdec_info_init(void)
10861{
10862 gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
10863 if (NULL == gvs) {
10864 pr_info("the struct of vdec status malloc failed.\n");
10865 return -ENOMEM;
10866 }
10867 return 0;
10868}
10869
10870#if 0
10871static void H265_DECODE_INIT(void)
10872{
10873 /* enable hevc clocks */
10874 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
10875 /* *************************************************************** */
10876 /* Power ON HEVC */
10877 /* *************************************************************** */
10878 /* Powerup HEVC */
10879 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
10880 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
10881 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
10882 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
10883 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
10884 /* remove isolations */
10885 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
10886 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
10887
10888}
10889#endif
10890
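/*
 * Program HEVC_DECODE_MODE for the current instance.  The low byte
 * selects single / multi frame-based / multi stream-based / Dolby
 * Vision BL-EL decoding; as packed below, bits [15:8] carry
 * start_parser_type, bits [23:16] start_decoding_flag, and 0x80 in the
 * top byte selects the MBX0 interrupt.  HEVC_DECODE_MODE2 carries the
 * rps_set_id.
 */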
10891static void config_decode_mode(struct hevc_state_s *hevc)
10892{
10893#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10894 struct vdec_s *vdec = hw_to_vdec(hevc);
10895#endif
10896 unsigned decode_mode;
10897 if (!hevc->m_ins_flag)
10898 decode_mode = DECODE_MODE_SINGLE;
10899 else if (vdec_frame_based(hw_to_vdec(hevc)))
10900 decode_mode =
10901 DECODE_MODE_MULTI_FRAMEBASE;
10902#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10903 else if (vdec->slave) {
10904 if (force_bypass_dvenl & 0x80000000)
10905 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
10906 else
10907 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
10908 if (dolby_meta_with_el && hevc->bypass_dvenl) {
10909 hevc->bypass_dvenl = 0;
10910 hevc_print(hevc, 0,
10911 "NOT support bypass_dvenl when meta_with_el\n");
10912 }
10913 if (hevc->bypass_dvenl)
10914 decode_mode =
10915 (hevc->start_parser_type << 8)
10916 | DECODE_MODE_MULTI_STREAMBASE;
10917 else
10918 decode_mode =
10919 (hevc->start_parser_type << 8)
10920 | DECODE_MODE_MULTI_DVBAL;
10921 } else if (vdec->master)
10922 decode_mode =
10923 (hevc->start_parser_type << 8)
10924 | DECODE_MODE_MULTI_DVENL;
10925#endif
10926 else
10927 decode_mode =
10928 DECODE_MODE_MULTI_STREAMBASE;
10929
10930 if (hevc->m_ins_flag)
10931 decode_mode |=
10932 (hevc->start_decoding_flag << 16);
10933 /* set MBX0 interrupt flag */
10934 decode_mode |= (0x80 << 24);
10935 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
10936 WRITE_VREG(HEVC_DECODE_MODE2,
10937 hevc->rps_set_id);
10938}
10939
10940static void vh265_prot_init(struct hevc_state_s *hevc)
10941{
10942#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10943 struct vdec_s *vdec = hw_to_vdec(hevc);
10944#endif
10945 /* H265_DECODE_INIT(); */
10946
10947 hevc_config_work_space_hw(hevc);
10948
10949 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
10950
10951 WRITE_VREG(HEVC_WAIT_FLAG, 1);
10952
10953 /* WRITE_VREG(P_HEVC_MPSR, 1); */
10954
10955 /* clear mailbox interrupt */
10956 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
10957
10958 /* enable mailbox interrupt */
10959 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
10960
10961 /* disable PSCALE for hardware sharing */
10962 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
10963
10964 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
10965
10966 if ((get_dbg_flag(hevc) &
10967 (H265_DEBUG_MAN_SKIP_NAL |
10968 H265_DEBUG_MAN_SEARCH_NAL))
10969 /*||hevc->m_ins_flag*/
10970 ) {
10971 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
10972 } else {
10973 /* check vps/sps/pps/i-slice in ucode */
10974 unsigned ctl_val = 0x8;
10975 if (hevc->PB_skip_mode == 0)
10976 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
10977 else if (hevc->PB_skip_mode == 3)
10978 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
10979 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
10980 }
10981 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
10982#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10983 || vdec->master
10984 || vdec->slave
10985#endif
10986 )
10987 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
10988
10989 WRITE_VREG(NAL_SEARCH_CTL,
10990 READ_VREG(NAL_SEARCH_CTL)
10991 | ((parser_sei_enable & 0x7) << 17));
10992#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10993 WRITE_VREG(NAL_SEARCH_CTL,
10994 READ_VREG(NAL_SEARCH_CTL) |
10995 ((parser_dolby_vision_enable & 0x1) << 20));
10996#endif
10997 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
10998
10999 config_decode_mode(hevc);
11000 config_aux_buf(hevc);
11001#ifdef SWAP_HEVC_UCODE
11002 if (!tee_enabled() && hevc->is_swap &&
11003 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11004 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11005 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11006 }
11007#endif
11008#ifdef DETREFILL_ENABLE
11009 if (hevc->is_swap &&
11010 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11011 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
11012 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
11013 }
11014#endif
11015}
11016
11017static int vh265_local_init(struct hevc_state_s *hevc)
11018{
11019 int i;
11020 int ret = -1;
11021
11022#ifdef DEBUG_PTS
11023 hevc->pts_missed = 0;
11024 hevc->pts_hit = 0;
11025#endif
11026
11027 hevc->saved_resolution = 0;
11028 hevc->get_frame_dur = false;
11029 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
11030 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
11031 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
11032 pr_info("over size : %u x %u.\n",
11033 hevc->frame_width, hevc->frame_height);
11034 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
11035 return ret;
11036 }
11037
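 /*
  * Classify the stream as 4K when the configured maximum (or, failing
  * that, the stream's own) picture area exceeds 1920x1088; this is used
  * below when choosing the firmware image.
  */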
11038 if (hevc->max_pic_w && hevc->max_pic_h) {
11039 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
11040 ((hevc->max_pic_w * hevc->max_pic_h) >
11041 1920 * 1088) ? true : false;
11042 } else {
11043 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
11044 ((hevc->frame_width * hevc->frame_height) >
11045 1920 * 1088) ? true : false;
11046 }
11047
11048 hevc->frame_dur =
11049 (hevc->vh265_amstream_dec_info.rate ==
11050 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
11051#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
11052 gvs->frame_dur = hevc->frame_dur;
11053#endif
11054 if (hevc->frame_width && hevc->frame_height)
11055 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
11056
11057 if (i_only_flag)
11058 hevc->i_only = i_only_flag & 0xff;
11059 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
11060 & 0x08)
11061 hevc->i_only = 0x7;
11062 else
11063 hevc->i_only = 0x0;
11064 hevc->error_watchdog_count = 0;
11065 hevc->sei_present_flag = 0;
11066 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
11067 & 0x40) >> 6;
11068 hevc_print(hevc, 0,
11069 "h265:pts_unstable=%d\n", pts_unstable);
11070/*
11071 *TODO:FOR VERSION
11072 */
11073 hevc_print(hevc, 0,
11074 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
11075 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
11076
11077 if (hevc->frame_dur == 0)
11078 hevc->frame_dur = 96000 / 24;
11079
11080 INIT_KFIFO(hevc->display_q);
11081 INIT_KFIFO(hevc->newframe_q);
11082 INIT_KFIFO(hevc->pending_q);
11083
11084 for (i = 0; i < VF_POOL_SIZE; i++) {
11085 const struct vframe_s *vf = &hevc->vfpool[i];
11086
11087 hevc->vfpool[i].index = -1;
11088 kfifo_put(&hevc->newframe_q, vf);
11089 }
11090
11091
11092 ret = hevc_local_init(hevc);
11093
11094 return ret;
11095}
11096#ifdef MULTI_INSTANCE_SUPPORT
11097static s32 vh265_init(struct vdec_s *vdec)
11098{
11099 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
11100#else
11101static s32 vh265_init(struct hevc_state_s *hevc)
11102{
11103
11104#endif
11105 int ret, size = -1;
11106 int fw_size = 0x1000 * 16;
11107 struct firmware_s *fw = NULL;
11108
11109 init_timer(&hevc->timer);
11110
11111 hevc->stat |= STAT_TIMER_INIT;
11112
11113 if (hevc->m_ins_flag) {
11114#ifdef USE_UNINIT_SEMA
11115 sema_init(&hevc->h265_uninit_done_sema, 0);
11116#endif
11117 INIT_WORK(&hevc->work, vh265_work);
11118 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
11119 }
11120
11121 if (vh265_local_init(hevc) < 0)
11122 return -EBUSY;
11123
11124 mutex_init(&hevc->chunks_mutex);
11125 INIT_WORK(&hevc->notify_work, vh265_notify_work);
11126 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
11127
11128 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
11129 if (IS_ERR_OR_NULL(fw))
11130 return -ENOMEM;
11131
11132 if (hevc->mmu_enable)
11133 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11134 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
11135 else {
11136 if (!hevc->is_4k) {
11137 /* if an older version of the fw was loaded, */
11138 /* we need to try the noswap fw because the */
11139 /* old fw package does not contain the swap fw.*/
11140 size = get_firmware_data(
11141 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
11142 if (size < 0)
11143 size = get_firmware_data(
11144 VIDEO_DEC_HEVC_MMU, fw->data);
11145 else if (size)
11146 hevc->is_swap = true;
11147 } else
11148 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
11149 fw->data);
11150 }
11151 else
11152 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
11153
11154 if (size < 0) {
11155 pr_err("get firmware fail.\n");
11156 vfree(fw);
11157 return -1;
11158 }
11159
11160 fw->len = size;
11161
11162#ifdef SWAP_HEVC_UCODE
11163 if (!tee_enabled() && hevc->is_swap &&
11164 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11165 if (hevc->mmu_enable) {
11166 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
11167 hevc->mc_cpu_addr =
11168 dma_alloc_coherent(amports_get_dma_device(),
11169 hevc->swap_size,
11170 &hevc->mc_dma_handle, GFP_KERNEL);
11171 if (!hevc->mc_cpu_addr) {
11172 amhevc_disable();
11173 pr_info("vh265 mmu swap ucode loaded fail.\n");
11174 return -ENOMEM;
11175 }
11176
11177 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
11178 hevc->swap_size);
11179
11180 hevc_print(hevc, 0,
11181 "vh265 mmu ucode swap loaded %x\n",
11182 hevc->mc_dma_handle);
11183 }
11184 }
11185#endif
11186
11187#ifdef MULTI_INSTANCE_SUPPORT
11188 if (hevc->m_ins_flag) {
11189 hevc->timer.data = (ulong) hevc;
11190 hevc->timer.function = vh265_check_timer_func;
11191 hevc->timer.expires = jiffies + PUT_INTERVAL;
11192
11193 hevc->fw = fw;
11194
11195 return 0;
11196 }
11197#endif
11198 amhevc_enable();
11199
11200 if (hevc->mmu_enable)
11201 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11202 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
11203 else {
11204 if (!hevc->is_4k) {
11205 /* if an older version of the fw was loaded, */
11206 /* we need to try the noswap fw because the */
11207 /* old fw package does not contain the swap fw. */
11208 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11209 "hevc_mmu_swap", fw->data);
11210 if (ret < 0)
11211 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11212 "h265_mmu", fw->data);
11213 else
11214 hevc->is_swap = true;
11215 } else
11216 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11217 "h265_mmu", fw->data);
11218 }
11219 else
11220 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
11221
11222 if (ret < 0) {
11223 amhevc_disable();
11224 vfree(fw);
11225 pr_err("H265: the %s fw loading failed, err: %x\n",
11226 tee_enabled() ? "TEE" : "local", ret);
11227 return -EBUSY;
11228 }
11229
11230 vfree(fw);
11231
11232 hevc->stat |= STAT_MC_LOAD;
11233
11234#ifdef DETREFILL_ENABLE
11235 if (hevc->is_swap &&
11236 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11237 init_detrefill_buf(hevc);
11238#endif
11239 /* enable AMRISC side protocol */
11240 vh265_prot_init(hevc);
11241
11242 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
11243 vh265_isr_thread_fn,
11244 IRQF_ONESHOT,/*run thread on this irq disabled*/
11245 "vh265-irq", (void *)hevc)) {
11246 hevc_print(hevc, 0, "vh265 irq register error.\n");
11247 amhevc_disable();
11248 return -ENOENT;
11249 }
11250
11251 hevc->stat |= STAT_ISR_REG;
11252 hevc->provider_name = PROVIDER_NAME;
11253
11254#ifdef MULTI_INSTANCE_SUPPORT
11255 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
11256 &vh265_vf_provider, vdec);
11257 vf_reg_provider(&vh265_vf_prov);
11258 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
11259 NULL);
11260 if (hevc->frame_dur != 0) {
11261 if (!is_reset) {
11262 vf_notify_receiver(hevc->provider_name,
11263 VFRAME_EVENT_PROVIDER_FR_HINT,
11264 (void *)
11265 ((unsigned long)hevc->frame_dur));
11266 fr_hint_status = VDEC_HINTED;
11267 }
11268 } else
11269 fr_hint_status = VDEC_NEED_HINT;
11270#else
11271 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
11272 hevc);
11273 vf_reg_provider(&vh265_vf_prov);
11274 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
11275 if (hevc->frame_dur != 0) {
11276 vf_notify_receiver(PROVIDER_NAME,
11277 VFRAME_EVENT_PROVIDER_FR_HINT,
11278 (void *)
11279 ((unsigned long)hevc->frame_dur));
11280 fr_hint_status = VDEC_HINTED;
11281 } else
11282 fr_hint_status = VDEC_NEED_HINT;
11283#endif
11284 hevc->stat |= STAT_VF_HOOK;
11285
11286 hevc->timer.data = (ulong) hevc;
11287 hevc->timer.function = vh265_check_timer_func;
11288 hevc->timer.expires = jiffies + PUT_INTERVAL;
11289
11290 add_timer(&hevc->timer);
11291
11292 hevc->stat |= STAT_TIMER_ARM;
11293
11294 if (use_cma) {
11295#ifdef USE_UNINIT_SEMA
11296 sema_init(&hevc->h265_uninit_done_sema, 0);
11297#endif
11298 if (h265_task == NULL) {
11299 sema_init(&h265_sema, 1);
11300 h265_task =
11301 kthread_run(h265_task_handle, hevc,
11302 "kthread_h265");
11303 }
11304 }
11305 /* hevc->stat |= STAT_KTHREAD; */
11306#if 0
11307 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11308 hevc_print(hevc, 0, "%s force clk\n", __func__);
11309 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11310 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11311 ((1 << 2) | (1 << 1)));
11312 WRITE_VREG(HEVC_DBLK_CFG0,
11313 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11314 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11315 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11316 (1 << 2)); /* 2 */
11317 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11318 (1 << 24)); /* 24 */
11319 WRITE_VREG(HEVC_STREAM_CONTROL,
11320 READ_VREG(HEVC_STREAM_CONTROL) |
11321 (1 << 15)); /* 15 */
11322 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11323 (1 << 13)); /* 13 */
11324 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11325 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11326 (1 << 15)); /* 15 */
11327 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11328 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11329 (1 << 15)); /* 15 */
11330 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11331 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11332 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11333 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11334 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11335 (1 << 3)); /* 3 */
11336 }
11337#endif
11338#ifdef SWAP_HEVC_UCODE
11339 if (!tee_enabled() && hevc->is_swap &&
11340 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11341 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11342 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11343 }
11344#endif
11345
11346#ifndef MULTI_INSTANCE_SUPPORT
11347 set_vdec_func(&vh265_dec_status);
11348#endif
11349 amhevc_start();
11350 hevc->stat |= STAT_VDEC_RUN;
11351 hevc->init_flag = 1;
11352 error_handle_threshold = 30;
11353 /* pr_info("%d, vh265_init, RP=0x%x\n",
11354 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11355 */
11356
11357 return 0;
11358}
11359
11360static int vh265_stop(struct hevc_state_s *hevc)
11361{
11362 if (get_dbg_flag(hevc) &
11363 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11364 int wait_timeout_count = 0;
11365
11366 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11367 HEVC_CODED_SLICE_SEGMENT_DAT &&
11368 wait_timeout_count < 10){
11369 wait_timeout_count++;
11370 msleep(20);
11371 }
11372 }
11373 if (hevc->stat & STAT_VDEC_RUN) {
11374 amhevc_stop();
11375 hevc->stat &= ~STAT_VDEC_RUN;
11376 }
11377
11378 if (hevc->stat & STAT_ISR_REG) {
11379#ifdef MULTI_INSTANCE_SUPPORT
11380 if (!hevc->m_ins_flag)
11381#endif
11382 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11383 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11384 hevc->stat &= ~STAT_ISR_REG;
11385 }
11386
11387 hevc->stat &= ~STAT_TIMER_INIT;
11388 if (hevc->stat & STAT_TIMER_ARM) {
11389 del_timer_sync(&hevc->timer);
11390 hevc->stat &= ~STAT_TIMER_ARM;
11391 }
11392
11393 if (hevc->stat & STAT_VF_HOOK) {
11394 if (fr_hint_status == VDEC_HINTED) {
11395 vf_notify_receiver(hevc->provider_name,
11396 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11397 NULL);
11398 }
11399 fr_hint_status = VDEC_NO_NEED_HINT;
11400 vf_unreg_provider(&vh265_vf_prov);
11401 hevc->stat &= ~STAT_VF_HOOK;
11402 }
11403
11404 hevc_local_uninit(hevc);
11405
11406 if (use_cma) {
11407 hevc->uninit_list = 1;
11408 up(&h265_sema);
11409#ifdef USE_UNINIT_SEMA
11410 down(&hevc->h265_uninit_done_sema);
11411 if (!IS_ERR(h265_task)) {
11412 kthread_stop(h265_task);
11413 h265_task = NULL;
11414 }
11415#else
11416 while (hevc->uninit_list) /* wait for uninit to complete */
11417 msleep(20);
11418#endif
11419
11420 }
11421 hevc->init_flag = 0;
11422 hevc->first_sc_checked = 0;
11423 cancel_work_sync(&hevc->notify_work);
11424 cancel_work_sync(&hevc->set_clk_work);
11425 uninit_mmu_buffers(hevc);
11426 amhevc_disable();
11427
11428 kfree(gvs);
11429 gvs = NULL;
11430
11431 return 0;
11432}
11433
11434#ifdef MULTI_INSTANCE_SUPPORT
11435static void reset_process_time(struct hevc_state_s *hevc)
11436{
11437 if (hevc->start_process_time) {
11438 unsigned int process_time =
11439 1000 * (jiffies - hevc->start_process_time) / HZ;
11440 hevc->start_process_time = 0;
11441 if (process_time > max_process_time[hevc->index])
11442 max_process_time[hevc->index] = process_time;
11443 }
11444}
11445
11446static void start_process_time(struct hevc_state_s *hevc)
11447{
11448 hevc->start_process_time = jiffies;
11449 hevc->decode_timeout_count = 2;
11450 hevc->last_lcu_idx = 0;
11451}
11452
11453static void restart_process_time(struct hevc_state_s *hevc)
11454{
11455 hevc->start_process_time = jiffies;
11456 hevc->decode_timeout_count = 2;
11457}
11458
11459static void timeout_process(struct hevc_state_s *hevc)
11460{
11461 /*
11462 * If vh265_work is already pending at this timeout point,
11463 * let it handle the scenario instead.
11464 */
11465 if (work_pending(&hevc->work))
11466 return;
11467
11468 hevc->timeout_num++;
11469 amhevc_stop();
11470 read_decode_info(hevc);
11471
11472 hevc_print(hevc,
11473 0, "%s decoder timeout\n", __func__);
11474 check_pic_decoded_error(hevc,
11475 hevc->pic_decoded_lcu_idx);
11476 hevc->decoded_poc = hevc->curr_POC;
11477 hevc->decoding_pic = NULL;
11478 hevc->dec_result = DEC_RESULT_DONE;
11479 reset_process_time(hevc);
11480
11481 if (work_pending(&hevc->work))
11482 return;
11483 vdec_schedule_work(&hevc->timeout_work);
11484}
11485
11486#ifdef CONSTRAIN_MAX_BUF_NUM
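/*
 * Buffer accounting helpers used by run_ready() to constrain scheduling:
 * get_vf_ref_only_buf_count() counts pictures held only by the display
 * path, get_used_buf_count() counts every picture still in use for any
 * reason (output_mark, referenced or output_ready).
 */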
11487static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11488{
11489 struct PIC_s *pic;
11490 int i;
11491 int count = 0;
11492 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11493 pic = hevc->m_PIC[i];
11494 if (pic == NULL || pic->index == -1)
11495 continue;
11496 if (pic->output_mark == 0 && pic->referenced == 0
11497 && pic->output_ready == 1)
11498 count++;
11499 }
11500
11501 return count;
11502}
11503
11504static int get_used_buf_count(struct hevc_state_s *hevc)
11505{
11506 struct PIC_s *pic;
11507 int i;
11508 int count = 0;
11509 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11510 pic = hevc->m_PIC[i];
11511 if (pic == NULL || pic->index == -1)
11512 continue;
11513 if (pic->output_mark != 0 || pic->referenced != 0
11514 || pic->output_ready != 0)
11515 count++;
11516 }
11517
11518 return count;
11519}
11520#endif
11521
11522
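/*
 * is_new_pic_available() - report whether a free picture buffer exists in
 * the DPB.  If none is free, nearly all buffers are held as references and
 * the receiver is inactive (non-v4l path), error-marked references are
 * released and every picture is flagged with error_mark so decoding can
 * recover.
 */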
11523static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11524{
11525 struct PIC_s *new_pic = NULL;
11526 struct PIC_s *pic;
11527 /* recycle un-used pic */
11528 int i;
11529 int ref_pic = 0;
11530 struct vdec_s *vdec = hw_to_vdec(hevc);
11531 /*return 1 if pic_list is not initialized yet*/
11532 if (hevc->pic_list_init_flag != 3)
11533 return 1;
11534
11535 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11536 pic = hevc->m_PIC[i];
11537 if (pic == NULL || pic->index == -1)
11538 continue;
11539 if (pic->referenced == 1)
11540 ref_pic++;
11541 if (pic->output_mark == 0 && pic->referenced == 0
11542 && pic->output_ready == 0
11543 ) {
11544 if (new_pic) {
11545 if (pic->POC < new_pic->POC)
11546 new_pic = pic;
11547 } else
11548 new_pic = pic;
11549 }
11550 }
11551/*If the number of reference frames in the DPB >= (the DPB buffer size - the number of reorders - 3)*/
11552/*and the back-end state is RECEIVER_INACTIVE, the decoder is left with no buffer to decode into.*/
11553/*In that case the error-marked reference frames are released and every picture gets its error flag set.*/
11554/*The 3 accounts for 2 fields needed by the back-end display and 1 field needed for decoding*/
11555/*when the stream is interlaced.*/
11556 if ((!hevc->is_used_v4l) && (new_pic == NULL) &&
11557 (ref_pic >=
11558 get_work_pic_num(hevc) -
11559 hevc->sps_num_reorder_pics_0 - 3)) {
11560 enum receviver_start_e state = RECEIVER_INACTIVE;
11561 if (vf_get_receiver(vdec->vf_provider_name)) {
11562 state =
11563 vf_notify_receiver(vdec->vf_provider_name,
11564 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11565 NULL);
11566 if ((state == RECEIVER_STATE_NULL)
11567 || (state == RECEIVER_STATE_NONE))
11568 state = RECEIVER_INACTIVE;
11569 }
11570 if (state == RECEIVER_INACTIVE) {
11571 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11572 pic = hevc->m_PIC[i];
11573 if (pic == NULL || pic->index == -1)
11574 continue;
11575
11576 if ((pic->referenced == 1) &&
11577 (pic->error_mark == 1)) {
11578 pic->referenced = 0;
11579 put_mv_buf(hevc, pic);
11580 }
11581 pic->error_mark = 1;
11582 }
11583 }
11584 }
11585
11586 return (new_pic != NULL) ? 1 : 0;
11587}
11588
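/*
 * vmh265_stop() - teardown for the multi-instance decoder: stop the core,
 * timer and ISR, unregister the vframe provider and, when use_cma is set,
 * hand picture-list release to vh265_work via DEC_RESULT_FREE_CANVAS before
 * freeing the firmware.
 */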
11589static int vmh265_stop(struct hevc_state_s *hevc)
11590{
11591 if (hevc->stat & STAT_TIMER_ARM) {
11592 del_timer_sync(&hevc->timer);
11593 hevc->stat &= ~STAT_TIMER_ARM;
11594 }
11595 if (hevc->stat & STAT_VDEC_RUN) {
11596 amhevc_stop();
11597 hevc->stat &= ~STAT_VDEC_RUN;
11598 }
11599 if (hevc->stat & STAT_ISR_REG) {
11600 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11601 hevc->stat &= ~STAT_ISR_REG;
11602 }
11603
11604 if (hevc->stat & STAT_VF_HOOK) {
11605 if (fr_hint_status == VDEC_HINTED)
11606 vf_notify_receiver(hevc->provider_name,
11607 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11608 NULL);
11609 fr_hint_status = VDEC_NO_NEED_HINT;
11610 vf_unreg_provider(&vh265_vf_prov);
11611 hevc->stat &= ~STAT_VF_HOOK;
11612 }
11613
11614 hevc_local_uninit(hevc);
11615
11616 hevc->init_flag = 0;
11617 hevc->first_sc_checked = 0;
11618 cancel_work_sync(&hevc->notify_work);
11619 cancel_work_sync(&hevc->set_clk_work);
11620 cancel_work_sync(&hevc->timeout_work);
11621
11622 uninit_mmu_buffers(hevc);
11623
11624 if (use_cma) {
11625 hevc->uninit_list = 1;
11626 reset_process_time(hevc);
11627 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
11628 vdec_schedule_work(&hevc->work);
11629 flush_work(&hevc->work);
11630#ifdef USE_UNINIT_SEMA
11631 if (hevc->init_flag) {
11632 down(&hevc->h265_uninit_done_sema);
11633 }
11634#else
11635 while (hevc->uninit_list) /* wait uninit complete */
11636 msleep(20);
11637#endif
11638 }
11639 cancel_work_sync(&hevc->work);
11640
11641 vfree(hevc->fw);
11642 hevc->fw = NULL;
11643
11644 dump_log(hevc);
11645 return 0;
11646}
11647
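/*
 * get_data_check_sum() - byte-wise sum over the current input chunk, mapping
 * it temporarily when the block is not already virtually mapped; used only
 * for debug logging of frame-based input.
 */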
11648static unsigned char get_data_check_sum
11649 (struct hevc_state_s *hevc, int size)
11650{
11651 int jj;
11652 int sum = 0;
11653 u8 *data = NULL;
11654
11655 if (!hevc->chunk->block->is_mapped)
11656 data = codec_mm_vmap(hevc->chunk->block->start +
11657 hevc->chunk->offset, size);
11658 else
11659 data = ((u8 *)hevc->chunk->block->start_virt) +
11660 hevc->chunk->offset;
11661
11662 for (jj = 0; jj < size; jj++)
11663 sum += data[jj];
11664
11665 if (!hevc->chunk->block->is_mapped)
11666 codec_mm_unmap_phyaddr(data);
11667 return sum;
11668}
11669
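/*
 * vh265_notify_work() - deferred worker that delivers the frame-rate hint
 * (frame_dur) to the vframe receiver once, tracking delivery per instance
 * or via the global fr_hint_status.
 */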
11670static void vh265_notify_work(struct work_struct *work)
11671{
11672 struct hevc_state_s *hevc =
11673 container_of(work,
11674 struct hevc_state_s,
11675 notify_work);
11676 struct vdec_s *vdec = hw_to_vdec(hevc);
11677#ifdef MULTI_INSTANCE_SUPPORT
11678 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
11679 vf_notify_receiver(hevc->provider_name,
11680 VFRAME_EVENT_PROVIDER_FR_HINT,
11681 (void *)
11682 ((unsigned long)hevc->frame_dur));
11683 vdec->fr_hint_state = VDEC_HINTED;
11684 } else if (fr_hint_status == VDEC_NEED_HINT) {
11685 vf_notify_receiver(hevc->provider_name,
11686 VFRAME_EVENT_PROVIDER_FR_HINT,
11687 (void *)
11688 ((unsigned long)hevc->frame_dur));
11689 fr_hint_status = VDEC_HINTED;
11690 }
11691#else
11692 if (fr_hint_status == VDEC_NEED_HINT) {
11693 vf_notify_receiver(PROVIDER_NAME,
11694 VFRAME_EVENT_PROVIDER_FR_HINT,
11695 (void *)
11696 ((unsigned long)hevc->frame_dur));
11697 fr_hint_status = VDEC_HINTED;
11698 }
11699#endif
11700
11701 return;
11702}
11703
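/*
 * vh265_work_implement() - bottom half executed after each decode pass.
 * It acts on the pending dec_result (free canvas, picture-list init,
 * get-data continuation, done, again, EOS or force exit), stops the core
 * and timer, releases the scheduling core mask and finally invokes the
 * vdec completion callback.
 */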
11704static void vh265_work_implement(struct hevc_state_s *hevc,
11705 struct vdec_s *vdec, int from)
11706{
11707 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
11708 /*USE_BUF_BLOCK*/
11709 uninit_pic_list(hevc);
11710 hevc_print(hevc, 0, "uninit list\n");
11711 hevc->uninit_list = 0;
11712#ifdef USE_UNINIT_SEMA
11713 up(&hevc->h265_uninit_done_sema);
11714#endif
11715 return;
11716 }
11717
11718 /* finished decoding one frame or error,
11719 * notify vdec core to switch context
11720 */
11721 if (hevc->pic_list_init_flag == 1
11722 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
11723 hevc->pic_list_init_flag = 2;
11724 init_pic_list(hevc);
11725 init_pic_list_hw(hevc);
11726 init_buf_spec(hevc);
11727 hevc_print(hevc, 0,
11728 "set pic_list_init_flag to 2\n");
11729
11730 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11731 return;
11732 }
11733
11734 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11735 "%s dec_result %d %x %x %x\n",
11736 __func__,
11737 hevc->dec_result,
11738 READ_VREG(HEVC_STREAM_LEVEL),
11739 READ_VREG(HEVC_STREAM_WR_PTR),
11740 READ_VREG(HEVC_STREAM_RD_PTR));
11741
11742 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
11743 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
11744 && (hw_to_vdec(hevc)->next_status !=
11745 VDEC_STATUS_DISCONNECTED)) {
11746 if (!vdec_has_more_input(vdec)) {
11747 hevc->dec_result = DEC_RESULT_EOS;
11748 vdec_schedule_work(&hevc->work);
11749 return;
11750 }
11751 if (!input_frame_based(vdec)) {
11752 int r = vdec_sync_input(vdec);
11753 if (r >= 0x200) {
11754 WRITE_VREG(HEVC_DECODE_SIZE,
11755 READ_VREG(HEVC_DECODE_SIZE) + r);
11756
11757 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11758 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
11759 __func__,
11760 READ_VREG(HEVC_STREAM_LEVEL),
11761 READ_VREG(HEVC_STREAM_WR_PTR),
11762 READ_VREG(HEVC_STREAM_RD_PTR),
11763 READ_VREG(HEVC_MPC_E), r);
11764
11765 start_process_time(hevc);
11766 if (READ_VREG(HEVC_DEC_STATUS_REG)
11767 == HEVC_DECODE_BUFEMPTY2)
11768 WRITE_VREG(HEVC_DEC_STATUS_REG,
11769 HEVC_ACTION_DONE);
11770 else
11771 WRITE_VREG(HEVC_DEC_STATUS_REG,
11772 HEVC_ACTION_DEC_CONT);
11773 } else {
11774 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11775 vdec_schedule_work(&hevc->work);
11776 }
11777 return;
11778 }
11779
11780 /*below for frame_base*/
11781 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
11782 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11783 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
11784 __func__,
11785 READ_VREG(HEVC_STREAM_LEVEL),
11786 READ_VREG(HEVC_STREAM_WR_PTR),
11787 READ_VREG(HEVC_STREAM_RD_PTR),
11788 READ_VREG(HEVC_MPC_E));
11789 mutex_lock(&hevc->chunks_mutex);
11790 vdec_vframe_dirty(vdec, hevc->chunk);
11791 hevc->chunk = NULL;
11792 mutex_unlock(&hevc->chunks_mutex);
11793 vdec_clean_input(vdec);
11794 }
11795
11796 /*if (is_new_pic_available(hevc)) {*/
11797 if (run_ready(vdec, VDEC_HEVC)) {
11798 int r;
11799 int decode_size;
11800 r = vdec_prepare_input(vdec, &hevc->chunk);
11801 if (r < 0) {
11802 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11803
11804 hevc_print(hevc,
11805 PRINT_FLAG_VDEC_DETAIL,
11806 "amvdec_vh265: Insufficient data\n");
11807
11808 vdec_schedule_work(&hevc->work);
11809 return;
11810 }
11811 hevc->dec_result = DEC_RESULT_NONE;
11812 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11813 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
11814 __func__, r,
11815 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
11816 get_data_check_sum(hevc, r) : 0,
11817 READ_VREG(HEVC_MPC_E));
11818
11819 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
11820 int jj;
11821 u8 *data = NULL;
11822
11823 if (!hevc->chunk->block->is_mapped)
11824 data = codec_mm_vmap(
11825 hevc->chunk->block->start +
11826 hevc->chunk->offset, r);
11827 else
11828 data = ((u8 *)
11829 hevc->chunk->block->start_virt)
11830 + hevc->chunk->offset;
11831
11832 for (jj = 0; jj < r; jj++) {
11833 if ((jj & 0xf) == 0)
11834 hevc_print(hevc,
11835 PRINT_FRAMEBASE_DATA,
11836 "%06x:", jj);
11837 hevc_print_cont(hevc,
11838 PRINT_FRAMEBASE_DATA,
11839 "%02x ", data[jj]);
11840 if (((jj + 1) & 0xf) == 0)
11841 hevc_print_cont(hevc,
11842 PRINT_FRAMEBASE_DATA,
11843 "\n");
11844 }
11845
11846 if (!hevc->chunk->block->is_mapped)
11847 codec_mm_unmap_phyaddr(data);
11848 }
11849
11850 decode_size = hevc->chunk->size +
11851 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
11852 WRITE_VREG(HEVC_DECODE_SIZE,
11853 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
11854
11855 vdec_enable_input(vdec);
11856
11857 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11858 "%s: mpc %x\n",
11859 __func__, READ_VREG(HEVC_MPC_E));
11860
11861 start_process_time(hevc);
11862 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11863 } else {
11864 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11865
11866 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11867 * "amvdec_vh265: Insufficient data\n");
11868 */
11869
11870 vdec_schedule_work(&hevc->work);
11871 }
11872 return;
11873 } else if (hevc->dec_result == DEC_RESULT_DONE) {
11874 /* if (!hevc->ctx_valid)
11875 hevc->ctx_valid = 1; */
11876 decode_frame_count[hevc->index]++;
11877#ifdef DETREFILL_ENABLE
11878 if (hevc->is_swap &&
11879 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11880 if (hevc->delrefill_check == 2) {
11881 delrefill(hevc);
11882 amhevc_stop();
11883 }
11884 }
11885#endif
11886 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
11887 hevc->used_4k_num =
11888 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
11889 if (hevc->used_4k_num >= 0 &&
11890 hevc->cur_pic &&
11891 hevc->cur_pic->scatter_alloc
11892 == 1) {
11893 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
11894 "%s pic index %d scatter_alloc %d page_start %d\n",
11895 "decoder_mmu_box_free_idx_tail",
11896 hevc->cur_pic->index,
11897 hevc->cur_pic->scatter_alloc,
11898 hevc->used_4k_num);
11899 if (hevc->m_ins_flag)
11900 hevc_mmu_dma_check(hw_to_vdec(hevc));
11901 decoder_mmu_box_free_idx_tail(
11902 hevc->mmu_box,
11903 hevc->cur_pic->index,
11904 hevc->used_4k_num);
11905 hevc->cur_pic->scatter_alloc = 2;
11906 }
11907 }
11908 hevc->pic_decoded_lcu_idx =
11909 READ_VREG(HEVC_PARSER_LCU_START)
11910 & 0xffffff;
11911
11912 if (vdec->master == NULL && vdec->slave == NULL &&
11913 hevc->empty_flag == 0) {
11914 hevc->over_decode =
11915 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
11916 if (hevc->over_decode)
11917 hevc_print(hevc, 0,
11918 "!!!Over decode\n");
11919 }
11920
11921 if (is_log_enable(hevc))
11922 add_log(hevc,
11923 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
11924 __func__,
11925 hevc->dec_result,
11926 hevc->pic_decoded_lcu_idx,
11927 hevc->used_4k_num,
11928 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11929 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11930 hevc->start_shift_bytes
11931 );
11932
11933 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11934 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
11935 __func__,
11936 hevc->dec_result,
11937 READ_VREG(HEVC_STREAM_LEVEL),
11938 READ_VREG(HEVC_STREAM_WR_PTR),
11939 READ_VREG(HEVC_STREAM_RD_PTR),
11940 hevc->pic_decoded_lcu_idx,
11941 hevc->used_4k_num,
11942 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11943 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11944 hevc->start_shift_bytes
11945 );
11946
11947 hevc->used_4k_num = -1;
11948
11949 check_pic_decoded_error(hevc,
11950 hevc->pic_decoded_lcu_idx);
11951#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11952#if 1
11953 if (vdec->slave) {
11954 if (dv_debug & 0x1)
11955 vdec_set_flag(vdec->slave,
11956 VDEC_FLAG_SELF_INPUT_CONTEXT);
11957 else
11958 vdec_set_flag(vdec->slave,
11959 VDEC_FLAG_OTHER_INPUT_CONTEXT);
11960 }
11961#else
11962 if (vdec->slave) {
11963 if (no_interleaved_el_slice)
11964 vdec_set_flag(vdec->slave,
11965 VDEC_FLAG_INPUT_KEEP_CONTEXT);
11966 /* this will move real HW pointer for input */
11967 else
11968 vdec_set_flag(vdec->slave, 0);
11969 /* this will not move real HW pointer
11970 *and SL layer decoding
11971 *will start from same stream position
11972 *as current BL decoder
11973 */
11974 }
11975#endif
11976#endif
11977#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11978 hevc->shift_byte_count_lo
11979 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11980 if (vdec->slave) {
11981 /*cur is base, found enhance*/
11982 struct hevc_state_s *hevc_el =
11983 (struct hevc_state_s *)
11984 vdec->slave->private;
11985 if (hevc_el)
11986 hevc_el->shift_byte_count_lo =
11987 hevc->shift_byte_count_lo;
11988 } else if (vdec->master) {
11989 /*cur is enhance, found base*/
11990 struct hevc_state_s *hevc_ba =
11991 (struct hevc_state_s *)
11992 vdec->master->private;
11993 if (hevc_ba)
11994 hevc_ba->shift_byte_count_lo =
11995 hevc->shift_byte_count_lo;
11996 }
11997#endif
11998 mutex_lock(&hevc->chunks_mutex);
11999 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12000 hevc->chunk = NULL;
12001 mutex_unlock(&hevc->chunks_mutex);
12002 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
12003 /*
12004 stream base: stream buf empty or timeout
12005 frame base: vdec_prepare_input fail
12006 */
12007 if (!vdec_has_more_input(vdec)) {
12008 hevc->dec_result = DEC_RESULT_EOS;
12009 vdec_schedule_work(&hevc->work);
12010 return;
12011 }
12012#ifdef AGAIN_HAS_THRESHOLD
12013 hevc->next_again_flag = 1;
12014#endif
12015 } else if (hevc->dec_result == DEC_RESULT_EOS) {
12016 struct PIC_s *pic;
12017 hevc->eos = 1;
12018#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12019 if ((vdec->master || vdec->slave) &&
12020 aux_data_is_avaible(hevc))
12021 dolby_get_meta(hevc);
12022#endif
12023 check_pic_decoded_error(hevc,
12024 hevc->pic_decoded_lcu_idx);
12025 pic = get_pic_by_POC(hevc, hevc->curr_POC);
12026 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12027 "%s: end of stream, last dec poc %d => 0x%pf\n",
12028 __func__, hevc->curr_POC, pic);
12029 flush_output(hevc, pic);
12030
12031 if (hevc->is_used_v4l)
12032 notify_v4l_eos(hw_to_vdec(hevc));
12033#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12034 hevc->shift_byte_count_lo
12035 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12036 if (vdec->slave) {
12037 /*cur is base, found enhance*/
12038 struct hevc_state_s *hevc_el =
12039 (struct hevc_state_s *)
12040 vdec->slave->private;
12041 if (hevc_el)
12042 hevc_el->shift_byte_count_lo =
12043 hevc->shift_byte_count_lo;
12044 } else if (vdec->master) {
12045 /*cur is enhance, found base*/
12046 struct hevc_state_s *hevc_ba =
12047 (struct hevc_state_s *)
12048 vdec->master->private;
12049 if (hevc_ba)
12050 hevc_ba->shift_byte_count_lo =
12051 hevc->shift_byte_count_lo;
12052 }
12053#endif
12054 mutex_lock(&hevc->chunks_mutex);
12055 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12056 hevc->chunk = NULL;
12057 mutex_unlock(&hevc->chunks_mutex);
12058 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
12059 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12060 "%s: force exit\n",
12061 __func__);
12062 if (hevc->stat & STAT_VDEC_RUN) {
12063 amhevc_stop();
12064 hevc->stat &= ~STAT_VDEC_RUN;
12065 }
12066 if (hevc->stat & STAT_ISR_REG) {
12067 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
12068 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
12069 hevc->stat &= ~STAT_ISR_REG;
12070 }
12071 hevc_print(hevc, 0, "%s: force exit end\n",
12072 __func__);
12073 }
12074
12075 if (hevc->stat & STAT_VDEC_RUN) {
12076 amhevc_stop();
12077 hevc->stat &= ~STAT_VDEC_RUN;
12078 }
12079
12080 if (hevc->stat & STAT_TIMER_ARM) {
12081 del_timer_sync(&hevc->timer);
12082 hevc->stat &= ~STAT_TIMER_ARM;
12083 }
12084
12085 wait_hevc_search_done(hevc);
12086#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12087 if (hevc->switch_dvlayer_flag) {
12088 if (vdec->slave)
12089 vdec_set_next_sched(vdec, vdec->slave);
12090 else if (vdec->master)
12091 vdec_set_next_sched(vdec, vdec->master);
12092 } else if (vdec->slave || vdec->master)
12093 vdec_set_next_sched(vdec, vdec);
12094#endif
12095
12096 if (from == 1) {
12097 /* This is a timeout work */
12098 if (work_pending(&hevc->work)) {
12099 /*
12100 * vh265_work arrived at the last second;
12101 * give it a chance to handle the scenario.
12102 */
12103 return;
12104 /* cancel_work_sync(&hevc->work); reserved for future consideration */
12105 }
12106 }
12107
12108 /* mark itself has all HW resource released and input released */
12109 if (vdec->parallel_dec == 1)
12110 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
12111 else
12112 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12113
12114 if (hevc->is_used_v4l) {
12115 struct aml_vcodec_ctx *ctx =
12116 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12117
12118 if (ctx->param_sets_from_ucode &&
12119 !hevc->v4l_params_parsed)
12120 vdec_v4l_write_frame_sync(ctx);
12121 }
12122
12123 if (hevc->vdec_cb)
12124 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
12125}
12126
12127static void vh265_work(struct work_struct *work)
12128{
12129 struct hevc_state_s *hevc = container_of(work,
12130 struct hevc_state_s, work);
12131 struct vdec_s *vdec = hw_to_vdec(hevc);
12132
12133 vh265_work_implement(hevc, vdec, 0);
12134}
12135
12136static void vh265_timeout_work(struct work_struct *work)
12137{
12138 struct hevc_state_s *hevc = container_of(work,
12139 struct hevc_state_s, timeout_work);
12140 struct vdec_s *vdec = hw_to_vdec(hevc);
12141
12142 if (work_pending(&hevc->work))
12143 return;
12144 vh265_work_implement(hevc, vdec, 1);
12145}
12146
12147
12148static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
12149{
12150 /* new to do ... */
12151 vh265_prot_init(hevc);
12152 return 0;
12153}
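/*
 * run_ready() - scheduling gate polled by the vdec core.  Returns the core
 * mask this instance may claim, or 0 when it should not run yet: stream
 * level below pre_decode_buf_level, AGAIN threshold not reached, display
 * queue above the valve level, no free picture buffer, or (v4l) capture
 * buffers not yet ready.
 */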
12154static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
12155{
12156 struct hevc_state_s *hevc =
12157 (struct hevc_state_s *)vdec->private;
12158 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
12159 CODEC_MM_FLAGS_TVP : 0;
12160 bool ret = 0;
12161 if (step == 0x12)
12162 return 0;
12163 else if (step == 0x11)
12164 step = 0x12;
12165
12166 if (hevc->eos)
12167 return 0;
12168 if (!hevc->first_sc_checked && hevc->mmu_enable) {
12169 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
12170 hevc->first_sc_checked = 1;
12171 hevc_print(hevc, 0,
12172 "vh265 cached=%d need_size=%d speed= %d ms\n",
12173 size, (hevc->need_cache_size >> PAGE_SHIFT),
12174 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
12175 }
12176 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
12177 && pre_decode_buf_level != 0) {
12178 u32 rp, wp, level;
12179
12180 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
12181 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
12182 if (wp < rp)
12183 level = vdec->input.size + wp - rp;
12184 else
12185 level = wp - rp;
12186
12187 if (level < pre_decode_buf_level)
12188 return 0;
12189 }
12190
12191#ifdef AGAIN_HAS_THRESHOLD
12192 if (hevc->next_again_flag &&
12193 (!vdec_frame_based(vdec))) {
12194 u32 parser_wr_ptr =
12195 READ_PARSER_REG(PARSER_VIDEO_WP);
12196 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
12197 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
12198 again_threshold) {
12199 int r = vdec_sync_input(vdec);
12200 hevc_print(hevc,
12201 PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
12202 return 0;
12203 }
12204 }
12205#endif
12206
12207 if (disp_vframe_valve_level &&
12208 kfifo_len(&hevc->display_q) >=
12209 disp_vframe_valve_level) {
12210 hevc->valve_count--;
12211 if (hevc->valve_count <= 0)
12212 hevc->valve_count = 2;
12213 else
12214 return 0;
12215 }
12216
12217 ret = is_new_pic_available(hevc);
12218 if (!ret) {
12219 hevc_print(hevc,
12220 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
12221 __func__, ret);
12222 }
12223
12224#ifdef CONSTRAIN_MAX_BUF_NUM
12225 if (hevc->pic_list_init_flag == 3) {
12226 if (run_ready_max_vf_only_num > 0 &&
12227 get_vf_ref_only_buf_count(hevc) >=
12228 run_ready_max_vf_only_num
12229 )
12230 ret = 0;
12231 if (run_ready_display_q_num > 0 &&
12232 kfifo_len(&hevc->display_q) >=
12233 run_ready_display_q_num)
12234 ret = 0;
12235
12236 /*avoid more buffers consumed when
12237 switching resolution*/
12238 if (run_ready_max_buf_num == 0xff &&
12239 get_used_buf_count(hevc) >=
12240 get_work_pic_num(hevc))
12241 ret = 0;
12242 else if (run_ready_max_buf_num &&
12243 get_used_buf_count(hevc) >=
12244 run_ready_max_buf_num)
12245 ret = 0;
12246 }
12247#endif
12248
12249 if (hevc->is_used_v4l) {
12250 struct aml_vcodec_ctx *ctx =
12251 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12252
12253 if (ctx->param_sets_from_ucode &&
12254 !ctx->v4l_codec_ready &&
12255 hevc->v4l_params_parsed) {
12256 ret = 0; /*the params have been parsed, wait for the codec to be ready.*/
12257 } else if (!ctx->v4l_codec_dpb_ready) {
12258 if (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
12259 run_ready_min_buf_num)
12260 ret = 0;
12261 }
12262 }
12263
12264 if (ret)
12265 not_run_ready[hevc->index] = 0;
12266 else
12267 not_run_ready[hevc->index]++;
12268 if (vdec->parallel_dec == 1)
12269 return ret ? (CORE_MASK_HEVC) : 0;
12270 else
12271 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
12272}
12273
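/*
 * run() - entry point called once run_ready() has granted the core mask:
 * reset the HEVC core, prepare the input chunk, (re)load the firmware when
 * needed, restore the hardware context, program the decode size and start
 * the decode with the watchdog timer armed.
 */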
12274static void run(struct vdec_s *vdec, unsigned long mask,
12275 void (*callback)(struct vdec_s *, void *), void *arg)
12276{
12277 struct hevc_state_s *hevc =
12278 (struct hevc_state_s *)vdec->private;
12279 int r, loadr = 0;
12280 unsigned char check_sum = 0;
12281
12282 run_count[hevc->index]++;
12283 hevc->vdec_cb_arg = arg;
12284 hevc->vdec_cb = callback;
12285 hevc->aux_data_dirty = 1;
12286 hevc_reset_core(vdec);
12287
12288#ifdef AGAIN_HAS_THRESHOLD
12289 hevc->pre_parser_wr_ptr =
12290 READ_PARSER_REG(PARSER_VIDEO_WP);
12291 hevc->next_again_flag = 0;
12292#endif
12293 r = vdec_prepare_input(vdec, &hevc->chunk);
12294 if (r < 0) {
12295 input_empty[hevc->index]++;
12296 hevc->dec_result = DEC_RESULT_AGAIN;
12297 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12298 "ammvdec_vh265: Insufficient data\n");
12299
12300 vdec_schedule_work(&hevc->work);
12301 return;
12302 }
12303 input_empty[hevc->index] = 0;
12304 hevc->dec_result = DEC_RESULT_NONE;
12305 if (vdec_frame_based(vdec) &&
12306 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12307 || is_log_enable(hevc)))
12308 check_sum = get_data_check_sum(hevc, r);
12309
12310 if (is_log_enable(hevc))
12311 add_log(hevc,
12312 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12313 __func__, r,
12314 check_sum,
12315 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12316 );
12317 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12318 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12319 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12320 __func__, r,
12321 check_sum,
12322 READ_VREG(HEVC_STREAM_LEVEL),
12323 READ_VREG(HEVC_STREAM_WR_PTR),
12324 READ_VREG(HEVC_STREAM_RD_PTR),
12325 READ_PARSER_REG(PARSER_VIDEO_RP),
12326 READ_PARSER_REG(PARSER_VIDEO_WP),
12327 hevc->start_shift_bytes
12328 );
12329 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12330 input_frame_based(vdec)) {
12331 int jj;
12332 u8 *data = NULL;
12333
12334 if (!hevc->chunk->block->is_mapped)
12335 data = codec_mm_vmap(hevc->chunk->block->start +
12336 hevc->chunk->offset, r);
12337 else
12338 data = ((u8 *)hevc->chunk->block->start_virt)
12339 + hevc->chunk->offset;
12340
12341 for (jj = 0; jj < r; jj++) {
12342 if ((jj & 0xf) == 0)
12343 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12344 "%06x:", jj);
12345 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12346 "%02x ", data[jj]);
12347 if (((jj + 1) & 0xf) == 0)
12348 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12349 "\n");
12350 }
12351
12352 if (!hevc->chunk->block->is_mapped)
12353 codec_mm_unmap_phyaddr(data);
12354 }
12355 if (vdec->mc_loaded) {
12356 /*firmware has been loaded before
12357 and has not changed to another type,
12358 so skip reloading.
12359 */
12360 if (tee_enabled() && hevc->is_swap &&
12361 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12362 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12363 } else {
12364 if (hevc->mmu_enable)
12365 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12366 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12367 "h265_mmu", hevc->fw->data);
12368 else {
12369 if (!hevc->is_4k) {
12370 /* Try the swap fw first; if an older fw package */
12371 /* was installed it does not contain the swap fw, */
12372 /* so fall back to the noswap fw on failure. */
12373 loadr = amhevc_vdec_loadmc_ex(
12374 VFORMAT_HEVC, vdec,
12375 "hevc_mmu_swap",
12376 hevc->fw->data);
12377 if (loadr < 0)
12378 loadr = amhevc_vdec_loadmc_ex(
12379 VFORMAT_HEVC, vdec,
12380 "h265_mmu",
12381 hevc->fw->data);
12382 else
12383 hevc->is_swap = true;
12384 } else
12385 loadr = amhevc_vdec_loadmc_ex(
12386 VFORMAT_HEVC, vdec,
12387 "h265_mmu", hevc->fw->data);
12388 }
12389 else
12390 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12391 NULL, hevc->fw->data);
12392 if (loadr < 0) {
12393 amhevc_disable();
12394 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12395 tee_enabled() ? "TEE" : "local", loadr);
12396 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12397 vdec_schedule_work(&hevc->work);
12398 return;
12399 }
12400
12401 if (tee_enabled() && hevc->is_swap &&
12402 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12403 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12404#ifdef DETREFILL_ENABLE
12405 if (hevc->is_swap &&
12406 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12407 init_detrefill_buf(hevc);
12408#endif
12409 vdec->mc_loaded = 1;
12410 vdec->mc_type = VFORMAT_HEVC;
12411 }
12412 if (vh265_hw_ctx_restore(hevc) < 0) {
12413 vdec_schedule_work(&hevc->work);
12414 return;
12415 }
12416 vdec_enable_input(vdec);
12417
12418 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12419
12420 if (vdec_frame_based(vdec)) {
12421 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12422 r = hevc->chunk->size +
12423 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12424 hevc->decode_size = r;
12425 }
12426#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12427 else {
12428 if (vdec->master || vdec->slave)
12429 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12430 hevc->shift_byte_count_lo);
12431 }
12432#endif
12433 WRITE_VREG(HEVC_DECODE_SIZE, r);
12434 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12435 hevc->init_flag = 1;
12436
12437 if (hevc->pic_list_init_flag == 3)
12438 init_pic_list_hw(hevc);
12439
12440 backup_decode_state(hevc);
12441
12442 start_process_time(hevc);
12443 mod_timer(&hevc->timer, jiffies);
12444 hevc->stat |= STAT_TIMER_ARM;
12445 hevc->stat |= STAT_ISR_REG;
12446 amhevc_start();
12447 hevc->stat |= STAT_VDEC_RUN;
12448}
12449
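/*
 * aml_free_canvas() - return the Y/UV canvas indexes of every allocated
 * picture back to the vdec core (parallel decode mode only).
 */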
12450static void aml_free_canvas(struct vdec_s *vdec)
12451{
12452 int i;
12453 struct hevc_state_s *hevc =
12454 (struct hevc_state_s *)vdec->private;
12455
12456 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12457 struct PIC_s *pic = hevc->m_PIC[i];
12458
12459 if (pic) {
12460 if (vdec->parallel_dec == 1) {
12461 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
12462 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
12463 }
12464 }
12465 }
12466}
12467
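/*
 * reset() - vdec reset callback: cancel pending work, stop the core and
 * timer, drop pictures, canvases and MV buffers, then redo the local init
 * so the instance restarts from a clean state.
 */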
12468static void reset(struct vdec_s *vdec)
12469{
12470 struct hevc_state_s *hevc =
12471 (struct hevc_state_s *)vdec->private;
12472 int i;
12473
12474 cancel_work_sync(&hevc->work);
12475 cancel_work_sync(&hevc->notify_work);
12476 if (hevc->stat & STAT_VDEC_RUN) {
12477 amhevc_stop();
12478 hevc->stat &= ~STAT_VDEC_RUN;
12479 }
12480
12481 if (hevc->stat & STAT_TIMER_ARM) {
12482 del_timer_sync(&hevc->timer);
12483 hevc->stat &= ~STAT_TIMER_ARM;
12484 }
12485 hevc->dec_result = DEC_RESULT_NONE;
12486 reset_process_time(hevc);
12487 hevc->init_flag = 0;
12488 hevc->pic_list_init_flag = 0;
12489 dealloc_mv_bufs(hevc);
12490 aml_free_canvas(vdec);
12491 hevc_local_uninit(hevc);
12492 if (vh265_local_init(hevc) < 0)
12493 pr_debug(" %s local init fail\n", __func__);
12494 for (i = 0; i < BUF_POOL_SIZE; i++) {
12495 hevc->m_BUF[i].start_adr = 0;
12496 }
12497
12498 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12499}
12500
12501static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12502{
12503 struct hevc_state_s *hevc =
12504 (struct hevc_state_s *)vdec->private;
12505
12506 return vh265_isr(0, hevc);
12507}
12508
12509static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12510{
12511 struct hevc_state_s *hevc =
12512 (struct hevc_state_s *)vdec->private;
12513
12514 return vh265_isr_thread_fn(0, hevc);
12515}
12516#endif
12517
12518static int amvdec_h265_probe(struct platform_device *pdev)
12519{
12520#ifdef MULTI_INSTANCE_SUPPORT
12521 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12522#else
12523 struct vdec_dev_reg_s *pdata =
12524 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12525#endif
12526 char *tmpbuf;
12527 int ret;
12528 struct hevc_state_s *hevc;
12529
12530 hevc = vmalloc(sizeof(struct hevc_state_s));
12531 if (hevc == NULL) {
12532 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12533 return -ENOMEM;
12534 }
12535 gHevc = hevc;
12536 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12537 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12538 H265_DEBUG_DIS_SYS_ERROR_PROC));
12539 memset(hevc, 0, sizeof(struct hevc_state_s));
12540 if (get_dbg_flag(hevc))
12541 hevc_print(hevc, 0, "%s\r\n", __func__);
12542 mutex_lock(&vh265_mutex);
12543
12544 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12545 (parser_sei_enable & 0x100) == 0)
12546 parser_sei_enable = 7; /*old 1*/
12547 hevc->m_ins_flag = 0;
12548 hevc->init_flag = 0;
12549 hevc->first_sc_checked = 0;
12550 hevc->uninit_list = 0;
12551 hevc->fatal_error = 0;
12552 hevc->show_frame_num = 0;
12553 hevc->frameinfo_enable = 1;
12554#ifdef MULTI_INSTANCE_SUPPORT
12555 hevc->platform_dev = pdev;
12556 platform_set_drvdata(pdev, pdata);
12557#endif
12558
12559 if (pdata == NULL) {
12560 hevc_print(hevc, 0,
12561 "\namvdec_h265 memory resource undefined.\n");
12562 vfree(hevc);
12563 mutex_unlock(&vh265_mutex);
12564 return -EFAULT;
12565 }
12566 if (mmu_enable_force == 0) {
12567 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12568 || double_write_mode == 0x10)
12569 hevc->mmu_enable = 0;
12570 else
12571 hevc->mmu_enable = 1;
12572 }
12573 if (init_mmu_buffers(hevc)) {
12574 hevc_print(hevc, 0,
12575 "\n 265 mmu init failed!\n");
12576 vfree(hevc);
12577 mutex_unlock(&vh265_mutex);
12578 return -EFAULT;
12579 }
12580
12581 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12582 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12583 if (ret < 0) {
12584 uninit_mmu_buffers(hevc);
12585 vfree(hevc);
12586 mutex_unlock(&vh265_mutex);
12587 return ret;
12588 }
12589 hevc->buf_size = work_buf_size;
12590
12591
12592 if (!vdec_secure(pdata)) {
12593 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12594 if (tmpbuf) {
12595 memset(tmpbuf, 0, work_buf_size);
12596 dma_sync_single_for_device(amports_get_dma_device(),
12597 hevc->buf_start,
12598 work_buf_size, DMA_TO_DEVICE);
12599 } else {
12600 tmpbuf = codec_mm_vmap(hevc->buf_start,
12601 work_buf_size);
12602 if (tmpbuf) {
12603 memset(tmpbuf, 0, work_buf_size);
12604 dma_sync_single_for_device(
12605 amports_get_dma_device(),
12606 hevc->buf_start,
12607 work_buf_size,
12608 DMA_TO_DEVICE);
12609 codec_mm_unmap_phyaddr(tmpbuf);
12610 }
12611 }
12612 }
12613
12614 if (get_dbg_flag(hevc)) {
12615 hevc_print(hevc, 0,
12616 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12617 hevc->buf_start, hevc->buf_size);
12618 }
12619
12620 if (pdata->sys_info)
12621 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12622 else {
12623 hevc->vh265_amstream_dec_info.width = 0;
12624 hevc->vh265_amstream_dec_info.height = 0;
12625 hevc->vh265_amstream_dec_info.rate = 30;
12626 }
12627#ifndef MULTI_INSTANCE_SUPPORT
12628 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12629 workaround_enable |= 3;
12630 hevc_print(hevc, 0,
12631 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
12632 } else
12633 workaround_enable &= ~3;
12634#endif
12635 hevc->cma_dev = pdata->cma_dev;
12636 vh265_vdec_info_init();
12637
12638#ifdef MULTI_INSTANCE_SUPPORT
12639 pdata->private = hevc;
12640 pdata->dec_status = vh265_dec_status;
12641 pdata->set_isreset = vh265_set_isreset;
12642 is_reset = 0;
12643 if (vh265_init(pdata) < 0) {
12644#else
12645 if (vh265_init(hevc) < 0) {
12646#endif
12647 hevc_print(hevc, 0,
12648 "\namvdec_h265 init failed.\n");
12649 hevc_local_uninit(hevc);
12650 uninit_mmu_buffers(hevc);
12651 vfree(hevc);
12652 pdata->dec_status = NULL;
12653 mutex_unlock(&vh265_mutex);
12654 return -ENODEV;
12655 }
12656 /*set the max clk for smooth playing...*/
12657 hevc_source_changed(VFORMAT_HEVC,
12658 3840, 2160, 60);
12659 mutex_unlock(&vh265_mutex);
12660
12661 return 0;
12662}
12663
12664static int amvdec_h265_remove(struct platform_device *pdev)
12665{
12666 struct hevc_state_s *hevc = gHevc;
12667
12668 if (get_dbg_flag(hevc))
12669 hevc_print(hevc, 0, "%s\r\n", __func__);
12670
12671 mutex_lock(&vh265_mutex);
12672
12673 vh265_stop(hevc);
12674
12675 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
12676
12677
12678#ifdef DEBUG_PTS
12679 hevc_print(hevc, 0,
12680 "pts missed %ld, pts hit %ld, duration %d\n",
12681 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
12682#endif
12683
12684 vfree(hevc);
12685 hevc = NULL;
12686 gHevc = NULL;
12687
12688 mutex_unlock(&vh265_mutex);
12689
12690 return 0;
12691}
12692/****************************************/
12693#ifdef CONFIG_PM
12694static int h265_suspend(struct device *dev)
12695{
12696 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
12697 return 0;
12698}
12699
12700static int h265_resume(struct device *dev)
12701{
12702 amhevc_resume(to_platform_device(dev));
12703 return 0;
12704}
12705
12706static const struct dev_pm_ops h265_pm_ops = {
12707 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
12708};
12709#endif
12710
12711static struct platform_driver amvdec_h265_driver = {
12712 .probe = amvdec_h265_probe,
12713 .remove = amvdec_h265_remove,
12714 .driver = {
12715 .name = DRIVER_NAME,
12716#ifdef CONFIG_PM
12717 .pm = &h265_pm_ops,
12718#endif
12719 }
12720};
12721
12722#ifdef MULTI_INSTANCE_SUPPORT
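/*
 * vh265_dump_state() - debug dump: prints stream geometry, decode/display
 * counters, buffer and MV-buffer tables, key HEVC/parser registers and,
 * for frame-based input, the current chunk payload.
 */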
12723static void vh265_dump_state(struct vdec_s *vdec)
12724{
12725 int i;
12726 struct hevc_state_s *hevc =
12727 (struct hevc_state_s *)vdec->private;
12728 hevc_print(hevc, 0,
12729 "====== %s\n", __func__);
12730
12731 hevc_print(hevc, 0,
12732 "width/height (%d/%d), reorder_pic_num %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
12733 hevc->frame_width,
12734 hevc->frame_height,
12735 hevc->sps_num_reorder_pics_0,
12736 get_work_pic_num(hevc),
12737 hevc->video_signal_type_debug,
12738 hevc->is_swap
12739 );
12740
12741 hevc_print(hevc, 0,
12742 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
12743 input_frame_based(vdec),
12744 hevc->eos,
12745 hevc->dec_result,
12746 decode_frame_count[hevc->index],
12747 display_frame_count[hevc->index],
12748 run_count[hevc->index],
12749 not_run_ready[hevc->index],
12750 input_empty[hevc->index]
12751 );
12752
12753 if (vf_get_receiver(vdec->vf_provider_name)) {
12754 enum receviver_start_e state =
12755 vf_notify_receiver(vdec->vf_provider_name,
12756 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12757 NULL);
12758 hevc_print(hevc, 0,
12759 "\nreceiver(%s) state %d\n",
12760 vdec->vf_provider_name,
12761 state);
12762 }
12763
12764 hevc_print(hevc, 0,
12765 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
12766 __func__,
12767 kfifo_len(&hevc->newframe_q),
12768 VF_POOL_SIZE,
12769 kfifo_len(&hevc->display_q),
12770 VF_POOL_SIZE,
12771 hevc->vf_pre_count,
12772 hevc->vf_get_count,
12773 hevc->vf_put_count,
12774 hevc->pic_list_init_flag,
12775 is_new_pic_available(hevc)
12776 );
12777
12778 dump_pic_list(hevc);
12779
12780 for (i = 0; i < BUF_POOL_SIZE; i++) {
12781 hevc_print(hevc, 0,
12782 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12783 i,
12784 hevc->m_BUF[i].start_adr,
12785 hevc->m_BUF[i].size,
12786 hevc->m_BUF[i].used_flag);
12787 }
12788
12789 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12790 hevc_print(hevc, 0,
12791 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12792 i,
12793 hevc->m_mv_BUF[i].start_adr,
12794 hevc->m_mv_BUF[i].size,
12795 hevc->m_mv_BUF[i].used_flag);
12796 }
12797
12798 hevc_print(hevc, 0,
12799 "HEVC_DEC_STATUS_REG=0x%x\n",
12800 READ_VREG(HEVC_DEC_STATUS_REG));
12801 hevc_print(hevc, 0,
12802 "HEVC_MPC_E=0x%x\n",
12803 READ_VREG(HEVC_MPC_E));
12804 hevc_print(hevc, 0,
12805 "HEVC_DECODE_MODE=0x%x\n",
12806 READ_VREG(HEVC_DECODE_MODE));
12807 hevc_print(hevc, 0,
12808 "HEVC_DECODE_MODE2=0x%x\n",
12809 READ_VREG(HEVC_DECODE_MODE2));
12810 hevc_print(hevc, 0,
12811 "NAL_SEARCH_CTL=0x%x\n",
12812 READ_VREG(NAL_SEARCH_CTL));
12813 hevc_print(hevc, 0,
12814 "HEVC_PARSER_LCU_START=0x%x\n",
12815 READ_VREG(HEVC_PARSER_LCU_START));
12816 hevc_print(hevc, 0,
12817 "HEVC_DECODE_SIZE=0x%x\n",
12818 READ_VREG(HEVC_DECODE_SIZE));
12819 hevc_print(hevc, 0,
12820 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
12821 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
12822 hevc_print(hevc, 0,
12823 "HEVC_STREAM_START_ADDR=0x%x\n",
12824 READ_VREG(HEVC_STREAM_START_ADDR));
12825 hevc_print(hevc, 0,
12826 "HEVC_STREAM_END_ADDR=0x%x\n",
12827 READ_VREG(HEVC_STREAM_END_ADDR));
12828 hevc_print(hevc, 0,
12829 "HEVC_STREAM_LEVEL=0x%x\n",
12830 READ_VREG(HEVC_STREAM_LEVEL));
12831 hevc_print(hevc, 0,
12832 "HEVC_STREAM_WR_PTR=0x%x\n",
12833 READ_VREG(HEVC_STREAM_WR_PTR));
12834 hevc_print(hevc, 0,
12835 "HEVC_STREAM_RD_PTR=0x%x\n",
12836 READ_VREG(HEVC_STREAM_RD_PTR));
12837 hevc_print(hevc, 0,
12838 "PARSER_VIDEO_RP=0x%x\n",
12839 READ_PARSER_REG(PARSER_VIDEO_RP));
12840 hevc_print(hevc, 0,
12841 "PARSER_VIDEO_WP=0x%x\n",
12842 READ_PARSER_REG(PARSER_VIDEO_WP));
12843
12844 if (input_frame_based(vdec) &&
12845 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
12846 ) {
12847 int jj;
12848 if (hevc->chunk && hevc->chunk->block &&
12849 hevc->chunk->size > 0) {
12850 u8 *data = NULL;
12851 if (!hevc->chunk->block->is_mapped)
12852 data = codec_mm_vmap(hevc->chunk->block->start +
12853 hevc->chunk->offset, hevc->chunk->size);
12854 else
12855 data = ((u8 *)hevc->chunk->block->start_virt)
12856 + hevc->chunk->offset;
12857 hevc_print(hevc, 0,
12858 "frame data size 0x%x\n",
12859 hevc->chunk->size);
12860 for (jj = 0; jj < hevc->chunk->size; jj++) {
12861 if ((jj & 0xf) == 0)
12862 hevc_print(hevc,
12863 PRINT_FRAMEBASE_DATA,
12864 "%06x:", jj);
12865 hevc_print_cont(hevc,
12866 PRINT_FRAMEBASE_DATA,
12867 "%02x ", data[jj]);
12868 if (((jj + 1) & 0xf) == 0)
12869 hevc_print_cont(hevc,
12870 PRINT_FRAMEBASE_DATA,
12871 "\n");
12872 }
12873
12874 if (!hevc->chunk->block->is_mapped)
12875 codec_mm_unmap_phyaddr(data);
12876 }
12877 }
12878
12879}
12880
12881
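/*
 * ammvdec_h265_probe() - probe for the multi-instance driver: allocate the
 * per-instance state, wire the vdec callbacks (run/run_ready/reset/irq/
 * dump_state), parse the optional config string (double write mode, v4l
 * parameters, buffer margins), set up MMU and workspace buffers and run
 * vh265_init().
 */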
12882static int ammvdec_h265_probe(struct platform_device *pdev)
12883{
12884
12885 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12886 struct hevc_state_s *hevc = NULL;
12887 int ret;
12888#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12889 int config_val;
12890#endif
12891 if (pdata == NULL) {
12892 pr_info("\nammvdec_h265 memory resource undefined.\n");
12893 return -EFAULT;
12894 }
12895
12896 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
12897 sizeof(struct hevc_state_s), GFP_KERNEL); */
12898 hevc = vmalloc(sizeof(struct hevc_state_s));
12899 if (hevc == NULL) {
12900 pr_info("\nammvdec_h265 device data allocation failed\n");
12901 return -ENOMEM;
12902 }
12903 memset(hevc, 0, sizeof(struct hevc_state_s));
12904
12905 /* the ctx from v4l2 driver. */
12906 hevc->v4l2_ctx = pdata->private;
12907
12908 pdata->private = hevc;
12909 pdata->dec_status = vh265_dec_status;
12910 /* pdata->set_trickmode = set_trickmode; */
12911 pdata->run_ready = run_ready;
12912 pdata->run = run;
12913 pdata->reset = reset;
12914 pdata->irq_handler = vh265_irq_cb;
12915 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
12916 pdata->dump_state = vh265_dump_state;
12917
12918 hevc->index = pdev->id;
12919 hevc->m_ins_flag = 1;
12920
12921 if (pdata->use_vfm_path) {
12922 snprintf(pdata->vf_provider_name,
12923 VDEC_PROVIDER_NAME_SIZE,
12924 VFM_DEC_PROVIDER_NAME);
12925 hevc->frameinfo_enable = 1;
12926 }
12927#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12928 else if (vdec_dual(pdata)) {
12929 struct hevc_state_s *hevc_pair = NULL;
12930
12931 if (dv_toggle_prov_name) /*debug purpose*/
12932 snprintf(pdata->vf_provider_name,
12933 VDEC_PROVIDER_NAME_SIZE,
12934 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
12935 VFM_DEC_DVEL_PROVIDER_NAME);
12936 else
12937 snprintf(pdata->vf_provider_name,
12938 VDEC_PROVIDER_NAME_SIZE,
12939 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
12940 VFM_DEC_DVBL_PROVIDER_NAME);
12941 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
12942 if (pdata->master)
12943 hevc_pair = (struct hevc_state_s *)
12944 pdata->master->private;
12945 else if (pdata->slave)
12946 hevc_pair = (struct hevc_state_s *)
12947 pdata->slave->private;
12948 if (hevc_pair)
12949 hevc->shift_byte_count_lo =
12950 hevc_pair->shift_byte_count_lo;
12951 }
12952#endif
12953 else
12954 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
12955 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
12956
12957 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
12958 &vh265_vf_provider, pdata);
12959
12960 hevc->provider_name = pdata->vf_provider_name;
12961 platform_set_drvdata(pdev, pdata);
12962
12963 hevc->platform_dev = pdev;
12964
12965 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
12966 pdata->config && pdata->config_len) {
12967#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12968 /*use ptr config for double_write_mode, etc*/
12969 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
12970
12971 if (get_config_int(pdata->config, "hevc_double_write_mode",
12972 &config_val) == 0)
12973 hevc->double_write_mode = config_val;
12974 else
12975 hevc->double_write_mode = double_write_mode;
12976
12977 if (get_config_int(pdata->config, "save_buffer_mode",
12978 &config_val) == 0)
12979 hevc->save_buffer_mode = config_val;
12980 else
12981 hevc->save_buffer_mode = 0;
12982
12983 /*use ptr config for max_pic_w, etc*/
12984 if (get_config_int(pdata->config, "hevc_buf_width",
12985 &config_val) == 0) {
12986 hevc->max_pic_w = config_val;
12987 }
12988 if (get_config_int(pdata->config, "hevc_buf_height",
12989 &config_val) == 0) {
12990 hevc->max_pic_h = config_val;
12991 }
12992
12993 if (get_config_int(pdata->config,
12994 "parm_v4l_codec_enable",
12995 &config_val) == 0)
12996 hevc->is_used_v4l = config_val;
12997
12998 if (get_config_int(pdata->config,
12999 "parm_v4l_buffer_margin",
13000 &config_val) == 0)
13001 hevc->dynamic_buf_num_margin = config_val;
13002
13003 if (get_config_int(pdata->config,
13004 "parm_v4l_canvas_mem_mode",
13005 &config_val) == 0)
13006 hevc->mem_map_mode = config_val;
13007#endif
13008 } else {
13009 if (pdata->sys_info)
13010 hevc->vh265_amstream_dec_info = *pdata->sys_info;
13011 else {
13012 hevc->vh265_amstream_dec_info.width = 0;
13013 hevc->vh265_amstream_dec_info.height = 0;
13014 hevc->vh265_amstream_dec_info.rate = 30;
13015 }
13016 hevc->double_write_mode = double_write_mode;
13017 }
13018 if (!hevc->is_used_v4l) {
13019 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
13020 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
13021 else
13022 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
13023
13024 hevc->mem_map_mode = mem_map_mode;
13025 }
13026
13027 if (mmu_enable_force == 0) {
13028 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
13029 hevc->mmu_enable = 0;
13030 else
13031 hevc->mmu_enable = 1;
13032 }
13033
13034 if (init_mmu_buffers(hevc) < 0) {
13035 hevc_print(hevc, 0,
13036 "\n 265 mmu init failed!\n");
13037 mutex_unlock(&vh265_mutex);
13038 /* devm_kfree(&pdev->dev, (void *)hevc);*/
13039 if (hevc)
13040 vfree((void *)hevc);
13041 pdata->dec_status = NULL;
13042 return -EFAULT;
13043 }
13044#if 0
13045 hevc->buf_start = pdata->mem_start;
13046 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
13047#else
13048
13049 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
13050 BMMU_WORKSPACE_ID, work_buf_size,
13051 DRIVER_NAME, &hevc->buf_start);
13052 if (ret < 0) {
13053 uninit_mmu_buffers(hevc);
13054 /* devm_kfree(&pdev->dev, (void *)hevc); */
13055 if (hevc)
13056 vfree((void *)hevc);
13057 pdata->dec_status = NULL;
13058 mutex_unlock(&vh265_mutex);
13059 return ret;
13060 }
13061 hevc->buf_size = work_buf_size;
13062#endif
13063 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
13064 (parser_sei_enable & 0x100) == 0)
13065 parser_sei_enable = 7;
13066 hevc->init_flag = 0;
13067 hevc->first_sc_checked = 0;
13068 hevc->uninit_list = 0;
13069 hevc->fatal_error = 0;
13070 hevc->show_frame_num = 0;
13071
13072 /*
13073 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
13074 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
13075 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
13076 */
13077 if (get_dbg_flag(hevc)) {
13078 hevc_print(hevc, 0,
13079 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
13080 hevc->buf_start, hevc->buf_size);
13081 }
13082
13083 hevc_print(hevc, 0,
13084 "dynamic_buf_num_margin=%d\n",
13085 hevc->dynamic_buf_num_margin);
13086 hevc_print(hevc, 0,
13087 "double_write_mode=%d\n",
13088 hevc->double_write_mode);
13089
13090 hevc->cma_dev = pdata->cma_dev;
13091
13092 if (vh265_init(pdata) < 0) {
13093 hevc_print(hevc, 0,
13094 "\namvdec_h265 init failed.\n");
13095 hevc_local_uninit(hevc);
13096 uninit_mmu_buffers(hevc);
13097 /* devm_kfree(&pdev->dev, (void *)hevc); */
13098 if (hevc)
13099 vfree((void *)hevc);
13100 pdata->dec_status = NULL;
13101 return -ENODEV;
13102 }
13103
13104 vdec_set_prepare_level(pdata, start_decode_buf_level);
13105
13106 /*set the max clk for smooth playing...*/
13107 hevc_source_changed(VFORMAT_HEVC,
13108 3840, 2160, 60);
13109 if (pdata->parallel_dec == 1)
13110 vdec_core_request(pdata, CORE_MASK_HEVC);
13111 else
13112 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
13113 | CORE_MASK_COMBINE);
13114
13115 return 0;
13116}
13117
13118static int ammvdec_h265_remove(struct platform_device *pdev)
13119{
13120 struct hevc_state_s *hevc =
13121 (struct hevc_state_s *)
13122 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
13123 struct vdec_s *vdec = hw_to_vdec(hevc);
13124
13125 if (hevc == NULL)
13126 return 0;
13127
13128 if (get_dbg_flag(hevc))
13129 hevc_print(hevc, 0, "%s\r\n", __func__);
13130
13131 vmh265_stop(hevc);
13132
13133 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
13134 if (vdec->parallel_dec == 1)
13135 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
13136 else
13137 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
13138
13139 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
13140
13141 vfree((void *)hevc);
13142 return 0;
13143}
13144
13145static struct platform_driver ammvdec_h265_driver = {
13146 .probe = ammvdec_h265_probe,
13147 .remove = ammvdec_h265_remove,
13148 .driver = {
13149 .name = MULTI_DRIVER_NAME,
13150#ifdef CONFIG_PM
13151 .pm = &h265_pm_ops,
13152#endif
13153 }
13154};
13155#endif
13156
13157static struct codec_profile_t amvdec_h265_profile = {
13158 .name = "hevc",
13159 .profile = ""
13160};
13161
13162static struct codec_profile_t amvdec_h265_profile_single,
13163 amvdec_h265_profile_mult;
13164
13165static struct mconfig h265_configs[] = {
13166 MC_PU32("use_cma", &use_cma),
13167 MC_PU32("bit_depth_luma", &bit_depth_luma),
13168 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
13169 MC_PU32("video_signal_type", &video_signal_type),
13170#ifdef ERROR_HANDLE_DEBUG
13171 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
13172 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
13173#endif
13174 MC_PU32("radr", &radr),
13175 MC_PU32("rval", &rval),
13176 MC_PU32("dbg_cmd", &dbg_cmd),
13177 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
13178 MC_PU32("endian", &endian),
13179 MC_PU32("step", &step),
13180 MC_PU32("udebug_flag", &udebug_flag),
13181 MC_PU32("decode_pic_begin", &decode_pic_begin),
13182 MC_PU32("slice_parse_begin", &slice_parse_begin),
13183 MC_PU32("nal_skip_policy", &nal_skip_policy),
13184 MC_PU32("i_only_flag", &i_only_flag),
13185 MC_PU32("error_handle_policy", &error_handle_policy),
13186 MC_PU32("error_handle_threshold", &error_handle_threshold),
13187 MC_PU32("error_handle_nal_skip_threshold",
13188 &error_handle_nal_skip_threshold),
13189 MC_PU32("error_handle_system_threshold",
13190 &error_handle_system_threshold),
13191 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
13192 MC_PU32("debug", &debug),
13193 MC_PU32("debug_mask", &debug_mask),
13194 MC_PU32("buffer_mode", &buffer_mode),
13195 MC_PU32("double_write_mode", &double_write_mode),
13196 MC_PU32("buf_alloc_width", &buf_alloc_width),
13197 MC_PU32("buf_alloc_height", &buf_alloc_height),
13198 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
13199 MC_PU32("max_buf_num", &max_buf_num),
13200 MC_PU32("buf_alloc_size", &buf_alloc_size),
13201 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
13202 MC_PU32("mem_map_mode", &mem_map_mode),
13203 MC_PU32("enable_mem_saving", &enable_mem_saving),
13204 MC_PU32("force_w_h", &force_w_h),
13205 MC_PU32("force_fps", &force_fps),
13206 MC_PU32("max_decoding_time", &max_decoding_time),
13207 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
13208 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
13209 MC_PU32("interlace_enable", &interlace_enable),
13210 MC_PU32("pts_unstable", &pts_unstable),
13211 MC_PU32("parser_sei_enable", &parser_sei_enable),
13212 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
13213 MC_PU32("decode_timeout_val", &decode_timeout_val),
13214#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13215 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
13216 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
13217 MC_PU32("dv_debug", &dv_debug),
13218#endif
13219};
13220static struct mconfig_node decoder_265_node;
13221
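/*
 * Module init: pick the workspace spec matching the platform (non-4K, 4K,
 * or the larger spec for SM1 and later), register the single- and
 * multi-instance platform drivers, then publish the codec profiles and the
 * "media.decoder" h265 config node.
 */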
13222static int __init amvdec_h265_driver_init_module(void)
13223{
13224 struct BuffInfo_s *p_buf_info;
13225
13226 if (vdec_is_support_4k()) {
13227 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
13228 p_buf_info = &amvh265_workbuff_spec[2];
13229 else
13230 p_buf_info = &amvh265_workbuff_spec[1];
13231 } else
13232 p_buf_info = &amvh265_workbuff_spec[0];
13233
13234 init_buff_spec(NULL, p_buf_info);
13235 work_buf_size =
13236 (p_buf_info->end_adr - p_buf_info->start_adr
13237 + 0xffff) & (~0xffff);
13238
13239 pr_debug("amvdec_h265 module init\n");
13240 error_handle_policy = 0;
13241
13242#ifdef ERROR_HANDLE_DEBUG
13243 dbg_nal_skip_flag = 0;
13244 dbg_nal_skip_count = 0;
13245#endif
13246 udebug_flag = 0;
13247 decode_pic_begin = 0;
13248 slice_parse_begin = 0;
13249 step = 0;
13250 buf_alloc_size = 0;
13251
13252#ifdef MULTI_INSTANCE_SUPPORT
13253 if (platform_driver_register(&ammvdec_h265_driver))
13254 pr_err("failed to register ammvdec_h265 driver\n");
13255
13256#endif
13257 if (platform_driver_register(&amvdec_h265_driver)) {
13258 pr_err("failed to register amvdec_h265 driver\n");
13259 return -ENODEV;
13260 }
13261#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
13262 if (!has_hevc_vdec()) {
13263 /* not support hevc */
13264 amvdec_h265_profile.name = "hevc_unsupport";
13265 }
13266 if (vdec_is_support_4k()) {
13267 if (is_meson_m8m2_cpu()) {
13268 /* m8m2 support 4k */
13269 amvdec_h265_profile.profile = "4k";
13270 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
13271 amvdec_h265_profile.profile =
13272 "8k, 8bit, 10bit, dwrite, compressed";
13273 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
13274 amvdec_h265_profile.profile =
13275 "4k, 8bit, 10bit, dwrite, compressed";
13276 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
13277 amvdec_h265_profile.profile = "4k";
13278 }
13279#endif
13280 if (codec_mm_get_total_size() < 80 * SZ_1M) {
13281 pr_info("amvdec_h265 default mmu enabled.\n");
13282 mmu_enable = 1;
13283 }
13284
13285 vcodec_profile_register(&amvdec_h265_profile);
13286 amvdec_h265_profile_single = amvdec_h265_profile;
13287 amvdec_h265_profile_single.name = "h265";
13288 vcodec_profile_register(&amvdec_h265_profile_single);
13289 amvdec_h265_profile_mult = amvdec_h265_profile;
13290 amvdec_h265_profile_mult.name = "mh265";
13291 vcodec_profile_register(&amvdec_h265_profile_mult);
13292 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
13293 "h265", h265_configs, CONFIG_FOR_RW);
13294 return 0;
13295}
13296
13297static void __exit amvdec_h265_driver_remove_module(void)
13298{
13299 pr_debug("amvdec_h265 module remove.\n");
13300
13301#ifdef MULTI_INSTANCE_SUPPORT
13302 platform_driver_unregister(&ammvdec_h265_driver);
13303#endif
13304 platform_driver_unregister(&amvdec_h265_driver);
13305}
13306
13307/****************************************/
13308/*
13309 *module_param(stat, uint, 0664);
13310 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
13311 */
13312module_param(use_cma, uint, 0664);
13313MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13314
13315module_param(bit_depth_luma, uint, 0664);
13316MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13317
13318module_param(bit_depth_chroma, uint, 0664);
13319MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13320
13321module_param(video_signal_type, uint, 0664);
13322MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13323
13324#ifdef ERROR_HANDLE_DEBUG
13325module_param(dbg_nal_skip_flag, uint, 0664);
13326MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13327
13328module_param(dbg_nal_skip_count, uint, 0664);
13329MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13330#endif
13331
13332module_param(radr, uint, 0664);
13333MODULE_PARM_DESC(radr, "\n radr\n");
13334
13335module_param(rval, uint, 0664);
13336MODULE_PARM_DESC(rval, "\n rval\n");
13337
13338module_param(dbg_cmd, uint, 0664);
13339MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13340
13341module_param(dump_nal, uint, 0664);
13342MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13343
13344module_param(dbg_skip_decode_index, uint, 0664);
13345MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13346
13347module_param(endian, uint, 0664);
13348MODULE_PARM_DESC(endian, "\n endian\n");
13349
13350module_param(step, uint, 0664);
13351MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13352
13353module_param(decode_pic_begin, uint, 0664);
13354MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13355
13356module_param(slice_parse_begin, uint, 0664);
13357MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13358
13359module_param(nal_skip_policy, uint, 0664);
13360MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
13361
13362module_param(i_only_flag, uint, 0664);
13363MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
13364
13365module_param(fast_output_enable, uint, 0664);
13366MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
13367
13368module_param(error_handle_policy, uint, 0664);
13369MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
13370
13371module_param(error_handle_threshold, uint, 0664);
13372MODULE_PARM_DESC(error_handle_threshold,
13373 "\n amvdec_h265 error_handle_threshold\n");
13374
13375module_param(error_handle_nal_skip_threshold, uint, 0664);
13376MODULE_PARM_DESC(error_handle_nal_skip_threshold,
13377 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
13378
13379module_param(error_handle_system_threshold, uint, 0664);
13380MODULE_PARM_DESC(error_handle_system_threshold,
13381 "\n amvdec_h265 error_handle_system_threshold\n");
13382
13383module_param(error_skip_nal_count, uint, 0664);
13384MODULE_PARM_DESC(error_skip_nal_count,
13385 "\n amvdec_h265 error_skip_nal_count\n");
13386
13387module_param(debug, uint, 0664);
13388MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
13389
13390module_param(debug_mask, uint, 0664);
13391MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug_mask\n");
13392
13393module_param(log_mask, uint, 0664);
13394MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
13395
13396module_param(buffer_mode, uint, 0664);
13397MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
13398
13399module_param(double_write_mode, uint, 0664);
13400MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
13401
13402module_param(buf_alloc_width, uint, 0664);
13403MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
13404
13405module_param(buf_alloc_height, uint, 0664);
13406MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
13407
13408module_param(dynamic_buf_num_margin, uint, 0664);
13409MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
13410
13411module_param(max_buf_num, uint, 0664);
13412MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
13413
13414module_param(buf_alloc_size, uint, 0664);
13415MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
13416
13417#ifdef CONSTRAIN_MAX_BUF_NUM
13418module_param(run_ready_max_vf_only_num, uint, 0664);
13419MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");
13420
13421module_param(run_ready_display_q_num, uint, 0664);
13422MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");
13423
13424module_param(run_ready_max_buf_num, uint, 0664);
13425MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
13426#endif
13427
13428#if 0
13429module_param(re_config_pic_flag, uint, 0664);
13430MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
13431#endif
13432
13433module_param(buffer_mode_dbg, uint, 0664);
13434MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");
13435
13436module_param(mem_map_mode, uint, 0664);
13437MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");
13438
13439module_param(enable_mem_saving, uint, 0664);
13440MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");
13441
13442module_param(force_w_h, uint, 0664);
13443MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
13444
13445module_param(force_fps, uint, 0664);
13446MODULE_PARM_DESC(force_fps, "\n force_fps\n");
13447
13448module_param(max_decoding_time, uint, 0664);
13449MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");
13450
13451module_param(prefix_aux_buf_size, uint, 0664);
13452MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");
13453
13454module_param(suffix_aux_buf_size, uint, 0664);
13455MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");
13456
13457module_param(interlace_enable, uint, 0664);
13458MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
13459module_param(pts_unstable, uint, 0664);
13460MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
13461module_param(parser_sei_enable, uint, 0664);
13462MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");
13463
13464#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13465module_param(parser_dolby_vision_enable, uint, 0664);
13466MODULE_PARM_DESC(parser_dolby_vision_enable,
13467 "\n parser_dolby_vision_enable\n");
13468
13469module_param(dolby_meta_with_el, uint, 0664);
13470MODULE_PARM_DESC(dolby_meta_with_el,
13471 "\n dolby_meta_with_el\n");
13472
13473module_param(dolby_el_flush_th, uint, 0664);
13474MODULE_PARM_DESC(dolby_el_flush_th,
13475 "\n dolby_el_flush_th\n");
13476#endif
13477module_param(mmu_enable, uint, 0664);
13478MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");
13479
13480module_param(mmu_enable_force, uint, 0664);
13481MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");
13482
13483#ifdef MULTI_INSTANCE_SUPPORT
13484module_param(start_decode_buf_level, int, 0664);
13485MODULE_PARM_DESC(start_decode_buf_level,
13486 "\n h265 start_decode_buf_level\n");
13487
13488module_param(decode_timeout_val, uint, 0664);
13489MODULE_PARM_DESC(decode_timeout_val,
13490 "\n h265 decode_timeout_val\n");
13491
13492module_param(data_resend_policy, uint, 0664);
13493MODULE_PARM_DESC(data_resend_policy,
13494 "\n h265 data_resend_policy\n");
13495
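/*
 * Per-instance statistics arrays, indexed up to max_decode_instance_num
 * (frame counts, run counts, interval and status counters).
 */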
13496module_param_array(decode_frame_count, uint,
13497 &max_decode_instance_num, 0664);
13498
13499module_param_array(display_frame_count, uint,
13500 &max_decode_instance_num, 0664);
13501
13502module_param_array(max_process_time, uint,
13503 &max_decode_instance_num, 0664);
13504
13505module_param_array(max_get_frame_interval,
13506 uint, &max_decode_instance_num, 0664);
13507
13508module_param_array(run_count, uint,
13509 &max_decode_instance_num, 0664);
13510
13511module_param_array(input_empty, uint,
13512 &max_decode_instance_num, 0664);
13513
13514module_param_array(not_run_ready, uint,
13515 &max_decode_instance_num, 0664);
13516
13517module_param_array(ref_frame_mark_flag, uint,
13518 &max_decode_instance_num, 0664);
13519
13520#endif
13521#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13522module_param(dv_toggle_prov_name, uint, 0664);
13523MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");
13524
13525module_param(dv_debug, uint, 0664);
13526MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");
13527
13528module_param(force_bypass_dvenl, uint, 0664);
13529MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
13530#endif
13531
13532#ifdef AGAIN_HAS_THRESHOLD
13533module_param(again_threshold, uint, 0664);
13534MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
13535#endif
13536
13537module_param(force_disp_pic_index, int, 0664);
13538MODULE_PARM_DESC(force_disp_pic_index,
13539 "\n amvdec_h265 force_disp_pic_index\n");
13540
13541module_param(frmbase_cont_bitlevel, uint, 0664);
13542MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");
13543
13544module_param(udebug_flag, uint, 0664);
13545MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
13546
13547module_param(udebug_pause_pos, uint, 0664);
13548MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
13549
13550module_param(udebug_pause_val, uint, 0664);
13551MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
13552
13553module_param(pre_decode_buf_level, int, 0664);
13554MODULE_PARM_DESC(pre_decode_buf_level, "\n ammvdec_h265 pre_decode_buf_level\n");
13555
13556module_param(udebug_pause_decode_idx, uint, 0664);
13557MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
13558
13559module_param(disp_vframe_valve_level, uint, 0664);
13560MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");
13561
13562module_param(pic_list_debug, uint, 0664);
13563MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");
13564
13565module_param(without_display_mode, uint, 0664);
13566MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");
13567
13568module_init(amvdec_h265_driver_init_module);
13569module_exit(amvdec_h265_driver_remove_module);
13570
13571MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
13572MODULE_LICENSE("GPL");
13573MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");
13574