path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
blob: 65295a6597c3cd10dc91037f011ab345c2271f81
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50#include <media/v4l2-mem2mem.h>
51
52#define CONSTRAIN_MAX_BUF_NUM
53
54#define SWAP_HEVC_UCODE
55#define DETREFILL_ENABLE
56
57#define AGAIN_HAS_THRESHOLD
58/*#define TEST_NO_BUF*/
59#define HEVC_PIC_STRUCT_SUPPORT
60#define MULTI_INSTANCE_SUPPORT
61#define USE_UNINIT_SEMA
62
63 /* .buf_size = 0x100000*16,
64 //4k2k , 0x100000 per buffer */
65 /* 4096x2304 , 0x120000 per buffer */
66#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
67#define MPRED_4K_MV_BUF_SIZE (0x120000)
68#define MPRED_MV_BUF_SIZE (0x40000)
69
70#define MMU_COMPRESS_HEADER_SIZE 0x48000
71#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
72
73#define MAX_FRAME_4K_NUM 0x1200
74#define MAX_FRAME_8K_NUM (0x1200*4)
75
76//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
77#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
78
79#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
80
81#define HEVC_CM_HEADER_START_ADDR 0x3628
82#define HEVC_SAO_MMU_VH1_ADDR 0x363b
83#define HEVC_SAO_MMU_VH0_ADDR 0x363a
84
85#define HEVC_DBLK_CFGB 0x350b
86#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
87#define SWAP_HEVC_OFFSET (3 * 0x1000)
88
89#define MEM_NAME "codec_265"
90/* #include <mach/am_regs.h> */
91#include <linux/amlogic/media/utils/vdec_reg.h>
92
93#include "../utils/vdec.h"
94#include "../utils/amvdec.h"
95#include <linux/amlogic/media/video_sink/video.h>
96#include <linux/amlogic/media/codec_mm/configs.h>
97
98#define SEND_LMEM_WITH_RPM
99#define SUPPORT_10BIT
100/* #define ERROR_HANDLE_DEBUG */
101
102#ifndef STAT_KTHREAD
103#define STAT_KTHREAD 0x40
104#endif
105
106#ifdef MULTI_INSTANCE_SUPPORT
107#define MAX_DECODE_INSTANCE_NUM 9
108#define MULTI_DRIVER_NAME "ammvdec_h265"
109#endif
110#define DRIVER_NAME "amvdec_h265"
111#define MODULE_NAME "amvdec_h265"
112#define DRIVER_HEADER_NAME "amvdec_h265_header"
113
114#define PUT_INTERVAL (HZ/100)
115#define ERROR_SYSTEM_RESET_COUNT 200
116
117#define PTS_NORMAL 0
118#define PTS_NONE_REF_USE_DURATION 1
119
120#define PTS_MODE_SWITCHING_THRESHOLD 3
121#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
122
123#define DUR2PTS(x) ((x)*90/96)
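/*
 * Worked example (sketch, assuming the usual amports conventions):
 * frame durations are counted in 1/96000 s units while PTS runs at
 * 90 kHz, so DUR2PTS() rescales by 90/96.  For a 25 fps stream,
 * frame_dur = 96000/25 = 3840 and DUR2PTS(3840) = 3840*90/96 = 3600,
 * i.e. exactly 90000/25 PTS ticks per frame.
 */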
124
125#define MAX_SIZE_8K (8192 * 4608)
126#define MAX_SIZE_4K (4096 * 2304)
127
128#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
129#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
130
131#define SEI_UserDataITU_T_T35 4
132#define INVALID_IDX -1 /* Invalid buffer index.*/
133
134static struct semaphore h265_sema;
135
136struct hevc_state_s;
137static int hevc_print(struct hevc_state_s *hevc,
138 int debug_flag, const char *fmt, ...);
139static int hevc_print_cont(struct hevc_state_s *hevc,
140 int debug_flag, const char *fmt, ...);
141static int vh265_vf_states(struct vframe_states *states, void *);
142static struct vframe_s *vh265_vf_peek(void *);
143static struct vframe_s *vh265_vf_get(void *);
144static void vh265_vf_put(struct vframe_s *, void *);
145static int vh265_event_cb(int type, void *data, void *private_data);
146
147static int vh265_stop(struct hevc_state_s *hevc);
148#ifdef MULTI_INSTANCE_SUPPORT
149static int vmh265_stop(struct hevc_state_s *hevc);
150static s32 vh265_init(struct vdec_s *vdec);
151static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
152static void reset_process_time(struct hevc_state_s *hevc);
153static void start_process_time(struct hevc_state_s *hevc);
154static void restart_process_time(struct hevc_state_s *hevc);
155static void timeout_process(struct hevc_state_s *hevc);
156#else
157static s32 vh265_init(struct hevc_state_s *hevc);
158#endif
159static void vh265_prot_init(struct hevc_state_s *hevc);
160static int vh265_local_init(struct hevc_state_s *hevc);
161static void vh265_check_timer_func(unsigned long arg);
162static void config_decode_mode(struct hevc_state_s *hevc);
163
164static const char vh265_dec_id[] = "vh265-dev";
165
166#define PROVIDER_NAME "decoder.h265"
167#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
168
169static const struct vframe_operations_s vh265_vf_provider = {
170 .peek = vh265_vf_peek,
171 .get = vh265_vf_get,
172 .put = vh265_vf_put,
173 .event_cb = vh265_event_cb,
174 .vf_states = vh265_vf_states,
175};
176
177static struct vframe_provider_s vh265_vf_prov;
178
179static u32 bit_depth_luma;
180static u32 bit_depth_chroma;
181static u32 video_signal_type;
182
183static int start_decode_buf_level = 0x8000;
184
185static unsigned int decode_timeout_val = 200;
186
187static u32 run_ready_min_buf_num = 2;
188
 189/*data_resend_policy:
 190 bit 0, for stream-based input, resend data when decoding buf is empty
 191*/
192static u32 data_resend_policy = 1;
193
194#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
195/*
196static const char * const video_format_names[] = {
197 "component", "PAL", "NTSC", "SECAM",
198 "MAC", "unspecified", "unspecified", "unspecified"
199};
200
201static const char * const color_primaries_names[] = {
202 "unknown", "bt709", "undef", "unknown",
203 "bt470m", "bt470bg", "smpte170m", "smpte240m",
204 "film", "bt2020"
205};
206
207static const char * const transfer_characteristics_names[] = {
208 "unknown", "bt709", "undef", "unknown",
209 "bt470m", "bt470bg", "smpte170m", "smpte240m",
210 "linear", "log100", "log316", "iec61966-2-4",
211 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
212 "smpte-st-2084", "smpte-st-428"
213};
214
215static const char * const matrix_coeffs_names[] = {
216 "GBR", "bt709", "undef", "unknown",
217 "fcc", "bt470bg", "smpte170m", "smpte240m",
218 "YCgCo", "bt2020nc", "bt2020c"
219};
220*/
221#ifdef SUPPORT_10BIT
222#define HEVC_CM_BODY_START_ADDR 0x3626
223#define HEVC_CM_BODY_LENGTH 0x3627
224#define HEVC_CM_HEADER_LENGTH 0x3629
225#define HEVC_CM_HEADER_OFFSET 0x362b
226#define HEVC_SAO_CTRL9 0x362d
227#define LOSLESS_COMPRESS_MODE
228/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
229/* double_write_mode:
230 * 0, no double write;
231 * 1, 1:1 ratio;
232 * 2, (1/4):(1/4) ratio;
 233 * 3, (1/4):(1/4) ratio, with the compressed frame included as well
234 * 4, (1/2):(1/2) ratio;
235 * 0x10, double write only
 236 * 0x100, if > 1080p, use mode 4, else use mode 1;
 237 * 0x200, if > 1080p, use mode 2, else use mode 1;
238 * 0x300, if > 720p, use mode 4, else use mode 1;
239 */
240static u32 double_write_mode;
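/*
 * Example (informal): with double_write_mode = 0x100 the ratio is picked
 * per stream in get_double_write_mode() further below, e.g. a 3840x2160
 * sequence (> 1080p) resolves to mode 4 ((1/2):(1/2) double write) while
 * a 1920x1080 sequence falls back to mode 1 (1:1).
 */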
241
242/*#define DECOMP_HEADR_SURGENT*/
243
244static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
245static u32 enable_mem_saving = 1;
246static u32 workaround_enable;
247static u32 force_w_h;
248#endif
249static u32 force_fps;
250static u32 pts_unstable;
251#define H265_DEBUG_BUFMGR 0x01
252#define H265_DEBUG_BUFMGR_MORE 0x02
253#define H265_DEBUG_DETAIL 0x04
254#define H265_DEBUG_REG 0x08
255#define H265_DEBUG_MAN_SEARCH_NAL 0x10
256#define H265_DEBUG_MAN_SKIP_NAL 0x20
257#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
258#define H265_DEBUG_FORCE_CLK 0x80
259#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
260#define H265_DEBUG_NO_DISPLAY 0x200
261#define H265_DEBUG_DISCARD_NAL 0x400
262#define H265_DEBUG_OUT_PTS 0x800
263#define H265_DEBUG_DUMP_PIC_LIST 0x1000
264#define H265_DEBUG_PRINT_SEI 0x2000
265#define H265_DEBUG_PIC_STRUCT 0x4000
266#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
267#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
268#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
269#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
270#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
271#define H265_DEBUG_HW_RESET 0x100000
272#define H265_CFG_CANVAS_IN_DECODE 0x200000
273#define H265_DEBUG_DV 0x400000
274#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
275#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
276#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
277#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
278#ifdef MULTI_INSTANCE_SUPPORT
279#define PRINT_FLAG_ERROR 0x0
280#define IGNORE_PARAM_FROM_CONFIG 0x08000000
281#define PRINT_FRAMEBASE_DATA 0x10000000
282#define PRINT_FLAG_VDEC_STATUS 0x20000000
283#define PRINT_FLAG_VDEC_DETAIL 0x40000000
284#define PRINT_FLAG_V4L_DETAIL 0x80000000
285#endif
286
287#define BUF_POOL_SIZE 32
288#define MAX_BUF_NUM 24
289#define MAX_REF_PIC_NUM 24
290#define MAX_REF_ACTIVE 16
291
292#ifdef MV_USE_FIXED_BUF
293#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
294#define VF_BUFFER_IDX(n) (n)
295#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
296#else
297#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
298#define VF_BUFFER_IDX(n) (n)
299#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
300#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
301#endif
302
303#define HEVC_MV_INFO 0x310d
304#define HEVC_QP_INFO 0x3137
305#define HEVC_SKIP_INFO 0x3136
306
307const u32 h265_version = 201602101;
308static u32 debug_mask = 0xffffffff;
309static u32 log_mask;
310static u32 debug;
311static u32 radr;
312static u32 rval;
313static u32 dbg_cmd;
314static u32 dump_nal;
315static u32 dbg_skip_decode_index;
316static u32 endian = 0xff0;
317#ifdef ERROR_HANDLE_DEBUG
318static u32 dbg_nal_skip_flag;
319 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
320static u32 dbg_nal_skip_count;
321#endif
322/*for debug*/
323/*
324 udebug_flag:
325 bit 0, enable ucode print
326 bit 1, enable ucode detail print
327 bit [31:16] not 0, pos to dump lmem
328 bit 2, pop bits to lmem
329 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
330*/
331static u32 udebug_flag;
332/*
333 when udebug_flag[1:0] is not 0
334 udebug_pause_pos not 0,
335 pause position
336*/
337static u32 udebug_pause_pos;
338/*
339 when udebug_flag[1:0] is not 0
340 and udebug_pause_pos is not 0,
341 pause only when DEBUG_REG2 is equal to this val
342*/
343static u32 udebug_pause_val;
344
345static u32 udebug_pause_decode_idx;
346
347static u32 decode_pic_begin;
348static uint slice_parse_begin;
349static u32 step;
350static bool is_reset;
351
352#ifdef CONSTRAIN_MAX_BUF_NUM
353static u32 run_ready_max_vf_only_num;
354static u32 run_ready_display_q_num;
 355 /*0: do not check
 356 0xff: use work_pic_num
 357 */
358static u32 run_ready_max_buf_num = 0xff;
359#endif
360
361static u32 dynamic_buf_num_margin = 7;
362static u32 buf_alloc_width;
363static u32 buf_alloc_height;
364
365static u32 max_buf_num = 16;
366static u32 buf_alloc_size;
367/*static u32 re_config_pic_flag;*/
368/*
369 *bit[0]: 0,
370 *bit[1]: 0, always release cma buffer when stop
371 *bit[1]: 1, never release cma buffer when stop
372 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
 373 *do not release cma buffer if blackout is not 1
374 *
375 *bit[2]: 0, when start decoding, check current displayed buffer
376 * (only for buffer decoded by h265) if blackout is 0
377 * 1, do not check current displayed buffer
378 *
379 *bit[3]: 1, if blackout is not 1, do not release current
380 * displayed cma buffer always.
381 */
382/* set to 1 for fast play;
383 * set to 8 for other case of "keep last frame"
384 */
385static u32 buffer_mode = 1;
386
387/* buffer_mode_dbg: debug only*/
388static u32 buffer_mode_dbg = 0xffff0000;
389/**/
390/*
391 *bit[1:0]PB_skip_mode: 0, start decoding at begin;
392 *1, start decoding after first I;
 393 *2, only decode and display non-error pictures;
 394 *3, start decoding and display after IDR, etc.
395 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
396 *only for mode 0 and 1.
397 */
398static u32 nal_skip_policy = 2;
399
400/*
401 *bit 0, 1: only display I picture;
402 *bit 1, 1: only decode I picture;
403 */
404static u32 i_only_flag;
405
406/*
407bit 0, fast output first I picture
408*/
409static u32 fast_output_enable = 1;
410
411static u32 frmbase_cont_bitlevel = 0x60;
412
413/*
 414use_cma: 1, use both reserved memory and cma for buffers
4152, only use cma for buffers
416*/
417static u32 use_cma = 2;
418
419#define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
420static u32 prefix_aux_buf_size = (16 * 1024);
421static u32 suffix_aux_buf_size;
422
423static u32 max_decoding_time;
424/*
425 *error handling
426 */
427/*error_handle_policy:
428 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
429 *1, skip error_skip_nal_count nals before error recovery;
430 *bit 1 (valid only when bit0 == 1):
431 *1, wait vps/sps/pps after error recovery;
432 *bit 2 (valid only when bit0 == 0):
433 *0, auto search after error recovery (hevc_recover() called);
434 *1, manual search after error recovery
435 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
436 *
437 *bit 4: 0, set error_mark after reset/recover
438 * 1, do not set error_mark after reset/recover
439 *bit 5: 0, check total lcu for every picture
440 * 1, do not check total lcu
441 *bit 6: 0, do not check head error
442 * 1, check head error
443 *
444 */
445
446static u32 error_handle_policy;
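/*
 * Example (taken from this file): bit 6 (0x40) of error_handle_policy
 * gates the PCM bit-depth sanity check in check_head_error() further
 * below, which starts with
 *     if ((error_handle_policy & 0x40) == 0)
 *             return;
 * so setting error_handle_policy to 0x40 enables that extra head check.
 */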
447static u32 error_skip_nal_count = 6;
448static u32 error_handle_threshold = 30;
449static u32 error_handle_nal_skip_threshold = 10;
450static u32 error_handle_system_threshold = 30;
451static u32 interlace_enable = 1;
452static u32 fr_hint_status;
453
454 /*
455 *parser_sei_enable:
456 * bit 0, sei;
457 * bit 1, sei_suffix (fill aux buf)
458 * bit 2, fill sei to aux buf (when bit 0 is 1)
459 * bit 8, debug flag
460 */
461static u32 parser_sei_enable;
462#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
463static u32 parser_dolby_vision_enable = 1;
464static u32 dolby_meta_with_el;
465static u32 dolby_el_flush_th = 2;
466#endif
467/* this is only for h265 mmu enable */
468
469static u32 mmu_enable = 1;
470static u32 mmu_enable_force;
471static u32 work_buf_size;
472static unsigned int force_disp_pic_index;
473static unsigned int disp_vframe_valve_level;
474static int pre_decode_buf_level = 0x1000;
475static unsigned int pic_list_debug;
476
477
478#ifdef MULTI_INSTANCE_SUPPORT
479static unsigned int max_decode_instance_num
480 = MAX_DECODE_INSTANCE_NUM;
481static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
482static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
483static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
484static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
485static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
486static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
487static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
488static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
489{1, 1, 1, 1, 1, 1, 1, 1, 1};
490
491#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
492static unsigned char get_idx(struct hevc_state_s *hevc);
493#endif
494
495#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
496static u32 dv_toggle_prov_name;
497
498static u32 dv_debug;
499
500static u32 force_bypass_dvenl;
501#endif
502#endif
503
504
505#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
506#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
507#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
508#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
509#else
510#define get_dbg_flag(hevc) debug
511#define get_dbg_flag2(hevc) debug
512#define is_log_enable(hevc) (log_mask ? 1 : 0)
513#define get_valid_double_write_mode(hevc) double_write_mode
514#define get_buf_alloc_width(hevc) buf_alloc_width
515#define get_buf_alloc_height(hevc) buf_alloc_height
516#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
517#endif
518#define get_buffer_mode(hevc) buffer_mode
519
520
521DEFINE_SPINLOCK(lock);
522struct task_struct *h265_task = NULL;
523#undef DEBUG_REG
524#ifdef DEBUG_REG
525void WRITE_VREG_DBG(unsigned adr, unsigned val)
526{
527 if (debug & H265_DEBUG_REG)
528 pr_info("%s(%x, %x)\n", __func__, adr, val);
529 WRITE_VREG(adr, val);
530}
531
532#undef WRITE_VREG
533#define WRITE_VREG WRITE_VREG_DBG
534#endif
535
536static DEFINE_MUTEX(vh265_mutex);
537
538static DEFINE_MUTEX(vh265_log_mutex);
539
540static struct vdec_info *gvs;
541
542static u32 without_display_mode;
543
544/**************************************************
545 *
546 *h265 buffer management include
547 *
548 ***************************************************
549 */
550enum NalUnitType {
551 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
552 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
553
554 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
555 /* Current name in the spec: TSA_R */
556 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
557
558 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
559 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
560
561 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
562 /* Current name in the spec: RADL_R */
563 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
564
565 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
566 /* Current name in the spec: RASL_R */
567 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
568
569 NAL_UNIT_RESERVED_10,
570 NAL_UNIT_RESERVED_11,
571 NAL_UNIT_RESERVED_12,
572 NAL_UNIT_RESERVED_13,
573 NAL_UNIT_RESERVED_14,
574 NAL_UNIT_RESERVED_15,
575
576 /* Current name in the spec: BLA_W_LP */
577 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
578 /* Current name in the spec: BLA_W_DLP */
579 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
580 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
581 /* Current name in the spec: IDR_W_DLP */
582 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
583 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
584 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
585 NAL_UNIT_RESERVED_22,
586 NAL_UNIT_RESERVED_23,
587
588 NAL_UNIT_RESERVED_24,
589 NAL_UNIT_RESERVED_25,
590 NAL_UNIT_RESERVED_26,
591 NAL_UNIT_RESERVED_27,
592 NAL_UNIT_RESERVED_28,
593 NAL_UNIT_RESERVED_29,
594 NAL_UNIT_RESERVED_30,
595 NAL_UNIT_RESERVED_31,
596
597 NAL_UNIT_VPS, /* 32 */
598 NAL_UNIT_SPS, /* 33 */
599 NAL_UNIT_PPS, /* 34 */
600 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
601 NAL_UNIT_EOS, /* 36 */
602 NAL_UNIT_EOB, /* 37 */
603 NAL_UNIT_FILLER_DATA, /* 38 */
604 NAL_UNIT_SEI, /* 39 Prefix SEI */
605 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
606 NAL_UNIT_RESERVED_41,
607 NAL_UNIT_RESERVED_42,
608 NAL_UNIT_RESERVED_43,
609 NAL_UNIT_RESERVED_44,
610 NAL_UNIT_RESERVED_45,
611 NAL_UNIT_RESERVED_46,
612 NAL_UNIT_RESERVED_47,
613 NAL_UNIT_UNSPECIFIED_48,
614 NAL_UNIT_UNSPECIFIED_49,
615 NAL_UNIT_UNSPECIFIED_50,
616 NAL_UNIT_UNSPECIFIED_51,
617 NAL_UNIT_UNSPECIFIED_52,
618 NAL_UNIT_UNSPECIFIED_53,
619 NAL_UNIT_UNSPECIFIED_54,
620 NAL_UNIT_UNSPECIFIED_55,
621 NAL_UNIT_UNSPECIFIED_56,
622 NAL_UNIT_UNSPECIFIED_57,
623 NAL_UNIT_UNSPECIFIED_58,
624 NAL_UNIT_UNSPECIFIED_59,
625 NAL_UNIT_UNSPECIFIED_60,
626 NAL_UNIT_UNSPECIFIED_61,
627 NAL_UNIT_UNSPECIFIED_62,
628 NAL_UNIT_UNSPECIFIED_63,
629 NAL_UNIT_INVALID,
630};
631
632/* --------------------------------------------------- */
633/* Amrisc Software Interrupt */
634/* --------------------------------------------------- */
635#define AMRISC_STREAM_EMPTY_REQ 0x01
636#define AMRISC_PARSER_REQ 0x02
637#define AMRISC_MAIN_REQ 0x04
638
639/* --------------------------------------------------- */
640/* HEVC_DEC_STATUS define */
641/* --------------------------------------------------- */
642#define HEVC_DEC_IDLE 0x0
643#define HEVC_NAL_UNIT_VPS 0x1
644#define HEVC_NAL_UNIT_SPS 0x2
645#define HEVC_NAL_UNIT_PPS 0x3
646#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
647#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
648#define HEVC_SLICE_DECODING 0x6
649#define HEVC_NAL_UNIT_SEI 0x7
650#define HEVC_SLICE_SEGMENT_DONE 0x8
651#define HEVC_NAL_SEARCH_DONE 0x9
652#define HEVC_DECPIC_DATA_DONE 0xa
653#define HEVC_DECPIC_DATA_ERROR 0xb
654#define HEVC_SEI_DAT 0xc
655#define HEVC_SEI_DAT_DONE 0xd
656#define HEVC_NAL_DECODE_DONE 0xe
657#define HEVC_OVER_DECODE 0xf
658
659#define HEVC_DATA_REQUEST 0x12
660
661#define HEVC_DECODE_BUFEMPTY 0x20
662#define HEVC_DECODE_TIMEOUT 0x21
663#define HEVC_SEARCH_BUFEMPTY 0x22
664#define HEVC_DECODE_OVER_SIZE 0x23
665#define HEVC_DECODE_BUFEMPTY2 0x24
666#define HEVC_FIND_NEXT_PIC_NAL 0x50
667#define HEVC_FIND_NEXT_DVEL_NAL 0x51
668
669#define HEVC_DUMP_LMEM 0x30
670
671#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
672#define HEVC_DISCARD_NAL 0xf0
673#define HEVC_ACTION_DEC_CONT 0xfd
674#define HEVC_ACTION_ERROR 0xfe
675#define HEVC_ACTION_DONE 0xff
676
677/* --------------------------------------------------- */
678/* Include "parser_cmd.h" */
679/* --------------------------------------------------- */
680#define PARSER_CMD_SKIP_CFG_0 0x0000090b
681
682#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
683
684#define PARSER_CMD_SKIP_CFG_2 0x001b1910
685
686#define PARSER_CMD_NUMBER 37
687
688/**************************************************
689 *
690 *h265 buffer management
691 *
692 ***************************************************
693 */
694/* #define BUFFER_MGR_ONLY */
695/* #define CONFIG_HEVC_CLK_FORCED_ON */
696/* #define ENABLE_SWAP_TEST */
697#define MCRCC_ENABLE
698#define INVALID_POC 0x80000000
699
700#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
701#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
702#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
703#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
704#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
705#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
706#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
707#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
708#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
709#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
710#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
711#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
712#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
713#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
714#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
715#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
716#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
717#ifdef ENABLE_SWAP_TEST
718#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
719#endif
720
721/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
722/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
723#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
724 /*do not define ENABLE_SWAP_TEST*/
725#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
726#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
727
728#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
729#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
730/*
731 *ucode parser/search control
732 *bit 0: 0, header auto parse; 1, header manual parse
 733 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
734 *bit [3:2]: valid when bit1==0;
735 *0, auto skip nal before first vps/sps/pps/idr;
736 *1, auto skip nal before first vps/sps/pps
737 *2, auto skip nal before first vps/sps/pps,
738 * and not decode until the first I slice (with slice address of 0)
739 *
740 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
741 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
742 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
743 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
744 *bit [17]: for NAL_SEI when bit0 is 0:
745 * 0, do not parse/fetch SEI in ucode;
746 * 1, parse/fetch SEI in ucode
747 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
748 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
 749 * 1, fetch NAL_SEI_SUFFIX data to aux buf
750 *bit [19]:
751 * 0, parse NAL_SEI in ucode
752 * 1, fetch NAL_SEI to aux buf
753 *bit [20]: for DOLBY_VISION_META
754 * 0, do not fetch DOLBY_VISION_META to aux buf
755 * 1, fetch DOLBY_VISION_META to aux buf
756 */
757#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
758 /*read only*/
759#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
760 /*
761 [15 : 8] rps_set_id
762 [7 : 0] start_decoding_flag
763 */
764#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
765 /*set before start decoder*/
766#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
767#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
768#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
769
770#define DECODE_MODE_SINGLE 0x0
771#define DECODE_MODE_MULTI_FRAMEBASE 0x1
772#define DECODE_MODE_MULTI_STREAMBASE 0x2
773#define DECODE_MODE_MULTI_DVBAL 0x3
774#define DECODE_MODE_MULTI_DVENL 0x4
775
776#define MAX_INT 0x7FFFFFFF
777
778#define RPM_BEGIN 0x100
779#define modification_list_cur 0x148
780#define RPM_END 0x180
781
782#define RPS_USED_BIT 14
783/* MISC_FLAG0 */
784#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
785#define PCM_ENABLE_FLAG_BIT 1
786#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
787#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
788#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
789#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
790#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
791#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
792#define SLICE_SAO_LUMA_FLAG_BIT 8
793#define SLICE_SAO_CHROMA_FLAG_BIT 9
794#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
795
796union param_u {
797 struct {
798 unsigned short data[RPM_END - RPM_BEGIN];
799 } l;
800 struct {
801 /* from ucode lmem, do not change this struct */
802 unsigned short CUR_RPS[0x10];
803 unsigned short num_ref_idx_l0_active;
804 unsigned short num_ref_idx_l1_active;
805 unsigned short slice_type;
806 unsigned short slice_temporal_mvp_enable_flag;
807 unsigned short dependent_slice_segment_flag;
808 unsigned short slice_segment_address;
809 unsigned short num_title_rows_minus1;
810 unsigned short pic_width_in_luma_samples;
811 unsigned short pic_height_in_luma_samples;
812 unsigned short log2_min_coding_block_size_minus3;
813 unsigned short log2_diff_max_min_coding_block_size;
814 unsigned short log2_max_pic_order_cnt_lsb_minus4;
815 unsigned short POClsb;
816 unsigned short collocated_from_l0_flag;
817 unsigned short collocated_ref_idx;
818 unsigned short log2_parallel_merge_level;
819 unsigned short five_minus_max_num_merge_cand;
820 unsigned short sps_num_reorder_pics_0;
821 unsigned short modification_flag;
822 unsigned short tiles_enabled_flag;
823 unsigned short num_tile_columns_minus1;
824 unsigned short num_tile_rows_minus1;
825 unsigned short tile_width[8];
826 unsigned short tile_height[8];
827 unsigned short misc_flag0;
828 unsigned short pps_beta_offset_div2;
829 unsigned short pps_tc_offset_div2;
830 unsigned short slice_beta_offset_div2;
831 unsigned short slice_tc_offset_div2;
832 unsigned short pps_cb_qp_offset;
833 unsigned short pps_cr_qp_offset;
834 unsigned short first_slice_segment_in_pic_flag;
835 unsigned short m_temporalId;
836 unsigned short m_nalUnitType;
837
838 unsigned short vui_num_units_in_tick_hi;
839 unsigned short vui_num_units_in_tick_lo;
840 unsigned short vui_time_scale_hi;
841 unsigned short vui_time_scale_lo;
842 unsigned short bit_depth;
843 unsigned short profile_etc;
844 unsigned short sei_frame_field_info;
845 unsigned short video_signal_type;
846 unsigned short modification_list[0x20];
847 unsigned short conformance_window_flag;
848 unsigned short conf_win_left_offset;
849 unsigned short conf_win_right_offset;
850 unsigned short conf_win_top_offset;
851 unsigned short conf_win_bottom_offset;
852 unsigned short chroma_format_idc;
853 unsigned short color_description;
854 unsigned short aspect_ratio_idc;
855 unsigned short sar_width;
856 unsigned short sar_height;
857 unsigned short sps_max_dec_pic_buffering_minus1_0;
858 } p;
859};
860
861#define RPM_BUF_SIZE (0x80*2)
862/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
863#define LMEM_BUF_SIZE (0x500 * 2)
864
865struct buff_s {
866 u32 buf_start;
867 u32 buf_size;
868 u32 buf_end;
869};
870
871struct BuffInfo_s {
872 u32 max_width;
873 u32 max_height;
874 unsigned int start_adr;
875 unsigned int end_adr;
876 struct buff_s ipp;
877 struct buff_s sao_abv;
878 struct buff_s sao_vb;
879 struct buff_s short_term_rps;
880 struct buff_s vps;
881 struct buff_s sps;
882 struct buff_s pps;
883 struct buff_s sao_up;
884 struct buff_s swap_buf;
885 struct buff_s swap_buf2;
886 struct buff_s scalelut;
887 struct buff_s dblk_para;
888 struct buff_s dblk_data;
889 struct buff_s dblk_data2;
890 struct buff_s mmu_vbh;
891 struct buff_s cm_header;
892 struct buff_s mpred_above;
893#ifdef MV_USE_FIXED_BUF
894 struct buff_s mpred_mv;
895#endif
896 struct buff_s rpm;
897 struct buff_s lmem;
898};
899#define WORK_BUF_SPEC_NUM 3
900static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
901 {
902 /* 8M bytes */
903 .max_width = 1920,
904 .max_height = 1088,
905 .ipp = {
906 /* IPP work space calculation :
907 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
908 */
909 .buf_size = 0x4000,
910 },
911 .sao_abv = {
912 .buf_size = 0x30000,
913 },
914 .sao_vb = {
915 .buf_size = 0x30000,
916 },
917 .short_term_rps = {
918 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
919 * total 64x16x2 = 2048 bytes (0x800)
920 */
921 .buf_size = 0x800,
922 },
923 .vps = {
924 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
925 * total 0x0800 bytes
926 */
927 .buf_size = 0x800,
928 },
929 .sps = {
930 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
931 * total 0x0800 bytes
932 */
933 .buf_size = 0x800,
934 },
935 .pps = {
936 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
937 * total 0x2000 bytes
938 */
939 .buf_size = 0x2000,
940 },
941 .sao_up = {
942 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
943 * each has 16 bytes total 0x2800 bytes
944 */
945 .buf_size = 0x2800,
946 },
947 .swap_buf = {
948 /* 256cyclex64bit = 2K bytes 0x800
949 * (only 144 cycles valid)
950 */
951 .buf_size = 0x800,
952 },
953 .swap_buf2 = {
954 .buf_size = 0x800,
955 },
956 .scalelut = {
957 /* support up to 32 SCALELUT 1024x32 =
958 * 32Kbytes (0x8000)
959 */
960 .buf_size = 0x8000,
961 },
962 .dblk_para = {
963#ifdef SUPPORT_10BIT
964 .buf_size = 0x40000,
965#else
966 /* DBLK -> Max 256(4096/16) LCU, each para
967 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
968 */
969 .buf_size = 0x20000,
970#endif
971 },
972 .dblk_data = {
973 .buf_size = 0x40000,
974 },
975 .dblk_data2 = {
976 .buf_size = 0x40000,
977 }, /*dblk data for adapter*/
978 .mmu_vbh = {
979 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
980 },
981#if 0
982 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
983 .buf_size = MMU_COMPRESS_HEADER_SIZE *
984 (MAX_REF_PIC_NUM + 1),
985 },
986#endif
987 .mpred_above = {
988 .buf_size = 0x8000,
989 },
990#ifdef MV_USE_FIXED_BUF
991 .mpred_mv = {/* 1080p, 0x40000 per buffer */
992 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
993 },
994#endif
995 .rpm = {
996 .buf_size = RPM_BUF_SIZE,
997 },
998 .lmem = {
999 .buf_size = 0x500 * 2,
1000 }
1001 },
1002 {
1003 .max_width = 4096,
1004 .max_height = 2048,
1005 .ipp = {
1006 /* IPP work space calculation :
1007 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1008 */
1009 .buf_size = 0x4000,
1010 },
1011 .sao_abv = {
1012 .buf_size = 0x30000,
1013 },
1014 .sao_vb = {
1015 .buf_size = 0x30000,
1016 },
1017 .short_term_rps = {
1018 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1019 * total 64x16x2 = 2048 bytes (0x800)
1020 */
1021 .buf_size = 0x800,
1022 },
1023 .vps = {
1024 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1025 * total 0x0800 bytes
1026 */
1027 .buf_size = 0x800,
1028 },
1029 .sps = {
1030 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1031 * total 0x0800 bytes
1032 */
1033 .buf_size = 0x800,
1034 },
1035 .pps = {
1036 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1037 * total 0x2000 bytes
1038 */
1039 .buf_size = 0x2000,
1040 },
1041 .sao_up = {
1042 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1043 * each has 16 bytes total 0x2800 bytes
1044 */
1045 .buf_size = 0x2800,
1046 },
1047 .swap_buf = {
1048 /* 256cyclex64bit = 2K bytes 0x800
1049 * (only 144 cycles valid)
1050 */
1051 .buf_size = 0x800,
1052 },
1053 .swap_buf2 = {
1054 .buf_size = 0x800,
1055 },
1056 .scalelut = {
1057 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1058 * (0x8000)
1059 */
1060 .buf_size = 0x8000,
1061 },
1062 .dblk_para = {
1063 /* DBLK -> Max 256(4096/16) LCU, each para
1064 * 512bytes(total:0x20000),
1065 * data 1024bytes(total:0x40000)
1066 */
1067 .buf_size = 0x20000,
1068 },
1069 .dblk_data = {
1070 .buf_size = 0x80000,
1071 },
1072 .dblk_data2 = {
1073 .buf_size = 0x80000,
1074 }, /*dblk data for adapter*/
1075 .mmu_vbh = {
1076 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1077 },
1078#if 0
1079 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1080 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1081 (MAX_REF_PIC_NUM + 1),
1082 },
1083#endif
1084 .mpred_above = {
1085 .buf_size = 0x8000,
1086 },
1087#ifdef MV_USE_FIXED_BUF
1088 .mpred_mv = {
1089 /* .buf_size = 0x100000*16,
1090 //4k2k , 0x100000 per buffer */
1091 /* 4096x2304 , 0x120000 per buffer */
1092 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1093 },
1094#endif
1095 .rpm = {
1096 .buf_size = RPM_BUF_SIZE,
1097 },
1098 .lmem = {
1099 .buf_size = 0x500 * 2,
1100 }
1101 },
1102
1103 {
1104 .max_width = 4096*2,
1105 .max_height = 2048*2,
1106 .ipp = {
1107 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1108 .buf_size = 0x4000*2,
1109 },
1110 .sao_abv = {
1111 .buf_size = 0x30000*2,
1112 },
1113 .sao_vb = {
1114 .buf_size = 0x30000*2,
1115 },
1116 .short_term_rps = {
1117 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1118 .buf_size = 0x800,
1119 },
1120 .vps = {
1121 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1122 .buf_size = 0x800,
1123 },
1124 .sps = {
1125 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1126 .buf_size = 0x800,
1127 },
1128 .pps = {
1129 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1130 .buf_size = 0x2000,
1131 },
1132 .sao_up = {
1133 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1134 .buf_size = 0x2800*2,
1135 },
1136 .swap_buf = {
1137 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1138 .buf_size = 0x800,
1139 },
1140 .swap_buf2 = {
1141 .buf_size = 0x800,
1142 },
1143 .scalelut = {
1144 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1145 .buf_size = 0x8000*2,
1146 },
1147 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1148 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1149 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1150 .mmu_vbh = {
1151 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1152 },
1153#if 0
1154 .cm_header = {
1155 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1156 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1157 },
1158#endif
1159 .mpred_above = {
1160 .buf_size = 0x8000*2,
1161 },
1162#ifdef MV_USE_FIXED_BUF
1163 .mpred_mv = {
1164 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1165 },
1166#endif
1167 .rpm = {
1168 .buf_size = RPM_BUF_SIZE,
1169 },
1170 .lmem = {
1171 .buf_size = 0x500 * 2,
1172 },
1173 }
1174};
1175
1176static void init_buff_spec(struct hevc_state_s *hevc,
1177 struct BuffInfo_s *buf_spec)
1178{
1179 buf_spec->ipp.buf_start = buf_spec->start_adr;
1180 buf_spec->sao_abv.buf_start =
1181 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1182
1183 buf_spec->sao_vb.buf_start =
1184 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1185 buf_spec->short_term_rps.buf_start =
1186 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1187 buf_spec->vps.buf_start =
1188 buf_spec->short_term_rps.buf_start +
1189 buf_spec->short_term_rps.buf_size;
1190 buf_spec->sps.buf_start =
1191 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1192 buf_spec->pps.buf_start =
1193 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1194 buf_spec->sao_up.buf_start =
1195 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1196 buf_spec->swap_buf.buf_start =
1197 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1198 buf_spec->swap_buf2.buf_start =
1199 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1200 buf_spec->scalelut.buf_start =
1201 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1202 buf_spec->dblk_para.buf_start =
1203 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1204 buf_spec->dblk_data.buf_start =
1205 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1206 buf_spec->dblk_data2.buf_start =
1207 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1208 buf_spec->mmu_vbh.buf_start =
1209 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1210 buf_spec->mpred_above.buf_start =
1211 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1212#ifdef MV_USE_FIXED_BUF
1213 buf_spec->mpred_mv.buf_start =
1214 buf_spec->mpred_above.buf_start +
1215 buf_spec->mpred_above.buf_size;
1216
1217 buf_spec->rpm.buf_start =
1218 buf_spec->mpred_mv.buf_start +
1219 buf_spec->mpred_mv.buf_size;
1220#else
1221 buf_spec->rpm.buf_start =
1222 buf_spec->mpred_above.buf_start +
1223 buf_spec->mpred_above.buf_size;
1224#endif
1225 buf_spec->lmem.buf_start =
1226 buf_spec->rpm.buf_start +
1227 buf_spec->rpm.buf_size;
1228 buf_spec->end_adr =
1229 buf_spec->lmem.buf_start +
1230 buf_spec->lmem.buf_size;
1231
1232 if (hevc && get_dbg_flag2(hevc)) {
1233 hevc_print(hevc, 0,
1234 "%s workspace (%x %x) size = %x\n", __func__,
1235 buf_spec->start_adr, buf_spec->end_adr,
1236 buf_spec->end_adr - buf_spec->start_adr);
1237
1238 hevc_print(hevc, 0,
1239 "ipp.buf_start :%x\n",
1240 buf_spec->ipp.buf_start);
1241 hevc_print(hevc, 0,
1242 "sao_abv.buf_start :%x\n",
1243 buf_spec->sao_abv.buf_start);
1244 hevc_print(hevc, 0,
1245 "sao_vb.buf_start :%x\n",
1246 buf_spec->sao_vb.buf_start);
1247 hevc_print(hevc, 0,
1248 "short_term_rps.buf_start :%x\n",
1249 buf_spec->short_term_rps.buf_start);
1250 hevc_print(hevc, 0,
1251 "vps.buf_start :%x\n",
1252 buf_spec->vps.buf_start);
1253 hevc_print(hevc, 0,
1254 "sps.buf_start :%x\n",
1255 buf_spec->sps.buf_start);
1256 hevc_print(hevc, 0,
1257 "pps.buf_start :%x\n",
1258 buf_spec->pps.buf_start);
1259 hevc_print(hevc, 0,
1260 "sao_up.buf_start :%x\n",
1261 buf_spec->sao_up.buf_start);
1262 hevc_print(hevc, 0,
1263 "swap_buf.buf_start :%x\n",
1264 buf_spec->swap_buf.buf_start);
1265 hevc_print(hevc, 0,
1266 "swap_buf2.buf_start :%x\n",
1267 buf_spec->swap_buf2.buf_start);
1268 hevc_print(hevc, 0,
1269 "scalelut.buf_start :%x\n",
1270 buf_spec->scalelut.buf_start);
1271 hevc_print(hevc, 0,
1272 "dblk_para.buf_start :%x\n",
1273 buf_spec->dblk_para.buf_start);
1274 hevc_print(hevc, 0,
1275 "dblk_data.buf_start :%x\n",
1276 buf_spec->dblk_data.buf_start);
1277 hevc_print(hevc, 0,
1278 "dblk_data2.buf_start :%x\n",
1279 buf_spec->dblk_data2.buf_start);
1280 hevc_print(hevc, 0,
1281 "mpred_above.buf_start :%x\n",
1282 buf_spec->mpred_above.buf_start);
1283#ifdef MV_USE_FIXED_BUF
1284 hevc_print(hevc, 0,
1285 "mpred_mv.buf_start :%x\n",
1286 buf_spec->mpred_mv.buf_start);
1287#endif
1288 if ((get_dbg_flag2(hevc)
1289 &
1290 H265_DEBUG_SEND_PARAM_WITH_REG)
1291 == 0) {
1292 hevc_print(hevc, 0,
1293 "rpm.buf_start :%x\n",
1294 buf_spec->rpm.buf_start);
1295 }
1296 }
1297
1298}
1299
1300enum SliceType {
1301 B_SLICE,
1302 P_SLICE,
1303 I_SLICE
1304};
1305
1306/*USE_BUF_BLOCK*/
1307struct BUF_s {
1308 ulong start_adr;
1309 u32 size;
1310 u32 luma_size;
1311 ulong header_addr;
1312 u32 header_size;
1313 int used_flag;
1314 ulong v4l_ref_buf_addr;
1315} /*BUF_t */;
1316
1317/* level 6, 6.1 maximum slice number is 800; other is 200 */
1318#define MAX_SLICE_NUM 800
1319struct PIC_s {
1320 int index;
1321 int scatter_alloc;
1322 int BUF_index;
1323 int mv_buf_index;
1324 int POC;
1325 int decode_idx;
1326 int slice_type;
1327 int RefNum_L0;
1328 int RefNum_L1;
1329 int num_reorder_pic;
1330 int stream_offset;
1331 unsigned char referenced;
1332 unsigned char output_mark;
1333 unsigned char recon_mark;
1334 unsigned char output_ready;
1335 unsigned char error_mark;
1336 /* dis_mark = 0: discard mark; dis_mark = 1: no discard mark */
1337 unsigned char dis_mark;
1338 /**/ int slice_idx;
1339 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1340 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1341 /*buffer */
1342 unsigned int header_adr;
1343#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1344 unsigned char dv_enhance_exist;
1345#endif
1346 char *aux_data_buf;
1347 int aux_data_size;
1348 unsigned long cma_alloc_addr;
1349 struct page *alloc_pages;
1350 unsigned int mpred_mv_wr_start_addr;
1351 unsigned int mc_y_adr;
1352 unsigned int mc_u_v_adr;
1353#ifdef SUPPORT_10BIT
1354 /*unsigned int comp_body_size;*/
1355 unsigned int dw_y_adr;
1356 unsigned int dw_u_v_adr;
1357#endif
1358 int mc_canvas_y;
1359 int mc_canvas_u_v;
1360 int width;
1361 int height;
1362
1363 int y_canvas_index;
1364 int uv_canvas_index;
1365#ifdef MULTI_INSTANCE_SUPPORT
1366 struct canvas_config_s canvas_config[2];
1367#endif
1368#ifdef SUPPORT_10BIT
1369 int mem_saving_mode;
1370 u32 bit_depth_luma;
1371 u32 bit_depth_chroma;
1372#endif
1373#ifdef LOSLESS_COMPRESS_MODE
1374 unsigned int losless_comp_body_size;
1375#endif
1376 unsigned char pic_struct;
1377 int vf_ref;
1378
1379 u32 pts;
1380 u64 pts64;
1381 u64 timestamp;
1382
1383 u32 aspect_ratio_idc;
1384 u32 sar_width;
1385 u32 sar_height;
1386 u32 double_write_mode;
1387 u32 video_signal_type;
1388 unsigned short conformance_window_flag;
1389 unsigned short conf_win_left_offset;
1390 unsigned short conf_win_right_offset;
1391 unsigned short conf_win_top_offset;
1392 unsigned short conf_win_bottom_offset;
1393 unsigned short chroma_format_idc;
1394
1395 /* picture qos information */
1396 int max_qp;
1397 int avg_qp;
1398 int min_qp;
1399 int max_skip;
1400 int avg_skip;
1401 int min_skip;
1402 int max_mv;
1403 int min_mv;
1404 int avg_mv;
1405
1406 bool vframe_bound;
1407} /*PIC_t */;
1408
1409#define MAX_TILE_COL_NUM 10
1410#define MAX_TILE_ROW_NUM 20
1411struct tile_s {
1412 int width;
1413 int height;
1414 int start_cu_x;
1415 int start_cu_y;
1416
1417 unsigned int sao_vb_start_addr;
1418 unsigned int sao_abv_start_addr;
1419};
1420
1421#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1422#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1423#define SEI_HDR10PLUS_MASK 0x00000004
1424
1425#define VF_POOL_SIZE 32
1426
1427#ifdef MULTI_INSTANCE_SUPPORT
1428#define DEC_RESULT_NONE 0
1429#define DEC_RESULT_DONE 1
1430#define DEC_RESULT_AGAIN 2
1431#define DEC_RESULT_CONFIG_PARAM 3
1432#define DEC_RESULT_ERROR 4
1433#define DEC_INIT_PICLIST 5
1434#define DEC_UNINIT_PICLIST 6
1435#define DEC_RESULT_GET_DATA 7
1436#define DEC_RESULT_GET_DATA_RETRY 8
1437#define DEC_RESULT_EOS 9
1438#define DEC_RESULT_FORCE_EXIT 10
1439#define DEC_RESULT_FREE_CANVAS 11
1440
1441static void vh265_work(struct work_struct *work);
1442static void vh265_timeout_work(struct work_struct *work);
1443static void vh265_notify_work(struct work_struct *work);
1444
1445#endif
1446
1447struct debug_log_s {
1448 struct list_head list;
1449 uint8_t data; /*will alloc more size*/
1450};
1451
1452struct hevc_state_s {
1453#ifdef MULTI_INSTANCE_SUPPORT
1454 struct platform_device *platform_dev;
1455 void (*vdec_cb)(struct vdec_s *, void *);
1456 void *vdec_cb_arg;
1457 struct vframe_chunk_s *chunk;
1458 int dec_result;
1459 struct work_struct work;
1460 struct work_struct timeout_work;
1461 struct work_struct notify_work;
1462 struct work_struct set_clk_work;
1463 /* timeout handle */
1464 unsigned long int start_process_time;
1465 unsigned int last_lcu_idx;
1466 unsigned int decode_timeout_count;
1467 unsigned int timeout_num;
1468#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1469 unsigned char switch_dvlayer_flag;
1470 unsigned char no_switch_dvlayer_count;
1471 unsigned char bypass_dvenl_enable;
1472 unsigned char bypass_dvenl;
1473#endif
1474 unsigned char start_parser_type;
1475 /*start_decoding_flag:
1476 vps/pps/sps/idr info from ucode*/
1477 unsigned char start_decoding_flag;
1478 unsigned char rps_set_id;
1479 unsigned char eos;
1480 int pic_decoded_lcu_idx;
1481 u8 over_decode;
1482 u8 empty_flag;
1483#endif
1484 struct vframe_s vframe_dummy;
1485 char *provider_name;
1486 int index;
1487 struct device *cma_dev;
1488 unsigned char m_ins_flag;
1489 unsigned char dolby_enhance_flag;
1490 unsigned long buf_start;
1491 u32 buf_size;
1492 u32 mv_buf_size;
1493
1494 struct BuffInfo_s work_space_buf_store;
1495 struct BuffInfo_s *work_space_buf;
1496
1497 u8 aux_data_dirty;
1498 u32 prefix_aux_size;
1499 u32 suffix_aux_size;
1500 void *aux_addr;
1501 void *rpm_addr;
1502 void *lmem_addr;
1503 dma_addr_t aux_phy_addr;
1504 dma_addr_t rpm_phy_addr;
1505 dma_addr_t lmem_phy_addr;
1506
1507 unsigned int pic_list_init_flag;
1508 unsigned int use_cma_flag;
1509
1510 unsigned short *rpm_ptr;
1511 unsigned short *lmem_ptr;
1512 unsigned short *debug_ptr;
1513 int debug_ptr_size;
1514 int pic_w;
1515 int pic_h;
1516 int lcu_x_num;
1517 int lcu_y_num;
1518 int lcu_total;
1519 int lcu_size;
1520 int lcu_size_log2;
1521 int lcu_x_num_pre;
1522 int lcu_y_num_pre;
1523 int first_pic_after_recover;
1524
1525 int num_tile_col;
1526 int num_tile_row;
1527 int tile_enabled;
1528 int tile_x;
1529 int tile_y;
1530 int tile_y_x;
1531 int tile_start_lcu_x;
1532 int tile_start_lcu_y;
1533 int tile_width_lcu;
1534 int tile_height_lcu;
1535
1536 int slice_type;
1537 unsigned int slice_addr;
1538 unsigned int slice_segment_addr;
1539
1540 unsigned char interlace_flag;
1541 unsigned char curr_pic_struct;
1542 unsigned char frame_field_info_present_flag;
1543
1544 unsigned short sps_num_reorder_pics_0;
1545 unsigned short misc_flag0;
1546 int m_temporalId;
1547 int m_nalUnitType;
1548 int TMVPFlag;
1549 int isNextSliceSegment;
1550 int LDCFlag;
1551 int m_pocRandomAccess;
1552 int plevel;
1553 int MaxNumMergeCand;
1554
1555 int new_pic;
1556 int new_tile;
1557 int curr_POC;
1558 int iPrevPOC;
1559#ifdef MULTI_INSTANCE_SUPPORT
1560 int decoded_poc;
1561 struct PIC_s *decoding_pic;
1562#endif
1563 int iPrevTid0POC;
1564 int list_no;
1565 int RefNum_L0;
1566 int RefNum_L1;
1567 int ColFromL0Flag;
1568 int LongTerm_Curr;
1569 int LongTerm_Col;
1570 int Col_POC;
1571 int LongTerm_Ref;
1572#ifdef MULTI_INSTANCE_SUPPORT
1573 int m_pocRandomAccess_bak;
1574 int curr_POC_bak;
1575 int iPrevPOC_bak;
1576 int iPrevTid0POC_bak;
1577 unsigned char start_parser_type_bak;
1578 unsigned char start_decoding_flag_bak;
1579 unsigned char rps_set_id_bak;
1580 int pic_decoded_lcu_idx_bak;
1581 int decode_idx_bak;
1582#endif
1583 struct PIC_s *cur_pic;
1584 struct PIC_s *col_pic;
1585 int skip_flag;
1586 int decode_idx;
1587 int slice_idx;
1588 unsigned char have_vps;
1589 unsigned char have_sps;
1590 unsigned char have_pps;
1591 unsigned char have_valid_start_slice;
1592 unsigned char wait_buf;
1593 unsigned char error_flag;
1594 unsigned int error_skip_nal_count;
1595 long used_4k_num;
1596
1597 unsigned char
1598 ignore_bufmgr_error; /* bit 0, for decoding;
1599 bit 1, for displaying;
1600 bit 1 must be set if bit 0 is 1*/
1601 int PB_skip_mode;
1602 int PB_skip_count_after_decoding;
1603#ifdef SUPPORT_10BIT
1604 int mem_saving_mode;
1605#endif
1606#ifdef LOSLESS_COMPRESS_MODE
1607 unsigned int losless_comp_body_size;
1608#endif
1609 int pts_mode;
1610 int last_lookup_pts;
1611 int last_pts;
1612 u64 last_lookup_pts_us64;
1613 u64 last_pts_us64;
1614 u32 shift_byte_count_lo;
1615 u32 shift_byte_count_hi;
1616 int pts_mode_switching_count;
1617 int pts_mode_recovery_count;
1618
1619 int pic_num;
1620
1621 /**/
1622 union param_u param;
1623
1624 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1625
1626 struct timer_list timer;
1627 struct BUF_s m_BUF[BUF_POOL_SIZE];
1628 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1629 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1630
1631 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1632 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1633 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1634 struct vframe_s vfpool[VF_POOL_SIZE];
1635
1636 u32 stat;
1637 u32 frame_width;
1638 u32 frame_height;
1639 u32 frame_dur;
1640 u32 frame_ar;
1641 u32 bit_depth_luma;
1642 u32 bit_depth_chroma;
1643 u32 video_signal_type;
1644 u32 video_signal_type_debug;
1645 u32 saved_resolution;
1646 bool get_frame_dur;
1647 u32 error_watchdog_count;
1648 u32 error_skip_nal_wt_cnt;
1649 u32 error_system_watchdog_count;
1650
1651#ifdef DEBUG_PTS
1652 unsigned long pts_missed;
1653 unsigned long pts_hit;
1654#endif
1655 struct dec_sysinfo vh265_amstream_dec_info;
1656 unsigned char init_flag;
1657 unsigned char first_sc_checked;
1658 unsigned char uninit_list;
1659 u32 start_decoding_time;
1660
1661 int show_frame_num;
1662#ifdef USE_UNINIT_SEMA
1663 struct semaphore h265_uninit_done_sema;
1664#endif
1665 int fatal_error;
1666
1667
1668 u32 sei_present_flag;
1669 void *frame_mmu_map_addr;
1670 dma_addr_t frame_mmu_map_phy_addr;
1671 unsigned int mmu_mc_buf_start;
1672 unsigned int mmu_mc_buf_end;
1673 unsigned int mmu_mc_start_4k_adr;
1674 void *mmu_box;
1675 void *bmmu_box;
1676 int mmu_enable;
1677
1678 unsigned int dec_status;
1679
1680 /* data for SEI_MASTER_DISPLAY_COLOR */
1681 unsigned int primaries[3][2];
1682 unsigned int white_point[2];
1683 unsigned int luminance[2];
1684 /* data for SEI_CONTENT_LIGHT_LEVEL */
1685 unsigned int content_light_level[2];
1686
1687 struct PIC_s *pre_top_pic;
1688 struct PIC_s *pre_bot_pic;
1689
1690#ifdef MULTI_INSTANCE_SUPPORT
1691 int double_write_mode;
1692 int dynamic_buf_num_margin;
1693 int start_action;
1694 int save_buffer_mode;
1695#endif
1696 u32 i_only;
1697 struct list_head log_list;
1698 u32 ucode_pause_pos;
1699 u32 start_shift_bytes;
1700
1701 u32 vf_pre_count;
1702 u32 vf_get_count;
1703 u32 vf_put_count;
1704#ifdef SWAP_HEVC_UCODE
1705 dma_addr_t mc_dma_handle;
1706 void *mc_cpu_addr;
1707 int swap_size;
1708 ulong swap_addr;
1709#endif
1710#ifdef DETREFILL_ENABLE
1711 dma_addr_t detbuf_adr;
1712 u16 *detbuf_adr_virt;
1713 u8 delrefill_check;
1714#endif
1715 u8 head_error_flag;
1716 int valve_count;
1717 struct firmware_s *fw;
1718 int max_pic_w;
1719 int max_pic_h;
1720#ifdef AGAIN_HAS_THRESHOLD
1721 u8 next_again_flag;
1722 u32 pre_parser_wr_ptr;
1723#endif
1724 u32 ratio_control;
1725 u32 first_pic_flag;
1726 u32 decode_size;
1727 struct mutex chunks_mutex;
1728 int need_cache_size;
1729 u64 sc_start_time;
1730 u32 skip_first_nal;
1731 bool is_swap;
1732 bool is_4k;
1733 int frameinfo_enable;
1734 struct vframe_qos_s vframe_qos;
1735 bool is_used_v4l;
1736 void *v4l2_ctx;
1737 bool v4l_params_parsed;
1738 u32 mem_map_mode;
1739} /*hevc_stru_t */;
1740
1741#ifdef AGAIN_HAS_THRESHOLD
1742u32 again_threshold;
1743#endif
1744#ifdef SEND_LMEM_WITH_RPM
1745#define get_lmem_params(hevc, ladr) \
1746 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
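/*
 * Note (informal reading of the macro above): within each group of four
 * 16-bit words the lmem dump is word-swapped, presumably because the HW
 * writes it out in 64-bit units.  For ladr = base + k (base 4-aligned,
 * k = 0..3) the macro reads hevc->lmem_ptr[base + 3 - k]; e.g. ladr 0x41
 * fetches lmem_ptr[0x42].
 */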
1747
1748
1749static int get_frame_mmu_map_size(void)
1750{
1751 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1752 return (MAX_FRAME_8K_NUM * 4);
1753
1754 return (MAX_FRAME_4K_NUM * 4);
1755}
1756
1757static int is_oversize(int w, int h)
1758{
1759 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1760 MAX_SIZE_8K : MAX_SIZE_4K;
1761
1762 if (w < 0 || h < 0)
1763 return true;
1764
1765 if (h != 0 && (w > max / h))
1766 return true;
1767
1768 return false;
1769}
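/*
 * Example (sketch): an 8192x4608 stream is accepted on SM1 and later
 * (8192*4608 == MAX_SIZE_8K, so "w > max / h" is false), but is rejected
 * as oversize on earlier SoCs where the limit is MAX_SIZE_4K (4096x2304).
 */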
1770
1771void check_head_error(struct hevc_state_s *hevc)
1772{
1773#define pcm_enabled_flag 0x040
1774#define pcm_sample_bit_depth_luma 0x041
1775#define pcm_sample_bit_depth_chroma 0x042
1776 hevc->head_error_flag = 0;
1777 if ((error_handle_policy & 0x40) == 0)
1778 return;
1779 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1780 uint16_t pcm_depth_luma = get_lmem_params(
1781 hevc, pcm_sample_bit_depth_luma);
1782 uint16_t pcm_sample_chroma = get_lmem_params(
1783 hevc, pcm_sample_bit_depth_chroma);
1784 if (pcm_depth_luma >
1785 hevc->bit_depth_luma ||
1786 pcm_sample_chroma >
1787 hevc->bit_depth_chroma) {
1788 hevc_print(hevc, 0,
1789 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1790 pcm_depth_luma,
1791 pcm_sample_chroma,
1792 hevc->bit_depth_luma,
1793 hevc->bit_depth_chroma);
1794 hevc->head_error_flag = 1;
1795 }
1796 }
1797}
1798#endif
1799
1800#ifdef SUPPORT_10BIT
1801/* Lossless compression body buffer size 4K per 64x32 (jt) */
1802static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1803 int width, int height, int mem_saving_mode)
1804{
1805 int width_x64;
1806 int height_x32;
1807 int bsize;
1808
1809 width_x64 = width + 63;
1810 width_x64 >>= 6;
1811
1812 height_x32 = height + 31;
1813 height_x32 >>= 5;
1814 if (mem_saving_mode == 1 && hevc->mmu_enable)
1815 bsize = 3200 * width_x64 * height_x32;
1816 else if (mem_saving_mode == 1)
1817 bsize = 3072 * width_x64 * height_x32;
1818 else
1819 bsize = 4096 * width_x64 * height_x32;
1820
1821 return bsize;
1822}
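/*
 * Sizing example (sketch): for a 3840x2160 picture with no memory saving,
 * width_x64 = 60 and height_x32 = 68, so the compressed body buffer is
 * 4096 * 60 * 68 = 16711680 bytes (~16 MB) per reference frame.
 */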
1823
1824/* Lossless compression header buffer size 32 bytes per 128x64 (jt) */
1825static int compute_losless_comp_header_size(int width, int height)
1826{
1827 int width_x128;
1828 int height_x64;
1829 int hsize;
1830
1831 width_x128 = width + 127;
1832 width_x128 >>= 7;
1833
1834 height_x64 = height + 63;
1835 height_x64 >>= 6;
1836
1837 hsize = 32*width_x128*height_x64;
1838
1839 return hsize;
1840}
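/*
 * Header example (sketch): the same 3840x2160 picture gives
 * width_x128 = 30 and height_x64 = 34, i.e. 32 * 30 * 34 = 32640 bytes
 * of compression header per frame.
 */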
1841#endif
1842
1843static int add_log(struct hevc_state_s *hevc,
1844 const char *fmt, ...)
1845{
1846#define HEVC_LOG_BUF 196
1847 struct debug_log_s *log_item;
1848 unsigned char buf[HEVC_LOG_BUF];
1849 int len = 0;
1850 va_list args;
1851 mutex_lock(&vh265_log_mutex);
1852 va_start(args, fmt);
1853 len = sprintf(buf, "<%ld> <%05d> ",
1854 jiffies, hevc->decode_idx);
1855 len += vsnprintf(buf + len,
1856 HEVC_LOG_BUF - len, fmt, args);
1857 va_end(args);
1858 log_item = kmalloc(
1859 sizeof(struct debug_log_s) + len,
1860 GFP_KERNEL);
1861 if (log_item) {
1862 INIT_LIST_HEAD(&log_item->list);
1863 strcpy(&log_item->data, buf);
1864 list_add_tail(&log_item->list,
1865 &hevc->log_list);
1866 }
1867 mutex_unlock(&vh265_log_mutex);
1868 return 0;
1869}
1870
1871static void dump_log(struct hevc_state_s *hevc)
1872{
1873 int i = 0;
1874 struct debug_log_s *log_item, *tmp;
1875 mutex_lock(&vh265_log_mutex);
1876 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1877 hevc_print(hevc, 0,
1878 "[LOG%04d]%s\n",
1879 i++,
1880 &log_item->data);
1881 list_del(&log_item->list);
1882 kfree(log_item);
1883 }
1884 mutex_unlock(&vh265_log_mutex);
1885}
1886
1887static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1888 struct PIC_s *pic)
1889{
1890 if (pic->error_mark
1891 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1892 return 1;
1893 return 0;
1894}
1895
1896static int get_pic_poc(struct hevc_state_s *hevc,
1897 unsigned int idx)
1898{
1899 if (idx != 0xff
1900 && idx < MAX_REF_PIC_NUM
1901 && hevc->m_PIC[idx])
1902 return hevc->m_PIC[idx]->POC;
1903 return INVALID_POC;
1904}
1905
1906#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1907static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1908{
1909 return (hevc->m_ins_flag &&
1910 ((double_write_mode & 0x80000000) == 0)) ?
1911 hevc->double_write_mode :
1912 (double_write_mode & 0x7fffffff);
1913}
1914
1915static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1916{
1917 return (hevc->m_ins_flag &&
1918 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1919 hevc->dynamic_buf_num_margin :
1920 (dynamic_buf_num_margin & 0x7fffffff);
1921}
1922#endif
1923
1924static int get_double_write_mode(struct hevc_state_s *hevc)
1925{
1926 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1927 int w = hevc->pic_w;
1928 int h = hevc->pic_h;
1929 u32 dw = 0x1; /*1:1*/
1930 switch (valid_dw_mode) {
1931 case 0x100:
1932 if (w > 1920 && h > 1088)
1933 dw = 0x4; /*1:2*/
1934 break;
1935 case 0x200:
1936 if (w > 1920 && h > 1088)
1937 dw = 0x2; /*1:4*/
1938 break;
1939 case 0x300:
1940 if (w > 1280 && h > 720)
1941 dw = 0x4; /*1:2*/
1942 break;
1943 default:
1944 dw = valid_dw_mode;
1945 break;
1946 }
1947 return dw;
1948}
1949
1950static int get_double_write_ratio(struct hevc_state_s *hevc,
1951 int dw_mode)
1952{
1953 int ratio = 1;
1954 if ((dw_mode == 2) ||
1955 (dw_mode == 3))
1956 ratio = 4;
1957 else if (dw_mode == 4)
1958 ratio = 2;
1959 return ratio;
1960}
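/*
 * Summary of the double write ratios used above (per axis):
 *   dw_mode 2 or 3 -> width and height both divided by 4
 *   dw_mode 4      -> width and height both divided by 2
 *   other modes    -> full size (ratio 1)
 * The adaptive modes 0x100/0x200/0x300 handled in get_double_write_mode()
 * only switch to a scaled mode above the resolution thresholds checked
 * there; otherwise they fall back to full size.
 */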
1961#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1962static unsigned char get_idx(struct hevc_state_s *hevc)
1963{
1964 return hevc->index;
1965}
1966#endif
1967
1968#undef pr_info
1969#define pr_info printk
1970static int hevc_print(struct hevc_state_s *hevc,
1971 int flag, const char *fmt, ...)
1972{
1973#define HEVC_PRINT_BUF 256
1974 unsigned char buf[HEVC_PRINT_BUF];
1975 int len = 0;
1976#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1977 if (hevc == NULL ||
1978 (flag == 0) ||
1979 ((debug_mask &
1980 (1 << hevc->index))
1981 && (debug & flag))) {
1982#endif
1983 va_list args;
1984
1985 va_start(args, fmt);
1986 if (hevc)
1987 len = sprintf(buf, "[%d]", hevc->index);
1988 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1989 pr_debug("%s", buf);
1990 va_end(args);
1991#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1992 }
1993#endif
1994 return 0;
1995}
1996
1997static int hevc_print_cont(struct hevc_state_s *hevc,
1998 int flag, const char *fmt, ...)
1999{
2000 unsigned char buf[HEVC_PRINT_BUF];
2001 int len = 0;
2002#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2003 if (hevc == NULL ||
2004 (flag == 0) ||
2005 ((debug_mask &
2006 (1 << hevc->index))
2007 && (debug & flag))) {
2008#endif
2009 va_list args;
2010
2011 va_start(args, fmt);
2012 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2013 pr_info("%s", buf);
2014 va_end(args);
2015#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2016 }
2017#endif
2018 return 0;
2019}
2020
2021static void put_mv_buf(struct hevc_state_s *hevc,
2022 struct PIC_s *pic);
2023
2024static void update_vf_memhandle(struct hevc_state_s *hevc,
2025 struct vframe_s *vf, struct PIC_s *pic);
2026
2027static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2028
2029static void release_aux_data(struct hevc_state_s *hevc,
2030 struct PIC_s *pic);
2031static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2032
2033#ifdef MULTI_INSTANCE_SUPPORT
2034static void backup_decode_state(struct hevc_state_s *hevc)
2035{
2036 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2037 hevc->curr_POC_bak = hevc->curr_POC;
2038 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2039 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2040 hevc->start_parser_type_bak = hevc->start_parser_type;
2041 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2042 hevc->rps_set_id_bak = hevc->rps_set_id;
2043 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2044 hevc->decode_idx_bak = hevc->decode_idx;
2045
2046}
2047
2048static void restore_decode_state(struct hevc_state_s *hevc)
2049{
2050 struct vdec_s *vdec = hw_to_vdec(hevc);
2051 if (!vdec_has_more_input(vdec)) {
2052 hevc->pic_decoded_lcu_idx =
2053 READ_VREG(HEVC_PARSER_LCU_START)
2054 & 0xffffff;
2055 return;
2056 }
2057 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2058 "%s: discard pic index 0x%x\n",
2059 __func__, hevc->decoding_pic ?
2060 hevc->decoding_pic->index : 0xff);
2061 if (hevc->decoding_pic) {
2062 hevc->decoding_pic->error_mark = 0;
2063 hevc->decoding_pic->output_ready = 0;
2064 hevc->decoding_pic->output_mark = 0;
2065 hevc->decoding_pic->referenced = 0;
2066 hevc->decoding_pic->POC = INVALID_POC;
2067 put_mv_buf(hevc, hevc->decoding_pic);
2068 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2069 release_aux_data(hevc, hevc->decoding_pic);
2070 hevc->decoding_pic = NULL;
2071 }
2072 hevc->decode_idx = hevc->decode_idx_bak;
2073 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2074 hevc->curr_POC = hevc->curr_POC_bak;
2075 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2076 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2077 hevc->start_parser_type = hevc->start_parser_type_bak;
2078 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2079 hevc->rps_set_id = hevc->rps_set_id_bak;
2080 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2081
2082 if (hevc->pic_list_init_flag == 1)
2083 hevc->pic_list_init_flag = 0;
2084 /*if (hevc->decode_idx == 0)
2085 hevc->start_decoding_flag = 0;*/
2086
2087 hevc->slice_idx = 0;
2088 hevc->used_4k_num = -1;
2089}
2090#endif
2091
2092static void hevc_init_stru(struct hevc_state_s *hevc,
2093 struct BuffInfo_s *buf_spec_i)
2094{
2095 int i;
2096 INIT_LIST_HEAD(&hevc->log_list);
2097 hevc->work_space_buf = buf_spec_i;
2098 hevc->prefix_aux_size = 0;
2099 hevc->suffix_aux_size = 0;
2100 hevc->aux_addr = NULL;
2101 hevc->rpm_addr = NULL;
2102 hevc->lmem_addr = NULL;
2103
2104 hevc->curr_POC = INVALID_POC;
2105
2106 hevc->pic_list_init_flag = 0;
2107 hevc->use_cma_flag = 0;
2108 hevc->decode_idx = 0;
2109 hevc->slice_idx = 0;
2110 hevc->new_pic = 0;
2111 hevc->new_tile = 0;
2112 hevc->iPrevPOC = 0;
2113 hevc->list_no = 0;
2114 /* int m_uiMaxCUWidth = 1<<7; */
2115 /* int m_uiMaxCUHeight = 1<<7; */
2116 hevc->m_pocRandomAccess = MAX_INT;
2117 hevc->tile_enabled = 0;
2118 hevc->tile_x = 0;
2119 hevc->tile_y = 0;
2120 hevc->iPrevTid0POC = 0;
2121 hevc->slice_addr = 0;
2122 hevc->slice_segment_addr = 0;
2123 hevc->skip_flag = 0;
2124 hevc->misc_flag0 = 0;
2125
2126 hevc->cur_pic = NULL;
2127 hevc->col_pic = NULL;
2128 hevc->wait_buf = 0;
2129 hevc->error_flag = 0;
2130 hevc->head_error_flag = 0;
2131 hevc->error_skip_nal_count = 0;
2132 hevc->have_vps = 0;
2133 hevc->have_sps = 0;
2134 hevc->have_pps = 0;
2135 hevc->have_valid_start_slice = 0;
2136
2137 hevc->pts_mode = PTS_NORMAL;
2138 hevc->last_pts = 0;
2139 hevc->last_lookup_pts = 0;
2140 hevc->last_pts_us64 = 0;
2141 hevc->last_lookup_pts_us64 = 0;
2142 hevc->pts_mode_switching_count = 0;
2143 hevc->pts_mode_recovery_count = 0;
2144
2145 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2146 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2147 if (hevc->PB_skip_mode == 0)
2148 hevc->ignore_bufmgr_error = 0x1;
2149 else
2150 hevc->ignore_bufmgr_error = 0x0;
2151
2152 if (hevc->is_used_v4l) {
2153 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2154 if (hevc->m_PIC[i] != NULL) {
2155 memset(hevc->m_PIC[i], 0, sizeof(struct PIC_s));
2156 hevc->m_PIC[i]->index = i;
2157 }
2158 }
2159 }
2160
2161 hevc->pic_num = 0;
2162 hevc->lcu_x_num_pre = 0;
2163 hevc->lcu_y_num_pre = 0;
2164 hevc->first_pic_after_recover = 0;
2165
2166 hevc->pre_top_pic = NULL;
2167 hevc->pre_bot_pic = NULL;
2168
2169 hevc->sei_present_flag = 0;
2170 hevc->valve_count = 0;
2171 hevc->first_pic_flag = 0;
2172#ifdef MULTI_INSTANCE_SUPPORT
2173 hevc->decoded_poc = INVALID_POC;
2174 hevc->start_process_time = 0;
2175 hevc->last_lcu_idx = 0;
2176 hevc->decode_timeout_count = 0;
2177 hevc->timeout_num = 0;
2178 hevc->eos = 0;
2179 hevc->pic_decoded_lcu_idx = -1;
2180 hevc->over_decode = 0;
2181 hevc->used_4k_num = -1;
2182 hevc->start_decoding_flag = 0;
2183 hevc->rps_set_id = 0;
2184 backup_decode_state(hevc);
2185#endif
2186#ifdef DETREFILL_ENABLE
2187 hevc->detbuf_adr = 0;
2188 hevc->detbuf_adr_virt = NULL;
2189#endif
2190}
2191
2192static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2193static int H265_alloc_mmu(struct hevc_state_s *hevc,
2194 struct PIC_s *new_pic, unsigned short bit_depth,
2195 unsigned int *mmu_index_adr);
2196
2197#ifdef DETREFILL_ENABLE
2198#define DETREFILL_BUF_SIZE (4 * 0x4000)
2199#define HEVC_SAO_DBG_MODE0 0x361e
2200#define HEVC_SAO_DBG_MODE1 0x361f
2201#define HEVC_SAO_CTRL10 0x362e
2202#define HEVC_SAO_CTRL11 0x362f
2203static int init_detrefill_buf(struct hevc_state_s *hevc)
2204{
2205 if (hevc->detbuf_adr_virt)
2206 return 0;
2207
2208 hevc->detbuf_adr_virt =
2209 (void *)dma_alloc_coherent(amports_get_dma_device(),
2210 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2211 GFP_KERNEL);
2212
2213 if (hevc->detbuf_adr_virt == NULL) {
2214 pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2215 return -1;
2216 }
2217 return 0;
2218}
2219
2220static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2221{
2222 if (hevc->detbuf_adr_virt) {
2223 dma_free_coherent(amports_get_dma_device(),
2224 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2225 hevc->detbuf_adr);
2226
2227 hevc->detbuf_adr_virt = NULL;
2228 hevc->detbuf_adr = 0;
2229 }
2230}
2231
2232/*
2233 * convert uncompressed frame buffer data from/to ddr
2234 */
2235static void convUnc8x4blk(uint16_t* blk8x4Luma,
2236 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2237{
2238 if (direction == 0) {
2239 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2240 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2241 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2242 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2243 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2244 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2245 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2246 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2247 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2248 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2249 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2250 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2251 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2252 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2253 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2254 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2255 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2256 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2257
2258 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2259 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2260 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2261 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2262 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2263 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2264 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2265 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2266 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2267 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2268 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2269 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2270 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2271 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2272 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2273 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2274 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2275 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2276
2277 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2278 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2279 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2280 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2281 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2282 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2283 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2284 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2285 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2286 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2287 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2288 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2289 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2290 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2291 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2292 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2293 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2294 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2295
2296 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2297 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2298 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2299 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2300 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2301 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2302 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2303 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2304 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2305 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2306 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2307 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2308 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2309 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2310 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2311 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2312 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2313 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2314 } else {
2315 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2316 blk8x4Luma[3 + 0 * 8];
2317 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2318 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2319 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2320 (blk8x4Luma[3 + 3 * 8] >> 2);
2321 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2322 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2323 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2324 (blk8x4Luma[7 + 2 * 8] >>4);
2325 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2326 blk8x4Cb[0 + 0 * 4];
2327 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2328 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2329 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2330
2331 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2332 blk8x4Luma[0 + 0 * 8];
2333 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2334 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2335 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2336 (blk8x4Luma[0 + 1 * 8] >> 2);
2337 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2338 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2339 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2340 (blk8x4Luma[0 + 2 * 8] >>4);
2341 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2342 blk8x4Luma[2 + 2 * 8];
2343 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2344 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2345 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2346
2347 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2348 blk8x4Luma[4 + 0 * 8];
2349 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2350 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2351 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2352 (blk8x4Luma[4 + 1 * 8] >> 2);
2353 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2354 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2355 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2356 (blk8x4Luma[4 + 2 * 8] >>4);
2357 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2358 blk8x4Luma[6 + 2 * 8];
2359 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2360 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2361 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2362
2363 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2364 blk8x4Cb[1 + 0 * 4];
2365 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2366 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2367 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2368 (blk8x4Cr[2 + 0 * 4] >> 2);
2369 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2370 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2371 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2372 (blk8x4Cb[1 + 1 * 4] >>4);
2373 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2374 blk8x4Cb[2 + 1 * 4];
2375 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2376 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2377 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2378 }
2379}
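/*
 * Note on the packing handled above: each group of eight 16-bit words in
 * cmBodyBuf carries twelve 10-bit samples (120 of the 128 bits; in the
 * packing direction the top bits of the last word of each group are left
 * zero). One call therefore moves a full 8x4 luma block plus the
 * corresponding 4x2 Cb and Cr blocks (48 samples spread over four such
 * groups), unpacking when direction == 0 and packing when direction != 0.
 */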
2380
2381static void corrRefillWithAmrisc (
2382 struct hevc_state_s *hevc,
2383 uint32_t cmHeaderBaseAddr,
2384 uint32_t picWidth,
2385 uint32_t ctuPosition)
2386{
2387 int32_t i;
2388 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2389 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2390 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2391
2392 uint16_t cmBodyBuf[32 * 18];
2393
2394 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2395 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2396 uint32_t stride64x64 = pic_width_x64 * 128;
2397 uint32_t addr_offset64x64_abv = stride64x64 *
2398 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2399 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2400 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2401 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2402 unsigned int tmpData32;
2403
2404 uint16_t blkBuf0Y[32];
2405 uint16_t blkBuf0Cb[8];
2406 uint16_t blkBuf0Cr[8];
2407 uint16_t blkBuf1Y[32];
2408 uint16_t blkBuf1Cb[8];
2409 uint16_t blkBuf1Cr[8];
2410 int32_t blkBufCnt = 0;
2411
2412 int32_t blkIdx;
2413
2414 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2415 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2416 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2417 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2418
2419 for (i = 0; i < 32 * 18; i++)
2420 cmBodyBuf[i] = 0;
2421
2422 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2423 "%s, %d\n", __func__, __LINE__);
2424 do {
2425 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2426 } while (tmpData32);
2427 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2428 "%s, %d\n", __func__, __LINE__);
2429
2430 hevc_print(hevc, H265_DEBUG_DETAIL,
2431 "cmBodyBuf from detbuf:\n");
2432 for (i = 0; i < 32 * 18; i++) {
2433 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2434 if (get_dbg_flag(hevc) &
2435 H265_DEBUG_DETAIL) {
2436 if ((i & 0xf) == 0)
2437 hevc_print_cont(hevc, 0, "\n");
2438 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2439 }
2440 }
2441 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2442
2443 for (i = 0; i < 32; i++)
2444 blkBuf0Y[i] = 0;
2445 for (i = 0; i < 8; i++)
2446 blkBuf0Cb[i] = 0;
2447 for (i = 0; i < 8; i++)
2448 blkBuf0Cr[i] = 0;
2449 for (i = 0; i < 32; i++)
2450 blkBuf1Y[i] = 0;
2451 for (i = 0; i < 8; i++)
2452 blkBuf1Cb[i] = 0;
2453 for (i = 0; i < 8; i++)
2454 blkBuf1Cr[i] = 0;
2455
2456 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2457 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2458 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2459 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2460 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2461 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2462 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2463
2464 if (!aboveCtuAvailable && inAboveCtu)
2465 continue;
2466
2467 /* detRefillBuf --> 8x4block*/
2468 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2469
2470 if (restoreEnable) {
2471 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2472 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2473 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2474 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2475 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2476 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2477 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2478 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2479 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2480 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2481 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2482 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2483 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2484 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2485 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2486 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2487 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2488 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2489 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2490 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2491 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2492 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2493 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2494 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2495 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2496 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2497 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2498 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2499 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2500 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2501 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2502 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2503
2504 /*Store data back to DDR*/
2505 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2506 }
2507
2508 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2509 }
2510
2511 hevc_print(hevc, H265_DEBUG_DETAIL,
2512 "cmBodyBuf to detbuf:\n");
2513 for (i = 0; i < 32 * 18; i++) {
2514 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2515 if (get_dbg_flag(hevc) &
2516 H265_DEBUG_DETAIL) {
2517 if ((i & 0xf) == 0)
2518 hevc_print_cont(hevc, 0, "\n");
2519 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2520 }
2521 }
2522 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2523
2524 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2525 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2526 "%s, %d\n", __func__, __LINE__);
2527 do {
2528 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2529 } while (tmpData32);
2530 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2531 "%s, %d\n", __func__, __LINE__);
2532}
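/*
 * Flow of the routine above, as implemented: the compressed-header
 * addresses of the above and current 64x64 CTU are programmed into
 * HEVC_SAO_CTRL10/11 and the detbuf address into HEVC_SAO_DBG_MODE0;
 * writing 2 to HEVC_SAO_DBG_MODE1 requests a dump of the affected 8x4
 * blocks into detbuf, the register is polled until it clears, the samples
 * are patched in software via convUnc8x4blk(), and writing 3 to
 * HEVC_SAO_DBG_MODE1 triggers the write back to DDR.
 */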
2533
2534static void delrefill(struct hevc_state_s *hevc)
2535{
2536 /*
2537 * corrRefill
2538 * HEVC_SAO_DBG_MODE0: picGlobalVariable
2539 * [31:30] error number
2540 * [29:20] error2 ([9:7] tilex, [6:0] ctuy)
2541 * [19:10] error1, [9:0] error0
2542 */
2543 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2544 uint32_t errorIdx;
2545 uint32_t errorNum = (detResult>>30);
2546
2547 if (detResult) {
2548 hevc_print(hevc, H265_DEBUG_BUFMGR,
2549 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2550 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2551 uint32_t errorPos = errorIdx * 10;
2552 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2553 uint32_t tilex = (errorResult >> 7) - 1;
2554 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2555 + hevc->m_tile[0][tilex].width - 1;
2556 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2557 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2558 hevc_print(hevc, H265_DEBUG_BUFMGR,
2559 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2560 errorIdx, tilex, ctux, ctux, ctuy, ctuy);
2561 corrRefillWithAmrisc(
2562 hevc,
2563 (uint32_t)hevc->cur_pic->header_adr,
2564 hevc->pic_w,
2565 ctuPosition);
2566 }
2567
2568 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2569 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2570 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2571 }
2572}
2573#endif
2574
2575static void get_rpm_param(union param_u *params)
2576{
2577 int i;
2578 unsigned int data32;
2579
2580 for (i = 0; i < 128; i++) {
2581 do {
2582 data32 = READ_VREG(RPM_CMD_REG);
2583 /* hevc_print(hevc, 0, "%x\n", data32); */
2584 } while ((data32 & 0x10000) == 0);
2585 params->l.data[i] = data32 & 0xffff;
2586 /* hevc_print(hevc, 0, "%x\n", data32); */
2587 WRITE_VREG(RPM_CMD_REG, 0);
2588 }
2589}
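/*
 * RPM handshake as used above: 128 16-bit parameters are read one at a
 * time; bit 16 of RPM_CMD_REG acts as a "data ready" flag, the low 16 bits
 * carry the value, and writing 0 back acknowledges the word (presumably so
 * the ucode can post the next one).
 */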
2590
2591static int get_free_buf_idx(struct hevc_state_s *hevc)
2592{
2593 int index = INVALID_IDX;
2594 struct PIC_s *pic;
2595 int i;
2596
2597 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2598 pic = hevc->m_PIC[i];
2599 if (pic == NULL ||
2600 pic->index == -1 ||
2601 pic->BUF_index == -1)
2602 continue;
2603
2604 if (pic->output_mark == 0 &&
2605 pic->referenced == 0 &&
2606 pic->output_ready == 0) {
2607 pic->output_ready = 1;
2608 index = i;
2609 break;
2610 }
2611 }
2612
2613 return index;
2614}
2615
2616static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2617{
2618 int i;
2619 struct PIC_s *pic;
2620 struct PIC_s *ret_pic = NULL;
2621 if (POC == INVALID_POC)
2622 return NULL;
2623 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2624 pic = hevc->m_PIC[i];
2625 if (pic == NULL || pic->index == -1 ||
2626 pic->BUF_index == -1)
2627 continue;
2628 if (pic->POC == POC) {
2629 if (ret_pic == NULL)
2630 ret_pic = pic;
2631 else {
2632 if (pic->decode_idx > ret_pic->decode_idx)
2633 ret_pic = pic;
2634 }
2635 }
2636 }
2637 return ret_pic;
2638}
2639
2640static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2641{
2642 int i;
2643 struct PIC_s *pic;
2644 struct PIC_s *ret_pic = NULL;
2645
2646 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2647 pic = hevc->m_PIC[i];
2648 if (pic == NULL || pic->index == -1 ||
2649 pic->BUF_index == -1)
2650 continue;
2651 if ((pic->POC == POC) && (pic->referenced)) {
2652 if (ret_pic == NULL)
2653 ret_pic = pic;
2654 else {
2655 if (pic->decode_idx > ret_pic->decode_idx)
2656 ret_pic = pic;
2657 }
2658 }
2659 }
2660
2661 if (ret_pic == NULL) {
2662 if (get_dbg_flag(hevc)) {
2663 hevc_print(hevc, 0,
2664 "Wrong, POC of %d is not in referenced list\n",
2665 POC);
2666 }
2667 ret_pic = get_pic_by_POC(hevc, POC);
2668 }
2669 return ret_pic;
2670}
2671
2672static unsigned int log2i(unsigned int val)
2673{
2674 unsigned int ret = -1;
2675
2676 while (val != 0) {
2677 val >>= 1;
2678 ret++;
2679 }
2680 return ret;
2681}
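/*
 * log2i() returns floor(log2(val)), e.g. log2i(64) == 6 and log2i(63) == 5.
 * Note that val == 0 yields (unsigned int)-1.
 */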
2682
2683static int init_buf_spec(struct hevc_state_s *hevc);
2684
2685static bool v4l_is_there_vframe_bound(struct hevc_state_s *hevc)
2686{
2687 int i;
2688
2689 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2690 struct PIC_s *pic = hevc->m_PIC[i];
2691
2692 if (pic && pic->vframe_bound)
2693 return true;
2694 }
2695
2696 return false;
2697}
2698
2699static void v4l_mmu_buffer_release(struct hevc_state_s *hevc)
2700{
2701 int i;
2702
2703 /* release workspace */
2704 if (hevc->bmmu_box)
2705 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2706 BMMU_WORKSPACE_ID);
2707 /*
2708 * it's only when the vframe gets back to the driver that we can be sure
2709 * the vframe and its fd are still related. if the playback exits, buffers
2710 * held by the capture side must be released by the upper app when the fd
2711 * is closed, while the other buffers are released by the driver here.
2712 */
2713 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2714 struct PIC_s *pic = hevc->m_PIC[i];
2715
2716 if (pic && !pic->vframe_bound) {
2717 if (hevc->bmmu_box)
2718 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2719 VF_BUFFER_IDX(i));
2720 if (hevc->mmu_box)
2721 decoder_mmu_box_free_idx(hevc->mmu_box, i);
2722
2723 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
2724 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
2725 __func__, i, hevc->bmmu_box, hevc->mmu_box);
2726 }
2727 }
2728}
2729
2730static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2731{
2732 if (hevc->is_used_v4l &&
2733 v4l_is_there_vframe_bound(hevc)) {
2734 if (get_double_write_mode(hevc) != 0x10) {
2735 v4l_mmu_buffer_release(hevc);
2736 return;
2737 }
2738 }
2739
2740 if (hevc->mmu_box)
2741 decoder_mmu_box_free(hevc->mmu_box);
2742 hevc->mmu_box = NULL;
2743
2744 if (hevc->bmmu_box)
2745 decoder_bmmu_box_free(hevc->bmmu_box);
2746 hevc->bmmu_box = NULL;
2747}
2748static int init_mmu_buffers(struct hevc_state_s *hevc)
2749{
2750 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2751 CODEC_MM_FLAGS_TVP : 0;
2752 int buf_size = 64;
2753
2754 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2755 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2756 buf_size = 24;
2757 }
2758
2759 if (get_dbg_flag(hevc)) {
2760 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2761 __func__, hevc->max_pic_w, hevc->max_pic_h);
2762 }
2763
2764 hevc->need_cache_size = buf_size * SZ_1M;
2765 hevc->sc_start_time = get_jiffies_64();
2766 if (hevc->mmu_enable
2767 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2768 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2769 hevc->index,
2770 MAX_REF_PIC_NUM,
2771 buf_size * SZ_1M,
2772 tvp_flag
2773 );
2774 if (!hevc->mmu_box) {
2775 pr_err("h265 alloc mmu box failed!!\n");
2776 return -1;
2777 }
2778 }
2779
2780 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2781 hevc->index,
2782 BMMU_MAX_BUFFERS,
2783 4 + PAGE_SHIFT,
2784 CODEC_MM_FLAGS_CMA_CLEAR |
2785 CODEC_MM_FLAGS_FOR_VDECODER |
2786 tvp_flag);
2787 if (!hevc->bmmu_box) {
2788 if (hevc->mmu_box)
2789 decoder_mmu_box_free(hevc->mmu_box);
2790 hevc->mmu_box = NULL;
2791 pr_err("h265 alloc bmmu box failed!!\n");
2792 return -1;
2793 }
2794 return 0;
2795}
2796
2797struct buf_stru_s
2798{
2799 int lcu_total;
2800 int mc_buffer_size_h;
2801 int mc_buffer_size_u_v_h;
2802};
2803
2804#ifndef MV_USE_FIXED_BUF
2805static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2806{
2807 int i;
2808 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2809 if (hevc->m_mv_BUF[i].start_adr) {
2810 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2811 hevc_print(hevc, 0,
2812 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2813 i, hevc->m_mv_BUF[i].start_adr,
2814 hevc->m_mv_BUF[i].size,
2815 hevc->m_mv_BUF[i].used_flag);
2816 decoder_bmmu_box_free_idx(
2817 hevc->bmmu_box,
2818 MV_BUFFER_IDX(i));
2819 hevc->m_mv_BUF[i].start_adr = 0;
2820 hevc->m_mv_BUF[i].size = 0;
2821 hevc->m_mv_BUF[i].used_flag = 0;
2822 }
2823 }
2824 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2825 if (hevc->m_PIC[i] != NULL)
2826 hevc->m_PIC[i]->mv_buf_index = -1;
2827 }
2828}
2829
2830static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2831{
2832 int ret = 0;
2833 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2834 if (decoder_bmmu_box_alloc_buf_phy
2835 (hevc->bmmu_box,
2836 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2837 DRIVER_NAME,
2838 &hevc->m_mv_BUF[i].start_adr) < 0) {
2839 hevc->m_mv_BUF[i].start_adr = 0;
2840 ret = -1;
2841 } else {
2842 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2843 hevc->m_mv_BUF[i].used_flag = 0;
2844 ret = 0;
2845 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2846 hevc_print(hevc, 0,
2847 "MV Buffer %d: start_adr %p size %x\n",
2848 i,
2849 (void *)hevc->m_mv_BUF[i].start_adr,
2850 hevc->m_mv_BUF[i].size);
2851 }
2852 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2853 void *mem_start_virt;
2854 mem_start_virt =
2855 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2856 if (mem_start_virt) {
2857 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2858 codec_mm_dma_flush(mem_start_virt,
2859 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2860 } else {
2861 mem_start_virt = codec_mm_vmap(
2862 hevc->m_mv_BUF[i].start_adr,
2863 hevc->m_mv_BUF[i].size);
2864 if (mem_start_virt) {
2865 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2866 codec_mm_dma_flush(mem_start_virt,
2867 hevc->m_mv_BUF[i].size,
2868 DMA_TO_DEVICE);
2869 codec_mm_unmap_phyaddr(mem_start_virt);
2870 } else {
2871 /*no virtual mapping for tvp (secure) playback,
2872 clearing may need to be done by the ucode.*/
2873 pr_err("ref %s mem_start_virt failed\n", __func__);
2874 }
2875 }
2876 }
2877 }
2878 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2879 return ret;
2880}
2881#endif
2882
2883static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2884{
2885#ifdef MV_USE_FIXED_BUF
2886 if (pic && pic->index >= 0) {
2887 if (IS_8K_SIZE(pic->width, pic->height)) {
2888 pic->mpred_mv_wr_start_addr =
2889 hevc->work_space_buf->mpred_mv.buf_start
2890 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2891 } else {
2892 pic->mpred_mv_wr_start_addr =
2893 hevc->work_space_buf->mpred_mv.buf_start
2894 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2895 }
2896 }
2897 return 0;
2898#else
2899 int i;
2900 int ret = -1;
2901 int new_size;
2902 if (IS_8K_SIZE(pic->width, pic->height))
2903 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2904 else if (IS_4K_SIZE(pic->width, pic->height))
2905 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2906 else
2907 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2908 if (new_size != hevc->mv_buf_size) {
2909 dealloc_mv_bufs(hevc);
2910 hevc->mv_buf_size = new_size;
2911 }
2912 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2913 if (hevc->m_mv_BUF[i].start_adr &&
2914 hevc->m_mv_BUF[i].used_flag == 0) {
2915 hevc->m_mv_BUF[i].used_flag = 1;
2916 ret = i;
2917 break;
2918 }
2919 }
2920 if (ret < 0) {
2921 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2922 if (hevc->m_mv_BUF[i].start_adr == 0) {
2923 if (alloc_mv_buf(hevc, i) >= 0) {
2924 hevc->m_mv_BUF[i].used_flag = 1;
2925 ret = i;
2926 }
2927 break;
2928 }
2929 }
2930 }
2931
2932 if (ret >= 0) {
2933 pic->mv_buf_index = ret;
2934 pic->mpred_mv_wr_start_addr =
2935 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2936 (~0xffff);
2937 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2938 "%s => %d (0x%x) size 0x%x\n",
2939 __func__, ret,
2940 pic->mpred_mv_wr_start_addr,
2941 hevc->m_mv_BUF[ret].size);
2942
2943 } else {
2944 hevc_print(hevc, 0,
2945 "%s: Error, not enough MV buffers\n",
2946 __func__);
2947 }
2948 return ret;
2949
2950#endif
2951}
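/*
 * Note: in the non MV_USE_FIXED_BUF path above, mv_buf_size is padded by
 * 0x10000 and the write start address is rounded up to the next 64 KiB
 * boundary ((adr + 0xffff) & ~0xffff); whenever the required size changes,
 * all existing MV buffers are freed first so every buffer matches the
 * current stream size.
 */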
2952
2953static void put_mv_buf(struct hevc_state_s *hevc,
2954 struct PIC_s *pic)
2955{
2956#ifndef MV_USE_FIXED_BUF
2957 int i = pic->mv_buf_index;
2958 if (i < 0 || i >= MAX_REF_PIC_NUM) {
2959 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2960 "%s: index %d beyond range\n",
2961 __func__, i);
2962 return;
2963 }
2964 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2965 "%s(%d): used_flag(%d)\n",
2966 __func__, i,
2967 hevc->m_mv_BUF[i].used_flag);
2968
2969 if (hevc->m_mv_BUF[i].start_adr &&
2970 hevc->m_mv_BUF[i].used_flag)
2971 hevc->m_mv_BUF[i].used_flag = 0;
2972 pic->mv_buf_index = -1;
2973#endif
2974}
2975
2976static int cal_current_buf_size(struct hevc_state_s *hevc,
2977 struct buf_stru_s *buf_stru)
2978{
2979
2980 int buf_size;
2981 int pic_width = hevc->pic_w;
2982 int pic_height = hevc->pic_h;
2983 int lcu_size = hevc->lcu_size;
2984 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
2985 1 : pic_width / lcu_size;
2986 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
2987 1 : pic_height / lcu_size;
2988 /*SUPPORT_10BIT*/
2989 int losless_comp_header_size = compute_losless_comp_header_size
2990 (pic_width, pic_height);
2991 /*always alloc buf for 10bit*/
2992 int losless_comp_body_size = compute_losless_comp_body_size
2993 (hevc, pic_width, pic_height, 0);
2994 int mc_buffer_size = losless_comp_header_size
2995 + losless_comp_body_size;
2996 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
2997 int mc_buffer_size_u_v_h = 0;
2998
2999 int dw_mode = get_double_write_mode(hevc);
3000
3001 if (hevc->mmu_enable) {
3002 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3003 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3004 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
3005 << 16;
3006 else
3007 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
3008 << 16;
3009 } else
3010 buf_size = 0;
3011
3012 if (dw_mode) {
3013 int pic_width_dw = pic_width /
3014 get_double_write_ratio(hevc, dw_mode);
3015 int pic_height_dw = pic_height /
3016 get_double_write_ratio(hevc, dw_mode);
3017
3018 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
3019 pic_width_dw / lcu_size + 1 :
3020 pic_width_dw / lcu_size;
3021 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
3022 pic_height_dw / lcu_size + 1 :
3023 pic_height_dw / lcu_size;
3024 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
3025
3026 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
3027 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
3028 /*64k alignment*/
3029 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
3030 }
3031
3032 if ((!hevc->mmu_enable) &&
3033 ((dw_mode & 0x10) == 0)) {
3034 /* compressed data is used without the mmu,
3035 so a buffer for compressed decoding is needed*/
3036 buf_size += (mc_buffer_size_h << 16);
3037 }
3038
3039 /*in case the start address is not 64k aligned*/
3040 if (buf_size > 0)
3041 buf_size += 0x10000;
3042
3043 if (buf_stru) {
3044 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
3045 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
3046 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
3047 }
3048
3049 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
3050 pic_width, pic_height, losless_comp_header_size,
3051 losless_comp_body_size, mc_buffer_size_h,
3052 mc_buffer_size_u_v_h, buf_size);
3053
3054 return buf_size;
3055}
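/*
 * The size returned above stacks, as 64 KiB aligned chunks: the MMU
 * compression header area when mmu_enable is set, three chroma sized
 * chunks for the double write planes when a double write mode is active,
 * the compressed reference data when neither the mmu nor dw mode 0x10 is
 * used, plus 64 KiB of slack in case the start address is not 64 KiB
 * aligned.
 */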
3056
3057static int v4l_alloc_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
3058{
3059 int ret = -1;
3060 int i = pic->index;
3061 struct vdec_v4l2_buffer *fb = NULL;
3062
3063 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3064 return ret;
3065
3066 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3067 if (ret < 0) {
3068 hevc_print(hevc, 0, "[%d] H265 get buffer fail.\n",
3069 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id);
3070 return ret;
3071 }
3072
3073 if (hevc->mmu_enable) {
3074 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3075 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3076 hevc->m_BUF[i].header_size =
3077 ALIGN(MMU_COMPRESS_8K_HEADER_SIZE, 0x10000);
3078 else
3079 hevc->m_BUF[i].header_size =
3080 ALIGN(MMU_COMPRESS_HEADER_SIZE, 0x10000);
3081
3082 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
3083 VF_BUFFER_IDX(i), hevc->m_BUF[i].header_size,
3084 DRIVER_NAME, &hevc->m_BUF[i].header_addr);
3085 if (ret < 0) {
3086 hevc_print(hevc, PRINT_FLAG_ERROR,
3087 "%s[%d], header size: %d, no mem fatal err\n",
3088 __func__, i, hevc->m_BUF[i].header_size);
3089 return ret;
3090 }
3091 }
3092
3093 hevc->m_BUF[i].used_flag = 0;
3094 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3095 pic->cma_alloc_addr = hevc->m_BUF[i].v4l_ref_buf_addr;
3096 if (fb->num_planes == 1) {
3097 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3098 hevc->m_BUF[i].size = fb->m.mem[0].size;
3099 hevc->m_BUF[i].luma_size = fb->m.mem[0].offset;
3100 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3101 } else if (fb->num_planes == 2) {
3102 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3103 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3104 hevc->m_BUF[i].luma_size = fb->m.mem[0].size;
3105 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3106 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3107 }
3108
3109 return ret;
3110}
3111
3112static int alloc_buf(struct hevc_state_s *hevc)
3113{
3114 int i;
3115 int ret = -1;
3116 int buf_size = cal_current_buf_size(hevc, NULL);
3117
3118 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3119 return ret;
3120
3121 for (i = 0; i < BUF_POOL_SIZE; i++) {
3122 if (hevc->m_BUF[i].start_adr == 0)
3123 break;
3124 }
3125 if (i < BUF_POOL_SIZE) {
3126 if (buf_size > 0) {
3127 ret = decoder_bmmu_box_alloc_buf_phy
3128 (hevc->bmmu_box,
3129 VF_BUFFER_IDX(i), buf_size,
3130 DRIVER_NAME,
3131 &hevc->m_BUF[i].start_adr);
3132 if (ret < 0) {
3133 hevc->m_BUF[i].start_adr = 0;
3134 if (i <= 8) {
3135 hevc->fatal_error |=
3136 DECODER_FATAL_ERROR_NO_MEM;
3137 hevc_print(hevc, PRINT_FLAG_ERROR,
3138 "%s[%d], size: %d, no mem fatal err\n",
3139 __func__, i, buf_size);
3140 }
3141 }
3142
3143 if (ret >= 0) {
3144 hevc->m_BUF[i].size = buf_size;
3145 hevc->m_BUF[i].used_flag = 0;
3146 ret = 0;
3147
3148 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3149 hevc_print(hevc, 0,
3150 "Buffer %d: start_adr %p size %x\n",
3151 i,
3152 (void *)hevc->m_BUF[i].start_adr,
3153 hevc->m_BUF[i].size);
3154 }
3155 /*flush the buffer make sure no cache dirty*/
3156 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3157 void *mem_start_virt;
3158 mem_start_virt =
3159 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3160 if (mem_start_virt) {
3161 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3162 codec_mm_dma_flush(mem_start_virt,
3163 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3164 } else {
3165 mem_start_virt = codec_mm_vmap(
3166 hevc->m_BUF[i].start_adr,
3167 hevc->m_BUF[i].size);
3168 if (mem_start_virt) {
3169 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3170 codec_mm_dma_flush(mem_start_virt,
3171 hevc->m_BUF[i].size,
3172 DMA_TO_DEVICE);
3173 codec_mm_unmap_phyaddr(mem_start_virt);
3174 } else {
3175 /*no virtual mapping for tvp (secure) playback,
3176 clearing may need to be done by the ucode.*/
3177 pr_err("ref %s mem_start_virt failed\n", __func__);
3178 }
3179 }
3180 }
3181 }
3182 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3183 } else
3184 ret = 0;
3185 }
3186
3187 if (ret >= 0) {
3188 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3189 hevc_print(hevc, 0,
3190 "alloc buf(%d) for %d/%d (size 0x%x) => %p\n",
3191 i, hevc->pic_w, hevc->pic_h,
3192 buf_size,
3193 hevc->m_BUF[i].start_adr);
3194 }
3195 } else {
3196 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3197 hevc_print(hevc, 0,
3198 "alloc buf(%d) for %d/%d (size 0x%x) => Fail!!!\n",
3199 i, hevc->pic_w, hevc->pic_h,
3200 buf_size);
3201 }
3202 }
3203 return ret;
3204}
3205
3206static void set_buf_unused(struct hevc_state_s *hevc, int i)
3207{
3208 if (i >= 0 && i < BUF_POOL_SIZE)
3209 hevc->m_BUF[i].used_flag = 0;
3210}
3211
3212static void dealloc_unused_buf(struct hevc_state_s *hevc)
3213{
3214 int i;
3215 for (i = 0; i < BUF_POOL_SIZE; i++) {
3216 if (hevc->m_BUF[i].start_adr &&
3217 hevc->m_BUF[i].used_flag == 0) {
3218 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3219 hevc_print(hevc, 0,
3220 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3221 i, hevc->m_BUF[i].start_adr,
3222 hevc->m_BUF[i].size);
3223 }
3224 if (!hevc->is_used_v4l)
3225 decoder_bmmu_box_free_idx(
3226 hevc->bmmu_box,
3227 VF_BUFFER_IDX(i));
3228 hevc->m_BUF[i].start_adr = 0;
3229 hevc->m_BUF[i].size = 0;
3230 }
3231 }
3232}
3233
3234static void dealloc_pic_buf(struct hevc_state_s *hevc,
3235 struct PIC_s *pic)
3236{
3237 int i = pic->BUF_index;
3238 pic->BUF_index = -1;
3239 if (i >= 0 &&
3240 i < BUF_POOL_SIZE &&
3241 hevc->m_BUF[i].start_adr) {
3242 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3243 hevc_print(hevc, 0,
3244 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3245 i, hevc->m_BUF[i].start_adr,
3246 hevc->m_BUF[i].size);
3247 }
3248
3249 if (!hevc->is_used_v4l)
3250 decoder_bmmu_box_free_idx(
3251 hevc->bmmu_box,
3252 VF_BUFFER_IDX(i));
3253 hevc->m_BUF[i].used_flag = 0;
3254 hevc->m_BUF[i].start_adr = 0;
3255 hevc->m_BUF[i].size = 0;
3256 }
3257}
3258
3259static int get_work_pic_num(struct hevc_state_s *hevc)
3260{
3261 int used_buf_num = 0;
3262 int sps_pic_buf_diff = 0;
3263
3264 if (get_dynamic_buf_num_margin(hevc) > 0) {
3265 if ((!hevc->sps_num_reorder_pics_0) &&
3266 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3267 /* the range of sps_num_reorder_pics_0 is in
3268 [0, sps_max_dec_pic_buffering_minus1_0] */
3269 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3270 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3271 } else
3272 used_buf_num = hevc->sps_num_reorder_pics_0
3273 + get_dynamic_buf_num_margin(hevc);
3274
3275 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3276 - hevc->sps_num_reorder_pics_0;
3277#ifdef MULTI_INSTANCE_SUPPORT
3278 /*
3279 need one more buffer for multi-instance mode, as
3280 apply_ref_pic_set() has no chance to run to clear
3281 the referenced flag in some cases
3282 */
3283 if (hevc->m_ins_flag)
3284 used_buf_num++;
3285#endif
3286 } else
3287 used_buf_num = max_buf_num;
3288
3289 if (hevc->save_buffer_mode)
3290 hevc_print(hevc, 0,
3291 "save_buffer_mode: dynamic_buf_num_margin %d ----> %d\n",
3292 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3293
3294 if (sps_pic_buf_diff >= 4)
3295 {
3296 used_buf_num += 1;
3297 }
3298
3299 if (used_buf_num > MAX_BUF_NUM)
3300 used_buf_num = MAX_BUF_NUM;
3301 return used_buf_num;
3302}
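/*
 * In short: with a dynamic margin the pool size is sps_num_reorder_pics_0
 * (or sps_max_dec_pic_buffering_minus1_0 when the reorder count reads 0)
 * plus the margin, plus one for multi instance mode and one more when the
 * DPB/reorder gap is >= 4, clamped to MAX_BUF_NUM; otherwise the static
 * max_buf_num value is used.
 */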
3303
3304static int get_alloc_pic_count(struct hevc_state_s *hevc)
3305{
3306 int alloc_pic_count = 0;
3307 int i;
3308 struct PIC_s *pic;
3309 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3310 pic = hevc->m_PIC[i];
3311 if (pic && pic->index >= 0)
3312 alloc_pic_count++;
3313 }
3314 return alloc_pic_count;
3315}
3316
3317static int v4l_config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3318{
3319 int i = pic->index;
3320 int dw_mode = get_double_write_mode(hevc);
3321
3322 if (hevc->mmu_enable)
3323 pic->header_adr = hevc->m_BUF[i].header_addr;
3324
3325 pic->BUF_index = i;
3326 pic->POC = INVALID_POC;
3327 pic->mc_canvas_y = pic->index;
3328 pic->mc_canvas_u_v = pic->index;
3329
3330 if (dw_mode & 0x10) {
3331 pic->mc_y_adr = hevc->m_BUF[i].start_adr;
3332 pic->mc_u_v_adr = pic->mc_y_adr + hevc->m_BUF[i].luma_size;
3333 pic->mc_canvas_y = (pic->index << 1);
3334 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3335
3336 pic->dw_y_adr = pic->mc_y_adr;
3337 pic->dw_u_v_adr = pic->mc_u_v_adr;
3338 } else if (dw_mode) {
3339 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3340 pic->dw_u_v_adr = pic->dw_y_adr + hevc->m_BUF[i].luma_size;
3341 }
3342
3343 return 0;
3344}
3345
3346static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3347{
3348 int ret = -1;
3349 int i;
3350 /*int lcu_size_log2 = hevc->lcu_size_log2;
3351 int MV_MEM_UNIT=lcu_size_log2==
3352 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3353 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3354 5 ? 0x80 : 0x20;
3355 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3356 hevc->work_space_buf->mpred_mv.buf_size;*/
3357 unsigned int y_adr = 0;
3358 struct buf_stru_s buf_stru;
3359 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3360 int dw_mode = get_double_write_mode(hevc);
3361
3362 for (i = 0; i < BUF_POOL_SIZE; i++) {
3363 if (hevc->m_BUF[i].start_adr != 0 &&
3364 hevc->m_BUF[i].used_flag == 0 &&
3365 buf_size <= hevc->m_BUF[i].size) {
3366 hevc->m_BUF[i].used_flag = 1;
3367 break;
3368 }
3369 }
3370
3371 if (i >= BUF_POOL_SIZE)
3372 return -1;
3373
3374 if (hevc->mmu_enable) {
3375 pic->header_adr = hevc->m_BUF[i].start_adr;
3376 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3377 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3378 y_adr = hevc->m_BUF[i].start_adr +
3379 MMU_COMPRESS_8K_HEADER_SIZE;
3380 else
3381 y_adr = hevc->m_BUF[i].start_adr +
3382 MMU_COMPRESS_HEADER_SIZE;
3383 } else
3384 y_adr = hevc->m_BUF[i].start_adr;
3385
3386 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3387
3388 pic->POC = INVALID_POC;
3389 /*ensure get_pic_by_POC() does not
3390 return a buffer that is not yet decoded*/
3391 pic->BUF_index = i;
3392
3393 if ((!hevc->mmu_enable) &&
3394 ((dw_mode & 0x10) == 0)
3395 ) {
3396 pic->mc_y_adr = y_adr;
3397 y_adr += (buf_stru.mc_buffer_size_h << 16);
3398 }
3399 pic->mc_canvas_y = pic->index;
3400 pic->mc_canvas_u_v = pic->index;
3401 if (dw_mode & 0x10) {
3402 pic->mc_y_adr = y_adr;
3403 pic->mc_u_v_adr = y_adr +
3404 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3405 pic->mc_canvas_y = (pic->index << 1);
3406 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3407
3408 pic->dw_y_adr = pic->mc_y_adr;
3409 pic->dw_u_v_adr = pic->mc_u_v_adr;
3410 } else if (dw_mode) {
3411 pic->dw_y_adr = y_adr;
3412 pic->dw_u_v_adr = pic->dw_y_adr +
3413 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3414 }
3415
3416 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3417 hevc_print(hevc, 0,
3418 "%s index %d BUF_index %d mc_y_adr %x\n",
3419 __func__, pic->index,
3420 pic->BUF_index, pic->mc_y_adr);
3421 if (hevc->mmu_enable &&
3422 dw_mode)
3423 hevc_print(hevc, 0,
3424 "mmu double write adr %ld\n",
3425 pic->cma_alloc_addr);
3426 }
3427 ret = 0;
3428
3429 return ret;
3430}
3431
3432static void init_pic_list(struct hevc_state_s *hevc)
3433{
3434 int i;
3435 int init_buf_num = get_work_pic_num(hevc);
3436 int dw_mode = get_double_write_mode(hevc);
3437 struct vdec_s *vdec = hw_to_vdec(hevc);
3438 /*allocating decoder buffers is delayed when working on v4l. */
3439 if (!hevc->is_used_v4l) {
3440 for (i = 0; i < init_buf_num; i++) {
3441 if (alloc_buf(hevc) < 0) {
3442 if (i <= 8) {
3443 /*if (i + 1) >= 9 buffers were already
3444 allocated, don't report errors.*/
3445 hevc->fatal_error |=
3446 DECODER_FATAL_ERROR_NO_MEM;
3447 }
3448 break;
3449 }
3450 }
3451 }
3452
3453 for (i = 0; i < init_buf_num; i++) {
3454 struct PIC_s *pic = hevc->m_PIC[i];
3455
3456 if (!pic) {
3457 pic = vmalloc(sizeof(struct PIC_s));
3458 if (pic == NULL) {
3459 hevc_print(hevc, 0,
3460 "%s: alloc pic %d fail!!!\n",
3461 __func__, i);
3462 break;
3463 }
3464 hevc->m_PIC[i] = pic;
3465 }
3466 memset(pic, 0, sizeof(struct PIC_s));
3467
3468 pic->index = i;
3469 pic->BUF_index = -1;
3470 pic->mv_buf_index = -1;
3471 if (vdec->parallel_dec == 1) {
3472 pic->y_canvas_index = -1;
3473 pic->uv_canvas_index = -1;
3474 }
3475
3476 pic->width = hevc->pic_w;
3477 pic->height = hevc->pic_h;
3478 pic->double_write_mode = dw_mode;
3479
3480 /*configuring the canvas is delayed when working on v4l. */
3481 if (!hevc->is_used_v4l) {
3482 if (config_pic(hevc, pic) < 0) {
3483 if (get_dbg_flag(hevc))
3484 hevc_print(hevc, 0,
3485 "Config_pic %d fail\n", pic->index);
3486 pic->index = -1;
3487 i++;
3488 break;
3489 }
3490
3491 if (pic->double_write_mode)
3492 set_canvas(hevc, pic);
3493 }
3494 }
3495
3496 for (; i < MAX_REF_PIC_NUM; i++) {
3497 struct PIC_s *pic = hevc->m_PIC[i];
3498
3499 if (!pic) {
3500 pic = vmalloc(sizeof(struct PIC_s));
3501 if (pic == NULL) {
3502 hevc_print(hevc, 0,
3503 "%s: alloc pic %d fail!!!\n",
3504 __func__, i);
3505 break;
3506 }
3507 hevc->m_PIC[i] = pic;
3508 }
3509 memset(pic, 0, sizeof(struct PIC_s));
3510
3511 pic->index = -1;
3512 pic->BUF_index = -1;
3513 if (vdec->parallel_dec == 1) {
3514 pic->y_canvas_index = -1;
3515 pic->uv_canvas_index = -1;
3516 }
3517 }
3518
3519}
3520
3521static void uninit_pic_list(struct hevc_state_s *hevc)
3522{
3523 struct vdec_s *vdec = hw_to_vdec(hevc);
3524 int i;
3525#ifndef MV_USE_FIXED_BUF
3526 dealloc_mv_bufs(hevc);
3527#endif
3528 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3529 struct PIC_s *pic = hevc->m_PIC[i];
3530
3531 if (pic) {
3532 if (vdec->parallel_dec == 1) {
3533 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3534 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3535 }
3536 release_aux_data(hevc, pic);
3537 vfree(pic);
3538 hevc->m_PIC[i] = NULL;
3539 }
3540 }
3541}
3542
3543#ifdef LOSLESS_COMPRESS_MODE
3544static void init_decode_head_hw(struct hevc_state_s *hevc)
3545{
3546
3547 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3548 unsigned int data32;
3549
3550 int losless_comp_header_size =
3551 compute_losless_comp_header_size(hevc->pic_w,
3552 hevc->pic_h);
3553 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3554 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3555
3556 hevc->losless_comp_body_size = losless_comp_body_size;
3557
3558
3559 if (hevc->mmu_enable) {
3560 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3561 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3562 } else {
3563 if (hevc->mem_saving_mode == 1)
3564 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3565 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3566 else
3567 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3568 ((workaround_enable & 2) ? 1 : 0));
3569 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3570 /*
3571 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3572 * //8-bit mode
3573 */
3574 }
3575 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3576 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3577 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3578
3579 if (hevc->mmu_enable) {
3580 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3581 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3582 buf_spec->mmu_vbh.buf_start +
3583 buf_spec->mmu_vbh.buf_size/2);
3584 data32 = READ_VREG(HEVC_SAO_CTRL9);
3585 data32 |= 0x1;
3586 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3587
3588 /* use HEVC_CM_HEADER_START_ADDR */
3589 data32 = READ_VREG(HEVC_SAO_CTRL5);
3590 data32 |= (1<<10);
3591 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3592 }
3593
3594 if (!hevc->m_ins_flag)
3595 hevc_print(hevc, 0,
3596 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3597 __func__, hevc->pic_w, hevc->pic_h,
3598 losless_comp_body_size, losless_comp_header_size);
3599
3600}
3601#endif
3602#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3603
3604static void init_pic_list_hw(struct hevc_state_s *hevc)
3605{
3606 int i;
3607 int cur_pic_num = MAX_REF_PIC_NUM;
3608 int dw_mode = get_double_write_mode(hevc);
3609 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3610 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3611 (0x1 << 1) | (0x1 << 2));
3612 else
3613 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3614
3615 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3616 if (hevc->m_PIC[i] == NULL ||
3617 hevc->m_PIC[i]->index == -1) {
3618 cur_pic_num = i;
3619 break;
3620 }
3621 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3622 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3623 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3624 hevc->m_PIC[i]->header_adr>>5);
3625 else
3626 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3627 hevc->m_PIC[i]->mc_y_adr >> 5);
3628 } else
3629 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3630 hevc->m_PIC[i]->mc_y_adr |
3631 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3632 if (dw_mode & 0x10) {
3633 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3634 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3635 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3636 }
3637 else
3638 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3639 hevc->m_PIC[i]->mc_u_v_adr |
3640 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3641 | 0x1);
3642 }
3643 }
3644 if (cur_pic_num == 0)
3645 return;
3646 for (; i < MAX_REF_PIC_NUM; i++) {
3647 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3648 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3649 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3650 hevc->m_PIC[cur_pic_num-1]->header_adr>>5);
3651 else
3652 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3653 hevc->m_PIC[cur_pic_num-1]->mc_y_adr >> 5);
3654#ifndef LOSLESS_COMPRESS_MODE
3655 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3656 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr >> 5);
3657#endif
3658 } else {
3659 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3660 hevc->m_PIC[cur_pic_num-1]->mc_y_adr|
3661 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_y<<8)
3662 | 0x1);
3663#ifndef LOSLESS_COMPRESS_MODE
3664 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3665 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr|
3666 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_u_v<<8)
3667 | 0x1);
3668#endif
3669 }
3670 }
3671
3672 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3673
3674 /* Zero out canvas registers in IPP -- avoid simulation X */
3675 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3676 (0 << 8) | (0 << 1) | 1);
3677 for (i = 0; i < 32; i++)
3678 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3679
3680#ifdef LOSLESS_COMPRESS_MODE
3681 if ((dw_mode & 0x10) == 0)
3682 init_decode_head_hw(hevc);
3683#endif
3684
3685}
3686
3687
3688static void dump_pic_list(struct hevc_state_s *hevc)
3689{
3690 int i;
3691 struct PIC_s *pic;
3692
3693 hevc_print(hevc, 0,
3694 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3695 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3696 pic = hevc->m_PIC[i];
3697 if (pic == NULL || pic->index == -1)
3698 continue;
3699 hevc_print_cont(hevc, 0,
3700 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3701 pic->index, pic->BUF_index,
3702#ifndef MV_USE_FIXED_BUF
3703 pic->mv_buf_index,
3704#else
3705 -1,
3706#endif
3707 pic->decode_idx, pic->POC, pic->referenced);
3708 hevc_print_cont(hevc, 0,
3709 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3710 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3711 pic->width, pic->height);
3712 hevc_print_cont(hevc, 0,
3713 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3714 pic->output_ready, pic->mpred_mv_wr_start_addr,
3715 pic->vf_ref);
3716 }
3717}
3718
3719static void clear_referenced_flag(struct hevc_state_s *hevc)
3720{
3721 int i;
3722 struct PIC_s *pic;
3723 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3724 pic = hevc->m_PIC[i];
3725 if (pic == NULL || pic->index == -1)
3726 continue;
3727 if (pic->referenced) {
3728 pic->referenced = 0;
3729 put_mv_buf(hevc, pic);
3730 }
3731 }
3732}
3733
3734static void clear_poc_flag(struct hevc_state_s *hevc)
3735{
3736 int i;
3737 struct PIC_s *pic;
3738 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3739 pic = hevc->m_PIC[i];
3740 if (pic == NULL || pic->index == -1)
3741 continue;
3742 pic->POC = INVALID_POC;
3743 }
3744}
3745
3746static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3747 unsigned char flush_flag)
3748{
3749 int num_pic_not_yet_display = 0;
3750 int i;
3751 struct PIC_s *pic;
3752 struct PIC_s *pic_display = NULL;
3753 struct vdec_s *vdec = hw_to_vdec(hevc);
3754
3755 if (hevc->i_only & 0x4) {
3756 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3757 pic = hevc->m_PIC[i];
3758 if (pic == NULL ||
3759 (pic->index == -1) ||
3760 (pic->BUF_index == -1) ||
3761 (pic->POC == INVALID_POC))
3762 continue;
3763 if (pic->output_mark) {
3764 if (pic_display) {
3765 if (pic->decode_idx <
3766 pic_display->decode_idx)
3767 pic_display = pic;
3768
3769 } else
3770 pic_display = pic;
3771
3772 }
3773 }
3774 if (pic_display) {
3775 pic_display->output_mark = 0;
3776 pic_display->recon_mark = 0;
3777 pic_display->output_ready = 1;
3778 pic_display->referenced = 0;
3779 put_mv_buf(hevc, pic_display);
3780 }
3781 } else {
3782 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3783 pic = hevc->m_PIC[i];
3784 if (pic == NULL ||
3785 (pic->index == -1) ||
3786 (pic->BUF_index == -1) ||
3787 (pic->POC == INVALID_POC))
3788 continue;
3789 if (pic->output_mark)
3790 num_pic_not_yet_display++;
3791 if (pic->slice_type == 2 &&
3792 hevc->vf_pre_count == 0 &&
3793 fast_output_enable & 0x1) {
3794 /*fast output for first I picture*/
3795 pic->num_reorder_pic = 0;
3796 if (vdec->master || vdec->slave)
3797 pic_display = pic;
3798 hevc_print(hevc, 0, "VH265: output first frame\n");
3799 }
3800 }
3801
3802 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3803 pic = hevc->m_PIC[i];
3804 if (pic == NULL ||
3805 (pic->index == -1) ||
3806 (pic->BUF_index == -1) ||
3807 (pic->POC == INVALID_POC))
3808 continue;
3809 if (pic->output_mark) {
3810 if (pic_display) {
3811 if (pic->POC < pic_display->POC)
3812 pic_display = pic;
3813 else if ((pic->POC == pic_display->POC)
3814 && (pic->decode_idx <
3815 pic_display->
3816 decode_idx))
3817 pic_display
3818 = pic;
3819 } else
3820 pic_display = pic;
3821 }
3822 }
3823 if (pic_display) {
3824 if ((num_pic_not_yet_display >
3825 pic_display->num_reorder_pic)
3826 || flush_flag) {
3827 pic_display->output_mark = 0;
3828 pic_display->recon_mark = 0;
3829 pic_display->output_ready = 1;
3830 } else if (num_pic_not_yet_display >=
3831 (MAX_REF_PIC_NUM - 1)) {
3832 pic_display->output_mark = 0;
3833 pic_display->recon_mark = 0;
3834 pic_display->output_ready = 1;
3835 hevc_print(hevc, 0,
3836 "Warning, num_reorder_pic %d is byeond buf num\n",
3837 pic_display->num_reorder_pic);
3838 } else
3839 pic_display = NULL;
3840 }
3841 }
3842
3843 if (pic_display && (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3844 pic_display = NULL;
3845 hevc->first_pic_flag = 0;
3846 }
3847 return pic_display;
3848}
3849
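/*
 * Program the motion-compensation reference canvas table for the current
 * picture: list0 canvases for P/B slices and list1 canvases for B slices.
 * A reference with mismatched width/height, or one that cannot be found,
 * marks the current picture as in error; an error-marked reference does the
 * same when ref_frame_mark_flag is set for this instance.
 */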
3850static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3851{
3852 int i;
3853 struct PIC_s *pic;
3854
3855 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3856 hevc_print(hevc, 0,
3857 "config_mc_buffer entered .....\n");
3858 if (cur_pic->slice_type != 2) { /* P and B pic */
3859 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3860 (0 << 8) | (0 << 1) | 1);
3861 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3862 pic =
3863 get_ref_pic_by_POC(hevc,
3864 cur_pic->
3865 m_aiRefPOCList0[cur_pic->
3866 slice_idx][i]);
3867 if (pic) {
3868 if ((pic->width != hevc->pic_w) ||
3869 (pic->height != hevc->pic_h)) {
3870 hevc_print(hevc, 0,
3871 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3872 __func__, pic->POC,
3873 pic->width, pic->height);
3874 cur_pic->error_mark = 1;
3875 }
3876 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3877 cur_pic->error_mark = 1;
3878 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3879 (pic->mc_canvas_u_v << 16)
3880 | (pic->mc_canvas_u_v
3881 << 8) |
3882 pic->mc_canvas_y);
3883 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3884 hevc_print_cont(hevc, 0,
3885 "refid %x mc_canvas_u_v %x",
3886 i, pic->mc_canvas_u_v);
3887 hevc_print_cont(hevc, 0,
3888 " mc_canvas_y %x\n",
3889 pic->mc_canvas_y);
3890 }
3891 } else
3892 cur_pic->error_mark = 1;
3893
3894 if (pic == NULL || pic->error_mark) {
3895 hevc_print(hevc, 0,
3896 "Error %s, %dth poc (%d) %s",
3897 __func__, i,
3898 cur_pic->m_aiRefPOCList0[cur_pic->
3899 slice_idx][i],
3900 pic ? "has error" :
3901 "not in list0");
3902 }
3903 }
3904 }
3905 if (cur_pic->slice_type == 0) { /* B pic */
3906 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3907 hevc_print(hevc, 0,
3908 "config_mc_buffer RefNum_L1\n");
3909 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3910 (16 << 8) | (0 << 1) | 1);
3911
3912 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3913 pic =
3914 get_ref_pic_by_POC(hevc,
3915 cur_pic->
3916 m_aiRefPOCList1[cur_pic->
3917 slice_idx][i]);
3918 if (pic) {
3919 if ((pic->width != hevc->pic_w) ||
3920 (pic->height != hevc->pic_h)) {
3921 hevc_print(hevc, 0,
3922 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3923 __func__, pic->POC,
3924 pic->width, pic->height);
3925 cur_pic->error_mark = 1;
3926 }
3927
3928 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3929 cur_pic->error_mark = 1;
3930 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3931 (pic->mc_canvas_u_v << 16)
3932 | (pic->mc_canvas_u_v
3933 << 8) |
3934 pic->mc_canvas_y);
3935 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3936 hevc_print_cont(hevc, 0,
3937 "refid %x mc_canvas_u_v %x",
3938 i, pic->mc_canvas_u_v);
3939 hevc_print_cont(hevc, 0,
3940 " mc_canvas_y %x\n",
3941 pic->mc_canvas_y);
3942 }
3943 } else
3944 cur_pic->error_mark = 1;
3945
3946 if (pic == NULL || pic->error_mark) {
3947 hevc_print(hevc, 0,
3948 "Error %s, %dth poc (%d) %s",
3949 __func__, i,
3950 cur_pic->m_aiRefPOCList1[cur_pic->
3951 slice_idx][i],
3952 pic ? "has error" :
3953 "not in list1");
3954 }
3955 }
3956 }
3957 return 0;
3958}
3959
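/*
 * Walk the picture store and clear the 'referenced' flag (releasing the MV
 * buffer) of any picture that is no longer part of the reference picture
 * set signalled in params->p.CUR_RPS[] relative to cur_poc.
 */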
3960static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
3961 union param_u *params)
3962{
3963 int ii, i;
3964 int poc_tmp;
3965 struct PIC_s *pic;
3966 unsigned char is_referenced;
3967 /* hevc_print(hevc, 0,
3968 "%s cur_poc %d\n", __func__, cur_poc); */
3969 if (pic_list_debug & 0x2) {
3970 pr_err("cur poc %d\n", cur_poc);
3971 }
3972 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
3973 pic = hevc->m_PIC[ii];
3974 if (pic == NULL ||
3975 pic->index == -1 ||
3976 pic->BUF_index == -1
3977 )
3978 continue;
3979
3980 if ((pic->referenced == 0 || pic->POC == cur_poc))
3981 continue;
3982 is_referenced = 0;
3983 for (i = 0; i < 16; i++) {
3984 int delt;
3985
3986 if (params->p.CUR_RPS[i] & 0x8000)
3987 break;
3988 delt =
3989 params->p.CUR_RPS[i] &
3990 ((1 << (RPS_USED_BIT - 1)) - 1);
3991 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
3992 poc_tmp =
3993 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
3994 delt);
3995 } else
3996 poc_tmp = cur_poc + delt;
3997 if (poc_tmp == pic->POC) {
3998 is_referenced = 1;
3999 /* hevc_print(hevc, 0, "i is %d\n", i); */
4000 break;
4001 }
4002 }
4003 if (is_referenced == 0) {
4004 pic->referenced = 0;
4005 put_mv_buf(hevc, pic);
4006 /* hevc_print(hevc, 0,
4007 "set poc %d reference to 0\n", pic->POC); */
4008 if (pic_list_debug & 0x2) {
4009 pr_err("set poc %d reference to 0\n", pic->POC);
4010 }
4011 }
4012 }
4013
4014}
4015
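/*
 * Build the POC-based reference lists (m_aiRefPOCList0/1) for the current
 * slice from CUR_RPS: negative/positive deltas form RefPicSetStCurr0/1,
 * which are mapped to list0/list1 either round robin over total_num or via
 * the reference list modification syntax when modification_flag is set.
 * Also records the slice type and RefNum_L0/L1 in the picture.
 */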
4016static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
4017{
4018 struct PIC_s *pic = hevc->cur_pic;
4019 int i, rIdx;
4020 int num_neg = 0;
4021 int num_pos = 0;
4022 int total_num;
4023 int num_ref_idx_l0_active =
4024 (params->p.num_ref_idx_l0_active >
4025 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4026 params->p.num_ref_idx_l0_active;
4027 int num_ref_idx_l1_active =
4028 (params->p.num_ref_idx_l1_active >
4029 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4030 params->p.num_ref_idx_l1_active;
4031
4032 int RefPicSetStCurr0[16];
4033 int RefPicSetStCurr1[16];
4034
4035 for (i = 0; i < 16; i++) {
4036 RefPicSetStCurr0[i] = 0;
4037 RefPicSetStCurr1[i] = 0;
4038 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
4039 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
4040 }
4041 for (i = 0; i < 16; i++) {
4042 if (params->p.CUR_RPS[i] & 0x8000)
4043 break;
4044 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
4045 int delt =
4046 params->p.CUR_RPS[i] &
4047 ((1 << (RPS_USED_BIT - 1)) - 1);
4048
4049 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
4050 RefPicSetStCurr0[num_neg] =
4051 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
4052 delt);
4053 /* hevc_print(hevc, 0,
4054 * "RefPicSetStCurr0 %x %x %x\n",
4055 * RefPicSetStCurr0[num_neg], pic->POC,
4056 * (0x800-(params[i]&0x7ff)));
4057 */
4058 num_neg++;
4059 } else {
4060 RefPicSetStCurr1[num_pos] = pic->POC + delt;
4061 /* hevc_print(hevc, 0,
4062 * "RefPicSetStCurr1 %d\n",
4063 * RefPicSetStCurr1[num_pos]);
4064 */
4065 num_pos++;
4066 }
4067 }
4068 }
4069 total_num = num_neg + num_pos;
4070 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4071 hevc_print(hevc, 0,
4072 "%s: curpoc %d slice_type %d, total %d ",
4073 __func__, pic->POC, params->p.slice_type, total_num);
4074 hevc_print_cont(hevc, 0,
4075 "num_neg %d num_list0 %d num_list1 %d\n",
4076 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
4077 }
4078
4079 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4080 hevc_print(hevc, 0,
4081 "HEVC Stream buf start ");
4082 hevc_print_cont(hevc, 0,
4083 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
4084 READ_VREG(HEVC_STREAM_START_ADDR),
4085 READ_VREG(HEVC_STREAM_END_ADDR),
4086 READ_VREG(HEVC_STREAM_WR_PTR),
4087 READ_VREG(HEVC_STREAM_RD_PTR),
4088 READ_VREG(HEVC_STREAM_LEVEL),
4089 READ_VREG(HEVC_STREAM_FIFO_CTL),
4090 READ_VREG(HEVC_PARSER_INT_CONTROL));
4091 }
4092
4093 if (total_num > 0) {
4094 if (params->p.modification_flag & 0x1) {
4095 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4096 hevc_print(hevc, 0, "ref0 POC (modification):");
4097 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4098 int cIdx = params->p.modification_list[rIdx];
4099
4100 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4101 cIdx >=
4102 num_neg ? RefPicSetStCurr1[cIdx -
4103 num_neg] :
4104 RefPicSetStCurr0[cIdx];
4105 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4106 hevc_print_cont(hevc, 0, "%d ",
4107 pic->m_aiRefPOCList0[pic->
4108 slice_idx]
4109 [rIdx]);
4110 }
4111 }
4112 } else {
4113 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4114 hevc_print(hevc, 0, "ref0 POC:");
4115 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4116 int cIdx = rIdx % total_num;
4117
4118 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4119 cIdx >=
4120 num_neg ? RefPicSetStCurr1[cIdx -
4121 num_neg] :
4122 RefPicSetStCurr0[cIdx];
4123 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4124 hevc_print_cont(hevc, 0, "%d ",
4125 pic->m_aiRefPOCList0[pic->
4126 slice_idx]
4127 [rIdx]);
4128 }
4129 }
4130 }
4131 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4132 hevc_print_cont(hevc, 0, "\n");
4133 if (params->p.slice_type == B_SLICE) {
4134 if (params->p.modification_flag & 0x2) {
4135 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4136 hevc_print(hevc, 0,
4137 "ref1 POC (modification):");
4138 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4139 rIdx++) {
4140 int cIdx;
4141
4142 if (params->p.modification_flag & 0x1) {
4143 cIdx =
4144 params->p.
4145 modification_list
4146 [num_ref_idx_l0_active +
4147 rIdx];
4148 } else {
4149 cIdx =
4150 params->p.
4151 modification_list[rIdx];
4152 }
4153 pic->m_aiRefPOCList1[pic->
4154 slice_idx][rIdx] =
4155 cIdx >=
4156 num_pos ?
4157 RefPicSetStCurr0[cIdx - num_pos]
4158 : RefPicSetStCurr1[cIdx];
4159 if (get_dbg_flag(hevc) &
4160 H265_DEBUG_BUFMGR) {
4161 hevc_print_cont(hevc, 0, "%d ",
4162 pic->
4163 m_aiRefPOCList1[pic->
4164 slice_idx]
4165 [rIdx]);
4166 }
4167 }
4168 } else {
4169 if (get_dbg_flag(hevc) &
4170 H265_DEBUG_BUFMGR)
4171 hevc_print(hevc, 0, "ref1 POC:");
4172 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4173 rIdx++) {
4174 int cIdx = rIdx % total_num;
4175
4176 pic->m_aiRefPOCList1[pic->
4177 slice_idx][rIdx] =
4178 cIdx >=
4179 num_pos ?
4180 RefPicSetStCurr0[cIdx -
4181 num_pos]
4182 : RefPicSetStCurr1[cIdx];
4183 if (get_dbg_flag(hevc) &
4184 H265_DEBUG_BUFMGR) {
4185 hevc_print_cont(hevc, 0, "%d ",
4186 pic->
4187 m_aiRefPOCList1[pic->
4188 slice_idx]
4189 [rIdx]);
4190 }
4191 }
4192 }
4193 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4194 hevc_print_cont(hevc, 0, "\n");
4195 }
4196 }
4197 /*set m_PIC */
4198 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4199 (params->p.slice_type == P_SLICE) ? 1 :
4200 (params->p.slice_type == B_SLICE) ? 0 : 3;
4201 pic->RefNum_L0 = num_ref_idx_l0_active;
4202 pic->RefNum_L1 = num_ref_idx_l1_active;
4203}
4204
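/*
 * Derive the tile layout for the current picture.  With tiles enabled the
 * per-tile width/height and start CU coordinates come either from a uniform
 * split of the picture or from the explicit tile_width/tile_height arrays,
 * and each tile is assigned its SAO vertical/above buffer start address.
 * Without tiles a single tile covering the whole picture is used.
 */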
4205static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4206 int pic_height_cu, int sao_mem_unit,
4207 union param_u *params)
4208{
4209 int i, j;
4210 int start_cu_x, start_cu_y;
4211 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4212 int sao_abv_size = sao_mem_unit * pic_width_cu;
4213#ifdef DETREFILL_ENABLE
4214 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4215 int tmpRefillLcuSize = 1 <<
4216 (params->p.log2_min_coding_block_size_minus3 +
4217 3 + params->p.log2_diff_max_min_coding_block_size);
4218 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4219 "%x, %x, %x, %x\n",
4220 params->p.slice_segment_address,
4221 params->p.bit_depth,
4222 params->p.tiles_enabled_flag,
4223 tmpRefillLcuSize);
4224 if (params->p.slice_segment_address == 0 &&
4225 params->p.bit_depth != 0 &&
4226 (params->p.tiles_enabled_flag & 1) &&
4227 tmpRefillLcuSize == 64)
4228 hevc->delrefill_check = 1;
4229 else
4230 hevc->delrefill_check = 0;
4231 }
4232#endif
4233
4234 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4235 if (params->p.tiles_enabled_flag & 1) {
4236 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4237 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4238
4239 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4240 || hevc->num_tile_row <= 0) {
4241 hevc->num_tile_row = 1;
4242 hevc_print(hevc, 0,
4243 "%s: num_tile_rows_minus1 (%d) error!!\n",
4244 __func__, params->p.num_tile_rows_minus1);
4245 }
4246 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4247 || hevc->num_tile_col <= 0) {
4248 hevc->num_tile_col = 1;
4249 hevc_print(hevc, 0,
4250 "%s: num_tile_columns_minus1 (%d) error!!\n",
4251 __func__, params->p.num_tile_columns_minus1);
4252 }
4253 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4254 hevc_print(hevc, 0,
4255 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4256 __func__, pic_width_cu, pic_height_cu);
4257 hevc_print_cont(hevc, 0,
4258 "num_tile_col %d num_tile_row %d:\n",
4259 hevc->num_tile_col, hevc->num_tile_row);
4260 }
4261
4262 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4263 int w = pic_width_cu / hevc->num_tile_col;
4264 int h = pic_height_cu / hevc->num_tile_row;
4265
4266 start_cu_y = 0;
4267 for (i = 0; i < hevc->num_tile_row; i++) {
4268 start_cu_x = 0;
4269 for (j = 0; j < hevc->num_tile_col; j++) {
4270 if (j == (hevc->num_tile_col - 1)) {
4271 hevc->m_tile[i][j].width =
4272 pic_width_cu -
4273 start_cu_x;
4274 } else
4275 hevc->m_tile[i][j].width = w;
4276 if (i == (hevc->num_tile_row - 1)) {
4277 hevc->m_tile[i][j].height =
4278 pic_height_cu -
4279 start_cu_y;
4280 } else
4281 hevc->m_tile[i][j].height = h;
4282 hevc->m_tile[i][j].start_cu_x
4283 = start_cu_x;
4284 hevc->m_tile[i][j].start_cu_y
4285 = start_cu_y;
4286 hevc->m_tile[i][j].sao_vb_start_addr =
4287 hevc->work_space_buf->sao_vb.
4288 buf_start + j * sao_vb_size;
4289 hevc->m_tile[i][j].sao_abv_start_addr =
4290 hevc->work_space_buf->sao_abv.
4291 buf_start + i * sao_abv_size;
4292 if (get_dbg_flag(hevc) &
4293 H265_DEBUG_BUFMGR) {
4294 hevc_print_cont(hevc, 0,
4295 "{y=%d, x=%d w %d h %d ",
4296 i, j, hevc->m_tile[i][j].width,
4297 hevc->m_tile[i][j].height);
4298 hevc_print_cont(hevc, 0,
4299 "start_x %d start_y %d ",
4300 hevc->m_tile[i][j].start_cu_x,
4301 hevc->m_tile[i][j].start_cu_y);
4302 hevc_print_cont(hevc, 0,
4303 "sao_vb_start 0x%x ",
4304 hevc->m_tile[i][j].
4305 sao_vb_start_addr);
4306 hevc_print_cont(hevc, 0,
4307 "sao_abv_start 0x%x}\n",
4308 hevc->m_tile[i][j].
4309 sao_abv_start_addr);
4310 }
4311 start_cu_x += hevc->m_tile[i][j].width;
4312
4313 }
4314 start_cu_y += hevc->m_tile[i][0].height;
4315 }
4316 } else {
4317 start_cu_y = 0;
4318 for (i = 0; i < hevc->num_tile_row; i++) {
4319 start_cu_x = 0;
4320 for (j = 0; j < hevc->num_tile_col; j++) {
4321 if (j == (hevc->num_tile_col - 1)) {
4322 hevc->m_tile[i][j].width =
4323 pic_width_cu -
4324 start_cu_x;
4325 } else {
4326 hevc->m_tile[i][j].width =
4327 params->p.tile_width[j];
4328 }
4329 if (i == (hevc->num_tile_row - 1)) {
4330 hevc->m_tile[i][j].height =
4331 pic_height_cu -
4332 start_cu_y;
4333 } else {
4334 hevc->m_tile[i][j].height =
4335 params->
4336 p.tile_height[i];
4337 }
4338 hevc->m_tile[i][j].start_cu_x
4339 = start_cu_x;
4340 hevc->m_tile[i][j].start_cu_y
4341 = start_cu_y;
4342 hevc->m_tile[i][j].sao_vb_start_addr =
4343 hevc->work_space_buf->sao_vb.
4344 buf_start + j * sao_vb_size;
4345 hevc->m_tile[i][j].sao_abv_start_addr =
4346 hevc->work_space_buf->sao_abv.
4347 buf_start + i * sao_abv_size;
4348 if (get_dbg_flag(hevc) &
4349 H265_DEBUG_BUFMGR) {
4350 hevc_print_cont(hevc, 0,
4351 "{y=%d, x=%d w %d h %d ",
4352 i, j, hevc->m_tile[i][j].width,
4353 hevc->m_tile[i][j].height);
4354 hevc_print_cont(hevc, 0,
4355 "start_x %d start_y %d ",
4356 hevc->m_tile[i][j].start_cu_x,
4357 hevc->m_tile[i][j].start_cu_y);
4358 hevc_print_cont(hevc, 0,
4359 "sao_vb_start 0x%x ",
4360 hevc->m_tile[i][j].
4361 sao_vb_start_addr);
4362 hevc_print_cont(hevc, 0,
4363 "sao_abv_start 0x%x}\n",
4364 hevc->m_tile[i][j].
4365 sao_abv_start_addr);
4366
4367 }
4368 start_cu_x += hevc->m_tile[i][j].width;
4369 }
4370 start_cu_y += hevc->m_tile[i][0].height;
4371 }
4372 }
4373 } else {
4374 hevc->num_tile_col = 1;
4375 hevc->num_tile_row = 1;
4376 hevc->m_tile[0][0].width = pic_width_cu;
4377 hevc->m_tile[0][0].height = pic_height_cu;
4378 hevc->m_tile[0][0].start_cu_x = 0;
4379 hevc->m_tile[0][0].start_cu_y = 0;
4380 hevc->m_tile[0][0].sao_vb_start_addr =
4381 hevc->work_space_buf->sao_vb.buf_start;
4382 hevc->m_tile[0][0].sao_abv_start_addr =
4383 hevc->work_space_buf->sao_abv.buf_start;
4384 }
4385}
4386
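/*
 * Map an LCU address to its tile: returns tile_x in bits [7:0] and tile_y
 * in bits [15:8], or -1 if pic_width_lcu is 0.
 */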
4387static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4388 int pic_width_lcu)
4389{
4390 int cu_x;
4391 int cu_y;
4392 int tile_x = 0;
4393 int tile_y = 0;
4394 int i;
4395
4396 if (pic_width_lcu == 0) {
4397 if (get_dbg_flag(hevc)) {
4398 hevc_print(hevc, 0,
4399 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4400 __func__, hevc->pic_w, hevc->pic_h);
4401 }
4402 return -1;
4403 }
4404 cu_x = cu_adr % pic_width_lcu;
4405 cu_y = cu_adr / pic_width_lcu;
4406 if (hevc->tile_enabled) {
4407 for (i = 0; i < hevc->num_tile_col; i++) {
4408 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4409 tile_x = i;
4410 else
4411 break;
4412 }
4413 for (i = 0; i < hevc->num_tile_row; i++) {
4414 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4415 tile_y = i;
4416 else
4417 break;
4418 }
4419 }
4420 return (tile_x) | (tile_y << 8);
4421}
4422
4423static void print_scratch_error(int error_num)
4424{
4425#if 0
4426 if (get_dbg_flag(hevc)) {
4427 hevc_print(hevc, 0,
4428 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4429 error_num);
4430 }
4431#endif
4432}
4433
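/*
 * Write the work-space buffer base addresses (IPP line buffer, RPM, RPS,
 * VPS/SPS/PPS, SAO, scale LUT, deblocking data, LMEM dump, and the frame
 * MMU map when the MMU is enabled) into the decoder registers.
 */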
4434static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4435{
4436 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4437
4438 if (get_dbg_flag(hevc))
4439 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4440 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4441 __func__,
4442 buf_spec->ipp.buf_start,
4443 buf_spec->start_adr,
4444 buf_spec->short_term_rps.buf_start,
4445 buf_spec->vps.buf_start,
4446 buf_spec->sps.buf_start,
4447 buf_spec->pps.buf_start,
4448 buf_spec->sao_up.buf_start,
4449 buf_spec->swap_buf.buf_start,
4450 buf_spec->swap_buf2.buf_start,
4451 buf_spec->scalelut.buf_start,
4452 buf_spec->dblk_para.buf_start,
4453 buf_spec->dblk_data.buf_start,
4454 buf_spec->dblk_data2.buf_start);
4455 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4456 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4457 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4458 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4459 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4460 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4461 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4462 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4463 if (hevc->mmu_enable) {
4464 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4465 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4466 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4467 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4468 } else
4469 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4470 } /*else
4471 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4472 buf_spec->swap_buf.buf_start);
4473 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4474 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4475 /* cfg_p_addr */
4476 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4477 /* cfg_d_addr */
4478 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4479
4480 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4481
4482 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4483}
4484
4485static void parser_cmd_write(void)
4486{
4487 u32 i;
4488 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4489 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4490 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4491 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4492 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4493 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4494 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4495 0x7C00
4496 };
4497 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4498 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4499}
4500
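/*
 * One-time HEVC decoder core setup: sanity-check a few scratch registers,
 * reset IQIT, enable stream fetch for non multi-instance mode, program the
 * parser/shift/CABAC controls and interrupt enables, clear the IQIT scale
 * LUT, and download the parser command sequence.
 */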
4501static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4502 int decode_pic_begin, int decode_pic_num)
4503{
4504 unsigned int data32;
4505 int i;
4506#if 0
4507 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4508 /* Set MCR fetch priorities*/
4509 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4510 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4511 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4512 }
4513#endif
4514#if 1
4515 /* m8baby test1902 */
4516 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4517 hevc_print(hevc, 0,
4518 "%s\n", __func__);
4519 data32 = READ_VREG(HEVC_PARSER_VERSION);
4520 if (data32 != 0x00010001) {
4521 print_scratch_error(25);
4522 return;
4523 }
4524 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4525 data32 = READ_VREG(HEVC_PARSER_VERSION);
4526 if (data32 != 0x5a5a55aa) {
4527 print_scratch_error(26);
4528 return;
4529 }
4530#if 0
4531 /* test Parser Reset */
4532 /* reset iqit to start mem init again */
4533 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4534 (1 << 3) /* reset_whole parser */
4535 );
4536 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4537 data32 = READ_VREG(HEVC_PARSER_VERSION);
4538 if (data32 != 0x00010001)
4539 hevc_print(hevc, 0,
4540 "Test Parser Fatal Error\n");
4541#endif
4542 /* reset iqit to start mem init again */
4543 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4544 );
4545 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4546 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4547
4548#endif
4549 if (!hevc->m_ins_flag) {
4550 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4551 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4552 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4553 data32 |= (0xf << 25); /*arwlen_axi_max*/
4554 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4555 }
4556 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4557 if (data32 != 0x00000100) {
4558 print_scratch_error(29);
4559 return;
4560 }
4561 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4562 if (data32 != 0x00000300) {
4563 print_scratch_error(30);
4564 return;
4565 }
4566 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4567 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4568 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4569 if (data32 != 0x12345678) {
4570 print_scratch_error(31);
4571 return;
4572 }
4573 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4574 if (data32 != 0x9abcdef0) {
4575 print_scratch_error(32);
4576 return;
4577 }
4578 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4579 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4580
4581 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4582 data32 &= 0x03ffffff;
4583 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4584 | /* stream_buffer_empty_int_amrisc_enable */
4585 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4586 (1 << 7) | /* dec_done_int_cpu_enable */
4587 (1 << 4) | /* startcode_found_int_cpu_enable */
4588 (0 << 3) | /* startcode_found_int_amrisc_enable */
4589 (1 << 0) /* parser_int_enable */
4590 ;
4591 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4592
4593 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4594 data32 = data32 | (1 << 1) | /* emulation_check_on */
4595 (1 << 0) /* startcode_check_on */
4596 ;
4597 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4598
4599 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4600 (2 << 4) | /* emulate_code_length_sub_1 */
4601 (2 << 1) | /* start_code_length_sub_1 */
4602 (1 << 0) /* stream_shift_enable */
4603 );
4604
4605 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4606 );
4607 /* hevc_parser_core_clk_en */
4608 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4609 );
4610
4611 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4612
 4613 /* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4614 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4615 for (i = 0; i < 1024; i++)
4616 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4617
4618#ifdef ENABLE_SWAP_TEST
4619 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4620#endif
4621
4622 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4623 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4624 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4625 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4626 /* Send parser_cmd */
4627 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4628
4629 parser_cmd_write();
4630
4631 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4632 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4633 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4634
4635 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4636 /* (1 << 8) | // sao_sw_pred_enable */
4637 (1 << 5) | /* parser_sao_if_en */
4638 (1 << 2) | /* parser_mpred_if_en */
4639 (1 << 0) /* parser_scaler_if_en */
4640 );
4641
4642 /* Changed to Start MPRED in microcode */
4643 /*
4644 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4645 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4646 * (1<<31)
4647 * );
4648 */
4649
4650 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4651 (1 << 0) /* software reset ipp and mpp */
4652 );
4653 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4654 (0 << 0) /* software reset ipp and mpp */
4655 );
4656
4657 if (get_double_write_mode(hevc) & 0x10)
4658 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4659 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
4660 );
4661
4662}
4663
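/*
 * Re-initialise the decoder front end after a reset: the same scratch
 * register checks, stream fetch enable, parser/shift/CABAC programming,
 * scale LUT clear and parser command download as the initial hardware
 * setup.
 */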
4664static void decoder_hw_reset(void)
4665{
4666 int i;
4667 unsigned int data32;
4668 /* reset iqit to start mem init again */
4669 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4670 );
4671 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4672 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4673
4674 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4675 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4676 ;
4677 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4678
4679 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4680 if (data32 != 0x00000100) {
4681 print_scratch_error(29);
4682 return;
4683 }
4684 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4685 if (data32 != 0x00000300) {
4686 print_scratch_error(30);
4687 return;
4688 }
4689 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4690 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4691 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4692 if (data32 != 0x12345678) {
4693 print_scratch_error(31);
4694 return;
4695 }
4696 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4697 if (data32 != 0x9abcdef0) {
4698 print_scratch_error(32);
4699 return;
4700 }
4701 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4702 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4703
4704 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4705 data32 &= 0x03ffffff;
4706 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4707 | /* stream_buffer_empty_int_amrisc_enable */
4708 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4709 (1 << 7) | /* dec_done_int_cpu_enable */
4710 (1 << 4) | /* startcode_found_int_cpu_enable */
4711 (0 << 3) | /* startcode_found_int_amrisc_enable */
4712 (1 << 0) /* parser_int_enable */
4713 ;
4714 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4715
4716 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4717 data32 = data32 | (1 << 1) | /* emulation_check_on */
4718 (1 << 0) /* startcode_check_on */
4719 ;
4720 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4721
4722 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4723 (2 << 4) | /* emulate_code_length_sub_1 */
4724 (2 << 1) | /* start_code_length_sub_1 */
4725 (1 << 0) /* stream_shift_enable */
4726 );
4727
4728 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4729 );
4730 /* hevc_parser_core_clk_en */
4731 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4732 );
4733
 4734 /* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4735 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4736 for (i = 0; i < 1024; i++)
4737 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4738
4739 /* Send parser_cmd */
4740 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4741
4742 parser_cmd_write();
4743
4744 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4745 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4746 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4747
4748 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4749 /* (1 << 8) | // sao_sw_pred_enable */
4750 (1 << 5) | /* parser_sao_if_en */
4751 (1 << 2) | /* parser_mpred_if_en */
4752 (1 << 0) /* parser_scaler_if_en */
4753 );
4754
4755 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4756 (1 << 0) /* software reset ipp and mpp */
4757 );
4758 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4759 (0 << 0) /* software reset ipp and mpp */
4760 );
4761}
4762
4763#ifdef CONFIG_HEVC_CLK_FORCED_ON
4764static void config_hevc_clk_forced_on(void)
4765{
4766 unsigned int rdata32;
4767 /* IQIT */
4768 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4769 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4770
4771 /* DBLK */
4772 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4773 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4774
4775 /* SAO */
4776 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4777 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4778
4779 /* MPRED */
4780 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4781 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4782
4783 /* PARSER */
4784 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4785 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4786 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4787 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4788 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4789 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4790 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4791 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4792 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4793 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4794 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4795 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4796 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4797
4798 /* IPP */
4799 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4800 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4801
4802 /* MCRCC */
4803 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4804 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4805}
4806#endif
4807
4808#ifdef MCRCC_ENABLE
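/*
 * Configure the motion-compensation reference cache (MCRCC).  The cache is
 * reset and left disabled for I pictures; for P/B pictures the canvas ids
 * of the first one or two references are read back from the ANC canvas
 * table and written to MCRCC_CTL2/CTL3 before progressive mode is enabled.
 */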
4809static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4810{
4811 unsigned int rdata32;
4812 unsigned int rdata32_2;
4813 int l0_cnt = 0;
4814 int l1_cnt = 0x7fff;
4815
4816 if (get_double_write_mode(hevc) & 0x10) {
4817 l0_cnt = hevc->cur_pic->RefNum_L0;
4818 l1_cnt = hevc->cur_pic->RefNum_L1;
4819 }
4820
4821 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4822
4823 if (slice_type == 2) { /* I-PIC */
4824 /* remove reset -- disables clock */
4825 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4826 return;
4827 }
4828
4829 if (slice_type == 0) { /* B-PIC */
4830 /* Programme canvas0 */
4831 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4832 (0 << 8) | (0 << 1) | 0);
4833 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4834 rdata32 = rdata32 & 0xffff;
4835 rdata32 = rdata32 | (rdata32 << 16);
4836 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4837
4838 /* Programme canvas1 */
4839 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4840 (16 << 8) | (1 << 1) | 0);
4841 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4842 rdata32_2 = rdata32_2 & 0xffff;
4843 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4844 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4845 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4846 rdata32_2 = rdata32_2 & 0xffff;
4847 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4848 }
4849 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4850 } else { /* P-PIC */
4851 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4852 (0 << 8) | (1 << 1) | 0);
4853 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4854 rdata32 = rdata32 & 0xffff;
4855 rdata32 = rdata32 | (rdata32 << 16);
4856 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4857
4858 if (l0_cnt == 1) {
4859 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4860 } else {
4861 /* Programme canvas1 */
4862 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4863 rdata32 = rdata32 & 0xffff;
4864 rdata32 = rdata32 | (rdata32 << 16);
4865 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4866 }
4867 }
4868 /* enable mcrcc progressive-mode */
4869 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4870}
4871#endif
4872
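/*
 * Despite the name (apparently a typo for "tile"), this programs the SAO
 * memory unit size and the above/vertical buffer base addresses used by
 * the tile handling above.
 */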
4873static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4874 int sao_mem_unit)
4875{
4876 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4877 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4878 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4879 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4880}
4881
4882static u32 init_aux_size;
4883static int aux_data_is_avaible(struct hevc_state_s *hevc)
4884{
4885 u32 reg_val;
4886
4887 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4888 if (reg_val != 0 && reg_val != init_aux_size)
4889 return 1;
4890 else
4891 return 0;
4892}
4893
4894static void config_aux_buf(struct hevc_state_s *hevc)
4895{
4896 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4897 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4898 (hevc->suffix_aux_size >> 4);
4899 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4900}
4901
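/*
 * Program the motion-prediction (MPRED) block for the current slice: MV
 * read/write buffer addresses and row jump, picture/tile geometry, control
 * flags (TMVP, LDC, colocated-from-L0, long-term flags), reference counts
 * and the per-reference POC values for both lists.
 */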
4902static void config_mpred_hw(struct hevc_state_s *hevc)
4903{
4904 int i;
4905 unsigned int data32;
4906 struct PIC_s *cur_pic = hevc->cur_pic;
4907 struct PIC_s *col_pic = hevc->col_pic;
4908 int AMVP_MAX_NUM_CANDS_MEM = 3;
4909 int AMVP_MAX_NUM_CANDS = 2;
4910 int NUM_CHROMA_MODE = 5;
4911 int DM_CHROMA_IDX = 36;
4912 int above_ptr_ctrl = 0;
4913 int buffer_linear = 1;
4914 int cu_size_log2 = 3;
4915
4916 int mpred_mv_rd_start_addr;
4917 int mpred_curr_lcu_x;
4918 int mpred_curr_lcu_y;
4919 int mpred_above_buf_start;
4920 int mpred_mv_rd_ptr;
4921 int mpred_mv_rd_ptr_p1;
4922 int mpred_mv_rd_end_addr;
4923 int MV_MEM_UNIT;
4924 int mpred_mv_wr_ptr;
4925 int *ref_poc_L0, *ref_poc_L1;
4926
4927 int above_en;
4928 int mv_wr_en;
4929 int mv_rd_en;
4930 int col_isIntra;
4931
4932 if (hevc->slice_type != 2) {
4933 above_en = 1;
4934 mv_wr_en = 1;
4935 mv_rd_en = 1;
4936 col_isIntra = 0;
4937 } else {
4938 above_en = 1;
4939 mv_wr_en = 1;
4940 mv_rd_en = 0;
4941 col_isIntra = 0;
4942 }
4943
4944 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
4945 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
4946 mpred_curr_lcu_x = data32 & 0xffff;
4947 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
4948
4949 MV_MEM_UNIT =
4950 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
4951 5 ? 0x80 : 0x20;
4952 mpred_mv_rd_ptr =
4953 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
4954
4955 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
4956 mpred_mv_rd_end_addr =
4957 mpred_mv_rd_start_addr +
4958 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
4959
4960 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
4961
4962 mpred_mv_wr_ptr =
4963 cur_pic->mpred_mv_wr_start_addr +
4964 (hevc->slice_addr * MV_MEM_UNIT);
4965
4966 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4967 hevc_print(hevc, 0,
4968 "cur pic index %d col pic index %d\n", cur_pic->index,
4969 col_pic->index);
4970 }
4971
4972 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
4973 cur_pic->mpred_mv_wr_start_addr);
4974 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
4975
4976 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
4977 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
4978 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
4979
4980 data32 = READ_VREG(HEVC_MPRED_CTRL0);
4981 data32 = (hevc->slice_type |
4982 hevc->new_pic << 2 |
4983 hevc->new_tile << 3 |
4984 hevc->isNextSliceSegment << 4 |
4985 hevc->TMVPFlag << 5 |
4986 hevc->LDCFlag << 6 |
4987 hevc->ColFromL0Flag << 7 |
4988 above_ptr_ctrl << 8 |
4989 above_en << 9 |
4990 mv_wr_en << 10 |
4991 mv_rd_en << 11 |
4992 col_isIntra << 12 |
4993 buffer_linear << 13 |
4994 hevc->LongTerm_Curr << 14 |
4995 hevc->LongTerm_Col << 15 |
4996 hevc->lcu_size_log2 << 16 |
4997 cu_size_log2 << 20 | hevc->plevel << 24);
4998 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
4999
5000 data32 = READ_VREG(HEVC_MPRED_CTRL1);
5001 data32 = (
5002#if 0
5003 /* no set in m8baby test1902 */
5004 /* Don't override clk_forced_on , */
5005 (data32 & (0x1 << 24)) |
5006#endif
5007 hevc->MaxNumMergeCand |
5008 AMVP_MAX_NUM_CANDS << 4 |
5009 AMVP_MAX_NUM_CANDS_MEM << 8 |
5010 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
5011 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
5012
5013 data32 = (hevc->pic_w | hevc->pic_h << 16);
5014 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
5015
5016 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5017 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
5018
5019 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
5020 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
5021
5022 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
5023 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
5024
5025 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
5026 /* col_RefNum_L0<<16| */
5027 /* col_RefNum_L1<<24 */
5028 );
5029 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
5030
5031 data32 = (hevc->LongTerm_Ref);
5032 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
5033
5034 data32 = 0;
5035 for (i = 0; i < hevc->RefNum_L0; i++)
5036 data32 = data32 | (1 << i);
5037 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
5038
5039 data32 = 0;
5040 for (i = 0; i < hevc->RefNum_L1; i++)
5041 data32 = data32 | (1 << i);
5042 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
5043
5044 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
5045 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
5046
 5047 /* The MPRED Ref_POC_xx_Lx registers below must be written as
 5048 * Ref_POC_xx_L0 -> Ref_POC_xx_L1 pairs, in that order.
5049 */
5050 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
5051 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
5052
5053 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
5054 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
5055
5056 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
5057 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
5058
5059 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
5060 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
5061
5062 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
5063 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
5064
5065 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
5066 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
5067
5068 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
5069 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
5070
5071 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
5072 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
5073
5074 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
5075 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
5076
5077 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
5078 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
5079
5080 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
5081 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
5082
5083 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
5084 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
5085
5086 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
5087 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
5088
5089 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
5090 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
5091
5092 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
5093 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
5094
5095 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
5096 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
5097
5098 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
5099 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
5100
5101 if (hevc->new_pic) {
5102 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
5103 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
5104 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
5105 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
5106 } else if (!hevc->isNextSliceSegment) {
5107 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
5108 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
5109 }
5110
5111 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
5112}
5113
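/*
 * Program SAO and deblocking for the current picture: output buffer
 * addresses (compressed body/header and/or double-write planes depending
 * on the double write mode), picture geometry, endian/format bits in
 * HEVC_SAO_CTRL1 and HEVCD_IPP_AXIIF_CONFIG (with NV12/NV21 selection for
 * v4l2), and the deblocking filter offsets/flags in HEVC_DBLK_CFG9.
 */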
5114static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
5115{
5116 unsigned int data32, data32_2;
5117 int misc_flag0 = hevc->misc_flag0;
5118 int slice_deblocking_filter_disabled_flag = 0;
5119
5120 int mc_buffer_size_u_v =
5121 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
5122 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
5123 struct PIC_s *cur_pic = hevc->cur_pic;
5124 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
5125
5126 data32 = READ_VREG(HEVC_SAO_CTRL0);
5127 data32 &= (~0xf);
5128 data32 |= hevc->lcu_size_log2;
5129 WRITE_VREG(HEVC_SAO_CTRL0, data32);
5130
5131 data32 = (hevc->pic_w | hevc->pic_h << 16);
5132 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5133
5134 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5135 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5136
5137 if (hevc->new_pic)
5138 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5139#ifdef LOSLESS_COMPRESS_MODE
5140/*SUPPORT_10BIT*/
5141 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5142 data32 = READ_VREG(HEVC_SAO_CTRL5);
5143 data32 &= (~(0xff << 16));
5144
5145 if (get_double_write_mode(hevc) == 2 ||
5146 get_double_write_mode(hevc) == 3)
5147 data32 |= (0xff<<16);
5148 else if (get_double_write_mode(hevc) == 4)
5149 data32 |= (0x33<<16);
5150
5151 if (hevc->mem_saving_mode == 1)
5152 data32 |= (1 << 9);
5153 else
5154 data32 &= ~(1 << 9);
5155 if (workaround_enable & 1)
5156 data32 |= (1 << 7);
5157 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5158 }
5159 data32 = cur_pic->mc_y_adr;
5160 if (get_double_write_mode(hevc))
5161 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5162
5163 if ((get_double_write_mode(hevc) & 0x10) == 0)
5164 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5165
5166 if (hevc->mmu_enable)
5167 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5168#else
5169 data32 = cur_pic->mc_y_adr;
5170 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5171#endif
5172 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5173 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5174
5175#ifdef LOSLESS_COMPRESS_MODE
5176/*SUPPORT_10BIT*/
5177 if (get_double_write_mode(hevc))
5178 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5179#else
5180 data32 = cur_pic->mc_u_v_adr;
5181 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5182#endif
5183 data32 = (mc_buffer_size_u_v_h << 16);
5184 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5185
5186#ifdef LOSLESS_COMPRESS_MODE
5187/*SUPPORT_10BIT*/
5188 if (get_double_write_mode(hevc)) {
5189 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5190 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5191 }
5192#else
5193 /* multi tile to do... */
5194 data32 = cur_pic->mc_y_adr;
5195 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5196
5197 data32 = cur_pic->mc_u_v_adr;
5198 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5199#endif
5200 /* DBLK CONFIG HERE */
5201 if (hevc->new_pic) {
5202 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5203 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5204 data32 = (0xff << 8) | (0x0 << 0);
5205 else
5206 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5207 (0x0 << 0); /* h265 video format*/
5208
5209 if (hevc->pic_w >= 1280)
5210 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5211 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5212 if (get_double_write_mode(hevc) == 0)
5213 data32 |= (0x1 << 8); /*enable first write*/
5214 else if (get_double_write_mode(hevc) == 0x10)
5215 data32 |= (0x1 << 9); /*double write only*/
5216 else
5217 data32 |= ((0x1 << 8) |(0x1 << 9));
5218
5219 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5220 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5221 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5222 }
5223 data32 = (hevc->pic_w | hevc->pic_h << 16);
5224 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5225
5226 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5227 data32 =
5228 ((misc_flag0 >>
5229 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5230 0x1) << 3;
5231 } else
5232 data32 = 0;
5233 data32 |=
5234 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5235 ((params->p.pps_cr_qp_offset
5236 & 0x1f) <<
5237 9));
5238 data32 |=
5239 (hevc->lcu_size ==
5240 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5241
5242 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5243
5244 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5245 /*if (debug & 0x80) {*/
5246 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5247 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5248 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5249 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5250 data32);
5251 /*}*/
5252 }
5253 }
5254#if 0
5255 data32 = READ_VREG(HEVC_SAO_CTRL1);
5256 data32 &= (~0x3000);
5257 data32 |= (hevc->mem_map_mode <<
5258 12);
5259
5260/* [13:12] axi_aformat,
5261 * 0-Linear, 1-32x32, 2-64x32
5262 */
5263 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5264
5265 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5266 data32 &= (~0x30);
5267 data32 |= (hevc->mem_map_mode <<
5268 4);
5269
5270/* [5:4] -- address_format
5271 * 00:linear 01:32x32 10:64x32
5272 */
5273 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5274#else
5275 /* m8baby test1902 */
5276 data32 = READ_VREG(HEVC_SAO_CTRL1);
5277 data32 &= (~0x3000);
5278 data32 |= (hevc->mem_map_mode <<
5279 12);
5280
5281/* [13:12] axi_aformat, 0-Linear,
5282 * 1-32x32, 2-64x32
5283 */
5284 data32 &= (~0xff0);
5285 /* data32 |= 0x670; // Big-Endian per 64-bit */
5286 data32 |= endian; /* Big-Endian per 64-bit */
5287 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5288 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5289 if (get_double_write_mode(hevc) == 0)
5290 data32 |= 0x2; /*disable double write*/
5291 else if (get_double_write_mode(hevc) & 0x10)
5292 data32 |= 0x1; /*disable cm*/
5293 } else {
5294 unsigned int data;
5295 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5296 (0x0 << 0); /* h265 video format*/
5297 if (hevc->pic_w >= 1280)
5298 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5299 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5300 if (get_double_write_mode(hevc) == 0)
5301 data |= (0x1 << 8); /*enable first write*/
5302 else if (get_double_write_mode(hevc) & 0x10)
5303 data |= (0x1 << 9); /*double write only*/
5304 else
5305 data |= ((0x1 << 8) |(0x1 << 9));
5306
5307 WRITE_VREG(HEVC_DBLK_CFGB, data);
5308 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5309 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5310 }
5311
5312 /* swap uv */
5313 if (hevc->is_used_v4l) {
5314 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5315 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5316 data32 &= ~(1 << 8); /* NV21 */
5317 else
5318 data32 |= (1 << 8); /* NV12 */
5319 }
5320
5321 /*
5322 * [31:24] ar_fifo1_axi_thred
5323 * [23:16] ar_fifo0_axi_thred
5324 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5325 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5326 * [11:08] axi_lendian_C
5327 * [07:04] axi_lendian_Y
5328 * [3] reserved
5329 * [2] clk_forceon
5330 * [1] dw_disable:disable double write output
5331 * [0] cm_disable:disable compress output
5332 */
5333 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5334 if (get_double_write_mode(hevc) & 0x10) {
5335 /* [23:22] dw_v1_ctrl
5336 *[21:20] dw_v0_ctrl
5337 *[19:18] dw_h1_ctrl
5338 *[17:16] dw_h0_ctrl
5339 */
5340 data32 = READ_VREG(HEVC_SAO_CTRL5);
5341 /*set them all 0 for H265_NV21 (no down-scale)*/
5342 data32 &= ~(0xff << 16);
5343 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5344 }
5345
5346 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5347 data32 &= (~0x30);
5348 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5349 data32 |= (hevc->mem_map_mode <<
5350 4);
5351 data32 &= (~0xF);
5352 data32 |= 0xf; /* valid only when double write only */
5353 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5354
5355 /* swap uv */
5356 if (hevc->is_used_v4l) {
5357 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5358 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5359 data32 |= (1 << 12); /* NV21 */
5360 else
5361 data32 &= ~(1 << 12); /* NV12 */
5362 }
5363
5364 /*
5365 * [3:0] little_endian
5366 * [5:4] address_format 00:linear 01:32x32 10:64x32
5367 * [7:6] reserved
5368 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5369 * [11:10] reserved
5370 * [12] CbCr_byte_swap
5371 * [31:13] reserved
5372 */
5373 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5374#endif
5375 data32 = 0;
5376 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5377 data32_2 &= (~0x300);
5378 /* slice_deblocking_filter_disabled_flag = 0;
5379 * ucode has handle it , so read it from ucode directly
5380 */
5381 if (hevc->tile_enabled) {
5382 data32 |=
5383 ((misc_flag0 >>
5384 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5385 0x1) << 0;
5386 data32_2 |=
5387 ((misc_flag0 >>
5388 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5389 0x1) << 8;
5390 }
5391 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5392 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5393 0x1; /* ucode has handle it,so read it from ucode directly */
5394 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5395 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5396 /* slice_deblocking_filter_disabled_flag =
5397 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5398 * //ucode has handle it , so read it from ucode directly
5399 */
5400 data32 |= slice_deblocking_filter_disabled_flag << 2;
5401 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5402 hevc_print_cont(hevc, 0,
5403 "(1,%x)", data32);
5404 if (!slice_deblocking_filter_disabled_flag) {
5405 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5406 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5407 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5408 hevc_print_cont(hevc, 0,
5409 "(2,%x)", data32);
5410 }
5411 } else {
5412 data32 |=
5413 ((misc_flag0 >>
5414 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5415 0x1) << 2;
5416 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5417 hevc_print_cont(hevc, 0,
5418 "(3,%x)", data32);
5419 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5420 0x1) == 0) {
5421 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5422 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5423 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5424 hevc_print_cont(hevc, 0,
5425 "(4,%x)", data32);
5426 }
5427 }
5428 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5429 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5430 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5431 || (!slice_deblocking_filter_disabled_flag))) {
5432 data32 |=
5433 ((misc_flag0 >>
5434 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5435 & 0x1) << 1;
5436 data32_2 |=
5437 ((misc_flag0 >>
5438 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5439 & 0x1) << 9;
5440 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5441 hevc_print_cont(hevc, 0,
5442 "(5,%x)\n", data32);
5443 } else {
5444 data32 |=
5445 ((misc_flag0 >>
5446 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5447 & 0x1) << 1;
5448 data32_2 |=
5449 ((misc_flag0 >>
5450 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5451 & 0x1) << 9;
5452 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5453 hevc_print_cont(hevc, 0,
5454 "(6,%x)\n", data32);
5455 }
5456 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5457 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5458}
5459
5460#ifdef TEST_NO_BUF
5461static unsigned char test_flag = 1;
5462#endif
5463
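/*
 * Re-validate the picture store after a size change: idle pictures whose
 * width/height no longer match are detached from their buffers (and dropped
 * entirely once more than work_pic_num pictures exist), free slots are
 * re-initialised up to work_pic_num, and unused buffers are released.
 */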
5464static void pic_list_process(struct hevc_state_s *hevc)
5465{
5466 int work_pic_num = get_work_pic_num(hevc);
5467 int alloc_pic_count = 0;
5468 int i;
5469 struct PIC_s *pic;
5470 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5471 pic = hevc->m_PIC[i];
5472 if (pic == NULL || pic->index == -1)
5473 continue;
5474 alloc_pic_count++;
5475 if (pic->output_mark == 0 && pic->referenced == 0
5476 && pic->output_ready == 0
5477 && (pic->width != hevc->pic_w ||
5478 pic->height != hevc->pic_h)
5479 ) {
5480 set_buf_unused(hevc, pic->BUF_index);
5481 pic->BUF_index = -1;
5482 if (alloc_pic_count > work_pic_num) {
5483 pic->width = 0;
5484 pic->height = 0;
5485 pic->index = -1;
5486 } else {
5487 pic->width = hevc->pic_w;
5488 pic->height = hevc->pic_h;
5489 }
5490 }
5491 }
5492 if (alloc_pic_count < work_pic_num) {
5493 int new_count = alloc_pic_count;
5494 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5495 pic = hevc->m_PIC[i];
5496 if (pic && pic->index == -1) {
5497 pic->index = i;
5498 pic->BUF_index = -1;
5499 pic->width = hevc->pic_w;
5500 pic->height = hevc->pic_h;
5501 new_count++;
5502 if (new_count >=
5503 work_pic_num)
5504 break;
5505 }
5506 }
5507
5508 }
5509 dealloc_unused_buf(hevc);
5510 if (get_alloc_pic_count(hevc)
5511 != alloc_pic_count) {
5512 hevc_print_cont(hevc, 0,
5513 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5514 __func__,
5515 work_pic_num,
5516 alloc_pic_count,
5517 get_alloc_pic_count(hevc));
5518 }
5519}
5520
5521static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5522{
5523 int i;
5524 struct PIC_s *pic;
5525 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5526 pic = hevc->m_PIC[i];
5527 if (pic == NULL || pic->index == -1)
5528 continue;
5529
5530 if (pic->output_mark == 0 && pic->referenced == 0
5531 && pic->output_ready == 0
5532 && pic->scatter_alloc
5533 )
5534 release_pic_mmu_buf(hevc, pic);
5535 }
5536
5537}
5538
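/*
 * Allocate a picture for the frame about to be decoded (non-v4l path):
 * pick a free slot matching the current size (preferring an unused slot or
 * the lowest POC), attach a buffer/canvas and MV buffer (plus MMU pages
 * when enabled), and initialise its decode index, POC, reorder depth and
 * sequence parameters.
 */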
5539static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5540 union param_u *rpm_param)
5541{
5542 struct PIC_s *new_pic = NULL;
5543 struct PIC_s *pic;
5544 int i;
5545 int ret;
5546
5547 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5548 pic = hevc->m_PIC[i];
5549 if (pic == NULL || pic->index == -1)
5550 continue;
5551
5552 if (pic->output_mark == 0 && pic->referenced == 0
5553 && pic->output_ready == 0
5554 && pic->width == hevc->pic_w
5555 && pic->height == hevc->pic_h
5556 ) {
5557 if (new_pic) {
5558 if (new_pic->POC != INVALID_POC) {
5559 if (pic->POC == INVALID_POC ||
5560 pic->POC < new_pic->POC)
5561 new_pic = pic;
5562 }
5563 } else
5564 new_pic = pic;
5565 }
5566 }
5567
5568 if (new_pic == NULL)
5569 return NULL;
5570
5571 if (new_pic->BUF_index < 0) {
5572 if (alloc_buf(hevc) < 0)
5573 return NULL;
5574 else {
5575 if (config_pic(hevc, new_pic) < 0) {
5576 dealloc_pic_buf(hevc, new_pic);
5577 return NULL;
5578 }
5579 }
5580 new_pic->width = hevc->pic_w;
5581 new_pic->height = hevc->pic_h;
5582 set_canvas(hevc, new_pic);
5583
5584 init_pic_list_hw(hevc);
5585 }
5586
5587 if (new_pic) {
5588 new_pic->double_write_mode =
5589 get_double_write_mode(hevc);
5590 if (new_pic->double_write_mode)
5591 set_canvas(hevc, new_pic);
5592
5593#ifdef TEST_NO_BUF
5594 if (test_flag) {
5595 test_flag = 0;
5596 return NULL;
5597 } else
5598 test_flag = 1;
5599#endif
5600 if (get_mv_buf(hevc, new_pic) < 0)
5601 return NULL;
5602
5603 if (hevc->mmu_enable) {
5604 ret = H265_alloc_mmu(hevc, new_pic,
5605 rpm_param->p.bit_depth,
5606 hevc->frame_mmu_map_addr);
5607 if (ret != 0) {
5608 put_mv_buf(hevc, new_pic);
5609 hevc_print(hevc, 0,
5610 "can't alloc need mmu1,idx %d ret =%d\n",
5611 new_pic->decode_idx,
5612 ret);
5613 return NULL;
5614 }
5615 }
5616 new_pic->referenced = 1;
5617 new_pic->decode_idx = hevc->decode_idx;
5618 new_pic->slice_idx = 0;
5619 new_pic->referenced = 1;
5620 new_pic->output_mark = 0;
5621 new_pic->recon_mark = 0;
5622 new_pic->error_mark = 0;
5623 new_pic->dis_mark = 0;
5624 /* new_pic->output_ready = 0; */
5625 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5626 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5627 new_pic->POC = hevc->curr_POC;
5628 new_pic->pic_struct = hevc->curr_pic_struct;
5629 if (new_pic->aux_data_buf)
5630 release_aux_data(hevc, new_pic);
5631 new_pic->mem_saving_mode =
5632 hevc->mem_saving_mode;
5633 new_pic->bit_depth_luma =
5634 hevc->bit_depth_luma;
5635 new_pic->bit_depth_chroma =
5636 hevc->bit_depth_chroma;
5637 new_pic->video_signal_type =
5638 hevc->video_signal_type;
5639
5640 new_pic->conformance_window_flag =
5641 hevc->param.p.conformance_window_flag;
5642 new_pic->conf_win_left_offset =
5643 hevc->param.p.conf_win_left_offset;
5644 new_pic->conf_win_right_offset =
5645 hevc->param.p.conf_win_right_offset;
5646 new_pic->conf_win_top_offset =
5647 hevc->param.p.conf_win_top_offset;
5648 new_pic->conf_win_bottom_offset =
5649 hevc->param.p.conf_win_bottom_offset;
5650 new_pic->chroma_format_idc =
5651 hevc->param.p.chroma_format_idc;
5652
5653 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5654 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5655 __func__, new_pic->index,
5656 new_pic->BUF_index, new_pic->decode_idx,
5657 new_pic->POC);
5658
5659 }
5660 if (pic_list_debug & 0x1) {
5661 dump_pic_list(hevc);
5662 pr_err("\n*******************************************\n");
5663 }
5664
5665 return new_pic;
5666}
5667
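/*
 * v4l2 counterpart of get_new_pic(): walk the capture buffer pool and either
 * reuse a free buffer already held by the decoder (IN_DEC) or take one still
 * queued on the m2m side (IN_M2M), binding it to a picture slot and
 * configuring it before use.
 */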
5668static struct PIC_s *v4l_get_new_pic(struct hevc_state_s *hevc,
5669 union param_u *rpm_param)
5670{
5671 int ret;
5672 int used_buf_num = get_work_pic_num(hevc);
5673 struct aml_vcodec_ctx * v4l = hevc->v4l2_ctx;
5674 struct PIC_s *new_pic = NULL;
5675 struct PIC_s *pic = NULL;
5676 int i;
5677
5678 for (i = 0; i < used_buf_num; ++i) {
5679 struct v4l_buff_pool *pool = &v4l->cap_pool;
5680 u32 state = (pool->seq[i] >> 16);
5681 u32 index = (pool->seq[i] & 0xffff);
5682
5683 switch (state) {
5684 case V4L_CAP_BUFF_IN_DEC:
5685 pic = hevc->m_PIC[i];
5686 if (pic && (pic->index != -1) &&
5687 (pic->output_mark == 0) &&
5688 (pic->referenced == 0) &&
5689 (pic->output_ready == 0) &&
5690 (pic->width == hevc->pic_w) &&
5691 (pic->height == hevc->pic_h) &&
5692 pic->cma_alloc_addr) {
5693 new_pic = pic;
5694 }
5695 break;
5696 case V4L_CAP_BUFF_IN_M2M:
5697 pic = hevc->m_PIC[index];
5698 pic->width = hevc->pic_w;
5699 pic->height = hevc->pic_h;
5700 if ((pic->index != -1) &&
5701 !v4l_alloc_buf(hevc, pic)) {
5702 v4l_config_pic(hevc, pic);
5703 init_pic_list_hw(hevc);
5704 new_pic = pic;
5705 }
5706 break;
5707 default:
5708 pr_err("v4l buffer state err %d.\n", state);
5709 break;
5710 }
5711
5712 if (new_pic)
5713 break;
5714 }
5715
5716 if (new_pic == NULL)
5717 return NULL;
5718
5719 new_pic->double_write_mode = get_double_write_mode(hevc);
5720 if (new_pic->double_write_mode)
5721 set_canvas(hevc, new_pic);
5722
5723 if (get_mv_buf(hevc, new_pic) < 0)
5724 return NULL;
5725
5726 if (hevc->mmu_enable) {
5727 ret = H265_alloc_mmu(hevc, new_pic,
5728 rpm_param->p.bit_depth,
5729 hevc->frame_mmu_map_addr);
5730 if (ret != 0) {
5731 put_mv_buf(hevc, new_pic);
5732 hevc_print(hevc, 0,
5733				"can't alloc needed mmu pages, idx %d ret = %d\n",
5734 new_pic->decode_idx, ret);
5735 return NULL;
5736 }
5737 }
5738
5739 new_pic->referenced = 1;
5740 new_pic->decode_idx = hevc->decode_idx;
5741 new_pic->slice_idx = 0;
5742 new_pic->referenced = 1;
5743 new_pic->output_mark = 0;
5744 new_pic->recon_mark = 0;
5745 new_pic->error_mark = 0;
5746 new_pic->dis_mark = 0;
5747 /* new_pic->output_ready = 0; */
5748 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5749 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5750 new_pic->POC = hevc->curr_POC;
5751 new_pic->pic_struct = hevc->curr_pic_struct;
5752
5753 if (new_pic->aux_data_buf)
5754 release_aux_data(hevc, new_pic);
5755 new_pic->mem_saving_mode =
5756 hevc->mem_saving_mode;
5757 new_pic->bit_depth_luma =
5758 hevc->bit_depth_luma;
5759 new_pic->bit_depth_chroma =
5760 hevc->bit_depth_chroma;
5761 new_pic->video_signal_type =
5762 hevc->video_signal_type;
5763
5764 new_pic->conformance_window_flag =
5765 hevc->param.p.conformance_window_flag;
5766 new_pic->conf_win_left_offset =
5767 hevc->param.p.conf_win_left_offset;
5768 new_pic->conf_win_right_offset =
5769 hevc->param.p.conf_win_right_offset;
5770 new_pic->conf_win_top_offset =
5771 hevc->param.p.conf_win_top_offset;
5772 new_pic->conf_win_bottom_offset =
5773 hevc->param.p.conf_win_bottom_offset;
5774 new_pic->chroma_format_idc =
5775 hevc->param.p.chroma_format_idc;
5776
5777 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5778 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5779 __func__, new_pic->index,
5780 new_pic->BUF_index, new_pic->decode_idx,
5781 new_pic->POC);
5782
5783 return new_pic;
5784}
5785
5786static int get_display_pic_num(struct hevc_state_s *hevc)
5787{
5788 int i;
5789 struct PIC_s *pic;
5790 int num = 0;
5791
5792 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5793 pic = hevc->m_PIC[i];
5794 if (pic == NULL ||
5795 pic->index == -1)
5796 continue;
5797
5798 if (pic->output_ready == 1)
5799 num++;
5800 }
5801 return num;
5802}
5803
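/*
 * Mark the current picture for output (when it carries a valid POC) and then
 * drain every displayable picture from the DPB, recycling those that are
 * error-marked or filtered out by the debug / i_only settings.
 */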
5804static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5805{
5806 struct PIC_s *pic_display;
5807
5808 if (pic) {
5809 /*PB skip control */
5810 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5811 /* start decoding after first I */
5812 hevc->ignore_bufmgr_error |= 0x1;
5813 }
5814 if (hevc->ignore_bufmgr_error & 1) {
5815 if (hevc->PB_skip_count_after_decoding > 0)
5816 hevc->PB_skip_count_after_decoding--;
5817 else {
5818 /* start displaying */
5819 hevc->ignore_bufmgr_error |= 0x2;
5820 }
5821 }
5822 /**/
5823 if (pic->POC != INVALID_POC) {
5824 pic->output_mark = 1;
5825 pic->recon_mark = 1;
5826 }
5827 pic->recon_mark = 1;
5828 }
5829 do {
5830 pic_display = output_pic(hevc, 1);
5831
5832 if (pic_display) {
5833 pic_display->referenced = 0;
5834 put_mv_buf(hevc, pic_display);
5835 if ((pic_display->error_mark
5836 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5837 || (get_dbg_flag(hevc) &
5838 H265_DEBUG_DISPLAY_CUR_FRAME)
5839 || (get_dbg_flag(hevc) &
5840 H265_DEBUG_NO_DISPLAY)) {
5841 pic_display->output_ready = 0;
5842 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5843 hevc_print(hevc, 0,
5844 "[BM] Display: POC %d, ",
5845 pic_display->POC);
5846 hevc_print_cont(hevc, 0,
5847 "decoding index %d ==> ",
5848 pic_display->decode_idx);
5849 hevc_print_cont(hevc, 0,
5850 "Debug mode or error, recycle it\n");
5851 }
5852 } else {
5853 if (hevc->i_only & 0x1
5854 && pic_display->slice_type != 2) {
5855 pic_display->output_ready = 0;
5856 } else {
5857 prepare_display_buf(hevc, pic_display);
5858 if (get_dbg_flag(hevc)
5859 & H265_DEBUG_BUFMGR) {
5860 hevc_print(hevc, 0,
5861 "[BM] flush Display: POC %d, ",
5862 pic_display->POC);
5863 hevc_print_cont(hevc, 0,
5864 "decoding index %d\n",
5865 pic_display->decode_idx);
5866 }
5867 }
5868 }
5869 }
5870 } while (pic_display);
5871 clear_referenced_flag(hevc);
5872}
5873
5874/*
5875* dv_meta_flag: 0, all aux data; 1, dolby meta only; 2, exclude dolby meta
5876*/
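/*
 * Aux data read back from the HEVC_AUX buffer is repacked into
 * pic->aux_data_buf as a sequence of entries, each preceded by an 8-byte
 * header: 4-byte big-endian payload length, 1-byte tag, one reserved byte
 * and a 2-byte padding length, followed by the payload bytes themselves.
 */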
5877static void set_aux_data(struct hevc_state_s *hevc,
5878 struct PIC_s *pic, unsigned char suffix_flag,
5879 unsigned char dv_meta_flag)
5880{
5881 int i;
5882 unsigned short *aux_adr;
5883 unsigned int size_reg_val =
5884 READ_VREG(HEVC_AUX_DATA_SIZE);
5885 unsigned int aux_count = 0;
5886 int aux_size = 0;
5887 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
5888 return;
5889
5890 if (hevc->aux_data_dirty ||
5891 hevc->m_ins_flag == 0) {
5892
5893 hevc->aux_data_dirty = 0;
5894 }
5895
5896 if (suffix_flag) {
5897 aux_adr = (unsigned short *)
5898 (hevc->aux_addr +
5899 hevc->prefix_aux_size);
5900 aux_count =
5901 ((size_reg_val & 0xffff) << 4)
5902 >> 1;
5903 aux_size =
5904 hevc->suffix_aux_size;
5905 } else {
5906 aux_adr =
5907 (unsigned short *)hevc->aux_addr;
5908 aux_count =
5909 ((size_reg_val >> 16) << 4)
5910 >> 1;
5911 aux_size =
5912 hevc->prefix_aux_size;
5913 }
5914 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5915 hevc_print(hevc, 0,
5916 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
5917 __func__, pic, pic->aux_data_size,
5918 aux_count, suffix_flag, dv_meta_flag);
5919 }
5920 if (aux_size > 0 && aux_count > 0) {
5921 int heads_size = 0;
5922 int new_size;
5923 char *new_buf;
5924
5925 for (i = 0; i < aux_count; i++) {
5926 unsigned char tag = aux_adr[i] >> 8;
5927 if (tag != 0 && tag != 0xff) {
5928 if (dv_meta_flag == 0)
5929 heads_size += 8;
5930 else if (dv_meta_flag == 1 && tag == 0x1)
5931 heads_size += 8;
5932 else if (dv_meta_flag == 2 && tag != 0x1)
5933 heads_size += 8;
5934 }
5935 }
5936 new_size = pic->aux_data_size + aux_count + heads_size;
5937 new_buf = vmalloc(new_size);
5938 if (new_buf) {
5939 unsigned char valid_tag = 0;
5940 unsigned char *h =
5941 new_buf +
5942 pic->aux_data_size;
5943 unsigned char *p = h + 8;
5944 int len = 0;
5945 int padding_len = 0;
5946 memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
5947 if (pic->aux_data_buf)
5948 vfree(pic->aux_data_buf);
5949 pic->aux_data_buf = new_buf;
5950 for (i = 0; i < aux_count; i += 4) {
5951 int ii;
5952 unsigned char tag = aux_adr[i + 3] >> 8;
5953 if (tag != 0 && tag != 0xff) {
5954 if (dv_meta_flag == 0)
5955 valid_tag = 1;
5956 else if (dv_meta_flag == 1
5957 && tag == 0x1)
5958 valid_tag = 1;
5959 else if (dv_meta_flag == 2
5960 && tag != 0x1)
5961 valid_tag = 1;
5962 else
5963 valid_tag = 0;
5964 if (valid_tag && len > 0) {
5965 pic->aux_data_size +=
5966 (len + 8);
5967 h[0] = (len >> 24)
5968 & 0xff;
5969 h[1] = (len >> 16)
5970 & 0xff;
5971 h[2] = (len >> 8)
5972 & 0xff;
5973 h[3] = (len >> 0)
5974 & 0xff;
5975 h[6] =
5976 (padding_len >> 8)
5977 & 0xff;
5978 h[7] = (padding_len)
5979 & 0xff;
5980 h += (len + 8);
5981 p += 8;
5982 len = 0;
5983 padding_len = 0;
5984 }
5985 if (valid_tag) {
5986 h[4] = tag;
5987 h[5] = 0;
5988 h[6] = 0;
5989 h[7] = 0;
5990 }
5991 }
5992 if (valid_tag) {
5993 for (ii = 0; ii < 4; ii++) {
5994 unsigned short aa =
5995 aux_adr[i + 3
5996 - ii];
5997 *p = aa & 0xff;
5998 p++;
5999 len++;
6000 /*if ((aa >> 8) == 0xff)
6001 padding_len++;*/
6002 }
6003 }
6004 }
6005 if (len > 0) {
6006 pic->aux_data_size += (len + 8);
6007 h[0] = (len >> 24) & 0xff;
6008 h[1] = (len >> 16) & 0xff;
6009 h[2] = (len >> 8) & 0xff;
6010 h[3] = (len >> 0) & 0xff;
6011 h[6] = (padding_len >> 8) & 0xff;
6012 h[7] = (padding_len) & 0xff;
6013 }
6014 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
6015 hevc_print(hevc, 0,
6016 "aux: (size %d) suffix_flag %d\n",
6017 pic->aux_data_size, suffix_flag);
6018 for (i = 0; i < pic->aux_data_size; i++) {
6019 hevc_print_cont(hevc, 0,
6020 "%02x ", pic->aux_data_buf[i]);
6021 if (((i + 1) & 0xf) == 0)
6022 hevc_print_cont(hevc, 0, "\n");
6023 }
6024 hevc_print_cont(hevc, 0, "\n");
6025 }
6026
6027 } else {
6028 hevc_print(hevc, 0, "new buf alloc failed\n");
6029 if (pic->aux_data_buf)
6030 vfree(pic->aux_data_buf);
6031 pic->aux_data_buf = NULL;
6032 pic->aux_data_size = 0;
6033 }
6034 }
6035
6036}
6037
6038static void release_aux_data(struct hevc_state_s *hevc,
6039 struct PIC_s *pic)
6040{
6041 if (pic->aux_data_buf)
6042 vfree(pic->aux_data_buf);
6043 pic->aux_data_buf = NULL;
6044 pic->aux_data_size = 0;
6045}
6046
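/*
 * Wrap up the previously decoded picture: mark it for output, trim the
 * unused tail of its MMU page allocation, and emit pictures from the DPB in
 * POC order.  For an IDR picture the whole DPB is flushed instead.
 */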
6047static inline void hevc_pre_pic(struct hevc_state_s *hevc,
6048 struct PIC_s *pic)
6049{
6050
6051 /* prev pic */
6052 /*if (hevc->curr_POC != 0) {*/
6053 int decoded_poc = hevc->iPrevPOC;
6054#ifdef MULTI_INSTANCE_SUPPORT
6055 if (hevc->m_ins_flag) {
6056 decoded_poc = hevc->decoded_poc;
6057 hevc->decoded_poc = INVALID_POC;
6058 }
6059#endif
6060 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
6061 && hevc->m_nalUnitType !=
6062 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6063 struct PIC_s *pic_display;
6064
6065 pic = get_pic_by_POC(hevc, decoded_poc);
6066 if (pic && (pic->POC != INVALID_POC)) {
6067 /*PB skip control */
6068 if (pic->error_mark == 0
6069 && hevc->PB_skip_mode == 1) {
6070 /* start decoding after
6071 * first I
6072 */
6073 hevc->ignore_bufmgr_error |= 0x1;
6074 }
6075 if (hevc->ignore_bufmgr_error & 1) {
6076 if (hevc->PB_skip_count_after_decoding > 0) {
6077 hevc->PB_skip_count_after_decoding--;
6078 } else {
6079 /* start displaying */
6080 hevc->ignore_bufmgr_error |= 0x2;
6081 }
6082 }
6083 if (hevc->mmu_enable
6084 && ((hevc->double_write_mode & 0x10) == 0)) {
6085 if (!hevc->m_ins_flag) {
6086 hevc->used_4k_num =
6087 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
6088
6089 if ((!is_skip_decoding(hevc, pic)) &&
6090 (hevc->used_4k_num >= 0) &&
6091 (hevc->cur_pic->scatter_alloc
6092 == 1)) {
6093 hevc_print(hevc,
6094 H265_DEBUG_BUFMGR_MORE,
6095 "%s pic index %d scatter_alloc %d page_start %d\n",
6096 "decoder_mmu_box_free_idx_tail",
6097 hevc->cur_pic->index,
6098 hevc->cur_pic->scatter_alloc,
6099 hevc->used_4k_num);
6100 hevc_mmu_dma_check(hw_to_vdec(hevc));
6101 decoder_mmu_box_free_idx_tail(
6102 hevc->mmu_box,
6103 hevc->cur_pic->index,
6104 hevc->used_4k_num);
6105 hevc->cur_pic->scatter_alloc
6106 = 2;
6107 }
6108 hevc->used_4k_num = -1;
6109 }
6110 }
6111
6112 pic->output_mark = 1;
6113 pic->recon_mark = 1;
6114 pic->dis_mark = 1;
6115 }
6116 do {
6117 pic_display = output_pic(hevc, 0);
6118
6119 if (pic_display) {
6120 if ((pic_display->error_mark &&
6121 ((hevc->ignore_bufmgr_error &
6122 0x2) == 0))
6123 || (get_dbg_flag(hevc) &
6124 H265_DEBUG_DISPLAY_CUR_FRAME)
6125 || (get_dbg_flag(hevc) &
6126 H265_DEBUG_NO_DISPLAY)) {
6127 pic_display->output_ready = 0;
6128 if (get_dbg_flag(hevc) &
6129 H265_DEBUG_BUFMGR) {
6130 hevc_print(hevc, 0,
6131 "[BM] Display: POC %d, ",
6132 pic_display->POC);
6133 hevc_print_cont(hevc, 0,
6134 "decoding index %d ==> ",
6135 pic_display->
6136 decode_idx);
6137 hevc_print_cont(hevc, 0,
6138 "Debug or err,recycle it\n");
6139 }
6140 } else {
6141 if (hevc->i_only & 0x1
6142 && pic_display->
6143 slice_type != 2) {
6144 pic_display->output_ready = 0;
6145 } else {
6146 prepare_display_buf
6147 (hevc,
6148 pic_display);
6149 if (get_dbg_flag(hevc) &
6150 H265_DEBUG_BUFMGR) {
6151 hevc_print(hevc, 0,
6152 "[BM] Display: POC %d, ",
6153 pic_display->POC);
6154 hevc_print_cont(hevc, 0,
6155 "decoding index %d\n",
6156 pic_display->
6157 decode_idx);
6158 }
6159 }
6160 }
6161 }
6162 } while (pic_display);
6163 } else {
6164 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6165 hevc_print(hevc, 0,
6166 "[BM] current pic is IDR, ");
6167 hevc_print(hevc, 0,
6168 "clear referenced flag of all buffers\n");
6169 }
6170 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6171 dump_pic_list(hevc);
6172 pic = get_pic_by_POC(hevc, decoded_poc);
6173 flush_output(hevc, pic);
6174 }
6175
6176}
6177
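/*
 * If fewer LCUs were decoded than the previous frame geometry requires, the
 * frame is incomplete and gets error-marked (error_handle_policy bit 0x20
 * disables this check, bit 0x80 disables the over-decode check below).
 */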
6178static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
6179 int decoded_lcu)
6180{
6181 int current_lcu_idx = decoded_lcu;
6182 if (decoded_lcu < 0)
6183 return;
6184
6185 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6186 hevc_print(hevc, 0,
6187 "cur lcu idx = %d, (total %d)\n",
6188 current_lcu_idx, hevc->lcu_total);
6189 }
6190 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6191 if (hevc->first_pic_after_recover) {
6192 if (current_lcu_idx !=
6193 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6194 hevc->cur_pic->error_mark = 1;
6195 } else {
6196 if (hevc->lcu_x_num_pre != 0
6197 && hevc->lcu_y_num_pre != 0
6198 && current_lcu_idx != 0
6199 && current_lcu_idx <
6200 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6201 hevc->cur_pic->error_mark = 1;
6202 }
6203 if (hevc->cur_pic->error_mark) {
6204 hevc_print(hevc, 0,
6205 "cur lcu idx = %d, (total %d), set error_mark\n",
6206 current_lcu_idx,
6207 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
6208 if (is_log_enable(hevc))
6209 add_log(hevc,
6210 "cur lcu idx = %d, (total %d), set error_mark",
6211 current_lcu_idx,
6212 hevc->lcu_x_num_pre *
6213 hevc->lcu_y_num_pre);
6214
6215 }
6216
6217 }
6218 if (hevc->cur_pic && hevc->head_error_flag) {
6219 hevc->cur_pic->error_mark = 1;
6220 hevc_print(hevc, 0,
6221 "head has error, set error_mark\n");
6222 }
6223
6224 if ((error_handle_policy & 0x80) == 0) {
6225 if (hevc->over_decode && hevc->cur_pic) {
6226 hevc_print(hevc, 0,
6227 "over decode, set error_mark\n");
6228 hevc->cur_pic->error_mark = 1;
6229 }
6230 }
6231
6232 hevc->lcu_x_num_pre = hevc->lcu_x_num;
6233 hevc->lcu_y_num_pre = hevc->lcu_y_num;
6234}
6235
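/*
 * Same check against the current frame geometry; an error-marked picture
 * whose POC has fallen far behind iPrevPOC additionally forces a flush of
 * the DPB.
 */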
6236static void check_pic_decoded_error(struct hevc_state_s *hevc,
6237 int decoded_lcu)
6238{
6239 int current_lcu_idx = decoded_lcu;
6240 if (decoded_lcu < 0)
6241 return;
6242
6243 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6244 hevc_print(hevc, 0,
6245 "cur lcu idx = %d, (total %d)\n",
6246 current_lcu_idx, hevc->lcu_total);
6247 }
6248 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6249 if (hevc->lcu_x_num != 0
6250 && hevc->lcu_y_num != 0
6251 && current_lcu_idx != 0
6252 && current_lcu_idx <
6253 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6254 hevc->cur_pic->error_mark = 1;
6255 if (hevc->cur_pic->error_mark) {
6256 hevc_print(hevc, 0,
6257 "cur lcu idx = %d, (total %d), set error_mark\n",
6258 current_lcu_idx,
6259 hevc->lcu_x_num*hevc->lcu_y_num);
6260			if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6261 && ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6262 hevc_print(hevc, 0,
6263 "Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6264					hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6265				flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6266 }
6267 if (is_log_enable(hevc))
6268 add_log(hevc,
6269 "cur lcu idx = %d, (total %d), set error_mark",
6270 current_lcu_idx,
6271 hevc->lcu_x_num *
6272 hevc->lcu_y_num);
6273
6274 }
6275
6276 }
6277 if (hevc->cur_pic && hevc->head_error_flag) {
6278 hevc->cur_pic->error_mark = 1;
6279 hevc_print(hevc, 0,
6280 "head has error, set error_mark\n");
6281 }
6282
6283 if ((error_handle_policy & 0x80) == 0) {
6284 if (hevc->over_decode && hevc->cur_pic) {
6285 hevc_print(hevc, 0,
6286 "over decode, set error_mark\n");
6287 hevc->cur_pic->error_mark = 1;
6288 }
6289 }
6290}
6291
6292/* Collect per-picture QoS info; call this only after one field
6293 * or one frame has been fully decoded. */
6294static void get_picture_qos_info(struct hevc_state_s *hevc)
6295{
6296 struct PIC_s *picture = hevc->cur_pic;
6297
6298/*
6299#define DEBUG_QOS
6300*/
6301
6302 if (!hevc->cur_pic)
6303 return;
6304
6305 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6306 unsigned char a[3];
6307 unsigned char i, j, t;
6308 unsigned long data;
6309
6310 data = READ_VREG(HEVC_MV_INFO);
6311 if (picture->slice_type == I_SLICE)
6312 data = 0;
6313 a[0] = data & 0xff;
6314 a[1] = (data >> 8) & 0xff;
6315 a[2] = (data >> 16) & 0xff;
6316
6317 for (i = 0; i < 3; i++)
6318 for (j = i+1; j < 3; j++) {
6319 if (a[j] < a[i]) {
6320 t = a[j];
6321 a[j] = a[i];
6322 a[i] = t;
6323 } else if (a[j] == a[i]) {
6324 a[i]++;
6325 t = a[j];
6326 a[j] = a[i];
6327 a[i] = t;
6328 }
6329 }
6330 picture->max_mv = a[2];
6331 picture->avg_mv = a[1];
6332 picture->min_mv = a[0];
6333#ifdef DEBUG_QOS
6334 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6335 data, a[0], a[1], a[2]);
6336#endif
6337
6338 data = READ_VREG(HEVC_QP_INFO);
6339 a[0] = data & 0x1f;
6340 a[1] = (data >> 8) & 0x3f;
6341 a[2] = (data >> 16) & 0x7f;
6342
6343 for (i = 0; i < 3; i++)
6344 for (j = i+1; j < 3; j++) {
6345 if (a[j] < a[i]) {
6346 t = a[j];
6347 a[j] = a[i];
6348 a[i] = t;
6349 } else if (a[j] == a[i]) {
6350 a[i]++;
6351 t = a[j];
6352 a[j] = a[i];
6353 a[i] = t;
6354 }
6355 }
6356 picture->max_qp = a[2];
6357 picture->avg_qp = a[1];
6358 picture->min_qp = a[0];
6359#ifdef DEBUG_QOS
6360 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6361 data, a[0], a[1], a[2]);
6362#endif
6363
6364 data = READ_VREG(HEVC_SKIP_INFO);
6365 a[0] = data & 0x1f;
6366 a[1] = (data >> 8) & 0x3f;
6367 a[2] = (data >> 16) & 0x7f;
6368
6369 for (i = 0; i < 3; i++)
6370 for (j = i+1; j < 3; j++) {
6371 if (a[j] < a[i]) {
6372 t = a[j];
6373 a[j] = a[i];
6374 a[i] = t;
6375 } else if (a[j] == a[i]) {
6376 a[i]++;
6377 t = a[j];
6378 a[j] = a[i];
6379 a[i] = t;
6380 }
6381 }
6382 picture->max_skip = a[2];
6383 picture->avg_skip = a[1];
6384 picture->min_skip = a[0];
6385
6386#ifdef DEBUG_QOS
6387 hevc_print(hevc, 0,
6388 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6389 data, a[0], a[1], a[2]);
6390#endif
6391 } else {
6392 uint32_t blk88_y_count;
6393 uint32_t blk88_c_count;
6394 uint32_t blk22_mv_count;
6395 uint32_t rdata32;
6396 int32_t mv_hi;
6397 int32_t mv_lo;
6398 uint32_t rdata32_l;
6399 uint32_t mvx_L0_hi;
6400 uint32_t mvy_L0_hi;
6401 uint32_t mvx_L1_hi;
6402 uint32_t mvy_L1_hi;
6403 int64_t value;
6404 uint64_t temp_value;
6405#ifdef DEBUG_QOS
6406 int pic_number = picture->POC;
6407#endif
6408
6409 picture->max_mv = 0;
6410 picture->avg_mv = 0;
6411 picture->min_mv = 0;
6412
6413 picture->max_skip = 0;
6414 picture->avg_skip = 0;
6415 picture->min_skip = 0;
6416
6417 picture->max_qp = 0;
6418 picture->avg_qp = 0;
6419 picture->min_qp = 0;
6420
6421
6422
6423#ifdef DEBUG_QOS
6424 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6425 picture->slice_type,
6426 picture->POC);
6427#endif
6428 /* set rd_idx to 0 */
6429 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6430
6431 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6432 if (blk88_y_count == 0) {
6433#ifdef DEBUG_QOS
6434 hevc_print(hevc, 0,
6435 "[Picture %d Quality] NO Data yet.\n",
6436 pic_number);
6437#endif
6438 /* reset all counts */
6439 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6440 return;
6441 }
6442 /* qp_y_sum */
6443 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6444#ifdef DEBUG_QOS
6445 hevc_print(hevc, 0,
6446 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6447 pic_number, rdata32/blk88_y_count,
6448 rdata32, blk88_y_count);
6449#endif
6450 picture->avg_qp = rdata32/blk88_y_count;
6451 /* intra_y_count */
6452 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6453#ifdef DEBUG_QOS
6454 hevc_print(hevc, 0,
6455 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6456 pic_number, rdata32*100/blk88_y_count,
6457 '%', rdata32);
6458#endif
6459 /* skipped_y_count */
6460 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6461#ifdef DEBUG_QOS
6462 hevc_print(hevc, 0,
6463 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6464 pic_number, rdata32*100/blk88_y_count,
6465 '%', rdata32);
6466#endif
6467 picture->avg_skip = rdata32*100/blk88_y_count;
6468 /* coeff_non_zero_y_count */
6469 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6470#ifdef DEBUG_QOS
6471 hevc_print(hevc, 0,
6472 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6473 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6474 '%', rdata32);
6475#endif
6476 /* blk66_c_count */
6477 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6478 if (blk88_c_count == 0) {
6479#ifdef DEBUG_QOS
6480 hevc_print(hevc, 0,
6481 "[Picture %d Quality] NO Data yet.\n",
6482 pic_number);
6483#endif
6484 /* reset all counts */
6485 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6486 return;
6487 }
6488 /* qp_c_sum */
6489 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6490#ifdef DEBUG_QOS
6491 hevc_print(hevc, 0,
6492 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6493 pic_number, rdata32/blk88_c_count,
6494 rdata32, blk88_c_count);
6495#endif
6496 /* intra_c_count */
6497 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6498#ifdef DEBUG_QOS
6499 hevc_print(hevc, 0,
6500 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6501 pic_number, rdata32*100/blk88_c_count,
6502 '%', rdata32);
6503#endif
6504 /* skipped_cu_c_count */
6505 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6506#ifdef DEBUG_QOS
6507 hevc_print(hevc, 0,
6508 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6509 pic_number, rdata32*100/blk88_c_count,
6510 '%', rdata32);
6511#endif
6512 /* coeff_non_zero_c_count */
6513 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6514#ifdef DEBUG_QOS
6515 hevc_print(hevc, 0,
6516 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6517 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6518 '%', rdata32);
6519#endif
6520
6521 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6522 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6523 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6524#ifdef DEBUG_QOS
6525 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6526 pic_number, (rdata32>>0)&0xff);
6527#endif
6528 picture->min_qp = (rdata32>>0)&0xff;
6529
6530#ifdef DEBUG_QOS
6531 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6532 pic_number, (rdata32>>8)&0xff);
6533#endif
6534 picture->max_qp = (rdata32>>8)&0xff;
6535
6536#ifdef DEBUG_QOS
6537 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6538 pic_number, (rdata32>>16)&0xff);
6539 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6540 pic_number, (rdata32>>24)&0xff);
6541#endif
6542
6543 /* blk22_mv_count */
6544 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6545 if (blk22_mv_count == 0) {
6546#ifdef DEBUG_QOS
6547 hevc_print(hevc, 0,
6548 "[Picture %d Quality] NO MV Data yet.\n",
6549 pic_number);
6550#endif
6551 /* reset all counts */
6552 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6553 return;
6554 }
6555 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6556 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6557 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6558 /* should all be 0x00 or 0xff */
6559#ifdef DEBUG_QOS
6560 hevc_print(hevc, 0,
6561 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6562 pic_number, rdata32);
6563#endif
6564 mvx_L0_hi = ((rdata32>>0)&0xff);
6565 mvy_L0_hi = ((rdata32>>8)&0xff);
6566 mvx_L1_hi = ((rdata32>>16)&0xff);
6567 mvy_L1_hi = ((rdata32>>24)&0xff);
6568
6569 /* mvx_L0_count[31:0] */
6570 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6571 temp_value = mvx_L0_hi;
6572 temp_value = (temp_value << 32) | rdata32_l;
6573
6574 if (mvx_L0_hi & 0x80)
6575 value = 0xFFFFFFF000000000 | temp_value;
6576 else
6577 value = temp_value;
6578 value = div_s64(value, blk22_mv_count);
6579#ifdef DEBUG_QOS
6580 hevc_print(hevc, 0,
6581 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6582 pic_number, (int)value,
6583 value, blk22_mv_count);
6584#endif
6585 picture->avg_mv = value;
6586
6587 /* mvy_L0_count[31:0] */
6588 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6589 temp_value = mvy_L0_hi;
6590 temp_value = (temp_value << 32) | rdata32_l;
6591
6592 if (mvy_L0_hi & 0x80)
6593 value = 0xFFFFFFF000000000 | temp_value;
6594 else
6595 value = temp_value;
6596#ifdef DEBUG_QOS
6597 hevc_print(hevc, 0,
6598 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6599 pic_number, rdata32_l/blk22_mv_count,
6600 value, blk22_mv_count);
6601#endif
6602
6603 /* mvx_L1_count[31:0] */
6604 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6605 temp_value = mvx_L1_hi;
6606 temp_value = (temp_value << 32) | rdata32_l;
6607 if (mvx_L1_hi & 0x80)
6608 value = 0xFFFFFFF000000000 | temp_value;
6609 else
6610 value = temp_value;
6611#ifdef DEBUG_QOS
6612 hevc_print(hevc, 0,
6613 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6614 pic_number, rdata32_l/blk22_mv_count,
6615 value, blk22_mv_count);
6616#endif
6617
6618 /* mvy_L1_count[31:0] */
6619 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6620 temp_value = mvy_L1_hi;
6621 temp_value = (temp_value << 32) | rdata32_l;
6622 if (mvy_L1_hi & 0x80)
6623 value = 0xFFFFFFF000000000 | temp_value;
6624 else
6625 value = temp_value;
6626#ifdef DEBUG_QOS
6627 hevc_print(hevc, 0,
6628 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6629 pic_number, rdata32_l/blk22_mv_count,
6630 value, blk22_mv_count);
6631#endif
6632
6633 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6634 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6635 mv_hi = (rdata32>>16)&0xffff;
6636 if (mv_hi & 0x8000)
6637 mv_hi = 0x8000 - mv_hi;
6638#ifdef DEBUG_QOS
6639 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6640 pic_number, mv_hi);
6641#endif
6642 picture->max_mv = mv_hi;
6643
6644 mv_lo = (rdata32>>0)&0xffff;
6645 if (mv_lo & 0x8000)
6646 mv_lo = 0x8000 - mv_lo;
6647#ifdef DEBUG_QOS
6648 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6649 pic_number, mv_lo);
6650#endif
6651 picture->min_mv = mv_lo;
6652
6653 /* {mvy_L0_max, mvy_L0_min} */
6654 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6655 mv_hi = (rdata32>>16)&0xffff;
6656 if (mv_hi & 0x8000)
6657 mv_hi = 0x8000 - mv_hi;
6658#ifdef DEBUG_QOS
6659 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6660 pic_number, mv_hi);
6661#endif
6662
6663 mv_lo = (rdata32>>0)&0xffff;
6664 if (mv_lo & 0x8000)
6665 mv_lo = 0x8000 - mv_lo;
6666#ifdef DEBUG_QOS
6667 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6668 pic_number, mv_lo);
6669#endif
6670
6671 /* {mvx_L1_max, mvx_L1_min} */
6672 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6673 mv_hi = (rdata32>>16)&0xffff;
6674 if (mv_hi & 0x8000)
6675 mv_hi = 0x8000 - mv_hi;
6676#ifdef DEBUG_QOS
6677 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6678 pic_number, mv_hi);
6679#endif
6680
6681 mv_lo = (rdata32>>0)&0xffff;
6682 if (mv_lo & 0x8000)
6683 mv_lo = 0x8000 - mv_lo;
6684#ifdef DEBUG_QOS
6685 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6686 pic_number, mv_lo);
6687#endif
6688
6689 /* {mvy_L1_max, mvy_L1_min} */
6690 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6691 mv_hi = (rdata32>>16)&0xffff;
6692 if (mv_hi & 0x8000)
6693 mv_hi = 0x8000 - mv_hi;
6694#ifdef DEBUG_QOS
6695 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6696 pic_number, mv_hi);
6697#endif
6698 mv_lo = (rdata32>>0)&0xffff;
6699 if (mv_lo & 0x8000)
6700 mv_lo = 0x8000 - mv_lo;
6701#ifdef DEBUG_QOS
6702 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6703 pic_number, mv_lo);
6704#endif
6705
6706 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6707#ifdef DEBUG_QOS
6708 hevc_print(hevc, 0,
6709 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6710 pic_number, rdata32);
6711#endif
6712 /* reset all counts */
6713 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6714 }
6715}
6716
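/*
 * Process one slice segment header passed up through the RPM parameters.
 * Return values as used below: 0 continue decoding, 1 skip a RASL picture,
 * 2 skip the whole picture, 3/4 fatal parameter or size errors, 0xf run
 * buffer management only, -1 wait for a free buffer.
 */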
6717static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6718 union param_u *rpm_param,
6719 int decode_pic_begin)
6720{
6721#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6722 struct vdec_s *vdec = hw_to_vdec(hevc);
6723#endif
6724 int i;
6725 int lcu_x_num_div;
6726 int lcu_y_num_div;
6727 int Col_ref;
6728 int dbg_skip_flag = 0;
6729
6730 if (hevc->wait_buf == 0) {
6731 hevc->sps_num_reorder_pics_0 =
6732 rpm_param->p.sps_num_reorder_pics_0;
6733 hevc->m_temporalId = rpm_param->p.m_temporalId;
6734 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6735 hevc->interlace_flag =
6736 (rpm_param->p.profile_etc >> 2) & 0x1;
6737 hevc->curr_pic_struct =
6738 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6739 if (parser_sei_enable & 0x4) {
6740 hevc->frame_field_info_present_flag =
6741 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6742 }
6743
6744 if (interlace_enable == 0 || hevc->m_ins_flag)
6745 hevc->interlace_flag = 0;
6746 if (interlace_enable & 0x100)
6747 hevc->interlace_flag = interlace_enable & 0x1;
6748 if (hevc->interlace_flag == 0)
6749 hevc->curr_pic_struct = 0;
6750 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6751 /*
6752 *hevc->m_pocRandomAccess = MAX_INT;
6753 * //add to fix RAP_B_Bossen_1
6754 */
6755 /* } */
6756 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6757 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6758 hevc->slice_segment_addr =
6759 rpm_param->p.slice_segment_address;
6760 if (!rpm_param->p.dependent_slice_segment_flag)
6761 hevc->slice_addr = hevc->slice_segment_addr;
6762 } else {
6763 hevc->slice_segment_addr = 0;
6764 hevc->slice_addr = 0;
6765 }
6766
6767 hevc->iPrevPOC = hevc->curr_POC;
6768 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6769 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6770 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6771 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6772 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6773 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6774 hevc->isNextSliceSegment =
6775 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6776 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6777 || hevc->pic_h !=
6778 rpm_param->p.pic_height_in_luma_samples) {
6779 hevc_print(hevc, 0,
6780 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6781 hevc->pic_w, hevc->pic_h,
6782 rpm_param->p.pic_width_in_luma_samples,
6783 rpm_param->p.pic_height_in_luma_samples,
6784 hevc->interlace_flag);
6785
6786 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6787 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6788 hevc->frame_width = hevc->pic_w;
6789 hevc->frame_height = hevc->pic_h;
6790#ifdef LOSLESS_COMPRESS_MODE
6791 if (/*re_config_pic_flag == 0 &&*/
6792 (get_double_write_mode(hevc) & 0x10) == 0)
6793 init_decode_head_hw(hevc);
6794#endif
6795 }
6796
6797 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6798 hevc_print(hevc, 0, "over size : %u x %u.\n",
6799 hevc->pic_w, hevc->pic_h);
6800 if ((!hevc->m_ins_flag) &&
6801 ((debug &
6802 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6803 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6804 H265_DEBUG_DIS_SYS_ERROR_PROC);
6805 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6806 return 3;
6807 }
6808 if (hevc->bit_depth_chroma > 10 ||
6809 hevc->bit_depth_luma > 10) {
6810			hevc_print(hevc, 0, "unsupported bit depth : %u,%u\n",
6811 hevc->bit_depth_chroma,
6812 hevc->bit_depth_luma);
6813 if (!hevc->m_ins_flag)
6814 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6815 H265_DEBUG_DIS_SYS_ERROR_PROC);
6816 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6817 return 4;
6818 }
6819
6820 /* it will cause divide 0 error */
6821 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6822 if (get_dbg_flag(hevc)) {
6823 hevc_print(hevc, 0,
6824 "Fatal Error, pic_w = %d, pic_h = %d\n",
6825 hevc->pic_w, hevc->pic_h);
6826 }
6827 return 3;
6828 }
6829 pic_list_process(hevc);
6830
6831 hevc->lcu_size =
6832 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6833 3 + rpm_param->
6834 p.log2_diff_max_min_coding_block_size);
6835 if (hevc->lcu_size == 0) {
6836 hevc_print(hevc, 0,
6837 "Error, lcu_size = 0 (%d,%d)\n",
6838 rpm_param->p.
6839 log2_min_coding_block_size_minus3,
6840 rpm_param->p.
6841 log2_diff_max_min_coding_block_size);
6842 return 3;
6843 }
6844 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6845 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6846 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6847 hevc->lcu_x_num =
6848 ((hevc->pic_w % hevc->lcu_size) ==
6849 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6850 hevc->lcu_y_num =
6851 ((hevc->pic_h % hevc->lcu_size) ==
6852 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6853 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
6854
6855 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6856 || hevc->m_nalUnitType ==
6857 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6858 hevc->curr_POC = 0;
6859 if ((hevc->m_temporalId - 1) == 0)
6860 hevc->iPrevTid0POC = hevc->curr_POC;
6861 } else {
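			/*
			 * Non-IDR picture: rebuild the full POC from the
			 * transmitted LSBs following the PicOrderCntMsb
			 * derivation of the HEVC spec (clause 8.3.1).
			 */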
6862 int iMaxPOClsb =
6863 1 << (rpm_param->p.
6864 log2_max_pic_order_cnt_lsb_minus4 + 4);
6865 int iPrevPOClsb;
6866 int iPrevPOCmsb;
6867 int iPOCmsb;
6868 int iPOClsb = rpm_param->p.POClsb;
6869
6870 if (iMaxPOClsb == 0) {
6871 hevc_print(hevc, 0,
6872 "error iMaxPOClsb is 0\n");
6873 return 3;
6874 }
6875
6876 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
6877 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
6878
6879 if ((iPOClsb < iPrevPOClsb)
6880 && ((iPrevPOClsb - iPOClsb) >=
6881 (iMaxPOClsb / 2)))
6882 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
6883 else if ((iPOClsb > iPrevPOClsb)
6884 && ((iPOClsb - iPrevPOClsb) >
6885 (iMaxPOClsb / 2)))
6886 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
6887 else
6888 iPOCmsb = iPrevPOCmsb;
6889 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6890 hevc_print(hevc, 0,
6891 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
6892 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
6893 iPOClsb);
6894 }
6895 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6896 || hevc->m_nalUnitType ==
6897 NAL_UNIT_CODED_SLICE_BLANT
6898 || hevc->m_nalUnitType ==
6899 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
6900 /* For BLA picture types, POCmsb is set to 0. */
6901 iPOCmsb = 0;
6902 }
6903 hevc->curr_POC = (iPOCmsb + iPOClsb);
6904 if ((hevc->m_temporalId - 1) == 0)
6905 hevc->iPrevTid0POC = hevc->curr_POC;
6906 else {
6907 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6908 hevc_print(hevc, 0,
6909 "m_temporalID is %d\n",
6910 hevc->m_temporalId);
6911 }
6912 }
6913 }
6914 hevc->RefNum_L0 =
6915 (rpm_param->p.num_ref_idx_l0_active >
6916 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6917 num_ref_idx_l0_active;
6918 hevc->RefNum_L1 =
6919 (rpm_param->p.num_ref_idx_l1_active >
6920 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6921 num_ref_idx_l1_active;
6922
6923 /* if(curr_POC==0x10) dump_lmem(); */
6924
6925 /* skip RASL pictures after CRA/BLA pictures */
6926 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
6927 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
6928 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6929 || hevc->m_nalUnitType ==
6930 NAL_UNIT_CODED_SLICE_BLANT
6931 || hevc->m_nalUnitType ==
6932 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6933 hevc->m_pocRandomAccess = hevc->curr_POC;
6934 else
6935 hevc->m_pocRandomAccess = -MAX_INT;
6936 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6937 || hevc->m_nalUnitType ==
6938 NAL_UNIT_CODED_SLICE_BLANT
6939 || hevc->m_nalUnitType ==
6940 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6941 hevc->m_pocRandomAccess = hevc->curr_POC;
6942 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
6943 (nal_skip_policy >= 3) &&
6944 (hevc->m_nalUnitType ==
6945 NAL_UNIT_CODED_SLICE_RASL_N ||
6946 hevc->m_nalUnitType ==
6947 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
6948 if (get_dbg_flag(hevc)) {
6949 hevc_print(hevc, 0,
6950 "RASL picture with POC %d < %d ",
6951 hevc->curr_POC, hevc->m_pocRandomAccess);
6952 hevc_print(hevc, 0,
6953					"(RandomAccess point POC), skip it\n");
6954 }
6955 return 1;
6956 }
6957
6958 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
6959 hevc->skip_flag = 0;
6960 /**/
6961 /* if((iPrevPOC != curr_POC)){ */
6962 if (rpm_param->p.slice_segment_address == 0) {
6963 struct PIC_s *pic;
6964
6965 hevc->new_pic = 1;
6966#ifdef MULTI_INSTANCE_SUPPORT
6967 if (!hevc->m_ins_flag)
6968#endif
6969 check_pic_decoded_error_pre(hevc,
6970 READ_VREG(HEVC_PARSER_LCU_START)
6971 & 0xffffff);
6972			if (use_cma == 0) {
6973 if (hevc->pic_list_init_flag == 0) {
6974 init_pic_list(hevc);
6975 init_pic_list_hw(hevc);
6976 init_buf_spec(hevc);
6977 hevc->pic_list_init_flag = 3;
6978 }
6979 }
6980 if (!hevc->m_ins_flag) {
6981 if (hevc->cur_pic)
6982 get_picture_qos_info(hevc);
6983 }
6984 hevc->first_pic_after_recover = 0;
6985 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6986 dump_pic_list(hevc);
6987 /* prev pic */
6988 hevc_pre_pic(hevc, pic);
6989 /*
6990 *update referenced of old pictures
6991 *(cur_pic->referenced is 1 and not updated)
6992 */
6993 apply_ref_pic_set(hevc, hevc->curr_POC,
6994 rpm_param);
6995
6996 if (hevc->mmu_enable)
6997 recycle_mmu_bufs(hevc);
6998
6999#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7000 if (vdec->master) {
7001 struct hevc_state_s *hevc_ba =
7002 (struct hevc_state_s *)
7003 vdec->master->private;
7004 if (hevc_ba->cur_pic != NULL) {
7005 hevc_ba->cur_pic->dv_enhance_exist = 1;
7006 hevc_print(hevc, H265_DEBUG_DV,
7007 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
7008 hevc->curr_POC, hevc_ba->cur_pic->POC);
7009 }
7010 }
7011 if (vdec->master == NULL &&
7012 vdec->slave == NULL)
7013 set_aux_data(hevc,
7014 hevc->cur_pic, 1, 0); /*suffix*/
7015 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7016 set_aux_data(hevc,
7017 hevc->cur_pic, 0, 1); /*dv meta only*/
7018#else
7019 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7020#endif
7021 /* new pic */
7022 hevc->cur_pic = hevc->is_used_v4l ?
7023 v4l_get_new_pic(hevc, rpm_param) :
7024 get_new_pic(hevc, rpm_param);
7025 if (hevc->cur_pic == NULL) {
7026 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
7027 dump_pic_list(hevc);
7028 hevc->wait_buf = 1;
7029 return -1;
7030 }
7031#ifdef MULTI_INSTANCE_SUPPORT
7032 hevc->decoding_pic = hevc->cur_pic;
7033 if (!hevc->m_ins_flag)
7034 hevc->over_decode = 0;
7035#endif
7036#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7037 hevc->cur_pic->dv_enhance_exist = 0;
7038 if (vdec->slave)
7039 hevc_print(hevc, H265_DEBUG_DV,
7040 "Clear bl (poc %d) dv_enhance_exist flag\n",
7041 hevc->curr_POC);
7042 if (vdec->master == NULL &&
7043 vdec->slave == NULL)
7044 set_aux_data(hevc,
7045 hevc->cur_pic, 0, 0); /*prefix*/
7046
7047 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7048 set_aux_data(hevc,
7049 hevc->cur_pic, 0, 2); /*pre sei only*/
7050#else
7051 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7052#endif
7053 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
7054 hevc->cur_pic->output_ready = 1;
7055 hevc->cur_pic->stream_offset =
7056 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
7057 prepare_display_buf(hevc, hevc->cur_pic);
7058 hevc->wait_buf = 2;
7059 return -1;
7060 }
7061 } else {
7062 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
7063#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7064 if (vdec->master == NULL &&
7065 vdec->slave == NULL) {
7066 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7067 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7068 }
7069#else
7070 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7071 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7072#endif
7073 }
7074 if (hevc->pic_list_init_flag != 3
7075 || hevc->cur_pic == NULL) {
7076 /* make it dec from the first slice segment */
7077 return 3;
7078 }
7079 hevc->cur_pic->slice_idx++;
7080 hevc->new_pic = 0;
7081 }
7082 } else {
7083 if (hevc->wait_buf == 1) {
7084 pic_list_process(hevc);
7085 hevc->cur_pic = hevc->is_used_v4l ?
7086 v4l_get_new_pic(hevc, rpm_param) :
7087 get_new_pic(hevc, rpm_param);
7088 if (hevc->cur_pic == NULL)
7089 return -1;
7090
7091 if (!hevc->m_ins_flag)
7092 hevc->over_decode = 0;
7093
7094#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7095 hevc->cur_pic->dv_enhance_exist = 0;
7096 if (vdec->master == NULL &&
7097 vdec->slave == NULL)
7098 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7099#else
7100 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7101#endif
7102 hevc->wait_buf = 0;
7103 } else if (hevc->wait_buf ==
7104 2) {
7105 if (get_display_pic_num(hevc) >
7106 1)
7107 return -1;
7108 hevc->wait_buf = 0;
7109 }
7110 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7111 dump_pic_list(hevc);
7112 }
7113
7114 if (hevc->new_pic) {
7115#if 1
7116 /*SUPPORT_10BIT*/
7117 int sao_mem_unit =
7118 (hevc->lcu_size == 16 ? 9 :
7119 hevc->lcu_size ==
7120 32 ? 14 : 24) << 4;
7121#else
7122 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
7123#endif
7124 int pic_height_cu =
7125 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
7126 int pic_width_cu =
7127 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
7128 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
7129
7130 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
7131 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7132 hevc_print(hevc, 0,
7133 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
7134 __func__,
7135 hevc->decode_idx,
7136 hevc->curr_pic_struct,
7137 hevc->interlace_flag,
7138 hevc->cur_pic->index);
7139 }
7140 if (dbg_skip_decode_index != 0 &&
7141 hevc->decode_idx == dbg_skip_decode_index)
7142 dbg_skip_flag = 1;
7143
7144 hevc->decode_idx++;
7145 update_tile_info(hevc, pic_width_cu, pic_height_cu,
7146 sao_mem_unit, rpm_param);
7147
7148 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
7149 }
7150
7151 if (hevc->iPrevPOC != hevc->curr_POC) {
7152 hevc->new_tile = 1;
7153 hevc->tile_x = 0;
7154 hevc->tile_y = 0;
7155 hevc->tile_y_x = 0;
7156 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7157 hevc_print(hevc, 0,
7158 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
7159 hevc->tile_x, hevc->tile_y);
7160 }
7161 } else if (hevc->tile_enabled) {
7162 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7163 hevc_print(hevc, 0,
7164 "slice_segment_address is %d\n",
7165 rpm_param->p.slice_segment_address);
7166 }
7167 hevc->tile_y_x =
7168 get_tile_index(hevc, rpm_param->p.slice_segment_address,
7169 (hevc->pic_w +
7170 hevc->lcu_size -
7171 1) / hevc->lcu_size);
7172 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
7173 && (hevc->tile_y_x != -1)) {
7174 hevc->new_tile = 1;
7175 hevc->tile_x = hevc->tile_y_x & 0xff;
7176 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
7177 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7178 hevc_print(hevc, 0,
7179 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
7180 rpm_param->p.slice_segment_address,
7181 hevc->tile_x, hevc->tile_y);
7182 }
7183 } else
7184 hevc->new_tile = 0;
7185 } else
7186 hevc->new_tile = 0;
7187
7188 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
7189 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
7190 hevc->new_tile = 0;
7191
7192 if (hevc->new_tile) {
7193 hevc->tile_start_lcu_x =
7194 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
7195 hevc->tile_start_lcu_y =
7196 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
7197 hevc->tile_width_lcu =
7198 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
7199 hevc->tile_height_lcu =
7200 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
7201 }
7202
7203 set_ref_pic_list(hevc, rpm_param);
7204
7205 Col_ref = rpm_param->p.collocated_ref_idx;
7206
7207 hevc->LDCFlag = 0;
7208 if (rpm_param->p.slice_type != I_SLICE) {
7209 hevc->LDCFlag = 1;
7210 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
7211 if (hevc->cur_pic->
7212 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
7213 hevc->curr_POC)
7214 hevc->LDCFlag = 0;
7215 }
7216 if (rpm_param->p.slice_type == B_SLICE) {
7217 for (i = 0; (i < hevc->RefNum_L1)
7218 && hevc->LDCFlag; i++) {
7219 if (hevc->cur_pic->
7220 m_aiRefPOCList1[hevc->cur_pic->
7221 slice_idx][i] >
7222 hevc->curr_POC)
7223 hevc->LDCFlag = 0;
7224 }
7225 }
7226 }
7227
7228 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
7229
7230 hevc->plevel =
7231 rpm_param->p.log2_parallel_merge_level;
7232 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
7233
7234 hevc->LongTerm_Curr = 0; /* to do ... */
7235 hevc->LongTerm_Col = 0; /* to do ... */
7236
7237 hevc->list_no = 0;
7238 if (rpm_param->p.slice_type == B_SLICE)
7239 hevc->list_no = 1 - hevc->ColFromL0Flag;
7240 if (hevc->list_no == 0) {
7241 if (Col_ref < hevc->RefNum_L0) {
7242 hevc->Col_POC =
7243 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
7244 slice_idx][Col_ref];
7245 } else
7246 hevc->Col_POC = INVALID_POC;
7247 } else {
7248 if (Col_ref < hevc->RefNum_L1) {
7249 hevc->Col_POC =
7250 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
7251 slice_idx][Col_ref];
7252 } else
7253 hevc->Col_POC = INVALID_POC;
7254 }
7255
7256 hevc->LongTerm_Ref = 0; /* to do ... */
7257
7258 if (hevc->slice_type != 2) {
7259 /* if(hevc->i_only==1){ */
7260 /* return 0xf; */
7261 /* } */
7262
7263 if (hevc->Col_POC != INVALID_POC) {
7264 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7265 if (hevc->col_pic == NULL) {
7266 hevc->cur_pic->error_mark = 1;
7267 if (get_dbg_flag(hevc)) {
7268 hevc_print(hevc, 0,
7269 "WRONG,fail to get the pic Col_POC\n");
7270 }
7271 if (is_log_enable(hevc))
7272 add_log(hevc,
7273 "WRONG,fail to get the pic Col_POC");
7274 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7275 hevc->cur_pic->error_mark = 1;
7276 if (get_dbg_flag(hevc)) {
7277 hevc_print(hevc, 0,
7278 "WRONG, Col_POC error_mark is 1\n");
7279 }
7280 if (is_log_enable(hevc))
7281 add_log(hevc,
7282 "WRONG, Col_POC error_mark is 1");
7283 } else {
7284 if ((hevc->col_pic->width
7285 != hevc->pic_w) ||
7286 (hevc->col_pic->height
7287 != hevc->pic_h)) {
7288 hevc_print(hevc, 0,
7289 "Wrong reference pic (poc %d) width/height %d/%d\n",
7290 hevc->col_pic->POC,
7291 hevc->col_pic->width,
7292 hevc->col_pic->height);
7293 hevc->cur_pic->error_mark = 1;
7294 }
7295
7296 }
7297
7298 if (hevc->cur_pic->error_mark
7299 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7300#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7301 /*count info*/
7302 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7303 hevc->cur_pic->stream_offset);
7304#endif
7305 }
7306
7307 if (is_skip_decoding(hevc,
7308 hevc->cur_pic)) {
7309 return 2;
7310 }
7311 } else
7312 hevc->col_pic = hevc->cur_pic;
7313 } /* */
7314 if (hevc->col_pic == NULL)
7315 hevc->col_pic = hevc->cur_pic;
7316#ifdef BUFFER_MGR_ONLY
7317 return 0xf;
7318#else
7319 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7320 || (dbg_skip_flag))
7321 return 0xf;
7322#endif
7323
7324 config_mc_buffer(hevc, hevc->cur_pic);
7325
7326 if (is_skip_decoding(hevc,
7327 hevc->cur_pic)) {
7328 if (get_dbg_flag(hevc))
7329 hevc_print(hevc, 0,
7330 "Discard this picture index %d\n",
7331 hevc->cur_pic->index);
7332#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7333 /*count info*/
7334 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7335 hevc->cur_pic->stream_offset);
7336#endif
7337 return 2;
7338 }
7339#ifdef MCRCC_ENABLE
7340 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7341#endif
7342 config_mpred_hw(hevc);
7343
7344 config_sao_hw(hevc, rpm_param);
7345
7346 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7347 return 0xf;
7348
7349 return 0;
7350}
7351
7352
7353
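/*
 * Allocate the scatter MMU pages backing one compressed frame: the 4K page
 * count is derived from the lossless-compression body size for the given
 * width/height/bit depth.  Nothing is allocated when double write mode 0x10
 * is set, since the compressed reference path is unused in that mode.
 */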
7354static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7355 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7356 int cur_buf_idx = new_pic->index;
7357 int bit_depth_10 = (bit_depth != 0x00);
7358 int picture_size;
7359 int cur_mmu_4k_number;
7360 int ret, max_frame_num;
7361 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7362 new_pic->height, !bit_depth_10);
7363 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7364 if (hevc->double_write_mode & 0x10)
7365 return 0;
7366 /*hevc_print(hevc, 0,
7367 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7368 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7369 if (new_pic->scatter_alloc) {
7370 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7371 new_pic->scatter_alloc = 0;
7372 }
7373 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7374 max_frame_num = MAX_FRAME_8K_NUM;
7375 else
7376 max_frame_num = MAX_FRAME_4K_NUM;
7377 if (cur_mmu_4k_number > max_frame_num) {
7378 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7379 cur_mmu_4k_number,
7380 new_pic->width,
7381 new_pic->height);
7382 return -1;
7383 }
7384 ret = decoder_mmu_box_alloc_idx(
7385 hevc->mmu_box,
7386 cur_buf_idx,
7387 cur_mmu_4k_number,
7388 mmu_index_adr);
7389 if (ret == 0)
7390 new_pic->scatter_alloc = 1;
7391
7392 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7393 "%s pic index %d page count(%d) ret =%d\n",
7394 __func__, cur_buf_idx,
7395 cur_mmu_4k_number, ret);
7396 return ret;
7397}
7398
7399
7400static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7401 struct PIC_s *pic)
7402{
7403 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7404 "%s pic index %d scatter_alloc %d\n",
7405 __func__, pic->index,
7406 pic->scatter_alloc);
7407
7408 if (hevc->mmu_enable
7409 && ((hevc->double_write_mode & 0x10) == 0)
7410 && pic->scatter_alloc)
7411 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7412 pic->scatter_alloc = 0;
7413}
7414
7415/*
7416 *************************************************
7417 *
7418 *h265 buffer management end
7419 *
7420 **************************************************
7421 */
7422static struct hevc_state_s *gHevc;
7423
7424static void hevc_local_uninit(struct hevc_state_s *hevc)
7425{
7426 hevc->rpm_ptr = NULL;
7427 hevc->lmem_ptr = NULL;
7428
7429#ifdef SWAP_HEVC_UCODE
7430 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7431 if (hevc->mc_cpu_addr != NULL) {
7432 dma_free_coherent(amports_get_dma_device(),
7433 hevc->swap_size, hevc->mc_cpu_addr,
7434 hevc->mc_dma_handle);
7435 hevc->mc_cpu_addr = NULL;
7436 }
7437
7438 }
7439#endif
7440#ifdef DETREFILL_ENABLE
7441 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7442 uninit_detrefill_buf(hevc);
7443#endif
7444 if (hevc->aux_addr) {
7445 dma_free_coherent(amports_get_dma_device(),
7446 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7447 hevc->aux_phy_addr);
7448 hevc->aux_addr = NULL;
7449 }
7450 if (hevc->rpm_addr) {
7451 dma_free_coherent(amports_get_dma_device(),
7452 RPM_BUF_SIZE, hevc->rpm_addr,
7453 hevc->rpm_phy_addr);
7454 hevc->rpm_addr = NULL;
7455 }
7456 if (hevc->lmem_addr) {
7457 dma_free_coherent(amports_get_dma_device(),
7458			LMEM_BUF_SIZE, hevc->lmem_addr,
7459 hevc->lmem_phy_addr);
7460 hevc->lmem_addr = NULL;
7461 }
7462
7463 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7464 if (hevc->frame_mmu_map_phy_addr)
7465 dma_free_coherent(amports_get_dma_device(),
7466 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7467 hevc->frame_mmu_map_phy_addr);
7468
7469 hevc->frame_mmu_map_addr = NULL;
7470 }
7471
7472 kfree(gvs);
7473 gvs = NULL;
7474}
7475
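/*
 * Per-instance initialisation: choose the workspace layout (1080p or 4K,
 * depending on chip capability), then allocate the coherent DMA buffers for
 * RPM parameter exchange, prefix/suffix aux data, LMEM swap and, when the
 * MMU is enabled, the frame MMU map.
 */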
7476static int hevc_local_init(struct hevc_state_s *hevc)
7477{
7478 int ret = -1;
7479 struct BuffInfo_s *cur_buf_info = NULL;
7480
7481 memset(&hevc->param, 0, sizeof(union param_u));
7482
7483 cur_buf_info = &hevc->work_space_buf_store;
7484
7485 if (vdec_is_support_4k()) {
7486 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7487 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7488 sizeof(struct BuffInfo_s));
7489 else
7490 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7491 sizeof(struct BuffInfo_s));
7492 } else
7493 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7494 sizeof(struct BuffInfo_s));
7495
7496 cur_buf_info->start_adr = hevc->buf_start;
7497 init_buff_spec(hevc, cur_buf_info);
7498
7499 hevc_init_stru(hevc, cur_buf_info);
7500
7501 hevc->bit_depth_luma = 8;
7502 hevc->bit_depth_chroma = 8;
7503 hevc->video_signal_type = 0;
7504 hevc->video_signal_type_debug = 0;
7505 bit_depth_luma = hevc->bit_depth_luma;
7506 bit_depth_chroma = hevc->bit_depth_chroma;
7507 video_signal_type = hevc->video_signal_type;
7508
7509 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7510 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7511 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7512 if (hevc->rpm_addr == NULL) {
7513 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7514 return -1;
7515 }
7516 hevc->rpm_ptr = hevc->rpm_addr;
7517 }
7518
7519 if (prefix_aux_buf_size > 0 ||
7520 suffix_aux_buf_size > 0) {
7521 u32 aux_buf_size;
7522
7523 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7524 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7525 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7526		hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7527 aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7528 if (hevc->aux_addr == NULL) {
7529			pr_err("%s: failed to alloc aux buffer\n", __func__);
7530 return -1;
7531 }
7532 }
7533
7534 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7535 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7536 if (hevc->lmem_addr == NULL) {
7537 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7538 return -1;
7539 }
7540 hevc->lmem_ptr = hevc->lmem_addr;
7541
7542 if (hevc->mmu_enable) {
7543 hevc->frame_mmu_map_addr =
7544 dma_alloc_coherent(amports_get_dma_device(),
7545 get_frame_mmu_map_size(),
7546 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7547 if (hevc->frame_mmu_map_addr == NULL) {
7548			pr_err("%s: failed to alloc frame mmu map buffer\n", __func__);
7549 return -1;
7550 }
7551 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7552 }
7553 ret = 0;
7554 return ret;
7555}
7556
7557/*
7558 *******************************************
7559 * Mailbox command
7560 *******************************************
7561 */
7562#define CMD_FINISHED 0
7563#define CMD_ALLOC_VIEW 1
7564#define CMD_FRAME_DISPLAY 3
7565#define CMD_DEBUG 10
7566
7567
7568#define DECODE_BUFFER_NUM_MAX 32
7569#define DISPLAY_BUFFER_NUM 6
7570
7571#define video_domain_addr(adr) (adr&0x7fffffff)
7572#define DECODER_WORK_SPACE_SIZE 0x800000
7573
7574#define spec2canvas(x) \
7575 (((x)->uv_canvas_index << 16) | \
7576 ((x)->uv_canvas_index << 8) | \
7577 ((x)->y_canvas_index << 0))
7578
7579
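/*
 * Program the display canvases for one picture.  With double write enabled
 * the canvases point at the double-write planes (dw_y_adr / dw_u_v_adr),
 * scaled by the double-write ratio; otherwise, when the MMU is off, they
 * point at the full-size decode buffers (mc_y_adr / mc_u_v_adr).
 */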
7580static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7581{
7582 struct vdec_s *vdec = hw_to_vdec(hevc);
7583 int canvas_w = ALIGN(pic->width, 64)/4;
7584 int canvas_h = ALIGN(pic->height, 32)/4;
7585 int blkmode = hevc->mem_map_mode;
7586
7587 /*CANVAS_BLKMODE_64X32*/
7588#ifdef SUPPORT_10BIT
7589 if (pic->double_write_mode) {
7590 canvas_w = pic->width /
7591 get_double_write_ratio(hevc, pic->double_write_mode);
7592 canvas_h = pic->height /
7593 get_double_write_ratio(hevc, pic->double_write_mode);
7594
7595 if (hevc->mem_map_mode == 0)
7596 canvas_w = ALIGN(canvas_w, 32);
7597 else
7598 canvas_w = ALIGN(canvas_w, 64);
7599 canvas_h = ALIGN(canvas_h, 32);
7600
7601 if (vdec->parallel_dec == 1) {
7602 if (pic->y_canvas_index == -1)
7603 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7604 if (pic->uv_canvas_index == -1)
7605 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7606 } else {
7607 pic->y_canvas_index = 128 + pic->index * 2;
7608 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7609 }
7610
7611 canvas_config_ex(pic->y_canvas_index,
7612 pic->dw_y_adr, canvas_w, canvas_h,
7613 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7614 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7615 canvas_w, canvas_h,
7616 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7617#ifdef MULTI_INSTANCE_SUPPORT
7618 pic->canvas_config[0].phy_addr =
7619 pic->dw_y_adr;
7620 pic->canvas_config[0].width =
7621 canvas_w;
7622 pic->canvas_config[0].height =
7623 canvas_h;
7624 pic->canvas_config[0].block_mode =
7625 blkmode;
7626 pic->canvas_config[0].endian = hevc->is_used_v4l ? 0 : 7;
7627
7628 pic->canvas_config[1].phy_addr =
7629 pic->dw_u_v_adr;
7630 pic->canvas_config[1].width =
7631 canvas_w;
7632 pic->canvas_config[1].height =
7633 canvas_h;
7634 pic->canvas_config[1].block_mode =
7635 blkmode;
7636 pic->canvas_config[1].endian = hevc->is_used_v4l ? 0 : 7;
7637#endif
7638 } else {
7639 if (!hevc->mmu_enable) {
7640 /* to change after 10bit VPU is ready ... */
7641 if (vdec->parallel_dec == 1) {
7642 if (pic->y_canvas_index == -1)
7643 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7644 pic->uv_canvas_index = pic->y_canvas_index;
7645 } else {
7646 pic->y_canvas_index = 128 + pic->index;
7647 pic->uv_canvas_index = 128 + pic->index;
7648 }
7649
7650 canvas_config_ex(pic->y_canvas_index,
7651 pic->mc_y_adr, canvas_w, canvas_h,
7652 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7653 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7654 canvas_w, canvas_h,
7655 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7656 }
7657 }
7658#else
7659 if (vdec->parallel_dec == 1) {
7660 if (pic->y_canvas_index == -1)
7661 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7662 if (pic->uv_canvas_index == -1)
7663 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7664 } else {
7665 pic->y_canvas_index = 128 + pic->index * 2;
7666 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7667 }
7668
7669
7670 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7671 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7672 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7673 canvas_w, canvas_h,
7674 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7675#endif
7676}
7677
7678static int init_buf_spec(struct hevc_state_s *hevc)
7679{
7680 int pic_width = hevc->pic_w;
7681 int pic_height = hevc->pic_h;
7682
7683 /* hevc_print(hevc, 0,
7684 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7685 */
7686 hevc_print(hevc, 0,
7687 "%s2 %d %d\n", __func__, pic_width, pic_height);
7688 /* pic_width = hevc->pic_w; */
7689 /* pic_height = hevc->pic_h; */
7690
7691 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7692 hevc->frame_width = pic_width;
7693 hevc->frame_height = pic_height;
7694
7695 }
7696
7697 return 0;
7698}
7699
7700static int parse_sei(struct hevc_state_s *hevc,
7701 struct PIC_s *pic, char *sei_buf, uint32_t size)
7702{
7703 char *p = sei_buf;
7704 char *p_sei;
7705 uint16_t header;
7706 uint8_t nal_unit_type;
7707 uint8_t payload_type, payload_size;
7708 int i, j;
7709
7710 if (size < 2)
7711 return 0;
7712 header = *p++;
7713 header <<= 8;
7714 header += *p++;
7715 nal_unit_type = header >> 9;
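/*
 * Two-byte HEVC NAL unit header: forbidden_zero_bit(1), nal_unit_type(6),
 * nuh_layer_id(6), nuh_temporal_id_plus1(3). "header >> 9" keeps the top
 * seven bits, which equals nal_unit_type as long as the forbidden bit is 0.
 */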
7716 if ((nal_unit_type != NAL_UNIT_SEI)
7717 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7718 return 0;
7719 while (p+2 <= sei_buf+size) {
7720 payload_type = *p++;
7721 payload_size = *p++;
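/*
 * Note: the SEI syntax extends payload_type/payload_size with 0xFF
 * continuation bytes for values >= 255; this parser assumes the
 * single-byte form, which is enough for the messages handled below.
 */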
7722 if (p+payload_size <= sei_buf+size) {
7723 switch (payload_type) {
7724 case SEI_PicTiming:
7725 if ((parser_sei_enable & 0x4) &&
7726 hevc->frame_field_info_present_flag) {
7727 p_sei = p;
7728 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7729 pic->pic_struct = hevc->curr_pic_struct;
7730 if (get_dbg_flag(hevc) &
7731 H265_DEBUG_PIC_STRUCT) {
7732 hevc_print(hevc, 0,
7733 "parse result pic_struct = %d\n",
7734 hevc->curr_pic_struct);
7735 }
7736 }
7737 break;
7738 case SEI_UserDataITU_T_T35:
7739 p_sei = p;
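/*
 * The ITU-T T.35 bytes checked below are the HDR10+ signature:
 * 0xB5 (country code, USA), 0x003C (terminal provider code, Samsung),
 * 0x0001 (provider oriented code) and 0x04 (application identifier),
 * per SMPTE ST 2094-40.
 */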
7740 if (p_sei[0] == 0xB5
7741 && p_sei[1] == 0x00
7742 && p_sei[2] == 0x3C
7743 && p_sei[3] == 0x00
7744 && p_sei[4] == 0x01
7745 && p_sei[5] == 0x04)
7746 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7747
7748 break;
7749 case SEI_MasteringDisplayColorVolume:
7750 /*hevc_print(hevc, 0,
7751 "sei type: primary display color volume %d, size %d\n",
7752 payload_type,
7753 payload_size);*/
7754 /* master_display_colour */
7755 p_sei = p;
7756 for (i = 0; i < 3; i++) {
7757 for (j = 0; j < 2; j++) {
7758 hevc->primaries[i][j]
7759 = (*p_sei<<8)
7760 | *(p_sei+1);
7761 p_sei += 2;
7762 }
7763 }
7764 for (i = 0; i < 2; i++) {
7765 hevc->white_point[i]
7766 = (*p_sei<<8)
7767 | *(p_sei+1);
7768 p_sei += 2;
7769 }
7770 for (i = 0; i < 2; i++) {
7771 hevc->luminance[i]
7772 = (*p_sei<<24)
7773 | (*(p_sei+1)<<16)
7774 | (*(p_sei+2)<<8)
7775 | *(p_sei+3);
7776 p_sei += 4;
7777 }
7778 hevc->sei_present_flag |=
7779 SEI_MASTER_DISPLAY_COLOR_MASK;
7780 /*for (i = 0; i < 3; i++)
7781 for (j = 0; j < 2; j++)
7782 hevc_print(hevc, 0,
7783 "\tprimaries[%1d][%1d] = %04x\n",
7784 i, j,
7785 hevc->primaries[i][j]);
7786 hevc_print(hevc, 0,
7787 "\twhite_point = (%04x, %04x)\n",
7788 hevc->white_point[0],
7789 hevc->white_point[1]);
7790 hevc_print(hevc, 0,
7791 "\tmax,min luminance = %08x, %08x\n",
7792 hevc->luminance[0],
7793 hevc->luminance[1]);*/
7794 break;
7795 case SEI_ContentLightLevel:
7796 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7797 hevc_print(hevc, 0,
7798 "sei type: max content light level %d, size %d\n",
7799 payload_type, payload_size);
7800 /* content_light_level */
7801 p_sei = p;
7802 hevc->content_light_level[0]
7803 = (*p_sei<<8) | *(p_sei+1);
7804 p_sei += 2;
7805 hevc->content_light_level[1]
7806 = (*p_sei<<8) | *(p_sei+1);
7807 p_sei += 2;
7808 hevc->sei_present_flag |=
7809 SEI_CONTENT_LIGHT_LEVEL_MASK;
7810 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7811 hevc_print(hevc, 0,
7812 "\tmax cll = %04x, max_pa_cll = %04x\n",
7813 hevc->content_light_level[0],
7814 hevc->content_light_level[1]);
7815 break;
7816 default:
7817 break;
7818 }
7819 }
7820 p += payload_size;
7821 }
7822 return 0;
7823}
7824
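/*
 * calc_ar() returns the display aspect ratio as (height / width) scaled
 * by 256, folding in the sample aspect ratio: idc 255 uses the explicit
 * SAR signalled in the bitstream, the other idc values follow the fixed
 * VUI table (1 = 1:1, 2 = 12:11, ..., 16 = 2:1).
 * Worked example (stream values assumed for illustration): 1920x1080
 * with idc 1 gives ar = 0x100 * 1080 / 1920 = 0x90, while idc 2 (12:11)
 * gives ar = 0x100 * 1080 * 11 / (1920 * 12) = 0x84.
 */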
7825static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7826 unsigned w, unsigned h)
7827{
7828 unsigned ar;
7829
7830 if (idc == 255) {
7831 ar = div_u64(256ULL * sar_h * h,
7832 sar_w * w);
7833 } else {
7834 switch (idc) {
7835 case 1:
7836 ar = 0x100 * h / w;
7837 break;
7838 case 2:
7839 ar = 0x100 * h * 11 / (w * 12);
7840 break;
7841 case 3:
7842 ar = 0x100 * h * 11 / (w * 10);
7843 break;
7844 case 4:
7845 ar = 0x100 * h * 11 / (w * 16);
7846 break;
7847 case 5:
7848 ar = 0x100 * h * 33 / (w * 40);
7849 break;
7850 case 6:
7851 ar = 0x100 * h * 11 / (w * 24);
7852 break;
7853 case 7:
7854 ar = 0x100 * h * 11 / (w * 20);
7855 break;
7856 case 8:
7857 ar = 0x100 * h * 11 / (w * 32);
7858 break;
7859 case 9:
7860 ar = 0x100 * h * 33 / (w * 80);
7861 break;
7862 case 10:
7863 ar = 0x100 * h * 11 / (w * 18);
7864 break;
7865 case 11:
7866 ar = 0x100 * h * 11 / (w * 15);
7867 break;
7868 case 12:
7869 ar = 0x100 * h * 33 / (w * 64);
7870 break;
7871 case 13:
7872 ar = 0x100 * h * 99 / (w * 160);
7873 break;
7874 case 14:
7875 ar = 0x100 * h * 3 / (w * 4);
7876 break;
7877 case 15:
7878 ar = 0x100 * h * 2 / (w * 3);
7879 break;
7880 case 16:
7881 ar = 0x100 * h * 1 / (w * 2);
7882 break;
7883 default:
7884 ar = h * 0x100 / w;
7885 break;
7886 }
7887 }
7888
7889 return ar;
7890}
7891
7892static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
7893 struct PIC_s *pic)
7894{
7895 unsigned int ar;
7896 int i, j;
7897 char *p;
7898 unsigned size = 0;
7899 unsigned type = 0;
7900 struct vframe_master_display_colour_s *vf_dp
7901 = &vf->prop.master_display_colour;
7902
7903 vf->width = pic->width /
7904 get_double_write_ratio(hevc, pic->double_write_mode);
7905 vf->height = pic->height /
7906 get_double_write_ratio(hevc, pic->double_write_mode);
7907
7908 vf->duration = hevc->frame_dur;
7909 vf->duration_pulldown = 0;
7910 vf->flag = 0;
7911
7912 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
7913 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7914
7915
7916 if (((pic->aspect_ratio_idc == 255) &&
7917 pic->sar_width &&
7918 pic->sar_height) ||
7919 ((pic->aspect_ratio_idc != 255) &&
7920 (pic->width))) {
7921 ar = min_t(u32,
7922 calc_ar(pic->aspect_ratio_idc,
7923 pic->sar_width,
7924 pic->sar_height,
7925 pic->width,
7926 pic->height),
7927 DISP_RATIO_ASPECT_RATIO_MAX);
7928 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7929 }
7930 hevc->ratio_control = vf->ratio_control;
7931 if (pic->aux_data_buf
7932 && pic->aux_data_size) {
7933 /* parser sei */
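/*
 * Aux buffer layout as consumed by the loop below: each record is a
 * 4-byte big-endian size, a 4-byte big-endian type and "size" bytes of
 * payload; records of type 0x02000000 carry a raw SEI NAL that is
 * handed to parse_sei().
 */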
7934 p = pic->aux_data_buf;
7935 while (p < pic->aux_data_buf
7936 + pic->aux_data_size - 8) {
7937 size = *p++;
7938 size = (size << 8) | *p++;
7939 size = (size << 8) | *p++;
7940 size = (size << 8) | *p++;
7941 type = *p++;
7942 type = (type << 8) | *p++;
7943 type = (type << 8) | *p++;
7944 type = (type << 8) | *p++;
7945 if (type == 0x02000000) {
7946 /* hevc_print(hevc, 0,
7947 "sei(%d)\n", size); */
7948 parse_sei(hevc, pic, p, size);
7949 }
7950 p += size;
7951 }
7952 }
7953 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
7954 vf->signal_type = pic->video_signal_type;
7955 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
7956 u32 data;
7957 data = vf->signal_type;
7958 data = data & 0xFFFF00FF;
7959 data = data | (0x30<<8);
7960 vf->signal_type = data;
7961 }
7962 }
7963 else
7964 vf->signal_type = 0;
7965 hevc->video_signal_type_debug = vf->signal_type;
7966
7967 /* master_display_colour */
7968 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
7969 for (i = 0; i < 3; i++)
7970 for (j = 0; j < 2; j++)
7971 vf_dp->primaries[i][j] = hevc->primaries[i][j];
7972 for (i = 0; i < 2; i++) {
7973 vf_dp->white_point[i] = hevc->white_point[i];
7974 vf_dp->luminance[i]
7975 = hevc->luminance[i];
7976 }
7977 vf_dp->present_flag = 1;
7978 } else
7979 vf_dp->present_flag = 0;
7980
7981 /* content_light_level */
7982 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
7983 vf_dp->content_light_level.max_content
7984 = hevc->content_light_level[0];
7985 vf_dp->content_light_level.max_pic_average
7986 = hevc->content_light_level[1];
7987 vf_dp->content_light_level.present_flag = 1;
7988 } else
7989 vf_dp->content_light_level.present_flag = 0;
7990
7991 if (hevc->is_used_v4l &&
7992 ((hevc->sei_present_flag & SEI_HDR10PLUS_MASK) ||
7993 (vf_dp->present_flag) ||
7994 (vf_dp->content_light_level.present_flag))) {
7995 struct aml_vdec_hdr_infos hdr;
7996 struct aml_vcodec_ctx *ctx =
7997 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
7998
7999 memset(&hdr, 0, sizeof(hdr));
8000 hdr.signal_type = vf->signal_type;
8001 hdr.color_parms = *vf_dp;
8002 vdec_v4l_set_hdr_infos(ctx, &hdr);
8003 }
8004}
8005
8006static int vh265_vf_states(struct vframe_states *states, void *op_arg)
8007{
8008 unsigned long flags;
8009#ifdef MULTI_INSTANCE_SUPPORT
8010 struct vdec_s *vdec = op_arg;
8011 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8012#else
8013 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8014#endif
8015
8016 spin_lock_irqsave(&lock, flags);
8017
8018 states->vf_pool_size = VF_POOL_SIZE;
8019 states->buf_free_num = kfifo_len(&hevc->newframe_q);
8020 states->buf_avail_num = kfifo_len(&hevc->display_q);
8021
8022 if (step == 2)
8023 states->buf_avail_num = 0;
8024 spin_unlock_irqrestore(&lock, flags);
8025 return 0;
8026}
8027
8028static struct vframe_s *vh265_vf_peek(void *op_arg)
8029{
8030 struct vframe_s *vf[2] = {0, 0};
8031#ifdef MULTI_INSTANCE_SUPPORT
8032 struct vdec_s *vdec = op_arg;
8033 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8034#else
8035 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8036#endif
8037
8038 if (step == 2)
8039 return NULL;
8040
8041 if (force_disp_pic_index & 0x100) {
8042 if (force_disp_pic_index & 0x200)
8043 return NULL;
8044 return &hevc->vframe_dummy;
8045 }
8046
8047
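/*
 * Peek up to two frames so that the pts of the following frame can be
 * attached to the current one as next_vf_pts.
 */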
8048 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
8049 if (vf[1]) {
8050 vf[0]->next_vf_pts_valid = true;
8051 vf[0]->next_vf_pts = vf[1]->pts;
8052 } else
8053 vf[0]->next_vf_pts_valid = false;
8054 return vf[0];
8055 }
8056
8057 return NULL;
8058}
8059
8060static struct vframe_s *vh265_vf_get(void *op_arg)
8061{
8062 struct vframe_s *vf;
8063#ifdef MULTI_INSTANCE_SUPPORT
8064 struct vdec_s *vdec = op_arg;
8065 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8066#else
8067 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8068#endif
8069
8070 if (step == 2)
8071 return NULL;
8072 else if (step == 1)
8073 step = 2;
8074
8075#if 0
8076 if (force_disp_pic_index & 0x100) {
8077 int buffer_index = force_disp_pic_index & 0xff;
8078 struct PIC_s *pic = NULL;
8079 if (buffer_index >= 0
8080 && buffer_index < MAX_REF_PIC_NUM)
8081 pic = hevc->m_PIC[buffer_index];
8082 if (pic == NULL)
8083 return NULL;
8084 if (force_disp_pic_index & 0x200)
8085 return NULL;
8086
8087 vf = &hevc->vframe_dummy;
8088 if (get_double_write_mode(hevc)) {
8089 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
8090 VIDTYPE_VIU_NV21;
8091 if (hevc->m_ins_flag) {
8092 vf->canvas0Addr = vf->canvas1Addr = -1;
8093 vf->plane_num = 2;
8094 vf->canvas0_config[0] =
8095 pic->canvas_config[0];
8096 vf->canvas0_config[1] =
8097 pic->canvas_config[1];
8098
8099 vf->canvas1_config[0] =
8100 pic->canvas_config[0];
8101 vf->canvas1_config[1] =
8102 pic->canvas_config[1];
8103 } else {
8104 vf->canvas0Addr = vf->canvas1Addr
8105 = spec2canvas(pic);
8106 }
8107 } else {
8108 vf->canvas0Addr = vf->canvas1Addr = 0;
8109 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8110 if (hevc->mmu_enable)
8111 vf->type |= VIDTYPE_SCATTER;
8112 }
8113 vf->compWidth = pic->width;
8114 vf->compHeight = pic->height;
8115 update_vf_memhandle(hevc, vf, pic);
8116 switch (hevc->bit_depth_luma) {
8117 case 9:
8118 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
8119 break;
8120 case 10:
8121 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
8122 | BITDEPTH_V10;
8123 break;
8124 default:
8125 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8126 break;
8127 }
8128 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8129 vf->bitdepth =
8130 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8131 if (hevc->mem_saving_mode == 1)
8132 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8133 vf->duration_pulldown = 0;
8134 vf->pts = 0;
8135 vf->pts_us64 = 0;
8136 set_frame_info(hevc, vf);
8137
8138 vf->width = pic->width /
8139 get_double_write_ratio(hevc, pic->double_write_mode);
8140 vf->height = pic->height /
8141 get_double_write_ratio(hevc, pic->double_write_mode);
8142
8143 force_disp_pic_index |= 0x200;
8144 return vf;
8145 }
8146#endif
8147
8148 if (kfifo_get(&hevc->display_q, &vf)) {
8149 struct vframe_s *next_vf;
8150 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8151 hevc_print(hevc, 0,
8152 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8153 __func__, vf, vf->type, vf->index,
8154 get_pic_poc(hevc, vf->index & 0xff),
8155 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8156 vf->pts, vf->pts_us64,
8157 vf->duration);
8158#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8159 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
8160 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
8161 if (pic->aux_data_buf && pic->aux_data_size > 0) {
8162 int i;
8163 struct PIC_s *pic =
8164 hevc->m_PIC[vf->index & 0xff];
8165 hevc_print(hevc, 0,
8166 "pic 0x%p aux size %d:\n",
8167 pic, pic->aux_data_size);
8168 for (i = 0; i < pic->aux_data_size; i++) {
8169 hevc_print_cont(hevc, 0,
8170 "%02x ", pic->aux_data_buf[i]);
8171 if (((i + 1) & 0xf) == 0)
8172 hevc_print_cont(hevc, 0, "\n");
8173 }
8174 hevc_print_cont(hevc, 0, "\n");
8175 }
8176 }
8177#endif
8178 hevc->show_frame_num++;
8179 vf->index_disp = hevc->vf_get_count;
8180 hevc->vf_get_count++;
8181
8182 if (kfifo_peek(&hevc->display_q, &next_vf)) {
8183 vf->next_vf_pts_valid = true;
8184 vf->next_vf_pts = next_vf->pts;
8185 } else
8186 vf->next_vf_pts_valid = false;
8187
8188 return vf;
8189 }
8190
8191 return NULL;
8192}
8193static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
8194 int i;
8195 for (i = 0; i < VF_POOL_SIZE; i++) {
8196 if (vf == &hevc->vfpool[i])
8197 return true;
8198 }
8199 pr_info("h265: invalid vf put, vf = %p\n", vf);
8200 for (i = 0; i < VF_POOL_SIZE; i++) {
8201 pr_info("valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
8202 }
8203 return false;
8204}
8205
8206static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
8207{
8208 unsigned long flags;
8209#ifdef MULTI_INSTANCE_SUPPORT
8210 struct vdec_s *vdec = op_arg;
8211 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8212#else
8213 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8214#endif
8215 unsigned char index_top;
8216 unsigned char index_bot;
8217
8218 if (vf && (vf_valid_check(vf, hevc) == false))
8219 return;
8220 if (vf == (&hevc->vframe_dummy))
8221 return;
8222 index_top = vf->index & 0xff;
8223 index_bot = (vf->index >> 8) & 0xff;
8224 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8225 hevc_print(hevc, 0,
8226 "%s(type %d index 0x%x)\n",
8227 __func__, vf->type, vf->index);
8228 hevc->vf_put_count++;
8229 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8230 spin_lock_irqsave(&lock, flags);
8231
8232 if (index_top != 0xff
8233 && index_top < MAX_REF_PIC_NUM
8234 && hevc->m_PIC[index_top]) {
8235 if (hevc->is_used_v4l)
8236 hevc->m_PIC[index_top]->vframe_bound = true;
8237 if (hevc->m_PIC[index_top]->vf_ref > 0) {
8238 hevc->m_PIC[index_top]->vf_ref--;
8239
8240 if (hevc->m_PIC[index_top]->vf_ref == 0) {
8241 hevc->m_PIC[index_top]->output_ready = 0;
8242
8243 if (hevc->wait_buf != 0)
8244 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8245 0x1);
8246 }
8247 }
8248 }
8249
8250 if (index_bot != 0xff
8251 && index_bot < MAX_REF_PIC_NUM
8252 && hevc->m_PIC[index_bot]) {
8253 if (hevc->is_used_v4l)
8254 hevc->m_PIC[index_bot]->vframe_bound = true;
8255 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
8256 hevc->m_PIC[index_bot]->vf_ref--;
8257
8258 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
8259 hevc->m_PIC[index_bot]->output_ready = 0;
8260 if (hevc->wait_buf != 0)
8261 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8262 0x1);
8263 }
8264 }
8265 }
8266 spin_unlock_irqrestore(&lock, flags);
8267}
8268
8269static int vh265_event_cb(int type, void *data, void *op_arg)
8270{
8271 unsigned long flags;
8272#ifdef MULTI_INSTANCE_SUPPORT
8273 struct vdec_s *vdec = op_arg;
8274 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8275#else
8276 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8277#endif
8278 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8279#if 0
8280 amhevc_stop();
8281#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8282 vf_light_unreg_provider(&vh265_vf_prov);
8283#endif
8284 spin_lock_irqsave(&hevc->lock, flags);
8285 vh265_local_init();
8286 vh265_prot_init();
8287 spin_unlock_irqrestore(&hevc->lock, flags);
8288#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8289 vf_reg_provider(&vh265_vf_prov);
8290#endif
8291 amhevc_start();
8292#endif
8293 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8294 struct provider_aux_req_s *req =
8295 (struct provider_aux_req_s *)data;
8296 unsigned char index;
8297
8298 if (!req->vf) {
8299 req->aux_size = hevc->vf_put_count;
8300 return 0;
8301 }
8302 spin_lock_irqsave(&lock, flags);
8303 index = req->vf->index & 0xff;
8304 req->aux_buf = NULL;
8305 req->aux_size = 0;
8306 if (req->bot_flag)
8307 index = (req->vf->index >> 8) & 0xff;
8308 if (index != 0xff
8309 && index < MAX_REF_PIC_NUM
8310 && hevc->m_PIC[index]) {
8311 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8312 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8313#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8314 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8315 req->dv_enhance_exist = false;
8316 else
8317 req->dv_enhance_exist =
8318 hevc->m_PIC[index]->dv_enhance_exist;
8319 hevc_print(hevc, H265_DEBUG_DV,
8320 "query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux sizd 0x%x\n",
8321 req->vf,
8322 hevc->m_PIC[index]->POC, index,
8323 req->dv_enhance_exist, req->aux_size);
8324#else
8325 req->dv_enhance_exist = 0;
8326#endif
8327 }
8328 spin_unlock_irqrestore(&lock, flags);
8329
8330 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8331 hevc_print(hevc, 0,
8332 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8333 __func__, type, index, req->aux_size);
8334#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8335 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8336 if ((force_bypass_dvenl & 0x80000000) == 0) {
8337 hevc_print(hevc, 0,
8338 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8339 __func__);
8340 hevc->bypass_dvenl_enable = 1;
8341 }
8342
8343#endif
8344 }
8345 return 0;
8346}
8347
8348#ifdef HEVC_PIC_STRUCT_SUPPORT
8349static int process_pending_vframe(struct hevc_state_s *hevc,
8350 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8351{
8352 struct vframe_s *vf;
8353
8354 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8355 hevc_print(hevc, 0,
8356 "%s: pair_pic index 0x%x %s\n",
8357 __func__, pair_pic->index,
8358 pair_frame_top_flag ?
8359 "top" : "bot");
8360
8361 if (kfifo_len(&hevc->pending_q) > 1) {
8362 unsigned long flags;
8363 /* do not pend more than 1 frame */
8364 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8365 hevc_print(hevc, 0,
8366 "fatal error, no available buffer slot.");
8367 return -1;
8368 }
8369 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8370 hevc_print(hevc, 0,
8371 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8372 __func__, vf->index);
8373 if ((hevc->double_write_mode == 3) &&
8374 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8375 vf->type |= VIDTYPE_COMPRESS;
8376 if (hevc->mmu_enable)
8377 vf->type |= VIDTYPE_SCATTER;
8378 }
8379 hevc->vf_pre_count++;
8380 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8381 spin_lock_irqsave(&lock, flags);
8382 vf->index &= 0xff;
8383 hevc->m_PIC[vf->index]->output_ready = 0;
8384 if (hevc->wait_buf != 0)
8385 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8386 0x1);
8387 spin_unlock_irqrestore(&lock, flags);
8388
8389 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8390 }
8391
8392 if (kfifo_peek(&hevc->pending_q, &vf)) {
8393 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8394 /*
8395 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8396 *do not use it
8397 */
8398 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8399 hevc_print(hevc, 0,
8400 "fatal error, no available buffer slot.");
8401 return -1;
8402 }
8403 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8404 hevc_print(hevc, 0,
8405 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8406 __func__, vf->index);
8407 if (vf) {
8408 if ((hevc->double_write_mode == 3) &&
8409 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8410 vf->type |= VIDTYPE_COMPRESS;
8411 if (hevc->mmu_enable)
8412 vf->type |= VIDTYPE_SCATTER;
8413 }
8414 hevc->vf_pre_count++;
8415 kfifo_put(&hevc->display_q,
8416 (const struct vframe_s *)vf);
8417 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8418 }
8419 } else if ((!pair_frame_top_flag) &&
8420 (((vf->index >> 8) & 0xff) == 0xff)) {
8421 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8422 hevc_print(hevc, 0,
8423 "fatal error, no available buffer slot.");
8424 return -1;
8425 }
8426 if (vf) {
8427 if ((hevc->double_write_mode == 3) &&
8428 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8429 vf->type |= VIDTYPE_COMPRESS;
8430 if (hevc->mmu_enable)
8431 vf->type |= VIDTYPE_SCATTER;
8432 }
8433 vf->index &= 0xff;
8434 vf->index |= (pair_pic->index << 8);
8435 vf->canvas1Addr = spec2canvas(pair_pic);
8436 pair_pic->vf_ref++;
8437 kfifo_put(&hevc->display_q,
8438 (const struct vframe_s *)vf);
8439 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8440 hevc->vf_pre_count++;
8441 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8442 hevc_print(hevc, 0,
8443 "%s vf => display_q: (index 0x%x)\n",
8444 __func__, vf->index);
8445 }
8446 } else if (pair_frame_top_flag &&
8447 ((vf->index & 0xff) == 0xff)) {
8448 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8449 hevc_print(hevc, 0,
8450 "fatal error, no available buffer slot.");
8451 return -1;
8452 }
8453 if (vf) {
8454 if ((hevc->double_write_mode == 3) &&
8455 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8456 vf->type |= VIDTYPE_COMPRESS;
8457 if (hevc->mmu_enable)
8458 vf->type |= VIDTYPE_SCATTER;
8459 }
8460 vf->index &= 0xff00;
8461 vf->index |= pair_pic->index;
8462 vf->canvas0Addr = spec2canvas(pair_pic);
8463 pair_pic->vf_ref++;
8464 kfifo_put(&hevc->display_q,
8465 (const struct vframe_s *)vf);
8466 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8467 hevc->vf_pre_count++;
8468 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8469 hevc_print(hevc, 0,
8470 "%s vf => display_q: (index 0x%x)\n",
8471 __func__, vf->index);
8472 }
8473 }
8474 }
8475 return 0;
8476}
8477#endif
8478static void update_vf_memhandle(struct hevc_state_s *hevc,
8479 struct vframe_s *vf, struct PIC_s *pic)
8480{
8481 if (pic->index < 0) {
8482 vf->mem_handle = NULL;
8483 vf->mem_head_handle = NULL;
8484 } else if (vf->type & VIDTYPE_SCATTER) {
8485 vf->mem_handle =
8486 decoder_mmu_box_get_mem_handle(
8487 hevc->mmu_box, pic->index);
8488 vf->mem_head_handle =
8489 decoder_bmmu_box_get_mem_handle(
8490 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8491 } else {
8492 vf->mem_handle =
8493 decoder_bmmu_box_get_mem_handle(
8494 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8495 vf->mem_head_handle = NULL;
8496 /*vf->mem_head_handle =
8497 decoder_bmmu_box_get_mem_handle(
8498 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8499 }
8500 return;
8501}
8502
8503static void fill_frame_info(struct hevc_state_s *hevc,
8504 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8505{
8506 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8507 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8508 vframe_qos->type = 4;
8509 else if (pic->slice_type == I_SLICE)
8510 vframe_qos->type = 1;
8511 else if (pic->slice_type == P_SLICE)
8512 vframe_qos->type = 2;
8513 else if (pic->slice_type == B_SLICE)
8514 vframe_qos->type = 3;
8515/*
8516#define SHOW_QOS_INFO
8517*/
8518 vframe_qos->size = framesize;
8519 vframe_qos->pts = pts;
8520#ifdef SHOW_QOS_INFO
8521 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8522#endif
8523
8524
8525 vframe_qos->max_mv = pic->max_mv;
8526 vframe_qos->avg_mv = pic->avg_mv;
8527 vframe_qos->min_mv = pic->min_mv;
8528#ifdef SHOW_QOS_INFO
8529 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8530 vframe_qos->max_mv,
8531 vframe_qos->avg_mv,
8532 vframe_qos->min_mv);
8533#endif
8534
8535 vframe_qos->max_qp = pic->max_qp;
8536 vframe_qos->avg_qp = pic->avg_qp;
8537 vframe_qos->min_qp = pic->min_qp;
8538#ifdef SHOW_QOS_INFO
8539 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8540 vframe_qos->max_qp,
8541 vframe_qos->avg_qp,
8542 vframe_qos->min_qp);
8543#endif
8544
8545 vframe_qos->max_skip = pic->max_skip;
8546 vframe_qos->avg_skip = pic->avg_skip;
8547 vframe_qos->min_skip = pic->min_skip;
8548#ifdef SHOW_QOS_INFO
8549 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8550 vframe_qos->max_skip,
8551 vframe_qos->avg_skip,
8552 vframe_qos->min_skip);
8553#endif
8554
8555 vframe_qos->num++;
8556
8557 if (hevc->frameinfo_enable)
8558 vdec_fill_frame_info(vframe_qos, 1);
8559}
8560
8561static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8562{
8563#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8564 struct vdec_s *vdec = hw_to_vdec(hevc);
8565#endif
8566 struct vframe_s *vf = NULL;
8567 int stream_offset = pic->stream_offset;
8568 unsigned short slice_type = pic->slice_type;
8569 u32 frame_size;
8570
8571 if (force_disp_pic_index & 0x100) {
8572 /*recycle directly*/
8573 pic->output_ready = 0;
8574 return -1;
8575 }
8576 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8577 hevc_print(hevc, 0,
8578 "fatal error, no available buffer slot.");
8579 return -1;
8580 }
8581 display_frame_count[hevc->index]++;
8582 if (vf) {
8583 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8584 "%s: pic index 0x%x\n",
8585 __func__, pic->index);*/
8586
8587 if (hevc->is_used_v4l) {
8588 vf->v4l_mem_handle
8589 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8590 if (hevc->mmu_enable) {
8591 vf->mm_box.bmmu_box = hevc->bmmu_box;
8592 vf->mm_box.bmmu_idx = VF_BUFFER_IDX(pic->BUF_index);
8593 vf->mm_box.mmu_box = hevc->mmu_box;
8594 vf->mm_box.mmu_idx = pic->index;
8595 }
8596 }
8597
8598#ifdef MULTI_INSTANCE_SUPPORT
8599 if (vdec_frame_based(hw_to_vdec(hevc))) {
8600 vf->pts = pic->pts;
8601 vf->pts_us64 = pic->pts64;
8602 vf->timestamp = pic->timestamp;
8603 }
8604 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8605 stream_offset, &vf->pts, 0) != 0) { */
8606#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8607 else if (vdec->master == NULL) {
8608#else
8609 else {
8610#endif
8611#endif
8612 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8613 "call pts_lookup_offset_us64(0x%x)\n",
8614 stream_offset);
8615 if (pts_lookup_offset_us64
8616 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8617 &frame_size, 0,
8618 &vf->pts_us64) != 0) {
8619#ifdef DEBUG_PTS
8620 hevc->pts_missed++;
8621#endif
8622 vf->pts = 0;
8623 vf->pts_us64 = 0;
8624 }
8625#ifdef DEBUG_PTS
8626 else
8627 hevc->pts_hit++;
8628#endif
8629#ifdef MULTI_INSTANCE_SUPPORT
8630#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8631 } else {
8632 vf->pts = 0;
8633 vf->pts_us64 = 0;
8634 }
8635#else
8636 }
8637#endif
8638#endif
8639 if (pts_unstable && (hevc->frame_dur > 0))
8640 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8641
8642 fill_frame_info(hevc, pic, frame_size, vf->pts);
8643
8644 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8645 && hevc->get_frame_dur) {
8646 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8647
8648 if (pts_diff < 0) {
8649 hevc->pts_mode_switching_count++;
8650 hevc->pts_mode_recovery_count = 0;
8651
8652 if (hevc->pts_mode_switching_count >=
8653 PTS_MODE_SWITCHING_THRESHOLD) {
8654 hevc->pts_mode =
8655 PTS_NONE_REF_USE_DURATION;
8656 hevc_print(hevc, 0,
8657 "HEVC: switch to n_d mode.\n");
8658 }
8659
8660 } else {
8661 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8662
8663 hevc->pts_mode_recovery_count++;
8664 if (hevc->pts_mode_recovery_count > p) {
8665 hevc->pts_mode_switching_count = 0;
8666 hevc->pts_mode_recovery_count = 0;
8667 }
8668 }
8669 }
8670
8671 if (vf->pts != 0)
8672 hevc->last_lookup_pts = vf->pts;
8673
8674 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8675 && (slice_type != 2))
8676 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8677 hevc->last_pts = vf->pts;
8678
8679 if (vf->pts_us64 != 0)
8680 hevc->last_lookup_pts_us64 = vf->pts_us64;
8681
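/*
 * In duration-based mode, pts advances in 90 kHz ticks while pts_us64
 * advances in microseconds: assuming DUR2PTS() yields 90 kHz ticks,
 * ticks * 100 / 9 converts them to us (one tick is 100/9 us).
 */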
8682 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8683 && (slice_type != 2)) {
8684 vf->pts_us64 =
8685 hevc->last_pts_us64 +
8686 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8687 }
8688 hevc->last_pts_us64 = vf->pts_us64;
8689 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8690 hevc_print(hevc, 0,
8691 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8692 vf->pts, vf->pts_us64);
8693 }
8694
8695 /*
8696 *vf->index:
8697 *(1) vf->type is VIDTYPE_PROGRESSIVE
8698 * and vf->canvas0Addr != vf->canvas1Addr,
8699 * vf->index[7:0] is the index of top pic
8700 * vf->index[15:8] is the index of bot pic
8701 *(2) other cases,
8702 * only vf->index[7:0] is used
8703 * vf->index[15:8] == 0xff
8704 */
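/* e.g. a frame picture (case (2)) with pic->index 5 gives vf->index 0xff05 */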
8705 vf->index = 0xff00 | pic->index;
8706#if 1
8707/*SUPPORT_10BIT*/
8708 if (pic->double_write_mode & 0x10) {
8709 /* double write only */
8710 vf->compBodyAddr = 0;
8711 vf->compHeadAddr = 0;
8712 } else {
8713
8714 if (hevc->mmu_enable) {
8715 vf->compBodyAddr = 0;
8716 vf->compHeadAddr = pic->header_adr;
8717 } else {
8718 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8719 vf->compHeadAddr = pic->mc_y_adr +
8720 pic->losless_comp_body_size;
8721 vf->mem_head_handle = NULL;
8722 }
8723
8724 /*head adr*/
8725 vf->canvas0Addr = vf->canvas1Addr = 0;
8726 }
8727 if (pic->double_write_mode) {
8728 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8729 vf->type |= VIDTYPE_VIU_NV21;
8730
8731 if ((pic->double_write_mode == 3) &&
8732 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8733 vf->type |= VIDTYPE_COMPRESS;
8734 if (hevc->mmu_enable)
8735 vf->type |= VIDTYPE_SCATTER;
8736 }
8737#ifdef MULTI_INSTANCE_SUPPORT
8738 if (hevc->m_ins_flag &&
8739 (get_dbg_flag(hevc)
8740 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8741 vf->canvas0Addr = vf->canvas1Addr = -1;
8742 vf->plane_num = 2;
8743 vf->canvas0_config[0] =
8744 pic->canvas_config[0];
8745 vf->canvas0_config[1] =
8746 pic->canvas_config[1];
8747
8748 vf->canvas1_config[0] =
8749 pic->canvas_config[0];
8750 vf->canvas1_config[1] =
8751 pic->canvas_config[1];
8752
8753 } else
8754#endif
8755 vf->canvas0Addr = vf->canvas1Addr
8756 = spec2canvas(pic);
8757 } else {
8758 vf->canvas0Addr = vf->canvas1Addr = 0;
8759 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8760 if (hevc->mmu_enable)
8761 vf->type |= VIDTYPE_SCATTER;
8762 }
8763 vf->compWidth = pic->width;
8764 vf->compHeight = pic->height;
8765 update_vf_memhandle(hevc, vf, pic);
8766 switch (pic->bit_depth_luma) {
8767 case 9:
8768 vf->bitdepth = BITDEPTH_Y9;
8769 break;
8770 case 10:
8771 vf->bitdepth = BITDEPTH_Y10;
8772 break;
8773 default:
8774 vf->bitdepth = BITDEPTH_Y8;
8775 break;
8776 }
8777 switch (pic->bit_depth_chroma) {
8778 case 9:
8779 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8780 break;
8781 case 10:
8782 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8783 break;
8784 default:
8785 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8786 break;
8787 }
8788 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8789 vf->bitdepth =
8790 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8791 if (pic->mem_saving_mode == 1)
8792 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8793#else
8794 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8795 vf->type |= VIDTYPE_VIU_NV21;
8796 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8797#endif
8798 set_frame_info(hevc, vf, pic);
8799 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8800 /* hevc_print(hevc, 0,
8801 "aaa: %d/%d, %d/%d\n",
8802 vf->width,vf->height, pic->width, pic->height); */
8803 vf->width = pic->width;
8804 vf->height = pic->height;
8805
8806 if (force_w_h != 0) {
8807 vf->width = (force_w_h >> 16) & 0xffff;
8808 vf->height = force_w_h & 0xffff;
8809 }
8810 if (force_fps & 0x100) {
8811 u32 rate = force_fps & 0xff;
8812
8813 if (rate)
8814 vf->duration = 96000/rate;
8815 else
8816 vf->duration = 0;
8817 }
8818 if (force_fps & 0x200) {
8819 vf->pts = 0;
8820 vf->pts_us64 = 0;
8821 }
8822 /*
8823 * !!! to do ...
8824 * need to move the code below to get_new_pic(),
8825 * hevc->xxx can only be used by the current decoded pic
8826 */
8827 if (pic->conformance_window_flag &&
8828 (get_dbg_flag(hevc) &
8829 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
8830 unsigned int SubWidthC, SubHeightC;
8831
8832 switch (pic->chroma_format_idc) {
8833 case 1:
8834 SubWidthC = 2;
8835 SubHeightC = 2;
8836 break;
8837 case 2:
8838 SubWidthC = 2;
8839 SubHeightC = 1;
8840 break;
8841 default:
8842 SubWidthC = 1;
8843 SubHeightC = 1;
8844 break;
8845 }
8846 vf->width -= SubWidthC *
8847 (pic->conf_win_left_offset +
8848 pic->conf_win_right_offset);
8849 vf->height -= SubHeightC *
8850 (pic->conf_win_top_offset +
8851 pic->conf_win_bottom_offset);
8852
8853 vf->compWidth -= SubWidthC *
8854 (pic->conf_win_left_offset +
8855 pic->conf_win_right_offset);
8856 vf->compHeight -= SubHeightC *
8857 (pic->conf_win_top_offset +
8858 pic->conf_win_bottom_offset);
8859
8860 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
8861 hevc_print(hevc, 0,
8862 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
8863 pic->chroma_format_idc,
8864 pic->conf_win_left_offset,
8865 pic->conf_win_right_offset,
8866 pic->conf_win_top_offset,
8867 pic->conf_win_bottom_offset,
8868 vf->width, vf->height, vf->compWidth, vf->compHeight);
8869 }
8870
8871 vf->width = vf->width /
8872 get_double_write_ratio(hevc, pic->double_write_mode);
8873 vf->height = vf->height /
8874 get_double_write_ratio(hevc, pic->double_write_mode);
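/*
 * The pic_struct handling below follows the SEI pic_struct semantics of
 * the HEVC spec (Table D.2): 1/2 = single top/bottom field, 3/4 = two
 * fields in one picture, 5/6 = three fields, 7 = frame doubling,
 * 8 = frame tripling, 9/10 = field paired with the previous field,
 * 11/12 = field paired with the next field.
 */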
8875#ifdef HEVC_PIC_STRUCT_SUPPORT
8876 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
8877 struct vframe_s *vf2;
8878
8879 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8880 hevc_print(hevc, 0,
8881 "pic_struct = %d index 0x%x\n",
8882 pic->pic_struct,
8883 pic->index);
8884
8885 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8886 hevc_print(hevc, 0,
8887 "fatal error, no available buffer slot.");
8888 return -1;
8889 }
8890 pic->vf_ref = 2;
8891 vf->duration = vf->duration>>1;
8892 memcpy(vf2, vf, sizeof(struct vframe_s));
8893
8894 if (pic->pic_struct == 3) {
8895 vf->type = VIDTYPE_INTERLACE_TOP
8896 | VIDTYPE_VIU_NV21;
8897 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8898 | VIDTYPE_VIU_NV21;
8899 } else {
8900 vf->type = VIDTYPE_INTERLACE_BOTTOM
8901 | VIDTYPE_VIU_NV21;
8902 vf2->type = VIDTYPE_INTERLACE_TOP
8903 | VIDTYPE_VIU_NV21;
8904 }
8905 hevc->vf_pre_count++;
8906 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8907 kfifo_put(&hevc->display_q,
8908 (const struct vframe_s *)vf);
8909 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8910 hevc->vf_pre_count++;
8911 kfifo_put(&hevc->display_q,
8912 (const struct vframe_s *)vf2);
8913 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8914 } else if (pic->pic_struct == 5
8915 || pic->pic_struct == 6) {
8916 struct vframe_s *vf2, *vf3;
8917
8918 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8919 hevc_print(hevc, 0,
8920 "pic_struct = %d index 0x%x\n",
8921 pic->pic_struct,
8922 pic->index);
8923
8924 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8925 hevc_print(hevc, 0,
8926 "fatal error, no available buffer slot.");
8927 return -1;
8928 }
8929 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
8930 hevc_print(hevc, 0,
8931 "fatal error, no available buffer slot.");
8932 return -1;
8933 }
8934 pic->vf_ref = 3;
8935 vf->duration = vf->duration/3;
8936 memcpy(vf2, vf, sizeof(struct vframe_s));
8937 memcpy(vf3, vf, sizeof(struct vframe_s));
8938
8939 if (pic->pic_struct == 5) {
8940 vf->type = VIDTYPE_INTERLACE_TOP
8941 | VIDTYPE_VIU_NV21;
8942 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8943 | VIDTYPE_VIU_NV21;
8944 vf3->type = VIDTYPE_INTERLACE_TOP
8945 | VIDTYPE_VIU_NV21;
8946 } else {
8947 vf->type = VIDTYPE_INTERLACE_BOTTOM
8948 | VIDTYPE_VIU_NV21;
8949 vf2->type = VIDTYPE_INTERLACE_TOP
8950 | VIDTYPE_VIU_NV21;
8951 vf3->type = VIDTYPE_INTERLACE_BOTTOM
8952 | VIDTYPE_VIU_NV21;
8953 }
8954 hevc->vf_pre_count++;
8955 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8956 kfifo_put(&hevc->display_q,
8957 (const struct vframe_s *)vf);
8958 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8959 hevc->vf_pre_count++;
8960 kfifo_put(&hevc->display_q,
8961 (const struct vframe_s *)vf2);
8962 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8963 hevc->vf_pre_count++;
8964 kfifo_put(&hevc->display_q,
8965 (const struct vframe_s *)vf3);
8966 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
8967
8968 } else if (pic->pic_struct == 9
8969 || pic->pic_struct == 10) {
8970 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8971 hevc_print(hevc, 0,
8972 "pic_struct = %d index 0x%x\n",
8973 pic->pic_struct,
8974 pic->index);
8975
8976 pic->vf_ref = 1;
8977 /* process previous pending vf*/
8978 process_pending_vframe(hevc,
8979 pic, (pic->pic_struct == 9));
8980
8981 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8982 /* process current vf */
8983 kfifo_put(&hevc->pending_q,
8984 (const struct vframe_s *)vf);
8985 vf->height <<= 1;
8986 if (pic->pic_struct == 9) {
8987 vf->type = VIDTYPE_INTERLACE_TOP
8988 | VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8989 process_pending_vframe(hevc,
8990 hevc->pre_bot_pic, 0);
8991 } else {
8992 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8993 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8994 vf->index = (pic->index << 8) | 0xff;
8995 process_pending_vframe(hevc,
8996 hevc->pre_top_pic, 1);
8997 }
8998
8999 if (hevc->vf_pre_count == 0)
9000 hevc->vf_pre_count++;
9001
9002 /**/
9003 if (pic->pic_struct == 9)
9004 hevc->pre_top_pic = pic;
9005 else
9006 hevc->pre_bot_pic = pic;
9007
9008 } else if (pic->pic_struct == 11
9009 || pic->pic_struct == 12) {
9010 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9011 hevc_print(hevc, 0,
9012 "pic_struct = %d index 0x%x\n",
9013 pic->pic_struct,
9014 pic->index);
9015 pic->vf_ref = 1;
9016 /* process previous pending vf*/
9017 process_pending_vframe(hevc, pic,
9018 (pic->pic_struct == 11));
9019
9020 /* put current into pending q */
9021 vf->height <<= 1;
9022 if (pic->pic_struct == 11)
9023 vf->type = VIDTYPE_INTERLACE_TOP |
9024 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
9025 else {
9026 vf->type = VIDTYPE_INTERLACE_BOTTOM |
9027 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
9028 vf->index = (pic->index << 8) | 0xff;
9029 }
9030 decoder_do_frame_check(hw_to_vdec(hevc), vf);
9031 kfifo_put(&hevc->pending_q,
9032 (const struct vframe_s *)vf);
9033 if (hevc->vf_pre_count == 0)
9034 hevc->vf_pre_count++;
9035
9036 /**/
9037 if (pic->pic_struct == 11)
9038 hevc->pre_top_pic = pic;
9039 else
9040 hevc->pre_bot_pic = pic;
9041
9042 } else {
9043 pic->vf_ref = 1;
9044
9045 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9046 hevc_print(hevc, 0,
9047 "pic_struct = %d index 0x%x\n",
9048 pic->pic_struct,
9049 pic->index);
9050
9051 switch (pic->pic_struct) {
9052 case 7:
9053 vf->duration <<= 1;
9054 break;
9055 case 8:
9056 vf->duration = vf->duration * 3;
9057 break;
9058 case 1:
9059 vf->height <<= 1;
9060 vf->type = VIDTYPE_INTERLACE_TOP |
9061 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
9062 process_pending_vframe(hevc, pic, 1);
9063 hevc->pre_top_pic = pic;
9064 break;
9065 case 2:
9066 vf->height <<= 1;
9067 vf->type = VIDTYPE_INTERLACE_BOTTOM
9068 | VIDTYPE_VIU_NV21
9069 | VIDTYPE_VIU_FIELD;
9070 process_pending_vframe(hevc, pic, 0);
9071 hevc->pre_bot_pic = pic;
9072 break;
9073 }
9074 hevc->vf_pre_count++;
9075 decoder_do_frame_check(hw_to_vdec(hevc), vf);
9076 kfifo_put(&hevc->display_q,
9077 (const struct vframe_s *)vf);
9078 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9079 }
9080#else
9081 vf->type_original = vf->type;
9082 pic->vf_ref = 1;
9083 hevc->vf_pre_count++;
9084 decoder_do_frame_check(hw_to_vdec(hevc), vf);
9085 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
9086 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9087
9088 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9089 hevc_print(hevc, 0,
9090 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
9091 __func__, vf->type, vf->index,
9092 get_pic_poc(hevc, vf->index & 0xff),
9093 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
9094 vf->pts, vf->pts_us64,
9095 vf->duration);
9096#endif
9097#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
9098 /*count info*/
9099 vdec_count_info(gvs, 0, stream_offset);
9100#endif
9101 hw_to_vdec(hevc)->vdec_fps_detec(hw_to_vdec(hevc)->id);
9102 if (without_display_mode == 0) {
9103 vf_notify_receiver(hevc->provider_name,
9104 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9105 }
9106 else
9107 vh265_vf_put(vh265_vf_get(vdec), vdec);
9108 }
9109
9110 return 0;
9111}
9112
9113static int notify_v4l_eos(struct vdec_s *vdec)
9114{
9115 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
9116 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
9117 struct vframe_s *vf = &hw->vframe_dummy;
9118 struct vdec_v4l2_buffer *fb = NULL;
9119 int index = INVALID_IDX;
9120 ulong expires;
9121
9122 if (hw->is_used_v4l && hw->eos) {
9123 expires = jiffies + msecs_to_jiffies(2000);
9124 while (INVALID_IDX == (index = get_free_buf_idx(hw))) {
9125 if (time_after(jiffies, expires))
9126 break;
9127 }
9128
9129 if (index == INVALID_IDX) {
9130 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb) < 0) {
9131 pr_err("[%d] EOS get free buff fail.\n", ctx->id);
9132 return -1;
9133 }
9134 }
9135
9136 vf->type |= VIDTYPE_V4L_EOS;
9137 vf->timestamp = ULONG_MAX;
9138 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
9139 vf->v4l_mem_handle = (index == INVALID_IDX) ? (ulong)fb :
9140 hw->m_BUF[index].v4l_ref_buf_addr;
9141 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
9142 vf_notify_receiver(vdec->vf_provider_name,
9143 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9144
9145 pr_info("[%d] H265 EOS notify.\n", ctx->id);
9146 }
9147
9148 return 0;
9149}
9150
9151static void process_nal_sei(struct hevc_state_s *hevc,
9152 int payload_type, int payload_size)
9153{
9154 unsigned short data;
9155
9156 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9157 hevc_print(hevc, 0,
9158 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
9159 payload_type, payload_size);
9160
9161 if (payload_type == 137) {
9162 int i, j;
9163 /* MASTERING_DISPLAY_COLOUR_VOLUME */
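/*
 * The mastering display SEI payload read below is 24 bytes:
 * display_primaries_x/y for three primaries (6 x 16 bits),
 * white_point_x/y (2 x 16 bits) and max/min mastering luminance
 * (2 x 32 bits), fetched 16 bits at a time from HEVC_SHIFTED_DATA.
 */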
9164 if (payload_size >= 24) {
9165 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9166 hevc_print(hevc, 0,
9167 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
9168 for (i = 0; i < 3; i++) {
9169 for (j = 0; j < 2; j++) {
9170 data =
9171 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9172 hevc->primaries[i][j] = data;
9173 WRITE_HREG(HEVC_SHIFT_COMMAND,
9174 (1<<7)|16);
9175 if (get_dbg_flag(hevc) &
9176 H265_DEBUG_PRINT_SEI)
9177 hevc_print(hevc, 0,
9178 "\t\tprimaries[%1d][%1d] = %04x\n",
9179 i, j, hevc->primaries[i][j]);
9180 }
9181 }
9182 for (i = 0; i < 2; i++) {
9183 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9184 hevc->white_point[i] = data;
9185 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
9186 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9187 hevc_print(hevc, 0,
9188 "\t\twhite_point[%1d] = %04x\n",
9189 i, hevc->white_point[i]);
9190 }
9191 for (i = 0; i < 2; i++) {
9192 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9193 hevc->luminance[i] = data << 16;
9194 WRITE_HREG(HEVC_SHIFT_COMMAND,
9195 (1<<7)|16);
9196 data =
9197 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9198 hevc->luminance[i] |= data;
9199 WRITE_HREG(HEVC_SHIFT_COMMAND,
9200 (1<<7)|16);
9201 if (get_dbg_flag(hevc) &
9202 H265_DEBUG_PRINT_SEI)
9203 hevc_print(hevc, 0,
9204 "\t\tluminance[%1d] = %08x\n",
9205 i, hevc->luminance[i]);
9206 }
9207 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
9208 }
9209 payload_size -= 24;
9210 while (payload_size > 0) {
9211 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
9212 payload_size--;
9213 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
9214 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
9215 }
9216 }
9217}
9218
9219static int hevc_recover(struct hevc_state_s *hevc)
9220{
9221 int ret = -1;
9222 u32 rem;
9223 u64 shift_byte_count64;
9224 unsigned int hevc_shift_byte_count;
9225 unsigned int hevc_stream_start_addr;
9226 unsigned int hevc_stream_end_addr;
9227 unsigned int hevc_stream_rd_ptr;
9228 unsigned int hevc_stream_wr_ptr;
9229 unsigned int hevc_stream_control;
9230 unsigned int hevc_stream_fifo_ctl;
9231 unsigned int hevc_stream_buf_size;
9232
9233 mutex_lock(&vh265_mutex);
9234#if 0
9235 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9236 int ii;
9237
9238 for (ii = 0; ii < 4; ii++)
9239 hevc_print(hevc, 0,
9240 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9241 if (((i + ii) & 0xf) == 0)
9242 hevc_print(hevc, 0, "\n");
9243 }
9244#endif
9245#define ES_VID_MAN_RD_PTR (1<<0)
9246 if (!hevc->init_flag) {
9247 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
9248 mutex_unlock(&vh265_mutex);
9249 return ret;
9250 }
9251 amhevc_stop();
9252 msleep(20);
9253 ret = 0;
9254 /* reset */
9255 WRITE_PARSER_REG(PARSER_VIDEO_RP, READ_VREG(HEVC_STREAM_RD_PTR));
9256 SET_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9257
9258 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
9259 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
9260 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
9261 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
9262 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
9263 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
9264 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
9265
9266 /* The HEVC stream buffer is reset and restarted
9267 * from the current hevc_stream_rd_ptr position
9268 */
9269 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
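/*
 * Rebuild the 64-bit byte count so it matches the new read pointer:
 * keep the whole buffer laps already consumed (count rounded down to a
 * multiple of the buffer size) plus the read pointer offset; if the old
 * remainder was already past that offset, the read pointer has wrapped
 * one more lap, so one extra buffer size is counted.
 */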
9270 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9271 if ((hevc->shift_byte_count_lo & (1 << 31))
9272 && ((hevc_shift_byte_count & (1 << 31)) == 0))
9273 hevc->shift_byte_count_hi++;
9274
9275 hevc->shift_byte_count_lo = hevc_shift_byte_count;
9276 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9277 hevc->shift_byte_count_lo;
9278 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9279 shift_byte_count64 -= rem;
9280 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9281
9282 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9283 shift_byte_count64 += hevc_stream_buf_size;
9284
9285 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9286 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
9287
9288 WRITE_VREG(DOS_SW_RESET3,
9289 /* (1<<2)| */
9290 (1 << 3) | (1 << 4) | (1 << 8) |
9291 (1 << 11) | (1 << 12) | (1 << 14)
9292 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9293 WRITE_VREG(DOS_SW_RESET3, 0);
9294
9295 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9296 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9297 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9298 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9299 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9300 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9301 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9302
9303 hevc_config_work_space_hw(hevc);
9304 decoder_hw_reset();
9305
9306 hevc->have_vps = 0;
9307 hevc->have_sps = 0;
9308 hevc->have_pps = 0;
9309
9310 hevc->have_valid_start_slice = 0;
9311
9312 if (get_double_write_mode(hevc) & 0x10)
9313 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9314 0x1 << 31 /* Enable NV21 reference read mode for MC */
9315 );
9316
9317 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9318 /* clear mailbox interrupt */
9319 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9320 /* enable mailbox interrupt */
9321 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9322 /* disable PSCALE for hardware sharing */
9323 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9324
9325 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9326
9327 WRITE_VREG(DEBUG_REG1, 0x0);
9328
9329 if ((error_handle_policy & 1) == 0) {
9330 if ((error_handle_policy & 4) == 0) {
9331 /* ucode auto mode, and do not check vps/sps/pps/idr */
9332 WRITE_VREG(NAL_SEARCH_CTL,
9333 0xc);
9334 } else {
9335 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9336 }
9337 } else {
9338 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9339 }
9340
9341 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9342 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9343 WRITE_VREG(NAL_SEARCH_CTL,
9344 READ_VREG(NAL_SEARCH_CTL)
9345 | ((parser_sei_enable & 0x7) << 17));
9346#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9347 WRITE_VREG(NAL_SEARCH_CTL,
9348 READ_VREG(NAL_SEARCH_CTL) |
9349 ((parser_dolby_vision_enable & 0x1) << 20));
9350#endif
9351 config_decode_mode(hevc);
9352 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9353
9354 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9355 /* amhevc_disable(); */
9356 /* return -EBUSY; */
9357 /* } */
9358#if 0
9359 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9360 int ii;
9361
9362 for (ii = 0; ii < 4; ii++) {
9363 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9364 hevc_print(hevc, 0,
9365 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9366 }
9367 if (((i + ii) & 0xf) == 0)
9368 hevc_print(hevc, 0, "\n");
9369 }
9370#endif
9371 init_pic_list_hw(hevc);
9372
9373 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9374 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9375
9376#ifdef SWAP_HEVC_UCODE
9377 if (!tee_enabled() && hevc->is_swap &&
9378 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9379 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9380 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9381 }
9382#endif
9383 amhevc_start();
9384
9385 /* skip, search next start code */
9386 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9387 hevc->skip_flag = 1;
9388#ifdef ERROR_HANDLE_DEBUG
9389 if (dbg_nal_skip_count & 0x20000) {
9390 dbg_nal_skip_count &= ~0x20000;
9391 mutex_unlock(&vh265_mutex);
9392 return ret;
9393 }
9394#endif
9395 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9396 /* Interrupt Amrisc to execute */
9397 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9398#ifdef MULTI_INSTANCE_SUPPORT
9399 if (!hevc->m_ins_flag)
9400#endif
9401 hevc->first_pic_after_recover = 1;
9402 mutex_unlock(&vh265_mutex);
9403 return ret;
9404}
9405
9406static void dump_aux_buf(struct hevc_state_s *hevc)
9407{
9408 int i;
9409 unsigned short *aux_adr =
9410 (unsigned short *)
9411 hevc->aux_addr;
9412 unsigned int aux_size =
9413 (READ_VREG(HEVC_AUX_DATA_SIZE)
9414 >> 16) << 4;
9415
9416 if (hevc->prefix_aux_size > 0) {
9417 hevc_print(hevc, 0,
9418 "prefix aux: (size %d)\n",
9419 aux_size);
9420 for (i = 0; i <
9421 (aux_size >> 1); i++) {
9422 hevc_print_cont(hevc, 0,
9423 "%04x ",
9424 *(aux_adr + i));
9425 if (((i + 1) & 0xf)
9426 == 0)
9427 hevc_print_cont(hevc,
9428 0, "\n");
9429 }
9430 }
9431 if (hevc->suffix_aux_size > 0) {
9432 aux_adr = (unsigned short *)
9433 (hevc->aux_addr +
9434 hevc->prefix_aux_size);
9435 aux_size =
9436 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9437 << 4;
9438 hevc_print(hevc, 0,
9439 "suffix aux: (size %d)\n",
9440 aux_size);
9441 for (i = 0; i <
9442 (aux_size >> 1); i++) {
9443 hevc_print_cont(hevc, 0,
9444 "%04x ", *(aux_adr + i));
9445 if (((i + 1) & 0xf) == 0)
9446 hevc_print_cont(hevc, 0, "\n");
9447 }
9448 }
9449}
9450
9451#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9452static void dolby_get_meta(struct hevc_state_s *hevc)
9453{
9454 struct vdec_s *vdec = hw_to_vdec(hevc);
9455
9456 if (get_dbg_flag(hevc) &
9457 H265_DEBUG_BUFMGR_MORE)
9458 dump_aux_buf(hevc);
9459 if (vdec->dolby_meta_with_el || vdec->slave) {
9460 set_aux_data(hevc,
9461 hevc->cur_pic, 0, 0);
9462 } else if (vdec->master) {
9463 struct hevc_state_s *hevc_ba =
9464 (struct hevc_state_s *)
9465 vdec->master->private;
9466 /*do not use hevc_ba*/
9467 set_aux_data(hevc,
9468 hevc_ba->cur_pic,
9469 0, 1);
9470 set_aux_data(hevc,
9471 hevc->cur_pic, 0, 2);
9472 }
9473}
9474#endif
9475
9476static void read_decode_info(struct hevc_state_s *hevc)
9477{
9478 uint32_t decode_info =
9479 READ_HREG(HEVC_DECODE_INFO);
9480 hevc->start_decoding_flag |=
9481 (decode_info & 0xff);
9482 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9483}
9484
9485static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9486{
9487 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9488 unsigned int dec_status = hevc->dec_status;
9489 int i, ret;
9490
9491#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9492 struct vdec_s *vdec = hw_to_vdec(hevc);
9493#endif
9494
9495 if (hevc->eos)
9496 return IRQ_HANDLED;
9497 if (
9498#ifdef MULTI_INSTANCE_SUPPORT
9499 (!hevc->m_ins_flag) &&
9500#endif
9501 hevc->error_flag == 1) {
9502 if ((error_handle_policy & 0x10) == 0) {
9503 if (hevc->cur_pic) {
9504 int current_lcu_idx =
9505 READ_VREG(HEVC_PARSER_LCU_START)
9506 & 0xffffff;
9507 if (current_lcu_idx <
9508 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9509 hevc->cur_pic->error_mark = 1;
9510
9511 }
9512 }
9513 if ((error_handle_policy & 1) == 0) {
9514 hevc->error_skip_nal_count = 1;
9515 /* manually search NALs, skip error_skip_nal_count
9516 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9517 */
9518 WRITE_VREG(NAL_SEARCH_CTL,
9519 (error_skip_nal_count << 4) | 0x1);
9520 } else {
9521 hevc->error_skip_nal_count = error_skip_nal_count;
9522 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9523 }
9524 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9525#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9526 || vdec->master
9527 || vdec->slave
9528#endif
9529 ) {
9530 WRITE_VREG(NAL_SEARCH_CTL,
9531 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9532 }
9533 WRITE_VREG(NAL_SEARCH_CTL,
9534 READ_VREG(NAL_SEARCH_CTL)
9535 | ((parser_sei_enable & 0x7) << 17));
9536#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9537 WRITE_VREG(NAL_SEARCH_CTL,
9538 READ_VREG(NAL_SEARCH_CTL) |
9539 ((parser_dolby_vision_enable & 0x1) << 20));
9540#endif
9541 config_decode_mode(hevc);
9542 /* search new nal */
9543 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9544 /* Interrupt Amrisc to execute */
9545 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9546
9547 /* hevc_print(hevc, 0,
9548 *"%s: error handle\n", __func__);
9549 */
9550 hevc->error_flag = 2;
9551 return IRQ_HANDLED;
9552 } else if (
9553#ifdef MULTI_INSTANCE_SUPPORT
9554 (!hevc->m_ins_flag) &&
9555#endif
9556 hevc->error_flag == 3) {
9557 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9558 hevc_recover(hevc);
9559 hevc->error_flag = 0;
9560
9561 if ((error_handle_policy & 0x10) == 0) {
9562 if (hevc->cur_pic) {
9563 int current_lcu_idx =
9564 READ_VREG(HEVC_PARSER_LCU_START)
9565 & 0xffffff;
9566 if (current_lcu_idx <
9567 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9568 hevc->cur_pic->error_mark = 1;
9569
9570 }
9571 }
9572 if ((error_handle_policy & 1) == 0) {
9573 /* need to skip some data when
9574 * error_flag 3 is triggered,
9575 */
9576 /* to avoid hevc_recover() being called
9577 * many times at the same bitstream position
9578 */
9579 hevc->error_skip_nal_count = 1;
9580 /* manual search nal, skip error_skip_nal_count
9581 * of nal and trigger the HEVC_NAL_SEARCH_DONE irq
9582 */
9583 WRITE_VREG(NAL_SEARCH_CTL,
9584 (error_skip_nal_count << 4) | 0x1);
9585 }
9586
9587 if ((error_handle_policy & 0x2) == 0) {
9588 hevc->have_vps = 1;
9589 hevc->have_sps = 1;
9590 hevc->have_pps = 1;
9591 }
9592 return IRQ_HANDLED;
9593 }
9594 if (!hevc->m_ins_flag) {
9595 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9596 if ((hevc->shift_byte_count_lo & (1 << 31))
9597 && ((i & (1 << 31)) == 0))
9598 hevc->shift_byte_count_hi++;
9599 hevc->shift_byte_count_lo = i;
9600 }
9601#ifdef MULTI_INSTANCE_SUPPORT
9602 mutex_lock(&hevc->chunks_mutex);
9603 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9604 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9605 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9606 && (hevc->chunk)) {
9607 hevc->cur_pic->pts = hevc->chunk->pts;
9608 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9609 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9610 }
9611 mutex_unlock(&hevc->chunks_mutex);
9612
9613 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9614 dec_status == HEVC_DECODE_BUFEMPTY2) {
9615 if (hevc->m_ins_flag) {
9616 read_decode_info(hevc);
9617 if (vdec_frame_based(hw_to_vdec(hevc))) {
9618 hevc->empty_flag = 1;
9619 goto pic_done;
9620 } else {
9621 if (
9622#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9623 vdec->master ||
9624 vdec->slave ||
9625#endif
9626 (data_resend_policy & 0x1)) {
9627 hevc->dec_result = DEC_RESULT_AGAIN;
9628 amhevc_stop();
9629 restore_decode_state(hevc);
9630 } else
9631 hevc->dec_result = DEC_RESULT_GET_DATA;
9632 }
9633 reset_process_time(hevc);
9634 vdec_schedule_work(&hevc->work);
9635 }
9636 return IRQ_HANDLED;
9637 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9638 (dec_status == HEVC_NAL_DECODE_DONE)
9639 ) {
9640 if (hevc->m_ins_flag) {
9641 read_decode_info(hevc);
9642 if (vdec_frame_based(hw_to_vdec(hevc))) {
9643 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9644 hevc->empty_flag = 1;
9645 goto pic_done;
9646 } else {
9647 hevc->dec_result = DEC_RESULT_AGAIN;
9648 amhevc_stop();
9649 restore_decode_state(hevc);
9650 }
9651
9652 reset_process_time(hevc);
9653 vdec_schedule_work(&hevc->work);
9654 }
9655
9656 return IRQ_HANDLED;
9657 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9658 if (hevc->m_ins_flag) {
9659 struct PIC_s *pic;
9660 struct PIC_s *pic_display;
9661 int decoded_poc;
9662#ifdef DETREFILL_ENABLE
9663 if (hevc->is_swap &&
9664 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9665 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9666 && READ_VREG(HEVC_SAO_DBG_MODE0))
9667 hevc->delrefill_check = 2;
9668 }
9669#endif
9670 hevc->empty_flag = 0;
9671pic_done:
9672 if (input_frame_based(hw_to_vdec(hevc)) &&
9673 frmbase_cont_bitlevel != 0 &&
9674 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9675 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9676 > frmbase_cont_bitlevel)) {
9677 				/* handle the case: multiple pictures in one packet */
9678 				hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9679 					"%s has more data index=%d, size=0x%x shiftcnt=0x%x\n",
9680 					__func__,
9681 					hevc->decode_idx, hevc->decode_size,
9682 					READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9683 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9684 start_process_time(hevc);
9685 return IRQ_HANDLED;
9686 }
9687
9688 read_decode_info(hevc);
9689 get_picture_qos_info(hevc);
9690#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9691 hevc->start_parser_type = 0;
9692 hevc->switch_dvlayer_flag = 0;
9693#endif
9694 hevc->decoded_poc = hevc->curr_POC;
9695 hevc->decoding_pic = NULL;
9696 hevc->dec_result = DEC_RESULT_DONE;
9697#ifdef DETREFILL_ENABLE
9698 if (hevc->is_swap &&
9699 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9700 if (hevc->delrefill_check != 2)
9701#endif
9702
9703 amhevc_stop();
9704
9705 reset_process_time(hevc);
9706
9707 if (hevc->vf_pre_count == 0) {
9708 decoded_poc = hevc->curr_POC;
9709 pic = get_pic_by_POC(hevc, decoded_poc);
9710 if (pic && (pic->POC != INVALID_POC)) {
9711 /*PB skip control */
9712 if (pic->error_mark == 0
9713 && hevc->PB_skip_mode == 1) {
9714 /* start decoding after
9715 * first I
9716 */
9717 hevc->ignore_bufmgr_error |= 0x1;
9718 }
9719 if (hevc->ignore_bufmgr_error & 1) {
9720 if (hevc->PB_skip_count_after_decoding > 0) {
9721 hevc->PB_skip_count_after_decoding--;
9722 } else {
9723 /* start displaying */
9724 hevc->ignore_bufmgr_error |= 0x2;
9725 }
9726 }
9727 if (hevc->mmu_enable
9728 && ((hevc->double_write_mode & 0x10) == 0)) {
9729 if (!hevc->m_ins_flag) {
9730 hevc->used_4k_num =
9731 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9732
9733 if ((!is_skip_decoding(hevc, pic)) &&
9734 (hevc->used_4k_num >= 0) &&
9735 (hevc->cur_pic->scatter_alloc
9736 == 1)) {
9737 hevc_print(hevc,
9738 H265_DEBUG_BUFMGR_MORE,
9739 "%s pic index %d scatter_alloc %d page_start %d\n",
9740 "decoder_mmu_box_free_idx_tail",
9741 hevc->cur_pic->index,
9742 hevc->cur_pic->scatter_alloc,
9743 hevc->used_4k_num);
9744 decoder_mmu_box_free_idx_tail(
9745 hevc->mmu_box,
9746 hevc->cur_pic->index,
9747 hevc->used_4k_num);
9748 hevc->cur_pic->scatter_alloc
9749 = 2;
9750 }
9751 hevc->used_4k_num = -1;
9752 }
9753 }
9754
9755 pic->output_mark = 1;
9756 pic->recon_mark = 1;
9757 }
9758 check_pic_decoded_error(hevc,
9759 READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff);
9760 if (hevc->cur_pic != NULL &&
9761 (READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff) == 0
9762 && (hevc->lcu_x_num * hevc->lcu_y_num != 1))
9763 hevc->cur_pic->error_mark = 1;
9764force_output:
9765 pic_display = output_pic(hevc, 1);
9766 if (pic_display) {
9767 if ((pic_display->error_mark &&
9768 ((hevc->ignore_bufmgr_error &
9769 0x2) == 0))
9770 || (get_dbg_flag(hevc) &
9771 H265_DEBUG_DISPLAY_CUR_FRAME)
9772 || (get_dbg_flag(hevc) &
9773 H265_DEBUG_NO_DISPLAY)) {
9774 pic_display->output_ready = 0;
9775 if (get_dbg_flag(hevc) &
9776 H265_DEBUG_BUFMGR) {
9777 hevc_print(hevc, 0,
9778 "[BM] Display: POC %d, ",
9779 pic_display->POC);
9780 hevc_print_cont(hevc, 0,
9781 "decoding index %d ==> ",
9782 pic_display->
9783 decode_idx);
9784 hevc_print_cont(hevc, 0,
9785 "Debug or err,recycle it\n");
9786 }
9787 } else {
9788 if (pic_display->
9789 slice_type != 2) {
9790 pic_display->output_ready = 0;
9791 } else {
9792 prepare_display_buf
9793 (hevc,
9794 pic_display);
9795 hevc->first_pic_flag = 1;
9796 }
9797 }
9798 }
9799 }
9800
9801 vdec_schedule_work(&hevc->work);
9802 }
9803
9804 return IRQ_HANDLED;
9805#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9806 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9807 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9808 if (hevc->m_ins_flag) {
9809 unsigned char next_parser_type =
9810 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
9811 read_decode_info(hevc);
9812
9813 if (vdec->slave &&
9814 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9815 /*cur is base, found enhance*/
9816 struct hevc_state_s *hevc_el =
9817 (struct hevc_state_s *)
9818 vdec->slave->private;
9819 hevc->switch_dvlayer_flag = 1;
9820 hevc->no_switch_dvlayer_count = 0;
9821 hevc_el->start_parser_type =
9822 next_parser_type;
9823 hevc_print(hevc, H265_DEBUG_DV,
9824 "switch (poc %d) to el\n",
9825 hevc->cur_pic ?
9826 hevc->cur_pic->POC :
9827 INVALID_POC);
9828 } else if (vdec->master &&
9829 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
9830 /*cur is enhance, found base*/
9831 struct hevc_state_s *hevc_ba =
9832 (struct hevc_state_s *)
9833 vdec->master->private;
9834 hevc->switch_dvlayer_flag = 1;
9835 hevc->no_switch_dvlayer_count = 0;
9836 hevc_ba->start_parser_type =
9837 next_parser_type;
9838 hevc_print(hevc, H265_DEBUG_DV,
9839 "switch (poc %d) to bl\n",
9840 hevc->cur_pic ?
9841 hevc->cur_pic->POC :
9842 INVALID_POC);
9843 } else {
9844 hevc->switch_dvlayer_flag = 0;
9845 hevc->start_parser_type =
9846 next_parser_type;
9847 hevc->no_switch_dvlayer_count++;
9848 hevc_print(hevc, H265_DEBUG_DV,
9849 "%s: no_switch_dvlayer_count = %d\n",
9850 vdec->master ? "el" : "bl",
9851 hevc->no_switch_dvlayer_count);
9852 if (vdec->slave &&
9853 dolby_el_flush_th != 0 &&
9854 hevc->no_switch_dvlayer_count >
9855 dolby_el_flush_th) {
9856 struct hevc_state_s *hevc_el =
9857 (struct hevc_state_s *)
9858 vdec->slave->private;
9859 struct PIC_s *el_pic;
9860 check_pic_decoded_error(hevc_el,
9861 hevc_el->pic_decoded_lcu_idx);
9862 el_pic = get_pic_by_POC(hevc_el,
9863 hevc_el->curr_POC);
9864 hevc_el->curr_POC = INVALID_POC;
9865 hevc_el->m_pocRandomAccess = MAX_INT;
9866 flush_output(hevc_el, el_pic);
9867 					hevc_el->decoded_poc = INVALID_POC;
9868 					/* flush_output has already been called */
9869 hevc_el->decoding_pic = NULL;
9870 hevc->no_switch_dvlayer_count = 0;
9871 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
9872 hevc_print(hevc, 0,
9873 "no el anymore, flush_output el\n");
9874 }
9875 }
9876 hevc->decoded_poc = hevc->curr_POC;
9877 hevc->decoding_pic = NULL;
9878 hevc->dec_result = DEC_RESULT_DONE;
9879 amhevc_stop();
9880 reset_process_time(hevc);
9881 if (aux_data_is_avaible(hevc))
9882 dolby_get_meta(hevc);
9883 			if (hevc->cur_pic->slice_type == 2 &&
9884 hevc->vf_pre_count == 0) {
9885 hevc_print(hevc, 0,
9886 "first slice_type %x no_switch_dvlayer_count %x\n",
9887 hevc->cur_pic->slice_type,
9888 hevc->no_switch_dvlayer_count);
9889 goto force_output;
9890 }
9891 vdec_schedule_work(&hevc->work);
9892 }
9893
9894 return IRQ_HANDLED;
9895#endif
9896 }
9897
9898#endif
9899
9900 if (dec_status == HEVC_SEI_DAT) {
9901 if (!hevc->m_ins_flag) {
9902 int payload_type =
9903 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
9904 int payload_size =
9905 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
9906 process_nal_sei(hevc,
9907 payload_type, payload_size);
9908 }
9909 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
9910 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
9911 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
9912 int parse_type = HEVC_DISCARD_NAL;
9913
9914 hevc->error_watchdog_count = 0;
9915 hevc->error_skip_nal_wt_cnt = 0;
9916#ifdef MULTI_INSTANCE_SUPPORT
9917 if (hevc->m_ins_flag)
9918 reset_process_time(hevc);
9919#endif
9920 if (slice_parse_begin > 0 &&
9921 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
9922 hevc_print(hevc, 0,
9923 "nal type %d, discard %d\n", naltype,
9924 slice_parse_begin);
9925 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
9926 slice_parse_begin--;
9927 }
9928 if (naltype == NAL_UNIT_EOS) {
9929 struct PIC_s *pic;
9930
9931 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
9932#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9933 if ((vdec->master || vdec->slave) &&
9934 aux_data_is_avaible(hevc)) {
9935 if (hevc->decoding_pic)
9936 dolby_get_meta(hevc);
9937 }
9938#endif
9939 check_pic_decoded_error(hevc,
9940 hevc->pic_decoded_lcu_idx);
9941 pic = get_pic_by_POC(hevc, hevc->curr_POC);
9942 hevc->curr_POC = INVALID_POC;
9943 /* add to fix RAP_B_Bossen_1 */
9944 hevc->m_pocRandomAccess = MAX_INT;
9945 flush_output(hevc, pic);
9946 clear_poc_flag(hevc);
9947 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
9948 			/* Interrupt Amrisc to execute */
9949 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9950#ifdef MULTI_INSTANCE_SUPPORT
9951 if (hevc->m_ins_flag) {
9952 				hevc->decoded_poc = INVALID_POC;
9953 				/* flush_output has already been called */
9954 hevc->decoding_pic = NULL;
9955 hevc->dec_result = DEC_RESULT_DONE;
9956 amhevc_stop();
9957
9958 vdec_schedule_work(&hevc->work);
9959 }
9960#endif
9961 return IRQ_HANDLED;
9962 }
9963
9964 if (
9965#ifdef MULTI_INSTANCE_SUPPORT
9966 (!hevc->m_ins_flag) &&
9967#endif
9968 hevc->error_skip_nal_count > 0) {
9969 hevc_print(hevc, 0,
9970 "nal type %d, discard %d\n", naltype,
9971 hevc->error_skip_nal_count);
9972 hevc->error_skip_nal_count--;
9973 if (hevc->error_skip_nal_count == 0) {
9974 hevc_recover(hevc);
9975 hevc->error_flag = 0;
9976 if ((error_handle_policy & 0x2) == 0) {
9977 hevc->have_vps = 1;
9978 hevc->have_sps = 1;
9979 hevc->have_pps = 1;
9980 }
9981 return IRQ_HANDLED;
9982 }
9983 } else if (naltype == NAL_UNIT_VPS) {
9984 parse_type = HEVC_NAL_UNIT_VPS;
9985 hevc->have_vps = 1;
9986#ifdef ERROR_HANDLE_DEBUG
9987 if (dbg_nal_skip_flag & 1)
9988 parse_type = HEVC_DISCARD_NAL;
9989#endif
9990 } else if (hevc->have_vps) {
9991 if (naltype == NAL_UNIT_SPS) {
9992 parse_type = HEVC_NAL_UNIT_SPS;
9993 hevc->have_sps = 1;
9994#ifdef ERROR_HANDLE_DEBUG
9995 if (dbg_nal_skip_flag & 2)
9996 parse_type = HEVC_DISCARD_NAL;
9997#endif
9998 } else if (naltype == NAL_UNIT_PPS) {
9999 parse_type = HEVC_NAL_UNIT_PPS;
10000 hevc->have_pps = 1;
10001#ifdef ERROR_HANDLE_DEBUG
10002 if (dbg_nal_skip_flag & 4)
10003 parse_type = HEVC_DISCARD_NAL;
10004#endif
10005 } else if (hevc->have_sps && hevc->have_pps) {
10006 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
10007
10008 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
10009 (naltype ==
10010 NAL_UNIT_CODED_SLICE_IDR_N_LP)
10011 || (naltype ==
10012 NAL_UNIT_CODED_SLICE_CRA)
10013 || (naltype ==
10014 NAL_UNIT_CODED_SLICE_BLA)
10015 || (naltype ==
10016 NAL_UNIT_CODED_SLICE_BLANT)
10017 || (naltype ==
10018 NAL_UNIT_CODED_SLICE_BLA_N_LP)
10019 ) {
10020 if (slice_parse_begin > 0) {
10021 hevc_print(hevc, 0,
10022 "discard %d, for debugging\n",
10023 slice_parse_begin);
10024 slice_parse_begin--;
10025 } else {
10026 parse_type = seg;
10027 }
10028 hevc->have_valid_start_slice = 1;
10029 } else if (naltype <=
10030 NAL_UNIT_CODED_SLICE_CRA
10031 && (hevc->have_valid_start_slice
10032 || (hevc->PB_skip_mode != 3))) {
10033 if (slice_parse_begin > 0) {
10034 hevc_print(hevc, 0,
10035 "discard %d, dd\n",
10036 slice_parse_begin);
10037 slice_parse_begin--;
10038 } else
10039 parse_type = seg;
10040
10041 }
10042 }
10043 }
10044 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
10045 && hevc->have_valid_start_slice &&
10046 hevc->error_flag == 0) {
10047 if ((get_dbg_flag(hevc) &
10048 H265_DEBUG_MAN_SEARCH_NAL) == 0
10049 /* && (!hevc->m_ins_flag)*/) {
10050 				/* auto parse NALs; do not check
10051 				 * vps/sps/pps/idr
10052 				 */
10053 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
10054 }
10055
10056 if ((get_dbg_flag(hevc) &
10057 H265_DEBUG_NO_EOS_SEARCH_DONE)
10058#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10059 || vdec->master
10060 || vdec->slave
10061#endif
10062 ) {
10063 WRITE_VREG(NAL_SEARCH_CTL,
10064 READ_VREG(NAL_SEARCH_CTL) |
10065 0x10000);
10066 }
10067 WRITE_VREG(NAL_SEARCH_CTL,
10068 READ_VREG(NAL_SEARCH_CTL)
10069 | ((parser_sei_enable & 0x7) << 17));
10070#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10071 WRITE_VREG(NAL_SEARCH_CTL,
10072 READ_VREG(NAL_SEARCH_CTL) |
10073 ((parser_dolby_vision_enable & 0x1) << 20));
10074#endif
10075 config_decode_mode(hevc);
10076 }
10077
10078 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
10079 hevc_print(hevc, 0,
10080 "naltype = %d parse_type %d\n %d %d %d %d\n",
10081 naltype, parse_type, hevc->have_vps,
10082 hevc->have_sps, hevc->have_pps,
10083 hevc->have_valid_start_slice);
10084 }
10085
10086 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
10087 		/* Interrupt Amrisc to execute */
10088 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10089#ifdef MULTI_INSTANCE_SUPPORT
10090 if (hevc->m_ins_flag)
10091 start_process_time(hevc);
10092#endif
10093 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
10094#ifdef MULTI_INSTANCE_SUPPORT
10095 if (hevc->m_ins_flag) {
10096 reset_process_time(hevc);
10097 read_decode_info(hevc);
10098
10099 }
10100#endif
10101 if (hevc->start_decoding_time > 0) {
10102 u32 process_time = 1000*
10103 (jiffies - hevc->start_decoding_time)/HZ;
10104 if (process_time > max_decoding_time)
10105 max_decoding_time = process_time;
10106 }
10107
10108 hevc->error_watchdog_count = 0;
10109 if (hevc->pic_list_init_flag == 2) {
10110 hevc->pic_list_init_flag = 3;
10111 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
10112 } else if (hevc->wait_buf == 0) {
10113 u32 vui_time_scale;
10114 u32 vui_num_units_in_tick;
10115 unsigned char reconfig_flag = 0;
10116
10117 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
10118 get_rpm_param(&hevc->param);
10119 else {
10120
10121 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
10122 int ii;
10123
10124 for (ii = 0; ii < 4; ii++) {
10125 hevc->param.l.data[i + ii] =
10126 hevc->rpm_ptr[i + 3
10127 - ii];
10128 }
10129 }
10130#ifdef SEND_LMEM_WITH_RPM
10131 check_head_error(hevc);
10132#endif
10133 }
10134 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
10135 hevc_print(hevc, 0,
10136 "rpm_param: (%d)\n", hevc->slice_idx);
10137 hevc->slice_idx++;
10138 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
10139 hevc_print_cont(hevc, 0,
10140 "%04x ", hevc->param.l.data[i]);
10141 if (((i + 1) & 0xf) == 0)
10142 hevc_print_cont(hevc, 0, "\n");
10143 }
10144
10145 hevc_print(hevc, 0,
10146 "vui_timing_info: %x, %x, %x, %x\n",
10147 hevc->param.p.vui_num_units_in_tick_hi,
10148 hevc->param.p.vui_num_units_in_tick_lo,
10149 hevc->param.p.vui_time_scale_hi,
10150 hevc->param.p.vui_time_scale_lo);
10151 }
10152
10153 if (hevc->is_used_v4l) {
10154 struct aml_vcodec_ctx *ctx =
10155 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
10156
10157 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
10158 struct aml_vdec_ps_infos ps;
10159
10160 hevc->frame_width = hevc->param.p.pic_width_in_luma_samples;
10161 hevc->frame_height = hevc->param.p.pic_height_in_luma_samples;
10162 ps.visible_width = hevc->frame_width;
10163 ps.visible_height = hevc->frame_height;
10164 ps.coded_width = ALIGN(hevc->frame_width, 32);
10165 ps.coded_height = ALIGN(hevc->frame_height, 32);
10166 ps.dpb_size = get_work_pic_num(hevc);
10167 hevc->v4l_params_parsed = true;
10168 /*notice the v4l2 codec.*/
10169 vdec_v4l_set_ps_infos(ctx, &ps);
10170 }
10171 }
10172
10173 if (
10174#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10175 vdec->master == NULL &&
10176 vdec->slave == NULL &&
10177#endif
10178 aux_data_is_avaible(hevc)
10179 ) {
10180
10181 if (get_dbg_flag(hevc) &
10182 H265_DEBUG_BUFMGR_MORE)
10183 dump_aux_buf(hevc);
10184 }
10185
10186 vui_time_scale =
10187 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
10188 hevc->param.p.vui_time_scale_lo;
10189 vui_num_units_in_tick =
10190 (u32)(hevc->param.
10191 p.vui_num_units_in_tick_hi << 16) |
10192 hevc->param.
10193 p.vui_num_units_in_tick_lo;
10194 if (hevc->bit_depth_luma !=
10195 ((hevc->param.p.bit_depth & 0xf) + 8)) {
10196 reconfig_flag = 1;
10197 hevc_print(hevc, 0, "Bit depth luma = %d\n",
10198 (hevc->param.p.bit_depth & 0xf) + 8);
10199 }
10200 if (hevc->bit_depth_chroma !=
10201 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
10202 reconfig_flag = 1;
10203 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
10204 ((hevc->param.p.bit_depth >> 4) &
10205 0xf) + 8);
10206 }
10207 hevc->bit_depth_luma =
10208 (hevc->param.p.bit_depth & 0xf) + 8;
10209 hevc->bit_depth_chroma =
10210 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
10211 bit_depth_luma = hevc->bit_depth_luma;
10212 bit_depth_chroma = hevc->bit_depth_chroma;
10213#ifdef SUPPORT_10BIT
10214 if (hevc->bit_depth_luma == 8 &&
10215 hevc->bit_depth_chroma == 8 &&
10216 enable_mem_saving)
10217 hevc->mem_saving_mode = 1;
10218 else
10219 hevc->mem_saving_mode = 0;
10220#endif
10221 if (reconfig_flag &&
10222 (get_double_write_mode(hevc) & 0x10) == 0)
10223 init_decode_head_hw(hevc);
10224
10225 if ((vui_time_scale != 0)
10226 && (vui_num_units_in_tick != 0)) {
10227 hevc->frame_dur =
10228 div_u64(96000ULL *
10229 vui_num_units_in_tick,
10230 vui_time_scale);
10231 if (hevc->get_frame_dur != true)
10232 vdec_schedule_work(
10233 &hevc->notify_work);
10234
10235 hevc->get_frame_dur = true;
10236#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10237 gvs->frame_dur = hevc->frame_dur;
10238#endif
10239 }
10240
10241 if (hevc->video_signal_type !=
10242 ((hevc->param.p.video_signal_type << 16)
10243 | hevc->param.p.color_description)) {
10244 u32 v = hevc->param.p.video_signal_type;
10245 u32 c = hevc->param.p.color_description;
10246#if 0
10247 if (v & 0x2000) {
10248 hevc_print(hevc, 0,
10249 "video_signal_type present:\n");
10250 hevc_print(hevc, 0, " %s %s\n",
10251 video_format_names[(v >> 10) & 7],
10252 ((v >> 9) & 1) ?
10253 "full_range" : "limited");
10254 if (v & 0x100) {
10255 hevc_print(hevc, 0,
10256 " color_description present:\n");
10257 hevc_print(hevc, 0,
10258 " color_primarie = %s\n",
10259 color_primaries_names
10260 [v & 0xff]);
10261 hevc_print(hevc, 0,
10262 " transfer_characteristic = %s\n",
10263 transfer_characteristics_names
10264 [(c >> 8) & 0xff]);
10265 hevc_print(hevc, 0,
10266 " matrix_coefficient = %s\n",
10267 matrix_coeffs_names[c & 0xff]);
10268 }
10269 }
10270#endif
10271 hevc->video_signal_type = (v << 16) | c;
10272 video_signal_type = hevc->video_signal_type;
10273 }
10274
10275 if (use_cma &&
10276 (hevc->param.p.slice_segment_address == 0)
10277 && (hevc->pic_list_init_flag == 0)) {
10278 int log = hevc->param.p.log2_min_coding_block_size_minus3;
10279 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
10280
10281 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10282 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
10283 hevc->lcu_size = 1 << (log + 3 + log_s);
10284 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
10285 if (hevc->pic_w == 0 || hevc->pic_h == 0
10286 || hevc->lcu_size == 0
10287 || is_oversize(hevc->pic_w, hevc->pic_h)
10288 || (!hevc->skip_first_nal &&
10289 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
10290 /* skip search next start code */
10291 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10292 & (~0x2));
10293 if ( !hevc->skip_first_nal &&
10294 (hevc->pic_h == 96) && (hevc->pic_w == 160))
10295 hevc->skip_first_nal = 1;
10296 hevc->skip_flag = 1;
10297 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10298 				/* Interrupt Amrisc to execute */
10299 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10300#ifdef MULTI_INSTANCE_SUPPORT
10301 if (hevc->m_ins_flag)
10302 start_process_time(hevc);
10303#endif
10304 } else {
10305 hevc->sps_num_reorder_pics_0 =
10306 hevc->param.p.sps_num_reorder_pics_0;
10307 hevc->pic_list_init_flag = 1;
10308#ifdef MULTI_INSTANCE_SUPPORT
10309 if (hevc->m_ins_flag) {
10310 vdec_schedule_work(&hevc->work);
10311 } else
10312#endif
10313 up(&h265_sema);
10314 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10315 }
10316 return IRQ_HANDLED;
10317 }
10318
10319}
10320 ret =
10321 hevc_slice_segment_header_process(hevc,
10322 &hevc->param, decode_pic_begin);
10323 if (ret < 0) {
10324#ifdef MULTI_INSTANCE_SUPPORT
10325 if (hevc->m_ins_flag) {
10326 hevc->wait_buf = 0;
10327 hevc->dec_result = DEC_RESULT_AGAIN;
10328 amhevc_stop();
10329 restore_decode_state(hevc);
10330 reset_process_time(hevc);
10331 vdec_schedule_work(&hevc->work);
10332 return IRQ_HANDLED;
10333 }
10334#else
10335 ;
10336#endif
10337 } else if (ret == 0) {
10338 if ((hevc->new_pic) && (hevc->cur_pic)) {
10339 hevc->cur_pic->stream_offset =
10340 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10341 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10342 "read stream_offset = 0x%x\n",
10343 hevc->cur_pic->stream_offset);
10344 hevc->cur_pic->aspect_ratio_idc =
10345 hevc->param.p.aspect_ratio_idc;
10346 hevc->cur_pic->sar_width =
10347 hevc->param.p.sar_width;
10348 hevc->cur_pic->sar_height =
10349 hevc->param.p.sar_height;
10350 }
10351
10352 WRITE_VREG(HEVC_DEC_STATUS_REG,
10353 HEVC_CODED_SLICE_SEGMENT_DAT);
10354 			/* Interrupt Amrisc to execute */
10355 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10356
10357 hevc->start_decoding_time = jiffies;
10358#ifdef MULTI_INSTANCE_SUPPORT
10359 if (hevc->m_ins_flag)
10360 start_process_time(hevc);
10361#endif
10362#if 1
10363 			/* TODO: copy aux data to hevc->cur_pic */
10364#endif
10365#ifdef MULTI_INSTANCE_SUPPORT
10366 } else if (hevc->m_ins_flag) {
10367 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10368 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10369 __func__, ret);
10370 hevc->decoded_poc = INVALID_POC;
10371 hevc->decoding_pic = NULL;
10372 hevc->dec_result = DEC_RESULT_DONE;
10373 amhevc_stop();
10374 reset_process_time(hevc);
10375 vdec_schedule_work(&hevc->work);
10376#endif
10377 } else {
10378 /* skip, search next start code */
10379#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10380 gvs->drop_frame_count++;
10381#endif
10382 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10383 hevc->skip_flag = 1;
10384 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10385 		/* Interrupt Amrisc to execute */
10386 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10387 }
10388
10389 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10390 hevc_print(hevc, 0 , "hevc decode oversize !!\n");
10391#ifdef MULTI_INSTANCE_SUPPORT
10392 if (!hevc->m_ins_flag)
10393 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10394 H265_DEBUG_DIS_SYS_ERROR_PROC);
10395#endif
10396 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10397 }
10398 return IRQ_HANDLED;
10399}
10400
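/*
 * Clear HEVC_SHIFT_STATUS and poll HEVC_STREAM_CONTROL until bit 1
 * drops, giving up after roughly two seconds (100 * 20 ms).  Bit 1 is
 * taken here to mean "stream search still busy"; that reading is
 * inferred from this usage only.
 */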
10401static void wait_hevc_search_done(struct hevc_state_s *hevc)
10402{
10403 int count = 0;
10404 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10405 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10406 msleep(20);
10407 count++;
10408 if (count > 100) {
10409 hevc_print(hevc, 0, "%s timeout\n", __func__);
10410 break;
10411 }
10412 }
10413}
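/*
 * Top-half interrupt handler: record the ucode status, dump LMEM or the
 * debug registers when a debug tag is present (honouring the
 * udebug_pause_* trigger), swallow HEVC_OVER_DECODE in the legacy
 * single-instance path, and defer everything else to
 * vh265_isr_thread_fn() by returning IRQ_WAKE_THREAD.
 */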
10414static irqreturn_t vh265_isr(int irq, void *data)
10415{
10416 int i, temp;
10417 unsigned int dec_status;
10418 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10419 u32 debug_tag;
10420 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10421
10422 if (hevc->init_flag == 0)
10423 return IRQ_HANDLED;
10424 hevc->dec_status = dec_status;
10425 if (is_log_enable(hevc))
10426 add_log(hevc,
10427 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10428 dec_status, READ_HREG(HEVC_DECODE_INFO),
10429 READ_VREG(HEVC_MPRED_CURR_LCU),
10430 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10431 READ_VREG(HEVC_SHIFT_STATUS));
10432
10433 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10434 hevc_print(hevc, 0,
10435 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10436 dec_status, READ_HREG(HEVC_DECODE_INFO),
10437 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10438 READ_VREG(HEVC_SHIFT_STATUS));
10439
10440 debug_tag = READ_HREG(DEBUG_REG1);
10441 if (debug_tag & 0x10000) {
10442 hevc_print(hevc, 0,
10443 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10444
10445 if (hevc->mmu_enable)
10446 temp = 0x500;
10447 else
10448 temp = 0x400;
10449 for (i = 0; i < temp; i += 4) {
10450 int ii;
10451 if ((i & 0xf) == 0)
10452 hevc_print_cont(hevc, 0, "%03x: ", i);
10453 for (ii = 0; ii < 4; ii++) {
10454 hevc_print_cont(hevc, 0, "%04x ",
10455 hevc->lmem_ptr[i + 3 - ii]);
10456 }
10457 if (((i + ii) & 0xf) == 0)
10458 hevc_print_cont(hevc, 0, "\n");
10459 }
10460
10461 if (((udebug_pause_pos & 0xffff)
10462 == (debug_tag & 0xffff)) &&
10463 (udebug_pause_decode_idx == 0 ||
10464 udebug_pause_decode_idx == hevc->decode_idx) &&
10465 (udebug_pause_val == 0 ||
10466 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10467 udebug_pause_pos &= 0xffff;
10468 hevc->ucode_pause_pos = udebug_pause_pos;
10469 }
10470 else if (debug_tag & 0x20000)
10471 hevc->ucode_pause_pos = 0xffffffff;
10472 if (hevc->ucode_pause_pos)
10473 reset_process_time(hevc);
10474 else
10475 WRITE_HREG(DEBUG_REG1, 0);
10476 } else if (debug_tag != 0) {
10477 hevc_print(hevc, 0,
10478 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10479 READ_HREG(DEBUG_REG2),
10480 READ_VREG(HEVC_STREAM_LEVEL),
10481 READ_VREG(HEVC_STREAM_WR_PTR),
10482 READ_VREG(HEVC_STREAM_RD_PTR));
10483 if (((udebug_pause_pos & 0xffff)
10484 == (debug_tag & 0xffff)) &&
10485 (udebug_pause_decode_idx == 0 ||
10486 udebug_pause_decode_idx == hevc->decode_idx) &&
10487 (udebug_pause_val == 0 ||
10488 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10489 udebug_pause_pos &= 0xffff;
10490 hevc->ucode_pause_pos = udebug_pause_pos;
10491 }
10492 if (hevc->ucode_pause_pos)
10493 reset_process_time(hevc);
10494 else
10495 WRITE_HREG(DEBUG_REG1, 0);
10496 return IRQ_HANDLED;
10497 }
10498
10499
10500 if (hevc->pic_list_init_flag == 1)
10501 return IRQ_HANDLED;
10502
10503 if (!hevc->m_ins_flag) {
10504 if (dec_status == HEVC_OVER_DECODE) {
10505 hevc->over_decode = 1;
10506 			hevc_print(hevc, 0,
10507 				"isr: over decode\n");
10508 			WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10509 return IRQ_HANDLED;
10510 }
10511 }
10512
10513 return IRQ_WAKE_THREAD;
10514
10515}
10516
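/*
 * Clock adjustment work: derive fps from frame_dur (96 kHz units) and,
 * if hevc_source_changed() reports a change (> 0), remember the new
 * width * height * fps in saved_resolution so that the check timer
 * stops rescheduling this work.
 */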
10517static void vh265_set_clk(struct work_struct *work)
10518{
10519 struct hevc_state_s *hevc = container_of(work,
10520 struct hevc_state_s, set_clk_work);
10521
10522 int fps = 96000 / hevc->frame_dur;
10523
10524 if (hevc_source_changed(VFORMAT_HEVC,
10525 hevc->frame_width, hevc->frame_height, fps) > 0)
10526 hevc->saved_resolution = hevc->frame_width *
10527 hevc->frame_height * fps;
10528}
10529
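/*
 * Periodic watchdog, re-armed every PUT_INTERVAL.  In multi-instance
 * mode it detects stalled decodes (decode_timeout_val ms without the
 * current LCU index advancing); in the legacy single-instance mode it
 * drives the error_flag and error_system_watchdog state machines while
 * the receiver is inactive.  It also services the debug hooks
 * (radr/rval, dbg_cmd, pic list dump) and schedules the clock work when
 * the resolution or frame rate changes.
 */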
10530static void vh265_check_timer_func(unsigned long arg)
10531{
10532 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10533 struct timer_list *timer = &hevc->timer;
10534 unsigned char empty_flag;
10535 unsigned int buf_level;
10536
10537 enum receviver_start_e state = RECEIVER_INACTIVE;
10538
10539 if (hevc->init_flag == 0) {
10540 if (hevc->stat & STAT_TIMER_ARM) {
10541 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10542 }
10543 return;
10544 }
10545#ifdef MULTI_INSTANCE_SUPPORT
10546 if (hevc->m_ins_flag &&
10547 (get_dbg_flag(hevc) &
10548 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10549 hw_to_vdec(hevc)->next_status ==
10550 VDEC_STATUS_DISCONNECTED) {
10551 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10552 vdec_schedule_work(&hevc->work);
10553 hevc_print(hevc,
10554 0, "vdec requested to be disconnected\n");
10555 return;
10556 }
10557
10558 if (hevc->m_ins_flag) {
10559 if ((input_frame_based(hw_to_vdec(hevc)) ||
10560 (READ_VREG(HEVC_STREAM_LEVEL) > 0xb0)) &&
10561 ((get_dbg_flag(hevc) &
10562 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10563 (decode_timeout_val > 0) &&
10564 (hevc->start_process_time > 0) &&
10565 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10566 > decode_timeout_val)
10567 ) {
10568 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10569 int current_lcu_idx =
10570 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10571 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10572 if (hevc->last_lcu_idx == current_lcu_idx) {
10573 if (hevc->decode_timeout_count > 0)
10574 hevc->decode_timeout_count--;
10575 if (hevc->decode_timeout_count == 0)
10576 timeout_process(hevc);
10577 } else
10578 restart_process_time(hevc);
10579 hevc->last_lcu_idx = current_lcu_idx;
10580 } else {
10581 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10582 timeout_process(hevc);
10583 }
10584 }
10585 } else {
10586#endif
10587 if (hevc->m_ins_flag == 0 &&
10588 vf_get_receiver(hevc->provider_name)) {
10589 state =
10590 vf_notify_receiver(hevc->provider_name,
10591 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10592 NULL);
10593 if ((state == RECEIVER_STATE_NULL)
10594 || (state == RECEIVER_STATE_NONE))
10595 state = RECEIVER_INACTIVE;
10596 } else
10597 state = RECEIVER_INACTIVE;
10598
10599 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10600 /* error watchdog */
10601 if (hevc->m_ins_flag == 0 &&
10602 (empty_flag == 0)
10603 && (hevc->pic_list_init_flag == 0
10604 || hevc->pic_list_init_flag
10605 == 3)) {
10606 /* decoder has input */
10607 if ((get_dbg_flag(hevc) &
10608 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10609
10610 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10611 /* receiver has no buffer to recycle */
10612 if ((state == RECEIVER_INACTIVE) &&
10613 (kfifo_is_empty(&hevc->display_q) &&
10614 buf_level > 0x200)
10615 ) {
10616 if (hevc->error_flag == 0) {
10617 hevc->error_watchdog_count++;
10618 if (hevc->error_watchdog_count ==
10619 error_handle_threshold) {
10620 hevc_print(hevc, 0,
10621 "H265 dec err local reset.\n");
10622 hevc->error_flag = 1;
10623 hevc->error_watchdog_count = 0;
10624 hevc->error_skip_nal_wt_cnt = 0;
10625 hevc->
10626 error_system_watchdog_count++;
10627 WRITE_VREG
10628 (HEVC_ASSIST_MBOX0_IRQ_REG,
10629 0x1);
10630 }
10631 } else if (hevc->error_flag == 2) {
10632 int th =
10633 error_handle_nal_skip_threshold;
10634 hevc->error_skip_nal_wt_cnt++;
10635 if (hevc->error_skip_nal_wt_cnt
10636 == th) {
10637 hevc->error_flag = 3;
10638 hevc->error_watchdog_count = 0;
10639 hevc->
10640 error_skip_nal_wt_cnt = 0;
10641 WRITE_VREG
10642 (HEVC_ASSIST_MBOX0_IRQ_REG,
10643 0x1);
10644 }
10645 }
10646 }
10647 }
10648
10649 if ((get_dbg_flag(hevc)
10650 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10651 /* receiver has no buffer to recycle */
10652 if ((state == RECEIVER_INACTIVE) &&
10653 (kfifo_is_empty(&hevc->display_q))
10654 ) { /* no buffer to recycle */
10655 if ((get_dbg_flag(hevc) &
10656 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10657 0)
10658 hevc->error_system_watchdog_count++;
10659 if (hevc->error_system_watchdog_count ==
10660 error_handle_system_threshold) {
10661 /* and it lasts for a while */
10662 hevc_print(hevc, 0,
10663 "H265 dec fatal error watchdog.\n");
10664 hevc->
10665 error_system_watchdog_count = 0;
10666 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10667 }
10668 }
10669 } else {
10670 hevc->error_watchdog_count = 0;
10671 hevc->error_system_watchdog_count = 0;
10672 }
10673#ifdef MULTI_INSTANCE_SUPPORT
10674 }
10675#endif
10676 if ((hevc->ucode_pause_pos != 0) &&
10677 (hevc->ucode_pause_pos != 0xffffffff) &&
10678 udebug_pause_pos != hevc->ucode_pause_pos) {
10679 hevc->ucode_pause_pos = 0;
10680 WRITE_HREG(DEBUG_REG1, 0);
10681 }
10682
10683 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10684 dump_pic_list(hevc);
10685 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10686 }
10687 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10688 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10689 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10690 }
10691#ifdef TEST_NO_BUF
10692 if (hevc->wait_buf)
10693 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10694#endif
10695 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
10696 hevc->error_skip_nal_count = error_skip_nal_count;
10697 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10698
10699 debug &= ~H265_DEBUG_HW_RESET;
10700 }
10701
10702#ifdef ERROR_HANDLE_DEBUG
10703 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
10704 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
10705 dbg_nal_skip_count &= ~0x10000;
10706 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10707 }
10708#endif
10709
10710 if (radr != 0) {
10711 if (rval != 0) {
10712 WRITE_VREG(radr, rval);
10713 hevc_print(hevc, 0,
10714 "WRITE_VREG(%x,%x)\n", radr, rval);
10715 } else
10716 hevc_print(hevc, 0,
10717 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
10718 rval = 0;
10719 radr = 0;
10720 }
10721 if (dbg_cmd != 0) {
10722 if (dbg_cmd == 1) {
10723 u32 disp_laddr;
10724
10725 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
10726 get_double_write_mode(hevc) == 0) {
10727 disp_laddr =
10728 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
10729 } else {
10730 struct canvas_s cur_canvas;
10731
10732 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
10733 & 0xff), &cur_canvas);
10734 disp_laddr = cur_canvas.addr;
10735 }
10736 hevc_print(hevc, 0,
10737 "current displayed buffer address %x\r\n",
10738 disp_laddr);
10739 }
10740 dbg_cmd = 0;
10741 }
10742 	/* don't change the clock at start. */
10743 if (hevc->m_ins_flag == 0 &&
10744 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
10745 hevc->frame_dur > 0 && hevc->saved_resolution !=
10746 hevc->frame_width * hevc->frame_height *
10747 (96000 / hevc->frame_dur))
10748 vdec_schedule_work(&hevc->set_clk_work);
10749
10750 mod_timer(timer, jiffies + PUT_INTERVAL);
10751}
10752
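/*
 * Kernel thread used on the use_cma path: it sleeps on h265_sema and,
 * when woken, builds the picture list outside of interrupt context
 * (pic_list_init_flag 1 -> 2) or tears it down again when uninit_list
 * is set.
 */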
10753static int h265_task_handle(void *data)
10754{
10755 int ret = 0;
10756 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10757
10758 set_user_nice(current, -10);
10759 while (1) {
10760 if (use_cma == 0) {
10761 hevc_print(hevc, 0,
10762 				"ERROR: use_cma cannot be changed dynamically\n");
10763 }
10764 ret = down_interruptible(&h265_sema);
10765 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
10766 init_pic_list(hevc);
10767 init_pic_list_hw(hevc);
10768 init_buf_spec(hevc);
10769 hevc->pic_list_init_flag = 2;
10770 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
10771
10772 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10773
10774 }
10775
10776 if (hevc->uninit_list) {
10777 /*USE_BUF_BLOCK*/
10778 uninit_pic_list(hevc);
10779 hevc_print(hevc, 0, "uninit list\n");
10780 hevc->uninit_list = 0;
10781#ifdef USE_UNINIT_SEMA
10782 if (use_cma) {
10783 up(&hevc->h265_uninit_done_sema);
10784 while (!kthread_should_stop())
10785 msleep(1);
10786 break;
10787 }
10788#endif
10789 }
10790 }
10791
10792 return 0;
10793}
10794
10795void vh265_free_cmabuf(void)
10796{
10797 struct hevc_state_s *hevc = gHevc;
10798
10799 mutex_lock(&vh265_mutex);
10800
10801 if (hevc->init_flag) {
10802 mutex_unlock(&vh265_mutex);
10803 return;
10804 }
10805
10806 mutex_unlock(&vh265_mutex);
10807}
10808
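/*
 * Report decoder status to the vdec core: frame geometry, frame rate
 * derived from frame_dur (96000 / frame_dur), the accumulated stat and
 * fatal_error bits and, in the single-decoder build, the gvs statistics
 * block.
 */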
10809#ifdef MULTI_INSTANCE_SUPPORT
10810int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
10811#else
10812int vh265_dec_status(struct vdec_info *vstatus)
10813#endif
10814{
10815#ifdef MULTI_INSTANCE_SUPPORT
10816 struct hevc_state_s *hevc =
10817 (struct hevc_state_s *)vdec->private;
10818#else
10819 struct hevc_state_s *hevc = gHevc;
10820#endif
10821 if (!hevc)
10822 return -1;
10823
10824 vstatus->frame_width = hevc->frame_width;
10825 vstatus->frame_height = hevc->frame_height;
10826 if (hevc->frame_dur != 0)
10827 vstatus->frame_rate = 96000 / hevc->frame_dur;
10828 else
10829 vstatus->frame_rate = -1;
10830 vstatus->error_count = 0;
10831 vstatus->status = hevc->stat | hevc->fatal_error;
10832#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10833 vstatus->bit_rate = gvs->bit_rate;
10834 vstatus->frame_dur = hevc->frame_dur;
10835 if (gvs) {
10836 vstatus->bit_rate = gvs->bit_rate;
10837 vstatus->frame_data = gvs->frame_data;
10838 vstatus->total_data = gvs->total_data;
10839 vstatus->frame_count = gvs->frame_count;
10840 vstatus->error_frame_count = gvs->error_frame_count;
10841 vstatus->drop_frame_count = gvs->drop_frame_count;
10842 vstatus->total_data = gvs->total_data;
10843 vstatus->samp_cnt = gvs->samp_cnt;
10844 vstatus->offset = gvs->offset;
10845 }
10846 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
10847 "%s", DRIVER_NAME);
10848#endif
10849 vstatus->ratio_control = hevc->ratio_control;
10850 return 0;
10851}
10852
10853int vh265_set_isreset(struct vdec_s *vdec, int isreset)
10854{
10855 is_reset = isreset;
10856 return 0;
10857}
10858
10859static int vh265_vdec_info_init(void)
10860{
10861 gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
10862 if (NULL == gvs) {
10863 		pr_info("failed to allocate the vdec status struct.\n");
10864 return -ENOMEM;
10865 }
10866 return 0;
10867}
10868
10869#if 0
10870static void H265_DECODE_INIT(void)
10871{
10872 /* enable hevc clocks */
10873 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
10874 /* *************************************************************** */
10875 /* Power ON HEVC */
10876 /* *************************************************************** */
10877 /* Powerup HEVC */
10878 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
10879 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
10880 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
10881 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
10882 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
10883 /* remove isolations */
10884 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
10885 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
10886
10887}
10888#endif
10889
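/*
 * Program HEVC_DECODE_MODE / HEVC_DECODE_MODE2: single vs multi
 * instance, frame-based vs stream-based input and, with Dolby Vision
 * enabled, the BL/EL routing implied by vdec->master / vdec->slave.
 * The start parser type goes into bits 8..15, start_decoding_flag into
 * bits 16..23 and the MBX0 interrupt flag into the top byte.
 */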
10890static void config_decode_mode(struct hevc_state_s *hevc)
10891{
10892#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10893 struct vdec_s *vdec = hw_to_vdec(hevc);
10894#endif
10895 unsigned decode_mode;
10896 if (!hevc->m_ins_flag)
10897 decode_mode = DECODE_MODE_SINGLE;
10898 else if (vdec_frame_based(hw_to_vdec(hevc)))
10899 decode_mode =
10900 DECODE_MODE_MULTI_FRAMEBASE;
10901#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10902 else if (vdec->slave) {
10903 if (force_bypass_dvenl & 0x80000000)
10904 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
10905 else
10906 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
10907 if (dolby_meta_with_el && hevc->bypass_dvenl) {
10908 hevc->bypass_dvenl = 0;
10909 hevc_print(hevc, 0,
10910 "NOT support bypass_dvenl when meta_with_el\n");
10911 }
10912 if (hevc->bypass_dvenl)
10913 decode_mode =
10914 (hevc->start_parser_type << 8)
10915 | DECODE_MODE_MULTI_STREAMBASE;
10916 else
10917 decode_mode =
10918 (hevc->start_parser_type << 8)
10919 | DECODE_MODE_MULTI_DVBAL;
10920 } else if (vdec->master)
10921 decode_mode =
10922 (hevc->start_parser_type << 8)
10923 | DECODE_MODE_MULTI_DVENL;
10924#endif
10925 else
10926 decode_mode =
10927 DECODE_MODE_MULTI_STREAMBASE;
10928
10929 if (hevc->m_ins_flag)
10930 decode_mode |=
10931 (hevc->start_decoding_flag << 16);
10932 /* set MBX0 interrupt flag */
10933 decode_mode |= (0x80 << 24);
10934 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
10935 WRITE_VREG(HEVC_DECODE_MODE2,
10936 hevc->rps_set_id);
10937}
10938
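/*
 * Hardware/ucode setup done before amhevc_start(): configure the work
 * space and decoder core, enable the mailbox interrupt, choose the
 * NAL_SEARCH_CTL policy (manual parsing for the debug flags, otherwise
 * ucode-side vps/sps/pps[/idr] checking selected from PB_skip_mode),
 * set the SEI and Dolby Vision parsing bits, then program the decode
 * mode and aux buffers.
 */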
10939static void vh265_prot_init(struct hevc_state_s *hevc)
10940{
10941#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10942 struct vdec_s *vdec = hw_to_vdec(hevc);
10943#endif
10944 /* H265_DECODE_INIT(); */
10945
10946 hevc_config_work_space_hw(hevc);
10947
10948 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
10949
10950 WRITE_VREG(HEVC_WAIT_FLAG, 1);
10951
10952 /* WRITE_VREG(P_HEVC_MPSR, 1); */
10953
10954 /* clear mailbox interrupt */
10955 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
10956
10957 /* enable mailbox interrupt */
10958 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
10959
10960 /* disable PSCALE for hardware sharing */
10961 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
10962
10963 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
10964
10965 if ((get_dbg_flag(hevc) &
10966 (H265_DEBUG_MAN_SKIP_NAL |
10967 H265_DEBUG_MAN_SEARCH_NAL))
10968 /*||hevc->m_ins_flag*/
10969 ) {
10970 		WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manually parse NALs */
10971 } else {
10972 /* check vps/sps/pps/i-slice in ucode */
10973 unsigned ctl_val = 0x8;
10974 if (hevc->PB_skip_mode == 0)
10975 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
10976 else if (hevc->PB_skip_mode == 3)
10977 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
10978 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
10979 }
10980 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
10981#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10982 || vdec->master
10983 || vdec->slave
10984#endif
10985 )
10986 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
10987
10988 WRITE_VREG(NAL_SEARCH_CTL,
10989 READ_VREG(NAL_SEARCH_CTL)
10990 | ((parser_sei_enable & 0x7) << 17));
10991#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10992 WRITE_VREG(NAL_SEARCH_CTL,
10993 READ_VREG(NAL_SEARCH_CTL) |
10994 ((parser_dolby_vision_enable & 0x1) << 20));
10995#endif
10996 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
10997
10998 config_decode_mode(hevc);
10999 config_aux_buf(hevc);
11000#ifdef SWAP_HEVC_UCODE
11001 if (!tee_enabled() && hevc->is_swap &&
11002 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11003 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11004 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11005 }
11006#endif
11007#ifdef DETREFILL_ENABLE
11008 if (hevc->is_swap &&
11009 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11010 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
11011 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
11012 }
11013#endif
11014}
11015
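/*
 * Software-state initialisation: validate the configured size
 * (is_oversize), decide whether the stream counts as 4k, seed
 * frame_dur, aspect ratio, i_only and pts_unstable from
 * vh265_amstream_dec_info, and prime the newframe/display/pending
 * kfifos before calling hevc_local_init().
 */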
11016static int vh265_local_init(struct hevc_state_s *hevc)
11017{
11018 int i;
11019 int ret = -1;
11020
11021#ifdef DEBUG_PTS
11022 hevc->pts_missed = 0;
11023 hevc->pts_hit = 0;
11024#endif
11025
11026 hevc->saved_resolution = 0;
11027 hevc->get_frame_dur = false;
11028 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
11029 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
11030 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
11031 pr_info("over size : %u x %u.\n",
11032 hevc->frame_width, hevc->frame_height);
11033 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
11034 return ret;
11035 }
11036
11037 if (hevc->max_pic_w && hevc->max_pic_h) {
11038 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
11039 ((hevc->max_pic_w * hevc->max_pic_h) >
11040 1920 * 1088) ? true : false;
11041 } else {
11042 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
11043 ((hevc->frame_width * hevc->frame_height) >
11044 1920 * 1088) ? true : false;
11045 }
11046
11047 hevc->frame_dur =
11048 (hevc->vh265_amstream_dec_info.rate ==
11049 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
11050#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
11051 gvs->frame_dur = hevc->frame_dur;
11052#endif
11053 if (hevc->frame_width && hevc->frame_height)
11054 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
11055
11056 if (i_only_flag)
11057 hevc->i_only = i_only_flag & 0xff;
11058 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
11059 & 0x08)
11060 hevc->i_only = 0x7;
11061 else
11062 hevc->i_only = 0x0;
11063 hevc->error_watchdog_count = 0;
11064 hevc->sei_present_flag = 0;
11065 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
11066 & 0x40) >> 6;
11067 hevc_print(hevc, 0,
11068 "h265:pts_unstable=%d\n", pts_unstable);
11069/*
11070 * TODO: FOR VERSION
11071 */
11072 hevc_print(hevc, 0,
11073 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
11074 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
11075
11076 if (hevc->frame_dur == 0)
11077 hevc->frame_dur = 96000 / 24;
11078
11079 INIT_KFIFO(hevc->display_q);
11080 INIT_KFIFO(hevc->newframe_q);
11081 INIT_KFIFO(hevc->pending_q);
11082
11083 for (i = 0; i < VF_POOL_SIZE; i++) {
11084 const struct vframe_s *vf = &hevc->vfpool[i];
11085
11086 hevc->vfpool[i].index = -1;
11087 kfifo_put(&hevc->newframe_q, vf);
11088 }
11089
11090
11091 ret = hevc_local_init(hevc);
11092
11093 return ret;
11094}
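/*
 * vh265_init(): pick and load the firmware (plain, MMU or MMU+swap
 * variants depending on the CPU family and whether the stream is 4k),
 * set up the timer and work items, and in the legacy single-instance
 * path also register the ISR, the vframe provider and the check timer
 * before starting the AMRISC core.
 */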
11095#ifdef MULTI_INSTANCE_SUPPORT
11096static s32 vh265_init(struct vdec_s *vdec)
11097{
11098 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
11099#else
11100static s32 vh265_init(struct hevc_state_s *hevc)
11101{
11102
11103#endif
11104 int ret, size = -1;
11105 int fw_size = 0x1000 * 16;
11106 struct firmware_s *fw = NULL;
11107
11108 init_timer(&hevc->timer);
11109
11110 hevc->stat |= STAT_TIMER_INIT;
11111
11112 if (hevc->m_ins_flag) {
11113#ifdef USE_UNINIT_SEMA
11114 sema_init(&hevc->h265_uninit_done_sema, 0);
11115#endif
11116 INIT_WORK(&hevc->work, vh265_work);
11117 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
11118 }
11119
11120 if (vh265_local_init(hevc) < 0)
11121 return -EBUSY;
11122
11123 mutex_init(&hevc->chunks_mutex);
11124 INIT_WORK(&hevc->notify_work, vh265_notify_work);
11125 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
11126
11127 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
11128 if (IS_ERR_OR_NULL(fw))
11129 return -ENOMEM;
11130
11131 if (hevc->mmu_enable)
11132 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11133 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
11134 else {
11135 if (!hevc->is_4k) {
11136 				/* if an older version of the fw was loaded, */
11137 				/* try to load the noswap fw because the */
11138 				/* old fw package does not contain the swap fw. */
11139 size = get_firmware_data(
11140 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
11141 if (size < 0)
11142 size = get_firmware_data(
11143 VIDEO_DEC_HEVC_MMU, fw->data);
11144 else if (size)
11145 hevc->is_swap = true;
11146 } else
11147 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
11148 fw->data);
11149 }
11150 else
11151 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
11152
11153 if (size < 0) {
11154 pr_err("get firmware fail.\n");
11155 vfree(fw);
11156 return -1;
11157 }
11158
11159 fw->len = size;
11160
11161#ifdef SWAP_HEVC_UCODE
11162 if (!tee_enabled() && hevc->is_swap &&
11163 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11164 if (hevc->mmu_enable) {
11165 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
11166 hevc->mc_cpu_addr =
11167 dma_alloc_coherent(amports_get_dma_device(),
11168 hevc->swap_size,
11169 &hevc->mc_dma_handle, GFP_KERNEL);
11170 if (!hevc->mc_cpu_addr) {
11171 amhevc_disable();
11172 pr_info("vh265 mmu swap ucode loaded fail.\n");
11173 return -ENOMEM;
11174 }
11175
11176 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
11177 hevc->swap_size);
11178
11179 hevc_print(hevc, 0,
11180 "vh265 mmu ucode swap loaded %x\n",
11181 hevc->mc_dma_handle);
11182 }
11183 }
11184#endif
11185
11186#ifdef MULTI_INSTANCE_SUPPORT
11187 if (hevc->m_ins_flag) {
11188 hevc->timer.data = (ulong) hevc;
11189 hevc->timer.function = vh265_check_timer_func;
11190 hevc->timer.expires = jiffies + PUT_INTERVAL;
11191
11192 hevc->fw = fw;
11193
11194 return 0;
11195 }
11196#endif
11197 amhevc_enable();
11198
11199 if (hevc->mmu_enable)
11200 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11201 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
11202 else {
11203 if (!hevc->is_4k) {
11204 				/* if an older version of the fw was loaded, */
11205 				/* try to load the noswap fw because the */
11206 				/* old fw package does not contain the swap fw. */
11207 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11208 "hevc_mmu_swap", fw->data);
11209 if (ret < 0)
11210 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11211 "h265_mmu", fw->data);
11212 else
11213 hevc->is_swap = true;
11214 } else
11215 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11216 "h265_mmu", fw->data);
11217 }
11218 else
11219 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
11220
11221 if (ret < 0) {
11222 amhevc_disable();
11223 vfree(fw);
11224 pr_err("H265: the %s fw loading failed, err: %x\n",
11225 tee_enabled() ? "TEE" : "local", ret);
11226 return -EBUSY;
11227 }
11228
11229 vfree(fw);
11230
11231 hevc->stat |= STAT_MC_LOAD;
11232
11233#ifdef DETREFILL_ENABLE
11234 if (hevc->is_swap &&
11235 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11236 init_detrefill_buf(hevc);
11237#endif
11238 /* enable AMRISC side protocol */
11239 vh265_prot_init(hevc);
11240
11241 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
11242 vh265_isr_thread_fn,
11243 		IRQF_ONESHOT, /* run the irq thread with this irq disabled */
11244 "vh265-irq", (void *)hevc)) {
11245 hevc_print(hevc, 0, "vh265 irq register error.\n");
11246 amhevc_disable();
11247 return -ENOENT;
11248 }
11249
11250 hevc->stat |= STAT_ISR_REG;
11251 hevc->provider_name = PROVIDER_NAME;
11252
11253#ifdef MULTI_INSTANCE_SUPPORT
11254 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
11255 &vh265_vf_provider, vdec);
11256 vf_reg_provider(&vh265_vf_prov);
11257 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
11258 NULL);
11259 if (hevc->frame_dur != 0) {
11260 if (!is_reset) {
11261 vf_notify_receiver(hevc->provider_name,
11262 VFRAME_EVENT_PROVIDER_FR_HINT,
11263 (void *)
11264 ((unsigned long)hevc->frame_dur));
11265 fr_hint_status = VDEC_HINTED;
11266 }
11267 } else
11268 fr_hint_status = VDEC_NEED_HINT;
11269#else
11270 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
11271 hevc);
11272 vf_reg_provider(&vh265_vf_prov);
11273 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
11274 if (hevc->frame_dur != 0) {
11275 vf_notify_receiver(PROVIDER_NAME,
11276 VFRAME_EVENT_PROVIDER_FR_HINT,
11277 (void *)
11278 ((unsigned long)hevc->frame_dur));
11279 fr_hint_status = VDEC_HINTED;
11280 } else
11281 fr_hint_status = VDEC_NEED_HINT;
11282#endif
11283 hevc->stat |= STAT_VF_HOOK;
11284
11285 hevc->timer.data = (ulong) hevc;
11286 hevc->timer.function = vh265_check_timer_func;
11287 hevc->timer.expires = jiffies + PUT_INTERVAL;
11288
11289 add_timer(&hevc->timer);
11290
11291 hevc->stat |= STAT_TIMER_ARM;
11292
11293 if (use_cma) {
11294#ifdef USE_UNINIT_SEMA
11295 sema_init(&hevc->h265_uninit_done_sema, 0);
11296#endif
11297 if (h265_task == NULL) {
11298 sema_init(&h265_sema, 1);
11299 h265_task =
11300 kthread_run(h265_task_handle, hevc,
11301 "kthread_h265");
11302 }
11303 }
11304 /* hevc->stat |= STAT_KTHREAD; */
11305#if 0
11306 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11307 hevc_print(hevc, 0, "%s force clk\n", __func__);
11308 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11309 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11310 ((1 << 2) | (1 << 1)));
11311 WRITE_VREG(HEVC_DBLK_CFG0,
11312 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11313 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11314 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11315 (1 << 2)); /* 2 */
11316 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11317 (1 << 24)); /* 24 */
11318 WRITE_VREG(HEVC_STREAM_CONTROL,
11319 READ_VREG(HEVC_STREAM_CONTROL) |
11320 (1 << 15)); /* 15 */
11321 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11322 (1 << 13)); /* 13 */
11323 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11324 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11325 (1 << 15)); /* 15 */
11326 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11327 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11328 (1 << 15)); /* 15 */
11329 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11330 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11331 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11332 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11333 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11334 (1 << 3)); /* 3 */
11335 }
11336#endif
11337#ifdef SWAP_HEVC_UCODE
11338 if (!tee_enabled() && hevc->is_swap &&
11339 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11340 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11341 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11342 }
11343#endif
11344
11345#ifndef MULTI_INSTANCE_SUPPORT
11346 set_vdec_func(&vh265_dec_status);
11347#endif
11348 amhevc_start();
11349 hevc->stat |= STAT_VDEC_RUN;
11350 hevc->init_flag = 1;
11351 error_handle_threshold = 30;
11352 /* pr_info("%d, vh265_init, RP=0x%x\n",
11353 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11354 */
11355
11356 return 0;
11357}
11358
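/*
 * Single-instance teardown: optionally wait for an in-flight slice to
 * finish, stop the AMRISC core, free the irq, delete the timer,
 * unregister the vframe provider, release the local buffers and, on the
 * use_cma path, hand the picture-list uninit off to the h265 kthread.
 */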
11359static int vh265_stop(struct hevc_state_s *hevc)
11360{
11361 if (get_dbg_flag(hevc) &
11362 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11363 int wait_timeout_count = 0;
11364
11365 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11366 HEVC_CODED_SLICE_SEGMENT_DAT &&
11367 wait_timeout_count < 10){
11368 wait_timeout_count++;
11369 msleep(20);
11370 }
11371 }
11372 if (hevc->stat & STAT_VDEC_RUN) {
11373 amhevc_stop();
11374 hevc->stat &= ~STAT_VDEC_RUN;
11375 }
11376
11377 if (hevc->stat & STAT_ISR_REG) {
11378#ifdef MULTI_INSTANCE_SUPPORT
11379 if (!hevc->m_ins_flag)
11380#endif
11381 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11382 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11383 hevc->stat &= ~STAT_ISR_REG;
11384 }
11385
11386 hevc->stat &= ~STAT_TIMER_INIT;
11387 if (hevc->stat & STAT_TIMER_ARM) {
11388 del_timer_sync(&hevc->timer);
11389 hevc->stat &= ~STAT_TIMER_ARM;
11390 }
11391
11392 if (hevc->stat & STAT_VF_HOOK) {
11393 if (fr_hint_status == VDEC_HINTED) {
11394 vf_notify_receiver(hevc->provider_name,
11395 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11396 NULL);
11397 }
11398 fr_hint_status = VDEC_NO_NEED_HINT;
11399 vf_unreg_provider(&vh265_vf_prov);
11400 hevc->stat &= ~STAT_VF_HOOK;
11401 }
11402
11403 hevc_local_uninit(hevc);
11404
11405 if (use_cma) {
11406 hevc->uninit_list = 1;
11407 up(&h265_sema);
11408#ifdef USE_UNINIT_SEMA
11409 down(&hevc->h265_uninit_done_sema);
11410 if (!IS_ERR(h265_task)) {
11411 kthread_stop(h265_task);
11412 h265_task = NULL;
11413 }
11414#else
11415 while (hevc->uninit_list) /* wait uninit complete */
11416 msleep(20);
11417#endif
11418
11419 }
11420 hevc->init_flag = 0;
11421 hevc->first_sc_checked = 0;
11422 cancel_work_sync(&hevc->notify_work);
11423 cancel_work_sync(&hevc->set_clk_work);
11424 uninit_mmu_buffers(hevc);
11425 amhevc_disable();
11426
11427 kfree(gvs);
11428 gvs = NULL;
11429
11430 return 0;
11431}
11432
11433#ifdef MULTI_INSTANCE_SUPPORT
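/*
 * Decode-timeout bookkeeping used by vh265_check_timer_func(): a decode
 * is declared timed out once decode_timeout_val ms pass without the
 * current LCU index advancing, with decode_timeout_count retries.
 */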
11434static void reset_process_time(struct hevc_state_s *hevc)
11435{
11436 if (hevc->start_process_time) {
11437 unsigned int process_time =
11438 1000 * (jiffies - hevc->start_process_time) / HZ;
11439 hevc->start_process_time = 0;
11440 if (process_time > max_process_time[hevc->index])
11441 max_process_time[hevc->index] = process_time;
11442 }
11443}
11444
11445static void start_process_time(struct hevc_state_s *hevc)
11446{
11447 hevc->start_process_time = jiffies;
11448 hevc->decode_timeout_count = 2;
11449 hevc->last_lcu_idx = 0;
11450}
11451
11452static void restart_process_time(struct hevc_state_s *hevc)
11453{
11454 hevc->start_process_time = jiffies;
11455 hevc->decode_timeout_count = 2;
11456}
11457
11458static void timeout_process(struct hevc_state_s *hevc)
11459{
11460 	/*
11461 	 * If vh265_work arrives right at this timeout point,
11462 	 * let it handle the scenario.
11463 	 */
11464 if (work_pending(&hevc->work))
11465 return;
11466
11467 hevc->timeout_num++;
11468 amhevc_stop();
11469 read_decode_info(hevc);
11470
11471 hevc_print(hevc,
11472 0, "%s decoder timeout\n", __func__);
11473 check_pic_decoded_error(hevc,
11474 hevc->pic_decoded_lcu_idx);
11475 hevc->decoded_poc = hevc->curr_POC;
11476 hevc->decoding_pic = NULL;
11477 hevc->dec_result = DEC_RESULT_DONE;
11478 reset_process_time(hevc);
11479
11480 if (work_pending(&hevc->work))
11481 return;
11482 vdec_schedule_work(&hevc->timeout_work);
11483}
11484
11485#ifdef CONSTRAIN_MAX_BUF_NUM
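/*
 * Buffer accounting helpers used to throttle run_ready(): count pictures
 * held only by the display path (output_ready and no longer referenced)
 * and pictures that are still busy in any way (output_mark, referenced or
 * output_ready).
 */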
11486static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11487{
11488 struct PIC_s *pic;
11489 int i;
11490 int count = 0;
11491 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11492 pic = hevc->m_PIC[i];
11493 if (pic == NULL || pic->index == -1)
11494 continue;
11495 if (pic->output_mark == 0 && pic->referenced == 0
11496 && pic->output_ready == 1)
11497 count++;
11498 }
11499
11500 return count;
11501}
11502
11503static int get_used_buf_count(struct hevc_state_s *hevc)
11504{
11505 struct PIC_s *pic;
11506 int i;
11507 int count = 0;
11508 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11509 pic = hevc->m_PIC[i];
11510 if (pic == NULL || pic->index == -1)
11511 continue;
11512 if (pic->output_mark != 0 || pic->referenced != 0
11513 || pic->output_ready != 0)
11514 count++;
11515 }
11516
11517 return count;
11518}
11519#endif
11520
11521
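/*
 * Check whether a free picture buffer exists for the next decode. As a side
 * effect, if the DPB is starved by error-marked reference frames while the
 * receiver is inactive, those references are dropped so decoding can resume.
 */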
11522static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11523{
11524 struct PIC_s *new_pic = NULL;
11525 struct PIC_s *pic;
11526 /* recycle un-used pic */
11527 int i;
11528 int ref_pic = 0;
11529 struct vdec_s *vdec = hw_to_vdec(hevc);
11530 /*return 1 if pic_list is not initialized yet*/
11531 if (hevc->pic_list_init_flag != 3)
11532 return 1;
11533
11534 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11535 pic = hevc->m_PIC[i];
11536 if (pic == NULL || pic->index == -1)
11537 continue;
11538 if (pic->referenced == 1)
11539 ref_pic++;
11540 if (pic->output_mark == 0 && pic->referenced == 0
11541 && pic->output_ready == 0
11542 ) {
11543 if (new_pic) {
11544 if (pic->POC < new_pic->POC)
11545 new_pic = pic;
11546 } else
11547 new_pic = pic;
11548 }
11549 }
11550/*If the number of referenced frames in the DPB >= (the DPB buffer size - the number of reorders - 3)*/
11551/*and the back-end state is RECEIVER_INACTIVE, the decoder is left with no buffer to decode into,*/
11552/*so all reference frames are released and the error flag is set.*/
11553/*The 3 accounts for the 2 fields needed by the back-end for display and the 1 field needed for*/
11554/*decoding when the stream is interlaced.*/
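/*For example, with a hypothetical get_work_pic_num() of 12 and sps_num_reorder_pics_0 of 4,*/
/*the recovery path below triggers once 12 - 4 - 3 = 5 pictures are still held as references*/
/*and no free picture buffer remains.*/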
11555 if ((!hevc->is_used_v4l) && (new_pic == NULL) &&
11556 (ref_pic >=
11557 get_work_pic_num(hevc) -
11558 hevc->sps_num_reorder_pics_0 - 3)) {
11559 enum receviver_start_e state = RECEIVER_INACTIVE;
11560 if (vf_get_receiver(vdec->vf_provider_name)) {
11561 state =
11562 vf_notify_receiver(vdec->vf_provider_name,
11563 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11564 NULL);
11565 if ((state == RECEIVER_STATE_NULL)
11566 || (state == RECEIVER_STATE_NONE))
11567 state = RECEIVER_INACTIVE;
11568 }
11569 if (state == RECEIVER_INACTIVE) {
11570 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11571 pic = hevc->m_PIC[i];
11572 if (pic == NULL || pic->index == -1)
11573 continue;
11574
11575 if ((pic->referenced == 1) &&
11576 (pic->error_mark == 1)) {
11577 pic->referenced = 0;
11578 put_mv_buf(hevc, pic);
11579 }
11580 pic->error_mark = 1;
11581 }
11582 }
11583 }
11584
11585 return (new_pic != NULL) ? 1 : 0;
11586}
11587
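/*
 * Multi-instance counterpart of vh265_stop(): stop the core, release the
 * IRQ/timer and vframe provider, then free the picture list through the
 * DEC_RESULT_FREE_CANVAS work before dropping the firmware copy.
 */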
11588static int vmh265_stop(struct hevc_state_s *hevc)
11589{
11590 if (hevc->stat & STAT_TIMER_ARM) {
11591 del_timer_sync(&hevc->timer);
11592 hevc->stat &= ~STAT_TIMER_ARM;
11593 }
11594 if (hevc->stat & STAT_VDEC_RUN) {
11595 amhevc_stop();
11596 hevc->stat &= ~STAT_VDEC_RUN;
11597 }
11598 if (hevc->stat & STAT_ISR_REG) {
11599 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11600 hevc->stat &= ~STAT_ISR_REG;
11601 }
11602
11603 if (hevc->stat & STAT_VF_HOOK) {
11604 if (fr_hint_status == VDEC_HINTED)
11605 vf_notify_receiver(hevc->provider_name,
11606 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11607 NULL);
11608 fr_hint_status = VDEC_NO_NEED_HINT;
11609 vf_unreg_provider(&vh265_vf_prov);
11610 hevc->stat &= ~STAT_VF_HOOK;
11611 }
11612
11613 hevc_local_uninit(hevc);
11614
11615 hevc->init_flag = 0;
11616 hevc->first_sc_checked = 0;
11617 cancel_work_sync(&hevc->notify_work);
11618 cancel_work_sync(&hevc->set_clk_work);
11619 cancel_work_sync(&hevc->timeout_work);
11620
11621 uninit_mmu_buffers(hevc);
11622
11623 if (use_cma) {
11624 hevc->uninit_list = 1;
11625 reset_process_time(hevc);
11626 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
11627 vdec_schedule_work(&hevc->work);
11628 flush_work(&hevc->work);
11629#ifdef USE_UNINIT_SEMA
11630 if (hevc->init_flag) {
11631 down(&hevc->h265_uninit_done_sema);
11632 }
11633#else
11634		while (hevc->uninit_list) /* wait for uninit to complete */
11635 msleep(20);
11636#endif
11637 }
11638 cancel_work_sync(&hevc->work);
11639
11640 vfree(hevc->fw);
11641 hevc->fw = NULL;
11642
11643 dump_log(hevc);
11644 return 0;
11645}
11646
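/*
 * Debug helper: byte-sum the current input chunk (mapping it first if the
 * block is not already virtually mapped) so logs can fingerprint the data.
 */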
11647static unsigned char get_data_check_sum
11648 (struct hevc_state_s *hevc, int size)
11649{
11650 int jj;
11651 int sum = 0;
11652 u8 *data = NULL;
11653
11654 if (!hevc->chunk->block->is_mapped)
11655 data = codec_mm_vmap(hevc->chunk->block->start +
11656 hevc->chunk->offset, size);
11657 else
11658 data = ((u8 *)hevc->chunk->block->start_virt) +
11659 hevc->chunk->offset;
11660
11661 for (jj = 0; jj < size; jj++)
11662 sum += data[jj];
11663
11664 if (!hevc->chunk->block->is_mapped)
11665 codec_mm_unmap_phyaddr(data);
11666 return sum;
11667}
11668
11669static void vh265_notify_work(struct work_struct *work)
11670{
11671 struct hevc_state_s *hevc =
11672 container_of(work,
11673 struct hevc_state_s,
11674 notify_work);
11675 struct vdec_s *vdec = hw_to_vdec(hevc);
11676#ifdef MULTI_INSTANCE_SUPPORT
11677 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
11678 vf_notify_receiver(hevc->provider_name,
11679 VFRAME_EVENT_PROVIDER_FR_HINT,
11680 (void *)
11681 ((unsigned long)hevc->frame_dur));
11682 vdec->fr_hint_state = VDEC_HINTED;
11683 } else if (fr_hint_status == VDEC_NEED_HINT) {
11684 vf_notify_receiver(hevc->provider_name,
11685 VFRAME_EVENT_PROVIDER_FR_HINT,
11686 (void *)
11687 ((unsigned long)hevc->frame_dur));
11688 fr_hint_status = VDEC_HINTED;
11689 }
11690#else
11691	if (fr_hint_status == VDEC_NEED_HINT) {
11692 vf_notify_receiver(PROVIDER_NAME,
11693 VFRAME_EVENT_PROVIDER_FR_HINT,
11694 (void *)
11695 ((unsigned long)hevc->frame_dur));
11696 fr_hint_status = VDEC_HINTED;
11697 }
11698#endif
11699
11700 return;
11701}
11702
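/*
 * Bottom half shared by vh265_work() and vh265_timeout_work(): act on
 * hevc->dec_result (free canvases, fetch more data, finish or flush the
 * current frame, or force an exit), release HW resources and finally
 * notify the vdec core through the completion callback.
 */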
11703static void vh265_work_implement(struct hevc_state_s *hevc,
11704 struct vdec_s *vdec,int from)
11705{
11706 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
11707 /*USE_BUF_BLOCK*/
11708 uninit_pic_list(hevc);
11709 hevc_print(hevc, 0, "uninit list\n");
11710 hevc->uninit_list = 0;
11711#ifdef USE_UNINIT_SEMA
11712 up(&hevc->h265_uninit_done_sema);
11713#endif
11714 return;
11715 }
11716
11717 /* finished decoding one frame or error,
11718 * notify vdec core to switch context
11719 */
11720 if (hevc->pic_list_init_flag == 1
11721 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
11722 hevc->pic_list_init_flag = 2;
11723 init_pic_list(hevc);
11724 init_pic_list_hw(hevc);
11725 init_buf_spec(hevc);
11726 hevc_print(hevc, 0,
11727 "set pic_list_init_flag to 2\n");
11728
11729 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11730 return;
11731 }
11732
11733 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11734 "%s dec_result %d %x %x %x\n",
11735 __func__,
11736 hevc->dec_result,
11737 READ_VREG(HEVC_STREAM_LEVEL),
11738 READ_VREG(HEVC_STREAM_WR_PTR),
11739 READ_VREG(HEVC_STREAM_RD_PTR));
11740
11741 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
11742 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
11743 && (hw_to_vdec(hevc)->next_status !=
11744 VDEC_STATUS_DISCONNECTED)) {
11745 if (!vdec_has_more_input(vdec)) {
11746 hevc->dec_result = DEC_RESULT_EOS;
11747 vdec_schedule_work(&hevc->work);
11748 return;
11749 }
11750 if (!input_frame_based(vdec)) {
11751 int r = vdec_sync_input(vdec);
11752 if (r >= 0x200) {
11753 WRITE_VREG(HEVC_DECODE_SIZE,
11754 READ_VREG(HEVC_DECODE_SIZE) + r);
11755
11756 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11757 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
11758 __func__,
11759 READ_VREG(HEVC_STREAM_LEVEL),
11760 READ_VREG(HEVC_STREAM_WR_PTR),
11761 READ_VREG(HEVC_STREAM_RD_PTR),
11762 READ_VREG(HEVC_MPC_E), r);
11763
11764 start_process_time(hevc);
11765 if (READ_VREG(HEVC_DEC_STATUS_REG)
11766 == HEVC_DECODE_BUFEMPTY2)
11767 WRITE_VREG(HEVC_DEC_STATUS_REG,
11768 HEVC_ACTION_DONE);
11769 else
11770 WRITE_VREG(HEVC_DEC_STATUS_REG,
11771 HEVC_ACTION_DEC_CONT);
11772 } else {
11773 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11774 vdec_schedule_work(&hevc->work);
11775 }
11776 return;
11777 }
11778
11779		/*below is for frame-based input*/
11780 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
11781 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11782 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
11783 __func__,
11784 READ_VREG(HEVC_STREAM_LEVEL),
11785 READ_VREG(HEVC_STREAM_WR_PTR),
11786 READ_VREG(HEVC_STREAM_RD_PTR),
11787 READ_VREG(HEVC_MPC_E));
11788 mutex_lock(&hevc->chunks_mutex);
11789 vdec_vframe_dirty(vdec, hevc->chunk);
11790 hevc->chunk = NULL;
11791 mutex_unlock(&hevc->chunks_mutex);
11792 vdec_clean_input(vdec);
11793 }
11794
11795 /*if (is_new_pic_available(hevc)) {*/
11796 if (run_ready(vdec, VDEC_HEVC)) {
11797 int r;
11798 int decode_size;
11799 r = vdec_prepare_input(vdec, &hevc->chunk);
11800 if (r < 0) {
11801 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11802
11803 hevc_print(hevc,
11804 PRINT_FLAG_VDEC_DETAIL,
11805 "amvdec_vh265: Insufficient data\n");
11806
11807 vdec_schedule_work(&hevc->work);
11808 return;
11809 }
11810 hevc->dec_result = DEC_RESULT_NONE;
11811 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11812 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
11813 __func__, r,
11814 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
11815 get_data_check_sum(hevc, r) : 0,
11816 READ_VREG(HEVC_MPC_E));
11817
11818 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
11819 int jj;
11820 u8 *data = NULL;
11821
11822 if (!hevc->chunk->block->is_mapped)
11823 data = codec_mm_vmap(
11824 hevc->chunk->block->start +
11825 hevc->chunk->offset, r);
11826 else
11827 data = ((u8 *)
11828 hevc->chunk->block->start_virt)
11829 + hevc->chunk->offset;
11830
11831 for (jj = 0; jj < r; jj++) {
11832 if ((jj & 0xf) == 0)
11833 hevc_print(hevc,
11834 PRINT_FRAMEBASE_DATA,
11835 "%06x:", jj);
11836 hevc_print_cont(hevc,
11837 PRINT_FRAMEBASE_DATA,
11838 "%02x ", data[jj]);
11839 if (((jj + 1) & 0xf) == 0)
11840 hevc_print_cont(hevc,
11841 PRINT_FRAMEBASE_DATA,
11842 "\n");
11843 }
11844
11845 if (!hevc->chunk->block->is_mapped)
11846 codec_mm_unmap_phyaddr(data);
11847 }
11848
11849 decode_size = hevc->chunk->size +
11850 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
11851 WRITE_VREG(HEVC_DECODE_SIZE,
11852 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
11853
11854 vdec_enable_input(vdec);
11855
11856 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11857 "%s: mpc %x\n",
11858 __func__, READ_VREG(HEVC_MPC_E));
11859
11860 start_process_time(hevc);
11861 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11862	} else {
11863 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11864
11865 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11866 * "amvdec_vh265: Insufficient data\n");
11867 */
11868
11869 vdec_schedule_work(&hevc->work);
11870 }
11871 return;
11872 } else if (hevc->dec_result == DEC_RESULT_DONE) {
11873 /* if (!hevc->ctx_valid)
11874 hevc->ctx_valid = 1; */
11875 decode_frame_count[hevc->index]++;
11876#ifdef DETREFILL_ENABLE
11877 if (hevc->is_swap &&
11878 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11879 if (hevc->delrefill_check == 2) {
11880 delrefill(hevc);
11881 amhevc_stop();
11882 }
11883 }
11884#endif
11885 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
11886 hevc->used_4k_num =
11887 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
11888 if (hevc->used_4k_num >= 0 &&
11889 hevc->cur_pic &&
11890 hevc->cur_pic->scatter_alloc
11891 == 1) {
11892 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
11893 "%s pic index %d scatter_alloc %d page_start %d\n",
11894 "decoder_mmu_box_free_idx_tail",
11895 hevc->cur_pic->index,
11896 hevc->cur_pic->scatter_alloc,
11897 hevc->used_4k_num);
11898 if (hevc->m_ins_flag)
11899 hevc_mmu_dma_check(hw_to_vdec(hevc));
11900 decoder_mmu_box_free_idx_tail(
11901 hevc->mmu_box,
11902 hevc->cur_pic->index,
11903 hevc->used_4k_num);
11904 hevc->cur_pic->scatter_alloc = 2;
11905 }
11906 }
11907 hevc->pic_decoded_lcu_idx =
11908 READ_VREG(HEVC_PARSER_LCU_START)
11909 & 0xffffff;
11910
11911 if (vdec->master == NULL && vdec->slave == NULL &&
11912 hevc->empty_flag == 0) {
11913 hevc->over_decode =
11914 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
11915 if (hevc->over_decode)
11916 hevc_print(hevc, 0,
11917 "!!!Over decode\n");
11918 }
11919
11920 if (is_log_enable(hevc))
11921 add_log(hevc,
11922 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
11923 __func__,
11924 hevc->dec_result,
11925 hevc->pic_decoded_lcu_idx,
11926 hevc->used_4k_num,
11927 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11928 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11929 hevc->start_shift_bytes
11930 );
11931
11932 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11933 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
11934 __func__,
11935 hevc->dec_result,
11936 READ_VREG(HEVC_STREAM_LEVEL),
11937 READ_VREG(HEVC_STREAM_WR_PTR),
11938 READ_VREG(HEVC_STREAM_RD_PTR),
11939 hevc->pic_decoded_lcu_idx,
11940 hevc->used_4k_num,
11941 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11942 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11943 hevc->start_shift_bytes
11944 );
11945
11946 hevc->used_4k_num = -1;
11947
11948 check_pic_decoded_error(hevc,
11949 hevc->pic_decoded_lcu_idx);
11950#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11951#if 1
11952 if (vdec->slave) {
11953 if (dv_debug & 0x1)
11954 vdec_set_flag(vdec->slave,
11955 VDEC_FLAG_SELF_INPUT_CONTEXT);
11956 else
11957 vdec_set_flag(vdec->slave,
11958 VDEC_FLAG_OTHER_INPUT_CONTEXT);
11959 }
11960#else
11961 if (vdec->slave) {
11962 if (no_interleaved_el_slice)
11963 vdec_set_flag(vdec->slave,
11964 VDEC_FLAG_INPUT_KEEP_CONTEXT);
11965 /* this will move real HW pointer for input */
11966 else
11967 vdec_set_flag(vdec->slave, 0);
11968 /* this will not move real HW pointer
11969 *and SL layer decoding
11970 *will start from same stream position
11971 *as current BL decoder
11972 */
11973 }
11974#endif
11975#endif
11976#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11977 hevc->shift_byte_count_lo
11978 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11979 if (vdec->slave) {
11980 /*cur is base, found enhance*/
11981 struct hevc_state_s *hevc_el =
11982 (struct hevc_state_s *)
11983 vdec->slave->private;
11984 if (hevc_el)
11985 hevc_el->shift_byte_count_lo =
11986 hevc->shift_byte_count_lo;
11987 } else if (vdec->master) {
11988 /*cur is enhance, found base*/
11989 struct hevc_state_s *hevc_ba =
11990 (struct hevc_state_s *)
11991 vdec->master->private;
11992 if (hevc_ba)
11993 hevc_ba->shift_byte_count_lo =
11994 hevc->shift_byte_count_lo;
11995 }
11996#endif
11997 mutex_lock(&hevc->chunks_mutex);
11998 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11999 hevc->chunk = NULL;
12000 mutex_unlock(&hevc->chunks_mutex);
12001 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
12002		/*
12003		 * stream base: stream buf empty or timeout
12004		 * frame base: vdec_prepare_input fail
12005		 */
12006 if (!vdec_has_more_input(vdec)) {
12007 hevc->dec_result = DEC_RESULT_EOS;
12008 vdec_schedule_work(&hevc->work);
12009 return;
12010 }
12011#ifdef AGAIN_HAS_THRESHOLD
12012 hevc->next_again_flag = 1;
12013#endif
12014 } else if (hevc->dec_result == DEC_RESULT_EOS) {
12015 struct PIC_s *pic;
12016 hevc->eos = 1;
12017#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12018 if ((vdec->master || vdec->slave) &&
12019 aux_data_is_avaible(hevc))
12020 dolby_get_meta(hevc);
12021#endif
12022 check_pic_decoded_error(hevc,
12023 hevc->pic_decoded_lcu_idx);
12024 pic = get_pic_by_POC(hevc, hevc->curr_POC);
12025 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12026 "%s: end of stream, last dec poc %d => 0x%pf\n",
12027 __func__, hevc->curr_POC, pic);
12028 flush_output(hevc, pic);
12029
12030 if (hevc->is_used_v4l)
12031 notify_v4l_eos(hw_to_vdec(hevc));
12032#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12033 hevc->shift_byte_count_lo
12034 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12035 if (vdec->slave) {
12036 /*cur is base, found enhance*/
12037 struct hevc_state_s *hevc_el =
12038 (struct hevc_state_s *)
12039 vdec->slave->private;
12040 if (hevc_el)
12041 hevc_el->shift_byte_count_lo =
12042 hevc->shift_byte_count_lo;
12043 } else if (vdec->master) {
12044 /*cur is enhance, found base*/
12045 struct hevc_state_s *hevc_ba =
12046 (struct hevc_state_s *)
12047 vdec->master->private;
12048 if (hevc_ba)
12049 hevc_ba->shift_byte_count_lo =
12050 hevc->shift_byte_count_lo;
12051 }
12052#endif
12053 mutex_lock(&hevc->chunks_mutex);
12054 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12055 hevc->chunk = NULL;
12056 mutex_unlock(&hevc->chunks_mutex);
12057 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
12058 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12059 "%s: force exit\n",
12060 __func__);
12061 if (hevc->stat & STAT_VDEC_RUN) {
12062 amhevc_stop();
12063 hevc->stat &= ~STAT_VDEC_RUN;
12064 }
12065 if (hevc->stat & STAT_ISR_REG) {
12066 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
12067 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
12068 hevc->stat &= ~STAT_ISR_REG;
12069 }
12070 hevc_print(hevc, 0, "%s: force exit end\n",
12071 __func__);
12072 }
12073
12074 if (hevc->stat & STAT_VDEC_RUN) {
12075 amhevc_stop();
12076 hevc->stat &= ~STAT_VDEC_RUN;
12077 }
12078
12079 if (hevc->stat & STAT_TIMER_ARM) {
12080 del_timer_sync(&hevc->timer);
12081 hevc->stat &= ~STAT_TIMER_ARM;
12082 }
12083
12084 wait_hevc_search_done(hevc);
12085#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12086 if (hevc->switch_dvlayer_flag) {
12087 if (vdec->slave)
12088 vdec_set_next_sched(vdec, vdec->slave);
12089 else if (vdec->master)
12090 vdec_set_next_sched(vdec, vdec->master);
12091 } else if (vdec->slave || vdec->master)
12092 vdec_set_next_sched(vdec, vdec);
12093#endif
12094
12095 if (from == 1) {
12096 /* This is a timeout work */
12097 if (work_pending(&hevc->work)) {
12098 /*
12099			 * vh265_work arrived at the last moment;
12100			 * give it a chance to handle the scenario.
12101 */
12102 return;
12103			//cancel_work_sync(&hevc->work);//reserved for future consideration
12104 }
12105 }
12106
12107 /* mark itself has all HW resource released and input released */
12108 if (vdec->parallel_dec == 1)
12109 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
12110 else
12111 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12112
12113 if (hevc->is_used_v4l) {
12114 struct aml_vcodec_ctx *ctx =
12115 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12116
12117 if (ctx->param_sets_from_ucode &&
12118 !hevc->v4l_params_parsed)
12119 vdec_v4l_write_frame_sync(ctx);
12120 }
12121
12122 if (hevc->vdec_cb)
12123 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
12124}
12125
12126static void vh265_work(struct work_struct *work)
12127{
12128 struct hevc_state_s *hevc = container_of(work,
12129 struct hevc_state_s, work);
12130 struct vdec_s *vdec = hw_to_vdec(hevc);
12131
12132 vh265_work_implement(hevc, vdec, 0);
12133}
12134
12135static void vh265_timeout_work(struct work_struct *work)
12136{
12137 struct hevc_state_s *hevc = container_of(work,
12138 struct hevc_state_s, timeout_work);
12139 struct vdec_s *vdec = hw_to_vdec(hevc);
12140
12141 if (work_pending(&hevc->work))
12142 return;
12143 vh265_work_implement(hevc, vdec, 1);
12144}
12145
12146
12147static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
12148{
12149 /* new to do ... */
12150 vh265_prot_init(hevc);
12151 return 0;
12152}
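/*
 * Scheduler hook: report which cores this instance could run on right now.
 * Returns 0 (not ready) on single-step debug, EOS, a low stream buffer
 * level, the AGAIN threshold, display/buffer count constraints or missing
 * v4l capture buffers; otherwise returns the HEVC (and, for non-parallel
 * decode, VDEC_1) core mask.
 */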
12153static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
12154{
12155 struct hevc_state_s *hevc =
12156 (struct hevc_state_s *)vdec->private;
12157 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
12158 CODEC_MM_FLAGS_TVP : 0;
12159 bool ret = 0;
12160 if (step == 0x12)
12161 return 0;
12162 else if (step == 0x11)
12163 step = 0x12;
12164
12165 if (hevc->eos)
12166 return 0;
12167 if (!hevc->first_sc_checked && hevc->mmu_enable) {
12168 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
12169		hevc->first_sc_checked = 1;
12170 hevc_print(hevc, 0,
12171 "vh265 cached=%d need_size=%d speed= %d ms\n",
12172 size, (hevc->need_cache_size >> PAGE_SHIFT),
12173 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
12174 }
12175 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
12176 && pre_decode_buf_level != 0) {
12177 u32 rp, wp, level;
12178
12179 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
12180 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
12181 if (wp < rp)
12182 level = vdec->input.size + wp - rp;
12183 else
12184 level = wp - rp;
12185
12186 if (level < pre_decode_buf_level)
12187 return 0;
12188 }
12189
12190#ifdef AGAIN_HAS_THRESHOLD
12191 if (hevc->next_again_flag &&
12192 (!vdec_frame_based(vdec))) {
12193 u32 parser_wr_ptr =
12194 READ_PARSER_REG(PARSER_VIDEO_WP);
12195 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
12196 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
12197 again_threshold) {
12198 int r = vdec_sync_input(vdec);
12199 hevc_print(hevc,
12200			PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
12201 return 0;
12202 }
12203 }
12204#endif
12205
12206 if (disp_vframe_valve_level &&
12207 kfifo_len(&hevc->display_q) >=
12208 disp_vframe_valve_level) {
12209 hevc->valve_count--;
12210 if (hevc->valve_count <= 0)
12211 hevc->valve_count = 2;
12212 else
12213 return 0;
12214 }
12215
12216 ret = is_new_pic_available(hevc);
12217 if (!ret) {
12218 hevc_print(hevc,
12219 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
12220 __func__, ret);
12221 }
12222
12223#ifdef CONSTRAIN_MAX_BUF_NUM
12224 if (hevc->pic_list_init_flag == 3) {
12225 if (run_ready_max_vf_only_num > 0 &&
12226 get_vf_ref_only_buf_count(hevc) >=
12227 run_ready_max_vf_only_num
12228 )
12229 ret = 0;
12230 if (run_ready_display_q_num > 0 &&
12231 kfifo_len(&hevc->display_q) >=
12232 run_ready_display_q_num)
12233 ret = 0;
12234
12235		/*avoid consuming more buffers when
12236		  switching resolution*/
12237 if (run_ready_max_buf_num == 0xff &&
12238 get_used_buf_count(hevc) >=
12239 get_work_pic_num(hevc))
12240 ret = 0;
12241 else if (run_ready_max_buf_num &&
12242 get_used_buf_count(hevc) >=
12243 run_ready_max_buf_num)
12244 ret = 0;
12245 }
12246#endif
12247
12248 if (hevc->is_used_v4l) {
12249 struct aml_vcodec_ctx *ctx =
12250 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12251
12252 if (ctx->param_sets_from_ucode &&
12253 !ctx->v4l_codec_ready &&
12254 hevc->v4l_params_parsed) {
12255			ret = 0; /*the params have been parsed.*/
12256 } else if (!ctx->v4l_codec_dpb_ready) {
12257 if (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
12258 run_ready_min_buf_num)
12259 ret = 0;
12260 }
12261 }
12262
12263 if (ret)
12264 not_run_ready[hevc->index] = 0;
12265 else
12266 not_run_ready[hevc->index]++;
12267 if (vdec->parallel_dec == 1)
12268 return ret ? (CORE_MASK_HEVC) : 0;
12269 else
12270 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
12271}
12272
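/*
 * Scheduler hook: actually run one decode pass. Prepares the next input
 * chunk, (re)loads or reuses the HEVC firmware, restores the hardware
 * context, programs the decode size and starts the core with the watchdog
 * timer armed.
 */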
12273static void run(struct vdec_s *vdec, unsigned long mask,
12274 void (*callback)(struct vdec_s *, void *), void *arg)
12275{
12276 struct hevc_state_s *hevc =
12277 (struct hevc_state_s *)vdec->private;
12278 int r, loadr = 0;
12279 unsigned char check_sum = 0;
12280
12281 run_count[hevc->index]++;
12282 hevc->vdec_cb_arg = arg;
12283 hevc->vdec_cb = callback;
12284 hevc->aux_data_dirty = 1;
12285 hevc_reset_core(vdec);
12286
12287#ifdef AGAIN_HAS_THRESHOLD
12288 hevc->pre_parser_wr_ptr =
12289 READ_PARSER_REG(PARSER_VIDEO_WP);
12290 hevc->next_again_flag = 0;
12291#endif
12292 r = vdec_prepare_input(vdec, &hevc->chunk);
12293 if (r < 0) {
12294 input_empty[hevc->index]++;
12295 hevc->dec_result = DEC_RESULT_AGAIN;
12296 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12297 "ammvdec_vh265: Insufficient data\n");
12298
12299 vdec_schedule_work(&hevc->work);
12300 return;
12301 }
12302 input_empty[hevc->index] = 0;
12303 hevc->dec_result = DEC_RESULT_NONE;
12304 if (vdec_frame_based(vdec) &&
12305 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12306 || is_log_enable(hevc)))
12307 check_sum = get_data_check_sum(hevc, r);
12308
12309 if (is_log_enable(hevc))
12310 add_log(hevc,
12311 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12312 __func__, r,
12313 check_sum,
12314 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12315 );
12316 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12317 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12318 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12319 __func__, r,
12320 check_sum,
12321 READ_VREG(HEVC_STREAM_LEVEL),
12322 READ_VREG(HEVC_STREAM_WR_PTR),
12323 READ_VREG(HEVC_STREAM_RD_PTR),
12324 READ_PARSER_REG(PARSER_VIDEO_RP),
12325 READ_PARSER_REG(PARSER_VIDEO_WP),
12326 hevc->start_shift_bytes
12327 );
12328 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12329 input_frame_based(vdec)) {
12330 int jj;
12331 u8 *data = NULL;
12332
12333 if (!hevc->chunk->block->is_mapped)
12334 data = codec_mm_vmap(hevc->chunk->block->start +
12335 hevc->chunk->offset, r);
12336 else
12337 data = ((u8 *)hevc->chunk->block->start_virt)
12338 + hevc->chunk->offset;
12339
12340 for (jj = 0; jj < r; jj++) {
12341 if ((jj & 0xf) == 0)
12342 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12343 "%06x:", jj);
12344 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12345 "%02x ", data[jj]);
12346 if (((jj + 1) & 0xf) == 0)
12347 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12348 "\n");
12349 }
12350
12351 if (!hevc->chunk->block->is_mapped)
12352 codec_mm_unmap_phyaddr(data);
12353 }
12354 if (vdec->mc_loaded) {
12355	/*firmware has been loaded before
12356	  and has not changed to another one;
12357	  ignore the reload.
12358	*/
12359 if (tee_enabled() && hevc->is_swap &&
12360 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12361 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12362 } else {
12363 if (hevc->mmu_enable)
12364 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12365 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12366 "h265_mmu", hevc->fw->data);
12367 else {
12368 if (!hevc->is_4k) {
12369				/* if an older version of the fw was loaded, */
12370				/* we need to try the noswap fw because the */
12371				/* old fw package does not contain the swap fw. */
12372 loadr = amhevc_vdec_loadmc_ex(
12373 VFORMAT_HEVC, vdec,
12374 "hevc_mmu_swap",
12375 hevc->fw->data);
12376 if (loadr < 0)
12377 loadr = amhevc_vdec_loadmc_ex(
12378 VFORMAT_HEVC, vdec,
12379 "h265_mmu",
12380 hevc->fw->data);
12381 else
12382 hevc->is_swap = true;
12383 } else
12384 loadr = amhevc_vdec_loadmc_ex(
12385 VFORMAT_HEVC, vdec,
12386 "h265_mmu", hevc->fw->data);
12387 }
12388 else
12389 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12390 NULL, hevc->fw->data);
12391 if (loadr < 0) {
12392 amhevc_disable();
12393 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12394 tee_enabled() ? "TEE" : "local", loadr);
12395 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12396 vdec_schedule_work(&hevc->work);
12397 return;
12398 }
12399
12400 if (tee_enabled() && hevc->is_swap &&
12401 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12402 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12403#ifdef DETREFILL_ENABLE
12404 if (hevc->is_swap &&
12405 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12406 init_detrefill_buf(hevc);
12407#endif
12408 vdec->mc_loaded = 1;
12409 vdec->mc_type = VFORMAT_HEVC;
12410 }
12411 if (vh265_hw_ctx_restore(hevc) < 0) {
12412 vdec_schedule_work(&hevc->work);
12413 return;
12414 }
12415 vdec_enable_input(vdec);
12416
12417 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12418
12419 if (vdec_frame_based(vdec)) {
12420 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12421 r = hevc->chunk->size +
12422 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12423 hevc->decode_size = r;
12424 }
12425#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12426 else {
12427 if (vdec->master || vdec->slave)
12428 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12429 hevc->shift_byte_count_lo);
12430 }
12431#endif
12432 WRITE_VREG(HEVC_DECODE_SIZE, r);
12433 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12434 hevc->init_flag = 1;
12435
12436 if (hevc->pic_list_init_flag == 3)
12437 init_pic_list_hw(hevc);
12438
12439 backup_decode_state(hevc);
12440
12441 start_process_time(hevc);
12442 mod_timer(&hevc->timer, jiffies);
12443 hevc->stat |= STAT_TIMER_ARM;
12444 hevc->stat |= STAT_ISR_REG;
12445 amhevc_start();
12446 hevc->stat |= STAT_VDEC_RUN;
12447}
12448
12449static void aml_free_canvas(struct vdec_s *vdec)
12450{
12451 int i;
12452 struct hevc_state_s *hevc =
12453 (struct hevc_state_s *)vdec->private;
12454
12455 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12456 struct PIC_s *pic = hevc->m_PIC[i];
12457
12458 if (pic) {
12459 if (vdec->parallel_dec == 1) {
12460 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
12461 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
12462 }
12463 }
12464 }
12465}
12466
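/*
 * Scheduler reset hook: stop the core and timer, drop any pending work,
 * release MV buffers and canvases, and re-run the local init so the
 * instance starts from a clean picture list.
 */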
12467static void reset(struct vdec_s *vdec)
12468{
12469 struct hevc_state_s *hevc =
12470 (struct hevc_state_s *)vdec->private;
12471 int i;
12472
12473 cancel_work_sync(&hevc->work);
12474 cancel_work_sync(&hevc->notify_work);
12475 if (hevc->stat & STAT_VDEC_RUN) {
12476 amhevc_stop();
12477 hevc->stat &= ~STAT_VDEC_RUN;
12478 }
12479
12480 if (hevc->stat & STAT_TIMER_ARM) {
12481 del_timer_sync(&hevc->timer);
12482 hevc->stat &= ~STAT_TIMER_ARM;
12483 }
12484 hevc->dec_result = DEC_RESULT_NONE;
12485 reset_process_time(hevc);
12486 hevc->init_flag = 0;
12487 hevc->pic_list_init_flag = 0;
12488 dealloc_mv_bufs(hevc);
12489 aml_free_canvas(vdec);
12490 hevc_local_uninit(hevc);
12491 if (vh265_local_init(hevc) < 0)
12492 pr_debug(" %s local init fail\n", __func__);
12493 for (i = 0; i < BUF_POOL_SIZE; i++) {
12494 hevc->m_BUF[i].start_adr = 0;
12495 }
12496
12497 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12498}
12499
12500static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12501{
12502 struct hevc_state_s *hevc =
12503 (struct hevc_state_s *)vdec->private;
12504
12505 return vh265_isr(0, hevc);
12506}
12507
12508static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12509{
12510 struct hevc_state_s *hevc =
12511 (struct hevc_state_s *)vdec->private;
12512
12513 return vh265_isr_thread_fn(0, hevc);
12514}
12515#endif
12516
12517static int amvdec_h265_probe(struct platform_device *pdev)
12518{
12519#ifdef MULTI_INSTANCE_SUPPORT
12520 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12521#else
12522 struct vdec_dev_reg_s *pdata =
12523 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12524#endif
12525 char *tmpbuf;
12526 int ret;
12527 struct hevc_state_s *hevc;
12528
12529 hevc = vmalloc(sizeof(struct hevc_state_s));
12530 if (hevc == NULL) {
12531 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12532 return -ENOMEM;
12533 }
12534 gHevc = hevc;
12535 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12536 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12537 H265_DEBUG_DIS_SYS_ERROR_PROC));
12538 memset(hevc, 0, sizeof(struct hevc_state_s));
12539 if (get_dbg_flag(hevc))
12540 hevc_print(hevc, 0, "%s\r\n", __func__);
12541 mutex_lock(&vh265_mutex);
12542
12543 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12544 (parser_sei_enable & 0x100) == 0)
12545 parser_sei_enable = 7; /*old 1*/
12546 hevc->m_ins_flag = 0;
12547 hevc->init_flag = 0;
12548 hevc->first_sc_checked = 0;
12549 hevc->uninit_list = 0;
12550 hevc->fatal_error = 0;
12551 hevc->show_frame_num = 0;
12552 hevc->frameinfo_enable = 1;
12553#ifdef MULTI_INSTANCE_SUPPORT
12554 hevc->platform_dev = pdev;
12555 platform_set_drvdata(pdev, pdata);
12556#endif
12557
12558 if (pdata == NULL) {
12559 hevc_print(hevc, 0,
12560 "\namvdec_h265 memory resource undefined.\n");
12561 vfree(hevc);
12562 mutex_unlock(&vh265_mutex);
12563 return -EFAULT;
12564 }
12565 if (mmu_enable_force == 0) {
12566 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12567 || double_write_mode == 0x10)
12568 hevc->mmu_enable = 0;
12569 else
12570 hevc->mmu_enable = 1;
12571 }
12572 if (init_mmu_buffers(hevc)) {
12573 hevc_print(hevc, 0,
12574 "\n 265 mmu init failed!\n");
12575 vfree(hevc);
12576 mutex_unlock(&vh265_mutex);
12577 return -EFAULT;
12578 }
12579
12580 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12581 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12582 if (ret < 0) {
12583 uninit_mmu_buffers(hevc);
12584 vfree(hevc);
12585 mutex_unlock(&vh265_mutex);
12586 return ret;
12587 }
12588 hevc->buf_size = work_buf_size;
12589
12590
12591 if (!vdec_secure(pdata)) {
12592 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12593 if (tmpbuf) {
12594 memset(tmpbuf, 0, work_buf_size);
12595 dma_sync_single_for_device(amports_get_dma_device(),
12596 hevc->buf_start,
12597 work_buf_size, DMA_TO_DEVICE);
12598 } else {
12599 tmpbuf = codec_mm_vmap(hevc->buf_start,
12600 work_buf_size);
12601 if (tmpbuf) {
12602 memset(tmpbuf, 0, work_buf_size);
12603 dma_sync_single_for_device(
12604 amports_get_dma_device(),
12605 hevc->buf_start,
12606 work_buf_size,
12607 DMA_TO_DEVICE);
12608 codec_mm_unmap_phyaddr(tmpbuf);
12609 }
12610 }
12611 }
12612
12613 if (get_dbg_flag(hevc)) {
12614 hevc_print(hevc, 0,
12615 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12616 hevc->buf_start, hevc->buf_size);
12617 }
12618
12619 if (pdata->sys_info)
12620 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12621 else {
12622 hevc->vh265_amstream_dec_info.width = 0;
12623 hevc->vh265_amstream_dec_info.height = 0;
12624 hevc->vh265_amstream_dec_info.rate = 30;
12625 }
12626#ifndef MULTI_INSTANCE_SUPPORT
12627 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12628 workaround_enable |= 3;
12629 hevc_print(hevc, 0,
12630 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
12631 } else
12632 workaround_enable &= ~3;
12633#endif
12634 hevc->cma_dev = pdata->cma_dev;
12635 vh265_vdec_info_init();
12636
12637#ifdef MULTI_INSTANCE_SUPPORT
12638 pdata->private = hevc;
12639 pdata->dec_status = vh265_dec_status;
12640 pdata->set_isreset = vh265_set_isreset;
12641 is_reset = 0;
12642 if (vh265_init(pdata) < 0) {
12643#else
12644 if (vh265_init(hevc) < 0) {
12645#endif
12646 hevc_print(hevc, 0,
12647 "\namvdec_h265 init failed.\n");
12648 hevc_local_uninit(hevc);
12649 uninit_mmu_buffers(hevc);
12650 vfree(hevc);
12651 pdata->dec_status = NULL;
12652 mutex_unlock(&vh265_mutex);
12653 return -ENODEV;
12654 }
12655 /*set the max clk for smooth playing...*/
12656 hevc_source_changed(VFORMAT_HEVC,
12657 3840, 2160, 60);
12658 mutex_unlock(&vh265_mutex);
12659
12660 return 0;
12661}
12662
12663static int amvdec_h265_remove(struct platform_device *pdev)
12664{
12665 struct hevc_state_s *hevc = gHevc;
12666
12667 if (get_dbg_flag(hevc))
12668 hevc_print(hevc, 0, "%s\r\n", __func__);
12669
12670 mutex_lock(&vh265_mutex);
12671
12672 vh265_stop(hevc);
12673
12674 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
12675
12676
12677#ifdef DEBUG_PTS
12678 hevc_print(hevc, 0,
12679 "pts missed %ld, pts hit %ld, duration %d\n",
12680 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
12681#endif
12682
12683 vfree(hevc);
12684 hevc = NULL;
12685 gHevc = NULL;
12686
12687 mutex_unlock(&vh265_mutex);
12688
12689 return 0;
12690}
12691/****************************************/
12692#ifdef CONFIG_PM
12693static int h265_suspend(struct device *dev)
12694{
12695 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
12696 return 0;
12697}
12698
12699static int h265_resume(struct device *dev)
12700{
12701 amhevc_resume(to_platform_device(dev));
12702 return 0;
12703}
12704
12705static const struct dev_pm_ops h265_pm_ops = {
12706 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
12707};
12708#endif
12709
12710static struct platform_driver amvdec_h265_driver = {
12711 .probe = amvdec_h265_probe,
12712 .remove = amvdec_h265_remove,
12713 .driver = {
12714 .name = DRIVER_NAME,
12715#ifdef CONFIG_PM
12716 .pm = &h265_pm_ops,
12717#endif
12718 }
12719};
12720
12721#ifdef MULTI_INSTANCE_SUPPORT
12722static void vh265_dump_state(struct vdec_s *vdec)
12723{
12724 int i;
12725 struct hevc_state_s *hevc =
12726 (struct hevc_state_s *)vdec->private;
12727 hevc_print(hevc, 0,
12728 "====== %s\n", __func__);
12729
12730 hevc_print(hevc, 0,
12731 "width/height (%d/%d), reorder_pic_num %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
12732 hevc->frame_width,
12733 hevc->frame_height,
12734 hevc->sps_num_reorder_pics_0,
12735 get_work_pic_num(hevc),
12736 hevc->video_signal_type_debug,
12737 hevc->is_swap
12738 );
12739
12740 hevc_print(hevc, 0,
12741 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
12742 input_frame_based(vdec),
12743 hevc->eos,
12744 hevc->dec_result,
12745 decode_frame_count[hevc->index],
12746 display_frame_count[hevc->index],
12747 run_count[hevc->index],
12748 not_run_ready[hevc->index],
12749 input_empty[hevc->index]
12750 );
12751
12752 if (vf_get_receiver(vdec->vf_provider_name)) {
12753 enum receviver_start_e state =
12754 vf_notify_receiver(vdec->vf_provider_name,
12755 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12756 NULL);
12757 hevc_print(hevc, 0,
12758 "\nreceiver(%s) state %d\n",
12759 vdec->vf_provider_name,
12760 state);
12761 }
12762
12763 hevc_print(hevc, 0,
12764 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
12765 __func__,
12766 kfifo_len(&hevc->newframe_q),
12767 VF_POOL_SIZE,
12768 kfifo_len(&hevc->display_q),
12769 VF_POOL_SIZE,
12770 hevc->vf_pre_count,
12771 hevc->vf_get_count,
12772 hevc->vf_put_count,
12773 hevc->pic_list_init_flag,
12774 is_new_pic_available(hevc)
12775 );
12776
12777 dump_pic_list(hevc);
12778
12779 for (i = 0; i < BUF_POOL_SIZE; i++) {
12780 hevc_print(hevc, 0,
12781 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12782 i,
12783 hevc->m_BUF[i].start_adr,
12784 hevc->m_BUF[i].size,
12785 hevc->m_BUF[i].used_flag);
12786 }
12787
12788 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12789 hevc_print(hevc, 0,
12790 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12791 i,
12792 hevc->m_mv_BUF[i].start_adr,
12793 hevc->m_mv_BUF[i].size,
12794 hevc->m_mv_BUF[i].used_flag);
12795 }
12796
12797 hevc_print(hevc, 0,
12798 "HEVC_DEC_STATUS_REG=0x%x\n",
12799 READ_VREG(HEVC_DEC_STATUS_REG));
12800 hevc_print(hevc, 0,
12801 "HEVC_MPC_E=0x%x\n",
12802 READ_VREG(HEVC_MPC_E));
12803 hevc_print(hevc, 0,
12804 "HEVC_DECODE_MODE=0x%x\n",
12805 READ_VREG(HEVC_DECODE_MODE));
12806 hevc_print(hevc, 0,
12807 "HEVC_DECODE_MODE2=0x%x\n",
12808 READ_VREG(HEVC_DECODE_MODE2));
12809 hevc_print(hevc, 0,
12810 "NAL_SEARCH_CTL=0x%x\n",
12811 READ_VREG(NAL_SEARCH_CTL));
12812 hevc_print(hevc, 0,
12813 "HEVC_PARSER_LCU_START=0x%x\n",
12814 READ_VREG(HEVC_PARSER_LCU_START));
12815 hevc_print(hevc, 0,
12816 "HEVC_DECODE_SIZE=0x%x\n",
12817 READ_VREG(HEVC_DECODE_SIZE));
12818 hevc_print(hevc, 0,
12819 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
12820 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
12821 hevc_print(hevc, 0,
12822 "HEVC_STREAM_START_ADDR=0x%x\n",
12823 READ_VREG(HEVC_STREAM_START_ADDR));
12824 hevc_print(hevc, 0,
12825 "HEVC_STREAM_END_ADDR=0x%x\n",
12826 READ_VREG(HEVC_STREAM_END_ADDR));
12827 hevc_print(hevc, 0,
12828 "HEVC_STREAM_LEVEL=0x%x\n",
12829 READ_VREG(HEVC_STREAM_LEVEL));
12830 hevc_print(hevc, 0,
12831 "HEVC_STREAM_WR_PTR=0x%x\n",
12832 READ_VREG(HEVC_STREAM_WR_PTR));
12833 hevc_print(hevc, 0,
12834 "HEVC_STREAM_RD_PTR=0x%x\n",
12835 READ_VREG(HEVC_STREAM_RD_PTR));
12836 hevc_print(hevc, 0,
12837 "PARSER_VIDEO_RP=0x%x\n",
12838 READ_PARSER_REG(PARSER_VIDEO_RP));
12839 hevc_print(hevc, 0,
12840 "PARSER_VIDEO_WP=0x%x\n",
12841 READ_PARSER_REG(PARSER_VIDEO_WP));
12842
12843 if (input_frame_based(vdec) &&
12844 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
12845 ) {
12846 int jj;
12847 if (hevc->chunk && hevc->chunk->block &&
12848 hevc->chunk->size > 0) {
12849 u8 *data = NULL;
12850 if (!hevc->chunk->block->is_mapped)
12851 data = codec_mm_vmap(hevc->chunk->block->start +
12852 hevc->chunk->offset, hevc->chunk->size);
12853 else
12854 data = ((u8 *)hevc->chunk->block->start_virt)
12855 + hevc->chunk->offset;
12856 hevc_print(hevc, 0,
12857 "frame data size 0x%x\n",
12858 hevc->chunk->size);
12859 for (jj = 0; jj < hevc->chunk->size; jj++) {
12860 if ((jj & 0xf) == 0)
12861 hevc_print(hevc,
12862 PRINT_FRAMEBASE_DATA,
12863 "%06x:", jj);
12864 hevc_print_cont(hevc,
12865 PRINT_FRAMEBASE_DATA,
12866 "%02x ", data[jj]);
12867 if (((jj + 1) & 0xf) == 0)
12868 hevc_print_cont(hevc,
12869 PRINT_FRAMEBASE_DATA,
12870 "\n");
12871 }
12872
12873 if (!hevc->chunk->block->is_mapped)
12874 codec_mm_unmap_phyaddr(data);
12875 }
12876 }
12877
12878}
12879
12880
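/*
 * Probe for the multi-instance (ammvdec) path: allocate the per-instance
 * state, wire up the vdec_s callbacks, parse the optional config string
 * (double write mode, v4l parameters, buffer margins), allocate the work
 * buffer and request the decoder cores.
 */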
12881static int ammvdec_h265_probe(struct platform_device *pdev)
12882{
12883
12884 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12885 struct hevc_state_s *hevc = NULL;
12886 int ret;
12887#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12888 int config_val;
12889#endif
12890 if (pdata == NULL) {
12891 pr_info("\nammvdec_h265 memory resource undefined.\n");
12892 return -EFAULT;
12893 }
12894
12895 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
12896 sizeof(struct hevc_state_s), GFP_KERNEL); */
12897 hevc = vmalloc(sizeof(struct hevc_state_s));
12898 if (hevc == NULL) {
12899 pr_info("\nammvdec_h265 device data allocation failed\n");
12900 return -ENOMEM;
12901 }
12902 memset(hevc, 0, sizeof(struct hevc_state_s));
12903
12904 /* the ctx from v4l2 driver. */
12905 hevc->v4l2_ctx = pdata->private;
12906
12907 pdata->private = hevc;
12908 pdata->dec_status = vh265_dec_status;
12909 /* pdata->set_trickmode = set_trickmode; */
12910 pdata->run_ready = run_ready;
12911 pdata->run = run;
12912 pdata->reset = reset;
12913 pdata->irq_handler = vh265_irq_cb;
12914 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
12915 pdata->dump_state = vh265_dump_state;
12916
12917 hevc->index = pdev->id;
12918 hevc->m_ins_flag = 1;
12919
12920 if (pdata->use_vfm_path) {
12921 snprintf(pdata->vf_provider_name,
12922 VDEC_PROVIDER_NAME_SIZE,
12923 VFM_DEC_PROVIDER_NAME);
12924 hevc->frameinfo_enable = 1;
12925 }
12926#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12927 else if (vdec_dual(pdata)) {
12928 struct hevc_state_s *hevc_pair = NULL;
12929
12930 if (dv_toggle_prov_name) /*debug purpose*/
12931 snprintf(pdata->vf_provider_name,
12932 VDEC_PROVIDER_NAME_SIZE,
12933 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
12934 VFM_DEC_DVEL_PROVIDER_NAME);
12935 else
12936 snprintf(pdata->vf_provider_name,
12937 VDEC_PROVIDER_NAME_SIZE,
12938 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
12939 VFM_DEC_DVBL_PROVIDER_NAME);
12940 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
12941 if (pdata->master)
12942 hevc_pair = (struct hevc_state_s *)
12943 pdata->master->private;
12944 else if (pdata->slave)
12945 hevc_pair = (struct hevc_state_s *)
12946 pdata->slave->private;
12947 if (hevc_pair)
12948 hevc->shift_byte_count_lo =
12949 hevc_pair->shift_byte_count_lo;
12950 }
12951#endif
12952 else
12953 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
12954 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
12955
12956 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
12957 &vh265_vf_provider, pdata);
12958
12959 hevc->provider_name = pdata->vf_provider_name;
12960 platform_set_drvdata(pdev, pdata);
12961
12962 hevc->platform_dev = pdev;
12963
12964 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
12965 pdata->config && pdata->config_len) {
12966#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12967		/*use ptr config for double_write_mode, etc*/
12968 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
12969
12970 if (get_config_int(pdata->config, "hevc_double_write_mode",
12971 &config_val) == 0)
12972 hevc->double_write_mode = config_val;
12973 else
12974 hevc->double_write_mode = double_write_mode;
12975
12976 if (get_config_int(pdata->config, "save_buffer_mode",
12977 &config_val) == 0)
12978 hevc->save_buffer_mode = config_val;
12979 else
12980 hevc->save_buffer_mode = 0;
12981
12982 /*use ptr config for max_pic_w, etc*/
12983 if (get_config_int(pdata->config, "hevc_buf_width",
12984 &config_val) == 0) {
12985 hevc->max_pic_w = config_val;
12986 }
12987 if (get_config_int(pdata->config, "hevc_buf_height",
12988 &config_val) == 0) {
12989 hevc->max_pic_h = config_val;
12990 }
12991
12992 if (get_config_int(pdata->config,
12993 "parm_v4l_codec_enable",
12994 &config_val) == 0)
12995 hevc->is_used_v4l = config_val;
12996
12997 if (get_config_int(pdata->config,
12998 "parm_v4l_buffer_margin",
12999 &config_val) == 0)
13000 hevc->dynamic_buf_num_margin = config_val;
13001
13002 if (get_config_int(pdata->config,
13003 "parm_v4l_canvas_mem_mode",
13004 &config_val) == 0)
13005 hevc->mem_map_mode = config_val;
13006#endif
13007 } else {
13008 if (pdata->sys_info)
13009 hevc->vh265_amstream_dec_info = *pdata->sys_info;
13010 else {
13011 hevc->vh265_amstream_dec_info.width = 0;
13012 hevc->vh265_amstream_dec_info.height = 0;
13013 hevc->vh265_amstream_dec_info.rate = 30;
13014 }
13015 hevc->double_write_mode = double_write_mode;
13016 }
13017 if (!hevc->is_used_v4l) {
13018 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
13019			hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
13020 else
13021 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
13022
13023 hevc->mem_map_mode = mem_map_mode;
13024 }
13025
13026 if (mmu_enable_force == 0) {
13027 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
13028 hevc->mmu_enable = 0;
13029 else
13030 hevc->mmu_enable = 1;
13031 }
13032
13033 if (init_mmu_buffers(hevc) < 0) {
13034 hevc_print(hevc, 0,
13035 "\n 265 mmu init failed!\n");
13036 mutex_unlock(&vh265_mutex);
13037 /* devm_kfree(&pdev->dev, (void *)hevc);*/
13038 if (hevc)
13039 vfree((void *)hevc);
13040 pdata->dec_status = NULL;
13041 return -EFAULT;
13042 }
13043#if 0
13044 hevc->buf_start = pdata->mem_start;
13045 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
13046#else
13047
13048 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
13049 BMMU_WORKSPACE_ID, work_buf_size,
13050 DRIVER_NAME, &hevc->buf_start);
13051 if (ret < 0) {
13052 uninit_mmu_buffers(hevc);
13053 /* devm_kfree(&pdev->dev, (void *)hevc); */
13054 if (hevc)
13055 vfree((void *)hevc);
13056 pdata->dec_status = NULL;
13057 mutex_unlock(&vh265_mutex);
13058 return ret;
13059 }
13060 hevc->buf_size = work_buf_size;
13061#endif
13062 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
13063 (parser_sei_enable & 0x100) == 0)
13064 parser_sei_enable = 7;
13065 hevc->init_flag = 0;
13066 hevc->first_sc_checked = 0;
13067 hevc->uninit_list = 0;
13068 hevc->fatal_error = 0;
13069 hevc->show_frame_num = 0;
13070
13071 /*
13072 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
13073 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
13074 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
13075 */
13076 if (get_dbg_flag(hevc)) {
13077 hevc_print(hevc, 0,
13078 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
13079 hevc->buf_start, hevc->buf_size);
13080 }
13081
13082 hevc_print(hevc, 0,
13083 "dynamic_buf_num_margin=%d\n",
13084 hevc->dynamic_buf_num_margin);
13085 hevc_print(hevc, 0,
13086 "double_write_mode=%d\n",
13087 hevc->double_write_mode);
13088
13089 hevc->cma_dev = pdata->cma_dev;
13090
13091 if (vh265_init(pdata) < 0) {
13092 hevc_print(hevc, 0,
13093 "\namvdec_h265 init failed.\n");
13094 hevc_local_uninit(hevc);
13095 uninit_mmu_buffers(hevc);
13096 /* devm_kfree(&pdev->dev, (void *)hevc); */
13097 if (hevc)
13098 vfree((void *)hevc);
13099 pdata->dec_status = NULL;
13100 return -ENODEV;
13101 }
13102
13103 vdec_set_prepare_level(pdata, start_decode_buf_level);
13104
13105 /*set the max clk for smooth playing...*/
13106 hevc_source_changed(VFORMAT_HEVC,
13107 3840, 2160, 60);
13108 if (pdata->parallel_dec == 1)
13109 vdec_core_request(pdata, CORE_MASK_HEVC);
13110 else
13111 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
13112 | CORE_MASK_COMBINE);
13113
13114 return 0;
13115}
13116
13117static int ammvdec_h265_remove(struct platform_device *pdev)
13118{
13119 struct hevc_state_s *hevc =
13120 (struct hevc_state_s *)
13121 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
13122 struct vdec_s *vdec = hw_to_vdec(hevc);
13123
13124 if (hevc == NULL)
13125 return 0;
13126
13127 if (get_dbg_flag(hevc))
13128 hevc_print(hevc, 0, "%s\r\n", __func__);
13129
13130 vmh265_stop(hevc);
13131
13132 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
13133 if (vdec->parallel_dec == 1)
13134 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
13135 else
13136		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
13137
13138 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
13139
13140 vfree((void *)hevc);
13141 return 0;
13142}
13143
13144static struct platform_driver ammvdec_h265_driver = {
13145 .probe = ammvdec_h265_probe,
13146 .remove = ammvdec_h265_remove,
13147 .driver = {
13148 .name = MULTI_DRIVER_NAME,
13149#ifdef CONFIG_PM
13150 .pm = &h265_pm_ops,
13151#endif
13152 }
13153};
13154#endif
13155
13156static struct codec_profile_t amvdec_h265_profile = {
13157 .name = "hevc",
13158 .profile = ""
13159};
13160
13161static struct codec_profile_t amvdec_h265_profile_single,
13162 amvdec_h265_profile_mult;
13163
13164static struct mconfig h265_configs[] = {
13165 MC_PU32("use_cma", &use_cma),
13166 MC_PU32("bit_depth_luma", &bit_depth_luma),
13167 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
13168 MC_PU32("video_signal_type", &video_signal_type),
13169#ifdef ERROR_HANDLE_DEBUG
13170 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
13171 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
13172#endif
13173 MC_PU32("radr", &radr),
13174 MC_PU32("rval", &rval),
13175 MC_PU32("dbg_cmd", &dbg_cmd),
13176 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
13177 MC_PU32("endian", &endian),
13178 MC_PU32("step", &step),
13179 MC_PU32("udebug_flag", &udebug_flag),
13180 MC_PU32("decode_pic_begin", &decode_pic_begin),
13181 MC_PU32("slice_parse_begin", &slice_parse_begin),
13182 MC_PU32("nal_skip_policy", &nal_skip_policy),
13183 MC_PU32("i_only_flag", &i_only_flag),
13184 MC_PU32("error_handle_policy", &error_handle_policy),
13185 MC_PU32("error_handle_threshold", &error_handle_threshold),
13186 MC_PU32("error_handle_nal_skip_threshold",
13187 &error_handle_nal_skip_threshold),
13188 MC_PU32("error_handle_system_threshold",
13189 &error_handle_system_threshold),
13190 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
13191 MC_PU32("debug", &debug),
13192 MC_PU32("debug_mask", &debug_mask),
13193 MC_PU32("buffer_mode", &buffer_mode),
13194 MC_PU32("double_write_mode", &double_write_mode),
13195 MC_PU32("buf_alloc_width", &buf_alloc_width),
13196 MC_PU32("buf_alloc_height", &buf_alloc_height),
13197 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
13198 MC_PU32("max_buf_num", &max_buf_num),
13199 MC_PU32("buf_alloc_size", &buf_alloc_size),
13200 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
13201 MC_PU32("mem_map_mode", &mem_map_mode),
13202 MC_PU32("enable_mem_saving", &enable_mem_saving),
13203 MC_PU32("force_w_h", &force_w_h),
13204 MC_PU32("force_fps", &force_fps),
13205 MC_PU32("max_decoding_time", &max_decoding_time),
13206 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
13207 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
13208 MC_PU32("interlace_enable", &interlace_enable),
13209 MC_PU32("pts_unstable", &pts_unstable),
13210 MC_PU32("parser_sei_enable", &parser_sei_enable),
13211 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
13212 MC_PU32("decode_timeout_val", &decode_timeout_val),
13213#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13214 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
13215 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
13216 MC_PU32("dv_debug", &dv_debug),
13217#endif
13218};
13219static struct mconfig_node decoder_265_node;
13220
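/*
 * Module init: pick the work buffer spec for the largest supported
 * resolution, register the single- and multi-instance platform drivers and
 * advertise the supported codec profiles.
 */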
13221static int __init amvdec_h265_driver_init_module(void)
13222{
13223 struct BuffInfo_s *p_buf_info;
13224
13225 if (vdec_is_support_4k()) {
13226 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
13227 p_buf_info = &amvh265_workbuff_spec[2];
13228 else
13229 p_buf_info = &amvh265_workbuff_spec[1];
13230 } else
13231 p_buf_info = &amvh265_workbuff_spec[0];
13232
13233 init_buff_spec(NULL, p_buf_info);
13234 work_buf_size =
13235 (p_buf_info->end_adr - p_buf_info->start_adr
13236 + 0xffff) & (~0xffff);
13237
13238 pr_debug("amvdec_h265 module init\n");
13239 error_handle_policy = 0;
13240
13241#ifdef ERROR_HANDLE_DEBUG
13242 dbg_nal_skip_flag = 0;
13243 dbg_nal_skip_count = 0;
13244#endif
13245 udebug_flag = 0;
13246 decode_pic_begin = 0;
13247 slice_parse_begin = 0;
13248 step = 0;
13249 buf_alloc_size = 0;
13250
13251#ifdef MULTI_INSTANCE_SUPPORT
13252 if (platform_driver_register(&ammvdec_h265_driver))
13253 pr_err("failed to register ammvdec_h265 driver\n");
13254
13255#endif
13256 if (platform_driver_register(&amvdec_h265_driver)) {
13257 pr_err("failed to register amvdec_h265 driver\n");
13258 return -ENODEV;
13259 }
13260#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
13261 if (!has_hevc_vdec()) {
13262 /* not support hevc */
13263 amvdec_h265_profile.name = "hevc_unsupport";
13264 }
13265 if (vdec_is_support_4k()) {
13266 if (is_meson_m8m2_cpu()) {
13267 /* m8m2 support 4k */
13268 amvdec_h265_profile.profile = "4k";
13269 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
13270 amvdec_h265_profile.profile =
13271 "8k, 8bit, 10bit, dwrite, compressed";
13272		} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
13273 amvdec_h265_profile.profile =
13274 "4k, 8bit, 10bit, dwrite, compressed";
13275 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
13276 amvdec_h265_profile.profile = "4k";
13277 }
13278#endif
13279 if (codec_mm_get_total_size() < 80 * SZ_1M) {
13280 pr_info("amvdec_h265 default mmu enabled.\n");
13281 mmu_enable = 1;
13282 }
13283
13284 vcodec_profile_register(&amvdec_h265_profile);
13285 amvdec_h265_profile_single = amvdec_h265_profile;
13286 amvdec_h265_profile_single.name = "h265";
13287 vcodec_profile_register(&amvdec_h265_profile_single);
13288 amvdec_h265_profile_mult = amvdec_h265_profile;
13289 amvdec_h265_profile_mult.name = "mh265";
13290 vcodec_profile_register(&amvdec_h265_profile_mult);
13291 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
13292 "h265", h265_configs, CONFIG_FOR_RW);
13293 return 0;
13294}
13295
13296static void __exit amvdec_h265_driver_remove_module(void)
13297{
13298 pr_debug("amvdec_h265 module remove.\n");
13299
13300#ifdef MULTI_INSTANCE_SUPPORT
13301 platform_driver_unregister(&ammvdec_h265_driver);
13302#endif
13303 platform_driver_unregister(&amvdec_h265_driver);
13304}
13305
13306/****************************************/
13307/*
13308 *module_param(stat, uint, 0664);
13309 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
13310 */
13311module_param(use_cma, uint, 0664);
13312MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13313
13314module_param(bit_depth_luma, uint, 0664);
13315MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13316
13317module_param(bit_depth_chroma, uint, 0664);
13318MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13319
13320module_param(video_signal_type, uint, 0664);
13321MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13322
13323#ifdef ERROR_HANDLE_DEBUG
13324module_param(dbg_nal_skip_flag, uint, 0664);
13325MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13326
13327module_param(dbg_nal_skip_count, uint, 0664);
13328MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13329#endif
13330
13331module_param(radr, uint, 0664);
13332MODULE_PARM_DESC(radr, "\n radr\n");
13333
13334module_param(rval, uint, 0664);
13335MODULE_PARM_DESC(rval, "\n rval\n");
13336
13337module_param(dbg_cmd, uint, 0664);
13338MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13339
13340module_param(dump_nal, uint, 0664);
13341MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13342
13343module_param(dbg_skip_decode_index, uint, 0664);
13344MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13345
13346module_param(endian, uint, 0664);
13347MODULE_PARM_DESC(endian, "\n endian\n");
13348
13349module_param(step, uint, 0664);
13350MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13351
13352module_param(decode_pic_begin, uint, 0664);
13353MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13354
13355module_param(slice_parse_begin, uint, 0664);
13356MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13357
13358module_param(nal_skip_policy, uint, 0664);
13359MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
13360
13361module_param(i_only_flag, uint, 0664);
13362MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
13363
13364module_param(fast_output_enable, uint, 0664);
13365MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
13366
13367module_param(error_handle_policy, uint, 0664);
13368MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
13369
13370module_param(error_handle_threshold, uint, 0664);
13371MODULE_PARM_DESC(error_handle_threshold,
13372 "\n amvdec_h265 error_handle_threshold\n");
13373
13374module_param(error_handle_nal_skip_threshold, uint, 0664);
13375MODULE_PARM_DESC(error_handle_nal_skip_threshold,
13376 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
13377
13378module_param(error_handle_system_threshold, uint, 0664);
13379MODULE_PARM_DESC(error_handle_system_threshold,
13380 "\n amvdec_h265 error_handle_system_threshold\n");
13381
13382module_param(error_skip_nal_count, uint, 0664);
13383MODULE_PARM_DESC(error_skip_nal_count,
13384 "\n amvdec_h265 error_skip_nal_count\n");
13385
13386module_param(debug, uint, 0664);
13387MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
13388
13389module_param(debug_mask, uint, 0664);
13390MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
13391
13392module_param(log_mask, uint, 0664);
13393MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
13394
13395module_param(buffer_mode, uint, 0664);
13396MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
13397
13398module_param(double_write_mode, uint, 0664);
13399MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
13400
13401module_param(buf_alloc_width, uint, 0664);
13402MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
13403
13404module_param(buf_alloc_height, uint, 0664);
13405MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
13406
13407module_param(dynamic_buf_num_margin, uint, 0664);
13408MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
13409
13410module_param(max_buf_num, uint, 0664);
13411MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
13412
13413module_param(buf_alloc_size, uint, 0664);
13414MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
13415
13416#ifdef CONSTRAIN_MAX_BUF_NUM
13417module_param(run_ready_max_vf_only_num, uint, 0664);
13418MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");
13419
13420module_param(run_ready_display_q_num, uint, 0664);
13421MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");
13422
13423module_param(run_ready_max_buf_num, uint, 0664);
13424MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
13425#endif
13426
13427#if 0
13428module_param(re_config_pic_flag, uint, 0664);
13429MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
13430#endif
13431
13432module_param(buffer_mode_dbg, uint, 0664);
13433MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");
13434
13435module_param(mem_map_mode, uint, 0664);
13436MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");
13437
13438module_param(enable_mem_saving, uint, 0664);
13439MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");
13440
13441module_param(force_w_h, uint, 0664);
13442MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
13443
13444module_param(force_fps, uint, 0664);
13445MODULE_PARM_DESC(force_fps, "\n force_fps\n");
13446
13447module_param(max_decoding_time, uint, 0664);
13448MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");
13449
13450module_param(prefix_aux_buf_size, uint, 0664);
13451MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");
13452
13453module_param(suffix_aux_buf_size, uint, 0664);
13454MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");
13455
13456module_param(interlace_enable, uint, 0664);
13457MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
13458module_param(pts_unstable, uint, 0664);
13459MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
13460module_param(parser_sei_enable, uint, 0664);
13461MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");
13462
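/* Dolby Vision metadata parsing parameters (DV-enabled builds only). */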
13463#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13464module_param(parser_dolby_vision_enable, uint, 0664);
13465MODULE_PARM_DESC(parser_dolby_vision_enable,
13466 "\n parser_dolby_vision_enable\n");
13467
13468module_param(dolby_meta_with_el, uint, 0664);
13469MODULE_PARM_DESC(dolby_meta_with_el,
13470 "\n dolby_meta_with_el\n");
13471
13472module_param(dolby_el_flush_th, uint, 0664);
13473MODULE_PARM_DESC(dolby_el_flush_th,
13474 "\n dolby_el_flush_th\n");
13475#endif
13476module_param(mmu_enable, uint, 0664);
13477MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");
13478
13479module_param(mmu_enable_force, uint, 0664);
13480MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");
13481
13482#ifdef MULTI_INSTANCE_SUPPORT
13483module_param(start_decode_buf_level, int, 0664);
13484MODULE_PARM_DESC(start_decode_buf_level,
13485 "\n h265 start_decode_buf_level\n");
13486
13487module_param(decode_timeout_val, uint, 0664);
13488MODULE_PARM_DESC(decode_timeout_val,
13489 "\n h265 decode_timeout_val\n");
13490
13491module_param(data_resend_policy, uint, 0664);
13492MODULE_PARM_DESC(data_resend_policy,
13493 "\n h265 data_resend_policy\n");
13494
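/*
 * Per-instance counters and tuning arrays, each sized by
 * max_decode_instance_num (one entry per decoder instance).
 */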
13495module_param_array(decode_frame_count, uint,
13496 &max_decode_instance_num, 0664);
13497
13498module_param_array(display_frame_count, uint,
13499 &max_decode_instance_num, 0664);
13500
13501module_param_array(max_process_time, uint,
13502 &max_decode_instance_num, 0664);
13503
13504module_param_array(max_get_frame_interval,
13505 uint, &max_decode_instance_num, 0664);
13506
13507module_param_array(run_count, uint,
13508 &max_decode_instance_num, 0664);
13509
13510module_param_array(input_empty, uint,
13511 &max_decode_instance_num, 0664);
13512
13513module_param_array(not_run_ready, uint,
13514 &max_decode_instance_num, 0664);
13515
13516module_param_array(ref_frame_mark_flag, uint,
13517 &max_decode_instance_num, 0664);
13518
13519#endif
13520#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13521module_param(dv_toggle_prov_name, uint, 0664);
13522MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");
13523
13524module_param(dv_debug, uint, 0664);
13525MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");
13526
13527module_param(force_bypass_dvenl, uint, 0664);
13528MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
13529#endif
13530
13531#ifdef AGAIN_HAS_THRESHOLD
13532module_param(again_threshold, uint, 0664);
13533MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
13534#endif
13535
13536module_param(force_disp_pic_index, int, 0664);
13537MODULE_PARM_DESC(force_disp_pic_index,
13538 "\n amvdec_h265 force_disp_pic_index\n");
13539
13540module_param(frmbase_cont_bitlevel, uint, 0664);
13541MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");
13542
13543module_param(udebug_flag, uint, 0664);
13544MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
13545
13546module_param(udebug_pause_pos, uint, 0664);
13547MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
13548
13549module_param(udebug_pause_val, uint, 0664);
13550MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
13551
13552module_param(pre_decode_buf_level, int, 0664);
13553MODULE_PARM_DESC(pre_decode_buf_level, "\n ammvdec_h265 pre_decode_buf_level\n");
13554
13555module_param(udebug_pause_decode_idx, uint, 0664);
13556MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
13557
13558module_param(disp_vframe_valve_level, uint, 0664);
13559MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");
13560
13561module_param(pic_list_debug, uint, 0664);
13562MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");
13563
13564module_param(without_display_mode, uint, 0664);
13565MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");
13566
13567module_init(amvdec_h265_driver_init_module);
13568module_exit(amvdec_h265_driver_remove_module);
13569
13570MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
13571MODULE_LICENSE("GPL");
13572MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");
13573