path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
blob: 170cc224125ed65973121b43088e0827c1b03f92
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50#include <media/v4l2-mem2mem.h>
51
52#define CONSTRAIN_MAX_BUF_NUM
53
54#define SWAP_HEVC_UCODE
55#define DETREFILL_ENABLE
56
57#define AGAIN_HAS_THRESHOLD
58/*#define TEST_NO_BUF*/
59#define HEVC_PIC_STRUCT_SUPPORT
60#define MULTI_INSTANCE_SUPPORT
61#define USE_UNINIT_SEMA
62
 63 /* 4k2k: 0x100000 per buffer
 64  * (was .buf_size = 0x100000*16) */
 65 /* 4096x2304: 0x120000 per buffer */
66#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
67#define MPRED_4K_MV_BUF_SIZE (0x120000)
68#define MPRED_MV_BUF_SIZE (0x40000)
69
70#define MMU_COMPRESS_HEADER_SIZE 0x48000
71#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
72
73#define MAX_FRAME_4K_NUM 0x1200
74#define MAX_FRAME_8K_NUM (0x1200*4)
75
76//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
77#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
78
79#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
80
81#define HEVC_CM_HEADER_START_ADDR 0x3628
82#define HEVC_SAO_MMU_VH1_ADDR 0x363b
83#define HEVC_SAO_MMU_VH0_ADDR 0x363a
84
85#define HEVC_DBLK_CFGB 0x350b
86#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
87#define SWAP_HEVC_OFFSET (3 * 0x1000)
88
89#define MEM_NAME "codec_265"
90/* #include <mach/am_regs.h> */
91#include <linux/amlogic/media/utils/vdec_reg.h>
92
93#include "../utils/vdec.h"
94#include "../utils/amvdec.h"
95#include <linux/amlogic/media/video_sink/video.h>
96#include <linux/amlogic/media/codec_mm/configs.h>
97
98#define SEND_LMEM_WITH_RPM
99#define SUPPORT_10BIT
100/* #define ERROR_HANDLE_DEBUG */
101
102#ifndef STAT_KTHREAD
103#define STAT_KTHREAD 0x40
104#endif
105
106#ifdef MULTI_INSTANCE_SUPPORT
107#define MAX_DECODE_INSTANCE_NUM 9
108#define MULTI_DRIVER_NAME "ammvdec_h265"
109#endif
110#define DRIVER_NAME "amvdec_h265"
111#define MODULE_NAME "amvdec_h265"
112#define DRIVER_HEADER_NAME "amvdec_h265_header"
113
114#define PUT_INTERVAL (HZ/100)
115#define ERROR_SYSTEM_RESET_COUNT 200
116
117#define PTS_NORMAL 0
118#define PTS_NONE_REF_USE_DURATION 1
119
120#define PTS_MODE_SWITCHING_THRESHOLD 3
121#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
122
123#define DUR2PTS(x) ((x)*90/96)
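/*
 * Worked example: the *90/96 scaling converts a frame duration in 1/96000 s
 * units to 90 kHz PTS ticks, so DUR2PTS(3840) = 3600, i.e. 40 ms in either
 * time base.
 */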
124
125#define MAX_SIZE_8K (8192 * 4608)
126#define MAX_SIZE_4K (4096 * 2304)
127
128#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
129#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
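/*
 * e.g. 3840x2160 (8294400 pixels): IS_4K_SIZE() is true (> 1920*1088 =
 * 2088960) while IS_8K_SIZE() is false (<= 4096*2304 = 9437184).
 */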
130
131#define SEI_UserDataITU_T_T35 4
132#define INVALID_IDX -1 /* Invalid buffer index.*/
133
134static struct semaphore h265_sema;
135
136struct hevc_state_s;
137static int hevc_print(struct hevc_state_s *hevc,
138 int debug_flag, const char *fmt, ...);
139static int hevc_print_cont(struct hevc_state_s *hevc,
140 int debug_flag, const char *fmt, ...);
141static int vh265_vf_states(struct vframe_states *states, void *);
142static struct vframe_s *vh265_vf_peek(void *);
143static struct vframe_s *vh265_vf_get(void *);
144static void vh265_vf_put(struct vframe_s *, void *);
145static int vh265_event_cb(int type, void *data, void *private_data);
146
147static int vh265_stop(struct hevc_state_s *hevc);
148#ifdef MULTI_INSTANCE_SUPPORT
149static int vmh265_stop(struct hevc_state_s *hevc);
150static s32 vh265_init(struct vdec_s *vdec);
151static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
152static void reset_process_time(struct hevc_state_s *hevc);
153static void start_process_time(struct hevc_state_s *hevc);
154static void restart_process_time(struct hevc_state_s *hevc);
155static void timeout_process(struct hevc_state_s *hevc);
156#else
157static s32 vh265_init(struct hevc_state_s *hevc);
158#endif
159static void vh265_prot_init(struct hevc_state_s *hevc);
160static int vh265_local_init(struct hevc_state_s *hevc);
161static void vh265_check_timer_func(unsigned long arg);
162static void config_decode_mode(struct hevc_state_s *hevc);
163
164static const char vh265_dec_id[] = "vh265-dev";
165
166#define PROVIDER_NAME "decoder.h265"
167#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
168
169static const struct vframe_operations_s vh265_vf_provider = {
170 .peek = vh265_vf_peek,
171 .get = vh265_vf_get,
172 .put = vh265_vf_put,
173 .event_cb = vh265_event_cb,
174 .vf_states = vh265_vf_states,
175};
176
177static struct vframe_provider_s vh265_vf_prov;
178
179static u32 bit_depth_luma;
180static u32 bit_depth_chroma;
181static u32 video_signal_type;
182
183static int start_decode_buf_level = 0x8000;
184
185static unsigned int decode_timeout_val = 200;
186
187static u32 run_ready_min_buf_num = 2;
188
189/*data_resend_policy:
190 bit 0: for stream-based input, resend the data when the decode buffer is empty
191*/
192static u32 data_resend_policy = 1;
193
194#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
195/*
196static const char * const video_format_names[] = {
197 "component", "PAL", "NTSC", "SECAM",
198 "MAC", "unspecified", "unspecified", "unspecified"
199};
200
201static const char * const color_primaries_names[] = {
202 "unknown", "bt709", "undef", "unknown",
203 "bt470m", "bt470bg", "smpte170m", "smpte240m",
204 "film", "bt2020"
205};
206
207static const char * const transfer_characteristics_names[] = {
208 "unknown", "bt709", "undef", "unknown",
209 "bt470m", "bt470bg", "smpte170m", "smpte240m",
210 "linear", "log100", "log316", "iec61966-2-4",
211 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
212 "smpte-st-2084", "smpte-st-428"
213};
214
215static const char * const matrix_coeffs_names[] = {
216 "GBR", "bt709", "undef", "unknown",
217 "fcc", "bt470bg", "smpte170m", "smpte240m",
218 "YCgCo", "bt2020nc", "bt2020c"
219};
220*/
221#ifdef SUPPORT_10BIT
222#define HEVC_CM_BODY_START_ADDR 0x3626
223#define HEVC_CM_BODY_LENGTH 0x3627
224#define HEVC_CM_HEADER_LENGTH 0x3629
225#define HEVC_CM_HEADER_OFFSET 0x362b
226#define HEVC_SAO_CTRL9 0x362d
227#define LOSLESS_COMPRESS_MODE
228/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
229/* double_write_mode:
230 * 0, no double write;
231 * 1, 1:1 ratio;
232 * 2, (1/4):(1/4) ratio;
233 * 3, (1/4):(1/4) ratio, with the compressed frame also included;
234 * 4, (1/2):(1/2) ratio;
235 * 0x10, double write only;
236 * 0x100, if > 1080p, use mode 4, else use mode 1;
237 * 0x200, if > 1080p, use mode 2, else use mode 1;
238 * 0x300, if > 720p, use mode 4, else use mode 1;
239 */
240static u32 double_write_mode;
241
242/*#define DECOMP_HEADR_SURGENT*/
243
244static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
245static u32 enable_mem_saving = 1;
246static u32 workaround_enable;
247static u32 force_w_h;
248#endif
249static u32 force_fps;
250static u32 pts_unstable;
251#define H265_DEBUG_BUFMGR 0x01
252#define H265_DEBUG_BUFMGR_MORE 0x02
253#define H265_DEBUG_DETAIL 0x04
254#define H265_DEBUG_REG 0x08
255#define H265_DEBUG_MAN_SEARCH_NAL 0x10
256#define H265_DEBUG_MAN_SKIP_NAL 0x20
257#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
258#define H265_DEBUG_FORCE_CLK 0x80
259#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
260#define H265_DEBUG_NO_DISPLAY 0x200
261#define H265_DEBUG_DISCARD_NAL 0x400
262#define H265_DEBUG_OUT_PTS 0x800
263#define H265_DEBUG_DUMP_PIC_LIST 0x1000
264#define H265_DEBUG_PRINT_SEI 0x2000
265#define H265_DEBUG_PIC_STRUCT 0x4000
266#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
267#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
268#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
269#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
270#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
271#define H265_DEBUG_HW_RESET 0x100000
272#define H265_CFG_CANVAS_IN_DECODE 0x200000
273#define H265_DEBUG_DV 0x400000
274#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
275#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
276#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
277#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
278#ifdef MULTI_INSTANCE_SUPPORT
279#define PRINT_FLAG_ERROR 0x0
280#define IGNORE_PARAM_FROM_CONFIG 0x08000000
281#define PRINT_FRAMEBASE_DATA 0x10000000
282#define PRINT_FLAG_VDEC_STATUS 0x20000000
283#define PRINT_FLAG_VDEC_DETAIL 0x40000000
284#define PRINT_FLAG_V4L_DETAIL 0x80000000
285#endif
286
287#define BUF_POOL_SIZE 32
288#define MAX_BUF_NUM 24
289#define MAX_REF_PIC_NUM 24
290#define MAX_REF_ACTIVE 16
291
292#ifdef MV_USE_FIXED_BUF
293#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
294#define VF_BUFFER_IDX(n) (n)
295#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
296#else
297#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
298#define VF_BUFFER_IDX(n) (n)
299#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
300#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
301#endif
302
303#define HEVC_MV_INFO 0x310d
304#define HEVC_QP_INFO 0x3137
305#define HEVC_SKIP_INFO 0x3136
306
307const u32 h265_version = 201602101;
308static u32 debug_mask = 0xffffffff;
309static u32 log_mask;
310static u32 debug;
311static u32 radr;
312static u32 rval;
313static u32 dbg_cmd;
314static u32 dump_nal;
315static u32 dbg_skip_decode_index;
316static u32 endian = 0xff0;
317#ifdef ERROR_HANDLE_DEBUG
318static u32 dbg_nal_skip_flag;
319 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
320static u32 dbg_nal_skip_count;
321#endif
322/*for debug*/
323/*
324 udebug_flag:
325 bit 0, enable ucode print
326 bit 1, enable ucode detail print
327 bit [31:16] not 0, pos to dump lmem
328 bit 2, pop bits to lmem
329 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
330*/
331static u32 udebug_flag;
332/*
333 when udebug_flag[1:0] is not 0
334 and udebug_pause_pos is not 0,
335 udebug_pause_pos is the position to pause at
336*/
337static u32 udebug_pause_pos;
338/*
339 when udebug_flag[1:0] is not 0
340 and udebug_pause_pos is not 0,
341 pause only when DEBUG_REG2 is equal to this val
342*/
343static u32 udebug_pause_val;
344
345static u32 udebug_pause_decode_idx;
346
347static u32 decode_pic_begin;
348static uint slice_parse_begin;
349static u32 step;
350static bool is_reset;
351
352#ifdef CONSTRAIN_MAX_BUF_NUM
353static u32 run_ready_max_vf_only_num;
354static u32 run_ready_display_q_num;
 355 /* 0: do not check
 356 0xff: use work_pic_num
 357 */
358static u32 run_ready_max_buf_num = 0xff;
359#endif
360
361static u32 dynamic_buf_num_margin = 7;
362static u32 buf_alloc_width;
363static u32 buf_alloc_height;
364
365static u32 max_buf_num = 16;
366static u32 buf_alloc_size;
367/*static u32 re_config_pic_flag;*/
368/*
369 *bit[0]: 0,
370 * bit[1]: 0, always release cma buffer when stop
371 * bit[1]: 1, never release cma buffer when stop
372 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
373 * do not release cma buffer if blackout is not 1
374 *
375 *bit[2]: 0, when start decoding, check current displayed buffer
376 * (only for buffer decoded by h265) if blackout is 0
377 * 1, do not check current displayed buffer
378 *
379 *bit[3]: 1, if blackout is not 1, never release the current
380 * displayed cma buffer
381 */
382/* set to 1 for fast play;
383 * set to 8 for other case of "keep last frame"
384 */
385static u32 buffer_mode = 1;
386
387/* buffer_mode_dbg: debug only*/
388static u32 buffer_mode_dbg = 0xffff0000;
389/**/
390/*
391 *bit[1:0] PB_skip_mode: 0, start decoding at the beginning;
392 *1, start decoding after the first I picture;
393 *2, only decode and display non-error pictures;
394 *3, start decoding and displaying after IDR, etc.
395 *bit[31:16] PB_skip_count_after_decoding (decoded but not displayed),
396 *only for modes 0 and 1.
397 */
398static u32 nal_skip_policy = 2;
399
400/*
401 *bit 0, 1: only display I picture;
402 *bit 1, 1: only decode I picture;
403 */
404static u32 i_only_flag;
405
406/*
407bit 0, fast output first I picture
408*/
409static u32 fast_output_enable = 1;
410
411static u32 frmbase_cont_bitlevel = 0x60;
412
413/*
414 use_cma: 1, use both reserved memory and cma for buffers
415 2, only use cma for buffers
416*/
417static u32 use_cma = 2;
418
419#define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
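/* e.g. AUX_BUF_ALIGN(0x1001) == 0x1010: addresses are rounded up to a 16-byte boundary */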
420static u32 prefix_aux_buf_size = (16 * 1024);
421static u32 suffix_aux_buf_size;
422
423static u32 max_decoding_time;
424/*
425 *error handling
426 */
427/*error_handle_policy:
428 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
429 *1, skip error_skip_nal_count nals before error recovery;
430 *bit 1 (valid only when bit0 == 1):
431 *1, wait vps/sps/pps after error recovery;
432 *bit 2 (valid only when bit0 == 0):
433 *0, auto search after error recovery (hevc_recover() called);
434 *1, manual search after error recovery
435 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
436 *
437 *bit 4: 0, set error_mark after reset/recover
438 * 1, do not set error_mark after reset/recover
439 *bit 5: 0, check total lcu for every picture
440 * 1, do not check total lcu
441 *bit 6: 0, do not check head error
442 * 1, check head error
443 *
444 */
445
446static u32 error_handle_policy;
447static u32 error_skip_nal_count = 6;
448static u32 error_handle_threshold = 30;
449static u32 error_handle_nal_skip_threshold = 10;
450static u32 error_handle_system_threshold = 30;
451static u32 interlace_enable = 1;
452static u32 fr_hint_status;
453
454 /*
455 *parser_sei_enable:
456 * bit 0, sei;
457 * bit 1, sei_suffix (fill aux buf)
458 * bit 2, fill sei to aux buf (when bit 0 is 1)
459 * bit 8, debug flag
460 */
461static u32 parser_sei_enable;
462#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
463static u32 parser_dolby_vision_enable = 1;
464static u32 dolby_meta_with_el;
465static u32 dolby_el_flush_th = 2;
466#endif
467/* this is only for h265 mmu enable */
468
469static u32 mmu_enable = 1;
470static u32 mmu_enable_force;
471static u32 work_buf_size;
472static unsigned int force_disp_pic_index;
473static unsigned int disp_vframe_valve_level;
474static int pre_decode_buf_level = 0x1000;
475static unsigned int pic_list_debug;
476
477
478#ifdef MULTI_INSTANCE_SUPPORT
479static unsigned int max_decode_instance_num
480 = MAX_DECODE_INSTANCE_NUM;
481static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
482static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
483static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
484static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
485static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
486static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
487static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
488static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
489{1, 1, 1, 1, 1, 1, 1, 1, 1};
490
491#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
492static unsigned char get_idx(struct hevc_state_s *hevc);
493#endif
494
495#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
496static u32 dv_toggle_prov_name;
497
498static u32 dv_debug;
499
500static u32 force_bypass_dvenl;
501#endif
502#endif
503
504
505#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
506#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
507#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
508#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
509#else
510#define get_dbg_flag(hevc) debug
511#define get_dbg_flag2(hevc) debug
512#define is_log_enable(hevc) (log_mask ? 1 : 0)
513#define get_valid_double_write_mode(hevc) double_write_mode
514#define get_buf_alloc_width(hevc) buf_alloc_width
515#define get_buf_alloc_height(hevc) buf_alloc_height
516#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
517#endif
518#define get_buffer_mode(hevc) buffer_mode
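/*
 * Example: in the CONFIG_AMLOGIC_MEDIA_MULTI_DEC build, debug_mask selects
 * which decoder instances honour the `debug` flags; with debug_mask = 0x1
 * only instance 0 gets a non-zero get_dbg_flag().
 */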
519
520
521DEFINE_SPINLOCK(lock);
522struct task_struct *h265_task = NULL;
523#undef DEBUG_REG
524#ifdef DEBUG_REG
525void WRITE_VREG_DBG(unsigned adr, unsigned val)
526{
527 if (debug & H265_DEBUG_REG)
528 pr_info("%s(%x, %x)\n", __func__, adr, val);
529 WRITE_VREG(adr, val);
530}
531
532#undef WRITE_VREG
533#define WRITE_VREG WRITE_VREG_DBG
534#endif
535
536static DEFINE_MUTEX(vh265_mutex);
537
538static DEFINE_MUTEX(vh265_log_mutex);
539
540static struct vdec_info *gvs;
541
542static u32 without_display_mode;
543
544/**************************************************
545 *
546 *h265 buffer management include
547 *
548 ***************************************************
549 */
550enum NalUnitType {
551 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
552 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
553
554 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
555 /* Current name in the spec: TSA_R */
556 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
557
558 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
559 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
560
561 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
562 /* Current name in the spec: RADL_R */
563 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
564
565 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
566 /* Current name in the spec: RASL_R */
567 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
568
569 NAL_UNIT_RESERVED_10,
570 NAL_UNIT_RESERVED_11,
571 NAL_UNIT_RESERVED_12,
572 NAL_UNIT_RESERVED_13,
573 NAL_UNIT_RESERVED_14,
574 NAL_UNIT_RESERVED_15,
575
576 /* Current name in the spec: BLA_W_LP */
577 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
578 /* Current name in the spec: BLA_W_DLP */
579 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
580 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
581 /* Current name in the spec: IDR_W_DLP */
582 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
583 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
584 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
585 NAL_UNIT_RESERVED_22,
586 NAL_UNIT_RESERVED_23,
587
588 NAL_UNIT_RESERVED_24,
589 NAL_UNIT_RESERVED_25,
590 NAL_UNIT_RESERVED_26,
591 NAL_UNIT_RESERVED_27,
592 NAL_UNIT_RESERVED_28,
593 NAL_UNIT_RESERVED_29,
594 NAL_UNIT_RESERVED_30,
595 NAL_UNIT_RESERVED_31,
596
597 NAL_UNIT_VPS, /* 32 */
598 NAL_UNIT_SPS, /* 33 */
599 NAL_UNIT_PPS, /* 34 */
600 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
601 NAL_UNIT_EOS, /* 36 */
602 NAL_UNIT_EOB, /* 37 */
603 NAL_UNIT_FILLER_DATA, /* 38 */
604 NAL_UNIT_SEI, /* 39 Prefix SEI */
605 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
606 NAL_UNIT_RESERVED_41,
607 NAL_UNIT_RESERVED_42,
608 NAL_UNIT_RESERVED_43,
609 NAL_UNIT_RESERVED_44,
610 NAL_UNIT_RESERVED_45,
611 NAL_UNIT_RESERVED_46,
612 NAL_UNIT_RESERVED_47,
613 NAL_UNIT_UNSPECIFIED_48,
614 NAL_UNIT_UNSPECIFIED_49,
615 NAL_UNIT_UNSPECIFIED_50,
616 NAL_UNIT_UNSPECIFIED_51,
617 NAL_UNIT_UNSPECIFIED_52,
618 NAL_UNIT_UNSPECIFIED_53,
619 NAL_UNIT_UNSPECIFIED_54,
620 NAL_UNIT_UNSPECIFIED_55,
621 NAL_UNIT_UNSPECIFIED_56,
622 NAL_UNIT_UNSPECIFIED_57,
623 NAL_UNIT_UNSPECIFIED_58,
624 NAL_UNIT_UNSPECIFIED_59,
625 NAL_UNIT_UNSPECIFIED_60,
626 NAL_UNIT_UNSPECIFIED_61,
627 NAL_UNIT_UNSPECIFIED_62,
628 NAL_UNIT_UNSPECIFIED_63,
629 NAL_UNIT_INVALID,
630};
631
632/* --------------------------------------------------- */
633/* Amrisc Software Interrupt */
634/* --------------------------------------------------- */
635#define AMRISC_STREAM_EMPTY_REQ 0x01
636#define AMRISC_PARSER_REQ 0x02
637#define AMRISC_MAIN_REQ 0x04
638
639/* --------------------------------------------------- */
640/* HEVC_DEC_STATUS define */
641/* --------------------------------------------------- */
642#define HEVC_DEC_IDLE 0x0
643#define HEVC_NAL_UNIT_VPS 0x1
644#define HEVC_NAL_UNIT_SPS 0x2
645#define HEVC_NAL_UNIT_PPS 0x3
646#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
647#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
648#define HEVC_SLICE_DECODING 0x6
649#define HEVC_NAL_UNIT_SEI 0x7
650#define HEVC_SLICE_SEGMENT_DONE 0x8
651#define HEVC_NAL_SEARCH_DONE 0x9
652#define HEVC_DECPIC_DATA_DONE 0xa
653#define HEVC_DECPIC_DATA_ERROR 0xb
654#define HEVC_SEI_DAT 0xc
655#define HEVC_SEI_DAT_DONE 0xd
656#define HEVC_NAL_DECODE_DONE 0xe
657#define HEVC_OVER_DECODE 0xf
658
659#define HEVC_DATA_REQUEST 0x12
660
661#define HEVC_DECODE_BUFEMPTY 0x20
662#define HEVC_DECODE_TIMEOUT 0x21
663#define HEVC_SEARCH_BUFEMPTY 0x22
664#define HEVC_DECODE_OVER_SIZE 0x23
665#define HEVC_DECODE_BUFEMPTY2 0x24
666#define HEVC_FIND_NEXT_PIC_NAL 0x50
667#define HEVC_FIND_NEXT_DVEL_NAL 0x51
668
669#define HEVC_DUMP_LMEM 0x30
670
671#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
672#define HEVC_DISCARD_NAL 0xf0
673#define HEVC_ACTION_DEC_CONT 0xfd
674#define HEVC_ACTION_ERROR 0xfe
675#define HEVC_ACTION_DONE 0xff
676
677/* --------------------------------------------------- */
678/* Include "parser_cmd.h" */
679/* --------------------------------------------------- */
680#define PARSER_CMD_SKIP_CFG_0 0x0000090b
681
682#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
683
684#define PARSER_CMD_SKIP_CFG_2 0x001b1910
685
686#define PARSER_CMD_NUMBER 37
687
688/**************************************************
689 *
690 *h265 buffer management
691 *
692 ***************************************************
693 */
694/* #define BUFFER_MGR_ONLY */
695/* #define CONFIG_HEVC_CLK_FORCED_ON */
696/* #define ENABLE_SWAP_TEST */
697#define MCRCC_ENABLE
698#define INVALID_POC 0x80000000
699
700#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
701#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
702#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
703#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
704#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
705#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
706#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
707#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
708#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
709#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
710#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
711#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
712#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
713#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
714#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
715#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
716#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
717#ifdef ENABLE_SWAP_TEST
718#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
719#endif
720
721/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
722/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
723#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
724 /*do not define ENABLE_SWAP_TEST*/
725#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
726#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
727
728#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
729#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
730/*
731 *ucode parser/search control
732 *bit 0: 0, header auto parse; 1, header manual parse
 733 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
734 *bit [3:2]: valid when bit1==0;
735 *0, auto skip nal before first vps/sps/pps/idr;
736 *1, auto skip nal before first vps/sps/pps
737 *2, auto skip nal before first vps/sps/pps,
738 * and not decode until the first I slice (with slice address of 0)
739 *
740 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
741 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
742 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
743 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
744 *bit [17]: for NAL_SEI when bit0 is 0:
745 * 0, do not parse/fetch SEI in ucode;
746 * 1, parse/fetch SEI in ucode
747 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
748 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
 749 * 1, fetch NAL_SEI_SUFFIX data to aux buf
750 *bit [19]:
751 * 0, parse NAL_SEI in ucode
752 * 1, fetch NAL_SEI to aux buf
753 *bit [20]: for DOLBY_VISION_META
754 * 0, do not fetch DOLBY_VISION_META to aux buf
755 * 1, fetch DOLBY_VISION_META to aux buf
756 */
757#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
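/*
 * Illustrative fragment (not driver code): composing a NAL_SEARCH_CTL value
 * from the bit definitions documented above.
 */
#if 0
	{
		u32 search_ctl = 0;

		search_ctl |= 1 << 17; /* parse/fetch SEI in ucode */
		search_ctl |= 1 << 18; /* fetch NAL_SEI_SUFFIX data to aux buf */
		WRITE_VREG(NAL_SEARCH_CTL, search_ctl);
	}
#endif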
758 /*read only*/
759#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
760 /*
761 [15 : 8] rps_set_id
762 [7 : 0] start_decoding_flag
763 */
764#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
 765 /* set before starting the decoder */
766#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
767#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
768#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
769
770#define DECODE_MODE_SINGLE 0x0
771#define DECODE_MODE_MULTI_FRAMEBASE 0x1
772#define DECODE_MODE_MULTI_STREAMBASE 0x2
773#define DECODE_MODE_MULTI_DVBAL 0x3
774#define DECODE_MODE_MULTI_DVENL 0x4
775
776#define MAX_INT 0x7FFFFFFF
777
778#define RPM_BEGIN 0x100
779#define modification_list_cur 0x148
780#define RPM_END 0x180
781
782#define RPS_USED_BIT 14
783/* MISC_FLAG0 */
784#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
785#define PCM_ENABLE_FLAG_BIT 1
786#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
787#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
788#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
789#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
790#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
791#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
792#define SLICE_SAO_LUMA_FLAG_BIT 8
793#define SLICE_SAO_CHROMA_FLAG_BIT 9
794#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
795
796union param_u {
797 struct {
798 unsigned short data[RPM_END - RPM_BEGIN];
799 } l;
800 struct {
801 /* from ucode lmem, do not change this struct */
802 unsigned short CUR_RPS[0x10];
803 unsigned short num_ref_idx_l0_active;
804 unsigned short num_ref_idx_l1_active;
805 unsigned short slice_type;
806 unsigned short slice_temporal_mvp_enable_flag;
807 unsigned short dependent_slice_segment_flag;
808 unsigned short slice_segment_address;
809 unsigned short num_title_rows_minus1;
810 unsigned short pic_width_in_luma_samples;
811 unsigned short pic_height_in_luma_samples;
812 unsigned short log2_min_coding_block_size_minus3;
813 unsigned short log2_diff_max_min_coding_block_size;
814 unsigned short log2_max_pic_order_cnt_lsb_minus4;
815 unsigned short POClsb;
816 unsigned short collocated_from_l0_flag;
817 unsigned short collocated_ref_idx;
818 unsigned short log2_parallel_merge_level;
819 unsigned short five_minus_max_num_merge_cand;
820 unsigned short sps_num_reorder_pics_0;
821 unsigned short modification_flag;
822 unsigned short tiles_enabled_flag;
823 unsigned short num_tile_columns_minus1;
824 unsigned short num_tile_rows_minus1;
825 unsigned short tile_width[8];
826 unsigned short tile_height[8];
827 unsigned short misc_flag0;
828 unsigned short pps_beta_offset_div2;
829 unsigned short pps_tc_offset_div2;
830 unsigned short slice_beta_offset_div2;
831 unsigned short slice_tc_offset_div2;
832 unsigned short pps_cb_qp_offset;
833 unsigned short pps_cr_qp_offset;
834 unsigned short first_slice_segment_in_pic_flag;
835 unsigned short m_temporalId;
836 unsigned short m_nalUnitType;
837
838 unsigned short vui_num_units_in_tick_hi;
839 unsigned short vui_num_units_in_tick_lo;
840 unsigned short vui_time_scale_hi;
841 unsigned short vui_time_scale_lo;
842 unsigned short bit_depth;
843 unsigned short profile_etc;
844 unsigned short sei_frame_field_info;
845 unsigned short video_signal_type;
846 unsigned short modification_list[0x20];
847 unsigned short conformance_window_flag;
848 unsigned short conf_win_left_offset;
849 unsigned short conf_win_right_offset;
850 unsigned short conf_win_top_offset;
851 unsigned short conf_win_bottom_offset;
852 unsigned short chroma_format_idc;
853 unsigned short color_description;
854 unsigned short aspect_ratio_idc;
855 unsigned short sar_width;
856 unsigned short sar_height;
857 unsigned short sps_max_dec_pic_buffering_minus1_0;
858 } p;
859};
860
861#define RPM_BUF_SIZE (0x80*2)
862/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
863#define LMEM_BUF_SIZE (0x500 * 2)
864
865struct buff_s {
866 u32 buf_start;
867 u32 buf_size;
868 u32 buf_end;
869};
870
871struct BuffInfo_s {
872 u32 max_width;
873 u32 max_height;
874 unsigned int start_adr;
875 unsigned int end_adr;
876 struct buff_s ipp;
877 struct buff_s sao_abv;
878 struct buff_s sao_vb;
879 struct buff_s short_term_rps;
880 struct buff_s vps;
881 struct buff_s sps;
882 struct buff_s pps;
883 struct buff_s sao_up;
884 struct buff_s swap_buf;
885 struct buff_s swap_buf2;
886 struct buff_s scalelut;
887 struct buff_s dblk_para;
888 struct buff_s dblk_data;
889 struct buff_s dblk_data2;
890 struct buff_s mmu_vbh;
891 struct buff_s cm_header;
892 struct buff_s mpred_above;
893#ifdef MV_USE_FIXED_BUF
894 struct buff_s mpred_mv;
895#endif
896 struct buff_s rpm;
897 struct buff_s lmem;
898};
899#define WORK_BUF_SPEC_NUM 3
900static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
901 {
902 /* 8M bytes */
903 .max_width = 1920,
904 .max_height = 1088,
905 .ipp = {
906 /* IPP work space calculation :
907 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
908 */
909 .buf_size = 0x4000,
910 },
911 .sao_abv = {
912 .buf_size = 0x30000,
913 },
914 .sao_vb = {
915 .buf_size = 0x30000,
916 },
917 .short_term_rps = {
918 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
919 * total 64x16x2 = 2048 bytes (0x800)
920 */
921 .buf_size = 0x800,
922 },
923 .vps = {
924 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
925 * total 0x0800 bytes
926 */
927 .buf_size = 0x800,
928 },
929 .sps = {
930 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
931 * total 0x0800 bytes
932 */
933 .buf_size = 0x800,
934 },
935 .pps = {
936 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
937 * total 0x2000 bytes
938 */
939 .buf_size = 0x2000,
940 },
941 .sao_up = {
942 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
943 * each has 16 bytes total 0x2800 bytes
944 */
945 .buf_size = 0x2800,
946 },
947 .swap_buf = {
948 /* 256cyclex64bit = 2K bytes 0x800
949 * (only 144 cycles valid)
950 */
951 .buf_size = 0x800,
952 },
953 .swap_buf2 = {
954 .buf_size = 0x800,
955 },
956 .scalelut = {
957 /* support up to 32 SCALELUT 1024x32 =
958 * 32Kbytes (0x8000)
959 */
960 .buf_size = 0x8000,
961 },
962 .dblk_para = {
963#ifdef SUPPORT_10BIT
964 .buf_size = 0x40000,
965#else
966 /* DBLK -> Max 256(4096/16) LCU, each para
967 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
968 */
969 .buf_size = 0x20000,
970#endif
971 },
972 .dblk_data = {
973 .buf_size = 0x40000,
974 },
975 .dblk_data2 = {
976 .buf_size = 0x40000,
977 }, /*dblk data for adapter*/
978 .mmu_vbh = {
979 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
980 },
981#if 0
982 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
983 .buf_size = MMU_COMPRESS_HEADER_SIZE *
984 (MAX_REF_PIC_NUM + 1),
985 },
986#endif
987 .mpred_above = {
988 .buf_size = 0x8000,
989 },
990#ifdef MV_USE_FIXED_BUF
991 .mpred_mv = {/* 1080p, 0x40000 per buffer */
992 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
993 },
994#endif
995 .rpm = {
996 .buf_size = RPM_BUF_SIZE,
997 },
998 .lmem = {
999 .buf_size = 0x500 * 2,
1000 }
1001 },
1002 {
1003 .max_width = 4096,
1004 .max_height = 2048,
1005 .ipp = {
1006 /* IPP work space calculation :
1007 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1008 */
1009 .buf_size = 0x4000,
1010 },
1011 .sao_abv = {
1012 .buf_size = 0x30000,
1013 },
1014 .sao_vb = {
1015 .buf_size = 0x30000,
1016 },
1017 .short_term_rps = {
1018 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1019 * total 64x16x2 = 2048 bytes (0x800)
1020 */
1021 .buf_size = 0x800,
1022 },
1023 .vps = {
1024 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1025 * total 0x0800 bytes
1026 */
1027 .buf_size = 0x800,
1028 },
1029 .sps = {
1030 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1031 * total 0x0800 bytes
1032 */
1033 .buf_size = 0x800,
1034 },
1035 .pps = {
1036 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1037 * total 0x2000 bytes
1038 */
1039 .buf_size = 0x2000,
1040 },
1041 .sao_up = {
1042 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1043 * each has 16 bytes total 0x2800 bytes
1044 */
1045 .buf_size = 0x2800,
1046 },
1047 .swap_buf = {
1048 /* 256cyclex64bit = 2K bytes 0x800
1049 * (only 144 cycles valid)
1050 */
1051 .buf_size = 0x800,
1052 },
1053 .swap_buf2 = {
1054 .buf_size = 0x800,
1055 },
1056 .scalelut = {
1057 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1058 * (0x8000)
1059 */
1060 .buf_size = 0x8000,
1061 },
1062 .dblk_para = {
1063 /* DBLK -> Max 256(4096/16) LCU, each para
1064 * 512bytes(total:0x20000),
1065 * data 1024bytes(total:0x40000)
1066 */
1067 .buf_size = 0x20000,
1068 },
1069 .dblk_data = {
1070 .buf_size = 0x80000,
1071 },
1072 .dblk_data2 = {
1073 .buf_size = 0x80000,
1074 }, /*dblk data for adapter*/
1075 .mmu_vbh = {
1076 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1077 },
1078#if 0
1079 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1080 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1081 (MAX_REF_PIC_NUM + 1),
1082 },
1083#endif
1084 .mpred_above = {
1085 .buf_size = 0x8000,
1086 },
1087#ifdef MV_USE_FIXED_BUF
1088 .mpred_mv = {
1089 /* 4k2k: 0x100000 per buffer
1090  * (was .buf_size = 0x100000*16) */
1091 /* 4096x2304: 0x120000 per buffer */
1092 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1093 },
1094#endif
1095 .rpm = {
1096 .buf_size = RPM_BUF_SIZE,
1097 },
1098 .lmem = {
1099 .buf_size = 0x500 * 2,
1100 }
1101 },
1102
1103 {
1104 .max_width = 4096*2,
1105 .max_height = 2048*2,
1106 .ipp = {
1107 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1108 .buf_size = 0x4000*2,
1109 },
1110 .sao_abv = {
1111 .buf_size = 0x30000*2,
1112 },
1113 .sao_vb = {
1114 .buf_size = 0x30000*2,
1115 },
1116 .short_term_rps = {
1117 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1118 .buf_size = 0x800,
1119 },
1120 .vps = {
1121 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1122 .buf_size = 0x800,
1123 },
1124 .sps = {
1125 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1126 .buf_size = 0x800,
1127 },
1128 .pps = {
1129 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1130 .buf_size = 0x2000,
1131 },
1132 .sao_up = {
1133 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1134 .buf_size = 0x2800*2,
1135 },
1136 .swap_buf = {
1137 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1138 .buf_size = 0x800,
1139 },
1140 .swap_buf2 = {
1141 .buf_size = 0x800,
1142 },
1143 .scalelut = {
1144 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1145 .buf_size = 0x8000*2,
1146 },
1147 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1148 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1149 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1150 .mmu_vbh = {
1151 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1152 },
1153#if 0
1154 .cm_header = {
1155 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1156 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1157 },
1158#endif
1159 .mpred_above = {
1160 .buf_size = 0x8000*2,
1161 },
1162#ifdef MV_USE_FIXED_BUF
1163 .mpred_mv = {
1164 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1165 },
1166#endif
1167 .rpm = {
1168 .buf_size = RPM_BUF_SIZE,
1169 },
1170 .lmem = {
1171 .buf_size = 0x500 * 2,
1172 },
1173 }
1174};
1175
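/*
 * init_buff_spec() lays the work buffers out back to back inside
 * [start_adr, end_adr): each region's buf_start is the previous region's
 * buf_start plus its buf_size.
 */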
1176static void init_buff_spec(struct hevc_state_s *hevc,
1177 struct BuffInfo_s *buf_spec)
1178{
1179 buf_spec->ipp.buf_start = buf_spec->start_adr;
1180 buf_spec->sao_abv.buf_start =
1181 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1182
1183 buf_spec->sao_vb.buf_start =
1184 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1185 buf_spec->short_term_rps.buf_start =
1186 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1187 buf_spec->vps.buf_start =
1188 buf_spec->short_term_rps.buf_start +
1189 buf_spec->short_term_rps.buf_size;
1190 buf_spec->sps.buf_start =
1191 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1192 buf_spec->pps.buf_start =
1193 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1194 buf_spec->sao_up.buf_start =
1195 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1196 buf_spec->swap_buf.buf_start =
1197 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1198 buf_spec->swap_buf2.buf_start =
1199 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1200 buf_spec->scalelut.buf_start =
1201 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1202 buf_spec->dblk_para.buf_start =
1203 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1204 buf_spec->dblk_data.buf_start =
1205 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1206 buf_spec->dblk_data2.buf_start =
1207 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1208 buf_spec->mmu_vbh.buf_start =
1209 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1210 buf_spec->mpred_above.buf_start =
1211 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1212#ifdef MV_USE_FIXED_BUF
1213 buf_spec->mpred_mv.buf_start =
1214 buf_spec->mpred_above.buf_start +
1215 buf_spec->mpred_above.buf_size;
1216
1217 buf_spec->rpm.buf_start =
1218 buf_spec->mpred_mv.buf_start +
1219 buf_spec->mpred_mv.buf_size;
1220#else
1221 buf_spec->rpm.buf_start =
1222 buf_spec->mpred_above.buf_start +
1223 buf_spec->mpred_above.buf_size;
1224#endif
1225 buf_spec->lmem.buf_start =
1226 buf_spec->rpm.buf_start +
1227 buf_spec->rpm.buf_size;
1228 buf_spec->end_adr =
1229 buf_spec->lmem.buf_start +
1230 buf_spec->lmem.buf_size;
1231
1232 if (hevc && get_dbg_flag2(hevc)) {
1233 hevc_print(hevc, 0,
1234 "%s workspace (%x %x) size = %x\n", __func__,
1235 buf_spec->start_adr, buf_spec->end_adr,
1236 buf_spec->end_adr - buf_spec->start_adr);
1237
1238 hevc_print(hevc, 0,
1239 "ipp.buf_start :%x\n",
1240 buf_spec->ipp.buf_start);
1241 hevc_print(hevc, 0,
1242 "sao_abv.buf_start :%x\n",
1243 buf_spec->sao_abv.buf_start);
1244 hevc_print(hevc, 0,
1245 "sao_vb.buf_start :%x\n",
1246 buf_spec->sao_vb.buf_start);
1247 hevc_print(hevc, 0,
1248 "short_term_rps.buf_start :%x\n",
1249 buf_spec->short_term_rps.buf_start);
1250 hevc_print(hevc, 0,
1251 "vps.buf_start :%x\n",
1252 buf_spec->vps.buf_start);
1253 hevc_print(hevc, 0,
1254 "sps.buf_start :%x\n",
1255 buf_spec->sps.buf_start);
1256 hevc_print(hevc, 0,
1257 "pps.buf_start :%x\n",
1258 buf_spec->pps.buf_start);
1259 hevc_print(hevc, 0,
1260 "sao_up.buf_start :%x\n",
1261 buf_spec->sao_up.buf_start);
1262 hevc_print(hevc, 0,
1263 "swap_buf.buf_start :%x\n",
1264 buf_spec->swap_buf.buf_start);
1265 hevc_print(hevc, 0,
1266 "swap_buf2.buf_start :%x\n",
1267 buf_spec->swap_buf2.buf_start);
1268 hevc_print(hevc, 0,
1269 "scalelut.buf_start :%x\n",
1270 buf_spec->scalelut.buf_start);
1271 hevc_print(hevc, 0,
1272 "dblk_para.buf_start :%x\n",
1273 buf_spec->dblk_para.buf_start);
1274 hevc_print(hevc, 0,
1275 "dblk_data.buf_start :%x\n",
1276 buf_spec->dblk_data.buf_start);
1277 hevc_print(hevc, 0,
1278 "dblk_data2.buf_start :%x\n",
1279 buf_spec->dblk_data2.buf_start);
1280 hevc_print(hevc, 0,
1281 "mpred_above.buf_start :%x\n",
1282 buf_spec->mpred_above.buf_start);
1283#ifdef MV_USE_FIXED_BUF
1284 hevc_print(hevc, 0,
1285 "mpred_mv.buf_start :%x\n",
1286 buf_spec->mpred_mv.buf_start);
1287#endif
1288 if ((get_dbg_flag2(hevc)
1289 &
1290 H265_DEBUG_SEND_PARAM_WITH_REG)
1291 == 0) {
1292 hevc_print(hevc, 0,
1293 "rpm.buf_start :%x\n",
1294 buf_spec->rpm.buf_start);
1295 }
1296 }
1297
1298}
1299
1300enum SliceType {
1301 B_SLICE,
1302 P_SLICE,
1303 I_SLICE
1304};
1305
1306/*USE_BUF_BLOCK*/
1307struct BUF_s {
1308 ulong start_adr;
1309 u32 size;
1310 u32 luma_size;
1311 ulong header_addr;
1312 u32 header_size;
1313 int used_flag;
1314 ulong v4l_ref_buf_addr;
1315} /*BUF_t */;
1316
1317/* levels 6 and 6.1 allow a maximum of 800 slices; other levels allow 200 */
1318#define MAX_SLICE_NUM 800
1319struct PIC_s {
1320 int index;
1321 int scatter_alloc;
1322 int BUF_index;
1323 int mv_buf_index;
1324 int POC;
1325 int decode_idx;
1326 int slice_type;
1327 int RefNum_L0;
1328 int RefNum_L1;
1329 int num_reorder_pic;
1330 int stream_offset;
1331 unsigned char referenced;
1332 unsigned char output_mark;
1333 unsigned char recon_mark;
1334 unsigned char output_ready;
1335 unsigned char error_mark;
1336 //dis_mark = 0:discard mark,dis_mark = 1:no discard mark
1337 unsigned char dis_mark;
1338 /**/ int slice_idx;
1339 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1340 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1341 /*buffer */
1342 unsigned int header_adr;
1343#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1344 unsigned char dv_enhance_exist;
1345#endif
1346 char *aux_data_buf;
1347 int aux_data_size;
1348 unsigned long cma_alloc_addr;
1349 struct page *alloc_pages;
1350 unsigned int mpred_mv_wr_start_addr;
1351 unsigned int mc_y_adr;
1352 unsigned int mc_u_v_adr;
1353#ifdef SUPPORT_10BIT
1354 /*unsigned int comp_body_size;*/
1355 unsigned int dw_y_adr;
1356 unsigned int dw_u_v_adr;
1357#endif
1358 int mc_canvas_y;
1359 int mc_canvas_u_v;
1360 int width;
1361 int height;
1362
1363 int y_canvas_index;
1364 int uv_canvas_index;
1365#ifdef MULTI_INSTANCE_SUPPORT
1366 struct canvas_config_s canvas_config[2];
1367#endif
1368#ifdef SUPPORT_10BIT
1369 int mem_saving_mode;
1370 u32 bit_depth_luma;
1371 u32 bit_depth_chroma;
1372#endif
1373#ifdef LOSLESS_COMPRESS_MODE
1374 unsigned int losless_comp_body_size;
1375#endif
1376 unsigned char pic_struct;
1377 int vf_ref;
1378
1379 u32 pts;
1380 u64 pts64;
1381 u64 timestamp;
1382
1383 u32 aspect_ratio_idc;
1384 u32 sar_width;
1385 u32 sar_height;
1386 u32 double_write_mode;
1387 u32 video_signal_type;
1388 unsigned short conformance_window_flag;
1389 unsigned short conf_win_left_offset;
1390 unsigned short conf_win_right_offset;
1391 unsigned short conf_win_top_offset;
1392 unsigned short conf_win_bottom_offset;
1393 unsigned short chroma_format_idc;
1394
1395 /* picture qos information */
1396 int max_qp;
1397 int avg_qp;
1398 int min_qp;
1399 int max_skip;
1400 int avg_skip;
1401 int min_skip;
1402 int max_mv;
1403 int min_mv;
1404 int avg_mv;
1405
1406 bool vframe_bound;
1407} /*PIC_t */;
1408
1409#define MAX_TILE_COL_NUM 10
1410#define MAX_TILE_ROW_NUM 20
1411struct tile_s {
1412 int width;
1413 int height;
1414 int start_cu_x;
1415 int start_cu_y;
1416
1417 unsigned int sao_vb_start_addr;
1418 unsigned int sao_abv_start_addr;
1419};
1420
1421#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1422#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1423#define SEI_HDR10PLUS_MASK 0x00000004
1424
1425#define VF_POOL_SIZE 32
1426
1427#ifdef MULTI_INSTANCE_SUPPORT
1428#define DEC_RESULT_NONE 0
1429#define DEC_RESULT_DONE 1
1430#define DEC_RESULT_AGAIN 2
1431#define DEC_RESULT_CONFIG_PARAM 3
1432#define DEC_RESULT_ERROR 4
1433#define DEC_INIT_PICLIST 5
1434#define DEC_UNINIT_PICLIST 6
1435#define DEC_RESULT_GET_DATA 7
1436#define DEC_RESULT_GET_DATA_RETRY 8
1437#define DEC_RESULT_EOS 9
1438#define DEC_RESULT_FORCE_EXIT 10
1439#define DEC_RESULT_FREE_CANVAS 11
1440
1441static void vh265_work(struct work_struct *work);
1442static void vh265_timeout_work(struct work_struct *work);
1443static void vh265_notify_work(struct work_struct *work);
1444
1445#endif
1446
1447struct debug_log_s {
1448 struct list_head list;
1449 uint8_t data; /*will alloc more size*/
1450};
1451
1452struct hevc_state_s {
1453#ifdef MULTI_INSTANCE_SUPPORT
1454 struct platform_device *platform_dev;
1455 void (*vdec_cb)(struct vdec_s *, void *);
1456 void *vdec_cb_arg;
1457 struct vframe_chunk_s *chunk;
1458 int dec_result;
1459 struct work_struct work;
1460 struct work_struct timeout_work;
1461 struct work_struct notify_work;
1462 struct work_struct set_clk_work;
1463 /* timeout handle */
1464 unsigned long int start_process_time;
1465 unsigned int last_lcu_idx;
1466 unsigned int decode_timeout_count;
1467 unsigned int timeout_num;
1468#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1469 unsigned char switch_dvlayer_flag;
1470 unsigned char no_switch_dvlayer_count;
1471 unsigned char bypass_dvenl_enable;
1472 unsigned char bypass_dvenl;
1473#endif
1474 unsigned char start_parser_type;
1475 /*start_decoding_flag:
1476 vps/pps/sps/idr info from ucode*/
1477 unsigned char start_decoding_flag;
1478 unsigned char rps_set_id;
1479 unsigned char eos;
1480 int pic_decoded_lcu_idx;
1481 u8 over_decode;
1482 u8 empty_flag;
1483#endif
1484 struct vframe_s vframe_dummy;
1485 char *provider_name;
1486 int index;
1487 struct device *cma_dev;
1488 unsigned char m_ins_flag;
1489 unsigned char dolby_enhance_flag;
1490 unsigned long buf_start;
1491 u32 buf_size;
1492 u32 mv_buf_size;
1493
1494 struct BuffInfo_s work_space_buf_store;
1495 struct BuffInfo_s *work_space_buf;
1496
1497 u8 aux_data_dirty;
1498 u32 prefix_aux_size;
1499 u32 suffix_aux_size;
1500 void *aux_addr;
1501 void *rpm_addr;
1502 void *lmem_addr;
1503 dma_addr_t aux_phy_addr;
1504 dma_addr_t rpm_phy_addr;
1505 dma_addr_t lmem_phy_addr;
1506
1507 unsigned int pic_list_init_flag;
1508 unsigned int use_cma_flag;
1509
1510 unsigned short *rpm_ptr;
1511 unsigned short *lmem_ptr;
1512 unsigned short *debug_ptr;
1513 int debug_ptr_size;
1514 int pic_w;
1515 int pic_h;
1516 int lcu_x_num;
1517 int lcu_y_num;
1518 int lcu_total;
1519 int lcu_size;
1520 int lcu_size_log2;
1521 int lcu_x_num_pre;
1522 int lcu_y_num_pre;
1523 int first_pic_after_recover;
1524
1525 int num_tile_col;
1526 int num_tile_row;
1527 int tile_enabled;
1528 int tile_x;
1529 int tile_y;
1530 int tile_y_x;
1531 int tile_start_lcu_x;
1532 int tile_start_lcu_y;
1533 int tile_width_lcu;
1534 int tile_height_lcu;
1535
1536 int slice_type;
1537 unsigned int slice_addr;
1538 unsigned int slice_segment_addr;
1539
1540 unsigned char interlace_flag;
1541 unsigned char curr_pic_struct;
1542 unsigned char frame_field_info_present_flag;
1543
1544 unsigned short sps_num_reorder_pics_0;
1545 unsigned short misc_flag0;
1546 int m_temporalId;
1547 int m_nalUnitType;
1548 int TMVPFlag;
1549 int isNextSliceSegment;
1550 int LDCFlag;
1551 int m_pocRandomAccess;
1552 int plevel;
1553 int MaxNumMergeCand;
1554
1555 int new_pic;
1556 int new_tile;
1557 int curr_POC;
1558 int iPrevPOC;
1559#ifdef MULTI_INSTANCE_SUPPORT
1560 int decoded_poc;
1561 struct PIC_s *decoding_pic;
1562#endif
1563 int iPrevTid0POC;
1564 int list_no;
1565 int RefNum_L0;
1566 int RefNum_L1;
1567 int ColFromL0Flag;
1568 int LongTerm_Curr;
1569 int LongTerm_Col;
1570 int Col_POC;
1571 int LongTerm_Ref;
1572#ifdef MULTI_INSTANCE_SUPPORT
1573 int m_pocRandomAccess_bak;
1574 int curr_POC_bak;
1575 int iPrevPOC_bak;
1576 int iPrevTid0POC_bak;
1577 unsigned char start_parser_type_bak;
1578 unsigned char start_decoding_flag_bak;
1579 unsigned char rps_set_id_bak;
1580 int pic_decoded_lcu_idx_bak;
1581 int decode_idx_bak;
1582#endif
1583 struct PIC_s *cur_pic;
1584 struct PIC_s *col_pic;
1585 int skip_flag;
1586 int decode_idx;
1587 int slice_idx;
1588 unsigned char have_vps;
1589 unsigned char have_sps;
1590 unsigned char have_pps;
1591 unsigned char have_valid_start_slice;
1592 unsigned char wait_buf;
1593 unsigned char error_flag;
1594 unsigned int error_skip_nal_count;
1595 long used_4k_num;
1596
1597 unsigned char
1598 ignore_bufmgr_error; /* bit 0, for decoding;
1599 bit 1, for displaying
1600 bit 1 must be set if bit 0 is 1*/
1601 int PB_skip_mode;
1602 int PB_skip_count_after_decoding;
1603#ifdef SUPPORT_10BIT
1604 int mem_saving_mode;
1605#endif
1606#ifdef LOSLESS_COMPRESS_MODE
1607 unsigned int losless_comp_body_size;
1608#endif
1609 int pts_mode;
1610 int last_lookup_pts;
1611 int last_pts;
1612 u64 last_lookup_pts_us64;
1613 u64 last_pts_us64;
1614 u32 shift_byte_count_lo;
1615 u32 shift_byte_count_hi;
1616 int pts_mode_switching_count;
1617 int pts_mode_recovery_count;
1618
1619 int pic_num;
1620
1621 /**/
1622 union param_u param;
1623
1624 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1625
1626 struct timer_list timer;
1627 struct BUF_s m_BUF[BUF_POOL_SIZE];
1628 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1629 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1630
1631 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1632 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1633 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1634 struct vframe_s vfpool[VF_POOL_SIZE];
1635
1636 u32 stat;
1637 u32 frame_width;
1638 u32 frame_height;
1639 u32 frame_dur;
1640 u32 frame_ar;
1641 u32 bit_depth_luma;
1642 u32 bit_depth_chroma;
1643 u32 video_signal_type;
1644 u32 video_signal_type_debug;
1645 u32 saved_resolution;
1646 bool get_frame_dur;
1647 u32 error_watchdog_count;
1648 u32 error_skip_nal_wt_cnt;
1649 u32 error_system_watchdog_count;
1650
1651#ifdef DEBUG_PTS
1652 unsigned long pts_missed;
1653 unsigned long pts_hit;
1654#endif
1655 struct dec_sysinfo vh265_amstream_dec_info;
1656 unsigned char init_flag;
1657 unsigned char first_sc_checked;
1658 unsigned char uninit_list;
1659 u32 start_decoding_time;
1660
1661 int show_frame_num;
1662#ifdef USE_UNINIT_SEMA
1663 struct semaphore h265_uninit_done_sema;
1664#endif
1665 int fatal_error;
1666
1667
1668 u32 sei_present_flag;
1669 void *frame_mmu_map_addr;
1670 dma_addr_t frame_mmu_map_phy_addr;
1671 unsigned int mmu_mc_buf_start;
1672 unsigned int mmu_mc_buf_end;
1673 unsigned int mmu_mc_start_4k_adr;
1674 void *mmu_box;
1675 void *bmmu_box;
1676 int mmu_enable;
1677
1678 unsigned int dec_status;
1679
1680 /* data for SEI_MASTER_DISPLAY_COLOR */
1681 unsigned int primaries[3][2];
1682 unsigned int white_point[2];
1683 unsigned int luminance[2];
1684 /* data for SEI_CONTENT_LIGHT_LEVEL */
1685 unsigned int content_light_level[2];
1686
1687 struct PIC_s *pre_top_pic;
1688 struct PIC_s *pre_bot_pic;
1689
1690#ifdef MULTI_INSTANCE_SUPPORT
1691 int double_write_mode;
1692 int dynamic_buf_num_margin;
1693 int start_action;
1694 int save_buffer_mode;
1695#endif
1696 u32 i_only;
1697 struct list_head log_list;
1698 u32 ucode_pause_pos;
1699 u32 start_shift_bytes;
1700
1701 u32 vf_pre_count;
1702 u32 vf_get_count;
1703 u32 vf_put_count;
1704#ifdef SWAP_HEVC_UCODE
1705 dma_addr_t mc_dma_handle;
1706 void *mc_cpu_addr;
1707 int swap_size;
1708 ulong swap_addr;
1709#endif
1710#ifdef DETREFILL_ENABLE
1711 dma_addr_t detbuf_adr;
1712 u16 *detbuf_adr_virt;
1713 u8 delrefill_check;
1714#endif
1715 u8 head_error_flag;
1716 int valve_count;
1717 struct firmware_s *fw;
1718 int max_pic_w;
1719 int max_pic_h;
1720#ifdef AGAIN_HAS_THRESHOLD
1721 u8 next_again_flag;
1722 u32 pre_parser_wr_ptr;
1723#endif
1724 u32 ratio_control;
1725 u32 first_pic_flag;
1726 u32 decode_size;
1727 struct mutex chunks_mutex;
1728 int need_cache_size;
1729 u64 sc_start_time;
1730 u32 skip_first_nal;
1731 bool is_swap;
1732 bool is_4k;
1733 int frameinfo_enable;
1734 struct vframe_qos_s vframe_qos;
1735 bool is_used_v4l;
1736 void *v4l2_ctx;
1737 bool v4l_params_parsed;
1738 u32 mem_map_mode;
1739} /*hevc_stru_t */;
1740
1741#ifdef AGAIN_HAS_THRESHOLD
1742u32 again_threshold;
1743#endif
1744#ifdef SEND_LMEM_WITH_RPM
1745#define get_lmem_params(hevc, ladr) \
1746 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
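/*
 * The macro above remaps a linear lmem word address onto the dumped layout:
 * within each 64-bit group the four 16-bit halfwords are stored in reverse
 * order, i.e. the effective index is (ladr & ~0x3) + (3 - (ladr & 0x3)).
 *
 * Illustrative sketch (not part of the driver; the helper name is made up)
 * of unpacking the RPM region into union param_u with the same swap:
 */
#if 0
static void example_fill_param(struct hevc_state_s *hevc, union param_u *p)
{
	int i, j;

	for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
		/* each group of four 16-bit words arrives reversed */
		for (j = 0; j < 4; j++)
			p->l.data[i + j] = hevc->rpm_ptr[i + 3 - j];
	}
}
#endif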
1747
1748
1749static int get_frame_mmu_map_size(void)
1750{
1751 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1752 return (MAX_FRAME_8K_NUM * 4);
1753
1754 return (MAX_FRAME_4K_NUM * 4);
1755}
1756
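/*
 * Overflow-safe oversize check: compares w against max / h rather than
 * computing w * h directly, so the product cannot wrap a 32-bit int.
 */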
1757static int is_oversize(int w, int h)
1758{
1759 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1760 MAX_SIZE_8K : MAX_SIZE_4K;
1761
1762 if (w < 0 || h < 0)
1763 return true;
1764
1765 if (h != 0 && (w > max / h))
1766 return true;
1767
1768 return false;
1769}
1770
1771void check_head_error(struct hevc_state_s *hevc)
1772{
1773#define pcm_enabled_flag 0x040
1774#define pcm_sample_bit_depth_luma 0x041
1775#define pcm_sample_bit_depth_chroma 0x042
1776 hevc->head_error_flag = 0;
1777 if ((error_handle_policy & 0x40) == 0)
1778 return;
1779 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1780 uint16_t pcm_depth_luma = get_lmem_params(
1781 hevc, pcm_sample_bit_depth_luma);
1782 uint16_t pcm_sample_chroma = get_lmem_params(
1783 hevc, pcm_sample_bit_depth_chroma);
1784 if (pcm_depth_luma >
1785 hevc->bit_depth_luma ||
1786 pcm_sample_chroma >
1787 hevc->bit_depth_chroma) {
1788 hevc_print(hevc, 0,
1789 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1790 pcm_depth_luma,
1791 pcm_sample_chroma,
1792 hevc->bit_depth_luma,
1793 hevc->bit_depth_chroma);
1794 hevc->head_error_flag = 1;
1795 }
1796 }
1797}
1798#endif
1799
1800#ifdef SUPPORT_10BIT
1801/* Lossless compression body buffer size: 4K per 64x32 block (jt) */
1802static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1803 int width, int height, int mem_saving_mode)
1804{
1805 int width_x64;
1806 int height_x32;
1807 int bsize;
1808
1809 width_x64 = width + 63;
1810 width_x64 >>= 6;
1811
1812 height_x32 = height + 31;
1813 height_x32 >>= 5;
1814 if (mem_saving_mode == 1 && hevc->mmu_enable)
1815 bsize = 3200 * width_x64 * height_x32;
1816 else if (mem_saving_mode == 1)
1817 bsize = 3072 * width_x64 * height_x32;
1818 else
1819 bsize = 4096 * width_x64 * height_x32;
1820
1821 return bsize;
1822}
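/*
 * Worked example (illustrative), 3840x2160 with mem_saving_mode disabled:
 * width_x64 = 60, height_x32 = 68,
 * bsize = 4096 * 60 * 68 = 16711680 bytes (about 16 MB).
 */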
1823
1824/* Lossless compression header buffer size: 32 bytes per 128x64 block (jt) */
1825static int compute_losless_comp_header_size(int width, int height)
1826{
1827 int width_x128;
1828 int height_x64;
1829 int hsize;
1830
1831 width_x128 = width + 127;
1832 width_x128 >>= 7;
1833
1834 height_x64 = height + 63;
1835 height_x64 >>= 6;
1836
1837 hsize = 32*width_x128*height_x64;
1838
1839 return hsize;
1840}
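/*
 * Worked example (illustrative), 3840x2160:
 * width_x128 = 30, height_x64 = 34, hsize = 32 * 30 * 34 = 32640 bytes.
 */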
1841#endif
1842
1843static int add_log(struct hevc_state_s *hevc,
1844 const char *fmt, ...)
1845{
1846#define HEVC_LOG_BUF 196
1847 struct debug_log_s *log_item;
1848 unsigned char buf[HEVC_LOG_BUF];
1849 int len = 0;
1850 va_list args;
1851 mutex_lock(&vh265_log_mutex);
1852 va_start(args, fmt);
1853 len = sprintf(buf, "<%ld> <%05d> ",
1854 jiffies, hevc->decode_idx);
1855 len += vsnprintf(buf + len,
1856 HEVC_LOG_BUF - len, fmt, args);
1857 va_end(args);
1858 log_item = kmalloc(
1859 sizeof(struct debug_log_s) + len,
1860 GFP_KERNEL);
1861 if (log_item) {
1862 INIT_LIST_HEAD(&log_item->list);
1863 strcpy(&log_item->data, buf);
1864 list_add_tail(&log_item->list,
1865 &hevc->log_list);
1866 }
1867 mutex_unlock(&vh265_log_mutex);
1868 return 0;
1869}
1870
1871static void dump_log(struct hevc_state_s *hevc)
1872{
1873 int i = 0;
1874 struct debug_log_s *log_item, *tmp;
1875 mutex_lock(&vh265_log_mutex);
1876 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1877 hevc_print(hevc, 0,
1878 "[LOG%04d]%s\n",
1879 i++,
1880 &log_item->data);
1881 list_del(&log_item->list);
1882 kfree(log_item);
1883 }
1884 mutex_unlock(&vh265_log_mutex);
1885}
1886
1887static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1888 struct PIC_s *pic)
1889{
1890 if (pic->error_mark
1891 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1892 return 1;
1893 return 0;
1894}
1895
1896static int get_pic_poc(struct hevc_state_s *hevc,
1897 unsigned int idx)
1898{
1899 if (idx != 0xff
1900 && idx < MAX_REF_PIC_NUM
1901 && hevc->m_PIC[idx])
1902 return hevc->m_PIC[idx]->POC;
1903 return INVALID_POC;
1904}
1905
1906#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1907static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1908{
1909 return (hevc->m_ins_flag &&
1910 ((double_write_mode & 0x80000000) == 0)) ?
1911 hevc->double_write_mode :
1912 (double_write_mode & 0x7fffffff);
1913}
1914
1915static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1916{
1917 return (hevc->m_ins_flag &&
1918 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1919 hevc->dynamic_buf_num_margin :
1920 (dynamic_buf_num_margin & 0x7fffffff);
1921}
1922#endif
1923
1924static int get_double_write_mode(struct hevc_state_s *hevc)
1925{
1926 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1927 int w = hevc->pic_w;
1928 int h = hevc->pic_h;
1929 u32 dw = 0x1; /*1:1*/
1930 switch (valid_dw_mode) {
1931 case 0x100:
1932 if (w > 1920 && h > 1088)
1933 dw = 0x4; /*1:2*/
1934 break;
1935 case 0x200:
1936 if (w > 1920 && h > 1088)
1937 dw = 0x2; /*1:4*/
1938 break;
1939 case 0x300:
1940 if (w > 1280 && h > 720)
1941 dw = 0x4; /*1:2*/
1942 break;
1943 default:
1944 dw = valid_dw_mode;
1945 break;
1946 }
1947 return dw;
1948}
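/*
 * Annotation (not in the original source): double_write_mode values
 * 0x100/0x200/0x300 are "auto" modes; the scaled double-write output is
 * only selected above the listed resolution threshold, otherwise dw stays
 * at 0x1 (full-size 1:1 double write). For example, with valid_dw_mode ==
 * 0x100 a 3840x2160 stream selects dw = 4 (1/2 scaling in each dimension),
 * while a 1920x1080 stream keeps dw = 1.
 */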
1949
1950static int get_double_write_ratio(struct hevc_state_s *hevc,
1951 int dw_mode)
1952{
1953 int ratio = 1;
1954 if ((dw_mode == 2) ||
1955 (dw_mode == 3))
1956 ratio = 4;
1957 else if (dw_mode == 4)
1958 ratio = 2;
1959 return ratio;
1960}
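/*
 * Annotation (not in the original source): the ratio returned here is the
 * linear downscale factor of the double-write plane in each dimension:
 * dw_mode 2 or 3 -> 1/4, dw_mode 4 -> 1/2, anything else -> 1/1. E.g. a
 * 3840-wide picture with dw_mode == 4 produces a 1920-wide double-write
 * luma plane.
 */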
1961#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1962static unsigned char get_idx(struct hevc_state_s *hevc)
1963{
1964 return hevc->index;
1965}
1966#endif
1967
1968#undef pr_info
1969#define pr_info printk
1970static int hevc_print(struct hevc_state_s *hevc,
1971 int flag, const char *fmt, ...)
1972{
1973#define HEVC_PRINT_BUF 256
1974 unsigned char buf[HEVC_PRINT_BUF];
1975 int len = 0;
1976#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1977 if (hevc == NULL ||
1978 (flag == 0) ||
1979 ((debug_mask &
1980 (1 << hevc->index))
1981 && (debug & flag))) {
1982#endif
1983 va_list args;
1984
1985 va_start(args, fmt);
1986 if (hevc)
1987 len = sprintf(buf, "[%d]", hevc->index);
1988 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1989 pr_debug("%s", buf);
1990 va_end(args);
1991#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1992 }
1993#endif
1994 return 0;
1995}
1996
1997static int hevc_print_cont(struct hevc_state_s *hevc,
1998 int flag, const char *fmt, ...)
1999{
2000 unsigned char buf[HEVC_PRINT_BUF];
2001 int len = 0;
2002#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2003 if (hevc == NULL ||
2004 (flag == 0) ||
2005 ((debug_mask &
2006 (1 << hevc->index))
2007 && (debug & flag))) {
2008#endif
2009 va_list args;
2010
2011 va_start(args, fmt);
2012 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2013 pr_info("%s", buf);
2014 va_end(args);
2015#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2016 }
2017#endif
2018 return 0;
2019}
2020
2021static void put_mv_buf(struct hevc_state_s *hevc,
2022 struct PIC_s *pic);
2023
2024static void update_vf_memhandle(struct hevc_state_s *hevc,
2025 struct vframe_s *vf, struct PIC_s *pic);
2026
2027static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2028
2029static void release_aux_data(struct hevc_state_s *hevc,
2030 struct PIC_s *pic);
2031static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2032
2033#ifdef MULTI_INSTANCE_SUPPORT
2034static void backup_decode_state(struct hevc_state_s *hevc)
2035{
2036 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2037 hevc->curr_POC_bak = hevc->curr_POC;
2038 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2039 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2040 hevc->start_parser_type_bak = hevc->start_parser_type;
2041 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2042 hevc->rps_set_id_bak = hevc->rps_set_id;
2043 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2044 hevc->decode_idx_bak = hevc->decode_idx;
2045
2046}
2047
2048static void restore_decode_state(struct hevc_state_s *hevc)
2049{
2050 struct vdec_s *vdec = hw_to_vdec(hevc);
2051 if (!vdec_has_more_input(vdec)) {
2052 hevc->pic_decoded_lcu_idx =
2053 READ_VREG(HEVC_PARSER_LCU_START)
2054 & 0xffffff;
2055 return;
2056 }
2057 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2058 "%s: discard pic index 0x%x\n",
2059 __func__, hevc->decoding_pic ?
2060 hevc->decoding_pic->index : 0xff);
2061 if (hevc->decoding_pic) {
2062 hevc->decoding_pic->error_mark = 0;
2063 hevc->decoding_pic->output_ready = 0;
2064 hevc->decoding_pic->output_mark = 0;
2065 hevc->decoding_pic->referenced = 0;
2066 hevc->decoding_pic->POC = INVALID_POC;
2067 put_mv_buf(hevc, hevc->decoding_pic);
2068 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2069 release_aux_data(hevc, hevc->decoding_pic);
2070 hevc->decoding_pic = NULL;
2071 }
2072 hevc->decode_idx = hevc->decode_idx_bak;
2073 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2074 hevc->curr_POC = hevc->curr_POC_bak;
2075 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2076 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2077 hevc->start_parser_type = hevc->start_parser_type_bak;
2078 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2079 hevc->rps_set_id = hevc->rps_set_id_bak;
2080 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2081
2082 if (hevc->pic_list_init_flag == 1)
2083 hevc->pic_list_init_flag = 0;
2084 /*if (hevc->decode_idx == 0)
2085 hevc->start_decoding_flag = 0;*/
2086
2087 hevc->slice_idx = 0;
2088 hevc->used_4k_num = -1;
2089}
2090#endif
2091
2092static void hevc_init_stru(struct hevc_state_s *hevc,
2093 struct BuffInfo_s *buf_spec_i)
2094{
2095 int i;
2096 INIT_LIST_HEAD(&hevc->log_list);
2097 hevc->work_space_buf = buf_spec_i;
2098 hevc->prefix_aux_size = 0;
2099 hevc->suffix_aux_size = 0;
2100 hevc->aux_addr = NULL;
2101 hevc->rpm_addr = NULL;
2102 hevc->lmem_addr = NULL;
2103
2104 hevc->curr_POC = INVALID_POC;
2105
2106 hevc->pic_list_init_flag = 0;
2107 hevc->use_cma_flag = 0;
2108 hevc->decode_idx = 0;
2109 hevc->slice_idx = 0;
2110 hevc->new_pic = 0;
2111 hevc->new_tile = 0;
2112 hevc->iPrevPOC = 0;
2113 hevc->list_no = 0;
2114 /* int m_uiMaxCUWidth = 1<<7; */
2115 /* int m_uiMaxCUHeight = 1<<7; */
2116 hevc->m_pocRandomAccess = MAX_INT;
2117 hevc->tile_enabled = 0;
2118 hevc->tile_x = 0;
2119 hevc->tile_y = 0;
2120 hevc->iPrevTid0POC = 0;
2121 hevc->slice_addr = 0;
2122 hevc->slice_segment_addr = 0;
2123 hevc->skip_flag = 0;
2124 hevc->misc_flag0 = 0;
2125
2126 hevc->cur_pic = NULL;
2127 hevc->col_pic = NULL;
2128 hevc->wait_buf = 0;
2129 hevc->error_flag = 0;
2130 hevc->head_error_flag = 0;
2131 hevc->error_skip_nal_count = 0;
2132 hevc->have_vps = 0;
2133 hevc->have_sps = 0;
2134 hevc->have_pps = 0;
2135 hevc->have_valid_start_slice = 0;
2136
2137 hevc->pts_mode = PTS_NORMAL;
2138 hevc->last_pts = 0;
2139 hevc->last_lookup_pts = 0;
2140 hevc->last_pts_us64 = 0;
2141 hevc->last_lookup_pts_us64 = 0;
2142 hevc->pts_mode_switching_count = 0;
2143 hevc->pts_mode_recovery_count = 0;
2144
2145 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2146 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2147 if (hevc->PB_skip_mode == 0)
2148 hevc->ignore_bufmgr_error = 0x1;
2149 else
2150 hevc->ignore_bufmgr_error = 0x0;
2151
2152 if (hevc->is_used_v4l) {
2153 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2154 if (hevc->m_PIC[i] != NULL) {
2155				memset(hevc->m_PIC[i], 0, sizeof(struct PIC_s));
2156 hevc->m_PIC[i]->index = i;
2157 }
2158 }
2159 }
2160
2161 hevc->pic_num = 0;
2162 hevc->lcu_x_num_pre = 0;
2163 hevc->lcu_y_num_pre = 0;
2164 hevc->first_pic_after_recover = 0;
2165
2166 hevc->pre_top_pic = NULL;
2167 hevc->pre_bot_pic = NULL;
2168
2169 hevc->sei_present_flag = 0;
2170 hevc->valve_count = 0;
2171 hevc->first_pic_flag = 0;
2172#ifdef MULTI_INSTANCE_SUPPORT
2173 hevc->decoded_poc = INVALID_POC;
2174 hevc->start_process_time = 0;
2175 hevc->last_lcu_idx = 0;
2176 hevc->decode_timeout_count = 0;
2177 hevc->timeout_num = 0;
2178 hevc->eos = 0;
2179 hevc->pic_decoded_lcu_idx = -1;
2180 hevc->over_decode = 0;
2181 hevc->used_4k_num = -1;
2182 hevc->start_decoding_flag = 0;
2183 hevc->rps_set_id = 0;
2184 backup_decode_state(hevc);
2185#endif
2186#ifdef DETREFILL_ENABLE
2187 hevc->detbuf_adr = 0;
2188 hevc->detbuf_adr_virt = NULL;
2189#endif
2190}
2191
2192static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2193static int H265_alloc_mmu(struct hevc_state_s *hevc,
2194 struct PIC_s *new_pic, unsigned short bit_depth,
2195 unsigned int *mmu_index_adr);
2196
2197#ifdef DETREFILL_ENABLE
2198#define DETREFILL_BUF_SIZE (4 * 0x4000)
2199#define HEVC_SAO_DBG_MODE0 0x361e
2200#define HEVC_SAO_DBG_MODE1 0x361f
2201#define HEVC_SAO_CTRL10 0x362e
2202#define HEVC_SAO_CTRL11 0x362f
2203static int init_detrefill_buf(struct hevc_state_s *hevc)
2204{
2205 if (hevc->detbuf_adr_virt)
2206 return 0;
2207
2208 hevc->detbuf_adr_virt =
2209 (void *)dma_alloc_coherent(amports_get_dma_device(),
2210 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2211 GFP_KERNEL);
2212
2213 if (hevc->detbuf_adr_virt == NULL) {
2214		pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2215 return -1;
2216 }
2217 return 0;
2218}
2219
2220static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2221{
2222 if (hevc->detbuf_adr_virt) {
2223 dma_free_coherent(amports_get_dma_device(),
2224 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2225 hevc->detbuf_adr);
2226
2227 hevc->detbuf_adr_virt = NULL;
2228 hevc->detbuf_adr = 0;
2229 }
2230}
2231
2232/*
2233 * convert uncompressed frame buffer data from/to ddr
2234 */
2235static void convUnc8x4blk(uint16_t* blk8x4Luma,
2236 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2237{
2238 if (direction == 0) {
2239 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2240 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2241 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2242 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2243 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2244 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2245 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2246 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2247 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2248 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2249 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2250 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2251 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2252 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2253 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2254 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2255 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2256 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2257
2258 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2259 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2260 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2261 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2262 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2263 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2264 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2265 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2266 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2267 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2268 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2269 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2270 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2271 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2272 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2273 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2274 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2275 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2276
2277 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2278 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2279 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2280 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2281 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2282 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2283 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2284 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2285 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2286 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2287 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2288 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2289 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2290 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2291 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2292 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2293 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2294 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2295
2296 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2297 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2298 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2299 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2300 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2301 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2302 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2303 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2304 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2305 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2306 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2307 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2308 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2309 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2310 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2311 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2312 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2313 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2314 } else {
2315 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2316 blk8x4Luma[3 + 0 * 8];
2317 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2318 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2319 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2320 (blk8x4Luma[3 + 3 * 8] >> 2);
2321 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2322 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2323 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2324 (blk8x4Luma[7 + 2 * 8] >>4);
2325 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2326 blk8x4Cb[0 + 0 * 4];
2327 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2328 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2329 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2330
2331 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2332 blk8x4Luma[0 + 0 * 8];
2333 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2334 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2335 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2336 (blk8x4Luma[0 + 1 * 8] >> 2);
2337 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2338 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2339 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2340 (blk8x4Luma[0 + 2 * 8] >>4);
2341 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2342 blk8x4Luma[2 + 2 * 8];
2343 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2344 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2345 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2346
2347 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2348 blk8x4Luma[4 + 0 * 8];
2349 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2350 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2351 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2352 (blk8x4Luma[4 + 1 * 8] >> 2);
2353 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2354 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2355 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2356 (blk8x4Luma[4 + 2 * 8] >>4);
2357 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2358 blk8x4Luma[6 + 2 * 8];
2359 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2360 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2361 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2362
2363 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2364 blk8x4Cb[1 + 0 * 4];
2365 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2366 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2367 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2368 (blk8x4Cr[2 + 0 * 4] >> 2);
2369 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2370 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2371 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2372 (blk8x4Cb[1 + 1 * 4] >>4);
2373 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2374 blk8x4Cb[2 + 1 * 4];
2375 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2376 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2377 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2378 }
2379}
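/*
 * Annotation (not in the original source): each group of eight 16-bit
 * cmBodyBuf words (128 bits) packs twelve 10-bit samples (a mix of luma
 * from the 8x4 block and chroma from the 4x2 blocks), with the remaining
 * bits unused. direction == 0 unpacks from the body buffer into the block
 * arrays; direction == 1 packs the blocks back for writing to DDR.
 */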
2380
2381static void corrRefillWithAmrisc (
2382 struct hevc_state_s *hevc,
2383 uint32_t cmHeaderBaseAddr,
2384 uint32_t picWidth,
2385 uint32_t ctuPosition)
2386{
2387 int32_t i;
2388 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2389 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2390 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2391
2392 uint16_t cmBodyBuf[32 * 18];
2393
2394 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2395 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2396 uint32_t stride64x64 = pic_width_x64 * 128;
2397 uint32_t addr_offset64x64_abv = stride64x64 *
2398 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2399 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2400 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2401 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2402 unsigned int tmpData32;
2403
2404 uint16_t blkBuf0Y[32];
2405 uint16_t blkBuf0Cb[8];
2406 uint16_t blkBuf0Cr[8];
2407 uint16_t blkBuf1Y[32];
2408 uint16_t blkBuf1Cb[8];
2409 uint16_t blkBuf1Cr[8];
2410 int32_t blkBufCnt = 0;
2411
2412 int32_t blkIdx;
2413
2414 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2415 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2416 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2417 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2418
2419 for (i = 0; i < 32 * 18; i++)
2420 cmBodyBuf[i] = 0;
2421
2422 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2423 "%s, %d\n", __func__, __LINE__);
2424 do {
2425 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2426 } while (tmpData32);
2427 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2428 "%s, %d\n", __func__, __LINE__);
2429
2430 hevc_print(hevc, H265_DEBUG_DETAIL,
2431 "cmBodyBuf from detbuf:\n");
2432 for (i = 0; i < 32 * 18; i++) {
2433 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2434 if (get_dbg_flag(hevc) &
2435 H265_DEBUG_DETAIL) {
2436 if ((i & 0xf) == 0)
2437 hevc_print_cont(hevc, 0, "\n");
2438 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2439 }
2440 }
2441 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2442
2443 for (i = 0; i < 32; i++)
2444 blkBuf0Y[i] = 0;
2445 for (i = 0; i < 8; i++)
2446 blkBuf0Cb[i] = 0;
2447 for (i = 0; i < 8; i++)
2448 blkBuf0Cr[i] = 0;
2449 for (i = 0; i < 32; i++)
2450 blkBuf1Y[i] = 0;
2451 for (i = 0; i < 8; i++)
2452 blkBuf1Cb[i] = 0;
2453 for (i = 0; i < 8; i++)
2454 blkBuf1Cr[i] = 0;
2455
2456 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2457 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2458 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2459 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2460 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2461 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2462 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2463
2464 if (!aboveCtuAvailable && inAboveCtu)
2465 continue;
2466
2467 /* detRefillBuf --> 8x4block*/
2468 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2469
2470 if (restoreEnable) {
2471 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2472 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2473 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2474 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2475 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2476 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2477 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2478 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2479 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2480 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2481 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2482 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2483 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2484 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2485 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2486 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2487 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2488 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2489 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2490 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2491 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2492 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2493 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2494 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2495 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2496 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2497 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2498 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2499 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2500 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2501 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2502 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2503
2504 /*Store data back to DDR*/
2505 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2506 }
2507
2508 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2509 }
2510
2511 hevc_print(hevc, H265_DEBUG_DETAIL,
2512 "cmBodyBuf to detbuf:\n");
2513 for (i = 0; i < 32 * 18; i++) {
2514 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2515 if (get_dbg_flag(hevc) &
2516 H265_DEBUG_DETAIL) {
2517 if ((i & 0xf) == 0)
2518 hevc_print_cont(hevc, 0, "\n");
2519 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2520 }
2521 }
2522 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2523
2524 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2525 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2526 "%s, %d\n", __func__, __LINE__);
2527 do {
2528 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2529 } while (tmpData32);
2530 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2531 "%s, %d\n", __func__, __LINE__);
2532}
2533
2534static void delrefill(struct hevc_state_s *hevc)
2535{
2536	/*
2537	 * corrRefill
2538	 *
2539	 * HEVC_SAO_DBG_MODE0: picGlobalVariable
2540	 * [31:30] error number
2541	 * [29:20] error2 ([9:7] tilex, [6:0] ctuy)
2542	 * [19:10] error1, [9:0] error0 */
2543 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2544 uint32_t errorIdx;
2545 uint32_t errorNum = (detResult>>30);
2546
2547 if (detResult) {
2548 hevc_print(hevc, H265_DEBUG_BUFMGR,
2549 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2550 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2551 uint32_t errorPos = errorIdx * 10;
2552 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2553 uint32_t tilex = (errorResult >> 7) - 1;
2554 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2555 + hevc->m_tile[0][tilex].width - 1;
2556 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2557 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2558 hevc_print(hevc, H265_DEBUG_BUFMGR,
2559 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2560				errorIdx, tilex, ctux, ctux, ctuy, ctuy);
2561 corrRefillWithAmrisc(
2562 hevc,
2563 (uint32_t)hevc->cur_pic->header_adr,
2564 hevc->pic_w,
2565 ctuPosition);
2566 }
2567
2568 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2569 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2570 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2571 }
2572}
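/*
 * Annotation (not in the original source): worked example of decoding
 * detResult with the register layout described above. If detResult ==
 * 0x40000085 then errorNum = 1 and errorResult = 0x085, giving
 * tilex = (0x085 >> 7) - 1 = 0 and ctuy = 0x085 & 0x7f = 5; ctux is then
 * taken from the right edge of tile 0.
 */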
2573#endif
2574
2575static void get_rpm_param(union param_u *params)
2576{
2577 int i;
2578 unsigned int data32;
2579
2580 for (i = 0; i < 128; i++) {
2581 do {
2582 data32 = READ_VREG(RPM_CMD_REG);
2583 /* hevc_print(hevc, 0, "%x\n", data32); */
2584 } while ((data32 & 0x10000) == 0);
2585 params->l.data[i] = data32 & 0xffff;
2586 /* hevc_print(hevc, 0, "%x\n", data32); */
2587 WRITE_VREG(RPM_CMD_REG, 0);
2588 }
2589}
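/*
 * Annotation (not in the original source): RPM handshake as implemented
 * above. The ucode posts each of the 128 parameter words in RPM_CMD_REG
 * with bit 16 set as a "data valid" flag; the driver busy-waits on that
 * bit, keeps the low 16 bits, and writes 0 back to acknowledge before the
 * next word is posted. Note there is no timeout on the polling loop.
 */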
2590
2591static int get_free_buf_idx(struct hevc_state_s *hevc)
2592{
2593 int index = INVALID_IDX;
2594 struct PIC_s *pic;
2595 int i;
2596
2597 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2598 pic = hevc->m_PIC[i];
2599 if (pic == NULL ||
2600 pic->index == -1 ||
2601 pic->BUF_index == -1)
2602 continue;
2603
2604 if (pic->output_mark == 0 &&
2605 pic->referenced == 0 &&
2606 pic->output_ready == 0 &&
2607 pic->cma_alloc_addr) {
2608 pic->output_ready = 1;
2609 index = i;
2610 break;
2611 }
2612 }
2613
2614 return index;
2615}
2616
2617static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2618{
2619 int i;
2620 struct PIC_s *pic;
2621 struct PIC_s *ret_pic = NULL;
2622 if (POC == INVALID_POC)
2623 return NULL;
2624 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2625 pic = hevc->m_PIC[i];
2626 if (pic == NULL || pic->index == -1 ||
2627 pic->BUF_index == -1)
2628 continue;
2629 if (pic->POC == POC) {
2630 if (ret_pic == NULL)
2631 ret_pic = pic;
2632 else {
2633 if (pic->decode_idx > ret_pic->decode_idx)
2634 ret_pic = pic;
2635 }
2636 }
2637 }
2638 return ret_pic;
2639}
2640
2641static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2642{
2643 int i;
2644 struct PIC_s *pic;
2645 struct PIC_s *ret_pic = NULL;
2646
2647 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2648 pic = hevc->m_PIC[i];
2649 if (pic == NULL || pic->index == -1 ||
2650 pic->BUF_index == -1)
2651 continue;
2652 if ((pic->POC == POC) && (pic->referenced)) {
2653 if (ret_pic == NULL)
2654 ret_pic = pic;
2655 else {
2656 if (pic->decode_idx > ret_pic->decode_idx)
2657 ret_pic = pic;
2658 }
2659 }
2660 }
2661
2662 if (ret_pic == NULL) {
2663 if (get_dbg_flag(hevc)) {
2664 hevc_print(hevc, 0,
2665 "Wrong, POC of %d is not in referenced list\n",
2666 POC);
2667 }
2668 ret_pic = get_pic_by_POC(hevc, POC);
2669 }
2670 return ret_pic;
2671}
2672
2673static unsigned int log2i(unsigned int val)
2674{
2675 unsigned int ret = -1;
2676
2677 while (val != 0) {
2678 val >>= 1;
2679 ret++;
2680 }
2681 return ret;
2682}
2683
2684static int init_buf_spec(struct hevc_state_s *hevc);
2685
2686static bool v4l_is_there_vframe_bound(struct hevc_state_s *hevc)
2687{
2688 int i;
2689
2690 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2691 struct PIC_s *pic = hevc->m_PIC[i];
2692
2693 if (pic && pic->vframe_bound)
2694 return true;
2695 }
2696
2697 return false;
2698}
2699
2700static void v4l_mmu_buffer_release(struct hevc_state_s *hevc)
2701{
2702 int i;
2703
2704 /* release workspace */
2705 if (hevc->bmmu_box)
2706 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2707 BMMU_WORKSPACE_ID);
2708 /*
2709	 * Only when a vframe comes back to the driver can we be sure that the
2710	 * vframe and its fd are still related. If playback exits, the capture
2711	 * buffers must be released by the upper application when the fd is
2712	 * closed; the other buffers are released by the driver.
2713 */
2714 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2715 struct PIC_s *pic = hevc->m_PIC[i];
2716
2717 if (pic && !pic->vframe_bound) {
2718 if (hevc->bmmu_box)
2719 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2720 VF_BUFFER_IDX(i));
2721 if (hevc->mmu_box)
2722 decoder_mmu_box_free_idx(hevc->mmu_box, i);
2723
2724 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
2725 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
2726 __func__, i, hevc->bmmu_box, hevc->mmu_box);
2727 }
2728 }
2729}
2730
2731static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2732{
2733 if (hevc->is_used_v4l &&
2734 v4l_is_there_vframe_bound(hevc)) {
2735 if (get_double_write_mode(hevc) != 0x10) {
2736 v4l_mmu_buffer_release(hevc);
2737 return;
2738 }
2739 }
2740
2741 if (hevc->mmu_box)
2742 decoder_mmu_box_free(hevc->mmu_box);
2743 hevc->mmu_box = NULL;
2744
2745 if (hevc->bmmu_box)
2746 decoder_bmmu_box_free(hevc->bmmu_box);
2747 hevc->bmmu_box = NULL;
2748}
2749static int init_mmu_buffers(struct hevc_state_s *hevc)
2750{
2751 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2752 CODEC_MM_FLAGS_TVP : 0;
2753 int buf_size = 64;
2754
2755 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2756 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2757 buf_size = 24;
2758 }
2759
2760 if (get_dbg_flag(hevc)) {
2761 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2762 __func__, hevc->max_pic_w, hevc->max_pic_h);
2763 }
2764
2765 hevc->need_cache_size = buf_size * SZ_1M;
2766 hevc->sc_start_time = get_jiffies_64();
2767 if (hevc->mmu_enable
2768 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2769 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2770 hevc->index,
2771 MAX_REF_PIC_NUM,
2772 buf_size * SZ_1M,
2773 tvp_flag
2774 );
2775 if (!hevc->mmu_box) {
2776 pr_err("h265 alloc mmu box failed!!\n");
2777 return -1;
2778 }
2779 }
2780
2781 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2782 hevc->index,
2783 BMMU_MAX_BUFFERS,
2784 4 + PAGE_SHIFT,
2785 CODEC_MM_FLAGS_CMA_CLEAR |
2786 CODEC_MM_FLAGS_FOR_VDECODER |
2787 tvp_flag);
2788 if (!hevc->bmmu_box) {
2789 if (hevc->mmu_box)
2790 decoder_mmu_box_free(hevc->mmu_box);
2791 hevc->mmu_box = NULL;
2792		pr_err("h265 alloc bmmu box failed!!\n");
2793 return -1;
2794 }
2795 return 0;
2796}
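/*
 * Annotation (not in the original source): the MMU box appears to be sized
 * at 24 MB for streams up to 1920x1088 and 64 MB otherwise, and is skipped
 * entirely when double write mode 0x10 (no compressed output) is in use;
 * the bmmu box is always allocated, with what looks like a 64 KB
 * (4 + PAGE_SHIFT) alignment parameter.
 */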
2797
2798struct buf_stru_s
2799{
2800 int lcu_total;
2801 int mc_buffer_size_h;
2802 int mc_buffer_size_u_v_h;
2803};
2804
2805#ifndef MV_USE_FIXED_BUF
2806static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2807{
2808 int i;
2809 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2810 if (hevc->m_mv_BUF[i].start_adr) {
2811 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2812 hevc_print(hevc, 0,
2813 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2814 i, hevc->m_mv_BUF[i].start_adr,
2815 hevc->m_mv_BUF[i].size,
2816 hevc->m_mv_BUF[i].used_flag);
2817 decoder_bmmu_box_free_idx(
2818 hevc->bmmu_box,
2819 MV_BUFFER_IDX(i));
2820 hevc->m_mv_BUF[i].start_adr = 0;
2821 hevc->m_mv_BUF[i].size = 0;
2822 hevc->m_mv_BUF[i].used_flag = 0;
2823 }
2824 }
2825 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2826 if (hevc->m_PIC[i] != NULL)
2827 hevc->m_PIC[i]->mv_buf_index = -1;
2828 }
2829}
2830
2831static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2832{
2833 int ret = 0;
2834 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2835 if (decoder_bmmu_box_alloc_buf_phy
2836 (hevc->bmmu_box,
2837 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2838 DRIVER_NAME,
2839 &hevc->m_mv_BUF[i].start_adr) < 0) {
2840 hevc->m_mv_BUF[i].start_adr = 0;
2841 ret = -1;
2842 } else {
2843 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2844 hevc->m_mv_BUF[i].used_flag = 0;
2845 ret = 0;
2846 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2847 hevc_print(hevc, 0,
2848 "MV Buffer %d: start_adr %p size %x\n",
2849 i,
2850 (void *)hevc->m_mv_BUF[i].start_adr,
2851 hevc->m_mv_BUF[i].size);
2852 }
2853 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2854 void *mem_start_virt;
2855 mem_start_virt =
2856 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2857 if (mem_start_virt) {
2858 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2859 codec_mm_dma_flush(mem_start_virt,
2860 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2861 } else {
2862 mem_start_virt = codec_mm_vmap(
2863 hevc->m_mv_BUF[i].start_adr,
2864 hevc->m_mv_BUF[i].size);
2865 if (mem_start_virt) {
2866 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2867 codec_mm_dma_flush(mem_start_virt,
2868 hevc->m_mv_BUF[i].size,
2869 DMA_TO_DEVICE);
2870 codec_mm_unmap_phyaddr(mem_start_virt);
2871 } else {
2872					/* no virtual address for TVP playback;
2873					 * may need to be cleared by the ucode. */
2874 pr_err("ref %s mem_start_virt failed\n", __func__);
2875 }
2876 }
2877 }
2878 }
2879 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2880 return ret;
2881}
2882#endif
2883
2884static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2885{
2886#ifdef MV_USE_FIXED_BUF
2887 if (pic && pic->index >= 0) {
2888 if (IS_8K_SIZE(pic->width, pic->height)) {
2889 pic->mpred_mv_wr_start_addr =
2890 hevc->work_space_buf->mpred_mv.buf_start
2891 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2892 } else {
2893 pic->mpred_mv_wr_start_addr =
2894 hevc->work_space_buf->mpred_mv.buf_start
2895 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2896 }
2897 }
2898 return 0;
2899#else
2900 int i;
2901 int ret = -1;
2902 int new_size;
2903 if (IS_8K_SIZE(pic->width, pic->height))
2904 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2905 else if (IS_4K_SIZE(pic->width, pic->height))
2906 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2907 else
2908 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2909 if (new_size != hevc->mv_buf_size) {
2910 dealloc_mv_bufs(hevc);
2911 hevc->mv_buf_size = new_size;
2912 }
2913 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2914 if (hevc->m_mv_BUF[i].start_adr &&
2915 hevc->m_mv_BUF[i].used_flag == 0) {
2916 hevc->m_mv_BUF[i].used_flag = 1;
2917 ret = i;
2918 break;
2919 }
2920 }
2921 if (ret < 0) {
2922 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2923 if (hevc->m_mv_BUF[i].start_adr == 0) {
2924 if (alloc_mv_buf(hevc, i) >= 0) {
2925 hevc->m_mv_BUF[i].used_flag = 1;
2926 ret = i;
2927 }
2928 break;
2929 }
2930 }
2931 }
2932
2933 if (ret >= 0) {
2934 pic->mv_buf_index = ret;
2935 pic->mpred_mv_wr_start_addr =
2936 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2937 (~0xffff);
2938 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2939 "%s => %d (0x%x) size 0x%x\n",
2940 __func__, ret,
2941 pic->mpred_mv_wr_start_addr,
2942 hevc->m_mv_BUF[ret].size);
2943
2944 } else {
2945 hevc_print(hevc, 0,
2946 "%s: Error, mv buf is not enough\n",
2947 __func__);
2948 }
2949 return ret;
2950
2951#endif
2952}
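/*
 * Annotation (not in the original source): MV buffers are sized by
 * resolution class (8K / 4K / other) with 64 KB of slack, and the address
 * programmed as mpred_mv_wr_start_addr is rounded up to the next 64 KB
 * boundary: e.g. a start_adr of 0x12345678 yields
 * (0x12345678 + 0xffff) & ~0xffff = 0x12350000.
 */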
2953
2954static void put_mv_buf(struct hevc_state_s *hevc,
2955 struct PIC_s *pic)
2956{
2957#ifndef MV_USE_FIXED_BUF
2958 int i = pic->mv_buf_index;
2959 if (i < 0 || i >= MAX_REF_PIC_NUM) {
2960 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2961 "%s: index %d beyond range\n",
2962 __func__, i);
2963 return;
2964 }
2965 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2966 "%s(%d): used_flag(%d)\n",
2967 __func__, i,
2968 hevc->m_mv_BUF[i].used_flag);
2969
2970 if (hevc->m_mv_BUF[i].start_adr &&
2971 hevc->m_mv_BUF[i].used_flag)
2972 hevc->m_mv_BUF[i].used_flag = 0;
2973 pic->mv_buf_index = -1;
2974#endif
2975}
2976
2977static int cal_current_buf_size(struct hevc_state_s *hevc,
2978 struct buf_stru_s *buf_stru)
2979{
2980
2981 int buf_size;
2982 int pic_width = hevc->pic_w;
2983 int pic_height = hevc->pic_h;
2984 int lcu_size = hevc->lcu_size;
2985 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
2986 1 : pic_width / lcu_size;
2987 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
2988 1 : pic_height / lcu_size;
2989 /*SUPPORT_10BIT*/
2990 int losless_comp_header_size = compute_losless_comp_header_size
2991 (pic_width, pic_height);
2992 /*always alloc buf for 10bit*/
2993 int losless_comp_body_size = compute_losless_comp_body_size
2994 (hevc, pic_width, pic_height, 0);
2995 int mc_buffer_size = losless_comp_header_size
2996 + losless_comp_body_size;
2997 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
2998 int mc_buffer_size_u_v_h = 0;
2999
3000 int dw_mode = get_double_write_mode(hevc);
3001
3002 if (hevc->mmu_enable) {
3003 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3004 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3005 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
3006 << 16;
3007 else
3008 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
3009 << 16;
3010 } else
3011 buf_size = 0;
3012
3013 if (dw_mode) {
3014 int pic_width_dw = pic_width /
3015 get_double_write_ratio(hevc, dw_mode);
3016 int pic_height_dw = pic_height /
3017 get_double_write_ratio(hevc, dw_mode);
3018
3019 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
3020 pic_width_dw / lcu_size + 1 :
3021 pic_width_dw / lcu_size;
3022 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
3023 pic_height_dw / lcu_size + 1 :
3024 pic_height_dw / lcu_size;
3025 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
3026
3027 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
3028 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
3029 /*64k alignment*/
3030 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
3031 }
3032
3033 if ((!hevc->mmu_enable) &&
3034 ((dw_mode & 0x10) == 0)) {
3035		/* compressed mode without MMU:
3036		 * an extra buffer is needed for the compressed frame data */
3037 buf_size += (mc_buffer_size_h << 16);
3038 }
3039
3040	/* in case the start address is not 64KB aligned */
3041 if (buf_size > 0)
3042 buf_size += 0x10000;
3043
3044 if (buf_stru) {
3045 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
3046 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
3047 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
3048 }
3049
3050	hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
3051 pic_width, pic_height, losless_comp_header_size,
3052 losless_comp_body_size, mc_buffer_size_h,
3053 mc_buffer_size_u_v_h, buf_size);
3054
3055 return buf_size;
3056}
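/*
 * Annotation (not in the original source): a worked example of the size
 * returned above, assuming a 3840x2160 stream, MMU enabled, 64x64 LCUs
 * and double write mode 4 (1/2 scaling):
 *   header: MMU_COMPRESS_HEADER_SIZE (0x48000) rounded up to 64 KB = 0x50000
 *   double write: 1920x1080 -> 30x17 LCUs -> mc_buffer_size_u_v =
 *     510 * 64 * 64 / 2 = 0xff000, rounded to 16 * 64 KB, times 3 = 0x300000
 *   plus 0x10000 alignment slack -> buf_size = 0x360000 (~3.4 MB)
 */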
3057
3058static int v4l_alloc_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
3059{
3060 int ret = -1;
3061 int i = pic->index;
3062 struct vdec_v4l2_buffer *fb = NULL;
3063
3064 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3065 return ret;
3066
3067 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3068 if (ret < 0) {
3069 hevc_print(hevc, 0, "[%d] H265 get buffer fail.\n",
3070 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id);
3071 return ret;
3072 }
3073
3074 if (hevc->mmu_enable) {
3075 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3076 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3077 hevc->m_BUF[i].header_size =
3078 ALIGN(MMU_COMPRESS_8K_HEADER_SIZE, 0x10000);
3079 else
3080 hevc->m_BUF[i].header_size =
3081 ALIGN(MMU_COMPRESS_HEADER_SIZE, 0x10000);
3082
3083 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
3084 VF_BUFFER_IDX(i), hevc->m_BUF[i].header_size,
3085 DRIVER_NAME, &hevc->m_BUF[i].header_addr);
3086 if (ret < 0) {
3087 hevc_print(hevc, PRINT_FLAG_ERROR,
3088 "%s[%d], header size: %d, no mem fatal err\n",
3089 __func__, i, hevc->m_BUF[i].header_size);
3090 return ret;
3091 }
3092 }
3093
3094 hevc->m_BUF[i].used_flag = 0;
3095 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3096 pic->cma_alloc_addr = hevc->m_BUF[i].v4l_ref_buf_addr;
3097 if (fb->num_planes == 1) {
3098 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3099 hevc->m_BUF[i].size = fb->m.mem[0].size;
3100 hevc->m_BUF[i].luma_size = fb->m.mem[0].offset;
3101 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3102 } else if (fb->num_planes == 2) {
3103 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3104 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3105 hevc->m_BUF[i].luma_size = fb->m.mem[0].size;
3106 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3107 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3108 }
3109
3110 return ret;
3111}
3112
3113static int alloc_buf(struct hevc_state_s *hevc)
3114{
3115 int i;
3116 int ret = -1;
3117 int buf_size = cal_current_buf_size(hevc, NULL);
3118
3119 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3120 return ret;
3121
3122 for (i = 0; i < BUF_POOL_SIZE; i++) {
3123 if (hevc->m_BUF[i].start_adr == 0)
3124 break;
3125 }
3126 if (i < BUF_POOL_SIZE) {
3127 if (buf_size > 0) {
3128 ret = decoder_bmmu_box_alloc_buf_phy
3129 (hevc->bmmu_box,
3130 VF_BUFFER_IDX(i), buf_size,
3131 DRIVER_NAME,
3132 &hevc->m_BUF[i].start_adr);
3133 if (ret < 0) {
3134 hevc->m_BUF[i].start_adr = 0;
3135 if (i <= 8) {
3136 hevc->fatal_error |=
3137 DECODER_FATAL_ERROR_NO_MEM;
3138 hevc_print(hevc, PRINT_FLAG_ERROR,
3139 "%s[%d], size: %d, no mem fatal err\n",
3140 __func__, i, buf_size);
3141 }
3142 }
3143
3144 if (ret >= 0) {
3145 hevc->m_BUF[i].size = buf_size;
3146 hevc->m_BUF[i].used_flag = 0;
3147 ret = 0;
3148
3149 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3150 hevc_print(hevc, 0,
3151 "Buffer %d: start_adr %p size %x\n",
3152 i,
3153 (void *)hevc->m_BUF[i].start_adr,
3154 hevc->m_BUF[i].size);
3155 }
3156			/* flush the buffer to make sure there is no dirty cache */
3157 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3158 void *mem_start_virt;
3159 mem_start_virt =
3160 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3161 if (mem_start_virt) {
3162 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3163 codec_mm_dma_flush(mem_start_virt,
3164 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3165 } else {
3166 mem_start_virt = codec_mm_vmap(
3167 hevc->m_BUF[i].start_adr,
3168 hevc->m_BUF[i].size);
3169 if (mem_start_virt) {
3170 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3171 codec_mm_dma_flush(mem_start_virt,
3172 hevc->m_BUF[i].size,
3173 DMA_TO_DEVICE);
3174 codec_mm_unmap_phyaddr(mem_start_virt);
3175 } else {
3176						/* no virtual address for TVP playback;
3177						 * may need to be cleared by the ucode. */
3178 pr_err("ref %s mem_start_virt failed\n", __func__);
3179 }
3180 }
3181 }
3182 }
3183 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3184 } else
3185 ret = 0;
3186 }
3187
3188 if (ret >= 0) {
3189 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3190 hevc_print(hevc, 0,
3191 "alloc buf(%d) for %d/%d size 0x%x) => %p\n",
3192 i, hevc->pic_w, hevc->pic_h,
3193 buf_size,
3194 hevc->m_BUF[i].start_adr);
3195 }
3196 } else {
3197 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3198 hevc_print(hevc, 0,
3199 "alloc buf(%d) for %d/%d size 0x%x) => Fail!!!\n",
3200 i, hevc->pic_w, hevc->pic_h,
3201 buf_size);
3202 }
3203 }
3204 return ret;
3205}
3206
3207static void set_buf_unused(struct hevc_state_s *hevc, int i)
3208{
3209 if (i >= 0 && i < BUF_POOL_SIZE)
3210 hevc->m_BUF[i].used_flag = 0;
3211}
3212
3213static void dealloc_unused_buf(struct hevc_state_s *hevc)
3214{
3215 int i;
3216 for (i = 0; i < BUF_POOL_SIZE; i++) {
3217 if (hevc->m_BUF[i].start_adr &&
3218 hevc->m_BUF[i].used_flag == 0) {
3219 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3220 hevc_print(hevc, 0,
3221 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3222 i, hevc->m_BUF[i].start_adr,
3223 hevc->m_BUF[i].size);
3224 }
3225 if (!hevc->is_used_v4l)
3226 decoder_bmmu_box_free_idx(
3227 hevc->bmmu_box,
3228 VF_BUFFER_IDX(i));
3229 hevc->m_BUF[i].start_adr = 0;
3230 hevc->m_BUF[i].size = 0;
3231 }
3232 }
3233}
3234
3235static void dealloc_pic_buf(struct hevc_state_s *hevc,
3236 struct PIC_s *pic)
3237{
3238 int i = pic->BUF_index;
3239 pic->BUF_index = -1;
3240 if (i >= 0 &&
3241 i < BUF_POOL_SIZE &&
3242 hevc->m_BUF[i].start_adr) {
3243 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3244 hevc_print(hevc, 0,
3245 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3246 i, hevc->m_BUF[i].start_adr,
3247 hevc->m_BUF[i].size);
3248 }
3249
3250 if (!hevc->is_used_v4l)
3251 decoder_bmmu_box_free_idx(
3252 hevc->bmmu_box,
3253 VF_BUFFER_IDX(i));
3254 hevc->m_BUF[i].used_flag = 0;
3255 hevc->m_BUF[i].start_adr = 0;
3256 hevc->m_BUF[i].size = 0;
3257 }
3258}
3259
3260static int get_work_pic_num(struct hevc_state_s *hevc)
3261{
3262 int used_buf_num = 0;
3263 int sps_pic_buf_diff = 0;
3264
3265 if (get_dynamic_buf_num_margin(hevc) > 0) {
3266 if ((!hevc->sps_num_reorder_pics_0) &&
3267 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3268 /* the range of sps_num_reorder_pics_0 is in
3269 [0, sps_max_dec_pic_buffering_minus1_0] */
3270 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3271 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3272 } else
3273 used_buf_num = hevc->sps_num_reorder_pics_0
3274 + get_dynamic_buf_num_margin(hevc);
3275
3276 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3277 - hevc->sps_num_reorder_pics_0;
3278#ifdef MULTI_INSTANCE_SUPPORT
3279 /*
3280		 need one more buffer for multi-instance mode, as
3281		 apply_ref_pic_set() has no chance to run and
3282		 clear the referenced flag in some cases
3283 */
3284 if (hevc->m_ins_flag)
3285 used_buf_num++;
3286#endif
3287 } else
3288 used_buf_num = max_buf_num;
3289
3290 if (hevc->save_buffer_mode)
3291 hevc_print(hevc, 0,
3292			"save_buffer_mode: dynamic_buf_num_margin %d ----> %d\n",
3293 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3294
3295 if (sps_pic_buf_diff >= 4)
3296 {
3297 used_buf_num += 1;
3298 }
3299
3300 if (used_buf_num > MAX_BUF_NUM)
3301 used_buf_num = MAX_BUF_NUM;
3302 return used_buf_num;
3303}
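/*
 * Annotation (not in the original source): example of the DPB sizing above.
 * With sps_num_reorder_pics_0 = 4, a dynamic buffer margin of 7 and
 * multi-instance mode, used_buf_num = 4 + 7 + 1 = 12; if
 * sps_max_dec_pic_buffering_minus1_0 is at least 4 larger than
 * sps_num_reorder_pics_0, one more buffer is added, and the result is
 * finally clamped to MAX_BUF_NUM.
 */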
3304
3305static int get_alloc_pic_count(struct hevc_state_s *hevc)
3306{
3307 int alloc_pic_count = 0;
3308 int i;
3309 struct PIC_s *pic;
3310 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3311 pic = hevc->m_PIC[i];
3312 if (pic && pic->index >= 0)
3313 alloc_pic_count++;
3314 }
3315 return alloc_pic_count;
3316}
3317
3318static int v4l_config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3319{
3320 int i = pic->index;
3321 int dw_mode = get_double_write_mode(hevc);
3322
3323 if (hevc->mmu_enable)
3324 pic->header_adr = hevc->m_BUF[i].header_addr;
3325
3326 pic->BUF_index = i;
3327 pic->POC = INVALID_POC;
3328 pic->mc_canvas_y = pic->index;
3329 pic->mc_canvas_u_v = pic->index;
3330
3331 if (dw_mode & 0x10) {
3332 pic->mc_y_adr = hevc->m_BUF[i].start_adr;
3333 pic->mc_u_v_adr = pic->mc_y_adr + hevc->m_BUF[i].luma_size;
3334 pic->mc_canvas_y = (pic->index << 1);
3335 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3336
3337 pic->dw_y_adr = pic->mc_y_adr;
3338 pic->dw_u_v_adr = pic->mc_u_v_adr;
3339 } else if (dw_mode) {
3340 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3341 pic->dw_u_v_adr = pic->dw_y_adr + hevc->m_BUF[i].luma_size;
3342 }
3343
3344 return 0;
3345}
3346
3347static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3348{
3349 int ret = -1;
3350 int i;
3351 /*int lcu_size_log2 = hevc->lcu_size_log2;
3352 int MV_MEM_UNIT=lcu_size_log2==
3353 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3354 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3355 5 ? 0x80 : 0x20;
3356 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3357 hevc->work_space_buf->mpred_mv.buf_size;*/
3358 unsigned int y_adr = 0;
3359 struct buf_stru_s buf_stru;
3360 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3361 int dw_mode = get_double_write_mode(hevc);
3362
3363 for (i = 0; i < BUF_POOL_SIZE; i++) {
3364 if (hevc->m_BUF[i].start_adr != 0 &&
3365 hevc->m_BUF[i].used_flag == 0 &&
3366 buf_size <= hevc->m_BUF[i].size) {
3367 hevc->m_BUF[i].used_flag = 1;
3368 break;
3369 }
3370 }
3371
3372 if (i >= BUF_POOL_SIZE)
3373 return -1;
3374
3375 if (hevc->mmu_enable) {
3376 pic->header_adr = hevc->m_BUF[i].start_adr;
3377 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3378 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3379 y_adr = hevc->m_BUF[i].start_adr +
3380 MMU_COMPRESS_8K_HEADER_SIZE;
3381 else
3382 y_adr = hevc->m_BUF[i].start_adr +
3383 MMU_COMPRESS_HEADER_SIZE;
3384 } else
3385 y_adr = hevc->m_BUF[i].start_adr;
3386
3387 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3388
3389 pic->POC = INVALID_POC;
3390	/* ensure get_pic_by_POC() does not
3391	 * return a buffer that has not been decoded yet */
3392 pic->BUF_index = i;
3393
3394 if ((!hevc->mmu_enable) &&
3395 ((dw_mode & 0x10) == 0)
3396 ) {
3397 pic->mc_y_adr = y_adr;
3398 y_adr += (buf_stru.mc_buffer_size_h << 16);
3399 }
3400 pic->mc_canvas_y = pic->index;
3401 pic->mc_canvas_u_v = pic->index;
3402 if (dw_mode & 0x10) {
3403 pic->mc_y_adr = y_adr;
3404 pic->mc_u_v_adr = y_adr +
3405 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3406 pic->mc_canvas_y = (pic->index << 1);
3407 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3408
3409 pic->dw_y_adr = pic->mc_y_adr;
3410 pic->dw_u_v_adr = pic->mc_u_v_adr;
3411 } else if (dw_mode) {
3412 pic->dw_y_adr = y_adr;
3413 pic->dw_u_v_adr = pic->dw_y_adr +
3414 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3415 }
3416
3417 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3418 hevc_print(hevc, 0,
3419 "%s index %d BUF_index %d mc_y_adr %x\n",
3420 __func__, pic->index,
3421 pic->BUF_index, pic->mc_y_adr);
3422 if (hevc->mmu_enable &&
3423 dw_mode)
3424 hevc_print(hevc, 0,
3425 "mmu double write adr %ld\n",
3426 pic->cma_alloc_addr);
3427 }
3428 ret = 0;
3429
3430 return ret;
3431}
3432
3433static void init_pic_list(struct hevc_state_s *hevc)
3434{
3435 int i;
3436 int init_buf_num = get_work_pic_num(hevc);
3437 int dw_mode = get_double_write_mode(hevc);
3438 struct vdec_s *vdec = hw_to_vdec(hevc);
3439	/* decoder buffer allocation is delayed when working in v4l mode. */
3440 if (!hevc->is_used_v4l) {
3441 for (i = 0; i < init_buf_num; i++) {
3442 if (alloc_buf(hevc) < 0) {
3443 if (i <= 8) {
3444				/* if (i + 1) >= 9 buffers were already
3445				 * allocated, don't report a fatal error. */
3446 hevc->fatal_error |=
3447 DECODER_FATAL_ERROR_NO_MEM;
3448 }
3449 break;
3450 }
3451 }
3452 }
3453
3454 for (i = 0; i < init_buf_num; i++) {
3455 struct PIC_s *pic = hevc->m_PIC[i];
3456
3457 if (!pic) {
3458 pic = vmalloc(sizeof(struct PIC_s));
3459 if (pic == NULL) {
3460 hevc_print(hevc, 0,
3461 "%s: alloc pic %d fail!!!\n",
3462 __func__, i);
3463 break;
3464 }
3465 hevc->m_PIC[i] = pic;
3466 }
3467 memset(pic, 0, sizeof(struct PIC_s));
3468
3469 pic->index = i;
3470 pic->BUF_index = -1;
3471 pic->mv_buf_index = -1;
3472 if (vdec->parallel_dec == 1) {
3473 pic->y_canvas_index = -1;
3474 pic->uv_canvas_index = -1;
3475 }
3476
3477 pic->width = hevc->pic_w;
3478 pic->height = hevc->pic_h;
3479 pic->double_write_mode = dw_mode;
3480
3481		/* canvas configuration is delayed when working in v4l mode. */
3482 if (!hevc->is_used_v4l) {
3483 if (config_pic(hevc, pic) < 0) {
3484 if (get_dbg_flag(hevc))
3485 hevc_print(hevc, 0,
3486 "Config_pic %d fail\n", pic->index);
3487 pic->index = -1;
3488 i++;
3489 break;
3490 }
3491
3492 if (pic->double_write_mode)
3493 set_canvas(hevc, pic);
3494 }
3495 }
3496
3497 for (; i < MAX_REF_PIC_NUM; i++) {
3498 struct PIC_s *pic = hevc->m_PIC[i];
3499
3500 if (!pic) {
3501 pic = vmalloc(sizeof(struct PIC_s));
3502 if (pic == NULL) {
3503 hevc_print(hevc, 0,
3504 "%s: alloc pic %d fail!!!\n",
3505 __func__, i);
3506 break;
3507 }
3508 hevc->m_PIC[i] = pic;
3509 }
3510 memset(pic, 0, sizeof(struct PIC_s));
3511
3512 pic->index = -1;
3513 pic->BUF_index = -1;
3514 if (vdec->parallel_dec == 1) {
3515 pic->y_canvas_index = -1;
3516 pic->uv_canvas_index = -1;
3517 }
3518 }
3519
3520}
3521
3522static void uninit_pic_list(struct hevc_state_s *hevc)
3523{
3524 struct vdec_s *vdec = hw_to_vdec(hevc);
3525 int i;
3526#ifndef MV_USE_FIXED_BUF
3527 dealloc_mv_bufs(hevc);
3528#endif
3529 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3530 struct PIC_s *pic = hevc->m_PIC[i];
3531
3532 if (pic) {
3533 if (vdec->parallel_dec == 1) {
3534 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3535 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3536 }
3537 release_aux_data(hevc, pic);
3538 vfree(pic);
3539 hevc->m_PIC[i] = NULL;
3540 }
3541 }
3542}
3543
3544#ifdef LOSLESS_COMPRESS_MODE
3545static void init_decode_head_hw(struct hevc_state_s *hevc)
3546{
3547
3548 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3549 unsigned int data32;
3550
3551 int losless_comp_header_size =
3552 compute_losless_comp_header_size(hevc->pic_w,
3553 hevc->pic_h);
3554 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3555 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3556
3557 hevc->losless_comp_body_size = losless_comp_body_size;
3558
3559
3560 if (hevc->mmu_enable) {
3561 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3562 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3563 } else {
3564 if (hevc->mem_saving_mode == 1)
3565 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3566 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3567 else
3568 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3569 ((workaround_enable & 2) ? 1 : 0));
3570 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3571 /*
3572 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3573 * //8-bit mode
3574 */
3575 }
3576 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3577 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3578 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3579
3580 if (hevc->mmu_enable) {
3581 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3582 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3583 buf_spec->mmu_vbh.buf_start +
3584 buf_spec->mmu_vbh.buf_size/2);
3585 data32 = READ_VREG(HEVC_SAO_CTRL9);
3586 data32 |= 0x1;
3587 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3588
3589 /* use HEVC_CM_HEADER_START_ADDR */
3590 data32 = READ_VREG(HEVC_SAO_CTRL5);
3591 data32 |= (1<<10);
3592 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3593 }
3594
3595 if (!hevc->m_ins_flag)
3596 hevc_print(hevc, 0,
3597 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3598 __func__, hevc->pic_w, hevc->pic_h,
3599 losless_comp_body_size, losless_comp_header_size);
3600
3601}
3602#endif
3603#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3604
3605static void init_pic_list_hw(struct hevc_state_s *hevc)
3606{
3607 int i;
3608 int cur_pic_num = MAX_REF_PIC_NUM;
3609 int dw_mode = get_double_write_mode(hevc);
3610 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3611 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3612 (0x1 << 1) | (0x1 << 2));
3613 else
3614 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3615
3616 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3617 if (hevc->m_PIC[i] == NULL ||
3618 hevc->m_PIC[i]->index == -1) {
3619 cur_pic_num = i;
3620 break;
3621 }
3622 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3623 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3624 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3625 hevc->m_PIC[i]->header_adr>>5);
3626 else
3627 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3628 hevc->m_PIC[i]->mc_y_adr >> 5);
3629 } else
3630 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3631 hevc->m_PIC[i]->mc_y_adr |
3632 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3633 if (dw_mode & 0x10) {
3634 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3635 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3636 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3637 }
3638 else
3639 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3640 hevc->m_PIC[i]->mc_u_v_adr |
3641 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3642 | 0x1);
3643 }
3644 }
3645 if (cur_pic_num == 0)
3646 return;
3647 for (; i < MAX_REF_PIC_NUM; i++) {
3648 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3649 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3650 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3651 hevc->m_PIC[cur_pic_num-1]->header_adr>>5);
3652 else
3653 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3654 hevc->m_PIC[cur_pic_num-1]->mc_y_adr >> 5);
3655#ifndef LOSLESS_COMPRESS_MODE
3656 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3657 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr >> 5);
3658#endif
3659 } else {
3660 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3661 hevc->m_PIC[cur_pic_num-1]->mc_y_adr|
3662 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_y<<8)
3663 | 0x1);
3664#ifndef LOSLESS_COMPRESS_MODE
3665 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3666 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr|
3667 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_u_v<<8)
3668 | 0x1);
3669#endif
3670 }
3671 }
3672
3673 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3674
3675 /* Zero out canvas registers in IPP -- avoid simulation X */
3676 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3677 (0 << 8) | (0 << 1) | 1);
3678 for (i = 0; i < 32; i++)
3679 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3680
3681#ifdef LOSLESS_COMPRESS_MODE
3682 if ((dw_mode & 0x10) == 0)
3683 init_decode_head_hw(hevc);
3684#endif
3685
3686}
3687
3688
3689static void dump_pic_list(struct hevc_state_s *hevc)
3690{
3691 int i;
3692 struct PIC_s *pic;
3693
3694 hevc_print(hevc, 0,
3695 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3696 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3697 pic = hevc->m_PIC[i];
3698 if (pic == NULL || pic->index == -1)
3699 continue;
3700 hevc_print_cont(hevc, 0,
3701 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3702 pic->index, pic->BUF_index,
3703#ifndef MV_USE_FIXED_BUF
3704 pic->mv_buf_index,
3705#else
3706 -1,
3707#endif
3708 pic->decode_idx, pic->POC, pic->referenced);
3709 hevc_print_cont(hevc, 0,
3710 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3711 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3712 pic->width, pic->height);
3713 hevc_print_cont(hevc, 0,
3714 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3715 pic->output_ready, pic->mpred_mv_wr_start_addr,
3716 pic->vf_ref);
3717 }
3718}
3719
3720static void clear_referenced_flag(struct hevc_state_s *hevc)
3721{
3722 int i;
3723 struct PIC_s *pic;
3724 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3725 pic = hevc->m_PIC[i];
3726 if (pic == NULL || pic->index == -1)
3727 continue;
3728 if (pic->referenced) {
3729 pic->referenced = 0;
3730 put_mv_buf(hevc, pic);
3731 }
3732 }
3733}
3734
3735static void clear_poc_flag(struct hevc_state_s *hevc)
3736{
3737 int i;
3738 struct PIC_s *pic;
3739 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3740 pic = hevc->m_PIC[i];
3741 if (pic == NULL || pic->index == -1)
3742 continue;
3743 pic->POC = INVALID_POC;
3744 }
3745}
3746
3747static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3748 unsigned char flush_flag)
3749{
3750 int num_pic_not_yet_display = 0;
3751 int i;
3752 struct PIC_s *pic;
3753 struct PIC_s *pic_display = NULL;
3754 struct vdec_s *vdec = hw_to_vdec(hevc);
3755
3756 if (hevc->i_only & 0x4) {
3757 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3758 pic = hevc->m_PIC[i];
3759 if (pic == NULL ||
3760 (pic->index == -1) ||
3761 (pic->BUF_index == -1) ||
3762 (pic->POC == INVALID_POC))
3763 continue;
3764 if (pic->output_mark) {
3765 if (pic_display) {
3766 if (pic->decode_idx <
3767 pic_display->decode_idx)
3768 pic_display = pic;
3769
3770 } else
3771 pic_display = pic;
3772
3773 }
3774 }
3775 if (pic_display) {
3776 pic_display->output_mark = 0;
3777 pic_display->recon_mark = 0;
3778 pic_display->output_ready = 1;
3779 pic_display->referenced = 0;
3780 put_mv_buf(hevc, pic_display);
3781 }
3782 } else {
3783 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3784 pic = hevc->m_PIC[i];
3785 if (pic == NULL ||
3786 (pic->index == -1) ||
3787 (pic->BUF_index == -1) ||
3788 (pic->POC == INVALID_POC))
3789 continue;
3790 if (pic->output_mark)
3791 num_pic_not_yet_display++;
3792 if (pic->slice_type == 2 &&
3793 hevc->vf_pre_count == 0 &&
3794 fast_output_enable & 0x1) {
3795 /*fast output for first I picture*/
3796 pic->num_reorder_pic = 0;
3797 if (vdec->master || vdec->slave)
3798 pic_display = pic;
3799 hevc_print(hevc, 0, "VH265: output first frame\n");
3800 }
3801 }
3802
3803 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3804 pic = hevc->m_PIC[i];
3805 if (pic == NULL ||
3806 (pic->index == -1) ||
3807 (pic->BUF_index == -1) ||
3808 (pic->POC == INVALID_POC))
3809 continue;
3810 if (pic->output_mark) {
3811 if (pic_display) {
3812 if (pic->POC < pic_display->POC)
3813 pic_display = pic;
3814 else if ((pic->POC == pic_display->POC)
3815 && (pic->decode_idx <
3816 pic_display->
3817 decode_idx))
3818 pic_display
3819 = pic;
3820 } else
3821 pic_display = pic;
3822 }
3823 }
3824 if (pic_display) {
3825 if ((num_pic_not_yet_display >
3826 pic_display->num_reorder_pic)
3827 || flush_flag) {
3828 pic_display->output_mark = 0;
3829 pic_display->recon_mark = 0;
3830 pic_display->output_ready = 1;
3831 } else if (num_pic_not_yet_display >=
3832 (MAX_REF_PIC_NUM - 1)) {
3833 pic_display->output_mark = 0;
3834 pic_display->recon_mark = 0;
3835 pic_display->output_ready = 1;
3836 hevc_print(hevc, 0,
3837					"Warning, num_reorder_pic %d is beyond buf num\n",
3838 pic_display->num_reorder_pic);
3839 } else
3840 pic_display = NULL;
3841 }
3842 }
3843
3844 if (pic_display && (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3845 pic_display = NULL;
3846 hevc->first_pic_flag = 0;
3847 }
3848 return pic_display;
3849}
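
/*
 * Note on the selection logic above (descriptive only):
 * - when i_only & 0x4 is set, the output-marked picture with the smallest
 *   decode_idx is returned and released immediately (no reorder delay).
 * - otherwise the candidate is the output-marked picture with the smallest
 *   POC (ties broken by decode_idx).  It is actually returned only when the
 *   number of pending output pictures exceeds its num_reorder_pic, when
 *   flush_flag is set, or when the pending count reaches MAX_REF_PIC_NUM - 1
 *   (with a warning, since num_reorder_pic then exceeds the buffer count).
 * - fast_output_enable bit 0 forces num_reorder_pic of the first I picture
 *   to 0 so it can be output without waiting; for master/slave (dual layer)
 *   decoding that picture is returned directly.
 */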
3850
3851static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3852{
3853 int i;
3854 struct PIC_s *pic;
3855
3856 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3857 hevc_print(hevc, 0,
3858 "config_mc_buffer entered .....\n");
3859 if (cur_pic->slice_type != 2) { /* P and B pic */
3860 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3861 (0 << 8) | (0 << 1) | 1);
3862 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3863 pic =
3864 get_ref_pic_by_POC(hevc,
3865 cur_pic->
3866 m_aiRefPOCList0[cur_pic->
3867 slice_idx][i]);
3868 if (pic) {
3869 if ((pic->width != hevc->pic_w) ||
3870 (pic->height != hevc->pic_h)) {
3871 hevc_print(hevc, 0,
3872 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3873 __func__, pic->POC,
3874 pic->width, pic->height);
3875 cur_pic->error_mark = 1;
3876 }
3877 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3878 cur_pic->error_mark = 1;
3879 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3880 (pic->mc_canvas_u_v << 16)
3881 | (pic->mc_canvas_u_v
3882 << 8) |
3883 pic->mc_canvas_y);
3884 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3885 hevc_print_cont(hevc, 0,
3886 "refid %x mc_canvas_u_v %x",
3887 i, pic->mc_canvas_u_v);
3888 hevc_print_cont(hevc, 0,
3889 " mc_canvas_y %x\n",
3890 pic->mc_canvas_y);
3891 }
3892 } else
3893 cur_pic->error_mark = 1;
3894
3895 if (pic == NULL || pic->error_mark) {
3896 hevc_print(hevc, 0,
3897 "Error %s, %dth poc (%d) %s",
3898 __func__, i,
3899 cur_pic->m_aiRefPOCList0[cur_pic->
3900 slice_idx][i],
3901 pic ? "has error" :
3902 "not in list0");
3903 }
3904 }
3905 }
3906 if (cur_pic->slice_type == 0) { /* B pic */
3907 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3908 hevc_print(hevc, 0,
3909 "config_mc_buffer RefNum_L1\n");
3910 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3911 (16 << 8) | (0 << 1) | 1);
3912
3913 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3914 pic =
3915 get_ref_pic_by_POC(hevc,
3916 cur_pic->
3917 m_aiRefPOCList1[cur_pic->
3918 slice_idx][i]);
3919 if (pic) {
3920 if ((pic->width != hevc->pic_w) ||
3921 (pic->height != hevc->pic_h)) {
3922 hevc_print(hevc, 0,
3923 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3924 __func__, pic->POC,
3925 pic->width, pic->height);
3926 cur_pic->error_mark = 1;
3927 }
3928
3929 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3930 cur_pic->error_mark = 1;
3931 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3932 (pic->mc_canvas_u_v << 16)
3933 | (pic->mc_canvas_u_v
3934 << 8) |
3935 pic->mc_canvas_y);
3936 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3937 hevc_print_cont(hevc, 0,
3938 "refid %x mc_canvas_u_v %x",
3939 i, pic->mc_canvas_u_v);
3940 hevc_print_cont(hevc, 0,
3941 " mc_canvas_y %x\n",
3942 pic->mc_canvas_y);
3943 }
3944 } else
3945 cur_pic->error_mark = 1;
3946
3947 if (pic == NULL || pic->error_mark) {
3948 hevc_print(hevc, 0,
3949 "Error %s, %dth poc (%d) %s",
3950 __func__, i,
3951 cur_pic->m_aiRefPOCList1[cur_pic->
3952 slice_idx][i],
3953 pic ? "has error" :
3954 "not in list1");
3955 }
3956 }
3957 }
3958 return 0;
3959}
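
/*
 * Canvas programming note (inferred from how this driver uses the
 * registers, not from a datasheet): the value written to
 * HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR appears to select the starting table
 * index in bits [15:8] with auto-increment enabled by bit 0, so successive
 * writes to HEVCD_MPP_ANC_CANVAS_DATA_ADDR fill consecutive entries.
 * List 0 starts at index 0, list 1 at index 16.  Each data word packs
 * (mc_canvas_u_v << 16) | (mc_canvas_u_v << 8) | mc_canvas_y.  A reference
 * that is missing or has the wrong geometry marks the current picture as in
 * error; an error-marked reference propagates the flag only when
 * ref_frame_mark_flag is set for this instance.
 */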
3960
3961static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
3962 union param_u *params)
3963{
3964 int ii, i;
3965 int poc_tmp;
3966 struct PIC_s *pic;
3967 unsigned char is_referenced;
3968 /* hevc_print(hevc, 0,
3969 "%s cur_poc %d\n", __func__, cur_poc); */
3970 if (pic_list_debug & 0x2) {
3971 pr_err("cur poc %d\n", cur_poc);
3972 }
3973 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
3974 pic = hevc->m_PIC[ii];
3975 if (pic == NULL ||
3976 pic->index == -1 ||
3977 pic->BUF_index == -1
3978 )
3979 continue;
3980
3981 if ((pic->referenced == 0 || pic->POC == cur_poc))
3982 continue;
3983 is_referenced = 0;
3984 for (i = 0; i < 16; i++) {
3985 int delt;
3986
3987 if (params->p.CUR_RPS[i] & 0x8000)
3988 break;
3989 delt =
3990 params->p.CUR_RPS[i] &
3991 ((1 << (RPS_USED_BIT - 1)) - 1);
3992 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
3993 poc_tmp =
3994 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
3995 delt);
3996 } else
3997 poc_tmp = cur_poc + delt;
3998 if (poc_tmp == pic->POC) {
3999 is_referenced = 1;
4000 /* hevc_print(hevc, 0, "i is %d\n", i); */
4001 break;
4002 }
4003 }
4004 if (is_referenced == 0) {
4005 pic->referenced = 0;
4006 put_mv_buf(hevc, pic);
4007 /* hevc_print(hevc, 0,
4008 "set poc %d reference to 0\n", pic->POC); */
4009 if (pic_list_debug & 0x2) {
4010 pr_err("set poc %d reference to 0\n", pic->POC);
4011 }
4012 }
4013 }
4014
4015}
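
/*
 * CUR_RPS word layout as decoded above and in set_ref_pic_list() below
 * (inferred from the driver code): bit 15 (0x8000) terminates the list,
 * bit RPS_USED_BIT marks the entry as used by the current picture, bit
 * (RPS_USED_BIT - 1) carries the sign of the POC delta, and the lower
 * (RPS_USED_BIT - 1) bits carry its magnitude (negative deltas are stored
 * biased, hence the "(1 << (RPS_USED_BIT - 1)) - delt" term).  A picture
 * keeps referenced == 1 only if cur_poc +/- delta matches its POC for some
 * entry; otherwise the flag is cleared and its MV buffer is recycled via
 * put_mv_buf().
 */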
4016
4017static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
4018{
4019 struct PIC_s *pic = hevc->cur_pic;
4020 int i, rIdx;
4021 int num_neg = 0;
4022 int num_pos = 0;
4023 int total_num;
4024 int num_ref_idx_l0_active =
4025 (params->p.num_ref_idx_l0_active >
4026 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4027 params->p.num_ref_idx_l0_active;
4028 int num_ref_idx_l1_active =
4029 (params->p.num_ref_idx_l1_active >
4030 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4031 params->p.num_ref_idx_l1_active;
4032
4033 int RefPicSetStCurr0[16];
4034 int RefPicSetStCurr1[16];
4035
4036 for (i = 0; i < 16; i++) {
4037 RefPicSetStCurr0[i] = 0;
4038 RefPicSetStCurr1[i] = 0;
4039 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
4040 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
4041 }
4042 for (i = 0; i < 16; i++) {
4043 if (params->p.CUR_RPS[i] & 0x8000)
4044 break;
4045 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
4046 int delt =
4047 params->p.CUR_RPS[i] &
4048 ((1 << (RPS_USED_BIT - 1)) - 1);
4049
4050 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
4051 RefPicSetStCurr0[num_neg] =
4052 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
4053 delt);
4054 /* hevc_print(hevc, 0,
4055 * "RefPicSetStCurr0 %x %x %x\n",
4056 * RefPicSetStCurr0[num_neg], pic->POC,
4057 * (0x800-(params[i]&0x7ff)));
4058 */
4059 num_neg++;
4060 } else {
4061 RefPicSetStCurr1[num_pos] = pic->POC + delt;
4062 /* hevc_print(hevc, 0,
4063 * "RefPicSetStCurr1 %d\n",
4064 * RefPicSetStCurr1[num_pos]);
4065 */
4066 num_pos++;
4067 }
4068 }
4069 }
4070 total_num = num_neg + num_pos;
4071 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4072 hevc_print(hevc, 0,
4073 "%s: curpoc %d slice_type %d, total %d ",
4074 __func__, pic->POC, params->p.slice_type, total_num);
4075 hevc_print_cont(hevc, 0,
4076 "num_neg %d num_list0 %d num_list1 %d\n",
4077 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
4078 }
4079
4080 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4081 hevc_print(hevc, 0,
4082 "HEVC Stream buf start ");
4083 hevc_print_cont(hevc, 0,
4084 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
4085 READ_VREG(HEVC_STREAM_START_ADDR),
4086 READ_VREG(HEVC_STREAM_END_ADDR),
4087 READ_VREG(HEVC_STREAM_WR_PTR),
4088 READ_VREG(HEVC_STREAM_RD_PTR),
4089 READ_VREG(HEVC_STREAM_LEVEL),
4090 READ_VREG(HEVC_STREAM_FIFO_CTL),
4091 READ_VREG(HEVC_PARSER_INT_CONTROL));
4092 }
4093
4094 if (total_num > 0) {
4095 if (params->p.modification_flag & 0x1) {
4096 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4097 hevc_print(hevc, 0, "ref0 POC (modification):");
4098 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4099 int cIdx = params->p.modification_list[rIdx];
4100
4101 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4102 cIdx >=
4103 num_neg ? RefPicSetStCurr1[cIdx -
4104 num_neg] :
4105 RefPicSetStCurr0[cIdx];
4106 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4107 hevc_print_cont(hevc, 0, "%d ",
4108 pic->m_aiRefPOCList0[pic->
4109 slice_idx]
4110 [rIdx]);
4111 }
4112 }
4113 } else {
4114 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4115 hevc_print(hevc, 0, "ref0 POC:");
4116 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4117 int cIdx = rIdx % total_num;
4118
4119 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4120 cIdx >=
4121 num_neg ? RefPicSetStCurr1[cIdx -
4122 num_neg] :
4123 RefPicSetStCurr0[cIdx];
4124 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4125 hevc_print_cont(hevc, 0, "%d ",
4126 pic->m_aiRefPOCList0[pic->
4127 slice_idx]
4128 [rIdx]);
4129 }
4130 }
4131 }
4132 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4133 hevc_print_cont(hevc, 0, "\n");
4134 if (params->p.slice_type == B_SLICE) {
4135 if (params->p.modification_flag & 0x2) {
4136 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4137 hevc_print(hevc, 0,
4138 "ref1 POC (modification):");
4139 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4140 rIdx++) {
4141 int cIdx;
4142
4143 if (params->p.modification_flag & 0x1) {
4144 cIdx =
4145 params->p.
4146 modification_list
4147 [num_ref_idx_l0_active +
4148 rIdx];
4149 } else {
4150 cIdx =
4151 params->p.
4152 modification_list[rIdx];
4153 }
4154 pic->m_aiRefPOCList1[pic->
4155 slice_idx][rIdx] =
4156 cIdx >=
4157 num_pos ?
4158 RefPicSetStCurr0[cIdx - num_pos]
4159 : RefPicSetStCurr1[cIdx];
4160 if (get_dbg_flag(hevc) &
4161 H265_DEBUG_BUFMGR) {
4162 hevc_print_cont(hevc, 0, "%d ",
4163 pic->
4164 m_aiRefPOCList1[pic->
4165 slice_idx]
4166 [rIdx]);
4167 }
4168 }
4169 } else {
4170 if (get_dbg_flag(hevc) &
4171 H265_DEBUG_BUFMGR)
4172 hevc_print(hevc, 0, "ref1 POC:");
4173 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4174 rIdx++) {
4175 int cIdx = rIdx % total_num;
4176
4177 pic->m_aiRefPOCList1[pic->
4178 slice_idx][rIdx] =
4179 cIdx >=
4180 num_pos ?
4181 RefPicSetStCurr0[cIdx -
4182 num_pos]
4183 : RefPicSetStCurr1[cIdx];
4184 if (get_dbg_flag(hevc) &
4185 H265_DEBUG_BUFMGR) {
4186 hevc_print_cont(hevc, 0, "%d ",
4187 pic->
4188 m_aiRefPOCList1[pic->
4189 slice_idx]
4190 [rIdx]);
4191 }
4192 }
4193 }
4194 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4195 hevc_print_cont(hevc, 0, "\n");
4196 }
4197 }
4198 /*set m_PIC */
4199 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4200 (params->p.slice_type == P_SLICE) ? 1 :
4201 (params->p.slice_type == B_SLICE) ? 0 : 3;
4202 pic->RefNum_L0 = num_ref_idx_l0_active;
4203 pic->RefNum_L1 = num_ref_idx_l1_active;
4204}
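
/*
 * Reference list construction summary (descriptive note): negative POC
 * deltas from CUR_RPS fill RefPicSetStCurr0[], positive deltas fill
 * RefPicSetStCurr1[].  Without ref_pic_list_modification, list 0 walks
 * RefPicSetStCurr0 then RefPicSetStCurr1 (cIdx = rIdx % total_num) and
 * list 1 walks them in the opposite order; with modification, the indices
 * come from modification_list[] (list 1 entries follow the list 0 entries
 * when both lists are modified).
 *
 * Worked example (hypothetical stream): cur POC = 8, num_neg = 2 with
 * deltas {-1, -3}, num_pos = 1 with delta {+2}, num_ref_idx_l0_active = 3:
 *   RefPicSetStCurr0 = {7, 5}, RefPicSetStCurr1 = {10}
 *   L0 = {7, 5, 10}   (rIdx 0..2, cIdx = rIdx % 3)
 * slice_type is remapped for the hardware as I->2, P->1, B->0.
 */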
4205
4206static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4207 int pic_height_cu, int sao_mem_unit,
4208 union param_u *params)
4209{
4210 int i, j;
4211 int start_cu_x, start_cu_y;
4212 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4213 int sao_abv_size = sao_mem_unit * pic_width_cu;
4214#ifdef DETREFILL_ENABLE
4215 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4216 int tmpRefillLcuSize = 1 <<
4217 (params->p.log2_min_coding_block_size_minus3 +
4218 3 + params->p.log2_diff_max_min_coding_block_size);
4219 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4220 "%x, %x, %x, %x\n",
4221 params->p.slice_segment_address,
4222 params->p.bit_depth,
4223 params->p.tiles_enabled_flag,
4224 tmpRefillLcuSize);
4225 if (params->p.slice_segment_address == 0 &&
4226 params->p.bit_depth != 0 &&
4227 (params->p.tiles_enabled_flag & 1) &&
4228 tmpRefillLcuSize == 64)
4229 hevc->delrefill_check = 1;
4230 else
4231 hevc->delrefill_check = 0;
4232 }
4233#endif
4234
4235 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4236 if (params->p.tiles_enabled_flag & 1) {
4237 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4238 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4239
4240 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4241 || hevc->num_tile_row <= 0) {
4242 hevc->num_tile_row = 1;
4243 hevc_print(hevc, 0,
4244 "%s: num_tile_rows_minus1 (%d) error!!\n",
4245 __func__, params->p.num_tile_rows_minus1);
4246 }
4247 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4248 || hevc->num_tile_col <= 0) {
4249 hevc->num_tile_col = 1;
4250 hevc_print(hevc, 0,
4251 "%s: num_tile_columns_minus1 (%d) error!!\n",
4252 __func__, params->p.num_tile_columns_minus1);
4253 }
4254 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4255 hevc_print(hevc, 0,
4256 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4257 __func__, pic_width_cu, pic_height_cu);
4258 hevc_print_cont(hevc, 0,
4259 "num_tile_col %d num_tile_row %d:\n",
4260 hevc->num_tile_col, hevc->num_tile_row);
4261 }
4262
4263 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4264 int w = pic_width_cu / hevc->num_tile_col;
4265 int h = pic_height_cu / hevc->num_tile_row;
4266
4267 start_cu_y = 0;
4268 for (i = 0; i < hevc->num_tile_row; i++) {
4269 start_cu_x = 0;
4270 for (j = 0; j < hevc->num_tile_col; j++) {
4271 if (j == (hevc->num_tile_col - 1)) {
4272 hevc->m_tile[i][j].width =
4273 pic_width_cu -
4274 start_cu_x;
4275 } else
4276 hevc->m_tile[i][j].width = w;
4277 if (i == (hevc->num_tile_row - 1)) {
4278 hevc->m_tile[i][j].height =
4279 pic_height_cu -
4280 start_cu_y;
4281 } else
4282 hevc->m_tile[i][j].height = h;
4283 hevc->m_tile[i][j].start_cu_x
4284 = start_cu_x;
4285 hevc->m_tile[i][j].start_cu_y
4286 = start_cu_y;
4287 hevc->m_tile[i][j].sao_vb_start_addr =
4288 hevc->work_space_buf->sao_vb.
4289 buf_start + j * sao_vb_size;
4290 hevc->m_tile[i][j].sao_abv_start_addr =
4291 hevc->work_space_buf->sao_abv.
4292 buf_start + i * sao_abv_size;
4293 if (get_dbg_flag(hevc) &
4294 H265_DEBUG_BUFMGR) {
4295 hevc_print_cont(hevc, 0,
4296 "{y=%d, x=%d w %d h %d ",
4297 i, j, hevc->m_tile[i][j].width,
4298 hevc->m_tile[i][j].height);
4299 hevc_print_cont(hevc, 0,
4300 "start_x %d start_y %d ",
4301 hevc->m_tile[i][j].start_cu_x,
4302 hevc->m_tile[i][j].start_cu_y);
4303 hevc_print_cont(hevc, 0,
4304 "sao_vb_start 0x%x ",
4305 hevc->m_tile[i][j].
4306 sao_vb_start_addr);
4307 hevc_print_cont(hevc, 0,
4308 "sao_abv_start 0x%x}\n",
4309 hevc->m_tile[i][j].
4310 sao_abv_start_addr);
4311 }
4312 start_cu_x += hevc->m_tile[i][j].width;
4313
4314 }
4315 start_cu_y += hevc->m_tile[i][0].height;
4316 }
4317 } else {
4318 start_cu_y = 0;
4319 for (i = 0; i < hevc->num_tile_row; i++) {
4320 start_cu_x = 0;
4321 for (j = 0; j < hevc->num_tile_col; j++) {
4322 if (j == (hevc->num_tile_col - 1)) {
4323 hevc->m_tile[i][j].width =
4324 pic_width_cu -
4325 start_cu_x;
4326 } else {
4327 hevc->m_tile[i][j].width =
4328 params->p.tile_width[j];
4329 }
4330 if (i == (hevc->num_tile_row - 1)) {
4331 hevc->m_tile[i][j].height =
4332 pic_height_cu -
4333 start_cu_y;
4334 } else {
4335 hevc->m_tile[i][j].height =
4336 params->
4337 p.tile_height[i];
4338 }
4339 hevc->m_tile[i][j].start_cu_x
4340 = start_cu_x;
4341 hevc->m_tile[i][j].start_cu_y
4342 = start_cu_y;
4343 hevc->m_tile[i][j].sao_vb_start_addr =
4344 hevc->work_space_buf->sao_vb.
4345 buf_start + j * sao_vb_size;
4346 hevc->m_tile[i][j].sao_abv_start_addr =
4347 hevc->work_space_buf->sao_abv.
4348 buf_start + i * sao_abv_size;
4349 if (get_dbg_flag(hevc) &
4350 H265_DEBUG_BUFMGR) {
4351 hevc_print_cont(hevc, 0,
4352 "{y=%d, x=%d w %d h %d ",
4353 i, j, hevc->m_tile[i][j].width,
4354 hevc->m_tile[i][j].height);
4355 hevc_print_cont(hevc, 0,
4356 "start_x %d start_y %d ",
4357 hevc->m_tile[i][j].start_cu_x,
4358 hevc->m_tile[i][j].start_cu_y);
4359 hevc_print_cont(hevc, 0,
4360 "sao_vb_start 0x%x ",
4361 hevc->m_tile[i][j].
4362 sao_vb_start_addr);
4363 hevc_print_cont(hevc, 0,
4364 "sao_abv_start 0x%x}\n",
4365 hevc->m_tile[i][j].
4366 sao_abv_start_addr);
4367
4368 }
4369 start_cu_x += hevc->m_tile[i][j].width;
4370 }
4371 start_cu_y += hevc->m_tile[i][0].height;
4372 }
4373 }
4374 } else {
4375 hevc->num_tile_col = 1;
4376 hevc->num_tile_row = 1;
4377 hevc->m_tile[0][0].width = pic_width_cu;
4378 hevc->m_tile[0][0].height = pic_height_cu;
4379 hevc->m_tile[0][0].start_cu_x = 0;
4380 hevc->m_tile[0][0].start_cu_y = 0;
4381 hevc->m_tile[0][0].sao_vb_start_addr =
4382 hevc->work_space_buf->sao_vb.buf_start;
4383 hevc->m_tile[0][0].sao_abv_start_addr =
4384 hevc->work_space_buf->sao_abv.buf_start;
4385 }
4386}
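
/*
 * Tile layout note: with tiles_enabled_flag bit 1 (uniform spacing) each
 * tile gets pic_width_cu / num_tile_col by pic_height_cu / num_tile_row
 * LCUs and the last column/row absorbs the remainder; otherwise the widths
 * and heights come from tile_width[]/tile_height[] with the same remainder
 * rule for the last column/row.  Every tile also gets its own SAO
 * line-buffer slice: sao_vb offset = column * sao_vb_size and
 * sao_abv offset = row * sao_abv_size, both carved out of the shared
 * work_space_buf.  Out-of-range tile counts are clamped to 1.
 */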
4387
4388static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4389 int pic_width_lcu)
4390{
4391 int cu_x;
4392 int cu_y;
4393 int tile_x = 0;
4394 int tile_y = 0;
4395 int i;
4396
4397 if (pic_width_lcu == 0) {
4398 if (get_dbg_flag(hevc)) {
4399 hevc_print(hevc, 0,
4400 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4401 __func__, hevc->pic_w, hevc->pic_h);
4402 }
4403 return -1;
4404 }
4405 cu_x = cu_adr % pic_width_lcu;
4406 cu_y = cu_adr / pic_width_lcu;
4407 if (hevc->tile_enabled) {
4408 for (i = 0; i < hevc->num_tile_col; i++) {
4409 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4410 tile_x = i;
4411 else
4412 break;
4413 }
4414 for (i = 0; i < hevc->num_tile_row; i++) {
4415 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4416 tile_y = i;
4417 else
4418 break;
4419 }
4420 }
4421 return (tile_x) | (tile_y << 8);
4422}
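
/*
 * Return value packing for get_tile_index(): bits [7:0] hold the tile
 * column and bits [15:8] the tile row of the LCU at cu_adr (both 0 when
 * tiles are disabled); -1 is returned if pic_width_lcu is 0, which would
 * otherwise divide by zero.
 */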
4423
4424static void print_scratch_error(int error_num)
4425{
4426#if 0
4427 if (get_dbg_flag(hevc)) {
4428 hevc_print(hevc, 0,
4429 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4430 error_num);
4431 }
4432#endif
4433}
4434
4435static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4436{
4437 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4438
4439 if (get_dbg_flag(hevc))
4440 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4441 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4442 __func__,
4443 buf_spec->ipp.buf_start,
4444 buf_spec->start_adr,
4445 buf_spec->short_term_rps.buf_start,
4446 buf_spec->vps.buf_start,
4447 buf_spec->sps.buf_start,
4448 buf_spec->pps.buf_start,
4449 buf_spec->sao_up.buf_start,
4450 buf_spec->swap_buf.buf_start,
4451 buf_spec->swap_buf2.buf_start,
4452 buf_spec->scalelut.buf_start,
4453 buf_spec->dblk_para.buf_start,
4454 buf_spec->dblk_data.buf_start,
4455 buf_spec->dblk_data2.buf_start);
4456 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4457 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4458 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4459 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4460 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4461 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4462 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4463 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4464 if (hevc->mmu_enable) {
4465 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4466 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4467 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4468 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4469 } else
4470 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4471 } /*else
4472 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4473 buf_spec->swap_buf.buf_start);
4474 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4475 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4476 /* cfg_p_addr */
4477 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4478 /* cfg_d_addr */
4479 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4480
4481 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4482
4483 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4484}
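
/*
 * Workspace mapping note: each base-address register written above points
 * the decoder at one sub-buffer carved out of work_space_buf (IPP line
 * buffer, short-term RPS/VPS/SPS/PPS scratch, SAO above-line buffer, scale
 * LUT, deblock parameter/data buffers) plus the LMEM dump area.  The RPM
 * buffer is programmed only when parameters are passed through memory
 * rather than registers (H265_DEBUG_SEND_PARAM_WITH_REG clear), and the
 * MMU map address goes to HEVC_ASSIST_MMU_MAP_ADDR on G12A and later but
 * to H265_MMU_MAP_BUFFER on earlier parts.
 */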
4485
4486static void parser_cmd_write(void)
4487{
4488 u32 i;
4489 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4490 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4491 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4492 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4493 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4494 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4495 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4496 0x7C00
4497 };
4498 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4499 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4500}
4501
4502static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4503 int decode_pic_begin, int decode_pic_num)
4504{
4505 unsigned int data32;
4506 int i;
4507#if 0
4508 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4509 /* Set MCR fetch priorities*/
4510 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4511 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4512 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4513 }
4514#endif
4515#if 1
4516 /* m8baby test1902 */
4517 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4518 hevc_print(hevc, 0,
4519 "%s\n", __func__);
4520 data32 = READ_VREG(HEVC_PARSER_VERSION);
4521 if (data32 != 0x00010001) {
4522 print_scratch_error(25);
4523 return;
4524 }
4525 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4526 data32 = READ_VREG(HEVC_PARSER_VERSION);
4527 if (data32 != 0x5a5a55aa) {
4528 print_scratch_error(26);
4529 return;
4530 }
4531#if 0
4532 /* test Parser Reset */
4533 /* reset iqit to start mem init again */
4534 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4535 (1 << 3) /* reset_whole parser */
4536 );
4537 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4538 data32 = READ_VREG(HEVC_PARSER_VERSION);
4539 if (data32 != 0x00010001)
4540 hevc_print(hevc, 0,
4541 "Test Parser Fatal Error\n");
4542#endif
4543 /* reset iqit to start mem init again */
4544 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4545 );
4546 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4547 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4548
4549#endif
4550 if (!hevc->m_ins_flag) {
4551 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4552 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4553 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4554 data32 |= (0xf << 25); /*arwlen_axi_max*/
4555 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4556 }
4557 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4558 if (data32 != 0x00000100) {
4559 print_scratch_error(29);
4560 return;
4561 }
4562 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4563 if (data32 != 0x00000300) {
4564 print_scratch_error(30);
4565 return;
4566 }
4567 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4568 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4569 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4570 if (data32 != 0x12345678) {
4571 print_scratch_error(31);
4572 return;
4573 }
4574 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4575 if (data32 != 0x9abcdef0) {
4576 print_scratch_error(32);
4577 return;
4578 }
4579 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4580 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4581
4582 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4583 data32 &= 0x03ffffff;
4584 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4585 | /* stream_buffer_empty_int_amrisc_enable */
4586 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4587 (1 << 7) | /* dec_done_int_cpu_enable */
4588 (1 << 4) | /* startcode_found_int_cpu_enable */
4589 (0 << 3) | /* startcode_found_int_amrisc_enable */
4590 (1 << 0) /* parser_int_enable */
4591 ;
4592 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4593
4594 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4595 data32 = data32 | (1 << 1) | /* emulation_check_on */
4596 (1 << 0) /* startcode_check_on */
4597 ;
4598 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4599
4600 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4601 (2 << 4) | /* emulate_code_length_sub_1 */
4602 (2 << 1) | /* start_code_length_sub_1 */
4603 (1 << 0) /* stream_shift_enable */
4604 );
4605
4606 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4607 );
4608 /* hevc_parser_core_clk_en */
4609 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4610 );
4611
4612 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4613
4614	/* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4615 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4616 for (i = 0; i < 1024; i++)
4617 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4618
4619#ifdef ENABLE_SWAP_TEST
4620 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4621#endif
4622
4623 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4624 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4625 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4626 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4627 /* Send parser_cmd */
4628 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4629
4630 parser_cmd_write();
4631
4632 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4633 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4634 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4635
4636 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4637 /* (1 << 8) | // sao_sw_pred_enable */
4638 (1 << 5) | /* parser_sao_if_en */
4639 (1 << 2) | /* parser_mpred_if_en */
4640 (1 << 0) /* parser_scaler_if_en */
4641 );
4642
4643 /* Changed to Start MPRED in microcode */
4644 /*
4645 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4646 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4647 * (1<<31)
4648 * );
4649 */
4650
4651 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4652 (1 << 0) /* software reset ipp and mpp */
4653 );
4654 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4655 (0 << 0) /* software reset ipp and mpp */
4656 );
4657
4658 if (get_double_write_mode(hevc) & 0x10)
4659 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4660			0x1 << 31 /* Enable NV21 reference read mode for MC */
4661 );
4662
4663}
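
/*
 * Sanity-check sequence note: the parser scratch registers are probed by
 * checking their reset values (HEVC_PARSER_VERSION == 0x00010001,
 * HEVC_SHIFT_STARTCODE == 0x100, HEVC_SHIFT_EMULATECODE == 0x300), then
 * writing test patterns and reading them back before restoring the reset
 * values.  print_scratch_error() is called with a distinct number per
 * failed check (25/26, 29-32), but its body is compiled out, so a failure
 * only aborts the init silently.
 */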
4664
4665static void decoder_hw_reset(void)
4666{
4667 int i;
4668 unsigned int data32;
4669 /* reset iqit to start mem init again */
4670 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4671 );
4672 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4673 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4674
4675 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4676 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4677 ;
4678 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4679
4680 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4681 if (data32 != 0x00000100) {
4682 print_scratch_error(29);
4683 return;
4684 }
4685 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4686 if (data32 != 0x00000300) {
4687 print_scratch_error(30);
4688 return;
4689 }
4690 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4691 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4692 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4693 if (data32 != 0x12345678) {
4694 print_scratch_error(31);
4695 return;
4696 }
4697 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4698 if (data32 != 0x9abcdef0) {
4699 print_scratch_error(32);
4700 return;
4701 }
4702 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4703 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4704
4705 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4706 data32 &= 0x03ffffff;
4707 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4708 | /* stream_buffer_empty_int_amrisc_enable */
4709 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4710 (1 << 7) | /* dec_done_int_cpu_enable */
4711 (1 << 4) | /* startcode_found_int_cpu_enable */
4712 (0 << 3) | /* startcode_found_int_amrisc_enable */
4713 (1 << 0) /* parser_int_enable */
4714 ;
4715 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4716
4717 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4718 data32 = data32 | (1 << 1) | /* emulation_check_on */
4719 (1 << 0) /* startcode_check_on */
4720 ;
4721 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4722
4723 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4724 (2 << 4) | /* emulate_code_length_sub_1 */
4725 (2 << 1) | /* start_code_length_sub_1 */
4726 (1 << 0) /* stream_shift_enable */
4727 );
4728
4729 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4730 );
4731 /* hevc_parser_core_clk_en */
4732 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4733 );
4734
4735	/* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4736 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4737 for (i = 0; i < 1024; i++)
4738 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4739
4740 /* Send parser_cmd */
4741 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4742
4743 parser_cmd_write();
4744
4745 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4746 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4747 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4748
4749 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4750 /* (1 << 8) | // sao_sw_pred_enable */
4751 (1 << 5) | /* parser_sao_if_en */
4752 (1 << 2) | /* parser_mpred_if_en */
4753 (1 << 0) /* parser_scaler_if_en */
4754 );
4755
4756 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4757 (1 << 0) /* software reset ipp and mpp */
4758 );
4759 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4760 (0 << 0) /* software reset ipp and mpp */
4761 );
4762}
4763
4764#ifdef CONFIG_HEVC_CLK_FORCED_ON
4765static void config_hevc_clk_forced_on(void)
4766{
4767 unsigned int rdata32;
4768 /* IQIT */
4769 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4770 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4771
4772 /* DBLK */
4773 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4774 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4775
4776 /* SAO */
4777 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4778 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4779
4780 /* MPRED */
4781 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4782 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4783
4784 /* PARSER */
4785 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4786 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4787 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4788 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4789 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4790 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4791 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4792 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4793 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4794 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4795 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4796 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4797 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4798
4799 /* IPP */
4800 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4801 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4802
4803 /* MCRCC */
4804 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4805 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4806}
4807#endif
4808
4809#ifdef MCRCC_ENABLE
4810static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4811{
4812 unsigned int rdata32;
4813 unsigned int rdata32_2;
4814 int l0_cnt = 0;
4815 int l1_cnt = 0x7fff;
4816
4817 if (get_double_write_mode(hevc) & 0x10) {
4818 l0_cnt = hevc->cur_pic->RefNum_L0;
4819 l1_cnt = hevc->cur_pic->RefNum_L1;
4820 }
4821
4822 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4823
4824 if (slice_type == 2) { /* I-PIC */
4825 /* remove reset -- disables clock */
4826 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4827 return;
4828 }
4829
4830 if (slice_type == 0) { /* B-PIC */
4831 /* Programme canvas0 */
4832 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4833 (0 << 8) | (0 << 1) | 0);
4834 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4835 rdata32 = rdata32 & 0xffff;
4836 rdata32 = rdata32 | (rdata32 << 16);
4837 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4838
4839 /* Programme canvas1 */
4840 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4841 (16 << 8) | (1 << 1) | 0);
4842 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4843 rdata32_2 = rdata32_2 & 0xffff;
4844 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4845 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4846 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4847 rdata32_2 = rdata32_2 & 0xffff;
4848 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4849 }
4850 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4851 } else { /* P-PIC */
4852 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4853 (0 << 8) | (1 << 1) | 0);
4854 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4855 rdata32 = rdata32 & 0xffff;
4856 rdata32 = rdata32 | (rdata32 << 16);
4857 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4858
4859 if (l0_cnt == 1) {
4860 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4861 } else {
4862 /* Programme canvas1 */
4863 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4864 rdata32 = rdata32 & 0xffff;
4865 rdata32 = rdata32 | (rdata32 << 16);
4866 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4867 }
4868 }
4869 /* enable mcrcc progressive-mode */
4870 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4871}
4872#endif
4873
4874static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4875 int sao_mem_unit)
4876{
4877 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4878 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4879 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4880 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4881}
4882
4883static u32 init_aux_size;
4884static int aux_data_is_avaible(struct hevc_state_s *hevc)
4885{
4886 u32 reg_val;
4887
4888 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4889 if (reg_val != 0 && reg_val != init_aux_size)
4890 return 1;
4891 else
4892 return 0;
4893}
4894
4895static void config_aux_buf(struct hevc_state_s *hevc)
4896{
4897 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4898 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4899 (hevc->suffix_aux_size >> 4);
4900 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4901}
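
/*
 * Aux (SEI) buffer note: HEVC_AUX_DATA_SIZE packs the prefix buffer size
 * in 16-byte units into bits [31:16] and the suffix size (also in 16-byte
 * units) into bits [15:0], hence the ">> 4" and "<< 16" above.
 * aux_data_is_avaible() appears to treat any non-zero value that differs
 * from this programmed init_aux_size as an indication that new aux data
 * has been produced.
 */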
4902
4903static void config_mpred_hw(struct hevc_state_s *hevc)
4904{
4905 int i;
4906 unsigned int data32;
4907 struct PIC_s *cur_pic = hevc->cur_pic;
4908 struct PIC_s *col_pic = hevc->col_pic;
4909 int AMVP_MAX_NUM_CANDS_MEM = 3;
4910 int AMVP_MAX_NUM_CANDS = 2;
4911 int NUM_CHROMA_MODE = 5;
4912 int DM_CHROMA_IDX = 36;
4913 int above_ptr_ctrl = 0;
4914 int buffer_linear = 1;
4915 int cu_size_log2 = 3;
4916
4917 int mpred_mv_rd_start_addr;
4918 int mpred_curr_lcu_x;
4919 int mpred_curr_lcu_y;
4920 int mpred_above_buf_start;
4921 int mpred_mv_rd_ptr;
4922 int mpred_mv_rd_ptr_p1;
4923 int mpred_mv_rd_end_addr;
4924 int MV_MEM_UNIT;
4925 int mpred_mv_wr_ptr;
4926 int *ref_poc_L0, *ref_poc_L1;
4927
4928 int above_en;
4929 int mv_wr_en;
4930 int mv_rd_en;
4931 int col_isIntra;
4932
4933 if (hevc->slice_type != 2) {
4934 above_en = 1;
4935 mv_wr_en = 1;
4936 mv_rd_en = 1;
4937 col_isIntra = 0;
4938 } else {
4939 above_en = 1;
4940 mv_wr_en = 1;
4941 mv_rd_en = 0;
4942 col_isIntra = 0;
4943 }
4944
4945 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
4946 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
4947 mpred_curr_lcu_x = data32 & 0xffff;
4948 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
4949
4950 MV_MEM_UNIT =
4951 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
4952 5 ? 0x80 : 0x20;
4953 mpred_mv_rd_ptr =
4954 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
4955
4956 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
4957 mpred_mv_rd_end_addr =
4958 mpred_mv_rd_start_addr +
4959 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
4960
4961 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
4962
4963 mpred_mv_wr_ptr =
4964 cur_pic->mpred_mv_wr_start_addr +
4965 (hevc->slice_addr * MV_MEM_UNIT);
4966
4967 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4968 hevc_print(hevc, 0,
4969 "cur pic index %d col pic index %d\n", cur_pic->index,
4970 col_pic->index);
4971 }
4972
4973 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
4974 cur_pic->mpred_mv_wr_start_addr);
4975 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
4976
4977 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
4978 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
4979 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
4980
4981 data32 = READ_VREG(HEVC_MPRED_CTRL0);
4982 data32 = (hevc->slice_type |
4983 hevc->new_pic << 2 |
4984 hevc->new_tile << 3 |
4985 hevc->isNextSliceSegment << 4 |
4986 hevc->TMVPFlag << 5 |
4987 hevc->LDCFlag << 6 |
4988 hevc->ColFromL0Flag << 7 |
4989 above_ptr_ctrl << 8 |
4990 above_en << 9 |
4991 mv_wr_en << 10 |
4992 mv_rd_en << 11 |
4993 col_isIntra << 12 |
4994 buffer_linear << 13 |
4995 hevc->LongTerm_Curr << 14 |
4996 hevc->LongTerm_Col << 15 |
4997 hevc->lcu_size_log2 << 16 |
4998 cu_size_log2 << 20 | hevc->plevel << 24);
4999 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
5000
5001 data32 = READ_VREG(HEVC_MPRED_CTRL1);
5002 data32 = (
5003#if 0
5004 /* no set in m8baby test1902 */
5005 /* Don't override clk_forced_on , */
5006 (data32 & (0x1 << 24)) |
5007#endif
5008 hevc->MaxNumMergeCand |
5009 AMVP_MAX_NUM_CANDS << 4 |
5010 AMVP_MAX_NUM_CANDS_MEM << 8 |
5011 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
5012 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
5013
5014 data32 = (hevc->pic_w | hevc->pic_h << 16);
5015 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
5016
5017 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5018 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
5019
5020 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
5021 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
5022
5023 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
5024 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
5025
5026 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
5027 /* col_RefNum_L0<<16| */
5028 /* col_RefNum_L1<<24 */
5029 );
5030 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
5031
5032 data32 = (hevc->LongTerm_Ref);
5033 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
5034
5035 data32 = 0;
5036 for (i = 0; i < hevc->RefNum_L0; i++)
5037 data32 = data32 | (1 << i);
5038 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
5039
5040 data32 = 0;
5041 for (i = 0; i < hevc->RefNum_L1; i++)
5042 data32 = data32 | (1 << i);
5043 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
5044
5045 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
5046 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
5047
5048 /* below MPRED Ref_POC_xx_Lx registers must follow Ref_POC_xx_L0 ->
5049 * Ref_POC_xx_L1 in pair write order!!!
5050 */
5051 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
5052 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
5053
5054 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
5055 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
5056
5057 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
5058 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
5059
5060 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
5061 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
5062
5063 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
5064 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
5065
5066 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
5067 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
5068
5069 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
5070 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
5071
5072 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
5073 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
5074
5075 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
5076 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
5077
5078 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
5079 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
5080
5081 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
5082 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
5083
5084 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
5085 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
5086
5087 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
5088 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
5089
5090 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
5091 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
5092
5093 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
5094 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
5095
5096 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
5097 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
5098
5099 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
5100 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
5101
5102 if (hevc->new_pic) {
5103 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
5104 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
5105 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
5106 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
5107 } else if (!hevc->isNextSliceSegment) {
5108 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
5109 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
5110 }
5111
5112 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
5113}
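
/*
 * MV buffer addressing note: MV_MEM_UNIT is the per-LCU motion-vector
 * footprint chosen from lcu_size_log2 (0x200 bytes for 64x64 LCUs, 0x80
 * for 32x32, 0x20 otherwise).  Write pointers index the current picture's
 * MV buffer and read pointers the collocated picture's buffer, both offset
 * by slice_addr * MV_MEM_UNIT; the row-jump registers skip
 * (lcu_x_num - tile_width_lcu) LCUs, i.e. the per-row stride correction
 * when a tile is narrower than the picture.  The HEVC_MPRED_Lx_REFxx_POC
 * registers are written in L0/L1 pairs, as required by the comment above.
 */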
5114
5115static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
5116{
5117 unsigned int data32, data32_2;
5118 int misc_flag0 = hevc->misc_flag0;
5119 int slice_deblocking_filter_disabled_flag = 0;
5120
5121 int mc_buffer_size_u_v =
5122 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
5123 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
5124 struct PIC_s *cur_pic = hevc->cur_pic;
5125 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
5126
5127 data32 = READ_VREG(HEVC_SAO_CTRL0);
5128 data32 &= (~0xf);
5129 data32 |= hevc->lcu_size_log2;
5130 WRITE_VREG(HEVC_SAO_CTRL0, data32);
5131
5132 data32 = (hevc->pic_w | hevc->pic_h << 16);
5133 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5134
5135 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5136 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5137
5138 if (hevc->new_pic)
5139 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5140#ifdef LOSLESS_COMPRESS_MODE
5141/*SUPPORT_10BIT*/
5142 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5143 data32 = READ_VREG(HEVC_SAO_CTRL5);
5144 data32 &= (~(0xff << 16));
5145
5146 if (get_double_write_mode(hevc) == 2 ||
5147 get_double_write_mode(hevc) == 3)
5148 data32 |= (0xff<<16);
5149 else if (get_double_write_mode(hevc) == 4)
5150 data32 |= (0x33<<16);
5151
5152 if (hevc->mem_saving_mode == 1)
5153 data32 |= (1 << 9);
5154 else
5155 data32 &= ~(1 << 9);
5156 if (workaround_enable & 1)
5157 data32 |= (1 << 7);
5158 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5159 }
5160 data32 = cur_pic->mc_y_adr;
5161 if (get_double_write_mode(hevc))
5162 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5163
5164 if ((get_double_write_mode(hevc) & 0x10) == 0)
5165 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5166
5167 if (hevc->mmu_enable)
5168 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5169#else
5170 data32 = cur_pic->mc_y_adr;
5171 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5172#endif
5173 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5174 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5175
5176#ifdef LOSLESS_COMPRESS_MODE
5177/*SUPPORT_10BIT*/
5178 if (get_double_write_mode(hevc))
5179 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5180#else
5181 data32 = cur_pic->mc_u_v_adr;
5182 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5183#endif
5184 data32 = (mc_buffer_size_u_v_h << 16);
5185 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5186
5187#ifdef LOSLESS_COMPRESS_MODE
5188/*SUPPORT_10BIT*/
5189 if (get_double_write_mode(hevc)) {
5190 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5191 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5192 }
5193#else
5194 /* multi tile to do... */
5195 data32 = cur_pic->mc_y_adr;
5196 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5197
5198 data32 = cur_pic->mc_u_v_adr;
5199 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5200#endif
5201 /* DBLK CONFIG HERE */
5202 if (hevc->new_pic) {
5203 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5204 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5205 data32 = (0xff << 8) | (0x0 << 0);
5206 else
5207 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5208 (0x0 << 0); /* h265 video format*/
5209
5210 if (hevc->pic_w >= 1280)
5211 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5212 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5213 if (get_double_write_mode(hevc) == 0)
5214 data32 |= (0x1 << 8); /*enable first write*/
5215 else if (get_double_write_mode(hevc) == 0x10)
5216 data32 |= (0x1 << 9); /*double write only*/
5217 else
5218 data32 |= ((0x1 << 8) |(0x1 << 9));
5219
5220 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5221 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5222 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5223 }
5224 data32 = (hevc->pic_w | hevc->pic_h << 16);
5225 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5226
5227 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5228 data32 =
5229 ((misc_flag0 >>
5230 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5231 0x1) << 3;
5232 } else
5233 data32 = 0;
5234 data32 |=
5235 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5236 ((params->p.pps_cr_qp_offset
5237 & 0x1f) <<
5238 9));
5239 data32 |=
5240 (hevc->lcu_size ==
5241 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5242
5243 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5244
5245 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5246 /*if (debug & 0x80) {*/
5247 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5248 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5249 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5250 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5251 data32);
5252 /*}*/
5253 }
5254 }
5255#if 0
5256 data32 = READ_VREG(HEVC_SAO_CTRL1);
5257 data32 &= (~0x3000);
5258 data32 |= (hevc->mem_map_mode <<
5259 12);
5260
5261/* [13:12] axi_aformat,
5262 * 0-Linear, 1-32x32, 2-64x32
5263 */
5264 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5265
5266 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5267 data32 &= (~0x30);
5268 data32 |= (hevc->mem_map_mode <<
5269 4);
5270
5271/* [5:4] -- address_format
5272 * 00:linear 01:32x32 10:64x32
5273 */
5274 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5275#else
5276 /* m8baby test1902 */
5277 data32 = READ_VREG(HEVC_SAO_CTRL1);
5278 data32 &= (~0x3000);
5279 data32 |= (hevc->mem_map_mode <<
5280 12);
5281
5282/* [13:12] axi_aformat, 0-Linear,
5283 * 1-32x32, 2-64x32
5284 */
5285 data32 &= (~0xff0);
5286 /* data32 |= 0x670; // Big-Endian per 64-bit */
5287 data32 |= endian; /* Big-Endian per 64-bit */
5288 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5289 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5290 if (get_double_write_mode(hevc) == 0)
5291 data32 |= 0x2; /*disable double write*/
5292 else if (get_double_write_mode(hevc) & 0x10)
5293 data32 |= 0x1; /*disable cm*/
5294 } else {
5295 unsigned int data;
5296 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5297 (0x0 << 0); /* h265 video format*/
5298 if (hevc->pic_w >= 1280)
5299 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5300 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5301 if (get_double_write_mode(hevc) == 0)
5302 data |= (0x1 << 8); /*enable first write*/
5303 else if (get_double_write_mode(hevc) & 0x10)
5304 data |= (0x1 << 9); /*double write only*/
5305 else
5306 data |= ((0x1 << 8) |(0x1 << 9));
5307
5308 WRITE_VREG(HEVC_DBLK_CFGB, data);
5309 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5310 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5311 }
5312
5313 /* swap uv */
5314 if (hevc->is_used_v4l) {
5315 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5316 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5317 data32 &= ~(1 << 8); /* NV21 */
5318 else
5319 data32 |= (1 << 8); /* NV12 */
5320 }
5321
5322 /*
5323 * [31:24] ar_fifo1_axi_thred
5324 * [23:16] ar_fifo0_axi_thred
5325 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5326 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5327 * [11:08] axi_lendian_C
5328 * [07:04] axi_lendian_Y
5329 * [3] reserved
5330 * [2] clk_forceon
5331 * [1] dw_disable:disable double write output
5332 * [0] cm_disable:disable compress output
5333 */
5334 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5335 if (get_double_write_mode(hevc) & 0x10) {
5336 /* [23:22] dw_v1_ctrl
5337 *[21:20] dw_v0_ctrl
5338 *[19:18] dw_h1_ctrl
5339 *[17:16] dw_h0_ctrl
5340 */
5341 data32 = READ_VREG(HEVC_SAO_CTRL5);
5342 /*set them all 0 for H265_NV21 (no down-scale)*/
5343 data32 &= ~(0xff << 16);
5344 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5345 }
5346
5347 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5348 data32 &= (~0x30);
5349 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5350 data32 |= (hevc->mem_map_mode <<
5351 4);
5352 data32 &= (~0xF);
5353 data32 |= 0xf; /* valid only when double write only */
5354 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5355
5356 /* swap uv */
5357 if (hevc->is_used_v4l) {
5358 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5359 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5360 data32 |= (1 << 12); /* NV21 */
5361 else
5362 data32 &= ~(1 << 12); /* NV12 */
5363 }
5364
5365 /*
5366 * [3:0] little_endian
5367 * [5:4] address_format 00:linear 01:32x32 10:64x32
5368 * [7:6] reserved
5369 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5370 * [11:10] reserved
5371 * [12] CbCr_byte_swap
5372 * [31:13] reserved
5373 */
5374 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5375#endif
5376 data32 = 0;
5377 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5378 data32_2 &= (~0x300);
5379 /* slice_deblocking_filter_disabled_flag = 0;
5380 * ucode has handle it , so read it from ucode directly
5381 */
5382 if (hevc->tile_enabled) {
5383 data32 |=
5384 ((misc_flag0 >>
5385 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5386 0x1) << 0;
5387 data32_2 |=
5388 ((misc_flag0 >>
5389 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5390 0x1) << 8;
5391 }
5392 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5393 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5394 0x1; /* ucode has handle it,so read it from ucode directly */
5395 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5396 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5397 /* slice_deblocking_filter_disabled_flag =
5398 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5399 * //ucode has handle it , so read it from ucode directly
5400 */
5401 data32 |= slice_deblocking_filter_disabled_flag << 2;
5402 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5403 hevc_print_cont(hevc, 0,
5404 "(1,%x)", data32);
5405 if (!slice_deblocking_filter_disabled_flag) {
5406 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5407 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5408 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5409 hevc_print_cont(hevc, 0,
5410 "(2,%x)", data32);
5411 }
5412 } else {
5413 data32 |=
5414 ((misc_flag0 >>
5415 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5416 0x1) << 2;
5417 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5418 hevc_print_cont(hevc, 0,
5419 "(3,%x)", data32);
5420 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5421 0x1) == 0) {
5422 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5423 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5424 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5425 hevc_print_cont(hevc, 0,
5426 "(4,%x)", data32);
5427 }
5428 }
5429 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5430 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5431 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5432 || (!slice_deblocking_filter_disabled_flag))) {
5433 data32 |=
5434 ((misc_flag0 >>
5435 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5436 & 0x1) << 1;
5437 data32_2 |=
5438 ((misc_flag0 >>
5439 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5440 & 0x1) << 9;
5441 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5442 hevc_print_cont(hevc, 0,
5443 "(5,%x)\n", data32);
5444 } else {
5445 data32 |=
5446 ((misc_flag0 >>
5447 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5448 & 0x1) << 1;
5449 data32_2 |=
5450 ((misc_flag0 >>
5451 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5452 & 0x1) << 9;
5453 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5454 hevc_print_cont(hevc, 0,
5455 "(6,%x)\n", data32);
5456 }
5457 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5458 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5459}
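
/*
 * Double-write configuration note (bit meanings taken from the inline
 * comments in this function, not from a datasheet): HEVC_DBLK_CFGB bit 8
 * enables the compressed (first) write and bit 9 the uncompressed double
 * write, so dw mode 0 sets only bit 8, mode 0x10 only bit 9, and the other
 * modes set both.  HEVC_SAO_CTRL1 carries mem_map_mode in [13:12] and the
 * endian pattern in [11:4]; on pre-G12A parts its bits [1:0] instead
 * disable the double write (dw mode 0) or the compressed output (dw mode
 * 0x10).  HEVC_SAO_CTRL5 [23:16] selects the down-scale ratio (0xff for dw
 * 2/3, 0x33 for dw 4, cleared for the NV21-only path).  For v4l playback
 * the NV12/NV21 choice toggles bit 8 of SAO_CTRL1 and bit 12 of
 * HEVCD_IPP_AXIIF_CONFIG (CbCr byte swap).
 */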
5460
5461#ifdef TEST_NO_BUF
5462static unsigned char test_flag = 1;
5463#endif
5464
5465static void pic_list_process(struct hevc_state_s *hevc)
5466{
5467 int work_pic_num = get_work_pic_num(hevc);
5468 int alloc_pic_count = 0;
5469 int i;
5470 struct PIC_s *pic;
5471 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5472 pic = hevc->m_PIC[i];
5473 if (pic == NULL || pic->index == -1)
5474 continue;
5475 alloc_pic_count++;
5476 if (pic->output_mark == 0 && pic->referenced == 0
5477 && pic->output_ready == 0
5478 && (pic->width != hevc->pic_w ||
5479 pic->height != hevc->pic_h)
5480 ) {
5481 set_buf_unused(hevc, pic->BUF_index);
5482 pic->BUF_index = -1;
5483 if (alloc_pic_count > work_pic_num) {
5484 pic->width = 0;
5485 pic->height = 0;
5486 pic->index = -1;
5487 } else {
5488 pic->width = hevc->pic_w;
5489 pic->height = hevc->pic_h;
5490 }
5491 }
5492 }
5493 if (alloc_pic_count < work_pic_num) {
5494 int new_count = alloc_pic_count;
5495 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5496 pic = hevc->m_PIC[i];
5497 if (pic && pic->index == -1) {
5498 pic->index = i;
5499 pic->BUF_index = -1;
5500 pic->width = hevc->pic_w;
5501 pic->height = hevc->pic_h;
5502 new_count++;
5503 if (new_count >=
5504 work_pic_num)
5505 break;
5506 }
5507 }
5508
5509 }
5510 dealloc_unused_buf(hevc);
5511 if (get_alloc_pic_count(hevc)
5512 != alloc_pic_count) {
5513 hevc_print_cont(hevc, 0,
5514 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5515 __func__,
5516 work_pic_num,
5517 alloc_pic_count,
5518 get_alloc_pic_count(hevc));
5519 }
5520}
5521
5522static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5523{
5524 int i;
5525 struct PIC_s *pic;
5526 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5527 pic = hevc->m_PIC[i];
5528 if (pic == NULL || pic->index == -1)
5529 continue;
5530
5531 if (pic->output_mark == 0 && pic->referenced == 0
5532 && pic->output_ready == 0
5533 && pic->scatter_alloc
5534 )
5535 release_pic_mmu_buf(hevc, pic);
5536 }
5537
5538}
5539
5540static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5541 union param_u *rpm_param)
5542{
5543 struct PIC_s *new_pic = NULL;
5544 struct PIC_s *pic;
5545 int i;
5546 int ret;
5547
5548 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5549 pic = hevc->m_PIC[i];
5550 if (pic == NULL || pic->index == -1)
5551 continue;
5552
5553 if (pic->output_mark == 0 && pic->referenced == 0
5554 && pic->output_ready == 0
5555 && pic->width == hevc->pic_w
5556 && pic->height == hevc->pic_h
5557 && pic->vf_ref == 0
5558 ) {
5559 if (new_pic) {
5560 if (new_pic->POC != INVALID_POC) {
5561 if (pic->POC == INVALID_POC ||
5562 pic->POC < new_pic->POC)
5563 new_pic = pic;
5564 }
5565 } else
5566 new_pic = pic;
5567 }
5568 }
5569
5570 if (new_pic == NULL)
5571 return NULL;
5572
5573 if (new_pic->BUF_index < 0) {
5574 if (alloc_buf(hevc) < 0)
5575 return NULL;
5576 else {
5577 if (config_pic(hevc, new_pic) < 0) {
5578 dealloc_pic_buf(hevc, new_pic);
5579 return NULL;
5580 }
5581 }
5582 new_pic->width = hevc->pic_w;
5583 new_pic->height = hevc->pic_h;
5584 set_canvas(hevc, new_pic);
5585
5586 init_pic_list_hw(hevc);
5587 }
5588
5589 if (new_pic) {
5590 new_pic->double_write_mode =
5591 get_double_write_mode(hevc);
5592 if (new_pic->double_write_mode)
5593 set_canvas(hevc, new_pic);
5594
5595#ifdef TEST_NO_BUF
5596 if (test_flag) {
5597 test_flag = 0;
5598 return NULL;
5599 } else
5600 test_flag = 1;
5601#endif
5602 if (get_mv_buf(hevc, new_pic) < 0)
5603 return NULL;
5604
5605 if (hevc->mmu_enable) {
5606 ret = H265_alloc_mmu(hevc, new_pic,
5607 rpm_param->p.bit_depth,
5608 hevc->frame_mmu_map_addr);
5609 if (ret != 0) {
5610 put_mv_buf(hevc, new_pic);
5611 hevc_print(hevc, 0,
5612				"can't alloc needed mmu buf, idx %d ret = %d\n",
5613 new_pic->decode_idx,
5614 ret);
5615 return NULL;
5616 }
5617 }
5618 new_pic->referenced = 1;
5619 new_pic->decode_idx = hevc->decode_idx;
5620 new_pic->slice_idx = 0;
5621 new_pic->referenced = 1;
5622 new_pic->output_mark = 0;
5623 new_pic->recon_mark = 0;
5624 new_pic->error_mark = 0;
5625 new_pic->dis_mark = 0;
5626 /* new_pic->output_ready = 0; */
5627 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5628 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5629 new_pic->POC = hevc->curr_POC;
5630 new_pic->pic_struct = hevc->curr_pic_struct;
5631 if (new_pic->aux_data_buf)
5632 release_aux_data(hevc, new_pic);
5633 new_pic->mem_saving_mode =
5634 hevc->mem_saving_mode;
5635 new_pic->bit_depth_luma =
5636 hevc->bit_depth_luma;
5637 new_pic->bit_depth_chroma =
5638 hevc->bit_depth_chroma;
5639 new_pic->video_signal_type =
5640 hevc->video_signal_type;
5641
5642 new_pic->conformance_window_flag =
5643 hevc->param.p.conformance_window_flag;
5644 new_pic->conf_win_left_offset =
5645 hevc->param.p.conf_win_left_offset;
5646 new_pic->conf_win_right_offset =
5647 hevc->param.p.conf_win_right_offset;
5648 new_pic->conf_win_top_offset =
5649 hevc->param.p.conf_win_top_offset;
5650 new_pic->conf_win_bottom_offset =
5651 hevc->param.p.conf_win_bottom_offset;
5652 new_pic->chroma_format_idc =
5653 hevc->param.p.chroma_format_idc;
5654
5655 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5656 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5657 __func__, new_pic->index,
5658 new_pic->BUF_index, new_pic->decode_idx,
5659 new_pic->POC);
5660
5661 }
5662 if (pic_list_debug & 0x1) {
5663 dump_pic_list(hevc);
5664 pr_err("\n*******************************************\n");
5665 }
5666
5667 return new_pic;
5668}
5669
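/*
 * v4l2 variant of get_new_pic() (as read from the code below): buffers come
 * from the v4l capture pool. Entries in state V4L_CAP_BUFF_IN_DEC are reused
 * once they are completely idle, while entries still queued in m2m are bound
 * to a PIC_s via v4l_alloc_buf()/v4l_config_pic() before use. The remaining
 * per-picture initialization mirrors the non-v4l path above.
 */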
5670static struct PIC_s *v4l_get_new_pic(struct hevc_state_s *hevc,
5671 union param_u *rpm_param)
5672{
5673 int ret;
5674 struct aml_vcodec_ctx * v4l = hevc->v4l2_ctx;
5675 struct v4l_buff_pool *pool = &v4l->cap_pool;
5676 struct PIC_s *new_pic = NULL;
5677 struct PIC_s *pic = NULL;
5678 int i;
5679
5680 for (i = 0; i < pool->in; ++i) {
5681 u32 state = (pool->seq[i] >> 16);
5682 u32 index = (pool->seq[i] & 0xffff);
5683
5684 switch (state) {
5685 case V4L_CAP_BUFF_IN_DEC:
5686 pic = hevc->m_PIC[i];
5687 if (pic && (pic->index != -1) &&
5688 (pic->output_mark == 0) &&
5689 (pic->referenced == 0) &&
5690 (pic->output_ready == 0) &&
5691 (pic->width == hevc->pic_w) &&
5692 (pic->height == hevc->pic_h) &&
5693 (pic->vf_ref == 0) &&
5694 pic->cma_alloc_addr) {
5695 new_pic = pic;
5696 }
5697 break;
5698 case V4L_CAP_BUFF_IN_M2M:
5699 pic = hevc->m_PIC[index];
5700 pic->width = hevc->pic_w;
5701 pic->height = hevc->pic_h;
5702 if ((pic->index != -1) &&
5703 !v4l_alloc_buf(hevc, pic)) {
5704 v4l_config_pic(hevc, pic);
5705 init_pic_list_hw(hevc);
5706 new_pic = pic;
5707 }
5708 break;
5709 default:
5710 pr_err("v4l buffer state err %d.\n", state);
5711 break;
5712 }
5713
5714 if (new_pic)
5715 break;
5716 }
5717
5718 if (new_pic == NULL)
5719 return NULL;
5720
5721 new_pic->double_write_mode = get_double_write_mode(hevc);
5722 if (new_pic->double_write_mode)
5723 set_canvas(hevc, new_pic);
5724
5725 if (get_mv_buf(hevc, new_pic) < 0)
5726 return NULL;
5727
5728 if (hevc->mmu_enable) {
5729 ret = H265_alloc_mmu(hevc, new_pic,
5730 rpm_param->p.bit_depth,
5731 hevc->frame_mmu_map_addr);
5732 if (ret != 0) {
5733 put_mv_buf(hevc, new_pic);
5734 hevc_print(hevc, 0,
5735				"can't alloc needed mmu buffer, idx %d ret = %d\n",
5736 new_pic->decode_idx, ret);
5737 return NULL;
5738 }
5739 }
5740
5741	new_pic->referenced = 1;
5742	new_pic->decode_idx = hevc->decode_idx;
5743	new_pic->slice_idx = 0;
5745 new_pic->output_mark = 0;
5746 new_pic->recon_mark = 0;
5747 new_pic->error_mark = 0;
5748 new_pic->dis_mark = 0;
5749 /* new_pic->output_ready = 0; */
5750 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5751 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5752 new_pic->POC = hevc->curr_POC;
5753 new_pic->pic_struct = hevc->curr_pic_struct;
5754
5755 if (new_pic->aux_data_buf)
5756 release_aux_data(hevc, new_pic);
5757 new_pic->mem_saving_mode =
5758 hevc->mem_saving_mode;
5759 new_pic->bit_depth_luma =
5760 hevc->bit_depth_luma;
5761 new_pic->bit_depth_chroma =
5762 hevc->bit_depth_chroma;
5763 new_pic->video_signal_type =
5764 hevc->video_signal_type;
5765
5766 new_pic->conformance_window_flag =
5767 hevc->param.p.conformance_window_flag;
5768 new_pic->conf_win_left_offset =
5769 hevc->param.p.conf_win_left_offset;
5770 new_pic->conf_win_right_offset =
5771 hevc->param.p.conf_win_right_offset;
5772 new_pic->conf_win_top_offset =
5773 hevc->param.p.conf_win_top_offset;
5774 new_pic->conf_win_bottom_offset =
5775 hevc->param.p.conf_win_bottom_offset;
5776 new_pic->chroma_format_idc =
5777 hevc->param.p.chroma_format_idc;
5778
5779 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5780 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5781 __func__, new_pic->index,
5782 new_pic->BUF_index, new_pic->decode_idx,
5783 new_pic->POC);
5784
5785 return new_pic;
5786}
5787
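/* Count the pictures currently queued for display (output_ready == 1). */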
5788static int get_display_pic_num(struct hevc_state_s *hevc)
5789{
5790 int i;
5791 struct PIC_s *pic;
5792 int num = 0;
5793
5794 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5795 pic = hevc->m_PIC[i];
5796 if (pic == NULL ||
5797 pic->index == -1)
5798 continue;
5799
5800 if (pic->output_ready == 1)
5801 num++;
5802 }
5803 return num;
5804}
5805
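/*
 * Drain the buffer manager (as read from the code below): mark the current
 * picture for output if one is given, then call output_pic(hevc, 1)
 * repeatedly until no displayable picture is left. Error-marked or
 * debug-discarded pictures are recycled instead of being queued for display,
 * and all referenced flags are cleared at the end.
 */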
5806static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5807{
5808 struct PIC_s *pic_display;
5809
5810 if (pic) {
5811 /*PB skip control */
5812 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5813 /* start decoding after first I */
5814 hevc->ignore_bufmgr_error |= 0x1;
5815 }
5816 if (hevc->ignore_bufmgr_error & 1) {
5817 if (hevc->PB_skip_count_after_decoding > 0)
5818 hevc->PB_skip_count_after_decoding--;
5819 else {
5820 /* start displaying */
5821 hevc->ignore_bufmgr_error |= 0x2;
5822 }
5823 }
5824 /**/
5825 if (pic->POC != INVALID_POC) {
5826 pic->output_mark = 1;
5827 pic->recon_mark = 1;
5828 }
5829 pic->recon_mark = 1;
5830 }
5831 do {
5832 pic_display = output_pic(hevc, 1);
5833
5834 if (pic_display) {
5835 pic_display->referenced = 0;
5836 put_mv_buf(hevc, pic_display);
5837 if ((pic_display->error_mark
5838 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5839 || (get_dbg_flag(hevc) &
5840 H265_DEBUG_DISPLAY_CUR_FRAME)
5841 || (get_dbg_flag(hevc) &
5842 H265_DEBUG_NO_DISPLAY)) {
5843 pic_display->output_ready = 0;
5844 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5845 hevc_print(hevc, 0,
5846 "[BM] Display: POC %d, ",
5847 pic_display->POC);
5848 hevc_print_cont(hevc, 0,
5849 "decoding index %d ==> ",
5850 pic_display->decode_idx);
5851 hevc_print_cont(hevc, 0,
5852 "Debug mode or error, recycle it\n");
5853 }
5854 } else {
5855 if (hevc->i_only & 0x1
5856 && pic_display->slice_type != 2) {
5857 pic_display->output_ready = 0;
5858 } else {
5859 prepare_display_buf(hevc, pic_display);
5860 if (get_dbg_flag(hevc)
5861 & H265_DEBUG_BUFMGR) {
5862 hevc_print(hevc, 0,
5863 "[BM] flush Display: POC %d, ",
5864 pic_display->POC);
5865 hevc_print_cont(hevc, 0,
5866 "decoding index %d\n",
5867 pic_display->decode_idx);
5868 }
5869 }
5870 }
5871 }
5872 } while (pic_display);
5873 clear_referenced_flag(hevc);
5874}
5875
5876/*
5877* dv_meta_flag: 0, all aux data; 1, dolby meta only; 2, exclude dolby meta
5878*/
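/*
 * Sketch of the packed record layout that the loop in set_aux_data() appears
 * to build in pic->aux_data_buf (derived from how h[] and p are filled; not
 * an official format description):
 *
 *   byte 0..3 : payload length, big endian
 *   byte 4    : tag (aux_adr[i + 3] >> 8)
 *   byte 5    : 0
 *   byte 6..7 : padding length, big endian
 *   byte 8..  : payload bytes copied from the hardware aux buffer
 *
 * Records are appended back to back on every call.
 */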
5879static void set_aux_data(struct hevc_state_s *hevc,
5880 struct PIC_s *pic, unsigned char suffix_flag,
5881 unsigned char dv_meta_flag)
5882{
5883 int i;
5884 unsigned short *aux_adr;
5885 unsigned int size_reg_val =
5886 READ_VREG(HEVC_AUX_DATA_SIZE);
5887 unsigned int aux_count = 0;
5888 int aux_size = 0;
5889 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
5890 return;
5891
5892 if (hevc->aux_data_dirty ||
5893 hevc->m_ins_flag == 0) {
5894
5895 hevc->aux_data_dirty = 0;
5896 }
5897
5898 if (suffix_flag) {
5899 aux_adr = (unsigned short *)
5900 (hevc->aux_addr +
5901 hevc->prefix_aux_size);
5902 aux_count =
5903 ((size_reg_val & 0xffff) << 4)
5904 >> 1;
5905 aux_size =
5906 hevc->suffix_aux_size;
5907 } else {
5908 aux_adr =
5909 (unsigned short *)hevc->aux_addr;
5910 aux_count =
5911 ((size_reg_val >> 16) << 4)
5912 >> 1;
5913 aux_size =
5914 hevc->prefix_aux_size;
5915 }
5916 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5917 hevc_print(hevc, 0,
5918 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
5919 __func__, pic, pic->aux_data_size,
5920 aux_count, suffix_flag, dv_meta_flag);
5921 }
5922 if (aux_size > 0 && aux_count > 0) {
5923 int heads_size = 0;
5924 int new_size;
5925 char *new_buf;
5926
5927 for (i = 0; i < aux_count; i++) {
5928 unsigned char tag = aux_adr[i] >> 8;
5929 if (tag != 0 && tag != 0xff) {
5930 if (dv_meta_flag == 0)
5931 heads_size += 8;
5932 else if (dv_meta_flag == 1 && tag == 0x1)
5933 heads_size += 8;
5934 else if (dv_meta_flag == 2 && tag != 0x1)
5935 heads_size += 8;
5936 }
5937 }
5938 new_size = pic->aux_data_size + aux_count + heads_size;
5939 new_buf = vmalloc(new_size);
5940 if (new_buf) {
5941 unsigned char valid_tag = 0;
5942 unsigned char *h =
5943 new_buf +
5944 pic->aux_data_size;
5945 unsigned char *p = h + 8;
5946 int len = 0;
5947 int padding_len = 0;
5948			if (pic->aux_data_buf) {
5949				memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
5950				vfree(pic->aux_data_buf);
			}
5951 pic->aux_data_buf = new_buf;
5952 for (i = 0; i < aux_count; i += 4) {
5953 int ii;
5954 unsigned char tag = aux_adr[i + 3] >> 8;
5955 if (tag != 0 && tag != 0xff) {
5956 if (dv_meta_flag == 0)
5957 valid_tag = 1;
5958 else if (dv_meta_flag == 1
5959 && tag == 0x1)
5960 valid_tag = 1;
5961 else if (dv_meta_flag == 2
5962 && tag != 0x1)
5963 valid_tag = 1;
5964 else
5965 valid_tag = 0;
5966 if (valid_tag && len > 0) {
5967 pic->aux_data_size +=
5968 (len + 8);
5969 h[0] = (len >> 24)
5970 & 0xff;
5971 h[1] = (len >> 16)
5972 & 0xff;
5973 h[2] = (len >> 8)
5974 & 0xff;
5975 h[3] = (len >> 0)
5976 & 0xff;
5977 h[6] =
5978 (padding_len >> 8)
5979 & 0xff;
5980 h[7] = (padding_len)
5981 & 0xff;
5982 h += (len + 8);
5983 p += 8;
5984 len = 0;
5985 padding_len = 0;
5986 }
5987 if (valid_tag) {
5988 h[4] = tag;
5989 h[5] = 0;
5990 h[6] = 0;
5991 h[7] = 0;
5992 }
5993 }
5994 if (valid_tag) {
5995 for (ii = 0; ii < 4; ii++) {
5996 unsigned short aa =
5997 aux_adr[i + 3
5998 - ii];
5999 *p = aa & 0xff;
6000 p++;
6001 len++;
6002 /*if ((aa >> 8) == 0xff)
6003 padding_len++;*/
6004 }
6005 }
6006 }
6007 if (len > 0) {
6008 pic->aux_data_size += (len + 8);
6009 h[0] = (len >> 24) & 0xff;
6010 h[1] = (len >> 16) & 0xff;
6011 h[2] = (len >> 8) & 0xff;
6012 h[3] = (len >> 0) & 0xff;
6013 h[6] = (padding_len >> 8) & 0xff;
6014 h[7] = (padding_len) & 0xff;
6015 }
6016 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
6017 hevc_print(hevc, 0,
6018 "aux: (size %d) suffix_flag %d\n",
6019 pic->aux_data_size, suffix_flag);
6020 for (i = 0; i < pic->aux_data_size; i++) {
6021 hevc_print_cont(hevc, 0,
6022 "%02x ", pic->aux_data_buf[i]);
6023 if (((i + 1) & 0xf) == 0)
6024 hevc_print_cont(hevc, 0, "\n");
6025 }
6026 hevc_print_cont(hevc, 0, "\n");
6027 }
6028
6029 } else {
6030 hevc_print(hevc, 0, "new buf alloc failed\n");
6031 if (pic->aux_data_buf)
6032 vfree(pic->aux_data_buf);
6033 pic->aux_data_buf = NULL;
6034 pic->aux_data_size = 0;
6035 }
6036 }
6037
6038}
6039
6040static void release_aux_data(struct hevc_state_s *hevc,
6041 struct PIC_s *pic)
6042{
6043 if (pic->aux_data_buf)
6044 vfree(pic->aux_data_buf);
6045 pic->aux_data_buf = NULL;
6046 pic->aux_data_size = 0;
6047}
6048
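/*
 * Finish bookkeeping for the previously decoded picture (POC iPrevPOC, or
 * decoded_poc in multi-instance mode), as read from the code below: apply the
 * PB-skip policy, appear to trim the tail of its MMU allocation down to the
 * 4k pages the hardware reports as used (HEVC_SAO_MMU_STATUS), mark it for
 * output and emit pictures in POC order. For IDR pictures the whole DPB is
 * flushed via flush_output() instead.
 */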
6049static inline void hevc_pre_pic(struct hevc_state_s *hevc,
6050 struct PIC_s *pic)
6051{
6052
6053 /* prev pic */
6054 /*if (hevc->curr_POC != 0) {*/
6055 int decoded_poc = hevc->iPrevPOC;
6056#ifdef MULTI_INSTANCE_SUPPORT
6057 if (hevc->m_ins_flag) {
6058 decoded_poc = hevc->decoded_poc;
6059 hevc->decoded_poc = INVALID_POC;
6060 }
6061#endif
6062 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
6063 && hevc->m_nalUnitType !=
6064 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6065 struct PIC_s *pic_display;
6066
6067 pic = get_pic_by_POC(hevc, decoded_poc);
6068 if (pic && (pic->POC != INVALID_POC)) {
6069 /*PB skip control */
6070 if (pic->error_mark == 0
6071 && hevc->PB_skip_mode == 1) {
6072 /* start decoding after
6073 * first I
6074 */
6075 hevc->ignore_bufmgr_error |= 0x1;
6076 }
6077 if (hevc->ignore_bufmgr_error & 1) {
6078 if (hevc->PB_skip_count_after_decoding > 0) {
6079 hevc->PB_skip_count_after_decoding--;
6080 } else {
6081 /* start displaying */
6082 hevc->ignore_bufmgr_error |= 0x2;
6083 }
6084 }
6085 if (hevc->mmu_enable
6086 && ((hevc->double_write_mode & 0x10) == 0)) {
6087 if (!hevc->m_ins_flag) {
6088 hevc->used_4k_num =
6089 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
6090
6091 if ((!is_skip_decoding(hevc, pic)) &&
6092 (hevc->used_4k_num >= 0) &&
6093 (hevc->cur_pic->scatter_alloc
6094 == 1)) {
6095 hevc_print(hevc,
6096 H265_DEBUG_BUFMGR_MORE,
6097 "%s pic index %d scatter_alloc %d page_start %d\n",
6098 "decoder_mmu_box_free_idx_tail",
6099 hevc->cur_pic->index,
6100 hevc->cur_pic->scatter_alloc,
6101 hevc->used_4k_num);
6102 hevc_mmu_dma_check(hw_to_vdec(hevc));
6103 decoder_mmu_box_free_idx_tail(
6104 hevc->mmu_box,
6105 hevc->cur_pic->index,
6106 hevc->used_4k_num);
6107 hevc->cur_pic->scatter_alloc
6108 = 2;
6109 }
6110 hevc->used_4k_num = -1;
6111 }
6112 }
6113
6114 pic->output_mark = 1;
6115 pic->recon_mark = 1;
6116 pic->dis_mark = 1;
6117 }
6118 do {
6119 pic_display = output_pic(hevc, 0);
6120
6121 if (pic_display) {
6122 if ((pic_display->error_mark &&
6123 ((hevc->ignore_bufmgr_error &
6124 0x2) == 0))
6125 || (get_dbg_flag(hevc) &
6126 H265_DEBUG_DISPLAY_CUR_FRAME)
6127 || (get_dbg_flag(hevc) &
6128 H265_DEBUG_NO_DISPLAY)) {
6129 pic_display->output_ready = 0;
6130 if (get_dbg_flag(hevc) &
6131 H265_DEBUG_BUFMGR) {
6132 hevc_print(hevc, 0,
6133 "[BM] Display: POC %d, ",
6134 pic_display->POC);
6135 hevc_print_cont(hevc, 0,
6136 "decoding index %d ==> ",
6137 pic_display->
6138 decode_idx);
6139 hevc_print_cont(hevc, 0,
6140 "Debug or err,recycle it\n");
6141 }
6142 } else {
6143 if (hevc->i_only & 0x1
6144 && pic_display->
6145 slice_type != 2) {
6146 pic_display->output_ready = 0;
6147 } else {
6148 prepare_display_buf
6149 (hevc,
6150 pic_display);
6151 if (get_dbg_flag(hevc) &
6152 H265_DEBUG_BUFMGR) {
6153 hevc_print(hevc, 0,
6154 "[BM] Display: POC %d, ",
6155 pic_display->POC);
6156 hevc_print_cont(hevc, 0,
6157 "decoding index %d\n",
6158 pic_display->
6159 decode_idx);
6160 }
6161 }
6162 }
6163 }
6164 } while (pic_display);
6165 } else {
6166 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6167 hevc_print(hevc, 0,
6168 "[BM] current pic is IDR, ");
6169 hevc_print(hevc, 0,
6170 "clear referenced flag of all buffers\n");
6171 }
6172 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6173 dump_pic_list(hevc);
6174 pic = get_pic_by_POC(hevc, decoded_poc);
6175 flush_output(hevc, pic);
6176 }
6177
6178}
6179
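/*
 * Called when a new picture starts: check whether the previous picture
 * stopped short of its last LCU (against the previous picture's
 * lcu_x_num_pre * lcu_y_num_pre grid) and set error_mark accordingly,
 * subject to error_handle_policy bits 0x20 and 0x80.
 */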
6180static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
6181 int decoded_lcu)
6182{
6183 int current_lcu_idx = decoded_lcu;
6184 if (decoded_lcu < 0)
6185 return;
6186
6187 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6188 hevc_print(hevc, 0,
6189 "cur lcu idx = %d, (total %d)\n",
6190 current_lcu_idx, hevc->lcu_total);
6191 }
6192 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6193 if (hevc->first_pic_after_recover) {
6194 if (current_lcu_idx !=
6195 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6196 hevc->cur_pic->error_mark = 1;
6197 } else {
6198 if (hevc->lcu_x_num_pre != 0
6199 && hevc->lcu_y_num_pre != 0
6200 && current_lcu_idx != 0
6201 && current_lcu_idx <
6202 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6203 hevc->cur_pic->error_mark = 1;
6204 }
6205 if (hevc->cur_pic->error_mark) {
6206 hevc_print(hevc, 0,
6207 "cur lcu idx = %d, (total %d), set error_mark\n",
6208 current_lcu_idx,
6209 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
6210 if (is_log_enable(hevc))
6211 add_log(hevc,
6212 "cur lcu idx = %d, (total %d), set error_mark",
6213 current_lcu_idx,
6214 hevc->lcu_x_num_pre *
6215 hevc->lcu_y_num_pre);
6216
6217 }
6218
6219 }
6220 if (hevc->cur_pic && hevc->head_error_flag) {
6221 hevc->cur_pic->error_mark = 1;
6222 hevc_print(hevc, 0,
6223 "head has error, set error_mark\n");
6224 }
6225
6226 if ((error_handle_policy & 0x80) == 0) {
6227 if (hevc->over_decode && hevc->cur_pic) {
6228 hevc_print(hevc, 0,
6229 "over decode, set error_mark\n");
6230 hevc->cur_pic->error_mark = 1;
6231 }
6232 }
6233
6234 hevc->lcu_x_num_pre = hevc->lcu_x_num;
6235 hevc->lcu_y_num_pre = hevc->lcu_y_num;
6236}
6237
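/*
 * Same check as check_pic_decoded_error_pre() but against the current
 * picture's LCU grid; on error it may additionally flush the DPB when the
 * picture's POC has fallen more than MAX_BUF_NUM behind iPrevPOC.
 */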
6238static void check_pic_decoded_error(struct hevc_state_s *hevc,
6239 int decoded_lcu)
6240{
6241 int current_lcu_idx = decoded_lcu;
6242 if (decoded_lcu < 0)
6243 return;
6244
6245 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6246 hevc_print(hevc, 0,
6247 "cur lcu idx = %d, (total %d)\n",
6248 current_lcu_idx, hevc->lcu_total);
6249 }
6250 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6251 if (hevc->lcu_x_num != 0
6252 && hevc->lcu_y_num != 0
6253 && current_lcu_idx != 0
6254 && current_lcu_idx <
6255 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6256 hevc->cur_pic->error_mark = 1;
6257 if (hevc->cur_pic->error_mark) {
6258 hevc_print(hevc, 0,
6259 "cur lcu idx = %d, (total %d), set error_mark\n",
6260 current_lcu_idx,
6261 hevc->lcu_x_num*hevc->lcu_y_num);
6262			if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6263					&& ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6264				hevc_print(hevc, 0,
6265					"Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6266					hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6267				flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6268 }
6269 if (is_log_enable(hevc))
6270 add_log(hevc,
6271 "cur lcu idx = %d, (total %d), set error_mark",
6272 current_lcu_idx,
6273 hevc->lcu_x_num *
6274 hevc->lcu_y_num);
6275
6276 }
6277
6278 }
6279 if (hevc->cur_pic && hevc->head_error_flag) {
6280 hevc->cur_pic->error_mark = 1;
6281 hevc_print(hevc, 0,
6282 "head has error, set error_mark\n");
6283 }
6284
6285 if ((error_handle_policy & 0x80) == 0) {
6286 if (hevc->over_decode && hevc->cur_pic) {
6287 hevc_print(hevc, 0,
6288 "over decode, set error_mark\n");
6289 hevc->cur_pic->error_mark = 1;
6290 }
6291 }
6292}
6293
6294/* Only after one field or one frame has been decoded
6295 * can this function be called to collect QoS info. */
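/*
 * Two register layouts are handled below (as read from the code): on SoCs
 * before G12A, HEVC_MV_INFO / HEVC_QP_INFO / HEVC_SKIP_INFO each pack three
 * byte-sized samples that are sorted into min/avg/max. On G12A and later the
 * statistics are read sequentially from HEVC_PIC_QUALITY_DATA after clearing
 * the read index via HEVC_PIC_QUALITY_CTRL, and the counters are re-armed by
 * writing (1 << 8) to HEVC_PIC_QUALITY_CTRL when done.
 */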
6296static void get_picture_qos_info(struct hevc_state_s *hevc)
6297{
6298 struct PIC_s *picture = hevc->cur_pic;
6299
6300/*
6301#define DEBUG_QOS
6302*/
6303
6304 if (!hevc->cur_pic)
6305 return;
6306
6307 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6308 unsigned char a[3];
6309 unsigned char i, j, t;
6310 unsigned long data;
6311
6312 data = READ_VREG(HEVC_MV_INFO);
6313 if (picture->slice_type == I_SLICE)
6314 data = 0;
6315 a[0] = data & 0xff;
6316 a[1] = (data >> 8) & 0xff;
6317 a[2] = (data >> 16) & 0xff;
6318
6319 for (i = 0; i < 3; i++)
6320 for (j = i+1; j < 3; j++) {
6321 if (a[j] < a[i]) {
6322 t = a[j];
6323 a[j] = a[i];
6324 a[i] = t;
6325 } else if (a[j] == a[i]) {
6326 a[i]++;
6327 t = a[j];
6328 a[j] = a[i];
6329 a[i] = t;
6330 }
6331 }
6332 picture->max_mv = a[2];
6333 picture->avg_mv = a[1];
6334 picture->min_mv = a[0];
6335#ifdef DEBUG_QOS
6336 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6337 data, a[0], a[1], a[2]);
6338#endif
6339
6340 data = READ_VREG(HEVC_QP_INFO);
6341 a[0] = data & 0x1f;
6342 a[1] = (data >> 8) & 0x3f;
6343 a[2] = (data >> 16) & 0x7f;
6344
6345 for (i = 0; i < 3; i++)
6346 for (j = i+1; j < 3; j++) {
6347 if (a[j] < a[i]) {
6348 t = a[j];
6349 a[j] = a[i];
6350 a[i] = t;
6351 } else if (a[j] == a[i]) {
6352 a[i]++;
6353 t = a[j];
6354 a[j] = a[i];
6355 a[i] = t;
6356 }
6357 }
6358 picture->max_qp = a[2];
6359 picture->avg_qp = a[1];
6360 picture->min_qp = a[0];
6361#ifdef DEBUG_QOS
6362 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6363 data, a[0], a[1], a[2]);
6364#endif
6365
6366 data = READ_VREG(HEVC_SKIP_INFO);
6367 a[0] = data & 0x1f;
6368 a[1] = (data >> 8) & 0x3f;
6369 a[2] = (data >> 16) & 0x7f;
6370
6371 for (i = 0; i < 3; i++)
6372 for (j = i+1; j < 3; j++) {
6373 if (a[j] < a[i]) {
6374 t = a[j];
6375 a[j] = a[i];
6376 a[i] = t;
6377 } else if (a[j] == a[i]) {
6378 a[i]++;
6379 t = a[j];
6380 a[j] = a[i];
6381 a[i] = t;
6382 }
6383 }
6384 picture->max_skip = a[2];
6385 picture->avg_skip = a[1];
6386 picture->min_skip = a[0];
6387
6388#ifdef DEBUG_QOS
6389 hevc_print(hevc, 0,
6390 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6391 data, a[0], a[1], a[2]);
6392#endif
6393 } else {
6394 uint32_t blk88_y_count;
6395 uint32_t blk88_c_count;
6396 uint32_t blk22_mv_count;
6397 uint32_t rdata32;
6398 int32_t mv_hi;
6399 int32_t mv_lo;
6400 uint32_t rdata32_l;
6401 uint32_t mvx_L0_hi;
6402 uint32_t mvy_L0_hi;
6403 uint32_t mvx_L1_hi;
6404 uint32_t mvy_L1_hi;
6405 int64_t value;
6406 uint64_t temp_value;
6407#ifdef DEBUG_QOS
6408 int pic_number = picture->POC;
6409#endif
6410
6411 picture->max_mv = 0;
6412 picture->avg_mv = 0;
6413 picture->min_mv = 0;
6414
6415 picture->max_skip = 0;
6416 picture->avg_skip = 0;
6417 picture->min_skip = 0;
6418
6419 picture->max_qp = 0;
6420 picture->avg_qp = 0;
6421 picture->min_qp = 0;
6422
6423
6424
6425#ifdef DEBUG_QOS
6426 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6427 picture->slice_type,
6428 picture->POC);
6429#endif
6430 /* set rd_idx to 0 */
6431 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6432
6433 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6434 if (blk88_y_count == 0) {
6435#ifdef DEBUG_QOS
6436 hevc_print(hevc, 0,
6437 "[Picture %d Quality] NO Data yet.\n",
6438 pic_number);
6439#endif
6440 /* reset all counts */
6441 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6442 return;
6443 }
6444 /* qp_y_sum */
6445 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6446#ifdef DEBUG_QOS
6447 hevc_print(hevc, 0,
6448 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6449 pic_number, rdata32/blk88_y_count,
6450 rdata32, blk88_y_count);
6451#endif
6452 picture->avg_qp = rdata32/blk88_y_count;
6453 /* intra_y_count */
6454 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6455#ifdef DEBUG_QOS
6456 hevc_print(hevc, 0,
6457 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6458 pic_number, rdata32*100/blk88_y_count,
6459 '%', rdata32);
6460#endif
6461 /* skipped_y_count */
6462 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6463#ifdef DEBUG_QOS
6464 hevc_print(hevc, 0,
6465 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6466 pic_number, rdata32*100/blk88_y_count,
6467 '%', rdata32);
6468#endif
6469 picture->avg_skip = rdata32*100/blk88_y_count;
6470 /* coeff_non_zero_y_count */
6471 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6472#ifdef DEBUG_QOS
6473 hevc_print(hevc, 0,
6474 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6475 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6476 '%', rdata32);
6477#endif
6478 /* blk66_c_count */
6479 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6480 if (blk88_c_count == 0) {
6481#ifdef DEBUG_QOS
6482 hevc_print(hevc, 0,
6483 "[Picture %d Quality] NO Data yet.\n",
6484 pic_number);
6485#endif
6486 /* reset all counts */
6487 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6488 return;
6489 }
6490 /* qp_c_sum */
6491 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6492#ifdef DEBUG_QOS
6493 hevc_print(hevc, 0,
6494 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6495 pic_number, rdata32/blk88_c_count,
6496 rdata32, blk88_c_count);
6497#endif
6498 /* intra_c_count */
6499 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6500#ifdef DEBUG_QOS
6501 hevc_print(hevc, 0,
6502 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6503 pic_number, rdata32*100/blk88_c_count,
6504 '%', rdata32);
6505#endif
6506 /* skipped_cu_c_count */
6507 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6508#ifdef DEBUG_QOS
6509 hevc_print(hevc, 0,
6510 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6511 pic_number, rdata32*100/blk88_c_count,
6512 '%', rdata32);
6513#endif
6514 /* coeff_non_zero_c_count */
6515 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6516#ifdef DEBUG_QOS
6517 hevc_print(hevc, 0,
6518 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6519 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6520 '%', rdata32);
6521#endif
6522
6523 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6524 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6525 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6526#ifdef DEBUG_QOS
6527 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6528 pic_number, (rdata32>>0)&0xff);
6529#endif
6530 picture->min_qp = (rdata32>>0)&0xff;
6531
6532#ifdef DEBUG_QOS
6533 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6534 pic_number, (rdata32>>8)&0xff);
6535#endif
6536 picture->max_qp = (rdata32>>8)&0xff;
6537
6538#ifdef DEBUG_QOS
6539 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6540 pic_number, (rdata32>>16)&0xff);
6541 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6542 pic_number, (rdata32>>24)&0xff);
6543#endif
6544
6545 /* blk22_mv_count */
6546 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6547 if (blk22_mv_count == 0) {
6548#ifdef DEBUG_QOS
6549 hevc_print(hevc, 0,
6550 "[Picture %d Quality] NO MV Data yet.\n",
6551 pic_number);
6552#endif
6553 /* reset all counts */
6554 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6555 return;
6556 }
6557 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6558 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6559 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6560 /* should all be 0x00 or 0xff */
6561#ifdef DEBUG_QOS
6562 hevc_print(hevc, 0,
6563 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6564 pic_number, rdata32);
6565#endif
6566 mvx_L0_hi = ((rdata32>>0)&0xff);
6567 mvy_L0_hi = ((rdata32>>8)&0xff);
6568 mvx_L1_hi = ((rdata32>>16)&0xff);
6569 mvy_L1_hi = ((rdata32>>24)&0xff);
6570
6571 /* mvx_L0_count[31:0] */
6572 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6573 temp_value = mvx_L0_hi;
6574 temp_value = (temp_value << 32) | rdata32_l;
6575
6576 if (mvx_L0_hi & 0x80)
6577 value = 0xFFFFFFF000000000 | temp_value;
6578 else
6579 value = temp_value;
6580 value = div_s64(value, blk22_mv_count);
6581#ifdef DEBUG_QOS
6582 hevc_print(hevc, 0,
6583 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6584 pic_number, (int)value,
6585 value, blk22_mv_count);
6586#endif
6587 picture->avg_mv = value;
6588
6589 /* mvy_L0_count[31:0] */
6590 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6591 temp_value = mvy_L0_hi;
6592 temp_value = (temp_value << 32) | rdata32_l;
6593
6594 if (mvy_L0_hi & 0x80)
6595 value = 0xFFFFFFF000000000 | temp_value;
6596 else
6597 value = temp_value;
6598#ifdef DEBUG_QOS
6599 hevc_print(hevc, 0,
6600 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6601 pic_number, rdata32_l/blk22_mv_count,
6602 value, blk22_mv_count);
6603#endif
6604
6605 /* mvx_L1_count[31:0] */
6606 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6607 temp_value = mvx_L1_hi;
6608 temp_value = (temp_value << 32) | rdata32_l;
6609 if (mvx_L1_hi & 0x80)
6610 value = 0xFFFFFFF000000000 | temp_value;
6611 else
6612 value = temp_value;
6613#ifdef DEBUG_QOS
6614 hevc_print(hevc, 0,
6615 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6616 pic_number, rdata32_l/blk22_mv_count,
6617 value, blk22_mv_count);
6618#endif
6619
6620 /* mvy_L1_count[31:0] */
6621 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6622 temp_value = mvy_L1_hi;
6623 temp_value = (temp_value << 32) | rdata32_l;
6624 if (mvy_L1_hi & 0x80)
6625 value = 0xFFFFFFF000000000 | temp_value;
6626 else
6627 value = temp_value;
6628#ifdef DEBUG_QOS
6629 hevc_print(hevc, 0,
6630 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6631 pic_number, rdata32_l/blk22_mv_count,
6632 value, blk22_mv_count);
6633#endif
6634
6635 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6636 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6637 mv_hi = (rdata32>>16)&0xffff;
6638 if (mv_hi & 0x8000)
6639 mv_hi = 0x8000 - mv_hi;
6640#ifdef DEBUG_QOS
6641 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6642 pic_number, mv_hi);
6643#endif
6644 picture->max_mv = mv_hi;
6645
6646 mv_lo = (rdata32>>0)&0xffff;
6647 if (mv_lo & 0x8000)
6648 mv_lo = 0x8000 - mv_lo;
6649#ifdef DEBUG_QOS
6650 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6651 pic_number, mv_lo);
6652#endif
6653 picture->min_mv = mv_lo;
6654
6655 /* {mvy_L0_max, mvy_L0_min} */
6656 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6657 mv_hi = (rdata32>>16)&0xffff;
6658 if (mv_hi & 0x8000)
6659 mv_hi = 0x8000 - mv_hi;
6660#ifdef DEBUG_QOS
6661 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6662 pic_number, mv_hi);
6663#endif
6664
6665 mv_lo = (rdata32>>0)&0xffff;
6666 if (mv_lo & 0x8000)
6667 mv_lo = 0x8000 - mv_lo;
6668#ifdef DEBUG_QOS
6669 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6670 pic_number, mv_lo);
6671#endif
6672
6673 /* {mvx_L1_max, mvx_L1_min} */
6674 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6675 mv_hi = (rdata32>>16)&0xffff;
6676 if (mv_hi & 0x8000)
6677 mv_hi = 0x8000 - mv_hi;
6678#ifdef DEBUG_QOS
6679 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6680 pic_number, mv_hi);
6681#endif
6682
6683 mv_lo = (rdata32>>0)&0xffff;
6684 if (mv_lo & 0x8000)
6685 mv_lo = 0x8000 - mv_lo;
6686#ifdef DEBUG_QOS
6687 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6688 pic_number, mv_lo);
6689#endif
6690
6691 /* {mvy_L1_max, mvy_L1_min} */
6692 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6693 mv_hi = (rdata32>>16)&0xffff;
6694 if (mv_hi & 0x8000)
6695 mv_hi = 0x8000 - mv_hi;
6696#ifdef DEBUG_QOS
6697 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6698 pic_number, mv_hi);
6699#endif
6700 mv_lo = (rdata32>>0)&0xffff;
6701 if (mv_lo & 0x8000)
6702 mv_lo = 0x8000 - mv_lo;
6703#ifdef DEBUG_QOS
6704 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6705 pic_number, mv_lo);
6706#endif
6707
6708 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6709#ifdef DEBUG_QOS
6710 hevc_print(hevc, 0,
6711 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6712 pic_number, rdata32);
6713#endif
6714 /* reset all counts */
6715 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6716 }
6717}
6718
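/*
 * Return values used below, as read from the code (not an official
 * contract):
 *   0   - slice accepted, hardware configured for decoding
 *   1   - RASL picture after CRA/BLA skipped per nal_skip_policy
 *   2   - picture discarded (is_skip_decoding())
 *   3/4 - parameter error (oversize, zero size, bad lcu_size or POC lsb,
 *         picture list not ready) or unsupported bit depth
 *   0xf - buffer management only, slice body not decoded
 *   -1  - no free picture buffer yet; wait_buf is set and decoding retries
 */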
6719static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6720 union param_u *rpm_param,
6721 int decode_pic_begin)
6722{
6723#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6724 struct vdec_s *vdec = hw_to_vdec(hevc);
6725#endif
6726 int i;
6727 int lcu_x_num_div;
6728 int lcu_y_num_div;
6729 int Col_ref;
6730 int dbg_skip_flag = 0;
6731
6732 if (hevc->wait_buf == 0) {
6733 hevc->sps_num_reorder_pics_0 =
6734 rpm_param->p.sps_num_reorder_pics_0;
6735 hevc->m_temporalId = rpm_param->p.m_temporalId;
6736 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6737 hevc->interlace_flag =
6738 (rpm_param->p.profile_etc >> 2) & 0x1;
6739 hevc->curr_pic_struct =
6740 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6741 if (parser_sei_enable & 0x4) {
6742 hevc->frame_field_info_present_flag =
6743 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6744 }
6745
6746 if (interlace_enable == 0 || hevc->m_ins_flag)
6747 hevc->interlace_flag = 0;
6748 if (interlace_enable & 0x100)
6749 hevc->interlace_flag = interlace_enable & 0x1;
6750 if (hevc->interlace_flag == 0)
6751 hevc->curr_pic_struct = 0;
6752 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6753 /*
6754 *hevc->m_pocRandomAccess = MAX_INT;
6755 * //add to fix RAP_B_Bossen_1
6756 */
6757 /* } */
6758 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6759 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6760 hevc->slice_segment_addr =
6761 rpm_param->p.slice_segment_address;
6762 if (!rpm_param->p.dependent_slice_segment_flag)
6763 hevc->slice_addr = hevc->slice_segment_addr;
6764 } else {
6765 hevc->slice_segment_addr = 0;
6766 hevc->slice_addr = 0;
6767 }
6768
6769 hevc->iPrevPOC = hevc->curr_POC;
6770 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6771 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6772 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6773 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6774 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6775 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6776 hevc->isNextSliceSegment =
6777 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6778 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6779 || hevc->pic_h !=
6780 rpm_param->p.pic_height_in_luma_samples) {
6781 hevc_print(hevc, 0,
6782 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6783 hevc->pic_w, hevc->pic_h,
6784 rpm_param->p.pic_width_in_luma_samples,
6785 rpm_param->p.pic_height_in_luma_samples,
6786 hevc->interlace_flag);
6787
6788 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6789 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6790 hevc->frame_width = hevc->pic_w;
6791 hevc->frame_height = hevc->pic_h;
6792#ifdef LOSLESS_COMPRESS_MODE
6793 if (/*re_config_pic_flag == 0 &&*/
6794 (get_double_write_mode(hevc) & 0x10) == 0)
6795 init_decode_head_hw(hevc);
6796#endif
6797 }
6798
6799 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6800 hevc_print(hevc, 0, "over size : %u x %u.\n",
6801 hevc->pic_w, hevc->pic_h);
6802 if ((!hevc->m_ins_flag) &&
6803 ((debug &
6804 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6805 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6806 H265_DEBUG_DIS_SYS_ERROR_PROC);
6807 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6808 return 3;
6809 }
6810 if (hevc->bit_depth_chroma > 10 ||
6811 hevc->bit_depth_luma > 10) {
6812			hevc_print(hevc, 0, "unsupported bit depth : %u,%u\n",
6813 hevc->bit_depth_chroma,
6814 hevc->bit_depth_luma);
6815 if (!hevc->m_ins_flag)
6816 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6817 H265_DEBUG_DIS_SYS_ERROR_PROC);
6818 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6819 return 4;
6820 }
6821
6822 /* it will cause divide 0 error */
6823 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6824 if (get_dbg_flag(hevc)) {
6825 hevc_print(hevc, 0,
6826 "Fatal Error, pic_w = %d, pic_h = %d\n",
6827 hevc->pic_w, hevc->pic_h);
6828 }
6829 return 3;
6830 }
6831 pic_list_process(hevc);
6832
6833 hevc->lcu_size =
6834 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6835 3 + rpm_param->
6836 p.log2_diff_max_min_coding_block_size);
6837 if (hevc->lcu_size == 0) {
6838 hevc_print(hevc, 0,
6839 "Error, lcu_size = 0 (%d,%d)\n",
6840 rpm_param->p.
6841 log2_min_coding_block_size_minus3,
6842 rpm_param->p.
6843 log2_diff_max_min_coding_block_size);
6844 return 3;
6845 }
6846 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6847 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6848 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6849 hevc->lcu_x_num =
6850 ((hevc->pic_w % hevc->lcu_size) ==
6851 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6852 hevc->lcu_y_num =
6853 ((hevc->pic_h % hevc->lcu_size) ==
6854 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6855 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
6856
6857 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6858 || hevc->m_nalUnitType ==
6859 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6860 hevc->curr_POC = 0;
6861 if ((hevc->m_temporalId - 1) == 0)
6862 hevc->iPrevTid0POC = hevc->curr_POC;
6863 } else {
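			/*
			 * Non-IDR slice: derive PicOrderCntMsb from the last
			 * temporal-id-0 POC, roughly as in HEVC spec 8.3.1:
			 *   if (lsb < prev_lsb && prev_lsb - lsb >= MaxLsb / 2)
			 *       msb = prev_msb + MaxLsb;
			 *   else if (lsb > prev_lsb && lsb - prev_lsb > MaxLsb / 2)
			 *       msb = prev_msb - MaxLsb;
			 *   else
			 *       msb = prev_msb;
			 * curr_POC = msb + lsb, with msb forced to 0 for BLA types.
			 */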
6864 int iMaxPOClsb =
6865 1 << (rpm_param->p.
6866 log2_max_pic_order_cnt_lsb_minus4 + 4);
6867 int iPrevPOClsb;
6868 int iPrevPOCmsb;
6869 int iPOCmsb;
6870 int iPOClsb = rpm_param->p.POClsb;
6871
6872 if (iMaxPOClsb == 0) {
6873 hevc_print(hevc, 0,
6874 "error iMaxPOClsb is 0\n");
6875 return 3;
6876 }
6877
6878 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
6879 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
6880
6881 if ((iPOClsb < iPrevPOClsb)
6882 && ((iPrevPOClsb - iPOClsb) >=
6883 (iMaxPOClsb / 2)))
6884 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
6885 else if ((iPOClsb > iPrevPOClsb)
6886 && ((iPOClsb - iPrevPOClsb) >
6887 (iMaxPOClsb / 2)))
6888 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
6889 else
6890 iPOCmsb = iPrevPOCmsb;
6891 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6892 hevc_print(hevc, 0,
6893 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
6894 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
6895 iPOClsb);
6896 }
6897 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6898 || hevc->m_nalUnitType ==
6899 NAL_UNIT_CODED_SLICE_BLANT
6900 || hevc->m_nalUnitType ==
6901 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
6902 /* For BLA picture types, POCmsb is set to 0. */
6903 iPOCmsb = 0;
6904 }
6905 hevc->curr_POC = (iPOCmsb + iPOClsb);
6906 if ((hevc->m_temporalId - 1) == 0)
6907 hevc->iPrevTid0POC = hevc->curr_POC;
6908 else {
6909 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6910 hevc_print(hevc, 0,
6911 "m_temporalID is %d\n",
6912 hevc->m_temporalId);
6913 }
6914 }
6915 }
6916 hevc->RefNum_L0 =
6917 (rpm_param->p.num_ref_idx_l0_active >
6918 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6919 num_ref_idx_l0_active;
6920 hevc->RefNum_L1 =
6921 (rpm_param->p.num_ref_idx_l1_active >
6922 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6923 num_ref_idx_l1_active;
6924
6925 /* if(curr_POC==0x10) dump_lmem(); */
6926
6927 /* skip RASL pictures after CRA/BLA pictures */
6928 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
6929 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
6930 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6931 || hevc->m_nalUnitType ==
6932 NAL_UNIT_CODED_SLICE_BLANT
6933 || hevc->m_nalUnitType ==
6934 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6935 hevc->m_pocRandomAccess = hevc->curr_POC;
6936 else
6937 hevc->m_pocRandomAccess = -MAX_INT;
6938 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6939 || hevc->m_nalUnitType ==
6940 NAL_UNIT_CODED_SLICE_BLANT
6941 || hevc->m_nalUnitType ==
6942 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6943 hevc->m_pocRandomAccess = hevc->curr_POC;
6944 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
6945 (nal_skip_policy >= 3) &&
6946 (hevc->m_nalUnitType ==
6947 NAL_UNIT_CODED_SLICE_RASL_N ||
6948 hevc->m_nalUnitType ==
6949 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
6950 if (get_dbg_flag(hevc)) {
6951 hevc_print(hevc, 0,
6952				"RASL picture with POC %d < %d (",
6953 hevc->curr_POC, hevc->m_pocRandomAccess);
6954 hevc_print(hevc, 0,
6955 "RandomAccess point POC), skip it\n");
6956 }
6957 return 1;
6958 }
6959
6960 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
6961 hevc->skip_flag = 0;
6962 /**/
6963 /* if((iPrevPOC != curr_POC)){ */
6964 if (rpm_param->p.slice_segment_address == 0) {
6965 struct PIC_s *pic;
6966
6967 hevc->new_pic = 1;
6968#ifdef MULTI_INSTANCE_SUPPORT
6969 if (!hevc->m_ins_flag)
6970#endif
6971 check_pic_decoded_error_pre(hevc,
6972 READ_VREG(HEVC_PARSER_LCU_START)
6973 & 0xffffff);
6974 /**/ if (use_cma == 0) {
6975 if (hevc->pic_list_init_flag == 0) {
6976 init_pic_list(hevc);
6977 init_pic_list_hw(hevc);
6978 init_buf_spec(hevc);
6979 hevc->pic_list_init_flag = 3;
6980 }
6981 }
6982 if (!hevc->m_ins_flag) {
6983 if (hevc->cur_pic)
6984 get_picture_qos_info(hevc);
6985 }
6986 hevc->first_pic_after_recover = 0;
6987 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6988 dump_pic_list(hevc);
6989 /* prev pic */
6990 hevc_pre_pic(hevc, pic);
6991 /*
6992 *update referenced of old pictures
6993 *(cur_pic->referenced is 1 and not updated)
6994 */
6995 apply_ref_pic_set(hevc, hevc->curr_POC,
6996 rpm_param);
6997
6998 if (hevc->mmu_enable)
6999 recycle_mmu_bufs(hevc);
7000
7001#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7002 if (vdec->master) {
7003 struct hevc_state_s *hevc_ba =
7004 (struct hevc_state_s *)
7005 vdec->master->private;
7006 if (hevc_ba->cur_pic != NULL) {
7007 hevc_ba->cur_pic->dv_enhance_exist = 1;
7008 hevc_print(hevc, H265_DEBUG_DV,
7009 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
7010 hevc->curr_POC, hevc_ba->cur_pic->POC);
7011 }
7012 }
7013 if (vdec->master == NULL &&
7014 vdec->slave == NULL)
7015 set_aux_data(hevc,
7016 hevc->cur_pic, 1, 0); /*suffix*/
7017 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7018 set_aux_data(hevc,
7019 hevc->cur_pic, 0, 1); /*dv meta only*/
7020#else
7021 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7022#endif
7023 /* new pic */
7024 hevc->cur_pic = hevc->is_used_v4l ?
7025 v4l_get_new_pic(hevc, rpm_param) :
7026 get_new_pic(hevc, rpm_param);
7027 if (hevc->cur_pic == NULL) {
7028 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
7029 dump_pic_list(hevc);
7030 hevc->wait_buf = 1;
7031 return -1;
7032 }
7033#ifdef MULTI_INSTANCE_SUPPORT
7034 hevc->decoding_pic = hevc->cur_pic;
7035 if (!hevc->m_ins_flag)
7036 hevc->over_decode = 0;
7037#endif
7038#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7039 hevc->cur_pic->dv_enhance_exist = 0;
7040 if (vdec->slave)
7041 hevc_print(hevc, H265_DEBUG_DV,
7042 "Clear bl (poc %d) dv_enhance_exist flag\n",
7043 hevc->curr_POC);
7044 if (vdec->master == NULL &&
7045 vdec->slave == NULL)
7046 set_aux_data(hevc,
7047 hevc->cur_pic, 0, 0); /*prefix*/
7048
7049 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7050 set_aux_data(hevc,
7051 hevc->cur_pic, 0, 2); /*pre sei only*/
7052#else
7053 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7054#endif
7055 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
7056 hevc->cur_pic->output_ready = 1;
7057 hevc->cur_pic->stream_offset =
7058 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
7059 prepare_display_buf(hevc, hevc->cur_pic);
7060 hevc->wait_buf = 2;
7061 return -1;
7062 }
7063 } else {
7064 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
7065#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7066 if (vdec->master == NULL &&
7067 vdec->slave == NULL) {
7068 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7069 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7070 }
7071#else
7072 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7073 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7074#endif
7075 }
7076 if (hevc->pic_list_init_flag != 3
7077 || hevc->cur_pic == NULL) {
7078 /* make it dec from the first slice segment */
7079 return 3;
7080 }
7081 hevc->cur_pic->slice_idx++;
7082 hevc->new_pic = 0;
7083 }
7084 } else {
7085 if (hevc->wait_buf == 1) {
7086 pic_list_process(hevc);
7087 hevc->cur_pic = hevc->is_used_v4l ?
7088 v4l_get_new_pic(hevc, rpm_param) :
7089 get_new_pic(hevc, rpm_param);
7090 if (hevc->cur_pic == NULL)
7091 return -1;
7092
7093 if (!hevc->m_ins_flag)
7094 hevc->over_decode = 0;
7095
7096#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7097 hevc->cur_pic->dv_enhance_exist = 0;
7098 if (vdec->master == NULL &&
7099 vdec->slave == NULL)
7100 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7101#else
7102 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7103#endif
7104 hevc->wait_buf = 0;
7105		} else if (hevc->wait_buf == 2) {
7106			if (get_display_pic_num(hevc) > 1)
7107				return -1;
7110 hevc->wait_buf = 0;
7111 }
7112 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7113 dump_pic_list(hevc);
7114 }
7115
7116 if (hevc->new_pic) {
7117#if 1
7118 /*SUPPORT_10BIT*/
7119 int sao_mem_unit =
7120 (hevc->lcu_size == 16 ? 9 :
7121 hevc->lcu_size ==
7122 32 ? 14 : 24) << 4;
7123#else
7124 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
7125#endif
7126 int pic_height_cu =
7127 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
7128 int pic_width_cu =
7129 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
7130 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
7131
7132 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
7133 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7134 hevc_print(hevc, 0,
7135 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
7136 __func__,
7137 hevc->decode_idx,
7138 hevc->curr_pic_struct,
7139 hevc->interlace_flag,
7140 hevc->cur_pic->index);
7141 }
7142 if (dbg_skip_decode_index != 0 &&
7143 hevc->decode_idx == dbg_skip_decode_index)
7144 dbg_skip_flag = 1;
7145
7146 hevc->decode_idx++;
7147 update_tile_info(hevc, pic_width_cu, pic_height_cu,
7148 sao_mem_unit, rpm_param);
7149
7150 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
7151 }
7152
7153 if (hevc->iPrevPOC != hevc->curr_POC) {
7154 hevc->new_tile = 1;
7155 hevc->tile_x = 0;
7156 hevc->tile_y = 0;
7157 hevc->tile_y_x = 0;
7158 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7159 hevc_print(hevc, 0,
7160 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
7161 hevc->tile_x, hevc->tile_y);
7162 }
7163 } else if (hevc->tile_enabled) {
7164 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7165 hevc_print(hevc, 0,
7166 "slice_segment_address is %d\n",
7167 rpm_param->p.slice_segment_address);
7168 }
7169 hevc->tile_y_x =
7170 get_tile_index(hevc, rpm_param->p.slice_segment_address,
7171 (hevc->pic_w +
7172 hevc->lcu_size -
7173 1) / hevc->lcu_size);
7174 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
7175 && (hevc->tile_y_x != -1)) {
7176 hevc->new_tile = 1;
7177 hevc->tile_x = hevc->tile_y_x & 0xff;
7178 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
7179 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7180 hevc_print(hevc, 0,
7181 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
7182 rpm_param->p.slice_segment_address,
7183 hevc->tile_x, hevc->tile_y);
7184 }
7185 } else
7186 hevc->new_tile = 0;
7187 } else
7188 hevc->new_tile = 0;
7189
7190 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
7191 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
7192 hevc->new_tile = 0;
7193
7194 if (hevc->new_tile) {
7195 hevc->tile_start_lcu_x =
7196 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
7197 hevc->tile_start_lcu_y =
7198 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
7199 hevc->tile_width_lcu =
7200 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
7201 hevc->tile_height_lcu =
7202 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
7203 }
7204
7205 set_ref_pic_list(hevc, rpm_param);
7206
7207 Col_ref = rpm_param->p.collocated_ref_idx;
7208
7209 hevc->LDCFlag = 0;
7210 if (rpm_param->p.slice_type != I_SLICE) {
7211 hevc->LDCFlag = 1;
7212 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
7213 if (hevc->cur_pic->
7214 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
7215 hevc->curr_POC)
7216 hevc->LDCFlag = 0;
7217 }
7218 if (rpm_param->p.slice_type == B_SLICE) {
7219 for (i = 0; (i < hevc->RefNum_L1)
7220 && hevc->LDCFlag; i++) {
7221 if (hevc->cur_pic->
7222 m_aiRefPOCList1[hevc->cur_pic->
7223 slice_idx][i] >
7224 hevc->curr_POC)
7225 hevc->LDCFlag = 0;
7226 }
7227 }
7228 }
7229
7230 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
7231
7232 hevc->plevel =
7233 rpm_param->p.log2_parallel_merge_level;
7234 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
7235
7236 hevc->LongTerm_Curr = 0; /* to do ... */
7237 hevc->LongTerm_Col = 0; /* to do ... */
7238
7239 hevc->list_no = 0;
7240 if (rpm_param->p.slice_type == B_SLICE)
7241 hevc->list_no = 1 - hevc->ColFromL0Flag;
7242 if (hevc->list_no == 0) {
7243 if (Col_ref < hevc->RefNum_L0) {
7244 hevc->Col_POC =
7245 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
7246 slice_idx][Col_ref];
7247 } else
7248 hevc->Col_POC = INVALID_POC;
7249 } else {
7250 if (Col_ref < hevc->RefNum_L1) {
7251 hevc->Col_POC =
7252 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
7253 slice_idx][Col_ref];
7254 } else
7255 hevc->Col_POC = INVALID_POC;
7256 }
7257
7258 hevc->LongTerm_Ref = 0; /* to do ... */
7259
7260 if (hevc->slice_type != 2) {
7261 /* if(hevc->i_only==1){ */
7262 /* return 0xf; */
7263 /* } */
7264
7265 if (hevc->Col_POC != INVALID_POC) {
7266 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7267 if (hevc->col_pic == NULL) {
7268 hevc->cur_pic->error_mark = 1;
7269 if (get_dbg_flag(hevc)) {
7270 hevc_print(hevc, 0,
7271 "WRONG,fail to get the pic Col_POC\n");
7272 }
7273 if (is_log_enable(hevc))
7274 add_log(hevc,
7275 "WRONG,fail to get the pic Col_POC");
7276 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7277 hevc->cur_pic->error_mark = 1;
7278 if (get_dbg_flag(hevc)) {
7279 hevc_print(hevc, 0,
7280 "WRONG, Col_POC error_mark is 1\n");
7281 }
7282 if (is_log_enable(hevc))
7283 add_log(hevc,
7284 "WRONG, Col_POC error_mark is 1");
7285 } else {
7286 if ((hevc->col_pic->width
7287 != hevc->pic_w) ||
7288 (hevc->col_pic->height
7289 != hevc->pic_h)) {
7290 hevc_print(hevc, 0,
7291 "Wrong reference pic (poc %d) width/height %d/%d\n",
7292 hevc->col_pic->POC,
7293 hevc->col_pic->width,
7294 hevc->col_pic->height);
7295 hevc->cur_pic->error_mark = 1;
7296 }
7297
7298 }
7299
7300 if (hevc->cur_pic->error_mark
7301 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7302#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7303 /*count info*/
7304 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7305 hevc->cur_pic->stream_offset);
7306#endif
7307 }
7308
7309 if (is_skip_decoding(hevc,
7310 hevc->cur_pic)) {
7311 return 2;
7312 }
7313 } else
7314 hevc->col_pic = hevc->cur_pic;
7315 } /* */
7316 if (hevc->col_pic == NULL)
7317 hevc->col_pic = hevc->cur_pic;
7318#ifdef BUFFER_MGR_ONLY
7319 return 0xf;
7320#else
7321 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7322 || (dbg_skip_flag))
7323 return 0xf;
7324#endif
7325
7326 config_mc_buffer(hevc, hevc->cur_pic);
7327
7328 if (is_skip_decoding(hevc,
7329 hevc->cur_pic)) {
7330 if (get_dbg_flag(hevc))
7331 hevc_print(hevc, 0,
7332 "Discard this picture index %d\n",
7333 hevc->cur_pic->index);
7334#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7335 /*count info*/
7336 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7337 hevc->cur_pic->stream_offset);
7338#endif
7339 return 2;
7340 }
7341#ifdef MCRCC_ENABLE
7342 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7343#endif
7344 config_mpred_hw(hevc);
7345
7346 config_sao_hw(hevc, rpm_param);
7347
7348 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7349 return 0xf;
7350
7351 return 0;
7352}
7353
7354
7355
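/*
 * Allocate the per-picture MMU page list (as read from the code below): the
 * compressed body size (compute_losless_comp_body_size()) for the picture's
 * width/height/bit depth is turned into a 4k page count,
 * pages = (picture_size + 4095) >> 12, and passed to
 * decoder_mmu_box_alloc_idx(). Nothing is allocated when the
 * double-write-only mode (0x10) is active.
 */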
7356static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7357 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7358 int cur_buf_idx = new_pic->index;
7359 int bit_depth_10 = (bit_depth != 0x00);
7360 int picture_size;
7361 int cur_mmu_4k_number;
7362 int ret, max_frame_num;
7363 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7364 new_pic->height, !bit_depth_10);
7365 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7366 if (hevc->double_write_mode & 0x10)
7367 return 0;
7368 /*hevc_print(hevc, 0,
7369 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7370 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7371 if (new_pic->scatter_alloc) {
7372 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7373 new_pic->scatter_alloc = 0;
7374 }
7375 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7376 max_frame_num = MAX_FRAME_8K_NUM;
7377 else
7378 max_frame_num = MAX_FRAME_4K_NUM;
7379 if (cur_mmu_4k_number > max_frame_num) {
7380 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7381 cur_mmu_4k_number,
7382 new_pic->width,
7383 new_pic->height);
7384 return -1;
7385 }
7386 ret = decoder_mmu_box_alloc_idx(
7387 hevc->mmu_box,
7388 cur_buf_idx,
7389 cur_mmu_4k_number,
7390 mmu_index_adr);
7391 if (ret == 0)
7392 new_pic->scatter_alloc = 1;
7393
7394 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7395 "%s pic index %d page count(%d) ret =%d\n",
7396 __func__, cur_buf_idx,
7397 cur_mmu_4k_number, ret);
7398 return ret;
7399}
7400
7401
7402static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7403 struct PIC_s *pic)
7404{
7405 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7406 "%s pic index %d scatter_alloc %d\n",
7407 __func__, pic->index,
7408 pic->scatter_alloc);
7409
7410 if (hevc->mmu_enable
7411 && ((hevc->double_write_mode & 0x10) == 0)
7412 && pic->scatter_alloc)
7413 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7414 pic->scatter_alloc = 0;
7415}
7416
7417/*
7418 *************************************************
7419 *
7420 *h265 buffer management end
7421 *
7422 **************************************************
7423 */
7424static struct hevc_state_s *gHevc;
7425
7426static void hevc_local_uninit(struct hevc_state_s *hevc)
7427{
7428 hevc->rpm_ptr = NULL;
7429 hevc->lmem_ptr = NULL;
7430
7431#ifdef SWAP_HEVC_UCODE
7432 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7433 if (hevc->mc_cpu_addr != NULL) {
7434 dma_free_coherent(amports_get_dma_device(),
7435 hevc->swap_size, hevc->mc_cpu_addr,
7436 hevc->mc_dma_handle);
7437 hevc->mc_cpu_addr = NULL;
7438 }
7439
7440 }
7441#endif
7442#ifdef DETREFILL_ENABLE
7443 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7444 uninit_detrefill_buf(hevc);
7445#endif
7446 if (hevc->aux_addr) {
7447 dma_free_coherent(amports_get_dma_device(),
7448 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7449 hevc->aux_phy_addr);
7450 hevc->aux_addr = NULL;
7451 }
7452 if (hevc->rpm_addr) {
7453 dma_free_coherent(amports_get_dma_device(),
7454 RPM_BUF_SIZE, hevc->rpm_addr,
7455 hevc->rpm_phy_addr);
7456 hevc->rpm_addr = NULL;
7457 }
7458 if (hevc->lmem_addr) {
7459 dma_free_coherent(amports_get_dma_device(),
7460			LMEM_BUF_SIZE, hevc->lmem_addr,
7461 hevc->lmem_phy_addr);
7462 hevc->lmem_addr = NULL;
7463 }
7464
7465 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7466 if (hevc->frame_mmu_map_phy_addr)
7467 dma_free_coherent(amports_get_dma_device(),
7468 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7469 hevc->frame_mmu_map_phy_addr);
7470
7471 hevc->frame_mmu_map_addr = NULL;
7472 }
7473
7474 kfree(gvs);
7475 gvs = NULL;
7476}
7477
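/*
 * Per-instance buffer setup (as read from the code below): choose the
 * workspace spec (1080p or a 4k layout depending on the SoC), then allocate
 * the RPM parameter buffer (unless parameters are passed through registers),
 * the prefix/suffix aux buffers for SEI/DV metadata, the lmem buffer and,
 * when the MMU is enabled, the frame MMU map. All are DMA-coherent
 * allocations that hevc_local_uninit() releases again.
 */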
7478static int hevc_local_init(struct hevc_state_s *hevc)
7479{
7480 int ret = -1;
7481 struct BuffInfo_s *cur_buf_info = NULL;
7482
7483 memset(&hevc->param, 0, sizeof(union param_u));
7484
7485 cur_buf_info = &hevc->work_space_buf_store;
7486
7487 if (vdec_is_support_4k()) {
7488 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7489 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7490 sizeof(struct BuffInfo_s));
7491 else
7492 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7493 sizeof(struct BuffInfo_s));
7494 } else
7495 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7496 sizeof(struct BuffInfo_s));
7497
7498 cur_buf_info->start_adr = hevc->buf_start;
7499 init_buff_spec(hevc, cur_buf_info);
7500
7501 hevc_init_stru(hevc, cur_buf_info);
7502
7503 hevc->bit_depth_luma = 8;
7504 hevc->bit_depth_chroma = 8;
7505 hevc->video_signal_type = 0;
7506 hevc->video_signal_type_debug = 0;
7507 bit_depth_luma = hevc->bit_depth_luma;
7508 bit_depth_chroma = hevc->bit_depth_chroma;
7509 video_signal_type = hevc->video_signal_type;
7510
7511 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7512 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7513 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7514 if (hevc->rpm_addr == NULL) {
7515 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7516 return -1;
7517 }
7518 hevc->rpm_ptr = hevc->rpm_addr;
7519 }
7520
7521 if (prefix_aux_buf_size > 0 ||
7522 suffix_aux_buf_size > 0) {
7523 u32 aux_buf_size;
7524
7525 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7526 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7527 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7528		hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7529 aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7530 if (hevc->aux_addr == NULL) {
7531			pr_err("%s: failed to alloc aux buffer\n", __func__);
7532 return -1;
7533 }
7534 }
7535
7536 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7537 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7538 if (hevc->lmem_addr == NULL) {
7539 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7540 return -1;
7541 }
7542 hevc->lmem_ptr = hevc->lmem_addr;
7543
7544 if (hevc->mmu_enable) {
7545 hevc->frame_mmu_map_addr =
7546 dma_alloc_coherent(amports_get_dma_device(),
7547 get_frame_mmu_map_size(),
7548 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7549 if (hevc->frame_mmu_map_addr == NULL) {
7550			pr_err("%s: failed to alloc frame_mmu_map buffer\n", __func__);
7551 return -1;
7552 }
7553 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7554 }
7555 ret = 0;
7556 return ret;
7557}
7558
7559/*
7560 *******************************************
7561 * Mailbox command
7562 *******************************************
7563 */
7564#define CMD_FINISHED 0
7565#define CMD_ALLOC_VIEW 1
7566#define CMD_FRAME_DISPLAY 3
7567#define CMD_DEBUG 10
7568
7569
7570#define DECODE_BUFFER_NUM_MAX 32
7571#define DISPLAY_BUFFER_NUM 6
7572
7573#define video_domain_addr(adr) (adr&0x7fffffff)
7574#define DECODER_WORK_SPACE_SIZE 0x800000
7575
7576#define spec2canvas(x) \
7577 (((x)->uv_canvas_index << 16) | \
7578 ((x)->uv_canvas_index << 8) | \
7579 ((x)->y_canvas_index << 0))
7580
7581
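/*
 * Canvas programming (as read from the code below): with double write
 * enabled, the canvas geometry is the decoded size divided by the
 * double-write ratio, width aligned to 64 (32 for mem_map_mode 0) and height
 * aligned to 32; Y and UV get separate canvases, taken from the vdec core in
 * parallel-dec mode or fixed at 128 + index * 2 otherwise. The endian field
 * is 0 for v4l usage and 7 otherwise.
 */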
7582static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7583{
7584 struct vdec_s *vdec = hw_to_vdec(hevc);
7585 int canvas_w = ALIGN(pic->width, 64)/4;
7586 int canvas_h = ALIGN(pic->height, 32)/4;
7587 int blkmode = hevc->mem_map_mode;
7588
7589 /*CANVAS_BLKMODE_64X32*/
7590#ifdef SUPPORT_10BIT
7591 if (pic->double_write_mode) {
7592 canvas_w = pic->width /
7593 get_double_write_ratio(hevc, pic->double_write_mode);
7594 canvas_h = pic->height /
7595 get_double_write_ratio(hevc, pic->double_write_mode);
7596
7597 if (hevc->mem_map_mode == 0)
7598 canvas_w = ALIGN(canvas_w, 32);
7599 else
7600 canvas_w = ALIGN(canvas_w, 64);
7601 canvas_h = ALIGN(canvas_h, 32);
7602
7603 if (vdec->parallel_dec == 1) {
7604 if (pic->y_canvas_index == -1)
7605 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7606 if (pic->uv_canvas_index == -1)
7607 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7608 } else {
7609 pic->y_canvas_index = 128 + pic->index * 2;
7610 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7611 }
7612
7613 canvas_config_ex(pic->y_canvas_index,
7614 pic->dw_y_adr, canvas_w, canvas_h,
7615 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7616 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7617 canvas_w, canvas_h,
7618 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7619#ifdef MULTI_INSTANCE_SUPPORT
7620 pic->canvas_config[0].phy_addr =
7621 pic->dw_y_adr;
7622 pic->canvas_config[0].width =
7623 canvas_w;
7624 pic->canvas_config[0].height =
7625 canvas_h;
7626 pic->canvas_config[0].block_mode =
7627 blkmode;
7628 pic->canvas_config[0].endian = hevc->is_used_v4l ? 0 : 7;
7629
7630 pic->canvas_config[1].phy_addr =
7631 pic->dw_u_v_adr;
7632 pic->canvas_config[1].width =
7633 canvas_w;
7634 pic->canvas_config[1].height =
7635 canvas_h;
7636 pic->canvas_config[1].block_mode =
7637 blkmode;
7638 pic->canvas_config[1].endian = hevc->is_used_v4l ? 0 : 7;
7639#endif
7640 } else {
7641 if (!hevc->mmu_enable) {
7642 /* to change after 10bit VPU is ready ... */
7643 if (vdec->parallel_dec == 1) {
7644 if (pic->y_canvas_index == -1)
7645 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7646 pic->uv_canvas_index = pic->y_canvas_index;
7647 } else {
7648 pic->y_canvas_index = 128 + pic->index;
7649 pic->uv_canvas_index = 128 + pic->index;
7650 }
7651
7652 canvas_config_ex(pic->y_canvas_index,
7653 pic->mc_y_adr, canvas_w, canvas_h,
7654 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7655 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7656 canvas_w, canvas_h,
7657 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7658 }
7659 }
7660#else
7661 if (vdec->parallel_dec == 1) {
7662 if (pic->y_canvas_index == -1)
7663 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7664 if (pic->uv_canvas_index == -1)
7665 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7666 } else {
7667 pic->y_canvas_index = 128 + pic->index * 2;
7668 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7669 }
7670
7671
7672 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7673 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7674 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7675 canvas_w, canvas_h,
7676 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7677#endif
7678}
7679
7680static int init_buf_spec(struct hevc_state_s *hevc)
7681{
7682 int pic_width = hevc->pic_w;
7683 int pic_height = hevc->pic_h;
7684
7685 /* hevc_print(hevc, 0,
7686 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7687 */
7688 hevc_print(hevc, 0,
7689 "%s2 %d %d\n", __func__, pic_width, pic_height);
7690 /* pic_width = hevc->pic_w; */
7691 /* pic_height = hevc->pic_h; */
7692
7693 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7694 hevc->frame_width = pic_width;
7695 hevc->frame_height = pic_height;
7696
7697 }
7698
7699 return 0;
7700}
7701
7702static int parse_sei(struct hevc_state_s *hevc,
7703 struct PIC_s *pic, char *sei_buf, uint32_t size)
7704{
7705 char *p = sei_buf;
7706 char *p_sei;
7707 uint16_t header;
7708 uint8_t nal_unit_type;
7709 uint8_t payload_type, payload_size;
7710 int i, j;
7711
7712 if (size < 2)
7713 return 0;
7714 header = *p++;
7715 header <<= 8;
7716 header += *p++;
7717 nal_unit_type = header >> 9;
7718 if ((nal_unit_type != NAL_UNIT_SEI)
7719 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7720 return 0;
7721 while (p+2 <= sei_buf+size) {
7722 payload_type = *p++;
7723 payload_size = *p++;
7724 if (p+payload_size <= sei_buf+size) {
7725 switch (payload_type) {
7726 case SEI_PicTiming:
7727 if ((parser_sei_enable & 0x4) &&
7728 hevc->frame_field_info_present_flag) {
7729 p_sei = p;
7730 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7731 pic->pic_struct = hevc->curr_pic_struct;
7732 if (get_dbg_flag(hevc) &
7733 H265_DEBUG_PIC_STRUCT) {
7734 hevc_print(hevc, 0,
7735 "parse result pic_struct = %d\n",
7736 hevc->curr_pic_struct);
7737 }
7738 }
7739 break;
7740 case SEI_UserDataITU_T_T35:
7741 p_sei = p;
7742 if (p_sei[0] == 0xB5
7743 && p_sei[1] == 0x00
7744 && p_sei[2] == 0x3C
7745 && p_sei[3] == 0x00
7746 && p_sei[4] == 0x01
7747 && p_sei[5] == 0x04)
7748 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7749
7750 break;
7751 case SEI_MasteringDisplayColorVolume:
7752 /*hevc_print(hevc, 0,
7753 "sei type: primary display color volume %d, size %d\n",
7754 payload_type,
7755 payload_size);*/
7756 /* master_display_colour */
7757 p_sei = p;
7758 for (i = 0; i < 3; i++) {
7759 for (j = 0; j < 2; j++) {
7760 hevc->primaries[i][j]
7761 = (*p_sei<<8)
7762 | *(p_sei+1);
7763 p_sei += 2;
7764 }
7765 }
7766 for (i = 0; i < 2; i++) {
7767 hevc->white_point[i]
7768 = (*p_sei<<8)
7769 | *(p_sei+1);
7770 p_sei += 2;
7771 }
7772 for (i = 0; i < 2; i++) {
7773 hevc->luminance[i]
7774 = (*p_sei<<24)
7775 | (*(p_sei+1)<<16)
7776 | (*(p_sei+2)<<8)
7777 | *(p_sei+3);
7778 p_sei += 4;
7779 }
7780 hevc->sei_present_flag |=
7781 SEI_MASTER_DISPLAY_COLOR_MASK;
7782 /*for (i = 0; i < 3; i++)
7783 for (j = 0; j < 2; j++)
7784 hevc_print(hevc, 0,
7785 "\tprimaries[%1d][%1d] = %04x\n",
7786 i, j,
7787 hevc->primaries[i][j]);
7788 hevc_print(hevc, 0,
7789 "\twhite_point = (%04x, %04x)\n",
7790 hevc->white_point[0],
7791 hevc->white_point[1]);
7792 hevc_print(hevc, 0,
7793 "\tmax,min luminance = %08x, %08x\n",
7794 hevc->luminance[0],
7795 hevc->luminance[1]);*/
7796 break;
7797 case SEI_ContentLightLevel:
7798 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7799 hevc_print(hevc, 0,
7800 "sei type: max content light level %d, size %d\n",
7801 payload_type, payload_size);
7802 /* content_light_level */
7803 p_sei = p;
7804 hevc->content_light_level[0]
7805 = (*p_sei<<8) | *(p_sei+1);
7806 p_sei += 2;
7807 hevc->content_light_level[1]
7808 = (*p_sei<<8) | *(p_sei+1);
7809 p_sei += 2;
7810 hevc->sei_present_flag |=
7811 SEI_CONTENT_LIGHT_LEVEL_MASK;
7812 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7813 hevc_print(hevc, 0,
7814 "\tmax cll = %04x, max_pa_cll = %04x\n",
7815 hevc->content_light_level[0],
7816 hevc->content_light_level[1]);
7817 break;
7818 default:
7819 break;
7820 }
7821 }
7822 p += payload_size;
7823 }
7824 return 0;
7825}
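/*
 * Note on parse_sei(): the two header bytes are the HEVC NAL unit header
 * (nal_unit_type sits in bits [14:9]), and each SEI message is read as one
 * payload_type byte plus one payload_size byte, so this parser assumes
 * type/size values below 255 (no 0xFF extension bytes). The mastering
 * display payload consumes 3x2 16-bit primaries, two 16-bit white point
 * values and two 32-bit luminance values, i.e. 24 bytes in total.
 */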
7826
7827static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7828 unsigned w, unsigned h)
7829{
7830 unsigned ar;
7831
7832 if (idc == 255) {
7833 ar = div_u64(256ULL * sar_h * h,
7834 sar_w * w);
7835 } else {
7836 switch (idc) {
7837 case 1:
7838 ar = 0x100 * h / w;
7839 break;
7840 case 2:
7841 ar = 0x100 * h * 11 / (w * 12);
7842 break;
7843 case 3:
7844 ar = 0x100 * h * 11 / (w * 10);
7845 break;
7846 case 4:
7847 ar = 0x100 * h * 11 / (w * 16);
7848 break;
7849 case 5:
7850 ar = 0x100 * h * 33 / (w * 40);
7851 break;
7852 case 6:
7853 ar = 0x100 * h * 11 / (w * 24);
7854 break;
7855 case 7:
7856 ar = 0x100 * h * 11 / (w * 20);
7857 break;
7858 case 8:
7859 ar = 0x100 * h * 11 / (w * 32);
7860 break;
7861 case 9:
7862 ar = 0x100 * h * 33 / (w * 80);
7863 break;
7864 case 10:
7865 ar = 0x100 * h * 11 / (w * 18);
7866 break;
7867 case 11:
7868 ar = 0x100 * h * 11 / (w * 15);
7869 break;
7870 case 12:
7871 ar = 0x100 * h * 33 / (w * 64);
7872 break;
7873 case 13:
7874 ar = 0x100 * h * 99 / (w * 160);
7875 break;
7876 case 14:
7877 ar = 0x100 * h * 3 / (w * 4);
7878 break;
7879 case 15:
7880 ar = 0x100 * h * 2 / (w * 3);
7881 break;
7882 case 16:
7883 ar = 0x100 * h * 1 / (w * 2);
7884 break;
7885 default:
7886 ar = h * 0x100 / w;
7887 break;
7888 }
7889 }
7890
7891 return ar;
7892}
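/*
 * calc_ar() returns height/width scaled by 256 (Q8.8), with the sample
 * aspect ratio folded in. Worked example: with square pixels (idc == 1)
 * and a 1920x1080 picture, ar = 0x100 * 1080 / 1920 = 144 (0x90), i.e. 9/16.
 */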
7893
7894static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
7895 struct PIC_s *pic)
7896{
7897 unsigned int ar;
7898 int i, j;
7899 char *p;
7900 unsigned size = 0;
7901 unsigned type = 0;
7902 struct vframe_master_display_colour_s *vf_dp
7903 = &vf->prop.master_display_colour;
7904
7905 vf->width = pic->width /
7906 get_double_write_ratio(hevc, pic->double_write_mode);
7907 vf->height = pic->height /
7908 get_double_write_ratio(hevc, pic->double_write_mode);
7909
7910 vf->duration = hevc->frame_dur;
7911 vf->duration_pulldown = 0;
7912 vf->flag = 0;
7913
7914 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
7915 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7916
7917
7918 if (((pic->aspect_ratio_idc == 255) &&
7919 pic->sar_width &&
7920 pic->sar_height) ||
7921 ((pic->aspect_ratio_idc != 255) &&
7922 (pic->width))) {
7923 ar = min_t(u32,
7924 calc_ar(pic->aspect_ratio_idc,
7925 pic->sar_width,
7926 pic->sar_height,
7927 pic->width,
7928 pic->height),
7929 DISP_RATIO_ASPECT_RATIO_MAX);
7930 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7931 }
7932 hevc->ratio_control = vf->ratio_control;
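	/*
	 * pic->aux_data_buf holds a sequence of records, each a 4-byte
	 * big-endian size followed by a 4-byte big-endian type and then
	 * 'size' bytes of payload; type 0x02000000 appears to mark SEI
	 * data, which is handed to parse_sei() below.
	 */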
7933 if (pic->aux_data_buf
7934 && pic->aux_data_size) {
7935		/* parse sei */
7936 p = pic->aux_data_buf;
7937 while (p < pic->aux_data_buf
7938 + pic->aux_data_size - 8) {
7939 size = *p++;
7940 size = (size << 8) | *p++;
7941 size = (size << 8) | *p++;
7942 size = (size << 8) | *p++;
7943 type = *p++;
7944 type = (type << 8) | *p++;
7945 type = (type << 8) | *p++;
7946 type = (type << 8) | *p++;
7947 if (type == 0x02000000) {
7948 /* hevc_print(hevc, 0,
7949 "sei(%d)\n", size); */
7950 parse_sei(hevc, pic, p, size);
7951 }
7952 p += size;
7953 }
7954 }
7955 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
7956 vf->signal_type = pic->video_signal_type;
7957 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
7958 u32 data;
7959 data = vf->signal_type;
7960 data = data & 0xFFFF00FF;
7961 data = data | (0x30<<8);
7962 vf->signal_type = data;
7963 }
7964 }
7965 else
7966 vf->signal_type = 0;
7967 hevc->video_signal_type_debug = vf->signal_type;
7968
7969 /* master_display_colour */
7970 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
7971 for (i = 0; i < 3; i++)
7972 for (j = 0; j < 2; j++)
7973 vf_dp->primaries[i][j] = hevc->primaries[i][j];
7974 for (i = 0; i < 2; i++) {
7975 vf_dp->white_point[i] = hevc->white_point[i];
7976 vf_dp->luminance[i]
7977 = hevc->luminance[i];
7978 }
7979 vf_dp->present_flag = 1;
7980 } else
7981 vf_dp->present_flag = 0;
7982
7983 /* content_light_level */
7984 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
7985 vf_dp->content_light_level.max_content
7986 = hevc->content_light_level[0];
7987 vf_dp->content_light_level.max_pic_average
7988 = hevc->content_light_level[1];
7989 vf_dp->content_light_level.present_flag = 1;
7990 } else
7991 vf_dp->content_light_level.present_flag = 0;
7992
7993 if (hevc->is_used_v4l &&
7994 ((hevc->sei_present_flag & SEI_HDR10PLUS_MASK) ||
7995 (vf_dp->present_flag) ||
7996 (vf_dp->content_light_level.present_flag))) {
7997 struct aml_vdec_hdr_infos hdr;
7998 struct aml_vcodec_ctx *ctx =
7999 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
8000
8001 memset(&hdr, 0, sizeof(hdr));
8002 hdr.signal_type = vf->signal_type;
8003 hdr.color_parms = *vf_dp;
8004 vdec_v4l_set_hdr_infos(ctx, &hdr);
8005 }
8006}
8007
8008static int vh265_vf_states(struct vframe_states *states, void *op_arg)
8009{
8010 unsigned long flags;
8011#ifdef MULTI_INSTANCE_SUPPORT
8012 struct vdec_s *vdec = op_arg;
8013 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8014#else
8015 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8016#endif
8017
8018 spin_lock_irqsave(&lock, flags);
8019
8020 states->vf_pool_size = VF_POOL_SIZE;
8021 states->buf_free_num = kfifo_len(&hevc->newframe_q);
8022 states->buf_avail_num = kfifo_len(&hevc->display_q);
8023
8024 if (step == 2)
8025 states->buf_avail_num = 0;
8026 spin_unlock_irqrestore(&lock, flags);
8027 return 0;
8028}
8029
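/*
 * vh265_vf_peek() peeks up to two entries from display_q: the first is the
 * candidate frame to return, the second (if present) only supplies
 * next_vf_pts so the receiver can estimate the following frame's timing.
 */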
8030static struct vframe_s *vh265_vf_peek(void *op_arg)
8031{
8032 struct vframe_s *vf[2] = {0, 0};
8033#ifdef MULTI_INSTANCE_SUPPORT
8034 struct vdec_s *vdec = op_arg;
8035 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8036#else
8037 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8038#endif
8039
8040 if (step == 2)
8041 return NULL;
8042
8043 if (force_disp_pic_index & 0x100) {
8044 if (force_disp_pic_index & 0x200)
8045 return NULL;
8046 return &hevc->vframe_dummy;
8047 }
8048
8049
8050 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
8051 if (vf[1]) {
8052 vf[0]->next_vf_pts_valid = true;
8053 vf[0]->next_vf_pts = vf[1]->pts;
8054 } else
8055 vf[0]->next_vf_pts_valid = false;
8056 return vf[0];
8057 }
8058
8059 return NULL;
8060}
8061
8062static struct vframe_s *vh265_vf_get(void *op_arg)
8063{
8064 struct vframe_s *vf;
8065#ifdef MULTI_INSTANCE_SUPPORT
8066 struct vdec_s *vdec = op_arg;
8067 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8068#else
8069 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8070#endif
8071
8072 if (step == 2)
8073 return NULL;
8074 else if (step == 1)
8075 step = 2;
8076
8077#if 0
8078 if (force_disp_pic_index & 0x100) {
8079 int buffer_index = force_disp_pic_index & 0xff;
8080 struct PIC_s *pic = NULL;
8081 if (buffer_index >= 0
8082 && buffer_index < MAX_REF_PIC_NUM)
8083 pic = hevc->m_PIC[buffer_index];
8084 if (pic == NULL)
8085 return NULL;
8086 if (force_disp_pic_index & 0x200)
8087 return NULL;
8088
8089 vf = &hevc->vframe_dummy;
8090 if (get_double_write_mode(hevc)) {
8091 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
8092 VIDTYPE_VIU_NV21;
8093 if (hevc->m_ins_flag) {
8094 vf->canvas0Addr = vf->canvas1Addr = -1;
8095 vf->plane_num = 2;
8096 vf->canvas0_config[0] =
8097 pic->canvas_config[0];
8098 vf->canvas0_config[1] =
8099 pic->canvas_config[1];
8100
8101 vf->canvas1_config[0] =
8102 pic->canvas_config[0];
8103 vf->canvas1_config[1] =
8104 pic->canvas_config[1];
8105 } else {
8106 vf->canvas0Addr = vf->canvas1Addr
8107 = spec2canvas(pic);
8108 }
8109 } else {
8110 vf->canvas0Addr = vf->canvas1Addr = 0;
8111 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8112 if (hevc->mmu_enable)
8113 vf->type |= VIDTYPE_SCATTER;
8114 }
8115 vf->compWidth = pic->width;
8116 vf->compHeight = pic->height;
8117 update_vf_memhandle(hevc, vf, pic);
8118 switch (hevc->bit_depth_luma) {
8119 case 9:
8120 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
8121 break;
8122 case 10:
8123 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
8124 | BITDEPTH_V10;
8125 break;
8126 default:
8127 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8128 break;
8129 }
8130 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8131 vf->bitdepth =
8132 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8133 if (hevc->mem_saving_mode == 1)
8134 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8135 vf->duration_pulldown = 0;
8136 vf->pts = 0;
8137 vf->pts_us64 = 0;
8138 set_frame_info(hevc, vf);
8139
8140 vf->width = pic->width /
8141 get_double_write_ratio(hevc, pic->double_write_mode);
8142 vf->height = pic->height /
8143 get_double_write_ratio(hevc, pic->double_write_mode);
8144
8145 force_disp_pic_index |= 0x200;
8146 return vf;
8147 }
8148#endif
8149
8150 if (kfifo_get(&hevc->display_q, &vf)) {
8151 struct vframe_s *next_vf;
8152 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8153 hevc_print(hevc, 0,
8154 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8155 __func__, vf, vf->type, vf->index,
8156 get_pic_poc(hevc, vf->index & 0xff),
8157 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8158 vf->pts, vf->pts_us64,
8159 vf->duration);
8160#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8161 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
8162 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
8163 if (pic->aux_data_buf && pic->aux_data_size > 0) {
8164 int i;
8165 struct PIC_s *pic =
8166 hevc->m_PIC[vf->index & 0xff];
8167 hevc_print(hevc, 0,
8168 "pic 0x%p aux size %d:\n",
8169 pic, pic->aux_data_size);
8170 for (i = 0; i < pic->aux_data_size; i++) {
8171 hevc_print_cont(hevc, 0,
8172 "%02x ", pic->aux_data_buf[i]);
8173 if (((i + 1) & 0xf) == 0)
8174 hevc_print_cont(hevc, 0, "\n");
8175 }
8176 hevc_print_cont(hevc, 0, "\n");
8177 }
8178 }
8179#endif
8180 hevc->show_frame_num++;
8181 vf->index_disp = hevc->vf_get_count;
8182 hevc->vf_get_count++;
8183
8184 if (kfifo_peek(&hevc->display_q, &next_vf)) {
8185 vf->next_vf_pts_valid = true;
8186 vf->next_vf_pts = next_vf->pts;
8187 } else
8188 vf->next_vf_pts_valid = false;
8189
8190 return vf;
8191 }
8192
8193 return NULL;
8194}
8195static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
8196 int i;
8197 for (i = 0; i < VF_POOL_SIZE; i++) {
8198 if (vf == &hevc->vfpool[i])
8199 return true;
8200 }
8201	pr_info(" h265: invalid vf was put, vf = %p\n", vf);
8202 for (i = 0; i < VF_POOL_SIZE; i++) {
8203		pr_info("valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
8204 }
8205 return false;
8206}
8207
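/*
 * vh265_vf_put() returns a vframe to newframe_q and drops the reference of
 * the picture(s) it carried: vf->index[7:0] is the top (or only) picture
 * index and vf->index[15:8] the bottom one (0xff means unused). When a
 * picture's vf_ref reaches zero it is marked not output_ready again, and
 * the decoder is kicked via the mailbox IRQ if it was waiting for a buffer.
 */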
8208static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
8209{
8210 unsigned long flags;
8211#ifdef MULTI_INSTANCE_SUPPORT
8212 struct vdec_s *vdec = op_arg;
8213 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8214#else
8215 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8216#endif
8217 unsigned char index_top;
8218 unsigned char index_bot;
8219
8220 if (vf && (vf_valid_check(vf, hevc) == false))
8221 return;
8222 if (vf == (&hevc->vframe_dummy))
8223 return;
8224 index_top = vf->index & 0xff;
8225 index_bot = (vf->index >> 8) & 0xff;
8226 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8227 hevc_print(hevc, 0,
8228 "%s(type %d index 0x%x)\n",
8229 __func__, vf->type, vf->index);
8230 hevc->vf_put_count++;
8231 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8232 spin_lock_irqsave(&lock, flags);
8233
8234 if (index_top != 0xff
8235 && index_top < MAX_REF_PIC_NUM
8236 && hevc->m_PIC[index_top]) {
8237 if (hevc->is_used_v4l)
8238 hevc->m_PIC[index_top]->vframe_bound = true;
8239 if (hevc->m_PIC[index_top]->vf_ref > 0) {
8240 hevc->m_PIC[index_top]->vf_ref--;
8241
8242 if (hevc->m_PIC[index_top]->vf_ref == 0) {
8243 hevc->m_PIC[index_top]->output_ready = 0;
8244
8245 if (hevc->wait_buf != 0)
8246 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8247 0x1);
8248 }
8249 }
8250 }
8251
8252 if (index_bot != 0xff
8253 && index_bot < MAX_REF_PIC_NUM
8254 && hevc->m_PIC[index_bot]) {
8255 if (hevc->is_used_v4l)
8256 hevc->m_PIC[index_bot]->vframe_bound = true;
8257 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
8258 hevc->m_PIC[index_bot]->vf_ref--;
8259
8260 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
8261 hevc->m_PIC[index_bot]->output_ready = 0;
8262 if (hevc->wait_buf != 0)
8263 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8264 0x1);
8265 }
8266 }
8267 }
8268 spin_unlock_irqrestore(&lock, flags);
8269}
8270
8271static int vh265_event_cb(int type, void *data, void *op_arg)
8272{
8273 unsigned long flags;
8274#ifdef MULTI_INSTANCE_SUPPORT
8275 struct vdec_s *vdec = op_arg;
8276 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8277#else
8278 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8279#endif
8280 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8281#if 0
8282 amhevc_stop();
8283#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8284 vf_light_unreg_provider(&vh265_vf_prov);
8285#endif
8286 spin_lock_irqsave(&hevc->lock, flags);
8287 vh265_local_init();
8288 vh265_prot_init();
8289 spin_unlock_irqrestore(&hevc->lock, flags);
8290#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8291 vf_reg_provider(&vh265_vf_prov);
8292#endif
8293 amhevc_start();
8294#endif
8295 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8296 struct provider_aux_req_s *req =
8297 (struct provider_aux_req_s *)data;
8298 unsigned char index;
8299
8300 if (!req->vf) {
8301 req->aux_size = hevc->vf_put_count;
8302 return 0;
8303 }
8304 spin_lock_irqsave(&lock, flags);
8305 index = req->vf->index & 0xff;
8306 req->aux_buf = NULL;
8307 req->aux_size = 0;
8308 if (req->bot_flag)
8309 index = (req->vf->index >> 8) & 0xff;
8310 if (index != 0xff
8311 && index < MAX_REF_PIC_NUM
8312 && hevc->m_PIC[index]) {
8313 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8314 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8315#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8316 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8317 req->dv_enhance_exist = false;
8318 else
8319 req->dv_enhance_exist =
8320 hevc->m_PIC[index]->dv_enhance_exist;
8321 hevc_print(hevc, H265_DEBUG_DV,
8322				"query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux size 0x%x\n",
8323 req->vf,
8324 hevc->m_PIC[index]->POC, index,
8325 req->dv_enhance_exist, req->aux_size);
8326#else
8327 req->dv_enhance_exist = 0;
8328#endif
8329 }
8330 spin_unlock_irqrestore(&lock, flags);
8331
8332 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8333 hevc_print(hevc, 0,
8334 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8335 __func__, type, index, req->aux_size);
8336#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8337 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8338 if ((force_bypass_dvenl & 0x80000000) == 0) {
8339 hevc_print(hevc, 0,
8340 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8341 __func__);
8342 hevc->bypass_dvenl_enable = 1;
8343 }
8344
8345#endif
8346 }
8347 return 0;
8348}
8349
8350#ifdef HEVC_PIC_STRUCT_SUPPORT
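/*
 * process_pending_vframe() handles field pairing, roughly: a single-field
 * vframe waits in pending_q until its opposite field shows up. If more than
 * one entry is already pending, the oldest is recycled back to newframe_q;
 * if the pair picture has been recycled (vf_ref <= 0), the pending vf is
 * sent to display_q unpaired; otherwise the pending vf picks up the pair
 * picture's canvas (canvas0Addr for a top pair pic, canvas1Addr for a
 * bottom pair pic), packs both indexes into vf->index and is queued to
 * display_q.
 */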
8351static int process_pending_vframe(struct hevc_state_s *hevc,
8352 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8353{
8354 struct vframe_s *vf;
8355
8356 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8357 hevc_print(hevc, 0,
8358 "%s: pair_pic index 0x%x %s\n",
8359 __func__, pair_pic->index,
8360 pair_frame_top_flag ?
8361 "top" : "bot");
8362
8363 if (kfifo_len(&hevc->pending_q) > 1) {
8364 unsigned long flags;
8365		/* do not keep more than one frame pending */
8366 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8367 hevc_print(hevc, 0,
8368 "fatal error, no available buffer slot.");
8369 return -1;
8370 }
8371 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8372 hevc_print(hevc, 0,
8373				"%s warning(1), vf=>newframe_q: (index 0x%x)\n",
8374 __func__, vf->index);
8375 if ((hevc->double_write_mode == 3) &&
8376 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8377 vf->type |= VIDTYPE_COMPRESS;
8378 if (hevc->mmu_enable)
8379 vf->type |= VIDTYPE_SCATTER;
8380 }
8381 hevc->vf_pre_count++;
8382 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8383 spin_lock_irqsave(&lock, flags);
8384 vf->index &= 0xff;
8385 hevc->m_PIC[vf->index]->output_ready = 0;
8386 if (hevc->wait_buf != 0)
8387 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8388 0x1);
8389 spin_unlock_irqrestore(&lock, flags);
8390
8391 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8392 }
8393
8394 if (kfifo_peek(&hevc->pending_q, &vf)) {
8395 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8396 /*
8397 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8398 *do not use it
8399 */
8400 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8401 hevc_print(hevc, 0,
8402 "fatal error, no available buffer slot.");
8403 return -1;
8404 }
8405 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8406 hevc_print(hevc, 0,
8407 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8408 __func__, vf->index);
8409 if (vf) {
8410 if ((hevc->double_write_mode == 3) &&
8411 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8412 vf->type |= VIDTYPE_COMPRESS;
8413 if (hevc->mmu_enable)
8414 vf->type |= VIDTYPE_SCATTER;
8415 }
8416 hevc->vf_pre_count++;
8417 kfifo_put(&hevc->display_q,
8418 (const struct vframe_s *)vf);
8419 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8420 }
8421 } else if ((!pair_frame_top_flag) &&
8422 (((vf->index >> 8) & 0xff) == 0xff)) {
8423 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8424 hevc_print(hevc, 0,
8425 "fatal error, no available buffer slot.");
8426 return -1;
8427 }
8428 if (vf) {
8429 if ((hevc->double_write_mode == 3) &&
8430 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8431 vf->type |= VIDTYPE_COMPRESS;
8432 if (hevc->mmu_enable)
8433 vf->type |= VIDTYPE_SCATTER;
8434 }
8435 vf->index &= 0xff;
8436 vf->index |= (pair_pic->index << 8);
8437 vf->canvas1Addr = spec2canvas(pair_pic);
8438 pair_pic->vf_ref++;
8439 kfifo_put(&hevc->display_q,
8440 (const struct vframe_s *)vf);
8441 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8442 hevc->vf_pre_count++;
8443 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8444 hevc_print(hevc, 0,
8445 "%s vf => display_q: (index 0x%x)\n",
8446 __func__, vf->index);
8447 }
8448 } else if (pair_frame_top_flag &&
8449 ((vf->index & 0xff) == 0xff)) {
8450 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8451 hevc_print(hevc, 0,
8452 "fatal error, no available buffer slot.");
8453 return -1;
8454 }
8455 if (vf) {
8456 if ((hevc->double_write_mode == 3) &&
8457 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8458 vf->type |= VIDTYPE_COMPRESS;
8459 if (hevc->mmu_enable)
8460 vf->type |= VIDTYPE_SCATTER;
8461 }
8462 vf->index &= 0xff00;
8463 vf->index |= pair_pic->index;
8464 vf->canvas0Addr = spec2canvas(pair_pic);
8465 pair_pic->vf_ref++;
8466 kfifo_put(&hevc->display_q,
8467 (const struct vframe_s *)vf);
8468 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8469 hevc->vf_pre_count++;
8470 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8471 hevc_print(hevc, 0,
8472 "%s vf => display_q: (index 0x%x)\n",
8473 __func__, vf->index);
8474 }
8475 }
8476 }
8477 return 0;
8478}
8479#endif
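/*
 * update_vf_memhandle() attaches the allocator handles that keep the frame's
 * memory alive while the vframe is out for display: scatter (MMU) frames get
 * the per-picture mmu box entry plus the bmmu header buffer, non-scatter
 * frames only the bmmu buffer.
 */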
8480static void update_vf_memhandle(struct hevc_state_s *hevc,
8481 struct vframe_s *vf, struct PIC_s *pic)
8482{
8483 if (pic->index < 0) {
8484 vf->mem_handle = NULL;
8485 vf->mem_head_handle = NULL;
8486 } else if (vf->type & VIDTYPE_SCATTER) {
8487 vf->mem_handle =
8488 decoder_mmu_box_get_mem_handle(
8489 hevc->mmu_box, pic->index);
8490 vf->mem_head_handle =
8491 decoder_bmmu_box_get_mem_handle(
8492 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8493 } else {
8494 vf->mem_handle =
8495 decoder_bmmu_box_get_mem_handle(
8496 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8497 vf->mem_head_handle = NULL;
8498 /*vf->mem_head_handle =
8499 decoder_bmmu_box_get_mem_handle(
8500 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8501 }
8502 return;
8503}
8504
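/*
 * fill_frame_info() maps the picture to the per-frame QoS record: type 4
 * for an IDR picture, 1/2/3 for I/P/B slices, plus the min/avg/max
 * motion-vector, QP and skip statistics gathered during decode; the record
 * is pushed to the vdec QoS layer only when frameinfo_enable is set.
 */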
8505static void fill_frame_info(struct hevc_state_s *hevc,
8506 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8507{
8508 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8509 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8510 vframe_qos->type = 4;
8511 else if (pic->slice_type == I_SLICE)
8512 vframe_qos->type = 1;
8513 else if (pic->slice_type == P_SLICE)
8514 vframe_qos->type = 2;
8515 else if (pic->slice_type == B_SLICE)
8516 vframe_qos->type = 3;
8517/*
8518#define SHOW_QOS_INFO
8519*/
8520 vframe_qos->size = framesize;
8521 vframe_qos->pts = pts;
8522#ifdef SHOW_QOS_INFO
8523 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8524#endif
8525
8526
8527 vframe_qos->max_mv = pic->max_mv;
8528 vframe_qos->avg_mv = pic->avg_mv;
8529 vframe_qos->min_mv = pic->min_mv;
8530#ifdef SHOW_QOS_INFO
8531 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8532 vframe_qos->max_mv,
8533 vframe_qos->avg_mv,
8534 vframe_qos->min_mv);
8535#endif
8536
8537 vframe_qos->max_qp = pic->max_qp;
8538 vframe_qos->avg_qp = pic->avg_qp;
8539 vframe_qos->min_qp = pic->min_qp;
8540#ifdef SHOW_QOS_INFO
8541 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8542 vframe_qos->max_qp,
8543 vframe_qos->avg_qp,
8544 vframe_qos->min_qp);
8545#endif
8546
8547 vframe_qos->max_skip = pic->max_skip;
8548 vframe_qos->avg_skip = pic->avg_skip;
8549 vframe_qos->min_skip = pic->min_skip;
8550#ifdef SHOW_QOS_INFO
8551 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8552 vframe_qos->max_skip,
8553 vframe_qos->avg_skip,
8554 vframe_qos->min_skip);
8555#endif
8556
8557 vframe_qos->num++;
8558
8559 if (hevc->frameinfo_enable)
8560 vdec_fill_frame_info(vframe_qos, 1);
8561}
8562
8563static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8564{
8565#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8566 struct vdec_s *vdec = hw_to_vdec(hevc);
8567#endif
8568 struct vframe_s *vf = NULL;
8569 int stream_offset = pic->stream_offset;
8570 unsigned short slice_type = pic->slice_type;
8571 u32 frame_size;
8572
8573 if (force_disp_pic_index & 0x100) {
8574 /*recycle directly*/
8575 pic->output_ready = 0;
8576 return -1;
8577 }
8578 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8579 hevc_print(hevc, 0,
8580 "fatal error, no available buffer slot.");
8581 return -1;
8582 }
8583 display_frame_count[hevc->index]++;
8584 if (vf) {
8585 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8586 "%s: pic index 0x%x\n",
8587 __func__, pic->index);*/
8588
8589 if (hevc->is_used_v4l) {
8590 vf->v4l_mem_handle
8591 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8592 if (hevc->mmu_enable) {
8593 vf->mm_box.bmmu_box = hevc->bmmu_box;
8594 vf->mm_box.bmmu_idx = VF_BUFFER_IDX(pic->BUF_index);
8595 vf->mm_box.mmu_box = hevc->mmu_box;
8596 vf->mm_box.mmu_idx = pic->index;
8597 }
8598 }
8599
8600#ifdef MULTI_INSTANCE_SUPPORT
8601 if (vdec_frame_based(hw_to_vdec(hevc))) {
8602 vf->pts = pic->pts;
8603 vf->pts_us64 = pic->pts64;
8604 vf->timestamp = pic->timestamp;
8605 }
8606 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8607 stream_offset, &vf->pts, 0) != 0) { */
8608#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8609 else if (vdec->master == NULL) {
8610#else
8611 else {
8612#endif
8613#endif
8614 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8615 "call pts_lookup_offset_us64(0x%x)\n",
8616 stream_offset);
8617 if (pts_lookup_offset_us64
8618 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8619 &frame_size, 0,
8620 &vf->pts_us64) != 0) {
8621#ifdef DEBUG_PTS
8622 hevc->pts_missed++;
8623#endif
8624 vf->pts = 0;
8625 vf->pts_us64 = 0;
8626 }
8627#ifdef DEBUG_PTS
8628 else
8629 hevc->pts_hit++;
8630#endif
8631#ifdef MULTI_INSTANCE_SUPPORT
8632#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8633 } else {
8634 vf->pts = 0;
8635 vf->pts_us64 = 0;
8636 }
8637#else
8638 }
8639#endif
8640#endif
8641 if (pts_unstable && (hevc->frame_dur > 0))
8642 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8643
8644 fill_frame_info(hevc, pic, frame_size, vf->pts);
8645
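		/*
		 * PTS mode hysteresis: each backward jump in the looked-up PTS
		 * bumps pts_mode_switching_count, and reaching
		 * PTS_MODE_SWITCHING_THRESHOLD switches the decoder to deriving
		 * PTS from frame_dur (PTS_NONE_REF_USE_DURATION); a long enough
		 * run of monotonic PTS values resets the counter instead.
		 */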
8646 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8647 && hevc->get_frame_dur) {
8648 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8649
8650 if (pts_diff < 0) {
8651 hevc->pts_mode_switching_count++;
8652 hevc->pts_mode_recovery_count = 0;
8653
8654 if (hevc->pts_mode_switching_count >=
8655 PTS_MODE_SWITCHING_THRESHOLD) {
8656 hevc->pts_mode =
8657 PTS_NONE_REF_USE_DURATION;
8658 hevc_print(hevc, 0,
8659 "HEVC: switch to n_d mode.\n");
8660 }
8661
8662 } else {
8663 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8664
8665 hevc->pts_mode_recovery_count++;
8666 if (hevc->pts_mode_recovery_count > p) {
8667 hevc->pts_mode_switching_count = 0;
8668 hevc->pts_mode_recovery_count = 0;
8669 }
8670 }
8671 }
8672
8673 if (vf->pts != 0)
8674 hevc->last_lookup_pts = vf->pts;
8675
8676 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8677 && (slice_type != 2))
8678 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8679 hevc->last_pts = vf->pts;
8680
8681 if (vf->pts_us64 != 0)
8682 hevc->last_lookup_pts_us64 = vf->pts_us64;
8683
8684 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8685 && (slice_type != 2)) {
8686 vf->pts_us64 =
8687 hevc->last_pts_us64 +
8688 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8689 }
8690 hevc->last_pts_us64 = vf->pts_us64;
8691 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8692 hevc_print(hevc, 0,
8693 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8694 vf->pts, vf->pts_us64);
8695 }
8696
8697 /*
8698 *vf->index:
8699 *(1) vf->type is VIDTYPE_PROGRESSIVE
8700 * and vf->canvas0Addr != vf->canvas1Addr,
8701 * vf->index[7:0] is the index of top pic
8702 * vf->index[15:8] is the index of bot pic
8703 *(2) other cases,
8704 * only vf->index[7:0] is used
8705 * vf->index[15:8] == 0xff
8706 */
8707 vf->index = 0xff00 | pic->index;
8708#if 1
8709/*SUPPORT_10BIT*/
8710 if (pic->double_write_mode & 0x10) {
8711 /* double write only */
8712 vf->compBodyAddr = 0;
8713 vf->compHeadAddr = 0;
8714 } else {
8715
8716 if (hevc->mmu_enable) {
8717 vf->compBodyAddr = 0;
8718 vf->compHeadAddr = pic->header_adr;
8719 } else {
8720 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8721 vf->compHeadAddr = pic->mc_y_adr +
8722 pic->losless_comp_body_size;
8723 vf->mem_head_handle = NULL;
8724 }
8725
8726 /*head adr*/
8727 vf->canvas0Addr = vf->canvas1Addr = 0;
8728 }
8729 if (pic->double_write_mode) {
8730 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8731 vf->type |= VIDTYPE_VIU_NV21;
8732
8733 if ((pic->double_write_mode == 3) &&
8734 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8735 vf->type |= VIDTYPE_COMPRESS;
8736 if (hevc->mmu_enable)
8737 vf->type |= VIDTYPE_SCATTER;
8738 }
8739#ifdef MULTI_INSTANCE_SUPPORT
8740 if (hevc->m_ins_flag &&
8741 (get_dbg_flag(hevc)
8742 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8743 vf->canvas0Addr = vf->canvas1Addr = -1;
8744 vf->plane_num = 2;
8745 vf->canvas0_config[0] =
8746 pic->canvas_config[0];
8747 vf->canvas0_config[1] =
8748 pic->canvas_config[1];
8749
8750 vf->canvas1_config[0] =
8751 pic->canvas_config[0];
8752 vf->canvas1_config[1] =
8753 pic->canvas_config[1];
8754
8755 } else
8756#endif
8757 vf->canvas0Addr = vf->canvas1Addr
8758 = spec2canvas(pic);
8759 } else {
8760 vf->canvas0Addr = vf->canvas1Addr = 0;
8761 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8762 if (hevc->mmu_enable)
8763 vf->type |= VIDTYPE_SCATTER;
8764 }
8765 vf->compWidth = pic->width;
8766 vf->compHeight = pic->height;
8767 update_vf_memhandle(hevc, vf, pic);
8768 switch (pic->bit_depth_luma) {
8769 case 9:
8770 vf->bitdepth = BITDEPTH_Y9;
8771 break;
8772 case 10:
8773 vf->bitdepth = BITDEPTH_Y10;
8774 break;
8775 default:
8776 vf->bitdepth = BITDEPTH_Y8;
8777 break;
8778 }
8779 switch (pic->bit_depth_chroma) {
8780 case 9:
8781 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8782 break;
8783 case 10:
8784 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8785 break;
8786 default:
8787 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8788 break;
8789 }
8790 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8791 vf->bitdepth =
8792 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8793 if (pic->mem_saving_mode == 1)
8794 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8795#else
8796 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8797 vf->type |= VIDTYPE_VIU_NV21;
8798 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8799#endif
8800 set_frame_info(hevc, vf, pic);
8801 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8802 /* hevc_print(hevc, 0,
8803 "aaa: %d/%d, %d/%d\n",
8804 vf->width,vf->height, pic->width, pic->height); */
8805 vf->width = pic->width;
8806 vf->height = pic->height;
8807
8808 if (force_w_h != 0) {
8809 vf->width = (force_w_h >> 16) & 0xffff;
8810 vf->height = force_w_h & 0xffff;
8811 }
8812 if (force_fps & 0x100) {
8813 u32 rate = force_fps & 0xff;
8814
8815 if (rate)
8816 vf->duration = 96000/rate;
8817 else
8818 vf->duration = 0;
8819 }
8820 if (force_fps & 0x200) {
8821 vf->pts = 0;
8822 vf->pts_us64 = 0;
8823 }
8824 /*
8825 * !!! to do ...
8826		 * need to move the code below into get_new_pic(),
8827		 * since hevc->xxx may only be used by the currently decoded pic
8828 */
8829 if (pic->conformance_window_flag &&
8830 (get_dbg_flag(hevc) &
8831 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
8832 unsigned int SubWidthC, SubHeightC;
8833
8834 switch (pic->chroma_format_idc) {
8835 case 1:
8836 SubWidthC = 2;
8837 SubHeightC = 2;
8838 break;
8839 case 2:
8840 SubWidthC = 2;
8841 SubHeightC = 1;
8842 break;
8843 default:
8844 SubWidthC = 1;
8845 SubHeightC = 1;
8846 break;
8847 }
8848 vf->width -= SubWidthC *
8849 (pic->conf_win_left_offset +
8850 pic->conf_win_right_offset);
8851 vf->height -= SubHeightC *
8852 (pic->conf_win_top_offset +
8853 pic->conf_win_bottom_offset);
8854
8855 vf->compWidth -= SubWidthC *
8856 (pic->conf_win_left_offset +
8857 pic->conf_win_right_offset);
8858 vf->compHeight -= SubHeightC *
8859 (pic->conf_win_top_offset +
8860 pic->conf_win_bottom_offset);
8861
8862 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
8863 hevc_print(hevc, 0,
8864 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
8865 pic->chroma_format_idc,
8866 pic->conf_win_left_offset,
8867 pic->conf_win_right_offset,
8868 pic->conf_win_top_offset,
8869 pic->conf_win_bottom_offset,
8870 vf->width, vf->height, vf->compWidth, vf->compHeight);
8871 }
8872
8873 vf->width = vf->width /
8874 get_double_write_ratio(hevc, pic->double_write_mode);
8875 vf->height = vf->height /
8876 get_double_write_ratio(hevc, pic->double_write_mode);
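		/*
		 * Dispatch on pic_struct (H.265 Table D.2): 1/2 are single
		 * top/bottom fields, 3/4 are top+bottom or bottom+top field
		 * pairs emitted as two interlaced vframes, 5/6 are three-field
		 * sequences, 7/8 are frame doubling/tripling (duration scaled),
		 * and 9-12 are fields paired with the previous or next opposite
		 * field via the pending queue.
		 */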
8877#ifdef HEVC_PIC_STRUCT_SUPPORT
8878 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
8879 struct vframe_s *vf2;
8880
8881 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8882 hevc_print(hevc, 0,
8883 "pic_struct = %d index 0x%x\n",
8884 pic->pic_struct,
8885 pic->index);
8886
8887 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8888 hevc_print(hevc, 0,
8889 "fatal error, no available buffer slot.");
8890 return -1;
8891 }
8892 pic->vf_ref = 2;
8893 vf->duration = vf->duration>>1;
8894 memcpy(vf2, vf, sizeof(struct vframe_s));
8895
8896 if (pic->pic_struct == 3) {
8897 vf->type = VIDTYPE_INTERLACE_TOP
8898 | VIDTYPE_VIU_NV21;
8899 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8900 | VIDTYPE_VIU_NV21;
8901 } else {
8902 vf->type = VIDTYPE_INTERLACE_BOTTOM
8903 | VIDTYPE_VIU_NV21;
8904 vf2->type = VIDTYPE_INTERLACE_TOP
8905 | VIDTYPE_VIU_NV21;
8906 }
8907 hevc->vf_pre_count++;
8908 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8909 kfifo_put(&hevc->display_q,
8910 (const struct vframe_s *)vf);
8911 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8912 hevc->vf_pre_count++;
8913 kfifo_put(&hevc->display_q,
8914 (const struct vframe_s *)vf2);
8915 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8916 } else if (pic->pic_struct == 5
8917 || pic->pic_struct == 6) {
8918 struct vframe_s *vf2, *vf3;
8919
8920 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8921 hevc_print(hevc, 0,
8922 "pic_struct = %d index 0x%x\n",
8923 pic->pic_struct,
8924 pic->index);
8925
8926 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8927 hevc_print(hevc, 0,
8928 "fatal error, no available buffer slot.");
8929 return -1;
8930 }
8931 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
8932 hevc_print(hevc, 0,
8933 "fatal error, no available buffer slot.");
8934 return -1;
8935 }
8936 pic->vf_ref = 3;
8937 vf->duration = vf->duration/3;
8938 memcpy(vf2, vf, sizeof(struct vframe_s));
8939 memcpy(vf3, vf, sizeof(struct vframe_s));
8940
8941 if (pic->pic_struct == 5) {
8942 vf->type = VIDTYPE_INTERLACE_TOP
8943 | VIDTYPE_VIU_NV21;
8944 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8945 | VIDTYPE_VIU_NV21;
8946 vf3->type = VIDTYPE_INTERLACE_TOP
8947 | VIDTYPE_VIU_NV21;
8948 } else {
8949 vf->type = VIDTYPE_INTERLACE_BOTTOM
8950 | VIDTYPE_VIU_NV21;
8951 vf2->type = VIDTYPE_INTERLACE_TOP
8952 | VIDTYPE_VIU_NV21;
8953 vf3->type = VIDTYPE_INTERLACE_BOTTOM
8954 | VIDTYPE_VIU_NV21;
8955 }
8956 hevc->vf_pre_count++;
8957 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8958 kfifo_put(&hevc->display_q,
8959 (const struct vframe_s *)vf);
8960 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8961 hevc->vf_pre_count++;
8962 kfifo_put(&hevc->display_q,
8963 (const struct vframe_s *)vf2);
8964 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8965 hevc->vf_pre_count++;
8966 kfifo_put(&hevc->display_q,
8967 (const struct vframe_s *)vf3);
8968 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
8969
8970 } else if (pic->pic_struct == 9
8971 || pic->pic_struct == 10) {
8972 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8973 hevc_print(hevc, 0,
8974 "pic_struct = %d index 0x%x\n",
8975 pic->pic_struct,
8976 pic->index);
8977
8978 pic->vf_ref = 1;
8979 /* process previous pending vf*/
8980 process_pending_vframe(hevc,
8981 pic, (pic->pic_struct == 9));
8982
8983 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8984 /* process current vf */
8985 kfifo_put(&hevc->pending_q,
8986 (const struct vframe_s *)vf);
8987 vf->height <<= 1;
8988 if (pic->pic_struct == 9) {
8989 vf->type = VIDTYPE_INTERLACE_TOP
8990 | VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8991 process_pending_vframe(hevc,
8992 hevc->pre_bot_pic, 0);
8993 } else {
8994 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8995 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8996 vf->index = (pic->index << 8) | 0xff;
8997 process_pending_vframe(hevc,
8998 hevc->pre_top_pic, 1);
8999 }
9000
9001 if (hevc->vf_pre_count == 0)
9002 hevc->vf_pre_count++;
9003
9004 /**/
9005 if (pic->pic_struct == 9)
9006 hevc->pre_top_pic = pic;
9007 else
9008 hevc->pre_bot_pic = pic;
9009
9010 } else if (pic->pic_struct == 11
9011 || pic->pic_struct == 12) {
9012 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9013 hevc_print(hevc, 0,
9014 "pic_struct = %d index 0x%x\n",
9015 pic->pic_struct,
9016 pic->index);
9017 pic->vf_ref = 1;
9018 /* process previous pending vf*/
9019 process_pending_vframe(hevc, pic,
9020 (pic->pic_struct == 11));
9021
9022 /* put current into pending q */
9023 vf->height <<= 1;
9024 if (pic->pic_struct == 11)
9025 vf->type = VIDTYPE_INTERLACE_TOP |
9026 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
9027 else {
9028 vf->type = VIDTYPE_INTERLACE_BOTTOM |
9029 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
9030 vf->index = (pic->index << 8) | 0xff;
9031 }
9032 decoder_do_frame_check(hw_to_vdec(hevc), vf);
9033 kfifo_put(&hevc->pending_q,
9034 (const struct vframe_s *)vf);
9035 if (hevc->vf_pre_count == 0)
9036 hevc->vf_pre_count++;
9037
9038 /**/
9039 if (pic->pic_struct == 11)
9040 hevc->pre_top_pic = pic;
9041 else
9042 hevc->pre_bot_pic = pic;
9043
9044 } else {
9045 pic->vf_ref = 1;
9046
9047 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9048 hevc_print(hevc, 0,
9049 "pic_struct = %d index 0x%x\n",
9050 pic->pic_struct,
9051 pic->index);
9052
9053 switch (pic->pic_struct) {
9054 case 7:
9055 vf->duration <<= 1;
9056 break;
9057 case 8:
9058 vf->duration = vf->duration * 3;
9059 break;
9060 case 1:
9061 vf->height <<= 1;
9062 vf->type = VIDTYPE_INTERLACE_TOP |
9063 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
9064 process_pending_vframe(hevc, pic, 1);
9065 hevc->pre_top_pic = pic;
9066 break;
9067 case 2:
9068 vf->height <<= 1;
9069 vf->type = VIDTYPE_INTERLACE_BOTTOM
9070 | VIDTYPE_VIU_NV21
9071 | VIDTYPE_VIU_FIELD;
9072 process_pending_vframe(hevc, pic, 0);
9073 hevc->pre_bot_pic = pic;
9074 break;
9075 }
9076 hevc->vf_pre_count++;
9077 decoder_do_frame_check(hw_to_vdec(hevc), vf);
9078 kfifo_put(&hevc->display_q,
9079 (const struct vframe_s *)vf);
9080 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9081 }
9082#else
9083 vf->type_original = vf->type;
9084 pic->vf_ref = 1;
9085 hevc->vf_pre_count++;
9086 decoder_do_frame_check(hw_to_vdec(hevc), vf);
9087 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
9088 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9089
9090 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9091 hevc_print(hevc, 0,
9092 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
9093 __func__, vf->type, vf->index,
9094 get_pic_poc(hevc, vf->index & 0xff),
9095 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
9096 vf->pts, vf->pts_us64,
9097 vf->duration);
9098#endif
9099#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
9100 /*count info*/
9101 vdec_count_info(gvs, 0, stream_offset);
9102#endif
9103 hw_to_vdec(hevc)->vdec_fps_detec(hw_to_vdec(hevc)->id);
9104 if (without_display_mode == 0) {
9105 vf_notify_receiver(hevc->provider_name,
9106 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9107 }
9108 else
9109 vh265_vf_put(vh265_vf_get(vdec), vdec);
9110 }
9111
9112 return 0;
9113}
9114
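/*
 * notify_v4l_eos() pushes a dummy vframe flagged VFRAME_FLAG_EMPTY_FRAME_V4L
 * so the v4l2 layer sees an explicit EOS marker: it waits up to ~2s for a
 * free capture buffer index (breaking early if v4l2 destination buffers are
 * already queued) and otherwise borrows one via vdec_v4l_get_buffer().
 */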
9115static int notify_v4l_eos(struct vdec_s *vdec)
9116{
9117 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
9118 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
9119 struct vframe_s *vf = &hw->vframe_dummy;
9120 struct vdec_v4l2_buffer *fb = NULL;
9121 int index = INVALID_IDX;
9122 ulong expires;
9123
9124 if (hw->is_used_v4l && hw->eos) {
9125 expires = jiffies + msecs_to_jiffies(2000);
9126 while (INVALID_IDX == (index = get_free_buf_idx(hw))) {
9127 if (time_after(jiffies, expires) ||
9128 v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx))
9129 break;
9130 }
9131
9132 if (index == INVALID_IDX) {
9133 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb) < 0) {
9134 pr_err("[%d] EOS get free buff fail.\n", ctx->id);
9135 return -1;
9136 }
9137 }
9138
9139 vf->type |= VIDTYPE_V4L_EOS;
9140 vf->timestamp = ULONG_MAX;
9141 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
9142 vf->v4l_mem_handle = (index == INVALID_IDX) ? (ulong)fb :
9143 hw->m_BUF[index].v4l_ref_buf_addr;
9144 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
9145 vf_notify_receiver(vdec->vf_provider_name,
9146 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9147
9148 pr_info("[%d] H265 EOS notify.\n", ctx->id);
9149 }
9150
9151 return 0;
9152}
9153
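/*
 * process_nal_sei() consumes SEI bytes directly from the HW parser:
 * HEVC_SHIFTED_DATA exposes the next bitstream bits and writing
 * (1 << 7) | N to HEVC_SHIFT_COMMAND appears to advance the parser by N
 * bits, so each 16-bit field is read from the top half of the register and
 * then shifted out. Payload type 137 is mastering_display_colour_volume,
 * whose first 24 bytes are parsed and any trailing bytes skipped 8 bits at
 * a time.
 */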
9154static void process_nal_sei(struct hevc_state_s *hevc,
9155 int payload_type, int payload_size)
9156{
9157 unsigned short data;
9158
9159 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9160 hevc_print(hevc, 0,
9161 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
9162 payload_type, payload_size);
9163
9164 if (payload_type == 137) {
9165 int i, j;
9166 /* MASTERING_DISPLAY_COLOUR_VOLUME */
9167 if (payload_size >= 24) {
9168 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9169 hevc_print(hevc, 0,
9170 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
9171 for (i = 0; i < 3; i++) {
9172 for (j = 0; j < 2; j++) {
9173 data =
9174 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9175 hevc->primaries[i][j] = data;
9176 WRITE_HREG(HEVC_SHIFT_COMMAND,
9177 (1<<7)|16);
9178 if (get_dbg_flag(hevc) &
9179 H265_DEBUG_PRINT_SEI)
9180 hevc_print(hevc, 0,
9181 "\t\tprimaries[%1d][%1d] = %04x\n",
9182 i, j, hevc->primaries[i][j]);
9183 }
9184 }
9185 for (i = 0; i < 2; i++) {
9186 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9187 hevc->white_point[i] = data;
9188 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
9189 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9190 hevc_print(hevc, 0,
9191 "\t\twhite_point[%1d] = %04x\n",
9192 i, hevc->white_point[i]);
9193 }
9194 for (i = 0; i < 2; i++) {
9195 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9196 hevc->luminance[i] = data << 16;
9197 WRITE_HREG(HEVC_SHIFT_COMMAND,
9198 (1<<7)|16);
9199 data =
9200 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9201 hevc->luminance[i] |= data;
9202 WRITE_HREG(HEVC_SHIFT_COMMAND,
9203 (1<<7)|16);
9204 if (get_dbg_flag(hevc) &
9205 H265_DEBUG_PRINT_SEI)
9206 hevc_print(hevc, 0,
9207 "\t\tluminance[%1d] = %08x\n",
9208 i, hevc->luminance[i]);
9209 }
9210 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
9211 }
9212 payload_size -= 24;
9213 while (payload_size > 0) {
9214 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
9215 payload_size--;
9216 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
9217 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
9218 }
9219 }
9220}
9221
9222static int hevc_recover(struct hevc_state_s *hevc)
9223{
9224 int ret = -1;
9225 u32 rem;
9226 u64 shift_byte_count64;
9227 unsigned int hevc_shift_byte_count;
9228 unsigned int hevc_stream_start_addr;
9229 unsigned int hevc_stream_end_addr;
9230 unsigned int hevc_stream_rd_ptr;
9231 unsigned int hevc_stream_wr_ptr;
9232 unsigned int hevc_stream_control;
9233 unsigned int hevc_stream_fifo_ctl;
9234 unsigned int hevc_stream_buf_size;
9235
9236 mutex_lock(&vh265_mutex);
9237#if 0
9238 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9239 int ii;
9240
9241 for (ii = 0; ii < 4; ii++)
9242 hevc_print(hevc, 0,
9243 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9244 if (((i + ii) & 0xf) == 0)
9245 hevc_print(hevc, 0, "\n");
9246 }
9247#endif
9248#define ES_VID_MAN_RD_PTR (1<<0)
9249 if (!hevc->init_flag) {
9250 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
9251 mutex_unlock(&vh265_mutex);
9252 return ret;
9253 }
9254 amhevc_stop();
9255 msleep(20);
9256 ret = 0;
9257 /* reset */
9258 WRITE_PARSER_REG(PARSER_VIDEO_RP, READ_VREG(HEVC_STREAM_RD_PTR));
9259 SET_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9260
9261 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
9262 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
9263 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
9264 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
9265 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
9266 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
9267 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
9268
9269	/* The HEVC stream buffer will be reset and restarted
9270	 * from the current hevc_stream_rd_ptr position
9271 */
9272 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
9273 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9274 if ((hevc->shift_byte_count_lo & (1 << 31))
9275 && ((hevc_shift_byte_count & (1 << 31)) == 0))
9276 hevc->shift_byte_count_hi++;
9277
9278 hevc->shift_byte_count_lo = hevc_shift_byte_count;
9279 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9280 hevc->shift_byte_count_lo;
9281 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9282 shift_byte_count64 -= rem;
9283 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9284
9285 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9286 shift_byte_count64 += hevc_stream_buf_size;
9287
9288 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9289 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
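	/*
	 * The math above rebuilds a 64-bit byte count that stays consistent
	 * with the stream buffer position: the count is rounded down to a
	 * multiple of the buffer size, the current read offset is added back,
	 * and one extra buffer size is added if the old remainder had already
	 * wrapped past the read pointer. Roughly:
	 *   count64 = (count64 / buf_size) * buf_size + (rd_ptr - start)
	 *             [+ buf_size if rem > rd_ptr - start]
	 */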
9290
9291 WRITE_VREG(DOS_SW_RESET3,
9292 /* (1<<2)| */
9293 (1 << 3) | (1 << 4) | (1 << 8) |
9294 (1 << 11) | (1 << 12) | (1 << 14)
9295 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9296 WRITE_VREG(DOS_SW_RESET3, 0);
9297
9298 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9299 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9300 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9301 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9302 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9303 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9304 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9305
9306 hevc_config_work_space_hw(hevc);
9307 decoder_hw_reset();
9308
9309 hevc->have_vps = 0;
9310 hevc->have_sps = 0;
9311 hevc->have_pps = 0;
9312
9313 hevc->have_valid_start_slice = 0;
9314
9315 if (get_double_write_mode(hevc) & 0x10)
9316 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9317 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
9318 );
9319
9320 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9321 /* clear mailbox interrupt */
9322 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9323 /* enable mailbox interrupt */
9324 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9325 /* disable PSCALE for hardware sharing */
9326 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9327
9328 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9329
9330 WRITE_VREG(DEBUG_REG1, 0x0);
9331
9332 if ((error_handle_policy & 1) == 0) {
9333 if ((error_handle_policy & 4) == 0) {
9334 /* ucode auto mode, and do not check vps/sps/pps/idr */
9335 WRITE_VREG(NAL_SEARCH_CTL,
9336 0xc);
9337 } else {
9338 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9339 }
9340 } else {
9341 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9342 }
9343
9344 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9345 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9346 WRITE_VREG(NAL_SEARCH_CTL,
9347 READ_VREG(NAL_SEARCH_CTL)
9348 | ((parser_sei_enable & 0x7) << 17));
9349#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9350 WRITE_VREG(NAL_SEARCH_CTL,
9351 READ_VREG(NAL_SEARCH_CTL) |
9352 ((parser_dolby_vision_enable & 0x1) << 20));
9353#endif
9354 config_decode_mode(hevc);
9355 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9356
9357 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9358 /* amhevc_disable(); */
9359 /* return -EBUSY; */
9360 /* } */
9361#if 0
9362 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9363 int ii;
9364
9365 for (ii = 0; ii < 4; ii++) {
9366 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9367 hevc_print(hevc, 0,
9368 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9369 }
9370 if (((i + ii) & 0xf) == 0)
9371 hevc_print(hevc, 0, "\n");
9372 }
9373#endif
9374 init_pic_list_hw(hevc);
9375
9376 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9377 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9378
9379#ifdef SWAP_HEVC_UCODE
9380 if (!tee_enabled() && hevc->is_swap &&
9381 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9382 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9383 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9384 }
9385#endif
9386 amhevc_start();
9387
9388 /* skip, search next start code */
9389 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9390 hevc->skip_flag = 1;
9391#ifdef ERROR_HANDLE_DEBUG
9392 if (dbg_nal_skip_count & 0x20000) {
9393 dbg_nal_skip_count &= ~0x20000;
9394 mutex_unlock(&vh265_mutex);
9395 return ret;
9396 }
9397#endif
9398 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9399	/* Interrupt Amrisc to execute */
9400 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9401#ifdef MULTI_INSTANCE_SUPPORT
9402 if (!hevc->m_ins_flag)
9403#endif
9404 hevc->first_pic_after_recover = 1;
9405 mutex_unlock(&vh265_mutex);
9406 return ret;
9407}
9408
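/*
 * dump_aux_buf(): HEVC_AUX_DATA_SIZE reports the prefix aux size in its high
 * 16 bits and the suffix aux size in its low 16 bits, both in 16-byte units,
 * hence the "<< 4" below.
 */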
9409static void dump_aux_buf(struct hevc_state_s *hevc)
9410{
9411 int i;
9412 unsigned short *aux_adr =
9413 (unsigned short *)
9414 hevc->aux_addr;
9415 unsigned int aux_size =
9416 (READ_VREG(HEVC_AUX_DATA_SIZE)
9417 >> 16) << 4;
9418
9419 if (hevc->prefix_aux_size > 0) {
9420 hevc_print(hevc, 0,
9421 "prefix aux: (size %d)\n",
9422 aux_size);
9423 for (i = 0; i <
9424 (aux_size >> 1); i++) {
9425 hevc_print_cont(hevc, 0,
9426 "%04x ",
9427 *(aux_adr + i));
9428 if (((i + 1) & 0xf)
9429 == 0)
9430 hevc_print_cont(hevc,
9431 0, "\n");
9432 }
9433 }
9434 if (hevc->suffix_aux_size > 0) {
9435 aux_adr = (unsigned short *)
9436 (hevc->aux_addr +
9437 hevc->prefix_aux_size);
9438 aux_size =
9439 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9440 << 4;
9441 hevc_print(hevc, 0,
9442 "suffix aux: (size %d)\n",
9443 aux_size);
9444 for (i = 0; i <
9445 (aux_size >> 1); i++) {
9446 hevc_print_cont(hevc, 0,
9447 "%04x ", *(aux_adr + i));
9448 if (((i + 1) & 0xf) == 0)
9449 hevc_print_cont(hevc, 0, "\n");
9450 }
9451 }
9452}
9453
9454#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9455static void dolby_get_meta(struct hevc_state_s *hevc)
9456{
9457 struct vdec_s *vdec = hw_to_vdec(hevc);
9458
9459 if (get_dbg_flag(hevc) &
9460 H265_DEBUG_BUFMGR_MORE)
9461 dump_aux_buf(hevc);
9462 if (vdec->dolby_meta_with_el || vdec->slave) {
9463 set_aux_data(hevc,
9464 hevc->cur_pic, 0, 0);
9465 } else if (vdec->master) {
9466 struct hevc_state_s *hevc_ba =
9467 (struct hevc_state_s *)
9468 vdec->master->private;
9469 /*do not use hevc_ba*/
9470 set_aux_data(hevc,
9471 hevc_ba->cur_pic,
9472 0, 1);
9473 set_aux_data(hevc,
9474 hevc->cur_pic, 0, 2);
9475 }
9476}
9477#endif
9478
9479static void read_decode_info(struct hevc_state_s *hevc)
9480{
9481 uint32_t decode_info =
9482 READ_HREG(HEVC_DECODE_INFO);
9483 hevc->start_decoding_flag |=
9484 (decode_info & 0xff);
9485 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9486}
9487
9488static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9489{
9490 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9491 unsigned int dec_status = hevc->dec_status;
9492 int i, ret;
9493
9494#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9495 struct vdec_s *vdec = hw_to_vdec(hevc);
9496#endif
9497
9498 if (hevc->eos)
9499 return IRQ_HANDLED;
9500 if (
9501#ifdef MULTI_INSTANCE_SUPPORT
9502 (!hevc->m_ins_flag) &&
9503#endif
9504 hevc->error_flag == 1) {
9505 if ((error_handle_policy & 0x10) == 0) {
9506 if (hevc->cur_pic) {
9507 int current_lcu_idx =
9508 READ_VREG(HEVC_PARSER_LCU_START)
9509 & 0xffffff;
9510 if (current_lcu_idx <
9511 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9512 hevc->cur_pic->error_mark = 1;
9513
9514 }
9515 }
9516 if ((error_handle_policy & 1) == 0) {
9517 hevc->error_skip_nal_count = 1;
9518			/* manually search NALs, skip error_skip_nal_count
9519			 * NALs, then trigger the HEVC_NAL_SEARCH_DONE irq
9520 */
9521 WRITE_VREG(NAL_SEARCH_CTL,
9522 (error_skip_nal_count << 4) | 0x1);
9523 } else {
9524 hevc->error_skip_nal_count = error_skip_nal_count;
9525 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9526 }
9527 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9528#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9529 || vdec->master
9530 || vdec->slave
9531#endif
9532 ) {
9533 WRITE_VREG(NAL_SEARCH_CTL,
9534 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9535 }
9536 WRITE_VREG(NAL_SEARCH_CTL,
9537 READ_VREG(NAL_SEARCH_CTL)
9538 | ((parser_sei_enable & 0x7) << 17));
9539#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9540 WRITE_VREG(NAL_SEARCH_CTL,
9541 READ_VREG(NAL_SEARCH_CTL) |
9542 ((parser_dolby_vision_enable & 0x1) << 20));
9543#endif
9544 config_decode_mode(hevc);
9545 /* search new nal */
9546 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9547		/* Interrupt Amrisc to execute */
9548 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9549
9550 /* hevc_print(hevc, 0,
9551 *"%s: error handle\n", __func__);
9552 */
9553 hevc->error_flag = 2;
9554 return IRQ_HANDLED;
9555 } else if (
9556#ifdef MULTI_INSTANCE_SUPPORT
9557 (!hevc->m_ins_flag) &&
9558#endif
9559 hevc->error_flag == 3) {
9560 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9561 hevc_recover(hevc);
9562 hevc->error_flag = 0;
9563
9564 if ((error_handle_policy & 0x10) == 0) {
9565 if (hevc->cur_pic) {
9566 int current_lcu_idx =
9567 READ_VREG(HEVC_PARSER_LCU_START)
9568 & 0xffffff;
9569 if (current_lcu_idx <
9570 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9571 hevc->cur_pic->error_mark = 1;
9572
9573 }
9574 }
9575 if ((error_handle_policy & 1) == 0) {
9576			/* need to skip some data when
9577			 * error_flag == 3 is triggered,
9578			 */
9579			/* to avoid hevc_recover() being called
9580			 * many times at the same bitstream position
9581			 */
9582 hevc->error_skip_nal_count = 1;
9583			/* manually search for NALs: skip error_skip_nal_count
9584			 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9585			 */
9586 WRITE_VREG(NAL_SEARCH_CTL,
9587 (error_skip_nal_count << 4) | 0x1);
9588 }
9589
9590 if ((error_handle_policy & 0x2) == 0) {
9591 hevc->have_vps = 1;
9592 hevc->have_sps = 1;
9593 hevc->have_pps = 1;
9594 }
9595 return IRQ_HANDLED;
9596 }
9597 if (!hevc->m_ins_flag) {
9598 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9599 if ((hevc->shift_byte_count_lo & (1 << 31))
9600 && ((i & (1 << 31)) == 0))
9601 hevc->shift_byte_count_hi++;
9602 hevc->shift_byte_count_lo = i;
9603 }
9604#ifdef MULTI_INSTANCE_SUPPORT
9605 mutex_lock(&hevc->chunks_mutex);
9606 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9607 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9608 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9609 && (hevc->chunk)) {
9610 hevc->cur_pic->pts = hevc->chunk->pts;
9611 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9612 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9613 }
9614 mutex_unlock(&hevc->chunks_mutex);
9615
9616 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9617 dec_status == HEVC_DECODE_BUFEMPTY2) {
9618 if (hevc->m_ins_flag) {
9619 read_decode_info(hevc);
9620 if (vdec_frame_based(hw_to_vdec(hevc))) {
9621 hevc->empty_flag = 1;
9622 goto pic_done;
9623 } else {
9624 if (
9625#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9626 vdec->master ||
9627 vdec->slave ||
9628#endif
9629 (data_resend_policy & 0x1)) {
9630 hevc->dec_result = DEC_RESULT_AGAIN;
9631 amhevc_stop();
9632 restore_decode_state(hevc);
9633 } else
9634 hevc->dec_result = DEC_RESULT_GET_DATA;
9635 }
9636 reset_process_time(hevc);
9637 vdec_schedule_work(&hevc->work);
9638 }
9639 return IRQ_HANDLED;
9640 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9641 (dec_status == HEVC_NAL_DECODE_DONE)
9642 ) {
9643 if (hevc->m_ins_flag) {
9644 read_decode_info(hevc);
9645 if (vdec_frame_based(hw_to_vdec(hevc))) {
9646 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9647 hevc->empty_flag = 1;
9648 goto pic_done;
9649 } else {
9650 hevc->dec_result = DEC_RESULT_AGAIN;
9651 amhevc_stop();
9652 restore_decode_state(hevc);
9653 }
9654
9655 reset_process_time(hevc);
9656 vdec_schedule_work(&hevc->work);
9657 }
9658
9659 return IRQ_HANDLED;
9660 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9661 if (hevc->m_ins_flag) {
9662 struct PIC_s *pic;
9663 struct PIC_s *pic_display;
9664 int decoded_poc;
9665#ifdef DETREFILL_ENABLE
9666 if (hevc->is_swap &&
9667 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9668 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9669 && READ_VREG(HEVC_SAO_DBG_MODE0))
9670 hevc->delrefill_check = 2;
9671 }
9672#endif
9673 hevc->empty_flag = 0;
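			/*
			 * Descriptive note: pic_done is also reached via goto
			 * from the buffer-empty cases above when the input is
			 * frame based; the current picture is then finalized
			 * and vh265_work is scheduled.
			 */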
9674pic_done:
9675 if (input_frame_based(hw_to_vdec(hevc)) &&
9676 frmbase_cont_bitlevel != 0 &&
9677 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9678 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9679 > frmbase_cont_bitlevel)) {
9680 /*handle the case: multi pictures in one packet*/
9681 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9682					"%s has more data index=%d, size=0x%x shiftcnt=0x%x\n",
9683 __func__,
9684 hevc->decode_idx, hevc->decode_size,
9685 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9686 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9687 start_process_time(hevc);
9688 return IRQ_HANDLED;
9689 }
9690
9691 read_decode_info(hevc);
9692 get_picture_qos_info(hevc);
9693#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9694 hevc->start_parser_type = 0;
9695 hevc->switch_dvlayer_flag = 0;
9696#endif
9697 hevc->decoded_poc = hevc->curr_POC;
9698 hevc->decoding_pic = NULL;
9699 hevc->dec_result = DEC_RESULT_DONE;
9700#ifdef DETREFILL_ENABLE
9701 if (hevc->is_swap &&
9702 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9703 if (hevc->delrefill_check != 2)
9704#endif
9705
9706 amhevc_stop();
9707
9708 reset_process_time(hevc);
9709
9710 if (hevc->vf_pre_count == 0) {
9711 decoded_poc = hevc->curr_POC;
9712 pic = get_pic_by_POC(hevc, decoded_poc);
9713 if (pic && (pic->POC != INVALID_POC)) {
9714 /*PB skip control */
9715 if (pic->error_mark == 0
9716 && hevc->PB_skip_mode == 1) {
9717 /* start decoding after
9718 * first I
9719 */
9720 hevc->ignore_bufmgr_error |= 0x1;
9721 }
9722 if (hevc->ignore_bufmgr_error & 1) {
9723 if (hevc->PB_skip_count_after_decoding > 0) {
9724 hevc->PB_skip_count_after_decoding--;
9725 } else {
9726 /* start displaying */
9727 hevc->ignore_bufmgr_error |= 0x2;
9728 }
9729 }
9730 if (hevc->mmu_enable
9731 && ((hevc->double_write_mode & 0x10) == 0)) {
9732 if (!hevc->m_ins_flag) {
9733 hevc->used_4k_num =
9734 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9735
9736 if ((!is_skip_decoding(hevc, pic)) &&
9737 (hevc->used_4k_num >= 0) &&
9738 (hevc->cur_pic->scatter_alloc
9739 == 1)) {
9740 hevc_print(hevc,
9741 H265_DEBUG_BUFMGR_MORE,
9742 "%s pic index %d scatter_alloc %d page_start %d\n",
9743 "decoder_mmu_box_free_idx_tail",
9744 hevc->cur_pic->index,
9745 hevc->cur_pic->scatter_alloc,
9746 hevc->used_4k_num);
9747 decoder_mmu_box_free_idx_tail(
9748 hevc->mmu_box,
9749 hevc->cur_pic->index,
9750 hevc->used_4k_num);
9751 hevc->cur_pic->scatter_alloc
9752 = 2;
9753 }
9754 hevc->used_4k_num = -1;
9755 }
9756 }
9757
9758 pic->output_mark = 1;
9759 pic->recon_mark = 1;
9760 }
9761 check_pic_decoded_error(hevc,
9762 READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff);
9763 if (hevc->cur_pic != NULL &&
9764 (READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff) == 0
9765 && (hevc->lcu_x_num * hevc->lcu_y_num != 1))
9766 hevc->cur_pic->error_mark = 1;
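			/*
			 * Descriptive note: force_output is also entered from
			 * the Dolby Vision layer-switch path below when the
			 * first slice is an I slice (slice_type == 2) and no
			 * frame has been queued for display yet.
			 */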
9767force_output:
9768 pic_display = output_pic(hevc, 1);
9769 if (pic_display) {
9770 if ((pic_display->error_mark &&
9771 ((hevc->ignore_bufmgr_error &
9772 0x2) == 0))
9773 || (get_dbg_flag(hevc) &
9774 H265_DEBUG_DISPLAY_CUR_FRAME)
9775 || (get_dbg_flag(hevc) &
9776 H265_DEBUG_NO_DISPLAY)) {
9777 pic_display->output_ready = 0;
9778 if (get_dbg_flag(hevc) &
9779 H265_DEBUG_BUFMGR) {
9780 hevc_print(hevc, 0,
9781 "[BM] Display: POC %d, ",
9782 pic_display->POC);
9783 hevc_print_cont(hevc, 0,
9784 "decoding index %d ==> ",
9785 pic_display->
9786 decode_idx);
9787 hevc_print_cont(hevc, 0,
9788 "Debug or err,recycle it\n");
9789 }
9790 } else {
9791 if (pic_display->
9792 slice_type != 2) {
9793 pic_display->output_ready = 0;
9794 } else {
9795 prepare_display_buf
9796 (hevc,
9797 pic_display);
9798 hevc->first_pic_flag = 1;
9799 }
9800 }
9801 }
9802 }
9803
9804 vdec_schedule_work(&hevc->work);
9805 }
9806
9807 return IRQ_HANDLED;
9808#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9809 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9810 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9811 if (hevc->m_ins_flag) {
9812 unsigned char next_parser_type =
9813 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
9814 read_decode_info(hevc);
9815
9816 if (vdec->slave &&
9817 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9818 /*cur is base, found enhance*/
9819 struct hevc_state_s *hevc_el =
9820 (struct hevc_state_s *)
9821 vdec->slave->private;
9822 hevc->switch_dvlayer_flag = 1;
9823 hevc->no_switch_dvlayer_count = 0;
9824 hevc_el->start_parser_type =
9825 next_parser_type;
9826 hevc_print(hevc, H265_DEBUG_DV,
9827 "switch (poc %d) to el\n",
9828 hevc->cur_pic ?
9829 hevc->cur_pic->POC :
9830 INVALID_POC);
9831 } else if (vdec->master &&
9832 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
9833 /*cur is enhance, found base*/
9834 struct hevc_state_s *hevc_ba =
9835 (struct hevc_state_s *)
9836 vdec->master->private;
9837 hevc->switch_dvlayer_flag = 1;
9838 hevc->no_switch_dvlayer_count = 0;
9839 hevc_ba->start_parser_type =
9840 next_parser_type;
9841 hevc_print(hevc, H265_DEBUG_DV,
9842 "switch (poc %d) to bl\n",
9843 hevc->cur_pic ?
9844 hevc->cur_pic->POC :
9845 INVALID_POC);
9846 } else {
9847 hevc->switch_dvlayer_flag = 0;
9848 hevc->start_parser_type =
9849 next_parser_type;
9850 hevc->no_switch_dvlayer_count++;
9851 hevc_print(hevc, H265_DEBUG_DV,
9852 "%s: no_switch_dvlayer_count = %d\n",
9853 vdec->master ? "el" : "bl",
9854 hevc->no_switch_dvlayer_count);
9855 if (vdec->slave &&
9856 dolby_el_flush_th != 0 &&
9857 hevc->no_switch_dvlayer_count >
9858 dolby_el_flush_th) {
9859 struct hevc_state_s *hevc_el =
9860 (struct hevc_state_s *)
9861 vdec->slave->private;
9862 struct PIC_s *el_pic;
9863 check_pic_decoded_error(hevc_el,
9864 hevc_el->pic_decoded_lcu_idx);
9865 el_pic = get_pic_by_POC(hevc_el,
9866 hevc_el->curr_POC);
9867 hevc_el->curr_POC = INVALID_POC;
9868 hevc_el->m_pocRandomAccess = MAX_INT;
9869 flush_output(hevc_el, el_pic);
9870					hevc_el->decoded_poc = INVALID_POC; /*
9871						flush_output already called */
9872 hevc_el->decoding_pic = NULL;
9873 hevc->no_switch_dvlayer_count = 0;
9874 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
9875 hevc_print(hevc, 0,
9876 "no el anymore, flush_output el\n");
9877 }
9878 }
9879 hevc->decoded_poc = hevc->curr_POC;
9880 hevc->decoding_pic = NULL;
9881 hevc->dec_result = DEC_RESULT_DONE;
9882 amhevc_stop();
9883 reset_process_time(hevc);
9884 if (aux_data_is_avaible(hevc))
9885 dolby_get_meta(hevc);
9886			if (hevc->cur_pic->slice_type == 2 &&
9887 hevc->vf_pre_count == 0) {
9888 hevc_print(hevc, 0,
9889 "first slice_type %x no_switch_dvlayer_count %x\n",
9890 hevc->cur_pic->slice_type,
9891 hevc->no_switch_dvlayer_count);
9892 goto force_output;
9893 }
9894 vdec_schedule_work(&hevc->work);
9895 }
9896
9897 return IRQ_HANDLED;
9898#endif
9899 }
9900
9901#endif
9902
9903 if (dec_status == HEVC_SEI_DAT) {
9904 if (!hevc->m_ins_flag) {
9905 int payload_type =
9906 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
9907 int payload_size =
9908 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
9909 process_nal_sei(hevc,
9910 payload_type, payload_size);
9911 }
9912 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
9913 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
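		/*
		 * Descriptive note: the firmware found a new NAL. Classify it
		 * here and tell the AMRISC (via HEVC_DEC_STATUS_REG) whether
		 * to parse it (VPS/SPS/PPS/slice segment) or discard it,
		 * handling the EOS flush and the error-skip counter on the way.
		 */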
9914 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
9915 int parse_type = HEVC_DISCARD_NAL;
9916
9917 hevc->error_watchdog_count = 0;
9918 hevc->error_skip_nal_wt_cnt = 0;
9919#ifdef MULTI_INSTANCE_SUPPORT
9920 if (hevc->m_ins_flag)
9921 reset_process_time(hevc);
9922#endif
9923 if (slice_parse_begin > 0 &&
9924 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
9925 hevc_print(hevc, 0,
9926 "nal type %d, discard %d\n", naltype,
9927 slice_parse_begin);
9928 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
9929 slice_parse_begin--;
9930 }
9931 if (naltype == NAL_UNIT_EOS) {
9932 struct PIC_s *pic;
9933
9934 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
9935#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9936 if ((vdec->master || vdec->slave) &&
9937 aux_data_is_avaible(hevc)) {
9938 if (hevc->decoding_pic)
9939 dolby_get_meta(hevc);
9940 }
9941#endif
9942 check_pic_decoded_error(hevc,
9943 hevc->pic_decoded_lcu_idx);
9944 pic = get_pic_by_POC(hevc, hevc->curr_POC);
9945 hevc->curr_POC = INVALID_POC;
9946 /* add to fix RAP_B_Bossen_1 */
9947 hevc->m_pocRandomAccess = MAX_INT;
9948 flush_output(hevc, pic);
9949 clear_poc_flag(hevc);
9950 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
9951			/* Interrupt Amrisc to execute */
9952 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9953#ifdef MULTI_INSTANCE_SUPPORT
9954 if (hevc->m_ins_flag) {
9955				hevc->decoded_poc = INVALID_POC; /*
9956					flush_output already called */
9957 hevc->decoding_pic = NULL;
9958 hevc->dec_result = DEC_RESULT_DONE;
9959 amhevc_stop();
9960
9961 vdec_schedule_work(&hevc->work);
9962 }
9963#endif
9964 return IRQ_HANDLED;
9965 }
9966
9967 if (
9968#ifdef MULTI_INSTANCE_SUPPORT
9969 (!hevc->m_ins_flag) &&
9970#endif
9971 hevc->error_skip_nal_count > 0) {
9972 hevc_print(hevc, 0,
9973 "nal type %d, discard %d\n", naltype,
9974 hevc->error_skip_nal_count);
9975 hevc->error_skip_nal_count--;
9976 if (hevc->error_skip_nal_count == 0) {
9977 hevc_recover(hevc);
9978 hevc->error_flag = 0;
9979 if ((error_handle_policy & 0x2) == 0) {
9980 hevc->have_vps = 1;
9981 hevc->have_sps = 1;
9982 hevc->have_pps = 1;
9983 }
9984 return IRQ_HANDLED;
9985 }
9986 } else if (naltype == NAL_UNIT_VPS) {
9987 parse_type = HEVC_NAL_UNIT_VPS;
9988 hevc->have_vps = 1;
9989#ifdef ERROR_HANDLE_DEBUG
9990 if (dbg_nal_skip_flag & 1)
9991 parse_type = HEVC_DISCARD_NAL;
9992#endif
9993 } else if (hevc->have_vps) {
9994 if (naltype == NAL_UNIT_SPS) {
9995 parse_type = HEVC_NAL_UNIT_SPS;
9996 hevc->have_sps = 1;
9997#ifdef ERROR_HANDLE_DEBUG
9998 if (dbg_nal_skip_flag & 2)
9999 parse_type = HEVC_DISCARD_NAL;
10000#endif
10001 } else if (naltype == NAL_UNIT_PPS) {
10002 parse_type = HEVC_NAL_UNIT_PPS;
10003 hevc->have_pps = 1;
10004#ifdef ERROR_HANDLE_DEBUG
10005 if (dbg_nal_skip_flag & 4)
10006 parse_type = HEVC_DISCARD_NAL;
10007#endif
10008 } else if (hevc->have_sps && hevc->have_pps) {
10009 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
10010
10011 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
10012 (naltype ==
10013 NAL_UNIT_CODED_SLICE_IDR_N_LP)
10014 || (naltype ==
10015 NAL_UNIT_CODED_SLICE_CRA)
10016 || (naltype ==
10017 NAL_UNIT_CODED_SLICE_BLA)
10018 || (naltype ==
10019 NAL_UNIT_CODED_SLICE_BLANT)
10020 || (naltype ==
10021 NAL_UNIT_CODED_SLICE_BLA_N_LP)
10022 ) {
10023 if (slice_parse_begin > 0) {
10024 hevc_print(hevc, 0,
10025 "discard %d, for debugging\n",
10026 slice_parse_begin);
10027 slice_parse_begin--;
10028 } else {
10029 parse_type = seg;
10030 }
10031 hevc->have_valid_start_slice = 1;
10032 } else if (naltype <=
10033 NAL_UNIT_CODED_SLICE_CRA
10034 && (hevc->have_valid_start_slice
10035 || (hevc->PB_skip_mode != 3))) {
10036 if (slice_parse_begin > 0) {
10037 hevc_print(hevc, 0,
10038 "discard %d, dd\n",
10039 slice_parse_begin);
10040 slice_parse_begin--;
10041 } else
10042 parse_type = seg;
10043
10044 }
10045 }
10046 }
10047 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
10048 && hevc->have_valid_start_slice &&
10049 hevc->error_flag == 0) {
10050 if ((get_dbg_flag(hevc) &
10051 H265_DEBUG_MAN_SEARCH_NAL) == 0
10052 /* && (!hevc->m_ins_flag)*/) {
10053				/* auto parser NAL; do not check
10054				 * vps/sps/pps/idr
10055				 */
10056 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
10057 }
10058
10059 if ((get_dbg_flag(hevc) &
10060 H265_DEBUG_NO_EOS_SEARCH_DONE)
10061#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10062 || vdec->master
10063 || vdec->slave
10064#endif
10065 ) {
10066 WRITE_VREG(NAL_SEARCH_CTL,
10067 READ_VREG(NAL_SEARCH_CTL) |
10068 0x10000);
10069 }
10070 WRITE_VREG(NAL_SEARCH_CTL,
10071 READ_VREG(NAL_SEARCH_CTL)
10072 | ((parser_sei_enable & 0x7) << 17));
10073#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10074 WRITE_VREG(NAL_SEARCH_CTL,
10075 READ_VREG(NAL_SEARCH_CTL) |
10076 ((parser_dolby_vision_enable & 0x1) << 20));
10077#endif
10078 config_decode_mode(hevc);
10079 }
10080
10081 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
10082 hevc_print(hevc, 0,
10083 "naltype = %d parse_type %d\n %d %d %d %d\n",
10084 naltype, parse_type, hevc->have_vps,
10085 hevc->have_sps, hevc->have_pps,
10086 hevc->have_valid_start_slice);
10087 }
10088
10089 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
10090		/* Interrupt Amrisc to execute */
10091 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10092#ifdef MULTI_INSTANCE_SUPPORT
10093 if (hevc->m_ins_flag)
10094 start_process_time(hevc);
10095#endif
10096 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
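		/*
		 * Descriptive note: a slice segment header has been parsed by
		 * the firmware. Copy the RPM parameters, report picture
		 * parameters to v4l2 when needed, update bit depth / frame
		 * duration / signal type, (re)build the picture list on the
		 * first slice, then run the buffer manager for this slice
		 * segment header.
		 */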
10097#ifdef MULTI_INSTANCE_SUPPORT
10098 if (hevc->m_ins_flag) {
10099 reset_process_time(hevc);
10100 read_decode_info(hevc);
10101
10102 }
10103#endif
10104 if (hevc->start_decoding_time > 0) {
10105 u32 process_time = 1000*
10106 (jiffies - hevc->start_decoding_time)/HZ;
10107 if (process_time > max_decoding_time)
10108 max_decoding_time = process_time;
10109 }
10110
10111 hevc->error_watchdog_count = 0;
10112 if (hevc->pic_list_init_flag == 2) {
10113 hevc->pic_list_init_flag = 3;
10114 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
10115 } else if (hevc->wait_buf == 0) {
10116 u32 vui_time_scale;
10117 u32 vui_num_units_in_tick;
10118 unsigned char reconfig_flag = 0;
10119
10120 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
10121 get_rpm_param(&hevc->param);
10122 else {
10123
10124 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
10125 int ii;
10126
10127 for (ii = 0; ii < 4; ii++) {
10128 hevc->param.l.data[i + ii] =
10129 hevc->rpm_ptr[i + 3
10130 - ii];
10131 }
10132 }
10133#ifdef SEND_LMEM_WITH_RPM
10134 check_head_error(hevc);
10135#endif
10136 }
10137 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
10138 hevc_print(hevc, 0,
10139 "rpm_param: (%d)\n", hevc->slice_idx);
10140 hevc->slice_idx++;
10141 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
10142 hevc_print_cont(hevc, 0,
10143 "%04x ", hevc->param.l.data[i]);
10144 if (((i + 1) & 0xf) == 0)
10145 hevc_print_cont(hevc, 0, "\n");
10146 }
10147
10148 hevc_print(hevc, 0,
10149 "vui_timing_info: %x, %x, %x, %x\n",
10150 hevc->param.p.vui_num_units_in_tick_hi,
10151 hevc->param.p.vui_num_units_in_tick_lo,
10152 hevc->param.p.vui_time_scale_hi,
10153 hevc->param.p.vui_time_scale_lo);
10154 }
10155
10156 if (hevc->is_used_v4l) {
10157 struct aml_vcodec_ctx *ctx =
10158 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
10159
10160 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
10161 struct aml_vdec_ps_infos ps;
10162
10163 hevc->frame_width = hevc->param.p.pic_width_in_luma_samples;
10164 hevc->frame_height = hevc->param.p.pic_height_in_luma_samples;
10165 ps.visible_width = hevc->frame_width;
10166 ps.visible_height = hevc->frame_height;
10167 ps.coded_width = ALIGN(hevc->frame_width, 32);
10168 ps.coded_height = ALIGN(hevc->frame_height, 32);
10169 ps.dpb_size = get_work_pic_num(hevc);
10170 hevc->v4l_params_parsed = true;
10171 /*notice the v4l2 codec.*/
10172 vdec_v4l_set_ps_infos(ctx, &ps);
10173 }
10174 }
10175
10176 if (
10177#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10178 vdec->master == NULL &&
10179 vdec->slave == NULL &&
10180#endif
10181 aux_data_is_avaible(hevc)
10182 ) {
10183
10184 if (get_dbg_flag(hevc) &
10185 H265_DEBUG_BUFMGR_MORE)
10186 dump_aux_buf(hevc);
10187 }
10188
10189 vui_time_scale =
10190 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
10191 hevc->param.p.vui_time_scale_lo;
10192 vui_num_units_in_tick =
10193 (u32)(hevc->param.
10194 p.vui_num_units_in_tick_hi << 16) |
10195 hevc->param.
10196 p.vui_num_units_in_tick_lo;
10197 if (hevc->bit_depth_luma !=
10198 ((hevc->param.p.bit_depth & 0xf) + 8)) {
10199 reconfig_flag = 1;
10200 hevc_print(hevc, 0, "Bit depth luma = %d\n",
10201 (hevc->param.p.bit_depth & 0xf) + 8);
10202 }
10203 if (hevc->bit_depth_chroma !=
10204 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
10205 reconfig_flag = 1;
10206 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
10207 ((hevc->param.p.bit_depth >> 4) &
10208 0xf) + 8);
10209 }
10210 hevc->bit_depth_luma =
10211 (hevc->param.p.bit_depth & 0xf) + 8;
10212 hevc->bit_depth_chroma =
10213 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
10214 bit_depth_luma = hevc->bit_depth_luma;
10215 bit_depth_chroma = hevc->bit_depth_chroma;
10216#ifdef SUPPORT_10BIT
10217 if (hevc->bit_depth_luma == 8 &&
10218 hevc->bit_depth_chroma == 8 &&
10219 enable_mem_saving)
10220 hevc->mem_saving_mode = 1;
10221 else
10222 hevc->mem_saving_mode = 0;
10223#endif
10224 if (reconfig_flag &&
10225 (get_double_write_mode(hevc) & 0x10) == 0)
10226 init_decode_head_hw(hevc);
10227
10228 if ((vui_time_scale != 0)
10229 && (vui_num_units_in_tick != 0)) {
10230 hevc->frame_dur =
10231 div_u64(96000ULL *
10232 vui_num_units_in_tick,
10233 vui_time_scale);
10234 if (hevc->get_frame_dur != true)
10235 vdec_schedule_work(
10236 &hevc->notify_work);
10237
10238 hevc->get_frame_dur = true;
10239#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10240 gvs->frame_dur = hevc->frame_dur;
10241#endif
10242 }
10243
10244 if (hevc->video_signal_type !=
10245 ((hevc->param.p.video_signal_type << 16)
10246 | hevc->param.p.color_description)) {
10247 u32 v = hevc->param.p.video_signal_type;
10248 u32 c = hevc->param.p.color_description;
10249#if 0
10250 if (v & 0x2000) {
10251 hevc_print(hevc, 0,
10252 "video_signal_type present:\n");
10253 hevc_print(hevc, 0, " %s %s\n",
10254 video_format_names[(v >> 10) & 7],
10255 ((v >> 9) & 1) ?
10256 "full_range" : "limited");
10257 if (v & 0x100) {
10258 hevc_print(hevc, 0,
10259 " color_description present:\n");
10260 hevc_print(hevc, 0,
10261 " color_primarie = %s\n",
10262 color_primaries_names
10263 [v & 0xff]);
10264 hevc_print(hevc, 0,
10265 " transfer_characteristic = %s\n",
10266 transfer_characteristics_names
10267 [(c >> 8) & 0xff]);
10268 hevc_print(hevc, 0,
10269 " matrix_coefficient = %s\n",
10270 matrix_coeffs_names[c & 0xff]);
10271 }
10272 }
10273#endif
10274 hevc->video_signal_type = (v << 16) | c;
10275 video_signal_type = hevc->video_signal_type;
10276 }
10277
10278 if (use_cma &&
10279 (hevc->param.p.slice_segment_address == 0)
10280 && (hevc->pic_list_init_flag == 0)) {
10281 int log = hevc->param.p.log2_min_coding_block_size_minus3;
10282 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
10283
10284 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10285 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
10286 hevc->lcu_size = 1 << (log + 3 + log_s);
10287 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
10288 if (hevc->pic_w == 0 || hevc->pic_h == 0
10289 || hevc->lcu_size == 0
10290 || is_oversize(hevc->pic_w, hevc->pic_h)
10291 || (!hevc->skip_first_nal &&
10292 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
10293 /* skip search next start code */
10294 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10295 & (~0x2));
10296 if ( !hevc->skip_first_nal &&
10297 (hevc->pic_h == 96) && (hevc->pic_w == 160))
10298 hevc->skip_first_nal = 1;
10299 hevc->skip_flag = 1;
10300 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10301					/* Interrupt Amrisc to execute */
10302 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10303#ifdef MULTI_INSTANCE_SUPPORT
10304 if (hevc->m_ins_flag)
10305 start_process_time(hevc);
10306#endif
10307 } else {
10308 hevc->sps_num_reorder_pics_0 =
10309 hevc->param.p.sps_num_reorder_pics_0;
10310 hevc->pic_list_init_flag = 1;
10311#ifdef MULTI_INSTANCE_SUPPORT
10312 if (hevc->m_ins_flag) {
10313 vdec_schedule_work(&hevc->work);
10314 } else
10315#endif
10316 up(&h265_sema);
10317 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10318 }
10319 return IRQ_HANDLED;
10320 }
10321
10322}
10323 ret =
10324 hevc_slice_segment_header_process(hevc,
10325 &hevc->param, decode_pic_begin);
10326 if (ret < 0) {
10327#ifdef MULTI_INSTANCE_SUPPORT
10328 if (hevc->m_ins_flag) {
10329 hevc->wait_buf = 0;
10330 hevc->dec_result = DEC_RESULT_AGAIN;
10331 amhevc_stop();
10332 restore_decode_state(hevc);
10333 reset_process_time(hevc);
10334 vdec_schedule_work(&hevc->work);
10335 return IRQ_HANDLED;
10336 }
10337#else
10338 ;
10339#endif
10340 } else if (ret == 0) {
10341 if ((hevc->new_pic) && (hevc->cur_pic)) {
10342 hevc->cur_pic->stream_offset =
10343 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10344 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10345 "read stream_offset = 0x%x\n",
10346 hevc->cur_pic->stream_offset);
10347 hevc->cur_pic->aspect_ratio_idc =
10348 hevc->param.p.aspect_ratio_idc;
10349 hevc->cur_pic->sar_width =
10350 hevc->param.p.sar_width;
10351 hevc->cur_pic->sar_height =
10352 hevc->param.p.sar_height;
10353 }
10354
10355 WRITE_VREG(HEVC_DEC_STATUS_REG,
10356 HEVC_CODED_SLICE_SEGMENT_DAT);
10357			/* Interrupt Amrisc to execute */
10358 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10359
10360 hevc->start_decoding_time = jiffies;
10361#ifdef MULTI_INSTANCE_SUPPORT
10362 if (hevc->m_ins_flag)
10363 start_process_time(hevc);
10364#endif
10365#if 1
10366 /*to do..., copy aux data to hevc->cur_pic*/
10367#endif
10368#ifdef MULTI_INSTANCE_SUPPORT
10369 } else if (hevc->m_ins_flag) {
10370 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10371 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10372 __func__, ret);
10373 hevc->decoded_poc = INVALID_POC;
10374 hevc->decoding_pic = NULL;
10375 hevc->dec_result = DEC_RESULT_DONE;
10376 amhevc_stop();
10377 reset_process_time(hevc);
10378 vdec_schedule_work(&hevc->work);
10379#endif
10380 } else {
10381 /* skip, search next start code */
10382#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10383 gvs->drop_frame_count++;
10384#endif
10385 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10386 hevc->skip_flag = 1;
10387 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10388			/* Interrupt Amrisc to execute */
10389 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10390 }
10391
10392 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10393 hevc_print(hevc, 0 , "hevc decode oversize !!\n");
10394#ifdef MULTI_INSTANCE_SUPPORT
10395 if (!hevc->m_ins_flag)
10396 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10397 H265_DEBUG_DIS_SYS_ERROR_PROC);
10398#endif
10399 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10400 }
10401 return IRQ_HANDLED;
10402}
10403
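/*
 * Descriptive note: clear HEVC_SHIFT_STATUS and wait for the HEVC
 * stream-fetch engine to go idle (HEVC_STREAM_CONTROL bit 1 cleared),
 * polling every 20ms with a bounded retry count.
 */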
10404static void wait_hevc_search_done(struct hevc_state_s *hevc)
10405{
10406 int count = 0;
10407 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10408 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10409 msleep(20);
10410 count++;
10411 if (count > 100) {
10412 hevc_print(hevc, 0, "%s timeout\n", __func__);
10413 break;
10414 }
10415 }
10416}
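/*
 * Descriptive note: hard-IRQ half of the decoder interrupt. Latch
 * HEVC_DEC_STATUS_REG, service the ucode debug/pause hooks
 * (DEBUG_REG1/DEBUG_REG2), handle the single-instance over-decode
 * case, then wake the threaded handler.
 */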
10417static irqreturn_t vh265_isr(int irq, void *data)
10418{
10419 int i, temp;
10420 unsigned int dec_status;
10421 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10422 u32 debug_tag;
10423 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10424
10425 if (hevc->init_flag == 0)
10426 return IRQ_HANDLED;
10427 hevc->dec_status = dec_status;
10428 if (is_log_enable(hevc))
10429 add_log(hevc,
10430 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10431 dec_status, READ_HREG(HEVC_DECODE_INFO),
10432 READ_VREG(HEVC_MPRED_CURR_LCU),
10433 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10434 READ_VREG(HEVC_SHIFT_STATUS));
10435
10436 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10437 hevc_print(hevc, 0,
10438 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10439 dec_status, READ_HREG(HEVC_DECODE_INFO),
10440 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10441 READ_VREG(HEVC_SHIFT_STATUS));
10442
10443 debug_tag = READ_HREG(DEBUG_REG1);
10444 if (debug_tag & 0x10000) {
10445 hevc_print(hevc, 0,
10446 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10447
10448 if (hevc->mmu_enable)
10449 temp = 0x500;
10450 else
10451 temp = 0x400;
10452 for (i = 0; i < temp; i += 4) {
10453 int ii;
10454 if ((i & 0xf) == 0)
10455 hevc_print_cont(hevc, 0, "%03x: ", i);
10456 for (ii = 0; ii < 4; ii++) {
10457 hevc_print_cont(hevc, 0, "%04x ",
10458 hevc->lmem_ptr[i + 3 - ii]);
10459 }
10460 if (((i + ii) & 0xf) == 0)
10461 hevc_print_cont(hevc, 0, "\n");
10462 }
10463
10464 if (((udebug_pause_pos & 0xffff)
10465 == (debug_tag & 0xffff)) &&
10466 (udebug_pause_decode_idx == 0 ||
10467 udebug_pause_decode_idx == hevc->decode_idx) &&
10468 (udebug_pause_val == 0 ||
10469 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10470 udebug_pause_pos &= 0xffff;
10471 hevc->ucode_pause_pos = udebug_pause_pos;
10472 }
10473 else if (debug_tag & 0x20000)
10474 hevc->ucode_pause_pos = 0xffffffff;
10475 if (hevc->ucode_pause_pos)
10476 reset_process_time(hevc);
10477 else
10478 WRITE_HREG(DEBUG_REG1, 0);
10479 } else if (debug_tag != 0) {
10480 hevc_print(hevc, 0,
10481 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10482 READ_HREG(DEBUG_REG2),
10483 READ_VREG(HEVC_STREAM_LEVEL),
10484 READ_VREG(HEVC_STREAM_WR_PTR),
10485 READ_VREG(HEVC_STREAM_RD_PTR));
10486 if (((udebug_pause_pos & 0xffff)
10487 == (debug_tag & 0xffff)) &&
10488 (udebug_pause_decode_idx == 0 ||
10489 udebug_pause_decode_idx == hevc->decode_idx) &&
10490 (udebug_pause_val == 0 ||
10491 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10492 udebug_pause_pos &= 0xffff;
10493 hevc->ucode_pause_pos = udebug_pause_pos;
10494 }
10495 if (hevc->ucode_pause_pos)
10496 reset_process_time(hevc);
10497 else
10498 WRITE_HREG(DEBUG_REG1, 0);
10499 return IRQ_HANDLED;
10500 }
10501
10502
10503 if (hevc->pic_list_init_flag == 1)
10504 return IRQ_HANDLED;
10505
10506 if (!hevc->m_ins_flag) {
10507 if (dec_status == HEVC_OVER_DECODE) {
10508 hevc->over_decode = 1;
10509 hevc_print(hevc, 0,
10510				"isr: over decode\n");
10511 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10512 return IRQ_HANDLED;
10513 }
10514 }
10515
10516 return IRQ_WAKE_THREAD;
10517
10518}
10519
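/*
 * Descriptive note: worker scheduled from the timer; let
 * hevc_source_changed() re-evaluate the decoder clock for the current
 * resolution and frame rate and remember the new operating point in
 * saved_resolution.
 */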
10520static void vh265_set_clk(struct work_struct *work)
10521{
10522 struct hevc_state_s *hevc = container_of(work,
10523 struct hevc_state_s, set_clk_work);
10524
10525 int fps = 96000 / hevc->frame_dur;
10526
10527 if (hevc_source_changed(VFORMAT_HEVC,
10528 hevc->frame_width, hevc->frame_height, fps) > 0)
10529 hevc->saved_resolution = hevc->frame_width *
10530 hevc->frame_height * fps;
10531}
10532
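/*
 * Descriptive note: periodic watchdog timer. In multi-instance mode it
 * detects stalled decodes via decode_timeout_val and the LCU position;
 * in single-instance mode it drives the error/watchdog recovery state
 * machine. It also services debug module parameters (radr/rval,
 * dbg_cmd, H265_DEBUG_* flags) and re-arms itself.
 */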
10533static void vh265_check_timer_func(unsigned long arg)
10534{
10535 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10536 struct timer_list *timer = &hevc->timer;
10537 unsigned char empty_flag;
10538 unsigned int buf_level;
10539
10540 enum receviver_start_e state = RECEIVER_INACTIVE;
10541
10542 if (hevc->init_flag == 0) {
10543 if (hevc->stat & STAT_TIMER_ARM) {
10544 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10545 }
10546 return;
10547 }
10548#ifdef MULTI_INSTANCE_SUPPORT
10549 if (hevc->m_ins_flag &&
10550 (get_dbg_flag(hevc) &
10551 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10552 hw_to_vdec(hevc)->next_status ==
10553 VDEC_STATUS_DISCONNECTED) {
10554 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10555 vdec_schedule_work(&hevc->work);
10556 hevc_print(hevc,
10557 0, "vdec requested to be disconnected\n");
10558 return;
10559 }
10560
10561 if (hevc->m_ins_flag) {
10562 if ((input_frame_based(hw_to_vdec(hevc)) ||
10563 (READ_VREG(HEVC_STREAM_LEVEL) > 0xb0)) &&
10564 ((get_dbg_flag(hevc) &
10565 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10566 (decode_timeout_val > 0) &&
10567 (hevc->start_process_time > 0) &&
10568 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10569 > decode_timeout_val)
10570 ) {
10571 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10572 int current_lcu_idx =
10573 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10574 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10575 if (hevc->last_lcu_idx == current_lcu_idx) {
10576 if (hevc->decode_timeout_count > 0)
10577 hevc->decode_timeout_count--;
10578 if (hevc->decode_timeout_count == 0)
10579 timeout_process(hevc);
10580 } else
10581 restart_process_time(hevc);
10582 hevc->last_lcu_idx = current_lcu_idx;
10583 } else {
10584 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10585 timeout_process(hevc);
10586 }
10587 }
10588 } else {
10589#endif
10590 if (hevc->m_ins_flag == 0 &&
10591 vf_get_receiver(hevc->provider_name)) {
10592 state =
10593 vf_notify_receiver(hevc->provider_name,
10594 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10595 NULL);
10596 if ((state == RECEIVER_STATE_NULL)
10597 || (state == RECEIVER_STATE_NONE))
10598 state = RECEIVER_INACTIVE;
10599 } else
10600 state = RECEIVER_INACTIVE;
10601
10602 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10603 /* error watchdog */
10604 if (hevc->m_ins_flag == 0 &&
10605 (empty_flag == 0)
10606 && (hevc->pic_list_init_flag == 0
10607 || hevc->pic_list_init_flag
10608 == 3)) {
10609 /* decoder has input */
10610 if ((get_dbg_flag(hevc) &
10611 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10612
10613 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10614 /* receiver has no buffer to recycle */
10615 if ((state == RECEIVER_INACTIVE) &&
10616 (kfifo_is_empty(&hevc->display_q) &&
10617 buf_level > 0x200)
10618 ) {
10619 if (hevc->error_flag == 0) {
10620 hevc->error_watchdog_count++;
10621 if (hevc->error_watchdog_count ==
10622 error_handle_threshold) {
10623 hevc_print(hevc, 0,
10624 "H265 dec err local reset.\n");
10625 hevc->error_flag = 1;
10626 hevc->error_watchdog_count = 0;
10627 hevc->error_skip_nal_wt_cnt = 0;
10628 hevc->
10629 error_system_watchdog_count++;
10630 WRITE_VREG
10631 (HEVC_ASSIST_MBOX0_IRQ_REG,
10632 0x1);
10633 }
10634 } else if (hevc->error_flag == 2) {
10635 int th =
10636 error_handle_nal_skip_threshold;
10637 hevc->error_skip_nal_wt_cnt++;
10638 if (hevc->error_skip_nal_wt_cnt
10639 == th) {
10640 hevc->error_flag = 3;
10641 hevc->error_watchdog_count = 0;
10642 hevc->
10643 error_skip_nal_wt_cnt = 0;
10644 WRITE_VREG
10645 (HEVC_ASSIST_MBOX0_IRQ_REG,
10646 0x1);
10647 }
10648 }
10649 }
10650 }
10651
10652 if ((get_dbg_flag(hevc)
10653 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10654 /* receiver has no buffer to recycle */
10655 if ((state == RECEIVER_INACTIVE) &&
10656 (kfifo_is_empty(&hevc->display_q))
10657 ) { /* no buffer to recycle */
10658 if ((get_dbg_flag(hevc) &
10659 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10660 0)
10661 hevc->error_system_watchdog_count++;
10662 if (hevc->error_system_watchdog_count ==
10663 error_handle_system_threshold) {
10664 /* and it lasts for a while */
10665 hevc_print(hevc, 0,
10666 "H265 dec fatal error watchdog.\n");
10667 hevc->
10668 error_system_watchdog_count = 0;
10669 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10670 }
10671 }
10672 } else {
10673 hevc->error_watchdog_count = 0;
10674 hevc->error_system_watchdog_count = 0;
10675 }
10676#ifdef MULTI_INSTANCE_SUPPORT
10677 }
10678#endif
10679 if ((hevc->ucode_pause_pos != 0) &&
10680 (hevc->ucode_pause_pos != 0xffffffff) &&
10681 udebug_pause_pos != hevc->ucode_pause_pos) {
10682 hevc->ucode_pause_pos = 0;
10683 WRITE_HREG(DEBUG_REG1, 0);
10684 }
10685
10686 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10687 dump_pic_list(hevc);
10688 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10689 }
10690 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10691 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10692 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10693 }
10694#ifdef TEST_NO_BUF
10695 if (hevc->wait_buf)
10696 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10697#endif
10698 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
10699 hevc->error_skip_nal_count = error_skip_nal_count;
10700 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10701
10702 debug &= ~H265_DEBUG_HW_RESET;
10703 }
10704
10705#ifdef ERROR_HANDLE_DEBUG
10706 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
10707 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
10708 dbg_nal_skip_count &= ~0x10000;
10709 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10710 }
10711#endif
10712
10713 if (radr != 0) {
10714 if (rval != 0) {
10715 WRITE_VREG(radr, rval);
10716 hevc_print(hevc, 0,
10717 "WRITE_VREG(%x,%x)\n", radr, rval);
10718 } else
10719 hevc_print(hevc, 0,
10720 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
10721 rval = 0;
10722 radr = 0;
10723 }
10724 if (dbg_cmd != 0) {
10725 if (dbg_cmd == 1) {
10726 u32 disp_laddr;
10727
10728 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
10729 get_double_write_mode(hevc) == 0) {
10730 disp_laddr =
10731 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
10732 } else {
10733 struct canvas_s cur_canvas;
10734
10735 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
10736 & 0xff), &cur_canvas);
10737 disp_laddr = cur_canvas.addr;
10738 }
10739 hevc_print(hevc, 0,
10740 "current displayed buffer address %x\r\n",
10741 disp_laddr);
10742 }
10743 dbg_cmd = 0;
10744 }
10745	/* do not adjust the clock right after start */
10746 if (hevc->m_ins_flag == 0 &&
10747 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
10748 hevc->frame_dur > 0 && hevc->saved_resolution !=
10749 hevc->frame_width * hevc->frame_height *
10750 (96000 / hevc->frame_dur))
10751 vdec_schedule_work(&hevc->set_clk_work);
10752
10753 mod_timer(timer, jiffies + PUT_INTERVAL);
10754}
10755
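/*
 * Descriptive note: background kthread used in the non multi-instance
 * (use_cma) path. It is woken through h265_sema to build the picture
 * list once the first sequence parameters are known
 * (pic_list_init_flag == 1) and to tear it down when uninit_list is
 * set on stop.
 */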
10756static int h265_task_handle(void *data)
10757{
10758 int ret = 0;
10759 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10760
10761 set_user_nice(current, -10);
10762 while (1) {
10763 if (use_cma == 0) {
10764 hevc_print(hevc, 0,
10765 "ERROR: use_cma can not be changed dynamically\n");
10766 }
10767 ret = down_interruptible(&h265_sema);
10768 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
10769 init_pic_list(hevc);
10770 init_pic_list_hw(hevc);
10771 init_buf_spec(hevc);
10772 hevc->pic_list_init_flag = 2;
10773 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
10774
10775 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10776
10777 }
10778
10779 if (hevc->uninit_list) {
10780 /*USE_BUF_BLOCK*/
10781 uninit_pic_list(hevc);
10782 hevc_print(hevc, 0, "uninit list\n");
10783 hevc->uninit_list = 0;
10784#ifdef USE_UNINIT_SEMA
10785 if (use_cma) {
10786 up(&hevc->h265_uninit_done_sema);
10787 while (!kthread_should_stop())
10788 msleep(1);
10789 break;
10790 }
10791#endif
10792 }
10793 }
10794
10795 return 0;
10796}
10797
10798void vh265_free_cmabuf(void)
10799{
10800 struct hevc_state_s *hevc = gHevc;
10801
10802 mutex_lock(&vh265_mutex);
10803
10804 if (hevc->init_flag) {
10805 mutex_unlock(&vh265_mutex);
10806 return;
10807 }
10808
10809 mutex_unlock(&vh265_mutex);
10810}
10811
10812#ifdef MULTI_INSTANCE_SUPPORT
10813int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
10814#else
10815int vh265_dec_status(struct vdec_info *vstatus)
10816#endif
10817{
10818#ifdef MULTI_INSTANCE_SUPPORT
10819 struct hevc_state_s *hevc =
10820 (struct hevc_state_s *)vdec->private;
10821#else
10822 struct hevc_state_s *hevc = gHevc;
10823#endif
10824 if (!hevc)
10825 return -1;
10826
10827 vstatus->frame_width = hevc->frame_width;
10828 vstatus->frame_height = hevc->frame_height;
10829 if (hevc->frame_dur != 0)
10830 vstatus->frame_rate = 96000 / hevc->frame_dur;
10831 else
10832 vstatus->frame_rate = -1;
10833 vstatus->error_count = 0;
10834 vstatus->status = hevc->stat | hevc->fatal_error;
10835#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10836	vstatus->frame_dur = hevc->frame_dur;
10837	/* bit_rate is filled below, only after the gvs NULL check */
10838 if (gvs) {
10839 vstatus->bit_rate = gvs->bit_rate;
10840 vstatus->frame_data = gvs->frame_data;
10841 vstatus->total_data = gvs->total_data;
10842 vstatus->frame_count = gvs->frame_count;
10843 vstatus->error_frame_count = gvs->error_frame_count;
10844 vstatus->drop_frame_count = gvs->drop_frame_count;
10845 vstatus->total_data = gvs->total_data;
10846 vstatus->samp_cnt = gvs->samp_cnt;
10847 vstatus->offset = gvs->offset;
10848 }
10849 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
10850 "%s", DRIVER_NAME);
10851#endif
10852 vstatus->ratio_control = hevc->ratio_control;
10853 return 0;
10854}
10855
10856int vh265_set_isreset(struct vdec_s *vdec, int isreset)
10857{
10858 is_reset = isreset;
10859 return 0;
10860}
10861
10862static int vh265_vdec_info_init(void)
10863{
10864 gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
10865 if (NULL == gvs) {
10866 pr_info("the struct of vdec status malloc failed.\n");
10867 return -ENOMEM;
10868 }
10869 return 0;
10870}
10871
10872#if 0
10873static void H265_DECODE_INIT(void)
10874{
10875 /* enable hevc clocks */
10876 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
10877 /* *************************************************************** */
10878 /* Power ON HEVC */
10879 /* *************************************************************** */
10880 /* Powerup HEVC */
10881 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
10882 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
10883 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
10884 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
10885 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
10886 /* remove isolations */
10887 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
10888 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
10889
10890}
10891#endif
10892
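/*
 * Descriptive note: program HEVC_DECODE_MODE/HEVC_DECODE_MODE2
 * according to the instance type: single vs. multi instance, frame
 * based vs. stream based input, and (for Dolby Vision) whether this
 * instance is the base or the enhancement layer, optionally bypassing
 * the enhancement layer.
 */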
10893static void config_decode_mode(struct hevc_state_s *hevc)
10894{
10895#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10896 struct vdec_s *vdec = hw_to_vdec(hevc);
10897#endif
10898 unsigned decode_mode;
10899 if (!hevc->m_ins_flag)
10900 decode_mode = DECODE_MODE_SINGLE;
10901 else if (vdec_frame_based(hw_to_vdec(hevc)))
10902 decode_mode =
10903 DECODE_MODE_MULTI_FRAMEBASE;
10904#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10905 else if (vdec->slave) {
10906 if (force_bypass_dvenl & 0x80000000)
10907 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
10908 else
10909 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
10910 if (dolby_meta_with_el && hevc->bypass_dvenl) {
10911 hevc->bypass_dvenl = 0;
10912 hevc_print(hevc, 0,
10913 "NOT support bypass_dvenl when meta_with_el\n");
10914 }
10915 if (hevc->bypass_dvenl)
10916 decode_mode =
10917 (hevc->start_parser_type << 8)
10918 | DECODE_MODE_MULTI_STREAMBASE;
10919 else
10920 decode_mode =
10921 (hevc->start_parser_type << 8)
10922 | DECODE_MODE_MULTI_DVBAL;
10923 } else if (vdec->master)
10924 decode_mode =
10925 (hevc->start_parser_type << 8)
10926 | DECODE_MODE_MULTI_DVENL;
10927#endif
10928 else
10929 decode_mode =
10930 DECODE_MODE_MULTI_STREAMBASE;
10931
10932 if (hevc->m_ins_flag)
10933 decode_mode |=
10934 (hevc->start_decoding_flag << 16);
10935 /* set MBX0 interrupt flag */
10936 decode_mode |= (0x80 << 24);
10937 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
10938 WRITE_VREG(HEVC_DECODE_MODE2,
10939 hevc->rps_set_id);
10940}
10941
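/*
 * Descriptive note: one-time hardware protocol setup before starting
 * the AMRISC: configure the workspace and decoder core, enable the
 * mailbox 0 interrupt, select manual or automatic NAL search, then
 * program the decode mode and aux buffers (plus the ucode swap and
 * detrefill workarounds on GXM and earlier).
 */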
10942static void vh265_prot_init(struct hevc_state_s *hevc)
10943{
10944#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10945 struct vdec_s *vdec = hw_to_vdec(hevc);
10946#endif
10947 /* H265_DECODE_INIT(); */
10948
10949 hevc_config_work_space_hw(hevc);
10950
10951 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
10952
10953 WRITE_VREG(HEVC_WAIT_FLAG, 1);
10954
10955 /* WRITE_VREG(P_HEVC_MPSR, 1); */
10956
10957 /* clear mailbox interrupt */
10958 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
10959
10960 /* enable mailbox interrupt */
10961 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
10962
10963 /* disable PSCALE for hardware sharing */
10964 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
10965
10966 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
10967
10968 if ((get_dbg_flag(hevc) &
10969 (H265_DEBUG_MAN_SKIP_NAL |
10970 H265_DEBUG_MAN_SEARCH_NAL))
10971 /*||hevc->m_ins_flag*/
10972 ) {
10973		WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manually parse NAL */
10974 } else {
10975 /* check vps/sps/pps/i-slice in ucode */
10976 unsigned ctl_val = 0x8;
10977 if (hevc->PB_skip_mode == 0)
10978 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
10979 else if (hevc->PB_skip_mode == 3)
10980 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
10981 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
10982 }
10983 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
10984#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10985 || vdec->master
10986 || vdec->slave
10987#endif
10988 )
10989 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
10990
10991 WRITE_VREG(NAL_SEARCH_CTL,
10992 READ_VREG(NAL_SEARCH_CTL)
10993 | ((parser_sei_enable & 0x7) << 17));
10994#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10995 WRITE_VREG(NAL_SEARCH_CTL,
10996 READ_VREG(NAL_SEARCH_CTL) |
10997 ((parser_dolby_vision_enable & 0x1) << 20));
10998#endif
10999 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
11000
11001 config_decode_mode(hevc);
11002 config_aux_buf(hevc);
11003#ifdef SWAP_HEVC_UCODE
11004 if (!tee_enabled() && hevc->is_swap &&
11005 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11006 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11007 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11008 }
11009#endif
11010#ifdef DETREFILL_ENABLE
11011 if (hevc->is_swap &&
11012 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11013 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
11014 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
11015 }
11016#endif
11017}
11018
11019static int vh265_local_init(struct hevc_state_s *hevc)
11020{
11021 int i;
11022 int ret = -1;
11023
11024#ifdef DEBUG_PTS
11025 hevc->pts_missed = 0;
11026 hevc->pts_hit = 0;
11027#endif
11028
11029 hevc->saved_resolution = 0;
11030 hevc->get_frame_dur = false;
11031 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
11032 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
11033 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
11034 pr_info("over size : %u x %u.\n",
11035 hevc->frame_width, hevc->frame_height);
11036 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
11037 return ret;
11038 }
11039
11040 if (hevc->max_pic_w && hevc->max_pic_h) {
11041 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
11042 ((hevc->max_pic_w * hevc->max_pic_h) >
11043 1920 * 1088) ? true : false;
11044 } else {
11045 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
11046 ((hevc->frame_width * hevc->frame_height) >
11047 1920 * 1088) ? true : false;
11048 }
11049
11050 hevc->frame_dur =
11051 (hevc->vh265_amstream_dec_info.rate ==
11052 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
11053#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
11054 gvs->frame_dur = hevc->frame_dur;
11055#endif
11056 if (hevc->frame_width && hevc->frame_height)
11057 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
11058
11059 if (i_only_flag)
11060 hevc->i_only = i_only_flag & 0xff;
11061 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
11062 & 0x08)
11063 hevc->i_only = 0x7;
11064 else
11065 hevc->i_only = 0x0;
11066 hevc->error_watchdog_count = 0;
11067 hevc->sei_present_flag = 0;
11068 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
11069 & 0x40) >> 6;
11070 hevc_print(hevc, 0,
11071 "h265:pts_unstable=%d\n", pts_unstable);
11072/*
11073 *TODO:FOR VERSION
11074 */
11075 hevc_print(hevc, 0,
11076 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
11077 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
11078
11079 if (hevc->frame_dur == 0)
11080 hevc->frame_dur = 96000 / 24;
11081
11082 INIT_KFIFO(hevc->display_q);
11083 INIT_KFIFO(hevc->newframe_q);
11084 INIT_KFIFO(hevc->pending_q);
11085
11086 for (i = 0; i < VF_POOL_SIZE; i++) {
11087 const struct vframe_s *vf = &hevc->vfpool[i];
11088
11089 hevc->vfpool[i].index = -1;
11090 kfifo_put(&hevc->newframe_q, vf);
11091 }
11092
11093
11094 ret = hevc_local_init(hevc);
11095
11096 return ret;
11097}
11098#ifdef MULTI_INSTANCE_SUPPORT
11099static s32 vh265_init(struct vdec_s *vdec)
11100{
11101 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
11102#else
11103static s32 vh265_init(struct hevc_state_s *hevc)
11104{
11105
11106#endif
11107 int ret, size = -1;
11108 int fw_size = 0x1000 * 16;
11109 struct firmware_s *fw = NULL;
11110
11111 init_timer(&hevc->timer);
11112
11113 hevc->stat |= STAT_TIMER_INIT;
11114
11115 if (hevc->m_ins_flag) {
11116#ifdef USE_UNINIT_SEMA
11117 sema_init(&hevc->h265_uninit_done_sema, 0);
11118#endif
11119 INIT_WORK(&hevc->work, vh265_work);
11120 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
11121 }
11122
11123 if (vh265_local_init(hevc) < 0)
11124 return -EBUSY;
11125
11126 mutex_init(&hevc->chunks_mutex);
11127 INIT_WORK(&hevc->notify_work, vh265_notify_work);
11128 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
11129
11130 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
11131 if (IS_ERR_OR_NULL(fw))
11132 return -ENOMEM;
11133
11134 if (hevc->mmu_enable)
11135 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11136 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
11137 else {
11138 if (!hevc->is_4k) {
11139				/* if an older version of the fw was loaded, */
11140				/* need to try to load the noswap fw because the */
11141				/* old fw package does not contain the swap fw. */
11142 size = get_firmware_data(
11143 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
11144 if (size < 0)
11145 size = get_firmware_data(
11146 VIDEO_DEC_HEVC_MMU, fw->data);
11147 else if (size)
11148 hevc->is_swap = true;
11149 } else
11150 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
11151 fw->data);
11152 }
11153 else
11154 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
11155
11156 if (size < 0) {
11157 pr_err("get firmware fail.\n");
11158 vfree(fw);
11159 return -1;
11160 }
11161
11162 fw->len = size;
11163
11164#ifdef SWAP_HEVC_UCODE
11165 if (!tee_enabled() && hevc->is_swap &&
11166 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11167 if (hevc->mmu_enable) {
11168 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
11169 hevc->mc_cpu_addr =
11170 dma_alloc_coherent(amports_get_dma_device(),
11171 hevc->swap_size,
11172 &hevc->mc_dma_handle, GFP_KERNEL);
11173 if (!hevc->mc_cpu_addr) {
11174 amhevc_disable();
11175 pr_info("vh265 mmu swap ucode loaded fail.\n");
11176 return -ENOMEM;
11177 }
11178
11179 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
11180 hevc->swap_size);
11181
11182 hevc_print(hevc, 0,
11183 "vh265 mmu ucode swap loaded %x\n",
11184 hevc->mc_dma_handle);
11185 }
11186 }
11187#endif
11188
11189#ifdef MULTI_INSTANCE_SUPPORT
11190 if (hevc->m_ins_flag) {
11191 hevc->timer.data = (ulong) hevc;
11192 hevc->timer.function = vh265_check_timer_func;
11193 hevc->timer.expires = jiffies + PUT_INTERVAL;
11194
11195 hevc->fw = fw;
11196
11197 return 0;
11198 }
11199#endif
11200 amhevc_enable();
11201
11202 if (hevc->mmu_enable)
11203 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11204 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
11205 else {
11206 if (!hevc->is_4k) {
11207				/* if an older version of the fw was loaded, */
11208				/* need to try to load the noswap fw because the */
11209				/* old fw package does not contain the swap fw. */
11210 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11211 "hevc_mmu_swap", fw->data);
11212 if (ret < 0)
11213 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11214 "h265_mmu", fw->data);
11215 else
11216 hevc->is_swap = true;
11217 } else
11218 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11219 "h265_mmu", fw->data);
11220 }
11221 else
11222 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
11223
11224 if (ret < 0) {
11225 amhevc_disable();
11226 vfree(fw);
11227 pr_err("H265: the %s fw loading failed, err: %x\n",
11228 tee_enabled() ? "TEE" : "local", ret);
11229 return -EBUSY;
11230 }
11231
11232 vfree(fw);
11233
11234 hevc->stat |= STAT_MC_LOAD;
11235
11236#ifdef DETREFILL_ENABLE
11237 if (hevc->is_swap &&
11238 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11239 init_detrefill_buf(hevc);
11240#endif
11241 /* enable AMRISC side protocol */
11242 vh265_prot_init(hevc);
11243
11244 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
11245 vh265_isr_thread_fn,
11246 IRQF_ONESHOT,/*run thread on this irq disabled*/
11247 "vh265-irq", (void *)hevc)) {
11248 hevc_print(hevc, 0, "vh265 irq register error.\n");
11249 amhevc_disable();
11250 return -ENOENT;
11251 }
11252
11253 hevc->stat |= STAT_ISR_REG;
11254 hevc->provider_name = PROVIDER_NAME;
11255
11256#ifdef MULTI_INSTANCE_SUPPORT
11257 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
11258 &vh265_vf_provider, vdec);
11259 vf_reg_provider(&vh265_vf_prov);
11260 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
11261 NULL);
11262 if (hevc->frame_dur != 0) {
11263 if (!is_reset) {
11264 vf_notify_receiver(hevc->provider_name,
11265 VFRAME_EVENT_PROVIDER_FR_HINT,
11266 (void *)
11267 ((unsigned long)hevc->frame_dur));
11268 fr_hint_status = VDEC_HINTED;
11269 }
11270 } else
11271 fr_hint_status = VDEC_NEED_HINT;
11272#else
11273 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
11274 hevc);
11275 vf_reg_provider(&vh265_vf_prov);
11276 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
11277 if (hevc->frame_dur != 0) {
11278 vf_notify_receiver(PROVIDER_NAME,
11279 VFRAME_EVENT_PROVIDER_FR_HINT,
11280 (void *)
11281 ((unsigned long)hevc->frame_dur));
11282 fr_hint_status = VDEC_HINTED;
11283 } else
11284 fr_hint_status = VDEC_NEED_HINT;
11285#endif
11286 hevc->stat |= STAT_VF_HOOK;
11287
11288 hevc->timer.data = (ulong) hevc;
11289 hevc->timer.function = vh265_check_timer_func;
11290 hevc->timer.expires = jiffies + PUT_INTERVAL;
11291
11292 add_timer(&hevc->timer);
11293
11294 hevc->stat |= STAT_TIMER_ARM;
11295
11296 if (use_cma) {
11297#ifdef USE_UNINIT_SEMA
11298 sema_init(&hevc->h265_uninit_done_sema, 0);
11299#endif
11300 if (h265_task == NULL) {
11301 sema_init(&h265_sema, 1);
11302 h265_task =
11303 kthread_run(h265_task_handle, hevc,
11304 "kthread_h265");
11305 }
11306 }
11307 /* hevc->stat |= STAT_KTHREAD; */
11308#if 0
11309 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11310 hevc_print(hevc, 0, "%s force clk\n", __func__);
11311 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11312 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11313 ((1 << 2) | (1 << 1)));
11314 WRITE_VREG(HEVC_DBLK_CFG0,
11315 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11316 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11317 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11318 (1 << 2)); /* 2 */
11319 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11320 (1 << 24)); /* 24 */
11321 WRITE_VREG(HEVC_STREAM_CONTROL,
11322 READ_VREG(HEVC_STREAM_CONTROL) |
11323 (1 << 15)); /* 15 */
11324 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11325 (1 << 13)); /* 13 */
11326 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11327 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11328 (1 << 15)); /* 15 */
11329 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11330 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11331 (1 << 15)); /* 15 */
11332 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11333 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11334 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11335 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11336 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11337 (1 << 3)); /* 3 */
11338 }
11339#endif
11340#ifdef SWAP_HEVC_UCODE
11341 if (!tee_enabled() && hevc->is_swap &&
11342 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11343 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11344 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11345 }
11346#endif
11347
11348#ifndef MULTI_INSTANCE_SUPPORT
11349 set_vdec_func(&vh265_dec_status);
11350#endif
11351 amhevc_start();
11352 hevc->stat |= STAT_VDEC_RUN;
11353 hevc->init_flag = 1;
11354 error_handle_threshold = 30;
11355 /* pr_info("%d, vh265_init, RP=0x%x\n",
11356 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11357 */
11358
11359 return 0;
11360}
11361
11362static int vh265_stop(struct hevc_state_s *hevc)
11363{
11364 if (get_dbg_flag(hevc) &
11365 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11366 int wait_timeout_count = 0;
11367
11368 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11369 HEVC_CODED_SLICE_SEGMENT_DAT &&
11370 wait_timeout_count < 10){
11371 wait_timeout_count++;
11372 msleep(20);
11373 }
11374 }
11375 if (hevc->stat & STAT_VDEC_RUN) {
11376 amhevc_stop();
11377 hevc->stat &= ~STAT_VDEC_RUN;
11378 }
11379
11380 if (hevc->stat & STAT_ISR_REG) {
11381#ifdef MULTI_INSTANCE_SUPPORT
11382 if (!hevc->m_ins_flag)
11383#endif
11384 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11385 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11386 hevc->stat &= ~STAT_ISR_REG;
11387 }
11388
11389 hevc->stat &= ~STAT_TIMER_INIT;
11390 if (hevc->stat & STAT_TIMER_ARM) {
11391 del_timer_sync(&hevc->timer);
11392 hevc->stat &= ~STAT_TIMER_ARM;
11393 }
11394
11395 if (hevc->stat & STAT_VF_HOOK) {
11396 if (fr_hint_status == VDEC_HINTED) {
11397 vf_notify_receiver(hevc->provider_name,
11398 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11399 NULL);
11400 }
11401 fr_hint_status = VDEC_NO_NEED_HINT;
11402 vf_unreg_provider(&vh265_vf_prov);
11403 hevc->stat &= ~STAT_VF_HOOK;
11404 }
11405
11406 hevc_local_uninit(hevc);
11407
11408 if (use_cma) {
11409 hevc->uninit_list = 1;
11410 up(&h265_sema);
11411#ifdef USE_UNINIT_SEMA
11412 down(&hevc->h265_uninit_done_sema);
11413 if (!IS_ERR(h265_task)) {
11414 kthread_stop(h265_task);
11415 h265_task = NULL;
11416 }
11417#else
11418 while (hevc->uninit_list) /* wait uninit complete */
11419 msleep(20);
11420#endif
11421
11422 }
11423 hevc->init_flag = 0;
11424 hevc->first_sc_checked = 0;
11425 cancel_work_sync(&hevc->notify_work);
11426 cancel_work_sync(&hevc->set_clk_work);
11427 uninit_mmu_buffers(hevc);
11428 amhevc_disable();
11429
11430 kfree(gvs);
11431 gvs = NULL;
11432
11433 return 0;
11434}
11435
11436#ifdef MULTI_INSTANCE_SUPPORT
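/*
 * Descriptive note: decode-timeout bookkeeping used by
 * vh265_check_timer_func(). start_process_time marks when the current
 * decode was kicked off; decode_timeout_count allows a short grace
 * period while the LCU position is still advancing.
 */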
11437static void reset_process_time(struct hevc_state_s *hevc)
11438{
11439 if (hevc->start_process_time) {
11440 unsigned int process_time =
11441 1000 * (jiffies - hevc->start_process_time) / HZ;
11442 hevc->start_process_time = 0;
11443 if (process_time > max_process_time[hevc->index])
11444 max_process_time[hevc->index] = process_time;
11445 }
11446}
11447
11448static void start_process_time(struct hevc_state_s *hevc)
11449{
11450 hevc->start_process_time = jiffies;
11451 hevc->decode_timeout_count = 2;
11452 hevc->last_lcu_idx = 0;
11453}
11454
11455static void restart_process_time(struct hevc_state_s *hevc)
11456{
11457 hevc->start_process_time = jiffies;
11458 hevc->decode_timeout_count = 2;
11459}
11460
11461static void timeout_process(struct hevc_state_s *hevc)
11462{
11463	/*
11464	 * If vh265_work is already pending at this timeout point,
11465	 * let it handle the scenario.
11466	 */
11467 if (work_pending(&hevc->work))
11468 return;
11469
11470 hevc->timeout_num++;
11471 amhevc_stop();
11472 read_decode_info(hevc);
11473
11474 hevc_print(hevc,
11475 0, "%s decoder timeout\n", __func__);
11476 check_pic_decoded_error(hevc,
11477 hevc->pic_decoded_lcu_idx);
11478 hevc->decoded_poc = hevc->curr_POC;
11479 hevc->decoding_pic = NULL;
11480 hevc->dec_result = DEC_RESULT_DONE;
11481 reset_process_time(hevc);
11482
11483 if (work_pending(&hevc->work))
11484 return;
11485 vdec_schedule_work(&hevc->timeout_work);
11486}
11487
11488#ifdef CONSTRAIN_MAX_BUF_NUM
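/*
 * Buffer accounting helpers for the CONSTRAIN_MAX_BUF_NUM throttling in
 * run_ready(): get_vf_ref_only_buf_count() counts pictures held only by the
 * display path (output_ready, neither referenced nor waiting for output),
 * get_used_buf_count() counts every picture still busy for any reason.
 */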
11489static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11490{
11491 struct PIC_s *pic;
11492 int i;
11493 int count = 0;
11494 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11495 pic = hevc->m_PIC[i];
11496 if (pic == NULL || pic->index == -1)
11497 continue;
11498 if (pic->output_mark == 0 && pic->referenced == 0
11499 && pic->output_ready == 1)
11500 count++;
11501 }
11502
11503 return count;
11504}
11505
11506static int get_used_buf_count(struct hevc_state_s *hevc)
11507{
11508 struct PIC_s *pic;
11509 int i;
11510 int count = 0;
11511 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11512 pic = hevc->m_PIC[i];
11513 if (pic == NULL || pic->index == -1)
11514 continue;
11515 if (pic->output_mark != 0 || pic->referenced != 0
11516 || pic->output_ready != 0)
11517 count++;
11518 }
11519
11520 return count;
11521}
11522#endif
11523
11524
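/*
 * is_new_pic_available() - scan the DPB for a free picture buffer.
 * Returns 1 while the pic list is not yet initialized, or when a picture with
 * no output/reference/display owner exists.  In the non-v4l stream path it
 * also detects the "receiver inactive while the DPB is full of references"
 * deadlock: errored reference pictures are released and the remaining ones
 * are flagged as errored so decoding can continue.
 */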
11525static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11526{
11527 struct PIC_s *new_pic = NULL;
11528 struct PIC_s *pic;
11529 /* recycle un-used pic */
11530 int i;
11531 int ref_pic = 0;
11532 struct vdec_s *vdec = hw_to_vdec(hevc);
11533 /*return 1 if pic_list is not initialized yet*/
11534 if (hevc->pic_list_init_flag != 3)
11535 return 1;
11536
11537 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11538 pic = hevc->m_PIC[i];
11539 if (pic == NULL || pic->index == -1)
11540 continue;
11541 if (pic->referenced == 1)
11542 ref_pic++;
11543 if (pic->output_mark == 0 && pic->referenced == 0
11544 && pic->output_ready == 0
11545 ) {
11546 if (new_pic) {
11547 if (pic->POC < new_pic->POC)
11548 new_pic = pic;
11549 } else
11550 new_pic = pic;
11551 }
11552 }
11553/*If the number of reference frames in the DPB >= (the DPB buffer size - the number of reorders - 3)*/
11554/*and the back-end state is RECEIVER_INACTIVE, the decoder is left with no buffer to decode into.*/
11555/*In that case the errored reference frames are released and every picture is flagged as errored.*/
11556/*The 3 accounts for the 2 fields needed by the back-end display and the 1 field needed for decoding*/
11557/*when the stream is interlaced.*/
11558 if ((!hevc->is_used_v4l) && (new_pic == NULL) &&
11559 (ref_pic >=
11560 get_work_pic_num(hevc) -
11561 hevc->sps_num_reorder_pics_0 - 3)) {
11562 enum receviver_start_e state = RECEIVER_INACTIVE;
11563 if (vf_get_receiver(vdec->vf_provider_name)) {
11564 state =
11565 vf_notify_receiver(vdec->vf_provider_name,
11566 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11567 NULL);
11568 if ((state == RECEIVER_STATE_NULL)
11569 || (state == RECEIVER_STATE_NONE))
11570 state = RECEIVER_INACTIVE;
11571 }
11572 if (state == RECEIVER_INACTIVE) {
11573 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11574 pic = hevc->m_PIC[i];
11575 if (pic == NULL || pic->index == -1)
11576 continue;
11577
11578 if ((pic->referenced == 1) &&
11579 (pic->error_mark == 1)) {
11580 pic->referenced = 0;
11581 put_mv_buf(hevc, pic);
11582 }
11583 pic->error_mark = 1;
11584 }
11585 }
11586 }
11587
11588 return (new_pic != NULL) ? 1 : 0;
11589}
11590
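/*
 * vmh265_stop() - teardown path for the multi-instance decoder.  Unlike
 * vh265_stop() it releases the picture list through the work queue
 * (DEC_RESULT_FREE_CANVAS, handled in vh265_work_implement()).
 */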
11591static int vmh265_stop(struct hevc_state_s *hevc)
11592{
11593 if (hevc->stat & STAT_TIMER_ARM) {
11594 del_timer_sync(&hevc->timer);
11595 hevc->stat &= ~STAT_TIMER_ARM;
11596 }
11597 if (hevc->stat & STAT_VDEC_RUN) {
11598 amhevc_stop();
11599 hevc->stat &= ~STAT_VDEC_RUN;
11600 }
11601 if (hevc->stat & STAT_ISR_REG) {
11602 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11603 hevc->stat &= ~STAT_ISR_REG;
11604 }
11605
11606 if (hevc->stat & STAT_VF_HOOK) {
11607 if (fr_hint_status == VDEC_HINTED)
11608 vf_notify_receiver(hevc->provider_name,
11609 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11610 NULL);
11611 fr_hint_status = VDEC_NO_NEED_HINT;
11612 vf_unreg_provider(&vh265_vf_prov);
11613 hevc->stat &= ~STAT_VF_HOOK;
11614 }
11615
11616 hevc_local_uninit(hevc);
11617
11618 hevc->init_flag = 0;
11619 hevc->first_sc_checked = 0;
11620 cancel_work_sync(&hevc->notify_work);
11621 cancel_work_sync(&hevc->set_clk_work);
11622 cancel_work_sync(&hevc->timeout_work);
11623
11624 uninit_mmu_buffers(hevc);
11625
11626 if (use_cma) {
11627 hevc->uninit_list = 1;
11628 reset_process_time(hevc);
11629 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
11630 vdec_schedule_work(&hevc->work);
11631 flush_work(&hevc->work);
11632#ifdef USE_UNINIT_SEMA
11633 if (hevc->init_flag) {
11634 down(&hevc->h265_uninit_done_sema);
11635 }
11636#else
11637		while (hevc->uninit_list) /* wait for uninit to complete */
11638 msleep(20);
11639#endif
11640 }
11641 cancel_work_sync(&hevc->work);
11642
11643 vfree(hevc->fw);
11644 hevc->fw = NULL;
11645
11646 dump_log(hevc);
11647 return 0;
11648}
11649
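/*
 * get_data_check_sum() - byte-wise sum of the current input chunk, used for
 * debug logging only; the block is temporarily vmapped when it is not
 * already mapped.
 */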
11650static unsigned char get_data_check_sum
11651 (struct hevc_state_s *hevc, int size)
11652{
11653 int jj;
11654 int sum = 0;
11655 u8 *data = NULL;
11656
11657 if (!hevc->chunk->block->is_mapped)
11658 data = codec_mm_vmap(hevc->chunk->block->start +
11659 hevc->chunk->offset, size);
11660 else
11661 data = ((u8 *)hevc->chunk->block->start_virt) +
11662 hevc->chunk->offset;
11663
11664 for (jj = 0; jj < size; jj++)
11665 sum += data[jj];
11666
11667 if (!hevc->chunk->block->is_mapped)
11668 codec_mm_unmap_phyaddr(data);
11669 return sum;
11670}
11671
11672static void vh265_notify_work(struct work_struct *work)
11673{
11674 struct hevc_state_s *hevc =
11675 container_of(work,
11676 struct hevc_state_s,
11677 notify_work);
11678 struct vdec_s *vdec = hw_to_vdec(hevc);
11679#ifdef MULTI_INSTANCE_SUPPORT
11680 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
11681 vf_notify_receiver(hevc->provider_name,
11682 VFRAME_EVENT_PROVIDER_FR_HINT,
11683 (void *)
11684 ((unsigned long)hevc->frame_dur));
11685 vdec->fr_hint_state = VDEC_HINTED;
11686 } else if (fr_hint_status == VDEC_NEED_HINT) {
11687 vf_notify_receiver(hevc->provider_name,
11688 VFRAME_EVENT_PROVIDER_FR_HINT,
11689 (void *)
11690 ((unsigned long)hevc->frame_dur));
11691 fr_hint_status = VDEC_HINTED;
11692 }
11693#else
11694	if (fr_hint_status == VDEC_NEED_HINT) {
11695 vf_notify_receiver(PROVIDER_NAME,
11696 VFRAME_EVENT_PROVIDER_FR_HINT,
11697 (void *)
11698 ((unsigned long)hevc->frame_dur));
11699 fr_hint_status = VDEC_HINTED;
11700 }
11701#endif
11702
11703 return;
11704}
11705
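/*
 * vh265_work_implement() - bottom half shared by the normal work
 * (vh265_work) and the timeout work (vh265_timeout_work, from == 1).
 * Dispatches on hevc->dec_result: free canvases, fetch more input
 * (GET_DATA/GET_DATA_RETRY), finish a frame (DONE), retry later (AGAIN),
 * flush at EOS or force exit, then hands the HW cores back to the vdec core.
 */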
11706static void vh265_work_implement(struct hevc_state_s *hevc,
11707	struct vdec_s *vdec, int from)
11708{
11709 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
11710 /*USE_BUF_BLOCK*/
11711 uninit_pic_list(hevc);
11712 hevc_print(hevc, 0, "uninit list\n");
11713 hevc->uninit_list = 0;
11714#ifdef USE_UNINIT_SEMA
11715 up(&hevc->h265_uninit_done_sema);
11716#endif
11717 return;
11718 }
11719
11720 /* finished decoding one frame or error,
11721 * notify vdec core to switch context
11722 */
11723 if (hevc->pic_list_init_flag == 1
11724 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
11725 hevc->pic_list_init_flag = 2;
11726 init_pic_list(hevc);
11727 init_pic_list_hw(hevc);
11728 init_buf_spec(hevc);
11729 hevc_print(hevc, 0,
11730 "set pic_list_init_flag to 2\n");
11731
11732 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11733 return;
11734 }
11735
11736 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11737 "%s dec_result %d %x %x %x\n",
11738 __func__,
11739 hevc->dec_result,
11740 READ_VREG(HEVC_STREAM_LEVEL),
11741 READ_VREG(HEVC_STREAM_WR_PTR),
11742 READ_VREG(HEVC_STREAM_RD_PTR));
11743
11744 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
11745 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
11746 && (hw_to_vdec(hevc)->next_status !=
11747 VDEC_STATUS_DISCONNECTED)) {
11748 if (!vdec_has_more_input(vdec)) {
11749 hevc->dec_result = DEC_RESULT_EOS;
11750 vdec_schedule_work(&hevc->work);
11751 return;
11752 }
11753 if (!input_frame_based(vdec)) {
11754 int r = vdec_sync_input(vdec);
11755 if (r >= 0x200) {
11756 WRITE_VREG(HEVC_DECODE_SIZE,
11757 READ_VREG(HEVC_DECODE_SIZE) + r);
11758
11759 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11760 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
11761 __func__,
11762 READ_VREG(HEVC_STREAM_LEVEL),
11763 READ_VREG(HEVC_STREAM_WR_PTR),
11764 READ_VREG(HEVC_STREAM_RD_PTR),
11765 READ_VREG(HEVC_MPC_E), r);
11766
11767 start_process_time(hevc);
11768 if (READ_VREG(HEVC_DEC_STATUS_REG)
11769 == HEVC_DECODE_BUFEMPTY2)
11770 WRITE_VREG(HEVC_DEC_STATUS_REG,
11771 HEVC_ACTION_DONE);
11772 else
11773 WRITE_VREG(HEVC_DEC_STATUS_REG,
11774 HEVC_ACTION_DEC_CONT);
11775 } else {
11776 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11777 vdec_schedule_work(&hevc->work);
11778 }
11779 return;
11780 }
11781
11782 /*below for frame_base*/
11783 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
11784 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11785 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
11786 __func__,
11787 READ_VREG(HEVC_STREAM_LEVEL),
11788 READ_VREG(HEVC_STREAM_WR_PTR),
11789 READ_VREG(HEVC_STREAM_RD_PTR),
11790 READ_VREG(HEVC_MPC_E));
11791 mutex_lock(&hevc->chunks_mutex);
11792 vdec_vframe_dirty(vdec, hevc->chunk);
11793 hevc->chunk = NULL;
11794 mutex_unlock(&hevc->chunks_mutex);
11795 vdec_clean_input(vdec);
11796 }
11797
11798 /*if (is_new_pic_available(hevc)) {*/
11799 if (run_ready(vdec, VDEC_HEVC)) {
11800 int r;
11801 int decode_size;
11802 r = vdec_prepare_input(vdec, &hevc->chunk);
11803 if (r < 0) {
11804 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11805
11806 hevc_print(hevc,
11807 PRINT_FLAG_VDEC_DETAIL,
11808 "amvdec_vh265: Insufficient data\n");
11809
11810 vdec_schedule_work(&hevc->work);
11811 return;
11812 }
11813 hevc->dec_result = DEC_RESULT_NONE;
11814 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11815 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
11816 __func__, r,
11817 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
11818 get_data_check_sum(hevc, r) : 0,
11819 READ_VREG(HEVC_MPC_E));
11820
11821 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
11822 int jj;
11823 u8 *data = NULL;
11824
11825 if (!hevc->chunk->block->is_mapped)
11826 data = codec_mm_vmap(
11827 hevc->chunk->block->start +
11828 hevc->chunk->offset, r);
11829 else
11830 data = ((u8 *)
11831 hevc->chunk->block->start_virt)
11832 + hevc->chunk->offset;
11833
11834 for (jj = 0; jj < r; jj++) {
11835 if ((jj & 0xf) == 0)
11836 hevc_print(hevc,
11837 PRINT_FRAMEBASE_DATA,
11838 "%06x:", jj);
11839 hevc_print_cont(hevc,
11840 PRINT_FRAMEBASE_DATA,
11841 "%02x ", data[jj]);
11842 if (((jj + 1) & 0xf) == 0)
11843 hevc_print_cont(hevc,
11844 PRINT_FRAMEBASE_DATA,
11845 "\n");
11846 }
11847
11848 if (!hevc->chunk->block->is_mapped)
11849 codec_mm_unmap_phyaddr(data);
11850 }
11851
11852 decode_size = hevc->chunk->size +
11853 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
11854 WRITE_VREG(HEVC_DECODE_SIZE,
11855 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
11856
11857 vdec_enable_input(vdec);
11858
11859 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11860 "%s: mpc %x\n",
11861 __func__, READ_VREG(HEVC_MPC_E));
11862
11863 start_process_time(hevc);
11864 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11865	} else {
11866 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11867
11868 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11869 * "amvdec_vh265: Insufficient data\n");
11870 */
11871
11872 vdec_schedule_work(&hevc->work);
11873 }
11874 return;
11875 } else if (hevc->dec_result == DEC_RESULT_DONE) {
11876 /* if (!hevc->ctx_valid)
11877 hevc->ctx_valid = 1; */
11878 decode_frame_count[hevc->index]++;
11879#ifdef DETREFILL_ENABLE
11880 if (hevc->is_swap &&
11881 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11882 if (hevc->delrefill_check == 2) {
11883 delrefill(hevc);
11884 amhevc_stop();
11885 }
11886 }
11887#endif
11888 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
11889 hevc->used_4k_num =
11890 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
11891 if (hevc->used_4k_num >= 0 &&
11892 hevc->cur_pic &&
11893 hevc->cur_pic->scatter_alloc
11894 == 1) {
11895 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
11896 "%s pic index %d scatter_alloc %d page_start %d\n",
11897 "decoder_mmu_box_free_idx_tail",
11898 hevc->cur_pic->index,
11899 hevc->cur_pic->scatter_alloc,
11900 hevc->used_4k_num);
11901 if (hevc->m_ins_flag)
11902 hevc_mmu_dma_check(hw_to_vdec(hevc));
11903 decoder_mmu_box_free_idx_tail(
11904 hevc->mmu_box,
11905 hevc->cur_pic->index,
11906 hevc->used_4k_num);
11907 hevc->cur_pic->scatter_alloc = 2;
11908 }
11909 }
11910 hevc->pic_decoded_lcu_idx =
11911 READ_VREG(HEVC_PARSER_LCU_START)
11912 & 0xffffff;
11913
11914 if (vdec->master == NULL && vdec->slave == NULL &&
11915 hevc->empty_flag == 0) {
11916 hevc->over_decode =
11917 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
11918 if (hevc->over_decode)
11919 hevc_print(hevc, 0,
11920 "!!!Over decode\n");
11921 }
11922
11923 if (is_log_enable(hevc))
11924 add_log(hevc,
11925 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
11926 __func__,
11927 hevc->dec_result,
11928 hevc->pic_decoded_lcu_idx,
11929 hevc->used_4k_num,
11930 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11931 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11932 hevc->start_shift_bytes
11933 );
11934
11935 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11936 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
11937 __func__,
11938 hevc->dec_result,
11939 READ_VREG(HEVC_STREAM_LEVEL),
11940 READ_VREG(HEVC_STREAM_WR_PTR),
11941 READ_VREG(HEVC_STREAM_RD_PTR),
11942 hevc->pic_decoded_lcu_idx,
11943 hevc->used_4k_num,
11944 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11945 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11946 hevc->start_shift_bytes
11947 );
11948
11949 hevc->used_4k_num = -1;
11950
11951 check_pic_decoded_error(hevc,
11952 hevc->pic_decoded_lcu_idx);
11953#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11954#if 1
11955 if (vdec->slave) {
11956 if (dv_debug & 0x1)
11957 vdec_set_flag(vdec->slave,
11958 VDEC_FLAG_SELF_INPUT_CONTEXT);
11959 else
11960 vdec_set_flag(vdec->slave,
11961 VDEC_FLAG_OTHER_INPUT_CONTEXT);
11962 }
11963#else
11964 if (vdec->slave) {
11965 if (no_interleaved_el_slice)
11966 vdec_set_flag(vdec->slave,
11967 VDEC_FLAG_INPUT_KEEP_CONTEXT);
11968 /* this will move real HW pointer for input */
11969 else
11970 vdec_set_flag(vdec->slave, 0);
11971 /* this will not move real HW pointer
11972 *and SL layer decoding
11973 *will start from same stream position
11974 *as current BL decoder
11975 */
11976 }
11977#endif
11978#endif
11979#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11980 hevc->shift_byte_count_lo
11981 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11982 if (vdec->slave) {
11983 /*cur is base, found enhance*/
11984 struct hevc_state_s *hevc_el =
11985 (struct hevc_state_s *)
11986 vdec->slave->private;
11987 if (hevc_el)
11988 hevc_el->shift_byte_count_lo =
11989 hevc->shift_byte_count_lo;
11990 } else if (vdec->master) {
11991 /*cur is enhance, found base*/
11992 struct hevc_state_s *hevc_ba =
11993 (struct hevc_state_s *)
11994 vdec->master->private;
11995 if (hevc_ba)
11996 hevc_ba->shift_byte_count_lo =
11997 hevc->shift_byte_count_lo;
11998 }
11999#endif
12000 mutex_lock(&hevc->chunks_mutex);
12001 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12002 hevc->chunk = NULL;
12003 mutex_unlock(&hevc->chunks_mutex);
12004 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
12005 /*
12006		stream based: stream buffer empty or timeout
12007		frame based: vdec_prepare_input failed
12008 */
12009 if (!vdec_has_more_input(vdec)) {
12010 hevc->dec_result = DEC_RESULT_EOS;
12011 vdec_schedule_work(&hevc->work);
12012 return;
12013 }
12014#ifdef AGAIN_HAS_THRESHOLD
12015 hevc->next_again_flag = 1;
12016#endif
12017 } else if (hevc->dec_result == DEC_RESULT_EOS) {
12018 struct PIC_s *pic;
12019 hevc->eos = 1;
12020#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12021 if ((vdec->master || vdec->slave) &&
12022 aux_data_is_avaible(hevc))
12023 dolby_get_meta(hevc);
12024#endif
12025 check_pic_decoded_error(hevc,
12026 hevc->pic_decoded_lcu_idx);
12027 pic = get_pic_by_POC(hevc, hevc->curr_POC);
12028 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12029 "%s: end of stream, last dec poc %d => 0x%pf\n",
12030 __func__, hevc->curr_POC, pic);
12031 flush_output(hevc, pic);
12032
12033 if (hevc->is_used_v4l)
12034 notify_v4l_eos(hw_to_vdec(hevc));
12035#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12036 hevc->shift_byte_count_lo
12037 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12038 if (vdec->slave) {
12039 /*cur is base, found enhance*/
12040 struct hevc_state_s *hevc_el =
12041 (struct hevc_state_s *)
12042 vdec->slave->private;
12043 if (hevc_el)
12044 hevc_el->shift_byte_count_lo =
12045 hevc->shift_byte_count_lo;
12046 } else if (vdec->master) {
12047 /*cur is enhance, found base*/
12048 struct hevc_state_s *hevc_ba =
12049 (struct hevc_state_s *)
12050 vdec->master->private;
12051 if (hevc_ba)
12052 hevc_ba->shift_byte_count_lo =
12053 hevc->shift_byte_count_lo;
12054 }
12055#endif
12056 mutex_lock(&hevc->chunks_mutex);
12057 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12058 hevc->chunk = NULL;
12059 mutex_unlock(&hevc->chunks_mutex);
12060 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
12061 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12062 "%s: force exit\n",
12063 __func__);
12064 if (hevc->stat & STAT_VDEC_RUN) {
12065 amhevc_stop();
12066 hevc->stat &= ~STAT_VDEC_RUN;
12067 }
12068 if (hevc->stat & STAT_ISR_REG) {
12069 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
12070 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
12071 hevc->stat &= ~STAT_ISR_REG;
12072 }
12073 hevc_print(hevc, 0, "%s: force exit end\n",
12074 __func__);
12075 }
12076
12077 if (hevc->stat & STAT_VDEC_RUN) {
12078 amhevc_stop();
12079 hevc->stat &= ~STAT_VDEC_RUN;
12080 }
12081
12082 if (hevc->stat & STAT_TIMER_ARM) {
12083 del_timer_sync(&hevc->timer);
12084 hevc->stat &= ~STAT_TIMER_ARM;
12085 }
12086
12087 wait_hevc_search_done(hevc);
12088#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12089 if (hevc->switch_dvlayer_flag) {
12090 if (vdec->slave)
12091 vdec_set_next_sched(vdec, vdec->slave);
12092 else if (vdec->master)
12093 vdec_set_next_sched(vdec, vdec->master);
12094 } else if (vdec->slave || vdec->master)
12095 vdec_set_next_sched(vdec, vdec);
12096#endif
12097
12098 if (from == 1) {
12099 /* This is a timeout work */
12100 if (work_pending(&hevc->work)) {
12101 /*
12102			 * The vh265_work arrived at the last moment;
12103			 * give it a chance to handle the scenario.
12104 */
12105 return;
12106			//cancel_work_sync(&hevc->work);//reserved for future consideration
12107 }
12108 }
12109
12110 /* mark itself has all HW resource released and input released */
12111 if (vdec->parallel_dec == 1)
12112 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
12113 else
12114 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12115
12116 if (hevc->is_used_v4l) {
12117 struct aml_vcodec_ctx *ctx =
12118 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12119
12120 if (ctx->param_sets_from_ucode &&
12121 !hevc->v4l_params_parsed)
12122 vdec_v4l_write_frame_sync(ctx);
12123 }
12124
12125 if (hevc->vdec_cb)
12126 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
12127}
12128
12129static void vh265_work(struct work_struct *work)
12130{
12131 struct hevc_state_s *hevc = container_of(work,
12132 struct hevc_state_s, work);
12133 struct vdec_s *vdec = hw_to_vdec(hevc);
12134
12135 vh265_work_implement(hevc, vdec, 0);
12136}
12137
12138static void vh265_timeout_work(struct work_struct *work)
12139{
12140 struct hevc_state_s *hevc = container_of(work,
12141 struct hevc_state_s, timeout_work);
12142 struct vdec_s *vdec = hw_to_vdec(hevc);
12143
12144 if (work_pending(&hevc->work))
12145 return;
12146 vh265_work_implement(hevc, vdec, 1);
12147}
12148
12149
12150static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
12151{
12152	/* TODO: full hw context restore; for now this just re-runs prot init */
12153 vh265_prot_init(hevc);
12154 return 0;
12155}
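/*
 * run_ready() - scheduling gate called by the vdec core.  Returns the core
 * mask this instance may run on, or 0 to stay idle, based on stream buffer
 * level, the AGAIN threshold, display queue back-pressure, free DPB pictures
 * and, for v4l2, the number of queued capture buffers.
 */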
12156static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
12157{
12158 struct hevc_state_s *hevc =
12159 (struct hevc_state_s *)vdec->private;
12160 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
12161 CODEC_MM_FLAGS_TVP : 0;
12162 bool ret = 0;
12163 if (step == 0x12)
12164 return 0;
12165 else if (step == 0x11)
12166 step = 0x12;
12167
12168 if (hevc->eos)
12169 return 0;
12170 if (!hevc->first_sc_checked && hevc->mmu_enable) {
12171 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
12172		hevc->first_sc_checked = 1;
12173 hevc_print(hevc, 0,
12174 "vh265 cached=%d need_size=%d speed= %d ms\n",
12175 size, (hevc->need_cache_size >> PAGE_SHIFT),
12176 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
12177 }
12178 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
12179 && pre_decode_buf_level != 0) {
12180 u32 rp, wp, level;
12181
12182 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
12183 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
12184 if (wp < rp)
12185 level = vdec->input.size + wp - rp;
12186 else
12187 level = wp - rp;
12188
12189 if (level < pre_decode_buf_level)
12190 return 0;
12191 }
12192
12193#ifdef AGAIN_HAS_THRESHOLD
12194 if (hevc->next_again_flag &&
12195 (!vdec_frame_based(vdec))) {
12196 u32 parser_wr_ptr =
12197 READ_PARSER_REG(PARSER_VIDEO_WP);
12198 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
12199 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
12200 again_threshold) {
12201 int r = vdec_sync_input(vdec);
12202 hevc_print(hevc,
12203			PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
12204 return 0;
12205 }
12206 }
12207#endif
12208
12209 if (disp_vframe_valve_level &&
12210 kfifo_len(&hevc->display_q) >=
12211 disp_vframe_valve_level) {
12212 hevc->valve_count--;
12213 if (hevc->valve_count <= 0)
12214 hevc->valve_count = 2;
12215 else
12216 return 0;
12217 }
12218
12219 ret = is_new_pic_available(hevc);
12220 if (!ret) {
12221 hevc_print(hevc,
12222 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
12223 __func__, ret);
12224 }
12225
12226#ifdef CONSTRAIN_MAX_BUF_NUM
12227 if (hevc->pic_list_init_flag == 3) {
12228 if (run_ready_max_vf_only_num > 0 &&
12229 get_vf_ref_only_buf_count(hevc) >=
12230 run_ready_max_vf_only_num
12231 )
12232 ret = 0;
12233 if (run_ready_display_q_num > 0 &&
12234 kfifo_len(&hevc->display_q) >=
12235 run_ready_display_q_num)
12236 ret = 0;
12237
12238		/* avoid consuming more buffers when
12239		 * switching resolution */
12240 if (run_ready_max_buf_num == 0xff &&
12241 get_used_buf_count(hevc) >=
12242 get_work_pic_num(hevc))
12243 ret = 0;
12244 else if (run_ready_max_buf_num &&
12245 get_used_buf_count(hevc) >=
12246 run_ready_max_buf_num)
12247 ret = 0;
12248 }
12249#endif
12250
12251 if (hevc->is_used_v4l) {
12252 struct aml_vcodec_ctx *ctx =
12253 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12254
12255 if (ctx->param_sets_from_ucode &&
12256 !ctx->v4l_codec_ready &&
12257 hevc->v4l_params_parsed) {
12258			ret = 0; /* the params have been parsed. */
12259 } else if (!ctx->v4l_codec_dpb_ready) {
12260 if (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
12261 run_ready_min_buf_num)
12262 ret = 0;
12263 }
12264 }
12265
12266 if (ret)
12267 not_run_ready[hevc->index] = 0;
12268 else
12269 not_run_ready[hevc->index]++;
12270 if (vdec->parallel_dec == 1)
12271 return ret ? (CORE_MASK_HEVC) : 0;
12272 else
12273 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
12274}
12275
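/*
 * run() - invoked by the vdec core once run_ready() granted the HEVC core:
 * reset the core, prepare the next input chunk, (re)load the matching
 * firmware if needed, restore the HW context, program the decode size and
 * start decoding with the watchdog timer armed.
 */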
12276static void run(struct vdec_s *vdec, unsigned long mask,
12277 void (*callback)(struct vdec_s *, void *), void *arg)
12278{
12279 struct hevc_state_s *hevc =
12280 (struct hevc_state_s *)vdec->private;
12281 int r, loadr = 0;
12282 unsigned char check_sum = 0;
12283
12284 run_count[hevc->index]++;
12285 hevc->vdec_cb_arg = arg;
12286 hevc->vdec_cb = callback;
12287 hevc->aux_data_dirty = 1;
12288 hevc_reset_core(vdec);
12289
12290#ifdef AGAIN_HAS_THRESHOLD
12291 hevc->pre_parser_wr_ptr =
12292 READ_PARSER_REG(PARSER_VIDEO_WP);
12293 hevc->next_again_flag = 0;
12294#endif
12295 r = vdec_prepare_input(vdec, &hevc->chunk);
12296 if (r < 0) {
12297 input_empty[hevc->index]++;
12298 hevc->dec_result = DEC_RESULT_AGAIN;
12299 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12300 "ammvdec_vh265: Insufficient data\n");
12301
12302 vdec_schedule_work(&hevc->work);
12303 return;
12304 }
12305 input_empty[hevc->index] = 0;
12306 hevc->dec_result = DEC_RESULT_NONE;
12307 if (vdec_frame_based(vdec) &&
12308 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12309 || is_log_enable(hevc)))
12310 check_sum = get_data_check_sum(hevc, r);
12311
12312 if (is_log_enable(hevc))
12313 add_log(hevc,
12314 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12315 __func__, r,
12316 check_sum,
12317 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12318 );
12319 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12320 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12321 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12322 __func__, r,
12323 check_sum,
12324 READ_VREG(HEVC_STREAM_LEVEL),
12325 READ_VREG(HEVC_STREAM_WR_PTR),
12326 READ_VREG(HEVC_STREAM_RD_PTR),
12327 READ_PARSER_REG(PARSER_VIDEO_RP),
12328 READ_PARSER_REG(PARSER_VIDEO_WP),
12329 hevc->start_shift_bytes
12330 );
12331 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12332 input_frame_based(vdec)) {
12333 int jj;
12334 u8 *data = NULL;
12335
12336 if (!hevc->chunk->block->is_mapped)
12337 data = codec_mm_vmap(hevc->chunk->block->start +
12338 hevc->chunk->offset, r);
12339 else
12340 data = ((u8 *)hevc->chunk->block->start_virt)
12341 + hevc->chunk->offset;
12342
12343 for (jj = 0; jj < r; jj++) {
12344 if ((jj & 0xf) == 0)
12345 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12346 "%06x:", jj);
12347 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12348 "%02x ", data[jj]);
12349 if (((jj + 1) & 0xf) == 0)
12350 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12351 "\n");
12352 }
12353
12354 if (!hevc->chunk->block->is_mapped)
12355 codec_mm_unmap_phyaddr(data);
12356 }
12357 if (vdec->mc_loaded) {
12358		/* firmware has already been loaded
12359		 * and has not changed to another one,
12360		 * so skip reloading.
12361		 */
12362 if (tee_enabled() && hevc->is_swap &&
12363 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12364 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12365 } else {
12366 if (hevc->mmu_enable)
12367 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12368 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12369 "h265_mmu", hevc->fw->data);
12370 else {
12371 if (!hevc->is_4k) {
12372				/* If an older fw package is installed, fall */
12373				/* back to the no-swap fw, because the old */
12374				/* package does not contain the swap fw. */
12375 loadr = amhevc_vdec_loadmc_ex(
12376 VFORMAT_HEVC, vdec,
12377 "hevc_mmu_swap",
12378 hevc->fw->data);
12379 if (loadr < 0)
12380 loadr = amhevc_vdec_loadmc_ex(
12381 VFORMAT_HEVC, vdec,
12382 "h265_mmu",
12383 hevc->fw->data);
12384 else
12385 hevc->is_swap = true;
12386 } else
12387 loadr = amhevc_vdec_loadmc_ex(
12388 VFORMAT_HEVC, vdec,
12389 "h265_mmu", hevc->fw->data);
12390 }
12391 else
12392 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12393 NULL, hevc->fw->data);
12394 if (loadr < 0) {
12395 amhevc_disable();
12396 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12397 tee_enabled() ? "TEE" : "local", loadr);
12398 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12399 vdec_schedule_work(&hevc->work);
12400 return;
12401 }
12402
12403 if (tee_enabled() && hevc->is_swap &&
12404 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12405 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12406#ifdef DETREFILL_ENABLE
12407 if (hevc->is_swap &&
12408 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12409 init_detrefill_buf(hevc);
12410#endif
12411 vdec->mc_loaded = 1;
12412 vdec->mc_type = VFORMAT_HEVC;
12413 }
12414 if (vh265_hw_ctx_restore(hevc) < 0) {
12415 vdec_schedule_work(&hevc->work);
12416 return;
12417 }
12418 vdec_enable_input(vdec);
12419
12420 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12421
12422 if (vdec_frame_based(vdec)) {
12423 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12424 r = hevc->chunk->size +
12425 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12426 hevc->decode_size = r;
12427 }
12428#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12429 else {
12430 if (vdec->master || vdec->slave)
12431 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12432 hevc->shift_byte_count_lo);
12433 }
12434#endif
12435 WRITE_VREG(HEVC_DECODE_SIZE, r);
12436 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12437 hevc->init_flag = 1;
12438
12439 if (hevc->pic_list_init_flag == 3)
12440 init_pic_list_hw(hevc);
12441
12442 backup_decode_state(hevc);
12443
12444 start_process_time(hevc);
12445 mod_timer(&hevc->timer, jiffies);
12446 hevc->stat |= STAT_TIMER_ARM;
12447 hevc->stat |= STAT_ISR_REG;
12448 amhevc_start();
12449 hevc->stat |= STAT_VDEC_RUN;
12450}
12451
12452static void aml_free_canvas(struct vdec_s *vdec)
12453{
12454 int i;
12455 struct hevc_state_s *hevc =
12456 (struct hevc_state_s *)vdec->private;
12457
12458 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12459 struct PIC_s *pic = hevc->m_PIC[i];
12460
12461 if (pic) {
12462 if (vdec->parallel_dec == 1) {
12463 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
12464 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
12465 }
12466 }
12467 }
12468}
12469
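/*
 * reset() - vdec core reset hook: cancel pending work, stop the HEVC core and
 * timer, drop MV buffers and canvases, then re-run the local init so the
 * instance restarts from a clean state.
 */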
12470static void reset(struct vdec_s *vdec)
12471{
12472 struct hevc_state_s *hevc =
12473 (struct hevc_state_s *)vdec->private;
12474 int i;
12475
12476 cancel_work_sync(&hevc->work);
12477 cancel_work_sync(&hevc->notify_work);
12478 if (hevc->stat & STAT_VDEC_RUN) {
12479 amhevc_stop();
12480 hevc->stat &= ~STAT_VDEC_RUN;
12481 }
12482
12483 if (hevc->stat & STAT_TIMER_ARM) {
12484 del_timer_sync(&hevc->timer);
12485 hevc->stat &= ~STAT_TIMER_ARM;
12486 }
12487 hevc->dec_result = DEC_RESULT_NONE;
12488 reset_process_time(hevc);
12489 hevc->init_flag = 0;
12490 hevc->pic_list_init_flag = 0;
12491 dealloc_mv_bufs(hevc);
12492 aml_free_canvas(vdec);
12493 hevc_local_uninit(hevc);
12494 if (vh265_local_init(hevc) < 0)
12495 pr_debug(" %s local init fail\n", __func__);
12496 for (i = 0; i < BUF_POOL_SIZE; i++) {
12497 hevc->m_BUF[i].start_adr = 0;
12498 }
12499
12500 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12501}
12502
12503static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12504{
12505 struct hevc_state_s *hevc =
12506 (struct hevc_state_s *)vdec->private;
12507
12508 return vh265_isr(0, hevc);
12509}
12510
12511static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12512{
12513 struct hevc_state_s *hevc =
12514 (struct hevc_state_s *)vdec->private;
12515
12516 return vh265_isr_thread_fn(0, hevc);
12517}
12518#endif
12519
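/*
 * amvdec_h265_probe() - single-instance probe: allocates the global hevc
 * state, sets up the MMU/BMMU workspace (cleared for non-secure playback),
 * calls vh265_init() and requests the maximum clock for smooth 4K playback.
 */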
12520static int amvdec_h265_probe(struct platform_device *pdev)
12521{
12522#ifdef MULTI_INSTANCE_SUPPORT
12523 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12524#else
12525 struct vdec_dev_reg_s *pdata =
12526 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12527#endif
12528 char *tmpbuf;
12529 int ret;
12530 struct hevc_state_s *hevc;
12531
12532 hevc = vmalloc(sizeof(struct hevc_state_s));
12533 if (hevc == NULL) {
12534 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12535 return -ENOMEM;
12536 }
12537 gHevc = hevc;
12538 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12539 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12540 H265_DEBUG_DIS_SYS_ERROR_PROC));
12541 memset(hevc, 0, sizeof(struct hevc_state_s));
12542 if (get_dbg_flag(hevc))
12543 hevc_print(hevc, 0, "%s\r\n", __func__);
12544 mutex_lock(&vh265_mutex);
12545
12546 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12547 (parser_sei_enable & 0x100) == 0)
12548 parser_sei_enable = 7; /*old 1*/
12549 hevc->m_ins_flag = 0;
12550 hevc->init_flag = 0;
12551 hevc->first_sc_checked = 0;
12552 hevc->uninit_list = 0;
12553 hevc->fatal_error = 0;
12554 hevc->show_frame_num = 0;
12555 hevc->frameinfo_enable = 1;
12556#ifdef MULTI_INSTANCE_SUPPORT
12557 hevc->platform_dev = pdev;
12558 platform_set_drvdata(pdev, pdata);
12559#endif
12560
12561 if (pdata == NULL) {
12562 hevc_print(hevc, 0,
12563 "\namvdec_h265 memory resource undefined.\n");
12564 vfree(hevc);
12565 mutex_unlock(&vh265_mutex);
12566 return -EFAULT;
12567 }
12568 if (mmu_enable_force == 0) {
12569 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12570 || double_write_mode == 0x10)
12571 hevc->mmu_enable = 0;
12572 else
12573 hevc->mmu_enable = 1;
12574 }
12575 if (init_mmu_buffers(hevc)) {
12576 hevc_print(hevc, 0,
12577 "\n 265 mmu init failed!\n");
12578 vfree(hevc);
12579 mutex_unlock(&vh265_mutex);
12580 return -EFAULT;
12581 }
12582
12583 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12584 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12585 if (ret < 0) {
12586 uninit_mmu_buffers(hevc);
12587 vfree(hevc);
12588 mutex_unlock(&vh265_mutex);
12589 return ret;
12590 }
12591 hevc->buf_size = work_buf_size;
12592
12593
12594 if (!vdec_secure(pdata)) {
12595 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12596 if (tmpbuf) {
12597 memset(tmpbuf, 0, work_buf_size);
12598 dma_sync_single_for_device(amports_get_dma_device(),
12599 hevc->buf_start,
12600 work_buf_size, DMA_TO_DEVICE);
12601 } else {
12602 tmpbuf = codec_mm_vmap(hevc->buf_start,
12603 work_buf_size);
12604 if (tmpbuf) {
12605 memset(tmpbuf, 0, work_buf_size);
12606 dma_sync_single_for_device(
12607 amports_get_dma_device(),
12608 hevc->buf_start,
12609 work_buf_size,
12610 DMA_TO_DEVICE);
12611 codec_mm_unmap_phyaddr(tmpbuf);
12612 }
12613 }
12614 }
12615
12616 if (get_dbg_flag(hevc)) {
12617 hevc_print(hevc, 0,
12618 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12619 hevc->buf_start, hevc->buf_size);
12620 }
12621
12622 if (pdata->sys_info)
12623 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12624 else {
12625 hevc->vh265_amstream_dec_info.width = 0;
12626 hevc->vh265_amstream_dec_info.height = 0;
12627 hevc->vh265_amstream_dec_info.rate = 30;
12628 }
12629#ifndef MULTI_INSTANCE_SUPPORT
12630 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12631 workaround_enable |= 3;
12632 hevc_print(hevc, 0,
12633 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
12634 } else
12635 workaround_enable &= ~3;
12636#endif
12637 hevc->cma_dev = pdata->cma_dev;
12638 vh265_vdec_info_init();
12639
12640#ifdef MULTI_INSTANCE_SUPPORT
12641 pdata->private = hevc;
12642 pdata->dec_status = vh265_dec_status;
12643 pdata->set_isreset = vh265_set_isreset;
12644 is_reset = 0;
12645 if (vh265_init(pdata) < 0) {
12646#else
12647 if (vh265_init(hevc) < 0) {
12648#endif
12649 hevc_print(hevc, 0,
12650 "\namvdec_h265 init failed.\n");
12651 hevc_local_uninit(hevc);
12652 uninit_mmu_buffers(hevc);
12653 vfree(hevc);
12654 pdata->dec_status = NULL;
12655 mutex_unlock(&vh265_mutex);
12656 return -ENODEV;
12657 }
12658 /*set the max clk for smooth playing...*/
12659 hevc_source_changed(VFORMAT_HEVC,
12660 3840, 2160, 60);
12661 mutex_unlock(&vh265_mutex);
12662
12663 return 0;
12664}
12665
12666static int amvdec_h265_remove(struct platform_device *pdev)
12667{
12668 struct hevc_state_s *hevc = gHevc;
12669
12670 if (get_dbg_flag(hevc))
12671 hevc_print(hevc, 0, "%s\r\n", __func__);
12672
12673 mutex_lock(&vh265_mutex);
12674
12675 vh265_stop(hevc);
12676
12677 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
12678
12679
12680#ifdef DEBUG_PTS
12681 hevc_print(hevc, 0,
12682 "pts missed %ld, pts hit %ld, duration %d\n",
12683 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
12684#endif
12685
12686 vfree(hevc);
12687 hevc = NULL;
12688 gHevc = NULL;
12689
12690 mutex_unlock(&vh265_mutex);
12691
12692 return 0;
12693}
12694/****************************************/
12695#ifdef CONFIG_PM
12696static int h265_suspend(struct device *dev)
12697{
12698 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
12699 return 0;
12700}
12701
12702static int h265_resume(struct device *dev)
12703{
12704 amhevc_resume(to_platform_device(dev));
12705 return 0;
12706}
12707
12708static const struct dev_pm_ops h265_pm_ops = {
12709 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
12710};
12711#endif
12712
12713static struct platform_driver amvdec_h265_driver = {
12714 .probe = amvdec_h265_probe,
12715 .remove = amvdec_h265_remove,
12716 .driver = {
12717 .name = DRIVER_NAME,
12718#ifdef CONFIG_PM
12719 .pm = &h265_pm_ops,
12720#endif
12721 }
12722};
12723
12724#ifdef MULTI_INSTANCE_SUPPORT
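/*
 * vh265_dump_state() - debug hook: prints stream geometry, per-instance
 * counters, the buffer and MV pools, key HEVC/parser registers and, when
 * PRINT_FRAMEBASE_DATA is set, a hexdump of the current input chunk.
 */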
12725static void vh265_dump_state(struct vdec_s *vdec)
12726{
12727 int i;
12728 struct hevc_state_s *hevc =
12729 (struct hevc_state_s *)vdec->private;
12730 hevc_print(hevc, 0,
12731 "====== %s\n", __func__);
12732
12733 hevc_print(hevc, 0,
12734 "width/height (%d/%d), reorder_pic_num %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
12735 hevc->frame_width,
12736 hevc->frame_height,
12737 hevc->sps_num_reorder_pics_0,
12738 get_work_pic_num(hevc),
12739 hevc->video_signal_type_debug,
12740 hevc->is_swap
12741 );
12742
12743 hevc_print(hevc, 0,
12744 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
12745 input_frame_based(vdec),
12746 hevc->eos,
12747 hevc->dec_result,
12748 decode_frame_count[hevc->index],
12749 display_frame_count[hevc->index],
12750 run_count[hevc->index],
12751 not_run_ready[hevc->index],
12752 input_empty[hevc->index]
12753 );
12754
12755 if (vf_get_receiver(vdec->vf_provider_name)) {
12756 enum receviver_start_e state =
12757 vf_notify_receiver(vdec->vf_provider_name,
12758 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12759 NULL);
12760 hevc_print(hevc, 0,
12761 "\nreceiver(%s) state %d\n",
12762 vdec->vf_provider_name,
12763 state);
12764 }
12765
12766 hevc_print(hevc, 0,
12767 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
12768 __func__,
12769 kfifo_len(&hevc->newframe_q),
12770 VF_POOL_SIZE,
12771 kfifo_len(&hevc->display_q),
12772 VF_POOL_SIZE,
12773 hevc->vf_pre_count,
12774 hevc->vf_get_count,
12775 hevc->vf_put_count,
12776 hevc->pic_list_init_flag,
12777 is_new_pic_available(hevc)
12778 );
12779
12780 dump_pic_list(hevc);
12781
12782 for (i = 0; i < BUF_POOL_SIZE; i++) {
12783 hevc_print(hevc, 0,
12784 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12785 i,
12786 hevc->m_BUF[i].start_adr,
12787 hevc->m_BUF[i].size,
12788 hevc->m_BUF[i].used_flag);
12789 }
12790
12791 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12792 hevc_print(hevc, 0,
12793 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12794 i,
12795 hevc->m_mv_BUF[i].start_adr,
12796 hevc->m_mv_BUF[i].size,
12797 hevc->m_mv_BUF[i].used_flag);
12798 }
12799
12800 hevc_print(hevc, 0,
12801 "HEVC_DEC_STATUS_REG=0x%x\n",
12802 READ_VREG(HEVC_DEC_STATUS_REG));
12803 hevc_print(hevc, 0,
12804 "HEVC_MPC_E=0x%x\n",
12805 READ_VREG(HEVC_MPC_E));
12806 hevc_print(hevc, 0,
12807 "HEVC_DECODE_MODE=0x%x\n",
12808 READ_VREG(HEVC_DECODE_MODE));
12809 hevc_print(hevc, 0,
12810 "HEVC_DECODE_MODE2=0x%x\n",
12811 READ_VREG(HEVC_DECODE_MODE2));
12812 hevc_print(hevc, 0,
12813 "NAL_SEARCH_CTL=0x%x\n",
12814 READ_VREG(NAL_SEARCH_CTL));
12815 hevc_print(hevc, 0,
12816 "HEVC_PARSER_LCU_START=0x%x\n",
12817 READ_VREG(HEVC_PARSER_LCU_START));
12818 hevc_print(hevc, 0,
12819 "HEVC_DECODE_SIZE=0x%x\n",
12820 READ_VREG(HEVC_DECODE_SIZE));
12821 hevc_print(hevc, 0,
12822 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
12823 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
12824 hevc_print(hevc, 0,
12825 "HEVC_STREAM_START_ADDR=0x%x\n",
12826 READ_VREG(HEVC_STREAM_START_ADDR));
12827 hevc_print(hevc, 0,
12828 "HEVC_STREAM_END_ADDR=0x%x\n",
12829 READ_VREG(HEVC_STREAM_END_ADDR));
12830 hevc_print(hevc, 0,
12831 "HEVC_STREAM_LEVEL=0x%x\n",
12832 READ_VREG(HEVC_STREAM_LEVEL));
12833 hevc_print(hevc, 0,
12834 "HEVC_STREAM_WR_PTR=0x%x\n",
12835 READ_VREG(HEVC_STREAM_WR_PTR));
12836 hevc_print(hevc, 0,
12837 "HEVC_STREAM_RD_PTR=0x%x\n",
12838 READ_VREG(HEVC_STREAM_RD_PTR));
12839 hevc_print(hevc, 0,
12840 "PARSER_VIDEO_RP=0x%x\n",
12841 READ_PARSER_REG(PARSER_VIDEO_RP));
12842 hevc_print(hevc, 0,
12843 "PARSER_VIDEO_WP=0x%x\n",
12844 READ_PARSER_REG(PARSER_VIDEO_WP));
12845
12846 if (input_frame_based(vdec) &&
12847 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
12848 ) {
12849 int jj;
12850 if (hevc->chunk && hevc->chunk->block &&
12851 hevc->chunk->size > 0) {
12852 u8 *data = NULL;
12853 if (!hevc->chunk->block->is_mapped)
12854 data = codec_mm_vmap(hevc->chunk->block->start +
12855 hevc->chunk->offset, hevc->chunk->size);
12856 else
12857 data = ((u8 *)hevc->chunk->block->start_virt)
12858 + hevc->chunk->offset;
12859 hevc_print(hevc, 0,
12860 "frame data size 0x%x\n",
12861 hevc->chunk->size);
12862 for (jj = 0; jj < hevc->chunk->size; jj++) {
12863 if ((jj & 0xf) == 0)
12864 hevc_print(hevc,
12865 PRINT_FRAMEBASE_DATA,
12866 "%06x:", jj);
12867 hevc_print_cont(hevc,
12868 PRINT_FRAMEBASE_DATA,
12869 "%02x ", data[jj]);
12870 if (((jj + 1) & 0xf) == 0)
12871 hevc_print_cont(hevc,
12872 PRINT_FRAMEBASE_DATA,
12873 "\n");
12874 }
12875
12876 if (!hevc->chunk->block->is_mapped)
12877 codec_mm_unmap_phyaddr(data);
12878 }
12879 }
12880
12881}
12882
12883
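/*
 * ammvdec_h265_probe() - multi-instance probe: hooks this instance into the
 * vdec core (run/run_ready/reset/irq/dump_state), picks the vframe provider
 * name (VFM path, DolbyVision dual layer or per-instance), parses the
 * optional config string, allocates the BMMU workspace and runs vh265_init().
 */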
12884static int ammvdec_h265_probe(struct platform_device *pdev)
12885{
12886
12887 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12888 struct hevc_state_s *hevc = NULL;
12889 int ret;
12890#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12891 int config_val;
12892#endif
12893 if (pdata == NULL) {
12894 pr_info("\nammvdec_h265 memory resource undefined.\n");
12895 return -EFAULT;
12896 }
12897
12898 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
12899 sizeof(struct hevc_state_s), GFP_KERNEL); */
12900 hevc = vmalloc(sizeof(struct hevc_state_s));
12901 if (hevc == NULL) {
12902 pr_info("\nammvdec_h265 device data allocation failed\n");
12903 return -ENOMEM;
12904 }
12905 memset(hevc, 0, sizeof(struct hevc_state_s));
12906
12907 /* the ctx from v4l2 driver. */
12908 hevc->v4l2_ctx = pdata->private;
12909
12910 pdata->private = hevc;
12911 pdata->dec_status = vh265_dec_status;
12912 /* pdata->set_trickmode = set_trickmode; */
12913 pdata->run_ready = run_ready;
12914 pdata->run = run;
12915 pdata->reset = reset;
12916 pdata->irq_handler = vh265_irq_cb;
12917 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
12918 pdata->dump_state = vh265_dump_state;
12919
12920 hevc->index = pdev->id;
12921 hevc->m_ins_flag = 1;
12922
12923 if (pdata->use_vfm_path) {
12924 snprintf(pdata->vf_provider_name,
12925 VDEC_PROVIDER_NAME_SIZE,
12926 VFM_DEC_PROVIDER_NAME);
12927 hevc->frameinfo_enable = 1;
12928 }
12929#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12930 else if (vdec_dual(pdata)) {
12931 struct hevc_state_s *hevc_pair = NULL;
12932
12933 if (dv_toggle_prov_name) /*debug purpose*/
12934 snprintf(pdata->vf_provider_name,
12935 VDEC_PROVIDER_NAME_SIZE,
12936 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
12937 VFM_DEC_DVEL_PROVIDER_NAME);
12938 else
12939 snprintf(pdata->vf_provider_name,
12940 VDEC_PROVIDER_NAME_SIZE,
12941 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
12942 VFM_DEC_DVBL_PROVIDER_NAME);
12943 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
12944 if (pdata->master)
12945 hevc_pair = (struct hevc_state_s *)
12946 pdata->master->private;
12947 else if (pdata->slave)
12948 hevc_pair = (struct hevc_state_s *)
12949 pdata->slave->private;
12950 if (hevc_pair)
12951 hevc->shift_byte_count_lo =
12952 hevc_pair->shift_byte_count_lo;
12953 }
12954#endif
12955 else
12956 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
12957 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
12958
12959 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
12960 &vh265_vf_provider, pdata);
12961
12962 hevc->provider_name = pdata->vf_provider_name;
12963 platform_set_drvdata(pdev, pdata);
12964
12965 hevc->platform_dev = pdev;
12966
12967 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
12968 pdata->config && pdata->config_len) {
12969#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12970		/* use ptr config for double_write_mode, etc. */
12971 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
12972
12973 if (get_config_int(pdata->config, "hevc_double_write_mode",
12974 &config_val) == 0)
12975 hevc->double_write_mode = config_val;
12976 else
12977 hevc->double_write_mode = double_write_mode;
12978
12979 if (get_config_int(pdata->config, "save_buffer_mode",
12980 &config_val) == 0)
12981 hevc->save_buffer_mode = config_val;
12982 else
12983 hevc->save_buffer_mode = 0;
12984
12985 /*use ptr config for max_pic_w, etc*/
12986 if (get_config_int(pdata->config, "hevc_buf_width",
12987 &config_val) == 0) {
12988 hevc->max_pic_w = config_val;
12989 }
12990 if (get_config_int(pdata->config, "hevc_buf_height",
12991 &config_val) == 0) {
12992 hevc->max_pic_h = config_val;
12993 }
12994
12995 if (get_config_int(pdata->config,
12996 "parm_v4l_codec_enable",
12997 &config_val) == 0)
12998 hevc->is_used_v4l = config_val;
12999
13000 if (get_config_int(pdata->config,
13001 "parm_v4l_buffer_margin",
13002 &config_val) == 0)
13003 hevc->dynamic_buf_num_margin = config_val;
13004
13005 if (get_config_int(pdata->config,
13006 "parm_v4l_canvas_mem_mode",
13007 &config_val) == 0)
13008 hevc->mem_map_mode = config_val;
13009#endif
13010 } else {
13011 if (pdata->sys_info)
13012 hevc->vh265_amstream_dec_info = *pdata->sys_info;
13013 else {
13014 hevc->vh265_amstream_dec_info.width = 0;
13015 hevc->vh265_amstream_dec_info.height = 0;
13016 hevc->vh265_amstream_dec_info.rate = 30;
13017 }
13018 hevc->double_write_mode = double_write_mode;
13019 }
13020 if (!hevc->is_used_v4l) {
13021 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
13022			hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
13023 else
13024 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
13025
13026 hevc->mem_map_mode = mem_map_mode;
13027 }
13028
13029 if (mmu_enable_force == 0) {
13030 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
13031 hevc->mmu_enable = 0;
13032 else
13033 hevc->mmu_enable = 1;
13034 }
13035
13036 if (init_mmu_buffers(hevc) < 0) {
13037 hevc_print(hevc, 0,
13038 "\n 265 mmu init failed!\n");
13039 mutex_unlock(&vh265_mutex);
13040 /* devm_kfree(&pdev->dev, (void *)hevc);*/
13041 if (hevc)
13042 vfree((void *)hevc);
13043 pdata->dec_status = NULL;
13044 return -EFAULT;
13045 }
13046#if 0
13047 hevc->buf_start = pdata->mem_start;
13048 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
13049#else
13050
13051 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
13052 BMMU_WORKSPACE_ID, work_buf_size,
13053 DRIVER_NAME, &hevc->buf_start);
13054 if (ret < 0) {
13055 uninit_mmu_buffers(hevc);
13056 /* devm_kfree(&pdev->dev, (void *)hevc); */
13057 if (hevc)
13058 vfree((void *)hevc);
13059 pdata->dec_status = NULL;
13060 mutex_unlock(&vh265_mutex);
13061 return ret;
13062 }
13063 hevc->buf_size = work_buf_size;
13064#endif
13065 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
13066 (parser_sei_enable & 0x100) == 0)
13067 parser_sei_enable = 7;
13068 hevc->init_flag = 0;
13069 hevc->first_sc_checked = 0;
13070 hevc->uninit_list = 0;
13071 hevc->fatal_error = 0;
13072 hevc->show_frame_num = 0;
13073
13074 /*
13075 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
13076 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
13077 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
13078 */
13079 if (get_dbg_flag(hevc)) {
13080 hevc_print(hevc, 0,
13081 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
13082 hevc->buf_start, hevc->buf_size);
13083 }
13084
13085 hevc_print(hevc, 0,
13086 "dynamic_buf_num_margin=%d\n",
13087 hevc->dynamic_buf_num_margin);
13088 hevc_print(hevc, 0,
13089 "double_write_mode=%d\n",
13090 hevc->double_write_mode);
13091
13092 hevc->cma_dev = pdata->cma_dev;
13093
13094 if (vh265_init(pdata) < 0) {
13095 hevc_print(hevc, 0,
13096 "\namvdec_h265 init failed.\n");
13097 hevc_local_uninit(hevc);
13098 uninit_mmu_buffers(hevc);
13099 /* devm_kfree(&pdev->dev, (void *)hevc); */
13100 if (hevc)
13101 vfree((void *)hevc);
13102 pdata->dec_status = NULL;
13103 return -ENODEV;
13104 }
13105
13106 vdec_set_prepare_level(pdata, start_decode_buf_level);
13107
13108 /*set the max clk for smooth playing...*/
13109 hevc_source_changed(VFORMAT_HEVC,
13110 3840, 2160, 60);
13111 if (pdata->parallel_dec == 1)
13112 vdec_core_request(pdata, CORE_MASK_HEVC);
13113 else
13114 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
13115 | CORE_MASK_COMBINE);
13116
13117 return 0;
13118}
13119
13120static int ammvdec_h265_remove(struct platform_device *pdev)
13121{
13122 struct hevc_state_s *hevc =
13123 (struct hevc_state_s *)
13124 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
13125 struct vdec_s *vdec = hw_to_vdec(hevc);
13126
13127 if (hevc == NULL)
13128 return 0;
13129
13130 if (get_dbg_flag(hevc))
13131 hevc_print(hevc, 0, "%s\r\n", __func__);
13132
13133 vmh265_stop(hevc);
13134
13135 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
13136 if (vdec->parallel_dec == 1)
13137 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
13138 else
13139		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
13140
13141 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
13142
13143 vfree((void *)hevc);
13144 return 0;
13145}
13146
13147static struct platform_driver ammvdec_h265_driver = {
13148 .probe = ammvdec_h265_probe,
13149 .remove = ammvdec_h265_remove,
13150 .driver = {
13151 .name = MULTI_DRIVER_NAME,
13152#ifdef CONFIG_PM
13153 .pm = &h265_pm_ops,
13154#endif
13155 }
13156};
13157#endif
13158
13159static struct codec_profile_t amvdec_h265_profile = {
13160 .name = "hevc",
13161 .profile = ""
13162};
13163
13164static struct codec_profile_t amvdec_h265_profile_single,
13165 amvdec_h265_profile_mult;
13166
13167static struct mconfig h265_configs[] = {
13168 MC_PU32("use_cma", &use_cma),
13169 MC_PU32("bit_depth_luma", &bit_depth_luma),
13170 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
13171 MC_PU32("video_signal_type", &video_signal_type),
13172#ifdef ERROR_HANDLE_DEBUG
13173 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
13174 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
13175#endif
13176 MC_PU32("radr", &radr),
13177 MC_PU32("rval", &rval),
13178 MC_PU32("dbg_cmd", &dbg_cmd),
13179 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
13180 MC_PU32("endian", &endian),
13181 MC_PU32("step", &step),
13182 MC_PU32("udebug_flag", &udebug_flag),
13183 MC_PU32("decode_pic_begin", &decode_pic_begin),
13184 MC_PU32("slice_parse_begin", &slice_parse_begin),
13185 MC_PU32("nal_skip_policy", &nal_skip_policy),
13186 MC_PU32("i_only_flag", &i_only_flag),
13187 MC_PU32("error_handle_policy", &error_handle_policy),
13188 MC_PU32("error_handle_threshold", &error_handle_threshold),
13189 MC_PU32("error_handle_nal_skip_threshold",
13190 &error_handle_nal_skip_threshold),
13191 MC_PU32("error_handle_system_threshold",
13192 &error_handle_system_threshold),
13193 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
13194 MC_PU32("debug", &debug),
13195 MC_PU32("debug_mask", &debug_mask),
13196 MC_PU32("buffer_mode", &buffer_mode),
13197 MC_PU32("double_write_mode", &double_write_mode),
13198 MC_PU32("buf_alloc_width", &buf_alloc_width),
13199 MC_PU32("buf_alloc_height", &buf_alloc_height),
13200 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
13201 MC_PU32("max_buf_num", &max_buf_num),
13202 MC_PU32("buf_alloc_size", &buf_alloc_size),
13203 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
13204 MC_PU32("mem_map_mode", &mem_map_mode),
13205 MC_PU32("enable_mem_saving", &enable_mem_saving),
13206 MC_PU32("force_w_h", &force_w_h),
13207 MC_PU32("force_fps", &force_fps),
13208 MC_PU32("max_decoding_time", &max_decoding_time),
13209 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
13210 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
13211 MC_PU32("interlace_enable", &interlace_enable),
13212 MC_PU32("pts_unstable", &pts_unstable),
13213 MC_PU32("parser_sei_enable", &parser_sei_enable),
13214 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
13215 MC_PU32("decode_timeout_val", &decode_timeout_val),
13216#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13217 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
13218 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
13219 MC_PU32("dv_debug", &dv_debug),
13220#endif
13221};
13222static struct mconfig_node decoder_265_node;
13223
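/*
 * Module init: picks the workspace spec for the detected chip, registers the
 * single- and multi-instance platform drivers and publishes the "hevc",
 * "h265" and "mh265" codec profiles plus the media.decoder config node.
 */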
13224static int __init amvdec_h265_driver_init_module(void)
13225{
13226 struct BuffInfo_s *p_buf_info;
13227
13228 if (vdec_is_support_4k()) {
13229 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
13230 p_buf_info = &amvh265_workbuff_spec[2];
13231 else
13232 p_buf_info = &amvh265_workbuff_spec[1];
13233 } else
13234 p_buf_info = &amvh265_workbuff_spec[0];
13235
13236 init_buff_spec(NULL, p_buf_info);
13237 work_buf_size =
13238 (p_buf_info->end_adr - p_buf_info->start_adr
13239 + 0xffff) & (~0xffff);
13240
13241 pr_debug("amvdec_h265 module init\n");
13242 error_handle_policy = 0;
13243
13244#ifdef ERROR_HANDLE_DEBUG
13245 dbg_nal_skip_flag = 0;
13246 dbg_nal_skip_count = 0;
13247#endif
13248 udebug_flag = 0;
13249 decode_pic_begin = 0;
13250 slice_parse_begin = 0;
13251 step = 0;
13252 buf_alloc_size = 0;
13253
13254#ifdef MULTI_INSTANCE_SUPPORT
13255 if (platform_driver_register(&ammvdec_h265_driver))
13256 pr_err("failed to register ammvdec_h265 driver\n");
13257
13258#endif
13259 if (platform_driver_register(&amvdec_h265_driver)) {
13260 pr_err("failed to register amvdec_h265 driver\n");
13261 return -ENODEV;
13262 }
13263#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
13264 if (!has_hevc_vdec()) {
13265 /* not support hevc */
13266 amvdec_h265_profile.name = "hevc_unsupport";
13267 }
13268 if (vdec_is_support_4k()) {
13269 if (is_meson_m8m2_cpu()) {
13270 /* m8m2 support 4k */
13271 amvdec_h265_profile.profile = "4k";
13272 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
13273 amvdec_h265_profile.profile =
13274 "8k, 8bit, 10bit, dwrite, compressed";
13275		} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
13276 amvdec_h265_profile.profile =
13277 "4k, 8bit, 10bit, dwrite, compressed";
13278 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
13279 amvdec_h265_profile.profile = "4k";
13280 }
13281#endif
13282 if (codec_mm_get_total_size() < 80 * SZ_1M) {
13283 pr_info("amvdec_h265 default mmu enabled.\n");
13284 mmu_enable = 1;
13285 }
13286
13287 vcodec_profile_register(&amvdec_h265_profile);
13288 amvdec_h265_profile_single = amvdec_h265_profile;
13289 amvdec_h265_profile_single.name = "h265";
13290 vcodec_profile_register(&amvdec_h265_profile_single);
13291 amvdec_h265_profile_mult = amvdec_h265_profile;
13292 amvdec_h265_profile_mult.name = "mh265";
13293 vcodec_profile_register(&amvdec_h265_profile_mult);
13294 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
13295 "h265", h265_configs, CONFIG_FOR_RW);
13296 return 0;
13297}
13298
13299static void __exit amvdec_h265_driver_remove_module(void)
13300{
13301 pr_debug("amvdec_h265 module remove.\n");
13302
13303#ifdef MULTI_INSTANCE_SUPPORT
13304 platform_driver_unregister(&ammvdec_h265_driver);
13305#endif
13306 platform_driver_unregister(&amvdec_h265_driver);
13307}
13308
13309/****************************************/
13310/*
13311 *module_param(stat, uint, 0664);
13312 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
13313 */
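/*
 * Note: with 0664 permissions the parameters below are also writable at
 * runtime through /sys/module/<module name>/parameters/<param> (for example
 * /sys/module/amvdec_h265/parameters/debug, assuming the module is built as
 * amvdec_h265.ko); the exact module name depends on the build.
 */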
13314module_param(use_cma, uint, 0664);
13315MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13316
13317module_param(bit_depth_luma, uint, 0664);
13318MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13319
13320module_param(bit_depth_chroma, uint, 0664);
13321MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13322
13323module_param(video_signal_type, uint, 0664);
13324MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13325
13326#ifdef ERROR_HANDLE_DEBUG
13327module_param(dbg_nal_skip_flag, uint, 0664);
13328MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13329
13330module_param(dbg_nal_skip_count, uint, 0664);
13331MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13332#endif
13333
13334module_param(radr, uint, 0664);
13335MODULE_PARM_DESC(radr, "\n radr\n");
13336
13337module_param(rval, uint, 0664);
13338MODULE_PARM_DESC(rval, "\n rval\n");
13339
13340module_param(dbg_cmd, uint, 0664);
13341MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13342
13343module_param(dump_nal, uint, 0664);
13344MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13345
13346module_param(dbg_skip_decode_index, uint, 0664);
13347MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13348
13349module_param(endian, uint, 0664);
13350MODULE_PARM_DESC(endian, "\n endian\n");
13351
13352module_param(step, uint, 0664);
13353MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13354
13355module_param(decode_pic_begin, uint, 0664);
13356MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13357
13358module_param(slice_parse_begin, uint, 0664);
13359MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13360
13361module_param(nal_skip_policy, uint, 0664);
13362MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
13363
13364module_param(i_only_flag, uint, 0664);
13365MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
13366
13367module_param(fast_output_enable, uint, 0664);
13368MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
13369
13370module_param(error_handle_policy, uint, 0664);
13371MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
13372
13373module_param(error_handle_threshold, uint, 0664);
13374MODULE_PARM_DESC(error_handle_threshold,
13375 "\n amvdec_h265 error_handle_threshold\n");
13376
13377module_param(error_handle_nal_skip_threshold, uint, 0664);
13378MODULE_PARM_DESC(error_handle_nal_skip_threshold,
13379 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
13380
13381module_param(error_handle_system_threshold, uint, 0664);
13382MODULE_PARM_DESC(error_handle_system_threshold,
13383 "\n amvdec_h265 error_handle_system_threshold\n");
13384
13385module_param(error_skip_nal_count, uint, 0664);
13386MODULE_PARM_DESC(error_skip_nal_count,
13387 "\n amvdec_h265 error_skip_nal_count\n");
13388
13389module_param(debug, uint, 0664);
13390MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
13391
13392module_param(debug_mask, uint, 0664);
13393MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
13394
13395module_param(log_mask, uint, 0664);
13396MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
13397
13398module_param(buffer_mode, uint, 0664);
13399MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
13400
13401module_param(double_write_mode, uint, 0664);
13402MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
13403
13404module_param(buf_alloc_width, uint, 0664);
13405MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
13406
13407module_param(buf_alloc_height, uint, 0664);
13408MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
13409
13410module_param(dynamic_buf_num_margin, uint, 0664);
13411MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
13412
13413module_param(max_buf_num, uint, 0664);
13414MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
13415
13416module_param(buf_alloc_size, uint, 0664);
13417MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
13418
13419#ifdef CONSTRAIN_MAX_BUF_NUM
13420module_param(run_ready_max_vf_only_num, uint, 0664);
13421MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");
13422
13423module_param(run_ready_display_q_num, uint, 0664);
13424MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");
13425
13426module_param(run_ready_max_buf_num, uint, 0664);
13427MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
13428#endif
13429
13430#if 0
13431module_param(re_config_pic_flag, uint, 0664);
13432MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
13433#endif
13434
13435module_param(buffer_mode_dbg, uint, 0664);
13436MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");
13437
13438module_param(mem_map_mode, uint, 0664);
13439MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");
13440
13441module_param(enable_mem_saving, uint, 0664);
13442MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");
13443
13444module_param(force_w_h, uint, 0664);
13445MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
13446
13447module_param(force_fps, uint, 0664);
13448MODULE_PARM_DESC(force_fps, "\n force_fps\n");
13449
13450module_param(max_decoding_time, uint, 0664);
13451MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");
13452
13453module_param(prefix_aux_buf_size, uint, 0664);
13454MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");
13455
13456module_param(suffix_aux_buf_size, uint, 0664);
13457MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");
13458
13459module_param(interlace_enable, uint, 0664);
13460MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
13461module_param(pts_unstable, uint, 0664);
13462MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
13463module_param(parser_sei_enable, uint, 0664);
13464MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");
13465
13466#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13467module_param(parser_dolby_vision_enable, uint, 0664);
13468MODULE_PARM_DESC(parser_dolby_vision_enable,
13469 "\n parser_dolby_vision_enable\n");
13470
13471module_param(dolby_meta_with_el, uint, 0664);
13472MODULE_PARM_DESC(dolby_meta_with_el,
13473 "\n dolby_meta_with_el\n");
13474
13475module_param(dolby_el_flush_th, uint, 0664);
13476MODULE_PARM_DESC(dolby_el_flush_th,
13477 "\n dolby_el_flush_th\n");
13478#endif
13479module_param(mmu_enable, uint, 0664);
13480MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");
13481
13482module_param(mmu_enable_force, uint, 0664);
13483MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");
13484
13485#ifdef MULTI_INSTANCE_SUPPORT
13486module_param(start_decode_buf_level, int, 0664);
13487MODULE_PARM_DESC(start_decode_buf_level,
13488 "\n h265 start_decode_buf_level\n");
13489
13490module_param(decode_timeout_val, uint, 0664);
13491MODULE_PARM_DESC(decode_timeout_val,
13492 "\n h265 decode_timeout_val\n");
13493
13494module_param(data_resend_policy, uint, 0664);
13495MODULE_PARM_DESC(data_resend_policy,
13496 "\n h265 data_resend_policy\n");
13497
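/* Per-instance statistics arrays, sized by max_decode_instance_num. */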
13498module_param_array(decode_frame_count, uint,
13499 &max_decode_instance_num, 0664);
13500
13501module_param_array(display_frame_count, uint,
13502 &max_decode_instance_num, 0664);
13503
13504module_param_array(max_process_time, uint,
13505 &max_decode_instance_num, 0664);
13506
13507module_param_array(max_get_frame_interval,
13508 uint, &max_decode_instance_num, 0664);
13509
13510module_param_array(run_count, uint,
13511 &max_decode_instance_num, 0664);
13512
13513module_param_array(input_empty, uint,
13514 &max_decode_instance_num, 0664);
13515
13516module_param_array(not_run_ready, uint,
13517 &max_decode_instance_num, 0664);
13518
13519module_param_array(ref_frame_mark_flag, uint,
13520 &max_decode_instance_num, 0664);
13521
13522#endif
13523#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13524module_param(dv_toggle_prov_name, uint, 0664);
13525MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");
13526
13527module_param(dv_debug, uint, 0664);
13528MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");
13529
13530module_param(force_bypass_dvenl, uint, 0664);
13531MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
13532#endif
13533
13534#ifdef AGAIN_HAS_THRESHOLD
13535module_param(again_threshold, uint, 0664);
13536MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
13537#endif
13538
13539module_param(force_disp_pic_index, int, 0664);
13540MODULE_PARM_DESC(force_disp_pic_index,
13541 "\n amvdec_h265 force_disp_pic_index\n");
13542
13543module_param(frmbase_cont_bitlevel, uint, 0664);
13544MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");
13545
13546module_param(udebug_flag, uint, 0664);
13547MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
13548
13549module_param(udebug_pause_pos, uint, 0664);
13550MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
13551
13552module_param(udebug_pause_val, uint, 0664);
13553MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
13554
13555module_param(pre_decode_buf_level, int, 0664);
13556MODULE_PARM_DESC(pre_decode_buf_level, "\n ammvdec_h265 pre_decode_buf_level\n");
13557
13558module_param(udebug_pause_decode_idx, uint, 0664);
13559MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
13560
13561module_param(disp_vframe_valve_level, uint, 0664);
13562MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");
13563
13564module_param(pic_list_debug, uint, 0664);
13565MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");
13566
13567module_param(without_display_mode, uint, 0664);
13568MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");
13569
13570module_init(amvdec_h265_driver_init_module);
13571module_exit(amvdec_h265_driver_remove_module);
13572
13573MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
13574MODULE_LICENSE("GPL");
13575MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");
13576