path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
blob: fe582a99584f8ed9997330491aafc6585b4cc213
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50#include <media/v4l2-mem2mem.h>
51
52#define CONSTRAIN_MAX_BUF_NUM
53
54#define SWAP_HEVC_UCODE
55#define DETREFILL_ENABLE
56
57#define AGAIN_HAS_THRESHOLD
58/*#define TEST_NO_BUF*/
59#define HEVC_PIC_STRUCT_SUPPORT
60#define MULTI_INSTANCE_SUPPORT
61#define USE_UNINIT_SEMA
62
63 /* .buf_size = 0x100000*16,
64 //4k2k , 0x100000 per buffer */
65 /* 4096x2304 , 0x120000 per buffer */
66#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
67#define MPRED_4K_MV_BUF_SIZE (0x120000)
68#define MPRED_MV_BUF_SIZE (0x40000)
69
70#define MMU_COMPRESS_HEADER_SIZE 0x48000
71#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
72
73#define MAX_FRAME_4K_NUM 0x1200
74#define MAX_FRAME_8K_NUM (0x1200*4)
75
76//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
77#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
78
79#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
80
81#define HEVC_CM_HEADER_START_ADDR 0x3628
82#define HEVC_SAO_MMU_VH1_ADDR 0x363b
83#define HEVC_SAO_MMU_VH0_ADDR 0x363a
84
85#define HEVC_DBLK_CFGB 0x350b
86#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
87#define SWAP_HEVC_OFFSET (3 * 0x1000)
88
89#define MEM_NAME "codec_265"
90/* #include <mach/am_regs.h> */
91#include <linux/amlogic/media/utils/vdec_reg.h>
92
93#include "../utils/vdec.h"
94#include "../utils/amvdec.h"
95#include <linux/amlogic/media/video_sink/video.h>
96#include <linux/amlogic/media/codec_mm/configs.h>
97
98#define SEND_LMEM_WITH_RPM
99#define SUPPORT_10BIT
100/* #define ERROR_HANDLE_DEBUG */
101
102#ifndef STAT_KTHREAD
103#define STAT_KTHREAD 0x40
104#endif
105
106#ifdef MULTI_INSTANCE_SUPPORT
107#define MAX_DECODE_INSTANCE_NUM 9
108#define MULTI_DRIVER_NAME "ammvdec_h265"
109#endif
110#define DRIVER_NAME "amvdec_h265"
111#define MODULE_NAME "amvdec_h265"
112#define DRIVER_HEADER_NAME "amvdec_h265_header"
113
114#define PUT_INTERVAL (HZ/100)
115#define ERROR_SYSTEM_RESET_COUNT 200
116
117#define PTS_NORMAL 0
118#define PTS_NONE_REF_USE_DURATION 1
119
120#define PTS_MODE_SWITCHING_THRESHOLD 3
121#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
122
123#define DUR2PTS(x) ((x)*90/96)
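/*
 * Worked example (illustrative): frame durations in this driver are
 * counted in 1/96000 s units while PTS runs at 90 kHz, hence the 90/96
 * scaling above. For a 25 fps stream, frame_dur = 96000 / 25 = 3840 and
 * DUR2PTS(3840) = 3840 * 90 / 96 = 3600, i.e. 90000 / 25 ticks.
 */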
124
125#define MAX_SIZE_8K (8192 * 4608)
126#define MAX_SIZE_4K (4096 * 2304)
127
128#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
129#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
130
131#define SEI_UserDataITU_T_T35 4
132#define INVALID_IDX -1 /* Invalid buffer index.*/
133
134static struct semaphore h265_sema;
135
136struct hevc_state_s;
137static int hevc_print(struct hevc_state_s *hevc,
138 int debug_flag, const char *fmt, ...);
139static int hevc_print_cont(struct hevc_state_s *hevc,
140 int debug_flag, const char *fmt, ...);
141static int vh265_vf_states(struct vframe_states *states, void *);
142static struct vframe_s *vh265_vf_peek(void *);
143static struct vframe_s *vh265_vf_get(void *);
144static void vh265_vf_put(struct vframe_s *, void *);
145static int vh265_event_cb(int type, void *data, void *private_data);
146
147static int vh265_stop(struct hevc_state_s *hevc);
148#ifdef MULTI_INSTANCE_SUPPORT
149static int vmh265_stop(struct hevc_state_s *hevc);
150static s32 vh265_init(struct vdec_s *vdec);
151static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
152static void reset_process_time(struct hevc_state_s *hevc);
153static void start_process_time(struct hevc_state_s *hevc);
154static void restart_process_time(struct hevc_state_s *hevc);
155static void timeout_process(struct hevc_state_s *hevc);
156#else
157static s32 vh265_init(struct hevc_state_s *hevc);
158#endif
159static void vh265_prot_init(struct hevc_state_s *hevc);
160static int vh265_local_init(struct hevc_state_s *hevc);
161static void vh265_check_timer_func(unsigned long arg);
162static void config_decode_mode(struct hevc_state_s *hevc);
163
164static const char vh265_dec_id[] = "vh265-dev";
165
166#define PROVIDER_NAME "decoder.h265"
167#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
168
169static const struct vframe_operations_s vh265_vf_provider = {
170 .peek = vh265_vf_peek,
171 .get = vh265_vf_get,
172 .put = vh265_vf_put,
173 .event_cb = vh265_event_cb,
174 .vf_states = vh265_vf_states,
175};
176
177static struct vframe_provider_s vh265_vf_prov;
178
179static u32 bit_depth_luma;
180static u32 bit_depth_chroma;
181static u32 video_signal_type;
182
183static int start_decode_buf_level = 0x8000;
184
185static unsigned int decode_timeout_val = 200;
186
187static u32 run_ready_min_buf_num = 2;
188
189/*data_resend_policy:
 190 bit 0: in stream-based mode, resend data when the decoding buffer is empty
191*/
192static u32 data_resend_policy = 1;
193
194#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
195/*
196static const char * const video_format_names[] = {
197 "component", "PAL", "NTSC", "SECAM",
198 "MAC", "unspecified", "unspecified", "unspecified"
199};
200
201static const char * const color_primaries_names[] = {
202 "unknown", "bt709", "undef", "unknown",
203 "bt470m", "bt470bg", "smpte170m", "smpte240m",
204 "film", "bt2020"
205};
206
207static const char * const transfer_characteristics_names[] = {
208 "unknown", "bt709", "undef", "unknown",
209 "bt470m", "bt470bg", "smpte170m", "smpte240m",
210 "linear", "log100", "log316", "iec61966-2-4",
211 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
212 "smpte-st-2084", "smpte-st-428"
213};
214
215static const char * const matrix_coeffs_names[] = {
216 "GBR", "bt709", "undef", "unknown",
217 "fcc", "bt470bg", "smpte170m", "smpte240m",
218 "YCgCo", "bt2020nc", "bt2020c"
219};
220*/
221#ifdef SUPPORT_10BIT
222#define HEVC_CM_BODY_START_ADDR 0x3626
223#define HEVC_CM_BODY_LENGTH 0x3627
224#define HEVC_CM_HEADER_LENGTH 0x3629
225#define HEVC_CM_HEADER_OFFSET 0x362b
226#define HEVC_SAO_CTRL9 0x362d
227#define LOSLESS_COMPRESS_MODE
228/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
229/* double_write_mode:
230 * 0, no double write;
231 * 1, 1:1 ratio;
232 * 2, (1/4):(1/4) ratio;
 233 * 3, (1/4):(1/4) ratio, with the compressed frame also included;
 234 * 4, (1/2):(1/2) ratio;
 235 * 0x10, double write only
 236 * 0x100, if > 1080p, use mode 4, else use mode 1;
 237 * 0x200, if > 1080p, use mode 2, else use mode 1;
238 * 0x300, if > 720p, use mode 4, else use mode 1;
239 */
240static u32 double_write_mode;
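/*
 * Illustrative sketch (not part of the driver logic): how the adaptive
 * values above are resolved by get_double_write_mode() further below,
 * assuming pic_w/pic_h hold the coded picture size:
 *
 *   double_write_mode = 0x100, 3840x2160 -> dw = 0x4 (1/2 x 1/2)
 *   double_write_mode = 0x100, 1920x1080 -> dw = 0x1 (1:1)
 *   double_write_mode = 0x200, 3840x2160 -> dw = 0x2 (1/4 x 1/4)
 *   any other value is used as-is (e.g. 0x10, double write only).
 */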
241
242/*#define DECOMP_HEADR_SURGENT*/
243
244static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
245static u32 enable_mem_saving = 1;
246static u32 workaround_enable;
247static u32 force_w_h;
248#endif
249static u32 force_fps;
250static u32 pts_unstable;
251#define H265_DEBUG_BUFMGR 0x01
252#define H265_DEBUG_BUFMGR_MORE 0x02
253#define H265_DEBUG_DETAIL 0x04
254#define H265_DEBUG_REG 0x08
255#define H265_DEBUG_MAN_SEARCH_NAL 0x10
256#define H265_DEBUG_MAN_SKIP_NAL 0x20
257#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
258#define H265_DEBUG_FORCE_CLK 0x80
259#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
260#define H265_DEBUG_NO_DISPLAY 0x200
261#define H265_DEBUG_DISCARD_NAL 0x400
262#define H265_DEBUG_OUT_PTS 0x800
263#define H265_DEBUG_DUMP_PIC_LIST 0x1000
264#define H265_DEBUG_PRINT_SEI 0x2000
265#define H265_DEBUG_PIC_STRUCT 0x4000
266#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
267#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
268#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
269#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
270#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
271#define H265_DEBUG_HW_RESET 0x100000
272#define H265_CFG_CANVAS_IN_DECODE 0x200000
273#define H265_DEBUG_DV 0x400000
274#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
275#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
276#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
277#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
278#ifdef MULTI_INSTANCE_SUPPORT
279#define PRINT_FLAG_ERROR 0x0
280#define IGNORE_PARAM_FROM_CONFIG 0x08000000
281#define PRINT_FRAMEBASE_DATA 0x10000000
282#define PRINT_FLAG_VDEC_STATUS 0x20000000
283#define PRINT_FLAG_VDEC_DETAIL 0x40000000
284#define PRINT_FLAG_V4L_DETAIL 0x80000000
285#endif
286
287#define BUF_POOL_SIZE 32
288#define MAX_BUF_NUM 24
289#define MAX_REF_PIC_NUM 24
290#define MAX_REF_ACTIVE 16
291
292#ifdef MV_USE_FIXED_BUF
293#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
294#define VF_BUFFER_IDX(n) (n)
295#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
296#else
297#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
298#define VF_BUFFER_IDX(n) (n)
299#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
300#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
301#endif
302
303#define HEVC_MV_INFO 0x310d
304#define HEVC_QP_INFO 0x3137
305#define HEVC_SKIP_INFO 0x3136
306
307const u32 h265_version = 201602101;
308static u32 debug_mask = 0xffffffff;
309static u32 log_mask;
310static u32 debug;
311static u32 radr;
312static u32 rval;
313static u32 dbg_cmd;
314static u32 dump_nal;
315static u32 dbg_skip_decode_index;
316static u32 endian = 0xff0;
317#ifdef ERROR_HANDLE_DEBUG
318static u32 dbg_nal_skip_flag;
319 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
320static u32 dbg_nal_skip_count;
321#endif
322/*for debug*/
323/*
324 udebug_flag:
325 bit 0, enable ucode print
326 bit 1, enable ucode detail print
327 bit [31:16] not 0, pos to dump lmem
328 bit 2, pop bits to lmem
329 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
330*/
331static u32 udebug_flag;
332/*
 333 when udebug_flag[1:0] is not 0 and
 334 udebug_pause_pos is not 0,
 335 pause at that position
336*/
337static u32 udebug_pause_pos;
338/*
339 when udebug_flag[1:0] is not 0
340 and udebug_pause_pos is not 0,
341 pause only when DEBUG_REG2 is equal to this val
342*/
343static u32 udebug_pause_val;
344
345static u32 udebug_pause_decode_idx;
346
347static u32 decode_pic_begin;
348static uint slice_parse_begin;
349static u32 step;
350static bool is_reset;
351
352#ifdef CONSTRAIN_MAX_BUF_NUM
353static u32 run_ready_max_vf_only_num;
354static u32 run_ready_display_q_num;
355 /*0: not check
356 0xff: work_pic_num
357 */
358static u32 run_ready_max_buf_num = 0xff;
359#endif
360
361static u32 dynamic_buf_num_margin = 7;
362static u32 buf_alloc_width;
363static u32 buf_alloc_height;
364
365static u32 max_buf_num = 16;
366static u32 buf_alloc_size;
367/*static u32 re_config_pic_flag;*/
368/*
 369 *bit[0]: 0, when stop, follow bit[1]:
 370 *bit[1]: 0, always release cma buffer when stop
 371 *bit[1]: 1, never release cma buffer when stop
 372 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
 373 *do not release cma buffer if blackout is not 1
374 *
375 *bit[2]: 0, when start decoding, check current displayed buffer
376 * (only for buffer decoded by h265) if blackout is 0
377 * 1, do not check current displayed buffer
378 *
379 *bit[3]: 1, if blackout is not 1, do not release current
380 * displayed cma buffer always.
381 */
382/* set to 1 for fast play;
383 * set to 8 for other case of "keep last frame"
384 */
385static u32 buffer_mode = 1;
386
387/* buffer_mode_dbg: debug only*/
388static u32 buffer_mode_dbg = 0xffff0000;
389/**/
390/*
391 *bit[1:0]PB_skip_mode: 0, start decoding at begin;
392 *1, start decoding after first I;
 393 *2, only decode and display non-error pictures;
 394 *3, start decoding and display after IDR, etc.
395 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
396 *only for mode 0 and 1.
397 */
398static u32 nal_skip_policy = 2;
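/*
 * Minimal sketch (assumption, mirroring the bit layout described above)
 * of how the value splits into the two fields kept in the hevc state:
 *
 *   int PB_skip_mode = nal_skip_policy & 0x3;
 *   int PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
 */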
399
400/*
401 *bit 0, 1: only display I picture;
402 *bit 1, 1: only decode I picture;
403 */
404static u32 i_only_flag;
405
406/*
407bit 0, fast output first I picture
408*/
409static u32 fast_output_enable = 1;
410
411static u32 frmbase_cont_bitlevel = 0x60;
412
413/*
 414use_cma: 1, use both reserved memory and cma for buffers
4152, only use cma for buffers
416*/
417static u32 use_cma = 2;
418
 419#define AUX_BUF_ALIGN(adr) (((adr) + 0xf) & (~0xf))
420static u32 prefix_aux_buf_size = (16 * 1024);
421static u32 suffix_aux_buf_size;
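/*
 * Worked example: AUX_BUF_ALIGN() rounds an address up to the next
 * 16-byte boundary, e.g. AUX_BUF_ALIGN(0x4007) = 0x4010, while an
 * already aligned address is unchanged: AUX_BUF_ALIGN(0x4000) = 0x4000.
 */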
422
423static u32 max_decoding_time;
424/*
425 *error handling
426 */
427/*error_handle_policy:
428 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
429 *1, skip error_skip_nal_count nals before error recovery;
430 *bit 1 (valid only when bit0 == 1):
431 *1, wait vps/sps/pps after error recovery;
432 *bit 2 (valid only when bit0 == 0):
433 *0, auto search after error recovery (hevc_recover() called);
434 *1, manual search after error recovery
435 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
436 *
437 *bit 4: 0, set error_mark after reset/recover
438 * 1, do not set error_mark after reset/recover
439 *bit 5: 0, check total lcu for every picture
440 * 1, do not check total lcu
441 *bit 6: 0, do not check head error
442 * 1, check head error
443 *
444 */
445
446static u32 error_handle_policy;
447static u32 error_skip_nal_count = 6;
448static u32 error_handle_threshold = 30;
449static u32 error_handle_nal_skip_threshold = 10;
450static u32 error_handle_system_threshold = 30;
451static u32 interlace_enable = 1;
452static u32 fr_hint_status;
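/*
 * Illustrative test of one error_handle_policy bit (see the bit
 * description above): bit 6 gates the PCM bit-depth check performed by
 * check_head_error() further below, which returns early unless
 * (error_handle_policy & 0x40) is set.
 */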
453
454 /*
455 *parser_sei_enable:
456 * bit 0, sei;
457 * bit 1, sei_suffix (fill aux buf)
458 * bit 2, fill sei to aux buf (when bit 0 is 1)
459 * bit 8, debug flag
460 */
461static u32 parser_sei_enable;
462#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
463static u32 parser_dolby_vision_enable = 1;
464static u32 dolby_meta_with_el;
465static u32 dolby_el_flush_th = 2;
466#endif
467/* this is only for h265 mmu enable */
468
469static u32 mmu_enable = 1;
470static u32 mmu_enable_force;
471static u32 work_buf_size;
472static unsigned int force_disp_pic_index;
473static unsigned int disp_vframe_valve_level;
474static int pre_decode_buf_level = 0x1000;
475static unsigned int pic_list_debug;
476
477
478#ifdef MULTI_INSTANCE_SUPPORT
479static unsigned int max_decode_instance_num
480 = MAX_DECODE_INSTANCE_NUM;
481static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
482static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
483static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
484static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
485static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
486static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
487static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
488static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
489{1, 1, 1, 1, 1, 1, 1, 1, 1};
490
491#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
492static unsigned char get_idx(struct hevc_state_s *hevc);
493#endif
494
495#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
496static u32 dv_toggle_prov_name;
497
498static u32 dv_debug;
499
500static u32 force_bypass_dvenl;
501#endif
502#endif
503
504
505#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
506#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
507#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
508#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
509#else
510#define get_dbg_flag(hevc) debug
511#define get_dbg_flag2(hevc) debug
512#define is_log_enable(hevc) (log_mask ? 1 : 0)
513#define get_valid_double_write_mode(hevc) double_write_mode
514#define get_buf_alloc_width(hevc) buf_alloc_width
515#define get_buf_alloc_height(hevc) buf_alloc_height
516#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
517#endif
518#define get_buffer_mode(hevc) buffer_mode
519
520
521DEFINE_SPINLOCK(lock);
522struct task_struct *h265_task = NULL;
523#undef DEBUG_REG
524#ifdef DEBUG_REG
525void WRITE_VREG_DBG(unsigned adr, unsigned val)
526{
527 if (debug & H265_DEBUG_REG)
528 pr_info("%s(%x, %x)\n", __func__, adr, val);
529 WRITE_VREG(adr, val);
530}
531
532#undef WRITE_VREG
533#define WRITE_VREG WRITE_VREG_DBG
534#endif
535
536static DEFINE_MUTEX(vh265_mutex);
537
538static DEFINE_MUTEX(vh265_log_mutex);
539
540static struct vdec_info *gvs;
541
542static u32 without_display_mode;
543
544/**************************************************
545 *
546 *h265 buffer management include
547 *
548 ***************************************************
549 */
550enum NalUnitType {
551 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
552 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
553
554 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
555 /* Current name in the spec: TSA_R */
556 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
557
558 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
559 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
560
561 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
562 /* Current name in the spec: RADL_R */
563 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
564
565 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
566 /* Current name in the spec: RASL_R */
567 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
568
569 NAL_UNIT_RESERVED_10,
570 NAL_UNIT_RESERVED_11,
571 NAL_UNIT_RESERVED_12,
572 NAL_UNIT_RESERVED_13,
573 NAL_UNIT_RESERVED_14,
574 NAL_UNIT_RESERVED_15,
575
576 /* Current name in the spec: BLA_W_LP */
577 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
578 /* Current name in the spec: BLA_W_DLP */
579 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
580 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
581 /* Current name in the spec: IDR_W_DLP */
582 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
583 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
584 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
585 NAL_UNIT_RESERVED_22,
586 NAL_UNIT_RESERVED_23,
587
588 NAL_UNIT_RESERVED_24,
589 NAL_UNIT_RESERVED_25,
590 NAL_UNIT_RESERVED_26,
591 NAL_UNIT_RESERVED_27,
592 NAL_UNIT_RESERVED_28,
593 NAL_UNIT_RESERVED_29,
594 NAL_UNIT_RESERVED_30,
595 NAL_UNIT_RESERVED_31,
596
597 NAL_UNIT_VPS, /* 32 */
598 NAL_UNIT_SPS, /* 33 */
599 NAL_UNIT_PPS, /* 34 */
600 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
601 NAL_UNIT_EOS, /* 36 */
602 NAL_UNIT_EOB, /* 37 */
603 NAL_UNIT_FILLER_DATA, /* 38 */
604 NAL_UNIT_SEI, /* 39 Prefix SEI */
605 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
606 NAL_UNIT_RESERVED_41,
607 NAL_UNIT_RESERVED_42,
608 NAL_UNIT_RESERVED_43,
609 NAL_UNIT_RESERVED_44,
610 NAL_UNIT_RESERVED_45,
611 NAL_UNIT_RESERVED_46,
612 NAL_UNIT_RESERVED_47,
613 NAL_UNIT_UNSPECIFIED_48,
614 NAL_UNIT_UNSPECIFIED_49,
615 NAL_UNIT_UNSPECIFIED_50,
616 NAL_UNIT_UNSPECIFIED_51,
617 NAL_UNIT_UNSPECIFIED_52,
618 NAL_UNIT_UNSPECIFIED_53,
619 NAL_UNIT_UNSPECIFIED_54,
620 NAL_UNIT_UNSPECIFIED_55,
621 NAL_UNIT_UNSPECIFIED_56,
622 NAL_UNIT_UNSPECIFIED_57,
623 NAL_UNIT_UNSPECIFIED_58,
624 NAL_UNIT_UNSPECIFIED_59,
625 NAL_UNIT_UNSPECIFIED_60,
626 NAL_UNIT_UNSPECIFIED_61,
627 NAL_UNIT_UNSPECIFIED_62,
628 NAL_UNIT_UNSPECIFIED_63,
629 NAL_UNIT_INVALID,
630};
631
632/* --------------------------------------------------- */
633/* Amrisc Software Interrupt */
634/* --------------------------------------------------- */
635#define AMRISC_STREAM_EMPTY_REQ 0x01
636#define AMRISC_PARSER_REQ 0x02
637#define AMRISC_MAIN_REQ 0x04
638
639/* --------------------------------------------------- */
640/* HEVC_DEC_STATUS define */
641/* --------------------------------------------------- */
642#define HEVC_DEC_IDLE 0x0
643#define HEVC_NAL_UNIT_VPS 0x1
644#define HEVC_NAL_UNIT_SPS 0x2
645#define HEVC_NAL_UNIT_PPS 0x3
646#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
647#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
648#define HEVC_SLICE_DECODING 0x6
649#define HEVC_NAL_UNIT_SEI 0x7
650#define HEVC_SLICE_SEGMENT_DONE 0x8
651#define HEVC_NAL_SEARCH_DONE 0x9
652#define HEVC_DECPIC_DATA_DONE 0xa
653#define HEVC_DECPIC_DATA_ERROR 0xb
654#define HEVC_SEI_DAT 0xc
655#define HEVC_SEI_DAT_DONE 0xd
656#define HEVC_NAL_DECODE_DONE 0xe
657#define HEVC_OVER_DECODE 0xf
658
659#define HEVC_DATA_REQUEST 0x12
660
661#define HEVC_DECODE_BUFEMPTY 0x20
662#define HEVC_DECODE_TIMEOUT 0x21
663#define HEVC_SEARCH_BUFEMPTY 0x22
664#define HEVC_DECODE_OVER_SIZE 0x23
665#define HEVC_DECODE_BUFEMPTY2 0x24
666#define HEVC_FIND_NEXT_PIC_NAL 0x50
667#define HEVC_FIND_NEXT_DVEL_NAL 0x51
668
669#define HEVC_DUMP_LMEM 0x30
670
671#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
672#define HEVC_DISCARD_NAL 0xf0
673#define HEVC_ACTION_DEC_CONT 0xfd
674#define HEVC_ACTION_ERROR 0xfe
675#define HEVC_ACTION_DONE 0xff
676
677/* --------------------------------------------------- */
678/* Include "parser_cmd.h" */
679/* --------------------------------------------------- */
680#define PARSER_CMD_SKIP_CFG_0 0x0000090b
681
682#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
683
684#define PARSER_CMD_SKIP_CFG_2 0x001b1910
685
686#define PARSER_CMD_NUMBER 37
687
688/**************************************************
689 *
690 *h265 buffer management
691 *
692 ***************************************************
693 */
694/* #define BUFFER_MGR_ONLY */
695/* #define CONFIG_HEVC_CLK_FORCED_ON */
696/* #define ENABLE_SWAP_TEST */
697#define MCRCC_ENABLE
698#define INVALID_POC 0x80000000
699
700#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
701#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
702#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
703#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
704#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
705#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
706#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
707#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
708#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
709#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
710#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
711#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
712#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
713#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
714#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
715#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
716#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
717#ifdef ENABLE_SWAP_TEST
718#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
719#endif
720
721/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
722/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
723#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
724 /*do not define ENABLE_SWAP_TEST*/
725#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
726#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
727
728#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
729#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
730/*
731 *ucode parser/search control
732 *bit 0: 0, header auto parse; 1, header manual parse
 733 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
734 *bit [3:2]: valid when bit1==0;
735 *0, auto skip nal before first vps/sps/pps/idr;
736 *1, auto skip nal before first vps/sps/pps
737 *2, auto skip nal before first vps/sps/pps,
738 * and not decode until the first I slice (with slice address of 0)
739 *
740 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
741 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
742 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
743 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
744 *bit [17]: for NAL_SEI when bit0 is 0:
745 * 0, do not parse/fetch SEI in ucode;
746 * 1, parse/fetch SEI in ucode
747 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
748 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
 749 * 1, fetch NAL_SEI_SUFFIX data to aux buf
750 *bit [19]:
751 * 0, parse NAL_SEI in ucode
752 * 1, fetch NAL_SEI to aux buf
753 *bit [20]: for DOLBY_VISION_META
754 * 0, do not fetch DOLBY_VISION_META to aux buf
755 * 1, fetch DOLBY_VISION_META to aux buf
756 */
757#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
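/*
 * Illustrative sketch (an assumed value, not necessarily what the
 * driver programs): composing NAL_SEARCH_CTL per the bit layout above
 * for auto header parse with SEI handled in ucode and SEI_SUFFIX data
 * copied to the aux buffer:
 *
 *   u32 ctl = 0;        // bit 0 = 0: header auto parse
 *   ctl |= (1 << 17);   // parse/fetch SEI in ucode
 *   ctl |= (1 << 18);   // fetch NAL_SEI_SUFFIX data to aux buf
 *   WRITE_VREG(NAL_SEARCH_CTL, ctl);
 */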
758 /*read only*/
759#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
760 /*
761 [15 : 8] rps_set_id
762 [7 : 0] start_decoding_flag
763 */
764#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
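/*
 * Minimal sketch (assumed call site) packing the fields documented
 * above into HEVC_DECODE_INFO:
 *
 *   WRITE_VREG(HEVC_DECODE_INFO,
 *       (hevc->rps_set_id << 8) | hevc->start_decoding_flag);
 */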
765 /*set before start decoder*/
766#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
767#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
768#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
769
770#define DECODE_MODE_SINGLE 0x0
771#define DECODE_MODE_MULTI_FRAMEBASE 0x1
772#define DECODE_MODE_MULTI_STREAMBASE 0x2
773#define DECODE_MODE_MULTI_DVBAL 0x3
774#define DECODE_MODE_MULTI_DVENL 0x4
775
776#define MAX_INT 0x7FFFFFFF
777
778#define RPM_BEGIN 0x100
779#define modification_list_cur 0x148
780#define RPM_END 0x180
781
782#define RPS_USED_BIT 14
783/* MISC_FLAG0 */
784#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
785#define PCM_ENABLE_FLAG_BIT 1
786#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
787#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
788#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
789#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
790#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
791#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
792#define SLICE_SAO_LUMA_FLAG_BIT 8
793#define SLICE_SAO_CHROMA_FLAG_BIT 9
794#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
795
796union param_u {
797 struct {
798 unsigned short data[RPM_END - RPM_BEGIN];
799 } l;
800 struct {
801 /* from ucode lmem, do not change this struct */
802 unsigned short CUR_RPS[0x10];
803 unsigned short num_ref_idx_l0_active;
804 unsigned short num_ref_idx_l1_active;
805 unsigned short slice_type;
806 unsigned short slice_temporal_mvp_enable_flag;
807 unsigned short dependent_slice_segment_flag;
808 unsigned short slice_segment_address;
809 unsigned short num_title_rows_minus1;
810 unsigned short pic_width_in_luma_samples;
811 unsigned short pic_height_in_luma_samples;
812 unsigned short log2_min_coding_block_size_minus3;
813 unsigned short log2_diff_max_min_coding_block_size;
814 unsigned short log2_max_pic_order_cnt_lsb_minus4;
815 unsigned short POClsb;
816 unsigned short collocated_from_l0_flag;
817 unsigned short collocated_ref_idx;
818 unsigned short log2_parallel_merge_level;
819 unsigned short five_minus_max_num_merge_cand;
820 unsigned short sps_num_reorder_pics_0;
821 unsigned short modification_flag;
822 unsigned short tiles_enabled_flag;
823 unsigned short num_tile_columns_minus1;
824 unsigned short num_tile_rows_minus1;
825 unsigned short tile_width[8];
826 unsigned short tile_height[8];
827 unsigned short misc_flag0;
828 unsigned short pps_beta_offset_div2;
829 unsigned short pps_tc_offset_div2;
830 unsigned short slice_beta_offset_div2;
831 unsigned short slice_tc_offset_div2;
832 unsigned short pps_cb_qp_offset;
833 unsigned short pps_cr_qp_offset;
834 unsigned short first_slice_segment_in_pic_flag;
835 unsigned short m_temporalId;
836 unsigned short m_nalUnitType;
837
838 unsigned short vui_num_units_in_tick_hi;
839 unsigned short vui_num_units_in_tick_lo;
840 unsigned short vui_time_scale_hi;
841 unsigned short vui_time_scale_lo;
842 unsigned short bit_depth;
843 unsigned short profile_etc;
844 unsigned short sei_frame_field_info;
845 unsigned short video_signal_type;
846 unsigned short modification_list[0x20];
847 unsigned short conformance_window_flag;
848 unsigned short conf_win_left_offset;
849 unsigned short conf_win_right_offset;
850 unsigned short conf_win_top_offset;
851 unsigned short conf_win_bottom_offset;
852 unsigned short chroma_format_idc;
853 unsigned short color_description;
854 unsigned short aspect_ratio_idc;
855 unsigned short sar_width;
856 unsigned short sar_height;
857 unsigned short sps_max_dec_pic_buffering_minus1_0;
858 } p;
859};
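/*
 * Worked example (illustrative): 32-bit VUI timing values arrive as two
 * 16-bit lmem words each and are reassembled as
 *
 *   num_units_in_tick = (p.vui_num_units_in_tick_hi << 16) |
 *                        p.vui_num_units_in_tick_lo;
 *   time_scale        = (p.vui_time_scale_hi << 16) |
 *                        p.vui_time_scale_lo;
 *
 * For a 25 fps stream (num_units_in_tick = 1, time_scale = 25), the
 * frame duration in the 1/96000 s units used by this driver would be
 * 96000 * 1 / 25 = 3840.
 */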
860
861#define RPM_BUF_SIZE (0x80*2)
862/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
863#define LMEM_BUF_SIZE (0x500 * 2)
864
865struct buff_s {
866 u32 buf_start;
867 u32 buf_size;
868 u32 buf_end;
869};
870
871struct BuffInfo_s {
872 u32 max_width;
873 u32 max_height;
874 unsigned int start_adr;
875 unsigned int end_adr;
876 struct buff_s ipp;
877 struct buff_s sao_abv;
878 struct buff_s sao_vb;
879 struct buff_s short_term_rps;
880 struct buff_s vps;
881 struct buff_s sps;
882 struct buff_s pps;
883 struct buff_s sao_up;
884 struct buff_s swap_buf;
885 struct buff_s swap_buf2;
886 struct buff_s scalelut;
887 struct buff_s dblk_para;
888 struct buff_s dblk_data;
889 struct buff_s dblk_data2;
890 struct buff_s mmu_vbh;
891 struct buff_s cm_header;
892 struct buff_s mpred_above;
893#ifdef MV_USE_FIXED_BUF
894 struct buff_s mpred_mv;
895#endif
896 struct buff_s rpm;
897 struct buff_s lmem;
898};
899#define WORK_BUF_SPEC_NUM 3
900static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
901 {
902 /* 8M bytes */
903 .max_width = 1920,
904 .max_height = 1088,
905 .ipp = {
906 /* IPP work space calculation :
907 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
908 */
909 .buf_size = 0x4000,
910 },
911 .sao_abv = {
912 .buf_size = 0x30000,
913 },
914 .sao_vb = {
915 .buf_size = 0x30000,
916 },
917 .short_term_rps = {
918 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
919 * total 64x16x2 = 2048 bytes (0x800)
920 */
921 .buf_size = 0x800,
922 },
923 .vps = {
924 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
925 * total 0x0800 bytes
926 */
927 .buf_size = 0x800,
928 },
929 .sps = {
930 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
931 * total 0x0800 bytes
932 */
933 .buf_size = 0x800,
934 },
935 .pps = {
936 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
937 * total 0x2000 bytes
938 */
939 .buf_size = 0x2000,
940 },
941 .sao_up = {
942 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
943 * each has 16 bytes total 0x2800 bytes
944 */
945 .buf_size = 0x2800,
946 },
947 .swap_buf = {
948 /* 256cyclex64bit = 2K bytes 0x800
949 * (only 144 cycles valid)
950 */
951 .buf_size = 0x800,
952 },
953 .swap_buf2 = {
954 .buf_size = 0x800,
955 },
956 .scalelut = {
957 /* support up to 32 SCALELUT 1024x32 =
958 * 32Kbytes (0x8000)
959 */
960 .buf_size = 0x8000,
961 },
962 .dblk_para = {
963#ifdef SUPPORT_10BIT
964 .buf_size = 0x40000,
965#else
966 /* DBLK -> Max 256(4096/16) LCU, each para
967 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
968 */
969 .buf_size = 0x20000,
970#endif
971 },
972 .dblk_data = {
973 .buf_size = 0x40000,
974 },
975 .dblk_data2 = {
976 .buf_size = 0x40000,
977 }, /*dblk data for adapter*/
978 .mmu_vbh = {
979 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
980 },
981#if 0
982 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
983 .buf_size = MMU_COMPRESS_HEADER_SIZE *
984 (MAX_REF_PIC_NUM + 1),
985 },
986#endif
987 .mpred_above = {
988 .buf_size = 0x8000,
989 },
990#ifdef MV_USE_FIXED_BUF
991 .mpred_mv = {/* 1080p, 0x40000 per buffer */
992 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
993 },
994#endif
995 .rpm = {
996 .buf_size = RPM_BUF_SIZE,
997 },
998 .lmem = {
999 .buf_size = 0x500 * 2,
1000 }
1001 },
1002 {
1003 .max_width = 4096,
1004 .max_height = 2048,
1005 .ipp = {
1006 /* IPP work space calculation :
1007 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1008 */
1009 .buf_size = 0x4000,
1010 },
1011 .sao_abv = {
1012 .buf_size = 0x30000,
1013 },
1014 .sao_vb = {
1015 .buf_size = 0x30000,
1016 },
1017 .short_term_rps = {
1018 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1019 * total 64x16x2 = 2048 bytes (0x800)
1020 */
1021 .buf_size = 0x800,
1022 },
1023 .vps = {
1024 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1025 * total 0x0800 bytes
1026 */
1027 .buf_size = 0x800,
1028 },
1029 .sps = {
1030 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1031 * total 0x0800 bytes
1032 */
1033 .buf_size = 0x800,
1034 },
1035 .pps = {
1036 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1037 * total 0x2000 bytes
1038 */
1039 .buf_size = 0x2000,
1040 },
1041 .sao_up = {
1042 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1043 * each has 16 bytes total 0x2800 bytes
1044 */
1045 .buf_size = 0x2800,
1046 },
1047 .swap_buf = {
1048 /* 256cyclex64bit = 2K bytes 0x800
1049 * (only 144 cycles valid)
1050 */
1051 .buf_size = 0x800,
1052 },
1053 .swap_buf2 = {
1054 .buf_size = 0x800,
1055 },
1056 .scalelut = {
1057 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1058 * (0x8000)
1059 */
1060 .buf_size = 0x8000,
1061 },
1062 .dblk_para = {
1063 /* DBLK -> Max 256(4096/16) LCU, each para
1064 * 512bytes(total:0x20000),
1065 * data 1024bytes(total:0x40000)
1066 */
1067 .buf_size = 0x20000,
1068 },
1069 .dblk_data = {
1070 .buf_size = 0x80000,
1071 },
1072 .dblk_data2 = {
1073 .buf_size = 0x80000,
1074 }, /*dblk data for adapter*/
1075 .mmu_vbh = {
1076 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1077 },
1078#if 0
1079 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1080 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1081 (MAX_REF_PIC_NUM + 1),
1082 },
1083#endif
1084 .mpred_above = {
1085 .buf_size = 0x8000,
1086 },
1087#ifdef MV_USE_FIXED_BUF
1088 .mpred_mv = {
1089 /* .buf_size = 0x100000*16,
1090 //4k2k , 0x100000 per buffer */
1091 /* 4096x2304 , 0x120000 per buffer */
1092 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1093 },
1094#endif
1095 .rpm = {
1096 .buf_size = RPM_BUF_SIZE,
1097 },
1098 .lmem = {
1099 .buf_size = 0x500 * 2,
1100 }
1101 },
1102
1103 {
1104 .max_width = 4096*2,
1105 .max_height = 2048*2,
1106 .ipp = {
1107 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1108 .buf_size = 0x4000*2,
1109 },
1110 .sao_abv = {
1111 .buf_size = 0x30000*2,
1112 },
1113 .sao_vb = {
1114 .buf_size = 0x30000*2,
1115 },
1116 .short_term_rps = {
1117 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1118 .buf_size = 0x800,
1119 },
1120 .vps = {
1121 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1122 .buf_size = 0x800,
1123 },
1124 .sps = {
1125 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1126 .buf_size = 0x800,
1127 },
1128 .pps = {
1129 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1130 .buf_size = 0x2000,
1131 },
1132 .sao_up = {
1133 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1134 .buf_size = 0x2800*2,
1135 },
1136 .swap_buf = {
1137 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1138 .buf_size = 0x800,
1139 },
1140 .swap_buf2 = {
1141 .buf_size = 0x800,
1142 },
1143 .scalelut = {
1144 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1145 .buf_size = 0x8000*2,
1146 },
1147 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1148 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1149 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1150 .mmu_vbh = {
1151 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1152 },
1153#if 0
1154 .cm_header = {
1155 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1156 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1157 },
1158#endif
1159 .mpred_above = {
1160 .buf_size = 0x8000*2,
1161 },
1162#ifdef MV_USE_FIXED_BUF
1163 .mpred_mv = {
1164 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1165 },
1166#endif
1167 .rpm = {
1168 .buf_size = RPM_BUF_SIZE,
1169 },
1170 .lmem = {
1171 .buf_size = 0x500 * 2,
1172 },
1173 }
1174};
1175
1176static void init_buff_spec(struct hevc_state_s *hevc,
1177 struct BuffInfo_s *buf_spec)
1178{
1179 buf_spec->ipp.buf_start = buf_spec->start_adr;
1180 buf_spec->sao_abv.buf_start =
1181 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1182
1183 buf_spec->sao_vb.buf_start =
1184 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1185 buf_spec->short_term_rps.buf_start =
1186 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1187 buf_spec->vps.buf_start =
1188 buf_spec->short_term_rps.buf_start +
1189 buf_spec->short_term_rps.buf_size;
1190 buf_spec->sps.buf_start =
1191 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1192 buf_spec->pps.buf_start =
1193 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1194 buf_spec->sao_up.buf_start =
1195 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1196 buf_spec->swap_buf.buf_start =
1197 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1198 buf_spec->swap_buf2.buf_start =
1199 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1200 buf_spec->scalelut.buf_start =
1201 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1202 buf_spec->dblk_para.buf_start =
1203 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1204 buf_spec->dblk_data.buf_start =
1205 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1206 buf_spec->dblk_data2.buf_start =
1207 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1208 buf_spec->mmu_vbh.buf_start =
1209 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1210 buf_spec->mpred_above.buf_start =
1211 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1212#ifdef MV_USE_FIXED_BUF
1213 buf_spec->mpred_mv.buf_start =
1214 buf_spec->mpred_above.buf_start +
1215 buf_spec->mpred_above.buf_size;
1216
1217 buf_spec->rpm.buf_start =
1218 buf_spec->mpred_mv.buf_start +
1219 buf_spec->mpred_mv.buf_size;
1220#else
1221 buf_spec->rpm.buf_start =
1222 buf_spec->mpred_above.buf_start +
1223 buf_spec->mpred_above.buf_size;
1224#endif
1225 buf_spec->lmem.buf_start =
1226 buf_spec->rpm.buf_start +
1227 buf_spec->rpm.buf_size;
1228 buf_spec->end_adr =
1229 buf_spec->lmem.buf_start +
1230 buf_spec->lmem.buf_size;
1231
1232 if (hevc && get_dbg_flag2(hevc)) {
1233 hevc_print(hevc, 0,
1234 "%s workspace (%x %x) size = %x\n", __func__,
1235 buf_spec->start_adr, buf_spec->end_adr,
1236 buf_spec->end_adr - buf_spec->start_adr);
1237
1238 hevc_print(hevc, 0,
1239 "ipp.buf_start :%x\n",
1240 buf_spec->ipp.buf_start);
1241 hevc_print(hevc, 0,
1242 "sao_abv.buf_start :%x\n",
1243 buf_spec->sao_abv.buf_start);
1244 hevc_print(hevc, 0,
1245 "sao_vb.buf_start :%x\n",
1246 buf_spec->sao_vb.buf_start);
1247 hevc_print(hevc, 0,
1248 "short_term_rps.buf_start :%x\n",
1249 buf_spec->short_term_rps.buf_start);
1250 hevc_print(hevc, 0,
1251 "vps.buf_start :%x\n",
1252 buf_spec->vps.buf_start);
1253 hevc_print(hevc, 0,
1254 "sps.buf_start :%x\n",
1255 buf_spec->sps.buf_start);
1256 hevc_print(hevc, 0,
1257 "pps.buf_start :%x\n",
1258 buf_spec->pps.buf_start);
1259 hevc_print(hevc, 0,
1260 "sao_up.buf_start :%x\n",
1261 buf_spec->sao_up.buf_start);
1262 hevc_print(hevc, 0,
1263 "swap_buf.buf_start :%x\n",
1264 buf_spec->swap_buf.buf_start);
1265 hevc_print(hevc, 0,
1266 "swap_buf2.buf_start :%x\n",
1267 buf_spec->swap_buf2.buf_start);
1268 hevc_print(hevc, 0,
1269 "scalelut.buf_start :%x\n",
1270 buf_spec->scalelut.buf_start);
1271 hevc_print(hevc, 0,
1272 "dblk_para.buf_start :%x\n",
1273 buf_spec->dblk_para.buf_start);
1274 hevc_print(hevc, 0,
1275 "dblk_data.buf_start :%x\n",
1276 buf_spec->dblk_data.buf_start);
1277 hevc_print(hevc, 0,
1278 "dblk_data2.buf_start :%x\n",
1279 buf_spec->dblk_data2.buf_start);
1280 hevc_print(hevc, 0,
1281 "mpred_above.buf_start :%x\n",
1282 buf_spec->mpred_above.buf_start);
1283#ifdef MV_USE_FIXED_BUF
1284 hevc_print(hevc, 0,
1285 "mpred_mv.buf_start :%x\n",
1286 buf_spec->mpred_mv.buf_start);
1287#endif
1288 if ((get_dbg_flag2(hevc)
1289 &
1290 H265_DEBUG_SEND_PARAM_WITH_REG)
1291 == 0) {
1292 hevc_print(hevc, 0,
1293 "rpm.buf_start :%x\n",
1294 buf_spec->rpm.buf_start);
1295 }
1296 }
1297
1298}
1299
1300enum SliceType {
1301 B_SLICE,
1302 P_SLICE,
1303 I_SLICE
1304};
1305
1306/*USE_BUF_BLOCK*/
1307struct BUF_s {
1308 ulong start_adr;
1309 u32 size;
1310 u32 luma_size;
1311 ulong header_addr;
1312 u32 header_size;
1313 int used_flag;
1314 ulong v4l_ref_buf_addr;
1315} /*BUF_t */;
1316
1317/* level 6, 6.1 maximum slice number is 800; other is 200 */
1318#define MAX_SLICE_NUM 800
1319struct PIC_s {
1320 int index;
1321 int scatter_alloc;
1322 int BUF_index;
1323 int mv_buf_index;
1324 int POC;
1325 int decode_idx;
1326 int slice_type;
1327 int RefNum_L0;
1328 int RefNum_L1;
1329 int num_reorder_pic;
1330 int stream_offset;
1331 unsigned char referenced;
1332 unsigned char output_mark;
1333 unsigned char recon_mark;
1334 unsigned char output_ready;
1335 unsigned char error_mark;
 1336 //dis_mark = 0: discard mark, dis_mark = 1: no discard mark
1337 unsigned char dis_mark;
1338 /**/ int slice_idx;
1339 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1340 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1341 /*buffer */
1342 unsigned int header_adr;
1343#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1344 unsigned char dv_enhance_exist;
1345#endif
1346 char *aux_data_buf;
1347 int aux_data_size;
1348 unsigned long cma_alloc_addr;
1349 struct page *alloc_pages;
1350 unsigned int mpred_mv_wr_start_addr;
1351 unsigned int mc_y_adr;
1352 unsigned int mc_u_v_adr;
1353#ifdef SUPPORT_10BIT
1354 /*unsigned int comp_body_size;*/
1355 unsigned int dw_y_adr;
1356 unsigned int dw_u_v_adr;
1357#endif
1358 int mc_canvas_y;
1359 int mc_canvas_u_v;
1360 int width;
1361 int height;
1362
1363 int y_canvas_index;
1364 int uv_canvas_index;
1365#ifdef MULTI_INSTANCE_SUPPORT
1366 struct canvas_config_s canvas_config[2];
1367#endif
1368#ifdef SUPPORT_10BIT
1369 int mem_saving_mode;
1370 u32 bit_depth_luma;
1371 u32 bit_depth_chroma;
1372#endif
1373#ifdef LOSLESS_COMPRESS_MODE
1374 unsigned int losless_comp_body_size;
1375#endif
1376 unsigned char pic_struct;
1377 int vf_ref;
1378
1379 u32 pts;
1380 u64 pts64;
1381 u64 timestamp;
1382
1383 u32 aspect_ratio_idc;
1384 u32 sar_width;
1385 u32 sar_height;
1386 u32 double_write_mode;
1387 u32 video_signal_type;
1388 unsigned short conformance_window_flag;
1389 unsigned short conf_win_left_offset;
1390 unsigned short conf_win_right_offset;
1391 unsigned short conf_win_top_offset;
1392 unsigned short conf_win_bottom_offset;
1393 unsigned short chroma_format_idc;
1394
 1395 /* picture qos information */
1396 int max_qp;
1397 int avg_qp;
1398 int min_qp;
1399 int max_skip;
1400 int avg_skip;
1401 int min_skip;
1402 int max_mv;
1403 int min_mv;
1404 int avg_mv;
1405
1406 bool vframe_bound;
1407} /*PIC_t */;
1408
1409#define MAX_TILE_COL_NUM 10
1410#define MAX_TILE_ROW_NUM 20
1411struct tile_s {
1412 int width;
1413 int height;
1414 int start_cu_x;
1415 int start_cu_y;
1416
1417 unsigned int sao_vb_start_addr;
1418 unsigned int sao_abv_start_addr;
1419};
1420
1421#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1422#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1423#define SEI_HDR10PLUS_MASK 0x00000004
1424
1425#define VF_POOL_SIZE 32
1426
1427#ifdef MULTI_INSTANCE_SUPPORT
1428#define DEC_RESULT_NONE 0
1429#define DEC_RESULT_DONE 1
1430#define DEC_RESULT_AGAIN 2
1431#define DEC_RESULT_CONFIG_PARAM 3
1432#define DEC_RESULT_ERROR 4
1433#define DEC_INIT_PICLIST 5
1434#define DEC_UNINIT_PICLIST 6
1435#define DEC_RESULT_GET_DATA 7
1436#define DEC_RESULT_GET_DATA_RETRY 8
1437#define DEC_RESULT_EOS 9
1438#define DEC_RESULT_FORCE_EXIT 10
1439#define DEC_RESULT_FREE_CANVAS 11
1440
1441static void vh265_work(struct work_struct *work);
1442static void vh265_timeout_work(struct work_struct *work);
1443static void vh265_notify_work(struct work_struct *work);
1444
1445#endif
1446
1447struct debug_log_s {
1448 struct list_head list;
1449 uint8_t data; /*will alloc more size*/
1450};
1451
1452struct hevc_state_s {
1453#ifdef MULTI_INSTANCE_SUPPORT
1454 struct platform_device *platform_dev;
1455 void (*vdec_cb)(struct vdec_s *, void *);
1456 void *vdec_cb_arg;
1457 struct vframe_chunk_s *chunk;
1458 int dec_result;
1459 struct work_struct work;
1460 struct work_struct timeout_work;
1461 struct work_struct notify_work;
1462 struct work_struct set_clk_work;
1463 /* timeout handle */
1464 unsigned long int start_process_time;
1465 unsigned int last_lcu_idx;
1466 unsigned int decode_timeout_count;
1467 unsigned int timeout_num;
1468#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1469 unsigned char switch_dvlayer_flag;
1470 unsigned char no_switch_dvlayer_count;
1471 unsigned char bypass_dvenl_enable;
1472 unsigned char bypass_dvenl;
1473#endif
1474 unsigned char start_parser_type;
1475 /*start_decoding_flag:
1476 vps/pps/sps/idr info from ucode*/
1477 unsigned char start_decoding_flag;
1478 unsigned char rps_set_id;
1479 unsigned char eos;
1480 int pic_decoded_lcu_idx;
1481 u8 over_decode;
1482 u8 empty_flag;
1483#endif
1484 struct vframe_s vframe_dummy;
1485 char *provider_name;
1486 int index;
1487 struct device *cma_dev;
1488 unsigned char m_ins_flag;
1489 unsigned char dolby_enhance_flag;
1490 unsigned long buf_start;
1491 u32 buf_size;
1492 u32 mv_buf_size;
1493
1494 struct BuffInfo_s work_space_buf_store;
1495 struct BuffInfo_s *work_space_buf;
1496
1497 u8 aux_data_dirty;
1498 u32 prefix_aux_size;
1499 u32 suffix_aux_size;
1500 void *aux_addr;
1501 void *rpm_addr;
1502 void *lmem_addr;
1503 dma_addr_t aux_phy_addr;
1504 dma_addr_t rpm_phy_addr;
1505 dma_addr_t lmem_phy_addr;
1506
1507 unsigned int pic_list_init_flag;
1508 unsigned int use_cma_flag;
1509
1510 unsigned short *rpm_ptr;
1511 unsigned short *lmem_ptr;
1512 unsigned short *debug_ptr;
1513 int debug_ptr_size;
1514 int pic_w;
1515 int pic_h;
1516 int lcu_x_num;
1517 int lcu_y_num;
1518 int lcu_total;
1519 int lcu_size;
1520 int lcu_size_log2;
1521 int lcu_x_num_pre;
1522 int lcu_y_num_pre;
1523 int first_pic_after_recover;
1524
1525 int num_tile_col;
1526 int num_tile_row;
1527 int tile_enabled;
1528 int tile_x;
1529 int tile_y;
1530 int tile_y_x;
1531 int tile_start_lcu_x;
1532 int tile_start_lcu_y;
1533 int tile_width_lcu;
1534 int tile_height_lcu;
1535
1536 int slice_type;
1537 unsigned int slice_addr;
1538 unsigned int slice_segment_addr;
1539
1540 unsigned char interlace_flag;
1541 unsigned char curr_pic_struct;
1542 unsigned char frame_field_info_present_flag;
1543
1544 unsigned short sps_num_reorder_pics_0;
1545 unsigned short misc_flag0;
1546 int m_temporalId;
1547 int m_nalUnitType;
1548 int TMVPFlag;
1549 int isNextSliceSegment;
1550 int LDCFlag;
1551 int m_pocRandomAccess;
1552 int plevel;
1553 int MaxNumMergeCand;
1554
1555 int new_pic;
1556 int new_tile;
1557 int curr_POC;
1558 int iPrevPOC;
1559#ifdef MULTI_INSTANCE_SUPPORT
1560 int decoded_poc;
1561 struct PIC_s *decoding_pic;
1562#endif
1563 int iPrevTid0POC;
1564 int list_no;
1565 int RefNum_L0;
1566 int RefNum_L1;
1567 int ColFromL0Flag;
1568 int LongTerm_Curr;
1569 int LongTerm_Col;
1570 int Col_POC;
1571 int LongTerm_Ref;
1572#ifdef MULTI_INSTANCE_SUPPORT
1573 int m_pocRandomAccess_bak;
1574 int curr_POC_bak;
1575 int iPrevPOC_bak;
1576 int iPrevTid0POC_bak;
1577 unsigned char start_parser_type_bak;
1578 unsigned char start_decoding_flag_bak;
1579 unsigned char rps_set_id_bak;
1580 int pic_decoded_lcu_idx_bak;
1581 int decode_idx_bak;
1582#endif
1583 struct PIC_s *cur_pic;
1584 struct PIC_s *col_pic;
1585 int skip_flag;
1586 int decode_idx;
1587 int slice_idx;
1588 unsigned char have_vps;
1589 unsigned char have_sps;
1590 unsigned char have_pps;
1591 unsigned char have_valid_start_slice;
1592 unsigned char wait_buf;
1593 unsigned char error_flag;
1594 unsigned int error_skip_nal_count;
1595 long used_4k_num;
1596
1597 unsigned char
1598 ignore_bufmgr_error; /* bit 0, for decoding;
1599 bit 1, for displaying
1600 bit 1 must be set if bit 0 is 1*/
1601 int PB_skip_mode;
1602 int PB_skip_count_after_decoding;
1603#ifdef SUPPORT_10BIT
1604 int mem_saving_mode;
1605#endif
1606#ifdef LOSLESS_COMPRESS_MODE
1607 unsigned int losless_comp_body_size;
1608#endif
1609 int pts_mode;
1610 int last_lookup_pts;
1611 int last_pts;
1612 u64 last_lookup_pts_us64;
1613 u64 last_pts_us64;
1614 u32 shift_byte_count_lo;
1615 u32 shift_byte_count_hi;
1616 int pts_mode_switching_count;
1617 int pts_mode_recovery_count;
1618
1619 int pic_num;
1620
1621 /**/
1622 union param_u param;
1623
1624 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1625
1626 struct timer_list timer;
1627 struct BUF_s m_BUF[BUF_POOL_SIZE];
1628 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1629 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1630
1631 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1632 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1633 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1634 struct vframe_s vfpool[VF_POOL_SIZE];
1635
1636 u32 stat;
1637 u32 frame_width;
1638 u32 frame_height;
1639 u32 frame_dur;
1640 u32 frame_ar;
1641 u32 bit_depth_luma;
1642 u32 bit_depth_chroma;
1643 u32 video_signal_type;
1644 u32 video_signal_type_debug;
1645 u32 saved_resolution;
1646 bool get_frame_dur;
1647 u32 error_watchdog_count;
1648 u32 error_skip_nal_wt_cnt;
1649 u32 error_system_watchdog_count;
1650
1651#ifdef DEBUG_PTS
1652 unsigned long pts_missed;
1653 unsigned long pts_hit;
1654#endif
1655 struct dec_sysinfo vh265_amstream_dec_info;
1656 unsigned char init_flag;
1657 unsigned char first_sc_checked;
1658 unsigned char uninit_list;
1659 u32 start_decoding_time;
1660
1661 int show_frame_num;
1662#ifdef USE_UNINIT_SEMA
1663 struct semaphore h265_uninit_done_sema;
1664#endif
1665 int fatal_error;
1666
1667
1668 u32 sei_present_flag;
1669 void *frame_mmu_map_addr;
1670 dma_addr_t frame_mmu_map_phy_addr;
1671 unsigned int mmu_mc_buf_start;
1672 unsigned int mmu_mc_buf_end;
1673 unsigned int mmu_mc_start_4k_adr;
1674 void *mmu_box;
1675 void *bmmu_box;
1676 int mmu_enable;
1677
1678 unsigned int dec_status;
1679
1680 /* data for SEI_MASTER_DISPLAY_COLOR */
1681 unsigned int primaries[3][2];
1682 unsigned int white_point[2];
1683 unsigned int luminance[2];
1684 /* data for SEI_CONTENT_LIGHT_LEVEL */
1685 unsigned int content_light_level[2];
1686
1687 struct PIC_s *pre_top_pic;
1688 struct PIC_s *pre_bot_pic;
1689
1690#ifdef MULTI_INSTANCE_SUPPORT
1691 int double_write_mode;
1692 int dynamic_buf_num_margin;
1693 int start_action;
1694 int save_buffer_mode;
1695#endif
1696 u32 i_only;
1697 struct list_head log_list;
1698 u32 ucode_pause_pos;
1699 u32 start_shift_bytes;
1700
1701 u32 vf_pre_count;
1702 u32 vf_get_count;
1703 u32 vf_put_count;
1704#ifdef SWAP_HEVC_UCODE
1705 dma_addr_t mc_dma_handle;
1706 void *mc_cpu_addr;
1707 int swap_size;
1708 ulong swap_addr;
1709#endif
1710#ifdef DETREFILL_ENABLE
1711 dma_addr_t detbuf_adr;
1712 u16 *detbuf_adr_virt;
1713 u8 delrefill_check;
1714#endif
1715 u8 head_error_flag;
1716 int valve_count;
1717 struct firmware_s *fw;
1718 int max_pic_w;
1719 int max_pic_h;
1720#ifdef AGAIN_HAS_THRESHOLD
1721 u8 next_again_flag;
1722 u32 pre_parser_wr_ptr;
1723#endif
1724 u32 ratio_control;
1725 u32 first_pic_flag;
1726 u32 decode_size;
1727 struct mutex chunks_mutex;
1728 int need_cache_size;
1729 u64 sc_start_time;
1730 u32 skip_first_nal;
1731 bool is_swap;
1732 bool is_4k;
1733 int frameinfo_enable;
1734 struct vframe_qos_s vframe_qos;
1735 bool is_used_v4l;
1736 void *v4l2_ctx;
1737 bool v4l_params_parsed;
1738 u32 mem_map_mode;
1739} /*hevc_stru_t */;
1740
1741#ifdef AGAIN_HAS_THRESHOLD
1742u32 again_threshold;
1743#endif
1744#ifdef SEND_LMEM_WITH_RPM
1745#define get_lmem_params(hevc, ladr) \
1746 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
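/*
 * The macro remaps index i to (i & ~0x3) + (3 - (i & 0x3)), i.e. the
 * four 16-bit words of each 64-bit group are read in reverse order
 * (presumably matching the word order of the dumped lmem image).
 * Worked example: ladr = 0x41 -> 0x40 + (3 - 1) = 0x42,
 * ladr = 0x43 -> 0x40 + (3 - 3) = 0x40.
 */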
1747
1748
1749static int get_frame_mmu_map_size(void)
1750{
1751 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1752 return (MAX_FRAME_8K_NUM * 4);
1753
1754 return (MAX_FRAME_4K_NUM * 4);
1755}
1756
1757static int is_oversize(int w, int h)
1758{
1759 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1760 MAX_SIZE_8K : MAX_SIZE_4K;
1761
1762 if (w < 0 || h < 0)
1763 return true;
1764
1765 if (h != 0 && (w > max / h))
1766 return true;
1767
1768 return false;
1769}
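/*
 * Note: the (w > max / h) form avoids a potential overflow of w * h and
 * still rejects oversize pictures. Worked example with MAX_SIZE_4K
 * (4096 * 2304): for h = 2160, max / h = 4369, so w = 4096 is accepted
 * while w = 4400 is rejected.
 */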
1770
1771void check_head_error(struct hevc_state_s *hevc)
1772{
1773#define pcm_enabled_flag 0x040
1774#define pcm_sample_bit_depth_luma 0x041
1775#define pcm_sample_bit_depth_chroma 0x042
1776 hevc->head_error_flag = 0;
1777 if ((error_handle_policy & 0x40) == 0)
1778 return;
1779 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1780 uint16_t pcm_depth_luma = get_lmem_params(
1781 hevc, pcm_sample_bit_depth_luma);
1782 uint16_t pcm_sample_chroma = get_lmem_params(
1783 hevc, pcm_sample_bit_depth_chroma);
1784 if (pcm_depth_luma >
1785 hevc->bit_depth_luma ||
1786 pcm_sample_chroma >
1787 hevc->bit_depth_chroma) {
1788 hevc_print(hevc, 0,
1789 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1790 pcm_depth_luma,
1791 pcm_sample_chroma,
1792 hevc->bit_depth_luma,
1793 hevc->bit_depth_chroma);
1794 hevc->head_error_flag = 1;
1795 }
1796 }
1797}
1798#endif
1799
1800#ifdef SUPPORT_10BIT
 1801/* Lossless compression body buffer size: 4KB per 64x32 block (jt) */
1802static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1803 int width, int height, int mem_saving_mode)
1804{
1805 int width_x64;
1806 int height_x32;
1807 int bsize;
1808
1809 width_x64 = width + 63;
1810 width_x64 >>= 6;
1811
1812 height_x32 = height + 31;
1813 height_x32 >>= 5;
1814 if (mem_saving_mode == 1 && hevc->mmu_enable)
1815 bsize = 3200 * width_x64 * height_x32;
1816 else if (mem_saving_mode == 1)
1817 bsize = 3072 * width_x64 * height_x32;
1818 else
1819 bsize = 4096 * width_x64 * height_x32;
1820
1821 return bsize;
1822}
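/*
 * Worked example (illustrative): for a 3840x2160 picture with
 * mem_saving_mode off, width_x64 = 60, height_x32 = 68 and
 * bsize = 4096 * 60 * 68 = 16711680 bytes (~16 MB); with
 * mem_saving_mode = 1 and the MMU enabled it drops to
 * 3200 * 60 * 68 = 13056000 bytes.
 */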
1823
 1824/* Lossless compression header buffer size: 32 bytes per 128x64 block (jt) */
1825static int compute_losless_comp_header_size(int width, int height)
1826{
1827 int width_x128;
1828 int height_x64;
1829 int hsize;
1830
1831 width_x128 = width + 127;
1832 width_x128 >>= 7;
1833
1834 height_x64 = height + 63;
1835 height_x64 >>= 6;
1836
1837 hsize = 32*width_x128*height_x64;
1838
1839 return hsize;
1840}
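/*
 * Worked example (illustrative): for 3840x2160, width_x128 = 30,
 * height_x64 = 34, so hsize = 32 * 30 * 34 = 32640 bytes.
 */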
1841#endif
1842
1843static int add_log(struct hevc_state_s *hevc,
1844 const char *fmt, ...)
1845{
1846#define HEVC_LOG_BUF 196
1847 struct debug_log_s *log_item;
1848 unsigned char buf[HEVC_LOG_BUF];
1849 int len = 0;
1850 va_list args;
1851 mutex_lock(&vh265_log_mutex);
1852 va_start(args, fmt);
1853 len = sprintf(buf, "<%ld> <%05d> ",
1854 jiffies, hevc->decode_idx);
1855 len += vsnprintf(buf + len,
1856 HEVC_LOG_BUF - len, fmt, args);
1857 va_end(args);
1858 log_item = kmalloc(
1859 sizeof(struct debug_log_s) + len,
1860 GFP_KERNEL);
1861 if (log_item) {
1862 INIT_LIST_HEAD(&log_item->list);
1863 strcpy(&log_item->data, buf);
1864 list_add_tail(&log_item->list,
1865 &hevc->log_list);
1866 }
1867 mutex_unlock(&vh265_log_mutex);
1868 return 0;
1869}
1870
1871static void dump_log(struct hevc_state_s *hevc)
1872{
1873 int i = 0;
1874 struct debug_log_s *log_item, *tmp;
1875 mutex_lock(&vh265_log_mutex);
1876 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1877 hevc_print(hevc, 0,
1878 "[LOG%04d]%s\n",
1879 i++,
1880 &log_item->data);
1881 list_del(&log_item->list);
1882 kfree(log_item);
1883 }
1884 mutex_unlock(&vh265_log_mutex);
1885}
1886
1887static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1888 struct PIC_s *pic)
1889{
1890 if (pic->error_mark
1891 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1892 return 1;
1893 return 0;
1894}
1895
1896static int get_pic_poc(struct hevc_state_s *hevc,
1897 unsigned int idx)
1898{
1899 if (idx != 0xff
1900 && idx < MAX_REF_PIC_NUM
1901 && hevc->m_PIC[idx])
1902 return hevc->m_PIC[idx]->POC;
1903 return INVALID_POC;
1904}
1905
1906#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1907static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1908{
1909 return (hevc->m_ins_flag &&
1910 ((double_write_mode & 0x80000000) == 0)) ?
1911 hevc->double_write_mode :
1912 (double_write_mode & 0x7fffffff);
1913}
1914
1915static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1916{
1917 return (hevc->m_ins_flag &&
1918 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1919 hevc->dynamic_buf_num_margin :
1920 (dynamic_buf_num_margin & 0x7fffffff);
1921}
1922#endif
1923
1924static int get_double_write_mode(struct hevc_state_s *hevc)
1925{
1926 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1927 int w = hevc->pic_w;
1928 int h = hevc->pic_h;
1929 u32 dw = 0x1; /*1:1*/
1930 switch (valid_dw_mode) {
1931 case 0x100:
1932 if (w > 1920 && h > 1088)
1933 dw = 0x4; /*1:2*/
1934 break;
1935 case 0x200:
1936 if (w > 1920 && h > 1088)
1937 dw = 0x2; /*1:4*/
1938 break;
1939 case 0x300:
1940 if (w > 1280 && h > 720)
1941 dw = 0x4; /*1:2*/
1942 break;
1943 default:
1944 dw = valid_dw_mode;
1945 break;
1946 }
1947 return dw;
1948}
1949
1950static int get_double_write_ratio(struct hevc_state_s *hevc,
1951 int dw_mode)
1952{
1953 int ratio = 1;
1954 if ((dw_mode == 2) ||
1955 (dw_mode == 3))
1956 ratio = 4;
1957 else if (dw_mode == 4)
1958 ratio = 2;
1959 return ratio;
1960}
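
/*
 * Double write mode summary, as implemented above: mode 1 writes a full-size
 * uncompressed copy, modes 2/3 a 1:4 down-scaled copy, mode 4 a 1:2 copy.
 * Modes 0x100/0x200/0x300 pick the scaling from the coded resolution:
 * 0x100 and 0x200 down-scale (to 1:2 and 1:4 respectively) only above
 * 1920x1088, while 0x300 down-scales to 1:2 above 1280x720.  Mode 0x10,
 * tested elsewhere as (dw_mode & 0x10), appears to mean "double write only",
 * i.e. no compressed reference buffer.
 */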
1961#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1962static unsigned char get_idx(struct hevc_state_s *hevc)
1963{
1964 return hevc->index;
1965}
1966#endif
1967
1968#undef pr_info
1969#define pr_info printk
1970static int hevc_print(struct hevc_state_s *hevc,
1971 int flag, const char *fmt, ...)
1972{
1973#define HEVC_PRINT_BUF 256
1974 unsigned char buf[HEVC_PRINT_BUF];
1975 int len = 0;
1976#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1977 if (hevc == NULL ||
1978 (flag == 0) ||
1979 ((debug_mask &
1980 (1 << hevc->index))
1981 && (debug & flag))) {
1982#endif
1983 va_list args;
1984
1985 va_start(args, fmt);
1986 if (hevc)
1987 len = sprintf(buf, "[%d]", hevc->index);
1988 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1989 pr_debug("%s", buf);
1990 va_end(args);
1991#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1992 }
1993#endif
1994 return 0;
1995}
1996
1997static int hevc_print_cont(struct hevc_state_s *hevc,
1998 int flag, const char *fmt, ...)
1999{
2000 unsigned char buf[HEVC_PRINT_BUF];
2001 int len = 0;
2002#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2003 if (hevc == NULL ||
2004 (flag == 0) ||
2005 ((debug_mask &
2006 (1 << hevc->index))
2007 && (debug & flag))) {
2008#endif
2009 va_list args;
2010
2011 va_start(args, fmt);
2012 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2013 pr_info("%s", buf);
2014 va_end(args);
2015#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2016 }
2017#endif
2018 return 0;
2019}
2020
2021static void put_mv_buf(struct hevc_state_s *hevc,
2022 struct PIC_s *pic);
2023
2024static void update_vf_memhandle(struct hevc_state_s *hevc,
2025 struct vframe_s *vf, struct PIC_s *pic);
2026
2027static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2028
2029static void release_aux_data(struct hevc_state_s *hevc,
2030 struct PIC_s *pic);
2031static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2032
2033#ifdef MULTI_INSTANCE_SUPPORT
2034static void backup_decode_state(struct hevc_state_s *hevc)
2035{
2036 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2037 hevc->curr_POC_bak = hevc->curr_POC;
2038 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2039 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2040 hevc->start_parser_type_bak = hevc->start_parser_type;
2041 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2042 hevc->rps_set_id_bak = hevc->rps_set_id;
2043 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2044 hevc->decode_idx_bak = hevc->decode_idx;
2045
2046}
2047
2048static void restore_decode_state(struct hevc_state_s *hevc)
2049{
2050 struct vdec_s *vdec = hw_to_vdec(hevc);
2051 if (!vdec_has_more_input(vdec)) {
2052 hevc->pic_decoded_lcu_idx =
2053 READ_VREG(HEVC_PARSER_LCU_START)
2054 & 0xffffff;
2055 return;
2056 }
2057 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2058 "%s: discard pic index 0x%x\n",
2059 __func__, hevc->decoding_pic ?
2060 hevc->decoding_pic->index : 0xff);
2061 if (hevc->decoding_pic) {
2062 hevc->decoding_pic->error_mark = 0;
2063 hevc->decoding_pic->output_ready = 0;
2064 hevc->decoding_pic->output_mark = 0;
2065 hevc->decoding_pic->referenced = 0;
2066 hevc->decoding_pic->POC = INVALID_POC;
2067 put_mv_buf(hevc, hevc->decoding_pic);
2068 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2069 release_aux_data(hevc, hevc->decoding_pic);
2070 hevc->decoding_pic = NULL;
2071 }
2072 hevc->decode_idx = hevc->decode_idx_bak;
2073 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2074 hevc->curr_POC = hevc->curr_POC_bak;
2075 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2076 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2077 hevc->start_parser_type = hevc->start_parser_type_bak;
2078 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2079 hevc->rps_set_id = hevc->rps_set_id_bak;
2080 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2081
2082 if (hevc->pic_list_init_flag == 1)
2083 hevc->pic_list_init_flag = 0;
2084 /*if (hevc->decode_idx == 0)
2085 hevc->start_decoding_flag = 0;*/
2086
2087 hevc->slice_idx = 0;
2088 hevc->used_4k_num = -1;
2089}
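
/*
 * backup_decode_state() snapshots the parser/POC bookkeeping before a new
 * picture is started; restore_decode_state() rolls it back when a decode is
 * abandoned while more input is still expected, discarding the partially
 * decoded picture and returning its MV/MMU/aux buffers so the picture can
 * presumably be re-decoded once the remaining data arrives.
 */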
2090#endif
2091
2092static void hevc_init_stru(struct hevc_state_s *hevc,
2093 struct BuffInfo_s *buf_spec_i)
2094{
2095 int i;
2096 INIT_LIST_HEAD(&hevc->log_list);
2097 hevc->work_space_buf = buf_spec_i;
2098 hevc->prefix_aux_size = 0;
2099 hevc->suffix_aux_size = 0;
2100 hevc->aux_addr = NULL;
2101 hevc->rpm_addr = NULL;
2102 hevc->lmem_addr = NULL;
2103
2104 hevc->curr_POC = INVALID_POC;
2105
2106 hevc->pic_list_init_flag = 0;
2107 hevc->use_cma_flag = 0;
2108 hevc->decode_idx = 0;
2109 hevc->slice_idx = 0;
2110 hevc->new_pic = 0;
2111 hevc->new_tile = 0;
2112 hevc->iPrevPOC = 0;
2113 hevc->list_no = 0;
2114 /* int m_uiMaxCUWidth = 1<<7; */
2115 /* int m_uiMaxCUHeight = 1<<7; */
2116 hevc->m_pocRandomAccess = MAX_INT;
2117 hevc->tile_enabled = 0;
2118 hevc->tile_x = 0;
2119 hevc->tile_y = 0;
2120 hevc->iPrevTid0POC = 0;
2121 hevc->slice_addr = 0;
2122 hevc->slice_segment_addr = 0;
2123 hevc->skip_flag = 0;
2124 hevc->misc_flag0 = 0;
2125
2126 hevc->cur_pic = NULL;
2127 hevc->col_pic = NULL;
2128 hevc->wait_buf = 0;
2129 hevc->error_flag = 0;
2130 hevc->head_error_flag = 0;
2131 hevc->error_skip_nal_count = 0;
2132 hevc->have_vps = 0;
2133 hevc->have_sps = 0;
2134 hevc->have_pps = 0;
2135 hevc->have_valid_start_slice = 0;
2136
2137 hevc->pts_mode = PTS_NORMAL;
2138 hevc->last_pts = 0;
2139 hevc->last_lookup_pts = 0;
2140 hevc->last_pts_us64 = 0;
2141 hevc->last_lookup_pts_us64 = 0;
2142 hevc->pts_mode_switching_count = 0;
2143 hevc->pts_mode_recovery_count = 0;
2144
2145 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2146 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2147 if (hevc->PB_skip_mode == 0)
2148 hevc->ignore_bufmgr_error = 0x1;
2149 else
2150 hevc->ignore_bufmgr_error = 0x0;
2151
2152 if (hevc->is_used_v4l) {
2153 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2154 if (hevc->m_PIC[i] != NULL) {
2155				memset(hevc->m_PIC[i], 0, sizeof(struct PIC_s));
2156 hevc->m_PIC[i]->index = i;
2157 }
2158 }
2159 }
2160
2161 hevc->pic_num = 0;
2162 hevc->lcu_x_num_pre = 0;
2163 hevc->lcu_y_num_pre = 0;
2164 hevc->first_pic_after_recover = 0;
2165
2166 hevc->pre_top_pic = NULL;
2167 hevc->pre_bot_pic = NULL;
2168
2169 hevc->sei_present_flag = 0;
2170 hevc->valve_count = 0;
2171 hevc->first_pic_flag = 0;
2172#ifdef MULTI_INSTANCE_SUPPORT
2173 hevc->decoded_poc = INVALID_POC;
2174 hevc->start_process_time = 0;
2175 hevc->last_lcu_idx = 0;
2176 hevc->decode_timeout_count = 0;
2177 hevc->timeout_num = 0;
2178 hevc->eos = 0;
2179 hevc->pic_decoded_lcu_idx = -1;
2180 hevc->over_decode = 0;
2181 hevc->used_4k_num = -1;
2182 hevc->start_decoding_flag = 0;
2183 hevc->rps_set_id = 0;
2184 backup_decode_state(hevc);
2185#endif
2186#ifdef DETREFILL_ENABLE
2187 hevc->detbuf_adr = 0;
2188 hevc->detbuf_adr_virt = NULL;
2189#endif
2190}
2191
2192static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2193static int H265_alloc_mmu(struct hevc_state_s *hevc,
2194 struct PIC_s *new_pic, unsigned short bit_depth,
2195 unsigned int *mmu_index_adr);
2196
2197#ifdef DETREFILL_ENABLE
2198#define DETREFILL_BUF_SIZE (4 * 0x4000)
2199#define HEVC_SAO_DBG_MODE0 0x361e
2200#define HEVC_SAO_DBG_MODE1 0x361f
2201#define HEVC_SAO_CTRL10 0x362e
2202#define HEVC_SAO_CTRL11 0x362f
2203static int init_detrefill_buf(struct hevc_state_s *hevc)
2204{
2205 if (hevc->detbuf_adr_virt)
2206 return 0;
2207
2208 hevc->detbuf_adr_virt =
2209 (void *)dma_alloc_coherent(amports_get_dma_device(),
2210 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2211 GFP_KERNEL);
2212
2213 if (hevc->detbuf_adr_virt == NULL) {
2214		pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2215 return -1;
2216 }
2217 return 0;
2218}
2219
2220static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2221{
2222 if (hevc->detbuf_adr_virt) {
2223 dma_free_coherent(amports_get_dma_device(),
2224 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2225 hevc->detbuf_adr);
2226
2227 hevc->detbuf_adr_virt = NULL;
2228 hevc->detbuf_adr = 0;
2229 }
2230}
2231
2232/*
2233 * convert uncompressed frame buffer data from/to ddr
2234 */
2235static void convUnc8x4blk(uint16_t* blk8x4Luma,
2236 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2237{
2238 if (direction == 0) {
2239 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2240 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2241 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2242 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2243 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2244 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2245 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2246 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2247 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2248 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2249 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2250 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2251 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2252 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2253 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2254 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2255 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2256 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2257
2258 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2259 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2260 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2261 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2262 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2263 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2264 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2265 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2266 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2267 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2268 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2269 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2270 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2271 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2272 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2273 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2274 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2275 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2276
2277 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2278 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2279 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2280 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2281 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2282 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2283 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2284 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2285 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2286 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2287 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2288 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2289 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2290 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2291 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2292 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2293 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2294 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2295
2296 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2297 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2298 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2299 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2300 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2301 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2302 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2303 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2304 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2305 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2306 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2307 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2308 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2309 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2310 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2311 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2312 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2313 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2314 } else {
2315 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2316 blk8x4Luma[3 + 0 * 8];
2317 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2318 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2319 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2320 (blk8x4Luma[3 + 3 * 8] >> 2);
2321 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2322 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2323 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2324 (blk8x4Luma[7 + 2 * 8] >>4);
2325 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2326 blk8x4Cb[0 + 0 * 4];
2327 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2328 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2329 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2330
2331 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2332 blk8x4Luma[0 + 0 * 8];
2333 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2334 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2335 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2336 (blk8x4Luma[0 + 1 * 8] >> 2);
2337 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2338 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2339 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2340 (blk8x4Luma[0 + 2 * 8] >>4);
2341 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2342 blk8x4Luma[2 + 2 * 8];
2343 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2344 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2345 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2346
2347 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2348 blk8x4Luma[4 + 0 * 8];
2349 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2350 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2351 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2352 (blk8x4Luma[4 + 1 * 8] >> 2);
2353 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2354 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2355 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2356 (blk8x4Luma[4 + 2 * 8] >>4);
2357 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2358 blk8x4Luma[6 + 2 * 8];
2359 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2360 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2361 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2362
2363 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2364 blk8x4Cb[1 + 0 * 4];
2365 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2366 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2367 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2368 (blk8x4Cr[2 + 0 * 4] >> 2);
2369 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2370 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2371 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2372 (blk8x4Cb[1 + 1 * 4] >>4);
2373 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2374 blk8x4Cb[2 + 1 * 4];
2375 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2376 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2377 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2378 }
2379}
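
/*
 * Packing sketch for convUnc8x4blk(): each group of eight 16-bit cmBodyBuf
 * words carries twelve 10-bit samples back to back (120 of 128 bits used,
 * the top 8 bits of the last word left zero), and one 8x4 luma block plus
 * its 4x2 Cb and Cr samples are spread over four such groups.  direction 0
 * unpacks DDR data into the blk8x4 arrays, direction 1 packs them back.
 */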
2380
2381static void corrRefillWithAmrisc (
2382 struct hevc_state_s *hevc,
2383 uint32_t cmHeaderBaseAddr,
2384 uint32_t picWidth,
2385 uint32_t ctuPosition)
2386{
2387 int32_t i;
2388 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2389 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2390 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2391
2392 uint16_t cmBodyBuf[32 * 18];
2393
2394 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2395 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2396 uint32_t stride64x64 = pic_width_x64 * 128;
2397 uint32_t addr_offset64x64_abv = stride64x64 *
2398 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2399 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2400 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2401 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2402 unsigned int tmpData32;
2403
2404 uint16_t blkBuf0Y[32];
2405 uint16_t blkBuf0Cb[8];
2406 uint16_t blkBuf0Cr[8];
2407 uint16_t blkBuf1Y[32];
2408 uint16_t blkBuf1Cb[8];
2409 uint16_t blkBuf1Cr[8];
2410 int32_t blkBufCnt = 0;
2411
2412 int32_t blkIdx;
2413
2414 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2415 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2416 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2417 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2418
2419 for (i = 0; i < 32 * 18; i++)
2420 cmBodyBuf[i] = 0;
2421
2422 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2423 "%s, %d\n", __func__, __LINE__);
2424 do {
2425 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2426 } while (tmpData32);
2427 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2428 "%s, %d\n", __func__, __LINE__);
2429
2430 hevc_print(hevc, H265_DEBUG_DETAIL,
2431 "cmBodyBuf from detbuf:\n");
2432 for (i = 0; i < 32 * 18; i++) {
2433 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2434 if (get_dbg_flag(hevc) &
2435 H265_DEBUG_DETAIL) {
2436 if ((i & 0xf) == 0)
2437 hevc_print_cont(hevc, 0, "\n");
2438 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2439 }
2440 }
2441 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2442
2443 for (i = 0; i < 32; i++)
2444 blkBuf0Y[i] = 0;
2445 for (i = 0; i < 8; i++)
2446 blkBuf0Cb[i] = 0;
2447 for (i = 0; i < 8; i++)
2448 blkBuf0Cr[i] = 0;
2449 for (i = 0; i < 32; i++)
2450 blkBuf1Y[i] = 0;
2451 for (i = 0; i < 8; i++)
2452 blkBuf1Cb[i] = 0;
2453 for (i = 0; i < 8; i++)
2454 blkBuf1Cr[i] = 0;
2455
2456 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2457 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2458 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2459 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2460 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2461 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2462 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2463
2464 if (!aboveCtuAvailable && inAboveCtu)
2465 continue;
2466
2467 /* detRefillBuf --> 8x4block*/
2468 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2469
2470 if (restoreEnable) {
2471 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2472 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2473 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2474 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2475 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2476 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2477 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2478 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2479 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2480 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2481 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2482 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2483 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2484 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2485 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2486 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2487 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2488 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2489 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2490 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2491 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2492 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2493 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2494 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2495 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2496 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2497 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2498 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2499 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2500 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2501 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2502 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2503
2504 /*Store data back to DDR*/
2505 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2506 }
2507
2508 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2509 }
2510
2511 hevc_print(hevc, H265_DEBUG_DETAIL,
2512 "cmBodyBuf to detbuf:\n");
2513 for (i = 0; i < 32 * 18; i++) {
2514 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2515 if (get_dbg_flag(hevc) &
2516 H265_DEBUG_DETAIL) {
2517 if ((i & 0xf) == 0)
2518 hevc_print_cont(hevc, 0, "\n");
2519 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2520 }
2521 }
2522 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2523
2524 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2525 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2526 "%s, %d\n", __func__, __LINE__);
2527 do {
2528 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2529 } while (tmpData32);
2530 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2531 "%s, %d\n", __func__, __LINE__);
2532}
2533
2534static void delrefill(struct hevc_state_s *hevc)
2535{
2536 /*
2537 * corrRefill
2538 */
2539 /*HEVC_SAO_DBG_MODE0: picGlobalVariable
2540 [31:30]error number
2541 [29:20]error2([9:7]tilex[6:0]ctuy)
2542 [19:10]error1 [9:0]error0*/
2543 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2544 uint32_t errorIdx;
2545 uint32_t errorNum = (detResult>>30);
2546
2547 if (detResult) {
2548 hevc_print(hevc, H265_DEBUG_BUFMGR,
2549 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2550 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2551 uint32_t errorPos = errorIdx * 10;
2552 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2553 uint32_t tilex = (errorResult >> 7) - 1;
2554 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2555 + hevc->m_tile[0][tilex].width - 1;
2556 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2557 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2558 hevc_print(hevc, H265_DEBUG_BUFMGR,
2559 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2560				errorIdx, tilex, ctux, ctux, ctuy, ctuy);
2561 corrRefillWithAmrisc(
2562 hevc,
2563 (uint32_t)hevc->cur_pic->header_adr,
2564 hevc->pic_w,
2565 ctuPosition);
2566 }
2567
2568 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2569 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2570 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2571 }
2572}
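
/*
 * Refill flow: the ucode reports up to three suspect CTU positions in
 * HEVC_ASSIST_SCRATCH_3 (bits [31:30] hold the count, three 10-bit fields
 * hold tilex/ctuy).  For each report, corrRefillWithAmrisc() pulls the
 * affected compressed body data into detbuf via the SAO debug interface,
 * patches the 8x4 blocks with values derived from their neighbours, writes
 * the result back, and the status registers are then cleared.
 */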
2573#endif
2574
2575static void get_rpm_param(union param_u *params)
2576{
2577 int i;
2578 unsigned int data32;
2579
2580 for (i = 0; i < 128; i++) {
2581 do {
2582 data32 = READ_VREG(RPM_CMD_REG);
2583 /* hevc_print(hevc, 0, "%x\n", data32); */
2584 } while ((data32 & 0x10000) == 0);
2585 params->l.data[i] = data32 & 0xffff;
2586 /* hevc_print(hevc, 0, "%x\n", data32); */
2587 WRITE_VREG(RPM_CMD_REG, 0);
2588 }
2589}
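
/*
 * RPM handshake: the 128 16-bit rpm parameters are pushed one word at a
 * time through RPM_CMD_REG (presumably by the ucode); bit 16 flags that a
 * value is ready, the low 16 bits carry the data, and writing 0 back
 * acknowledges the word so the next one can be sent.
 */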
2590
2591static int get_free_buf_idx(struct hevc_state_s *hevc)
2592{
2593 int index = INVALID_IDX;
2594 struct PIC_s *pic;
2595 int i;
2596
2597 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2598 pic = hevc->m_PIC[i];
2599 if (pic == NULL ||
2600 pic->index == -1 ||
2601 pic->BUF_index == -1)
2602 continue;
2603
2604 if (pic->output_mark == 0 &&
2605 pic->referenced == 0 &&
2606 pic->output_ready == 0 &&
2607 pic->cma_alloc_addr) {
2608 pic->output_ready = 1;
2609 index = i;
2610 break;
2611 }
2612 }
2613
2614 return index;
2615}
2616
2617static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2618{
2619 int i;
2620 struct PIC_s *pic;
2621 struct PIC_s *ret_pic = NULL;
2622 if (POC == INVALID_POC)
2623 return NULL;
2624 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2625 pic = hevc->m_PIC[i];
2626 if (pic == NULL || pic->index == -1 ||
2627 pic->BUF_index == -1)
2628 continue;
2629 if (pic->POC == POC) {
2630 if (ret_pic == NULL)
2631 ret_pic = pic;
2632 else {
2633 if (pic->decode_idx > ret_pic->decode_idx)
2634 ret_pic = pic;
2635 }
2636 }
2637 }
2638 return ret_pic;
2639}
2640
2641static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2642{
2643 int i;
2644 struct PIC_s *pic;
2645 struct PIC_s *ret_pic = NULL;
2646
2647 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2648 pic = hevc->m_PIC[i];
2649 if (pic == NULL || pic->index == -1 ||
2650 pic->BUF_index == -1)
2651 continue;
2652 if ((pic->POC == POC) && (pic->referenced)) {
2653 if (ret_pic == NULL)
2654 ret_pic = pic;
2655 else {
2656 if (pic->decode_idx > ret_pic->decode_idx)
2657 ret_pic = pic;
2658 }
2659 }
2660 }
2661
2662 if (ret_pic == NULL) {
2663 if (get_dbg_flag(hevc)) {
2664 hevc_print(hevc, 0,
2665 "Wrong, POC of %d is not in referenced list\n",
2666 POC);
2667 }
2668 ret_pic = get_pic_by_POC(hevc, POC);
2669 }
2670 return ret_pic;
2671}
2672
2673static unsigned int log2i(unsigned int val)
2674{
2675 unsigned int ret = -1;
2676
2677 while (val != 0) {
2678 val >>= 1;
2679 ret++;
2680 }
2681 return ret;
2682}
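
/* e.g. log2i(64) == 6 and log2i(1) == 0; log2i(0) wraps around to UINT_MAX. */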
2683
2684static int init_buf_spec(struct hevc_state_s *hevc);
2685
2686static bool v4l_is_there_vframe_bound(struct hevc_state_s *hevc)
2687{
2688 int i;
2689
2690 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2691 struct PIC_s *pic = hevc->m_PIC[i];
2692
2693 if (pic && pic->vframe_bound)
2694 return true;
2695 }
2696
2697 return false;
2698}
2699
2700static void v4l_mmu_buffer_release(struct hevc_state_s *hevc)
2701{
2702 int i;
2703
2704 /* release workspace */
2705 if (hevc->bmmu_box)
2706 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2707 BMMU_WORKSPACE_ID);
2708	/*
2709	 * Only when a vframe has come back to the driver can we be sure that
2710	 * the vframe and its fd are still related.  If playback exits, buffers
2711	 * still bound to a vframe must be released by the upper application
2712	 * when the fd is closed; the other buffers are released by the driver.
2713	 */
2714 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2715 struct PIC_s *pic = hevc->m_PIC[i];
2716
2717 if (pic && !pic->vframe_bound) {
2718 if (hevc->bmmu_box)
2719 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2720 VF_BUFFER_IDX(i));
2721 if (hevc->mmu_box)
2722 decoder_mmu_box_free_idx(hevc->mmu_box, i);
2723
2724 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
2725 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
2726 __func__, i, hevc->bmmu_box, hevc->mmu_box);
2727 }
2728 }
2729}
2730
2731static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2732{
2733 if (hevc->is_used_v4l &&
2734 v4l_is_there_vframe_bound(hevc)) {
2735 if (get_double_write_mode(hevc) != 0x10) {
2736 v4l_mmu_buffer_release(hevc);
2737 return;
2738 }
2739 }
2740
2741 if (hevc->mmu_box)
2742 decoder_mmu_box_free(hevc->mmu_box);
2743 hevc->mmu_box = NULL;
2744
2745 if (hevc->bmmu_box)
2746 decoder_bmmu_box_free(hevc->bmmu_box);
2747 hevc->bmmu_box = NULL;
2748}
2749static int init_mmu_buffers(struct hevc_state_s *hevc)
2750{
2751 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2752 CODEC_MM_FLAGS_TVP : 0;
2753 int buf_size = 64;
2754
2755 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2756 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2757 buf_size = 24;
2758 }
2759
2760 if (get_dbg_flag(hevc)) {
2761 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2762 __func__, hevc->max_pic_w, hevc->max_pic_h);
2763 }
2764
2765 hevc->need_cache_size = buf_size * SZ_1M;
2766 hevc->sc_start_time = get_jiffies_64();
2767 if (hevc->mmu_enable
2768 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2769 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2770 hevc->index,
2771 MAX_REF_PIC_NUM,
2772 buf_size * SZ_1M,
2773 tvp_flag
2774 );
2775 if (!hevc->mmu_box) {
2776 pr_err("h265 alloc mmu box failed!!\n");
2777 return -1;
2778 }
2779 }
2780
2781 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2782 hevc->index,
2783 BMMU_MAX_BUFFERS,
2784 4 + PAGE_SHIFT,
2785 CODEC_MM_FLAGS_CMA_CLEAR |
2786 CODEC_MM_FLAGS_FOR_VDECODER |
2787 tvp_flag);
2788 if (!hevc->bmmu_box) {
2789 if (hevc->mmu_box)
2790 decoder_mmu_box_free(hevc->mmu_box);
2791 hevc->mmu_box = NULL;
2792		pr_err("h265 alloc bmmu box failed!!\n");
2793 return -1;
2794 }
2795 return 0;
2796}
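
/*
 * Cache sizing note: the frame cache size passed to decoder_mmu_box_alloc_box()
 * is 24 MB when max_pic_w * max_pic_h fits within 1920x1088 and 64 MB
 * otherwise (also recorded in need_cache_size); the BMMU box is created
 * with BMMU_MAX_BUFFERS slots.
 */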
2797
2798struct buf_stru_s
2799{
2800 int lcu_total;
2801 int mc_buffer_size_h;
2802 int mc_buffer_size_u_v_h;
2803};
2804
2805#ifndef MV_USE_FIXED_BUF
2806static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2807{
2808 int i;
2809 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2810 if (hevc->m_mv_BUF[i].start_adr) {
2811 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2812 hevc_print(hevc, 0,
2813 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2814 i, hevc->m_mv_BUF[i].start_adr,
2815 hevc->m_mv_BUF[i].size,
2816 hevc->m_mv_BUF[i].used_flag);
2817 decoder_bmmu_box_free_idx(
2818 hevc->bmmu_box,
2819 MV_BUFFER_IDX(i));
2820 hevc->m_mv_BUF[i].start_adr = 0;
2821 hevc->m_mv_BUF[i].size = 0;
2822 hevc->m_mv_BUF[i].used_flag = 0;
2823 }
2824 }
2825 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2826 if (hevc->m_PIC[i] != NULL)
2827 hevc->m_PIC[i]->mv_buf_index = -1;
2828 }
2829}
2830
2831static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2832{
2833 int ret = 0;
2834 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2835 if (decoder_bmmu_box_alloc_buf_phy
2836 (hevc->bmmu_box,
2837 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2838 DRIVER_NAME,
2839 &hevc->m_mv_BUF[i].start_adr) < 0) {
2840 hevc->m_mv_BUF[i].start_adr = 0;
2841 ret = -1;
2842 } else {
2843 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2844 hevc->m_mv_BUF[i].used_flag = 0;
2845 ret = 0;
2846 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2847 hevc_print(hevc, 0,
2848 "MV Buffer %d: start_adr %p size %x\n",
2849 i,
2850 (void *)hevc->m_mv_BUF[i].start_adr,
2851 hevc->m_mv_BUF[i].size);
2852 }
2853 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2854 void *mem_start_virt;
2855 mem_start_virt =
2856 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2857 if (mem_start_virt) {
2858 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2859 codec_mm_dma_flush(mem_start_virt,
2860 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2861 } else {
2862 mem_start_virt = codec_mm_vmap(
2863 hevc->m_mv_BUF[i].start_adr,
2864 hevc->m_mv_BUF[i].size);
2865 if (mem_start_virt) {
2866 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2867 codec_mm_dma_flush(mem_start_virt,
2868 hevc->m_mv_BUF[i].size,
2869 DMA_TO_DEVICE);
2870 codec_mm_unmap_phyaddr(mem_start_virt);
2871 } else {
2872					/* no virtual address for TVP (secure) playback;
2873					 * clearing may need to be done by the ucode. */
2874 pr_err("ref %s mem_start_virt failed\n", __func__);
2875 }
2876 }
2877 }
2878 }
2879 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2880 return ret;
2881}
2882#endif
2883
2884static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2885{
2886#ifdef MV_USE_FIXED_BUF
2887 if (pic && pic->index >= 0) {
2888 if (IS_8K_SIZE(pic->width, pic->height)) {
2889 pic->mpred_mv_wr_start_addr =
2890 hevc->work_space_buf->mpred_mv.buf_start
2891 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2892 } else {
2893 pic->mpred_mv_wr_start_addr =
2894 hevc->work_space_buf->mpred_mv.buf_start
2895 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2896 }
2897 }
2898 return 0;
2899#else
2900 int i;
2901 int ret = -1;
2902 int new_size;
2903 if (IS_8K_SIZE(pic->width, pic->height))
2904 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2905 else if (IS_4K_SIZE(pic->width, pic->height))
2906 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2907 else
2908 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2909 if (new_size != hevc->mv_buf_size) {
2910 dealloc_mv_bufs(hevc);
2911 hevc->mv_buf_size = new_size;
2912 }
2913 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2914 if (hevc->m_mv_BUF[i].start_adr &&
2915 hevc->m_mv_BUF[i].used_flag == 0) {
2916 hevc->m_mv_BUF[i].used_flag = 1;
2917 ret = i;
2918 break;
2919 }
2920 }
2921 if (ret < 0) {
2922 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2923 if (hevc->m_mv_BUF[i].start_adr == 0) {
2924 if (alloc_mv_buf(hevc, i) >= 0) {
2925 hevc->m_mv_BUF[i].used_flag = 1;
2926 ret = i;
2927 }
2928 break;
2929 }
2930 }
2931 }
2932
2933 if (ret >= 0) {
2934 pic->mv_buf_index = ret;
2935 pic->mpred_mv_wr_start_addr =
2936 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2937 (~0xffff);
2938 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2939 "%s => %d (0x%x) size 0x%x\n",
2940 __func__, ret,
2941 pic->mpred_mv_wr_start_addr,
2942 hevc->m_mv_BUF[ret].size);
2943
2944 } else {
2945 hevc_print(hevc, 0,
2946 "%s: Error, mv buf is not enough\n",
2947 __func__);
2948 }
2949 return ret;
2950
2951#endif
2952}
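
/*
 * MV buffer sizing (non MV_USE_FIXED_BUF case): the colocated-MV buffer is
 * sized per resolution class (8K / 4K / up to 1080p) with 0x10000 of slack,
 * since the write start address programmed into the hardware is rounded up
 * to the next 64K boundary; a change of resolution class frees all MV
 * buffers so they can be re-allocated at the new size.
 */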
2953
2954static void put_mv_buf(struct hevc_state_s *hevc,
2955 struct PIC_s *pic)
2956{
2957#ifndef MV_USE_FIXED_BUF
2958 int i = pic->mv_buf_index;
2959 if (i < 0 || i >= MAX_REF_PIC_NUM) {
2960 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2961 "%s: index %d beyond range\n",
2962 __func__, i);
2963 return;
2964 }
2965 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2966 "%s(%d): used_flag(%d)\n",
2967 __func__, i,
2968 hevc->m_mv_BUF[i].used_flag);
2969
2970 if (hevc->m_mv_BUF[i].start_adr &&
2971 hevc->m_mv_BUF[i].used_flag)
2972 hevc->m_mv_BUF[i].used_flag = 0;
2973 pic->mv_buf_index = -1;
2974#endif
2975}
2976
2977static int cal_current_buf_size(struct hevc_state_s *hevc,
2978 struct buf_stru_s *buf_stru)
2979{
2980
2981 int buf_size;
2982 int pic_width = hevc->pic_w;
2983 int pic_height = hevc->pic_h;
2984 int lcu_size = hevc->lcu_size;
2985 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
2986 1 : pic_width / lcu_size;
2987 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
2988 1 : pic_height / lcu_size;
2989 /*SUPPORT_10BIT*/
2990 int losless_comp_header_size = compute_losless_comp_header_size
2991 (pic_width, pic_height);
2992 /*always alloc buf for 10bit*/
2993 int losless_comp_body_size = compute_losless_comp_body_size
2994 (hevc, pic_width, pic_height, 0);
2995 int mc_buffer_size = losless_comp_header_size
2996 + losless_comp_body_size;
2997 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
2998 int mc_buffer_size_u_v_h = 0;
2999
3000 int dw_mode = get_double_write_mode(hevc);
3001
3002 if (hevc->mmu_enable) {
3003 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3004 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3005 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
3006 << 16;
3007 else
3008 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
3009 << 16;
3010 } else
3011 buf_size = 0;
3012
3013 if (dw_mode) {
3014 int pic_width_dw = pic_width /
3015 get_double_write_ratio(hevc, dw_mode);
3016 int pic_height_dw = pic_height /
3017 get_double_write_ratio(hevc, dw_mode);
3018
3019 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
3020 pic_width_dw / lcu_size + 1 :
3021 pic_width_dw / lcu_size;
3022 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
3023 pic_height_dw / lcu_size + 1 :
3024 pic_height_dw / lcu_size;
3025 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
3026
3027 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
3028 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
3029 /*64k alignment*/
3030 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
3031 }
3032
3033 if ((!hevc->mmu_enable) &&
3034 ((dw_mode & 0x10) == 0)) {
3035		/* compressed mode without MMU: a buffer is
3036		 * needed for the compressed reference data */
3037 buf_size += (mc_buffer_size_h << 16);
3038 }
3039
3040	/* in case the start address is not 64K aligned */
3041 if (buf_size > 0)
3042 buf_size += 0x10000;
3043
3044 if (buf_stru) {
3045 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
3046 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
3047 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
3048 }
3049
3050	hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
3051 pic_width, pic_height, losless_comp_header_size,
3052 losless_comp_body_size, mc_buffer_size_h,
3053 mc_buffer_size_u_v_h, buf_size);
3054
3055 return buf_size;
3056}
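
/*
 * Worked example (assuming lcu_size = 64, mmu disabled, double write mode 1):
 * for 1920x1088 the compressed reference needs a 32*15*17 = 8,160 byte header
 * plus a 4096*30*34 = 4,177,920 byte body, i.e. mc_buffer_size_h = 64 in 64K
 * units; the full-size double-write planes add 3 * (16 << 16) bytes, so the
 * per-picture buffer comes to roughly 7.1 MB including the 64K alignment pad.
 */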
3057
3058static int v4l_alloc_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
3059{
3060 int ret = -1;
3061 int i = pic->index;
3062 struct vdec_v4l2_buffer *fb = NULL;
3063
3064 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3065 return ret;
3066
3067 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3068 if (ret < 0) {
3069 hevc_print(hevc, 0, "[%d] H265 get buffer fail.\n",
3070 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id);
3071 return ret;
3072 }
3073
3074 if (hevc->mmu_enable) {
3075 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3076 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3077 hevc->m_BUF[i].header_size =
3078 ALIGN(MMU_COMPRESS_8K_HEADER_SIZE, 0x10000);
3079 else
3080 hevc->m_BUF[i].header_size =
3081 ALIGN(MMU_COMPRESS_HEADER_SIZE, 0x10000);
3082
3083 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
3084 VF_BUFFER_IDX(i), hevc->m_BUF[i].header_size,
3085 DRIVER_NAME, &hevc->m_BUF[i].header_addr);
3086 if (ret < 0) {
3087 hevc_print(hevc, PRINT_FLAG_ERROR,
3088 "%s[%d], header size: %d, no mem fatal err\n",
3089 __func__, i, hevc->m_BUF[i].header_size);
3090 return ret;
3091 }
3092 }
3093
3094 hevc->m_BUF[i].used_flag = 0;
3095 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3096 pic->cma_alloc_addr = hevc->m_BUF[i].v4l_ref_buf_addr;
3097 if (fb->num_planes == 1) {
3098 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3099 hevc->m_BUF[i].size = fb->m.mem[0].size;
3100 hevc->m_BUF[i].luma_size = fb->m.mem[0].offset;
3101 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3102 } else if (fb->num_planes == 2) {
3103 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3104 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3105 hevc->m_BUF[i].luma_size = fb->m.mem[0].size;
3106 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3107 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3108 }
3109
3110 return ret;
3111}
3112
3113static int alloc_buf(struct hevc_state_s *hevc)
3114{
3115 int i;
3116 int ret = -1;
3117 int buf_size = cal_current_buf_size(hevc, NULL);
3118
3119 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3120 return ret;
3121
3122 for (i = 0; i < BUF_POOL_SIZE; i++) {
3123 if (hevc->m_BUF[i].start_adr == 0)
3124 break;
3125 }
3126 if (i < BUF_POOL_SIZE) {
3127 if (buf_size > 0) {
3128 ret = decoder_bmmu_box_alloc_buf_phy
3129 (hevc->bmmu_box,
3130 VF_BUFFER_IDX(i), buf_size,
3131 DRIVER_NAME,
3132 &hevc->m_BUF[i].start_adr);
3133 if (ret < 0) {
3134 hevc->m_BUF[i].start_adr = 0;
3135 if (i <= 8) {
3136 hevc->fatal_error |=
3137 DECODER_FATAL_ERROR_NO_MEM;
3138 hevc_print(hevc, PRINT_FLAG_ERROR,
3139 "%s[%d], size: %d, no mem fatal err\n",
3140 __func__, i, buf_size);
3141 }
3142 }
3143
3144 if (ret >= 0) {
3145 hevc->m_BUF[i].size = buf_size;
3146 hevc->m_BUF[i].used_flag = 0;
3147 ret = 0;
3148
3149 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3150 hevc_print(hevc, 0,
3151 "Buffer %d: start_adr %p size %x\n",
3152 i,
3153 (void *)hevc->m_BUF[i].start_adr,
3154 hevc->m_BUF[i].size);
3155 }
3156 /*flush the buffer make sure no cache dirty*/
3157 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3158 void *mem_start_virt;
3159 mem_start_virt =
3160 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3161 if (mem_start_virt) {
3162 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3163 codec_mm_dma_flush(mem_start_virt,
3164 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3165 } else {
3166 mem_start_virt = codec_mm_vmap(
3167 hevc->m_BUF[i].start_adr,
3168 hevc->m_BUF[i].size);
3169 if (mem_start_virt) {
3170 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3171 codec_mm_dma_flush(mem_start_virt,
3172 hevc->m_BUF[i].size,
3173 DMA_TO_DEVICE);
3174 codec_mm_unmap_phyaddr(mem_start_virt);
3175 } else {
3176							/* no virtual address for TVP (secure) playback;
3177							 * clearing may need to be done by the ucode. */
3178 pr_err("ref %s mem_start_virt failed\n", __func__);
3179 }
3180 }
3181 }
3182 }
3183 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3184 } else
3185 ret = 0;
3186 }
3187
3188 if (ret >= 0) {
3189 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3190 hevc_print(hevc, 0,
3191				"alloc buf(%d) for %d/%d (size 0x%x) => %p\n",
3192 i, hevc->pic_w, hevc->pic_h,
3193 buf_size,
3194 hevc->m_BUF[i].start_adr);
3195 }
3196 } else {
3197 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3198 hevc_print(hevc, 0,
3199				"alloc buf(%d) for %d/%d (size 0x%x) => Fail!!!\n",
3200 i, hevc->pic_w, hevc->pic_h,
3201 buf_size);
3202 }
3203 }
3204 return ret;
3205}
3206
3207static void set_buf_unused(struct hevc_state_s *hevc, int i)
3208{
3209 if (i >= 0 && i < BUF_POOL_SIZE)
3210 hevc->m_BUF[i].used_flag = 0;
3211}
3212
3213static void dealloc_unused_buf(struct hevc_state_s *hevc)
3214{
3215 int i;
3216 for (i = 0; i < BUF_POOL_SIZE; i++) {
3217 if (hevc->m_BUF[i].start_adr &&
3218 hevc->m_BUF[i].used_flag == 0) {
3219 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3220 hevc_print(hevc, 0,
3221 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3222 i, hevc->m_BUF[i].start_adr,
3223 hevc->m_BUF[i].size);
3224 }
3225 if (!hevc->is_used_v4l)
3226 decoder_bmmu_box_free_idx(
3227 hevc->bmmu_box,
3228 VF_BUFFER_IDX(i));
3229 hevc->m_BUF[i].start_adr = 0;
3230 hevc->m_BUF[i].size = 0;
3231 }
3232 }
3233}
3234
3235static void dealloc_pic_buf(struct hevc_state_s *hevc,
3236 struct PIC_s *pic)
3237{
3238 int i = pic->BUF_index;
3239 pic->BUF_index = -1;
3240 if (i >= 0 &&
3241 i < BUF_POOL_SIZE &&
3242 hevc->m_BUF[i].start_adr) {
3243 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3244 hevc_print(hevc, 0,
3245 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3246 i, hevc->m_BUF[i].start_adr,
3247 hevc->m_BUF[i].size);
3248 }
3249
3250 if (!hevc->is_used_v4l)
3251 decoder_bmmu_box_free_idx(
3252 hevc->bmmu_box,
3253 VF_BUFFER_IDX(i));
3254 hevc->m_BUF[i].used_flag = 0;
3255 hevc->m_BUF[i].start_adr = 0;
3256 hevc->m_BUF[i].size = 0;
3257 }
3258}
3259
3260static int get_work_pic_num(struct hevc_state_s *hevc)
3261{
3262 int used_buf_num = 0;
3263 int sps_pic_buf_diff = 0;
3264
3265 if (get_dynamic_buf_num_margin(hevc) > 0) {
3266 if ((!hevc->sps_num_reorder_pics_0) &&
3267 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3268 /* the range of sps_num_reorder_pics_0 is in
3269 [0, sps_max_dec_pic_buffering_minus1_0] */
3270 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3271 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3272 } else
3273 used_buf_num = hevc->sps_num_reorder_pics_0
3274 + get_dynamic_buf_num_margin(hevc);
3275
3276 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3277 - hevc->sps_num_reorder_pics_0;
3278#ifdef MULTI_INSTANCE_SUPPORT
3279		/*
3280		 * need one more buffer for multi-instance, as
3281		 * apply_ref_pic_set() has no chance to run to
3282		 * clear the referenced flag in some cases
3283		 */
3284 if (hevc->m_ins_flag)
3285 used_buf_num++;
3286#endif
3287 } else
3288 used_buf_num = max_buf_num;
3289
3290 if (hevc->save_buffer_mode)
3291 hevc_print(hevc, 0,
3292			"save_buffer_mode: dynamic_buf_num_margin %d ----> %d\n",
3293 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3294
3295 if (sps_pic_buf_diff >= 4)
3296 {
3297 used_buf_num += 1;
3298 }
3299
3300 if (used_buf_num > MAX_BUF_NUM)
3301 used_buf_num = MAX_BUF_NUM;
3302 return used_buf_num;
3303}
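
/*
 * Example with hypothetical stream values: sps_max_dec_pic_buffering_minus1_0
 * = 5, sps_num_reorder_pics_0 = 3 and a dynamic margin of 7 give 3 + 7 = 10
 * pictures, plus one more in multi-instance mode; the diff of 2 is below 4 so
 * nothing further is added, and the result is clamped to MAX_BUF_NUM.
 */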
3304
3305static int get_alloc_pic_count(struct hevc_state_s *hevc)
3306{
3307 int alloc_pic_count = 0;
3308 int i;
3309 struct PIC_s *pic;
3310 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3311 pic = hevc->m_PIC[i];
3312 if (pic && pic->index >= 0)
3313 alloc_pic_count++;
3314 }
3315 return alloc_pic_count;
3316}
3317
3318static int v4l_config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3319{
3320 int i = pic->index;
3321 int dw_mode = get_double_write_mode(hevc);
3322
3323 if (hevc->mmu_enable)
3324 pic->header_adr = hevc->m_BUF[i].header_addr;
3325
3326 pic->BUF_index = i;
3327 pic->POC = INVALID_POC;
3328 pic->mc_canvas_y = pic->index;
3329 pic->mc_canvas_u_v = pic->index;
3330
3331 if (dw_mode & 0x10) {
3332 pic->mc_y_adr = hevc->m_BUF[i].start_adr;
3333 pic->mc_u_v_adr = pic->mc_y_adr + hevc->m_BUF[i].luma_size;
3334 pic->mc_canvas_y = (pic->index << 1);
3335 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3336
3337 pic->dw_y_adr = pic->mc_y_adr;
3338 pic->dw_u_v_adr = pic->mc_u_v_adr;
3339 } else if (dw_mode) {
3340 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3341 pic->dw_u_v_adr = pic->dw_y_adr + hevc->m_BUF[i].luma_size;
3342 }
3343
3344 return 0;
3345}
3346
3347static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3348{
3349 int ret = -1;
3350 int i;
3351 /*int lcu_size_log2 = hevc->lcu_size_log2;
3352 int MV_MEM_UNIT=lcu_size_log2==
3353 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3354 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3355 5 ? 0x80 : 0x20;
3356 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3357 hevc->work_space_buf->mpred_mv.buf_size;*/
3358 unsigned int y_adr = 0;
3359 struct buf_stru_s buf_stru;
3360 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3361 int dw_mode = get_double_write_mode(hevc);
3362
3363 for (i = 0; i < BUF_POOL_SIZE; i++) {
3364 if (hevc->m_BUF[i].start_adr != 0 &&
3365 hevc->m_BUF[i].used_flag == 0 &&
3366 buf_size <= hevc->m_BUF[i].size) {
3367 hevc->m_BUF[i].used_flag = 1;
3368 break;
3369 }
3370 }
3371
3372 if (i >= BUF_POOL_SIZE)
3373 return -1;
3374
3375 if (hevc->mmu_enable) {
3376 pic->header_adr = hevc->m_BUF[i].start_adr;
3377 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3378 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3379 y_adr = hevc->m_BUF[i].start_adr +
3380 MMU_COMPRESS_8K_HEADER_SIZE;
3381 else
3382 y_adr = hevc->m_BUF[i].start_adr +
3383 MMU_COMPRESS_HEADER_SIZE;
3384 } else
3385 y_adr = hevc->m_BUF[i].start_adr;
3386
3387 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3388
3389 pic->POC = INVALID_POC;
3390	/* ensure get_pic_by_POC() does not
3391	 * return a buffer that has not been decoded */
3392 pic->BUF_index = i;
3393
3394 if ((!hevc->mmu_enable) &&
3395 ((dw_mode & 0x10) == 0)
3396 ) {
3397 pic->mc_y_adr = y_adr;
3398 y_adr += (buf_stru.mc_buffer_size_h << 16);
3399 }
3400 pic->mc_canvas_y = pic->index;
3401 pic->mc_canvas_u_v = pic->index;
3402 if (dw_mode & 0x10) {
3403 pic->mc_y_adr = y_adr;
3404 pic->mc_u_v_adr = y_adr +
3405 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3406 pic->mc_canvas_y = (pic->index << 1);
3407 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3408
3409 pic->dw_y_adr = pic->mc_y_adr;
3410 pic->dw_u_v_adr = pic->mc_u_v_adr;
3411 } else if (dw_mode) {
3412 pic->dw_y_adr = y_adr;
3413 pic->dw_u_v_adr = pic->dw_y_adr +
3414 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3415 }
3416
3417 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3418 hevc_print(hevc, 0,
3419 "%s index %d BUF_index %d mc_y_adr %x\n",
3420 __func__, pic->index,
3421 pic->BUF_index, pic->mc_y_adr);
3422 if (hevc->mmu_enable &&
3423 dw_mode)
3424 hevc_print(hevc, 0,
3425 "mmu double write adr %ld\n",
3426 pic->cma_alloc_addr);
3427 }
3428 ret = 0;
3429
3430 return ret;
3431}
3432
3433static void init_pic_list(struct hevc_state_s *hevc)
3434{
3435 int i;
3436 int init_buf_num = get_work_pic_num(hevc);
3437 int dw_mode = get_double_write_mode(hevc);
3438 struct vdec_s *vdec = hw_to_vdec(hevc);
3439	/* allocation of decoder buffers is delayed when working in V4L mode. */
3440 if (!hevc->is_used_v4l) {
3441 for (i = 0; i < init_buf_num; i++) {
3442 if (alloc_buf(hevc) < 0) {
3443 if (i <= 8) {
3444					/* if (i + 1) >= 9 buffers were already allocated,
3445					 * do not report a fatal error. */
3446 hevc->fatal_error |=
3447 DECODER_FATAL_ERROR_NO_MEM;
3448 }
3449 break;
3450 }
3451 }
3452 }
3453
3454 for (i = 0; i < init_buf_num; i++) {
3455 struct PIC_s *pic = hevc->m_PIC[i];
3456
3457 if (!pic) {
3458 pic = vmalloc(sizeof(struct PIC_s));
3459 if (pic == NULL) {
3460 hevc_print(hevc, 0,
3461 "%s: alloc pic %d fail!!!\n",
3462 __func__, i);
3463 break;
3464 }
3465 hevc->m_PIC[i] = pic;
3466 }
3467 memset(pic, 0, sizeof(struct PIC_s));
3468
3469 pic->index = i;
3470 pic->BUF_index = -1;
3471 pic->mv_buf_index = -1;
3472 if (vdec->parallel_dec == 1) {
3473 pic->y_canvas_index = -1;
3474 pic->uv_canvas_index = -1;
3475 }
3476
3477 pic->width = hevc->pic_w;
3478 pic->height = hevc->pic_h;
3479 pic->double_write_mode = dw_mode;
3480
3481		/* canvas configuration is delayed when working in V4L mode. */
3482 if (!hevc->is_used_v4l) {
3483 if (config_pic(hevc, pic) < 0) {
3484 if (get_dbg_flag(hevc))
3485 hevc_print(hevc, 0,
3486 "Config_pic %d fail\n", pic->index);
3487 pic->index = -1;
3488 i++;
3489 break;
3490 }
3491
3492 if (pic->double_write_mode)
3493 set_canvas(hevc, pic);
3494 }
3495 }
3496
3497 for (; i < MAX_REF_PIC_NUM; i++) {
3498 struct PIC_s *pic = hevc->m_PIC[i];
3499
3500 if (!pic) {
3501 pic = vmalloc(sizeof(struct PIC_s));
3502 if (pic == NULL) {
3503 hevc_print(hevc, 0,
3504 "%s: alloc pic %d fail!!!\n",
3505 __func__, i);
3506 break;
3507 }
3508 hevc->m_PIC[i] = pic;
3509 }
3510 memset(pic, 0, sizeof(struct PIC_s));
3511
3512 pic->index = -1;
3513 pic->BUF_index = -1;
3514 if (vdec->parallel_dec == 1) {
3515 pic->y_canvas_index = -1;
3516 pic->uv_canvas_index = -1;
3517 }
3518 }
3519
3520}
3521
3522static void uninit_pic_list(struct hevc_state_s *hevc)
3523{
3524 struct vdec_s *vdec = hw_to_vdec(hevc);
3525 int i;
3526#ifndef MV_USE_FIXED_BUF
3527 dealloc_mv_bufs(hevc);
3528#endif
3529 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3530 struct PIC_s *pic = hevc->m_PIC[i];
3531
3532 if (pic) {
3533 if (vdec->parallel_dec == 1) {
3534 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3535 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3536 }
3537 release_aux_data(hevc, pic);
3538 vfree(pic);
3539 hevc->m_PIC[i] = NULL;
3540 }
3541 }
3542}
3543
3544#ifdef LOSLESS_COMPRESS_MODE
3545static void init_decode_head_hw(struct hevc_state_s *hevc)
3546{
3547
3548 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3549 unsigned int data32;
3550
3551 int losless_comp_header_size =
3552 compute_losless_comp_header_size(hevc->pic_w,
3553 hevc->pic_h);
3554 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3555 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3556
3557 hevc->losless_comp_body_size = losless_comp_body_size;
3558
3559
3560 if (hevc->mmu_enable) {
3561 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3562 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3563 } else {
3564 if (hevc->mem_saving_mode == 1)
3565 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3566 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3567 else
3568 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3569 ((workaround_enable & 2) ? 1 : 0));
3570 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3571 /*
3572 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3573 * //8-bit mode
3574 */
3575 }
3576 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3577 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3578 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3579
3580 if (hevc->mmu_enable) {
3581 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3582 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3583 buf_spec->mmu_vbh.buf_start +
3584 buf_spec->mmu_vbh.buf_size/2);
3585 data32 = READ_VREG(HEVC_SAO_CTRL9);
3586 data32 |= 0x1;
3587 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3588
3589 /* use HEVC_CM_HEADER_START_ADDR */
3590 data32 = READ_VREG(HEVC_SAO_CTRL5);
3591 data32 |= (1<<10);
3592 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3593 }
3594
3595 if (!hevc->m_ins_flag)
3596 hevc_print(hevc, 0,
3597 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3598 __func__, hevc->pic_w, hevc->pic_h,
3599 losless_comp_body_size, losless_comp_header_size);
3600
3601}
3602#endif
3603#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3604
3605static void init_pic_list_hw(struct hevc_state_s *hevc)
3606{
3607 int i;
3608 int cur_pic_num = MAX_REF_PIC_NUM;
3609 int dw_mode = get_double_write_mode(hevc);
3610 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3611 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3612 (0x1 << 1) | (0x1 << 2));
3613 else
3614 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3615
3616 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3617 if (hevc->m_PIC[i] == NULL ||
3618 hevc->m_PIC[i]->index == -1) {
3619 cur_pic_num = i;
3620 break;
3621 }
3622 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3623 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3624 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3625 hevc->m_PIC[i]->header_adr>>5);
3626 else
3627 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3628 hevc->m_PIC[i]->mc_y_adr >> 5);
3629 } else
3630 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3631 hevc->m_PIC[i]->mc_y_adr |
3632 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3633 if (dw_mode & 0x10) {
3634 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3635 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3636 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3637 }
3638 else
3639 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3640 hevc->m_PIC[i]->mc_u_v_adr |
3641 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3642 | 0x1);
3643 }
3644 }
3645 if (cur_pic_num == 0)
3646 return;
3647 for (; i < MAX_REF_PIC_NUM; i++) {
3648 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3649 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3650 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3651 hevc->m_PIC[cur_pic_num-1]->header_adr>>5);
3652 else
3653 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3654 hevc->m_PIC[cur_pic_num-1]->mc_y_adr >> 5);
3655#ifndef LOSLESS_COMPRESS_MODE
3656 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3657 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr >> 5);
3658#endif
3659 } else {
3660 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3661 hevc->m_PIC[cur_pic_num-1]->mc_y_adr|
3662 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_y<<8)
3663 | 0x1);
3664#ifndef LOSLESS_COMPRESS_MODE
3665 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3666 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr|
3667 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_u_v<<8)
3668 | 0x1);
3669#endif
3670 }
3671 }
3672
3673 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3674
3675 /* Zero out canvas registers in IPP -- avoid simulation X */
3676 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3677 (0 << 8) | (0 << 1) | 1);
3678 for (i = 0; i < 32; i++)
3679 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3680
3681#ifdef LOSLESS_COMPRESS_MODE
3682 if ((dw_mode & 0x10) == 0)
3683 init_decode_head_hw(hevc);
3684#endif
3685
3686}
3687
3688
3689static void dump_pic_list(struct hevc_state_s *hevc)
3690{
3691 int i;
3692 struct PIC_s *pic;
3693
3694 hevc_print(hevc, 0,
3695 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3696 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3697 pic = hevc->m_PIC[i];
3698 if (pic == NULL || pic->index == -1)
3699 continue;
3700 hevc_print_cont(hevc, 0,
3701 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3702 pic->index, pic->BUF_index,
3703#ifndef MV_USE_FIXED_BUF
3704 pic->mv_buf_index,
3705#else
3706 -1,
3707#endif
3708 pic->decode_idx, pic->POC, pic->referenced);
3709 hevc_print_cont(hevc, 0,
3710 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3711 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3712 pic->width, pic->height);
3713 hevc_print_cont(hevc, 0,
3714 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3715 pic->output_ready, pic->mpred_mv_wr_start_addr,
3716 pic->vf_ref);
3717 }
3718}
3719
3720static void clear_referenced_flag(struct hevc_state_s *hevc)
3721{
3722 int i;
3723 struct PIC_s *pic;
3724 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3725 pic = hevc->m_PIC[i];
3726 if (pic == NULL || pic->index == -1)
3727 continue;
3728 if (pic->referenced) {
3729 pic->referenced = 0;
3730 put_mv_buf(hevc, pic);
3731 }
3732 }
3733}
3734
3735static void clear_poc_flag(struct hevc_state_s *hevc)
3736{
3737 int i;
3738 struct PIC_s *pic;
3739 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3740 pic = hevc->m_PIC[i];
3741 if (pic == NULL || pic->index == -1)
3742 continue;
3743 pic->POC = INVALID_POC;
3744 }
3745}
3746
3747static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3748 unsigned char flush_flag)
3749{
3750 int num_pic_not_yet_display = 0;
3751 int i;
3752 struct PIC_s *pic;
3753 struct PIC_s *pic_display = NULL;
3754 struct vdec_s *vdec = hw_to_vdec(hevc);
3755
3756 if (hevc->i_only & 0x4) {
3757 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3758 pic = hevc->m_PIC[i];
3759 if (pic == NULL ||
3760 (pic->index == -1) ||
3761 (pic->BUF_index == -1) ||
3762 (pic->POC == INVALID_POC))
3763 continue;
3764 if (pic->output_mark) {
3765 if (pic_display) {
3766 if (pic->decode_idx <
3767 pic_display->decode_idx)
3768 pic_display = pic;
3769
3770 } else
3771 pic_display = pic;
3772
3773 }
3774 }
3775 if (pic_display) {
3776 pic_display->output_mark = 0;
3777 pic_display->recon_mark = 0;
3778 pic_display->output_ready = 1;
3779 pic_display->referenced = 0;
3780 put_mv_buf(hevc, pic_display);
3781 }
3782 } else {
3783 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3784 pic = hevc->m_PIC[i];
3785 if (pic == NULL ||
3786 (pic->index == -1) ||
3787 (pic->BUF_index == -1) ||
3788 (pic->POC == INVALID_POC))
3789 continue;
3790 if (pic->output_mark)
3791 num_pic_not_yet_display++;
3792 if (pic->slice_type == 2 &&
3793 hevc->vf_pre_count == 0 &&
3794 fast_output_enable & 0x1) {
3795 /*fast output for first I picture*/
3796 pic->num_reorder_pic = 0;
3797 if (vdec->master || vdec->slave)
3798 pic_display = pic;
3799 hevc_print(hevc, 0, "VH265: output first frame\n");
3800 }
3801 }
3802
3803 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3804 pic = hevc->m_PIC[i];
3805 if (pic == NULL ||
3806 (pic->index == -1) ||
3807 (pic->BUF_index == -1) ||
3808 (pic->POC == INVALID_POC))
3809 continue;
3810 if (pic->output_mark) {
3811 if (pic_display) {
3812 if (pic->POC < pic_display->POC)
3813 pic_display = pic;
3814 else if ((pic->POC == pic_display->POC)
3815 && (pic->decode_idx <
3816 pic_display->
3817 decode_idx))
3818 pic_display
3819 = pic;
3820 } else
3821 pic_display = pic;
3822 }
3823 }
3824 if (pic_display) {
3825 if ((num_pic_not_yet_display >
3826 pic_display->num_reorder_pic)
3827 || flush_flag) {
3828 pic_display->output_mark = 0;
3829 pic_display->recon_mark = 0;
3830 pic_display->output_ready = 1;
3831 } else if (num_pic_not_yet_display >=
3832 (MAX_REF_PIC_NUM - 1)) {
3833 pic_display->output_mark = 0;
3834 pic_display->recon_mark = 0;
3835 pic_display->output_ready = 1;
3836 hevc_print(hevc, 0,
 3837					"Warning, num_reorder_pic %d is beyond buf num\n",
3838 pic_display->num_reorder_pic);
3839 } else
3840 pic_display = NULL;
3841 }
3842 }
3843
3844 if (pic_display && (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3845 pic_display = NULL;
3846 hevc->first_pic_flag = 0;
3847 }
3848 return pic_display;
3849}
3850
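/*
 * Programs the motion-compensation reference canvases for the current
 * slice: list 0 for P/B slices and, for B slices, list 1 starting at
 * canvas index 16.  A missing or mismatched (wrong size or error-marked)
 * reference propagates error_mark to the current picture.
 */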
3851static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3852{
3853 int i;
3854 struct PIC_s *pic;
3855
3856 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3857 hevc_print(hevc, 0,
3858 "config_mc_buffer entered .....\n");
3859 if (cur_pic->slice_type != 2) { /* P and B pic */
3860 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3861 (0 << 8) | (0 << 1) | 1);
3862 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3863 pic =
3864 get_ref_pic_by_POC(hevc,
3865 cur_pic->
3866 m_aiRefPOCList0[cur_pic->
3867 slice_idx][i]);
3868 if (pic) {
3869 if ((pic->width != hevc->pic_w) ||
3870 (pic->height != hevc->pic_h)) {
3871 hevc_print(hevc, 0,
3872 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3873 __func__, pic->POC,
3874 pic->width, pic->height);
3875 cur_pic->error_mark = 1;
3876 }
3877 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3878 cur_pic->error_mark = 1;
3879 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3880 (pic->mc_canvas_u_v << 16)
3881 | (pic->mc_canvas_u_v
3882 << 8) |
3883 pic->mc_canvas_y);
3884 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3885 hevc_print_cont(hevc, 0,
3886 "refid %x mc_canvas_u_v %x",
3887 i, pic->mc_canvas_u_v);
3888 hevc_print_cont(hevc, 0,
3889 " mc_canvas_y %x\n",
3890 pic->mc_canvas_y);
3891 }
3892 } else
3893 cur_pic->error_mark = 1;
3894
3895 if (pic == NULL || pic->error_mark) {
3896 hevc_print(hevc, 0,
 3897					"Error %s, %dth poc (%d) %s\n",
3898 __func__, i,
3899 cur_pic->m_aiRefPOCList0[cur_pic->
3900 slice_idx][i],
3901 pic ? "has error" :
3902 "not in list0");
3903 }
3904 }
3905 }
3906 if (cur_pic->slice_type == 0) { /* B pic */
3907 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3908 hevc_print(hevc, 0,
3909 "config_mc_buffer RefNum_L1\n");
3910 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3911 (16 << 8) | (0 << 1) | 1);
3912
3913 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3914 pic =
3915 get_ref_pic_by_POC(hevc,
3916 cur_pic->
3917 m_aiRefPOCList1[cur_pic->
3918 slice_idx][i]);
3919 if (pic) {
3920 if ((pic->width != hevc->pic_w) ||
3921 (pic->height != hevc->pic_h)) {
3922 hevc_print(hevc, 0,
3923 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3924 __func__, pic->POC,
3925 pic->width, pic->height);
3926 cur_pic->error_mark = 1;
3927 }
3928
3929 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3930 cur_pic->error_mark = 1;
3931 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3932 (pic->mc_canvas_u_v << 16)
3933 | (pic->mc_canvas_u_v
3934 << 8) |
3935 pic->mc_canvas_y);
3936 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3937 hevc_print_cont(hevc, 0,
3938 "refid %x mc_canvas_u_v %x",
3939 i, pic->mc_canvas_u_v);
3940 hevc_print_cont(hevc, 0,
3941 " mc_canvas_y %x\n",
3942 pic->mc_canvas_y);
3943 }
3944 } else
3945 cur_pic->error_mark = 1;
3946
3947 if (pic == NULL || pic->error_mark) {
3948 hevc_print(hevc, 0,
 3949					"Error %s, %dth poc (%d) %s\n",
3950 __func__, i,
3951 cur_pic->m_aiRefPOCList1[cur_pic->
3952 slice_idx][i],
3953 pic ? "has error" :
3954 "not in list1");
3955 }
3956 }
3957 }
3958 return 0;
3959}
3960
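/*
 * Clears the "referenced" flag (and recycles the MV buffer) of every DPB
 * picture whose POC is not described by the current short-term RPS.
 * Each CUR_RPS entry packs a used flag, a direction bit and abs(delta_poc);
 * an entry of 0x8000 terminates the list.  Illustrative decode, mirroring
 * the loop below and assuming RPS_USED_BIT == 14 (value assumed here
 * purely for illustration):
 *
 *   delt = entry & 0x1fff;
 *   poc  = (entry & 0x2000) ? cur_poc - (0x2000 - delt)
 *                           : cur_poc + delt;
 */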
3961static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
3962 union param_u *params)
3963{
3964 int ii, i;
3965 int poc_tmp;
3966 struct PIC_s *pic;
3967 unsigned char is_referenced;
3968 /* hevc_print(hevc, 0,
3969 "%s cur_poc %d\n", __func__, cur_poc); */
3970 if (pic_list_debug & 0x2) {
3971 pr_err("cur poc %d\n", cur_poc);
3972 }
3973 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
3974 pic = hevc->m_PIC[ii];
3975 if (pic == NULL ||
3976 pic->index == -1 ||
3977 pic->BUF_index == -1
3978 )
3979 continue;
3980
3981 if ((pic->referenced == 0 || pic->POC == cur_poc))
3982 continue;
3983 is_referenced = 0;
3984 for (i = 0; i < 16; i++) {
3985 int delt;
3986
3987 if (params->p.CUR_RPS[i] & 0x8000)
3988 break;
3989 delt =
3990 params->p.CUR_RPS[i] &
3991 ((1 << (RPS_USED_BIT - 1)) - 1);
3992 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
3993 poc_tmp =
3994 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
3995 delt);
3996 } else
3997 poc_tmp = cur_poc + delt;
3998 if (poc_tmp == pic->POC) {
3999 is_referenced = 1;
4000 /* hevc_print(hevc, 0, "i is %d\n", i); */
4001 break;
4002 }
4003 }
4004 if (is_referenced == 0) {
4005 pic->referenced = 0;
4006 put_mv_buf(hevc, pic);
4007 /* hevc_print(hevc, 0,
4008 "set poc %d reference to 0\n", pic->POC); */
4009 if (pic_list_debug & 0x2) {
4010 pr_err("set poc %d reference to 0\n", pic->POC);
4011 }
4012 }
4013 }
4014
4015}
4016
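/*
 * Builds RefPicList0/RefPicList1 for the current slice from the RPS:
 * negative deltas fill RefPicSetStCurr0, positive ones RefPicSetStCurr1,
 * then each list is filled either straight (cIdx = rIdx % total_num) or
 * through the slice's reference picture list modification table.
 */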
4017static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
4018{
4019 struct PIC_s *pic = hevc->cur_pic;
4020 int i, rIdx;
4021 int num_neg = 0;
4022 int num_pos = 0;
4023 int total_num;
4024 int num_ref_idx_l0_active =
4025 (params->p.num_ref_idx_l0_active >
4026 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4027 params->p.num_ref_idx_l0_active;
4028 int num_ref_idx_l1_active =
4029 (params->p.num_ref_idx_l1_active >
4030 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4031 params->p.num_ref_idx_l1_active;
4032
4033 int RefPicSetStCurr0[16];
4034 int RefPicSetStCurr1[16];
4035
4036 for (i = 0; i < 16; i++) {
4037 RefPicSetStCurr0[i] = 0;
4038 RefPicSetStCurr1[i] = 0;
4039 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
4040 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
4041 }
4042 for (i = 0; i < 16; i++) {
4043 if (params->p.CUR_RPS[i] & 0x8000)
4044 break;
4045 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
4046 int delt =
4047 params->p.CUR_RPS[i] &
4048 ((1 << (RPS_USED_BIT - 1)) - 1);
4049
4050 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
4051 RefPicSetStCurr0[num_neg] =
4052 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
4053 delt);
4054 /* hevc_print(hevc, 0,
4055 * "RefPicSetStCurr0 %x %x %x\n",
4056 * RefPicSetStCurr0[num_neg], pic->POC,
4057 * (0x800-(params[i]&0x7ff)));
4058 */
4059 num_neg++;
4060 } else {
4061 RefPicSetStCurr1[num_pos] = pic->POC + delt;
4062 /* hevc_print(hevc, 0,
4063 * "RefPicSetStCurr1 %d\n",
4064 * RefPicSetStCurr1[num_pos]);
4065 */
4066 num_pos++;
4067 }
4068 }
4069 }
4070 total_num = num_neg + num_pos;
4071 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4072 hevc_print(hevc, 0,
4073 "%s: curpoc %d slice_type %d, total %d ",
4074 __func__, pic->POC, params->p.slice_type, total_num);
4075 hevc_print_cont(hevc, 0,
4076 "num_neg %d num_list0 %d num_list1 %d\n",
4077 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
4078 }
4079
4080 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4081 hevc_print(hevc, 0,
4082 "HEVC Stream buf start ");
4083 hevc_print_cont(hevc, 0,
4084 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
4085 READ_VREG(HEVC_STREAM_START_ADDR),
4086 READ_VREG(HEVC_STREAM_END_ADDR),
4087 READ_VREG(HEVC_STREAM_WR_PTR),
4088 READ_VREG(HEVC_STREAM_RD_PTR),
4089 READ_VREG(HEVC_STREAM_LEVEL),
4090 READ_VREG(HEVC_STREAM_FIFO_CTL),
4091 READ_VREG(HEVC_PARSER_INT_CONTROL));
4092 }
4093
4094 if (total_num > 0) {
4095 if (params->p.modification_flag & 0x1) {
4096 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4097 hevc_print(hevc, 0, "ref0 POC (modification):");
4098 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4099 int cIdx = params->p.modification_list[rIdx];
4100
4101 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4102 cIdx >=
4103 num_neg ? RefPicSetStCurr1[cIdx -
4104 num_neg] :
4105 RefPicSetStCurr0[cIdx];
4106 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4107 hevc_print_cont(hevc, 0, "%d ",
4108 pic->m_aiRefPOCList0[pic->
4109 slice_idx]
4110 [rIdx]);
4111 }
4112 }
4113 } else {
4114 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4115 hevc_print(hevc, 0, "ref0 POC:");
4116 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4117 int cIdx = rIdx % total_num;
4118
4119 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4120 cIdx >=
4121 num_neg ? RefPicSetStCurr1[cIdx -
4122 num_neg] :
4123 RefPicSetStCurr0[cIdx];
4124 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4125 hevc_print_cont(hevc, 0, "%d ",
4126 pic->m_aiRefPOCList0[pic->
4127 slice_idx]
4128 [rIdx]);
4129 }
4130 }
4131 }
4132 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4133 hevc_print_cont(hevc, 0, "\n");
4134 if (params->p.slice_type == B_SLICE) {
4135 if (params->p.modification_flag & 0x2) {
4136 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4137 hevc_print(hevc, 0,
4138 "ref1 POC (modification):");
4139 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4140 rIdx++) {
4141 int cIdx;
4142
4143 if (params->p.modification_flag & 0x1) {
4144 cIdx =
4145 params->p.
4146 modification_list
4147 [num_ref_idx_l0_active +
4148 rIdx];
4149 } else {
4150 cIdx =
4151 params->p.
4152 modification_list[rIdx];
4153 }
4154 pic->m_aiRefPOCList1[pic->
4155 slice_idx][rIdx] =
4156 cIdx >=
4157 num_pos ?
4158 RefPicSetStCurr0[cIdx - num_pos]
4159 : RefPicSetStCurr1[cIdx];
4160 if (get_dbg_flag(hevc) &
4161 H265_DEBUG_BUFMGR) {
4162 hevc_print_cont(hevc, 0, "%d ",
4163 pic->
4164 m_aiRefPOCList1[pic->
4165 slice_idx]
4166 [rIdx]);
4167 }
4168 }
4169 } else {
4170 if (get_dbg_flag(hevc) &
4171 H265_DEBUG_BUFMGR)
4172 hevc_print(hevc, 0, "ref1 POC:");
4173 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4174 rIdx++) {
4175 int cIdx = rIdx % total_num;
4176
4177 pic->m_aiRefPOCList1[pic->
4178 slice_idx][rIdx] =
4179 cIdx >=
4180 num_pos ?
4181 RefPicSetStCurr0[cIdx -
4182 num_pos]
4183 : RefPicSetStCurr1[cIdx];
4184 if (get_dbg_flag(hevc) &
4185 H265_DEBUG_BUFMGR) {
4186 hevc_print_cont(hevc, 0, "%d ",
4187 pic->
4188 m_aiRefPOCList1[pic->
4189 slice_idx]
4190 [rIdx]);
4191 }
4192 }
4193 }
4194 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4195 hevc_print_cont(hevc, 0, "\n");
4196 }
4197 }
4198 /*set m_PIC */
4199 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4200 (params->p.slice_type == P_SLICE) ? 1 :
4201 (params->p.slice_type == B_SLICE) ? 0 : 3;
4202 pic->RefNum_L0 = num_ref_idx_l0_active;
4203 pic->RefNum_L1 = num_ref_idx_l1_active;
4204}
4205
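/*
 * Rebuilds the hevc->m_tile[][] grid from the PPS tile syntax: uniform
 * spacing divides the picture evenly (the last row/column absorbs the
 * remainder), otherwise the explicit tile_width[]/tile_height[] values
 * are used.  Each tile also gets its slice of the SAO vertical/above
 * workspace buffers.  Out-of-range row/column counts fall back to 1.
 */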
4206static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4207 int pic_height_cu, int sao_mem_unit,
4208 union param_u *params)
4209{
4210 int i, j;
4211 int start_cu_x, start_cu_y;
4212 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4213 int sao_abv_size = sao_mem_unit * pic_width_cu;
4214#ifdef DETREFILL_ENABLE
4215 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4216 int tmpRefillLcuSize = 1 <<
4217 (params->p.log2_min_coding_block_size_minus3 +
4218 3 + params->p.log2_diff_max_min_coding_block_size);
4219 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4220 "%x, %x, %x, %x\n",
4221 params->p.slice_segment_address,
4222 params->p.bit_depth,
4223 params->p.tiles_enabled_flag,
4224 tmpRefillLcuSize);
4225 if (params->p.slice_segment_address == 0 &&
4226 params->p.bit_depth != 0 &&
4227 (params->p.tiles_enabled_flag & 1) &&
4228 tmpRefillLcuSize == 64)
4229 hevc->delrefill_check = 1;
4230 else
4231 hevc->delrefill_check = 0;
4232 }
4233#endif
4234
4235 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4236 if (params->p.tiles_enabled_flag & 1) {
4237 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4238 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4239
4240 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4241 || hevc->num_tile_row <= 0) {
4242 hevc->num_tile_row = 1;
4243 hevc_print(hevc, 0,
4244 "%s: num_tile_rows_minus1 (%d) error!!\n",
4245 __func__, params->p.num_tile_rows_minus1);
4246 }
4247 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4248 || hevc->num_tile_col <= 0) {
4249 hevc->num_tile_col = 1;
4250 hevc_print(hevc, 0,
4251 "%s: num_tile_columns_minus1 (%d) error!!\n",
4252 __func__, params->p.num_tile_columns_minus1);
4253 }
4254 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4255 hevc_print(hevc, 0,
4256 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4257 __func__, pic_width_cu, pic_height_cu);
4258 hevc_print_cont(hevc, 0,
4259 "num_tile_col %d num_tile_row %d:\n",
4260 hevc->num_tile_col, hevc->num_tile_row);
4261 }
4262
4263 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4264 int w = pic_width_cu / hevc->num_tile_col;
4265 int h = pic_height_cu / hevc->num_tile_row;
4266
4267 start_cu_y = 0;
4268 for (i = 0; i < hevc->num_tile_row; i++) {
4269 start_cu_x = 0;
4270 for (j = 0; j < hevc->num_tile_col; j++) {
4271 if (j == (hevc->num_tile_col - 1)) {
4272 hevc->m_tile[i][j].width =
4273 pic_width_cu -
4274 start_cu_x;
4275 } else
4276 hevc->m_tile[i][j].width = w;
4277 if (i == (hevc->num_tile_row - 1)) {
4278 hevc->m_tile[i][j].height =
4279 pic_height_cu -
4280 start_cu_y;
4281 } else
4282 hevc->m_tile[i][j].height = h;
4283 hevc->m_tile[i][j].start_cu_x
4284 = start_cu_x;
4285 hevc->m_tile[i][j].start_cu_y
4286 = start_cu_y;
4287 hevc->m_tile[i][j].sao_vb_start_addr =
4288 hevc->work_space_buf->sao_vb.
4289 buf_start + j * sao_vb_size;
4290 hevc->m_tile[i][j].sao_abv_start_addr =
4291 hevc->work_space_buf->sao_abv.
4292 buf_start + i * sao_abv_size;
4293 if (get_dbg_flag(hevc) &
4294 H265_DEBUG_BUFMGR) {
4295 hevc_print_cont(hevc, 0,
4296 "{y=%d, x=%d w %d h %d ",
4297 i, j, hevc->m_tile[i][j].width,
4298 hevc->m_tile[i][j].height);
4299 hevc_print_cont(hevc, 0,
4300 "start_x %d start_y %d ",
4301 hevc->m_tile[i][j].start_cu_x,
4302 hevc->m_tile[i][j].start_cu_y);
4303 hevc_print_cont(hevc, 0,
4304 "sao_vb_start 0x%x ",
4305 hevc->m_tile[i][j].
4306 sao_vb_start_addr);
4307 hevc_print_cont(hevc, 0,
4308 "sao_abv_start 0x%x}\n",
4309 hevc->m_tile[i][j].
4310 sao_abv_start_addr);
4311 }
4312 start_cu_x += hevc->m_tile[i][j].width;
4313
4314 }
4315 start_cu_y += hevc->m_tile[i][0].height;
4316 }
4317 } else {
4318 start_cu_y = 0;
4319 for (i = 0; i < hevc->num_tile_row; i++) {
4320 start_cu_x = 0;
4321 for (j = 0; j < hevc->num_tile_col; j++) {
4322 if (j == (hevc->num_tile_col - 1)) {
4323 hevc->m_tile[i][j].width =
4324 pic_width_cu -
4325 start_cu_x;
4326 } else {
4327 hevc->m_tile[i][j].width =
4328 params->p.tile_width[j];
4329 }
4330 if (i == (hevc->num_tile_row - 1)) {
4331 hevc->m_tile[i][j].height =
4332 pic_height_cu -
4333 start_cu_y;
4334 } else {
4335 hevc->m_tile[i][j].height =
4336 params->
4337 p.tile_height[i];
4338 }
4339 hevc->m_tile[i][j].start_cu_x
4340 = start_cu_x;
4341 hevc->m_tile[i][j].start_cu_y
4342 = start_cu_y;
4343 hevc->m_tile[i][j].sao_vb_start_addr =
4344 hevc->work_space_buf->sao_vb.
4345 buf_start + j * sao_vb_size;
4346 hevc->m_tile[i][j].sao_abv_start_addr =
4347 hevc->work_space_buf->sao_abv.
4348 buf_start + i * sao_abv_size;
4349 if (get_dbg_flag(hevc) &
4350 H265_DEBUG_BUFMGR) {
4351 hevc_print_cont(hevc, 0,
4352 "{y=%d, x=%d w %d h %d ",
4353 i, j, hevc->m_tile[i][j].width,
4354 hevc->m_tile[i][j].height);
4355 hevc_print_cont(hevc, 0,
4356 "start_x %d start_y %d ",
4357 hevc->m_tile[i][j].start_cu_x,
4358 hevc->m_tile[i][j].start_cu_y);
4359 hevc_print_cont(hevc, 0,
4360 "sao_vb_start 0x%x ",
4361 hevc->m_tile[i][j].
4362 sao_vb_start_addr);
4363 hevc_print_cont(hevc, 0,
4364 "sao_abv_start 0x%x}\n",
4365 hevc->m_tile[i][j].
4366 sao_abv_start_addr);
4367
4368 }
4369 start_cu_x += hevc->m_tile[i][j].width;
4370 }
4371 start_cu_y += hevc->m_tile[i][0].height;
4372 }
4373 }
4374 } else {
4375 hevc->num_tile_col = 1;
4376 hevc->num_tile_row = 1;
4377 hevc->m_tile[0][0].width = pic_width_cu;
4378 hevc->m_tile[0][0].height = pic_height_cu;
4379 hevc->m_tile[0][0].start_cu_x = 0;
4380 hevc->m_tile[0][0].start_cu_y = 0;
4381 hevc->m_tile[0][0].sao_vb_start_addr =
4382 hevc->work_space_buf->sao_vb.buf_start;
4383 hevc->m_tile[0][0].sao_abv_start_addr =
4384 hevc->work_space_buf->sao_abv.buf_start;
4385 }
4386}
4387
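/*
 * Converts an LCU address into tile coordinates, returned packed as
 * (tile_x | tile_y << 8); returns -1 if pic_width_lcu is 0.  Example
 * (illustrative only): cu_adr 130 in a 60-LCU-wide picture is LCU
 * (x=10, y=2), which is then matched against start_cu_x/start_cu_y of
 * the configured tiles.
 */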
4388static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4389 int pic_width_lcu)
4390{
4391 int cu_x;
4392 int cu_y;
4393 int tile_x = 0;
4394 int tile_y = 0;
4395 int i;
4396
4397 if (pic_width_lcu == 0) {
4398 if (get_dbg_flag(hevc)) {
4399 hevc_print(hevc, 0,
4400 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4401 __func__, hevc->pic_w, hevc->pic_h);
4402 }
4403 return -1;
4404 }
4405 cu_x = cu_adr % pic_width_lcu;
4406 cu_y = cu_adr / pic_width_lcu;
4407 if (hevc->tile_enabled) {
4408 for (i = 0; i < hevc->num_tile_col; i++) {
4409 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4410 tile_x = i;
4411 else
4412 break;
4413 }
4414 for (i = 0; i < hevc->num_tile_row; i++) {
4415 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4416 tile_y = i;
4417 else
4418 break;
4419 }
4420 }
4421 return (tile_x) | (tile_y << 8);
4422}
4423
4424static void print_scratch_error(int error_num)
4425{
4426#if 0
4427 if (get_dbg_flag(hevc)) {
4428 hevc_print(hevc, 0,
4429 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4430 error_num);
4431 }
4432#endif
4433}
4434
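/*
 * Points the decoder's fixed workspace regions (IPP line buffer, RPM,
 * short-term RPS, VPS/SPS/PPS, SAO above, scale LUT, deblock
 * parameter/data buffers, LMEM dump and, with MMU enabled, the frame
 * map) at the physical addresses carried in hevc->work_space_buf.
 */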
4435static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4436{
4437 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4438
4439 if (get_dbg_flag(hevc))
4440 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4441 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4442 __func__,
4443 buf_spec->ipp.buf_start,
4444 buf_spec->start_adr,
4445 buf_spec->short_term_rps.buf_start,
4446 buf_spec->vps.buf_start,
4447 buf_spec->sps.buf_start,
4448 buf_spec->pps.buf_start,
4449 buf_spec->sao_up.buf_start,
4450 buf_spec->swap_buf.buf_start,
4451 buf_spec->swap_buf2.buf_start,
4452 buf_spec->scalelut.buf_start,
4453 buf_spec->dblk_para.buf_start,
4454 buf_spec->dblk_data.buf_start,
4455 buf_spec->dblk_data2.buf_start);
4456 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4457 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4458 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4459 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4460 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4461 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4462 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4463 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4464 if (hevc->mmu_enable) {
4465 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4466 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4467 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4468 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4469 } else
4470 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4471 } /*else
4472 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4473 buf_spec->swap_buf.buf_start);
4474 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4475 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4476 /* cfg_p_addr */
4477 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4478 /* cfg_d_addr */
4479 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4480
4481 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4482
4483 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4484}
4485
4486static void parser_cmd_write(void)
4487{
4488 u32 i;
4489 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4490 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4491 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4492 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4493 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4494 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4495 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4496 0x7C00
4497 };
4498 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4499 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4500}
4501
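/*
 * HEVC front-end bring-up (summary of the sequence below): sanity-checks
 * the parser scratch registers, resets IQIT, enables stream
 * fetch/shift/CABAC, programs the parser interrupt mask and
 * start/emulation codes, clears the scale LUT, sends the parser command
 * list and soft-resets IPP/MPP.
 */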
4502static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4503 int decode_pic_begin, int decode_pic_num)
4504{
4505 unsigned int data32;
4506 int i;
4507#if 0
4508 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4509 /* Set MCR fetch priorities*/
4510 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4511 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4512 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4513 }
4514#endif
4515#if 1
4516 /* m8baby test1902 */
4517 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4518 hevc_print(hevc, 0,
4519 "%s\n", __func__);
4520 data32 = READ_VREG(HEVC_PARSER_VERSION);
4521 if (data32 != 0x00010001) {
4522 print_scratch_error(25);
4523 return;
4524 }
4525 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4526 data32 = READ_VREG(HEVC_PARSER_VERSION);
4527 if (data32 != 0x5a5a55aa) {
4528 print_scratch_error(26);
4529 return;
4530 }
4531#if 0
4532 /* test Parser Reset */
4533 /* reset iqit to start mem init again */
4534 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4535 (1 << 3) /* reset_whole parser */
4536 );
4537 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4538 data32 = READ_VREG(HEVC_PARSER_VERSION);
4539 if (data32 != 0x00010001)
4540 hevc_print(hevc, 0,
4541 "Test Parser Fatal Error\n");
4542#endif
4543 /* reset iqit to start mem init again */
4544 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4545 );
4546 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4547 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4548
4549#endif
4550 if (!hevc->m_ins_flag) {
4551 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4552 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4553 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4554 data32 |= (0xf << 25); /*arwlen_axi_max*/
4555 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4556 }
4557 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4558 if (data32 != 0x00000100) {
4559 print_scratch_error(29);
4560 return;
4561 }
4562 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4563 if (data32 != 0x00000300) {
4564 print_scratch_error(30);
4565 return;
4566 }
4567 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4568 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4569 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4570 if (data32 != 0x12345678) {
4571 print_scratch_error(31);
4572 return;
4573 }
4574 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4575 if (data32 != 0x9abcdef0) {
4576 print_scratch_error(32);
4577 return;
4578 }
4579 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4580 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4581
4582 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4583 data32 &= 0x03ffffff;
4584 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4585 | /* stream_buffer_empty_int_amrisc_enable */
4586 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4587 (1 << 7) | /* dec_done_int_cpu_enable */
4588 (1 << 4) | /* startcode_found_int_cpu_enable */
4589 (0 << 3) | /* startcode_found_int_amrisc_enable */
4590 (1 << 0) /* parser_int_enable */
4591 ;
4592 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4593
4594 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4595 data32 = data32 | (1 << 1) | /* emulation_check_on */
4596 (1 << 0) /* startcode_check_on */
4597 ;
4598 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4599
4600 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4601 (2 << 4) | /* emulate_code_length_sub_1 */
4602 (2 << 1) | /* start_code_length_sub_1 */
4603 (1 << 0) /* stream_shift_enable */
4604 );
4605
4606 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4607 );
4608 /* hevc_parser_core_clk_en */
4609 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4610 );
4611
4612 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4613
 4614	/* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4615 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4616 for (i = 0; i < 1024; i++)
4617 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4618
4619#ifdef ENABLE_SWAP_TEST
4620 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4621#endif
4622
4623 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4624 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4625 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4626 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4627 /* Send parser_cmd */
4628 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4629
4630 parser_cmd_write();
4631
4632 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4633 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4634 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4635
4636 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4637 /* (1 << 8) | // sao_sw_pred_enable */
4638 (1 << 5) | /* parser_sao_if_en */
4639 (1 << 2) | /* parser_mpred_if_en */
4640 (1 << 0) /* parser_scaler_if_en */
4641 );
4642
4643 /* Changed to Start MPRED in microcode */
4644 /*
4645 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4646 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4647 * (1<<31)
4648 * );
4649 */
4650
4651 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4652 (1 << 0) /* software reset ipp and mpp */
4653 );
4654 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4655 (0 << 0) /* software reset ipp and mpp */
4656 );
4657
4658 if (get_double_write_mode(hevc) & 0x10)
4659 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4660 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
4661 );
4662
4663}
4664
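/*
 * Re-programs the parser/front-end after a hardware reset; largely
 * mirrors hevc_init_decoder_hw(), minus the multi-instance
 * stream-control handling and the decode-size/status initialization.
 */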
4665static void decoder_hw_reset(void)
4666{
4667 int i;
4668 unsigned int data32;
4669 /* reset iqit to start mem init again */
4670 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4671 );
4672 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4673 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4674
4675 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4676 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4677 ;
4678 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4679
4680 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4681 if (data32 != 0x00000100) {
4682 print_scratch_error(29);
4683 return;
4684 }
4685 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4686 if (data32 != 0x00000300) {
4687 print_scratch_error(30);
4688 return;
4689 }
4690 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4691 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4692 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4693 if (data32 != 0x12345678) {
4694 print_scratch_error(31);
4695 return;
4696 }
4697 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4698 if (data32 != 0x9abcdef0) {
4699 print_scratch_error(32);
4700 return;
4701 }
4702 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4703 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4704
4705 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4706 data32 &= 0x03ffffff;
4707 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4708 | /* stream_buffer_empty_int_amrisc_enable */
4709 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4710 (1 << 7) | /* dec_done_int_cpu_enable */
4711 (1 << 4) | /* startcode_found_int_cpu_enable */
4712 (0 << 3) | /* startcode_found_int_amrisc_enable */
4713 (1 << 0) /* parser_int_enable */
4714 ;
4715 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4716
4717 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4718 data32 = data32 | (1 << 1) | /* emulation_check_on */
4719 (1 << 0) /* startcode_check_on */
4720 ;
4721 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4722
4723 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4724 (2 << 4) | /* emulate_code_length_sub_1 */
4725 (2 << 1) | /* start_code_length_sub_1 */
4726 (1 << 0) /* stream_shift_enable */
4727 );
4728
4729 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4730 );
4731 /* hevc_parser_core_clk_en */
4732 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4733 );
4734
 4735	/* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4736 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4737 for (i = 0; i < 1024; i++)
4738 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4739
4740 /* Send parser_cmd */
4741 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4742
4743 parser_cmd_write();
4744
4745 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4746 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4747 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4748
4749 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4750 /* (1 << 8) | // sao_sw_pred_enable */
4751 (1 << 5) | /* parser_sao_if_en */
4752 (1 << 2) | /* parser_mpred_if_en */
4753 (1 << 0) /* parser_scaler_if_en */
4754 );
4755
4756 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4757 (1 << 0) /* software reset ipp and mpp */
4758 );
4759 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4760 (0 << 0) /* software reset ipp and mpp */
4761 );
4762}
4763
4764#ifdef CONFIG_HEVC_CLK_FORCED_ON
4765static void config_hevc_clk_forced_on(void)
4766{
4767 unsigned int rdata32;
4768 /* IQIT */
4769 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4770 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4771
4772 /* DBLK */
4773 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4774 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4775
4776 /* SAO */
4777 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4778 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4779
4780 /* MPRED */
4781 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4782 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4783
4784 /* PARSER */
4785 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4786 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4787 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4788 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4789 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4790 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4791 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4792 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4793 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4794 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4795 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4796 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4797 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4798
4799 /* IPP */
4800 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4801 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4802
4803 /* MCRCC */
4804 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4805 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4806}
4807#endif
4808
4809#ifdef MCRCC_ENABLE
4810static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4811{
4812 unsigned int rdata32;
4813 unsigned int rdata32_2;
4814 int l0_cnt = 0;
4815 int l1_cnt = 0x7fff;
4816
4817 if (get_double_write_mode(hevc) & 0x10) {
4818 l0_cnt = hevc->cur_pic->RefNum_L0;
4819 l1_cnt = hevc->cur_pic->RefNum_L1;
4820 }
4821
4822 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4823
4824 if (slice_type == 2) { /* I-PIC */
4825 /* remove reset -- disables clock */
4826 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4827 return;
4828 }
4829
4830 if (slice_type == 0) { /* B-PIC */
4831 /* Programme canvas0 */
4832 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4833 (0 << 8) | (0 << 1) | 0);
4834 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4835 rdata32 = rdata32 & 0xffff;
4836 rdata32 = rdata32 | (rdata32 << 16);
4837 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4838
4839 /* Programme canvas1 */
4840 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4841 (16 << 8) | (1 << 1) | 0);
4842 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4843 rdata32_2 = rdata32_2 & 0xffff;
4844 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4845 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4846 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4847 rdata32_2 = rdata32_2 & 0xffff;
4848 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4849 }
4850 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4851 } else { /* P-PIC */
4852 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4853 (0 << 8) | (1 << 1) | 0);
4854 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4855 rdata32 = rdata32 & 0xffff;
4856 rdata32 = rdata32 | (rdata32 << 16);
4857 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4858
4859 if (l0_cnt == 1) {
4860 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4861 } else {
4862 /* Programme canvas1 */
4863 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4864 rdata32 = rdata32 & 0xffff;
4865 rdata32 = rdata32 | (rdata32 << 16);
4866 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4867 }
4868 }
4869 /* enable mcrcc progressive-mode */
4870 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4871}
4872#endif
4873
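/*
 * Programs the per-tile SAO workspace: the per-LCU SAO memory unit size
 * and the base addresses of the SAO above and vertical line buffers.
 */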
4874static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4875 int sao_mem_unit)
4876{
4877 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4878 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4879 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4880 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4881}
4882
4883static u32 init_aux_size;
4884static int aux_data_is_avaible(struct hevc_state_s *hevc)
4885{
4886 u32 reg_val;
4887
4888 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4889 if (reg_val != 0 && reg_val != init_aux_size)
4890 return 1;
4891 else
4892 return 0;
4893}
4894
4895static void config_aux_buf(struct hevc_state_s *hevc)
4896{
4897 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4898 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4899 (hevc->suffix_aux_size >> 4);
4900 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4901}
4902
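/*
 * Programs the motion-prediction block for the current slice: colocated
 * MV read/write bases, tile geometry, reference counts/POCs for both
 * lists and the merge/AMVP candidate limits.  Illustrative sizing,
 * matching the MV_MEM_UNIT selection below: with 64x64 LCUs
 * (lcu_size_log2 == 6) each LCU uses 0x200 bytes of MV storage, so a
 * 3840x2160 picture (60 x 34 LCUs) needs 60 * 34 * 0x200 = 0xff000
 * bytes per frame.
 */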
4903static void config_mpred_hw(struct hevc_state_s *hevc)
4904{
4905 int i;
4906 unsigned int data32;
4907 struct PIC_s *cur_pic = hevc->cur_pic;
4908 struct PIC_s *col_pic = hevc->col_pic;
4909 int AMVP_MAX_NUM_CANDS_MEM = 3;
4910 int AMVP_MAX_NUM_CANDS = 2;
4911 int NUM_CHROMA_MODE = 5;
4912 int DM_CHROMA_IDX = 36;
4913 int above_ptr_ctrl = 0;
4914 int buffer_linear = 1;
4915 int cu_size_log2 = 3;
4916
4917 int mpred_mv_rd_start_addr;
4918 int mpred_curr_lcu_x;
4919 int mpred_curr_lcu_y;
4920 int mpred_above_buf_start;
4921 int mpred_mv_rd_ptr;
4922 int mpred_mv_rd_ptr_p1;
4923 int mpred_mv_rd_end_addr;
4924 int MV_MEM_UNIT;
4925 int mpred_mv_wr_ptr;
4926 int *ref_poc_L0, *ref_poc_L1;
4927
4928 int above_en;
4929 int mv_wr_en;
4930 int mv_rd_en;
4931 int col_isIntra;
4932
4933 if (hevc->slice_type != 2) {
4934 above_en = 1;
4935 mv_wr_en = 1;
4936 mv_rd_en = 1;
4937 col_isIntra = 0;
4938 } else {
4939 above_en = 1;
4940 mv_wr_en = 1;
4941 mv_rd_en = 0;
4942 col_isIntra = 0;
4943 }
4944
4945 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
4946 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
4947 mpred_curr_lcu_x = data32 & 0xffff;
4948 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
4949
4950 MV_MEM_UNIT =
4951 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
4952 5 ? 0x80 : 0x20;
4953 mpred_mv_rd_ptr =
4954 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
4955
4956 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
4957 mpred_mv_rd_end_addr =
4958 mpred_mv_rd_start_addr +
4959 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
4960
4961 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
4962
4963 mpred_mv_wr_ptr =
4964 cur_pic->mpred_mv_wr_start_addr +
4965 (hevc->slice_addr * MV_MEM_UNIT);
4966
4967 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4968 hevc_print(hevc, 0,
4969 "cur pic index %d col pic index %d\n", cur_pic->index,
4970 col_pic->index);
4971 }
4972
4973 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
4974 cur_pic->mpred_mv_wr_start_addr);
4975 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
4976
4977 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
4978 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
4979 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
4980
4981 data32 = READ_VREG(HEVC_MPRED_CTRL0);
4982 data32 = (hevc->slice_type |
4983 hevc->new_pic << 2 |
4984 hevc->new_tile << 3 |
4985 hevc->isNextSliceSegment << 4 |
4986 hevc->TMVPFlag << 5 |
4987 hevc->LDCFlag << 6 |
4988 hevc->ColFromL0Flag << 7 |
4989 above_ptr_ctrl << 8 |
4990 above_en << 9 |
4991 mv_wr_en << 10 |
4992 mv_rd_en << 11 |
4993 col_isIntra << 12 |
4994 buffer_linear << 13 |
4995 hevc->LongTerm_Curr << 14 |
4996 hevc->LongTerm_Col << 15 |
4997 hevc->lcu_size_log2 << 16 |
4998 cu_size_log2 << 20 | hevc->plevel << 24);
4999 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
5000
5001 data32 = READ_VREG(HEVC_MPRED_CTRL1);
5002 data32 = (
5003#if 0
5004 /* no set in m8baby test1902 */
5005 /* Don't override clk_forced_on , */
5006 (data32 & (0x1 << 24)) |
5007#endif
5008 hevc->MaxNumMergeCand |
5009 AMVP_MAX_NUM_CANDS << 4 |
5010 AMVP_MAX_NUM_CANDS_MEM << 8 |
5011 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
5012 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
5013
5014 data32 = (hevc->pic_w | hevc->pic_h << 16);
5015 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
5016
5017 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5018 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
5019
5020 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
5021 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
5022
5023 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
5024 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
5025
5026 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
5027 /* col_RefNum_L0<<16| */
5028 /* col_RefNum_L1<<24 */
5029 );
5030 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
5031
5032 data32 = (hevc->LongTerm_Ref);
5033 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
5034
5035 data32 = 0;
5036 for (i = 0; i < hevc->RefNum_L0; i++)
5037 data32 = data32 | (1 << i);
5038 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
5039
5040 data32 = 0;
5041 for (i = 0; i < hevc->RefNum_L1; i++)
5042 data32 = data32 | (1 << i);
5043 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
5044
5045 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
5046 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
5047
5048 /* below MPRED Ref_POC_xx_Lx registers must follow Ref_POC_xx_L0 ->
5049 * Ref_POC_xx_L1 in pair write order!!!
5050 */
5051 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
5052 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
5053
5054 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
5055 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
5056
5057 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
5058 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
5059
5060 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
5061 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
5062
5063 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
5064 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
5065
5066 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
5067 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
5068
5069 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
5070 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
5071
5072 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
5073 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
5074
5075 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
5076 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
5077
5078 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
5079 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
5080
5081 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
5082 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
5083
5084 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
5085 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
5086
5087 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
5088 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
5089
5090 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
5091 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
5092
5093 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
5094 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
5095
5096 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
5097 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
5098
5099 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
5100 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
5101
5102 if (hevc->new_pic) {
5103 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
5104 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
5105 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
5106 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
5107 } else if (!hevc->isNextSliceSegment) {
5108 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
5109 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
5110 }
5111
5112 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
5113}
5114
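/*
 * Per-picture SAO/deblock output setup: picture and LCU geometry, the
 * compressed (CM body/header) and double-write output addresses, the
 * endian/format bits in HEVC_SAO_CTRL1 / HEVCD_IPP_AXIIF_CONFIG (with a
 * v4l2 NV12/NV21 swap), and the deblocking filter offsets taken from
 * either the slice header or the PPS.
 */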
5115static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
5116{
5117 unsigned int data32, data32_2;
5118 int misc_flag0 = hevc->misc_flag0;
5119 int slice_deblocking_filter_disabled_flag = 0;
5120
5121 int mc_buffer_size_u_v =
5122 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
5123 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
5124 struct PIC_s *cur_pic = hevc->cur_pic;
 5125	struct aml_vcodec_ctx *v4l2_ctx = hevc->v4l2_ctx;
5126
5127 data32 = READ_VREG(HEVC_SAO_CTRL0);
5128 data32 &= (~0xf);
5129 data32 |= hevc->lcu_size_log2;
5130 WRITE_VREG(HEVC_SAO_CTRL0, data32);
5131
5132 data32 = (hevc->pic_w | hevc->pic_h << 16);
5133 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5134
5135 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5136 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5137
5138 if (hevc->new_pic)
5139 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5140#ifdef LOSLESS_COMPRESS_MODE
5141/*SUPPORT_10BIT*/
5142 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5143 data32 = READ_VREG(HEVC_SAO_CTRL5);
5144 data32 &= (~(0xff << 16));
5145
5146 if (get_double_write_mode(hevc) == 2 ||
5147 get_double_write_mode(hevc) == 3)
5148 data32 |= (0xff<<16);
5149 else if (get_double_write_mode(hevc) == 4)
5150 data32 |= (0x33<<16);
5151
5152 if (hevc->mem_saving_mode == 1)
5153 data32 |= (1 << 9);
5154 else
5155 data32 &= ~(1 << 9);
5156 if (workaround_enable & 1)
5157 data32 |= (1 << 7);
5158 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5159 }
5160 data32 = cur_pic->mc_y_adr;
5161 if (get_double_write_mode(hevc))
5162 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5163
5164 if ((get_double_write_mode(hevc) & 0x10) == 0)
5165 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5166
5167 if (hevc->mmu_enable)
5168 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5169#else
5170 data32 = cur_pic->mc_y_adr;
5171 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5172#endif
5173 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5174 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5175
5176#ifdef LOSLESS_COMPRESS_MODE
5177/*SUPPORT_10BIT*/
5178 if (get_double_write_mode(hevc))
5179 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5180#else
5181 data32 = cur_pic->mc_u_v_adr;
5182 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5183#endif
5184 data32 = (mc_buffer_size_u_v_h << 16);
5185 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5186
5187#ifdef LOSLESS_COMPRESS_MODE
5188/*SUPPORT_10BIT*/
5189 if (get_double_write_mode(hevc)) {
5190 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5191 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5192 }
5193#else
5194 /* multi tile to do... */
5195 data32 = cur_pic->mc_y_adr;
5196 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5197
5198 data32 = cur_pic->mc_u_v_adr;
5199 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5200#endif
5201 /* DBLK CONFIG HERE */
5202 if (hevc->new_pic) {
5203 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5204 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5205 data32 = (0xff << 8) | (0x0 << 0);
5206 else
5207 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5208 (0x0 << 0); /* h265 video format*/
5209
5210 if (hevc->pic_w >= 1280)
5211 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5212 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5213 if (get_double_write_mode(hevc) == 0)
5214 data32 |= (0x1 << 8); /*enable first write*/
5215 else if (get_double_write_mode(hevc) == 0x10)
5216 data32 |= (0x1 << 9); /*double write only*/
5217 else
 5218				data32 |= ((0x1 << 8) | (0x1 << 9));
5219
5220 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5221 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5222 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5223 }
5224 data32 = (hevc->pic_w | hevc->pic_h << 16);
5225 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5226
5227 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5228 data32 =
5229 ((misc_flag0 >>
5230 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5231 0x1) << 3;
5232 } else
5233 data32 = 0;
5234 data32 |=
5235 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5236 ((params->p.pps_cr_qp_offset
5237 & 0x1f) <<
5238 9));
5239 data32 |=
5240 (hevc->lcu_size ==
5241 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5242
5243 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5244
5245 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5246 /*if (debug & 0x80) {*/
5247 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5248 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5249 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5250 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5251 data32);
5252 /*}*/
5253 }
5254 }
5255#if 0
5256 data32 = READ_VREG(HEVC_SAO_CTRL1);
5257 data32 &= (~0x3000);
5258 data32 |= (hevc->mem_map_mode <<
5259 12);
5260
5261/* [13:12] axi_aformat,
5262 * 0-Linear, 1-32x32, 2-64x32
5263 */
5264 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5265
5266 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5267 data32 &= (~0x30);
5268 data32 |= (hevc->mem_map_mode <<
5269 4);
5270
5271/* [5:4] -- address_format
5272 * 00:linear 01:32x32 10:64x32
5273 */
5274 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5275#else
5276 /* m8baby test1902 */
5277 data32 = READ_VREG(HEVC_SAO_CTRL1);
5278 data32 &= (~0x3000);
5279 data32 |= (hevc->mem_map_mode <<
5280 12);
5281
5282/* [13:12] axi_aformat, 0-Linear,
5283 * 1-32x32, 2-64x32
5284 */
5285 data32 &= (~0xff0);
5286 /* data32 |= 0x670; // Big-Endian per 64-bit */
5287 data32 |= endian; /* Big-Endian per 64-bit */
5288 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5289 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5290 if (get_double_write_mode(hevc) == 0)
5291 data32 |= 0x2; /*disable double write*/
5292 else if (get_double_write_mode(hevc) & 0x10)
5293 data32 |= 0x1; /*disable cm*/
5294 } else {
5295 unsigned int data;
5296 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5297 (0x0 << 0); /* h265 video format*/
5298 if (hevc->pic_w >= 1280)
5299 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5300 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5301 if (get_double_write_mode(hevc) == 0)
5302 data |= (0x1 << 8); /*enable first write*/
5303 else if (get_double_write_mode(hevc) & 0x10)
5304 data |= (0x1 << 9); /*double write only*/
5305 else
 5306			data |= ((0x1 << 8) | (0x1 << 9));
5307
5308 WRITE_VREG(HEVC_DBLK_CFGB, data);
5309 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5310 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5311 }
5312
5313 /* swap uv */
5314 if (hevc->is_used_v4l) {
5315 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5316 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5317 data32 &= ~(1 << 8); /* NV21 */
5318 else
5319 data32 |= (1 << 8); /* NV12 */
5320 }
5321
5322 /*
5323 * [31:24] ar_fifo1_axi_thred
5324 * [23:16] ar_fifo0_axi_thred
5325 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5326 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5327 * [11:08] axi_lendian_C
5328 * [07:04] axi_lendian_Y
5329 * [3] reserved
5330 * [2] clk_forceon
5331 * [1] dw_disable:disable double write output
5332 * [0] cm_disable:disable compress output
5333 */
5334 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5335 if (get_double_write_mode(hevc) & 0x10) {
5336 /* [23:22] dw_v1_ctrl
5337 *[21:20] dw_v0_ctrl
5338 *[19:18] dw_h1_ctrl
5339 *[17:16] dw_h0_ctrl
5340 */
5341 data32 = READ_VREG(HEVC_SAO_CTRL5);
5342 /*set them all 0 for H265_NV21 (no down-scale)*/
5343 data32 &= ~(0xff << 16);
5344 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5345 }
5346
5347 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5348 data32 &= (~0x30);
5349 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5350 data32 |= (hevc->mem_map_mode <<
5351 4);
5352 data32 &= (~0xF);
 5353	data32 |= 0xf; /* [3:0] little_endian, takes effect only in double-write-only mode */
5354 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5355
5356 /* swap uv */
5357 if (hevc->is_used_v4l) {
5358 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5359 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5360 data32 |= (1 << 12); /* NV21 */
5361 else
5362 data32 &= ~(1 << 12); /* NV12 */
5363 }
5364
5365 /*
5366 * [3:0] little_endian
5367 * [5:4] address_format 00:linear 01:32x32 10:64x32
5368 * [7:6] reserved
5369 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5370 * [11:10] reserved
5371 * [12] CbCr_byte_swap
5372 * [31:13] reserved
5373 */
5374 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5375#endif
5376 data32 = 0;
5377 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5378 data32_2 &= (~0x300);
5379 /* slice_deblocking_filter_disabled_flag = 0;
5380 * ucode has handle it , so read it from ucode directly
5381 */
5382 if (hevc->tile_enabled) {
5383 data32 |=
5384 ((misc_flag0 >>
5385 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5386 0x1) << 0;
5387 data32_2 |=
5388 ((misc_flag0 >>
5389 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5390 0x1) << 8;
5391 }
5392 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5393 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5394 0x1; /* ucode has handle it,so read it from ucode directly */
5395 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5396 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5397 /* slice_deblocking_filter_disabled_flag =
5398 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5399 * //ucode has handle it , so read it from ucode directly
5400 */
5401 data32 |= slice_deblocking_filter_disabled_flag << 2;
5402 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5403 hevc_print_cont(hevc, 0,
5404 "(1,%x)", data32);
5405 if (!slice_deblocking_filter_disabled_flag) {
5406 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5407 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5408 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5409 hevc_print_cont(hevc, 0,
5410 "(2,%x)", data32);
5411 }
5412 } else {
5413 data32 |=
5414 ((misc_flag0 >>
5415 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5416 0x1) << 2;
5417 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5418 hevc_print_cont(hevc, 0,
5419 "(3,%x)", data32);
5420 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5421 0x1) == 0) {
5422 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5423 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5424 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5425 hevc_print_cont(hevc, 0,
5426 "(4,%x)", data32);
5427 }
5428 }
5429 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5430 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5431 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5432 || (!slice_deblocking_filter_disabled_flag))) {
5433 data32 |=
5434 ((misc_flag0 >>
5435 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5436 & 0x1) << 1;
5437 data32_2 |=
5438 ((misc_flag0 >>
5439 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5440 & 0x1) << 9;
5441 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5442 hevc_print_cont(hevc, 0,
5443 "(5,%x)\n", data32);
5444 } else {
5445 data32 |=
5446 ((misc_flag0 >>
5447 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5448 & 0x1) << 1;
5449 data32_2 |=
5450 ((misc_flag0 >>
5451 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5452 & 0x1) << 9;
5453 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5454 hevc_print_cont(hevc, 0,
5455 "(6,%x)\n", data32);
5456 }
5457 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5458 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5459}
5460
5461#ifdef TEST_NO_BUF
5462static unsigned char test_flag = 1;
5463#endif
5464
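/*
 * Reconciles the picture pool with the current sequence: frees buffers
 * of idle pictures whose size no longer matches pic_w/pic_h, retires
 * surplus entries, revives unused entries when more working pictures
 * are needed, then releases any buffers left unused.
 */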
5465static void pic_list_process(struct hevc_state_s *hevc)
5466{
5467 int work_pic_num = get_work_pic_num(hevc);
5468 int alloc_pic_count = 0;
5469 int i;
5470 struct PIC_s *pic;
5471 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5472 pic = hevc->m_PIC[i];
5473 if (pic == NULL || pic->index == -1)
5474 continue;
5475 alloc_pic_count++;
5476 if (pic->output_mark == 0 && pic->referenced == 0
5477 && pic->output_ready == 0
5478 && (pic->width != hevc->pic_w ||
5479 pic->height != hevc->pic_h)
5480 ) {
5481 set_buf_unused(hevc, pic->BUF_index);
5482 pic->BUF_index = -1;
5483 if (alloc_pic_count > work_pic_num) {
5484 pic->width = 0;
5485 pic->height = 0;
5486 pic->index = -1;
5487 } else {
5488 pic->width = hevc->pic_w;
5489 pic->height = hevc->pic_h;
5490 }
5491 }
5492 }
5493 if (alloc_pic_count < work_pic_num) {
5494 int new_count = alloc_pic_count;
5495 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5496 pic = hevc->m_PIC[i];
5497 if (pic && pic->index == -1) {
5498 pic->index = i;
5499 pic->BUF_index = -1;
5500 pic->width = hevc->pic_w;
5501 pic->height = hevc->pic_h;
5502 new_count++;
5503 if (new_count >=
5504 work_pic_num)
5505 break;
5506 }
5507 }
5508
5509 }
5510 dealloc_unused_buf(hevc);
5511 if (get_alloc_pic_count(hevc)
5512 != alloc_pic_count) {
5513 hevc_print_cont(hevc, 0,
 5514			"%s: work_pic_num is %d, change alloc_pic_count from %d to %d\n",
5515 __func__,
5516 work_pic_num,
5517 alloc_pic_count,
5518 get_alloc_pic_count(hevc));
5519 }
5520}
5521
5522static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5523{
5524 int i;
5525 struct PIC_s *pic;
5526 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5527 pic = hevc->m_PIC[i];
5528 if (pic == NULL || pic->index == -1)
5529 continue;
5530
5531 if (pic->output_mark == 0 && pic->referenced == 0
5532 && pic->output_ready == 0
5533 && pic->scatter_alloc
5534 )
5535 release_pic_mmu_buf(hevc, pic);
5536 }
5537
5538}
5539
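/*
 * Picks a free DPB entry (not output-marked, not referenced, not held
 * by a vframe) for the next decoded picture, lazily allocating its
 * frame buffer, MV buffer and (if MMU is on) compressed-frame pages,
 * then seeds the per-picture state (POC, bit depth, conformance
 * window, ...) from the parsed parameters.
 */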
5540static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5541 union param_u *rpm_param)
5542{
5543 struct PIC_s *new_pic = NULL;
5544 struct PIC_s *pic;
5545 int i;
5546 int ret;
5547
5548 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5549 pic = hevc->m_PIC[i];
5550 if (pic == NULL || pic->index == -1)
5551 continue;
5552
5553 if (pic->output_mark == 0 && pic->referenced == 0
5554 && pic->output_ready == 0
5555 && pic->width == hevc->pic_w
5556 && pic->height == hevc->pic_h
5557 && pic->vf_ref == 0
5558 ) {
5559 if (new_pic) {
5560 if (new_pic->POC != INVALID_POC) {
5561 if (pic->POC == INVALID_POC ||
5562 pic->POC < new_pic->POC)
5563 new_pic = pic;
5564 }
5565 } else
5566 new_pic = pic;
5567 }
5568 }
5569
5570 if (new_pic == NULL)
5571 return NULL;
5572
5573 if (new_pic->BUF_index < 0) {
5574 if (alloc_buf(hevc) < 0)
5575 return NULL;
5576 else {
5577 if (config_pic(hevc, new_pic) < 0) {
5578 dealloc_pic_buf(hevc, new_pic);
5579 return NULL;
5580 }
5581 }
5582 new_pic->width = hevc->pic_w;
5583 new_pic->height = hevc->pic_h;
5584 set_canvas(hevc, new_pic);
5585
5586 init_pic_list_hw(hevc);
5587 }
5588
5589 if (new_pic) {
5590 new_pic->double_write_mode =
5591 get_double_write_mode(hevc);
5592 if (new_pic->double_write_mode)
5593 set_canvas(hevc, new_pic);
5594
5595#ifdef TEST_NO_BUF
5596 if (test_flag) {
5597 test_flag = 0;
5598 return NULL;
5599 } else
5600 test_flag = 1;
5601#endif
5602 if (get_mv_buf(hevc, new_pic) < 0)
5603 return NULL;
5604
5605 if (hevc->mmu_enable) {
5606 ret = H265_alloc_mmu(hevc, new_pic,
5607 rpm_param->p.bit_depth,
5608 hevc->frame_mmu_map_addr);
5609 if (ret != 0) {
5610 put_mv_buf(hevc, new_pic);
5611 hevc_print(hevc, 0,
 5612					"can't alloc needed mmu buffer, idx %d ret = %d\n",
5613 new_pic->decode_idx,
5614 ret);
5615 return NULL;
5616 }
5617 }
5618 new_pic->referenced = 1;
5619 new_pic->decode_idx = hevc->decode_idx;
5620 new_pic->slice_idx = 0;
5622 new_pic->output_mark = 0;
5623 new_pic->recon_mark = 0;
5624 new_pic->error_mark = 0;
5625 new_pic->dis_mark = 0;
5626 /* new_pic->output_ready = 0; */
5627 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5628 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5629 new_pic->POC = hevc->curr_POC;
5630 new_pic->pic_struct = hevc->curr_pic_struct;
5631 if (new_pic->aux_data_buf)
5632 release_aux_data(hevc, new_pic);
5633 new_pic->mem_saving_mode =
5634 hevc->mem_saving_mode;
5635 new_pic->bit_depth_luma =
5636 hevc->bit_depth_luma;
5637 new_pic->bit_depth_chroma =
5638 hevc->bit_depth_chroma;
5639 new_pic->video_signal_type =
5640 hevc->video_signal_type;
5641
5642 new_pic->conformance_window_flag =
5643 hevc->param.p.conformance_window_flag;
5644 new_pic->conf_win_left_offset =
5645 hevc->param.p.conf_win_left_offset;
5646 new_pic->conf_win_right_offset =
5647 hevc->param.p.conf_win_right_offset;
5648 new_pic->conf_win_top_offset =
5649 hevc->param.p.conf_win_top_offset;
5650 new_pic->conf_win_bottom_offset =
5651 hevc->param.p.conf_win_bottom_offset;
5652 new_pic->chroma_format_idc =
5653 hevc->param.p.chroma_format_idc;
5654
5655 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5656 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5657 __func__, new_pic->index,
5658 new_pic->BUF_index, new_pic->decode_idx,
5659 new_pic->POC);
5660
5661 }
5662 if (pic_list_debug & 0x1) {
5663 dump_pic_list(hevc);
5664 pr_err("\n*******************************************\n");
5665 }
5666
5667 return new_pic;
5668}
5669
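/*
 * V4L2 variant of get_new_pic(): walk the capture buffer pool and reuse
 * a free buffer already owned by the decoder, or take one newly queued
 * from the m2m side (allocating and configuring it on first use), then
 * fill in the same per-frame state as the non-v4l path.
 */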
5670static struct PIC_s *v4l_get_new_pic(struct hevc_state_s *hevc,
5671 union param_u *rpm_param)
5672{
5673 int ret;
5674 struct aml_vcodec_ctx * v4l = hevc->v4l2_ctx;
5675 struct v4l_buff_pool *pool = &v4l->cap_pool;
5676 struct PIC_s *new_pic = NULL;
5677 struct PIC_s *pic = NULL;
5678 int i;
5679
5680 for (i = 0; i < pool->in; ++i) {
5681 u32 state = (pool->seq[i] >> 16);
5682 u32 index = (pool->seq[i] & 0xffff);
5683
5684 switch (state) {
5685 case V4L_CAP_BUFF_IN_DEC:
5686 pic = hevc->m_PIC[i];
5687 if (pic && (pic->index != -1) &&
5688 (pic->output_mark == 0) &&
5689 (pic->referenced == 0) &&
5690 (pic->output_ready == 0) &&
5691 (pic->width == hevc->pic_w) &&
5692 (pic->height == hevc->pic_h) &&
5693 (pic->vf_ref == 0) &&
5694 pic->cma_alloc_addr) {
5695 new_pic = pic;
5696 }
5697 break;
5698 case V4L_CAP_BUFF_IN_M2M:
5699 pic = hevc->m_PIC[index];
5700 pic->width = hevc->pic_w;
5701 pic->height = hevc->pic_h;
5702 if ((pic->index != -1) &&
5703 !v4l_alloc_buf(hevc, pic)) {
5704 v4l_config_pic(hevc, pic);
5705 init_pic_list_hw(hevc);
5706 new_pic = pic;
5707 }
5708 break;
5709 default:
5710 pr_err("v4l buffer state err %d.\n", state);
5711 break;
5712 }
5713
5714 if (new_pic)
5715 break;
5716 }
5717
5718 if (new_pic == NULL)
5719 return NULL;
5720
5721 new_pic->double_write_mode = get_double_write_mode(hevc);
5722 if (new_pic->double_write_mode)
5723 set_canvas(hevc, new_pic);
5724
5725 if (get_mv_buf(hevc, new_pic) < 0)
5726 return NULL;
5727
5728 if (hevc->mmu_enable) {
5729 ret = H265_alloc_mmu(hevc, new_pic,
5730 rpm_param->p.bit_depth,
5731 hevc->frame_mmu_map_addr);
5732 if (ret != 0) {
5733 put_mv_buf(hevc, new_pic);
5734 hevc_print(hevc, 0,
5735 "can't alloc need mmu1,idx %d ret =%d\n",
5736 new_pic->decode_idx, ret);
5737 return NULL;
5738 }
5739 }
5740
5741 new_pic->referenced = 1;
5742 new_pic->decode_idx = hevc->decode_idx;
5743 new_pic->slice_idx = 0;
5745 new_pic->output_mark = 0;
5746 new_pic->recon_mark = 0;
5747 new_pic->error_mark = 0;
5748 new_pic->dis_mark = 0;
5749 /* new_pic->output_ready = 0; */
5750 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5751 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5752 new_pic->POC = hevc->curr_POC;
5753 new_pic->pic_struct = hevc->curr_pic_struct;
5754
5755 if (new_pic->aux_data_buf)
5756 release_aux_data(hevc, new_pic);
5757 new_pic->mem_saving_mode =
5758 hevc->mem_saving_mode;
5759 new_pic->bit_depth_luma =
5760 hevc->bit_depth_luma;
5761 new_pic->bit_depth_chroma =
5762 hevc->bit_depth_chroma;
5763 new_pic->video_signal_type =
5764 hevc->video_signal_type;
5765
5766 new_pic->conformance_window_flag =
5767 hevc->param.p.conformance_window_flag;
5768 new_pic->conf_win_left_offset =
5769 hevc->param.p.conf_win_left_offset;
5770 new_pic->conf_win_right_offset =
5771 hevc->param.p.conf_win_right_offset;
5772 new_pic->conf_win_top_offset =
5773 hevc->param.p.conf_win_top_offset;
5774 new_pic->conf_win_bottom_offset =
5775 hevc->param.p.conf_win_bottom_offset;
5776 new_pic->chroma_format_idc =
5777 hevc->param.p.chroma_format_idc;
5778
5779 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5780 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5781 __func__, new_pic->index,
5782 new_pic->BUF_index, new_pic->decode_idx,
5783 new_pic->POC);
5784
5785 return new_pic;
5786}
5787
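/* Count the pictures currently flagged output_ready (queued for display). */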
5788static int get_display_pic_num(struct hevc_state_s *hevc)
5789{
5790 int i;
5791 struct PIC_s *pic;
5792 int num = 0;
5793
5794 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5795 pic = hevc->m_PIC[i];
5796 if (pic == NULL ||
5797 pic->index == -1)
5798 continue;
5799
5800 if (pic->output_ready == 1)
5801 num++;
5802 }
5803 return num;
5804}
5805
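/*
 * Flush the DPB: mark the given picture for output (honouring the PB
 * skip policy), then repeatedly pop displayable pictures, sending each
 * to the display queue or recycling it in error/debug cases, and
 * finally clear the referenced flag of all buffers.
 */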
5806static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5807{
5808 struct PIC_s *pic_display;
5809
5810 if (pic) {
5811 /*PB skip control */
5812 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5813 /* start decoding after first I */
5814 hevc->ignore_bufmgr_error |= 0x1;
5815 }
5816 if (hevc->ignore_bufmgr_error & 1) {
5817 if (hevc->PB_skip_count_after_decoding > 0)
5818 hevc->PB_skip_count_after_decoding--;
5819 else {
5820 /* start displaying */
5821 hevc->ignore_bufmgr_error |= 0x2;
5822 }
5823 }
5824 /**/
5825 if (pic->POC != INVALID_POC) {
5826 pic->output_mark = 1;
5827 pic->recon_mark = 1;
5828 }
5829 pic->recon_mark = 1;
5830 }
5831 do {
5832 pic_display = output_pic(hevc, 1);
5833
5834 if (pic_display) {
5835 pic_display->referenced = 0;
5836 put_mv_buf(hevc, pic_display);
5837 if ((pic_display->error_mark
5838 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5839 || (get_dbg_flag(hevc) &
5840 H265_DEBUG_DISPLAY_CUR_FRAME)
5841 || (get_dbg_flag(hevc) &
5842 H265_DEBUG_NO_DISPLAY)) {
5843 pic_display->output_ready = 0;
5844 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5845 hevc_print(hevc, 0,
5846 "[BM] Display: POC %d, ",
5847 pic_display->POC);
5848 hevc_print_cont(hevc, 0,
5849 "decoding index %d ==> ",
5850 pic_display->decode_idx);
5851 hevc_print_cont(hevc, 0,
5852 "Debug mode or error, recycle it\n");
5853 }
5854 } else {
5855 if (hevc->i_only & 0x1
5856 && pic_display->slice_type != 2) {
5857 pic_display->output_ready = 0;
5858 } else {
5859 prepare_display_buf(hevc, pic_display);
5860 if (get_dbg_flag(hevc)
5861 & H265_DEBUG_BUFMGR) {
5862 hevc_print(hevc, 0,
5863 "[BM] flush Display: POC %d, ",
5864 pic_display->POC);
5865 hevc_print_cont(hevc, 0,
5866 "decoding index %d\n",
5867 pic_display->decode_idx);
5868 }
5869 }
5870 }
5871 }
5872 } while (pic_display);
5873 clear_referenced_flag(hevc);
5874}
5875
5876/*
5877 * dv_meta_flag: 0, copy all aux payloads; 1, dolby meta only; 2, exclude dolby meta
5878 */
5879static void set_aux_data(struct hevc_state_s *hevc,
5880 struct PIC_s *pic, unsigned char suffix_flag,
5881 unsigned char dv_meta_flag)
5882{
5883 int i;
5884 unsigned short *aux_adr;
5885 unsigned int size_reg_val =
5886 READ_VREG(HEVC_AUX_DATA_SIZE);
5887 unsigned int aux_count = 0;
5888 int aux_size = 0;
5889 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
5890 return;
5891
5892 if (hevc->aux_data_dirty ||
5893 hevc->m_ins_flag == 0) {
5894
5895 hevc->aux_data_dirty = 0;
5896 }
5897
5898 if (suffix_flag) {
5899 aux_adr = (unsigned short *)
5900 (hevc->aux_addr +
5901 hevc->prefix_aux_size);
5902 aux_count =
5903 ((size_reg_val & 0xffff) << 4)
5904 >> 1;
5905 aux_size =
5906 hevc->suffix_aux_size;
5907 } else {
5908 aux_adr =
5909 (unsigned short *)hevc->aux_addr;
5910 aux_count =
5911 ((size_reg_val >> 16) << 4)
5912 >> 1;
5913 aux_size =
5914 hevc->prefix_aux_size;
5915 }
5916 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5917 hevc_print(hevc, 0,
5918 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
5919 __func__, pic, pic->aux_data_size,
5920 aux_count, suffix_flag, dv_meta_flag);
5921 }
5922 if (aux_size > 0 && aux_count > 0) {
5923 int heads_size = 0;
5924 int new_size;
5925 char *new_buf;
5926
5927 for (i = 0; i < aux_count; i++) {
5928 unsigned char tag = aux_adr[i] >> 8;
5929 if (tag != 0 && tag != 0xff) {
5930 if (dv_meta_flag == 0)
5931 heads_size += 8;
5932 else if (dv_meta_flag == 1 && tag == 0x1)
5933 heads_size += 8;
5934 else if (dv_meta_flag == 2 && tag != 0x1)
5935 heads_size += 8;
5936 }
5937 }
5938 new_size = pic->aux_data_size + aux_count + heads_size;
5939 new_buf = vmalloc(new_size);
5940 if (new_buf) {
5941 unsigned char valid_tag = 0;
5942 unsigned char *h =
5943 new_buf +
5944 pic->aux_data_size;
5945 unsigned char *p = h + 8;
5946 int len = 0;
5947 int padding_len = 0;
5948 if (pic->aux_data_buf)
 memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
5949 if (pic->aux_data_buf)
5950 vfree(pic->aux_data_buf);
5951 pic->aux_data_buf = new_buf;
5952 for (i = 0; i < aux_count; i += 4) {
5953 int ii;
5954 unsigned char tag = aux_adr[i + 3] >> 8;
5955 if (tag != 0 && tag != 0xff) {
5956 if (dv_meta_flag == 0)
5957 valid_tag = 1;
5958 else if (dv_meta_flag == 1
5959 && tag == 0x1)
5960 valid_tag = 1;
5961 else if (dv_meta_flag == 2
5962 && tag != 0x1)
5963 valid_tag = 1;
5964 else
5965 valid_tag = 0;
5966 if (valid_tag && len > 0) {
5967 pic->aux_data_size +=
5968 (len + 8);
5969 h[0] = (len >> 24)
5970 & 0xff;
5971 h[1] = (len >> 16)
5972 & 0xff;
5973 h[2] = (len >> 8)
5974 & 0xff;
5975 h[3] = (len >> 0)
5976 & 0xff;
5977 h[6] =
5978 (padding_len >> 8)
5979 & 0xff;
5980 h[7] = (padding_len)
5981 & 0xff;
5982 h += (len + 8);
5983 p += 8;
5984 len = 0;
5985 padding_len = 0;
5986 }
5987 if (valid_tag) {
5988 h[4] = tag;
5989 h[5] = 0;
5990 h[6] = 0;
5991 h[7] = 0;
5992 }
5993 }
5994 if (valid_tag) {
5995 for (ii = 0; ii < 4; ii++) {
5996 unsigned short aa =
5997 aux_adr[i + 3
5998 - ii];
5999 *p = aa & 0xff;
6000 p++;
6001 len++;
6002 /*if ((aa >> 8) == 0xff)
6003 padding_len++;*/
6004 }
6005 }
6006 }
6007 if (len > 0) {
6008 pic->aux_data_size += (len + 8);
6009 h[0] = (len >> 24) & 0xff;
6010 h[1] = (len >> 16) & 0xff;
6011 h[2] = (len >> 8) & 0xff;
6012 h[3] = (len >> 0) & 0xff;
6013 h[6] = (padding_len >> 8) & 0xff;
6014 h[7] = (padding_len) & 0xff;
6015 }
6016 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
6017 hevc_print(hevc, 0,
6018 "aux: (size %d) suffix_flag %d\n",
6019 pic->aux_data_size, suffix_flag);
6020 for (i = 0; i < pic->aux_data_size; i++) {
6021 hevc_print_cont(hevc, 0,
6022 "%02x ", pic->aux_data_buf[i]);
6023 if (((i + 1) & 0xf) == 0)
6024 hevc_print_cont(hevc, 0, "\n");
6025 }
6026 hevc_print_cont(hevc, 0, "\n");
6027 }
6028
6029 } else {
6030 hevc_print(hevc, 0, "new buf alloc failed\n");
6031 if (pic->aux_data_buf)
6032 vfree(pic->aux_data_buf);
6033 pic->aux_data_buf = NULL;
6034 pic->aux_data_size = 0;
6035 }
6036 }
6037
6038}
6039
6040static void release_aux_data(struct hevc_state_s *hevc,
6041 struct PIC_s *pic)
6042{
6043 if (pic->aux_data_buf)
6044 vfree(pic->aux_data_buf);
6045 pic->aux_data_buf = NULL;
6046 pic->aux_data_size = 0;
6047}
6048
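/*
 * Close out the previously decoded picture: mark it for output and
 * reconstruction, trim its unused MMU 4k pages (non-multi-instance MMU
 * case), and drain any pictures that became ready for display. If the
 * current NAL is an IDR, the whole DPB is flushed instead.
 */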
6049static inline void hevc_pre_pic(struct hevc_state_s *hevc,
6050 struct PIC_s *pic)
6051{
6052
6053 /* prev pic */
6054 /*if (hevc->curr_POC != 0) {*/
6055 int decoded_poc = hevc->iPrevPOC;
6056#ifdef MULTI_INSTANCE_SUPPORT
6057 if (hevc->m_ins_flag) {
6058 decoded_poc = hevc->decoded_poc;
6059 hevc->decoded_poc = INVALID_POC;
6060 }
6061#endif
6062 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
6063 && hevc->m_nalUnitType !=
6064 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6065 struct PIC_s *pic_display;
6066
6067 pic = get_pic_by_POC(hevc, decoded_poc);
6068 if (pic && (pic->POC != INVALID_POC)) {
6069 /*PB skip control */
6070 if (pic->error_mark == 0
6071 && hevc->PB_skip_mode == 1) {
6072 /* start decoding after
6073 * first I
6074 */
6075 hevc->ignore_bufmgr_error |= 0x1;
6076 }
6077 if (hevc->ignore_bufmgr_error & 1) {
6078 if (hevc->PB_skip_count_after_decoding > 0) {
6079 hevc->PB_skip_count_after_decoding--;
6080 } else {
6081 /* start displaying */
6082 hevc->ignore_bufmgr_error |= 0x2;
6083 }
6084 }
6085 if (hevc->mmu_enable
6086 && ((hevc->double_write_mode & 0x10) == 0)) {
6087 if (!hevc->m_ins_flag) {
6088 hevc->used_4k_num =
6089 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
6090
6091 if ((!is_skip_decoding(hevc, pic)) &&
6092 (hevc->used_4k_num >= 0) &&
6093 (hevc->cur_pic->scatter_alloc
6094 == 1)) {
6095 hevc_print(hevc,
6096 H265_DEBUG_BUFMGR_MORE,
6097 "%s pic index %d scatter_alloc %d page_start %d\n",
6098 "decoder_mmu_box_free_idx_tail",
6099 hevc->cur_pic->index,
6100 hevc->cur_pic->scatter_alloc,
6101 hevc->used_4k_num);
6102 hevc_mmu_dma_check(hw_to_vdec(hevc));
6103 decoder_mmu_box_free_idx_tail(
6104 hevc->mmu_box,
6105 hevc->cur_pic->index,
6106 hevc->used_4k_num);
6107 hevc->cur_pic->scatter_alloc
6108 = 2;
6109 }
6110 hevc->used_4k_num = -1;
6111 }
6112 }
6113
6114 pic->output_mark = 1;
6115 pic->recon_mark = 1;
6116 pic->dis_mark = 1;
6117 }
6118 do {
6119 pic_display = output_pic(hevc, 0);
6120
6121 if (pic_display) {
6122 if ((pic_display->error_mark &&
6123 ((hevc->ignore_bufmgr_error &
6124 0x2) == 0))
6125 || (get_dbg_flag(hevc) &
6126 H265_DEBUG_DISPLAY_CUR_FRAME)
6127 || (get_dbg_flag(hevc) &
6128 H265_DEBUG_NO_DISPLAY)) {
6129 pic_display->output_ready = 0;
6130 if (get_dbg_flag(hevc) &
6131 H265_DEBUG_BUFMGR) {
6132 hevc_print(hevc, 0,
6133 "[BM] Display: POC %d, ",
6134 pic_display->POC);
6135 hevc_print_cont(hevc, 0,
6136 "decoding index %d ==> ",
6137 pic_display->
6138 decode_idx);
6139 hevc_print_cont(hevc, 0,
6140 "Debug or err,recycle it\n");
6141 }
6142 } else {
6143 if (hevc->i_only & 0x1
6144 && pic_display->
6145 slice_type != 2) {
6146 pic_display->output_ready = 0;
6147 } else {
6148 prepare_display_buf
6149 (hevc,
6150 pic_display);
6151 if (get_dbg_flag(hevc) &
6152 H265_DEBUG_BUFMGR) {
6153 hevc_print(hevc, 0,
6154 "[BM] Display: POC %d, ",
6155 pic_display->POC);
6156 hevc_print_cont(hevc, 0,
6157 "decoding index %d\n",
6158 pic_display->
6159 decode_idx);
6160 }
6161 }
6162 }
6163 }
6164 } while (pic_display);
6165 } else {
6166 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6167 hevc_print(hevc, 0,
6168 "[BM] current pic is IDR, ");
6169 hevc_print(hevc, 0,
6170 "clear referenced flag of all buffers\n");
6171 }
6172 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6173 dump_pic_list(hevc);
6174 pic = get_pic_by_POC(hevc, decoded_poc);
6175 flush_output(hevc, pic);
6176 }
6177
6178}
6179
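/*
 * Before a new picture starts, compare the number of decoded LCUs
 * against the previous picture's LCU grid and set error_mark on the
 * current picture if it is incomplete, its header was bad, or an
 * over-decode was detected.
 */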
6180static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
6181 int decoded_lcu)
6182{
6183 int current_lcu_idx = decoded_lcu;
6184 if (decoded_lcu < 0)
6185 return;
6186
6187 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6188 hevc_print(hevc, 0,
6189 "cur lcu idx = %d, (total %d)\n",
6190 current_lcu_idx, hevc->lcu_total);
6191 }
6192 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6193 if (hevc->first_pic_after_recover) {
6194 if (current_lcu_idx !=
6195 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6196 hevc->cur_pic->error_mark = 1;
6197 } else {
6198 if (hevc->lcu_x_num_pre != 0
6199 && hevc->lcu_y_num_pre != 0
6200 && current_lcu_idx != 0
6201 && current_lcu_idx <
6202 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6203 hevc->cur_pic->error_mark = 1;
6204 }
6205 if (hevc->cur_pic->error_mark) {
6206 hevc_print(hevc, 0,
6207 "cur lcu idx = %d, (total %d), set error_mark\n",
6208 current_lcu_idx,
6209 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
6210 if (is_log_enable(hevc))
6211 add_log(hevc,
6212 "cur lcu idx = %d, (total %d), set error_mark",
6213 current_lcu_idx,
6214 hevc->lcu_x_num_pre *
6215 hevc->lcu_y_num_pre);
6216
6217 }
6218
6219 }
6220 if (hevc->cur_pic && hevc->head_error_flag) {
6221 hevc->cur_pic->error_mark = 1;
6222 hevc_print(hevc, 0,
6223 "head has error, set error_mark\n");
6224 }
6225
6226 if ((error_handle_policy & 0x80) == 0) {
6227 if (hevc->over_decode && hevc->cur_pic) {
6228 hevc_print(hevc, 0,
6229 "over decode, set error_mark\n");
6230 hevc->cur_pic->error_mark = 1;
6231 }
6232 }
6233
6234 hevc->lcu_x_num_pre = hevc->lcu_x_num;
6235 hevc->lcu_y_num_pre = hevc->lcu_y_num;
6236}
6237
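/*
 * Same check as above but against the current LCU grid: an incomplete,
 * header-damaged or over-decoded picture gets error_mark set, and an
 * errored picture whose POC lags far behind the previous one also
 * triggers a DPB flush.
 */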
6238static void check_pic_decoded_error(struct hevc_state_s *hevc,
6239 int decoded_lcu)
6240{
6241 int current_lcu_idx = decoded_lcu;
6242 if (decoded_lcu < 0)
6243 return;
6244
6245 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6246 hevc_print(hevc, 0,
6247 "cur lcu idx = %d, (total %d)\n",
6248 current_lcu_idx, hevc->lcu_total);
6249 }
6250 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6251 if (hevc->lcu_x_num != 0
6252 && hevc->lcu_y_num != 0
6253 && current_lcu_idx != 0
6254 && current_lcu_idx <
6255 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6256 hevc->cur_pic->error_mark = 1;
6257 if (hevc->cur_pic->error_mark) {
6258 hevc_print(hevc, 0,
6259 "cur lcu idx = %d, (total %d), set error_mark\n",
6260 current_lcu_idx,
6261 hevc->lcu_x_num*hevc->lcu_y_num);
6262 if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6263 && ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6264 hevc_print(hevc, 0,
6265 "Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6266 hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6267 flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6268 }
6269 if (is_log_enable(hevc))
6270 add_log(hevc,
6271 "cur lcu idx = %d, (total %d), set error_mark",
6272 current_lcu_idx,
6273 hevc->lcu_x_num *
6274 hevc->lcu_y_num);
6275
6276 }
6277
6278 }
6279 if (hevc->cur_pic && hevc->head_error_flag) {
6280 hevc->cur_pic->error_mark = 1;
6281 hevc_print(hevc, 0,
6282 "head has error, set error_mark\n");
6283 }
6284
6285 if ((error_handle_policy & 0x80) == 0) {
6286 if (hevc->over_decode && hevc->cur_pic) {
6287 hevc_print(hevc, 0,
6288 "over decode, set error_mark\n");
6289 hevc->cur_pic->error_mark = 1;
6290 }
6291 }
6292}
6293
6294/* Read back per-picture QoS statistics (QP, MV, skip rates) from the decoder.
6295 * Call this only after a complete field or frame has been decoded. */
6296static void get_picture_qos_info(struct hevc_state_s *hevc)
6297{
6298 struct PIC_s *picture = hevc->cur_pic;
6299
6300/*
6301#define DEBUG_QOS
6302*/
6303
6304 if (!hevc->cur_pic)
6305 return;
6306
6307 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6308 unsigned char a[3];
6309 unsigned char i, j, t;
6310 unsigned long data;
6311
6312 data = READ_VREG(HEVC_MV_INFO);
6313 if (picture->slice_type == I_SLICE)
6314 data = 0;
6315 a[0] = data & 0xff;
6316 a[1] = (data >> 8) & 0xff;
6317 a[2] = (data >> 16) & 0xff;
6318
6319 for (i = 0; i < 3; i++)
6320 for (j = i+1; j < 3; j++) {
6321 if (a[j] < a[i]) {
6322 t = a[j];
6323 a[j] = a[i];
6324 a[i] = t;
6325 } else if (a[j] == a[i]) {
6326 a[i]++;
6327 t = a[j];
6328 a[j] = a[i];
6329 a[i] = t;
6330 }
6331 }
6332 picture->max_mv = a[2];
6333 picture->avg_mv = a[1];
6334 picture->min_mv = a[0];
6335#ifdef DEBUG_QOS
6336 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6337 data, a[0], a[1], a[2]);
6338#endif
6339
6340 data = READ_VREG(HEVC_QP_INFO);
6341 a[0] = data & 0x1f;
6342 a[1] = (data >> 8) & 0x3f;
6343 a[2] = (data >> 16) & 0x7f;
6344
6345 for (i = 0; i < 3; i++)
6346 for (j = i+1; j < 3; j++) {
6347 if (a[j] < a[i]) {
6348 t = a[j];
6349 a[j] = a[i];
6350 a[i] = t;
6351 } else if (a[j] == a[i]) {
6352 a[i]++;
6353 t = a[j];
6354 a[j] = a[i];
6355 a[i] = t;
6356 }
6357 }
6358 picture->max_qp = a[2];
6359 picture->avg_qp = a[1];
6360 picture->min_qp = a[0];
6361#ifdef DEBUG_QOS
6362 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6363 data, a[0], a[1], a[2]);
6364#endif
6365
6366 data = READ_VREG(HEVC_SKIP_INFO);
6367 a[0] = data & 0x1f;
6368 a[1] = (data >> 8) & 0x3f;
6369 a[2] = (data >> 16) & 0x7f;
6370
6371 for (i = 0; i < 3; i++)
6372 for (j = i+1; j < 3; j++) {
6373 if (a[j] < a[i]) {
6374 t = a[j];
6375 a[j] = a[i];
6376 a[i] = t;
6377 } else if (a[j] == a[i]) {
6378 a[i]++;
6379 t = a[j];
6380 a[j] = a[i];
6381 a[i] = t;
6382 }
6383 }
6384 picture->max_skip = a[2];
6385 picture->avg_skip = a[1];
6386 picture->min_skip = a[0];
6387
6388#ifdef DEBUG_QOS
6389 hevc_print(hevc, 0,
6390 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6391 data, a[0], a[1], a[2]);
6392#endif
6393 } else {
6394 uint32_t blk88_y_count;
6395 uint32_t blk88_c_count;
6396 uint32_t blk22_mv_count;
6397 uint32_t rdata32;
6398 int32_t mv_hi;
6399 int32_t mv_lo;
6400 uint32_t rdata32_l;
6401 uint32_t mvx_L0_hi;
6402 uint32_t mvy_L0_hi;
6403 uint32_t mvx_L1_hi;
6404 uint32_t mvy_L1_hi;
6405 int64_t value;
6406 uint64_t temp_value;
6407#ifdef DEBUG_QOS
6408 int pic_number = picture->POC;
6409#endif
6410
6411 picture->max_mv = 0;
6412 picture->avg_mv = 0;
6413 picture->min_mv = 0;
6414
6415 picture->max_skip = 0;
6416 picture->avg_skip = 0;
6417 picture->min_skip = 0;
6418
6419 picture->max_qp = 0;
6420 picture->avg_qp = 0;
6421 picture->min_qp = 0;
6422
6423
6424
6425#ifdef DEBUG_QOS
6426 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6427 picture->slice_type,
6428 picture->POC);
6429#endif
6430 /* set rd_idx to 0 */
6431 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6432
6433 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6434 if (blk88_y_count == 0) {
6435#ifdef DEBUG_QOS
6436 hevc_print(hevc, 0,
6437 "[Picture %d Quality] NO Data yet.\n",
6438 pic_number);
6439#endif
6440 /* reset all counts */
6441 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6442 return;
6443 }
6444 /* qp_y_sum */
6445 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6446#ifdef DEBUG_QOS
6447 hevc_print(hevc, 0,
6448 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6449 pic_number, rdata32/blk88_y_count,
6450 rdata32, blk88_y_count);
6451#endif
6452 picture->avg_qp = rdata32/blk88_y_count;
6453 /* intra_y_count */
6454 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6455#ifdef DEBUG_QOS
6456 hevc_print(hevc, 0,
6457 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6458 pic_number, rdata32*100/blk88_y_count,
6459 '%', rdata32);
6460#endif
6461 /* skipped_y_count */
6462 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6463#ifdef DEBUG_QOS
6464 hevc_print(hevc, 0,
6465 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6466 pic_number, rdata32*100/blk88_y_count,
6467 '%', rdata32);
6468#endif
6469 picture->avg_skip = rdata32*100/blk88_y_count;
6470 /* coeff_non_zero_y_count */
6471 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6472#ifdef DEBUG_QOS
6473 hevc_print(hevc, 0,
6474 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6475 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6476 '%', rdata32);
6477#endif
6478 /* blk66_c_count */
6479 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6480 if (blk88_c_count == 0) {
6481#ifdef DEBUG_QOS
6482 hevc_print(hevc, 0,
6483 "[Picture %d Quality] NO Data yet.\n",
6484 pic_number);
6485#endif
6486 /* reset all counts */
6487 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6488 return;
6489 }
6490 /* qp_c_sum */
6491 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6492#ifdef DEBUG_QOS
6493 hevc_print(hevc, 0,
6494 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6495 pic_number, rdata32/blk88_c_count,
6496 rdata32, blk88_c_count);
6497#endif
6498 /* intra_c_count */
6499 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6500#ifdef DEBUG_QOS
6501 hevc_print(hevc, 0,
6502 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6503 pic_number, rdata32*100/blk88_c_count,
6504 '%', rdata32);
6505#endif
6506 /* skipped_cu_c_count */
6507 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6508#ifdef DEBUG_QOS
6509 hevc_print(hevc, 0,
6510 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6511 pic_number, rdata32*100/blk88_c_count,
6512 '%', rdata32);
6513#endif
6514 /* coeff_non_zero_c_count */
6515 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6516#ifdef DEBUG_QOS
6517 hevc_print(hevc, 0,
6518 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6519 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6520 '%', rdata32);
6521#endif
6522
6523 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6524 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6525 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6526#ifdef DEBUG_QOS
6527 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6528 pic_number, (rdata32>>0)&0xff);
6529#endif
6530 picture->min_qp = (rdata32>>0)&0xff;
6531
6532#ifdef DEBUG_QOS
6533 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6534 pic_number, (rdata32>>8)&0xff);
6535#endif
6536 picture->max_qp = (rdata32>>8)&0xff;
6537
6538#ifdef DEBUG_QOS
6539 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6540 pic_number, (rdata32>>16)&0xff);
6541 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6542 pic_number, (rdata32>>24)&0xff);
6543#endif
6544
6545 /* blk22_mv_count */
6546 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6547 if (blk22_mv_count == 0) {
6548#ifdef DEBUG_QOS
6549 hevc_print(hevc, 0,
6550 "[Picture %d Quality] NO MV Data yet.\n",
6551 pic_number);
6552#endif
6553 /* reset all counts */
6554 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6555 return;
6556 }
6557 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6558 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6559 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6560 /* should all be 0x00 or 0xff */
6561#ifdef DEBUG_QOS
6562 hevc_print(hevc, 0,
6563 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6564 pic_number, rdata32);
6565#endif
6566 mvx_L0_hi = ((rdata32>>0)&0xff);
6567 mvy_L0_hi = ((rdata32>>8)&0xff);
6568 mvx_L1_hi = ((rdata32>>16)&0xff);
6569 mvy_L1_hi = ((rdata32>>24)&0xff);
6570
6571 /* mvx_L0_count[31:0] */
6572 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6573 temp_value = mvx_L0_hi;
6574 temp_value = (temp_value << 32) | rdata32_l;
6575
6576 if (mvx_L0_hi & 0x80)
6577 value = 0xFFFFFFF000000000 | temp_value;
6578 else
6579 value = temp_value;
6580 value = div_s64(value, blk22_mv_count);
6581#ifdef DEBUG_QOS
6582 hevc_print(hevc, 0,
6583 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6584 pic_number, (int)value,
6585 value, blk22_mv_count);
6586#endif
6587 picture->avg_mv = value;
6588
6589 /* mvy_L0_count[31:0] */
6590 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6591 temp_value = mvy_L0_hi;
6592 temp_value = (temp_value << 32) | rdata32_l;
6593
6594 if (mvy_L0_hi & 0x80)
6595 value = 0xFFFFFFF000000000 | temp_value;
6596 else
6597 value = temp_value;
6598#ifdef DEBUG_QOS
6599 hevc_print(hevc, 0,
6600 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6601 pic_number, rdata32_l/blk22_mv_count,
6602 value, blk22_mv_count);
6603#endif
6604
6605 /* mvx_L1_count[31:0] */
6606 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6607 temp_value = mvx_L1_hi;
6608 temp_value = (temp_value << 32) | rdata32_l;
6609 if (mvx_L1_hi & 0x80)
6610 value = 0xFFFFFFF000000000 | temp_value;
6611 else
6612 value = temp_value;
6613#ifdef DEBUG_QOS
6614 hevc_print(hevc, 0,
6615 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6616 pic_number, rdata32_l/blk22_mv_count,
6617 value, blk22_mv_count);
6618#endif
6619
6620 /* mvy_L1_count[31:0] */
6621 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6622 temp_value = mvy_L1_hi;
6623 temp_value = (temp_value << 32) | rdata32_l;
6624 if (mvy_L1_hi & 0x80)
6625 value = 0xFFFFFFF000000000 | temp_value;
6626 else
6627 value = temp_value;
6628#ifdef DEBUG_QOS
6629 hevc_print(hevc, 0,
6630 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6631 pic_number, rdata32_l/blk22_mv_count,
6632 value, blk22_mv_count);
6633#endif
6634
6635 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6636 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6637 mv_hi = (rdata32>>16)&0xffff;
6638 if (mv_hi & 0x8000)
6639 mv_hi = 0x8000 - mv_hi;
6640#ifdef DEBUG_QOS
6641 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6642 pic_number, mv_hi);
6643#endif
6644 picture->max_mv = mv_hi;
6645
6646 mv_lo = (rdata32>>0)&0xffff;
6647 if (mv_lo & 0x8000)
6648 mv_lo = 0x8000 - mv_lo;
6649#ifdef DEBUG_QOS
6650 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6651 pic_number, mv_lo);
6652#endif
6653 picture->min_mv = mv_lo;
6654
6655 /* {mvy_L0_max, mvy_L0_min} */
6656 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6657 mv_hi = (rdata32>>16)&0xffff;
6658 if (mv_hi & 0x8000)
6659 mv_hi = 0x8000 - mv_hi;
6660#ifdef DEBUG_QOS
6661 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6662 pic_number, mv_hi);
6663#endif
6664
6665 mv_lo = (rdata32>>0)&0xffff;
6666 if (mv_lo & 0x8000)
6667 mv_lo = 0x8000 - mv_lo;
6668#ifdef DEBUG_QOS
6669 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6670 pic_number, mv_lo);
6671#endif
6672
6673 /* {mvx_L1_max, mvx_L1_min} */
6674 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6675 mv_hi = (rdata32>>16)&0xffff;
6676 if (mv_hi & 0x8000)
6677 mv_hi = 0x8000 - mv_hi;
6678#ifdef DEBUG_QOS
6679 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6680 pic_number, mv_hi);
6681#endif
6682
6683 mv_lo = (rdata32>>0)&0xffff;
6684 if (mv_lo & 0x8000)
6685 mv_lo = 0x8000 - mv_lo;
6686#ifdef DEBUG_QOS
6687 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6688 pic_number, mv_lo);
6689#endif
6690
6691 /* {mvy_L1_max, mvy_L1_min} */
6692 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6693 mv_hi = (rdata32>>16)&0xffff;
6694 if (mv_hi & 0x8000)
6695 mv_hi = 0x8000 - mv_hi;
6696#ifdef DEBUG_QOS
6697 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6698 pic_number, mv_hi);
6699#endif
6700 mv_lo = (rdata32>>0)&0xffff;
6701 if (mv_lo & 0x8000)
6702 mv_lo = 0x8000 - mv_lo;
6703#ifdef DEBUG_QOS
6704 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6705 pic_number, mv_lo);
6706#endif
6707
6708 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6709#ifdef DEBUG_QOS
6710 hevc_print(hevc, 0,
6711 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6712 pic_number, rdata32);
6713#endif
6714 /* reset all counts */
6715 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6716 }
6717}
6718
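/*
 * Per slice-segment entry point: latch the parsed slice header from the
 * RPM parameters, derive the POC, validate picture size and bit depth,
 * allocate a new picture when a new frame starts, resolve tiles and
 * reference lists, and program the MC/MPRED/SAO hardware. Returns 0 to
 * continue decoding, a positive code to skip or abort this slice, or -1
 * to wait for a free buffer.
 */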
6719static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6720 union param_u *rpm_param,
6721 int decode_pic_begin)
6722{
6723#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6724 struct vdec_s *vdec = hw_to_vdec(hevc);
6725#endif
6726 int i;
6727 int lcu_x_num_div;
6728 int lcu_y_num_div;
6729 int Col_ref;
6730 int dbg_skip_flag = 0;
6731
6732 if (hevc->wait_buf == 0) {
6733 hevc->sps_num_reorder_pics_0 =
6734 rpm_param->p.sps_num_reorder_pics_0;
6735 hevc->m_temporalId = rpm_param->p.m_temporalId;
6736 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6737 hevc->interlace_flag =
6738 (rpm_param->p.profile_etc >> 2) & 0x1;
6739 hevc->curr_pic_struct =
6740 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6741 if (parser_sei_enable & 0x4) {
6742 hevc->frame_field_info_present_flag =
6743 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6744 }
6745
6746 if (interlace_enable == 0 || hevc->m_ins_flag)
6747 hevc->interlace_flag = 0;
6748 if (interlace_enable & 0x100)
6749 hevc->interlace_flag = interlace_enable & 0x1;
6750 if (hevc->interlace_flag == 0)
6751 hevc->curr_pic_struct = 0;
6752 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6753 /*
6754 *hevc->m_pocRandomAccess = MAX_INT;
6755 * //add to fix RAP_B_Bossen_1
6756 */
6757 /* } */
6758 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6759 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6760 hevc->slice_segment_addr =
6761 rpm_param->p.slice_segment_address;
6762 if (!rpm_param->p.dependent_slice_segment_flag)
6763 hevc->slice_addr = hevc->slice_segment_addr;
6764 } else {
6765 hevc->slice_segment_addr = 0;
6766 hevc->slice_addr = 0;
6767 }
6768
6769 hevc->iPrevPOC = hevc->curr_POC;
6770 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6771 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6772 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6773 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6774 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6775 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6776 hevc->isNextSliceSegment =
6777 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6778 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6779 || hevc->pic_h !=
6780 rpm_param->p.pic_height_in_luma_samples) {
6781 hevc_print(hevc, 0,
6782 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6783 hevc->pic_w, hevc->pic_h,
6784 rpm_param->p.pic_width_in_luma_samples,
6785 rpm_param->p.pic_height_in_luma_samples,
6786 hevc->interlace_flag);
6787
6788 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6789 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6790 hevc->frame_width = hevc->pic_w;
6791 hevc->frame_height = hevc->pic_h;
6792#ifdef LOSLESS_COMPRESS_MODE
6793 if (/*re_config_pic_flag == 0 &&*/
6794 (get_double_write_mode(hevc) & 0x10) == 0)
6795 init_decode_head_hw(hevc);
6796#endif
6797 }
6798
6799 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6800 hevc_print(hevc, 0, "over size : %u x %u.\n",
6801 hevc->pic_w, hevc->pic_h);
6802 if ((!hevc->m_ins_flag) &&
6803 ((debug &
6804 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6805 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6806 H265_DEBUG_DIS_SYS_ERROR_PROC);
6807 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6808 return 3;
6809 }
6810 if (hevc->bit_depth_chroma > 10 ||
6811 hevc->bit_depth_luma > 10) {
6812 hevc_print(hevc, 0, "unsupport bitdepth : %u,%u\n",
6813 hevc->bit_depth_chroma,
6814 hevc->bit_depth_luma);
6815 if (!hevc->m_ins_flag)
6816 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6817 H265_DEBUG_DIS_SYS_ERROR_PROC);
6818 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6819 return 4;
6820 }
6821
6822 /* it will cause divide 0 error */
6823 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6824 if (get_dbg_flag(hevc)) {
6825 hevc_print(hevc, 0,
6826 "Fatal Error, pic_w = %d, pic_h = %d\n",
6827 hevc->pic_w, hevc->pic_h);
6828 }
6829 return 3;
6830 }
6831 pic_list_process(hevc);
6832
6833 hevc->lcu_size =
6834 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6835 3 + rpm_param->
6836 p.log2_diff_max_min_coding_block_size);
6837 if (hevc->lcu_size == 0) {
6838 hevc_print(hevc, 0,
6839 "Error, lcu_size = 0 (%d,%d)\n",
6840 rpm_param->p.
6841 log2_min_coding_block_size_minus3,
6842 rpm_param->p.
6843 log2_diff_max_min_coding_block_size);
6844 return 3;
6845 }
6846 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6847 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6848 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6849 hevc->lcu_x_num =
6850 ((hevc->pic_w % hevc->lcu_size) ==
6851 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6852 hevc->lcu_y_num =
6853 ((hevc->pic_h % hevc->lcu_size) ==
6854 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6855 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
6856
6857 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6858 || hevc->m_nalUnitType ==
6859 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6860 hevc->curr_POC = 0;
6861 if ((hevc->m_temporalId - 1) == 0)
6862 hevc->iPrevTid0POC = hevc->curr_POC;
6863 } else {
6864 int iMaxPOClsb =
6865 1 << (rpm_param->p.
6866 log2_max_pic_order_cnt_lsb_minus4 + 4);
6867 int iPrevPOClsb;
6868 int iPrevPOCmsb;
6869 int iPOCmsb;
6870 int iPOClsb = rpm_param->p.POClsb;
6871
6872 if (iMaxPOClsb == 0) {
6873 hevc_print(hevc, 0,
6874 "error iMaxPOClsb is 0\n");
6875 return 3;
6876 }
6877
6878 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
6879 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
6880
6881 if ((iPOClsb < iPrevPOClsb)
6882 && ((iPrevPOClsb - iPOClsb) >=
6883 (iMaxPOClsb / 2)))
6884 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
6885 else if ((iPOClsb > iPrevPOClsb)
6886 && ((iPOClsb - iPrevPOClsb) >
6887 (iMaxPOClsb / 2)))
6888 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
6889 else
6890 iPOCmsb = iPrevPOCmsb;
6891 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6892 hevc_print(hevc, 0,
6893 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
6894 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
6895 iPOClsb);
6896 }
6897 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6898 || hevc->m_nalUnitType ==
6899 NAL_UNIT_CODED_SLICE_BLANT
6900 || hevc->m_nalUnitType ==
6901 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
6902 /* For BLA picture types, POCmsb is set to 0. */
6903 iPOCmsb = 0;
6904 }
6905 hevc->curr_POC = (iPOCmsb + iPOClsb);
6906 if ((hevc->m_temporalId - 1) == 0)
6907 hevc->iPrevTid0POC = hevc->curr_POC;
6908 else {
6909 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6910 hevc_print(hevc, 0,
6911 "m_temporalID is %d\n",
6912 hevc->m_temporalId);
6913 }
6914 }
6915 }
6916 hevc->RefNum_L0 =
6917 (rpm_param->p.num_ref_idx_l0_active >
6918 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6919 num_ref_idx_l0_active;
6920 hevc->RefNum_L1 =
6921 (rpm_param->p.num_ref_idx_l1_active >
6922 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6923 num_ref_idx_l1_active;
6924
6925 /* if(curr_POC==0x10) dump_lmem(); */
6926
6927 /* skip RASL pictures after CRA/BLA pictures */
6928 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
6929 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
6930 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6931 || hevc->m_nalUnitType ==
6932 NAL_UNIT_CODED_SLICE_BLANT
6933 || hevc->m_nalUnitType ==
6934 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6935 hevc->m_pocRandomAccess = hevc->curr_POC;
6936 else
6937 hevc->m_pocRandomAccess = -MAX_INT;
6938 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6939 || hevc->m_nalUnitType ==
6940 NAL_UNIT_CODED_SLICE_BLANT
6941 || hevc->m_nalUnitType ==
6942 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6943 hevc->m_pocRandomAccess = hevc->curr_POC;
6944 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
6945 (nal_skip_policy >= 3) &&
6946 (hevc->m_nalUnitType ==
6947 NAL_UNIT_CODED_SLICE_RASL_N ||
6948 hevc->m_nalUnitType ==
6949 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
6950 if (get_dbg_flag(hevc)) {
6951 hevc_print(hevc, 0,
6952 "RASL picture with POC %d < %d ",
6953 hevc->curr_POC, hevc->m_pocRandomAccess);
6954 hevc_print(hevc, 0,
6955 "RandomAccess point POC), skip it\n");
6956 }
6957 return 1;
6958 }
6959
6960 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
6961 hevc->skip_flag = 0;
6962 /**/
6963 /* if((iPrevPOC != curr_POC)){ */
6964 if (rpm_param->p.slice_segment_address == 0) {
6965 struct PIC_s *pic;
6966
6967 hevc->new_pic = 1;
6968#ifdef MULTI_INSTANCE_SUPPORT
6969 if (!hevc->m_ins_flag)
6970#endif
6971 check_pic_decoded_error_pre(hevc,
6972 READ_VREG(HEVC_PARSER_LCU_START)
6973 & 0xffffff);
6974 if (use_cma == 0) {
6975 if (hevc->pic_list_init_flag == 0) {
6976 init_pic_list(hevc);
6977 init_pic_list_hw(hevc);
6978 init_buf_spec(hevc);
6979 hevc->pic_list_init_flag = 3;
6980 }
6981 }
6982 if (!hevc->m_ins_flag) {
6983 if (hevc->cur_pic)
6984 get_picture_qos_info(hevc);
6985 }
6986 hevc->first_pic_after_recover = 0;
6987 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6988 dump_pic_list(hevc);
6989 /* prev pic */
6990 hevc_pre_pic(hevc, pic);
6991 /*
6992 *update referenced of old pictures
6993 *(cur_pic->referenced is 1 and not updated)
6994 */
6995 apply_ref_pic_set(hevc, hevc->curr_POC,
6996 rpm_param);
6997
6998 if (hevc->mmu_enable)
6999 recycle_mmu_bufs(hevc);
7000
7001#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7002 if (vdec->master) {
7003 struct hevc_state_s *hevc_ba =
7004 (struct hevc_state_s *)
7005 vdec->master->private;
7006 if (hevc_ba->cur_pic != NULL) {
7007 hevc_ba->cur_pic->dv_enhance_exist = 1;
7008 hevc_print(hevc, H265_DEBUG_DV,
7009 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
7010 hevc->curr_POC, hevc_ba->cur_pic->POC);
7011 }
7012 }
7013 if (vdec->master == NULL &&
7014 vdec->slave == NULL)
7015 set_aux_data(hevc,
7016 hevc->cur_pic, 1, 0); /*suffix*/
7017 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7018 set_aux_data(hevc,
7019 hevc->cur_pic, 0, 1); /*dv meta only*/
7020#else
7021 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7022#endif
7023 /* new pic */
7024 hevc->cur_pic = hevc->is_used_v4l ?
7025 v4l_get_new_pic(hevc, rpm_param) :
7026 get_new_pic(hevc, rpm_param);
7027 if (hevc->cur_pic == NULL) {
7028 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
7029 dump_pic_list(hevc);
7030 hevc->wait_buf = 1;
7031 return -1;
7032 }
7033#ifdef MULTI_INSTANCE_SUPPORT
7034 hevc->decoding_pic = hevc->cur_pic;
7035 if (!hevc->m_ins_flag)
7036 hevc->over_decode = 0;
7037#endif
7038#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7039 hevc->cur_pic->dv_enhance_exist = 0;
7040 if (vdec->slave)
7041 hevc_print(hevc, H265_DEBUG_DV,
7042 "Clear bl (poc %d) dv_enhance_exist flag\n",
7043 hevc->curr_POC);
7044 if (vdec->master == NULL &&
7045 vdec->slave == NULL)
7046 set_aux_data(hevc,
7047 hevc->cur_pic, 0, 0); /*prefix*/
7048
7049 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7050 set_aux_data(hevc,
7051 hevc->cur_pic, 0, 2); /*pre sei only*/
7052#else
7053 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7054#endif
7055 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
7056 hevc->cur_pic->output_ready = 1;
7057 hevc->cur_pic->stream_offset =
7058 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
7059 prepare_display_buf(hevc, hevc->cur_pic);
7060 hevc->wait_buf = 2;
7061 return -1;
7062 }
7063 } else {
7064 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
7065#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7066 if (vdec->master == NULL &&
7067 vdec->slave == NULL) {
7068 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7069 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7070 }
7071#else
7072 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7073 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7074#endif
7075 }
7076 if (hevc->pic_list_init_flag != 3
7077 || hevc->cur_pic == NULL) {
7078 /* make it dec from the first slice segment */
7079 return 3;
7080 }
7081 hevc->cur_pic->slice_idx++;
7082 hevc->new_pic = 0;
7083 }
7084 } else {
7085 if (hevc->wait_buf == 1) {
7086 pic_list_process(hevc);
7087 hevc->cur_pic = hevc->is_used_v4l ?
7088 v4l_get_new_pic(hevc, rpm_param) :
7089 get_new_pic(hevc, rpm_param);
7090 if (hevc->cur_pic == NULL)
7091 return -1;
7092
7093 if (!hevc->m_ins_flag)
7094 hevc->over_decode = 0;
7095
7096#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7097 hevc->cur_pic->dv_enhance_exist = 0;
7098 if (vdec->master == NULL &&
7099 vdec->slave == NULL)
7100 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7101#else
7102 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7103#endif
7104 hevc->wait_buf = 0;
7105 } else if (hevc->wait_buf == 2) {
7107 if (get_display_pic_num(hevc) > 1)
7109 return -1;
7110 hevc->wait_buf = 0;
7111 }
7112 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7113 dump_pic_list(hevc);
7114 }
7115
7116 if (hevc->new_pic) {
7117#if 1
7118 /*SUPPORT_10BIT*/
7119 int sao_mem_unit =
7120 (hevc->lcu_size == 16 ? 9 :
7121 hevc->lcu_size ==
7122 32 ? 14 : 24) << 4;
7123#else
7124 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
7125#endif
7126 int pic_height_cu =
7127 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
7128 int pic_width_cu =
7129 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
7130 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
7131
7132 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
7133 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7134 hevc_print(hevc, 0,
7135 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
7136 __func__,
7137 hevc->decode_idx,
7138 hevc->curr_pic_struct,
7139 hevc->interlace_flag,
7140 hevc->cur_pic->index);
7141 }
7142 if (dbg_skip_decode_index != 0 &&
7143 hevc->decode_idx == dbg_skip_decode_index)
7144 dbg_skip_flag = 1;
7145
7146 hevc->decode_idx++;
7147 update_tile_info(hevc, pic_width_cu, pic_height_cu,
7148 sao_mem_unit, rpm_param);
7149
7150 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
7151 }
7152
7153 if (hevc->iPrevPOC != hevc->curr_POC) {
7154 hevc->new_tile = 1;
7155 hevc->tile_x = 0;
7156 hevc->tile_y = 0;
7157 hevc->tile_y_x = 0;
7158 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7159 hevc_print(hevc, 0,
7160 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
7161 hevc->tile_x, hevc->tile_y);
7162 }
7163 } else if (hevc->tile_enabled) {
7164 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7165 hevc_print(hevc, 0,
7166 "slice_segment_address is %d\n",
7167 rpm_param->p.slice_segment_address);
7168 }
7169 hevc->tile_y_x =
7170 get_tile_index(hevc, rpm_param->p.slice_segment_address,
7171 (hevc->pic_w +
7172 hevc->lcu_size -
7173 1) / hevc->lcu_size);
7174 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
7175 && (hevc->tile_y_x != -1)) {
7176 hevc->new_tile = 1;
7177 hevc->tile_x = hevc->tile_y_x & 0xff;
7178 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
7179 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7180 hevc_print(hevc, 0,
7181 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
7182 rpm_param->p.slice_segment_address,
7183 hevc->tile_x, hevc->tile_y);
7184 }
7185 } else
7186 hevc->new_tile = 0;
7187 } else
7188 hevc->new_tile = 0;
7189
7190 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
7191 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
7192 hevc->new_tile = 0;
7193
7194 if (hevc->new_tile) {
7195 hevc->tile_start_lcu_x =
7196 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
7197 hevc->tile_start_lcu_y =
7198 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
7199 hevc->tile_width_lcu =
7200 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
7201 hevc->tile_height_lcu =
7202 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
7203 }
7204
7205 set_ref_pic_list(hevc, rpm_param);
7206
7207 Col_ref = rpm_param->p.collocated_ref_idx;
7208
7209 hevc->LDCFlag = 0;
7210 if (rpm_param->p.slice_type != I_SLICE) {
7211 hevc->LDCFlag = 1;
7212 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
7213 if (hevc->cur_pic->
7214 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
7215 hevc->curr_POC)
7216 hevc->LDCFlag = 0;
7217 }
7218 if (rpm_param->p.slice_type == B_SLICE) {
7219 for (i = 0; (i < hevc->RefNum_L1)
7220 && hevc->LDCFlag; i++) {
7221 if (hevc->cur_pic->
7222 m_aiRefPOCList1[hevc->cur_pic->
7223 slice_idx][i] >
7224 hevc->curr_POC)
7225 hevc->LDCFlag = 0;
7226 }
7227 }
7228 }
7229
7230 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
7231
7232 hevc->plevel =
7233 rpm_param->p.log2_parallel_merge_level;
7234 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
7235
7236 hevc->LongTerm_Curr = 0; /* to do ... */
7237 hevc->LongTerm_Col = 0; /* to do ... */
7238
7239 hevc->list_no = 0;
7240 if (rpm_param->p.slice_type == B_SLICE)
7241 hevc->list_no = 1 - hevc->ColFromL0Flag;
7242 if (hevc->list_no == 0) {
7243 if (Col_ref < hevc->RefNum_L0) {
7244 hevc->Col_POC =
7245 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
7246 slice_idx][Col_ref];
7247 } else
7248 hevc->Col_POC = INVALID_POC;
7249 } else {
7250 if (Col_ref < hevc->RefNum_L1) {
7251 hevc->Col_POC =
7252 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
7253 slice_idx][Col_ref];
7254 } else
7255 hevc->Col_POC = INVALID_POC;
7256 }
7257
7258 hevc->LongTerm_Ref = 0; /* to do ... */
7259
7260 if (hevc->slice_type != 2) {
7261 /* if(hevc->i_only==1){ */
7262 /* return 0xf; */
7263 /* } */
7264
7265 if (hevc->Col_POC != INVALID_POC) {
7266 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7267 if (hevc->col_pic == NULL) {
7268 hevc->cur_pic->error_mark = 1;
7269 if (get_dbg_flag(hevc)) {
7270 hevc_print(hevc, 0,
7271 "WRONG,fail to get the pic Col_POC\n");
7272 }
7273 if (is_log_enable(hevc))
7274 add_log(hevc,
7275 "WRONG,fail to get the pic Col_POC");
7276 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7277 hevc->cur_pic->error_mark = 1;
7278 if (get_dbg_flag(hevc)) {
7279 hevc_print(hevc, 0,
7280 "WRONG, Col_POC error_mark is 1\n");
7281 }
7282 if (is_log_enable(hevc))
7283 add_log(hevc,
7284 "WRONG, Col_POC error_mark is 1");
7285 } else {
7286 if ((hevc->col_pic->width
7287 != hevc->pic_w) ||
7288 (hevc->col_pic->height
7289 != hevc->pic_h)) {
7290 hevc_print(hevc, 0,
7291 "Wrong reference pic (poc %d) width/height %d/%d\n",
7292 hevc->col_pic->POC,
7293 hevc->col_pic->width,
7294 hevc->col_pic->height);
7295 hevc->cur_pic->error_mark = 1;
7296 }
7297
7298 }
7299
7300 if (hevc->cur_pic->error_mark
7301 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7302#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7303 /*count info*/
7304 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7305 hevc->cur_pic->stream_offset);
7306#endif
7307 }
7308
7309 if (is_skip_decoding(hevc,
7310 hevc->cur_pic)) {
7311 return 2;
7312 }
7313 } else
7314 hevc->col_pic = hevc->cur_pic;
7315 } /* */
7316 if (hevc->col_pic == NULL)
7317 hevc->col_pic = hevc->cur_pic;
7318#ifdef BUFFER_MGR_ONLY
7319 return 0xf;
7320#else
7321 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7322 || (dbg_skip_flag))
7323 return 0xf;
7324#endif
7325
7326 config_mc_buffer(hevc, hevc->cur_pic);
7327
7328 if (is_skip_decoding(hevc,
7329 hevc->cur_pic)) {
7330 if (get_dbg_flag(hevc))
7331 hevc_print(hevc, 0,
7332 "Discard this picture index %d\n",
7333 hevc->cur_pic->index);
7334#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7335 /*count info*/
7336 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7337 hevc->cur_pic->stream_offset);
7338#endif
7339 return 2;
7340 }
7341#ifdef MCRCC_ENABLE
7342 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7343#endif
7344 config_mpred_hw(hevc);
7345
7346 config_sao_hw(hevc, rpm_param);
7347
7348 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7349 return 0xf;
7350
7351 return 0;
7352}
7353
7354
7355
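/*
 * Allocate the compressed-frame MMU mapping for a picture: compute the
 * lossless-compression body size, convert it to a 4k page count (capped
 * at the 4K/8K frame limit for the chip), and request that many pages
 * from the mmu_box at the picture's index. Not needed when
 * double_write_mode bit 0x10 is set.
 */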
7356static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7357 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7358 int cur_buf_idx = new_pic->index;
7359 int bit_depth_10 = (bit_depth != 0x00);
7360 int picture_size;
7361 int cur_mmu_4k_number;
7362 int ret, max_frame_num;
7363 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7364 new_pic->height, !bit_depth_10);
7365 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7366 if (hevc->double_write_mode & 0x10)
7367 return 0;
7368 /*hevc_print(hevc, 0,
7369 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7370 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7371 if (new_pic->scatter_alloc) {
7372 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7373 new_pic->scatter_alloc = 0;
7374 }
7375 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7376 max_frame_num = MAX_FRAME_8K_NUM;
7377 else
7378 max_frame_num = MAX_FRAME_4K_NUM;
7379 if (cur_mmu_4k_number > max_frame_num) {
7380 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7381 cur_mmu_4k_number,
7382 new_pic->width,
7383 new_pic->height);
7384 return -1;
7385 }
7386 ret = decoder_mmu_box_alloc_idx(
7387 hevc->mmu_box,
7388 cur_buf_idx,
7389 cur_mmu_4k_number,
7390 mmu_index_adr);
7391 if (ret == 0)
7392 new_pic->scatter_alloc = 1;
7393
7394 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7395 "%s pic index %d page count(%d) ret =%d\n",
7396 __func__, cur_buf_idx,
7397 cur_mmu_4k_number, ret);
7398 return ret;
7399}
7400
7401
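/*
 * Give a picture's scatter-allocated MMU pages back to the mmu_box
 * (when MMU compression is in use) and clear its scatter_alloc state.
 */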
7402static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7403 struct PIC_s *pic)
7404{
7405 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7406 "%s pic index %d scatter_alloc %d\n",
7407 __func__, pic->index,
7408 pic->scatter_alloc);
7409
7410 if (hevc->mmu_enable
7411 && ((hevc->double_write_mode & 0x10) == 0)
7412 && pic->scatter_alloc)
7413 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7414 pic->scatter_alloc = 0;
7415}
7416
7417/*
7418 *************************************************
7419 *
7420 *h265 buffer management end
7421 *
7422 **************************************************
7423 */
7424static struct hevc_state_s *gHevc;
7425
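/*
 * Free all per-instance DMA buffers (swapped ucode, detrefill, aux, RPM,
 * LMEM and frame MMU map) and the gvs status structure.
 */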
7426static void hevc_local_uninit(struct hevc_state_s *hevc)
7427{
7428 hevc->rpm_ptr = NULL;
7429 hevc->lmem_ptr = NULL;
7430
7431#ifdef SWAP_HEVC_UCODE
7432 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7433 if (hevc->mc_cpu_addr != NULL) {
7434 dma_free_coherent(amports_get_dma_device(),
7435 hevc->swap_size, hevc->mc_cpu_addr,
7436 hevc->mc_dma_handle);
7437 hevc->mc_cpu_addr = NULL;
7438 }
7439
7440 }
7441#endif
7442#ifdef DETREFILL_ENABLE
7443 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7444 uninit_detrefill_buf(hevc);
7445#endif
7446 if (hevc->aux_addr) {
7447 dma_free_coherent(amports_get_dma_device(),
7448 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7449 hevc->aux_phy_addr);
7450 hevc->aux_addr = NULL;
7451 }
7452 if (hevc->rpm_addr) {
7453 dma_free_coherent(amports_get_dma_device(),
7454 RPM_BUF_SIZE, hevc->rpm_addr,
7455 hevc->rpm_phy_addr);
7456 hevc->rpm_addr = NULL;
7457 }
7458 if (hevc->lmem_addr) {
7459 dma_free_coherent(amports_get_dma_device(),
7460 LMEM_BUF_SIZE, hevc->lmem_addr,
7461 hevc->lmem_phy_addr);
7462 hevc->lmem_addr = NULL;
7463 }
7464
7465 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7466 if (hevc->frame_mmu_map_phy_addr)
7467 dma_free_coherent(amports_get_dma_device(),
7468 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7469 hevc->frame_mmu_map_phy_addr);
7470
7471 hevc->frame_mmu_map_addr = NULL;
7472 }
7473
7474 kfree(gvs);
7475 gvs = NULL;
7476}
7477
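/*
 * Per-instance init: select the workspace layout (1080p or 4K according
 * to chip capability), reset the decoder state, and allocate the RPM,
 * aux, LMEM and frame MMU map DMA buffers.
 */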
7478static int hevc_local_init(struct hevc_state_s *hevc)
7479{
7480 int ret = -1;
7481 struct BuffInfo_s *cur_buf_info = NULL;
7482
7483 memset(&hevc->param, 0, sizeof(union param_u));
7484
7485 cur_buf_info = &hevc->work_space_buf_store;
7486
7487 if (vdec_is_support_4k()) {
7488 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7489 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7490 sizeof(struct BuffInfo_s));
7491 else
7492 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7493 sizeof(struct BuffInfo_s));
7494 } else
7495 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7496 sizeof(struct BuffInfo_s));
7497
7498 cur_buf_info->start_adr = hevc->buf_start;
7499 init_buff_spec(hevc, cur_buf_info);
7500
7501 hevc_init_stru(hevc, cur_buf_info);
7502
7503 hevc->bit_depth_luma = 8;
7504 hevc->bit_depth_chroma = 8;
7505 hevc->video_signal_type = 0;
7506 hevc->video_signal_type_debug = 0;
7507 bit_depth_luma = hevc->bit_depth_luma;
7508 bit_depth_chroma = hevc->bit_depth_chroma;
7509 video_signal_type = hevc->video_signal_type;
7510
7511 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7512 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7513 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7514 if (hevc->rpm_addr == NULL) {
7515 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7516 return -1;
7517 }
7518 hevc->rpm_ptr = hevc->rpm_addr;
7519 }
7520
7521 if (prefix_aux_buf_size > 0 ||
7522 suffix_aux_buf_size > 0) {
7523 u32 aux_buf_size;
7524
7525 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7526 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7527 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7528 hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7529 aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7530 if (hevc->aux_addr == NULL) {
7531 pr_err("%s: failed to alloc aux buffer\n", __func__);
7532 return -1;
7533 }
7534 }
7535
7536 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7537 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7538 if (hevc->lmem_addr == NULL) {
7539 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7540 return -1;
7541 }
7542 hevc->lmem_ptr = hevc->lmem_addr;
7543
7544 if (hevc->mmu_enable) {
7545 hevc->frame_mmu_map_addr =
7546 dma_alloc_coherent(amports_get_dma_device(),
7547 get_frame_mmu_map_size(),
7548 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7549 if (hevc->frame_mmu_map_addr == NULL) {
7550 pr_err("%s: failed to alloc frame mmu map buffer\n", __func__);
7551 return -1;
7552 }
7553 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7554 }
7555 ret = 0;
7556 return ret;
7557}
7558
7559/*
7560 *******************************************
7561 * Mailbox command
7562 *******************************************
7563 */
7564#define CMD_FINISHED 0
7565#define CMD_ALLOC_VIEW 1
7566#define CMD_FRAME_DISPLAY 3
7567#define CMD_DEBUG 10
7568
7569
7570#define DECODE_BUFFER_NUM_MAX 32
7571#define DISPLAY_BUFFER_NUM 6
7572
7573#define video_domain_addr(adr) (adr&0x7fffffff)
7574#define DECODER_WORK_SPACE_SIZE 0x800000
7575
7576#define spec2canvas(x) \
7577 (((x)->uv_canvas_index << 16) | \
7578 ((x)->uv_canvas_index << 8) | \
7579 ((x)->y_canvas_index << 0))
7580
7581
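/*
 * Program the display canvases for a picture. With double write enabled
 * the canvas geometry is scaled by the double-write ratio and points at
 * the double-write output buffers; otherwise, when the MMU is not used,
 * the reference (mc) buffers are mapped directly.
 */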
7582static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7583{
7584 struct vdec_s *vdec = hw_to_vdec(hevc);
7585 int canvas_w = ALIGN(pic->width, 64)/4;
7586 int canvas_h = ALIGN(pic->height, 32)/4;
7587 int blkmode = hevc->mem_map_mode;
7588
7589 /*CANVAS_BLKMODE_64X32*/
7590#ifdef SUPPORT_10BIT
7591 if (pic->double_write_mode) {
7592 canvas_w = pic->width /
7593 get_double_write_ratio(hevc, pic->double_write_mode);
7594 canvas_h = pic->height /
7595 get_double_write_ratio(hevc, pic->double_write_mode);
7596
7597 if (hevc->mem_map_mode == 0)
7598 canvas_w = ALIGN(canvas_w, 32);
7599 else
7600 canvas_w = ALIGN(canvas_w, 64);
7601 canvas_h = ALIGN(canvas_h, 32);
7602
7603 if (vdec->parallel_dec == 1) {
7604 if (pic->y_canvas_index == -1)
7605 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7606 if (pic->uv_canvas_index == -1)
7607 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7608 } else {
7609 pic->y_canvas_index = 128 + pic->index * 2;
7610 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7611 }
7612
7613 canvas_config_ex(pic->y_canvas_index,
7614 pic->dw_y_adr, canvas_w, canvas_h,
7615 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7616 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7617 canvas_w, canvas_h,
7618 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7619#ifdef MULTI_INSTANCE_SUPPORT
7620 pic->canvas_config[0].phy_addr =
7621 pic->dw_y_adr;
7622 pic->canvas_config[0].width =
7623 canvas_w;
7624 pic->canvas_config[0].height =
7625 canvas_h;
7626 pic->canvas_config[0].block_mode =
7627 blkmode;
7628 pic->canvas_config[0].endian = hevc->is_used_v4l ? 0 : 7;
7629
7630 pic->canvas_config[1].phy_addr =
7631 pic->dw_u_v_adr;
7632 pic->canvas_config[1].width =
7633 canvas_w;
7634 pic->canvas_config[1].height =
7635 canvas_h;
7636 pic->canvas_config[1].block_mode =
7637 blkmode;
7638 pic->canvas_config[1].endian = hevc->is_used_v4l ? 0 : 7;
7639#endif
7640 } else {
7641 if (!hevc->mmu_enable) {
7642 /* to change after 10bit VPU is ready ... */
7643 if (vdec->parallel_dec == 1) {
7644 if (pic->y_canvas_index == -1)
7645 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7646 pic->uv_canvas_index = pic->y_canvas_index;
7647 } else {
7648 pic->y_canvas_index = 128 + pic->index;
7649 pic->uv_canvas_index = 128 + pic->index;
7650 }
7651
7652 canvas_config_ex(pic->y_canvas_index,
7653 pic->mc_y_adr, canvas_w, canvas_h,
7654 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7655 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7656 canvas_w, canvas_h,
7657 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7658 }
7659 }
7660#else
7661 if (vdec->parallel_dec == 1) {
7662 if (pic->y_canvas_index == -1)
7663 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7664 if (pic->uv_canvas_index == -1)
7665 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7666 } else {
7667 pic->y_canvas_index = 128 + pic->index * 2;
7668 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7669 }
7670
7671
7672 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7673 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7674 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7675 canvas_w, canvas_h,
7676 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7677#endif
7678}
7679
7680static int init_buf_spec(struct hevc_state_s *hevc)
7681{
7682 int pic_width = hevc->pic_w;
7683 int pic_height = hevc->pic_h;
7684
7685 /* hevc_print(hevc, 0,
7686 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7687 */
7688 hevc_print(hevc, 0,
7689 "%s2 %d %d\n", __func__, pic_width, pic_height);
7690 /* pic_width = hevc->pic_w; */
7691 /* pic_height = hevc->pic_h; */
7692
7693 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7694 hevc->frame_width = pic_width;
7695 hevc->frame_height = pic_height;
7696
7697 }
7698
7699 return 0;
7700}
7701
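/*
 * parse_sei(): walk one SEI NAL that was copied into the aux buffer.  The
 * first two bytes are the NAL unit header (the code takes header >> 9 as the
 * nal_unit_type) and only SEI prefix/suffix NALs are accepted.  Each payload
 * is then read as a one-byte type followed by a one-byte size, so this
 * simplified parser assumes neither field uses 0xff extension bytes.
 * Recognized payloads: picture timing (pic_struct), ITU-T T.35 user data
 * (used here only to flag HDR10+), mastering display colour volume and
 * content light level.
 */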
7702static int parse_sei(struct hevc_state_s *hevc,
7703 struct PIC_s *pic, char *sei_buf, uint32_t size)
7704{
7705 char *p = sei_buf;
7706 char *p_sei;
7707 uint16_t header;
7708 uint8_t nal_unit_type;
7709 uint8_t payload_type, payload_size;
7710 int i, j;
7711
7712 if (size < 2)
7713 return 0;
7714 header = *p++;
7715 header <<= 8;
7716 header += *p++;
7717 nal_unit_type = header >> 9;
7718 if ((nal_unit_type != NAL_UNIT_SEI)
7719 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7720 return 0;
7721 while (p+2 <= sei_buf+size) {
7722 payload_type = *p++;
7723 payload_size = *p++;
7724 if (p+payload_size <= sei_buf+size) {
7725 switch (payload_type) {
7726 case SEI_PicTiming:
7727 if ((parser_sei_enable & 0x4) &&
7728 hevc->frame_field_info_present_flag) {
7729 p_sei = p;
7730 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7731 pic->pic_struct = hevc->curr_pic_struct;
7732 if (get_dbg_flag(hevc) &
7733 H265_DEBUG_PIC_STRUCT) {
7734 hevc_print(hevc, 0,
7735 "parse result pic_struct = %d\n",
7736 hevc->curr_pic_struct);
7737 }
7738 }
7739 break;
7740 case SEI_UserDataITU_T_T35:
7741 p_sei = p;
7742 if (p_sei[0] == 0xB5
7743 && p_sei[1] == 0x00
7744 && p_sei[2] == 0x3C
7745 && p_sei[3] == 0x00
7746 && p_sei[4] == 0x01
7747 && p_sei[5] == 0x04)
7748 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7749
7750 break;
7751 case SEI_MasteringDisplayColorVolume:
7752 /*hevc_print(hevc, 0,
7753 "sei type: primary display color volume %d, size %d\n",
7754 payload_type,
7755 payload_size);*/
7756 /* master_display_colour */
7757 p_sei = p;
7758 for (i = 0; i < 3; i++) {
7759 for (j = 0; j < 2; j++) {
7760 hevc->primaries[i][j]
7761 = (*p_sei<<8)
7762 | *(p_sei+1);
7763 p_sei += 2;
7764 }
7765 }
7766 for (i = 0; i < 2; i++) {
7767 hevc->white_point[i]
7768 = (*p_sei<<8)
7769 | *(p_sei+1);
7770 p_sei += 2;
7771 }
7772 for (i = 0; i < 2; i++) {
7773 hevc->luminance[i]
7774 = (*p_sei<<24)
7775 | (*(p_sei+1)<<16)
7776 | (*(p_sei+2)<<8)
7777 | *(p_sei+3);
7778 p_sei += 4;
7779 }
7780 hevc->sei_present_flag |=
7781 SEI_MASTER_DISPLAY_COLOR_MASK;
7782 /*for (i = 0; i < 3; i++)
7783 for (j = 0; j < 2; j++)
7784 hevc_print(hevc, 0,
7785 "\tprimaries[%1d][%1d] = %04x\n",
7786 i, j,
7787 hevc->primaries[i][j]);
7788 hevc_print(hevc, 0,
7789 "\twhite_point = (%04x, %04x)\n",
7790 hevc->white_point[0],
7791 hevc->white_point[1]);
7792 hevc_print(hevc, 0,
7793 "\tmax,min luminance = %08x, %08x\n",
7794 hevc->luminance[0],
7795 hevc->luminance[1]);*/
7796 break;
7797 case SEI_ContentLightLevel:
7798 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7799 hevc_print(hevc, 0,
7800 "sei type: max content light level %d, size %d\n",
7801 payload_type, payload_size);
7802 /* content_light_level */
7803 p_sei = p;
7804 hevc->content_light_level[0]
7805 = (*p_sei<<8) | *(p_sei+1);
7806 p_sei += 2;
7807 hevc->content_light_level[1]
7808 = (*p_sei<<8) | *(p_sei+1);
7809 p_sei += 2;
7810 hevc->sei_present_flag |=
7811 SEI_CONTENT_LIGHT_LEVEL_MASK;
7812 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7813 hevc_print(hevc, 0,
7814 "\tmax cll = %04x, max_pa_cll = %04x\n",
7815 hevc->content_light_level[0],
7816 hevc->content_light_level[1]);
7817 break;
7818 default:
7819 break;
7820 }
7821 }
7822 p += payload_size;
7823 }
7824 return 0;
7825}
7826
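/*
 * calc_ar(): convert aspect_ratio_idc plus the coded width/height into a
 * height/width ratio scaled by 0x100 (8.8 fixed point).  idc 255 uses the
 * explicit sar_width/sar_height from the VUI; the other cases mirror the
 * fixed sample-aspect-ratio table of the spec.  Illustrative example:
 * idc 1 (square pixels) with 1920x1080 gives 0x100 * 1080 / 1920 = 144.
 */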
7827static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7828 unsigned w, unsigned h)
7829{
7830 unsigned ar;
7831
7832 if (idc == 255) {
7833 ar = div_u64(256ULL * sar_h * h,
7834 sar_w * w);
7835 } else {
7836 switch (idc) {
7837 case 1:
7838 ar = 0x100 * h / w;
7839 break;
7840 case 2:
7841 ar = 0x100 * h * 11 / (w * 12);
7842 break;
7843 case 3:
7844 ar = 0x100 * h * 11 / (w * 10);
7845 break;
7846 case 4:
7847 ar = 0x100 * h * 11 / (w * 16);
7848 break;
7849 case 5:
7850 ar = 0x100 * h * 33 / (w * 40);
7851 break;
7852 case 6:
7853 ar = 0x100 * h * 11 / (w * 24);
7854 break;
7855 case 7:
7856 ar = 0x100 * h * 11 / (w * 20);
7857 break;
7858 case 8:
7859 ar = 0x100 * h * 11 / (w * 32);
7860 break;
7861 case 9:
7862 ar = 0x100 * h * 33 / (w * 80);
7863 break;
7864 case 10:
7865 ar = 0x100 * h * 11 / (w * 18);
7866 break;
7867 case 11:
7868 ar = 0x100 * h * 11 / (w * 15);
7869 break;
7870 case 12:
7871 ar = 0x100 * h * 33 / (w * 64);
7872 break;
7873 case 13:
7874 ar = 0x100 * h * 99 / (w * 160);
7875 break;
7876 case 14:
7877 ar = 0x100 * h * 3 / (w * 4);
7878 break;
7879 case 15:
7880 ar = 0x100 * h * 2 / (w * 3);
7881 break;
7882 case 16:
7883 ar = 0x100 * h * 1 / (w * 2);
7884 break;
7885 default:
7886 ar = h * 0x100 / w;
7887 break;
7888 }
7889 }
7890
7891 return ar;
7892}
7893
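/*
 * set_frame_info(): fill the vframe geometry (scaled by the double-write
 * ratio), duration and ratio_control, then scan pic->aux_data_buf, which
 * holds a sequence of [4-byte size][4-byte type] records; records of type
 * 0x02000000 carry SEI bytes and are handed to parse_sei().  HDR metadata
 * (mastering display, content light level, HDR10+ flag) is copied into
 * vf->prop and, on the v4l2 path, forwarded via vdec_v4l_set_hdr_infos().
 */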
7894static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
7895 struct PIC_s *pic)
7896{
7897 unsigned int ar;
7898 int i, j;
7899 char *p;
7900 unsigned size = 0;
7901 unsigned type = 0;
7902 struct vframe_master_display_colour_s *vf_dp
7903 = &vf->prop.master_display_colour;
7904
7905 vf->width = pic->width /
7906 get_double_write_ratio(hevc, pic->double_write_mode);
7907 vf->height = pic->height /
7908 get_double_write_ratio(hevc, pic->double_write_mode);
7909
7910 vf->duration = hevc->frame_dur;
7911 vf->duration_pulldown = 0;
7912 vf->flag = 0;
7913
7914 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
7915 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7916
7917
7918 if (((pic->aspect_ratio_idc == 255) &&
7919 pic->sar_width &&
7920 pic->sar_height) ||
7921 ((pic->aspect_ratio_idc != 255) &&
7922 (pic->width))) {
7923 ar = min_t(u32,
7924 calc_ar(pic->aspect_ratio_idc,
7925 pic->sar_width,
7926 pic->sar_height,
7927 pic->width,
7928 pic->height),
7929 DISP_RATIO_ASPECT_RATIO_MAX);
7930 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7931 }
7932 hevc->ratio_control = vf->ratio_control;
7933 if (pic->aux_data_buf
7934 && pic->aux_data_size) {
7935 /* parser sei */
7936 p = pic->aux_data_buf;
7937 while (p < pic->aux_data_buf
7938 + pic->aux_data_size - 8) {
7939 size = *p++;
7940 size = (size << 8) | *p++;
7941 size = (size << 8) | *p++;
7942 size = (size << 8) | *p++;
7943 type = *p++;
7944 type = (type << 8) | *p++;
7945 type = (type << 8) | *p++;
7946 type = (type << 8) | *p++;
7947 if (type == 0x02000000) {
7948 /* hevc_print(hevc, 0,
7949 "sei(%d)\n", size); */
7950 parse_sei(hevc, pic, p, size);
7951 }
7952 p += size;
7953 }
7954 }
7955 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
7956 vf->signal_type = pic->video_signal_type;
7957 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
7958 u32 data;
7959 data = vf->signal_type;
7960 data = data & 0xFFFF00FF;
7961 data = data | (0x30<<8);
7962 vf->signal_type = data;
7963 }
7964 }
7965 else
7966 vf->signal_type = 0;
7967 hevc->video_signal_type_debug = vf->signal_type;
7968
7969 /* master_display_colour */
7970 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
7971 for (i = 0; i < 3; i++)
7972 for (j = 0; j < 2; j++)
7973 vf_dp->primaries[i][j] = hevc->primaries[i][j];
7974 for (i = 0; i < 2; i++) {
7975 vf_dp->white_point[i] = hevc->white_point[i];
7976 vf_dp->luminance[i]
7977 = hevc->luminance[i];
7978 }
7979 vf_dp->present_flag = 1;
7980 } else
7981 vf_dp->present_flag = 0;
7982
7983 /* content_light_level */
7984 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
7985 vf_dp->content_light_level.max_content
7986 = hevc->content_light_level[0];
7987 vf_dp->content_light_level.max_pic_average
7988 = hevc->content_light_level[1];
7989 vf_dp->content_light_level.present_flag = 1;
7990 } else
7991 vf_dp->content_light_level.present_flag = 0;
7992
7993 if (hevc->is_used_v4l &&
7994 ((hevc->sei_present_flag & SEI_HDR10PLUS_MASK) ||
7995 (vf_dp->present_flag) ||
7996 (vf_dp->content_light_level.present_flag))) {
7997 struct aml_vdec_hdr_infos hdr;
7998 struct aml_vcodec_ctx *ctx =
7999 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
8000
8001 memset(&hdr, 0, sizeof(hdr));
8002 hdr.signal_type = vf->signal_type;
8003 hdr.color_parms = *vf_dp;
8004 vdec_v4l_set_hdr_infos(ctx, &hdr);
8005 }
8006}
8007
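/*
 * vframe provider callbacks.  The module-level "step" variable is a debug
 * gate: step == 2 makes states/peek/get report no frames (single-step
 * display), and a get while step == 1 advances it to 2.
 */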
8008static int vh265_vf_states(struct vframe_states *states, void *op_arg)
8009{
8010 unsigned long flags;
8011#ifdef MULTI_INSTANCE_SUPPORT
8012 struct vdec_s *vdec = op_arg;
8013 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8014#else
8015 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8016#endif
8017
8018 spin_lock_irqsave(&lock, flags);
8019
8020 states->vf_pool_size = VF_POOL_SIZE;
8021 states->buf_free_num = kfifo_len(&hevc->newframe_q);
8022 states->buf_avail_num = kfifo_len(&hevc->display_q);
8023
8024 if (step == 2)
8025 states->buf_avail_num = 0;
8026 spin_unlock_irqrestore(&lock, flags);
8027 return 0;
8028}
8029
8030static struct vframe_s *vh265_vf_peek(void *op_arg)
8031{
8032 struct vframe_s *vf[2] = {0, 0};
8033#ifdef MULTI_INSTANCE_SUPPORT
8034 struct vdec_s *vdec = op_arg;
8035 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8036#else
8037 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8038#endif
8039
8040 if (step == 2)
8041 return NULL;
8042
8043 if (force_disp_pic_index & 0x100) {
8044 if (force_disp_pic_index & 0x200)
8045 return NULL;
8046 return &hevc->vframe_dummy;
8047 }
8048
8049
8050 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
8051 if (vf[1]) {
8052 vf[0]->next_vf_pts_valid = true;
8053 vf[0]->next_vf_pts = vf[1]->pts;
8054 } else
8055 vf[0]->next_vf_pts_valid = false;
8056 return vf[0];
8057 }
8058
8059 return NULL;
8060}
8061
8062static struct vframe_s *vh265_vf_get(void *op_arg)
8063{
8064 struct vframe_s *vf;
8065#ifdef MULTI_INSTANCE_SUPPORT
8066 struct vdec_s *vdec = op_arg;
8067 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8068#else
8069 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8070#endif
8071
8072 if (step == 2)
8073 return NULL;
8074 else if (step == 1)
8075 step = 2;
8076
8077#if 0
8078 if (force_disp_pic_index & 0x100) {
8079 int buffer_index = force_disp_pic_index & 0xff;
8080 struct PIC_s *pic = NULL;
8081 if (buffer_index >= 0
8082 && buffer_index < MAX_REF_PIC_NUM)
8083 pic = hevc->m_PIC[buffer_index];
8084 if (pic == NULL)
8085 return NULL;
8086 if (force_disp_pic_index & 0x200)
8087 return NULL;
8088
8089 vf = &hevc->vframe_dummy;
8090 if (get_double_write_mode(hevc)) {
8091 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
8092 VIDTYPE_VIU_NV21;
8093 if (hevc->m_ins_flag) {
8094 vf->canvas0Addr = vf->canvas1Addr = -1;
8095 vf->plane_num = 2;
8096 vf->canvas0_config[0] =
8097 pic->canvas_config[0];
8098 vf->canvas0_config[1] =
8099 pic->canvas_config[1];
8100
8101 vf->canvas1_config[0] =
8102 pic->canvas_config[0];
8103 vf->canvas1_config[1] =
8104 pic->canvas_config[1];
8105 } else {
8106 vf->canvas0Addr = vf->canvas1Addr
8107 = spec2canvas(pic);
8108 }
8109 } else {
8110 vf->canvas0Addr = vf->canvas1Addr = 0;
8111 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8112 if (hevc->mmu_enable)
8113 vf->type |= VIDTYPE_SCATTER;
8114 }
8115 vf->compWidth = pic->width;
8116 vf->compHeight = pic->height;
8117 update_vf_memhandle(hevc, vf, pic);
8118 switch (hevc->bit_depth_luma) {
8119 case 9:
8120 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
8121 break;
8122 case 10:
8123 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
8124 | BITDEPTH_V10;
8125 break;
8126 default:
8127 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8128 break;
8129 }
8130 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8131 vf->bitdepth =
8132 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8133 if (hevc->mem_saving_mode == 1)
8134 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8135 vf->duration_pulldown = 0;
8136 vf->pts = 0;
8137 vf->pts_us64 = 0;
8138 set_frame_info(hevc, vf);
8139
8140 vf->width = pic->width /
8141 get_double_write_ratio(hevc, pic->double_write_mode);
8142 vf->height = pic->height /
8143 get_double_write_ratio(hevc, pic->double_write_mode);
8144
8145 force_disp_pic_index |= 0x200;
8146 return vf;
8147 }
8148#endif
8149
8150 if (kfifo_get(&hevc->display_q, &vf)) {
8151 struct vframe_s *next_vf;
8152 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8153 hevc_print(hevc, 0,
8154 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8155 __func__, vf, vf->type, vf->index,
8156 get_pic_poc(hevc, vf->index & 0xff),
8157 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8158 vf->pts, vf->pts_us64,
8159 vf->duration);
8160#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8161 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
8162 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
8163 if (pic->aux_data_buf && pic->aux_data_size > 0) {
8164 int i;
8165 struct PIC_s *pic =
8166 hevc->m_PIC[vf->index & 0xff];
8167 hevc_print(hevc, 0,
8168 "pic 0x%p aux size %d:\n",
8169 pic, pic->aux_data_size);
8170 for (i = 0; i < pic->aux_data_size; i++) {
8171 hevc_print_cont(hevc, 0,
8172 "%02x ", pic->aux_data_buf[i]);
8173 if (((i + 1) & 0xf) == 0)
8174 hevc_print_cont(hevc, 0, "\n");
8175 }
8176 hevc_print_cont(hevc, 0, "\n");
8177 }
8178 }
8179#endif
8180 hevc->show_frame_num++;
8181 vf->index_disp = hevc->vf_get_count;
8182 hevc->vf_get_count++;
8183
8184 if (kfifo_peek(&hevc->display_q, &next_vf)) {
8185 vf->next_vf_pts_valid = true;
8186 vf->next_vf_pts = next_vf->pts;
8187 } else
8188 vf->next_vf_pts_valid = false;
8189
8190 return vf;
8191 }
8192
8193 return NULL;
8194}
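/*
 * vf_valid_check(): guard against a receiver handing back a pointer that
 * does not belong to this instance's vfpool; dumps the pool addresses when
 * an invalid vframe is detected.
 */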
8195static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
8196 int i;
8197 for (i = 0; i < VF_POOL_SIZE; i++) {
8198 if (vf == &hevc->vfpool[i])
8199 return true;
8200 }
8201	pr_info("h265: invalid vf put back, vf = %p\n", vf);
8202 for (i = 0; i < VF_POOL_SIZE; i++) {
8203		pr_info("valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
8204 }
8205 return false;
8206}
8207
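/*
 * vh265_vf_put(): return a displayed vframe to newframe_q.  vf->index may
 * encode two field pictures (top in bits [7:0], bottom in bits [15:8]);
 * each referenced PIC has its vf_ref dropped, and once it reaches zero the
 * buffer is marked not output_ready and the ucode is kicked through the
 * mailbox IRQ if it was waiting for a free buffer.
 */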
8208static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
8209{
8210 unsigned long flags;
8211#ifdef MULTI_INSTANCE_SUPPORT
8212 struct vdec_s *vdec = op_arg;
8213 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8214#else
8215 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8216#endif
8217 unsigned char index_top;
8218 unsigned char index_bot;
8219
8220 if (vf && (vf_valid_check(vf, hevc) == false))
8221 return;
8222 if (vf == (&hevc->vframe_dummy))
8223 return;
8224 index_top = vf->index & 0xff;
8225 index_bot = (vf->index >> 8) & 0xff;
8226 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8227 hevc_print(hevc, 0,
8228 "%s(type %d index 0x%x)\n",
8229 __func__, vf->type, vf->index);
8230 hevc->vf_put_count++;
8231 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8232 spin_lock_irqsave(&lock, flags);
8233
8234 if (index_top != 0xff
8235 && index_top < MAX_REF_PIC_NUM
8236 && hevc->m_PIC[index_top]) {
8237 if (hevc->is_used_v4l)
8238 hevc->m_PIC[index_top]->vframe_bound = true;
8239 if (hevc->m_PIC[index_top]->vf_ref > 0) {
8240 hevc->m_PIC[index_top]->vf_ref--;
8241
8242 if (hevc->m_PIC[index_top]->vf_ref == 0) {
8243 hevc->m_PIC[index_top]->output_ready = 0;
8244
8245 if (hevc->wait_buf != 0)
8246 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8247 0x1);
8248 }
8249 }
8250 }
8251
8252 if (index_bot != 0xff
8253 && index_bot < MAX_REF_PIC_NUM
8254 && hevc->m_PIC[index_bot]) {
8255 if (hevc->is_used_v4l)
8256 hevc->m_PIC[index_bot]->vframe_bound = true;
8257 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
8258 hevc->m_PIC[index_bot]->vf_ref--;
8259
8260 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
8261 hevc->m_PIC[index_bot]->output_ready = 0;
8262 if (hevc->wait_buf != 0)
8263 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8264 0x1);
8265 }
8266 }
8267 }
8268 spin_unlock_irqrestore(&lock, flags);
8269}
8270
8271static int vh265_event_cb(int type, void *data, void *op_arg)
8272{
8273 unsigned long flags;
8274#ifdef MULTI_INSTANCE_SUPPORT
8275 struct vdec_s *vdec = op_arg;
8276 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8277#else
8278 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8279#endif
8280 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8281#if 0
8282 amhevc_stop();
8283#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8284 vf_light_unreg_provider(&vh265_vf_prov);
8285#endif
8286 spin_lock_irqsave(&hevc->lock, flags);
8287 vh265_local_init();
8288 vh265_prot_init();
8289 spin_unlock_irqrestore(&hevc->lock, flags);
8290#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8291 vf_reg_provider(&vh265_vf_prov);
8292#endif
8293 amhevc_start();
8294#endif
8295 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8296 struct provider_aux_req_s *req =
8297 (struct provider_aux_req_s *)data;
8298 unsigned char index;
8299
8300 if (!req->vf) {
8301 req->aux_size = hevc->vf_put_count;
8302 return 0;
8303 }
8304 spin_lock_irqsave(&lock, flags);
8305 index = req->vf->index & 0xff;
8306 req->aux_buf = NULL;
8307 req->aux_size = 0;
8308 if (req->bot_flag)
8309 index = (req->vf->index >> 8) & 0xff;
8310 if (index != 0xff
8311 && index < MAX_REF_PIC_NUM
8312 && hevc->m_PIC[index]) {
8313 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8314 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8315#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8316 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8317 req->dv_enhance_exist = false;
8318 else
8319 req->dv_enhance_exist =
8320 hevc->m_PIC[index]->dv_enhance_exist;
8321 hevc_print(hevc, H265_DEBUG_DV,
8322				"query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux size 0x%x\n",
8323 req->vf,
8324 hevc->m_PIC[index]->POC, index,
8325 req->dv_enhance_exist, req->aux_size);
8326#else
8327 req->dv_enhance_exist = 0;
8328#endif
8329 }
8330 spin_unlock_irqrestore(&lock, flags);
8331
8332 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8333 hevc_print(hevc, 0,
8334 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8335 __func__, type, index, req->aux_size);
8336#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8337 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8338 if ((force_bypass_dvenl & 0x80000000) == 0) {
8339 hevc_print(hevc, 0,
8340 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8341 __func__);
8342 hevc->bypass_dvenl_enable = 1;
8343 }
8344
8345#endif
8346 }
8347 return 0;
8348}
8349
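/*
 * process_pending_vframe(): pending_q holds at most one single-field vframe
 * waiting for its complementary field.  If the pending entry can be paired
 * with pair_pic, the missing index byte and canvas are filled in from
 * pair_pic and the merged frame goes to display_q; otherwise the stale
 * entry is flushed (recycled or displayed as-is).
 */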
8350#ifdef HEVC_PIC_STRUCT_SUPPORT
8351static int process_pending_vframe(struct hevc_state_s *hevc,
8352 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8353{
8354 struct vframe_s *vf;
8355
8356 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8357 hevc_print(hevc, 0,
8358 "%s: pair_pic index 0x%x %s\n",
8359 __func__, pair_pic->index,
8360 pair_frame_top_flag ?
8361 "top" : "bot");
8362
8363 if (kfifo_len(&hevc->pending_q) > 1) {
8364 unsigned long flags;
8365		/* do not keep more than 1 frame pending */
8366 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8367 hevc_print(hevc, 0,
8368 "fatal error, no available buffer slot.");
8369 return -1;
8370 }
8371 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8372 hevc_print(hevc, 0,
8373 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8374 __func__, vf->index);
8375 if ((hevc->double_write_mode == 3) &&
8376 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8377 vf->type |= VIDTYPE_COMPRESS;
8378 if (hevc->mmu_enable)
8379 vf->type |= VIDTYPE_SCATTER;
8380 }
8381 hevc->vf_pre_count++;
8382 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8383 spin_lock_irqsave(&lock, flags);
8384 vf->index &= 0xff;
8385 hevc->m_PIC[vf->index]->output_ready = 0;
8386 if (hevc->wait_buf != 0)
8387 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8388 0x1);
8389 spin_unlock_irqrestore(&lock, flags);
8390
8391 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8392 }
8393
8394 if (kfifo_peek(&hevc->pending_q, &vf)) {
8395 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8396 /*
8397 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8398 *do not use it
8399 */
8400 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8401 hevc_print(hevc, 0,
8402 "fatal error, no available buffer slot.");
8403 return -1;
8404 }
8405 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8406 hevc_print(hevc, 0,
8407 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8408 __func__, vf->index);
8409 if (vf) {
8410 if ((hevc->double_write_mode == 3) &&
8411 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8412 vf->type |= VIDTYPE_COMPRESS;
8413 if (hevc->mmu_enable)
8414 vf->type |= VIDTYPE_SCATTER;
8415 }
8416 hevc->vf_pre_count++;
8417 kfifo_put(&hevc->display_q,
8418 (const struct vframe_s *)vf);
8419 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8420 }
8421 } else if ((!pair_frame_top_flag) &&
8422 (((vf->index >> 8) & 0xff) == 0xff)) {
8423 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8424 hevc_print(hevc, 0,
8425 "fatal error, no available buffer slot.");
8426 return -1;
8427 }
8428 if (vf) {
8429 if ((hevc->double_write_mode == 3) &&
8430 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8431 vf->type |= VIDTYPE_COMPRESS;
8432 if (hevc->mmu_enable)
8433 vf->type |= VIDTYPE_SCATTER;
8434 }
8435 vf->index &= 0xff;
8436 vf->index |= (pair_pic->index << 8);
8437 vf->canvas1Addr = spec2canvas(pair_pic);
8438 pair_pic->vf_ref++;
8439 kfifo_put(&hevc->display_q,
8440 (const struct vframe_s *)vf);
8441 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8442 hevc->vf_pre_count++;
8443 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8444 hevc_print(hevc, 0,
8445 "%s vf => display_q: (index 0x%x)\n",
8446 __func__, vf->index);
8447 }
8448 } else if (pair_frame_top_flag &&
8449 ((vf->index & 0xff) == 0xff)) {
8450 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8451 hevc_print(hevc, 0,
8452 "fatal error, no available buffer slot.");
8453 return -1;
8454 }
8455 if (vf) {
8456 if ((hevc->double_write_mode == 3) &&
8457 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8458 vf->type |= VIDTYPE_COMPRESS;
8459 if (hevc->mmu_enable)
8460 vf->type |= VIDTYPE_SCATTER;
8461 }
8462 vf->index &= 0xff00;
8463 vf->index |= pair_pic->index;
8464 vf->canvas0Addr = spec2canvas(pair_pic);
8465 pair_pic->vf_ref++;
8466 kfifo_put(&hevc->display_q,
8467 (const struct vframe_s *)vf);
8468 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8469 hevc->vf_pre_count++;
8470 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8471 hevc_print(hevc, 0,
8472 "%s vf => display_q: (index 0x%x)\n",
8473 __func__, vf->index);
8474 }
8475 }
8476 }
8477 return 0;
8478}
8479#endif
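/*
 * update_vf_memhandle(): attach the decoder_mmu/bmmu handles backing this
 * picture to the vframe so the underlying buffers stay referenced while the
 * frame is owned by the display path.
 */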
8480static void update_vf_memhandle(struct hevc_state_s *hevc,
8481 struct vframe_s *vf, struct PIC_s *pic)
8482{
8483 if (pic->index < 0) {
8484 vf->mem_handle = NULL;
8485 vf->mem_head_handle = NULL;
8486 } else if (vf->type & VIDTYPE_SCATTER) {
8487 vf->mem_handle =
8488 decoder_mmu_box_get_mem_handle(
8489 hevc->mmu_box, pic->index);
8490 vf->mem_head_handle =
8491 decoder_bmmu_box_get_mem_handle(
8492 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8493 } else {
8494 vf->mem_handle =
8495 decoder_bmmu_box_get_mem_handle(
8496 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8497 vf->mem_head_handle = NULL;
8498 /*vf->mem_head_handle =
8499 decoder_bmmu_box_get_mem_handle(
8500 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8501 }
8502 return;
8503}
8504
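/*
 * fill_frame_info(): per-frame QoS reporting.  Frame type mapping used here:
 * IDR -> 4, I -> 1, P -> 2, B -> 3; the max/avg/min motion-vector, QP and
 * skip statistics collected during decode are copied out and, when
 * frameinfo_enable is set, pushed with vdec_fill_frame_info().
 */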
8505static void fill_frame_info(struct hevc_state_s *hevc,
8506 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8507{
8508 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8509 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8510 vframe_qos->type = 4;
8511 else if (pic->slice_type == I_SLICE)
8512 vframe_qos->type = 1;
8513 else if (pic->slice_type == P_SLICE)
8514 vframe_qos->type = 2;
8515 else if (pic->slice_type == B_SLICE)
8516 vframe_qos->type = 3;
8517/*
8518#define SHOW_QOS_INFO
8519*/
8520 vframe_qos->size = framesize;
8521 vframe_qos->pts = pts;
8522#ifdef SHOW_QOS_INFO
8523 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8524#endif
8525
8526
8527 vframe_qos->max_mv = pic->max_mv;
8528 vframe_qos->avg_mv = pic->avg_mv;
8529 vframe_qos->min_mv = pic->min_mv;
8530#ifdef SHOW_QOS_INFO
8531 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8532 vframe_qos->max_mv,
8533 vframe_qos->avg_mv,
8534 vframe_qos->min_mv);
8535#endif
8536
8537 vframe_qos->max_qp = pic->max_qp;
8538 vframe_qos->avg_qp = pic->avg_qp;
8539 vframe_qos->min_qp = pic->min_qp;
8540#ifdef SHOW_QOS_INFO
8541 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8542 vframe_qos->max_qp,
8543 vframe_qos->avg_qp,
8544 vframe_qos->min_qp);
8545#endif
8546
8547 vframe_qos->max_skip = pic->max_skip;
8548 vframe_qos->avg_skip = pic->avg_skip;
8549 vframe_qos->min_skip = pic->min_skip;
8550#ifdef SHOW_QOS_INFO
8551 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8552 vframe_qos->max_skip,
8553 vframe_qos->avg_skip,
8554 vframe_qos->min_skip);
8555#endif
8556
8557 vframe_qos->num++;
8558
8559 if (hevc->frameinfo_enable)
8560 vdec_fill_frame_info(vframe_qos, 1);
8561}
8562
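/*
 * prepare_display_buf(): turn a decoded PIC into one or more vframes.
 * PTS comes from the chunk in frame-based mode or from
 * pts_lookup_offset_us64() in stream mode, falling back to
 * last_pts + DUR2PTS(frame_dur) once PTS_NONE_REF_USE_DURATION mode kicks
 * in (for slice_type != 2).  vf->index packs the top-field PIC index in
 * bits [7:0] and the bottom field in bits [15:8] (0xff means "no field").
 * Interlaced pic_struct values (1/2, 3/4, 5/6, 9-12) are expanded into
 * field vframes, partly via the pending_q pairing in
 * process_pending_vframe().
 */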
8563static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8564{
8565#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8566 struct vdec_s *vdec = hw_to_vdec(hevc);
8567#endif
8568 struct vframe_s *vf = NULL;
8569 int stream_offset = pic->stream_offset;
8570 unsigned short slice_type = pic->slice_type;
8571 u32 frame_size;
8572
8573 if (force_disp_pic_index & 0x100) {
8574 /*recycle directly*/
8575 pic->output_ready = 0;
8576 return -1;
8577 }
8578 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8579 hevc_print(hevc, 0,
8580 "fatal error, no available buffer slot.");
8581 return -1;
8582 }
8583 display_frame_count[hevc->index]++;
8584 if (vf) {
8585 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8586 "%s: pic index 0x%x\n",
8587 __func__, pic->index);*/
8588
8589 if (hevc->is_used_v4l) {
8590 vf->v4l_mem_handle
8591 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8592 if (hevc->mmu_enable) {
8593 vf->mm_box.bmmu_box = hevc->bmmu_box;
8594 vf->mm_box.bmmu_idx = VF_BUFFER_IDX(pic->BUF_index);
8595 vf->mm_box.mmu_box = hevc->mmu_box;
8596 vf->mm_box.mmu_idx = pic->index;
8597 }
8598 }
8599
8600#ifdef MULTI_INSTANCE_SUPPORT
8601 if (vdec_frame_based(hw_to_vdec(hevc))) {
8602 vf->pts = pic->pts;
8603 vf->pts_us64 = pic->pts64;
8604 vf->timestamp = pic->timestamp;
8605 }
8606 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8607 stream_offset, &vf->pts, 0) != 0) { */
8608#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8609 else if (vdec->master == NULL) {
8610#else
8611 else {
8612#endif
8613#endif
8614 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8615 "call pts_lookup_offset_us64(0x%x)\n",
8616 stream_offset);
8617 if (pts_lookup_offset_us64
8618 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8619 &frame_size, 0,
8620 &vf->pts_us64) != 0) {
8621#ifdef DEBUG_PTS
8622 hevc->pts_missed++;
8623#endif
8624 vf->pts = 0;
8625 vf->pts_us64 = 0;
8626 }
8627#ifdef DEBUG_PTS
8628 else
8629 hevc->pts_hit++;
8630#endif
8631#ifdef MULTI_INSTANCE_SUPPORT
8632#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8633 } else {
8634 vf->pts = 0;
8635 vf->pts_us64 = 0;
8636 }
8637#else
8638 }
8639#endif
8640#endif
8641 if (pts_unstable && (hevc->frame_dur > 0))
8642 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8643
8644 fill_frame_info(hevc, pic, frame_size, vf->pts);
8645
8646 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8647 && hevc->get_frame_dur) {
8648 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8649
8650 if (pts_diff < 0) {
8651 hevc->pts_mode_switching_count++;
8652 hevc->pts_mode_recovery_count = 0;
8653
8654 if (hevc->pts_mode_switching_count >=
8655 PTS_MODE_SWITCHING_THRESHOLD) {
8656 hevc->pts_mode =
8657 PTS_NONE_REF_USE_DURATION;
8658 hevc_print(hevc, 0,
8659					"HEVC: switch to NONE_REF_USE_DURATION pts mode.\n");
8660 }
8661
8662 } else {
8663 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8664
8665 hevc->pts_mode_recovery_count++;
8666 if (hevc->pts_mode_recovery_count > p) {
8667 hevc->pts_mode_switching_count = 0;
8668 hevc->pts_mode_recovery_count = 0;
8669 }
8670 }
8671 }
8672
8673 if (vf->pts != 0)
8674 hevc->last_lookup_pts = vf->pts;
8675
8676 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8677 && (slice_type != 2))
8678 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8679 hevc->last_pts = vf->pts;
8680
8681 if (vf->pts_us64 != 0)
8682 hevc->last_lookup_pts_us64 = vf->pts_us64;
8683
8684 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8685 && (slice_type != 2)) {
8686 vf->pts_us64 =
8687 hevc->last_pts_us64 +
8688 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8689 }
8690 hevc->last_pts_us64 = vf->pts_us64;
8691 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8692 hevc_print(hevc, 0,
8693 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8694 vf->pts, vf->pts_us64);
8695 }
8696
8697 /*
8698 *vf->index:
8699 *(1) vf->type is VIDTYPE_PROGRESSIVE
8700 * and vf->canvas0Addr != vf->canvas1Addr,
8701 * vf->index[7:0] is the index of top pic
8702 * vf->index[15:8] is the index of bot pic
8703 *(2) other cases,
8704 * only vf->index[7:0] is used
8705 * vf->index[15:8] == 0xff
8706 */
8707 vf->index = 0xff00 | pic->index;
8708#if 1
8709/*SUPPORT_10BIT*/
8710 if (pic->double_write_mode & 0x10) {
8711 /* double write only */
8712 vf->compBodyAddr = 0;
8713 vf->compHeadAddr = 0;
8714 } else {
8715
8716 if (hevc->mmu_enable) {
8717 vf->compBodyAddr = 0;
8718 vf->compHeadAddr = pic->header_adr;
8719 } else {
8720 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8721 vf->compHeadAddr = pic->mc_y_adr +
8722 pic->losless_comp_body_size;
8723 vf->mem_head_handle = NULL;
8724 }
8725
8726 /*head adr*/
8727 vf->canvas0Addr = vf->canvas1Addr = 0;
8728 }
8729 if (pic->double_write_mode) {
8730 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8731 vf->type |= VIDTYPE_VIU_NV21;
8732
8733 if ((pic->double_write_mode == 3) &&
8734 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8735 vf->type |= VIDTYPE_COMPRESS;
8736 if (hevc->mmu_enable)
8737 vf->type |= VIDTYPE_SCATTER;
8738 }
8739#ifdef MULTI_INSTANCE_SUPPORT
8740 if (hevc->m_ins_flag &&
8741 (get_dbg_flag(hevc)
8742 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8743 vf->canvas0Addr = vf->canvas1Addr = -1;
8744 vf->plane_num = 2;
8745 vf->canvas0_config[0] =
8746 pic->canvas_config[0];
8747 vf->canvas0_config[1] =
8748 pic->canvas_config[1];
8749
8750 vf->canvas1_config[0] =
8751 pic->canvas_config[0];
8752 vf->canvas1_config[1] =
8753 pic->canvas_config[1];
8754
8755 } else
8756#endif
8757 vf->canvas0Addr = vf->canvas1Addr
8758 = spec2canvas(pic);
8759 } else {
8760 vf->canvas0Addr = vf->canvas1Addr = 0;
8761 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8762 if (hevc->mmu_enable)
8763 vf->type |= VIDTYPE_SCATTER;
8764 }
8765 vf->compWidth = pic->width;
8766 vf->compHeight = pic->height;
8767 update_vf_memhandle(hevc, vf, pic);
8768 switch (pic->bit_depth_luma) {
8769 case 9:
8770 vf->bitdepth = BITDEPTH_Y9;
8771 break;
8772 case 10:
8773 vf->bitdepth = BITDEPTH_Y10;
8774 break;
8775 default:
8776 vf->bitdepth = BITDEPTH_Y8;
8777 break;
8778 }
8779 switch (pic->bit_depth_chroma) {
8780 case 9:
8781 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8782 break;
8783 case 10:
8784 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8785 break;
8786 default:
8787 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8788 break;
8789 }
8790 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8791 vf->bitdepth =
8792 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8793 if (pic->mem_saving_mode == 1)
8794 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8795#else
8796 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8797 vf->type |= VIDTYPE_VIU_NV21;
8798 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8799#endif
8800 set_frame_info(hevc, vf, pic);
8801 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8802 /* hevc_print(hevc, 0,
8803 "aaa: %d/%d, %d/%d\n",
8804 vf->width,vf->height, pic->width, pic->height); */
8805 vf->width = pic->width;
8806 vf->height = pic->height;
8807
8808 if (force_w_h != 0) {
8809 vf->width = (force_w_h >> 16) & 0xffff;
8810 vf->height = force_w_h & 0xffff;
8811 }
8812 if (force_fps & 0x100) {
8813 u32 rate = force_fps & 0xff;
8814
8815 if (rate)
8816 vf->duration = 96000/rate;
8817 else
8818 vf->duration = 0;
8819 }
8820 if (force_fps & 0x200) {
8821 vf->pts = 0;
8822 vf->pts_us64 = 0;
8823 }
8824 /*
8825 * !!! to do ...
8826	 * need to move the code below to get_new_pic();
8827	 * hevc->xxx can only be used by the currently decoded pic
8828 */
8829 if (pic->conformance_window_flag &&
8830 (get_dbg_flag(hevc) &
8831 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
8832 unsigned int SubWidthC, SubHeightC;
8833
8834 switch (pic->chroma_format_idc) {
8835 case 1:
8836 SubWidthC = 2;
8837 SubHeightC = 2;
8838 break;
8839 case 2:
8840 SubWidthC = 2;
8841 SubHeightC = 1;
8842 break;
8843 default:
8844 SubWidthC = 1;
8845 SubHeightC = 1;
8846 break;
8847 }
8848 vf->width -= SubWidthC *
8849 (pic->conf_win_left_offset +
8850 pic->conf_win_right_offset);
8851 vf->height -= SubHeightC *
8852 (pic->conf_win_top_offset +
8853 pic->conf_win_bottom_offset);
8854
8855 vf->compWidth -= SubWidthC *
8856 (pic->conf_win_left_offset +
8857 pic->conf_win_right_offset);
8858 vf->compHeight -= SubHeightC *
8859 (pic->conf_win_top_offset +
8860 pic->conf_win_bottom_offset);
8861
8862 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
8863 hevc_print(hevc, 0,
8864 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
8865 pic->chroma_format_idc,
8866 pic->conf_win_left_offset,
8867 pic->conf_win_right_offset,
8868 pic->conf_win_top_offset,
8869 pic->conf_win_bottom_offset,
8870 vf->width, vf->height, vf->compWidth, vf->compHeight);
8871 }
8872
8873 vf->width = vf->width /
8874 get_double_write_ratio(hevc, pic->double_write_mode);
8875 vf->height = vf->height /
8876 get_double_write_ratio(hevc, pic->double_write_mode);
8877#ifdef HEVC_PIC_STRUCT_SUPPORT
8878 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
8879 struct vframe_s *vf2;
8880
8881 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8882 hevc_print(hevc, 0,
8883 "pic_struct = %d index 0x%x\n",
8884 pic->pic_struct,
8885 pic->index);
8886
8887 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8888 hevc_print(hevc, 0,
8889 "fatal error, no available buffer slot.");
8890 return -1;
8891 }
8892 pic->vf_ref = 2;
8893 vf->duration = vf->duration>>1;
8894 memcpy(vf2, vf, sizeof(struct vframe_s));
8895
8896 if (pic->pic_struct == 3) {
8897 vf->type = VIDTYPE_INTERLACE_TOP
8898 | VIDTYPE_VIU_NV21;
8899 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8900 | VIDTYPE_VIU_NV21;
8901 } else {
8902 vf->type = VIDTYPE_INTERLACE_BOTTOM
8903 | VIDTYPE_VIU_NV21;
8904 vf2->type = VIDTYPE_INTERLACE_TOP
8905 | VIDTYPE_VIU_NV21;
8906 }
8907 hevc->vf_pre_count++;
8908 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8909 kfifo_put(&hevc->display_q,
8910 (const struct vframe_s *)vf);
8911 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8912 hevc->vf_pre_count++;
8913 kfifo_put(&hevc->display_q,
8914 (const struct vframe_s *)vf2);
8915 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8916 } else if (pic->pic_struct == 5
8917 || pic->pic_struct == 6) {
8918 struct vframe_s *vf2, *vf3;
8919
8920 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8921 hevc_print(hevc, 0,
8922 "pic_struct = %d index 0x%x\n",
8923 pic->pic_struct,
8924 pic->index);
8925
8926 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8927 hevc_print(hevc, 0,
8928 "fatal error, no available buffer slot.");
8929 return -1;
8930 }
8931 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
8932 hevc_print(hevc, 0,
8933 "fatal error, no available buffer slot.");
8934 return -1;
8935 }
8936 pic->vf_ref = 3;
8937 vf->duration = vf->duration/3;
8938 memcpy(vf2, vf, sizeof(struct vframe_s));
8939 memcpy(vf3, vf, sizeof(struct vframe_s));
8940
8941 if (pic->pic_struct == 5) {
8942 vf->type = VIDTYPE_INTERLACE_TOP
8943 | VIDTYPE_VIU_NV21;
8944 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8945 | VIDTYPE_VIU_NV21;
8946 vf3->type = VIDTYPE_INTERLACE_TOP
8947 | VIDTYPE_VIU_NV21;
8948 } else {
8949 vf->type = VIDTYPE_INTERLACE_BOTTOM
8950 | VIDTYPE_VIU_NV21;
8951 vf2->type = VIDTYPE_INTERLACE_TOP
8952 | VIDTYPE_VIU_NV21;
8953 vf3->type = VIDTYPE_INTERLACE_BOTTOM
8954 | VIDTYPE_VIU_NV21;
8955 }
8956 hevc->vf_pre_count++;
8957 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8958 kfifo_put(&hevc->display_q,
8959 (const struct vframe_s *)vf);
8960 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8961 hevc->vf_pre_count++;
8962 kfifo_put(&hevc->display_q,
8963 (const struct vframe_s *)vf2);
8964 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8965 hevc->vf_pre_count++;
8966 kfifo_put(&hevc->display_q,
8967 (const struct vframe_s *)vf3);
8968 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
8969
8970 } else if (pic->pic_struct == 9
8971 || pic->pic_struct == 10) {
8972 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8973 hevc_print(hevc, 0,
8974 "pic_struct = %d index 0x%x\n",
8975 pic->pic_struct,
8976 pic->index);
8977
8978 pic->vf_ref = 1;
8979 /* process previous pending vf*/
8980 process_pending_vframe(hevc,
8981 pic, (pic->pic_struct == 9));
8982
8983 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8984 /* process current vf */
8985 kfifo_put(&hevc->pending_q,
8986 (const struct vframe_s *)vf);
8987 vf->height <<= 1;
8988 if (pic->pic_struct == 9) {
8989 vf->type = VIDTYPE_INTERLACE_TOP
8990 | VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8991 process_pending_vframe(hevc,
8992 hevc->pre_bot_pic, 0);
8993 } else {
8994 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8995 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8996 vf->index = (pic->index << 8) | 0xff;
8997 process_pending_vframe(hevc,
8998 hevc->pre_top_pic, 1);
8999 }
9000
9001 if (hevc->vf_pre_count == 0)
9002 hevc->vf_pre_count++;
9003
9004 /**/
9005 if (pic->pic_struct == 9)
9006 hevc->pre_top_pic = pic;
9007 else
9008 hevc->pre_bot_pic = pic;
9009
9010 } else if (pic->pic_struct == 11
9011 || pic->pic_struct == 12) {
9012 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9013 hevc_print(hevc, 0,
9014 "pic_struct = %d index 0x%x\n",
9015 pic->pic_struct,
9016 pic->index);
9017 pic->vf_ref = 1;
9018 /* process previous pending vf*/
9019 process_pending_vframe(hevc, pic,
9020 (pic->pic_struct == 11));
9021
9022 /* put current into pending q */
9023 vf->height <<= 1;
9024 if (pic->pic_struct == 11)
9025 vf->type = VIDTYPE_INTERLACE_TOP |
9026 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
9027 else {
9028 vf->type = VIDTYPE_INTERLACE_BOTTOM |
9029 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
9030 vf->index = (pic->index << 8) | 0xff;
9031 }
9032 decoder_do_frame_check(hw_to_vdec(hevc), vf);
9033 kfifo_put(&hevc->pending_q,
9034 (const struct vframe_s *)vf);
9035 if (hevc->vf_pre_count == 0)
9036 hevc->vf_pre_count++;
9037
9038 /**/
9039 if (pic->pic_struct == 11)
9040 hevc->pre_top_pic = pic;
9041 else
9042 hevc->pre_bot_pic = pic;
9043
9044 } else {
9045 pic->vf_ref = 1;
9046
9047 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9048 hevc_print(hevc, 0,
9049 "pic_struct = %d index 0x%x\n",
9050 pic->pic_struct,
9051 pic->index);
9052
9053 switch (pic->pic_struct) {
9054 case 7:
9055 vf->duration <<= 1;
9056 break;
9057 case 8:
9058 vf->duration = vf->duration * 3;
9059 break;
9060 case 1:
9061 vf->height <<= 1;
9062 vf->type = VIDTYPE_INTERLACE_TOP |
9063 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
9064 process_pending_vframe(hevc, pic, 1);
9065 hevc->pre_top_pic = pic;
9066 break;
9067 case 2:
9068 vf->height <<= 1;
9069 vf->type = VIDTYPE_INTERLACE_BOTTOM
9070 | VIDTYPE_VIU_NV21
9071 | VIDTYPE_VIU_FIELD;
9072 process_pending_vframe(hevc, pic, 0);
9073 hevc->pre_bot_pic = pic;
9074 break;
9075 }
9076 hevc->vf_pre_count++;
9077 decoder_do_frame_check(hw_to_vdec(hevc), vf);
9078 kfifo_put(&hevc->display_q,
9079 (const struct vframe_s *)vf);
9080 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9081 }
9082#else
9083 vf->type_original = vf->type;
9084 pic->vf_ref = 1;
9085 hevc->vf_pre_count++;
9086 decoder_do_frame_check(hw_to_vdec(hevc), vf);
9087 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
9088 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9089
9090 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9091 hevc_print(hevc, 0,
9092 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
9093 __func__, vf->type, vf->index,
9094 get_pic_poc(hevc, vf->index & 0xff),
9095 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
9096 vf->pts, vf->pts_us64,
9097 vf->duration);
9098#endif
9099#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
9100 /*count info*/
9101 vdec_count_info(gvs, 0, stream_offset);
9102#endif
9103 hw_to_vdec(hevc)->vdec_fps_detec(hw_to_vdec(hevc)->id);
9104 if (without_display_mode == 0) {
9105 vf_notify_receiver(hevc->provider_name,
9106 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9107 }
9108 else
9109 vh265_vf_put(vh265_vf_get(vdec), vdec);
9110 }
9111
9112 return 0;
9113}
9114
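/*
 * notify_v4l_eos(): on the v4l2 path, queue a dummy vframe flagged
 * VIDTYPE_V4L_EOS / VFRAME_FLAG_EMPTY_FRAME_V4L so the receiver sees end of
 * stream; waits up to ~2 s for a free buffer index and otherwise borrows
 * one from vdec_v4l_get_buffer().
 */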
9115static int notify_v4l_eos(struct vdec_s *vdec)
9116{
9117 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
9118 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
9119 struct vframe_s *vf = &hw->vframe_dummy;
9120 struct vdec_v4l2_buffer *fb = NULL;
9121 int index = INVALID_IDX;
9122 ulong expires;
9123
9124 if (hw->is_used_v4l && hw->eos) {
9125 expires = jiffies + msecs_to_jiffies(2000);
9126 while (INVALID_IDX == (index = get_free_buf_idx(hw))) {
9127 if (time_after(jiffies, expires))
9128 break;
9129 }
9130
9131 if (index == INVALID_IDX) {
9132 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb) < 0) {
9133 pr_err("[%d] EOS get free buff fail.\n", ctx->id);
9134 return -1;
9135 }
9136 }
9137
9138 vf->type |= VIDTYPE_V4L_EOS;
9139 vf->timestamp = ULONG_MAX;
9140 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
9141 vf->v4l_mem_handle = (index == INVALID_IDX) ? (ulong)fb :
9142 hw->m_BUF[index].v4l_ref_buf_addr;
9143 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
9144 vf_notify_receiver(vdec->vf_provider_name,
9145 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9146
9147 pr_info("[%d] H265 EOS notify.\n", ctx->id);
9148 }
9149
9150 return 0;
9151}
9152
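/*
 * process_nal_sei(): SEI handling on the stream-register path.  For payload
 * type 137 (mastering display colour volume) the 16-bit words are popped
 * from HEVC_SHIFTED_DATA, each read followed by a HEVC_SHIFT_COMMAND of
 * (1 << 7) | 16 to advance the shifter; any remaining payload bytes are
 * skipped 8 bits at a time.
 */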
9153static void process_nal_sei(struct hevc_state_s *hevc,
9154 int payload_type, int payload_size)
9155{
9156 unsigned short data;
9157
9158 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9159 hevc_print(hevc, 0,
9160 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
9161 payload_type, payload_size);
9162
9163 if (payload_type == 137) {
9164 int i, j;
9165 /* MASTERING_DISPLAY_COLOUR_VOLUME */
9166 if (payload_size >= 24) {
9167 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9168 hevc_print(hevc, 0,
9169 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
9170 for (i = 0; i < 3; i++) {
9171 for (j = 0; j < 2; j++) {
9172 data =
9173 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9174 hevc->primaries[i][j] = data;
9175 WRITE_HREG(HEVC_SHIFT_COMMAND,
9176 (1<<7)|16);
9177 if (get_dbg_flag(hevc) &
9178 H265_DEBUG_PRINT_SEI)
9179 hevc_print(hevc, 0,
9180 "\t\tprimaries[%1d][%1d] = %04x\n",
9181 i, j, hevc->primaries[i][j]);
9182 }
9183 }
9184 for (i = 0; i < 2; i++) {
9185 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9186 hevc->white_point[i] = data;
9187 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
9188 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9189 hevc_print(hevc, 0,
9190 "\t\twhite_point[%1d] = %04x\n",
9191 i, hevc->white_point[i]);
9192 }
9193 for (i = 0; i < 2; i++) {
9194 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9195 hevc->luminance[i] = data << 16;
9196 WRITE_HREG(HEVC_SHIFT_COMMAND,
9197 (1<<7)|16);
9198 data =
9199 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9200 hevc->luminance[i] |= data;
9201 WRITE_HREG(HEVC_SHIFT_COMMAND,
9202 (1<<7)|16);
9203 if (get_dbg_flag(hevc) &
9204 H265_DEBUG_PRINT_SEI)
9205 hevc_print(hevc, 0,
9206 "\t\tluminance[%1d] = %08x\n",
9207 i, hevc->luminance[i]);
9208 }
9209 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
9210 }
9211 payload_size -= 24;
9212 while (payload_size > 0) {
9213 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
9214 payload_size--;
9215 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
9216 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
9217 }
9218 }
9219}
9220
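/*
 * hevc_recover(): error recovery for the stream-based (non multi-instance)
 * path.  The HEVC core is stopped, the 64-bit shift byte count is rebuilt
 * from shift_byte_count_hi/lo and re-based onto the current read pointer
 * (modulo the stream buffer size), the relevant DOS blocks are soft-reset,
 * the stream registers and workspace are reprogrammed, and the firmware is
 * restarted in skip / search-next-start-code mode.
 */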
9221static int hevc_recover(struct hevc_state_s *hevc)
9222{
9223 int ret = -1;
9224 u32 rem;
9225 u64 shift_byte_count64;
9226 unsigned int hevc_shift_byte_count;
9227 unsigned int hevc_stream_start_addr;
9228 unsigned int hevc_stream_end_addr;
9229 unsigned int hevc_stream_rd_ptr;
9230 unsigned int hevc_stream_wr_ptr;
9231 unsigned int hevc_stream_control;
9232 unsigned int hevc_stream_fifo_ctl;
9233 unsigned int hevc_stream_buf_size;
9234
9235 mutex_lock(&vh265_mutex);
9236#if 0
9237 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9238 int ii;
9239
9240 for (ii = 0; ii < 4; ii++)
9241 hevc_print(hevc, 0,
9242 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9243 if (((i + ii) & 0xf) == 0)
9244 hevc_print(hevc, 0, "\n");
9245 }
9246#endif
9247#define ES_VID_MAN_RD_PTR (1<<0)
9248 if (!hevc->init_flag) {
9249 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
9250 mutex_unlock(&vh265_mutex);
9251 return ret;
9252 }
9253 amhevc_stop();
9254 msleep(20);
9255 ret = 0;
9256 /* reset */
9257 WRITE_PARSER_REG(PARSER_VIDEO_RP, READ_VREG(HEVC_STREAM_RD_PTR));
9258 SET_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9259
9260 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
9261 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
9262 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
9263 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
9264 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
9265 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
9266 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
9267
9268	/* The HEVC stream buffer will be reset and restarted
9269	 * from the current hevc_stream_rd_ptr position
9270 */
9271 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
9272 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9273 if ((hevc->shift_byte_count_lo & (1 << 31))
9274 && ((hevc_shift_byte_count & (1 << 31)) == 0))
9275 hevc->shift_byte_count_hi++;
9276
9277 hevc->shift_byte_count_lo = hevc_shift_byte_count;
9278 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9279 hevc->shift_byte_count_lo;
9280 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9281 shift_byte_count64 -= rem;
9282 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9283
9284 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9285 shift_byte_count64 += hevc_stream_buf_size;
9286
9287 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9288 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
9289
9290 WRITE_VREG(DOS_SW_RESET3,
9291 /* (1<<2)| */
9292 (1 << 3) | (1 << 4) | (1 << 8) |
9293 (1 << 11) | (1 << 12) | (1 << 14)
9294 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9295 WRITE_VREG(DOS_SW_RESET3, 0);
9296
9297 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9298 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9299 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9300 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9301 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9302 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9303 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9304
9305 hevc_config_work_space_hw(hevc);
9306 decoder_hw_reset();
9307
9308 hevc->have_vps = 0;
9309 hevc->have_sps = 0;
9310 hevc->have_pps = 0;
9311
9312 hevc->have_valid_start_slice = 0;
9313
9314 if (get_double_write_mode(hevc) & 0x10)
9315 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9316			0x1 << 31 /* Enable NV21 reference read mode for MC */
9317 );
9318
9319 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9320 /* clear mailbox interrupt */
9321 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9322 /* enable mailbox interrupt */
9323 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9324 /* disable PSCALE for hardware sharing */
9325 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9326
9327 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9328
9329 WRITE_VREG(DEBUG_REG1, 0x0);
9330
9331 if ((error_handle_policy & 1) == 0) {
9332 if ((error_handle_policy & 4) == 0) {
9333 /* ucode auto mode, and do not check vps/sps/pps/idr */
9334 WRITE_VREG(NAL_SEARCH_CTL,
9335 0xc);
9336 } else {
9337 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9338 }
9339 } else {
9340 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9341 }
9342
9343 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9344 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9345 WRITE_VREG(NAL_SEARCH_CTL,
9346 READ_VREG(NAL_SEARCH_CTL)
9347 | ((parser_sei_enable & 0x7) << 17));
9348#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9349 WRITE_VREG(NAL_SEARCH_CTL,
9350 READ_VREG(NAL_SEARCH_CTL) |
9351 ((parser_dolby_vision_enable & 0x1) << 20));
9352#endif
9353 config_decode_mode(hevc);
9354 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9355
9356 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9357 /* amhevc_disable(); */
9358 /* return -EBUSY; */
9359 /* } */
9360#if 0
9361 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9362 int ii;
9363
9364 for (ii = 0; ii < 4; ii++) {
9365 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9366 hevc_print(hevc, 0,
9367 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9368 }
9369 if (((i + ii) & 0xf) == 0)
9370 hevc_print(hevc, 0, "\n");
9371 }
9372#endif
9373 init_pic_list_hw(hevc);
9374
9375 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9376 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9377
9378#ifdef SWAP_HEVC_UCODE
9379 if (!tee_enabled() && hevc->is_swap &&
9380 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9381 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9382 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9383 }
9384#endif
9385 amhevc_start();
9386
9387 /* skip, search next start code */
9388 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9389 hevc->skip_flag = 1;
9390#ifdef ERROR_HANDLE_DEBUG
9391 if (dbg_nal_skip_count & 0x20000) {
9392 dbg_nal_skip_count &= ~0x20000;
9393 mutex_unlock(&vh265_mutex);
9394 return ret;
9395 }
9396#endif
9397 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9398	/* Interrupt the Amrisc to execute */
9399 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9400#ifdef MULTI_INSTANCE_SUPPORT
9401 if (!hevc->m_ins_flag)
9402#endif
9403 hevc->first_pic_after_recover = 1;
9404 mutex_unlock(&vh265_mutex);
9405 return ret;
9406}
9407
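/*
 * dump_aux_buf(): debug dump of the prefix/suffix aux buffers.  Sizes are
 * taken from HEVC_AUX_DATA_SIZE: the high 16 bits give the prefix size and
 * the low 16 bits the suffix size, both in units of 16 bytes.
 */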
9408static void dump_aux_buf(struct hevc_state_s *hevc)
9409{
9410 int i;
9411 unsigned short *aux_adr =
9412 (unsigned short *)
9413 hevc->aux_addr;
9414 unsigned int aux_size =
9415 (READ_VREG(HEVC_AUX_DATA_SIZE)
9416 >> 16) << 4;
9417
9418 if (hevc->prefix_aux_size > 0) {
9419 hevc_print(hevc, 0,
9420 "prefix aux: (size %d)\n",
9421 aux_size);
9422 for (i = 0; i <
9423 (aux_size >> 1); i++) {
9424 hevc_print_cont(hevc, 0,
9425 "%04x ",
9426 *(aux_adr + i));
9427 if (((i + 1) & 0xf)
9428 == 0)
9429 hevc_print_cont(hevc,
9430 0, "\n");
9431 }
9432 }
9433 if (hevc->suffix_aux_size > 0) {
9434 aux_adr = (unsigned short *)
9435 (hevc->aux_addr +
9436 hevc->prefix_aux_size);
9437 aux_size =
9438 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9439 << 4;
9440 hevc_print(hevc, 0,
9441 "suffix aux: (size %d)\n",
9442 aux_size);
9443 for (i = 0; i <
9444 (aux_size >> 1); i++) {
9445 hevc_print_cont(hevc, 0,
9446 "%04x ", *(aux_adr + i));
9447 if (((i + 1) & 0xf) == 0)
9448 hevc_print_cont(hevc, 0, "\n");
9449 }
9450 }
9451}
9452
9453#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9454static void dolby_get_meta(struct hevc_state_s *hevc)
9455{
9456 struct vdec_s *vdec = hw_to_vdec(hevc);
9457
9458 if (get_dbg_flag(hevc) &
9459 H265_DEBUG_BUFMGR_MORE)
9460 dump_aux_buf(hevc);
9461 if (vdec->dolby_meta_with_el || vdec->slave) {
9462 set_aux_data(hevc,
9463 hevc->cur_pic, 0, 0);
9464 } else if (vdec->master) {
9465 struct hevc_state_s *hevc_ba =
9466 (struct hevc_state_s *)
9467 vdec->master->private;
9468 /*do not use hevc_ba*/
9469 set_aux_data(hevc,
9470 hevc_ba->cur_pic,
9471 0, 1);
9472 set_aux_data(hevc,
9473 hevc->cur_pic, 0, 2);
9474 }
9475}
9476#endif
9477
9478static void read_decode_info(struct hevc_state_s *hevc)
9479{
9480 uint32_t decode_info =
9481 READ_HREG(HEVC_DECODE_INFO);
9482 hevc->start_decoding_flag |=
9483 (decode_info & 0xff);
9484 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9485}
9486
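/*
 * vh265_isr_thread_fn(): threaded half of the decoder interrupt.  It first
 * applies the error_handle_policy paths (mark the current picture, restart
 * the NAL search, or run hevc_recover()), then dispatches on dec_status:
 * buffer-empty states map to DEC_RESULT_AGAIN / DEC_RESULT_GET_DATA, and
 * HEVC_DECPIC_DATA_DONE runs the picture-done path below.
 */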
9487static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9488{
9489 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9490 unsigned int dec_status = hevc->dec_status;
9491 int i, ret;
9492
9493#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9494 struct vdec_s *vdec = hw_to_vdec(hevc);
9495#endif
9496
9497 if (hevc->eos)
9498 return IRQ_HANDLED;
9499 if (
9500#ifdef MULTI_INSTANCE_SUPPORT
9501 (!hevc->m_ins_flag) &&
9502#endif
9503 hevc->error_flag == 1) {
9504 if ((error_handle_policy & 0x10) == 0) {
9505 if (hevc->cur_pic) {
9506 int current_lcu_idx =
9507 READ_VREG(HEVC_PARSER_LCU_START)
9508 & 0xffffff;
9509 if (current_lcu_idx <
9510 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9511 hevc->cur_pic->error_mark = 1;
9512
9513 }
9514 }
9515 if ((error_handle_policy & 1) == 0) {
9516 hevc->error_skip_nal_count = 1;
9517			/* manual NAL search: skip error_skip_nal_count
9518			 * NALs, then trigger the HEVC_NAL_SEARCH_DONE irq
9519 */
9520 WRITE_VREG(NAL_SEARCH_CTL,
9521 (error_skip_nal_count << 4) | 0x1);
9522 } else {
9523 hevc->error_skip_nal_count = error_skip_nal_count;
9524 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9525 }
9526 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9527#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9528 || vdec->master
9529 || vdec->slave
9530#endif
9531 ) {
9532 WRITE_VREG(NAL_SEARCH_CTL,
9533 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9534 }
9535 WRITE_VREG(NAL_SEARCH_CTL,
9536 READ_VREG(NAL_SEARCH_CTL)
9537 | ((parser_sei_enable & 0x7) << 17));
9538#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9539 WRITE_VREG(NAL_SEARCH_CTL,
9540 READ_VREG(NAL_SEARCH_CTL) |
9541 ((parser_dolby_vision_enable & 0x1) << 20));
9542#endif
9543 config_decode_mode(hevc);
9544 /* search new nal */
9545 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9546		/* Interrupt the Amrisc to execute */
9547 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9548
9549 /* hevc_print(hevc, 0,
9550 *"%s: error handle\n", __func__);
9551 */
9552 hevc->error_flag = 2;
9553 return IRQ_HANDLED;
9554 } else if (
9555#ifdef MULTI_INSTANCE_SUPPORT
9556 (!hevc->m_ins_flag) &&
9557#endif
9558 hevc->error_flag == 3) {
9559 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9560 hevc_recover(hevc);
9561 hevc->error_flag = 0;
9562
9563 if ((error_handle_policy & 0x10) == 0) {
9564 if (hevc->cur_pic) {
9565 int current_lcu_idx =
9566 READ_VREG(HEVC_PARSER_LCU_START)
9567 & 0xffffff;
9568 if (current_lcu_idx <
9569 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9570 hevc->cur_pic->error_mark = 1;
9571
9572 }
9573 }
9574 if ((error_handle_policy & 1) == 0) {
9575			/* need to skip some data when
9576			 * error_flag == 3 is triggered,
9577			 * to avoid hevc_recover() being called
9578			 * many times at the same
9579			 * bitstream position
9580			 */
9581 hevc->error_skip_nal_count = 1;
9582 /* manually search for NALs, skip error_skip_nal_count
9583 * NALs, then trigger the HEVC_NAL_SEARCH_DONE irq
9584 */
9585 WRITE_VREG(NAL_SEARCH_CTL,
9586 (error_skip_nal_count << 4) | 0x1);
9587 }
9588
9589 if ((error_handle_policy & 0x2) == 0) {
9590 hevc->have_vps = 1;
9591 hevc->have_sps = 1;
9592 hevc->have_pps = 1;
9593 }
9594 return IRQ_HANDLED;
9595 }
9596 if (!hevc->m_ins_flag) {
9597 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9598 if ((hevc->shift_byte_count_lo & (1 << 31))
9599 && ((i & (1 << 31)) == 0))
9600 hevc->shift_byte_count_hi++;
9601 hevc->shift_byte_count_lo = i;
9602 }
9603#ifdef MULTI_INSTANCE_SUPPORT
9604 mutex_lock(&hevc->chunks_mutex);
9605 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9606 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9607 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9608 && (hevc->chunk)) {
9609 hevc->cur_pic->pts = hevc->chunk->pts;
9610 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9611 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9612 }
9613 mutex_unlock(&hevc->chunks_mutex);
9614
9615 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9616 dec_status == HEVC_DECODE_BUFEMPTY2) {
9617 if (hevc->m_ins_flag) {
9618 read_decode_info(hevc);
9619 if (vdec_frame_based(hw_to_vdec(hevc))) {
9620 hevc->empty_flag = 1;
9621 goto pic_done;
9622 } else {
9623 if (
9624#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9625 vdec->master ||
9626 vdec->slave ||
9627#endif
9628 (data_resend_policy & 0x1)) {
9629 hevc->dec_result = DEC_RESULT_AGAIN;
9630 amhevc_stop();
9631 restore_decode_state(hevc);
9632 } else
9633 hevc->dec_result = DEC_RESULT_GET_DATA;
9634 }
9635 reset_process_time(hevc);
9636 vdec_schedule_work(&hevc->work);
9637 }
9638 return IRQ_HANDLED;
9639 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9640 (dec_status == HEVC_NAL_DECODE_DONE)
9641 ) {
9642 if (hevc->m_ins_flag) {
9643 read_decode_info(hevc);
9644 if (vdec_frame_based(hw_to_vdec(hevc))) {
9645 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9646 hevc->empty_flag = 1;
9647 goto pic_done;
9648 } else {
9649 hevc->dec_result = DEC_RESULT_AGAIN;
9650 amhevc_stop();
9651 restore_decode_state(hevc);
9652 }
9653
9654 reset_process_time(hevc);
9655 vdec_schedule_work(&hevc->work);
9656 }
9657
9658 return IRQ_HANDLED;
9659 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9660 if (hevc->m_ins_flag) {
9661 struct PIC_s *pic;
9662 struct PIC_s *pic_display;
9663 int decoded_poc;
9664#ifdef DETREFILL_ENABLE
9665 if (hevc->is_swap &&
9666 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9667 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9668 && READ_VREG(HEVC_SAO_DBG_MODE0))
9669 hevc->delrefill_check = 2;
9670 }
9671#endif
9672 hevc->empty_flag = 0;
9673pic_done:
9674 if (input_frame_based(hw_to_vdec(hevc)) &&
9675 frmbase_cont_bitlevel != 0 &&
9676 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9677 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9678 > frmbase_cont_bitlevel)) {
9679 /* handle the case of multiple pictures in one packet */
9680 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9681 "%s has more data (index=%d, size=0x%x, shiftcnt=0x%x)\n",
9682 __func__,
9683 hevc->decode_idx, hevc->decode_size,
9684 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9685 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9686 start_process_time(hevc);
9687 return IRQ_HANDLED;
9688 }
9689
9690 read_decode_info(hevc);
9691 get_picture_qos_info(hevc);
9692#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9693 hevc->start_parser_type = 0;
9694 hevc->switch_dvlayer_flag = 0;
9695#endif
9696 hevc->decoded_poc = hevc->curr_POC;
9697 hevc->decoding_pic = NULL;
9698 hevc->dec_result = DEC_RESULT_DONE;
9699#ifdef DETREFILL_ENABLE
9700 if (hevc->is_swap &&
9701 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9702 if (hevc->delrefill_check != 2)
9703#endif
9704
9705 amhevc_stop();
9706
9707 reset_process_time(hevc);
9708
9709 if (hevc->vf_pre_count == 0) {
9710 decoded_poc = hevc->curr_POC;
9711 pic = get_pic_by_POC(hevc, decoded_poc);
9712 if (pic && (pic->POC != INVALID_POC)) {
9713 /*PB skip control */
9714 if (pic->error_mark == 0
9715 && hevc->PB_skip_mode == 1) {
9716 /* start decoding after
9717 * first I
9718 */
9719 hevc->ignore_bufmgr_error |= 0x1;
9720 }
9721 if (hevc->ignore_bufmgr_error & 1) {
9722 if (hevc->PB_skip_count_after_decoding > 0) {
9723 hevc->PB_skip_count_after_decoding--;
9724 } else {
9725 /* start displaying */
9726 hevc->ignore_bufmgr_error |= 0x2;
9727 }
9728 }
9729 if (hevc->mmu_enable
9730 && ((hevc->double_write_mode & 0x10) == 0)) {
9731 if (!hevc->m_ins_flag) {
9732 hevc->used_4k_num =
9733 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9734
9735 if ((!is_skip_decoding(hevc, pic)) &&
9736 (hevc->used_4k_num >= 0) &&
9737 (hevc->cur_pic->scatter_alloc
9738 == 1)) {
9739 hevc_print(hevc,
9740 H265_DEBUG_BUFMGR_MORE,
9741 "%s pic index %d scatter_alloc %d page_start %d\n",
9742 "decoder_mmu_box_free_idx_tail",
9743 hevc->cur_pic->index,
9744 hevc->cur_pic->scatter_alloc,
9745 hevc->used_4k_num);
9746 decoder_mmu_box_free_idx_tail(
9747 hevc->mmu_box,
9748 hevc->cur_pic->index,
9749 hevc->used_4k_num);
9750 hevc->cur_pic->scatter_alloc
9751 = 2;
9752 }
9753 hevc->used_4k_num = -1;
9754 }
9755 }
9756
9757 pic->output_mark = 1;
9758 pic->recon_mark = 1;
9759 }
9760 check_pic_decoded_error(hevc,
9761 READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff);
9762 if (hevc->cur_pic != NULL &&
9763 (READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff) == 0
9764 && (hevc->lcu_x_num * hevc->lcu_y_num != 1))
9765 hevc->cur_pic->error_mark = 1;
9766force_output:
9767 pic_display = output_pic(hevc, 1);
9768 if (pic_display) {
9769 if ((pic_display->error_mark &&
9770 ((hevc->ignore_bufmgr_error &
9771 0x2) == 0))
9772 || (get_dbg_flag(hevc) &
9773 H265_DEBUG_DISPLAY_CUR_FRAME)
9774 || (get_dbg_flag(hevc) &
9775 H265_DEBUG_NO_DISPLAY)) {
9776 pic_display->output_ready = 0;
9777 if (get_dbg_flag(hevc) &
9778 H265_DEBUG_BUFMGR) {
9779 hevc_print(hevc, 0,
9780 "[BM] Display: POC %d, ",
9781 pic_display->POC);
9782 hevc_print_cont(hevc, 0,
9783 "decoding index %d ==> ",
9784 pic_display->
9785 decode_idx);
9786 hevc_print_cont(hevc, 0,
9787 "Debug or err, recycle it\n");
9788 }
9789 } else {
9790 if (pic_display->
9791 slice_type != 2) {
9792 pic_display->output_ready = 0;
9793 } else {
9794 prepare_display_buf
9795 (hevc,
9796 pic_display);
9797 hevc->first_pic_flag = 1;
9798 }
9799 }
9800 }
9801 }
9802
9803 vdec_schedule_work(&hevc->work);
9804 }
9805
9806 return IRQ_HANDLED;
9807#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9808 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9809 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9810 if (hevc->m_ins_flag) {
9811 unsigned char next_parser_type =
9812 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
9813 read_decode_info(hevc);
9814
9815 if (vdec->slave &&
9816 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9817 /*cur is base, found enhance*/
9818 struct hevc_state_s *hevc_el =
9819 (struct hevc_state_s *)
9820 vdec->slave->private;
9821 hevc->switch_dvlayer_flag = 1;
9822 hevc->no_switch_dvlayer_count = 0;
9823 hevc_el->start_parser_type =
9824 next_parser_type;
9825 hevc_print(hevc, H265_DEBUG_DV,
9826 "switch (poc %d) to el\n",
9827 hevc->cur_pic ?
9828 hevc->cur_pic->POC :
9829 INVALID_POC);
9830 } else if (vdec->master &&
9831 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
9832 /*cur is enhance, found base*/
9833 struct hevc_state_s *hevc_ba =
9834 (struct hevc_state_s *)
9835 vdec->master->private;
9836 hevc->switch_dvlayer_flag = 1;
9837 hevc->no_switch_dvlayer_count = 0;
9838 hevc_ba->start_parser_type =
9839 next_parser_type;
9840 hevc_print(hevc, H265_DEBUG_DV,
9841 "switch (poc %d) to bl\n",
9842 hevc->cur_pic ?
9843 hevc->cur_pic->POC :
9844 INVALID_POC);
9845 } else {
9846 hevc->switch_dvlayer_flag = 0;
9847 hevc->start_parser_type =
9848 next_parser_type;
9849 hevc->no_switch_dvlayer_count++;
9850 hevc_print(hevc, H265_DEBUG_DV,
9851 "%s: no_switch_dvlayer_count = %d\n",
9852 vdec->master ? "el" : "bl",
9853 hevc->no_switch_dvlayer_count);
9854 if (vdec->slave &&
9855 dolby_el_flush_th != 0 &&
9856 hevc->no_switch_dvlayer_count >
9857 dolby_el_flush_th) {
9858 struct hevc_state_s *hevc_el =
9859 (struct hevc_state_s *)
9860 vdec->slave->private;
9861 struct PIC_s *el_pic;
9862 check_pic_decoded_error(hevc_el,
9863 hevc_el->pic_decoded_lcu_idx);
9864 el_pic = get_pic_by_POC(hevc_el,
9865 hevc_el->curr_POC);
9866 hevc_el->curr_POC = INVALID_POC;
9867 hevc_el->m_pocRandomAccess = MAX_INT;
9868 flush_output(hevc_el, el_pic);
9869 hevc_el->decoded_poc = INVALID_POC; /*
9870 flush_output already called */
9871 hevc_el->decoding_pic = NULL;
9872 hevc->no_switch_dvlayer_count = 0;
9873 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
9874 hevc_print(hevc, 0,
9875 "no el anymore, flush_output el\n");
9876 }
9877 }
9878 hevc->decoded_poc = hevc->curr_POC;
9879 hevc->decoding_pic = NULL;
9880 hevc->dec_result = DEC_RESULT_DONE;
9881 amhevc_stop();
9882 reset_process_time(hevc);
9883 if (aux_data_is_avaible(hevc))
9884 dolby_get_meta(hevc);
9885 if (hevc->cur_pic->slice_type == 2 &&
9886 hevc->vf_pre_count == 0) {
9887 hevc_print(hevc, 0,
9888 "first slice_type %x no_switch_dvlayer_count %x\n",
9889 hevc->cur_pic->slice_type,
9890 hevc->no_switch_dvlayer_count);
9891 goto force_output;
9892 }
9893 vdec_schedule_work(&hevc->work);
9894 }
9895
9896 return IRQ_HANDLED;
9897#endif
9898 }
9899
9900#endif
9901
9902 if (dec_status == HEVC_SEI_DAT) {
9903 if (!hevc->m_ins_flag) {
9904 int payload_type =
9905 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
9906 int payload_size =
9907 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
9908 process_nal_sei(hevc,
9909 payload_type, payload_size);
9910 }
9911 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
9912 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
9913 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
9914 int parse_type = HEVC_DISCARD_NAL;
9915
9916 hevc->error_watchdog_count = 0;
9917 hevc->error_skip_nal_wt_cnt = 0;
9918#ifdef MULTI_INSTANCE_SUPPORT
9919 if (hevc->m_ins_flag)
9920 reset_process_time(hevc);
9921#endif
9922 if (slice_parse_begin > 0 &&
9923 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
9924 hevc_print(hevc, 0,
9925 "nal type %d, discard %d\n", naltype,
9926 slice_parse_begin);
9927 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
9928 slice_parse_begin--;
9929 }
9930 if (naltype == NAL_UNIT_EOS) {
9931 struct PIC_s *pic;
9932
9933 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
9934#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9935 if ((vdec->master || vdec->slave) &&
9936 aux_data_is_avaible(hevc)) {
9937 if (hevc->decoding_pic)
9938 dolby_get_meta(hevc);
9939 }
9940#endif
9941 check_pic_decoded_error(hevc,
9942 hevc->pic_decoded_lcu_idx);
9943 pic = get_pic_by_POC(hevc, hevc->curr_POC);
9944 hevc->curr_POC = INVALID_POC;
9945 /* add to fix RAP_B_Bossen_1 */
9946 hevc->m_pocRandomAccess = MAX_INT;
9947 flush_output(hevc, pic);
9948 clear_poc_flag(hevc);
9949 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
9950 /* Interrupt Amrisc to execute */
9951 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9952#ifdef MULTI_INSTANCE_SUPPORT
9953 if (hevc->m_ins_flag) {
9954 hevc->decoded_poc = INVALID_POC; /*
9955 flush_output already called */
9956 hevc->decoding_pic = NULL;
9957 hevc->dec_result = DEC_RESULT_DONE;
9958 amhevc_stop();
9959
9960 vdec_schedule_work(&hevc->work);
9961 }
9962#endif
9963 return IRQ_HANDLED;
9964 }
9965
9966 if (
9967#ifdef MULTI_INSTANCE_SUPPORT
9968 (!hevc->m_ins_flag) &&
9969#endif
9970 hevc->error_skip_nal_count > 0) {
9971 hevc_print(hevc, 0,
9972 "nal type %d, discard %d\n", naltype,
9973 hevc->error_skip_nal_count);
9974 hevc->error_skip_nal_count--;
9975 if (hevc->error_skip_nal_count == 0) {
9976 hevc_recover(hevc);
9977 hevc->error_flag = 0;
9978 if ((error_handle_policy & 0x2) == 0) {
9979 hevc->have_vps = 1;
9980 hevc->have_sps = 1;
9981 hevc->have_pps = 1;
9982 }
9983 return IRQ_HANDLED;
9984 }
9985 } else if (naltype == NAL_UNIT_VPS) {
9986 parse_type = HEVC_NAL_UNIT_VPS;
9987 hevc->have_vps = 1;
9988#ifdef ERROR_HANDLE_DEBUG
9989 if (dbg_nal_skip_flag & 1)
9990 parse_type = HEVC_DISCARD_NAL;
9991#endif
9992 } else if (hevc->have_vps) {
9993 if (naltype == NAL_UNIT_SPS) {
9994 parse_type = HEVC_NAL_UNIT_SPS;
9995 hevc->have_sps = 1;
9996#ifdef ERROR_HANDLE_DEBUG
9997 if (dbg_nal_skip_flag & 2)
9998 parse_type = HEVC_DISCARD_NAL;
9999#endif
10000 } else if (naltype == NAL_UNIT_PPS) {
10001 parse_type = HEVC_NAL_UNIT_PPS;
10002 hevc->have_pps = 1;
10003#ifdef ERROR_HANDLE_DEBUG
10004 if (dbg_nal_skip_flag & 4)
10005 parse_type = HEVC_DISCARD_NAL;
10006#endif
10007 } else if (hevc->have_sps && hevc->have_pps) {
10008 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
10009
10010 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
10011 (naltype ==
10012 NAL_UNIT_CODED_SLICE_IDR_N_LP)
10013 || (naltype ==
10014 NAL_UNIT_CODED_SLICE_CRA)
10015 || (naltype ==
10016 NAL_UNIT_CODED_SLICE_BLA)
10017 || (naltype ==
10018 NAL_UNIT_CODED_SLICE_BLANT)
10019 || (naltype ==
10020 NAL_UNIT_CODED_SLICE_BLA_N_LP)
10021 ) {
10022 if (slice_parse_begin > 0) {
10023 hevc_print(hevc, 0,
10024 "discard %d, for debugging\n",
10025 slice_parse_begin);
10026 slice_parse_begin--;
10027 } else {
10028 parse_type = seg;
10029 }
10030 hevc->have_valid_start_slice = 1;
10031 } else if (naltype <=
10032 NAL_UNIT_CODED_SLICE_CRA
10033 && (hevc->have_valid_start_slice
10034 || (hevc->PB_skip_mode != 3))) {
10035 if (slice_parse_begin > 0) {
10036 hevc_print(hevc, 0,
10037 "discard %d, dd\n",
10038 slice_parse_begin);
10039 slice_parse_begin--;
10040 } else
10041 parse_type = seg;
10042
10043 }
10044 }
10045 }
10046 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
10047 && hevc->have_valid_start_slice &&
10048 hevc->error_flag == 0) {
10049 if ((get_dbg_flag(hevc) &
10050 H265_DEBUG_MAN_SEARCH_NAL) == 0
10051 /* && (!hevc->m_ins_flag)*/) {
10052 /* auto parser NAL; do not check
10053 * vps/sps/pps/idr
10054 */
10055 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
10056 }
10057
10058 if ((get_dbg_flag(hevc) &
10059 H265_DEBUG_NO_EOS_SEARCH_DONE)
10060#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10061 || vdec->master
10062 || vdec->slave
10063#endif
10064 ) {
10065 WRITE_VREG(NAL_SEARCH_CTL,
10066 READ_VREG(NAL_SEARCH_CTL) |
10067 0x10000);
10068 }
10069 WRITE_VREG(NAL_SEARCH_CTL,
10070 READ_VREG(NAL_SEARCH_CTL)
10071 | ((parser_sei_enable & 0x7) << 17));
10072#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10073 WRITE_VREG(NAL_SEARCH_CTL,
10074 READ_VREG(NAL_SEARCH_CTL) |
10075 ((parser_dolby_vision_enable & 0x1) << 20));
10076#endif
10077 config_decode_mode(hevc);
10078 }
10079
10080 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
10081 hevc_print(hevc, 0,
10082 "naltype = %d parse_type %d\n %d %d %d %d\n",
10083 naltype, parse_type, hevc->have_vps,
10084 hevc->have_sps, hevc->have_pps,
10085 hevc->have_valid_start_slice);
10086 }
10087
10088 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
10089 /* Interrupt Amrisc to execute */
10090 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10091#ifdef MULTI_INSTANCE_SUPPORT
10092 if (hevc->m_ins_flag)
10093 start_process_time(hevc);
10094#endif
10095 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
10096#ifdef MULTI_INSTANCE_SUPPORT
10097 if (hevc->m_ins_flag) {
10098 reset_process_time(hevc);
10099 read_decode_info(hevc);
10100
10101 }
10102#endif
10103 if (hevc->start_decoding_time > 0) {
10104 u32 process_time = 1000*
10105 (jiffies - hevc->start_decoding_time)/HZ;
10106 if (process_time > max_decoding_time)
10107 max_decoding_time = process_time;
10108 }
10109
10110 hevc->error_watchdog_count = 0;
10111 if (hevc->pic_list_init_flag == 2) {
10112 hevc->pic_list_init_flag = 3;
10113 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
10114 } else if (hevc->wait_buf == 0) {
10115 u32 vui_time_scale;
10116 u32 vui_num_units_in_tick;
10117 unsigned char reconfig_flag = 0;
10118
10119 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
10120 get_rpm_param(&hevc->param);
10121 else {
10122
10123 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
10124 int ii;
10125
10126 for (ii = 0; ii < 4; ii++) {
10127 hevc->param.l.data[i + ii] =
10128 hevc->rpm_ptr[i + 3
10129 - ii];
10130 }
10131 }
10132#ifdef SEND_LMEM_WITH_RPM
10133 check_head_error(hevc);
10134#endif
10135 }
10136 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
10137 hevc_print(hevc, 0,
10138 "rpm_param: (%d)\n", hevc->slice_idx);
10139 hevc->slice_idx++;
10140 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
10141 hevc_print_cont(hevc, 0,
10142 "%04x ", hevc->param.l.data[i]);
10143 if (((i + 1) & 0xf) == 0)
10144 hevc_print_cont(hevc, 0, "\n");
10145 }
10146
10147 hevc_print(hevc, 0,
10148 "vui_timing_info: %x, %x, %x, %x\n",
10149 hevc->param.p.vui_num_units_in_tick_hi,
10150 hevc->param.p.vui_num_units_in_tick_lo,
10151 hevc->param.p.vui_time_scale_hi,
10152 hevc->param.p.vui_time_scale_lo);
10153 }
10154
10155 if (hevc->is_used_v4l) {
10156 struct aml_vcodec_ctx *ctx =
10157 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
10158
10159 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
10160 struct aml_vdec_ps_infos ps;
10161
10162 hevc->frame_width = hevc->param.p.pic_width_in_luma_samples;
10163 hevc->frame_height = hevc->param.p.pic_height_in_luma_samples;
10164 ps.visible_width = hevc->frame_width;
10165 ps.visible_height = hevc->frame_height;
10166 ps.coded_width = ALIGN(hevc->frame_width, 32);
10167 ps.coded_height = ALIGN(hevc->frame_height, 32);
10168 ps.dpb_size = get_work_pic_num(hevc);
10169 hevc->v4l_params_parsed = true;
10170 /*notice the v4l2 codec.*/
10171 vdec_v4l_set_ps_infos(ctx, &ps);
10172 }
10173 }
10174
10175 if (
10176#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10177 vdec->master == NULL &&
10178 vdec->slave == NULL &&
10179#endif
10180 aux_data_is_avaible(hevc)
10181 ) {
10182
10183 if (get_dbg_flag(hevc) &
10184 H265_DEBUG_BUFMGR_MORE)
10185 dump_aux_buf(hevc);
10186 }
10187
10188 vui_time_scale =
10189 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
10190 hevc->param.p.vui_time_scale_lo;
10191 vui_num_units_in_tick =
10192 (u32)(hevc->param.
10193 p.vui_num_units_in_tick_hi << 16) |
10194 hevc->param.
10195 p.vui_num_units_in_tick_lo;
10196 if (hevc->bit_depth_luma !=
10197 ((hevc->param.p.bit_depth & 0xf) + 8)) {
10198 reconfig_flag = 1;
10199 hevc_print(hevc, 0, "Bit depth luma = %d\n",
10200 (hevc->param.p.bit_depth & 0xf) + 8);
10201 }
10202 if (hevc->bit_depth_chroma !=
10203 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
10204 reconfig_flag = 1;
10205 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
10206 ((hevc->param.p.bit_depth >> 4) &
10207 0xf) + 8);
10208 }
10209 hevc->bit_depth_luma =
10210 (hevc->param.p.bit_depth & 0xf) + 8;
10211 hevc->bit_depth_chroma =
10212 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
10213 bit_depth_luma = hevc->bit_depth_luma;
10214 bit_depth_chroma = hevc->bit_depth_chroma;
10215#ifdef SUPPORT_10BIT
10216 if (hevc->bit_depth_luma == 8 &&
10217 hevc->bit_depth_chroma == 8 &&
10218 enable_mem_saving)
10219 hevc->mem_saving_mode = 1;
10220 else
10221 hevc->mem_saving_mode = 0;
10222#endif
10223 if (reconfig_flag &&
10224 (get_double_write_mode(hevc) & 0x10) == 0)
10225 init_decode_head_hw(hevc);
10226
10227 if ((vui_time_scale != 0)
10228 && (vui_num_units_in_tick != 0)) {
10229 hevc->frame_dur =
10230 div_u64(96000ULL *
10231 vui_num_units_in_tick,
10232 vui_time_scale);
10233 if (hevc->get_frame_dur != true)
10234 vdec_schedule_work(
10235 &hevc->notify_work);
10236
10237 hevc->get_frame_dur = true;
10238#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10239 gvs->frame_dur = hevc->frame_dur;
10240#endif
10241 }
10242
10243 if (hevc->video_signal_type !=
10244 ((hevc->param.p.video_signal_type << 16)
10245 | hevc->param.p.color_description)) {
10246 u32 v = hevc->param.p.video_signal_type;
10247 u32 c = hevc->param.p.color_description;
10248#if 0
10249 if (v & 0x2000) {
10250 hevc_print(hevc, 0,
10251 "video_signal_type present:\n");
10252 hevc_print(hevc, 0, " %s %s\n",
10253 video_format_names[(v >> 10) & 7],
10254 ((v >> 9) & 1) ?
10255 "full_range" : "limited");
10256 if (v & 0x100) {
10257 hevc_print(hevc, 0,
10258 " color_description present:\n");
10259 hevc_print(hevc, 0,
10260 " color_primarie = %s\n",
10261 color_primaries_names
10262 [v & 0xff]);
10263 hevc_print(hevc, 0,
10264 " transfer_characteristic = %s\n",
10265 transfer_characteristics_names
10266 [(c >> 8) & 0xff]);
10267 hevc_print(hevc, 0,
10268 " matrix_coefficient = %s\n",
10269 matrix_coeffs_names[c & 0xff]);
10270 }
10271 }
10272#endif
10273 hevc->video_signal_type = (v << 16) | c;
10274 video_signal_type = hevc->video_signal_type;
10275 }
10276
10277 if (use_cma &&
10278 (hevc->param.p.slice_segment_address == 0)
10279 && (hevc->pic_list_init_flag == 0)) {
10280 int log = hevc->param.p.log2_min_coding_block_size_minus3;
10281 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
10282
10283 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10284 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
10285 hevc->lcu_size = 1 << (log + 3 + log_s);
10286 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
10287 if (hevc->pic_w == 0 || hevc->pic_h == 0
10288 || hevc->lcu_size == 0
10289 || is_oversize(hevc->pic_w, hevc->pic_h)
10290 || (!hevc->skip_first_nal &&
10291 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
10292 /* skip search next start code */
10293 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10294 & (~0x2));
10295 if (!hevc->skip_first_nal &&
10296 (hevc->pic_h == 96) && (hevc->pic_w == 160))
10297 hevc->skip_first_nal = 1;
10298 hevc->skip_flag = 1;
10299 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10300 /* Interrupt Amrisc to execute */
10301 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10302#ifdef MULTI_INSTANCE_SUPPORT
10303 if (hevc->m_ins_flag)
10304 start_process_time(hevc);
10305#endif
10306 } else {
10307 hevc->sps_num_reorder_pics_0 =
10308 hevc->param.p.sps_num_reorder_pics_0;
10309 hevc->pic_list_init_flag = 1;
10310#ifdef MULTI_INSTANCE_SUPPORT
10311 if (hevc->m_ins_flag) {
10312 vdec_schedule_work(&hevc->work);
10313 } else
10314#endif
10315 up(&h265_sema);
10316 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10317 }
10318 return IRQ_HANDLED;
10319 }
10320
10321}
10322 ret =
10323 hevc_slice_segment_header_process(hevc,
10324 &hevc->param, decode_pic_begin);
10325 if (ret < 0) {
10326#ifdef MULTI_INSTANCE_SUPPORT
10327 if (hevc->m_ins_flag) {
10328 hevc->wait_buf = 0;
10329 hevc->dec_result = DEC_RESULT_AGAIN;
10330 amhevc_stop();
10331 restore_decode_state(hevc);
10332 reset_process_time(hevc);
10333 vdec_schedule_work(&hevc->work);
10334 return IRQ_HANDLED;
10335 }
10336#else
10337 ;
10338#endif
10339 } else if (ret == 0) {
10340 if ((hevc->new_pic) && (hevc->cur_pic)) {
10341 hevc->cur_pic->stream_offset =
10342 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10343 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10344 "read stream_offset = 0x%x\n",
10345 hevc->cur_pic->stream_offset);
10346 hevc->cur_pic->aspect_ratio_idc =
10347 hevc->param.p.aspect_ratio_idc;
10348 hevc->cur_pic->sar_width =
10349 hevc->param.p.sar_width;
10350 hevc->cur_pic->sar_height =
10351 hevc->param.p.sar_height;
10352 }
10353
10354 WRITE_VREG(HEVC_DEC_STATUS_REG,
10355 HEVC_CODED_SLICE_SEGMENT_DAT);
10356 /* Interrupt Amrisc to execute */
10357 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10358
10359 hevc->start_decoding_time = jiffies;
10360#ifdef MULTI_INSTANCE_SUPPORT
10361 if (hevc->m_ins_flag)
10362 start_process_time(hevc);
10363#endif
10364#if 1
10365 /*to do..., copy aux data to hevc->cur_pic*/
10366#endif
10367#ifdef MULTI_INSTANCE_SUPPORT
10368 } else if (hevc->m_ins_flag) {
10369 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10370 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10371 __func__, ret);
10372 hevc->decoded_poc = INVALID_POC;
10373 hevc->decoding_pic = NULL;
10374 hevc->dec_result = DEC_RESULT_DONE;
10375 amhevc_stop();
10376 reset_process_time(hevc);
10377 vdec_schedule_work(&hevc->work);
10378#endif
10379 } else {
10380 /* skip, search next start code */
10381#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10382 gvs->drop_frame_count++;
10383#endif
10384 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10385 hevc->skip_flag = 1;
10386 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10387 /* Interrupt Amrisc to execute */
10388 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10389 }
10390
10391 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10392 hevc_print(hevc, 0 , "hevc decode oversize !!\n");
10393#ifdef MULTI_INSTANCE_SUPPORT
10394 if (!hevc->m_ins_flag)
10395 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10396 H265_DEBUG_DIS_SYS_ERROR_PROC);
10397#endif
10398 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10399 }
10400 return IRQ_HANDLED;
10401}
10402
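/*
 * Clear HEVC_SHIFT_STATUS and poll bit 1 of HEVC_STREAM_CONTROL until it
 * clears, giving up after roughly two seconds.
 */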
10403static void wait_hevc_search_done(struct hevc_state_s *hevc)
10404{
10405 int count = 0;
10406 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10407 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10408 msleep(20);
10409 count++;
10410 if (count > 100) {
10411 hevc_print(hevc, 0, "%s timeout\n", __func__);
10412 break;
10413 }
10414 }
10415}
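
/*
 * Top half of the HEVC interrupt: latch HEVC_DEC_STATUS_REG, handle the
 * ucode debug tags in DEBUG_REG1 (LMEM dump and pause points) and the
 * non-multi-instance over-decode case, then wake the threaded handler.
 */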
10416static irqreturn_t vh265_isr(int irq, void *data)
10417{
10418 int i, temp;
10419 unsigned int dec_status;
10420 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10421 u32 debug_tag;
10422 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10423
10424 if (hevc->init_flag == 0)
10425 return IRQ_HANDLED;
10426 hevc->dec_status = dec_status;
10427 if (is_log_enable(hevc))
10428 add_log(hevc,
10429 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10430 dec_status, READ_HREG(HEVC_DECODE_INFO),
10431 READ_VREG(HEVC_MPRED_CURR_LCU),
10432 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10433 READ_VREG(HEVC_SHIFT_STATUS));
10434
10435 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10436 hevc_print(hevc, 0,
10437 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10438 dec_status, READ_HREG(HEVC_DECODE_INFO),
10439 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10440 READ_VREG(HEVC_SHIFT_STATUS));
10441
10442 debug_tag = READ_HREG(DEBUG_REG1);
10443 if (debug_tag & 0x10000) {
10444 hevc_print(hevc, 0,
10445 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10446
10447 if (hevc->mmu_enable)
10448 temp = 0x500;
10449 else
10450 temp = 0x400;
10451 for (i = 0; i < temp; i += 4) {
10452 int ii;
10453 if ((i & 0xf) == 0)
10454 hevc_print_cont(hevc, 0, "%03x: ", i);
10455 for (ii = 0; ii < 4; ii++) {
10456 hevc_print_cont(hevc, 0, "%04x ",
10457 hevc->lmem_ptr[i + 3 - ii]);
10458 }
10459 if (((i + ii) & 0xf) == 0)
10460 hevc_print_cont(hevc, 0, "\n");
10461 }
10462
10463 if (((udebug_pause_pos & 0xffff)
10464 == (debug_tag & 0xffff)) &&
10465 (udebug_pause_decode_idx == 0 ||
10466 udebug_pause_decode_idx == hevc->decode_idx) &&
10467 (udebug_pause_val == 0 ||
10468 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10469 udebug_pause_pos &= 0xffff;
10470 hevc->ucode_pause_pos = udebug_pause_pos;
10471 }
10472 else if (debug_tag & 0x20000)
10473 hevc->ucode_pause_pos = 0xffffffff;
10474 if (hevc->ucode_pause_pos)
10475 reset_process_time(hevc);
10476 else
10477 WRITE_HREG(DEBUG_REG1, 0);
10478 } else if (debug_tag != 0) {
10479 hevc_print(hevc, 0,
10480 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10481 READ_HREG(DEBUG_REG2),
10482 READ_VREG(HEVC_STREAM_LEVEL),
10483 READ_VREG(HEVC_STREAM_WR_PTR),
10484 READ_VREG(HEVC_STREAM_RD_PTR));
10485 if (((udebug_pause_pos & 0xffff)
10486 == (debug_tag & 0xffff)) &&
10487 (udebug_pause_decode_idx == 0 ||
10488 udebug_pause_decode_idx == hevc->decode_idx) &&
10489 (udebug_pause_val == 0 ||
10490 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10491 udebug_pause_pos &= 0xffff;
10492 hevc->ucode_pause_pos = udebug_pause_pos;
10493 }
10494 if (hevc->ucode_pause_pos)
10495 reset_process_time(hevc);
10496 else
10497 WRITE_HREG(DEBUG_REG1, 0);
10498 return IRQ_HANDLED;
10499 }
10500
10501
10502 if (hevc->pic_list_init_flag == 1)
10503 return IRQ_HANDLED;
10504
10505 if (!hevc->m_ins_flag) {
10506 if (dec_status == HEVC_OVER_DECODE) {
10507 hevc->over_decode = 1;
10508 hevc_print(hevc, 0,
10509 "isr: over decode\n");
10510 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10511 return IRQ_HANDLED;
10512 }
10513 }
10514
10515 return IRQ_WAKE_THREAD;
10516
10517}
10518
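/*
 * Worker that re-evaluates the decoder clock when the stream resolution
 * or frame rate changes; saved_resolution caches width * height * fps so
 * the check in the timer callback stays cheap.
 */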
10519static void vh265_set_clk(struct work_struct *work)
10520{
10521 struct hevc_state_s *hevc = container_of(work,
10522 struct hevc_state_s, set_clk_work);
10523
10524 int fps = 96000 / hevc->frame_dur;
10525
10526 if (hevc_source_changed(VFORMAT_HEVC,
10527 hevc->frame_width, hevc->frame_height, fps) > 0)
10528 hevc->saved_resolution = hevc->frame_width *
10529 hevc->frame_height * fps;
10530}
10531
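/*
 * Periodic watchdog, re-armed every PUT_INTERVAL jiffies: detects decode
 * timeouts in multi-instance mode, drives the error and NAL-skip
 * watchdogs in single-instance mode, services a few debug hooks and
 * schedules set_clk_work when the resolution or frame rate changes.
 */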
10532static void vh265_check_timer_func(unsigned long arg)
10533{
10534 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10535 struct timer_list *timer = &hevc->timer;
10536 unsigned char empty_flag;
10537 unsigned int buf_level;
10538
10539 enum receviver_start_e state = RECEIVER_INACTIVE;
10540
10541 if (hevc->init_flag == 0) {
10542 if (hevc->stat & STAT_TIMER_ARM) {
10543 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10544 }
10545 return;
10546 }
10547#ifdef MULTI_INSTANCE_SUPPORT
10548 if (hevc->m_ins_flag &&
10549 (get_dbg_flag(hevc) &
10550 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10551 hw_to_vdec(hevc)->next_status ==
10552 VDEC_STATUS_DISCONNECTED) {
10553 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10554 vdec_schedule_work(&hevc->work);
10555 hevc_print(hevc,
10556 0, "vdec requested to be disconnected\n");
10557 return;
10558 }
10559
10560 if (hevc->m_ins_flag) {
10561 if ((input_frame_based(hw_to_vdec(hevc)) ||
10562 (READ_VREG(HEVC_STREAM_LEVEL) > 0xb0)) &&
10563 ((get_dbg_flag(hevc) &
10564 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10565 (decode_timeout_val > 0) &&
10566 (hevc->start_process_time > 0) &&
10567 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10568 > decode_timeout_val)
10569 ) {
10570 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10571 int current_lcu_idx =
10572 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10573 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10574 if (hevc->last_lcu_idx == current_lcu_idx) {
10575 if (hevc->decode_timeout_count > 0)
10576 hevc->decode_timeout_count--;
10577 if (hevc->decode_timeout_count == 0)
10578 timeout_process(hevc);
10579 } else
10580 restart_process_time(hevc);
10581 hevc->last_lcu_idx = current_lcu_idx;
10582 } else {
10583 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10584 timeout_process(hevc);
10585 }
10586 }
10587 } else {
10588#endif
10589 if (hevc->m_ins_flag == 0 &&
10590 vf_get_receiver(hevc->provider_name)) {
10591 state =
10592 vf_notify_receiver(hevc->provider_name,
10593 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10594 NULL);
10595 if ((state == RECEIVER_STATE_NULL)
10596 || (state == RECEIVER_STATE_NONE))
10597 state = RECEIVER_INACTIVE;
10598 } else
10599 state = RECEIVER_INACTIVE;
10600
10601 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10602 /* error watchdog */
10603 if (hevc->m_ins_flag == 0 &&
10604 (empty_flag == 0)
10605 && (hevc->pic_list_init_flag == 0
10606 || hevc->pic_list_init_flag
10607 == 3)) {
10608 /* decoder has input */
10609 if ((get_dbg_flag(hevc) &
10610 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10611
10612 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10613 /* receiver has no buffer to recycle */
10614 if ((state == RECEIVER_INACTIVE) &&
10615 (kfifo_is_empty(&hevc->display_q) &&
10616 buf_level > 0x200)
10617 ) {
10618 if (hevc->error_flag == 0) {
10619 hevc->error_watchdog_count++;
10620 if (hevc->error_watchdog_count ==
10621 error_handle_threshold) {
10622 hevc_print(hevc, 0,
10623 "H265 dec err local reset.\n");
10624 hevc->error_flag = 1;
10625 hevc->error_watchdog_count = 0;
10626 hevc->error_skip_nal_wt_cnt = 0;
10627 hevc->
10628 error_system_watchdog_count++;
10629 WRITE_VREG
10630 (HEVC_ASSIST_MBOX0_IRQ_REG,
10631 0x1);
10632 }
10633 } else if (hevc->error_flag == 2) {
10634 int th =
10635 error_handle_nal_skip_threshold;
10636 hevc->error_skip_nal_wt_cnt++;
10637 if (hevc->error_skip_nal_wt_cnt
10638 == th) {
10639 hevc->error_flag = 3;
10640 hevc->error_watchdog_count = 0;
10641 hevc->
10642 error_skip_nal_wt_cnt = 0;
10643 WRITE_VREG
10644 (HEVC_ASSIST_MBOX0_IRQ_REG,
10645 0x1);
10646 }
10647 }
10648 }
10649 }
10650
10651 if ((get_dbg_flag(hevc)
10652 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10653 /* receiver has no buffer to recycle */
10654 if ((state == RECEIVER_INACTIVE) &&
10655 (kfifo_is_empty(&hevc->display_q))
10656 ) { /* no buffer to recycle */
10657 if ((get_dbg_flag(hevc) &
10658 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10659 0)
10660 hevc->error_system_watchdog_count++;
10661 if (hevc->error_system_watchdog_count ==
10662 error_handle_system_threshold) {
10663 /* and it lasts for a while */
10664 hevc_print(hevc, 0,
10665 "H265 dec fatal error watchdog.\n");
10666 hevc->
10667 error_system_watchdog_count = 0;
10668 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10669 }
10670 }
10671 } else {
10672 hevc->error_watchdog_count = 0;
10673 hevc->error_system_watchdog_count = 0;
10674 }
10675#ifdef MULTI_INSTANCE_SUPPORT
10676 }
10677#endif
10678 if ((hevc->ucode_pause_pos != 0) &&
10679 (hevc->ucode_pause_pos != 0xffffffff) &&
10680 udebug_pause_pos != hevc->ucode_pause_pos) {
10681 hevc->ucode_pause_pos = 0;
10682 WRITE_HREG(DEBUG_REG1, 0);
10683 }
10684
10685 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10686 dump_pic_list(hevc);
10687 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10688 }
10689 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10690 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10691 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10692 }
10693#ifdef TEST_NO_BUF
10694 if (hevc->wait_buf)
10695 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10696#endif
10697 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
10698 hevc->error_skip_nal_count = error_skip_nal_count;
10699 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10700
10701 debug &= ~H265_DEBUG_HW_RESET;
10702 }
10703
10704#ifdef ERROR_HANDLE_DEBUG
10705 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
10706 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
10707 dbg_nal_skip_count &= ~0x10000;
10708 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10709 }
10710#endif
10711
10712 if (radr != 0) {
10713 if (rval != 0) {
10714 WRITE_VREG(radr, rval);
10715 hevc_print(hevc, 0,
10716 "WRITE_VREG(%x,%x)\n", radr, rval);
10717 } else
10718 hevc_print(hevc, 0,
10719 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
10720 rval = 0;
10721 radr = 0;
10722 }
10723 if (dbg_cmd != 0) {
10724 if (dbg_cmd == 1) {
10725 u32 disp_laddr;
10726
10727 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
10728 get_double_write_mode(hevc) == 0) {
10729 disp_laddr =
10730 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
10731 } else {
10732 struct canvas_s cur_canvas;
10733
10734 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
10735 & 0xff), &cur_canvas);
10736 disp_laddr = cur_canvas.addr;
10737 }
10738 hevc_print(hevc, 0,
10739 "current displayed buffer address %x\r\n",
10740 disp_laddr);
10741 }
10742 dbg_cmd = 0;
10743 }
10744 /* don't change the clock right after start. */
10745 if (hevc->m_ins_flag == 0 &&
10746 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
10747 hevc->frame_dur > 0 && hevc->saved_resolution !=
10748 hevc->frame_width * hevc->frame_height *
10749 (96000 / hevc->frame_dur))
10750 vdec_schedule_work(&hevc->set_clk_work);
10751
10752 mod_timer(timer, jiffies + PUT_INTERVAL);
10753}
10754
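/*
 * Kernel thread used on the use_cma path: waits on h265_sema to build the
 * picture list once the sequence parameters are known (pic_list_init_flag
 * == 1) and to tear it down again when uninit_list is set.
 */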
10755static int h265_task_handle(void *data)
10756{
10757 int ret = 0;
10758 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10759
10760 set_user_nice(current, -10);
10761 while (1) {
10762 if (use_cma == 0) {
10763 hevc_print(hevc, 0,
10764 "ERROR: use_cma can not be changed dynamically\n");
10765 }
10766 ret = down_interruptible(&h265_sema);
10767 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
10768 init_pic_list(hevc);
10769 init_pic_list_hw(hevc);
10770 init_buf_spec(hevc);
10771 hevc->pic_list_init_flag = 2;
10772 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
10773
10774 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10775
10776 }
10777
10778 if (hevc->uninit_list) {
10779 /*USE_BUF_BLOCK*/
10780 uninit_pic_list(hevc);
10781 hevc_print(hevc, 0, "uninit list\n");
10782 hevc->uninit_list = 0;
10783#ifdef USE_UNINIT_SEMA
10784 if (use_cma) {
10785 up(&hevc->h265_uninit_done_sema);
10786 while (!kthread_should_stop())
10787 msleep(1);
10788 break;
10789 }
10790#endif
10791 }
10792 }
10793
10794 return 0;
10795}
10796
10797void vh265_free_cmabuf(void)
10798{
10799 struct hevc_state_s *hevc = gHevc;
10800
10801 mutex_lock(&vh265_mutex);
10802
10803 if (hevc->init_flag) {
10804 mutex_unlock(&vh265_mutex);
10805 return;
10806 }
10807
10808 mutex_unlock(&vh265_mutex);
10809}
10810
10811#ifdef MULTI_INSTANCE_SUPPORT
10812int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
10813#else
10814int vh265_dec_status(struct vdec_info *vstatus)
10815#endif
10816{
10817#ifdef MULTI_INSTANCE_SUPPORT
10818 struct hevc_state_s *hevc =
10819 (struct hevc_state_s *)vdec->private;
10820#else
10821 struct hevc_state_s *hevc = gHevc;
10822#endif
10823 if (!hevc)
10824 return -1;
10825
10826 vstatus->frame_width = hevc->frame_width;
10827 vstatus->frame_height = hevc->frame_height;
10828 if (hevc->frame_dur != 0)
10829 vstatus->frame_rate = 96000 / hevc->frame_dur;
10830 else
10831 vstatus->frame_rate = -1;
10832 vstatus->error_count = 0;
10833 vstatus->status = hevc->stat | hevc->fatal_error;
10834#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10835 vstatus->frame_dur = hevc->frame_dur;
10836 if (gvs) {
10837 vstatus->bit_rate = gvs->bit_rate;
10838 vstatus->frame_data = gvs->frame_data;
10839 vstatus->total_data = gvs->total_data;
10840 vstatus->frame_count = gvs->frame_count;
10841 vstatus->error_frame_count = gvs->error_frame_count;
10842 vstatus->drop_frame_count = gvs->drop_frame_count;
10845 vstatus->samp_cnt = gvs->samp_cnt;
10846 vstatus->offset = gvs->offset;
10847 }
10848 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
10849 "%s", DRIVER_NAME);
10850#endif
10851 vstatus->ratio_control = hevc->ratio_control;
10852 return 0;
10853}
10854
10855int vh265_set_isreset(struct vdec_s *vdec, int isreset)
10856{
10857 is_reset = isreset;
10858 return 0;
10859}
10860
10861static int vh265_vdec_info_init(void)
10862{
10863 gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
10864 if (!gvs) {
10865 pr_info("vdec status struct allocation failed.\n");
10866 return -ENOMEM;
10867 }
10868 return 0;
10869}
10870
10871#if 0
10872static void H265_DECODE_INIT(void)
10873{
10874 /* enable hevc clocks */
10875 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
10876 /* *************************************************************** */
10877 /* Power ON HEVC */
10878 /* *************************************************************** */
10879 /* Powerup HEVC */
10880 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
10881 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
10882 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
10883 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
10884 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
10885 /* remove isolations */
10886 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
10887 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
10888
10889}
10890#endif
10891
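/*
 * Program HEVC_DECODE_MODE/HEVC_DECODE_MODE2: single vs. multi instance,
 * frame based vs. stream based and, for Dolby Vision dual-layer streams,
 * BL/EL routing, plus the start_decoding flags and the RPS set id.
 */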
10892static void config_decode_mode(struct hevc_state_s *hevc)
10893{
10894#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10895 struct vdec_s *vdec = hw_to_vdec(hevc);
10896#endif
10897 unsigned decode_mode;
10898 if (!hevc->m_ins_flag)
10899 decode_mode = DECODE_MODE_SINGLE;
10900 else if (vdec_frame_based(hw_to_vdec(hevc)))
10901 decode_mode =
10902 DECODE_MODE_MULTI_FRAMEBASE;
10903#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10904 else if (vdec->slave) {
10905 if (force_bypass_dvenl & 0x80000000)
10906 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
10907 else
10908 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
10909 if (dolby_meta_with_el && hevc->bypass_dvenl) {
10910 hevc->bypass_dvenl = 0;
10911 hevc_print(hevc, 0,
10912 "NOT support bypass_dvenl when meta_with_el\n");
10913 }
10914 if (hevc->bypass_dvenl)
10915 decode_mode =
10916 (hevc->start_parser_type << 8)
10917 | DECODE_MODE_MULTI_STREAMBASE;
10918 else
10919 decode_mode =
10920 (hevc->start_parser_type << 8)
10921 | DECODE_MODE_MULTI_DVBAL;
10922 } else if (vdec->master)
10923 decode_mode =
10924 (hevc->start_parser_type << 8)
10925 | DECODE_MODE_MULTI_DVENL;
10926#endif
10927 else
10928 decode_mode =
10929 DECODE_MODE_MULTI_STREAMBASE;
10930
10931 if (hevc->m_ins_flag)
10932 decode_mode |=
10933 (hevc->start_decoding_flag << 16);
10934 /* set MBX0 interrupt flag */
10935 decode_mode |= (0x80 << 24);
10936 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
10937 WRITE_VREG(HEVC_DECODE_MODE2,
10938 hevc->rps_set_id);
10939}
10940
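/*
 * Protocol init done before starting the AMRISC: configure the work space
 * and decoder hardware, set up the mailbox interrupt, pick the NAL search
 * mode from the debug flags and PB_skip_mode, and program the decode mode
 * and aux buffers.
 */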
10941static void vh265_prot_init(struct hevc_state_s *hevc)
10942{
10943#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10944 struct vdec_s *vdec = hw_to_vdec(hevc);
10945#endif
10946 /* H265_DECODE_INIT(); */
10947
10948 hevc_config_work_space_hw(hevc);
10949
10950 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
10951
10952 WRITE_VREG(HEVC_WAIT_FLAG, 1);
10953
10954 /* WRITE_VREG(P_HEVC_MPSR, 1); */
10955
10956 /* clear mailbox interrupt */
10957 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
10958
10959 /* enable mailbox interrupt */
10960 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
10961
10962 /* disable PSCALE for hardware sharing */
10963 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
10964
10965 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
10966
10967 if ((get_dbg_flag(hevc) &
10968 (H265_DEBUG_MAN_SKIP_NAL |
10969 H265_DEBUG_MAN_SEARCH_NAL))
10970 /*||hevc->m_ins_flag*/
10971 ) {
10972 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
10973 } else {
10974 /* check vps/sps/pps/i-slice in ucode */
10975 unsigned ctl_val = 0x8;
10976 if (hevc->PB_skip_mode == 0)
10977 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
10978 else if (hevc->PB_skip_mode == 3)
10979 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
10980 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
10981 }
10982 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
10983#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10984 || vdec->master
10985 || vdec->slave
10986#endif
10987 )
10988 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
10989
10990 WRITE_VREG(NAL_SEARCH_CTL,
10991 READ_VREG(NAL_SEARCH_CTL)
10992 | ((parser_sei_enable & 0x7) << 17));
10993#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10994 WRITE_VREG(NAL_SEARCH_CTL,
10995 READ_VREG(NAL_SEARCH_CTL) |
10996 ((parser_dolby_vision_enable & 0x1) << 20));
10997#endif
10998 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
10999
11000 config_decode_mode(hevc);
11001 config_aux_buf(hevc);
11002#ifdef SWAP_HEVC_UCODE
11003 if (!tee_enabled() && hevc->is_swap &&
11004 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11005 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11006 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11007 }
11008#endif
11009#ifdef DETREFILL_ENABLE
11010 if (hevc->is_swap &&
11011 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11012 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
11013 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
11014 }
11015#endif
11016}
11017
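/*
 * Software-side init: sanity-check the stream dimensions, derive frame
 * duration, aspect ratio and i-only mode from the amstream dec info, and
 * prime the vframe kfifos before calling hevc_local_init().
 */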
11018static int vh265_local_init(struct hevc_state_s *hevc)
11019{
11020 int i;
11021 int ret = -1;
11022
11023#ifdef DEBUG_PTS
11024 hevc->pts_missed = 0;
11025 hevc->pts_hit = 0;
11026#endif
11027
11028 hevc->saved_resolution = 0;
11029 hevc->get_frame_dur = false;
11030 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
11031 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
11032 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
11033 pr_info("over size : %u x %u.\n",
11034 hevc->frame_width, hevc->frame_height);
11035 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
11036 return ret;
11037 }
11038
11039 if (hevc->max_pic_w && hevc->max_pic_h) {
11040 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
11041 ((hevc->max_pic_w * hevc->max_pic_h) >
11042 1920 * 1088) ? true : false;
11043 } else {
11044 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
11045 ((hevc->frame_width * hevc->frame_height) >
11046 1920 * 1088) ? true : false;
11047 }
11048
11049 hevc->frame_dur =
11050 (hevc->vh265_amstream_dec_info.rate ==
11051 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
11052#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
11053 gvs->frame_dur = hevc->frame_dur;
11054#endif
11055 if (hevc->frame_width && hevc->frame_height)
11056 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
11057
11058 if (i_only_flag)
11059 hevc->i_only = i_only_flag & 0xff;
11060 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
11061 & 0x08)
11062 hevc->i_only = 0x7;
11063 else
11064 hevc->i_only = 0x0;
11065 hevc->error_watchdog_count = 0;
11066 hevc->sei_present_flag = 0;
11067 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
11068 & 0x40) >> 6;
11069 hevc_print(hevc, 0,
11070 "h265:pts_unstable=%d\n", pts_unstable);
11071/*
11072 *TODO:FOR VERSION
11073 */
11074 hevc_print(hevc, 0,
11075 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
11076 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
11077
11078 if (hevc->frame_dur == 0)
11079 hevc->frame_dur = 96000 / 24;
11080
11081 INIT_KFIFO(hevc->display_q);
11082 INIT_KFIFO(hevc->newframe_q);
11083 INIT_KFIFO(hevc->pending_q);
11084
11085 for (i = 0; i < VF_POOL_SIZE; i++) {
11086 const struct vframe_s *vf = &hevc->vfpool[i];
11087
11088 hevc->vfpool[i].index = -1;
11089 kfifo_put(&hevc->newframe_q, vf);
11090 }
11091
11092
11093 ret = hevc_local_init(hevc);
11094
11095 return ret;
11096}
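
/*
 * Decoder bring-up: load the (optionally swap-capable) firmware, and in
 * the single-instance case start the AMRISC, register the ISR and vframe
 * provider and arm the watchdog timer. In multi-instance mode the
 * firmware is only cached here and the rest is done per vdec run.
 */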
11097#ifdef MULTI_INSTANCE_SUPPORT
11098static s32 vh265_init(struct vdec_s *vdec)
11099{
11100 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
11101#else
11102static s32 vh265_init(struct hevc_state_s *hevc)
11103{
11104
11105#endif
11106 int ret, size = -1;
11107 int fw_size = 0x1000 * 16;
11108 struct firmware_s *fw = NULL;
11109
11110 init_timer(&hevc->timer);
11111
11112 hevc->stat |= STAT_TIMER_INIT;
11113
11114 if (hevc->m_ins_flag) {
11115#ifdef USE_UNINIT_SEMA
11116 sema_init(&hevc->h265_uninit_done_sema, 0);
11117#endif
11118 INIT_WORK(&hevc->work, vh265_work);
11119 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
11120 }
11121
11122 if (vh265_local_init(hevc) < 0)
11123 return -EBUSY;
11124
11125 mutex_init(&hevc->chunks_mutex);
11126 INIT_WORK(&hevc->notify_work, vh265_notify_work);
11127 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
11128
11129 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
11130 if (IS_ERR_OR_NULL(fw))
11131 return -ENOMEM;
11132
11133 if (hevc->mmu_enable)
11134 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11135 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
11136 else {
11137 if (!hevc->is_4k) {
11138 /* if an older version of the fw was loaded, */
11139 /* we need to try the noswap fw because the */
11140 /* old fw package does not contain the swap fw. */
11141 size = get_firmware_data(
11142 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
11143 if (size < 0)
11144 size = get_firmware_data(
11145 VIDEO_DEC_HEVC_MMU, fw->data);
11146 else if (size)
11147 hevc->is_swap = true;
11148 } else
11149 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
11150 fw->data);
11151 }
11152 else
11153 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
11154
11155 if (size < 0) {
11156 pr_err("get firmware fail.\n");
11157 vfree(fw);
11158 return -1;
11159 }
11160
11161 fw->len = size;
11162
11163#ifdef SWAP_HEVC_UCODE
11164 if (!tee_enabled() && hevc->is_swap &&
11165 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11166 if (hevc->mmu_enable) {
11167 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
11168 hevc->mc_cpu_addr =
11169 dma_alloc_coherent(amports_get_dma_device(),
11170 hevc->swap_size,
11171 &hevc->mc_dma_handle, GFP_KERNEL);
11172 if (!hevc->mc_cpu_addr) {
11173 amhevc_disable();
11174 pr_info("vh265 mmu swap ucode loaded fail.\n");
11175 return -ENOMEM;
11176 }
11177
11178 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
11179 hevc->swap_size);
11180
11181 hevc_print(hevc, 0,
11182 "vh265 mmu ucode swap loaded %x\n",
11183 hevc->mc_dma_handle);
11184 }
11185 }
11186#endif
11187
11188#ifdef MULTI_INSTANCE_SUPPORT
11189 if (hevc->m_ins_flag) {
11190 hevc->timer.data = (ulong) hevc;
11191 hevc->timer.function = vh265_check_timer_func;
11192 hevc->timer.expires = jiffies + PUT_INTERVAL;
11193
11194 hevc->fw = fw;
11195
11196 return 0;
11197 }
11198#endif
11199 amhevc_enable();
11200
11201 if (hevc->mmu_enable)
11202 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11203 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
11204 else {
11205 if (!hevc->is_4k) {
11206 /* if an older version of the fw was loaded, */
11207 /* we need to try the noswap fw because the */
11208 /* old fw package does not contain the swap fw. */
11209 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11210 "hevc_mmu_swap", fw->data);
11211 if (ret < 0)
11212 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11213 "h265_mmu", fw->data);
11214 else
11215 hevc->is_swap = true;
11216 } else
11217 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11218 "h265_mmu", fw->data);
11219 }
11220 else
11221 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
11222
11223 if (ret < 0) {
11224 amhevc_disable();
11225 vfree(fw);
11226 pr_err("H265: the %s fw loading failed, err: %x\n",
11227 tee_enabled() ? "TEE" : "local", ret);
11228 return -EBUSY;
11229 }
11230
11231 vfree(fw);
11232
11233 hevc->stat |= STAT_MC_LOAD;
11234
11235#ifdef DETREFILL_ENABLE
11236 if (hevc->is_swap &&
11237 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11238 init_detrefill_buf(hevc);
11239#endif
11240 /* enable AMRISC side protocol */
11241 vh265_prot_init(hevc);
11242
11243 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
11244 vh265_isr_thread_fn,
11245 IRQF_ONESHOT,/*run thread on this irq disabled*/
11246 "vh265-irq", (void *)hevc)) {
11247 hevc_print(hevc, 0, "vh265 irq register error.\n");
11248 amhevc_disable();
11249 return -ENOENT;
11250 }
11251
11252 hevc->stat |= STAT_ISR_REG;
11253 hevc->provider_name = PROVIDER_NAME;
11254
11255#ifdef MULTI_INSTANCE_SUPPORT
11256 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
11257 &vh265_vf_provider, vdec);
11258 vf_reg_provider(&vh265_vf_prov);
11259 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
11260 NULL);
11261 if (hevc->frame_dur != 0) {
11262 if (!is_reset) {
11263 vf_notify_receiver(hevc->provider_name,
11264 VFRAME_EVENT_PROVIDER_FR_HINT,
11265 (void *)
11266 ((unsigned long)hevc->frame_dur));
11267 fr_hint_status = VDEC_HINTED;
11268 }
11269 } else
11270 fr_hint_status = VDEC_NEED_HINT;
11271#else
11272 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
11273 hevc);
11274 vf_reg_provider(&vh265_vf_prov);
11275 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
11276 if (hevc->frame_dur != 0) {
11277 vf_notify_receiver(PROVIDER_NAME,
11278 VFRAME_EVENT_PROVIDER_FR_HINT,
11279 (void *)
11280 ((unsigned long)hevc->frame_dur));
11281 fr_hint_status = VDEC_HINTED;
11282 } else
11283 fr_hint_status = VDEC_NEED_HINT;
11284#endif
11285 hevc->stat |= STAT_VF_HOOK;
11286
11287 hevc->timer.data = (ulong) hevc;
11288 hevc->timer.function = vh265_check_timer_func;
11289 hevc->timer.expires = jiffies + PUT_INTERVAL;
11290
11291 add_timer(&hevc->timer);
11292
11293 hevc->stat |= STAT_TIMER_ARM;
11294
11295 if (use_cma) {
11296#ifdef USE_UNINIT_SEMA
11297 sema_init(&hevc->h265_uninit_done_sema, 0);
11298#endif
11299 if (h265_task == NULL) {
11300 sema_init(&h265_sema, 1);
11301 h265_task =
11302 kthread_run(h265_task_handle, hevc,
11303 "kthread_h265");
11304 }
11305 }
11306 /* hevc->stat |= STAT_KTHREAD; */
11307#if 0
11308 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11309 hevc_print(hevc, 0, "%s force clk\n", __func__);
11310 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11311 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11312 ((1 << 2) | (1 << 1)));
11313 WRITE_VREG(HEVC_DBLK_CFG0,
11314 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11315 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11316 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11317 (1 << 2)); /* 2 */
11318 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11319 (1 << 24)); /* 24 */
11320 WRITE_VREG(HEVC_STREAM_CONTROL,
11321 READ_VREG(HEVC_STREAM_CONTROL) |
11322 (1 << 15)); /* 15 */
11323 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11324 (1 << 13)); /* 13 */
11325 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11326 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11327 (1 << 15)); /* 15 */
11328 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11329 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11330 (1 << 15)); /* 15 */
11331 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11332 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11333 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11334 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11335 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11336 (1 << 3)); /* 3 */
11337 }
11338#endif
11339#ifdef SWAP_HEVC_UCODE
11340 if (!tee_enabled() && hevc->is_swap &&
11341 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11342 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11343 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11344 }
11345#endif
11346
11347#ifndef MULTI_INSTANCE_SUPPORT
11348 set_vdec_func(&vh265_dec_status);
11349#endif
11350 amhevc_start();
11351 hevc->stat |= STAT_VDEC_RUN;
11352 hevc->init_flag = 1;
11353 error_handle_threshold = 30;
11354 /* pr_info("%d, vh265_init, RP=0x%x\n",
11355 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11356 */
11357
11358 return 0;
11359}
11360
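/*
 * Tear-down path: optionally wait for the current slice to finish, stop
 * the AMRISC, free the IRQ, delete the timer, unregister the vframe
 * provider and release the buffer and MMU resources.
 */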
11361static int vh265_stop(struct hevc_state_s *hevc)
11362{
11363 if (get_dbg_flag(hevc) &
11364 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11365 int wait_timeout_count = 0;
11366
11367 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11368 HEVC_CODED_SLICE_SEGMENT_DAT &&
11369 wait_timeout_count < 10){
11370 wait_timeout_count++;
11371 msleep(20);
11372 }
11373 }
11374 if (hevc->stat & STAT_VDEC_RUN) {
11375 amhevc_stop();
11376 hevc->stat &= ~STAT_VDEC_RUN;
11377 }
11378
11379 if (hevc->stat & STAT_ISR_REG) {
11380#ifdef MULTI_INSTANCE_SUPPORT
11381 if (!hevc->m_ins_flag)
11382#endif
11383 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11384 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11385 hevc->stat &= ~STAT_ISR_REG;
11386 }
11387
11388 hevc->stat &= ~STAT_TIMER_INIT;
11389 if (hevc->stat & STAT_TIMER_ARM) {
11390 del_timer_sync(&hevc->timer);
11391 hevc->stat &= ~STAT_TIMER_ARM;
11392 }
11393
11394 if (hevc->stat & STAT_VF_HOOK) {
11395 if (fr_hint_status == VDEC_HINTED) {
11396 vf_notify_receiver(hevc->provider_name,
11397 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11398 NULL);
11399 }
11400 fr_hint_status = VDEC_NO_NEED_HINT;
11401 vf_unreg_provider(&vh265_vf_prov);
11402 hevc->stat &= ~STAT_VF_HOOK;
11403 }
11404
11405 hevc_local_uninit(hevc);
11406
11407 if (use_cma) {
11408 hevc->uninit_list = 1;
11409 up(&h265_sema);
11410#ifdef USE_UNINIT_SEMA
11411 down(&hevc->h265_uninit_done_sema);
11412 if (!IS_ERR(h265_task)) {
11413 kthread_stop(h265_task);
11414 h265_task = NULL;
11415 }
11416#else
11417 while (hevc->uninit_list) /* wait uninit complete */
11418 msleep(20);
11419#endif
11420
11421 }
11422 hevc->init_flag = 0;
11423 hevc->first_sc_checked = 0;
11424 cancel_work_sync(&hevc->notify_work);
11425 cancel_work_sync(&hevc->set_clk_work);
11426 uninit_mmu_buffers(hevc);
11427 amhevc_disable();
11428
11429 kfree(gvs);
11430 gvs = NULL;
11431
11432 return 0;
11433}
11434
11435#ifdef MULTI_INSTANCE_SUPPORT
11436static void reset_process_time(struct hevc_state_s *hevc)
11437{
11438 if (hevc->start_process_time) {
11439 unsigned int process_time =
11440 1000 * (jiffies - hevc->start_process_time) / HZ;
11441 hevc->start_process_time = 0;
11442 if (process_time > max_process_time[hevc->index])
11443 max_process_time[hevc->index] = process_time;
11444 }
11445}
11446
11447static void start_process_time(struct hevc_state_s *hevc)
11448{
11449 hevc->start_process_time = jiffies;
11450 hevc->decode_timeout_count = 2;
11451 hevc->last_lcu_idx = 0;
11452}
11453
11454static void restart_process_time(struct hevc_state_s *hevc)
11455{
11456 hevc->start_process_time = jiffies;
11457 hevc->decode_timeout_count = 2;
11458}
11459
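/*
 * Called from the watchdog when a picture takes longer than
 * decode_timeout_val ms: stop the AMRISC, mark the current picture as
 * decoded and hand off to timeout_work, unless vh265_work is already
 * pending and will handle it.
 */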
11460static void timeout_process(struct hevc_state_s *hevc)
11461{
11462 /*
11463 * If vh265_work is already pending at this timeout point,
11464 * let it handle the scenario instead.
11465 */
11466 if (work_pending(&hevc->work))
11467 return;
11468
11469 hevc->timeout_num++;
11470 amhevc_stop();
11471 read_decode_info(hevc);
11472
11473 hevc_print(hevc,
11474 0, "%s decoder timeout\n", __func__);
11475 check_pic_decoded_error(hevc,
11476 hevc->pic_decoded_lcu_idx);
11477 hevc->decoded_poc = hevc->curr_POC;
11478 hevc->decoding_pic = NULL;
11479 hevc->dec_result = DEC_RESULT_DONE;
11480 reset_process_time(hevc);
11481
11482 if (work_pending(&hevc->work))
11483 return;
11484 vdec_schedule_work(&hevc->timeout_work);
11485}
11486
11487#ifdef CONSTRAIN_MAX_BUF_NUM
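/* Count pictures held only by the display path: no longer referenced by the
 * decoder and not waiting for output, but still owned by a vframe.
 */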
11488static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11489{
11490 struct PIC_s *pic;
11491 int i;
11492 int count = 0;
11493 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11494 pic = hevc->m_PIC[i];
11495 if (pic == NULL || pic->index == -1)
11496 continue;
11497 if (pic->output_mark == 0 && pic->referenced == 0
11498 && pic->output_ready == 1)
11499 count++;
11500 }
11501
11502 return count;
11503}
11504
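/* Count pictures still in use anywhere: pending output, referenced by the
 * decoder, or currently held by the display path.
 */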
11505static int get_used_buf_count(struct hevc_state_s *hevc)
11506{
11507 struct PIC_s *pic;
11508 int i;
11509 int count = 0;
11510 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11511 pic = hevc->m_PIC[i];
11512 if (pic == NULL || pic->index == -1)
11513 continue;
11514 if (pic->output_mark != 0 || pic->referenced != 0
11515 || pic->output_ready != 0)
11516 count++;
11517 }
11518
11519 return count;
11520}
11521#endif
11522
11523
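/* Check whether a free picture buffer exists for the next decode; when the
 * DPB is starved and the receiver is inactive, error-marked reference
 * pictures are recycled here (see the comment below).
 */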
11524static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11525{
11526 struct PIC_s *new_pic = NULL;
11527 struct PIC_s *pic;
11528 /* recycle un-used pic */
11529 int i;
11530 int ref_pic = 0;
11531 struct vdec_s *vdec = hw_to_vdec(hevc);
11532 /*return 1 if pic_list is not initialized yet*/
11533 if (hevc->pic_list_init_flag != 3)
11534 return 1;
11535
11536 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11537 pic = hevc->m_PIC[i];
11538 if (pic == NULL || pic->index == -1)
11539 continue;
11540 if (pic->referenced == 1)
11541 ref_pic++;
11542 if (pic->output_mark == 0 && pic->referenced == 0
11543 && pic->output_ready == 0
11544 ) {
11545 if (new_pic) {
11546 if (pic->POC < new_pic->POC)
11547 new_pic = pic;
11548 } else
11549 new_pic = pic;
11550 }
11551 }
11552/*If the number of referenced frames in the DPB is >= (the DPB buffer size - the number of reorder frames - 3)*/
11553/*and the back-end state is RECEIVER_INACTIVE, the decoder has no free buffer left to decode into:*/
11554/*release the error-marked reference frames and set the error flag on all pictures.*/
11555/*The 3 accounts for 2 fields needed for back-end display and 1 field needed for decoding*/
11556/*when the stream is interlaced.*/
11557 if ((!hevc->is_used_v4l) && (new_pic == NULL) &&
11558 (ref_pic >=
11559 get_work_pic_num(hevc) -
11560 hevc->sps_num_reorder_pics_0 - 3)) {
11561 enum receviver_start_e state = RECEIVER_INACTIVE;
11562 if (vf_get_receiver(vdec->vf_provider_name)) {
11563 state =
11564 vf_notify_receiver(vdec->vf_provider_name,
11565 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11566 NULL);
11567 if ((state == RECEIVER_STATE_NULL)
11568 || (state == RECEIVER_STATE_NONE))
11569 state = RECEIVER_INACTIVE;
11570 }
11571 if (state == RECEIVER_INACTIVE) {
11572 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11573 pic = hevc->m_PIC[i];
11574 if (pic == NULL || pic->index == -1)
11575 continue;
11576
11577 if ((pic->referenced == 1) &&
11578 (pic->error_mark == 1)) {
11579 pic->referenced = 0;
11580 put_mv_buf(hevc, pic);
11581 }
11582 pic->error_mark = 1;
11583 }
11584 }
11585 }
11586
11587 return (new_pic != NULL) ? 1 : 0;
11588}
11589
11590static int vmh265_stop(struct hevc_state_s *hevc)
11591{
11592 if (hevc->stat & STAT_TIMER_ARM) {
11593 del_timer_sync(&hevc->timer);
11594 hevc->stat &= ~STAT_TIMER_ARM;
11595 }
11596 if (hevc->stat & STAT_VDEC_RUN) {
11597 amhevc_stop();
11598 hevc->stat &= ~STAT_VDEC_RUN;
11599 }
11600 if (hevc->stat & STAT_ISR_REG) {
11601 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11602 hevc->stat &= ~STAT_ISR_REG;
11603 }
11604
11605 if (hevc->stat & STAT_VF_HOOK) {
11606 if (fr_hint_status == VDEC_HINTED)
11607 vf_notify_receiver(hevc->provider_name,
11608 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11609 NULL);
11610 fr_hint_status = VDEC_NO_NEED_HINT;
11611 vf_unreg_provider(&vh265_vf_prov);
11612 hevc->stat &= ~STAT_VF_HOOK;
11613 }
11614
11615 hevc_local_uninit(hevc);
11616
11617 hevc->init_flag = 0;
11618 hevc->first_sc_checked = 0;
11619 cancel_work_sync(&hevc->notify_work);
11620 cancel_work_sync(&hevc->set_clk_work);
11621 cancel_work_sync(&hevc->timeout_work);
11622
11623 uninit_mmu_buffers(hevc);
11624
11625 if (use_cma) {
11626 hevc->uninit_list = 1;
11627 reset_process_time(hevc);
11628 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
11629 vdec_schedule_work(&hevc->work);
11630 flush_work(&hevc->work);
11631#ifdef USE_UNINIT_SEMA
11632 if (hevc->init_flag) {
11633 down(&hevc->h265_uninit_done_sema);
11634 }
11635#else
11636 while (hevc->uninit_list) /* wait uninit complete */
11637 msleep(20);
11638#endif
11639 }
11640 cancel_work_sync(&hevc->work);
11641
11642 vfree(hevc->fw);
11643 hevc->fw = NULL;
11644
11645 dump_log(hevc);
11646 return 0;
11647}
11648
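/* Byte-wise checksum of the current input chunk, used only for debug logging;
 * the chunk is temporarily mapped if its block has no kernel mapping.
 */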
11649static unsigned char get_data_check_sum
11650 (struct hevc_state_s *hevc, int size)
11651{
11652 int jj;
11653 int sum = 0;
11654 u8 *data = NULL;
11655
11656 if (!hevc->chunk->block->is_mapped)
11657 data = codec_mm_vmap(hevc->chunk->block->start +
11658 hevc->chunk->offset, size);
11659 else
11660 data = ((u8 *)hevc->chunk->block->start_virt) +
11661 hevc->chunk->offset;
11662
11663 for (jj = 0; jj < size; jj++)
11664 sum += data[jj];
11665
11666 if (!hevc->chunk->block->is_mapped)
11667 codec_mm_unmap_phyaddr(data);
11668 return sum;
11669}
11670
11671static void vh265_notify_work(struct work_struct *work)
11672{
11673 struct hevc_state_s *hevc =
11674 container_of(work,
11675 struct hevc_state_s,
11676 notify_work);
11677 struct vdec_s *vdec = hw_to_vdec(hevc);
11678#ifdef MULTI_INSTANCE_SUPPORT
11679 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
11680 vf_notify_receiver(hevc->provider_name,
11681 VFRAME_EVENT_PROVIDER_FR_HINT,
11682 (void *)
11683 ((unsigned long)hevc->frame_dur));
11684 vdec->fr_hint_state = VDEC_HINTED;
11685 } else if (fr_hint_status == VDEC_NEED_HINT) {
11686 vf_notify_receiver(hevc->provider_name,
11687 VFRAME_EVENT_PROVIDER_FR_HINT,
11688 (void *)
11689 ((unsigned long)hevc->frame_dur));
11690 fr_hint_status = VDEC_HINTED;
11691 }
11692#else
11693	if (fr_hint_status == VDEC_NEED_HINT) {
11694		vf_notify_receiver(PROVIDER_NAME,
11695			VFRAME_EVENT_PROVIDER_FR_HINT,
11696			(void *)
11697			((unsigned long)hevc->frame_dur));
11698		fr_hint_status = VDEC_HINTED;
11699	}
11700#endif
11701
11702 return;
11703}
11704
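/* Bottom half of the decode state machine: called from vh265_work or
 * vh265_timeout_work once a dec_result is set, it handles data refill,
 * frame done, EOS and force-exit cases, releases the HW cores and then
 * notifies the vdec scheduler through the registered callback.
 */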
11705static void vh265_work_implement(struct hevc_state_s *hevc,
11706	struct vdec_s *vdec, int from)
11707{
11708 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
11709 /*USE_BUF_BLOCK*/
11710 uninit_pic_list(hevc);
11711 hevc_print(hevc, 0, "uninit list\n");
11712 hevc->uninit_list = 0;
11713#ifdef USE_UNINIT_SEMA
11714 up(&hevc->h265_uninit_done_sema);
11715#endif
11716 return;
11717 }
11718
11719 /* finished decoding one frame or error,
11720 * notify vdec core to switch context
11721 */
11722 if (hevc->pic_list_init_flag == 1
11723 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
11724 hevc->pic_list_init_flag = 2;
11725 init_pic_list(hevc);
11726 init_pic_list_hw(hevc);
11727 init_buf_spec(hevc);
11728 hevc_print(hevc, 0,
11729 "set pic_list_init_flag to 2\n");
11730
11731 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11732 return;
11733 }
11734
11735 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11736 "%s dec_result %d %x %x %x\n",
11737 __func__,
11738 hevc->dec_result,
11739 READ_VREG(HEVC_STREAM_LEVEL),
11740 READ_VREG(HEVC_STREAM_WR_PTR),
11741 READ_VREG(HEVC_STREAM_RD_PTR));
11742
11743 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
11744 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
11745 && (hw_to_vdec(hevc)->next_status !=
11746 VDEC_STATUS_DISCONNECTED)) {
11747 if (!vdec_has_more_input(vdec)) {
11748 hevc->dec_result = DEC_RESULT_EOS;
11749 vdec_schedule_work(&hevc->work);
11750 return;
11751 }
11752 if (!input_frame_based(vdec)) {
11753 int r = vdec_sync_input(vdec);
11754 if (r >= 0x200) {
11755 WRITE_VREG(HEVC_DECODE_SIZE,
11756 READ_VREG(HEVC_DECODE_SIZE) + r);
11757
11758 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11759 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
11760 __func__,
11761 READ_VREG(HEVC_STREAM_LEVEL),
11762 READ_VREG(HEVC_STREAM_WR_PTR),
11763 READ_VREG(HEVC_STREAM_RD_PTR),
11764 READ_VREG(HEVC_MPC_E), r);
11765
11766 start_process_time(hevc);
11767 if (READ_VREG(HEVC_DEC_STATUS_REG)
11768 == HEVC_DECODE_BUFEMPTY2)
11769 WRITE_VREG(HEVC_DEC_STATUS_REG,
11770 HEVC_ACTION_DONE);
11771 else
11772 WRITE_VREG(HEVC_DEC_STATUS_REG,
11773 HEVC_ACTION_DEC_CONT);
11774 } else {
11775 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11776 vdec_schedule_work(&hevc->work);
11777 }
11778 return;
11779 }
11780
11781		/* below is for frame-based input */
11782 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
11783 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11784 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
11785 __func__,
11786 READ_VREG(HEVC_STREAM_LEVEL),
11787 READ_VREG(HEVC_STREAM_WR_PTR),
11788 READ_VREG(HEVC_STREAM_RD_PTR),
11789 READ_VREG(HEVC_MPC_E));
11790 mutex_lock(&hevc->chunks_mutex);
11791 vdec_vframe_dirty(vdec, hevc->chunk);
11792 hevc->chunk = NULL;
11793 mutex_unlock(&hevc->chunks_mutex);
11794 vdec_clean_input(vdec);
11795 }
11796
11797 /*if (is_new_pic_available(hevc)) {*/
11798 if (run_ready(vdec, VDEC_HEVC)) {
11799 int r;
11800 int decode_size;
11801 r = vdec_prepare_input(vdec, &hevc->chunk);
11802 if (r < 0) {
11803 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11804
11805 hevc_print(hevc,
11806 PRINT_FLAG_VDEC_DETAIL,
11807 "amvdec_vh265: Insufficient data\n");
11808
11809 vdec_schedule_work(&hevc->work);
11810 return;
11811 }
11812 hevc->dec_result = DEC_RESULT_NONE;
11813 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11814 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
11815 __func__, r,
11816 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
11817 get_data_check_sum(hevc, r) : 0,
11818 READ_VREG(HEVC_MPC_E));
11819
11820 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
11821 int jj;
11822 u8 *data = NULL;
11823
11824 if (!hevc->chunk->block->is_mapped)
11825 data = codec_mm_vmap(
11826 hevc->chunk->block->start +
11827 hevc->chunk->offset, r);
11828 else
11829 data = ((u8 *)
11830 hevc->chunk->block->start_virt)
11831 + hevc->chunk->offset;
11832
11833 for (jj = 0; jj < r; jj++) {
11834 if ((jj & 0xf) == 0)
11835 hevc_print(hevc,
11836 PRINT_FRAMEBASE_DATA,
11837 "%06x:", jj);
11838 hevc_print_cont(hevc,
11839 PRINT_FRAMEBASE_DATA,
11840 "%02x ", data[jj]);
11841 if (((jj + 1) & 0xf) == 0)
11842 hevc_print_cont(hevc,
11843 PRINT_FRAMEBASE_DATA,
11844 "\n");
11845 }
11846
11847 if (!hevc->chunk->block->is_mapped)
11848 codec_mm_unmap_phyaddr(data);
11849 }
11850
11851 decode_size = hevc->chunk->size +
11852 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
11853 WRITE_VREG(HEVC_DECODE_SIZE,
11854 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
11855
11856 vdec_enable_input(vdec);
11857
11858 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11859 "%s: mpc %x\n",
11860 __func__, READ_VREG(HEVC_MPC_E));
11861
11862 start_process_time(hevc);
11863 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11864		} else {
11865 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11866
11867 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11868 * "amvdec_vh265: Insufficient data\n");
11869 */
11870
11871 vdec_schedule_work(&hevc->work);
11872 }
11873 return;
11874 } else if (hevc->dec_result == DEC_RESULT_DONE) {
11875 /* if (!hevc->ctx_valid)
11876 hevc->ctx_valid = 1; */
11877 decode_frame_count[hevc->index]++;
11878#ifdef DETREFILL_ENABLE
11879 if (hevc->is_swap &&
11880 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11881 if (hevc->delrefill_check == 2) {
11882 delrefill(hevc);
11883 amhevc_stop();
11884 }
11885 }
11886#endif
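		/* Read back how many 4K MMU pages the frame actually used and
		 * return the unused tail of the scatter allocation to the MMU box.
		 */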
11887 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
11888 hevc->used_4k_num =
11889 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
11890 if (hevc->used_4k_num >= 0 &&
11891 hevc->cur_pic &&
11892 hevc->cur_pic->scatter_alloc
11893 == 1) {
11894 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
11895 "%s pic index %d scatter_alloc %d page_start %d\n",
11896 "decoder_mmu_box_free_idx_tail",
11897 hevc->cur_pic->index,
11898 hevc->cur_pic->scatter_alloc,
11899 hevc->used_4k_num);
11900 if (hevc->m_ins_flag)
11901 hevc_mmu_dma_check(hw_to_vdec(hevc));
11902 decoder_mmu_box_free_idx_tail(
11903 hevc->mmu_box,
11904 hevc->cur_pic->index,
11905 hevc->used_4k_num);
11906 hevc->cur_pic->scatter_alloc = 2;
11907 }
11908 }
11909 hevc->pic_decoded_lcu_idx =
11910 READ_VREG(HEVC_PARSER_LCU_START)
11911 & 0xffffff;
11912
11913 if (vdec->master == NULL && vdec->slave == NULL &&
11914 hevc->empty_flag == 0) {
11915 hevc->over_decode =
11916 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
11917 if (hevc->over_decode)
11918 hevc_print(hevc, 0,
11919 "!!!Over decode\n");
11920 }
11921
11922 if (is_log_enable(hevc))
11923 add_log(hevc,
11924 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
11925 __func__,
11926 hevc->dec_result,
11927 hevc->pic_decoded_lcu_idx,
11928 hevc->used_4k_num,
11929 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11930 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11931 hevc->start_shift_bytes
11932 );
11933
11934 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11935 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
11936 __func__,
11937 hevc->dec_result,
11938 READ_VREG(HEVC_STREAM_LEVEL),
11939 READ_VREG(HEVC_STREAM_WR_PTR),
11940 READ_VREG(HEVC_STREAM_RD_PTR),
11941 hevc->pic_decoded_lcu_idx,
11942 hevc->used_4k_num,
11943 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11944 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11945 hevc->start_shift_bytes
11946 );
11947
11948 hevc->used_4k_num = -1;
11949
11950 check_pic_decoded_error(hevc,
11951 hevc->pic_decoded_lcu_idx);
11952#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11953#if 1
11954 if (vdec->slave) {
11955 if (dv_debug & 0x1)
11956 vdec_set_flag(vdec->slave,
11957 VDEC_FLAG_SELF_INPUT_CONTEXT);
11958 else
11959 vdec_set_flag(vdec->slave,
11960 VDEC_FLAG_OTHER_INPUT_CONTEXT);
11961 }
11962#else
11963 if (vdec->slave) {
11964 if (no_interleaved_el_slice)
11965 vdec_set_flag(vdec->slave,
11966 VDEC_FLAG_INPUT_KEEP_CONTEXT);
11967 /* this will move real HW pointer for input */
11968 else
11969 vdec_set_flag(vdec->slave, 0);
11970 /* this will not move real HW pointer
11971 *and SL layer decoding
11972 *will start from same stream position
11973 *as current BL decoder
11974 */
11975 }
11976#endif
11977#endif
11978#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11979 hevc->shift_byte_count_lo
11980 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11981 if (vdec->slave) {
11982 /*cur is base, found enhance*/
11983 struct hevc_state_s *hevc_el =
11984 (struct hevc_state_s *)
11985 vdec->slave->private;
11986 if (hevc_el)
11987 hevc_el->shift_byte_count_lo =
11988 hevc->shift_byte_count_lo;
11989 } else if (vdec->master) {
11990 /*cur is enhance, found base*/
11991 struct hevc_state_s *hevc_ba =
11992 (struct hevc_state_s *)
11993 vdec->master->private;
11994 if (hevc_ba)
11995 hevc_ba->shift_byte_count_lo =
11996 hevc->shift_byte_count_lo;
11997 }
11998#endif
11999 mutex_lock(&hevc->chunks_mutex);
12000 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12001 hevc->chunk = NULL;
12002 mutex_unlock(&hevc->chunks_mutex);
12003 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
12004		/*
12005		 * stream base: stream buf empty or timeout
12006		 * frame base: vdec_prepare_input fail
12007		 */
12008 if (!vdec_has_more_input(vdec)) {
12009 hevc->dec_result = DEC_RESULT_EOS;
12010 vdec_schedule_work(&hevc->work);
12011 return;
12012 }
12013#ifdef AGAIN_HAS_THRESHOLD
12014 hevc->next_again_flag = 1;
12015#endif
12016 } else if (hevc->dec_result == DEC_RESULT_EOS) {
12017 struct PIC_s *pic;
12018 hevc->eos = 1;
12019#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12020 if ((vdec->master || vdec->slave) &&
12021 aux_data_is_avaible(hevc))
12022 dolby_get_meta(hevc);
12023#endif
12024 check_pic_decoded_error(hevc,
12025 hevc->pic_decoded_lcu_idx);
12026 pic = get_pic_by_POC(hevc, hevc->curr_POC);
12027 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12028 "%s: end of stream, last dec poc %d => 0x%pf\n",
12029 __func__, hevc->curr_POC, pic);
12030 flush_output(hevc, pic);
12031
12032 if (hevc->is_used_v4l)
12033 notify_v4l_eos(hw_to_vdec(hevc));
12034#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12035 hevc->shift_byte_count_lo
12036 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12037 if (vdec->slave) {
12038 /*cur is base, found enhance*/
12039 struct hevc_state_s *hevc_el =
12040 (struct hevc_state_s *)
12041 vdec->slave->private;
12042 if (hevc_el)
12043 hevc_el->shift_byte_count_lo =
12044 hevc->shift_byte_count_lo;
12045 } else if (vdec->master) {
12046 /*cur is enhance, found base*/
12047 struct hevc_state_s *hevc_ba =
12048 (struct hevc_state_s *)
12049 vdec->master->private;
12050 if (hevc_ba)
12051 hevc_ba->shift_byte_count_lo =
12052 hevc->shift_byte_count_lo;
12053 }
12054#endif
12055 mutex_lock(&hevc->chunks_mutex);
12056 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12057 hevc->chunk = NULL;
12058 mutex_unlock(&hevc->chunks_mutex);
12059 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
12060 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12061 "%s: force exit\n",
12062 __func__);
12063 if (hevc->stat & STAT_VDEC_RUN) {
12064 amhevc_stop();
12065 hevc->stat &= ~STAT_VDEC_RUN;
12066 }
12067 if (hevc->stat & STAT_ISR_REG) {
12068 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
12069 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
12070 hevc->stat &= ~STAT_ISR_REG;
12071 }
12072 hevc_print(hevc, 0, "%s: force exit end\n",
12073 __func__);
12074 }
12075
12076 if (hevc->stat & STAT_VDEC_RUN) {
12077 amhevc_stop();
12078 hevc->stat &= ~STAT_VDEC_RUN;
12079 }
12080
12081 if (hevc->stat & STAT_TIMER_ARM) {
12082 del_timer_sync(&hevc->timer);
12083 hevc->stat &= ~STAT_TIMER_ARM;
12084 }
12085
12086 wait_hevc_search_done(hevc);
12087#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12088 if (hevc->switch_dvlayer_flag) {
12089 if (vdec->slave)
12090 vdec_set_next_sched(vdec, vdec->slave);
12091 else if (vdec->master)
12092 vdec_set_next_sched(vdec, vdec->master);
12093 } else if (vdec->slave || vdec->master)
12094 vdec_set_next_sched(vdec, vdec);
12095#endif
12096
12097 if (from == 1) {
12098 /* This is a timeout work */
12099 if (work_pending(&hevc->work)) {
12100 /*
12101 * The vh265_work arrives at the last second,
12102 * give it a chance to handle the scenario.
12103 */
12104 return;
12105			//cancel_work_sync(&hevc->work);//reserved for future consideration
12106 }
12107 }
12108
12109 /* mark itself has all HW resource released and input released */
12110 if (vdec->parallel_dec == 1)
12111 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
12112 else
12113 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12114
12115 if (hevc->is_used_v4l) {
12116 struct aml_vcodec_ctx *ctx =
12117 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12118
12119 if (ctx->param_sets_from_ucode &&
12120 !hevc->v4l_params_parsed)
12121 vdec_v4l_write_frame_sync(ctx);
12122 }
12123
12124 if (hevc->vdec_cb)
12125 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
12126}
12127
12128static void vh265_work(struct work_struct *work)
12129{
12130 struct hevc_state_s *hevc = container_of(work,
12131 struct hevc_state_s, work);
12132 struct vdec_s *vdec = hw_to_vdec(hevc);
12133
12134 vh265_work_implement(hevc, vdec, 0);
12135}
12136
12137static void vh265_timeout_work(struct work_struct *work)
12138{
12139 struct hevc_state_s *hevc = container_of(work,
12140 struct hevc_state_s, timeout_work);
12141 struct vdec_s *vdec = hw_to_vdec(hevc);
12142
12143 if (work_pending(&hevc->work))
12144 return;
12145 vh265_work_implement(hevc, vdec, 1);
12146}
12147
12148
12149static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
12150{
12151 /* new to do ... */
12152 vh265_prot_init(hevc);
12153 return 0;
12154}
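
/* Scheduler hook: return the core mask this instance could run on now, or 0
 * when it is not ready (stepping, EOS, stream level below threshold, AGAIN
 * threshold not yet reached, display queue full, or no free picture buffer).
 */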
12155static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
12156{
12157 struct hevc_state_s *hevc =
12158 (struct hevc_state_s *)vdec->private;
12159 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
12160 CODEC_MM_FLAGS_TVP : 0;
12161 bool ret = 0;
12162 if (step == 0x12)
12163 return 0;
12164 else if (step == 0x11)
12165 step = 0x12;
12166
12167 if (hevc->eos)
12168 return 0;
12169 if (!hevc->first_sc_checked && hevc->mmu_enable) {
12170 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
12171		hevc->first_sc_checked = 1;
12172 hevc_print(hevc, 0,
12173 "vh265 cached=%d need_size=%d speed= %d ms\n",
12174 size, (hevc->need_cache_size >> PAGE_SHIFT),
12175 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
12176 }
12177 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
12178 && pre_decode_buf_level != 0) {
12179 u32 rp, wp, level;
12180
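		/* parser FIFO fill level, accounting for write-pointer wrap-around */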
12181 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
12182 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
12183 if (wp < rp)
12184 level = vdec->input.size + wp - rp;
12185 else
12186 level = wp - rp;
12187
12188 if (level < pre_decode_buf_level)
12189 return 0;
12190 }
12191
12192#ifdef AGAIN_HAS_THRESHOLD
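	/* After a DEC_RESULT_AGAIN on stream input, do not reschedule until
	 * the parser has written at least again_threshold more bytes.
	 */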
12193 if (hevc->next_again_flag &&
12194 (!vdec_frame_based(vdec))) {
12195 u32 parser_wr_ptr =
12196 READ_PARSER_REG(PARSER_VIDEO_WP);
12197 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
12198 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
12199 again_threshold) {
12200 int r = vdec_sync_input(vdec);
12201 hevc_print(hevc,
12202			PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
12203 return 0;
12204 }
12205 }
12206#endif
12207
12208 if (disp_vframe_valve_level &&
12209 kfifo_len(&hevc->display_q) >=
12210 disp_vframe_valve_level) {
12211 hevc->valve_count--;
12212 if (hevc->valve_count <= 0)
12213 hevc->valve_count = 2;
12214 else
12215 return 0;
12216 }
12217
12218 ret = is_new_pic_available(hevc);
12219 if (!ret) {
12220 hevc_print(hevc,
12221 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
12222 __func__, ret);
12223 }
12224
12225#ifdef CONSTRAIN_MAX_BUF_NUM
12226 if (hevc->pic_list_init_flag == 3) {
12227 if (run_ready_max_vf_only_num > 0 &&
12228 get_vf_ref_only_buf_count(hevc) >=
12229 run_ready_max_vf_only_num
12230 )
12231 ret = 0;
12232 if (run_ready_display_q_num > 0 &&
12233 kfifo_len(&hevc->display_q) >=
12234 run_ready_display_q_num)
12235 ret = 0;
12236
12237		/* avoid consuming more buffers when
12238		 * switching resolution */
12239 if (run_ready_max_buf_num == 0xff &&
12240 get_used_buf_count(hevc) >=
12241 get_work_pic_num(hevc))
12242 ret = 0;
12243 else if (run_ready_max_buf_num &&
12244 get_used_buf_count(hevc) >=
12245 run_ready_max_buf_num)
12246 ret = 0;
12247 }
12248#endif
12249
12250 if (hevc->is_used_v4l) {
12251 struct aml_vcodec_ctx *ctx =
12252 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12253
12254 if (ctx->param_sets_from_ucode &&
12255 !ctx->v4l_codec_ready &&
12256 hevc->v4l_params_parsed) {
12257			ret = 0; /* params already parsed, but the v4l codec is not ready yet */
12258 } else if (!ctx->v4l_codec_dpb_ready) {
12259 if (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
12260 run_ready_min_buf_num)
12261 ret = 0;
12262 }
12263 }
12264
12265 if (ret)
12266 not_run_ready[hevc->index] = 0;
12267 else
12268 not_run_ready[hevc->index]++;
12269 if (vdec->parallel_dec == 1)
12270 return ret ? (CORE_MASK_HEVC) : 0;
12271 else
12272 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
12273}
12274
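/* Scheduler hook: perform one decode pass. Prepare the input chunk, (re)load
 * the HEVC firmware if needed, restore the hardware context and start the
 * core; completion is reported back through vh265_work().
 */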
12275static void run(struct vdec_s *vdec, unsigned long mask,
12276 void (*callback)(struct vdec_s *, void *), void *arg)
12277{
12278 struct hevc_state_s *hevc =
12279 (struct hevc_state_s *)vdec->private;
12280 int r, loadr = 0;
12281 unsigned char check_sum = 0;
12282
12283 run_count[hevc->index]++;
12284 hevc->vdec_cb_arg = arg;
12285 hevc->vdec_cb = callback;
12286 hevc->aux_data_dirty = 1;
12287 hevc_reset_core(vdec);
12288
12289#ifdef AGAIN_HAS_THRESHOLD
12290 hevc->pre_parser_wr_ptr =
12291 READ_PARSER_REG(PARSER_VIDEO_WP);
12292 hevc->next_again_flag = 0;
12293#endif
12294 r = vdec_prepare_input(vdec, &hevc->chunk);
12295 if (r < 0) {
12296 input_empty[hevc->index]++;
12297 hevc->dec_result = DEC_RESULT_AGAIN;
12298 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12299 "ammvdec_vh265: Insufficient data\n");
12300
12301 vdec_schedule_work(&hevc->work);
12302 return;
12303 }
12304 input_empty[hevc->index] = 0;
12305 hevc->dec_result = DEC_RESULT_NONE;
12306 if (vdec_frame_based(vdec) &&
12307 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12308 || is_log_enable(hevc)))
12309 check_sum = get_data_check_sum(hevc, r);
12310
12311 if (is_log_enable(hevc))
12312 add_log(hevc,
12313 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12314 __func__, r,
12315 check_sum,
12316 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12317 );
12318 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12319 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12320 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12321 __func__, r,
12322 check_sum,
12323 READ_VREG(HEVC_STREAM_LEVEL),
12324 READ_VREG(HEVC_STREAM_WR_PTR),
12325 READ_VREG(HEVC_STREAM_RD_PTR),
12326 READ_PARSER_REG(PARSER_VIDEO_RP),
12327 READ_PARSER_REG(PARSER_VIDEO_WP),
12328 hevc->start_shift_bytes
12329 );
12330 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12331 input_frame_based(vdec)) {
12332 int jj;
12333 u8 *data = NULL;
12334
12335 if (!hevc->chunk->block->is_mapped)
12336 data = codec_mm_vmap(hevc->chunk->block->start +
12337 hevc->chunk->offset, r);
12338 else
12339 data = ((u8 *)hevc->chunk->block->start_virt)
12340 + hevc->chunk->offset;
12341
12342 for (jj = 0; jj < r; jj++) {
12343 if ((jj & 0xf) == 0)
12344 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12345 "%06x:", jj);
12346 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12347 "%02x ", data[jj]);
12348 if (((jj + 1) & 0xf) == 0)
12349 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12350 "\n");
12351 }
12352
12353 if (!hevc->chunk->block->is_mapped)
12354 codec_mm_unmap_phyaddr(data);
12355 }
12356 if (vdec->mc_loaded) {
12357		/* The firmware has been loaded before and
12358		 * has not changed to another type,
12359		 * so skip reloading it.
12360		 */
12361 if (tee_enabled() && hevc->is_swap &&
12362 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12363 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12364 } else {
12365 if (hevc->mmu_enable)
12366 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12367 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12368 "h265_mmu", hevc->fw->data);
12369 else {
12370 if (!hevc->is_4k) {
12371					/* If an older fw package is installed, fall */
12372					/* back to the no-swap fw, because the old */
12373					/* package does not contain the swap fw. */
12374 loadr = amhevc_vdec_loadmc_ex(
12375 VFORMAT_HEVC, vdec,
12376 "hevc_mmu_swap",
12377 hevc->fw->data);
12378 if (loadr < 0)
12379 loadr = amhevc_vdec_loadmc_ex(
12380 VFORMAT_HEVC, vdec,
12381 "h265_mmu",
12382 hevc->fw->data);
12383 else
12384 hevc->is_swap = true;
12385 } else
12386 loadr = amhevc_vdec_loadmc_ex(
12387 VFORMAT_HEVC, vdec,
12388 "h265_mmu", hevc->fw->data);
12389 }
12390 else
12391 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12392 NULL, hevc->fw->data);
12393 if (loadr < 0) {
12394 amhevc_disable();
12395 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12396 tee_enabled() ? "TEE" : "local", loadr);
12397 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12398 vdec_schedule_work(&hevc->work);
12399 return;
12400 }
12401
12402 if (tee_enabled() && hevc->is_swap &&
12403 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12404 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12405#ifdef DETREFILL_ENABLE
12406 if (hevc->is_swap &&
12407 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12408 init_detrefill_buf(hevc);
12409#endif
12410 vdec->mc_loaded = 1;
12411 vdec->mc_type = VFORMAT_HEVC;
12412 }
12413 if (vh265_hw_ctx_restore(hevc) < 0) {
12414 vdec_schedule_work(&hevc->work);
12415 return;
12416 }
12417 vdec_enable_input(vdec);
12418
12419 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12420
12421 if (vdec_frame_based(vdec)) {
12422 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12423 r = hevc->chunk->size +
12424 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12425 hevc->decode_size = r;
12426 }
12427#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12428 else {
12429 if (vdec->master || vdec->slave)
12430 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12431 hevc->shift_byte_count_lo);
12432 }
12433#endif
12434 WRITE_VREG(HEVC_DECODE_SIZE, r);
12435 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12436 hevc->init_flag = 1;
12437
12438 if (hevc->pic_list_init_flag == 3)
12439 init_pic_list_hw(hevc);
12440
12441 backup_decode_state(hevc);
12442
12443 start_process_time(hevc);
12444 mod_timer(&hevc->timer, jiffies);
12445 hevc->stat |= STAT_TIMER_ARM;
12446 hevc->stat |= STAT_ISR_REG;
12447 amhevc_start();
12448 hevc->stat |= STAT_VDEC_RUN;
12449}
12450
12451static void aml_free_canvas(struct vdec_s *vdec)
12452{
12453 int i;
12454 struct hevc_state_s *hevc =
12455 (struct hevc_state_s *)vdec->private;
12456
12457 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12458 struct PIC_s *pic = hevc->m_PIC[i];
12459
12460 if (pic) {
12461 if (vdec->parallel_dec == 1) {
12462 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
12463 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
12464 }
12465 }
12466 }
12467}
12468
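/* vdec reset hook: stop the core and timer, release buffers and canvases,
 * and reinitialize the local decoder state.
 */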
12469static void reset(struct vdec_s *vdec)
12470{
12471 struct hevc_state_s *hevc =
12472 (struct hevc_state_s *)vdec->private;
12473 int i;
12474
12475 cancel_work_sync(&hevc->work);
12476 cancel_work_sync(&hevc->notify_work);
12477 if (hevc->stat & STAT_VDEC_RUN) {
12478 amhevc_stop();
12479 hevc->stat &= ~STAT_VDEC_RUN;
12480 }
12481
12482 if (hevc->stat & STAT_TIMER_ARM) {
12483 del_timer_sync(&hevc->timer);
12484 hevc->stat &= ~STAT_TIMER_ARM;
12485 }
12486 hevc->dec_result = DEC_RESULT_NONE;
12487 reset_process_time(hevc);
12488 hevc->init_flag = 0;
12489 hevc->pic_list_init_flag = 0;
12490 dealloc_mv_bufs(hevc);
12491 aml_free_canvas(vdec);
12492 hevc_local_uninit(hevc);
12493 if (vh265_local_init(hevc) < 0)
12494 pr_debug(" %s local init fail\n", __func__);
12495 for (i = 0; i < BUF_POOL_SIZE; i++) {
12496 hevc->m_BUF[i].start_adr = 0;
12497 }
12498
12499 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12500}
12501
12502static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12503{
12504 struct hevc_state_s *hevc =
12505 (struct hevc_state_s *)vdec->private;
12506
12507 return vh265_isr(0, hevc);
12508}
12509
12510static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12511{
12512 struct hevc_state_s *hevc =
12513 (struct hevc_state_s *)vdec->private;
12514
12515 return vh265_isr_thread_fn(0, hevc);
12516}
12517#endif
12518
12519static int amvdec_h265_probe(struct platform_device *pdev)
12520{
12521#ifdef MULTI_INSTANCE_SUPPORT
12522 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12523#else
12524 struct vdec_dev_reg_s *pdata =
12525 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12526#endif
12527 char *tmpbuf;
12528 int ret;
12529 struct hevc_state_s *hevc;
12530
12531 hevc = vmalloc(sizeof(struct hevc_state_s));
12532 if (hevc == NULL) {
12533 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12534 return -ENOMEM;
12535 }
12536 gHevc = hevc;
12537 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12538 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12539 H265_DEBUG_DIS_SYS_ERROR_PROC));
12540 memset(hevc, 0, sizeof(struct hevc_state_s));
12541 if (get_dbg_flag(hevc))
12542 hevc_print(hevc, 0, "%s\r\n", __func__);
12543 mutex_lock(&vh265_mutex);
12544
12545 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12546 (parser_sei_enable & 0x100) == 0)
12547 parser_sei_enable = 7; /*old 1*/
12548 hevc->m_ins_flag = 0;
12549 hevc->init_flag = 0;
12550 hevc->first_sc_checked = 0;
12551 hevc->uninit_list = 0;
12552 hevc->fatal_error = 0;
12553 hevc->show_frame_num = 0;
12554 hevc->frameinfo_enable = 1;
12555#ifdef MULTI_INSTANCE_SUPPORT
12556 hevc->platform_dev = pdev;
12557 platform_set_drvdata(pdev, pdata);
12558#endif
12559
12560 if (pdata == NULL) {
12561 hevc_print(hevc, 0,
12562 "\namvdec_h265 memory resource undefined.\n");
12563 vfree(hevc);
12564 mutex_unlock(&vh265_mutex);
12565 return -EFAULT;
12566 }
12567 if (mmu_enable_force == 0) {
12568 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12569 || double_write_mode == 0x10)
12570 hevc->mmu_enable = 0;
12571 else
12572 hevc->mmu_enable = 1;
12573 }
12574 if (init_mmu_buffers(hevc)) {
12575 hevc_print(hevc, 0,
12576 "\n 265 mmu init failed!\n");
12577 vfree(hevc);
12578 mutex_unlock(&vh265_mutex);
12579 return -EFAULT;
12580 }
12581
12582 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12583 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12584 if (ret < 0) {
12585 uninit_mmu_buffers(hevc);
12586 vfree(hevc);
12587 mutex_unlock(&vh265_mutex);
12588 return ret;
12589 }
12590 hevc->buf_size = work_buf_size;
12591
12592
12593 if (!vdec_secure(pdata)) {
12594 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12595 if (tmpbuf) {
12596 memset(tmpbuf, 0, work_buf_size);
12597 dma_sync_single_for_device(amports_get_dma_device(),
12598 hevc->buf_start,
12599 work_buf_size, DMA_TO_DEVICE);
12600 } else {
12601 tmpbuf = codec_mm_vmap(hevc->buf_start,
12602 work_buf_size);
12603 if (tmpbuf) {
12604 memset(tmpbuf, 0, work_buf_size);
12605 dma_sync_single_for_device(
12606 amports_get_dma_device(),
12607 hevc->buf_start,
12608 work_buf_size,
12609 DMA_TO_DEVICE);
12610 codec_mm_unmap_phyaddr(tmpbuf);
12611 }
12612 }
12613 }
12614
12615 if (get_dbg_flag(hevc)) {
12616 hevc_print(hevc, 0,
12617 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12618 hevc->buf_start, hevc->buf_size);
12619 }
12620
12621 if (pdata->sys_info)
12622 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12623 else {
12624 hevc->vh265_amstream_dec_info.width = 0;
12625 hevc->vh265_amstream_dec_info.height = 0;
12626 hevc->vh265_amstream_dec_info.rate = 30;
12627 }
12628#ifndef MULTI_INSTANCE_SUPPORT
12629 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12630 workaround_enable |= 3;
12631 hevc_print(hevc, 0,
12632 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
12633 } else
12634 workaround_enable &= ~3;
12635#endif
12636 hevc->cma_dev = pdata->cma_dev;
12637 vh265_vdec_info_init();
12638
12639#ifdef MULTI_INSTANCE_SUPPORT
12640 pdata->private = hevc;
12641 pdata->dec_status = vh265_dec_status;
12642 pdata->set_isreset = vh265_set_isreset;
12643 is_reset = 0;
12644 if (vh265_init(pdata) < 0) {
12645#else
12646 if (vh265_init(hevc) < 0) {
12647#endif
12648 hevc_print(hevc, 0,
12649 "\namvdec_h265 init failed.\n");
12650 hevc_local_uninit(hevc);
12651 uninit_mmu_buffers(hevc);
12652 vfree(hevc);
12653 pdata->dec_status = NULL;
12654 mutex_unlock(&vh265_mutex);
12655 return -ENODEV;
12656 }
12657 /*set the max clk for smooth playing...*/
12658 hevc_source_changed(VFORMAT_HEVC,
12659 3840, 2160, 60);
12660 mutex_unlock(&vh265_mutex);
12661
12662 return 0;
12663}
12664
12665static int amvdec_h265_remove(struct platform_device *pdev)
12666{
12667 struct hevc_state_s *hevc = gHevc;
12668
12669 if (get_dbg_flag(hevc))
12670 hevc_print(hevc, 0, "%s\r\n", __func__);
12671
12672 mutex_lock(&vh265_mutex);
12673
12674 vh265_stop(hevc);
12675
12676 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
12677
12678
12679#ifdef DEBUG_PTS
12680 hevc_print(hevc, 0,
12681 "pts missed %ld, pts hit %ld, duration %d\n",
12682 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
12683#endif
12684
12685 vfree(hevc);
12686 hevc = NULL;
12687 gHevc = NULL;
12688
12689 mutex_unlock(&vh265_mutex);
12690
12691 return 0;
12692}
12693/****************************************/
12694#ifdef CONFIG_PM
12695static int h265_suspend(struct device *dev)
12696{
12697 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
12698 return 0;
12699}
12700
12701static int h265_resume(struct device *dev)
12702{
12703 amhevc_resume(to_platform_device(dev));
12704 return 0;
12705}
12706
12707static const struct dev_pm_ops h265_pm_ops = {
12708 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
12709};
12710#endif
12711
12712static struct platform_driver amvdec_h265_driver = {
12713 .probe = amvdec_h265_probe,
12714 .remove = amvdec_h265_remove,
12715 .driver = {
12716 .name = DRIVER_NAME,
12717#ifdef CONFIG_PM
12718 .pm = &h265_pm_ops,
12719#endif
12720 }
12721};
12722
12723#ifdef MULTI_INSTANCE_SUPPORT
12724static void vh265_dump_state(struct vdec_s *vdec)
12725{
12726 int i;
12727 struct hevc_state_s *hevc =
12728 (struct hevc_state_s *)vdec->private;
12729 hevc_print(hevc, 0,
12730 "====== %s\n", __func__);
12731
12732 hevc_print(hevc, 0,
12733 "width/height (%d/%d), reorder_pic_num %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
12734 hevc->frame_width,
12735 hevc->frame_height,
12736 hevc->sps_num_reorder_pics_0,
12737 get_work_pic_num(hevc),
12738 hevc->video_signal_type_debug,
12739 hevc->is_swap
12740 );
12741
12742 hevc_print(hevc, 0,
12743 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
12744 input_frame_based(vdec),
12745 hevc->eos,
12746 hevc->dec_result,
12747 decode_frame_count[hevc->index],
12748 display_frame_count[hevc->index],
12749 run_count[hevc->index],
12750 not_run_ready[hevc->index],
12751 input_empty[hevc->index]
12752 );
12753
12754 if (vf_get_receiver(vdec->vf_provider_name)) {
12755 enum receviver_start_e state =
12756 vf_notify_receiver(vdec->vf_provider_name,
12757 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12758 NULL);
12759 hevc_print(hevc, 0,
12760 "\nreceiver(%s) state %d\n",
12761 vdec->vf_provider_name,
12762 state);
12763 }
12764
12765 hevc_print(hevc, 0,
12766 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
12767 __func__,
12768 kfifo_len(&hevc->newframe_q),
12769 VF_POOL_SIZE,
12770 kfifo_len(&hevc->display_q),
12771 VF_POOL_SIZE,
12772 hevc->vf_pre_count,
12773 hevc->vf_get_count,
12774 hevc->vf_put_count,
12775 hevc->pic_list_init_flag,
12776 is_new_pic_available(hevc)
12777 );
12778
12779 dump_pic_list(hevc);
12780
12781 for (i = 0; i < BUF_POOL_SIZE; i++) {
12782 hevc_print(hevc, 0,
12783 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12784 i,
12785 hevc->m_BUF[i].start_adr,
12786 hevc->m_BUF[i].size,
12787 hevc->m_BUF[i].used_flag);
12788 }
12789
12790 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12791 hevc_print(hevc, 0,
12792 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12793 i,
12794 hevc->m_mv_BUF[i].start_adr,
12795 hevc->m_mv_BUF[i].size,
12796 hevc->m_mv_BUF[i].used_flag);
12797 }
12798
12799 hevc_print(hevc, 0,
12800 "HEVC_DEC_STATUS_REG=0x%x\n",
12801 READ_VREG(HEVC_DEC_STATUS_REG));
12802 hevc_print(hevc, 0,
12803 "HEVC_MPC_E=0x%x\n",
12804 READ_VREG(HEVC_MPC_E));
12805 hevc_print(hevc, 0,
12806 "HEVC_DECODE_MODE=0x%x\n",
12807 READ_VREG(HEVC_DECODE_MODE));
12808 hevc_print(hevc, 0,
12809 "HEVC_DECODE_MODE2=0x%x\n",
12810 READ_VREG(HEVC_DECODE_MODE2));
12811 hevc_print(hevc, 0,
12812 "NAL_SEARCH_CTL=0x%x\n",
12813 READ_VREG(NAL_SEARCH_CTL));
12814 hevc_print(hevc, 0,
12815 "HEVC_PARSER_LCU_START=0x%x\n",
12816 READ_VREG(HEVC_PARSER_LCU_START));
12817 hevc_print(hevc, 0,
12818 "HEVC_DECODE_SIZE=0x%x\n",
12819 READ_VREG(HEVC_DECODE_SIZE));
12820 hevc_print(hevc, 0,
12821 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
12822 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
12823 hevc_print(hevc, 0,
12824 "HEVC_STREAM_START_ADDR=0x%x\n",
12825 READ_VREG(HEVC_STREAM_START_ADDR));
12826 hevc_print(hevc, 0,
12827 "HEVC_STREAM_END_ADDR=0x%x\n",
12828 READ_VREG(HEVC_STREAM_END_ADDR));
12829 hevc_print(hevc, 0,
12830 "HEVC_STREAM_LEVEL=0x%x\n",
12831 READ_VREG(HEVC_STREAM_LEVEL));
12832 hevc_print(hevc, 0,
12833 "HEVC_STREAM_WR_PTR=0x%x\n",
12834 READ_VREG(HEVC_STREAM_WR_PTR));
12835 hevc_print(hevc, 0,
12836 "HEVC_STREAM_RD_PTR=0x%x\n",
12837 READ_VREG(HEVC_STREAM_RD_PTR));
12838 hevc_print(hevc, 0,
12839 "PARSER_VIDEO_RP=0x%x\n",
12840 READ_PARSER_REG(PARSER_VIDEO_RP));
12841 hevc_print(hevc, 0,
12842 "PARSER_VIDEO_WP=0x%x\n",
12843 READ_PARSER_REG(PARSER_VIDEO_WP));
12844
12845 if (input_frame_based(vdec) &&
12846 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
12847 ) {
12848 int jj;
12849 if (hevc->chunk && hevc->chunk->block &&
12850 hevc->chunk->size > 0) {
12851 u8 *data = NULL;
12852 if (!hevc->chunk->block->is_mapped)
12853 data = codec_mm_vmap(hevc->chunk->block->start +
12854 hevc->chunk->offset, hevc->chunk->size);
12855 else
12856 data = ((u8 *)hevc->chunk->block->start_virt)
12857 + hevc->chunk->offset;
12858 hevc_print(hevc, 0,
12859 "frame data size 0x%x\n",
12860 hevc->chunk->size);
12861 for (jj = 0; jj < hevc->chunk->size; jj++) {
12862 if ((jj & 0xf) == 0)
12863 hevc_print(hevc,
12864 PRINT_FRAMEBASE_DATA,
12865 "%06x:", jj);
12866 hevc_print_cont(hevc,
12867 PRINT_FRAMEBASE_DATA,
12868 "%02x ", data[jj]);
12869 if (((jj + 1) & 0xf) == 0)
12870 hevc_print_cont(hevc,
12871 PRINT_FRAMEBASE_DATA,
12872 "\n");
12873 }
12874
12875 if (!hevc->chunk->block->is_mapped)
12876 codec_mm_unmap_phyaddr(data);
12877 }
12878 }
12879
12880}
12881
12882
12883static int ammvdec_h265_probe(struct platform_device *pdev)
12884{
12885
12886 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12887 struct hevc_state_s *hevc = NULL;
12888 int ret;
12889#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12890 int config_val;
12891#endif
12892 if (pdata == NULL) {
12893 pr_info("\nammvdec_h265 memory resource undefined.\n");
12894 return -EFAULT;
12895 }
12896
12897 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
12898 sizeof(struct hevc_state_s), GFP_KERNEL); */
12899 hevc = vmalloc(sizeof(struct hevc_state_s));
12900 if (hevc == NULL) {
12901 pr_info("\nammvdec_h265 device data allocation failed\n");
12902 return -ENOMEM;
12903 }
12904 memset(hevc, 0, sizeof(struct hevc_state_s));
12905
12906 /* the ctx from v4l2 driver. */
12907 hevc->v4l2_ctx = pdata->private;
12908
12909 pdata->private = hevc;
12910 pdata->dec_status = vh265_dec_status;
12911 /* pdata->set_trickmode = set_trickmode; */
12912 pdata->run_ready = run_ready;
12913 pdata->run = run;
12914 pdata->reset = reset;
12915 pdata->irq_handler = vh265_irq_cb;
12916 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
12917 pdata->dump_state = vh265_dump_state;
12918
12919 hevc->index = pdev->id;
12920 hevc->m_ins_flag = 1;
12921
12922 if (pdata->use_vfm_path) {
12923 snprintf(pdata->vf_provider_name,
12924 VDEC_PROVIDER_NAME_SIZE,
12925 VFM_DEC_PROVIDER_NAME);
12926 hevc->frameinfo_enable = 1;
12927 }
12928#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12929 else if (vdec_dual(pdata)) {
12930 struct hevc_state_s *hevc_pair = NULL;
12931
12932 if (dv_toggle_prov_name) /*debug purpose*/
12933 snprintf(pdata->vf_provider_name,
12934 VDEC_PROVIDER_NAME_SIZE,
12935 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
12936 VFM_DEC_DVEL_PROVIDER_NAME);
12937 else
12938 snprintf(pdata->vf_provider_name,
12939 VDEC_PROVIDER_NAME_SIZE,
12940 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
12941 VFM_DEC_DVBL_PROVIDER_NAME);
12942 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
12943 if (pdata->master)
12944 hevc_pair = (struct hevc_state_s *)
12945 pdata->master->private;
12946 else if (pdata->slave)
12947 hevc_pair = (struct hevc_state_s *)
12948 pdata->slave->private;
12949 if (hevc_pair)
12950 hevc->shift_byte_count_lo =
12951 hevc_pair->shift_byte_count_lo;
12952 }
12953#endif
12954 else
12955 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
12956 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
12957
12958 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
12959 &vh265_vf_provider, pdata);
12960
12961 hevc->provider_name = pdata->vf_provider_name;
12962 platform_set_drvdata(pdev, pdata);
12963
12964 hevc->platform_dev = pdev;
12965
12966 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
12967 pdata->config && pdata->config_len) {
12968#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12969		/*use ptr config for double_write_mode, etc*/
12970 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
12971
12972 if (get_config_int(pdata->config, "hevc_double_write_mode",
12973 &config_val) == 0)
12974 hevc->double_write_mode = config_val;
12975 else
12976 hevc->double_write_mode = double_write_mode;
12977
12978 if (get_config_int(pdata->config, "save_buffer_mode",
12979 &config_val) == 0)
12980 hevc->save_buffer_mode = config_val;
12981 else
12982 hevc->save_buffer_mode = 0;
12983
12984 /*use ptr config for max_pic_w, etc*/
12985 if (get_config_int(pdata->config, "hevc_buf_width",
12986 &config_val) == 0) {
12987 hevc->max_pic_w = config_val;
12988 }
12989 if (get_config_int(pdata->config, "hevc_buf_height",
12990 &config_val) == 0) {
12991 hevc->max_pic_h = config_val;
12992 }
12993
12994 if (get_config_int(pdata->config,
12995 "parm_v4l_codec_enable",
12996 &config_val) == 0)
12997 hevc->is_used_v4l = config_val;
12998
12999 if (get_config_int(pdata->config,
13000 "parm_v4l_buffer_margin",
13001 &config_val) == 0)
13002 hevc->dynamic_buf_num_margin = config_val;
13003
13004 if (get_config_int(pdata->config,
13005 "parm_v4l_canvas_mem_mode",
13006 &config_val) == 0)
13007 hevc->mem_map_mode = config_val;
13008#endif
13009 } else {
13010 if (pdata->sys_info)
13011 hevc->vh265_amstream_dec_info = *pdata->sys_info;
13012 else {
13013 hevc->vh265_amstream_dec_info.width = 0;
13014 hevc->vh265_amstream_dec_info.height = 0;
13015 hevc->vh265_amstream_dec_info.rate = 30;
13016 }
13017 hevc->double_write_mode = double_write_mode;
13018 }
13019 if (!hevc->is_used_v4l) {
13020 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
13021			hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
13022 else
13023 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
13024
13025 hevc->mem_map_mode = mem_map_mode;
13026 }
13027
13028 if (mmu_enable_force == 0) {
13029 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
13030 hevc->mmu_enable = 0;
13031 else
13032 hevc->mmu_enable = 1;
13033 }
13034
13035 if (init_mmu_buffers(hevc) < 0) {
13036 hevc_print(hevc, 0,
13037 "\n 265 mmu init failed!\n");
13038 mutex_unlock(&vh265_mutex);
13039 /* devm_kfree(&pdev->dev, (void *)hevc);*/
13040 if (hevc)
13041 vfree((void *)hevc);
13042 pdata->dec_status = NULL;
13043 return -EFAULT;
13044 }
13045#if 0
13046 hevc->buf_start = pdata->mem_start;
13047 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
13048#else
13049
13050 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
13051 BMMU_WORKSPACE_ID, work_buf_size,
13052 DRIVER_NAME, &hevc->buf_start);
13053 if (ret < 0) {
13054 uninit_mmu_buffers(hevc);
13055 /* devm_kfree(&pdev->dev, (void *)hevc); */
13056 if (hevc)
13057 vfree((void *)hevc);
13058 pdata->dec_status = NULL;
13059 mutex_unlock(&vh265_mutex);
13060 return ret;
13061 }
13062 hevc->buf_size = work_buf_size;
13063#endif
13064 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
13065 (parser_sei_enable & 0x100) == 0)
13066 parser_sei_enable = 7;
13067 hevc->init_flag = 0;
13068 hevc->first_sc_checked = 0;
13069 hevc->uninit_list = 0;
13070 hevc->fatal_error = 0;
13071 hevc->show_frame_num = 0;
13072
13073 /*
13074 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
13075 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
13076 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
13077 */
13078 if (get_dbg_flag(hevc)) {
13079 hevc_print(hevc, 0,
13080 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
13081 hevc->buf_start, hevc->buf_size);
13082 }
13083
13084 hevc_print(hevc, 0,
13085 "dynamic_buf_num_margin=%d\n",
13086 hevc->dynamic_buf_num_margin);
13087 hevc_print(hevc, 0,
13088 "double_write_mode=%d\n",
13089 hevc->double_write_mode);
13090
13091 hevc->cma_dev = pdata->cma_dev;
13092
13093 if (vh265_init(pdata) < 0) {
13094 hevc_print(hevc, 0,
13095 "\namvdec_h265 init failed.\n");
13096 hevc_local_uninit(hevc);
13097 uninit_mmu_buffers(hevc);
13098 /* devm_kfree(&pdev->dev, (void *)hevc); */
13099 if (hevc)
13100 vfree((void *)hevc);
13101 pdata->dec_status = NULL;
13102 return -ENODEV;
13103 }
13104
13105 vdec_set_prepare_level(pdata, start_decode_buf_level);
13106
13107 /*set the max clk for smooth playing...*/
13108 hevc_source_changed(VFORMAT_HEVC,
13109 3840, 2160, 60);
13110 if (pdata->parallel_dec == 1)
13111 vdec_core_request(pdata, CORE_MASK_HEVC);
13112 else
13113 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
13114 | CORE_MASK_COMBINE);
13115
13116 return 0;
13117}
13118
13119static int ammvdec_h265_remove(struct platform_device *pdev)
13120{
13121 struct hevc_state_s *hevc =
13122 (struct hevc_state_s *)
13123 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
13124 struct vdec_s *vdec = hw_to_vdec(hevc);
13125
13126 if (hevc == NULL)
13127 return 0;
13128
13129 if (get_dbg_flag(hevc))
13130 hevc_print(hevc, 0, "%s\r\n", __func__);
13131
13132 vmh265_stop(hevc);
13133
13134 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
13135 if (vdec->parallel_dec == 1)
13136 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
13137 else
13138		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
13139
13140 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
13141
13142 vfree((void *)hevc);
13143 return 0;
13144}
13145
13146static struct platform_driver ammvdec_h265_driver = {
13147 .probe = ammvdec_h265_probe,
13148 .remove = ammvdec_h265_remove,
13149 .driver = {
13150 .name = MULTI_DRIVER_NAME,
13151#ifdef CONFIG_PM
13152 .pm = &h265_pm_ops,
13153#endif
13154 }
13155};
13156#endif
13157
13158static struct codec_profile_t amvdec_h265_profile = {
13159 .name = "hevc",
13160 .profile = ""
13161};
13162
13163static struct codec_profile_t amvdec_h265_profile_single,
13164 amvdec_h265_profile_mult;
13165
13166static struct mconfig h265_configs[] = {
13167 MC_PU32("use_cma", &use_cma),
13168 MC_PU32("bit_depth_luma", &bit_depth_luma),
13169 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
13170 MC_PU32("video_signal_type", &video_signal_type),
13171#ifdef ERROR_HANDLE_DEBUG
13172 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
13173 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
13174#endif
13175 MC_PU32("radr", &radr),
13176 MC_PU32("rval", &rval),
13177 MC_PU32("dbg_cmd", &dbg_cmd),
13178 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
13179 MC_PU32("endian", &endian),
13180 MC_PU32("step", &step),
13181 MC_PU32("udebug_flag", &udebug_flag),
13182 MC_PU32("decode_pic_begin", &decode_pic_begin),
13183 MC_PU32("slice_parse_begin", &slice_parse_begin),
13184 MC_PU32("nal_skip_policy", &nal_skip_policy),
13185 MC_PU32("i_only_flag", &i_only_flag),
13186 MC_PU32("error_handle_policy", &error_handle_policy),
13187 MC_PU32("error_handle_threshold", &error_handle_threshold),
13188 MC_PU32("error_handle_nal_skip_threshold",
13189 &error_handle_nal_skip_threshold),
13190 MC_PU32("error_handle_system_threshold",
13191 &error_handle_system_threshold),
13192 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
13193 MC_PU32("debug", &debug),
13194 MC_PU32("debug_mask", &debug_mask),
13195 MC_PU32("buffer_mode", &buffer_mode),
13196 MC_PU32("double_write_mode", &double_write_mode),
13197 MC_PU32("buf_alloc_width", &buf_alloc_width),
13198 MC_PU32("buf_alloc_height", &buf_alloc_height),
13199 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
13200 MC_PU32("max_buf_num", &max_buf_num),
13201 MC_PU32("buf_alloc_size", &buf_alloc_size),
13202 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
13203 MC_PU32("mem_map_mode", &mem_map_mode),
13204 MC_PU32("enable_mem_saving", &enable_mem_saving),
13205 MC_PU32("force_w_h", &force_w_h),
13206 MC_PU32("force_fps", &force_fps),
13207 MC_PU32("max_decoding_time", &max_decoding_time),
13208 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
13209 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
13210 MC_PU32("interlace_enable", &interlace_enable),
13211 MC_PU32("pts_unstable", &pts_unstable),
13212 MC_PU32("parser_sei_enable", &parser_sei_enable),
13213 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
13214 MC_PU32("decode_timeout_val", &decode_timeout_val),
13215#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13216 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
13217 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
13218 MC_PU32("dv_debug", &dv_debug),
13219#endif
13220};
13221static struct mconfig_node decoder_265_node;
13222
13223static int __init amvdec_h265_driver_init_module(void)
13224{
13225 struct BuffInfo_s *p_buf_info;
13226
13227 if (vdec_is_support_4k()) {
13228 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
13229 p_buf_info = &amvh265_workbuff_spec[2];
13230 else
13231 p_buf_info = &amvh265_workbuff_spec[1];
13232 } else
13233 p_buf_info = &amvh265_workbuff_spec[0];
13234
13235 init_buff_spec(NULL, p_buf_info);
13236 work_buf_size =
13237 (p_buf_info->end_adr - p_buf_info->start_adr
13238 + 0xffff) & (~0xffff);
13239
13240 pr_debug("amvdec_h265 module init\n");
13241 error_handle_policy = 0;
13242
13243#ifdef ERROR_HANDLE_DEBUG
13244 dbg_nal_skip_flag = 0;
13245 dbg_nal_skip_count = 0;
13246#endif
13247 udebug_flag = 0;
13248 decode_pic_begin = 0;
13249 slice_parse_begin = 0;
13250 step = 0;
13251 buf_alloc_size = 0;
13252
13253#ifdef MULTI_INSTANCE_SUPPORT
13254 if (platform_driver_register(&ammvdec_h265_driver))
13255 pr_err("failed to register ammvdec_h265 driver\n");
13256
13257#endif
13258 if (platform_driver_register(&amvdec_h265_driver)) {
13259 pr_err("failed to register amvdec_h265 driver\n");
13260 return -ENODEV;
13261 }
13262#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
13263 if (!has_hevc_vdec()) {
13264 /* not support hevc */
13265 amvdec_h265_profile.name = "hevc_unsupport";
13266 }
13267 if (vdec_is_support_4k()) {
13268 if (is_meson_m8m2_cpu()) {
13269 /* m8m2 support 4k */
13270 amvdec_h265_profile.profile = "4k";
13271 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
13272 amvdec_h265_profile.profile =
13273 "8k, 8bit, 10bit, dwrite, compressed";
13274		} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
13275 amvdec_h265_profile.profile =
13276 "4k, 8bit, 10bit, dwrite, compressed";
13277 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
13278 amvdec_h265_profile.profile = "4k";
13279 }
13280#endif
13281 if (codec_mm_get_total_size() < 80 * SZ_1M) {
13282 pr_info("amvdec_h265 default mmu enabled.\n");
13283 mmu_enable = 1;
13284 }
13285
13286 vcodec_profile_register(&amvdec_h265_profile);
13287 amvdec_h265_profile_single = amvdec_h265_profile;
13288 amvdec_h265_profile_single.name = "h265";
13289 vcodec_profile_register(&amvdec_h265_profile_single);
13290 amvdec_h265_profile_mult = amvdec_h265_profile;
13291 amvdec_h265_profile_mult.name = "mh265";
13292 vcodec_profile_register(&amvdec_h265_profile_mult);
13293 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
13294 "h265", h265_configs, CONFIG_FOR_RW);
13295 return 0;
13296}
13297
13298static void __exit amvdec_h265_driver_remove_module(void)
13299{
13300 pr_debug("amvdec_h265 module remove.\n");
13301
13302#ifdef MULTI_INSTANCE_SUPPORT
13303 platform_driver_unregister(&ammvdec_h265_driver);
13304#endif
13305 platform_driver_unregister(&amvdec_h265_driver);
13306}
13307
13308/****************************************/
13309/*
13310 *module_param(stat, uint, 0664);
13311 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
13312 */
13313module_param(use_cma, uint, 0664);
13314MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13315
13316module_param(bit_depth_luma, uint, 0664);
13317MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13318
13319module_param(bit_depth_chroma, uint, 0664);
13320MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13321
13322module_param(video_signal_type, uint, 0664);
13323MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13324
13325#ifdef ERROR_HANDLE_DEBUG
13326module_param(dbg_nal_skip_flag, uint, 0664);
13327MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13328
13329module_param(dbg_nal_skip_count, uint, 0664);
13330MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13331#endif
13332
13333module_param(radr, uint, 0664);
13334MODULE_PARM_DESC(radr, "\n radr\n");
13335
13336module_param(rval, uint, 0664);
13337MODULE_PARM_DESC(rval, "\n rval\n");
13338
13339module_param(dbg_cmd, uint, 0664);
13340MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13341
13342module_param(dump_nal, uint, 0664);
13343MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13344
13345module_param(dbg_skip_decode_index, uint, 0664);
13346MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13347
13348module_param(endian, uint, 0664);
13349MODULE_PARM_DESC(endian, "\n endian\n");
13350
13351module_param(step, uint, 0664);
13352MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13353
13354module_param(decode_pic_begin, uint, 0664);
13355MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13356
13357module_param(slice_parse_begin, uint, 0664);
13358MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13359
13360module_param(nal_skip_policy, uint, 0664);
13361MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
13362
13363module_param(i_only_flag, uint, 0664);
13364MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
13365
13366module_param(fast_output_enable, uint, 0664);
13367MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
13368
13369module_param(error_handle_policy, uint, 0664);
13370MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
13371
13372module_param(error_handle_threshold, uint, 0664);
13373MODULE_PARM_DESC(error_handle_threshold,
13374 "\n amvdec_h265 error_handle_threshold\n");
13375
13376module_param(error_handle_nal_skip_threshold, uint, 0664);
13377MODULE_PARM_DESC(error_handle_nal_skip_threshold,
13378 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
13379
13380module_param(error_handle_system_threshold, uint, 0664);
13381MODULE_PARM_DESC(error_handle_system_threshold,
13382 "\n amvdec_h265 error_handle_system_threshold\n");
13383
13384module_param(error_skip_nal_count, uint, 0664);
13385MODULE_PARM_DESC(error_skip_nal_count,
13386 "\n amvdec_h265 error_skip_nal_count\n");
13387
13388module_param(debug, uint, 0664);
13389MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
13390
13391module_param(debug_mask, uint, 0664);
13392MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
13393
13394module_param(log_mask, uint, 0664);
13395MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
13396
13397module_param(buffer_mode, uint, 0664);
13398MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
13399
13400module_param(double_write_mode, uint, 0664);
13401MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
13402
13403module_param(buf_alloc_width, uint, 0664);
13404MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
13405
13406module_param(buf_alloc_height, uint, 0664);
13407MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
13408
13409module_param(dynamic_buf_num_margin, uint, 0664);
13410MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
13411
13412module_param(max_buf_num, uint, 0664);
13413MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
13414
13415module_param(buf_alloc_size, uint, 0664);
13416MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
13417
13418#ifdef CONSTRAIN_MAX_BUF_NUM
13419module_param(run_ready_max_vf_only_num, uint, 0664);
13420MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");
13421
13422module_param(run_ready_display_q_num, uint, 0664);
13423MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");
13424
13425module_param(run_ready_max_buf_num, uint, 0664);
13426MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
13427#endif
13428
13429#if 0
13430module_param(re_config_pic_flag, uint, 0664);
13431MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
13432#endif
13433
13434module_param(buffer_mode_dbg, uint, 0664);
13435MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");
13436
13437module_param(mem_map_mode, uint, 0664);
13438MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");
13439
13440module_param(enable_mem_saving, uint, 0664);
13441MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");
13442
13443module_param(force_w_h, uint, 0664);
13444MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
13445
13446module_param(force_fps, uint, 0664);
13447MODULE_PARM_DESC(force_fps, "\n force_fps\n");
13448
13449module_param(max_decoding_time, uint, 0664);
13450MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");
13451
13452module_param(prefix_aux_buf_size, uint, 0664);
13453MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");
13454
13455module_param(suffix_aux_buf_size, uint, 0664);
13456MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");
13457
13458module_param(interlace_enable, uint, 0664);
13459MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
13460module_param(pts_unstable, uint, 0664);
13461MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
13462module_param(parser_sei_enable, uint, 0664);
13463MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");
13464
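/*
 * Dolby Vision related tunables; compiled in only when
 * CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION is enabled.
 */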
13465#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13466module_param(parser_dolby_vision_enable, uint, 0664);
13467MODULE_PARM_DESC(parser_dolby_vision_enable,
13468 "\n parser_dolby_vision_enable\n");
13469
13470module_param(dolby_meta_with_el, uint, 0664);
13471MODULE_PARM_DESC(dolby_meta_with_el,
13472 "\n dolby_meta_with_el\n");
13473
13474module_param(dolby_el_flush_th, uint, 0664);
13475MODULE_PARM_DESC(dolby_el_flush_th,
13476 "\n dolby_el_flush_th\n");
13477#endif
13478module_param(mmu_enable, uint, 0664);
13479MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");
13480
13481module_param(mmu_enable_force, uint, 0664);
13482MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");
13483
13484#ifdef MULTI_INSTANCE_SUPPORT
13485module_param(start_decode_buf_level, int, 0664);
13486MODULE_PARM_DESC(start_decode_buf_level,
13487 "\n h265 start_decode_buf_level\n");
13488
13489module_param(decode_timeout_val, uint, 0664);
13490MODULE_PARM_DESC(decode_timeout_val,
13491 "\n h265 decode_timeout_val\n");
13492
13493module_param(data_resend_policy, uint, 0664);
13494MODULE_PARM_DESC(data_resend_policy,
13495 "\n h265 data_resend_policy\n");
13496
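/*
 * Per-instance statistics/tuning arrays for multi-instance decoding:
 * one entry per decoder instance, with max_decode_instance_num bounding
 * and reporting how many entries are in use.
 */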
13497module_param_array(decode_frame_count, uint,
13498 &max_decode_instance_num, 0664);
13499
13500module_param_array(display_frame_count, uint,
13501 &max_decode_instance_num, 0664);
13502
13503module_param_array(max_process_time, uint,
13504 &max_decode_instance_num, 0664);
13505
13506module_param_array(max_get_frame_interval,
13507 uint, &max_decode_instance_num, 0664);
13508
13509module_param_array(run_count, uint,
13510 &max_decode_instance_num, 0664);
13511
13512module_param_array(input_empty, uint,
13513 &max_decode_instance_num, 0664);
13514
13515module_param_array(not_run_ready, uint,
13516 &max_decode_instance_num, 0664);
13517
13518module_param_array(ref_frame_mark_flag, uint,
13519 &max_decode_instance_num, 0664);
13520
13521#endif
13522#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13523module_param(dv_toggle_prov_name, uint, 0664);
13524MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");
13525
13526module_param(dv_debug, uint, 0664);
13527MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");
13528
13529module_param(force_bypass_dvenl, uint, 0664);
13530MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
13531#endif
13532
13533#ifdef AGAIN_HAS_THRESHOLD
13534module_param(again_threshold, uint, 0664);
13535MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
13536#endif
13537
13538module_param(force_disp_pic_index, int, 0664);
13539MODULE_PARM_DESC(force_disp_pic_index,
13540 "\n amvdec_h265 force_disp_pic_index\n");
13541
13542module_param(frmbase_cont_bitlevel, uint, 0664);
13543MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");
13544
13545module_param(udebug_flag, uint, 0664);
13546MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
13547
13548module_param(udebug_pause_pos, uint, 0664);
13549MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
13550
13551module_param(udebug_pause_val, uint, 0664);
13552MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
13553
13554module_param(pre_decode_buf_level, int, 0664);
13555MODULE_PARM_DESC(pre_decode_buf_level, "\n ammvdec_h265 pre_decode_buf_level\n");
13556
13557module_param(udebug_pause_decode_idx, uint, 0664);
13558MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
13559
13560module_param(disp_vframe_valve_level, uint, 0664);
13561MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");
13562
13563module_param(pic_list_debug, uint, 0664);
13564MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");
13565
13566module_param(without_display_mode, uint, 0664);
13567MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");
13568
13569module_init(amvdec_h265_driver_init_module);
13570module_exit(amvdec_h265_driver_remove_module);
13571
13572MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
13573MODULE_LICENSE("GPL");
13574MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");
13575