1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../../../amvdec_ports/vdec_drv_base.h"
50
51#define CONSTRAIN_MAX_BUF_NUM
52
53#define SWAP_HEVC_UCODE
54#define DETREFILL_ENABLE
55
56#define AGAIN_HAS_THRESHOLD
57/*#define TEST_NO_BUF*/
58#define HEVC_PIC_STRUCT_SUPPORT
59#define MULTI_INSTANCE_SUPPORT
60#define USE_UNINIT_SEMA
61
62 /* .buf_size = 0x100000*16,
63 //4k2k , 0x100000 per buffer */
64 /* 4096x2304 , 0x120000 per buffer */
65#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
66#define MPRED_4K_MV_BUF_SIZE (0x120000)
67#define MPRED_MV_BUF_SIZE (0x40000)
68
69#define MMU_COMPRESS_HEADER_SIZE 0x48000
70#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
71
72#define MAX_FRAME_4K_NUM 0x1200
73#define MAX_FRAME_8K_NUM (0x1200*4)
74
75//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
76#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
77
78#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
79
80#define HEVC_CM_HEADER_START_ADDR 0x3628
81#define HEVC_SAO_MMU_VH1_ADDR 0x363b
82#define HEVC_SAO_MMU_VH0_ADDR 0x363a
83
84#define HEVC_DBLK_CFGB 0x350b
85#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
86#define SWAP_HEVC_OFFSET (3 * 0x1000)
87
88#define MEM_NAME "codec_265"
89/* #include <mach/am_regs.h> */
90#include <linux/amlogic/media/utils/vdec_reg.h>
91
92#include "../utils/vdec.h"
93#include "../utils/amvdec.h"
94#include <linux/amlogic/media/video_sink/video.h>
95#include <linux/amlogic/media/codec_mm/configs.h>
96
97#define SEND_LMEM_WITH_RPM
98#define SUPPORT_10BIT
99/* #define ERROR_HANDLE_DEBUG */
100
101#ifndef STAT_KTHREAD
102#define STAT_KTHREAD 0x40
103#endif
104
105#ifdef MULTI_INSTANCE_SUPPORT
106#define MAX_DECODE_INSTANCE_NUM 9
107#define MULTI_DRIVER_NAME "ammvdec_h265"
108#endif
109#define DRIVER_NAME "amvdec_h265"
110#define MODULE_NAME "amvdec_h265"
111#define DRIVER_HEADER_NAME "amvdec_h265_header"
112
113#define PUT_INTERVAL (HZ/100)
114#define ERROR_SYSTEM_RESET_COUNT 200
115
116#define PTS_NORMAL 0
117#define PTS_NONE_REF_USE_DURATION 1
118
119#define PTS_MODE_SWITCHING_THRESHOLD 3
120#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
121
122#define DUR2PTS(x) ((x)*90/96)
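/* Worked example (illustrative, assuming frame_dur is kept in 1/96000 s
 * units as elsewhere in amports while PTS runs at 90 kHz): a 30 fps
 * duration of 3200 gives DUR2PTS(3200) = 3200 * 90 / 96 = 3000 ticks,
 * i.e. 90000/30.
 */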
123
124#define MAX_SIZE_8K (8192 * 4608)
125#define MAX_SIZE_4K (4096 * 2304)
126
127#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
128#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
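/* e.g. 4096x2304 equals MAX_SIZE_4K exactly, so IS_8K_SIZE() is false
 * for it while IS_4K_SIZE() is true (it is larger than 1920x1088).
 */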
129
130#define SEI_UserDataITU_T_T35 4
131
132static struct semaphore h265_sema;
133
134struct hevc_state_s;
135static int hevc_print(struct hevc_state_s *hevc,
136 int debug_flag, const char *fmt, ...);
137static int hevc_print_cont(struct hevc_state_s *hevc,
138 int debug_flag, const char *fmt, ...);
139static int vh265_vf_states(struct vframe_states *states, void *);
140static struct vframe_s *vh265_vf_peek(void *);
141static struct vframe_s *vh265_vf_get(void *);
142static void vh265_vf_put(struct vframe_s *, void *);
143static int vh265_event_cb(int type, void *data, void *private_data);
144
145static int vh265_stop(struct hevc_state_s *hevc);
146#ifdef MULTI_INSTANCE_SUPPORT
147static int vmh265_stop(struct hevc_state_s *hevc);
148static s32 vh265_init(struct vdec_s *vdec);
149static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
150static void reset_process_time(struct hevc_state_s *hevc);
151static void start_process_time(struct hevc_state_s *hevc);
152static void restart_process_time(struct hevc_state_s *hevc);
153static void timeout_process(struct hevc_state_s *hevc);
154#else
155static s32 vh265_init(struct hevc_state_s *hevc);
156#endif
157static void vh265_prot_init(struct hevc_state_s *hevc);
158static int vh265_local_init(struct hevc_state_s *hevc);
159static void vh265_check_timer_func(unsigned long arg);
160static void config_decode_mode(struct hevc_state_s *hevc);
161
162static const char vh265_dec_id[] = "vh265-dev";
163
164#define PROVIDER_NAME "decoder.h265"
165#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
166
167static const struct vframe_operations_s vh265_vf_provider = {
168 .peek = vh265_vf_peek,
169 .get = vh265_vf_get,
170 .put = vh265_vf_put,
171 .event_cb = vh265_event_cb,
172 .vf_states = vh265_vf_states,
173};
174
175static struct vframe_provider_s vh265_vf_prov;
176
177static u32 bit_depth_luma;
178static u32 bit_depth_chroma;
179static u32 video_signal_type;
180
181static int start_decode_buf_level = 0x8000;
182
183static unsigned int decode_timeout_val = 200;
184
185/*data_resend_policy:
186 bit 0, for stream-based input, resend data when the decoding buffer is empty

187*/
188static u32 data_resend_policy = 1;
189
190#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
191/*
192static const char * const video_format_names[] = {
193 "component", "PAL", "NTSC", "SECAM",
194 "MAC", "unspecified", "unspecified", "unspecified"
195};
196
197static const char * const color_primaries_names[] = {
198 "unknown", "bt709", "undef", "unknown",
199 "bt470m", "bt470bg", "smpte170m", "smpte240m",
200 "film", "bt2020"
201};
202
203static const char * const transfer_characteristics_names[] = {
204 "unknown", "bt709", "undef", "unknown",
205 "bt470m", "bt470bg", "smpte170m", "smpte240m",
206 "linear", "log100", "log316", "iec61966-2-4",
207 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
208 "smpte-st-2084", "smpte-st-428"
209};
210
211static const char * const matrix_coeffs_names[] = {
212 "GBR", "bt709", "undef", "unknown",
213 "fcc", "bt470bg", "smpte170m", "smpte240m",
214 "YCgCo", "bt2020nc", "bt2020c"
215};
216*/
217#ifdef SUPPORT_10BIT
218#define HEVC_CM_BODY_START_ADDR 0x3626
219#define HEVC_CM_BODY_LENGTH 0x3627
220#define HEVC_CM_HEADER_LENGTH 0x3629
221#define HEVC_CM_HEADER_OFFSET 0x362b
222#define HEVC_SAO_CTRL9 0x362d
223#define LOSLESS_COMPRESS_MODE
224/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
225/* double_write_mode:
226 * 0, no double write;
227 * 1, 1:1 ratio;
228 * 2, (1/4):(1/4) ratio;
229 * 3, (1/4):(1/4) ratio, with the compressed frame also included
230 * 4, (1/2):(1/2) ratio;
231 * 0x10, double write only
232 * 0x100, if > 1080p, use mode 4, else use mode 1;
233 * 0x200, if > 1080p, use mode 2, else use mode 1;
234 * 0x300, if > 720p, use mode 4, else use mode 1;
235 */
236static u32 double_write_mode;
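/* Illustrative reading of the adaptive modes above (resolved in
 * get_double_write_mode() further below): with double_write_mode = 0x100
 * a 3840x2160 stream uses mode 4 and a 1920x1080 stream uses mode 1,
 * while 0x200 picks mode 2 instead of mode 4 for the >1080p case.
 */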
237
238/*#define DECOMP_HEADR_SURGENT*/
239
240static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
241static u32 enable_mem_saving = 1;
242static u32 workaround_enable;
243static u32 force_w_h;
244#endif
245static u32 force_fps;
246static u32 pts_unstable;
247#define H265_DEBUG_BUFMGR 0x01
248#define H265_DEBUG_BUFMGR_MORE 0x02
249#define H265_DEBUG_DETAIL 0x04
250#define H265_DEBUG_REG 0x08
251#define H265_DEBUG_MAN_SEARCH_NAL 0x10
252#define H265_DEBUG_MAN_SKIP_NAL 0x20
253#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
254#define H265_DEBUG_FORCE_CLK 0x80
255#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
256#define H265_DEBUG_NO_DISPLAY 0x200
257#define H265_DEBUG_DISCARD_NAL 0x400
258#define H265_DEBUG_OUT_PTS 0x800
259#define H265_DEBUG_DUMP_PIC_LIST 0x1000
260#define H265_DEBUG_PRINT_SEI 0x2000
261#define H265_DEBUG_PIC_STRUCT 0x4000
262#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
263#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
264#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
265#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
266#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
267#define H265_DEBUG_HW_RESET 0x100000
268#define H265_CFG_CANVAS_IN_DECODE 0x200000
269#define H265_DEBUG_DV 0x400000
270#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
271#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
272#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
273#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
274#ifdef MULTI_INSTANCE_SUPPORT
275#define PRINT_FLAG_ERROR 0x0
276#define IGNORE_PARAM_FROM_CONFIG 0x08000000
277#define PRINT_FRAMEBASE_DATA 0x10000000
278#define PRINT_FLAG_VDEC_STATUS 0x20000000
279#define PRINT_FLAG_VDEC_DETAIL 0x40000000
280#define PRINT_FLAG_V4L_DETAIL 0x80000000
281#endif
282
283#define BUF_POOL_SIZE 32
284#define MAX_BUF_NUM 24
285#define MAX_REF_PIC_NUM 24
286#define MAX_REF_ACTIVE 16
287
288#ifdef MV_USE_FIXED_BUF
289#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
290#define VF_BUFFER_IDX(n) (n)
291#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
292#else
293#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
294#define VF_BUFFER_IDX(n) (n)
295#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
296#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
297#endif
298
299#define HEVC_MV_INFO 0x310d
300#define HEVC_QP_INFO 0x3137
301#define HEVC_SKIP_INFO 0x3136
302
303const u32 h265_version = 201602101;
304static u32 debug_mask = 0xffffffff;
305static u32 log_mask;
306static u32 debug;
307static u32 radr;
308static u32 rval;
309static u32 dbg_cmd;
310static u32 dump_nal;
311static u32 dbg_skip_decode_index;
312static u32 endian = 0xff0;
313#ifdef ERROR_HANDLE_DEBUG
314static u32 dbg_nal_skip_flag;
315 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
316static u32 dbg_nal_skip_count;
317#endif
318/*for debug*/
319/*
320 udebug_flag:
321 bit 0, enable ucode print
322 bit 1, enable ucode detail print
323 bit [31:16] not 0, pos to dump lmem
324 bit 2, pop bits to lmem
325 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
326*/
327static u32 udebug_flag;
328/*
329 when udebug_flag[1:0] is not 0 and
330 udebug_pause_pos is not 0,
331 udebug_pause_pos is the pause position
332*/
333static u32 udebug_pause_pos;
334/*
335 when udebug_flag[1:0] is not 0
336 and udebug_pause_pos is not 0,
337 pause only when DEBUG_REG2 is equal to this val
338*/
339static u32 udebug_pause_val;
340
341static u32 udebug_pause_decode_idx;
342
343static u32 decode_pic_begin;
344static uint slice_parse_begin;
345static u32 step;
346static bool is_reset;
347
348#ifdef CONSTRAIN_MAX_BUF_NUM
349static u32 run_ready_max_vf_only_num;
350static u32 run_ready_display_q_num;
 351 /*0: do not check
 352 0xff: use work_pic_num as the limit
353 */
354static u32 run_ready_max_buf_num = 0xff;
355#endif
356
357static u32 dynamic_buf_num_margin = 7;
358static u32 buf_alloc_width;
359static u32 buf_alloc_height;
360
361static u32 max_buf_num = 16;
362static u32 buf_alloc_size;
363/*static u32 re_config_pic_flag;*/
364/*
365 *bit[0]: 0,
366 *bit[1]: 0, always release cma buffer when stop
367 *bit[1]: 1, never release cma buffer when stop
368 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
369 *do not release cma buffer if blackout is not 1
370 *
371 *bit[2]: 0, when start decoding, check current displayed buffer
372 * (only for buffer decoded by h265) if blackout is 0
373 * 1, do not check current displayed buffer
374 *
375 *bit[3]: 1, if blackout is not 1, do not release current
376 * displayed cma buffer always.
377 */
378/* set to 1 for fast play;
379 * set to 8 for other case of "keep last frame"
380 */
381static u32 buffer_mode = 1;
382
383/* buffer_mode_dbg: debug only*/
384static u32 buffer_mode_dbg = 0xffff0000;
385/**/
386/*
387 *bit[1:0]PB_skip_mode: 0, start decoding at begin;
388 *1, start decoding after first I;
389 *2, only decode and display non-error pictures;
390 *3, start decoding and display after IDR, etc.
391 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
392 *only for mode 0 and 1.
393 */
394static u32 nal_skip_policy = 2;
395
396/*
397 *bit 0, 1: only display I picture;
398 *bit 1, 1: only decode I picture;
399 */
400static u32 i_only_flag;
401
402/*
403bit 0, fast output first I picture
404*/
405static u32 fast_output_enable = 1;
406
407static u32 frmbase_cont_bitlevel = 0x60;
408
409/*
410use_cma: 1, use both reserved memory and cma for buffers
4112, only use cma for buffers
412*/
413static u32 use_cma = 2;
414
415#define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
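/* e.g. AUX_BUF_ALIGN(0x1001) = (0x1001 + 0xf) & ~0xf = 0x1010, i.e.
 * aux buffer addresses are rounded up to the next 16-byte boundary.
 */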
416static u32 prefix_aux_buf_size = (16 * 1024);
417static u32 suffix_aux_buf_size;
418
419static u32 max_decoding_time;
420/*
421 *error handling
422 */
423/*error_handle_policy:
424 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
425 *1, skip error_skip_nal_count nals before error recovery;
426 *bit 1 (valid only when bit0 == 1):
427 *1, wait vps/sps/pps after error recovery;
428 *bit 2 (valid only when bit0 == 0):
429 *0, auto search after error recovery (hevc_recover() called);
430 *1, manual search after error recovery
431 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
432 *
433 *bit 4: 0, set error_mark after reset/recover
434 * 1, do not set error_mark after reset/recover
435 *bit 5: 0, check total lcu for every picture
436 * 1, do not check total lcu
437 *bit 6: 0, do not check head error
438 * 1, check head error
439 *
440 */
441
442static u32 error_handle_policy;
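/*
 * Illustrative sketch only (not used by the driver): how the
 * error_handle_policy bits documented above are typically tested.
 */
#if 0
static void example_error_policy_checks(void)
{
	bool auto_skip = (error_handle_policy & 0x1) == 0;		/* bit 0 */
	bool set_error_mark = (error_handle_policy & 0x10) == 0;	/* bit 4 */
	bool check_head = (error_handle_policy & 0x40) != 0;		/* bit 6 */

	pr_info("auto_skip %d set_error_mark %d check_head %d\n",
		auto_skip, set_error_mark, check_head);
}
#endif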
443static u32 error_skip_nal_count = 6;
444static u32 error_handle_threshold = 30;
445static u32 error_handle_nal_skip_threshold = 10;
446static u32 error_handle_system_threshold = 30;
447static u32 interlace_enable = 1;
448static u32 fr_hint_status;
449
450 /*
451 *parser_sei_enable:
452 * bit 0, sei;
453 * bit 1, sei_suffix (fill aux buf)
454 * bit 2, fill sei to aux buf (when bit 0 is 1)
455 * bit 8, debug flag
456 */
457static u32 parser_sei_enable;
458#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
459static u32 parser_dolby_vision_enable = 1;
460static u32 dolby_meta_with_el;
461static u32 dolby_el_flush_th = 2;
462#endif
463/* this is only for h265 mmu enable */
464
465static u32 mmu_enable = 1;
466static u32 mmu_enable_force;
467static u32 work_buf_size;
468static unsigned int force_disp_pic_index;
469static unsigned int disp_vframe_valve_level;
470static int pre_decode_buf_level = 0x1000;
471static unsigned int pic_list_debug;
472
473
474#ifdef MULTI_INSTANCE_SUPPORT
475static unsigned int max_decode_instance_num
476 = MAX_DECODE_INSTANCE_NUM;
477static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
478static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
479static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
480static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
481static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
482static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
483static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
484static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
485{1, 1, 1, 1, 1, 1, 1, 1, 1};
486
487#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
488static unsigned char get_idx(struct hevc_state_s *hevc);
489#endif
490
491#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
492static u32 dv_toggle_prov_name;
493
494static u32 dv_debug;
495
496static u32 force_bypass_dvenl;
497#endif
498#endif
499
500
501#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
502#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
503#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
504#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
505#else
506#define get_dbg_flag(hevc) debug
507#define get_dbg_flag2(hevc) debug
508#define is_log_enable(hevc) (log_mask ? 1 : 0)
509#define get_valid_double_write_mode(hevc) double_write_mode
510#define get_buf_alloc_width(hevc) buf_alloc_width
511#define get_buf_alloc_height(hevc) buf_alloc_height
512#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
513#endif
514#define get_buffer_mode(hevc) buffer_mode
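/* e.g. in the CONFIG_AMLOGIC_MEDIA_MULTI_DEC build, debug_mask = 0x2
 * makes only the decoder instance with index 1 see a non-zero
 * get_dbg_flag()/get_dbg_flag2(), so verbose logging can be enabled per
 * instance without flooding the others.
 */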
515
516
517DEFINE_SPINLOCK(lock);
518struct task_struct *h265_task = NULL;
519#undef DEBUG_REG
520#ifdef DEBUG_REG
521void WRITE_VREG_DBG(unsigned adr, unsigned val)
522{
523 if (debug & H265_DEBUG_REG)
524 pr_info("%s(%x, %x)\n", __func__, adr, val);
525 WRITE_VREG(adr, val);
526}
527
528#undef WRITE_VREG
529#define WRITE_VREG WRITE_VREG_DBG
530#endif
531
532static DEFINE_MUTEX(vh265_mutex);
533
534static DEFINE_MUTEX(vh265_log_mutex);
535
536static struct vdec_info *gvs;
537
538static u32 without_display_mode;
539
540/**************************************************
541 *
542 *h265 buffer management include
543 *
544 ***************************************************
545 */
546enum NalUnitType {
547 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
548 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
549
550 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
551 /* Current name in the spec: TSA_R */
552 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
553
554 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
555 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
556
557 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
558 /* Current name in the spec: RADL_R */
559 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
560
561 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
562 /* Current name in the spec: RASL_R */
563 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
564
565 NAL_UNIT_RESERVED_10,
566 NAL_UNIT_RESERVED_11,
567 NAL_UNIT_RESERVED_12,
568 NAL_UNIT_RESERVED_13,
569 NAL_UNIT_RESERVED_14,
570 NAL_UNIT_RESERVED_15,
571
572 /* Current name in the spec: BLA_W_LP */
573 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
574 /* Current name in the spec: BLA_W_DLP */
575 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
576 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
577 /* Current name in the spec: IDR_W_DLP */
578 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
579 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
580 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
581 NAL_UNIT_RESERVED_22,
582 NAL_UNIT_RESERVED_23,
583
584 NAL_UNIT_RESERVED_24,
585 NAL_UNIT_RESERVED_25,
586 NAL_UNIT_RESERVED_26,
587 NAL_UNIT_RESERVED_27,
588 NAL_UNIT_RESERVED_28,
589 NAL_UNIT_RESERVED_29,
590 NAL_UNIT_RESERVED_30,
591 NAL_UNIT_RESERVED_31,
592
593 NAL_UNIT_VPS, /* 32 */
594 NAL_UNIT_SPS, /* 33 */
595 NAL_UNIT_PPS, /* 34 */
596 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
597 NAL_UNIT_EOS, /* 36 */
598 NAL_UNIT_EOB, /* 37 */
599 NAL_UNIT_FILLER_DATA, /* 38 */
600 NAL_UNIT_SEI, /* 39 Prefix SEI */
601 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
602 NAL_UNIT_RESERVED_41,
603 NAL_UNIT_RESERVED_42,
604 NAL_UNIT_RESERVED_43,
605 NAL_UNIT_RESERVED_44,
606 NAL_UNIT_RESERVED_45,
607 NAL_UNIT_RESERVED_46,
608 NAL_UNIT_RESERVED_47,
609 NAL_UNIT_UNSPECIFIED_48,
610 NAL_UNIT_UNSPECIFIED_49,
611 NAL_UNIT_UNSPECIFIED_50,
612 NAL_UNIT_UNSPECIFIED_51,
613 NAL_UNIT_UNSPECIFIED_52,
614 NAL_UNIT_UNSPECIFIED_53,
615 NAL_UNIT_UNSPECIFIED_54,
616 NAL_UNIT_UNSPECIFIED_55,
617 NAL_UNIT_UNSPECIFIED_56,
618 NAL_UNIT_UNSPECIFIED_57,
619 NAL_UNIT_UNSPECIFIED_58,
620 NAL_UNIT_UNSPECIFIED_59,
621 NAL_UNIT_UNSPECIFIED_60,
622 NAL_UNIT_UNSPECIFIED_61,
623 NAL_UNIT_UNSPECIFIED_62,
624 NAL_UNIT_UNSPECIFIED_63,
625 NAL_UNIT_INVALID,
626};
627
628/* --------------------------------------------------- */
629/* Amrisc Software Interrupt */
630/* --------------------------------------------------- */
631#define AMRISC_STREAM_EMPTY_REQ 0x01
632#define AMRISC_PARSER_REQ 0x02
633#define AMRISC_MAIN_REQ 0x04
634
635/* --------------------------------------------------- */
636/* HEVC_DEC_STATUS define */
637/* --------------------------------------------------- */
638#define HEVC_DEC_IDLE 0x0
639#define HEVC_NAL_UNIT_VPS 0x1
640#define HEVC_NAL_UNIT_SPS 0x2
641#define HEVC_NAL_UNIT_PPS 0x3
642#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
643#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
644#define HEVC_SLICE_DECODING 0x6
645#define HEVC_NAL_UNIT_SEI 0x7
646#define HEVC_SLICE_SEGMENT_DONE 0x8
647#define HEVC_NAL_SEARCH_DONE 0x9
648#define HEVC_DECPIC_DATA_DONE 0xa
649#define HEVC_DECPIC_DATA_ERROR 0xb
650#define HEVC_SEI_DAT 0xc
651#define HEVC_SEI_DAT_DONE 0xd
652#define HEVC_NAL_DECODE_DONE 0xe
653#define HEVC_OVER_DECODE 0xf
654
655#define HEVC_DATA_REQUEST 0x12
656
657#define HEVC_DECODE_BUFEMPTY 0x20
658#define HEVC_DECODE_TIMEOUT 0x21
659#define HEVC_SEARCH_BUFEMPTY 0x22
660#define HEVC_DECODE_OVER_SIZE 0x23
661#define HEVC_DECODE_BUFEMPTY2 0x24
662#define HEVC_FIND_NEXT_PIC_NAL 0x50
663#define HEVC_FIND_NEXT_DVEL_NAL 0x51
664
665#define HEVC_DUMP_LMEM 0x30
666
667#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
668#define HEVC_DISCARD_NAL 0xf0
669#define HEVC_ACTION_DEC_CONT 0xfd
670#define HEVC_ACTION_ERROR 0xfe
671#define HEVC_ACTION_DONE 0xff
672
673/* --------------------------------------------------- */
674/* Include "parser_cmd.h" */
675/* --------------------------------------------------- */
676#define PARSER_CMD_SKIP_CFG_0 0x0000090b
677
678#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
679
680#define PARSER_CMD_SKIP_CFG_2 0x001b1910
681
682#define PARSER_CMD_NUMBER 37
683
684/**************************************************
685 *
686 *h265 buffer management
687 *
688 ***************************************************
689 */
690/* #define BUFFER_MGR_ONLY */
691/* #define CONFIG_HEVC_CLK_FORCED_ON */
692/* #define ENABLE_SWAP_TEST */
693#define MCRCC_ENABLE
694#define INVALID_POC 0x80000000
695
696#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
697#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
698#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
699#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
700#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
701#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
702#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
703#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
704#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
705#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
706#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
707#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
708#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
709#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
710#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
711#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
712#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
713#ifdef ENABLE_SWAP_TEST
714#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
715#endif
716
717/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
718/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
719#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
720 /*do not define ENABLE_SWAP_TEST*/
721#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
722#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
723
724#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
725#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
726/*
727 *ucode parser/search control
728 *bit 0: 0, header auto parse; 1, header manual parse
729 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
730 *bit [3:2]: valid when bit1==0;
731 *0, auto skip nal before first vps/sps/pps/idr;
732 *1, auto skip nal before first vps/sps/pps
733 *2, auto skip nal before first vps/sps/pps,
734 * and not decode until the first I slice (with slice address of 0)
735 *
736 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
737 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
738 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
739 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
740 *bit [17]: for NAL_SEI when bit0 is 0:
741 * 0, do not parse/fetch SEI in ucode;
742 * 1, parse/fetch SEI in ucode
743 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
744 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
745 * 1, fetch NAL_SEI_SUFFIX data to aux buf
746 *bit [19]:
747 * 0, parse NAL_SEI in ucode
748 * 1, fetch NAL_SEI to aux buf
749 *bit [20]: for DOLBY_VISION_META
750 * 0, do not fetch DOLBY_VISION_META to aux buf
751 * 1, fetch DOLBY_VISION_META to aux buf
752 */
753#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
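/*
 * Illustrative sketch only: composing a NAL_SEARCH_CTL value from the
 * bit layout documented above (auto header parse, skip NALs until the
 * first vps/sps/pps, fetch SEI and SEI suffix to the aux buffer).
 */
#if 0
static u32 example_nal_search_ctl(void)
{
	u32 ctl = 0;		/* bit 0 = 0: header auto parse */

	ctl |= 1 << 2;		/* bits [3:2] = 1: skip nal before first vps/sps/pps */
	ctl |= 1 << 17;		/* bit 17: parse/fetch SEI in ucode */
	ctl |= 1 << 18;		/* bit 18: fetch NAL_SEI_SUFFIX data to aux buf */

	return ctl;
}
#endif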
754 /*read only*/
755#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
756 /*
757 [15 : 8] rps_set_id
758 [7 : 0] start_decoding_flag
759 */
760#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
761 /*set before start decoder*/
762#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
763#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
764#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
765
766#define DECODE_MODE_SINGLE 0x0
767#define DECODE_MODE_MULTI_FRAMEBASE 0x1
768#define DECODE_MODE_MULTI_STREAMBASE 0x2
769#define DECODE_MODE_MULTI_DVBAL 0x3
770#define DECODE_MODE_MULTI_DVENL 0x4
771
772#define MAX_INT 0x7FFFFFFF
773
774#define RPM_BEGIN 0x100
775#define modification_list_cur 0x148
776#define RPM_END 0x180
777
778#define RPS_USED_BIT 14
779/* MISC_FLAG0 */
780#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
781#define PCM_ENABLE_FLAG_BIT 1
782#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
783#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
784#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
785#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
786#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
787#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
788#define SLICE_SAO_LUMA_FLAG_BIT 8
789#define SLICE_SAO_CHROMA_FLAG_BIT 9
790#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
791
792union param_u {
793 struct {
794 unsigned short data[RPM_END - RPM_BEGIN];
795 } l;
796 struct {
797 /* from ucode lmem, do not change this struct */
798 unsigned short CUR_RPS[0x10];
799 unsigned short num_ref_idx_l0_active;
800 unsigned short num_ref_idx_l1_active;
801 unsigned short slice_type;
802 unsigned short slice_temporal_mvp_enable_flag;
803 unsigned short dependent_slice_segment_flag;
804 unsigned short slice_segment_address;
805 unsigned short num_title_rows_minus1;
806 unsigned short pic_width_in_luma_samples;
807 unsigned short pic_height_in_luma_samples;
808 unsigned short log2_min_coding_block_size_minus3;
809 unsigned short log2_diff_max_min_coding_block_size;
810 unsigned short log2_max_pic_order_cnt_lsb_minus4;
811 unsigned short POClsb;
812 unsigned short collocated_from_l0_flag;
813 unsigned short collocated_ref_idx;
814 unsigned short log2_parallel_merge_level;
815 unsigned short five_minus_max_num_merge_cand;
816 unsigned short sps_num_reorder_pics_0;
817 unsigned short modification_flag;
818 unsigned short tiles_enabled_flag;
819 unsigned short num_tile_columns_minus1;
820 unsigned short num_tile_rows_minus1;
821 unsigned short tile_width[8];
822 unsigned short tile_height[8];
823 unsigned short misc_flag0;
824 unsigned short pps_beta_offset_div2;
825 unsigned short pps_tc_offset_div2;
826 unsigned short slice_beta_offset_div2;
827 unsigned short slice_tc_offset_div2;
828 unsigned short pps_cb_qp_offset;
829 unsigned short pps_cr_qp_offset;
830 unsigned short first_slice_segment_in_pic_flag;
831 unsigned short m_temporalId;
832 unsigned short m_nalUnitType;
833
834 unsigned short vui_num_units_in_tick_hi;
835 unsigned short vui_num_units_in_tick_lo;
836 unsigned short vui_time_scale_hi;
837 unsigned short vui_time_scale_lo;
838 unsigned short bit_depth;
839 unsigned short profile_etc;
840 unsigned short sei_frame_field_info;
841 unsigned short video_signal_type;
842 unsigned short modification_list[0x20];
843 unsigned short conformance_window_flag;
844 unsigned short conf_win_left_offset;
845 unsigned short conf_win_right_offset;
846 unsigned short conf_win_top_offset;
847 unsigned short conf_win_bottom_offset;
848 unsigned short chroma_format_idc;
849 unsigned short color_description;
850 unsigned short aspect_ratio_idc;
851 unsigned short sar_width;
852 unsigned short sar_height;
853 unsigned short sps_max_dec_pic_buffering_minus1_0;
854 } p;
855};
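/* The RPM area (RPM_BEGIN..RPM_END) is copied from ucode lmem into this
 * union, after which fields are read through .p, e.g.
 * hevc->param.p.pic_width_in_luma_samples and
 * hevc->param.p.pic_height_in_luma_samples for the coded picture size.
 */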
856
857#define RPM_BUF_SIZE (0x80*2)
858/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
859#define LMEM_BUF_SIZE (0x500 * 2)
860
861struct buff_s {
862 u32 buf_start;
863 u32 buf_size;
864 u32 buf_end;
865};
866
867struct BuffInfo_s {
868 u32 max_width;
869 u32 max_height;
870 unsigned int start_adr;
871 unsigned int end_adr;
872 struct buff_s ipp;
873 struct buff_s sao_abv;
874 struct buff_s sao_vb;
875 struct buff_s short_term_rps;
876 struct buff_s vps;
877 struct buff_s sps;
878 struct buff_s pps;
879 struct buff_s sao_up;
880 struct buff_s swap_buf;
881 struct buff_s swap_buf2;
882 struct buff_s scalelut;
883 struct buff_s dblk_para;
884 struct buff_s dblk_data;
885 struct buff_s dblk_data2;
886 struct buff_s mmu_vbh;
887 struct buff_s cm_header;
888 struct buff_s mpred_above;
889#ifdef MV_USE_FIXED_BUF
890 struct buff_s mpred_mv;
891#endif
892 struct buff_s rpm;
893 struct buff_s lmem;
894};
895#define WORK_BUF_SPEC_NUM 3
896static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
897 {
898 /* 8M bytes */
899 .max_width = 1920,
900 .max_height = 1088,
901 .ipp = {
902 /* IPP work space calculation :
903 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
904 */
905 .buf_size = 0x4000,
906 },
907 .sao_abv = {
908 .buf_size = 0x30000,
909 },
910 .sao_vb = {
911 .buf_size = 0x30000,
912 },
913 .short_term_rps = {
914 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
915 * total 64x16x2 = 2048 bytes (0x800)
916 */
917 .buf_size = 0x800,
918 },
919 .vps = {
920 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
921 * total 0x0800 bytes
922 */
923 .buf_size = 0x800,
924 },
925 .sps = {
926 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
927 * total 0x0800 bytes
928 */
929 .buf_size = 0x800,
930 },
931 .pps = {
932 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
933 * total 0x2000 bytes
934 */
935 .buf_size = 0x2000,
936 },
937 .sao_up = {
938 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
939 * each has 16 bytes total 0x2800 bytes
940 */
941 .buf_size = 0x2800,
942 },
943 .swap_buf = {
944 /* 256cyclex64bit = 2K bytes 0x800
945 * (only 144 cycles valid)
946 */
947 .buf_size = 0x800,
948 },
949 .swap_buf2 = {
950 .buf_size = 0x800,
951 },
952 .scalelut = {
953 /* support up to 32 SCALELUT 1024x32 =
954 * 32Kbytes (0x8000)
955 */
956 .buf_size = 0x8000,
957 },
958 .dblk_para = {
959#ifdef SUPPORT_10BIT
960 .buf_size = 0x40000,
961#else
962 /* DBLK -> Max 256(4096/16) LCU, each para
963 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
964 */
965 .buf_size = 0x20000,
966#endif
967 },
968 .dblk_data = {
969 .buf_size = 0x40000,
970 },
971 .dblk_data2 = {
972 .buf_size = 0x40000,
973 }, /*dblk data for adapter*/
974 .mmu_vbh = {
975 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
976 },
977#if 0
978 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
979 .buf_size = MMU_COMPRESS_HEADER_SIZE *
980 (MAX_REF_PIC_NUM + 1),
981 },
982#endif
983 .mpred_above = {
984 .buf_size = 0x8000,
985 },
986#ifdef MV_USE_FIXED_BUF
987 .mpred_mv = {/* 1080p, 0x40000 per buffer */
988 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
989 },
990#endif
991 .rpm = {
992 .buf_size = RPM_BUF_SIZE,
993 },
994 .lmem = {
995 .buf_size = 0x500 * 2,
996 }
997 },
998 {
999 .max_width = 4096,
1000 .max_height = 2048,
1001 .ipp = {
1002 /* IPP work space calculation :
1003 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1004 */
1005 .buf_size = 0x4000,
1006 },
1007 .sao_abv = {
1008 .buf_size = 0x30000,
1009 },
1010 .sao_vb = {
1011 .buf_size = 0x30000,
1012 },
1013 .short_term_rps = {
1014 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1015 * total 64x16x2 = 2048 bytes (0x800)
1016 */
1017 .buf_size = 0x800,
1018 },
1019 .vps = {
1020 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1021 * total 0x0800 bytes
1022 */
1023 .buf_size = 0x800,
1024 },
1025 .sps = {
1026 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1027 * total 0x0800 bytes
1028 */
1029 .buf_size = 0x800,
1030 },
1031 .pps = {
1032 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1033 * total 0x2000 bytes
1034 */
1035 .buf_size = 0x2000,
1036 },
1037 .sao_up = {
1038 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1039 * each has 16 bytes total 0x2800 bytes
1040 */
1041 .buf_size = 0x2800,
1042 },
1043 .swap_buf = {
1044 /* 256cyclex64bit = 2K bytes 0x800
1045 * (only 144 cycles valid)
1046 */
1047 .buf_size = 0x800,
1048 },
1049 .swap_buf2 = {
1050 .buf_size = 0x800,
1051 },
1052 .scalelut = {
1053 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1054 * (0x8000)
1055 */
1056 .buf_size = 0x8000,
1057 },
1058 .dblk_para = {
1059 /* DBLK -> Max 256(4096/16) LCU, each para
1060 * 512bytes(total:0x20000),
1061 * data 1024bytes(total:0x40000)
1062 */
1063 .buf_size = 0x20000,
1064 },
1065 .dblk_data = {
1066 .buf_size = 0x80000,
1067 },
1068 .dblk_data2 = {
1069 .buf_size = 0x80000,
1070 }, /*dblk data for adapter*/
1071 .mmu_vbh = {
1072 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1073 },
1074#if 0
1075 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1076 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1077 (MAX_REF_PIC_NUM + 1),
1078 },
1079#endif
1080 .mpred_above = {
1081 .buf_size = 0x8000,
1082 },
1083#ifdef MV_USE_FIXED_BUF
1084 .mpred_mv = {
1085 /* .buf_size = 0x100000*16,
1086 //4k2k , 0x100000 per buffer */
1087 /* 4096x2304 , 0x120000 per buffer */
1088 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1089 },
1090#endif
1091 .rpm = {
1092 .buf_size = RPM_BUF_SIZE,
1093 },
1094 .lmem = {
1095 .buf_size = 0x500 * 2,
1096 }
1097 },
1098
1099 {
1100 .max_width = 4096*2,
1101 .max_height = 2048*2,
1102 .ipp = {
1103 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1104 .buf_size = 0x4000*2,
1105 },
1106 .sao_abv = {
1107 .buf_size = 0x30000*2,
1108 },
1109 .sao_vb = {
1110 .buf_size = 0x30000*2,
1111 },
1112 .short_term_rps = {
1113 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1114 .buf_size = 0x800,
1115 },
1116 .vps = {
1117 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1118 .buf_size = 0x800,
1119 },
1120 .sps = {
1121 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1122 .buf_size = 0x800,
1123 },
1124 .pps = {
1125 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1126 .buf_size = 0x2000,
1127 },
1128 .sao_up = {
1129 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1130 .buf_size = 0x2800*2,
1131 },
1132 .swap_buf = {
1133 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1134 .buf_size = 0x800,
1135 },
1136 .swap_buf2 = {
1137 .buf_size = 0x800,
1138 },
1139 .scalelut = {
1140 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1141 .buf_size = 0x8000*2,
1142 },
1143 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1144 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1145 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1146 .mmu_vbh = {
1147 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1148 },
1149#if 0
1150 .cm_header = {
1151 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1152 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1153 },
1154#endif
1155 .mpred_above = {
1156 .buf_size = 0x8000*2,
1157 },
1158#ifdef MV_USE_FIXED_BUF
1159 .mpred_mv = {
1160 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1161 },
1162#endif
1163 .rpm = {
1164 .buf_size = RPM_BUF_SIZE,
1165 },
1166 .lmem = {
1167 .buf_size = 0x500 * 2,
1168 },
1169 }
1170};
1171
1172static void init_buff_spec(struct hevc_state_s *hevc,
1173 struct BuffInfo_s *buf_spec)
1174{
1175 buf_spec->ipp.buf_start = buf_spec->start_adr;
1176 buf_spec->sao_abv.buf_start =
1177 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1178
1179 buf_spec->sao_vb.buf_start =
1180 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1181 buf_spec->short_term_rps.buf_start =
1182 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1183 buf_spec->vps.buf_start =
1184 buf_spec->short_term_rps.buf_start +
1185 buf_spec->short_term_rps.buf_size;
1186 buf_spec->sps.buf_start =
1187 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1188 buf_spec->pps.buf_start =
1189 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1190 buf_spec->sao_up.buf_start =
1191 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1192 buf_spec->swap_buf.buf_start =
1193 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1194 buf_spec->swap_buf2.buf_start =
1195 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1196 buf_spec->scalelut.buf_start =
1197 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1198 buf_spec->dblk_para.buf_start =
1199 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1200 buf_spec->dblk_data.buf_start =
1201 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1202 buf_spec->dblk_data2.buf_start =
1203 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1204 buf_spec->mmu_vbh.buf_start =
1205 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1206 buf_spec->mpred_above.buf_start =
1207 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1208#ifdef MV_USE_FIXED_BUF
1209 buf_spec->mpred_mv.buf_start =
1210 buf_spec->mpred_above.buf_start +
1211 buf_spec->mpred_above.buf_size;
1212
1213 buf_spec->rpm.buf_start =
1214 buf_spec->mpred_mv.buf_start +
1215 buf_spec->mpred_mv.buf_size;
1216#else
1217 buf_spec->rpm.buf_start =
1218 buf_spec->mpred_above.buf_start +
1219 buf_spec->mpred_above.buf_size;
1220#endif
1221 buf_spec->lmem.buf_start =
1222 buf_spec->rpm.buf_start +
1223 buf_spec->rpm.buf_size;
1224 buf_spec->end_adr =
1225 buf_spec->lmem.buf_start +
1226 buf_spec->lmem.buf_size;
1227
1228 if (hevc && get_dbg_flag2(hevc)) {
1229 hevc_print(hevc, 0,
1230 "%s workspace (%x %x) size = %x\n", __func__,
1231 buf_spec->start_adr, buf_spec->end_adr,
1232 buf_spec->end_adr - buf_spec->start_adr);
1233
1234 hevc_print(hevc, 0,
1235 "ipp.buf_start :%x\n",
1236 buf_spec->ipp.buf_start);
1237 hevc_print(hevc, 0,
1238 "sao_abv.buf_start :%x\n",
1239 buf_spec->sao_abv.buf_start);
1240 hevc_print(hevc, 0,
1241 "sao_vb.buf_start :%x\n",
1242 buf_spec->sao_vb.buf_start);
1243 hevc_print(hevc, 0,
1244 "short_term_rps.buf_start :%x\n",
1245 buf_spec->short_term_rps.buf_start);
1246 hevc_print(hevc, 0,
1247 "vps.buf_start :%x\n",
1248 buf_spec->vps.buf_start);
1249 hevc_print(hevc, 0,
1250 "sps.buf_start :%x\n",
1251 buf_spec->sps.buf_start);
1252 hevc_print(hevc, 0,
1253 "pps.buf_start :%x\n",
1254 buf_spec->pps.buf_start);
1255 hevc_print(hevc, 0,
1256 "sao_up.buf_start :%x\n",
1257 buf_spec->sao_up.buf_start);
1258 hevc_print(hevc, 0,
1259 "swap_buf.buf_start :%x\n",
1260 buf_spec->swap_buf.buf_start);
1261 hevc_print(hevc, 0,
1262 "swap_buf2.buf_start :%x\n",
1263 buf_spec->swap_buf2.buf_start);
1264 hevc_print(hevc, 0,
1265 "scalelut.buf_start :%x\n",
1266 buf_spec->scalelut.buf_start);
1267 hevc_print(hevc, 0,
1268 "dblk_para.buf_start :%x\n",
1269 buf_spec->dblk_para.buf_start);
1270 hevc_print(hevc, 0,
1271 "dblk_data.buf_start :%x\n",
1272 buf_spec->dblk_data.buf_start);
1273 hevc_print(hevc, 0,
1274 "dblk_data2.buf_start :%x\n",
1275 buf_spec->dblk_data2.buf_start);
1276 hevc_print(hevc, 0,
1277 "mpred_above.buf_start :%x\n",
1278 buf_spec->mpred_above.buf_start);
1279#ifdef MV_USE_FIXED_BUF
1280 hevc_print(hevc, 0,
1281 "mpred_mv.buf_start :%x\n",
1282 buf_spec->mpred_mv.buf_start);
1283#endif
1284 if ((get_dbg_flag2(hevc)
1285 &
1286 H265_DEBUG_SEND_PARAM_WITH_REG)
1287 == 0) {
1288 hevc_print(hevc, 0,
1289 "rpm.buf_start :%x\n",
1290 buf_spec->rpm.buf_start);
1291 }
1292 }
1293
1294}
1295
1296enum SliceType {
1297 B_SLICE,
1298 P_SLICE,
1299 I_SLICE
1300};
1301
1302/*USE_BUF_BLOCK*/
1303struct BUF_s {
1304 unsigned long start_adr;
1305 unsigned int size;
1306 int used_flag;
1307 ulong v4l_ref_buf_addr;
1308} /*BUF_t */;
1309
1310/* level 6, 6.1 maximum slice number is 800; other is 200 */
1311#define MAX_SLICE_NUM 800
1312struct PIC_s {
1313 int index;
1314 int scatter_alloc;
1315 int BUF_index;
1316 int mv_buf_index;
1317 int POC;
1318 int decode_idx;
1319 int slice_type;
1320 int RefNum_L0;
1321 int RefNum_L1;
1322 int num_reorder_pic;
1323 int stream_offset;
1324 unsigned char referenced;
1325 unsigned char output_mark;
1326 unsigned char recon_mark;
1327 unsigned char output_ready;
1328 unsigned char error_mark;
1329 /**/ int slice_idx;
1330 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1331 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1332 /*buffer */
1333 unsigned int header_adr;
1334#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1335 unsigned char dv_enhance_exist;
1336#endif
1337 char *aux_data_buf;
1338 int aux_data_size;
1339 unsigned long cma_alloc_addr;
1340 struct page *alloc_pages;
1341 unsigned int mpred_mv_wr_start_addr;
1342 unsigned int mc_y_adr;
1343 unsigned int mc_u_v_adr;
1344#ifdef SUPPORT_10BIT
1345 /*unsigned int comp_body_size;*/
1346 unsigned int dw_y_adr;
1347 unsigned int dw_u_v_adr;
1348#endif
1349 int mc_canvas_y;
1350 int mc_canvas_u_v;
1351 int width;
1352 int height;
1353
1354 int y_canvas_index;
1355 int uv_canvas_index;
1356#ifdef MULTI_INSTANCE_SUPPORT
1357 struct canvas_config_s canvas_config[2];
1358#endif
1359#ifdef SUPPORT_10BIT
1360 int mem_saving_mode;
1361 u32 bit_depth_luma;
1362 u32 bit_depth_chroma;
1363#endif
1364#ifdef LOSLESS_COMPRESS_MODE
1365 unsigned int losless_comp_body_size;
1366#endif
1367 unsigned char pic_struct;
1368 int vf_ref;
1369
1370 u32 pts;
1371 u64 pts64;
1372
1373 u32 aspect_ratio_idc;
1374 u32 sar_width;
1375 u32 sar_height;
1376 u32 double_write_mode;
1377 u32 video_signal_type;
1378 unsigned short conformance_window_flag;
1379 unsigned short conf_win_left_offset;
1380 unsigned short conf_win_right_offset;
1381 unsigned short conf_win_top_offset;
1382 unsigned short conf_win_bottom_offset;
1383 unsigned short chroma_format_idc;
1384
1385 /* picture qos information */
1386 int max_qp;
1387 int avg_qp;
1388 int min_qp;
1389 int max_skip;
1390 int avg_skip;
1391 int min_skip;
1392 int max_mv;
1393 int min_mv;
1394 int avg_mv;
1395} /*PIC_t */;
1396
1397#define MAX_TILE_COL_NUM 10
1398#define MAX_TILE_ROW_NUM 20
1399struct tile_s {
1400 int width;
1401 int height;
1402 int start_cu_x;
1403 int start_cu_y;
1404
1405 unsigned int sao_vb_start_addr;
1406 unsigned int sao_abv_start_addr;
1407};
1408
1409#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1410#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1411#define SEI_HDR10PLUS_MASK 0x00000004
1412
1413#define VF_POOL_SIZE 32
1414
1415#ifdef MULTI_INSTANCE_SUPPORT
1416#define DEC_RESULT_NONE 0
1417#define DEC_RESULT_DONE 1
1418#define DEC_RESULT_AGAIN 2
1419#define DEC_RESULT_CONFIG_PARAM 3
1420#define DEC_RESULT_ERROR 4
1421#define DEC_INIT_PICLIST 5
1422#define DEC_UNINIT_PICLIST 6
1423#define DEC_RESULT_GET_DATA 7
1424#define DEC_RESULT_GET_DATA_RETRY 8
1425#define DEC_RESULT_EOS 9
1426#define DEC_RESULT_FORCE_EXIT 10
1427
1428static void vh265_work(struct work_struct *work);
1429static void vh265_timeout_work(struct work_struct *work);
1430static void vh265_notify_work(struct work_struct *work);
1431
1432#endif
1433
1434struct debug_log_s {
1435 struct list_head list;
1436 uint8_t data; /*will alloc more size*/
1437};
1438
1439struct hevc_state_s {
1440#ifdef MULTI_INSTANCE_SUPPORT
1441 struct platform_device *platform_dev;
1442 void (*vdec_cb)(struct vdec_s *, void *);
1443 void *vdec_cb_arg;
1444 struct vframe_chunk_s *chunk;
1445 int dec_result;
1446 struct work_struct work;
1447 struct work_struct timeout_work;
1448 struct work_struct notify_work;
1449 struct work_struct set_clk_work;
1450 /* timeout handle */
1451 unsigned long int start_process_time;
1452 unsigned int last_lcu_idx;
1453 unsigned int decode_timeout_count;
1454 unsigned int timeout_num;
1455#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1456 unsigned char switch_dvlayer_flag;
1457 unsigned char no_switch_dvlayer_count;
1458 unsigned char bypass_dvenl_enable;
1459 unsigned char bypass_dvenl;
1460#endif
1461 unsigned char start_parser_type;
1462 /*start_decoding_flag:
1463 vps/pps/sps/idr info from ucode*/
1464 unsigned char start_decoding_flag;
1465 unsigned char rps_set_id;
1466 unsigned char eos;
1467 int pic_decoded_lcu_idx;
1468 u8 over_decode;
1469 u8 empty_flag;
1470#endif
1471 struct vframe_s vframe_dummy;
1472 char *provider_name;
1473 int index;
1474 struct device *cma_dev;
1475 unsigned char m_ins_flag;
1476 unsigned char dolby_enhance_flag;
1477 unsigned long buf_start;
1478 u32 buf_size;
1479 u32 mv_buf_size;
1480
1481 struct BuffInfo_s work_space_buf_store;
1482 struct BuffInfo_s *work_space_buf;
1483
1484 u8 aux_data_dirty;
1485 u32 prefix_aux_size;
1486 u32 suffix_aux_size;
1487 void *aux_addr;
1488 void *rpm_addr;
1489 void *lmem_addr;
1490 dma_addr_t aux_phy_addr;
1491 dma_addr_t rpm_phy_addr;
1492 dma_addr_t lmem_phy_addr;
1493
1494 unsigned int pic_list_init_flag;
1495 unsigned int use_cma_flag;
1496
1497 unsigned short *rpm_ptr;
1498 unsigned short *lmem_ptr;
1499 unsigned short *debug_ptr;
1500 int debug_ptr_size;
1501 int pic_w;
1502 int pic_h;
1503 int lcu_x_num;
1504 int lcu_y_num;
1505 int lcu_total;
1506 int lcu_size;
1507 int lcu_size_log2;
1508 int lcu_x_num_pre;
1509 int lcu_y_num_pre;
1510 int first_pic_after_recover;
1511
1512 int num_tile_col;
1513 int num_tile_row;
1514 int tile_enabled;
1515 int tile_x;
1516 int tile_y;
1517 int tile_y_x;
1518 int tile_start_lcu_x;
1519 int tile_start_lcu_y;
1520 int tile_width_lcu;
1521 int tile_height_lcu;
1522
1523 int slice_type;
1524 unsigned int slice_addr;
1525 unsigned int slice_segment_addr;
1526
1527 unsigned char interlace_flag;
1528 unsigned char curr_pic_struct;
1529 unsigned char frame_field_info_present_flag;
1530
1531 unsigned short sps_num_reorder_pics_0;
1532 unsigned short misc_flag0;
1533 int m_temporalId;
1534 int m_nalUnitType;
1535 int TMVPFlag;
1536 int isNextSliceSegment;
1537 int LDCFlag;
1538 int m_pocRandomAccess;
1539 int plevel;
1540 int MaxNumMergeCand;
1541
1542 int new_pic;
1543 int new_tile;
1544 int curr_POC;
1545 int iPrevPOC;
1546#ifdef MULTI_INSTANCE_SUPPORT
1547 int decoded_poc;
1548 struct PIC_s *decoding_pic;
1549#endif
1550 int iPrevTid0POC;
1551 int list_no;
1552 int RefNum_L0;
1553 int RefNum_L1;
1554 int ColFromL0Flag;
1555 int LongTerm_Curr;
1556 int LongTerm_Col;
1557 int Col_POC;
1558 int LongTerm_Ref;
1559#ifdef MULTI_INSTANCE_SUPPORT
1560 int m_pocRandomAccess_bak;
1561 int curr_POC_bak;
1562 int iPrevPOC_bak;
1563 int iPrevTid0POC_bak;
1564 unsigned char start_parser_type_bak;
1565 unsigned char start_decoding_flag_bak;
1566 unsigned char rps_set_id_bak;
1567 int pic_decoded_lcu_idx_bak;
1568 int decode_idx_bak;
1569#endif
1570 struct PIC_s *cur_pic;
1571 struct PIC_s *col_pic;
1572 int skip_flag;
1573 int decode_idx;
1574 int slice_idx;
1575 unsigned char have_vps;
1576 unsigned char have_sps;
1577 unsigned char have_pps;
1578 unsigned char have_valid_start_slice;
1579 unsigned char wait_buf;
1580 unsigned char error_flag;
1581 unsigned int error_skip_nal_count;
1582 long used_4k_num;
1583
1584 unsigned char
1585 ignore_bufmgr_error; /* bit 0, for decoding;
1586 bit 1, for displaying
1587 bit 1 must be set if bit 0 is 1*/
1588 int PB_skip_mode;
1589 int PB_skip_count_after_decoding;
1590#ifdef SUPPORT_10BIT
1591 int mem_saving_mode;
1592#endif
1593#ifdef LOSLESS_COMPRESS_MODE
1594 unsigned int losless_comp_body_size;
1595#endif
1596 int pts_mode;
1597 int last_lookup_pts;
1598 int last_pts;
1599 u64 last_lookup_pts_us64;
1600 u64 last_pts_us64;
1601 u32 shift_byte_count_lo;
1602 u32 shift_byte_count_hi;
1603 int pts_mode_switching_count;
1604 int pts_mode_recovery_count;
1605
1606 int pic_num;
1607
1608 /**/
1609 union param_u param;
1610
1611 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1612
1613 struct timer_list timer;
1614 struct BUF_s m_BUF[BUF_POOL_SIZE];
1615 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1616 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1617
1618 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1619 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1620 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1621 struct vframe_s vfpool[VF_POOL_SIZE];
1622
1623 u32 stat;
1624 u32 frame_width;
1625 u32 frame_height;
1626 u32 frame_dur;
1627 u32 frame_ar;
1628 u32 bit_depth_luma;
1629 u32 bit_depth_chroma;
1630 u32 video_signal_type;
1631 u32 video_signal_type_debug;
1632 u32 saved_resolution;
1633 bool get_frame_dur;
1634 u32 error_watchdog_count;
1635 u32 error_skip_nal_wt_cnt;
1636 u32 error_system_watchdog_count;
1637
1638#ifdef DEBUG_PTS
1639 unsigned long pts_missed;
1640 unsigned long pts_hit;
1641#endif
1642 struct dec_sysinfo vh265_amstream_dec_info;
1643 unsigned char init_flag;
1644 unsigned char first_sc_checked;
1645 unsigned char uninit_list;
1646 u32 start_decoding_time;
1647
1648 int show_frame_num;
1649#ifdef USE_UNINIT_SEMA
1650 struct semaphore h265_uninit_done_sema;
1651#endif
1652 int fatal_error;
1653
1654
1655 u32 sei_present_flag;
1656 void *frame_mmu_map_addr;
1657 dma_addr_t frame_mmu_map_phy_addr;
1658 unsigned int mmu_mc_buf_start;
1659 unsigned int mmu_mc_buf_end;
1660 unsigned int mmu_mc_start_4k_adr;
1661 void *mmu_box;
1662 void *bmmu_box;
1663 int mmu_enable;
1664
1665 unsigned int dec_status;
1666
1667 /* data for SEI_MASTER_DISPLAY_COLOR */
1668 unsigned int primaries[3][2];
1669 unsigned int white_point[2];
1670 unsigned int luminance[2];
1671 /* data for SEI_CONTENT_LIGHT_LEVEL */
1672 unsigned int content_light_level[2];
1673
1674 struct PIC_s *pre_top_pic;
1675 struct PIC_s *pre_bot_pic;
1676
1677#ifdef MULTI_INSTANCE_SUPPORT
1678 int double_write_mode;
1679 int dynamic_buf_num_margin;
1680 int start_action;
1681 int save_buffer_mode;
1682#endif
1683 u32 i_only;
1684 struct list_head log_list;
1685 u32 ucode_pause_pos;
1686 u32 start_shift_bytes;
1687
1688 u32 vf_pre_count;
1689 u32 vf_get_count;
1690 u32 vf_put_count;
1691#ifdef SWAP_HEVC_UCODE
1692 dma_addr_t mc_dma_handle;
1693 void *mc_cpu_addr;
1694 int swap_size;
1695 ulong swap_addr;
1696#endif
1697#ifdef DETREFILL_ENABLE
1698 dma_addr_t detbuf_adr;
1699 u16 *detbuf_adr_virt;
1700 u8 delrefill_check;
1701#endif
1702 u8 head_error_flag;
1703 int valve_count;
1704 struct firmware_s *fw;
1705 int max_pic_w;
1706 int max_pic_h;
1707#ifdef AGAIN_HAS_THRESHOLD
1708 u8 next_again_flag;
1709 u32 pre_parser_wr_ptr;
1710#endif
1711 u32 ratio_control;
1712 u32 first_pic_flag;
1713 u32 decode_size;
1714 struct mutex chunks_mutex;
1715 int need_cache_size;
1716 u64 sc_start_time;
1717 u32 skip_first_nal;
1718 bool is_swap;
1719 bool is_4k;
1720 int frameinfo_enable;
1721 struct vframe_qos_s vframe_qos;
1722 bool is_used_v4l;
1723 void *v4l2_ctx;
1724} /*hevc_stru_t */;
1725
1726static int v4l_get_fb(struct aml_vcodec_ctx *ctx, struct vdec_fb **out)
1727{
1728 int ret = 0;
1729
1730 ret = ctx->dec_if->get_param(ctx->drv_handle,
1731 GET_PARAM_FREE_FRAME_BUFFER, out);
1732
1733 return ret;
1734}
1735
1736#ifdef AGAIN_HAS_THRESHOLD
1737u32 again_threshold;
1738#endif
1739#ifdef SEND_LMEM_WITH_RPM
1740#define get_lmem_params(hevc, ladr) \
1741 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
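/*
 * Index mapping of the macro above: within every aligned group of four
 * 16-bit words the offset is mirrored, so ladr 0x40/0x41/0x42/0x43 read
 * lmem_ptr[0x43]/[0x42]/[0x41]/[0x40] respectively (presumably because
 * the lmem dump stores each 64-bit word with its halfwords reversed).
 */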
1742
1743
1744static int get_frame_mmu_map_size(void)
1745{
1746 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1747 return (MAX_FRAME_8K_NUM * 4);
1748
1749 return (MAX_FRAME_4K_NUM * 4);
1750}
1751
1752static int is_oversize(int w, int h)
1753{
1754 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1755 MAX_SIZE_8K : MAX_SIZE_4K;
1756
1757 if (w < 0 || h < 0)
1758 return true;
1759
1760 if (h != 0 && (w > max / h))
1761 return true;
1762
1763 return false;
1764}
1765
1766void check_head_error(struct hevc_state_s *hevc)
1767{
1768#define pcm_enabled_flag 0x040
1769#define pcm_sample_bit_depth_luma 0x041
1770#define pcm_sample_bit_depth_chroma 0x042
1771 hevc->head_error_flag = 0;
1772 if ((error_handle_policy & 0x40) == 0)
1773 return;
1774 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1775 uint16_t pcm_depth_luma = get_lmem_params(
1776 hevc, pcm_sample_bit_depth_luma);
1777 uint16_t pcm_sample_chroma = get_lmem_params(
1778 hevc, pcm_sample_bit_depth_chroma);
1779 if (pcm_depth_luma >
1780 hevc->bit_depth_luma ||
1781 pcm_sample_chroma >
1782 hevc->bit_depth_chroma) {
1783 hevc_print(hevc, 0,
1784 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1785 pcm_depth_luma,
1786 pcm_sample_chroma,
1787 hevc->bit_depth_luma,
1788 hevc->bit_depth_chroma);
1789 hevc->head_error_flag = 1;
1790 }
1791 }
1792}
1793#endif
1794
1795#ifdef SUPPORT_10BIT
1796/* Lossless compression body buffer size 4K per 64x32 (jt) */
1797static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1798 int width, int height, int mem_saving_mode)
1799{
1800 int width_x64;
1801 int height_x32;
1802 int bsize;
1803
1804 width_x64 = width + 63;
1805 width_x64 >>= 6;
1806
1807 height_x32 = height + 31;
1808 height_x32 >>= 5;
1809 if (mem_saving_mode == 1 && hevc->mmu_enable)
1810 bsize = 3200 * width_x64 * height_x32;
1811 else if (mem_saving_mode == 1)
1812 bsize = 3072 * width_x64 * height_x32;
1813 else
1814 bsize = 4096 * width_x64 * height_x32;
1815
1816 return bsize;
1817}
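/* Worked example: 1920x1088 with mem_saving_mode off gives
 * width_x64 = 30, height_x32 = 34, bsize = 4096 * 30 * 34 = 0x3fc000
 * bytes of compressed body per frame.
 */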
1818
1819/* Lossless compression header buffer size 32 bytes per 128x64 (jt) */
1820static int compute_losless_comp_header_size(int width, int height)
1821{
1822 int width_x128;
1823 int height_x64;
1824 int hsize;
1825
1826 width_x128 = width + 127;
1827 width_x128 >>= 7;
1828
1829 height_x64 = height + 63;
1830 height_x64 >>= 6;
1831
1832 hsize = 32*width_x128*height_x64;
1833
1834 return hsize;
1835}
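/* Worked example: 1920x1088 gives width_x128 = 15, height_x64 = 17,
 * hsize = 32 * 15 * 17 = 8160 bytes of compression header per frame.
 */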
1836#endif
1837
1838static int add_log(struct hevc_state_s *hevc,
1839 const char *fmt, ...)
1840{
1841#define HEVC_LOG_BUF 196
1842 struct debug_log_s *log_item;
1843 unsigned char buf[HEVC_LOG_BUF];
1844 int len = 0;
1845 va_list args;
1846 mutex_lock(&vh265_log_mutex);
1847 va_start(args, fmt);
1848 len = sprintf(buf, "<%ld> <%05d> ",
1849 jiffies, hevc->decode_idx);
1850 len += vsnprintf(buf + len,
1851 HEVC_LOG_BUF - len, fmt, args);
1852 va_end(args);
1853 log_item = kmalloc(
1854 sizeof(struct debug_log_s) + len,
1855 GFP_KERNEL);
1856 if (log_item) {
1857 INIT_LIST_HEAD(&log_item->list);
1858 strcpy(&log_item->data, buf);
1859 list_add_tail(&log_item->list,
1860 &hevc->log_list);
1861 }
1862 mutex_unlock(&vh265_log_mutex);
1863 return 0;
1864}
1865
1866static void dump_log(struct hevc_state_s *hevc)
1867{
1868 int i = 0;
1869 struct debug_log_s *log_item, *tmp;
1870 mutex_lock(&vh265_log_mutex);
1871 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1872 hevc_print(hevc, 0,
1873 "[LOG%04d]%s\n",
1874 i++,
1875 &log_item->data);
1876 list_del(&log_item->list);
1877 kfree(log_item);
1878 }
1879 mutex_unlock(&vh265_log_mutex);
1880}
1881
1882static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1883 struct PIC_s *pic)
1884{
1885 if (pic->error_mark
1886 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1887 return 1;
1888 return 0;
1889}
1890
1891static int get_pic_poc(struct hevc_state_s *hevc,
1892 unsigned int idx)
1893{
1894 if (idx != 0xff
1895 && idx < MAX_REF_PIC_NUM
1896 && hevc->m_PIC[idx])
1897 return hevc->m_PIC[idx]->POC;
1898 return INVALID_POC;
1899}
1900
1901#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1902static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1903{
1904 return (hevc->m_ins_flag &&
1905 ((double_write_mode & 0x80000000) == 0)) ?
1906 hevc->double_write_mode :
1907 (double_write_mode & 0x7fffffff);
1908}
1909
1910static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1911{
1912 return (hevc->m_ins_flag &&
1913 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1914 hevc->dynamic_buf_num_margin :
1915 (dynamic_buf_num_margin & 0x7fffffff);
1916}
1917#endif
1918
1919static int get_double_write_mode(struct hevc_state_s *hevc)
1920{
1921 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1922 int w = hevc->pic_w;
1923 int h = hevc->pic_h;
1924 u32 dw = 0x1; /*1:1*/
1925 switch (valid_dw_mode) {
1926 case 0x100:
1927 if (w > 1920 && h > 1088)
1928 dw = 0x4; /*1:2*/
1929 break;
1930 case 0x200:
1931 if (w > 1920 && h > 1088)
1932 dw = 0x2; /*1:4*/
1933 break;
1934 case 0x300:
1935 if (w > 1280 && h > 720)
1936 dw = 0x4; /*1:2*/
1937 break;
1938 default:
1939 dw = valid_dw_mode;
1940 break;
1941 }
1942 return dw;
1943}
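/*
 * Example of how the "auto" double write modes above resolve (illustrative):
 * with double_write_mode 0x100 a 3840x2160 stream returns 0x4 (1:2 scaled
 * double write), while a 1920x1080 stream keeps the default 0x1 (full size),
 * since only pictures larger than 1920x1088 trigger the scaled mode.
 */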
1944
1945static int get_double_write_ratio(struct hevc_state_s *hevc,
1946 int dw_mode)
1947{
1948 int ratio = 1;
1949 if ((dw_mode == 2) ||
1950 (dw_mode == 3))
1951 ratio = 4;
1952 else if (dw_mode == 4)
1953 ratio = 2;
1954 return ratio;
1955}
1956#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1957static unsigned char get_idx(struct hevc_state_s *hevc)
1958{
1959 return hevc->index;
1960}
1961#endif
1962
1963#undef pr_info
1964#define pr_info printk
1965static int hevc_print(struct hevc_state_s *hevc,
1966 int flag, const char *fmt, ...)
1967{
1968#define HEVC_PRINT_BUF 256
1969 unsigned char buf[HEVC_PRINT_BUF];
1970 int len = 0;
1971#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1972 if (hevc == NULL ||
1973 (flag == 0) ||
1974 ((debug_mask &
1975 (1 << hevc->index))
1976 && (debug & flag))) {
1977#endif
1978 va_list args;
1979
1980 va_start(args, fmt);
1981 if (hevc)
1982 len = sprintf(buf, "[%d]", hevc->index);
1983 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1984 pr_debug("%s", buf);
1985 va_end(args);
1986#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1987 }
1988#endif
1989 return 0;
1990}
1991
1992static int hevc_print_cont(struct hevc_state_s *hevc,
1993 int flag, const char *fmt, ...)
1994{
1995 unsigned char buf[HEVC_PRINT_BUF];
1996 int len = 0;
1997#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1998 if (hevc == NULL ||
1999 (flag == 0) ||
2000 ((debug_mask &
2001 (1 << hevc->index))
2002 && (debug & flag))) {
2003#endif
2004 va_list args;
2005
2006 va_start(args, fmt);
2007 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2008 pr_info("%s", buf);
2009 va_end(args);
2010#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2011 }
2012#endif
2013 return 0;
2014}
2015
2016static void put_mv_buf(struct hevc_state_s *hevc,
2017 struct PIC_s *pic);
2018
2019static void update_vf_memhandle(struct hevc_state_s *hevc,
2020 struct vframe_s *vf, struct PIC_s *pic);
2021
2022static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2023
2024static void release_aux_data(struct hevc_state_s *hevc,
2025 struct PIC_s *pic);
2026static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2027
2028#ifdef MULTI_INSTANCE_SUPPORT
2029static void backup_decode_state(struct hevc_state_s *hevc)
2030{
2031 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2032 hevc->curr_POC_bak = hevc->curr_POC;
2033 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2034 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2035 hevc->start_parser_type_bak = hevc->start_parser_type;
2036 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2037 hevc->rps_set_id_bak = hevc->rps_set_id;
2038 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2039 hevc->decode_idx_bak = hevc->decode_idx;
2040
2041}
2042
2043static void restore_decode_state(struct hevc_state_s *hevc)
2044{
2045 struct vdec_s *vdec = hw_to_vdec(hevc);
2046 if (!vdec_has_more_input(vdec)) {
2047 hevc->pic_decoded_lcu_idx =
2048 READ_VREG(HEVC_PARSER_LCU_START)
2049 & 0xffffff;
2050 return;
2051 }
2052 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2053 "%s: discard pic index 0x%x\n",
2054 __func__, hevc->decoding_pic ?
2055 hevc->decoding_pic->index : 0xff);
2056 if (hevc->decoding_pic) {
2057 hevc->decoding_pic->error_mark = 0;
2058 hevc->decoding_pic->output_ready = 0;
2059 hevc->decoding_pic->output_mark = 0;
2060 hevc->decoding_pic->referenced = 0;
2061 hevc->decoding_pic->POC = INVALID_POC;
2062 put_mv_buf(hevc, hevc->decoding_pic);
2063 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2064 release_aux_data(hevc, hevc->decoding_pic);
2065 hevc->decoding_pic = NULL;
2066 }
2067 hevc->decode_idx = hevc->decode_idx_bak;
2068 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2069 hevc->curr_POC = hevc->curr_POC_bak;
2070 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2071 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2072 hevc->start_parser_type = hevc->start_parser_type_bak;
2073 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2074 hevc->rps_set_id = hevc->rps_set_id_bak;
2075 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2076
2077 if (hevc->pic_list_init_flag == 1)
2078 hevc->pic_list_init_flag = 0;
2079 /*if (hevc->decode_idx == 0)
2080 hevc->start_decoding_flag = 0;*/
2081
2082 hevc->slice_idx = 0;
2083 hevc->used_4k_num = -1;
2084}
2085#endif
2086
2087static void hevc_init_stru(struct hevc_state_s *hevc,
2088 struct BuffInfo_s *buf_spec_i)
2089{
2090 int i;
2091 INIT_LIST_HEAD(&hevc->log_list);
2092 hevc->work_space_buf = buf_spec_i;
2093 hevc->prefix_aux_size = 0;
2094 hevc->suffix_aux_size = 0;
2095 hevc->aux_addr = NULL;
2096 hevc->rpm_addr = NULL;
2097 hevc->lmem_addr = NULL;
2098
2099 hevc->curr_POC = INVALID_POC;
2100
2101 hevc->pic_list_init_flag = 0;
2102 hevc->use_cma_flag = 0;
2103 hevc->decode_idx = 0;
2104 hevc->slice_idx = 0;
2105 hevc->new_pic = 0;
2106 hevc->new_tile = 0;
2107 hevc->iPrevPOC = 0;
2108 hevc->list_no = 0;
2109 /* int m_uiMaxCUWidth = 1<<7; */
2110 /* int m_uiMaxCUHeight = 1<<7; */
2111 hevc->m_pocRandomAccess = MAX_INT;
2112 hevc->tile_enabled = 0;
2113 hevc->tile_x = 0;
2114 hevc->tile_y = 0;
2115 hevc->iPrevTid0POC = 0;
2116 hevc->slice_addr = 0;
2117 hevc->slice_segment_addr = 0;
2118 hevc->skip_flag = 0;
2119 hevc->misc_flag0 = 0;
2120
2121 hevc->cur_pic = NULL;
2122 hevc->col_pic = NULL;
2123 hevc->wait_buf = 0;
2124 hevc->error_flag = 0;
2125 hevc->head_error_flag = 0;
2126 hevc->error_skip_nal_count = 0;
2127 hevc->have_vps = 0;
2128 hevc->have_sps = 0;
2129 hevc->have_pps = 0;
2130 hevc->have_valid_start_slice = 0;
2131
2132 hevc->pts_mode = PTS_NORMAL;
2133 hevc->last_pts = 0;
2134 hevc->last_lookup_pts = 0;
2135 hevc->last_pts_us64 = 0;
2136 hevc->last_lookup_pts_us64 = 0;
2137 hevc->pts_mode_switching_count = 0;
2138 hevc->pts_mode_recovery_count = 0;
2139
2140 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2141 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2142 if (hevc->PB_skip_mode == 0)
2143 hevc->ignore_bufmgr_error = 0x1;
2144 else
2145 hevc->ignore_bufmgr_error = 0x0;
2146
2147 for (i = 0; i < MAX_REF_PIC_NUM; i++)
2148 hevc->m_PIC[i] = NULL;
2149 hevc->pic_num = 0;
2150 hevc->lcu_x_num_pre = 0;
2151 hevc->lcu_y_num_pre = 0;
2152 hevc->first_pic_after_recover = 0;
2153
2154 hevc->pre_top_pic = NULL;
2155 hevc->pre_bot_pic = NULL;
2156
2157 hevc->sei_present_flag = 0;
2158 hevc->valve_count = 0;
2159 hevc->first_pic_flag = 0;
2160#ifdef MULTI_INSTANCE_SUPPORT
2161 hevc->decoded_poc = INVALID_POC;
2162 hevc->start_process_time = 0;
2163 hevc->last_lcu_idx = 0;
2164 hevc->decode_timeout_count = 0;
2165 hevc->timeout_num = 0;
2166 hevc->eos = 0;
2167 hevc->pic_decoded_lcu_idx = -1;
2168 hevc->over_decode = 0;
2169 hevc->used_4k_num = -1;
2170 hevc->start_decoding_flag = 0;
2171 hevc->rps_set_id = 0;
2172 backup_decode_state(hevc);
2173#endif
2174#ifdef DETREFILL_ENABLE
2175 hevc->detbuf_adr = 0;
2176 hevc->detbuf_adr_virt = NULL;
2177#endif
2178}
2179
2180static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2181static int H265_alloc_mmu(struct hevc_state_s *hevc,
2182 struct PIC_s *new_pic, unsigned short bit_depth,
2183 unsigned int *mmu_index_adr);
2184
2185#ifdef DETREFILL_ENABLE
2186#define DETREFILL_BUF_SIZE (4 * 0x4000)
2187#define HEVC_SAO_DBG_MODE0 0x361e
2188#define HEVC_SAO_DBG_MODE1 0x361f
2189#define HEVC_SAO_CTRL10 0x362e
2190#define HEVC_SAO_CTRL11 0x362f
2191static int init_detrefill_buf(struct hevc_state_s *hevc)
2192{
2193 if (hevc->detbuf_adr_virt)
2194 return 0;
2195
2196 hevc->detbuf_adr_virt =
2197 (void *)dma_alloc_coherent(amports_get_dma_device(),
2198 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2199 GFP_KERNEL);
2200
2201 if (hevc->detbuf_adr_virt == NULL) {
2202 pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2203 return -1;
2204 }
2205 return 0;
2206}
2207
2208static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2209{
2210 if (hevc->detbuf_adr_virt) {
2211 dma_free_coherent(amports_get_dma_device(),
2212 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2213 hevc->detbuf_adr);
2214
2215 hevc->detbuf_adr_virt = NULL;
2216 hevc->detbuf_adr = 0;
2217 }
2218}
2219
2220/*
2221 * convert uncompressed frame buffer data from/to ddr
2222 */
2223static void convUnc8x4blk(uint16_t* blk8x4Luma,
2224 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2225{
2226 if (direction == 0) {
2227 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2228 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2229 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2230 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2231 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2232 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2233 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2234 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2235 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2236 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2237 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2238 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2239 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2240 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2241 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2242 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2243 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2244 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2245
2246 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2247 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2248 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2249 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2250 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2251 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2252 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2253 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2254 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2255 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2256 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2257 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2258 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2259 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2260 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2261 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2262 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2263 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2264
2265 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2266 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2267 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2268 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2269 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2270 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2271 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2272 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2273 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2274 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2275 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2276 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2277 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2278 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2279 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2280 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2281 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2282 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2283
2284 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2285 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2286 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2287 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2288 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2289 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2290 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2291 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2292 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2293 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2294 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2295 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2296 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2297 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2298 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2299 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2300 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2301 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2302 } else {
2303 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2304 blk8x4Luma[3 + 0 * 8];
2305 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2306 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2307 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2308 (blk8x4Luma[3 + 3 * 8] >> 2);
2309 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2310 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2311 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2312 (blk8x4Luma[7 + 2 * 8] >>4);
2313 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2314 blk8x4Cb[0 + 0 * 4];
2315 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2316 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2317 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2318
2319 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2320 blk8x4Luma[0 + 0 * 8];
2321 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2322 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2323 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2324 (blk8x4Luma[0 + 1 * 8] >> 2);
2325 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2326 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2327 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2328 (blk8x4Luma[0 + 2 * 8] >>4);
2329 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2330 blk8x4Luma[2 + 2 * 8];
2331 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2332 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2333 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2334
2335 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2336 blk8x4Luma[4 + 0 * 8];
2337 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2338 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2339 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2340 (blk8x4Luma[4 + 1 * 8] >> 2);
2341 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2342 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2343 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2344 (blk8x4Luma[4 + 2 * 8] >>4);
2345 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2346 blk8x4Luma[6 + 2 * 8];
2347 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2348 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2349 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2350
2351 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2352 blk8x4Cb[1 + 0 * 4];
2353 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2354 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2355 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2356 (blk8x4Cr[2 + 0 * 4] >> 2);
2357 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2358 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2359 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2360 (blk8x4Cb[1 + 1 * 4] >>4);
2361 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2362 blk8x4Cb[2 + 1 * 4];
2363 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2364 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2365 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2366 }
2367}
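/*
 * Packing note for convUnc8x4blk() (derived from the code above): one 8x4
 * luma block plus its 4x2 Cb and Cr blocks (48 samples of 10 bits each) are
 * packed into 32 consecutive 16-bit words of cmBodyBuf, twelve samples per
 * group of eight words (120 of every 128 bits used). direction == 0 unpacks
 * DDR data into the luma/chroma arrays, direction != 0 packs them back.
 */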
2368
2369static void corrRefillWithAmrisc (
2370 struct hevc_state_s *hevc,
2371 uint32_t cmHeaderBaseAddr,
2372 uint32_t picWidth,
2373 uint32_t ctuPosition)
2374{
2375 int32_t i;
2376 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2377 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2378 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2379
2380 uint16_t cmBodyBuf[32 * 18];
2381
2382 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2383 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2384 uint32_t stride64x64 = pic_width_x64 * 128;
2385 uint32_t addr_offset64x64_abv = stride64x64 *
2386 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2387 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2388 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2389 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2390 unsigned int tmpData32;
2391
2392 uint16_t blkBuf0Y[32];
2393 uint16_t blkBuf0Cb[8];
2394 uint16_t blkBuf0Cr[8];
2395 uint16_t blkBuf1Y[32];
2396 uint16_t blkBuf1Cb[8];
2397 uint16_t blkBuf1Cr[8];
2398 int32_t blkBufCnt = 0;
2399
2400 int32_t blkIdx;
2401
2402 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2403 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2404 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2405 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2406
2407 for (i = 0; i < 32 * 18; i++)
2408 cmBodyBuf[i] = 0;
2409
2410 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2411 "%s, %d\n", __func__, __LINE__);
2412 do {
2413 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2414 } while (tmpData32);
2415 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2416 "%s, %d\n", __func__, __LINE__);
2417
2418 hevc_print(hevc, H265_DEBUG_DETAIL,
2419 "cmBodyBuf from detbuf:\n");
2420 for (i = 0; i < 32 * 18; i++) {
2421 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2422 if (get_dbg_flag(hevc) &
2423 H265_DEBUG_DETAIL) {
2424 if ((i & 0xf) == 0)
2425 hevc_print_cont(hevc, 0, "\n");
2426 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2427 }
2428 }
2429 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2430
2431 for (i = 0; i < 32; i++)
2432 blkBuf0Y[i] = 0;
2433 for (i = 0; i < 8; i++)
2434 blkBuf0Cb[i] = 0;
2435 for (i = 0; i < 8; i++)
2436 blkBuf0Cr[i] = 0;
2437 for (i = 0; i < 32; i++)
2438 blkBuf1Y[i] = 0;
2439 for (i = 0; i < 8; i++)
2440 blkBuf1Cb[i] = 0;
2441 for (i = 0; i < 8; i++)
2442 blkBuf1Cr[i] = 0;
2443
2444 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2445 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2446 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2447 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2448 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2449 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2450 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2451
2452 if (!aboveCtuAvailable && inAboveCtu)
2453 continue;
2454
2455 /* detRefillBuf --> 8x4block*/
2456 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2457
2458 if (restoreEnable) {
2459 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2460 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2461 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2462 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2463 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2464 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2465 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2466 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2467 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2468 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2469 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2470 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2471 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2472 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2473 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2474 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2475 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2476 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2477 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2478 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2479 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2480 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2481 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2482 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2483 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2484 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2485 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2486 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2487 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2488 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2489 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2490 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2491
2492 /*Store data back to DDR*/
2493 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2494 }
2495
2496 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2497 }
2498
2499 hevc_print(hevc, H265_DEBUG_DETAIL,
2500 "cmBodyBuf to detbuf:\n");
2501 for (i = 0; i < 32 * 18; i++) {
2502 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2503 if (get_dbg_flag(hevc) &
2504 H265_DEBUG_DETAIL) {
2505 if ((i & 0xf) == 0)
2506 hevc_print_cont(hevc, 0, "\n");
2507 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2508 }
2509 }
2510 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2511
2512 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2513 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2514 "%s, %d\n", __func__, __LINE__);
2515 do {
2516 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2517 } while (tmpData32);
2518 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2519 "%s, %d\n", __func__, __LINE__);
2520}
2521
2522static void delrefill(struct hevc_state_s *hevc)
2523{
2524 /*
2525 * corrRefill
2526 */
2527 /*HEVC_SAO_DBG_MODE0: picGlobalVariable
2528 [31:30]error number
2529 [29:20]error2([9:7]tilex[6:0]ctuy)
2530 [19:10]error1 [9:0]error0*/
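 /*
 * Illustrative decode of that layout: detResult == 0x40000123 means
 * errorNum = 1 and error0 = 0x123, i.e. tilex = (0x123 >> 7) - 1 = 1
 * and ctuy = 0x123 & 0x7f = 35, matching the loop below.
 */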
2531 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2532 uint32_t errorIdx;
2533 uint32_t errorNum = (detResult>>30);
2534
2535 if (detResult) {
2536 hevc_print(hevc, H265_DEBUG_BUFMGR,
2537 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2538 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2539 uint32_t errorPos = errorIdx * 10;
2540 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2541 uint32_t tilex = (errorResult >> 7) - 1;
2542 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2543 + hevc->m_tile[0][tilex].width - 1;
2544 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2545 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2546 hevc_print(hevc, H265_DEBUG_BUFMGR,
2547 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2548 errorIdx, tilex, ctux, ctux, ctuy, ctuy);
2549 corrRefillWithAmrisc(
2550 hevc,
2551 (uint32_t)hevc->cur_pic->header_adr,
2552 hevc->pic_w,
2553 ctuPosition);
2554 }
2555
2556 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2557 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2558 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2559 }
2560}
2561#endif
2562
2563static void get_rpm_param(union param_u *params)
2564{
2565 int i;
2566 unsigned int data32;
2567
2568 for (i = 0; i < 128; i++) {
2569 do {
2570 data32 = READ_VREG(RPM_CMD_REG);
2571 /* hevc_print(hevc, 0, "%x\n", data32); */
2572 } while ((data32 & 0x10000) == 0);
2573 params->l.data[i] = data32 & 0xffff;
2574 /* hevc_print(hevc, 0, "%x\n", data32); */
2575 WRITE_VREG(RPM_CMD_REG, 0);
2576 }
2577}
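/*
 * Note on the RPM handshake implemented above: each of the 128 parameter
 * words is polled in RPM_CMD_REG until bit 16 signals valid data, the low
 * 16 bits are copied into params->l.data[], and 0 is written back to clear
 * the register before the next word.
 */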
2578
2579static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2580{
2581 int i;
2582 struct PIC_s *pic;
2583 struct PIC_s *ret_pic = NULL;
2584 if (POC == INVALID_POC)
2585 return NULL;
2586 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2587 pic = hevc->m_PIC[i];
2588 if (pic == NULL || pic->index == -1 ||
2589 pic->BUF_index == -1)
2590 continue;
2591 if (pic->POC == POC) {
2592 if (ret_pic == NULL)
2593 ret_pic = pic;
2594 else {
2595 if (pic->decode_idx > ret_pic->decode_idx)
2596 ret_pic = pic;
2597 }
2598 }
2599 }
2600 return ret_pic;
2601}
2602
2603static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2604{
2605 int i;
2606 struct PIC_s *pic;
2607 struct PIC_s *ret_pic = NULL;
2608
2609 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2610 pic = hevc->m_PIC[i];
2611 if (pic == NULL || pic->index == -1 ||
2612 pic->BUF_index == -1)
2613 continue;
2614 if ((pic->POC == POC) && (pic->referenced)) {
2615 if (ret_pic == NULL)
2616 ret_pic = pic;
2617 else {
2618 if (pic->decode_idx > ret_pic->decode_idx)
2619 ret_pic = pic;
2620 }
2621 }
2622 }
2623
2624 if (ret_pic == NULL) {
2625 if (get_dbg_flag(hevc)) {
2626 hevc_print(hevc, 0,
2627 "Warning: POC %d is not in the referenced list\n",
2628 POC);
2629 }
2630 ret_pic = get_pic_by_POC(hevc, POC);
2631 }
2632 return ret_pic;
2633}
2634
2635static unsigned int log2i(unsigned int val)
2636{
2637 unsigned int ret = -1;
2638
2639 while (val != 0) {
2640 val >>= 1;
2641 ret++;
2642 }
2643 return ret;
2644}
2645
2646static int init_buf_spec(struct hevc_state_s *hevc);
2647static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2648{
2649 if (hevc->mmu_box)
2650 decoder_mmu_box_free(hevc->mmu_box);
2651 hevc->mmu_box = NULL;
2652
2653 if (hevc->bmmu_box)
2654 decoder_bmmu_box_free(hevc->bmmu_box);
2655 hevc->bmmu_box = NULL;
2656}
2657static int init_mmu_buffers(struct hevc_state_s *hevc)
2658{
2659 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2660 CODEC_MM_FLAGS_TVP : 0;
2661 int buf_size = 64;
2662
2663 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2664 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2665 buf_size = 24;
2666 }
2667
2668 if (get_dbg_flag(hevc)) {
2669 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2670 __func__, hevc->max_pic_w, hevc->max_pic_h);
2671 }
2672
2673 hevc->need_cache_size = buf_size * SZ_1M;
2674 hevc->sc_start_time = get_jiffies_64();
2675 if (hevc->mmu_enable
2676 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2677 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2678 hevc->index,
2679 MAX_REF_PIC_NUM,
2680 buf_size * SZ_1M,
2681 tvp_flag
2682 );
2683 if (!hevc->mmu_box) {
2684 pr_err("h265 alloc mmu box failed!!\n");
2685 return -1;
2686 }
2687 }
2688
2689 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2690 hevc->index,
2691 BMMU_MAX_BUFFERS,
2692 4 + PAGE_SHIFT,
2693 CODEC_MM_FLAGS_CMA_CLEAR |
2694 CODEC_MM_FLAGS_FOR_VDECODER |
2695 tvp_flag);
2696 if (!hevc->bmmu_box) {
2697 if (hevc->mmu_box)
2698 decoder_mmu_box_free(hevc->mmu_box);
2699 hevc->mmu_box = NULL;
2700 pr_err("h265 alloc mmu box failed!!\n");
2701 return -1;
2702 }
2703 return 0;
2704}
2705
2706struct buf_stru_s
2707{
2708 int lcu_total;
2709 int mc_buffer_size_h;
2710 int mc_buffer_size_u_v_h;
2711};
2712
2713#ifndef MV_USE_FIXED_BUF
2714static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2715{
2716 int i;
2717 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2718 if (hevc->m_mv_BUF[i].start_adr) {
2719 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2720 hevc_print(hevc, 0,
2721 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2722 i, hevc->m_mv_BUF[i].start_adr,
2723 hevc->m_mv_BUF[i].size,
2724 hevc->m_mv_BUF[i].used_flag);
2725 decoder_bmmu_box_free_idx(
2726 hevc->bmmu_box,
2727 MV_BUFFER_IDX(i));
2728 hevc->m_mv_BUF[i].start_adr = 0;
2729 hevc->m_mv_BUF[i].size = 0;
2730 hevc->m_mv_BUF[i].used_flag = 0;
2731 }
2732 }
2733 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2734 if (hevc->m_PIC[i] != NULL)
2735 hevc->m_PIC[i]->mv_buf_index = -1;
2736 }
2737
2738}
2739
2740static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2741{
2742 int ret = 0;
2743 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2744 if (decoder_bmmu_box_alloc_buf_phy
2745 (hevc->bmmu_box,
2746 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2747 DRIVER_NAME,
2748 &hevc->m_mv_BUF[i].start_adr) < 0) {
2749 hevc->m_mv_BUF[i].start_adr = 0;
2750 ret = -1;
2751 } else {
2752 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2753 hevc->m_mv_BUF[i].used_flag = 0;
2754 ret = 0;
2755 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2756 hevc_print(hevc, 0,
2757 "MV Buffer %d: start_adr %p size %x\n",
2758 i,
2759 (void *)hevc->m_mv_BUF[i].start_adr,
2760 hevc->m_mv_BUF[i].size);
2761 }
2762 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2763 void *mem_start_virt;
2764 mem_start_virt =
2765 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2766 if (mem_start_virt) {
2767 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2768 codec_mm_dma_flush(mem_start_virt,
2769 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2770 } else {
2771 mem_start_virt = codec_mm_vmap(
2772 hevc->m_mv_BUF[i].start_adr,
2773 hevc->m_mv_BUF[i].size);
2774 if (mem_start_virt) {
2775 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2776 codec_mm_dma_flush(mem_start_virt,
2777 hevc->m_mv_BUF[i].size,
2778 DMA_TO_DEVICE);
2779 codec_mm_unmap_phyaddr(mem_start_virt);
2780 } else {
2781 /*no virtual address for tvp (secure) playback,
2782 may need to be cleared by ucode.*/
2783 pr_err("ref %s mem_start_virt failed\n", __func__);
2784 }
2785 }
2786 }
2787 }
2788 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2789 return ret;
2790}
2791#endif
2792
2793static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2794{
2795#ifdef MV_USE_FIXED_BUF
2796 if (pic && pic->index >= 0) {
2797 if (IS_8K_SIZE(pic->width, pic->height)) {
2798 pic->mpred_mv_wr_start_addr =
2799 hevc->work_space_buf->mpred_mv.buf_start
2800 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2801 } else {
2802 pic->mpred_mv_wr_start_addr =
2803 hevc->work_space_buf->mpred_mv.buf_start
2804 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2805 }
2806 }
2807 return 0;
2808#else
2809 int i;
2810 int ret = -1;
2811 int new_size;
2812 if (IS_8K_SIZE(pic->width, pic->height))
2813 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2814 else if (IS_4K_SIZE(pic->width, pic->height))
2815 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2816 else
2817 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2818 if (new_size != hevc->mv_buf_size) {
2819 dealloc_mv_bufs(hevc);
2820 hevc->mv_buf_size = new_size;
2821 }
2822 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2823 if (hevc->m_mv_BUF[i].start_adr &&
2824 hevc->m_mv_BUF[i].used_flag == 0) {
2825 hevc->m_mv_BUF[i].used_flag = 1;
2826 ret = i;
2827 break;
2828 }
2829 }
2830 if (ret < 0) {
2831 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2832 if (hevc->m_mv_BUF[i].start_adr == 0) {
2833 if (alloc_mv_buf(hevc, i) >= 0) {
2834 hevc->m_mv_BUF[i].used_flag = 1;
2835 ret = i;
2836 }
2837 break;
2838 }
2839 }
2840 }
2841
2842 if (ret >= 0) {
2843 pic->mv_buf_index = ret;
2844 pic->mpred_mv_wr_start_addr =
2845 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2846 (~0xffff);
2847 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2848 "%s => %d (0x%x) size 0x%x\n",
2849 __func__, ret,
2850 pic->mpred_mv_wr_start_addr,
2851 hevc->m_mv_BUF[ret].size);
2852
2853 } else {
2854 hevc_print(hevc, 0,
2855 "%s: Error, mv buf is not enough\n",
2856 __func__);
2857 }
2858 return ret;
2859
2860#endif
2861}
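/*
 * Sizing note for get_mv_buf() (non fixed-buf path above): the MV buffer is
 * MPRED_MV_BUF_SIZE, MPRED_4K_MV_BUF_SIZE or MPRED_8K_MV_BUF_SIZE depending
 * on the picture size, plus 0x10000 of padding so that the write start
 * address handed to the hardware can be rounded up to the next 64KB boundary
 * ((start_adr + 0xffff) & ~0xffff) while staying inside the buffer.
 */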
2862
2863static void put_mv_buf(struct hevc_state_s *hevc,
2864 struct PIC_s *pic)
2865{
2866#ifndef MV_USE_FIXED_BUF
2867 int i = pic->mv_buf_index;
2868 if (i < 0 || i >= MAX_REF_PIC_NUM) {
2869 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2870 "%s: index %d beyond range\n",
2871 __func__, i);
2872 return;
2873 }
2874 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2875 "%s(%d): used_flag(%d)\n",
2876 __func__, i,
2877 hevc->m_mv_BUF[i].used_flag);
2878
2879 if (hevc->m_mv_BUF[i].start_adr &&
2880 hevc->m_mv_BUF[i].used_flag)
2881 hevc->m_mv_BUF[i].used_flag = 0;
2882 pic->mv_buf_index = -1;
2883#endif
2884}
2885
2886static int cal_current_buf_size(struct hevc_state_s *hevc,
2887 struct buf_stru_s *buf_stru)
2888{
2889
2890 int buf_size;
2891 int pic_width = hevc->pic_w;
2892 int pic_height = hevc->pic_h;
2893 int lcu_size = hevc->lcu_size;
2894 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
2895 1 : pic_width / lcu_size;
2896 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
2897 1 : pic_height / lcu_size;
2898 /*SUPPORT_10BIT*/
2899 int losless_comp_header_size = compute_losless_comp_header_size
2900 (pic_width, pic_height);
2901 /*always alloc buf for 10bit*/
2902 int losless_comp_body_size = compute_losless_comp_body_size
2903 (hevc, pic_width, pic_height, 0);
2904 int mc_buffer_size = losless_comp_header_size
2905 + losless_comp_body_size;
2906 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
2907 int mc_buffer_size_u_v_h = 0;
2908
2909 int dw_mode = get_double_write_mode(hevc);
2910
2911 if (hevc->mmu_enable) {
2912 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
2913 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
2914 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
2915 << 16;
2916 else
2917 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
2918 << 16;
2919 } else
2920 buf_size = 0;
2921
2922 if (dw_mode) {
2923 int pic_width_dw = pic_width /
2924 get_double_write_ratio(hevc, dw_mode);
2925 int pic_height_dw = pic_height /
2926 get_double_write_ratio(hevc, dw_mode);
2927
2928 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
2929 pic_width_dw / lcu_size + 1 :
2930 pic_width_dw / lcu_size;
2931 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
2932 pic_height_dw / lcu_size + 1 :
2933 pic_height_dw / lcu_size;
2934 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
2935
2936 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
2937 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
2938 /*64k alignment*/
2939 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
2940 }
2941
2942 if ((!hevc->mmu_enable) &&
2943 ((dw_mode & 0x10) == 0)) {
2944 /* compressed mode without mmu:
2945 a buffer is needed for the compressed frame data*/
2946 buf_size += (mc_buffer_size_h << 16);
2947 }
2948
2949 /*in case the start address is not 64k aligned*/
2950 if (buf_size > 0)
2951 buf_size += 0x10000;
2952
2953 if (buf_stru) {
2954 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
2955 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
2956 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
2957 }
2958
2959 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
2960 pic_width, pic_height, losless_comp_header_size,
2961 losless_comp_body_size, mc_buffer_size_h,
2962 mc_buffer_size_u_v_h, buf_size);
2963
2964 return buf_size;
2965}
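/*
 * Worked example for cal_current_buf_size() (illustrative): for a 1920x1088
 * picture with mmu_enable == 1, lcu_size == 64 and double write mode 1 (1:1),
 *   compressed header part: ((0x48000 + 0xffff) >> 16) << 16      = 0x050000
 *   double write planes: 510 LCUs * 64 * 64 / 2 = 0xff000, rounded
 *                        up to 0x100000 and tripled                = 0x300000
 *   alignment pad                                                  = 0x010000
 * giving roughly 0x360000 bytes per frame buffer.
 */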
2966
2967static int alloc_buf(struct hevc_state_s *hevc)
2968{
2969 int i;
2970 int ret = -1;
2971 int buf_size = cal_current_buf_size(hevc, NULL);
2972 struct vdec_fb *fb = NULL;
2973
2974 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
2975 return ret;
2976
2977 for (i = 0; i < BUF_POOL_SIZE; i++) {
2978 if (hevc->m_BUF[i].start_adr == 0)
2979 break;
2980 }
2981 if (i < BUF_POOL_SIZE) {
2982 if (buf_size > 0) {
2983 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2984 /*alloc compress header first*/
2985
2986 if (hevc->is_used_v4l) {
2987 ret = v4l_get_fb(hevc->v4l2_ctx, &fb);
2988 if (ret) {
2989 hevc_print(hevc, PRINT_FLAG_ERROR,
2990 "[%d] get fb fail.\n",
2991 ((struct aml_vcodec_ctx *)
2992 (hevc->v4l2_ctx))->id);
2993 return ret;
2994 }
2995
2996 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
2997 hevc->m_BUF[i].start_adr =
2998 virt_to_phys(fb->base_y.va);
2999 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
3000 "[%d] %s(), v4l ref buf addr: %p\n",
3001 ((struct aml_vcodec_ctx *)
3002 (hevc->v4l2_ctx))->id, __func__, fb);
3003 } else {
3004 ret = decoder_bmmu_box_alloc_buf_phy
3005 (hevc->bmmu_box,
3006 VF_BUFFER_IDX(i), buf_size,
3007 DRIVER_NAME,
3008 &hevc->m_BUF[i].start_adr);
3009 if (ret < 0) {
3010 hevc->m_BUF[i].start_adr = 0;
3011 if (i <= 8) {
3012 hevc->fatal_error |=
3013 DECODER_FATAL_ERROR_NO_MEM;
3014 hevc_print(hevc, PRINT_FLAG_ERROR,
3015 "%s[%d], size: %d, no mem fatal err\n",
3016 __func__, i, buf_size);
3017 }
3018 }
3019 }
3020
3021 if (ret >= 0) {
3022 hevc->m_BUF[i].size = buf_size;
3023 hevc->m_BUF[i].used_flag = 0;
3024 ret = 0;
3025 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3026 hevc_print(hevc, 0,
3027 "Buffer %d: start_adr %p size %x\n",
3028 i,
3029 (void *)hevc->m_BUF[i].start_adr,
3030 hevc->m_BUF[i].size);
3031 }
3032 /*flush the buffer to make sure no cache lines are dirty*/
3033 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3034 void *mem_start_virt;
3035 mem_start_virt =
3036 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3037 if (mem_start_virt) {
3038 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3039 codec_mm_dma_flush(mem_start_virt,
3040 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3041 } else {
3042 mem_start_virt = codec_mm_vmap(
3043 hevc->m_BUF[i].start_adr,
3044 hevc->m_BUF[i].size);
3045 if (mem_start_virt) {
3046 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3047 codec_mm_dma_flush(mem_start_virt,
3048 hevc->m_BUF[i].size,
3049 DMA_TO_DEVICE);
3050 codec_mm_unmap_phyaddr(mem_start_virt);
3051 } else {
3052 /*no virtual address for tvp (secure) playback,
3053 may need to be cleared by ucode.*/
3054 pr_err("ref %s mem_start_virt failed\n", __func__);
3055 }
3056 }
3057 }
3058 }
3059 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3060 } else
3061 ret = 0;
3062 }
3063 if (ret >= 0) {
3064 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3065 hevc_print(hevc, 0,
3066 "alloc buf(%d) for %d/%d (size 0x%x) => %p\n",
3067 i, hevc->pic_w, hevc->pic_h,
3068 buf_size,
3069 hevc->m_BUF[i].start_adr);
3070 }
3071 } else {
3072 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3073 hevc_print(hevc, 0,
3074 "alloc buf(%d) for %d/%d (size 0x%x) => Fail!!!\n",
3075 i, hevc->pic_w, hevc->pic_h,
3076 buf_size);
3077 }
3078 }
3079 return ret;
3080}
3081
3082static void set_buf_unused(struct hevc_state_s *hevc, int i)
3083{
3084 if (i >= 0 && i < BUF_POOL_SIZE)
3085 hevc->m_BUF[i].used_flag = 0;
3086}
3087
3088static void dealloc_unused_buf(struct hevc_state_s *hevc)
3089{
3090 int i;
3091 for (i = 0; i < BUF_POOL_SIZE; i++) {
3092 if (hevc->m_BUF[i].start_adr &&
3093 hevc->m_BUF[i].used_flag == 0) {
3094 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3095 hevc_print(hevc, 0,
3096 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3097 i, hevc->m_BUF[i].start_adr,
3098 hevc->m_BUF[i].size);
3099 }
3100 if (!hevc->is_used_v4l)
3101 decoder_bmmu_box_free_idx(
3102 hevc->bmmu_box,
3103 VF_BUFFER_IDX(i));
3104 hevc->m_BUF[i].start_adr = 0;
3105 hevc->m_BUF[i].size = 0;
3106 }
3107 }
3108
3109}
3110
3111static void dealloc_pic_buf(struct hevc_state_s *hevc,
3112 struct PIC_s *pic)
3113{
3114 int i = pic->BUF_index;
3115 pic->BUF_index = -1;
3116 if (i >= 0 &&
3117 i < BUF_POOL_SIZE &&
3118 hevc->m_BUF[i].start_adr) {
3119 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3120 hevc_print(hevc, 0,
3121 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3122 i, hevc->m_BUF[i].start_adr,
3123 hevc->m_BUF[i].size);
3124 }
3125
3126 if (!hevc->is_used_v4l)
3127 decoder_bmmu_box_free_idx(
3128 hevc->bmmu_box,
3129 VF_BUFFER_IDX(i));
3130 hevc->m_BUF[i].used_flag = 0;
3131 hevc->m_BUF[i].start_adr = 0;
3132 hevc->m_BUF[i].size = 0;
3133 }
3134}
3135
3136static int get_work_pic_num(struct hevc_state_s *hevc)
3137{
3138 int used_buf_num = 0;
3139 int sps_pic_buf_diff = 0;
3140
3141 if (get_dynamic_buf_num_margin(hevc) > 0) {
3142 if ((!hevc->sps_num_reorder_pics_0) &&
3143 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3144 /* the range of sps_num_reorder_pics_0 is in
3145 [0, sps_max_dec_pic_buffering_minus1_0] */
3146 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3147 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3148 } else
3149 used_buf_num = hevc->sps_num_reorder_pics_0
3150 + get_dynamic_buf_num_margin(hevc);
3151
3152 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3153 - hevc->sps_num_reorder_pics_0;
3154
3155#ifdef MULTI_INSTANCE_SUPPORT
3156 /*
3157 need one more for multi instance, as
3158 apply_ref_pic_set() has no chance to run to
3159 clear the referenced flag in some cases
3160 */
3161 if (hevc->m_ins_flag)
3162 used_buf_num++;
3163#endif
3164 } else
3165 used_buf_num = max_buf_num;
3166
3167 if (hevc->save_buffer_mode)
3168 hevc_print(hevc, 0,
3169 "save buf mode: dynamic_buf_num_margin %d ----> %d\n",
3170 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3171
3172 if (sps_pic_buf_diff >= 4)
3173 {
3174 used_buf_num += 1;
3175 }
3176
3177 if (used_buf_num > MAX_BUF_NUM)
3178 used_buf_num = MAX_BUF_NUM;
3179 return used_buf_num;
3180}
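/*
 * Example of the sizing rule above (illustrative numbers): with
 * sps_num_reorder_pics_0 == 4, sps_max_dec_pic_buffering_minus1_0 == 5 and a
 * dynamic buffer margin of 7, used_buf_num starts at 4 + 7 = 11, gains one
 * more buffer when m_ins_flag is set (multi instance), is not bumped further
 * because the reorder/buffering difference (1) is below 4, and is finally
 * clamped to MAX_BUF_NUM.
 */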
3181
3182static int get_alloc_pic_count(struct hevc_state_s *hevc)
3183{
3184 int alloc_pic_count = 0;
3185 int i;
3186 struct PIC_s *pic;
3187 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3188 pic = hevc->m_PIC[i];
3189 if (pic && pic->index >= 0)
3190 alloc_pic_count++;
3191 }
3192 return alloc_pic_count;
3193}
3194
3195static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3196{
3197 int ret = -1;
3198 int i;
3199 /*int lcu_size_log2 = hevc->lcu_size_log2;
3200 int MV_MEM_UNIT=lcu_size_log2==
3201 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3202 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3203 5 ? 0x80 : 0x20;
3204 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3205 hevc->work_space_buf->mpred_mv.buf_size;*/
3206 unsigned int y_adr = 0;
3207 struct buf_stru_s buf_stru;
3208 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3209 int dw_mode = get_double_write_mode(hevc);
3210
3211 for (i = 0; i < BUF_POOL_SIZE; i++) {
3212 if (hevc->m_BUF[i].start_adr != 0 &&
3213 hevc->m_BUF[i].used_flag == 0 &&
3214 buf_size <= hevc->m_BUF[i].size) {
3215 hevc->m_BUF[i].used_flag = 1;
3216 break;
3217 }
3218 }
3219 if (i >= BUF_POOL_SIZE)
3220 return -1;
3221
3222 if (hevc->mmu_enable) {
3223 pic->header_adr = hevc->m_BUF[i].start_adr;
3224 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3225 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3226 y_adr = hevc->m_BUF[i].start_adr +
3227 MMU_COMPRESS_8K_HEADER_SIZE;
3228 else
3229 y_adr = hevc->m_BUF[i].start_adr +
3230 MMU_COMPRESS_HEADER_SIZE;
3231 } else
3232 y_adr = hevc->m_BUF[i].start_adr;
3233
3234 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3235 pic->POC = INVALID_POC;
3236 /*ensure get_pic_by_POC()
3237 does not return a buffer that has not been decoded*/
3238 pic->BUF_index = i;
3239
3240 if ((!hevc->mmu_enable) &&
3241 ((dw_mode & 0x10) == 0)
3242 ) {
3243 pic->mc_y_adr = y_adr;
3244 y_adr += (buf_stru.mc_buffer_size_h << 16);
3245 }
3246 pic->mc_canvas_y = pic->index;
3247 pic->mc_canvas_u_v = pic->index;
3248 if (dw_mode & 0x10) {
3249 pic->mc_y_adr = y_adr;
3250 pic->mc_u_v_adr = y_adr +
3251 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3252
3253 pic->mc_canvas_y = (pic->index << 1);
3254 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3255
3256 pic->dw_y_adr = pic->mc_y_adr;
3257 pic->dw_u_v_adr = pic->mc_u_v_adr;
3258 } else if (dw_mode) {
3259 pic->dw_y_adr = y_adr;
3260 pic->dw_u_v_adr = pic->dw_y_adr +
3261 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3262 }
3263
3264
3265 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3266 hevc_print(hevc, 0,
3267 "%s index %d BUF_index %d mc_y_adr %x\n",
3268 __func__, pic->index,
3269 pic->BUF_index, pic->mc_y_adr);
3270 if (hevc->mmu_enable &&
3271 dw_mode)
3272 hevc_print(hevc, 0,
3273 "mmu double write adr %ld\n",
3274 pic->cma_alloc_addr);
3275
3276
3277 }
3278 ret = 0;
3279
3280 return ret;
3281}
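/*
 * Buffer layout produced by config_pic() (summary of the code above): in MMU
 * mode the per-picture buffer starts with the compressed-frame header
 * (MMU_COMPRESS_HEADER_SIZE, or the 8K variant on SM1 and later), followed by
 * the 64KB-aligned double write luma plane and then the chroma plane; without
 * MMU the compressed body/header area precedes the double write planes.
 */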
3282
3283static void init_pic_list(struct hevc_state_s *hevc)
3284{
3285 int i;
3286 int init_buf_num = get_work_pic_num(hevc);
3287 int dw_mode = get_double_write_mode(hevc);
3288 struct vdec_s *vdec = hw_to_vdec(hevc);
3289 /*alloc decoder buf*/
3290 for (i = 0; i < init_buf_num; i++) {
3291 if (alloc_buf(hevc) < 0) {
3292 if (i <= 8) {
3293 /*if (i+1) >= 9 buffers were allocated,
3294 don't report a fatal error.*/
3295 hevc->fatal_error |=
3296 DECODER_FATAL_ERROR_NO_MEM;
3297 }
3298 break;
3299 }
3300 }
3301
3302 for (i = 0; i < init_buf_num; i++) {
3303 struct PIC_s *pic =
3304 vmalloc(sizeof(struct PIC_s));
3305 if (pic == NULL) {
3306 hevc_print(hevc, 0,
3307 "%s: alloc pic %d fail!!!\n",
3308 __func__, i);
3309 break;
3310 }
3311 memset(pic, 0, sizeof(struct PIC_s));
3312 hevc->m_PIC[i] = pic;
3313 pic->index = i;
3314 pic->BUF_index = -1;
3315 pic->mv_buf_index = -1;
3316 if (vdec->parallel_dec == 1) {
3317 pic->y_canvas_index = -1;
3318 pic->uv_canvas_index = -1;
3319 }
3320 if (config_pic(hevc, pic) < 0) {
3321 if (get_dbg_flag(hevc))
3322 hevc_print(hevc, 0,
3323 "Config_pic %d fail\n", pic->index);
3324 pic->index = -1;
3325 i++;
3326 break;
3327 }
3328 pic->width = hevc->pic_w;
3329 pic->height = hevc->pic_h;
3330 pic->double_write_mode = dw_mode;
3331 if (pic->double_write_mode)
3332 set_canvas(hevc, pic);
3333 }
3334
3335 for (; i < MAX_REF_PIC_NUM; i++) {
3336 struct PIC_s *pic =
3337 vmalloc(sizeof(struct PIC_s));
3338 if (pic == NULL) {
3339 hevc_print(hevc, 0,
3340 "%s: alloc pic %d fail!!!\n",
3341 __func__, i);
3342 break;
3343 }
3344 memset(pic, 0, sizeof(struct PIC_s));
3345 hevc->m_PIC[i] = pic;
3346 pic->index = -1;
3347 pic->BUF_index = -1;
3348 if (vdec->parallel_dec == 1) {
3349 pic->y_canvas_index = -1;
3350 pic->uv_canvas_index = -1;
3351 }
3352 }
3353
3354}
3355
3356static void uninit_pic_list(struct hevc_state_s *hevc)
3357{
3358 struct vdec_s *vdec = hw_to_vdec(hevc);
3359 int i;
3360#ifndef MV_USE_FIXED_BUF
3361 dealloc_mv_bufs(hevc);
3362#endif
3363 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3364 struct PIC_s *pic = hevc->m_PIC[i];
3365
3366 if (pic) {
3367 if (vdec->parallel_dec == 1) {
3368 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3369 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3370 }
3371 release_aux_data(hevc, pic);
3372 vfree(pic);
3373 hevc->m_PIC[i] = NULL;
3374 }
3375 }
3376}
3377
3378#ifdef LOSLESS_COMPRESS_MODE
3379static void init_decode_head_hw(struct hevc_state_s *hevc)
3380{
3381
3382 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3383 unsigned int data32;
3384
3385 int losless_comp_header_size =
3386 compute_losless_comp_header_size(hevc->pic_w,
3387 hevc->pic_h);
3388 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3389 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3390
3391 hevc->losless_comp_body_size = losless_comp_body_size;
3392
3393
3394 if (hevc->mmu_enable) {
3395 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3396 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3397 } else {
3398 if (hevc->mem_saving_mode == 1)
3399 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3400 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3401 else
3402 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3403 ((workaround_enable & 2) ? 1 : 0));
3404 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3405 /*
3406 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3407 * //8-bit mode
3408 */
3409 }
3410 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3411 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3412 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3413
3414 if (hevc->mmu_enable) {
3415 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3416 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3417 buf_spec->mmu_vbh.buf_start +
3418 buf_spec->mmu_vbh.buf_size/2);
3419 data32 = READ_VREG(HEVC_SAO_CTRL9);
3420 data32 |= 0x1;
3421 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3422
3423 /* use HEVC_CM_HEADER_START_ADDR */
3424 data32 = READ_VREG(HEVC_SAO_CTRL5);
3425 data32 |= (1<<10);
3426 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3427 }
3428
3429 if (!hevc->m_ins_flag)
3430 hevc_print(hevc, 0,
3431 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3432 __func__, hevc->pic_w, hevc->pic_h,
3433 losless_comp_body_size, losless_comp_header_size);
3434
3435}
3436#endif
3437#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3438
3439static void init_pic_list_hw(struct hevc_state_s *hevc)
3440{
3441 int i;
3442 int cur_pic_num = MAX_REF_PIC_NUM;
3443 int dw_mode = get_double_write_mode(hevc);
3444 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3445 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3446 (0x1 << 1) | (0x1 << 2));
3447 else
3448 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3449
3450 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3451 if (hevc->m_PIC[i] == NULL ||
3452 hevc->m_PIC[i]->index == -1) {
3453 cur_pic_num = i;
3454 break;
3455 }
3456 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3457 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3458 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3459 hevc->m_PIC[i]->header_adr>>5);
3460 else
3461 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3462 hevc->m_PIC[i]->mc_y_adr >> 5);
3463 } else
3464 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3465 hevc->m_PIC[i]->mc_y_adr |
3466 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3467 if (dw_mode & 0x10) {
3468 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3469 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3470 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3471 }
3472 else
3473 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3474 hevc->m_PIC[i]->mc_u_v_adr |
3475 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3476 | 0x1);
3477 }
3478 }
3479 if (cur_pic_num == 0)
3480 return;
3481 for (; i < MAX_REF_PIC_NUM; i++) {
3482 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3483 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3484 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3485 hevc->m_PIC[cur_pic_num-1]->header_adr>>5);
3486 else
3487 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3488 hevc->m_PIC[cur_pic_num-1]->mc_y_adr >> 5);
3489#ifndef LOSLESS_COMPRESS_MODE
3490 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3491 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr >> 5);
3492#endif
3493 } else {
3494 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3495 hevc->m_PIC[cur_pic_num-1]->mc_y_adr|
3496 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_y<<8)
3497 | 0x1);
3498#ifndef LOSLESS_COMPRESS_MODE
3499 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3500 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr|
3501 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_u_v<<8)
3502 | 0x1);
3503#endif
3504 }
3505 }
3506
3507 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3508
3509 /* Zero out canvas registers in IPP -- avoid simulation X */
3510 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3511 (0 << 8) | (0 << 1) | 1);
3512 for (i = 0; i < 32; i++)
3513 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3514
3515#ifdef LOSLESS_COMPRESS_MODE
3516 if ((dw_mode & 0x10) == 0)
3517 init_decode_head_hw(hevc);
3518#endif
3519
3520}
3521
3522
3523static void dump_pic_list(struct hevc_state_s *hevc)
3524{
3525 int i;
3526 struct PIC_s *pic;
3527
3528 hevc_print(hevc, 0,
3529 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3530 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3531 pic = hevc->m_PIC[i];
3532 if (pic == NULL || pic->index == -1)
3533 continue;
3534 hevc_print_cont(hevc, 0,
3535 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3536 pic->index, pic->BUF_index,
3537#ifndef MV_USE_FIXED_BUF
3538 pic->mv_buf_index,
3539#else
3540 -1,
3541#endif
3542 pic->decode_idx, pic->POC, pic->referenced);
3543 hevc_print_cont(hevc, 0,
3544 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3545 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3546 pic->width, pic->height);
3547 hevc_print_cont(hevc, 0,
3548 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3549 pic->output_ready, pic->mpred_mv_wr_start_addr,
3550 pic->vf_ref);
3551 }
3552}
3553
3554static void clear_referenced_flag(struct hevc_state_s *hevc)
3555{
3556 int i;
3557 struct PIC_s *pic;
3558 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3559 pic = hevc->m_PIC[i];
3560 if (pic == NULL || pic->index == -1)
3561 continue;
3562 if (pic->referenced) {
3563 pic->referenced = 0;
3564 put_mv_buf(hevc, pic);
3565 }
3566 }
3567}
3568
3569static void clear_poc_flag(struct hevc_state_s *hevc)
3570{
3571 int i;
3572 struct PIC_s *pic;
3573 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3574 pic = hevc->m_PIC[i];
3575 if (pic == NULL || pic->index == -1)
3576 continue;
3577 pic->POC = INVALID_POC;
3578 }
3579}
3580
3581static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3582 unsigned char flush_flag)
3583{
3584 int num_pic_not_yet_display = 0;
3585 int i;
3586 struct PIC_s *pic;
3587 struct PIC_s *pic_display = NULL;
3588 struct vdec_s *vdec = hw_to_vdec(hevc);
3589
3590 if (hevc->i_only & 0x4) {
3591 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3592 pic = hevc->m_PIC[i];
3593 if (pic == NULL ||
3594 (pic->index == -1) ||
3595 (pic->BUF_index == -1) ||
3596 (pic->POC == INVALID_POC))
3597 continue;
3598 if (pic->output_mark) {
3599 if (pic_display) {
3600 if (pic->decode_idx <
3601 pic_display->decode_idx)
3602 pic_display = pic;
3603
3604 } else
3605 pic_display = pic;
3606
3607 }
3608 }
3609 if (pic_display) {
3610 pic_display->output_mark = 0;
3611 pic_display->recon_mark = 0;
3612 pic_display->output_ready = 1;
3613 pic_display->referenced = 0;
3614 put_mv_buf(hevc, pic_display);
3615 }
3616 } else {
3617 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3618 pic = hevc->m_PIC[i];
3619 if (pic == NULL ||
3620 (pic->index == -1) ||
3621 (pic->BUF_index == -1) ||
3622 (pic->POC == INVALID_POC))
3623 continue;
3624 if (pic->output_mark)
3625 num_pic_not_yet_display++;
3626 if (pic->slice_type == 2 &&
3627 hevc->vf_pre_count == 0 &&
3628 fast_output_enable & 0x1) {
3629 /*fast output for first I picture*/
3630 pic->num_reorder_pic = 0;
3631 if (vdec->master || vdec->slave)
3632 pic_display = pic;
3633 hevc_print(hevc, 0, "VH265: output first frame\n");
3634 }
3635 }
3636
3637 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3638 pic = hevc->m_PIC[i];
3639 if (pic == NULL ||
3640 (pic->index == -1) ||
3641 (pic->BUF_index == -1) ||
3642 (pic->POC == INVALID_POC))
3643 continue;
3644 if (pic->output_mark) {
3645 if (pic_display) {
3646 if (pic->POC < pic_display->POC)
3647 pic_display = pic;
3648 else if ((pic->POC == pic_display->POC)
3649 && (pic->decode_idx <
3650 pic_display->
3651 decode_idx))
3652 pic_display
3653 = pic;
3654 } else
3655 pic_display = pic;
3656 }
3657 }
3658 if (pic_display) {
3659 if ((num_pic_not_yet_display >
3660 pic_display->num_reorder_pic)
3661 || flush_flag) {
3662 pic_display->output_mark = 0;
3663 pic_display->recon_mark = 0;
3664 pic_display->output_ready = 1;
3665 } else if (num_pic_not_yet_display >=
3666 (MAX_REF_PIC_NUM - 1)) {
3667 pic_display->output_mark = 0;
3668 pic_display->recon_mark = 0;
3669 pic_display->output_ready = 1;
3670 hevc_print(hevc, 0,
3671 "Warning, num_reorder_pic %d is beyond buf num\n",
3672 pic_display->num_reorder_pic);
3673 } else
3674 pic_display = NULL;
3675 }
3676 }
3677
3678 if (pic_display && (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3679 pic_display = NULL;
3680 hevc->first_pic_flag = 0;
3681 }
3682 return pic_display;
3683}
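/*
 * Selection rule implemented by output_pic() (summary): among pictures with
 * output_mark set, the one with the smallest POC (or smallest decode_idx in
 * i-only mode) is chosen, and it is only released for display once more
 * pictures are pending than its num_reorder_pic allows, on flush, or when the
 * pending count approaches MAX_REF_PIC_NUM.
 */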
3684
3685static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3686{
3687 int i;
3688 struct PIC_s *pic;
3689
3690 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3691 hevc_print(hevc, 0,
3692 "config_mc_buffer entered .....\n");
3693 if (cur_pic->slice_type != 2) { /* P and B pic */
3694 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3695 (0 << 8) | (0 << 1) | 1);
3696 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3697 pic =
3698 get_ref_pic_by_POC(hevc,
3699 cur_pic->
3700 m_aiRefPOCList0[cur_pic->
3701 slice_idx][i]);
3702 if (pic) {
3703 if ((pic->width != hevc->pic_w) ||
3704 (pic->height != hevc->pic_h)) {
3705 hevc_print(hevc, 0,
3706 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3707 __func__, pic->POC,
3708 pic->width, pic->height);
3709 cur_pic->error_mark = 1;
3710 }
3711 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3712 cur_pic->error_mark = 1;
3713 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3714 (pic->mc_canvas_u_v << 16)
3715 | (pic->mc_canvas_u_v
3716 << 8) |
3717 pic->mc_canvas_y);
3718 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3719 hevc_print_cont(hevc, 0,
3720 "refid %x mc_canvas_u_v %x",
3721 i, pic->mc_canvas_u_v);
3722 hevc_print_cont(hevc, 0,
3723 " mc_canvas_y %x\n",
3724 pic->mc_canvas_y);
3725 }
3726 } else
3727 cur_pic->error_mark = 1;
3728
3729 if (pic == NULL || pic->error_mark) {
3730 hevc_print(hevc, 0,
3731 "Error %s, %dth poc (%d) %s",
3732 __func__, i,
3733 cur_pic->m_aiRefPOCList0[cur_pic->
3734 slice_idx][i],
3735 pic ? "has error" :
3736 "not in list0");
3737 }
3738 }
3739 }
3740 if (cur_pic->slice_type == 0) { /* B pic */
3741 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3742 hevc_print(hevc, 0,
3743 "config_mc_buffer RefNum_L1\n");
3744 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3745 (16 << 8) | (0 << 1) | 1);
3746
3747 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3748 pic =
3749 get_ref_pic_by_POC(hevc,
3750 cur_pic->
3751 m_aiRefPOCList1[cur_pic->
3752 slice_idx][i]);
3753 if (pic) {
3754 if ((pic->width != hevc->pic_w) ||
3755 (pic->height != hevc->pic_h)) {
3756 hevc_print(hevc, 0,
3757 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3758 __func__, pic->POC,
3759 pic->width, pic->height);
3760 cur_pic->error_mark = 1;
3761 }
3762
3763 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3764 cur_pic->error_mark = 1;
3765 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3766 (pic->mc_canvas_u_v << 16)
3767 | (pic->mc_canvas_u_v
3768 << 8) |
3769 pic->mc_canvas_y);
3770 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3771 hevc_print_cont(hevc, 0,
3772 "refid %x mc_canvas_u_v %x",
3773 i, pic->mc_canvas_u_v);
3774 hevc_print_cont(hevc, 0,
3775 " mc_canvas_y %x\n",
3776 pic->mc_canvas_y);
3777 }
3778 } else
3779 cur_pic->error_mark = 1;
3780
3781 if (pic == NULL || pic->error_mark) {
3782 hevc_print(hevc, 0,
3783 "Error %s, %dth poc (%d) %s",
3784 __func__, i,
3785 cur_pic->m_aiRefPOCList1[cur_pic->
3786 slice_idx][i],
3787 pic ? "has error" :
3788 "not in list1");
3789 }
3790 }
3791 }
3792 return 0;
3793}
3794
3795static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
3796 union param_u *params)
3797{
3798 int ii, i;
3799 int poc_tmp;
3800 struct PIC_s *pic;
3801 unsigned char is_referenced;
3802 /* hevc_print(hevc, 0,
3803 "%s cur_poc %d\n", __func__, cur_poc); */
3804 if (pic_list_debug & 0x2) {
3805 pr_err("cur poc %d\n", cur_poc);
3806 }
3807 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
3808 pic = hevc->m_PIC[ii];
3809 if (pic == NULL ||
3810 pic->index == -1 ||
3811 pic->BUF_index == -1
3812 )
3813 continue;
3814
3815 if ((pic->referenced == 0 || pic->POC == cur_poc))
3816 continue;
3817 is_referenced = 0;
3818 for (i = 0; i < 16; i++) {
3819 int delt;
3820
3821 if (params->p.CUR_RPS[i] & 0x8000)
3822 break;
3823 delt =
3824 params->p.CUR_RPS[i] &
3825 ((1 << (RPS_USED_BIT - 1)) - 1);
3826 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
3827 poc_tmp =
3828 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
3829 delt);
3830 } else
3831 poc_tmp = cur_poc + delt;
3832 if (poc_tmp == pic->POC) {
3833 is_referenced = 1;
3834 /* hevc_print(hevc, 0, "i is %d\n", i); */
3835 break;
3836 }
3837 }
3838 if (is_referenced == 0) {
3839 pic->referenced = 0;
3840 put_mv_buf(hevc, pic);
3841 /* hevc_print(hevc, 0,
3842 "set poc %d reference to 0\n", pic->POC); */
3843 if (pic_list_debug & 0x2) {
3844 pr_err("set poc %d reference to 0\n", pic->POC);
3845 }
3846 }
3847 }
3848
3849}
3850
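/*
 * Note (descriptive comment): set_ref_pic_list() - build the L0/L1
 * reference POC lists for this slice. Negative and positive POC deltas are
 * decoded from CUR_RPS into RefPicSetStCurr0/1 and then mapped to
 * m_aiRefPOCList0/1, either in default order or through the explicit
 * reference list modification; the slice type and active reference counts
 * are stored in cur_pic.
 */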
3851static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
3852{
3853 struct PIC_s *pic = hevc->cur_pic;
3854 int i, rIdx;
3855 int num_neg = 0;
3856 int num_pos = 0;
3857 int total_num;
3858 int num_ref_idx_l0_active =
3859 (params->p.num_ref_idx_l0_active >
3860 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
3861 params->p.num_ref_idx_l0_active;
3862 int num_ref_idx_l1_active =
3863 (params->p.num_ref_idx_l1_active >
3864 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
3865 params->p.num_ref_idx_l1_active;
3866
3867 int RefPicSetStCurr0[16];
3868 int RefPicSetStCurr1[16];
3869
3870 for (i = 0; i < 16; i++) {
3871 RefPicSetStCurr0[i] = 0;
3872 RefPicSetStCurr1[i] = 0;
3873 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
3874 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
3875 }
3876 for (i = 0; i < 16; i++) {
3877 if (params->p.CUR_RPS[i] & 0x8000)
3878 break;
3879 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
3880 int delt =
3881 params->p.CUR_RPS[i] &
3882 ((1 << (RPS_USED_BIT - 1)) - 1);
3883
3884 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
3885 RefPicSetStCurr0[num_neg] =
3886 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
3887 delt);
3888 /* hevc_print(hevc, 0,
3889 * "RefPicSetStCurr0 %x %x %x\n",
3890 * RefPicSetStCurr0[num_neg], pic->POC,
3891 * (0x800-(params[i]&0x7ff)));
3892 */
3893 num_neg++;
3894 } else {
3895 RefPicSetStCurr1[num_pos] = pic->POC + delt;
3896 /* hevc_print(hevc, 0,
3897 * "RefPicSetStCurr1 %d\n",
3898 * RefPicSetStCurr1[num_pos]);
3899 */
3900 num_pos++;
3901 }
3902 }
3903 }
3904 total_num = num_neg + num_pos;
3905 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3906 hevc_print(hevc, 0,
3907 "%s: curpoc %d slice_type %d, total %d ",
3908 __func__, pic->POC, params->p.slice_type, total_num);
3909 hevc_print_cont(hevc, 0,
3910 "num_neg %d num_list0 %d num_list1 %d\n",
3911 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
3912 }
3913
3914 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3915 hevc_print(hevc, 0,
3916 "HEVC Stream buf start ");
3917 hevc_print_cont(hevc, 0,
3918 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
3919 READ_VREG(HEVC_STREAM_START_ADDR),
3920 READ_VREG(HEVC_STREAM_END_ADDR),
3921 READ_VREG(HEVC_STREAM_WR_PTR),
3922 READ_VREG(HEVC_STREAM_RD_PTR),
3923 READ_VREG(HEVC_STREAM_LEVEL),
3924 READ_VREG(HEVC_STREAM_FIFO_CTL),
3925 READ_VREG(HEVC_PARSER_INT_CONTROL));
3926 }
3927
3928 if (total_num > 0) {
3929 if (params->p.modification_flag & 0x1) {
3930 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3931 hevc_print(hevc, 0, "ref0 POC (modification):");
3932 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
3933 int cIdx = params->p.modification_list[rIdx];
3934
3935 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
3936 cIdx >=
3937 num_neg ? RefPicSetStCurr1[cIdx -
3938 num_neg] :
3939 RefPicSetStCurr0[cIdx];
3940 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3941 hevc_print_cont(hevc, 0, "%d ",
3942 pic->m_aiRefPOCList0[pic->
3943 slice_idx]
3944 [rIdx]);
3945 }
3946 }
3947 } else {
3948 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3949 hevc_print(hevc, 0, "ref0 POC:");
3950 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
3951 int cIdx = rIdx % total_num;
3952
3953 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
3954 cIdx >=
3955 num_neg ? RefPicSetStCurr1[cIdx -
3956 num_neg] :
3957 RefPicSetStCurr0[cIdx];
3958 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3959 hevc_print_cont(hevc, 0, "%d ",
3960 pic->m_aiRefPOCList0[pic->
3961 slice_idx]
3962 [rIdx]);
3963 }
3964 }
3965 }
3966 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3967 hevc_print_cont(hevc, 0, "\n");
3968 if (params->p.slice_type == B_SLICE) {
3969 if (params->p.modification_flag & 0x2) {
3970 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3971 hevc_print(hevc, 0,
3972 "ref1 POC (modification):");
3973 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
3974 rIdx++) {
3975 int cIdx;
3976
3977 if (params->p.modification_flag & 0x1) {
3978 cIdx =
3979 params->p.
3980 modification_list
3981 [num_ref_idx_l0_active +
3982 rIdx];
3983 } else {
3984 cIdx =
3985 params->p.
3986 modification_list[rIdx];
3987 }
3988 pic->m_aiRefPOCList1[pic->
3989 slice_idx][rIdx] =
3990 cIdx >=
3991 num_pos ?
3992 RefPicSetStCurr0[cIdx - num_pos]
3993 : RefPicSetStCurr1[cIdx];
3994 if (get_dbg_flag(hevc) &
3995 H265_DEBUG_BUFMGR) {
3996 hevc_print_cont(hevc, 0, "%d ",
3997 pic->
3998 m_aiRefPOCList1[pic->
3999 slice_idx]
4000 [rIdx]);
4001 }
4002 }
4003 } else {
4004 if (get_dbg_flag(hevc) &
4005 H265_DEBUG_BUFMGR)
4006 hevc_print(hevc, 0, "ref1 POC:");
4007 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4008 rIdx++) {
4009 int cIdx = rIdx % total_num;
4010
4011 pic->m_aiRefPOCList1[pic->
4012 slice_idx][rIdx] =
4013 cIdx >=
4014 num_pos ?
4015 RefPicSetStCurr0[cIdx -
4016 num_pos]
4017 : RefPicSetStCurr1[cIdx];
4018 if (get_dbg_flag(hevc) &
4019 H265_DEBUG_BUFMGR) {
4020 hevc_print_cont(hevc, 0, "%d ",
4021 pic->
4022 m_aiRefPOCList1[pic->
4023 slice_idx]
4024 [rIdx]);
4025 }
4026 }
4027 }
4028 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4029 hevc_print_cont(hevc, 0, "\n");
4030 }
4031 }
4032 /*set m_PIC */
4033 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4034 (params->p.slice_type == P_SLICE) ? 1 :
4035 (params->p.slice_type == B_SLICE) ? 0 : 3;
4036 pic->RefNum_L0 = num_ref_idx_l0_active;
4037 pic->RefNum_L1 = num_ref_idx_l1_active;
4038}
4039
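/*
 * Note (descriptive comment): update_tile_info() - rebuild per-tile
 * geometry for the new picture. Tile widths/heights (in CTUs), start
 * coordinates and the SAO vertical/above line buffer addresses are derived
 * either from uniform spacing or from the explicit tile_width/tile_height
 * arrays; when tiles are disabled the whole picture is treated as one tile.
 */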
4040static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4041 int pic_height_cu, int sao_mem_unit,
4042 union param_u *params)
4043{
4044 int i, j;
4045 int start_cu_x, start_cu_y;
4046 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4047 int sao_abv_size = sao_mem_unit * pic_width_cu;
4048#ifdef DETREFILL_ENABLE
4049 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4050 int tmpRefillLcuSize = 1 <<
4051 (params->p.log2_min_coding_block_size_minus3 +
4052 3 + params->p.log2_diff_max_min_coding_block_size);
4053 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4054 "%x, %x, %x, %x\n",
4055 params->p.slice_segment_address,
4056 params->p.bit_depth,
4057 params->p.tiles_enabled_flag,
4058 tmpRefillLcuSize);
4059 if (params->p.slice_segment_address == 0 &&
4060 params->p.bit_depth != 0 &&
4061 (params->p.tiles_enabled_flag & 1) &&
4062 tmpRefillLcuSize == 64)
4063 hevc->delrefill_check = 1;
4064 else
4065 hevc->delrefill_check = 0;
4066 }
4067#endif
4068
4069 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4070 if (params->p.tiles_enabled_flag & 1) {
4071 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4072 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4073
4074 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4075 || hevc->num_tile_row <= 0) {
4076 hevc->num_tile_row = 1;
4077 hevc_print(hevc, 0,
4078 "%s: num_tile_rows_minus1 (%d) error!!\n",
4079 __func__, params->p.num_tile_rows_minus1);
4080 }
4081 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4082 || hevc->num_tile_col <= 0) {
4083 hevc->num_tile_col = 1;
4084 hevc_print(hevc, 0,
4085 "%s: num_tile_columns_minus1 (%d) error!!\n",
4086 __func__, params->p.num_tile_columns_minus1);
4087 }
4088 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4089 hevc_print(hevc, 0,
4090 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4091 __func__, pic_width_cu, pic_height_cu);
4092 hevc_print_cont(hevc, 0,
4093 "num_tile_col %d num_tile_row %d:\n",
4094 hevc->num_tile_col, hevc->num_tile_row);
4095 }
4096
4097 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4098 int w = pic_width_cu / hevc->num_tile_col;
4099 int h = pic_height_cu / hevc->num_tile_row;
4100
4101 start_cu_y = 0;
4102 for (i = 0; i < hevc->num_tile_row; i++) {
4103 start_cu_x = 0;
4104 for (j = 0; j < hevc->num_tile_col; j++) {
4105 if (j == (hevc->num_tile_col - 1)) {
4106 hevc->m_tile[i][j].width =
4107 pic_width_cu -
4108 start_cu_x;
4109 } else
4110 hevc->m_tile[i][j].width = w;
4111 if (i == (hevc->num_tile_row - 1)) {
4112 hevc->m_tile[i][j].height =
4113 pic_height_cu -
4114 start_cu_y;
4115 } else
4116 hevc->m_tile[i][j].height = h;
4117 hevc->m_tile[i][j].start_cu_x
4118 = start_cu_x;
4119 hevc->m_tile[i][j].start_cu_y
4120 = start_cu_y;
4121 hevc->m_tile[i][j].sao_vb_start_addr =
4122 hevc->work_space_buf->sao_vb.
4123 buf_start + j * sao_vb_size;
4124 hevc->m_tile[i][j].sao_abv_start_addr =
4125 hevc->work_space_buf->sao_abv.
4126 buf_start + i * sao_abv_size;
4127 if (get_dbg_flag(hevc) &
4128 H265_DEBUG_BUFMGR) {
4129 hevc_print_cont(hevc, 0,
4130 "{y=%d, x=%d w %d h %d ",
4131 i, j, hevc->m_tile[i][j].width,
4132 hevc->m_tile[i][j].height);
4133 hevc_print_cont(hevc, 0,
4134 "start_x %d start_y %d ",
4135 hevc->m_tile[i][j].start_cu_x,
4136 hevc->m_tile[i][j].start_cu_y);
4137 hevc_print_cont(hevc, 0,
4138 "sao_vb_start 0x%x ",
4139 hevc->m_tile[i][j].
4140 sao_vb_start_addr);
4141 hevc_print_cont(hevc, 0,
4142 "sao_abv_start 0x%x}\n",
4143 hevc->m_tile[i][j].
4144 sao_abv_start_addr);
4145 }
4146 start_cu_x += hevc->m_tile[i][j].width;
4147
4148 }
4149 start_cu_y += hevc->m_tile[i][0].height;
4150 }
4151 } else {
4152 start_cu_y = 0;
4153 for (i = 0; i < hevc->num_tile_row; i++) {
4154 start_cu_x = 0;
4155 for (j = 0; j < hevc->num_tile_col; j++) {
4156 if (j == (hevc->num_tile_col - 1)) {
4157 hevc->m_tile[i][j].width =
4158 pic_width_cu -
4159 start_cu_x;
4160 } else {
4161 hevc->m_tile[i][j].width =
4162 params->p.tile_width[j];
4163 }
4164 if (i == (hevc->num_tile_row - 1)) {
4165 hevc->m_tile[i][j].height =
4166 pic_height_cu -
4167 start_cu_y;
4168 } else {
4169 hevc->m_tile[i][j].height =
4170 params->
4171 p.tile_height[i];
4172 }
4173 hevc->m_tile[i][j].start_cu_x
4174 = start_cu_x;
4175 hevc->m_tile[i][j].start_cu_y
4176 = start_cu_y;
4177 hevc->m_tile[i][j].sao_vb_start_addr =
4178 hevc->work_space_buf->sao_vb.
4179 buf_start + j * sao_vb_size;
4180 hevc->m_tile[i][j].sao_abv_start_addr =
4181 hevc->work_space_buf->sao_abv.
4182 buf_start + i * sao_abv_size;
4183 if (get_dbg_flag(hevc) &
4184 H265_DEBUG_BUFMGR) {
4185 hevc_print_cont(hevc, 0,
4186 "{y=%d, x=%d w %d h %d ",
4187 i, j, hevc->m_tile[i][j].width,
4188 hevc->m_tile[i][j].height);
4189 hevc_print_cont(hevc, 0,
4190 "start_x %d start_y %d ",
4191 hevc->m_tile[i][j].start_cu_x,
4192 hevc->m_tile[i][j].start_cu_y);
4193 hevc_print_cont(hevc, 0,
4194 "sao_vb_start 0x%x ",
4195 hevc->m_tile[i][j].
4196 sao_vb_start_addr);
4197 hevc_print_cont(hevc, 0,
4198 "sao_abv_start 0x%x}\n",
4199 hevc->m_tile[i][j].
4200 sao_abv_start_addr);
4201
4202 }
4203 start_cu_x += hevc->m_tile[i][j].width;
4204 }
4205 start_cu_y += hevc->m_tile[i][0].height;
4206 }
4207 }
4208 } else {
4209 hevc->num_tile_col = 1;
4210 hevc->num_tile_row = 1;
4211 hevc->m_tile[0][0].width = pic_width_cu;
4212 hevc->m_tile[0][0].height = pic_height_cu;
4213 hevc->m_tile[0][0].start_cu_x = 0;
4214 hevc->m_tile[0][0].start_cu_y = 0;
4215 hevc->m_tile[0][0].sao_vb_start_addr =
4216 hevc->work_space_buf->sao_vb.buf_start;
4217 hevc->m_tile[0][0].sao_abv_start_addr =
4218 hevc->work_space_buf->sao_abv.buf_start;
4219 }
4220}
4221
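/*
 * Note (descriptive comment): get_tile_index() - map a CTU address to its
 * tile. Returns tile_x in the low byte and tile_y shifted left by 8, or -1
 * when pic_width_lcu is 0.
 */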
4222static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4223 int pic_width_lcu)
4224{
4225 int cu_x;
4226 int cu_y;
4227 int tile_x = 0;
4228 int tile_y = 0;
4229 int i;
4230
4231 if (pic_width_lcu == 0) {
4232 if (get_dbg_flag(hevc)) {
4233 hevc_print(hevc, 0,
4234 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4235 __func__, hevc->pic_w, hevc->pic_h);
4236 }
4237 return -1;
4238 }
4239 cu_x = cu_adr % pic_width_lcu;
4240 cu_y = cu_adr / pic_width_lcu;
4241 if (hevc->tile_enabled) {
4242 for (i = 0; i < hevc->num_tile_col; i++) {
4243 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4244 tile_x = i;
4245 else
4246 break;
4247 }
4248 for (i = 0; i < hevc->num_tile_row; i++) {
4249 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4250 tile_y = i;
4251 else
4252 break;
4253 }
4254 }
4255 return (tile_x) | (tile_y << 8);
4256}
4257
4258static void print_scratch_error(int error_num)
4259{
4260#if 0
4261 if (get_dbg_flag(hevc)) {
4262 hevc_print(hevc, 0,
4263 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4264 error_num);
4265 }
4266#endif
4267}
4268
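/*
 * Note (descriptive comment): hevc_config_work_space_hw() - program the
 * work space buffer base addresses (IPP line buffer, RPM, short term RPS,
 * VPS/SPS/PPS, SAO, scale LUT, deblock data, LMEM dump and, with the MMU
 * enabled, the frame map buffer) into the decoder registers.
 */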
4269static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4270{
4271 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4272
4273 if (get_dbg_flag(hevc))
4274 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4275 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4276 __func__,
4277 buf_spec->ipp.buf_start,
4278 buf_spec->start_adr,
4279 buf_spec->short_term_rps.buf_start,
4280 buf_spec->vps.buf_start,
4281 buf_spec->sps.buf_start,
4282 buf_spec->pps.buf_start,
4283 buf_spec->sao_up.buf_start,
4284 buf_spec->swap_buf.buf_start,
4285 buf_spec->swap_buf2.buf_start,
4286 buf_spec->scalelut.buf_start,
4287 buf_spec->dblk_para.buf_start,
4288 buf_spec->dblk_data.buf_start,
4289 buf_spec->dblk_data2.buf_start);
4290 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4291 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4292 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4293 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4294 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4295 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4296 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4297 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4298 if (hevc->mmu_enable) {
4299 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4300 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4301 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4302 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4303 } else
4304 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4305 } /*else
4306 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4307 buf_spec->swap_buf.buf_start);
4308 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4309 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4310 /* cfg_p_addr */
4311 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4312 /* cfg_d_addr */
4313 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4314
4315 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4316
4317 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4318}
4319
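/* Note: pushes the fixed parser command table into HEVC_PARSER_CMD_WRITE. */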
4320static void parser_cmd_write(void)
4321{
4322 u32 i;
4323 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4324 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4325 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4326 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4327 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4328 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4329 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4330 0x7C00
4331 };
4332 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4333 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4334}
4335
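/*
 * Note (descriptive comment): hevc_init_decoder_hw() - bring up the
 * parser/CABAC front end: sanity check the parser scratch registers, reset
 * IQIT, enable stream fetch (single instance mode only), configure start
 * code/emulation detection and parser interrupts, clear the scale LUT,
 * send the parser command table, software-reset IPP/MPP and, in NV21
 * double-write-only mode, enable NV21 reference reads for MC.
 */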
4336static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4337 int decode_pic_begin, int decode_pic_num)
4338{
4339 unsigned int data32;
4340 int i;
4341#if 0
4342 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4343 /* Set MCR fetch priorities*/
4344 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4345 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4346 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4347 }
4348#endif
4349#if 1
4350 /* m8baby test1902 */
4351 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4352 hevc_print(hevc, 0,
4353 "%s\n", __func__);
4354 data32 = READ_VREG(HEVC_PARSER_VERSION);
4355 if (data32 != 0x00010001) {
4356 print_scratch_error(25);
4357 return;
4358 }
4359 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4360 data32 = READ_VREG(HEVC_PARSER_VERSION);
4361 if (data32 != 0x5a5a55aa) {
4362 print_scratch_error(26);
4363 return;
4364 }
4365#if 0
4366 /* test Parser Reset */
4367 /* reset iqit to start mem init again */
4368 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4369 (1 << 3) /* reset_whole parser */
4370 );
4371 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4372 data32 = READ_VREG(HEVC_PARSER_VERSION);
4373 if (data32 != 0x00010001)
4374 hevc_print(hevc, 0,
4375 "Test Parser Fatal Error\n");
4376#endif
4377 /* reset iqit to start mem init again */
4378 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4379 );
4380 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4381 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4382
4383#endif
4384 if (!hevc->m_ins_flag) {
4385 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4386 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4387 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4388 data32 |= (0xf << 25); /*arwlen_axi_max*/
4389 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4390 }
4391 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4392 if (data32 != 0x00000100) {
4393 print_scratch_error(29);
4394 return;
4395 }
4396 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4397 if (data32 != 0x00000300) {
4398 print_scratch_error(30);
4399 return;
4400 }
4401 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4402 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4403 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4404 if (data32 != 0x12345678) {
4405 print_scratch_error(31);
4406 return;
4407 }
4408 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4409 if (data32 != 0x9abcdef0) {
4410 print_scratch_error(32);
4411 return;
4412 }
4413 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4414 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4415
4416 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4417 data32 &= 0x03ffffff;
4418 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4419 | /* stream_buffer_empty_int_amrisc_enable */
4420 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4421 (1 << 7) | /* dec_done_int_cpu_enable */
4422 (1 << 4) | /* startcode_found_int_cpu_enable */
4423 (0 << 3) | /* startcode_found_int_amrisc_enable */
4424 (1 << 0) /* parser_int_enable */
4425 ;
4426 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4427
4428 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4429 data32 = data32 | (1 << 1) | /* emulation_check_on */
4430 (1 << 0) /* startcode_check_on */
4431 ;
4432 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4433
4434 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4435 (2 << 4) | /* emulate_code_length_sub_1 */
4436 (2 << 1) | /* start_code_length_sub_1 */
4437 (1 << 0) /* stream_shift_enable */
4438 );
4439
4440 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4441 );
4442 /* hevc_parser_core_clk_en */
4443 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4444 );
4445
4446 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4447
4448 /* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4449 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4450 for (i = 0; i < 1024; i++)
4451 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4452
4453#ifdef ENABLE_SWAP_TEST
4454 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4455#endif
4456
4457 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4458 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4459 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4460 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4461 /* Send parser_cmd */
4462 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4463
4464 parser_cmd_write();
4465
4466 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4467 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4468 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4469
4470 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4471 /* (1 << 8) | // sao_sw_pred_enable */
4472 (1 << 5) | /* parser_sao_if_en */
4473 (1 << 2) | /* parser_mpred_if_en */
4474 (1 << 0) /* parser_scaler_if_en */
4475 );
4476
4477 /* Changed to Start MPRED in microcode */
4478 /*
4479 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4480 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4481 * (1<<31)
4482 * );
4483 */
4484
4485 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4486 (1 << 0) /* software reset ipp and mpp */
4487 );
4488 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4489 (0 << 0) /* software reset ipp and mpp */
4490 );
4491
4492 if (get_double_write_mode(hevc) & 0x10)
4493 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4494 0x1 << 31 /* Enable NV21 reference read mode for MC */
4495 );
4496
4497}
4498
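/*
 * Note (descriptive comment): decoder_hw_reset() - re-run the front end
 * setup after a hardware reset; essentially the same register sequence as
 * hevc_init_decoder_hw() without the multi-instance stream fetch handling.
 */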
4499static void decoder_hw_reset(void)
4500{
4501 int i;
4502 unsigned int data32;
4503 /* reset iqit to start mem init again */
4504 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4505 );
4506 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4507 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4508
4509 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4510 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4511 ;
4512 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4513
4514 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4515 if (data32 != 0x00000100) {
4516 print_scratch_error(29);
4517 return;
4518 }
4519 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4520 if (data32 != 0x00000300) {
4521 print_scratch_error(30);
4522 return;
4523 }
4524 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4525 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4526 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4527 if (data32 != 0x12345678) {
4528 print_scratch_error(31);
4529 return;
4530 }
4531 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4532 if (data32 != 0x9abcdef0) {
4533 print_scratch_error(32);
4534 return;
4535 }
4536 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4537 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4538
4539 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4540 data32 &= 0x03ffffff;
4541 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4542 | /* stream_buffer_empty_int_amrisc_enable */
4543 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4544 (1 << 7) | /* dec_done_int_cpu_enable */
4545 (1 << 4) | /* startcode_found_int_cpu_enable */
4546 (0 << 3) | /* startcode_found_int_amrisc_enable */
4547 (1 << 0) /* parser_int_enable */
4548 ;
4549 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4550
4551 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4552 data32 = data32 | (1 << 1) | /* emulation_check_on */
4553 (1 << 0) /* startcode_check_on */
4554 ;
4555 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4556
4557 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4558 (2 << 4) | /* emulate_code_length_sub_1 */
4559 (2 << 1) | /* start_code_length_sub_1 */
4560 (1 << 0) /* stream_shift_enable */
4561 );
4562
4563 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4564 );
4565 /* hevc_parser_core_clk_en */
4566 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4567 );
4568
4569 /* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4570 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4571 for (i = 0; i < 1024; i++)
4572 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4573
4574 /* Send parser_cmd */
4575 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4576
4577 parser_cmd_write();
4578
4579 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4580 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4581 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4582
4583 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4584 /* (1 << 8) | // sao_sw_pred_enable */
4585 (1 << 5) | /* parser_sao_if_en */
4586 (1 << 2) | /* parser_mpred_if_en */
4587 (1 << 0) /* parser_scaler_if_en */
4588 );
4589
4590 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4591 (1 << 0) /* software reset ipp and mpp */
4592 );
4593 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4594 (0 << 0) /* software reset ipp and mpp */
4595 );
4596}
4597
4598#ifdef CONFIG_HEVC_CLK_FORCED_ON
4599static void config_hevc_clk_forced_on(void)
4600{
4601 unsigned int rdata32;
4602 /* IQIT */
4603 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4604 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4605
4606 /* DBLK */
4607 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4608 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4609
4610 /* SAO */
4611 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4612 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4613
4614 /* MPRED */
4615 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4616 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4617
4618 /* PARSER */
4619 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4620 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4621 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4622 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4623 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4624 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4625 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4626 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4627 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4628 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4629 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4630 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4631 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4632
4633 /* IPP */
4634 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4635 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4636
4637 /* MCRCC */
4638 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4639 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4640}
4641#endif
4642
4643#ifdef MCRCC_ENABLE
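/*
 * Note (descriptive comment): config_mcrcc_axi_hw() - set up the motion
 * compensation reference cache. The cache is reset and left disabled for
 * I pictures; for P/B pictures it is primed with the canvas addresses of
 * the first reference(s) of L0/L1 and then switched to progressive mode.
 */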
4644static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4645{
4646 unsigned int rdata32;
4647 unsigned int rdata32_2;
4648 int l0_cnt = 0;
4649 int l1_cnt = 0x7fff;
4650
4651 if (get_double_write_mode(hevc) & 0x10) {
4652 l0_cnt = hevc->cur_pic->RefNum_L0;
4653 l1_cnt = hevc->cur_pic->RefNum_L1;
4654 }
4655
4656 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4657
4658 if (slice_type == 2) { /* I-PIC */
4659 /* remove reset -- disables clock */
4660 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4661 return;
4662 }
4663
4664 if (slice_type == 0) { /* B-PIC */
4665 /* Programme canvas0 */
4666 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4667 (0 << 8) | (0 << 1) | 0);
4668 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4669 rdata32 = rdata32 & 0xffff;
4670 rdata32 = rdata32 | (rdata32 << 16);
4671 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4672
4673 /* Programme canvas1 */
4674 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4675 (16 << 8) | (1 << 1) | 0);
4676 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4677 rdata32_2 = rdata32_2 & 0xffff;
4678 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4679 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4680 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4681 rdata32_2 = rdata32_2 & 0xffff;
4682 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4683 }
4684 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4685 } else { /* P-PIC */
4686 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4687 (0 << 8) | (1 << 1) | 0);
4688 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4689 rdata32 = rdata32 & 0xffff;
4690 rdata32 = rdata32 | (rdata32 << 16);
4691 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4692
4693 if (l0_cnt == 1) {
4694 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4695 } else {
4696 /* Programme canvas1 */
4697 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4698 rdata32 = rdata32 & 0xffff;
4699 rdata32 = rdata32 | (rdata32 << 16);
4700 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4701 }
4702 }
4703 /* enable mcrcc progressive-mode */
4704 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4705}
4706#endif
4707
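/*
 * Note (descriptive comment): config_title_hw() - program the SAO
 * above/vertical line buffers (sao_mem_unit, sao_vb_size and the buffer
 * base addresses from the work space); "title" appears to be a historical
 * spelling of "tile" in this driver.
 */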
4708static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4709 int sao_mem_unit)
4710{
4711 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4712 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4713 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4714 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4715}
4716
4717static u32 init_aux_size;
4718static int aux_data_is_avaible(struct hevc_state_s *hevc)
4719{
4720 u32 reg_val;
4721
4722 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4723 if (reg_val != 0 && reg_val != init_aux_size)
4724 return 1;
4725 else
4726 return 0;
4727}
4728
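/*
 * Note (descriptive comment): config_aux_buf() - point the hardware at the
 * aux (SEI) buffer and record the prefix/suffix sizes in init_aux_size, so
 * aux_data_is_avaible() can detect when new aux data has been written.
 */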
4729static void config_aux_buf(struct hevc_state_s *hevc)
4730{
4731 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4732 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4733 (hevc->suffix_aux_size >> 4);
4734 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4735}
4736
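/*
 * Note (descriptive comment): config_mpred_hw() - program the motion
 * prediction block for the current slice: MV write/read buffer addresses
 * and row jump, picture/tile geometry, merge and AMVP candidate limits,
 * reference counts/enables and the POC of every L0/L1 reference (written
 * in L0/L1 pairs as required).
 */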
4737static void config_mpred_hw(struct hevc_state_s *hevc)
4738{
4739 int i;
4740 unsigned int data32;
4741 struct PIC_s *cur_pic = hevc->cur_pic;
4742 struct PIC_s *col_pic = hevc->col_pic;
4743 int AMVP_MAX_NUM_CANDS_MEM = 3;
4744 int AMVP_MAX_NUM_CANDS = 2;
4745 int NUM_CHROMA_MODE = 5;
4746 int DM_CHROMA_IDX = 36;
4747 int above_ptr_ctrl = 0;
4748 int buffer_linear = 1;
4749 int cu_size_log2 = 3;
4750
4751 int mpred_mv_rd_start_addr;
4752 int mpred_curr_lcu_x;
4753 int mpred_curr_lcu_y;
4754 int mpred_above_buf_start;
4755 int mpred_mv_rd_ptr;
4756 int mpred_mv_rd_ptr_p1;
4757 int mpred_mv_rd_end_addr;
4758 int MV_MEM_UNIT;
4759 int mpred_mv_wr_ptr;
4760 int *ref_poc_L0, *ref_poc_L1;
4761
4762 int above_en;
4763 int mv_wr_en;
4764 int mv_rd_en;
4765 int col_isIntra;
4766
4767 if (hevc->slice_type != 2) {
4768 above_en = 1;
4769 mv_wr_en = 1;
4770 mv_rd_en = 1;
4771 col_isIntra = 0;
4772 } else {
4773 above_en = 1;
4774 mv_wr_en = 1;
4775 mv_rd_en = 0;
4776 col_isIntra = 0;
4777 }
4778
4779 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
4780 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
4781 mpred_curr_lcu_x = data32 & 0xffff;
4782 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
4783
4784 MV_MEM_UNIT =
4785 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
4786 5 ? 0x80 : 0x20;
4787 mpred_mv_rd_ptr =
4788 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
4789
4790 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
4791 mpred_mv_rd_end_addr =
4792 mpred_mv_rd_start_addr +
4793 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
4794
4795 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
4796
4797 mpred_mv_wr_ptr =
4798 cur_pic->mpred_mv_wr_start_addr +
4799 (hevc->slice_addr * MV_MEM_UNIT);
4800
4801 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4802 hevc_print(hevc, 0,
4803 "cur pic index %d col pic index %d\n", cur_pic->index,
4804 col_pic->index);
4805 }
4806
4807 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
4808 cur_pic->mpred_mv_wr_start_addr);
4809 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
4810
4811 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
4812 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
4813 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
4814
4815 data32 = READ_VREG(HEVC_MPRED_CTRL0);
4816 data32 = (hevc->slice_type |
4817 hevc->new_pic << 2 |
4818 hevc->new_tile << 3 |
4819 hevc->isNextSliceSegment << 4 |
4820 hevc->TMVPFlag << 5 |
4821 hevc->LDCFlag << 6 |
4822 hevc->ColFromL0Flag << 7 |
4823 above_ptr_ctrl << 8 |
4824 above_en << 9 |
4825 mv_wr_en << 10 |
4826 mv_rd_en << 11 |
4827 col_isIntra << 12 |
4828 buffer_linear << 13 |
4829 hevc->LongTerm_Curr << 14 |
4830 hevc->LongTerm_Col << 15 |
4831 hevc->lcu_size_log2 << 16 |
4832 cu_size_log2 << 20 | hevc->plevel << 24);
4833 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
4834
4835 data32 = READ_VREG(HEVC_MPRED_CTRL1);
4836 data32 = (
4837#if 0
4838 /* no set in m8baby test1902 */
4839 /* Don't override clk_forced_on , */
4840 (data32 & (0x1 << 24)) |
4841#endif
4842 hevc->MaxNumMergeCand |
4843 AMVP_MAX_NUM_CANDS << 4 |
4844 AMVP_MAX_NUM_CANDS_MEM << 8 |
4845 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
4846 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
4847
4848 data32 = (hevc->pic_w | hevc->pic_h << 16);
4849 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
4850
4851 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
4852 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
4853
4854 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
4855 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
4856
4857 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
4858 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
4859
4860 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
4861 /* col_RefNum_L0<<16| */
4862 /* col_RefNum_L1<<24 */
4863 );
4864 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
4865
4866 data32 = (hevc->LongTerm_Ref);
4867 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
4868
4869 data32 = 0;
4870 for (i = 0; i < hevc->RefNum_L0; i++)
4871 data32 = data32 | (1 << i);
4872 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
4873
4874 data32 = 0;
4875 for (i = 0; i < hevc->RefNum_L1; i++)
4876 data32 = data32 | (1 << i);
4877 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
4878
4879 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
4880 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
4881
4882 /* below MPRED Ref_POC_xx_Lx registers must follow Ref_POC_xx_L0 ->
4883 * Ref_POC_xx_L1 in pair write order!!!
4884 */
4885 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
4886 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
4887
4888 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
4889 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
4890
4891 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
4892 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
4893
4894 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
4895 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
4896
4897 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
4898 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
4899
4900 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
4901 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
4902
4903 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
4904 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
4905
4906 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
4907 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
4908
4909 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
4910 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
4911
4912 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
4913 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
4914
4915 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
4916 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
4917
4918 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
4919 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
4920
4921 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
4922 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
4923
4924 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
4925 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
4926
4927 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
4928 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
4929
4930 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
4931 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
4932
4933 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
4934 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
4935
4936 if (hevc->new_pic) {
4937 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
4938 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
4939 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
4940 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
4941 } else if (!hevc->isNextSliceSegment) {
4942 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
4943 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
4944 }
4945
4946 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
4947}
4948
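/*
 * Note (descriptive comment): config_sao_hw() - per-picture SAO/deblock
 * setup: output addresses for the compressed body and/or double write
 * planes, DBLK_CFGB write enables according to the double write mode,
 * memory map and endian settings, and the deblocking/SAO cross-slice and
 * cross-tile flags derived from misc_flag0 and the slice/PPS offsets.
 */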
4949static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
4950{
4951 unsigned int data32, data32_2;
4952 int misc_flag0 = hevc->misc_flag0;
4953 int slice_deblocking_filter_disabled_flag = 0;
4954
4955 int mc_buffer_size_u_v =
4956 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
4957 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
4958 struct PIC_s *cur_pic = hevc->cur_pic;
4959
4960 data32 = READ_VREG(HEVC_SAO_CTRL0);
4961 data32 &= (~0xf);
4962 data32 |= hevc->lcu_size_log2;
4963 WRITE_VREG(HEVC_SAO_CTRL0, data32);
4964
4965 data32 = (hevc->pic_w | hevc->pic_h << 16);
4966 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
4967
4968 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
4969 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
4970
4971 if (hevc->new_pic)
4972 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
4973#ifdef LOSLESS_COMPRESS_MODE
4974/*SUPPORT_10BIT*/
4975 if ((get_double_write_mode(hevc) & 0x10) == 0) {
4976 data32 = READ_VREG(HEVC_SAO_CTRL5);
4977 data32 &= (~(0xff << 16));
4978
4979 if (get_double_write_mode(hevc) == 2 ||
4980 get_double_write_mode(hevc) == 3)
4981 data32 |= (0xff<<16);
4982 else if (get_double_write_mode(hevc) == 4)
4983 data32 |= (0x33<<16);
4984
4985 if (hevc->mem_saving_mode == 1)
4986 data32 |= (1 << 9);
4987 else
4988 data32 &= ~(1 << 9);
4989 if (workaround_enable & 1)
4990 data32 |= (1 << 7);
4991 WRITE_VREG(HEVC_SAO_CTRL5, data32);
4992 }
4993 data32 = cur_pic->mc_y_adr;
4994 if (get_double_write_mode(hevc))
4995 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
4996
4997 if ((get_double_write_mode(hevc) & 0x10) == 0)
4998 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
4999
5000 if (hevc->mmu_enable)
5001 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5002#else
5003 data32 = cur_pic->mc_y_adr;
5004 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5005#endif
5006 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5007 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5008
5009#ifdef LOSLESS_COMPRESS_MODE
5010/*SUPPORT_10BIT*/
5011 if (get_double_write_mode(hevc))
5012 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5013#else
5014 data32 = cur_pic->mc_u_v_adr;
5015 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5016#endif
5017 data32 = (mc_buffer_size_u_v_h << 16);
5018 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5019
5020#ifdef LOSLESS_COMPRESS_MODE
5021/*SUPPORT_10BIT*/
5022 if (get_double_write_mode(hevc)) {
5023 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5024 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5025 }
5026#else
5027 /* multi tile to do... */
5028 data32 = cur_pic->mc_y_adr;
5029 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5030
5031 data32 = cur_pic->mc_u_v_adr;
5032 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5033#endif
5034 /* DBLK CONFIG HERE */
5035 if (hevc->new_pic) {
5036 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5037 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5038 data32 = (0xff << 8) | (0x0 << 0);
5039 else
5040 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5041 (0x0 << 0); /* h265 video format*/
5042
5043 if (hevc->pic_w >= 1280)
5044 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5045 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5046 if (get_double_write_mode(hevc) == 0)
5047 data32 |= (0x1 << 8); /*enable first write*/
5048 else if (get_double_write_mode(hevc) == 0x10)
5049 data32 |= (0x1 << 9); /*double write only*/
5050 else
5051 data32 |= ((0x1 << 8) |(0x1 << 9));
5052
5053 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5054 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5055 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5056 }
5057 data32 = (hevc->pic_w | hevc->pic_h << 16);
5058 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5059
5060 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5061 data32 =
5062 ((misc_flag0 >>
5063 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5064 0x1) << 3;
5065 } else
5066 data32 = 0;
5067 data32 |=
5068 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5069 ((params->p.pps_cr_qp_offset
5070 & 0x1f) <<
5071 9));
5072 data32 |=
5073 (hevc->lcu_size ==
5074 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5075
5076 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5077
5078 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5079 /*if (debug & 0x80) {*/
5080 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5081 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5082 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5083 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5084 data32);
5085 /*}*/
5086 }
5087 }
5088#if 0
5089 data32 = READ_VREG(HEVC_SAO_CTRL1);
5090 data32 &= (~0x3000);
5091 data32 |= (mem_map_mode <<
5092 12);
5093
5094/* [13:12] axi_aformat,
5095 * 0-Linear, 1-32x32, 2-64x32
5096 */
5097 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5098
5099 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5100 data32 &= (~0x30);
5101 data32 |= (mem_map_mode <<
5102 4);
5103
5104/* [5:4] -- address_format
5105 * 00:linear 01:32x32 10:64x32
5106 */
5107 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5108#else
5109 /* m8baby test1902 */
5110 data32 = READ_VREG(HEVC_SAO_CTRL1);
5111 data32 &= (~0x3000);
5112 data32 |= (mem_map_mode <<
5113 12);
5114
5115/* [13:12] axi_aformat, 0-Linear,
5116 * 1-32x32, 2-64x32
5117 */
5118 data32 &= (~0xff0);
5119 /* data32 |= 0x670; // Big-Endian per 64-bit */
5120 data32 |= endian; /* Big-Endian per 64-bit */
5121 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5122 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5123 if (get_double_write_mode(hevc) == 0)
5124 data32 |= 0x2; /*disable double write*/
5125 else if (get_double_write_mode(hevc) & 0x10)
5126 data32 |= 0x1; /*disable cm*/
5127 } else {
5128 unsigned int data;
5129 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5130 (0x0 << 0); /* h265 video format*/
5131 if (hevc->pic_w >= 1280)
5132 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5133 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5134 if (get_double_write_mode(hevc) == 0)
5135 data |= (0x1 << 8); /*enable first write*/
5136 else if (get_double_write_mode(hevc) & 0x10)
5137 data |= (0x1 << 9); /*double write only*/
5138 else
5139 data |= ((0x1 << 8) |(0x1 << 9));
5140
5141 WRITE_VREG(HEVC_DBLK_CFGB, data);
5142 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5143 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5144 }
5145
5146 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5147 if (get_double_write_mode(hevc) & 0x10) {
5148 /* [23:22] dw_v1_ctrl
5149 *[21:20] dw_v0_ctrl
5150 *[19:18] dw_h1_ctrl
5151 *[17:16] dw_h0_ctrl
5152 */
5153 data32 = READ_VREG(HEVC_SAO_CTRL5);
5154 /*set them all 0 for H265_NV21 (no down-scale)*/
5155 data32 &= ~(0xff << 16);
5156 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5157 }
5158
5159 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5160 data32 &= (~0x30);
5161 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5162 data32 |= (mem_map_mode <<
5163 4);
5164 data32 &= (~0xF);
5165 data32 |= 0xf; /* valid only when double write only */
5166 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5167 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5168#endif
5169 data32 = 0;
5170 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5171 data32_2 &= (~0x300);
5172 /* slice_deblocking_filter_disabled_flag = 0;
5173 * the ucode handles it, so read it from the ucode directly
5174 */
5175 if (hevc->tile_enabled) {
5176 data32 |=
5177 ((misc_flag0 >>
5178 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5179 0x1) << 0;
5180 data32_2 |=
5181 ((misc_flag0 >>
5182 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5183 0x1) << 8;
5184 }
5185 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5186 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5187 0x1; /* the ucode handles it, so read it from the ucode directly */
5188 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5189 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5190 /* slice_deblocking_filter_disabled_flag =
5191 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5192 * //the ucode handles it, so read it from the ucode directly
5193 */
5194 data32 |= slice_deblocking_filter_disabled_flag << 2;
5195 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5196 hevc_print_cont(hevc, 0,
5197 "(1,%x)", data32);
5198 if (!slice_deblocking_filter_disabled_flag) {
5199 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5200 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5201 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5202 hevc_print_cont(hevc, 0,
5203 "(2,%x)", data32);
5204 }
5205 } else {
5206 data32 |=
5207 ((misc_flag0 >>
5208 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5209 0x1) << 2;
5210 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5211 hevc_print_cont(hevc, 0,
5212 "(3,%x)", data32);
5213 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5214 0x1) == 0) {
5215 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5216 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5217 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5218 hevc_print_cont(hevc, 0,
5219 "(4,%x)", data32);
5220 }
5221 }
5222 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5223 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5224 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5225 || (!slice_deblocking_filter_disabled_flag))) {
5226 data32 |=
5227 ((misc_flag0 >>
5228 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5229 & 0x1) << 1;
5230 data32_2 |=
5231 ((misc_flag0 >>
5232 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5233 & 0x1) << 9;
5234 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5235 hevc_print_cont(hevc, 0,
5236 "(5,%x)\n", data32);
5237 } else {
5238 data32 |=
5239 ((misc_flag0 >>
5240 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5241 & 0x1) << 1;
5242 data32_2 |=
5243 ((misc_flag0 >>
5244 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5245 & 0x1) << 9;
5246 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5247 hevc_print_cont(hevc, 0,
5248 "(6,%x)\n", data32);
5249 }
5250 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5251 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5252}
5253
5254#ifdef TEST_NO_BUF
5255static unsigned char test_flag = 1;
5256#endif
5257
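/*
 * Note (descriptive comment): pic_list_process() - adapt the picture list
 * to a new picture size. Idle pictures whose size no longer matches
 * release their buffer; extra pictures beyond get_work_pic_num() are
 * invalidated, missing ones are re-armed with the new width/height, then
 * unused buffers are freed.
 */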
5258static void pic_list_process(struct hevc_state_s *hevc)
5259{
5260 int work_pic_num = get_work_pic_num(hevc);
5261 int alloc_pic_count = 0;
5262 int i;
5263 struct PIC_s *pic;
5264 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5265 pic = hevc->m_PIC[i];
5266 if (pic == NULL || pic->index == -1)
5267 continue;
5268 alloc_pic_count++;
5269 if (pic->output_mark == 0 && pic->referenced == 0
5270 && pic->output_ready == 0
5271 && (pic->width != hevc->pic_w ||
5272 pic->height != hevc->pic_h)
5273 ) {
5274 set_buf_unused(hevc, pic->BUF_index);
5275 pic->BUF_index = -1;
5276 if (alloc_pic_count > work_pic_num) {
5277 pic->width = 0;
5278 pic->height = 0;
5279 pic->index = -1;
5280 } else {
5281 pic->width = hevc->pic_w;
5282 pic->height = hevc->pic_h;
5283 }
5284 }
5285 }
5286 if (alloc_pic_count < work_pic_num) {
5287 int new_count = alloc_pic_count;
5288 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5289 pic = hevc->m_PIC[i];
5290 if (pic && pic->index == -1) {
5291 pic->index = i;
5292 pic->BUF_index = -1;
5293 pic->width = hevc->pic_w;
5294 pic->height = hevc->pic_h;
5295 new_count++;
5296 if (new_count >=
5297 work_pic_num)
5298 break;
5299 }
5300 }
5301
5302 }
5303 dealloc_unused_buf(hevc);
5304 if (get_alloc_pic_count(hevc)
5305 != alloc_pic_count) {
5306 hevc_print_cont(hevc, 0,
5307 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5308 __func__,
5309 work_pic_num,
5310 alloc_pic_count,
5311 get_alloc_pic_count(hevc));
5312 }
5313}
5314
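/*
 * Note (descriptive comment): recycle_mmu_bufs() - release the scatter
 * (MMU) allocation of pictures that are no longer referenced, queued for
 * output or being displayed.
 */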
5315static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5316{
5317 int i;
5318 struct PIC_s *pic;
5319 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5320 pic = hevc->m_PIC[i];
5321 if (pic == NULL || pic->index == -1)
5322 continue;
5323
5324 if (pic->output_mark == 0 && pic->referenced == 0
5325 && pic->output_ready == 0
5326 && pic->scatter_alloc
5327 )
5328 release_pic_mmu_buf(hevc, pic);
5329 }
5330
5331}
5332
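/*
 * Note (descriptive comment): get_new_pic() - pick a free picture for the
 * frame about to be decoded. An idle picture (preferring the smallest POC)
 * is chosen; a frame buffer, canvas, MV buffer and, with the MMU enabled,
 * a compressed frame map are attached, then the picture is initialised
 * from the current stream parameters (POC, bit depth, conformance window,
 * etc.).
 */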
5333static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5334 union param_u *rpm_param)
5335{
5336 struct PIC_s *new_pic = NULL;
5337 struct PIC_s *pic;
5338 int i;
5339 int ret;
5340
5341 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5342 pic = hevc->m_PIC[i];
5343 if (pic == NULL || pic->index == -1)
5344 continue;
5345
5346 if (pic->output_mark == 0 && pic->referenced == 0
5347 && pic->output_ready == 0
5348 && pic->width == hevc->pic_w
5349 && pic->height == hevc->pic_h
5350 ) {
5351 if (new_pic) {
5352 if (new_pic->POC != INVALID_POC) {
5353 if (pic->POC == INVALID_POC ||
5354 pic->POC < new_pic->POC)
5355 new_pic = pic;
5356 }
5357 } else
5358 new_pic = pic;
5359 }
5360 }
5361
5362 if (new_pic == NULL)
5363 return NULL;
5364
5365 if (new_pic->BUF_index < 0) {
5366 if (alloc_buf(hevc) < 0)
5367 return NULL;
5368 else {
5369 if (config_pic(hevc, new_pic) < 0) {
5370 dealloc_pic_buf(hevc, new_pic);
5371 return NULL;
5372 }
5373 }
5374 new_pic->width = hevc->pic_w;
5375 new_pic->height = hevc->pic_h;
5376 set_canvas(hevc, new_pic);
5377
5378 init_pic_list_hw(hevc);
5379 }
5380
5381 if (new_pic) {
5382 new_pic->double_write_mode =
5383 get_double_write_mode(hevc);
5384 if (new_pic->double_write_mode)
5385 set_canvas(hevc, new_pic);
5386
5387#ifdef TEST_NO_BUF
5388 if (test_flag) {
5389 test_flag = 0;
5390 return NULL;
5391 } else
5392 test_flag = 1;
5393#endif
5394 if (get_mv_buf(hevc, new_pic) < 0)
5395 return NULL;
5396
5397 if (hevc->mmu_enable) {
5398 ret = H265_alloc_mmu(hevc, new_pic,
5399 rpm_param->p.bit_depth,
5400 hevc->frame_mmu_map_addr);
5401 if (ret != 0) {
5402 put_mv_buf(hevc, new_pic);
5403 hevc_print(hevc, 0,
5404 "can't alloc need mmu1,idx %d ret =%d\n",
5405 new_pic->decode_idx,
5406 ret);
5407 return NULL;
5408 }
5409 }
5410 new_pic->referenced = 1;
5411 new_pic->decode_idx = hevc->decode_idx;
5412 new_pic->slice_idx = 0;
5413 new_pic->referenced = 1;
5414 new_pic->output_mark = 0;
5415 new_pic->recon_mark = 0;
5416 new_pic->error_mark = 0;
5417 /* new_pic->output_ready = 0; */
5418 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5419 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5420 new_pic->POC = hevc->curr_POC;
5421 new_pic->pic_struct = hevc->curr_pic_struct;
5422 if (new_pic->aux_data_buf)
5423 release_aux_data(hevc, new_pic);
5424 new_pic->mem_saving_mode =
5425 hevc->mem_saving_mode;
5426 new_pic->bit_depth_luma =
5427 hevc->bit_depth_luma;
5428 new_pic->bit_depth_chroma =
5429 hevc->bit_depth_chroma;
5430 new_pic->video_signal_type =
5431 hevc->video_signal_type;
5432
5433 new_pic->conformance_window_flag =
5434 hevc->param.p.conformance_window_flag;
5435 new_pic->conf_win_left_offset =
5436 hevc->param.p.conf_win_left_offset;
5437 new_pic->conf_win_right_offset =
5438 hevc->param.p.conf_win_right_offset;
5439 new_pic->conf_win_top_offset =
5440 hevc->param.p.conf_win_top_offset;
5441 new_pic->conf_win_bottom_offset =
5442 hevc->param.p.conf_win_bottom_offset;
5443 new_pic->chroma_format_idc =
5444 hevc->param.p.chroma_format_idc;
5445
5446 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5447 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5448 __func__, new_pic->index,
5449 new_pic->BUF_index, new_pic->decode_idx,
5450 new_pic->POC);
5451
5452 }
5453 if (pic_list_debug & 0x1) {
5454 dump_pic_list(hevc);
5455 pr_err("\n*******************************************\n");
5456 }
5457
5458 return new_pic;
5459}
5460
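/* Note: counts pictures currently held for display (output_ready == 1). */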
5461static int get_display_pic_num(struct hevc_state_s *hevc)
5462{
5463 int i;
5464 struct PIC_s *pic;
5465 int num = 0;
5466
5467 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5468 pic = hevc->m_PIC[i];
5469 if (pic == NULL ||
5470 pic->index == -1)
5471 continue;
5472
5473 if (pic->output_ready == 1)
5474 num++;
5475 }
5476 return num;
5477}
5478
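/*
 * Note (descriptive comment): flush_output() - drain all displayable
 * pictures: mark the passed picture (if any) for output, then repeatedly
 * call output_pic() and either send each returned picture to the display
 * or recycle it (error pictures, i-only mode, debug modes); finally clear
 * all referenced flags.
 */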
5479static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5480{
5481 struct PIC_s *pic_display;
5482
5483 if (pic) {
5484 /*PB skip control */
5485 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5486 /* start decoding after first I */
5487 hevc->ignore_bufmgr_error |= 0x1;
5488 }
5489 if (hevc->ignore_bufmgr_error & 1) {
5490 if (hevc->PB_skip_count_after_decoding > 0)
5491 hevc->PB_skip_count_after_decoding--;
5492 else {
5493 /* start displaying */
5494 hevc->ignore_bufmgr_error |= 0x2;
5495 }
5496 }
5497 /**/
5498 if (pic->POC != INVALID_POC) {
5499 pic->output_mark = 1;
5500 pic->recon_mark = 1;
5501 }
5502 pic->recon_mark = 1;
5503 }
5504 do {
5505 pic_display = output_pic(hevc, 1);
5506
5507 if (pic_display) {
5508 pic_display->referenced = 0;
5509 put_mv_buf(hevc, pic_display);
5510 if ((pic_display->error_mark
5511 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5512 || (get_dbg_flag(hevc) &
5513 H265_DEBUG_DISPLAY_CUR_FRAME)
5514 || (get_dbg_flag(hevc) &
5515 H265_DEBUG_NO_DISPLAY)) {
5516 pic_display->output_ready = 0;
5517 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5518 hevc_print(hevc, 0,
5519 "[BM] Display: POC %d, ",
5520 pic_display->POC);
5521 hevc_print_cont(hevc, 0,
5522 "decoding index %d ==> ",
5523 pic_display->decode_idx);
5524 hevc_print_cont(hevc, 0,
5525 "Debug mode or error, recycle it\n");
5526 }
5527 } else {
5528 if (hevc->i_only & 0x1
5529 && pic_display->slice_type != 2) {
5530 pic_display->output_ready = 0;
5531 } else {
5532 prepare_display_buf(hevc, pic_display);
5533 if (get_dbg_flag(hevc)
5534 & H265_DEBUG_BUFMGR) {
5535 hevc_print(hevc, 0,
5536 "[BM] flush Display: POC %d, ",
5537 pic_display->POC);
5538 hevc_print_cont(hevc, 0,
5539 "decoding index %d\n",
5540 pic_display->decode_idx);
5541 }
5542 }
5543 }
5544 }
5545 } while (pic_display);
5546 clear_referenced_flag(hevc);
5547}
5548
5549/*
5550* dv_meta_flag: 1, dolby meta only; 2, exclude dolby meta
5551*/
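/*
 * Note (descriptive comment): set_aux_data() - append the prefix or suffix
 * aux payload reported in HEVC_AUX_DATA_SIZE to pic->aux_data_buf,
 * re-packing it into length-tagged records and filtering entries according
 * to dv_meta_flag.
 */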
5552static void set_aux_data(struct hevc_state_s *hevc,
5553 struct PIC_s *pic, unsigned char suffix_flag,
5554 unsigned char dv_meta_flag)
5555{
5556 int i;
5557 unsigned short *aux_adr;
5558 unsigned int size_reg_val =
5559 READ_VREG(HEVC_AUX_DATA_SIZE);
5560 unsigned int aux_count = 0;
5561 int aux_size = 0;
5562 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
5563 return;
5564
5565 if (hevc->aux_data_dirty ||
5566 hevc->m_ins_flag == 0) {
5567
5568 hevc->aux_data_dirty = 0;
5569 }
5570
5571 if (suffix_flag) {
5572 aux_adr = (unsigned short *)
5573 (hevc->aux_addr +
5574 hevc->prefix_aux_size);
5575 aux_count =
5576 ((size_reg_val & 0xffff) << 4)
5577 >> 1;
5578 aux_size =
5579 hevc->suffix_aux_size;
5580 } else {
5581 aux_adr =
5582 (unsigned short *)hevc->aux_addr;
5583 aux_count =
5584 ((size_reg_val >> 16) << 4)
5585 >> 1;
5586 aux_size =
5587 hevc->prefix_aux_size;
5588 }
5589 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5590 hevc_print(hevc, 0,
5591 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
5592 __func__, pic, pic->aux_data_size,
5593 aux_count, suffix_flag, dv_meta_flag);
5594 }
5595 if (aux_size > 0 && aux_count > 0) {
5596 int heads_size = 0;
5597 int new_size;
5598 char *new_buf;
5599
5600 for (i = 0; i < aux_count; i++) {
5601 unsigned char tag = aux_adr[i] >> 8;
5602 if (tag != 0 && tag != 0xff) {
5603 if (dv_meta_flag == 0)
5604 heads_size += 8;
5605 else if (dv_meta_flag == 1 && tag == 0x1)
5606 heads_size += 8;
5607 else if (dv_meta_flag == 2 && tag != 0x1)
5608 heads_size += 8;
5609 }
5610 }
5611 new_size = pic->aux_data_size + aux_count + heads_size;
5612 new_buf = vmalloc(new_size);
5613 if (new_buf) {
5614 unsigned char valid_tag = 0;
5615 unsigned char *h =
5616 new_buf +
5617 pic->aux_data_size;
5618 unsigned char *p = h + 8;
5619 int len = 0;
5620 int padding_len = 0;
5621 memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
5622 if (pic->aux_data_buf)
5623 vfree(pic->aux_data_buf);
5624 pic->aux_data_buf = new_buf;
5625 for (i = 0; i < aux_count; i += 4) {
5626 int ii;
5627 unsigned char tag = aux_adr[i + 3] >> 8;
5628 if (tag != 0 && tag != 0xff) {
5629 if (dv_meta_flag == 0)
5630 valid_tag = 1;
5631 else if (dv_meta_flag == 1
5632 && tag == 0x1)
5633 valid_tag = 1;
5634 else if (dv_meta_flag == 2
5635 && tag != 0x1)
5636 valid_tag = 1;
5637 else
5638 valid_tag = 0;
5639 if (valid_tag && len > 0) {
5640 pic->aux_data_size +=
5641 (len + 8);
5642 h[0] = (len >> 24)
5643 & 0xff;
5644 h[1] = (len >> 16)
5645 & 0xff;
5646 h[2] = (len >> 8)
5647 & 0xff;
5648 h[3] = (len >> 0)
5649 & 0xff;
5650 h[6] =
5651 (padding_len >> 8)
5652 & 0xff;
5653 h[7] = (padding_len)
5654 & 0xff;
5655 h += (len + 8);
5656 p += 8;
5657 len = 0;
5658 padding_len = 0;
5659 }
5660 if (valid_tag) {
5661 h[4] = tag;
5662 h[5] = 0;
5663 h[6] = 0;
5664 h[7] = 0;
5665 }
5666 }
5667 if (valid_tag) {
5668 for (ii = 0; ii < 4; ii++) {
5669 unsigned short aa =
5670 aux_adr[i + 3
5671 - ii];
5672 *p = aa & 0xff;
5673 p++;
5674 len++;
5675 /*if ((aa >> 8) == 0xff)
5676 padding_len++;*/
5677 }
5678 }
5679 }
5680 if (len > 0) {
5681 pic->aux_data_size += (len + 8);
5682 h[0] = (len >> 24) & 0xff;
5683 h[1] = (len >> 16) & 0xff;
5684 h[2] = (len >> 8) & 0xff;
5685 h[3] = (len >> 0) & 0xff;
5686 h[6] = (padding_len >> 8) & 0xff;
5687 h[7] = (padding_len) & 0xff;
5688 }
5689 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5690 hevc_print(hevc, 0,
5691 "aux: (size %d) suffix_flag %d\n",
5692 pic->aux_data_size, suffix_flag);
5693 for (i = 0; i < pic->aux_data_size; i++) {
5694 hevc_print_cont(hevc, 0,
5695 "%02x ", pic->aux_data_buf[i]);
5696 if (((i + 1) & 0xf) == 0)
5697 hevc_print_cont(hevc, 0, "\n");
5698 }
5699 hevc_print_cont(hevc, 0, "\n");
5700 }
5701
5702 } else {
5703 hevc_print(hevc, 0, "new buf alloc failed\n");
5704 if (pic->aux_data_buf)
5705 vfree(pic->aux_data_buf);
5706 pic->aux_data_buf = NULL;
5707 pic->aux_data_size = 0;
5708 }
5709 }
5710
5711}
5712
5713static void release_aux_data(struct hevc_state_s *hevc,
5714 struct PIC_s *pic)
5715{
5716 if (pic->aux_data_buf)
5717 vfree(pic->aux_data_buf);
5718 pic->aux_data_buf = NULL;
5719 pic->aux_data_size = 0;
5720}
5721
5722static inline void hevc_pre_pic(struct hevc_state_s *hevc,
5723 struct PIC_s *pic)
5724{
5725
5726 /* prev pic */
5727 /*if (hevc->curr_POC != 0) {*/
5728 int decoded_poc = hevc->iPrevPOC;
5729#ifdef MULTI_INSTANCE_SUPPORT
5730 if (hevc->m_ins_flag) {
5731 decoded_poc = hevc->decoded_poc;
5732 hevc->decoded_poc = INVALID_POC;
5733 }
5734#endif
5735 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
5736 && hevc->m_nalUnitType !=
5737 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
5738 struct PIC_s *pic_display;
5739
5740 pic = get_pic_by_POC(hevc, decoded_poc);
5741 if (pic && (pic->POC != INVALID_POC)) {
5742 /*PB skip control */
5743 if (pic->error_mark == 0
5744 && hevc->PB_skip_mode == 1) {
5745 /* start decoding after
5746 * first I
5747 */
5748 hevc->ignore_bufmgr_error |= 0x1;
5749 }
5750 if (hevc->ignore_bufmgr_error & 1) {
5751 if (hevc->PB_skip_count_after_decoding > 0) {
5752 hevc->PB_skip_count_after_decoding--;
5753 } else {
5754 /* start displaying */
5755 hevc->ignore_bufmgr_error |= 0x2;
5756 }
5757 }
5758 if (hevc->mmu_enable
5759 && ((hevc->double_write_mode & 0x10) == 0)) {
5760 if (!hevc->m_ins_flag) {
5761 hevc->used_4k_num =
5762 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
5763
5764 if ((!is_skip_decoding(hevc, pic)) &&
5765 (hevc->used_4k_num >= 0) &&
5766 (hevc->cur_pic->scatter_alloc
5767 == 1)) {
5768 hevc_print(hevc,
5769 H265_DEBUG_BUFMGR_MORE,
5770 "%s pic index %d scatter_alloc %d page_start %d\n",
5771 "decoder_mmu_box_free_idx_tail",
5772 hevc->cur_pic->index,
5773 hevc->cur_pic->scatter_alloc,
5774 hevc->used_4k_num);
5775 hevc_mmu_dma_check(hw_to_vdec(hevc));
5776 decoder_mmu_box_free_idx_tail(
5777 hevc->mmu_box,
5778 hevc->cur_pic->index,
5779 hevc->used_4k_num);
5780 hevc->cur_pic->scatter_alloc
5781 = 2;
5782 }
5783 hevc->used_4k_num = -1;
5784 }
5785 }
5786
5787 pic->output_mark = 1;
5788 pic->recon_mark = 1;
5789 }
5790 do {
5791 pic_display = output_pic(hevc, 0);
5792
5793 if (pic_display) {
5794 if ((pic_display->error_mark &&
5795 ((hevc->ignore_bufmgr_error &
5796 0x2) == 0))
5797 || (get_dbg_flag(hevc) &
5798 H265_DEBUG_DISPLAY_CUR_FRAME)
5799 || (get_dbg_flag(hevc) &
5800 H265_DEBUG_NO_DISPLAY)) {
5801 pic_display->output_ready = 0;
5802 if (get_dbg_flag(hevc) &
5803 H265_DEBUG_BUFMGR) {
5804 hevc_print(hevc, 0,
5805 "[BM] Display: POC %d, ",
5806 pic_display->POC);
5807 hevc_print_cont(hevc, 0,
5808 "decoding index %d ==> ",
5809 pic_display->
5810 decode_idx);
5811 hevc_print_cont(hevc, 0,
5812 "Debug or err,recycle it\n");
5813 }
5814 } else {
5815 if (hevc->i_only & 0x1
5816 && pic_display->
5817 slice_type != 2) {
5818 pic_display->output_ready = 0;
5819 } else {
5820 prepare_display_buf
5821 (hevc,
5822 pic_display);
5823 if (get_dbg_flag(hevc) &
5824 H265_DEBUG_BUFMGR) {
5825 hevc_print(hevc, 0,
5826 "[BM] Display: POC %d, ",
5827 pic_display->POC);
5828 hevc_print_cont(hevc, 0,
5829 "decoding index %d\n",
5830 pic_display->
5831 decode_idx);
5832 }
5833 }
5834 }
5835 }
5836 } while (pic_display);
5837 } else {
5838 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5839 hevc_print(hevc, 0,
5840 "[BM] current pic is IDR, ");
5841 hevc_print(hevc, 0,
5842 "clear referenced flag of all buffers\n");
5843 }
5844 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5845 dump_pic_list(hevc);
5846 pic = get_pic_by_POC(hevc, decoded_poc);
5847 flush_output(hevc, pic);
5848 }
5849
5850}
5851
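/*
 * Heuristic: a picture is treated as fully decoded only when the last
 * reported LCU index reaches (lcu_x_num * lcu_y_num) - 1 for that picture;
 * a smaller index means the hardware stopped early, and the picture gets
 * error_mark set. The check is skipped when error_handle_policy bit 0x20
 * is set.
 */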
5852static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
5853 int decoded_lcu)
5854{
5855 int current_lcu_idx = decoded_lcu;
5856 if (decoded_lcu < 0)
5857 return;
5858
5859 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5860 hevc_print(hevc, 0,
5861 "cur lcu idx = %d, (total %d)\n",
5862 current_lcu_idx, hevc->lcu_total);
5863 }
5864 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
5865 if (hevc->first_pic_after_recover) {
5866 if (current_lcu_idx !=
5867 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
5868 hevc->cur_pic->error_mark = 1;
5869 } else {
5870 if (hevc->lcu_x_num_pre != 0
5871 && hevc->lcu_y_num_pre != 0
5872 && current_lcu_idx != 0
5873 && current_lcu_idx <
5874 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
5875 hevc->cur_pic->error_mark = 1;
5876 }
5877 if (hevc->cur_pic->error_mark) {
5878 hevc_print(hevc, 0,
5879 "cur lcu idx = %d, (total %d), set error_mark\n",
5880 current_lcu_idx,
5881 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
5882 if (is_log_enable(hevc))
5883 add_log(hevc,
5884 "cur lcu idx = %d, (total %d), set error_mark",
5885 current_lcu_idx,
5886 hevc->lcu_x_num_pre *
5887 hevc->lcu_y_num_pre);
5888
5889 }
5890
5891 }
5892 if (hevc->cur_pic && hevc->head_error_flag) {
5893 hevc->cur_pic->error_mark = 1;
5894 hevc_print(hevc, 0,
5895 "head has error, set error_mark\n");
5896 }
5897
5898 if ((error_handle_policy & 0x80) == 0) {
5899 if (hevc->over_decode && hevc->cur_pic) {
5900 hevc_print(hevc, 0,
5901 "over decode, set error_mark\n");
5902 hevc->cur_pic->error_mark = 1;
5903 }
5904 }
5905
5906 hevc->lcu_x_num_pre = hevc->lcu_x_num;
5907 hevc->lcu_y_num_pre = hevc->lcu_y_num;
5908}
5909
5910static void check_pic_decoded_error(struct hevc_state_s *hevc,
5911 int decoded_lcu)
5912{
5913 int current_lcu_idx = decoded_lcu;
5914 if (decoded_lcu < 0)
5915 return;
5916
5917 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5918 hevc_print(hevc, 0,
5919 "cur lcu idx = %d, (total %d)\n",
5920 current_lcu_idx, hevc->lcu_total);
5921 }
5922 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
5923 if (hevc->lcu_x_num != 0
5924 && hevc->lcu_y_num != 0
5925 && current_lcu_idx != 0
5926 && current_lcu_idx <
5927 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
5928 hevc->cur_pic->error_mark = 1;
5929 if (hevc->cur_pic->error_mark) {
5930 hevc_print(hevc, 0,
5931 "cur lcu idx = %d, (total %d), set error_mark\n",
5932 current_lcu_idx,
5933 hevc->lcu_x_num*hevc->lcu_y_num);
5934			if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
5935					&& ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
5936				hevc_print(hevc, 0,
5937					"Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
5938					hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
5939				flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
5940 }
5941 if (is_log_enable(hevc))
5942 add_log(hevc,
5943 "cur lcu idx = %d, (total %d), set error_mark",
5944 current_lcu_idx,
5945 hevc->lcu_x_num *
5946 hevc->lcu_y_num);
5947
5948 }
5949
5950 }
5951 if (hevc->cur_pic && hevc->head_error_flag) {
5952 hevc->cur_pic->error_mark = 1;
5953 hevc_print(hevc, 0,
5954 "head has error, set error_mark\n");
5955 }
5956
5957 if ((error_handle_policy & 0x80) == 0) {
5958 if (hevc->over_decode && hevc->cur_pic) {
5959 hevc_print(hevc, 0,
5960 "over decode, set error_mark\n");
5961 hevc->cur_pic->error_mark = 1;
5962 }
5963 }
5964}
5965
5966/* Call this only after one field or one frame has been fully decoded,
5967 * so that the per-picture QoS registers hold valid data. */
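/*
 * Two readout paths are used below: on SoCs before G12A the min/avg/max
 * samples come packed into HEVC_MV_INFO / HEVC_QP_INFO / HEVC_SKIP_INFO;
 * from G12A on they are streamed out of HEVC_PIC_QUALITY_DATA in a fixed
 * read sequence after resetting the read index in HEVC_PIC_QUALITY_CTRL.
 */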
5968static void get_picture_qos_info(struct hevc_state_s *hevc)
5969{
5970 struct PIC_s *picture = hevc->cur_pic;
5971
5972/*
5973#define DEBUG_QOS
5974*/
5975
5976 if (!hevc->cur_pic)
5977 return;
5978
5979 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5980 unsigned char a[3];
5981 unsigned char i, j, t;
5982 unsigned long data;
5983
5984 data = READ_VREG(HEVC_MV_INFO);
5985 if (picture->slice_type == I_SLICE)
5986 data = 0;
5987 a[0] = data & 0xff;
5988 a[1] = (data >> 8) & 0xff;
5989 a[2] = (data >> 16) & 0xff;
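		/*
		 * Order the three packed samples so that a[0] <= a[1] <= a[2];
		 * they are then reported as min/avg/max. Equal values are nudged
		 * apart so the three slots stay distinct.
		 */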
5990
5991 for (i = 0; i < 3; i++)
5992 for (j = i+1; j < 3; j++) {
5993 if (a[j] < a[i]) {
5994 t = a[j];
5995 a[j] = a[i];
5996 a[i] = t;
5997 } else if (a[j] == a[i]) {
5998 a[i]++;
5999 t = a[j];
6000 a[j] = a[i];
6001 a[i] = t;
6002 }
6003 }
6004 picture->max_mv = a[2];
6005 picture->avg_mv = a[1];
6006 picture->min_mv = a[0];
6007#ifdef DEBUG_QOS
6008 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6009 data, a[0], a[1], a[2]);
6010#endif
6011
6012 data = READ_VREG(HEVC_QP_INFO);
6013 a[0] = data & 0x1f;
6014 a[1] = (data >> 8) & 0x3f;
6015 a[2] = (data >> 16) & 0x7f;
6016
6017 for (i = 0; i < 3; i++)
6018 for (j = i+1; j < 3; j++) {
6019 if (a[j] < a[i]) {
6020 t = a[j];
6021 a[j] = a[i];
6022 a[i] = t;
6023 } else if (a[j] == a[i]) {
6024 a[i]++;
6025 t = a[j];
6026 a[j] = a[i];
6027 a[i] = t;
6028 }
6029 }
6030 picture->max_qp = a[2];
6031 picture->avg_qp = a[1];
6032 picture->min_qp = a[0];
6033#ifdef DEBUG_QOS
6034 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6035 data, a[0], a[1], a[2]);
6036#endif
6037
6038 data = READ_VREG(HEVC_SKIP_INFO);
6039 a[0] = data & 0x1f;
6040 a[1] = (data >> 8) & 0x3f;
6041 a[2] = (data >> 16) & 0x7f;
6042
6043 for (i = 0; i < 3; i++)
6044 for (j = i+1; j < 3; j++) {
6045 if (a[j] < a[i]) {
6046 t = a[j];
6047 a[j] = a[i];
6048 a[i] = t;
6049 } else if (a[j] == a[i]) {
6050 a[i]++;
6051 t = a[j];
6052 a[j] = a[i];
6053 a[i] = t;
6054 }
6055 }
6056 picture->max_skip = a[2];
6057 picture->avg_skip = a[1];
6058 picture->min_skip = a[0];
6059
6060#ifdef DEBUG_QOS
6061 hevc_print(hevc, 0,
6062 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6063 data, a[0], a[1], a[2]);
6064#endif
6065 } else {
6066 uint32_t blk88_y_count;
6067 uint32_t blk88_c_count;
6068 uint32_t blk22_mv_count;
6069 uint32_t rdata32;
6070 int32_t mv_hi;
6071 int32_t mv_lo;
6072 uint32_t rdata32_l;
6073 uint32_t mvx_L0_hi;
6074 uint32_t mvy_L0_hi;
6075 uint32_t mvx_L1_hi;
6076 uint32_t mvy_L1_hi;
6077 int64_t value;
6078 uint64_t temp_value;
6079#ifdef DEBUG_QOS
6080 int pic_number = picture->POC;
6081#endif
6082
6083 picture->max_mv = 0;
6084 picture->avg_mv = 0;
6085 picture->min_mv = 0;
6086
6087 picture->max_skip = 0;
6088 picture->avg_skip = 0;
6089 picture->min_skip = 0;
6090
6091 picture->max_qp = 0;
6092 picture->avg_qp = 0;
6093 picture->min_qp = 0;
6094
6095
6096
6097#ifdef DEBUG_QOS
6098 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6099 picture->slice_type,
6100 picture->POC);
6101#endif
6102 /* set rd_idx to 0 */
6103 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6104
6105 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6106 if (blk88_y_count == 0) {
6107#ifdef DEBUG_QOS
6108 hevc_print(hevc, 0,
6109 "[Picture %d Quality] NO Data yet.\n",
6110 pic_number);
6111#endif
6112 /* reset all counts */
6113 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6114 return;
6115 }
6116 /* qp_y_sum */
6117 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6118#ifdef DEBUG_QOS
6119 hevc_print(hevc, 0,
6120 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6121 pic_number, rdata32/blk88_y_count,
6122 rdata32, blk88_y_count);
6123#endif
6124 picture->avg_qp = rdata32/blk88_y_count;
6125 /* intra_y_count */
6126 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6127#ifdef DEBUG_QOS
6128 hevc_print(hevc, 0,
6129 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6130 pic_number, rdata32*100/blk88_y_count,
6131 '%', rdata32);
6132#endif
6133 /* skipped_y_count */
6134 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6135#ifdef DEBUG_QOS
6136 hevc_print(hevc, 0,
6137 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6138 pic_number, rdata32*100/blk88_y_count,
6139 '%', rdata32);
6140#endif
6141 picture->avg_skip = rdata32*100/blk88_y_count;
6142 /* coeff_non_zero_y_count */
6143 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6144#ifdef DEBUG_QOS
6145 hevc_print(hevc, 0,
6146 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6147 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6148 '%', rdata32);
6149#endif
6150 /* blk66_c_count */
6151 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6152 if (blk88_c_count == 0) {
6153#ifdef DEBUG_QOS
6154 hevc_print(hevc, 0,
6155 "[Picture %d Quality] NO Data yet.\n",
6156 pic_number);
6157#endif
6158 /* reset all counts */
6159 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6160 return;
6161 }
6162 /* qp_c_sum */
6163 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6164#ifdef DEBUG_QOS
6165 hevc_print(hevc, 0,
6166 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6167 pic_number, rdata32/blk88_c_count,
6168 rdata32, blk88_c_count);
6169#endif
6170 /* intra_c_count */
6171 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6172#ifdef DEBUG_QOS
6173 hevc_print(hevc, 0,
6174 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6175 pic_number, rdata32*100/blk88_c_count,
6176 '%', rdata32);
6177#endif
6178 /* skipped_cu_c_count */
6179 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6180#ifdef DEBUG_QOS
6181 hevc_print(hevc, 0,
6182 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6183 pic_number, rdata32*100/blk88_c_count,
6184 '%', rdata32);
6185#endif
6186 /* coeff_non_zero_c_count */
6187 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6188#ifdef DEBUG_QOS
6189 hevc_print(hevc, 0,
6190 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6191 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6192 '%', rdata32);
6193#endif
6194
6195 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6196 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6197 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6198#ifdef DEBUG_QOS
6199 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6200 pic_number, (rdata32>>0)&0xff);
6201#endif
6202 picture->min_qp = (rdata32>>0)&0xff;
6203
6204#ifdef DEBUG_QOS
6205 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6206 pic_number, (rdata32>>8)&0xff);
6207#endif
6208 picture->max_qp = (rdata32>>8)&0xff;
6209
6210#ifdef DEBUG_QOS
6211 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6212 pic_number, (rdata32>>16)&0xff);
6213 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6214 pic_number, (rdata32>>24)&0xff);
6215#endif
6216
6217 /* blk22_mv_count */
6218 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6219 if (blk22_mv_count == 0) {
6220#ifdef DEBUG_QOS
6221 hevc_print(hevc, 0,
6222 "[Picture %d Quality] NO MV Data yet.\n",
6223 pic_number);
6224#endif
6225 /* reset all counts */
6226 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6227 return;
6228 }
6229 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6230 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6231 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6232 /* should all be 0x00 or 0xff */
6233#ifdef DEBUG_QOS
6234 hevc_print(hevc, 0,
6235 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6236 pic_number, rdata32);
6237#endif
6238 mvx_L0_hi = ((rdata32>>0)&0xff);
6239 mvy_L0_hi = ((rdata32>>8)&0xff);
6240 mvx_L1_hi = ((rdata32>>16)&0xff);
6241 mvy_L1_hi = ((rdata32>>24)&0xff);
6242
6243 /* mvx_L0_count[31:0] */
6244 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6245 temp_value = mvx_L0_hi;
6246 temp_value = (temp_value << 32) | rdata32_l;
6247
6248 if (mvx_L0_hi & 0x80)
6249 value = 0xFFFFFFF000000000 | temp_value;
6250 else
6251 value = temp_value;
6252 value = div_s64(value, blk22_mv_count);
6253#ifdef DEBUG_QOS
6254 hevc_print(hevc, 0,
6255 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6256 pic_number, (int)value,
6257 value, blk22_mv_count);
6258#endif
6259 picture->avg_mv = value;
6260
6261 /* mvy_L0_count[31:0] */
6262 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6263 temp_value = mvy_L0_hi;
6264 temp_value = (temp_value << 32) | rdata32_l;
6265
6266 if (mvy_L0_hi & 0x80)
6267 value = 0xFFFFFFF000000000 | temp_value;
6268 else
6269 value = temp_value;
6270#ifdef DEBUG_QOS
6271 hevc_print(hevc, 0,
6272 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6273 pic_number, rdata32_l/blk22_mv_count,
6274 value, blk22_mv_count);
6275#endif
6276
6277 /* mvx_L1_count[31:0] */
6278 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6279 temp_value = mvx_L1_hi;
6280 temp_value = (temp_value << 32) | rdata32_l;
6281 if (mvx_L1_hi & 0x80)
6282 value = 0xFFFFFFF000000000 | temp_value;
6283 else
6284 value = temp_value;
6285#ifdef DEBUG_QOS
6286 hevc_print(hevc, 0,
6287 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6288 pic_number, rdata32_l/blk22_mv_count,
6289 value, blk22_mv_count);
6290#endif
6291
6292 /* mvy_L1_count[31:0] */
6293 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6294 temp_value = mvy_L1_hi;
6295 temp_value = (temp_value << 32) | rdata32_l;
6296 if (mvy_L1_hi & 0x80)
6297 value = 0xFFFFFFF000000000 | temp_value;
6298 else
6299 value = temp_value;
6300#ifdef DEBUG_QOS
6301 hevc_print(hevc, 0,
6302 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6303 pic_number, rdata32_l/blk22_mv_count,
6304 value, blk22_mv_count);
6305#endif
6306
6307 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6308 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6309 mv_hi = (rdata32>>16)&0xffff;
6310 if (mv_hi & 0x8000)
6311 mv_hi = 0x8000 - mv_hi;
6312#ifdef DEBUG_QOS
6313 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6314 pic_number, mv_hi);
6315#endif
6316 picture->max_mv = mv_hi;
6317
6318 mv_lo = (rdata32>>0)&0xffff;
6319 if (mv_lo & 0x8000)
6320 mv_lo = 0x8000 - mv_lo;
6321#ifdef DEBUG_QOS
6322 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6323 pic_number, mv_lo);
6324#endif
6325 picture->min_mv = mv_lo;
6326
6327 /* {mvy_L0_max, mvy_L0_min} */
6328 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6329 mv_hi = (rdata32>>16)&0xffff;
6330 if (mv_hi & 0x8000)
6331 mv_hi = 0x8000 - mv_hi;
6332#ifdef DEBUG_QOS
6333 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6334 pic_number, mv_hi);
6335#endif
6336
6337 mv_lo = (rdata32>>0)&0xffff;
6338 if (mv_lo & 0x8000)
6339 mv_lo = 0x8000 - mv_lo;
6340#ifdef DEBUG_QOS
6341 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6342 pic_number, mv_lo);
6343#endif
6344
6345 /* {mvx_L1_max, mvx_L1_min} */
6346 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6347 mv_hi = (rdata32>>16)&0xffff;
6348 if (mv_hi & 0x8000)
6349 mv_hi = 0x8000 - mv_hi;
6350#ifdef DEBUG_QOS
6351 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6352 pic_number, mv_hi);
6353#endif
6354
6355 mv_lo = (rdata32>>0)&0xffff;
6356 if (mv_lo & 0x8000)
6357 mv_lo = 0x8000 - mv_lo;
6358#ifdef DEBUG_QOS
6359 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6360 pic_number, mv_lo);
6361#endif
6362
6363 /* {mvy_L1_max, mvy_L1_min} */
6364 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6365 mv_hi = (rdata32>>16)&0xffff;
6366 if (mv_hi & 0x8000)
6367 mv_hi = 0x8000 - mv_hi;
6368#ifdef DEBUG_QOS
6369 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6370 pic_number, mv_hi);
6371#endif
6372 mv_lo = (rdata32>>0)&0xffff;
6373 if (mv_lo & 0x8000)
6374 mv_lo = 0x8000 - mv_lo;
6375#ifdef DEBUG_QOS
6376 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6377 pic_number, mv_lo);
6378#endif
6379
6380 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6381#ifdef DEBUG_QOS
6382 hevc_print(hevc, 0,
6383 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6384 pic_number, rdata32);
6385#endif
6386 /* reset all counts */
6387 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6388 }
6389}
6390
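/*
 * Return codes used by this function: 0 decode normally, 1 skip this RASL
 * picture, 2 skip decoding this picture (error/PB-skip), 3 parameter or
 * size error (restart from the first slice segment), 4 unsupported bit
 * depth, 0xf header parsed but decoding skipped, -1 wait for a buffer.
 */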
6391static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6392 union param_u *rpm_param,
6393 int decode_pic_begin)
6394{
6395#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6396 struct vdec_s *vdec = hw_to_vdec(hevc);
6397#endif
6398 int i;
6399 int lcu_x_num_div;
6400 int lcu_y_num_div;
6401 int Col_ref;
6402 int dbg_skip_flag = 0;
6403
6404 if (hevc->wait_buf == 0) {
6405 hevc->sps_num_reorder_pics_0 =
6406 rpm_param->p.sps_num_reorder_pics_0;
6407 hevc->m_temporalId = rpm_param->p.m_temporalId;
6408 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6409 hevc->interlace_flag =
6410 (rpm_param->p.profile_etc >> 2) & 0x1;
6411 hevc->curr_pic_struct =
6412 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6413 if (parser_sei_enable & 0x4) {
6414 hevc->frame_field_info_present_flag =
6415 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6416 }
6417
6418 if (interlace_enable == 0 || hevc->m_ins_flag)
6419 hevc->interlace_flag = 0;
6420 if (interlace_enable & 0x100)
6421 hevc->interlace_flag = interlace_enable & 0x1;
6422 if (hevc->interlace_flag == 0)
6423 hevc->curr_pic_struct = 0;
6424 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6425 /*
6426 *hevc->m_pocRandomAccess = MAX_INT;
6427 * //add to fix RAP_B_Bossen_1
6428 */
6429 /* } */
6430 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6431 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6432 hevc->slice_segment_addr =
6433 rpm_param->p.slice_segment_address;
6434 if (!rpm_param->p.dependent_slice_segment_flag)
6435 hevc->slice_addr = hevc->slice_segment_addr;
6436 } else {
6437 hevc->slice_segment_addr = 0;
6438 hevc->slice_addr = 0;
6439 }
6440
6441 hevc->iPrevPOC = hevc->curr_POC;
6442 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6443 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6444 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6445 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6446 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6447 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6448 hevc->isNextSliceSegment =
6449 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6450 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6451 || hevc->pic_h !=
6452 rpm_param->p.pic_height_in_luma_samples) {
6453 hevc_print(hevc, 0,
6454 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6455 hevc->pic_w, hevc->pic_h,
6456 rpm_param->p.pic_width_in_luma_samples,
6457 rpm_param->p.pic_height_in_luma_samples,
6458 hevc->interlace_flag);
6459
6460 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6461 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6462 hevc->frame_width = hevc->pic_w;
6463 hevc->frame_height = hevc->pic_h;
6464#ifdef LOSLESS_COMPRESS_MODE
6465 if (/*re_config_pic_flag == 0 &&*/
6466 (get_double_write_mode(hevc) & 0x10) == 0)
6467 init_decode_head_hw(hevc);
6468#endif
6469 }
6470
6471 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6472 hevc_print(hevc, 0, "over size : %u x %u.\n",
6473 hevc->pic_w, hevc->pic_h);
6474 if ((!hevc->m_ins_flag) &&
6475 ((debug &
6476 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6477 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6478 H265_DEBUG_DIS_SYS_ERROR_PROC);
6479 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6480 return 3;
6481 }
6482 if (hevc->bit_depth_chroma > 10 ||
6483 hevc->bit_depth_luma > 10) {
6484			hevc_print(hevc, 0, "unsupported bitdepth : %u,%u\n",
6485 hevc->bit_depth_chroma,
6486 hevc->bit_depth_luma);
6487 if (!hevc->m_ins_flag)
6488 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6489 H265_DEBUG_DIS_SYS_ERROR_PROC);
6490 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6491 return 4;
6492 }
6493
6494		/* a zero pic_w or pic_h would cause a divide-by-zero below */
6495 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6496 if (get_dbg_flag(hevc)) {
6497 hevc_print(hevc, 0,
6498 "Fatal Error, pic_w = %d, pic_h = %d\n",
6499 hevc->pic_w, hevc->pic_h);
6500 }
6501 return 3;
6502 }
6503 pic_list_process(hevc);
6504
6505 hevc->lcu_size =
6506 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6507 3 + rpm_param->
6508 p.log2_diff_max_min_coding_block_size);
6509 if (hevc->lcu_size == 0) {
6510 hevc_print(hevc, 0,
6511 "Error, lcu_size = 0 (%d,%d)\n",
6512 rpm_param->p.
6513 log2_min_coding_block_size_minus3,
6514 rpm_param->p.
6515 log2_diff_max_min_coding_block_size);
6516 return 3;
6517 }
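		/*
		 * Example: a 1920x1080 stream with 64x64 LCUs yields, below,
		 * lcu_x_num = 30, lcu_y_num = 17, lcu_total = 510.
		 */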
6518 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6519 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6520 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6521 hevc->lcu_x_num =
6522 ((hevc->pic_w % hevc->lcu_size) ==
6523 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6524 hevc->lcu_y_num =
6525 ((hevc->pic_h % hevc->lcu_size) ==
6526 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6527 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
6528
6529 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6530 || hevc->m_nalUnitType ==
6531 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6532 hevc->curr_POC = 0;
6533 if ((hevc->m_temporalId - 1) == 0)
6534 hevc->iPrevTid0POC = hevc->curr_POC;
6535 } else {
6536 int iMaxPOClsb =
6537 1 << (rpm_param->p.
6538 log2_max_pic_order_cnt_lsb_minus4 + 4);
6539 int iPrevPOClsb;
6540 int iPrevPOCmsb;
6541 int iPOCmsb;
6542 int iPOClsb = rpm_param->p.POClsb;
6543
6544 if (iMaxPOClsb == 0) {
6545 hevc_print(hevc, 0,
6546 "error iMaxPOClsb is 0\n");
6547 return 3;
6548 }
6549
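			/*
			 * Standard PicOrderCntMsb derivation (HEVC spec 8.3.1): if the
			 * lsb wrapped down by at least half the lsb range, msb advances
			 * by iMaxPOClsb; if it jumped up by more than half, msb steps
			 * back by iMaxPOClsb; otherwise it is unchanged.
			 */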
6550 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
6551 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
6552
6553 if ((iPOClsb < iPrevPOClsb)
6554 && ((iPrevPOClsb - iPOClsb) >=
6555 (iMaxPOClsb / 2)))
6556 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
6557 else if ((iPOClsb > iPrevPOClsb)
6558 && ((iPOClsb - iPrevPOClsb) >
6559 (iMaxPOClsb / 2)))
6560 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
6561 else
6562 iPOCmsb = iPrevPOCmsb;
6563 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6564 hevc_print(hevc, 0,
6565 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
6566 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
6567 iPOClsb);
6568 }
6569 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6570 || hevc->m_nalUnitType ==
6571 NAL_UNIT_CODED_SLICE_BLANT
6572 || hevc->m_nalUnitType ==
6573 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
6574 /* For BLA picture types, POCmsb is set to 0. */
6575 iPOCmsb = 0;
6576 }
6577 hevc->curr_POC = (iPOCmsb + iPOClsb);
6578 if ((hevc->m_temporalId - 1) == 0)
6579 hevc->iPrevTid0POC = hevc->curr_POC;
6580 else {
6581 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6582 hevc_print(hevc, 0,
6583 "m_temporalID is %d\n",
6584 hevc->m_temporalId);
6585 }
6586 }
6587 }
6588 hevc->RefNum_L0 =
6589 (rpm_param->p.num_ref_idx_l0_active >
6590 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6591 num_ref_idx_l0_active;
6592 hevc->RefNum_L1 =
6593 (rpm_param->p.num_ref_idx_l1_active >
6594 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6595 num_ref_idx_l1_active;
6596
6597 /* if(curr_POC==0x10) dump_lmem(); */
6598
6599 /* skip RASL pictures after CRA/BLA pictures */
6600 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
6601 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
6602 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6603 || hevc->m_nalUnitType ==
6604 NAL_UNIT_CODED_SLICE_BLANT
6605 || hevc->m_nalUnitType ==
6606 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6607 hevc->m_pocRandomAccess = hevc->curr_POC;
6608 else
6609 hevc->m_pocRandomAccess = -MAX_INT;
6610 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6611 || hevc->m_nalUnitType ==
6612 NAL_UNIT_CODED_SLICE_BLANT
6613 || hevc->m_nalUnitType ==
6614 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6615 hevc->m_pocRandomAccess = hevc->curr_POC;
6616 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
6617 (nal_skip_policy >= 3) &&
6618 (hevc->m_nalUnitType ==
6619 NAL_UNIT_CODED_SLICE_RASL_N ||
6620 hevc->m_nalUnitType ==
6621 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
6622 if (get_dbg_flag(hevc)) {
6623 hevc_print(hevc, 0,
6624 "RASL picture with POC %d < %d ",
6625 hevc->curr_POC, hevc->m_pocRandomAccess);
6626 hevc_print(hevc, 0,
6627				"(RandomAccess point POC), skip it\n");
6628 }
6629 return 1;
6630 }
6631
6632 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
6633 hevc->skip_flag = 0;
6634 /**/
6635 /* if((iPrevPOC != curr_POC)){ */
6636 if (rpm_param->p.slice_segment_address == 0) {
6637 struct PIC_s *pic;
6638
6639 hevc->new_pic = 1;
6640#ifdef MULTI_INSTANCE_SUPPORT
6641 if (!hevc->m_ins_flag)
6642#endif
6643 check_pic_decoded_error_pre(hevc,
6644 READ_VREG(HEVC_PARSER_LCU_START)
6645 & 0xffffff);
6646 /**/ if (use_cma == 0) {
6647 if (hevc->pic_list_init_flag == 0) {
6648 init_pic_list(hevc);
6649 init_pic_list_hw(hevc);
6650 init_buf_spec(hevc);
6651 hevc->pic_list_init_flag = 3;
6652 }
6653 }
6654 if (!hevc->m_ins_flag) {
6655 if (hevc->cur_pic)
6656 get_picture_qos_info(hevc);
6657 }
6658 hevc->first_pic_after_recover = 0;
6659 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6660 dump_pic_list(hevc);
6661 /* prev pic */
6662 hevc_pre_pic(hevc, pic);
6663 /*
6664		 * update the referenced flag of old pictures
6665 *(cur_pic->referenced is 1 and not updated)
6666 */
6667 apply_ref_pic_set(hevc, hevc->curr_POC,
6668 rpm_param);
6669
6670 if (hevc->mmu_enable)
6671 recycle_mmu_bufs(hevc);
6672
6673#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6674 if (vdec->master) {
6675 struct hevc_state_s *hevc_ba =
6676 (struct hevc_state_s *)
6677 vdec->master->private;
6678 if (hevc_ba->cur_pic != NULL) {
6679 hevc_ba->cur_pic->dv_enhance_exist = 1;
6680 hevc_print(hevc, H265_DEBUG_DV,
6681 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
6682 hevc->curr_POC, hevc_ba->cur_pic->POC);
6683 }
6684 }
6685 if (vdec->master == NULL &&
6686 vdec->slave == NULL)
6687 set_aux_data(hevc,
6688 hevc->cur_pic, 1, 0); /*suffix*/
6689 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6690 set_aux_data(hevc,
6691 hevc->cur_pic, 0, 1); /*dv meta only*/
6692#else
6693 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6694#endif
6695 /* new pic */
6696 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6697 if (hevc->cur_pic == NULL) {
6698 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6699 dump_pic_list(hevc);
6700 hevc->wait_buf = 1;
6701 return -1;
6702 }
6703#ifdef MULTI_INSTANCE_SUPPORT
6704 hevc->decoding_pic = hevc->cur_pic;
6705 if (!hevc->m_ins_flag)
6706 hevc->over_decode = 0;
6707#endif
6708#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6709 hevc->cur_pic->dv_enhance_exist = 0;
6710 if (vdec->slave)
6711 hevc_print(hevc, H265_DEBUG_DV,
6712 "Clear bl (poc %d) dv_enhance_exist flag\n",
6713 hevc->curr_POC);
6714 if (vdec->master == NULL &&
6715 vdec->slave == NULL)
6716 set_aux_data(hevc,
6717 hevc->cur_pic, 0, 0); /*prefix*/
6718
6719 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6720 set_aux_data(hevc,
6721 hevc->cur_pic, 0, 2); /*pre sei only*/
6722#else
6723 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6724#endif
6725 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
6726 hevc->cur_pic->output_ready = 1;
6727 hevc->cur_pic->stream_offset =
6728 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
6729 prepare_display_buf(hevc, hevc->cur_pic);
6730 hevc->wait_buf = 2;
6731 return -1;
6732 }
6733 } else {
6734 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
6735#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6736 if (vdec->master == NULL &&
6737 vdec->slave == NULL) {
6738 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6739 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6740 }
6741#else
6742 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6743 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6744#endif
6745 }
6746 if (hevc->pic_list_init_flag != 3
6747 || hevc->cur_pic == NULL) {
6748 /* make it dec from the first slice segment */
6749 return 3;
6750 }
6751 hevc->cur_pic->slice_idx++;
6752 hevc->new_pic = 0;
6753 }
6754 } else {
6755 if (hevc->wait_buf == 1) {
6756 pic_list_process(hevc);
6757 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6758 if (hevc->cur_pic == NULL)
6759 return -1;
6760
6761 if (!hevc->m_ins_flag)
6762 hevc->over_decode = 0;
6763
6764#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6765 hevc->cur_pic->dv_enhance_exist = 0;
6766 if (vdec->master == NULL &&
6767 vdec->slave == NULL)
6768 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6769#else
6770 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6771#endif
6772 hevc->wait_buf = 0;
6773 } else if (hevc->wait_buf ==
6774 2) {
6775 if (get_display_pic_num(hevc) >
6776 1)
6777 return -1;
6778 hevc->wait_buf = 0;
6779 }
6780 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6781 dump_pic_list(hevc);
6782 }
6783
6784 if (hevc->new_pic) {
6785#if 1
6786 /*SUPPORT_10BIT*/
6787 int sao_mem_unit =
6788 (hevc->lcu_size == 16 ? 9 :
6789 hevc->lcu_size ==
6790 32 ? 14 : 24) << 4;
6791#else
6792 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
6793#endif
6794 int pic_height_cu =
6795 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
6796 int pic_width_cu =
6797 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
6798 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
6799
6800 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
6801 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6802 hevc_print(hevc, 0,
6803 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
6804 __func__,
6805 hevc->decode_idx,
6806 hevc->curr_pic_struct,
6807 hevc->interlace_flag,
6808 hevc->cur_pic->index);
6809 }
6810 if (dbg_skip_decode_index != 0 &&
6811 hevc->decode_idx == dbg_skip_decode_index)
6812 dbg_skip_flag = 1;
6813
6814 hevc->decode_idx++;
6815 update_tile_info(hevc, pic_width_cu, pic_height_cu,
6816 sao_mem_unit, rpm_param);
6817
6818 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
6819 }
6820
6821 if (hevc->iPrevPOC != hevc->curr_POC) {
6822 hevc->new_tile = 1;
6823 hevc->tile_x = 0;
6824 hevc->tile_y = 0;
6825 hevc->tile_y_x = 0;
6826 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6827 hevc_print(hevc, 0,
6828 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
6829 hevc->tile_x, hevc->tile_y);
6830 }
6831 } else if (hevc->tile_enabled) {
6832 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6833 hevc_print(hevc, 0,
6834 "slice_segment_address is %d\n",
6835 rpm_param->p.slice_segment_address);
6836 }
6837 hevc->tile_y_x =
6838 get_tile_index(hevc, rpm_param->p.slice_segment_address,
6839 (hevc->pic_w +
6840 hevc->lcu_size -
6841 1) / hevc->lcu_size);
6842 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
6843 && (hevc->tile_y_x != -1)) {
6844 hevc->new_tile = 1;
6845 hevc->tile_x = hevc->tile_y_x & 0xff;
6846 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
6847 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6848 hevc_print(hevc, 0,
6849 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
6850 rpm_param->p.slice_segment_address,
6851 hevc->tile_x, hevc->tile_y);
6852 }
6853 } else
6854 hevc->new_tile = 0;
6855 } else
6856 hevc->new_tile = 0;
6857
6858 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
6859 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
6860 hevc->new_tile = 0;
6861
6862 if (hevc->new_tile) {
6863 hevc->tile_start_lcu_x =
6864 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
6865 hevc->tile_start_lcu_y =
6866 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
6867 hevc->tile_width_lcu =
6868 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
6869 hevc->tile_height_lcu =
6870 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
6871 }
6872
6873 set_ref_pic_list(hevc, rpm_param);
6874
6875 Col_ref = rpm_param->p.collocated_ref_idx;
6876
6877 hevc->LDCFlag = 0;
6878 if (rpm_param->p.slice_type != I_SLICE) {
6879 hevc->LDCFlag = 1;
6880 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
6881 if (hevc->cur_pic->
6882 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
6883 hevc->curr_POC)
6884 hevc->LDCFlag = 0;
6885 }
6886 if (rpm_param->p.slice_type == B_SLICE) {
6887 for (i = 0; (i < hevc->RefNum_L1)
6888 && hevc->LDCFlag; i++) {
6889 if (hevc->cur_pic->
6890 m_aiRefPOCList1[hevc->cur_pic->
6891 slice_idx][i] >
6892 hevc->curr_POC)
6893 hevc->LDCFlag = 0;
6894 }
6895 }
6896 }
6897
6898 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
6899
6900 hevc->plevel =
6901 rpm_param->p.log2_parallel_merge_level;
6902 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
6903
6904 hevc->LongTerm_Curr = 0; /* to do ... */
6905 hevc->LongTerm_Col = 0; /* to do ... */
6906
6907 hevc->list_no = 0;
6908 if (rpm_param->p.slice_type == B_SLICE)
6909 hevc->list_no = 1 - hevc->ColFromL0Flag;
6910 if (hevc->list_no == 0) {
6911 if (Col_ref < hevc->RefNum_L0) {
6912 hevc->Col_POC =
6913 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
6914 slice_idx][Col_ref];
6915 } else
6916 hevc->Col_POC = INVALID_POC;
6917 } else {
6918 if (Col_ref < hevc->RefNum_L1) {
6919 hevc->Col_POC =
6920 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
6921 slice_idx][Col_ref];
6922 } else
6923 hevc->Col_POC = INVALID_POC;
6924 }
6925
6926 hevc->LongTerm_Ref = 0; /* to do ... */
6927
6928 if (hevc->slice_type != 2) {
6929 /* if(hevc->i_only==1){ */
6930 /* return 0xf; */
6931 /* } */
6932
6933 if (hevc->Col_POC != INVALID_POC) {
6934 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
6935 if (hevc->col_pic == NULL) {
6936 hevc->cur_pic->error_mark = 1;
6937 if (get_dbg_flag(hevc)) {
6938 hevc_print(hevc, 0,
6939 "WRONG,fail to get the pic Col_POC\n");
6940 }
6941 if (is_log_enable(hevc))
6942 add_log(hevc,
6943 "WRONG,fail to get the pic Col_POC");
6944 } else if (hevc->col_pic->error_mark) {
6945 hevc->cur_pic->error_mark = 1;
6946 if (get_dbg_flag(hevc)) {
6947 hevc_print(hevc, 0,
6948 "WRONG, Col_POC error_mark is 1\n");
6949 }
6950 if (is_log_enable(hevc))
6951 add_log(hevc,
6952 "WRONG, Col_POC error_mark is 1");
6953 } else {
6954 if ((hevc->col_pic->width
6955 != hevc->pic_w) ||
6956 (hevc->col_pic->height
6957 != hevc->pic_h)) {
6958 hevc_print(hevc, 0,
6959 "Wrong reference pic (poc %d) width/height %d/%d\n",
6960 hevc->col_pic->POC,
6961 hevc->col_pic->width,
6962 hevc->col_pic->height);
6963 hevc->cur_pic->error_mark = 1;
6964 }
6965
6966 }
6967
6968 if (hevc->cur_pic->error_mark
6969 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
6970#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
6971 /*count info*/
6972 vdec_count_info(gvs, hevc->cur_pic->error_mark,
6973 hevc->cur_pic->stream_offset);
6974#endif
6975 }
6976
6977 if (is_skip_decoding(hevc,
6978 hevc->cur_pic)) {
6979 return 2;
6980 }
6981 } else
6982 hevc->col_pic = hevc->cur_pic;
6983 } /* */
6984 if (hevc->col_pic == NULL)
6985 hevc->col_pic = hevc->cur_pic;
6986#ifdef BUFFER_MGR_ONLY
6987 return 0xf;
6988#else
6989 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
6990 || (dbg_skip_flag))
6991 return 0xf;
6992#endif
6993
6994 config_mc_buffer(hevc, hevc->cur_pic);
6995
6996 if (is_skip_decoding(hevc,
6997 hevc->cur_pic)) {
6998 if (get_dbg_flag(hevc))
6999 hevc_print(hevc, 0,
7000 "Discard this picture index %d\n",
7001 hevc->cur_pic->index);
7002#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7003 /*count info*/
7004 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7005 hevc->cur_pic->stream_offset);
7006#endif
7007 return 2;
7008 }
7009#ifdef MCRCC_ENABLE
7010 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7011#endif
7012 config_mpred_hw(hevc);
7013
7014 config_sao_hw(hevc, rpm_param);
7015
7016 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7017 return 0xf;
7018
7019 return 0;
7020}
7021
7022
7023
7024static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7025 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7026 int cur_buf_idx = new_pic->index;
7027 int bit_depth_10 = (bit_depth != 0x00);
7028 int picture_size;
7029 int cur_mmu_4k_number;
7030 int ret, max_frame_num;
7031 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7032 new_pic->height, !bit_depth_10);
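	/*
	 * Round the compressed body size up to whole 4 KiB pages; e.g. a
	 * picture_size of 0x300001 bytes needs 0x301 pages.
	 */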
7033 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7034 if (hevc->double_write_mode & 0x10)
7035 return 0;
7036 /*hevc_print(hevc, 0,
7037 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7038 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7039 if (new_pic->scatter_alloc) {
7040 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7041 new_pic->scatter_alloc = 0;
7042 }
7043 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7044 max_frame_num = MAX_FRAME_8K_NUM;
7045 else
7046 max_frame_num = MAX_FRAME_4K_NUM;
7047 if (cur_mmu_4k_number > max_frame_num) {
7048 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7049 cur_mmu_4k_number,
7050 new_pic->width,
7051 new_pic->height);
7052 return -1;
7053 }
7054 ret = decoder_mmu_box_alloc_idx(
7055 hevc->mmu_box,
7056 cur_buf_idx,
7057 cur_mmu_4k_number,
7058 mmu_index_adr);
7059 if (ret == 0)
7060 new_pic->scatter_alloc = 1;
7061 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7062 "%s pic index %d page count(%d) ret =%d\n",
7063 __func__, cur_buf_idx,
7064 cur_mmu_4k_number,
7065 ret);
7066 return ret;
7067}
7068
7069
7070static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7071 struct PIC_s *pic)
7072{
7073 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7074 "%s pic index %d scatter_alloc %d\n",
7075 __func__, pic->index,
7076 pic->scatter_alloc);
7077
7078 if (hevc->mmu_enable
7079 && ((hevc->double_write_mode & 0x10) == 0)
7080 && pic->scatter_alloc)
7081 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7082 pic->scatter_alloc = 0;
7083}
7084
7085/*
7086 *************************************************
7087 *
7088 *h265 buffer management end
7089 *
7090 **************************************************
7091 */
7092static struct hevc_state_s *gHevc;
7093
7094static void hevc_local_uninit(struct hevc_state_s *hevc)
7095{
7096 hevc->rpm_ptr = NULL;
7097 hevc->lmem_ptr = NULL;
7098
7099#ifdef SWAP_HEVC_UCODE
7100 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7101 if (hevc->mc_cpu_addr != NULL) {
7102 dma_free_coherent(amports_get_dma_device(),
7103 hevc->swap_size, hevc->mc_cpu_addr,
7104 hevc->mc_dma_handle);
7105 hevc->mc_cpu_addr = NULL;
7106 }
7107
7108 }
7109#endif
7110#ifdef DETREFILL_ENABLE
7111 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7112 uninit_detrefill_buf(hevc);
7113#endif
7114 if (hevc->aux_addr) {
7115 dma_free_coherent(amports_get_dma_device(),
7116 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7117 hevc->aux_phy_addr);
7118 hevc->aux_addr = NULL;
7119 }
7120 if (hevc->rpm_addr) {
7121 dma_free_coherent(amports_get_dma_device(),
7122 RPM_BUF_SIZE, hevc->rpm_addr,
7123 hevc->rpm_phy_addr);
7124 hevc->rpm_addr = NULL;
7125 }
7126 if (hevc->lmem_addr) {
7127 dma_free_coherent(amports_get_dma_device(),
7128			LMEM_BUF_SIZE, hevc->lmem_addr,
7129 hevc->lmem_phy_addr);
7130 hevc->lmem_addr = NULL;
7131 }
7132
7133 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7134 if (hevc->frame_mmu_map_phy_addr)
7135 dma_free_coherent(amports_get_dma_device(),
7136 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7137 hevc->frame_mmu_map_phy_addr);
7138
7139 hevc->frame_mmu_map_addr = NULL;
7140 }
7141
7142 kfree(gvs);
7143 gvs = NULL;
7144}
7145
7146static int hevc_local_init(struct hevc_state_s *hevc)
7147{
7148 int ret = -1;
7149 struct BuffInfo_s *cur_buf_info = NULL;
7150
7151 memset(&hevc->param, 0, sizeof(union param_u));
7152
7153 cur_buf_info = &hevc->work_space_buf_store;
7154
7155 if (vdec_is_support_4k()) {
7156 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7157 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7158 sizeof(struct BuffInfo_s));
7159 else
7160 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7161 sizeof(struct BuffInfo_s));
7162 } else
7163 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7164 sizeof(struct BuffInfo_s));
7165
7166 cur_buf_info->start_adr = hevc->buf_start;
7167 init_buff_spec(hevc, cur_buf_info);
7168
7169 hevc_init_stru(hevc, cur_buf_info);
7170
7171 hevc->bit_depth_luma = 8;
7172 hevc->bit_depth_chroma = 8;
7173 hevc->video_signal_type = 0;
7174 hevc->video_signal_type_debug = 0;
7175 bit_depth_luma = hevc->bit_depth_luma;
7176 bit_depth_chroma = hevc->bit_depth_chroma;
7177 video_signal_type = hevc->video_signal_type;
7178
7179 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7180 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7181 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7182 if (hevc->rpm_addr == NULL) {
7183 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7184 return -1;
7185 }
7186 hevc->rpm_ptr = hevc->rpm_addr;
7187 }
7188
7189 if (prefix_aux_buf_size > 0 ||
7190 suffix_aux_buf_size > 0) {
7191 u32 aux_buf_size;
7192
7193 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7194 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7195 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7196		hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7197			aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7198		if (hevc->aux_addr == NULL) {
7199			pr_err("%s: failed to alloc aux buffer\n", __func__);
7200 return -1;
7201 }
7202 }
7203
7204 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7205 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7206 if (hevc->lmem_addr == NULL) {
7207 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7208 return -1;
7209 }
7210 hevc->lmem_ptr = hevc->lmem_addr;
7211
7212 if (hevc->mmu_enable) {
7213 hevc->frame_mmu_map_addr =
7214 dma_alloc_coherent(amports_get_dma_device(),
7215 get_frame_mmu_map_size(),
7216 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7217 if (hevc->frame_mmu_map_addr == NULL) {
7218			pr_err("%s: failed to alloc frame_mmu_map buffer\n", __func__);
7219 return -1;
7220 }
7221 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7222 }
7223 ret = 0;
7224 return ret;
7225}
7226
7227/*
7228 *******************************************
7229 * Mailbox command
7230 *******************************************
7231 */
7232#define CMD_FINISHED 0
7233#define CMD_ALLOC_VIEW 1
7234#define CMD_FRAME_DISPLAY 3
7235#define CMD_DEBUG 10
7236
7237
7238#define DECODE_BUFFER_NUM_MAX 32
7239#define DISPLAY_BUFFER_NUM 6
7240
7241#define video_domain_addr(adr) (adr&0x7fffffff)
7242#define DECODER_WORK_SPACE_SIZE 0x800000
7243
7244#define spec2canvas(x) \
7245 (((x)->uv_canvas_index << 16) | \
7246 ((x)->uv_canvas_index << 8) | \
7247 ((x)->y_canvas_index << 0))
7248
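/*
 * spec2canvas() packs the picture's canvas indexes into a single canvas
 * address word: the UV index fills the U and V slots (bits 23..16 and
 * 15..8) and the Y index sits in bits 7..0.
 */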
7249
7250static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7251{
7252 struct vdec_s *vdec = hw_to_vdec(hevc);
7253 int canvas_w = ALIGN(pic->width, 64)/4;
7254 int canvas_h = ALIGN(pic->height, 32)/4;
7255 int blkmode = mem_map_mode;
7256
7257 /*CANVAS_BLKMODE_64X32*/
7258#ifdef SUPPORT_10BIT
7259 if (pic->double_write_mode) {
7260 canvas_w = pic->width /
7261 get_double_write_ratio(hevc, pic->double_write_mode);
7262 canvas_h = pic->height /
7263 get_double_write_ratio(hevc, pic->double_write_mode);
7264
7265 if (mem_map_mode == 0)
7266 canvas_w = ALIGN(canvas_w, 32);
7267 else
7268 canvas_w = ALIGN(canvas_w, 64);
7269 canvas_h = ALIGN(canvas_h, 32);
7270
7271 if (vdec->parallel_dec == 1) {
7272 if (pic->y_canvas_index == -1)
7273 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7274 if (pic->uv_canvas_index == -1)
7275 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7276 } else {
7277 pic->y_canvas_index = 128 + pic->index * 2;
7278 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7279 }
7280
7281 canvas_config_ex(pic->y_canvas_index,
7282 pic->dw_y_adr, canvas_w, canvas_h,
7283 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7284 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7285 canvas_w, canvas_h,
7286 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7287#ifdef MULTI_INSTANCE_SUPPORT
7288 pic->canvas_config[0].phy_addr =
7289 pic->dw_y_adr;
7290 pic->canvas_config[0].width =
7291 canvas_w;
7292 pic->canvas_config[0].height =
7293 canvas_h;
7294 pic->canvas_config[0].block_mode =
7295 blkmode;
7296 pic->canvas_config[0].endian = 7;
7297
7298 pic->canvas_config[1].phy_addr =
7299 pic->dw_u_v_adr;
7300 pic->canvas_config[1].width =
7301 canvas_w;
7302 pic->canvas_config[1].height =
7303 canvas_h;
7304 pic->canvas_config[1].block_mode =
7305 blkmode;
7306 pic->canvas_config[1].endian = 7;
7307#endif
7308 } else {
7309 if (!hevc->mmu_enable) {
7310 /* to change after 10bit VPU is ready ... */
7311 if (vdec->parallel_dec == 1) {
7312 if (pic->y_canvas_index == -1)
7313 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7314 pic->uv_canvas_index = pic->y_canvas_index;
7315 } else {
7316 pic->y_canvas_index = 128 + pic->index;
7317 pic->uv_canvas_index = 128 + pic->index;
7318 }
7319
7320 canvas_config_ex(pic->y_canvas_index,
7321 pic->mc_y_adr, canvas_w, canvas_h,
7322 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7323 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7324 canvas_w, canvas_h,
7325 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7326 }
7327 }
7328#else
7329 if (vdec->parallel_dec == 1) {
7330 if (pic->y_canvas_index == -1)
7331 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7332 if (pic->uv_canvas_index == -1)
7333 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7334 } else {
7335 pic->y_canvas_index = 128 + pic->index * 2;
7336 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7337 }
7338
7339
7340 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7341 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7342 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7343 canvas_w, canvas_h,
7344 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7345#endif
7346}
7347
7348static int init_buf_spec(struct hevc_state_s *hevc)
7349{
7350 int pic_width = hevc->pic_w;
7351 int pic_height = hevc->pic_h;
7352
7353 /* hevc_print(hevc, 0,
7354 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7355 */
7356 hevc_print(hevc, 0,
7357 "%s2 %d %d\n", __func__, pic_width, pic_height);
7358 /* pic_width = hevc->pic_w; */
7359 /* pic_height = hevc->pic_h; */
7360
7361 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7362 hevc->frame_width = pic_width;
7363 hevc->frame_height = pic_height;
7364
7365 }
7366
7367 return 0;
7368}
7369
7370static int parse_sei(struct hevc_state_s *hevc,
7371 struct PIC_s *pic, char *sei_buf, uint32_t size)
7372{
7373 char *p = sei_buf;
7374 char *p_sei;
7375 uint16_t header;
7376 uint8_t nal_unit_type;
7377 uint8_t payload_type, payload_size;
7378 int i, j;
7379
7380 if (size < 2)
7381 return 0;
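	/*
	 * 16-bit HEVC NAL unit header: forbidden_zero(1) | nal_unit_type(6) |
	 * nuh_layer_id(6) | nuh_temporal_id_plus1(3). With forbidden_zero == 0,
	 * header >> 9 yields nal_unit_type.
	 */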
7382 header = *p++;
7383 header <<= 8;
7384 header += *p++;
7385 nal_unit_type = header >> 9;
7386 if ((nal_unit_type != NAL_UNIT_SEI)
7387 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7388 return 0;
7389 while (p+2 <= sei_buf+size) {
7390 payload_type = *p++;
7391 payload_size = *p++;
7392 if (p+payload_size <= sei_buf+size) {
7393 switch (payload_type) {
7394 case SEI_PicTiming:
7395 if ((parser_sei_enable & 0x4) &&
7396 hevc->frame_field_info_present_flag) {
7397 p_sei = p;
7398 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7399 pic->pic_struct = hevc->curr_pic_struct;
7400 if (get_dbg_flag(hevc) &
7401 H265_DEBUG_PIC_STRUCT) {
7402 hevc_print(hevc, 0,
7403 "parse result pic_struct = %d\n",
7404 hevc->curr_pic_struct);
7405 }
7406 }
7407 break;
7408 case SEI_UserDataITU_T_T35:
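				/*
				 * These magic bytes are assumed to be the HDR10+
				 * (SMPTE ST 2094-40) T.35 header: country code 0xB5,
				 * provider 0x003C, application id 0x0001, version 0x04.
				 */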
7409 p_sei = p;
7410 if (p_sei[0] == 0xB5
7411 && p_sei[1] == 0x00
7412 && p_sei[2] == 0x3C
7413 && p_sei[3] == 0x00
7414 && p_sei[4] == 0x01
7415 && p_sei[5] == 0x04)
7416 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7417
7418 break;
7419 case SEI_MasteringDisplayColorVolume:
7420 /*hevc_print(hevc, 0,
7421 "sei type: primary display color volume %d, size %d\n",
7422 payload_type,
7423 payload_size);*/
7424 /* master_display_colour */
7425 p_sei = p;
7426 for (i = 0; i < 3; i++) {
7427 for (j = 0; j < 2; j++) {
7428 hevc->primaries[i][j]
7429 = (*p_sei<<8)
7430 | *(p_sei+1);
7431 p_sei += 2;
7432 }
7433 }
7434 for (i = 0; i < 2; i++) {
7435 hevc->white_point[i]
7436 = (*p_sei<<8)
7437 | *(p_sei+1);
7438 p_sei += 2;
7439 }
7440 for (i = 0; i < 2; i++) {
7441 hevc->luminance[i]
7442 = (*p_sei<<24)
7443 | (*(p_sei+1)<<16)
7444 | (*(p_sei+2)<<8)
7445 | *(p_sei+3);
7446 p_sei += 4;
7447 }
7448 hevc->sei_present_flag |=
7449 SEI_MASTER_DISPLAY_COLOR_MASK;
7450 /*for (i = 0; i < 3; i++)
7451 for (j = 0; j < 2; j++)
7452 hevc_print(hevc, 0,
7453 "\tprimaries[%1d][%1d] = %04x\n",
7454 i, j,
7455 hevc->primaries[i][j]);
7456 hevc_print(hevc, 0,
7457 "\twhite_point = (%04x, %04x)\n",
7458 hevc->white_point[0],
7459 hevc->white_point[1]);
7460 hevc_print(hevc, 0,
7461 "\tmax,min luminance = %08x, %08x\n",
7462 hevc->luminance[0],
7463 hevc->luminance[1]);*/
7464 break;
7465 case SEI_ContentLightLevel:
7466 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7467 hevc_print(hevc, 0,
7468 "sei type: max content light level %d, size %d\n",
7469 payload_type, payload_size);
7470 /* content_light_level */
7471 p_sei = p;
7472 hevc->content_light_level[0]
7473 = (*p_sei<<8) | *(p_sei+1);
7474 p_sei += 2;
7475 hevc->content_light_level[1]
7476 = (*p_sei<<8) | *(p_sei+1);
7477 p_sei += 2;
7478 hevc->sei_present_flag |=
7479 SEI_CONTENT_LIGHT_LEVEL_MASK;
7480 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7481 hevc_print(hevc, 0,
7482 "\tmax cll = %04x, max_pa_cll = %04x\n",
7483 hevc->content_light_level[0],
7484 hevc->content_light_level[1]);
7485 break;
7486 default:
7487 break;
7488 }
7489 }
7490 p += payload_size;
7491 }
7492 return 0;
7493}
7494
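/*
 * Map the VUI aspect_ratio_idc to a display ratio (height/width scaled by
 * 0x100). idc 255 uses the explicit SAR signalled in the stream; idc 1..16
 * follow the fixed sample-aspect-ratio table of the spec (1:1, 12:11,
 * 10:11, 16:11, 40:33, ...).
 */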
7495static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7496 unsigned w, unsigned h)
7497{
7498 unsigned ar;
7499
7500 if (idc == 255) {
7501 ar = div_u64(256ULL * sar_h * h,
7502 sar_w * w);
7503 } else {
7504 switch (idc) {
7505 case 1:
7506 ar = 0x100 * h / w;
7507 break;
7508 case 2:
7509 ar = 0x100 * h * 11 / (w * 12);
7510 break;
7511 case 3:
7512 ar = 0x100 * h * 11 / (w * 10);
7513 break;
7514 case 4:
7515 ar = 0x100 * h * 11 / (w * 16);
7516 break;
7517 case 5:
7518 ar = 0x100 * h * 33 / (w * 40);
7519 break;
7520 case 6:
7521 ar = 0x100 * h * 11 / (w * 24);
7522 break;
7523 case 7:
7524 ar = 0x100 * h * 11 / (w * 20);
7525 break;
7526 case 8:
7527 ar = 0x100 * h * 11 / (w * 32);
7528 break;
7529 case 9:
7530 ar = 0x100 * h * 33 / (w * 80);
7531 break;
7532 case 10:
7533 ar = 0x100 * h * 11 / (w * 18);
7534 break;
7535 case 11:
7536 ar = 0x100 * h * 11 / (w * 15);
7537 break;
7538 case 12:
7539 ar = 0x100 * h * 33 / (w * 64);
7540 break;
7541 case 13:
7542 ar = 0x100 * h * 99 / (w * 160);
7543 break;
7544 case 14:
7545 ar = 0x100 * h * 3 / (w * 4);
7546 break;
7547 case 15:
7548 ar = 0x100 * h * 2 / (w * 3);
7549 break;
7550 case 16:
7551 ar = 0x100 * h * 1 / (w * 2);
7552 break;
7553 default:
7554 ar = h * 0x100 / w;
7555 break;
7556 }
7557 }
7558
7559 return ar;
7560}
7561
7562static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
7563 struct PIC_s *pic)
7564{
7565 unsigned int ar;
7566 int i, j;
7567 char *p;
7568 unsigned size = 0;
7569 unsigned type = 0;
7570 struct vframe_master_display_colour_s *vf_dp
7571 = &vf->prop.master_display_colour;
7572
7573 vf->width = pic->width /
7574 get_double_write_ratio(hevc, pic->double_write_mode);
7575 vf->height = pic->height /
7576 get_double_write_ratio(hevc, pic->double_write_mode);
7577
7578 vf->duration = hevc->frame_dur;
7579 vf->duration_pulldown = 0;
7580 vf->flag = 0;
7581
7582 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
7583 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7584
7585
7586 if (((pic->aspect_ratio_idc == 255) &&
7587 pic->sar_width &&
7588 pic->sar_height) ||
7589 ((pic->aspect_ratio_idc != 255) &&
7590 (pic->width))) {
7591 ar = min_t(u32,
7592 calc_ar(pic->aspect_ratio_idc,
7593 pic->sar_width,
7594 pic->sar_height,
7595 pic->width,
7596 pic->height),
7597 DISP_RATIO_ASPECT_RATIO_MAX);
7598 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7599 }
7600 hevc->ratio_control = vf->ratio_control;
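	/*
	 * Walk the aux records attached to this picture (8-byte header with a
	 * big-endian length and a tag byte, see set_aux_data()); records whose
	 * header word reads 0x02000000 are treated as SEI payloads and handed
	 * to parse_sei().
	 */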
7601 if (pic->aux_data_buf
7602 && pic->aux_data_size) {
7603 /* parser sei */
7604 p = pic->aux_data_buf;
7605 while (p < pic->aux_data_buf
7606 + pic->aux_data_size - 8) {
7607 size = *p++;
7608 size = (size << 8) | *p++;
7609 size = (size << 8) | *p++;
7610 size = (size << 8) | *p++;
7611 type = *p++;
7612 type = (type << 8) | *p++;
7613 type = (type << 8) | *p++;
7614 type = (type << 8) | *p++;
7615 if (type == 0x02000000) {
7616 /* hevc_print(hevc, 0,
7617 "sei(%d)\n", size); */
7618 parse_sei(hevc, pic, p, size);
7619 }
7620 p += size;
7621 }
7622 }
7623 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
7624 vf->signal_type = pic->video_signal_type;
7625 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
7626 u32 data;
7627 data = vf->signal_type;
7628 data = data & 0xFFFF00FF;
7629 data = data | (0x30<<8);
7630 vf->signal_type = data;
7631 }
7632 }
7633 else
7634 vf->signal_type = 0;
7635 hevc->video_signal_type_debug = vf->signal_type;
7636
7637 /* master_display_colour */
7638 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
7639 for (i = 0; i < 3; i++)
7640 for (j = 0; j < 2; j++)
7641 vf_dp->primaries[i][j] = hevc->primaries[i][j];
7642 for (i = 0; i < 2; i++) {
7643 vf_dp->white_point[i] = hevc->white_point[i];
7644 vf_dp->luminance[i]
7645 = hevc->luminance[i];
7646 }
7647 vf_dp->present_flag = 1;
7648 } else
7649 vf_dp->present_flag = 0;
7650
7651 /* content_light_level */
7652 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
7653 vf_dp->content_light_level.max_content
7654 = hevc->content_light_level[0];
7655 vf_dp->content_light_level.max_pic_average
7656 = hevc->content_light_level[1];
7657 vf_dp->content_light_level.present_flag = 1;
7658 } else
7659 vf_dp->content_light_level.present_flag = 0;
7660}
7661
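/*
 * Report vframe pool status to the receiver:
 * newframe_q holds free slots, display_q holds frames ready for display.
 */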
7662static int vh265_vf_states(struct vframe_states *states, void *op_arg)
7663{
7664 unsigned long flags;
7665#ifdef MULTI_INSTANCE_SUPPORT
7666 struct vdec_s *vdec = op_arg;
7667 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7668#else
7669 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7670#endif
7671
7672 spin_lock_irqsave(&lock, flags);
7673
7674 states->vf_pool_size = VF_POOL_SIZE;
7675 states->buf_free_num = kfifo_len(&hevc->newframe_q);
7676 states->buf_avail_num = kfifo_len(&hevc->display_q);
7677
7678 if (step == 2)
7679 states->buf_avail_num = 0;
7680 spin_unlock_irqrestore(&lock, flags);
7681 return 0;
7682}
7683
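/*
 * Peek at the next displayable vframe without removing it from display_q;
 * when a second frame is already queued, pre-fill next_vf_pts as well.
 */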
7684static struct vframe_s *vh265_vf_peek(void *op_arg)
7685{
7686 struct vframe_s *vf[2] = {0, 0};
7687#ifdef MULTI_INSTANCE_SUPPORT
7688 struct vdec_s *vdec = op_arg;
7689 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7690#else
7691 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7692#endif
7693
7694 if (step == 2)
7695 return NULL;
7696
7697 if (force_disp_pic_index & 0x100) {
7698 if (force_disp_pic_index & 0x200)
7699 return NULL;
7700 return &hevc->vframe_dummy;
7701 }
7702
7703
7704 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
7705 if (vf[1]) {
7706 vf[0]->next_vf_pts_valid = true;
7707 vf[0]->next_vf_pts = vf[1]->pts;
7708 } else
7709 vf[0]->next_vf_pts_valid = false;
7710 return vf[0];
7711 }
7712
7713 return NULL;
7714}
7715
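/*
 * Pop the next vframe from display_q for the receiver, honoring the
 * single-step debug mode (step), update the get counters and pre-fill
 * next_vf_pts from the following queued frame if one exists.
 */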
7716static struct vframe_s *vh265_vf_get(void *op_arg)
7717{
7718 struct vframe_s *vf;
7719#ifdef MULTI_INSTANCE_SUPPORT
7720 struct vdec_s *vdec = op_arg;
7721 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7722#else
7723 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7724#endif
7725
7726 if (step == 2)
7727 return NULL;
7728 else if (step == 1)
7729 step = 2;
7730
7731#if 0
7732 if (force_disp_pic_index & 0x100) {
7733 int buffer_index = force_disp_pic_index & 0xff;
7734 struct PIC_s *pic = NULL;
7735 if (buffer_index >= 0
7736 && buffer_index < MAX_REF_PIC_NUM)
7737 pic = hevc->m_PIC[buffer_index];
7738 if (pic == NULL)
7739 return NULL;
7740 if (force_disp_pic_index & 0x200)
7741 return NULL;
7742
7743 vf = &hevc->vframe_dummy;
7744 if (get_double_write_mode(hevc)) {
7745 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
7746 VIDTYPE_VIU_NV21;
7747 if (hevc->m_ins_flag) {
7748 vf->canvas0Addr = vf->canvas1Addr = -1;
7749 vf->plane_num = 2;
7750 vf->canvas0_config[0] =
7751 pic->canvas_config[0];
7752 vf->canvas0_config[1] =
7753 pic->canvas_config[1];
7754
7755 vf->canvas1_config[0] =
7756 pic->canvas_config[0];
7757 vf->canvas1_config[1] =
7758 pic->canvas_config[1];
7759 } else {
7760 vf->canvas0Addr = vf->canvas1Addr
7761 = spec2canvas(pic);
7762 }
7763 } else {
7764 vf->canvas0Addr = vf->canvas1Addr = 0;
7765 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
7766 if (hevc->mmu_enable)
7767 vf->type |= VIDTYPE_SCATTER;
7768 }
7769 vf->compWidth = pic->width;
7770 vf->compHeight = pic->height;
7771 update_vf_memhandle(hevc, vf, pic);
7772 switch (hevc->bit_depth_luma) {
7773 case 9:
7774 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
7775 break;
7776 case 10:
7777 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
7778 | BITDEPTH_V10;
7779 break;
7780 default:
7781 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
7782 break;
7783 }
7784 if ((vf->type & VIDTYPE_COMPRESS) == 0)
7785 vf->bitdepth =
7786 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
7787 if (hevc->mem_saving_mode == 1)
7788 vf->bitdepth |= BITDEPTH_SAVING_MODE;
7789 vf->duration_pulldown = 0;
7790 vf->pts = 0;
7791 vf->pts_us64 = 0;
7792 set_frame_info(hevc, vf);
7793
7794 vf->width = pic->width /
7795 get_double_write_ratio(hevc, pic->double_write_mode);
7796 vf->height = pic->height /
7797 get_double_write_ratio(hevc, pic->double_write_mode);
7798
7799 force_disp_pic_index |= 0x200;
7800 return vf;
7801 }
7802#endif
7803
7804 if (kfifo_get(&hevc->display_q, &vf)) {
7805 struct vframe_s *next_vf;
7806 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
7807 hevc_print(hevc, 0,
7808 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
7809 __func__, vf, vf->type, vf->index,
7810 get_pic_poc(hevc, vf->index & 0xff),
7811 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
7812 vf->pts, vf->pts_us64,
7813 vf->duration);
7814#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7815 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
7816 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
7817 if (pic->aux_data_buf && pic->aux_data_size > 0) {
7818 int i;
7821 hevc_print(hevc, 0,
7822 "pic 0x%p aux size %d:\n",
7823 pic, pic->aux_data_size);
7824 for (i = 0; i < pic->aux_data_size; i++) {
7825 hevc_print_cont(hevc, 0,
7826 "%02x ", pic->aux_data_buf[i]);
7827 if (((i + 1) & 0xf) == 0)
7828 hevc_print_cont(hevc, 0, "\n");
7829 }
7830 hevc_print_cont(hevc, 0, "\n");
7831 }
7832 }
7833#endif
7834 hevc->show_frame_num++;
7835 hevc->vf_get_count++;
7836
7837 if (kfifo_peek(&hevc->display_q, &next_vf)) {
7838 vf->next_vf_pts_valid = true;
7839 vf->next_vf_pts = next_vf->pts;
7840 } else
7841 vf->next_vf_pts_valid = false;
7842
7843 return vf;
7844 }
7845
7846 return NULL;
7847}
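/*
 * Sanity check that a vframe returned by the receiver actually belongs
 * to this instance's vfpool before it is recycled.
 */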
7848static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
7849 int i;
7850 for (i = 0; i < VF_POOL_SIZE; i++) {
7851 if (vf == &hevc->vfpool[i])
7852 return true;
7853 }
7854 pr_info("h265: invalid vf has been put, vf = %p\n", vf);
7855 for (i = 0; i < VF_POOL_SIZE; i++) {
7856 pr_info("valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
7857 }
7858 return false;
7859}
7860
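/*
 * Return a vframe from the receiver: recycle it into newframe_q, drop the
 * vf_ref of the underlying top/bottom PICs and, once a PIC becomes free,
 * clear output_ready and kick the decoder through the mailbox IRQ if it
 * was waiting for a buffer.
 */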
7861static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
7862{
7863 unsigned long flags;
7864#ifdef MULTI_INSTANCE_SUPPORT
7865 struct vdec_s *vdec = op_arg;
7866 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7867#else
7868 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7869#endif
7870 unsigned char index_top;
7871 unsigned char index_bot;
7872
7873 if (vf && (vf_valid_check(vf, hevc) == false))
7874 return;
7875 if (vf == (&hevc->vframe_dummy))
7876 return;
7877 index_top = vf->index & 0xff;
7878 index_bot = (vf->index >> 8) & 0xff;
7879 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
7880 hevc_print(hevc, 0,
7881 "%s(type %d index 0x%x)\n",
7882 __func__, vf->type, vf->index);
7883 hevc->vf_put_count++;
7884 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
7885 spin_lock_irqsave(&lock, flags);
7886
7887 if (index_top != 0xff
7888 && index_top < MAX_REF_PIC_NUM
7889 && hevc->m_PIC[index_top]) {
7890 if (hevc->m_PIC[index_top]->vf_ref > 0) {
7891 hevc->m_PIC[index_top]->vf_ref--;
7892
7893 if (hevc->m_PIC[index_top]->vf_ref == 0) {
7894 hevc->m_PIC[index_top]->output_ready = 0;
7895
7896 if (hevc->wait_buf != 0)
7897 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
7898 0x1);
7899 }
7900 }
7901 }
7902
7903 if (index_bot != 0xff
7904 && index_bot < MAX_REF_PIC_NUM
7905 && hevc->m_PIC[index_bot]) {
7906 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
7907 hevc->m_PIC[index_bot]->vf_ref--;
7908
7909 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
7910 hevc->m_PIC[index_bot]->output_ready = 0;
7911 if (hevc->wait_buf != 0)
7912 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
7913 0x1);
7914 }
7915 }
7916 }
7917 spin_unlock_irqrestore(&lock, flags);
7918}
7919
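/*
 * Receiver event callback: hands out the per-PIC aux (SEI / Dolby Vision)
 * data on GET_AUX_DATA and latches the Dolby Vision EL bypass request.
 */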
7920static int vh265_event_cb(int type, void *data, void *op_arg)
7921{
7922 unsigned long flags;
7923#ifdef MULTI_INSTANCE_SUPPORT
7924 struct vdec_s *vdec = op_arg;
7925 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7926#else
7927 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7928#endif
7929 if (type & VFRAME_EVENT_RECEIVER_RESET) {
7930#if 0
7931 amhevc_stop();
7932#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
7933 vf_light_unreg_provider(&vh265_vf_prov);
7934#endif
7935 spin_lock_irqsave(&hevc->lock, flags);
7936 vh265_local_init();
7937 vh265_prot_init();
7938 spin_unlock_irqrestore(&hevc->lock, flags);
7939#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
7940 vf_reg_provider(&vh265_vf_prov);
7941#endif
7942 amhevc_start();
7943#endif
7944 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
7945 struct provider_aux_req_s *req =
7946 (struct provider_aux_req_s *)data;
7947 unsigned char index;
7948
7949 spin_lock_irqsave(&lock, flags);
7950 index = req->vf->index & 0xff;
7951 req->aux_buf = NULL;
7952 req->aux_size = 0;
7953 if (req->bot_flag)
7954 index = (req->vf->index >> 8) & 0xff;
7955 if (index != 0xff
7956 && index < MAX_REF_PIC_NUM
7957 && hevc->m_PIC[index]) {
7958 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
7959 req->aux_size = hevc->m_PIC[index]->aux_data_size;
7960#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7961 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7962 req->dv_enhance_exist = false;
7963 else
7964 req->dv_enhance_exist =
7965 hevc->m_PIC[index]->dv_enhance_exist;
7966 hevc_print(hevc, H265_DEBUG_DV,
7967 "query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux sizd 0x%x\n",
7968 req->vf,
7969 hevc->m_PIC[index]->POC, index,
7970 req->dv_enhance_exist, req->aux_size);
7971#else
7972 req->dv_enhance_exist = 0;
7973#endif
7974 }
7975 spin_unlock_irqrestore(&lock, flags);
7976
7977 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
7978 hevc_print(hevc, 0,
7979 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
7980 __func__, type, index, req->aux_size);
7981#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7982 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
7983 if ((force_bypass_dvenl & 0x80000000) == 0) {
7984 hevc_print(hevc, 0,
7985 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
7986 __func__);
7987 hevc->bypass_dvenl_enable = 1;
7988 }
7989
7990#endif
7991 }
7992 return 0;
7993}
7994
7995#ifdef HEVC_PIC_STRUCT_SUPPORT
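/*
 * Field pairing helper for interlaced pic_struct handling: flush or pair
 * the vframe kept in pending_q with the newly decoded opposite field
 * (pair_pic) before the result is moved to display_q.
 */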
7996static int process_pending_vframe(struct hevc_state_s *hevc,
7997 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
7998{
7999 struct vframe_s *vf;
8000
8001 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8002 hevc_print(hevc, 0,
8003 "%s: pair_pic index 0x%x %s\n",
8004 __func__, pair_pic->index,
8005 pair_frame_top_flag ?
8006 "top" : "bot");
8007
8008 if (kfifo_len(&hevc->pending_q) > 1) {
8009 /* do not pend more than one frame */
8010 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8011 hevc_print(hevc, 0,
8012 "fatal error, no available buffer slot.");
8013 return -1;
8014 }
8015 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8016 hevc_print(hevc, 0,
8017 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8018 __func__, vf->index);
8019 hevc->vf_pre_count++;
8020 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
8021 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8022 }
8023
8024 if (kfifo_peek(&hevc->pending_q, &vf)) {
8025 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8026 /*
8027 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8028 *do not use it
8029 */
8030 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8031 hevc_print(hevc, 0,
8032 "fatal error, no available buffer slot.");
8033 return -1;
8034 }
8035 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8036 hevc_print(hevc, 0,
8037 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8038 __func__, vf->index);
8039 if (vf) {
8040 hevc->vf_pre_count++;
8041 kfifo_put(&hevc->display_q,
8042 (const struct vframe_s *)vf);
8043 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8044 }
8045 } else if ((!pair_frame_top_flag) &&
8046 (((vf->index >> 8) & 0xff) == 0xff)) {
8047 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8048 hevc_print(hevc, 0,
8049 "fatal error, no available buffer slot.");
8050 return -1;
8051 }
8052 if (vf) {
8053 vf->type = VIDTYPE_PROGRESSIVE
8054 | VIDTYPE_VIU_NV21;
8055 vf->index &= 0xff;
8056 vf->index |= (pair_pic->index << 8);
8057 vf->canvas1Addr = spec2canvas(pair_pic);
8058 pair_pic->vf_ref++;
8059 kfifo_put(&hevc->display_q,
8060 (const struct vframe_s *)vf);
8061 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8062 hevc->vf_pre_count++;
8063 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8064 hevc_print(hevc, 0,
8065 "%s vf => display_q: (index 0x%x)\n",
8066 __func__, vf->index);
8067 }
8068 } else if (pair_frame_top_flag &&
8069 ((vf->index & 0xff) == 0xff)) {
8070 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8071 hevc_print(hevc, 0,
8072 "fatal error, no available buffer slot.");
8073 return -1;
8074 }
8075 if (vf) {
8076 vf->type = VIDTYPE_PROGRESSIVE
8077 | VIDTYPE_VIU_NV21;
8078 vf->index &= 0xff00;
8079 vf->index |= pair_pic->index;
8080 vf->canvas0Addr = spec2canvas(pair_pic);
8081 pair_pic->vf_ref++;
8082 kfifo_put(&hevc->display_q,
8083 (const struct vframe_s *)vf);
8084 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8085 hevc->vf_pre_count++;
8086 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8087 hevc_print(hevc, 0,
8088 "%s vf => display_q: (index 0x%x)\n",
8089 __func__, vf->index);
8090 }
8091 }
8092 }
8093 return 0;
8094}
8095#endif
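/*
 * Attach the codec_mm handles (mmu box for scatter frames, bmmu box for
 * linear buffers) to the vframe so the consumer can keep the memory pinned.
 */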
8096static void update_vf_memhandle(struct hevc_state_s *hevc,
8097 struct vframe_s *vf, struct PIC_s *pic)
8098{
8099 if (pic->index < 0) {
8100 vf->mem_handle = NULL;
8101 vf->mem_head_handle = NULL;
8102 } else if (vf->type & VIDTYPE_SCATTER) {
8103 vf->mem_handle =
8104 decoder_mmu_box_get_mem_handle(
8105 hevc->mmu_box, pic->index);
8106 vf->mem_head_handle =
8107 decoder_bmmu_box_get_mem_handle(
8108 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8109 } else {
8110 vf->mem_handle =
8111 decoder_bmmu_box_get_mem_handle(
8112 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8113 vf->mem_head_handle = NULL;
8114 /*vf->mem_head_handle =
8115 decoder_bmmu_box_get_mem_handle(
8116 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8117 }
8118 return;
8119}
8120
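/*
 * Collect per-frame QoS statistics (frame type, size, pts and the
 * mv/qp/skip ranges) and report them through vdec_fill_frame_info().
 */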
8121static void fill_frame_info(struct hevc_state_s *hevc,
8122 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8123{
8124 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8125 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8126 vframe_qos->type = 4;
8127 else if (pic->slice_type == I_SLICE)
8128 vframe_qos->type = 1;
8129 else if (pic->slice_type == P_SLICE)
8130 vframe_qos->type = 2;
8131 else if (pic->slice_type == B_SLICE)
8132 vframe_qos->type = 3;
8133/*
8134#define SHOW_QOS_INFO
8135*/
8136 vframe_qos->size = framesize;
8137 vframe_qos->pts = pts;
8138#ifdef SHOW_QOS_INFO
8139 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8140#endif
8141
8142
8143 vframe_qos->max_mv = pic->max_mv;
8144 vframe_qos->avg_mv = pic->avg_mv;
8145 vframe_qos->min_mv = pic->min_mv;
8146#ifdef SHOW_QOS_INFO
8147 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8148 vframe_qos->max_mv,
8149 vframe_qos->avg_mv,
8150 vframe_qos->min_mv);
8151#endif
8152
8153 vframe_qos->max_qp = pic->max_qp;
8154 vframe_qos->avg_qp = pic->avg_qp;
8155 vframe_qos->min_qp = pic->min_qp;
8156#ifdef SHOW_QOS_INFO
8157 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8158 vframe_qos->max_qp,
8159 vframe_qos->avg_qp,
8160 vframe_qos->min_qp);
8161#endif
8162
8163 vframe_qos->max_skip = pic->max_skip;
8164 vframe_qos->avg_skip = pic->avg_skip;
8165 vframe_qos->min_skip = pic->min_skip;
8166#ifdef SHOW_QOS_INFO
8167 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8168 vframe_qos->max_skip,
8169 vframe_qos->avg_skip,
8170 vframe_qos->min_skip);
8171#endif
8172
8173 vframe_qos->num++;
8174
8175 if (hevc->frameinfo_enable)
8176 vdec_fill_frame_info(vframe_qos, 1);
8177}
8178
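/*
 * Build a vframe for a decoded PIC: pts lookup, canvas/compressed buffer
 * addresses, bit depth, conformance-window crop and pic_struct splitting
 * for interlaced content, then queue it to display_q and notify the
 * receiver.
 */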
8179static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8180{
8181#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8182 struct vdec_s *vdec = hw_to_vdec(hevc);
8183#endif
8184 struct vframe_s *vf = NULL;
8185 int stream_offset = pic->stream_offset;
8186 unsigned short slice_type = pic->slice_type;
8187 u32 frame_size;
8188
8189 if (force_disp_pic_index & 0x100) {
8190 /*recycle directly*/
8191 pic->output_ready = 0;
8192 return -1;
8193 }
8194 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8195 hevc_print(hevc, 0,
8196 "fatal error, no available buffer slot.");
8197 return -1;
8198 }
8199 display_frame_count[hevc->index]++;
8200 if (vf) {
8201 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8202 "%s: pic index 0x%x\n",
8203 __func__, pic->index);*/
8204
8205 if (hevc->is_used_v4l) {
8206 vf->v4l_mem_handle
8207 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8208 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
8209 "[%d] %s(), v4l mem handle: 0x%lx\n",
8210 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id,
8211 __func__, vf->v4l_mem_handle);
8212 }
8213
8214#ifdef MULTI_INSTANCE_SUPPORT
8215 if (vdec_frame_based(hw_to_vdec(hevc))) {
8216 vf->pts = pic->pts;
8217 vf->pts_us64 = pic->pts64;
8218 }
8219 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8220 stream_offset, &vf->pts, 0) != 0) { */
8221#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8222 else if (vdec->master == NULL) {
8223#else
8224 else {
8225#endif
8226#endif
8227 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8228 "call pts_lookup_offset_us64(0x%x)\n",
8229 stream_offset);
8230 if (pts_lookup_offset_us64
8231 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8232 &frame_size, 0,
8233 &vf->pts_us64) != 0) {
8234#ifdef DEBUG_PTS
8235 hevc->pts_missed++;
8236#endif
8237 vf->pts = 0;
8238 vf->pts_us64 = 0;
8239 }
8240#ifdef DEBUG_PTS
8241 else
8242 hevc->pts_hit++;
8243#endif
8244#ifdef MULTI_INSTANCE_SUPPORT
8245#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8246 } else {
8247 vf->pts = 0;
8248 vf->pts_us64 = 0;
8249 }
8250#else
8251 }
8252#endif
8253#endif
8254 if (pts_unstable && (hevc->frame_dur > 0))
8255 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8256
8257 fill_frame_info(hevc, pic, frame_size, vf->pts);
8258
8259 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8260 && hevc->get_frame_dur) {
8261 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8262
8263 if (pts_diff < 0) {
8264 hevc->pts_mode_switching_count++;
8265 hevc->pts_mode_recovery_count = 0;
8266
8267 if (hevc->pts_mode_switching_count >=
8268 PTS_MODE_SWITCHING_THRESHOLD) {
8269 hevc->pts_mode =
8270 PTS_NONE_REF_USE_DURATION;
8271 hevc_print(hevc, 0,
8272 "HEVC: switch to n_d mode.\n");
8273 }
8274
8275 } else {
8276 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8277
8278 hevc->pts_mode_recovery_count++;
8279 if (hevc->pts_mode_recovery_count > p) {
8280 hevc->pts_mode_switching_count = 0;
8281 hevc->pts_mode_recovery_count = 0;
8282 }
8283 }
8284 }
8285
8286 if (vf->pts != 0)
8287 hevc->last_lookup_pts = vf->pts;
8288
8289 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8290 && (slice_type != 2))
8291 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8292 hevc->last_pts = vf->pts;
8293
8294 if (vf->pts_us64 != 0)
8295 hevc->last_lookup_pts_us64 = vf->pts_us64;
8296
8297 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8298 && (slice_type != 2)) {
8299 vf->pts_us64 =
8300 hevc->last_pts_us64 +
8301 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8302 }
8303 hevc->last_pts_us64 = vf->pts_us64;
8304 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8305 hevc_print(hevc, 0,
8306 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8307 vf->pts, vf->pts_us64);
8308 }
8309
8310 /*
8311 *vf->index:
8312 *(1) vf->type is VIDTYPE_PROGRESSIVE
8313 * and vf->canvas0Addr != vf->canvas1Addr,
8314 * vf->index[7:0] is the index of top pic
8315 * vf->index[15:8] is the index of bot pic
8316 *(2) other cases,
8317 * only vf->index[7:0] is used
8318 * vf->index[15:8] == 0xff
8319 */
8320 vf->index = 0xff00 | pic->index;
8321#if 1
8322/*SUPPORT_10BIT*/
8323 if (pic->double_write_mode & 0x10) {
8324 /* double write only */
8325 vf->compBodyAddr = 0;
8326 vf->compHeadAddr = 0;
8327 } else {
8328
8329 if (hevc->mmu_enable) {
8330 vf->compBodyAddr = 0;
8331 vf->compHeadAddr = pic->header_adr;
8332 } else {
8333 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8334 vf->compHeadAddr = pic->mc_y_adr +
8335 pic->losless_comp_body_size;
8336 vf->mem_head_handle = NULL;
8337 }
8338
8339 /*head adr*/
8340 vf->canvas0Addr = vf->canvas1Addr = 0;
8341 }
8342 if (pic->double_write_mode) {
8343 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8344 vf->type |= VIDTYPE_VIU_NV21;
8345 if ((pic->double_write_mode == 3) &&
8346 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8347 vf->type |= VIDTYPE_COMPRESS;
8348 if (hevc->mmu_enable)
8349 vf->type |= VIDTYPE_SCATTER;
8350 }
8351#ifdef MULTI_INSTANCE_SUPPORT
8352 if (hevc->m_ins_flag &&
8353 (get_dbg_flag(hevc)
8354 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8355 vf->canvas0Addr = vf->canvas1Addr = -1;
8356 vf->plane_num = 2;
8357 vf->canvas0_config[0] =
8358 pic->canvas_config[0];
8359 vf->canvas0_config[1] =
8360 pic->canvas_config[1];
8361
8362 vf->canvas1_config[0] =
8363 pic->canvas_config[0];
8364 vf->canvas1_config[1] =
8365 pic->canvas_config[1];
8366
8367 } else
8368#endif
8369 vf->canvas0Addr = vf->canvas1Addr
8370 = spec2canvas(pic);
8371 } else {
8372 vf->canvas0Addr = vf->canvas1Addr = 0;
8373 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8374 if (hevc->mmu_enable)
8375 vf->type |= VIDTYPE_SCATTER;
8376 }
8377 vf->compWidth = pic->width;
8378 vf->compHeight = pic->height;
8379 update_vf_memhandle(hevc, vf, pic);
8380 switch (pic->bit_depth_luma) {
8381 case 9:
8382 vf->bitdepth = BITDEPTH_Y9;
8383 break;
8384 case 10:
8385 vf->bitdepth = BITDEPTH_Y10;
8386 break;
8387 default:
8388 vf->bitdepth = BITDEPTH_Y8;
8389 break;
8390 }
8391 switch (pic->bit_depth_chroma) {
8392 case 9:
8393 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8394 break;
8395 case 10:
8396 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8397 break;
8398 default:
8399 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8400 break;
8401 }
8402 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8403 vf->bitdepth =
8404 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8405 if (pic->mem_saving_mode == 1)
8406 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8407#else
8408 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8409 vf->type |= VIDTYPE_VIU_NV21;
8410 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8411#endif
8412 set_frame_info(hevc, vf, pic);
8413 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8414 /* hevc_print(hevc, 0,
8415 "aaa: %d/%d, %d/%d\n",
8416 vf->width,vf->height, pic->width, pic->height); */
8417 vf->width = pic->width;
8418 vf->height = pic->height;
8419
8420 if (force_w_h != 0) {
8421 vf->width = (force_w_h >> 16) & 0xffff;
8422 vf->height = force_w_h & 0xffff;
8423 }
8424 if (force_fps & 0x100) {
8425 u32 rate = force_fps & 0xff;
8426
8427 if (rate)
8428 vf->duration = 96000/rate;
8429 else
8430 vf->duration = 0;
8431 }
8432 if (force_fps & 0x200) {
8433 vf->pts = 0;
8434 vf->pts_us64 = 0;
8435 }
8436 /*
8437 * !!! TODO:
8438 * need to move the code below to get_new_pic();
8439 * hevc->xxx can only be used by the currently decoded pic
8440 */
8441 if (pic->conformance_window_flag &&
8442 (get_dbg_flag(hevc) &
8443 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
8444 unsigned int SubWidthC, SubHeightC;
8445
8446 switch (pic->chroma_format_idc) {
8447 case 1:
8448 SubWidthC = 2;
8449 SubHeightC = 2;
8450 break;
8451 case 2:
8452 SubWidthC = 2;
8453 SubHeightC = 1;
8454 break;
8455 default:
8456 SubWidthC = 1;
8457 SubHeightC = 1;
8458 break;
8459 }
8460 vf->width -= SubWidthC *
8461 (pic->conf_win_left_offset +
8462 pic->conf_win_right_offset);
8463 vf->height -= SubHeightC *
8464 (pic->conf_win_top_offset +
8465 pic->conf_win_bottom_offset);
8466
8467 vf->compWidth -= SubWidthC *
8468 (pic->conf_win_left_offset +
8469 pic->conf_win_right_offset);
8470 vf->compHeight -= SubHeightC *
8471 (pic->conf_win_top_offset +
8472 pic->conf_win_bottom_offset);
8473
8474 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
8475 hevc_print(hevc, 0,
8476 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
8477 pic->chroma_format_idc,
8478 pic->conf_win_left_offset,
8479 pic->conf_win_right_offset,
8480 pic->conf_win_top_offset,
8481 pic->conf_win_bottom_offset,
8482 vf->width, vf->height, vf->compWidth, vf->compHeight);
8483 }
8484
8485 vf->width = vf->width /
8486 get_double_write_ratio(hevc, pic->double_write_mode);
8487 vf->height = vf->height /
8488 get_double_write_ratio(hevc, pic->double_write_mode);
8489#ifdef HEVC_PIC_STRUCT_SUPPORT
8490 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
8491 struct vframe_s *vf2;
8492
8493 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8494 hevc_print(hevc, 0,
8495 "pic_struct = %d index 0x%x\n",
8496 pic->pic_struct,
8497 pic->index);
8498
8499 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8500 hevc_print(hevc, 0,
8501 "fatal error, no available buffer slot.");
8502 return -1;
8503 }
8504 pic->vf_ref = 2;
8505 vf->duration = vf->duration>>1;
8506 memcpy(vf2, vf, sizeof(struct vframe_s));
8507
8508 if (pic->pic_struct == 3) {
8509 vf->type = VIDTYPE_INTERLACE_TOP
8510 | VIDTYPE_VIU_NV21;
8511 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8512 | VIDTYPE_VIU_NV21;
8513 } else {
8514 vf->type = VIDTYPE_INTERLACE_BOTTOM
8515 | VIDTYPE_VIU_NV21;
8516 vf2->type = VIDTYPE_INTERLACE_TOP
8517 | VIDTYPE_VIU_NV21;
8518 }
8519 hevc->vf_pre_count++;
8520 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8521 kfifo_put(&hevc->display_q,
8522 (const struct vframe_s *)vf);
8523 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8524 hevc->vf_pre_count++;
8525 kfifo_put(&hevc->display_q,
8526 (const struct vframe_s *)vf2);
8527 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8528 } else if (pic->pic_struct == 5
8529 || pic->pic_struct == 6) {
8530 struct vframe_s *vf2, *vf3;
8531
8532 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8533 hevc_print(hevc, 0,
8534 "pic_struct = %d index 0x%x\n",
8535 pic->pic_struct,
8536 pic->index);
8537
8538 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8539 hevc_print(hevc, 0,
8540 "fatal error, no available buffer slot.");
8541 return -1;
8542 }
8543 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
8544 hevc_print(hevc, 0,
8545 "fatal error, no available buffer slot.");
8546 return -1;
8547 }
8548 pic->vf_ref = 3;
8549 vf->duration = vf->duration/3;
8550 memcpy(vf2, vf, sizeof(struct vframe_s));
8551 memcpy(vf3, vf, sizeof(struct vframe_s));
8552
8553 if (pic->pic_struct == 5) {
8554 vf->type = VIDTYPE_INTERLACE_TOP
8555 | VIDTYPE_VIU_NV21;
8556 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8557 | VIDTYPE_VIU_NV21;
8558 vf3->type = VIDTYPE_INTERLACE_TOP
8559 | VIDTYPE_VIU_NV21;
8560 } else {
8561 vf->type = VIDTYPE_INTERLACE_BOTTOM
8562 | VIDTYPE_VIU_NV21;
8563 vf2->type = VIDTYPE_INTERLACE_TOP
8564 | VIDTYPE_VIU_NV21;
8565 vf3->type = VIDTYPE_INTERLACE_BOTTOM
8566 | VIDTYPE_VIU_NV21;
8567 }
8568 hevc->vf_pre_count++;
8569 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8570 kfifo_put(&hevc->display_q,
8571 (const struct vframe_s *)vf);
8572 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8573 hevc->vf_pre_count++;
8574 kfifo_put(&hevc->display_q,
8575 (const struct vframe_s *)vf2);
8576 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8577 hevc->vf_pre_count++;
8578 kfifo_put(&hevc->display_q,
8579 (const struct vframe_s *)vf3);
8580 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
8581
8582 } else if (pic->pic_struct == 9
8583 || pic->pic_struct == 10) {
8584 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8585 hevc_print(hevc, 0,
8586 "pic_struct = %d index 0x%x\n",
8587 pic->pic_struct,
8588 pic->index);
8589
8590 pic->vf_ref = 1;
8591 /* process previous pending vf*/
8592 process_pending_vframe(hevc,
8593 pic, (pic->pic_struct == 9));
8594
8595 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8596 /* process current vf */
8597 kfifo_put(&hevc->pending_q,
8598 (const struct vframe_s *)vf);
8599 vf->height <<= 1;
8600 if (pic->pic_struct == 9) {
8601 vf->type = VIDTYPE_INTERLACE_TOP
8602 | VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8603 process_pending_vframe(hevc,
8604 hevc->pre_bot_pic, 0);
8605 } else {
8606 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8607 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8608 vf->index = (pic->index << 8) | 0xff;
8609 process_pending_vframe(hevc,
8610 hevc->pre_top_pic, 1);
8611 }
8612
8613 /**/
8614 if (pic->pic_struct == 9)
8615 hevc->pre_top_pic = pic;
8616 else
8617 hevc->pre_bot_pic = pic;
8618
8619 } else if (pic->pic_struct == 11
8620 || pic->pic_struct == 12) {
8621 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8622 hevc_print(hevc, 0,
8623 "pic_struct = %d index 0x%x\n",
8624 pic->pic_struct,
8625 pic->index);
8626 pic->vf_ref = 1;
8627 /* process previous pending vf*/
8628 process_pending_vframe(hevc, pic,
8629 (pic->pic_struct == 11));
8630
8631 /* put current into pending q */
8632 vf->height <<= 1;
8633 if (pic->pic_struct == 11)
8634 vf->type = VIDTYPE_INTERLACE_TOP |
8635 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8636 else {
8637 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8638 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8639 vf->index = (pic->index << 8) | 0xff;
8640 }
8641 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8642 kfifo_put(&hevc->pending_q,
8643 (const struct vframe_s *)vf);
8644
8645 /**/
8646 if (pic->pic_struct == 11)
8647 hevc->pre_top_pic = pic;
8648 else
8649 hevc->pre_bot_pic = pic;
8650
8651 } else {
8652 pic->vf_ref = 1;
8653
8654 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8655 hevc_print(hevc, 0,
8656 "pic_struct = %d index 0x%x\n",
8657 pic->pic_struct,
8658 pic->index);
8659
8660 switch (pic->pic_struct) {
8661 case 7:
8662 vf->duration <<= 1;
8663 break;
8664 case 8:
8665 vf->duration = vf->duration * 3;
8666 break;
8667 case 1:
8668 vf->height <<= 1;
8669 vf->type = VIDTYPE_INTERLACE_TOP |
8670 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8671 process_pending_vframe(hevc, pic, 1);
8672 hevc->pre_top_pic = pic;
8673 break;
8674 case 2:
8675 vf->height <<= 1;
8676 vf->type = VIDTYPE_INTERLACE_BOTTOM
8677 | VIDTYPE_VIU_NV21
8678 | VIDTYPE_VIU_FIELD;
8679 process_pending_vframe(hevc, pic, 0);
8680 hevc->pre_bot_pic = pic;
8681 break;
8682 }
8683 hevc->vf_pre_count++;
8684 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8685 kfifo_put(&hevc->display_q,
8686 (const struct vframe_s *)vf);
8687 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8688 }
8689#else
8690 vf->type_original = vf->type;
8691 pic->vf_ref = 1;
8692 hevc->vf_pre_count++;
8693 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8694 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
8695 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8696
8697 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8698 hevc_print(hevc, 0,
8699 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8700 __func__, vf->type, vf->index,
8701 get_pic_poc(hevc, vf->index & 0xff),
8702 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8703 vf->pts, vf->pts_us64,
8704 vf->duration);
8705#endif
8706#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8707 /*count info*/
8708 vdec_count_info(gvs, 0, stream_offset);
8709#endif
8710 hw_to_vdec(hevc)->vdec_fps_detec(hw_to_vdec(hevc)->id);
8711 if (without_display_mode == 0) {
8712 vf_notify_receiver(hevc->provider_name,
8713 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
8714 }
8715 else
8716 vh265_vf_put(vh265_vf_get(vdec), vdec);
8717 }
8718
8719 return 0;
8720}
8721
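/*
 * For the v4l2 path, queue an empty EOS vframe (VFRAME_FLAG_EMPTY_FRAME_V4L)
 * so the receiver can drain at end of stream.
 */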
8722static int notify_v4l_eos(struct vdec_s *vdec)
8723{
8724 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
8725 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
8726 struct vframe_s *vf = NULL;
8727 struct vdec_fb *fb = NULL;
8728
8729 if (hw->is_used_v4l && hw->eos) {
8730 if (kfifo_get(&hw->newframe_q, &vf) == 0 || vf == NULL) {
8731 hevc_print(hw, 0,
8732 "%s fatal error, no available buffer slot.\n",
8733 __func__);
8734 return -1;
8735 }
8736
8737 if (v4l_get_fb(hw->v4l2_ctx, &fb)) {
8738 pr_err("[%d] get fb fail.\n", ctx->id);
8739 return -1;
8740 }
8741
8742 vf->timestamp = ULONG_MAX;
8743 vf->v4l_mem_handle = (unsigned long)fb;
8744 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
8745
8746 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
8747 vf_notify_receiver(vdec->vf_provider_name,
8748 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
8749
8750 pr_info("[%d] H265 EOS notify.\n", ctx->id);
8751 }
8752
8753 return 0;
8754}
8755
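/*
 * Parse an SEI payload streamed through the HEVC shift registers;
 * currently only payload_type 137 (mastering display colour volume)
 * is stored, any remaining bytes are skipped.
 */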
8756static void process_nal_sei(struct hevc_state_s *hevc,
8757 int payload_type, int payload_size)
8758{
8759 unsigned short data;
8760
8761 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8762 hevc_print(hevc, 0,
8763 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
8764 payload_type, payload_size);
8765
8766 if (payload_type == 137) {
8767 int i, j;
8768 /* MASTERING_DISPLAY_COLOUR_VOLUME */
8769 if (payload_size >= 24) {
8770 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8771 hevc_print(hevc, 0,
8772 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
8773 for (i = 0; i < 3; i++) {
8774 for (j = 0; j < 2; j++) {
8775 data =
8776 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8777 hevc->primaries[i][j] = data;
8778 WRITE_HREG(HEVC_SHIFT_COMMAND,
8779 (1<<7)|16);
8780 if (get_dbg_flag(hevc) &
8781 H265_DEBUG_PRINT_SEI)
8782 hevc_print(hevc, 0,
8783 "\t\tprimaries[%1d][%1d] = %04x\n",
8784 i, j, hevc->primaries[i][j]);
8785 }
8786 }
8787 for (i = 0; i < 2; i++) {
8788 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8789 hevc->white_point[i] = data;
8790 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
8791 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8792 hevc_print(hevc, 0,
8793 "\t\twhite_point[%1d] = %04x\n",
8794 i, hevc->white_point[i]);
8795 }
8796 for (i = 0; i < 2; i++) {
8797 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8798 hevc->luminance[i] = data << 16;
8799 WRITE_HREG(HEVC_SHIFT_COMMAND,
8800 (1<<7)|16);
8801 data =
8802 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8803 hevc->luminance[i] |= data;
8804 WRITE_HREG(HEVC_SHIFT_COMMAND,
8805 (1<<7)|16);
8806 if (get_dbg_flag(hevc) &
8807 H265_DEBUG_PRINT_SEI)
8808 hevc_print(hevc, 0,
8809 "\t\tluminance[%1d] = %08x\n",
8810 i, hevc->luminance[i]);
8811 }
8812 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
8813 }
8814 payload_size -= 24;
8815 while (payload_size > 0) {
8816 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
8817 payload_size--;
8818 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
8819 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
8820 }
8821 }
8822}
8823
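/*
 * Error recovery for the single-instance (stream based) path: stop the
 * hardware, reset the stream FIFO while preserving the read pointer and
 * the accumulated shift byte count, reprogram the work space and restart
 * the firmware in NAL-search mode.
 */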
8824static int hevc_recover(struct hevc_state_s *hevc)
8825{
8826 int ret = -1;
8827 u32 rem;
8828 u64 shift_byte_count64;
8829 unsigned int hevc_shift_byte_count;
8830 unsigned int hevc_stream_start_addr;
8831 unsigned int hevc_stream_end_addr;
8832 unsigned int hevc_stream_rd_ptr;
8833 unsigned int hevc_stream_wr_ptr;
8834 unsigned int hevc_stream_control;
8835 unsigned int hevc_stream_fifo_ctl;
8836 unsigned int hevc_stream_buf_size;
8837
8838 mutex_lock(&vh265_mutex);
8839#if 0
8840 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
8841 int ii;
8842
8843 for (ii = 0; ii < 4; ii++)
8844 hevc_print(hevc, 0,
8845 "%04x ", hevc->debug_ptr[i + 3 - ii]);
8846 if (((i + ii) & 0xf) == 0)
8847 hevc_print(hevc, 0, "\n");
8848 }
8849#endif
8850#define ES_VID_MAN_RD_PTR (1<<0)
8851 if (!hevc->init_flag) {
8852 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
8853 mutex_unlock(&vh265_mutex);
8854 return ret;
8855 }
8856 amhevc_stop();
8857 msleep(20);
8858 ret = 0;
8859 /* reset */
8860 WRITE_PARSER_REG(PARSER_VIDEO_RP, READ_VREG(HEVC_STREAM_RD_PTR));
8861 SET_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
8862
8863 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
8864 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
8865 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
8866 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
8867 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
8868 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
8869 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
8870
8871 /* The HEVC stream buffer will be reset and restarted
8872 * from the current hevc_stream_rd_ptr position.
8873 */
8874 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
8875 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
8876 if ((hevc->shift_byte_count_lo & (1 << 31))
8877 && ((hevc_shift_byte_count & (1 << 31)) == 0))
8878 hevc->shift_byte_count_hi++;
8879
8880 hevc->shift_byte_count_lo = hevc_shift_byte_count;
8881 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
8882 hevc->shift_byte_count_lo;
8883 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
8884 shift_byte_count64 -= rem;
8885 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
8886
8887 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
8888 shift_byte_count64 += hevc_stream_buf_size;
8889
8890 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
8891 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
8892
8893 WRITE_VREG(DOS_SW_RESET3,
8894 /* (1<<2)| */
8895 (1 << 3) | (1 << 4) | (1 << 8) |
8896 (1 << 11) | (1 << 12) | (1 << 14)
8897 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
8898 WRITE_VREG(DOS_SW_RESET3, 0);
8899
8900 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
8901 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
8902 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
8903 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
8904 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
8905 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
8906 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
8907
8908 hevc_config_work_space_hw(hevc);
8909 decoder_hw_reset();
8910
8911 hevc->have_vps = 0;
8912 hevc->have_sps = 0;
8913 hevc->have_pps = 0;
8914
8915 hevc->have_valid_start_slice = 0;
8916
8917 if (get_double_write_mode(hevc) & 0x10)
8918 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
8919 0x1 << 31 /* Enable NV21 reference read mode for MC */
8920 );
8921
8922 WRITE_VREG(HEVC_WAIT_FLAG, 1);
8923 /* clear mailbox interrupt */
8924 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
8925 /* enable mailbox interrupt */
8926 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
8927 /* disable PSCALE for hardware sharing */
8928 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
8929
8930 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
8931
8932 WRITE_VREG(DEBUG_REG1, 0x0);
8933
8934 if ((error_handle_policy & 1) == 0) {
8935 if ((error_handle_policy & 4) == 0) {
8936 /* ucode auto mode, and do not check vps/sps/pps/idr */
8937 WRITE_VREG(NAL_SEARCH_CTL,
8938 0xc);
8939 } else {
8940 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
8941 }
8942 } else {
8943 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
8944 }
8945
8946 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
8947 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
8948 WRITE_VREG(NAL_SEARCH_CTL,
8949 READ_VREG(NAL_SEARCH_CTL)
8950 | ((parser_sei_enable & 0x7) << 17));
8951#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8952 WRITE_VREG(NAL_SEARCH_CTL,
8953 READ_VREG(NAL_SEARCH_CTL) |
8954 ((parser_dolby_vision_enable & 0x1) << 20));
8955#endif
8956 config_decode_mode(hevc);
8957 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
8958
8959 /* if (amhevc_loadmc(vh265_mc) < 0) { */
8960 /* amhevc_disable(); */
8961 /* return -EBUSY; */
8962 /* } */
8963#if 0
8964 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
8965 int ii;
8966
8967 for (ii = 0; ii < 4; ii++) {
8968 /* hevc->debug_ptr[i+3-ii]=ttt++; */
8969 hevc_print(hevc, 0,
8970 "%04x ", hevc->debug_ptr[i + 3 - ii]);
8971 }
8972 if (((i + ii) & 0xf) == 0)
8973 hevc_print(hevc, 0, "\n");
8974 }
8975#endif
8976 init_pic_list_hw(hevc);
8977
8978 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
8979 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
8980
8981#ifdef SWAP_HEVC_UCODE
8982 if (!tee_enabled() && hevc->is_swap &&
8983 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
8984 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
8985 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
8986 }
8987#endif
8988 amhevc_start();
8989
8990 /* skip, search next start code */
8991 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
8992 hevc->skip_flag = 1;
8993#ifdef ERROR_HANDLE_DEBUG
8994 if (dbg_nal_skip_count & 0x20000) {
8995 dbg_nal_skip_count &= ~0x20000;
8996 mutex_unlock(&vh265_mutex);
8997 return ret;
8998 }
8999#endif
9000 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9001 /* Interrupt Amrisc to execute */
9002 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9003#ifdef MULTI_INSTANCE_SUPPORT
9004 if (!hevc->m_ins_flag)
9005#endif
9006 hevc->first_pic_after_recover = 1;
9007 mutex_unlock(&vh265_mutex);
9008 return ret;
9009}
9010
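/* Debug helper: hex-dump the prefix and suffix aux (SEI) buffers. */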
9011static void dump_aux_buf(struct hevc_state_s *hevc)
9012{
9013 int i;
9014 unsigned short *aux_adr =
9015 (unsigned short *)
9016 hevc->aux_addr;
9017 unsigned int aux_size =
9018 (READ_VREG(HEVC_AUX_DATA_SIZE)
9019 >> 16) << 4;
9020
9021 if (hevc->prefix_aux_size > 0) {
9022 hevc_print(hevc, 0,
9023 "prefix aux: (size %d)\n",
9024 aux_size);
9025 for (i = 0; i <
9026 (aux_size >> 1); i++) {
9027 hevc_print_cont(hevc, 0,
9028 "%04x ",
9029 *(aux_adr + i));
9030 if (((i + 1) & 0xf)
9031 == 0)
9032 hevc_print_cont(hevc,
9033 0, "\n");
9034 }
9035 }
9036 if (hevc->suffix_aux_size > 0) {
9037 aux_adr = (unsigned short *)
9038 (hevc->aux_addr +
9039 hevc->prefix_aux_size);
9040 aux_size =
9041 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9042 << 4;
9043 hevc_print(hevc, 0,
9044 "suffix aux: (size %d)\n",
9045 aux_size);
9046 for (i = 0; i <
9047 (aux_size >> 1); i++) {
9048 hevc_print_cont(hevc, 0,
9049 "%04x ", *(aux_adr + i));
9050 if (((i + 1) & 0xf) == 0)
9051 hevc_print_cont(hevc, 0, "\n");
9052 }
9053 }
9054}
9055
9056#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
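/*
 * Copy Dolby Vision metadata from the aux buffer into the current PIC,
 * or into the base layer's current PIC when this instance decodes the
 * enhancement layer.
 */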
9057static void dolby_get_meta(struct hevc_state_s *hevc)
9058{
9059 struct vdec_s *vdec = hw_to_vdec(hevc);
9060
9061 if (get_dbg_flag(hevc) &
9062 H265_DEBUG_BUFMGR_MORE)
9063 dump_aux_buf(hevc);
9064 if (vdec->dolby_meta_with_el || vdec->slave) {
9065 set_aux_data(hevc,
9066 hevc->cur_pic, 0, 0);
9067 } else if (vdec->master) {
9068 struct hevc_state_s *hevc_ba =
9069 (struct hevc_state_s *)
9070 vdec->master->private;
9071 /*do not use hevc_ba*/
9072 set_aux_data(hevc,
9073 hevc_ba->cur_pic,
9074 0, 1);
9075 set_aux_data(hevc,
9076 hevc->cur_pic, 0, 2);
9077 }
9078}
9079#endif
9080
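/* Latch start_decoding_flag and rps_set_id from the HEVC_DECODE_INFO register. */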
9081static void read_decode_info(struct hevc_state_s *hevc)
9082{
9083 uint32_t decode_info =
9084 READ_HREG(HEVC_DECODE_INFO);
9085 hevc->start_decoding_flag |=
9086 (decode_info & 0xff);
9087 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9088}
9089
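/*
 * Threaded half of the decoder interrupt: dispatches on dec_status
 * (picture done, buffer empty, NAL search done, SEI data, ...) and drives
 * error handling, Dolby Vision layer switching and the decode work queue.
 */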
9090static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9091{
9092 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9093 unsigned int dec_status = hevc->dec_status;
9094 int i, ret;
9095
9096#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9097 struct vdec_s *vdec = hw_to_vdec(hevc);
9098#endif
9099
9100 if (hevc->eos)
9101 return IRQ_HANDLED;
9102 if (
9103#ifdef MULTI_INSTANCE_SUPPORT
9104 (!hevc->m_ins_flag) &&
9105#endif
9106 hevc->error_flag == 1) {
9107 if ((error_handle_policy & 0x10) == 0) {
9108 if (hevc->cur_pic) {
9109 int current_lcu_idx =
9110 READ_VREG(HEVC_PARSER_LCU_START)
9111 & 0xffffff;
9112 if (current_lcu_idx <
9113 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9114 hevc->cur_pic->error_mark = 1;
9115
9116 }
9117 }
9118 if ((error_handle_policy & 1) == 0) {
9119 hevc->error_skip_nal_count = 1;
9120 /* manually search NALs, skip error_skip_nal_count
9121 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9122 */
9123 WRITE_VREG(NAL_SEARCH_CTL,
9124 (error_skip_nal_count << 4) | 0x1);
9125 } else {
9126 hevc->error_skip_nal_count = error_skip_nal_count;
9127 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9128 }
9129 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9130#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9131 || vdec->master
9132 || vdec->slave
9133#endif
9134 ) {
9135 WRITE_VREG(NAL_SEARCH_CTL,
9136 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9137 }
9138 WRITE_VREG(NAL_SEARCH_CTL,
9139 READ_VREG(NAL_SEARCH_CTL)
9140 | ((parser_sei_enable & 0x7) << 17));
9141#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9142 WRITE_VREG(NAL_SEARCH_CTL,
9143 READ_VREG(NAL_SEARCH_CTL) |
9144 ((parser_dolby_vision_enable & 0x1) << 20));
9145#endif
9146 config_decode_mode(hevc);
9147 /* search new nal */
9148 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9149 /* Interrupt Amrisc to execute */
9150 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9151
9152 /* hevc_print(hevc, 0,
9153 *"%s: error handle\n", __func__);
9154 */
9155 hevc->error_flag = 2;
9156 return IRQ_HANDLED;
9157 } else if (
9158#ifdef MULTI_INSTANCE_SUPPORT
9159 (!hevc->m_ins_flag) &&
9160#endif
9161 hevc->error_flag == 3) {
9162 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9163 hevc_recover(hevc);
9164 hevc->error_flag = 0;
9165
9166 if ((error_handle_policy & 0x10) == 0) {
9167 if (hevc->cur_pic) {
9168 int current_lcu_idx =
9169 READ_VREG(HEVC_PARSER_LCU_START)
9170 & 0xffffff;
9171 if (current_lcu_idx <
9172 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9173 hevc->cur_pic->error_mark = 1;
9174
9175 }
9176 }
9177 if ((error_handle_policy & 1) == 0) {
9178 /* need to skip some data when
9179 * error_flag == 3 is triggered,
9180 */
9181 /* to avoid hevc_recover() being called
9182 * many times at the same bitstream position
9183 */
9184 hevc->error_skip_nal_count = 1;
9185 /* manually search NALs, skip error_skip_nal_count
9186 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9187 */
9188 WRITE_VREG(NAL_SEARCH_CTL,
9189 (error_skip_nal_count << 4) | 0x1);
9190 }
9191
9192 if ((error_handle_policy & 0x2) == 0) {
9193 hevc->have_vps = 1;
9194 hevc->have_sps = 1;
9195 hevc->have_pps = 1;
9196 }
9197 return IRQ_HANDLED;
9198 }
9199 if (!hevc->m_ins_flag) {
9200 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9201 if ((hevc->shift_byte_count_lo & (1 << 31))
9202 && ((i & (1 << 31)) == 0))
9203 hevc->shift_byte_count_hi++;
9204 hevc->shift_byte_count_lo = i;
9205 }
9206#ifdef MULTI_INSTANCE_SUPPORT
9207 mutex_lock(&hevc->chunks_mutex);
9208 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9209 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9210 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9211 && (hevc->chunk)) {
9212 hevc->cur_pic->pts = hevc->chunk->pts;
9213 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9214 }
9215 mutex_unlock(&hevc->chunks_mutex);
9216
9217 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9218 dec_status == HEVC_DECODE_BUFEMPTY2) {
9219 if (hevc->m_ins_flag) {
9220 read_decode_info(hevc);
9221 if (vdec_frame_based(hw_to_vdec(hevc))) {
9222 hevc->empty_flag = 1;
9223 goto pic_done;
9224 } else {
9225 if (
9226#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9227 vdec->master ||
9228 vdec->slave ||
9229#endif
9230 (data_resend_policy & 0x1)) {
9231 hevc->dec_result = DEC_RESULT_AGAIN;
9232 amhevc_stop();
9233 restore_decode_state(hevc);
9234 } else
9235 hevc->dec_result = DEC_RESULT_GET_DATA;
9236 }
9237 reset_process_time(hevc);
9238 vdec_schedule_work(&hevc->work);
9239 }
9240 return IRQ_HANDLED;
9241 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9242 (dec_status == HEVC_NAL_DECODE_DONE)
9243 ) {
9244 if (hevc->m_ins_flag) {
9245 read_decode_info(hevc);
9246 if (vdec_frame_based(hw_to_vdec(hevc))) {
9247 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9248 hevc->empty_flag = 1;
9249 goto pic_done;
9250 } else {
9251 hevc->dec_result = DEC_RESULT_AGAIN;
9252 amhevc_stop();
9253 restore_decode_state(hevc);
9254 }
9255
9256 reset_process_time(hevc);
9257 vdec_schedule_work(&hevc->work);
9258 }
9259
9260 return IRQ_HANDLED;
9261 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9262 if (hevc->m_ins_flag) {
9263 struct PIC_s *pic;
9264 struct PIC_s *pic_display;
9265 int decoded_poc;
9266#ifdef DETREFILL_ENABLE
9267 if (hevc->is_swap &&
9268 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9269 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9270 && READ_VREG(HEVC_SAO_DBG_MODE0))
9271 hevc->delrefill_check = 2;
9272 }
9273#endif
9274 hevc->empty_flag = 0;
9275pic_done:
9276 if (input_frame_based(hw_to_vdec(hevc)) &&
9277 frmbase_cont_bitlevel != 0 &&
9278 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9279 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9280 > frmbase_cont_bitlevel)) {
9281 /* handle the case: multiple pictures in one packet */
9282 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9283 "%s has more data index= %d, size=0x%x shiftcnt=0x%x)\n",
9284 __func__,
9285 hevc->decode_idx, hevc->decode_size,
9286 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9287 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9288 start_process_time(hevc);
9289 return IRQ_HANDLED;
9290 }
9291
9292 read_decode_info(hevc);
9293 get_picture_qos_info(hevc);
9294#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9295 hevc->start_parser_type = 0;
9296 hevc->switch_dvlayer_flag = 0;
9297#endif
9298 hevc->decoded_poc = hevc->curr_POC;
9299 hevc->decoding_pic = NULL;
9300 hevc->dec_result = DEC_RESULT_DONE;
9301#ifdef DETREFILL_ENABLE
9302 if (hevc->is_swap &&
9303 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9304 if (hevc->delrefill_check != 2)
9305#endif
9306
9307 amhevc_stop();
9308
9309 reset_process_time(hevc);
9310
9311 if (hevc->vf_pre_count == 0) {
9312 decoded_poc = hevc->curr_POC;
9313 pic = get_pic_by_POC(hevc, decoded_poc);
9314 if (pic && (pic->POC != INVALID_POC)) {
9315 /*PB skip control */
9316 if (pic->error_mark == 0
9317 && hevc->PB_skip_mode == 1) {
9318 /* start decoding after
9319 * the first I picture
9320 */
9321 hevc->ignore_bufmgr_error |= 0x1;
9322 }
9323 if (hevc->ignore_bufmgr_error & 1) {
9324 if (hevc->PB_skip_count_after_decoding > 0) {
9325 hevc->PB_skip_count_after_decoding--;
9326 } else {
9327 /* start displaying */
9328 hevc->ignore_bufmgr_error |= 0x2;
9329 }
9330 }
9331 if (hevc->mmu_enable
9332 && ((hevc->double_write_mode & 0x10) == 0)) {
9333 if (!hevc->m_ins_flag) {
9334 hevc->used_4k_num =
9335 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9336
9337 if ((!is_skip_decoding(hevc, pic)) &&
9338 (hevc->used_4k_num >= 0) &&
9339 (hevc->cur_pic->scatter_alloc
9340 == 1)) {
9341 hevc_print(hevc,
9342 H265_DEBUG_BUFMGR_MORE,
9343 "%s pic index %d scatter_alloc %d page_start %d\n",
9344 "decoder_mmu_box_free_idx_tail",
9345 hevc->cur_pic->index,
9346 hevc->cur_pic->scatter_alloc,
9347 hevc->used_4k_num);
9348 decoder_mmu_box_free_idx_tail(
9349 hevc->mmu_box,
9350 hevc->cur_pic->index,
9351 hevc->used_4k_num);
9352 hevc->cur_pic->scatter_alloc
9353 = 2;
9354 }
9355 hevc->used_4k_num = -1;
9356 }
9357 }
9358
9359 pic->output_mark = 1;
9360 pic->recon_mark = 1;
9361 }
9362force_output:
9363 pic_display = output_pic(hevc, 1);
9364
9365 if (pic_display) {
9366 if ((pic_display->error_mark &&
9367 ((hevc->ignore_bufmgr_error &
9368 0x2) == 0))
9369 || (get_dbg_flag(hevc) &
9370 H265_DEBUG_DISPLAY_CUR_FRAME)
9371 || (get_dbg_flag(hevc) &
9372 H265_DEBUG_NO_DISPLAY)) {
9373 pic_display->output_ready = 0;
9374 if (get_dbg_flag(hevc) &
9375 H265_DEBUG_BUFMGR) {
9376 hevc_print(hevc, 0,
9377 "[BM] Display: POC %d, ",
9378 pic_display->POC);
9379 hevc_print_cont(hevc, 0,
9380 "decoding index %d ==> ",
9381 pic_display->
9382 decode_idx);
9383 hevc_print_cont(hevc, 0,
9384 "Debug or err,recycle it\n");
9385 }
9386 } else {
9387 if (pic_display->
9388 slice_type != 2) {
9389 pic_display->output_ready = 0;
9390 } else {
9391 prepare_display_buf
9392 (hevc,
9393 pic_display);
9394 hevc->first_pic_flag = 1;
9395 }
9396 }
9397 }
9398 }
9399
9400 vdec_schedule_work(&hevc->work);
9401 }
9402
9403 return IRQ_HANDLED;
9404#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9405 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9406 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9407 if (hevc->m_ins_flag) {
9408 unsigned char next_parser_type =
9409 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
9410 read_decode_info(hevc);
9411
9412 if (vdec->slave &&
9413 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9414 /*cur is base, found enhance*/
9415 struct hevc_state_s *hevc_el =
9416 (struct hevc_state_s *)
9417 vdec->slave->private;
9418 hevc->switch_dvlayer_flag = 1;
9419 hevc->no_switch_dvlayer_count = 0;
9420 hevc_el->start_parser_type =
9421 next_parser_type;
9422 hevc_print(hevc, H265_DEBUG_DV,
9423 "switch (poc %d) to el\n",
9424 hevc->cur_pic ?
9425 hevc->cur_pic->POC :
9426 INVALID_POC);
9427 } else if (vdec->master &&
9428 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
9429 /*cur is enhance, found base*/
9430 struct hevc_state_s *hevc_ba =
9431 (struct hevc_state_s *)
9432 vdec->master->private;
9433 hevc->switch_dvlayer_flag = 1;
9434 hevc->no_switch_dvlayer_count = 0;
9435 hevc_ba->start_parser_type =
9436 next_parser_type;
9437 hevc_print(hevc, H265_DEBUG_DV,
9438 "switch (poc %d) to bl\n",
9439 hevc->cur_pic ?
9440 hevc->cur_pic->POC :
9441 INVALID_POC);
9442 } else {
9443 hevc->switch_dvlayer_flag = 0;
9444 hevc->start_parser_type =
9445 next_parser_type;
9446 hevc->no_switch_dvlayer_count++;
9447 hevc_print(hevc, H265_DEBUG_DV,
9448 "%s: no_switch_dvlayer_count = %d\n",
9449 vdec->master ? "el" : "bl",
9450 hevc->no_switch_dvlayer_count);
9451 if (vdec->slave &&
9452 dolby_el_flush_th != 0 &&
9453 hevc->no_switch_dvlayer_count >
9454 dolby_el_flush_th) {
9455 struct hevc_state_s *hevc_el =
9456 (struct hevc_state_s *)
9457 vdec->slave->private;
9458 struct PIC_s *el_pic;
9459 check_pic_decoded_error(hevc_el,
9460 hevc_el->pic_decoded_lcu_idx);
9461 el_pic = get_pic_by_POC(hevc_el,
9462 hevc_el->curr_POC);
9463 hevc_el->curr_POC = INVALID_POC;
9464 hevc_el->m_pocRandomAccess = MAX_INT;
9465 flush_output(hevc_el, el_pic);
9466 hevc_el->decoded_poc = INVALID_POC; /*
9467 flush_output has already been called */
9468 hevc_el->decoding_pic = NULL;
9469 hevc->no_switch_dvlayer_count = 0;
9470 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
9471 hevc_print(hevc, 0,
9472 "no el anymore, flush_output el\n");
9473 }
9474 }
9475 hevc->decoded_poc = hevc->curr_POC;
9476 hevc->decoding_pic = NULL;
9477 hevc->dec_result = DEC_RESULT_DONE;
9478 amhevc_stop();
9479 reset_process_time(hevc);
9480 if (aux_data_is_avaible(hevc))
9481 dolby_get_meta(hevc);
9482 if (hevc->cur_pic->slice_type == 2 &&
9483 hevc->vf_pre_count == 0) {
9484 hevc_print(hevc, 0,
9485 "first slice_type %x no_switch_dvlayer_count %x\n",
9486 hevc->cur_pic->slice_type,
9487 hevc->no_switch_dvlayer_count);
9488 goto force_output;
9489 }
9490 vdec_schedule_work(&hevc->work);
9491 }
9492
9493 return IRQ_HANDLED;
9494#endif
9495 }
9496
9497#endif
9498
9499 if (dec_status == HEVC_SEI_DAT) {
9500 if (!hevc->m_ins_flag) {
9501 int payload_type =
9502 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
9503 int payload_size =
9504 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
9505 process_nal_sei(hevc,
9506 payload_type, payload_size);
9507 }
9508 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
9509 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
9510 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
9511 int parse_type = HEVC_DISCARD_NAL;
9512
9513 hevc->error_watchdog_count = 0;
9514 hevc->error_skip_nal_wt_cnt = 0;
9515#ifdef MULTI_INSTANCE_SUPPORT
9516 if (hevc->m_ins_flag)
9517 reset_process_time(hevc);
9518#endif
9519 if (slice_parse_begin > 0 &&
9520 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
9521 hevc_print(hevc, 0,
9522 "nal type %d, discard %d\n", naltype,
9523 slice_parse_begin);
9524 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
9525 slice_parse_begin--;
9526 }
9527 if (naltype == NAL_UNIT_EOS) {
9528 struct PIC_s *pic;
9529
9530 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
9531#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9532 if ((vdec->master || vdec->slave) &&
9533 aux_data_is_avaible(hevc)) {
9534 if (hevc->decoding_pic)
9535 dolby_get_meta(hevc);
9536 }
9537#endif
9538 check_pic_decoded_error(hevc,
9539 hevc->pic_decoded_lcu_idx);
9540 pic = get_pic_by_POC(hevc, hevc->curr_POC);
9541 hevc->curr_POC = INVALID_POC;
9542 /* add to fix RAP_B_Bossen_1 */
9543 hevc->m_pocRandomAccess = MAX_INT;
9544 flush_output(hevc, pic);
9545 clear_poc_flag(hevc);
9546 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
9547 /* Interrupt Amrisc to execute */
9548 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9549#ifdef MULTI_INSTANCE_SUPPORT
9550 if (hevc->m_ins_flag) {
9551 hevc->decoded_poc = INVALID_POC; /*
9552 flush_output has already been called */
9553 hevc->decoding_pic = NULL;
9554 hevc->dec_result = DEC_RESULT_DONE;
9555 amhevc_stop();
9556
9557 vdec_schedule_work(&hevc->work);
9558 }
9559#endif
9560 return IRQ_HANDLED;
9561 }
9562
9563 if (
9564#ifdef MULTI_INSTANCE_SUPPORT
9565 (!hevc->m_ins_flag) &&
9566#endif
9567 hevc->error_skip_nal_count > 0) {
9568 hevc_print(hevc, 0,
9569 "nal type %d, discard %d\n", naltype,
9570 hevc->error_skip_nal_count);
9571 hevc->error_skip_nal_count--;
9572 if (hevc->error_skip_nal_count == 0) {
9573 hevc_recover(hevc);
9574 hevc->error_flag = 0;
9575 if ((error_handle_policy & 0x2) == 0) {
9576 hevc->have_vps = 1;
9577 hevc->have_sps = 1;
9578 hevc->have_pps = 1;
9579 }
9580 return IRQ_HANDLED;
9581 }
9582 } else if (naltype == NAL_UNIT_VPS) {
9583 parse_type = HEVC_NAL_UNIT_VPS;
9584 hevc->have_vps = 1;
9585#ifdef ERROR_HANDLE_DEBUG
9586 if (dbg_nal_skip_flag & 1)
9587 parse_type = HEVC_DISCARD_NAL;
9588#endif
9589 } else if (hevc->have_vps) {
9590 if (naltype == NAL_UNIT_SPS) {
9591 parse_type = HEVC_NAL_UNIT_SPS;
9592 hevc->have_sps = 1;
9593#ifdef ERROR_HANDLE_DEBUG
9594 if (dbg_nal_skip_flag & 2)
9595 parse_type = HEVC_DISCARD_NAL;
9596#endif
9597 } else if (naltype == NAL_UNIT_PPS) {
9598 parse_type = HEVC_NAL_UNIT_PPS;
9599 hevc->have_pps = 1;
9600#ifdef ERROR_HANDLE_DEBUG
9601 if (dbg_nal_skip_flag & 4)
9602 parse_type = HEVC_DISCARD_NAL;
9603#endif
9604 } else if (hevc->have_sps && hevc->have_pps) {
9605 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
9606
9607 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
9608 (naltype ==
9609 NAL_UNIT_CODED_SLICE_IDR_N_LP)
9610 || (naltype ==
9611 NAL_UNIT_CODED_SLICE_CRA)
9612 || (naltype ==
9613 NAL_UNIT_CODED_SLICE_BLA)
9614 || (naltype ==
9615 NAL_UNIT_CODED_SLICE_BLANT)
9616 || (naltype ==
9617 NAL_UNIT_CODED_SLICE_BLA_N_LP)
9618 ) {
9619 if (slice_parse_begin > 0) {
9620 hevc_print(hevc, 0,
9621 "discard %d, for debugging\n",
9622 slice_parse_begin);
9623 slice_parse_begin--;
9624 } else {
9625 parse_type = seg;
9626 }
9627 hevc->have_valid_start_slice = 1;
9628 } else if (naltype <=
9629 NAL_UNIT_CODED_SLICE_CRA
9630 && (hevc->have_valid_start_slice
9631 || (hevc->PB_skip_mode != 3))) {
9632 if (slice_parse_begin > 0) {
9633 hevc_print(hevc, 0,
9634 "discard %d, dd\n",
9635 slice_parse_begin);
9636 slice_parse_begin--;
9637 } else
9638 parse_type = seg;
9639
9640 }
9641 }
9642 }
9643 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
9644 && hevc->have_valid_start_slice &&
9645 hevc->error_flag == 0) {
9646 if ((get_dbg_flag(hevc) &
9647 H265_DEBUG_MAN_SEARCH_NAL) == 0
9648 /* && (!hevc->m_ins_flag)*/) {
9649				/* auto parse NAL; do not check
9650 *vps/sps/pps/idr
9651 */
9652 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
9653 }
9654
9655 if ((get_dbg_flag(hevc) &
9656 H265_DEBUG_NO_EOS_SEARCH_DONE)
9657#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9658 || vdec->master
9659 || vdec->slave
9660#endif
9661 ) {
9662 WRITE_VREG(NAL_SEARCH_CTL,
9663 READ_VREG(NAL_SEARCH_CTL) |
9664 0x10000);
9665 }
9666 WRITE_VREG(NAL_SEARCH_CTL,
9667 READ_VREG(NAL_SEARCH_CTL)
9668 | ((parser_sei_enable & 0x7) << 17));
9669#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9670 WRITE_VREG(NAL_SEARCH_CTL,
9671 READ_VREG(NAL_SEARCH_CTL) |
9672 ((parser_dolby_vision_enable & 0x1) << 20));
9673#endif
9674 config_decode_mode(hevc);
9675 }
9676
9677 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
9678 hevc_print(hevc, 0,
9679 "naltype = %d parse_type %d\n %d %d %d %d\n",
9680 naltype, parse_type, hevc->have_vps,
9681 hevc->have_sps, hevc->have_pps,
9682 hevc->have_valid_start_slice);
9683 }
9684
9685 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
9686		/* Interrupt Amrisc to execute */
9687 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9688#ifdef MULTI_INSTANCE_SUPPORT
9689 if (hevc->m_ins_flag)
9690 start_process_time(hevc);
9691#endif
9692 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
9693#ifdef MULTI_INSTANCE_SUPPORT
9694 if (hevc->m_ins_flag) {
9695 reset_process_time(hevc);
9696 read_decode_info(hevc);
9697
9698 }
9699#endif
9700 if (hevc->start_decoding_time > 0) {
9701 u32 process_time = 1000*
9702 (jiffies - hevc->start_decoding_time)/HZ;
9703 if (process_time > max_decoding_time)
9704 max_decoding_time = process_time;
9705 }
9706
9707 hevc->error_watchdog_count = 0;
9708 if (hevc->pic_list_init_flag == 2) {
9709 hevc->pic_list_init_flag = 3;
9710 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
9711 } else if (hevc->wait_buf == 0) {
9712 u32 vui_time_scale;
9713 u32 vui_num_units_in_tick;
9714 unsigned char reconfig_flag = 0;
9715
9716 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
9717 get_rpm_param(&hevc->param);
9718 else {
9719
9720 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
9721 int ii;
9722
9723 for (ii = 0; ii < 4; ii++) {
9724 hevc->param.l.data[i + ii] =
9725 hevc->rpm_ptr[i + 3
9726 - ii];
9727 }
9728 }
9729#ifdef SEND_LMEM_WITH_RPM
9730 check_head_error(hevc);
9731#endif
9732 }
9733 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
9734 hevc_print(hevc, 0,
9735 "rpm_param: (%d)\n", hevc->slice_idx);
9736 hevc->slice_idx++;
9737 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
9738 hevc_print_cont(hevc, 0,
9739 "%04x ", hevc->param.l.data[i]);
9740 if (((i + 1) & 0xf) == 0)
9741 hevc_print_cont(hevc, 0, "\n");
9742 }
9743
9744 hevc_print(hevc, 0,
9745 "vui_timing_info: %x, %x, %x, %x\n",
9746 hevc->param.p.vui_num_units_in_tick_hi,
9747 hevc->param.p.vui_num_units_in_tick_lo,
9748 hevc->param.p.vui_time_scale_hi,
9749 hevc->param.p.vui_time_scale_lo);
9750 }
9751 if (
9752#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9753 vdec->master == NULL &&
9754 vdec->slave == NULL &&
9755#endif
9756 aux_data_is_avaible(hevc)
9757 ) {
9758
9759 if (get_dbg_flag(hevc) &
9760 H265_DEBUG_BUFMGR_MORE)
9761 dump_aux_buf(hevc);
9762 }
9763
9764 vui_time_scale =
9765 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
9766 hevc->param.p.vui_time_scale_lo;
9767 vui_num_units_in_tick =
9768 (u32)(hevc->param.
9769 p.vui_num_units_in_tick_hi << 16) |
9770 hevc->param.
9771 p.vui_num_units_in_tick_lo;
9772 if (hevc->bit_depth_luma !=
9773 ((hevc->param.p.bit_depth & 0xf) + 8)) {
9774 reconfig_flag = 1;
9775 hevc_print(hevc, 0, "Bit depth luma = %d\n",
9776 (hevc->param.p.bit_depth & 0xf) + 8);
9777 }
9778 if (hevc->bit_depth_chroma !=
9779 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
9780 reconfig_flag = 1;
9781 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
9782 ((hevc->param.p.bit_depth >> 4) &
9783 0xf) + 8);
9784 }
9785 hevc->bit_depth_luma =
9786 (hevc->param.p.bit_depth & 0xf) + 8;
9787 hevc->bit_depth_chroma =
9788 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
9789 bit_depth_luma = hevc->bit_depth_luma;
9790 bit_depth_chroma = hevc->bit_depth_chroma;
9791#ifdef SUPPORT_10BIT
9792 if (hevc->bit_depth_luma == 8 &&
9793 hevc->bit_depth_chroma == 8 &&
9794 enable_mem_saving)
9795 hevc->mem_saving_mode = 1;
9796 else
9797 hevc->mem_saving_mode = 0;
9798#endif
9799 if (reconfig_flag &&
9800 (get_double_write_mode(hevc) & 0x10) == 0)
9801 init_decode_head_hw(hevc);
9802
9803 if ((vui_time_scale != 0)
9804 && (vui_num_units_in_tick != 0)) {
9805 hevc->frame_dur =
9806 div_u64(96000ULL *
9807 vui_num_units_in_tick,
9808 vui_time_scale);
9809 if (hevc->get_frame_dur != true)
9810 vdec_schedule_work(
9811 &hevc->notify_work);
9812
9813 hevc->get_frame_dur = true;
9814#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
9815 gvs->frame_dur = hevc->frame_dur;
9816#endif
9817 }
9818
9819 if (hevc->video_signal_type !=
9820 ((hevc->param.p.video_signal_type << 16)
9821 | hevc->param.p.color_description)) {
9822 u32 v = hevc->param.p.video_signal_type;
9823 u32 c = hevc->param.p.color_description;
9824#if 0
9825 if (v & 0x2000) {
9826 hevc_print(hevc, 0,
9827 "video_signal_type present:\n");
9828 hevc_print(hevc, 0, " %s %s\n",
9829 video_format_names[(v >> 10) & 7],
9830 ((v >> 9) & 1) ?
9831 "full_range" : "limited");
9832 if (v & 0x100) {
9833 hevc_print(hevc, 0,
9834 " color_description present:\n");
9835 hevc_print(hevc, 0,
9836 " color_primarie = %s\n",
9837 color_primaries_names
9838 [v & 0xff]);
9839 hevc_print(hevc, 0,
9840 " transfer_characteristic = %s\n",
9841 transfer_characteristics_names
9842 [(c >> 8) & 0xff]);
9843 hevc_print(hevc, 0,
9844 " matrix_coefficient = %s\n",
9845 matrix_coeffs_names[c & 0xff]);
9846 }
9847 }
9848#endif
9849 hevc->video_signal_type = (v << 16) | c;
9850 video_signal_type = hevc->video_signal_type;
9851 }
9852
9853 if (use_cma &&
9854 (hevc->param.p.slice_segment_address == 0)
9855 && (hevc->pic_list_init_flag == 0)) {
9856 int log = hevc->param.p.log2_min_coding_block_size_minus3;
9857 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
9858
9859 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
9860 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
9861 hevc->lcu_size = 1 << (log + 3 + log_s);
9862 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
9863 if (hevc->pic_w == 0 || hevc->pic_h == 0
9864 || hevc->lcu_size == 0
9865 || is_oversize(hevc->pic_w, hevc->pic_h)
9866 || (!hevc->skip_first_nal &&
9867 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
9868 /* skip search next start code */
9869 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
9870 & (~0x2));
9871				if (!hevc->skip_first_nal &&
9872 (hevc->pic_h == 96) && (hevc->pic_w == 160))
9873 hevc->skip_first_nal = 1;
9874 hevc->skip_flag = 1;
9875 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9876				/* Interrupt Amrisc to execute */
9877 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9878#ifdef MULTI_INSTANCE_SUPPORT
9879 if (hevc->m_ins_flag)
9880 start_process_time(hevc);
9881#endif
9882 } else {
9883 hevc->sps_num_reorder_pics_0 =
9884 hevc->param.p.sps_num_reorder_pics_0;
9885 hevc->pic_list_init_flag = 1;
9886#ifdef MULTI_INSTANCE_SUPPORT
9887 if (hevc->m_ins_flag) {
9888 vdec_schedule_work(&hevc->work);
9889 } else
9890#endif
9891 up(&h265_sema);
9892 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
9893 }
9894 return IRQ_HANDLED;
9895 }
9896
9897}
9898 ret =
9899 hevc_slice_segment_header_process(hevc,
9900 &hevc->param, decode_pic_begin);
9901 if (ret < 0) {
9902#ifdef MULTI_INSTANCE_SUPPORT
9903 if (hevc->m_ins_flag) {
9904 hevc->wait_buf = 0;
9905 hevc->dec_result = DEC_RESULT_AGAIN;
9906 amhevc_stop();
9907 restore_decode_state(hevc);
9908 reset_process_time(hevc);
9909 vdec_schedule_work(&hevc->work);
9910 return IRQ_HANDLED;
9911 }
9912#else
9913 ;
9914#endif
9915 } else if (ret == 0) {
9916 if ((hevc->new_pic) && (hevc->cur_pic)) {
9917 hevc->cur_pic->stream_offset =
9918 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9919 hevc_print(hevc, H265_DEBUG_OUT_PTS,
9920 "read stream_offset = 0x%x\n",
9921 hevc->cur_pic->stream_offset);
9922 hevc->cur_pic->aspect_ratio_idc =
9923 hevc->param.p.aspect_ratio_idc;
9924 hevc->cur_pic->sar_width =
9925 hevc->param.p.sar_width;
9926 hevc->cur_pic->sar_height =
9927 hevc->param.p.sar_height;
9928 }
9929
9930 WRITE_VREG(HEVC_DEC_STATUS_REG,
9931 HEVC_CODED_SLICE_SEGMENT_DAT);
9932			/* Interrupt Amrisc to execute */
9933 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9934
9935 hevc->start_decoding_time = jiffies;
9936#ifdef MULTI_INSTANCE_SUPPORT
9937 if (hevc->m_ins_flag)
9938 start_process_time(hevc);
9939#endif
9940#if 1
9941 /*to do..., copy aux data to hevc->cur_pic*/
9942#endif
9943#ifdef MULTI_INSTANCE_SUPPORT
9944 } else if (hevc->m_ins_flag) {
9945 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9946 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
9947 __func__, ret);
9948 hevc->decoded_poc = INVALID_POC;
9949 hevc->decoding_pic = NULL;
9950 hevc->dec_result = DEC_RESULT_DONE;
9951 amhevc_stop();
9952 reset_process_time(hevc);
9953 vdec_schedule_work(&hevc->work);
9954#endif
9955 } else {
9956 /* skip, search next start code */
9957#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
9958 gvs->drop_frame_count++;
9959#endif
9960 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9961 hevc->skip_flag = 1;
9962 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9963			/* Interrupt Amrisc to execute */
9964 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9965 }
9966
9967 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
9968 hevc_print(hevc, 0 , "hevc decode oversize !!\n");
9969#ifdef MULTI_INSTANCE_SUPPORT
9970 if (!hevc->m_ins_flag)
9971 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
9972 H265_DEBUG_DIS_SYS_ERROR_PROC);
9973#endif
9974 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
9975 }
9976 return IRQ_HANDLED;
9977}
9978
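/*
 * Wait for the HEVC stream search engine to go idle: clear
 * HEVC_SHIFT_STATUS and poll bit 1 of HEVC_STREAM_CONTROL,
 * sleeping 20ms per iteration, for at most ~2s before giving up
 * with a timeout message.
 */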
9979static void wait_hevc_search_done(struct hevc_state_s *hevc)
9980{
9981 int count = 0;
9982 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
9983 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
9984 msleep(20);
9985 count++;
9986 if (count > 100) {
9987 hevc_print(hevc, 0, "%s timeout\n", __func__);
9988 break;
9989 }
9990 }
9991}
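/*
 * Top-half ISR: latch HEVC_DEC_STATUS_REG, optionally dump the LMEM
 * debug window and honour ucode pause requests signalled through
 * DEBUG_REG1/DEBUG_REG2, handle the single-instance over-decode case,
 * and defer all remaining work to the threaded handler by returning
 * IRQ_WAKE_THREAD.
 */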
9992static irqreturn_t vh265_isr(int irq, void *data)
9993{
9994 int i, temp;
9995 unsigned int dec_status;
9996 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
9997 u32 debug_tag;
9998 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
9999
10000 if (hevc->init_flag == 0)
10001 return IRQ_HANDLED;
10002 hevc->dec_status = dec_status;
10003 if (is_log_enable(hevc))
10004 add_log(hevc,
10005 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10006 dec_status, READ_HREG(HEVC_DECODE_INFO),
10007 READ_VREG(HEVC_MPRED_CURR_LCU),
10008 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10009 READ_VREG(HEVC_SHIFT_STATUS));
10010
10011 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10012 hevc_print(hevc, 0,
10013 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10014 dec_status, READ_HREG(HEVC_DECODE_INFO),
10015 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10016 READ_VREG(HEVC_SHIFT_STATUS));
10017
10018 debug_tag = READ_HREG(DEBUG_REG1);
10019 if (debug_tag & 0x10000) {
10020 hevc_print(hevc, 0,
10021 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10022
10023 if (hevc->mmu_enable)
10024 temp = 0x500;
10025 else
10026 temp = 0x400;
10027 for (i = 0; i < temp; i += 4) {
10028 int ii;
10029 if ((i & 0xf) == 0)
10030 hevc_print_cont(hevc, 0, "%03x: ", i);
10031 for (ii = 0; ii < 4; ii++) {
10032 hevc_print_cont(hevc, 0, "%04x ",
10033 hevc->lmem_ptr[i + 3 - ii]);
10034 }
10035 if (((i + ii) & 0xf) == 0)
10036 hevc_print_cont(hevc, 0, "\n");
10037 }
10038
10039 if (((udebug_pause_pos & 0xffff)
10040 == (debug_tag & 0xffff)) &&
10041 (udebug_pause_decode_idx == 0 ||
10042 udebug_pause_decode_idx == hevc->decode_idx) &&
10043 (udebug_pause_val == 0 ||
10044 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10045 udebug_pause_pos &= 0xffff;
10046 hevc->ucode_pause_pos = udebug_pause_pos;
10047 }
10048 else if (debug_tag & 0x20000)
10049 hevc->ucode_pause_pos = 0xffffffff;
10050 if (hevc->ucode_pause_pos)
10051 reset_process_time(hevc);
10052 else
10053 WRITE_HREG(DEBUG_REG1, 0);
10054 } else if (debug_tag != 0) {
10055 hevc_print(hevc, 0,
10056 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10057 READ_HREG(DEBUG_REG2),
10058 READ_VREG(HEVC_STREAM_LEVEL),
10059 READ_VREG(HEVC_STREAM_WR_PTR),
10060 READ_VREG(HEVC_STREAM_RD_PTR));
10061 if (((udebug_pause_pos & 0xffff)
10062 == (debug_tag & 0xffff)) &&
10063 (udebug_pause_decode_idx == 0 ||
10064 udebug_pause_decode_idx == hevc->decode_idx) &&
10065 (udebug_pause_val == 0 ||
10066 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10067 udebug_pause_pos &= 0xffff;
10068 hevc->ucode_pause_pos = udebug_pause_pos;
10069 }
10070 if (hevc->ucode_pause_pos)
10071 reset_process_time(hevc);
10072 else
10073 WRITE_HREG(DEBUG_REG1, 0);
10074 return IRQ_HANDLED;
10075 }
10076
10077
10078 if (hevc->pic_list_init_flag == 1)
10079 return IRQ_HANDLED;
10080
10081 if (!hevc->m_ins_flag) {
10082 if (dec_status == HEVC_OVER_DECODE) {
10083 hevc->over_decode = 1;
10084 hevc_print(hevc, 0,
10085				"isr: over decode\n");
10086 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10087 return IRQ_HANDLED;
10088 }
10089 }
10090
10091 return IRQ_WAKE_THREAD;
10092
10093}
10094
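/*
 * Deferred clock work: recompute fps from frame_dur (1/96000s units)
 * and, if hevc_source_changed() accepts the new resolution/fps, record
 * it in saved_resolution so the check timer stops rescheduling us.
 */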
10095static void vh265_set_clk(struct work_struct *work)
10096{
10097 struct hevc_state_s *hevc = container_of(work,
10098 struct hevc_state_s, set_clk_work);
10099
10100 int fps = 96000 / hevc->frame_dur;
10101
10102 if (hevc_source_changed(VFORMAT_HEVC,
10103 hevc->frame_width, hevc->frame_height, fps) > 0)
10104 hevc->saved_resolution = hevc->frame_width *
10105 hevc->frame_height * fps;
10106}
10107
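/*
 * Periodic watchdog (re-armed every PUT_INTERVAL). In multi-instance
 * mode it detects decode timeouts by watching the current LCU index;
 * in single-instance mode it drives the error and NAL-skip watchdogs
 * when the receiver is inactive while the stream buffer still has
 * data. It also services the debug hooks (register peek/poke, pic
 * list dump, HW reset) and schedules set_clk_work on resolution or
 * frame-rate changes.
 */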
10108static void vh265_check_timer_func(unsigned long arg)
10109{
10110 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10111 struct timer_list *timer = &hevc->timer;
10112 unsigned char empty_flag;
10113 unsigned int buf_level;
10114
10115 enum receviver_start_e state = RECEIVER_INACTIVE;
10116
10117 if (hevc->init_flag == 0) {
10118 if (hevc->stat & STAT_TIMER_ARM) {
10119 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10120 }
10121 return;
10122 }
10123#ifdef MULTI_INSTANCE_SUPPORT
10124 if (hevc->m_ins_flag &&
10125 (get_dbg_flag(hevc) &
10126 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10127 hw_to_vdec(hevc)->next_status ==
10128 VDEC_STATUS_DISCONNECTED) {
10129 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10130 vdec_schedule_work(&hevc->work);
10131 hevc_print(hevc,
10132 0, "vdec requested to be disconnected\n");
10133 return;
10134 }
10135
10136 if (hevc->m_ins_flag) {
10137 if ((input_frame_based(hw_to_vdec(hevc)) ||
10138 (READ_VREG(HEVC_STREAM_LEVEL) > 0xb0)) &&
10139 ((get_dbg_flag(hevc) &
10140 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10141 (decode_timeout_val > 0) &&
10142 (hevc->start_process_time > 0) &&
10143 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10144 > decode_timeout_val)
10145 ) {
10146 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10147 int current_lcu_idx =
10148 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10149 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10150 if (hevc->last_lcu_idx == current_lcu_idx) {
10151 if (hevc->decode_timeout_count > 0)
10152 hevc->decode_timeout_count--;
10153 if (hevc->decode_timeout_count == 0)
10154 timeout_process(hevc);
10155 } else
10156 restart_process_time(hevc);
10157 hevc->last_lcu_idx = current_lcu_idx;
10158 } else {
10159 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10160 timeout_process(hevc);
10161 }
10162 }
10163 } else {
10164#endif
10165 if (hevc->m_ins_flag == 0 &&
10166 vf_get_receiver(hevc->provider_name)) {
10167 state =
10168 vf_notify_receiver(hevc->provider_name,
10169 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10170 NULL);
10171 if ((state == RECEIVER_STATE_NULL)
10172 || (state == RECEIVER_STATE_NONE))
10173 state = RECEIVER_INACTIVE;
10174 } else
10175 state = RECEIVER_INACTIVE;
10176
10177 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10178 /* error watchdog */
10179 if (hevc->m_ins_flag == 0 &&
10180 (empty_flag == 0)
10181 && (hevc->pic_list_init_flag == 0
10182 || hevc->pic_list_init_flag
10183 == 3)) {
10184 /* decoder has input */
10185 if ((get_dbg_flag(hevc) &
10186 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10187
10188 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10189 /* receiver has no buffer to recycle */
10190 if ((state == RECEIVER_INACTIVE) &&
10191 (kfifo_is_empty(&hevc->display_q) &&
10192 buf_level > 0x200)
10193 ) {
10194 if (hevc->error_flag == 0) {
10195 hevc->error_watchdog_count++;
10196 if (hevc->error_watchdog_count ==
10197 error_handle_threshold) {
10198 hevc_print(hevc, 0,
10199 "H265 dec err local reset.\n");
10200 hevc->error_flag = 1;
10201 hevc->error_watchdog_count = 0;
10202 hevc->error_skip_nal_wt_cnt = 0;
10203 hevc->
10204 error_system_watchdog_count++;
10205 WRITE_VREG
10206 (HEVC_ASSIST_MBOX0_IRQ_REG,
10207 0x1);
10208 }
10209 } else if (hevc->error_flag == 2) {
10210 int th =
10211 error_handle_nal_skip_threshold;
10212 hevc->error_skip_nal_wt_cnt++;
10213 if (hevc->error_skip_nal_wt_cnt
10214 == th) {
10215 hevc->error_flag = 3;
10216 hevc->error_watchdog_count = 0;
10217 hevc->
10218 error_skip_nal_wt_cnt = 0;
10219 WRITE_VREG
10220 (HEVC_ASSIST_MBOX0_IRQ_REG,
10221 0x1);
10222 }
10223 }
10224 }
10225 }
10226
10227 if ((get_dbg_flag(hevc)
10228 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10229 /* receiver has no buffer to recycle */
10230 if ((state == RECEIVER_INACTIVE) &&
10231 (kfifo_is_empty(&hevc->display_q))
10232 ) { /* no buffer to recycle */
10233 if ((get_dbg_flag(hevc) &
10234 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10235 0)
10236 hevc->error_system_watchdog_count++;
10237 if (hevc->error_system_watchdog_count ==
10238 error_handle_system_threshold) {
10239 /* and it lasts for a while */
10240 hevc_print(hevc, 0,
10241 "H265 dec fatal error watchdog.\n");
10242 hevc->
10243 error_system_watchdog_count = 0;
10244 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10245 }
10246 }
10247 } else {
10248 hevc->error_watchdog_count = 0;
10249 hevc->error_system_watchdog_count = 0;
10250 }
10251#ifdef MULTI_INSTANCE_SUPPORT
10252 }
10253#endif
10254 if ((hevc->ucode_pause_pos != 0) &&
10255 (hevc->ucode_pause_pos != 0xffffffff) &&
10256 udebug_pause_pos != hevc->ucode_pause_pos) {
10257 hevc->ucode_pause_pos = 0;
10258 WRITE_HREG(DEBUG_REG1, 0);
10259 }
10260
10261 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10262 dump_pic_list(hevc);
10263 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10264 }
10265 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10266 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10267 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10268 }
10269#ifdef TEST_NO_BUF
10270 if (hevc->wait_buf)
10271 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10272#endif
10273 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
10274 hevc->error_skip_nal_count = error_skip_nal_count;
10275 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10276
10277 debug &= ~H265_DEBUG_HW_RESET;
10278 }
10279
10280#ifdef ERROR_HANDLE_DEBUG
10281 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
10282 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
10283 dbg_nal_skip_count &= ~0x10000;
10284 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10285 }
10286#endif
10287
10288 if (radr != 0) {
10289 if (rval != 0) {
10290 WRITE_VREG(radr, rval);
10291 hevc_print(hevc, 0,
10292 "WRITE_VREG(%x,%x)\n", radr, rval);
10293 } else
10294 hevc_print(hevc, 0,
10295 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
10296 rval = 0;
10297 radr = 0;
10298 }
10299 if (dbg_cmd != 0) {
10300 if (dbg_cmd == 1) {
10301 u32 disp_laddr;
10302
10303 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
10304 get_double_write_mode(hevc) == 0) {
10305 disp_laddr =
10306 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
10307 } else {
10308 struct canvas_s cur_canvas;
10309
10310 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
10311 & 0xff), &cur_canvas);
10312 disp_laddr = cur_canvas.addr;
10313 }
10314 hevc_print(hevc, 0,
10315 "current displayed buffer address %x\r\n",
10316 disp_laddr);
10317 }
10318 dbg_cmd = 0;
10319 }
10320 /*don't changed at start.*/
10321 if (hevc->m_ins_flag == 0 &&
10322 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
10323 hevc->frame_dur > 0 && hevc->saved_resolution !=
10324 hevc->frame_width * hevc->frame_height *
10325 (96000 / hevc->frame_dur))
10326 vdec_schedule_work(&hevc->set_clk_work);
10327
10328 mod_timer(timer, jiffies + PUT_INTERVAL);
10329}
10330
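/*
 * use_cma helper thread: sleeps on h265_sema and, when woken, either
 * builds the picture list (pic_list_init_flag 1 -> 2) and kicks the
 * ucode through the mailbox interrupt, or tears the list down when
 * uninit_list is set and signals h265_uninit_done_sema before exiting.
 */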
10331static int h265_task_handle(void *data)
10332{
10333 int ret = 0;
10334 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10335
10336 set_user_nice(current, -10);
10337 while (1) {
10338 if (use_cma == 0) {
10339 hevc_print(hevc, 0,
10340 "ERROR: use_cma can not be changed dynamically\n");
10341 }
10342 ret = down_interruptible(&h265_sema);
10343 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
10344 init_pic_list(hevc);
10345 init_pic_list_hw(hevc);
10346 init_buf_spec(hevc);
10347 hevc->pic_list_init_flag = 2;
10348 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
10349
10350 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10351
10352 }
10353
10354 if (hevc->uninit_list) {
10355 /*USE_BUF_BLOCK*/
10356 uninit_pic_list(hevc);
10357 hevc_print(hevc, 0, "uninit list\n");
10358 hevc->uninit_list = 0;
10359#ifdef USE_UNINIT_SEMA
10360 if (use_cma) {
10361 up(&hevc->h265_uninit_done_sema);
10362 while (!kthread_should_stop())
10363 msleep(1);
10364 break;
10365 }
10366#endif
10367 }
10368 }
10369
10370 return 0;
10371}
10372
10373void vh265_free_cmabuf(void)
10374{
10375 struct hevc_state_s *hevc = gHevc;
10376
10377 mutex_lock(&vh265_mutex);
10378
10379 if (hevc->init_flag) {
10380 mutex_unlock(&vh265_mutex);
10381 return;
10382 }
10383
10384 mutex_unlock(&vh265_mutex);
10385}
10386
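/*
 * Status query entry point: report frame geometry, the frame rate
 * derived from frame_dur, the fatal-error bits and (in the
 * non-multi-dec build) the counters accumulated in gvs.
 */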
10387#ifdef MULTI_INSTANCE_SUPPORT
10388int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
10389#else
10390int vh265_dec_status(struct vdec_info *vstatus)
10391#endif
10392{
10393#ifdef MULTI_INSTANCE_SUPPORT
10394 struct hevc_state_s *hevc =
10395 (struct hevc_state_s *)vdec->private;
10396#else
10397 struct hevc_state_s *hevc = gHevc;
10398#endif
10399 if (!hevc)
10400 return -1;
10401
10402 vstatus->frame_width = hevc->frame_width;
10403 vstatus->frame_height = hevc->frame_height;
10404 if (hevc->frame_dur != 0)
10405 vstatus->frame_rate = 96000 / hevc->frame_dur;
10406 else
10407 vstatus->frame_rate = -1;
10408 vstatus->error_count = 0;
10409 vstatus->status = hevc->stat | hevc->fatal_error;
10410#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10412	vstatus->frame_dur = hevc->frame_dur;
10413 if (gvs) {
10414 vstatus->bit_rate = gvs->bit_rate;
10415 vstatus->frame_data = gvs->frame_data;
10416 vstatus->total_data = gvs->total_data;
10417 vstatus->frame_count = gvs->frame_count;
10418 vstatus->error_frame_count = gvs->error_frame_count;
10419 vstatus->drop_frame_count = gvs->drop_frame_count;
10421 vstatus->samp_cnt = gvs->samp_cnt;
10422 vstatus->offset = gvs->offset;
10423 }
10424 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
10425 "%s", DRIVER_NAME);
10426#endif
10427 vstatus->ratio_control = hevc->ratio_control;
10428 return 0;
10429}
10430
10431int vh265_set_isreset(struct vdec_s *vdec, int isreset)
10432{
10433 is_reset = isreset;
10434 return 0;
10435}
10436
10437static int vh265_vdec_info_init(void)
10438{
10439 gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
10440	if (!gvs) {
10441		pr_info("failed to allocate vdec status struct.\n");
10442 return -ENOMEM;
10443 }
10444 return 0;
10445}
10446
10447#if 0
10448static void H265_DECODE_INIT(void)
10449{
10450 /* enable hevc clocks */
10451 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
10452 /* *************************************************************** */
10453 /* Power ON HEVC */
10454 /* *************************************************************** */
10455 /* Powerup HEVC */
10456 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
10457 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
10458 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
10459 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
10460 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
10461 /* remove isolations */
10462 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
10463 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
10464
10465}
10466#endif
10467
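/*
 * Program HEVC_DECODE_MODE: single vs. multi instance, frame- vs.
 * stream-based input and, with Dolby Vision, the BL/EL role derived
 * from vdec->master/slave (optionally bypassing the enhancement
 * layer). The start parser type and start_decoding_flag are packed
 * into the upper bytes, and 0x80 in the top byte selects the MBX0
 * interrupt.
 */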
10468static void config_decode_mode(struct hevc_state_s *hevc)
10469{
10470#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10471 struct vdec_s *vdec = hw_to_vdec(hevc);
10472#endif
10473 unsigned decode_mode;
10474 if (!hevc->m_ins_flag)
10475 decode_mode = DECODE_MODE_SINGLE;
10476 else if (vdec_frame_based(hw_to_vdec(hevc)))
10477 decode_mode =
10478 DECODE_MODE_MULTI_FRAMEBASE;
10479#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10480 else if (vdec->slave) {
10481 if (force_bypass_dvenl & 0x80000000)
10482 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
10483 else
10484 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
10485 if (dolby_meta_with_el && hevc->bypass_dvenl) {
10486 hevc->bypass_dvenl = 0;
10487 hevc_print(hevc, 0,
10488 "NOT support bypass_dvenl when meta_with_el\n");
10489 }
10490 if (hevc->bypass_dvenl)
10491 decode_mode =
10492 (hevc->start_parser_type << 8)
10493 | DECODE_MODE_MULTI_STREAMBASE;
10494 else
10495 decode_mode =
10496 (hevc->start_parser_type << 8)
10497 | DECODE_MODE_MULTI_DVBAL;
10498 } else if (vdec->master)
10499 decode_mode =
10500 (hevc->start_parser_type << 8)
10501 | DECODE_MODE_MULTI_DVENL;
10502#endif
10503 else
10504 decode_mode =
10505 DECODE_MODE_MULTI_STREAMBASE;
10506
10507 if (hevc->m_ins_flag)
10508 decode_mode |=
10509 (hevc->start_decoding_flag << 16);
10510 /* set MBX0 interrupt flag */
10511 decode_mode |= (0x80 << 24);
10512 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
10513 WRITE_VREG(HEVC_DECODE_MODE2,
10514 hevc->rps_set_id);
10515}
10516
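/*
 * One-time protocol init before starting the ucode: configure the
 * work space and decoder HW, unmask the MBX0 mailbox interrupt,
 * program NAL_SEARCH_CTL according to PB_skip_mode, the SEI/Dolby
 * Vision parser enables and the debug flags, then apply the decode
 * mode and aux buffer configuration.
 */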
10517static void vh265_prot_init(struct hevc_state_s *hevc)
10518{
10519#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10520 struct vdec_s *vdec = hw_to_vdec(hevc);
10521#endif
10522 /* H265_DECODE_INIT(); */
10523
10524 hevc_config_work_space_hw(hevc);
10525
10526 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
10527
10528 WRITE_VREG(HEVC_WAIT_FLAG, 1);
10529
10530 /* WRITE_VREG(P_HEVC_MPSR, 1); */
10531
10532 /* clear mailbox interrupt */
10533 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
10534
10535 /* enable mailbox interrupt */
10536 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
10537
10538 /* disable PSCALE for hardware sharing */
10539 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
10540
10541 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
10542
10543 if ((get_dbg_flag(hevc) &
10544 (H265_DEBUG_MAN_SKIP_NAL |
10545 H265_DEBUG_MAN_SEARCH_NAL))
10546 /*||hevc->m_ins_flag*/
10547 ) {
10548 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
10549 } else {
10550 /* check vps/sps/pps/i-slice in ucode */
10551 unsigned ctl_val = 0x8;
10552 if (hevc->PB_skip_mode == 0)
10553 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
10554 else if (hevc->PB_skip_mode == 3)
10555 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
10556 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
10557 }
10558 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
10559#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10560 || vdec->master
10561 || vdec->slave
10562#endif
10563 )
10564 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
10565
10566 WRITE_VREG(NAL_SEARCH_CTL,
10567 READ_VREG(NAL_SEARCH_CTL)
10568 | ((parser_sei_enable & 0x7) << 17));
10569#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10570 WRITE_VREG(NAL_SEARCH_CTL,
10571 READ_VREG(NAL_SEARCH_CTL) |
10572 ((parser_dolby_vision_enable & 0x1) << 20));
10573#endif
10574 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
10575
10576 config_decode_mode(hevc);
10577 config_aux_buf(hevc);
10578#ifdef SWAP_HEVC_UCODE
10579 if (!tee_enabled() && hevc->is_swap &&
10580 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10581 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
10582 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
10583 }
10584#endif
10585#ifdef DETREFILL_ENABLE
10586 if (hevc->is_swap &&
10587 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10588 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
10589 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
10590 }
10591#endif
10592}
10593
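/*
 * Per-stream software init: take width/height/rate from the amstream
 * dec info, reject oversize streams, derive the 4k flag, frame
 * duration and i_only policy, and prime the newframe/display vframe
 * fifos.
 */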
10594static int vh265_local_init(struct hevc_state_s *hevc)
10595{
10596 int i;
10597 int ret = -1;
10598
10599#ifdef DEBUG_PTS
10600 hevc->pts_missed = 0;
10601 hevc->pts_hit = 0;
10602#endif
10603
10604 hevc->saved_resolution = 0;
10605 hevc->get_frame_dur = false;
10606 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
10607 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
10608 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
10609 pr_info("over size : %u x %u.\n",
10610 hevc->frame_width, hevc->frame_height);
10611 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10612 return ret;
10613 }
10614
10615 if (hevc->max_pic_w && hevc->max_pic_h) {
10616 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
10617 ((hevc->max_pic_w * hevc->max_pic_h) >
10618 1920 * 1088) ? true : false;
10619 } else {
10620 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
10621 ((hevc->frame_width * hevc->frame_height) >
10622 1920 * 1088) ? true : false;
10623 }
10624
10625 hevc->frame_dur =
10626 (hevc->vh265_amstream_dec_info.rate ==
10627 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
10628#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10629 gvs->frame_dur = hevc->frame_dur;
10630#endif
10631 if (hevc->frame_width && hevc->frame_height)
10632 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
10633
10634 if (i_only_flag)
10635 hevc->i_only = i_only_flag & 0xff;
10636 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
10637 & 0x08)
10638 hevc->i_only = 0x7;
10639 else
10640 hevc->i_only = 0x0;
10641 hevc->error_watchdog_count = 0;
10642 hevc->sei_present_flag = 0;
10643 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
10644 & 0x40) >> 6;
10645 hevc_print(hevc, 0,
10646 "h265:pts_unstable=%d\n", pts_unstable);
10647/*
10648 *TODO:FOR VERSION
10649 */
10650 hevc_print(hevc, 0,
10651 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
10652 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
10653
10654 if (hevc->frame_dur == 0)
10655 hevc->frame_dur = 96000 / 24;
10656
10657 INIT_KFIFO(hevc->display_q);
10658 INIT_KFIFO(hevc->newframe_q);
10659
10660
10661 for (i = 0; i < VF_POOL_SIZE; i++) {
10662 const struct vframe_s *vf = &hevc->vfpool[i];
10663
10664 hevc->vfpool[i].index = -1;
10665 kfifo_put(&hevc->newframe_q, vf);
10666 }
10667
10668
10669 ret = hevc_local_init(hevc);
10670
10671 return ret;
10672}
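/*
 * Bring-up: load the HEVC firmware (choosing the MMU/swap variant by
 * chip generation and stream size), set up timers and work items,
 * and, in the legacy single-instance path, register the ISR, vframe
 * provider and check timer and start the AMRISC core.
 */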
10673#ifdef MULTI_INSTANCE_SUPPORT
10674static s32 vh265_init(struct vdec_s *vdec)
10675{
10676 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
10677#else
10678static s32 vh265_init(struct hevc_state_s *hevc)
10679{
10680
10681#endif
10682 int ret, size = -1;
10683 int fw_size = 0x1000 * 16;
10684 struct firmware_s *fw = NULL;
10685
10686 init_timer(&hevc->timer);
10687
10688 hevc->stat |= STAT_TIMER_INIT;
10689 if (vh265_local_init(hevc) < 0)
10690 return -EBUSY;
10691
10692 mutex_init(&hevc->chunks_mutex);
10693 INIT_WORK(&hevc->notify_work, vh265_notify_work);
10694 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
10695
10696 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
10697 if (IS_ERR_OR_NULL(fw))
10698 return -ENOMEM;
10699
10700 if (hevc->mmu_enable)
10701 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
10702 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
10703 else {
10704 if (!hevc->is_4k) {
10705				/* if an older fw package is installed, fall */
10706				/* back to the noswap fw because the old */
10707				/* package does not contain the swap fw. */
10708 size = get_firmware_data(
10709 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
10710 if (size < 0)
10711 size = get_firmware_data(
10712 VIDEO_DEC_HEVC_MMU, fw->data);
10713 else if (size)
10714 hevc->is_swap = true;
10715 } else
10716 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
10717 fw->data);
10718 }
10719 else
10720 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
10721
10722 if (size < 0) {
10723 pr_err("get firmware fail.\n");
10724 vfree(fw);
10725 return -1;
10726 }
10727
10728 fw->len = size;
10729
10730#ifdef SWAP_HEVC_UCODE
10731 if (!tee_enabled() && hevc->is_swap &&
10732 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10733 if (hevc->mmu_enable) {
10734 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
10735 hevc->mc_cpu_addr =
10736 dma_alloc_coherent(amports_get_dma_device(),
10737 hevc->swap_size,
10738 &hevc->mc_dma_handle, GFP_KERNEL);
10739 if (!hevc->mc_cpu_addr) {
10740 amhevc_disable();
10741				pr_info("vh265 mmu swap ucode load failed.\n");
10742 return -ENOMEM;
10743 }
10744
10745 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
10746 hevc->swap_size);
10747
10748 hevc_print(hevc, 0,
10749 "vh265 mmu ucode swap loaded %x\n",
10750 hevc->mc_dma_handle);
10751 }
10752 }
10753#endif
10754
10755#ifdef MULTI_INSTANCE_SUPPORT
10756 if (hevc->m_ins_flag) {
10757 hevc->timer.data = (ulong) hevc;
10758 hevc->timer.function = vh265_check_timer_func;
10759 hevc->timer.expires = jiffies + PUT_INTERVAL;
10760
10761#ifdef USE_UNINIT_SEMA
10762 sema_init(&hevc->h265_uninit_done_sema, 0);
10763#endif
10764
10765 /*add_timer(&hevc->timer);
10766 *hevc->stat |= STAT_TIMER_ARM;
10767 */
10768
10769 INIT_WORK(&hevc->work, vh265_work);
10770 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
10771
10772 hevc->fw = fw;
10773
10774 return 0;
10775 }
10776#endif
10777 amhevc_enable();
10778
10779 if (hevc->mmu_enable)
10780 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
10781 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
10782 else {
10783 if (!hevc->is_4k) {
10784			/* if an older fw package is installed, fall */
10785			/* back to the noswap fw because the old */
10786			/* package does not contain the swap fw. */
10787 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
10788 "hevc_mmu_swap", fw->data);
10789 if (ret < 0)
10790 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
10791 "h265_mmu", fw->data);
10792 else
10793 hevc->is_swap = true;
10794 } else
10795 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
10796 "h265_mmu", fw->data);
10797 }
10798 else
10799 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
10800
10801 if (ret < 0) {
10802 amhevc_disable();
10803 vfree(fw);
10804 pr_err("H265: the %s fw loading failed, err: %x\n",
10805 tee_enabled() ? "TEE" : "local", ret);
10806 return -EBUSY;
10807 }
10808
10809 vfree(fw);
10810
10811 hevc->stat |= STAT_MC_LOAD;
10812
10813#ifdef DETREFILL_ENABLE
10814 if (hevc->is_swap &&
10815 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
10816 init_detrefill_buf(hevc);
10817#endif
10818 /* enable AMRISC side protocol */
10819 vh265_prot_init(hevc);
10820
10821 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
10822 vh265_isr_thread_fn,
10823 IRQF_ONESHOT,/*run thread on this irq disabled*/
10824 "vh265-irq", (void *)hevc)) {
10825 hevc_print(hevc, 0, "vh265 irq register error.\n");
10826 amhevc_disable();
10827 return -ENOENT;
10828 }
10829
10830 hevc->stat |= STAT_ISR_REG;
10831 hevc->provider_name = PROVIDER_NAME;
10832
10833#ifdef MULTI_INSTANCE_SUPPORT
10834 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
10835 &vh265_vf_provider, vdec);
10836 vf_reg_provider(&vh265_vf_prov);
10837 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
10838 NULL);
10839 if (hevc->frame_dur != 0) {
10840 if (!is_reset) {
10841 vf_notify_receiver(hevc->provider_name,
10842 VFRAME_EVENT_PROVIDER_FR_HINT,
10843 (void *)
10844 ((unsigned long)hevc->frame_dur));
10845 fr_hint_status = VDEC_HINTED;
10846 }
10847 } else
10848 fr_hint_status = VDEC_NEED_HINT;
10849#else
10850 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
10851 hevc);
10852 vf_reg_provider(&vh265_vf_prov);
10853 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
10854 if (hevc->frame_dur != 0) {
10855 vf_notify_receiver(PROVIDER_NAME,
10856 VFRAME_EVENT_PROVIDER_FR_HINT,
10857 (void *)
10858 ((unsigned long)hevc->frame_dur));
10859 fr_hint_status = VDEC_HINTED;
10860 } else
10861 fr_hint_status = VDEC_NEED_HINT;
10862#endif
10863 hevc->stat |= STAT_VF_HOOK;
10864
10865 hevc->timer.data = (ulong) hevc;
10866 hevc->timer.function = vh265_check_timer_func;
10867 hevc->timer.expires = jiffies + PUT_INTERVAL;
10868
10869 add_timer(&hevc->timer);
10870
10871 hevc->stat |= STAT_TIMER_ARM;
10872
10873 if (use_cma) {
10874#ifdef USE_UNINIT_SEMA
10875 sema_init(&hevc->h265_uninit_done_sema, 0);
10876#endif
10877 if (h265_task == NULL) {
10878 sema_init(&h265_sema, 1);
10879 h265_task =
10880 kthread_run(h265_task_handle, hevc,
10881 "kthread_h265");
10882 }
10883 }
10884 /* hevc->stat |= STAT_KTHREAD; */
10885#if 0
10886 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
10887 hevc_print(hevc, 0, "%s force clk\n", __func__);
10888 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
10889 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
10890 ((1 << 2) | (1 << 1)));
10891 WRITE_VREG(HEVC_DBLK_CFG0,
10892 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
10893 (1 << 1) | 0x3fff0000));/* 2,29:16 */
10894 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
10895 (1 << 2)); /* 2 */
10896 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
10897 (1 << 24)); /* 24 */
10898 WRITE_VREG(HEVC_STREAM_CONTROL,
10899 READ_VREG(HEVC_STREAM_CONTROL) |
10900 (1 << 15)); /* 15 */
10901 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
10902 (1 << 13)); /* 13 */
10903 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
10904 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
10905 (1 << 15)); /* 15 */
10906 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
10907 READ_VREG(HEVC_PARSER_INT_CONTROL) |
10908 (1 << 15)); /* 15 */
10909 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
10910 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
10911 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
10912 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
10913 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
10914 (1 << 3)); /* 3 */
10915 }
10916#endif
10917#ifdef SWAP_HEVC_UCODE
10918 if (!tee_enabled() && hevc->is_swap &&
10919 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10920 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
10921 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
10922 }
10923#endif
10924
10925#ifndef MULTI_INSTANCE_SUPPORT
10926 set_vdec_func(&vh265_dec_status);
10927#endif
10928 amhevc_start();
10929 hevc->stat |= STAT_VDEC_RUN;
10930 hevc->init_flag = 1;
10931 error_handle_threshold = 30;
10932 /* pr_info("%d, vh265_init, RP=0x%x\n",
10933 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
10934 */
10935
10936 return 0;
10937}
10938
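/*
 * Single-instance teardown: optionally wait for the current slice to
 * finish, stop the core, free the IRQ, delete the timer, unregister
 * the vframe provider, let the use_cma thread release the picture
 * list, then drop the MMU buffers and gvs.
 */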
10939static int vh265_stop(struct hevc_state_s *hevc)
10940{
10941 if (get_dbg_flag(hevc) &
10942 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
10943 int wait_timeout_count = 0;
10944
10945 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
10946 HEVC_CODED_SLICE_SEGMENT_DAT &&
10947 wait_timeout_count < 10){
10948 wait_timeout_count++;
10949 msleep(20);
10950 }
10951 }
10952 if (hevc->stat & STAT_VDEC_RUN) {
10953 amhevc_stop();
10954 hevc->stat &= ~STAT_VDEC_RUN;
10955 }
10956
10957 if (hevc->stat & STAT_ISR_REG) {
10958#ifdef MULTI_INSTANCE_SUPPORT
10959 if (!hevc->m_ins_flag)
10960#endif
10961 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
10962 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
10963 hevc->stat &= ~STAT_ISR_REG;
10964 }
10965
10966 hevc->stat &= ~STAT_TIMER_INIT;
10967 if (hevc->stat & STAT_TIMER_ARM) {
10968 del_timer_sync(&hevc->timer);
10969 hevc->stat &= ~STAT_TIMER_ARM;
10970 }
10971
10972 if (hevc->stat & STAT_VF_HOOK) {
10973 if (fr_hint_status == VDEC_HINTED) {
10974 vf_notify_receiver(hevc->provider_name,
10975 VFRAME_EVENT_PROVIDER_FR_END_HINT,
10976 NULL);
10977 }
10978 fr_hint_status = VDEC_NO_NEED_HINT;
10979 vf_unreg_provider(&vh265_vf_prov);
10980 hevc->stat &= ~STAT_VF_HOOK;
10981 }
10982
10983 hevc_local_uninit(hevc);
10984
10985 if (use_cma) {
10986 hevc->uninit_list = 1;
10987 up(&h265_sema);
10988#ifdef USE_UNINIT_SEMA
10989 down(&hevc->h265_uninit_done_sema);
10990 if (!IS_ERR(h265_task)) {
10991 kthread_stop(h265_task);
10992 h265_task = NULL;
10993 }
10994#else
10995 while (hevc->uninit_list) /* wait uninit complete */
10996 msleep(20);
10997#endif
10998
10999 }
11000 hevc->init_flag = 0;
11001 hevc->first_sc_checked = 0;
11002 cancel_work_sync(&hevc->notify_work);
11003 cancel_work_sync(&hevc->set_clk_work);
11004 uninit_mmu_buffers(hevc);
11005 amhevc_disable();
11006
11007 kfree(gvs);
11008 gvs = NULL;
11009
11010 return 0;
11011}
11012
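/*
 * Decode-timeout bookkeeping (multi-instance): start_process_time
 * arms a two-strike timeout counter when a decode is kicked off,
 * restart_process_time re-arms it while the LCU index is still
 * advancing, and reset_process_time stops it while recording the
 * longest decode time in max_process_time[].
 */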
11013#ifdef MULTI_INSTANCE_SUPPORT
11014static void reset_process_time(struct hevc_state_s *hevc)
11015{
11016 if (hevc->start_process_time) {
11017 unsigned int process_time =
11018 1000 * (jiffies - hevc->start_process_time) / HZ;
11019 hevc->start_process_time = 0;
11020 if (process_time > max_process_time[hevc->index])
11021 max_process_time[hevc->index] = process_time;
11022 }
11023}
11024
11025static void start_process_time(struct hevc_state_s *hevc)
11026{
11027 hevc->start_process_time = jiffies;
11028 hevc->decode_timeout_count = 2;
11029 hevc->last_lcu_idx = 0;
11030}
11031
11032static void restart_process_time(struct hevc_state_s *hevc)
11033{
11034 hevc->start_process_time = jiffies;
11035 hevc->decode_timeout_count = 2;
11036}
11037
11038static void timeout_process(struct hevc_state_s *hevc)
11039{
11040	/*
11041	 * If vh265_work is already pending at this timeout point,
11042	 * let it handle the scenario.
11043	 */
11044 if (work_pending(&hevc->work))
11045 return;
11046
11047 hevc->timeout_num++;
11048 amhevc_stop();
11049 read_decode_info(hevc);
11050
11051 hevc_print(hevc,
11052 0, "%s decoder timeout\n", __func__);
11053 check_pic_decoded_error(hevc,
11054 hevc->pic_decoded_lcu_idx);
11055 hevc->decoded_poc = hevc->curr_POC;
11056 hevc->decoding_pic = NULL;
11057 hevc->dec_result = DEC_RESULT_DONE;
11058 reset_process_time(hevc);
11059
11060 if (work_pending(&hevc->work))
11061 return;
11062 vdec_schedule_work(&hevc->timeout_work);
11063}
11064
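/*
 * CONSTRAIN_MAX_BUF_NUM helpers: count DPB entries that are held only
 * for display (output_ready, neither referenced nor output-marked) and
 * entries that are in use at all; presumably consumed by the
 * max-buffer-number constraint logic elsewhere in the driver.
 */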
11065#ifdef CONSTRAIN_MAX_BUF_NUM
11066static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11067{
11068 struct PIC_s *pic;
11069 int i;
11070 int count = 0;
11071 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11072 pic = hevc->m_PIC[i];
11073 if (pic == NULL || pic->index == -1)
11074 continue;
11075 if (pic->output_mark == 0 && pic->referenced == 0
11076 && pic->output_ready == 1)
11077 count++;
11078 }
11079
11080 return count;
11081}
11082
11083static int get_used_buf_count(struct hevc_state_s *hevc)
11084{
11085 struct PIC_s *pic;
11086 int i;
11087 int count = 0;
11088 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11089 pic = hevc->m_PIC[i];
11090 if (pic == NULL || pic->index == -1)
11091 continue;
11092 if (pic->output_mark != 0 || pic->referenced != 0
11093 || pic->output_ready != 0)
11094 count++;
11095 }
11096
11097 return count;
11098}
11099#endif
11100
11101
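/*
 * Look for a free picture in the DPB (no output_mark, not referenced,
 * not output_ready). If none is free and the number of referenced
 * pictures already reaches the working set minus
 * sps_num_reorder_pics_0, and the receiver is inactive, recycle the
 * lowest-POC error-marked reference so decoding can make progress.
 */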
11102static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11103{
11104 struct PIC_s *new_pic = NULL;
11105 struct PIC_s *pic;
11106 /* recycle un-used pic */
11107 int i;
11108 int ref_pic = 0;
11109 struct vdec_s *vdec = hw_to_vdec(hevc);
11110 /*return 1 if pic_list is not initialized yet*/
11111 if (hevc->pic_list_init_flag != 3)
11112 return 1;
11113
11114 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11115 pic = hevc->m_PIC[i];
11116 if (pic == NULL || pic->index == -1)
11117 continue;
11118 if (pic->referenced == 1)
11119 ref_pic++;
11120 if (pic->output_mark == 0 && pic->referenced == 0
11121 && pic->output_ready == 0
11122 ) {
11123 if (new_pic) {
11124 if (pic->POC < new_pic->POC)
11125 new_pic = pic;
11126 } else
11127 new_pic = pic;
11128 }
11129 }
11130
11131 if ((new_pic == NULL) &&
11132 (ref_pic >=
11133 get_work_pic_num(hevc) -
11134 hevc->sps_num_reorder_pics_0 - 1)) {
11135 enum receviver_start_e state = RECEIVER_INACTIVE;
11136 if (vf_get_receiver(vdec->vf_provider_name)) {
11137 state =
11138 vf_notify_receiver(vdec->vf_provider_name,
11139 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11140 NULL);
11141 if ((state == RECEIVER_STATE_NULL)
11142 || (state == RECEIVER_STATE_NONE))
11143 state = RECEIVER_INACTIVE;
11144 }
11145 if (state == RECEIVER_INACTIVE) {
11146 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11147 pic = hevc->m_PIC[i];
11148 if (pic == NULL || pic->index == -1)
11149 continue;
11150
11151 if ((pic->referenced == 1) &&
11152 (pic->error_mark == 1)) {
11153 if (new_pic) {
11154 if (pic->POC < new_pic->POC)
11155 new_pic = pic;
11156 } else
11157 new_pic = pic;
11158 }
11159 }
11160 if (new_pic != NULL) {
11161 new_pic->referenced = 0;
11162				put_mv_buf(hevc, new_pic);
11163 if (pic_list_debug & 0x2)
11164 pr_err("err ref poc :%d\n", new_pic->POC);
11165 }
11166 }
11167 }
11168
11169 return (new_pic != NULL) ? 1 : 0;
11170}
11171
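/*
 * Multi-instance teardown counterpart of vh265_stop(): same sequence,
 * but the picture list is released through the work handler and the
 * pending work items and firmware buffer are cleaned up here as well.
 */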
11172static int vmh265_stop(struct hevc_state_s *hevc)
11173{
11174 if (hevc->stat & STAT_TIMER_ARM) {
11175 del_timer_sync(&hevc->timer);
11176 hevc->stat &= ~STAT_TIMER_ARM;
11177 }
11178 if (hevc->stat & STAT_VDEC_RUN) {
11179 amhevc_stop();
11180 hevc->stat &= ~STAT_VDEC_RUN;
11181 }
11182 if (hevc->stat & STAT_ISR_REG) {
11183 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11184 hevc->stat &= ~STAT_ISR_REG;
11185 }
11186
11187 if (hevc->stat & STAT_VF_HOOK) {
11188 if (fr_hint_status == VDEC_HINTED)
11189 vf_notify_receiver(hevc->provider_name,
11190 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11191 NULL);
11192 fr_hint_status = VDEC_NO_NEED_HINT;
11193 vf_unreg_provider(&vh265_vf_prov);
11194 hevc->stat &= ~STAT_VF_HOOK;
11195 }
11196
11197 hevc_local_uninit(hevc);
11198
11199 if (use_cma) {
11200 hevc->uninit_list = 1;
11201 reset_process_time(hevc);
11202 vdec_schedule_work(&hevc->work);
11203#ifdef USE_UNINIT_SEMA
11204 if (hevc->init_flag) {
11205 down(&hevc->h265_uninit_done_sema);
11206 }
11207#else
11208 while (hevc->uninit_list) /* wait uninit complete */
11209 msleep(20);
11210#endif
11211 }
11212 hevc->init_flag = 0;
11213 hevc->first_sc_checked = 0;
11214 cancel_work_sync(&hevc->notify_work);
11215 cancel_work_sync(&hevc->set_clk_work);
11216 cancel_work_sync(&hevc->work);
11217 cancel_work_sync(&hevc->timeout_work);
11218 uninit_mmu_buffers(hevc);
11219
11220 vfree(hevc->fw);
11221 hevc->fw = NULL;
11222
11223 dump_log(hevc);
11224 return 0;
11225}
11226
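/*
 * Debug helper: byte-sum the current input chunk (vmap'ing it first if
 * the block is not permanently mapped) so the work handler can log a
 * cheap checksum of frame-based input data.
 */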
11227static unsigned char get_data_check_sum
11228 (struct hevc_state_s *hevc, int size)
11229{
11230 int jj;
11231 int sum = 0;
11232 u8 *data = NULL;
11233
11234 if (!hevc->chunk->block->is_mapped)
11235 data = codec_mm_vmap(hevc->chunk->block->start +
11236 hevc->chunk->offset, size);
11237 else
11238 data = ((u8 *)hevc->chunk->block->start_virt) +
11239 hevc->chunk->offset;
11240
11241 for (jj = 0; jj < size; jj++)
11242 sum += data[jj];
11243
11244 if (!hevc->chunk->block->is_mapped)
11245 codec_mm_unmap_phyaddr(data);
11246 return sum;
11247}
11248
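/*
 * Deferred frame-rate hint: once frame_dur is known, notify the vframe
 * receiver exactly once (per-vdec state in the multi-instance build,
 * the global fr_hint_status otherwise).
 */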
11249static void vh265_notify_work(struct work_struct *work)
11250{
11251 struct hevc_state_s *hevc =
11252 container_of(work,
11253 struct hevc_state_s,
11254 notify_work);
11255 struct vdec_s *vdec = hw_to_vdec(hevc);
11256#ifdef MULTI_INSTANCE_SUPPORT
11257 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
11258 vf_notify_receiver(hevc->provider_name,
11259 VFRAME_EVENT_PROVIDER_FR_HINT,
11260 (void *)
11261 ((unsigned long)hevc->frame_dur));
11262 vdec->fr_hint_state = VDEC_HINTED;
11263 } else if (fr_hint_status == VDEC_NEED_HINT) {
11264 vf_notify_receiver(hevc->provider_name,
11265 VFRAME_EVENT_PROVIDER_FR_HINT,
11266 (void *)
11267 ((unsigned long)hevc->frame_dur));
11268 fr_hint_status = VDEC_HINTED;
11269 }
11270#else
11271	if (fr_hint_status == VDEC_NEED_HINT) {
11272 vf_notify_receiver(PROVIDER_NAME,
11273 VFRAME_EVENT_PROVIDER_FR_HINT,
11274 (void *)
11275 ((unsigned long)hevc->frame_dur));
11276 fr_hint_status = VDEC_HINTED;
11277 }
11278#endif
11279
11280 return;
11281}
11282
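/*
 * Bottom half of the decode state machine, run from the vdec work
 * queue: finish uninit/pic-list-init requests, feed more input on
 * GET_DATA results, account a finished frame (MMU tail free,
 * over-decode check, Dolby Vision byte-count sync) on
 * DEC_RESULT_DONE, and turn exhausted input into DEC_RESULT_EOS with
 * a final flush of the output queue.
 */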
11283static void vh265_work_implement(struct hevc_state_s *hevc,
11284	struct vdec_s *vdec, int from)
11285{
11286 if (hevc->uninit_list) {
11287 /*USE_BUF_BLOCK*/
11288 uninit_pic_list(hevc);
11289 hevc_print(hevc, 0, "uninit list\n");
11290 hevc->uninit_list = 0;
11291#ifdef USE_UNINIT_SEMA
11292 up(&hevc->h265_uninit_done_sema);
11293#endif
11294 return;
11295 }
11296
11297 /* finished decoding one frame or error,
11298 * notify vdec core to switch context
11299 */
11300 if (hevc->pic_list_init_flag == 1
11301 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
11302 hevc->pic_list_init_flag = 2;
11303 init_pic_list(hevc);
11304 init_pic_list_hw(hevc);
11305 init_buf_spec(hevc);
11306 hevc_print(hevc, 0,
11307 "set pic_list_init_flag to 2\n");
11308
11309 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11310 return;
11311 }
11312
11313 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11314 "%s dec_result %d %x %x %x\n",
11315 __func__,
11316 hevc->dec_result,
11317 READ_VREG(HEVC_STREAM_LEVEL),
11318 READ_VREG(HEVC_STREAM_WR_PTR),
11319 READ_VREG(HEVC_STREAM_RD_PTR));
11320
11321 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
11322 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
11323 && (hw_to_vdec(hevc)->next_status !=
11324 VDEC_STATUS_DISCONNECTED)) {
11325 if (!vdec_has_more_input(vdec)) {
11326 hevc->dec_result = DEC_RESULT_EOS;
11327 vdec_schedule_work(&hevc->work);
11328 return;
11329 }
11330 if (!input_frame_based(vdec)) {
11331 int r = vdec_sync_input(vdec);
11332 if (r >= 0x200) {
11333 WRITE_VREG(HEVC_DECODE_SIZE,
11334 READ_VREG(HEVC_DECODE_SIZE) + r);
11335
11336 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11337 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
11338 __func__,
11339 READ_VREG(HEVC_STREAM_LEVEL),
11340 READ_VREG(HEVC_STREAM_WR_PTR),
11341 READ_VREG(HEVC_STREAM_RD_PTR),
11342 READ_VREG(HEVC_MPC_E), r);
11343
11344 start_process_time(hevc);
11345 if (READ_VREG(HEVC_DEC_STATUS_REG)
11346 == HEVC_DECODE_BUFEMPTY2)
11347 WRITE_VREG(HEVC_DEC_STATUS_REG,
11348 HEVC_ACTION_DONE);
11349 else
11350 WRITE_VREG(HEVC_DEC_STATUS_REG,
11351 HEVC_ACTION_DEC_CONT);
11352 } else {
11353 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11354 vdec_schedule_work(&hevc->work);
11355 }
11356 return;
11357 }
11358
11359 /*below for frame_base*/
11360 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
11361 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11362 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
11363 __func__,
11364 READ_VREG(HEVC_STREAM_LEVEL),
11365 READ_VREG(HEVC_STREAM_WR_PTR),
11366 READ_VREG(HEVC_STREAM_RD_PTR),
11367 READ_VREG(HEVC_MPC_E));
11368 mutex_lock(&hevc->chunks_mutex);
11369 vdec_vframe_dirty(vdec, hevc->chunk);
11370 hevc->chunk = NULL;
11371 mutex_unlock(&hevc->chunks_mutex);
11372 vdec_clean_input(vdec);
11373 }
11374
11375 /*if (is_new_pic_available(hevc)) {*/
11376 if (run_ready(vdec, VDEC_HEVC)) {
11377 int r;
11378 int decode_size;
11379 r = vdec_prepare_input(vdec, &hevc->chunk);
11380 if (r < 0) {
11381 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11382
11383 hevc_print(hevc,
11384 PRINT_FLAG_VDEC_DETAIL,
11385 "amvdec_vh265: Insufficient data\n");
11386
11387 vdec_schedule_work(&hevc->work);
11388 return;
11389 }
11390 hevc->dec_result = DEC_RESULT_NONE;
11391 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11392 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
11393 __func__, r,
11394 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
11395 get_data_check_sum(hevc, r) : 0,
11396 READ_VREG(HEVC_MPC_E));
11397
11398 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
11399 int jj;
11400 u8 *data = NULL;
11401
11402 if (!hevc->chunk->block->is_mapped)
11403 data = codec_mm_vmap(
11404 hevc->chunk->block->start +
11405 hevc->chunk->offset, r);
11406 else
11407 data = ((u8 *)
11408 hevc->chunk->block->start_virt)
11409 + hevc->chunk->offset;
11410
11411 for (jj = 0; jj < r; jj++) {
11412 if ((jj & 0xf) == 0)
11413 hevc_print(hevc,
11414 PRINT_FRAMEBASE_DATA,
11415 "%06x:", jj);
11416 hevc_print_cont(hevc,
11417 PRINT_FRAMEBASE_DATA,
11418 "%02x ", data[jj]);
11419 if (((jj + 1) & 0xf) == 0)
11420 hevc_print_cont(hevc,
11421 PRINT_FRAMEBASE_DATA,
11422 "\n");
11423 }
11424
11425 if (!hevc->chunk->block->is_mapped)
11426 codec_mm_unmap_phyaddr(data);
11427 }
11428
11429 decode_size = hevc->chunk->size +
11430 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
11431 WRITE_VREG(HEVC_DECODE_SIZE,
11432 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
11433
11434 vdec_enable_input(vdec);
11435
11436 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11437 "%s: mpc %x\n",
11438 __func__, READ_VREG(HEVC_MPC_E));
11439
11440 start_process_time(hevc);
11441 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11442		} else {
11443 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11444
11445 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11446 * "amvdec_vh265: Insufficient data\n");
11447 */
11448
11449 vdec_schedule_work(&hevc->work);
11450 }
11451 return;
11452 } else if (hevc->dec_result == DEC_RESULT_DONE) {
11453 /* if (!hevc->ctx_valid)
11454 hevc->ctx_valid = 1; */
11455 decode_frame_count[hevc->index]++;
11456#ifdef DETREFILL_ENABLE
11457 if (hevc->is_swap &&
11458 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11459 if (hevc->delrefill_check == 2) {
11460 delrefill(hevc);
11461 amhevc_stop();
11462 }
11463 }
11464#endif
11465 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
11466 hevc->used_4k_num =
11467 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
11468 if (hevc->used_4k_num >= 0 &&
11469 hevc->cur_pic &&
11470 hevc->cur_pic->scatter_alloc
11471 == 1) {
11472 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
11473 "%s pic index %d scatter_alloc %d page_start %d\n",
11474 "decoder_mmu_box_free_idx_tail",
11475 hevc->cur_pic->index,
11476 hevc->cur_pic->scatter_alloc,
11477 hevc->used_4k_num);
11478 if (hevc->m_ins_flag)
11479 hevc_mmu_dma_check(hw_to_vdec(hevc));
11480 decoder_mmu_box_free_idx_tail(
11481 hevc->mmu_box,
11482 hevc->cur_pic->index,
11483 hevc->used_4k_num);
11484 hevc->cur_pic->scatter_alloc = 2;
11485 }
11486 }
11487 hevc->pic_decoded_lcu_idx =
11488 READ_VREG(HEVC_PARSER_LCU_START)
11489 & 0xffffff;
11490
11491 if (vdec->master == NULL && vdec->slave == NULL &&
11492 hevc->empty_flag == 0) {
11493 hevc->over_decode =
11494 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
11495 if (hevc->over_decode)
11496 hevc_print(hevc, 0,
11497 "!!!Over decode\n");
11498 }
11499
11500 if (is_log_enable(hevc))
11501 add_log(hevc,
11502 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
11503 __func__,
11504 hevc->dec_result,
11505 hevc->pic_decoded_lcu_idx,
11506 hevc->used_4k_num,
11507 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11508 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11509 hevc->start_shift_bytes
11510 );
11511
11512 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11513 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
11514 __func__,
11515 hevc->dec_result,
11516 READ_VREG(HEVC_STREAM_LEVEL),
11517 READ_VREG(HEVC_STREAM_WR_PTR),
11518 READ_VREG(HEVC_STREAM_RD_PTR),
11519 hevc->pic_decoded_lcu_idx,
11520 hevc->used_4k_num,
11521 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11522 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11523 hevc->start_shift_bytes
11524 );
11525
11526 hevc->used_4k_num = -1;
11527
11528 check_pic_decoded_error(hevc,
11529 hevc->pic_decoded_lcu_idx);
11530#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11531#if 1
11532 if (vdec->slave) {
11533 if (dv_debug & 0x1)
11534 vdec_set_flag(vdec->slave,
11535 VDEC_FLAG_SELF_INPUT_CONTEXT);
11536 else
11537 vdec_set_flag(vdec->slave,
11538 VDEC_FLAG_OTHER_INPUT_CONTEXT);
11539 }
11540#else
11541 if (vdec->slave) {
11542 if (no_interleaved_el_slice)
11543 vdec_set_flag(vdec->slave,
11544 VDEC_FLAG_INPUT_KEEP_CONTEXT);
11545 /* this will move real HW pointer for input */
11546 else
11547 vdec_set_flag(vdec->slave, 0);
11551 /* this will not move the real HW pointer;
11549 * SL layer decoding will start from the
11550 * same stream position as the current
11551 * BL decoder
11552 */
11553 }
11554#endif
11555#endif
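	/*
	 * Keep the dual-layer decoder pair in sync: whichever layer finished
	 * this frame copies its HEVC_SHIFT_BYTE_COUNT snapshot to the paired
	 * instance so both layers resume from a consistent stream position.
	 */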
11556#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11557 hevc->shift_byte_count_lo
11558 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11559 if (vdec->slave) {
11560 /*cur is base, found enhance*/
11561 struct hevc_state_s *hevc_el =
11562 (struct hevc_state_s *)
11563 vdec->slave->private;
11564 if (hevc_el)
11565 hevc_el->shift_byte_count_lo =
11566 hevc->shift_byte_count_lo;
11567 } else if (vdec->master) {
11568 /*cur is enhance, found base*/
11569 struct hevc_state_s *hevc_ba =
11570 (struct hevc_state_s *)
11571 vdec->master->private;
11572 if (hevc_ba)
11573 hevc_ba->shift_byte_count_lo =
11574 hevc->shift_byte_count_lo;
11575 }
11576#endif
11577 mutex_lock(&hevc->chunks_mutex);
11578 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11579 hevc->chunk = NULL;
11580 mutex_unlock(&hevc->chunks_mutex);
11581 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
11582 /*
11583 stream base: stream buf empty or timeout
11584 frame base: vdec_prepare_input fail
11585 */
11586 if (!vdec_has_more_input(vdec)) {
11587 hevc->dec_result = DEC_RESULT_EOS;
11588 vdec_schedule_work(&hevc->work);
11589 return;
11590 }
11591#ifdef AGAIN_HAS_THRESHOLD
11592 hevc->next_again_flag = 1;
11593#endif
11594 } else if (hevc->dec_result == DEC_RESULT_EOS) {
11595 struct PIC_s *pic;
11596 hevc->eos = 1;
11597#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11598 if ((vdec->master || vdec->slave) &&
11599 aux_data_is_avaible(hevc))
11600 dolby_get_meta(hevc);
11601#endif
11602 check_pic_decoded_error(hevc,
11603 hevc->pic_decoded_lcu_idx);
11604 pic = get_pic_by_POC(hevc, hevc->curr_POC);
11605 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11606 "%s: end of stream, last dec poc %d => 0x%pf\n",
11607 __func__, hevc->curr_POC, pic);
11608 flush_output(hevc, pic);
11609
11610 if (hevc->is_used_v4l)
11611 notify_v4l_eos(hw_to_vdec(hevc));
11612#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11613 hevc->shift_byte_count_lo
11614 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11615 if (vdec->slave) {
11616 /*cur is base, found enhance*/
11617 struct hevc_state_s *hevc_el =
11618 (struct hevc_state_s *)
11619 vdec->slave->private;
11620 if (hevc_el)
11621 hevc_el->shift_byte_count_lo =
11622 hevc->shift_byte_count_lo;
11623 } else if (vdec->master) {
11624 /*cur is enhance, found base*/
11625 struct hevc_state_s *hevc_ba =
11626 (struct hevc_state_s *)
11627 vdec->master->private;
11628 if (hevc_ba)
11629 hevc_ba->shift_byte_count_lo =
11630 hevc->shift_byte_count_lo;
11631 }
11632#endif
11633 mutex_lock(&hevc->chunks_mutex);
11634 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11635 hevc->chunk = NULL;
11636 mutex_unlock(&hevc->chunks_mutex);
11637 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
11638 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11639 "%s: force exit\n",
11640 __func__);
11641 if (hevc->stat & STAT_VDEC_RUN) {
11642 amhevc_stop();
11643 hevc->stat &= ~STAT_VDEC_RUN;
11644 }
11645 if (hevc->stat & STAT_ISR_REG) {
11646 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11647 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11648 hevc->stat &= ~STAT_ISR_REG;
11649 }
11650 hevc_print(hevc, 0, "%s: force exit end\n",
11651 __func__);
11652 }
11653
11654 if (hevc->stat & STAT_VDEC_RUN) {
11655 amhevc_stop();
11656 hevc->stat &= ~STAT_VDEC_RUN;
11657 }
11658
11659 if (hevc->stat & STAT_TIMER_ARM) {
11660 del_timer_sync(&hevc->timer);
11661 hevc->stat &= ~STAT_TIMER_ARM;
11662 }
11663
11664 wait_hevc_search_done(hevc);
11665#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11666 if (hevc->switch_dvlayer_flag) {
11667 if (vdec->slave)
11668 vdec_set_next_sched(vdec, vdec->slave);
11669 else if (vdec->master)
11670 vdec_set_next_sched(vdec, vdec->master);
11671 } else if (vdec->slave || vdec->master)
11672 vdec_set_next_sched(vdec, vdec);
11673#endif
11674
11675 if (from == 1) {
11676 /* This is a timeout work */
11677 if (work_pending(&hevc->work)) {
11678 /*
11679 * The vh265_work was queued at the last moment;
11680 * give it a chance to handle this scenario.
11681 */
11682 return;
11683 //cancel_work_sync(&hevc->work); //reserved for future consideration
11684 }
11685 }
11686
11687 /* mark itself has all HW resource released and input released */
11688 if (vdec->parallel_dec == 1)
11689 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
11690 else
11691 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
11692
11693 if (hevc->vdec_cb)
11694 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
11695}
11696
11697static void vh265_work(struct work_struct *work)
11698{
11699 struct hevc_state_s *hevc = container_of(work,
11700 struct hevc_state_s, work);
11701 struct vdec_s *vdec = hw_to_vdec(hevc);
11702
11703 vh265_work_implement(hevc, vdec, 0);
11704}
11705
11706static void vh265_timeout_work(struct work_struct *work)
11707{
11708 struct hevc_state_s *hevc = container_of(work,
11709 struct hevc_state_s, timeout_work);
11710 struct vdec_s *vdec = hw_to_vdec(hevc);
11711
11712 if (work_pending(&hevc->work))
11713 return;
11714 vh265_work_implement(hevc, vdec, 1);
11715}
11716
11717
11718static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
11719{
11720 /* new to do ... */
11721 vh265_prot_init(hevc);
11722 return 0;
11723}
11724static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
11725{
11726 struct hevc_state_s *hevc =
11727 (struct hevc_state_s *)vdec->private;
11728 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
11729 CODEC_MM_FLAGS_TVP : 0;
11730 bool ret = 0;
11731 if (step == 0x12)
11732 return 0;
11733 else if (step == 0x11)
11734 step = 0x12;
11735
11736 if (hevc->eos)
11737 return 0;
11738 if (!hevc->first_sc_checked && hevc->mmu_enable) {
11739 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
11740 hevc->first_sc_checked = 1;
11741 hevc_print(hevc, 0,
11742 "vh265 cached=%d need_size=%d speed= %d ms\n",
11743 size, (hevc->need_cache_size >> PAGE_SHIFT),
11744 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
11745 }
11746 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
11747 && pre_decode_buf_level != 0) {
11748 u32 rp, wp, level;
11749
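		/*
		 * The parser FIFO is a ring buffer: if the write pointer has
		 * wrapped behind the read pointer, the occupied level is
		 * size + wp - rp, otherwise simply wp - rp.
		 */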
11750 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
11751 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
11752 if (wp < rp)
11753 level = vdec->input.size + wp - rp;
11754 else
11755 level = wp - rp;
11756
11757 if (level < pre_decode_buf_level)
11758 return 0;
11759 }
11760
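	/*
	 * After a DEC_RESULT_AGAIN on stream input, skip scheduling until the
	 * parser write pointer has advanced by at least again_threshold bytes
	 * past the value sampled at the start of the previous run().
	 */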
11761#ifdef AGAIN_HAS_THRESHOLD
11762 if (hevc->next_again_flag &&
11763 (!vdec_frame_based(vdec))) {
11764 u32 parser_wr_ptr =
11765 READ_PARSER_REG(PARSER_VIDEO_WP);
11766 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
11767 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
11768 again_threshold) {
11769 int r = vdec_sync_input(vdec);
11770 hevc_print(hevc,
11771 PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
11772 return 0;
11773 }
11774 }
11775#endif
11776
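	/*
	 * Optional throttle: while the display queue holds at least
	 * disp_vframe_valve_level frames, only proceed on roughly every other
	 * readiness check so the receiver can drain some frames first.
	 */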
11777 if (disp_vframe_valve_level &&
11778 kfifo_len(&hevc->display_q) >=
11779 disp_vframe_valve_level) {
11780 hevc->valve_count--;
11781 if (hevc->valve_count <= 0)
11782 hevc->valve_count = 2;
11783 else
11784 return 0;
11785 }
11786
11787 ret = is_new_pic_available(hevc);
11788 if (!ret) {
11789 hevc_print(hevc,
11790 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
11791 __func__, ret);
11792 }
11793
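	/*
	 * Extra back-pressure once the picture list is set up: refuse new
	 * runs when too many buffers are held only by the video-frame path,
	 * when the display queue is long enough, or when the number of
	 * buffers in use reaches the configured or working limit (this helps
	 * avoid over-allocation around resolution switches).
	 */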
11794#ifdef CONSTRAIN_MAX_BUF_NUM
11795 if (hevc->pic_list_init_flag == 3) {
11796 if (run_ready_max_vf_only_num > 0 &&
11797 get_vf_ref_only_buf_count(hevc) >=
11798 run_ready_max_vf_only_num
11799 )
11800 ret = 0;
11801 if (run_ready_display_q_num > 0 &&
11802 kfifo_len(&hevc->display_q) >=
11803 run_ready_display_q_num)
11804 ret = 0;
11805
11806 /*avoid more buffers consumed when
11807 switching resolution*/
11808 if (run_ready_max_buf_num == 0xff &&
11809 get_used_buf_count(hevc) >=
11810 get_work_pic_num(hevc))
11811 ret = 0;
11812 else if (run_ready_max_buf_num &&
11813 get_used_buf_count(hevc) >=
11814 run_ready_max_buf_num)
11815 ret = 0;
11816 }
11817#endif
11818
11819 if (ret)
11820 not_run_ready[hevc->index] = 0;
11821 else
11822 not_run_ready[hevc->index]++;
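	/*
	 * Report readiness as the set of cores this instance needs: only the
	 * HEVC core in parallel mode, both VDEC_1 and HEVC otherwise.
	 */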
11823 if (vdec->parallel_dec == 1)
11824 return ret ? (CORE_MASK_HEVC) : 0;
11825 else
11826 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
11827}
11828
11829static void run(struct vdec_s *vdec, unsigned long mask,
11830 void (*callback)(struct vdec_s *, void *), void *arg)
11831{
11832 struct hevc_state_s *hevc =
11833 (struct hevc_state_s *)vdec->private;
11834 int r, loadr = 0;
11835 unsigned char check_sum = 0;
11836
11837 run_count[hevc->index]++;
11838 hevc->vdec_cb_arg = arg;
11839 hevc->vdec_cb = callback;
11840 hevc->aux_data_dirty = 1;
11841 hevc_reset_core(vdec);
11842
11843#ifdef AGAIN_HAS_THRESHOLD
11844 hevc->pre_parser_wr_ptr =
11845 READ_PARSER_REG(PARSER_VIDEO_WP);
11846 hevc->next_again_flag = 0;
11847#endif
11848 r = vdec_prepare_input(vdec, &hevc->chunk);
11849 if (r < 0) {
11850 input_empty[hevc->index]++;
11851 hevc->dec_result = DEC_RESULT_AGAIN;
11852 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11853 "ammvdec_vh265: Insufficient data\n");
11854
11855 vdec_schedule_work(&hevc->work);
11856 return;
11857 }
11858 input_empty[hevc->index] = 0;
11859 hevc->dec_result = DEC_RESULT_NONE;
11860 if (vdec_frame_based(vdec) &&
11861 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
11862 || is_log_enable(hevc)))
11863 check_sum = get_data_check_sum(hevc, r);
11864
11865 if (is_log_enable(hevc))
11866 add_log(hevc,
11867 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
11868 __func__, r,
11869 check_sum,
11870 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
11871 );
11872 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11873 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11874 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
11875 __func__, r,
11876 check_sum,
11877 READ_VREG(HEVC_STREAM_LEVEL),
11878 READ_VREG(HEVC_STREAM_WR_PTR),
11879 READ_VREG(HEVC_STREAM_RD_PTR),
11880 READ_PARSER_REG(PARSER_VIDEO_RP),
11881 READ_PARSER_REG(PARSER_VIDEO_WP),
11882 hevc->start_shift_bytes
11883 );
11884 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
11885 input_frame_based(vdec)) {
11886 int jj;
11887 u8 *data = NULL;
11888
11889 if (!hevc->chunk->block->is_mapped)
11890 data = codec_mm_vmap(hevc->chunk->block->start +
11891 hevc->chunk->offset, r);
11892 else
11893 data = ((u8 *)hevc->chunk->block->start_virt)
11894 + hevc->chunk->offset;
11895
11896 for (jj = 0; jj < r; jj++) {
11897 if ((jj & 0xf) == 0)
11898 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
11899 "%06x:", jj);
11900 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
11901 "%02x ", data[jj]);
11902 if (((jj + 1) & 0xf) == 0)
11903 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
11904 "\n");
11905 }
11906
11907 if (!hevc->chunk->block->is_mapped)
11908 codec_mm_unmap_phyaddr(data);
11909 }
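	/*
	 * Firmware handling: if this core already holds our microcode
	 * (vdec->mc_loaded), only the stream swap buffer address needs to be
	 * restored for the TEE + swap case; otherwise pick the firmware
	 * variant based on the SoC generation and the MMU/4K configuration,
	 * load it, and bail out with DEC_RESULT_FORCE_EXIT on failure.
	 */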
11910 if (vdec->mc_loaded) {
11911 /* firmware has been loaded before
11912 and has not changed to another type;
11913 skip reloading.
11914 */
11915 if (tee_enabled() && hevc->is_swap &&
11916 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11917 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
11918 } else {
11919 if (hevc->mmu_enable)
11920 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11921 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
11922 "h265_mmu", hevc->fw->data);
11923 else {
11924 if (!hevc->is_4k) {
11925 /* try the swap fw first; if it fails, fall back */
11926 /* to the non-swap fw because an older fw package */
11927 /* does not contain the swap fw. */
11928 loadr = amhevc_vdec_loadmc_ex(
11929 VFORMAT_HEVC, vdec,
11930 "hevc_mmu_swap",
11931 hevc->fw->data);
11932 if (loadr < 0)
11933 loadr = amhevc_vdec_loadmc_ex(
11934 VFORMAT_HEVC, vdec,
11935 "h265_mmu",
11936 hevc->fw->data);
11937 else
11938 hevc->is_swap = true;
11939 } else
11940 loadr = amhevc_vdec_loadmc_ex(
11941 VFORMAT_HEVC, vdec,
11942 "h265_mmu", hevc->fw->data);
11943 }
11944 else
11945 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
11946 NULL, hevc->fw->data);
11947 if (loadr < 0) {
11948 amhevc_disable();
11949 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
11950 tee_enabled() ? "TEE" : "local", loadr);
11951 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
11952 vdec_schedule_work(&hevc->work);
11953 return;
11954 }
11955
11956 if (tee_enabled() && hevc->is_swap &&
11957 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11958 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
11959#ifdef DETREFILL_ENABLE
11960 if (hevc->is_swap &&
11961 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11962 init_detrefill_buf(hevc);
11963#endif
11964 vdec->mc_loaded = 1;
11965 vdec->mc_type = VFORMAT_HEVC;
11966 }
11967 if (vh265_hw_ctx_restore(hevc) < 0) {
11968 vdec_schedule_work(&hevc->work);
11969 return;
11970 }
11971 vdec_enable_input(vdec);
11972
11973 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11974
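	/*
	 * For frame-based input the shift byte counter is restarted per chunk
	 * and HEVC_DECODE_SIZE is programmed with the chunk size plus the
	 * alignment remainder of the offset; for dual-layer stream input the
	 * saved low 32 bits of the shift byte count are restored instead.
	 */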
11975 if (vdec_frame_based(vdec)) {
11976 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
11977 r = hevc->chunk->size +
11978 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
11979 hevc->decode_size = r;
11980 }
11981#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11982 else {
11983 if (vdec->master || vdec->slave)
11984 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
11985 hevc->shift_byte_count_lo);
11986 }
11987#endif
11988 WRITE_VREG(HEVC_DECODE_SIZE, r);
11989 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
11990 hevc->init_flag = 1;
11991
11992 if (hevc->pic_list_init_flag == 3)
11993 init_pic_list_hw(hevc);
11994
11995 backup_decode_state(hevc);
11996
11997 start_process_time(hevc);
11998 mod_timer(&hevc->timer, jiffies);
11999 hevc->stat |= STAT_TIMER_ARM;
12000 hevc->stat |= STAT_ISR_REG;
12001 amhevc_start();
12002 hevc->stat |= STAT_VDEC_RUN;
12003}
12004
12005static void reset(struct vdec_s *vdec)
12006{
12007
12008 struct hevc_state_s *hevc =
12009 (struct hevc_state_s *)vdec->private;
12010
12011 hevc_print(hevc,
12012 PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12013
12014}
12015
12016static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12017{
12018 struct hevc_state_s *hevc =
12019 (struct hevc_state_s *)vdec->private;
12020
12021 return vh265_isr(0, hevc);
12022}
12023
12024static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12025{
12026 struct hevc_state_s *hevc =
12027 (struct hevc_state_s *)vdec->private;
12028
12029 return vh265_isr_thread_fn(0, hevc);
12030}
12031#endif
12032
12033static int amvdec_h265_probe(struct platform_device *pdev)
12034{
12035#ifdef MULTI_INSTANCE_SUPPORT
12036 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12037#else
12038 struct vdec_dev_reg_s *pdata =
12039 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12040#endif
12041 char *tmpbuf;
12042 int ret;
12043 struct hevc_state_s *hevc;
12044
12045 hevc = vmalloc(sizeof(struct hevc_state_s));
12046 if (hevc == NULL) {
12047 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12048 return -ENOMEM;
12049 }
12050 gHevc = hevc;
12051 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12052 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12053 H265_DEBUG_DIS_SYS_ERROR_PROC));
12054 memset(hevc, 0, sizeof(struct hevc_state_s));
12055 if (get_dbg_flag(hevc))
12056 hevc_print(hevc, 0, "%s\r\n", __func__);
12057 mutex_lock(&vh265_mutex);
12058
12059 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12060 (parser_sei_enable & 0x100) == 0)
12061 parser_sei_enable = 7; /*old 1*/
12062 hevc->m_ins_flag = 0;
12063 hevc->init_flag = 0;
12064 hevc->first_sc_checked = 0;
12065 hevc->uninit_list = 0;
12066 hevc->fatal_error = 0;
12067 hevc->show_frame_num = 0;
12068 hevc->frameinfo_enable = 1;
12069#ifdef MULTI_INSTANCE_SUPPORT
12070 hevc->platform_dev = pdev;
12071 platform_set_drvdata(pdev, pdata);
12072#endif
12073
12074 if (pdata == NULL) {
12075 hevc_print(hevc, 0,
12076 "\namvdec_h265 memory resource undefined.\n");
12077 vfree(hevc);
12078 mutex_unlock(&vh265_mutex);
12079 return -EFAULT;
12080 }
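	/*
	 * Unless forced, enable the decoder frame MMU only on GXL or newer
	 * SoCs and only when double write mode 0x10 (full uncompressed
	 * output) is not requested.
	 */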
12081 if (mmu_enable_force == 0) {
12082 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12083 || double_write_mode == 0x10)
12084 hevc->mmu_enable = 0;
12085 else
12086 hevc->mmu_enable = 1;
12087 }
12088 if (init_mmu_buffers(hevc)) {
12089 hevc_print(hevc, 0,
12090 "\n 265 mmu init failed!\n");
12091 vfree(hevc);
12092 mutex_unlock(&vh265_mutex);
12093 return -EFAULT;
12094 }
12095
12096 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12097 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12098 if (ret < 0) {
12099 uninit_mmu_buffers(hevc);
12100 vfree(hevc);
12101 mutex_unlock(&vh265_mutex);
12102 return ret;
12103 }
12104 hevc->buf_size = work_buf_size;
12105
12106
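	/*
	 * For non-secure playback, clear the newly allocated workspace and
	 * flush it to memory: use the direct kernel mapping when available,
	 * otherwise fall back to a temporary codec_mm vmap of the physical
	 * range.
	 */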
12107 if (!vdec_secure(pdata)) {
12108 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12109 if (tmpbuf) {
12110 memset(tmpbuf, 0, work_buf_size);
12111 dma_sync_single_for_device(amports_get_dma_device(),
12112 hevc->buf_start,
12113 work_buf_size, DMA_TO_DEVICE);
12114 } else {
12115 tmpbuf = codec_mm_vmap(hevc->buf_start,
12116 work_buf_size);
12117 if (tmpbuf) {
12118 memset(tmpbuf, 0, work_buf_size);
12119 dma_sync_single_for_device(
12120 amports_get_dma_device(),
12121 hevc->buf_start,
12122 work_buf_size,
12123 DMA_TO_DEVICE);
12124 codec_mm_unmap_phyaddr(tmpbuf);
12125 }
12126 }
12127 }
12128
12129 if (get_dbg_flag(hevc)) {
12130 hevc_print(hevc, 0,
12131 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12132 hevc->buf_start, hevc->buf_size);
12133 }
12134
12135 if (pdata->sys_info)
12136 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12137 else {
12138 hevc->vh265_amstream_dec_info.width = 0;
12139 hevc->vh265_amstream_dec_info.height = 0;
12140 hevc->vh265_amstream_dec_info.rate = 30;
12141 }
12142#ifndef MULTI_INSTANCE_SUPPORT
12143 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12144 workaround_enable |= 3;
12145 hevc_print(hevc, 0,
12146 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
12147 } else
12148 workaround_enable &= ~3;
12149#endif
12150 hevc->cma_dev = pdata->cma_dev;
12151 vh265_vdec_info_init();
12152
12153#ifdef MULTI_INSTANCE_SUPPORT
12154 pdata->private = hevc;
12155 pdata->dec_status = vh265_dec_status;
12156 pdata->set_isreset = vh265_set_isreset;
12157 is_reset = 0;
12158 if (vh265_init(pdata) < 0) {
12159#else
12160 if (vh265_init(hevc) < 0) {
12161#endif
12162 hevc_print(hevc, 0,
12163 "\namvdec_h265 init failed.\n");
12164 hevc_local_uninit(hevc);
12165 uninit_mmu_buffers(hevc);
12166 vfree(hevc);
12167 pdata->dec_status = NULL;
12168 mutex_unlock(&vh265_mutex);
12169 return -ENODEV;
12170 }
12171 /*set the max clk for smooth playing...*/
12172 hevc_source_changed(VFORMAT_HEVC,
12173 3840, 2160, 60);
12174 mutex_unlock(&vh265_mutex);
12175
12176 return 0;
12177}
12178
12179static int amvdec_h265_remove(struct platform_device *pdev)
12180{
12181 struct hevc_state_s *hevc = gHevc;
12182
12183 if (get_dbg_flag(hevc))
12184 hevc_print(hevc, 0, "%s\r\n", __func__);
12185
12186 mutex_lock(&vh265_mutex);
12187
12188 vh265_stop(hevc);
12189
12190 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
12191
12192
12193#ifdef DEBUG_PTS
12194 hevc_print(hevc, 0,
12195 "pts missed %ld, pts hit %ld, duration %d\n",
12196 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
12197#endif
12198
12199 vfree(hevc);
12200 hevc = NULL;
12201 gHevc = NULL;
12202
12203 mutex_unlock(&vh265_mutex);
12204
12205 return 0;
12206}
12207/****************************************/
12208#ifdef CONFIG_PM
12209static int h265_suspend(struct device *dev)
12210{
12211 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
12212 return 0;
12213}
12214
12215static int h265_resume(struct device *dev)
12216{
12217 amhevc_resume(to_platform_device(dev));
12218 return 0;
12219}
12220
12221static const struct dev_pm_ops h265_pm_ops = {
12222 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
12223};
12224#endif
12225
12226static struct platform_driver amvdec_h265_driver = {
12227 .probe = amvdec_h265_probe,
12228 .remove = amvdec_h265_remove,
12229 .driver = {
12230 .name = DRIVER_NAME,
12231#ifdef CONFIG_PM
12232 .pm = &h265_pm_ops,
12233#endif
12234 }
12235};
12236
12237#ifdef MULTI_INSTANCE_SUPPORT
12238static void vh265_dump_state(struct vdec_s *vdec)
12239{
12240 int i;
12241 struct hevc_state_s *hevc =
12242 (struct hevc_state_s *)vdec->private;
12243 hevc_print(hevc, 0,
12244 "====== %s\n", __func__);
12245
12246 hevc_print(hevc, 0,
12247 "width/height (%d/%d), reorder_pic_num %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
12248 hevc->frame_width,
12249 hevc->frame_height,
12250 hevc->sps_num_reorder_pics_0,
12251 get_work_pic_num(hevc),
12252 hevc->video_signal_type_debug,
12253 hevc->is_swap
12254 );
12255
12256 hevc_print(hevc, 0,
12257 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
12258 input_frame_based(vdec),
12259 hevc->eos,
12260 hevc->dec_result,
12261 decode_frame_count[hevc->index],
12262 display_frame_count[hevc->index],
12263 run_count[hevc->index],
12264 not_run_ready[hevc->index],
12265 input_empty[hevc->index]
12266 );
12267
12268 if (vf_get_receiver(vdec->vf_provider_name)) {
12269 enum receviver_start_e state =
12270 vf_notify_receiver(vdec->vf_provider_name,
12271 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12272 NULL);
12273 hevc_print(hevc, 0,
12274 "\nreceiver(%s) state %d\n",
12275 vdec->vf_provider_name,
12276 state);
12277 }
12278
12279 hevc_print(hevc, 0,
12280 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
12281 __func__,
12282 kfifo_len(&hevc->newframe_q),
12283 VF_POOL_SIZE,
12284 kfifo_len(&hevc->display_q),
12285 VF_POOL_SIZE,
12286 hevc->vf_pre_count,
12287 hevc->vf_get_count,
12288 hevc->vf_put_count,
12289 hevc->pic_list_init_flag,
12290 is_new_pic_available(hevc)
12291 );
12292
12293 dump_pic_list(hevc);
12294
12295 for (i = 0; i < BUF_POOL_SIZE; i++) {
12296 hevc_print(hevc, 0,
12297 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12298 i,
12299 hevc->m_BUF[i].start_adr,
12300 hevc->m_BUF[i].size,
12301 hevc->m_BUF[i].used_flag);
12302 }
12303
12304 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12305 hevc_print(hevc, 0,
12306 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12307 i,
12308 hevc->m_mv_BUF[i].start_adr,
12309 hevc->m_mv_BUF[i].size,
12310 hevc->m_mv_BUF[i].used_flag);
12311 }
12312
12313 hevc_print(hevc, 0,
12314 "HEVC_DEC_STATUS_REG=0x%x\n",
12315 READ_VREG(HEVC_DEC_STATUS_REG));
12316 hevc_print(hevc, 0,
12317 "HEVC_MPC_E=0x%x\n",
12318 READ_VREG(HEVC_MPC_E));
12319 hevc_print(hevc, 0,
12320 "HEVC_DECODE_MODE=0x%x\n",
12321 READ_VREG(HEVC_DECODE_MODE));
12322 hevc_print(hevc, 0,
12323 "HEVC_DECODE_MODE2=0x%x\n",
12324 READ_VREG(HEVC_DECODE_MODE2));
12325 hevc_print(hevc, 0,
12326 "NAL_SEARCH_CTL=0x%x\n",
12327 READ_VREG(NAL_SEARCH_CTL));
12328 hevc_print(hevc, 0,
12329 "HEVC_PARSER_LCU_START=0x%x\n",
12330 READ_VREG(HEVC_PARSER_LCU_START));
12331 hevc_print(hevc, 0,
12332 "HEVC_DECODE_SIZE=0x%x\n",
12333 READ_VREG(HEVC_DECODE_SIZE));
12334 hevc_print(hevc, 0,
12335 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
12336 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
12337 hevc_print(hevc, 0,
12338 "HEVC_STREAM_START_ADDR=0x%x\n",
12339 READ_VREG(HEVC_STREAM_START_ADDR));
12340 hevc_print(hevc, 0,
12341 "HEVC_STREAM_END_ADDR=0x%x\n",
12342 READ_VREG(HEVC_STREAM_END_ADDR));
12343 hevc_print(hevc, 0,
12344 "HEVC_STREAM_LEVEL=0x%x\n",
12345 READ_VREG(HEVC_STREAM_LEVEL));
12346 hevc_print(hevc, 0,
12347 "HEVC_STREAM_WR_PTR=0x%x\n",
12348 READ_VREG(HEVC_STREAM_WR_PTR));
12349 hevc_print(hevc, 0,
12350 "HEVC_STREAM_RD_PTR=0x%x\n",
12351 READ_VREG(HEVC_STREAM_RD_PTR));
12352 hevc_print(hevc, 0,
12353 "PARSER_VIDEO_RP=0x%x\n",
12354 READ_PARSER_REG(PARSER_VIDEO_RP));
12355 hevc_print(hevc, 0,
12356 "PARSER_VIDEO_WP=0x%x\n",
12357 READ_PARSER_REG(PARSER_VIDEO_WP));
12358
12359 if (input_frame_based(vdec) &&
12360 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
12361 ) {
12362 int jj;
12363 if (hevc->chunk && hevc->chunk->block &&
12364 hevc->chunk->size > 0) {
12365 u8 *data = NULL;
12366 if (!hevc->chunk->block->is_mapped)
12367 data = codec_mm_vmap(hevc->chunk->block->start +
12368 hevc->chunk->offset, hevc->chunk->size);
12369 else
12370 data = ((u8 *)hevc->chunk->block->start_virt)
12371 + hevc->chunk->offset;
12372 hevc_print(hevc, 0,
12373 "frame data size 0x%x\n",
12374 hevc->chunk->size);
12375 for (jj = 0; jj < hevc->chunk->size; jj++) {
12376 if ((jj & 0xf) == 0)
12377 hevc_print(hevc,
12378 PRINT_FRAMEBASE_DATA,
12379 "%06x:", jj);
12380 hevc_print_cont(hevc,
12381 PRINT_FRAMEBASE_DATA,
12382 "%02x ", data[jj]);
12383 if (((jj + 1) & 0xf) == 0)
12384 hevc_print_cont(hevc,
12385 PRINT_FRAMEBASE_DATA,
12386 "\n");
12387 }
12388
12389 if (!hevc->chunk->block->is_mapped)
12390 codec_mm_unmap_phyaddr(data);
12391 }
12392 }
12393
12394}
12395
12396
12397static int ammvdec_h265_probe(struct platform_device *pdev)
12398{
12399
12400 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12401 struct hevc_state_s *hevc = NULL;
12402 int ret;
12403#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12404 int config_val;
12405#endif
12406 if (pdata == NULL) {
12407 pr_info("\nammvdec_h265 memory resource undefined.\n");
12408 return -EFAULT;
12409 }
12410
12411 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
12412 sizeof(struct hevc_state_s), GFP_KERNEL); */
12413 hevc = vmalloc(sizeof(struct hevc_state_s));
12414 if (hevc == NULL) {
12415 pr_info("\nammvdec_h265 device data allocation failed\n");
12416 return -ENOMEM;
12417 }
12418 memset(hevc, 0, sizeof(struct hevc_state_s));
12419
12420 /* the ctx from v4l2 driver. */
12421 hevc->v4l2_ctx = pdata->private;
12422
12423 pdata->private = hevc;
12424 pdata->dec_status = vh265_dec_status;
12425 /* pdata->set_trickmode = set_trickmode; */
12426 pdata->run_ready = run_ready;
12427 pdata->run = run;
12428 pdata->reset = reset;
12429 pdata->irq_handler = vh265_irq_cb;
12430 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
12431 pdata->dump_state = vh265_dump_state;
12432
12433 hevc->index = pdev->id;
12434 hevc->m_ins_flag = 1;
12435
12436 if (pdata->use_vfm_path) {
12437 snprintf(pdata->vf_provider_name,
12438 VDEC_PROVIDER_NAME_SIZE,
12439 VFM_DEC_PROVIDER_NAME);
12440 hevc->frameinfo_enable = 1;
12441 }
12442#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12443 else if (vdec_dual(pdata)) {
12444 struct hevc_state_s *hevc_pair = NULL;
12445
12446 if (dv_toggle_prov_name) /*debug purpose*/
12447 snprintf(pdata->vf_provider_name,
12448 VDEC_PROVIDER_NAME_SIZE,
12449 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
12450 VFM_DEC_DVEL_PROVIDER_NAME);
12451 else
12452 snprintf(pdata->vf_provider_name,
12453 VDEC_PROVIDER_NAME_SIZE,
12454 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
12455 VFM_DEC_DVBL_PROVIDER_NAME);
12456 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
12457 if (pdata->master)
12458 hevc_pair = (struct hevc_state_s *)
12459 pdata->master->private;
12460 else if (pdata->slave)
12461 hevc_pair = (struct hevc_state_s *)
12462 pdata->slave->private;
12463 if (hevc_pair)
12464 hevc->shift_byte_count_lo =
12465 hevc_pair->shift_byte_count_lo;
12466 }
12467#endif
12468 else
12469 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
12470 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
12471
12472 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
12473 &vh265_vf_provider, pdata);
12474
12475 hevc->provider_name = pdata->vf_provider_name;
12476 platform_set_drvdata(pdev, pdata);
12477
12478 hevc->platform_dev = pdev;
12479
12480 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
12481 pdata->config && pdata->config_len) {
12482#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12483 /* use ptr config for double_write_mode, etc. */
12484 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
12485
12486 if (get_config_int(pdata->config, "hevc_double_write_mode",
12487 &config_val) == 0)
12488 hevc->double_write_mode = config_val;
12489 else
12490 hevc->double_write_mode = double_write_mode;
12491
12492 if (get_config_int(pdata->config, "save_buffer_mode",
12493 &config_val) == 0)
12494 hevc->save_buffer_mode = config_val;
12495 else
12496 hevc->save_buffer_mode = 0;
12497
12498 /*use ptr config for max_pic_w, etc*/
12499 if (get_config_int(pdata->config, "hevc_buf_width",
12500 &config_val) == 0) {
12501 hevc->max_pic_w = config_val;
12502 }
12503 if (get_config_int(pdata->config, "hevc_buf_height",
12504 &config_val) == 0) {
12505 hevc->max_pic_h = config_val;
12506 }
12507
12508#endif
12509 } else {
12510 if (pdata->sys_info)
12511 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12512 else {
12513 hevc->vh265_amstream_dec_info.width = 0;
12514 hevc->vh265_amstream_dec_info.height = 0;
12515 hevc->vh265_amstream_dec_info.rate = 30;
12516 }
12517 hevc->double_write_mode = double_write_mode;
12518 }
12519 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
12520 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
12521 else
12522 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
12523
12524 if (mmu_enable_force == 0) {
12525 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
12526 hevc->mmu_enable = 0;
12527 else
12528 hevc->mmu_enable = 1;
12529 }
12530
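	/*
	 * Bit 7 of the sys_info param marks a v4l2-driven instance; such
	 * instances currently run with double write only (mode 0x10) and
	 * with the frame MMU disabled.
	 */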
12531 hevc->is_used_v4l = (((unsigned long)
12532 hevc->vh265_amstream_dec_info.param & 0x80) >> 7);
12533 if (hevc->is_used_v4l) {
12534 hevc->double_write_mode = 0x10;
12535 hevc->mmu_enable = 0;
12536 }
12537
12538 if (init_mmu_buffers(hevc) < 0) {
12539 hevc_print(hevc, 0,
12540 "\n 265 mmu init failed!\n");
12541 mutex_unlock(&vh265_mutex);
12542 /* devm_kfree(&pdev->dev, (void *)hevc);*/
12543 if (hevc)
12544 vfree((void *)hevc);
12545 pdata->dec_status = NULL;
12546 return -EFAULT;
12547 }
12548#if 0
12549 hevc->buf_start = pdata->mem_start;
12550 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
12551#else
12552
12553 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
12554 BMMU_WORKSPACE_ID, work_buf_size,
12555 DRIVER_NAME, &hevc->buf_start);
12556 if (ret < 0) {
12557 uninit_mmu_buffers(hevc);
12558 /* devm_kfree(&pdev->dev, (void *)hevc); */
12559 if (hevc)
12560 vfree((void *)hevc);
12561 pdata->dec_status = NULL;
12562 mutex_unlock(&vh265_mutex);
12563 return ret;
12564 }
12565 hevc->buf_size = work_buf_size;
12566#endif
12567 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12568 (parser_sei_enable & 0x100) == 0)
12569 parser_sei_enable = 7;
12570 hevc->init_flag = 0;
12571 hevc->first_sc_checked = 0;
12572 hevc->uninit_list = 0;
12573 hevc->fatal_error = 0;
12574 hevc->show_frame_num = 0;
12575
12576 /*
12577 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
12578 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
12579 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
12580 */
12581 if (get_dbg_flag(hevc)) {
12582 hevc_print(hevc, 0,
12583 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12584 hevc->buf_start, hevc->buf_size);
12585 }
12586
12587 hevc_print(hevc, 0,
12588 "dynamic_buf_num_margin=%d\n",
12589 hevc->dynamic_buf_num_margin);
12590 hevc_print(hevc, 0,
12591 "double_write_mode=%d\n",
12592 hevc->double_write_mode);
12593
12594 hevc->cma_dev = pdata->cma_dev;
12595
12596 if (vh265_init(pdata) < 0) {
12597 hevc_print(hevc, 0,
12598 "\namvdec_h265 init failed.\n");
12599 hevc_local_uninit(hevc);
12600 uninit_mmu_buffers(hevc);
12601 /* devm_kfree(&pdev->dev, (void *)hevc); */
12602 if (hevc)
12603 vfree((void *)hevc);
12604 pdata->dec_status = NULL;
12605 return -ENODEV;
12606 }
12607
12608 vdec_set_prepare_level(pdata, start_decode_buf_level);
12609
12610 /*set the max clk for smooth playing...*/
12611 hevc_source_changed(VFORMAT_HEVC,
12612 3840, 2160, 60);
12613 if (pdata->parallel_dec == 1)
12614 vdec_core_request(pdata, CORE_MASK_HEVC);
12615 else
12616 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
12617 | CORE_MASK_COMBINE);
12618
12619 return 0;
12620}
12621
12622static int ammvdec_h265_remove(struct platform_device *pdev)
12623{
12624 struct hevc_state_s *hevc =
12625 (struct hevc_state_s *)
12626 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
12627 struct vdec_s *vdec = hw_to_vdec(hevc);
12628
12629 if (hevc == NULL)
12630 return 0;
12631
12632 if (get_dbg_flag(hevc))
12633 hevc_print(hevc, 0, "%s\r\n", __func__);
12634
12635 vmh265_stop(hevc);
12636
12637 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
12638 if (vdec->parallel_dec == 1)
12639 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
12640 else
12641 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12642
12643 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
12644
12645 vfree((void *)hevc);
12646 return 0;
12647}
12648
12649static struct platform_driver ammvdec_h265_driver = {
12650 .probe = ammvdec_h265_probe,
12651 .remove = ammvdec_h265_remove,
12652 .driver = {
12653 .name = MULTI_DRIVER_NAME,
12654#ifdef CONFIG_PM
12655 .pm = &h265_pm_ops,
12656#endif
12657 }
12658};
12659#endif
12660
12661static struct codec_profile_t amvdec_h265_profile = {
12662 .name = "hevc",
12663 .profile = ""
12664};
12665
12666static struct codec_profile_t amvdec_h265_profile_single,
12667 amvdec_h265_profile_mult;
12668
12669static struct mconfig h265_configs[] = {
12670 MC_PU32("use_cma", &use_cma),
12671 MC_PU32("bit_depth_luma", &bit_depth_luma),
12672 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
12673 MC_PU32("video_signal_type", &video_signal_type),
12674#ifdef ERROR_HANDLE_DEBUG
12675 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
12676 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
12677#endif
12678 MC_PU32("radr", &radr),
12679 MC_PU32("rval", &rval),
12680 MC_PU32("dbg_cmd", &dbg_cmd),
12681 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
12682 MC_PU32("endian", &endian),
12683 MC_PU32("step", &step),
12684 MC_PU32("udebug_flag", &udebug_flag),
12685 MC_PU32("decode_pic_begin", &decode_pic_begin),
12686 MC_PU32("slice_parse_begin", &slice_parse_begin),
12687 MC_PU32("nal_skip_policy", &nal_skip_policy),
12688 MC_PU32("i_only_flag", &i_only_flag),
12689 MC_PU32("error_handle_policy", &error_handle_policy),
12690 MC_PU32("error_handle_threshold", &error_handle_threshold),
12691 MC_PU32("error_handle_nal_skip_threshold",
12692 &error_handle_nal_skip_threshold),
12693 MC_PU32("error_handle_system_threshold",
12694 &error_handle_system_threshold),
12695 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
12696 MC_PU32("debug", &debug),
12697 MC_PU32("debug_mask", &debug_mask),
12698 MC_PU32("buffer_mode", &buffer_mode),
12699 MC_PU32("double_write_mode", &double_write_mode),
12700 MC_PU32("buf_alloc_width", &buf_alloc_width),
12701 MC_PU32("buf_alloc_height", &buf_alloc_height),
12702 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
12703 MC_PU32("max_buf_num", &max_buf_num),
12704 MC_PU32("buf_alloc_size", &buf_alloc_size),
12705 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
12706 MC_PU32("mem_map_mode", &mem_map_mode),
12707 MC_PU32("enable_mem_saving", &enable_mem_saving),
12708 MC_PU32("force_w_h", &force_w_h),
12709 MC_PU32("force_fps", &force_fps),
12710 MC_PU32("max_decoding_time", &max_decoding_time),
12711 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
12712 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
12713 MC_PU32("interlace_enable", &interlace_enable),
12714 MC_PU32("pts_unstable", &pts_unstable),
12715 MC_PU32("parser_sei_enable", &parser_sei_enable),
12716 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
12717 MC_PU32("decode_timeout_val", &decode_timeout_val),
12718#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12719 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
12720 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
12721 MC_PU32("dv_debug", &dv_debug),
12722#endif
12723};
12724static struct mconfig_node decoder_265_node;
12725
12726static int __init amvdec_h265_driver_init_module(void)
12727{
12728 struct BuffInfo_s *p_buf_info;
12729
12730 if (vdec_is_support_4k()) {
12731 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
12732 p_buf_info = &amvh265_workbuff_spec[2];
12733 else
12734 p_buf_info = &amvh265_workbuff_spec[1];
12735 } else
12736 p_buf_info = &amvh265_workbuff_spec[0];
12737
12738 init_buff_spec(NULL, p_buf_info);
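	/* round the workspace size up to a 64KB boundary */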
12739 work_buf_size =
12740 (p_buf_info->end_adr - p_buf_info->start_adr
12741 + 0xffff) & (~0xffff);
12742
12743 pr_debug("amvdec_h265 module init\n");
12744 error_handle_policy = 0;
12745
12746#ifdef ERROR_HANDLE_DEBUG
12747 dbg_nal_skip_flag = 0;
12748 dbg_nal_skip_count = 0;
12749#endif
12750 udebug_flag = 0;
12751 decode_pic_begin = 0;
12752 slice_parse_begin = 0;
12753 step = 0;
12754 buf_alloc_size = 0;
12755
12756#ifdef MULTI_INSTANCE_SUPPORT
12757 if (platform_driver_register(&ammvdec_h265_driver))
12758 pr_err("failed to register ammvdec_h265 driver\n");
12759
12760#endif
12761 if (platform_driver_register(&amvdec_h265_driver)) {
12762 pr_err("failed to register amvdec_h265 driver\n");
12763 return -ENODEV;
12764 }
12765#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
12766 if (!has_hevc_vdec()) {
12767 /* HEVC is not supported */
12768 amvdec_h265_profile.name = "hevc_unsupport";
12769 }
12770 if (vdec_is_support_4k()) {
12771 if (is_meson_m8m2_cpu()) {
12772 /* m8m2 support 4k */
12773 amvdec_h265_profile.profile = "4k";
12774 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
12775 amvdec_h265_profile.profile =
12776 "8k, 8bit, 10bit, dwrite, compressed";
12777 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
12778 amvdec_h265_profile.profile =
12779 "4k, 8bit, 10bit, dwrite, compressed";
12780 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
12781 amvdec_h265_profile.profile = "4k";
12782 }
12783#endif
12784 if (codec_mm_get_total_size() < 80 * SZ_1M) {
12785 pr_info("amvdec_h265 default mmu enabled.\n");
12786 mmu_enable = 1;
12787 }
12788
12789 vcodec_profile_register(&amvdec_h265_profile);
12790 amvdec_h265_profile_single = amvdec_h265_profile;
12791 amvdec_h265_profile_single.name = "h265";
12792 vcodec_profile_register(&amvdec_h265_profile_single);
12793 amvdec_h265_profile_mult = amvdec_h265_profile;
12794 amvdec_h265_profile_mult.name = "mh265";
12795 vcodec_profile_register(&amvdec_h265_profile_mult);
12796 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
12797 "h265", h265_configs, CONFIG_FOR_RW);
12798 return 0;
12799}
12800
12801static void __exit amvdec_h265_driver_remove_module(void)
12802{
12803 pr_debug("amvdec_h265 module remove.\n");
12804
12805#ifdef MULTI_INSTANCE_SUPPORT
12806 platform_driver_unregister(&ammvdec_h265_driver);
12807#endif
12808 platform_driver_unregister(&amvdec_h265_driver);
12809}
12810
12811/****************************************/
12812/*
12813 *module_param(stat, uint, 0664);
12814 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
12815 */
12816module_param(use_cma, uint, 0664);
12817MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
12818
12819module_param(bit_depth_luma, uint, 0664);
12820MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
12821
12822module_param(bit_depth_chroma, uint, 0664);
12823MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
12824
12825module_param(video_signal_type, uint, 0664);
12826MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
12827
12828#ifdef ERROR_HANDLE_DEBUG
12829module_param(dbg_nal_skip_flag, uint, 0664);
12830MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
12831
12832module_param(dbg_nal_skip_count, uint, 0664);
12833MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
12834#endif
12835
12836module_param(radr, uint, 0664);
12837MODULE_PARM_DESC(radr, "\n radr\n");
12838
12839module_param(rval, uint, 0664);
12840MODULE_PARM_DESC(rval, "\n rval\n");
12841
12842module_param(dbg_cmd, uint, 0664);
12843MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
12844
12845module_param(dump_nal, uint, 0664);
12846MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
12847
12848module_param(dbg_skip_decode_index, uint, 0664);
12849MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
12850
12851module_param(endian, uint, 0664);
12852 MODULE_PARM_DESC(endian, "\n endian\n");
12853
12854module_param(step, uint, 0664);
12855MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
12856
12857module_param(decode_pic_begin, uint, 0664);
12858MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
12859
12860module_param(slice_parse_begin, uint, 0664);
12861MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
12862
12863module_param(nal_skip_policy, uint, 0664);
12864MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
12865
12866module_param(i_only_flag, uint, 0664);
12867MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
12868
12869module_param(fast_output_enable, uint, 0664);
12870MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
12871
12872module_param(error_handle_policy, uint, 0664);
12873MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
12874
12875module_param(error_handle_threshold, uint, 0664);
12876MODULE_PARM_DESC(error_handle_threshold,
12877 "\n amvdec_h265 error_handle_threshold\n");
12878
12879module_param(error_handle_nal_skip_threshold, uint, 0664);
12880MODULE_PARM_DESC(error_handle_nal_skip_threshold,
12881 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
12882
12883module_param(error_handle_system_threshold, uint, 0664);
12884MODULE_PARM_DESC(error_handle_system_threshold,
12885 "\n amvdec_h265 error_handle_system_threshold\n");
12886
12887module_param(error_skip_nal_count, uint, 0664);
12888MODULE_PARM_DESC(error_skip_nal_count,
12889 "\n amvdec_h265 error_skip_nal_count\n");
12890
12891module_param(debug, uint, 0664);
12892MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
12893
12894module_param(debug_mask, uint, 0664);
12895MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
12896
12897module_param(log_mask, uint, 0664);
12898MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
12899
12900module_param(buffer_mode, uint, 0664);
12901MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
12902
12903module_param(double_write_mode, uint, 0664);
12904MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
12905
12906module_param(buf_alloc_width, uint, 0664);
12907MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
12908
12909module_param(buf_alloc_height, uint, 0664);
12910MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
12911
12912module_param(dynamic_buf_num_margin, uint, 0664);
12913MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
12914
12915module_param(max_buf_num, uint, 0664);
12916MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
12917
12918module_param(buf_alloc_size, uint, 0664);
12919MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
12920
12921#ifdef CONSTRAIN_MAX_BUF_NUM
12922module_param(run_ready_max_vf_only_num, uint, 0664);
12923MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");
12924
12925module_param(run_ready_display_q_num, uint, 0664);
12926MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");
12927
12928module_param(run_ready_max_buf_num, uint, 0664);
12929MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
12930#endif
12931
12932#if 0
12933module_param(re_config_pic_flag, uint, 0664);
12934MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
12935#endif
12936
12937module_param(buffer_mode_dbg, uint, 0664);
12938MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");
12939
12940module_param(mem_map_mode, uint, 0664);
12941MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");
12942
12943module_param(enable_mem_saving, uint, 0664);
12944MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");
12945
12946module_param(force_w_h, uint, 0664);
12947MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
12948
12949module_param(force_fps, uint, 0664);
12950MODULE_PARM_DESC(force_fps, "\n force_fps\n");
12951
12952module_param(max_decoding_time, uint, 0664);
12953MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");
12954
12955module_param(prefix_aux_buf_size, uint, 0664);
12956MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");
12957
12958module_param(suffix_aux_buf_size, uint, 0664);
12959MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");
12960
12961module_param(interlace_enable, uint, 0664);
12962MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
12963module_param(pts_unstable, uint, 0664);
12964MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
12965module_param(parser_sei_enable, uint, 0664);
12966MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");
12967
12968#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12969module_param(parser_dolby_vision_enable, uint, 0664);
12970MODULE_PARM_DESC(parser_dolby_vision_enable,
12971 "\n parser_dolby_vision_enable\n");
12972
12973module_param(dolby_meta_with_el, uint, 0664);
12974MODULE_PARM_DESC(dolby_meta_with_el,
12975 "\n dolby_meta_with_el\n");
12976
12977module_param(dolby_el_flush_th, uint, 0664);
12978MODULE_PARM_DESC(dolby_el_flush_th,
12979 "\n dolby_el_flush_th\n");
12980#endif
12981module_param(mmu_enable, uint, 0664);
12982MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");
12983
12984module_param(mmu_enable_force, uint, 0664);
12985MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");
12986
12987#ifdef MULTI_INSTANCE_SUPPORT
12988module_param(start_decode_buf_level, int, 0664);
12989MODULE_PARM_DESC(start_decode_buf_level,
12990 "\n h265 start_decode_buf_level\n");
12991
12992module_param(decode_timeout_val, uint, 0664);
12993MODULE_PARM_DESC(decode_timeout_val,
12994 "\n h265 decode_timeout_val\n");
12995
12996module_param(data_resend_policy, uint, 0664);
12997MODULE_PARM_DESC(data_resend_policy,
12998 "\n h265 data_resend_policy\n");
12999
13000module_param_array(decode_frame_count, uint,
13001 &max_decode_instance_num, 0664);
13002
13003module_param_array(display_frame_count, uint,
13004 &max_decode_instance_num, 0664);
13005
13006module_param_array(max_process_time, uint,
13007 &max_decode_instance_num, 0664);
13008
13009module_param_array(max_get_frame_interval,
13010 uint, &max_decode_instance_num, 0664);
13011
13012module_param_array(run_count, uint,
13013 &max_decode_instance_num, 0664);
13014
13015module_param_array(input_empty, uint,
13016 &max_decode_instance_num, 0664);
13017
13018module_param_array(not_run_ready, uint,
13019 &max_decode_instance_num, 0664);
13020
13021module_param_array(ref_frame_mark_flag, uint,
13022 &max_decode_instance_num, 0664);
13023
13024#endif
13025#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13026module_param(dv_toggle_prov_name, uint, 0664);
13027MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");
13028
13029module_param(dv_debug, uint, 0664);
13030MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");
13031
13032module_param(force_bypass_dvenl, uint, 0664);
13033MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
13034#endif
13035
13036#ifdef AGAIN_HAS_THRESHOLD
13037module_param(again_threshold, uint, 0664);
13038MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
13039#endif
13040
13041module_param(force_disp_pic_index, int, 0664);
13042MODULE_PARM_DESC(force_disp_pic_index,
13043 "\n amvdec_h265 force_disp_pic_index\n");
13044
13045module_param(frmbase_cont_bitlevel, uint, 0664);
13046MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");
13047
13048module_param(udebug_flag, uint, 0664);
13049MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
13050
13051module_param(udebug_pause_pos, uint, 0664);
13052MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
13053
13054module_param(udebug_pause_val, uint, 0664);
13055MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
13056
13057module_param(pre_decode_buf_level, int, 0664);
13058MODULE_PARM_DESC(pre_decode_buf_level, "\n ammvdec_h264 pre_decode_buf_level\n");
13059
13060module_param(udebug_pause_decode_idx, uint, 0664);
13061MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
13062
13063module_param(disp_vframe_valve_level, uint, 0664);
13064MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");
13065
13066module_param(pic_list_debug, uint, 0664);
13067MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");
13068
13069module_param(without_display_mode, uint, 0664);
13070MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");
13071
13072module_init(amvdec_h265_driver_init_module);
13073module_exit(amvdec_h265_driver_remove_module);
13074
13075MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
13076MODULE_LICENSE("GPL");
13077MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");
13078