path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
blob: d1bfec795edb3d7fbe93beeebb65394c3137ba8a
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50
51#define CONSTRAIN_MAX_BUF_NUM
52
53#define SWAP_HEVC_UCODE
54#define DETREFILL_ENABLE
55
56#define AGAIN_HAS_THRESHOLD
57/*#define TEST_NO_BUF*/
58#define HEVC_PIC_STRUCT_SUPPORT
59#define MULTI_INSTANCE_SUPPORT
60#define USE_UNINIT_SEMA
61
62 /* .buf_size = 0x100000*16,
63 //4k2k , 0x100000 per buffer */
64 /* 4096x2304 , 0x120000 per buffer */
65#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
66#define MPRED_4K_MV_BUF_SIZE (0x120000)
67#define MPRED_MV_BUF_SIZE (0x40000)
68
69#define MMU_COMPRESS_HEADER_SIZE 0x48000
70#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
71
72#define MAX_FRAME_4K_NUM 0x1200
73#define MAX_FRAME_8K_NUM (0x1200*4)
74
75//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
76#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
77
78#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
79
80#define HEVC_CM_HEADER_START_ADDR 0x3628
81#define HEVC_SAO_MMU_VH1_ADDR 0x363b
82#define HEVC_SAO_MMU_VH0_ADDR 0x363a
83
84#define HEVC_DBLK_CFGB 0x350b
85#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
86#define SWAP_HEVC_OFFSET (3 * 0x1000)
87
88#define MEM_NAME "codec_265"
89/* #include <mach/am_regs.h> */
90#include <linux/amlogic/media/utils/vdec_reg.h>
91
92#include "../utils/vdec.h"
93#include "../utils/amvdec.h"
94#include <linux/amlogic/media/video_sink/video.h>
95#include <linux/amlogic/media/codec_mm/configs.h>
96
97#define SEND_LMEM_WITH_RPM
98#define SUPPORT_10BIT
99/* #define ERROR_HANDLE_DEBUG */
100
101#ifndef STAT_KTHREAD
102#define STAT_KTHREAD 0x40
103#endif
104
105#ifdef MULTI_INSTANCE_SUPPORT
106#define MAX_DECODE_INSTANCE_NUM 9
107#define MULTI_DRIVER_NAME "ammvdec_h265"
108#endif
109#define DRIVER_NAME "amvdec_h265"
110#define MODULE_NAME "amvdec_h265"
111#define DRIVER_HEADER_NAME "amvdec_h265_header"
112
113#define PUT_INTERVAL (HZ/100)
114#define ERROR_SYSTEM_RESET_COUNT 200
115
116#define PTS_NORMAL 0
117#define PTS_NONE_REF_USE_DURATION 1
118
119#define PTS_MODE_SWITCHING_THRESHOLD 3
120#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
121
122#define DUR2PTS(x) ((x)*90/96)
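/*
 * Worked example (illustrative note, assuming frame_dur is expressed in
 * 1/96000 s units as elsewhere in the amports stack): a 25 fps stream has
 * frame_dur = 96000 / 25 = 3840, and DUR2PTS(3840) = 3840 * 90 / 96 = 3600,
 * i.e. the same interval expressed in 90 kHz PTS ticks.
 */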
123
124#define MAX_SIZE_8K (8192 * 4608)
125#define MAX_SIZE_4K (4096 * 2304)
126
127#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
128#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
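/*
 * Illustrative note (not in the original source): for a 3840x2160 stream,
 * w * h = 8,294,400, which is above 1920 * 1088 = 2,088,960 but below
 * MAX_SIZE_4K = 4096 * 2304 = 9,437,184, so IS_4K_SIZE() evaluates to true
 * while IS_8K_SIZE() evaluates to false.
 */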
129
130#define SEI_UserDataITU_T_T35 4
131
132static struct semaphore h265_sema;
133
134struct hevc_state_s;
135static int hevc_print(struct hevc_state_s *hevc,
136 int debug_flag, const char *fmt, ...);
137static int hevc_print_cont(struct hevc_state_s *hevc,
138 int debug_flag, const char *fmt, ...);
139static int vh265_vf_states(struct vframe_states *states, void *);
140static struct vframe_s *vh265_vf_peek(void *);
141static struct vframe_s *vh265_vf_get(void *);
142static void vh265_vf_put(struct vframe_s *, void *);
143static int vh265_event_cb(int type, void *data, void *private_data);
144
145static int vh265_stop(struct hevc_state_s *hevc);
146#ifdef MULTI_INSTANCE_SUPPORT
147static int vmh265_stop(struct hevc_state_s *hevc);
148static s32 vh265_init(struct vdec_s *vdec);
149static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
150static void reset_process_time(struct hevc_state_s *hevc);
151static void start_process_time(struct hevc_state_s *hevc);
152static void restart_process_time(struct hevc_state_s *hevc);
153static void timeout_process(struct hevc_state_s *hevc);
154#else
155static s32 vh265_init(struct hevc_state_s *hevc);
156#endif
157static void vh265_prot_init(struct hevc_state_s *hevc);
158static int vh265_local_init(struct hevc_state_s *hevc);
159static void vh265_check_timer_func(unsigned long arg);
160static void config_decode_mode(struct hevc_state_s *hevc);
161
162static const char vh265_dec_id[] = "vh265-dev";
163
164#define PROVIDER_NAME "decoder.h265"
165#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
166
167static const struct vframe_operations_s vh265_vf_provider = {
168 .peek = vh265_vf_peek,
169 .get = vh265_vf_get,
170 .put = vh265_vf_put,
171 .event_cb = vh265_event_cb,
172 .vf_states = vh265_vf_states,
173};
174
175static struct vframe_provider_s vh265_vf_prov;
176
177static u32 bit_depth_luma;
178static u32 bit_depth_chroma;
179static u32 video_signal_type;
180
181static int start_decode_buf_level = 0x8000;
182
183static unsigned int decode_timeout_val = 200;
184
185/*data_resend_policy:
186 bit 0: for stream-based input, resend data when the decoding buffer is empty
187*/
188static u32 data_resend_policy = 1;
189
190#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
191/*
192static const char * const video_format_names[] = {
193 "component", "PAL", "NTSC", "SECAM",
194 "MAC", "unspecified", "unspecified", "unspecified"
195};
196
197static const char * const color_primaries_names[] = {
198 "unknown", "bt709", "undef", "unknown",
199 "bt470m", "bt470bg", "smpte170m", "smpte240m",
200 "film", "bt2020"
201};
202
203static const char * const transfer_characteristics_names[] = {
204 "unknown", "bt709", "undef", "unknown",
205 "bt470m", "bt470bg", "smpte170m", "smpte240m",
206 "linear", "log100", "log316", "iec61966-2-4",
207 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
208 "smpte-st-2084", "smpte-st-428"
209};
210
211static const char * const matrix_coeffs_names[] = {
212 "GBR", "bt709", "undef", "unknown",
213 "fcc", "bt470bg", "smpte170m", "smpte240m",
214 "YCgCo", "bt2020nc", "bt2020c"
215};
216*/
217#ifdef SUPPORT_10BIT
218#define HEVC_CM_BODY_START_ADDR 0x3626
219#define HEVC_CM_BODY_LENGTH 0x3627
220#define HEVC_CM_HEADER_LENGTH 0x3629
221#define HEVC_CM_HEADER_OFFSET 0x362b
222#define HEVC_SAO_CTRL9 0x362d
223#define LOSLESS_COMPRESS_MODE
224/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
225/* double_write_mode:
226 * 0, no double write;
227 * 1, 1:1 ratio;
228 * 2, (1/4):(1/4) ratio;
229 * 3, (1/4):(1/4) ratio, with both compressed frame included
230 * 4, (1/2):(1/2) ratio;
231 * 0x10, double write only
232 * 0x100, if > 1080p,use mode 4,else use mode 1;
233 * 0x200, if > 1080p,use mode 2,else use mode 1;
234 * 0x300, if > 720p, use mode 4, else use mode 1;
235 */
236static u32 double_write_mode;
237
238/*#define DECOMP_HEADR_SURGENT*/
239
240static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
241static u32 enable_mem_saving = 1;
242static u32 workaround_enable;
243static u32 force_w_h;
244#endif
245static u32 force_fps;
246static u32 pts_unstable;
247#define H265_DEBUG_BUFMGR 0x01
248#define H265_DEBUG_BUFMGR_MORE 0x02
249#define H265_DEBUG_DETAIL 0x04
250#define H265_DEBUG_REG 0x08
251#define H265_DEBUG_MAN_SEARCH_NAL 0x10
252#define H265_DEBUG_MAN_SKIP_NAL 0x20
253#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
254#define H265_DEBUG_FORCE_CLK 0x80
255#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
256#define H265_DEBUG_NO_DISPLAY 0x200
257#define H265_DEBUG_DISCARD_NAL 0x400
258#define H265_DEBUG_OUT_PTS 0x800
259#define H265_DEBUG_DUMP_PIC_LIST 0x1000
260#define H265_DEBUG_PRINT_SEI 0x2000
261#define H265_DEBUG_PIC_STRUCT 0x4000
262#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
263#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
264#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
265#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
266#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
267#define H265_DEBUG_HW_RESET 0x100000
268#define H265_CFG_CANVAS_IN_DECODE 0x200000
269#define H265_DEBUG_DV 0x400000
270#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
271#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
272#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
273#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
274#ifdef MULTI_INSTANCE_SUPPORT
275#define PRINT_FLAG_ERROR 0x0
276#define IGNORE_PARAM_FROM_CONFIG 0x08000000
277#define PRINT_FRAMEBASE_DATA 0x10000000
278#define PRINT_FLAG_VDEC_STATUS 0x20000000
279#define PRINT_FLAG_VDEC_DETAIL 0x40000000
280#define PRINT_FLAG_V4L_DETAIL 0x80000000
281#endif
282
283#define BUF_POOL_SIZE 32
284#define MAX_BUF_NUM 24
285#define MAX_REF_PIC_NUM 24
286#define MAX_REF_ACTIVE 16
287
288#ifdef MV_USE_FIXED_BUF
289#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
290#define VF_BUFFER_IDX(n) (n)
291#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
292#else
293#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
294#define VF_BUFFER_IDX(n) (n)
295#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
296#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
297#endif
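/*
 * Illustrative note (not in the original source): without MV_USE_FIXED_BUF,
 * the bmmu box holds BUF_POOL_SIZE (32) frame buffers at indexes 0..31, the
 * workspace at index 32, and MAX_REF_PIC_NUM (24) mv buffers at indexes
 * 33..56, giving BMMU_MAX_BUFFERS = 57 slots in total.
 */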
298
299#define HEVC_MV_INFO 0x310d
300#define HEVC_QP_INFO 0x3137
301#define HEVC_SKIP_INFO 0x3136
302
303const u32 h265_version = 201602101;
304static u32 debug_mask = 0xffffffff;
305static u32 log_mask;
306static u32 debug;
307static u32 radr;
308static u32 rval;
309static u32 dbg_cmd;
310static u32 dump_nal;
311static u32 dbg_skip_decode_index;
312static u32 endian = 0xff0;
313#ifdef ERROR_HANDLE_DEBUG
314static u32 dbg_nal_skip_flag;
315 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
316static u32 dbg_nal_skip_count;
317#endif
318/*for debug*/
319/*
320 udebug_flag:
321 bit 0, enable ucode print
322 bit 1, enable ucode detail print
323 bit [31:16] not 0, pos to dump lmem
324 bit 2, pop bits to lmem
325 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
326*/
327static u32 udebug_flag;
328/*
329 when udebug_flag[1:0] is not 0
330 udebug_pause_pos not 0,
331 pause position
332*/
333static u32 udebug_pause_pos;
334/*
335 when udebug_flag[1:0] is not 0
336 and udebug_pause_pos is not 0,
337 pause only when DEBUG_REG2 is equal to this val
338*/
339static u32 udebug_pause_val;
340
341static u32 udebug_pause_decode_idx;
342
343static u32 decode_pic_begin;
344static uint slice_parse_begin;
345static u32 step;
346static bool is_reset;
347
348#ifdef CONSTRAIN_MAX_BUF_NUM
349static u32 run_ready_max_vf_only_num;
350static u32 run_ready_display_q_num;
351 /*0: not check
352 0xff: work_pic_num
353 */
354static u32 run_ready_max_buf_num = 0xff;
355#endif
356
357static u32 dynamic_buf_num_margin = 7;
358static u32 buf_alloc_width;
359static u32 buf_alloc_height;
360
361static u32 max_buf_num = 16;
362static u32 buf_alloc_size;
363/*static u32 re_config_pic_flag;*/
364/*
365 *bit[0]: 0,
366 *bit[1]: 0, always release cma buffer when stop
367 *bit[1]: 1, never release cma buffer when stop
368 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
369 *do not release cma buffer if blackout is not 1
370 *
371 *bit[2]: 0, when start decoding, check current displayed buffer
372 * (only for buffer decoded by h265) if blackout is 0
373 * 1, do not check current displayed buffer
374 *
375 *bit[3]: 1, if blackout is not 1, do not release current
376 * displayed cma buffer always.
377 */
378/* set to 1 for fast play;
379 * set to 8 for other case of "keep last frame"
380 */
381static u32 buffer_mode = 1;
382
383/* buffer_mode_dbg: debug only*/
384static u32 buffer_mode_dbg = 0xffff0000;
385/**/
386/*
387 *bit[1:0]PB_skip_mode: 0, start decoding at begin;
388 *1, start decoding after first I;
389 *2, only decode and display error-free pictures;
390 *3, start decoding and display after IDR,etc
391 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
392 *only for mode 0 and 1.
393 */
394static u32 nal_skip_policy = 2;
395
396/*
397 *bit 0, 1: only display I picture;
398 *bit 1, 1: only decode I picture;
399 */
400static u32 i_only_flag;
401
402/*
403bit 0, fast output first I picture
404*/
405static u32 fast_output_enable = 1;
406
407static u32 frmbase_cont_bitlevel = 0x60;
408
409/*
410use_cma: 1, use both reserved memory and cma for buffers
4112, only use cma for buffers
412*/
413static u32 use_cma = 2;
414
415#define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
416static u32 prefix_aux_buf_size = (16 * 1024);
417static u32 suffix_aux_buf_size;
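/*
 * Illustrative note (not in the original source): AUX_BUF_ALIGN() rounds up
 * to the next 16-byte boundary, e.g. AUX_BUF_ALIGN(0x1001) = 0x1010, while an
 * already aligned address such as 0x1000 is returned unchanged, so addresses
 * derived with it are always 16-byte aligned.
 */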
418
419static u32 max_decoding_time;
420/*
421 *error handling
422 */
423/*error_handle_policy:
424 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
425 *1, skip error_skip_nal_count nals before error recovery;
426 *bit 1 (valid only when bit0 == 1):
427 *1, wait vps/sps/pps after error recovery;
428 *bit 2 (valid only when bit0 == 0):
429 *0, auto search after error recovery (hevc_recover() called);
430 *1, manual search after error recovery
431 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
432 *
433 *bit 4: 0, set error_mark after reset/recover
434 * 1, do not set error_mark after reset/recover
435 *bit 5: 0, check total lcu for every picture
436 * 1, do not check total lcu
437 *bit 6: 0, do not check head error
438 * 1, check head error
439 *
440 */
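/*
 * Example reading of the bits above (illustrative, not in the original
 * source): error_handle_policy = 0x44 sets bit 2 (manual NAL search after
 * error recovery, since bit 0 is 0) and bit 6 (check head error); every
 * other behaviour keeps its 0 default.
 */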
441
442static u32 error_handle_policy;
443static u32 error_skip_nal_count = 6;
444static u32 error_handle_threshold = 30;
445static u32 error_handle_nal_skip_threshold = 10;
446static u32 error_handle_system_threshold = 30;
447static u32 interlace_enable = 1;
448static u32 fr_hint_status;
449
450 /*
451 *parser_sei_enable:
452 * bit 0, sei;
453 * bit 1, sei_suffix (fill aux buf)
454 * bit 2, fill sei to aux buf (when bit 0 is 1)
455 * bit 8, debug flag
456 */
457static u32 parser_sei_enable;
458#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
459static u32 parser_dolby_vision_enable = 1;
460static u32 dolby_meta_with_el;
461static u32 dolby_el_flush_th = 2;
462#endif
463/* this is only for h265 mmu enable */
464
465static u32 mmu_enable = 1;
466static u32 mmu_enable_force;
467static u32 work_buf_size;
468static unsigned int force_disp_pic_index;
469static unsigned int disp_vframe_valve_level;
470static int pre_decode_buf_level = 0x1000;
471static unsigned int pic_list_debug;
472
473
474#ifdef MULTI_INSTANCE_SUPPORT
475static unsigned int max_decode_instance_num
476 = MAX_DECODE_INSTANCE_NUM;
477static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
478static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
479static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
480static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
481static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
482static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
483static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
484static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
485{1, 1, 1, 1, 1, 1, 1, 1, 1};
486
487#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
488static unsigned char get_idx(struct hevc_state_s *hevc);
489#endif
490
491#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
492static u32 dv_toggle_prov_name;
493
494static u32 dv_debug;
495
496static u32 force_bypass_dvenl;
497#endif
498#endif
499
500
501#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
502#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
503#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
504#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
505#else
506#define get_dbg_flag(hevc) debug
507#define get_dbg_flag2(hevc) debug
508#define is_log_enable(hevc) (log_mask ? 1 : 0)
509#define get_valid_double_write_mode(hevc) double_write_mode
510#define get_buf_alloc_width(hevc) buf_alloc_width
511#define get_buf_alloc_height(hevc) buf_alloc_height
512#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
513#endif
514#define get_buffer_mode(hevc) buffer_mode
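/*
 * Illustrative note (not in the original source): when
 * CONFIG_AMLOGIC_MEDIA_MULTI_DEC is enabled, debug_mask gates the debug flags
 * per decoder instance, e.g. with debug_mask = 0x3 only instances 0 and 1 see
 * a non-zero get_dbg_flag()/get_dbg_flag2(); other instances decode silently.
 */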
515
516
517DEFINE_SPINLOCK(lock);
518struct task_struct *h265_task = NULL;
519#undef DEBUG_REG
520#ifdef DEBUG_REG
521void WRITE_VREG_DBG(unsigned adr, unsigned val)
522{
523 if (debug & H265_DEBUG_REG)
524 pr_info("%s(%x, %x)\n", __func__, adr, val);
525 WRITE_VREG(adr, val);
526}
527
528#undef WRITE_VREG
529#define WRITE_VREG WRITE_VREG_DBG
530#endif
531
532static DEFINE_MUTEX(vh265_mutex);
533
534static DEFINE_MUTEX(vh265_log_mutex);
535
536static struct vdec_info *gvs;
537
538static u32 without_display_mode;
539
540/**************************************************
541 *
542 *h265 buffer management include
543 *
544 ***************************************************
545 */
546enum NalUnitType {
547 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
548 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
549
550 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
551 /* Current name in the spec: TSA_R */
552 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
553
554 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
555 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
556
557 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
558 /* Current name in the spec: RADL_R */
559 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
560
561 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
562 /* Current name in the spec: RASL_R */
563 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
564
565 NAL_UNIT_RESERVED_10,
566 NAL_UNIT_RESERVED_11,
567 NAL_UNIT_RESERVED_12,
568 NAL_UNIT_RESERVED_13,
569 NAL_UNIT_RESERVED_14,
570 NAL_UNIT_RESERVED_15,
571
572 /* Current name in the spec: BLA_W_LP */
573 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
574 /* Current name in the spec: BLA_W_DLP */
575 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
576 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
577 /* Current name in the spec: IDR_W_DLP */
578 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
579 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
580 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
581 NAL_UNIT_RESERVED_22,
582 NAL_UNIT_RESERVED_23,
583
584 NAL_UNIT_RESERVED_24,
585 NAL_UNIT_RESERVED_25,
586 NAL_UNIT_RESERVED_26,
587 NAL_UNIT_RESERVED_27,
588 NAL_UNIT_RESERVED_28,
589 NAL_UNIT_RESERVED_29,
590 NAL_UNIT_RESERVED_30,
591 NAL_UNIT_RESERVED_31,
592
593 NAL_UNIT_VPS, /* 32 */
594 NAL_UNIT_SPS, /* 33 */
595 NAL_UNIT_PPS, /* 34 */
596 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
597 NAL_UNIT_EOS, /* 36 */
598 NAL_UNIT_EOB, /* 37 */
599 NAL_UNIT_FILLER_DATA, /* 38 */
600 NAL_UNIT_SEI, /* 39 Prefix SEI */
601 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
602 NAL_UNIT_RESERVED_41,
603 NAL_UNIT_RESERVED_42,
604 NAL_UNIT_RESERVED_43,
605 NAL_UNIT_RESERVED_44,
606 NAL_UNIT_RESERVED_45,
607 NAL_UNIT_RESERVED_46,
608 NAL_UNIT_RESERVED_47,
609 NAL_UNIT_UNSPECIFIED_48,
610 NAL_UNIT_UNSPECIFIED_49,
611 NAL_UNIT_UNSPECIFIED_50,
612 NAL_UNIT_UNSPECIFIED_51,
613 NAL_UNIT_UNSPECIFIED_52,
614 NAL_UNIT_UNSPECIFIED_53,
615 NAL_UNIT_UNSPECIFIED_54,
616 NAL_UNIT_UNSPECIFIED_55,
617 NAL_UNIT_UNSPECIFIED_56,
618 NAL_UNIT_UNSPECIFIED_57,
619 NAL_UNIT_UNSPECIFIED_58,
620 NAL_UNIT_UNSPECIFIED_59,
621 NAL_UNIT_UNSPECIFIED_60,
622 NAL_UNIT_UNSPECIFIED_61,
623 NAL_UNIT_UNSPECIFIED_62,
624 NAL_UNIT_UNSPECIFIED_63,
625 NAL_UNIT_INVALID,
626};
627
628/* --------------------------------------------------- */
629/* Amrisc Software Interrupt */
630/* --------------------------------------------------- */
631#define AMRISC_STREAM_EMPTY_REQ 0x01
632#define AMRISC_PARSER_REQ 0x02
633#define AMRISC_MAIN_REQ 0x04
634
635/* --------------------------------------------------- */
636/* HEVC_DEC_STATUS define */
637/* --------------------------------------------------- */
638#define HEVC_DEC_IDLE 0x0
639#define HEVC_NAL_UNIT_VPS 0x1
640#define HEVC_NAL_UNIT_SPS 0x2
641#define HEVC_NAL_UNIT_PPS 0x3
642#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
643#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
644#define HEVC_SLICE_DECODING 0x6
645#define HEVC_NAL_UNIT_SEI 0x7
646#define HEVC_SLICE_SEGMENT_DONE 0x8
647#define HEVC_NAL_SEARCH_DONE 0x9
648#define HEVC_DECPIC_DATA_DONE 0xa
649#define HEVC_DECPIC_DATA_ERROR 0xb
650#define HEVC_SEI_DAT 0xc
651#define HEVC_SEI_DAT_DONE 0xd
652#define HEVC_NAL_DECODE_DONE 0xe
653#define HEVC_OVER_DECODE 0xf
654
655#define HEVC_DATA_REQUEST 0x12
656
657#define HEVC_DECODE_BUFEMPTY 0x20
658#define HEVC_DECODE_TIMEOUT 0x21
659#define HEVC_SEARCH_BUFEMPTY 0x22
660#define HEVC_DECODE_OVER_SIZE 0x23
661#define HEVC_DECODE_BUFEMPTY2 0x24
662#define HEVC_FIND_NEXT_PIC_NAL 0x50
663#define HEVC_FIND_NEXT_DVEL_NAL 0x51
664
665#define HEVC_DUMP_LMEM 0x30
666
667#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
668#define HEVC_DISCARD_NAL 0xf0
669#define HEVC_ACTION_DEC_CONT 0xfd
670#define HEVC_ACTION_ERROR 0xfe
671#define HEVC_ACTION_DONE 0xff
672
673/* --------------------------------------------------- */
674/* Include "parser_cmd.h" */
675/* --------------------------------------------------- */
676#define PARSER_CMD_SKIP_CFG_0 0x0000090b
677
678#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
679
680#define PARSER_CMD_SKIP_CFG_2 0x001b1910
681
682#define PARSER_CMD_NUMBER 37
683
684/**************************************************
685 *
686 *h265 buffer management
687 *
688 ***************************************************
689 */
690/* #define BUFFER_MGR_ONLY */
691/* #define CONFIG_HEVC_CLK_FORCED_ON */
692/* #define ENABLE_SWAP_TEST */
693#define MCRCC_ENABLE
694#define INVALID_POC 0x80000000
695
696#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
697#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
698#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
699#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
700#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
701#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
702#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
703#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
704#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
705#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
706#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
707#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
708#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
709#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
710#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
711#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
712#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
713#ifdef ENABLE_SWAP_TEST
714#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
715#endif
716
717/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
718/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
719#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
720 /*do not define ENABLE_SWAP_TEST*/
721#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
722#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
723
724#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
725#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
726/*
727 *ucode parser/search control
728 *bit 0: 0, header auto parse; 1, header manual parse
729 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
730 *bit [3:2]: valid when bit1==0;
731 *0, auto skip nal before first vps/sps/pps/idr;
732 *1, auto skip nal before first vps/sps/pps
733 *2, auto skip nal before first vps/sps/pps,
734 * and not decode until the first I slice (with slice address of 0)
735 *
736 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
737 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
738 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
739 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
740 *bit [17]: for NAL_SEI when bit0 is 0:
741 * 0, do not parse/fetch SEI in ucode;
742 * 1, parse/fetch SEI in ucode
743 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
744 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
745 * 1, fetch NAL_SEI_SUFFIX data to aux buf
746 *bit [19]:
747 * 0, parse NAL_SEI in ucode
748 * 1, fetch NAL_SEI to aux buf
749 *bit [20]: for DOLBY_VISION_META
750 * 0, do not fetch DOLBY_VISION_META to aux buf
751 * 1, fetch DOLBY_VISION_META to aux buf
752 */
753#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
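/*
 * Illustrative note (not in the original source): a write of 0x2 to
 * NAL_SEARCH_CTL sets bit 1 (no auto skip) with bit 0 clear (header auto
 * parse); this is the "auto search" value referenced in the
 * error_handle_policy comment above.
 */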
754 /*read only*/
755#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
756 /*
757 [15 : 8] rps_set_id
758 [7 : 0] start_decoding_flag
759 */
760#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
761 /*set before start decoder*/
762#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
763#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
764#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
765
766#define DECODE_MODE_SINGLE 0x0
767#define DECODE_MODE_MULTI_FRAMEBASE 0x1
768#define DECODE_MODE_MULTI_STREAMBASE 0x2
769#define DECODE_MODE_MULTI_DVBAL 0x3
770#define DECODE_MODE_MULTI_DVENL 0x4
771
772#define MAX_INT 0x7FFFFFFF
773
774#define RPM_BEGIN 0x100
775#define modification_list_cur 0x148
776#define RPM_END 0x180
777
778#define RPS_USED_BIT 14
779/* MISC_FLAG0 */
780#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
781#define PCM_ENABLE_FLAG_BIT 1
782#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
783#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
784#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
785#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
786#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
787#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
788#define SLICE_SAO_LUMA_FLAG_BIT 8
789#define SLICE_SAO_CHROMA_FLAG_BIT 9
790#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
791
792union param_u {
793 struct {
794 unsigned short data[RPM_END - RPM_BEGIN];
795 } l;
796 struct {
797 /* from ucode lmem, do not change this struct */
798 unsigned short CUR_RPS[0x10];
799 unsigned short num_ref_idx_l0_active;
800 unsigned short num_ref_idx_l1_active;
801 unsigned short slice_type;
802 unsigned short slice_temporal_mvp_enable_flag;
803 unsigned short dependent_slice_segment_flag;
804 unsigned short slice_segment_address;
805 unsigned short num_title_rows_minus1;
806 unsigned short pic_width_in_luma_samples;
807 unsigned short pic_height_in_luma_samples;
808 unsigned short log2_min_coding_block_size_minus3;
809 unsigned short log2_diff_max_min_coding_block_size;
810 unsigned short log2_max_pic_order_cnt_lsb_minus4;
811 unsigned short POClsb;
812 unsigned short collocated_from_l0_flag;
813 unsigned short collocated_ref_idx;
814 unsigned short log2_parallel_merge_level;
815 unsigned short five_minus_max_num_merge_cand;
816 unsigned short sps_num_reorder_pics_0;
817 unsigned short modification_flag;
818 unsigned short tiles_enabled_flag;
819 unsigned short num_tile_columns_minus1;
820 unsigned short num_tile_rows_minus1;
821 unsigned short tile_width[8];
822 unsigned short tile_height[8];
823 unsigned short misc_flag0;
824 unsigned short pps_beta_offset_div2;
825 unsigned short pps_tc_offset_div2;
826 unsigned short slice_beta_offset_div2;
827 unsigned short slice_tc_offset_div2;
828 unsigned short pps_cb_qp_offset;
829 unsigned short pps_cr_qp_offset;
830 unsigned short first_slice_segment_in_pic_flag;
831 unsigned short m_temporalId;
832 unsigned short m_nalUnitType;
833
834 unsigned short vui_num_units_in_tick_hi;
835 unsigned short vui_num_units_in_tick_lo;
836 unsigned short vui_time_scale_hi;
837 unsigned short vui_time_scale_lo;
838 unsigned short bit_depth;
839 unsigned short profile_etc;
840 unsigned short sei_frame_field_info;
841 unsigned short video_signal_type;
842 unsigned short modification_list[0x20];
843 unsigned short conformance_window_flag;
844 unsigned short conf_win_left_offset;
845 unsigned short conf_win_right_offset;
846 unsigned short conf_win_top_offset;
847 unsigned short conf_win_bottom_offset;
848 unsigned short chroma_format_idc;
849 unsigned short color_description;
850 unsigned short aspect_ratio_idc;
851 unsigned short sar_width;
852 unsigned short sar_height;
853 unsigned short sps_max_dec_pic_buffering_minus1_0;
854 } p;
855};
856
857#define RPM_BUF_SIZE (0x80*2)
858/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
859#define LMEM_BUF_SIZE (0x500 * 2)
860
861struct buff_s {
862 u32 buf_start;
863 u32 buf_size;
864 u32 buf_end;
865};
866
867struct BuffInfo_s {
868 u32 max_width;
869 u32 max_height;
870 unsigned int start_adr;
871 unsigned int end_adr;
872 struct buff_s ipp;
873 struct buff_s sao_abv;
874 struct buff_s sao_vb;
875 struct buff_s short_term_rps;
876 struct buff_s vps;
877 struct buff_s sps;
878 struct buff_s pps;
879 struct buff_s sao_up;
880 struct buff_s swap_buf;
881 struct buff_s swap_buf2;
882 struct buff_s scalelut;
883 struct buff_s dblk_para;
884 struct buff_s dblk_data;
885 struct buff_s dblk_data2;
886 struct buff_s mmu_vbh;
887 struct buff_s cm_header;
888 struct buff_s mpred_above;
889#ifdef MV_USE_FIXED_BUF
890 struct buff_s mpred_mv;
891#endif
892 struct buff_s rpm;
893 struct buff_s lmem;
894};
895#define WORK_BUF_SPEC_NUM 3
896static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
897 {
898 /* 8M bytes */
899 .max_width = 1920,
900 .max_height = 1088,
901 .ipp = {
902 /* IPP work space calculation :
903 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
904 */
905 .buf_size = 0x4000,
906 },
907 .sao_abv = {
908 .buf_size = 0x30000,
909 },
910 .sao_vb = {
911 .buf_size = 0x30000,
912 },
913 .short_term_rps = {
914 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
915 * total 64x16x2 = 2048 bytes (0x800)
916 */
917 .buf_size = 0x800,
918 },
919 .vps = {
920 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
921 * total 0x0800 bytes
922 */
923 .buf_size = 0x800,
924 },
925 .sps = {
926 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
927 * total 0x0800 bytes
928 */
929 .buf_size = 0x800,
930 },
931 .pps = {
932 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
933 * total 0x2000 bytes
934 */
935 .buf_size = 0x2000,
936 },
937 .sao_up = {
938 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
939 * each has 16 bytes total 0x2800 bytes
940 */
941 .buf_size = 0x2800,
942 },
943 .swap_buf = {
944 /* 256cyclex64bit = 2K bytes 0x800
945 * (only 144 cycles valid)
946 */
947 .buf_size = 0x800,
948 },
949 .swap_buf2 = {
950 .buf_size = 0x800,
951 },
952 .scalelut = {
953 /* support up to 32 SCALELUT 1024x32 =
954 * 32Kbytes (0x8000)
955 */
956 .buf_size = 0x8000,
957 },
958 .dblk_para = {
959#ifdef SUPPORT_10BIT
960 .buf_size = 0x40000,
961#else
962 /* DBLK -> Max 256(4096/16) LCU, each para
963 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
964 */
965 .buf_size = 0x20000,
966#endif
967 },
968 .dblk_data = {
969 .buf_size = 0x40000,
970 },
971 .dblk_data2 = {
972 .buf_size = 0x40000,
973 }, /*dblk data for adapter*/
974 .mmu_vbh = {
975 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
976 },
977#if 0
978 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
979 .buf_size = MMU_COMPRESS_HEADER_SIZE *
980 (MAX_REF_PIC_NUM + 1),
981 },
982#endif
983 .mpred_above = {
984 .buf_size = 0x8000,
985 },
986#ifdef MV_USE_FIXED_BUF
987 .mpred_mv = {/* 1080p, 0x40000 per buffer */
988 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
989 },
990#endif
991 .rpm = {
992 .buf_size = RPM_BUF_SIZE,
993 },
994 .lmem = {
995 .buf_size = 0x500 * 2,
996 }
997 },
998 {
999 .max_width = 4096,
1000 .max_height = 2048,
1001 .ipp = {
1002 /* IPP work space calculation :
1003 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1004 */
1005 .buf_size = 0x4000,
1006 },
1007 .sao_abv = {
1008 .buf_size = 0x30000,
1009 },
1010 .sao_vb = {
1011 .buf_size = 0x30000,
1012 },
1013 .short_term_rps = {
1014 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1015 * total 64x16x2 = 2048 bytes (0x800)
1016 */
1017 .buf_size = 0x800,
1018 },
1019 .vps = {
1020 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1021 * total 0x0800 bytes
1022 */
1023 .buf_size = 0x800,
1024 },
1025 .sps = {
1026 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1027 * total 0x0800 bytes
1028 */
1029 .buf_size = 0x800,
1030 },
1031 .pps = {
1032 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1033 * total 0x2000 bytes
1034 */
1035 .buf_size = 0x2000,
1036 },
1037 .sao_up = {
1038 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1039 * each has 16 bytes total 0x2800 bytes
1040 */
1041 .buf_size = 0x2800,
1042 },
1043 .swap_buf = {
1044 /* 256cyclex64bit = 2K bytes 0x800
1045 * (only 144 cycles valid)
1046 */
1047 .buf_size = 0x800,
1048 },
1049 .swap_buf2 = {
1050 .buf_size = 0x800,
1051 },
1052 .scalelut = {
1053 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1054 * (0x8000)
1055 */
1056 .buf_size = 0x8000,
1057 },
1058 .dblk_para = {
1059 /* DBLK -> Max 256(4096/16) LCU, each para
1060 * 512bytes(total:0x20000),
1061 * data 1024bytes(total:0x40000)
1062 */
1063 .buf_size = 0x20000,
1064 },
1065 .dblk_data = {
1066 .buf_size = 0x80000,
1067 },
1068 .dblk_data2 = {
1069 .buf_size = 0x80000,
1070 }, /*dblk data for adapter*/
1071 .mmu_vbh = {
1072 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1073 },
1074#if 0
1075 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1076 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1077 (MAX_REF_PIC_NUM + 1),
1078 },
1079#endif
1080 .mpred_above = {
1081 .buf_size = 0x8000,
1082 },
1083#ifdef MV_USE_FIXED_BUF
1084 .mpred_mv = {
1085 /* .buf_size = 0x100000*16,
1086 //4k2k , 0x100000 per buffer */
1087 /* 4096x2304 , 0x120000 per buffer */
1088 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1089 },
1090#endif
1091 .rpm = {
1092 .buf_size = RPM_BUF_SIZE,
1093 },
1094 .lmem = {
1095 .buf_size = 0x500 * 2,
1096 }
1097 },
1098
1099 {
1100 .max_width = 4096*2,
1101 .max_height = 2048*2,
1102 .ipp = {
1103 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1104 .buf_size = 0x4000*2,
1105 },
1106 .sao_abv = {
1107 .buf_size = 0x30000*2,
1108 },
1109 .sao_vb = {
1110 .buf_size = 0x30000*2,
1111 },
1112 .short_term_rps = {
1113 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1114 .buf_size = 0x800,
1115 },
1116 .vps = {
1117 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1118 .buf_size = 0x800,
1119 },
1120 .sps = {
1121 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1122 .buf_size = 0x800,
1123 },
1124 .pps = {
1125 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1126 .buf_size = 0x2000,
1127 },
1128 .sao_up = {
1129 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1130 .buf_size = 0x2800*2,
1131 },
1132 .swap_buf = {
1133 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1134 .buf_size = 0x800,
1135 },
1136 .swap_buf2 = {
1137 .buf_size = 0x800,
1138 },
1139 .scalelut = {
1140 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1141 .buf_size = 0x8000*2,
1142 },
1143 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1144 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1145 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1146 .mmu_vbh = {
1147 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1148 },
1149#if 0
1150 .cm_header = {
1151 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1152 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1153 },
1154#endif
1155 .mpred_above = {
1156 .buf_size = 0x8000*2,
1157 },
1158#ifdef MV_USE_FIXED_BUF
1159 .mpred_mv = {
1160 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1161 },
1162#endif
1163 .rpm = {
1164 .buf_size = RPM_BUF_SIZE,
1165 },
1166 .lmem = {
1167 .buf_size = 0x500 * 2,
1168 },
1169 }
1170};
1171
1172static void init_buff_spec(struct hevc_state_s *hevc,
1173 struct BuffInfo_s *buf_spec)
1174{
1175 buf_spec->ipp.buf_start = buf_spec->start_adr;
1176 buf_spec->sao_abv.buf_start =
1177 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1178
1179 buf_spec->sao_vb.buf_start =
1180 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1181 buf_spec->short_term_rps.buf_start =
1182 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1183 buf_spec->vps.buf_start =
1184 buf_spec->short_term_rps.buf_start +
1185 buf_spec->short_term_rps.buf_size;
1186 buf_spec->sps.buf_start =
1187 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1188 buf_spec->pps.buf_start =
1189 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1190 buf_spec->sao_up.buf_start =
1191 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1192 buf_spec->swap_buf.buf_start =
1193 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1194 buf_spec->swap_buf2.buf_start =
1195 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1196 buf_spec->scalelut.buf_start =
1197 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1198 buf_spec->dblk_para.buf_start =
1199 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1200 buf_spec->dblk_data.buf_start =
1201 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1202 buf_spec->dblk_data2.buf_start =
1203 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1204 buf_spec->mmu_vbh.buf_start =
1205 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1206 buf_spec->mpred_above.buf_start =
1207 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1208#ifdef MV_USE_FIXED_BUF
1209 buf_spec->mpred_mv.buf_start =
1210 buf_spec->mpred_above.buf_start +
1211 buf_spec->mpred_above.buf_size;
1212
1213 buf_spec->rpm.buf_start =
1214 buf_spec->mpred_mv.buf_start +
1215 buf_spec->mpred_mv.buf_size;
1216#else
1217 buf_spec->rpm.buf_start =
1218 buf_spec->mpred_above.buf_start +
1219 buf_spec->mpred_above.buf_size;
1220#endif
1221 buf_spec->lmem.buf_start =
1222 buf_spec->rpm.buf_start +
1223 buf_spec->rpm.buf_size;
1224 buf_spec->end_adr =
1225 buf_spec->lmem.buf_start +
1226 buf_spec->lmem.buf_size;
1227
1228 if (hevc && get_dbg_flag2(hevc)) {
1229 hevc_print(hevc, 0,
1230 "%s workspace (%x %x) size = %x\n", __func__,
1231 buf_spec->start_adr, buf_spec->end_adr,
1232 buf_spec->end_adr - buf_spec->start_adr);
1233
1234 hevc_print(hevc, 0,
1235 "ipp.buf_start :%x\n",
1236 buf_spec->ipp.buf_start);
1237 hevc_print(hevc, 0,
1238 "sao_abv.buf_start :%x\n",
1239 buf_spec->sao_abv.buf_start);
1240 hevc_print(hevc, 0,
1241 "sao_vb.buf_start :%x\n",
1242 buf_spec->sao_vb.buf_start);
1243 hevc_print(hevc, 0,
1244 "short_term_rps.buf_start :%x\n",
1245 buf_spec->short_term_rps.buf_start);
1246 hevc_print(hevc, 0,
1247 "vps.buf_start :%x\n",
1248 buf_spec->vps.buf_start);
1249 hevc_print(hevc, 0,
1250 "sps.buf_start :%x\n",
1251 buf_spec->sps.buf_start);
1252 hevc_print(hevc, 0,
1253 "pps.buf_start :%x\n",
1254 buf_spec->pps.buf_start);
1255 hevc_print(hevc, 0,
1256 "sao_up.buf_start :%x\n",
1257 buf_spec->sao_up.buf_start);
1258 hevc_print(hevc, 0,
1259 "swap_buf.buf_start :%x\n",
1260 buf_spec->swap_buf.buf_start);
1261 hevc_print(hevc, 0,
1262 "swap_buf2.buf_start :%x\n",
1263 buf_spec->swap_buf2.buf_start);
1264 hevc_print(hevc, 0,
1265 "scalelut.buf_start :%x\n",
1266 buf_spec->scalelut.buf_start);
1267 hevc_print(hevc, 0,
1268 "dblk_para.buf_start :%x\n",
1269 buf_spec->dblk_para.buf_start);
1270 hevc_print(hevc, 0,
1271 "dblk_data.buf_start :%x\n",
1272 buf_spec->dblk_data.buf_start);
1273 hevc_print(hevc, 0,
1274 "dblk_data2.buf_start :%x\n",
1275 buf_spec->dblk_data2.buf_start);
1276 hevc_print(hevc, 0,
1277 "mpred_above.buf_start :%x\n",
1278 buf_spec->mpred_above.buf_start);
1279#ifdef MV_USE_FIXED_BUF
1280 hevc_print(hevc, 0,
1281 "mpred_mv.buf_start :%x\n",
1282 buf_spec->mpred_mv.buf_start);
1283#endif
1284 if ((get_dbg_flag2(hevc)
1285 &
1286 H265_DEBUG_SEND_PARAM_WITH_REG)
1287 == 0) {
1288 hevc_print(hevc, 0,
1289 "rpm.buf_start :%x\n",
1290 buf_spec->rpm.buf_start);
1291 }
1292 }
1293
1294}
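/*
 * Illustrative note (not in the original source): the workspace is a single
 * contiguous carve-out with each sub-buffer packed directly after the
 * previous one, e.g. for the 1080p spec sao_abv.buf_start = start_adr +
 * 0x4000 and sao_vb.buf_start = start_adr + 0x34000.
 */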
1295
1296enum SliceType {
1297 B_SLICE,
1298 P_SLICE,
1299 I_SLICE
1300};
1301
1302/*USE_BUF_BLOCK*/
1303struct BUF_s {
1304 unsigned long start_adr;
1305 unsigned int size;
1306 int used_flag;
1307 unsigned int y_size;
1308 ulong v4l_ref_buf_addr;
1309} /*BUF_t */;
1310
1311/* level 6, 6.1 maximum slice number is 800; other is 200 */
1312#define MAX_SLICE_NUM 800
1313struct PIC_s {
1314 int index;
1315 int scatter_alloc;
1316 int BUF_index;
1317 int mv_buf_index;
1318 int POC;
1319 int decode_idx;
1320 int slice_type;
1321 int RefNum_L0;
1322 int RefNum_L1;
1323 int num_reorder_pic;
1324 int stream_offset;
1325 unsigned char referenced;
1326 unsigned char output_mark;
1327 unsigned char recon_mark;
1328 unsigned char output_ready;
1329 unsigned char error_mark;
1330 //dis_mark = 0: discard mark; dis_mark = 1: no discard mark
1331 unsigned char dis_mark;
1332 /**/ int slice_idx;
1333 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1334 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1335 /*buffer */
1336 unsigned int header_adr;
1337#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1338 unsigned char dv_enhance_exist;
1339#endif
1340 char *aux_data_buf;
1341 int aux_data_size;
1342 unsigned long cma_alloc_addr;
1343 struct page *alloc_pages;
1344 unsigned int mpred_mv_wr_start_addr;
1345 unsigned int mc_y_adr;
1346 unsigned int mc_u_v_adr;
1347#ifdef SUPPORT_10BIT
1348 /*unsigned int comp_body_size;*/
1349 unsigned int dw_y_adr;
1350 unsigned int dw_u_v_adr;
1351#endif
1352 int mc_canvas_y;
1353 int mc_canvas_u_v;
1354 int width;
1355 int height;
1356
1357 int y_canvas_index;
1358 int uv_canvas_index;
1359#ifdef MULTI_INSTANCE_SUPPORT
1360 struct canvas_config_s canvas_config[2];
1361#endif
1362#ifdef SUPPORT_10BIT
1363 int mem_saving_mode;
1364 u32 bit_depth_luma;
1365 u32 bit_depth_chroma;
1366#endif
1367#ifdef LOSLESS_COMPRESS_MODE
1368 unsigned int losless_comp_body_size;
1369#endif
1370 unsigned char pic_struct;
1371 int vf_ref;
1372
1373 u32 pts;
1374 u64 pts64;
1375 u64 timestamp;
1376
1377 u32 aspect_ratio_idc;
1378 u32 sar_width;
1379 u32 sar_height;
1380 u32 double_write_mode;
1381 u32 video_signal_type;
1382 unsigned short conformance_window_flag;
1383 unsigned short conf_win_left_offset;
1384 unsigned short conf_win_right_offset;
1385 unsigned short conf_win_top_offset;
1386 unsigned short conf_win_bottom_offset;
1387 unsigned short chroma_format_idc;
1388
1389 /* picture qos information */
1390 int max_qp;
1391 int avg_qp;
1392 int min_qp;
1393 int max_skip;
1394 int avg_skip;
1395 int min_skip;
1396 int max_mv;
1397 int min_mv;
1398 int avg_mv;
1399} /*PIC_t */;
1400
1401#define MAX_TILE_COL_NUM 10
1402#define MAX_TILE_ROW_NUM 20
1403struct tile_s {
1404 int width;
1405 int height;
1406 int start_cu_x;
1407 int start_cu_y;
1408
1409 unsigned int sao_vb_start_addr;
1410 unsigned int sao_abv_start_addr;
1411};
1412
1413#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1414#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1415#define SEI_HDR10PLUS_MASK 0x00000004
1416
1417#define VF_POOL_SIZE 32
1418
1419#ifdef MULTI_INSTANCE_SUPPORT
1420#define DEC_RESULT_NONE 0
1421#define DEC_RESULT_DONE 1
1422#define DEC_RESULT_AGAIN 2
1423#define DEC_RESULT_CONFIG_PARAM 3
1424#define DEC_RESULT_ERROR 4
1425#define DEC_INIT_PICLIST 5
1426#define DEC_UNINIT_PICLIST 6
1427#define DEC_RESULT_GET_DATA 7
1428#define DEC_RESULT_GET_DATA_RETRY 8
1429#define DEC_RESULT_EOS 9
1430#define DEC_RESULT_FORCE_EXIT 10
1431#define DEC_RESULT_FREE_CANVAS 11
1432
1433static void vh265_work(struct work_struct *work);
1434static void vh265_timeout_work(struct work_struct *work);
1435static void vh265_notify_work(struct work_struct *work);
1436
1437#endif
1438
1439struct debug_log_s {
1440 struct list_head list;
1441 uint8_t data; /*will alloc more size*/
1442};
1443
1444struct hevc_state_s {
1445#ifdef MULTI_INSTANCE_SUPPORT
1446 struct platform_device *platform_dev;
1447 void (*vdec_cb)(struct vdec_s *, void *);
1448 void *vdec_cb_arg;
1449 struct vframe_chunk_s *chunk;
1450 int dec_result;
1451 struct work_struct work;
1452 struct work_struct timeout_work;
1453 struct work_struct notify_work;
1454 struct work_struct set_clk_work;
1455 /* timeout handle */
1456 unsigned long int start_process_time;
1457 unsigned int last_lcu_idx;
1458 unsigned int decode_timeout_count;
1459 unsigned int timeout_num;
1460#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1461 unsigned char switch_dvlayer_flag;
1462 unsigned char no_switch_dvlayer_count;
1463 unsigned char bypass_dvenl_enable;
1464 unsigned char bypass_dvenl;
1465#endif
1466 unsigned char start_parser_type;
1467 /*start_decoding_flag:
1468 vps/pps/sps/idr info from ucode*/
1469 unsigned char start_decoding_flag;
1470 unsigned char rps_set_id;
1471 unsigned char eos;
1472 int pic_decoded_lcu_idx;
1473 u8 over_decode;
1474 u8 empty_flag;
1475#endif
1476 struct vframe_s vframe_dummy;
1477 char *provider_name;
1478 int index;
1479 struct device *cma_dev;
1480 unsigned char m_ins_flag;
1481 unsigned char dolby_enhance_flag;
1482 unsigned long buf_start;
1483 u32 buf_size;
1484 u32 mv_buf_size;
1485
1486 struct BuffInfo_s work_space_buf_store;
1487 struct BuffInfo_s *work_space_buf;
1488
1489 u8 aux_data_dirty;
1490 u32 prefix_aux_size;
1491 u32 suffix_aux_size;
1492 void *aux_addr;
1493 void *rpm_addr;
1494 void *lmem_addr;
1495 dma_addr_t aux_phy_addr;
1496 dma_addr_t rpm_phy_addr;
1497 dma_addr_t lmem_phy_addr;
1498
1499 unsigned int pic_list_init_flag;
1500 unsigned int use_cma_flag;
1501
1502 unsigned short *rpm_ptr;
1503 unsigned short *lmem_ptr;
1504 unsigned short *debug_ptr;
1505 int debug_ptr_size;
1506 int pic_w;
1507 int pic_h;
1508 int lcu_x_num;
1509 int lcu_y_num;
1510 int lcu_total;
1511 int lcu_size;
1512 int lcu_size_log2;
1513 int lcu_x_num_pre;
1514 int lcu_y_num_pre;
1515 int first_pic_after_recover;
1516
1517 int num_tile_col;
1518 int num_tile_row;
1519 int tile_enabled;
1520 int tile_x;
1521 int tile_y;
1522 int tile_y_x;
1523 int tile_start_lcu_x;
1524 int tile_start_lcu_y;
1525 int tile_width_lcu;
1526 int tile_height_lcu;
1527
1528 int slice_type;
1529 unsigned int slice_addr;
1530 unsigned int slice_segment_addr;
1531
1532 unsigned char interlace_flag;
1533 unsigned char curr_pic_struct;
1534 unsigned char frame_field_info_present_flag;
1535
1536 unsigned short sps_num_reorder_pics_0;
1537 unsigned short misc_flag0;
1538 int m_temporalId;
1539 int m_nalUnitType;
1540 int TMVPFlag;
1541 int isNextSliceSegment;
1542 int LDCFlag;
1543 int m_pocRandomAccess;
1544 int plevel;
1545 int MaxNumMergeCand;
1546
1547 int new_pic;
1548 int new_tile;
1549 int curr_POC;
1550 int iPrevPOC;
1551#ifdef MULTI_INSTANCE_SUPPORT
1552 int decoded_poc;
1553 struct PIC_s *decoding_pic;
1554#endif
1555 int iPrevTid0POC;
1556 int list_no;
1557 int RefNum_L0;
1558 int RefNum_L1;
1559 int ColFromL0Flag;
1560 int LongTerm_Curr;
1561 int LongTerm_Col;
1562 int Col_POC;
1563 int LongTerm_Ref;
1564#ifdef MULTI_INSTANCE_SUPPORT
1565 int m_pocRandomAccess_bak;
1566 int curr_POC_bak;
1567 int iPrevPOC_bak;
1568 int iPrevTid0POC_bak;
1569 unsigned char start_parser_type_bak;
1570 unsigned char start_decoding_flag_bak;
1571 unsigned char rps_set_id_bak;
1572 int pic_decoded_lcu_idx_bak;
1573 int decode_idx_bak;
1574#endif
1575 struct PIC_s *cur_pic;
1576 struct PIC_s *col_pic;
1577 int skip_flag;
1578 int decode_idx;
1579 int slice_idx;
1580 unsigned char have_vps;
1581 unsigned char have_sps;
1582 unsigned char have_pps;
1583 unsigned char have_valid_start_slice;
1584 unsigned char wait_buf;
1585 unsigned char error_flag;
1586 unsigned int error_skip_nal_count;
1587 long used_4k_num;
1588
1589 unsigned char
1590 ignore_bufmgr_error; /* bit 0, for decoding;
1591 bit 1, for displaying
1592 bit 1 must be set if bit 0 is 1*/
1593 int PB_skip_mode;
1594 int PB_skip_count_after_decoding;
1595#ifdef SUPPORT_10BIT
1596 int mem_saving_mode;
1597#endif
1598#ifdef LOSLESS_COMPRESS_MODE
1599 unsigned int losless_comp_body_size;
1600#endif
1601 int pts_mode;
1602 int last_lookup_pts;
1603 int last_pts;
1604 u64 last_lookup_pts_us64;
1605 u64 last_pts_us64;
1606 u32 shift_byte_count_lo;
1607 u32 shift_byte_count_hi;
1608 int pts_mode_switching_count;
1609 int pts_mode_recovery_count;
1610
1611 int pic_num;
1612
1613 /**/
1614 union param_u param;
1615
1616 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1617
1618 struct timer_list timer;
1619 struct BUF_s m_BUF[BUF_POOL_SIZE];
1620 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1621 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1622
1623 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1624 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1625 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1626 struct vframe_s vfpool[VF_POOL_SIZE];
1627
1628 u32 stat;
1629 u32 frame_width;
1630 u32 frame_height;
1631 u32 frame_dur;
1632 u32 frame_ar;
1633 u32 bit_depth_luma;
1634 u32 bit_depth_chroma;
1635 u32 video_signal_type;
1636 u32 video_signal_type_debug;
1637 u32 saved_resolution;
1638 bool get_frame_dur;
1639 u32 error_watchdog_count;
1640 u32 error_skip_nal_wt_cnt;
1641 u32 error_system_watchdog_count;
1642
1643#ifdef DEBUG_PTS
1644 unsigned long pts_missed;
1645 unsigned long pts_hit;
1646#endif
1647 struct dec_sysinfo vh265_amstream_dec_info;
1648 unsigned char init_flag;
1649 unsigned char first_sc_checked;
1650 unsigned char uninit_list;
1651 u32 start_decoding_time;
1652
1653 int show_frame_num;
1654#ifdef USE_UNINIT_SEMA
1655 struct semaphore h265_uninit_done_sema;
1656#endif
1657 int fatal_error;
1658
1659
1660 u32 sei_present_flag;
1661 void *frame_mmu_map_addr;
1662 dma_addr_t frame_mmu_map_phy_addr;
1663 unsigned int mmu_mc_buf_start;
1664 unsigned int mmu_mc_buf_end;
1665 unsigned int mmu_mc_start_4k_adr;
1666 void *mmu_box;
1667 void *bmmu_box;
1668 int mmu_enable;
1669
1670 unsigned int dec_status;
1671
1672 /* data for SEI_MASTER_DISPLAY_COLOR */
1673 unsigned int primaries[3][2];
1674 unsigned int white_point[2];
1675 unsigned int luminance[2];
1676 /* data for SEI_CONTENT_LIGHT_LEVEL */
1677 unsigned int content_light_level[2];
1678
1679 struct PIC_s *pre_top_pic;
1680 struct PIC_s *pre_bot_pic;
1681
1682#ifdef MULTI_INSTANCE_SUPPORT
1683 int double_write_mode;
1684 int dynamic_buf_num_margin;
1685 int start_action;
1686 int save_buffer_mode;
1687#endif
1688 u32 i_only;
1689 struct list_head log_list;
1690 u32 ucode_pause_pos;
1691 u32 start_shift_bytes;
1692
1693 u32 vf_pre_count;
1694 u32 vf_get_count;
1695 u32 vf_put_count;
1696#ifdef SWAP_HEVC_UCODE
1697 dma_addr_t mc_dma_handle;
1698 void *mc_cpu_addr;
1699 int swap_size;
1700 ulong swap_addr;
1701#endif
1702#ifdef DETREFILL_ENABLE
1703 dma_addr_t detbuf_adr;
1704 u16 *detbuf_adr_virt;
1705 u8 delrefill_check;
1706#endif
1707 u8 head_error_flag;
1708 int valve_count;
1709 struct firmware_s *fw;
1710 int max_pic_w;
1711 int max_pic_h;
1712#ifdef AGAIN_HAS_THRESHOLD
1713 u8 next_again_flag;
1714 u32 pre_parser_wr_ptr;
1715#endif
1716 u32 ratio_control;
1717 u32 first_pic_flag;
1718 u32 decode_size;
1719 struct mutex chunks_mutex;
1720 int need_cache_size;
1721 u64 sc_start_time;
1722 u32 skip_first_nal;
1723 bool is_swap;
1724 bool is_4k;
1725 int frameinfo_enable;
1726 struct vframe_qos_s vframe_qos;
1727 bool is_used_v4l;
1728 void *v4l2_ctx;
1729 bool v4l_params_parsed;
1730} /*hevc_stru_t */;
1731
1732#ifdef AGAIN_HAS_THRESHOLD
1733u32 again_threshold;
1734#endif
1735#ifdef SEND_LMEM_WITH_RPM
1736#define get_lmem_params(hevc, ladr) \
1737 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
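/*
 * Illustrative expansion (not in the original source): the index arithmetic
 * above reverses the word order inside each aligned group of four 16-bit
 * words, presumably matching the 64-bit order of the ucode lmem dump; e.g.
 * get_lmem_params(hevc, 0x41) reads hevc->lmem_ptr[0x40 + (3 - 1)], i.e.
 * element 0x42.
 */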
1738
1739
1740static int get_frame_mmu_map_size(void)
1741{
1742 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1743 return (MAX_FRAME_8K_NUM * 4);
1744
1745 return (MAX_FRAME_4K_NUM * 4);
1746}
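/*
 * Illustrative sizing (not in the original source): on SM1 and later the map
 * covers 8K streams, MAX_FRAME_8K_NUM * 4 = 0x1200 * 4 * 4 = 0x12000 bytes;
 * earlier SoCs only need MAX_FRAME_4K_NUM * 4 = 0x4800 bytes.
 */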
1747
1748static int is_oversize(int w, int h)
1749{
1750 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1751 MAX_SIZE_8K : MAX_SIZE_4K;
1752
1753 if (w < 0 || h < 0)
1754 return true;
1755
1756 if (h != 0 && (w > max / h))
1757 return true;
1758
1759 return false;
1760}
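/*
 * Worked example (illustrative, not in the original source): on an SM1 or
 * later SoC max = 8192 * 4608 = 37748736, so 8192x4608 is accepted (8192 is
 * not greater than max / 4608 = 8192) while 8192x4800 is rejected because
 * 8192 > max / 4800 = 7864.
 */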
1761
1762void check_head_error(struct hevc_state_s *hevc)
1763{
1764#define pcm_enabled_flag 0x040
1765#define pcm_sample_bit_depth_luma 0x041
1766#define pcm_sample_bit_depth_chroma 0x042
1767 hevc->head_error_flag = 0;
1768 if ((error_handle_policy & 0x40) == 0)
1769 return;
1770 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1771 uint16_t pcm_depth_luma = get_lmem_params(
1772 hevc, pcm_sample_bit_depth_luma);
1773 uint16_t pcm_sample_chroma = get_lmem_params(
1774 hevc, pcm_sample_bit_depth_chroma);
1775 if (pcm_depth_luma >
1776 hevc->bit_depth_luma ||
1777 pcm_sample_chroma >
1778 hevc->bit_depth_chroma) {
1779 hevc_print(hevc, 0,
1780 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1781 pcm_depth_luma,
1782 pcm_sample_chroma,
1783 hevc->bit_depth_luma,
1784 hevc->bit_depth_chroma);
1785 hevc->head_error_flag = 1;
1786 }
1787 }
1788}
1789#endif
1790
1791#ifdef SUPPORT_10BIT
1792/* Lossless compression body buffer size 4K per 64x32 (jt) */
1793static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1794 int width, int height, int mem_saving_mode)
1795{
1796 int width_x64;
1797 int height_x32;
1798 int bsize;
1799
1800 width_x64 = width + 63;
1801 width_x64 >>= 6;
1802
1803 height_x32 = height + 31;
1804 height_x32 >>= 5;
1805 if (mem_saving_mode == 1 && hevc->mmu_enable)
1806 bsize = 3200 * width_x64 * height_x32;
1807 else if (mem_saving_mode == 1)
1808 bsize = 3072 * width_x64 * height_x32;
1809 else
1810 bsize = 4096 * width_x64 * height_x32;
1811
1812 return bsize;
1813}
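/*
 * Worked example (illustrative, not in the original source): for a 3840x2160
 * picture with mem_saving_mode off, width_x64 = 60 and height_x32 = 68, so
 * the body buffer is 4096 * 60 * 68 = 16711680 bytes (about 16 MB);
 * mem_saving_mode drops the per-64x32 cost to 3200 or 3072 bytes depending
 * on mmu_enable.
 */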
1814
1815/* Lossless compression header buffer size 32 bytes per 128x64 (jt) */
1816static int compute_losless_comp_header_size(int width, int height)
1817{
1818 int width_x128;
1819 int height_x64;
1820 int hsize;
1821
1822 width_x128 = width + 127;
1823 width_x128 >>= 7;
1824
1825 height_x64 = height + 63;
1826 height_x64 >>= 6;
1827
1828 hsize = 32*width_x128*height_x64;
1829
1830 return hsize;
1831}
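/*
 * Worked example (illustrative, not in the original source): for 3840x2160,
 * width_x128 = 30 and height_x64 = 34, giving a header buffer of
 * 32 * 30 * 34 = 32640 bytes.
 */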
1832#endif
1833
1834static int add_log(struct hevc_state_s *hevc,
1835 const char *fmt, ...)
1836{
1837#define HEVC_LOG_BUF 196
1838 struct debug_log_s *log_item;
1839 unsigned char buf[HEVC_LOG_BUF];
1840 int len = 0;
1841 va_list args;
1842 mutex_lock(&vh265_log_mutex);
1843 va_start(args, fmt);
1844 len = sprintf(buf, "<%ld> <%05d> ",
1845 jiffies, hevc->decode_idx);
1846 len += vsnprintf(buf + len,
1847 HEVC_LOG_BUF - len, fmt, args);
1848 va_end(args);
1849 log_item = kmalloc(
1850 sizeof(struct debug_log_s) + len,
1851 GFP_KERNEL);
1852 if (log_item) {
1853 INIT_LIST_HEAD(&log_item->list);
1854 strcpy(&log_item->data, buf);
1855 list_add_tail(&log_item->list,
1856 &hevc->log_list);
1857 }
1858 mutex_unlock(&vh265_log_mutex);
1859 return 0;
1860}
1861
1862static void dump_log(struct hevc_state_s *hevc)
1863{
1864 int i = 0;
1865 struct debug_log_s *log_item, *tmp;
1866 mutex_lock(&vh265_log_mutex);
1867 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1868 hevc_print(hevc, 0,
1869 "[LOG%04d]%s\n",
1870 i++,
1871 &log_item->data);
1872 list_del(&log_item->list);
1873 kfree(log_item);
1874 }
1875 mutex_unlock(&vh265_log_mutex);
1876}
1877
1878static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1879 struct PIC_s *pic)
1880{
1881 if (pic->error_mark
1882 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1883 return 1;
1884 return 0;
1885}
1886
1887static int get_pic_poc(struct hevc_state_s *hevc,
1888 unsigned int idx)
1889{
1890 if (idx != 0xff
1891 && idx < MAX_REF_PIC_NUM
1892 && hevc->m_PIC[idx])
1893 return hevc->m_PIC[idx]->POC;
1894 return INVALID_POC;
1895}
1896
1897#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1898static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1899{
1900 return (hevc->m_ins_flag &&
1901 ((double_write_mode & 0x80000000) == 0)) ?
1902 hevc->double_write_mode :
1903 (double_write_mode & 0x7fffffff);
1904}
1905
1906static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1907{
1908 return (hevc->m_ins_flag &&
1909 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1910 hevc->dynamic_buf_num_margin :
1911 (dynamic_buf_num_margin & 0x7fffffff);
1912}
1913#endif
1914
1915static int get_double_write_mode(struct hevc_state_s *hevc)
1916{
1917 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1918 int w = hevc->pic_w;
1919 int h = hevc->pic_h;
1920 u32 dw = 0x1; /*1:1*/
1921 switch (valid_dw_mode) {
1922 case 0x100:
1923 if (w > 1920 && h > 1088)
1924 dw = 0x4; /*1:2*/
1925 break;
1926 case 0x200:
1927 if (w > 1920 && h > 1088)
1928 dw = 0x2; /*1:4*/
1929 break;
1930 case 0x300:
1931 if (w > 1280 && h > 720)
1932 dw = 0x4; /*1:2*/
1933 break;
1934 default:
1935 dw = valid_dw_mode;
1936 break;
1937 }
1938 return dw;
1939}
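/*
 * Informal summary of the double-write codes seen above (based only on this
 * function and get_double_write_ratio() below):
 *   0x1          - 1:1 double write
 *   0x2, 0x3     - 1/4 size in each dimension (ratio 4)
 *   0x4          - 1/2 size in each dimension (ratio 2)
 *   0x100, 0x200 - adaptive: scale down only above 1920x1088
 *   0x300        - adaptive: scale down only above 1280x720
 *   bit 0x10     - checked elsewhere to select double-write-only operation
 *                  (compressed/MMU reference buffers are skipped).
 */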
1940
1941static int get_double_write_ratio(struct hevc_state_s *hevc,
1942 int dw_mode)
1943{
1944 int ratio = 1;
1945 if ((dw_mode == 2) ||
1946 (dw_mode == 3))
1947 ratio = 4;
1948 else if (dw_mode == 4)
1949 ratio = 2;
1950 return ratio;
1951}
1952#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1953static unsigned char get_idx(struct hevc_state_s *hevc)
1954{
1955 return hevc->index;
1956}
1957#endif
1958
1959#undef pr_info
1960#define pr_info printk
1961static int hevc_print(struct hevc_state_s *hevc,
1962 int flag, const char *fmt, ...)
1963{
1964#define HEVC_PRINT_BUF 256
1965 unsigned char buf[HEVC_PRINT_BUF];
1966 int len = 0;
1967#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1968 if (hevc == NULL ||
1969 (flag == 0) ||
1970 ((debug_mask &
1971 (1 << hevc->index))
1972 && (debug & flag))) {
1973#endif
1974 va_list args;
1975
1976 va_start(args, fmt);
1977 if (hevc)
1978 len = sprintf(buf, "[%d]", hevc->index);
1979 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1980 pr_debug("%s", buf);
1981 va_end(args);
1982#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1983 }
1984#endif
1985 return 0;
1986}
1987
1988static int hevc_print_cont(struct hevc_state_s *hevc,
1989 int flag, const char *fmt, ...)
1990{
1991 unsigned char buf[HEVC_PRINT_BUF];
1992 int len = 0;
1993#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1994 if (hevc == NULL ||
1995 (flag == 0) ||
1996 ((debug_mask &
1997 (1 << hevc->index))
1998 && (debug & flag))) {
1999#endif
2000 va_list args;
2001
2002 va_start(args, fmt);
2003 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2004 pr_info("%s", buf);
2005 va_end(args);
2006#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2007 }
2008#endif
2009 return 0;
2010}
2011
2012static void put_mv_buf(struct hevc_state_s *hevc,
2013 struct PIC_s *pic);
2014
2015static void update_vf_memhandle(struct hevc_state_s *hevc,
2016 struct vframe_s *vf, struct PIC_s *pic);
2017
2018static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2019
2020static void release_aux_data(struct hevc_state_s *hevc,
2021 struct PIC_s *pic);
2022static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2023
2024#ifdef MULTI_INSTANCE_SUPPORT
2025static void backup_decode_state(struct hevc_state_s *hevc)
2026{
2027 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2028 hevc->curr_POC_bak = hevc->curr_POC;
2029 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2030 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2031 hevc->start_parser_type_bak = hevc->start_parser_type;
2032 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2033 hevc->rps_set_id_bak = hevc->rps_set_id;
2034 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2035 hevc->decode_idx_bak = hevc->decode_idx;
2036
2037}
2038
2039static void restore_decode_state(struct hevc_state_s *hevc)
2040{
2041 struct vdec_s *vdec = hw_to_vdec(hevc);
2042 if (!vdec_has_more_input(vdec)) {
2043 hevc->pic_decoded_lcu_idx =
2044 READ_VREG(HEVC_PARSER_LCU_START)
2045 & 0xffffff;
2046 return;
2047 }
2048 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2049 "%s: discard pic index 0x%x\n",
2050 __func__, hevc->decoding_pic ?
2051 hevc->decoding_pic->index : 0xff);
2052 if (hevc->decoding_pic) {
2053 hevc->decoding_pic->error_mark = 0;
2054 hevc->decoding_pic->output_ready = 0;
2055 hevc->decoding_pic->output_mark = 0;
2056 hevc->decoding_pic->referenced = 0;
2057 hevc->decoding_pic->POC = INVALID_POC;
2058 put_mv_buf(hevc, hevc->decoding_pic);
2059 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2060 release_aux_data(hevc, hevc->decoding_pic);
2061 hevc->decoding_pic = NULL;
2062 }
2063 hevc->decode_idx = hevc->decode_idx_bak;
2064 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2065 hevc->curr_POC = hevc->curr_POC_bak;
2066 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2067 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2068 hevc->start_parser_type = hevc->start_parser_type_bak;
2069 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2070 hevc->rps_set_id = hevc->rps_set_id_bak;
2071 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2072
2073 if (hevc->pic_list_init_flag == 1)
2074 hevc->pic_list_init_flag = 0;
2075 /*if (hevc->decode_idx == 0)
2076 hevc->start_decoding_flag = 0;*/
2077
2078 hevc->slice_idx = 0;
2079 hevc->used_4k_num = -1;
2080}
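/*
 * Note on the backup/restore pair above (informal summary):
 * backup_decode_state() snapshots the parser/POC state before a picture is
 * decoded; restore_decode_state() is used when decoding has to be retried.
 * If the input still has more data, the partially decoded picture is
 * discarded (its MV/MMU/aux buffers are released) and the saved state is
 * restored so the picture can be decoded again from the start.
 */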
2081#endif
2082
2083static void hevc_init_stru(struct hevc_state_s *hevc,
2084 struct BuffInfo_s *buf_spec_i)
2085{
2086 //int i;
2087 INIT_LIST_HEAD(&hevc->log_list);
2088 hevc->work_space_buf = buf_spec_i;
2089 hevc->prefix_aux_size = 0;
2090 hevc->suffix_aux_size = 0;
2091 hevc->aux_addr = NULL;
2092 hevc->rpm_addr = NULL;
2093 hevc->lmem_addr = NULL;
2094
2095 hevc->curr_POC = INVALID_POC;
2096
2097 hevc->pic_list_init_flag = 0;
2098 hevc->use_cma_flag = 0;
2099 hevc->decode_idx = 0;
2100 hevc->slice_idx = 0;
2101 hevc->new_pic = 0;
2102 hevc->new_tile = 0;
2103 hevc->iPrevPOC = 0;
2104 hevc->list_no = 0;
2105 /* int m_uiMaxCUWidth = 1<<7; */
2106 /* int m_uiMaxCUHeight = 1<<7; */
2107 hevc->m_pocRandomAccess = MAX_INT;
2108 hevc->tile_enabled = 0;
2109 hevc->tile_x = 0;
2110 hevc->tile_y = 0;
2111 hevc->iPrevTid0POC = 0;
2112 hevc->slice_addr = 0;
2113 hevc->slice_segment_addr = 0;
2114 hevc->skip_flag = 0;
2115 hevc->misc_flag0 = 0;
2116
2117 hevc->cur_pic = NULL;
2118 hevc->col_pic = NULL;
2119 hevc->wait_buf = 0;
2120 hevc->error_flag = 0;
2121 hevc->head_error_flag = 0;
2122 hevc->error_skip_nal_count = 0;
2123 hevc->have_vps = 0;
2124 hevc->have_sps = 0;
2125 hevc->have_pps = 0;
2126 hevc->have_valid_start_slice = 0;
2127
2128 hevc->pts_mode = PTS_NORMAL;
2129 hevc->last_pts = 0;
2130 hevc->last_lookup_pts = 0;
2131 hevc->last_pts_us64 = 0;
2132 hevc->last_lookup_pts_us64 = 0;
2133 hevc->pts_mode_switching_count = 0;
2134 hevc->pts_mode_recovery_count = 0;
2135
2136 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2137 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2138 if (hevc->PB_skip_mode == 0)
2139 hevc->ignore_bufmgr_error = 0x1;
2140 else
2141 hevc->ignore_bufmgr_error = 0x0;
2142
2143 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
2144 vdec_schedule_work(&hevc->work);
2145 /*for (i = 0; i < MAX_REF_PIC_NUM; i++)
2146 hevc->m_PIC[i] = NULL;*/
2147
2148 hevc->pic_num = 0;
2149 hevc->lcu_x_num_pre = 0;
2150 hevc->lcu_y_num_pre = 0;
2151 hevc->first_pic_after_recover = 0;
2152
2153 hevc->pre_top_pic = NULL;
2154 hevc->pre_bot_pic = NULL;
2155
2156 hevc->sei_present_flag = 0;
2157 hevc->valve_count = 0;
2158 hevc->first_pic_flag = 0;
2159#ifdef MULTI_INSTANCE_SUPPORT
2160 hevc->decoded_poc = INVALID_POC;
2161 hevc->start_process_time = 0;
2162 hevc->last_lcu_idx = 0;
2163 hevc->decode_timeout_count = 0;
2164 hevc->timeout_num = 0;
2165 hevc->eos = 0;
2166 hevc->pic_decoded_lcu_idx = -1;
2167 hevc->over_decode = 0;
2168 hevc->used_4k_num = -1;
2169 hevc->start_decoding_flag = 0;
2170 hevc->rps_set_id = 0;
2171 backup_decode_state(hevc);
2172#endif
2173#ifdef DETREFILL_ENABLE
2174 hevc->detbuf_adr = 0;
2175 hevc->detbuf_adr_virt = NULL;
2176#endif
2177}
2178
2179static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2180static int H265_alloc_mmu(struct hevc_state_s *hevc,
2181 struct PIC_s *new_pic, unsigned short bit_depth,
2182 unsigned int *mmu_index_adr);
2183
2184#ifdef DETREFILL_ENABLE
2185#define DETREFILL_BUF_SIZE (4 * 0x4000)
2186#define HEVC_SAO_DBG_MODE0 0x361e
2187#define HEVC_SAO_DBG_MODE1 0x361f
2188#define HEVC_SAO_CTRL10 0x362e
2189#define HEVC_SAO_CTRL11 0x362f
2190static int init_detrefill_buf(struct hevc_state_s *hevc)
2191{
2192 if (hevc->detbuf_adr_virt)
2193 return 0;
2194
2195 hevc->detbuf_adr_virt =
2196 (void *)dma_alloc_coherent(amports_get_dma_device(),
2197 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2198 GFP_KERNEL);
2199
2200 if (hevc->detbuf_adr_virt == NULL) {
2201		pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2202 return -1;
2203 }
2204 return 0;
2205}
2206
2207static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2208{
2209 if (hevc->detbuf_adr_virt) {
2210 dma_free_coherent(amports_get_dma_device(),
2211 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2212 hevc->detbuf_adr);
2213
2214 hevc->detbuf_adr_virt = NULL;
2215 hevc->detbuf_adr = 0;
2216 }
2217}
2218
2219/*
2220 * convert uncompressed frame buffer data from/to ddr
2221 */
2222static void convUnc8x4blk(uint16_t* blk8x4Luma,
2223 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2224{
2225 if (direction == 0) {
2226 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2227 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2228 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2229 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2230 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2231 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2232 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2233 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2234 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2235 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2236 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2237 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2238 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2239 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2240 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2241 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2242 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2243 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2244
2245 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2246 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2247 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2248 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2249 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2250 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2251 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2252 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2253 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2254 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2255 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2256 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2257 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2258 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2259 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2260 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2261 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2262 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2263
2264 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2265 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2266 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2267 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2268 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2269 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2270 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2271 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2272 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2273 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2274 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2275 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2276 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2277 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2278 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2279 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2280 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2281 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2282
2283 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2284 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2285 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2286 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2287 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2288 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2289 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2290 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2291 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2292 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2293 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2294 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2295 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2296 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2297 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2298 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2299 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2300 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2301 } else {
2302 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2303 blk8x4Luma[3 + 0 * 8];
2304 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2305 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2306 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2307 (blk8x4Luma[3 + 3 * 8] >> 2);
2308 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2309 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2310 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2311 (blk8x4Luma[7 + 2 * 8] >>4);
2312 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2313 blk8x4Cb[0 + 0 * 4];
2314 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2315 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2316 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2317
2318 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2319 blk8x4Luma[0 + 0 * 8];
2320 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2321 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2322 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2323 (blk8x4Luma[0 + 1 * 8] >> 2);
2324 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2325 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2326 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2327 (blk8x4Luma[0 + 2 * 8] >>4);
2328 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2329 blk8x4Luma[2 + 2 * 8];
2330 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2331 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2332 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2333
2334 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2335 blk8x4Luma[4 + 0 * 8];
2336 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2337 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2338 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2339 (blk8x4Luma[4 + 1 * 8] >> 2);
2340 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2341 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2342 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2343 (blk8x4Luma[4 + 2 * 8] >>4);
2344 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2345 blk8x4Luma[6 + 2 * 8];
2346 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2347 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2348 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2349
2350 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2351 blk8x4Cb[1 + 0 * 4];
2352 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2353 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2354 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2355 (blk8x4Cr[2 + 0 * 4] >> 2);
2356 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2357 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2358 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2359 (blk8x4Cb[1 + 1 * 4] >>4);
2360 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2361 blk8x4Cb[2 + 1 * 4];
2362 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2363 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2364 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2365 }
2366}
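/*
 * Packing sketch for convUnc8x4blk() (informal, derived from the shifts
 * above): each group of 8 consecutive 16-bit cmBodyBuf words (128 bits)
 * carries twelve 10-bit samples packed back to back from bit 0, with the
 * top bits of the last word left as padding. The four groups of a 32-word
 * block hold 8 luma + 4 chroma, 12 luma, 12 luma and 12 chroma samples
 * respectively. direction == 0 unpacks from the DDR layout into the blk8x4
 * arrays; any other value packs them back.
 */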
2367
2368static void corrRefillWithAmrisc (
2369 struct hevc_state_s *hevc,
2370 uint32_t cmHeaderBaseAddr,
2371 uint32_t picWidth,
2372 uint32_t ctuPosition)
2373{
2374 int32_t i;
2375 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2376 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2377 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2378
2379 uint16_t cmBodyBuf[32 * 18];
2380
2381 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2382 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2383 uint32_t stride64x64 = pic_width_x64 * 128;
2384 uint32_t addr_offset64x64_abv = stride64x64 *
2385 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2386 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2387 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2388 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2389 unsigned int tmpData32;
2390
2391 uint16_t blkBuf0Y[32];
2392 uint16_t blkBuf0Cb[8];
2393 uint16_t blkBuf0Cr[8];
2394 uint16_t blkBuf1Y[32];
2395 uint16_t blkBuf1Cb[8];
2396 uint16_t blkBuf1Cr[8];
2397 int32_t blkBufCnt = 0;
2398
2399 int32_t blkIdx;
2400
2401 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2402 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2403 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2404 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2405
2406 for (i = 0; i < 32 * 18; i++)
2407 cmBodyBuf[i] = 0;
2408
2409 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2410 "%s, %d\n", __func__, __LINE__);
2411 do {
2412 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2413 } while (tmpData32);
2414 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2415 "%s, %d\n", __func__, __LINE__);
2416
2417 hevc_print(hevc, H265_DEBUG_DETAIL,
2418 "cmBodyBuf from detbuf:\n");
2419 for (i = 0; i < 32 * 18; i++) {
2420 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2421 if (get_dbg_flag(hevc) &
2422 H265_DEBUG_DETAIL) {
2423 if ((i & 0xf) == 0)
2424 hevc_print_cont(hevc, 0, "\n");
2425 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2426 }
2427 }
2428 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2429
2430 for (i = 0; i < 32; i++)
2431 blkBuf0Y[i] = 0;
2432 for (i = 0; i < 8; i++)
2433 blkBuf0Cb[i] = 0;
2434 for (i = 0; i < 8; i++)
2435 blkBuf0Cr[i] = 0;
2436 for (i = 0; i < 32; i++)
2437 blkBuf1Y[i] = 0;
2438 for (i = 0; i < 8; i++)
2439 blkBuf1Cb[i] = 0;
2440 for (i = 0; i < 8; i++)
2441 blkBuf1Cr[i] = 0;
2442
2443 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2444 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2445 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2446 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2447 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2448 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2449 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2450
2451 if (!aboveCtuAvailable && inAboveCtu)
2452 continue;
2453
2454 /* detRefillBuf --> 8x4block*/
2455 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2456
2457 if (restoreEnable) {
2458 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2459 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2460 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2461 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2462 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2463 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2464 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2465 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2466 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2467 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2468 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2469 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2470 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2471 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2472 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2473 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2474 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2475 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2476 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2477 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2478 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2479 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2480 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2481 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2482 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2483 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2484 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2485 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2486 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2487 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2488 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2489 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2490
2491 /*Store data back to DDR*/
2492 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2493 }
2494
2495 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2496 }
2497
2498 hevc_print(hevc, H265_DEBUG_DETAIL,
2499 "cmBodyBuf to detbuf:\n");
2500 for (i = 0; i < 32 * 18; i++) {
2501 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2502 if (get_dbg_flag(hevc) &
2503 H265_DEBUG_DETAIL) {
2504 if ((i & 0xf) == 0)
2505 hevc_print_cont(hevc, 0, "\n");
2506 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2507 }
2508 }
2509 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2510
2511 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2512 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2513 "%s, %d\n", __func__, __LINE__);
2514 do {
2515 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2516 } while (tmpData32);
2517 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2518 "%s, %d\n", __func__, __LINE__);
2519}
2520
2521static void delrefill(struct hevc_state_s *hevc)
2522{
2523 /*
2524 * corrRefill
2525 */
2526 /*HEVC_SAO_DBG_MODE0: picGlobalVariable
2527 [31:30]error number
2528 [29:20]error2([9:7]tilex[6:0]ctuy)
2529 [19:10]error1 [9:0]error0*/
2530 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2531 uint32_t errorIdx;
2532 uint32_t errorNum = (detResult>>30);
2533
2534 if (detResult) {
2535 hevc_print(hevc, H265_DEBUG_BUFMGR,
2536 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2537 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2538 uint32_t errorPos = errorIdx * 10;
2539 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2540 uint32_t tilex = (errorResult >> 7) - 1;
2541 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2542 + hevc->m_tile[0][tilex].width - 1;
2543 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2544 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2545 hevc_print(hevc, H265_DEBUG_BUFMGR,
2546 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2547				errorIdx, tilex, ctux, ctux, ctuy, ctuy);
2548 corrRefillWithAmrisc(
2549 hevc,
2550 (uint32_t)hevc->cur_pic->header_adr,
2551 hevc->pic_w,
2552 ctuPosition);
2553 }
2554
2555 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2556 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2557 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2558 }
2559}
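/*
 * Informal flow of delrefill(): HEVC_ASSIST_SCRATCH_3 reports up to three
 * failing CTU positions (bit layout documented in the comment above). Each
 * error is mapped back to a CTU (x, y) and corrRefillWithAmrisc() is used to
 * pull the affected compressed-body data in through the SAO debug registers,
 * patch it, and push it back, after which the status registers are cleared.
 */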
2560#endif
2561
2562static void get_rpm_param(union param_u *params)
2563{
2564 int i;
2565 unsigned int data32;
2566
2567 for (i = 0; i < 128; i++) {
2568 do {
2569 data32 = READ_VREG(RPM_CMD_REG);
2570 /* hevc_print(hevc, 0, "%x\n", data32); */
2571 } while ((data32 & 0x10000) == 0);
2572 params->l.data[i] = data32 & 0xffff;
2573 /* hevc_print(hevc, 0, "%x\n", data32); */
2574 WRITE_VREG(RPM_CMD_REG, 0);
2575 }
2576}
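/*
 * Informal note: get_rpm_param() implements a simple mailbox handshake with
 * the ucode -- poll RPM_CMD_REG until bit 16 (data valid) is set, keep the
 * low 16 bits, then write 0 to acknowledge; this is repeated 128 times to
 * fill the param_u.l.data[] array.
 */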
2577
2578static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2579{
2580 int i;
2581 struct PIC_s *pic;
2582 struct PIC_s *ret_pic = NULL;
2583 if (POC == INVALID_POC)
2584 return NULL;
2585 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2586 pic = hevc->m_PIC[i];
2587 if (pic == NULL || pic->index == -1 ||
2588 pic->BUF_index == -1)
2589 continue;
2590 if (pic->POC == POC) {
2591 if (ret_pic == NULL)
2592 ret_pic = pic;
2593 else {
2594 if (pic->decode_idx > ret_pic->decode_idx)
2595 ret_pic = pic;
2596 }
2597 }
2598 }
2599 return ret_pic;
2600}
2601
2602static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2603{
2604 int i;
2605 struct PIC_s *pic;
2606 struct PIC_s *ret_pic = NULL;
2607
2608 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2609 pic = hevc->m_PIC[i];
2610 if (pic == NULL || pic->index == -1 ||
2611 pic->BUF_index == -1)
2612 continue;
2613 if ((pic->POC == POC) && (pic->referenced)) {
2614 if (ret_pic == NULL)
2615 ret_pic = pic;
2616 else {
2617 if (pic->decode_idx > ret_pic->decode_idx)
2618 ret_pic = pic;
2619 }
2620 }
2621 }
2622
2623 if (ret_pic == NULL) {
2624 if (get_dbg_flag(hevc)) {
2625 hevc_print(hevc, 0,
2626 "Wrong, POC of %d is not in referenced list\n",
2627 POC);
2628 }
2629 ret_pic = get_pic_by_POC(hevc, POC);
2630 }
2631 return ret_pic;
2632}
2633
2634static unsigned int log2i(unsigned int val)
2635{
2636 unsigned int ret = -1;
2637
2638 while (val != 0) {
2639 val >>= 1;
2640 ret++;
2641 }
2642 return ret;
2643}
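/*
 * Note: log2i() returns floor(log2(val)) for val > 0 (e.g. log2i(64) == 6,
 * log2i(65) == 6). For val == 0 the loop body never runs and the unsigned
 * result wraps to 0xffffffff, so callers are expected to pass non-zero
 * values.
 */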
2644
2645static int init_buf_spec(struct hevc_state_s *hevc);
2646static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2647{
2648 if (hevc->mmu_box)
2649 decoder_mmu_box_free(hevc->mmu_box);
2650 hevc->mmu_box = NULL;
2651
2652 if (hevc->bmmu_box)
2653 decoder_bmmu_box_free(hevc->bmmu_box);
2654 hevc->bmmu_box = NULL;
2655}
2656static int init_mmu_buffers(struct hevc_state_s *hevc)
2657{
2658 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2659 CODEC_MM_FLAGS_TVP : 0;
2660 int buf_size = 64;
2661
2662 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2663 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2664 buf_size = 24;
2665 }
2666
2667 if (get_dbg_flag(hevc)) {
2668 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2669 __func__, hevc->max_pic_w, hevc->max_pic_h);
2670 }
2671
2672 hevc->need_cache_size = buf_size * SZ_1M;
2673 hevc->sc_start_time = get_jiffies_64();
2674 if (hevc->mmu_enable
2675 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2676 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2677 hevc->index,
2678 MAX_REF_PIC_NUM,
2679 buf_size * SZ_1M,
2680 tvp_flag
2681 );
2682 if (!hevc->mmu_box) {
2683 pr_err("h265 alloc mmu box failed!!\n");
2684 return -1;
2685 }
2686 }
2687
2688 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2689 hevc->index,
2690 BMMU_MAX_BUFFERS,
2691 4 + PAGE_SHIFT,
2692 CODEC_MM_FLAGS_CMA_CLEAR |
2693 CODEC_MM_FLAGS_FOR_VDECODER |
2694 tvp_flag);
2695 if (!hevc->bmmu_box) {
2696 if (hevc->mmu_box)
2697 decoder_mmu_box_free(hevc->mmu_box);
2698 hevc->mmu_box = NULL;
2699		pr_err("h265 alloc bmmu box failed!!\n");
2700 return -1;
2701 }
2702 return 0;
2703}
2704
2705struct buf_stru_s
2706{
2707 int lcu_total;
2708 int mc_buffer_size_h;
2709 int mc_buffer_size_u_v_h;
2710};
2711
2712#ifndef MV_USE_FIXED_BUF
2713static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2714{
2715 int i;
2716 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2717 if (hevc->m_mv_BUF[i].start_adr) {
2718 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2719 hevc_print(hevc, 0,
2720 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2721 i, hevc->m_mv_BUF[i].start_adr,
2722 hevc->m_mv_BUF[i].size,
2723 hevc->m_mv_BUF[i].used_flag);
2724 decoder_bmmu_box_free_idx(
2725 hevc->bmmu_box,
2726 MV_BUFFER_IDX(i));
2727 hevc->m_mv_BUF[i].start_adr = 0;
2728 hevc->m_mv_BUF[i].size = 0;
2729 hevc->m_mv_BUF[i].used_flag = 0;
2730 }
2731 }
2732 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2733 if (hevc->m_PIC[i] != NULL)
2734 hevc->m_PIC[i]->mv_buf_index = -1;
2735 }
2736
2737}
2738
2739static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2740{
2741 int ret = 0;
2742 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2743 if (decoder_bmmu_box_alloc_buf_phy
2744 (hevc->bmmu_box,
2745 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2746 DRIVER_NAME,
2747 &hevc->m_mv_BUF[i].start_adr) < 0) {
2748 hevc->m_mv_BUF[i].start_adr = 0;
2749 ret = -1;
2750 } else {
2751 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2752 hevc->m_mv_BUF[i].used_flag = 0;
2753 ret = 0;
2754 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2755 hevc_print(hevc, 0,
2756 "MV Buffer %d: start_adr %p size %x\n",
2757 i,
2758 (void *)hevc->m_mv_BUF[i].start_adr,
2759 hevc->m_mv_BUF[i].size);
2760 }
2761 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2762 void *mem_start_virt;
2763 mem_start_virt =
2764 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2765 if (mem_start_virt) {
2766 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2767 codec_mm_dma_flush(mem_start_virt,
2768 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2769 } else {
2770 mem_start_virt = codec_mm_vmap(
2771 hevc->m_mv_BUF[i].start_adr,
2772 hevc->m_mv_BUF[i].size);
2773 if (mem_start_virt) {
2774 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2775 codec_mm_dma_flush(mem_start_virt,
2776 hevc->m_mv_BUF[i].size,
2777 DMA_TO_DEVICE);
2778 codec_mm_unmap_phyaddr(mem_start_virt);
2779 } else {
2780					/* No virtual mapping during TVP (secure) playback;
2781					 * the buffer may need to be cleared by the ucode. */
2782 pr_err("ref %s mem_start_virt failed\n", __func__);
2783 }
2784 }
2785 }
2786 }
2787 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2788 return ret;
2789}
2790#endif
2791
2792static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2793{
2794#ifdef MV_USE_FIXED_BUF
2795 if (pic && pic->index >= 0) {
2796 if (IS_8K_SIZE(pic->width, pic->height)) {
2797 pic->mpred_mv_wr_start_addr =
2798 hevc->work_space_buf->mpred_mv.buf_start
2799 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2800 } else {
2801 pic->mpred_mv_wr_start_addr =
2802 hevc->work_space_buf->mpred_mv.buf_start
2803 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2804 }
2805 }
2806 return 0;
2807#else
2808 int i;
2809 int ret = -1;
2810 int new_size;
2811 if (IS_8K_SIZE(pic->width, pic->height))
2812 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2813 else if (IS_4K_SIZE(pic->width, pic->height))
2814 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2815 else
2816 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2817 if (new_size != hevc->mv_buf_size) {
2818 dealloc_mv_bufs(hevc);
2819 hevc->mv_buf_size = new_size;
2820 }
2821 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2822 if (hevc->m_mv_BUF[i].start_adr &&
2823 hevc->m_mv_BUF[i].used_flag == 0) {
2824 hevc->m_mv_BUF[i].used_flag = 1;
2825 ret = i;
2826 break;
2827 }
2828 }
2829 if (ret < 0) {
2830 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2831 if (hevc->m_mv_BUF[i].start_adr == 0) {
2832 if (alloc_mv_buf(hevc, i) >= 0) {
2833 hevc->m_mv_BUF[i].used_flag = 1;
2834 ret = i;
2835 }
2836 break;
2837 }
2838 }
2839 }
2840
2841 if (ret >= 0) {
2842 pic->mv_buf_index = ret;
2843 pic->mpred_mv_wr_start_addr =
2844 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2845 (~0xffff);
2846 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2847 "%s => %d (0x%x) size 0x%x\n",
2848 __func__, ret,
2849 pic->mpred_mv_wr_start_addr,
2850 hevc->m_mv_BUF[ret].size);
2851
2852 } else {
2853 hevc_print(hevc, 0,
2854 "%s: Error, mv buf is not enough\n",
2855 __func__);
2856 }
2857 return ret;
2858
2859#endif
2860}
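/*
 * Sizing note for get_mv_buf() (non-fixed-buffer path, informal): the MV
 * buffer size is picked from MPRED_8K/4K/MV_BUF_SIZE according to the
 * picture size, with an extra 0x10000 of slack so that
 * mpred_mv_wr_start_addr can be rounded up to the next 64 KB boundary via
 * (start_adr + 0xffff) & ~0xffff. Changing the required size frees all
 * existing MV buffers first.
 */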
2861
2862static void put_mv_buf(struct hevc_state_s *hevc,
2863 struct PIC_s *pic)
2864{
2865#ifndef MV_USE_FIXED_BUF
2866 int i = pic->mv_buf_index;
2867 if (i < 0 || i >= MAX_REF_PIC_NUM) {
2868 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2869 "%s: index %d beyond range\n",
2870 __func__, i);
2871 return;
2872 }
2873 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2874 "%s(%d): used_flag(%d)\n",
2875 __func__, i,
2876 hevc->m_mv_BUF[i].used_flag);
2877
2878 if (hevc->m_mv_BUF[i].start_adr &&
2879 hevc->m_mv_BUF[i].used_flag)
2880 hevc->m_mv_BUF[i].used_flag = 0;
2881 pic->mv_buf_index = -1;
2882#endif
2883}
2884
2885static int cal_current_buf_size(struct hevc_state_s *hevc,
2886 struct buf_stru_s *buf_stru)
2887{
2888
2889 int buf_size;
2890 int pic_width = hevc->pic_w;
2891 int pic_height = hevc->pic_h;
2892 int lcu_size = hevc->lcu_size;
2893 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
2894 1 : pic_width / lcu_size;
2895 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
2896 1 : pic_height / lcu_size;
2897 /*SUPPORT_10BIT*/
2898 int losless_comp_header_size = compute_losless_comp_header_size
2899 (pic_width, pic_height);
2900 /*always alloc buf for 10bit*/
2901 int losless_comp_body_size = compute_losless_comp_body_size
2902 (hevc, pic_width, pic_height, 0);
2903 int mc_buffer_size = losless_comp_header_size
2904 + losless_comp_body_size;
2905 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
2906 int mc_buffer_size_u_v_h = 0;
2907
2908 int dw_mode = get_double_write_mode(hevc);
2909
2910 if (hevc->mmu_enable) {
2911 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
2912 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
2913 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
2914 << 16;
2915 else
2916 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
2917 << 16;
2918 } else
2919 buf_size = 0;
2920
2921 if (dw_mode) {
2922 int pic_width_dw = pic_width /
2923 get_double_write_ratio(hevc, dw_mode);
2924 int pic_height_dw = pic_height /
2925 get_double_write_ratio(hevc, dw_mode);
2926
2927 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
2928 pic_width_dw / lcu_size + 1 :
2929 pic_width_dw / lcu_size;
2930 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
2931 pic_height_dw / lcu_size + 1 :
2932 pic_height_dw / lcu_size;
2933 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
2934
2935 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
2936 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
2937 /*64k alignment*/
2938 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
2939 }
2940
2941 if ((!hevc->mmu_enable) &&
2942 ((dw_mode & 0x10) == 0)) {
2943		/* compressed mode without MMU: an extra
2944		 * buffer is needed for the compressed reference data */
2945 buf_size += (mc_buffer_size_h << 16);
2946 }
2947
2948	/* allow for the start address not being 64 KB aligned */
2949 if (buf_size > 0)
2950 buf_size += 0x10000;
2951
2952 if (buf_stru) {
2953 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
2954 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
2955 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
2956 }
2957
2958	hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
2959 pic_width, pic_height, losless_comp_header_size,
2960 losless_comp_body_size, mc_buffer_size_h,
2961 mc_buffer_size_u_v_h, buf_size);
2962
2963 return buf_size;
2964}
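/*
 * Rough composition of the value returned by cal_current_buf_size()
 * (illustration only):
 *   - MMU mode: one compressed-frame header (MMU_COMPRESS_HEADER_SIZE or the
 *     8K variant), rounded up to 64 KB;
 *   - plus, when double write is enabled, 3 x the 64 KB-rounded chroma plane
 *     of the (possibly down-scaled) picture, i.e. roughly one 8-bit YUV420
 *     frame at the double-write resolution;
 *   - plus, without MMU and without double-write-only mode, the compressed
 *     header + body size rounded up to 64 KB;
 *   - plus 0x10000 of slack for start-address alignment.
 */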
2965
2966static int alloc_buf(struct hevc_state_s *hevc)
2967{
2968 int i;
2969 int ret = -1;
2970 int buf_size = cal_current_buf_size(hevc, NULL);
2971
2972 if (hevc->is_used_v4l)
2973 return 0;
2974
2975 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
2976 return ret;
2977
2978 for (i = 0; i < BUF_POOL_SIZE; i++) {
2979 if (hevc->m_BUF[i].start_adr == 0)
2980 break;
2981 }
2982 if (i < BUF_POOL_SIZE) {
2983 if (buf_size > 0) {
2984 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2985 /*alloc compress header first*/
2986
2987 ret = decoder_bmmu_box_alloc_buf_phy
2988 (hevc->bmmu_box,
2989 VF_BUFFER_IDX(i), buf_size,
2990 DRIVER_NAME,
2991 &hevc->m_BUF[i].start_adr);
2992 if (ret < 0) {
2993 hevc->m_BUF[i].start_adr = 0;
2994 if (i <= 8) {
2995 hevc->fatal_error |=
2996 DECODER_FATAL_ERROR_NO_MEM;
2997 hevc_print(hevc, PRINT_FLAG_ERROR,
2998 "%s[%d], size: %d, no mem fatal err\n",
2999 __func__, i, buf_size);
3000 }
3001 }
3002
3003 if (ret >= 0) {
3004 hevc->m_BUF[i].size = buf_size;
3005 hevc->m_BUF[i].used_flag = 0;
3006 ret = 0;
3007
3008 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3009 hevc_print(hevc, 0,
3010 "Buffer %d: start_adr %p size %x\n",
3011 i,
3012 (void *)hevc->m_BUF[i].start_adr,
3013 hevc->m_BUF[i].size);
3014 }
3015 /*flush the buffer make sure no cache dirty*/
3016 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3017 void *mem_start_virt;
3018 mem_start_virt =
3019 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3020 if (mem_start_virt) {
3021 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3022 codec_mm_dma_flush(mem_start_virt,
3023 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3024 } else {
3025 mem_start_virt = codec_mm_vmap(
3026 hevc->m_BUF[i].start_adr,
3027 hevc->m_BUF[i].size);
3028 if (mem_start_virt) {
3029 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3030 codec_mm_dma_flush(mem_start_virt,
3031 hevc->m_BUF[i].size,
3032 DMA_TO_DEVICE);
3033 codec_mm_unmap_phyaddr(mem_start_virt);
3034 } else {
3035						/* No virtual mapping during TVP (secure) playback;
3036						 * the buffer may need to be cleared by the ucode. */
3037 pr_err("ref %s mem_start_virt failed\n", __func__);
3038 }
3039 }
3040 }
3041 }
3042 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3043 } else
3044 ret = 0;
3045 }
3046 if (ret >= 0) {
3047 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3048 hevc_print(hevc, 0,
3049				"alloc buf(%d) for %d/%d size 0x%x => %p\n",
3050 i, hevc->pic_w, hevc->pic_h,
3051 buf_size,
3052 hevc->m_BUF[i].start_adr);
3053 }
3054 } else {
3055 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3056 hevc_print(hevc, 0,
3057				"alloc buf(%d) for %d/%d size 0x%x => Fail!!!\n",
3058 i, hevc->pic_w, hevc->pic_h,
3059 buf_size);
3060 }
3061 }
3062 return ret;
3063}
3064
3065static void set_buf_unused(struct hevc_state_s *hevc, int i)
3066{
3067 if (i >= 0 && i < BUF_POOL_SIZE)
3068 hevc->m_BUF[i].used_flag = 0;
3069}
3070
3071static void dealloc_unused_buf(struct hevc_state_s *hevc)
3072{
3073 int i;
3074 for (i = 0; i < BUF_POOL_SIZE; i++) {
3075 if (hevc->m_BUF[i].start_adr &&
3076 hevc->m_BUF[i].used_flag == 0) {
3077 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3078 hevc_print(hevc, 0,
3079 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3080 i, hevc->m_BUF[i].start_adr,
3081 hevc->m_BUF[i].size);
3082 }
3083 if (!hevc->is_used_v4l)
3084 decoder_bmmu_box_free_idx(
3085 hevc->bmmu_box,
3086 VF_BUFFER_IDX(i));
3087 hevc->m_BUF[i].start_adr = 0;
3088 hevc->m_BUF[i].size = 0;
3089 }
3090 }
3091
3092}
3093
3094static void dealloc_pic_buf(struct hevc_state_s *hevc,
3095 struct PIC_s *pic)
3096{
3097 int i = pic->BUF_index;
3098 pic->BUF_index = -1;
3099 if (i >= 0 &&
3100 i < BUF_POOL_SIZE &&
3101 hevc->m_BUF[i].start_adr) {
3102 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3103 hevc_print(hevc, 0,
3104 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3105 i, hevc->m_BUF[i].start_adr,
3106 hevc->m_BUF[i].size);
3107 }
3108
3109 if (!hevc->is_used_v4l)
3110 decoder_bmmu_box_free_idx(
3111 hevc->bmmu_box,
3112 VF_BUFFER_IDX(i));
3113 hevc->m_BUF[i].used_flag = 0;
3114 hevc->m_BUF[i].start_adr = 0;
3115 hevc->m_BUF[i].size = 0;
3116 }
3117}
3118
3119static int get_work_pic_num(struct hevc_state_s *hevc)
3120{
3121 int used_buf_num = 0;
3122 int sps_pic_buf_diff = 0;
3123
3124 if (get_dynamic_buf_num_margin(hevc) > 0) {
3125 if ((!hevc->sps_num_reorder_pics_0) &&
3126 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3127 /* the range of sps_num_reorder_pics_0 is in
3128 [0, sps_max_dec_pic_buffering_minus1_0] */
3129 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3130 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3131 } else
3132 used_buf_num = hevc->sps_num_reorder_pics_0
3133 + get_dynamic_buf_num_margin(hevc);
3134
3135 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3136 - hevc->sps_num_reorder_pics_0;
3137#ifdef MULTI_INSTANCE_SUPPORT
3138		/*
3139		 * Need one more buffer for multi-instance mode, as
3140		 * apply_ref_pic_set() has no chance to run and
3141		 * clear the referenced flag in some cases.
3142		 */
3143 if (hevc->m_ins_flag)
3144 used_buf_num++;
3145#endif
3146 } else
3147 used_buf_num = max_buf_num;
3148
3149 if (hevc->save_buffer_mode)
3150 hevc_print(hevc, 0,
3151			"save_buffer_mode: dynamic_buf_num_margin %d -> %d\n",
3152 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3153
3154	if (sps_pic_buf_diff >= 4)
3155		used_buf_num += 1;
3158
3159 if (used_buf_num > MAX_BUF_NUM)
3160 used_buf_num = MAX_BUF_NUM;
3161 return used_buf_num;
3162}
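/*
 * Informal summary of get_work_pic_num(): with a dynamic margin configured,
 * the working picture count is sps_num_reorder_pics_0 (or, when that is 0,
 * sps_max_dec_pic_buffering_minus1_0) plus the margin, plus one extra buffer
 * for multi-instance mode and one more when the stream's DPB depth exceeds
 * its reorder depth by at least 4; the result is clamped to MAX_BUF_NUM.
 * Without a margin the max_buf_num setting is used directly.
 */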
3163
3164static int get_alloc_pic_count(struct hevc_state_s *hevc)
3165{
3166 int alloc_pic_count = 0;
3167 int i;
3168 struct PIC_s *pic;
3169 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3170 pic = hevc->m_PIC[i];
3171 if (pic && pic->index >= 0)
3172 alloc_pic_count++;
3173 }
3174 return alloc_pic_count;
3175}
3176
3177static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3178{
3179 int ret = -1;
3180 int i;
3181 /*int lcu_size_log2 = hevc->lcu_size_log2;
3182 int MV_MEM_UNIT=lcu_size_log2==
3183 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3184 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3185 5 ? 0x80 : 0x20;
3186 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3187 hevc->work_space_buf->mpred_mv.buf_size;*/
3188 unsigned int y_adr = 0;
3189 struct buf_stru_s buf_stru;
3190 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3191 int dw_mode = get_double_write_mode(hevc);
3192 struct vdec_v4l2_buffer *fb = NULL;
3193
3194 if (hevc->is_used_v4l)
3195 buf_size = 0;
3196
3197 for (i = 0; i < BUF_POOL_SIZE; i++) {
3198 if (hevc->is_used_v4l && !hevc->m_BUF[i].start_adr) {
3199 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3200 if (ret) {
3201 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
3202 "[%d] get fb fail.\n",
3203 ((struct aml_vcodec_ctx *)
3204 (hevc->v4l2_ctx))->id);
3205 return ret;
3206 }
3207
3208 hevc->m_BUF[i].used_flag = 0;
3209 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3210 if (fb->num_planes == 1) {
3211 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3212 hevc->m_BUF[i].size = fb->m.mem[0].size;
3213 hevc->m_BUF[i].y_size = fb->m.mem[0].offset;
3214 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3215 } else if (fb->num_planes == 2) {
3216 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3217 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3218 hevc->m_BUF[i].y_size = fb->m.mem[0].size;
3219 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3220 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3221 }
3222
3223 pic->BUF_index = i;
3224
3225 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
3226				"[%d] %s(), v4l ref buf addr: %p\n",
3227 ((struct aml_vcodec_ctx *)
3228 (hevc->v4l2_ctx))->id, __func__, fb);
3229 }
3230
3231 if (hevc->m_BUF[i].start_adr != 0 &&
3232 hevc->m_BUF[i].used_flag == 0 &&
3233 buf_size <= hevc->m_BUF[i].size) {
3234 hevc->m_BUF[i].used_flag = 1;
3235 break;
3236 }
3237 }
3238
3239 if (i >= BUF_POOL_SIZE)
3240 return -1;
3241
3242 if (hevc->mmu_enable) {
3243 pic->header_adr = hevc->m_BUF[i].start_adr;
3244 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3245 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3246 y_adr = hevc->m_BUF[i].start_adr +
3247 MMU_COMPRESS_8K_HEADER_SIZE;
3248 else
3249 y_adr = hevc->m_BUF[i].start_adr +
3250 MMU_COMPRESS_HEADER_SIZE;
3251 } else
3252 y_adr = hevc->m_BUF[i].start_adr;
3253
3254 if (!hevc->is_used_v4l)
3255 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3256
3257 pic->POC = INVALID_POC;
3258	/* ensure get_pic_by_POC() does not
3259	   pick up a buffer that is not yet decoded */
3260 pic->BUF_index = i;
3261
3262 if ((!hevc->mmu_enable) &&
3263 ((dw_mode & 0x10) == 0)
3264 ) {
3265 pic->mc_y_adr = y_adr;
3266 y_adr += (buf_stru.mc_buffer_size_h << 16);
3267 }
3268 pic->mc_canvas_y = pic->index;
3269 pic->mc_canvas_u_v = pic->index;
3270 if (dw_mode & 0x10) {
3271 if (hevc->is_used_v4l) {
3272 pic->mc_y_adr = y_adr;
3273 pic->mc_u_v_adr = y_adr + hevc->m_BUF[i].y_size;
3274 } else {
3275 pic->mc_y_adr = y_adr;
3276 pic->mc_u_v_adr = y_adr +
3277 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3278 }
3279
3280 pic->mc_canvas_y = (pic->index << 1);
3281 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3282
3283 pic->dw_y_adr = pic->mc_y_adr;
3284 pic->dw_u_v_adr = pic->mc_u_v_adr;
3285 } else if (dw_mode) {
3286 pic->dw_y_adr = y_adr;
3287 pic->dw_u_v_adr = pic->dw_y_adr +
3288 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3289 }
3290
3291
3292 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3293 hevc_print(hevc, 0,
3294 "%s index %d BUF_index %d mc_y_adr %x\n",
3295 __func__, pic->index,
3296 pic->BUF_index, pic->mc_y_adr);
3297 if (hevc->mmu_enable &&
3298 dw_mode)
3299 hevc_print(hevc, 0,
3300 "mmu double write adr %ld\n",
3301 pic->cma_alloc_addr);
3302
3303
3304 }
3305 ret = 0;
3306
3307 return ret;
3308}
3309
3310static void init_pic_list(struct hevc_state_s *hevc)
3311{
3312 int i;
3313 int init_buf_num = get_work_pic_num(hevc);
3314 int dw_mode = get_double_write_mode(hevc);
3315 struct vdec_s *vdec = hw_to_vdec(hevc);
3316 /*alloc decoder buf*/
3317 for (i = 0; i < init_buf_num; i++) {
3318 if (alloc_buf(hevc) < 0) {
3319 if (i <= 8) {
3320				/* if at least 9 buffers (i + 1 >= 9) were already
3321				   allocated, don't report a fatal error */
3322 hevc->fatal_error |=
3323 DECODER_FATAL_ERROR_NO_MEM;
3324 }
3325 break;
3326 }
3327 }
3328
3329 for (i = 0; i < init_buf_num; i++) {
3330 struct PIC_s *pic =
3331 vmalloc(sizeof(struct PIC_s));
3332 if (pic == NULL) {
3333 hevc_print(hevc, 0,
3334 "%s: alloc pic %d fail!!!\n",
3335 __func__, i);
3336 break;
3337 }
3338 memset(pic, 0, sizeof(struct PIC_s));
3339 hevc->m_PIC[i] = pic;
3340 pic->index = i;
3341 pic->BUF_index = -1;
3342 pic->mv_buf_index = -1;
3343 if (vdec->parallel_dec == 1) {
3344 pic->y_canvas_index = -1;
3345 pic->uv_canvas_index = -1;
3346 }
3347
3348 pic->width = hevc->pic_w;
3349 pic->height = hevc->pic_h;
3350 pic->double_write_mode = dw_mode;
3351
3352 if (!hevc->is_used_v4l) {
3353 if (config_pic(hevc, pic) < 0) {
3354 if (get_dbg_flag(hevc))
3355 hevc_print(hevc, 0,
3356 "Config_pic %d fail\n", pic->index);
3357 pic->index = -1;
3358 i++;
3359 break;
3360 }
3361
3362 if (pic->double_write_mode)
3363 set_canvas(hevc, pic);
3364 }
3365 }
3366
3367 for (; i < MAX_REF_PIC_NUM; i++) {
3368 struct PIC_s *pic =
3369 vmalloc(sizeof(struct PIC_s));
3370 if (pic == NULL) {
3371 hevc_print(hevc, 0,
3372 "%s: alloc pic %d fail!!!\n",
3373 __func__, i);
3374 break;
3375 }
3376 memset(pic, 0, sizeof(struct PIC_s));
3377 hevc->m_PIC[i] = pic;
3378 pic->index = -1;
3379 pic->BUF_index = -1;
3380 if (vdec->parallel_dec == 1) {
3381 pic->y_canvas_index = -1;
3382 pic->uv_canvas_index = -1;
3383 }
3384 }
3385
3386}
3387
3388static void uninit_pic_list(struct hevc_state_s *hevc)
3389{
3390 struct vdec_s *vdec = hw_to_vdec(hevc);
3391 int i;
3392#ifndef MV_USE_FIXED_BUF
3393 dealloc_mv_bufs(hevc);
3394#endif
3395 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3396 struct PIC_s *pic = hevc->m_PIC[i];
3397
3398 if (pic) {
3399 if (vdec->parallel_dec == 1) {
3400 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3401 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3402 }
3403 release_aux_data(hevc, pic);
3404 vfree(pic);
3405 hevc->m_PIC[i] = NULL;
3406 }
3407 }
3408}
3409
3410#ifdef LOSLESS_COMPRESS_MODE
3411static void init_decode_head_hw(struct hevc_state_s *hevc)
3412{
3413
3414 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3415 unsigned int data32;
3416
3417 int losless_comp_header_size =
3418 compute_losless_comp_header_size(hevc->pic_w,
3419 hevc->pic_h);
3420 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3421 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3422
3423 hevc->losless_comp_body_size = losless_comp_body_size;
3424
3425
3426 if (hevc->mmu_enable) {
3427 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3428 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3429 } else {
3430 if (hevc->mem_saving_mode == 1)
3431 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3432 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3433 else
3434 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3435 ((workaround_enable & 2) ? 1 : 0));
3436 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3437 /*
3438 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3439 * //8-bit mode
3440 */
3441 }
3442 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3443 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3444 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3445
3446 if (hevc->mmu_enable) {
3447 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3448 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3449 buf_spec->mmu_vbh.buf_start +
3450 buf_spec->mmu_vbh.buf_size/2);
3451 data32 = READ_VREG(HEVC_SAO_CTRL9);
3452 data32 |= 0x1;
3453 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3454
3455 /* use HEVC_CM_HEADER_START_ADDR */
3456 data32 = READ_VREG(HEVC_SAO_CTRL5);
3457 data32 |= (1<<10);
3458 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3459 }
3460
3461 if (!hevc->m_ins_flag)
3462 hevc_print(hevc, 0,
3463 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3464 __func__, hevc->pic_w, hevc->pic_h,
3465 losless_comp_body_size, losless_comp_header_size);
3466
3467}
3468#endif
3469#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3470
3471static void init_pic_list_hw(struct hevc_state_s *hevc)
3472{
3473 int i;
3474 int cur_pic_num = MAX_REF_PIC_NUM;
3475 int dw_mode = get_double_write_mode(hevc);
3476 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3477 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3478 (0x1 << 1) | (0x1 << 2));
3479 else
3480 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3481
3482 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3483 if (hevc->m_PIC[i] == NULL ||
3484 hevc->m_PIC[i]->index == -1) {
3485 cur_pic_num = i;
3486 break;
3487 }
3488 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3489 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3490 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3491 hevc->m_PIC[i]->header_adr>>5);
3492 else
3493 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3494 hevc->m_PIC[i]->mc_y_adr >> 5);
3495 } else
3496 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3497 hevc->m_PIC[i]->mc_y_adr |
3498 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3499 if (dw_mode & 0x10) {
3500 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3501 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3502 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3503 }
3504 else
3505 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3506 hevc->m_PIC[i]->mc_u_v_adr |
3507 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3508 | 0x1);
3509 }
3510 }
3511 if (cur_pic_num == 0)
3512 return;
3513 for (; i < MAX_REF_PIC_NUM; i++) {
3514 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3515 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3516 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3517 hevc->m_PIC[cur_pic_num-1]->header_adr>>5);
3518 else
3519 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3520 hevc->m_PIC[cur_pic_num-1]->mc_y_adr >> 5);
3521#ifndef LOSLESS_COMPRESS_MODE
3522 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3523 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr >> 5);
3524#endif
3525 } else {
3526 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3527 hevc->m_PIC[cur_pic_num-1]->mc_y_adr|
3528 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_y<<8)
3529 | 0x1);
3530#ifndef LOSLESS_COMPRESS_MODE
3531 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3532 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr|
3533 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_u_v<<8)
3534 | 0x1);
3535#endif
3536 }
3537 }
3538
3539 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3540
3541 /* Zero out canvas registers in IPP -- avoid simulation X */
3542 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3543 (0 << 8) | (0 << 1) | 1);
3544 for (i = 0; i < 32; i++)
3545 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3546
3547#ifdef LOSLESS_COMPRESS_MODE
3548 if ((dw_mode & 0x10) == 0)
3549 init_decode_head_hw(hevc);
3550#endif
3551
3552}
3553
3554
3555static void dump_pic_list(struct hevc_state_s *hevc)
3556{
3557 int i;
3558 struct PIC_s *pic;
3559
3560 hevc_print(hevc, 0,
3561 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3562 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3563 pic = hevc->m_PIC[i];
3564 if (pic == NULL || pic->index == -1)
3565 continue;
3566 hevc_print_cont(hevc, 0,
3567 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3568 pic->index, pic->BUF_index,
3569#ifndef MV_USE_FIXED_BUF
3570 pic->mv_buf_index,
3571#else
3572 -1,
3573#endif
3574 pic->decode_idx, pic->POC, pic->referenced);
3575 hevc_print_cont(hevc, 0,
3576 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3577 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3578 pic->width, pic->height);
3579 hevc_print_cont(hevc, 0,
3580 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3581 pic->output_ready, pic->mpred_mv_wr_start_addr,
3582 pic->vf_ref);
3583 }
3584}
3585
3586static void clear_referenced_flag(struct hevc_state_s *hevc)
3587{
3588 int i;
3589 struct PIC_s *pic;
3590 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3591 pic = hevc->m_PIC[i];
3592 if (pic == NULL || pic->index == -1)
3593 continue;
3594 if (pic->referenced) {
3595 pic->referenced = 0;
3596 put_mv_buf(hevc, pic);
3597 }
3598 }
3599}
3600
3601static void clear_poc_flag(struct hevc_state_s *hevc)
3602{
3603 int i;
3604 struct PIC_s *pic;
3605 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3606 pic = hevc->m_PIC[i];
3607 if (pic == NULL || pic->index == -1)
3608 continue;
3609 pic->POC = INVALID_POC;
3610 }
3611}
3612
3613static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3614 unsigned char flush_flag)
3615{
3616 int num_pic_not_yet_display = 0;
3617 int i;
3618 struct PIC_s *pic;
3619 struct PIC_s *pic_display = NULL;
3620 struct vdec_s *vdec = hw_to_vdec(hevc);
3621
3622 if (hevc->i_only & 0x4) {
3623 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3624 pic = hevc->m_PIC[i];
3625 if (pic == NULL ||
3626 (pic->index == -1) ||
3627 (pic->BUF_index == -1) ||
3628 (pic->POC == INVALID_POC))
3629 continue;
3630 if (pic->output_mark) {
3631 if (pic_display) {
3632 if (pic->decode_idx <
3633 pic_display->decode_idx)
3634 pic_display = pic;
3635
3636 } else
3637 pic_display = pic;
3638
3639 }
3640 }
3641 if (pic_display) {
3642 pic_display->output_mark = 0;
3643 pic_display->recon_mark = 0;
3644 pic_display->output_ready = 1;
3645 pic_display->referenced = 0;
3646 put_mv_buf(hevc, pic_display);
3647 }
3648 } else {
3649 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3650 pic = hevc->m_PIC[i];
3651 if (pic == NULL ||
3652 (pic->index == -1) ||
3653 (pic->BUF_index == -1) ||
3654 (pic->POC == INVALID_POC))
3655 continue;
3656 if (pic->output_mark)
3657 num_pic_not_yet_display++;
3658 if (pic->slice_type == 2 &&
3659 hevc->vf_pre_count == 0 &&
3660 fast_output_enable & 0x1) {
3661 /*fast output for first I picture*/
3662 pic->num_reorder_pic = 0;
3663 if (vdec->master || vdec->slave)
3664 pic_display = pic;
3665 hevc_print(hevc, 0, "VH265: output first frame\n");
3666 }
3667 }
3668
3669 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3670 pic = hevc->m_PIC[i];
3671 if (pic == NULL ||
3672 (pic->index == -1) ||
3673 (pic->BUF_index == -1) ||
3674 (pic->POC == INVALID_POC))
3675 continue;
3676 if (pic->output_mark) {
3677 if (pic_display) {
3678 if (pic->POC < pic_display->POC)
3679 pic_display = pic;
3680 else if ((pic->POC == pic_display->POC)
3681 && (pic->decode_idx <
3682 pic_display->
3683 decode_idx))
3684 pic_display
3685 = pic;
3686 } else
3687 pic_display = pic;
3688 }
3689 }
3690 if (pic_display) {
3691 if ((num_pic_not_yet_display >
3692 pic_display->num_reorder_pic)
3693 || flush_flag) {
3694 pic_display->output_mark = 0;
3695 pic_display->recon_mark = 0;
3696 pic_display->output_ready = 1;
3697 } else if (num_pic_not_yet_display >=
3698 (MAX_REF_PIC_NUM - 1)) {
3699 pic_display->output_mark = 0;
3700 pic_display->recon_mark = 0;
3701 pic_display->output_ready = 1;
3702 hevc_print(hevc, 0,
3703					"Warning, num_reorder_pic %d is beyond buf num\n",
3704 pic_display->num_reorder_pic);
3705 } else
3706 pic_display = NULL;
3707 }
3708 }
3709
3710 if (pic_display && (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3711 pic_display = NULL;
3712 hevc->first_pic_flag = 0;
3713 }
3714 return pic_display;
3715}
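/*
 * Informal summary of output_pic(): in i_only (0x4) mode the pending picture
 * with the smallest decode_idx is returned. Otherwise the candidate is the
 * pending picture with the smallest POC, and it is only released for display
 * once more pictures are waiting than its num_reorder_pic allows, or on a
 * flush, or when nearly all reference buffers are already occupied (in which
 * case the warning above is printed).
 */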
3716
3717static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3718{
3719 int i;
3720 struct PIC_s *pic;
3721
3722 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3723 hevc_print(hevc, 0,
3724 "config_mc_buffer entered .....\n");
3725 if (cur_pic->slice_type != 2) { /* P and B pic */
3726 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3727 (0 << 8) | (0 << 1) | 1);
3728 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3729 pic =
3730 get_ref_pic_by_POC(hevc,
3731 cur_pic->
3732 m_aiRefPOCList0[cur_pic->
3733 slice_idx][i]);
3734 if (pic) {
3735 if ((pic->width != hevc->pic_w) ||
3736 (pic->height != hevc->pic_h)) {
3737 hevc_print(hevc, 0,
3738 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3739 __func__, pic->POC,
3740 pic->width, pic->height);
3741 cur_pic->error_mark = 1;
3742 }
3743 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3744 cur_pic->error_mark = 1;
3745 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3746 (pic->mc_canvas_u_v << 16)
3747 | (pic->mc_canvas_u_v
3748 << 8) |
3749 pic->mc_canvas_y);
3750 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3751 hevc_print_cont(hevc, 0,
3752 "refid %x mc_canvas_u_v %x",
3753 i, pic->mc_canvas_u_v);
3754 hevc_print_cont(hevc, 0,
3755 " mc_canvas_y %x\n",
3756 pic->mc_canvas_y);
3757 }
3758 } else
3759 cur_pic->error_mark = 1;
3760
3761 if (pic == NULL || pic->error_mark) {
3762 hevc_print(hevc, 0,
3763 "Error %s, %dth poc (%d) %s",
3764 __func__, i,
3765 cur_pic->m_aiRefPOCList0[cur_pic->
3766 slice_idx][i],
3767 pic ? "has error" :
3768 "not in list0");
3769 }
3770 }
3771 }
3772 if (cur_pic->slice_type == 0) { /* B pic */
3773 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3774 hevc_print(hevc, 0,
3775 "config_mc_buffer RefNum_L1\n");
3776 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3777 (16 << 8) | (0 << 1) | 1);
3778
3779 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3780 pic =
3781 get_ref_pic_by_POC(hevc,
3782 cur_pic->
3783 m_aiRefPOCList1[cur_pic->
3784 slice_idx][i]);
3785 if (pic) {
3786 if ((pic->width != hevc->pic_w) ||
3787 (pic->height != hevc->pic_h)) {
3788 hevc_print(hevc, 0,
3789 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3790 __func__, pic->POC,
3791 pic->width, pic->height);
3792 cur_pic->error_mark = 1;
3793 }
3794
3795 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3796 cur_pic->error_mark = 1;
3797 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3798 (pic->mc_canvas_u_v << 16)
3799 | (pic->mc_canvas_u_v
3800 << 8) |
3801 pic->mc_canvas_y);
3802 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3803 hevc_print_cont(hevc, 0,
3804 "refid %x mc_canvas_u_v %x",
3805 i, pic->mc_canvas_u_v);
3806 hevc_print_cont(hevc, 0,
3807 " mc_canvas_y %x\n",
3808 pic->mc_canvas_y);
3809 }
3810 } else
3811 cur_pic->error_mark = 1;
3812
3813 if (pic == NULL || pic->error_mark) {
3814 hevc_print(hevc, 0,
3815					"Error %s, %dth poc (%d) %s\n",
3816 __func__, i,
3817 cur_pic->m_aiRefPOCList1[cur_pic->
3818 slice_idx][i],
3819 pic ? "has error" :
3820 "not in list1");
3821 }
3822 }
3823 }
3824 return 0;
3825}
3826
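/*
 * Scan the picture store and, for every picture still marked as referenced
 * that does not appear in the current slice's reference picture set
 * (CUR_RPS), clear its referenced flag and release its MV buffer.
 */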
3827static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
3828 union param_u *params)
3829{
3830 int ii, i;
3831 int poc_tmp;
3832 struct PIC_s *pic;
3833 unsigned char is_referenced;
3834 /* hevc_print(hevc, 0,
3835 "%s cur_poc %d\n", __func__, cur_poc); */
3836 if (pic_list_debug & 0x2) {
3837 pr_err("cur poc %d\n", cur_poc);
3838 }
3839 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
3840 pic = hevc->m_PIC[ii];
3841 if (pic == NULL ||
3842 pic->index == -1 ||
3843 pic->BUF_index == -1
3844 )
3845 continue;
3846
3847 if ((pic->referenced == 0 || pic->POC == cur_poc))
3848 continue;
3849 is_referenced = 0;
3850 for (i = 0; i < 16; i++) {
3851 int delt;
3852
3853 if (params->p.CUR_RPS[i] & 0x8000)
3854 break;
3855 delt =
3856 params->p.CUR_RPS[i] &
3857 ((1 << (RPS_USED_BIT - 1)) - 1);
3858 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
3859 poc_tmp =
3860 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
3861 delt);
3862 } else
3863 poc_tmp = cur_poc + delt;
3864 if (poc_tmp == pic->POC) {
3865 is_referenced = 1;
3866 /* hevc_print(hevc, 0, "i is %d\n", i); */
3867 break;
3868 }
3869 }
3870 if (is_referenced == 0) {
3871 pic->referenced = 0;
3872 put_mv_buf(hevc, pic);
3873 /* hevc_print(hevc, 0,
3874 "set poc %d reference to 0\n", pic->POC); */
3875 if (pic_list_debug & 0x2) {
3876 pr_err("set poc %d reference to 0\n", pic->POC);
3877 }
3878 }
3879 }
3880
3881}
3882
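/*
 * Build the POC reference lists L0/L1 for the current slice from the RPS
 * delta-POC entries, applying the reference picture list modification
 * syntax when modification_flag is set, and record the resulting slice
 * type and active reference counts in the current PIC_s.
 */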
3883static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
3884{
3885 struct PIC_s *pic = hevc->cur_pic;
3886 int i, rIdx;
3887 int num_neg = 0;
3888 int num_pos = 0;
3889 int total_num;
3890 int num_ref_idx_l0_active =
3891 (params->p.num_ref_idx_l0_active >
3892 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
3893 params->p.num_ref_idx_l0_active;
3894 int num_ref_idx_l1_active =
3895 (params->p.num_ref_idx_l1_active >
3896 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
3897 params->p.num_ref_idx_l1_active;
3898
3899 int RefPicSetStCurr0[16];
3900 int RefPicSetStCurr1[16];
3901
3902 for (i = 0; i < 16; i++) {
3903 RefPicSetStCurr0[i] = 0;
3904 RefPicSetStCurr1[i] = 0;
3905 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
3906 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
3907 }
3908 for (i = 0; i < 16; i++) {
3909 if (params->p.CUR_RPS[i] & 0x8000)
3910 break;
3911 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
3912 int delt =
3913 params->p.CUR_RPS[i] &
3914 ((1 << (RPS_USED_BIT - 1)) - 1);
3915
3916 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
3917 RefPicSetStCurr0[num_neg] =
3918 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
3919 delt);
3920 /* hevc_print(hevc, 0,
3921 * "RefPicSetStCurr0 %x %x %x\n",
3922 * RefPicSetStCurr0[num_neg], pic->POC,
3923 * (0x800-(params[i]&0x7ff)));
3924 */
3925 num_neg++;
3926 } else {
3927 RefPicSetStCurr1[num_pos] = pic->POC + delt;
3928 /* hevc_print(hevc, 0,
3929 * "RefPicSetStCurr1 %d\n",
3930 * RefPicSetStCurr1[num_pos]);
3931 */
3932 num_pos++;
3933 }
3934 }
3935 }
3936 total_num = num_neg + num_pos;
3937 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3938 hevc_print(hevc, 0,
3939 "%s: curpoc %d slice_type %d, total %d ",
3940 __func__, pic->POC, params->p.slice_type, total_num);
3941 hevc_print_cont(hevc, 0,
3942 "num_neg %d num_list0 %d num_list1 %d\n",
3943 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
3944 }
3945
3946 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3947 hevc_print(hevc, 0,
3948 "HEVC Stream buf start ");
3949 hevc_print_cont(hevc, 0,
3950 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
3951 READ_VREG(HEVC_STREAM_START_ADDR),
3952 READ_VREG(HEVC_STREAM_END_ADDR),
3953 READ_VREG(HEVC_STREAM_WR_PTR),
3954 READ_VREG(HEVC_STREAM_RD_PTR),
3955 READ_VREG(HEVC_STREAM_LEVEL),
3956 READ_VREG(HEVC_STREAM_FIFO_CTL),
3957 READ_VREG(HEVC_PARSER_INT_CONTROL));
3958 }
3959
3960 if (total_num > 0) {
3961 if (params->p.modification_flag & 0x1) {
3962 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3963 hevc_print(hevc, 0, "ref0 POC (modification):");
3964 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
3965 int cIdx = params->p.modification_list[rIdx];
3966
3967 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
3968 cIdx >=
3969 num_neg ? RefPicSetStCurr1[cIdx -
3970 num_neg] :
3971 RefPicSetStCurr0[cIdx];
3972 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3973 hevc_print_cont(hevc, 0, "%d ",
3974 pic->m_aiRefPOCList0[pic->
3975 slice_idx]
3976 [rIdx]);
3977 }
3978 }
3979 } else {
3980 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3981 hevc_print(hevc, 0, "ref0 POC:");
3982 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
3983 int cIdx = rIdx % total_num;
3984
3985 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
3986 cIdx >=
3987 num_neg ? RefPicSetStCurr1[cIdx -
3988 num_neg] :
3989 RefPicSetStCurr0[cIdx];
3990 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3991 hevc_print_cont(hevc, 0, "%d ",
3992 pic->m_aiRefPOCList0[pic->
3993 slice_idx]
3994 [rIdx]);
3995 }
3996 }
3997 }
3998 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3999 hevc_print_cont(hevc, 0, "\n");
4000 if (params->p.slice_type == B_SLICE) {
4001 if (params->p.modification_flag & 0x2) {
4002 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4003 hevc_print(hevc, 0,
4004 "ref1 POC (modification):");
4005 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4006 rIdx++) {
4007 int cIdx;
4008
4009 if (params->p.modification_flag & 0x1) {
4010 cIdx =
4011 params->p.
4012 modification_list
4013 [num_ref_idx_l0_active +
4014 rIdx];
4015 } else {
4016 cIdx =
4017 params->p.
4018 modification_list[rIdx];
4019 }
4020 pic->m_aiRefPOCList1[pic->
4021 slice_idx][rIdx] =
4022 cIdx >=
4023 num_pos ?
4024 RefPicSetStCurr0[cIdx - num_pos]
4025 : RefPicSetStCurr1[cIdx];
4026 if (get_dbg_flag(hevc) &
4027 H265_DEBUG_BUFMGR) {
4028 hevc_print_cont(hevc, 0, "%d ",
4029 pic->
4030 m_aiRefPOCList1[pic->
4031 slice_idx]
4032 [rIdx]);
4033 }
4034 }
4035 } else {
4036 if (get_dbg_flag(hevc) &
4037 H265_DEBUG_BUFMGR)
4038 hevc_print(hevc, 0, "ref1 POC:");
4039 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4040 rIdx++) {
4041 int cIdx = rIdx % total_num;
4042
4043 pic->m_aiRefPOCList1[pic->
4044 slice_idx][rIdx] =
4045 cIdx >=
4046 num_pos ?
4047 RefPicSetStCurr0[cIdx -
4048 num_pos]
4049 : RefPicSetStCurr1[cIdx];
4050 if (get_dbg_flag(hevc) &
4051 H265_DEBUG_BUFMGR) {
4052 hevc_print_cont(hevc, 0, "%d ",
4053 pic->
4054 m_aiRefPOCList1[pic->
4055 slice_idx]
4056 [rIdx]);
4057 }
4058 }
4059 }
4060 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4061 hevc_print_cont(hevc, 0, "\n");
4062 }
4063 }
4064 /*set m_PIC */
4065 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4066 (params->p.slice_type == P_SLICE) ? 1 :
4067 (params->p.slice_type == B_SLICE) ? 0 : 3;
4068 pic->RefNum_L0 = num_ref_idx_l0_active;
4069 pic->RefNum_L1 = num_ref_idx_l1_active;
4070}
4071
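/*
 * Derive the tile layout (column/row counts, per-tile width, height and
 * start CU coordinates) from the picture parameters, either with uniform
 * spacing or from the explicit tile_width/tile_height arrays, and assign
 * each tile its SAO vertical/above buffer addresses in the work space.
 */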
4072static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4073 int pic_height_cu, int sao_mem_unit,
4074 union param_u *params)
4075{
4076 int i, j;
4077 int start_cu_x, start_cu_y;
4078 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4079 int sao_abv_size = sao_mem_unit * pic_width_cu;
4080#ifdef DETREFILL_ENABLE
4081 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4082 int tmpRefillLcuSize = 1 <<
4083 (params->p.log2_min_coding_block_size_minus3 +
4084 3 + params->p.log2_diff_max_min_coding_block_size);
4085 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4086 "%x, %x, %x, %x\n",
4087 params->p.slice_segment_address,
4088 params->p.bit_depth,
4089 params->p.tiles_enabled_flag,
4090 tmpRefillLcuSize);
4091 if (params->p.slice_segment_address == 0 &&
4092 params->p.bit_depth != 0 &&
4093 (params->p.tiles_enabled_flag & 1) &&
4094 tmpRefillLcuSize == 64)
4095 hevc->delrefill_check = 1;
4096 else
4097 hevc->delrefill_check = 0;
4098 }
4099#endif
4100
4101 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4102 if (params->p.tiles_enabled_flag & 1) {
4103 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4104 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4105
4106 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4107 || hevc->num_tile_row <= 0) {
4108 hevc->num_tile_row = 1;
4109 hevc_print(hevc, 0,
4110 "%s: num_tile_rows_minus1 (%d) error!!\n",
4111 __func__, params->p.num_tile_rows_minus1);
4112 }
4113 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4114 || hevc->num_tile_col <= 0) {
4115 hevc->num_tile_col = 1;
4116 hevc_print(hevc, 0,
4117 "%s: num_tile_columns_minus1 (%d) error!!\n",
4118 __func__, params->p.num_tile_columns_minus1);
4119 }
4120 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4121 hevc_print(hevc, 0,
4122 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4123 __func__, pic_width_cu, pic_height_cu);
4124 hevc_print_cont(hevc, 0,
4125 "num_tile_col %d num_tile_row %d:\n",
4126 hevc->num_tile_col, hevc->num_tile_row);
4127 }
4128
4129 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4130 int w = pic_width_cu / hevc->num_tile_col;
4131 int h = pic_height_cu / hevc->num_tile_row;
4132
4133 start_cu_y = 0;
4134 for (i = 0; i < hevc->num_tile_row; i++) {
4135 start_cu_x = 0;
4136 for (j = 0; j < hevc->num_tile_col; j++) {
4137 if (j == (hevc->num_tile_col - 1)) {
4138 hevc->m_tile[i][j].width =
4139 pic_width_cu -
4140 start_cu_x;
4141 } else
4142 hevc->m_tile[i][j].width = w;
4143 if (i == (hevc->num_tile_row - 1)) {
4144 hevc->m_tile[i][j].height =
4145 pic_height_cu -
4146 start_cu_y;
4147 } else
4148 hevc->m_tile[i][j].height = h;
4149 hevc->m_tile[i][j].start_cu_x
4150 = start_cu_x;
4151 hevc->m_tile[i][j].start_cu_y
4152 = start_cu_y;
4153 hevc->m_tile[i][j].sao_vb_start_addr =
4154 hevc->work_space_buf->sao_vb.
4155 buf_start + j * sao_vb_size;
4156 hevc->m_tile[i][j].sao_abv_start_addr =
4157 hevc->work_space_buf->sao_abv.
4158 buf_start + i * sao_abv_size;
4159 if (get_dbg_flag(hevc) &
4160 H265_DEBUG_BUFMGR) {
4161 hevc_print_cont(hevc, 0,
4162 "{y=%d, x=%d w %d h %d ",
4163 i, j, hevc->m_tile[i][j].width,
4164 hevc->m_tile[i][j].height);
4165 hevc_print_cont(hevc, 0,
4166 "start_x %d start_y %d ",
4167 hevc->m_tile[i][j].start_cu_x,
4168 hevc->m_tile[i][j].start_cu_y);
4169 hevc_print_cont(hevc, 0,
4170 "sao_vb_start 0x%x ",
4171 hevc->m_tile[i][j].
4172 sao_vb_start_addr);
4173 hevc_print_cont(hevc, 0,
4174 "sao_abv_start 0x%x}\n",
4175 hevc->m_tile[i][j].
4176 sao_abv_start_addr);
4177 }
4178 start_cu_x += hevc->m_tile[i][j].width;
4179
4180 }
4181 start_cu_y += hevc->m_tile[i][0].height;
4182 }
4183 } else {
4184 start_cu_y = 0;
4185 for (i = 0; i < hevc->num_tile_row; i++) {
4186 start_cu_x = 0;
4187 for (j = 0; j < hevc->num_tile_col; j++) {
4188 if (j == (hevc->num_tile_col - 1)) {
4189 hevc->m_tile[i][j].width =
4190 pic_width_cu -
4191 start_cu_x;
4192 } else {
4193 hevc->m_tile[i][j].width =
4194 params->p.tile_width[j];
4195 }
4196 if (i == (hevc->num_tile_row - 1)) {
4197 hevc->m_tile[i][j].height =
4198 pic_height_cu -
4199 start_cu_y;
4200 } else {
4201 hevc->m_tile[i][j].height =
4202 params->
4203 p.tile_height[i];
4204 }
4205 hevc->m_tile[i][j].start_cu_x
4206 = start_cu_x;
4207 hevc->m_tile[i][j].start_cu_y
4208 = start_cu_y;
4209 hevc->m_tile[i][j].sao_vb_start_addr =
4210 hevc->work_space_buf->sao_vb.
4211 buf_start + j * sao_vb_size;
4212 hevc->m_tile[i][j].sao_abv_start_addr =
4213 hevc->work_space_buf->sao_abv.
4214 buf_start + i * sao_abv_size;
4215 if (get_dbg_flag(hevc) &
4216 H265_DEBUG_BUFMGR) {
4217 hevc_print_cont(hevc, 0,
4218 "{y=%d, x=%d w %d h %d ",
4219 i, j, hevc->m_tile[i][j].width,
4220 hevc->m_tile[i][j].height);
4221 hevc_print_cont(hevc, 0,
4222 "start_x %d start_y %d ",
4223 hevc->m_tile[i][j].start_cu_x,
4224 hevc->m_tile[i][j].start_cu_y);
4225 hevc_print_cont(hevc, 0,
4226 "sao_vb_start 0x%x ",
4227 hevc->m_tile[i][j].
4228 sao_vb_start_addr);
4229 hevc_print_cont(hevc, 0,
4230 "sao_abv_start 0x%x}\n",
4231 hevc->m_tile[i][j].
4232 sao_abv_start_addr);
4233
4234 }
4235 start_cu_x += hevc->m_tile[i][j].width;
4236 }
4237 start_cu_y += hevc->m_tile[i][0].height;
4238 }
4239 }
4240 } else {
4241 hevc->num_tile_col = 1;
4242 hevc->num_tile_row = 1;
4243 hevc->m_tile[0][0].width = pic_width_cu;
4244 hevc->m_tile[0][0].height = pic_height_cu;
4245 hevc->m_tile[0][0].start_cu_x = 0;
4246 hevc->m_tile[0][0].start_cu_y = 0;
4247 hevc->m_tile[0][0].sao_vb_start_addr =
4248 hevc->work_space_buf->sao_vb.buf_start;
4249 hevc->m_tile[0][0].sao_abv_start_addr =
4250 hevc->work_space_buf->sao_abv.buf_start;
4251 }
4252}
4253
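/*
 * Map a CU address to its tile: returns the tile column in bits [7:0] and
 * the tile row in bits [15:8], or -1 if pic_width_lcu is 0.
 */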
4254static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4255 int pic_width_lcu)
4256{
4257 int cu_x;
4258 int cu_y;
4259 int tile_x = 0;
4260 int tile_y = 0;
4261 int i;
4262
4263 if (pic_width_lcu == 0) {
4264 if (get_dbg_flag(hevc)) {
4265 hevc_print(hevc, 0,
4266 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4267 __func__, hevc->pic_w, hevc->pic_h);
4268 }
4269 return -1;
4270 }
4271 cu_x = cu_adr % pic_width_lcu;
4272 cu_y = cu_adr / pic_width_lcu;
4273 if (hevc->tile_enabled) {
4274 for (i = 0; i < hevc->num_tile_col; i++) {
4275 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4276 tile_x = i;
4277 else
4278 break;
4279 }
4280 for (i = 0; i < hevc->num_tile_row; i++) {
4281 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4282 tile_y = i;
4283 else
4284 break;
4285 }
4286 }
4287 return (tile_x) | (tile_y << 8);
4288}
4289
4290static void print_scratch_error(int error_num)
4291{
4292#if 0
4293 if (get_dbg_flag(hevc)) {
4294 hevc_print(hevc, 0,
4295 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4296 error_num);
4297 }
4298#endif
4299}
4300
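/*
 * Point the decoder hardware at the work space buffers (IPP line buffer,
 * RPM, short-term RPS, VPS/SPS/PPS, SAO, scale LUT, deblocking data and,
 * when the MMU is enabled, the frame MMU map).
 */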
4301static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4302{
4303 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4304
4305 if (get_dbg_flag(hevc))
4306 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4307 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4308 __func__,
4309 buf_spec->ipp.buf_start,
4310 buf_spec->start_adr,
4311 buf_spec->short_term_rps.buf_start,
4312 buf_spec->vps.buf_start,
4313 buf_spec->sps.buf_start,
4314 buf_spec->pps.buf_start,
4315 buf_spec->sao_up.buf_start,
4316 buf_spec->swap_buf.buf_start,
4317 buf_spec->swap_buf2.buf_start,
4318 buf_spec->scalelut.buf_start,
4319 buf_spec->dblk_para.buf_start,
4320 buf_spec->dblk_data.buf_start,
4321 buf_spec->dblk_data2.buf_start);
4322 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4323 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4324 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4325 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4326 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4327 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4328 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4329 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4330 if (hevc->mmu_enable) {
4331 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4332 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4333 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4334 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4335 } else
4336 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4337 } /*else
4338 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4339 buf_spec->swap_buf.buf_start);
4340 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4341 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4342 /* cfg_p_addr */
4343 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4344 /* cfg_d_addr */
4345 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4346
4347 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4348
4349 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4350}
4351
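/* Load the fixed parser command table into HEVC_PARSER_CMD_WRITE. */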
4352static void parser_cmd_write(void)
4353{
4354 u32 i;
4355 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4356 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4357 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4358 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4359 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4360 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4361 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4362 0x7C00
4363 };
4364 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4365 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4366}
4367
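/*
 * One-time decoder core setup: sanity-check the parser scratch registers,
 * reset IQIT, enable stream fetch and shift, program the parser interrupt
 * and command registers, clear the IQIT scale LUT and reset the IPP/MPP.
 */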
4368static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4369 int decode_pic_begin, int decode_pic_num)
4370{
4371 unsigned int data32;
4372 int i;
4373#if 0
4374 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4375 /* Set MCR fetch priorities*/
4376 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4377 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4378 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4379 }
4380#endif
4381#if 1
4382 /* m8baby test1902 */
4383 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4384 hevc_print(hevc, 0,
4385 "%s\n", __func__);
4386 data32 = READ_VREG(HEVC_PARSER_VERSION);
4387 if (data32 != 0x00010001) {
4388 print_scratch_error(25);
4389 return;
4390 }
4391 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4392 data32 = READ_VREG(HEVC_PARSER_VERSION);
4393 if (data32 != 0x5a5a55aa) {
4394 print_scratch_error(26);
4395 return;
4396 }
4397#if 0
4398 /* test Parser Reset */
4399 /* reset iqit to start mem init again */
4400 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4401 (1 << 3) /* reset_whole parser */
4402 );
4403 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4404 data32 = READ_VREG(HEVC_PARSER_VERSION);
4405 if (data32 != 0x00010001)
4406 hevc_print(hevc, 0,
4407 "Test Parser Fatal Error\n");
4408#endif
4409 /* reset iqit to start mem init again */
4410 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4411 );
4412 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4413 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4414
4415#endif
4416 if (!hevc->m_ins_flag) {
4417 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4418 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4419 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4420 data32 |= (0xf << 25); /*arwlen_axi_max*/
4421 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4422 }
4423 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4424 if (data32 != 0x00000100) {
4425 print_scratch_error(29);
4426 return;
4427 }
4428 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4429 if (data32 != 0x00000300) {
4430 print_scratch_error(30);
4431 return;
4432 }
4433 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4434 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4435 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4436 if (data32 != 0x12345678) {
4437 print_scratch_error(31);
4438 return;
4439 }
4440 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4441 if (data32 != 0x9abcdef0) {
4442 print_scratch_error(32);
4443 return;
4444 }
4445 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4446 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4447
4448 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4449 data32 &= 0x03ffffff;
4450 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4451 | /* stream_buffer_empty_int_amrisc_enable */
4452 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4453 (1 << 7) | /* dec_done_int_cpu_enable */
4454 (1 << 4) | /* startcode_found_int_cpu_enable */
4455 (0 << 3) | /* startcode_found_int_amrisc_enable */
4456 (1 << 0) /* parser_int_enable */
4457 ;
4458 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4459
4460 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4461 data32 = data32 | (1 << 1) | /* emulation_check_on */
4462 (1 << 0) /* startcode_check_on */
4463 ;
4464 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4465
4466 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4467 (2 << 4) | /* emulate_code_length_sub_1 */
4468 (2 << 1) | /* start_code_length_sub_1 */
4469 (1 << 0) /* stream_shift_enable */
4470 );
4471
4472 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4473 );
4474 /* hevc_parser_core_clk_en */
4475 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4476 );
4477
4478 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4479
4480 /* Initial IQIT_SCALELUT memory -- just to avoid X in simulation */
4481 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4482 for (i = 0; i < 1024; i++)
4483 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4484
4485#ifdef ENABLE_SWAP_TEST
4486 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4487#endif
4488
4489 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4490 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4491 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4492 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4493 /* Send parser_cmd */
4494 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4495
4496 parser_cmd_write();
4497
4498 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4499 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4500 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4501
4502 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4503 /* (1 << 8) | // sao_sw_pred_enable */
4504 (1 << 5) | /* parser_sao_if_en */
4505 (1 << 2) | /* parser_mpred_if_en */
4506 (1 << 0) /* parser_scaler_if_en */
4507 );
4508
4509 /* Changed to Start MPRED in microcode */
4510 /*
4511 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4512 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4513 * (1<<31)
4514 * );
4515 */
4516
4517 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4518 (1 << 0) /* software reset ipp and mpp */
4519 );
4520 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4521 (0 << 0) /* software reset ipp and mpp */
4522 );
4523
4524 if (get_double_write_mode(hevc) & 0x10)
4525 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4526 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
4527 );
4528
4529}
4530
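/*
 * Reprogram the stream parser, shift, CABAC and IPP blocks after a
 * decoder reset, mirroring the setup done in hevc_init_decoder_hw().
 */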
4531static void decoder_hw_reset(void)
4532{
4533 int i;
4534 unsigned int data32;
4535 /* reset iqit to start mem init again */
4536 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4537 );
4538 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4539 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4540
4541 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4542 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4543 ;
4544 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4545
4546 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4547 if (data32 != 0x00000100) {
4548 print_scratch_error(29);
4549 return;
4550 }
4551 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4552 if (data32 != 0x00000300) {
4553 print_scratch_error(30);
4554 return;
4555 }
4556 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4557 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4558 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4559 if (data32 != 0x12345678) {
4560 print_scratch_error(31);
4561 return;
4562 }
4563 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4564 if (data32 != 0x9abcdef0) {
4565 print_scratch_error(32);
4566 return;
4567 }
4568 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4569 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4570
4571 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4572 data32 &= 0x03ffffff;
4573 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4574 | /* stream_buffer_empty_int_amrisc_enable */
4575 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4576 (1 << 7) | /* dec_done_int_cpu_enable */
4577 (1 << 4) | /* startcode_found_int_cpu_enable */
4578 (0 << 3) | /* startcode_found_int_amrisc_enable */
4579 (1 << 0) /* parser_int_enable */
4580 ;
4581 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4582
4583 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4584 data32 = data32 | (1 << 1) | /* emulation_check_on */
4585 (1 << 0) /* startcode_check_on */
4586 ;
4587 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4588
4589 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4590 (2 << 4) | /* emulate_code_length_sub_1 */
4591 (2 << 1) | /* start_code_length_sub_1 */
4592 (1 << 0) /* stream_shift_enable */
4593 );
4594
4595 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4596 );
4597 /* hevc_parser_core_clk_en */
4598 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4599 );
4600
4601 /* Initial IQIT_SCALELUT memory -- just to avoid X in simulation */
4602 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4603 for (i = 0; i < 1024; i++)
4604 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4605
4606 /* Send parser_cmd */
4607 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4608
4609 parser_cmd_write();
4610
4611 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4612 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4613 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4614
4615 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4616 /* (1 << 8) | // sao_sw_pred_enable */
4617 (1 << 5) | /* parser_sao_if_en */
4618 (1 << 2) | /* parser_mpred_if_en */
4619 (1 << 0) /* parser_scaler_if_en */
4620 );
4621
4622 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4623 (1 << 0) /* software reset ipp and mpp */
4624 );
4625 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4626 (0 << 0) /* software reset ipp and mpp */
4627 );
4628}
4629
4630#ifdef CONFIG_HEVC_CLK_FORCED_ON
4631static void config_hevc_clk_forced_on(void)
4632{
4633 unsigned int rdata32;
4634 /* IQIT */
4635 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4636 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4637
4638 /* DBLK */
4639 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4640 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4641
4642 /* SAO */
4643 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4644 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4645
4646 /* MPRED */
4647 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4648 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4649
4650 /* PARSER */
4651 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4652 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4653 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4654 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4655 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4656 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4657 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4658 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4659 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4660 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4661 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4662 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4663 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4664
4665 /* IPP */
4666 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4667 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4668
4669 /* MCRCC */
4670 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4671 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4672}
4673#endif
4674
4675#ifdef MCRCC_ENABLE
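/*
 * Configure the MC reference cache (MCRCC): reset it, leave it disabled
 * for I pictures, otherwise seed CTL2/CTL3 with reference canvas indexes
 * read back from the ANC canvas registers, then enable progressive mode.
 */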
4676static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4677{
4678 unsigned int rdata32;
4679 unsigned int rdata32_2;
4680 int l0_cnt = 0;
4681 int l1_cnt = 0x7fff;
4682
4683 if (get_double_write_mode(hevc) & 0x10) {
4684 l0_cnt = hevc->cur_pic->RefNum_L0;
4685 l1_cnt = hevc->cur_pic->RefNum_L1;
4686 }
4687
4688 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4689
4690 if (slice_type == 2) { /* I-PIC */
4691 /* remove reset -- disables clock */
4692 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4693 return;
4694 }
4695
4696 if (slice_type == 0) { /* B-PIC */
4697 /* Programme canvas0 */
4698 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4699 (0 << 8) | (0 << 1) | 0);
4700 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4701 rdata32 = rdata32 & 0xffff;
4702 rdata32 = rdata32 | (rdata32 << 16);
4703 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4704
4705 /* Programme canvas1 */
4706 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4707 (16 << 8) | (1 << 1) | 0);
4708 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4709 rdata32_2 = rdata32_2 & 0xffff;
4710 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4711 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4712 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4713 rdata32_2 = rdata32_2 & 0xffff;
4714 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4715 }
4716 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4717 } else { /* P-PIC */
4718 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4719 (0 << 8) | (1 << 1) | 0);
4720 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4721 rdata32 = rdata32 & 0xffff;
4722 rdata32 = rdata32 | (rdata32 << 16);
4723 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4724
4725 if (l0_cnt == 1) {
4726 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4727 } else {
4728 /* Programme canvas1 */
4729 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4730 rdata32 = rdata32 & 0xffff;
4731 rdata32 = rdata32 | (rdata32 << 16);
4732 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4733 }
4734 }
4735 /* enable mcrcc progressive-mode */
4736 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4737}
4738#endif
4739
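/* Program the SAO above/vertical buffer base addresses and unit sizes. */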
4740static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4741 int sao_mem_unit)
4742{
4743 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4744 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4745 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4746 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4747}
4748
4749static u32 init_aux_size;
4750static int aux_data_is_avaible(struct hevc_state_s *hevc)
4751{
4752 u32 reg_val;
4753
4754 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4755 if (reg_val != 0 && reg_val != init_aux_size)
4756 return 1;
4757 else
4758 return 0;
4759}
4760
4761static void config_aux_buf(struct hevc_state_s *hevc)
4762{
4763 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4764 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4765 (hevc->suffix_aux_size >> 4);
4766 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4767}
4768
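/*
 * Program the motion prediction (MPRED) block for the current slice:
 * MV read/write buffer addresses, picture/tile geometry, reference
 * counts and enables, and the per-list reference POC registers (which
 * must be written in L0/L1 pairs).
 */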
4769static void config_mpred_hw(struct hevc_state_s *hevc)
4770{
4771 int i;
4772 unsigned int data32;
4773 struct PIC_s *cur_pic = hevc->cur_pic;
4774 struct PIC_s *col_pic = hevc->col_pic;
4775 int AMVP_MAX_NUM_CANDS_MEM = 3;
4776 int AMVP_MAX_NUM_CANDS = 2;
4777 int NUM_CHROMA_MODE = 5;
4778 int DM_CHROMA_IDX = 36;
4779 int above_ptr_ctrl = 0;
4780 int buffer_linear = 1;
4781 int cu_size_log2 = 3;
4782
4783 int mpred_mv_rd_start_addr;
4784 int mpred_curr_lcu_x;
4785 int mpred_curr_lcu_y;
4786 int mpred_above_buf_start;
4787 int mpred_mv_rd_ptr;
4788 int mpred_mv_rd_ptr_p1;
4789 int mpred_mv_rd_end_addr;
4790 int MV_MEM_UNIT;
4791 int mpred_mv_wr_ptr;
4792 int *ref_poc_L0, *ref_poc_L1;
4793
4794 int above_en;
4795 int mv_wr_en;
4796 int mv_rd_en;
4797 int col_isIntra;
4798
4799 if (hevc->slice_type != 2) {
4800 above_en = 1;
4801 mv_wr_en = 1;
4802 mv_rd_en = 1;
4803 col_isIntra = 0;
4804 } else {
4805 above_en = 1;
4806 mv_wr_en = 1;
4807 mv_rd_en = 0;
4808 col_isIntra = 0;
4809 }
4810
4811 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
4812 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
4813 mpred_curr_lcu_x = data32 & 0xffff;
4814 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
4815
4816 MV_MEM_UNIT =
4817 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
4818 5 ? 0x80 : 0x20;
4819 mpred_mv_rd_ptr =
4820 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
4821
4822 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
4823 mpred_mv_rd_end_addr =
4824 mpred_mv_rd_start_addr +
4825 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
4826
4827 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
4828
4829 mpred_mv_wr_ptr =
4830 cur_pic->mpred_mv_wr_start_addr +
4831 (hevc->slice_addr * MV_MEM_UNIT);
4832
4833 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4834 hevc_print(hevc, 0,
4835 "cur pic index %d col pic index %d\n", cur_pic->index,
4836 col_pic->index);
4837 }
4838
4839 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
4840 cur_pic->mpred_mv_wr_start_addr);
4841 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
4842
4843 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
4844 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
4845 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
4846
4847 data32 = READ_VREG(HEVC_MPRED_CTRL0);
4848 data32 = (hevc->slice_type |
4849 hevc->new_pic << 2 |
4850 hevc->new_tile << 3 |
4851 hevc->isNextSliceSegment << 4 |
4852 hevc->TMVPFlag << 5 |
4853 hevc->LDCFlag << 6 |
4854 hevc->ColFromL0Flag << 7 |
4855 above_ptr_ctrl << 8 |
4856 above_en << 9 |
4857 mv_wr_en << 10 |
4858 mv_rd_en << 11 |
4859 col_isIntra << 12 |
4860 buffer_linear << 13 |
4861 hevc->LongTerm_Curr << 14 |
4862 hevc->LongTerm_Col << 15 |
4863 hevc->lcu_size_log2 << 16 |
4864 cu_size_log2 << 20 | hevc->plevel << 24);
4865 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
4866
4867 data32 = READ_VREG(HEVC_MPRED_CTRL1);
4868 data32 = (
4869#if 0
4870 /* no set in m8baby test1902 */
4871 /* Don't override clk_forced_on , */
4872 (data32 & (0x1 << 24)) |
4873#endif
4874 hevc->MaxNumMergeCand |
4875 AMVP_MAX_NUM_CANDS << 4 |
4876 AMVP_MAX_NUM_CANDS_MEM << 8 |
4877 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
4878 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
4879
4880 data32 = (hevc->pic_w | hevc->pic_h << 16);
4881 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
4882
4883 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
4884 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
4885
4886 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
4887 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
4888
4889 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
4890 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
4891
4892 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
4893 /* col_RefNum_L0<<16| */
4894 /* col_RefNum_L1<<24 */
4895 );
4896 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
4897
4898 data32 = (hevc->LongTerm_Ref);
4899 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
4900
4901 data32 = 0;
4902 for (i = 0; i < hevc->RefNum_L0; i++)
4903 data32 = data32 | (1 << i);
4904 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
4905
4906 data32 = 0;
4907 for (i = 0; i < hevc->RefNum_L1; i++)
4908 data32 = data32 | (1 << i);
4909 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
4910
4911 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
4912 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
4913
4914 /* below MPRED Ref_POC_xx_Lx registers must follow Ref_POC_xx_L0 ->
4915 * Ref_POC_xx_L1 in pair write order!!!
4916 */
4917 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
4918 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
4919
4920 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
4921 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
4922
4923 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
4924 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
4925
4926 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
4927 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
4928
4929 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
4930 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
4931
4932 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
4933 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
4934
4935 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
4936 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
4937
4938 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
4939 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
4940
4941 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
4942 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
4943
4944 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
4945 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
4946
4947 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
4948 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
4949
4950 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
4951 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
4952
4953 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
4954 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
4955
4956 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
4957 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
4958
4959 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
4960 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
4961
4962 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
4963 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
4964
4965 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
4966 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
4967
4968 if (hevc->new_pic) {
4969 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
4970 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
4971 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
4972 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
4973 } else if (!hevc->isNextSliceSegment) {
4974 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
4975 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
4976 }
4977
4978 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
4979}
4980
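/*
 * Program the SAO and deblocking hardware for the current picture: output
 * buffer addresses (compressed body/header and/or double write), endian
 * and memory map mode, NV12/NV21 selection for the v4l path, and the
 * deblocking filter offsets taken from the slice header or the PPS.
 */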
4981static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
4982{
4983 unsigned int data32, data32_2;
4984 int misc_flag0 = hevc->misc_flag0;
4985 int slice_deblocking_filter_disabled_flag = 0;
4986
4987 int mc_buffer_size_u_v =
4988 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
4989 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
4990 struct PIC_s *cur_pic = hevc->cur_pic;
4991 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
4992
4993 data32 = READ_VREG(HEVC_SAO_CTRL0);
4994 data32 &= (~0xf);
4995 data32 |= hevc->lcu_size_log2;
4996 WRITE_VREG(HEVC_SAO_CTRL0, data32);
4997
4998 data32 = (hevc->pic_w | hevc->pic_h << 16);
4999 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5000
5001 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5002 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5003
5004 if (hevc->new_pic)
5005 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5006#ifdef LOSLESS_COMPRESS_MODE
5007/*SUPPORT_10BIT*/
5008 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5009 data32 = READ_VREG(HEVC_SAO_CTRL5);
5010 data32 &= (~(0xff << 16));
5011
5012 if (get_double_write_mode(hevc) == 2 ||
5013 get_double_write_mode(hevc) == 3)
5014 data32 |= (0xff<<16);
5015 else if (get_double_write_mode(hevc) == 4)
5016 data32 |= (0x33<<16);
5017
5018 if (hevc->mem_saving_mode == 1)
5019 data32 |= (1 << 9);
5020 else
5021 data32 &= ~(1 << 9);
5022 if (workaround_enable & 1)
5023 data32 |= (1 << 7);
5024 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5025 }
5026 data32 = cur_pic->mc_y_adr;
5027 if (get_double_write_mode(hevc))
5028 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5029
5030 if ((get_double_write_mode(hevc) & 0x10) == 0)
5031 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5032
5033 if (hevc->mmu_enable)
5034 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5035#else
5036 data32 = cur_pic->mc_y_adr;
5037 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5038#endif
5039 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5040 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5041
5042#ifdef LOSLESS_COMPRESS_MODE
5043/*SUPPORT_10BIT*/
5044 if (get_double_write_mode(hevc))
5045 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5046#else
5047 data32 = cur_pic->mc_u_v_adr;
5048 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5049#endif
5050 data32 = (mc_buffer_size_u_v_h << 16);
5051 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5052
5053#ifdef LOSLESS_COMPRESS_MODE
5054/*SUPPORT_10BIT*/
5055 if (get_double_write_mode(hevc)) {
5056 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5057 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5058 }
5059#else
5060 /* multi tile to do... */
5061 data32 = cur_pic->mc_y_adr;
5062 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5063
5064 data32 = cur_pic->mc_u_v_adr;
5065 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5066#endif
5067 /* DBLK CONFIG HERE */
5068 if (hevc->new_pic) {
5069 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5070 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5071 data32 = (0xff << 8) | (0x0 << 0);
5072 else
5073 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5074 (0x0 << 0); /* h265 video format*/
5075
5076 if (hevc->pic_w >= 1280)
5077 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5078 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5079 if (get_double_write_mode(hevc) == 0)
5080 data32 |= (0x1 << 8); /*enable first write*/
5081 else if (get_double_write_mode(hevc) == 0x10)
5082 data32 |= (0x1 << 9); /*double write only*/
5083 else
5084 data32 |= ((0x1 << 8) |(0x1 << 9));
5085
5086 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5087 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5088 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5089 }
5090 data32 = (hevc->pic_w | hevc->pic_h << 16);
5091 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5092
5093 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5094 data32 =
5095 ((misc_flag0 >>
5096 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5097 0x1) << 3;
5098 } else
5099 data32 = 0;
5100 data32 |=
5101 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5102 ((params->p.pps_cr_qp_offset
5103 & 0x1f) <<
5104 9));
5105 data32 |=
5106 (hevc->lcu_size ==
5107 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5108
5109 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5110
5111 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5112 /*if (debug & 0x80) {*/
5113 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5114 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5115 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5116 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5117 data32);
5118 /*}*/
5119 }
5120 }
5121#if 0
5122 data32 = READ_VREG(HEVC_SAO_CTRL1);
5123 data32 &= (~0x3000);
5124 data32 |= (mem_map_mode <<
5125 12);
5126
5127/* [13:12] axi_aformat,
5128 * 0-Linear, 1-32x32, 2-64x32
5129 */
5130 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5131
5132 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5133 data32 &= (~0x30);
5134 data32 |= (mem_map_mode <<
5135 4);
5136
5137/* [5:4] -- address_format
5138 * 00:linear 01:32x32 10:64x32
5139 */
5140 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5141#else
5142 /* m8baby test1902 */
5143 data32 = READ_VREG(HEVC_SAO_CTRL1);
5144 data32 &= (~0x3000);
5145 data32 |= (mem_map_mode <<
5146 12);
5147
5148/* [13:12] axi_aformat, 0-Linear,
5149 * 1-32x32, 2-64x32
5150 */
5151 data32 &= (~0xff0);
5152 /* data32 |= 0x670; // Big-Endian per 64-bit */
5153 data32 |= endian; /* Big-Endian per 64-bit */
5154 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5155 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5156 if (get_double_write_mode(hevc) == 0)
5157 data32 |= 0x2; /*disable double write*/
5158 else if (get_double_write_mode(hevc) & 0x10)
5159 data32 |= 0x1; /*disable cm*/
5160 } else {
5161 unsigned int data;
5162 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5163 (0x0 << 0); /* h265 video format*/
5164 if (hevc->pic_w >= 1280)
5165 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5166 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5167 if (get_double_write_mode(hevc) == 0)
5168 data |= (0x1 << 8); /*enable first write*/
5169 else if (get_double_write_mode(hevc) & 0x10)
5170 data |= (0x1 << 9); /*double write only*/
5171 else
5172 data |= ((0x1 << 8) |(0x1 << 9));
5173
5174 WRITE_VREG(HEVC_DBLK_CFGB, data);
5175 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5176 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5177 }
5178
5179 /* swap uv */
5180 if (hevc->is_used_v4l) {
5181 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5182 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5183 data32 &= ~(1 << 8); /* NV21 */
5184 else
5185 data32 |= (1 << 8); /* NV12 */
5186 }
5187
5188 /*
5189 * [31:24] ar_fifo1_axi_thred
5190 * [23:16] ar_fifo0_axi_thred
5191 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5192 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5193 * [11:08] axi_lendian_C
5194 * [07:04] axi_lendian_Y
5195 * [3] reserved
5196 * [2] clk_forceon
5197 * [1] dw_disable:disable double write output
5198 * [0] cm_disable:disable compress output
5199 */
5200 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5201 if (get_double_write_mode(hevc) & 0x10) {
5202 /* [23:22] dw_v1_ctrl
5203 *[21:20] dw_v0_ctrl
5204 *[19:18] dw_h1_ctrl
5205 *[17:16] dw_h0_ctrl
5206 */
5207 data32 = READ_VREG(HEVC_SAO_CTRL5);
5208 /*set them all 0 for H265_NV21 (no down-scale)*/
5209 data32 &= ~(0xff << 16);
5210 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5211 }
5212
5213 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5214 data32 &= (~0x30);
5215 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5216 data32 |= (mem_map_mode <<
5217 4);
5218 data32 &= (~0xF);
5219 data32 |= 0xf; /* valid only when double write only */
5220 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5221
5222 /* swap uv */
5223 if (hevc->is_used_v4l) {
5224 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5225 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5226 data32 |= (1 << 12); /* NV21 */
5227 else
5228 data32 &= ~(1 << 12); /* NV12 */
5229 }
5230
5231 /*
5232 * [3:0] little_endian
5233 * [5:4] address_format 00:linear 01:32x32 10:64x32
5234 * [7:6] reserved
5235 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5236 * [11:10] reserved
5237 * [12] CbCr_byte_swap
5238 * [31:13] reserved
5239 */
5240 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5241#endif
5242 data32 = 0;
5243 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5244 data32_2 &= (~0x300);
5245 /* slice_deblocking_filter_disabled_flag = 0;
5246	 * ucode has handled it, so read it from ucode directly
5247 */
5248 if (hevc->tile_enabled) {
5249 data32 |=
5250 ((misc_flag0 >>
5251 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5252 0x1) << 0;
5253 data32_2 |=
5254 ((misc_flag0 >>
5255 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5256 0x1) << 8;
5257 }
5258 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5259 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5260		0x1;	/* ucode has handled it, so read it from ucode directly */
5261 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5262 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5263 /* slice_deblocking_filter_disabled_flag =
5264 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5265		 * //ucode has handled it, so read it from ucode directly
5266 */
5267 data32 |= slice_deblocking_filter_disabled_flag << 2;
5268 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5269 hevc_print_cont(hevc, 0,
5270 "(1,%x)", data32);
5271 if (!slice_deblocking_filter_disabled_flag) {
5272 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5273 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5274 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5275 hevc_print_cont(hevc, 0,
5276 "(2,%x)", data32);
5277 }
5278 } else {
5279 data32 |=
5280 ((misc_flag0 >>
5281 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5282 0x1) << 2;
5283 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5284 hevc_print_cont(hevc, 0,
5285 "(3,%x)", data32);
5286 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5287 0x1) == 0) {
5288 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5289 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5290 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5291 hevc_print_cont(hevc, 0,
5292 "(4,%x)", data32);
5293 }
5294 }
5295 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5296 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5297 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5298 || (!slice_deblocking_filter_disabled_flag))) {
5299 data32 |=
5300 ((misc_flag0 >>
5301 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5302 & 0x1) << 1;
5303 data32_2 |=
5304 ((misc_flag0 >>
5305 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5306 & 0x1) << 9;
5307 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5308 hevc_print_cont(hevc, 0,
5309 "(5,%x)\n", data32);
5310 } else {
5311 data32 |=
5312 ((misc_flag0 >>
5313 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5314 & 0x1) << 1;
5315 data32_2 |=
5316 ((misc_flag0 >>
5317 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5318 & 0x1) << 9;
5319 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5320 hevc_print_cont(hevc, 0,
5321 "(6,%x)\n", data32);
5322 }
5323 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5324 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5325}
5326
5327#ifdef TEST_NO_BUF
5328static unsigned char test_flag = 1;
5329#endif
5330
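/*
 * Reclaim picture entries whose size no longer matches the current stream
 * (e.g. after a resolution change) and re-initialize free entries so the
 * number of usable pictures tracks get_work_pic_num().
 */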
5331static void pic_list_process(struct hevc_state_s *hevc)
5332{
5333 int work_pic_num = get_work_pic_num(hevc);
5334 int alloc_pic_count = 0;
5335 int i;
5336 struct PIC_s *pic;
5337 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5338 pic = hevc->m_PIC[i];
5339 if (pic == NULL || pic->index == -1)
5340 continue;
5341 alloc_pic_count++;
5342 if (pic->output_mark == 0 && pic->referenced == 0
5343 && pic->output_ready == 0
5344 && (pic->width != hevc->pic_w ||
5345 pic->height != hevc->pic_h)
5346 ) {
5347 set_buf_unused(hevc, pic->BUF_index);
5348 pic->BUF_index = -1;
5349 if (alloc_pic_count > work_pic_num) {
5350 pic->width = 0;
5351 pic->height = 0;
5352 pic->index = -1;
5353 } else {
5354 pic->width = hevc->pic_w;
5355 pic->height = hevc->pic_h;
5356 }
5357 }
5358 }
5359 if (alloc_pic_count < work_pic_num) {
5360 int new_count = alloc_pic_count;
5361 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5362 pic = hevc->m_PIC[i];
5363 if (pic && pic->index == -1) {
5364 pic->index = i;
5365 pic->BUF_index = -1;
5366 pic->width = hevc->pic_w;
5367 pic->height = hevc->pic_h;
5368 new_count++;
5369 if (new_count >=
5370 work_pic_num)
5371 break;
5372 }
5373 }
5374
5375 }
5376 dealloc_unused_buf(hevc);
5377 if (get_alloc_pic_count(hevc)
5378 != alloc_pic_count) {
5379 hevc_print_cont(hevc, 0,
5380 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5381 __func__,
5382 work_pic_num,
5383 alloc_pic_count,
5384 get_alloc_pic_count(hevc));
5385 }
5386}
5387
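/*
 * Release the scatter-allocated MMU pages of pictures that are not
 * referenced, not marked for output, and not pending display.
 */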
5388static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5389{
5390 int i;
5391 struct PIC_s *pic;
5392 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5393 pic = hevc->m_PIC[i];
5394 if (pic == NULL || pic->index == -1)
5395 continue;
5396
5397 if (pic->output_mark == 0 && pic->referenced == 0
5398 && pic->output_ready == 0
5399 && pic->scatter_alloc
5400 )
5401 release_pic_mmu_buf(hevc, pic);
5402 }
5403
5404}
5405
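/*
 * Pick a free picture entry (unreferenced, not pending output, matching
 * the current picture size) for the new frame, allocate its buffers
 * (canvas, MV buffer and, with the MMU, compressed-frame pages) on
 * demand, and initialize its per-frame state from the parsed parameters.
 */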
5406static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5407 union param_u *rpm_param)
5408{
5409 struct PIC_s *new_pic = NULL;
5410 struct PIC_s *pic;
5411 int i;
5412 int ret;
5413
5414 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5415 pic = hevc->m_PIC[i];
5416 if (pic == NULL || pic->index == -1)
5417 continue;
5418
5419 if (pic->output_mark == 0 && pic->referenced == 0
5420 && pic->output_ready == 0
5421 && pic->width == hevc->pic_w
5422 && pic->height == hevc->pic_h
5423 ) {
5424 if (new_pic) {
5425 if (new_pic->POC != INVALID_POC) {
5426 if (pic->POC == INVALID_POC ||
5427 pic->POC < new_pic->POC)
5428 new_pic = pic;
5429 }
5430 } else
5431 new_pic = pic;
5432 }
5433 }
5434
5435 if (new_pic == NULL)
5436 return NULL;
5437
5438 if (new_pic->BUF_index < 0) {
5439 if (alloc_buf(hevc) < 0)
5440 return NULL;
5441 else {
5442 if (config_pic(hevc, new_pic) < 0) {
5443 dealloc_pic_buf(hevc, new_pic);
5444 return NULL;
5445 }
5446 }
5447 new_pic->width = hevc->pic_w;
5448 new_pic->height = hevc->pic_h;
5449 set_canvas(hevc, new_pic);
5450
5451 init_pic_list_hw(hevc);
5452 }
5453
5454 if (new_pic) {
5455 new_pic->double_write_mode =
5456 get_double_write_mode(hevc);
5457 if (new_pic->double_write_mode)
5458 set_canvas(hevc, new_pic);
5459
5460#ifdef TEST_NO_BUF
5461 if (test_flag) {
5462 test_flag = 0;
5463 return NULL;
5464 } else
5465 test_flag = 1;
5466#endif
5467 if (get_mv_buf(hevc, new_pic) < 0)
5468 return NULL;
5469
5470 if (hevc->mmu_enable) {
5471 ret = H265_alloc_mmu(hevc, new_pic,
5472 rpm_param->p.bit_depth,
5473 hevc->frame_mmu_map_addr);
5474 if (ret != 0) {
5475 put_mv_buf(hevc, new_pic);
5476 hevc_print(hevc, 0,
5477					"can't alloc needed mmu1, idx %d ret = %d\n",
5478 new_pic->decode_idx,
5479 ret);
5480 return NULL;
5481 }
5482 }
5483 new_pic->referenced = 1;
5484 new_pic->decode_idx = hevc->decode_idx;
5485 new_pic->slice_idx = 0;
5486 new_pic->referenced = 1;
5487 new_pic->output_mark = 0;
5488 new_pic->recon_mark = 0;
5489 new_pic->error_mark = 0;
5490 new_pic->dis_mark = 0;
5491 /* new_pic->output_ready = 0; */
5492 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5493 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5494 new_pic->POC = hevc->curr_POC;
5495 new_pic->pic_struct = hevc->curr_pic_struct;
5496 if (new_pic->aux_data_buf)
5497 release_aux_data(hevc, new_pic);
5498 new_pic->mem_saving_mode =
5499 hevc->mem_saving_mode;
5500 new_pic->bit_depth_luma =
5501 hevc->bit_depth_luma;
5502 new_pic->bit_depth_chroma =
5503 hevc->bit_depth_chroma;
5504 new_pic->video_signal_type =
5505 hevc->video_signal_type;
5506
5507 new_pic->conformance_window_flag =
5508 hevc->param.p.conformance_window_flag;
5509 new_pic->conf_win_left_offset =
5510 hevc->param.p.conf_win_left_offset;
5511 new_pic->conf_win_right_offset =
5512 hevc->param.p.conf_win_right_offset;
5513 new_pic->conf_win_top_offset =
5514 hevc->param.p.conf_win_top_offset;
5515 new_pic->conf_win_bottom_offset =
5516 hevc->param.p.conf_win_bottom_offset;
5517 new_pic->chroma_format_idc =
5518 hevc->param.p.chroma_format_idc;
5519
5520 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5521 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5522 __func__, new_pic->index,
5523 new_pic->BUF_index, new_pic->decode_idx,
5524 new_pic->POC);
5525
5526 }
5527 if (pic_list_debug & 0x1) {
5528 dump_pic_list(hevc);
5529 pr_err("\n*******************************************\n");
5530 }
5531
5532 return new_pic;
5533}
5534
5535static int get_display_pic_num(struct hevc_state_s *hevc)
5536{
5537 int i;
5538 struct PIC_s *pic;
5539 int num = 0;
5540
5541 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5542 pic = hevc->m_PIC[i];
5543 if (pic == NULL ||
5544 pic->index == -1)
5545 continue;
5546
5547 if (pic->output_ready == 1)
5548 num++;
5549 }
5550 return num;
5551}
5552
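/*
 * Flush the DPB: mark the passed picture for output (applying the PB-skip
 * error handling), then repeatedly pull displayable pictures with
 * output_pic(hevc, 1) and either hand them to the display path or recycle
 * them when they are errored, skipped in I-only mode, or display is
 * disabled for debugging.
 */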
5553static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5554{
5555 struct PIC_s *pic_display;
5556
5557 if (pic) {
5558 /*PB skip control */
5559 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5560 /* start decoding after first I */
5561 hevc->ignore_bufmgr_error |= 0x1;
5562 }
5563 if (hevc->ignore_bufmgr_error & 1) {
5564 if (hevc->PB_skip_count_after_decoding > 0)
5565 hevc->PB_skip_count_after_decoding--;
5566 else {
5567 /* start displaying */
5568 hevc->ignore_bufmgr_error |= 0x2;
5569 }
5570 }
5571 /**/
5572 if (pic->POC != INVALID_POC) {
5573 pic->output_mark = 1;
5574 pic->recon_mark = 1;
5575 }
5576 pic->recon_mark = 1;
5577 }
5578 do {
5579 pic_display = output_pic(hevc, 1);
5580
5581 if (pic_display) {
5582 pic_display->referenced = 0;
5583 put_mv_buf(hevc, pic_display);
5584 if ((pic_display->error_mark
5585 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5586 || (get_dbg_flag(hevc) &
5587 H265_DEBUG_DISPLAY_CUR_FRAME)
5588 || (get_dbg_flag(hevc) &
5589 H265_DEBUG_NO_DISPLAY)) {
5590 pic_display->output_ready = 0;
5591 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5592 hevc_print(hevc, 0,
5593 "[BM] Display: POC %d, ",
5594 pic_display->POC);
5595 hevc_print_cont(hevc, 0,
5596 "decoding index %d ==> ",
5597 pic_display->decode_idx);
5598 hevc_print_cont(hevc, 0,
5599 "Debug mode or error, recycle it\n");
5600 }
5601 } else {
5602 if (hevc->i_only & 0x1
5603 && pic_display->slice_type != 2) {
5604 pic_display->output_ready = 0;
5605 } else {
5606 prepare_display_buf(hevc, pic_display);
5607 if (get_dbg_flag(hevc)
5608 & H265_DEBUG_BUFMGR) {
5609 hevc_print(hevc, 0,
5610 "[BM] flush Display: POC %d, ",
5611 pic_display->POC);
5612 hevc_print_cont(hevc, 0,
5613 "decoding index %d\n",
5614 pic_display->decode_idx);
5615 }
5616 }
5617 }
5618 }
5619 } while (pic_display);
5620 clear_referenced_flag(hevc);
5621}
5622
5623/*
5624* dv_meta_flag: 1, dolby meta only; 2, not include dolby meta
5625*/
5626static void set_aux_data(struct hevc_state_s *hevc,
5627 struct PIC_s *pic, unsigned char suffix_flag,
5628 unsigned char dv_meta_flag)
5629{
5630 int i;
5631 unsigned short *aux_adr;
5632 unsigned int size_reg_val =
5633 READ_VREG(HEVC_AUX_DATA_SIZE);
5634 unsigned int aux_count = 0;
5635 int aux_size = 0;
5636 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
5637 return;
5638
5639 if (hevc->aux_data_dirty ||
5640 hevc->m_ins_flag == 0) {
5641
5642 hevc->aux_data_dirty = 0;
5643 }
5644
5645 if (suffix_flag) {
5646 aux_adr = (unsigned short *)
5647 (hevc->aux_addr +
5648 hevc->prefix_aux_size);
5649 aux_count =
5650 ((size_reg_val & 0xffff) << 4)
5651 >> 1;
5652 aux_size =
5653 hevc->suffix_aux_size;
5654 } else {
5655 aux_adr =
5656 (unsigned short *)hevc->aux_addr;
5657 aux_count =
5658 ((size_reg_val >> 16) << 4)
5659 >> 1;
5660 aux_size =
5661 hevc->prefix_aux_size;
5662 }
5663 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5664 hevc_print(hevc, 0,
5665 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
5666 __func__, pic, pic->aux_data_size,
5667 aux_count, suffix_flag, dv_meta_flag);
5668 }
5669 if (aux_size > 0 && aux_count > 0) {
5670 int heads_size = 0;
5671 int new_size;
5672 char *new_buf;
5673
5674 for (i = 0; i < aux_count; i++) {
5675 unsigned char tag = aux_adr[i] >> 8;
5676 if (tag != 0 && tag != 0xff) {
5677 if (dv_meta_flag == 0)
5678 heads_size += 8;
5679 else if (dv_meta_flag == 1 && tag == 0x1)
5680 heads_size += 8;
5681 else if (dv_meta_flag == 2 && tag != 0x1)
5682 heads_size += 8;
5683 }
5684 }
5685 new_size = pic->aux_data_size + aux_count + heads_size;
5686 new_buf = vmalloc(new_size);
5687 if (new_buf) {
5688 unsigned char valid_tag = 0;
5689 unsigned char *h =
5690 new_buf +
5691 pic->aux_data_size;
5692 unsigned char *p = h + 8;
5693 int len = 0;
5694 int padding_len = 0;
5695 memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
5696 if (pic->aux_data_buf)
5697 vfree(pic->aux_data_buf);
5698 pic->aux_data_buf = new_buf;
5699 for (i = 0; i < aux_count; i += 4) {
5700 int ii;
5701 unsigned char tag = aux_adr[i + 3] >> 8;
5702 if (tag != 0 && tag != 0xff) {
5703 if (dv_meta_flag == 0)
5704 valid_tag = 1;
5705 else if (dv_meta_flag == 1
5706 && tag == 0x1)
5707 valid_tag = 1;
5708 else if (dv_meta_flag == 2
5709 && tag != 0x1)
5710 valid_tag = 1;
5711 else
5712 valid_tag = 0;
5713 if (valid_tag && len > 0) {
5714 pic->aux_data_size +=
5715 (len + 8);
5716 h[0] = (len >> 24)
5717 & 0xff;
5718 h[1] = (len >> 16)
5719 & 0xff;
5720 h[2] = (len >> 8)
5721 & 0xff;
5722 h[3] = (len >> 0)
5723 & 0xff;
5724 h[6] =
5725 (padding_len >> 8)
5726 & 0xff;
5727 h[7] = (padding_len)
5728 & 0xff;
5729 h += (len + 8);
5730 p += 8;
5731 len = 0;
5732 padding_len = 0;
5733 }
5734 if (valid_tag) {
5735 h[4] = tag;
5736 h[5] = 0;
5737 h[6] = 0;
5738 h[7] = 0;
5739 }
5740 }
5741 if (valid_tag) {
5742 for (ii = 0; ii < 4; ii++) {
5743 unsigned short aa =
5744 aux_adr[i + 3
5745 - ii];
5746 *p = aa & 0xff;
5747 p++;
5748 len++;
5749 /*if ((aa >> 8) == 0xff)
5750 padding_len++;*/
5751 }
5752 }
5753 }
5754 if (len > 0) {
5755 pic->aux_data_size += (len + 8);
5756 h[0] = (len >> 24) & 0xff;
5757 h[1] = (len >> 16) & 0xff;
5758 h[2] = (len >> 8) & 0xff;
5759 h[3] = (len >> 0) & 0xff;
5760 h[6] = (padding_len >> 8) & 0xff;
5761 h[7] = (padding_len) & 0xff;
5762 }
5763 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5764 hevc_print(hevc, 0,
5765 "aux: (size %d) suffix_flag %d\n",
5766 pic->aux_data_size, suffix_flag);
5767 for (i = 0; i < pic->aux_data_size; i++) {
5768 hevc_print_cont(hevc, 0,
5769 "%02x ", pic->aux_data_buf[i]);
5770 if (((i + 1) & 0xf) == 0)
5771 hevc_print_cont(hevc, 0, "\n");
5772 }
5773 hevc_print_cont(hevc, 0, "\n");
5774 }
5775
5776 } else {
5777 hevc_print(hevc, 0, "new buf alloc failed\n");
5778 if (pic->aux_data_buf)
5779 vfree(pic->aux_data_buf);
5780 pic->aux_data_buf = NULL;
5781 pic->aux_data_size = 0;
5782 }
5783 }
5784
5785}
5786
5787static void release_aux_data(struct hevc_state_s *hevc,
5788 struct PIC_s *pic)
5789{
5790 if (pic->aux_data_buf)
5791 vfree(pic->aux_data_buf);
5792 pic->aux_data_buf = NULL;
5793 pic->aux_data_size = 0;
5794}
5795
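/*
 * Finish the previously decoded picture before a new one starts: apply
 * the PB-skip policy, trim the unused compressed-frame MMU pages of the
 * previous picture (single-instance, MMU-enabled case), mark it for
 * output and drain displayable pictures. For IDR slices all buffers are
 * flushed instead.
 */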
5796static inline void hevc_pre_pic(struct hevc_state_s *hevc,
5797 struct PIC_s *pic)
5798{
5799
5800 /* prev pic */
5801 /*if (hevc->curr_POC != 0) {*/
5802 int decoded_poc = hevc->iPrevPOC;
5803#ifdef MULTI_INSTANCE_SUPPORT
5804 if (hevc->m_ins_flag) {
5805 decoded_poc = hevc->decoded_poc;
5806 hevc->decoded_poc = INVALID_POC;
5807 }
5808#endif
5809 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
5810 && hevc->m_nalUnitType !=
5811 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
5812 struct PIC_s *pic_display;
5813
5814 pic = get_pic_by_POC(hevc, decoded_poc);
5815 if (pic && (pic->POC != INVALID_POC)) {
5816 /*PB skip control */
5817 if (pic->error_mark == 0
5818 && hevc->PB_skip_mode == 1) {
5819 /* start decoding after
5820 * first I
5821 */
5822 hevc->ignore_bufmgr_error |= 0x1;
5823 }
5824 if (hevc->ignore_bufmgr_error & 1) {
5825 if (hevc->PB_skip_count_after_decoding > 0) {
5826 hevc->PB_skip_count_after_decoding--;
5827 } else {
5828 /* start displaying */
5829 hevc->ignore_bufmgr_error |= 0x2;
5830 }
5831 }
5832 if (hevc->mmu_enable
5833 && ((hevc->double_write_mode & 0x10) == 0)) {
5834 if (!hevc->m_ins_flag) {
5835 hevc->used_4k_num =
5836 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
5837
5838 if ((!is_skip_decoding(hevc, pic)) &&
5839 (hevc->used_4k_num >= 0) &&
5840 (hevc->cur_pic->scatter_alloc
5841 == 1)) {
5842 hevc_print(hevc,
5843 H265_DEBUG_BUFMGR_MORE,
5844 "%s pic index %d scatter_alloc %d page_start %d\n",
5845 "decoder_mmu_box_free_idx_tail",
5846 hevc->cur_pic->index,
5847 hevc->cur_pic->scatter_alloc,
5848 hevc->used_4k_num);
5849 hevc_mmu_dma_check(hw_to_vdec(hevc));
5850 decoder_mmu_box_free_idx_tail(
5851 hevc->mmu_box,
5852 hevc->cur_pic->index,
5853 hevc->used_4k_num);
5854 hevc->cur_pic->scatter_alloc
5855 = 2;
5856 }
5857 hevc->used_4k_num = -1;
5858 }
5859 }
5860
5861 pic->output_mark = 1;
5862 pic->recon_mark = 1;
5863 pic->dis_mark = 1;
5864 }
5865 do {
5866 pic_display = output_pic(hevc, 0);
5867
5868 if (pic_display) {
5869 if ((pic_display->error_mark &&
5870 ((hevc->ignore_bufmgr_error &
5871 0x2) == 0))
5872 || (get_dbg_flag(hevc) &
5873 H265_DEBUG_DISPLAY_CUR_FRAME)
5874 || (get_dbg_flag(hevc) &
5875 H265_DEBUG_NO_DISPLAY)) {
5876 pic_display->output_ready = 0;
5877 if (get_dbg_flag(hevc) &
5878 H265_DEBUG_BUFMGR) {
5879 hevc_print(hevc, 0,
5880 "[BM] Display: POC %d, ",
5881 pic_display->POC);
5882 hevc_print_cont(hevc, 0,
5883 "decoding index %d ==> ",
5884 pic_display->
5885 decode_idx);
5886 hevc_print_cont(hevc, 0,
5887 "Debug mode or error, recycle it\n");
5888 }
5889 } else {
5890 if (hevc->i_only & 0x1
5891 && pic_display->
5892 slice_type != 2) {
5893 pic_display->output_ready = 0;
5894 } else {
5895 prepare_display_buf
5896 (hevc,
5897 pic_display);
5898 if (get_dbg_flag(hevc) &
5899 H265_DEBUG_BUFMGR) {
5900 hevc_print(hevc, 0,
5901 "[BM] Display: POC %d, ",
5902 pic_display->POC);
5903 hevc_print_cont(hevc, 0,
5904 "decoding index %d\n",
5905 pic_display->
5906 decode_idx);
5907 }
5908 }
5909 }
5910 }
5911 } while (pic_display);
5912 } else {
5913 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5914 hevc_print(hevc, 0,
5915 "[BM] current pic is IDR, ");
5916 hevc_print(hevc, 0,
5917 "clear referenced flag of all buffers\n");
5918 }
5919 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5920 dump_pic_list(hevc);
5921 pic = get_pic_by_POC(hevc, decoded_poc);
5922 flush_output(hevc, pic);
5923 }
5924
5925}
5926
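/*
 * Mark the previous picture as erroneous when fewer LCUs were decoded
 * than its geometry (lcu_x_num_pre * lcu_y_num_pre) requires, or when
 * the header-error or over-decode flags are set.
 */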
5927static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
5928 int decoded_lcu)
5929{
5930 int current_lcu_idx = decoded_lcu;
5931 if (decoded_lcu < 0)
5932 return;
5933
5934 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5935 hevc_print(hevc, 0,
5936 "cur lcu idx = %d, (total %d)\n",
5937 current_lcu_idx, hevc->lcu_total);
5938 }
5939 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
5940 if (hevc->first_pic_after_recover) {
5941 if (current_lcu_idx !=
5942 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
5943 hevc->cur_pic->error_mark = 1;
5944 } else {
5945 if (hevc->lcu_x_num_pre != 0
5946 && hevc->lcu_y_num_pre != 0
5947 && current_lcu_idx != 0
5948 && current_lcu_idx <
5949 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
5950 hevc->cur_pic->error_mark = 1;
5951 }
5952 if (hevc->cur_pic->error_mark) {
5953 hevc_print(hevc, 0,
5954 "cur lcu idx = %d, (total %d), set error_mark\n",
5955 current_lcu_idx,
5956 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
5957 if (is_log_enable(hevc))
5958 add_log(hevc,
5959 "cur lcu idx = %d, (total %d), set error_mark",
5960 current_lcu_idx,
5961 hevc->lcu_x_num_pre *
5962 hevc->lcu_y_num_pre);
5963
5964 }
5965
5966 }
5967 if (hevc->cur_pic && hevc->head_error_flag) {
5968 hevc->cur_pic->error_mark = 1;
5969 hevc_print(hevc, 0,
5970 "head has error, set error_mark\n");
5971 }
5972
5973 if ((error_handle_policy & 0x80) == 0) {
5974 if (hevc->over_decode && hevc->cur_pic) {
5975 hevc_print(hevc, 0,
5976 "over decode, set error_mark\n");
5977 hevc->cur_pic->error_mark = 1;
5978 }
5979 }
5980
5981 hevc->lcu_x_num_pre = hevc->lcu_x_num;
5982 hevc->lcu_y_num_pre = hevc->lcu_y_num;
5983}
5984
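/*
 * Same check against the current frame geometry: mark the current
 * picture as erroneous when the decoded LCU count does not cover the
 * whole frame, on header errors or over-decode, and flush the output
 * queue when the erroneous picture's POC lags far behind iPrevPOC.
 */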
5985static void check_pic_decoded_error(struct hevc_state_s *hevc,
5986 int decoded_lcu)
5987{
5988 int current_lcu_idx = decoded_lcu;
5989 if (decoded_lcu < 0)
5990 return;
5991
5992 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5993 hevc_print(hevc, 0,
5994 "cur lcu idx = %d, (total %d)\n",
5995 current_lcu_idx, hevc->lcu_total);
5996 }
5997 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
5998 if (hevc->lcu_x_num != 0
5999 && hevc->lcu_y_num != 0
6000 && current_lcu_idx != 0
6001 && current_lcu_idx <
6002 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6003 hevc->cur_pic->error_mark = 1;
6004 if (hevc->cur_pic->error_mark) {
6005 hevc_print(hevc, 0,
6006 "cur lcu idx = %d, (total %d), set error_mark\n",
6007 current_lcu_idx,
6008 hevc->lcu_x_num*hevc->lcu_y_num);
6009 if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6010 && ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6011 hevc_print(hevc, 0,
6012 "Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6013 hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6014 flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6015 }
6016 if (is_log_enable(hevc))
6017 add_log(hevc,
6018 "cur lcu idx = %d, (total %d), set error_mark",
6019 current_lcu_idx,
6020 hevc->lcu_x_num *
6021 hevc->lcu_y_num);
6022
6023 }
6024
6025 }
6026 if (hevc->cur_pic && hevc->head_error_flag) {
6027 hevc->cur_pic->error_mark = 1;
6028 hevc_print(hevc, 0,
6029 "head has error, set error_mark\n");
6030 }
6031
6032 if ((error_handle_policy & 0x80) == 0) {
6033 if (hevc->over_decode && hevc->cur_pic) {
6034 hevc_print(hevc, 0,
6035 "over decode, set error_mark\n");
6036 hevc->cur_pic->error_mark = 1;
6037 }
6038 }
6039}
6040
6041/* Only after one field or one frame has been decoded completely
6042 * can this function be called to collect the per-picture QoS info. */
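/*
 * Two register layouts exist: chips before G12A expose packed min/avg/max
 * triplets in HEVC_MV_INFO / HEVC_QP_INFO / HEVC_SKIP_INFO, while G12A and
 * later stream a sequence of counters through HEVC_PIC_QUALITY_DATA that
 * is accumulated here into min/avg/max QP, skip ratio and motion vectors.
 */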
6043static void get_picture_qos_info(struct hevc_state_s *hevc)
6044{
6045 struct PIC_s *picture = hevc->cur_pic;
6046
6047/*
6048#define DEBUG_QOS
6049*/
6050
6051 if (!hevc->cur_pic)
6052 return;
6053
6054 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6055 unsigned char a[3];
6056 unsigned char i, j, t;
6057 unsigned long data;
6058
6059 data = READ_VREG(HEVC_MV_INFO);
6060 if (picture->slice_type == I_SLICE)
6061 data = 0;
6062 a[0] = data & 0xff;
6063 a[1] = (data >> 8) & 0xff;
6064 a[2] = (data >> 16) & 0xff;
6065
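		/* order the three hardware samples so a[0] <= a[1] <= a[2];
		 * they are then reported as min/avg/max */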
6066 for (i = 0; i < 3; i++)
6067 for (j = i+1; j < 3; j++) {
6068 if (a[j] < a[i]) {
6069 t = a[j];
6070 a[j] = a[i];
6071 a[i] = t;
6072 } else if (a[j] == a[i]) {
6073 a[i]++;
6074 t = a[j];
6075 a[j] = a[i];
6076 a[i] = t;
6077 }
6078 }
6079 picture->max_mv = a[2];
6080 picture->avg_mv = a[1];
6081 picture->min_mv = a[0];
6082#ifdef DEBUG_QOS
6083 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6084 data, a[0], a[1], a[2]);
6085#endif
6086
6087 data = READ_VREG(HEVC_QP_INFO);
6088 a[0] = data & 0x1f;
6089 a[1] = (data >> 8) & 0x3f;
6090 a[2] = (data >> 16) & 0x7f;
6091
6092 for (i = 0; i < 3; i++)
6093 for (j = i+1; j < 3; j++) {
6094 if (a[j] < a[i]) {
6095 t = a[j];
6096 a[j] = a[i];
6097 a[i] = t;
6098 } else if (a[j] == a[i]) {
6099 a[i]++;
6100 t = a[j];
6101 a[j] = a[i];
6102 a[i] = t;
6103 }
6104 }
6105 picture->max_qp = a[2];
6106 picture->avg_qp = a[1];
6107 picture->min_qp = a[0];
6108#ifdef DEBUG_QOS
6109 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6110 data, a[0], a[1], a[2]);
6111#endif
6112
6113 data = READ_VREG(HEVC_SKIP_INFO);
6114 a[0] = data & 0x1f;
6115 a[1] = (data >> 8) & 0x3f;
6116 a[2] = (data >> 16) & 0x7f;
6117
6118 for (i = 0; i < 3; i++)
6119 for (j = i+1; j < 3; j++) {
6120 if (a[j] < a[i]) {
6121 t = a[j];
6122 a[j] = a[i];
6123 a[i] = t;
6124 } else if (a[j] == a[i]) {
6125 a[i]++;
6126 t = a[j];
6127 a[j] = a[i];
6128 a[i] = t;
6129 }
6130 }
6131 picture->max_skip = a[2];
6132 picture->avg_skip = a[1];
6133 picture->min_skip = a[0];
6134
6135#ifdef DEBUG_QOS
6136 hevc_print(hevc, 0,
6137 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6138 data, a[0], a[1], a[2]);
6139#endif
6140 } else {
6141 uint32_t blk88_y_count;
6142 uint32_t blk88_c_count;
6143 uint32_t blk22_mv_count;
6144 uint32_t rdata32;
6145 int32_t mv_hi;
6146 int32_t mv_lo;
6147 uint32_t rdata32_l;
6148 uint32_t mvx_L0_hi;
6149 uint32_t mvy_L0_hi;
6150 uint32_t mvx_L1_hi;
6151 uint32_t mvy_L1_hi;
6152 int64_t value;
6153 uint64_t temp_value;
6154#ifdef DEBUG_QOS
6155 int pic_number = picture->POC;
6156#endif
6157
6158 picture->max_mv = 0;
6159 picture->avg_mv = 0;
6160 picture->min_mv = 0;
6161
6162 picture->max_skip = 0;
6163 picture->avg_skip = 0;
6164 picture->min_skip = 0;
6165
6166 picture->max_qp = 0;
6167 picture->avg_qp = 0;
6168 picture->min_qp = 0;
6169
6170
6171
6172#ifdef DEBUG_QOS
6173 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6174 picture->slice_type,
6175 picture->POC);
6176#endif
6177 /* set rd_idx to 0 */
6178 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6179
6180 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6181 if (blk88_y_count == 0) {
6182#ifdef DEBUG_QOS
6183 hevc_print(hevc, 0,
6184 "[Picture %d Quality] NO Data yet.\n",
6185 pic_number);
6186#endif
6187 /* reset all counts */
6188 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6189 return;
6190 }
6191 /* qp_y_sum */
6192 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6193#ifdef DEBUG_QOS
6194 hevc_print(hevc, 0,
6195 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6196 pic_number, rdata32/blk88_y_count,
6197 rdata32, blk88_y_count);
6198#endif
6199 picture->avg_qp = rdata32/blk88_y_count;
6200 /* intra_y_count */
6201 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6202#ifdef DEBUG_QOS
6203 hevc_print(hevc, 0,
6204 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6205 pic_number, rdata32*100/blk88_y_count,
6206 '%', rdata32);
6207#endif
6208 /* skipped_y_count */
6209 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6210#ifdef DEBUG_QOS
6211 hevc_print(hevc, 0,
6212 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6213 pic_number, rdata32*100/blk88_y_count,
6214 '%', rdata32);
6215#endif
6216 picture->avg_skip = rdata32*100/blk88_y_count;
6217 /* coeff_non_zero_y_count */
6218 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6219#ifdef DEBUG_QOS
6220 hevc_print(hevc, 0,
6221 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6222 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6223 '%', rdata32);
6224#endif
6225 /* blk66_c_count */
6226 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6227 if (blk88_c_count == 0) {
6228#ifdef DEBUG_QOS
6229 hevc_print(hevc, 0,
6230 "[Picture %d Quality] NO Data yet.\n",
6231 pic_number);
6232#endif
6233 /* reset all counts */
6234 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6235 return;
6236 }
6237 /* qp_c_sum */
6238 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6239#ifdef DEBUG_QOS
6240 hevc_print(hevc, 0,
6241 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6242 pic_number, rdata32/blk88_c_count,
6243 rdata32, blk88_c_count);
6244#endif
6245 /* intra_c_count */
6246 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6247#ifdef DEBUG_QOS
6248 hevc_print(hevc, 0,
6249 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6250 pic_number, rdata32*100/blk88_c_count,
6251 '%', rdata32);
6252#endif
6253 /* skipped_cu_c_count */
6254 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6255#ifdef DEBUG_QOS
6256 hevc_print(hevc, 0,
6257 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6258 pic_number, rdata32*100/blk88_c_count,
6259 '%', rdata32);
6260#endif
6261 /* coeff_non_zero_c_count */
6262 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6263#ifdef DEBUG_QOS
6264 hevc_print(hevc, 0,
6265 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6266 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6267 '%', rdata32);
6268#endif
6269
6270 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6271 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6272 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6273#ifdef DEBUG_QOS
6274 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6275 pic_number, (rdata32>>0)&0xff);
6276#endif
6277 picture->min_qp = (rdata32>>0)&0xff;
6278
6279#ifdef DEBUG_QOS
6280 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6281 pic_number, (rdata32>>8)&0xff);
6282#endif
6283 picture->max_qp = (rdata32>>8)&0xff;
6284
6285#ifdef DEBUG_QOS
6286 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6287 pic_number, (rdata32>>16)&0xff);
6288 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6289 pic_number, (rdata32>>24)&0xff);
6290#endif
6291
6292 /* blk22_mv_count */
6293 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6294 if (blk22_mv_count == 0) {
6295#ifdef DEBUG_QOS
6296 hevc_print(hevc, 0,
6297 "[Picture %d Quality] NO MV Data yet.\n",
6298 pic_number);
6299#endif
6300 /* reset all counts */
6301 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6302 return;
6303 }
6304 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6305 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6306 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6307 /* should all be 0x00 or 0xff */
6308#ifdef DEBUG_QOS
6309 hevc_print(hevc, 0,
6310 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6311 pic_number, rdata32);
6312#endif
6313 mvx_L0_hi = ((rdata32>>0)&0xff);
6314 mvy_L0_hi = ((rdata32>>8)&0xff);
6315 mvx_L1_hi = ((rdata32>>16)&0xff);
6316 mvy_L1_hi = ((rdata32>>24)&0xff);
6317
6318 /* mvx_L0_count[31:0] */
6319 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6320 temp_value = mvx_L0_hi;
6321 temp_value = (temp_value << 32) | rdata32_l;
6322
6323 if (mvx_L0_hi & 0x80)
6324 value = 0xFFFFFFF000000000 | temp_value;
6325 else
6326 value = temp_value;
6327 value = div_s64(value, blk22_mv_count);
6328#ifdef DEBUG_QOS
6329 hevc_print(hevc, 0,
6330 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6331 pic_number, (int)value,
6332 value, blk22_mv_count);
6333#endif
6334 picture->avg_mv = value;
6335
6336 /* mvy_L0_count[31:0] */
6337 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6338 temp_value = mvy_L0_hi;
6339 temp_value = (temp_value << 32) | rdata32_l;
6340
6341 if (mvy_L0_hi & 0x80)
6342 value = 0xFFFFFFF000000000 | temp_value;
6343 else
6344 value = temp_value;
6345#ifdef DEBUG_QOS
6346 hevc_print(hevc, 0,
6347 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6348 pic_number, rdata32_l/blk22_mv_count,
6349 value, blk22_mv_count);
6350#endif
6351
6352 /* mvx_L1_count[31:0] */
6353 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6354 temp_value = mvx_L1_hi;
6355 temp_value = (temp_value << 32) | rdata32_l;
6356 if (mvx_L1_hi & 0x80)
6357 value = 0xFFFFFFF000000000 | temp_value;
6358 else
6359 value = temp_value;
6360#ifdef DEBUG_QOS
6361 hevc_print(hevc, 0,
6362 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6363 pic_number, rdata32_l/blk22_mv_count,
6364 value, blk22_mv_count);
6365#endif
6366
6367 /* mvy_L1_count[31:0] */
6368 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6369 temp_value = mvy_L1_hi;
6370 temp_value = (temp_value << 32) | rdata32_l;
6371 if (mvy_L1_hi & 0x80)
6372 value = 0xFFFFFFF000000000 | temp_value;
6373 else
6374 value = temp_value;
6375#ifdef DEBUG_QOS
6376 hevc_print(hevc, 0,
6377 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6378 pic_number, rdata32_l/blk22_mv_count,
6379 value, blk22_mv_count);
6380#endif
6381
6382 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6383 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6384 mv_hi = (rdata32>>16)&0xffff;
6385 if (mv_hi & 0x8000)
6386 mv_hi = 0x8000 - mv_hi;
6387#ifdef DEBUG_QOS
6388 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6389 pic_number, mv_hi);
6390#endif
6391 picture->max_mv = mv_hi;
6392
6393 mv_lo = (rdata32>>0)&0xffff;
6394 if (mv_lo & 0x8000)
6395 mv_lo = 0x8000 - mv_lo;
6396#ifdef DEBUG_QOS
6397 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6398 pic_number, mv_lo);
6399#endif
6400 picture->min_mv = mv_lo;
6401
6402 /* {mvy_L0_max, mvy_L0_min} */
6403 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6404 mv_hi = (rdata32>>16)&0xffff;
6405 if (mv_hi & 0x8000)
6406 mv_hi = 0x8000 - mv_hi;
6407#ifdef DEBUG_QOS
6408 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6409 pic_number, mv_hi);
6410#endif
6411
6412 mv_lo = (rdata32>>0)&0xffff;
6413 if (mv_lo & 0x8000)
6414 mv_lo = 0x8000 - mv_lo;
6415#ifdef DEBUG_QOS
6416 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6417 pic_number, mv_lo);
6418#endif
6419
6420 /* {mvx_L1_max, mvx_L1_min} */
6421 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6422 mv_hi = (rdata32>>16)&0xffff;
6423 if (mv_hi & 0x8000)
6424 mv_hi = 0x8000 - mv_hi;
6425#ifdef DEBUG_QOS
6426 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6427 pic_number, mv_hi);
6428#endif
6429
6430 mv_lo = (rdata32>>0)&0xffff;
6431 if (mv_lo & 0x8000)
6432 mv_lo = 0x8000 - mv_lo;
6433#ifdef DEBUG_QOS
6434 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6435 pic_number, mv_lo);
6436#endif
6437
6438 /* {mvy_L1_max, mvy_L1_min} */
6439 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6440 mv_hi = (rdata32>>16)&0xffff;
6441 if (mv_hi & 0x8000)
6442 mv_hi = 0x8000 - mv_hi;
6443#ifdef DEBUG_QOS
6444 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6445 pic_number, mv_hi);
6446#endif
6447 mv_lo = (rdata32>>0)&0xffff;
6448 if (mv_lo & 0x8000)
6449 mv_lo = 0x8000 - mv_lo;
6450#ifdef DEBUG_QOS
6451 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6452 pic_number, mv_lo);
6453#endif
6454
6455 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6456#ifdef DEBUG_QOS
6457 hevc_print(hevc, 0,
6458 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6459 pic_number, rdata32);
6460#endif
6461 /* reset all counts */
6462 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6463 }
6464}
6465
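/*
 * Parse one slice segment header delivered through the rpm parameters:
 * track resolution/LCU geometry changes, derive the picture order count,
 * skip RASL pictures that follow a random access point, allocate a new
 * PIC on the first slice segment of a picture and program the tile, MC,
 * MPRED and SAO hardware. Returns 0 to continue decoding, a positive
 * code to skip/abort the slice, or -1 when a free buffer must be waited
 * for.
 */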
6466static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6467 union param_u *rpm_param,
6468 int decode_pic_begin)
6469{
6470#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6471 struct vdec_s *vdec = hw_to_vdec(hevc);
6472#endif
6473 int i;
6474 int lcu_x_num_div;
6475 int lcu_y_num_div;
6476 int Col_ref;
6477 int dbg_skip_flag = 0;
6478
6479 if (hevc->wait_buf == 0) {
6480 hevc->sps_num_reorder_pics_0 =
6481 rpm_param->p.sps_num_reorder_pics_0;
6482 hevc->m_temporalId = rpm_param->p.m_temporalId;
6483 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6484 hevc->interlace_flag =
6485 (rpm_param->p.profile_etc >> 2) & 0x1;
6486 hevc->curr_pic_struct =
6487 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6488 if (parser_sei_enable & 0x4) {
6489 hevc->frame_field_info_present_flag =
6490 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6491 }
6492
6493 if (interlace_enable == 0 || hevc->m_ins_flag)
6494 hevc->interlace_flag = 0;
6495 if (interlace_enable & 0x100)
6496 hevc->interlace_flag = interlace_enable & 0x1;
6497 if (hevc->interlace_flag == 0)
6498 hevc->curr_pic_struct = 0;
6499 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6500 /*
6501 *hevc->m_pocRandomAccess = MAX_INT;
6502 * //add to fix RAP_B_Bossen_1
6503 */
6504 /* } */
6505 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6506 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6507 hevc->slice_segment_addr =
6508 rpm_param->p.slice_segment_address;
6509 if (!rpm_param->p.dependent_slice_segment_flag)
6510 hevc->slice_addr = hevc->slice_segment_addr;
6511 } else {
6512 hevc->slice_segment_addr = 0;
6513 hevc->slice_addr = 0;
6514 }
6515
6516 hevc->iPrevPOC = hevc->curr_POC;
6517 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6518 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6519 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6520 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6521 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6522 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6523 hevc->isNextSliceSegment =
6524 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6525 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6526 || hevc->pic_h !=
6527 rpm_param->p.pic_height_in_luma_samples) {
6528 hevc_print(hevc, 0,
6529 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6530 hevc->pic_w, hevc->pic_h,
6531 rpm_param->p.pic_width_in_luma_samples,
6532 rpm_param->p.pic_height_in_luma_samples,
6533 hevc->interlace_flag);
6534
6535 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6536 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6537 hevc->frame_width = hevc->pic_w;
6538 hevc->frame_height = hevc->pic_h;
6539#ifdef LOSLESS_COMPRESS_MODE
6540 if (/*re_config_pic_flag == 0 &&*/
6541 (get_double_write_mode(hevc) & 0x10) == 0)
6542 init_decode_head_hw(hevc);
6543#endif
6544 }
6545
6546 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6547 hevc_print(hevc, 0, "oversize: %u x %u.\n",
6548 hevc->pic_w, hevc->pic_h);
6549 if ((!hevc->m_ins_flag) &&
6550 ((debug &
6551 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6552 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6553 H265_DEBUG_DIS_SYS_ERROR_PROC);
6554 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6555 return 3;
6556 }
6557 if (hevc->bit_depth_chroma > 10 ||
6558 hevc->bit_depth_luma > 10) {
6559 hevc_print(hevc, 0, "unsupported bit depth: %u,%u\n",
6560 hevc->bit_depth_chroma,
6561 hevc->bit_depth_luma);
6562 if (!hevc->m_ins_flag)
6563 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6564 H265_DEBUG_DIS_SYS_ERROR_PROC);
6565 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6566 return 4;
6567 }
6568
6569 /* a zero width/height here would cause a divide-by-zero below */
6570 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6571 if (get_dbg_flag(hevc)) {
6572 hevc_print(hevc, 0,
6573 "Fatal Error, pic_w = %d, pic_h = %d\n",
6574 hevc->pic_w, hevc->pic_h);
6575 }
6576 return 3;
6577 }
6578 pic_list_process(hevc);
6579
6580 hevc->lcu_size =
6581 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6582 3 + rpm_param->
6583 p.log2_diff_max_min_coding_block_size);
6584 if (hevc->lcu_size == 0) {
6585 hevc_print(hevc, 0,
6586 "Error, lcu_size = 0 (%d,%d)\n",
6587 rpm_param->p.
6588 log2_min_coding_block_size_minus3,
6589 rpm_param->p.
6590 log2_diff_max_min_coding_block_size);
6591 return 3;
6592 }
6593 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6594 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6595 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6596 hevc->lcu_x_num =
6597 ((hevc->pic_w % hevc->lcu_size) ==
6598 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6599 hevc->lcu_y_num =
6600 ((hevc->pic_h % hevc->lcu_size) ==
6601 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6602 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
6603
6604 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6605 || hevc->m_nalUnitType ==
6606 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6607 hevc->curr_POC = 0;
6608 if ((hevc->m_temporalId - 1) == 0)
6609 hevc->iPrevTid0POC = hevc->curr_POC;
6610 } else {
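			/* Non-IDR slice: rebuild the full POC from its LSBs by
			 * extending the MSB part relative to the previous
			 * temporal-layer-0 POC (PicOrderCntMsb derivation,
			 * HEVC spec clause 8.3.1). */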
6611 int iMaxPOClsb =
6612 1 << (rpm_param->p.
6613 log2_max_pic_order_cnt_lsb_minus4 + 4);
6614 int iPrevPOClsb;
6615 int iPrevPOCmsb;
6616 int iPOCmsb;
6617 int iPOClsb = rpm_param->p.POClsb;
6618
6619 if (iMaxPOClsb == 0) {
6620 hevc_print(hevc, 0,
6621 "error iMaxPOClsb is 0\n");
6622 return 3;
6623 }
6624
6625 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
6626 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
6627
6628 if ((iPOClsb < iPrevPOClsb)
6629 && ((iPrevPOClsb - iPOClsb) >=
6630 (iMaxPOClsb / 2)))
6631 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
6632 else if ((iPOClsb > iPrevPOClsb)
6633 && ((iPOClsb - iPrevPOClsb) >
6634 (iMaxPOClsb / 2)))
6635 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
6636 else
6637 iPOCmsb = iPrevPOCmsb;
6638 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6639 hevc_print(hevc, 0,
6640 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
6641 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
6642 iPOClsb);
6643 }
6644 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6645 || hevc->m_nalUnitType ==
6646 NAL_UNIT_CODED_SLICE_BLANT
6647 || hevc->m_nalUnitType ==
6648 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
6649 /* For BLA picture types, POCmsb is set to 0. */
6650 iPOCmsb = 0;
6651 }
6652 hevc->curr_POC = (iPOCmsb + iPOClsb);
6653 if ((hevc->m_temporalId - 1) == 0)
6654 hevc->iPrevTid0POC = hevc->curr_POC;
6655 else {
6656 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6657 hevc_print(hevc, 0,
6658 "m_temporalID is %d\n",
6659 hevc->m_temporalId);
6660 }
6661 }
6662 }
6663 hevc->RefNum_L0 =
6664 (rpm_param->p.num_ref_idx_l0_active >
6665 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6666 num_ref_idx_l0_active;
6667 hevc->RefNum_L1 =
6668 (rpm_param->p.num_ref_idx_l1_active >
6669 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6670 num_ref_idx_l1_active;
6671
6672 /* if(curr_POC==0x10) dump_lmem(); */
6673
6674 /* skip RASL pictures after CRA/BLA pictures */
6675 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
6676 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
6677 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6678 || hevc->m_nalUnitType ==
6679 NAL_UNIT_CODED_SLICE_BLANT
6680 || hevc->m_nalUnitType ==
6681 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6682 hevc->m_pocRandomAccess = hevc->curr_POC;
6683 else
6684 hevc->m_pocRandomAccess = -MAX_INT;
6685 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6686 || hevc->m_nalUnitType ==
6687 NAL_UNIT_CODED_SLICE_BLANT
6688 || hevc->m_nalUnitType ==
6689 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6690 hevc->m_pocRandomAccess = hevc->curr_POC;
6691 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
6692 (nal_skip_policy >= 3) &&
6693 (hevc->m_nalUnitType ==
6694 NAL_UNIT_CODED_SLICE_RASL_N ||
6695 hevc->m_nalUnitType ==
6696 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
6697 if (get_dbg_flag(hevc)) {
6698 hevc_print(hevc, 0,
6699 "RASL picture with POC %d < %d ",
6700 hevc->curr_POC, hevc->m_pocRandomAccess);
6701 hevc_print(hevc, 0,
6702 "(RandomAccess point POC), skip it\n");
6703 }
6704 return 1;
6705 }
6706
6707 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
6708 hevc->skip_flag = 0;
6709 /**/
6710 /* if((iPrevPOC != curr_POC)){ */
6711 if (rpm_param->p.slice_segment_address == 0) {
6712 struct PIC_s *pic;
6713
6714 hevc->new_pic = 1;
6715#ifdef MULTI_INSTANCE_SUPPORT
6716 if (!hevc->m_ins_flag)
6717#endif
6718 check_pic_decoded_error_pre(hevc,
6719 READ_VREG(HEVC_PARSER_LCU_START)
6720 & 0xffffff);
6721 /**/ if (use_cma == 0) {
6722 if (hevc->pic_list_init_flag == 0) {
6723 init_pic_list(hevc);
6724 init_pic_list_hw(hevc);
6725 init_buf_spec(hevc);
6726 hevc->pic_list_init_flag = 3;
6727 }
6728 }
6729 if (!hevc->m_ins_flag) {
6730 if (hevc->cur_pic)
6731 get_picture_qos_info(hevc);
6732 }
6733 hevc->first_pic_after_recover = 0;
6734 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6735 dump_pic_list(hevc);
6736 /* prev pic */
6737 hevc_pre_pic(hevc, pic);
6738 /*
6739 *update referenced of old pictures
6740 *(cur_pic->referenced is 1 and not updated)
6741 */
6742 apply_ref_pic_set(hevc, hevc->curr_POC,
6743 rpm_param);
6744
6745 if (hevc->mmu_enable)
6746 recycle_mmu_bufs(hevc);
6747
6748#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6749 if (vdec->master) {
6750 struct hevc_state_s *hevc_ba =
6751 (struct hevc_state_s *)
6752 vdec->master->private;
6753 if (hevc_ba->cur_pic != NULL) {
6754 hevc_ba->cur_pic->dv_enhance_exist = 1;
6755 hevc_print(hevc, H265_DEBUG_DV,
6756 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
6757 hevc->curr_POC, hevc_ba->cur_pic->POC);
6758 }
6759 }
6760 if (vdec->master == NULL &&
6761 vdec->slave == NULL)
6762 set_aux_data(hevc,
6763 hevc->cur_pic, 1, 0); /*suffix*/
6764 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6765 set_aux_data(hevc,
6766 hevc->cur_pic, 0, 1); /*dv meta only*/
6767#else
6768 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6769#endif
6770 /* new pic */
6771 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6772 if (hevc->cur_pic == NULL) {
6773 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6774 dump_pic_list(hevc);
6775 hevc->wait_buf = 1;
6776 return -1;
6777 }
6778#ifdef MULTI_INSTANCE_SUPPORT
6779 hevc->decoding_pic = hevc->cur_pic;
6780 if (!hevc->m_ins_flag)
6781 hevc->over_decode = 0;
6782#endif
6783#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6784 hevc->cur_pic->dv_enhance_exist = 0;
6785 if (vdec->slave)
6786 hevc_print(hevc, H265_DEBUG_DV,
6787 "Clear bl (poc %d) dv_enhance_exist flag\n",
6788 hevc->curr_POC);
6789 if (vdec->master == NULL &&
6790 vdec->slave == NULL)
6791 set_aux_data(hevc,
6792 hevc->cur_pic, 0, 0); /*prefix*/
6793
6794 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6795 set_aux_data(hevc,
6796 hevc->cur_pic, 0, 2); /*pre sei only*/
6797#else
6798 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6799#endif
6800 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
6801 hevc->cur_pic->output_ready = 1;
6802 hevc->cur_pic->stream_offset =
6803 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
6804 prepare_display_buf(hevc, hevc->cur_pic);
6805 hevc->wait_buf = 2;
6806 return -1;
6807 }
6808 } else {
6809 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
6810#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6811 if (vdec->master == NULL &&
6812 vdec->slave == NULL) {
6813 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6814 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6815 }
6816#else
6817 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6818 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6819#endif
6820 }
6821 if (hevc->pic_list_init_flag != 3
6822 || hevc->cur_pic == NULL) {
6823 /* make it dec from the first slice segment */
6824 return 3;
6825 }
6826 hevc->cur_pic->slice_idx++;
6827 hevc->new_pic = 0;
6828 }
6829 } else {
6830 if (hevc->wait_buf == 1) {
6831 pic_list_process(hevc);
6832 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6833 if (hevc->cur_pic == NULL)
6834 return -1;
6835
6836 if (!hevc->m_ins_flag)
6837 hevc->over_decode = 0;
6838
6839#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6840 hevc->cur_pic->dv_enhance_exist = 0;
6841 if (vdec->master == NULL &&
6842 vdec->slave == NULL)
6843 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6844#else
6845 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6846#endif
6847 hevc->wait_buf = 0;
6848 } else if (hevc->wait_buf ==
6849 2) {
6850 if (get_display_pic_num(hevc) >
6851 1)
6852 return -1;
6853 hevc->wait_buf = 0;
6854 }
6855 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6856 dump_pic_list(hevc);
6857 }
6858
6859 if (hevc->new_pic) {
6860#if 1
6861 /*SUPPORT_10BIT*/
6862 int sao_mem_unit =
6863 (hevc->lcu_size == 16 ? 9 :
6864 hevc->lcu_size ==
6865 32 ? 14 : 24) << 4;
6866#else
6867 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
6868#endif
6869 int pic_height_cu =
6870 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
6871 int pic_width_cu =
6872 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
6873 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
6874
6875 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
6876 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6877 hevc_print(hevc, 0,
6878 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
6879 __func__,
6880 hevc->decode_idx,
6881 hevc->curr_pic_struct,
6882 hevc->interlace_flag,
6883 hevc->cur_pic->index);
6884 }
6885 if (dbg_skip_decode_index != 0 &&
6886 hevc->decode_idx == dbg_skip_decode_index)
6887 dbg_skip_flag = 1;
6888
6889 hevc->decode_idx++;
6890 update_tile_info(hevc, pic_width_cu, pic_height_cu,
6891 sao_mem_unit, rpm_param);
6892
6893 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
6894 }
6895
6896 if (hevc->iPrevPOC != hevc->curr_POC) {
6897 hevc->new_tile = 1;
6898 hevc->tile_x = 0;
6899 hevc->tile_y = 0;
6900 hevc->tile_y_x = 0;
6901 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6902 hevc_print(hevc, 0,
6903 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
6904 hevc->tile_x, hevc->tile_y);
6905 }
6906 } else if (hevc->tile_enabled) {
6907 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6908 hevc_print(hevc, 0,
6909 "slice_segment_address is %d\n",
6910 rpm_param->p.slice_segment_address);
6911 }
6912 hevc->tile_y_x =
6913 get_tile_index(hevc, rpm_param->p.slice_segment_address,
6914 (hevc->pic_w +
6915 hevc->lcu_size -
6916 1) / hevc->lcu_size);
6917 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
6918 && (hevc->tile_y_x != -1)) {
6919 hevc->new_tile = 1;
6920 hevc->tile_x = hevc->tile_y_x & 0xff;
6921 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
6922 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6923 hevc_print(hevc, 0,
6924 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
6925 rpm_param->p.slice_segment_address,
6926 hevc->tile_x, hevc->tile_y);
6927 }
6928 } else
6929 hevc->new_tile = 0;
6930 } else
6931 hevc->new_tile = 0;
6932
6933 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
6934 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
6935 hevc->new_tile = 0;
6936
6937 if (hevc->new_tile) {
6938 hevc->tile_start_lcu_x =
6939 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
6940 hevc->tile_start_lcu_y =
6941 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
6942 hevc->tile_width_lcu =
6943 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
6944 hevc->tile_height_lcu =
6945 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
6946 }
6947
6948 set_ref_pic_list(hevc, rpm_param);
6949
6950 Col_ref = rpm_param->p.collocated_ref_idx;
6951
6952 hevc->LDCFlag = 0;
6953 if (rpm_param->p.slice_type != I_SLICE) {
6954 hevc->LDCFlag = 1;
6955 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
6956 if (hevc->cur_pic->
6957 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
6958 hevc->curr_POC)
6959 hevc->LDCFlag = 0;
6960 }
6961 if (rpm_param->p.slice_type == B_SLICE) {
6962 for (i = 0; (i < hevc->RefNum_L1)
6963 && hevc->LDCFlag; i++) {
6964 if (hevc->cur_pic->
6965 m_aiRefPOCList1[hevc->cur_pic->
6966 slice_idx][i] >
6967 hevc->curr_POC)
6968 hevc->LDCFlag = 0;
6969 }
6970 }
6971 }
6972
6973 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
6974
6975 hevc->plevel =
6976 rpm_param->p.log2_parallel_merge_level;
6977 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
6978
6979 hevc->LongTerm_Curr = 0; /* to do ... */
6980 hevc->LongTerm_Col = 0; /* to do ... */
6981
6982 hevc->list_no = 0;
6983 if (rpm_param->p.slice_type == B_SLICE)
6984 hevc->list_no = 1 - hevc->ColFromL0Flag;
6985 if (hevc->list_no == 0) {
6986 if (Col_ref < hevc->RefNum_L0) {
6987 hevc->Col_POC =
6988 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
6989 slice_idx][Col_ref];
6990 } else
6991 hevc->Col_POC = INVALID_POC;
6992 } else {
6993 if (Col_ref < hevc->RefNum_L1) {
6994 hevc->Col_POC =
6995 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
6996 slice_idx][Col_ref];
6997 } else
6998 hevc->Col_POC = INVALID_POC;
6999 }
7000
7001 hevc->LongTerm_Ref = 0; /* to do ... */
7002
7003 if (hevc->slice_type != 2) {
7004 /* if(hevc->i_only==1){ */
7005 /* return 0xf; */
7006 /* } */
7007
7008 if (hevc->Col_POC != INVALID_POC) {
7009 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7010 if (hevc->col_pic == NULL) {
7011 hevc->cur_pic->error_mark = 1;
7012 if (get_dbg_flag(hevc)) {
7013 hevc_print(hevc, 0,
7014 "WRONG,fail to get the pic Col_POC\n");
7015 }
7016 if (is_log_enable(hevc))
7017 add_log(hevc,
7018 "WRONG,fail to get the pic Col_POC");
7019 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7020 hevc->cur_pic->error_mark = 1;
7021 if (get_dbg_flag(hevc)) {
7022 hevc_print(hevc, 0,
7023 "WRONG, Col_POC error_mark is 1\n");
7024 }
7025 if (is_log_enable(hevc))
7026 add_log(hevc,
7027 "WRONG, Col_POC error_mark is 1");
7028 } else {
7029 if ((hevc->col_pic->width
7030 != hevc->pic_w) ||
7031 (hevc->col_pic->height
7032 != hevc->pic_h)) {
7033 hevc_print(hevc, 0,
7034 "Wrong reference pic (poc %d) width/height %d/%d\n",
7035 hevc->col_pic->POC,
7036 hevc->col_pic->width,
7037 hevc->col_pic->height);
7038 hevc->cur_pic->error_mark = 1;
7039 }
7040
7041 }
7042
7043 if (hevc->cur_pic->error_mark
7044 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7045#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7046 /*count info*/
7047 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7048 hevc->cur_pic->stream_offset);
7049#endif
7050 }
7051
7052 if (is_skip_decoding(hevc,
7053 hevc->cur_pic)) {
7054 return 2;
7055 }
7056 } else
7057 hevc->col_pic = hevc->cur_pic;
7058 } /* */
7059 if (hevc->col_pic == NULL)
7060 hevc->col_pic = hevc->cur_pic;
7061#ifdef BUFFER_MGR_ONLY
7062 return 0xf;
7063#else
7064 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7065 || (dbg_skip_flag))
7066 return 0xf;
7067#endif
7068
7069 config_mc_buffer(hevc, hevc->cur_pic);
7070
7071 if (is_skip_decoding(hevc,
7072 hevc->cur_pic)) {
7073 if (get_dbg_flag(hevc))
7074 hevc_print(hevc, 0,
7075 "Discard this picture index %d\n",
7076 hevc->cur_pic->index);
7077#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7078 /*count info*/
7079 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7080 hevc->cur_pic->stream_offset);
7081#endif
7082 return 2;
7083 }
7084#ifdef MCRCC_ENABLE
7085 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7086#endif
7087 config_mpred_hw(hevc);
7088
7089 config_sao_hw(hevc, rpm_param);
7090
7091 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7092 return 0xf;
7093
7094 return 0;
7095}
7096
7097
7098
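/*
 * Allocate the scatter MMU pages backing the compressed frame body of
 * new_pic: the lossless-compressed body size is rounded up to 4K pages
 * and the pages come from the decoder mmu_box. Nothing is done in double
 * write mode 0x10 (no compressed output).
 */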
7099static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7100 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7101 int cur_buf_idx = new_pic->index;
7102 int bit_depth_10 = (bit_depth != 0x00);
7103 int picture_size;
7104 int cur_mmu_4k_number;
7105 int ret, max_frame_num;
7106 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7107 new_pic->height, !bit_depth_10);
7108 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7109 if (hevc->double_write_mode & 0x10)
7110 return 0;
7111 /*hevc_print(hevc, 0,
7112 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7113 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7114 if (new_pic->scatter_alloc) {
7115 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7116 new_pic->scatter_alloc = 0;
7117 }
7118 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7119 max_frame_num = MAX_FRAME_8K_NUM;
7120 else
7121 max_frame_num = MAX_FRAME_4K_NUM;
7122 if (cur_mmu_4k_number > max_frame_num) {
7123 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7124 cur_mmu_4k_number,
7125 new_pic->width,
7126 new_pic->height);
7127 return -1;
7128 }
7129 ret = decoder_mmu_box_alloc_idx(
7130 hevc->mmu_box,
7131 cur_buf_idx,
7132 cur_mmu_4k_number,
7133 mmu_index_adr);
7134 if (ret == 0)
7135 new_pic->scatter_alloc = 1;
7136 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7137 "%s pic index %d page count(%d) ret =%d\n",
7138 __func__, cur_buf_idx,
7139 cur_mmu_4k_number,
7140 ret);
7141 return ret;
7142}
7143
7144
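/* Return a picture's scatter MMU pages to the mmu_box (only relevant when
 * MMU is enabled and double write mode is not 0x10). */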
7145static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7146 struct PIC_s *pic)
7147{
7148 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7149 "%s pic index %d scatter_alloc %d\n",
7150 __func__, pic->index,
7151 pic->scatter_alloc);
7152
7153 if (hevc->mmu_enable
7154 && ((hevc->double_write_mode & 0x10) == 0)
7155 && pic->scatter_alloc)
7156 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7157 pic->scatter_alloc = 0;
7158}
7159
7160/*
7161 *************************************************
7162 *
7163 *h265 buffer management end
7164 *
7165 **************************************************
7166 */
7167static struct hevc_state_s *gHevc;
7168
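/* Free the coherent DMA buffers (ucode swap area, aux, rpm, lmem and the
 * frame MMU map) allocated by hevc_local_init(), and release gvs. */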
7169static void hevc_local_uninit(struct hevc_state_s *hevc)
7170{
7171 hevc->rpm_ptr = NULL;
7172 hevc->lmem_ptr = NULL;
7173
7174#ifdef SWAP_HEVC_UCODE
7175 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7176 if (hevc->mc_cpu_addr != NULL) {
7177 dma_free_coherent(amports_get_dma_device(),
7178 hevc->swap_size, hevc->mc_cpu_addr,
7179 hevc->mc_dma_handle);
7180 hevc->mc_cpu_addr = NULL;
7181 }
7182
7183 }
7184#endif
7185#ifdef DETREFILL_ENABLE
7186 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7187 uninit_detrefill_buf(hevc);
7188#endif
7189 if (hevc->aux_addr) {
7190 dma_free_coherent(amports_get_dma_device(),
7191 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7192 hevc->aux_phy_addr);
7193 hevc->aux_addr = NULL;
7194 }
7195 if (hevc->rpm_addr) {
7196 dma_free_coherent(amports_get_dma_device(),
7197 RPM_BUF_SIZE, hevc->rpm_addr,
7198 hevc->rpm_phy_addr);
7199 hevc->rpm_addr = NULL;
7200 }
7201 if (hevc->lmem_addr) {
7202 dma_free_coherent(amports_get_dma_device(),
7203 LMEM_BUF_SIZE, hevc->lmem_addr,
7204 hevc->lmem_phy_addr);
7205 hevc->lmem_addr = NULL;
7206 }
7207
7208 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7209 if (hevc->frame_mmu_map_phy_addr)
7210 dma_free_coherent(amports_get_dma_device(),
7211 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7212 hevc->frame_mmu_map_phy_addr);
7213
7214 hevc->frame_mmu_map_addr = NULL;
7215 }
7216
7217 kfree(gvs);
7218 gvs = NULL;
7219}
7220
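/* Pick the workspace layout matching the chip's 4K capability, then
 * allocate the coherent RPM, aux, LMEM and frame MMU map buffers. */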
7221static int hevc_local_init(struct hevc_state_s *hevc)
7222{
7223 int ret = -1;
7224 struct BuffInfo_s *cur_buf_info = NULL;
7225
7226 memset(&hevc->param, 0, sizeof(union param_u));
7227
7228 cur_buf_info = &hevc->work_space_buf_store;
7229
7230 if (vdec_is_support_4k()) {
7231 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7232 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7233 sizeof(struct BuffInfo_s));
7234 else
7235 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7236 sizeof(struct BuffInfo_s));
7237 } else
7238 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7239 sizeof(struct BuffInfo_s));
7240
7241 cur_buf_info->start_adr = hevc->buf_start;
7242 init_buff_spec(hevc, cur_buf_info);
7243
7244 hevc_init_stru(hevc, cur_buf_info);
7245
7246 hevc->bit_depth_luma = 8;
7247 hevc->bit_depth_chroma = 8;
7248 hevc->video_signal_type = 0;
7249 hevc->video_signal_type_debug = 0;
7250 bit_depth_luma = hevc->bit_depth_luma;
7251 bit_depth_chroma = hevc->bit_depth_chroma;
7252 video_signal_type = hevc->video_signal_type;
7253
7254 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7255 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7256 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7257 if (hevc->rpm_addr == NULL) {
7258 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7259 return -1;
7260 }
7261 hevc->rpm_ptr = hevc->rpm_addr;
7262 }
7263
7264 if (prefix_aux_buf_size > 0 ||
7265 suffix_aux_buf_size > 0) {
7266 u32 aux_buf_size;
7267
7268 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7269 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7270 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7271 hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7272 aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7273 if (hevc->aux_addr == NULL) {
7274 pr_err("%s: failed to alloc aux buffer\n", __func__);
7275 return -1;
7276 }
7277 }
7278
7279 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7280 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7281 if (hevc->lmem_addr == NULL) {
7282 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7283 return -1;
7284 }
7285 hevc->lmem_ptr = hevc->lmem_addr;
7286
7287 if (hevc->mmu_enable) {
7288 hevc->frame_mmu_map_addr =
7289 dma_alloc_coherent(amports_get_dma_device(),
7290 get_frame_mmu_map_size(),
7291 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7292 if (hevc->frame_mmu_map_addr == NULL) {
7293 pr_err("%s: failed to alloc frame mmu map buffer\n", __func__);
7294 return -1;
7295 }
7296 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7297 }
7298 ret = 0;
7299 return ret;
7300}
7301
7302/*
7303 *******************************************
7304 * Mailbox command
7305 *******************************************
7306 */
7307#define CMD_FINISHED 0
7308#define CMD_ALLOC_VIEW 1
7309#define CMD_FRAME_DISPLAY 3
7310#define CMD_DEBUG 10
7311
7312
7313#define DECODE_BUFFER_NUM_MAX 32
7314#define DISPLAY_BUFFER_NUM 6
7315
7316#define video_domain_addr(adr) (adr&0x7fffffff)
7317#define DECODER_WORK_SPACE_SIZE 0x800000
7318
7319#define spec2canvas(x) \
7320 (((x)->uv_canvas_index << 16) | \
7321 ((x)->uv_canvas_index << 8) | \
7322 ((x)->y_canvas_index << 0))
7323
7324
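/*
 * Program the display canvases for a picture. With double write enabled
 * the canvases point at the (possibly down-scaled) double-write planes;
 * otherwise, when the frame buffer is not MMU managed, they point at the
 * full-size decoded planes.
 */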
7325static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7326{
7327 struct vdec_s *vdec = hw_to_vdec(hevc);
7328 int canvas_w = ALIGN(pic->width, 64)/4;
7329 int canvas_h = ALIGN(pic->height, 32)/4;
7330 int blkmode = mem_map_mode;
7331
7332 /*CANVAS_BLKMODE_64X32*/
7333#ifdef SUPPORT_10BIT
7334 if (pic->double_write_mode) {
7335 canvas_w = pic->width /
7336 get_double_write_ratio(hevc, pic->double_write_mode);
7337 canvas_h = pic->height /
7338 get_double_write_ratio(hevc, pic->double_write_mode);
7339
7340 if (mem_map_mode == 0)
7341 canvas_w = ALIGN(canvas_w, 32);
7342 else
7343 canvas_w = ALIGN(canvas_w, 64);
7344 canvas_h = ALIGN(canvas_h, 32);
7345
7346 if (vdec->parallel_dec == 1) {
7347 if (pic->y_canvas_index == -1)
7348 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7349 if (pic->uv_canvas_index == -1)
7350 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7351 } else {
7352 pic->y_canvas_index = 128 + pic->index * 2;
7353 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7354 }
7355
7356 canvas_config_ex(pic->y_canvas_index,
7357 pic->dw_y_adr, canvas_w, canvas_h,
7358 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7359 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7360 canvas_w, canvas_h,
7361 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7362#ifdef MULTI_INSTANCE_SUPPORT
7363 pic->canvas_config[0].phy_addr =
7364 pic->dw_y_adr;
7365 pic->canvas_config[0].width =
7366 canvas_w;
7367 pic->canvas_config[0].height =
7368 canvas_h;
7369 pic->canvas_config[0].block_mode =
7370 blkmode;
7371 pic->canvas_config[0].endian = 7;
7372
7373 pic->canvas_config[1].phy_addr =
7374 pic->dw_u_v_adr;
7375 pic->canvas_config[1].width =
7376 canvas_w;
7377 pic->canvas_config[1].height =
7378 canvas_h;
7379 pic->canvas_config[1].block_mode =
7380 blkmode;
7381 pic->canvas_config[1].endian = 7;
7382#endif
7383 } else {
7384 if (!hevc->mmu_enable) {
7385 /* to change after 10bit VPU is ready ... */
7386 if (vdec->parallel_dec == 1) {
7387 if (pic->y_canvas_index == -1)
7388 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7389 pic->uv_canvas_index = pic->y_canvas_index;
7390 } else {
7391 pic->y_canvas_index = 128 + pic->index;
7392 pic->uv_canvas_index = 128 + pic->index;
7393 }
7394
7395 canvas_config_ex(pic->y_canvas_index,
7396 pic->mc_y_adr, canvas_w, canvas_h,
7397 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7398 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7399 canvas_w, canvas_h,
7400 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7401 }
7402 }
7403#else
7404 if (vdec->parallel_dec == 1) {
7405 if (pic->y_canvas_index == -1)
7406 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7407 if (pic->uv_canvas_index == -1)
7408 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7409 } else {
7410 pic->y_canvas_index = 128 + pic->index * 2;
7411 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7412 }
7413
7414
7415 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7416 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7417 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7418 canvas_w, canvas_h,
7419 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7420#endif
7421}
7422
7423static int init_buf_spec(struct hevc_state_s *hevc)
7424{
7425 int pic_width = hevc->pic_w;
7426 int pic_height = hevc->pic_h;
7427
7428 /* hevc_print(hevc, 0,
7429 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7430 */
7431 hevc_print(hevc, 0,
7432 "%s2 %d %d\n", __func__, pic_width, pic_height);
7433 /* pic_width = hevc->pic_w; */
7434 /* pic_height = hevc->pic_h; */
7435
7436 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7437 hevc->frame_width = pic_width;
7438 hevc->frame_height = pic_height;
7439
7440 }
7441
7442 return 0;
7443}
7444
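/*
 * Walk the SEI payloads stored in the aux buffer: pick up picture timing
 * (pic_struct), the HDR10+ ITU-T T.35 marker, the mastering display
 * colour volume and the content light level, setting the matching bits
 * in hevc->sei_present_flag where applicable.
 */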
7445static int parse_sei(struct hevc_state_s *hevc,
7446 struct PIC_s *pic, char *sei_buf, uint32_t size)
7447{
7448 char *p = sei_buf;
7449 char *p_sei;
7450 uint16_t header;
7451 uint8_t nal_unit_type;
7452 uint8_t payload_type, payload_size;
7453 int i, j;
7454
7455 if (size < 2)
7456 return 0;
7457 header = *p++;
7458 header <<= 8;
7459 header += *p++;
7460 nal_unit_type = header >> 9;
7461 if ((nal_unit_type != NAL_UNIT_SEI)
7462 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7463 return 0;
7464 while (p+2 <= sei_buf+size) {
7465 payload_type = *p++;
7466 payload_size = *p++;
7467 if (p+payload_size <= sei_buf+size) {
7468 switch (payload_type) {
7469 case SEI_PicTiming:
7470 if ((parser_sei_enable & 0x4) &&
7471 hevc->frame_field_info_present_flag) {
7472 p_sei = p;
7473 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7474 pic->pic_struct = hevc->curr_pic_struct;
7475 if (get_dbg_flag(hevc) &
7476 H265_DEBUG_PIC_STRUCT) {
7477 hevc_print(hevc, 0,
7478 "parse result pic_struct = %d\n",
7479 hevc->curr_pic_struct);
7480 }
7481 }
7482 break;
7483 case SEI_UserDataITU_T_T35:
7484 p_sei = p;
7485 if (p_sei[0] == 0xB5
7486 && p_sei[1] == 0x00
7487 && p_sei[2] == 0x3C
7488 && p_sei[3] == 0x00
7489 && p_sei[4] == 0x01
7490 && p_sei[5] == 0x04)
7491 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7492
7493 break;
7494 case SEI_MasteringDisplayColorVolume:
7495 /*hevc_print(hevc, 0,
7496 "sei type: primary display color volume %d, size %d\n",
7497 payload_type,
7498 payload_size);*/
7499 /* master_display_colour */
7500 p_sei = p;
7501 for (i = 0; i < 3; i++) {
7502 for (j = 0; j < 2; j++) {
7503 hevc->primaries[i][j]
7504 = (*p_sei<<8)
7505 | *(p_sei+1);
7506 p_sei += 2;
7507 }
7508 }
7509 for (i = 0; i < 2; i++) {
7510 hevc->white_point[i]
7511 = (*p_sei<<8)
7512 | *(p_sei+1);
7513 p_sei += 2;
7514 }
7515 for (i = 0; i < 2; i++) {
7516 hevc->luminance[i]
7517 = (*p_sei<<24)
7518 | (*(p_sei+1)<<16)
7519 | (*(p_sei+2)<<8)
7520 | *(p_sei+3);
7521 p_sei += 4;
7522 }
7523 hevc->sei_present_flag |=
7524 SEI_MASTER_DISPLAY_COLOR_MASK;
7525 /*for (i = 0; i < 3; i++)
7526 for (j = 0; j < 2; j++)
7527 hevc_print(hevc, 0,
7528 "\tprimaries[%1d][%1d] = %04x\n",
7529 i, j,
7530 hevc->primaries[i][j]);
7531 hevc_print(hevc, 0,
7532 "\twhite_point = (%04x, %04x)\n",
7533 hevc->white_point[0],
7534 hevc->white_point[1]);
7535 hevc_print(hevc, 0,
7536 "\tmax,min luminance = %08x, %08x\n",
7537 hevc->luminance[0],
7538 hevc->luminance[1]);*/
7539 break;
7540 case SEI_ContentLightLevel:
7541 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7542 hevc_print(hevc, 0,
7543 "sei type: max content light level %d, size %d\n",
7544 payload_type, payload_size);
7545 /* content_light_level */
7546 p_sei = p;
7547 hevc->content_light_level[0]
7548 = (*p_sei<<8) | *(p_sei+1);
7549 p_sei += 2;
7550 hevc->content_light_level[1]
7551 = (*p_sei<<8) | *(p_sei+1);
7552 p_sei += 2;
7553 hevc->sei_present_flag |=
7554 SEI_CONTENT_LIGHT_LEVEL_MASK;
7555 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7556 hevc_print(hevc, 0,
7557 "\tmax cll = %04x, max_pa_cll = %04x\n",
7558 hevc->content_light_level[0],
7559 hevc->content_light_level[1]);
7560 break;
7561 default:
7562 break;
7563 }
7564 }
7565 p += payload_size;
7566 }
7567 return 0;
7568}
7569
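/*
 * Convert aspect_ratio_idc (plus the explicit SAR when idc is 255) into
 * the 8.8 fixed-point height/width ratio used for ratio_control; the idc
 * cases follow the sample aspect ratio table of the HEVC VUI.
 */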
7570static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7571 unsigned w, unsigned h)
7572{
7573 unsigned ar;
7574
7575 if (idc == 255) {
7576 ar = div_u64(256ULL * sar_h * h,
7577 sar_w * w);
7578 } else {
7579 switch (idc) {
7580 case 1:
7581 ar = 0x100 * h / w;
7582 break;
7583 case 2:
7584 ar = 0x100 * h * 11 / (w * 12);
7585 break;
7586 case 3:
7587 ar = 0x100 * h * 11 / (w * 10);
7588 break;
7589 case 4:
7590 ar = 0x100 * h * 11 / (w * 16);
7591 break;
7592 case 5:
7593 ar = 0x100 * h * 33 / (w * 40);
7594 break;
7595 case 6:
7596 ar = 0x100 * h * 11 / (w * 24);
7597 break;
7598 case 7:
7599 ar = 0x100 * h * 11 / (w * 20);
7600 break;
7601 case 8:
7602 ar = 0x100 * h * 11 / (w * 32);
7603 break;
7604 case 9:
7605 ar = 0x100 * h * 33 / (w * 80);
7606 break;
7607 case 10:
7608 ar = 0x100 * h * 11 / (w * 18);
7609 break;
7610 case 11:
7611 ar = 0x100 * h * 11 / (w * 15);
7612 break;
7613 case 12:
7614 ar = 0x100 * h * 33 / (w * 64);
7615 break;
7616 case 13:
7617 ar = 0x100 * h * 99 / (w * 160);
7618 break;
7619 case 14:
7620 ar = 0x100 * h * 3 / (w * 4);
7621 break;
7622 case 15:
7623 ar = 0x100 * h * 2 / (w * 3);
7624 break;
7625 case 16:
7626 ar = 0x100 * h * 1 / (w * 2);
7627 break;
7628 default:
7629 ar = h * 0x100 / w;
7630 break;
7631 }
7632 }
7633
7634 return ar;
7635}
7636
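/*
 * Fill the vframe with the picture geometry, aspect ratio and signal
 * type, parse the SEI payloads carried in the picture's aux data and
 * copy the HDR metadata (mastering display colour volume, content light
 * level) into the vframe properties.
 */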
7637static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
7638 struct PIC_s *pic)
7639{
7640 unsigned int ar;
7641 int i, j;
7642 char *p;
7643 unsigned size = 0;
7644 unsigned type = 0;
7645 struct vframe_master_display_colour_s *vf_dp
7646 = &vf->prop.master_display_colour;
7647
7648 vf->width = pic->width /
7649 get_double_write_ratio(hevc, pic->double_write_mode);
7650 vf->height = pic->height /
7651 get_double_write_ratio(hevc, pic->double_write_mode);
7652
7653 vf->duration = hevc->frame_dur;
7654 vf->duration_pulldown = 0;
7655 vf->flag = 0;
7656
7657 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
7658 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7659
7660
7661 if (((pic->aspect_ratio_idc == 255) &&
7662 pic->sar_width &&
7663 pic->sar_height) ||
7664 ((pic->aspect_ratio_idc != 255) &&
7665 (pic->width))) {
7666 ar = min_t(u32,
7667 calc_ar(pic->aspect_ratio_idc,
7668 pic->sar_width,
7669 pic->sar_height,
7670 pic->width,
7671 pic->height),
7672 DISP_RATIO_ASPECT_RATIO_MAX);
7673 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7674 }
7675 hevc->ratio_control = vf->ratio_control;
7676 if (pic->aux_data_buf
7677 && pic->aux_data_size) {
7678			/* parse sei */
7679 p = pic->aux_data_buf;
7680 while (p < pic->aux_data_buf
7681 + pic->aux_data_size - 8) {
7682 size = *p++;
7683 size = (size << 8) | *p++;
7684 size = (size << 8) | *p++;
7685 size = (size << 8) | *p++;
7686 type = *p++;
7687 type = (type << 8) | *p++;
7688 type = (type << 8) | *p++;
7689 type = (type << 8) | *p++;
7690 if (type == 0x02000000) {
7691 /* hevc_print(hevc, 0,
7692 "sei(%d)\n", size); */
7693 parse_sei(hevc, pic, p, size);
7694 }
7695 p += size;
7696 }
7697 }
7698 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
7699 vf->signal_type = pic->video_signal_type;
7700 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
7701 u32 data;
7702 data = vf->signal_type;
7703 data = data & 0xFFFF00FF;
7704 data = data | (0x30<<8);
7705 vf->signal_type = data;
7706 }
7707 }
7708 else
7709 vf->signal_type = 0;
7710 hevc->video_signal_type_debug = vf->signal_type;
7711
7712 /* master_display_colour */
7713 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
7714 for (i = 0; i < 3; i++)
7715 for (j = 0; j < 2; j++)
7716 vf_dp->primaries[i][j] = hevc->primaries[i][j];
7717 for (i = 0; i < 2; i++) {
7718 vf_dp->white_point[i] = hevc->white_point[i];
7719 vf_dp->luminance[i]
7720 = hevc->luminance[i];
7721 }
7722 vf_dp->present_flag = 1;
7723 } else
7724 vf_dp->present_flag = 0;
7725
7726 /* content_light_level */
7727 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
7728 vf_dp->content_light_level.max_content
7729 = hevc->content_light_level[0];
7730 vf_dp->content_light_level.max_pic_average
7731 = hevc->content_light_level[1];
7732 vf_dp->content_light_level.present_flag = 1;
7733 } else
7734 vf_dp->content_light_level.present_flag = 0;
7735}
7736
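/*
 * vframe provider op: report queue occupancy, newframe_q as free
 * slots and display_q as frames ready for display, under the
 * provider spinlock.
 */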
7737static int vh265_vf_states(struct vframe_states *states, void *op_arg)
7738{
7739 unsigned long flags;
7740#ifdef MULTI_INSTANCE_SUPPORT
7741 struct vdec_s *vdec = op_arg;
7742 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7743#else
7744 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7745#endif
7746
7747 spin_lock_irqsave(&lock, flags);
7748
7749 states->vf_pool_size = VF_POOL_SIZE;
7750 states->buf_free_num = kfifo_len(&hevc->newframe_q);
7751 states->buf_avail_num = kfifo_len(&hevc->display_q);
7752
7753 if (step == 2)
7754 states->buf_avail_num = 0;
7755 spin_unlock_irqrestore(&lock, flags);
7756 return 0;
7757}
7758
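/*
 * vh265_vf_peek(): look at the head of display_q without dequeuing;
 * when a second frame is already queued its pts is exported through
 * next_vf_pts/next_vf_pts_valid.
 */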
7759static struct vframe_s *vh265_vf_peek(void *op_arg)
7760{
7761 struct vframe_s *vf[2] = {0, 0};
7762#ifdef MULTI_INSTANCE_SUPPORT
7763 struct vdec_s *vdec = op_arg;
7764 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7765#else
7766 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7767#endif
7768
7769 if (step == 2)
7770 return NULL;
7771
7772 if (force_disp_pic_index & 0x100) {
7773 if (force_disp_pic_index & 0x200)
7774 return NULL;
7775 return &hevc->vframe_dummy;
7776 }
7777
7778
7779 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
7780 if (vf[1]) {
7781 vf[0]->next_vf_pts_valid = true;
7782 vf[0]->next_vf_pts = vf[1]->pts;
7783 } else
7784 vf[0]->next_vf_pts_valid = false;
7785 return vf[0];
7786 }
7787
7788 return NULL;
7789}
7790
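/*
 * vh265_vf_get(): dequeue one vframe from display_q (honouring the
 * single-step "step" debug control), optionally dump the picture's
 * aux data for Dolby Vision debug, update the get counters and
 * export the pts of the next queued frame through next_vf_pts.
 */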
7791static struct vframe_s *vh265_vf_get(void *op_arg)
7792{
7793 struct vframe_s *vf;
7794#ifdef MULTI_INSTANCE_SUPPORT
7795 struct vdec_s *vdec = op_arg;
7796 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7797#else
7798 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7799#endif
7800
7801 if (step == 2)
7802 return NULL;
7803 else if (step == 1)
7804 step = 2;
7805
7806#if 0
7807 if (force_disp_pic_index & 0x100) {
7808 int buffer_index = force_disp_pic_index & 0xff;
7809 struct PIC_s *pic = NULL;
7810 if (buffer_index >= 0
7811 && buffer_index < MAX_REF_PIC_NUM)
7812 pic = hevc->m_PIC[buffer_index];
7813 if (pic == NULL)
7814 return NULL;
7815 if (force_disp_pic_index & 0x200)
7816 return NULL;
7817
7818 vf = &hevc->vframe_dummy;
7819 if (get_double_write_mode(hevc)) {
7820 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
7821 VIDTYPE_VIU_NV21;
7822 if (hevc->m_ins_flag) {
7823 vf->canvas0Addr = vf->canvas1Addr = -1;
7824 vf->plane_num = 2;
7825 vf->canvas0_config[0] =
7826 pic->canvas_config[0];
7827 vf->canvas0_config[1] =
7828 pic->canvas_config[1];
7829
7830 vf->canvas1_config[0] =
7831 pic->canvas_config[0];
7832 vf->canvas1_config[1] =
7833 pic->canvas_config[1];
7834 } else {
7835 vf->canvas0Addr = vf->canvas1Addr
7836 = spec2canvas(pic);
7837 }
7838 } else {
7839 vf->canvas0Addr = vf->canvas1Addr = 0;
7840 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
7841 if (hevc->mmu_enable)
7842 vf->type |= VIDTYPE_SCATTER;
7843 }
7844 vf->compWidth = pic->width;
7845 vf->compHeight = pic->height;
7846 update_vf_memhandle(hevc, vf, pic);
7847 switch (hevc->bit_depth_luma) {
7848 case 9:
7849 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
7850 break;
7851 case 10:
7852 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
7853 | BITDEPTH_V10;
7854 break;
7855 default:
7856 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
7857 break;
7858 }
7859 if ((vf->type & VIDTYPE_COMPRESS) == 0)
7860 vf->bitdepth =
7861 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
7862 if (hevc->mem_saving_mode == 1)
7863 vf->bitdepth |= BITDEPTH_SAVING_MODE;
7864 vf->duration_pulldown = 0;
7865 vf->pts = 0;
7866 vf->pts_us64 = 0;
7867 set_frame_info(hevc, vf);
7868
7869 vf->width = pic->width /
7870 get_double_write_ratio(hevc, pic->double_write_mode);
7871 vf->height = pic->height /
7872 get_double_write_ratio(hevc, pic->double_write_mode);
7873
7874 force_disp_pic_index |= 0x200;
7875 return vf;
7876 }
7877#endif
7878
7879 if (kfifo_get(&hevc->display_q, &vf)) {
7880 struct vframe_s *next_vf;
7881 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
7882 hevc_print(hevc, 0,
7883 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
7884 __func__, vf, vf->type, vf->index,
7885 get_pic_poc(hevc, vf->index & 0xff),
7886 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
7887 vf->pts, vf->pts_us64,
7888 vf->duration);
7889#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7890 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
7891 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
7892 if (pic->aux_data_buf && pic->aux_data_size > 0) {
7893 int i;
7894 struct PIC_s *pic =
7895 hevc->m_PIC[vf->index & 0xff];
7896 hevc_print(hevc, 0,
7897 "pic 0x%p aux size %d:\n",
7898 pic, pic->aux_data_size);
7899 for (i = 0; i < pic->aux_data_size; i++) {
7900 hevc_print_cont(hevc, 0,
7901 "%02x ", pic->aux_data_buf[i]);
7902 if (((i + 1) & 0xf) == 0)
7903 hevc_print_cont(hevc, 0, "\n");
7904 }
7905 hevc_print_cont(hevc, 0, "\n");
7906 }
7907 }
7908#endif
7909 hevc->show_frame_num++;
7910 hevc->vf_get_count++;
7911
7912 if (kfifo_peek(&hevc->display_q, &next_vf)) {
7913 vf->next_vf_pts_valid = true;
7914 vf->next_vf_pts = next_vf->pts;
7915 } else
7916 vf->next_vf_pts_valid = false;
7917
7918 return vf;
7919 }
7920
7921 return NULL;
7922}
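/*
 * Sanity check: a vframe handed back by the receiver must point into
 * this instance's vfpool; otherwise log it and reject the put.
 */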
7923static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
7924 int i;
7925 for (i = 0; i < VF_POOL_SIZE; i++) {
7926 if (vf == &hevc->vfpool[i])
7927 return true;
7928 }
7929	pr_info("h265: invalid vf has been put, vf = %p\n", vf);
7930 for (i = 0; i < VF_POOL_SIZE; i++) {
7931		pr_info("valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
7932 }
7933 return false;
7934}
7935
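/*
 * vh265_vf_put(): return a displayed vframe to newframe_q and drop
 * vf_ref on the top/bottom PICs it referenced; when a PIC's vf_ref
 * reaches zero it is marked not output_ready and, if the decoder is
 * waiting for a buffer, the mailbox irq is raised (presumably to
 * resume decoding).
 */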
7936static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
7937{
7938 unsigned long flags;
7939#ifdef MULTI_INSTANCE_SUPPORT
7940 struct vdec_s *vdec = op_arg;
7941 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7942#else
7943 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7944#endif
7945 unsigned char index_top;
7946 unsigned char index_bot;
7947
7948 if (vf && (vf_valid_check(vf, hevc) == false))
7949 return;
7950 if (vf == (&hevc->vframe_dummy))
7951 return;
7952 index_top = vf->index & 0xff;
7953 index_bot = (vf->index >> 8) & 0xff;
7954 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
7955 hevc_print(hevc, 0,
7956 "%s(type %d index 0x%x)\n",
7957 __func__, vf->type, vf->index);
7958 hevc->vf_put_count++;
7959 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
7960 spin_lock_irqsave(&lock, flags);
7961
7962 if (index_top != 0xff
7963 && index_top < MAX_REF_PIC_NUM
7964 && hevc->m_PIC[index_top]) {
7965 if (hevc->m_PIC[index_top]->vf_ref > 0) {
7966 hevc->m_PIC[index_top]->vf_ref--;
7967
7968 if (hevc->m_PIC[index_top]->vf_ref == 0) {
7969 hevc->m_PIC[index_top]->output_ready = 0;
7970
7971 if (hevc->wait_buf != 0)
7972 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
7973 0x1);
7974 }
7975 }
7976 }
7977
7978 if (index_bot != 0xff
7979 && index_bot < MAX_REF_PIC_NUM
7980 && hevc->m_PIC[index_bot]) {
7981 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
7982 hevc->m_PIC[index_bot]->vf_ref--;
7983
7984 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
7985 hevc->m_PIC[index_bot]->output_ready = 0;
7986 if (hevc->wait_buf != 0)
7987 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
7988 0x1);
7989 }
7990 }
7991 }
7992 spin_unlock_irqrestore(&lock, flags);
7993}
7994
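/*
 * vh265_event_cb(): receiver event hook. GET_AUX_DATA returns the aux
 * (SEI / Dolby Vision) buffer of the PIC referenced by the vframe
 * index; DOLBY_BYPASS_EL enables bypassing of the enhancement layer.
 */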
7995static int vh265_event_cb(int type, void *data, void *op_arg)
7996{
7997 unsigned long flags;
7998#ifdef MULTI_INSTANCE_SUPPORT
7999 struct vdec_s *vdec = op_arg;
8000 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8001#else
8002 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8003#endif
8004 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8005#if 0
8006 amhevc_stop();
8007#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8008 vf_light_unreg_provider(&vh265_vf_prov);
8009#endif
8010 spin_lock_irqsave(&hevc->lock, flags);
8011 vh265_local_init();
8012 vh265_prot_init();
8013 spin_unlock_irqrestore(&hevc->lock, flags);
8014#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8015 vf_reg_provider(&vh265_vf_prov);
8016#endif
8017 amhevc_start();
8018#endif
8019 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8020 struct provider_aux_req_s *req =
8021 (struct provider_aux_req_s *)data;
8022 unsigned char index;
8023
8024 spin_lock_irqsave(&lock, flags);
8025 index = req->vf->index & 0xff;
8026 req->aux_buf = NULL;
8027 req->aux_size = 0;
8028 if (req->bot_flag)
8029 index = (req->vf->index >> 8) & 0xff;
8030 if (index != 0xff
8031 && index < MAX_REF_PIC_NUM
8032 && hevc->m_PIC[index]) {
8033 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8034 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8035#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8036 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8037 req->dv_enhance_exist = false;
8038 else
8039 req->dv_enhance_exist =
8040 hevc->m_PIC[index]->dv_enhance_exist;
8041 hevc_print(hevc, H265_DEBUG_DV,
8042				"query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux size 0x%x\n",
8043 req->vf,
8044 hevc->m_PIC[index]->POC, index,
8045 req->dv_enhance_exist, req->aux_size);
8046#else
8047 req->dv_enhance_exist = 0;
8048#endif
8049 }
8050 spin_unlock_irqrestore(&lock, flags);
8051
8052 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8053 hevc_print(hevc, 0,
8054 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8055 __func__, type, index, req->aux_size);
8056#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8057 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8058 if ((force_bypass_dvenl & 0x80000000) == 0) {
8059 hevc_print(hevc, 0,
8060 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8061 __func__);
8062 hevc->bypass_dvenl_enable = 1;
8063 }
8064
8065#endif
8066 }
8067 return 0;
8068}
8069
8070#ifdef HEVC_PIC_STRUCT_SUPPORT
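/*
 * process_pending_vframe(): field pairing for interlaced pic_structs.
 * At most one field vframe sits in pending_q waiting for its pair;
 * here it is either flushed to display_q as-is (pair recycled or
 * queue too deep) or completed with the canvas of pair_pic and then
 * queued.
 */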
8071static int process_pending_vframe(struct hevc_state_s *hevc,
8072 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8073{
8074 struct vframe_s *vf;
8075
8076 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8077 hevc_print(hevc, 0,
8078 "%s: pair_pic index 0x%x %s\n",
8079 __func__, pair_pic->index,
8080 pair_frame_top_flag ?
8081 "top" : "bot");
8082
8083 if (kfifo_len(&hevc->pending_q) > 1) {
8084		/* do not keep more than one frame pending */
8085 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8086 hevc_print(hevc, 0,
8087 "fatal error, no available buffer slot.");
8088 return -1;
8089 }
8090 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8091 hevc_print(hevc, 0,
8092 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8093 __func__, vf->index);
8094 hevc->vf_pre_count++;
8095 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
8096 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8097 }
8098
8099 if (kfifo_peek(&hevc->pending_q, &vf)) {
8100 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8101 /*
8102 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8103 *do not use it
8104 */
8105 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8106 hevc_print(hevc, 0,
8107 "fatal error, no available buffer slot.");
8108 return -1;
8109 }
8110 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8111 hevc_print(hevc, 0,
8112 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8113 __func__, vf->index);
8114 if (vf) {
8115 hevc->vf_pre_count++;
8116 kfifo_put(&hevc->display_q,
8117 (const struct vframe_s *)vf);
8118 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8119 }
8120 } else if ((!pair_frame_top_flag) &&
8121 (((vf->index >> 8) & 0xff) == 0xff)) {
8122 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8123 hevc_print(hevc, 0,
8124 "fatal error, no available buffer slot.");
8125 return -1;
8126 }
8127 if (vf) {
8128 vf->type = VIDTYPE_PROGRESSIVE
8129 | VIDTYPE_VIU_NV21;
8130 vf->index &= 0xff;
8131 vf->index |= (pair_pic->index << 8);
8132 vf->canvas1Addr = spec2canvas(pair_pic);
8133 pair_pic->vf_ref++;
8134 kfifo_put(&hevc->display_q,
8135 (const struct vframe_s *)vf);
8136 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8137 hevc->vf_pre_count++;
8138 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8139 hevc_print(hevc, 0,
8140 "%s vf => display_q: (index 0x%x)\n",
8141 __func__, vf->index);
8142 }
8143 } else if (pair_frame_top_flag &&
8144 ((vf->index & 0xff) == 0xff)) {
8145 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8146 hevc_print(hevc, 0,
8147 "fatal error, no available buffer slot.");
8148 return -1;
8149 }
8150 if (vf) {
8151 vf->type = VIDTYPE_PROGRESSIVE
8152 | VIDTYPE_VIU_NV21;
8153 vf->index &= 0xff00;
8154 vf->index |= pair_pic->index;
8155 vf->canvas0Addr = spec2canvas(pair_pic);
8156 pair_pic->vf_ref++;
8157 kfifo_put(&hevc->display_q,
8158 (const struct vframe_s *)vf);
8159 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8160 hevc->vf_pre_count++;
8161 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8162 hevc_print(hevc, 0,
8163 "%s vf => display_q: (index 0x%x)\n",
8164 __func__, vf->index);
8165 }
8166 }
8167 }
8168 return 0;
8169}
8170#endif
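/*
 * update_vf_memhandle(): attach the codec_mm handles backing this PIC
 * to the vframe (the mmu box entry for scatter/compressed buffers,
 * the bmmu box entry otherwise), presumably so the buffers remain
 * referenced while the frame is displayed.
 */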
8171static void update_vf_memhandle(struct hevc_state_s *hevc,
8172 struct vframe_s *vf, struct PIC_s *pic)
8173{
8174 if (pic->index < 0) {
8175 vf->mem_handle = NULL;
8176 vf->mem_head_handle = NULL;
8177 } else if (vf->type & VIDTYPE_SCATTER) {
8178 vf->mem_handle =
8179 decoder_mmu_box_get_mem_handle(
8180 hevc->mmu_box, pic->index);
8181 vf->mem_head_handle =
8182 decoder_bmmu_box_get_mem_handle(
8183 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8184 } else {
8185 vf->mem_handle =
8186 decoder_bmmu_box_get_mem_handle(
8187 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8188 vf->mem_head_handle = NULL;
8189 /*vf->mem_head_handle =
8190 decoder_bmmu_box_get_mem_handle(
8191 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8192 }
8193 return;
8194}
8195
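/*
 * fill_frame_info(): copy per-picture QoS statistics (frame type,
 * size, pts, min/avg/max MV, QP and skip values) into
 * hevc->vframe_qos and, when frameinfo reporting is enabled, push
 * them up via vdec_fill_frame_info().
 */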
8196static void fill_frame_info(struct hevc_state_s *hevc,
8197 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8198{
8199 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8200 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8201 vframe_qos->type = 4;
8202 else if (pic->slice_type == I_SLICE)
8203 vframe_qos->type = 1;
8204 else if (pic->slice_type == P_SLICE)
8205 vframe_qos->type = 2;
8206 else if (pic->slice_type == B_SLICE)
8207 vframe_qos->type = 3;
8208/*
8209#define SHOW_QOS_INFO
8210*/
8211 vframe_qos->size = framesize;
8212 vframe_qos->pts = pts;
8213#ifdef SHOW_QOS_INFO
8214 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8215#endif
8216
8217
8218 vframe_qos->max_mv = pic->max_mv;
8219 vframe_qos->avg_mv = pic->avg_mv;
8220 vframe_qos->min_mv = pic->min_mv;
8221#ifdef SHOW_QOS_INFO
8222 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8223 vframe_qos->max_mv,
8224 vframe_qos->avg_mv,
8225 vframe_qos->min_mv);
8226#endif
8227
8228 vframe_qos->max_qp = pic->max_qp;
8229 vframe_qos->avg_qp = pic->avg_qp;
8230 vframe_qos->min_qp = pic->min_qp;
8231#ifdef SHOW_QOS_INFO
8232 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8233 vframe_qos->max_qp,
8234 vframe_qos->avg_qp,
8235 vframe_qos->min_qp);
8236#endif
8237
8238 vframe_qos->max_skip = pic->max_skip;
8239 vframe_qos->avg_skip = pic->avg_skip;
8240 vframe_qos->min_skip = pic->min_skip;
8241#ifdef SHOW_QOS_INFO
8242 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8243 vframe_qos->max_skip,
8244 vframe_qos->avg_skip,
8245 vframe_qos->min_skip);
8246#endif
8247
8248 vframe_qos->num++;
8249
8250 if (hevc->frameinfo_enable)
8251 vdec_fill_frame_info(vframe_qos, 1);
8252}
8253
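/*
 * prepare_display_buf(): take a free vframe from newframe_q and fill
 * it from the decoded PIC: pts lookup (or pass-through for
 * frame-based input), QoS info, compressed/double-write addresses,
 * canvas configuration, bit depth, conformance-window cropping and
 * pic_struct handling (paired fields become separate vframes or are
 * parked in pending_q), then queue the result on display_q and
 * notify the receiver.
 */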
8254static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8255{
8256#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8257 struct vdec_s *vdec = hw_to_vdec(hevc);
8258#endif
8259 struct vframe_s *vf = NULL;
8260 int stream_offset = pic->stream_offset;
8261 unsigned short slice_type = pic->slice_type;
8262 u32 frame_size;
8263
8264 if (force_disp_pic_index & 0x100) {
8265 /*recycle directly*/
8266 pic->output_ready = 0;
8267 return -1;
8268 }
8269 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8270 hevc_print(hevc, 0,
8271 "fatal error, no available buffer slot.");
8272 return -1;
8273 }
8274 display_frame_count[hevc->index]++;
8275 if (vf) {
8276 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8277 "%s: pic index 0x%x\n",
8278 __func__, pic->index);*/
8279
8280 if (hevc->is_used_v4l) {
8281 vf->v4l_mem_handle
8282 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8283 if (hevc->mmu_enable) {
8284 if (vdec_v4l_binding_fd_and_vf(vf->v4l_mem_handle, vf) < 0) {
8285 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
8286					"v4l: binding vf failed.\n");
8287 return -1;
8288 }
8289 }
8290 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
8291 "[%d] %s(), v4l mem handle: 0x%lx\n",
8292 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id,
8293 __func__, vf->v4l_mem_handle);
8294 }
8295
8296#ifdef MULTI_INSTANCE_SUPPORT
8297 if (vdec_frame_based(hw_to_vdec(hevc))) {
8298 vf->pts = pic->pts;
8299 vf->pts_us64 = pic->pts64;
8300 vf->timestamp = pic->timestamp;
8301 }
8302 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8303 stream_offset, &vf->pts, 0) != 0) { */
8304#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8305 else if (vdec->master == NULL) {
8306#else
8307 else {
8308#endif
8309#endif
8310 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8311 "call pts_lookup_offset_us64(0x%x)\n",
8312 stream_offset);
8313 if (pts_lookup_offset_us64
8314 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8315 &frame_size, 0,
8316 &vf->pts_us64) != 0) {
8317#ifdef DEBUG_PTS
8318 hevc->pts_missed++;
8319#endif
8320 vf->pts = 0;
8321 vf->pts_us64 = 0;
8322 }
8323#ifdef DEBUG_PTS
8324 else
8325 hevc->pts_hit++;
8326#endif
8327#ifdef MULTI_INSTANCE_SUPPORT
8328#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8329 } else {
8330 vf->pts = 0;
8331 vf->pts_us64 = 0;
8332 }
8333#else
8334 }
8335#endif
8336#endif
8337 if (pts_unstable && (hevc->frame_dur > 0))
8338 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8339
8340 fill_frame_info(hevc, pic, frame_size, vf->pts);
8341
8342 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8343 && hevc->get_frame_dur) {
8344 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8345
8346 if (pts_diff < 0) {
8347 hevc->pts_mode_switching_count++;
8348 hevc->pts_mode_recovery_count = 0;
8349
8350 if (hevc->pts_mode_switching_count >=
8351 PTS_MODE_SWITCHING_THRESHOLD) {
8352 hevc->pts_mode =
8353 PTS_NONE_REF_USE_DURATION;
8354 hevc_print(hevc, 0,
8355 "HEVC: switch to n_d mode.\n");
8356 }
8357
8358 } else {
8359 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8360
8361 hevc->pts_mode_recovery_count++;
8362 if (hevc->pts_mode_recovery_count > p) {
8363 hevc->pts_mode_switching_count = 0;
8364 hevc->pts_mode_recovery_count = 0;
8365 }
8366 }
8367 }
8368
8369 if (vf->pts != 0)
8370 hevc->last_lookup_pts = vf->pts;
8371
8372 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8373 && (slice_type != 2))
8374 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8375 hevc->last_pts = vf->pts;
8376
8377 if (vf->pts_us64 != 0)
8378 hevc->last_lookup_pts_us64 = vf->pts_us64;
8379
8380 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8381 && (slice_type != 2)) {
8382 vf->pts_us64 =
8383 hevc->last_pts_us64 +
8384 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8385 }
8386 hevc->last_pts_us64 = vf->pts_us64;
8387 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8388 hevc_print(hevc, 0,
8389 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8390 vf->pts, vf->pts_us64);
8391 }
8392
8393 /*
8394 *vf->index:
8395 *(1) vf->type is VIDTYPE_PROGRESSIVE
8396 * and vf->canvas0Addr != vf->canvas1Addr,
8397 * vf->index[7:0] is the index of top pic
8398 * vf->index[15:8] is the index of bot pic
8399 *(2) other cases,
8400 * only vf->index[7:0] is used
8401 * vf->index[15:8] == 0xff
8402 */
8403 vf->index = 0xff00 | pic->index;
8404#if 1
8405/*SUPPORT_10BIT*/
8406 if (pic->double_write_mode & 0x10) {
8407 /* double write only */
8408 vf->compBodyAddr = 0;
8409 vf->compHeadAddr = 0;
8410 } else {
8411
8412 if (hevc->mmu_enable) {
8413 vf->compBodyAddr = 0;
8414 vf->compHeadAddr = pic->header_adr;
8415 } else {
8416 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8417 vf->compHeadAddr = pic->mc_y_adr +
8418 pic->losless_comp_body_size;
8419 vf->mem_head_handle = NULL;
8420 }
8421
8422 /*head adr*/
8423 vf->canvas0Addr = vf->canvas1Addr = 0;
8424 }
8425 if (pic->double_write_mode) {
8426 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8427 vf->type |= VIDTYPE_VIU_NV21;
8428 if ((pic->double_write_mode == 3) &&
8429 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8430 vf->type |= VIDTYPE_COMPRESS;
8431 if (hevc->mmu_enable)
8432 vf->type |= VIDTYPE_SCATTER;
8433 }
8434#ifdef MULTI_INSTANCE_SUPPORT
8435 if (hevc->m_ins_flag &&
8436 (get_dbg_flag(hevc)
8437 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8438 vf->canvas0Addr = vf->canvas1Addr = -1;
8439 vf->plane_num = 2;
8440 vf->canvas0_config[0] =
8441 pic->canvas_config[0];
8442 vf->canvas0_config[1] =
8443 pic->canvas_config[1];
8444
8445 vf->canvas1_config[0] =
8446 pic->canvas_config[0];
8447 vf->canvas1_config[1] =
8448 pic->canvas_config[1];
8449
8450 } else
8451#endif
8452 vf->canvas0Addr = vf->canvas1Addr
8453 = spec2canvas(pic);
8454 } else {
8455 vf->canvas0Addr = vf->canvas1Addr = 0;
8456 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8457 if (hevc->mmu_enable)
8458 vf->type |= VIDTYPE_SCATTER;
8459 }
8460 vf->compWidth = pic->width;
8461 vf->compHeight = pic->height;
8462 update_vf_memhandle(hevc, vf, pic);
8463 switch (pic->bit_depth_luma) {
8464 case 9:
8465 vf->bitdepth = BITDEPTH_Y9;
8466 break;
8467 case 10:
8468 vf->bitdepth = BITDEPTH_Y10;
8469 break;
8470 default:
8471 vf->bitdepth = BITDEPTH_Y8;
8472 break;
8473 }
8474 switch (pic->bit_depth_chroma) {
8475 case 9:
8476 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8477 break;
8478 case 10:
8479 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8480 break;
8481 default:
8482 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8483 break;
8484 }
8485 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8486 vf->bitdepth =
8487 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8488 if (pic->mem_saving_mode == 1)
8489 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8490#else
8491 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8492 vf->type |= VIDTYPE_VIU_NV21;
8493 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8494#endif
8495 set_frame_info(hevc, vf, pic);
8496 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8497 /* hevc_print(hevc, 0,
8498 "aaa: %d/%d, %d/%d\n",
8499 vf->width,vf->height, pic->width, pic->height); */
8500 vf->width = pic->width;
8501 vf->height = pic->height;
8502
8503 if (force_w_h != 0) {
8504 vf->width = (force_w_h >> 16) & 0xffff;
8505 vf->height = force_w_h & 0xffff;
8506 }
8507 if (force_fps & 0x100) {
8508 u32 rate = force_fps & 0xff;
8509
8510 if (rate)
8511 vf->duration = 96000/rate;
8512 else
8513 vf->duration = 0;
8514 }
8515 if (force_fps & 0x200) {
8516 vf->pts = 0;
8517 vf->pts_us64 = 0;
8518 }
8519 /*
8520 * !!! to do ...
8521		 * need to move the code below to get_new_pic();
8522		 * hevc->xxx can only be used by the current decoded pic
8523 */
8524 if (pic->conformance_window_flag &&
8525 (get_dbg_flag(hevc) &
8526 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
8527 unsigned int SubWidthC, SubHeightC;
8528
8529 switch (pic->chroma_format_idc) {
8530 case 1:
8531 SubWidthC = 2;
8532 SubHeightC = 2;
8533 break;
8534 case 2:
8535 SubWidthC = 2;
8536 SubHeightC = 1;
8537 break;
8538 default:
8539 SubWidthC = 1;
8540 SubHeightC = 1;
8541 break;
8542 }
8543 vf->width -= SubWidthC *
8544 (pic->conf_win_left_offset +
8545 pic->conf_win_right_offset);
8546 vf->height -= SubHeightC *
8547 (pic->conf_win_top_offset +
8548 pic->conf_win_bottom_offset);
8549
8550 vf->compWidth -= SubWidthC *
8551 (pic->conf_win_left_offset +
8552 pic->conf_win_right_offset);
8553 vf->compHeight -= SubHeightC *
8554 (pic->conf_win_top_offset +
8555 pic->conf_win_bottom_offset);
8556
8557 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
8558 hevc_print(hevc, 0,
8559 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
8560 pic->chroma_format_idc,
8561 pic->conf_win_left_offset,
8562 pic->conf_win_right_offset,
8563 pic->conf_win_top_offset,
8564 pic->conf_win_bottom_offset,
8565 vf->width, vf->height, vf->compWidth, vf->compHeight);
8566 }
8567
8568 vf->width = vf->width /
8569 get_double_write_ratio(hevc, pic->double_write_mode);
8570 vf->height = vf->height /
8571 get_double_write_ratio(hevc, pic->double_write_mode);
8572#ifdef HEVC_PIC_STRUCT_SUPPORT
8573 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
8574 struct vframe_s *vf2;
8575
8576 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8577 hevc_print(hevc, 0,
8578 "pic_struct = %d index 0x%x\n",
8579 pic->pic_struct,
8580 pic->index);
8581
8582 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8583 hevc_print(hevc, 0,
8584 "fatal error, no available buffer slot.");
8585 return -1;
8586 }
8587 pic->vf_ref = 2;
8588 vf->duration = vf->duration>>1;
8589 memcpy(vf2, vf, sizeof(struct vframe_s));
8590
8591 if (pic->pic_struct == 3) {
8592 vf->type = VIDTYPE_INTERLACE_TOP
8593 | VIDTYPE_VIU_NV21;
8594 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8595 | VIDTYPE_VIU_NV21;
8596 } else {
8597 vf->type = VIDTYPE_INTERLACE_BOTTOM
8598 | VIDTYPE_VIU_NV21;
8599 vf2->type = VIDTYPE_INTERLACE_TOP
8600 | VIDTYPE_VIU_NV21;
8601 }
8602 hevc->vf_pre_count++;
8603 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8604 kfifo_put(&hevc->display_q,
8605 (const struct vframe_s *)vf);
8606 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8607 hevc->vf_pre_count++;
8608 kfifo_put(&hevc->display_q,
8609 (const struct vframe_s *)vf2);
8610 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8611 } else if (pic->pic_struct == 5
8612 || pic->pic_struct == 6) {
8613 struct vframe_s *vf2, *vf3;
8614
8615 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8616 hevc_print(hevc, 0,
8617 "pic_struct = %d index 0x%x\n",
8618 pic->pic_struct,
8619 pic->index);
8620
8621 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8622 hevc_print(hevc, 0,
8623 "fatal error, no available buffer slot.");
8624 return -1;
8625 }
8626 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
8627 hevc_print(hevc, 0,
8628 "fatal error, no available buffer slot.");
8629 return -1;
8630 }
8631 pic->vf_ref = 3;
8632 vf->duration = vf->duration/3;
8633 memcpy(vf2, vf, sizeof(struct vframe_s));
8634 memcpy(vf3, vf, sizeof(struct vframe_s));
8635
8636 if (pic->pic_struct == 5) {
8637 vf->type = VIDTYPE_INTERLACE_TOP
8638 | VIDTYPE_VIU_NV21;
8639 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8640 | VIDTYPE_VIU_NV21;
8641 vf3->type = VIDTYPE_INTERLACE_TOP
8642 | VIDTYPE_VIU_NV21;
8643 } else {
8644 vf->type = VIDTYPE_INTERLACE_BOTTOM
8645 | VIDTYPE_VIU_NV21;
8646 vf2->type = VIDTYPE_INTERLACE_TOP
8647 | VIDTYPE_VIU_NV21;
8648 vf3->type = VIDTYPE_INTERLACE_BOTTOM
8649 | VIDTYPE_VIU_NV21;
8650 }
8651 hevc->vf_pre_count++;
8652 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8653 kfifo_put(&hevc->display_q,
8654 (const struct vframe_s *)vf);
8655 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8656 hevc->vf_pre_count++;
8657 kfifo_put(&hevc->display_q,
8658 (const struct vframe_s *)vf2);
8659 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8660 hevc->vf_pre_count++;
8661 kfifo_put(&hevc->display_q,
8662 (const struct vframe_s *)vf3);
8663 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
8664
8665 } else if (pic->pic_struct == 9
8666 || pic->pic_struct == 10) {
8667 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8668 hevc_print(hevc, 0,
8669 "pic_struct = %d index 0x%x\n",
8670 pic->pic_struct,
8671 pic->index);
8672
8673 pic->vf_ref = 1;
8674 /* process previous pending vf*/
8675 process_pending_vframe(hevc,
8676 pic, (pic->pic_struct == 9));
8677
8678 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8679 /* process current vf */
8680 kfifo_put(&hevc->pending_q,
8681 (const struct vframe_s *)vf);
8682 vf->height <<= 1;
8683 if (pic->pic_struct == 9) {
8684 vf->type = VIDTYPE_INTERLACE_TOP
8685 | VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8686 process_pending_vframe(hevc,
8687 hevc->pre_bot_pic, 0);
8688 } else {
8689 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8690 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8691 vf->index = (pic->index << 8) | 0xff;
8692 process_pending_vframe(hevc,
8693 hevc->pre_top_pic, 1);
8694 }
8695
8696 /**/
8697 if (pic->pic_struct == 9)
8698 hevc->pre_top_pic = pic;
8699 else
8700 hevc->pre_bot_pic = pic;
8701
8702 } else if (pic->pic_struct == 11
8703 || pic->pic_struct == 12) {
8704 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8705 hevc_print(hevc, 0,
8706 "pic_struct = %d index 0x%x\n",
8707 pic->pic_struct,
8708 pic->index);
8709 pic->vf_ref = 1;
8710 /* process previous pending vf*/
8711 process_pending_vframe(hevc, pic,
8712 (pic->pic_struct == 11));
8713
8714 /* put current into pending q */
8715 vf->height <<= 1;
8716 if (pic->pic_struct == 11)
8717 vf->type = VIDTYPE_INTERLACE_TOP |
8718 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8719 else {
8720 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8721 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8722 vf->index = (pic->index << 8) | 0xff;
8723 }
8724 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8725 kfifo_put(&hevc->pending_q,
8726 (const struct vframe_s *)vf);
8727
8728 /**/
8729 if (pic->pic_struct == 11)
8730 hevc->pre_top_pic = pic;
8731 else
8732 hevc->pre_bot_pic = pic;
8733
8734 } else {
8735 pic->vf_ref = 1;
8736
8737 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8738 hevc_print(hevc, 0,
8739 "pic_struct = %d index 0x%x\n",
8740 pic->pic_struct,
8741 pic->index);
8742
8743 switch (pic->pic_struct) {
8744 case 7:
8745 vf->duration <<= 1;
8746 break;
8747 case 8:
8748 vf->duration = vf->duration * 3;
8749 break;
8750 case 1:
8751 vf->height <<= 1;
8752 vf->type = VIDTYPE_INTERLACE_TOP |
8753 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8754 process_pending_vframe(hevc, pic, 1);
8755 hevc->pre_top_pic = pic;
8756 break;
8757 case 2:
8758 vf->height <<= 1;
8759 vf->type = VIDTYPE_INTERLACE_BOTTOM
8760 | VIDTYPE_VIU_NV21
8761 | VIDTYPE_VIU_FIELD;
8762 process_pending_vframe(hevc, pic, 0);
8763 hevc->pre_bot_pic = pic;
8764 break;
8765 }
8766 hevc->vf_pre_count++;
8767 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8768 kfifo_put(&hevc->display_q,
8769 (const struct vframe_s *)vf);
8770 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8771 }
8772#else
8773 vf->type_original = vf->type;
8774 pic->vf_ref = 1;
8775 hevc->vf_pre_count++;
8776 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8777 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
8778 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8779
8780 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8781 hevc_print(hevc, 0,
8782 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8783 __func__, vf->type, vf->index,
8784 get_pic_poc(hevc, vf->index & 0xff),
8785 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8786 vf->pts, vf->pts_us64,
8787 vf->duration);
8788#endif
8789#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8790 /*count info*/
8791 vdec_count_info(gvs, 0, stream_offset);
8792#endif
8793 hw_to_vdec(hevc)->vdec_fps_detec(hw_to_vdec(hevc)->id);
8794 if (without_display_mode == 0) {
8795 vf_notify_receiver(hevc->provider_name,
8796 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
8797 }
8798 else
8799 vh265_vf_put(vh265_vf_get(vdec), vdec);
8800 }
8801
8802 return 0;
8803}
8804
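/*
 * notify_v4l_eos(): for the v4l2 path, push an empty vframe flagged
 * VIDTYPE_V4L_EOS / VFRAME_FLAG_EMPTY_FRAME_V4L through display_q so
 * the receiver sees end of stream.
 */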
8805static int notify_v4l_eos(struct vdec_s *vdec)
8806{
8807 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
8808 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
8809 struct vframe_s *vf = NULL;
8810 struct vdec_v4l2_buffer *fb = NULL;
8811
8812 if (hw->is_used_v4l && hw->eos) {
8813 if (kfifo_get(&hw->newframe_q, &vf) == 0 || vf == NULL) {
8814 hevc_print(hw, 0,
8815 "%s fatal error, no available buffer slot.\n",
8816 __func__);
8817 return -1;
8818 }
8819
8820 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb)) {
8821 pr_err("[%d] get fb fail.\n", ctx->id);
8822 return -1;
8823 }
8824
8825 vf->type |= VIDTYPE_V4L_EOS;
8826 vf->timestamp = ULONG_MAX;
8827 vf->v4l_mem_handle = (unsigned long)fb;
8828 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
8829
8830 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
8831 vf_notify_receiver(vdec->vf_provider_name,
8832 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
8833
8834 pr_info("[%d] H265 EOS notify.\n", ctx->id);
8835 }
8836
8837 return 0;
8838}
8839
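/*
 * process_nal_sei(): SEI handling for the single-instance path. Only
 * payload type 137 (mastering display colour volume) is parsed; the
 * primaries, white point and luminance words are read 16 bits at a
 * time from HEVC_SHIFTED_DATA and any remaining payload bytes are
 * skipped.
 */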
8840static void process_nal_sei(struct hevc_state_s *hevc,
8841 int payload_type, int payload_size)
8842{
8843 unsigned short data;
8844
8845 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8846 hevc_print(hevc, 0,
8847 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
8848 payload_type, payload_size);
8849
8850 if (payload_type == 137) {
8851 int i, j;
8852 /* MASTERING_DISPLAY_COLOUR_VOLUME */
8853 if (payload_size >= 24) {
8854 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8855 hevc_print(hevc, 0,
8856 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
8857 for (i = 0; i < 3; i++) {
8858 for (j = 0; j < 2; j++) {
8859 data =
8860 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8861 hevc->primaries[i][j] = data;
8862 WRITE_HREG(HEVC_SHIFT_COMMAND,
8863 (1<<7)|16);
8864 if (get_dbg_flag(hevc) &
8865 H265_DEBUG_PRINT_SEI)
8866 hevc_print(hevc, 0,
8867 "\t\tprimaries[%1d][%1d] = %04x\n",
8868 i, j, hevc->primaries[i][j]);
8869 }
8870 }
8871 for (i = 0; i < 2; i++) {
8872 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8873 hevc->white_point[i] = data;
8874 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
8875 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8876 hevc_print(hevc, 0,
8877 "\t\twhite_point[%1d] = %04x\n",
8878 i, hevc->white_point[i]);
8879 }
8880 for (i = 0; i < 2; i++) {
8881 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8882 hevc->luminance[i] = data << 16;
8883 WRITE_HREG(HEVC_SHIFT_COMMAND,
8884 (1<<7)|16);
8885 data =
8886 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8887 hevc->luminance[i] |= data;
8888 WRITE_HREG(HEVC_SHIFT_COMMAND,
8889 (1<<7)|16);
8890 if (get_dbg_flag(hevc) &
8891 H265_DEBUG_PRINT_SEI)
8892 hevc_print(hevc, 0,
8893 "\t\tluminance[%1d] = %08x\n",
8894 i, hevc->luminance[i]);
8895 }
8896 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
8897 }
8898 payload_size -= 24;
8899 while (payload_size > 0) {
8900 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
8901 payload_size--;
8902 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
8903 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
8904 }
8905 }
8906}
8907
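/*
 * hevc_recover(): stream-level error recovery for the
 * non-multi-instance path. Stop the HEVC core, realign the parser
 * read pointer, rebuild HEVC_SHIFT_BYTE_COUNT from the saved 64-bit
 * byte count, reset the decode front end and work space, reprogram
 * the NAL search controls according to error_handle_policy and
 * restart the firmware, skipping ahead to the next start code.
 */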
8908static int hevc_recover(struct hevc_state_s *hevc)
8909{
8910 int ret = -1;
8911 u32 rem;
8912 u64 shift_byte_count64;
8913 unsigned int hevc_shift_byte_count;
8914 unsigned int hevc_stream_start_addr;
8915 unsigned int hevc_stream_end_addr;
8916 unsigned int hevc_stream_rd_ptr;
8917 unsigned int hevc_stream_wr_ptr;
8918 unsigned int hevc_stream_control;
8919 unsigned int hevc_stream_fifo_ctl;
8920 unsigned int hevc_stream_buf_size;
8921
8922 mutex_lock(&vh265_mutex);
8923#if 0
8924 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
8925 int ii;
8926
8927 for (ii = 0; ii < 4; ii++)
8928 hevc_print(hevc, 0,
8929 "%04x ", hevc->debug_ptr[i + 3 - ii]);
8930 if (((i + ii) & 0xf) == 0)
8931 hevc_print(hevc, 0, "\n");
8932 }
8933#endif
8934#define ES_VID_MAN_RD_PTR (1<<0)
8935 if (!hevc->init_flag) {
8936		hevc_print(hevc, 0, "h265 has stopped, abort recovery!\n");
8937 mutex_unlock(&vh265_mutex);
8938 return ret;
8939 }
8940 amhevc_stop();
8941 msleep(20);
8942 ret = 0;
8943 /* reset */
8944 WRITE_PARSER_REG(PARSER_VIDEO_RP, READ_VREG(HEVC_STREAM_RD_PTR));
8945 SET_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
8946
8947 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
8948 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
8949 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
8950 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
8951 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
8952 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
8953 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
8954
8955	/* the HEVC stream buffer will be reset and restarted
8956	 * from the current hevc_stream_rd_ptr position
8957	 */
8958 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
8959 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
8960 if ((hevc->shift_byte_count_lo & (1 << 31))
8961 && ((hevc_shift_byte_count & (1 << 31)) == 0))
8962 hevc->shift_byte_count_hi++;
8963
8964 hevc->shift_byte_count_lo = hevc_shift_byte_count;
8965 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
8966 hevc->shift_byte_count_lo;
8967 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
8968 shift_byte_count64 -= rem;
8969 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
8970
8971 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
8972 shift_byte_count64 += hevc_stream_buf_size;
8973
8974 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
8975 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
8976
8977 WRITE_VREG(DOS_SW_RESET3,
8978 /* (1<<2)| */
8979 (1 << 3) | (1 << 4) | (1 << 8) |
8980 (1 << 11) | (1 << 12) | (1 << 14)
8981 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
8982 WRITE_VREG(DOS_SW_RESET3, 0);
8983
8984 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
8985 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
8986 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
8987 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
8988 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
8989 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
8990 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
8991
8992 hevc_config_work_space_hw(hevc);
8993 decoder_hw_reset();
8994
8995 hevc->have_vps = 0;
8996 hevc->have_sps = 0;
8997 hevc->have_pps = 0;
8998
8999 hevc->have_valid_start_slice = 0;
9000
9001 if (get_double_write_mode(hevc) & 0x10)
9002 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9003			0x1 << 31 /* Enable NV21 reference read mode for MC */
9004 );
9005
9006 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9007 /* clear mailbox interrupt */
9008 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9009 /* enable mailbox interrupt */
9010 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9011 /* disable PSCALE for hardware sharing */
9012 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9013
9014 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9015
9016 WRITE_VREG(DEBUG_REG1, 0x0);
9017
9018 if ((error_handle_policy & 1) == 0) {
9019 if ((error_handle_policy & 4) == 0) {
9020 /* ucode auto mode, and do not check vps/sps/pps/idr */
9021 WRITE_VREG(NAL_SEARCH_CTL,
9022 0xc);
9023 } else {
9024 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9025 }
9026 } else {
9027 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9028 }
9029
9030 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9031 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9032 WRITE_VREG(NAL_SEARCH_CTL,
9033 READ_VREG(NAL_SEARCH_CTL)
9034 | ((parser_sei_enable & 0x7) << 17));
9035#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9036 WRITE_VREG(NAL_SEARCH_CTL,
9037 READ_VREG(NAL_SEARCH_CTL) |
9038 ((parser_dolby_vision_enable & 0x1) << 20));
9039#endif
9040 config_decode_mode(hevc);
9041 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9042
9043 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9044 /* amhevc_disable(); */
9045 /* return -EBUSY; */
9046 /* } */
9047#if 0
9048 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9049 int ii;
9050
9051 for (ii = 0; ii < 4; ii++) {
9052 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9053 hevc_print(hevc, 0,
9054 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9055 }
9056 if (((i + ii) & 0xf) == 0)
9057 hevc_print(hevc, 0, "\n");
9058 }
9059#endif
9060 init_pic_list_hw(hevc);
9061
9062 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9063 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9064
9065#ifdef SWAP_HEVC_UCODE
9066 if (!tee_enabled() && hevc->is_swap &&
9067 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9068 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9069 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9070 }
9071#endif
9072 amhevc_start();
9073
9074 /* skip, search next start code */
9075 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9076 hevc->skip_flag = 1;
9077#ifdef ERROR_HANDLE_DEBUG
9078 if (dbg_nal_skip_count & 0x20000) {
9079 dbg_nal_skip_count &= ~0x20000;
9080 mutex_unlock(&vh265_mutex);
9081 return ret;
9082 }
9083#endif
9084 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9085	/* Interrupt Amrisc to execute */
9086 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9087#ifdef MULTI_INSTANCE_SUPPORT
9088 if (!hevc->m_ins_flag)
9089#endif
9090 hevc->first_pic_after_recover = 1;
9091 mutex_unlock(&vh265_mutex);
9092 return ret;
9093}
9094
9095static void dump_aux_buf(struct hevc_state_s *hevc)
9096{
9097 int i;
9098 unsigned short *aux_adr =
9099 (unsigned short *)
9100 hevc->aux_addr;
9101 unsigned int aux_size =
9102 (READ_VREG(HEVC_AUX_DATA_SIZE)
9103 >> 16) << 4;
9104
9105 if (hevc->prefix_aux_size > 0) {
9106 hevc_print(hevc, 0,
9107 "prefix aux: (size %d)\n",
9108 aux_size);
9109 for (i = 0; i <
9110 (aux_size >> 1); i++) {
9111 hevc_print_cont(hevc, 0,
9112 "%04x ",
9113 *(aux_adr + i));
9114 if (((i + 1) & 0xf)
9115 == 0)
9116 hevc_print_cont(hevc,
9117 0, "\n");
9118 }
9119 }
9120 if (hevc->suffix_aux_size > 0) {
9121 aux_adr = (unsigned short *)
9122 (hevc->aux_addr +
9123 hevc->prefix_aux_size);
9124 aux_size =
9125 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9126 << 4;
9127 hevc_print(hevc, 0,
9128 "suffix aux: (size %d)\n",
9129 aux_size);
9130 for (i = 0; i <
9131 (aux_size >> 1); i++) {
9132 hevc_print_cont(hevc, 0,
9133 "%04x ", *(aux_adr + i));
9134 if (((i + 1) & 0xf) == 0)
9135 hevc_print_cont(hevc, 0, "\n");
9136 }
9137 }
9138}
9139
9140#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9141static void dolby_get_meta(struct hevc_state_s *hevc)
9142{
9143 struct vdec_s *vdec = hw_to_vdec(hevc);
9144
9145 if (get_dbg_flag(hevc) &
9146 H265_DEBUG_BUFMGR_MORE)
9147 dump_aux_buf(hevc);
9148 if (vdec->dolby_meta_with_el || vdec->slave) {
9149 set_aux_data(hevc,
9150 hevc->cur_pic, 0, 0);
9151 } else if (vdec->master) {
9152 struct hevc_state_s *hevc_ba =
9153 (struct hevc_state_s *)
9154 vdec->master->private;
9155 /*do not use hevc_ba*/
9156 set_aux_data(hevc,
9157 hevc_ba->cur_pic,
9158 0, 1);
9159 set_aux_data(hevc,
9160 hevc->cur_pic, 0, 2);
9161 }
9162}
9163#endif
9164
9165static void read_decode_info(struct hevc_state_s *hevc)
9166{
9167 uint32_t decode_info =
9168 READ_HREG(HEVC_DECODE_INFO);
9169 hevc->start_decoding_flag |=
9170 (decode_info & 0xff);
9171 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9172}
9173
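/*
 * vh265_isr_thread_fn(): threaded half of the decoder interrupt.
 * Runs the error-recovery paths for error_flag 1/3, then dispatches
 * on dec_status: buffer-empty states re-arm or reschedule the
 * decode, HEVC_DECPIC_DATA_DONE finalizes the current picture and
 * queues it for display, the Dolby Vision layer-switch states hand
 * over between base and enhancement layer decoders, and
 * SEI/NAL-search states feed VPS/SPS/PPS tracking and the EOS flush.
 */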
9174static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9175{
9176 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9177 unsigned int dec_status = hevc->dec_status;
9178 int i, ret;
9179
9180#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9181 struct vdec_s *vdec = hw_to_vdec(hevc);
9182#endif
9183
9184 if (hevc->eos)
9185 return IRQ_HANDLED;
9186 if (
9187#ifdef MULTI_INSTANCE_SUPPORT
9188 (!hevc->m_ins_flag) &&
9189#endif
9190 hevc->error_flag == 1) {
9191 if ((error_handle_policy & 0x10) == 0) {
9192 if (hevc->cur_pic) {
9193 int current_lcu_idx =
9194 READ_VREG(HEVC_PARSER_LCU_START)
9195 & 0xffffff;
9196 if (current_lcu_idx <
9197 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9198 hevc->cur_pic->error_mark = 1;
9199
9200 }
9201 }
9202 if ((error_handle_policy & 1) == 0) {
9203 hevc->error_skip_nal_count = 1;
9204			/* manually search for NALs, skip error_skip_nal_count
9205			 * NALs, then trigger the HEVC_NAL_SEARCH_DONE irq
9206			 */
9207 WRITE_VREG(NAL_SEARCH_CTL,
9208 (error_skip_nal_count << 4) | 0x1);
9209 } else {
9210 hevc->error_skip_nal_count = error_skip_nal_count;
9211 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9212 }
9213 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9214#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9215 || vdec->master
9216 || vdec->slave
9217#endif
9218 ) {
9219 WRITE_VREG(NAL_SEARCH_CTL,
9220 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9221 }
9222 WRITE_VREG(NAL_SEARCH_CTL,
9223 READ_VREG(NAL_SEARCH_CTL)
9224 | ((parser_sei_enable & 0x7) << 17));
9225#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9226 WRITE_VREG(NAL_SEARCH_CTL,
9227 READ_VREG(NAL_SEARCH_CTL) |
9228 ((parser_dolby_vision_enable & 0x1) << 20));
9229#endif
9230 config_decode_mode(hevc);
9231 /* search new nal */
9232 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9233		/* Interrupt Amrisc to execute */
9234 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9235
9236 /* hevc_print(hevc, 0,
9237 *"%s: error handle\n", __func__);
9238 */
9239 hevc->error_flag = 2;
9240 return IRQ_HANDLED;
9241 } else if (
9242#ifdef MULTI_INSTANCE_SUPPORT
9243 (!hevc->m_ins_flag) &&
9244#endif
9245 hevc->error_flag == 3) {
9246 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9247 hevc_recover(hevc);
9248 hevc->error_flag = 0;
9249
9250 if ((error_handle_policy & 0x10) == 0) {
9251 if (hevc->cur_pic) {
9252 int current_lcu_idx =
9253 READ_VREG(HEVC_PARSER_LCU_START)
9254 & 0xffffff;
9255 if (current_lcu_idx <
9256 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9257 hevc->cur_pic->error_mark = 1;
9258
9259 }
9260 }
9261 if ((error_handle_policy & 1) == 0) {
9262			/* need to skip some data when
9263			 * error_flag 3 is triggered,
9264			 * to avoid hevc_recover() being
9265			 * called many times at the
9266			 * same bitstream position
9267			 */
9268 hevc->error_skip_nal_count = 1;
9269			/* manually search for NALs, skip error_skip_nal_count
9270			 * NALs, then trigger the HEVC_NAL_SEARCH_DONE irq
9271			 */
9272 WRITE_VREG(NAL_SEARCH_CTL,
9273 (error_skip_nal_count << 4) | 0x1);
9274 }
9275
9276 if ((error_handle_policy & 0x2) == 0) {
9277 hevc->have_vps = 1;
9278 hevc->have_sps = 1;
9279 hevc->have_pps = 1;
9280 }
9281 return IRQ_HANDLED;
9282 }
9283 if (!hevc->m_ins_flag) {
9284 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9285 if ((hevc->shift_byte_count_lo & (1 << 31))
9286 && ((i & (1 << 31)) == 0))
9287 hevc->shift_byte_count_hi++;
9288 hevc->shift_byte_count_lo = i;
9289 }
9290#ifdef MULTI_INSTANCE_SUPPORT
9291 mutex_lock(&hevc->chunks_mutex);
9292 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9293 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9294 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9295 && (hevc->chunk)) {
9296 hevc->cur_pic->pts = hevc->chunk->pts;
9297 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9298 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9299 }
9300 mutex_unlock(&hevc->chunks_mutex);
9301
9302 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9303 dec_status == HEVC_DECODE_BUFEMPTY2) {
9304 if (hevc->m_ins_flag) {
9305 read_decode_info(hevc);
9306 if (vdec_frame_based(hw_to_vdec(hevc))) {
9307 hevc->empty_flag = 1;
9308 goto pic_done;
9309 } else {
9310 if (
9311#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9312 vdec->master ||
9313 vdec->slave ||
9314#endif
9315 (data_resend_policy & 0x1)) {
9316 hevc->dec_result = DEC_RESULT_AGAIN;
9317 amhevc_stop();
9318 restore_decode_state(hevc);
9319 } else
9320 hevc->dec_result = DEC_RESULT_GET_DATA;
9321 }
9322 reset_process_time(hevc);
9323 vdec_schedule_work(&hevc->work);
9324 }
9325 return IRQ_HANDLED;
9326 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9327 (dec_status == HEVC_NAL_DECODE_DONE)
9328 ) {
9329 if (hevc->m_ins_flag) {
9330 read_decode_info(hevc);
9331 if (vdec_frame_based(hw_to_vdec(hevc))) {
9332 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9333 hevc->empty_flag = 1;
9334 goto pic_done;
9335 } else {
9336 hevc->dec_result = DEC_RESULT_AGAIN;
9337 amhevc_stop();
9338 restore_decode_state(hevc);
9339 }
9340
9341 reset_process_time(hevc);
9342 vdec_schedule_work(&hevc->work);
9343 }
9344
9345 return IRQ_HANDLED;
9346 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9347 if (hevc->m_ins_flag) {
9348 struct PIC_s *pic;
9349 struct PIC_s *pic_display;
9350 int decoded_poc;
9351#ifdef DETREFILL_ENABLE
9352 if (hevc->is_swap &&
9353 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9354 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9355 && READ_VREG(HEVC_SAO_DBG_MODE0))
9356 hevc->delrefill_check = 2;
9357 }
9358#endif
9359 hevc->empty_flag = 0;
9360pic_done:
9361 if (input_frame_based(hw_to_vdec(hevc)) &&
9362 frmbase_cont_bitlevel != 0 &&
9363 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9364 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9365 > frmbase_cont_bitlevel)) {
9366				/* handle the case of multiple pictures in one packet */
9367 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9368 "%s has more data index= %d, size=0x%x shiftcnt=0x%x)\n",
9369 __func__,
9370 hevc->decode_idx, hevc->decode_size,
9371 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9372 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9373 start_process_time(hevc);
9374 return IRQ_HANDLED;
9375 }
9376
9377 read_decode_info(hevc);
9378 get_picture_qos_info(hevc);
9379#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9380 hevc->start_parser_type = 0;
9381 hevc->switch_dvlayer_flag = 0;
9382#endif
9383 hevc->decoded_poc = hevc->curr_POC;
9384 hevc->decoding_pic = NULL;
9385 hevc->dec_result = DEC_RESULT_DONE;
9386#ifdef DETREFILL_ENABLE
9387 if (hevc->is_swap &&
9388 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9389 if (hevc->delrefill_check != 2)
9390#endif
9391
9392 amhevc_stop();
9393
9394 reset_process_time(hevc);
9395
9396 if (hevc->vf_pre_count == 0) {
9397 decoded_poc = hevc->curr_POC;
9398 pic = get_pic_by_POC(hevc, decoded_poc);
9399 if (pic && (pic->POC != INVALID_POC)) {
9400 /*PB skip control */
9401 if (pic->error_mark == 0
9402 && hevc->PB_skip_mode == 1) {
9403 /* start decoding after
9404 * first I
9405 */
9406 hevc->ignore_bufmgr_error |= 0x1;
9407 }
9408 if (hevc->ignore_bufmgr_error & 1) {
9409 if (hevc->PB_skip_count_after_decoding > 0) {
9410 hevc->PB_skip_count_after_decoding--;
9411 } else {
9412 /* start displaying */
9413 hevc->ignore_bufmgr_error |= 0x2;
9414 }
9415 }
9416 if (hevc->mmu_enable
9417 && ((hevc->double_write_mode & 0x10) == 0)) {
9418 if (!hevc->m_ins_flag) {
9419 hevc->used_4k_num =
9420 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9421
9422 if ((!is_skip_decoding(hevc, pic)) &&
9423 (hevc->used_4k_num >= 0) &&
9424 (hevc->cur_pic->scatter_alloc
9425 == 1)) {
9426 hevc_print(hevc,
9427 H265_DEBUG_BUFMGR_MORE,
9428 "%s pic index %d scatter_alloc %d page_start %d\n",
9429 "decoder_mmu_box_free_idx_tail",
9430 hevc->cur_pic->index,
9431 hevc->cur_pic->scatter_alloc,
9432 hevc->used_4k_num);
9433 decoder_mmu_box_free_idx_tail(
9434 hevc->mmu_box,
9435 hevc->cur_pic->index,
9436 hevc->used_4k_num);
9437 hevc->cur_pic->scatter_alloc
9438 = 2;
9439 }
9440 hevc->used_4k_num = -1;
9441 }
9442 }
9443
9444 pic->output_mark = 1;
9445 pic->recon_mark = 1;
9446 }
9447force_output:
9448 pic_display = output_pic(hevc, 1);
9449
9450 if (pic_display) {
9451 if ((pic_display->error_mark &&
9452 ((hevc->ignore_bufmgr_error &
9453 0x2) == 0))
9454 || (get_dbg_flag(hevc) &
9455 H265_DEBUG_DISPLAY_CUR_FRAME)
9456 || (get_dbg_flag(hevc) &
9457 H265_DEBUG_NO_DISPLAY)) {
9458 pic_display->output_ready = 0;
9459 if (get_dbg_flag(hevc) &
9460 H265_DEBUG_BUFMGR) {
9461 hevc_print(hevc, 0,
9462 "[BM] Display: POC %d, ",
9463 pic_display->POC);
9464 hevc_print_cont(hevc, 0,
9465 "decoding index %d ==> ",
9466 pic_display->
9467 decode_idx);
9468 hevc_print_cont(hevc, 0,
9469 "Debug or err,recycle it\n");
9470 }
9471 } else {
9472 if (pic_display->
9473 slice_type != 2) {
9474 pic_display->output_ready = 0;
9475 } else {
9476 prepare_display_buf
9477 (hevc,
9478 pic_display);
9479 hevc->first_pic_flag = 1;
9480 }
9481 }
9482 }
9483 }
9484
9485 vdec_schedule_work(&hevc->work);
9486 }
9487
9488 return IRQ_HANDLED;
9489#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9490 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9491 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9492 if (hevc->m_ins_flag) {
9493 unsigned char next_parser_type =
9494 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
9495 read_decode_info(hevc);
9496
9497 if (vdec->slave &&
9498 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9499 /*cur is base, found enhance*/
9500 struct hevc_state_s *hevc_el =
9501 (struct hevc_state_s *)
9502 vdec->slave->private;
9503 hevc->switch_dvlayer_flag = 1;
9504 hevc->no_switch_dvlayer_count = 0;
9505 hevc_el->start_parser_type =
9506 next_parser_type;
9507 hevc_print(hevc, H265_DEBUG_DV,
9508 "switch (poc %d) to el\n",
9509 hevc->cur_pic ?
9510 hevc->cur_pic->POC :
9511 INVALID_POC);
9512 } else if (vdec->master &&
9513 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
9514 /*cur is enhance, found base*/
9515 struct hevc_state_s *hevc_ba =
9516 (struct hevc_state_s *)
9517 vdec->master->private;
9518 hevc->switch_dvlayer_flag = 1;
9519 hevc->no_switch_dvlayer_count = 0;
9520 hevc_ba->start_parser_type =
9521 next_parser_type;
9522 hevc_print(hevc, H265_DEBUG_DV,
9523 "switch (poc %d) to bl\n",
9524 hevc->cur_pic ?
9525 hevc->cur_pic->POC :
9526 INVALID_POC);
9527 } else {
9528 hevc->switch_dvlayer_flag = 0;
9529 hevc->start_parser_type =
9530 next_parser_type;
9531 hevc->no_switch_dvlayer_count++;
9532 hevc_print(hevc, H265_DEBUG_DV,
9533 "%s: no_switch_dvlayer_count = %d\n",
9534 vdec->master ? "el" : "bl",
9535 hevc->no_switch_dvlayer_count);
9536 if (vdec->slave &&
9537 dolby_el_flush_th != 0 &&
9538 hevc->no_switch_dvlayer_count >
9539 dolby_el_flush_th) {
9540 struct hevc_state_s *hevc_el =
9541 (struct hevc_state_s *)
9542 vdec->slave->private;
9543 struct PIC_s *el_pic;
9544 check_pic_decoded_error(hevc_el,
9545 hevc_el->pic_decoded_lcu_idx);
9546 el_pic = get_pic_by_POC(hevc_el,
9547 hevc_el->curr_POC);
9548 hevc_el->curr_POC = INVALID_POC;
9549 hevc_el->m_pocRandomAccess = MAX_INT;
9550 flush_output(hevc_el, el_pic);
9551				hevc_el->decoded_poc = INVALID_POC; /*
9552				flush_output already called */
9553 hevc_el->decoding_pic = NULL;
9554 hevc->no_switch_dvlayer_count = 0;
9555 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
9556 hevc_print(hevc, 0,
9557 "no el anymore, flush_output el\n");
9558 }
9559 }
9560 hevc->decoded_poc = hevc->curr_POC;
9561 hevc->decoding_pic = NULL;
9562 hevc->dec_result = DEC_RESULT_DONE;
9563 amhevc_stop();
9564 reset_process_time(hevc);
9565 if (aux_data_is_avaible(hevc))
9566 dolby_get_meta(hevc);
9567 if(hevc->cur_pic->slice_type == 2 &&
9568 hevc->vf_pre_count == 0) {
9569 hevc_print(hevc, 0,
9570 "first slice_type %x no_switch_dvlayer_count %x\n",
9571 hevc->cur_pic->slice_type,
9572 hevc->no_switch_dvlayer_count);
9573 goto force_output;
9574 }
9575 vdec_schedule_work(&hevc->work);
9576 }
9577
9578 return IRQ_HANDLED;
9579#endif
9580 }
9581
9582#endif
9583
9584 if (dec_status == HEVC_SEI_DAT) {
9585 if (!hevc->m_ins_flag) {
9586 int payload_type =
9587 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
9588 int payload_size =
9589 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
9590 process_nal_sei(hevc,
9591 payload_type, payload_size);
9592 }
9593 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
9594 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
9595 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
9596 int parse_type = HEVC_DISCARD_NAL;
9597
9598 hevc->error_watchdog_count = 0;
9599 hevc->error_skip_nal_wt_cnt = 0;
9600#ifdef MULTI_INSTANCE_SUPPORT
9601 if (hevc->m_ins_flag)
9602 reset_process_time(hevc);
9603#endif
9604 if (slice_parse_begin > 0 &&
9605 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
9606 hevc_print(hevc, 0,
9607 "nal type %d, discard %d\n", naltype,
9608 slice_parse_begin);
9609 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
9610 slice_parse_begin--;
9611 }
9612 if (naltype == NAL_UNIT_EOS) {
9613 struct PIC_s *pic;
9614
9615 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
9616#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9617 if ((vdec->master || vdec->slave) &&
9618 aux_data_is_avaible(hevc)) {
9619 if (hevc->decoding_pic)
9620 dolby_get_meta(hevc);
9621 }
9622#endif
9623 check_pic_decoded_error(hevc,
9624 hevc->pic_decoded_lcu_idx);
9625 pic = get_pic_by_POC(hevc, hevc->curr_POC);
9626 hevc->curr_POC = INVALID_POC;
9627 /* add to fix RAP_B_Bossen_1 */
9628 hevc->m_pocRandomAccess = MAX_INT;
9629 flush_output(hevc, pic);
9630 clear_poc_flag(hevc);
9631 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
9632 			/* Interrupt Amrisc to execute */
9633 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9634#ifdef MULTI_INSTANCE_SUPPORT
9635 if (hevc->m_ins_flag) {
9636 hevc->decoded_poc = INVALID_POC; /*
9637 					flush_output already called */
9638 hevc->decoding_pic = NULL;
9639 hevc->dec_result = DEC_RESULT_DONE;
9640 amhevc_stop();
9641
9642 vdec_schedule_work(&hevc->work);
9643 }
9644#endif
9645 return IRQ_HANDLED;
9646 }
9647
9648 if (
9649#ifdef MULTI_INSTANCE_SUPPORT
9650 (!hevc->m_ins_flag) &&
9651#endif
9652 hevc->error_skip_nal_count > 0) {
9653 hevc_print(hevc, 0,
9654 "nal type %d, discard %d\n", naltype,
9655 hevc->error_skip_nal_count);
9656 hevc->error_skip_nal_count--;
9657 if (hevc->error_skip_nal_count == 0) {
9658 hevc_recover(hevc);
9659 hevc->error_flag = 0;
9660 if ((error_handle_policy & 0x2) == 0) {
9661 hevc->have_vps = 1;
9662 hevc->have_sps = 1;
9663 hevc->have_pps = 1;
9664 }
9665 return IRQ_HANDLED;
9666 }
9667 } else if (naltype == NAL_UNIT_VPS) {
9668 parse_type = HEVC_NAL_UNIT_VPS;
9669 hevc->have_vps = 1;
9670#ifdef ERROR_HANDLE_DEBUG
9671 if (dbg_nal_skip_flag & 1)
9672 parse_type = HEVC_DISCARD_NAL;
9673#endif
9674 } else if (hevc->have_vps) {
9675 if (naltype == NAL_UNIT_SPS) {
9676 parse_type = HEVC_NAL_UNIT_SPS;
9677 hevc->have_sps = 1;
9678#ifdef ERROR_HANDLE_DEBUG
9679 if (dbg_nal_skip_flag & 2)
9680 parse_type = HEVC_DISCARD_NAL;
9681#endif
9682 } else if (naltype == NAL_UNIT_PPS) {
9683 parse_type = HEVC_NAL_UNIT_PPS;
9684 hevc->have_pps = 1;
9685#ifdef ERROR_HANDLE_DEBUG
9686 if (dbg_nal_skip_flag & 4)
9687 parse_type = HEVC_DISCARD_NAL;
9688#endif
9689 } else if (hevc->have_sps && hevc->have_pps) {
9690 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
9691
9692 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
9693 (naltype ==
9694 NAL_UNIT_CODED_SLICE_IDR_N_LP)
9695 || (naltype ==
9696 NAL_UNIT_CODED_SLICE_CRA)
9697 || (naltype ==
9698 NAL_UNIT_CODED_SLICE_BLA)
9699 || (naltype ==
9700 NAL_UNIT_CODED_SLICE_BLANT)
9701 || (naltype ==
9702 NAL_UNIT_CODED_SLICE_BLA_N_LP)
9703 ) {
9704 if (slice_parse_begin > 0) {
9705 hevc_print(hevc, 0,
9706 "discard %d, for debugging\n",
9707 slice_parse_begin);
9708 slice_parse_begin--;
9709 } else {
9710 parse_type = seg;
9711 }
9712 hevc->have_valid_start_slice = 1;
9713 } else if (naltype <=
9714 NAL_UNIT_CODED_SLICE_CRA
9715 && (hevc->have_valid_start_slice
9716 || (hevc->PB_skip_mode != 3))) {
9717 if (slice_parse_begin > 0) {
9718 hevc_print(hevc, 0,
9719 "discard %d, dd\n",
9720 slice_parse_begin);
9721 slice_parse_begin--;
9722 } else
9723 parse_type = seg;
9724
9725 }
9726 }
9727 }
9728 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
9729 && hevc->have_valid_start_slice &&
9730 hevc->error_flag == 0) {
9731 if ((get_dbg_flag(hevc) &
9732 H265_DEBUG_MAN_SEARCH_NAL) == 0
9733 /* && (!hevc->m_ins_flag)*/) {
9734 				/* auto parser NAL; do not check
9735 				 * vps/sps/pps/idr
9736 */
9737 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
9738 }
9739
9740 if ((get_dbg_flag(hevc) &
9741 H265_DEBUG_NO_EOS_SEARCH_DONE)
9742#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9743 || vdec->master
9744 || vdec->slave
9745#endif
9746 ) {
9747 WRITE_VREG(NAL_SEARCH_CTL,
9748 READ_VREG(NAL_SEARCH_CTL) |
9749 0x10000);
9750 }
9751 WRITE_VREG(NAL_SEARCH_CTL,
9752 READ_VREG(NAL_SEARCH_CTL)
9753 | ((parser_sei_enable & 0x7) << 17));
9754#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9755 WRITE_VREG(NAL_SEARCH_CTL,
9756 READ_VREG(NAL_SEARCH_CTL) |
9757 ((parser_dolby_vision_enable & 0x1) << 20));
9758#endif
9759 config_decode_mode(hevc);
9760 }
9761
9762 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
9763 hevc_print(hevc, 0,
9764 "naltype = %d parse_type %d\n %d %d %d %d\n",
9765 naltype, parse_type, hevc->have_vps,
9766 hevc->have_sps, hevc->have_pps,
9767 hevc->have_valid_start_slice);
9768 }
9769
9770 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
9771 		/* Interrupt Amrisc to execute */
9772 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9773#ifdef MULTI_INSTANCE_SUPPORT
9774 if (hevc->m_ins_flag)
9775 start_process_time(hevc);
9776#endif
9777 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
9778#ifdef MULTI_INSTANCE_SUPPORT
9779 if (hevc->m_ins_flag) {
9780 reset_process_time(hevc);
9781 read_decode_info(hevc);
9782
9783 }
9784#endif
9785 if (hevc->start_decoding_time > 0) {
9786 u32 process_time = 1000*
9787 (jiffies - hevc->start_decoding_time)/HZ;
9788 if (process_time > max_decoding_time)
9789 max_decoding_time = process_time;
9790 }
9791
9792 hevc->error_watchdog_count = 0;
9793 if (hevc->pic_list_init_flag == 2) {
9794 hevc->pic_list_init_flag = 3;
9795 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
9796 } else if (hevc->wait_buf == 0) {
9797 u32 vui_time_scale;
9798 u32 vui_num_units_in_tick;
9799 unsigned char reconfig_flag = 0;
9800
9801 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
9802 get_rpm_param(&hevc->param);
9803 else {
9804
9805 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
9806 int ii;
9807
9808 for (ii = 0; ii < 4; ii++) {
9809 hevc->param.l.data[i + ii] =
9810 hevc->rpm_ptr[i + 3
9811 - ii];
9812 }
9813 }
9814#ifdef SEND_LMEM_WITH_RPM
9815 check_head_error(hevc);
9816#endif
9817 }
9818 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
9819 hevc_print(hevc, 0,
9820 "rpm_param: (%d)\n", hevc->slice_idx);
9821 hevc->slice_idx++;
9822 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
9823 hevc_print_cont(hevc, 0,
9824 "%04x ", hevc->param.l.data[i]);
9825 if (((i + 1) & 0xf) == 0)
9826 hevc_print_cont(hevc, 0, "\n");
9827 }
9828
9829 hevc_print(hevc, 0,
9830 "vui_timing_info: %x, %x, %x, %x\n",
9831 hevc->param.p.vui_num_units_in_tick_hi,
9832 hevc->param.p.vui_num_units_in_tick_lo,
9833 hevc->param.p.vui_time_scale_hi,
9834 hevc->param.p.vui_time_scale_lo);
9835 }
9836
9837 if (hevc->is_used_v4l) {
9838 struct aml_vcodec_ctx *ctx =
9839 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
9840
9841 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
9842 struct aml_vdec_pic_infos info;
9843
9844 hevc->frame_width = hevc->param.p.pic_width_in_luma_samples;
9845 hevc->frame_height = hevc->param.p.pic_height_in_luma_samples;
9846 info.visible_width = hevc->frame_width;
9847 info.visible_height = hevc->frame_height;
9848 info.coded_width = ALIGN(hevc->frame_width, 32);
9849 info.coded_height = ALIGN(hevc->frame_height, 32);
9850 info.dpb_size = get_work_pic_num(hevc);
9851 hevc->v4l_params_parsed = true;
9852 					/* notify the v4l2 codec. */
9853 vdec_v4l_set_pic_infos(ctx, &info);
9854 }
9855 }
9856
9857 if (
9858#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9859 vdec->master == NULL &&
9860 vdec->slave == NULL &&
9861#endif
9862 aux_data_is_avaible(hevc)
9863 ) {
9864
9865 if (get_dbg_flag(hevc) &
9866 H265_DEBUG_BUFMGR_MORE)
9867 dump_aux_buf(hevc);
9868 }
9869
9870 vui_time_scale =
9871 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
9872 hevc->param.p.vui_time_scale_lo;
9873 vui_num_units_in_tick =
9874 (u32)(hevc->param.
9875 p.vui_num_units_in_tick_hi << 16) |
9876 hevc->param.
9877 p.vui_num_units_in_tick_lo;
9878 if (hevc->bit_depth_luma !=
9879 ((hevc->param.p.bit_depth & 0xf) + 8)) {
9880 reconfig_flag = 1;
9881 hevc_print(hevc, 0, "Bit depth luma = %d\n",
9882 (hevc->param.p.bit_depth & 0xf) + 8);
9883 }
9884 if (hevc->bit_depth_chroma !=
9885 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
9886 reconfig_flag = 1;
9887 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
9888 ((hevc->param.p.bit_depth >> 4) &
9889 0xf) + 8);
9890 }
9891 hevc->bit_depth_luma =
9892 (hevc->param.p.bit_depth & 0xf) + 8;
9893 hevc->bit_depth_chroma =
9894 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
9895 bit_depth_luma = hevc->bit_depth_luma;
9896 bit_depth_chroma = hevc->bit_depth_chroma;
9897#ifdef SUPPORT_10BIT
9898 if (hevc->bit_depth_luma == 8 &&
9899 hevc->bit_depth_chroma == 8 &&
9900 enable_mem_saving)
9901 hevc->mem_saving_mode = 1;
9902 else
9903 hevc->mem_saving_mode = 0;
9904#endif
9905 if (reconfig_flag &&
9906 (get_double_write_mode(hevc) & 0x10) == 0)
9907 init_decode_head_hw(hevc);
9908
9909 if ((vui_time_scale != 0)
9910 && (vui_num_units_in_tick != 0)) {
9911 hevc->frame_dur =
9912 div_u64(96000ULL *
9913 vui_num_units_in_tick,
9914 vui_time_scale);
9915 if (hevc->get_frame_dur != true)
9916 vdec_schedule_work(
9917 &hevc->notify_work);
9918
9919 hevc->get_frame_dur = true;
9920#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
9921 gvs->frame_dur = hevc->frame_dur;
9922#endif
9923 }
9924
9925 if (hevc->video_signal_type !=
9926 ((hevc->param.p.video_signal_type << 16)
9927 | hevc->param.p.color_description)) {
9928 u32 v = hevc->param.p.video_signal_type;
9929 u32 c = hevc->param.p.color_description;
9930#if 0
9931 if (v & 0x2000) {
9932 hevc_print(hevc, 0,
9933 "video_signal_type present:\n");
9934 hevc_print(hevc, 0, " %s %s\n",
9935 video_format_names[(v >> 10) & 7],
9936 ((v >> 9) & 1) ?
9937 "full_range" : "limited");
9938 if (v & 0x100) {
9939 hevc_print(hevc, 0,
9940 " color_description present:\n");
9941 hevc_print(hevc, 0,
9942 " color_primarie = %s\n",
9943 color_primaries_names
9944 [v & 0xff]);
9945 hevc_print(hevc, 0,
9946 " transfer_characteristic = %s\n",
9947 transfer_characteristics_names
9948 [(c >> 8) & 0xff]);
9949 hevc_print(hevc, 0,
9950 " matrix_coefficient = %s\n",
9951 matrix_coeffs_names[c & 0xff]);
9952 }
9953 }
9954#endif
9955 hevc->video_signal_type = (v << 16) | c;
9956 video_signal_type = hevc->video_signal_type;
9957 }
9958
9959 if (use_cma &&
9960 (hevc->param.p.slice_segment_address == 0)
9961 && (hevc->pic_list_init_flag == 0)) {
9962 int log = hevc->param.p.log2_min_coding_block_size_minus3;
9963 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
9964
9965 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
9966 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
9967 hevc->lcu_size = 1 << (log + 3 + log_s);
9968 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
9969 if (hevc->pic_w == 0 || hevc->pic_h == 0
9970 || hevc->lcu_size == 0
9971 || is_oversize(hevc->pic_w, hevc->pic_h)
9972 || (!hevc->skip_first_nal &&
9973 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
9974 				/* skip, search next start code */
9975 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
9976 & (~0x2));
9977 				if (!hevc->skip_first_nal &&
9978 (hevc->pic_h == 96) && (hevc->pic_w == 160))
9979 hevc->skip_first_nal = 1;
9980 hevc->skip_flag = 1;
9981 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9982 				/* Interrupt Amrisc to execute */
9983 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9984#ifdef MULTI_INSTANCE_SUPPORT
9985 if (hevc->m_ins_flag)
9986 start_process_time(hevc);
9987#endif
9988 } else {
9989 hevc->sps_num_reorder_pics_0 =
9990 hevc->param.p.sps_num_reorder_pics_0;
9991 hevc->pic_list_init_flag = 1;
9992#ifdef MULTI_INSTANCE_SUPPORT
9993 if (hevc->m_ins_flag) {
9994 vdec_schedule_work(&hevc->work);
9995 } else
9996#endif
9997 up(&h265_sema);
9998 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
9999 }
10000 return IRQ_HANDLED;
10001 }
10002
10003		}
10004 ret =
10005 hevc_slice_segment_header_process(hevc,
10006 &hevc->param, decode_pic_begin);
10007 if (ret < 0) {
10008#ifdef MULTI_INSTANCE_SUPPORT
10009 if (hevc->m_ins_flag) {
10010 hevc->wait_buf = 0;
10011 hevc->dec_result = DEC_RESULT_AGAIN;
10012 amhevc_stop();
10013 restore_decode_state(hevc);
10014 reset_process_time(hevc);
10015 vdec_schedule_work(&hevc->work);
10016 return IRQ_HANDLED;
10017 }
10018#else
10019 ;
10020#endif
10021 } else if (ret == 0) {
10022 if ((hevc->new_pic) && (hevc->cur_pic)) {
10023 hevc->cur_pic->stream_offset =
10024 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10025 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10026 "read stream_offset = 0x%x\n",
10027 hevc->cur_pic->stream_offset);
10028 hevc->cur_pic->aspect_ratio_idc =
10029 hevc->param.p.aspect_ratio_idc;
10030 hevc->cur_pic->sar_width =
10031 hevc->param.p.sar_width;
10032 hevc->cur_pic->sar_height =
10033 hevc->param.p.sar_height;
10034 }
10035
10036 WRITE_VREG(HEVC_DEC_STATUS_REG,
10037 HEVC_CODED_SLICE_SEGMENT_DAT);
10038 		/* Interrupt Amrisc to execute */
10039 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10040
10041 hevc->start_decoding_time = jiffies;
10042#ifdef MULTI_INSTANCE_SUPPORT
10043 if (hevc->m_ins_flag)
10044 start_process_time(hevc);
10045#endif
10046#if 1
10047 			/* TODO: copy aux data to hevc->cur_pic */
10048#endif
10049#ifdef MULTI_INSTANCE_SUPPORT
10050 } else if (hevc->m_ins_flag) {
10051 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10052 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10053 __func__, ret);
10054 hevc->decoded_poc = INVALID_POC;
10055 hevc->decoding_pic = NULL;
10056 hevc->dec_result = DEC_RESULT_DONE;
10057 amhevc_stop();
10058 reset_process_time(hevc);
10059 vdec_schedule_work(&hevc->work);
10060#endif
10061 } else {
10062 /* skip, search next start code */
10063#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10064 gvs->drop_frame_count++;
10065#endif
10066 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10067 hevc->skip_flag = 1;
10068 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10069 			/* Interrupt Amrisc to execute */
10070 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10071 }
10072
10073 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10074 		hevc_print(hevc, 0, "hevc decode oversize!!\n");
10075#ifdef MULTI_INSTANCE_SUPPORT
10076 if (!hevc->m_ins_flag)
10077 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10078 H265_DEBUG_DIS_SYS_ERROR_PROC);
10079#endif
10080 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10081 }
10082 return IRQ_HANDLED;
10083}
10084
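/*
 * Clear HEVC_SHIFT_STATUS and poll bit 1 of HEVC_STREAM_CONTROL until the
 * stream search engine goes idle, giving up after about 2 seconds
 * (100 polls of 20ms) with a timeout message.
 */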
10085static void wait_hevc_search_done(struct hevc_state_s *hevc)
10086{
10087 int count = 0;
10088 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10089 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10090 msleep(20);
10091 count++;
10092 if (count > 100) {
10093 hevc_print(hevc, 0, "%s timeout\n", __func__);
10094 break;
10095 }
10096 }
10097}
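/*
 * Top-half interrupt handler: latch HEVC_DEC_STATUS_REG, service ucode
 * debug tags in DEBUG_REG1 (LMEM dump and pause points), handle the
 * single-instance over-decode case, and defer everything else to the
 * threaded handler by returning IRQ_WAKE_THREAD.
 */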
10098static irqreturn_t vh265_isr(int irq, void *data)
10099{
10100 int i, temp;
10101 unsigned int dec_status;
10102 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10103 u32 debug_tag;
10104 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10105
10106 if (hevc->init_flag == 0)
10107 return IRQ_HANDLED;
10108 hevc->dec_status = dec_status;
10109 if (is_log_enable(hevc))
10110 add_log(hevc,
10111 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10112 dec_status, READ_HREG(HEVC_DECODE_INFO),
10113 READ_VREG(HEVC_MPRED_CURR_LCU),
10114 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10115 READ_VREG(HEVC_SHIFT_STATUS));
10116
10117 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10118 hevc_print(hevc, 0,
10119 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10120 dec_status, READ_HREG(HEVC_DECODE_INFO),
10121 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10122 READ_VREG(HEVC_SHIFT_STATUS));
10123
10124 debug_tag = READ_HREG(DEBUG_REG1);
10125 if (debug_tag & 0x10000) {
10126 hevc_print(hevc, 0,
10127 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10128
10129 if (hevc->mmu_enable)
10130 temp = 0x500;
10131 else
10132 temp = 0x400;
10133 for (i = 0; i < temp; i += 4) {
10134 int ii;
10135 if ((i & 0xf) == 0)
10136 hevc_print_cont(hevc, 0, "%03x: ", i);
10137 for (ii = 0; ii < 4; ii++) {
10138 hevc_print_cont(hevc, 0, "%04x ",
10139 hevc->lmem_ptr[i + 3 - ii]);
10140 }
10141 if (((i + ii) & 0xf) == 0)
10142 hevc_print_cont(hevc, 0, "\n");
10143 }
10144
10145 if (((udebug_pause_pos & 0xffff)
10146 == (debug_tag & 0xffff)) &&
10147 (udebug_pause_decode_idx == 0 ||
10148 udebug_pause_decode_idx == hevc->decode_idx) &&
10149 (udebug_pause_val == 0 ||
10150 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10151 udebug_pause_pos &= 0xffff;
10152 hevc->ucode_pause_pos = udebug_pause_pos;
10153 }
10154 else if (debug_tag & 0x20000)
10155 hevc->ucode_pause_pos = 0xffffffff;
10156 if (hevc->ucode_pause_pos)
10157 reset_process_time(hevc);
10158 else
10159 WRITE_HREG(DEBUG_REG1, 0);
10160 } else if (debug_tag != 0) {
10161 hevc_print(hevc, 0,
10162 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10163 READ_HREG(DEBUG_REG2),
10164 READ_VREG(HEVC_STREAM_LEVEL),
10165 READ_VREG(HEVC_STREAM_WR_PTR),
10166 READ_VREG(HEVC_STREAM_RD_PTR));
10167 if (((udebug_pause_pos & 0xffff)
10168 == (debug_tag & 0xffff)) &&
10169 (udebug_pause_decode_idx == 0 ||
10170 udebug_pause_decode_idx == hevc->decode_idx) &&
10171 (udebug_pause_val == 0 ||
10172 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10173 udebug_pause_pos &= 0xffff;
10174 hevc->ucode_pause_pos = udebug_pause_pos;
10175 }
10176 if (hevc->ucode_pause_pos)
10177 reset_process_time(hevc);
10178 else
10179 WRITE_HREG(DEBUG_REG1, 0);
10180 return IRQ_HANDLED;
10181 }
10182
10183
10184 if (hevc->pic_list_init_flag == 1)
10185 return IRQ_HANDLED;
10186
10187 if (!hevc->m_ins_flag) {
10188 if (dec_status == HEVC_OVER_DECODE) {
10189 hevc->over_decode = 1;
10190 hevc_print(hevc, 0,
10191 				"isr: over decode\n");
10192 			WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10193 return IRQ_HANDLED;
10194 }
10195 }
10196
10197 return IRQ_WAKE_THREAD;
10198
10199}
10200
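/*
 * Work item: pass the current width/height/fps to hevc_source_changed()
 * so the decoder clock can be adjusted, and remember the resolution the
 * clock was last configured for.
 */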
10201static void vh265_set_clk(struct work_struct *work)
10202{
10203 struct hevc_state_s *hevc = container_of(work,
10204 struct hevc_state_s, set_clk_work);
10205
10206 int fps = 96000 / hevc->frame_dur;
10207
10208 if (hevc_source_changed(VFORMAT_HEVC,
10209 hevc->frame_width, hevc->frame_height, fps) > 0)
10210 hevc->saved_resolution = hevc->frame_width *
10211 hevc->frame_height * fps;
10212}
10213
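/*
 * Periodic watchdog, rearmed every PUT_INTERVAL: in multi-instance mode
 * it detects stalled decodes by watching the current LCU index against
 * decode_timeout_val; in single-instance mode it drives the error and
 * NAL-skip watchdogs. It also services a few debug hooks (register
 * read/write, pic list dump, ucode pause) and schedules a clock update
 * when the resolution or frame rate has changed.
 */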
10214static void vh265_check_timer_func(unsigned long arg)
10215{
10216 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10217 struct timer_list *timer = &hevc->timer;
10218 unsigned char empty_flag;
10219 unsigned int buf_level;
10220
10221 enum receviver_start_e state = RECEIVER_INACTIVE;
10222
10223 if (hevc->init_flag == 0) {
10224 if (hevc->stat & STAT_TIMER_ARM) {
10225 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10226 }
10227 return;
10228 }
10229#ifdef MULTI_INSTANCE_SUPPORT
10230 if (hevc->m_ins_flag &&
10231 (get_dbg_flag(hevc) &
10232 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10233 hw_to_vdec(hevc)->next_status ==
10234 VDEC_STATUS_DISCONNECTED) {
10235 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10236 vdec_schedule_work(&hevc->work);
10237 hevc_print(hevc,
10238 0, "vdec requested to be disconnected\n");
10239 return;
10240 }
10241
10242 if (hevc->m_ins_flag) {
10243 if ((input_frame_based(hw_to_vdec(hevc)) ||
10244 (READ_VREG(HEVC_STREAM_LEVEL) > 0xb0)) &&
10245 ((get_dbg_flag(hevc) &
10246 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10247 (decode_timeout_val > 0) &&
10248 (hevc->start_process_time > 0) &&
10249 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10250 > decode_timeout_val)
10251 ) {
10252 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10253 int current_lcu_idx =
10254 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10255 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10256 if (hevc->last_lcu_idx == current_lcu_idx) {
10257 if (hevc->decode_timeout_count > 0)
10258 hevc->decode_timeout_count--;
10259 if (hevc->decode_timeout_count == 0)
10260 timeout_process(hevc);
10261 } else
10262 restart_process_time(hevc);
10263 hevc->last_lcu_idx = current_lcu_idx;
10264 } else {
10265 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10266 timeout_process(hevc);
10267 }
10268 }
10269 } else {
10270#endif
10271 if (hevc->m_ins_flag == 0 &&
10272 vf_get_receiver(hevc->provider_name)) {
10273 state =
10274 vf_notify_receiver(hevc->provider_name,
10275 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10276 NULL);
10277 if ((state == RECEIVER_STATE_NULL)
10278 || (state == RECEIVER_STATE_NONE))
10279 state = RECEIVER_INACTIVE;
10280 } else
10281 state = RECEIVER_INACTIVE;
10282
10283 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10284 /* error watchdog */
10285 if (hevc->m_ins_flag == 0 &&
10286 (empty_flag == 0)
10287 && (hevc->pic_list_init_flag == 0
10288 || hevc->pic_list_init_flag
10289 == 3)) {
10290 /* decoder has input */
10291 if ((get_dbg_flag(hevc) &
10292 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10293
10294 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10295 /* receiver has no buffer to recycle */
10296 if ((state == RECEIVER_INACTIVE) &&
10297 (kfifo_is_empty(&hevc->display_q) &&
10298 buf_level > 0x200)
10299 ) {
10300 if (hevc->error_flag == 0) {
10301 hevc->error_watchdog_count++;
10302 if (hevc->error_watchdog_count ==
10303 error_handle_threshold) {
10304 hevc_print(hevc, 0,
10305 "H265 dec err local reset.\n");
10306 hevc->error_flag = 1;
10307 hevc->error_watchdog_count = 0;
10308 hevc->error_skip_nal_wt_cnt = 0;
10309 hevc->
10310 error_system_watchdog_count++;
10311 WRITE_VREG
10312 (HEVC_ASSIST_MBOX0_IRQ_REG,
10313 0x1);
10314 }
10315 } else if (hevc->error_flag == 2) {
10316 int th =
10317 error_handle_nal_skip_threshold;
10318 hevc->error_skip_nal_wt_cnt++;
10319 if (hevc->error_skip_nal_wt_cnt
10320 == th) {
10321 hevc->error_flag = 3;
10322 hevc->error_watchdog_count = 0;
10323 hevc->
10324 error_skip_nal_wt_cnt = 0;
10325 WRITE_VREG
10326 (HEVC_ASSIST_MBOX0_IRQ_REG,
10327 0x1);
10328 }
10329 }
10330 }
10331 }
10332
10333 if ((get_dbg_flag(hevc)
10334 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10335 /* receiver has no buffer to recycle */
10336 if ((state == RECEIVER_INACTIVE) &&
10337 (kfifo_is_empty(&hevc->display_q))
10338 ) { /* no buffer to recycle */
10339 if ((get_dbg_flag(hevc) &
10340 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10341 0)
10342 hevc->error_system_watchdog_count++;
10343 if (hevc->error_system_watchdog_count ==
10344 error_handle_system_threshold) {
10345 /* and it lasts for a while */
10346 hevc_print(hevc, 0,
10347 "H265 dec fatal error watchdog.\n");
10348 hevc->
10349 error_system_watchdog_count = 0;
10350 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10351 }
10352 }
10353 } else {
10354 hevc->error_watchdog_count = 0;
10355 hevc->error_system_watchdog_count = 0;
10356 }
10357#ifdef MULTI_INSTANCE_SUPPORT
10358 }
10359#endif
10360 if ((hevc->ucode_pause_pos != 0) &&
10361 (hevc->ucode_pause_pos != 0xffffffff) &&
10362 udebug_pause_pos != hevc->ucode_pause_pos) {
10363 hevc->ucode_pause_pos = 0;
10364 WRITE_HREG(DEBUG_REG1, 0);
10365 }
10366
10367 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10368 dump_pic_list(hevc);
10369 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10370 }
10371 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10372 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10373 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10374 }
10375#ifdef TEST_NO_BUF
10376 if (hevc->wait_buf)
10377 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10378#endif
10379 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
10380 hevc->error_skip_nal_count = error_skip_nal_count;
10381 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10382
10383 debug &= ~H265_DEBUG_HW_RESET;
10384 }
10385
10386#ifdef ERROR_HANDLE_DEBUG
10387 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
10388 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
10389 dbg_nal_skip_count &= ~0x10000;
10390 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10391 }
10392#endif
10393
10394 if (radr != 0) {
10395 if (rval != 0) {
10396 WRITE_VREG(radr, rval);
10397 hevc_print(hevc, 0,
10398 "WRITE_VREG(%x,%x)\n", radr, rval);
10399 } else
10400 hevc_print(hevc, 0,
10401 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
10402 rval = 0;
10403 radr = 0;
10404 }
10405 if (dbg_cmd != 0) {
10406 if (dbg_cmd == 1) {
10407 u32 disp_laddr;
10408
10409 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
10410 get_double_write_mode(hevc) == 0) {
10411 disp_laddr =
10412 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
10413 } else {
10414 struct canvas_s cur_canvas;
10415
10416 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
10417 & 0xff), &cur_canvas);
10418 disp_laddr = cur_canvas.addr;
10419 }
10420 hevc_print(hevc, 0,
10421 "current displayed buffer address %x\r\n",
10422 disp_laddr);
10423 }
10424 dbg_cmd = 0;
10425 }
10426 	/* do not change the clock right after start. */
10427 if (hevc->m_ins_flag == 0 &&
10428 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
10429 hevc->frame_dur > 0 && hevc->saved_resolution !=
10430 hevc->frame_width * hevc->frame_height *
10431 (96000 / hevc->frame_dur))
10432 vdec_schedule_work(&hevc->set_clk_work);
10433
10434 mod_timer(timer, jiffies + PUT_INTERVAL);
10435}
10436
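/*
 * Kernel thread used on the use_cma path: waits on h265_sema and performs
 * picture list init (pic_list_init_flag 1 -> 2) or uninit outside of
 * interrupt context.
 */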
10437static int h265_task_handle(void *data)
10438{
10439 int ret = 0;
10440 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10441
10442 set_user_nice(current, -10);
10443 while (1) {
10444 if (use_cma == 0) {
10445 hevc_print(hevc, 0,
10446 				"ERROR: use_cma cannot be changed dynamically\n");
10447 }
10448 ret = down_interruptible(&h265_sema);
10449 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
10450 init_pic_list(hevc);
10451 init_pic_list_hw(hevc);
10452 init_buf_spec(hevc);
10453 hevc->pic_list_init_flag = 2;
10454 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
10455
10456 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10457
10458 }
10459
10460 if (hevc->uninit_list) {
10461 /*USE_BUF_BLOCK*/
10462 uninit_pic_list(hevc);
10463 hevc_print(hevc, 0, "uninit list\n");
10464 hevc->uninit_list = 0;
10465#ifdef USE_UNINIT_SEMA
10466 if (use_cma) {
10467 up(&hevc->h265_uninit_done_sema);
10468 while (!kthread_should_stop())
10469 msleep(1);
10470 break;
10471 }
10472#endif
10473 }
10474 }
10475
10476 return 0;
10477}
10478
10479void vh265_free_cmabuf(void)
10480{
10481 struct hevc_state_s *hevc = gHevc;
10482
10483 mutex_lock(&vh265_mutex);
10484
10485 if (hevc->init_flag) {
10486 mutex_unlock(&vh265_mutex);
10487 return;
10488 }
10489
10490 mutex_unlock(&vh265_mutex);
10491}
10492
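/* Fill vdec_info with the current frame size, rate, status and statistics. */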
10493#ifdef MULTI_INSTANCE_SUPPORT
10494int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
10495#else
10496int vh265_dec_status(struct vdec_info *vstatus)
10497#endif
10498{
10499#ifdef MULTI_INSTANCE_SUPPORT
10500 struct hevc_state_s *hevc =
10501 (struct hevc_state_s *)vdec->private;
10502#else
10503 struct hevc_state_s *hevc = gHevc;
10504#endif
10505 if (!hevc)
10506 return -1;
10507
10508 vstatus->frame_width = hevc->frame_width;
10509 vstatus->frame_height = hevc->frame_height;
10510 if (hevc->frame_dur != 0)
10511 vstatus->frame_rate = 96000 / hevc->frame_dur;
10512 else
10513 vstatus->frame_rate = -1;
10514 vstatus->error_count = 0;
10515 vstatus->status = hevc->stat | hevc->fatal_error;
10516#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10518 vstatus->frame_dur = hevc->frame_dur;
10519 if (gvs) {
10520 vstatus->bit_rate = gvs->bit_rate;
10521 vstatus->frame_data = gvs->frame_data;
10522 vstatus->total_data = gvs->total_data;
10523 vstatus->frame_count = gvs->frame_count;
10524 vstatus->error_frame_count = gvs->error_frame_count;
10525 vstatus->drop_frame_count = gvs->drop_frame_count;
10527 vstatus->samp_cnt = gvs->samp_cnt;
10528 vstatus->offset = gvs->offset;
10529 }
10530 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
10531 "%s", DRIVER_NAME);
10532#endif
10533 vstatus->ratio_control = hevc->ratio_control;
10534 return 0;
10535}
10536
10537int vh265_set_isreset(struct vdec_s *vdec, int isreset)
10538{
10539 is_reset = isreset;
10540 return 0;
10541}
10542
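/* Allocate the global vdec_info block used for status reporting. */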
10543static int vh265_vdec_info_init(void)
10544{
10545 gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
10546 if (NULL == gvs) {
10547 		pr_info("failed to allocate vdec status struct.\n");
10548 return -ENOMEM;
10549 }
10550 return 0;
10551}
10552
10553#if 0
10554static void H265_DECODE_INIT(void)
10555{
10556 /* enable hevc clocks */
10557 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
10558 /* *************************************************************** */
10559 /* Power ON HEVC */
10560 /* *************************************************************** */
10561 /* Powerup HEVC */
10562 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
10563 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
10564 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
10565 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
10566 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
10567 /* remove isolations */
10568 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
10569 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
10570
10571}
10572#endif
10573
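/*
 * Program HEVC_DECODE_MODE/HEVC_DECODE_MODE2 according to single vs.
 * multi-instance operation, frame vs. stream based input and, when
 * Dolby Vision is enabled, the base/enhancement layer role.
 */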
10574static void config_decode_mode(struct hevc_state_s *hevc)
10575{
10576#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10577 struct vdec_s *vdec = hw_to_vdec(hevc);
10578#endif
10579 unsigned decode_mode;
10580 if (!hevc->m_ins_flag)
10581 decode_mode = DECODE_MODE_SINGLE;
10582 else if (vdec_frame_based(hw_to_vdec(hevc)))
10583 decode_mode =
10584 DECODE_MODE_MULTI_FRAMEBASE;
10585#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10586 else if (vdec->slave) {
10587 if (force_bypass_dvenl & 0x80000000)
10588 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
10589 else
10590 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
10591 if (dolby_meta_with_el && hevc->bypass_dvenl) {
10592 hevc->bypass_dvenl = 0;
10593 hevc_print(hevc, 0,
10594 "NOT support bypass_dvenl when meta_with_el\n");
10595 }
10596 if (hevc->bypass_dvenl)
10597 decode_mode =
10598 (hevc->start_parser_type << 8)
10599 | DECODE_MODE_MULTI_STREAMBASE;
10600 else
10601 decode_mode =
10602 (hevc->start_parser_type << 8)
10603 | DECODE_MODE_MULTI_DVBAL;
10604 } else if (vdec->master)
10605 decode_mode =
10606 (hevc->start_parser_type << 8)
10607 | DECODE_MODE_MULTI_DVENL;
10608#endif
10609 else
10610 decode_mode =
10611 DECODE_MODE_MULTI_STREAMBASE;
10612
10613 if (hevc->m_ins_flag)
10614 decode_mode |=
10615 (hevc->start_decoding_flag << 16);
10616 /* set MBX0 interrupt flag */
10617 decode_mode |= (0x80 << 24);
10618 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
10619 WRITE_VREG(HEVC_DECODE_MODE2,
10620 hevc->rps_set_id);
10621}
10622
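/*
 * Hardware protocol setup before the AMRISC ucode runs: configure the
 * workspace, mailbox interrupts, NAL search control (manual vs. ucode
 * parsing, SEI/DV options), decode mode and aux buffers.
 */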
10623static void vh265_prot_init(struct hevc_state_s *hevc)
10624{
10625#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10626 struct vdec_s *vdec = hw_to_vdec(hevc);
10627#endif
10628 /* H265_DECODE_INIT(); */
10629
10630 hevc_config_work_space_hw(hevc);
10631
10632 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
10633
10634 WRITE_VREG(HEVC_WAIT_FLAG, 1);
10635
10636 /* WRITE_VREG(P_HEVC_MPSR, 1); */
10637
10638 /* clear mailbox interrupt */
10639 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
10640
10641 /* enable mailbox interrupt */
10642 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
10643
10644 /* disable PSCALE for hardware sharing */
10645 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
10646
10647 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
10648
10649 if ((get_dbg_flag(hevc) &
10650 (H265_DEBUG_MAN_SKIP_NAL |
10651 H265_DEBUG_MAN_SEARCH_NAL))
10652 /*||hevc->m_ins_flag*/
10653 ) {
10654 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
10655 } else {
10656 /* check vps/sps/pps/i-slice in ucode */
10657 unsigned ctl_val = 0x8;
10658 if (hevc->PB_skip_mode == 0)
10659 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
10660 else if (hevc->PB_skip_mode == 3)
10661 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
10662 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
10663 }
10664 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
10665#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10666 || vdec->master
10667 || vdec->slave
10668#endif
10669 )
10670 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
10671
10672 WRITE_VREG(NAL_SEARCH_CTL,
10673 READ_VREG(NAL_SEARCH_CTL)
10674 | ((parser_sei_enable & 0x7) << 17));
10675#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10676 WRITE_VREG(NAL_SEARCH_CTL,
10677 READ_VREG(NAL_SEARCH_CTL) |
10678 ((parser_dolby_vision_enable & 0x1) << 20));
10679#endif
10680 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
10681
10682 config_decode_mode(hevc);
10683 config_aux_buf(hevc);
10684#ifdef SWAP_HEVC_UCODE
10685 if (!tee_enabled() && hevc->is_swap &&
10686 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10687 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
10688 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
10689 }
10690#endif
10691#ifdef DETREFILL_ENABLE
10692 if (hevc->is_swap &&
10693 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10694 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
10695 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
10696 }
10697#endif
10698}
10699
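/*
 * Initialize the software state from the amstream decoder info
 * (resolution, frame duration, i_only policy), reject oversized streams,
 * set up the vframe fifos and finish with hevc_local_init().
 */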
10700static int vh265_local_init(struct hevc_state_s *hevc)
10701{
10702 int i;
10703 int ret = -1;
10704
10705#ifdef DEBUG_PTS
10706 hevc->pts_missed = 0;
10707 hevc->pts_hit = 0;
10708#endif
10709
10710 hevc->saved_resolution = 0;
10711 hevc->get_frame_dur = false;
10712 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
10713 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
10714 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
10715 pr_info("over size : %u x %u.\n",
10716 hevc->frame_width, hevc->frame_height);
10717 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10718 return ret;
10719 }
10720
10721 if (hevc->max_pic_w && hevc->max_pic_h) {
10722 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
10723 ((hevc->max_pic_w * hevc->max_pic_h) >
10724 1920 * 1088) ? true : false;
10725 } else {
10726 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
10727 ((hevc->frame_width * hevc->frame_height) >
10728 1920 * 1088) ? true : false;
10729 }
10730
10731 hevc->frame_dur =
10732 (hevc->vh265_amstream_dec_info.rate ==
10733 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
10734#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10735 gvs->frame_dur = hevc->frame_dur;
10736#endif
10737 if (hevc->frame_width && hevc->frame_height)
10738 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
10739
10740 if (i_only_flag)
10741 hevc->i_only = i_only_flag & 0xff;
10742 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
10743 & 0x08)
10744 hevc->i_only = 0x7;
10745 else
10746 hevc->i_only = 0x0;
10747 hevc->error_watchdog_count = 0;
10748 hevc->sei_present_flag = 0;
10749 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
10750 & 0x40) >> 6;
10751 hevc_print(hevc, 0,
10752 "h265:pts_unstable=%d\n", pts_unstable);
10753/*
10754 *TODO:FOR VERSION
10755 */
10756 hevc_print(hevc, 0,
10757 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
10758 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
10759
10760 if (hevc->frame_dur == 0)
10761 hevc->frame_dur = 96000 / 24;
10762
10763 INIT_KFIFO(hevc->display_q);
10764 INIT_KFIFO(hevc->newframe_q);
10765
10766
10767 for (i = 0; i < VF_POOL_SIZE; i++) {
10768 const struct vframe_s *vf = &hevc->vfpool[i];
10769
10770 hevc->vfpool[i].index = -1;
10771 kfifo_put(&hevc->newframe_q, vf);
10772 }
10773
10774
10775 ret = hevc_local_init(hevc);
10776
10777 return ret;
10778}
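/*
 * Decoder init: load the HEVC firmware (MMU and swap variants on older
 * SoCs), set up timers and work items and, on the single-instance path,
 * register the ISR, vframe provider and watchdog timer before starting
 * the AMRISC core.
 */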
10779#ifdef MULTI_INSTANCE_SUPPORT
10780static s32 vh265_init(struct vdec_s *vdec)
10781{
10782 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
10783#else
10784static s32 vh265_init(struct hevc_state_s *hevc)
10785{
10786
10787#endif
10788 int ret, size = -1;
10789 int fw_size = 0x1000 * 16;
10790 struct firmware_s *fw = NULL;
10791
10792 init_timer(&hevc->timer);
10793
10794 hevc->stat |= STAT_TIMER_INIT;
10795
10796 if (hevc->m_ins_flag) {
10797#ifdef USE_UNINIT_SEMA
10798 sema_init(&hevc->h265_uninit_done_sema, 0);
10799#endif
10800 INIT_WORK(&hevc->work, vh265_work);
10801 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
10802 }
10803
10804 if (vh265_local_init(hevc) < 0)
10805 return -EBUSY;
10806
10807 mutex_init(&hevc->chunks_mutex);
10808 INIT_WORK(&hevc->notify_work, vh265_notify_work);
10809 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
10810
10811 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
10812 if (IS_ERR_OR_NULL(fw))
10813 return -ENOMEM;
10814
10815 if (hevc->mmu_enable)
10816 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
10817 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
10818 else {
10819 if (!hevc->is_4k) {
10820 /* if an older version of the fw was loaded, */
10821 				/* need to try the noswap fw because the */
10822 				/* old fw package does not contain the swap fw. */
10823 size = get_firmware_data(
10824 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
10825 if (size < 0)
10826 size = get_firmware_data(
10827 VIDEO_DEC_HEVC_MMU, fw->data);
10828 else if (size)
10829 hevc->is_swap = true;
10830 } else
10831 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
10832 fw->data);
10833 }
10834 else
10835 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
10836
10837 if (size < 0) {
10838 pr_err("get firmware fail.\n");
10839 vfree(fw);
10840 return -1;
10841 }
10842
10843 fw->len = size;
10844
10845#ifdef SWAP_HEVC_UCODE
10846 if (!tee_enabled() && hevc->is_swap &&
10847 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10848 if (hevc->mmu_enable) {
10849 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
10850 hevc->mc_cpu_addr =
10851 dma_alloc_coherent(amports_get_dma_device(),
10852 hevc->swap_size,
10853 &hevc->mc_dma_handle, GFP_KERNEL);
10854 if (!hevc->mc_cpu_addr) {
10855 amhevc_disable();
10856 pr_info("vh265 mmu swap ucode loaded fail.\n");
10857 return -ENOMEM;
10858 }
10859
10860 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
10861 hevc->swap_size);
10862
10863 hevc_print(hevc, 0,
10864 "vh265 mmu ucode swap loaded %x\n",
10865 hevc->mc_dma_handle);
10866 }
10867 }
10868#endif
10869
10870#ifdef MULTI_INSTANCE_SUPPORT
10871 if (hevc->m_ins_flag) {
10872 hevc->timer.data = (ulong) hevc;
10873 hevc->timer.function = vh265_check_timer_func;
10874 hevc->timer.expires = jiffies + PUT_INTERVAL;
10875
10876 hevc->fw = fw;
10877
10878 return 0;
10879 }
10880#endif
10881 amhevc_enable();
10882
10883 if (hevc->mmu_enable)
10884 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
10885 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
10886 else {
10887 if (!hevc->is_4k) {
10888 /* if an older version of the fw was loaded, */
10889 				/* need to try the noswap fw because the */
10890 				/* old fw package does not contain the swap fw. */
10891 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
10892 "hevc_mmu_swap", fw->data);
10893 if (ret < 0)
10894 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
10895 "h265_mmu", fw->data);
10896 else
10897 hevc->is_swap = true;
10898 } else
10899 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
10900 "h265_mmu", fw->data);
10901 }
10902 else
10903 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
10904
10905 if (ret < 0) {
10906 amhevc_disable();
10907 vfree(fw);
10908 pr_err("H265: the %s fw loading failed, err: %x\n",
10909 tee_enabled() ? "TEE" : "local", ret);
10910 return -EBUSY;
10911 }
10912
10913 vfree(fw);
10914
10915 hevc->stat |= STAT_MC_LOAD;
10916
10917#ifdef DETREFILL_ENABLE
10918 if (hevc->is_swap &&
10919 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
10920 init_detrefill_buf(hevc);
10921#endif
10922 /* enable AMRISC side protocol */
10923 vh265_prot_init(hevc);
10924
10925 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
10926 vh265_isr_thread_fn,
10927 		IRQF_ONESHOT, /* run the handler thread with this irq disabled */
10928 "vh265-irq", (void *)hevc)) {
10929 hevc_print(hevc, 0, "vh265 irq register error.\n");
10930 amhevc_disable();
10931 return -ENOENT;
10932 }
10933
10934 hevc->stat |= STAT_ISR_REG;
10935 hevc->provider_name = PROVIDER_NAME;
10936
10937#ifdef MULTI_INSTANCE_SUPPORT
10938 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
10939 &vh265_vf_provider, vdec);
10940 vf_reg_provider(&vh265_vf_prov);
10941 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
10942 NULL);
10943 if (hevc->frame_dur != 0) {
10944 if (!is_reset) {
10945 vf_notify_receiver(hevc->provider_name,
10946 VFRAME_EVENT_PROVIDER_FR_HINT,
10947 (void *)
10948 ((unsigned long)hevc->frame_dur));
10949 fr_hint_status = VDEC_HINTED;
10950 }
10951 } else
10952 fr_hint_status = VDEC_NEED_HINT;
10953#else
10954 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
10955 hevc);
10956 vf_reg_provider(&vh265_vf_prov);
10957 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
10958 if (hevc->frame_dur != 0) {
10959 vf_notify_receiver(PROVIDER_NAME,
10960 VFRAME_EVENT_PROVIDER_FR_HINT,
10961 (void *)
10962 ((unsigned long)hevc->frame_dur));
10963 fr_hint_status = VDEC_HINTED;
10964 } else
10965 fr_hint_status = VDEC_NEED_HINT;
10966#endif
10967 hevc->stat |= STAT_VF_HOOK;
10968
10969 hevc->timer.data = (ulong) hevc;
10970 hevc->timer.function = vh265_check_timer_func;
10971 hevc->timer.expires = jiffies + PUT_INTERVAL;
10972
10973 add_timer(&hevc->timer);
10974
10975 hevc->stat |= STAT_TIMER_ARM;
10976
10977 if (use_cma) {
10978#ifdef USE_UNINIT_SEMA
10979 sema_init(&hevc->h265_uninit_done_sema, 0);
10980#endif
10981 if (h265_task == NULL) {
10982 sema_init(&h265_sema, 1);
10983 h265_task =
10984 kthread_run(h265_task_handle, hevc,
10985 "kthread_h265");
10986 }
10987 }
10988 /* hevc->stat |= STAT_KTHREAD; */
10989#if 0
10990 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
10991 hevc_print(hevc, 0, "%s force clk\n", __func__);
10992 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
10993 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
10994 ((1 << 2) | (1 << 1)));
10995 WRITE_VREG(HEVC_DBLK_CFG0,
10996 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
10997 (1 << 1) | 0x3fff0000));/* 2,29:16 */
10998 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
10999 (1 << 2)); /* 2 */
11000 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11001 (1 << 24)); /* 24 */
11002 WRITE_VREG(HEVC_STREAM_CONTROL,
11003 READ_VREG(HEVC_STREAM_CONTROL) |
11004 (1 << 15)); /* 15 */
11005 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11006 (1 << 13)); /* 13 */
11007 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11008 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11009 (1 << 15)); /* 15 */
11010 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11011 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11012 (1 << 15)); /* 15 */
11013 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11014 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11015 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11016 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11017 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11018 (1 << 3)); /* 3 */
11019 }
11020#endif
11021#ifdef SWAP_HEVC_UCODE
11022 if (!tee_enabled() && hevc->is_swap &&
11023 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11024 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11025 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11026 }
11027#endif
11028
11029#ifndef MULTI_INSTANCE_SUPPORT
11030 set_vdec_func(&vh265_dec_status);
11031#endif
11032 amhevc_start();
11033 hevc->stat |= STAT_VDEC_RUN;
11034 hevc->init_flag = 1;
11035 error_handle_threshold = 30;
11036 /* pr_info("%d, vh265_init, RP=0x%x\n",
11037 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11038 */
11039
11040 return 0;
11041}
11042
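/*
 * Single-instance teardown: stop the core, release the IRQ, timer and
 * vframe provider, free buffers and the global stats block.
 */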
11043static int vh265_stop(struct hevc_state_s *hevc)
11044{
11045 if (get_dbg_flag(hevc) &
11046 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11047 int wait_timeout_count = 0;
11048
11049 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11050 HEVC_CODED_SLICE_SEGMENT_DAT &&
11051 wait_timeout_count < 10){
11052 wait_timeout_count++;
11053 msleep(20);
11054 }
11055 }
11056 if (hevc->stat & STAT_VDEC_RUN) {
11057 amhevc_stop();
11058 hevc->stat &= ~STAT_VDEC_RUN;
11059 }
11060
11061 if (hevc->stat & STAT_ISR_REG) {
11062#ifdef MULTI_INSTANCE_SUPPORT
11063 if (!hevc->m_ins_flag)
11064#endif
11065 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11066 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11067 hevc->stat &= ~STAT_ISR_REG;
11068 }
11069
11070 hevc->stat &= ~STAT_TIMER_INIT;
11071 if (hevc->stat & STAT_TIMER_ARM) {
11072 del_timer_sync(&hevc->timer);
11073 hevc->stat &= ~STAT_TIMER_ARM;
11074 }
11075
11076 if (hevc->stat & STAT_VF_HOOK) {
11077 if (fr_hint_status == VDEC_HINTED) {
11078 vf_notify_receiver(hevc->provider_name,
11079 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11080 NULL);
11081 }
11082 fr_hint_status = VDEC_NO_NEED_HINT;
11083 vf_unreg_provider(&vh265_vf_prov);
11084 hevc->stat &= ~STAT_VF_HOOK;
11085 }
11086
11087 hevc_local_uninit(hevc);
11088
11089 if (use_cma) {
11090 hevc->uninit_list = 1;
11091 up(&h265_sema);
11092#ifdef USE_UNINIT_SEMA
11093 down(&hevc->h265_uninit_done_sema);
11094 if (!IS_ERR(h265_task)) {
11095 kthread_stop(h265_task);
11096 h265_task = NULL;
11097 }
11098#else
11099 		while (hevc->uninit_list) /* wait for uninit to complete */
11100 msleep(20);
11101#endif
11102
11103 }
11104 hevc->init_flag = 0;
11105 hevc->first_sc_checked = 0;
11106 cancel_work_sync(&hevc->notify_work);
11107 cancel_work_sync(&hevc->set_clk_work);
11108 uninit_mmu_buffers(hevc);
11109 amhevc_disable();
11110
11111 kfree(gvs);
11112 gvs = NULL;
11113
11114 return 0;
11115}
11116
11117#ifdef MULTI_INSTANCE_SUPPORT
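/*
 * Decode watchdog bookkeeping: start_process_time()/restart_process_time()
 * arm the per-picture timeout, reset_process_time() clears it and records
 * the longest decode time seen so far.
 */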
11118static void reset_process_time(struct hevc_state_s *hevc)
11119{
11120 if (hevc->start_process_time) {
11121 unsigned int process_time =
11122 1000 * (jiffies - hevc->start_process_time) / HZ;
11123 hevc->start_process_time = 0;
11124 if (process_time > max_process_time[hevc->index])
11125 max_process_time[hevc->index] = process_time;
11126 }
11127}
11128
11129static void start_process_time(struct hevc_state_s *hevc)
11130{
11131 hevc->start_process_time = jiffies;
11132 hevc->decode_timeout_count = 2;
11133 hevc->last_lcu_idx = 0;
11134}
11135
11136static void restart_process_time(struct hevc_state_s *hevc)
11137{
11138 hevc->start_process_time = jiffies;
11139 hevc->decode_timeout_count = 2;
11140}
11141
11142static void timeout_process(struct hevc_state_s *hevc)
11143{
11144 /*
11145 	 * If vh265_work is already pending at this timeout point,
11146 	 * let it handle the scenario.
11147 */
11148 if (work_pending(&hevc->work))
11149 return;
11150
11151 hevc->timeout_num++;
11152 amhevc_stop();
11153 read_decode_info(hevc);
11154
11155 hevc_print(hevc,
11156 0, "%s decoder timeout\n", __func__);
11157 check_pic_decoded_error(hevc,
11158 hevc->pic_decoded_lcu_idx);
11159 hevc->decoded_poc = hevc->curr_POC;
11160 hevc->decoding_pic = NULL;
11161 hevc->dec_result = DEC_RESULT_DONE;
11162 reset_process_time(hevc);
11163
11164 if (work_pending(&hevc->work))
11165 return;
11166 vdec_schedule_work(&hevc->timeout_work);
11167}
11168
11169#ifdef CONSTRAIN_MAX_BUF_NUM
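/*
 * Buffer accounting for the CONSTRAIN_MAX_BUF_NUM logic: count pictures
 * held only by the display path versus pictures that are still in use
 * for any reason.
 */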
11170static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11171{
11172 struct PIC_s *pic;
11173 int i;
11174 int count = 0;
11175 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11176 pic = hevc->m_PIC[i];
11177 if (pic == NULL || pic->index == -1)
11178 continue;
11179 if (pic->output_mark == 0 && pic->referenced == 0
11180 && pic->output_ready == 1)
11181 count++;
11182 }
11183
11184 return count;
11185}
11186
11187static int get_used_buf_count(struct hevc_state_s *hevc)
11188{
11189 struct PIC_s *pic;
11190 int i;
11191 int count = 0;
11192 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11193 pic = hevc->m_PIC[i];
11194 if (pic == NULL || pic->index == -1)
11195 continue;
11196 if (pic->output_mark != 0 || pic->referenced != 0
11197 || pic->output_ready != 0)
11198 count++;
11199 }
11200
11201 return count;
11202}
11203#endif
11204
11205
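/*
 * Return 1 if a free picture slot is available (or the pic list is not
 * initialized yet). When nothing is free and too many pictures are still
 * referenced, an error-marked reference picture may be recycled while the
 * receiver is inactive.
 */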
11206static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11207{
11208 struct PIC_s *new_pic = NULL;
11209 struct PIC_s *pic;
11210 /* recycle un-used pic */
11211 int i;
11212 int ref_pic = 0;
11213 struct vdec_s *vdec = hw_to_vdec(hevc);
11214 /*return 1 if pic_list is not initialized yet*/
11215 if (hevc->pic_list_init_flag != 3)
11216 return 1;
11217
11218 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11219 pic = hevc->m_PIC[i];
11220 if (pic == NULL || pic->index == -1)
11221 continue;
11222 if (pic->referenced == 1)
11223 ref_pic++;
11224 if (pic->output_mark == 0 && pic->referenced == 0
11225 && pic->output_ready == 0
11226 ) {
11227 if (new_pic) {
11228 if (pic->POC < new_pic->POC)
11229 new_pic = pic;
11230 } else
11231 new_pic = pic;
11232 }
11233 }
11234
11235 if ((new_pic == NULL) &&
11236 (ref_pic >=
11237 get_work_pic_num(hevc) -
11238 hevc->sps_num_reorder_pics_0 - 1)) {
11239 enum receviver_start_e state = RECEIVER_INACTIVE;
11240 if (vf_get_receiver(vdec->vf_provider_name)) {
11241 state =
11242 vf_notify_receiver(vdec->vf_provider_name,
11243 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11244 NULL);
11245 if ((state == RECEIVER_STATE_NULL)
11246 || (state == RECEIVER_STATE_NONE))
11247 state = RECEIVER_INACTIVE;
11248 }
11249 if (state == RECEIVER_INACTIVE) {
11250 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11251 pic = hevc->m_PIC[i];
11252 if (pic == NULL || pic->index == -1)
11253 continue;
11254
11255 if ((pic->referenced == 1) &&
11256 (pic->error_mark == 1)) {
11257 if (new_pic) {
11258 if (pic->POC < new_pic->POC)
11259 new_pic = pic;
11260 } else
11261 new_pic = pic;
11262 }
11263 }
11264 if (new_pic != NULL) {
11265 new_pic->referenced = 0;
11266 				put_mv_buf(hevc, new_pic);
11267 if (pic_list_debug & 0x2)
11268 					pr_err("err ref poc: %d\n", new_pic->POC);
11269 }
11270 }
11271 }
11272
11273 return (new_pic != NULL) ? 1 : 0;
11274}
11275
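/*
 * Multi-instance teardown: stop the core, release the IRQ, timer and
 * vframe provider, cancel pending work, and free MMU buffers and firmware.
 */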
11276static int vmh265_stop(struct hevc_state_s *hevc)
11277{
11278 if (hevc->stat & STAT_TIMER_ARM) {
11279 del_timer_sync(&hevc->timer);
11280 hevc->stat &= ~STAT_TIMER_ARM;
11281 }
11282 if (hevc->stat & STAT_VDEC_RUN) {
11283 amhevc_stop();
11284 hevc->stat &= ~STAT_VDEC_RUN;
11285 }
11286 if (hevc->stat & STAT_ISR_REG) {
11287 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11288 hevc->stat &= ~STAT_ISR_REG;
11289 }
11290
11291 if (hevc->stat & STAT_VF_HOOK) {
11292 if (fr_hint_status == VDEC_HINTED)
11293 vf_notify_receiver(hevc->provider_name,
11294 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11295 NULL);
11296 fr_hint_status = VDEC_NO_NEED_HINT;
11297 vf_unreg_provider(&vh265_vf_prov);
11298 hevc->stat &= ~STAT_VF_HOOK;
11299 }
11300
11301 hevc_local_uninit(hevc);
11302
11303 if (use_cma) {
11304 hevc->uninit_list = 1;
11305 reset_process_time(hevc);
11306 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
11307 vdec_schedule_work(&hevc->work);
11308 flush_work(&hevc->work);
11309#ifdef USE_UNINIT_SEMA
11310 if (hevc->init_flag) {
11311 down(&hevc->h265_uninit_done_sema);
11312 }
11313#else
11314 		while (hevc->uninit_list) /* wait for uninit to complete */
11315 msleep(20);
11316#endif
11317 }
11318 hevc->init_flag = 0;
11319 hevc->first_sc_checked = 0;
11320 cancel_work_sync(&hevc->notify_work);
11321 cancel_work_sync(&hevc->set_clk_work);
11322 cancel_work_sync(&hevc->work);
11323 cancel_work_sync(&hevc->timeout_work);
11324 uninit_mmu_buffers(hevc);
11325
11326 vfree(hevc->fw);
11327 hevc->fw = NULL;
11328
11329 dump_log(hevc);
11330 return 0;
11331}
11332
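/*
 * Byte-wise sum of the current input chunk, used for debug logging only;
 * the unsigned char return type truncates the sum to 8 bits.
 */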
11333static unsigned char get_data_check_sum
11334 (struct hevc_state_s *hevc, int size)
11335{
11336 int jj;
11337 int sum = 0;
11338 u8 *data = NULL;
11339
11340 if (!hevc->chunk->block->is_mapped)
11341 data = codec_mm_vmap(hevc->chunk->block->start +
11342 hevc->chunk->offset, size);
11343 else
11344 data = ((u8 *)hevc->chunk->block->start_virt) +
11345 hevc->chunk->offset;
11346
11347 for (jj = 0; jj < size; jj++)
11348 sum += data[jj];
11349
11350 if (!hevc->chunk->block->is_mapped)
11351 codec_mm_unmap_phyaddr(data);
11352 return sum;
11353}
11354
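/* Work item: send the frame-rate hint (frame_dur) to the vframe receiver. */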
11355static void vh265_notify_work(struct work_struct *work)
11356{
11357 struct hevc_state_s *hevc =
11358 container_of(work,
11359 struct hevc_state_s,
11360 notify_work);
11361 struct vdec_s *vdec = hw_to_vdec(hevc);
11362#ifdef MULTI_INSTANCE_SUPPORT
11363 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
11364 vf_notify_receiver(hevc->provider_name,
11365 VFRAME_EVENT_PROVIDER_FR_HINT,
11366 (void *)
11367 ((unsigned long)hevc->frame_dur));
11368 vdec->fr_hint_state = VDEC_HINTED;
11369 } else if (fr_hint_status == VDEC_NEED_HINT) {
11370 vf_notify_receiver(hevc->provider_name,
11371 VFRAME_EVENT_PROVIDER_FR_HINT,
11372 (void *)
11373 ((unsigned long)hevc->frame_dur));
11374 fr_hint_status = VDEC_HINTED;
11375 }
11376#else
11377 	if (fr_hint_status == VDEC_NEED_HINT) {
11378 		vf_notify_receiver(PROVIDER_NAME,
11379 			VFRAME_EVENT_PROVIDER_FR_HINT,
11380 			(void *)
11381 			((unsigned long)hevc->frame_dur));
11382 		fr_hint_status = VDEC_HINTED;
11383 	}
11384#endif
11385
11386 return;
11387}
11388
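/*
 * Deferred work for the decode state machine: handles hevc->dec_result,
 * e.g. freeing the canvas list, pulling more input data, or finishing a
 * decoded picture (MMU tail free, statistics, logging).
 */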
11389static void vh265_work_implement(struct hevc_state_s *hevc,
11390 	struct vdec_s *vdec, int from)
11391{
11392 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
11393 /*USE_BUF_BLOCK*/
11394 uninit_pic_list(hevc);
11395 hevc_print(hevc, 0, "uninit list\n");
11396 hevc->uninit_list = 0;
11397#ifdef USE_UNINIT_SEMA
11398 up(&hevc->h265_uninit_done_sema);
11399#endif
11400 return;
11401 }
11402
11403 /* finished decoding one frame or error,
11404 * notify vdec core to switch context
11405 */
11406 if (hevc->pic_list_init_flag == 1
11407 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
11408 hevc->pic_list_init_flag = 2;
11409 init_pic_list(hevc);
11410 init_pic_list_hw(hevc);
11411 init_buf_spec(hevc);
11412 hevc_print(hevc, 0,
11413 "set pic_list_init_flag to 2\n");
11414
11415 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11416 return;
11417 }
11418
11419 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11420 "%s dec_result %d %x %x %x\n",
11421 __func__,
11422 hevc->dec_result,
11423 READ_VREG(HEVC_STREAM_LEVEL),
11424 READ_VREG(HEVC_STREAM_WR_PTR),
11425 READ_VREG(HEVC_STREAM_RD_PTR));
11426
11427 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
11428 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
11429 && (hw_to_vdec(hevc)->next_status !=
11430 VDEC_STATUS_DISCONNECTED)) {
11431 if (!vdec_has_more_input(vdec)) {
11432 hevc->dec_result = DEC_RESULT_EOS;
11433 vdec_schedule_work(&hevc->work);
11434 return;
11435 }
11436 if (!input_frame_based(vdec)) {
11437 int r = vdec_sync_input(vdec);
11438 if (r >= 0x200) {
11439 WRITE_VREG(HEVC_DECODE_SIZE,
11440 READ_VREG(HEVC_DECODE_SIZE) + r);
11441
11442 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11443 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
11444 __func__,
11445 READ_VREG(HEVC_STREAM_LEVEL),
11446 READ_VREG(HEVC_STREAM_WR_PTR),
11447 READ_VREG(HEVC_STREAM_RD_PTR),
11448 READ_VREG(HEVC_MPC_E), r);
11449
11450 start_process_time(hevc);
11451 if (READ_VREG(HEVC_DEC_STATUS_REG)
11452 == HEVC_DECODE_BUFEMPTY2)
11453 WRITE_VREG(HEVC_DEC_STATUS_REG,
11454 HEVC_ACTION_DONE);
11455 else
11456 WRITE_VREG(HEVC_DEC_STATUS_REG,
11457 HEVC_ACTION_DEC_CONT);
11458 } else {
11459 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11460 vdec_schedule_work(&hevc->work);
11461 }
11462 return;
11463 }
11464
11465 /*below for frame_base*/
11466 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
11467 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11468 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
11469 __func__,
11470 READ_VREG(HEVC_STREAM_LEVEL),
11471 READ_VREG(HEVC_STREAM_WR_PTR),
11472 READ_VREG(HEVC_STREAM_RD_PTR),
11473 READ_VREG(HEVC_MPC_E));
11474 mutex_lock(&hevc->chunks_mutex);
11475 vdec_vframe_dirty(vdec, hevc->chunk);
11476 hevc->chunk = NULL;
11477 mutex_unlock(&hevc->chunks_mutex);
11478 vdec_clean_input(vdec);
11479 }
11480
11481 /*if (is_new_pic_available(hevc)) {*/
11482 if (run_ready(vdec, VDEC_HEVC)) {
11483 int r;
11484 int decode_size;
11485 r = vdec_prepare_input(vdec, &hevc->chunk);
11486 if (r < 0) {
11487 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11488
11489 hevc_print(hevc,
11490 PRINT_FLAG_VDEC_DETAIL,
11491 "amvdec_vh265: Insufficient data\n");
11492
11493 vdec_schedule_work(&hevc->work);
11494 return;
11495 }
11496 hevc->dec_result = DEC_RESULT_NONE;
11497 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11498 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
11499 __func__, r,
11500 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
11501 get_data_check_sum(hevc, r) : 0,
11502 READ_VREG(HEVC_MPC_E));
11503
11504 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
11505 int jj;
11506 u8 *data = NULL;
11507
11508 if (!hevc->chunk->block->is_mapped)
11509 data = codec_mm_vmap(
11510 hevc->chunk->block->start +
11511 hevc->chunk->offset, r);
11512 else
11513 data = ((u8 *)
11514 hevc->chunk->block->start_virt)
11515 + hevc->chunk->offset;
11516
11517 for (jj = 0; jj < r; jj++) {
11518 if ((jj & 0xf) == 0)
11519 hevc_print(hevc,
11520 PRINT_FRAMEBASE_DATA,
11521 "%06x:", jj);
11522 hevc_print_cont(hevc,
11523 PRINT_FRAMEBASE_DATA,
11524 "%02x ", data[jj]);
11525 if (((jj + 1) & 0xf) == 0)
11526 hevc_print_cont(hevc,
11527 PRINT_FRAMEBASE_DATA,
11528 "\n");
11529 }
11530
11531 if (!hevc->chunk->block->is_mapped)
11532 codec_mm_unmap_phyaddr(data);
11533 }
11534
11535 decode_size = hevc->chunk->size +
11536 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
11537 WRITE_VREG(HEVC_DECODE_SIZE,
11538 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
11539
11540 vdec_enable_input(vdec);
11541
11542 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11543 "%s: mpc %x\n",
11544 __func__, READ_VREG(HEVC_MPC_E));
11545
11546 start_process_time(hevc);
11547 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11548		} else {
11549 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11550
11551 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11552 * "amvdec_vh265: Insufficient data\n");
11553 */
11554
11555 vdec_schedule_work(&hevc->work);
11556 }
11557 return;
11558 } else if (hevc->dec_result == DEC_RESULT_DONE) {
11559 /* if (!hevc->ctx_valid)
11560 hevc->ctx_valid = 1; */
11561 decode_frame_count[hevc->index]++;
11562#ifdef DETREFILL_ENABLE
11563 if (hevc->is_swap &&
11564 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11565 if (hevc->delrefill_check == 2) {
11566 delrefill(hevc);
11567 amhevc_stop();
11568 }
11569 }
11570#endif
11571 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
11572 hevc->used_4k_num =
11573 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
11574 if (hevc->used_4k_num >= 0 &&
11575 hevc->cur_pic &&
11576 hevc->cur_pic->scatter_alloc
11577 == 1) {
11578 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
11579 "%s pic index %d scatter_alloc %d page_start %d\n",
11580 "decoder_mmu_box_free_idx_tail",
11581 hevc->cur_pic->index,
11582 hevc->cur_pic->scatter_alloc,
11583 hevc->used_4k_num);
11584 if (hevc->m_ins_flag)
11585 hevc_mmu_dma_check(hw_to_vdec(hevc));
11586 decoder_mmu_box_free_idx_tail(
11587 hevc->mmu_box,
11588 hevc->cur_pic->index,
11589 hevc->used_4k_num);
11590 hevc->cur_pic->scatter_alloc = 2;
11591 }
11592 }
11593 hevc->pic_decoded_lcu_idx =
11594 READ_VREG(HEVC_PARSER_LCU_START)
11595 & 0xffffff;
11596
11597 if (vdec->master == NULL && vdec->slave == NULL &&
11598 hevc->empty_flag == 0) {
11599 hevc->over_decode =
11600 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
11601 if (hevc->over_decode)
11602 hevc_print(hevc, 0,
11603 "!!!Over decode\n");
11604 }
11605
11606 if (is_log_enable(hevc))
11607 add_log(hevc,
11608 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
11609 __func__,
11610 hevc->dec_result,
11611 hevc->pic_decoded_lcu_idx,
11612 hevc->used_4k_num,
11613 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11614 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11615 hevc->start_shift_bytes
11616 );
11617
11618 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11619 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
11620 __func__,
11621 hevc->dec_result,
11622 READ_VREG(HEVC_STREAM_LEVEL),
11623 READ_VREG(HEVC_STREAM_WR_PTR),
11624 READ_VREG(HEVC_STREAM_RD_PTR),
11625 hevc->pic_decoded_lcu_idx,
11626 hevc->used_4k_num,
11627 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11628 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11629 hevc->start_shift_bytes
11630 );
11631
11632 hevc->used_4k_num = -1;
11633
11634 check_pic_decoded_error(hevc,
11635 hevc->pic_decoded_lcu_idx);
11636#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11637#if 1
11638 if (vdec->slave) {
11639 if (dv_debug & 0x1)
11640 vdec_set_flag(vdec->slave,
11641 VDEC_FLAG_SELF_INPUT_CONTEXT);
11642 else
11643 vdec_set_flag(vdec->slave,
11644 VDEC_FLAG_OTHER_INPUT_CONTEXT);
11645 }
11646#else
11647 if (vdec->slave) {
11648 if (no_interleaved_el_slice)
11649 vdec_set_flag(vdec->slave,
11650 VDEC_FLAG_INPUT_KEEP_CONTEXT);
11651 /* this will move real HW pointer for input */
11652 else
11653 vdec_set_flag(vdec->slave, 0);
11654			/* this will not move real HW pointer
11655			 * and SL layer decoding
11656			 * will start from same stream position
11657			 * as current BL decoder
11658			 */
11659 }
11660#endif
11661#endif
11662#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11663 hevc->shift_byte_count_lo
11664 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11665 if (vdec->slave) {
11666			/* cur is the base layer; find the enhancement layer */
11667 struct hevc_state_s *hevc_el =
11668 (struct hevc_state_s *)
11669 vdec->slave->private;
11670 if (hevc_el)
11671 hevc_el->shift_byte_count_lo =
11672 hevc->shift_byte_count_lo;
11673 } else if (vdec->master) {
11674			/* cur is the enhancement layer; find the base layer */
11675 struct hevc_state_s *hevc_ba =
11676 (struct hevc_state_s *)
11677 vdec->master->private;
11678 if (hevc_ba)
11679 hevc_ba->shift_byte_count_lo =
11680 hevc->shift_byte_count_lo;
11681 }
11682#endif
11683 mutex_lock(&hevc->chunks_mutex);
11684 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11685 hevc->chunk = NULL;
11686 mutex_unlock(&hevc->chunks_mutex);
11687 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
11688		/*
11689		 * stream base: stream buffer empty or timeout
11690		 * frame base: vdec_prepare_input failed
11691		 */
11692 if (!vdec_has_more_input(vdec)) {
11693 hevc->dec_result = DEC_RESULT_EOS;
11694 vdec_schedule_work(&hevc->work);
11695 return;
11696 }
11697#ifdef AGAIN_HAS_THRESHOLD
11698 hevc->next_again_flag = 1;
11699#endif
11700 } else if (hevc->dec_result == DEC_RESULT_EOS) {
11701 struct PIC_s *pic;
11702 hevc->eos = 1;
11703#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11704 if ((vdec->master || vdec->slave) &&
11705 aux_data_is_avaible(hevc))
11706 dolby_get_meta(hevc);
11707#endif
11708 check_pic_decoded_error(hevc,
11709 hevc->pic_decoded_lcu_idx);
11710 pic = get_pic_by_POC(hevc, hevc->curr_POC);
11711 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11712 "%s: end of stream, last dec poc %d => 0x%pf\n",
11713 __func__, hevc->curr_POC, pic);
11714 flush_output(hevc, pic);
11715
11716 if (hevc->is_used_v4l)
11717 notify_v4l_eos(hw_to_vdec(hevc));
11718#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11719 hevc->shift_byte_count_lo
11720 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11721 if (vdec->slave) {
11722			/* cur is the base layer; find the enhancement layer */
11723 struct hevc_state_s *hevc_el =
11724 (struct hevc_state_s *)
11725 vdec->slave->private;
11726 if (hevc_el)
11727 hevc_el->shift_byte_count_lo =
11728 hevc->shift_byte_count_lo;
11729 } else if (vdec->master) {
11730			/* cur is the enhancement layer; find the base layer */
11731 struct hevc_state_s *hevc_ba =
11732 (struct hevc_state_s *)
11733 vdec->master->private;
11734 if (hevc_ba)
11735 hevc_ba->shift_byte_count_lo =
11736 hevc->shift_byte_count_lo;
11737 }
11738#endif
11739 mutex_lock(&hevc->chunks_mutex);
11740 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11741 hevc->chunk = NULL;
11742 mutex_unlock(&hevc->chunks_mutex);
11743 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
11744 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11745 "%s: force exit\n",
11746 __func__);
11747 if (hevc->stat & STAT_VDEC_RUN) {
11748 amhevc_stop();
11749 hevc->stat &= ~STAT_VDEC_RUN;
11750 }
11751 if (hevc->stat & STAT_ISR_REG) {
11752 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11753 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11754 hevc->stat &= ~STAT_ISR_REG;
11755 }
11756 hevc_print(hevc, 0, "%s: force exit end\n",
11757 __func__);
11758 }
11759
11760 if (hevc->stat & STAT_VDEC_RUN) {
11761 amhevc_stop();
11762 hevc->stat &= ~STAT_VDEC_RUN;
11763 }
11764
11765 if (hevc->stat & STAT_TIMER_ARM) {
11766 del_timer_sync(&hevc->timer);
11767 hevc->stat &= ~STAT_TIMER_ARM;
11768 }
11769
11770 wait_hevc_search_done(hevc);
11771#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11772 if (hevc->switch_dvlayer_flag) {
11773 if (vdec->slave)
11774 vdec_set_next_sched(vdec, vdec->slave);
11775 else if (vdec->master)
11776 vdec_set_next_sched(vdec, vdec->master);
11777 } else if (vdec->slave || vdec->master)
11778 vdec_set_next_sched(vdec, vdec);
11779#endif
11780
11781 if (from == 1) {
11782 /* This is a timeout work */
11783 if (work_pending(&hevc->work)) {
11784 /*
11785 * The vh265_work arrives at the last second,
11786 * give it a chance to handle the scenario.
11787 */
11788 return;
11789			//cancel_work_sync(&hevc->work);//reserved for future consideration
11790 }
11791 }
11792
11793 /* mark itself has all HW resource released and input released */
11794 if (vdec->parallel_dec == 1)
11795 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
11796 else
11797 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
11798
11799 if (hevc->is_used_v4l) {
11800 struct aml_vcodec_ctx *ctx =
11801 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
11802
11803 if (ctx->param_sets_from_ucode &&
11804 !hevc->v4l_params_parsed)
11805 vdec_v4l_write_frame_sync(ctx);
11806 }
11807
11808 if (hevc->vdec_cb)
11809 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
11810}
11811
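/*
 * Work queue entry points: vh265_work() runs the normal completion path,
 * while vh265_timeout_work() runs the same implementation with from == 1
 * and bails out if the regular work is already pending.
 */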
11812static void vh265_work(struct work_struct *work)
11813{
11814 struct hevc_state_s *hevc = container_of(work,
11815 struct hevc_state_s, work);
11816 struct vdec_s *vdec = hw_to_vdec(hevc);
11817
11818 vh265_work_implement(hevc, vdec, 0);
11819}
11820
11821static void vh265_timeout_work(struct work_struct *work)
11822{
11823 struct hevc_state_s *hevc = container_of(work,
11824 struct hevc_state_s, timeout_work);
11825 struct vdec_s *vdec = hw_to_vdec(hevc);
11826
11827 if (work_pending(&hevc->work))
11828 return;
11829 vh265_work_implement(hevc, vdec, 1);
11830}
11831
11832
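/*
 * Restore the per-run HW context; currently this just re-runs
 * vh265_prot_init() before each decode pass.
 */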
11833static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
11834{
11835 /* new to do ... */
11836 vh265_prot_init(hevc);
11837 return 0;
11838}
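/*
 * run_ready() - tell the vdec core whether this instance can be scheduled.
 * It returns the core mask needed for a run (CORE_MASK_HEVC, plus
 * CORE_MASK_VDEC_1 when not in parallel mode) or 0 when the instance has
 * to wait: e.g. on EOS, when the stream level is still below
 * pre_decode_buf_level, when the AGAIN threshold has not been reached,
 * when no new picture buffer is available, when too many buffers are in
 * flight (CONSTRAIN_MAX_BUF_NUM), or, for v4l2, when the params have been
 * parsed but the v4l codec is not ready yet.
 */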
11839static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
11840{
11841 struct hevc_state_s *hevc =
11842 (struct hevc_state_s *)vdec->private;
11843 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
11844 CODEC_MM_FLAGS_TVP : 0;
11845 bool ret = 0;
11846 if (step == 0x12)
11847 return 0;
11848 else if (step == 0x11)
11849 step = 0x12;
11850
11851 if (hevc->eos)
11852 return 0;
11853 if (!hevc->first_sc_checked && hevc->mmu_enable) {
11854 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
11855		hevc->first_sc_checked = 1;
11856 hevc_print(hevc, 0,
11857 "vh265 cached=%d need_size=%d speed= %d ms\n",
11858 size, (hevc->need_cache_size >> PAGE_SHIFT),
11859 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
11860 }
11861 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
11862 && pre_decode_buf_level != 0) {
11863 u32 rp, wp, level;
11864
11865 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
11866 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
11867 if (wp < rp)
11868 level = vdec->input.size + wp - rp;
11869 else
11870 level = wp - rp;
11871
11872 if (level < pre_decode_buf_level)
11873 return 0;
11874 }
11875
11876#ifdef AGAIN_HAS_THRESHOLD
11877 if (hevc->next_again_flag &&
11878 (!vdec_frame_based(vdec))) {
11879 u32 parser_wr_ptr =
11880 READ_PARSER_REG(PARSER_VIDEO_WP);
11881 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
11882 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
11883 again_threshold) {
11884 int r = vdec_sync_input(vdec);
11885 hevc_print(hevc,
11886			PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
11887 return 0;
11888 }
11889 }
11890#endif
11891
11892 if (disp_vframe_valve_level &&
11893 kfifo_len(&hevc->display_q) >=
11894 disp_vframe_valve_level) {
11895 hevc->valve_count--;
11896 if (hevc->valve_count <= 0)
11897 hevc->valve_count = 2;
11898 else
11899 return 0;
11900 }
11901
11902 ret = is_new_pic_available(hevc);
11903 if (!ret) {
11904 hevc_print(hevc,
11905 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
11906 __func__, ret);
11907 }
11908
11909#ifdef CONSTRAIN_MAX_BUF_NUM
11910 if (hevc->pic_list_init_flag == 3) {
11911 if (run_ready_max_vf_only_num > 0 &&
11912 get_vf_ref_only_buf_count(hevc) >=
11913 run_ready_max_vf_only_num
11914 )
11915 ret = 0;
11916 if (run_ready_display_q_num > 0 &&
11917 kfifo_len(&hevc->display_q) >=
11918 run_ready_display_q_num)
11919 ret = 0;
11920
11921		/* avoid more buffers being consumed when
11922		 * switching resolution */
11923 if (run_ready_max_buf_num == 0xff &&
11924 get_used_buf_count(hevc) >=
11925 get_work_pic_num(hevc))
11926 ret = 0;
11927 else if (run_ready_max_buf_num &&
11928 get_used_buf_count(hevc) >=
11929 run_ready_max_buf_num)
11930 ret = 0;
11931 }
11932#endif
11933
11934 if (hevc->is_used_v4l) {
11935 struct aml_vcodec_ctx *ctx =
11936 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
11937
11938 if (ctx->param_sets_from_ucode &&
11939 !ctx->v4l_codec_ready &&
11940 hevc->v4l_params_parsed)
11941			ret = 0; /* the params have been parsed. */
11942 }
11943
11944
11945 if (ret)
11946 not_run_ready[hevc->index] = 0;
11947 else
11948 not_run_ready[hevc->index]++;
11949 if (vdec->parallel_dec == 1)
11950 return ret ? (CORE_MASK_HEVC) : 0;
11951 else
11952 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
11953}
11954
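/*
 * run() - called by the vdec core once run_ready() granted the core mask.
 * Roughly: prepare the input chunk, (re)load the HEVC firmware variant
 * that matches the chip/MMU configuration unless it is already loaded,
 * restore the protocol registers, program HEVC_DECODE_SIZE and then start
 * the HW together with the watchdog timer; completion is reported back
 * through vh265_work().
 */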
11955static void run(struct vdec_s *vdec, unsigned long mask,
11956 void (*callback)(struct vdec_s *, void *), void *arg)
11957{
11958 struct hevc_state_s *hevc =
11959 (struct hevc_state_s *)vdec->private;
11960 int r, loadr = 0;
11961 unsigned char check_sum = 0;
11962
11963 run_count[hevc->index]++;
11964 hevc->vdec_cb_arg = arg;
11965 hevc->vdec_cb = callback;
11966 hevc->aux_data_dirty = 1;
11967 hevc_reset_core(vdec);
11968
11969#ifdef AGAIN_HAS_THRESHOLD
11970 hevc->pre_parser_wr_ptr =
11971 READ_PARSER_REG(PARSER_VIDEO_WP);
11972 hevc->next_again_flag = 0;
11973#endif
11974 r = vdec_prepare_input(vdec, &hevc->chunk);
11975 if (r < 0) {
11976 input_empty[hevc->index]++;
11977 hevc->dec_result = DEC_RESULT_AGAIN;
11978 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11979 "ammvdec_vh265: Insufficient data\n");
11980
11981 vdec_schedule_work(&hevc->work);
11982 return;
11983 }
11984 input_empty[hevc->index] = 0;
11985 hevc->dec_result = DEC_RESULT_NONE;
11986 if (vdec_frame_based(vdec) &&
11987 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
11988 || is_log_enable(hevc)))
11989 check_sum = get_data_check_sum(hevc, r);
11990
11991 if (is_log_enable(hevc))
11992 add_log(hevc,
11993 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
11994 __func__, r,
11995 check_sum,
11996 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
11997 );
11998 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11999 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12000 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12001 __func__, r,
12002 check_sum,
12003 READ_VREG(HEVC_STREAM_LEVEL),
12004 READ_VREG(HEVC_STREAM_WR_PTR),
12005 READ_VREG(HEVC_STREAM_RD_PTR),
12006 READ_PARSER_REG(PARSER_VIDEO_RP),
12007 READ_PARSER_REG(PARSER_VIDEO_WP),
12008 hevc->start_shift_bytes
12009 );
12010 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12011 input_frame_based(vdec)) {
12012 int jj;
12013 u8 *data = NULL;
12014
12015 if (!hevc->chunk->block->is_mapped)
12016 data = codec_mm_vmap(hevc->chunk->block->start +
12017 hevc->chunk->offset, r);
12018 else
12019 data = ((u8 *)hevc->chunk->block->start_virt)
12020 + hevc->chunk->offset;
12021
12022 for (jj = 0; jj < r; jj++) {
12023 if ((jj & 0xf) == 0)
12024 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12025 "%06x:", jj);
12026 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12027 "%02x ", data[jj]);
12028 if (((jj + 1) & 0xf) == 0)
12029 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12030 "\n");
12031 }
12032
12033 if (!hevc->chunk->block->is_mapped)
12034 codec_mm_unmap_phyaddr(data);
12035 }
12036 if (vdec->mc_loaded) {
12037		/* the firmware has been loaded before and
12038		 * has not changed to another one;
12039		 * skip reloading.
12040		 */
12041 if (tee_enabled() && hevc->is_swap &&
12042 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12043 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12044 } else {
12045 if (hevc->mmu_enable)
12046 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12047 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12048 "h265_mmu", hevc->fw->data);
12049 else {
12050 if (!hevc->is_4k) {
12051				/* if an older version of the fw was loaded, */
12052				/* we need to try the noswap fw because the */
12053				/* old fw package does not contain the swap fw. */
12054 loadr = amhevc_vdec_loadmc_ex(
12055 VFORMAT_HEVC, vdec,
12056 "hevc_mmu_swap",
12057 hevc->fw->data);
12058 if (loadr < 0)
12059 loadr = amhevc_vdec_loadmc_ex(
12060 VFORMAT_HEVC, vdec,
12061 "h265_mmu",
12062 hevc->fw->data);
12063 else
12064 hevc->is_swap = true;
12065 } else
12066 loadr = amhevc_vdec_loadmc_ex(
12067 VFORMAT_HEVC, vdec,
12068 "h265_mmu", hevc->fw->data);
12069 }
12070 else
12071 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12072 NULL, hevc->fw->data);
12073 if (loadr < 0) {
12074 amhevc_disable();
12075 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12076 tee_enabled() ? "TEE" : "local", loadr);
12077 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12078 vdec_schedule_work(&hevc->work);
12079 return;
12080 }
12081
12082 if (tee_enabled() && hevc->is_swap &&
12083 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12084 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12085#ifdef DETREFILL_ENABLE
12086 if (hevc->is_swap &&
12087 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12088 init_detrefill_buf(hevc);
12089#endif
12090 vdec->mc_loaded = 1;
12091 vdec->mc_type = VFORMAT_HEVC;
12092 }
12093 if (vh265_hw_ctx_restore(hevc) < 0) {
12094 vdec_schedule_work(&hevc->work);
12095 return;
12096 }
12097 vdec_enable_input(vdec);
12098
12099 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12100
12101 if (vdec_frame_based(vdec)) {
12102 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12103 r = hevc->chunk->size +
12104 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12105 hevc->decode_size = r;
12106 }
12107#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12108 else {
12109 if (vdec->master || vdec->slave)
12110 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12111 hevc->shift_byte_count_lo);
12112 }
12113#endif
12114 WRITE_VREG(HEVC_DECODE_SIZE, r);
12115 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12116 hevc->init_flag = 1;
12117
12118 if (hevc->pic_list_init_flag == 3)
12119 init_pic_list_hw(hevc);
12120
12121 backup_decode_state(hevc);
12122
12123 start_process_time(hevc);
12124 mod_timer(&hevc->timer, jiffies);
12125 hevc->stat |= STAT_TIMER_ARM;
12126 hevc->stat |= STAT_ISR_REG;
12127 amhevc_start();
12128 hevc->stat |= STAT_VDEC_RUN;
12129}
12130
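/*
 * reset() - vdec reset hook: cancel pending work, stop the HW and the
 * timer, drop the MV buffers and the local state, then re-run the local
 * init so the instance can be reused.
 */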
12131static void reset(struct vdec_s *vdec)
12132{
12133
12134 struct hevc_state_s *hevc =
12135 (struct hevc_state_s *)vdec->private;
12136 int i;
12137
12138 cancel_work_sync(&hevc->work);
12139 cancel_work_sync(&hevc->notify_work);
12140 if (hevc->stat & STAT_VDEC_RUN) {
12141 amhevc_stop();
12142 hevc->stat &= ~STAT_VDEC_RUN;
12143 }
12144
12145 if (hevc->stat & STAT_TIMER_ARM) {
12146 del_timer_sync(&hevc->timer);
12147 hevc->stat &= ~STAT_TIMER_ARM;
12148 }
12149 hevc->dec_result = DEC_RESULT_NONE;
12150 reset_process_time(hevc);
12151 hevc->init_flag = 0;
12152 hevc->pic_list_init_flag = 0;
12153 dealloc_mv_bufs(hevc);
12154 hevc_local_uninit(hevc);
12155 if (vh265_local_init(hevc) < 0)
12156		pr_debug("%s: local init failed\n", __func__);
12157 for (i = 0; i < BUF_POOL_SIZE; i++) {
12158 hevc->m_BUF[i].start_adr = 0;
12159 }
12160
12161 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12162}
12163
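/* IRQ trampolines used by the vdec core for the multi-instance path. */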
12164static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12165{
12166 struct hevc_state_s *hevc =
12167 (struct hevc_state_s *)vdec->private;
12168
12169 return vh265_isr(0, hevc);
12170}
12171
12172static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12173{
12174 struct hevc_state_s *hevc =
12175 (struct hevc_state_s *)vdec->private;
12176
12177 return vh265_isr_thread_fn(0, hevc);
12178}
12179#endif
12180
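/*
 * amvdec_h265_probe() - probe for the legacy single-instance decoder.
 * It allocates the global hevc state, reserves the bmmu workspace buffer
 * (cleared unless the stream is secure), copies sys_info from the
 * platform data and finally calls vh265_init().
 */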
12181static int amvdec_h265_probe(struct platform_device *pdev)
12182{
12183#ifdef MULTI_INSTANCE_SUPPORT
12184 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12185#else
12186 struct vdec_dev_reg_s *pdata =
12187 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12188#endif
12189 char *tmpbuf;
12190 int ret;
12191 struct hevc_state_s *hevc;
12192
12193 hevc = vmalloc(sizeof(struct hevc_state_s));
12194 if (hevc == NULL) {
12195 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12196 return -ENOMEM;
12197 }
12198 gHevc = hevc;
12199 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12200 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12201 H265_DEBUG_DIS_SYS_ERROR_PROC));
12202 memset(hevc, 0, sizeof(struct hevc_state_s));
12203 if (get_dbg_flag(hevc))
12204 hevc_print(hevc, 0, "%s\r\n", __func__);
12205 mutex_lock(&vh265_mutex);
12206
12207 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12208 (parser_sei_enable & 0x100) == 0)
12209 parser_sei_enable = 7; /*old 1*/
12210 hevc->m_ins_flag = 0;
12211 hevc->init_flag = 0;
12212 hevc->first_sc_checked = 0;
12213 hevc->uninit_list = 0;
12214 hevc->fatal_error = 0;
12215 hevc->show_frame_num = 0;
12216 hevc->frameinfo_enable = 1;
12217#ifdef MULTI_INSTANCE_SUPPORT
12218 hevc->platform_dev = pdev;
12219 platform_set_drvdata(pdev, pdata);
12220#endif
12221
12222 if (pdata == NULL) {
12223 hevc_print(hevc, 0,
12224 "\namvdec_h265 memory resource undefined.\n");
12225 vfree(hevc);
12226 mutex_unlock(&vh265_mutex);
12227 return -EFAULT;
12228 }
12229 if (mmu_enable_force == 0) {
12230 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12231 || double_write_mode == 0x10)
12232 hevc->mmu_enable = 0;
12233 else
12234 hevc->mmu_enable = 1;
12235 }
12236 if (init_mmu_buffers(hevc)) {
12237 hevc_print(hevc, 0,
12238 "\n 265 mmu init failed!\n");
12239 vfree(hevc);
12240 mutex_unlock(&vh265_mutex);
12241 return -EFAULT;
12242 }
12243
12244 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12245 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12246 if (ret < 0) {
12247 uninit_mmu_buffers(hevc);
12248 vfree(hevc);
12249 mutex_unlock(&vh265_mutex);
12250 return ret;
12251 }
12252 hevc->buf_size = work_buf_size;
12253
12254
12255 if (!vdec_secure(pdata)) {
12256 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12257 if (tmpbuf) {
12258 memset(tmpbuf, 0, work_buf_size);
12259 dma_sync_single_for_device(amports_get_dma_device(),
12260 hevc->buf_start,
12261 work_buf_size, DMA_TO_DEVICE);
12262 } else {
12263 tmpbuf = codec_mm_vmap(hevc->buf_start,
12264 work_buf_size);
12265 if (tmpbuf) {
12266 memset(tmpbuf, 0, work_buf_size);
12267 dma_sync_single_for_device(
12268 amports_get_dma_device(),
12269 hevc->buf_start,
12270 work_buf_size,
12271 DMA_TO_DEVICE);
12272 codec_mm_unmap_phyaddr(tmpbuf);
12273 }
12274 }
12275 }
12276
12277 if (get_dbg_flag(hevc)) {
12278 hevc_print(hevc, 0,
12279 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12280 hevc->buf_start, hevc->buf_size);
12281 }
12282
12283 if (pdata->sys_info)
12284 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12285 else {
12286 hevc->vh265_amstream_dec_info.width = 0;
12287 hevc->vh265_amstream_dec_info.height = 0;
12288 hevc->vh265_amstream_dec_info.rate = 30;
12289 }
12290#ifndef MULTI_INSTANCE_SUPPORT
12291 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12292 workaround_enable |= 3;
12293 hevc_print(hevc, 0,
12294 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
12295 } else
12296 workaround_enable &= ~3;
12297#endif
12298 hevc->cma_dev = pdata->cma_dev;
12299 vh265_vdec_info_init();
12300
12301#ifdef MULTI_INSTANCE_SUPPORT
12302 pdata->private = hevc;
12303 pdata->dec_status = vh265_dec_status;
12304 pdata->set_isreset = vh265_set_isreset;
12305 is_reset = 0;
12306 if (vh265_init(pdata) < 0) {
12307#else
12308 if (vh265_init(hevc) < 0) {
12309#endif
12310 hevc_print(hevc, 0,
12311 "\namvdec_h265 init failed.\n");
12312 hevc_local_uninit(hevc);
12313 uninit_mmu_buffers(hevc);
12314 vfree(hevc);
12315 pdata->dec_status = NULL;
12316 mutex_unlock(&vh265_mutex);
12317 return -ENODEV;
12318 }
12319 /*set the max clk for smooth playing...*/
12320 hevc_source_changed(VFORMAT_HEVC,
12321 3840, 2160, 60);
12322 mutex_unlock(&vh265_mutex);
12323
12324 return 0;
12325}
12326
12327static int amvdec_h265_remove(struct platform_device *pdev)
12328{
12329 struct hevc_state_s *hevc = gHevc;
12330
12331 if (get_dbg_flag(hevc))
12332 hevc_print(hevc, 0, "%s\r\n", __func__);
12333
12334 mutex_lock(&vh265_mutex);
12335
12336 vh265_stop(hevc);
12337
12338 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
12339
12340
12341#ifdef DEBUG_PTS
12342 hevc_print(hevc, 0,
12343 "pts missed %ld, pts hit %ld, duration %d\n",
12344 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
12345#endif
12346
12347 vfree(hevc);
12348 hevc = NULL;
12349 gHevc = NULL;
12350
12351 mutex_unlock(&vh265_mutex);
12352
12353 return 0;
12354}
12355/****************************************/
12356#ifdef CONFIG_PM
12357static int h265_suspend(struct device *dev)
12358{
12359 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
12360 return 0;
12361}
12362
12363static int h265_resume(struct device *dev)
12364{
12365 amhevc_resume(to_platform_device(dev));
12366 return 0;
12367}
12368
12369static const struct dev_pm_ops h265_pm_ops = {
12370 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
12371};
12372#endif
12373
12374static struct platform_driver amvdec_h265_driver = {
12375 .probe = amvdec_h265_probe,
12376 .remove = amvdec_h265_remove,
12377 .driver = {
12378 .name = DRIVER_NAME,
12379#ifdef CONFIG_PM
12380 .pm = &h265_pm_ops,
12381#endif
12382 }
12383};
12384
12385#ifdef MULTI_INSTANCE_SUPPORT
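/*
 * vh265_dump_state() - debug hook wired into pdata->dump_state; it prints
 * the decoder state, the buffer/MV pools, a set of HEVC/parser registers
 * and, when PRINT_FRAMEBASE_DATA is enabled, a hexdump of the current
 * frame-based chunk.
 */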
12386static void vh265_dump_state(struct vdec_s *vdec)
12387{
12388 int i;
12389 struct hevc_state_s *hevc =
12390 (struct hevc_state_s *)vdec->private;
12391 hevc_print(hevc, 0,
12392 "====== %s\n", __func__);
12393
12394 hevc_print(hevc, 0,
12395 "width/height (%d/%d), reorder_pic_num %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
12396 hevc->frame_width,
12397 hevc->frame_height,
12398 hevc->sps_num_reorder_pics_0,
12399 get_work_pic_num(hevc),
12400 hevc->video_signal_type_debug,
12401 hevc->is_swap
12402 );
12403
12404 hevc_print(hevc, 0,
12405 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
12406 input_frame_based(vdec),
12407 hevc->eos,
12408 hevc->dec_result,
12409 decode_frame_count[hevc->index],
12410 display_frame_count[hevc->index],
12411 run_count[hevc->index],
12412 not_run_ready[hevc->index],
12413 input_empty[hevc->index]
12414 );
12415
12416 if (vf_get_receiver(vdec->vf_provider_name)) {
12417 enum receviver_start_e state =
12418 vf_notify_receiver(vdec->vf_provider_name,
12419 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12420 NULL);
12421 hevc_print(hevc, 0,
12422 "\nreceiver(%s) state %d\n",
12423 vdec->vf_provider_name,
12424 state);
12425 }
12426
12427 hevc_print(hevc, 0,
12428 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
12429 __func__,
12430 kfifo_len(&hevc->newframe_q),
12431 VF_POOL_SIZE,
12432 kfifo_len(&hevc->display_q),
12433 VF_POOL_SIZE,
12434 hevc->vf_pre_count,
12435 hevc->vf_get_count,
12436 hevc->vf_put_count,
12437 hevc->pic_list_init_flag,
12438 is_new_pic_available(hevc)
12439 );
12440
12441 dump_pic_list(hevc);
12442
12443 for (i = 0; i < BUF_POOL_SIZE; i++) {
12444 hevc_print(hevc, 0,
12445 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12446 i,
12447 hevc->m_BUF[i].start_adr,
12448 hevc->m_BUF[i].size,
12449 hevc->m_BUF[i].used_flag);
12450 }
12451
12452 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12453 hevc_print(hevc, 0,
12454 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12455 i,
12456 hevc->m_mv_BUF[i].start_adr,
12457 hevc->m_mv_BUF[i].size,
12458 hevc->m_mv_BUF[i].used_flag);
12459 }
12460
12461 hevc_print(hevc, 0,
12462 "HEVC_DEC_STATUS_REG=0x%x\n",
12463 READ_VREG(HEVC_DEC_STATUS_REG));
12464 hevc_print(hevc, 0,
12465 "HEVC_MPC_E=0x%x\n",
12466 READ_VREG(HEVC_MPC_E));
12467 hevc_print(hevc, 0,
12468 "HEVC_DECODE_MODE=0x%x\n",
12469 READ_VREG(HEVC_DECODE_MODE));
12470 hevc_print(hevc, 0,
12471 "HEVC_DECODE_MODE2=0x%x\n",
12472 READ_VREG(HEVC_DECODE_MODE2));
12473 hevc_print(hevc, 0,
12474 "NAL_SEARCH_CTL=0x%x\n",
12475 READ_VREG(NAL_SEARCH_CTL));
12476 hevc_print(hevc, 0,
12477 "HEVC_PARSER_LCU_START=0x%x\n",
12478 READ_VREG(HEVC_PARSER_LCU_START));
12479 hevc_print(hevc, 0,
12480 "HEVC_DECODE_SIZE=0x%x\n",
12481 READ_VREG(HEVC_DECODE_SIZE));
12482 hevc_print(hevc, 0,
12483 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
12484 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
12485 hevc_print(hevc, 0,
12486 "HEVC_STREAM_START_ADDR=0x%x\n",
12487 READ_VREG(HEVC_STREAM_START_ADDR));
12488 hevc_print(hevc, 0,
12489 "HEVC_STREAM_END_ADDR=0x%x\n",
12490 READ_VREG(HEVC_STREAM_END_ADDR));
12491 hevc_print(hevc, 0,
12492 "HEVC_STREAM_LEVEL=0x%x\n",
12493 READ_VREG(HEVC_STREAM_LEVEL));
12494 hevc_print(hevc, 0,
12495 "HEVC_STREAM_WR_PTR=0x%x\n",
12496 READ_VREG(HEVC_STREAM_WR_PTR));
12497 hevc_print(hevc, 0,
12498 "HEVC_STREAM_RD_PTR=0x%x\n",
12499 READ_VREG(HEVC_STREAM_RD_PTR));
12500 hevc_print(hevc, 0,
12501 "PARSER_VIDEO_RP=0x%x\n",
12502 READ_PARSER_REG(PARSER_VIDEO_RP));
12503 hevc_print(hevc, 0,
12504 "PARSER_VIDEO_WP=0x%x\n",
12505 READ_PARSER_REG(PARSER_VIDEO_WP));
12506
12507 if (input_frame_based(vdec) &&
12508 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
12509 ) {
12510 int jj;
12511 if (hevc->chunk && hevc->chunk->block &&
12512 hevc->chunk->size > 0) {
12513 u8 *data = NULL;
12514 if (!hevc->chunk->block->is_mapped)
12515 data = codec_mm_vmap(hevc->chunk->block->start +
12516 hevc->chunk->offset, hevc->chunk->size);
12517 else
12518 data = ((u8 *)hevc->chunk->block->start_virt)
12519 + hevc->chunk->offset;
12520 hevc_print(hevc, 0,
12521 "frame data size 0x%x\n",
12522 hevc->chunk->size);
12523 for (jj = 0; jj < hevc->chunk->size; jj++) {
12524 if ((jj & 0xf) == 0)
12525 hevc_print(hevc,
12526 PRINT_FRAMEBASE_DATA,
12527 "%06x:", jj);
12528 hevc_print_cont(hevc,
12529 PRINT_FRAMEBASE_DATA,
12530 "%02x ", data[jj]);
12531 if (((jj + 1) & 0xf) == 0)
12532 hevc_print_cont(hevc,
12533 PRINT_FRAMEBASE_DATA,
12534 "\n");
12535 }
12536
12537 if (!hevc->chunk->block->is_mapped)
12538 codec_mm_unmap_phyaddr(data);
12539 }
12540 }
12541
12542}
12543
12544
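/*
 * ammvdec_h265_probe() - probe for the multi-instance decoder.  It wires
 * the vdec_s callbacks (run/run_ready/reset/irq/dump_state), picks the
 * vframe provider name (VFM path, Dolby Vision dual layer or per-instance),
 * takes double_write_mode and buffer sizes from the ptr config when
 * present, sets up the MMU/bmmu boxes and requests the decoder cores.
 */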
12545static int ammvdec_h265_probe(struct platform_device *pdev)
12546{
12547
12548 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12549 struct hevc_state_s *hevc = NULL;
12550 int ret;
12551#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12552 int config_val;
12553#endif
12554 if (pdata == NULL) {
12555 pr_info("\nammvdec_h265 memory resource undefined.\n");
12556 return -EFAULT;
12557 }
12558
12559 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
12560 sizeof(struct hevc_state_s), GFP_KERNEL); */
12561 hevc = vmalloc(sizeof(struct hevc_state_s));
12562 if (hevc == NULL) {
12563 pr_info("\nammvdec_h265 device data allocation failed\n");
12564 return -ENOMEM;
12565 }
12566 memset(hevc, 0, sizeof(struct hevc_state_s));
12567
12568 /* the ctx from v4l2 driver. */
12569 hevc->v4l2_ctx = pdata->private;
12570
12571 pdata->private = hevc;
12572 pdata->dec_status = vh265_dec_status;
12573 /* pdata->set_trickmode = set_trickmode; */
12574 pdata->run_ready = run_ready;
12575 pdata->run = run;
12576 pdata->reset = reset;
12577 pdata->irq_handler = vh265_irq_cb;
12578 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
12579 pdata->dump_state = vh265_dump_state;
12580
12581 hevc->index = pdev->id;
12582 hevc->m_ins_flag = 1;
12583
12584 if (pdata->use_vfm_path) {
12585 snprintf(pdata->vf_provider_name,
12586 VDEC_PROVIDER_NAME_SIZE,
12587 VFM_DEC_PROVIDER_NAME);
12588 hevc->frameinfo_enable = 1;
12589 }
12590#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12591 else if (vdec_dual(pdata)) {
12592 struct hevc_state_s *hevc_pair = NULL;
12593
12594 if (dv_toggle_prov_name) /*debug purpose*/
12595 snprintf(pdata->vf_provider_name,
12596 VDEC_PROVIDER_NAME_SIZE,
12597 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
12598 VFM_DEC_DVEL_PROVIDER_NAME);
12599 else
12600 snprintf(pdata->vf_provider_name,
12601 VDEC_PROVIDER_NAME_SIZE,
12602 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
12603 VFM_DEC_DVBL_PROVIDER_NAME);
12604 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
12605 if (pdata->master)
12606 hevc_pair = (struct hevc_state_s *)
12607 pdata->master->private;
12608 else if (pdata->slave)
12609 hevc_pair = (struct hevc_state_s *)
12610 pdata->slave->private;
12611 if (hevc_pair)
12612 hevc->shift_byte_count_lo =
12613 hevc_pair->shift_byte_count_lo;
12614 }
12615#endif
12616 else
12617 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
12618 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
12619
12620 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
12621 &vh265_vf_provider, pdata);
12622
12623 hevc->provider_name = pdata->vf_provider_name;
12624 platform_set_drvdata(pdev, pdata);
12625
12626 hevc->platform_dev = pdev;
12627
12628 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
12629 pdata->config && pdata->config_len) {
12630#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12631		/* use ptr config for double_write_mode, etc. */
12632 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
12633
12634 if (get_config_int(pdata->config, "hevc_double_write_mode",
12635 &config_val) == 0)
12636 hevc->double_write_mode = config_val;
12637 else
12638 hevc->double_write_mode = double_write_mode;
12639
12640 if (get_config_int(pdata->config, "save_buffer_mode",
12641 &config_val) == 0)
12642 hevc->save_buffer_mode = config_val;
12643 else
12644 hevc->save_buffer_mode = 0;
12645
12646 /*use ptr config for max_pic_w, etc*/
12647 if (get_config_int(pdata->config, "hevc_buf_width",
12648 &config_val) == 0) {
12649 hevc->max_pic_w = config_val;
12650 }
12651 if (get_config_int(pdata->config, "hevc_buf_height",
12652 &config_val) == 0) {
12653 hevc->max_pic_h = config_val;
12654 }
12655
12656#endif
12657 } else {
12658 if (pdata->sys_info)
12659 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12660 else {
12661 hevc->vh265_amstream_dec_info.width = 0;
12662 hevc->vh265_amstream_dec_info.height = 0;
12663 hevc->vh265_amstream_dec_info.rate = 30;
12664 }
12665 hevc->double_write_mode = double_write_mode;
12666 }
12667 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
12668		hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
12669 else
12670 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
12671
12672 if (mmu_enable_force == 0) {
12673 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
12674 hevc->mmu_enable = 0;
12675 else
12676 hevc->mmu_enable = 1;
12677 }
12678
12679 hevc->is_used_v4l = (((unsigned long)
12680 hevc->vh265_amstream_dec_info.param & 0x80) >> 7);
12681 if (hevc->is_used_v4l) {
12682 hevc->mmu_enable = (((unsigned long) // scatter mem
12683 hevc->vh265_amstream_dec_info.param & 0x100) >> 8);
12684 if (!hevc->mmu_enable)
12685 hevc->double_write_mode = 0x10;
12686
12687 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
12688 "%s v4: enable mmu %d.\n",
12689 __func__, hevc->mmu_enable);
12690 }
12691
12692 if (init_mmu_buffers(hevc) < 0) {
12693 hevc_print(hevc, 0,
12694 "\n 265 mmu init failed!\n");
12695 mutex_unlock(&vh265_mutex);
12696 /* devm_kfree(&pdev->dev, (void *)hevc);*/
12697 if (hevc)
12698 vfree((void *)hevc);
12699 pdata->dec_status = NULL;
12700 return -EFAULT;
12701 }
12702#if 0
12703 hevc->buf_start = pdata->mem_start;
12704 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
12705#else
12706
12707 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
12708 BMMU_WORKSPACE_ID, work_buf_size,
12709 DRIVER_NAME, &hevc->buf_start);
12710 if (ret < 0) {
12711 uninit_mmu_buffers(hevc);
12712 /* devm_kfree(&pdev->dev, (void *)hevc); */
12713 if (hevc)
12714 vfree((void *)hevc);
12715 pdata->dec_status = NULL;
12716 mutex_unlock(&vh265_mutex);
12717 return ret;
12718 }
12719 hevc->buf_size = work_buf_size;
12720#endif
12721 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12722 (parser_sei_enable & 0x100) == 0)
12723 parser_sei_enable = 7;
12724 hevc->init_flag = 0;
12725 hevc->first_sc_checked = 0;
12726 hevc->uninit_list = 0;
12727 hevc->fatal_error = 0;
12728 hevc->show_frame_num = 0;
12729
12730 /*
12731 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
12732 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
12733 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
12734 */
12735 if (get_dbg_flag(hevc)) {
12736 hevc_print(hevc, 0,
12737 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12738 hevc->buf_start, hevc->buf_size);
12739 }
12740
12741 hevc_print(hevc, 0,
12742 "dynamic_buf_num_margin=%d\n",
12743 hevc->dynamic_buf_num_margin);
12744 hevc_print(hevc, 0,
12745 "double_write_mode=%d\n",
12746 hevc->double_write_mode);
12747
12748 hevc->cma_dev = pdata->cma_dev;
12749
12750 if (vh265_init(pdata) < 0) {
12751 hevc_print(hevc, 0,
12752 "\namvdec_h265 init failed.\n");
12753 hevc_local_uninit(hevc);
12754 uninit_mmu_buffers(hevc);
12755 /* devm_kfree(&pdev->dev, (void *)hevc); */
12756 if (hevc)
12757 vfree((void *)hevc);
12758 pdata->dec_status = NULL;
12759 return -ENODEV;
12760 }
12761
12762 vdec_set_prepare_level(pdata, start_decode_buf_level);
12763
12764 /*set the max clk for smooth playing...*/
12765 hevc_source_changed(VFORMAT_HEVC,
12766 3840, 2160, 60);
12767 if (pdata->parallel_dec == 1)
12768 vdec_core_request(pdata, CORE_MASK_HEVC);
12769 else
12770 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
12771 | CORE_MASK_COMBINE);
12772
12773 return 0;
12774}
12775
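/*
 * ammvdec_h265_remove() - stop the instance, release the decoder cores
 * and free the per-instance state allocated in ammvdec_h265_probe().
 */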
12776static int ammvdec_h265_remove(struct platform_device *pdev)
12777{
12778 struct hevc_state_s *hevc =
12779 (struct hevc_state_s *)
12780 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
12781 struct vdec_s *vdec = hw_to_vdec(hevc);
12782
12783 if (hevc == NULL)
12784 return 0;
12785
12786 if (get_dbg_flag(hevc))
12787 hevc_print(hevc, 0, "%s\r\n", __func__);
12788
12789 vmh265_stop(hevc);
12790
12791 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
12792 if (vdec->parallel_dec == 1)
12793 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
12794 else
12795		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12796
12797 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
12798
12799 vfree((void *)hevc);
12800 return 0;
12801}
12802
12803static struct platform_driver ammvdec_h265_driver = {
12804 .probe = ammvdec_h265_probe,
12805 .remove = ammvdec_h265_remove,
12806 .driver = {
12807 .name = MULTI_DRIVER_NAME,
12808#ifdef CONFIG_PM
12809 .pm = &h265_pm_ops,
12810#endif
12811 }
12812};
12813#endif
12814
12815static struct codec_profile_t amvdec_h265_profile = {
12816 .name = "hevc",
12817 .profile = ""
12818};
12819
12820static struct codec_profile_t amvdec_h265_profile_single,
12821 amvdec_h265_profile_mult;
12822
12823static struct mconfig h265_configs[] = {
12824 MC_PU32("use_cma", &use_cma),
12825 MC_PU32("bit_depth_luma", &bit_depth_luma),
12826 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
12827 MC_PU32("video_signal_type", &video_signal_type),
12828#ifdef ERROR_HANDLE_DEBUG
12829 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
12830 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
12831#endif
12832 MC_PU32("radr", &radr),
12833 MC_PU32("rval", &rval),
12834 MC_PU32("dbg_cmd", &dbg_cmd),
12835 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
12836 MC_PU32("endian", &endian),
12837 MC_PU32("step", &step),
12838 MC_PU32("udebug_flag", &udebug_flag),
12839 MC_PU32("decode_pic_begin", &decode_pic_begin),
12840 MC_PU32("slice_parse_begin", &slice_parse_begin),
12841 MC_PU32("nal_skip_policy", &nal_skip_policy),
12842 MC_PU32("i_only_flag", &i_only_flag),
12843 MC_PU32("error_handle_policy", &error_handle_policy),
12844 MC_PU32("error_handle_threshold", &error_handle_threshold),
12845 MC_PU32("error_handle_nal_skip_threshold",
12846 &error_handle_nal_skip_threshold),
12847 MC_PU32("error_handle_system_threshold",
12848 &error_handle_system_threshold),
12849 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
12850 MC_PU32("debug", &debug),
12851 MC_PU32("debug_mask", &debug_mask),
12852 MC_PU32("buffer_mode", &buffer_mode),
12853 MC_PU32("double_write_mode", &double_write_mode),
12854 MC_PU32("buf_alloc_width", &buf_alloc_width),
12855 MC_PU32("buf_alloc_height", &buf_alloc_height),
12856 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
12857 MC_PU32("max_buf_num", &max_buf_num),
12858 MC_PU32("buf_alloc_size", &buf_alloc_size),
12859 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
12860 MC_PU32("mem_map_mode", &mem_map_mode),
12861 MC_PU32("enable_mem_saving", &enable_mem_saving),
12862 MC_PU32("force_w_h", &force_w_h),
12863 MC_PU32("force_fps", &force_fps),
12864 MC_PU32("max_decoding_time", &max_decoding_time),
12865 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
12866 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
12867 MC_PU32("interlace_enable", &interlace_enable),
12868 MC_PU32("pts_unstable", &pts_unstable),
12869 MC_PU32("parser_sei_enable", &parser_sei_enable),
12870 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
12871 MC_PU32("decode_timeout_val", &decode_timeout_val),
12872#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12873 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
12874 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
12875 MC_PU32("dv_debug", &dv_debug),
12876#endif
12877};
12878static struct mconfig_node decoder_265_node;
12879
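/*
 * Module init: pick the work buffer spec (4k/8k capable chips use the
 * larger specs), register both platform drivers, advertise the profile
 * string that matches the chip and register the "hevc"/"h265"/"mh265"
 * codec profiles plus the media.decoder config node.
 */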
12880static int __init amvdec_h265_driver_init_module(void)
12881{
12882 struct BuffInfo_s *p_buf_info;
12883
12884 if (vdec_is_support_4k()) {
12885 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
12886 p_buf_info = &amvh265_workbuff_spec[2];
12887 else
12888 p_buf_info = &amvh265_workbuff_spec[1];
12889 } else
12890 p_buf_info = &amvh265_workbuff_spec[0];
12891
12892 init_buff_spec(NULL, p_buf_info);
12893 work_buf_size =
12894 (p_buf_info->end_adr - p_buf_info->start_adr
12895 + 0xffff) & (~0xffff);
12896
12897 pr_debug("amvdec_h265 module init\n");
12898 error_handle_policy = 0;
12899
12900#ifdef ERROR_HANDLE_DEBUG
12901 dbg_nal_skip_flag = 0;
12902 dbg_nal_skip_count = 0;
12903#endif
12904 udebug_flag = 0;
12905 decode_pic_begin = 0;
12906 slice_parse_begin = 0;
12907 step = 0;
12908 buf_alloc_size = 0;
12909
12910#ifdef MULTI_INSTANCE_SUPPORT
12911 if (platform_driver_register(&ammvdec_h265_driver))
12912 pr_err("failed to register ammvdec_h265 driver\n");
12913
12914#endif
12915 if (platform_driver_register(&amvdec_h265_driver)) {
12916 pr_err("failed to register amvdec_h265 driver\n");
12917 return -ENODEV;
12918 }
12919#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
12920 if (!has_hevc_vdec()) {
12921		/* hevc not supported */
12922 amvdec_h265_profile.name = "hevc_unsupport";
12923 }
12924 if (vdec_is_support_4k()) {
12925 if (is_meson_m8m2_cpu()) {
12926			/* m8m2 supports 4k */
12927 amvdec_h265_profile.profile = "4k";
12928 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
12929 amvdec_h265_profile.profile =
12930 "8k, 8bit, 10bit, dwrite, compressed";
12931		} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
12932 amvdec_h265_profile.profile =
12933 "4k, 8bit, 10bit, dwrite, compressed";
12934 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
12935 amvdec_h265_profile.profile = "4k";
12936 }
12937#endif
12938 if (codec_mm_get_total_size() < 80 * SZ_1M) {
12939 pr_info("amvdec_h265 default mmu enabled.\n");
12940 mmu_enable = 1;
12941 }
12942
12943 vcodec_profile_register(&amvdec_h265_profile);
12944 amvdec_h265_profile_single = amvdec_h265_profile;
12945 amvdec_h265_profile_single.name = "h265";
12946 vcodec_profile_register(&amvdec_h265_profile_single);
12947 amvdec_h265_profile_mult = amvdec_h265_profile;
12948 amvdec_h265_profile_mult.name = "mh265";
12949 vcodec_profile_register(&amvdec_h265_profile_mult);
12950 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
12951 "h265", h265_configs, CONFIG_FOR_RW);
12952 return 0;
12953}
12954
12955static void __exit amvdec_h265_driver_remove_module(void)
12956{
12957 pr_debug("amvdec_h265 module remove.\n");
12958
12959#ifdef MULTI_INSTANCE_SUPPORT
12960 platform_driver_unregister(&ammvdec_h265_driver);
12961#endif
12962 platform_driver_unregister(&amvdec_h265_driver);
12963}
12964
12965/****************************************/
12966/*
12967 *module_param(stat, uint, 0664);
12968 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
12969 */
12970module_param(use_cma, uint, 0664);
12971MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
12972
12973module_param(bit_depth_luma, uint, 0664);
12974MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
12975
12976module_param(bit_depth_chroma, uint, 0664);
12977MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
12978
12979module_param(video_signal_type, uint, 0664);
12980MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
12981
12982#ifdef ERROR_HANDLE_DEBUG
12983module_param(dbg_nal_skip_flag, uint, 0664);
12984MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
12985
12986module_param(dbg_nal_skip_count, uint, 0664);
12987MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
12988#endif
12989
12990module_param(radr, uint, 0664);
12991MODULE_PARM_DESC(radr, "\n radr\n");
12992
12993module_param(rval, uint, 0664);
12994MODULE_PARM_DESC(rval, "\n rval\n");
12995
12996module_param(dbg_cmd, uint, 0664);
12997MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
12998
12999module_param(dump_nal, uint, 0664);
13000MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13001
13002module_param(dbg_skip_decode_index, uint, 0664);
13003MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13004
13005module_param(endian, uint, 0664);
13006MODULE_PARM_DESC(endian, "\n endian\n");
13007
13008module_param(step, uint, 0664);
13009MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13010
13011module_param(decode_pic_begin, uint, 0664);
13012MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13013
13014module_param(slice_parse_begin, uint, 0664);
13015MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13016
13017module_param(nal_skip_policy, uint, 0664);
13018MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
13019
13020module_param(i_only_flag, uint, 0664);
13021MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
13022
13023module_param(fast_output_enable, uint, 0664);
13024MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
13025
13026module_param(error_handle_policy, uint, 0664);
13027MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
13028
13029module_param(error_handle_threshold, uint, 0664);
13030MODULE_PARM_DESC(error_handle_threshold,
13031 "\n amvdec_h265 error_handle_threshold\n");
13032
13033module_param(error_handle_nal_skip_threshold, uint, 0664);
13034MODULE_PARM_DESC(error_handle_nal_skip_threshold,
13035 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
13036
13037module_param(error_handle_system_threshold, uint, 0664);
13038MODULE_PARM_DESC(error_handle_system_threshold,
13039 "\n amvdec_h265 error_handle_system_threshold\n");
13040
13041module_param(error_skip_nal_count, uint, 0664);
13042MODULE_PARM_DESC(error_skip_nal_count,
13043 "\n amvdec_h265 error_skip_nal_count\n");
13044
13045module_param(debug, uint, 0664);
13046MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
13047
13048module_param(debug_mask, uint, 0664);
13049MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
13050
13051module_param(log_mask, uint, 0664);
13052MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
13053
13054module_param(buffer_mode, uint, 0664);
13055MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
13056
13057module_param(double_write_mode, uint, 0664);
13058MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
13059
13060module_param(buf_alloc_width, uint, 0664);
13061MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
13062
13063module_param(buf_alloc_height, uint, 0664);
13064MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
13065
13066module_param(dynamic_buf_num_margin, uint, 0664);
13067MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
13068
13069module_param(max_buf_num, uint, 0664);
13070MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
13071
13072module_param(buf_alloc_size, uint, 0664);
13073MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
13074
13075#ifdef CONSTRAIN_MAX_BUF_NUM
13076module_param(run_ready_max_vf_only_num, uint, 0664);
13077MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");
13078
13079module_param(run_ready_display_q_num, uint, 0664);
13080MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");
13081
13082module_param(run_ready_max_buf_num, uint, 0664);
13083MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
13084#endif
13085
13086#if 0
13087module_param(re_config_pic_flag, uint, 0664);
13088MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
13089#endif
13090
13091module_param(buffer_mode_dbg, uint, 0664);
13092MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");
13093
13094module_param(mem_map_mode, uint, 0664);
13095MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");
13096
13097module_param(enable_mem_saving, uint, 0664);
13098MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");
13099
13100module_param(force_w_h, uint, 0664);
13101MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
13102
13103module_param(force_fps, uint, 0664);
13104MODULE_PARM_DESC(force_fps, "\n force_fps\n");
13105
13106module_param(max_decoding_time, uint, 0664);
13107MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");
13108
13109module_param(prefix_aux_buf_size, uint, 0664);
13110MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");
13111
13112module_param(suffix_aux_buf_size, uint, 0664);
13113MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");
13114
13115module_param(interlace_enable, uint, 0664);
13116MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
13117module_param(pts_unstable, uint, 0664);
13118MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
13119module_param(parser_sei_enable, uint, 0664);
13120MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");
13121
13122#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13123module_param(parser_dolby_vision_enable, uint, 0664);
13124MODULE_PARM_DESC(parser_dolby_vision_enable,
13125 "\n parser_dolby_vision_enable\n");
13126
13127module_param(dolby_meta_with_el, uint, 0664);
13128MODULE_PARM_DESC(dolby_meta_with_el,
13129 "\n dolby_meta_with_el\n");
13130
13131module_param(dolby_el_flush_th, uint, 0664);
13132MODULE_PARM_DESC(dolby_el_flush_th,
13133 "\n dolby_el_flush_th\n");
13134#endif
13135module_param(mmu_enable, uint, 0664);
13136MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");
13137
13138module_param(mmu_enable_force, uint, 0664);
13139MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");
13140
13141#ifdef MULTI_INSTANCE_SUPPORT
13142module_param(start_decode_buf_level, int, 0664);
13143MODULE_PARM_DESC(start_decode_buf_level,
13144 "\n h265 start_decode_buf_level\n");
13145
13146module_param(decode_timeout_val, uint, 0664);
13147MODULE_PARM_DESC(decode_timeout_val,
13148 "\n h265 decode_timeout_val\n");
13149
13150module_param(data_resend_policy, uint, 0664);
13151MODULE_PARM_DESC(data_resend_policy,
13152 "\n h265 data_resend_policy\n");
13153
13154module_param_array(decode_frame_count, uint,
13155 &max_decode_instance_num, 0664);
13156
13157module_param_array(display_frame_count, uint,
13158 &max_decode_instance_num, 0664);
13159
13160module_param_array(max_process_time, uint,
13161 &max_decode_instance_num, 0664);
13162
13163module_param_array(max_get_frame_interval,
13164 uint, &max_decode_instance_num, 0664);
13165
13166module_param_array(run_count, uint,
13167 &max_decode_instance_num, 0664);
13168
13169module_param_array(input_empty, uint,
13170 &max_decode_instance_num, 0664);
13171
13172module_param_array(not_run_ready, uint,
13173 &max_decode_instance_num, 0664);
13174
13175module_param_array(ref_frame_mark_flag, uint,
13176 &max_decode_instance_num, 0664);
13177
13178#endif
13179#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13180module_param(dv_toggle_prov_name, uint, 0664);
13181MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");
13182
13183module_param(dv_debug, uint, 0664);
13184MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");
13185
13186module_param(force_bypass_dvenl, uint, 0664);
13187MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
13188#endif
13189
13190#ifdef AGAIN_HAS_THRESHOLD
13191module_param(again_threshold, uint, 0664);
13192MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
13193#endif
13194
13195module_param(force_disp_pic_index, int, 0664);
13196MODULE_PARM_DESC(force_disp_pic_index,
13197 "\n amvdec_h265 force_disp_pic_index\n");
13198
13199module_param(frmbase_cont_bitlevel, uint, 0664);
13200MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");
13201
13202module_param(udebug_flag, uint, 0664);
13203MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
13204
13205module_param(udebug_pause_pos, uint, 0664);
13206MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
13207
13208module_param(udebug_pause_val, uint, 0664);
13209MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
13210
13211module_param(pre_decode_buf_level, int, 0664);
13212MODULE_PARM_DESC(pre_decode_buf_level, "\n ammvdec_h265 pre_decode_buf_level\n");
13213
13214module_param(udebug_pause_decode_idx, uint, 0664);
13215MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
13216
13217module_param(disp_vframe_valve_level, uint, 0664);
13218MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");
13219
13220module_param(pic_list_debug, uint, 0664);
13221MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");
13222
13223module_param(without_display_mode, uint, 0664);
13224MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");
13225
13226module_init(amvdec_h265_driver_init_module);
13227module_exit(amvdec_h265_driver_remove_module);
13228
13229MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
13230MODULE_LICENSE("GPL");
13231MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");
13232