path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
blob: 37e6969ff58a8e99870d526f9ec07fc31586c4d1
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50
51#define CONSTRAIN_MAX_BUF_NUM
52
53#define SWAP_HEVC_UCODE
54#define DETREFILL_ENABLE
55
56#define AGAIN_HAS_THRESHOLD
57/*#define TEST_NO_BUF*/
58#define HEVC_PIC_STRUCT_SUPPORT
59#define MULTI_INSTANCE_SUPPORT
60#define USE_UNINIT_SEMA
61
62 /* .buf_size = 0x100000*16,
63 //4k2k , 0x100000 per buffer */
64 /* 4096x2304 , 0x120000 per buffer */
65#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
66#define MPRED_4K_MV_BUF_SIZE (0x120000)
67#define MPRED_MV_BUF_SIZE (0x40000)
68
69#define MMU_COMPRESS_HEADER_SIZE 0x48000
70#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
71
72#define MAX_FRAME_4K_NUM 0x1200
73#define MAX_FRAME_8K_NUM (0x1200*4)
74
75//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
76#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
77
78#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
79
80#define HEVC_CM_HEADER_START_ADDR 0x3628
81#define HEVC_SAO_MMU_VH1_ADDR 0x363b
82#define HEVC_SAO_MMU_VH0_ADDR 0x363a
83
84#define HEVC_DBLK_CFGB 0x350b
85#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
86#define SWAP_HEVC_OFFSET (3 * 0x1000)
87
88#define MEM_NAME "codec_265"
89/* #include <mach/am_regs.h> */
90#include <linux/amlogic/media/utils/vdec_reg.h>
91
92#include "../utils/vdec.h"
93#include "../utils/amvdec.h"
94#include <linux/amlogic/media/video_sink/video.h>
95#include <linux/amlogic/media/codec_mm/configs.h>
96
97#define SEND_LMEM_WITH_RPM
98#define SUPPORT_10BIT
99/* #define ERROR_HANDLE_DEBUG */
100
101#ifndef STAT_KTHREAD
102#define STAT_KTHREAD 0x40
103#endif
104
105#ifdef MULTI_INSTANCE_SUPPORT
106#define MAX_DECODE_INSTANCE_NUM 9
107#define MULTI_DRIVER_NAME "ammvdec_h265"
108#endif
109#define DRIVER_NAME "amvdec_h265"
110#define MODULE_NAME "amvdec_h265"
111#define DRIVER_HEADER_NAME "amvdec_h265_header"
112
113#define PUT_INTERVAL (HZ/100)
114#define ERROR_SYSTEM_RESET_COUNT 200
115
116#define PTS_NORMAL 0
117#define PTS_NONE_REF_USE_DURATION 1
118
119#define PTS_MODE_SWITCHING_THRESHOLD 3
120#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
121
122#define DUR2PTS(x) ((x)*90/96)
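/* Worked example (assuming the usual amports convention of frame_dur in
 * 1/96000 s units and PTS at 90 kHz): DUR2PTS(3840), i.e. a 25 fps
 * duration, gives 3840*90/96 = 3600 PTS ticks. Illustration only. */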
123
124#define MAX_SIZE_8K (8192 * 4608)
125#define MAX_SIZE_4K (4096 * 2304)
126
127#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
128#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
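/* Note: these are area thresholds, not exact-size tests. IS_8K_SIZE() is
 * true for anything larger than the 4K area (4096x2304) and IS_4K_SIZE()
 * for anything larger than 1920x1088. */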
129
130#define SEI_UserDataITU_T_T35 4
131
132static struct semaphore h265_sema;
133
134struct hevc_state_s;
135static int hevc_print(struct hevc_state_s *hevc,
136 int debug_flag, const char *fmt, ...);
137static int hevc_print_cont(struct hevc_state_s *hevc,
138 int debug_flag, const char *fmt, ...);
139static int vh265_vf_states(struct vframe_states *states, void *);
140static struct vframe_s *vh265_vf_peek(void *);
141static struct vframe_s *vh265_vf_get(void *);
142static void vh265_vf_put(struct vframe_s *, void *);
143static int vh265_event_cb(int type, void *data, void *private_data);
144
145static int vh265_stop(struct hevc_state_s *hevc);
146#ifdef MULTI_INSTANCE_SUPPORT
147static int vmh265_stop(struct hevc_state_s *hevc);
148static s32 vh265_init(struct vdec_s *vdec);
149static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
150static void reset_process_time(struct hevc_state_s *hevc);
151static void start_process_time(struct hevc_state_s *hevc);
152static void restart_process_time(struct hevc_state_s *hevc);
153static void timeout_process(struct hevc_state_s *hevc);
154#else
155static s32 vh265_init(struct hevc_state_s *hevc);
156#endif
157static void vh265_prot_init(struct hevc_state_s *hevc);
158static int vh265_local_init(struct hevc_state_s *hevc);
159static void vh265_check_timer_func(unsigned long arg);
160static void config_decode_mode(struct hevc_state_s *hevc);
161
162static const char vh265_dec_id[] = "vh265-dev";
163
164#define PROVIDER_NAME "decoder.h265"
165#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
166
167static const struct vframe_operations_s vh265_vf_provider = {
168 .peek = vh265_vf_peek,
169 .get = vh265_vf_get,
170 .put = vh265_vf_put,
171 .event_cb = vh265_event_cb,
172 .vf_states = vh265_vf_states,
173};
174
175static struct vframe_provider_s vh265_vf_prov;
176
177static u32 bit_depth_luma;
178static u32 bit_depth_chroma;
179static u32 video_signal_type;
180
181static int start_decode_buf_level = 0x8000;
182
183static unsigned int decode_timeout_val = 200;
184
185/*data_resend_policy:
 186 bit 0: for stream-based input, resend data when the decoding buf is empty
187*/
188static u32 data_resend_policy = 1;
189
190#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
191/*
192static const char * const video_format_names[] = {
193 "component", "PAL", "NTSC", "SECAM",
194 "MAC", "unspecified", "unspecified", "unspecified"
195};
196
197static const char * const color_primaries_names[] = {
198 "unknown", "bt709", "undef", "unknown",
199 "bt470m", "bt470bg", "smpte170m", "smpte240m",
200 "film", "bt2020"
201};
202
203static const char * const transfer_characteristics_names[] = {
204 "unknown", "bt709", "undef", "unknown",
205 "bt470m", "bt470bg", "smpte170m", "smpte240m",
206 "linear", "log100", "log316", "iec61966-2-4",
207 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
208 "smpte-st-2084", "smpte-st-428"
209};
210
211static const char * const matrix_coeffs_names[] = {
212 "GBR", "bt709", "undef", "unknown",
213 "fcc", "bt470bg", "smpte170m", "smpte240m",
214 "YCgCo", "bt2020nc", "bt2020c"
215};
216*/
217#ifdef SUPPORT_10BIT
218#define HEVC_CM_BODY_START_ADDR 0x3626
219#define HEVC_CM_BODY_LENGTH 0x3627
220#define HEVC_CM_HEADER_LENGTH 0x3629
221#define HEVC_CM_HEADER_OFFSET 0x362b
222#define HEVC_SAO_CTRL9 0x362d
223#define LOSLESS_COMPRESS_MODE
224/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
225/* double_write_mode:
226 * 0, no double write;
227 * 1, 1:1 ratio;
228 * 2, (1/4):(1/4) ratio;
 229 * 3, (1/4):(1/4) ratio, with the compressed frame included as well
230 * 4, (1/2):(1/2) ratio;
231 * 0x10, double write only
 232 * 0x100, if > 1080p, use mode 4, else use mode 1;
 233 * 0x200, if > 1080p, use mode 2, else use mode 1;
 234 * 0x300, if > 720p, use mode 4, else use mode 1;
235 */
236static u32 double_write_mode;
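/* Illustration (see get_double_write_mode() later in this file): with
 * double_write_mode = 0x100, a 3840x2160 stream selects mode 4 (1/2 x 1/2
 * scaled linear output) while a 1920x1080 stream falls back to mode 1
 * (1:1). The 0x200/0x300 settings follow the same pattern. */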
237
238/*#define DECOMP_HEADR_SURGENT*/
239
240static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
241static u32 enable_mem_saving = 1;
242static u32 workaround_enable;
243static u32 force_w_h;
244#endif
245static u32 force_fps;
246static u32 pts_unstable;
247#define H265_DEBUG_BUFMGR 0x01
248#define H265_DEBUG_BUFMGR_MORE 0x02
249#define H265_DEBUG_DETAIL 0x04
250#define H265_DEBUG_REG 0x08
251#define H265_DEBUG_MAN_SEARCH_NAL 0x10
252#define H265_DEBUG_MAN_SKIP_NAL 0x20
253#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
254#define H265_DEBUG_FORCE_CLK 0x80
255#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
256#define H265_DEBUG_NO_DISPLAY 0x200
257#define H265_DEBUG_DISCARD_NAL 0x400
258#define H265_DEBUG_OUT_PTS 0x800
259#define H265_DEBUG_DUMP_PIC_LIST 0x1000
260#define H265_DEBUG_PRINT_SEI 0x2000
261#define H265_DEBUG_PIC_STRUCT 0x4000
262#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
263#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
264#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
265#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
266#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
267#define H265_DEBUG_HW_RESET 0x100000
268#define H265_CFG_CANVAS_IN_DECODE 0x200000
269#define H265_DEBUG_DV 0x400000
270#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
271#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
272#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
273#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
274#ifdef MULTI_INSTANCE_SUPPORT
275#define PRINT_FLAG_ERROR 0x0
276#define IGNORE_PARAM_FROM_CONFIG 0x08000000
277#define PRINT_FRAMEBASE_DATA 0x10000000
278#define PRINT_FLAG_VDEC_STATUS 0x20000000
279#define PRINT_FLAG_VDEC_DETAIL 0x40000000
280#define PRINT_FLAG_V4L_DETAIL 0x80000000
281#endif
282
283#define BUF_POOL_SIZE 32
284#define MAX_BUF_NUM 24
285#define MAX_REF_PIC_NUM 24
286#define MAX_REF_ACTIVE 16
287
288#ifdef MV_USE_FIXED_BUF
289#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
290#define VF_BUFFER_IDX(n) (n)
291#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
292#else
293#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
294#define VF_BUFFER_IDX(n) (n)
295#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
296#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
297#endif
298
299#define HEVC_MV_INFO 0x310d
300#define HEVC_QP_INFO 0x3137
301#define HEVC_SKIP_INFO 0x3136
302
303const u32 h265_version = 201602101;
304static u32 debug_mask = 0xffffffff;
305static u32 log_mask;
306static u32 debug;
307static u32 radr;
308static u32 rval;
309static u32 dbg_cmd;
310static u32 dump_nal;
311static u32 dbg_skip_decode_index;
312static u32 endian = 0xff0;
313#ifdef ERROR_HANDLE_DEBUG
314static u32 dbg_nal_skip_flag;
315 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
316static u32 dbg_nal_skip_count;
317#endif
318/*for debug*/
319/*
320 udebug_flag:
321 bit 0, enable ucode print
322 bit 1, enable ucode detail print
323 bit [31:16] not 0, pos to dump lmem
324 bit 2, pop bits to lmem
325 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
326*/
327static u32 udebug_flag;
328/*
329 when udebug_flag[1:0] is not 0
 330 and udebug_pause_pos is not 0,
 331 udebug_pause_pos gives the pause position
332*/
333static u32 udebug_pause_pos;
334/*
335 when udebug_flag[1:0] is not 0
336 and udebug_pause_pos is not 0,
337 pause only when DEBUG_REG2 is equal to this val
338*/
339static u32 udebug_pause_val;
340
341static u32 udebug_pause_decode_idx;
342
343static u32 decode_pic_begin;
344static uint slice_parse_begin;
345static u32 step;
346static bool is_reset;
347
348#ifdef CONSTRAIN_MAX_BUF_NUM
349static u32 run_ready_max_vf_only_num;
350static u32 run_ready_display_q_num;
351 /*0: not check
352 0xff: work_pic_num
353 */
354static u32 run_ready_max_buf_num = 0xff;
355#endif
356
357static u32 dynamic_buf_num_margin = 7;
358static u32 buf_alloc_width;
359static u32 buf_alloc_height;
360
361static u32 max_buf_num = 16;
362static u32 buf_alloc_size;
363/*static u32 re_config_pic_flag;*/
364/*
365 *bit[0]: 0,
366 *bit[1]: 0, always release cma buffer when stop
367 *bit[1]: 1, never release cma buffer when stop
368 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
 369 *do not release cma buffer if blackout is not 1
370 *
371 *bit[2]: 0, when start decoding, check current displayed buffer
372 * (only for buffer decoded by h265) if blackout is 0
373 * 1, do not check current displayed buffer
374 *
375 *bit[3]: 1, if blackout is not 1, do not release current
376 * displayed cma buffer always.
377 */
378/* set to 1 for fast play;
 379 * set to 8 for other cases of "keep last frame"
380 */
381static u32 buffer_mode = 1;
382
383/* buffer_mode_dbg: debug only*/
384static u32 buffer_mode_dbg = 0xffff0000;
385/**/
386/*
387 *bit[1:0]PB_skip_mode: 0, start decoding at begin;
388 *1, start decoding after first I;
 389 *2, only decode and display non-error pictures;
 390 *3, start decoding and display after IDR, etc.
391 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
392 *only for mode 0 and 1.
393 */
394static u32 nal_skip_policy = 2;
395
396/*
397 *bit 0, 1: only display I picture;
398 *bit 1, 1: only decode I picture;
399 */
400static u32 i_only_flag;
401
402/*
403bit 0, fast output first I picture
404*/
405static u32 fast_output_enable = 1;
406
407static u32 frmbase_cont_bitlevel = 0x60;
408
409/*
 410use_cma: 1, use both reserved memory and cma for buffers
4112, only use cma for buffers
412*/
413static u32 use_cma = 2;
414
415#define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
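/* AUX_BUF_ALIGN() rounds up to a 16-byte boundary, e.g.
 * AUX_BUF_ALIGN(0x1001) == 0x1010 and AUX_BUF_ALIGN(0x1000) == 0x1000. */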
416static u32 prefix_aux_buf_size = (16 * 1024);
417static u32 suffix_aux_buf_size;
418
419static u32 max_decoding_time;
420/*
421 *error handling
422 */
423/*error_handle_policy:
424 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
425 *1, skip error_skip_nal_count nals before error recovery;
426 *bit 1 (valid only when bit0 == 1):
427 *1, wait vps/sps/pps after error recovery;
428 *bit 2 (valid only when bit0 == 0):
429 *0, auto search after error recovery (hevc_recover() called);
430 *1, manual search after error recovery
431 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
432 *
433 *bit 4: 0, set error_mark after reset/recover
434 * 1, do not set error_mark after reset/recover
435 *bit 5: 0, check total lcu for every picture
436 * 1, do not check total lcu
437 *bit 6: 0, do not check head error
438 * 1, check head error
439 *
440 */
441
442static u32 error_handle_policy;
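/* Example reading of the bit map above: error_handle_policy = 0x3 means
 * bit 0 = 1 (skip error_skip_nal_count NALs before recovery) and bit 1 = 1
 * (wait for vps/sps/pps after recovery). Illustration only. */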
443static u32 error_skip_nal_count = 6;
444static u32 error_handle_threshold = 30;
445static u32 error_handle_nal_skip_threshold = 10;
446static u32 error_handle_system_threshold = 30;
447static u32 interlace_enable = 1;
448static u32 fr_hint_status;
449
450 /*
451 *parser_sei_enable:
452 * bit 0, sei;
453 * bit 1, sei_suffix (fill aux buf)
454 * bit 2, fill sei to aux buf (when bit 0 is 1)
455 * bit 8, debug flag
456 */
457static u32 parser_sei_enable;
458#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
459static u32 parser_dolby_vision_enable = 1;
460static u32 dolby_meta_with_el;
461static u32 dolby_el_flush_th = 2;
462#endif
463/* this is only for h265 mmu enable */
464
465static u32 mmu_enable = 1;
466static u32 mmu_enable_force;
467static u32 work_buf_size;
468static unsigned int force_disp_pic_index;
469static unsigned int disp_vframe_valve_level;
470static int pre_decode_buf_level = 0x1000;
471static unsigned int pic_list_debug;
472
473
474#ifdef MULTI_INSTANCE_SUPPORT
475static unsigned int max_decode_instance_num
476 = MAX_DECODE_INSTANCE_NUM;
477static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
478static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
479static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
480static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
481static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
482static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
483static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
484static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
485{1, 1, 1, 1, 1, 1, 1, 1, 1};
486
487#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
488static unsigned char get_idx(struct hevc_state_s *hevc);
489#endif
490
491#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
492static u32 dv_toggle_prov_name;
493
494static u32 dv_debug;
495
496static u32 force_bypass_dvenl;
497#endif
498#endif
499
500
501#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
502#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
503#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
504#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
505#else
506#define get_dbg_flag(hevc) debug
507#define get_dbg_flag2(hevc) debug
508#define is_log_enable(hevc) (log_mask ? 1 : 0)
509#define get_valid_double_write_mode(hevc) double_write_mode
510#define get_buf_alloc_width(hevc) buf_alloc_width
511#define get_buf_alloc_height(hevc) buf_alloc_height
512#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
513#endif
514#define get_buffer_mode(hevc) buffer_mode
515
516
517DEFINE_SPINLOCK(lock);
518struct task_struct *h265_task = NULL;
519#undef DEBUG_REG
520#ifdef DEBUG_REG
521void WRITE_VREG_DBG(unsigned adr, unsigned val)
522{
523 if (debug & H265_DEBUG_REG)
524 pr_info("%s(%x, %x)\n", __func__, adr, val);
525 WRITE_VREG(adr, val);
526}
527
528#undef WRITE_VREG
529#define WRITE_VREG WRITE_VREG_DBG
530#endif
531
532static DEFINE_MUTEX(vh265_mutex);
533
534static DEFINE_MUTEX(vh265_log_mutex);
535
536static struct vdec_info *gvs;
537
538static u32 without_display_mode;
539
540/**************************************************
541 *
542 *h265 buffer management include
543 *
544 ***************************************************
545 */
546enum NalUnitType {
547 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
548 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
549
550 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
551 /* Current name in the spec: TSA_R */
552 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
553
554 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
555 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
556
557 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
558 /* Current name in the spec: RADL_R */
559 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
560
561 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
562 /* Current name in the spec: RASL_R */
563 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
564
565 NAL_UNIT_RESERVED_10,
566 NAL_UNIT_RESERVED_11,
567 NAL_UNIT_RESERVED_12,
568 NAL_UNIT_RESERVED_13,
569 NAL_UNIT_RESERVED_14,
570 NAL_UNIT_RESERVED_15,
571
572 /* Current name in the spec: BLA_W_LP */
573 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
574 /* Current name in the spec: BLA_W_DLP */
575 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
576 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
577 /* Current name in the spec: IDR_W_DLP */
578 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
579 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
580 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
581 NAL_UNIT_RESERVED_22,
582 NAL_UNIT_RESERVED_23,
583
584 NAL_UNIT_RESERVED_24,
585 NAL_UNIT_RESERVED_25,
586 NAL_UNIT_RESERVED_26,
587 NAL_UNIT_RESERVED_27,
588 NAL_UNIT_RESERVED_28,
589 NAL_UNIT_RESERVED_29,
590 NAL_UNIT_RESERVED_30,
591 NAL_UNIT_RESERVED_31,
592
593 NAL_UNIT_VPS, /* 32 */
594 NAL_UNIT_SPS, /* 33 */
595 NAL_UNIT_PPS, /* 34 */
596 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
597 NAL_UNIT_EOS, /* 36 */
598 NAL_UNIT_EOB, /* 37 */
599 NAL_UNIT_FILLER_DATA, /* 38 */
600 NAL_UNIT_SEI, /* 39 Prefix SEI */
601 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
602 NAL_UNIT_RESERVED_41,
603 NAL_UNIT_RESERVED_42,
604 NAL_UNIT_RESERVED_43,
605 NAL_UNIT_RESERVED_44,
606 NAL_UNIT_RESERVED_45,
607 NAL_UNIT_RESERVED_46,
608 NAL_UNIT_RESERVED_47,
609 NAL_UNIT_UNSPECIFIED_48,
610 NAL_UNIT_UNSPECIFIED_49,
611 NAL_UNIT_UNSPECIFIED_50,
612 NAL_UNIT_UNSPECIFIED_51,
613 NAL_UNIT_UNSPECIFIED_52,
614 NAL_UNIT_UNSPECIFIED_53,
615 NAL_UNIT_UNSPECIFIED_54,
616 NAL_UNIT_UNSPECIFIED_55,
617 NAL_UNIT_UNSPECIFIED_56,
618 NAL_UNIT_UNSPECIFIED_57,
619 NAL_UNIT_UNSPECIFIED_58,
620 NAL_UNIT_UNSPECIFIED_59,
621 NAL_UNIT_UNSPECIFIED_60,
622 NAL_UNIT_UNSPECIFIED_61,
623 NAL_UNIT_UNSPECIFIED_62,
624 NAL_UNIT_UNSPECIFIED_63,
625 NAL_UNIT_INVALID,
626};
627
628/* --------------------------------------------------- */
629/* Amrisc Software Interrupt */
630/* --------------------------------------------------- */
631#define AMRISC_STREAM_EMPTY_REQ 0x01
632#define AMRISC_PARSER_REQ 0x02
633#define AMRISC_MAIN_REQ 0x04
634
635/* --------------------------------------------------- */
636/* HEVC_DEC_STATUS define */
637/* --------------------------------------------------- */
638#define HEVC_DEC_IDLE 0x0
639#define HEVC_NAL_UNIT_VPS 0x1
640#define HEVC_NAL_UNIT_SPS 0x2
641#define HEVC_NAL_UNIT_PPS 0x3
642#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
643#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
644#define HEVC_SLICE_DECODING 0x6
645#define HEVC_NAL_UNIT_SEI 0x7
646#define HEVC_SLICE_SEGMENT_DONE 0x8
647#define HEVC_NAL_SEARCH_DONE 0x9
648#define HEVC_DECPIC_DATA_DONE 0xa
649#define HEVC_DECPIC_DATA_ERROR 0xb
650#define HEVC_SEI_DAT 0xc
651#define HEVC_SEI_DAT_DONE 0xd
652#define HEVC_NAL_DECODE_DONE 0xe
653#define HEVC_OVER_DECODE 0xf
654
655#define HEVC_DATA_REQUEST 0x12
656
657#define HEVC_DECODE_BUFEMPTY 0x20
658#define HEVC_DECODE_TIMEOUT 0x21
659#define HEVC_SEARCH_BUFEMPTY 0x22
660#define HEVC_DECODE_OVER_SIZE 0x23
661#define HEVC_DECODE_BUFEMPTY2 0x24
662#define HEVC_FIND_NEXT_PIC_NAL 0x50
663#define HEVC_FIND_NEXT_DVEL_NAL 0x51
664
665#define HEVC_DUMP_LMEM 0x30
666
667#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
668#define HEVC_DISCARD_NAL 0xf0
669#define HEVC_ACTION_DEC_CONT 0xfd
670#define HEVC_ACTION_ERROR 0xfe
671#define HEVC_ACTION_DONE 0xff
672
673/* --------------------------------------------------- */
674/* Include "parser_cmd.h" */
675/* --------------------------------------------------- */
676#define PARSER_CMD_SKIP_CFG_0 0x0000090b
677
678#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
679
680#define PARSER_CMD_SKIP_CFG_2 0x001b1910
681
682#define PARSER_CMD_NUMBER 37
683
684/**************************************************
685 *
686 *h265 buffer management
687 *
688 ***************************************************
689 */
690/* #define BUFFER_MGR_ONLY */
691/* #define CONFIG_HEVC_CLK_FORCED_ON */
692/* #define ENABLE_SWAP_TEST */
693#define MCRCC_ENABLE
694#define INVALID_POC 0x80000000
695
696#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
697#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
698#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
699#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
700#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
701#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
702#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
703#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
704#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
705#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
706#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
707#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
708#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
709#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
710#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
711#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
712#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
713#ifdef ENABLE_SWAP_TEST
714#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
715#endif
716
717/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
718/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
719#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
720 /*do not define ENABLE_SWAP_TEST*/
721#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
722#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
723
724#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
725#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
726/*
727 *ucode parser/search control
728 *bit 0: 0, header auto parse; 1, header manual parse
 729 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
730 *bit [3:2]: valid when bit1==0;
731 *0, auto skip nal before first vps/sps/pps/idr;
732 *1, auto skip nal before first vps/sps/pps
733 *2, auto skip nal before first vps/sps/pps,
734 * and not decode until the first I slice (with slice address of 0)
735 *
736 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
737 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
738 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
739 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
740 *bit [17]: for NAL_SEI when bit0 is 0:
741 * 0, do not parse/fetch SEI in ucode;
742 * 1, parse/fetch SEI in ucode
743 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
744 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
 745 * 1, fetch NAL_SEI_SUFFIX data to aux buf
746 *bit [19]:
747 * 0, parse NAL_SEI in ucode
748 * 1, fetch NAL_SEI to aux buf
749 *bit [20]: for DOLBY_VISION_META
750 * 0, do not fetch DOLBY_VISION_META to aux buf
751 * 1, fetch DOLBY_VISION_META to aux buf
752 */
753#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
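/* Example composition from the bit map above: (1 << 17) | (1 << 18) keeps
 * automatic header parsing (bit 0 = 0) while asking the ucode to parse
 * prefix SEI and fetch suffix SEI data into the aux buffer. Illustration
 * only; the driver writes its own values at runtime. */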
754 /*read only*/
755#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
756 /*
757 [15 : 8] rps_set_id
758 [7 : 0] start_decoding_flag
759 */
760#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
761 /*set before start decoder*/
762#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
763#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
764#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
765
766#define DECODE_MODE_SINGLE 0x0
767#define DECODE_MODE_MULTI_FRAMEBASE 0x1
768#define DECODE_MODE_MULTI_STREAMBASE 0x2
769#define DECODE_MODE_MULTI_DVBAL 0x3
770#define DECODE_MODE_MULTI_DVENL 0x4
771
772#define MAX_INT 0x7FFFFFFF
773
774#define RPM_BEGIN 0x100
775#define modification_list_cur 0x148
776#define RPM_END 0x180
777
778#define RPS_USED_BIT 14
779/* MISC_FLAG0 */
780#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
781#define PCM_ENABLE_FLAG_BIT 1
782#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
783#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
784#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
785#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
786#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
787#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
788#define SLICE_SAO_LUMA_FLAG_BIT 8
789#define SLICE_SAO_CHROMA_FLAG_BIT 9
790#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
791
792union param_u {
793 struct {
794 unsigned short data[RPM_END - RPM_BEGIN];
795 } l;
796 struct {
797 /* from ucode lmem, do not change this struct */
798 unsigned short CUR_RPS[0x10];
799 unsigned short num_ref_idx_l0_active;
800 unsigned short num_ref_idx_l1_active;
801 unsigned short slice_type;
802 unsigned short slice_temporal_mvp_enable_flag;
803 unsigned short dependent_slice_segment_flag;
804 unsigned short slice_segment_address;
805 unsigned short num_title_rows_minus1;
806 unsigned short pic_width_in_luma_samples;
807 unsigned short pic_height_in_luma_samples;
808 unsigned short log2_min_coding_block_size_minus3;
809 unsigned short log2_diff_max_min_coding_block_size;
810 unsigned short log2_max_pic_order_cnt_lsb_minus4;
811 unsigned short POClsb;
812 unsigned short collocated_from_l0_flag;
813 unsigned short collocated_ref_idx;
814 unsigned short log2_parallel_merge_level;
815 unsigned short five_minus_max_num_merge_cand;
816 unsigned short sps_num_reorder_pics_0;
817 unsigned short modification_flag;
818 unsigned short tiles_enabled_flag;
819 unsigned short num_tile_columns_minus1;
820 unsigned short num_tile_rows_minus1;
821 unsigned short tile_width[8];
822 unsigned short tile_height[8];
823 unsigned short misc_flag0;
824 unsigned short pps_beta_offset_div2;
825 unsigned short pps_tc_offset_div2;
826 unsigned short slice_beta_offset_div2;
827 unsigned short slice_tc_offset_div2;
828 unsigned short pps_cb_qp_offset;
829 unsigned short pps_cr_qp_offset;
830 unsigned short first_slice_segment_in_pic_flag;
831 unsigned short m_temporalId;
832 unsigned short m_nalUnitType;
833
834 unsigned short vui_num_units_in_tick_hi;
835 unsigned short vui_num_units_in_tick_lo;
836 unsigned short vui_time_scale_hi;
837 unsigned short vui_time_scale_lo;
838 unsigned short bit_depth;
839 unsigned short profile_etc;
840 unsigned short sei_frame_field_info;
841 unsigned short video_signal_type;
842 unsigned short modification_list[0x20];
843 unsigned short conformance_window_flag;
844 unsigned short conf_win_left_offset;
845 unsigned short conf_win_right_offset;
846 unsigned short conf_win_top_offset;
847 unsigned short conf_win_bottom_offset;
848 unsigned short chroma_format_idc;
849 unsigned short color_description;
850 unsigned short aspect_ratio_idc;
851 unsigned short sar_width;
852 unsigned short sar_height;
853 unsigned short sps_max_dec_pic_buffering_minus1_0;
854 } p;
855};
856
857#define RPM_BUF_SIZE (0x80*2)
858/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
859#define LMEM_BUF_SIZE (0x500 * 2)
860
861struct buff_s {
862 u32 buf_start;
863 u32 buf_size;
864 u32 buf_end;
865};
866
867struct BuffInfo_s {
868 u32 max_width;
869 u32 max_height;
870 unsigned int start_adr;
871 unsigned int end_adr;
872 struct buff_s ipp;
873 struct buff_s sao_abv;
874 struct buff_s sao_vb;
875 struct buff_s short_term_rps;
876 struct buff_s vps;
877 struct buff_s sps;
878 struct buff_s pps;
879 struct buff_s sao_up;
880 struct buff_s swap_buf;
881 struct buff_s swap_buf2;
882 struct buff_s scalelut;
883 struct buff_s dblk_para;
884 struct buff_s dblk_data;
885 struct buff_s dblk_data2;
886 struct buff_s mmu_vbh;
887 struct buff_s cm_header;
888 struct buff_s mpred_above;
889#ifdef MV_USE_FIXED_BUF
890 struct buff_s mpred_mv;
891#endif
892 struct buff_s rpm;
893 struct buff_s lmem;
894};
895#define WORK_BUF_SPEC_NUM 3
896static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
897 {
898 /* 8M bytes */
899 .max_width = 1920,
900 .max_height = 1088,
901 .ipp = {
902 /* IPP work space calculation :
903 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
904 */
905 .buf_size = 0x4000,
906 },
907 .sao_abv = {
908 .buf_size = 0x30000,
909 },
910 .sao_vb = {
911 .buf_size = 0x30000,
912 },
913 .short_term_rps = {
914 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
915 * total 64x16x2 = 2048 bytes (0x800)
916 */
917 .buf_size = 0x800,
918 },
919 .vps = {
920 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
921 * total 0x0800 bytes
922 */
923 .buf_size = 0x800,
924 },
925 .sps = {
926 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
927 * total 0x0800 bytes
928 */
929 .buf_size = 0x800,
930 },
931 .pps = {
932 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
933 * total 0x2000 bytes
934 */
935 .buf_size = 0x2000,
936 },
937 .sao_up = {
938 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
939 * each has 16 bytes total 0x2800 bytes
940 */
941 .buf_size = 0x2800,
942 },
943 .swap_buf = {
944 /* 256cyclex64bit = 2K bytes 0x800
945 * (only 144 cycles valid)
946 */
947 .buf_size = 0x800,
948 },
949 .swap_buf2 = {
950 .buf_size = 0x800,
951 },
952 .scalelut = {
953 /* support up to 32 SCALELUT 1024x32 =
954 * 32Kbytes (0x8000)
955 */
956 .buf_size = 0x8000,
957 },
958 .dblk_para = {
959#ifdef SUPPORT_10BIT
960 .buf_size = 0x40000,
961#else
962 /* DBLK -> Max 256(4096/16) LCU, each para
963 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
964 */
965 .buf_size = 0x20000,
966#endif
967 },
968 .dblk_data = {
969 .buf_size = 0x40000,
970 },
971 .dblk_data2 = {
972 .buf_size = 0x40000,
973 }, /*dblk data for adapter*/
974 .mmu_vbh = {
975 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
976 },
977#if 0
978 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
979 .buf_size = MMU_COMPRESS_HEADER_SIZE *
980 (MAX_REF_PIC_NUM + 1),
981 },
982#endif
983 .mpred_above = {
984 .buf_size = 0x8000,
985 },
986#ifdef MV_USE_FIXED_BUF
987 .mpred_mv = {/* 1080p, 0x40000 per buffer */
988 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
989 },
990#endif
991 .rpm = {
992 .buf_size = RPM_BUF_SIZE,
993 },
994 .lmem = {
995 .buf_size = 0x500 * 2,
996 }
997 },
998 {
999 .max_width = 4096,
1000 .max_height = 2048,
1001 .ipp = {
1002 /* IPP work space calculation :
1003 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1004 */
1005 .buf_size = 0x4000,
1006 },
1007 .sao_abv = {
1008 .buf_size = 0x30000,
1009 },
1010 .sao_vb = {
1011 .buf_size = 0x30000,
1012 },
1013 .short_term_rps = {
1014 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1015 * total 64x16x2 = 2048 bytes (0x800)
1016 */
1017 .buf_size = 0x800,
1018 },
1019 .vps = {
1020 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1021 * total 0x0800 bytes
1022 */
1023 .buf_size = 0x800,
1024 },
1025 .sps = {
1026 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1027 * total 0x0800 bytes
1028 */
1029 .buf_size = 0x800,
1030 },
1031 .pps = {
1032 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1033 * total 0x2000 bytes
1034 */
1035 .buf_size = 0x2000,
1036 },
1037 .sao_up = {
1038 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1039 * each has 16 bytes total 0x2800 bytes
1040 */
1041 .buf_size = 0x2800,
1042 },
1043 .swap_buf = {
1044 /* 256cyclex64bit = 2K bytes 0x800
1045 * (only 144 cycles valid)
1046 */
1047 .buf_size = 0x800,
1048 },
1049 .swap_buf2 = {
1050 .buf_size = 0x800,
1051 },
1052 .scalelut = {
1053 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1054 * (0x8000)
1055 */
1056 .buf_size = 0x8000,
1057 },
1058 .dblk_para = {
1059 /* DBLK -> Max 256(4096/16) LCU, each para
1060 * 512bytes(total:0x20000),
1061 * data 1024bytes(total:0x40000)
1062 */
1063 .buf_size = 0x20000,
1064 },
1065 .dblk_data = {
1066 .buf_size = 0x80000,
1067 },
1068 .dblk_data2 = {
1069 .buf_size = 0x80000,
1070 }, /*dblk data for adapter*/
1071 .mmu_vbh = {
1072 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1073 },
1074#if 0
1075 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1076 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1077 (MAX_REF_PIC_NUM + 1),
1078 },
1079#endif
1080 .mpred_above = {
1081 .buf_size = 0x8000,
1082 },
1083#ifdef MV_USE_FIXED_BUF
1084 .mpred_mv = {
1085 /* .buf_size = 0x100000*16,
1086 //4k2k , 0x100000 per buffer */
1087 /* 4096x2304 , 0x120000 per buffer */
1088 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1089 },
1090#endif
1091 .rpm = {
1092 .buf_size = RPM_BUF_SIZE,
1093 },
1094 .lmem = {
1095 .buf_size = 0x500 * 2,
1096 }
1097 },
1098
1099 {
1100 .max_width = 4096*2,
1101 .max_height = 2048*2,
1102 .ipp = {
1103 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1104 .buf_size = 0x4000*2,
1105 },
1106 .sao_abv = {
1107 .buf_size = 0x30000*2,
1108 },
1109 .sao_vb = {
1110 .buf_size = 0x30000*2,
1111 },
1112 .short_term_rps = {
1113 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1114 .buf_size = 0x800,
1115 },
1116 .vps = {
1117 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1118 .buf_size = 0x800,
1119 },
1120 .sps = {
1121 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1122 .buf_size = 0x800,
1123 },
1124 .pps = {
1125 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1126 .buf_size = 0x2000,
1127 },
1128 .sao_up = {
1129 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1130 .buf_size = 0x2800*2,
1131 },
1132 .swap_buf = {
1133 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1134 .buf_size = 0x800,
1135 },
1136 .swap_buf2 = {
1137 .buf_size = 0x800,
1138 },
1139 .scalelut = {
1140 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1141 .buf_size = 0x8000*2,
1142 },
1143 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1144 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1145 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1146 .mmu_vbh = {
1147 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1148 },
1149#if 0
1150 .cm_header = {
1151 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1152 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1153 },
1154#endif
1155 .mpred_above = {
1156 .buf_size = 0x8000*2,
1157 },
1158#ifdef MV_USE_FIXED_BUF
1159 .mpred_mv = {
1160 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1161 },
1162#endif
1163 .rpm = {
1164 .buf_size = RPM_BUF_SIZE,
1165 },
1166 .lmem = {
1167 .buf_size = 0x500 * 2,
1168 },
1169 }
1170};
1171
1172static void init_buff_spec(struct hevc_state_s *hevc,
1173 struct BuffInfo_s *buf_spec)
1174{
1175 buf_spec->ipp.buf_start = buf_spec->start_adr;
1176 buf_spec->sao_abv.buf_start =
1177 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1178
1179 buf_spec->sao_vb.buf_start =
1180 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1181 buf_spec->short_term_rps.buf_start =
1182 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1183 buf_spec->vps.buf_start =
1184 buf_spec->short_term_rps.buf_start +
1185 buf_spec->short_term_rps.buf_size;
1186 buf_spec->sps.buf_start =
1187 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1188 buf_spec->pps.buf_start =
1189 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1190 buf_spec->sao_up.buf_start =
1191 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1192 buf_spec->swap_buf.buf_start =
1193 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1194 buf_spec->swap_buf2.buf_start =
1195 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1196 buf_spec->scalelut.buf_start =
1197 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1198 buf_spec->dblk_para.buf_start =
1199 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1200 buf_spec->dblk_data.buf_start =
1201 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1202 buf_spec->dblk_data2.buf_start =
1203 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1204 buf_spec->mmu_vbh.buf_start =
1205 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1206 buf_spec->mpred_above.buf_start =
1207 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1208#ifdef MV_USE_FIXED_BUF
1209 buf_spec->mpred_mv.buf_start =
1210 buf_spec->mpred_above.buf_start +
1211 buf_spec->mpred_above.buf_size;
1212
1213 buf_spec->rpm.buf_start =
1214 buf_spec->mpred_mv.buf_start +
1215 buf_spec->mpred_mv.buf_size;
1216#else
1217 buf_spec->rpm.buf_start =
1218 buf_spec->mpred_above.buf_start +
1219 buf_spec->mpred_above.buf_size;
1220#endif
1221 buf_spec->lmem.buf_start =
1222 buf_spec->rpm.buf_start +
1223 buf_spec->rpm.buf_size;
1224 buf_spec->end_adr =
1225 buf_spec->lmem.buf_start +
1226 buf_spec->lmem.buf_size;
1227
1228 if (hevc && get_dbg_flag2(hevc)) {
1229 hevc_print(hevc, 0,
1230 "%s workspace (%x %x) size = %x\n", __func__,
1231 buf_spec->start_adr, buf_spec->end_adr,
1232 buf_spec->end_adr - buf_spec->start_adr);
1233
1234 hevc_print(hevc, 0,
1235 "ipp.buf_start :%x\n",
1236 buf_spec->ipp.buf_start);
1237 hevc_print(hevc, 0,
1238 "sao_abv.buf_start :%x\n",
1239 buf_spec->sao_abv.buf_start);
1240 hevc_print(hevc, 0,
1241 "sao_vb.buf_start :%x\n",
1242 buf_spec->sao_vb.buf_start);
1243 hevc_print(hevc, 0,
1244 "short_term_rps.buf_start :%x\n",
1245 buf_spec->short_term_rps.buf_start);
1246 hevc_print(hevc, 0,
1247 "vps.buf_start :%x\n",
1248 buf_spec->vps.buf_start);
1249 hevc_print(hevc, 0,
1250 "sps.buf_start :%x\n",
1251 buf_spec->sps.buf_start);
1252 hevc_print(hevc, 0,
1253 "pps.buf_start :%x\n",
1254 buf_spec->pps.buf_start);
1255 hevc_print(hevc, 0,
1256 "sao_up.buf_start :%x\n",
1257 buf_spec->sao_up.buf_start);
1258 hevc_print(hevc, 0,
1259 "swap_buf.buf_start :%x\n",
1260 buf_spec->swap_buf.buf_start);
1261 hevc_print(hevc, 0,
1262 "swap_buf2.buf_start :%x\n",
1263 buf_spec->swap_buf2.buf_start);
1264 hevc_print(hevc, 0,
1265 "scalelut.buf_start :%x\n",
1266 buf_spec->scalelut.buf_start);
1267 hevc_print(hevc, 0,
1268 "dblk_para.buf_start :%x\n",
1269 buf_spec->dblk_para.buf_start);
1270 hevc_print(hevc, 0,
1271 "dblk_data.buf_start :%x\n",
1272 buf_spec->dblk_data.buf_start);
1273 hevc_print(hevc, 0,
1274 "dblk_data2.buf_start :%x\n",
1275 buf_spec->dblk_data2.buf_start);
1276 hevc_print(hevc, 0,
1277 "mpred_above.buf_start :%x\n",
1278 buf_spec->mpred_above.buf_start);
1279#ifdef MV_USE_FIXED_BUF
1280 hevc_print(hevc, 0,
1281 "mpred_mv.buf_start :%x\n",
1282 buf_spec->mpred_mv.buf_start);
1283#endif
1284 if ((get_dbg_flag2(hevc)
1285 &
1286 H265_DEBUG_SEND_PARAM_WITH_REG)
1287 == 0) {
1288 hevc_print(hevc, 0,
1289 "rpm.buf_start :%x\n",
1290 buf_spec->rpm.buf_start);
1291 }
1292 }
1293
1294}
1295
1296enum SliceType {
1297 B_SLICE,
1298 P_SLICE,
1299 I_SLICE
1300};
1301
1302/*USE_BUF_BLOCK*/
1303struct BUF_s {
1304 unsigned long start_adr;
1305 unsigned int size;
1306 int used_flag;
1307 unsigned int y_size;
1308 ulong v4l_ref_buf_addr;
1309} /*BUF_t */;
1310
1311/* level 6, 6.1 maximum slice number is 800; other is 200 */
1312#define MAX_SLICE_NUM 800
1313struct PIC_s {
1314 int index;
1315 int scatter_alloc;
1316 int BUF_index;
1317 int mv_buf_index;
1318 int POC;
1319 int decode_idx;
1320 int slice_type;
1321 int RefNum_L0;
1322 int RefNum_L1;
1323 int num_reorder_pic;
1324 int stream_offset;
1325 unsigned char referenced;
1326 unsigned char output_mark;
1327 unsigned char recon_mark;
1328 unsigned char output_ready;
1329 unsigned char error_mark;
1330 //dis_mark = 0:discard mark,dis_mark = 1:no discard mark
1331 unsigned char dis_mark;
1332 /**/ int slice_idx;
1333 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1334 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1335 /*buffer */
1336 unsigned int header_adr;
1337#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1338 unsigned char dv_enhance_exist;
1339#endif
1340 char *aux_data_buf;
1341 int aux_data_size;
1342 unsigned long cma_alloc_addr;
1343 struct page *alloc_pages;
1344 unsigned int mpred_mv_wr_start_addr;
1345 unsigned int mc_y_adr;
1346 unsigned int mc_u_v_adr;
1347#ifdef SUPPORT_10BIT
1348 /*unsigned int comp_body_size;*/
1349 unsigned int dw_y_adr;
1350 unsigned int dw_u_v_adr;
1351#endif
1352 int mc_canvas_y;
1353 int mc_canvas_u_v;
1354 int width;
1355 int height;
1356
1357 int y_canvas_index;
1358 int uv_canvas_index;
1359#ifdef MULTI_INSTANCE_SUPPORT
1360 struct canvas_config_s canvas_config[2];
1361#endif
1362#ifdef SUPPORT_10BIT
1363 int mem_saving_mode;
1364 u32 bit_depth_luma;
1365 u32 bit_depth_chroma;
1366#endif
1367#ifdef LOSLESS_COMPRESS_MODE
1368 unsigned int losless_comp_body_size;
1369#endif
1370 unsigned char pic_struct;
1371 int vf_ref;
1372
1373 u32 pts;
1374 u64 pts64;
1375 u64 timestamp;
1376
1377 u32 aspect_ratio_idc;
1378 u32 sar_width;
1379 u32 sar_height;
1380 u32 double_write_mode;
1381 u32 video_signal_type;
1382 unsigned short conformance_window_flag;
1383 unsigned short conf_win_left_offset;
1384 unsigned short conf_win_right_offset;
1385 unsigned short conf_win_top_offset;
1386 unsigned short conf_win_bottom_offset;
1387 unsigned short chroma_format_idc;
1388
 1389 /* picture qos information */
1390 int max_qp;
1391 int avg_qp;
1392 int min_qp;
1393 int max_skip;
1394 int avg_skip;
1395 int min_skip;
1396 int max_mv;
1397 int min_mv;
1398 int avg_mv;
1399} /*PIC_t */;
1400
1401#define MAX_TILE_COL_NUM 10
1402#define MAX_TILE_ROW_NUM 20
1403struct tile_s {
1404 int width;
1405 int height;
1406 int start_cu_x;
1407 int start_cu_y;
1408
1409 unsigned int sao_vb_start_addr;
1410 unsigned int sao_abv_start_addr;
1411};
1412
1413#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1414#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1415#define SEI_HDR10PLUS_MASK 0x00000004
1416
1417#define VF_POOL_SIZE 32
1418
1419#ifdef MULTI_INSTANCE_SUPPORT
1420#define DEC_RESULT_NONE 0
1421#define DEC_RESULT_DONE 1
1422#define DEC_RESULT_AGAIN 2
1423#define DEC_RESULT_CONFIG_PARAM 3
1424#define DEC_RESULT_ERROR 4
1425#define DEC_INIT_PICLIST 5
1426#define DEC_UNINIT_PICLIST 6
1427#define DEC_RESULT_GET_DATA 7
1428#define DEC_RESULT_GET_DATA_RETRY 8
1429#define DEC_RESULT_EOS 9
1430#define DEC_RESULT_FORCE_EXIT 10
1431#define DEC_RESULT_FREE_CANVAS 11
1432
1433static void vh265_work(struct work_struct *work);
1434static void vh265_timeout_work(struct work_struct *work);
1435static void vh265_notify_work(struct work_struct *work);
1436
1437#endif
1438
1439struct debug_log_s {
1440 struct list_head list;
1441 uint8_t data; /*will alloc more size*/
1442};
1443
1444struct hevc_state_s {
1445#ifdef MULTI_INSTANCE_SUPPORT
1446 struct platform_device *platform_dev;
1447 void (*vdec_cb)(struct vdec_s *, void *);
1448 void *vdec_cb_arg;
1449 struct vframe_chunk_s *chunk;
1450 int dec_result;
1451 struct work_struct work;
1452 struct work_struct timeout_work;
1453 struct work_struct notify_work;
1454 struct work_struct set_clk_work;
1455 /* timeout handle */
1456 unsigned long int start_process_time;
1457 unsigned int last_lcu_idx;
1458 unsigned int decode_timeout_count;
1459 unsigned int timeout_num;
1460#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1461 unsigned char switch_dvlayer_flag;
1462 unsigned char no_switch_dvlayer_count;
1463 unsigned char bypass_dvenl_enable;
1464 unsigned char bypass_dvenl;
1465#endif
1466 unsigned char start_parser_type;
1467 /*start_decoding_flag:
1468 vps/pps/sps/idr info from ucode*/
1469 unsigned char start_decoding_flag;
1470 unsigned char rps_set_id;
1471 unsigned char eos;
1472 int pic_decoded_lcu_idx;
1473 u8 over_decode;
1474 u8 empty_flag;
1475#endif
1476 struct vframe_s vframe_dummy;
1477 char *provider_name;
1478 int index;
1479 struct device *cma_dev;
1480 unsigned char m_ins_flag;
1481 unsigned char dolby_enhance_flag;
1482 unsigned long buf_start;
1483 u32 buf_size;
1484 u32 mv_buf_size;
1485
1486 struct BuffInfo_s work_space_buf_store;
1487 struct BuffInfo_s *work_space_buf;
1488
1489 u8 aux_data_dirty;
1490 u32 prefix_aux_size;
1491 u32 suffix_aux_size;
1492 void *aux_addr;
1493 void *rpm_addr;
1494 void *lmem_addr;
1495 dma_addr_t aux_phy_addr;
1496 dma_addr_t rpm_phy_addr;
1497 dma_addr_t lmem_phy_addr;
1498
1499 unsigned int pic_list_init_flag;
1500 unsigned int use_cma_flag;
1501
1502 unsigned short *rpm_ptr;
1503 unsigned short *lmem_ptr;
1504 unsigned short *debug_ptr;
1505 int debug_ptr_size;
1506 int pic_w;
1507 int pic_h;
1508 int lcu_x_num;
1509 int lcu_y_num;
1510 int lcu_total;
1511 int lcu_size;
1512 int lcu_size_log2;
1513 int lcu_x_num_pre;
1514 int lcu_y_num_pre;
1515 int first_pic_after_recover;
1516
1517 int num_tile_col;
1518 int num_tile_row;
1519 int tile_enabled;
1520 int tile_x;
1521 int tile_y;
1522 int tile_y_x;
1523 int tile_start_lcu_x;
1524 int tile_start_lcu_y;
1525 int tile_width_lcu;
1526 int tile_height_lcu;
1527
1528 int slice_type;
1529 unsigned int slice_addr;
1530 unsigned int slice_segment_addr;
1531
1532 unsigned char interlace_flag;
1533 unsigned char curr_pic_struct;
1534 unsigned char frame_field_info_present_flag;
1535
1536 unsigned short sps_num_reorder_pics_0;
1537 unsigned short misc_flag0;
1538 int m_temporalId;
1539 int m_nalUnitType;
1540 int TMVPFlag;
1541 int isNextSliceSegment;
1542 int LDCFlag;
1543 int m_pocRandomAccess;
1544 int plevel;
1545 int MaxNumMergeCand;
1546
1547 int new_pic;
1548 int new_tile;
1549 int curr_POC;
1550 int iPrevPOC;
1551#ifdef MULTI_INSTANCE_SUPPORT
1552 int decoded_poc;
1553 struct PIC_s *decoding_pic;
1554#endif
1555 int iPrevTid0POC;
1556 int list_no;
1557 int RefNum_L0;
1558 int RefNum_L1;
1559 int ColFromL0Flag;
1560 int LongTerm_Curr;
1561 int LongTerm_Col;
1562 int Col_POC;
1563 int LongTerm_Ref;
1564#ifdef MULTI_INSTANCE_SUPPORT
1565 int m_pocRandomAccess_bak;
1566 int curr_POC_bak;
1567 int iPrevPOC_bak;
1568 int iPrevTid0POC_bak;
1569 unsigned char start_parser_type_bak;
1570 unsigned char start_decoding_flag_bak;
1571 unsigned char rps_set_id_bak;
1572 int pic_decoded_lcu_idx_bak;
1573 int decode_idx_bak;
1574#endif
1575 struct PIC_s *cur_pic;
1576 struct PIC_s *col_pic;
1577 int skip_flag;
1578 int decode_idx;
1579 int slice_idx;
1580 unsigned char have_vps;
1581 unsigned char have_sps;
1582 unsigned char have_pps;
1583 unsigned char have_valid_start_slice;
1584 unsigned char wait_buf;
1585 unsigned char error_flag;
1586 unsigned int error_skip_nal_count;
1587 long used_4k_num;
1588
1589 unsigned char
1590 ignore_bufmgr_error; /* bit 0, for decoding;
1591 bit 1, for displaying
1592 bit 1 must be set if bit 0 is 1*/
1593 int PB_skip_mode;
1594 int PB_skip_count_after_decoding;
1595#ifdef SUPPORT_10BIT
1596 int mem_saving_mode;
1597#endif
1598#ifdef LOSLESS_COMPRESS_MODE
1599 unsigned int losless_comp_body_size;
1600#endif
1601 int pts_mode;
1602 int last_lookup_pts;
1603 int last_pts;
1604 u64 last_lookup_pts_us64;
1605 u64 last_pts_us64;
1606 u32 shift_byte_count_lo;
1607 u32 shift_byte_count_hi;
1608 int pts_mode_switching_count;
1609 int pts_mode_recovery_count;
1610
1611 int pic_num;
1612
1613 /**/
1614 union param_u param;
1615
1616 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1617
1618 struct timer_list timer;
1619 struct BUF_s m_BUF[BUF_POOL_SIZE];
1620 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1621 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1622
1623 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1624 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1625 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1626 struct vframe_s vfpool[VF_POOL_SIZE];
1627
1628 u32 stat;
1629 u32 frame_width;
1630 u32 frame_height;
1631 u32 frame_dur;
1632 u32 frame_ar;
1633 u32 bit_depth_luma;
1634 u32 bit_depth_chroma;
1635 u32 video_signal_type;
1636 u32 video_signal_type_debug;
1637 u32 saved_resolution;
1638 bool get_frame_dur;
1639 u32 error_watchdog_count;
1640 u32 error_skip_nal_wt_cnt;
1641 u32 error_system_watchdog_count;
1642
1643#ifdef DEBUG_PTS
1644 unsigned long pts_missed;
1645 unsigned long pts_hit;
1646#endif
1647 struct dec_sysinfo vh265_amstream_dec_info;
1648 unsigned char init_flag;
1649 unsigned char first_sc_checked;
1650 unsigned char uninit_list;
1651 u32 start_decoding_time;
1652
1653 int show_frame_num;
1654#ifdef USE_UNINIT_SEMA
1655 struct semaphore h265_uninit_done_sema;
1656#endif
1657 int fatal_error;
1658
1659
1660 u32 sei_present_flag;
1661 void *frame_mmu_map_addr;
1662 dma_addr_t frame_mmu_map_phy_addr;
1663 unsigned int mmu_mc_buf_start;
1664 unsigned int mmu_mc_buf_end;
1665 unsigned int mmu_mc_start_4k_adr;
1666 void *mmu_box;
1667 void *bmmu_box;
1668 int mmu_enable;
1669
1670 unsigned int dec_status;
1671
1672 /* data for SEI_MASTER_DISPLAY_COLOR */
1673 unsigned int primaries[3][2];
1674 unsigned int white_point[2];
1675 unsigned int luminance[2];
1676 /* data for SEI_CONTENT_LIGHT_LEVEL */
1677 unsigned int content_light_level[2];
1678
1679 struct PIC_s *pre_top_pic;
1680 struct PIC_s *pre_bot_pic;
1681
1682#ifdef MULTI_INSTANCE_SUPPORT
1683 int double_write_mode;
1684 int dynamic_buf_num_margin;
1685 int start_action;
1686 int save_buffer_mode;
1687#endif
1688 u32 i_only;
1689 struct list_head log_list;
1690 u32 ucode_pause_pos;
1691 u32 start_shift_bytes;
1692
1693 u32 vf_pre_count;
1694 u32 vf_get_count;
1695 u32 vf_put_count;
1696#ifdef SWAP_HEVC_UCODE
1697 dma_addr_t mc_dma_handle;
1698 void *mc_cpu_addr;
1699 int swap_size;
1700 ulong swap_addr;
1701#endif
1702#ifdef DETREFILL_ENABLE
1703 dma_addr_t detbuf_adr;
1704 u16 *detbuf_adr_virt;
1705 u8 delrefill_check;
1706#endif
1707 u8 head_error_flag;
1708 int valve_count;
1709 struct firmware_s *fw;
1710 int max_pic_w;
1711 int max_pic_h;
1712#ifdef AGAIN_HAS_THRESHOLD
1713 u8 next_again_flag;
1714 u32 pre_parser_wr_ptr;
1715#endif
1716 u32 ratio_control;
1717 u32 first_pic_flag;
1718 u32 decode_size;
1719 struct mutex chunks_mutex;
1720 int need_cache_size;
1721 u64 sc_start_time;
1722 u32 skip_first_nal;
1723 bool is_swap;
1724 bool is_4k;
1725 int frameinfo_enable;
1726 struct vframe_qos_s vframe_qos;
1727 bool is_used_v4l;
1728 void *v4l2_ctx;
1729 bool v4l_params_parsed;
1730} /*hevc_stru_t */;
1731
1732#ifdef AGAIN_HAS_THRESHOLD
1733u32 again_threshold;
1734#endif
1735#ifdef SEND_LMEM_WITH_RPM
1736#define get_lmem_params(hevc, ladr) \
1737 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
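/* The lmem dump stores the four 16-bit words of each 64-bit group in
 * reverse order, so get_lmem_params() remaps an address to
 * (ladr & ~3) + (3 - (ladr & 3)); e.g. ladr 0x41 reads lmem_ptr[0x42]. */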
1738
1739
1740static int get_frame_mmu_map_size(void)
1741{
1742 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1743 return (MAX_FRAME_8K_NUM * 4);
1744
1745 return (MAX_FRAME_4K_NUM * 4);
1746}
1747
1748static int is_oversize(int w, int h)
1749{
1750 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1751 MAX_SIZE_8K : MAX_SIZE_4K;
1752
1753 if (w < 0 || h < 0)
1754 return true;
1755
1756 if (h != 0 && (w > max / h))
1757 return true;
1758
1759 return false;
1760}
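/* The width check is written as (w > max / h) rather than (w * h > max) so
 * the multiply cannot overflow a 32-bit int; max is 8192*4608 on SM1 and
 * newer chips, 4096*2304 otherwise. */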
1761
1762void check_head_error(struct hevc_state_s *hevc)
1763{
1764#define pcm_enabled_flag 0x040
1765#define pcm_sample_bit_depth_luma 0x041
1766#define pcm_sample_bit_depth_chroma 0x042
1767 hevc->head_error_flag = 0;
1768 if ((error_handle_policy & 0x40) == 0)
1769 return;
1770 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1771 uint16_t pcm_depth_luma = get_lmem_params(
1772 hevc, pcm_sample_bit_depth_luma);
1773 uint16_t pcm_sample_chroma = get_lmem_params(
1774 hevc, pcm_sample_bit_depth_chroma);
1775 if (pcm_depth_luma >
1776 hevc->bit_depth_luma ||
1777 pcm_sample_chroma >
1778 hevc->bit_depth_chroma) {
1779 hevc_print(hevc, 0,
1780 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1781 pcm_depth_luma,
1782 pcm_sample_chroma,
1783 hevc->bit_depth_luma,
1784 hevc->bit_depth_chroma);
1785 hevc->head_error_flag = 1;
1786 }
1787 }
1788}
1789#endif
1790
1791#ifdef SUPPORT_10BIT
 1792/* Lossless compression body buffer size 4K per 64x32 (jt) */
1793static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1794 int width, int height, int mem_saving_mode)
1795{
1796 int width_x64;
1797 int height_x32;
1798 int bsize;
1799
1800 width_x64 = width + 63;
1801 width_x64 >>= 6;
1802
1803 height_x32 = height + 31;
1804 height_x32 >>= 5;
1805 if (mem_saving_mode == 1 && hevc->mmu_enable)
1806 bsize = 3200 * width_x64 * height_x32;
1807 else if (mem_saving_mode == 1)
1808 bsize = 3072 * width_x64 * height_x32;
1809 else
1810 bsize = 4096 * width_x64 * height_x32;
1811
1812 return bsize;
1813}
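/* Rough sizing example for the default (non mem-saving) mode: a 3840x2160
 * picture gives width_x64 = 60, height_x32 = 68, so bsize = 4096*60*68 =
 * 16,711,680 bytes (~16 MB) per compressed body buffer. */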
1814
 1815/* Lossless compression header buffer size 32 bytes per 128x64 (jt) */
1816static int compute_losless_comp_header_size(int width, int height)
1817{
1818 int width_x128;
1819 int height_x64;
1820 int hsize;
1821
1822 width_x128 = width + 127;
1823 width_x128 >>= 7;
1824
1825 height_x64 = height + 63;
1826 height_x64 >>= 6;
1827
1828 hsize = 32*width_x128*height_x64;
1829
1830 return hsize;
1831}
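/* Example: a 3840x2160 picture gives width_x128 = 30, height_x64 = 34,
 * so hsize = 32*30*34 = 32640 bytes per compressed header buffer. */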
1832#endif
1833
1834static int add_log(struct hevc_state_s *hevc,
1835 const char *fmt, ...)
1836{
1837#define HEVC_LOG_BUF 196
1838 struct debug_log_s *log_item;
1839 unsigned char buf[HEVC_LOG_BUF];
1840 int len = 0;
1841 va_list args;
1842 mutex_lock(&vh265_log_mutex);
1843 va_start(args, fmt);
1844 len = sprintf(buf, "<%ld> <%05d> ",
1845 jiffies, hevc->decode_idx);
1846 len += vsnprintf(buf + len,
1847 HEVC_LOG_BUF - len, fmt, args);
1848 va_end(args);
1849 log_item = kmalloc(
1850 sizeof(struct debug_log_s) + len,
1851 GFP_KERNEL);
1852 if (log_item) {
1853 INIT_LIST_HEAD(&log_item->list);
1854 strcpy(&log_item->data, buf);
1855 list_add_tail(&log_item->list,
1856 &hevc->log_list);
1857 }
1858 mutex_unlock(&vh265_log_mutex);
1859 return 0;
1860}
1861
1862static void dump_log(struct hevc_state_s *hevc)
1863{
1864 int i = 0;
1865 struct debug_log_s *log_item, *tmp;
1866 mutex_lock(&vh265_log_mutex);
1867 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1868 hevc_print(hevc, 0,
1869 "[LOG%04d]%s\n",
1870 i++,
1871 &log_item->data);
1872 list_del(&log_item->list);
1873 kfree(log_item);
1874 }
1875 mutex_unlock(&vh265_log_mutex);
1876}
1877
1878static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1879 struct PIC_s *pic)
1880{
1881 if (pic->error_mark
1882 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1883 return 1;
1884 return 0;
1885}
1886
1887static int get_pic_poc(struct hevc_state_s *hevc,
1888 unsigned int idx)
1889{
1890 if (idx != 0xff
1891 && idx < MAX_REF_PIC_NUM
1892 && hevc->m_PIC[idx])
1893 return hevc->m_PIC[idx]->POC;
1894 return INVALID_POC;
1895}
1896
1897#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1898static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1899{
1900 return (hevc->m_ins_flag &&
1901 ((double_write_mode & 0x80000000) == 0)) ?
1902 hevc->double_write_mode :
1903 (double_write_mode & 0x7fffffff);
1904}
1905
1906static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1907{
1908 return (hevc->m_ins_flag &&
1909 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1910 hevc->dynamic_buf_num_margin :
1911 (dynamic_buf_num_margin & 0x7fffffff);
1912}
1913#endif
1914
1915static int get_double_write_mode(struct hevc_state_s *hevc)
1916{
1917 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1918 int w = hevc->pic_w;
1919 int h = hevc->pic_h;
1920 u32 dw = 0x1; /*1:1*/
1921 switch (valid_dw_mode) {
1922 case 0x100:
1923 if (w > 1920 && h > 1088)
1924 dw = 0x4; /*1:2*/
1925 break;
1926 case 0x200:
1927 if (w > 1920 && h > 1088)
1928 dw = 0x2; /*1:4*/
1929 break;
1930 case 0x300:
1931 if (w > 1280 && h > 720)
1932 dw = 0x4; /*1:2*/
1933 break;
1934 default:
1935 dw = valid_dw_mode;
1936 break;
1937 }
1938 return dw;
1939}
1940
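/* Linear downscale factor (per dimension) implied by a double write mode. */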
1941static int get_double_write_ratio(struct hevc_state_s *hevc,
1942 int dw_mode)
1943{
1944 int ratio = 1;
1945 if ((dw_mode == 2) ||
1946 (dw_mode == 3))
1947 ratio = 4;
1948 else if (dw_mode == 4)
1949 ratio = 2;
1950 return ratio;
1951}
1952#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1953static unsigned char get_idx(struct hevc_state_s *hevc)
1954{
1955 return hevc->index;
1956}
1957#endif
1958
1959#undef pr_info
1960#define pr_info printk
1961static int hevc_print(struct hevc_state_s *hevc,
1962 int flag, const char *fmt, ...)
1963{
1964#define HEVC_PRINT_BUF 256
1965 unsigned char buf[HEVC_PRINT_BUF];
1966 int len = 0;
1967#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1968 if (hevc == NULL ||
1969 (flag == 0) ||
1970 ((debug_mask &
1971 (1 << hevc->index))
1972 && (debug & flag))) {
1973#endif
1974 va_list args;
1975
1976 va_start(args, fmt);
1977 if (hevc)
1978 len = sprintf(buf, "[%d]", hevc->index);
1979 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1980 pr_debug("%s", buf);
1981 va_end(args);
1982#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1983 }
1984#endif
1985 return 0;
1986}
1987
1988static int hevc_print_cont(struct hevc_state_s *hevc,
1989 int flag, const char *fmt, ...)
1990{
1991 unsigned char buf[HEVC_PRINT_BUF];
1992 int len = 0;
1993#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1994 if (hevc == NULL ||
1995 (flag == 0) ||
1996 ((debug_mask &
1997 (1 << hevc->index))
1998 && (debug & flag))) {
1999#endif
2000 va_list args;
2001
2002 va_start(args, fmt);
2003 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2004 pr_info("%s", buf);
2005 va_end(args);
2006#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2007 }
2008#endif
2009 return 0;
2010}
2011
2012static void put_mv_buf(struct hevc_state_s *hevc,
2013 struct PIC_s *pic);
2014
2015static void update_vf_memhandle(struct hevc_state_s *hevc,
2016 struct vframe_s *vf, struct PIC_s *pic);
2017
2018static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2019
2020static void release_aux_data(struct hevc_state_s *hevc,
2021 struct PIC_s *pic);
2022static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2023
2024#ifdef MULTI_INSTANCE_SUPPORT
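/*
 * Snapshot the parser state at the start of a picture so that
 * restore_decode_state() can roll back (discarding the partially decoded
 * picture) and decode it again once more input data arrives.
 */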
2025static void backup_decode_state(struct hevc_state_s *hevc)
2026{
2027 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2028 hevc->curr_POC_bak = hevc->curr_POC;
2029 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2030 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2031 hevc->start_parser_type_bak = hevc->start_parser_type;
2032 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2033 hevc->rps_set_id_bak = hevc->rps_set_id;
2034 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2035 hevc->decode_idx_bak = hevc->decode_idx;
2036
2037}
2038
2039static void restore_decode_state(struct hevc_state_s *hevc)
2040{
2041 struct vdec_s *vdec = hw_to_vdec(hevc);
2042 if (!vdec_has_more_input(vdec)) {
2043 hevc->pic_decoded_lcu_idx =
2044 READ_VREG(HEVC_PARSER_LCU_START)
2045 & 0xffffff;
2046 return;
2047 }
2048 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2049 "%s: discard pic index 0x%x\n",
2050 __func__, hevc->decoding_pic ?
2051 hevc->decoding_pic->index : 0xff);
2052 if (hevc->decoding_pic) {
2053 hevc->decoding_pic->error_mark = 0;
2054 hevc->decoding_pic->output_ready = 0;
2055 hevc->decoding_pic->output_mark = 0;
2056 hevc->decoding_pic->referenced = 0;
2057 hevc->decoding_pic->POC = INVALID_POC;
2058 put_mv_buf(hevc, hevc->decoding_pic);
2059 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2060 release_aux_data(hevc, hevc->decoding_pic);
2061 hevc->decoding_pic = NULL;
2062 }
2063 hevc->decode_idx = hevc->decode_idx_bak;
2064 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2065 hevc->curr_POC = hevc->curr_POC_bak;
2066 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2067 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2068 hevc->start_parser_type = hevc->start_parser_type_bak;
2069 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2070 hevc->rps_set_id = hevc->rps_set_id_bak;
2071 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2072
2073 if (hevc->pic_list_init_flag == 1)
2074 hevc->pic_list_init_flag = 0;
2075 /*if (hevc->decode_idx == 0)
2076 hevc->start_decoding_flag = 0;*/
2077
2078 hevc->slice_idx = 0;
2079 hevc->used_4k_num = -1;
2080}
2081#endif
2082
2083static void hevc_init_stru(struct hevc_state_s *hevc,
2084 struct BuffInfo_s *buf_spec_i)
2085{
2086 //int i;
2087 INIT_LIST_HEAD(&hevc->log_list);
2088 hevc->work_space_buf = buf_spec_i;
2089 hevc->prefix_aux_size = 0;
2090 hevc->suffix_aux_size = 0;
2091 hevc->aux_addr = NULL;
2092 hevc->rpm_addr = NULL;
2093 hevc->lmem_addr = NULL;
2094
2095 hevc->curr_POC = INVALID_POC;
2096
2097 hevc->pic_list_init_flag = 0;
2098 hevc->use_cma_flag = 0;
2099 hevc->decode_idx = 0;
2100 hevc->slice_idx = 0;
2101 hevc->new_pic = 0;
2102 hevc->new_tile = 0;
2103 hevc->iPrevPOC = 0;
2104 hevc->list_no = 0;
2105 /* int m_uiMaxCUWidth = 1<<7; */
2106 /* int m_uiMaxCUHeight = 1<<7; */
2107 hevc->m_pocRandomAccess = MAX_INT;
2108 hevc->tile_enabled = 0;
2109 hevc->tile_x = 0;
2110 hevc->tile_y = 0;
2111 hevc->iPrevTid0POC = 0;
2112 hevc->slice_addr = 0;
2113 hevc->slice_segment_addr = 0;
2114 hevc->skip_flag = 0;
2115 hevc->misc_flag0 = 0;
2116
2117 hevc->cur_pic = NULL;
2118 hevc->col_pic = NULL;
2119 hevc->wait_buf = 0;
2120 hevc->error_flag = 0;
2121 hevc->head_error_flag = 0;
2122 hevc->error_skip_nal_count = 0;
2123 hevc->have_vps = 0;
2124 hevc->have_sps = 0;
2125 hevc->have_pps = 0;
2126 hevc->have_valid_start_slice = 0;
2127
2128 hevc->pts_mode = PTS_NORMAL;
2129 hevc->last_pts = 0;
2130 hevc->last_lookup_pts = 0;
2131 hevc->last_pts_us64 = 0;
2132 hevc->last_lookup_pts_us64 = 0;
2133 hevc->pts_mode_switching_count = 0;
2134 hevc->pts_mode_recovery_count = 0;
2135
2136 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2137 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2138 if (hevc->PB_skip_mode == 0)
2139 hevc->ignore_bufmgr_error = 0x1;
2140 else
2141 hevc->ignore_bufmgr_error = 0x0;
2142
2143 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
2144 vdec_schedule_work(&hevc->work);
2145 /*for (i = 0; i < MAX_REF_PIC_NUM; i++)
2146 hevc->m_PIC[i] = NULL;*/
2147
2148 hevc->pic_num = 0;
2149 hevc->lcu_x_num_pre = 0;
2150 hevc->lcu_y_num_pre = 0;
2151 hevc->first_pic_after_recover = 0;
2152
2153 hevc->pre_top_pic = NULL;
2154 hevc->pre_bot_pic = NULL;
2155
2156 hevc->sei_present_flag = 0;
2157 hevc->valve_count = 0;
2158 hevc->first_pic_flag = 0;
2159#ifdef MULTI_INSTANCE_SUPPORT
2160 hevc->decoded_poc = INVALID_POC;
2161 hevc->start_process_time = 0;
2162 hevc->last_lcu_idx = 0;
2163 hevc->decode_timeout_count = 0;
2164 hevc->timeout_num = 0;
2165 hevc->eos = 0;
2166 hevc->pic_decoded_lcu_idx = -1;
2167 hevc->over_decode = 0;
2168 hevc->used_4k_num = -1;
2169 hevc->start_decoding_flag = 0;
2170 hevc->rps_set_id = 0;
2171 backup_decode_state(hevc);
2172#endif
2173#ifdef DETREFILL_ENABLE
2174 hevc->detbuf_adr = 0;
2175 hevc->detbuf_adr_virt = NULL;
2176#endif
2177}
2178
2179static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2180static int H265_alloc_mmu(struct hevc_state_s *hevc,
2181 struct PIC_s *new_pic, unsigned short bit_depth,
2182 unsigned int *mmu_index_adr);
2183
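/*
 * DETREFILL: workaround for corrupted compressed (CM) body data.  delrefill()
 * parses the error positions reported in HEVC_ASSIST_SCRATCH_3 and
 * corrRefillWithAmrisc() repairs the affected CTUs through the SAO debug
 * interface, using detbuf as a bounce buffer.
 */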
2184#ifdef DETREFILL_ENABLE
2185#define DETREFILL_BUF_SIZE (4 * 0x4000)
2186#define HEVC_SAO_DBG_MODE0 0x361e
2187#define HEVC_SAO_DBG_MODE1 0x361f
2188#define HEVC_SAO_CTRL10 0x362e
2189#define HEVC_SAO_CTRL11 0x362f
2190static int init_detrefill_buf(struct hevc_state_s *hevc)
2191{
2192 if (hevc->detbuf_adr_virt)
2193 return 0;
2194
2195 hevc->detbuf_adr_virt =
2196 (void *)dma_alloc_coherent(amports_get_dma_device(),
2197 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2198 GFP_KERNEL);
2199
2200 if (hevc->detbuf_adr_virt == NULL) {
2201 pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2202 return -1;
2203 }
2204 return 0;
2205}
2206
2207static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2208{
2209 if (hevc->detbuf_adr_virt) {
2210 dma_free_coherent(amports_get_dma_device(),
2211 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2212 hevc->detbuf_adr);
2213
2214 hevc->detbuf_adr_virt = NULL;
2215 hevc->detbuf_adr = 0;
2216 }
2217}
2218
2219/*
2220 * convert uncompressed frame buffer data from/to ddr
2221 */
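/*
 * direction == 0: unpack one 8x4 block of 10-bit luma/chroma samples from the
 * packed cmBodyBuf words; otherwise pack blk8x4Luma/Cb/Cr back into cmBodyBuf.
 */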
2222static void convUnc8x4blk(uint16_t* blk8x4Luma,
2223 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2224{
2225 if (direction == 0) {
2226 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2227 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2228 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2229 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2230 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2231 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2232 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2233 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2234 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2235 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2236 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2237 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2238 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2239 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2240 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2241 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2242 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2243 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2244
2245 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2246 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2247 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2248 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2249 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2250 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2251 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2252 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2253 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2254 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2255 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2256 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2257 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2258 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2259 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2260 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2261 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2262 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2263
2264 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2265 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2266 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2267 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2268 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2269 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2270 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2271 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2272 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2273 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2274 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2275 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2276 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2277 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2278 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2279 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2280 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2281 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2282
2283 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2284 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2285 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2286 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2287 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2288 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2289 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2290 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2291 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2292 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2293 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2294 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2295 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2296 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2297 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2298 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2299 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2300 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2301 } else {
2302 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2303 blk8x4Luma[3 + 0 * 8];
2304 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2305 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2306 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2307 (blk8x4Luma[3 + 3 * 8] >> 2);
2308 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2309 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2310 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2311 (blk8x4Luma[7 + 2 * 8] >>4);
2312 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2313 blk8x4Cb[0 + 0 * 4];
2314 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2315 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2316 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2317
2318 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2319 blk8x4Luma[0 + 0 * 8];
2320 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2321 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2322 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2323 (blk8x4Luma[0 + 1 * 8] >> 2);
2324 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2325 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2326 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2327 (blk8x4Luma[0 + 2 * 8] >>4);
2328 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2329 blk8x4Luma[2 + 2 * 8];
2330 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2331 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2332 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2333
2334 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2335 blk8x4Luma[4 + 0 * 8];
2336 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2337 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2338 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2339 (blk8x4Luma[4 + 1 * 8] >> 2);
2340 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2341 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2342 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2343 (blk8x4Luma[4 + 2 * 8] >>4);
2344 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2345 blk8x4Luma[6 + 2 * 8];
2346 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2347 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2348 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2349
2350 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2351 blk8x4Cb[1 + 0 * 4];
2352 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2353 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2354 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2355 (blk8x4Cr[2 + 0 * 4] >> 2);
2356 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2357 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2358 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2359 (blk8x4Cb[1 + 1 * 4] >>4);
2360 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2361 blk8x4Cb[2 + 1 * 4];
2362 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2363 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2364 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2365 }
2366}
2367
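/*
 * Read the body data of the reported 64x64 CTU (and, when available, the CTU
 * above it) into detbuf via the SAO debug registers, reconstruct the damaged
 * samples of each 8x4 block from its left-hand columns, and write the
 * repaired data back to DDR.
 */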
2368static void corrRefillWithAmrisc (
2369 struct hevc_state_s *hevc,
2370 uint32_t cmHeaderBaseAddr,
2371 uint32_t picWidth,
2372 uint32_t ctuPosition)
2373{
2374 int32_t i;
2375 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2376 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2377 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2378
2379 uint16_t cmBodyBuf[32 * 18];
2380
2381 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2382 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2383 uint32_t stride64x64 = pic_width_x64 * 128;
2384 uint32_t addr_offset64x64_abv = stride64x64 *
2385 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2386 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2387 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2388 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2389 unsigned int tmpData32;
2390
2391 uint16_t blkBuf0Y[32];
2392 uint16_t blkBuf0Cb[8];
2393 uint16_t blkBuf0Cr[8];
2394 uint16_t blkBuf1Y[32];
2395 uint16_t blkBuf1Cb[8];
2396 uint16_t blkBuf1Cr[8];
2397 int32_t blkBufCnt = 0;
2398
2399 int32_t blkIdx;
2400
2401 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2402 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2403 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2404 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2405
2406 for (i = 0; i < 32 * 18; i++)
2407 cmBodyBuf[i] = 0;
2408
2409 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2410 "%s, %d\n", __func__, __LINE__);
2411 do {
2412 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2413 } while (tmpData32);
2414 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2415 "%s, %d\n", __func__, __LINE__);
2416
2417 hevc_print(hevc, H265_DEBUG_DETAIL,
2418 "cmBodyBuf from detbuf:\n");
2419 for (i = 0; i < 32 * 18; i++) {
2420 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2421 if (get_dbg_flag(hevc) &
2422 H265_DEBUG_DETAIL) {
2423 if ((i & 0xf) == 0)
2424 hevc_print_cont(hevc, 0, "\n");
2425 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2426 }
2427 }
2428 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2429
2430 for (i = 0; i < 32; i++)
2431 blkBuf0Y[i] = 0;
2432 for (i = 0; i < 8; i++)
2433 blkBuf0Cb[i] = 0;
2434 for (i = 0; i < 8; i++)
2435 blkBuf0Cr[i] = 0;
2436 for (i = 0; i < 32; i++)
2437 blkBuf1Y[i] = 0;
2438 for (i = 0; i < 8; i++)
2439 blkBuf1Cb[i] = 0;
2440 for (i = 0; i < 8; i++)
2441 blkBuf1Cr[i] = 0;
2442
2443 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2444 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2445 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2446 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2447 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2448 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2449 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2450
2451 if (!aboveCtuAvailable && inAboveCtu)
2452 continue;
2453
2454 /* detRefillBuf --> 8x4block*/
2455 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2456
2457 if (restoreEnable) {
2458 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2459 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2460 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2461 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2462 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2463 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2464 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2465 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2466 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2467 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2468 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2469 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2470 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2471 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2472 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2473 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2474 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2475 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2476 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2477 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2478 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2479 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2480 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2481 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2482 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2483 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2484 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2485 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2486 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2487 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2488 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2489 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2490
2491 /*Store data back to DDR*/
2492 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2493 }
2494
2495 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2496 }
2497
2498 hevc_print(hevc, H265_DEBUG_DETAIL,
2499 "cmBodyBuf to detbuf:\n");
2500 for (i = 0; i < 32 * 18; i++) {
2501 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2502 if (get_dbg_flag(hevc) &
2503 H265_DEBUG_DETAIL) {
2504 if ((i & 0xf) == 0)
2505 hevc_print_cont(hevc, 0, "\n");
2506 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2507 }
2508 }
2509 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2510
2511 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2512 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2513 "%s, %d\n", __func__, __LINE__);
2514 do {
2515 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2516 } while (tmpData32);
2517 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2518 "%s, %d\n", __func__, __LINE__);
2519}
2520
2521static void delrefill(struct hevc_state_s *hevc)
2522{
2523 /*
2524 * corrRefill
2525 *
2526 * HEVC_SAO_DBG_MODE0: picGlobalVariable
2527 * [31:30] error number
2528 * [29:20] error2 ([9:7] tilex, [6:0] ctuy)
2529 * [19:10] error1, [9:0] error0 */
2530 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2531 uint32_t errorIdx;
2532 uint32_t errorNum = (detResult>>30);
2533
2534 if (detResult) {
2535 hevc_print(hevc, H265_DEBUG_BUFMGR,
2536 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2537 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2538 uint32_t errorPos = errorIdx * 10;
2539 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2540 uint32_t tilex = (errorResult >> 7) - 1;
2541 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2542 + hevc->m_tile[0][tilex].width - 1;
2543 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2544 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2545 hevc_print(hevc, H265_DEBUG_BUFMGR,
2546 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2547 errorIdx,tilex,ctux,ctux, ctuy,ctuy);
2548 corrRefillWithAmrisc(
2549 hevc,
2550 (uint32_t)hevc->cur_pic->header_adr,
2551 hevc->pic_w,
2552 ctuPosition);
2553 }
2554
2555 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2556 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2557 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2558 }
2559}
2560#endif
2561
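/*
 * Read the 128 RPM parameter words from RPM_CMD_REG: bit 16 flags a valid
 * word, the low 16 bits carry the payload, and writing 0 acknowledges it.
 */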
2562static void get_rpm_param(union param_u *params)
2563{
2564 int i;
2565 unsigned int data32;
2566
2567 for (i = 0; i < 128; i++) {
2568 do {
2569 data32 = READ_VREG(RPM_CMD_REG);
2570 /* hevc_print(hevc, 0, "%x\n", data32); */
2571 } while ((data32 & 0x10000) == 0);
2572 params->l.data[i] = data32 & 0xffff;
2573 /* hevc_print(hevc, 0, "%x\n", data32); */
2574 WRITE_VREG(RPM_CMD_REG, 0);
2575 }
2576}
2577
2578static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2579{
2580 int i;
2581 struct PIC_s *pic;
2582 struct PIC_s *ret_pic = NULL;
2583 if (POC == INVALID_POC)
2584 return NULL;
2585 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2586 pic = hevc->m_PIC[i];
2587 if (pic == NULL || pic->index == -1 ||
2588 pic->BUF_index == -1)
2589 continue;
2590 if (pic->POC == POC) {
2591 if (ret_pic == NULL)
2592 ret_pic = pic;
2593 else {
2594 if (pic->decode_idx > ret_pic->decode_idx)
2595 ret_pic = pic;
2596 }
2597 }
2598 }
2599 return ret_pic;
2600}
2601
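/*
 * Like get_pic_by_POC() but only accepts pictures still marked as referenced;
 * falls back to any picture with the requested POC (with a warning) so that
 * decoding can continue.
 */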
2602static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2603{
2604 int i;
2605 struct PIC_s *pic;
2606 struct PIC_s *ret_pic = NULL;
2607
2608 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2609 pic = hevc->m_PIC[i];
2610 if (pic == NULL || pic->index == -1 ||
2611 pic->BUF_index == -1)
2612 continue;
2613 if ((pic->POC == POC) && (pic->referenced)) {
2614 if (ret_pic == NULL)
2615 ret_pic = pic;
2616 else {
2617 if (pic->decode_idx > ret_pic->decode_idx)
2618 ret_pic = pic;
2619 }
2620 }
2621 }
2622
2623 if (ret_pic == NULL) {
2624 if (get_dbg_flag(hevc)) {
2625 hevc_print(hevc, 0,
2626 "Wrong, POC of %d is not in referenced list\n",
2627 POC);
2628 }
2629 ret_pic = get_pic_by_POC(hevc, POC);
2630 }
2631 return ret_pic;
2632}
2633
2634static unsigned int log2i(unsigned int val)
2635{
2636 unsigned int ret = -1;
2637
2638 while (val != 0) {
2639 val >>= 1;
2640 ret++;
2641 }
2642 return ret;
2643}
2644
2645static int init_buf_spec(struct hevc_state_s *hevc);
2646static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2647{
2648 if (hevc->mmu_box)
2649 decoder_mmu_box_free(hevc->mmu_box);
2650 hevc->mmu_box = NULL;
2651
2652 if (hevc->bmmu_box)
2653 decoder_bmmu_box_free(hevc->bmmu_box);
2654 hevc->bmmu_box = NULL;
2655}
2656static int init_mmu_buffers(struct hevc_state_s *hevc)
2657{
2658 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2659 CODEC_MM_FLAGS_TVP : 0;
2660 int buf_size = 64;
2661
2662 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2663 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2664 buf_size = 24;
2665 }
2666
2667 if (get_dbg_flag(hevc)) {
2668 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2669 __func__, hevc->max_pic_w, hevc->max_pic_h);
2670 }
2671
2672 hevc->need_cache_size = buf_size * SZ_1M;
2673 hevc->sc_start_time = get_jiffies_64();
2674 if (hevc->mmu_enable
2675 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2676 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2677 hevc->index,
2678 MAX_REF_PIC_NUM,
2679 buf_size * SZ_1M,
2680 tvp_flag
2681 );
2682 if (!hevc->mmu_box) {
2683 pr_err("h265 alloc mmu box failed!!\n");
2684 return -1;
2685 }
2686 }
2687
2688 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2689 hevc->index,
2690 BMMU_MAX_BUFFERS,
2691 4 + PAGE_SHIFT,
2692 CODEC_MM_FLAGS_CMA_CLEAR |
2693 CODEC_MM_FLAGS_FOR_VDECODER |
2694 tvp_flag);
2695 if (!hevc->bmmu_box) {
2696 if (hevc->mmu_box)
2697 decoder_mmu_box_free(hevc->mmu_box);
2698 hevc->mmu_box = NULL;
2699 pr_err("h265 alloc bmmu box failed!!\n");
2700 return -1;
2701 }
2702 return 0;
2703}
2704
2705struct buf_stru_s
2706{
2707 int lcu_total;
2708 int mc_buffer_size_h;
2709 int mc_buffer_size_u_v_h;
2710};
2711
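/*
 * Motion-vector buffer management when MV_USE_FIXED_BUF is not defined: each
 * decoded picture borrows one m_mv_BUF[] entry, sized by resolution
 * (MPRED_*_MV_BUF_SIZE), allocated from the bmmu box and recycled through
 * used_flag by put_mv_buf().
 */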
2712#ifndef MV_USE_FIXED_BUF
2713static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2714{
2715 int i;
2716 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2717 if (hevc->m_mv_BUF[i].start_adr) {
2718 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2719 hevc_print(hevc, 0,
2720 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2721 i, hevc->m_mv_BUF[i].start_adr,
2722 hevc->m_mv_BUF[i].size,
2723 hevc->m_mv_BUF[i].used_flag);
2724 decoder_bmmu_box_free_idx(
2725 hevc->bmmu_box,
2726 MV_BUFFER_IDX(i));
2727 hevc->m_mv_BUF[i].start_adr = 0;
2728 hevc->m_mv_BUF[i].size = 0;
2729 hevc->m_mv_BUF[i].used_flag = 0;
2730 }
2731 }
2732 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2733 if (hevc->m_PIC[i] != NULL)
2734 hevc->m_PIC[i]->mv_buf_index = -1;
2735 }
2736
2737}
2738
2739static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2740{
2741 int ret = 0;
2742 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2743 if (decoder_bmmu_box_alloc_buf_phy
2744 (hevc->bmmu_box,
2745 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2746 DRIVER_NAME,
2747 &hevc->m_mv_BUF[i].start_adr) < 0) {
2748 hevc->m_mv_BUF[i].start_adr = 0;
2749 ret = -1;
2750 } else {
2751 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2752 hevc->m_mv_BUF[i].used_flag = 0;
2753 ret = 0;
2754 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2755 hevc_print(hevc, 0,
2756 "MV Buffer %d: start_adr %p size %x\n",
2757 i,
2758 (void *)hevc->m_mv_BUF[i].start_adr,
2759 hevc->m_mv_BUF[i].size);
2760 }
2761 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2762 void *mem_start_virt;
2763 mem_start_virt =
2764 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2765 if (mem_start_virt) {
2766 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2767 codec_mm_dma_flush(mem_start_virt,
2768 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2769 } else {
2770 mem_start_virt = codec_mm_vmap(
2771 hevc->m_mv_BUF[i].start_adr,
2772 hevc->m_mv_BUF[i].size);
2773 if (mem_start_virt) {
2774 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2775 codec_mm_dma_flush(mem_start_virt,
2776 hevc->m_mv_BUF[i].size,
2777 DMA_TO_DEVICE);
2778 codec_mm_unmap_phyaddr(mem_start_virt);
2779 } else {
2780 /* no virtual mapping when playing TVP content;
2781 the buffer may need to be cleared by the ucode. */
2782 pr_err("ref %s mem_start_virt failed\n", __func__);
2783 }
2784 }
2785 }
2786 }
2787 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2788 return ret;
2789}
2790#endif
2791
2792static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2793{
2794#ifdef MV_USE_FIXED_BUF
2795 if (pic && pic->index >= 0) {
2796 if (IS_8K_SIZE(pic->width, pic->height)) {
2797 pic->mpred_mv_wr_start_addr =
2798 hevc->work_space_buf->mpred_mv.buf_start
2799 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2800 } else {
2801 pic->mpred_mv_wr_start_addr =
2802 hevc->work_space_buf->mpred_mv.buf_start
2803 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2804 }
2805 }
2806 return 0;
2807#else
2808 int i;
2809 int ret = -1;
2810 int new_size;
2811 if (IS_8K_SIZE(pic->width, pic->height))
2812 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2813 else if (IS_4K_SIZE(pic->width, pic->height))
2814 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2815 else
2816 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2817 if (new_size != hevc->mv_buf_size) {
2818 dealloc_mv_bufs(hevc);
2819 hevc->mv_buf_size = new_size;
2820 }
2821 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2822 if (hevc->m_mv_BUF[i].start_adr &&
2823 hevc->m_mv_BUF[i].used_flag == 0) {
2824 hevc->m_mv_BUF[i].used_flag = 1;
2825 ret = i;
2826 break;
2827 }
2828 }
2829 if (ret < 0) {
2830 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2831 if (hevc->m_mv_BUF[i].start_adr == 0) {
2832 if (alloc_mv_buf(hevc, i) >= 0) {
2833 hevc->m_mv_BUF[i].used_flag = 1;
2834 ret = i;
2835 }
2836 break;
2837 }
2838 }
2839 }
2840
2841 if (ret >= 0) {
2842 pic->mv_buf_index = ret;
2843 pic->mpred_mv_wr_start_addr =
2844 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2845 (~0xffff);
2846 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2847 "%s => %d (0x%x) size 0x%x\n",
2848 __func__, ret,
2849 pic->mpred_mv_wr_start_addr,
2850 hevc->m_mv_BUF[ret].size);
2851
2852 } else {
2853 hevc_print(hevc, 0,
2854 "%s: Error, mv buf is not enough\n",
2855 __func__);
2856 }
2857 return ret;
2858
2859#endif
2860}
2861
2862static void put_mv_buf(struct hevc_state_s *hevc,
2863 struct PIC_s *pic)
2864{
2865#ifndef MV_USE_FIXED_BUF
2866 int i = pic->mv_buf_index;
2867 if (i < 0 || i >= MAX_REF_PIC_NUM) {
2868 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2869 "%s: index %d beyond range\n",
2870 __func__, i);
2871 return;
2872 }
2873 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2874 "%s(%d): used_flag(%d)\n",
2875 __func__, i,
2876 hevc->m_mv_BUF[i].used_flag);
2877
2878 if (hevc->m_mv_BUF[i].start_adr &&
2879 hevc->m_mv_BUF[i].used_flag)
2880 hevc->m_mv_BUF[i].used_flag = 0;
2881 pic->mv_buf_index = -1;
2882#endif
2883}
2884
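/*
 * Per-picture buffer size: compressed-frame header (plus the compressed body
 * when the frame-buffer MMU is off) and the double-write luma/chroma planes,
 * each component rounded up to a 64KB boundary.
 */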
2885static int cal_current_buf_size(struct hevc_state_s *hevc,
2886 struct buf_stru_s *buf_stru)
2887{
2888
2889 int buf_size;
2890 int pic_width = hevc->pic_w;
2891 int pic_height = hevc->pic_h;
2892 int lcu_size = hevc->lcu_size;
2893 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
2894 1 : pic_width / lcu_size;
2895 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
2896 1 : pic_height / lcu_size;
2897 /*SUPPORT_10BIT*/
2898 int losless_comp_header_size = compute_losless_comp_header_size
2899 (pic_width, pic_height);
2900 /*always alloc buf for 10bit*/
2901 int losless_comp_body_size = compute_losless_comp_body_size
2902 (hevc, pic_width, pic_height, 0);
2903 int mc_buffer_size = losless_comp_header_size
2904 + losless_comp_body_size;
2905 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
2906 int mc_buffer_size_u_v_h = 0;
2907
2908 int dw_mode = get_double_write_mode(hevc);
2909
2910 if (hevc->mmu_enable) {
2911 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
2912 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
2913 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
2914 << 16;
2915 else
2916 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
2917 << 16;
2918 } else
2919 buf_size = 0;
2920
2921 if (dw_mode) {
2922 int pic_width_dw = pic_width /
2923 get_double_write_ratio(hevc, dw_mode);
2924 int pic_height_dw = pic_height /
2925 get_double_write_ratio(hevc, dw_mode);
2926
2927 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
2928 pic_width_dw / lcu_size + 1 :
2929 pic_width_dw / lcu_size;
2930 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
2931 pic_height_dw / lcu_size + 1 :
2932 pic_height_dw / lcu_size;
2933 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
2934
2935 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
2936 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
2937 /*64k alignment*/
2938 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
2939 }
2940
2941 if ((!hevc->mmu_enable) &&
2942 ((dw_mode & 0x10) == 0)) {
2943 /* compressed mode without the frame-buffer MMU:
2944 the compressed body must also live in this buffer */
2945 buf_size += (mc_buffer_size_h << 16);
2946 }
2947
2948 /*in case start adr is not 64k alignment*/
2949 if (buf_size > 0)
2950 buf_size += 0x10000;
2951
2952 if (buf_stru) {
2953 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
2954 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
2955 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
2956 }
2957
2958 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
2959 pic_width, pic_height, losless_comp_header_size,
2960 losless_comp_body_size, mc_buffer_size_h,
2961 mc_buffer_size_u_v_h, buf_size);
2962
2963 return buf_size;
2964}
2965
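/*
 * Back one free m_BUF[] slot with a bmmu allocation of the current
 * per-picture size; the memory is cleared and flushed so no stale data can
 * end up in the decoded output.
 */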
2966static int alloc_buf(struct hevc_state_s *hevc)
2967{
2968 int i;
2969 int ret = -1;
2970 int buf_size = cal_current_buf_size(hevc, NULL);
2971
2972 if (hevc->is_used_v4l)
2973 return 0;
2974
2975 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
2976 return ret;
2977
2978 for (i = 0; i < BUF_POOL_SIZE; i++) {
2979 if (hevc->m_BUF[i].start_adr == 0)
2980 break;
2981 }
2982 if (i < BUF_POOL_SIZE) {
2983 if (buf_size > 0) {
2984 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2985 /*alloc compress header first*/
2986
2987 ret = decoder_bmmu_box_alloc_buf_phy
2988 (hevc->bmmu_box,
2989 VF_BUFFER_IDX(i), buf_size,
2990 DRIVER_NAME,
2991 &hevc->m_BUF[i].start_adr);
2992 if (ret < 0) {
2993 hevc->m_BUF[i].start_adr = 0;
2994 if (i <= 8) {
2995 hevc->fatal_error |=
2996 DECODER_FATAL_ERROR_NO_MEM;
2997 hevc_print(hevc, PRINT_FLAG_ERROR,
2998 "%s[%d], size: %d, no mem fatal err\n",
2999 __func__, i, buf_size);
3000 }
3001 }
3002
3003 if (ret >= 0) {
3004 hevc->m_BUF[i].size = buf_size;
3005 hevc->m_BUF[i].used_flag = 0;
3006 ret = 0;
3007
3008 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3009 hevc_print(hevc, 0,
3010 "Buffer %d: start_adr %p size %x\n",
3011 i,
3012 (void *)hevc->m_BUF[i].start_adr,
3013 hevc->m_BUF[i].size);
3014 }
3015 /*flush the buffer make sure no cache dirty*/
3016 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3017 void *mem_start_virt;
3018 mem_start_virt =
3019 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3020 if (mem_start_virt) {
3021 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3022 codec_mm_dma_flush(mem_start_virt,
3023 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3024 } else {
3025 mem_start_virt = codec_mm_vmap(
3026 hevc->m_BUF[i].start_adr,
3027 hevc->m_BUF[i].size);
3028 if (mem_start_virt) {
3029 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3030 codec_mm_dma_flush(mem_start_virt,
3031 hevc->m_BUF[i].size,
3032 DMA_TO_DEVICE);
3033 codec_mm_unmap_phyaddr(mem_start_virt);
3034 } else {
3035 /* no virtual mapping when playing TVP content;
3036 the buffer may need to be cleared by the ucode. */
3037 pr_err("ref %s mem_start_virt failed\n", __func__);
3038 }
3039 }
3040 }
3041 }
3042 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3043 } else
3044 ret = 0;
3045 }
3046 if (ret >= 0) {
3047 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3048 hevc_print(hevc, 0,
3049 "alloc buf(%d) for %d/%d size 0x%x => %p\n",
3050 i, hevc->pic_w, hevc->pic_h,
3051 buf_size,
3052 hevc->m_BUF[i].start_adr);
3053 }
3054 } else {
3055 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3056 hevc_print(hevc, 0,
3057 "alloc buf(%d) for %d/%d size 0x%x => Fail!!!\n",
3058 i, hevc->pic_w, hevc->pic_h,
3059 buf_size);
3060 }
3061 }
3062 return ret;
3063}
3064
3065static void set_buf_unused(struct hevc_state_s *hevc, int i)
3066{
3067 if (i >= 0 && i < BUF_POOL_SIZE)
3068 hevc->m_BUF[i].used_flag = 0;
3069}
3070
3071static void dealloc_unused_buf(struct hevc_state_s *hevc)
3072{
3073 int i;
3074 for (i = 0; i < BUF_POOL_SIZE; i++) {
3075 if (hevc->m_BUF[i].start_adr &&
3076 hevc->m_BUF[i].used_flag == 0) {
3077 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3078 hevc_print(hevc, 0,
3079 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3080 i, hevc->m_BUF[i].start_adr,
3081 hevc->m_BUF[i].size);
3082 }
3083 if (!hevc->is_used_v4l)
3084 decoder_bmmu_box_free_idx(
3085 hevc->bmmu_box,
3086 VF_BUFFER_IDX(i));
3087 hevc->m_BUF[i].start_adr = 0;
3088 hevc->m_BUF[i].size = 0;
3089 }
3090 }
3091
3092}
3093
3094static void dealloc_pic_buf(struct hevc_state_s *hevc,
3095 struct PIC_s *pic)
3096{
3097 int i = pic->BUF_index;
3098 pic->BUF_index = -1;
3099 if (i >= 0 &&
3100 i < BUF_POOL_SIZE &&
3101 hevc->m_BUF[i].start_adr) {
3102 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3103 hevc_print(hevc, 0,
3104 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3105 i, hevc->m_BUF[i].start_adr,
3106 hevc->m_BUF[i].size);
3107 }
3108
3109 if (!hevc->is_used_v4l)
3110 decoder_bmmu_box_free_idx(
3111 hevc->bmmu_box,
3112 VF_BUFFER_IDX(i));
3113 hevc->m_BUF[i].used_flag = 0;
3114 hevc->m_BUF[i].start_adr = 0;
3115 hevc->m_BUF[i].size = 0;
3116 }
3117}
3118
3119static int get_work_pic_num(struct hevc_state_s *hevc)
3120{
3121 int used_buf_num = 0;
3122 int sps_pic_buf_diff = 0;
3123
3124 if (get_dynamic_buf_num_margin(hevc) > 0) {
3125 if ((!hevc->sps_num_reorder_pics_0) &&
3126 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3127 /* the range of sps_num_reorder_pics_0 is in
3128 [0, sps_max_dec_pic_buffering_minus1_0] */
3129 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3130 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3131 } else
3132 used_buf_num = hevc->sps_num_reorder_pics_0
3133 + get_dynamic_buf_num_margin(hevc);
3134
3135 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3136 - hevc->sps_num_reorder_pics_0;
3137#ifdef MULTI_INSTANCE_SUPPORT
3138 /*
3139 * need one more buffer for multi instance, as
3140 * apply_ref_pic_set() has no chance to run and
3141 * clear the referenced flag in some cases
3142 */
3143 if (hevc->m_ins_flag)
3144 used_buf_num++;
3145#endif
3146 } else
3147 used_buf_num = max_buf_num;
3148
3149 if (hevc->save_buffer_mode)
3150 hevc_print(hevc, 0,
3151 "save_buffer_mode: dynamic_buf_num_margin %d ----> %d\n",
3152 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3153
3154 if (sps_pic_buf_diff >= 4)
3155 {
3156 used_buf_num += 1;
3157 }
3158
3159 if (used_buf_num > MAX_BUF_NUM)
3160 used_buf_num = MAX_BUF_NUM;
3161 return used_buf_num;
3162}
3163
3164static int get_alloc_pic_count(struct hevc_state_s *hevc)
3165{
3166 int alloc_pic_count = 0;
3167 int i;
3168 struct PIC_s *pic;
3169 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3170 pic = hevc->m_PIC[i];
3171 if (pic && pic->index >= 0)
3172 alloc_pic_count++;
3173 }
3174 return alloc_pic_count;
3175}
3176
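/*
 * Bind a PIC_s to physical memory: pick a free m_BUF[] slot (or fetch a V4L2
 * capture buffer), then lay out the compressed header and the double-write
 * luma/chroma planes inside it.
 */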
3177static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3178{
3179 int ret = -1;
3180 int i;
3181 /*int lcu_size_log2 = hevc->lcu_size_log2;
3182 int MV_MEM_UNIT=lcu_size_log2==
3183 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3184 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3185 5 ? 0x80 : 0x20;
3186 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3187 hevc->work_space_buf->mpred_mv.buf_size;*/
3188 unsigned int y_adr = 0;
3189 struct buf_stru_s buf_stru;
3190 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3191 int dw_mode = get_double_write_mode(hevc);
3192 struct vdec_v4l2_buffer *fb = NULL;
3193
3194 if (hevc->is_used_v4l)
3195 buf_size = 0;
3196
3197 for (i = 0; i < BUF_POOL_SIZE; i++) {
3198 if (hevc->is_used_v4l && !hevc->m_BUF[i].start_adr) {
3199 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3200 if (ret) {
3201 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
3202 "[%d] get fb fail.\n",
3203 ((struct aml_vcodec_ctx *)
3204 (hevc->v4l2_ctx))->id);
3205 return ret;
3206 }
3207
3208 hevc->m_BUF[i].used_flag = 0;
3209 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3210 if (fb->num_planes == 1) {
3211 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3212 hevc->m_BUF[i].size = fb->m.mem[0].size;
3213 hevc->m_BUF[i].y_size = fb->m.mem[0].offset;
3214 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3215 } else if (fb->num_planes == 2) {
3216 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3217 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3218 hevc->m_BUF[i].y_size = fb->m.mem[0].size;
3219 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3220 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3221 }
3222
3223 pic->BUF_index = i;
3224
3225 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
3226 "[%d] %s(), v4l ref buf addr: %p\n",
3227 ((struct aml_vcodec_ctx *)
3228 (hevc->v4l2_ctx))->id, __func__, fb);
3229 }
3230
3231 if (hevc->m_BUF[i].start_adr != 0 &&
3232 hevc->m_BUF[i].used_flag == 0 &&
3233 buf_size <= hevc->m_BUF[i].size) {
3234 hevc->m_BUF[i].used_flag = 1;
3235 break;
3236 }
3237 }
3238
3239 if (i >= BUF_POOL_SIZE)
3240 return -1;
3241
3242 if (hevc->mmu_enable) {
3243 pic->header_adr = hevc->m_BUF[i].start_adr;
3244 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3245 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3246 y_adr = hevc->m_BUF[i].start_adr +
3247 MMU_COMPRESS_8K_HEADER_SIZE;
3248 else
3249 y_adr = hevc->m_BUF[i].start_adr +
3250 MMU_COMPRESS_HEADER_SIZE;
3251 } else
3252 y_adr = hevc->m_BUF[i].start_adr;
3253
3254 if (!hevc->is_used_v4l)
3255 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3256
3257 pic->POC = INVALID_POC;
3258 /* ensure get_pic_by_POC() does not pick up
3259 a buffer that has not been decoded yet */
3260 pic->BUF_index = i;
3261
3262 if ((!hevc->mmu_enable) &&
3263 ((dw_mode & 0x10) == 0)
3264 ) {
3265 pic->mc_y_adr = y_adr;
3266 y_adr += (buf_stru.mc_buffer_size_h << 16);
3267 }
3268 pic->mc_canvas_y = pic->index;
3269 pic->mc_canvas_u_v = pic->index;
3270 if (dw_mode & 0x10) {
3271 if (hevc->is_used_v4l) {
3272 pic->mc_y_adr = y_adr;
3273 pic->mc_u_v_adr = y_adr + hevc->m_BUF[i].y_size;
3274 } else {
3275 pic->mc_y_adr = y_adr;
3276 pic->mc_u_v_adr = y_adr +
3277 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3278 }
3279
3280 pic->mc_canvas_y = (pic->index << 1);
3281 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3282
3283 pic->dw_y_adr = pic->mc_y_adr;
3284 pic->dw_u_v_adr = pic->mc_u_v_adr;
3285 } else if (dw_mode) {
3286 pic->dw_y_adr = y_adr;
3287 pic->dw_u_v_adr = pic->dw_y_adr +
3288 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3289 }
3290
3291
3292 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3293 hevc_print(hevc, 0,
3294 "%s index %d BUF_index %d mc_y_adr %x\n",
3295 __func__, pic->index,
3296 pic->BUF_index, pic->mc_y_adr);
3297 if (hevc->mmu_enable &&
3298 dw_mode)
3299 hevc_print(hevc, 0,
3300 "mmu double write adr %ld\n",
3301 pic->cma_alloc_addr);
3302
3303
3304 }
3305 ret = 0;
3306
3307 return ret;
3308}
3309
3310static void init_pic_list(struct hevc_state_s *hevc)
3311{
3312 int i;
3313 int init_buf_num = get_work_pic_num(hevc);
3314 int dw_mode = get_double_write_mode(hevc);
3315 struct vdec_s *vdec = hw_to_vdec(hevc);
3316 /*alloc decoder buf*/
3317 for (i = 0; i < init_buf_num; i++) {
3318 if (alloc_buf(hevc) < 0) {
3319 if (i <= 8) {
3320 /* if at least 9 buffers (i + 1 >= 9) were already
3321 allocated, don't raise a fatal error */
3322 hevc->fatal_error |=
3323 DECODER_FATAL_ERROR_NO_MEM;
3324 }
3325 break;
3326 }
3327 }
3328
3329 for (i = 0; i < init_buf_num; i++) {
3330 struct PIC_s *pic =
3331 vmalloc(sizeof(struct PIC_s));
3332 if (pic == NULL) {
3333 hevc_print(hevc, 0,
3334 "%s: alloc pic %d fail!!!\n",
3335 __func__, i);
3336 break;
3337 }
3338 memset(pic, 0, sizeof(struct PIC_s));
3339 hevc->m_PIC[i] = pic;
3340 pic->index = i;
3341 pic->BUF_index = -1;
3342 pic->mv_buf_index = -1;
3343 if (vdec->parallel_dec == 1) {
3344 pic->y_canvas_index = -1;
3345 pic->uv_canvas_index = -1;
3346 }
3347
3348 pic->width = hevc->pic_w;
3349 pic->height = hevc->pic_h;
3350 pic->double_write_mode = dw_mode;
3351
3352 if (!hevc->is_used_v4l) {
3353 if (config_pic(hevc, pic) < 0) {
3354 if (get_dbg_flag(hevc))
3355 hevc_print(hevc, 0,
3356 "Config_pic %d fail\n", pic->index);
3357 pic->index = -1;
3358 i++;
3359 break;
3360 }
3361
3362 if (pic->double_write_mode)
3363 set_canvas(hevc, pic);
3364 }
3365 }
3366
3367 for (; i < MAX_REF_PIC_NUM; i++) {
3368 struct PIC_s *pic =
3369 vmalloc(sizeof(struct PIC_s));
3370 if (pic == NULL) {
3371 hevc_print(hevc, 0,
3372 "%s: alloc pic %d fail!!!\n",
3373 __func__, i);
3374 break;
3375 }
3376 memset(pic, 0, sizeof(struct PIC_s));
3377 hevc->m_PIC[i] = pic;
3378 pic->index = -1;
3379 pic->BUF_index = -1;
3380 if (vdec->parallel_dec == 1) {
3381 pic->y_canvas_index = -1;
3382 pic->uv_canvas_index = -1;
3383 }
3384 }
3385
3386}
3387
3388static void uninit_pic_list(struct hevc_state_s *hevc)
3389{
3390 struct vdec_s *vdec = hw_to_vdec(hevc);
3391 int i;
3392#ifndef MV_USE_FIXED_BUF
3393 dealloc_mv_bufs(hevc);
3394#endif
3395 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3396 struct PIC_s *pic = hevc->m_PIC[i];
3397
3398 if (pic) {
3399 if (vdec->parallel_dec == 1) {
3400 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3401 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3402 }
3403 release_aux_data(hevc, pic);
3404 vfree(pic);
3405 hevc->m_PIC[i] = NULL;
3406 }
3407 }
3408}
3409
3410#ifdef LOSLESS_COMPRESS_MODE
3411static void init_decode_head_hw(struct hevc_state_s *hevc)
3412{
3413
3414 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3415 unsigned int data32;
3416
3417 int losless_comp_header_size =
3418 compute_losless_comp_header_size(hevc->pic_w,
3419 hevc->pic_h);
3420 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3421 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3422
3423 hevc->losless_comp_body_size = losless_comp_body_size;
3424
3425
3426 if (hevc->mmu_enable) {
3427 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3428 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3429 } else {
3430 if (hevc->mem_saving_mode == 1)
3431 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3432 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3433 else
3434 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3435 ((workaround_enable & 2) ? 1 : 0));
3436 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3437 /*
3438 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3439 * //8-bit mode
3440 */
3441 }
3442 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3443 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3444 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3445
3446 if (hevc->mmu_enable) {
3447 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3448 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3449 buf_spec->mmu_vbh.buf_start +
3450 buf_spec->mmu_vbh.buf_size/2);
3451 data32 = READ_VREG(HEVC_SAO_CTRL9);
3452 data32 |= 0x1;
3453 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3454
3455 /* use HEVC_CM_HEADER_START_ADDR */
3456 data32 = READ_VREG(HEVC_SAO_CTRL5);
3457 data32 |= (1<<10);
3458 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3459 }
3460
3461 if (!hevc->m_ins_flag)
3462 hevc_print(hevc, 0,
3463 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3464 __func__, hevc->pic_w, hevc->pic_h,
3465 losless_comp_body_size, losless_comp_header_size);
3466
3467}
3468#endif
3469#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3470
3471static void init_pic_list_hw(struct hevc_state_s *hevc)
3472{
3473 int i;
3474 int cur_pic_num = MAX_REF_PIC_NUM;
3475 int dw_mode = get_double_write_mode(hevc);
3476 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3477 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3478 (0x1 << 1) | (0x1 << 2));
3479 else
3480 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3481
3482 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3483 if (hevc->m_PIC[i] == NULL ||
3484 hevc->m_PIC[i]->index == -1) {
3485 cur_pic_num = i;
3486 break;
3487 }
3488 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3489 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3490 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3491 hevc->m_PIC[i]->header_adr>>5);
3492 else
3493 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3494 hevc->m_PIC[i]->mc_y_adr >> 5);
3495 } else
3496 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3497 hevc->m_PIC[i]->mc_y_adr |
3498 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3499 if (dw_mode & 0x10) {
3500 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3501 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3502 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3503 }
3504 else
3505 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3506 hevc->m_PIC[i]->mc_u_v_adr |
3507 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3508 | 0x1);
3509 }
3510 }
3511 if (cur_pic_num == 0)
3512 return;
3513 for (; i < MAX_REF_PIC_NUM; i++) {
3514 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3515 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3516 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3517 hevc->m_PIC[cur_pic_num-1]->header_adr>>5);
3518 else
3519 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3520 hevc->m_PIC[cur_pic_num-1]->mc_y_adr >> 5);
3521#ifndef LOSLESS_COMPRESS_MODE
3522 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3523 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr >> 5);
3524#endif
3525 } else {
3526 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3527 hevc->m_PIC[cur_pic_num-1]->mc_y_adr|
3528 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_y<<8)
3529 | 0x1);
3530#ifndef LOSLESS_COMPRESS_MODE
3531 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3532 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr|
3533 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_u_v<<8)
3534 | 0x1);
3535#endif
3536 }
3537 }
3538
3539 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3540
3541 /* Zero out canvas registers in IPP -- avoid simulation X */
3542 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3543 (0 << 8) | (0 << 1) | 1);
3544 for (i = 0; i < 32; i++)
3545 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3546
3547#ifdef LOSLESS_COMPRESS_MODE
3548 if ((dw_mode & 0x10) == 0)
3549 init_decode_head_hw(hevc);
3550#endif
3551
3552}
3553
3554
3555static void dump_pic_list(struct hevc_state_s *hevc)
3556{
3557 int i;
3558 struct PIC_s *pic;
3559
3560 hevc_print(hevc, 0,
3561 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3562 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3563 pic = hevc->m_PIC[i];
3564 if (pic == NULL || pic->index == -1)
3565 continue;
3566 hevc_print_cont(hevc, 0,
3567 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3568 pic->index, pic->BUF_index,
3569#ifndef MV_USE_FIXED_BUF
3570 pic->mv_buf_index,
3571#else
3572 -1,
3573#endif
3574 pic->decode_idx, pic->POC, pic->referenced);
3575 hevc_print_cont(hevc, 0,
3576 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3577 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3578 pic->width, pic->height);
3579 hevc_print_cont(hevc, 0,
3580 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3581 pic->output_ready, pic->mpred_mv_wr_start_addr,
3582 pic->vf_ref);
3583 }
3584}
3585
3586static void clear_referenced_flag(struct hevc_state_s *hevc)
3587{
3588 int i;
3589 struct PIC_s *pic;
3590 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3591 pic = hevc->m_PIC[i];
3592 if (pic == NULL || pic->index == -1)
3593 continue;
3594 if (pic->referenced) {
3595 pic->referenced = 0;
3596 put_mv_buf(hevc, pic);
3597 }
3598 }
3599}
3600
3601static void clear_poc_flag(struct hevc_state_s *hevc)
3602{
3603 int i;
3604 struct PIC_s *pic;
3605 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3606 pic = hevc->m_PIC[i];
3607 if (pic == NULL || pic->index == -1)
3608 continue;
3609 pic->POC = INVALID_POC;
3610 }
3611}
3612
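/*
 * Pick the next picture to display: in I-only trick mode the pending picture
 * with the lowest decode_idx, otherwise the lowest-POC pending picture, which
 * is only released once more than num_reorder_pic pictures are waiting (or on
 * flush / buffer pressure).
 */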
3613static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3614 unsigned char flush_flag)
3615{
3616 int num_pic_not_yet_display = 0;
3617 int i;
3618 struct PIC_s *pic;
3619 struct PIC_s *pic_display = NULL;
3620 struct vdec_s *vdec = hw_to_vdec(hevc);
3621
3622 if (hevc->i_only & 0x4) {
3623 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3624 pic = hevc->m_PIC[i];
3625 if (pic == NULL ||
3626 (pic->index == -1) ||
3627 (pic->BUF_index == -1) ||
3628 (pic->POC == INVALID_POC))
3629 continue;
3630 if (pic->output_mark) {
3631 if (pic_display) {
3632 if (pic->decode_idx <
3633 pic_display->decode_idx)
3634 pic_display = pic;
3635
3636 } else
3637 pic_display = pic;
3638
3639 }
3640 }
3641 if (pic_display) {
3642 pic_display->output_mark = 0;
3643 pic_display->recon_mark = 0;
3644 pic_display->output_ready = 1;
3645 pic_display->referenced = 0;
3646 put_mv_buf(hevc, pic_display);
3647 }
3648 } else {
3649 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3650 pic = hevc->m_PIC[i];
3651 if (pic == NULL ||
3652 (pic->index == -1) ||
3653 (pic->BUF_index == -1) ||
3654 (pic->POC == INVALID_POC))
3655 continue;
3656 if (pic->output_mark)
3657 num_pic_not_yet_display++;
3658 if (pic->slice_type == 2 &&
3659 hevc->vf_pre_count == 0 &&
3660 fast_output_enable & 0x1) {
3661 /*fast output for first I picture*/
3662 pic->num_reorder_pic = 0;
3663 if (vdec->master || vdec->slave)
3664 pic_display = pic;
3665 hevc_print(hevc, 0, "VH265: output first frame\n");
3666 }
3667 }
3668
3669 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3670 pic = hevc->m_PIC[i];
3671 if (pic == NULL ||
3672 (pic->index == -1) ||
3673 (pic->BUF_index == -1) ||
3674 (pic->POC == INVALID_POC))
3675 continue;
3676 if (pic->output_mark) {
3677 if (pic_display) {
3678 if (pic->POC < pic_display->POC)
3679 pic_display = pic;
3680 else if ((pic->POC == pic_display->POC)
3681 && (pic->decode_idx <
3682 pic_display->
3683 decode_idx))
3684 pic_display
3685 = pic;
3686 } else
3687 pic_display = pic;
3688 }
3689 }
3690 if (pic_display) {
3691 if ((num_pic_not_yet_display >
3692 pic_display->num_reorder_pic)
3693 || flush_flag) {
3694 pic_display->output_mark = 0;
3695 pic_display->recon_mark = 0;
3696 pic_display->output_ready = 1;
3697 } else if (num_pic_not_yet_display >=
3698 (MAX_REF_PIC_NUM - 1)) {
3699 pic_display->output_mark = 0;
3700 pic_display->recon_mark = 0;
3701 pic_display->output_ready = 1;
3702 hevc_print(hevc, 0,
3703 "Warning, num_reorder_pic %d is beyond buf num\n",
3704 pic_display->num_reorder_pic);
3705 } else
3706 pic_display = NULL;
3707 }
3708 }
3709
3710 if (pic_display && (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3711 pic_display = NULL;
3712 hevc->first_pic_flag = 0;
3713 }
3714 return pic_display;
3715}
3716
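/*
 * Program the motion-compensation canvas table with the reference pictures of
 * list0 (and list1 for B slices); a missing, mis-sized or error-marked
 * reference marks the current picture as errored.
 */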
3717static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3718{
3719 int i;
3720 struct PIC_s *pic;
3721
3722 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3723 hevc_print(hevc, 0,
3724 "config_mc_buffer entered .....\n");
3725 if (cur_pic->slice_type != 2) { /* P and B pic */
3726 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3727 (0 << 8) | (0 << 1) | 1);
3728 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3729 pic =
3730 get_ref_pic_by_POC(hevc,
3731 cur_pic->
3732 m_aiRefPOCList0[cur_pic->
3733 slice_idx][i]);
3734 if (pic) {
3735 if ((pic->width != hevc->pic_w) ||
3736 (pic->height != hevc->pic_h)) {
3737 hevc_print(hevc, 0,
3738 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3739 __func__, pic->POC,
3740 pic->width, pic->height);
3741 cur_pic->error_mark = 1;
3742 }
3743 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3744 cur_pic->error_mark = 1;
3745 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3746 (pic->mc_canvas_u_v << 16)
3747 | (pic->mc_canvas_u_v
3748 << 8) |
3749 pic->mc_canvas_y);
3750 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3751 hevc_print_cont(hevc, 0,
3752 "refid %x mc_canvas_u_v %x",
3753 i, pic->mc_canvas_u_v);
3754 hevc_print_cont(hevc, 0,
3755 " mc_canvas_y %x\n",
3756 pic->mc_canvas_y);
3757 }
3758 } else
3759 cur_pic->error_mark = 1;
3760
3761 if (pic == NULL || pic->error_mark) {
3762 hevc_print(hevc, 0,
3763 "Error %s, %dth poc (%d) %s\n",
3764 __func__, i,
3765 cur_pic->m_aiRefPOCList0[cur_pic->
3766 slice_idx][i],
3767 pic ? "has error" :
3768 "not in list0");
3769 }
3770 }
3771 }
3772 if (cur_pic->slice_type == 0) { /* B pic */
3773 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3774 hevc_print(hevc, 0,
3775 "config_mc_buffer RefNum_L1\n");
3776 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3777 (16 << 8) | (0 << 1) | 1);
3778
3779 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3780 pic =
3781 get_ref_pic_by_POC(hevc,
3782 cur_pic->
3783 m_aiRefPOCList1[cur_pic->
3784 slice_idx][i]);
3785 if (pic) {
3786 if ((pic->width != hevc->pic_w) ||
3787 (pic->height != hevc->pic_h)) {
3788 hevc_print(hevc, 0,
3789 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3790 __func__, pic->POC,
3791 pic->width, pic->height);
3792 cur_pic->error_mark = 1;
3793 }
3794
3795 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3796 cur_pic->error_mark = 1;
3797 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3798 (pic->mc_canvas_u_v << 16)
3799 | (pic->mc_canvas_u_v
3800 << 8) |
3801 pic->mc_canvas_y);
3802 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3803 hevc_print_cont(hevc, 0,
3804 "refid %x mc_canvas_u_v %x",
3805 i, pic->mc_canvas_u_v);
3806 hevc_print_cont(hevc, 0,
3807 " mc_canvas_y %x\n",
3808 pic->mc_canvas_y);
3809 }
3810 } else
3811 cur_pic->error_mark = 1;
3812
3813 if (pic == NULL || pic->error_mark) {
3814 hevc_print(hevc, 0,
3815 "Error %s, %dth poc (%d) %s",
3816 __func__, i,
3817 cur_pic->m_aiRefPOCList1[cur_pic->
3818 slice_idx][i],
3819 pic ? "has error" :
3820 "not in list1");
3821 }
3822 }
3823 }
3824 return 0;
3825}
3826
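/*
 * apply_ref_pic_set() - clear the "referenced" flag of pictures that are no
 * longer part of the current reference picture set.
 *
 * Each CUR_RPS entry (list terminated by bit 0x8000) encodes a signed POC
 * delta relative to cur_poc; any currently referenced picture (other than
 * the current one) whose POC matches none of the derived POCs is
 * un-referenced and its MV buffer is returned via put_mv_buf().
 */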
3827static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
3828 union param_u *params)
3829{
3830 int ii, i;
3831 int poc_tmp;
3832 struct PIC_s *pic;
3833 unsigned char is_referenced;
3834 /* hevc_print(hevc, 0,
3835 "%s cur_poc %d\n", __func__, cur_poc); */
3836 if (pic_list_debug & 0x2) {
3837 pr_err("cur poc %d\n", cur_poc);
3838 }
3839 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
3840 pic = hevc->m_PIC[ii];
3841 if (pic == NULL ||
3842 pic->index == -1 ||
3843 pic->BUF_index == -1
3844 )
3845 continue;
3846
3847 if ((pic->referenced == 0 || pic->POC == cur_poc))
3848 continue;
3849 is_referenced = 0;
3850 for (i = 0; i < 16; i++) {
3851 int delt;
3852
3853 if (params->p.CUR_RPS[i] & 0x8000)
3854 break;
3855 delt =
3856 params->p.CUR_RPS[i] &
3857 ((1 << (RPS_USED_BIT - 1)) - 1);
3858 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
3859 poc_tmp =
3860 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
3861 delt);
3862 } else
3863 poc_tmp = cur_poc + delt;
3864 if (poc_tmp == pic->POC) {
3865 is_referenced = 1;
3866 /* hevc_print(hevc, 0, "i is %d\n", i); */
3867 break;
3868 }
3869 }
3870 if (is_referenced == 0) {
3871 pic->referenced = 0;
3872 put_mv_buf(hevc, pic);
3873 /* hevc_print(hevc, 0,
3874 "set poc %d reference to 0\n", pic->POC); */
3875 if (pic_list_debug & 0x2) {
3876 pr_err("set poc %d reference to 0\n", pic->POC);
3877 }
3878 }
3879 }
3880
3881}
3882
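/*
 * set_ref_pic_list() - build the L0/L1 reference POC lists for the current
 * slice from the RPS carried in the RPM parameters.
 *
 * Negative POC deltas go to RefPicSetStCurr0 and positive ones to
 * RefPicSetStCurr1; the active lists are then filled either from the
 * ref_pic_list_modification indexes (when modification_flag is set) or
 * round-robin over the whole set.  num_ref_idx_lX_active is clamped to
 * MAX_REF_ACTIVE, and pic->slice_type / RefNum_L0 / RefNum_L1 are updated.
 */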
3883static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
3884{
3885 struct PIC_s *pic = hevc->cur_pic;
3886 int i, rIdx;
3887 int num_neg = 0;
3888 int num_pos = 0;
3889 int total_num;
3890 int num_ref_idx_l0_active =
3891 (params->p.num_ref_idx_l0_active >
3892 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
3893 params->p.num_ref_idx_l0_active;
3894 int num_ref_idx_l1_active =
3895 (params->p.num_ref_idx_l1_active >
3896 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
3897 params->p.num_ref_idx_l1_active;
3898
3899 int RefPicSetStCurr0[16];
3900 int RefPicSetStCurr1[16];
3901
3902 for (i = 0; i < 16; i++) {
3903 RefPicSetStCurr0[i] = 0;
3904 RefPicSetStCurr1[i] = 0;
3905 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
3906 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
3907 }
3908 for (i = 0; i < 16; i++) {
3909 if (params->p.CUR_RPS[i] & 0x8000)
3910 break;
3911 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
3912 int delt =
3913 params->p.CUR_RPS[i] &
3914 ((1 << (RPS_USED_BIT - 1)) - 1);
3915
3916 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
3917 RefPicSetStCurr0[num_neg] =
3918 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
3919 delt);
3920 /* hevc_print(hevc, 0,
3921 * "RefPicSetStCurr0 %x %x %x\n",
3922 * RefPicSetStCurr0[num_neg], pic->POC,
3923 * (0x800-(params[i]&0x7ff)));
3924 */
3925 num_neg++;
3926 } else {
3927 RefPicSetStCurr1[num_pos] = pic->POC + delt;
3928 /* hevc_print(hevc, 0,
3929 * "RefPicSetStCurr1 %d\n",
3930 * RefPicSetStCurr1[num_pos]);
3931 */
3932 num_pos++;
3933 }
3934 }
3935 }
3936 total_num = num_neg + num_pos;
3937 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3938 hevc_print(hevc, 0,
3939 "%s: curpoc %d slice_type %d, total %d ",
3940 __func__, pic->POC, params->p.slice_type, total_num);
3941 hevc_print_cont(hevc, 0,
3942 "num_neg %d num_list0 %d num_list1 %d\n",
3943 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
3944 }
3945
3946 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3947 hevc_print(hevc, 0,
3948 "HEVC Stream buf start ");
3949 hevc_print_cont(hevc, 0,
3950 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
3951 READ_VREG(HEVC_STREAM_START_ADDR),
3952 READ_VREG(HEVC_STREAM_END_ADDR),
3953 READ_VREG(HEVC_STREAM_WR_PTR),
3954 READ_VREG(HEVC_STREAM_RD_PTR),
3955 READ_VREG(HEVC_STREAM_LEVEL),
3956 READ_VREG(HEVC_STREAM_FIFO_CTL),
3957 READ_VREG(HEVC_PARSER_INT_CONTROL));
3958 }
3959
3960 if (total_num > 0) {
3961 if (params->p.modification_flag & 0x1) {
3962 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3963 hevc_print(hevc, 0, "ref0 POC (modification):");
3964 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
3965 int cIdx = params->p.modification_list[rIdx];
3966
3967 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
3968 cIdx >=
3969 num_neg ? RefPicSetStCurr1[cIdx -
3970 num_neg] :
3971 RefPicSetStCurr0[cIdx];
3972 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3973 hevc_print_cont(hevc, 0, "%d ",
3974 pic->m_aiRefPOCList0[pic->
3975 slice_idx]
3976 [rIdx]);
3977 }
3978 }
3979 } else {
3980 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3981 hevc_print(hevc, 0, "ref0 POC:");
3982 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
3983 int cIdx = rIdx % total_num;
3984
3985 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
3986 cIdx >=
3987 num_neg ? RefPicSetStCurr1[cIdx -
3988 num_neg] :
3989 RefPicSetStCurr0[cIdx];
3990 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3991 hevc_print_cont(hevc, 0, "%d ",
3992 pic->m_aiRefPOCList0[pic->
3993 slice_idx]
3994 [rIdx]);
3995 }
3996 }
3997 }
3998 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3999 hevc_print_cont(hevc, 0, "\n");
4000 if (params->p.slice_type == B_SLICE) {
4001 if (params->p.modification_flag & 0x2) {
4002 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4003 hevc_print(hevc, 0,
4004 "ref1 POC (modification):");
4005 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4006 rIdx++) {
4007 int cIdx;
4008
4009 if (params->p.modification_flag & 0x1) {
4010 cIdx =
4011 params->p.
4012 modification_list
4013 [num_ref_idx_l0_active +
4014 rIdx];
4015 } else {
4016 cIdx =
4017 params->p.
4018 modification_list[rIdx];
4019 }
4020 pic->m_aiRefPOCList1[pic->
4021 slice_idx][rIdx] =
4022 cIdx >=
4023 num_pos ?
4024 RefPicSetStCurr0[cIdx - num_pos]
4025 : RefPicSetStCurr1[cIdx];
4026 if (get_dbg_flag(hevc) &
4027 H265_DEBUG_BUFMGR) {
4028 hevc_print_cont(hevc, 0, "%d ",
4029 pic->
4030 m_aiRefPOCList1[pic->
4031 slice_idx]
4032 [rIdx]);
4033 }
4034 }
4035 } else {
4036 if (get_dbg_flag(hevc) &
4037 H265_DEBUG_BUFMGR)
4038 hevc_print(hevc, 0, "ref1 POC:");
4039 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4040 rIdx++) {
4041 int cIdx = rIdx % total_num;
4042
4043 pic->m_aiRefPOCList1[pic->
4044 slice_idx][rIdx] =
4045 cIdx >=
4046 num_pos ?
4047 RefPicSetStCurr0[cIdx -
4048 num_pos]
4049 : RefPicSetStCurr1[cIdx];
4050 if (get_dbg_flag(hevc) &
4051 H265_DEBUG_BUFMGR) {
4052 hevc_print_cont(hevc, 0, "%d ",
4053 pic->
4054 m_aiRefPOCList1[pic->
4055 slice_idx]
4056 [rIdx]);
4057 }
4058 }
4059 }
4060 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4061 hevc_print_cont(hevc, 0, "\n");
4062 }
4063 }
4064 /*set m_PIC */
4065 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4066 (params->p.slice_type == P_SLICE) ? 1 :
4067 (params->p.slice_type == B_SLICE) ? 0 : 3;
4068 pic->RefNum_L0 = num_ref_idx_l0_active;
4069 pic->RefNum_L1 = num_ref_idx_l1_active;
4070}
4071
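/*
 * update_tile_info() - derive per-tile geometry and SAO work buffer
 * addresses for the current picture.
 *
 * With tiles enabled, tile widths/heights come either from a uniform split
 * (tiles_enabled_flag bit 1) or from the explicit tile_width[] /
 * tile_height[] arrays; the last row/column absorbs the remainder.  Each
 * tile also gets its slice of the sao_vb/sao_abv workspace.  Without tiles
 * a single tile covering the whole picture is set up.
 */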
4072static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4073 int pic_height_cu, int sao_mem_unit,
4074 union param_u *params)
4075{
4076 int i, j;
4077 int start_cu_x, start_cu_y;
4078 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4079 int sao_abv_size = sao_mem_unit * pic_width_cu;
4080#ifdef DETREFILL_ENABLE
4081 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4082 int tmpRefillLcuSize = 1 <<
4083 (params->p.log2_min_coding_block_size_minus3 +
4084 3 + params->p.log2_diff_max_min_coding_block_size);
4085 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4086 "%x, %x, %x, %x\n",
4087 params->p.slice_segment_address,
4088 params->p.bit_depth,
4089 params->p.tiles_enabled_flag,
4090 tmpRefillLcuSize);
4091 if (params->p.slice_segment_address == 0 &&
4092 params->p.bit_depth != 0 &&
4093 (params->p.tiles_enabled_flag & 1) &&
4094 tmpRefillLcuSize == 64)
4095 hevc->delrefill_check = 1;
4096 else
4097 hevc->delrefill_check = 0;
4098 }
4099#endif
4100
4101 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4102 if (params->p.tiles_enabled_flag & 1) {
4103 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4104 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4105
4106 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4107 || hevc->num_tile_row <= 0) {
4108 hevc->num_tile_row = 1;
4109 hevc_print(hevc, 0,
4110 "%s: num_tile_rows_minus1 (%d) error!!\n",
4111 __func__, params->p.num_tile_rows_minus1);
4112 }
4113 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4114 || hevc->num_tile_col <= 0) {
4115 hevc->num_tile_col = 1;
4116 hevc_print(hevc, 0,
4117 "%s: num_tile_columns_minus1 (%d) error!!\n",
4118 __func__, params->p.num_tile_columns_minus1);
4119 }
4120 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4121 hevc_print(hevc, 0,
4122 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4123 __func__, pic_width_cu, pic_height_cu);
4124 hevc_print_cont(hevc, 0,
4125 "num_tile_col %d num_tile_row %d:\n",
4126 hevc->num_tile_col, hevc->num_tile_row);
4127 }
4128
4129 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4130 int w = pic_width_cu / hevc->num_tile_col;
4131 int h = pic_height_cu / hevc->num_tile_row;
4132
4133 start_cu_y = 0;
4134 for (i = 0; i < hevc->num_tile_row; i++) {
4135 start_cu_x = 0;
4136 for (j = 0; j < hevc->num_tile_col; j++) {
4137 if (j == (hevc->num_tile_col - 1)) {
4138 hevc->m_tile[i][j].width =
4139 pic_width_cu -
4140 start_cu_x;
4141 } else
4142 hevc->m_tile[i][j].width = w;
4143 if (i == (hevc->num_tile_row - 1)) {
4144 hevc->m_tile[i][j].height =
4145 pic_height_cu -
4146 start_cu_y;
4147 } else
4148 hevc->m_tile[i][j].height = h;
4149 hevc->m_tile[i][j].start_cu_x
4150 = start_cu_x;
4151 hevc->m_tile[i][j].start_cu_y
4152 = start_cu_y;
4153 hevc->m_tile[i][j].sao_vb_start_addr =
4154 hevc->work_space_buf->sao_vb.
4155 buf_start + j * sao_vb_size;
4156 hevc->m_tile[i][j].sao_abv_start_addr =
4157 hevc->work_space_buf->sao_abv.
4158 buf_start + i * sao_abv_size;
4159 if (get_dbg_flag(hevc) &
4160 H265_DEBUG_BUFMGR) {
4161 hevc_print_cont(hevc, 0,
4162 "{y=%d, x=%d w %d h %d ",
4163 i, j, hevc->m_tile[i][j].width,
4164 hevc->m_tile[i][j].height);
4165 hevc_print_cont(hevc, 0,
4166 "start_x %d start_y %d ",
4167 hevc->m_tile[i][j].start_cu_x,
4168 hevc->m_tile[i][j].start_cu_y);
4169 hevc_print_cont(hevc, 0,
4170 "sao_vb_start 0x%x ",
4171 hevc->m_tile[i][j].
4172 sao_vb_start_addr);
4173 hevc_print_cont(hevc, 0,
4174 "sao_abv_start 0x%x}\n",
4175 hevc->m_tile[i][j].
4176 sao_abv_start_addr);
4177 }
4178 start_cu_x += hevc->m_tile[i][j].width;
4179
4180 }
4181 start_cu_y += hevc->m_tile[i][0].height;
4182 }
4183 } else {
4184 start_cu_y = 0;
4185 for (i = 0; i < hevc->num_tile_row; i++) {
4186 start_cu_x = 0;
4187 for (j = 0; j < hevc->num_tile_col; j++) {
4188 if (j == (hevc->num_tile_col - 1)) {
4189 hevc->m_tile[i][j].width =
4190 pic_width_cu -
4191 start_cu_x;
4192 } else {
4193 hevc->m_tile[i][j].width =
4194 params->p.tile_width[j];
4195 }
4196 if (i == (hevc->num_tile_row - 1)) {
4197 hevc->m_tile[i][j].height =
4198 pic_height_cu -
4199 start_cu_y;
4200 } else {
4201 hevc->m_tile[i][j].height =
4202 params->
4203 p.tile_height[i];
4204 }
4205 hevc->m_tile[i][j].start_cu_x
4206 = start_cu_x;
4207 hevc->m_tile[i][j].start_cu_y
4208 = start_cu_y;
4209 hevc->m_tile[i][j].sao_vb_start_addr =
4210 hevc->work_space_buf->sao_vb.
4211 buf_start + j * sao_vb_size;
4212 hevc->m_tile[i][j].sao_abv_start_addr =
4213 hevc->work_space_buf->sao_abv.
4214 buf_start + i * sao_abv_size;
4215 if (get_dbg_flag(hevc) &
4216 H265_DEBUG_BUFMGR) {
4217 hevc_print_cont(hevc, 0,
4218 "{y=%d, x=%d w %d h %d ",
4219 i, j, hevc->m_tile[i][j].width,
4220 hevc->m_tile[i][j].height);
4221 hevc_print_cont(hevc, 0,
4222 "start_x %d start_y %d ",
4223 hevc->m_tile[i][j].start_cu_x,
4224 hevc->m_tile[i][j].start_cu_y);
4225 hevc_print_cont(hevc, 0,
4226 "sao_vb_start 0x%x ",
4227 hevc->m_tile[i][j].
4228 sao_vb_start_addr);
4229 hevc_print_cont(hevc, 0,
4230 "sao_abv_start 0x%x}\n",
4231 hevc->m_tile[i][j].
4232 sao_abv_start_addr);
4233
4234 }
4235 start_cu_x += hevc->m_tile[i][j].width;
4236 }
4237 start_cu_y += hevc->m_tile[i][0].height;
4238 }
4239 }
4240 } else {
4241 hevc->num_tile_col = 1;
4242 hevc->num_tile_row = 1;
4243 hevc->m_tile[0][0].width = pic_width_cu;
4244 hevc->m_tile[0][0].height = pic_height_cu;
4245 hevc->m_tile[0][0].start_cu_x = 0;
4246 hevc->m_tile[0][0].start_cu_y = 0;
4247 hevc->m_tile[0][0].sao_vb_start_addr =
4248 hevc->work_space_buf->sao_vb.buf_start;
4249 hevc->m_tile[0][0].sao_abv_start_addr =
4250 hevc->work_space_buf->sao_abv.buf_start;
4251 }
4252}
4253
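/*
 * get_tile_index() - map an LCU address to its tile, returned as
 * (tile_x | tile_y << 8), by comparing the LCU x/y against the tile start
 * coordinates.  Returns -1 if pic_width_lcu is 0 (not yet configured).
 */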
4254static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4255 int pic_width_lcu)
4256{
4257 int cu_x;
4258 int cu_y;
4259 int tile_x = 0;
4260 int tile_y = 0;
4261 int i;
4262
4263 if (pic_width_lcu == 0) {
4264 if (get_dbg_flag(hevc)) {
4265 hevc_print(hevc, 0,
4266 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4267 __func__, hevc->pic_w, hevc->pic_h);
4268 }
4269 return -1;
4270 }
4271 cu_x = cu_adr % pic_width_lcu;
4272 cu_y = cu_adr / pic_width_lcu;
4273 if (hevc->tile_enabled) {
4274 for (i = 0; i < hevc->num_tile_col; i++) {
4275 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4276 tile_x = i;
4277 else
4278 break;
4279 }
4280 for (i = 0; i < hevc->num_tile_row; i++) {
4281 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4282 tile_y = i;
4283 else
4284 break;
4285 }
4286 }
4287 return (tile_x) | (tile_y << 8);
4288}
4289
4290static void print_scratch_error(int error_num)
4291{
4292#if 0
4293 if (get_dbg_flag(hevc)) {
4294 hevc_print(hevc, 0,
4295 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4296 error_num);
4297 }
4298#endif
4299}
4300
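/*
 * hevc_config_work_space_hw() - write the physical addresses of the
 * workspace sub-buffers (IPP line buffer, RPM, short-term RPS, VPS/SPS/PPS,
 * SAO up, scale LUT, deblock parameter/data buffers, LMEM dump) into the
 * corresponding HEVC registers.  In MMU mode the frame MMU map address is
 * also programmed (a different register is used from G12A onwards).
 */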
4301static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4302{
4303 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4304
4305 if (get_dbg_flag(hevc))
4306 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4307 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4308 __func__,
4309 buf_spec->ipp.buf_start,
4310 buf_spec->start_adr,
4311 buf_spec->short_term_rps.buf_start,
4312 buf_spec->vps.buf_start,
4313 buf_spec->sps.buf_start,
4314 buf_spec->pps.buf_start,
4315 buf_spec->sao_up.buf_start,
4316 buf_spec->swap_buf.buf_start,
4317 buf_spec->swap_buf2.buf_start,
4318 buf_spec->scalelut.buf_start,
4319 buf_spec->dblk_para.buf_start,
4320 buf_spec->dblk_data.buf_start,
4321 buf_spec->dblk_data2.buf_start);
4322 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4323 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4324 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4325 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4326 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4327 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4328 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4329 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4330 if (hevc->mmu_enable) {
4331 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4332 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4333 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4334 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4335 } else
4336 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4337 } /*else
4338 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4339 buf_spec->swap_buf.buf_start);
4340 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4341 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4342 /* cfg_p_addr */
4343 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4344 /* cfg_d_addr */
4345 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4346
4347 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4348
4349 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4350}
4351
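/* Download the fixed parser command table to HEVC_PARSER_CMD_WRITE. */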
4352static void parser_cmd_write(void)
4353{
4354 u32 i;
4355 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4356 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4357 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4358 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4359 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4360 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4361 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4362 0x7C00
4363 };
4364 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4365 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4366}
4367
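/*
 * hevc_init_decoder_hw() - hardware bring-up: sanity-check the parser
 * version and shift start/emulate code registers with read/write tests,
 * soft-reset IQIT via DOS_SW_RESET3, enable stream fetching (single
 * instance mode only), program the parser interrupt enables, shift and
 * CABAC control, clear the IQIT scale LUT, send the parser command table
 * and pulse the IPP/MPP software reset.
 */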
4368static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4369 int decode_pic_begin, int decode_pic_num)
4370{
4371 unsigned int data32;
4372 int i;
4373#if 0
4374 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4375 /* Set MCR fetch priorities*/
4376 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4377 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4378 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4379 }
4380#endif
4381#if 1
4382 /* m8baby test1902 */
4383 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4384 hevc_print(hevc, 0,
4385 "%s\n", __func__);
4386 data32 = READ_VREG(HEVC_PARSER_VERSION);
4387 if (data32 != 0x00010001) {
4388 print_scratch_error(25);
4389 return;
4390 }
4391 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4392 data32 = READ_VREG(HEVC_PARSER_VERSION);
4393 if (data32 != 0x5a5a55aa) {
4394 print_scratch_error(26);
4395 return;
4396 }
4397#if 0
4398 /* test Parser Reset */
4399 /* reset iqit to start mem init again */
4400 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4401 (1 << 3) /* reset_whole parser */
4402 );
4403 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4404 data32 = READ_VREG(HEVC_PARSER_VERSION);
4405 if (data32 != 0x00010001)
4406 hevc_print(hevc, 0,
4407 "Test Parser Fatal Error\n");
4408#endif
4409 /* reset iqit to start mem init again */
4410 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4411 );
4412 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4413 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4414
4415#endif
4416 if (!hevc->m_ins_flag) {
4417 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4418 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4419 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4420 data32 |= (0xf << 25); /*arwlen_axi_max*/
4421 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4422 }
4423 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4424 if (data32 != 0x00000100) {
4425 print_scratch_error(29);
4426 return;
4427 }
4428 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4429 if (data32 != 0x00000300) {
4430 print_scratch_error(30);
4431 return;
4432 }
4433 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4434 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4435 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4436 if (data32 != 0x12345678) {
4437 print_scratch_error(31);
4438 return;
4439 }
4440 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4441 if (data32 != 0x9abcdef0) {
4442 print_scratch_error(32);
4443 return;
4444 }
4445 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4446 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4447
4448 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4449 data32 &= 0x03ffffff;
4450 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4451 | /* stream_buffer_empty_int_amrisc_enable */
4452 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4453 (1 << 7) | /* dec_done_int_cpu_enable */
4454 (1 << 4) | /* startcode_found_int_cpu_enable */
4455 (0 << 3) | /* startcode_found_int_amrisc_enable */
4456 (1 << 0) /* parser_int_enable */
4457 ;
4458 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4459
4460 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4461 data32 = data32 | (1 << 1) | /* emulation_check_on */
4462 (1 << 0) /* startcode_check_on */
4463 ;
4464 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4465
4466 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4467 (2 << 4) | /* emulate_code_length_sub_1 */
4468 (2 << 1) | /* start_code_length_sub_1 */
4469 (1 << 0) /* stream_shift_enable */
4470 );
4471
4472 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4473 );
4474 /* hevc_parser_core_clk_en */
4475 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4476 );
4477
4478 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4479
4480 /* Initial IQIT_SCALELUT memory -- just to avoid X in simulation */
4481 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4482 for (i = 0; i < 1024; i++)
4483 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4484
4485#ifdef ENABLE_SWAP_TEST
4486 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4487#endif
4488
4489 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4490 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4491 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4492 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4493 /* Send parser_cmd */
4494 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4495
4496 parser_cmd_write();
4497
4498 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4499 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4500 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4501
4502 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4503 /* (1 << 8) | // sao_sw_pred_enable */
4504 (1 << 5) | /* parser_sao_if_en */
4505 (1 << 2) | /* parser_mpred_if_en */
4506 (1 << 0) /* parser_scaler_if_en */
4507 );
4508
4509 /* Changed to Start MPRED in microcode */
4510 /*
4511 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4512 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4513 * (1<<31)
4514 * );
4515 */
4516
4517 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4518 (1 << 0) /* software reset ipp and mpp */
4519 );
4520 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4521 (0 << 0) /* software reset ipp and mpp */
4522 );
4523
4524 if (get_double_write_mode(hevc) & 0x10)
4525 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4526 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
4527 );
4528
4529}
4530
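/*
 * decoder_hw_reset() - re-initialize the parser path after a soft reset:
 * IQIT reset, stream fetch enable, shift start/emulate code checks, parser
 * interrupt/shift/CABAC setup, scale LUT clear, parser command table and
 * IPP/MPP reset.  Largely mirrors hevc_init_decoder_hw().
 */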
4531static void decoder_hw_reset(void)
4532{
4533 int i;
4534 unsigned int data32;
4535 /* reset iqit to start mem init again */
4536 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4537 );
4538 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4539 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4540
4541 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4542 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4543 ;
4544 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4545
4546 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4547 if (data32 != 0x00000100) {
4548 print_scratch_error(29);
4549 return;
4550 }
4551 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4552 if (data32 != 0x00000300) {
4553 print_scratch_error(30);
4554 return;
4555 }
4556 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4557 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4558 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4559 if (data32 != 0x12345678) {
4560 print_scratch_error(31);
4561 return;
4562 }
4563 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4564 if (data32 != 0x9abcdef0) {
4565 print_scratch_error(32);
4566 return;
4567 }
4568 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4569 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4570
4571 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4572 data32 &= 0x03ffffff;
4573 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4574 | /* stream_buffer_empty_int_amrisc_enable */
4575 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4576 (1 << 7) | /* dec_done_int_cpu_enable */
4577 (1 << 4) | /* startcode_found_int_cpu_enable */
4578 (0 << 3) | /* startcode_found_int_amrisc_enable */
4579 (1 << 0) /* parser_int_enable */
4580 ;
4581 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4582
4583 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4584 data32 = data32 | (1 << 1) | /* emulation_check_on */
4585 (1 << 0) /* startcode_check_on */
4586 ;
4587 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4588
4589 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4590 (2 << 4) | /* emulate_code_length_sub_1 */
4591 (2 << 1) | /* start_code_length_sub_1 */
4592 (1 << 0) /* stream_shift_enable */
4593 );
4594
4595 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4596 );
4597 /* hevc_parser_core_clk_en */
4598 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4599 );
4600
4601 /* Initial IQIT_SCALELUT memory -- just to avoid X in simulation */
4602 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4603 for (i = 0; i < 1024; i++)
4604 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4605
4606 /* Send parser_cmd */
4607 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4608
4609 parser_cmd_write();
4610
4611 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4612 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4613 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4614
4615 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4616 /* (1 << 8) | // sao_sw_pred_enable */
4617 (1 << 5) | /* parser_sao_if_en */
4618 (1 << 2) | /* parser_mpred_if_en */
4619 (1 << 0) /* parser_scaler_if_en */
4620 );
4621
4622 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4623 (1 << 0) /* software reset ipp and mpp */
4624 );
4625 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4626 (0 << 0) /* software reset ipp and mpp */
4627 );
4628}
4629
4630#ifdef CONFIG_HEVC_CLK_FORCED_ON
4631static void config_hevc_clk_forced_on(void)
4632{
4633 unsigned int rdata32;
4634 /* IQIT */
4635 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4636 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4637
4638 /* DBLK */
4639 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4640 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4641
4642 /* SAO */
4643 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4644 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4645
4646 /* MPRED */
4647 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4648 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4649
4650 /* PARSER */
4651 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4652 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4653 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4654 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4655 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4656 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4657 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4658 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4659 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4660 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4661 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4662 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4663 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4664
4665 /* IPP */
4666 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4667 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4668
4669 /* MCRCC */
4670 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4671 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4672}
4673#endif
4674
4675#ifdef MCRCC_ENABLE
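/*
 * config_mcrcc_axi_hw() - set up the motion compensation reference cache:
 * reset it, leave it disabled for I pictures, and for P/B pictures seed
 * CTL2/CTL3 with the canvas indexes of the leading list0/list1 references
 * (the first two list0 references for P) before enabling progressive mode
 * (0xff0).
 */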
4676static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4677{
4678 unsigned int rdata32;
4679 unsigned int rdata32_2;
4680 int l0_cnt = 0;
4681 int l1_cnt = 0x7fff;
4682
4683 if (get_double_write_mode(hevc) & 0x10) {
4684 l0_cnt = hevc->cur_pic->RefNum_L0;
4685 l1_cnt = hevc->cur_pic->RefNum_L1;
4686 }
4687
4688 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4689
4690 if (slice_type == 2) { /* I-PIC */
4691 /* remove reset -- disables clock */
4692 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4693 return;
4694 }
4695
4696 if (slice_type == 0) { /* B-PIC */
4697 /* Programme canvas0 */
4698 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4699 (0 << 8) | (0 << 1) | 0);
4700 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4701 rdata32 = rdata32 & 0xffff;
4702 rdata32 = rdata32 | (rdata32 << 16);
4703 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4704
4705 /* Programme canvas1 */
4706 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4707 (16 << 8) | (1 << 1) | 0);
4708 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4709 rdata32_2 = rdata32_2 & 0xffff;
4710 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4711 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4712 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4713 rdata32_2 = rdata32_2 & 0xffff;
4714 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4715 }
4716 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4717 } else { /* P-PIC */
4718 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4719 (0 << 8) | (1 << 1) | 0);
4720 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4721 rdata32 = rdata32 & 0xffff;
4722 rdata32 = rdata32 | (rdata32 << 16);
4723 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4724
4725 if (l0_cnt == 1) {
4726 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4727 } else {
4728 /* Programme canvas1 */
4729 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4730 rdata32 = rdata32 & 0xffff;
4731 rdata32 = rdata32 | (rdata32 << 16);
4732 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4733 }
4734 }
4735 /* enable mcrcc progressive-mode */
4736 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4737}
4738#endif
4739
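/*
 * config_title_hw() - program the SAO memory unit size and the above /
 * vertical SAO buffer addresses ("title" appears to be a typo of "tile";
 * the function name is kept as-is).
 */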
4740static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4741 int sao_mem_unit)
4742{
4743 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4744 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4745 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4746 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4747}
4748
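/*
 * Aux (SEI/metadata) buffer handling: config_aux_buf() programs the buffer
 * address and packs the prefix/suffix sizes (in 16-byte units) into
 * HEVC_AUX_DATA_SIZE; init_aux_size caches that value so that
 * aux_data_is_avaible() can detect when the ucode has written back a
 * different, non-zero size, i.e. when aux data has been produced.
 */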
4749static u32 init_aux_size;
4750static int aux_data_is_avaible(struct hevc_state_s *hevc)
4751{
4752 u32 reg_val;
4753
4754 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4755 if (reg_val != 0 && reg_val != init_aux_size)
4756 return 1;
4757 else
4758 return 0;
4759}
4760
4761static void config_aux_buf(struct hevc_state_s *hevc)
4762{
4763 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4764 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4765 (hevc->suffix_aux_size >> 4);
4766 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4767}
4768
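/*
 * config_mpred_hw() - program the motion prediction block for the current
 * slice: MV write/read buffer addresses derived from the current and
 * collocated pictures (MV_MEM_UNIT is 0x200/0x80/0x20 per LCU for 64/32/16
 * LCU sizes), LCU/tile geometry, control flags (TMVP, LDC, ColFromL0,
 * long-term flags, merge/AMVP candidate counts), reference counts and
 * enables, current/collocated POCs, and the per-index reference POC
 * registers, which must be written in L0/L1 pairs.
 */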
4769static void config_mpred_hw(struct hevc_state_s *hevc)
4770{
4771 int i;
4772 unsigned int data32;
4773 struct PIC_s *cur_pic = hevc->cur_pic;
4774 struct PIC_s *col_pic = hevc->col_pic;
4775 int AMVP_MAX_NUM_CANDS_MEM = 3;
4776 int AMVP_MAX_NUM_CANDS = 2;
4777 int NUM_CHROMA_MODE = 5;
4778 int DM_CHROMA_IDX = 36;
4779 int above_ptr_ctrl = 0;
4780 int buffer_linear = 1;
4781 int cu_size_log2 = 3;
4782
4783 int mpred_mv_rd_start_addr;
4784 int mpred_curr_lcu_x;
4785 int mpred_curr_lcu_y;
4786 int mpred_above_buf_start;
4787 int mpred_mv_rd_ptr;
4788 int mpred_mv_rd_ptr_p1;
4789 int mpred_mv_rd_end_addr;
4790 int MV_MEM_UNIT;
4791 int mpred_mv_wr_ptr;
4792 int *ref_poc_L0, *ref_poc_L1;
4793
4794 int above_en;
4795 int mv_wr_en;
4796 int mv_rd_en;
4797 int col_isIntra;
4798
4799 if (hevc->slice_type != 2) {
4800 above_en = 1;
4801 mv_wr_en = 1;
4802 mv_rd_en = 1;
4803 col_isIntra = 0;
4804 } else {
4805 above_en = 1;
4806 mv_wr_en = 1;
4807 mv_rd_en = 0;
4808 col_isIntra = 0;
4809 }
4810
4811 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
4812 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
4813 mpred_curr_lcu_x = data32 & 0xffff;
4814 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
4815
4816 MV_MEM_UNIT =
4817 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
4818 5 ? 0x80 : 0x20;
4819 mpred_mv_rd_ptr =
4820 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
4821
4822 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
4823 mpred_mv_rd_end_addr =
4824 mpred_mv_rd_start_addr +
4825 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
4826
4827 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
4828
4829 mpred_mv_wr_ptr =
4830 cur_pic->mpred_mv_wr_start_addr +
4831 (hevc->slice_addr * MV_MEM_UNIT);
4832
4833 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4834 hevc_print(hevc, 0,
4835 "cur pic index %d col pic index %d\n", cur_pic->index,
4836 col_pic->index);
4837 }
4838
4839 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
4840 cur_pic->mpred_mv_wr_start_addr);
4841 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
4842
4843 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
4844 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
4845 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
4846
4847 data32 = READ_VREG(HEVC_MPRED_CTRL0);
4848 data32 = (hevc->slice_type |
4849 hevc->new_pic << 2 |
4850 hevc->new_tile << 3 |
4851 hevc->isNextSliceSegment << 4 |
4852 hevc->TMVPFlag << 5 |
4853 hevc->LDCFlag << 6 |
4854 hevc->ColFromL0Flag << 7 |
4855 above_ptr_ctrl << 8 |
4856 above_en << 9 |
4857 mv_wr_en << 10 |
4858 mv_rd_en << 11 |
4859 col_isIntra << 12 |
4860 buffer_linear << 13 |
4861 hevc->LongTerm_Curr << 14 |
4862 hevc->LongTerm_Col << 15 |
4863 hevc->lcu_size_log2 << 16 |
4864 cu_size_log2 << 20 | hevc->plevel << 24);
4865 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
4866
4867 data32 = READ_VREG(HEVC_MPRED_CTRL1);
4868 data32 = (
4869#if 0
4870 /* no set in m8baby test1902 */
4871 /* Don't override clk_forced_on , */
4872 (data32 & (0x1 << 24)) |
4873#endif
4874 hevc->MaxNumMergeCand |
4875 AMVP_MAX_NUM_CANDS << 4 |
4876 AMVP_MAX_NUM_CANDS_MEM << 8 |
4877 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
4878 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
4879
4880 data32 = (hevc->pic_w | hevc->pic_h << 16);
4881 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
4882
4883 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
4884 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
4885
4886 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
4887 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
4888
4889 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
4890 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
4891
4892 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
4893 /* col_RefNum_L0<<16| */
4894 /* col_RefNum_L1<<24 */
4895 );
4896 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
4897
4898 data32 = (hevc->LongTerm_Ref);
4899 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
4900
4901 data32 = 0;
4902 for (i = 0; i < hevc->RefNum_L0; i++)
4903 data32 = data32 | (1 << i);
4904 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
4905
4906 data32 = 0;
4907 for (i = 0; i < hevc->RefNum_L1; i++)
4908 data32 = data32 | (1 << i);
4909 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
4910
4911 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
4912 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
4913
4914 /* below MPRED Ref_POC_xx_Lx registers must follow Ref_POC_xx_L0 ->
4915 * Ref_POC_xx_L1 in pair write order!!!
4916 */
4917 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
4918 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
4919
4920 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
4921 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
4922
4923 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
4924 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
4925
4926 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
4927 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
4928
4929 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
4930 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
4931
4932 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
4933 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
4934
4935 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
4936 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
4937
4938 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
4939 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
4940
4941 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
4942 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
4943
4944 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
4945 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
4946
4947 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
4948 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
4949
4950 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
4951 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
4952
4953 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
4954 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
4955
4956 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
4957 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
4958
4959 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
4960 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
4961
4962 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
4963 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
4964
4965 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
4966 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
4967
4968 if (hevc->new_pic) {
4969 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
4970 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
4971 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
4972 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
4973 } else if (!hevc->isNextSliceSegment) {
4974 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
4975 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
4976 }
4977
4978 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
4979}
4980
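/*
 * config_sao_hw() - configure the SAO/output path for the current picture:
 * picture and LCU sizes, compressed body / double-write output addresses
 * and lengths, deblock CFGB/CFG1/CFG2 (write enables, pipeline mode, PPS
 * cb/cr QP offsets, LCU size), memory map mode and endianness in
 * HEVC_SAO_CTRL1 / HEVCD_IPP_AXIIF_CONFIG (with NV12/NV21 swap for v4l),
 * and the loop-filter across slices/tiles flags in HEVC_DBLK_CFG9 and
 * HEVC_SAO_CTRL0.
 */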
4981static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
4982{
4983 unsigned int data32, data32_2;
4984 int misc_flag0 = hevc->misc_flag0;
4985 int slice_deblocking_filter_disabled_flag = 0;
4986
4987 int mc_buffer_size_u_v =
4988 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
4989 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
4990 struct PIC_s *cur_pic = hevc->cur_pic;
4991 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
4992
4993 data32 = READ_VREG(HEVC_SAO_CTRL0);
4994 data32 &= (~0xf);
4995 data32 |= hevc->lcu_size_log2;
4996 WRITE_VREG(HEVC_SAO_CTRL0, data32);
4997
4998 data32 = (hevc->pic_w | hevc->pic_h << 16);
4999 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5000
5001 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5002 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5003
5004 if (hevc->new_pic)
5005 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5006#ifdef LOSLESS_COMPRESS_MODE
5007/*SUPPORT_10BIT*/
5008 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5009 data32 = READ_VREG(HEVC_SAO_CTRL5);
5010 data32 &= (~(0xff << 16));
5011
5012 if (get_double_write_mode(hevc) == 2 ||
5013 get_double_write_mode(hevc) == 3)
5014 data32 |= (0xff<<16);
5015 else if (get_double_write_mode(hevc) == 4)
5016 data32 |= (0x33<<16);
5017
5018 if (hevc->mem_saving_mode == 1)
5019 data32 |= (1 << 9);
5020 else
5021 data32 &= ~(1 << 9);
5022 if (workaround_enable & 1)
5023 data32 |= (1 << 7);
5024 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5025 }
5026 data32 = cur_pic->mc_y_adr;
5027 if (get_double_write_mode(hevc))
5028 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5029
5030 if ((get_double_write_mode(hevc) & 0x10) == 0)
5031 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5032
5033 if (hevc->mmu_enable)
5034 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5035#else
5036 data32 = cur_pic->mc_y_adr;
5037 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5038#endif
5039 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5040 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5041
5042#ifdef LOSLESS_COMPRESS_MODE
5043/*SUPPORT_10BIT*/
5044 if (get_double_write_mode(hevc))
5045 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5046#else
5047 data32 = cur_pic->mc_u_v_adr;
5048 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5049#endif
5050 data32 = (mc_buffer_size_u_v_h << 16);
5051 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5052
5053#ifdef LOSLESS_COMPRESS_MODE
5054/*SUPPORT_10BIT*/
5055 if (get_double_write_mode(hevc)) {
5056 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5057 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5058 }
5059#else
5060 /* multi tile to do... */
5061 data32 = cur_pic->mc_y_adr;
5062 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5063
5064 data32 = cur_pic->mc_u_v_adr;
5065 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5066#endif
5067 /* DBLK CONFIG HERE */
5068 if (hevc->new_pic) {
5069 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5070 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5071 data32 = (0xff << 8) | (0x0 << 0);
5072 else
5073 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5074 (0x0 << 0); /* h265 video format*/
5075
5076 if (hevc->pic_w >= 1280)
5077 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5078 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5079 if (get_double_write_mode(hevc) == 0)
5080 data32 |= (0x1 << 8); /*enable first write*/
5081 else if (get_double_write_mode(hevc) == 0x10)
5082 data32 |= (0x1 << 9); /*double write only*/
5083 else
5084 data32 |= ((0x1 << 8) | (0x1 << 9));
5085
5086 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5087 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5088 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5089 }
5090 data32 = (hevc->pic_w | hevc->pic_h << 16);
5091 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5092
5093 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5094 data32 =
5095 ((misc_flag0 >>
5096 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5097 0x1) << 3;
5098 } else
5099 data32 = 0;
5100 data32 |=
5101 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5102 ((params->p.pps_cr_qp_offset
5103 & 0x1f) <<
5104 9));
5105 data32 |=
5106 (hevc->lcu_size ==
5107 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5108
5109 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5110
5111 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5112 /*if (debug & 0x80) {*/
5113 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5114 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5115 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5116 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5117 data32);
5118 /*}*/
5119 }
5120 }
5121#if 0
5122 data32 = READ_VREG(HEVC_SAO_CTRL1);
5123 data32 &= (~0x3000);
5124 data32 |= (mem_map_mode <<
5125 12);
5126
5127/* [13:12] axi_aformat,
5128 * 0-Linear, 1-32x32, 2-64x32
5129 */
5130 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5131
5132 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5133 data32 &= (~0x30);
5134 data32 |= (mem_map_mode <<
5135 4);
5136
5137/* [5:4] -- address_format
5138 * 00:linear 01:32x32 10:64x32
5139 */
5140 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5141#else
5142 /* m8baby test1902 */
5143 data32 = READ_VREG(HEVC_SAO_CTRL1);
5144 data32 &= (~0x3000);
5145 data32 |= (mem_map_mode <<
5146 12);
5147
5148/* [13:12] axi_aformat, 0-Linear,
5149 * 1-32x32, 2-64x32
5150 */
5151 data32 &= (~0xff0);
5152 /* data32 |= 0x670; // Big-Endian per 64-bit */
5153 data32 |= endian; /* Big-Endian per 64-bit */
5154 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5155 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5156 if (get_double_write_mode(hevc) == 0)
5157 data32 |= 0x2; /*disable double write*/
5158 else if (get_double_write_mode(hevc) & 0x10)
5159 data32 |= 0x1; /*disable cm*/
5160 } else {
5161 unsigned int data;
5162 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5163 (0x0 << 0); /* h265 video format*/
5164 if (hevc->pic_w >= 1280)
5165 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5166 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5167 if (get_double_write_mode(hevc) == 0)
5168 data |= (0x1 << 8); /*enable first write*/
5169 else if (get_double_write_mode(hevc) & 0x10)
5170 data |= (0x1 << 9); /*double write only*/
5171 else
5172 data |= ((0x1 << 8) | (0x1 << 9));
5173
5174 WRITE_VREG(HEVC_DBLK_CFGB, data);
5175 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5176 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5177 }
5178
5179 /* swap uv */
5180 if (hevc->is_used_v4l) {
5181 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5182 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5183 data32 &= ~(1 << 8); /* NV21 */
5184 else
5185 data32 |= (1 << 8); /* NV12 */
5186 }
5187
5188 /*
5189 * [31:24] ar_fifo1_axi_thred
5190 * [23:16] ar_fifo0_axi_thred
5191 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5192 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5193 * [11:08] axi_lendian_C
5194 * [07:04] axi_lendian_Y
5195 * [3] reserved
5196 * [2] clk_forceon
5197 * [1] dw_disable:disable double write output
5198 * [0] cm_disable:disable compress output
5199 */
5200 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5201 if (get_double_write_mode(hevc) & 0x10) {
5202 /* [23:22] dw_v1_ctrl
5203 *[21:20] dw_v0_ctrl
5204 *[19:18] dw_h1_ctrl
5205 *[17:16] dw_h0_ctrl
5206 */
5207 data32 = READ_VREG(HEVC_SAO_CTRL5);
5208 /*set them all 0 for H265_NV21 (no down-scale)*/
5209 data32 &= ~(0xff << 16);
5210 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5211 }
5212
5213 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5214 data32 &= (~0x30);
5215 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5216 data32 |= (mem_map_mode <<
5217 4);
5218 data32 &= (~0xF);
5219 data32 |= 0xf; /* valid only when double write only */
5220 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5221
5222 /* swap uv */
5223 if (hevc->is_used_v4l) {
5224 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5225 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5226 data32 |= (1 << 12); /* NV21 */
5227 else
5228 data32 &= ~(1 << 12); /* NV12 */
5229 }
5230
5231 /*
5232 * [3:0] little_endian
5233 * [5:4] address_format 00:linear 01:32x32 10:64x32
5234 * [7:6] reserved
5235 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5236 * [11:10] reserved
5237 * [12] CbCr_byte_swap
5238 * [31:13] reserved
5239 */
5240 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5241#endif
5242 data32 = 0;
5243 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5244 data32_2 &= (~0x300);
5245 /* slice_deblocking_filter_disabled_flag = 0;
5246 * ucode has handled it, so read it from ucode directly
5247 */
5248 if (hevc->tile_enabled) {
5249 data32 |=
5250 ((misc_flag0 >>
5251 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5252 0x1) << 0;
5253 data32_2 |=
5254 ((misc_flag0 >>
5255 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5256 0x1) << 8;
5257 }
5258 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5259 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5260 0x1; /* ucode has handled it, so read it from ucode directly */
5261 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5262 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5263 /* slice_deblocking_filter_disabled_flag =
5264 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5265 * //ucode has handled it, so read it from ucode directly
5266 */
5267 data32 |= slice_deblocking_filter_disabled_flag << 2;
5268 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5269 hevc_print_cont(hevc, 0,
5270 "(1,%x)", data32);
5271 if (!slice_deblocking_filter_disabled_flag) {
5272 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5273 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5274 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5275 hevc_print_cont(hevc, 0,
5276 "(2,%x)", data32);
5277 }
5278 } else {
5279 data32 |=
5280 ((misc_flag0 >>
5281 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5282 0x1) << 2;
5283 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5284 hevc_print_cont(hevc, 0,
5285 "(3,%x)", data32);
5286 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5287 0x1) == 0) {
5288 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5289 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5290 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5291 hevc_print_cont(hevc, 0,
5292 "(4,%x)", data32);
5293 }
5294 }
5295 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5296 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5297 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5298 || (!slice_deblocking_filter_disabled_flag))) {
5299 data32 |=
5300 ((misc_flag0 >>
5301 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5302 & 0x1) << 1;
5303 data32_2 |=
5304 ((misc_flag0 >>
5305 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5306 & 0x1) << 9;
5307 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5308 hevc_print_cont(hevc, 0,
5309 "(5,%x)\n", data32);
5310 } else {
5311 data32 |=
5312 ((misc_flag0 >>
5313 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5314 & 0x1) << 1;
5315 data32_2 |=
5316 ((misc_flag0 >>
5317 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5318 & 0x1) << 9;
5319 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5320 hevc_print_cont(hevc, 0,
5321 "(6,%x)\n", data32);
5322 }
5323 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5324 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5325}
5326
5327#ifdef TEST_NO_BUF
5328static unsigned char test_flag = 1;
5329#endif
5330
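/*
 * pic_list_process() - reconcile the picture list with the current stream
 * size: idle pictures with a stale width/height give up their buffer and
 * are either resized or invalidated, invalidated entries are revived when
 * fewer than get_work_pic_num() pictures are allocated, and unused buffers
 * are then freed.
 */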
5331static void pic_list_process(struct hevc_state_s *hevc)
5332{
5333 int work_pic_num = get_work_pic_num(hevc);
5334 int alloc_pic_count = 0;
5335 int i;
5336 struct PIC_s *pic;
5337 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5338 pic = hevc->m_PIC[i];
5339 if (pic == NULL || pic->index == -1)
5340 continue;
5341 alloc_pic_count++;
5342 if (pic->output_mark == 0 && pic->referenced == 0
5343 && pic->output_ready == 0
5344 && (pic->width != hevc->pic_w ||
5345 pic->height != hevc->pic_h)
5346 ) {
5347 set_buf_unused(hevc, pic->BUF_index);
5348 pic->BUF_index = -1;
5349 if (alloc_pic_count > work_pic_num) {
5350 pic->width = 0;
5351 pic->height = 0;
5352 pic->index = -1;
5353 } else {
5354 pic->width = hevc->pic_w;
5355 pic->height = hevc->pic_h;
5356 }
5357 }
5358 }
5359 if (alloc_pic_count < work_pic_num) {
5360 int new_count = alloc_pic_count;
5361 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5362 pic = hevc->m_PIC[i];
5363 if (pic && pic->index == -1) {
5364 pic->index = i;
5365 pic->BUF_index = -1;
5366 pic->width = hevc->pic_w;
5367 pic->height = hevc->pic_h;
5368 new_count++;
5369 if (new_count >=
5370 work_pic_num)
5371 break;
5372 }
5373 }
5374
5375 }
5376 dealloc_unused_buf(hevc);
5377 if (get_alloc_pic_count(hevc)
5378 != alloc_pic_count) {
5379 hevc_print_cont(hevc, 0,
5380 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5381 __func__,
5382 work_pic_num,
5383 alloc_pic_count,
5384 get_alloc_pic_count(hevc));
5385 }
5386}
5387
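/* Release the MMU (scatter) allocations of pictures that are fully idle. */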
5388static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5389{
5390 int i;
5391 struct PIC_s *pic;
5392 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5393 pic = hevc->m_PIC[i];
5394 if (pic == NULL || pic->index == -1)
5395 continue;
5396
5397 if (pic->output_mark == 0 && pic->referenced == 0
5398 && pic->output_ready == 0
5399 && pic->scatter_alloc
5400 )
5401 release_pic_mmu_buf(hevc, pic);
5402 }
5403
5404}
5405
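/*
 * get_new_pic() - pick a free picture (not referenced, not queued for
 * output, matching the current width/height, preferring an unused
 * INVALID_POC entry and otherwise the lowest POC), allocate its data /
 * canvas / MV / MMU resources on demand and initialize the per-picture
 * state (decode_idx, POC, pic_struct, bit depths, conformance window, ...)
 * from the RPM parameters.  Returns NULL if no candidate is found or an
 * allocation fails.
 */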
5406static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5407 union param_u *rpm_param)
5408{
5409 struct PIC_s *new_pic = NULL;
5410 struct PIC_s *pic;
5411 int i;
5412 int ret;
5413
5414 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5415 pic = hevc->m_PIC[i];
5416 if (pic == NULL || pic->index == -1)
5417 continue;
5418
5419 if (pic->output_mark == 0 && pic->referenced == 0
5420 && pic->output_ready == 0
5421 && pic->width == hevc->pic_w
5422 && pic->height == hevc->pic_h
5423 ) {
5424 if (new_pic) {
5425 if (new_pic->POC != INVALID_POC) {
5426 if (pic->POC == INVALID_POC ||
5427 pic->POC < new_pic->POC)
5428 new_pic = pic;
5429 }
5430 } else
5431 new_pic = pic;
5432 }
5433 }
5434
5435 if (new_pic == NULL)
5436 return NULL;
5437
5438 if (new_pic->BUF_index < 0) {
5439 if (alloc_buf(hevc) < 0)
5440 return NULL;
5441 else {
5442 if (config_pic(hevc, new_pic) < 0) {
5443 dealloc_pic_buf(hevc, new_pic);
5444 return NULL;
5445 }
5446 }
5447 new_pic->width = hevc->pic_w;
5448 new_pic->height = hevc->pic_h;
5449 set_canvas(hevc, new_pic);
5450
5451 init_pic_list_hw(hevc);
5452 }
5453
5454 if (new_pic) {
5455 new_pic->double_write_mode =
5456 get_double_write_mode(hevc);
5457 if (new_pic->double_write_mode)
5458 set_canvas(hevc, new_pic);
5459
5460#ifdef TEST_NO_BUF
5461 if (test_flag) {
5462 test_flag = 0;
5463 return NULL;
5464 } else
5465 test_flag = 1;
5466#endif
5467 if (get_mv_buf(hevc, new_pic) < 0)
5468 return NULL;
5469
5470 if (hevc->mmu_enable) {
5471 ret = H265_alloc_mmu(hevc, new_pic,
5472 rpm_param->p.bit_depth,
5473 hevc->frame_mmu_map_addr);
5474 if (ret != 0) {
5475 put_mv_buf(hevc, new_pic);
5476 hevc_print(hevc, 0,
5477 "can't alloc need mmu1,idx %d ret =%d\n",
5478 new_pic->decode_idx,
5479 ret);
5480 return NULL;
5481 }
5482 }
5483 new_pic->referenced = 1;
5484 new_pic->decode_idx = hevc->decode_idx;
5485 new_pic->slice_idx = 0;
5487 new_pic->output_mark = 0;
5488 new_pic->recon_mark = 0;
5489 new_pic->error_mark = 0;
5490 new_pic->dis_mark = 0;
5491 /* new_pic->output_ready = 0; */
5492 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5493 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5494 new_pic->POC = hevc->curr_POC;
5495 new_pic->pic_struct = hevc->curr_pic_struct;
5496 if (new_pic->aux_data_buf)
5497 release_aux_data(hevc, new_pic);
5498 new_pic->mem_saving_mode =
5499 hevc->mem_saving_mode;
5500 new_pic->bit_depth_luma =
5501 hevc->bit_depth_luma;
5502 new_pic->bit_depth_chroma =
5503 hevc->bit_depth_chroma;
5504 new_pic->video_signal_type =
5505 hevc->video_signal_type;
5506
5507 new_pic->conformance_window_flag =
5508 hevc->param.p.conformance_window_flag;
5509 new_pic->conf_win_left_offset =
5510 hevc->param.p.conf_win_left_offset;
5511 new_pic->conf_win_right_offset =
5512 hevc->param.p.conf_win_right_offset;
5513 new_pic->conf_win_top_offset =
5514 hevc->param.p.conf_win_top_offset;
5515 new_pic->conf_win_bottom_offset =
5516 hevc->param.p.conf_win_bottom_offset;
5517 new_pic->chroma_format_idc =
5518 hevc->param.p.chroma_format_idc;
5519
5520 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5521 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5522 __func__, new_pic->index,
5523 new_pic->BUF_index, new_pic->decode_idx,
5524 new_pic->POC);
5525
5526 }
5527 if (pic_list_debug & 0x1) {
5528 dump_pic_list(hevc);
5529 pr_err("\n*******************************************\n");
5530 }
5531
5532 return new_pic;
5533}
5534
5535static int get_display_pic_num(struct hevc_state_s *hevc)
5536{
5537 int i;
5538 struct PIC_s *pic;
5539 int num = 0;
5540
5541 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5542 pic = hevc->m_PIC[i];
5543 if (pic == NULL ||
5544 pic->index == -1)
5545 continue;
5546
5547 if (pic->output_ready == 1)
5548 num++;
5549 }
5550 return num;
5551}
5552
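/*
 * flush_output() - mark the passed picture for output (with PB-skip error
 * handling), then repeatedly call output_pic() in flush mode: each returned
 * picture is un-referenced and either recycled (error/debug/i_only cases)
 * or handed to the display path via prepare_display_buf(); finally all
 * referenced flags are cleared.
 */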
5553static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5554{
5555 struct PIC_s *pic_display;
5556
5557 if (pic) {
5558 /*PB skip control */
5559 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5560 /* start decoding after first I */
5561 hevc->ignore_bufmgr_error |= 0x1;
5562 }
5563 if (hevc->ignore_bufmgr_error & 1) {
5564 if (hevc->PB_skip_count_after_decoding > 0)
5565 hevc->PB_skip_count_after_decoding--;
5566 else {
5567 /* start displaying */
5568 hevc->ignore_bufmgr_error |= 0x2;
5569 }
5570 }
5571 /**/
5572 if (pic->POC != INVALID_POC) {
5573 pic->output_mark = 1;
5574 pic->recon_mark = 1;
5575 }
5576 pic->recon_mark = 1;
5577 }
5578 do {
5579 pic_display = output_pic(hevc, 1);
5580
5581 if (pic_display) {
5582 pic_display->referenced = 0;
5583 put_mv_buf(hevc, pic_display);
5584 if ((pic_display->error_mark
5585 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5586 || (get_dbg_flag(hevc) &
5587 H265_DEBUG_DISPLAY_CUR_FRAME)
5588 || (get_dbg_flag(hevc) &
5589 H265_DEBUG_NO_DISPLAY)) {
5590 pic_display->output_ready = 0;
5591 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5592 hevc_print(hevc, 0,
5593 "[BM] Display: POC %d, ",
5594 pic_display->POC);
5595 hevc_print_cont(hevc, 0,
5596 "decoding index %d ==> ",
5597 pic_display->decode_idx);
5598 hevc_print_cont(hevc, 0,
5599 "Debug mode or error, recycle it\n");
5600 }
5601 } else {
5602 if (hevc->i_only & 0x1
5603 && pic_display->slice_type != 2) {
5604 pic_display->output_ready = 0;
5605 } else {
5606 prepare_display_buf(hevc, pic_display);
5607 if (get_dbg_flag(hevc)
5608 & H265_DEBUG_BUFMGR) {
5609 hevc_print(hevc, 0,
5610 "[BM] flush Display: POC %d, ",
5611 pic_display->POC);
5612 hevc_print_cont(hevc, 0,
5613 "decoding index %d\n",
5614 pic_display->decode_idx);
5615 }
5616 }
5617 }
5618 }
5619 } while (pic_display);
5620 clear_referenced_flag(hevc);
5621}
5622
5623/*
5624* dv_meta_flag: 1, dolby meta only; 2, not include dolby meta
5625*/
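/*
 * set_aux_data() repacks the 16-bit words the ucode wrote into the aux
 * buffer (prefix or suffix area, per suffix_flag) into pic->aux_data_buf,
 * prefixing each tagged payload with an 8-byte header (32-bit big-endian
 * length, tag byte, reserved byte, 16-bit padding length) and filtering by
 * tag according to dv_meta_flag.
 */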
5626static void set_aux_data(struct hevc_state_s *hevc,
5627 struct PIC_s *pic, unsigned char suffix_flag,
5628 unsigned char dv_meta_flag)
5629{
5630 int i;
5631 unsigned short *aux_adr;
5632 unsigned int size_reg_val =
5633 READ_VREG(HEVC_AUX_DATA_SIZE);
5634 unsigned int aux_count = 0;
5635 int aux_size = 0;
5636 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
5637 return;
5638
5639 if (hevc->aux_data_dirty ||
5640 hevc->m_ins_flag == 0) {
5641
5642 hevc->aux_data_dirty = 0;
5643 }
5644
5645 if (suffix_flag) {
5646 aux_adr = (unsigned short *)
5647 (hevc->aux_addr +
5648 hevc->prefix_aux_size);
5649 aux_count =
5650 ((size_reg_val & 0xffff) << 4)
5651 >> 1;
5652 aux_size =
5653 hevc->suffix_aux_size;
5654 } else {
5655 aux_adr =
5656 (unsigned short *)hevc->aux_addr;
5657 aux_count =
5658 ((size_reg_val >> 16) << 4)
5659 >> 1;
5660 aux_size =
5661 hevc->prefix_aux_size;
5662 }
5663 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5664 hevc_print(hevc, 0,
5665 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
5666 __func__, pic, pic->aux_data_size,
5667 aux_count, suffix_flag, dv_meta_flag);
5668 }
5669 if (aux_size > 0 && aux_count > 0) {
5670 int heads_size = 0;
5671 int new_size;
5672 char *new_buf;
5673
5674 for (i = 0; i < aux_count; i++) {
5675 unsigned char tag = aux_adr[i] >> 8;
5676 if (tag != 0 && tag != 0xff) {
5677 if (dv_meta_flag == 0)
5678 heads_size += 8;
5679 else if (dv_meta_flag == 1 && tag == 0x1)
5680 heads_size += 8;
5681 else if (dv_meta_flag == 2 && tag != 0x1)
5682 heads_size += 8;
5683 }
5684 }
5685 new_size = pic->aux_data_size + aux_count + heads_size;
5686 new_buf = vmalloc(new_size);
5687 if (new_buf) {
5688 unsigned char valid_tag = 0;
5689 unsigned char *h =
5690 new_buf +
5691 pic->aux_data_size;
5692 unsigned char *p = h + 8;
5693 int len = 0;
5694 int padding_len = 0;
5695 memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
5696 if (pic->aux_data_buf)
5697 vfree(pic->aux_data_buf);
5698 pic->aux_data_buf = new_buf;
5699 for (i = 0; i < aux_count; i += 4) {
5700 int ii;
5701 unsigned char tag = aux_adr[i + 3] >> 8;
5702 if (tag != 0 && tag != 0xff) {
5703 if (dv_meta_flag == 0)
5704 valid_tag = 1;
5705 else if (dv_meta_flag == 1
5706 && tag == 0x1)
5707 valid_tag = 1;
5708 else if (dv_meta_flag == 2
5709 && tag != 0x1)
5710 valid_tag = 1;
5711 else
5712 valid_tag = 0;
5713 if (valid_tag && len > 0) {
5714 pic->aux_data_size +=
5715 (len + 8);
5716 h[0] = (len >> 24)
5717 & 0xff;
5718 h[1] = (len >> 16)
5719 & 0xff;
5720 h[2] = (len >> 8)
5721 & 0xff;
5722 h[3] = (len >> 0)
5723 & 0xff;
5724 h[6] =
5725 (padding_len >> 8)
5726 & 0xff;
5727 h[7] = (padding_len)
5728 & 0xff;
5729 h += (len + 8);
5730 p += 8;
5731 len = 0;
5732 padding_len = 0;
5733 }
5734 if (valid_tag) {
5735 h[4] = tag;
5736 h[5] = 0;
5737 h[6] = 0;
5738 h[7] = 0;
5739 }
5740 }
5741 if (valid_tag) {
5742 for (ii = 0; ii < 4; ii++) {
5743 unsigned short aa =
5744 aux_adr[i + 3
5745 - ii];
5746 *p = aa & 0xff;
5747 p++;
5748 len++;
5749 /*if ((aa >> 8) == 0xff)
5750 padding_len++;*/
5751 }
5752 }
5753 }
5754 if (len > 0) {
5755 pic->aux_data_size += (len + 8);
5756 h[0] = (len >> 24) & 0xff;
5757 h[1] = (len >> 16) & 0xff;
5758 h[2] = (len >> 8) & 0xff;
5759 h[3] = (len >> 0) & 0xff;
5760 h[6] = (padding_len >> 8) & 0xff;
5761 h[7] = (padding_len) & 0xff;
5762 }
5763 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5764 hevc_print(hevc, 0,
5765 "aux: (size %d) suffix_flag %d\n",
5766 pic->aux_data_size, suffix_flag);
5767 for (i = 0; i < pic->aux_data_size; i++) {
5768 hevc_print_cont(hevc, 0,
5769 "%02x ", pic->aux_data_buf[i]);
5770 if (((i + 1) & 0xf) == 0)
5771 hevc_print_cont(hevc, 0, "\n");
5772 }
5773 hevc_print_cont(hevc, 0, "\n");
5774 }
5775
5776 } else {
5777 hevc_print(hevc, 0, "new buf alloc failed\n");
5778 if (pic->aux_data_buf)
5779 vfree(pic->aux_data_buf);
5780 pic->aux_data_buf = NULL;
5781 pic->aux_data_size = 0;
5782 }
5783 }
5784
5785}
5786
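/* Drop the per-picture aux (SEI / DV metadata) buffer accumulated by set_aux_data(). */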
5787static void release_aux_data(struct hevc_state_s *hevc,
5788 struct PIC_s *pic)
5789{
5790 if (pic->aux_data_buf)
5791 vfree(pic->aux_data_buf);
5792 pic->aux_data_buf = NULL;
5793 pic->aux_data_size = 0;
5794}
5795
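/*
 * Finish the previously decoded picture before a new one starts
 * (summary of the code below): look it up by its POC, apply PB-skip
 * control, release the unused tail of its MMU pages when scatter
 * allocation was used in single-instance mode, mark it for output, and
 * drain any pictures that have become displayable. For IDR pictures the
 * whole output queue is flushed instead.
 */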
5796static inline void hevc_pre_pic(struct hevc_state_s *hevc,
5797 struct PIC_s *pic)
5798{
5799
5800 /* prev pic */
5801 /*if (hevc->curr_POC != 0) {*/
5802 int decoded_poc = hevc->iPrevPOC;
5803#ifdef MULTI_INSTANCE_SUPPORT
5804 if (hevc->m_ins_flag) {
5805 decoded_poc = hevc->decoded_poc;
5806 hevc->decoded_poc = INVALID_POC;
5807 }
5808#endif
5809 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
5810 && hevc->m_nalUnitType !=
5811 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
5812 struct PIC_s *pic_display;
5813
5814 pic = get_pic_by_POC(hevc, decoded_poc);
5815 if (pic && (pic->POC != INVALID_POC)) {
5816 /*PB skip control */
5817 if (pic->error_mark == 0
5818 && hevc->PB_skip_mode == 1) {
5819 /* start decoding after
5820 * first I
5821 */
5822 hevc->ignore_bufmgr_error |= 0x1;
5823 }
5824 if (hevc->ignore_bufmgr_error & 1) {
5825 if (hevc->PB_skip_count_after_decoding > 0) {
5826 hevc->PB_skip_count_after_decoding--;
5827 } else {
5828 /* start displaying */
5829 hevc->ignore_bufmgr_error |= 0x2;
5830 }
5831 }
5832 if (hevc->mmu_enable
5833 && ((hevc->double_write_mode & 0x10) == 0)) {
5834 if (!hevc->m_ins_flag) {
5835 hevc->used_4k_num =
5836 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
5837
5838 if ((!is_skip_decoding(hevc, pic)) &&
5839 (hevc->used_4k_num >= 0) &&
5840 (hevc->cur_pic->scatter_alloc
5841 == 1)) {
5842 hevc_print(hevc,
5843 H265_DEBUG_BUFMGR_MORE,
5844 "%s pic index %d scatter_alloc %d page_start %d\n",
5845 "decoder_mmu_box_free_idx_tail",
5846 hevc->cur_pic->index,
5847 hevc->cur_pic->scatter_alloc,
5848 hevc->used_4k_num);
5849 hevc_mmu_dma_check(hw_to_vdec(hevc));
5850 decoder_mmu_box_free_idx_tail(
5851 hevc->mmu_box,
5852 hevc->cur_pic->index,
5853 hevc->used_4k_num);
5854 hevc->cur_pic->scatter_alloc
5855 = 2;
5856 }
5857 hevc->used_4k_num = -1;
5858 }
5859 }
5860
5861 pic->output_mark = 1;
5862 pic->recon_mark = 1;
5863 pic->dis_mark = 1;
5864 }
5865 do {
5866 pic_display = output_pic(hevc, 0);
5867
5868 if (pic_display) {
5869 if ((pic_display->error_mark &&
5870 ((hevc->ignore_bufmgr_error &
5871 0x2) == 0))
5872 || (get_dbg_flag(hevc) &
5873 H265_DEBUG_DISPLAY_CUR_FRAME)
5874 || (get_dbg_flag(hevc) &
5875 H265_DEBUG_NO_DISPLAY)) {
5876 pic_display->output_ready = 0;
5877 if (get_dbg_flag(hevc) &
5878 H265_DEBUG_BUFMGR) {
5879 hevc_print(hevc, 0,
5880 "[BM] Display: POC %d, ",
5881 pic_display->POC);
5882 hevc_print_cont(hevc, 0,
5883 "decoding index %d ==> ",
5884 pic_display->
5885 decode_idx);
5886 hevc_print_cont(hevc, 0,
5887 "Debug or err,recycle it\n");
5888 }
5889 } else {
5890 if (hevc->i_only & 0x1
5891 && pic_display->
5892 slice_type != 2) {
5893 pic_display->output_ready = 0;
5894 } else {
5895 prepare_display_buf
5896 (hevc,
5897 pic_display);
5898 if (get_dbg_flag(hevc) &
5899 H265_DEBUG_BUFMGR) {
5900 hevc_print(hevc, 0,
5901 "[BM] Display: POC %d, ",
5902 pic_display->POC);
5903 hevc_print_cont(hevc, 0,
5904 "decoding index %d\n",
5905 pic_display->
5906 decode_idx);
5907 }
5908 }
5909 }
5910 }
5911 } while (pic_display);
5912 } else {
5913 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5914 hevc_print(hevc, 0,
5915 "[BM] current pic is IDR, ");
5916 hevc_print(hevc, 0,
5917 "clear referenced flag of all buffers\n");
5918 }
5919 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5920 dump_pic_list(hevc);
5921 pic = get_pic_by_POC(hevc, decoded_poc);
5922 flush_output(hevc, pic);
5923 }
5924
5925}
5926
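/*
 * Compare the last decoded LCU index against the *previous* picture's
 * LCU grid (lcu_x_num_pre * lcu_y_num_pre) and set error_mark on the
 * current picture if decoding stopped short; head errors and
 * over-decode also mark the picture. The LCU-count check is skipped
 * when error_handle_policy bit 0x20 is set, the over-decode check when
 * bit 0x80 is set.
 */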
5927static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
5928 int decoded_lcu)
5929{
5930 int current_lcu_idx = decoded_lcu;
5931 if (decoded_lcu < 0)
5932 return;
5933
5934 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5935 hevc_print(hevc, 0,
5936 "cur lcu idx = %d, (total %d)\n",
5937 current_lcu_idx, hevc->lcu_total);
5938 }
5939 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
5940 if (hevc->first_pic_after_recover) {
5941 if (current_lcu_idx !=
5942 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
5943 hevc->cur_pic->error_mark = 1;
5944 } else {
5945 if (hevc->lcu_x_num_pre != 0
5946 && hevc->lcu_y_num_pre != 0
5947 && current_lcu_idx != 0
5948 && current_lcu_idx <
5949 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
5950 hevc->cur_pic->error_mark = 1;
5951 }
5952 if (hevc->cur_pic->error_mark) {
5953 hevc_print(hevc, 0,
5954 "cur lcu idx = %d, (total %d), set error_mark\n",
5955 current_lcu_idx,
5956 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
5957 if (is_log_enable(hevc))
5958 add_log(hevc,
5959 "cur lcu idx = %d, (total %d), set error_mark",
5960 current_lcu_idx,
5961 hevc->lcu_x_num_pre *
5962 hevc->lcu_y_num_pre);
5963
5964 }
5965
5966 }
5967 if (hevc->cur_pic && hevc->head_error_flag) {
5968 hevc->cur_pic->error_mark = 1;
5969 hevc_print(hevc, 0,
5970 "head has error, set error_mark\n");
5971 }
5972
5973 if ((error_handle_policy & 0x80) == 0) {
5974 if (hevc->over_decode && hevc->cur_pic) {
5975 hevc_print(hevc, 0,
5976 "over decode, set error_mark\n");
5977 hevc->cur_pic->error_mark = 1;
5978 }
5979 }
5980
5981 hevc->lcu_x_num_pre = hevc->lcu_x_num;
5982 hevc->lcu_y_num_pre = hevc->lcu_y_num;
5983}
5984
5985static void check_pic_decoded_error(struct hevc_state_s *hevc,
5986 int decoded_lcu)
5987{
5988 int current_lcu_idx = decoded_lcu;
5989 if (decoded_lcu < 0)
5990 return;
5991
5992 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5993 hevc_print(hevc, 0,
5994 "cur lcu idx = %d, (total %d)\n",
5995 current_lcu_idx, hevc->lcu_total);
5996 }
5997 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
5998 if (hevc->lcu_x_num != 0
5999 && hevc->lcu_y_num != 0
6000 && current_lcu_idx != 0
6001 && current_lcu_idx <
6002 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6003 hevc->cur_pic->error_mark = 1;
6004 if (hevc->cur_pic->error_mark) {
6005 hevc_print(hevc, 0,
6006 "cur lcu idx = %d, (total %d), set error_mark\n",
6007 current_lcu_idx,
6008 hevc->lcu_x_num*hevc->lcu_y_num);
6009			if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6010 && ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6011 hevc_print(hevc, 0,
6012 "Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6013					hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6014				flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6015 }
6016 if (is_log_enable(hevc))
6017 add_log(hevc,
6018 "cur lcu idx = %d, (total %d), set error_mark",
6019 current_lcu_idx,
6020 hevc->lcu_x_num *
6021 hevc->lcu_y_num);
6022
6023 }
6024
6025 }
6026 if (hevc->cur_pic && hevc->head_error_flag) {
6027 hevc->cur_pic->error_mark = 1;
6028 hevc_print(hevc, 0,
6029 "head has error, set error_mark\n");
6030 }
6031
6032 if ((error_handle_policy & 0x80) == 0) {
6033 if (hevc->over_decode && hevc->cur_pic) {
6034 hevc_print(hevc, 0,
6035 "over decode, set error_mark\n");
6036 hevc->cur_pic->error_mark = 1;
6037 }
6038 }
6039}
6040
6041/* Only after one field or one frame has been fully decoded
6042 * can this function be called to collect its QoS info. */
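/*
 * Two hardware paths below: before G12A, HEVC_MV_INFO / HEVC_QP_INFO /
 * HEVC_SKIP_INFO each pack three per-picture statistics that the driver
 * sorts into min/avg/max; from G12A on, the counters are read back as a
 * fixed sequence from HEVC_PIC_QUALITY_DATA after zeroing the read index
 * in HEVC_PIC_QUALITY_CTRL, and are cleared again via bit 8 of that
 * register once consumed.
 */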
6043static void get_picture_qos_info(struct hevc_state_s *hevc)
6044{
6045 struct PIC_s *picture = hevc->cur_pic;
6046
6047/*
6048#define DEBUG_QOS
6049*/
6050
6051 if (!hevc->cur_pic)
6052 return;
6053
6054 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6055 unsigned char a[3];
6056 unsigned char i, j, t;
6057 unsigned long data;
6058
6059 data = READ_VREG(HEVC_MV_INFO);
6060 if (picture->slice_type == I_SLICE)
6061 data = 0;
6062 a[0] = data & 0xff;
6063 a[1] = (data >> 8) & 0xff;
6064 a[2] = (data >> 16) & 0xff;
6065
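		/* sort a[0..2] ascending so a[0]/a[1]/a[2] become min/avg/max;
		 * equal values are nudged apart by the a[i]++ branch */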
6066 for (i = 0; i < 3; i++)
6067 for (j = i+1; j < 3; j++) {
6068 if (a[j] < a[i]) {
6069 t = a[j];
6070 a[j] = a[i];
6071 a[i] = t;
6072 } else if (a[j] == a[i]) {
6073 a[i]++;
6074 t = a[j];
6075 a[j] = a[i];
6076 a[i] = t;
6077 }
6078 }
6079 picture->max_mv = a[2];
6080 picture->avg_mv = a[1];
6081 picture->min_mv = a[0];
6082#ifdef DEBUG_QOS
6083 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6084 data, a[0], a[1], a[2]);
6085#endif
6086
6087 data = READ_VREG(HEVC_QP_INFO);
6088 a[0] = data & 0x1f;
6089 a[1] = (data >> 8) & 0x3f;
6090 a[2] = (data >> 16) & 0x7f;
6091
6092 for (i = 0; i < 3; i++)
6093 for (j = i+1; j < 3; j++) {
6094 if (a[j] < a[i]) {
6095 t = a[j];
6096 a[j] = a[i];
6097 a[i] = t;
6098 } else if (a[j] == a[i]) {
6099 a[i]++;
6100 t = a[j];
6101 a[j] = a[i];
6102 a[i] = t;
6103 }
6104 }
6105 picture->max_qp = a[2];
6106 picture->avg_qp = a[1];
6107 picture->min_qp = a[0];
6108#ifdef DEBUG_QOS
6109 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6110 data, a[0], a[1], a[2]);
6111#endif
6112
6113 data = READ_VREG(HEVC_SKIP_INFO);
6114 a[0] = data & 0x1f;
6115 a[1] = (data >> 8) & 0x3f;
6116 a[2] = (data >> 16) & 0x7f;
6117
6118 for (i = 0; i < 3; i++)
6119 for (j = i+1; j < 3; j++) {
6120 if (a[j] < a[i]) {
6121 t = a[j];
6122 a[j] = a[i];
6123 a[i] = t;
6124 } else if (a[j] == a[i]) {
6125 a[i]++;
6126 t = a[j];
6127 a[j] = a[i];
6128 a[i] = t;
6129 }
6130 }
6131 picture->max_skip = a[2];
6132 picture->avg_skip = a[1];
6133 picture->min_skip = a[0];
6134
6135#ifdef DEBUG_QOS
6136 hevc_print(hevc, 0,
6137 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6138 data, a[0], a[1], a[2]);
6139#endif
6140 } else {
6141 uint32_t blk88_y_count;
6142 uint32_t blk88_c_count;
6143 uint32_t blk22_mv_count;
6144 uint32_t rdata32;
6145 int32_t mv_hi;
6146 int32_t mv_lo;
6147 uint32_t rdata32_l;
6148 uint32_t mvx_L0_hi;
6149 uint32_t mvy_L0_hi;
6150 uint32_t mvx_L1_hi;
6151 uint32_t mvy_L1_hi;
6152 int64_t value;
6153 uint64_t temp_value;
6154#ifdef DEBUG_QOS
6155 int pic_number = picture->POC;
6156#endif
6157
6158 picture->max_mv = 0;
6159 picture->avg_mv = 0;
6160 picture->min_mv = 0;
6161
6162 picture->max_skip = 0;
6163 picture->avg_skip = 0;
6164 picture->min_skip = 0;
6165
6166 picture->max_qp = 0;
6167 picture->avg_qp = 0;
6168 picture->min_qp = 0;
6169
6170
6171
6172#ifdef DEBUG_QOS
6173 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6174 picture->slice_type,
6175 picture->POC);
6176#endif
6177 /* set rd_idx to 0 */
6178 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6179
6180 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6181 if (blk88_y_count == 0) {
6182#ifdef DEBUG_QOS
6183 hevc_print(hevc, 0,
6184 "[Picture %d Quality] NO Data yet.\n",
6185 pic_number);
6186#endif
6187 /* reset all counts */
6188 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6189 return;
6190 }
6191 /* qp_y_sum */
6192 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6193#ifdef DEBUG_QOS
6194 hevc_print(hevc, 0,
6195 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6196 pic_number, rdata32/blk88_y_count,
6197 rdata32, blk88_y_count);
6198#endif
6199 picture->avg_qp = rdata32/blk88_y_count;
6200 /* intra_y_count */
6201 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6202#ifdef DEBUG_QOS
6203 hevc_print(hevc, 0,
6204 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6205 pic_number, rdata32*100/blk88_y_count,
6206 '%', rdata32);
6207#endif
6208 /* skipped_y_count */
6209 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6210#ifdef DEBUG_QOS
6211 hevc_print(hevc, 0,
6212 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6213 pic_number, rdata32*100/blk88_y_count,
6214 '%', rdata32);
6215#endif
6216 picture->avg_skip = rdata32*100/blk88_y_count;
6217 /* coeff_non_zero_y_count */
6218 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6219#ifdef DEBUG_QOS
6220 hevc_print(hevc, 0,
6221 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6222 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6223 '%', rdata32);
6224#endif
6225 /* blk66_c_count */
6226 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6227 if (blk88_c_count == 0) {
6228#ifdef DEBUG_QOS
6229 hevc_print(hevc, 0,
6230 "[Picture %d Quality] NO Data yet.\n",
6231 pic_number);
6232#endif
6233 /* reset all counts */
6234 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6235 return;
6236 }
6237 /* qp_c_sum */
6238 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6239#ifdef DEBUG_QOS
6240 hevc_print(hevc, 0,
6241 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6242 pic_number, rdata32/blk88_c_count,
6243 rdata32, blk88_c_count);
6244#endif
6245 /* intra_c_count */
6246 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6247#ifdef DEBUG_QOS
6248 hevc_print(hevc, 0,
6249 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6250 pic_number, rdata32*100/blk88_c_count,
6251 '%', rdata32);
6252#endif
6253 /* skipped_cu_c_count */
6254 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6255#ifdef DEBUG_QOS
6256 hevc_print(hevc, 0,
6257 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6258 pic_number, rdata32*100/blk88_c_count,
6259 '%', rdata32);
6260#endif
6261 /* coeff_non_zero_c_count */
6262 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6263#ifdef DEBUG_QOS
6264 hevc_print(hevc, 0,
6265 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6266 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6267 '%', rdata32);
6268#endif
6269
6270 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6271 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6272 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6273#ifdef DEBUG_QOS
6274 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6275 pic_number, (rdata32>>0)&0xff);
6276#endif
6277 picture->min_qp = (rdata32>>0)&0xff;
6278
6279#ifdef DEBUG_QOS
6280 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6281 pic_number, (rdata32>>8)&0xff);
6282#endif
6283 picture->max_qp = (rdata32>>8)&0xff;
6284
6285#ifdef DEBUG_QOS
6286 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6287 pic_number, (rdata32>>16)&0xff);
6288 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6289 pic_number, (rdata32>>24)&0xff);
6290#endif
6291
6292 /* blk22_mv_count */
6293 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6294 if (blk22_mv_count == 0) {
6295#ifdef DEBUG_QOS
6296 hevc_print(hevc, 0,
6297 "[Picture %d Quality] NO MV Data yet.\n",
6298 pic_number);
6299#endif
6300 /* reset all counts */
6301 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6302 return;
6303 }
6304 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6305 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6306 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6307 /* should all be 0x00 or 0xff */
6308#ifdef DEBUG_QOS
6309 hevc_print(hevc, 0,
6310 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6311 pic_number, rdata32);
6312#endif
6313 mvx_L0_hi = ((rdata32>>0)&0xff);
6314 mvy_L0_hi = ((rdata32>>8)&0xff);
6315 mvx_L1_hi = ((rdata32>>16)&0xff);
6316 mvy_L1_hi = ((rdata32>>24)&0xff);
6317
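		/* Each MV sum is a 40-bit signed value: 8 high bits (read above)
		 * plus 32 low bits (read next). It is sign-extended to 64 bits and
		 * divided by the 2x2-block count to get the average component. */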
6318 /* mvx_L0_count[31:0] */
6319 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6320 temp_value = mvx_L0_hi;
6321 temp_value = (temp_value << 32) | rdata32_l;
6322
6323 if (mvx_L0_hi & 0x80)
6324 value = 0xFFFFFFF000000000 | temp_value;
6325 else
6326 value = temp_value;
6327 value = div_s64(value, blk22_mv_count);
6328#ifdef DEBUG_QOS
6329 hevc_print(hevc, 0,
6330 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6331 pic_number, (int)value,
6332 value, blk22_mv_count);
6333#endif
6334 picture->avg_mv = value;
6335
6336 /* mvy_L0_count[31:0] */
6337 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6338 temp_value = mvy_L0_hi;
6339 temp_value = (temp_value << 32) | rdata32_l;
6340
6341 if (mvy_L0_hi & 0x80)
6342 value = 0xFFFFFFF000000000 | temp_value;
6343 else
6344 value = temp_value;
6345#ifdef DEBUG_QOS
6346 hevc_print(hevc, 0,
6347 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6348 pic_number, rdata32_l/blk22_mv_count,
6349 value, blk22_mv_count);
6350#endif
6351
6352 /* mvx_L1_count[31:0] */
6353 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6354 temp_value = mvx_L1_hi;
6355 temp_value = (temp_value << 32) | rdata32_l;
6356 if (mvx_L1_hi & 0x80)
6357 value = 0xFFFFFFF000000000 | temp_value;
6358 else
6359 value = temp_value;
6360#ifdef DEBUG_QOS
6361 hevc_print(hevc, 0,
6362 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6363 pic_number, rdata32_l/blk22_mv_count,
6364 value, blk22_mv_count);
6365#endif
6366
6367 /* mvy_L1_count[31:0] */
6368 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6369 temp_value = mvy_L1_hi;
6370 temp_value = (temp_value << 32) | rdata32_l;
6371 if (mvy_L1_hi & 0x80)
6372 value = 0xFFFFFFF000000000 | temp_value;
6373 else
6374 value = temp_value;
6375#ifdef DEBUG_QOS
6376 hevc_print(hevc, 0,
6377 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6378 pic_number, rdata32_l/blk22_mv_count,
6379 value, blk22_mv_count);
6380#endif
6381
6382 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6383 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6384 mv_hi = (rdata32>>16)&0xffff;
6385 if (mv_hi & 0x8000)
6386 mv_hi = 0x8000 - mv_hi;
6387#ifdef DEBUG_QOS
6388 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6389 pic_number, mv_hi);
6390#endif
6391 picture->max_mv = mv_hi;
6392
6393 mv_lo = (rdata32>>0)&0xffff;
6394 if (mv_lo & 0x8000)
6395 mv_lo = 0x8000 - mv_lo;
6396#ifdef DEBUG_QOS
6397 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6398 pic_number, mv_lo);
6399#endif
6400 picture->min_mv = mv_lo;
6401
6402 /* {mvy_L0_max, mvy_L0_min} */
6403 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6404 mv_hi = (rdata32>>16)&0xffff;
6405 if (mv_hi & 0x8000)
6406 mv_hi = 0x8000 - mv_hi;
6407#ifdef DEBUG_QOS
6408 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6409 pic_number, mv_hi);
6410#endif
6411
6412 mv_lo = (rdata32>>0)&0xffff;
6413 if (mv_lo & 0x8000)
6414 mv_lo = 0x8000 - mv_lo;
6415#ifdef DEBUG_QOS
6416 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6417 pic_number, mv_lo);
6418#endif
6419
6420 /* {mvx_L1_max, mvx_L1_min} */
6421 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6422 mv_hi = (rdata32>>16)&0xffff;
6423 if (mv_hi & 0x8000)
6424 mv_hi = 0x8000 - mv_hi;
6425#ifdef DEBUG_QOS
6426 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6427 pic_number, mv_hi);
6428#endif
6429
6430 mv_lo = (rdata32>>0)&0xffff;
6431 if (mv_lo & 0x8000)
6432 mv_lo = 0x8000 - mv_lo;
6433#ifdef DEBUG_QOS
6434 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6435 pic_number, mv_lo);
6436#endif
6437
6438 /* {mvy_L1_max, mvy_L1_min} */
6439 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6440 mv_hi = (rdata32>>16)&0xffff;
6441 if (mv_hi & 0x8000)
6442 mv_hi = 0x8000 - mv_hi;
6443#ifdef DEBUG_QOS
6444 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6445 pic_number, mv_hi);
6446#endif
6447 mv_lo = (rdata32>>0)&0xffff;
6448 if (mv_lo & 0x8000)
6449 mv_lo = 0x8000 - mv_lo;
6450#ifdef DEBUG_QOS
6451 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6452 pic_number, mv_lo);
6453#endif
6454
6455 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6456#ifdef DEBUG_QOS
6457 hevc_print(hevc, 0,
6458 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6459 pic_number, rdata32);
6460#endif
6461 /* reset all counts */
6462 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6463 }
6464}
6465
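/*
 * Parse the slice segment header parameters delivered through the RPM
 * block and set up buffers/HW for the slice. Return values, as produced
 * by the code below:
 *   0    continue decoding this slice
 *   1    skip (RASL picture before the random access point)
 *   2    skip decoding this picture (error / PB-skip)
 *   3, 4 parameter or size errors (4: unsupported bit depth)
 *   0xf  buffer management only, no slice decode
 *   -1   no buffer available / need to wait (hevc->wait_buf is set)
 */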
6466static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6467 union param_u *rpm_param,
6468 int decode_pic_begin)
6469{
6470#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6471 struct vdec_s *vdec = hw_to_vdec(hevc);
6472#endif
6473 int i;
6474 int lcu_x_num_div;
6475 int lcu_y_num_div;
6476 int Col_ref;
6477 int dbg_skip_flag = 0;
6478
6479 if (hevc->wait_buf == 0) {
6480 hevc->sps_num_reorder_pics_0 =
6481 rpm_param->p.sps_num_reorder_pics_0;
6482 hevc->m_temporalId = rpm_param->p.m_temporalId;
6483 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6484 hevc->interlace_flag =
6485 (rpm_param->p.profile_etc >> 2) & 0x1;
6486 hevc->curr_pic_struct =
6487 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6488 if (parser_sei_enable & 0x4) {
6489 hevc->frame_field_info_present_flag =
6490 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6491 }
6492
6493 if (interlace_enable == 0 || hevc->m_ins_flag)
6494 hevc->interlace_flag = 0;
6495 if (interlace_enable & 0x100)
6496 hevc->interlace_flag = interlace_enable & 0x1;
6497 if (hevc->interlace_flag == 0)
6498 hevc->curr_pic_struct = 0;
6499 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6500 /*
6501 *hevc->m_pocRandomAccess = MAX_INT;
6502 * //add to fix RAP_B_Bossen_1
6503 */
6504 /* } */
6505 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6506 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6507 hevc->slice_segment_addr =
6508 rpm_param->p.slice_segment_address;
6509 if (!rpm_param->p.dependent_slice_segment_flag)
6510 hevc->slice_addr = hevc->slice_segment_addr;
6511 } else {
6512 hevc->slice_segment_addr = 0;
6513 hevc->slice_addr = 0;
6514 }
6515
6516 hevc->iPrevPOC = hevc->curr_POC;
6517 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6518 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6519 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6520 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6521 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6522 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6523 hevc->isNextSliceSegment =
6524 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6525 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6526 || hevc->pic_h !=
6527 rpm_param->p.pic_height_in_luma_samples) {
6528 hevc_print(hevc, 0,
6529 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6530 hevc->pic_w, hevc->pic_h,
6531 rpm_param->p.pic_width_in_luma_samples,
6532 rpm_param->p.pic_height_in_luma_samples,
6533 hevc->interlace_flag);
6534
6535 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6536 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6537 hevc->frame_width = hevc->pic_w;
6538 hevc->frame_height = hevc->pic_h;
6539#ifdef LOSLESS_COMPRESS_MODE
6540 if (/*re_config_pic_flag == 0 &&*/
6541 (get_double_write_mode(hevc) & 0x10) == 0)
6542 init_decode_head_hw(hevc);
6543#endif
6544 }
6545
6546 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6547 hevc_print(hevc, 0, "over size : %u x %u.\n",
6548 hevc->pic_w, hevc->pic_h);
6549 if ((!hevc->m_ins_flag) &&
6550 ((debug &
6551 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6552 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6553 H265_DEBUG_DIS_SYS_ERROR_PROC);
6554 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6555 return 3;
6556 }
6557 if (hevc->bit_depth_chroma > 10 ||
6558 hevc->bit_depth_luma > 10) {
6559			hevc_print(hevc, 0, "unsupported bit depth : %u,%u\n",
6560 hevc->bit_depth_chroma,
6561 hevc->bit_depth_luma);
6562 if (!hevc->m_ins_flag)
6563 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6564 H265_DEBUG_DIS_SYS_ERROR_PROC);
6565 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6566 return 4;
6567 }
6568
6569		/* a zero width/height would cause a divide-by-zero below */
6570 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6571 if (get_dbg_flag(hevc)) {
6572 hevc_print(hevc, 0,
6573 "Fatal Error, pic_w = %d, pic_h = %d\n",
6574 hevc->pic_w, hevc->pic_h);
6575 }
6576 return 3;
6577 }
6578 pic_list_process(hevc);
6579
6580 hevc->lcu_size =
6581 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6582 3 + rpm_param->
6583 p.log2_diff_max_min_coding_block_size);
6584 if (hevc->lcu_size == 0) {
6585 hevc_print(hevc, 0,
6586 "Error, lcu_size = 0 (%d,%d)\n",
6587 rpm_param->p.
6588 log2_min_coding_block_size_minus3,
6589 rpm_param->p.
6590 log2_diff_max_min_coding_block_size);
6591 return 3;
6592 }
6593 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6594 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6595 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6596 hevc->lcu_x_num =
6597 ((hevc->pic_w % hevc->lcu_size) ==
6598 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6599 hevc->lcu_y_num =
6600 ((hevc->pic_h % hevc->lcu_size) ==
6601 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6602 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
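		/*
		 * e.g. 1920x1080 with 64x64 LCUs: lcu_x_num = 30, lcu_y_num = 17
		 * (rounded up), lcu_total = 510, so a fully decoded picture ends
		 * at LCU index 509, which is what check_pic_decoded_error()
		 * expects to have been reached.
		 */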
6603
6604 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6605 || hevc->m_nalUnitType ==
6606 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6607 hevc->curr_POC = 0;
6608 if ((hevc->m_temporalId - 1) == 0)
6609 hevc->iPrevTid0POC = hevc->curr_POC;
6610 } else {
6611 int iMaxPOClsb =
6612 1 << (rpm_param->p.
6613 log2_max_pic_order_cnt_lsb_minus4 + 4);
6614 int iPrevPOClsb;
6615 int iPrevPOCmsb;
6616 int iPOCmsb;
6617 int iPOClsb = rpm_param->p.POClsb;
6618
6619 if (iMaxPOClsb == 0) {
6620 hevc_print(hevc, 0,
6621 "error iMaxPOClsb is 0\n");
6622 return 3;
6623 }
6624
6625 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
6626 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
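			/*
			 * Standard HEVC POC reconstruction from the lsb. e.g. with
			 * log2_max_pic_order_cnt_lsb = 8 (iMaxPOClsb = 256),
			 * iPrevTid0POC = 250 and iPOClsb = 2, the lsb wrapped
			 * (250 - 2 >= 128), so iPOCmsb = 0 + 256 and curr_POC = 258.
			 */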
6627
6628 if ((iPOClsb < iPrevPOClsb)
6629 && ((iPrevPOClsb - iPOClsb) >=
6630 (iMaxPOClsb / 2)))
6631 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
6632 else if ((iPOClsb > iPrevPOClsb)
6633 && ((iPOClsb - iPrevPOClsb) >
6634 (iMaxPOClsb / 2)))
6635 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
6636 else
6637 iPOCmsb = iPrevPOCmsb;
6638 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6639 hevc_print(hevc, 0,
6640 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
6641 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
6642 iPOClsb);
6643 }
6644 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6645 || hevc->m_nalUnitType ==
6646 NAL_UNIT_CODED_SLICE_BLANT
6647 || hevc->m_nalUnitType ==
6648 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
6649 /* For BLA picture types, POCmsb is set to 0. */
6650 iPOCmsb = 0;
6651 }
6652 hevc->curr_POC = (iPOCmsb + iPOClsb);
6653 if ((hevc->m_temporalId - 1) == 0)
6654 hevc->iPrevTid0POC = hevc->curr_POC;
6655 else {
6656 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6657 hevc_print(hevc, 0,
6658 "m_temporalID is %d\n",
6659 hevc->m_temporalId);
6660 }
6661 }
6662 }
6663 hevc->RefNum_L0 =
6664 (rpm_param->p.num_ref_idx_l0_active >
6665 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6666 num_ref_idx_l0_active;
6667 hevc->RefNum_L1 =
6668 (rpm_param->p.num_ref_idx_l1_active >
6669 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6670 num_ref_idx_l1_active;
6671
6672 /* if(curr_POC==0x10) dump_lmem(); */
6673
6674 /* skip RASL pictures after CRA/BLA pictures */
6675 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
6676 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
6677 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6678 || hevc->m_nalUnitType ==
6679 NAL_UNIT_CODED_SLICE_BLANT
6680 || hevc->m_nalUnitType ==
6681 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6682 hevc->m_pocRandomAccess = hevc->curr_POC;
6683 else
6684 hevc->m_pocRandomAccess = -MAX_INT;
6685 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6686 || hevc->m_nalUnitType ==
6687 NAL_UNIT_CODED_SLICE_BLANT
6688 || hevc->m_nalUnitType ==
6689 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6690 hevc->m_pocRandomAccess = hevc->curr_POC;
6691 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
6692 (nal_skip_policy >= 3) &&
6693 (hevc->m_nalUnitType ==
6694 NAL_UNIT_CODED_SLICE_RASL_N ||
6695 hevc->m_nalUnitType ==
6696 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
6697 if (get_dbg_flag(hevc)) {
6698 hevc_print(hevc, 0,
6699				"RASL picture with POC %d < %d (",
6700 hevc->curr_POC, hevc->m_pocRandomAccess);
6701 hevc_print(hevc, 0,
6702 "RandomAccess point POC), skip it\n");
6703 }
6704 return 1;
6705 }
6706
6707 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
6708 hevc->skip_flag = 0;
6709 /**/
6710 /* if((iPrevPOC != curr_POC)){ */
6711 if (rpm_param->p.slice_segment_address == 0) {
6712 struct PIC_s *pic;
6713
6714 hevc->new_pic = 1;
6715#ifdef MULTI_INSTANCE_SUPPORT
6716 if (!hevc->m_ins_flag)
6717#endif
6718 check_pic_decoded_error_pre(hevc,
6719 READ_VREG(HEVC_PARSER_LCU_START)
6720 & 0xffffff);
6721 /**/ if (use_cma == 0) {
6722 if (hevc->pic_list_init_flag == 0) {
6723 init_pic_list(hevc);
6724 init_pic_list_hw(hevc);
6725 init_buf_spec(hevc);
6726 hevc->pic_list_init_flag = 3;
6727 }
6728 }
6729 if (!hevc->m_ins_flag) {
6730 if (hevc->cur_pic)
6731 get_picture_qos_info(hevc);
6732 }
6733 hevc->first_pic_after_recover = 0;
6734 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6735 dump_pic_list(hevc);
6736 /* prev pic */
6737 hevc_pre_pic(hevc, pic);
6738 /*
6739 *update referenced of old pictures
6740 *(cur_pic->referenced is 1 and not updated)
6741 */
6742 apply_ref_pic_set(hevc, hevc->curr_POC,
6743 rpm_param);
6744
6745 if (hevc->mmu_enable)
6746 recycle_mmu_bufs(hevc);
6747
6748#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6749 if (vdec->master) {
6750 struct hevc_state_s *hevc_ba =
6751 (struct hevc_state_s *)
6752 vdec->master->private;
6753 if (hevc_ba->cur_pic != NULL) {
6754 hevc_ba->cur_pic->dv_enhance_exist = 1;
6755 hevc_print(hevc, H265_DEBUG_DV,
6756 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
6757 hevc->curr_POC, hevc_ba->cur_pic->POC);
6758 }
6759 }
6760 if (vdec->master == NULL &&
6761 vdec->slave == NULL)
6762 set_aux_data(hevc,
6763 hevc->cur_pic, 1, 0); /*suffix*/
6764 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6765 set_aux_data(hevc,
6766 hevc->cur_pic, 0, 1); /*dv meta only*/
6767#else
6768 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6769#endif
6770 /* new pic */
6771 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6772 if (hevc->cur_pic == NULL) {
6773 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6774 dump_pic_list(hevc);
6775 hevc->wait_buf = 1;
6776 return -1;
6777 }
6778#ifdef MULTI_INSTANCE_SUPPORT
6779 hevc->decoding_pic = hevc->cur_pic;
6780 if (!hevc->m_ins_flag)
6781 hevc->over_decode = 0;
6782#endif
6783#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6784 hevc->cur_pic->dv_enhance_exist = 0;
6785 if (vdec->slave)
6786 hevc_print(hevc, H265_DEBUG_DV,
6787 "Clear bl (poc %d) dv_enhance_exist flag\n",
6788 hevc->curr_POC);
6789 if (vdec->master == NULL &&
6790 vdec->slave == NULL)
6791 set_aux_data(hevc,
6792 hevc->cur_pic, 0, 0); /*prefix*/
6793
6794 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6795 set_aux_data(hevc,
6796 hevc->cur_pic, 0, 2); /*pre sei only*/
6797#else
6798 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6799#endif
6800 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
6801 hevc->cur_pic->output_ready = 1;
6802 hevc->cur_pic->stream_offset =
6803 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
6804 prepare_display_buf(hevc, hevc->cur_pic);
6805 hevc->wait_buf = 2;
6806 return -1;
6807 }
6808 } else {
6809 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
6810#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6811 if (vdec->master == NULL &&
6812 vdec->slave == NULL) {
6813 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6814 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6815 }
6816#else
6817 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6818 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6819#endif
6820 }
6821 if (hevc->pic_list_init_flag != 3
6822 || hevc->cur_pic == NULL) {
6823 /* make it dec from the first slice segment */
6824 return 3;
6825 }
6826 hevc->cur_pic->slice_idx++;
6827 hevc->new_pic = 0;
6828 }
6829 } else {
6830 if (hevc->wait_buf == 1) {
6831 pic_list_process(hevc);
6832 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6833 if (hevc->cur_pic == NULL)
6834 return -1;
6835
6836 if (!hevc->m_ins_flag)
6837 hevc->over_decode = 0;
6838
6839#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6840 hevc->cur_pic->dv_enhance_exist = 0;
6841 if (vdec->master == NULL &&
6842 vdec->slave == NULL)
6843 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6844#else
6845 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6846#endif
6847 hevc->wait_buf = 0;
6848 } else if (hevc->wait_buf ==
6849 2) {
6850 if (get_display_pic_num(hevc) >
6851 1)
6852 return -1;
6853 hevc->wait_buf = 0;
6854 }
6855 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6856 dump_pic_list(hevc);
6857 }
6858
6859 if (hevc->new_pic) {
6860#if 1
6861 /*SUPPORT_10BIT*/
6862 int sao_mem_unit =
6863 (hevc->lcu_size == 16 ? 9 :
6864 hevc->lcu_size ==
6865 32 ? 14 : 24) << 4;
6866#else
6867 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
6868#endif
6869 int pic_height_cu =
6870 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
6871 int pic_width_cu =
6872 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
6873 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
6874
6875 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
6876 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6877 hevc_print(hevc, 0,
6878 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
6879 __func__,
6880 hevc->decode_idx,
6881 hevc->curr_pic_struct,
6882 hevc->interlace_flag,
6883 hevc->cur_pic->index);
6884 }
6885 if (dbg_skip_decode_index != 0 &&
6886 hevc->decode_idx == dbg_skip_decode_index)
6887 dbg_skip_flag = 1;
6888
6889 hevc->decode_idx++;
6890 update_tile_info(hevc, pic_width_cu, pic_height_cu,
6891 sao_mem_unit, rpm_param);
6892
6893 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
6894 }
6895
6896 if (hevc->iPrevPOC != hevc->curr_POC) {
6897 hevc->new_tile = 1;
6898 hevc->tile_x = 0;
6899 hevc->tile_y = 0;
6900 hevc->tile_y_x = 0;
6901 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6902 hevc_print(hevc, 0,
6903 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
6904 hevc->tile_x, hevc->tile_y);
6905 }
6906 } else if (hevc->tile_enabled) {
6907 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6908 hevc_print(hevc, 0,
6909 "slice_segment_address is %d\n",
6910 rpm_param->p.slice_segment_address);
6911 }
6912 hevc->tile_y_x =
6913 get_tile_index(hevc, rpm_param->p.slice_segment_address,
6914 (hevc->pic_w +
6915 hevc->lcu_size -
6916 1) / hevc->lcu_size);
6917 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
6918 && (hevc->tile_y_x != -1)) {
6919 hevc->new_tile = 1;
6920 hevc->tile_x = hevc->tile_y_x & 0xff;
6921 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
6922 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6923 hevc_print(hevc, 0,
6924 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
6925 rpm_param->p.slice_segment_address,
6926 hevc->tile_x, hevc->tile_y);
6927 }
6928 } else
6929 hevc->new_tile = 0;
6930 } else
6931 hevc->new_tile = 0;
6932
6933 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
6934 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
6935 hevc->new_tile = 0;
6936
6937 if (hevc->new_tile) {
6938 hevc->tile_start_lcu_x =
6939 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
6940 hevc->tile_start_lcu_y =
6941 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
6942 hevc->tile_width_lcu =
6943 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
6944 hevc->tile_height_lcu =
6945 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
6946 }
6947
6948 set_ref_pic_list(hevc, rpm_param);
6949
6950 Col_ref = rpm_param->p.collocated_ref_idx;
6951
6952 hevc->LDCFlag = 0;
6953 if (rpm_param->p.slice_type != I_SLICE) {
6954 hevc->LDCFlag = 1;
6955 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
6956 if (hevc->cur_pic->
6957 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
6958 hevc->curr_POC)
6959 hevc->LDCFlag = 0;
6960 }
6961 if (rpm_param->p.slice_type == B_SLICE) {
6962 for (i = 0; (i < hevc->RefNum_L1)
6963 && hevc->LDCFlag; i++) {
6964 if (hevc->cur_pic->
6965 m_aiRefPOCList1[hevc->cur_pic->
6966 slice_idx][i] >
6967 hevc->curr_POC)
6968 hevc->LDCFlag = 0;
6969 }
6970 }
6971 }
6972
6973 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
6974
6975 hevc->plevel =
6976 rpm_param->p.log2_parallel_merge_level;
6977 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
6978
6979 hevc->LongTerm_Curr = 0; /* to do ... */
6980 hevc->LongTerm_Col = 0; /* to do ... */
6981
6982 hevc->list_no = 0;
6983 if (rpm_param->p.slice_type == B_SLICE)
6984 hevc->list_no = 1 - hevc->ColFromL0Flag;
6985 if (hevc->list_no == 0) {
6986 if (Col_ref < hevc->RefNum_L0) {
6987 hevc->Col_POC =
6988 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
6989 slice_idx][Col_ref];
6990 } else
6991 hevc->Col_POC = INVALID_POC;
6992 } else {
6993 if (Col_ref < hevc->RefNum_L1) {
6994 hevc->Col_POC =
6995 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
6996 slice_idx][Col_ref];
6997 } else
6998 hevc->Col_POC = INVALID_POC;
6999 }
7000
7001 hevc->LongTerm_Ref = 0; /* to do ... */
7002
7003 if (hevc->slice_type != 2) {
7004 /* if(hevc->i_only==1){ */
7005 /* return 0xf; */
7006 /* } */
7007
7008 if (hevc->Col_POC != INVALID_POC) {
7009 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7010 if (hevc->col_pic == NULL) {
7011 hevc->cur_pic->error_mark = 1;
7012 if (get_dbg_flag(hevc)) {
7013 hevc_print(hevc, 0,
7014 "WRONG,fail to get the pic Col_POC\n");
7015 }
7016 if (is_log_enable(hevc))
7017 add_log(hevc,
7018 "WRONG,fail to get the pic Col_POC");
7019 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7020 hevc->cur_pic->error_mark = 1;
7021 if (get_dbg_flag(hevc)) {
7022 hevc_print(hevc, 0,
7023 "WRONG, Col_POC error_mark is 1\n");
7024 }
7025 if (is_log_enable(hevc))
7026 add_log(hevc,
7027 "WRONG, Col_POC error_mark is 1");
7028 } else {
7029 if ((hevc->col_pic->width
7030 != hevc->pic_w) ||
7031 (hevc->col_pic->height
7032 != hevc->pic_h)) {
7033 hevc_print(hevc, 0,
7034 "Wrong reference pic (poc %d) width/height %d/%d\n",
7035 hevc->col_pic->POC,
7036 hevc->col_pic->width,
7037 hevc->col_pic->height);
7038 hevc->cur_pic->error_mark = 1;
7039 }
7040
7041 }
7042
7043 if (hevc->cur_pic->error_mark
7044 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7045#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7046 /*count info*/
7047 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7048 hevc->cur_pic->stream_offset);
7049#endif
7050 }
7051
7052 if (is_skip_decoding(hevc,
7053 hevc->cur_pic)) {
7054 return 2;
7055 }
7056 } else
7057 hevc->col_pic = hevc->cur_pic;
7058 } /* */
7059 if (hevc->col_pic == NULL)
7060 hevc->col_pic = hevc->cur_pic;
7061#ifdef BUFFER_MGR_ONLY
7062 return 0xf;
7063#else
7064 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7065 || (dbg_skip_flag))
7066 return 0xf;
7067#endif
7068
7069 config_mc_buffer(hevc, hevc->cur_pic);
7070
7071 if (is_skip_decoding(hevc,
7072 hevc->cur_pic)) {
7073 if (get_dbg_flag(hevc))
7074 hevc_print(hevc, 0,
7075 "Discard this picture index %d\n",
7076 hevc->cur_pic->index);
7077#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7078 /*count info*/
7079 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7080 hevc->cur_pic->stream_offset);
7081#endif
7082 return 2;
7083 }
7084#ifdef MCRCC_ENABLE
7085 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7086#endif
7087 config_mpred_hw(hevc);
7088
7089 config_sao_hw(hevc, rpm_param);
7090
7091 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7092 return 0xf;
7093
7094 return 0;
7095}
7096
7097
7098
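/*
 * Allocate the MMU (scatter) pages backing the compressed frame body:
 * compute_losless_comp_body_size() gives the body size, which is
 * rounded up to 4K pages and requested from decoder_mmu_box_alloc_idx().
 * Nothing is allocated when double_write_mode bit 4 is set (no
 * compressed output), and requests above MAX_FRAME_4K_NUM /
 * MAX_FRAME_8K_NUM pages are rejected.
 */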
7099static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7100 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7101 int cur_buf_idx = new_pic->index;
7102 int bit_depth_10 = (bit_depth != 0x00);
7103 int picture_size;
7104 int cur_mmu_4k_number;
7105 int ret, max_frame_num;
7106 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7107 new_pic->height, !bit_depth_10);
7108 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7109 if (hevc->double_write_mode & 0x10)
7110 return 0;
7111 /*hevc_print(hevc, 0,
7112 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7113 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7114 if (new_pic->scatter_alloc) {
7115 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7116 new_pic->scatter_alloc = 0;
7117 }
7118 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7119 max_frame_num = MAX_FRAME_8K_NUM;
7120 else
7121 max_frame_num = MAX_FRAME_4K_NUM;
7122 if (cur_mmu_4k_number > max_frame_num) {
7123 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7124 cur_mmu_4k_number,
7125 new_pic->width,
7126 new_pic->height);
7127 return -1;
7128 }
7129 ret = decoder_mmu_box_alloc_idx(
7130 hevc->mmu_box,
7131 cur_buf_idx,
7132 cur_mmu_4k_number,
7133 mmu_index_adr);
7134 if (ret == 0)
7135 new_pic->scatter_alloc = 1;
7136 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7137 "%s pic index %d page count(%d) ret =%d\n",
7138 __func__, cur_buf_idx,
7139 cur_mmu_4k_number,
7140 ret);
7141 return ret;
7142}
7143
7144
7145static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7146 struct PIC_s *pic)
7147{
7148 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7149 "%s pic index %d scatter_alloc %d\n",
7150 __func__, pic->index,
7151 pic->scatter_alloc);
7152
7153 if (hevc->mmu_enable
7154 && ((hevc->double_write_mode & 0x10) == 0)
7155 && pic->scatter_alloc)
7156 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7157 pic->scatter_alloc = 0;
7158}
7159
7160/*
7161 *************************************************
7162 *
7163 *h265 buffer management end
7164 *
7165 **************************************************
7166 */
7167static struct hevc_state_s *gHevc;
7168
7169static void hevc_local_uninit(struct hevc_state_s *hevc)
7170{
7171 hevc->rpm_ptr = NULL;
7172 hevc->lmem_ptr = NULL;
7173
7174#ifdef SWAP_HEVC_UCODE
7175 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7176 if (hevc->mc_cpu_addr != NULL) {
7177 dma_free_coherent(amports_get_dma_device(),
7178 hevc->swap_size, hevc->mc_cpu_addr,
7179 hevc->mc_dma_handle);
7180 hevc->mc_cpu_addr = NULL;
7181 }
7182
7183 }
7184#endif
7185#ifdef DETREFILL_ENABLE
7186 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7187 uninit_detrefill_buf(hevc);
7188#endif
7189 if (hevc->aux_addr) {
7190 dma_free_coherent(amports_get_dma_device(),
7191 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7192 hevc->aux_phy_addr);
7193 hevc->aux_addr = NULL;
7194 }
7195 if (hevc->rpm_addr) {
7196 dma_free_coherent(amports_get_dma_device(),
7197 RPM_BUF_SIZE, hevc->rpm_addr,
7198 hevc->rpm_phy_addr);
7199 hevc->rpm_addr = NULL;
7200 }
7201 if (hevc->lmem_addr) {
7202 dma_free_coherent(amports_get_dma_device(),
7203			LMEM_BUF_SIZE, hevc->lmem_addr,
7204 hevc->lmem_phy_addr);
7205 hevc->lmem_addr = NULL;
7206 }
7207
7208 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7209 if (hevc->frame_mmu_map_phy_addr)
7210 dma_free_coherent(amports_get_dma_device(),
7211 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7212 hevc->frame_mmu_map_phy_addr);
7213
7214 hevc->frame_mmu_map_addr = NULL;
7215 }
7216
7217 kfree(gvs);
7218 gvs = NULL;
7219}
7220
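/*
 * Per-instance init: pick a workspace spec (1080p or one of the 4K
 * specs), then allocate the coherent DMA buffers used by the firmware:
 * the RPM parameter block (unless parameters are sent via registers),
 * the prefix/suffix aux (SEI/DV metadata) buffer when configured, the
 * LMEM buffer and, with MMU enabled, the frame MMU map.
 */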
7221static int hevc_local_init(struct hevc_state_s *hevc)
7222{
7223 int ret = -1;
7224 struct BuffInfo_s *cur_buf_info = NULL;
7225
7226 memset(&hevc->param, 0, sizeof(union param_u));
7227
7228 cur_buf_info = &hevc->work_space_buf_store;
7229
7230 if (vdec_is_support_4k()) {
7231 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7232 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7233 sizeof(struct BuffInfo_s));
7234 else
7235 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7236 sizeof(struct BuffInfo_s));
7237 } else
7238 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7239 sizeof(struct BuffInfo_s));
7240
7241 cur_buf_info->start_adr = hevc->buf_start;
7242 init_buff_spec(hevc, cur_buf_info);
7243
7244 hevc_init_stru(hevc, cur_buf_info);
7245
7246 hevc->bit_depth_luma = 8;
7247 hevc->bit_depth_chroma = 8;
7248 hevc->video_signal_type = 0;
7249 hevc->video_signal_type_debug = 0;
7250 bit_depth_luma = hevc->bit_depth_luma;
7251 bit_depth_chroma = hevc->bit_depth_chroma;
7252 video_signal_type = hevc->video_signal_type;
7253
7254 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7255 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7256 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7257 if (hevc->rpm_addr == NULL) {
7258 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7259 return -1;
7260 }
7261 hevc->rpm_ptr = hevc->rpm_addr;
7262 }
7263
7264 if (prefix_aux_buf_size > 0 ||
7265 suffix_aux_buf_size > 0) {
7266 u32 aux_buf_size;
7267
7268 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7269 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7270 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7271		hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7272 aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7273 if (hevc->aux_addr == NULL) {
7274			pr_err("%s: failed to alloc aux buffer\n", __func__);
7275 return -1;
7276 }
7277 }
7278
7279 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7280 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7281 if (hevc->lmem_addr == NULL) {
7282 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7283 return -1;
7284 }
7285 hevc->lmem_ptr = hevc->lmem_addr;
7286
7287 if (hevc->mmu_enable) {
7288 hevc->frame_mmu_map_addr =
7289 dma_alloc_coherent(amports_get_dma_device(),
7290 get_frame_mmu_map_size(),
7291 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7292 if (hevc->frame_mmu_map_addr == NULL) {
7293			pr_err("%s: failed to alloc frame mmu map buffer\n", __func__);
7294 return -1;
7295 }
7296 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7297 }
7298 ret = 0;
7299 return ret;
7300}
7301
7302/*
7303 *******************************************
7304 * Mailbox command
7305 *******************************************
7306 */
7307#define CMD_FINISHED 0
7308#define CMD_ALLOC_VIEW 1
7309#define CMD_FRAME_DISPLAY 3
7310#define CMD_DEBUG 10
7311
7312
7313#define DECODE_BUFFER_NUM_MAX 32
7314#define DISPLAY_BUFFER_NUM 6
7315
7316#define video_domain_addr(adr) (adr&0x7fffffff)
7317#define DECODER_WORK_SPACE_SIZE 0x800000
7318
7319#define spec2canvas(x) \
7320 (((x)->uv_canvas_index << 16) | \
7321 ((x)->uv_canvas_index << 8) | \
7322 ((x)->y_canvas_index << 0))
7323
7324
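/*
 * Bind the picture's double-write (or plain 8-bit) output planes to
 * display canvases: canvas indexes come from the vdec core in parallel
 * mode or are derived from the picture index otherwise, and the Y and
 * UV planes are configured with the current mem_map_mode.
 */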
7325static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7326{
7327 struct vdec_s *vdec = hw_to_vdec(hevc);
7328 int canvas_w = ALIGN(pic->width, 64)/4;
7329 int canvas_h = ALIGN(pic->height, 32)/4;
7330 int blkmode = mem_map_mode;
7331
7332 /*CANVAS_BLKMODE_64X32*/
7333#ifdef SUPPORT_10BIT
7334 if (pic->double_write_mode) {
7335 canvas_w = pic->width /
7336 get_double_write_ratio(hevc, pic->double_write_mode);
7337 canvas_h = pic->height /
7338 get_double_write_ratio(hevc, pic->double_write_mode);
7339
7340 if (mem_map_mode == 0)
7341 canvas_w = ALIGN(canvas_w, 32);
7342 else
7343 canvas_w = ALIGN(canvas_w, 64);
7344 canvas_h = ALIGN(canvas_h, 32);
7345
7346 if (vdec->parallel_dec == 1) {
7347 if (pic->y_canvas_index == -1)
7348 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7349 if (pic->uv_canvas_index == -1)
7350 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7351 } else {
7352 pic->y_canvas_index = 128 + pic->index * 2;
7353 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7354 }
7355
7356 canvas_config_ex(pic->y_canvas_index,
7357 pic->dw_y_adr, canvas_w, canvas_h,
7358 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7359 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7360 canvas_w, canvas_h,
7361 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7362#ifdef MULTI_INSTANCE_SUPPORT
7363 pic->canvas_config[0].phy_addr =
7364 pic->dw_y_adr;
7365 pic->canvas_config[0].width =
7366 canvas_w;
7367 pic->canvas_config[0].height =
7368 canvas_h;
7369 pic->canvas_config[0].block_mode =
7370 blkmode;
7371 pic->canvas_config[0].endian = 7;
7372
7373 pic->canvas_config[1].phy_addr =
7374 pic->dw_u_v_adr;
7375 pic->canvas_config[1].width =
7376 canvas_w;
7377 pic->canvas_config[1].height =
7378 canvas_h;
7379 pic->canvas_config[1].block_mode =
7380 blkmode;
7381 pic->canvas_config[1].endian = 7;
7382#endif
7383 } else {
7384 if (!hevc->mmu_enable) {
7385 /* to change after 10bit VPU is ready ... */
7386 if (vdec->parallel_dec == 1) {
7387 if (pic->y_canvas_index == -1)
7388 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7389 pic->uv_canvas_index = pic->y_canvas_index;
7390 } else {
7391 pic->y_canvas_index = 128 + pic->index;
7392 pic->uv_canvas_index = 128 + pic->index;
7393 }
7394
7395 canvas_config_ex(pic->y_canvas_index,
7396 pic->mc_y_adr, canvas_w, canvas_h,
7397 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7398 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7399 canvas_w, canvas_h,
7400 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7401 }
7402 }
7403#else
7404 if (vdec->parallel_dec == 1) {
7405 if (pic->y_canvas_index == -1)
7406 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7407 if (pic->uv_canvas_index == -1)
7408 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7409 } else {
7410 pic->y_canvas_index = 128 + pic->index * 2;
7411 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7412 }
7413
7414
7415 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7416 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7417 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7418 canvas_w, canvas_h,
7419 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7420#endif
7421}
7422
7423static int init_buf_spec(struct hevc_state_s *hevc)
7424{
7425 int pic_width = hevc->pic_w;
7426 int pic_height = hevc->pic_h;
7427
7428 /* hevc_print(hevc, 0,
7429 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7430 */
7431 hevc_print(hevc, 0,
7432 "%s2 %d %d\n", __func__, pic_width, pic_height);
7433 /* pic_width = hevc->pic_w; */
7434 /* pic_height = hevc->pic_h; */
7435
7436 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7437 hevc->frame_width = pic_width;
7438 hevc->frame_height = pic_height;
7439
7440 }
7441
7442 return 0;
7443}
7444
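/*
 * Minimal SEI walker over one NAL payload stored in the aux buffer.
 * Handles pic_timing (pic_struct), ITU-T T.35 user data (only to detect
 * the HDR10+ vendor header and set SEI_HDR10PLUS_MASK), mastering
 * display colour volume and content light level; everything else is
 * skipped.
 */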
7445static int parse_sei(struct hevc_state_s *hevc,
7446 struct PIC_s *pic, char *sei_buf, uint32_t size)
7447{
7448 char *p = sei_buf;
7449 char *p_sei;
7450 uint16_t header;
7451 uint8_t nal_unit_type;
7452 uint8_t payload_type, payload_size;
7453 int i, j;
7454
7455 if (size < 2)
7456 return 0;
7457 header = *p++;
7458 header <<= 8;
7459 header += *p++;
7460 nal_unit_type = header >> 9;
7461 if ((nal_unit_type != NAL_UNIT_SEI)
7462 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7463 return 0;
7464 while (p+2 <= sei_buf+size) {
7465 payload_type = *p++;
7466 payload_size = *p++;
7467 if (p+payload_size <= sei_buf+size) {
7468 switch (payload_type) {
7469 case SEI_PicTiming:
7470 if ((parser_sei_enable & 0x4) &&
7471 hevc->frame_field_info_present_flag) {
7472 p_sei = p;
7473 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7474 pic->pic_struct = hevc->curr_pic_struct;
7475 if (get_dbg_flag(hevc) &
7476 H265_DEBUG_PIC_STRUCT) {
7477 hevc_print(hevc, 0,
7478 "parse result pic_struct = %d\n",
7479 hevc->curr_pic_struct);
7480 }
7481 }
7482 break;
7483 case SEI_UserDataITU_T_T35:
7484 p_sei = p;
7485 if (p_sei[0] == 0xB5
7486 && p_sei[1] == 0x00
7487 && p_sei[2] == 0x3C
7488 && p_sei[3] == 0x00
7489 && p_sei[4] == 0x01
7490 && p_sei[5] == 0x04)
7491 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7492
7493 break;
7494 case SEI_MasteringDisplayColorVolume:
7495 /*hevc_print(hevc, 0,
7496 "sei type: primary display color volume %d, size %d\n",
7497 payload_type,
7498 payload_size);*/
7499 /* master_display_colour */
7500 p_sei = p;
7501 for (i = 0; i < 3; i++) {
7502 for (j = 0; j < 2; j++) {
7503 hevc->primaries[i][j]
7504 = (*p_sei<<8)
7505 | *(p_sei+1);
7506 p_sei += 2;
7507 }
7508 }
7509 for (i = 0; i < 2; i++) {
7510 hevc->white_point[i]
7511 = (*p_sei<<8)
7512 | *(p_sei+1);
7513 p_sei += 2;
7514 }
7515 for (i = 0; i < 2; i++) {
7516 hevc->luminance[i]
7517 = (*p_sei<<24)
7518 | (*(p_sei+1)<<16)
7519 | (*(p_sei+2)<<8)
7520 | *(p_sei+3);
7521 p_sei += 4;
7522 }
7523 hevc->sei_present_flag |=
7524 SEI_MASTER_DISPLAY_COLOR_MASK;
7525 /*for (i = 0; i < 3; i++)
7526 for (j = 0; j < 2; j++)
7527 hevc_print(hevc, 0,
7528 "\tprimaries[%1d][%1d] = %04x\n",
7529 i, j,
7530 hevc->primaries[i][j]);
7531 hevc_print(hevc, 0,
7532 "\twhite_point = (%04x, %04x)\n",
7533 hevc->white_point[0],
7534 hevc->white_point[1]);
7535 hevc_print(hevc, 0,
7536 "\tmax,min luminance = %08x, %08x\n",
7537 hevc->luminance[0],
7538 hevc->luminance[1]);*/
7539 break;
7540 case SEI_ContentLightLevel:
7541 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7542 hevc_print(hevc, 0,
7543 "sei type: max content light level %d, size %d\n",
7544 payload_type, payload_size);
7545 /* content_light_level */
7546 p_sei = p;
7547 hevc->content_light_level[0]
7548 = (*p_sei<<8) | *(p_sei+1);
7549 p_sei += 2;
7550 hevc->content_light_level[1]
7551 = (*p_sei<<8) | *(p_sei+1);
7552 p_sei += 2;
7553 hevc->sei_present_flag |=
7554 SEI_CONTENT_LIGHT_LEVEL_MASK;
7555 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7556 hevc_print(hevc, 0,
7557 "\tmax cll = %04x, max_pa_cll = %04x\n",
7558 hevc->content_light_level[0],
7559 hevc->content_light_level[1]);
7560 break;
7561 default:
7562 break;
7563 }
7564 }
7565 p += payload_size;
7566 }
7567 return 0;
7568}
7569
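/*
 * Map aspect_ratio_idc (HEVC VUI table; e.g. 1 = 1:1, 2 = 12:11, ...,
 * 255 = explicit SAR) to a height/width display ratio in 8.8 fixed
 * point. e.g. idc 1 with a 1920x1080 picture gives
 * 0x100 * 1080 / 1920 = 0x90.
 */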
7570static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7571 unsigned w, unsigned h)
7572{
7573 unsigned ar;
7574
7575 if (idc == 255) {
7576 ar = div_u64(256ULL * sar_h * h,
7577 sar_w * w);
7578 } else {
7579 switch (idc) {
7580 case 1:
7581 ar = 0x100 * h / w;
7582 break;
7583 case 2:
7584 ar = 0x100 * h * 11 / (w * 12);
7585 break;
7586 case 3:
7587 ar = 0x100 * h * 11 / (w * 10);
7588 break;
7589 case 4:
7590 ar = 0x100 * h * 11 / (w * 16);
7591 break;
7592 case 5:
7593 ar = 0x100 * h * 33 / (w * 40);
7594 break;
7595 case 6:
7596 ar = 0x100 * h * 11 / (w * 24);
7597 break;
7598 case 7:
7599 ar = 0x100 * h * 11 / (w * 20);
7600 break;
7601 case 8:
7602 ar = 0x100 * h * 11 / (w * 32);
7603 break;
7604 case 9:
7605 ar = 0x100 * h * 33 / (w * 80);
7606 break;
7607 case 10:
7608 ar = 0x100 * h * 11 / (w * 18);
7609 break;
7610 case 11:
7611 ar = 0x100 * h * 11 / (w * 15);
7612 break;
7613 case 12:
7614 ar = 0x100 * h * 33 / (w * 64);
7615 break;
7616 case 13:
7617 ar = 0x100 * h * 99 / (w * 160);
7618 break;
7619 case 14:
7620 ar = 0x100 * h * 3 / (w * 4);
7621 break;
7622 case 15:
7623 ar = 0x100 * h * 2 / (w * 3);
7624 break;
7625 case 16:
7626 ar = 0x100 * h * 1 / (w * 2);
7627 break;
7628 default:
7629 ar = h * 0x100 / w;
7630 break;
7631 }
7632 }
7633
7634 return ar;
7635}
7636
7637static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
7638 struct PIC_s *pic)
7639{
7640 unsigned int ar;
7641 int i, j;
7642 char *p;
7643 unsigned size = 0;
7644 unsigned type = 0;
7645 struct vframe_master_display_colour_s *vf_dp
7646 = &vf->prop.master_display_colour;
7647
7648 vf->width = pic->width /
7649 get_double_write_ratio(hevc, pic->double_write_mode);
7650 vf->height = pic->height /
7651 get_double_write_ratio(hevc, pic->double_write_mode);
7652
7653 vf->duration = hevc->frame_dur;
7654 vf->duration_pulldown = 0;
7655 vf->flag = 0;
7656
7657 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
7658 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7659
7660
7661 if (((pic->aspect_ratio_idc == 255) &&
7662 pic->sar_width &&
7663 pic->sar_height) ||
7664 ((pic->aspect_ratio_idc != 255) &&
7665 (pic->width))) {
7666 ar = min_t(u32,
7667 calc_ar(pic->aspect_ratio_idc,
7668 pic->sar_width,
7669 pic->sar_height,
7670 pic->width,
7671 pic->height),
7672 DISP_RATIO_ASPECT_RATIO_MAX);
7673 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7674 }
7675 hevc->ratio_control = vf->ratio_control;
7676 if (pic->aux_data_buf
7677 && pic->aux_data_size) {
7678 /* parser sei */
7679 p = pic->aux_data_buf;
7680 while (p < pic->aux_data_buf
7681 + pic->aux_data_size - 8) {
7682 size = *p++;
7683 size = (size << 8) | *p++;
7684 size = (size << 8) | *p++;
7685 size = (size << 8) | *p++;
7686 type = *p++;
7687 type = (type << 8) | *p++;
7688 type = (type << 8) | *p++;
7689 type = (type << 8) | *p++;
7690 if (type == 0x02000000) {
7691 /* hevc_print(hevc, 0,
7692 "sei(%d)\n", size); */
7693 parse_sei(hevc, pic, p, size);
7694 }
7695 p += size;
7696 }
7697 }
7698 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
7699 vf->signal_type = pic->video_signal_type;
7700 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
7701 u32 data;
7702 data = vf->signal_type;
7703 data = data & 0xFFFF00FF;
7704 data = data | (0x30<<8);
7705 vf->signal_type = data;
7706 }
7707 }
7708 else
7709 vf->signal_type = 0;
7710 hevc->video_signal_type_debug = vf->signal_type;
7711
7712 /* master_display_colour */
7713 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
7714 for (i = 0; i < 3; i++)
7715 for (j = 0; j < 2; j++)
7716 vf_dp->primaries[i][j] = hevc->primaries[i][j];
7717 for (i = 0; i < 2; i++) {
7718 vf_dp->white_point[i] = hevc->white_point[i];
7719 vf_dp->luminance[i]
7720 = hevc->luminance[i];
7721 }
7722 vf_dp->present_flag = 1;
7723 } else
7724 vf_dp->present_flag = 0;
7725
7726 /* content_light_level */
7727 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
7728 vf_dp->content_light_level.max_content
7729 = hevc->content_light_level[0];
7730 vf_dp->content_light_level.max_pic_average
7731 = hevc->content_light_level[1];
7732 vf_dp->content_light_level.present_flag = 1;
7733 } else
7734 vf_dp->content_light_level.present_flag = 0;
7735}
7736
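/*
 * vframe provider callbacks: vh265_vf_states/peek/get/put below operate
 * on two kfifos -- newframe_q holds free struct vframe_s slots and
 * display_q holds decoded frames ready for the receiver.  get() moves a
 * frame out of display_q; put() returns the slot to newframe_q and drops
 * the vf_ref of the underlying PIC_s so the buffer can be reused.
 */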
7737static int vh265_vf_states(struct vframe_states *states, void *op_arg)
7738{
7739 unsigned long flags;
7740#ifdef MULTI_INSTANCE_SUPPORT
7741 struct vdec_s *vdec = op_arg;
7742 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7743#else
7744 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7745#endif
7746
7747 spin_lock_irqsave(&lock, flags);
7748
7749 states->vf_pool_size = VF_POOL_SIZE;
7750 states->buf_free_num = kfifo_len(&hevc->newframe_q);
7751 states->buf_avail_num = kfifo_len(&hevc->display_q);
7752
7753 if (step == 2)
7754 states->buf_avail_num = 0;
7755 spin_unlock_irqrestore(&lock, flags);
7756 return 0;
7757}
7758
7759static struct vframe_s *vh265_vf_peek(void *op_arg)
7760{
7761 struct vframe_s *vf[2] = {0, 0};
7762#ifdef MULTI_INSTANCE_SUPPORT
7763 struct vdec_s *vdec = op_arg;
7764 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7765#else
7766 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7767#endif
7768
7769 if (step == 2)
7770 return NULL;
7771
7772 if (force_disp_pic_index & 0x100) {
7773 if (force_disp_pic_index & 0x200)
7774 return NULL;
7775 return &hevc->vframe_dummy;
7776 }
7777
7778
7779 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
7780 if (vf[1]) {
7781 vf[0]->next_vf_pts_valid = true;
7782 vf[0]->next_vf_pts = vf[1]->pts;
7783 } else
7784 vf[0]->next_vf_pts_valid = false;
7785 return vf[0];
7786 }
7787
7788 return NULL;
7789}
7790
7791static struct vframe_s *vh265_vf_get(void *op_arg)
7792{
7793 struct vframe_s *vf;
7794#ifdef MULTI_INSTANCE_SUPPORT
7795 struct vdec_s *vdec = op_arg;
7796 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7797#else
7798 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7799#endif
7800
7801 if (step == 2)
7802 return NULL;
7803 else if (step == 1)
7804 step = 2;
7805
7806#if 0
7807 if (force_disp_pic_index & 0x100) {
7808 int buffer_index = force_disp_pic_index & 0xff;
7809 struct PIC_s *pic = NULL;
7810 if (buffer_index >= 0
7811 && buffer_index < MAX_REF_PIC_NUM)
7812 pic = hevc->m_PIC[buffer_index];
7813 if (pic == NULL)
7814 return NULL;
7815 if (force_disp_pic_index & 0x200)
7816 return NULL;
7817
7818 vf = &hevc->vframe_dummy;
7819 if (get_double_write_mode(hevc)) {
7820 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
7821 VIDTYPE_VIU_NV21;
7822 if (hevc->m_ins_flag) {
7823 vf->canvas0Addr = vf->canvas1Addr = -1;
7824 vf->plane_num = 2;
7825 vf->canvas0_config[0] =
7826 pic->canvas_config[0];
7827 vf->canvas0_config[1] =
7828 pic->canvas_config[1];
7829
7830 vf->canvas1_config[0] =
7831 pic->canvas_config[0];
7832 vf->canvas1_config[1] =
7833 pic->canvas_config[1];
7834 } else {
7835 vf->canvas0Addr = vf->canvas1Addr
7836 = spec2canvas(pic);
7837 }
7838 } else {
7839 vf->canvas0Addr = vf->canvas1Addr = 0;
7840 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
7841 if (hevc->mmu_enable)
7842 vf->type |= VIDTYPE_SCATTER;
7843 }
7844 vf->compWidth = pic->width;
7845 vf->compHeight = pic->height;
7846 update_vf_memhandle(hevc, vf, pic);
7847 switch (hevc->bit_depth_luma) {
7848 case 9:
7849 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
7850 break;
7851 case 10:
7852 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
7853 | BITDEPTH_V10;
7854 break;
7855 default:
7856 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
7857 break;
7858 }
7859 if ((vf->type & VIDTYPE_COMPRESS) == 0)
7860 vf->bitdepth =
7861 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
7862 if (hevc->mem_saving_mode == 1)
7863 vf->bitdepth |= BITDEPTH_SAVING_MODE;
7864 vf->duration_pulldown = 0;
7865 vf->pts = 0;
7866 vf->pts_us64 = 0;
7867		set_frame_info(hevc, vf, pic);
7868
7869 vf->width = pic->width /
7870 get_double_write_ratio(hevc, pic->double_write_mode);
7871 vf->height = pic->height /
7872 get_double_write_ratio(hevc, pic->double_write_mode);
7873
7874 force_disp_pic_index |= 0x200;
7875 return vf;
7876 }
7877#endif
7878
7879 if (kfifo_get(&hevc->display_q, &vf)) {
7880 struct vframe_s *next_vf;
7881 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
7882 hevc_print(hevc, 0,
7883 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
7884 __func__, vf, vf->type, vf->index,
7885 get_pic_poc(hevc, vf->index & 0xff),
7886 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
7887 vf->pts, vf->pts_us64,
7888 vf->duration);
7889#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7890 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
7891 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
7892 if (pic->aux_data_buf && pic->aux_data_size > 0) {
7893 int i;
7894 struct PIC_s *pic =
7895 hevc->m_PIC[vf->index & 0xff];
7896 hevc_print(hevc, 0,
7897 "pic 0x%p aux size %d:\n",
7898 pic, pic->aux_data_size);
7899 for (i = 0; i < pic->aux_data_size; i++) {
7900 hevc_print_cont(hevc, 0,
7901 "%02x ", pic->aux_data_buf[i]);
7902 if (((i + 1) & 0xf) == 0)
7903 hevc_print_cont(hevc, 0, "\n");
7904 }
7905 hevc_print_cont(hevc, 0, "\n");
7906 }
7907 }
7908#endif
7909 hevc->show_frame_num++;
7910 hevc->vf_get_count++;
7911
7912 if (kfifo_peek(&hevc->display_q, &next_vf)) {
7913 vf->next_vf_pts_valid = true;
7914 vf->next_vf_pts = next_vf->pts;
7915 } else
7916 vf->next_vf_pts_valid = false;
7917
7918 return vf;
7919 }
7920
7921 return NULL;
7922}
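
/*
 * vf_valid_check(): guard vh265_vf_put() against vframe pointers that do
 * not belong to this instance's vfpool; an invalid pointer is only
 * logged and then ignored by the caller.
 */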
7923static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
7924 int i;
7925 for (i = 0; i < VF_POOL_SIZE; i++) {
7926 if (vf == &hevc->vfpool[i])
7927 return true;
7928 }
7929	pr_info(" h265 invalid vf has been put, vf = %p\n", vf);
7930	for (i = 0; i < VF_POOL_SIZE; i++) {
7931		pr_info("valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
7932 }
7933 return false;
7934}
7935
7936static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
7937{
7938 unsigned long flags;
7939#ifdef MULTI_INSTANCE_SUPPORT
7940 struct vdec_s *vdec = op_arg;
7941 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7942#else
7943 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7944#endif
7945 unsigned char index_top;
7946 unsigned char index_bot;
7947
7948 if (vf && (vf_valid_check(vf, hevc) == false))
7949 return;
7950 if (vf == (&hevc->vframe_dummy))
7951 return;
7952 index_top = vf->index & 0xff;
7953 index_bot = (vf->index >> 8) & 0xff;
7954 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
7955 hevc_print(hevc, 0,
7956 "%s(type %d index 0x%x)\n",
7957 __func__, vf->type, vf->index);
7958 hevc->vf_put_count++;
7959 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
7960 spin_lock_irqsave(&lock, flags);
7961
7962 if (index_top != 0xff
7963 && index_top < MAX_REF_PIC_NUM
7964 && hevc->m_PIC[index_top]) {
7965 if (hevc->m_PIC[index_top]->vf_ref > 0) {
7966 hevc->m_PIC[index_top]->vf_ref--;
7967
7968 if (hevc->m_PIC[index_top]->vf_ref == 0) {
7969 hevc->m_PIC[index_top]->output_ready = 0;
7970
7971 if (hevc->wait_buf != 0)
7972 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
7973 0x1);
7974 }
7975 }
7976 }
7977
7978 if (index_bot != 0xff
7979 && index_bot < MAX_REF_PIC_NUM
7980 && hevc->m_PIC[index_bot]) {
7981 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
7982 hevc->m_PIC[index_bot]->vf_ref--;
7983
7984 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
7985 hevc->m_PIC[index_bot]->output_ready = 0;
7986 if (hevc->wait_buf != 0)
7987 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
7988 0x1);
7989 }
7990 }
7991 }
7992 spin_unlock_irqrestore(&lock, flags);
7993}
7994
7995static int vh265_event_cb(int type, void *data, void *op_arg)
7996{
7997 unsigned long flags;
7998#ifdef MULTI_INSTANCE_SUPPORT
7999 struct vdec_s *vdec = op_arg;
8000 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8001#else
8002 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8003#endif
8004 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8005#if 0
8006 amhevc_stop();
8007#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8008 vf_light_unreg_provider(&vh265_vf_prov);
8009#endif
8010 spin_lock_irqsave(&hevc->lock, flags);
8011 vh265_local_init();
8012 vh265_prot_init();
8013 spin_unlock_irqrestore(&hevc->lock, flags);
8014#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8015 vf_reg_provider(&vh265_vf_prov);
8016#endif
8017 amhevc_start();
8018#endif
8019 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8020 struct provider_aux_req_s *req =
8021 (struct provider_aux_req_s *)data;
8022 unsigned char index;
8023
8024 spin_lock_irqsave(&lock, flags);
8025 index = req->vf->index & 0xff;
8026 req->aux_buf = NULL;
8027 req->aux_size = 0;
8028 if (req->bot_flag)
8029 index = (req->vf->index >> 8) & 0xff;
8030 if (index != 0xff
8031 && index < MAX_REF_PIC_NUM
8032 && hevc->m_PIC[index]) {
8033 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8034 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8035#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8036 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8037 req->dv_enhance_exist = false;
8038 else
8039 req->dv_enhance_exist =
8040 hevc->m_PIC[index]->dv_enhance_exist;
8041 hevc_print(hevc, H265_DEBUG_DV,
8042				"query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux size 0x%x\n",
8043 req->vf,
8044 hevc->m_PIC[index]->POC, index,
8045 req->dv_enhance_exist, req->aux_size);
8046#else
8047 req->dv_enhance_exist = 0;
8048#endif
8049 }
8050 spin_unlock_irqrestore(&lock, flags);
8051
8052 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8053 hevc_print(hevc, 0,
8054 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8055 __func__, type, index, req->aux_size);
8056#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8057 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8058 if ((force_bypass_dvenl & 0x80000000) == 0) {
8059 hevc_print(hevc, 0,
8060 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8061 __func__);
8062 hevc->bypass_dvenl_enable = 1;
8063 }
8064
8065#endif
8066 }
8067 return 0;
8068}
8069
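/*
 * Interlaced (pic_struct) field pairing: a vframe carrying a single
 * field is parked in pending_q until its pairing field arrives.
 * process_pending_vframe() recycles any excess pending entry, then
 * either sends the pending vframe to display_q on its own (when the
 * pairing picture is gone) or fills the free half of vf->index and the
 * matching canvas with pair_pic before queueing it for display.
 */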
8070#ifdef HEVC_PIC_STRUCT_SUPPORT
8071static int process_pending_vframe(struct hevc_state_s *hevc,
8072 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8073{
8074 struct vframe_s *vf;
8075
8076 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8077 hevc_print(hevc, 0,
8078 "%s: pair_pic index 0x%x %s\n",
8079 __func__, pair_pic->index,
8080 pair_frame_top_flag ?
8081 "top" : "bot");
8082
8083 if (kfifo_len(&hevc->pending_q) > 1) {
8084 unsigned long flags;
8085		/* do not keep more than one pending frame */
8086 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8087 hevc_print(hevc, 0,
8088 "fatal error, no available buffer slot.");
8089 return -1;
8090 }
8091 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8092 hevc_print(hevc, 0,
8093 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8094 __func__, vf->index);
8095 if ((hevc->double_write_mode == 3) &&
8096 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8097 vf->type |= VIDTYPE_COMPRESS;
8098 if (hevc->mmu_enable)
8099 vf->type |= VIDTYPE_SCATTER;
8100 }
8101 hevc->vf_pre_count++;
8102 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8103 spin_lock_irqsave(&lock, flags);
8104 vf->index &= 0xff;
8105 hevc->m_PIC[vf->index]->output_ready = 0;
8106 if (hevc->wait_buf != 0)
8107 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8108 0x1);
8109 spin_unlock_irqrestore(&lock, flags);
8110
8111 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8112 }
8113
8114 if (kfifo_peek(&hevc->pending_q, &vf)) {
8115 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8116 /*
8117 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8118 *do not use it
8119 */
8120 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8121 hevc_print(hevc, 0,
8122 "fatal error, no available buffer slot.");
8123 return -1;
8124 }
8125 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8126 hevc_print(hevc, 0,
8127 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8128 __func__, vf->index);
8129 if (vf) {
8130 if ((hevc->double_write_mode == 3) &&
8131 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8132 vf->type |= VIDTYPE_COMPRESS;
8133 if (hevc->mmu_enable)
8134 vf->type |= VIDTYPE_SCATTER;
8135 }
8136 hevc->vf_pre_count++;
8137 kfifo_put(&hevc->display_q,
8138 (const struct vframe_s *)vf);
8139 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8140 }
8141 } else if ((!pair_frame_top_flag) &&
8142 (((vf->index >> 8) & 0xff) == 0xff)) {
8143 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8144 hevc_print(hevc, 0,
8145 "fatal error, no available buffer slot.");
8146 return -1;
8147 }
8148 if (vf) {
8149 if ((hevc->double_write_mode == 3) &&
8150 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8151 vf->type |= VIDTYPE_COMPRESS;
8152 if (hevc->mmu_enable)
8153 vf->type |= VIDTYPE_SCATTER;
8154 }
8155 vf->index &= 0xff;
8156 vf->index |= (pair_pic->index << 8);
8157 vf->canvas1Addr = spec2canvas(pair_pic);
8158 pair_pic->vf_ref++;
8159 kfifo_put(&hevc->display_q,
8160 (const struct vframe_s *)vf);
8161 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8162 hevc->vf_pre_count++;
8163 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8164 hevc_print(hevc, 0,
8165 "%s vf => display_q: (index 0x%x)\n",
8166 __func__, vf->index);
8167 }
8168 } else if (pair_frame_top_flag &&
8169 ((vf->index & 0xff) == 0xff)) {
8170 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8171 hevc_print(hevc, 0,
8172 "fatal error, no available buffer slot.");
8173 return -1;
8174 }
8175 if (vf) {
8176 if ((hevc->double_write_mode == 3) &&
8177 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8178 vf->type |= VIDTYPE_COMPRESS;
8179 if (hevc->mmu_enable)
8180 vf->type |= VIDTYPE_SCATTER;
8181 }
8182 vf->index &= 0xff00;
8183 vf->index |= pair_pic->index;
8184 vf->canvas0Addr = spec2canvas(pair_pic);
8185 pair_pic->vf_ref++;
8186 kfifo_put(&hevc->display_q,
8187 (const struct vframe_s *)vf);
8188 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8189 hevc->vf_pre_count++;
8190 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8191 hevc_print(hevc, 0,
8192 "%s vf => display_q: (index 0x%x)\n",
8193 __func__, vf->index);
8194 }
8195 }
8196 }
8197 return 0;
8198}
8199#endif
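/*
 * update_vf_memhandle(): attach the memory handles backing this vframe.
 * Scatter (MMU) frames get the per-picture mmu_box handle plus the bmmu
 * compressed-header buffer handle; non-scatter frames get only the bmmu
 * buffer handle; pictures without a valid index get NULL handles.
 */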
8200static void update_vf_memhandle(struct hevc_state_s *hevc,
8201 struct vframe_s *vf, struct PIC_s *pic)
8202{
8203 if (pic->index < 0) {
8204 vf->mem_handle = NULL;
8205 vf->mem_head_handle = NULL;
8206 } else if (vf->type & VIDTYPE_SCATTER) {
8207 vf->mem_handle =
8208 decoder_mmu_box_get_mem_handle(
8209 hevc->mmu_box, pic->index);
8210 vf->mem_head_handle =
8211 decoder_bmmu_box_get_mem_handle(
8212 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8213 } else {
8214 vf->mem_handle =
8215 decoder_bmmu_box_get_mem_handle(
8216 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8217 vf->mem_head_handle = NULL;
8218 /*vf->mem_head_handle =
8219 decoder_bmmu_box_get_mem_handle(
8220 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8221 }
8222 return;
8223}
8224
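/*
 * fill_frame_info(): report per-frame QoS statistics (frame type, size,
 * pts, mv/qp/skip min/avg/max) to the vdec framework when
 * frameinfo_enable is set.
 */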
8225static void fill_frame_info(struct hevc_state_s *hevc,
8226 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8227{
8228 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8229 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8230 vframe_qos->type = 4;
8231 else if (pic->slice_type == I_SLICE)
8232 vframe_qos->type = 1;
8233 else if (pic->slice_type == P_SLICE)
8234 vframe_qos->type = 2;
8235 else if (pic->slice_type == B_SLICE)
8236 vframe_qos->type = 3;
8237/*
8238#define SHOW_QOS_INFO
8239*/
8240 vframe_qos->size = framesize;
8241 vframe_qos->pts = pts;
8242#ifdef SHOW_QOS_INFO
8243 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8244#endif
8245
8246
8247 vframe_qos->max_mv = pic->max_mv;
8248 vframe_qos->avg_mv = pic->avg_mv;
8249 vframe_qos->min_mv = pic->min_mv;
8250#ifdef SHOW_QOS_INFO
8251 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8252 vframe_qos->max_mv,
8253 vframe_qos->avg_mv,
8254 vframe_qos->min_mv);
8255#endif
8256
8257 vframe_qos->max_qp = pic->max_qp;
8258 vframe_qos->avg_qp = pic->avg_qp;
8259 vframe_qos->min_qp = pic->min_qp;
8260#ifdef SHOW_QOS_INFO
8261 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8262 vframe_qos->max_qp,
8263 vframe_qos->avg_qp,
8264 vframe_qos->min_qp);
8265#endif
8266
8267 vframe_qos->max_skip = pic->max_skip;
8268 vframe_qos->avg_skip = pic->avg_skip;
8269 vframe_qos->min_skip = pic->min_skip;
8270#ifdef SHOW_QOS_INFO
8271 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8272 vframe_qos->max_skip,
8273 vframe_qos->avg_skip,
8274 vframe_qos->min_skip);
8275#endif
8276
8277 vframe_qos->num++;
8278
8279 if (hevc->frameinfo_enable)
8280 vdec_fill_frame_info(vframe_qos, 1);
8281}
8282
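/*
 * prepare_display_buf(): take a free vframe from newframe_q, fill in the
 * pts (stream lookup or duration based), compressed/canvas addresses,
 * bit depth and conformance-window cropping, then queue it to display_q.
 * For interlaced pic_struct values the single decoded picture is split
 * into two or three field vframes (vf_ref 2/3), or routed through
 * pending_q for field pairing.
 */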
8283static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8284{
8285#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8286 struct vdec_s *vdec = hw_to_vdec(hevc);
8287#endif
8288 struct vframe_s *vf = NULL;
8289 int stream_offset = pic->stream_offset;
8290 unsigned short slice_type = pic->slice_type;
8291 u32 frame_size;
8292
8293 if (force_disp_pic_index & 0x100) {
8294 /*recycle directly*/
8295 pic->output_ready = 0;
8296 return -1;
8297 }
8298 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8299 hevc_print(hevc, 0,
8300 "fatal error, no available buffer slot.");
8301 return -1;
8302 }
8303 display_frame_count[hevc->index]++;
8304 if (vf) {
8305 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8306 "%s: pic index 0x%x\n",
8307 __func__, pic->index);*/
8308
8309 if (hevc->is_used_v4l) {
8310 vf->v4l_mem_handle
8311 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8312 if (hevc->mmu_enable) {
8313 if (vdec_v4l_binding_fd_and_vf(vf->v4l_mem_handle, vf) < 0) {
8314 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
8315 "v4l: binding vf fail.\n");
8316 return -1;
8317 }
8318 }
8319 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
8320 "[%d] %s(), v4l mem handle: 0x%lx\n",
8321 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id,
8322 __func__, vf->v4l_mem_handle);
8323 }
8324
8325#ifdef MULTI_INSTANCE_SUPPORT
8326 if (vdec_frame_based(hw_to_vdec(hevc))) {
8327 vf->pts = pic->pts;
8328 vf->pts_us64 = pic->pts64;
8329 vf->timestamp = pic->timestamp;
8330 }
8331 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8332 stream_offset, &vf->pts, 0) != 0) { */
8333#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8334 else if (vdec->master == NULL) {
8335#else
8336 else {
8337#endif
8338#endif
8339 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8340 "call pts_lookup_offset_us64(0x%x)\n",
8341 stream_offset);
8342 if (pts_lookup_offset_us64
8343 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8344 &frame_size, 0,
8345 &vf->pts_us64) != 0) {
8346#ifdef DEBUG_PTS
8347 hevc->pts_missed++;
8348#endif
8349 vf->pts = 0;
8350 vf->pts_us64 = 0;
8351 }
8352#ifdef DEBUG_PTS
8353 else
8354 hevc->pts_hit++;
8355#endif
8356#ifdef MULTI_INSTANCE_SUPPORT
8357#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8358 } else {
8359 vf->pts = 0;
8360 vf->pts_us64 = 0;
8361 }
8362#else
8363 }
8364#endif
8365#endif
8366 if (pts_unstable && (hevc->frame_dur > 0))
8367 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8368
8369 fill_frame_info(hevc, pic, frame_size, vf->pts);
8370
8371 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8372 && hevc->get_frame_dur) {
8373 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8374
8375 if (pts_diff < 0) {
8376 hevc->pts_mode_switching_count++;
8377 hevc->pts_mode_recovery_count = 0;
8378
8379 if (hevc->pts_mode_switching_count >=
8380 PTS_MODE_SWITCHING_THRESHOLD) {
8381 hevc->pts_mode =
8382 PTS_NONE_REF_USE_DURATION;
8383 hevc_print(hevc, 0,
8384 "HEVC: switch to n_d mode.\n");
8385 }
8386
8387 } else {
8388 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8389
8390 hevc->pts_mode_recovery_count++;
8391 if (hevc->pts_mode_recovery_count > p) {
8392 hevc->pts_mode_switching_count = 0;
8393 hevc->pts_mode_recovery_count = 0;
8394 }
8395 }
8396 }
8397
8398 if (vf->pts != 0)
8399 hevc->last_lookup_pts = vf->pts;
8400
8401 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8402 && (slice_type != 2))
8403 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8404 hevc->last_pts = vf->pts;
8405
8406 if (vf->pts_us64 != 0)
8407 hevc->last_lookup_pts_us64 = vf->pts_us64;
8408
8409 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8410 && (slice_type != 2)) {
8411 vf->pts_us64 =
8412 hevc->last_pts_us64 +
8413 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8414 }
8415 hevc->last_pts_us64 = vf->pts_us64;
8416 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8417 hevc_print(hevc, 0,
8418 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8419 vf->pts, vf->pts_us64);
8420 }
8421
8422 /*
8423 *vf->index:
8424 *(1) vf->type is VIDTYPE_PROGRESSIVE
8425 * and vf->canvas0Addr != vf->canvas1Addr,
8426 * vf->index[7:0] is the index of top pic
8427 * vf->index[15:8] is the index of bot pic
8428 *(2) other cases,
8429 * only vf->index[7:0] is used
8430 * vf->index[15:8] == 0xff
8431 */
8432 vf->index = 0xff00 | pic->index;
8433#if 1
8434/*SUPPORT_10BIT*/
8435 if (pic->double_write_mode & 0x10) {
8436 /* double write only */
8437 vf->compBodyAddr = 0;
8438 vf->compHeadAddr = 0;
8439 } else {
8440
8441 if (hevc->mmu_enable) {
8442 vf->compBodyAddr = 0;
8443 vf->compHeadAddr = pic->header_adr;
8444 } else {
8445 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8446 vf->compHeadAddr = pic->mc_y_adr +
8447 pic->losless_comp_body_size;
8448 vf->mem_head_handle = NULL;
8449 }
8450
8451 /*head adr*/
8452 vf->canvas0Addr = vf->canvas1Addr = 0;
8453 }
8454 if (pic->double_write_mode) {
8455 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8456 vf->type |= VIDTYPE_VIU_NV21;
8457 if ((pic->double_write_mode == 3) &&
8458 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8459 vf->type |= VIDTYPE_COMPRESS;
8460 if (hevc->mmu_enable)
8461 vf->type |= VIDTYPE_SCATTER;
8462 }
8463#ifdef MULTI_INSTANCE_SUPPORT
8464 if (hevc->m_ins_flag &&
8465 (get_dbg_flag(hevc)
8466 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8467 vf->canvas0Addr = vf->canvas1Addr = -1;
8468 vf->plane_num = 2;
8469 vf->canvas0_config[0] =
8470 pic->canvas_config[0];
8471 vf->canvas0_config[1] =
8472 pic->canvas_config[1];
8473
8474 vf->canvas1_config[0] =
8475 pic->canvas_config[0];
8476 vf->canvas1_config[1] =
8477 pic->canvas_config[1];
8478
8479 } else
8480#endif
8481 vf->canvas0Addr = vf->canvas1Addr
8482 = spec2canvas(pic);
8483 } else {
8484 vf->canvas0Addr = vf->canvas1Addr = 0;
8485 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8486 if (hevc->mmu_enable)
8487 vf->type |= VIDTYPE_SCATTER;
8488 }
8489 vf->compWidth = pic->width;
8490 vf->compHeight = pic->height;
8491 update_vf_memhandle(hevc, vf, pic);
8492 switch (pic->bit_depth_luma) {
8493 case 9:
8494 vf->bitdepth = BITDEPTH_Y9;
8495 break;
8496 case 10:
8497 vf->bitdepth = BITDEPTH_Y10;
8498 break;
8499 default:
8500 vf->bitdepth = BITDEPTH_Y8;
8501 break;
8502 }
8503 switch (pic->bit_depth_chroma) {
8504 case 9:
8505 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8506 break;
8507 case 10:
8508 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8509 break;
8510 default:
8511 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8512 break;
8513 }
8514 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8515 vf->bitdepth =
8516 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8517 if (pic->mem_saving_mode == 1)
8518 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8519#else
8520 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8521 vf->type |= VIDTYPE_VIU_NV21;
8522 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8523#endif
8524 set_frame_info(hevc, vf, pic);
8525 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8526 /* hevc_print(hevc, 0,
8527 "aaa: %d/%d, %d/%d\n",
8528 vf->width,vf->height, pic->width, pic->height); */
8529 vf->width = pic->width;
8530 vf->height = pic->height;
8531
8532 if (force_w_h != 0) {
8533 vf->width = (force_w_h >> 16) & 0xffff;
8534 vf->height = force_w_h & 0xffff;
8535 }
8536 if (force_fps & 0x100) {
8537 u32 rate = force_fps & 0xff;
8538
8539 if (rate)
8540 vf->duration = 96000/rate;
8541 else
8542 vf->duration = 0;
8543 }
8544 if (force_fps & 0x200) {
8545 vf->pts = 0;
8546 vf->pts_us64 = 0;
8547 }
8548 /*
8549		 * TODO:
8550		 * move the code below to get_new_pic();
8551		 * hevc->xxx may only be used for the currently decoded pic
8552 */
8553 if (pic->conformance_window_flag &&
8554 (get_dbg_flag(hevc) &
8555 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
8556 unsigned int SubWidthC, SubHeightC;
8557
8558 switch (pic->chroma_format_idc) {
8559 case 1:
8560 SubWidthC = 2;
8561 SubHeightC = 2;
8562 break;
8563 case 2:
8564 SubWidthC = 2;
8565 SubHeightC = 1;
8566 break;
8567 default:
8568 SubWidthC = 1;
8569 SubHeightC = 1;
8570 break;
8571 }
8572 vf->width -= SubWidthC *
8573 (pic->conf_win_left_offset +
8574 pic->conf_win_right_offset);
8575 vf->height -= SubHeightC *
8576 (pic->conf_win_top_offset +
8577 pic->conf_win_bottom_offset);
8578
8579 vf->compWidth -= SubWidthC *
8580 (pic->conf_win_left_offset +
8581 pic->conf_win_right_offset);
8582 vf->compHeight -= SubHeightC *
8583 (pic->conf_win_top_offset +
8584 pic->conf_win_bottom_offset);
8585
8586 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
8587 hevc_print(hevc, 0,
8588 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
8589 pic->chroma_format_idc,
8590 pic->conf_win_left_offset,
8591 pic->conf_win_right_offset,
8592 pic->conf_win_top_offset,
8593 pic->conf_win_bottom_offset,
8594 vf->width, vf->height, vf->compWidth, vf->compHeight);
8595 }
8596
8597 vf->width = vf->width /
8598 get_double_write_ratio(hevc, pic->double_write_mode);
8599 vf->height = vf->height /
8600 get_double_write_ratio(hevc, pic->double_write_mode);
8601#ifdef HEVC_PIC_STRUCT_SUPPORT
8602 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
8603 struct vframe_s *vf2;
8604
8605 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8606 hevc_print(hevc, 0,
8607 "pic_struct = %d index 0x%x\n",
8608 pic->pic_struct,
8609 pic->index);
8610
8611 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8612 hevc_print(hevc, 0,
8613 "fatal error, no available buffer slot.");
8614 return -1;
8615 }
8616 pic->vf_ref = 2;
8617 vf->duration = vf->duration>>1;
8618 memcpy(vf2, vf, sizeof(struct vframe_s));
8619
8620 if (pic->pic_struct == 3) {
8621 vf->type = VIDTYPE_INTERLACE_TOP
8622 | VIDTYPE_VIU_NV21;
8623 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8624 | VIDTYPE_VIU_NV21;
8625 } else {
8626 vf->type = VIDTYPE_INTERLACE_BOTTOM
8627 | VIDTYPE_VIU_NV21;
8628 vf2->type = VIDTYPE_INTERLACE_TOP
8629 | VIDTYPE_VIU_NV21;
8630 }
8631 hevc->vf_pre_count++;
8632 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8633 kfifo_put(&hevc->display_q,
8634 (const struct vframe_s *)vf);
8635 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8636 hevc->vf_pre_count++;
8637 kfifo_put(&hevc->display_q,
8638 (const struct vframe_s *)vf2);
8639 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8640 } else if (pic->pic_struct == 5
8641 || pic->pic_struct == 6) {
8642 struct vframe_s *vf2, *vf3;
8643
8644 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8645 hevc_print(hevc, 0,
8646 "pic_struct = %d index 0x%x\n",
8647 pic->pic_struct,
8648 pic->index);
8649
8650 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8651 hevc_print(hevc, 0,
8652 "fatal error, no available buffer slot.");
8653 return -1;
8654 }
8655 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
8656 hevc_print(hevc, 0,
8657 "fatal error, no available buffer slot.");
8658 return -1;
8659 }
8660 pic->vf_ref = 3;
8661 vf->duration = vf->duration/3;
8662 memcpy(vf2, vf, sizeof(struct vframe_s));
8663 memcpy(vf3, vf, sizeof(struct vframe_s));
8664
8665 if (pic->pic_struct == 5) {
8666 vf->type = VIDTYPE_INTERLACE_TOP
8667 | VIDTYPE_VIU_NV21;
8668 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8669 | VIDTYPE_VIU_NV21;
8670 vf3->type = VIDTYPE_INTERLACE_TOP
8671 | VIDTYPE_VIU_NV21;
8672 } else {
8673 vf->type = VIDTYPE_INTERLACE_BOTTOM
8674 | VIDTYPE_VIU_NV21;
8675 vf2->type = VIDTYPE_INTERLACE_TOP
8676 | VIDTYPE_VIU_NV21;
8677 vf3->type = VIDTYPE_INTERLACE_BOTTOM
8678 | VIDTYPE_VIU_NV21;
8679 }
8680 hevc->vf_pre_count++;
8681 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8682 kfifo_put(&hevc->display_q,
8683 (const struct vframe_s *)vf);
8684 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8685 hevc->vf_pre_count++;
8686 kfifo_put(&hevc->display_q,
8687 (const struct vframe_s *)vf2);
8688 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8689 hevc->vf_pre_count++;
8690 kfifo_put(&hevc->display_q,
8691 (const struct vframe_s *)vf3);
8692 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
8693
8694 } else if (pic->pic_struct == 9
8695 || pic->pic_struct == 10) {
8696 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8697 hevc_print(hevc, 0,
8698 "pic_struct = %d index 0x%x\n",
8699 pic->pic_struct,
8700 pic->index);
8701
8702 pic->vf_ref = 1;
8703 /* process previous pending vf*/
8704 process_pending_vframe(hevc,
8705 pic, (pic->pic_struct == 9));
8706
8707 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8708 /* process current vf */
8709 kfifo_put(&hevc->pending_q,
8710 (const struct vframe_s *)vf);
8711 vf->height <<= 1;
8712 if (pic->pic_struct == 9) {
8713 vf->type = VIDTYPE_INTERLACE_TOP
8714 | VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8715 process_pending_vframe(hevc,
8716 hevc->pre_bot_pic, 0);
8717 } else {
8718 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8719 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8720 vf->index = (pic->index << 8) | 0xff;
8721 process_pending_vframe(hevc,
8722 hevc->pre_top_pic, 1);
8723 }
8724
8725 if (hevc->vf_pre_count == 0)
8726 hevc->vf_pre_count++;
8727
8728 /**/
8729 if (pic->pic_struct == 9)
8730 hevc->pre_top_pic = pic;
8731 else
8732 hevc->pre_bot_pic = pic;
8733
8734 } else if (pic->pic_struct == 11
8735 || pic->pic_struct == 12) {
8736 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8737 hevc_print(hevc, 0,
8738 "pic_struct = %d index 0x%x\n",
8739 pic->pic_struct,
8740 pic->index);
8741 pic->vf_ref = 1;
8742 /* process previous pending vf*/
8743 process_pending_vframe(hevc, pic,
8744 (pic->pic_struct == 11));
8745
8746 /* put current into pending q */
8747 vf->height <<= 1;
8748 if (pic->pic_struct == 11)
8749 vf->type = VIDTYPE_INTERLACE_TOP |
8750 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8751 else {
8752 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8753 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8754 vf->index = (pic->index << 8) | 0xff;
8755 }
8756 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8757 kfifo_put(&hevc->pending_q,
8758 (const struct vframe_s *)vf);
8759 if (hevc->vf_pre_count == 0)
8760 hevc->vf_pre_count++;
8761
8762 /**/
8763 if (pic->pic_struct == 11)
8764 hevc->pre_top_pic = pic;
8765 else
8766 hevc->pre_bot_pic = pic;
8767
8768 } else {
8769 pic->vf_ref = 1;
8770
8771 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8772 hevc_print(hevc, 0,
8773 "pic_struct = %d index 0x%x\n",
8774 pic->pic_struct,
8775 pic->index);
8776
8777 switch (pic->pic_struct) {
8778 case 7:
8779 vf->duration <<= 1;
8780 break;
8781 case 8:
8782 vf->duration = vf->duration * 3;
8783 break;
8784 case 1:
8785 vf->height <<= 1;
8786 vf->type = VIDTYPE_INTERLACE_TOP |
8787 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8788 process_pending_vframe(hevc, pic, 1);
8789 hevc->pre_top_pic = pic;
8790 break;
8791 case 2:
8792 vf->height <<= 1;
8793 vf->type = VIDTYPE_INTERLACE_BOTTOM
8794 | VIDTYPE_VIU_NV21
8795 | VIDTYPE_VIU_FIELD;
8796 process_pending_vframe(hevc, pic, 0);
8797 hevc->pre_bot_pic = pic;
8798 break;
8799 }
8800 hevc->vf_pre_count++;
8801 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8802 kfifo_put(&hevc->display_q,
8803 (const struct vframe_s *)vf);
8804 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8805 }
8806#else
8807 vf->type_original = vf->type;
8808 pic->vf_ref = 1;
8809 hevc->vf_pre_count++;
8810 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8811 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
8812 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8813
8814 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8815 hevc_print(hevc, 0,
8816 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8817 __func__, vf->type, vf->index,
8818 get_pic_poc(hevc, vf->index & 0xff),
8819 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8820 vf->pts, vf->pts_us64,
8821 vf->duration);
8822#endif
8823#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8824 /*count info*/
8825 vdec_count_info(gvs, 0, stream_offset);
8826#endif
8827 hw_to_vdec(hevc)->vdec_fps_detec(hw_to_vdec(hevc)->id);
8828 if (without_display_mode == 0) {
8829 vf_notify_receiver(hevc->provider_name,
8830 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
8831 }
8832 else
8833 vh265_vf_put(vh265_vf_get(vdec), vdec);
8834 }
8835
8836 return 0;
8837}
8838
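/*
 * notify_v4l_eos(): for the v4l2 path, queue one empty vframe flagged
 * VFRAME_FLAG_EMPTY_FRAME_V4L so the receiver sees end-of-stream after
 * the last real picture.
 */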
8839static int notify_v4l_eos(struct vdec_s *vdec)
8840{
8841 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
8842 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
8843 struct vframe_s *vf = NULL;
8844 struct vdec_v4l2_buffer *fb = NULL;
8845
8846 if (hw->is_used_v4l && hw->eos) {
8847 if (kfifo_get(&hw->newframe_q, &vf) == 0 || vf == NULL) {
8848 hevc_print(hw, 0,
8849 "%s fatal error, no available buffer slot.\n",
8850 __func__);
8851 return -1;
8852 }
8853
8854 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb)) {
8855 pr_err("[%d] get fb fail.\n", ctx->id);
8856 return -1;
8857 }
8858
8859 vf->type |= VIDTYPE_V4L_EOS;
8860 vf->timestamp = ULONG_MAX;
8861 vf->v4l_mem_handle = (unsigned long)fb;
8862 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
8863
8864 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
8865 vf_notify_receiver(vdec->vf_provider_name,
8866 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
8867
8868 pr_info("[%d] H265 EOS notify.\n", ctx->id);
8869 }
8870
8871 return 0;
8872}
8873
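/*
 * process_nal_sei(): SEI parsing for the non-multi-instance path.  The
 * payload bytes are pulled 16 bits at a time from HEVC_SHIFTED_DATA,
 * with a HEVC_SHIFT_COMMAND write after each read to advance the shift
 * register.  Only payload_type 137 (mastering display colour volume) is
 * handled; remaining bytes are skipped 8 bits at a time.
 */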
8874static void process_nal_sei(struct hevc_state_s *hevc,
8875 int payload_type, int payload_size)
8876{
8877 unsigned short data;
8878
8879 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8880 hevc_print(hevc, 0,
8881 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
8882 payload_type, payload_size);
8883
8884 if (payload_type == 137) {
8885 int i, j;
8886 /* MASTERING_DISPLAY_COLOUR_VOLUME */
8887 if (payload_size >= 24) {
8888 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8889 hevc_print(hevc, 0,
8890 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
8891 for (i = 0; i < 3; i++) {
8892 for (j = 0; j < 2; j++) {
8893 data =
8894 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8895 hevc->primaries[i][j] = data;
8896 WRITE_HREG(HEVC_SHIFT_COMMAND,
8897 (1<<7)|16);
8898 if (get_dbg_flag(hevc) &
8899 H265_DEBUG_PRINT_SEI)
8900 hevc_print(hevc, 0,
8901 "\t\tprimaries[%1d][%1d] = %04x\n",
8902 i, j, hevc->primaries[i][j]);
8903 }
8904 }
8905 for (i = 0; i < 2; i++) {
8906 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8907 hevc->white_point[i] = data;
8908 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
8909 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8910 hevc_print(hevc, 0,
8911 "\t\twhite_point[%1d] = %04x\n",
8912 i, hevc->white_point[i]);
8913 }
8914 for (i = 0; i < 2; i++) {
8915 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8916 hevc->luminance[i] = data << 16;
8917 WRITE_HREG(HEVC_SHIFT_COMMAND,
8918 (1<<7)|16);
8919 data =
8920 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8921 hevc->luminance[i] |= data;
8922 WRITE_HREG(HEVC_SHIFT_COMMAND,
8923 (1<<7)|16);
8924 if (get_dbg_flag(hevc) &
8925 H265_DEBUG_PRINT_SEI)
8926 hevc_print(hevc, 0,
8927 "\t\tluminance[%1d] = %08x\n",
8928 i, hevc->luminance[i]);
8929 }
8930 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
8931 }
8932 payload_size -= 24;
8933 while (payload_size > 0) {
8934 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
8935 payload_size--;
8936 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
8937 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
8938 }
8939 }
8940}
8941
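/*
 * hevc_recover(): error recovery for the legacy (non multi-instance)
 * path.  It stops the HEVC core, latches the stream buffer pointers,
 * recomputes the 64-bit shift byte count so it stays consistent with the
 * read pointer inside the ring buffer, resets the core through
 * DOS_SW_RESET3, restores the stream registers and restarts the ucode
 * searching for the next start code.
 */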
8942static int hevc_recover(struct hevc_state_s *hevc)
8943{
8944 int ret = -1;
8945 u32 rem;
8946 u64 shift_byte_count64;
8947 unsigned int hevc_shift_byte_count;
8948 unsigned int hevc_stream_start_addr;
8949 unsigned int hevc_stream_end_addr;
8950 unsigned int hevc_stream_rd_ptr;
8951 unsigned int hevc_stream_wr_ptr;
8952 unsigned int hevc_stream_control;
8953 unsigned int hevc_stream_fifo_ctl;
8954 unsigned int hevc_stream_buf_size;
8955
8956 mutex_lock(&vh265_mutex);
8957#if 0
8958 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
8959 int ii;
8960
8961 for (ii = 0; ii < 4; ii++)
8962 hevc_print(hevc, 0,
8963 "%04x ", hevc->debug_ptr[i + 3 - ii]);
8964 if (((i + ii) & 0xf) == 0)
8965 hevc_print(hevc, 0, "\n");
8966 }
8967#endif
8968#define ES_VID_MAN_RD_PTR (1<<0)
8969 if (!hevc->init_flag) {
8970 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
8971 mutex_unlock(&vh265_mutex);
8972 return ret;
8973 }
8974 amhevc_stop();
8975 msleep(20);
8976 ret = 0;
8977 /* reset */
8978 WRITE_PARSER_REG(PARSER_VIDEO_RP, READ_VREG(HEVC_STREAM_RD_PTR));
8979 SET_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
8980
8981 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
8982 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
8983 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
8984 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
8985 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
8986 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
8987 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
8988
8989	/* the HEVC stream buffer will be reset and restarted
8990	 * from the current hevc_stream_rd_ptr position
8991 */
8992 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
8993 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
8994 if ((hevc->shift_byte_count_lo & (1 << 31))
8995 && ((hevc_shift_byte_count & (1 << 31)) == 0))
8996 hevc->shift_byte_count_hi++;
8997
8998 hevc->shift_byte_count_lo = hevc_shift_byte_count;
8999 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9000 hevc->shift_byte_count_lo;
9001 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9002 shift_byte_count64 -= rem;
9003 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9004
9005 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9006 shift_byte_count64 += hevc_stream_buf_size;
9007
9008 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9009 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
9010
9011 WRITE_VREG(DOS_SW_RESET3,
9012 /* (1<<2)| */
9013 (1 << 3) | (1 << 4) | (1 << 8) |
9014 (1 << 11) | (1 << 12) | (1 << 14)
9015 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9016 WRITE_VREG(DOS_SW_RESET3, 0);
9017
9018 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9019 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9020 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9021 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9022 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9023 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9024 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9025
9026 hevc_config_work_space_hw(hevc);
9027 decoder_hw_reset();
9028
9029 hevc->have_vps = 0;
9030 hevc->have_sps = 0;
9031 hevc->have_pps = 0;
9032
9033 hevc->have_valid_start_slice = 0;
9034
9035 if (get_double_write_mode(hevc) & 0x10)
9036 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9037 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
9038 );
9039
9040 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9041 /* clear mailbox interrupt */
9042 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9043 /* enable mailbox interrupt */
9044 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9045 /* disable PSCALE for hardware sharing */
9046 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9047
9048 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9049
9050 WRITE_VREG(DEBUG_REG1, 0x0);
9051
9052 if ((error_handle_policy & 1) == 0) {
9053 if ((error_handle_policy & 4) == 0) {
9054 /* ucode auto mode, and do not check vps/sps/pps/idr */
9055 WRITE_VREG(NAL_SEARCH_CTL,
9056 0xc);
9057 } else {
9058			WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manually parse NAL */
9059 }
9060 } else {
9061		WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manually parse NAL */
9062 }
9063
9064 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9065 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9066 WRITE_VREG(NAL_SEARCH_CTL,
9067 READ_VREG(NAL_SEARCH_CTL)
9068 | ((parser_sei_enable & 0x7) << 17));
9069#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9070 WRITE_VREG(NAL_SEARCH_CTL,
9071 READ_VREG(NAL_SEARCH_CTL) |
9072 ((parser_dolby_vision_enable & 0x1) << 20));
9073#endif
9074 config_decode_mode(hevc);
9075 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9076
9077 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9078 /* amhevc_disable(); */
9079 /* return -EBUSY; */
9080 /* } */
9081#if 0
9082 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9083 int ii;
9084
9085 for (ii = 0; ii < 4; ii++) {
9086 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9087 hevc_print(hevc, 0,
9088 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9089 }
9090 if (((i + ii) & 0xf) == 0)
9091 hevc_print(hevc, 0, "\n");
9092 }
9093#endif
9094 init_pic_list_hw(hevc);
9095
9096 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9097 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9098
9099#ifdef SWAP_HEVC_UCODE
9100 if (!tee_enabled() && hevc->is_swap &&
9101 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9102 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9103 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9104 }
9105#endif
9106 amhevc_start();
9107
9108 /* skip, search next start code */
9109 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9110 hevc->skip_flag = 1;
9111#ifdef ERROR_HANDLE_DEBUG
9112 if (dbg_nal_skip_count & 0x20000) {
9113 dbg_nal_skip_count &= ~0x20000;
9114 mutex_unlock(&vh265_mutex);
9115 return ret;
9116 }
9117#endif
9118 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9119	/* Interrupt Amrisc to execute */
9120 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9121#ifdef MULTI_INSTANCE_SUPPORT
9122 if (!hevc->m_ins_flag)
9123#endif
9124 hevc->first_pic_after_recover = 1;
9125 mutex_unlock(&vh265_mutex);
9126 return ret;
9127}
9128
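/*
 * dump_aux_buf(): debug helper that hex-dumps the prefix and suffix aux
 * buffers; their sizes come from HEVC_AUX_DATA_SIZE (high/low 16 bits,
 * in units of 16 bytes).
 */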
9129static void dump_aux_buf(struct hevc_state_s *hevc)
9130{
9131 int i;
9132 unsigned short *aux_adr =
9133 (unsigned short *)
9134 hevc->aux_addr;
9135 unsigned int aux_size =
9136 (READ_VREG(HEVC_AUX_DATA_SIZE)
9137 >> 16) << 4;
9138
9139 if (hevc->prefix_aux_size > 0) {
9140 hevc_print(hevc, 0,
9141 "prefix aux: (size %d)\n",
9142 aux_size);
9143 for (i = 0; i <
9144 (aux_size >> 1); i++) {
9145 hevc_print_cont(hevc, 0,
9146 "%04x ",
9147 *(aux_adr + i));
9148 if (((i + 1) & 0xf)
9149 == 0)
9150 hevc_print_cont(hevc,
9151 0, "\n");
9152 }
9153 }
9154 if (hevc->suffix_aux_size > 0) {
9155 aux_adr = (unsigned short *)
9156 (hevc->aux_addr +
9157 hevc->prefix_aux_size);
9158 aux_size =
9159 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9160 << 4;
9161 hevc_print(hevc, 0,
9162 "suffix aux: (size %d)\n",
9163 aux_size);
9164 for (i = 0; i <
9165 (aux_size >> 1); i++) {
9166 hevc_print_cont(hevc, 0,
9167 "%04x ", *(aux_adr + i));
9168 if (((i + 1) & 0xf) == 0)
9169 hevc_print_cont(hevc, 0, "\n");
9170 }
9171 }
9172}
9173
9174#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9175static void dolby_get_meta(struct hevc_state_s *hevc)
9176{
9177 struct vdec_s *vdec = hw_to_vdec(hevc);
9178
9179 if (get_dbg_flag(hevc) &
9180 H265_DEBUG_BUFMGR_MORE)
9181 dump_aux_buf(hevc);
9182 if (vdec->dolby_meta_with_el || vdec->slave) {
9183 set_aux_data(hevc,
9184 hevc->cur_pic, 0, 0);
9185 } else if (vdec->master) {
9186 struct hevc_state_s *hevc_ba =
9187 (struct hevc_state_s *)
9188 vdec->master->private;
9189 /*do not use hevc_ba*/
9190 set_aux_data(hevc,
9191 hevc_ba->cur_pic,
9192 0, 1);
9193 set_aux_data(hevc,
9194 hevc->cur_pic, 0, 2);
9195 }
9196}
9197#endif
9198
9199static void read_decode_info(struct hevc_state_s *hevc)
9200{
9201 uint32_t decode_info =
9202 READ_HREG(HEVC_DECODE_INFO);
9203 hevc->start_decoding_flag |=
9204 (decode_info & 0xff);
9205 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9206}
9207
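/*
 * vh265_isr_thread_fn(): threaded half of the decoder interrupt.  It
 * dispatches on hevc->dec_status: buffer-empty states map to
 * DEC_RESULT_AGAIN / DEC_RESULT_GET_DATA, HEVC_DECPIC_DATA_DONE marks
 * the current picture decoded and tries to output it, and
 * HEVC_NAL_SEARCH_DONE handles EOS flush and error-skip bookkeeping.
 */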
9208static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9209{
9210 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9211 unsigned int dec_status = hevc->dec_status;
9212 int i, ret;
9213
9214#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9215 struct vdec_s *vdec = hw_to_vdec(hevc);
9216#endif
9217
9218 if (hevc->eos)
9219 return IRQ_HANDLED;
9220 if (
9221#ifdef MULTI_INSTANCE_SUPPORT
9222 (!hevc->m_ins_flag) &&
9223#endif
9224 hevc->error_flag == 1) {
9225 if ((error_handle_policy & 0x10) == 0) {
9226 if (hevc->cur_pic) {
9227 int current_lcu_idx =
9228 READ_VREG(HEVC_PARSER_LCU_START)
9229 & 0xffffff;
9230 if (current_lcu_idx <
9231 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9232 hevc->cur_pic->error_mark = 1;
9233
9234 }
9235 }
9236 if ((error_handle_policy & 1) == 0) {
9237 hevc->error_skip_nal_count = 1;
9238			/* manually search for NALs: skip error_skip_nal_count
9239			 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9240 */
9241 WRITE_VREG(NAL_SEARCH_CTL,
9242 (error_skip_nal_count << 4) | 0x1);
9243 } else {
9244 hevc->error_skip_nal_count = error_skip_nal_count;
9245			WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manually parse NAL */
9246 }
9247 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9248#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9249 || vdec->master
9250 || vdec->slave
9251#endif
9252 ) {
9253 WRITE_VREG(NAL_SEARCH_CTL,
9254 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9255 }
9256 WRITE_VREG(NAL_SEARCH_CTL,
9257 READ_VREG(NAL_SEARCH_CTL)
9258 | ((parser_sei_enable & 0x7) << 17));
9259#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9260 WRITE_VREG(NAL_SEARCH_CTL,
9261 READ_VREG(NAL_SEARCH_CTL) |
9262 ((parser_dolby_vision_enable & 0x1) << 20));
9263#endif
9264 config_decode_mode(hevc);
9265 /* search new nal */
9266 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9267		/* Interrupt Amrisc to execute */
9268 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9269
9270 /* hevc_print(hevc, 0,
9271 *"%s: error handle\n", __func__);
9272 */
9273 hevc->error_flag = 2;
9274 return IRQ_HANDLED;
9275 } else if (
9276#ifdef MULTI_INSTANCE_SUPPORT
9277 (!hevc->m_ins_flag) &&
9278#endif
9279 hevc->error_flag == 3) {
9280 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9281 hevc_recover(hevc);
9282 hevc->error_flag = 0;
9283
9284 if ((error_handle_policy & 0x10) == 0) {
9285 if (hevc->cur_pic) {
9286 int current_lcu_idx =
9287 READ_VREG(HEVC_PARSER_LCU_START)
9288 & 0xffffff;
9289 if (current_lcu_idx <
9290 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9291 hevc->cur_pic->error_mark = 1;
9292
9293 }
9294 }
9295 if ((error_handle_policy & 1) == 0) {
9296			/* need to skip some data when
9297			 * error_flag 3 is triggered,
9298			 */
9299			/* to avoid hevc_recover() being called
9300			 * repeatedly at the same bitstream position
9301			 */
9302 hevc->error_skip_nal_count = 1;
9303			/* manually search for NALs: skip error_skip_nal_count
9304			 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9305 */
9306 WRITE_VREG(NAL_SEARCH_CTL,
9307 (error_skip_nal_count << 4) | 0x1);
9308 }
9309
9310 if ((error_handle_policy & 0x2) == 0) {
9311 hevc->have_vps = 1;
9312 hevc->have_sps = 1;
9313 hevc->have_pps = 1;
9314 }
9315 return IRQ_HANDLED;
9316 }
9317 if (!hevc->m_ins_flag) {
9318 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9319 if ((hevc->shift_byte_count_lo & (1 << 31))
9320 && ((i & (1 << 31)) == 0))
9321 hevc->shift_byte_count_hi++;
9322 hevc->shift_byte_count_lo = i;
9323 }
9324#ifdef MULTI_INSTANCE_SUPPORT
9325 mutex_lock(&hevc->chunks_mutex);
9326 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9327 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9328 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9329 && (hevc->chunk)) {
9330 hevc->cur_pic->pts = hevc->chunk->pts;
9331 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9332 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9333 }
9334 mutex_unlock(&hevc->chunks_mutex);
9335
9336 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9337 dec_status == HEVC_DECODE_BUFEMPTY2) {
9338 if (hevc->m_ins_flag) {
9339 read_decode_info(hevc);
9340 if (vdec_frame_based(hw_to_vdec(hevc))) {
9341 hevc->empty_flag = 1;
9342 goto pic_done;
9343 } else {
9344 if (
9345#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9346 vdec->master ||
9347 vdec->slave ||
9348#endif
9349 (data_resend_policy & 0x1)) {
9350 hevc->dec_result = DEC_RESULT_AGAIN;
9351 amhevc_stop();
9352 restore_decode_state(hevc);
9353 } else
9354 hevc->dec_result = DEC_RESULT_GET_DATA;
9355 }
9356 reset_process_time(hevc);
9357 vdec_schedule_work(&hevc->work);
9358 }
9359 return IRQ_HANDLED;
9360 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9361 (dec_status == HEVC_NAL_DECODE_DONE)
9362 ) {
9363 if (hevc->m_ins_flag) {
9364 read_decode_info(hevc);
9365 if (vdec_frame_based(hw_to_vdec(hevc))) {
9366 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9367 hevc->empty_flag = 1;
9368 goto pic_done;
9369 } else {
9370 hevc->dec_result = DEC_RESULT_AGAIN;
9371 amhevc_stop();
9372 restore_decode_state(hevc);
9373 }
9374
9375 reset_process_time(hevc);
9376 vdec_schedule_work(&hevc->work);
9377 }
9378
9379 return IRQ_HANDLED;
9380 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9381 if (hevc->m_ins_flag) {
9382 struct PIC_s *pic;
9383 struct PIC_s *pic_display;
9384 int decoded_poc;
9385#ifdef DETREFILL_ENABLE
9386 if (hevc->is_swap &&
9387 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9388 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9389 && READ_VREG(HEVC_SAO_DBG_MODE0))
9390 hevc->delrefill_check = 2;
9391 }
9392#endif
9393 hevc->empty_flag = 0;
9394pic_done:
9395 if (input_frame_based(hw_to_vdec(hevc)) &&
9396 frmbase_cont_bitlevel != 0 &&
9397 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9398 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9399 > frmbase_cont_bitlevel)) {
9400				/* handle the case of multiple pictures in one packet */
9401 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9402				"%s has more data (index=%d, size=0x%x, shiftcnt=0x%x)\n",
9403 __func__,
9404 hevc->decode_idx, hevc->decode_size,
9405 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9406 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9407 start_process_time(hevc);
9408 return IRQ_HANDLED;
9409 }
9410
9411 read_decode_info(hevc);
9412 get_picture_qos_info(hevc);
9413#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9414 hevc->start_parser_type = 0;
9415 hevc->switch_dvlayer_flag = 0;
9416#endif
9417 hevc->decoded_poc = hevc->curr_POC;
9418 hevc->decoding_pic = NULL;
9419 hevc->dec_result = DEC_RESULT_DONE;
9420#ifdef DETREFILL_ENABLE
9421 if (hevc->is_swap &&
9422 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9423 if (hevc->delrefill_check != 2)
9424#endif
9425
9426 amhevc_stop();
9427
9428 reset_process_time(hevc);
9429
9430 if (hevc->vf_pre_count == 0) {
9431 decoded_poc = hevc->curr_POC;
9432 pic = get_pic_by_POC(hevc, decoded_poc);
9433 if (pic && (pic->POC != INVALID_POC)) {
9434 /*PB skip control */
9435 if (pic->error_mark == 0
9436 && hevc->PB_skip_mode == 1) {
9437 /* start decoding after
9438 * first I
9439 */
9440 hevc->ignore_bufmgr_error |= 0x1;
9441 }
9442 if (hevc->ignore_bufmgr_error & 1) {
9443 if (hevc->PB_skip_count_after_decoding > 0) {
9444 hevc->PB_skip_count_after_decoding--;
9445 } else {
9446 /* start displaying */
9447 hevc->ignore_bufmgr_error |= 0x2;
9448 }
9449 }
9450 if (hevc->mmu_enable
9451 && ((hevc->double_write_mode & 0x10) == 0)) {
9452 if (!hevc->m_ins_flag) {
9453 hevc->used_4k_num =
9454 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9455
9456 if ((!is_skip_decoding(hevc, pic)) &&
9457 (hevc->used_4k_num >= 0) &&
9458 (hevc->cur_pic->scatter_alloc
9459 == 1)) {
9460 hevc_print(hevc,
9461 H265_DEBUG_BUFMGR_MORE,
9462 "%s pic index %d scatter_alloc %d page_start %d\n",
9463 "decoder_mmu_box_free_idx_tail",
9464 hevc->cur_pic->index,
9465 hevc->cur_pic->scatter_alloc,
9466 hevc->used_4k_num);
9467 decoder_mmu_box_free_idx_tail(
9468 hevc->mmu_box,
9469 hevc->cur_pic->index,
9470 hevc->used_4k_num);
9471 hevc->cur_pic->scatter_alloc
9472 = 2;
9473 }
9474 hevc->used_4k_num = -1;
9475 }
9476 }
9477
9478 pic->output_mark = 1;
9479 pic->recon_mark = 1;
9480 }
9481 check_pic_decoded_error(hevc,
9482 READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff);
9483 if (hevc->cur_pic != NULL &&
9484 (READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff) == 0)
9485 hevc->cur_pic->error_mark = 1;
9486force_output:
9487 pic_display = output_pic(hevc, 1);
9488 if (pic_display) {
9489 if ((pic_display->error_mark &&
9490 ((hevc->ignore_bufmgr_error &
9491 0x2) == 0))
9492 || (get_dbg_flag(hevc) &
9493 H265_DEBUG_DISPLAY_CUR_FRAME)
9494 || (get_dbg_flag(hevc) &
9495 H265_DEBUG_NO_DISPLAY)) {
9496 pic_display->output_ready = 0;
9497 if (get_dbg_flag(hevc) &
9498 H265_DEBUG_BUFMGR) {
9499 hevc_print(hevc, 0,
9500 "[BM] Display: POC %d, ",
9501 pic_display->POC);
9502 hevc_print_cont(hevc, 0,
9503 "decoding index %d ==> ",
9504 pic_display->
9505 decode_idx);
9506 hevc_print_cont(hevc, 0,
9507						"Debug or error, recycle it\n");
9508 }
9509 } else {
9510 if (pic_display->
9511 slice_type != 2) {
9512 pic_display->output_ready = 0;
9513 } else {
9514 prepare_display_buf
9515 (hevc,
9516 pic_display);
9517 hevc->first_pic_flag = 1;
9518 }
9519 }
9520 }
9521 }
9522
9523 vdec_schedule_work(&hevc->work);
9524 }
9525
9526 return IRQ_HANDLED;
9527#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9528 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9529 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9530 if (hevc->m_ins_flag) {
9531 unsigned char next_parser_type =
9532 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
9533 read_decode_info(hevc);
9534
9535 if (vdec->slave &&
9536 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9537 /*cur is base, found enhance*/
9538 struct hevc_state_s *hevc_el =
9539 (struct hevc_state_s *)
9540 vdec->slave->private;
9541 hevc->switch_dvlayer_flag = 1;
9542 hevc->no_switch_dvlayer_count = 0;
9543 hevc_el->start_parser_type =
9544 next_parser_type;
9545 hevc_print(hevc, H265_DEBUG_DV,
9546 "switch (poc %d) to el\n",
9547 hevc->cur_pic ?
9548 hevc->cur_pic->POC :
9549 INVALID_POC);
9550 } else if (vdec->master &&
9551 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
9552 /*cur is enhance, found base*/
9553 struct hevc_state_s *hevc_ba =
9554 (struct hevc_state_s *)
9555 vdec->master->private;
9556 hevc->switch_dvlayer_flag = 1;
9557 hevc->no_switch_dvlayer_count = 0;
9558 hevc_ba->start_parser_type =
9559 next_parser_type;
9560 hevc_print(hevc, H265_DEBUG_DV,
9561 "switch (poc %d) to bl\n",
9562 hevc->cur_pic ?
9563 hevc->cur_pic->POC :
9564 INVALID_POC);
9565 } else {
9566 hevc->switch_dvlayer_flag = 0;
9567 hevc->start_parser_type =
9568 next_parser_type;
9569 hevc->no_switch_dvlayer_count++;
9570 hevc_print(hevc, H265_DEBUG_DV,
9571 "%s: no_switch_dvlayer_count = %d\n",
9572 vdec->master ? "el" : "bl",
9573 hevc->no_switch_dvlayer_count);
9574 if (vdec->slave &&
9575 dolby_el_flush_th != 0 &&
9576 hevc->no_switch_dvlayer_count >
9577 dolby_el_flush_th) {
9578 struct hevc_state_s *hevc_el =
9579 (struct hevc_state_s *)
9580 vdec->slave->private;
9581 struct PIC_s *el_pic;
9582 check_pic_decoded_error(hevc_el,
9583 hevc_el->pic_decoded_lcu_idx);
9584 el_pic = get_pic_by_POC(hevc_el,
9585 hevc_el->curr_POC);
9586 hevc_el->curr_POC = INVALID_POC;
9587 hevc_el->m_pocRandomAccess = MAX_INT;
9588 flush_output(hevc_el, el_pic);
9589 hevc_el->decoded_poc = INVALID_POC; /*
9590 already called flush_output */
9591 hevc_el->decoding_pic = NULL;
9592 hevc->no_switch_dvlayer_count = 0;
9593 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
9594 hevc_print(hevc, 0,
9595 "no el anymore, flush_output el\n");
9596 }
9597 }
9598 hevc->decoded_poc = hevc->curr_POC;
9599 hevc->decoding_pic = NULL;
9600 hevc->dec_result = DEC_RESULT_DONE;
9601 amhevc_stop();
9602 reset_process_time(hevc);
9603 if (aux_data_is_avaible(hevc))
9604 dolby_get_meta(hevc);
9605 if (hevc->cur_pic->slice_type == 2 &&
9606 hevc->vf_pre_count == 0) {
9607 hevc_print(hevc, 0,
9608 "first slice_type %x no_switch_dvlayer_count %x\n",
9609 hevc->cur_pic->slice_type,
9610 hevc->no_switch_dvlayer_count);
9611 goto force_output;
9612 }
9613 vdec_schedule_work(&hevc->work);
9614 }
9615
9616 return IRQ_HANDLED;
9617#endif
9618 }
9619
9620#endif
9621
9622 if (dec_status == HEVC_SEI_DAT) {
9623 if (!hevc->m_ins_flag) {
9624 int payload_type =
9625 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
9626 int payload_size =
9627 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
9628 process_nal_sei(hevc,
9629 payload_type, payload_size);
9630 }
9631 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
9632 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
9633 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
9634 int parse_type = HEVC_DISCARD_NAL;
9635
9636 hevc->error_watchdog_count = 0;
9637 hevc->error_skip_nal_wt_cnt = 0;
9638#ifdef MULTI_INSTANCE_SUPPORT
9639 if (hevc->m_ins_flag)
9640 reset_process_time(hevc);
9641#endif
9642 if (slice_parse_begin > 0 &&
9643 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
9644 hevc_print(hevc, 0,
9645 "nal type %d, discard %d\n", naltype,
9646 slice_parse_begin);
9647 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
9648 slice_parse_begin--;
9649 }
9650 if (naltype == NAL_UNIT_EOS) {
9651 struct PIC_s *pic;
9652
9653 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
9654#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9655 if ((vdec->master || vdec->slave) &&
9656 aux_data_is_avaible(hevc)) {
9657 if (hevc->decoding_pic)
9658 dolby_get_meta(hevc);
9659 }
9660#endif
9661 check_pic_decoded_error(hevc,
9662 hevc->pic_decoded_lcu_idx);
9663 pic = get_pic_by_POC(hevc, hevc->curr_POC);
9664 hevc->curr_POC = INVALID_POC;
9665 /* add to fix RAP_B_Bossen_1 */
9666 hevc->m_pocRandomAccess = MAX_INT;
9667 flush_output(hevc, pic);
9668 clear_poc_flag(hevc);
9669 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
9670 /* Interrupt Amrisc to execute */
9671 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9672#ifdef MULTI_INSTANCE_SUPPORT
9673 if (hevc->m_ins_flag) {
9674 hevc->decoded_poc = INVALID_POC; /*
9675 already called flush_output */
9676 hevc->decoding_pic = NULL;
9677 hevc->dec_result = DEC_RESULT_DONE;
9678 amhevc_stop();
9679
9680 vdec_schedule_work(&hevc->work);
9681 }
9682#endif
9683 return IRQ_HANDLED;
9684 }
9685
9686 if (
9687#ifdef MULTI_INSTANCE_SUPPORT
9688 (!hevc->m_ins_flag) &&
9689#endif
9690 hevc->error_skip_nal_count > 0) {
9691 hevc_print(hevc, 0,
9692 "nal type %d, discard %d\n", naltype,
9693 hevc->error_skip_nal_count);
9694 hevc->error_skip_nal_count--;
9695 if (hevc->error_skip_nal_count == 0) {
9696 hevc_recover(hevc);
9697 hevc->error_flag = 0;
9698 if ((error_handle_policy & 0x2) == 0) {
9699 hevc->have_vps = 1;
9700 hevc->have_sps = 1;
9701 hevc->have_pps = 1;
9702 }
9703 return IRQ_HANDLED;
9704 }
9705 } else if (naltype == NAL_UNIT_VPS) {
9706 parse_type = HEVC_NAL_UNIT_VPS;
9707 hevc->have_vps = 1;
9708#ifdef ERROR_HANDLE_DEBUG
9709 if (dbg_nal_skip_flag & 1)
9710 parse_type = HEVC_DISCARD_NAL;
9711#endif
9712 } else if (hevc->have_vps) {
9713 if (naltype == NAL_UNIT_SPS) {
9714 parse_type = HEVC_NAL_UNIT_SPS;
9715 hevc->have_sps = 1;
9716#ifdef ERROR_HANDLE_DEBUG
9717 if (dbg_nal_skip_flag & 2)
9718 parse_type = HEVC_DISCARD_NAL;
9719#endif
9720 } else if (naltype == NAL_UNIT_PPS) {
9721 parse_type = HEVC_NAL_UNIT_PPS;
9722 hevc->have_pps = 1;
9723#ifdef ERROR_HANDLE_DEBUG
9724 if (dbg_nal_skip_flag & 4)
9725 parse_type = HEVC_DISCARD_NAL;
9726#endif
9727 } else if (hevc->have_sps && hevc->have_pps) {
9728 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
9729
9730 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
9731 (naltype ==
9732 NAL_UNIT_CODED_SLICE_IDR_N_LP)
9733 || (naltype ==
9734 NAL_UNIT_CODED_SLICE_CRA)
9735 || (naltype ==
9736 NAL_UNIT_CODED_SLICE_BLA)
9737 || (naltype ==
9738 NAL_UNIT_CODED_SLICE_BLANT)
9739 || (naltype ==
9740 NAL_UNIT_CODED_SLICE_BLA_N_LP)
9741 ) {
9742 if (slice_parse_begin > 0) {
9743 hevc_print(hevc, 0,
9744 "discard %d, for debugging\n",
9745 slice_parse_begin);
9746 slice_parse_begin--;
9747 } else {
9748 parse_type = seg;
9749 }
9750 hevc->have_valid_start_slice = 1;
9751 } else if (naltype <=
9752 NAL_UNIT_CODED_SLICE_CRA
9753 && (hevc->have_valid_start_slice
9754 || (hevc->PB_skip_mode != 3))) {
9755 if (slice_parse_begin > 0) {
9756 hevc_print(hevc, 0,
9757 "discard %d, dd\n",
9758 slice_parse_begin);
9759 slice_parse_begin--;
9760 } else
9761 parse_type = seg;
9762
9763 }
9764 }
9765 }
9766 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
9767 && hevc->have_valid_start_slice &&
9768 hevc->error_flag == 0) {
9769 if ((get_dbg_flag(hevc) &
9770 H265_DEBUG_MAN_SEARCH_NAL) == 0
9771 /* && (!hevc->m_ins_flag)*/) {
9772 /* auto parse NAL; do not check
9773 *vps/sps/pps/idr
9774 */
9775 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
9776 }
9777
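/*
 * NAL_SEARCH_CTL bits as composed below: 0x2 lets the ucode parse NALs
 * automatically once vps/sps/pps and a valid start slice have been seen,
 * bit 16 is set for H265_DEBUG_NO_EOS_SEARCH_DONE or dual-layer Dolby
 * Vision streams, bits [19:17] carry parser_sei_enable, and bit 20 carries
 * parser_dolby_vision_enable.
 */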
9778 if ((get_dbg_flag(hevc) &
9779 H265_DEBUG_NO_EOS_SEARCH_DONE)
9780#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9781 || vdec->master
9782 || vdec->slave
9783#endif
9784 ) {
9785 WRITE_VREG(NAL_SEARCH_CTL,
9786 READ_VREG(NAL_SEARCH_CTL) |
9787 0x10000);
9788 }
9789 WRITE_VREG(NAL_SEARCH_CTL,
9790 READ_VREG(NAL_SEARCH_CTL)
9791 | ((parser_sei_enable & 0x7) << 17));
9792#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9793 WRITE_VREG(NAL_SEARCH_CTL,
9794 READ_VREG(NAL_SEARCH_CTL) |
9795 ((parser_dolby_vision_enable & 0x1) << 20));
9796#endif
9797 config_decode_mode(hevc);
9798 }
9799
9800 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
9801 hevc_print(hevc, 0,
9802 "naltype = %d parse_type %d\n %d %d %d %d\n",
9803 naltype, parse_type, hevc->have_vps,
9804 hevc->have_sps, hevc->have_pps,
9805 hevc->have_valid_start_slice);
9806 }
9807
9808 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
9809 /* Interrupt Amrisc to execute */
9810 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9811#ifdef MULTI_INSTANCE_SUPPORT
9812 if (hevc->m_ins_flag)
9813 start_process_time(hevc);
9814#endif
9815 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
9816#ifdef MULTI_INSTANCE_SUPPORT
9817 if (hevc->m_ins_flag) {
9818 reset_process_time(hevc);
9819 read_decode_info(hevc);
9820
9821 }
9822#endif
9823 if (hevc->start_decoding_time > 0) {
9824 u32 process_time = 1000*
9825 (jiffies - hevc->start_decoding_time)/HZ;
9826 if (process_time > max_decoding_time)
9827 max_decoding_time = process_time;
9828 }
9829
9830 hevc->error_watchdog_count = 0;
9831 if (hevc->pic_list_init_flag == 2) {
9832 hevc->pic_list_init_flag = 3;
9833 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
9834 } else if (hevc->wait_buf == 0) {
9835 u32 vui_time_scale;
9836 u32 vui_num_units_in_tick;
9837 unsigned char reconfig_flag = 0;
9838
9839 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
9840 get_rpm_param(&hevc->param);
9841 else {
9842
9843 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
9844 int ii;
9845
9846 for (ii = 0; ii < 4; ii++) {
9847 hevc->param.l.data[i + ii] =
9848 hevc->rpm_ptr[i + 3
9849 - ii];
9850 }
9851 }
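/*
 * The copy above reverses the order of each group of four 16-bit words
 * (rpm_ptr index i + 3 - ii): the ucode stores the RPM parameters with the
 * word order swapped inside every group of four, and the same re-ordering
 * is applied to the LMEM dump in vh265_isr().
 */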
9852#ifdef SEND_LMEM_WITH_RPM
9853 check_head_error(hevc);
9854#endif
9855 }
9856 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
9857 hevc_print(hevc, 0,
9858 "rpm_param: (%d)\n", hevc->slice_idx);
9859 hevc->slice_idx++;
9860 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
9861 hevc_print_cont(hevc, 0,
9862 "%04x ", hevc->param.l.data[i]);
9863 if (((i + 1) & 0xf) == 0)
9864 hevc_print_cont(hevc, 0, "\n");
9865 }
9866
9867 hevc_print(hevc, 0,
9868 "vui_timing_info: %x, %x, %x, %x\n",
9869 hevc->param.p.vui_num_units_in_tick_hi,
9870 hevc->param.p.vui_num_units_in_tick_lo,
9871 hevc->param.p.vui_time_scale_hi,
9872 hevc->param.p.vui_time_scale_lo);
9873 }
9874
9875 if (hevc->is_used_v4l) {
9876 struct aml_vcodec_ctx *ctx =
9877 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
9878
9879 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
9880 struct aml_vdec_pic_infos info;
9881
9882 hevc->frame_width = hevc->param.p.pic_width_in_luma_samples;
9883 hevc->frame_height = hevc->param.p.pic_height_in_luma_samples;
9884 info.visible_width = hevc->frame_width;
9885 info.visible_height = hevc->frame_height;
9886 info.coded_width = ALIGN(hevc->frame_width, 32);
9887 info.coded_height = ALIGN(hevc->frame_height, 32);
9888 info.dpb_size = get_work_pic_num(hevc);
9889 hevc->v4l_params_parsed = true;
9890 /* notify the v4l2 codec. */
9891 vdec_v4l_set_pic_infos(ctx, &info);
9892 }
9893 }
9894
9895 if (
9896#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9897 vdec->master == NULL &&
9898 vdec->slave == NULL &&
9899#endif
9900 aux_data_is_avaible(hevc)
9901 ) {
9902
9903 if (get_dbg_flag(hevc) &
9904 H265_DEBUG_BUFMGR_MORE)
9905 dump_aux_buf(hevc);
9906 }
9907
9908 vui_time_scale =
9909 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
9910 hevc->param.p.vui_time_scale_lo;
9911 vui_num_units_in_tick =
9912 (u32)(hevc->param.
9913 p.vui_num_units_in_tick_hi << 16) |
9914 hevc->param.
9915 p.vui_num_units_in_tick_lo;
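/*
 * param.p.bit_depth packs (luma depth - 8) in bits [3:0] and
 * (chroma depth - 8) in bits [7:4], so a value of 0x22 means 10-bit luma
 * and 10-bit chroma.
 */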
9916 if (hevc->bit_depth_luma !=
9917 ((hevc->param.p.bit_depth & 0xf) + 8)) {
9918 reconfig_flag = 1;
9919 hevc_print(hevc, 0, "Bit depth luma = %d\n",
9920 (hevc->param.p.bit_depth & 0xf) + 8);
9921 }
9922 if (hevc->bit_depth_chroma !=
9923 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
9924 reconfig_flag = 1;
9925 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
9926 ((hevc->param.p.bit_depth >> 4) &
9927 0xf) + 8);
9928 }
9929 hevc->bit_depth_luma =
9930 (hevc->param.p.bit_depth & 0xf) + 8;
9931 hevc->bit_depth_chroma =
9932 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
9933 bit_depth_luma = hevc->bit_depth_luma;
9934 bit_depth_chroma = hevc->bit_depth_chroma;
9935#ifdef SUPPORT_10BIT
9936 if (hevc->bit_depth_luma == 8 &&
9937 hevc->bit_depth_chroma == 8 &&
9938 enable_mem_saving)
9939 hevc->mem_saving_mode = 1;
9940 else
9941 hevc->mem_saving_mode = 0;
9942#endif
9943 if (reconfig_flag &&
9944 (get_double_write_mode(hevc) & 0x10) == 0)
9945 init_decode_head_hw(hevc);
9946
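/*
 * frame_dur below is expressed in 1/96000 s units
 * (96000 * num_units_in_tick / time_scale), e.g. a 25 fps VUI gives
 * frame_dur = 3840 and a 30 fps VUI gives 3200.
 */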
9947 if ((vui_time_scale != 0)
9948 && (vui_num_units_in_tick != 0)) {
9949 hevc->frame_dur =
9950 div_u64(96000ULL *
9951 vui_num_units_in_tick,
9952 vui_time_scale);
9953 if (hevc->get_frame_dur != true)
9954 vdec_schedule_work(
9955 &hevc->notify_work);
9956
9957 hevc->get_frame_dur = true;
9958#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
9959 gvs->frame_dur = hevc->frame_dur;
9960#endif
9961 }
9962
9963 if (hevc->video_signal_type !=
9964 ((hevc->param.p.video_signal_type << 16)
9965 | hevc->param.p.color_description)) {
9966 u32 v = hevc->param.p.video_signal_type;
9967 u32 c = hevc->param.p.color_description;
9968#if 0
9969 if (v & 0x2000) {
9970 hevc_print(hevc, 0,
9971 "video_signal_type present:\n");
9972 hevc_print(hevc, 0, " %s %s\n",
9973 video_format_names[(v >> 10) & 7],
9974 ((v >> 9) & 1) ?
9975 "full_range" : "limited");
9976 if (v & 0x100) {
9977 hevc_print(hevc, 0,
9978 " color_description present:\n");
9979 hevc_print(hevc, 0,
9980 " color_primarie = %s\n",
9981 color_primaries_names
9982 [v & 0xff]);
9983 hevc_print(hevc, 0,
9984 " transfer_characteristic = %s\n",
9985 transfer_characteristics_names
9986 [(c >> 8) & 0xff]);
9987 hevc_print(hevc, 0,
9988 " matrix_coefficient = %s\n",
9989 matrix_coeffs_names[c & 0xff]);
9990 }
9991 }
9992#endif
9993 hevc->video_signal_type = (v << 16) | c;
9994 video_signal_type = hevc->video_signal_type;
9995 }
9996
9997 if (use_cma &&
9998 (hevc->param.p.slice_segment_address == 0)
9999 && (hevc->pic_list_init_flag == 0)) {
10000 int log = hevc->param.p.log2_min_coding_block_size_minus3;
10001 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
10002
10003 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10004 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
10005 hevc->lcu_size = 1 << (log + 3 + log_s);
10006 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
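/*
 * Example: log2_min_coding_block_size_minus3 == 0 and
 * log2_diff_max_min_coding_block_size == 3 give
 * lcu_size = 1 << (0 + 3 + 3) = 64, i.e. a 64x64 LCU.
 */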
10007 if (hevc->pic_w == 0 || hevc->pic_h == 0
10008 || hevc->lcu_size == 0
10009 || is_oversize(hevc->pic_w, hevc->pic_h)
10010 || (!hevc->skip_first_nal &&
10011 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
10012 /* skip search next start code */
10013 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10014 & (~0x2));
10015 if (!hevc->skip_first_nal &&
10016 (hevc->pic_h == 96) && (hevc->pic_w == 160))
10017 hevc->skip_first_nal = 1;
10018 hevc->skip_flag = 1;
10019 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10020 /* Interrupt Amrisc to execute */
10021 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10022#ifdef MULTI_INSTANCE_SUPPORT
10023 if (hevc->m_ins_flag)
10024 start_process_time(hevc);
10025#endif
10026 } else {
10027 hevc->sps_num_reorder_pics_0 =
10028 hevc->param.p.sps_num_reorder_pics_0;
10029 hevc->pic_list_init_flag = 1;
10030#ifdef MULTI_INSTANCE_SUPPORT
10031 if (hevc->m_ins_flag) {
10032 vdec_schedule_work(&hevc->work);
10033 } else
10034#endif
10035 up(&h265_sema);
10036 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10037 }
10038 return IRQ_HANDLED;
10039 }
10040
10041 }
10042 ret =
10043 hevc_slice_segment_header_process(hevc,
10044 &hevc->param, decode_pic_begin);
10045 if (ret < 0) {
10046#ifdef MULTI_INSTANCE_SUPPORT
10047 if (hevc->m_ins_flag) {
10048 hevc->wait_buf = 0;
10049 hevc->dec_result = DEC_RESULT_AGAIN;
10050 amhevc_stop();
10051 restore_decode_state(hevc);
10052 reset_process_time(hevc);
10053 vdec_schedule_work(&hevc->work);
10054 return IRQ_HANDLED;
10055 }
10056#else
10057 ;
10058#endif
10059 } else if (ret == 0) {
10060 if ((hevc->new_pic) && (hevc->cur_pic)) {
10061 hevc->cur_pic->stream_offset =
10062 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10063 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10064 "read stream_offset = 0x%x\n",
10065 hevc->cur_pic->stream_offset);
10066 hevc->cur_pic->aspect_ratio_idc =
10067 hevc->param.p.aspect_ratio_idc;
10068 hevc->cur_pic->sar_width =
10069 hevc->param.p.sar_width;
10070 hevc->cur_pic->sar_height =
10071 hevc->param.p.sar_height;
10072 }
10073
10074 WRITE_VREG(HEVC_DEC_STATUS_REG,
10075 HEVC_CODED_SLICE_SEGMENT_DAT);
10076 /* Interrupt Amrisc to execute */
10077 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10078
10079 hevc->start_decoding_time = jiffies;
10080#ifdef MULTI_INSTANCE_SUPPORT
10081 if (hevc->m_ins_flag)
10082 start_process_time(hevc);
10083#endif
10084#if 1
10085 /*to do..., copy aux data to hevc->cur_pic*/
10086#endif
10087#ifdef MULTI_INSTANCE_SUPPORT
10088 } else if (hevc->m_ins_flag) {
10089 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10090 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10091 __func__, ret);
10092 hevc->decoded_poc = INVALID_POC;
10093 hevc->decoding_pic = NULL;
10094 hevc->dec_result = DEC_RESULT_DONE;
10095 amhevc_stop();
10096 reset_process_time(hevc);
10097 vdec_schedule_work(&hevc->work);
10098#endif
10099 } else {
10100 /* skip, search next start code */
10101#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10102 gvs->drop_frame_count++;
10103#endif
10104 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10105 hevc->skip_flag = 1;
10106 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10107 /* Interrupt Amrisc to execute */
10108 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10109 }
10110
10111 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10112 hevc_print(hevc, 0, "hevc decode oversize !!\n");
10113#ifdef MULTI_INSTANCE_SUPPORT
10114 if (!hevc->m_ins_flag)
10115 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10116 H265_DEBUG_DIS_SYS_ERROR_PROC);
10117#endif
10118 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10119 }
10120 return IRQ_HANDLED;
10121}
10122
10123static void wait_hevc_search_done(struct hevc_state_s *hevc)
10124{
10125 int count = 0;
10126 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10127 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10128 msleep(20);
10129 count++;
10130 if (count > 100) {
10131 hevc_print(hevc, 0, "%s timeout\n", __func__);
10132 break;
10133 }
10134 }
10135}
10136static irqreturn_t vh265_isr(int irq, void *data)
10137{
10138 int i, temp;
10139 unsigned int dec_status;
10140 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10141 u32 debug_tag;
10142 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10143
10144 if (hevc->init_flag == 0)
10145 return IRQ_HANDLED;
10146 hevc->dec_status = dec_status;
10147 if (is_log_enable(hevc))
10148 add_log(hevc,
10149 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10150 dec_status, READ_HREG(HEVC_DECODE_INFO),
10151 READ_VREG(HEVC_MPRED_CURR_LCU),
10152 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10153 READ_VREG(HEVC_SHIFT_STATUS));
10154
10155 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10156 hevc_print(hevc, 0,
10157 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10158 dec_status, READ_HREG(HEVC_DECODE_INFO),
10159 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10160 READ_VREG(HEVC_SHIFT_STATUS));
10161
10162 debug_tag = READ_HREG(DEBUG_REG1);
10163 if (debug_tag & 0x10000) {
10164 hevc_print(hevc, 0,
10165 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10166
10167 if (hevc->mmu_enable)
10168 temp = 0x500;
10169 else
10170 temp = 0x400;
10171 for (i = 0; i < temp; i += 4) {
10172 int ii;
10173 if ((i & 0xf) == 0)
10174 hevc_print_cont(hevc, 0, "%03x: ", i);
10175 for (ii = 0; ii < 4; ii++) {
10176 hevc_print_cont(hevc, 0, "%04x ",
10177 hevc->lmem_ptr[i + 3 - ii]);
10178 }
10179 if (((i + ii) & 0xf) == 0)
10180 hevc_print_cont(hevc, 0, "\n");
10181 }
10182
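/*
 * udebug pause handshake (logic below): when the tag in DEBUG_REG1 matches
 * udebug_pause_pos (and the optional decode-index / DEBUG_REG2 filters),
 * ucode_pause_pos is latched and DEBUG_REG1 is left non-zero so the ucode
 * stays parked at the debug point; otherwise DEBUG_REG1 is cleared and the
 * ucode continues.
 */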
10183 if (((udebug_pause_pos & 0xffff)
10184 == (debug_tag & 0xffff)) &&
10185 (udebug_pause_decode_idx == 0 ||
10186 udebug_pause_decode_idx == hevc->decode_idx) &&
10187 (udebug_pause_val == 0 ||
10188 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10189 udebug_pause_pos &= 0xffff;
10190 hevc->ucode_pause_pos = udebug_pause_pos;
10191 }
10192 else if (debug_tag & 0x20000)
10193 hevc->ucode_pause_pos = 0xffffffff;
10194 if (hevc->ucode_pause_pos)
10195 reset_process_time(hevc);
10196 else
10197 WRITE_HREG(DEBUG_REG1, 0);
10198 } else if (debug_tag != 0) {
10199 hevc_print(hevc, 0,
10200 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10201 READ_HREG(DEBUG_REG2),
10202 READ_VREG(HEVC_STREAM_LEVEL),
10203 READ_VREG(HEVC_STREAM_WR_PTR),
10204 READ_VREG(HEVC_STREAM_RD_PTR));
10205 if (((udebug_pause_pos & 0xffff)
10206 == (debug_tag & 0xffff)) &&
10207 (udebug_pause_decode_idx == 0 ||
10208 udebug_pause_decode_idx == hevc->decode_idx) &&
10209 (udebug_pause_val == 0 ||
10210 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10211 udebug_pause_pos &= 0xffff;
10212 hevc->ucode_pause_pos = udebug_pause_pos;
10213 }
10214 if (hevc->ucode_pause_pos)
10215 reset_process_time(hevc);
10216 else
10217 WRITE_HREG(DEBUG_REG1, 0);
10218 return IRQ_HANDLED;
10219 }
10220
10221
10222 if (hevc->pic_list_init_flag == 1)
10223 return IRQ_HANDLED;
10224
10225 if (!hevc->m_ins_flag) {
10226 if (dec_status == HEVC_OVER_DECODE) {
10227 hevc->over_decode = 1;
10228 hevc_print(hevc, 0,
10229 "isr: over decode\n"),
10230 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10231 return IRQ_HANDLED;
10232 }
10233 }
10234
10235 return IRQ_WAKE_THREAD;
10236
10237}
10238
10239static void vh265_set_clk(struct work_struct *work)
10240{
10241 struct hevc_state_s *hevc = container_of(work,
10242 struct hevc_state_s, set_clk_work);
10243
10244 int fps = 96000 / hevc->frame_dur;
10245
10246 if (hevc_source_changed(VFORMAT_HEVC,
10247 hevc->frame_width, hevc->frame_height, fps) > 0)
10248 hevc->saved_resolution = hevc->frame_width *
10249 hevc->frame_height * fps;
10250}
10251
10252static void vh265_check_timer_func(unsigned long arg)
10253{
10254 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10255 struct timer_list *timer = &hevc->timer;
10256 unsigned char empty_flag;
10257 unsigned int buf_level;
10258
10259 enum receviver_start_e state = RECEIVER_INACTIVE;
10260
10261 if (hevc->init_flag == 0) {
10262 if (hevc->stat & STAT_TIMER_ARM) {
10263 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10264 }
10265 return;
10266 }
10267#ifdef MULTI_INSTANCE_SUPPORT
10268 if (hevc->m_ins_flag &&
10269 (get_dbg_flag(hevc) &
10270 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10271 hw_to_vdec(hevc)->next_status ==
10272 VDEC_STATUS_DISCONNECTED) {
10273 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10274 vdec_schedule_work(&hevc->work);
10275 hevc_print(hevc,
10276 0, "vdec requested to be disconnected\n");
10277 return;
10278 }
10279
10280 if (hevc->m_ins_flag) {
10281 if ((input_frame_based(hw_to_vdec(hevc)) ||
10282 (READ_VREG(HEVC_STREAM_LEVEL) > 0xb0)) &&
10283 ((get_dbg_flag(hevc) &
10284 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10285 (decode_timeout_val > 0) &&
10286 (hevc->start_process_time > 0) &&
10287 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10288 > decode_timeout_val)
10289 ) {
10290 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10291 int current_lcu_idx =
10292 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10293 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10294 if (hevc->last_lcu_idx == current_lcu_idx) {
10295 if (hevc->decode_timeout_count > 0)
10296 hevc->decode_timeout_count--;
10297 if (hevc->decode_timeout_count == 0)
10298 timeout_process(hevc);
10299 } else
10300 restart_process_time(hevc);
10301 hevc->last_lcu_idx = current_lcu_idx;
10302 } else {
10303 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10304 timeout_process(hevc);
10305 }
10306 }
10307 } else {
10308#endif
10309 if (hevc->m_ins_flag == 0 &&
10310 vf_get_receiver(hevc->provider_name)) {
10311 state =
10312 vf_notify_receiver(hevc->provider_name,
10313 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10314 NULL);
10315 if ((state == RECEIVER_STATE_NULL)
10316 || (state == RECEIVER_STATE_NONE))
10317 state = RECEIVER_INACTIVE;
10318 } else
10319 state = RECEIVER_INACTIVE;
10320
10321 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10322 /* error watchdog */
10323 if (hevc->m_ins_flag == 0 &&
10324 (empty_flag == 0)
10325 && (hevc->pic_list_init_flag == 0
10326 || hevc->pic_list_init_flag
10327 == 3)) {
10328 /* decoder has input */
10329 if ((get_dbg_flag(hevc) &
10330 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10331
10332 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10333 /* receiver has no buffer to recycle */
10334 if ((state == RECEIVER_INACTIVE) &&
10335 (kfifo_is_empty(&hevc->display_q) &&
10336 buf_level > 0x200)
10337 ) {
10338 if (hevc->error_flag == 0) {
10339 hevc->error_watchdog_count++;
10340 if (hevc->error_watchdog_count ==
10341 error_handle_threshold) {
10342 hevc_print(hevc, 0,
10343 "H265 dec err local reset.\n");
10344 hevc->error_flag = 1;
10345 hevc->error_watchdog_count = 0;
10346 hevc->error_skip_nal_wt_cnt = 0;
10347 hevc->
10348 error_system_watchdog_count++;
10349 WRITE_VREG
10350 (HEVC_ASSIST_MBOX0_IRQ_REG,
10351 0x1);
10352 }
10353 } else if (hevc->error_flag == 2) {
10354 int th =
10355 error_handle_nal_skip_threshold;
10356 hevc->error_skip_nal_wt_cnt++;
10357 if (hevc->error_skip_nal_wt_cnt
10358 == th) {
10359 hevc->error_flag = 3;
10360 hevc->error_watchdog_count = 0;
10361 hevc->
10362 error_skip_nal_wt_cnt = 0;
10363 WRITE_VREG
10364 (HEVC_ASSIST_MBOX0_IRQ_REG,
10365 0x1);
10366 }
10367 }
10368 }
10369 }
10370
10371 if ((get_dbg_flag(hevc)
10372 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10373 /* receiver has no buffer to recycle */
10374 if ((state == RECEIVER_INACTIVE) &&
10375 (kfifo_is_empty(&hevc->display_q))
10376 ) { /* no buffer to recycle */
10377 if ((get_dbg_flag(hevc) &
10378 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10379 0)
10380 hevc->error_system_watchdog_count++;
10381 if (hevc->error_system_watchdog_count ==
10382 error_handle_system_threshold) {
10383 /* and it lasts for a while */
10384 hevc_print(hevc, 0,
10385 "H265 dec fatal error watchdog.\n");
10386 hevc->
10387 error_system_watchdog_count = 0;
10388 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10389 }
10390 }
10391 } else {
10392 hevc->error_watchdog_count = 0;
10393 hevc->error_system_watchdog_count = 0;
10394 }
10395#ifdef MULTI_INSTANCE_SUPPORT
10396 }
10397#endif
10398 if ((hevc->ucode_pause_pos != 0) &&
10399 (hevc->ucode_pause_pos != 0xffffffff) &&
10400 udebug_pause_pos != hevc->ucode_pause_pos) {
10401 hevc->ucode_pause_pos = 0;
10402 WRITE_HREG(DEBUG_REG1, 0);
10403 }
10404
10405 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10406 dump_pic_list(hevc);
10407 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10408 }
10409 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10410 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10411 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10412 }
10413#ifdef TEST_NO_BUF
10414 if (hevc->wait_buf)
10415 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10416#endif
10417 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
10418 hevc->error_skip_nal_count = error_skip_nal_count;
10419 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10420
10421 debug &= ~H265_DEBUG_HW_RESET;
10422 }
10423
10424#ifdef ERROR_HANDLE_DEBUG
10425 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
10426 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
10427 dbg_nal_skip_count &= ~0x10000;
10428 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10429 }
10430#endif
10431
10432 if (radr != 0) {
10433 if (rval != 0) {
10434 WRITE_VREG(radr, rval);
10435 hevc_print(hevc, 0,
10436 "WRITE_VREG(%x,%x)\n", radr, rval);
10437 } else
10438 hevc_print(hevc, 0,
10439 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
10440 rval = 0;
10441 radr = 0;
10442 }
10443 if (dbg_cmd != 0) {
10444 if (dbg_cmd == 1) {
10445 u32 disp_laddr;
10446
10447 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
10448 get_double_write_mode(hevc) == 0) {
10449 disp_laddr =
10450 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
10451 } else {
10452 struct canvas_s cur_canvas;
10453
10454 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
10455 & 0xff), &cur_canvas);
10456 disp_laddr = cur_canvas.addr;
10457 }
10458 hevc_print(hevc, 0,
10459 "current displayed buffer address %x\r\n",
10460 disp_laddr);
10461 }
10462 dbg_cmd = 0;
10463 }
10464 /* don't change at start. */
10465 if (hevc->m_ins_flag == 0 &&
10466 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
10467 hevc->frame_dur > 0 && hevc->saved_resolution !=
10468 hevc->frame_width * hevc->frame_height *
10469 (96000 / hevc->frame_dur))
10470 vdec_schedule_work(&hevc->set_clk_work);
10471
10472 mod_timer(timer, jiffies + PUT_INTERVAL);
10473}
10474
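/*
 * h265_task_handle: in the single-instance/use_cma path other contexts only
 * raise h265_sema (the slice-header handler or vh265_stop); this kthread
 * then performs the deferred picture-list initialization
 * (pic_list_init_flag 1 -> 2) and, on teardown, the uninit_pic_list() work
 * outside of interrupt context.
 */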
10475static int h265_task_handle(void *data)
10476{
10477 int ret = 0;
10478 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10479
10480 set_user_nice(current, -10);
10481 while (1) {
10482 if (use_cma == 0) {
10483 hevc_print(hevc, 0,
10484 "ERROR: use_cma can not be changed dynamically\n");
10485 }
10486 ret = down_interruptible(&h265_sema);
10487 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
10488 init_pic_list(hevc);
10489 init_pic_list_hw(hevc);
10490 init_buf_spec(hevc);
10491 hevc->pic_list_init_flag = 2;
10492 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
10493
10494 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10495
10496 }
10497
10498 if (hevc->uninit_list) {
10499 /*USE_BUF_BLOCK*/
10500 uninit_pic_list(hevc);
10501 hevc_print(hevc, 0, "uninit list\n");
10502 hevc->uninit_list = 0;
10503#ifdef USE_UNINIT_SEMA
10504 if (use_cma) {
10505 up(&hevc->h265_uninit_done_sema);
10506 while (!kthread_should_stop())
10507 msleep(1);
10508 break;
10509 }
10510#endif
10511 }
10512 }
10513
10514 return 0;
10515}
10516
10517void vh265_free_cmabuf(void)
10518{
10519 struct hevc_state_s *hevc = gHevc;
10520
10521 mutex_lock(&vh265_mutex);
10522
10523 if (hevc->init_flag) {
10524 mutex_unlock(&vh265_mutex);
10525 return;
10526 }
10527
10528 mutex_unlock(&vh265_mutex);
10529}
10530
10531#ifdef MULTI_INSTANCE_SUPPORT
10532int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
10533#else
10534int vh265_dec_status(struct vdec_info *vstatus)
10535#endif
10536{
10537#ifdef MULTI_INSTANCE_SUPPORT
10538 struct hevc_state_s *hevc =
10539 (struct hevc_state_s *)vdec->private;
10540#else
10541 struct hevc_state_s *hevc = gHevc;
10542#endif
10543 if (!hevc)
10544 return -1;
10545
10546 vstatus->frame_width = hevc->frame_width;
10547 vstatus->frame_height = hevc->frame_height;
10548 if (hevc->frame_dur != 0)
10549 vstatus->frame_rate = 96000 / hevc->frame_dur;
10550 else
10551 vstatus->frame_rate = -1;
10552 vstatus->error_count = 0;
10553 vstatus->status = hevc->stat | hevc->fatal_error;
10554#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10556 vstatus->frame_dur = hevc->frame_dur;
10557 if (gvs) {
10558 vstatus->bit_rate = gvs->bit_rate;
10559 vstatus->frame_data = gvs->frame_data;
10560 vstatus->total_data = gvs->total_data;
10561 vstatus->frame_count = gvs->frame_count;
10562 vstatus->error_frame_count = gvs->error_frame_count;
10563 vstatus->drop_frame_count = gvs->drop_frame_count;
10565 vstatus->samp_cnt = gvs->samp_cnt;
10566 vstatus->offset = gvs->offset;
10567 }
10568 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
10569 "%s", DRIVER_NAME);
10570#endif
10571 vstatus->ratio_control = hevc->ratio_control;
10572 return 0;
10573}
10574
10575int vh265_set_isreset(struct vdec_s *vdec, int isreset)
10576{
10577 is_reset = isreset;
10578 return 0;
10579}
10580
10581static int vh265_vdec_info_init(void)
10582{
10583 gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
10584 if (NULL == gvs) {
10585 pr_info("the struct of vdec status malloc failed.\n");
10586 return -ENOMEM;
10587 }
10588 return 0;
10589}
10590
10591#if 0
10592static void H265_DECODE_INIT(void)
10593{
10594 /* enable hevc clocks */
10595 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
10596 /* *************************************************************** */
10597 /* Power ON HEVC */
10598 /* *************************************************************** */
10599 /* Powerup HEVC */
10600 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
10601 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
10602 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
10603 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
10604 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
10605 /* remove isolations */
10606 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
10607 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
10608
10609}
10610#endif
10611
10612static void config_decode_mode(struct hevc_state_s *hevc)
10613{
10614#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10615 struct vdec_s *vdec = hw_to_vdec(hevc);
10616#endif
10617 unsigned decode_mode;
10618 if (!hevc->m_ins_flag)
10619 decode_mode = DECODE_MODE_SINGLE;
10620 else if (vdec_frame_based(hw_to_vdec(hevc)))
10621 decode_mode =
10622 DECODE_MODE_MULTI_FRAMEBASE;
10623#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10624 else if (vdec->slave) {
10625 if (force_bypass_dvenl & 0x80000000)
10626 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
10627 else
10628 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
10629 if (dolby_meta_with_el && hevc->bypass_dvenl) {
10630 hevc->bypass_dvenl = 0;
10631 hevc_print(hevc, 0,
10632 "NOT support bypass_dvenl when meta_with_el\n");
10633 }
10634 if (hevc->bypass_dvenl)
10635 decode_mode =
10636 (hevc->start_parser_type << 8)
10637 | DECODE_MODE_MULTI_STREAMBASE;
10638 else
10639 decode_mode =
10640 (hevc->start_parser_type << 8)
10641 | DECODE_MODE_MULTI_DVBAL;
10642 } else if (vdec->master)
10643 decode_mode =
10644 (hevc->start_parser_type << 8)
10645 | DECODE_MODE_MULTI_DVENL;
10646#endif
10647 else
10648 decode_mode =
10649 DECODE_MODE_MULTI_STREAMBASE;
10650
10651 if (hevc->m_ins_flag)
10652 decode_mode |=
10653 (hevc->start_decoding_flag << 16);
10654 /* set MBX0 interrupt flag */
10655 decode_mode |= (0x80 << 24);
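/*
 * HEVC_DECODE_MODE as composed above: bits [7:0] hold the base decode mode,
 * bits [15:8] the start_parser_type used by the Dolby Vision paths,
 * bits [23:16] start_decoding_flag, and bits [31:24] the mailbox select
 * (0x80 -> MBX0 interrupt).
 */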
10656 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
10657 WRITE_VREG(HEVC_DECODE_MODE2,
10658 hevc->rps_set_id);
10659}
10660
10661static void vh265_prot_init(struct hevc_state_s *hevc)
10662{
10663#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10664 struct vdec_s *vdec = hw_to_vdec(hevc);
10665#endif
10666 /* H265_DECODE_INIT(); */
10667
10668 hevc_config_work_space_hw(hevc);
10669
10670 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
10671
10672 WRITE_VREG(HEVC_WAIT_FLAG, 1);
10673
10674 /* WRITE_VREG(P_HEVC_MPSR, 1); */
10675
10676 /* clear mailbox interrupt */
10677 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
10678
10679 /* enable mailbox interrupt */
10680 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
10681
10682 /* disable PSCALE for hardware sharing */
10683 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
10684
10685 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
10686
10687 if ((get_dbg_flag(hevc) &
10688 (H265_DEBUG_MAN_SKIP_NAL |
10689 H265_DEBUG_MAN_SEARCH_NAL))
10690 /*||hevc->m_ins_flag*/
10691 ) {
10692 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
10693 } else {
10694 /* check vps/sps/pps/i-slice in ucode */
10695 unsigned ctl_val = 0x8;
10696 if (hevc->PB_skip_mode == 0)
10697 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
10698 else if (hevc->PB_skip_mode == 3)
10699 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
10700 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
10701 }
10702 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
10703#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10704 || vdec->master
10705 || vdec->slave
10706#endif
10707 )
10708 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
10709
10710 WRITE_VREG(NAL_SEARCH_CTL,
10711 READ_VREG(NAL_SEARCH_CTL)
10712 | ((parser_sei_enable & 0x7) << 17));
10713#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10714 WRITE_VREG(NAL_SEARCH_CTL,
10715 READ_VREG(NAL_SEARCH_CTL) |
10716 ((parser_dolby_vision_enable & 0x1) << 20));
10717#endif
10718 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
10719
10720 config_decode_mode(hevc);
10721 config_aux_buf(hevc);
10722#ifdef SWAP_HEVC_UCODE
10723 if (!tee_enabled() && hevc->is_swap &&
10724 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10725 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
10726 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
10727 }
10728#endif
10729#ifdef DETREFILL_ENABLE
10730 if (hevc->is_swap &&
10731 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10732 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
10733 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
10734 }
10735#endif
10736}
10737
10738static int vh265_local_init(struct hevc_state_s *hevc)
10739{
10740 int i;
10741 int ret = -1;
10742
10743#ifdef DEBUG_PTS
10744 hevc->pts_missed = 0;
10745 hevc->pts_hit = 0;
10746#endif
10747
10748 hevc->saved_resolution = 0;
10749 hevc->get_frame_dur = false;
10750 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
10751 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
10752 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
10753 pr_info("over size : %u x %u.\n",
10754 hevc->frame_width, hevc->frame_height);
10755 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10756 return ret;
10757 }
10758
10759 if (hevc->max_pic_w && hevc->max_pic_h) {
10760 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
10761 ((hevc->max_pic_w * hevc->max_pic_h) >
10762 1920 * 1088) ? true : false;
10763 } else {
10764 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
10765 ((hevc->frame_width * hevc->frame_height) >
10766 1920 * 1088) ? true : false;
10767 }
10768
10769 hevc->frame_dur =
10770 (hevc->vh265_amstream_dec_info.rate ==
10771 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
10772#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10773 gvs->frame_dur = hevc->frame_dur;
10774#endif
10775 if (hevc->frame_width && hevc->frame_height)
10776 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
10777
10778 if (i_only_flag)
10779 hevc->i_only = i_only_flag & 0xff;
10780 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
10781 & 0x08)
10782 hevc->i_only = 0x7;
10783 else
10784 hevc->i_only = 0x0;
10785 hevc->error_watchdog_count = 0;
10786 hevc->sei_present_flag = 0;
10787 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
10788 & 0x40) >> 6;
10789 hevc_print(hevc, 0,
10790 "h265:pts_unstable=%d\n", pts_unstable);
10791/*
10792 *TODO:FOR VERSION
10793 */
10794 hevc_print(hevc, 0,
10795 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
10796 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
10797
10798 if (hevc->frame_dur == 0)
10799 hevc->frame_dur = 96000 / 24;
10800
10801 INIT_KFIFO(hevc->display_q);
10802 INIT_KFIFO(hevc->newframe_q);
10803 INIT_KFIFO(hevc->pending_q);
10804
10805 for (i = 0; i < VF_POOL_SIZE; i++) {
10806 const struct vframe_s *vf = &hevc->vfpool[i];
10807
10808 hevc->vfpool[i].index = -1;
10809 kfifo_put(&hevc->newframe_q, vf);
10810 }
10811
10812
10813 ret = hevc_local_init(hevc);
10814
10815 return ret;
10816}
10817#ifdef MULTI_INSTANCE_SUPPORT
10818static s32 vh265_init(struct vdec_s *vdec)
10819{
10820 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
10821#else
10822static s32 vh265_init(struct hevc_state_s *hevc)
10823{
10824
10825#endif
10826 int ret, size = -1;
10827 int fw_size = 0x1000 * 16;
10828 struct firmware_s *fw = NULL;
10829
10830 init_timer(&hevc->timer);
10831
10832 hevc->stat |= STAT_TIMER_INIT;
10833
10834 if (hevc->m_ins_flag) {
10835#ifdef USE_UNINIT_SEMA
10836 sema_init(&hevc->h265_uninit_done_sema, 0);
10837#endif
10838 INIT_WORK(&hevc->work, vh265_work);
10839 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
10840 }
10841
10842 if (vh265_local_init(hevc) < 0)
10843 return -EBUSY;
10844
10845 mutex_init(&hevc->chunks_mutex);
10846 INIT_WORK(&hevc->notify_work, vh265_notify_work);
10847 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
10848
10849 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
10850 if (IS_ERR_OR_NULL(fw))
10851 return -ENOMEM;
10852
10853 if (hevc->mmu_enable)
10854 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
10855 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
10856 else {
10857 if (!hevc->is_4k) {
10858 /* if an older version of the fw was loaded, */
10859 /* need to try the noswap fw because the */
10860 /* old fw package does not contain the swap fw. */
10861 size = get_firmware_data(
10862 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
10863 if (size < 0)
10864 size = get_firmware_data(
10865 VIDEO_DEC_HEVC_MMU, fw->data);
10866 else if (size)
10867 hevc->is_swap = true;
10868 } else
10869 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
10870 fw->data);
10871 }
10872 else
10873 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
10874
10875 if (size < 0) {
10876 pr_err("get firmware fail.\n");
10877 vfree(fw);
10878 return -1;
10879 }
10880
10881 fw->len = size;
10882
10883#ifdef SWAP_HEVC_UCODE
10884 if (!tee_enabled() && hevc->is_swap &&
10885 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10886 if (hevc->mmu_enable) {
10887 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
10888 hevc->mc_cpu_addr =
10889 dma_alloc_coherent(amports_get_dma_device(),
10890 hevc->swap_size,
10891 &hevc->mc_dma_handle, GFP_KERNEL);
10892 if (!hevc->mc_cpu_addr) {
10893 amhevc_disable();
10894 pr_info("vh265 mmu swap ucode loaded fail.\n");
10895 return -ENOMEM;
10896 }
10897
10898 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
10899 hevc->swap_size);
10900
10901 hevc_print(hevc, 0,
10902 "vh265 mmu ucode swap loaded %x\n",
10903 hevc->mc_dma_handle);
10904 }
10905 }
10906#endif
10907
10908#ifdef MULTI_INSTANCE_SUPPORT
10909 if (hevc->m_ins_flag) {
10910 hevc->timer.data = (ulong) hevc;
10911 hevc->timer.function = vh265_check_timer_func;
10912 hevc->timer.expires = jiffies + PUT_INTERVAL;
10913
10914 hevc->fw = fw;
10915
10916 return 0;
10917 }
10918#endif
10919 amhevc_enable();
10920
10921 if (hevc->mmu_enable)
10922 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
10923 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
10924 else {
10925 if (!hevc->is_4k) {
10926 /* if an older version of the fw was loaded, */
10927 /* need to try the noswap fw because the */
10928 /* old fw package does not contain the swap fw. */
10929 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
10930 "hevc_mmu_swap", fw->data);
10931 if (ret < 0)
10932 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
10933 "h265_mmu", fw->data);
10934 else
10935 hevc->is_swap = true;
10936 } else
10937 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
10938 "h265_mmu", fw->data);
10939 }
10940 else
10941 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
10942
10943 if (ret < 0) {
10944 amhevc_disable();
10945 vfree(fw);
10946 pr_err("H265: the %s fw loading failed, err: %x\n",
10947 tee_enabled() ? "TEE" : "local", ret);
10948 return -EBUSY;
10949 }
10950
10951 vfree(fw);
10952
10953 hevc->stat |= STAT_MC_LOAD;
10954
10955#ifdef DETREFILL_ENABLE
10956 if (hevc->is_swap &&
10957 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
10958 init_detrefill_buf(hevc);
10959#endif
10960 /* enable AMRISC side protocol */
10961 vh265_prot_init(hevc);
10962
10963 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
10964 vh265_isr_thread_fn,
10965 IRQF_ONESHOT,/*run thread on this irq disabled*/
10966 "vh265-irq", (void *)hevc)) {
10967 hevc_print(hevc, 0, "vh265 irq register error.\n");
10968 amhevc_disable();
10969 return -ENOENT;
10970 }
10971
10972 hevc->stat |= STAT_ISR_REG;
10973 hevc->provider_name = PROVIDER_NAME;
10974
10975#ifdef MULTI_INSTANCE_SUPPORT
10976 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
10977 &vh265_vf_provider, vdec);
10978 vf_reg_provider(&vh265_vf_prov);
10979 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
10980 NULL);
10981 if (hevc->frame_dur != 0) {
10982 if (!is_reset) {
10983 vf_notify_receiver(hevc->provider_name,
10984 VFRAME_EVENT_PROVIDER_FR_HINT,
10985 (void *)
10986 ((unsigned long)hevc->frame_dur));
10987 fr_hint_status = VDEC_HINTED;
10988 }
10989 } else
10990 fr_hint_status = VDEC_NEED_HINT;
10991#else
10992 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
10993 hevc);
10994 vf_reg_provider(&vh265_vf_prov);
10995 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
10996 if (hevc->frame_dur != 0) {
10997 vf_notify_receiver(PROVIDER_NAME,
10998 VFRAME_EVENT_PROVIDER_FR_HINT,
10999 (void *)
11000 ((unsigned long)hevc->frame_dur));
11001 fr_hint_status = VDEC_HINTED;
11002 } else
11003 fr_hint_status = VDEC_NEED_HINT;
11004#endif
11005 hevc->stat |= STAT_VF_HOOK;
11006
11007 hevc->timer.data = (ulong) hevc;
11008 hevc->timer.function = vh265_check_timer_func;
11009 hevc->timer.expires = jiffies + PUT_INTERVAL;
11010
11011 add_timer(&hevc->timer);
11012
11013 hevc->stat |= STAT_TIMER_ARM;
11014
11015 if (use_cma) {
11016#ifdef USE_UNINIT_SEMA
11017 sema_init(&hevc->h265_uninit_done_sema, 0);
11018#endif
11019 if (h265_task == NULL) {
11020 sema_init(&h265_sema, 1);
11021 h265_task =
11022 kthread_run(h265_task_handle, hevc,
11023 "kthread_h265");
11024 }
11025 }
11026 /* hevc->stat |= STAT_KTHREAD; */
11027#if 0
11028 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11029 hevc_print(hevc, 0, "%s force clk\n", __func__);
11030 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11031 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11032 ((1 << 2) | (1 << 1)));
11033 WRITE_VREG(HEVC_DBLK_CFG0,
11034 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11035 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11036 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11037 (1 << 2)); /* 2 */
11038 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11039 (1 << 24)); /* 24 */
11040 WRITE_VREG(HEVC_STREAM_CONTROL,
11041 READ_VREG(HEVC_STREAM_CONTROL) |
11042 (1 << 15)); /* 15 */
11043 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11044 (1 << 13)); /* 13 */
11045 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11046 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11047 (1 << 15)); /* 15 */
11048 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11049 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11050 (1 << 15)); /* 15 */
11051 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11052 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11053 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11054 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11055 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11056 (1 << 3)); /* 3 */
11057 }
11058#endif
11059#ifdef SWAP_HEVC_UCODE
11060 if (!tee_enabled() && hevc->is_swap &&
11061 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11062 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11063 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11064 }
11065#endif
11066
11067#ifndef MULTI_INSTANCE_SUPPORT
11068 set_vdec_func(&vh265_dec_status);
11069#endif
11070 amhevc_start();
11071 hevc->stat |= STAT_VDEC_RUN;
11072 hevc->init_flag = 1;
11073 error_handle_threshold = 30;
11074 /* pr_info("%d, vh265_init, RP=0x%x\n",
11075 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11076 */
11077
11078 return 0;
11079}
11080
11081static int vh265_stop(struct hevc_state_s *hevc)
11082{
11083 if (get_dbg_flag(hevc) &
11084 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11085 int wait_timeout_count = 0;
11086
11087 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11088 HEVC_CODED_SLICE_SEGMENT_DAT &&
11089 wait_timeout_count < 10){
11090 wait_timeout_count++;
11091 msleep(20);
11092 }
11093 }
11094 if (hevc->stat & STAT_VDEC_RUN) {
11095 amhevc_stop();
11096 hevc->stat &= ~STAT_VDEC_RUN;
11097 }
11098
11099 if (hevc->stat & STAT_ISR_REG) {
11100#ifdef MULTI_INSTANCE_SUPPORT
11101 if (!hevc->m_ins_flag)
11102#endif
11103 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11104 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11105 hevc->stat &= ~STAT_ISR_REG;
11106 }
11107
11108 hevc->stat &= ~STAT_TIMER_INIT;
11109 if (hevc->stat & STAT_TIMER_ARM) {
11110 del_timer_sync(&hevc->timer);
11111 hevc->stat &= ~STAT_TIMER_ARM;
11112 }
11113
11114 if (hevc->stat & STAT_VF_HOOK) {
11115 if (fr_hint_status == VDEC_HINTED) {
11116 vf_notify_receiver(hevc->provider_name,
11117 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11118 NULL);
11119 }
11120 fr_hint_status = VDEC_NO_NEED_HINT;
11121 vf_unreg_provider(&vh265_vf_prov);
11122 hevc->stat &= ~STAT_VF_HOOK;
11123 }
11124
11125 hevc_local_uninit(hevc);
11126
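/*
 * CMA teardown handshake: uninit_list = 1 plus up(&h265_sema) wakes
 * h265_task_handle, which frees the picture list and then signals
 * h265_uninit_done_sema so this thread can stop the kthread safely.
 */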
11127 if (use_cma) {
11128 hevc->uninit_list = 1;
11129 up(&h265_sema);
11130#ifdef USE_UNINIT_SEMA
11131 down(&hevc->h265_uninit_done_sema);
11132 if (!IS_ERR(h265_task)) {
11133 kthread_stop(h265_task);
11134 h265_task = NULL;
11135 }
11136#else
11137 while (hevc->uninit_list) /* wait uninit complete */
11138 msleep(20);
11139#endif
11140
11141 }
11142 hevc->init_flag = 0;
11143 hevc->first_sc_checked = 0;
11144 cancel_work_sync(&hevc->notify_work);
11145 cancel_work_sync(&hevc->set_clk_work);
11146 uninit_mmu_buffers(hevc);
11147 amhevc_disable();
11148
11149 kfree(gvs);
11150 gvs = NULL;
11151
11152 return 0;
11153}
11154
11155#ifdef MULTI_INSTANCE_SUPPORT
11156static void reset_process_time(struct hevc_state_s *hevc)
11157{
11158 if (hevc->start_process_time) {
11159 unsigned int process_time =
11160 1000 * (jiffies - hevc->start_process_time) / HZ;
11161 hevc->start_process_time = 0;
11162 if (process_time > max_process_time[hevc->index])
11163 max_process_time[hevc->index] = process_time;
11164 }
11165}
11166
11167static void start_process_time(struct hevc_state_s *hevc)
11168{
11169 hevc->start_process_time = jiffies;
11170 hevc->decode_timeout_count = 2;
11171 hevc->last_lcu_idx = 0;
11172}
11173
11174static void restart_process_time(struct hevc_state_s *hevc)
11175{
11176 hevc->start_process_time = jiffies;
11177 hevc->decode_timeout_count = 2;
11178}
11179
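/*
 * Decode-timeout bookkeeping: start_process_time() arms
 * decode_timeout_count at 2, and vh265_check_timer_func() decrements it
 * only while the current LCU index stops advancing, so once
 * decode_timeout_val has elapsed timeout_process() fires after roughly two
 * further timer ticks of a stalled LCU index.
 */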
11180static void timeout_process(struct hevc_state_s *hevc)
11181{
11182 /*
11183 * If vh265_work is already pending at this timeout point,
11184 * let it handle the scenario instead.
11185 */
11186 if (work_pending(&hevc->work))
11187 return;
11188
11189 hevc->timeout_num++;
11190 amhevc_stop();
11191 read_decode_info(hevc);
11192
11193 hevc_print(hevc,
11194 0, "%s decoder timeout\n", __func__);
11195 check_pic_decoded_error(hevc,
11196 hevc->pic_decoded_lcu_idx);
11197 hevc->decoded_poc = hevc->curr_POC;
11198 hevc->decoding_pic = NULL;
11199 hevc->dec_result = DEC_RESULT_DONE;
11200 reset_process_time(hevc);
11201
11202 if (work_pending(&hevc->work))
11203 return;
11204 vdec_schedule_work(&hevc->timeout_work);
11205}
11206
11207#ifdef CONSTRAIN_MAX_BUF_NUM
11208static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11209{
11210 struct PIC_s *pic;
11211 int i;
11212 int count = 0;
11213 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11214 pic = hevc->m_PIC[i];
11215 if (pic == NULL || pic->index == -1)
11216 continue;
11217 if (pic->output_mark == 0 && pic->referenced == 0
11218 && pic->output_ready == 1)
11219 count++;
11220 }
11221
11222 return count;
11223}
11224
11225static int get_used_buf_count(struct hevc_state_s *hevc)
11226{
11227 struct PIC_s *pic;
11228 int i;
11229 int count = 0;
11230 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11231 pic = hevc->m_PIC[i];
11232 if (pic == NULL || pic->index == -1)
11233 continue;
11234 if (pic->output_mark != 0 || pic->referenced != 0
11235 || pic->output_ready != 0)
11236 count++;
11237 }
11238
11239 return count;
11240}
11241#endif
11242
11243
11244static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11245{
11246 struct PIC_s *new_pic = NULL;
11247 struct PIC_s *pic;
11248 /* recycle un-used pic */
11249 int i;
11250 int ref_pic = 0;
11251 struct vdec_s *vdec = hw_to_vdec(hevc);
11252 /*return 1 if pic_list is not initialized yet*/
11253 if (hevc->pic_list_init_flag != 3)
11254 return 1;
11255
11256 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11257 pic = hevc->m_PIC[i];
11258 if (pic == NULL || pic->index == -1)
11259 continue;
11260 if (pic->referenced == 1)
11261 ref_pic++;
11262 if (pic->output_mark == 0 && pic->referenced == 0
11263 && pic->output_ready == 0
11264 ) {
11265 if (new_pic) {
11266 if (pic->POC < new_pic->POC)
11267 new_pic = pic;
11268 } else
11269 new_pic = pic;
11270 }
11271 }
11272/*If the number of reference frames in the DPB >= (the DPB buffer size - the number of reorder pictures - 3)*/
11273/*and the back-end state is RECEIVER_INACTIVE, the decoder is left with no buffer to*/
11274/*decode into; all reference frames are then released and the error flag is set.*/
11275/*The 3 accounts for 2 fields needed by the back end for display and 1 field needed for decoding*/
11276/*when the stream is interlaced.*/
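/*
 * Example (illustrative numbers only): with get_work_pic_num() == 16 and
 * sps_num_reorder_pics_0 == 4, the recovery below kicks in once no free
 * picture exists and 9 (= 16 - 4 - 3) or more pictures are still referenced
 * while the receiver is inactive.
 */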
11277 if ((new_pic == NULL) &&
11278 (ref_pic >=
11279 get_work_pic_num(hevc) -
11280 hevc->sps_num_reorder_pics_0 - 3)) {
11281 enum receviver_start_e state = RECEIVER_INACTIVE;
11282 if (vf_get_receiver(vdec->vf_provider_name)) {
11283 state =
11284 vf_notify_receiver(vdec->vf_provider_name,
11285 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11286 NULL);
11287 if ((state == RECEIVER_STATE_NULL)
11288 || (state == RECEIVER_STATE_NONE))
11289 state = RECEIVER_INACTIVE;
11290 }
11291 if (state == RECEIVER_INACTIVE) {
11292 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11293 pic = hevc->m_PIC[i];
11294 if (pic == NULL || pic->index == -1)
11295 continue;
11296
11297 if ((pic->referenced == 1) &&
11298 (pic->error_mark == 1)) {
11299 pic->referenced = 0;
11300 put_mv_buf(hevc, pic);
11301 }
11302 pic->error_mark = 1;
11303 }
11304 }
11305 }
11306
11307 return (new_pic != NULL) ? 1 : 0;
11308}
11309
11310static int vmh265_stop(struct hevc_state_s *hevc)
11311{
11312 if (hevc->stat & STAT_TIMER_ARM) {
11313 del_timer_sync(&hevc->timer);
11314 hevc->stat &= ~STAT_TIMER_ARM;
11315 }
11316 if (hevc->stat & STAT_VDEC_RUN) {
11317 amhevc_stop();
11318 hevc->stat &= ~STAT_VDEC_RUN;
11319 }
11320 if (hevc->stat & STAT_ISR_REG) {
11321 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11322 hevc->stat &= ~STAT_ISR_REG;
11323 }
11324
11325 if (hevc->stat & STAT_VF_HOOK) {
11326 if (fr_hint_status == VDEC_HINTED)
11327 vf_notify_receiver(hevc->provider_name,
11328 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11329 NULL);
11330 fr_hint_status = VDEC_NO_NEED_HINT;
11331 vf_unreg_provider(&vh265_vf_prov);
11332 hevc->stat &= ~STAT_VF_HOOK;
11333 }
11334
11335 hevc_local_uninit(hevc);
11336
11337 if (use_cma) {
11338 hevc->uninit_list = 1;
11339 reset_process_time(hevc);
11340 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
11341 vdec_schedule_work(&hevc->work);
11342 flush_work(&hevc->work);
11343#ifdef USE_UNINIT_SEMA
11344 if (hevc->init_flag) {
11345 down(&hevc->h265_uninit_done_sema);
11346 }
11347#else
11348 while (hevc->uninit_list) /* wait uninit complete */
11349 msleep(20);
11350#endif
11351 }
11352 hevc->init_flag = 0;
11353 hevc->first_sc_checked = 0;
11354 cancel_work_sync(&hevc->notify_work);
11355 cancel_work_sync(&hevc->set_clk_work);
11356 cancel_work_sync(&hevc->work);
11357 cancel_work_sync(&hevc->timeout_work);
11358 uninit_mmu_buffers(hevc);
11359
11360 vfree(hevc->fw);
11361 hevc->fw = NULL;
11362
11363 dump_log(hevc);
11364 return 0;
11365}
11366
11367static unsigned char get_data_check_sum
11368 (struct hevc_state_s *hevc, int size)
11369{
11370 int jj;
11371 int sum = 0;
11372 u8 *data = NULL;
11373
11374 if (!hevc->chunk->block->is_mapped)
11375 data = codec_mm_vmap(hevc->chunk->block->start +
11376 hevc->chunk->offset, size);
11377 else
11378 data = ((u8 *)hevc->chunk->block->start_virt) +
11379 hevc->chunk->offset;
11380
11381 for (jj = 0; jj < size; jj++)
11382 sum += data[jj];
11383
11384 if (!hevc->chunk->block->is_mapped)
11385 codec_mm_unmap_phyaddr(data);
11386 return sum;
11387}
11388
11389static void vh265_notify_work(struct work_struct *work)
11390{
11391 struct hevc_state_s *hevc =
11392 container_of(work,
11393 struct hevc_state_s,
11394 notify_work);
11395 struct vdec_s *vdec = hw_to_vdec(hevc);
11396#ifdef MULTI_INSTANCE_SUPPORT
11397 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
11398 vf_notify_receiver(hevc->provider_name,
11399 VFRAME_EVENT_PROVIDER_FR_HINT,
11400 (void *)
11401 ((unsigned long)hevc->frame_dur));
11402 vdec->fr_hint_state = VDEC_HINTED;
11403 } else if (fr_hint_status == VDEC_NEED_HINT) {
11404 vf_notify_receiver(hevc->provider_name,
11405 VFRAME_EVENT_PROVIDER_FR_HINT,
11406 (void *)
11407 ((unsigned long)hevc->frame_dur));
11408 fr_hint_status = VDEC_HINTED;
11409 }
11410#else
11411 if (fr_hint_status == VDEC_NEED_HINT) {
11412 vf_notify_receiver(PROVIDER_NAME,
11413 VFRAME_EVENT_PROVIDER_FR_HINT,
11414 (void *)
11415 ((unsigned long)hevc->frame_dur));
11416 fr_hint_status = VDEC_HINTED;
11417 }
11418#endif
11419
11420 return;
11421}
11422
11423static void vh265_work_implement(struct hevc_state_s *hevc,
11424 struct vdec_s *vdec,int from)
11425{
11426 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
11427 /*USE_BUF_BLOCK*/
11428 uninit_pic_list(hevc);
11429 hevc_print(hevc, 0, "uninit list\n");
11430 hevc->uninit_list = 0;
11431#ifdef USE_UNINIT_SEMA
11432 up(&hevc->h265_uninit_done_sema);
11433#endif
11434 return;
11435 }
11436
11437 /* finished decoding one frame or error,
11438 * notify vdec core to switch context
11439 */
11440 if (hevc->pic_list_init_flag == 1
11441 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
11442 hevc->pic_list_init_flag = 2;
11443 init_pic_list(hevc);
11444 init_pic_list_hw(hevc);
11445 init_buf_spec(hevc);
11446 hevc_print(hevc, 0,
11447 "set pic_list_init_flag to 2\n");
11448
11449 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11450 return;
11451 }
11452
11453 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11454 "%s dec_result %d %x %x %x\n",
11455 __func__,
11456 hevc->dec_result,
11457 READ_VREG(HEVC_STREAM_LEVEL),
11458 READ_VREG(HEVC_STREAM_WR_PTR),
11459 READ_VREG(HEVC_STREAM_RD_PTR));
11460
11461 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
11462 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
11463 && (hw_to_vdec(hevc)->next_status !=
11464 VDEC_STATUS_DISCONNECTED)) {
11465 if (!vdec_has_more_input(vdec)) {
11466 hevc->dec_result = DEC_RESULT_EOS;
11467 vdec_schedule_work(&hevc->work);
11468 return;
11469 }
11470 if (!input_frame_based(vdec)) {
11471 int r = vdec_sync_input(vdec);
11472 if (r >= 0x200) {
11473 WRITE_VREG(HEVC_DECODE_SIZE,
11474 READ_VREG(HEVC_DECODE_SIZE) + r);
11475
11476 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11477 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
11478 __func__,
11479 READ_VREG(HEVC_STREAM_LEVEL),
11480 READ_VREG(HEVC_STREAM_WR_PTR),
11481 READ_VREG(HEVC_STREAM_RD_PTR),
11482 READ_VREG(HEVC_MPC_E), r);
11483
11484 start_process_time(hevc);
11485 if (READ_VREG(HEVC_DEC_STATUS_REG)
11486 == HEVC_DECODE_BUFEMPTY2)
11487 WRITE_VREG(HEVC_DEC_STATUS_REG,
11488 HEVC_ACTION_DONE);
11489 else
11490 WRITE_VREG(HEVC_DEC_STATUS_REG,
11491 HEVC_ACTION_DEC_CONT);
11492 } else {
11493 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11494 vdec_schedule_work(&hevc->work);
11495 }
11496 return;
11497 }
11498
11499		/* below: frame-based input */
11500 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
11501 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11502 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
11503 __func__,
11504 READ_VREG(HEVC_STREAM_LEVEL),
11505 READ_VREG(HEVC_STREAM_WR_PTR),
11506 READ_VREG(HEVC_STREAM_RD_PTR),
11507 READ_VREG(HEVC_MPC_E));
11508 mutex_lock(&hevc->chunks_mutex);
11509 vdec_vframe_dirty(vdec, hevc->chunk);
11510 hevc->chunk = NULL;
11511 mutex_unlock(&hevc->chunks_mutex);
11512 vdec_clean_input(vdec);
11513 }
11514
11515 /*if (is_new_pic_available(hevc)) {*/
11516 if (run_ready(vdec, VDEC_HEVC)) {
11517 int r;
11518 int decode_size;
11519 r = vdec_prepare_input(vdec, &hevc->chunk);
11520 if (r < 0) {
11521 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11522
11523 hevc_print(hevc,
11524 PRINT_FLAG_VDEC_DETAIL,
11525 "amvdec_vh265: Insufficient data\n");
11526
11527 vdec_schedule_work(&hevc->work);
11528 return;
11529 }
11530 hevc->dec_result = DEC_RESULT_NONE;
11531 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11532 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
11533 __func__, r,
11534 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
11535 get_data_check_sum(hevc, r) : 0,
11536 READ_VREG(HEVC_MPC_E));
11537
11538 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
11539 int jj;
11540 u8 *data = NULL;
11541
11542 if (!hevc->chunk->block->is_mapped)
11543 data = codec_mm_vmap(
11544 hevc->chunk->block->start +
11545 hevc->chunk->offset, r);
11546 else
11547 data = ((u8 *)
11548 hevc->chunk->block->start_virt)
11549 + hevc->chunk->offset;
11550
11551 for (jj = 0; jj < r; jj++) {
11552 if ((jj & 0xf) == 0)
11553 hevc_print(hevc,
11554 PRINT_FRAMEBASE_DATA,
11555 "%06x:", jj);
11556 hevc_print_cont(hevc,
11557 PRINT_FRAMEBASE_DATA,
11558 "%02x ", data[jj]);
11559 if (((jj + 1) & 0xf) == 0)
11560 hevc_print_cont(hevc,
11561 PRINT_FRAMEBASE_DATA,
11562 "\n");
11563 }
11564
11565 if (!hevc->chunk->block->is_mapped)
11566 codec_mm_unmap_phyaddr(data);
11567 }
11568
11569 decode_size = hevc->chunk->size +
11570 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
11571 WRITE_VREG(HEVC_DECODE_SIZE,
11572 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
11573
11574 vdec_enable_input(vdec);
11575
11576 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11577 "%s: mpc %x\n",
11578 __func__, READ_VREG(HEVC_MPC_E));
11579
11580 start_process_time(hevc);
11581 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11582		} else {
11583 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11584
11585 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11586 * "amvdec_vh265: Insufficient data\n");
11587 */
11588
11589 vdec_schedule_work(&hevc->work);
11590 }
11591 return;
11592 } else if (hevc->dec_result == DEC_RESULT_DONE) {
11593 /* if (!hevc->ctx_valid)
11594 hevc->ctx_valid = 1; */
11595 decode_frame_count[hevc->index]++;
11596#ifdef DETREFILL_ENABLE
11597 if (hevc->is_swap &&
11598 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11599 if (hevc->delrefill_check == 2) {
11600 delrefill(hevc);
11601 amhevc_stop();
11602 }
11603 }
11604#endif
11605 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
11606 hevc->used_4k_num =
11607 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
11608 if (hevc->used_4k_num >= 0 &&
11609 hevc->cur_pic &&
11610 hevc->cur_pic->scatter_alloc
11611 == 1) {
11612 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
11613 "%s pic index %d scatter_alloc %d page_start %d\n",
11614 "decoder_mmu_box_free_idx_tail",
11615 hevc->cur_pic->index,
11616 hevc->cur_pic->scatter_alloc,
11617 hevc->used_4k_num);
11618 if (hevc->m_ins_flag)
11619 hevc_mmu_dma_check(hw_to_vdec(hevc));
11620 decoder_mmu_box_free_idx_tail(
11621 hevc->mmu_box,
11622 hevc->cur_pic->index,
11623 hevc->used_4k_num);
11624 hevc->cur_pic->scatter_alloc = 2;
11625 }
11626 }
11627 hevc->pic_decoded_lcu_idx =
11628 READ_VREG(HEVC_PARSER_LCU_START)
11629 & 0xffffff;
11630
11631 if (vdec->master == NULL && vdec->slave == NULL &&
11632 hevc->empty_flag == 0) {
11633 hevc->over_decode =
11634 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
11635 if (hevc->over_decode)
11636 hevc_print(hevc, 0,
11637 "!!!Over decode\n");
11638 }
11639
11640 if (is_log_enable(hevc))
11641 add_log(hevc,
11642 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
11643 __func__,
11644 hevc->dec_result,
11645 hevc->pic_decoded_lcu_idx,
11646 hevc->used_4k_num,
11647 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11648 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11649 hevc->start_shift_bytes
11650 );
11651
11652 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11653 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
11654 __func__,
11655 hevc->dec_result,
11656 READ_VREG(HEVC_STREAM_LEVEL),
11657 READ_VREG(HEVC_STREAM_WR_PTR),
11658 READ_VREG(HEVC_STREAM_RD_PTR),
11659 hevc->pic_decoded_lcu_idx,
11660 hevc->used_4k_num,
11661 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11662 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11663 hevc->start_shift_bytes
11664 );
11665
11666 hevc->used_4k_num = -1;
11667
11668 check_pic_decoded_error(hevc,
11669 hevc->pic_decoded_lcu_idx);
11670#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11671#if 1
11672 if (vdec->slave) {
11673 if (dv_debug & 0x1)
11674 vdec_set_flag(vdec->slave,
11675 VDEC_FLAG_SELF_INPUT_CONTEXT);
11676 else
11677 vdec_set_flag(vdec->slave,
11678 VDEC_FLAG_OTHER_INPUT_CONTEXT);
11679 }
11680#else
11681 if (vdec->slave) {
11682 if (no_interleaved_el_slice)
11683 vdec_set_flag(vdec->slave,
11684 VDEC_FLAG_INPUT_KEEP_CONTEXT);
11685 /* this will move real HW pointer for input */
11686 else
11687 vdec_set_flag(vdec->slave, 0);
11688 /* this will not move real HW pointer
11689 *and SL layer decoding
11690 *will start from same stream position
11691 *as current BL decoder
11692 */
11693 }
11694#endif
11695#endif
11696#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11697 hevc->shift_byte_count_lo
11698 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11699 if (vdec->slave) {
11700 /*cur is base, found enhance*/
11701 struct hevc_state_s *hevc_el =
11702 (struct hevc_state_s *)
11703 vdec->slave->private;
11704 if (hevc_el)
11705 hevc_el->shift_byte_count_lo =
11706 hevc->shift_byte_count_lo;
11707 } else if (vdec->master) {
11708 /*cur is enhance, found base*/
11709 struct hevc_state_s *hevc_ba =
11710 (struct hevc_state_s *)
11711 vdec->master->private;
11712 if (hevc_ba)
11713 hevc_ba->shift_byte_count_lo =
11714 hevc->shift_byte_count_lo;
11715 }
11716#endif
11717 mutex_lock(&hevc->chunks_mutex);
11718 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11719 hevc->chunk = NULL;
11720 mutex_unlock(&hevc->chunks_mutex);
11721 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
11722 /*
11723 stream base: stream buf empty or timeout
11724 frame base: vdec_prepare_input fail
11725 */
11726 if (!vdec_has_more_input(vdec)) {
11727 hevc->dec_result = DEC_RESULT_EOS;
11728 vdec_schedule_work(&hevc->work);
11729 return;
11730 }
11731#ifdef AGAIN_HAS_THRESHOLD
11732 hevc->next_again_flag = 1;
11733#endif
11734 } else if (hevc->dec_result == DEC_RESULT_EOS) {
11735 struct PIC_s *pic;
11736 hevc->eos = 1;
11737#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11738 if ((vdec->master || vdec->slave) &&
11739 aux_data_is_avaible(hevc))
11740 dolby_get_meta(hevc);
11741#endif
11742 check_pic_decoded_error(hevc,
11743 hevc->pic_decoded_lcu_idx);
11744 pic = get_pic_by_POC(hevc, hevc->curr_POC);
11745 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11746 "%s: end of stream, last dec poc %d => 0x%pf\n",
11747 __func__, hevc->curr_POC, pic);
11748 flush_output(hevc, pic);
11749
11750 if (hevc->is_used_v4l)
11751 notify_v4l_eos(hw_to_vdec(hevc));
11752#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11753 hevc->shift_byte_count_lo
11754 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11755 if (vdec->slave) {
11756 /*cur is base, found enhance*/
11757 struct hevc_state_s *hevc_el =
11758 (struct hevc_state_s *)
11759 vdec->slave->private;
11760 if (hevc_el)
11761 hevc_el->shift_byte_count_lo =
11762 hevc->shift_byte_count_lo;
11763 } else if (vdec->master) {
11764 /*cur is enhance, found base*/
11765 struct hevc_state_s *hevc_ba =
11766 (struct hevc_state_s *)
11767 vdec->master->private;
11768 if (hevc_ba)
11769 hevc_ba->shift_byte_count_lo =
11770 hevc->shift_byte_count_lo;
11771 }
11772#endif
11773 mutex_lock(&hevc->chunks_mutex);
11774 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11775 hevc->chunk = NULL;
11776 mutex_unlock(&hevc->chunks_mutex);
11777 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
11778 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11779 "%s: force exit\n",
11780 __func__);
11781 if (hevc->stat & STAT_VDEC_RUN) {
11782 amhevc_stop();
11783 hevc->stat &= ~STAT_VDEC_RUN;
11784 }
11785 if (hevc->stat & STAT_ISR_REG) {
11786 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11787 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11788 hevc->stat &= ~STAT_ISR_REG;
11789 }
11790 hevc_print(hevc, 0, "%s: force exit end\n",
11791 __func__);
11792 }
11793
11794 if (hevc->stat & STAT_VDEC_RUN) {
11795 amhevc_stop();
11796 hevc->stat &= ~STAT_VDEC_RUN;
11797 }
11798
11799 if (hevc->stat & STAT_TIMER_ARM) {
11800 del_timer_sync(&hevc->timer);
11801 hevc->stat &= ~STAT_TIMER_ARM;
11802 }
11803
11804 wait_hevc_search_done(hevc);
11805#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
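	/*
	 * Dual-layer (Dolby Vision) streams: when the ucode requested a layer
	 * switch, schedule the paired BL/EL vdec instance next; otherwise keep
	 * scheduling this instance.
	 */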
11806 if (hevc->switch_dvlayer_flag) {
11807 if (vdec->slave)
11808 vdec_set_next_sched(vdec, vdec->slave);
11809 else if (vdec->master)
11810 vdec_set_next_sched(vdec, vdec->master);
11811 } else if (vdec->slave || vdec->master)
11812 vdec_set_next_sched(vdec, vdec);
11813#endif
11814
11815 if (from == 1) {
11816 /* This is a timeout work */
11817 if (work_pending(&hevc->work)) {
11818 /*
11819 * The vh265_work arrives at the last second,
11820			 * The regular vh265_work was queued at the last moment;
11821			 * let it handle this case instead of the timeout path.
11822 return;
11823			//cancel_work_sync(&hevc->work);//reserved for future consideration
11824 }
11825 }
11826
11827 /* mark itself has all HW resource released and input released */
11828 if (vdec->parallel_dec == 1)
11829 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
11830 else
11831 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
11832
11833 if (hevc->is_used_v4l) {
11834 struct aml_vcodec_ctx *ctx =
11835 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
11836
11837 if (ctx->param_sets_from_ucode &&
11838 !hevc->v4l_params_parsed)
11839 vdec_v4l_write_frame_sync(ctx);
11840 }
11841
11842 if (hevc->vdec_cb)
11843 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
11844}
11845
11846static void vh265_work(struct work_struct *work)
11847{
11848 struct hevc_state_s *hevc = container_of(work,
11849 struct hevc_state_s, work);
11850 struct vdec_s *vdec = hw_to_vdec(hevc);
11851
11852 vh265_work_implement(hevc, vdec, 0);
11853}
11854
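/*
 * Timeout path: reuse the same implementation as vh265_work, but only if
 * the regular work is not already pending, so the two do not race.
 */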
11855static void vh265_timeout_work(struct work_struct *work)
11856{
11857 struct hevc_state_s *hevc = container_of(work,
11858 struct hevc_state_s, timeout_work);
11859 struct vdec_s *vdec = hw_to_vdec(hevc);
11860
11861 if (work_pending(&hevc->work))
11862 return;
11863 vh265_work_implement(hevc, vdec, 1);
11864}
11865
11866
11867static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
11868{
11869 /* new to do ... */
11870 vh265_prot_init(hevc);
11871 return 0;
11872}
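
/*
 * Scheduler hook: report whether this instance can accept another decode
 * run.  Returns the mask of cores it needs (HEVC only, or VDEC_1 + HEVC in
 * the combined mode) or 0 when it is not ready.
 */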
11873static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
11874{
11875 struct hevc_state_s *hevc =
11876 (struct hevc_state_s *)vdec->private;
11877 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
11878 CODEC_MM_FLAGS_TVP : 0;
11879	bool ret = false;
11880 if (step == 0x12)
11881 return 0;
11882 else if (step == 0x11)
11883 step = 0x12;
11884
11885 if (hevc->eos)
11886 return 0;
11887 if (!hevc->first_sc_checked && hevc->mmu_enable) {
11888 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
11889		hevc->first_sc_checked = 1;
11890 hevc_print(hevc, 0,
11891 "vh265 cached=%d need_size=%d speed= %d ms\n",
11892 size, (hevc->need_cache_size >> PAGE_SHIFT),
11893 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
11894 }
11895 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
11896 && pre_decode_buf_level != 0) {
11897 u32 rp, wp, level;
11898
11899 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
11900 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
11901 if (wp < rp)
11902 level = vdec->input.size + wp - rp;
11903 else
11904 level = wp - rp;
11905
11906 if (level < pre_decode_buf_level)
11907 return 0;
11908 }
11909
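	/*
	 * After a DEC_RESULT_AGAIN on stream input, hold off until the parser
	 * write pointer has advanced by at least again_threshold bytes since
	 * the last run (presumably to avoid rescheduling before enough new
	 * data has arrived).
	 */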
11910#ifdef AGAIN_HAS_THRESHOLD
11911 if (hevc->next_again_flag &&
11912 (!vdec_frame_based(vdec))) {
11913 u32 parser_wr_ptr =
11914 READ_PARSER_REG(PARSER_VIDEO_WP);
11915 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
11916 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
11917 again_threshold) {
11918 int r = vdec_sync_input(vdec);
11919 hevc_print(hevc,
11920			PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
11921 return 0;
11922 }
11923 }
11924#endif
11925
11926 if (disp_vframe_valve_level &&
11927 kfifo_len(&hevc->display_q) >=
11928 disp_vframe_valve_level) {
11929 hevc->valve_count--;
11930 if (hevc->valve_count <= 0)
11931 hevc->valve_count = 2;
11932 else
11933 return 0;
11934 }
11935
11936 ret = is_new_pic_available(hevc);
11937 if (!ret) {
11938 hevc_print(hevc,
11939 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
11940 __func__, ret);
11941 }
11942
11943#ifdef CONSTRAIN_MAX_BUF_NUM
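	/*
	 * Optionally cap how many buffers this instance may consume: limit the
	 * number of vf-only references, the display queue depth, and the total
	 * used buffer count (run_ready_max_buf_num == 0xff means "no more than
	 * the working picture count").
	 */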
11944 if (hevc->pic_list_init_flag == 3) {
11945 if (run_ready_max_vf_only_num > 0 &&
11946 get_vf_ref_only_buf_count(hevc) >=
11947 run_ready_max_vf_only_num
11948 )
11949 ret = 0;
11950 if (run_ready_display_q_num > 0 &&
11951 kfifo_len(&hevc->display_q) >=
11952 run_ready_display_q_num)
11953 ret = 0;
11954
11955 /*avoid more buffers consumed when
11956 switching resolution*/
11957 if (run_ready_max_buf_num == 0xff &&
11958 get_used_buf_count(hevc) >=
11959 get_work_pic_num(hevc))
11960 ret = 0;
11961 else if (run_ready_max_buf_num &&
11962 get_used_buf_count(hevc) >=
11963 run_ready_max_buf_num)
11964 ret = 0;
11965 }
11966#endif
11967
11968 if (hevc->is_used_v4l) {
11969 struct aml_vcodec_ctx *ctx =
11970 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
11971
11972 if (ctx->param_sets_from_ucode &&
11973 !ctx->v4l_codec_ready &&
11974 hevc->v4l_params_parsed)
11975			ret = 0; /* params already parsed; wait for the v4l codec to become ready */
11976 }
11977
11978
11979 if (ret)
11980 not_run_ready[hevc->index] = 0;
11981 else
11982 not_run_ready[hevc->index]++;
11983 if (vdec->parallel_dec == 1)
11984 return ret ? (CORE_MASK_HEVC) : 0;
11985 else
11986 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
11987}
11988
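/*
 * Scheduler hook: start a decode run on the granted cores.  Prepares the
 * next input chunk, (re)loads the HEVC firmware when needed, restores the
 * hardware context and then kicks the ucode.
 */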
11989static void run(struct vdec_s *vdec, unsigned long mask,
11990 void (*callback)(struct vdec_s *, void *), void *arg)
11991{
11992 struct hevc_state_s *hevc =
11993 (struct hevc_state_s *)vdec->private;
11994 int r, loadr = 0;
11995 unsigned char check_sum = 0;
11996
11997 run_count[hevc->index]++;
11998 hevc->vdec_cb_arg = arg;
11999 hevc->vdec_cb = callback;
12000 hevc->aux_data_dirty = 1;
12001 hevc_reset_core(vdec);
12002
12003#ifdef AGAIN_HAS_THRESHOLD
12004 hevc->pre_parser_wr_ptr =
12005 READ_PARSER_REG(PARSER_VIDEO_WP);
12006 hevc->next_again_flag = 0;
12007#endif
12008 r = vdec_prepare_input(vdec, &hevc->chunk);
12009 if (r < 0) {
12010 input_empty[hevc->index]++;
12011 hevc->dec_result = DEC_RESULT_AGAIN;
12012 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12013 "ammvdec_vh265: Insufficient data\n");
12014
12015 vdec_schedule_work(&hevc->work);
12016 return;
12017 }
12018 input_empty[hevc->index] = 0;
12019 hevc->dec_result = DEC_RESULT_NONE;
12020 if (vdec_frame_based(vdec) &&
12021 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12022 || is_log_enable(hevc)))
12023 check_sum = get_data_check_sum(hevc, r);
12024
12025 if (is_log_enable(hevc))
12026 add_log(hevc,
12027 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12028 __func__, r,
12029 check_sum,
12030 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12031 );
12032 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12033 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12034 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12035 __func__, r,
12036 check_sum,
12037 READ_VREG(HEVC_STREAM_LEVEL),
12038 READ_VREG(HEVC_STREAM_WR_PTR),
12039 READ_VREG(HEVC_STREAM_RD_PTR),
12040 READ_PARSER_REG(PARSER_VIDEO_RP),
12041 READ_PARSER_REG(PARSER_VIDEO_WP),
12042 hevc->start_shift_bytes
12043 );
12044 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12045 input_frame_based(vdec)) {
12046 int jj;
12047 u8 *data = NULL;
12048
12049 if (!hevc->chunk->block->is_mapped)
12050 data = codec_mm_vmap(hevc->chunk->block->start +
12051 hevc->chunk->offset, r);
12052 else
12053 data = ((u8 *)hevc->chunk->block->start_virt)
12054 + hevc->chunk->offset;
12055
12056 for (jj = 0; jj < r; jj++) {
12057 if ((jj & 0xf) == 0)
12058 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12059 "%06x:", jj);
12060 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12061 "%02x ", data[jj]);
12062 if (((jj + 1) & 0xf) == 0)
12063 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12064 "\n");
12065 }
12066
12067 if (!hevc->chunk->block->is_mapped)
12068 codec_mm_unmap_phyaddr(data);
12069 }
12070 if (vdec->mc_loaded) {
12071		/*
12072		 * The firmware has been loaded before and has not been changed
12073		 * to a different one, so skip reloading it.
12074		 */
12075 if (tee_enabled() && hevc->is_swap &&
12076 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12077 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12078 } else {
12079 if (hevc->mmu_enable)
12080 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12081 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12082 "h265_mmu", hevc->fw->data);
12083 else {
12084 if (!hevc->is_4k) {
12085					/* If an older fw package is installed it does not
12086					 * include the swap fw, so fall back to the plain
12087					 * "h265_mmu" fw when "hevc_mmu_swap" fails to load. */
12088 loadr = amhevc_vdec_loadmc_ex(
12089 VFORMAT_HEVC, vdec,
12090 "hevc_mmu_swap",
12091 hevc->fw->data);
12092 if (loadr < 0)
12093 loadr = amhevc_vdec_loadmc_ex(
12094 VFORMAT_HEVC, vdec,
12095 "h265_mmu",
12096 hevc->fw->data);
12097 else
12098 hevc->is_swap = true;
12099 } else
12100 loadr = amhevc_vdec_loadmc_ex(
12101 VFORMAT_HEVC, vdec,
12102 "h265_mmu", hevc->fw->data);
12103 }
12104 else
12105 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12106 NULL, hevc->fw->data);
12107 if (loadr < 0) {
12108 amhevc_disable();
12109 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12110 tee_enabled() ? "TEE" : "local", loadr);
12111 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12112 vdec_schedule_work(&hevc->work);
12113 return;
12114 }
12115
12116 if (tee_enabled() && hevc->is_swap &&
12117 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12118 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12119#ifdef DETREFILL_ENABLE
12120 if (hevc->is_swap &&
12121 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12122 init_detrefill_buf(hevc);
12123#endif
12124 vdec->mc_loaded = 1;
12125 vdec->mc_type = VFORMAT_HEVC;
12126 }
12127 if (vh265_hw_ctx_restore(hevc) < 0) {
12128 vdec_schedule_work(&hevc->work);
12129 return;
12130 }
12131 vdec_enable_input(vdec);
12132
12133 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12134
12135 if (vdec_frame_based(vdec)) {
12136 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12137 r = hevc->chunk->size +
12138 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12139 hevc->decode_size = r;
12140 }
12141#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12142 else {
12143 if (vdec->master || vdec->slave)
12144 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12145 hevc->shift_byte_count_lo);
12146 }
12147#endif
12148 WRITE_VREG(HEVC_DECODE_SIZE, r);
12149 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12150 hevc->init_flag = 1;
12151
12152 if (hevc->pic_list_init_flag == 3)
12153 init_pic_list_hw(hevc);
12154
12155 backup_decode_state(hevc);
12156
12157 start_process_time(hevc);
12158 mod_timer(&hevc->timer, jiffies);
12159 hevc->stat |= STAT_TIMER_ARM;
12160 hevc->stat |= STAT_ISR_REG;
12161 amhevc_start();
12162 hevc->stat |= STAT_VDEC_RUN;
12163}
12164
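/*
 * Host reset hook: cancel pending work, stop the HEVC core and the timer,
 * then tear down and rebuild the local state so the instance can be reused.
 */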
12165static void reset(struct vdec_s *vdec)
12166{
12167
12168 struct hevc_state_s *hevc =
12169 (struct hevc_state_s *)vdec->private;
12170 int i;
12171
12172 cancel_work_sync(&hevc->work);
12173 cancel_work_sync(&hevc->notify_work);
12174 if (hevc->stat & STAT_VDEC_RUN) {
12175 amhevc_stop();
12176 hevc->stat &= ~STAT_VDEC_RUN;
12177 }
12178
12179 if (hevc->stat & STAT_TIMER_ARM) {
12180 del_timer_sync(&hevc->timer);
12181 hevc->stat &= ~STAT_TIMER_ARM;
12182 }
12183 hevc->dec_result = DEC_RESULT_NONE;
12184 reset_process_time(hevc);
12185 hevc->init_flag = 0;
12186 hevc->pic_list_init_flag = 0;
12187 dealloc_mv_bufs(hevc);
12188 hevc_local_uninit(hevc);
12189 if (vh265_local_init(hevc) < 0)
12190 pr_debug(" %s local init fail\n", __func__);
12191 for (i = 0; i < BUF_POOL_SIZE; i++) {
12192 hevc->m_BUF[i].start_adr = 0;
12193 }
12194
12195 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12196}
12197
12198static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12199{
12200 struct hevc_state_s *hevc =
12201 (struct hevc_state_s *)vdec->private;
12202
12203 return vh265_isr(0, hevc);
12204}
12205
12206static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12207{
12208 struct hevc_state_s *hevc =
12209 (struct hevc_state_s *)vdec->private;
12210
12211 return vh265_isr_thread_fn(0, hevc);
12212}
12213#endif
12214
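/*
 * Probe for the legacy single-instance decoder: allocate the global hevc
 * state, reserve the bmmu workspace buffer, clear it for non-secure
 * playback and bring the decoder up via vh265_init().
 */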
12215static int amvdec_h265_probe(struct platform_device *pdev)
12216{
12217#ifdef MULTI_INSTANCE_SUPPORT
12218 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12219#else
12220 struct vdec_dev_reg_s *pdata =
12221 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12222#endif
12223 char *tmpbuf;
12224 int ret;
12225 struct hevc_state_s *hevc;
12226
12227 hevc = vmalloc(sizeof(struct hevc_state_s));
12228 if (hevc == NULL) {
12229 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12230 return -ENOMEM;
12231 }
12232 gHevc = hevc;
12233 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12234 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12235 H265_DEBUG_DIS_SYS_ERROR_PROC));
12236 memset(hevc, 0, sizeof(struct hevc_state_s));
12237 if (get_dbg_flag(hevc))
12238 hevc_print(hevc, 0, "%s\r\n", __func__);
12239 mutex_lock(&vh265_mutex);
12240
12241 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12242 (parser_sei_enable & 0x100) == 0)
12243 parser_sei_enable = 7; /*old 1*/
12244 hevc->m_ins_flag = 0;
12245 hevc->init_flag = 0;
12246 hevc->first_sc_checked = 0;
12247 hevc->uninit_list = 0;
12248 hevc->fatal_error = 0;
12249 hevc->show_frame_num = 0;
12250 hevc->frameinfo_enable = 1;
12251#ifdef MULTI_INSTANCE_SUPPORT
12252 hevc->platform_dev = pdev;
12253 platform_set_drvdata(pdev, pdata);
12254#endif
12255
12256 if (pdata == NULL) {
12257 hevc_print(hevc, 0,
12258 "\namvdec_h265 memory resource undefined.\n");
12259 vfree(hevc);
12260 mutex_unlock(&vh265_mutex);
12261 return -EFAULT;
12262 }
12263 if (mmu_enable_force == 0) {
12264 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12265 || double_write_mode == 0x10)
12266 hevc->mmu_enable = 0;
12267 else
12268 hevc->mmu_enable = 1;
12269 }
12270 if (init_mmu_buffers(hevc)) {
12271 hevc_print(hevc, 0,
12272 "\n 265 mmu init failed!\n");
12273 vfree(hevc);
12274 mutex_unlock(&vh265_mutex);
12275 return -EFAULT;
12276 }
12277
12278 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12279 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12280 if (ret < 0) {
12281 uninit_mmu_buffers(hevc);
12282 vfree(hevc);
12283 mutex_unlock(&vh265_mutex);
12284 return ret;
12285 }
12286 hevc->buf_size = work_buf_size;
12287
12288
12289 if (!vdec_secure(pdata)) {
12290 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12291 if (tmpbuf) {
12292 memset(tmpbuf, 0, work_buf_size);
12293 dma_sync_single_for_device(amports_get_dma_device(),
12294 hevc->buf_start,
12295 work_buf_size, DMA_TO_DEVICE);
12296 } else {
12297 tmpbuf = codec_mm_vmap(hevc->buf_start,
12298 work_buf_size);
12299 if (tmpbuf) {
12300 memset(tmpbuf, 0, work_buf_size);
12301 dma_sync_single_for_device(
12302 amports_get_dma_device(),
12303 hevc->buf_start,
12304 work_buf_size,
12305 DMA_TO_DEVICE);
12306 codec_mm_unmap_phyaddr(tmpbuf);
12307 }
12308 }
12309 }
12310
12311 if (get_dbg_flag(hevc)) {
12312 hevc_print(hevc, 0,
12313 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12314 hevc->buf_start, hevc->buf_size);
12315 }
12316
12317 if (pdata->sys_info)
12318 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12319 else {
12320 hevc->vh265_amstream_dec_info.width = 0;
12321 hevc->vh265_amstream_dec_info.height = 0;
12322 hevc->vh265_amstream_dec_info.rate = 30;
12323 }
12324#ifndef MULTI_INSTANCE_SUPPORT
12325 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12326 workaround_enable |= 3;
12327 hevc_print(hevc, 0,
12328 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
12329 } else
12330 workaround_enable &= ~3;
12331#endif
12332 hevc->cma_dev = pdata->cma_dev;
12333 vh265_vdec_info_init();
12334
12335#ifdef MULTI_INSTANCE_SUPPORT
12336 pdata->private = hevc;
12337 pdata->dec_status = vh265_dec_status;
12338 pdata->set_isreset = vh265_set_isreset;
12339 is_reset = 0;
12340 if (vh265_init(pdata) < 0) {
12341#else
12342 if (vh265_init(hevc) < 0) {
12343#endif
12344 hevc_print(hevc, 0,
12345 "\namvdec_h265 init failed.\n");
12346 hevc_local_uninit(hevc);
12347 uninit_mmu_buffers(hevc);
12348 vfree(hevc);
12349 pdata->dec_status = NULL;
12350 mutex_unlock(&vh265_mutex);
12351 return -ENODEV;
12352 }
12353 /*set the max clk for smooth playing...*/
12354 hevc_source_changed(VFORMAT_HEVC,
12355 3840, 2160, 60);
12356 mutex_unlock(&vh265_mutex);
12357
12358 return 0;
12359}
12360
12361static int amvdec_h265_remove(struct platform_device *pdev)
12362{
12363 struct hevc_state_s *hevc = gHevc;
12364
12365 if (get_dbg_flag(hevc))
12366 hevc_print(hevc, 0, "%s\r\n", __func__);
12367
12368 mutex_lock(&vh265_mutex);
12369
12370 vh265_stop(hevc);
12371
12372 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
12373
12374
12375#ifdef DEBUG_PTS
12376 hevc_print(hevc, 0,
12377 "pts missed %ld, pts hit %ld, duration %d\n",
12378 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
12379#endif
12380
12381 vfree(hevc);
12382 hevc = NULL;
12383 gHevc = NULL;
12384
12385 mutex_unlock(&vh265_mutex);
12386
12387 return 0;
12388}
12389/****************************************/
12390#ifdef CONFIG_PM
12391static int h265_suspend(struct device *dev)
12392{
12393 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
12394 return 0;
12395}
12396
12397static int h265_resume(struct device *dev)
12398{
12399 amhevc_resume(to_platform_device(dev));
12400 return 0;
12401}
12402
12403static const struct dev_pm_ops h265_pm_ops = {
12404 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
12405};
12406#endif
12407
12408static struct platform_driver amvdec_h265_driver = {
12409 .probe = amvdec_h265_probe,
12410 .remove = amvdec_h265_remove,
12411 .driver = {
12412 .name = DRIVER_NAME,
12413#ifdef CONFIG_PM
12414 .pm = &h265_pm_ops,
12415#endif
12416 }
12417};
12418
12419#ifdef MULTI_INSTANCE_SUPPORT
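/*
 * Debug dump requested by the vdec core: print the current stream geometry,
 * queue depths, buffer/MV buffer tables and key HEVC registers, plus the
 * raw chunk bytes when frame-based data dumping is enabled.
 */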
12420static void vh265_dump_state(struct vdec_s *vdec)
12421{
12422 int i;
12423 struct hevc_state_s *hevc =
12424 (struct hevc_state_s *)vdec->private;
12425 hevc_print(hevc, 0,
12426 "====== %s\n", __func__);
12427
12428 hevc_print(hevc, 0,
12429 "width/height (%d/%d), reorder_pic_num %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
12430 hevc->frame_width,
12431 hevc->frame_height,
12432 hevc->sps_num_reorder_pics_0,
12433 get_work_pic_num(hevc),
12434 hevc->video_signal_type_debug,
12435 hevc->is_swap
12436 );
12437
12438 hevc_print(hevc, 0,
12439 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
12440 input_frame_based(vdec),
12441 hevc->eos,
12442 hevc->dec_result,
12443 decode_frame_count[hevc->index],
12444 display_frame_count[hevc->index],
12445 run_count[hevc->index],
12446 not_run_ready[hevc->index],
12447 input_empty[hevc->index]
12448 );
12449
12450 if (vf_get_receiver(vdec->vf_provider_name)) {
12451 enum receviver_start_e state =
12452 vf_notify_receiver(vdec->vf_provider_name,
12453 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12454 NULL);
12455 hevc_print(hevc, 0,
12456 "\nreceiver(%s) state %d\n",
12457 vdec->vf_provider_name,
12458 state);
12459 }
12460
12461 hevc_print(hevc, 0,
12462 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
12463 __func__,
12464 kfifo_len(&hevc->newframe_q),
12465 VF_POOL_SIZE,
12466 kfifo_len(&hevc->display_q),
12467 VF_POOL_SIZE,
12468 hevc->vf_pre_count,
12469 hevc->vf_get_count,
12470 hevc->vf_put_count,
12471 hevc->pic_list_init_flag,
12472 is_new_pic_available(hevc)
12473 );
12474
12475 dump_pic_list(hevc);
12476
12477 for (i = 0; i < BUF_POOL_SIZE; i++) {
12478 hevc_print(hevc, 0,
12479 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12480 i,
12481 hevc->m_BUF[i].start_adr,
12482 hevc->m_BUF[i].size,
12483 hevc->m_BUF[i].used_flag);
12484 }
12485
12486 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12487 hevc_print(hevc, 0,
12488 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12489 i,
12490 hevc->m_mv_BUF[i].start_adr,
12491 hevc->m_mv_BUF[i].size,
12492 hevc->m_mv_BUF[i].used_flag);
12493 }
12494
12495 hevc_print(hevc, 0,
12496 "HEVC_DEC_STATUS_REG=0x%x\n",
12497 READ_VREG(HEVC_DEC_STATUS_REG));
12498 hevc_print(hevc, 0,
12499 "HEVC_MPC_E=0x%x\n",
12500 READ_VREG(HEVC_MPC_E));
12501 hevc_print(hevc, 0,
12502 "HEVC_DECODE_MODE=0x%x\n",
12503 READ_VREG(HEVC_DECODE_MODE));
12504 hevc_print(hevc, 0,
12505 "HEVC_DECODE_MODE2=0x%x\n",
12506 READ_VREG(HEVC_DECODE_MODE2));
12507 hevc_print(hevc, 0,
12508 "NAL_SEARCH_CTL=0x%x\n",
12509 READ_VREG(NAL_SEARCH_CTL));
12510 hevc_print(hevc, 0,
12511 "HEVC_PARSER_LCU_START=0x%x\n",
12512 READ_VREG(HEVC_PARSER_LCU_START));
12513 hevc_print(hevc, 0,
12514 "HEVC_DECODE_SIZE=0x%x\n",
12515 READ_VREG(HEVC_DECODE_SIZE));
12516 hevc_print(hevc, 0,
12517 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
12518 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
12519 hevc_print(hevc, 0,
12520 "HEVC_STREAM_START_ADDR=0x%x\n",
12521 READ_VREG(HEVC_STREAM_START_ADDR));
12522 hevc_print(hevc, 0,
12523 "HEVC_STREAM_END_ADDR=0x%x\n",
12524 READ_VREG(HEVC_STREAM_END_ADDR));
12525 hevc_print(hevc, 0,
12526 "HEVC_STREAM_LEVEL=0x%x\n",
12527 READ_VREG(HEVC_STREAM_LEVEL));
12528 hevc_print(hevc, 0,
12529 "HEVC_STREAM_WR_PTR=0x%x\n",
12530 READ_VREG(HEVC_STREAM_WR_PTR));
12531 hevc_print(hevc, 0,
12532 "HEVC_STREAM_RD_PTR=0x%x\n",
12533 READ_VREG(HEVC_STREAM_RD_PTR));
12534 hevc_print(hevc, 0,
12535 "PARSER_VIDEO_RP=0x%x\n",
12536 READ_PARSER_REG(PARSER_VIDEO_RP));
12537 hevc_print(hevc, 0,
12538 "PARSER_VIDEO_WP=0x%x\n",
12539 READ_PARSER_REG(PARSER_VIDEO_WP));
12540
12541 if (input_frame_based(vdec) &&
12542 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
12543 ) {
12544 int jj;
12545 if (hevc->chunk && hevc->chunk->block &&
12546 hevc->chunk->size > 0) {
12547 u8 *data = NULL;
12548 if (!hevc->chunk->block->is_mapped)
12549 data = codec_mm_vmap(hevc->chunk->block->start +
12550 hevc->chunk->offset, hevc->chunk->size);
12551 else
12552 data = ((u8 *)hevc->chunk->block->start_virt)
12553 + hevc->chunk->offset;
12554 hevc_print(hevc, 0,
12555 "frame data size 0x%x\n",
12556 hevc->chunk->size);
12557 for (jj = 0; jj < hevc->chunk->size; jj++) {
12558 if ((jj & 0xf) == 0)
12559 hevc_print(hevc,
12560 PRINT_FRAMEBASE_DATA,
12561 "%06x:", jj);
12562 hevc_print_cont(hevc,
12563 PRINT_FRAMEBASE_DATA,
12564 "%02x ", data[jj]);
12565 if (((jj + 1) & 0xf) == 0)
12566 hevc_print_cont(hevc,
12567 PRINT_FRAMEBASE_DATA,
12568 "\n");
12569 }
12570
12571 if (!hevc->chunk->block->is_mapped)
12572 codec_mm_unmap_phyaddr(data);
12573 }
12574 }
12575
12576}
12577
12578
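/*
 * Probe for the multi-instance decoder: per-instance hevc state, provider
 * name selection (VFM path / Dolby Vision dual layer / multi-instance),
 * optional settings from the pdata->config string, and core mask request.
 */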
12579static int ammvdec_h265_probe(struct platform_device *pdev)
12580{
12581
12582 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12583 struct hevc_state_s *hevc = NULL;
12584 int ret;
12585#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12586 int config_val;
12587#endif
12588 if (pdata == NULL) {
12589 pr_info("\nammvdec_h265 memory resource undefined.\n");
12590 return -EFAULT;
12591 }
12592
12593 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
12594 sizeof(struct hevc_state_s), GFP_KERNEL); */
12595 hevc = vmalloc(sizeof(struct hevc_state_s));
12596 if (hevc == NULL) {
12597 pr_info("\nammvdec_h265 device data allocation failed\n");
12598 return -ENOMEM;
12599 }
12600 memset(hevc, 0, sizeof(struct hevc_state_s));
12601
12602 /* the ctx from v4l2 driver. */
12603 hevc->v4l2_ctx = pdata->private;
12604
12605 pdata->private = hevc;
12606 pdata->dec_status = vh265_dec_status;
12607 /* pdata->set_trickmode = set_trickmode; */
12608 pdata->run_ready = run_ready;
12609 pdata->run = run;
12610 pdata->reset = reset;
12611 pdata->irq_handler = vh265_irq_cb;
12612 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
12613 pdata->dump_state = vh265_dump_state;
12614
12615 hevc->index = pdev->id;
12616 hevc->m_ins_flag = 1;
12617
12618 if (pdata->use_vfm_path) {
12619 snprintf(pdata->vf_provider_name,
12620 VDEC_PROVIDER_NAME_SIZE,
12621 VFM_DEC_PROVIDER_NAME);
12622 hevc->frameinfo_enable = 1;
12623 }
12624#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12625 else if (vdec_dual(pdata)) {
12626 struct hevc_state_s *hevc_pair = NULL;
12627
12628 if (dv_toggle_prov_name) /*debug purpose*/
12629 snprintf(pdata->vf_provider_name,
12630 VDEC_PROVIDER_NAME_SIZE,
12631 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
12632 VFM_DEC_DVEL_PROVIDER_NAME);
12633 else
12634 snprintf(pdata->vf_provider_name,
12635 VDEC_PROVIDER_NAME_SIZE,
12636 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
12637 VFM_DEC_DVBL_PROVIDER_NAME);
12638 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
12639 if (pdata->master)
12640 hevc_pair = (struct hevc_state_s *)
12641 pdata->master->private;
12642 else if (pdata->slave)
12643 hevc_pair = (struct hevc_state_s *)
12644 pdata->slave->private;
12645 if (hevc_pair)
12646 hevc->shift_byte_count_lo =
12647 hevc_pair->shift_byte_count_lo;
12648 }
12649#endif
12650 else
12651 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
12652 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
12653
12654 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
12655 &vh265_vf_provider, pdata);
12656
12657 hevc->provider_name = pdata->vf_provider_name;
12658 platform_set_drvdata(pdev, pdata);
12659
12660 hevc->platform_dev = pdev;
12661
12662 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
12663 pdata->config && pdata->config_len) {
12664#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12665		/* use ptr config for double_write_mode, etc. */
12666 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
12667
12668 if (get_config_int(pdata->config, "hevc_double_write_mode",
12669 &config_val) == 0)
12670 hevc->double_write_mode = config_val;
12671 else
12672 hevc->double_write_mode = double_write_mode;
12673
12674 if (get_config_int(pdata->config, "save_buffer_mode",
12675 &config_val) == 0)
12676 hevc->save_buffer_mode = config_val;
12677 else
12678 hevc->save_buffer_mode = 0;
12679
12680 /*use ptr config for max_pic_w, etc*/
12681 if (get_config_int(pdata->config, "hevc_buf_width",
12682 &config_val) == 0) {
12683 hevc->max_pic_w = config_val;
12684 }
12685 if (get_config_int(pdata->config, "hevc_buf_height",
12686 &config_val) == 0) {
12687 hevc->max_pic_h = config_val;
12688 }
12689
12690#endif
12691 } else {
12692 if (pdata->sys_info)
12693 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12694 else {
12695 hevc->vh265_amstream_dec_info.width = 0;
12696 hevc->vh265_amstream_dec_info.height = 0;
12697 hevc->vh265_amstream_dec_info.rate = 30;
12698 }
12699 hevc->double_write_mode = double_write_mode;
12700 }
12701 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
12702		hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
12703 else
12704 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
12705
12706 if (mmu_enable_force == 0) {
12707 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
12708 hevc->mmu_enable = 0;
12709 else
12710 hevc->mmu_enable = 1;
12711 }
12712
12713 hevc->is_used_v4l = (((unsigned long)
12714 hevc->vh265_amstream_dec_info.param & 0x80) >> 7);
12715 if (hevc->is_used_v4l) {
12716 hevc->mmu_enable = (((unsigned long) // scatter mem
12717 hevc->vh265_amstream_dec_info.param & 0x100) >> 8);
12718 if (!hevc->mmu_enable)
12719 hevc->double_write_mode = 0x10;
12720
12721 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
12722 "%s v4: enable mmu %d.\n",
12723 __func__, hevc->mmu_enable);
12724 }
12725
12726 if (init_mmu_buffers(hevc) < 0) {
12727 hevc_print(hevc, 0,
12728 "\n 265 mmu init failed!\n");
12729 mutex_unlock(&vh265_mutex);
12730 /* devm_kfree(&pdev->dev, (void *)hevc);*/
12731 if (hevc)
12732 vfree((void *)hevc);
12733 pdata->dec_status = NULL;
12734 return -EFAULT;
12735 }
12736#if 0
12737 hevc->buf_start = pdata->mem_start;
12738 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
12739#else
12740
12741 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
12742 BMMU_WORKSPACE_ID, work_buf_size,
12743 DRIVER_NAME, &hevc->buf_start);
12744 if (ret < 0) {
12745 uninit_mmu_buffers(hevc);
12746 /* devm_kfree(&pdev->dev, (void *)hevc); */
12747 if (hevc)
12748 vfree((void *)hevc);
12749 pdata->dec_status = NULL;
12750 mutex_unlock(&vh265_mutex);
12751 return ret;
12752 }
12753 hevc->buf_size = work_buf_size;
12754#endif
12755 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12756 (parser_sei_enable & 0x100) == 0)
12757 parser_sei_enable = 7;
12758 hevc->init_flag = 0;
12759 hevc->first_sc_checked = 0;
12760 hevc->uninit_list = 0;
12761 hevc->fatal_error = 0;
12762 hevc->show_frame_num = 0;
12763
12764 /*
12765 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
12766 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
12767 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
12768 */
12769 if (get_dbg_flag(hevc)) {
12770 hevc_print(hevc, 0,
12771 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12772 hevc->buf_start, hevc->buf_size);
12773 }
12774
12775 hevc_print(hevc, 0,
12776 "dynamic_buf_num_margin=%d\n",
12777 hevc->dynamic_buf_num_margin);
12778 hevc_print(hevc, 0,
12779 "double_write_mode=%d\n",
12780 hevc->double_write_mode);
12781
12782 hevc->cma_dev = pdata->cma_dev;
12783
12784 if (vh265_init(pdata) < 0) {
12785 hevc_print(hevc, 0,
12786 "\namvdec_h265 init failed.\n");
12787 hevc_local_uninit(hevc);
12788 uninit_mmu_buffers(hevc);
12789 /* devm_kfree(&pdev->dev, (void *)hevc); */
12790 if (hevc)
12791 vfree((void *)hevc);
12792 pdata->dec_status = NULL;
12793 return -ENODEV;
12794 }
12795
12796 vdec_set_prepare_level(pdata, start_decode_buf_level);
12797
12798 /*set the max clk for smooth playing...*/
12799 hevc_source_changed(VFORMAT_HEVC,
12800 3840, 2160, 60);
12801 if (pdata->parallel_dec == 1)
12802 vdec_core_request(pdata, CORE_MASK_HEVC);
12803 else
12804 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
12805 | CORE_MASK_COMBINE);
12806
12807 return 0;
12808}
12809
12810static int ammvdec_h265_remove(struct platform_device *pdev)
12811{
12812 struct hevc_state_s *hevc =
12813 (struct hevc_state_s *)
12814 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
12815 struct vdec_s *vdec = hw_to_vdec(hevc);
12816
12817 if (hevc == NULL)
12818 return 0;
12819
12820 if (get_dbg_flag(hevc))
12821 hevc_print(hevc, 0, "%s\r\n", __func__);
12822
12823 vmh265_stop(hevc);
12824
12825 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
12826 if (vdec->parallel_dec == 1)
12827 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
12828 else
12829		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12830
12831 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
12832
12833 vfree((void *)hevc);
12834 return 0;
12835}
12836
12837static struct platform_driver ammvdec_h265_driver = {
12838 .probe = ammvdec_h265_probe,
12839 .remove = ammvdec_h265_remove,
12840 .driver = {
12841 .name = MULTI_DRIVER_NAME,
12842#ifdef CONFIG_PM
12843 .pm = &h265_pm_ops,
12844#endif
12845 }
12846};
12847#endif
12848
12849static struct codec_profile_t amvdec_h265_profile = {
12850 .name = "hevc",
12851 .profile = ""
12852};
12853
12854static struct codec_profile_t amvdec_h265_profile_single,
12855 amvdec_h265_profile_mult;
12856
12857static struct mconfig h265_configs[] = {
12858 MC_PU32("use_cma", &use_cma),
12859 MC_PU32("bit_depth_luma", &bit_depth_luma),
12860 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
12861 MC_PU32("video_signal_type", &video_signal_type),
12862#ifdef ERROR_HANDLE_DEBUG
12863 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
12864 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
12865#endif
12866 MC_PU32("radr", &radr),
12867 MC_PU32("rval", &rval),
12868 MC_PU32("dbg_cmd", &dbg_cmd),
12869 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
12870 MC_PU32("endian", &endian),
12871 MC_PU32("step", &step),
12872 MC_PU32("udebug_flag", &udebug_flag),
12873 MC_PU32("decode_pic_begin", &decode_pic_begin),
12874 MC_PU32("slice_parse_begin", &slice_parse_begin),
12875 MC_PU32("nal_skip_policy", &nal_skip_policy),
12876 MC_PU32("i_only_flag", &i_only_flag),
12877 MC_PU32("error_handle_policy", &error_handle_policy),
12878 MC_PU32("error_handle_threshold", &error_handle_threshold),
12879 MC_PU32("error_handle_nal_skip_threshold",
12880 &error_handle_nal_skip_threshold),
12881 MC_PU32("error_handle_system_threshold",
12882 &error_handle_system_threshold),
12883 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
12884 MC_PU32("debug", &debug),
12885 MC_PU32("debug_mask", &debug_mask),
12886 MC_PU32("buffer_mode", &buffer_mode),
12887 MC_PU32("double_write_mode", &double_write_mode),
12888 MC_PU32("buf_alloc_width", &buf_alloc_width),
12889 MC_PU32("buf_alloc_height", &buf_alloc_height),
12890 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
12891 MC_PU32("max_buf_num", &max_buf_num),
12892 MC_PU32("buf_alloc_size", &buf_alloc_size),
12893 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
12894 MC_PU32("mem_map_mode", &mem_map_mode),
12895 MC_PU32("enable_mem_saving", &enable_mem_saving),
12896 MC_PU32("force_w_h", &force_w_h),
12897 MC_PU32("force_fps", &force_fps),
12898 MC_PU32("max_decoding_time", &max_decoding_time),
12899 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
12900 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
12901 MC_PU32("interlace_enable", &interlace_enable),
12902 MC_PU32("pts_unstable", &pts_unstable),
12903 MC_PU32("parser_sei_enable", &parser_sei_enable),
12904 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
12905 MC_PU32("decode_timeout_val", &decode_timeout_val),
12906#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12907 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
12908 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
12909 MC_PU32("dv_debug", &dv_debug),
12910#endif
12911};
12912static struct mconfig_node decoder_265_node;
12913
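/*
 * Module init: select the work-buffer spec for the largest stream the chip
 * supports (the SM1+ spec pairs with the 8K profile advertised below),
 * register the multi- and single-instance platform drivers, and export the
 * codec profiles and the "media.decoder" config node.
 */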
12914static int __init amvdec_h265_driver_init_module(void)
12915{
12916 struct BuffInfo_s *p_buf_info;
12917
12918 if (vdec_is_support_4k()) {
12919 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
12920 p_buf_info = &amvh265_workbuff_spec[2];
12921 else
12922 p_buf_info = &amvh265_workbuff_spec[1];
12923 } else
12924 p_buf_info = &amvh265_workbuff_spec[0];
12925
12926 init_buff_spec(NULL, p_buf_info);
12927 work_buf_size =
12928 (p_buf_info->end_adr - p_buf_info->start_adr
12929 + 0xffff) & (~0xffff);
12930
12931 pr_debug("amvdec_h265 module init\n");
12932 error_handle_policy = 0;
12933
12934#ifdef ERROR_HANDLE_DEBUG
12935 dbg_nal_skip_flag = 0;
12936 dbg_nal_skip_count = 0;
12937#endif
12938 udebug_flag = 0;
12939 decode_pic_begin = 0;
12940 slice_parse_begin = 0;
12941 step = 0;
12942 buf_alloc_size = 0;
12943
12944#ifdef MULTI_INSTANCE_SUPPORT
12945 if (platform_driver_register(&ammvdec_h265_driver))
12946 pr_err("failed to register ammvdec_h265 driver\n");
12947
12948#endif
12949 if (platform_driver_register(&amvdec_h265_driver)) {
12950 pr_err("failed to register amvdec_h265 driver\n");
12951 return -ENODEV;
12952 }
12953#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
12954 if (!has_hevc_vdec()) {
12955 /* not support hevc */
12956 amvdec_h265_profile.name = "hevc_unsupport";
12957 }
12958 if (vdec_is_support_4k()) {
12959 if (is_meson_m8m2_cpu()) {
12960 /* m8m2 support 4k */
12961 amvdec_h265_profile.profile = "4k";
12962 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
12963 amvdec_h265_profile.profile =
12964 "8k, 8bit, 10bit, dwrite, compressed";
12965		} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
12966 amvdec_h265_profile.profile =
12967 "4k, 8bit, 10bit, dwrite, compressed";
12968 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
12969 amvdec_h265_profile.profile = "4k";
12970 }
12971#endif
12972 if (codec_mm_get_total_size() < 80 * SZ_1M) {
12973 pr_info("amvdec_h265 default mmu enabled.\n");
12974 mmu_enable = 1;
12975 }
12976
12977 vcodec_profile_register(&amvdec_h265_profile);
12978 amvdec_h265_profile_single = amvdec_h265_profile;
12979 amvdec_h265_profile_single.name = "h265";
12980 vcodec_profile_register(&amvdec_h265_profile_single);
12981 amvdec_h265_profile_mult = amvdec_h265_profile;
12982 amvdec_h265_profile_mult.name = "mh265";
12983 vcodec_profile_register(&amvdec_h265_profile_mult);
12984 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
12985 "h265", h265_configs, CONFIG_FOR_RW);
12986 return 0;
12987}
12988
12989static void __exit amvdec_h265_driver_remove_module(void)
12990{
12991 pr_debug("amvdec_h265 module remove.\n");
12992
12993#ifdef MULTI_INSTANCE_SUPPORT
12994 platform_driver_unregister(&ammvdec_h265_driver);
12995#endif
12996 platform_driver_unregister(&amvdec_h265_driver);
12997}
12998
12999/****************************************/
13000/*
13001 *module_param(stat, uint, 0664);
13002 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
13003 */
13004module_param(use_cma, uint, 0664);
13005MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13006
13007module_param(bit_depth_luma, uint, 0664);
13008MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13009
13010module_param(bit_depth_chroma, uint, 0664);
13011MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13012
13013module_param(video_signal_type, uint, 0664);
13014MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13015
13016#ifdef ERROR_HANDLE_DEBUG
13017module_param(dbg_nal_skip_flag, uint, 0664);
13018MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13019
13020module_param(dbg_nal_skip_count, uint, 0664);
13021MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13022#endif
13023
13024module_param(radr, uint, 0664);
13025MODULE_PARM_DESC(radr, "\n radr\n");
13026
13027module_param(rval, uint, 0664);
13028MODULE_PARM_DESC(rval, "\n rval\n");
13029
13030module_param(dbg_cmd, uint, 0664);
13031MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13032
13033module_param(dump_nal, uint, 0664);
13034MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13035
13036module_param(dbg_skip_decode_index, uint, 0664);
13037MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13038
13039module_param(endian, uint, 0664);
13040MODULE_PARM_DESC(endian, "\n endian\n");
13041
13042module_param(step, uint, 0664);
13043MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13044
13045module_param(decode_pic_begin, uint, 0664);
13046MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13047
13048module_param(slice_parse_begin, uint, 0664);
13049MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13050
13051module_param(nal_skip_policy, uint, 0664);
13052MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
13053
13054module_param(i_only_flag, uint, 0664);
13055MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
13056
13057module_param(fast_output_enable, uint, 0664);
13058MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
13059
13060module_param(error_handle_policy, uint, 0664);
13061MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
13062
13063module_param(error_handle_threshold, uint, 0664);
13064MODULE_PARM_DESC(error_handle_threshold,
13065 "\n amvdec_h265 error_handle_threshold\n");
13066
13067module_param(error_handle_nal_skip_threshold, uint, 0664);
13068MODULE_PARM_DESC(error_handle_nal_skip_threshold,
13069 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
13070
13071module_param(error_handle_system_threshold, uint, 0664);
13072MODULE_PARM_DESC(error_handle_system_threshold,
13073 "\n amvdec_h265 error_handle_system_threshold\n");
13074
13075module_param(error_skip_nal_count, uint, 0664);
13076MODULE_PARM_DESC(error_skip_nal_count,
13077 "\n amvdec_h265 error_skip_nal_count\n");
13078
13079module_param(debug, uint, 0664);
13080MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
13081
13082module_param(debug_mask, uint, 0664);
13083MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
13084
13085module_param(log_mask, uint, 0664);
13086MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
13087
13088module_param(buffer_mode, uint, 0664);
13089MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
13090
13091module_param(double_write_mode, uint, 0664);
13092MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
13093
13094module_param(buf_alloc_width, uint, 0664);
13095MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
13096
13097module_param(buf_alloc_height, uint, 0664);
13098MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
13099
13100module_param(dynamic_buf_num_margin, uint, 0664);
13101MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
13102
13103module_param(max_buf_num, uint, 0664);
13104MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
13105
13106module_param(buf_alloc_size, uint, 0664);
13107MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
13108
13109#ifdef CONSTRAIN_MAX_BUF_NUM
13110module_param(run_ready_max_vf_only_num, uint, 0664);
13111MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");
13112
13113module_param(run_ready_display_q_num, uint, 0664);
13114MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");
13115
13116module_param(run_ready_max_buf_num, uint, 0664);
13117MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
13118#endif
13119
13120#if 0
13121module_param(re_config_pic_flag, uint, 0664);
13122MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
13123#endif
13124
13125module_param(buffer_mode_dbg, uint, 0664);
13126MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");
13127
13128module_param(mem_map_mode, uint, 0664);
13129MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");
13130
13131module_param(enable_mem_saving, uint, 0664);
13132MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");
13133
13134module_param(force_w_h, uint, 0664);
13135MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
13136
13137module_param(force_fps, uint, 0664);
13138MODULE_PARM_DESC(force_fps, "\n force_fps\n");
13139
13140module_param(max_decoding_time, uint, 0664);
13141MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");
13142
13143module_param(prefix_aux_buf_size, uint, 0664);
13144MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");
13145
13146module_param(suffix_aux_buf_size, uint, 0664);
13147MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");
13148
13149module_param(interlace_enable, uint, 0664);
13150MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
13151module_param(pts_unstable, uint, 0664);
13152MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
13153module_param(parser_sei_enable, uint, 0664);
13154MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");
13155
13156#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13157module_param(parser_dolby_vision_enable, uint, 0664);
13158MODULE_PARM_DESC(parser_dolby_vision_enable,
13159 "\n parser_dolby_vision_enable\n");
13160
13161module_param(dolby_meta_with_el, uint, 0664);
13162MODULE_PARM_DESC(dolby_meta_with_el,
13163 "\n dolby_meta_with_el\n");
13164
13165module_param(dolby_el_flush_th, uint, 0664);
13166MODULE_PARM_DESC(dolby_el_flush_th,
13167 "\n dolby_el_flush_th\n");
13168#endif
13169module_param(mmu_enable, uint, 0664);
13170MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");
13171
13172module_param(mmu_enable_force, uint, 0664);
13173MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");
13174
13175#ifdef MULTI_INSTANCE_SUPPORT
13176module_param(start_decode_buf_level, int, 0664);
13177MODULE_PARM_DESC(start_decode_buf_level,
13178 "\n h265 start_decode_buf_level\n");
13179
13180module_param(decode_timeout_val, uint, 0664);
13181MODULE_PARM_DESC(decode_timeout_val,
13182 "\n h265 decode_timeout_val\n");
13183
13184module_param(data_resend_policy, uint, 0664);
13185MODULE_PARM_DESC(data_resend_policy,
13186 "\n h265 data_resend_policy\n");
13187
13188module_param_array(decode_frame_count, uint,
13189 &max_decode_instance_num, 0664);
13190
13191module_param_array(display_frame_count, uint,
13192 &max_decode_instance_num, 0664);
13193
13194module_param_array(max_process_time, uint,
13195 &max_decode_instance_num, 0664);
13196
13197module_param_array(max_get_frame_interval,
13198 uint, &max_decode_instance_num, 0664);
13199
13200module_param_array(run_count, uint,
13201 &max_decode_instance_num, 0664);
13202
13203module_param_array(input_empty, uint,
13204 &max_decode_instance_num, 0664);
13205
13206module_param_array(not_run_ready, uint,
13207 &max_decode_instance_num, 0664);
13208
13209module_param_array(ref_frame_mark_flag, uint,
13210 &max_decode_instance_num, 0664);
13211
13212#endif
13213#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13214module_param(dv_toggle_prov_name, uint, 0664);
13215MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");
13216
13217module_param(dv_debug, uint, 0664);
13218MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");
13219
13220module_param(force_bypass_dvenl, uint, 0664);
13221MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
13222#endif
13223
13224#ifdef AGAIN_HAS_THRESHOLD
13225module_param(again_threshold, uint, 0664);
13226MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
13227#endif
13228
13229module_param(force_disp_pic_index, int, 0664);
13230MODULE_PARM_DESC(force_disp_pic_index,
13231 "\n amvdec_h265 force_disp_pic_index\n");
13232
13233module_param(frmbase_cont_bitlevel, uint, 0664);
13234MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");
13235
13236module_param(udebug_flag, uint, 0664);
13237MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
13238
13239module_param(udebug_pause_pos, uint, 0664);
13240MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
13241
13242module_param(udebug_pause_val, uint, 0664);
13243MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
13244
13245module_param(pre_decode_buf_level, int, 0664);
13246MODULE_PARM_DESC(pre_decode_buf_level, "\n ammvdec_h264 pre_decode_buf_level\n");
13247
13248module_param(udebug_pause_decode_idx, uint, 0664);
13249MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
13250
13251module_param(disp_vframe_valve_level, uint, 0664);
13252MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");
13253
13254module_param(pic_list_debug, uint, 0664);
13255MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");
13256
13257module_param(without_display_mode, uint, 0664);
13258MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");
13259
13260module_init(amvdec_h265_driver_init_module);
13261module_exit(amvdec_h265_driver_remove_module);
13262
13263MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
13264MODULE_LICENSE("GPL");
13265MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");
13266