path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
blob: 2cd939a4331c9f40376bebe71deddea816845010
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50
51#define CONSTRAIN_MAX_BUF_NUM
52
53#define SWAP_HEVC_UCODE
54#define DETREFILL_ENABLE
55
56#define AGAIN_HAS_THRESHOLD
57/*#define TEST_NO_BUF*/
58#define HEVC_PIC_STRUCT_SUPPORT
59#define MULTI_INSTANCE_SUPPORT
60#define USE_UNINIT_SEMA
61
62 /* .buf_size = 0x100000*16,
63 //4k2k , 0x100000 per buffer */
64 /* 4096x2304 , 0x120000 per buffer */
65#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
66#define MPRED_4K_MV_BUF_SIZE (0x120000)
67#define MPRED_MV_BUF_SIZE (0x40000)
68
69#define MMU_COMPRESS_HEADER_SIZE 0x48000
70#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
71
72#define MAX_FRAME_4K_NUM 0x1200
73#define MAX_FRAME_8K_NUM (0x1200*4)
74
75//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
76#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
77
78#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
79
80#define HEVC_CM_HEADER_START_ADDR 0x3628
81#define HEVC_SAO_MMU_VH1_ADDR 0x363b
82#define HEVC_SAO_MMU_VH0_ADDR 0x363a
83
84#define HEVC_DBLK_CFGB 0x350b
85#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
86#define SWAP_HEVC_OFFSET (3 * 0x1000)
87
88#define MEM_NAME "codec_265"
89/* #include <mach/am_regs.h> */
90#include <linux/amlogic/media/utils/vdec_reg.h>
91
92#include "../utils/vdec.h"
93#include "../utils/amvdec.h"
94#include <linux/amlogic/media/video_sink/video.h>
95#include <linux/amlogic/media/codec_mm/configs.h>
96
97#define SEND_LMEM_WITH_RPM
98#define SUPPORT_10BIT
99/* #define ERROR_HANDLE_DEBUG */
100
101#ifndef STAT_KTHREAD
102#define STAT_KTHREAD 0x40
103#endif
104
105#ifdef MULTI_INSTANCE_SUPPORT
106#define MAX_DECODE_INSTANCE_NUM 9
107#define MULTI_DRIVER_NAME "ammvdec_h265"
108#endif
109#define DRIVER_NAME "amvdec_h265"
110#define MODULE_NAME "amvdec_h265"
111#define DRIVER_HEADER_NAME "amvdec_h265_header"
112
113#define PUT_INTERVAL (HZ/100)
114#define ERROR_SYSTEM_RESET_COUNT 200
115
116#define PTS_NORMAL 0
117#define PTS_NONE_REF_USE_DURATION 1
118
119#define PTS_MODE_SWITCHING_THRESHOLD 3
120#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
121
122#define DUR2PTS(x) ((x)*90/96)
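/*
 * Worked example (illustrative; assumes frame_dur is kept in 1/96000 s units
 * as elsewhere in amports, while PTS runs at 90 kHz):
 *   30 fps -> frame_dur = 96000 / 30 = 3200
 *   DUR2PTS(3200) = 3200 * 90 / 96 = 3000 = 90000 / 30 PTS ticks per frame.
 */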
123
124#define MAX_SIZE_8K (8192 * 4608)
125#define MAX_SIZE_4K (4096 * 2304)
126
127#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
128#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
129
130#define SEI_UserDataITU_T_T35 4
131
132static struct semaphore h265_sema;
133
134struct hevc_state_s;
135static int hevc_print(struct hevc_state_s *hevc,
136 int debug_flag, const char *fmt, ...);
137static int hevc_print_cont(struct hevc_state_s *hevc,
138 int debug_flag, const char *fmt, ...);
139static int vh265_vf_states(struct vframe_states *states, void *);
140static struct vframe_s *vh265_vf_peek(void *);
141static struct vframe_s *vh265_vf_get(void *);
142static void vh265_vf_put(struct vframe_s *, void *);
143static int vh265_event_cb(int type, void *data, void *private_data);
144
145static int vh265_stop(struct hevc_state_s *hevc);
146#ifdef MULTI_INSTANCE_SUPPORT
147static int vmh265_stop(struct hevc_state_s *hevc);
148static s32 vh265_init(struct vdec_s *vdec);
149static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
150static void reset_process_time(struct hevc_state_s *hevc);
151static void start_process_time(struct hevc_state_s *hevc);
152static void restart_process_time(struct hevc_state_s *hevc);
153static void timeout_process(struct hevc_state_s *hevc);
154#else
155static s32 vh265_init(struct hevc_state_s *hevc);
156#endif
157static void vh265_prot_init(struct hevc_state_s *hevc);
158static int vh265_local_init(struct hevc_state_s *hevc);
159static void vh265_check_timer_func(unsigned long arg);
160static void config_decode_mode(struct hevc_state_s *hevc);
161
162static const char vh265_dec_id[] = "vh265-dev";
163
164#define PROVIDER_NAME "decoder.h265"
165#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
166
167static const struct vframe_operations_s vh265_vf_provider = {
168 .peek = vh265_vf_peek,
169 .get = vh265_vf_get,
170 .put = vh265_vf_put,
171 .event_cb = vh265_event_cb,
172 .vf_states = vh265_vf_states,
173};
174
175static struct vframe_provider_s vh265_vf_prov;
176
177static u32 bit_depth_luma;
178static u32 bit_depth_chroma;
179static u32 video_signal_type;
180
181static int start_decode_buf_level = 0x8000;
182
183static unsigned int decode_timeout_val = 200;
184
 185/*data_resend_policy:
 186 bit 0, for stream-based input, resend the data when the decode buffer runs empty
 187*/
188static u32 data_resend_policy = 1;
189
190#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
191/*
192static const char * const video_format_names[] = {
193 "component", "PAL", "NTSC", "SECAM",
194 "MAC", "unspecified", "unspecified", "unspecified"
195};
196
197static const char * const color_primaries_names[] = {
198 "unknown", "bt709", "undef", "unknown",
199 "bt470m", "bt470bg", "smpte170m", "smpte240m",
200 "film", "bt2020"
201};
202
203static const char * const transfer_characteristics_names[] = {
204 "unknown", "bt709", "undef", "unknown",
205 "bt470m", "bt470bg", "smpte170m", "smpte240m",
206 "linear", "log100", "log316", "iec61966-2-4",
207 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
208 "smpte-st-2084", "smpte-st-428"
209};
210
211static const char * const matrix_coeffs_names[] = {
212 "GBR", "bt709", "undef", "unknown",
213 "fcc", "bt470bg", "smpte170m", "smpte240m",
214 "YCgCo", "bt2020nc", "bt2020c"
215};
216*/
217#ifdef SUPPORT_10BIT
218#define HEVC_CM_BODY_START_ADDR 0x3626
219#define HEVC_CM_BODY_LENGTH 0x3627
220#define HEVC_CM_HEADER_LENGTH 0x3629
221#define HEVC_CM_HEADER_OFFSET 0x362b
222#define HEVC_SAO_CTRL9 0x362d
223#define LOSLESS_COMPRESS_MODE
224/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
 225/* double_write_mode:
 226 * 0, no double write;
 227 * 1, 1:1 ratio;
 228 * 2, (1/4):(1/4) ratio;
 229 * 3, (1/4):(1/4) ratio, with the compressed frame also kept
 230 * 4, (1/2):(1/2) ratio;
 231 * 0x10, double write only
 232 * 0x100, if > 1080p, use mode 4, else use mode 1;
 233 * 0x200, if > 1080p, use mode 2, else use mode 1;
 234 * 0x300, if > 720p, use mode 4, else use mode 1;
 235 */
236static u32 double_write_mode;
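/*
 * Illustrative example of the resolution-dependent settings above (see
 * get_double_write_mode() further below for the actual selection): with
 * double_write_mode = 0x100, a 3840x2160 stream resolves to mode 4, i.e. an
 * additional (1/2)x(1/2) down-scaled NV21 frame is written out, while a
 * 1920x1080 stream resolves to mode 1 (full size 1:1 double write).
 */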
237
238/*#define DECOMP_HEADR_SURGENT*/
239
240static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
241static u32 enable_mem_saving = 1;
242static u32 workaround_enable;
243static u32 force_w_h;
244#endif
245static u32 force_fps;
246static u32 pts_unstable;
247#define H265_DEBUG_BUFMGR 0x01
248#define H265_DEBUG_BUFMGR_MORE 0x02
249#define H265_DEBUG_DETAIL 0x04
250#define H265_DEBUG_REG 0x08
251#define H265_DEBUG_MAN_SEARCH_NAL 0x10
252#define H265_DEBUG_MAN_SKIP_NAL 0x20
253#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
254#define H265_DEBUG_FORCE_CLK 0x80
255#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
256#define H265_DEBUG_NO_DISPLAY 0x200
257#define H265_DEBUG_DISCARD_NAL 0x400
258#define H265_DEBUG_OUT_PTS 0x800
259#define H265_DEBUG_DUMP_PIC_LIST 0x1000
260#define H265_DEBUG_PRINT_SEI 0x2000
261#define H265_DEBUG_PIC_STRUCT 0x4000
262#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
263#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
264#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
265#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
266#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
267#define H265_DEBUG_HW_RESET 0x100000
268#define H265_CFG_CANVAS_IN_DECODE 0x200000
269#define H265_DEBUG_DV 0x400000
270#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
271#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
272#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
273#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
274#ifdef MULTI_INSTANCE_SUPPORT
275#define PRINT_FLAG_ERROR 0x0
276#define IGNORE_PARAM_FROM_CONFIG 0x08000000
277#define PRINT_FRAMEBASE_DATA 0x10000000
278#define PRINT_FLAG_VDEC_STATUS 0x20000000
279#define PRINT_FLAG_VDEC_DETAIL 0x40000000
280#define PRINT_FLAG_V4L_DETAIL 0x80000000
281#endif
282
283#define BUF_POOL_SIZE 32
284#define MAX_BUF_NUM 24
285#define MAX_REF_PIC_NUM 24
286#define MAX_REF_ACTIVE 16
287
288#ifdef MV_USE_FIXED_BUF
289#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
290#define VF_BUFFER_IDX(n) (n)
291#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
292#else
293#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
294#define VF_BUFFER_IDX(n) (n)
295#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
296#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
297#endif
298
299#define HEVC_MV_INFO 0x310d
300#define HEVC_QP_INFO 0x3137
301#define HEVC_SKIP_INFO 0x3136
302
303const u32 h265_version = 201602101;
304static u32 debug_mask = 0xffffffff;
305static u32 log_mask;
306static u32 debug;
307static u32 radr;
308static u32 rval;
309static u32 dbg_cmd;
310static u32 dump_nal;
311static u32 dbg_skip_decode_index;
312static u32 endian = 0xff0;
313#ifdef ERROR_HANDLE_DEBUG
314static u32 dbg_nal_skip_flag;
315 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
316static u32 dbg_nal_skip_count;
317#endif
318/*for debug*/
319/*
320 udebug_flag:
321 bit 0, enable ucode print
322 bit 1, enable ucode detail print
323 bit [31:16] not 0, pos to dump lmem
324 bit 2, pop bits to lmem
325 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
326*/
327static u32 udebug_flag;
328/*
329 when udebug_flag[1:0] is not 0
330 udebug_pause_pos not 0,
331 pause position
332*/
333static u32 udebug_pause_pos;
334/*
335 when udebug_flag[1:0] is not 0
336 and udebug_pause_pos is not 0,
337 pause only when DEBUG_REG2 is equal to this val
338*/
339static u32 udebug_pause_val;
340
341static u32 udebug_pause_decode_idx;
342
343static u32 decode_pic_begin;
344static uint slice_parse_begin;
345static u32 step;
346static bool is_reset;
347
348#ifdef CONSTRAIN_MAX_BUF_NUM
349static u32 run_ready_max_vf_only_num;
350static u32 run_ready_display_q_num;
 351 /*0: do not check
 352 0xff: use work_pic_num
 353 */
354static u32 run_ready_max_buf_num = 0xff;
355#endif
356
357static u32 dynamic_buf_num_margin = 7;
358static u32 buf_alloc_width;
359static u32 buf_alloc_height;
360
361static u32 max_buf_num = 16;
362static u32 buf_alloc_size;
363/*static u32 re_config_pic_flag;*/
364/*
365 *bit[0]: 0,
366 *bit[1]: 0, always release cma buffer when stop
367 *bit[1]: 1, never release cma buffer when stop
368 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
 369 *do not release cma buffer if blackout is not 1
370 *
371 *bit[2]: 0, when start decoding, check current displayed buffer
372 * (only for buffer decoded by h265) if blackout is 0
373 * 1, do not check current displayed buffer
374 *
375 *bit[3]: 1, if blackout is not 1, do not release current
376 * displayed cma buffer always.
377 */
378/* set to 1 for fast play;
 379 * set to 8 for other cases of "keep last frame"
380 */
381static u32 buffer_mode = 1;
382
383/* buffer_mode_dbg: debug only*/
384static u32 buffer_mode_dbg = 0xffff0000;
385/**/
386/*
 387 *bit[1:0] PB_skip_mode: 0, start decoding from the beginning;
 388 *1, start decoding after the first I picture;
 389 *2, only decode and display error-free pictures;
 390 *3, start decoding and displaying after IDR, etc.
391 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
392 *only for mode 0 and 1.
393 */
394static u32 nal_skip_policy = 2;
395
396/*
397 *bit 0, 1: only display I picture;
398 *bit 1, 1: only decode I picture;
399 */
400static u32 i_only_flag;
401
402/*
403bit 0, fast output first I picture
404*/
405static u32 fast_output_enable = 1;
406
407static u32 frmbase_cont_bitlevel = 0x60;
408
409/*
 410use_cma: 1, use both reserved memory and cma for buffers
4112, only use cma for buffers
412*/
413static u32 use_cma = 2;
414
415#define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
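/*
 * Worked example: AUX_BUF_ALIGN(0x1001) = (0x1001 + 0xf) & ~0xf = 0x1010,
 * i.e. aux buffer addresses/sizes are rounded up to a 16-byte boundary.
 */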
416static u32 prefix_aux_buf_size = (16 * 1024);
417static u32 suffix_aux_buf_size;
418
419static u32 max_decoding_time;
420/*
421 *error handling
422 */
423/*error_handle_policy:
424 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
425 *1, skip error_skip_nal_count nals before error recovery;
426 *bit 1 (valid only when bit0 == 1):
427 *1, wait vps/sps/pps after error recovery;
428 *bit 2 (valid only when bit0 == 0):
429 *0, auto search after error recovery (hevc_recover() called);
430 *1, manual search after error recovery
431 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
432 *
433 *bit 4: 0, set error_mark after reset/recover
434 * 1, do not set error_mark after reset/recover
435 *bit 5: 0, check total lcu for every picture
436 * 1, do not check total lcu
437 *bit 6: 0, do not check head error
438 * 1, check head error
439 *
440 */
441
442static u32 error_handle_policy;
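/*
 * Example: error_handle_policy = 0x40 sets bit 6 only, which enables the
 * header sanity check done in check_head_error() further below.
 */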
443static u32 error_skip_nal_count = 6;
444static u32 error_handle_threshold = 30;
445static u32 error_handle_nal_skip_threshold = 10;
446static u32 error_handle_system_threshold = 30;
447static u32 interlace_enable = 1;
448static u32 fr_hint_status;
449
450 /*
451 *parser_sei_enable:
452 * bit 0, sei;
453 * bit 1, sei_suffix (fill aux buf)
454 * bit 2, fill sei to aux buf (when bit 0 is 1)
455 * bit 8, debug flag
456 */
457static u32 parser_sei_enable;
458#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
459static u32 parser_dolby_vision_enable = 1;
460static u32 dolby_meta_with_el;
461static u32 dolby_el_flush_th = 2;
462#endif
463/* this is only for h265 mmu enable */
464
465static u32 mmu_enable = 1;
466static u32 mmu_enable_force;
467static u32 work_buf_size;
468static unsigned int force_disp_pic_index;
469static unsigned int disp_vframe_valve_level;
470static int pre_decode_buf_level = 0x1000;
471static unsigned int pic_list_debug;
472
473
474#ifdef MULTI_INSTANCE_SUPPORT
475static unsigned int max_decode_instance_num
476 = MAX_DECODE_INSTANCE_NUM;
477static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
478static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
479static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
480static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
481static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
482static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
483static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
484static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
485{1, 1, 1, 1, 1, 1, 1, 1, 1};
486
487#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
488static unsigned char get_idx(struct hevc_state_s *hevc);
489#endif
490
491#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
492static u32 dv_toggle_prov_name;
493
494static u32 dv_debug;
495
496static u32 force_bypass_dvenl;
497#endif
498#endif
499
500
501#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
502#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
503#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
504#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
505#else
506#define get_dbg_flag(hevc) debug
507#define get_dbg_flag2(hevc) debug
508#define is_log_enable(hevc) (log_mask ? 1 : 0)
509#define get_valid_double_write_mode(hevc) double_write_mode
510#define get_buf_alloc_width(hevc) buf_alloc_width
511#define get_buf_alloc_height(hevc) buf_alloc_height
512#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
513#endif
514#define get_buffer_mode(hevc) buffer_mode
515
516
517DEFINE_SPINLOCK(lock);
518struct task_struct *h265_task = NULL;
519#undef DEBUG_REG
520#ifdef DEBUG_REG
521void WRITE_VREG_DBG(unsigned adr, unsigned val)
522{
523 if (debug & H265_DEBUG_REG)
524 pr_info("%s(%x, %x)\n", __func__, adr, val);
525 WRITE_VREG(adr, val);
526}
527
528#undef WRITE_VREG
529#define WRITE_VREG WRITE_VREG_DBG
530#endif
531
532static DEFINE_MUTEX(vh265_mutex);
533
534static DEFINE_MUTEX(vh265_log_mutex);
535
536static struct vdec_info *gvs;
537
538static u32 without_display_mode;
539
540/**************************************************
541 *
542 *h265 buffer management include
543 *
544 ***************************************************
545 */
546enum NalUnitType {
547 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
548 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
549
550 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
551 /* Current name in the spec: TSA_R */
552 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
553
554 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
555 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
556
557 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
558 /* Current name in the spec: RADL_R */
559 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
560
561 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
562 /* Current name in the spec: RASL_R */
563 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
564
565 NAL_UNIT_RESERVED_10,
566 NAL_UNIT_RESERVED_11,
567 NAL_UNIT_RESERVED_12,
568 NAL_UNIT_RESERVED_13,
569 NAL_UNIT_RESERVED_14,
570 NAL_UNIT_RESERVED_15,
571
572 /* Current name in the spec: BLA_W_LP */
573 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
574 /* Current name in the spec: BLA_W_DLP */
575 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
576 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
577 /* Current name in the spec: IDR_W_DLP */
578 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
579 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
580 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
581 NAL_UNIT_RESERVED_22,
582 NAL_UNIT_RESERVED_23,
583
584 NAL_UNIT_RESERVED_24,
585 NAL_UNIT_RESERVED_25,
586 NAL_UNIT_RESERVED_26,
587 NAL_UNIT_RESERVED_27,
588 NAL_UNIT_RESERVED_28,
589 NAL_UNIT_RESERVED_29,
590 NAL_UNIT_RESERVED_30,
591 NAL_UNIT_RESERVED_31,
592
593 NAL_UNIT_VPS, /* 32 */
594 NAL_UNIT_SPS, /* 33 */
595 NAL_UNIT_PPS, /* 34 */
596 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
597 NAL_UNIT_EOS, /* 36 */
598 NAL_UNIT_EOB, /* 37 */
599 NAL_UNIT_FILLER_DATA, /* 38 */
600 NAL_UNIT_SEI, /* 39 Prefix SEI */
601 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
602 NAL_UNIT_RESERVED_41,
603 NAL_UNIT_RESERVED_42,
604 NAL_UNIT_RESERVED_43,
605 NAL_UNIT_RESERVED_44,
606 NAL_UNIT_RESERVED_45,
607 NAL_UNIT_RESERVED_46,
608 NAL_UNIT_RESERVED_47,
609 NAL_UNIT_UNSPECIFIED_48,
610 NAL_UNIT_UNSPECIFIED_49,
611 NAL_UNIT_UNSPECIFIED_50,
612 NAL_UNIT_UNSPECIFIED_51,
613 NAL_UNIT_UNSPECIFIED_52,
614 NAL_UNIT_UNSPECIFIED_53,
615 NAL_UNIT_UNSPECIFIED_54,
616 NAL_UNIT_UNSPECIFIED_55,
617 NAL_UNIT_UNSPECIFIED_56,
618 NAL_UNIT_UNSPECIFIED_57,
619 NAL_UNIT_UNSPECIFIED_58,
620 NAL_UNIT_UNSPECIFIED_59,
621 NAL_UNIT_UNSPECIFIED_60,
622 NAL_UNIT_UNSPECIFIED_61,
623 NAL_UNIT_UNSPECIFIED_62,
624 NAL_UNIT_UNSPECIFIED_63,
625 NAL_UNIT_INVALID,
626};
627
628/* --------------------------------------------------- */
629/* Amrisc Software Interrupt */
630/* --------------------------------------------------- */
631#define AMRISC_STREAM_EMPTY_REQ 0x01
632#define AMRISC_PARSER_REQ 0x02
633#define AMRISC_MAIN_REQ 0x04
634
635/* --------------------------------------------------- */
636/* HEVC_DEC_STATUS define */
637/* --------------------------------------------------- */
638#define HEVC_DEC_IDLE 0x0
639#define HEVC_NAL_UNIT_VPS 0x1
640#define HEVC_NAL_UNIT_SPS 0x2
641#define HEVC_NAL_UNIT_PPS 0x3
642#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
643#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
644#define HEVC_SLICE_DECODING 0x6
645#define HEVC_NAL_UNIT_SEI 0x7
646#define HEVC_SLICE_SEGMENT_DONE 0x8
647#define HEVC_NAL_SEARCH_DONE 0x9
648#define HEVC_DECPIC_DATA_DONE 0xa
649#define HEVC_DECPIC_DATA_ERROR 0xb
650#define HEVC_SEI_DAT 0xc
651#define HEVC_SEI_DAT_DONE 0xd
652#define HEVC_NAL_DECODE_DONE 0xe
653#define HEVC_OVER_DECODE 0xf
654
655#define HEVC_DATA_REQUEST 0x12
656
657#define HEVC_DECODE_BUFEMPTY 0x20
658#define HEVC_DECODE_TIMEOUT 0x21
659#define HEVC_SEARCH_BUFEMPTY 0x22
660#define HEVC_DECODE_OVER_SIZE 0x23
661#define HEVC_DECODE_BUFEMPTY2 0x24
662#define HEVC_FIND_NEXT_PIC_NAL 0x50
663#define HEVC_FIND_NEXT_DVEL_NAL 0x51
664
665#define HEVC_DUMP_LMEM 0x30
666
667#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
668#define HEVC_DISCARD_NAL 0xf0
669#define HEVC_ACTION_DEC_CONT 0xfd
670#define HEVC_ACTION_ERROR 0xfe
671#define HEVC_ACTION_DONE 0xff
672
673/* --------------------------------------------------- */
674/* Include "parser_cmd.h" */
675/* --------------------------------------------------- */
676#define PARSER_CMD_SKIP_CFG_0 0x0000090b
677
678#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
679
680#define PARSER_CMD_SKIP_CFG_2 0x001b1910
681
682#define PARSER_CMD_NUMBER 37
683
684/**************************************************
685 *
686 *h265 buffer management
687 *
688 ***************************************************
689 */
690/* #define BUFFER_MGR_ONLY */
691/* #define CONFIG_HEVC_CLK_FORCED_ON */
692/* #define ENABLE_SWAP_TEST */
693#define MCRCC_ENABLE
694#define INVALID_POC 0x80000000
695
696#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
697#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
698#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
699#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
700#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
701#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
702#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
703#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
704#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
705#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
706#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
707#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
708#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
709#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
710#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
711#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
712#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
713#ifdef ENABLE_SWAP_TEST
714#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
715#endif
716
717/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
718/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
719#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
720 /*do not define ENABLE_SWAP_TEST*/
721#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
722#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
723
724#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
725#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
726/*
727 *ucode parser/search control
728 *bit 0: 0, header auto parse; 1, header manual parse
 729 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
730 *bit [3:2]: valid when bit1==0;
731 *0, auto skip nal before first vps/sps/pps/idr;
732 *1, auto skip nal before first vps/sps/pps
733 *2, auto skip nal before first vps/sps/pps,
734 * and not decode until the first I slice (with slice address of 0)
735 *
736 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
737 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
738 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
739 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
740 *bit [17]: for NAL_SEI when bit0 is 0:
741 * 0, do not parse/fetch SEI in ucode;
742 * 1, parse/fetch SEI in ucode
743 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
744 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
 745 * 1, fetch NAL_SEI_SUFFIX data to aux buf
746 *bit [19]:
747 * 0, parse NAL_SEI in ucode
748 * 1, fetch NAL_SEI to aux buf
749 *bit [20]: for DOLBY_VISION_META
750 * 0, do not fetch DOLBY_VISION_META to aux buf
751 * 1, fetch DOLBY_VISION_META to aux buf
752 */
753#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
754 /*read only*/
755#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
756 /*
757 [15 : 8] rps_set_id
758 [7 : 0] start_decoding_flag
759 */
760#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
761 /*set before start decoder*/
762#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
763#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
764#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
765
766#define DECODE_MODE_SINGLE 0x0
767#define DECODE_MODE_MULTI_FRAMEBASE 0x1
768#define DECODE_MODE_MULTI_STREAMBASE 0x2
769#define DECODE_MODE_MULTI_DVBAL 0x3
770#define DECODE_MODE_MULTI_DVENL 0x4
771
772#define MAX_INT 0x7FFFFFFF
773
774#define RPM_BEGIN 0x100
775#define modification_list_cur 0x148
776#define RPM_END 0x180
777
778#define RPS_USED_BIT 14
779/* MISC_FLAG0 */
780#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
781#define PCM_ENABLE_FLAG_BIT 1
782#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
783#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
784#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
785#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
786#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
787#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
788#define SLICE_SAO_LUMA_FLAG_BIT 8
789#define SLICE_SAO_CHROMA_FLAG_BIT 9
790#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
791
792union param_u {
793 struct {
794 unsigned short data[RPM_END - RPM_BEGIN];
795 } l;
796 struct {
797 /* from ucode lmem, do not change this struct */
798 unsigned short CUR_RPS[0x10];
799 unsigned short num_ref_idx_l0_active;
800 unsigned short num_ref_idx_l1_active;
801 unsigned short slice_type;
802 unsigned short slice_temporal_mvp_enable_flag;
803 unsigned short dependent_slice_segment_flag;
804 unsigned short slice_segment_address;
805 unsigned short num_title_rows_minus1;
806 unsigned short pic_width_in_luma_samples;
807 unsigned short pic_height_in_luma_samples;
808 unsigned short log2_min_coding_block_size_minus3;
809 unsigned short log2_diff_max_min_coding_block_size;
810 unsigned short log2_max_pic_order_cnt_lsb_minus4;
811 unsigned short POClsb;
812 unsigned short collocated_from_l0_flag;
813 unsigned short collocated_ref_idx;
814 unsigned short log2_parallel_merge_level;
815 unsigned short five_minus_max_num_merge_cand;
816 unsigned short sps_num_reorder_pics_0;
817 unsigned short modification_flag;
818 unsigned short tiles_enabled_flag;
819 unsigned short num_tile_columns_minus1;
820 unsigned short num_tile_rows_minus1;
821 unsigned short tile_width[8];
822 unsigned short tile_height[8];
823 unsigned short misc_flag0;
824 unsigned short pps_beta_offset_div2;
825 unsigned short pps_tc_offset_div2;
826 unsigned short slice_beta_offset_div2;
827 unsigned short slice_tc_offset_div2;
828 unsigned short pps_cb_qp_offset;
829 unsigned short pps_cr_qp_offset;
830 unsigned short first_slice_segment_in_pic_flag;
831 unsigned short m_temporalId;
832 unsigned short m_nalUnitType;
833
834 unsigned short vui_num_units_in_tick_hi;
835 unsigned short vui_num_units_in_tick_lo;
836 unsigned short vui_time_scale_hi;
837 unsigned short vui_time_scale_lo;
838 unsigned short bit_depth;
839 unsigned short profile_etc;
840 unsigned short sei_frame_field_info;
841 unsigned short video_signal_type;
842 unsigned short modification_list[0x20];
843 unsigned short conformance_window_flag;
844 unsigned short conf_win_left_offset;
845 unsigned short conf_win_right_offset;
846 unsigned short conf_win_top_offset;
847 unsigned short conf_win_bottom_offset;
848 unsigned short chroma_format_idc;
849 unsigned short color_description;
850 unsigned short aspect_ratio_idc;
851 unsigned short sar_width;
852 unsigned short sar_height;
853 unsigned short sps_max_dec_pic_buffering_minus1_0;
854 } p;
855};
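/*
 * Usage sketch (illustrative only): the decoder firmware fills the rpm
 * buffer with RPM_END - RPM_BEGIN 16-bit words; the same data can be read
 * either through the raw .l.data[] array or through the named .p overlay,
 * for example:
 *
 *	union param_u *params = &hevc->param;
 *	int pic_w = params->p.pic_width_in_luma_samples;
 *	int pic_h = params->p.pic_height_in_luma_samples;
 */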
856
857#define RPM_BUF_SIZE (0x80*2)
858/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
859#define LMEM_BUF_SIZE (0x500 * 2)
860
861struct buff_s {
862 u32 buf_start;
863 u32 buf_size;
864 u32 buf_end;
865};
866
867struct BuffInfo_s {
868 u32 max_width;
869 u32 max_height;
870 unsigned int start_adr;
871 unsigned int end_adr;
872 struct buff_s ipp;
873 struct buff_s sao_abv;
874 struct buff_s sao_vb;
875 struct buff_s short_term_rps;
876 struct buff_s vps;
877 struct buff_s sps;
878 struct buff_s pps;
879 struct buff_s sao_up;
880 struct buff_s swap_buf;
881 struct buff_s swap_buf2;
882 struct buff_s scalelut;
883 struct buff_s dblk_para;
884 struct buff_s dblk_data;
885 struct buff_s dblk_data2;
886 struct buff_s mmu_vbh;
887 struct buff_s cm_header;
888 struct buff_s mpred_above;
889#ifdef MV_USE_FIXED_BUF
890 struct buff_s mpred_mv;
891#endif
892 struct buff_s rpm;
893 struct buff_s lmem;
894};
895#define WORK_BUF_SPEC_NUM 3
896static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
897 {
898 /* 8M bytes */
899 .max_width = 1920,
900 .max_height = 1088,
901 .ipp = {
902 /* IPP work space calculation :
903 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
904 */
905 .buf_size = 0x4000,
906 },
907 .sao_abv = {
908 .buf_size = 0x30000,
909 },
910 .sao_vb = {
911 .buf_size = 0x30000,
912 },
913 .short_term_rps = {
914 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
915 * total 64x16x2 = 2048 bytes (0x800)
916 */
917 .buf_size = 0x800,
918 },
919 .vps = {
920 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
921 * total 0x0800 bytes
922 */
923 .buf_size = 0x800,
924 },
925 .sps = {
926 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
927 * total 0x0800 bytes
928 */
929 .buf_size = 0x800,
930 },
931 .pps = {
932 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
933 * total 0x2000 bytes
934 */
935 .buf_size = 0x2000,
936 },
937 .sao_up = {
938 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
939 * each has 16 bytes total 0x2800 bytes
940 */
941 .buf_size = 0x2800,
942 },
943 .swap_buf = {
944 /* 256cyclex64bit = 2K bytes 0x800
945 * (only 144 cycles valid)
946 */
947 .buf_size = 0x800,
948 },
949 .swap_buf2 = {
950 .buf_size = 0x800,
951 },
952 .scalelut = {
953 /* support up to 32 SCALELUT 1024x32 =
954 * 32Kbytes (0x8000)
955 */
956 .buf_size = 0x8000,
957 },
958 .dblk_para = {
959#ifdef SUPPORT_10BIT
960 .buf_size = 0x40000,
961#else
962 /* DBLK -> Max 256(4096/16) LCU, each para
963 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
964 */
965 .buf_size = 0x20000,
966#endif
967 },
968 .dblk_data = {
969 .buf_size = 0x40000,
970 },
971 .dblk_data2 = {
972 .buf_size = 0x40000,
973 }, /*dblk data for adapter*/
974 .mmu_vbh = {
975 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
976 },
977#if 0
978 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
979 .buf_size = MMU_COMPRESS_HEADER_SIZE *
980 (MAX_REF_PIC_NUM + 1),
981 },
982#endif
983 .mpred_above = {
984 .buf_size = 0x8000,
985 },
986#ifdef MV_USE_FIXED_BUF
987 .mpred_mv = {/* 1080p, 0x40000 per buffer */
988 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
989 },
990#endif
991 .rpm = {
992 .buf_size = RPM_BUF_SIZE,
993 },
994 .lmem = {
995 .buf_size = 0x500 * 2,
996 }
997 },
998 {
999 .max_width = 4096,
1000 .max_height = 2048,
1001 .ipp = {
1002 /* IPP work space calculation :
1003 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1004 */
1005 .buf_size = 0x4000,
1006 },
1007 .sao_abv = {
1008 .buf_size = 0x30000,
1009 },
1010 .sao_vb = {
1011 .buf_size = 0x30000,
1012 },
1013 .short_term_rps = {
1014 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1015 * total 64x16x2 = 2048 bytes (0x800)
1016 */
1017 .buf_size = 0x800,
1018 },
1019 .vps = {
1020 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1021 * total 0x0800 bytes
1022 */
1023 .buf_size = 0x800,
1024 },
1025 .sps = {
1026 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1027 * total 0x0800 bytes
1028 */
1029 .buf_size = 0x800,
1030 },
1031 .pps = {
1032 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1033 * total 0x2000 bytes
1034 */
1035 .buf_size = 0x2000,
1036 },
1037 .sao_up = {
1038 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1039 * each has 16 bytes total 0x2800 bytes
1040 */
1041 .buf_size = 0x2800,
1042 },
1043 .swap_buf = {
1044 /* 256cyclex64bit = 2K bytes 0x800
1045 * (only 144 cycles valid)
1046 */
1047 .buf_size = 0x800,
1048 },
1049 .swap_buf2 = {
1050 .buf_size = 0x800,
1051 },
1052 .scalelut = {
1053 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1054 * (0x8000)
1055 */
1056 .buf_size = 0x8000,
1057 },
1058 .dblk_para = {
1059 /* DBLK -> Max 256(4096/16) LCU, each para
1060 * 512bytes(total:0x20000),
1061 * data 1024bytes(total:0x40000)
1062 */
1063 .buf_size = 0x20000,
1064 },
1065 .dblk_data = {
1066 .buf_size = 0x80000,
1067 },
1068 .dblk_data2 = {
1069 .buf_size = 0x80000,
1070 }, /*dblk data for adapter*/
1071 .mmu_vbh = {
1072 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1073 },
1074#if 0
1075 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1076 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1077 (MAX_REF_PIC_NUM + 1),
1078 },
1079#endif
1080 .mpred_above = {
1081 .buf_size = 0x8000,
1082 },
1083#ifdef MV_USE_FIXED_BUF
1084 .mpred_mv = {
1085 /* .buf_size = 0x100000*16,
1086 //4k2k , 0x100000 per buffer */
1087 /* 4096x2304 , 0x120000 per buffer */
1088 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1089 },
1090#endif
1091 .rpm = {
1092 .buf_size = RPM_BUF_SIZE,
1093 },
1094 .lmem = {
1095 .buf_size = 0x500 * 2,
1096 }
1097 },
1098
1099 {
1100 .max_width = 4096*2,
1101 .max_height = 2048*2,
1102 .ipp = {
1103 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1104 .buf_size = 0x4000*2,
1105 },
1106 .sao_abv = {
1107 .buf_size = 0x30000*2,
1108 },
1109 .sao_vb = {
1110 .buf_size = 0x30000*2,
1111 },
1112 .short_term_rps = {
1113 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1114 .buf_size = 0x800,
1115 },
1116 .vps = {
1117 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1118 .buf_size = 0x800,
1119 },
1120 .sps = {
1121 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1122 .buf_size = 0x800,
1123 },
1124 .pps = {
1125 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1126 .buf_size = 0x2000,
1127 },
1128 .sao_up = {
1129 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1130 .buf_size = 0x2800*2,
1131 },
1132 .swap_buf = {
1133 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1134 .buf_size = 0x800,
1135 },
1136 .swap_buf2 = {
1137 .buf_size = 0x800,
1138 },
1139 .scalelut = {
1140 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1141 .buf_size = 0x8000*2,
1142 },
1143 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1144 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1145 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1146 .mmu_vbh = {
1147 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1148 },
1149#if 0
1150 .cm_header = {
1151 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1152 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1153 },
1154#endif
1155 .mpred_above = {
1156 .buf_size = 0x8000*2,
1157 },
1158#ifdef MV_USE_FIXED_BUF
1159 .mpred_mv = {
1160 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1161 },
1162#endif
1163 .rpm = {
1164 .buf_size = RPM_BUF_SIZE,
1165 },
1166 .lmem = {
1167 .buf_size = 0x500 * 2,
1168 },
1169 }
1170};
1171
1172static void init_buff_spec(struct hevc_state_s *hevc,
1173 struct BuffInfo_s *buf_spec)
1174{
1175 buf_spec->ipp.buf_start = buf_spec->start_adr;
1176 buf_spec->sao_abv.buf_start =
1177 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1178
1179 buf_spec->sao_vb.buf_start =
1180 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1181 buf_spec->short_term_rps.buf_start =
1182 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1183 buf_spec->vps.buf_start =
1184 buf_spec->short_term_rps.buf_start +
1185 buf_spec->short_term_rps.buf_size;
1186 buf_spec->sps.buf_start =
1187 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1188 buf_spec->pps.buf_start =
1189 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1190 buf_spec->sao_up.buf_start =
1191 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1192 buf_spec->swap_buf.buf_start =
1193 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1194 buf_spec->swap_buf2.buf_start =
1195 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1196 buf_spec->scalelut.buf_start =
1197 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1198 buf_spec->dblk_para.buf_start =
1199 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1200 buf_spec->dblk_data.buf_start =
1201 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1202 buf_spec->dblk_data2.buf_start =
1203 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1204 buf_spec->mmu_vbh.buf_start =
1205 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1206 buf_spec->mpred_above.buf_start =
1207 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1208#ifdef MV_USE_FIXED_BUF
1209 buf_spec->mpred_mv.buf_start =
1210 buf_spec->mpred_above.buf_start +
1211 buf_spec->mpred_above.buf_size;
1212
1213 buf_spec->rpm.buf_start =
1214 buf_spec->mpred_mv.buf_start +
1215 buf_spec->mpred_mv.buf_size;
1216#else
1217 buf_spec->rpm.buf_start =
1218 buf_spec->mpred_above.buf_start +
1219 buf_spec->mpred_above.buf_size;
1220#endif
1221 buf_spec->lmem.buf_start =
1222 buf_spec->rpm.buf_start +
1223 buf_spec->rpm.buf_size;
1224 buf_spec->end_adr =
1225 buf_spec->lmem.buf_start +
1226 buf_spec->lmem.buf_size;
1227
1228 if (hevc && get_dbg_flag2(hevc)) {
1229 hevc_print(hevc, 0,
1230 "%s workspace (%x %x) size = %x\n", __func__,
1231 buf_spec->start_adr, buf_spec->end_adr,
1232 buf_spec->end_adr - buf_spec->start_adr);
1233
1234 hevc_print(hevc, 0,
1235 "ipp.buf_start :%x\n",
1236 buf_spec->ipp.buf_start);
1237 hevc_print(hevc, 0,
1238 "sao_abv.buf_start :%x\n",
1239 buf_spec->sao_abv.buf_start);
1240 hevc_print(hevc, 0,
1241 "sao_vb.buf_start :%x\n",
1242 buf_spec->sao_vb.buf_start);
1243 hevc_print(hevc, 0,
1244 "short_term_rps.buf_start :%x\n",
1245 buf_spec->short_term_rps.buf_start);
1246 hevc_print(hevc, 0,
1247 "vps.buf_start :%x\n",
1248 buf_spec->vps.buf_start);
1249 hevc_print(hevc, 0,
1250 "sps.buf_start :%x\n",
1251 buf_spec->sps.buf_start);
1252 hevc_print(hevc, 0,
1253 "pps.buf_start :%x\n",
1254 buf_spec->pps.buf_start);
1255 hevc_print(hevc, 0,
1256 "sao_up.buf_start :%x\n",
1257 buf_spec->sao_up.buf_start);
1258 hevc_print(hevc, 0,
1259 "swap_buf.buf_start :%x\n",
1260 buf_spec->swap_buf.buf_start);
1261 hevc_print(hevc, 0,
1262 "swap_buf2.buf_start :%x\n",
1263 buf_spec->swap_buf2.buf_start);
1264 hevc_print(hevc, 0,
1265 "scalelut.buf_start :%x\n",
1266 buf_spec->scalelut.buf_start);
1267 hevc_print(hevc, 0,
1268 "dblk_para.buf_start :%x\n",
1269 buf_spec->dblk_para.buf_start);
1270 hevc_print(hevc, 0,
1271 "dblk_data.buf_start :%x\n",
1272 buf_spec->dblk_data.buf_start);
1273 hevc_print(hevc, 0,
1274 "dblk_data2.buf_start :%x\n",
1275 buf_spec->dblk_data2.buf_start);
1276 hevc_print(hevc, 0,
1277 "mpred_above.buf_start :%x\n",
1278 buf_spec->mpred_above.buf_start);
1279#ifdef MV_USE_FIXED_BUF
1280 hevc_print(hevc, 0,
1281 "mpred_mv.buf_start :%x\n",
1282 buf_spec->mpred_mv.buf_start);
1283#endif
1284 if ((get_dbg_flag2(hevc)
1285 &
1286 H265_DEBUG_SEND_PARAM_WITH_REG)
1287 == 0) {
1288 hevc_print(hevc, 0,
1289 "rpm.buf_start :%x\n",
1290 buf_spec->rpm.buf_start);
1291 }
1292 }
1293
1294}
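/*
 * Usage sketch (an assumption based on how the specs above are consumed
 * later in this driver, not a verbatim quote of that code): pick the
 * matching amvh265_workbuff_spec[] entry, copy it into the per-instance
 * store, point start_adr at the allocated workspace and lay the
 * sub-buffers out back to back:
 *
 *	hevc->work_space_buf_store = amvh265_workbuff_spec[1];
 *	hevc->work_space_buf = &hevc->work_space_buf_store;
 *	hevc->work_space_buf->start_adr = hevc->buf_start;
 *	init_buff_spec(hevc, hevc->work_space_buf);
 */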
1295
1296enum SliceType {
1297 B_SLICE,
1298 P_SLICE,
1299 I_SLICE
1300};
1301
1302/*USE_BUF_BLOCK*/
1303struct BUF_s {
1304 unsigned long start_adr;
1305 unsigned int size;
1306 int used_flag;
1307 unsigned int y_size;
1308 ulong v4l_ref_buf_addr;
1309} /*BUF_t */;
1310
1311/* level 6, 6.1 maximum slice number is 800; other is 200 */
1312#define MAX_SLICE_NUM 800
1313struct PIC_s {
1314 int index;
1315 int scatter_alloc;
1316 int BUF_index;
1317 int mv_buf_index;
1318 int POC;
1319 int decode_idx;
1320 int slice_type;
1321 int RefNum_L0;
1322 int RefNum_L1;
1323 int num_reorder_pic;
1324 int stream_offset;
1325 unsigned char referenced;
1326 unsigned char output_mark;
1327 unsigned char recon_mark;
1328 unsigned char output_ready;
1329 unsigned char error_mark;
 1330 //dis_mark = 0: discard mark, dis_mark = 1: no discard mark
1331 unsigned char dis_mark;
1332 /**/ int slice_idx;
1333 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1334 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1335 /*buffer */
1336 unsigned int header_adr;
1337#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1338 unsigned char dv_enhance_exist;
1339#endif
1340 char *aux_data_buf;
1341 int aux_data_size;
1342 unsigned long cma_alloc_addr;
1343 struct page *alloc_pages;
1344 unsigned int mpred_mv_wr_start_addr;
1345 unsigned int mc_y_adr;
1346 unsigned int mc_u_v_adr;
1347#ifdef SUPPORT_10BIT
1348 /*unsigned int comp_body_size;*/
1349 unsigned int dw_y_adr;
1350 unsigned int dw_u_v_adr;
1351#endif
1352 int mc_canvas_y;
1353 int mc_canvas_u_v;
1354 int width;
1355 int height;
1356
1357 int y_canvas_index;
1358 int uv_canvas_index;
1359#ifdef MULTI_INSTANCE_SUPPORT
1360 struct canvas_config_s canvas_config[2];
1361#endif
1362#ifdef SUPPORT_10BIT
1363 int mem_saving_mode;
1364 u32 bit_depth_luma;
1365 u32 bit_depth_chroma;
1366#endif
1367#ifdef LOSLESS_COMPRESS_MODE
1368 unsigned int losless_comp_body_size;
1369#endif
1370 unsigned char pic_struct;
1371 int vf_ref;
1372
1373 u32 pts;
1374 u64 pts64;
1375 u64 timestamp;
1376
1377 u32 aspect_ratio_idc;
1378 u32 sar_width;
1379 u32 sar_height;
1380 u32 double_write_mode;
1381 u32 video_signal_type;
1382 unsigned short conformance_window_flag;
1383 unsigned short conf_win_left_offset;
1384 unsigned short conf_win_right_offset;
1385 unsigned short conf_win_top_offset;
1386 unsigned short conf_win_bottom_offset;
1387 unsigned short chroma_format_idc;
1388
 1389 /* picture qos information */
1390 int max_qp;
1391 int avg_qp;
1392 int min_qp;
1393 int max_skip;
1394 int avg_skip;
1395 int min_skip;
1396 int max_mv;
1397 int min_mv;
1398 int avg_mv;
1399} /*PIC_t */;
1400
1401#define MAX_TILE_COL_NUM 10
1402#define MAX_TILE_ROW_NUM 20
1403struct tile_s {
1404 int width;
1405 int height;
1406 int start_cu_x;
1407 int start_cu_y;
1408
1409 unsigned int sao_vb_start_addr;
1410 unsigned int sao_abv_start_addr;
1411};
1412
1413#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1414#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1415#define SEI_HDR10PLUS_MASK 0x00000004
1416
1417#define VF_POOL_SIZE 32
1418
1419#ifdef MULTI_INSTANCE_SUPPORT
1420#define DEC_RESULT_NONE 0
1421#define DEC_RESULT_DONE 1
1422#define DEC_RESULT_AGAIN 2
1423#define DEC_RESULT_CONFIG_PARAM 3
1424#define DEC_RESULT_ERROR 4
1425#define DEC_INIT_PICLIST 5
1426#define DEC_UNINIT_PICLIST 6
1427#define DEC_RESULT_GET_DATA 7
1428#define DEC_RESULT_GET_DATA_RETRY 8
1429#define DEC_RESULT_EOS 9
1430#define DEC_RESULT_FORCE_EXIT 10
1431#define DEC_RESULT_FREE_CANVAS 11
1432
1433static void vh265_work(struct work_struct *work);
1434static void vh265_timeout_work(struct work_struct *work);
1435static void vh265_notify_work(struct work_struct *work);
1436
1437#endif
1438
1439struct debug_log_s {
1440 struct list_head list;
1441 uint8_t data; /*will alloc more size*/
1442};
1443
1444struct hevc_state_s {
1445#ifdef MULTI_INSTANCE_SUPPORT
1446 struct platform_device *platform_dev;
1447 void (*vdec_cb)(struct vdec_s *, void *);
1448 void *vdec_cb_arg;
1449 struct vframe_chunk_s *chunk;
1450 int dec_result;
1451 struct work_struct work;
1452 struct work_struct timeout_work;
1453 struct work_struct notify_work;
1454 struct work_struct set_clk_work;
1455 /* timeout handle */
1456 unsigned long int start_process_time;
1457 unsigned int last_lcu_idx;
1458 unsigned int decode_timeout_count;
1459 unsigned int timeout_num;
1460#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1461 unsigned char switch_dvlayer_flag;
1462 unsigned char no_switch_dvlayer_count;
1463 unsigned char bypass_dvenl_enable;
1464 unsigned char bypass_dvenl;
1465#endif
1466 unsigned char start_parser_type;
1467 /*start_decoding_flag:
1468 vps/pps/sps/idr info from ucode*/
1469 unsigned char start_decoding_flag;
1470 unsigned char rps_set_id;
1471 unsigned char eos;
1472 int pic_decoded_lcu_idx;
1473 u8 over_decode;
1474 u8 empty_flag;
1475#endif
1476 struct vframe_s vframe_dummy;
1477 char *provider_name;
1478 int index;
1479 struct device *cma_dev;
1480 unsigned char m_ins_flag;
1481 unsigned char dolby_enhance_flag;
1482 unsigned long buf_start;
1483 u32 buf_size;
1484 u32 mv_buf_size;
1485
1486 struct BuffInfo_s work_space_buf_store;
1487 struct BuffInfo_s *work_space_buf;
1488
1489 u8 aux_data_dirty;
1490 u32 prefix_aux_size;
1491 u32 suffix_aux_size;
1492 void *aux_addr;
1493 void *rpm_addr;
1494 void *lmem_addr;
1495 dma_addr_t aux_phy_addr;
1496 dma_addr_t rpm_phy_addr;
1497 dma_addr_t lmem_phy_addr;
1498
1499 unsigned int pic_list_init_flag;
1500 unsigned int use_cma_flag;
1501
1502 unsigned short *rpm_ptr;
1503 unsigned short *lmem_ptr;
1504 unsigned short *debug_ptr;
1505 int debug_ptr_size;
1506 int pic_w;
1507 int pic_h;
1508 int lcu_x_num;
1509 int lcu_y_num;
1510 int lcu_total;
1511 int lcu_size;
1512 int lcu_size_log2;
1513 int lcu_x_num_pre;
1514 int lcu_y_num_pre;
1515 int first_pic_after_recover;
1516
1517 int num_tile_col;
1518 int num_tile_row;
1519 int tile_enabled;
1520 int tile_x;
1521 int tile_y;
1522 int tile_y_x;
1523 int tile_start_lcu_x;
1524 int tile_start_lcu_y;
1525 int tile_width_lcu;
1526 int tile_height_lcu;
1527
1528 int slice_type;
1529 unsigned int slice_addr;
1530 unsigned int slice_segment_addr;
1531
1532 unsigned char interlace_flag;
1533 unsigned char curr_pic_struct;
1534 unsigned char frame_field_info_present_flag;
1535
1536 unsigned short sps_num_reorder_pics_0;
1537 unsigned short misc_flag0;
1538 int m_temporalId;
1539 int m_nalUnitType;
1540 int TMVPFlag;
1541 int isNextSliceSegment;
1542 int LDCFlag;
1543 int m_pocRandomAccess;
1544 int plevel;
1545 int MaxNumMergeCand;
1546
1547 int new_pic;
1548 int new_tile;
1549 int curr_POC;
1550 int iPrevPOC;
1551#ifdef MULTI_INSTANCE_SUPPORT
1552 int decoded_poc;
1553 struct PIC_s *decoding_pic;
1554#endif
1555 int iPrevTid0POC;
1556 int list_no;
1557 int RefNum_L0;
1558 int RefNum_L1;
1559 int ColFromL0Flag;
1560 int LongTerm_Curr;
1561 int LongTerm_Col;
1562 int Col_POC;
1563 int LongTerm_Ref;
1564#ifdef MULTI_INSTANCE_SUPPORT
1565 int m_pocRandomAccess_bak;
1566 int curr_POC_bak;
1567 int iPrevPOC_bak;
1568 int iPrevTid0POC_bak;
1569 unsigned char start_parser_type_bak;
1570 unsigned char start_decoding_flag_bak;
1571 unsigned char rps_set_id_bak;
1572 int pic_decoded_lcu_idx_bak;
1573 int decode_idx_bak;
1574#endif
1575 struct PIC_s *cur_pic;
1576 struct PIC_s *col_pic;
1577 int skip_flag;
1578 int decode_idx;
1579 int slice_idx;
1580 unsigned char have_vps;
1581 unsigned char have_sps;
1582 unsigned char have_pps;
1583 unsigned char have_valid_start_slice;
1584 unsigned char wait_buf;
1585 unsigned char error_flag;
1586 unsigned int error_skip_nal_count;
1587 long used_4k_num;
1588
1589 unsigned char
1590 ignore_bufmgr_error; /* bit 0, for decoding;
1591 bit 1, for displaying
1592 bit 1 must be set if bit 0 is 1*/
1593 int PB_skip_mode;
1594 int PB_skip_count_after_decoding;
1595#ifdef SUPPORT_10BIT
1596 int mem_saving_mode;
1597#endif
1598#ifdef LOSLESS_COMPRESS_MODE
1599 unsigned int losless_comp_body_size;
1600#endif
1601 int pts_mode;
1602 int last_lookup_pts;
1603 int last_pts;
1604 u64 last_lookup_pts_us64;
1605 u64 last_pts_us64;
1606 u32 shift_byte_count_lo;
1607 u32 shift_byte_count_hi;
1608 int pts_mode_switching_count;
1609 int pts_mode_recovery_count;
1610
1611 int pic_num;
1612
1613 /**/
1614 union param_u param;
1615
1616 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1617
1618 struct timer_list timer;
1619 struct BUF_s m_BUF[BUF_POOL_SIZE];
1620 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1621 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1622
1623 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1624 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1625 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1626 struct vframe_s vfpool[VF_POOL_SIZE];
1627
1628 u32 stat;
1629 u32 frame_width;
1630 u32 frame_height;
1631 u32 frame_dur;
1632 u32 frame_ar;
1633 u32 bit_depth_luma;
1634 u32 bit_depth_chroma;
1635 u32 video_signal_type;
1636 u32 video_signal_type_debug;
1637 u32 saved_resolution;
1638 bool get_frame_dur;
1639 u32 error_watchdog_count;
1640 u32 error_skip_nal_wt_cnt;
1641 u32 error_system_watchdog_count;
1642
1643#ifdef DEBUG_PTS
1644 unsigned long pts_missed;
1645 unsigned long pts_hit;
1646#endif
1647 struct dec_sysinfo vh265_amstream_dec_info;
1648 unsigned char init_flag;
1649 unsigned char first_sc_checked;
1650 unsigned char uninit_list;
1651 u32 start_decoding_time;
1652
1653 int show_frame_num;
1654#ifdef USE_UNINIT_SEMA
1655 struct semaphore h265_uninit_done_sema;
1656#endif
1657 int fatal_error;
1658
1659
1660 u32 sei_present_flag;
1661 void *frame_mmu_map_addr;
1662 dma_addr_t frame_mmu_map_phy_addr;
1663 unsigned int mmu_mc_buf_start;
1664 unsigned int mmu_mc_buf_end;
1665 unsigned int mmu_mc_start_4k_adr;
1666 void *mmu_box;
1667 void *bmmu_box;
1668 int mmu_enable;
1669
1670 unsigned int dec_status;
1671
1672 /* data for SEI_MASTER_DISPLAY_COLOR */
1673 unsigned int primaries[3][2];
1674 unsigned int white_point[2];
1675 unsigned int luminance[2];
1676 /* data for SEI_CONTENT_LIGHT_LEVEL */
1677 unsigned int content_light_level[2];
1678
1679 struct PIC_s *pre_top_pic;
1680 struct PIC_s *pre_bot_pic;
1681
1682#ifdef MULTI_INSTANCE_SUPPORT
1683 int double_write_mode;
1684 int dynamic_buf_num_margin;
1685 int start_action;
1686 int save_buffer_mode;
1687#endif
1688 u32 i_only;
1689 struct list_head log_list;
1690 u32 ucode_pause_pos;
1691 u32 start_shift_bytes;
1692
1693 u32 vf_pre_count;
1694 u32 vf_get_count;
1695 u32 vf_put_count;
1696#ifdef SWAP_HEVC_UCODE
1697 dma_addr_t mc_dma_handle;
1698 void *mc_cpu_addr;
1699 int swap_size;
1700 ulong swap_addr;
1701#endif
1702#ifdef DETREFILL_ENABLE
1703 dma_addr_t detbuf_adr;
1704 u16 *detbuf_adr_virt;
1705 u8 delrefill_check;
1706#endif
1707 u8 head_error_flag;
1708 int valve_count;
1709 struct firmware_s *fw;
1710 int max_pic_w;
1711 int max_pic_h;
1712#ifdef AGAIN_HAS_THRESHOLD
1713 u8 next_again_flag;
1714 u32 pre_parser_wr_ptr;
1715#endif
1716 u32 ratio_control;
1717 u32 first_pic_flag;
1718 u32 decode_size;
1719 struct mutex chunks_mutex;
1720 int need_cache_size;
1721 u64 sc_start_time;
1722 u32 skip_first_nal;
1723 bool is_swap;
1724 bool is_4k;
1725 int frameinfo_enable;
1726 struct vframe_qos_s vframe_qos;
1727 bool is_used_v4l;
1728 void *v4l2_ctx;
1729 bool v4l_params_parsed;
1730} /*hevc_stru_t */;
1731
1732#ifdef AGAIN_HAS_THRESHOLD
1733u32 again_threshold;
1734#endif
1735#ifdef SEND_LMEM_WITH_RPM
1736#define get_lmem_params(hevc, ladr) \
1737 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
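/*
 * Index arithmetic explained (illustrative): for ladr = 4*n + k (k = 0..3)
 * the expression above evaluates to 4*n + (3 - k), so the four 16-bit words
 * of each 64-bit lmem beat are read back in reverse order, e.g. ladr 0x41
 * maps to lmem_ptr[0x42]. Presumably this matches the halfword order the
 * HEVC lmem dump DMA uses (an assumption, not stated in this file).
 */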
1738
1739
1740static int get_frame_mmu_map_size(void)
1741{
1742 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1743 return (MAX_FRAME_8K_NUM * 4);
1744
1745 return (MAX_FRAME_4K_NUM * 4);
1746}
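/*
 * Worked sizes: MAX_FRAME_4K_NUM * 4 = 0x1200 * 4 = 18432 bytes on older
 * SoCs and MAX_FRAME_8K_NUM * 4 = 73728 bytes on SM1 and later, i.e.
 * (presumably) one 4-byte map entry per 4KB page of the compressed frame.
 */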
1747
1748static int is_oversize(int w, int h)
1749{
1750 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1751 MAX_SIZE_8K : MAX_SIZE_4K;
1752
1753 if (w < 0 || h < 0)
1754 return true;
1755
1756 if (h != 0 && (w > max / h))
1757 return true;
1758
1759 return false;
1760}
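/*
 * Example (pre-SM1, max = MAX_SIZE_4K = 4096 * 2304): is_oversize(4096, 2304)
 * is false because 4096 > (4096*2304)/2304 does not hold, while
 * is_oversize(4096, 2305) is true because the integer division yields 4094
 * and 4096 > 4094.
 */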
1761
1762void check_head_error(struct hevc_state_s *hevc)
1763{
1764#define pcm_enabled_flag 0x040
1765#define pcm_sample_bit_depth_luma 0x041
1766#define pcm_sample_bit_depth_chroma 0x042
1767 hevc->head_error_flag = 0;
1768 if ((error_handle_policy & 0x40) == 0)
1769 return;
1770 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1771 uint16_t pcm_depth_luma = get_lmem_params(
1772 hevc, pcm_sample_bit_depth_luma);
1773 uint16_t pcm_sample_chroma = get_lmem_params(
1774 hevc, pcm_sample_bit_depth_chroma);
1775 if (pcm_depth_luma >
1776 hevc->bit_depth_luma ||
1777 pcm_sample_chroma >
1778 hevc->bit_depth_chroma) {
1779 hevc_print(hevc, 0,
1780 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1781 pcm_depth_luma,
1782 pcm_sample_chroma,
1783 hevc->bit_depth_luma,
1784 hevc->bit_depth_chroma);
1785 hevc->head_error_flag = 1;
1786 }
1787 }
1788}
1789#endif
1790
1791#ifdef SUPPORT_10BIT
 1792/* Lossless compression body buffer size: 4KB per 64x32 block (jt) */
1793static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1794 int width, int height, int mem_saving_mode)
1795{
1796 int width_x64;
1797 int height_x32;
1798 int bsize;
1799
1800 width_x64 = width + 63;
1801 width_x64 >>= 6;
1802
1803 height_x32 = height + 31;
1804 height_x32 >>= 5;
1805 if (mem_saving_mode == 1 && hevc->mmu_enable)
1806 bsize = 3200 * width_x64 * height_x32;
1807 else if (mem_saving_mode == 1)
1808 bsize = 3072 * width_x64 * height_x32;
1809 else
1810 bsize = 4096 * width_x64 * height_x32;
1811
1812 return bsize;
1813}
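/*
 * Worked example, 3840x2160 with mem_saving_mode off:
 *   width_x64  = (3840 + 63) >> 6 = 60
 *   height_x32 = (2160 + 31) >> 5 = 68
 *   bsize = 4096 * 60 * 68 = 16711680 bytes (~16 MB of compressed body).
 */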
1814
 1815/* Lossless compression header buffer size: 32 bytes per 128x64 block (jt) */
1816static int compute_losless_comp_header_size(int width, int height)
1817{
1818 int width_x128;
1819 int height_x64;
1820 int hsize;
1821
1822 width_x128 = width + 127;
1823 width_x128 >>= 7;
1824
1825 height_x64 = height + 63;
1826 height_x64 >>= 6;
1827
1828 hsize = 32*width_x128*height_x64;
1829
1830 return hsize;
1831}
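/*
 * Worked example, 3840x2160:
 *   width_x128 = (3840 + 127) >> 7 = 30
 *   height_x64 = (2160 + 63) >> 6 = 34
 *   hsize = 32 * 30 * 34 = 32640 bytes of compression header.
 */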
1832#endif
1833
1834static int add_log(struct hevc_state_s *hevc,
1835 const char *fmt, ...)
1836{
1837#define HEVC_LOG_BUF 196
1838 struct debug_log_s *log_item;
1839 unsigned char buf[HEVC_LOG_BUF];
1840 int len = 0;
1841 va_list args;
1842 mutex_lock(&vh265_log_mutex);
1843 va_start(args, fmt);
1844 len = sprintf(buf, "<%ld> <%05d> ",
1845 jiffies, hevc->decode_idx);
1846 len += vsnprintf(buf + len,
1847 HEVC_LOG_BUF - len, fmt, args);
1848 va_end(args);
1849 log_item = kmalloc(
1850 sizeof(struct debug_log_s) + len,
1851 GFP_KERNEL);
1852 if (log_item) {
1853 INIT_LIST_HEAD(&log_item->list);
1854 strcpy(&log_item->data, buf);
1855 list_add_tail(&log_item->list,
1856 &hevc->log_list);
1857 }
1858 mutex_unlock(&vh265_log_mutex);
1859 return 0;
1860}
1861
1862static void dump_log(struct hevc_state_s *hevc)
1863{
1864 int i = 0;
1865 struct debug_log_s *log_item, *tmp;
1866 mutex_lock(&vh265_log_mutex);
1867 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1868 hevc_print(hevc, 0,
1869 "[LOG%04d]%s\n",
1870 i++,
1871 &log_item->data);
1872 list_del(&log_item->list);
1873 kfree(log_item);
1874 }
1875 mutex_unlock(&vh265_log_mutex);
1876}
1877
1878static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1879 struct PIC_s *pic)
1880{
1881 if (pic->error_mark
1882 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1883 return 1;
1884 return 0;
1885}
1886
1887static int get_pic_poc(struct hevc_state_s *hevc,
1888 unsigned int idx)
1889{
1890 if (idx != 0xff
1891 && idx < MAX_REF_PIC_NUM
1892 && hevc->m_PIC[idx])
1893 return hevc->m_PIC[idx]->POC;
1894 return INVALID_POC;
1895}
1896
1897#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1898static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1899{
1900 return (hevc->m_ins_flag &&
1901 ((double_write_mode & 0x80000000) == 0)) ?
1902 hevc->double_write_mode :
1903 (double_write_mode & 0x7fffffff);
1904}
1905
1906static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1907{
1908 return (hevc->m_ins_flag &&
1909 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1910 hevc->dynamic_buf_num_margin :
1911 (dynamic_buf_num_margin & 0x7fffffff);
1912}
1913#endif
1914
1915static int get_double_write_mode(struct hevc_state_s *hevc)
1916{
1917 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1918 int w = hevc->pic_w;
1919 int h = hevc->pic_h;
1920 u32 dw = 0x1; /*1:1*/
1921 switch (valid_dw_mode) {
1922 case 0x100:
1923 if (w > 1920 && h > 1088)
1924 dw = 0x4; /*1:2*/
1925 break;
1926 case 0x200:
1927 if (w > 1920 && h > 1088)
1928 dw = 0x2; /*1:4*/
1929 break;
1930 case 0x300:
1931 if (w > 1280 && h > 720)
1932 dw = 0x4; /*1:2*/
1933 break;
1934 default:
1935 dw = valid_dw_mode;
1936 break;
1937 }
1938 return dw;
1939}
1940
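/* Downscale ratio of a double write mode: 2/3 -> 1/4, 4 -> 1/2, otherwise 1/1. */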
1941static int get_double_write_ratio(struct hevc_state_s *hevc,
1942 int dw_mode)
1943{
1944 int ratio = 1;
1945 if ((dw_mode == 2) ||
1946 (dw_mode == 3))
1947 ratio = 4;
1948 else if (dw_mode == 4)
1949 ratio = 2;
1950 return ratio;
1951}
1952#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1953static unsigned char get_idx(struct hevc_state_s *hevc)
1954{
1955 return hevc->index;
1956}
1957#endif
1958
1959#undef pr_info
1960#define pr_info printk
1961static int hevc_print(struct hevc_state_s *hevc,
1962 int flag, const char *fmt, ...)
1963{
1964#define HEVC_PRINT_BUF 256
1965 unsigned char buf[HEVC_PRINT_BUF];
1966 int len = 0;
1967#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1968 if (hevc == NULL ||
1969 (flag == 0) ||
1970 ((debug_mask &
1971 (1 << hevc->index))
1972 && (debug & flag))) {
1973#endif
1974 va_list args;
1975
1976 va_start(args, fmt);
1977 if (hevc)
1978 len = sprintf(buf, "[%d]", hevc->index);
1979 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1980 pr_debug("%s", buf);
1981 va_end(args);
1982#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1983 }
1984#endif
1985 return 0;
1986}
1987
1988static int hevc_print_cont(struct hevc_state_s *hevc,
1989 int flag, const char *fmt, ...)
1990{
1991 unsigned char buf[HEVC_PRINT_BUF];
1992 int len = 0;
1993#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1994 if (hevc == NULL ||
1995 (flag == 0) ||
1996 ((debug_mask &
1997 (1 << hevc->index))
1998 && (debug & flag))) {
1999#endif
2000 va_list args;
2001
2002 va_start(args, fmt);
2003 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2004 pr_info("%s", buf);
2005 va_end(args);
2006#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2007 }
2008#endif
2009 return 0;
2010}
2011
2012static void put_mv_buf(struct hevc_state_s *hevc,
2013 struct PIC_s *pic);
2014
2015static void update_vf_memhandle(struct hevc_state_s *hevc,
2016 struct vframe_s *vf, struct PIC_s *pic);
2017
2018static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2019
2020static void release_aux_data(struct hevc_state_s *hevc,
2021 struct PIC_s *pic);
2022static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2023
2024#ifdef MULTI_INSTANCE_SUPPORT
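/*
 * backup_decode_state()/restore_decode_state(): snapshot and roll back
 * the parser/decoder state (POC tracking, RPS set id, parser type,
 * decoded LCU index, ...).  restore_decode_state() discards the picture
 * currently being decoded and restores the saved state so the picture
 * is decoded again once more input data is available.
 */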
2025static void backup_decode_state(struct hevc_state_s *hevc)
2026{
2027 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2028 hevc->curr_POC_bak = hevc->curr_POC;
2029 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2030 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2031 hevc->start_parser_type_bak = hevc->start_parser_type;
2032 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2033 hevc->rps_set_id_bak = hevc->rps_set_id;
2034 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2035 hevc->decode_idx_bak = hevc->decode_idx;
2036
2037}
2038
2039static void restore_decode_state(struct hevc_state_s *hevc)
2040{
2041 struct vdec_s *vdec = hw_to_vdec(hevc);
2042 if (!vdec_has_more_input(vdec)) {
2043 hevc->pic_decoded_lcu_idx =
2044 READ_VREG(HEVC_PARSER_LCU_START)
2045 & 0xffffff;
2046 return;
2047 }
2048 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2049 "%s: discard pic index 0x%x\n",
2050 __func__, hevc->decoding_pic ?
2051 hevc->decoding_pic->index : 0xff);
2052 if (hevc->decoding_pic) {
2053 hevc->decoding_pic->error_mark = 0;
2054 hevc->decoding_pic->output_ready = 0;
2055 hevc->decoding_pic->output_mark = 0;
2056 hevc->decoding_pic->referenced = 0;
2057 hevc->decoding_pic->POC = INVALID_POC;
2058 put_mv_buf(hevc, hevc->decoding_pic);
2059 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2060 release_aux_data(hevc, hevc->decoding_pic);
2061 hevc->decoding_pic = NULL;
2062 }
2063 hevc->decode_idx = hevc->decode_idx_bak;
2064 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2065 hevc->curr_POC = hevc->curr_POC_bak;
2066 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2067 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2068 hevc->start_parser_type = hevc->start_parser_type_bak;
2069 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2070 hevc->rps_set_id = hevc->rps_set_id_bak;
2071 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2072
2073 if (hevc->pic_list_init_flag == 1)
2074 hevc->pic_list_init_flag = 0;
2075 /*if (hevc->decode_idx == 0)
2076 hevc->start_decoding_flag = 0;*/
2077
2078 hevc->slice_idx = 0;
2079 hevc->used_4k_num = -1;
2080}
2081#endif
2082
2083static void hevc_init_stru(struct hevc_state_s *hevc,
2084 struct BuffInfo_s *buf_spec_i)
2085{
2086 //int i;
2087 INIT_LIST_HEAD(&hevc->log_list);
2088 hevc->work_space_buf = buf_spec_i;
2089 hevc->prefix_aux_size = 0;
2090 hevc->suffix_aux_size = 0;
2091 hevc->aux_addr = NULL;
2092 hevc->rpm_addr = NULL;
2093 hevc->lmem_addr = NULL;
2094
2095 hevc->curr_POC = INVALID_POC;
2096
2097 hevc->pic_list_init_flag = 0;
2098 hevc->use_cma_flag = 0;
2099 hevc->decode_idx = 0;
2100 hevc->slice_idx = 0;
2101 hevc->new_pic = 0;
2102 hevc->new_tile = 0;
2103 hevc->iPrevPOC = 0;
2104 hevc->list_no = 0;
2105 /* int m_uiMaxCUWidth = 1<<7; */
2106 /* int m_uiMaxCUHeight = 1<<7; */
2107 hevc->m_pocRandomAccess = MAX_INT;
2108 hevc->tile_enabled = 0;
2109 hevc->tile_x = 0;
2110 hevc->tile_y = 0;
2111 hevc->iPrevTid0POC = 0;
2112 hevc->slice_addr = 0;
2113 hevc->slice_segment_addr = 0;
2114 hevc->skip_flag = 0;
2115 hevc->misc_flag0 = 0;
2116
2117 hevc->cur_pic = NULL;
2118 hevc->col_pic = NULL;
2119 hevc->wait_buf = 0;
2120 hevc->error_flag = 0;
2121 hevc->head_error_flag = 0;
2122 hevc->error_skip_nal_count = 0;
2123 hevc->have_vps = 0;
2124 hevc->have_sps = 0;
2125 hevc->have_pps = 0;
2126 hevc->have_valid_start_slice = 0;
2127
2128 hevc->pts_mode = PTS_NORMAL;
2129 hevc->last_pts = 0;
2130 hevc->last_lookup_pts = 0;
2131 hevc->last_pts_us64 = 0;
2132 hevc->last_lookup_pts_us64 = 0;
2133 hevc->pts_mode_switching_count = 0;
2134 hevc->pts_mode_recovery_count = 0;
2135
2136 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2137 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2138 if (hevc->PB_skip_mode == 0)
2139 hevc->ignore_bufmgr_error = 0x1;
2140 else
2141 hevc->ignore_bufmgr_error = 0x0;
2142
2143 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
2144 vdec_schedule_work(&hevc->work);
2145 /*for (i = 0; i < MAX_REF_PIC_NUM; i++)
2146 hevc->m_PIC[i] = NULL;*/
2147
2148 hevc->pic_num = 0;
2149 hevc->lcu_x_num_pre = 0;
2150 hevc->lcu_y_num_pre = 0;
2151 hevc->first_pic_after_recover = 0;
2152
2153 hevc->pre_top_pic = NULL;
2154 hevc->pre_bot_pic = NULL;
2155
2156 hevc->sei_present_flag = 0;
2157 hevc->valve_count = 0;
2158 hevc->first_pic_flag = 0;
2159#ifdef MULTI_INSTANCE_SUPPORT
2160 hevc->decoded_poc = INVALID_POC;
2161 hevc->start_process_time = 0;
2162 hevc->last_lcu_idx = 0;
2163 hevc->decode_timeout_count = 0;
2164 hevc->timeout_num = 0;
2165 hevc->eos = 0;
2166 hevc->pic_decoded_lcu_idx = -1;
2167 hevc->over_decode = 0;
2168 hevc->used_4k_num = -1;
2169 hevc->start_decoding_flag = 0;
2170 hevc->rps_set_id = 0;
2171 backup_decode_state(hevc);
2172#endif
2173#ifdef DETREFILL_ENABLE
2174 hevc->detbuf_adr = 0;
2175 hevc->detbuf_adr_virt = NULL;
2176#endif
2177}
2178
2179static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2180static int H265_alloc_mmu(struct hevc_state_s *hevc,
2181 struct PIC_s *new_pic, unsigned short bit_depth,
2182 unsigned int *mmu_index_adr);
2183
2184#ifdef DETREFILL_ENABLE
2185#define DETREFILL_BUF_SIZE (4 * 0x4000)
2186#define HEVC_SAO_DBG_MODE0 0x361e
2187#define HEVC_SAO_DBG_MODE1 0x361f
2188#define HEVC_SAO_CTRL10 0x362e
2189#define HEVC_SAO_CTRL11 0x362f
2190static int init_detrefill_buf(struct hevc_state_s *hevc)
2191{
2192 if (hevc->detbuf_adr_virt)
2193 return 0;
2194
2195 hevc->detbuf_adr_virt =
2196 (void *)dma_alloc_coherent(amports_get_dma_device(),
2197 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2198 GFP_KERNEL);
2199
2200 if (hevc->detbuf_adr_virt == NULL) {
2201		pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2202 return -1;
2203 }
2204 return 0;
2205}
2206
2207static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2208{
2209 if (hevc->detbuf_adr_virt) {
2210 dma_free_coherent(amports_get_dma_device(),
2211 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2212 hevc->detbuf_adr);
2213
2214 hevc->detbuf_adr_virt = NULL;
2215 hevc->detbuf_adr = 0;
2216 }
2217}
2218
2219/*
2220 * convert uncompressed frame buffer data from/to ddr
2221 */
2222static void convUnc8x4blk(uint16_t* blk8x4Luma,
2223 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2224{
2225 if (direction == 0) {
2226 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2227 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2228 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2229 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2230 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2231 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2232 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2233 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2234 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2235 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2236 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2237 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2238 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2239 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2240 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2241 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2242 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2243 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2244
2245 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2246 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2247 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2248 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2249 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2250 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2251 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2252 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2253 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2254 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2255 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2256 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2257 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2258 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2259 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2260 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2261 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2262 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2263
2264 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2265 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2266 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2267 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2268 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2269 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2270 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2271 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2272 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2273 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2274 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2275 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2276 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2277 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2278 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2279 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2280 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2281 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2282
2283 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2284 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2285 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2286 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2287 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2288 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2289 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2290 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2291 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2292 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2293 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2294 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2295 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2296 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2297 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2298 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2299 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2300 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2301 } else {
2302 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2303 blk8x4Luma[3 + 0 * 8];
2304 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2305 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2306 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2307 (blk8x4Luma[3 + 3 * 8] >> 2);
2308 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2309 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2310 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2311 (blk8x4Luma[7 + 2 * 8] >>4);
2312 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2313 blk8x4Cb[0 + 0 * 4];
2314 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2315 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2316 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2317
2318 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2319 blk8x4Luma[0 + 0 * 8];
2320 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2321 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2322 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2323 (blk8x4Luma[0 + 1 * 8] >> 2);
2324 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2325 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2326 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2327 (blk8x4Luma[0 + 2 * 8] >>4);
2328 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2329 blk8x4Luma[2 + 2 * 8];
2330 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2331 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2332 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2333
2334 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2335 blk8x4Luma[4 + 0 * 8];
2336 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2337 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2338 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2339 (blk8x4Luma[4 + 1 * 8] >> 2);
2340 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2341 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2342 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2343 (blk8x4Luma[4 + 2 * 8] >>4);
2344 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2345 blk8x4Luma[6 + 2 * 8];
2346 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2347 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2348 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2349
2350 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2351 blk8x4Cb[1 + 0 * 4];
2352 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2353 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2354 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2355 (blk8x4Cr[2 + 0 * 4] >> 2);
2356 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2357 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2358 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2359 (blk8x4Cb[1 + 1 * 4] >>4);
2360 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2361 blk8x4Cb[2 + 1 * 4];
2362 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2363 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2364 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2365 }
2366}
2367
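/*
 * Repair one corrupted CTU column in the compressed reference frame.
 * The AMRISC assist is asked (via HEVC_SAO_CTRL10/11 and
 * HEVC_SAO_DBG_MODE0/1) to copy the 8x4 block body data of the above
 * and current CTU into detbuf; it is unpacked with convUnc8x4blk(),
 * the damaged right-hand samples of each block are re-derived from the
 * left-hand ones, then the data is packed again and written back.
 */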
2368static void corrRefillWithAmrisc (
2369 struct hevc_state_s *hevc,
2370 uint32_t cmHeaderBaseAddr,
2371 uint32_t picWidth,
2372 uint32_t ctuPosition)
2373{
2374 int32_t i;
2375 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2376 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2377 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2378
2379 uint16_t cmBodyBuf[32 * 18];
2380
2381 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2382 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2383 uint32_t stride64x64 = pic_width_x64 * 128;
2384 uint32_t addr_offset64x64_abv = stride64x64 *
2385 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2386 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2387 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2388 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2389 unsigned int tmpData32;
2390
2391 uint16_t blkBuf0Y[32];
2392 uint16_t blkBuf0Cb[8];
2393 uint16_t blkBuf0Cr[8];
2394 uint16_t blkBuf1Y[32];
2395 uint16_t blkBuf1Cb[8];
2396 uint16_t blkBuf1Cr[8];
2397 int32_t blkBufCnt = 0;
2398
2399 int32_t blkIdx;
2400
2401 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2402 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2403 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2404 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2405
2406 for (i = 0; i < 32 * 18; i++)
2407 cmBodyBuf[i] = 0;
2408
2409 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2410 "%s, %d\n", __func__, __LINE__);
2411 do {
2412 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2413 } while (tmpData32);
2414 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2415 "%s, %d\n", __func__, __LINE__);
2416
2417 hevc_print(hevc, H265_DEBUG_DETAIL,
2418 "cmBodyBuf from detbuf:\n");
2419 for (i = 0; i < 32 * 18; i++) {
2420 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2421 if (get_dbg_flag(hevc) &
2422 H265_DEBUG_DETAIL) {
2423 if ((i & 0xf) == 0)
2424 hevc_print_cont(hevc, 0, "\n");
2425 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2426 }
2427 }
2428 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2429
2430 for (i = 0; i < 32; i++)
2431 blkBuf0Y[i] = 0;
2432 for (i = 0; i < 8; i++)
2433 blkBuf0Cb[i] = 0;
2434 for (i = 0; i < 8; i++)
2435 blkBuf0Cr[i] = 0;
2436 for (i = 0; i < 32; i++)
2437 blkBuf1Y[i] = 0;
2438 for (i = 0; i < 8; i++)
2439 blkBuf1Cb[i] = 0;
2440 for (i = 0; i < 8; i++)
2441 blkBuf1Cr[i] = 0;
2442
2443 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2444 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2445 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2446 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2447 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2448 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2449 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2450
2451 if (!aboveCtuAvailable && inAboveCtu)
2452 continue;
2453
2454 /* detRefillBuf --> 8x4block*/
2455 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2456
2457 if (restoreEnable) {
2458 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2459 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2460 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2461 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2462 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2463 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2464 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2465 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2466 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2467 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2468 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2469 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2470 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2471 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2472 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2473 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2474 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2475 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2476 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2477 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2478 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2479 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2480 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2481 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2482 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2483 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2484 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2485 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2486 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2487 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2488 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2489 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2490
2491 /*Store data back to DDR*/
2492 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2493 }
2494
2495 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2496 }
2497
2498 hevc_print(hevc, H265_DEBUG_DETAIL,
2499 "cmBodyBuf to detbuf:\n");
2500 for (i = 0; i < 32 * 18; i++) {
2501 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2502 if (get_dbg_flag(hevc) &
2503 H265_DEBUG_DETAIL) {
2504 if ((i & 0xf) == 0)
2505 hevc_print_cont(hevc, 0, "\n");
2506 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2507 }
2508 }
2509 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2510
2511 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2512 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2513 "%s, %d\n", __func__, __LINE__);
2514 do {
2515 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2516 } while (tmpData32);
2517 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2518 "%s, %d\n", __func__, __LINE__);
2519}
2520
2521static void delrefill(struct hevc_state_s *hevc)
2522{
2523 /*
2524 * corrRefill
2525 */
2526	/* HEVC_SAO_DBG_MODE0: picGlobalVariable
2527	 * [31:30] error number
2528	 * [29:20] error2 ([9:7] tilex, [6:0] ctuy)
2529	 * [19:10] error1, [9:0] error0 */
2530 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2531 uint32_t errorIdx;
2532 uint32_t errorNum = (detResult>>30);
2533
2534 if (detResult) {
2535 hevc_print(hevc, H265_DEBUG_BUFMGR,
2536 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2537 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2538 uint32_t errorPos = errorIdx * 10;
2539 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2540 uint32_t tilex = (errorResult >> 7) - 1;
2541 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2542 + hevc->m_tile[0][tilex].width - 1;
2543 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2544 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2545 hevc_print(hevc, H265_DEBUG_BUFMGR,
2546 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2547 errorIdx,tilex,ctux,ctux, ctuy,ctuy);
2548 corrRefillWithAmrisc(
2549 hevc,
2550 (uint32_t)hevc->cur_pic->header_adr,
2551 hevc->pic_w,
2552 ctuPosition);
2553 }
2554
2555 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2556 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2557 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2558 }
2559}
2560#endif
2561
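/*
 * Read the 128 RPM (register parameter memory) words from the ucode:
 * RPM_CMD_REG is polled until bit 16 flags valid data, the low 16 bits
 * are stored and the register is cleared as an acknowledge.
 */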
2562static void get_rpm_param(union param_u *params)
2563{
2564 int i;
2565 unsigned int data32;
2566
2567 for (i = 0; i < 128; i++) {
2568 do {
2569 data32 = READ_VREG(RPM_CMD_REG);
2570 /* hevc_print(hevc, 0, "%x\n", data32); */
2571 } while ((data32 & 0x10000) == 0);
2572 params->l.data[i] = data32 & 0xffff;
2573 /* hevc_print(hevc, 0, "%x\n", data32); */
2574 WRITE_VREG(RPM_CMD_REG, 0);
2575 }
2576}
2577
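/*
 * Find an allocated picture by POC.  When several pictures share the
 * same POC, the one with the largest decode_idx (most recently decoded)
 * is returned.
 */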
2578static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2579{
2580 int i;
2581 struct PIC_s *pic;
2582 struct PIC_s *ret_pic = NULL;
2583 if (POC == INVALID_POC)
2584 return NULL;
2585 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2586 pic = hevc->m_PIC[i];
2587 if (pic == NULL || pic->index == -1 ||
2588 pic->BUF_index == -1)
2589 continue;
2590 if (pic->POC == POC) {
2591 if (ret_pic == NULL)
2592 ret_pic = pic;
2593 else {
2594 if (pic->decode_idx > ret_pic->decode_idx)
2595 ret_pic = pic;
2596 }
2597 }
2598 }
2599 return ret_pic;
2600}
2601
2602static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2603{
2604 int i;
2605 struct PIC_s *pic;
2606 struct PIC_s *ret_pic = NULL;
2607
2608 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2609 pic = hevc->m_PIC[i];
2610 if (pic == NULL || pic->index == -1 ||
2611 pic->BUF_index == -1)
2612 continue;
2613 if ((pic->POC == POC) && (pic->referenced)) {
2614 if (ret_pic == NULL)
2615 ret_pic = pic;
2616 else {
2617 if (pic->decode_idx > ret_pic->decode_idx)
2618 ret_pic = pic;
2619 }
2620 }
2621 }
2622
2623 if (ret_pic == NULL) {
2624 if (get_dbg_flag(hevc)) {
2625 hevc_print(hevc, 0,
2626 "Wrong, POC of %d is not in referenced list\n",
2627 POC);
2628 }
2629 ret_pic = get_pic_by_POC(hevc, POC);
2630 }
2631 return ret_pic;
2632}
2633
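/* Integer floor(log2(val)); note it returns (unsigned int)-1 for val == 0. */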
2634static unsigned int log2i(unsigned int val)
2635{
2636 unsigned int ret = -1;
2637
2638 while (val != 0) {
2639 val >>= 1;
2640 ret++;
2641 }
2642 return ret;
2643}
2644
2645static int init_buf_spec(struct hevc_state_s *hevc);
2646static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2647{
2648 if (hevc->mmu_box)
2649 decoder_mmu_box_free(hevc->mmu_box);
2650 hevc->mmu_box = NULL;
2651
2652 if (hevc->bmmu_box)
2653 decoder_bmmu_box_free(hevc->bmmu_box);
2654 hevc->bmmu_box = NULL;
2655}
2656static int init_mmu_buffers(struct hevc_state_s *hevc)
2657{
2658 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2659 CODEC_MM_FLAGS_TVP : 0;
2660 int buf_size = 64;
2661
2662 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2663 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2664 buf_size = 24;
2665 }
2666
2667 if (get_dbg_flag(hevc)) {
2668 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2669 __func__, hevc->max_pic_w, hevc->max_pic_h);
2670 }
2671
2672 hevc->need_cache_size = buf_size * SZ_1M;
2673 hevc->sc_start_time = get_jiffies_64();
2674 if (hevc->mmu_enable
2675 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2676 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2677 hevc->index,
2678 MAX_REF_PIC_NUM,
2679 buf_size * SZ_1M,
2680 tvp_flag
2681 );
2682 if (!hevc->mmu_box) {
2683 pr_err("h265 alloc mmu box failed!!\n");
2684 return -1;
2685 }
2686 }
2687
2688 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2689 hevc->index,
2690 BMMU_MAX_BUFFERS,
2691 4 + PAGE_SHIFT,
2692 CODEC_MM_FLAGS_CMA_CLEAR |
2693 CODEC_MM_FLAGS_FOR_VDECODER |
2694 tvp_flag);
2695 if (!hevc->bmmu_box) {
2696 if (hevc->mmu_box)
2697 decoder_mmu_box_free(hevc->mmu_box);
2698 hevc->mmu_box = NULL;
2699		pr_err("h265 alloc bmmu box failed!!\n");
2700 return -1;
2701 }
2702 return 0;
2703}
2704
2705struct buf_stru_s
2706{
2707 int lcu_total;
2708 int mc_buffer_size_h;
2709 int mc_buffer_size_u_v_h;
2710};
2711
2712#ifndef MV_USE_FIXED_BUF
2713static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2714{
2715 int i;
2716 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2717 if (hevc->m_mv_BUF[i].start_adr) {
2718 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2719 hevc_print(hevc, 0,
2720 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2721 i, hevc->m_mv_BUF[i].start_adr,
2722 hevc->m_mv_BUF[i].size,
2723 hevc->m_mv_BUF[i].used_flag);
2724 decoder_bmmu_box_free_idx(
2725 hevc->bmmu_box,
2726 MV_BUFFER_IDX(i));
2727 hevc->m_mv_BUF[i].start_adr = 0;
2728 hevc->m_mv_BUF[i].size = 0;
2729 hevc->m_mv_BUF[i].used_flag = 0;
2730 }
2731 }
2732 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2733 if (hevc->m_PIC[i] != NULL)
2734 hevc->m_PIC[i]->mv_buf_index = -1;
2735 }
2736
2737}
2738
2739static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2740{
2741 int ret = 0;
2742 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2743 if (decoder_bmmu_box_alloc_buf_phy
2744 (hevc->bmmu_box,
2745 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2746 DRIVER_NAME,
2747 &hevc->m_mv_BUF[i].start_adr) < 0) {
2748 hevc->m_mv_BUF[i].start_adr = 0;
2749 ret = -1;
2750 } else {
2751 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2752 hevc->m_mv_BUF[i].used_flag = 0;
2753 ret = 0;
2754 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2755 hevc_print(hevc, 0,
2756 "MV Buffer %d: start_adr %p size %x\n",
2757 i,
2758 (void *)hevc->m_mv_BUF[i].start_adr,
2759 hevc->m_mv_BUF[i].size);
2760 }
2761 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2762 void *mem_start_virt;
2763 mem_start_virt =
2764 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2765 if (mem_start_virt) {
2766 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2767 codec_mm_dma_flush(mem_start_virt,
2768 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2769 } else {
2770 mem_start_virt = codec_mm_vmap(
2771 hevc->m_mv_BUF[i].start_adr,
2772 hevc->m_mv_BUF[i].size);
2773 if (mem_start_virt) {
2774 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2775 codec_mm_dma_flush(mem_start_virt,
2776 hevc->m_mv_BUF[i].size,
2777 DMA_TO_DEVICE);
2778 codec_mm_unmap_phyaddr(mem_start_virt);
2779 } else {
2780					/* no virtual address in TVP (secure) mode;
2781					 * the buffer may need to be cleared by ucode. */
2782 pr_err("ref %s mem_start_virt failed\n", __func__);
2783 }
2784 }
2785 }
2786 }
2787 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2788 return ret;
2789}
2790#endif
2791
2792static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2793{
2794#ifdef MV_USE_FIXED_BUF
2795 if (pic && pic->index >= 0) {
2796 if (IS_8K_SIZE(pic->width, pic->height)) {
2797 pic->mpred_mv_wr_start_addr =
2798 hevc->work_space_buf->mpred_mv.buf_start
2799 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2800 } else {
2801 pic->mpred_mv_wr_start_addr =
2802 hevc->work_space_buf->mpred_mv.buf_start
2803 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2804 }
2805 }
2806 return 0;
2807#else
2808 int i;
2809 int ret = -1;
2810 int new_size;
2811 if (IS_8K_SIZE(pic->width, pic->height))
2812 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2813 else if (IS_4K_SIZE(pic->width, pic->height))
2814 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2815 else
2816 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2817 if (new_size != hevc->mv_buf_size) {
2818 dealloc_mv_bufs(hevc);
2819 hevc->mv_buf_size = new_size;
2820 }
2821 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2822 if (hevc->m_mv_BUF[i].start_adr &&
2823 hevc->m_mv_BUF[i].used_flag == 0) {
2824 hevc->m_mv_BUF[i].used_flag = 1;
2825 ret = i;
2826 break;
2827 }
2828 }
2829 if (ret < 0) {
2830 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2831 if (hevc->m_mv_BUF[i].start_adr == 0) {
2832 if (alloc_mv_buf(hevc, i) >= 0) {
2833 hevc->m_mv_BUF[i].used_flag = 1;
2834 ret = i;
2835 }
2836 break;
2837 }
2838 }
2839 }
2840
2841 if (ret >= 0) {
2842 pic->mv_buf_index = ret;
2843 pic->mpred_mv_wr_start_addr =
2844 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2845 (~0xffff);
2846 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2847 "%s => %d (0x%x) size 0x%x\n",
2848 __func__, ret,
2849 pic->mpred_mv_wr_start_addr,
2850 hevc->m_mv_BUF[ret].size);
2851
2852 } else {
2853 hevc_print(hevc, 0,
2854 "%s: Error, mv buf is not enough\n",
2855 __func__);
2856 }
2857 return ret;
2858
2859#endif
2860}
2861
2862static void put_mv_buf(struct hevc_state_s *hevc,
2863 struct PIC_s *pic)
2864{
2865#ifndef MV_USE_FIXED_BUF
2866 int i = pic->mv_buf_index;
2867 if (i < 0 || i >= MAX_REF_PIC_NUM) {
2868 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2869 "%s: index %d beyond range\n",
2870 __func__, i);
2871 return;
2872 }
2873 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2874 "%s(%d): used_flag(%d)\n",
2875 __func__, i,
2876 hevc->m_mv_BUF[i].used_flag);
2877
2878 if (hevc->m_mv_BUF[i].start_adr &&
2879 hevc->m_mv_BUF[i].used_flag)
2880 hevc->m_mv_BUF[i].used_flag = 0;
2881 pic->mv_buf_index = -1;
2882#endif
2883}
2884
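/*
 * Bytes needed for one frame buffer at the current size and double
 * write mode, roughly:
 *   64KB-aligned MMU compress header (x4 for 8K)      [mmu_enable]
 * + 3 * 64KB-aligned chroma plane size                [double write]
 *   (luma is twice the 4:2:0 chroma size, so 3x chroma covers Y + UV)
 * + compressed reference data                         [no MMU, compressed mode]
 * + 64KB slack in case the start address is not 64KB aligned.
 */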
2885static int cal_current_buf_size(struct hevc_state_s *hevc,
2886 struct buf_stru_s *buf_stru)
2887{
2888
2889 int buf_size;
2890 int pic_width = hevc->pic_w;
2891 int pic_height = hevc->pic_h;
2892 int lcu_size = hevc->lcu_size;
2893 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
2894 1 : pic_width / lcu_size;
2895 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
2896 1 : pic_height / lcu_size;
2897 /*SUPPORT_10BIT*/
2898 int losless_comp_header_size = compute_losless_comp_header_size
2899 (pic_width, pic_height);
2900 /*always alloc buf for 10bit*/
2901 int losless_comp_body_size = compute_losless_comp_body_size
2902 (hevc, pic_width, pic_height, 0);
2903 int mc_buffer_size = losless_comp_header_size
2904 + losless_comp_body_size;
2905 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
2906 int mc_buffer_size_u_v_h = 0;
2907
2908 int dw_mode = get_double_write_mode(hevc);
2909
2910 if (hevc->mmu_enable) {
2911 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
2912 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
2913 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
2914 << 16;
2915 else
2916 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
2917 << 16;
2918 } else
2919 buf_size = 0;
2920
2921 if (dw_mode) {
2922 int pic_width_dw = pic_width /
2923 get_double_write_ratio(hevc, dw_mode);
2924 int pic_height_dw = pic_height /
2925 get_double_write_ratio(hevc, dw_mode);
2926
2927 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
2928 pic_width_dw / lcu_size + 1 :
2929 pic_width_dw / lcu_size;
2930 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
2931 pic_height_dw / lcu_size + 1 :
2932 pic_height_dw / lcu_size;
2933 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
2934
2935 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
2936 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
2937 /*64k alignment*/
2938 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
2939 }
2940
2941 if ((!hevc->mmu_enable) &&
2942 ((dw_mode & 0x10) == 0)) {
2943		/* compressed mode without MMU:
2944		 * a buffer is needed for the compressed reference data */
2945 buf_size += (mc_buffer_size_h << 16);
2946 }
2947
2948 /*in case start adr is not 64k alignment*/
2949 if (buf_size > 0)
2950 buf_size += 0x10000;
2951
2952 if (buf_stru) {
2953 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
2954 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
2955 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
2956 }
2957
2958	hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
2959 pic_width, pic_height, losless_comp_header_size,
2960 losless_comp_body_size, mc_buffer_size_h,
2961 mc_buffer_size_u_v_h, buf_size);
2962
2963 return buf_size;
2964}
2965
2966static int alloc_buf(struct hevc_state_s *hevc)
2967{
2968 int i;
2969 int ret = -1;
2970 int buf_size = cal_current_buf_size(hevc, NULL);
2971
2972 if (hevc->is_used_v4l)
2973 return 0;
2974
2975 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
2976 return ret;
2977
2978 for (i = 0; i < BUF_POOL_SIZE; i++) {
2979 if (hevc->m_BUF[i].start_adr == 0)
2980 break;
2981 }
2982 if (i < BUF_POOL_SIZE) {
2983 if (buf_size > 0) {
2984 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2985 /*alloc compress header first*/
2986
2987 ret = decoder_bmmu_box_alloc_buf_phy
2988 (hevc->bmmu_box,
2989 VF_BUFFER_IDX(i), buf_size,
2990 DRIVER_NAME,
2991 &hevc->m_BUF[i].start_adr);
2992 if (ret < 0) {
2993 hevc->m_BUF[i].start_adr = 0;
2994 if (i <= 8) {
2995 hevc->fatal_error |=
2996 DECODER_FATAL_ERROR_NO_MEM;
2997 hevc_print(hevc, PRINT_FLAG_ERROR,
2998 "%s[%d], size: %d, no mem fatal err\n",
2999 __func__, i, buf_size);
3000 }
3001 }
3002
3003 if (ret >= 0) {
3004 hevc->m_BUF[i].size = buf_size;
3005 hevc->m_BUF[i].used_flag = 0;
3006 ret = 0;
3007
3008 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3009 hevc_print(hevc, 0,
3010 "Buffer %d: start_adr %p size %x\n",
3011 i,
3012 (void *)hevc->m_BUF[i].start_adr,
3013 hevc->m_BUF[i].size);
3014 }
3015 /*flush the buffer make sure no cache dirty*/
3016 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3017 void *mem_start_virt;
3018 mem_start_virt =
3019 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3020 if (mem_start_virt) {
3021 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3022 codec_mm_dma_flush(mem_start_virt,
3023 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3024 } else {
3025 mem_start_virt = codec_mm_vmap(
3026 hevc->m_BUF[i].start_adr,
3027 hevc->m_BUF[i].size);
3028 if (mem_start_virt) {
3029 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3030 codec_mm_dma_flush(mem_start_virt,
3031 hevc->m_BUF[i].size,
3032 DMA_TO_DEVICE);
3033 codec_mm_unmap_phyaddr(mem_start_virt);
3034 } else {
3035						/* no virtual address in TVP (secure) mode;
3036						 * the buffer may need to be cleared by ucode. */
3037 pr_err("ref %s mem_start_virt failed\n", __func__);
3038 }
3039 }
3040 }
3041 }
3042 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3043 } else
3044 ret = 0;
3045 }
3046 if (ret >= 0) {
3047 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3048 hevc_print(hevc, 0,
3049				"alloc buf(%d) for %d/%d size 0x%x => %p\n",
3050 i, hevc->pic_w, hevc->pic_h,
3051 buf_size,
3052 hevc->m_BUF[i].start_adr);
3053 }
3054 } else {
3055 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3056 hevc_print(hevc, 0,
3057				"alloc buf(%d) for %d/%d size 0x%x => Fail!!!\n",
3058 i, hevc->pic_w, hevc->pic_h,
3059 buf_size);
3060 }
3061 }
3062 return ret;
3063}
3064
3065static void set_buf_unused(struct hevc_state_s *hevc, int i)
3066{
3067 if (i >= 0 && i < BUF_POOL_SIZE)
3068 hevc->m_BUF[i].used_flag = 0;
3069}
3070
3071static void dealloc_unused_buf(struct hevc_state_s *hevc)
3072{
3073 int i;
3074 for (i = 0; i < BUF_POOL_SIZE; i++) {
3075 if (hevc->m_BUF[i].start_adr &&
3076 hevc->m_BUF[i].used_flag == 0) {
3077 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3078 hevc_print(hevc, 0,
3079 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3080 i, hevc->m_BUF[i].start_adr,
3081 hevc->m_BUF[i].size);
3082 }
3083 if (!hevc->is_used_v4l)
3084 decoder_bmmu_box_free_idx(
3085 hevc->bmmu_box,
3086 VF_BUFFER_IDX(i));
3087 hevc->m_BUF[i].start_adr = 0;
3088 hevc->m_BUF[i].size = 0;
3089 }
3090 }
3091
3092}
3093
3094static void dealloc_pic_buf(struct hevc_state_s *hevc,
3095 struct PIC_s *pic)
3096{
3097 int i = pic->BUF_index;
3098 pic->BUF_index = -1;
3099 if (i >= 0 &&
3100 i < BUF_POOL_SIZE &&
3101 hevc->m_BUF[i].start_adr) {
3102 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3103 hevc_print(hevc, 0,
3104 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3105 i, hevc->m_BUF[i].start_adr,
3106 hevc->m_BUF[i].size);
3107 }
3108
3109 if (!hevc->is_used_v4l)
3110 decoder_bmmu_box_free_idx(
3111 hevc->bmmu_box,
3112 VF_BUFFER_IDX(i));
3113 hevc->m_BUF[i].used_flag = 0;
3114 hevc->m_BUF[i].start_adr = 0;
3115 hevc->m_BUF[i].size = 0;
3116 }
3117}
3118
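/*
 * Number of working pictures to allocate: sps_num_reorder_pics_0 (or
 * sps_max_dec_pic_buffering_minus1_0 when the reorder count is 0) plus
 * the dynamic buffer margin, one extra in multi-instance mode and one
 * more when the DPB/reorder difference is >= 4, clamped to MAX_BUF_NUM.
 * With a zero margin the module parameter max_buf_num is used instead.
 */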
3119static int get_work_pic_num(struct hevc_state_s *hevc)
3120{
3121 int used_buf_num = 0;
3122 int sps_pic_buf_diff = 0;
3123
3124 if (get_dynamic_buf_num_margin(hevc) > 0) {
3125 if ((!hevc->sps_num_reorder_pics_0) &&
3126 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3127 /* the range of sps_num_reorder_pics_0 is in
3128 [0, sps_max_dec_pic_buffering_minus1_0] */
3129 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3130 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3131 } else
3132 used_buf_num = hevc->sps_num_reorder_pics_0
3133 + get_dynamic_buf_num_margin(hevc);
3134
3135 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3136 - hevc->sps_num_reorder_pics_0;
3137#ifdef MULTI_INSTANCE_SUPPORT
3138 /*
3139		need one more buffer for multi-instance, as
3140		apply_ref_pic_set() has no chance to run
3141		to clear the referenced flag in some cases
3142 */
3143 if (hevc->m_ins_flag)
3144 used_buf_num++;
3145#endif
3146 } else
3147 used_buf_num = max_buf_num;
3148
3149 if (hevc->save_buffer_mode)
3150 hevc_print(hevc, 0,
3151			"save_buffer_mode: dynamic_buf_num_margin %d ----> %d\n",
3152 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3153
3154 if (sps_pic_buf_diff >= 4)
3155 {
3156 used_buf_num += 1;
3157 }
3158
3159 if (used_buf_num > MAX_BUF_NUM)
3160 used_buf_num = MAX_BUF_NUM;
3161 return used_buf_num;
3162}
3163
3164static int get_alloc_pic_count(struct hevc_state_s *hevc)
3165{
3166 int alloc_pic_count = 0;
3167 int i;
3168 struct PIC_s *pic;
3169 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3170 pic = hevc->m_PIC[i];
3171 if (pic && pic->index >= 0)
3172 alloc_pic_count++;
3173 }
3174 return alloc_pic_count;
3175}
3176
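/*
 * Bind a free buffer from m_BUF[] (or a v4l2 capture buffer) to the
 * picture: with the frame MMU the compress header sits at the start of
 * the buffer, followed by the 64KB-aligned double write planes; canvas
 * indexes are derived from the picture index.
 */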
3177static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3178{
3179 int ret = -1;
3180 int i;
3181 /*int lcu_size_log2 = hevc->lcu_size_log2;
3182 int MV_MEM_UNIT=lcu_size_log2==
3183 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3184 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3185 5 ? 0x80 : 0x20;
3186 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3187 hevc->work_space_buf->mpred_mv.buf_size;*/
3188 unsigned int y_adr = 0;
3189 struct buf_stru_s buf_stru;
3190 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3191 int dw_mode = get_double_write_mode(hevc);
3192 struct vdec_v4l2_buffer *fb = NULL;
3193
3194 if (hevc->is_used_v4l)
3195 buf_size = 0;
3196
3197 for (i = 0; i < BUF_POOL_SIZE; i++) {
3198 if (hevc->is_used_v4l && !hevc->m_BUF[i].start_adr) {
3199 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3200 if (ret) {
3201 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
3202 "[%d] get fb fail.\n",
3203 ((struct aml_vcodec_ctx *)
3204 (hevc->v4l2_ctx))->id);
3205 return ret;
3206 }
3207
3208 hevc->m_BUF[i].used_flag = 0;
3209 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3210 if (fb->num_planes == 1) {
3211 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3212 hevc->m_BUF[i].size = fb->m.mem[0].size;
3213 hevc->m_BUF[i].y_size = fb->m.mem[0].offset;
3214 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3215 } else if (fb->num_planes == 2) {
3216 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3217 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3218 hevc->m_BUF[i].y_size = fb->m.mem[0].size;
3219 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3220 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3221 }
3222
3223 pic->BUF_index = i;
3224
3225 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
3226 "[%d] %s(), v4l ref buf addr: 0x%x\n",
3227 ((struct aml_vcodec_ctx *)
3228 (hevc->v4l2_ctx))->id, __func__, fb);
3229 }
3230
3231 if (hevc->m_BUF[i].start_adr != 0 &&
3232 hevc->m_BUF[i].used_flag == 0 &&
3233 buf_size <= hevc->m_BUF[i].size) {
3234 hevc->m_BUF[i].used_flag = 1;
3235 break;
3236 }
3237 }
3238
3239 if (i >= BUF_POOL_SIZE)
3240 return -1;
3241
3242 if (hevc->mmu_enable) {
3243 pic->header_adr = hevc->m_BUF[i].start_adr;
3244 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3245 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3246 y_adr = hevc->m_BUF[i].start_adr +
3247 MMU_COMPRESS_8K_HEADER_SIZE;
3248 else
3249 y_adr = hevc->m_BUF[i].start_adr +
3250 MMU_COMPRESS_HEADER_SIZE;
3251 } else
3252 y_adr = hevc->m_BUF[i].start_adr;
3253
3254 if (!hevc->is_used_v4l)
3255 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3256
3257 pic->POC = INVALID_POC;
3258	/* ensure get_pic_by_POC() does not
3259	 * return a buffer that is not yet decoded */
3260 pic->BUF_index = i;
3261
3262 if ((!hevc->mmu_enable) &&
3263 ((dw_mode & 0x10) == 0)
3264 ) {
3265 pic->mc_y_adr = y_adr;
3266 y_adr += (buf_stru.mc_buffer_size_h << 16);
3267 }
3268 pic->mc_canvas_y = pic->index;
3269 pic->mc_canvas_u_v = pic->index;
3270 if (dw_mode & 0x10) {
3271 if (hevc->is_used_v4l) {
3272 pic->mc_y_adr = y_adr;
3273 pic->mc_u_v_adr = y_adr + hevc->m_BUF[i].y_size;
3274 } else {
3275 pic->mc_y_adr = y_adr;
3276 pic->mc_u_v_adr = y_adr +
3277 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3278 }
3279
3280 pic->mc_canvas_y = (pic->index << 1);
3281 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3282
3283 pic->dw_y_adr = pic->mc_y_adr;
3284 pic->dw_u_v_adr = pic->mc_u_v_adr;
3285 } else if (dw_mode) {
3286 pic->dw_y_adr = y_adr;
3287 pic->dw_u_v_adr = pic->dw_y_adr +
3288 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3289 }
3290
3291
3292 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3293 hevc_print(hevc, 0,
3294 "%s index %d BUF_index %d mc_y_adr %x\n",
3295 __func__, pic->index,
3296 pic->BUF_index, pic->mc_y_adr);
3297 if (hevc->mmu_enable &&
3298 dw_mode)
3299 hevc_print(hevc, 0,
3300 "mmu double write adr %ld\n",
3301 pic->cma_alloc_addr);
3302
3303
3304 }
3305 ret = 0;
3306
3307 return ret;
3308}
3309
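/*
 * Allocate the decoder frame buffers and PIC_s descriptors:
 * get_work_pic_num() entries are allocated and configured up front
 * (config_pic()/set_canvas()), the remaining slots up to
 * MAX_REF_PIC_NUM are created as placeholders with index -1.
 */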
3310static void init_pic_list(struct hevc_state_s *hevc)
3311{
3312 int i;
3313 int init_buf_num = get_work_pic_num(hevc);
3314 int dw_mode = get_double_write_mode(hevc);
3315 struct vdec_s *vdec = hw_to_vdec(hevc);
3316 /*alloc decoder buf*/
3317 for (i = 0; i < init_buf_num; i++) {
3318 if (alloc_buf(hevc) < 0) {
3319 if (i <= 8) {
3320			/* if (i + 1) >= 9 buffers were already allocated,
3321			 * don't report a fatal error */
3322 hevc->fatal_error |=
3323 DECODER_FATAL_ERROR_NO_MEM;
3324 }
3325 break;
3326 }
3327 }
3328
3329 for (i = 0; i < init_buf_num; i++) {
3330 struct PIC_s *pic =
3331 vmalloc(sizeof(struct PIC_s));
3332 if (pic == NULL) {
3333 hevc_print(hevc, 0,
3334 "%s: alloc pic %d fail!!!\n",
3335 __func__, i);
3336 break;
3337 }
3338 memset(pic, 0, sizeof(struct PIC_s));
3339 hevc->m_PIC[i] = pic;
3340 pic->index = i;
3341 pic->BUF_index = -1;
3342 pic->mv_buf_index = -1;
3343 if (vdec->parallel_dec == 1) {
3344 pic->y_canvas_index = -1;
3345 pic->uv_canvas_index = -1;
3346 }
3347
3348 pic->width = hevc->pic_w;
3349 pic->height = hevc->pic_h;
3350 pic->double_write_mode = dw_mode;
3351
3352 if (!hevc->is_used_v4l) {
3353 if (config_pic(hevc, pic) < 0) {
3354 if (get_dbg_flag(hevc))
3355 hevc_print(hevc, 0,
3356 "Config_pic %d fail\n", pic->index);
3357 pic->index = -1;
3358 i++;
3359 break;
3360 }
3361
3362 if (pic->double_write_mode)
3363 set_canvas(hevc, pic);
3364 }
3365 }
3366
3367 for (; i < MAX_REF_PIC_NUM; i++) {
3368 struct PIC_s *pic =
3369 vmalloc(sizeof(struct PIC_s));
3370 if (pic == NULL) {
3371 hevc_print(hevc, 0,
3372 "%s: alloc pic %d fail!!!\n",
3373 __func__, i);
3374 break;
3375 }
3376 memset(pic, 0, sizeof(struct PIC_s));
3377 hevc->m_PIC[i] = pic;
3378 pic->index = -1;
3379 pic->BUF_index = -1;
3380 if (vdec->parallel_dec == 1) {
3381 pic->y_canvas_index = -1;
3382 pic->uv_canvas_index = -1;
3383 }
3384 }
3385
3386}
3387
3388static void uninit_pic_list(struct hevc_state_s *hevc)
3389{
3390 struct vdec_s *vdec = hw_to_vdec(hevc);
3391 int i;
3392#ifndef MV_USE_FIXED_BUF
3393 dealloc_mv_bufs(hevc);
3394#endif
3395 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3396 struct PIC_s *pic = hevc->m_PIC[i];
3397
3398 if (pic) {
3399 if (vdec->parallel_dec == 1) {
3400 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3401 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3402 }
3403 release_aux_data(hevc, pic);
3404 vfree(pic);
3405 hevc->m_PIC[i] = NULL;
3406 }
3407 }
3408}
3409
3410#ifdef LOSLESS_COMPRESS_MODE
3411static void init_decode_head_hw(struct hevc_state_s *hevc)
3412{
3413
3414 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3415 unsigned int data32;
3416
3417 int losless_comp_header_size =
3418 compute_losless_comp_header_size(hevc->pic_w,
3419 hevc->pic_h);
3420 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3421 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3422
3423 hevc->losless_comp_body_size = losless_comp_body_size;
3424
3425
3426 if (hevc->mmu_enable) {
3427 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3428 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3429 } else {
3430 if (hevc->mem_saving_mode == 1)
3431 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3432 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3433 else
3434 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3435 ((workaround_enable & 2) ? 1 : 0));
3436 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3437 /*
3438 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3439 * //8-bit mode
3440 */
3441 }
3442 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3443 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3444 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3445
3446 if (hevc->mmu_enable) {
3447 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3448 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3449 buf_spec->mmu_vbh.buf_start +
3450 buf_spec->mmu_vbh.buf_size/2);
3451 data32 = READ_VREG(HEVC_SAO_CTRL9);
3452 data32 |= 0x1;
3453 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3454
3455 /* use HEVC_CM_HEADER_START_ADDR */
3456 data32 = READ_VREG(HEVC_SAO_CTRL5);
3457 data32 |= (1<<10);
3458 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3459 }
3460
3461 if (!hevc->m_ins_flag)
3462 hevc_print(hevc, 0,
3463 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3464 __func__, hevc->pic_w, hevc->pic_h,
3465 losless_comp_body_size, losless_comp_header_size);
3466
3467}
3468#endif
3469#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3470
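/*
 * Load the allocated picture addresses into the HEVCD_MPP_ANC2AXI
 * table: the compressed header address when the frame MMU is used,
 * otherwise the Y/UV (double write) addresses; unused entries are
 * padded with the last valid picture.
 */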
3471static void init_pic_list_hw(struct hevc_state_s *hevc)
3472{
3473 int i;
3474 int cur_pic_num = MAX_REF_PIC_NUM;
3475 int dw_mode = get_double_write_mode(hevc);
3476 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3477 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3478 (0x1 << 1) | (0x1 << 2));
3479 else
3480 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3481
3482 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3483 if (hevc->m_PIC[i] == NULL ||
3484 hevc->m_PIC[i]->index == -1) {
3485 cur_pic_num = i;
3486 break;
3487 }
3488 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3489 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3490 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3491 hevc->m_PIC[i]->header_adr>>5);
3492 else
3493 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3494 hevc->m_PIC[i]->mc_y_adr >> 5);
3495 } else
3496 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3497 hevc->m_PIC[i]->mc_y_adr |
3498 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3499 if (dw_mode & 0x10) {
3500 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3501 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3502 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3503 }
3504 else
3505 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3506 hevc->m_PIC[i]->mc_u_v_adr |
3507 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3508 | 0x1);
3509 }
3510 }
3511 if (cur_pic_num == 0)
3512 return;
3513 for (; i < MAX_REF_PIC_NUM; i++) {
3514 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3515 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3516 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3517 hevc->m_PIC[cur_pic_num-1]->header_adr>>5);
3518 else
3519 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3520 hevc->m_PIC[cur_pic_num-1]->mc_y_adr >> 5);
3521#ifndef LOSLESS_COMPRESS_MODE
3522 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3523 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr >> 5);
3524#endif
3525 } else {
3526 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3527 hevc->m_PIC[cur_pic_num-1]->mc_y_adr|
3528 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_y<<8)
3529 | 0x1);
3530#ifndef LOSLESS_COMPRESS_MODE
3531 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3532 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr|
3533 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_u_v<<8)
3534 | 0x1);
3535#endif
3536 }
3537 }
3538
3539 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3540
3541 /* Zero out canvas registers in IPP -- avoid simulation X */
3542 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3543 (0 << 8) | (0 << 1) | 1);
3544 for (i = 0; i < 32; i++)
3545 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3546
3547#ifdef LOSLESS_COMPRESS_MODE
3548 if ((dw_mode & 0x10) == 0)
3549 init_decode_head_hw(hevc);
3550#endif
3551
3552}
3553
3554
3555static void dump_pic_list(struct hevc_state_s *hevc)
3556{
3557 int i;
3558 struct PIC_s *pic;
3559
3560 hevc_print(hevc, 0,
3561 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3562 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3563 pic = hevc->m_PIC[i];
3564 if (pic == NULL || pic->index == -1)
3565 continue;
3566 hevc_print_cont(hevc, 0,
3567 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3568 pic->index, pic->BUF_index,
3569#ifndef MV_USE_FIXED_BUF
3570 pic->mv_buf_index,
3571#else
3572 -1,
3573#endif
3574 pic->decode_idx, pic->POC, pic->referenced);
3575 hevc_print_cont(hevc, 0,
3576 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3577 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3578 pic->width, pic->height);
3579 hevc_print_cont(hevc, 0,
3580 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3581 pic->output_ready, pic->mpred_mv_wr_start_addr,
3582 pic->vf_ref);
3583 }
3584}
3585
3586static void clear_referenced_flag(struct hevc_state_s *hevc)
3587{
3588 int i;
3589 struct PIC_s *pic;
3590 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3591 pic = hevc->m_PIC[i];
3592 if (pic == NULL || pic->index == -1)
3593 continue;
3594 if (pic->referenced) {
3595 pic->referenced = 0;
3596 put_mv_buf(hevc, pic);
3597 }
3598 }
3599}
3600
3601static void clear_poc_flag(struct hevc_state_s *hevc)
3602{
3603 int i;
3604 struct PIC_s *pic;
3605 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3606 pic = hevc->m_PIC[i];
3607 if (pic == NULL || pic->index == -1)
3608 continue;
3609 pic->POC = INVALID_POC;
3610 }
3611}
3612
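/*
 * Pick the next picture to output.  With (i_only & 0x4) the pending
 * picture with the smallest decode_idx is taken; otherwise the smallest
 * POC wins, but only once more pictures than num_reorder_pic are
 * waiting (or flush_flag forces output).  The first I picture can be
 * output immediately when fast_output_enable bit 0 is set.
 */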
3613static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3614 unsigned char flush_flag)
3615{
3616 int num_pic_not_yet_display = 0;
3617 int i;
3618 struct PIC_s *pic;
3619 struct PIC_s *pic_display = NULL;
3620 struct vdec_s *vdec = hw_to_vdec(hevc);
3621
3622 if (hevc->i_only & 0x4) {
3623 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3624 pic = hevc->m_PIC[i];
3625 if (pic == NULL ||
3626 (pic->index == -1) ||
3627 (pic->BUF_index == -1) ||
3628 (pic->POC == INVALID_POC))
3629 continue;
3630 if (pic->output_mark) {
3631 if (pic_display) {
3632 if (pic->decode_idx <
3633 pic_display->decode_idx)
3634 pic_display = pic;
3635
3636 } else
3637 pic_display = pic;
3638
3639 }
3640 }
3641 if (pic_display) {
3642 pic_display->output_mark = 0;
3643 pic_display->recon_mark = 0;
3644 pic_display->output_ready = 1;
3645 pic_display->referenced = 0;
3646 put_mv_buf(hevc, pic_display);
3647 }
3648 } else {
3649 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3650 pic = hevc->m_PIC[i];
3651 if (pic == NULL ||
3652 (pic->index == -1) ||
3653 (pic->BUF_index == -1) ||
3654 (pic->POC == INVALID_POC))
3655 continue;
3656 if (pic->output_mark)
3657 num_pic_not_yet_display++;
3658 if (pic->slice_type == 2 &&
3659 hevc->vf_pre_count == 0 &&
3660 fast_output_enable & 0x1) {
3661 /*fast output for first I picture*/
3662 pic->num_reorder_pic = 0;
3663 if (vdec->master || vdec->slave)
3664 pic_display = pic;
3665 hevc_print(hevc, 0, "VH265: output first frame\n");
3666 }
3667 }
3668
3669 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3670 pic = hevc->m_PIC[i];
3671 if (pic == NULL ||
3672 (pic->index == -1) ||
3673 (pic->BUF_index == -1) ||
3674 (pic->POC == INVALID_POC))
3675 continue;
3676 if (pic->output_mark) {
3677 if (pic_display) {
3678 if (pic->POC < pic_display->POC)
3679 pic_display = pic;
3680 else if ((pic->POC == pic_display->POC)
3681 && (pic->decode_idx <
3682 pic_display->
3683 decode_idx))
3684 pic_display
3685 = pic;
3686 } else
3687 pic_display = pic;
3688 }
3689 }
3690 if (pic_display) {
3691 if ((num_pic_not_yet_display >
3692 pic_display->num_reorder_pic)
3693 || flush_flag) {
3694 pic_display->output_mark = 0;
3695 pic_display->recon_mark = 0;
3696 pic_display->output_ready = 1;
3697 } else if (num_pic_not_yet_display >=
3698 (MAX_REF_PIC_NUM - 1)) {
3699 pic_display->output_mark = 0;
3700 pic_display->recon_mark = 0;
3701 pic_display->output_ready = 1;
3702 hevc_print(hevc, 0,
3703				"Warning, num_reorder_pic %d is beyond buf num\n",
3704 pic_display->num_reorder_pic);
3705 } else
3706 pic_display = NULL;
3707 }
3708 }
3709
3710 if (pic_display && (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3711 pic_display = NULL;
3712 hevc->first_pic_flag = 0;
3713 }
3714 return pic_display;
3715}
3716
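/*
 * Program the motion compensation reference canvas table for the
 * current slice: list0 entries start at canvas index 0, list1 (for B
 * slices) at index 16.  A reference with a mismatched size or one that
 * is missing marks the current picture as in error; an error-marked
 * reference does the same when ref_frame_mark_flag is set.
 */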
3717static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3718{
3719 int i;
3720 struct PIC_s *pic;
3721
3722 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3723 hevc_print(hevc, 0,
3724 "config_mc_buffer entered .....\n");
3725 if (cur_pic->slice_type != 2) { /* P and B pic */
3726 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3727 (0 << 8) | (0 << 1) | 1);
3728 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3729 pic =
3730 get_ref_pic_by_POC(hevc,
3731 cur_pic->
3732 m_aiRefPOCList0[cur_pic->
3733 slice_idx][i]);
3734 if (pic) {
3735 if ((pic->width != hevc->pic_w) ||
3736 (pic->height != hevc->pic_h)) {
3737 hevc_print(hevc, 0,
3738 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3739 __func__, pic->POC,
3740 pic->width, pic->height);
3741 cur_pic->error_mark = 1;
3742 }
3743 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3744 cur_pic->error_mark = 1;
3745 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3746 (pic->mc_canvas_u_v << 16)
3747 | (pic->mc_canvas_u_v
3748 << 8) |
3749 pic->mc_canvas_y);
3750 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3751 hevc_print_cont(hevc, 0,
3752 "refid %x mc_canvas_u_v %x",
3753 i, pic->mc_canvas_u_v);
3754 hevc_print_cont(hevc, 0,
3755 " mc_canvas_y %x\n",
3756 pic->mc_canvas_y);
3757 }
3758 } else
3759 cur_pic->error_mark = 1;
3760
3761 if (pic == NULL || pic->error_mark) {
3762 hevc_print(hevc, 0,
3763 "Error %s, %dth poc (%d) %s",
3764 __func__, i,
3765 cur_pic->m_aiRefPOCList0[cur_pic->
3766 slice_idx][i],
3767 pic ? "has error" :
3768 "not in list0");
3769 }
3770 }
3771 }
3772 if (cur_pic->slice_type == 0) { /* B pic */
3773 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3774 hevc_print(hevc, 0,
3775 "config_mc_buffer RefNum_L1\n");
3776 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3777 (16 << 8) | (0 << 1) | 1);
3778
3779 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3780 pic =
3781 get_ref_pic_by_POC(hevc,
3782 cur_pic->
3783 m_aiRefPOCList1[cur_pic->
3784 slice_idx][i]);
3785 if (pic) {
3786 if ((pic->width != hevc->pic_w) ||
3787 (pic->height != hevc->pic_h)) {
3788 hevc_print(hevc, 0,
3789 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3790 __func__, pic->POC,
3791 pic->width, pic->height);
3792 cur_pic->error_mark = 1;
3793 }
3794
3795 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3796 cur_pic->error_mark = 1;
3797 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3798 (pic->mc_canvas_u_v << 16)
3799 | (pic->mc_canvas_u_v
3800 << 8) |
3801 pic->mc_canvas_y);
3802 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3803 hevc_print_cont(hevc, 0,
3804 "refid %x mc_canvas_u_v %x",
3805 i, pic->mc_canvas_u_v);
3806 hevc_print_cont(hevc, 0,
3807 " mc_canvas_y %x\n",
3808 pic->mc_canvas_y);
3809 }
3810 } else
3811 cur_pic->error_mark = 1;
3812
3813 if (pic == NULL || pic->error_mark) {
3814 hevc_print(hevc, 0,
3815				"Error %s, %dth poc (%d) %s\n",
3816 __func__, i,
3817 cur_pic->m_aiRefPOCList1[cur_pic->
3818 slice_idx][i],
3819 pic ? "has error" :
3820 "not in list1");
3821 }
3822 }
3823 }
3824 return 0;
3825}
3826
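/*
 * apply_ref_pic_set() - walk the allocated pictures and clear the
 * referenced flag of any picture whose POC is not named by the current
 * RPS (CUR_RPS[] entries, terminated by bit 15).  Pictures dropped from
 * the reference set also give back their MV buffer via put_mv_buf().
 */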
3827static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
3828 union param_u *params)
3829{
3830 int ii, i;
3831 int poc_tmp;
3832 struct PIC_s *pic;
3833 unsigned char is_referenced;
3834 /* hevc_print(hevc, 0,
3835 "%s cur_poc %d\n", __func__, cur_poc); */
3836 if (pic_list_debug & 0x2) {
3837 pr_err("cur poc %d\n", cur_poc);
3838 }
3839 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
3840 pic = hevc->m_PIC[ii];
3841 if (pic == NULL ||
3842 pic->index == -1 ||
3843 pic->BUF_index == -1
3844 )
3845 continue;
3846
3847		if (pic->referenced == 0 || pic->POC == cur_poc)
3848 continue;
3849 is_referenced = 0;
3850 for (i = 0; i < 16; i++) {
3851 int delt;
3852
3853 if (params->p.CUR_RPS[i] & 0x8000)
3854 break;
3855 delt =
3856 params->p.CUR_RPS[i] &
3857 ((1 << (RPS_USED_BIT - 1)) - 1);
3858 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
3859 poc_tmp =
3860 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
3861 delt);
3862 } else
3863 poc_tmp = cur_poc + delt;
3864 if (poc_tmp == pic->POC) {
3865 is_referenced = 1;
3866 /* hevc_print(hevc, 0, "i is %d\n", i); */
3867 break;
3868 }
3869 }
3870 if (is_referenced == 0) {
3871 pic->referenced = 0;
3872 put_mv_buf(hevc, pic);
3873 /* hevc_print(hevc, 0,
3874 "set poc %d reference to 0\n", pic->POC); */
3875 if (pic_list_debug & 0x2) {
3876 pr_err("set poc %d reference to 0\n", pic->POC);
3877 }
3878 }
3879 }
3880
3881}
3882
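/*
 * set_ref_pic_list() - build m_aiRefPOCList0/1 for the current slice from
 * the short-term RPS: negative deltas fill RefPicSetStCurr0, positive
 * deltas fill RefPicSetStCurr1, and the active lists are either taken
 * round-robin from these sets or reordered through modification_list[]
 * when list modification is signalled.  Also latches slice_type and the
 * active reference counts into the current picture.
 */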
3883static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
3884{
3885 struct PIC_s *pic = hevc->cur_pic;
3886 int i, rIdx;
3887 int num_neg = 0;
3888 int num_pos = 0;
3889 int total_num;
3890 int num_ref_idx_l0_active =
3891 (params->p.num_ref_idx_l0_active >
3892 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
3893 params->p.num_ref_idx_l0_active;
3894 int num_ref_idx_l1_active =
3895 (params->p.num_ref_idx_l1_active >
3896 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
3897 params->p.num_ref_idx_l1_active;
3898
3899 int RefPicSetStCurr0[16];
3900 int RefPicSetStCurr1[16];
3901
3902 for (i = 0; i < 16; i++) {
3903 RefPicSetStCurr0[i] = 0;
3904 RefPicSetStCurr1[i] = 0;
3905 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
3906 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
3907 }
3908 for (i = 0; i < 16; i++) {
3909 if (params->p.CUR_RPS[i] & 0x8000)
3910 break;
3911 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
3912 int delt =
3913 params->p.CUR_RPS[i] &
3914 ((1 << (RPS_USED_BIT - 1)) - 1);
3915
3916 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
3917 RefPicSetStCurr0[num_neg] =
3918 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
3919 delt);
3920 /* hevc_print(hevc, 0,
3921 * "RefPicSetStCurr0 %x %x %x\n",
3922 * RefPicSetStCurr0[num_neg], pic->POC,
3923 * (0x800-(params[i]&0x7ff)));
3924 */
3925 num_neg++;
3926 } else {
3927 RefPicSetStCurr1[num_pos] = pic->POC + delt;
3928 /* hevc_print(hevc, 0,
3929 * "RefPicSetStCurr1 %d\n",
3930 * RefPicSetStCurr1[num_pos]);
3931 */
3932 num_pos++;
3933 }
3934 }
3935 }
3936 total_num = num_neg + num_pos;
3937 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3938 hevc_print(hevc, 0,
3939 "%s: curpoc %d slice_type %d, total %d ",
3940 __func__, pic->POC, params->p.slice_type, total_num);
3941 hevc_print_cont(hevc, 0,
3942 "num_neg %d num_list0 %d num_list1 %d\n",
3943 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
3944 }
3945
3946 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3947 hevc_print(hevc, 0,
3948 "HEVC Stream buf start ");
3949 hevc_print_cont(hevc, 0,
3950 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
3951 READ_VREG(HEVC_STREAM_START_ADDR),
3952 READ_VREG(HEVC_STREAM_END_ADDR),
3953 READ_VREG(HEVC_STREAM_WR_PTR),
3954 READ_VREG(HEVC_STREAM_RD_PTR),
3955 READ_VREG(HEVC_STREAM_LEVEL),
3956 READ_VREG(HEVC_STREAM_FIFO_CTL),
3957 READ_VREG(HEVC_PARSER_INT_CONTROL));
3958 }
3959
3960 if (total_num > 0) {
3961 if (params->p.modification_flag & 0x1) {
3962 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3963 hevc_print(hevc, 0, "ref0 POC (modification):");
3964 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
3965 int cIdx = params->p.modification_list[rIdx];
3966
3967 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
3968 cIdx >=
3969 num_neg ? RefPicSetStCurr1[cIdx -
3970 num_neg] :
3971 RefPicSetStCurr0[cIdx];
3972 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3973 hevc_print_cont(hevc, 0, "%d ",
3974 pic->m_aiRefPOCList0[pic->
3975 slice_idx]
3976 [rIdx]);
3977 }
3978 }
3979 } else {
3980 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3981 hevc_print(hevc, 0, "ref0 POC:");
3982 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
3983 int cIdx = rIdx % total_num;
3984
3985 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
3986 cIdx >=
3987 num_neg ? RefPicSetStCurr1[cIdx -
3988 num_neg] :
3989 RefPicSetStCurr0[cIdx];
3990 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3991 hevc_print_cont(hevc, 0, "%d ",
3992 pic->m_aiRefPOCList0[pic->
3993 slice_idx]
3994 [rIdx]);
3995 }
3996 }
3997 }
3998 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3999 hevc_print_cont(hevc, 0, "\n");
4000 if (params->p.slice_type == B_SLICE) {
4001 if (params->p.modification_flag & 0x2) {
4002 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4003 hevc_print(hevc, 0,
4004 "ref1 POC (modification):");
4005 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4006 rIdx++) {
4007 int cIdx;
4008
4009 if (params->p.modification_flag & 0x1) {
4010 cIdx =
4011 params->p.
4012 modification_list
4013 [num_ref_idx_l0_active +
4014 rIdx];
4015 } else {
4016 cIdx =
4017 params->p.
4018 modification_list[rIdx];
4019 }
4020 pic->m_aiRefPOCList1[pic->
4021 slice_idx][rIdx] =
4022 cIdx >=
4023 num_pos ?
4024 RefPicSetStCurr0[cIdx - num_pos]
4025 : RefPicSetStCurr1[cIdx];
4026 if (get_dbg_flag(hevc) &
4027 H265_DEBUG_BUFMGR) {
4028 hevc_print_cont(hevc, 0, "%d ",
4029 pic->
4030 m_aiRefPOCList1[pic->
4031 slice_idx]
4032 [rIdx]);
4033 }
4034 }
4035 } else {
4036 if (get_dbg_flag(hevc) &
4037 H265_DEBUG_BUFMGR)
4038 hevc_print(hevc, 0, "ref1 POC:");
4039 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4040 rIdx++) {
4041 int cIdx = rIdx % total_num;
4042
4043 pic->m_aiRefPOCList1[pic->
4044 slice_idx][rIdx] =
4045 cIdx >=
4046 num_pos ?
4047 RefPicSetStCurr0[cIdx -
4048 num_pos]
4049 : RefPicSetStCurr1[cIdx];
4050 if (get_dbg_flag(hevc) &
4051 H265_DEBUG_BUFMGR) {
4052 hevc_print_cont(hevc, 0, "%d ",
4053 pic->
4054 m_aiRefPOCList1[pic->
4055 slice_idx]
4056 [rIdx]);
4057 }
4058 }
4059 }
4060 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4061 hevc_print_cont(hevc, 0, "\n");
4062 }
4063 }
4064 /*set m_PIC */
4065 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4066 (params->p.slice_type == P_SLICE) ? 1 :
4067 (params->p.slice_type == B_SLICE) ? 0 : 3;
4068 pic->RefNum_L0 = num_ref_idx_l0_active;
4069 pic->RefNum_L1 = num_ref_idx_l1_active;
4070}
4071
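/*
 * update_tile_info() - derive the tile grid for the current picture.  With
 * tiles enabled the column/row counts are range checked and every tile gets
 * its width/height in CUs (uniform spacing or the explicit tile_width[]/
 * tile_height[] lists), its start CU coordinates and its per-tile SAO
 * vertical/above buffer addresses inside the work space.  Without tiles a
 * single full-picture tile is set up.
 */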
4072static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4073 int pic_height_cu, int sao_mem_unit,
4074 union param_u *params)
4075{
4076 int i, j;
4077 int start_cu_x, start_cu_y;
4078 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4079 int sao_abv_size = sao_mem_unit * pic_width_cu;
4080#ifdef DETREFILL_ENABLE
4081 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4082 int tmpRefillLcuSize = 1 <<
4083 (params->p.log2_min_coding_block_size_minus3 +
4084 3 + params->p.log2_diff_max_min_coding_block_size);
4085 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4086 "%x, %x, %x, %x\n",
4087 params->p.slice_segment_address,
4088 params->p.bit_depth,
4089 params->p.tiles_enabled_flag,
4090 tmpRefillLcuSize);
4091 if (params->p.slice_segment_address == 0 &&
4092 params->p.bit_depth != 0 &&
4093 (params->p.tiles_enabled_flag & 1) &&
4094 tmpRefillLcuSize == 64)
4095 hevc->delrefill_check = 1;
4096 else
4097 hevc->delrefill_check = 0;
4098 }
4099#endif
4100
4101 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4102 if (params->p.tiles_enabled_flag & 1) {
4103 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4104 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4105
4106 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4107 || hevc->num_tile_row <= 0) {
4108 hevc->num_tile_row = 1;
4109 hevc_print(hevc, 0,
4110 "%s: num_tile_rows_minus1 (%d) error!!\n",
4111 __func__, params->p.num_tile_rows_minus1);
4112 }
4113 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4114 || hevc->num_tile_col <= 0) {
4115 hevc->num_tile_col = 1;
4116 hevc_print(hevc, 0,
4117 "%s: num_tile_columns_minus1 (%d) error!!\n",
4118 __func__, params->p.num_tile_columns_minus1);
4119 }
4120 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4121 hevc_print(hevc, 0,
4122 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4123 __func__, pic_width_cu, pic_height_cu);
4124 hevc_print_cont(hevc, 0,
4125 "num_tile_col %d num_tile_row %d:\n",
4126 hevc->num_tile_col, hevc->num_tile_row);
4127 }
4128
4129 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4130 int w = pic_width_cu / hevc->num_tile_col;
4131 int h = pic_height_cu / hevc->num_tile_row;
4132
4133 start_cu_y = 0;
4134 for (i = 0; i < hevc->num_tile_row; i++) {
4135 start_cu_x = 0;
4136 for (j = 0; j < hevc->num_tile_col; j++) {
4137 if (j == (hevc->num_tile_col - 1)) {
4138 hevc->m_tile[i][j].width =
4139 pic_width_cu -
4140 start_cu_x;
4141 } else
4142 hevc->m_tile[i][j].width = w;
4143 if (i == (hevc->num_tile_row - 1)) {
4144 hevc->m_tile[i][j].height =
4145 pic_height_cu -
4146 start_cu_y;
4147 } else
4148 hevc->m_tile[i][j].height = h;
4149 hevc->m_tile[i][j].start_cu_x
4150 = start_cu_x;
4151 hevc->m_tile[i][j].start_cu_y
4152 = start_cu_y;
4153 hevc->m_tile[i][j].sao_vb_start_addr =
4154 hevc->work_space_buf->sao_vb.
4155 buf_start + j * sao_vb_size;
4156 hevc->m_tile[i][j].sao_abv_start_addr =
4157 hevc->work_space_buf->sao_abv.
4158 buf_start + i * sao_abv_size;
4159 if (get_dbg_flag(hevc) &
4160 H265_DEBUG_BUFMGR) {
4161 hevc_print_cont(hevc, 0,
4162 "{y=%d, x=%d w %d h %d ",
4163 i, j, hevc->m_tile[i][j].width,
4164 hevc->m_tile[i][j].height);
4165 hevc_print_cont(hevc, 0,
4166 "start_x %d start_y %d ",
4167 hevc->m_tile[i][j].start_cu_x,
4168 hevc->m_tile[i][j].start_cu_y);
4169 hevc_print_cont(hevc, 0,
4170 "sao_vb_start 0x%x ",
4171 hevc->m_tile[i][j].
4172 sao_vb_start_addr);
4173 hevc_print_cont(hevc, 0,
4174 "sao_abv_start 0x%x}\n",
4175 hevc->m_tile[i][j].
4176 sao_abv_start_addr);
4177 }
4178 start_cu_x += hevc->m_tile[i][j].width;
4179
4180 }
4181 start_cu_y += hevc->m_tile[i][0].height;
4182 }
4183 } else {
4184 start_cu_y = 0;
4185 for (i = 0; i < hevc->num_tile_row; i++) {
4186 start_cu_x = 0;
4187 for (j = 0; j < hevc->num_tile_col; j++) {
4188 if (j == (hevc->num_tile_col - 1)) {
4189 hevc->m_tile[i][j].width =
4190 pic_width_cu -
4191 start_cu_x;
4192 } else {
4193 hevc->m_tile[i][j].width =
4194 params->p.tile_width[j];
4195 }
4196 if (i == (hevc->num_tile_row - 1)) {
4197 hevc->m_tile[i][j].height =
4198 pic_height_cu -
4199 start_cu_y;
4200 } else {
4201 hevc->m_tile[i][j].height =
4202 params->
4203 p.tile_height[i];
4204 }
4205 hevc->m_tile[i][j].start_cu_x
4206 = start_cu_x;
4207 hevc->m_tile[i][j].start_cu_y
4208 = start_cu_y;
4209 hevc->m_tile[i][j].sao_vb_start_addr =
4210 hevc->work_space_buf->sao_vb.
4211 buf_start + j * sao_vb_size;
4212 hevc->m_tile[i][j].sao_abv_start_addr =
4213 hevc->work_space_buf->sao_abv.
4214 buf_start + i * sao_abv_size;
4215 if (get_dbg_flag(hevc) &
4216 H265_DEBUG_BUFMGR) {
4217 hevc_print_cont(hevc, 0,
4218 "{y=%d, x=%d w %d h %d ",
4219 i, j, hevc->m_tile[i][j].width,
4220 hevc->m_tile[i][j].height);
4221 hevc_print_cont(hevc, 0,
4222 "start_x %d start_y %d ",
4223 hevc->m_tile[i][j].start_cu_x,
4224 hevc->m_tile[i][j].start_cu_y);
4225 hevc_print_cont(hevc, 0,
4226 "sao_vb_start 0x%x ",
4227 hevc->m_tile[i][j].
4228 sao_vb_start_addr);
4229 hevc_print_cont(hevc, 0,
4230 "sao_abv_start 0x%x}\n",
4231 hevc->m_tile[i][j].
4232 sao_abv_start_addr);
4233
4234 }
4235 start_cu_x += hevc->m_tile[i][j].width;
4236 }
4237 start_cu_y += hevc->m_tile[i][0].height;
4238 }
4239 }
4240 } else {
4241 hevc->num_tile_col = 1;
4242 hevc->num_tile_row = 1;
4243 hevc->m_tile[0][0].width = pic_width_cu;
4244 hevc->m_tile[0][0].height = pic_height_cu;
4245 hevc->m_tile[0][0].start_cu_x = 0;
4246 hevc->m_tile[0][0].start_cu_y = 0;
4247 hevc->m_tile[0][0].sao_vb_start_addr =
4248 hevc->work_space_buf->sao_vb.buf_start;
4249 hevc->m_tile[0][0].sao_abv_start_addr =
4250 hevc->work_space_buf->sao_abv.buf_start;
4251 }
4252}
4253
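/*
 * get_tile_index() - translate an LCU address into tile coordinates,
 * returned packed as tile_x | (tile_y << 8), or -1 if pic_width_lcu is 0.
 */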
4254static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4255 int pic_width_lcu)
4256{
4257 int cu_x;
4258 int cu_y;
4259 int tile_x = 0;
4260 int tile_y = 0;
4261 int i;
4262
4263 if (pic_width_lcu == 0) {
4264 if (get_dbg_flag(hevc)) {
4265 hevc_print(hevc, 0,
4266 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4267 __func__, hevc->pic_w, hevc->pic_h);
4268 }
4269 return -1;
4270 }
4271 cu_x = cu_adr % pic_width_lcu;
4272 cu_y = cu_adr / pic_width_lcu;
4273 if (hevc->tile_enabled) {
4274 for (i = 0; i < hevc->num_tile_col; i++) {
4275 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4276 tile_x = i;
4277 else
4278 break;
4279 }
4280 for (i = 0; i < hevc->num_tile_row; i++) {
4281 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4282 tile_y = i;
4283 else
4284 break;
4285 }
4286 }
4287 return (tile_x) | (tile_y << 8);
4288}
4289
4290static void print_scratch_error(int error_num)
4291{
4292#if 0
4293 if (get_dbg_flag(hevc)) {
4294 hevc_print(hevc, 0,
4295 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4296 error_num);
4297 }
4298#endif
4299}
4300
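/*
 * hevc_config_work_space_hw() - point the decoder at its working buffers:
 * IPP line buffer, RPM, short term RPS, VPS/SPS/PPS, SAO, scale LUT,
 * deblock parameter/data buffers, the frame MMU map (register depends on
 * the SoC generation) and the LMEM dump area.
 */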
4301static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4302{
4303 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4304
4305 if (get_dbg_flag(hevc))
4306 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4307 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4308 __func__,
4309 buf_spec->ipp.buf_start,
4310 buf_spec->start_adr,
4311 buf_spec->short_term_rps.buf_start,
4312 buf_spec->vps.buf_start,
4313 buf_spec->sps.buf_start,
4314 buf_spec->pps.buf_start,
4315 buf_spec->sao_up.buf_start,
4316 buf_spec->swap_buf.buf_start,
4317 buf_spec->swap_buf2.buf_start,
4318 buf_spec->scalelut.buf_start,
4319 buf_spec->dblk_para.buf_start,
4320 buf_spec->dblk_data.buf_start,
4321 buf_spec->dblk_data2.buf_start);
4322 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4323 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4324 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4325 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4326 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4327 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4328 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4329 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4330 if (hevc->mmu_enable) {
4331 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4332 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4333 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4334 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4335 } else
4336 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4337 } /*else
4338 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4339 buf_spec->swap_buf.buf_start);
4340 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4341 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4342 /* cfg_p_addr */
4343 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4344 /* cfg_d_addr */
4345 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4346
4347 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4348
4349 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4350}
4351
4352static void parser_cmd_write(void)
4353{
4354 u32 i;
4355 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4356 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4357 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4358 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4359 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4360 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4361 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4362 0x7C00
4363 };
4364 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4365 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4366}
4367
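/*
 * hevc_init_decoder_hw() - bring up the parser front end: sanity check the
 * HEVC_PARSER_VERSION and SHIFT start/emulate code scratch registers, reset
 * IQIT, enable stream fetch (single instance mode only), program the parser
 * interrupt/shift/CABAC controls, clear the IQIT scale LUT, send the parser
 * command list and reset the IPP/MPP pipeline.
 */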
4368static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4369 int decode_pic_begin, int decode_pic_num)
4370{
4371 unsigned int data32;
4372 int i;
4373#if 0
4374 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4375 /* Set MCR fetch priorities*/
4376 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4377 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4378 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4379 }
4380#endif
4381#if 1
4382 /* m8baby test1902 */
4383 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4384 hevc_print(hevc, 0,
4385 "%s\n", __func__);
4386 data32 = READ_VREG(HEVC_PARSER_VERSION);
4387 if (data32 != 0x00010001) {
4388 print_scratch_error(25);
4389 return;
4390 }
4391 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4392 data32 = READ_VREG(HEVC_PARSER_VERSION);
4393 if (data32 != 0x5a5a55aa) {
4394 print_scratch_error(26);
4395 return;
4396 }
4397#if 0
4398 /* test Parser Reset */
4399 /* reset iqit to start mem init again */
4400 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4401 (1 << 3) /* reset_whole parser */
4402 );
4403 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4404 data32 = READ_VREG(HEVC_PARSER_VERSION);
4405 if (data32 != 0x00010001)
4406 hevc_print(hevc, 0,
4407 "Test Parser Fatal Error\n");
4408#endif
4409 /* reset iqit to start mem init again */
4410 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4411 );
4412 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4413 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4414
4415#endif
4416 if (!hevc->m_ins_flag) {
4417 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4418 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4419 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4420 data32 |= (0xf << 25); /*arwlen_axi_max*/
4421 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4422 }
4423 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4424 if (data32 != 0x00000100) {
4425 print_scratch_error(29);
4426 return;
4427 }
4428 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4429 if (data32 != 0x00000300) {
4430 print_scratch_error(30);
4431 return;
4432 }
4433 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4434 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4435 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4436 if (data32 != 0x12345678) {
4437 print_scratch_error(31);
4438 return;
4439 }
4440 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4441 if (data32 != 0x9abcdef0) {
4442 print_scratch_error(32);
4443 return;
4444 }
4445 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4446 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4447
4448 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4449 data32 &= 0x03ffffff;
4450 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4451 | /* stream_buffer_empty_int_amrisc_enable */
4452 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4453 (1 << 7) | /* dec_done_int_cpu_enable */
4454 (1 << 4) | /* startcode_found_int_cpu_enable */
4455 (0 << 3) | /* startcode_found_int_amrisc_enable */
4456 (1 << 0) /* parser_int_enable */
4457 ;
4458 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4459
4460 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4461 data32 = data32 | (1 << 1) | /* emulation_check_on */
4462 (1 << 0) /* startcode_check_on */
4463 ;
4464 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4465
4466 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4467 (2 << 4) | /* emulate_code_length_sub_1 */
4468 (2 << 1) | /* start_code_length_sub_1 */
4469 (1 << 0) /* stream_shift_enable */
4470 );
4471
4472 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4473 );
4474 /* hevc_parser_core_clk_en */
4475 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4476 );
4477
4478 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4479
4480	/* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4481 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4482 for (i = 0; i < 1024; i++)
4483 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4484
4485#ifdef ENABLE_SWAP_TEST
4486 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4487#endif
4488
4489 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4490 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4491 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4492 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4493 /* Send parser_cmd */
4494 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4495
4496 parser_cmd_write();
4497
4498 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4499 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4500 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4501
4502 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4503 /* (1 << 8) | // sao_sw_pred_enable */
4504 (1 << 5) | /* parser_sao_if_en */
4505 (1 << 2) | /* parser_mpred_if_en */
4506 (1 << 0) /* parser_scaler_if_en */
4507 );
4508
4509 /* Changed to Start MPRED in microcode */
4510 /*
4511 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4512 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4513 * (1<<31)
4514 * );
4515 */
4516
4517 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4518 (1 << 0) /* software reset ipp and mpp */
4519 );
4520 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4521 (0 << 0) /* software reset ipp and mpp */
4522 );
4523
4524 if (get_double_write_mode(hevc) & 0x10)
4525 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4526			0x1 << 31 /* Enable NV21 reference read mode for MC */
4527 );
4528
4529}
4530
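/*
 * decoder_hw_reset() - repeat the front end programming of
 * hevc_init_decoder_hw() (scratch register checks, parser/shift/CABAC
 * setup, scale LUT clear, parser command list, IPP/MPP reset) to recover
 * the hardware after a reset.
 */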
4531static void decoder_hw_reset(void)
4532{
4533 int i;
4534 unsigned int data32;
4535 /* reset iqit to start mem init again */
4536 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4537 );
4538 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4539 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4540
4541 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4542 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4543 ;
4544 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4545
4546 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4547 if (data32 != 0x00000100) {
4548 print_scratch_error(29);
4549 return;
4550 }
4551 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4552 if (data32 != 0x00000300) {
4553 print_scratch_error(30);
4554 return;
4555 }
4556 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4557 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4558 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4559 if (data32 != 0x12345678) {
4560 print_scratch_error(31);
4561 return;
4562 }
4563 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4564 if (data32 != 0x9abcdef0) {
4565 print_scratch_error(32);
4566 return;
4567 }
4568 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4569 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4570
4571 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4572 data32 &= 0x03ffffff;
4573 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4574 | /* stream_buffer_empty_int_amrisc_enable */
4575 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4576 (1 << 7) | /* dec_done_int_cpu_enable */
4577 (1 << 4) | /* startcode_found_int_cpu_enable */
4578 (0 << 3) | /* startcode_found_int_amrisc_enable */
4579 (1 << 0) /* parser_int_enable */
4580 ;
4581 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4582
4583 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4584 data32 = data32 | (1 << 1) | /* emulation_check_on */
4585 (1 << 0) /* startcode_check_on */
4586 ;
4587 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4588
4589 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4590 (2 << 4) | /* emulate_code_length_sub_1 */
4591 (2 << 1) | /* start_code_length_sub_1 */
4592 (1 << 0) /* stream_shift_enable */
4593 );
4594
4595 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4596 );
4597 /* hevc_parser_core_clk_en */
4598 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4599 );
4600
4601	/* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4602 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4603 for (i = 0; i < 1024; i++)
4604 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4605
4606 /* Send parser_cmd */
4607 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4608
4609 parser_cmd_write();
4610
4611 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4612 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4613 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4614
4615 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4616 /* (1 << 8) | // sao_sw_pred_enable */
4617 (1 << 5) | /* parser_sao_if_en */
4618 (1 << 2) | /* parser_mpred_if_en */
4619 (1 << 0) /* parser_scaler_if_en */
4620 );
4621
4622 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4623 (1 << 0) /* software reset ipp and mpp */
4624 );
4625 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4626 (0 << 0) /* software reset ipp and mpp */
4627 );
4628}
4629
4630#ifdef CONFIG_HEVC_CLK_FORCED_ON
4631static void config_hevc_clk_forced_on(void)
4632{
4633 unsigned int rdata32;
4634 /* IQIT */
4635 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4636 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4637
4638 /* DBLK */
4639 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4640 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4641
4642 /* SAO */
4643 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4644 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4645
4646 /* MPRED */
4647 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4648 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4649
4650 /* PARSER */
4651 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4652 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4653 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4654 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4655 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4656 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4657 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4658 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4659 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4660 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4661 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4662 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4663 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4664
4665 /* IPP */
4666 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4667 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4668
4669 /* MCRCC */
4670 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4671 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4672}
4673#endif
4674
4675#ifdef MCRCC_ENABLE
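/*
 * config_mcrcc_axi_hw() - set up the motion compensation reference cache:
 * reset it, leave it disabled for I pictures, otherwise prime it with the
 * canvas addresses of the first reference(s) of list0/list1 and enable
 * progressive mode.
 */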
4676static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4677{
4678 unsigned int rdata32;
4679 unsigned int rdata32_2;
4680 int l0_cnt = 0;
4681 int l1_cnt = 0x7fff;
4682
4683 if (get_double_write_mode(hevc) & 0x10) {
4684 l0_cnt = hevc->cur_pic->RefNum_L0;
4685 l1_cnt = hevc->cur_pic->RefNum_L1;
4686 }
4687
4688 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4689
4690 if (slice_type == 2) { /* I-PIC */
4691 /* remove reset -- disables clock */
4692 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4693 return;
4694 }
4695
4696 if (slice_type == 0) { /* B-PIC */
4697 /* Programme canvas0 */
4698 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4699 (0 << 8) | (0 << 1) | 0);
4700 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4701 rdata32 = rdata32 & 0xffff;
4702 rdata32 = rdata32 | (rdata32 << 16);
4703 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4704
4705 /* Programme canvas1 */
4706 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4707 (16 << 8) | (1 << 1) | 0);
4708 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4709 rdata32_2 = rdata32_2 & 0xffff;
4710 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4711 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4712 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4713 rdata32_2 = rdata32_2 & 0xffff;
4714 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4715 }
4716 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4717 } else { /* P-PIC */
4718 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4719 (0 << 8) | (1 << 1) | 0);
4720 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4721 rdata32 = rdata32 & 0xffff;
4722 rdata32 = rdata32 | (rdata32 << 16);
4723 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4724
4725 if (l0_cnt == 1) {
4726 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4727 } else {
4728 /* Programme canvas1 */
4729 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4730 rdata32 = rdata32 & 0xffff;
4731 rdata32 = rdata32 | (rdata32 << 16);
4732 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4733 }
4734 }
4735 /* enable mcrcc progressive-mode */
4736 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4737}
4738#endif
4739
4740static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4741 int sao_mem_unit)
4742{
4743 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4744 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4745 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4746 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4747}
4748
4749static u32 init_aux_size;
4750static int aux_data_is_avaible(struct hevc_state_s *hevc)
4751{
4752 u32 reg_val;
4753
4754 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4755 if (reg_val != 0 && reg_val != init_aux_size)
4756 return 1;
4757 else
4758 return 0;
4759}
4760
4761static void config_aux_buf(struct hevc_state_s *hevc)
4762{
4763 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4764 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4765 (hevc->suffix_aux_size >> 4);
4766 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4767}
4768
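/*
 * config_mpred_hw() - program the motion prediction block for the current
 * slice: MV write/read buffer addresses (current and collocated picture),
 * picture/LCU/tile geometry, merge and AMVP candidate limits, per-list
 * reference enable masks and the POC of every list0/list1 reference,
 * written in L0/L1 pairs as required by the hardware.
 */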
4769static void config_mpred_hw(struct hevc_state_s *hevc)
4770{
4771 int i;
4772 unsigned int data32;
4773 struct PIC_s *cur_pic = hevc->cur_pic;
4774 struct PIC_s *col_pic = hevc->col_pic;
4775 int AMVP_MAX_NUM_CANDS_MEM = 3;
4776 int AMVP_MAX_NUM_CANDS = 2;
4777 int NUM_CHROMA_MODE = 5;
4778 int DM_CHROMA_IDX = 36;
4779 int above_ptr_ctrl = 0;
4780 int buffer_linear = 1;
4781 int cu_size_log2 = 3;
4782
4783 int mpred_mv_rd_start_addr;
4784 int mpred_curr_lcu_x;
4785 int mpred_curr_lcu_y;
4786 int mpred_above_buf_start;
4787 int mpred_mv_rd_ptr;
4788 int mpred_mv_rd_ptr_p1;
4789 int mpred_mv_rd_end_addr;
4790 int MV_MEM_UNIT;
4791 int mpred_mv_wr_ptr;
4792 int *ref_poc_L0, *ref_poc_L1;
4793
4794 int above_en;
4795 int mv_wr_en;
4796 int mv_rd_en;
4797 int col_isIntra;
4798
4799 if (hevc->slice_type != 2) {
4800 above_en = 1;
4801 mv_wr_en = 1;
4802 mv_rd_en = 1;
4803 col_isIntra = 0;
4804 } else {
4805 above_en = 1;
4806 mv_wr_en = 1;
4807 mv_rd_en = 0;
4808 col_isIntra = 0;
4809 }
4810
4811 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
4812 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
4813 mpred_curr_lcu_x = data32 & 0xffff;
4814 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
4815
4816 MV_MEM_UNIT =
4817 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
4818 5 ? 0x80 : 0x20;
4819 mpred_mv_rd_ptr =
4820 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
4821
4822 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
4823 mpred_mv_rd_end_addr =
4824 mpred_mv_rd_start_addr +
4825 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
4826
4827 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
4828
4829 mpred_mv_wr_ptr =
4830 cur_pic->mpred_mv_wr_start_addr +
4831 (hevc->slice_addr * MV_MEM_UNIT);
4832
4833 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4834 hevc_print(hevc, 0,
4835 "cur pic index %d col pic index %d\n", cur_pic->index,
4836 col_pic->index);
4837 }
4838
4839 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
4840 cur_pic->mpred_mv_wr_start_addr);
4841 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
4842
4843 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
4844 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
4845 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
4846
4847 data32 = READ_VREG(HEVC_MPRED_CTRL0);
4848 data32 = (hevc->slice_type |
4849 hevc->new_pic << 2 |
4850 hevc->new_tile << 3 |
4851 hevc->isNextSliceSegment << 4 |
4852 hevc->TMVPFlag << 5 |
4853 hevc->LDCFlag << 6 |
4854 hevc->ColFromL0Flag << 7 |
4855 above_ptr_ctrl << 8 |
4856 above_en << 9 |
4857 mv_wr_en << 10 |
4858 mv_rd_en << 11 |
4859 col_isIntra << 12 |
4860 buffer_linear << 13 |
4861 hevc->LongTerm_Curr << 14 |
4862 hevc->LongTerm_Col << 15 |
4863 hevc->lcu_size_log2 << 16 |
4864 cu_size_log2 << 20 | hevc->plevel << 24);
4865 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
4866
4867 data32 = READ_VREG(HEVC_MPRED_CTRL1);
4868 data32 = (
4869#if 0
4870 /* no set in m8baby test1902 */
4871 /* Don't override clk_forced_on , */
4872 (data32 & (0x1 << 24)) |
4873#endif
4874 hevc->MaxNumMergeCand |
4875 AMVP_MAX_NUM_CANDS << 4 |
4876 AMVP_MAX_NUM_CANDS_MEM << 8 |
4877 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
4878 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
4879
4880 data32 = (hevc->pic_w | hevc->pic_h << 16);
4881 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
4882
4883 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
4884 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
4885
4886 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
4887 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
4888
4889 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
4890 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
4891
4892 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
4893 /* col_RefNum_L0<<16| */
4894 /* col_RefNum_L1<<24 */
4895 );
4896 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
4897
4898 data32 = (hevc->LongTerm_Ref);
4899 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
4900
4901 data32 = 0;
4902 for (i = 0; i < hevc->RefNum_L0; i++)
4903 data32 = data32 | (1 << i);
4904 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
4905
4906 data32 = 0;
4907 for (i = 0; i < hevc->RefNum_L1; i++)
4908 data32 = data32 | (1 << i);
4909 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
4910
4911 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
4912 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
4913
4914 /* below MPRED Ref_POC_xx_Lx registers must follow Ref_POC_xx_L0 ->
4915 * Ref_POC_xx_L1 in pair write order!!!
4916 */
4917 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
4918 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
4919
4920 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
4921 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
4922
4923 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
4924 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
4925
4926 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
4927 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
4928
4929 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
4930 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
4931
4932 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
4933 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
4934
4935 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
4936 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
4937
4938 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
4939 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
4940
4941 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
4942 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
4943
4944 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
4945 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
4946
4947 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
4948 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
4949
4950 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
4951 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
4952
4953 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
4954 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
4955
4956 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
4957 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
4958
4959 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
4960 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
4961
4962 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
4963 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
4964
4965 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
4966 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
4967
4968 if (hevc->new_pic) {
4969 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
4970 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
4971 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
4972 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
4973 } else if (!hevc->isNextSliceSegment) {
4974 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
4975 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
4976 }
4977
4978 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
4979}
4980
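/*
 * config_sao_hw() - per-slice SAO/deblock setup: output buffer addresses
 * and lengths for the double-write and/or compressed paths, endian and
 * memory map layout, deblock cb/cr QP offsets and beta/tc offsets taken
 * from either the slice header or the PPS, and the loop filter across
 * tiles/slices controls.
 */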
4981static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
4982{
4983 unsigned int data32, data32_2;
4984 int misc_flag0 = hevc->misc_flag0;
4985 int slice_deblocking_filter_disabled_flag = 0;
4986
4987 int mc_buffer_size_u_v =
4988 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
4989 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
4990 struct PIC_s *cur_pic = hevc->cur_pic;
4991 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
4992
4993 data32 = READ_VREG(HEVC_SAO_CTRL0);
4994 data32 &= (~0xf);
4995 data32 |= hevc->lcu_size_log2;
4996 WRITE_VREG(HEVC_SAO_CTRL0, data32);
4997
4998 data32 = (hevc->pic_w | hevc->pic_h << 16);
4999 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5000
5001 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5002 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5003
5004 if (hevc->new_pic)
5005 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5006#ifdef LOSLESS_COMPRESS_MODE
5007/*SUPPORT_10BIT*/
5008 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5009 data32 = READ_VREG(HEVC_SAO_CTRL5);
5010 data32 &= (~(0xff << 16));
5011
5012 if (get_double_write_mode(hevc) == 2 ||
5013 get_double_write_mode(hevc) == 3)
5014 data32 |= (0xff<<16);
5015 else if (get_double_write_mode(hevc) == 4)
5016 data32 |= (0x33<<16);
5017
5018 if (hevc->mem_saving_mode == 1)
5019 data32 |= (1 << 9);
5020 else
5021 data32 &= ~(1 << 9);
5022 if (workaround_enable & 1)
5023 data32 |= (1 << 7);
5024 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5025 }
5026 data32 = cur_pic->mc_y_adr;
5027 if (get_double_write_mode(hevc))
5028 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5029
5030 if ((get_double_write_mode(hevc) & 0x10) == 0)
5031 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5032
5033 if (hevc->mmu_enable)
5034 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5035#else
5036 data32 = cur_pic->mc_y_adr;
5037 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5038#endif
5039 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5040 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5041
5042#ifdef LOSLESS_COMPRESS_MODE
5043/*SUPPORT_10BIT*/
5044 if (get_double_write_mode(hevc))
5045 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5046#else
5047 data32 = cur_pic->mc_u_v_adr;
5048 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5049#endif
5050 data32 = (mc_buffer_size_u_v_h << 16);
5051 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5052
5053#ifdef LOSLESS_COMPRESS_MODE
5054/*SUPPORT_10BIT*/
5055 if (get_double_write_mode(hevc)) {
5056 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5057 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5058 }
5059#else
5060 /* multi tile to do... */
5061 data32 = cur_pic->mc_y_adr;
5062 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5063
5064 data32 = cur_pic->mc_u_v_adr;
5065 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5066#endif
5067 /* DBLK CONFIG HERE */
5068 if (hevc->new_pic) {
5069 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5070 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5071 data32 = (0xff << 8) | (0x0 << 0);
5072 else
5073 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5074 (0x0 << 0); /* h265 video format*/
5075
5076 if (hevc->pic_w >= 1280)
5077 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5078 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5079 if (get_double_write_mode(hevc) == 0)
5080 data32 |= (0x1 << 8); /*enable first write*/
5081 else if (get_double_write_mode(hevc) == 0x10)
5082 data32 |= (0x1 << 9); /*double write only*/
5083 else
5084 data32 |= ((0x1 << 8) |(0x1 << 9));
5085
5086 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5087 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5088 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5089 }
5090 data32 = (hevc->pic_w | hevc->pic_h << 16);
5091 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5092
5093 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5094 data32 =
5095 ((misc_flag0 >>
5096 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5097 0x1) << 3;
5098 } else
5099 data32 = 0;
5100 data32 |=
5101 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5102 ((params->p.pps_cr_qp_offset
5103 & 0x1f) <<
5104 9));
5105 data32 |=
5106 (hevc->lcu_size ==
5107 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5108
5109 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5110
5111 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5112 /*if (debug & 0x80) {*/
5113 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5114 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5115 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5116 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5117 data32);
5118 /*}*/
5119 }
5120 }
5121#if 0
5122 data32 = READ_VREG(HEVC_SAO_CTRL1);
5123 data32 &= (~0x3000);
5124 data32 |= (mem_map_mode <<
5125 12);
5126
5127/* [13:12] axi_aformat,
5128 * 0-Linear, 1-32x32, 2-64x32
5129 */
5130 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5131
5132 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5133 data32 &= (~0x30);
5134 data32 |= (mem_map_mode <<
5135 4);
5136
5137/* [5:4] -- address_format
5138 * 00:linear 01:32x32 10:64x32
5139 */
5140 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5141#else
5142 /* m8baby test1902 */
5143 data32 = READ_VREG(HEVC_SAO_CTRL1);
5144 data32 &= (~0x3000);
5145 data32 |= (mem_map_mode <<
5146 12);
5147
5148/* [13:12] axi_aformat, 0-Linear,
5149 * 1-32x32, 2-64x32
5150 */
5151 data32 &= (~0xff0);
5152 /* data32 |= 0x670; // Big-Endian per 64-bit */
5153 data32 |= endian; /* Big-Endian per 64-bit */
5154 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5155 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5156 if (get_double_write_mode(hevc) == 0)
5157 data32 |= 0x2; /*disable double write*/
5158 else if (get_double_write_mode(hevc) & 0x10)
5159 data32 |= 0x1; /*disable cm*/
5160 } else {
5161 unsigned int data;
5162 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5163 (0x0 << 0); /* h265 video format*/
5164 if (hevc->pic_w >= 1280)
5165 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5166 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5167 if (get_double_write_mode(hevc) == 0)
5168 data |= (0x1 << 8); /*enable first write*/
5169 else if (get_double_write_mode(hevc) & 0x10)
5170 data |= (0x1 << 9); /*double write only*/
5171 else
5172 data |= ((0x1 << 8) |(0x1 << 9));
5173
5174 WRITE_VREG(HEVC_DBLK_CFGB, data);
5175 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5176 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5177 }
5178
5179 /* swap uv */
5180 if (hevc->is_used_v4l) {
5181 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5182 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5183 data32 &= ~(1 << 8); /* NV21 */
5184 else
5185 data32 |= (1 << 8); /* NV12 */
5186 }
5187
5188 /*
5189 * [31:24] ar_fifo1_axi_thred
5190 * [23:16] ar_fifo0_axi_thred
5191 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5192 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5193 * [11:08] axi_lendian_C
5194 * [07:04] axi_lendian_Y
5195 * [3] reserved
5196 * [2] clk_forceon
5197 * [1] dw_disable:disable double write output
5198 * [0] cm_disable:disable compress output
5199 */
5200 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5201 if (get_double_write_mode(hevc) & 0x10) {
5202 /* [23:22] dw_v1_ctrl
5203 *[21:20] dw_v0_ctrl
5204 *[19:18] dw_h1_ctrl
5205 *[17:16] dw_h0_ctrl
5206 */
5207 data32 = READ_VREG(HEVC_SAO_CTRL5);
5208 /*set them all 0 for H265_NV21 (no down-scale)*/
5209 data32 &= ~(0xff << 16);
5210 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5211 }
5212
5213 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5214 data32 &= (~0x30);
5215 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5216 data32 |= (mem_map_mode <<
5217 4);
5218 data32 &= (~0xF);
5219	data32 |= 0xf; /* valid only in double-write-only mode */
5220 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5221
5222 /* swap uv */
5223 if (hevc->is_used_v4l) {
5224 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5225 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5226 data32 |= (1 << 12); /* NV21 */
5227 else
5228 data32 &= ~(1 << 12); /* NV12 */
5229 }
5230
5231 /*
5232 * [3:0] little_endian
5233 * [5:4] address_format 00:linear 01:32x32 10:64x32
5234 * [7:6] reserved
5235 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5236 * [11:10] reserved
5237 * [12] CbCr_byte_swap
5238 * [31:13] reserved
5239 */
5240 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5241#endif
5242 data32 = 0;
5243 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5244 data32_2 &= (~0x300);
5245 /* slice_deblocking_filter_disabled_flag = 0;
5246	 * ucode has handled it, so read it from ucode directly
5247 */
5248 if (hevc->tile_enabled) {
5249 data32 |=
5250 ((misc_flag0 >>
5251 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5252 0x1) << 0;
5253 data32_2 |=
5254 ((misc_flag0 >>
5255 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5256 0x1) << 8;
5257 }
5258 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5259 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5260		0x1; /* ucode has handled it, so read it from ucode directly */
5261 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5262 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5263 /* slice_deblocking_filter_disabled_flag =
5264 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5265	 * //ucode has handled it, so read it from ucode directly
5266 */
5267 data32 |= slice_deblocking_filter_disabled_flag << 2;
5268 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5269 hevc_print_cont(hevc, 0,
5270 "(1,%x)", data32);
5271 if (!slice_deblocking_filter_disabled_flag) {
5272 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5273 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5274 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5275 hevc_print_cont(hevc, 0,
5276 "(2,%x)", data32);
5277 }
5278 } else {
5279 data32 |=
5280 ((misc_flag0 >>
5281 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5282 0x1) << 2;
5283 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5284 hevc_print_cont(hevc, 0,
5285 "(3,%x)", data32);
5286 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5287 0x1) == 0) {
5288 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5289 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5290 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5291 hevc_print_cont(hevc, 0,
5292 "(4,%x)", data32);
5293 }
5294 }
5295 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5296 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5297 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5298 || (!slice_deblocking_filter_disabled_flag))) {
5299 data32 |=
5300 ((misc_flag0 >>
5301 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5302 & 0x1) << 1;
5303 data32_2 |=
5304 ((misc_flag0 >>
5305 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5306 & 0x1) << 9;
5307 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5308 hevc_print_cont(hevc, 0,
5309 "(5,%x)\n", data32);
5310 } else {
5311 data32 |=
5312 ((misc_flag0 >>
5313 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5314 & 0x1) << 1;
5315 data32_2 |=
5316 ((misc_flag0 >>
5317 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5318 & 0x1) << 9;
5319 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5320 hevc_print_cont(hevc, 0,
5321 "(6,%x)\n", data32);
5322 }
5323 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5324 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5325}
5326
5327#ifdef TEST_NO_BUF
5328static unsigned char test_flag = 1;
5329#endif
5330
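/*
 * pic_list_process() - reconcile the allocated picture list with the
 * current picture size and the number of pictures actually needed: free
 * the buffers of idle pictures whose size no longer matches, retire
 * surplus entries and re-activate spare entries when too few remain.
 */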
5331static void pic_list_process(struct hevc_state_s *hevc)
5332{
5333 int work_pic_num = get_work_pic_num(hevc);
5334 int alloc_pic_count = 0;
5335 int i;
5336 struct PIC_s *pic;
5337 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5338 pic = hevc->m_PIC[i];
5339 if (pic == NULL || pic->index == -1)
5340 continue;
5341 alloc_pic_count++;
5342 if (pic->output_mark == 0 && pic->referenced == 0
5343 && pic->output_ready == 0
5344 && (pic->width != hevc->pic_w ||
5345 pic->height != hevc->pic_h)
5346 ) {
5347 set_buf_unused(hevc, pic->BUF_index);
5348 pic->BUF_index = -1;
5349 if (alloc_pic_count > work_pic_num) {
5350 pic->width = 0;
5351 pic->height = 0;
5352 pic->index = -1;
5353 } else {
5354 pic->width = hevc->pic_w;
5355 pic->height = hevc->pic_h;
5356 }
5357 }
5358 }
5359 if (alloc_pic_count < work_pic_num) {
5360 int new_count = alloc_pic_count;
5361 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5362 pic = hevc->m_PIC[i];
5363 if (pic && pic->index == -1) {
5364 pic->index = i;
5365 pic->BUF_index = -1;
5366 pic->width = hevc->pic_w;
5367 pic->height = hevc->pic_h;
5368 new_count++;
5369 if (new_count >=
5370 work_pic_num)
5371 break;
5372 }
5373 }
5374
5375 }
5376 dealloc_unused_buf(hevc);
5377 if (get_alloc_pic_count(hevc)
5378 != alloc_pic_count) {
5379 hevc_print_cont(hevc, 0,
5380 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5381 __func__,
5382 work_pic_num,
5383 alloc_pic_count,
5384 get_alloc_pic_count(hevc));
5385 }
5386}
5387
5388static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5389{
5390 int i;
5391 struct PIC_s *pic;
5392 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5393 pic = hevc->m_PIC[i];
5394 if (pic == NULL || pic->index == -1)
5395 continue;
5396
5397 if (pic->output_mark == 0 && pic->referenced == 0
5398 && pic->output_ready == 0
5399 && pic->scatter_alloc
5400 )
5401 release_pic_mmu_buf(hevc, pic);
5402 }
5403
5404}
5405
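/*
 * get_new_pic() - pick an idle picture (unreferenced, not pending output,
 * size matching), preferring never-used or lowest-POC entries, allocate
 * its frame/MV/MMU buffers on demand and initialize its per-frame state
 * from the parsed parameters before decoding starts.
 */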
5406static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5407 union param_u *rpm_param)
5408{
5409 struct PIC_s *new_pic = NULL;
5410 struct PIC_s *pic;
5411 int i;
5412 int ret;
5413
5414 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5415 pic = hevc->m_PIC[i];
5416 if (pic == NULL || pic->index == -1)
5417 continue;
5418
5419 if (pic->output_mark == 0 && pic->referenced == 0
5420 && pic->output_ready == 0
5421 && pic->width == hevc->pic_w
5422 && pic->height == hevc->pic_h
5423 ) {
5424 if (new_pic) {
5425 if (new_pic->POC != INVALID_POC) {
5426 if (pic->POC == INVALID_POC ||
5427 pic->POC < new_pic->POC)
5428 new_pic = pic;
5429 }
5430 } else
5431 new_pic = pic;
5432 }
5433 }
5434
5435 if (new_pic == NULL)
5436 return NULL;
5437
5438 if (new_pic->BUF_index < 0) {
5439 if (alloc_buf(hevc) < 0)
5440 return NULL;
5441 else {
5442 if (config_pic(hevc, new_pic) < 0) {
5443 dealloc_pic_buf(hevc, new_pic);
5444 return NULL;
5445 }
5446 }
5447 new_pic->width = hevc->pic_w;
5448 new_pic->height = hevc->pic_h;
5449 set_canvas(hevc, new_pic);
5450
5451 init_pic_list_hw(hevc);
5452 }
5453
5454 if (new_pic) {
5455 new_pic->double_write_mode =
5456 get_double_write_mode(hevc);
5457 if (new_pic->double_write_mode)
5458 set_canvas(hevc, new_pic);
5459
5460#ifdef TEST_NO_BUF
5461 if (test_flag) {
5462 test_flag = 0;
5463 return NULL;
5464 } else
5465 test_flag = 1;
5466#endif
5467 if (get_mv_buf(hevc, new_pic) < 0)
5468 return NULL;
5469
5470 if (hevc->mmu_enable) {
5471 ret = H265_alloc_mmu(hevc, new_pic,
5472 rpm_param->p.bit_depth,
5473 hevc->frame_mmu_map_addr);
5474 if (ret != 0) {
5475 put_mv_buf(hevc, new_pic);
5476 hevc_print(hevc, 0,
5477 "can't alloc need mmu1,idx %d ret =%d\n",
5478 new_pic->decode_idx,
5479 ret);
5480 return NULL;
5481 }
5482 }
5483 new_pic->referenced = 1;
5484 new_pic->decode_idx = hevc->decode_idx;
5485 new_pic->slice_idx = 0;
5486 new_pic->referenced = 1;
5487 new_pic->output_mark = 0;
5488 new_pic->recon_mark = 0;
5489 new_pic->error_mark = 0;
5490 new_pic->dis_mark = 0;
5491 /* new_pic->output_ready = 0; */
5492 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5493 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5494 new_pic->POC = hevc->curr_POC;
5495 new_pic->pic_struct = hevc->curr_pic_struct;
5496 if (new_pic->aux_data_buf)
5497 release_aux_data(hevc, new_pic);
5498 new_pic->mem_saving_mode =
5499 hevc->mem_saving_mode;
5500 new_pic->bit_depth_luma =
5501 hevc->bit_depth_luma;
5502 new_pic->bit_depth_chroma =
5503 hevc->bit_depth_chroma;
5504 new_pic->video_signal_type =
5505 hevc->video_signal_type;
5506
5507 new_pic->conformance_window_flag =
5508 hevc->param.p.conformance_window_flag;
5509 new_pic->conf_win_left_offset =
5510 hevc->param.p.conf_win_left_offset;
5511 new_pic->conf_win_right_offset =
5512 hevc->param.p.conf_win_right_offset;
5513 new_pic->conf_win_top_offset =
5514 hevc->param.p.conf_win_top_offset;
5515 new_pic->conf_win_bottom_offset =
5516 hevc->param.p.conf_win_bottom_offset;
5517 new_pic->chroma_format_idc =
5518 hevc->param.p.chroma_format_idc;
5519
5520 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5521 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5522 __func__, new_pic->index,
5523 new_pic->BUF_index, new_pic->decode_idx,
5524 new_pic->POC);
5525
5526 }
5527 if (pic_list_debug & 0x1) {
5528 dump_pic_list(hevc);
5529 pr_err("\n*******************************************\n");
5530 }
5531
5532 return new_pic;
5533}
5534
5535static int get_display_pic_num(struct hevc_state_s *hevc)
5536{
5537 int i;
5538 struct PIC_s *pic;
5539 int num = 0;
5540
5541 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5542 pic = hevc->m_PIC[i];
5543 if (pic == NULL ||
5544 pic->index == -1)
5545 continue;
5546
5547 if (pic->output_ready == 1)
5548 num++;
5549 }
5550 return num;
5551}
5552
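/*
 * flush_output() - drain the buffer manager: mark the passed picture (if
 * any) for output, then repeatedly pull displayable pictures with
 * output_pic(hevc, 1), dropping them when the error/debug policy says so
 * and otherwise handing them to prepare_display_buf(); finally clear all
 * referenced flags.
 */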
5553static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5554{
5555 struct PIC_s *pic_display;
5556
5557 if (pic) {
5558 /*PB skip control */
5559 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5560 /* start decoding after first I */
5561 hevc->ignore_bufmgr_error |= 0x1;
5562 }
5563 if (hevc->ignore_bufmgr_error & 1) {
5564 if (hevc->PB_skip_count_after_decoding > 0)
5565 hevc->PB_skip_count_after_decoding--;
5566 else {
5567 /* start displaying */
5568 hevc->ignore_bufmgr_error |= 0x2;
5569 }
5570 }
5571 /**/
5572 if (pic->POC != INVALID_POC) {
5573 pic->output_mark = 1;
5574 pic->recon_mark = 1;
5575 }
5576 pic->recon_mark = 1;
5577 }
5578 do {
5579 pic_display = output_pic(hevc, 1);
5580
5581 if (pic_display) {
5582 pic_display->referenced = 0;
5583 put_mv_buf(hevc, pic_display);
5584 if ((pic_display->error_mark
5585 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5586 || (get_dbg_flag(hevc) &
5587 H265_DEBUG_DISPLAY_CUR_FRAME)
5588 || (get_dbg_flag(hevc) &
5589 H265_DEBUG_NO_DISPLAY)) {
5590 pic_display->output_ready = 0;
5591 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5592 hevc_print(hevc, 0,
5593 "[BM] Display: POC %d, ",
5594 pic_display->POC);
5595 hevc_print_cont(hevc, 0,
5596 "decoding index %d ==> ",
5597 pic_display->decode_idx);
5598 hevc_print_cont(hevc, 0,
5599 "Debug mode or error, recycle it\n");
5600 }
5601 } else {
5602 if (hevc->i_only & 0x1
5603 && pic_display->slice_type != 2) {
5604 pic_display->output_ready = 0;
5605 } else {
5606 prepare_display_buf(hevc, pic_display);
5607 if (get_dbg_flag(hevc)
5608 & H265_DEBUG_BUFMGR) {
5609 hevc_print(hevc, 0,
5610 "[BM] flush Display: POC %d, ",
5611 pic_display->POC);
5612 hevc_print_cont(hevc, 0,
5613 "decoding index %d\n",
5614 pic_display->decode_idx);
5615 }
5616 }
5617 }
5618 }
5619 } while (pic_display);
5620 clear_referenced_flag(hevc);
5621}
5622
5623/*
5624* dv_meta_flag: 1, dolby meta only; 2, everything except dolby meta
5625*/
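/*
 * Each record copied from the aux buffer is appended to pic->aux_data_buf
 * behind an 8-byte header: bytes 0-3 payload length (big endian), byte 4
 * the tag, bytes 6-7 the padding length (currently always 0 since the
 * padding counter below is commented out).
 */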
5626static void set_aux_data(struct hevc_state_s *hevc,
5627 struct PIC_s *pic, unsigned char suffix_flag,
5628 unsigned char dv_meta_flag)
5629{
5630 int i;
5631 unsigned short *aux_adr;
5632 unsigned int size_reg_val =
5633 READ_VREG(HEVC_AUX_DATA_SIZE);
5634 unsigned int aux_count = 0;
5635 int aux_size = 0;
5636 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
5637 return;
5638
5639 if (hevc->aux_data_dirty ||
5640 hevc->m_ins_flag == 0) {
5641
5642 hevc->aux_data_dirty = 0;
5643 }
5644
5645 if (suffix_flag) {
5646 aux_adr = (unsigned short *)
5647 (hevc->aux_addr +
5648 hevc->prefix_aux_size);
5649 aux_count =
5650 ((size_reg_val & 0xffff) << 4)
5651 >> 1;
5652 aux_size =
5653 hevc->suffix_aux_size;
5654 } else {
5655 aux_adr =
5656 (unsigned short *)hevc->aux_addr;
5657 aux_count =
5658 ((size_reg_val >> 16) << 4)
5659 >> 1;
5660 aux_size =
5661 hevc->prefix_aux_size;
5662 }
5663 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5664 hevc_print(hevc, 0,
5665 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
5666 __func__, pic, pic->aux_data_size,
5667 aux_count, suffix_flag, dv_meta_flag);
5668 }
5669 if (aux_size > 0 && aux_count > 0) {
5670 int heads_size = 0;
5671 int new_size;
5672 char *new_buf;
5673
5674 for (i = 0; i < aux_count; i++) {
5675 unsigned char tag = aux_adr[i] >> 8;
5676 if (tag != 0 && tag != 0xff) {
5677 if (dv_meta_flag == 0)
5678 heads_size += 8;
5679 else if (dv_meta_flag == 1 && tag == 0x1)
5680 heads_size += 8;
5681 else if (dv_meta_flag == 2 && tag != 0x1)
5682 heads_size += 8;
5683 }
5684 }
5685 new_size = pic->aux_data_size + aux_count + heads_size;
5686 new_buf = vmalloc(new_size);
5687 if (new_buf) {
5688 unsigned char valid_tag = 0;
5689 unsigned char *h =
5690 new_buf +
5691 pic->aux_data_size;
5692 unsigned char *p = h + 8;
5693 int len = 0;
5694 int padding_len = 0;
5695			if (pic->aux_data_size)
5696				memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
5697			vfree(pic->aux_data_buf);
5698 pic->aux_data_buf = new_buf;
5699 for (i = 0; i < aux_count; i += 4) {
5700 int ii;
5701 unsigned char tag = aux_adr[i + 3] >> 8;
5702 if (tag != 0 && tag != 0xff) {
5703 if (dv_meta_flag == 0)
5704 valid_tag = 1;
5705 else if (dv_meta_flag == 1
5706 && tag == 0x1)
5707 valid_tag = 1;
5708 else if (dv_meta_flag == 2
5709 && tag != 0x1)
5710 valid_tag = 1;
5711 else
5712 valid_tag = 0;
5713 if (valid_tag && len > 0) {
5714 pic->aux_data_size +=
5715 (len + 8);
5716 h[0] = (len >> 24)
5717 & 0xff;
5718 h[1] = (len >> 16)
5719 & 0xff;
5720 h[2] = (len >> 8)
5721 & 0xff;
5722 h[3] = (len >> 0)
5723 & 0xff;
5724 h[6] =
5725 (padding_len >> 8)
5726 & 0xff;
5727 h[7] = (padding_len)
5728 & 0xff;
5729 h += (len + 8);
5730 p += 8;
5731 len = 0;
5732 padding_len = 0;
5733 }
5734 if (valid_tag) {
5735 h[4] = tag;
5736 h[5] = 0;
5737 h[6] = 0;
5738 h[7] = 0;
5739 }
5740 }
5741 if (valid_tag) {
5742 for (ii = 0; ii < 4; ii++) {
5743 unsigned short aa =
5744 aux_adr[i + 3
5745 - ii];
5746 *p = aa & 0xff;
5747 p++;
5748 len++;
5749 /*if ((aa >> 8) == 0xff)
5750 padding_len++;*/
5751 }
5752 }
5753 }
5754 if (len > 0) {
5755 pic->aux_data_size += (len + 8);
5756 h[0] = (len >> 24) & 0xff;
5757 h[1] = (len >> 16) & 0xff;
5758 h[2] = (len >> 8) & 0xff;
5759 h[3] = (len >> 0) & 0xff;
5760 h[6] = (padding_len >> 8) & 0xff;
5761 h[7] = (padding_len) & 0xff;
5762 }
5763 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5764 hevc_print(hevc, 0,
5765 "aux: (size %d) suffix_flag %d\n",
5766 pic->aux_data_size, suffix_flag);
5767 for (i = 0; i < pic->aux_data_size; i++) {
5768 hevc_print_cont(hevc, 0,
5769 "%02x ", pic->aux_data_buf[i]);
5770 if (((i + 1) & 0xf) == 0)
5771 hevc_print_cont(hevc, 0, "\n");
5772 }
5773 hevc_print_cont(hevc, 0, "\n");
5774 }
5775
5776 } else {
5777 hevc_print(hevc, 0, "new buf alloc failed\n");
5778 if (pic->aux_data_buf)
5779 vfree(pic->aux_data_buf);
5780 pic->aux_data_buf = NULL;
5781 pic->aux_data_size = 0;
5782 }
5783 }
5784
5785}
5786
5787static void release_aux_data(struct hevc_state_s *hevc,
5788 struct PIC_s *pic)
5789{
5790 if (pic->aux_data_buf)
5791 vfree(pic->aux_data_buf);
5792 pic->aux_data_buf = NULL;
5793 pic->aux_data_size = 0;
5794}
5795
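/*
 * Finish the previously decoded picture before starting a new one:
 * for non-IDR slices, look up the picture of the last decoded POC,
 * apply the PB-skip policy, release the unused tail of its MMU pages
 * (single instance, compressed output only), mark it for output and
 * push out pictures in display order; for IDR slices, flush all
 * pending output instead.
 */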
5796static inline void hevc_pre_pic(struct hevc_state_s *hevc,
5797 struct PIC_s *pic)
5798{
5799
5800 /* prev pic */
5801 /*if (hevc->curr_POC != 0) {*/
5802 int decoded_poc = hevc->iPrevPOC;
5803#ifdef MULTI_INSTANCE_SUPPORT
5804 if (hevc->m_ins_flag) {
5805 decoded_poc = hevc->decoded_poc;
5806 hevc->decoded_poc = INVALID_POC;
5807 }
5808#endif
5809 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
5810 && hevc->m_nalUnitType !=
5811 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
5812 struct PIC_s *pic_display;
5813
5814 pic = get_pic_by_POC(hevc, decoded_poc);
5815 if (pic && (pic->POC != INVALID_POC)) {
5816 /*PB skip control */
5817 if (pic->error_mark == 0
5818 && hevc->PB_skip_mode == 1) {
5819 /* start decoding after
5820 * first I
5821 */
5822 hevc->ignore_bufmgr_error |= 0x1;
5823 }
5824 if (hevc->ignore_bufmgr_error & 1) {
5825 if (hevc->PB_skip_count_after_decoding > 0) {
5826 hevc->PB_skip_count_after_decoding--;
5827 } else {
5828 /* start displaying */
5829 hevc->ignore_bufmgr_error |= 0x2;
5830 }
5831 }
5832 if (hevc->mmu_enable
5833 && ((hevc->double_write_mode & 0x10) == 0)) {
5834 if (!hevc->m_ins_flag) {
5835 hevc->used_4k_num =
5836 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
5837
5838 if ((!is_skip_decoding(hevc, pic)) &&
5839 (hevc->used_4k_num >= 0) &&
5840 (hevc->cur_pic->scatter_alloc
5841 == 1)) {
5842 hevc_print(hevc,
5843 H265_DEBUG_BUFMGR_MORE,
5844 "%s pic index %d scatter_alloc %d page_start %d\n",
5845 "decoder_mmu_box_free_idx_tail",
5846 hevc->cur_pic->index,
5847 hevc->cur_pic->scatter_alloc,
5848 hevc->used_4k_num);
5849 hevc_mmu_dma_check(hw_to_vdec(hevc));
5850 decoder_mmu_box_free_idx_tail(
5851 hevc->mmu_box,
5852 hevc->cur_pic->index,
5853 hevc->used_4k_num);
5854 hevc->cur_pic->scatter_alloc
5855 = 2;
5856 }
5857 hevc->used_4k_num = -1;
5858 }
5859 }
5860
5861 pic->output_mark = 1;
5862 pic->recon_mark = 1;
5863 pic->dis_mark = 1;
5864 }
5865 do {
5866 pic_display = output_pic(hevc, 0);
5867
5868 if (pic_display) {
5869 if ((pic_display->error_mark &&
5870 ((hevc->ignore_bufmgr_error &
5871 0x2) == 0))
5872 || (get_dbg_flag(hevc) &
5873 H265_DEBUG_DISPLAY_CUR_FRAME)
5874 || (get_dbg_flag(hevc) &
5875 H265_DEBUG_NO_DISPLAY)) {
5876 pic_display->output_ready = 0;
5877 if (get_dbg_flag(hevc) &
5878 H265_DEBUG_BUFMGR) {
5879 hevc_print(hevc, 0,
5880 "[BM] Display: POC %d, ",
5881 pic_display->POC);
5882 hevc_print_cont(hevc, 0,
5883 "decoding index %d ==> ",
5884 pic_display->
5885 decode_idx);
5886 hevc_print_cont(hevc, 0,
5887							"Debug or err, recycle it\n");
5888 }
5889 } else {
5890 if (hevc->i_only & 0x1
5891 && pic_display->
5892 slice_type != 2) {
5893 pic_display->output_ready = 0;
5894 } else {
5895 prepare_display_buf
5896 (hevc,
5897 pic_display);
5898 if (get_dbg_flag(hevc) &
5899 H265_DEBUG_BUFMGR) {
5900 hevc_print(hevc, 0,
5901 "[BM] Display: POC %d, ",
5902 pic_display->POC);
5903 hevc_print_cont(hevc, 0,
5904 "decoding index %d\n",
5905 pic_display->
5906 decode_idx);
5907 }
5908 }
5909 }
5910 }
5911 } while (pic_display);
5912 } else {
5913 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5914 hevc_print(hevc, 0,
5915 "[BM] current pic is IDR, ");
5916 hevc_print(hevc, 0,
5917 "clear referenced flag of all buffers\n");
5918 }
5919 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5920 dump_pic_list(hevc);
5921 pic = get_pic_by_POC(hevc, decoded_poc);
5922 flush_output(hevc, pic);
5923 }
5924
5925}
5926
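/*
 * Check whether the just-decoded picture (hevc->cur_pic) was fully
 * decoded: if the last decoded LCU index falls short of the previous
 * picture's LCU grid (lcu_x_num_pre * lcu_y_num_pre), or the header
 * had an error, or an over-decode was detected, set its error_mark
 * (subject to error_handle_policy bits 0x20/0x80), then latch the
 * current LCU grid for the next call.
 */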
5927static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
5928 int decoded_lcu)
5929{
5930 int current_lcu_idx = decoded_lcu;
5931 if (decoded_lcu < 0)
5932 return;
5933
5934 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5935 hevc_print(hevc, 0,
5936 "cur lcu idx = %d, (total %d)\n",
5937 current_lcu_idx, hevc->lcu_total);
5938 }
5939 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
5940 if (hevc->first_pic_after_recover) {
5941 if (current_lcu_idx !=
5942 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
5943 hevc->cur_pic->error_mark = 1;
5944 } else {
5945 if (hevc->lcu_x_num_pre != 0
5946 && hevc->lcu_y_num_pre != 0
5947 && current_lcu_idx != 0
5948 && current_lcu_idx <
5949 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
5950 hevc->cur_pic->error_mark = 1;
5951 }
5952 if (hevc->cur_pic->error_mark) {
5953 hevc_print(hevc, 0,
5954 "cur lcu idx = %d, (total %d), set error_mark\n",
5955 current_lcu_idx,
5956 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
5957 if (is_log_enable(hevc))
5958 add_log(hevc,
5959 "cur lcu idx = %d, (total %d), set error_mark",
5960 current_lcu_idx,
5961 hevc->lcu_x_num_pre *
5962 hevc->lcu_y_num_pre);
5963
5964 }
5965
5966 }
5967 if (hevc->cur_pic && hevc->head_error_flag) {
5968 hevc->cur_pic->error_mark = 1;
5969 hevc_print(hevc, 0,
5970 "head has error, set error_mark\n");
5971 }
5972
5973 if ((error_handle_policy & 0x80) == 0) {
5974 if (hevc->over_decode && hevc->cur_pic) {
5975 hevc_print(hevc, 0,
5976 "over decode, set error_mark\n");
5977 hevc->cur_pic->error_mark = 1;
5978 }
5979 }
5980
5981 hevc->lcu_x_num_pre = hevc->lcu_x_num;
5982 hevc->lcu_y_num_pre = hevc->lcu_y_num;
5983}
5984
5985static void check_pic_decoded_error(struct hevc_state_s *hevc,
5986 int decoded_lcu)
5987{
5988 int current_lcu_idx = decoded_lcu;
5989 if (decoded_lcu < 0)
5990 return;
5991
5992 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5993 hevc_print(hevc, 0,
5994 "cur lcu idx = %d, (total %d)\n",
5995 current_lcu_idx, hevc->lcu_total);
5996 }
5997 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
5998 if (hevc->lcu_x_num != 0
5999 && hevc->lcu_y_num != 0
6000 && current_lcu_idx != 0
6001 && current_lcu_idx <
6002 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6003 hevc->cur_pic->error_mark = 1;
6004 if (hevc->cur_pic->error_mark) {
6005 hevc_print(hevc, 0,
6006 "cur lcu idx = %d, (total %d), set error_mark\n",
6007 current_lcu_idx,
6008 hevc->lcu_x_num*hevc->lcu_y_num);
6009			if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6010				&& ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6011				hevc_print(hevc, 0,
6012					"Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6013					hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6014				flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6015 }
6016 if (is_log_enable(hevc))
6017 add_log(hevc,
6018 "cur lcu idx = %d, (total %d), set error_mark",
6019 current_lcu_idx,
6020 hevc->lcu_x_num *
6021 hevc->lcu_y_num);
6022
6023 }
6024
6025 }
6026 if (hevc->cur_pic && hevc->head_error_flag) {
6027 hevc->cur_pic->error_mark = 1;
6028 hevc_print(hevc, 0,
6029 "head has error, set error_mark\n");
6030 }
6031
6032 if ((error_handle_policy & 0x80) == 0) {
6033 if (hevc->over_decode && hevc->cur_pic) {
6034 hevc_print(hevc, 0,
6035 "over decode, set error_mark\n");
6036 hevc->cur_pic->error_mark = 1;
6037 }
6038 }
6039}
6040
6041/* Only after one field or one frame has been decoded
6042 * can this function be called to collect QoS info. */
6043static void get_picture_qos_info(struct hevc_state_s *hevc)
6044{
6045 struct PIC_s *picture = hevc->cur_pic;
6046
6047/*
6048#define DEBUG_QOS
6049*/
6050
6051 if (!hevc->cur_pic)
6052 return;
6053
6054 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6055 unsigned char a[3];
6056 unsigned char i, j, t;
6057 unsigned long data;
6058
6059 data = READ_VREG(HEVC_MV_INFO);
6060 if (picture->slice_type == I_SLICE)
6061 data = 0;
6062 a[0] = data & 0xff;
6063 a[1] = (data >> 8) & 0xff;
6064 a[2] = (data >> 16) & 0xff;
6065
6066 for (i = 0; i < 3; i++)
6067 for (j = i+1; j < 3; j++) {
6068 if (a[j] < a[i]) {
6069 t = a[j];
6070 a[j] = a[i];
6071 a[i] = t;
6072 } else if (a[j] == a[i]) {
6073 a[i]++;
6074 t = a[j];
6075 a[j] = a[i];
6076 a[i] = t;
6077 }
6078 }
6079 picture->max_mv = a[2];
6080 picture->avg_mv = a[1];
6081 picture->min_mv = a[0];
6082#ifdef DEBUG_QOS
6083 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6084 data, a[0], a[1], a[2]);
6085#endif
6086
6087 data = READ_VREG(HEVC_QP_INFO);
6088 a[0] = data & 0x1f;
6089 a[1] = (data >> 8) & 0x3f;
6090 a[2] = (data >> 16) & 0x7f;
6091
6092 for (i = 0; i < 3; i++)
6093 for (j = i+1; j < 3; j++) {
6094 if (a[j] < a[i]) {
6095 t = a[j];
6096 a[j] = a[i];
6097 a[i] = t;
6098 } else if (a[j] == a[i]) {
6099 a[i]++;
6100 t = a[j];
6101 a[j] = a[i];
6102 a[i] = t;
6103 }
6104 }
6105 picture->max_qp = a[2];
6106 picture->avg_qp = a[1];
6107 picture->min_qp = a[0];
6108#ifdef DEBUG_QOS
6109 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6110 data, a[0], a[1], a[2]);
6111#endif
6112
6113 data = READ_VREG(HEVC_SKIP_INFO);
6114 a[0] = data & 0x1f;
6115 a[1] = (data >> 8) & 0x3f;
6116 a[2] = (data >> 16) & 0x7f;
6117
6118 for (i = 0; i < 3; i++)
6119 for (j = i+1; j < 3; j++) {
6120 if (a[j] < a[i]) {
6121 t = a[j];
6122 a[j] = a[i];
6123 a[i] = t;
6124 } else if (a[j] == a[i]) {
6125 a[i]++;
6126 t = a[j];
6127 a[j] = a[i];
6128 a[i] = t;
6129 }
6130 }
6131 picture->max_skip = a[2];
6132 picture->avg_skip = a[1];
6133 picture->min_skip = a[0];
6134
6135#ifdef DEBUG_QOS
6136 hevc_print(hevc, 0,
6137 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6138 data, a[0], a[1], a[2]);
6139#endif
6140 } else {
6141 uint32_t blk88_y_count;
6142 uint32_t blk88_c_count;
6143 uint32_t blk22_mv_count;
6144 uint32_t rdata32;
6145 int32_t mv_hi;
6146 int32_t mv_lo;
6147 uint32_t rdata32_l;
6148 uint32_t mvx_L0_hi;
6149 uint32_t mvy_L0_hi;
6150 uint32_t mvx_L1_hi;
6151 uint32_t mvy_L1_hi;
6152 int64_t value;
6153 uint64_t temp_value;
6154#ifdef DEBUG_QOS
6155 int pic_number = picture->POC;
6156#endif
6157
6158 picture->max_mv = 0;
6159 picture->avg_mv = 0;
6160 picture->min_mv = 0;
6161
6162 picture->max_skip = 0;
6163 picture->avg_skip = 0;
6164 picture->min_skip = 0;
6165
6166 picture->max_qp = 0;
6167 picture->avg_qp = 0;
6168 picture->min_qp = 0;
6169
6170
6171
6172#ifdef DEBUG_QOS
6173 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6174 picture->slice_type,
6175 picture->POC);
6176#endif
6177 /* set rd_idx to 0 */
6178 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6179
6180 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6181 if (blk88_y_count == 0) {
6182#ifdef DEBUG_QOS
6183 hevc_print(hevc, 0,
6184 "[Picture %d Quality] NO Data yet.\n",
6185 pic_number);
6186#endif
6187 /* reset all counts */
6188 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6189 return;
6190 }
6191 /* qp_y_sum */
6192 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6193#ifdef DEBUG_QOS
6194 hevc_print(hevc, 0,
6195 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6196 pic_number, rdata32/blk88_y_count,
6197 rdata32, blk88_y_count);
6198#endif
6199 picture->avg_qp = rdata32/blk88_y_count;
6200 /* intra_y_count */
6201 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6202#ifdef DEBUG_QOS
6203 hevc_print(hevc, 0,
6204 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6205 pic_number, rdata32*100/blk88_y_count,
6206 '%', rdata32);
6207#endif
6208 /* skipped_y_count */
6209 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6210#ifdef DEBUG_QOS
6211 hevc_print(hevc, 0,
6212 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6213 pic_number, rdata32*100/blk88_y_count,
6214 '%', rdata32);
6215#endif
6216 picture->avg_skip = rdata32*100/blk88_y_count;
6217 /* coeff_non_zero_y_count */
6218 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6219#ifdef DEBUG_QOS
6220 hevc_print(hevc, 0,
6221 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6222 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6223 '%', rdata32);
6224#endif
6225 /* blk66_c_count */
6226 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6227 if (blk88_c_count == 0) {
6228#ifdef DEBUG_QOS
6229 hevc_print(hevc, 0,
6230 "[Picture %d Quality] NO Data yet.\n",
6231 pic_number);
6232#endif
6233 /* reset all counts */
6234 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6235 return;
6236 }
6237 /* qp_c_sum */
6238 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6239#ifdef DEBUG_QOS
6240 hevc_print(hevc, 0,
6241 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6242 pic_number, rdata32/blk88_c_count,
6243 rdata32, blk88_c_count);
6244#endif
6245 /* intra_c_count */
6246 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6247#ifdef DEBUG_QOS
6248 hevc_print(hevc, 0,
6249 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6250 pic_number, rdata32*100/blk88_c_count,
6251 '%', rdata32);
6252#endif
6253 /* skipped_cu_c_count */
6254 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6255#ifdef DEBUG_QOS
6256 hevc_print(hevc, 0,
6257 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6258 pic_number, rdata32*100/blk88_c_count,
6259 '%', rdata32);
6260#endif
6261 /* coeff_non_zero_c_count */
6262 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6263#ifdef DEBUG_QOS
6264 hevc_print(hevc, 0,
6265 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6266 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6267 '%', rdata32);
6268#endif
6269
6270 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6271 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6272 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6273#ifdef DEBUG_QOS
6274 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6275 pic_number, (rdata32>>0)&0xff);
6276#endif
6277 picture->min_qp = (rdata32>>0)&0xff;
6278
6279#ifdef DEBUG_QOS
6280 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6281 pic_number, (rdata32>>8)&0xff);
6282#endif
6283 picture->max_qp = (rdata32>>8)&0xff;
6284
6285#ifdef DEBUG_QOS
6286 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6287 pic_number, (rdata32>>16)&0xff);
6288 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6289 pic_number, (rdata32>>24)&0xff);
6290#endif
6291
6292 /* blk22_mv_count */
6293 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6294 if (blk22_mv_count == 0) {
6295#ifdef DEBUG_QOS
6296 hevc_print(hevc, 0,
6297 "[Picture %d Quality] NO MV Data yet.\n",
6298 pic_number);
6299#endif
6300 /* reset all counts */
6301 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6302 return;
6303 }
6304 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6305 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6306 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6307 /* should all be 0x00 or 0xff */
6308#ifdef DEBUG_QOS
6309 hevc_print(hevc, 0,
6310 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6311 pic_number, rdata32);
6312#endif
6313 mvx_L0_hi = ((rdata32>>0)&0xff);
6314 mvy_L0_hi = ((rdata32>>8)&0xff);
6315 mvx_L1_hi = ((rdata32>>16)&0xff);
6316 mvy_L1_hi = ((rdata32>>24)&0xff);
6317
6318 /* mvx_L0_count[31:0] */
6319 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6320 temp_value = mvx_L0_hi;
6321 temp_value = (temp_value << 32) | rdata32_l;
6322
6323 if (mvx_L0_hi & 0x80)
6324 value = 0xFFFFFFF000000000 | temp_value;
6325 else
6326 value = temp_value;
6327 value = div_s64(value, blk22_mv_count);
6328#ifdef DEBUG_QOS
6329 hevc_print(hevc, 0,
6330 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6331 pic_number, (int)value,
6332 value, blk22_mv_count);
6333#endif
6334 picture->avg_mv = value;
6335
6336 /* mvy_L0_count[31:0] */
6337 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6338 temp_value = mvy_L0_hi;
6339 temp_value = (temp_value << 32) | rdata32_l;
6340
6341 if (mvy_L0_hi & 0x80)
6342 value = 0xFFFFFFF000000000 | temp_value;
6343 else
6344 value = temp_value;
6345#ifdef DEBUG_QOS
6346 hevc_print(hevc, 0,
6347 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6348 pic_number, rdata32_l/blk22_mv_count,
6349 value, blk22_mv_count);
6350#endif
6351
6352 /* mvx_L1_count[31:0] */
6353 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6354 temp_value = mvx_L1_hi;
6355 temp_value = (temp_value << 32) | rdata32_l;
6356 if (mvx_L1_hi & 0x80)
6357 value = 0xFFFFFFF000000000 | temp_value;
6358 else
6359 value = temp_value;
6360#ifdef DEBUG_QOS
6361 hevc_print(hevc, 0,
6362 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6363 pic_number, rdata32_l/blk22_mv_count,
6364 value, blk22_mv_count);
6365#endif
6366
6367 /* mvy_L1_count[31:0] */
6368 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6369 temp_value = mvy_L1_hi;
6370 temp_value = (temp_value << 32) | rdata32_l;
6371 if (mvy_L1_hi & 0x80)
6372 value = 0xFFFFFFF000000000 | temp_value;
6373 else
6374 value = temp_value;
6375#ifdef DEBUG_QOS
6376 hevc_print(hevc, 0,
6377 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6378 pic_number, rdata32_l/blk22_mv_count,
6379 value, blk22_mv_count);
6380#endif
6381
6382 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6383 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6384 mv_hi = (rdata32>>16)&0xffff;
6385 if (mv_hi & 0x8000)
6386 mv_hi = 0x8000 - mv_hi;
6387#ifdef DEBUG_QOS
6388 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6389 pic_number, mv_hi);
6390#endif
6391 picture->max_mv = mv_hi;
6392
6393 mv_lo = (rdata32>>0)&0xffff;
6394 if (mv_lo & 0x8000)
6395 mv_lo = 0x8000 - mv_lo;
6396#ifdef DEBUG_QOS
6397 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6398 pic_number, mv_lo);
6399#endif
6400 picture->min_mv = mv_lo;
6401
6402 /* {mvy_L0_max, mvy_L0_min} */
6403 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6404 mv_hi = (rdata32>>16)&0xffff;
6405 if (mv_hi & 0x8000)
6406 mv_hi = 0x8000 - mv_hi;
6407#ifdef DEBUG_QOS
6408 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6409 pic_number, mv_hi);
6410#endif
6411
6412 mv_lo = (rdata32>>0)&0xffff;
6413 if (mv_lo & 0x8000)
6414 mv_lo = 0x8000 - mv_lo;
6415#ifdef DEBUG_QOS
6416 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6417 pic_number, mv_lo);
6418#endif
6419
6420 /* {mvx_L1_max, mvx_L1_min} */
6421 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6422 mv_hi = (rdata32>>16)&0xffff;
6423 if (mv_hi & 0x8000)
6424 mv_hi = 0x8000 - mv_hi;
6425#ifdef DEBUG_QOS
6426 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6427 pic_number, mv_hi);
6428#endif
6429
6430 mv_lo = (rdata32>>0)&0xffff;
6431 if (mv_lo & 0x8000)
6432 mv_lo = 0x8000 - mv_lo;
6433#ifdef DEBUG_QOS
6434 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6435 pic_number, mv_lo);
6436#endif
6437
6438 /* {mvy_L1_max, mvy_L1_min} */
6439 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6440 mv_hi = (rdata32>>16)&0xffff;
6441 if (mv_hi & 0x8000)
6442 mv_hi = 0x8000 - mv_hi;
6443#ifdef DEBUG_QOS
6444 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6445 pic_number, mv_hi);
6446#endif
6447 mv_lo = (rdata32>>0)&0xffff;
6448 if (mv_lo & 0x8000)
6449 mv_lo = 0x8000 - mv_lo;
6450#ifdef DEBUG_QOS
6451 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6452 pic_number, mv_lo);
6453#endif
6454
6455 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6456#ifdef DEBUG_QOS
6457 hevc_print(hevc, 0,
6458 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6459 pic_number, rdata32);
6460#endif
6461 /* reset all counts */
6462 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6463 }
6464}
6465
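/*
 * Process the slice segment header parameters delivered through the
 * RPM area: track picture size and LCU geometry, compute the POC,
 * skip RASL slices after a random access point, allocate a new
 * picture (capturing prefix/suffix aux data), and set up tiles,
 * the reference picture lists and the MC/MPRED/SAO hardware.
 * Returns 0 to continue decoding, 1 to skip a RASL slice, 2 to skip
 * the current picture, 3 or 4 on fatal stream parameter errors,
 * 0xf when only buffer management should run (debug skip / I-only),
 * and -1 when waiting for a free buffer.
 */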
6466static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6467 union param_u *rpm_param,
6468 int decode_pic_begin)
6469{
6470#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6471 struct vdec_s *vdec = hw_to_vdec(hevc);
6472#endif
6473 int i;
6474 int lcu_x_num_div;
6475 int lcu_y_num_div;
6476 int Col_ref;
6477 int dbg_skip_flag = 0;
6478
6479 if (hevc->wait_buf == 0) {
6480 hevc->sps_num_reorder_pics_0 =
6481 rpm_param->p.sps_num_reorder_pics_0;
6482 hevc->m_temporalId = rpm_param->p.m_temporalId;
6483 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6484 hevc->interlace_flag =
6485 (rpm_param->p.profile_etc >> 2) & 0x1;
6486 hevc->curr_pic_struct =
6487 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6488 if (parser_sei_enable & 0x4) {
6489 hevc->frame_field_info_present_flag =
6490 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6491 }
6492
6493 if (interlace_enable == 0 || hevc->m_ins_flag)
6494 hevc->interlace_flag = 0;
6495 if (interlace_enable & 0x100)
6496 hevc->interlace_flag = interlace_enable & 0x1;
6497 if (hevc->interlace_flag == 0)
6498 hevc->curr_pic_struct = 0;
6499 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6500 /*
6501 *hevc->m_pocRandomAccess = MAX_INT;
6502 * //add to fix RAP_B_Bossen_1
6503 */
6504 /* } */
6505 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6506 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6507 hevc->slice_segment_addr =
6508 rpm_param->p.slice_segment_address;
6509 if (!rpm_param->p.dependent_slice_segment_flag)
6510 hevc->slice_addr = hevc->slice_segment_addr;
6511 } else {
6512 hevc->slice_segment_addr = 0;
6513 hevc->slice_addr = 0;
6514 }
6515
6516 hevc->iPrevPOC = hevc->curr_POC;
6517 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6518 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6519 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6520 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6521 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6522 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6523 hevc->isNextSliceSegment =
6524 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6525 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6526 || hevc->pic_h !=
6527 rpm_param->p.pic_height_in_luma_samples) {
6528 hevc_print(hevc, 0,
6529 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6530 hevc->pic_w, hevc->pic_h,
6531 rpm_param->p.pic_width_in_luma_samples,
6532 rpm_param->p.pic_height_in_luma_samples,
6533 hevc->interlace_flag);
6534
6535 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6536 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6537 hevc->frame_width = hevc->pic_w;
6538 hevc->frame_height = hevc->pic_h;
6539#ifdef LOSLESS_COMPRESS_MODE
6540 if (/*re_config_pic_flag == 0 &&*/
6541 (get_double_write_mode(hevc) & 0x10) == 0)
6542 init_decode_head_hw(hevc);
6543#endif
6544 }
6545
6546 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6547 hevc_print(hevc, 0, "over size : %u x %u.\n",
6548 hevc->pic_w, hevc->pic_h);
6549 if ((!hevc->m_ins_flag) &&
6550 ((debug &
6551 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6552 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6553 H265_DEBUG_DIS_SYS_ERROR_PROC);
6554 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6555 return 3;
6556 }
6557 if (hevc->bit_depth_chroma > 10 ||
6558 hevc->bit_depth_luma > 10) {
6559			hevc_print(hevc, 0, "unsupported bitdepth : %u,%u\n",
6560 hevc->bit_depth_chroma,
6561 hevc->bit_depth_luma);
6562 if (!hevc->m_ins_flag)
6563 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6564 H265_DEBUG_DIS_SYS_ERROR_PROC);
6565 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6566 return 4;
6567 }
6568
6569 /* it will cause divide 0 error */
6570 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6571 if (get_dbg_flag(hevc)) {
6572 hevc_print(hevc, 0,
6573 "Fatal Error, pic_w = %d, pic_h = %d\n",
6574 hevc->pic_w, hevc->pic_h);
6575 }
6576 return 3;
6577 }
6578 pic_list_process(hevc);
6579
6580 hevc->lcu_size =
6581 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6582 3 + rpm_param->
6583 p.log2_diff_max_min_coding_block_size);
6584 if (hevc->lcu_size == 0) {
6585 hevc_print(hevc, 0,
6586 "Error, lcu_size = 0 (%d,%d)\n",
6587 rpm_param->p.
6588 log2_min_coding_block_size_minus3,
6589 rpm_param->p.
6590 log2_diff_max_min_coding_block_size);
6591 return 3;
6592 }
6593 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6594 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6595 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6596 hevc->lcu_x_num =
6597 ((hevc->pic_w % hevc->lcu_size) ==
6598 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6599 hevc->lcu_y_num =
6600 ((hevc->pic_h % hevc->lcu_size) ==
6601 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6602 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
6603
6604 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6605 || hevc->m_nalUnitType ==
6606 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6607 hevc->curr_POC = 0;
6608 if ((hevc->m_temporalId - 1) == 0)
6609 hevc->iPrevTid0POC = hevc->curr_POC;
6610 } else {
6611 int iMaxPOClsb =
6612 1 << (rpm_param->p.
6613 log2_max_pic_order_cnt_lsb_minus4 + 4);
6614 int iPrevPOClsb;
6615 int iPrevPOCmsb;
6616 int iPOCmsb;
6617 int iPOClsb = rpm_param->p.POClsb;
6618
6619 if (iMaxPOClsb == 0) {
6620 hevc_print(hevc, 0,
6621 "error iMaxPOClsb is 0\n");
6622 return 3;
6623 }
6624
6625 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
6626 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
6627
6628 if ((iPOClsb < iPrevPOClsb)
6629 && ((iPrevPOClsb - iPOClsb) >=
6630 (iMaxPOClsb / 2)))
6631 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
6632 else if ((iPOClsb > iPrevPOClsb)
6633 && ((iPOClsb - iPrevPOClsb) >
6634 (iMaxPOClsb / 2)))
6635 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
6636 else
6637 iPOCmsb = iPrevPOCmsb;
6638 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6639 hevc_print(hevc, 0,
6640 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
6641 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
6642 iPOClsb);
6643 }
6644 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6645 || hevc->m_nalUnitType ==
6646 NAL_UNIT_CODED_SLICE_BLANT
6647 || hevc->m_nalUnitType ==
6648 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
6649 /* For BLA picture types, POCmsb is set to 0. */
6650 iPOCmsb = 0;
6651 }
6652 hevc->curr_POC = (iPOCmsb + iPOClsb);
6653 if ((hevc->m_temporalId - 1) == 0)
6654 hevc->iPrevTid0POC = hevc->curr_POC;
6655 else {
6656 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6657 hevc_print(hevc, 0,
6658 "m_temporalID is %d\n",
6659 hevc->m_temporalId);
6660 }
6661 }
6662 }
6663 hevc->RefNum_L0 =
6664 (rpm_param->p.num_ref_idx_l0_active >
6665 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6666 num_ref_idx_l0_active;
6667 hevc->RefNum_L1 =
6668 (rpm_param->p.num_ref_idx_l1_active >
6669 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6670 num_ref_idx_l1_active;
6671
6672 /* if(curr_POC==0x10) dump_lmem(); */
6673
6674 /* skip RASL pictures after CRA/BLA pictures */
6675 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
6676 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
6677 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6678 || hevc->m_nalUnitType ==
6679 NAL_UNIT_CODED_SLICE_BLANT
6680 || hevc->m_nalUnitType ==
6681 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6682 hevc->m_pocRandomAccess = hevc->curr_POC;
6683 else
6684 hevc->m_pocRandomAccess = -MAX_INT;
6685 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6686 || hevc->m_nalUnitType ==
6687 NAL_UNIT_CODED_SLICE_BLANT
6688 || hevc->m_nalUnitType ==
6689 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6690 hevc->m_pocRandomAccess = hevc->curr_POC;
6691 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
6692 (nal_skip_policy >= 3) &&
6693 (hevc->m_nalUnitType ==
6694 NAL_UNIT_CODED_SLICE_RASL_N ||
6695 hevc->m_nalUnitType ==
6696 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
6697 if (get_dbg_flag(hevc)) {
6698 hevc_print(hevc, 0,
6699 "RASL picture with POC %d < %d ",
6700 hevc->curr_POC, hevc->m_pocRandomAccess);
6701 hevc_print(hevc, 0,
6702					"(RandomAccess point POC), skip it\n");
6703 }
6704 return 1;
6705 }
6706
6707 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
6708 hevc->skip_flag = 0;
6709 /**/
6710 /* if((iPrevPOC != curr_POC)){ */
6711 if (rpm_param->p.slice_segment_address == 0) {
6712 struct PIC_s *pic;
6713
6714 hevc->new_pic = 1;
6715#ifdef MULTI_INSTANCE_SUPPORT
6716 if (!hevc->m_ins_flag)
6717#endif
6718 check_pic_decoded_error_pre(hevc,
6719 READ_VREG(HEVC_PARSER_LCU_START)
6720 & 0xffffff);
6721 /**/ if (use_cma == 0) {
6722 if (hevc->pic_list_init_flag == 0) {
6723 init_pic_list(hevc);
6724 init_pic_list_hw(hevc);
6725 init_buf_spec(hevc);
6726 hevc->pic_list_init_flag = 3;
6727 }
6728 }
6729 if (!hevc->m_ins_flag) {
6730 if (hevc->cur_pic)
6731 get_picture_qos_info(hevc);
6732 }
6733 hevc->first_pic_after_recover = 0;
6734 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6735 dump_pic_list(hevc);
6736 /* prev pic */
6737 hevc_pre_pic(hevc, pic);
6738 /*
6739 *update referenced of old pictures
6740 *(cur_pic->referenced is 1 and not updated)
6741 */
6742 apply_ref_pic_set(hevc, hevc->curr_POC,
6743 rpm_param);
6744
6745 if (hevc->mmu_enable)
6746 recycle_mmu_bufs(hevc);
6747
6748#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6749 if (vdec->master) {
6750 struct hevc_state_s *hevc_ba =
6751 (struct hevc_state_s *)
6752 vdec->master->private;
6753 if (hevc_ba->cur_pic != NULL) {
6754 hevc_ba->cur_pic->dv_enhance_exist = 1;
6755 hevc_print(hevc, H265_DEBUG_DV,
6756 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
6757 hevc->curr_POC, hevc_ba->cur_pic->POC);
6758 }
6759 }
6760 if (vdec->master == NULL &&
6761 vdec->slave == NULL)
6762 set_aux_data(hevc,
6763 hevc->cur_pic, 1, 0); /*suffix*/
6764 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6765 set_aux_data(hevc,
6766 hevc->cur_pic, 0, 1); /*dv meta only*/
6767#else
6768 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6769#endif
6770 /* new pic */
6771 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6772 if (hevc->cur_pic == NULL) {
6773 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6774 dump_pic_list(hevc);
6775 hevc->wait_buf = 1;
6776 return -1;
6777 }
6778#ifdef MULTI_INSTANCE_SUPPORT
6779 hevc->decoding_pic = hevc->cur_pic;
6780 if (!hevc->m_ins_flag)
6781 hevc->over_decode = 0;
6782#endif
6783#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6784 hevc->cur_pic->dv_enhance_exist = 0;
6785 if (vdec->slave)
6786 hevc_print(hevc, H265_DEBUG_DV,
6787 "Clear bl (poc %d) dv_enhance_exist flag\n",
6788 hevc->curr_POC);
6789 if (vdec->master == NULL &&
6790 vdec->slave == NULL)
6791 set_aux_data(hevc,
6792 hevc->cur_pic, 0, 0); /*prefix*/
6793
6794 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6795 set_aux_data(hevc,
6796 hevc->cur_pic, 0, 2); /*pre sei only*/
6797#else
6798 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6799#endif
6800 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
6801 hevc->cur_pic->output_ready = 1;
6802 hevc->cur_pic->stream_offset =
6803 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
6804 prepare_display_buf(hevc, hevc->cur_pic);
6805 hevc->wait_buf = 2;
6806 return -1;
6807 }
6808 } else {
6809 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
6810#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6811 if (vdec->master == NULL &&
6812 vdec->slave == NULL) {
6813 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6814 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6815 }
6816#else
6817 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6818 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6819#endif
6820 }
6821 if (hevc->pic_list_init_flag != 3
6822 || hevc->cur_pic == NULL) {
6823 /* make it dec from the first slice segment */
6824 return 3;
6825 }
6826 hevc->cur_pic->slice_idx++;
6827 hevc->new_pic = 0;
6828 }
6829 } else {
6830 if (hevc->wait_buf == 1) {
6831 pic_list_process(hevc);
6832 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6833 if (hevc->cur_pic == NULL)
6834 return -1;
6835
6836 if (!hevc->m_ins_flag)
6837 hevc->over_decode = 0;
6838
6839#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6840 hevc->cur_pic->dv_enhance_exist = 0;
6841 if (vdec->master == NULL &&
6842 vdec->slave == NULL)
6843 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6844#else
6845 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6846#endif
6847 hevc->wait_buf = 0;
6848 } else if (hevc->wait_buf ==
6849 2) {
6850 if (get_display_pic_num(hevc) >
6851 1)
6852 return -1;
6853 hevc->wait_buf = 0;
6854 }
6855 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6856 dump_pic_list(hevc);
6857 }
6858
6859 if (hevc->new_pic) {
6860#if 1
6861 /*SUPPORT_10BIT*/
6862 int sao_mem_unit =
6863 (hevc->lcu_size == 16 ? 9 :
6864 hevc->lcu_size ==
6865 32 ? 14 : 24) << 4;
6866#else
6867 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
6868#endif
6869 int pic_height_cu =
6870 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
6871 int pic_width_cu =
6872 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
6873 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
6874
6875 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
6876 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6877 hevc_print(hevc, 0,
6878 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
6879 __func__,
6880 hevc->decode_idx,
6881 hevc->curr_pic_struct,
6882 hevc->interlace_flag,
6883 hevc->cur_pic->index);
6884 }
6885 if (dbg_skip_decode_index != 0 &&
6886 hevc->decode_idx == dbg_skip_decode_index)
6887 dbg_skip_flag = 1;
6888
6889 hevc->decode_idx++;
6890 update_tile_info(hevc, pic_width_cu, pic_height_cu,
6891 sao_mem_unit, rpm_param);
6892
6893 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
6894 }
6895
6896 if (hevc->iPrevPOC != hevc->curr_POC) {
6897 hevc->new_tile = 1;
6898 hevc->tile_x = 0;
6899 hevc->tile_y = 0;
6900 hevc->tile_y_x = 0;
6901 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6902 hevc_print(hevc, 0,
6903 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
6904 hevc->tile_x, hevc->tile_y);
6905 }
6906 } else if (hevc->tile_enabled) {
6907 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6908 hevc_print(hevc, 0,
6909 "slice_segment_address is %d\n",
6910 rpm_param->p.slice_segment_address);
6911 }
6912 hevc->tile_y_x =
6913 get_tile_index(hevc, rpm_param->p.slice_segment_address,
6914 (hevc->pic_w +
6915 hevc->lcu_size -
6916 1) / hevc->lcu_size);
6917 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
6918 && (hevc->tile_y_x != -1)) {
6919 hevc->new_tile = 1;
6920 hevc->tile_x = hevc->tile_y_x & 0xff;
6921 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
6922 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6923 hevc_print(hevc, 0,
6924 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
6925 rpm_param->p.slice_segment_address,
6926 hevc->tile_x, hevc->tile_y);
6927 }
6928 } else
6929 hevc->new_tile = 0;
6930 } else
6931 hevc->new_tile = 0;
6932
6933 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
6934 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
6935 hevc->new_tile = 0;
6936
6937 if (hevc->new_tile) {
6938 hevc->tile_start_lcu_x =
6939 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
6940 hevc->tile_start_lcu_y =
6941 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
6942 hevc->tile_width_lcu =
6943 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
6944 hevc->tile_height_lcu =
6945 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
6946 }
6947
6948 set_ref_pic_list(hevc, rpm_param);
6949
6950 Col_ref = rpm_param->p.collocated_ref_idx;
6951
6952 hevc->LDCFlag = 0;
6953 if (rpm_param->p.slice_type != I_SLICE) {
6954 hevc->LDCFlag = 1;
6955 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
6956 if (hevc->cur_pic->
6957 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
6958 hevc->curr_POC)
6959 hevc->LDCFlag = 0;
6960 }
6961 if (rpm_param->p.slice_type == B_SLICE) {
6962 for (i = 0; (i < hevc->RefNum_L1)
6963 && hevc->LDCFlag; i++) {
6964 if (hevc->cur_pic->
6965 m_aiRefPOCList1[hevc->cur_pic->
6966 slice_idx][i] >
6967 hevc->curr_POC)
6968 hevc->LDCFlag = 0;
6969 }
6970 }
6971 }
6972
6973 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
6974
6975 hevc->plevel =
6976 rpm_param->p.log2_parallel_merge_level;
6977 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
6978
6979 hevc->LongTerm_Curr = 0; /* to do ... */
6980 hevc->LongTerm_Col = 0; /* to do ... */
6981
6982 hevc->list_no = 0;
6983 if (rpm_param->p.slice_type == B_SLICE)
6984 hevc->list_no = 1 - hevc->ColFromL0Flag;
6985 if (hevc->list_no == 0) {
6986 if (Col_ref < hevc->RefNum_L0) {
6987 hevc->Col_POC =
6988 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
6989 slice_idx][Col_ref];
6990 } else
6991 hevc->Col_POC = INVALID_POC;
6992 } else {
6993 if (Col_ref < hevc->RefNum_L1) {
6994 hevc->Col_POC =
6995 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
6996 slice_idx][Col_ref];
6997 } else
6998 hevc->Col_POC = INVALID_POC;
6999 }
7000
7001 hevc->LongTerm_Ref = 0; /* to do ... */
7002
7003 if (hevc->slice_type != 2) {
7004 /* if(hevc->i_only==1){ */
7005 /* return 0xf; */
7006 /* } */
7007
7008 if (hevc->Col_POC != INVALID_POC) {
7009 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7010 if (hevc->col_pic == NULL) {
7011 hevc->cur_pic->error_mark = 1;
7012 if (get_dbg_flag(hevc)) {
7013 hevc_print(hevc, 0,
7014						"WRONG, fail to get the pic Col_POC\n");
7015 }
7016 if (is_log_enable(hevc))
7017 add_log(hevc,
7018						"WRONG, fail to get the pic Col_POC");
7019 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7020 hevc->cur_pic->error_mark = 1;
7021 if (get_dbg_flag(hevc)) {
7022 hevc_print(hevc, 0,
7023 "WRONG, Col_POC error_mark is 1\n");
7024 }
7025 if (is_log_enable(hevc))
7026 add_log(hevc,
7027 "WRONG, Col_POC error_mark is 1");
7028 } else {
7029 if ((hevc->col_pic->width
7030 != hevc->pic_w) ||
7031 (hevc->col_pic->height
7032 != hevc->pic_h)) {
7033 hevc_print(hevc, 0,
7034 "Wrong reference pic (poc %d) width/height %d/%d\n",
7035 hevc->col_pic->POC,
7036 hevc->col_pic->width,
7037 hevc->col_pic->height);
7038 hevc->cur_pic->error_mark = 1;
7039 }
7040
7041 }
7042
7043 if (hevc->cur_pic->error_mark
7044 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7045#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7046 /*count info*/
7047 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7048 hevc->cur_pic->stream_offset);
7049#endif
7050 }
7051
7052 if (is_skip_decoding(hevc,
7053 hevc->cur_pic)) {
7054 return 2;
7055 }
7056 } else
7057 hevc->col_pic = hevc->cur_pic;
7058 } /* */
7059 if (hevc->col_pic == NULL)
7060 hevc->col_pic = hevc->cur_pic;
7061#ifdef BUFFER_MGR_ONLY
7062 return 0xf;
7063#else
7064 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7065 || (dbg_skip_flag))
7066 return 0xf;
7067#endif
7068
7069 config_mc_buffer(hevc, hevc->cur_pic);
7070
7071 if (is_skip_decoding(hevc,
7072 hevc->cur_pic)) {
7073 if (get_dbg_flag(hevc))
7074 hevc_print(hevc, 0,
7075 "Discard this picture index %d\n",
7076 hevc->cur_pic->index);
7077#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7078 /*count info*/
7079 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7080 hevc->cur_pic->stream_offset);
7081#endif
7082 return 2;
7083 }
7084#ifdef MCRCC_ENABLE
7085 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7086#endif
7087 config_mpred_hw(hevc);
7088
7089 config_sao_hw(hevc, rpm_param);
7090
7091 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7092 return 0xf;
7093
7094 return 0;
7095}
7096
7097
7098
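/*
 * Allocate the MMU pages backing the compressed frame body of
 * 'new_pic': compute the lossless compressed body size for the
 * picture dimensions and bit depth, convert it to a count of 4K
 * pages and allocate them for the picture index from the mmu box.
 * Skipped entirely when double write mode 0x10 (no compressed
 * output) is used.
 */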
7099static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7100 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7101 int cur_buf_idx = new_pic->index;
7102 int bit_depth_10 = (bit_depth != 0x00);
7103 int picture_size;
7104 int cur_mmu_4k_number;
7105 int ret, max_frame_num;
7106 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7107 new_pic->height, !bit_depth_10);
7108 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7109 if (hevc->double_write_mode & 0x10)
7110 return 0;
7111 /*hevc_print(hevc, 0,
7112 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7113 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7114 if (new_pic->scatter_alloc) {
7115 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7116 new_pic->scatter_alloc = 0;
7117 }
7118 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7119 max_frame_num = MAX_FRAME_8K_NUM;
7120 else
7121 max_frame_num = MAX_FRAME_4K_NUM;
7122 if (cur_mmu_4k_number > max_frame_num) {
7123 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7124 cur_mmu_4k_number,
7125 new_pic->width,
7126 new_pic->height);
7127 return -1;
7128 }
7129 ret = decoder_mmu_box_alloc_idx(
7130 hevc->mmu_box,
7131 cur_buf_idx,
7132 cur_mmu_4k_number,
7133 mmu_index_adr);
7134 if (ret == 0)
7135 new_pic->scatter_alloc = 1;
7136 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7137 "%s pic index %d page count(%d) ret =%d\n",
7138 __func__, cur_buf_idx,
7139 cur_mmu_4k_number,
7140 ret);
7141 return ret;
7142}
7143
7144
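/* Free the picture's scatter-allocated MMU pages (compressed output only). */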
7145static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7146 struct PIC_s *pic)
7147{
7148 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7149 "%s pic index %d scatter_alloc %d\n",
7150 __func__, pic->index,
7151 pic->scatter_alloc);
7152
7153 if (hevc->mmu_enable
7154 && ((hevc->double_write_mode & 0x10) == 0)
7155 && pic->scatter_alloc)
7156 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7157 pic->scatter_alloc = 0;
7158}
7159
7160/*
7161 *************************************************
7162 *
7163 *h265 buffer management end
7164 *
7165 **************************************************
7166 */
7167static struct hevc_state_s *gHevc;
7168
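/*
 * Free the per-instance DMA buffers: ucode swap area, detrefill
 * buffer, aux (prefix/suffix) buffer, RPM buffer, lmem buffer and
 * the frame MMU map.
 */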
7169static void hevc_local_uninit(struct hevc_state_s *hevc)
7170{
7171 hevc->rpm_ptr = NULL;
7172 hevc->lmem_ptr = NULL;
7173
7174#ifdef SWAP_HEVC_UCODE
7175 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7176 if (hevc->mc_cpu_addr != NULL) {
7177 dma_free_coherent(amports_get_dma_device(),
7178 hevc->swap_size, hevc->mc_cpu_addr,
7179 hevc->mc_dma_handle);
7180 hevc->mc_cpu_addr = NULL;
7181 }
7182
7183 }
7184#endif
7185#ifdef DETREFILL_ENABLE
7186 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7187 uninit_detrefill_buf(hevc);
7188#endif
7189 if (hevc->aux_addr) {
7190 dma_free_coherent(amports_get_dma_device(),
7191 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7192 hevc->aux_phy_addr);
7193 hevc->aux_addr = NULL;
7194 }
7195 if (hevc->rpm_addr) {
7196 dma_free_coherent(amports_get_dma_device(),
7197 RPM_BUF_SIZE, hevc->rpm_addr,
7198 hevc->rpm_phy_addr);
7199 hevc->rpm_addr = NULL;
7200 }
7201 if (hevc->lmem_addr) {
7202 dma_free_coherent(amports_get_dma_device(),
7203			LMEM_BUF_SIZE, hevc->lmem_addr,
7204 hevc->lmem_phy_addr);
7205 hevc->lmem_addr = NULL;
7206 }
7207
7208 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7209 if (hevc->frame_mmu_map_phy_addr)
7210 dma_free_coherent(amports_get_dma_device(),
7211 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7212 hevc->frame_mmu_map_phy_addr);
7213
7214 hevc->frame_mmu_map_addr = NULL;
7215 }
7216
7217 kfree(gvs);
7218 gvs = NULL;
7219}
7220
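/*
 * Per-instance initialization: pick the workspace spec (1080p or 4K,
 * larger spec on SM1 and later), initialize the buffer spec and the
 * decoder state, then allocate the RPM, aux (prefix + suffix), lmem
 * and frame MMU map DMA buffers.
 */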
7221static int hevc_local_init(struct hevc_state_s *hevc)
7222{
7223 int ret = -1;
7224 struct BuffInfo_s *cur_buf_info = NULL;
7225
7226 memset(&hevc->param, 0, sizeof(union param_u));
7227
7228 cur_buf_info = &hevc->work_space_buf_store;
7229
7230 if (vdec_is_support_4k()) {
7231 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7232 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7233 sizeof(struct BuffInfo_s));
7234 else
7235 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7236 sizeof(struct BuffInfo_s));
7237 } else
7238 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7239 sizeof(struct BuffInfo_s));
7240
7241 cur_buf_info->start_adr = hevc->buf_start;
7242 init_buff_spec(hevc, cur_buf_info);
7243
7244 hevc_init_stru(hevc, cur_buf_info);
7245
7246 hevc->bit_depth_luma = 8;
7247 hevc->bit_depth_chroma = 8;
7248 hevc->video_signal_type = 0;
7249 hevc->video_signal_type_debug = 0;
7250 bit_depth_luma = hevc->bit_depth_luma;
7251 bit_depth_chroma = hevc->bit_depth_chroma;
7252 video_signal_type = hevc->video_signal_type;
7253
7254 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7255 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7256 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7257 if (hevc->rpm_addr == NULL) {
7258 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7259 return -1;
7260 }
7261 hevc->rpm_ptr = hevc->rpm_addr;
7262 }
7263
7264 if (prefix_aux_buf_size > 0 ||
7265 suffix_aux_buf_size > 0) {
7266 u32 aux_buf_size;
7267
7268 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7269 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7270 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7271		hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7272			aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7273		if (hevc->aux_addr == NULL) {
7274			pr_err("%s: failed to alloc aux buffer\n", __func__);
7275 return -1;
7276 }
7277 }
7278
7279 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7280 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7281 if (hevc->lmem_addr == NULL) {
7282 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7283 return -1;
7284 }
7285 hevc->lmem_ptr = hevc->lmem_addr;
7286
7287 if (hevc->mmu_enable) {
7288 hevc->frame_mmu_map_addr =
7289 dma_alloc_coherent(amports_get_dma_device(),
7290 get_frame_mmu_map_size(),
7291 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7292 if (hevc->frame_mmu_map_addr == NULL) {
7293			pr_err("%s: failed to alloc frame_mmu_map buffer\n", __func__);
7294 return -1;
7295 }
7296 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7297 }
7298 ret = 0;
7299 return ret;
7300}
7301
7302/*
7303 *******************************************
7304 * Mailbox command
7305 *******************************************
7306 */
7307#define CMD_FINISHED 0
7308#define CMD_ALLOC_VIEW 1
7309#define CMD_FRAME_DISPLAY 3
7310#define CMD_DEBUG 10
7311
7312
7313#define DECODE_BUFFER_NUM_MAX 32
7314#define DISPLAY_BUFFER_NUM 6
7315
7316#define video_domain_addr(adr) (adr&0x7fffffff)
7317#define DECODER_WORK_SPACE_SIZE 0x800000
7318
7319#define spec2canvas(x) \
7320 (((x)->uv_canvas_index << 16) | \
7321 ((x)->uv_canvas_index << 8) | \
7322 ((x)->y_canvas_index << 0))
7323
7324
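/*
 * Configure the display canvases for a picture. With double write
 * enabled, the canvas size is the decoded size divided by the double
 * write ratio and aligned according to mem_map_mode; canvas indexes
 * come from the per-vdec allocator in parallel decode mode, otherwise
 * fixed indexes derived from the picture index are used.
 */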
7325static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7326{
7327 struct vdec_s *vdec = hw_to_vdec(hevc);
7328 int canvas_w = ALIGN(pic->width, 64)/4;
7329 int canvas_h = ALIGN(pic->height, 32)/4;
7330 int blkmode = mem_map_mode;
7331
7332 /*CANVAS_BLKMODE_64X32*/
7333#ifdef SUPPORT_10BIT
7334 if (pic->double_write_mode) {
7335 canvas_w = pic->width /
7336 get_double_write_ratio(hevc, pic->double_write_mode);
7337 canvas_h = pic->height /
7338 get_double_write_ratio(hevc, pic->double_write_mode);
7339
7340 if (mem_map_mode == 0)
7341 canvas_w = ALIGN(canvas_w, 32);
7342 else
7343 canvas_w = ALIGN(canvas_w, 64);
7344 canvas_h = ALIGN(canvas_h, 32);
7345
7346 if (vdec->parallel_dec == 1) {
7347 if (pic->y_canvas_index == -1)
7348 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7349 if (pic->uv_canvas_index == -1)
7350 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7351 } else {
7352 pic->y_canvas_index = 128 + pic->index * 2;
7353 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7354 }
7355
7356 canvas_config_ex(pic->y_canvas_index,
7357 pic->dw_y_adr, canvas_w, canvas_h,
7358 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7359 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7360 canvas_w, canvas_h,
7361 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7362#ifdef MULTI_INSTANCE_SUPPORT
7363 pic->canvas_config[0].phy_addr =
7364 pic->dw_y_adr;
7365 pic->canvas_config[0].width =
7366 canvas_w;
7367 pic->canvas_config[0].height =
7368 canvas_h;
7369 pic->canvas_config[0].block_mode =
7370 blkmode;
7371 pic->canvas_config[0].endian = 7;
7372
7373 pic->canvas_config[1].phy_addr =
7374 pic->dw_u_v_adr;
7375 pic->canvas_config[1].width =
7376 canvas_w;
7377 pic->canvas_config[1].height =
7378 canvas_h;
7379 pic->canvas_config[1].block_mode =
7380 blkmode;
7381 pic->canvas_config[1].endian = 7;
7382#endif
7383 } else {
7384 if (!hevc->mmu_enable) {
7385 /* to change after 10bit VPU is ready ... */
7386 if (vdec->parallel_dec == 1) {
7387 if (pic->y_canvas_index == -1)
7388 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7389 pic->uv_canvas_index = pic->y_canvas_index;
7390 } else {
7391 pic->y_canvas_index = 128 + pic->index;
7392 pic->uv_canvas_index = 128 + pic->index;
7393 }
7394
7395 canvas_config_ex(pic->y_canvas_index,
7396 pic->mc_y_adr, canvas_w, canvas_h,
7397 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7398 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7399 canvas_w, canvas_h,
7400 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7401 }
7402 }
7403#else
7404 if (vdec->parallel_dec == 1) {
7405 if (pic->y_canvas_index == -1)
7406 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7407 if (pic->uv_canvas_index == -1)
7408 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7409 } else {
7410 pic->y_canvas_index = 128 + pic->index * 2;
7411 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7412 }
7413
7414
7415 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7416 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7417 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7418 canvas_w, canvas_h,
7419 CANVAS_ADDR_NOWRAP, blkmode, 0x7);
7420#endif
7421}
7422
7423static int init_buf_spec(struct hevc_state_s *hevc)
7424{
7425 int pic_width = hevc->pic_w;
7426 int pic_height = hevc->pic_h;
7427
7428 /* hevc_print(hevc, 0,
7429 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7430 */
7431 hevc_print(hevc, 0,
7432 "%s2 %d %d\n", __func__, pic_width, pic_height);
7433 /* pic_width = hevc->pic_w; */
7434 /* pic_height = hevc->pic_h; */
7435
7436 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7437 hevc->frame_width = pic_width;
7438 hevc->frame_height = pic_height;
7439
7440 }
7441
7442 return 0;
7443}
7444
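/*
 * Walk the SEI payloads stored in the picture aux data: picture
 * timing (pic_struct), ITU-T T.35 user data (HDR10+ detection),
 * mastering display colour volume and content light level, latching
 * the results into the hevc state and sei_present_flag.
 */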
7445static int parse_sei(struct hevc_state_s *hevc,
7446 struct PIC_s *pic, char *sei_buf, uint32_t size)
7447{
7448 char *p = sei_buf;
7449 char *p_sei;
7450 uint16_t header;
7451 uint8_t nal_unit_type;
7452 uint8_t payload_type, payload_size;
7453 int i, j;
7454
7455 if (size < 2)
7456 return 0;
7457 header = *p++;
7458 header <<= 8;
7459 header += *p++;
7460 nal_unit_type = header >> 9;
7461 if ((nal_unit_type != NAL_UNIT_SEI)
7462 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7463 return 0;
7464 while (p+2 <= sei_buf+size) {
7465 payload_type = *p++;
7466 payload_size = *p++;
7467 if (p+payload_size <= sei_buf+size) {
7468 switch (payload_type) {
7469 case SEI_PicTiming:
7470 if ((parser_sei_enable & 0x4) &&
7471 hevc->frame_field_info_present_flag) {
7472 p_sei = p;
7473 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7474 pic->pic_struct = hevc->curr_pic_struct;
7475 if (get_dbg_flag(hevc) &
7476 H265_DEBUG_PIC_STRUCT) {
7477 hevc_print(hevc, 0,
7478 "parse result pic_struct = %d\n",
7479 hevc->curr_pic_struct);
7480 }
7481 }
7482 break;
7483 case SEI_UserDataITU_T_T35:
7484 p_sei = p;
7485 if (p_sei[0] == 0xB5
7486 && p_sei[1] == 0x00
7487 && p_sei[2] == 0x3C
7488 && p_sei[3] == 0x00
7489 && p_sei[4] == 0x01
7490 && p_sei[5] == 0x04)
7491 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7492
7493 break;
7494 case SEI_MasteringDisplayColorVolume:
7495 /*hevc_print(hevc, 0,
7496 "sei type: primary display color volume %d, size %d\n",
7497 payload_type,
7498 payload_size);*/
7499 /* master_display_colour */
7500 p_sei = p;
7501 for (i = 0; i < 3; i++) {
7502 for (j = 0; j < 2; j++) {
7503 hevc->primaries[i][j]
7504 = (*p_sei<<8)
7505 | *(p_sei+1);
7506 p_sei += 2;
7507 }
7508 }
7509 for (i = 0; i < 2; i++) {
7510 hevc->white_point[i]
7511 = (*p_sei<<8)
7512 | *(p_sei+1);
7513 p_sei += 2;
7514 }
7515 for (i = 0; i < 2; i++) {
7516 hevc->luminance[i]
7517 = (*p_sei<<24)
7518 | (*(p_sei+1)<<16)
7519 | (*(p_sei+2)<<8)
7520 | *(p_sei+3);
7521 p_sei += 4;
7522 }
7523 hevc->sei_present_flag |=
7524 SEI_MASTER_DISPLAY_COLOR_MASK;
7525 /*for (i = 0; i < 3; i++)
7526 for (j = 0; j < 2; j++)
7527 hevc_print(hevc, 0,
7528 "\tprimaries[%1d][%1d] = %04x\n",
7529 i, j,
7530 hevc->primaries[i][j]);
7531 hevc_print(hevc, 0,
7532 "\twhite_point = (%04x, %04x)\n",
7533 hevc->white_point[0],
7534 hevc->white_point[1]);
7535 hevc_print(hevc, 0,
7536 "\tmax,min luminance = %08x, %08x\n",
7537 hevc->luminance[0],
7538 hevc->luminance[1]);*/
7539 break;
7540 case SEI_ContentLightLevel:
7541 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7542 hevc_print(hevc, 0,
7543 "sei type: max content light level %d, size %d\n",
7544 payload_type, payload_size);
7545 /* content_light_level */
7546 p_sei = p;
7547 hevc->content_light_level[0]
7548 = (*p_sei<<8) | *(p_sei+1);
7549 p_sei += 2;
7550 hevc->content_light_level[1]
7551 = (*p_sei<<8) | *(p_sei+1);
7552 p_sei += 2;
7553 hevc->sei_present_flag |=
7554 SEI_CONTENT_LIGHT_LEVEL_MASK;
7555 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7556 hevc_print(hevc, 0,
7557 "\tmax cll = %04x, max_pa_cll = %04x\n",
7558 hevc->content_light_level[0],
7559 hevc->content_light_level[1]);
7560 break;
7561 default:
7562 break;
7563 }
7564 }
7565 p += payload_size;
7566 }
7567 return 0;
7568}
7569
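/*
 * Convert the VUI aspect_ratio_idc (plus explicit SAR for idc 255)
 * into a height/width ratio scaled by 256, following the standard
 * sample aspect ratio table (1:1, 12:11, 10:11, ...).
 */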
7570static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7571 unsigned w, unsigned h)
7572{
7573 unsigned ar;
7574
7575 if (idc == 255) {
7576 ar = div_u64(256ULL * sar_h * h,
7577 sar_w * w);
7578 } else {
7579 switch (idc) {
7580 case 1:
7581 ar = 0x100 * h / w;
7582 break;
7583 case 2:
7584 ar = 0x100 * h * 11 / (w * 12);
7585 break;
7586 case 3:
7587 ar = 0x100 * h * 11 / (w * 10);
7588 break;
7589 case 4:
7590 ar = 0x100 * h * 11 / (w * 16);
7591 break;
7592 case 5:
7593 ar = 0x100 * h * 33 / (w * 40);
7594 break;
7595 case 6:
7596 ar = 0x100 * h * 11 / (w * 24);
7597 break;
7598 case 7:
7599 ar = 0x100 * h * 11 / (w * 20);
7600 break;
7601 case 8:
7602 ar = 0x100 * h * 11 / (w * 32);
7603 break;
7604 case 9:
7605 ar = 0x100 * h * 33 / (w * 80);
7606 break;
7607 case 10:
7608 ar = 0x100 * h * 11 / (w * 18);
7609 break;
7610 case 11:
7611 ar = 0x100 * h * 11 / (w * 15);
7612 break;
7613 case 12:
7614 ar = 0x100 * h * 33 / (w * 64);
7615 break;
7616 case 13:
7617 ar = 0x100 * h * 99 / (w * 160);
7618 break;
7619 case 14:
7620 ar = 0x100 * h * 3 / (w * 4);
7621 break;
7622 case 15:
7623 ar = 0x100 * h * 2 / (w * 3);
7624 break;
7625 case 16:
7626 ar = 0x100 * h * 1 / (w * 2);
7627 break;
7628 default:
7629 ar = h * 0x100 / w;
7630 break;
7631 }
7632 }
7633
7634 return ar;
7635}
7636
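/*
 * Fill the vframe from the decoded picture: double-write scaled
 * width/height, duration, aspect ratio (via calc_ar), SEI parsed
 * from the aux data, signal type and HDR metadata (mastering
 * display colour and content light level).
 */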
7637static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
7638 struct PIC_s *pic)
7639{
7640 unsigned int ar;
7641 int i, j;
7642 char *p;
7643 unsigned size = 0;
7644 unsigned type = 0;
7645 struct vframe_master_display_colour_s *vf_dp
7646 = &vf->prop.master_display_colour;
7647
7648 vf->width = pic->width /
7649 get_double_write_ratio(hevc, pic->double_write_mode);
7650 vf->height = pic->height /
7651 get_double_write_ratio(hevc, pic->double_write_mode);
7652
7653 vf->duration = hevc->frame_dur;
7654 vf->duration_pulldown = 0;
7655 vf->flag = 0;
7656
7657 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
7658 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7659
7660
7661 if (((pic->aspect_ratio_idc == 255) &&
7662 pic->sar_width &&
7663 pic->sar_height) ||
7664 ((pic->aspect_ratio_idc != 255) &&
7665 (pic->width))) {
7666 ar = min_t(u32,
7667 calc_ar(pic->aspect_ratio_idc,
7668 pic->sar_width,
7669 pic->sar_height,
7670 pic->width,
7671 pic->height),
7672 DISP_RATIO_ASPECT_RATIO_MAX);
7673 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7674 }
7675 hevc->ratio_control = vf->ratio_control;
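/*
 * The parsing loop below treats the aux data buffer as a sequence of
 * records: a 4-byte big-endian payload size, a 4-byte big-endian type,
 * then the payload itself.  Records of type 0x02000000 carry SEI data
 * and are handed to parse_sei().
 */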
7676 if (pic->aux_data_buf
7677 && pic->aux_data_size) {
7678 /* parse sei */
7679 p = pic->aux_data_buf;
7680 while (p < pic->aux_data_buf
7681 + pic->aux_data_size - 8) {
7682 size = *p++;
7683 size = (size << 8) | *p++;
7684 size = (size << 8) | *p++;
7685 size = (size << 8) | *p++;
7686 type = *p++;
7687 type = (type << 8) | *p++;
7688 type = (type << 8) | *p++;
7689 type = (type << 8) | *p++;
7690 if (type == 0x02000000) {
7691 /* hevc_print(hevc, 0,
7692 "sei(%d)\n", size); */
7693 parse_sei(hevc, pic, p, size);
7694 }
7695 p += size;
7696 }
7697 }
7698 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
7699 vf->signal_type = pic->video_signal_type;
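/*
 * When an HDR10+ SEI has been seen, bits [15:8] of the signal type
 * (the transfer characteristics field in this packing) are overridden
 * with 0x30, presumably so downstream stages treat the stream as
 * HDR10+.
 */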
7700 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
7701 u32 data;
7702 data = vf->signal_type;
7703 data = data & 0xFFFF00FF;
7704 data = data | (0x30<<8);
7705 vf->signal_type = data;
7706 }
7707 }
7708 else
7709 vf->signal_type = 0;
7710 hevc->video_signal_type_debug = vf->signal_type;
7711
7712 /* master_display_colour */
7713 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
7714 for (i = 0; i < 3; i++)
7715 for (j = 0; j < 2; j++)
7716 vf_dp->primaries[i][j] = hevc->primaries[i][j];
7717 for (i = 0; i < 2; i++) {
7718 vf_dp->white_point[i] = hevc->white_point[i];
7719 vf_dp->luminance[i]
7720 = hevc->luminance[i];
7721 }
7722 vf_dp->present_flag = 1;
7723 } else
7724 vf_dp->present_flag = 0;
7725
7726 /* content_light_level */
7727 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
7728 vf_dp->content_light_level.max_content
7729 = hevc->content_light_level[0];
7730 vf_dp->content_light_level.max_pic_average
7731 = hevc->content_light_level[1];
7732 vf_dp->content_light_level.present_flag = 1;
7733 } else
7734 vf_dp->content_light_level.present_flag = 0;
7735}
7736
7737static int vh265_vf_states(struct vframe_states *states, void *op_arg)
7738{
7739 unsigned long flags;
7740#ifdef MULTI_INSTANCE_SUPPORT
7741 struct vdec_s *vdec = op_arg;
7742 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7743#else
7744 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7745#endif
7746
7747 spin_lock_irqsave(&lock, flags);
7748
7749 states->vf_pool_size = VF_POOL_SIZE;
7750 states->buf_free_num = kfifo_len(&hevc->newframe_q);
7751 states->buf_avail_num = kfifo_len(&hevc->display_q);
7752
7753 if (step == 2)
7754 states->buf_avail_num = 0;
7755 spin_unlock_irqrestore(&lock, flags);
7756 return 0;
7757}
7758
7759static struct vframe_s *vh265_vf_peek(void *op_arg)
7760{
7761 struct vframe_s *vf[2] = {0, 0};
7762#ifdef MULTI_INSTANCE_SUPPORT
7763 struct vdec_s *vdec = op_arg;
7764 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7765#else
7766 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7767#endif
7768
7769 if (step == 2)
7770 return NULL;
7771
7772 if (force_disp_pic_index & 0x100) {
7773 if (force_disp_pic_index & 0x200)
7774 return NULL;
7775 return &hevc->vframe_dummy;
7776 }
7777
7778
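/*
 * Peek two entries so the head vframe can advertise the pts of the
 * frame that follows it via next_vf_pts / next_vf_pts_valid.
 */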
7779 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
7780 if (vf[1]) {
7781 vf[0]->next_vf_pts_valid = true;
7782 vf[0]->next_vf_pts = vf[1]->pts;
7783 } else
7784 vf[0]->next_vf_pts_valid = false;
7785 return vf[0];
7786 }
7787
7788 return NULL;
7789}
7790
7791static struct vframe_s *vh265_vf_get(void *op_arg)
7792{
7793 struct vframe_s *vf;
7794#ifdef MULTI_INSTANCE_SUPPORT
7795 struct vdec_s *vdec = op_arg;
7796 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7797#else
7798 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7799#endif
7800
7801 if (step == 2)
7802 return NULL;
7803 else if (step == 1)
7804 step = 2;
7805
7806#if 0
7807 if (force_disp_pic_index & 0x100) {
7808 int buffer_index = force_disp_pic_index & 0xff;
7809 struct PIC_s *pic = NULL;
7810 if (buffer_index >= 0
7811 && buffer_index < MAX_REF_PIC_NUM)
7812 pic = hevc->m_PIC[buffer_index];
7813 if (pic == NULL)
7814 return NULL;
7815 if (force_disp_pic_index & 0x200)
7816 return NULL;
7817
7818 vf = &hevc->vframe_dummy;
7819 if (get_double_write_mode(hevc)) {
7820 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
7821 VIDTYPE_VIU_NV21;
7822 if (hevc->m_ins_flag) {
7823 vf->canvas0Addr = vf->canvas1Addr = -1;
7824 vf->plane_num = 2;
7825 vf->canvas0_config[0] =
7826 pic->canvas_config[0];
7827 vf->canvas0_config[1] =
7828 pic->canvas_config[1];
7829
7830 vf->canvas1_config[0] =
7831 pic->canvas_config[0];
7832 vf->canvas1_config[1] =
7833 pic->canvas_config[1];
7834 } else {
7835 vf->canvas0Addr = vf->canvas1Addr
7836 = spec2canvas(pic);
7837 }
7838 } else {
7839 vf->canvas0Addr = vf->canvas1Addr = 0;
7840 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
7841 if (hevc->mmu_enable)
7842 vf->type |= VIDTYPE_SCATTER;
7843 }
7844 vf->compWidth = pic->width;
7845 vf->compHeight = pic->height;
7846 update_vf_memhandle(hevc, vf, pic);
7847 switch (hevc->bit_depth_luma) {
7848 case 9:
7849 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
7850 break;
7851 case 10:
7852 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
7853 | BITDEPTH_V10;
7854 break;
7855 default:
7856 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
7857 break;
7858 }
7859 if ((vf->type & VIDTYPE_COMPRESS) == 0)
7860 vf->bitdepth =
7861 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
7862 if (hevc->mem_saving_mode == 1)
7863 vf->bitdepth |= BITDEPTH_SAVING_MODE;
7864 vf->duration_pulldown = 0;
7865 vf->pts = 0;
7866 vf->pts_us64 = 0;
7867 set_frame_info(hevc, vf);
7868
7869 vf->width = pic->width /
7870 get_double_write_ratio(hevc, pic->double_write_mode);
7871 vf->height = pic->height /
7872 get_double_write_ratio(hevc, pic->double_write_mode);
7873
7874 force_disp_pic_index |= 0x200;
7875 return vf;
7876 }
7877#endif
7878
7879 if (kfifo_get(&hevc->display_q, &vf)) {
7880 struct vframe_s *next_vf;
7881 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
7882 hevc_print(hevc, 0,
7883 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
7884 __func__, vf, vf->type, vf->index,
7885 get_pic_poc(hevc, vf->index & 0xff),
7886 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
7887 vf->pts, vf->pts_us64,
7888 vf->duration);
7889#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7890 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
7891 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
7892 if (pic->aux_data_buf && pic->aux_data_size > 0) {
7893 int i;
7896 hevc_print(hevc, 0,
7897 "pic 0x%p aux size %d:\n",
7898 pic, pic->aux_data_size);
7899 for (i = 0; i < pic->aux_data_size; i++) {
7900 hevc_print_cont(hevc, 0,
7901 "%02x ", pic->aux_data_buf[i]);
7902 if (((i + 1) & 0xf) == 0)
7903 hevc_print_cont(hevc, 0, "\n");
7904 }
7905 hevc_print_cont(hevc, 0, "\n");
7906 }
7907 }
7908#endif
7909 hevc->show_frame_num++;
7910 hevc->vf_get_count++;
7911
7912 if (kfifo_peek(&hevc->display_q, &next_vf)) {
7913 vf->next_vf_pts_valid = true;
7914 vf->next_vf_pts = next_vf->pts;
7915 } else
7916 vf->next_vf_pts_valid = false;
7917
7918 return vf;
7919 }
7920
7921 return NULL;
7922}
7923static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
7924 int i;
7925 for (i = 0; i < VF_POOL_SIZE; i++) {
7926 if (vf == &hevc->vfpool[i])
7927 return true;
7928 }
7929 pr_info(" h265 invalid vf has been put, vf = %p\n", vf);
7930 for (i = 0; i < VF_POOL_SIZE; i++) {
7931 pr_info("valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
7932 }
7933 return false;
7934}
7935
7936static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
7937{
7938 unsigned long flags;
7939#ifdef MULTI_INSTANCE_SUPPORT
7940 struct vdec_s *vdec = op_arg;
7941 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7942#else
7943 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7944#endif
7945 unsigned char index_top;
7946 unsigned char index_bot;
7947
7948 if (vf && (vf_valid_check(vf, hevc) == false))
7949 return;
7950 if (vf == (&hevc->vframe_dummy))
7951 return;
7952 index_top = vf->index & 0xff;
7953 index_bot = (vf->index >> 8) & 0xff;
7954 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
7955 hevc_print(hevc, 0,
7956 "%s(type %d index 0x%x)\n",
7957 __func__, vf->type, vf->index);
7958 hevc->vf_put_count++;
7959 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
7960 spin_lock_irqsave(&lock, flags);
7961
7962 if (index_top != 0xff
7963 && index_top < MAX_REF_PIC_NUM
7964 && hevc->m_PIC[index_top]) {
7965 if (hevc->m_PIC[index_top]->vf_ref > 0) {
7966 hevc->m_PIC[index_top]->vf_ref--;
7967
7968 if (hevc->m_PIC[index_top]->vf_ref == 0) {
7969 hevc->m_PIC[index_top]->output_ready = 0;
7970
7971 if (hevc->wait_buf != 0)
7972 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
7973 0x1);
7974 }
7975 }
7976 }
7977
7978 if (index_bot != 0xff
7979 && index_bot < MAX_REF_PIC_NUM
7980 && hevc->m_PIC[index_bot]) {
7981 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
7982 hevc->m_PIC[index_bot]->vf_ref--;
7983
7984 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
7985 hevc->m_PIC[index_bot]->output_ready = 0;
7986 if (hevc->wait_buf != 0)
7987 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
7988 0x1);
7989 }
7990 }
7991 }
7992 spin_unlock_irqrestore(&lock, flags);
7993}
7994
7995static int vh265_event_cb(int type, void *data, void *op_arg)
7996{
7997 unsigned long flags;
7998#ifdef MULTI_INSTANCE_SUPPORT
7999 struct vdec_s *vdec = op_arg;
8000 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8001#else
8002 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8003#endif
8004 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8005#if 0
8006 amhevc_stop();
8007#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8008 vf_light_unreg_provider(&vh265_vf_prov);
8009#endif
8010 spin_lock_irqsave(&hevc->lock, flags);
8011 vh265_local_init();
8012 vh265_prot_init();
8013 spin_unlock_irqrestore(&hevc->lock, flags);
8014#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8015 vf_reg_provider(&vh265_vf_prov);
8016#endif
8017 amhevc_start();
8018#endif
8019 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8020 struct provider_aux_req_s *req =
8021 (struct provider_aux_req_s *)data;
8022 unsigned char index;
8023
8024 spin_lock_irqsave(&lock, flags);
8025 index = req->vf->index & 0xff;
8026 req->aux_buf = NULL;
8027 req->aux_size = 0;
8028 if (req->bot_flag)
8029 index = (req->vf->index >> 8) & 0xff;
8030 if (index != 0xff
8031 && index < MAX_REF_PIC_NUM
8032 && hevc->m_PIC[index]) {
8033 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8034 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8035#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8036 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8037 req->dv_enhance_exist = false;
8038 else
8039 req->dv_enhance_exist =
8040 hevc->m_PIC[index]->dv_enhance_exist;
8041 hevc_print(hevc, H265_DEBUG_DV,
8042 "query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux sizd 0x%x\n",
8043 req->vf,
8044 hevc->m_PIC[index]->POC, index,
8045 req->dv_enhance_exist, req->aux_size);
8046#else
8047 req->dv_enhance_exist = 0;
8048#endif
8049 }
8050 spin_unlock_irqrestore(&lock, flags);
8051
8052 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8053 hevc_print(hevc, 0,
8054 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8055 __func__, type, index, req->aux_size);
8056#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8057 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8058 if ((force_bypass_dvenl & 0x80000000) == 0) {
8059 hevc_print(hevc, 0,
8060 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8061 __func__);
8062 hevc->bypass_dvenl_enable = 1;
8063 }
8064
8065#endif
8066 }
8067 return 0;
8068}
8069
8070#ifdef HEVC_PIC_STRUCT_SUPPORT
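/*
 * Pair a pending single-field vframe with the opposite field of
 * pair_pic and move it to display_q.  If pair_pic has already been
 * recycled (vf_ref <= 0) the pending vframe is sent out on its own,
 * and when more than one frame is pending the oldest one is recycled
 * back to newframe_q first.
 */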
8071static int process_pending_vframe(struct hevc_state_s *hevc,
8072 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8073{
8074 struct vframe_s *vf;
8075
8076 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8077 hevc_print(hevc, 0,
8078 "%s: pair_pic index 0x%x %s\n",
8079 __func__, pair_pic->index,
8080 pair_frame_top_flag ?
8081 "top" : "bot");
8082
8083 if (kfifo_len(&hevc->pending_q) > 1) {
8084 unsigned long flags;
8085 /* do not keep more than 1 frame pending */
8086 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8087 hevc_print(hevc, 0,
8088 "fatal error, no available buffer slot.");
8089 return -1;
8090 }
8091 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8092 hevc_print(hevc, 0,
8093 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8094 __func__, vf->index);
8095 if ((hevc->double_write_mode == 3) &&
8096 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8097 vf->type |= VIDTYPE_COMPRESS;
8098 if (hevc->mmu_enable)
8099 vf->type |= VIDTYPE_SCATTER;
8100 }
8101 hevc->vf_pre_count++;
8102 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8103 spin_lock_irqsave(&lock, flags);
8104 vf->index &= 0xff;
8105 hevc->m_PIC[vf->index]->output_ready = 0;
8106 if (hevc->wait_buf != 0)
8107 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8108 0x1);
8109 spin_unlock_irqrestore(&lock, flags);
8110
8111 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8112 }
8113
8114 if (kfifo_peek(&hevc->pending_q, &vf)) {
8115 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8116 /*
8117 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8118 *do not use it
8119 */
8120 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8121 hevc_print(hevc, 0,
8122 "fatal error, no available buffer slot.");
8123 return -1;
8124 }
8125 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8126 hevc_print(hevc, 0,
8127 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8128 __func__, vf->index);
8129 if (vf) {
8130 if ((hevc->double_write_mode == 3) &&
8131 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8132 vf->type |= VIDTYPE_COMPRESS;
8133 if (hevc->mmu_enable)
8134 vf->type |= VIDTYPE_SCATTER;
8135 }
8136 hevc->vf_pre_count++;
8137 kfifo_put(&hevc->display_q,
8138 (const struct vframe_s *)vf);
8139 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8140 }
8141 } else if ((!pair_frame_top_flag) &&
8142 (((vf->index >> 8) & 0xff) == 0xff)) {
8143 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8144 hevc_print(hevc, 0,
8145 "fatal error, no available buffer slot.");
8146 return -1;
8147 }
8148 if (vf) {
8149 if ((hevc->double_write_mode == 3) &&
8150 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8151 vf->type |= VIDTYPE_COMPRESS;
8152 if (hevc->mmu_enable)
8153 vf->type |= VIDTYPE_SCATTER;
8154 }
8155 vf->index &= 0xff;
8156 vf->index |= (pair_pic->index << 8);
8157 vf->canvas1Addr = spec2canvas(pair_pic);
8158 pair_pic->vf_ref++;
8159 kfifo_put(&hevc->display_q,
8160 (const struct vframe_s *)vf);
8161 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8162 hevc->vf_pre_count++;
8163 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8164 hevc_print(hevc, 0,
8165 "%s vf => display_q: (index 0x%x)\n",
8166 __func__, vf->index);
8167 }
8168 } else if (pair_frame_top_flag &&
8169 ((vf->index & 0xff) == 0xff)) {
8170 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8171 hevc_print(hevc, 0,
8172 "fatal error, no available buffer slot.");
8173 return -1;
8174 }
8175 if (vf) {
8176 if ((hevc->double_write_mode == 3) &&
8177 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8178 vf->type |= VIDTYPE_COMPRESS;
8179 if (hevc->mmu_enable)
8180 vf->type |= VIDTYPE_SCATTER;
8181 }
8182 vf->index &= 0xff00;
8183 vf->index |= pair_pic->index;
8184 vf->canvas0Addr = spec2canvas(pair_pic);
8185 pair_pic->vf_ref++;
8186 kfifo_put(&hevc->display_q,
8187 (const struct vframe_s *)vf);
8188 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8189 hevc->vf_pre_count++;
8190 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8191 hevc_print(hevc, 0,
8192 "%s vf => display_q: (index 0x%x)\n",
8193 __func__, vf->index);
8194 }
8195 }
8196 }
8197 return 0;
8198}
8199#endif
8200static void update_vf_memhandle(struct hevc_state_s *hevc,
8201 struct vframe_s *vf, struct PIC_s *pic)
8202{
8203 if (pic->index < 0) {
8204 vf->mem_handle = NULL;
8205 vf->mem_head_handle = NULL;
8206 } else if (vf->type & VIDTYPE_SCATTER) {
8207 vf->mem_handle =
8208 decoder_mmu_box_get_mem_handle(
8209 hevc->mmu_box, pic->index);
8210 vf->mem_head_handle =
8211 decoder_bmmu_box_get_mem_handle(
8212 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8213 } else {
8214 vf->mem_handle =
8215 decoder_bmmu_box_get_mem_handle(
8216 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8217 vf->mem_head_handle = NULL;
8218 /*vf->mem_head_handle =
8219 decoder_bmmu_box_get_mem_handle(
8220 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8221 }
8222 return;
8223}
8224
8225static void fill_frame_info(struct hevc_state_s *hevc,
8226 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8227{
8228 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8229 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8230 vframe_qos->type = 4;
8231 else if (pic->slice_type == I_SLICE)
8232 vframe_qos->type = 1;
8233 else if (pic->slice_type == P_SLICE)
8234 vframe_qos->type = 2;
8235 else if (pic->slice_type == B_SLICE)
8236 vframe_qos->type = 3;
8237/*
8238#define SHOW_QOS_INFO
8239*/
8240 vframe_qos->size = framesize;
8241 vframe_qos->pts = pts;
8242#ifdef SHOW_QOS_INFO
8243 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8244#endif
8245
8246
8247 vframe_qos->max_mv = pic->max_mv;
8248 vframe_qos->avg_mv = pic->avg_mv;
8249 vframe_qos->min_mv = pic->min_mv;
8250#ifdef SHOW_QOS_INFO
8251 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8252 vframe_qos->max_mv,
8253 vframe_qos->avg_mv,
8254 vframe_qos->min_mv);
8255#endif
8256
8257 vframe_qos->max_qp = pic->max_qp;
8258 vframe_qos->avg_qp = pic->avg_qp;
8259 vframe_qos->min_qp = pic->min_qp;
8260#ifdef SHOW_QOS_INFO
8261 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8262 vframe_qos->max_qp,
8263 vframe_qos->avg_qp,
8264 vframe_qos->min_qp);
8265#endif
8266
8267 vframe_qos->max_skip = pic->max_skip;
8268 vframe_qos->avg_skip = pic->avg_skip;
8269 vframe_qos->min_skip = pic->min_skip;
8270#ifdef SHOW_QOS_INFO
8271 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8272 vframe_qos->max_skip,
8273 vframe_qos->avg_skip,
8274 vframe_qos->min_skip);
8275#endif
8276
8277 vframe_qos->num++;
8278
8279 if (hevc->frameinfo_enable)
8280 vdec_fill_frame_info(vframe_qos, 1);
8281}
8282
8283static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8284{
8285#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8286 struct vdec_s *vdec = hw_to_vdec(hevc);
8287#endif
8288 struct vframe_s *vf = NULL;
8289 int stream_offset = pic->stream_offset;
8290 unsigned short slice_type = pic->slice_type;
8291 u32 frame_size;
8292
8293 if (force_disp_pic_index & 0x100) {
8294 /*recycle directly*/
8295 pic->output_ready = 0;
8296 return -1;
8297 }
8298 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8299 hevc_print(hevc, 0,
8300 "fatal error, no available buffer slot.");
8301 return -1;
8302 }
8303 display_frame_count[hevc->index]++;
8304 if (vf) {
8305 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8306 "%s: pic index 0x%x\n",
8307 __func__, pic->index);*/
8308
8309 if (hevc->is_used_v4l) {
8310 vf->v4l_mem_handle
8311 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8312 if (hevc->mmu_enable) {
8313 if (vdec_v4l_binding_fd_and_vf(vf->v4l_mem_handle, vf) < 0) {
8314 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
8315 "v4l: binding vf fail.\n");
8316 return -1;
8317 }
8318 }
8319 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
8320 "[%d] %s(), v4l mem handle: 0x%lx\n",
8321 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id,
8322 __func__, vf->v4l_mem_handle);
8323 }
8324
8325#ifdef MULTI_INSTANCE_SUPPORT
8326 if (vdec_frame_based(hw_to_vdec(hevc))) {
8327 vf->pts = pic->pts;
8328 vf->pts_us64 = pic->pts64;
8329 vf->timestamp = pic->timestamp;
8330 }
8331 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8332 stream_offset, &vf->pts, 0) != 0) { */
8333#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8334 else if (vdec->master == NULL) {
8335#else
8336 else {
8337#endif
8338#endif
8339 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8340 "call pts_lookup_offset_us64(0x%x)\n",
8341 stream_offset);
8342 if (pts_lookup_offset_us64
8343 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8344 &frame_size, 0,
8345 &vf->pts_us64) != 0) {
8346#ifdef DEBUG_PTS
8347 hevc->pts_missed++;
8348#endif
8349 vf->pts = 0;
8350 vf->pts_us64 = 0;
8351 }
8352#ifdef DEBUG_PTS
8353 else
8354 hevc->pts_hit++;
8355#endif
8356#ifdef MULTI_INSTANCE_SUPPORT
8357#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8358 } else {
8359 vf->pts = 0;
8360 vf->pts_us64 = 0;
8361 }
8362#else
8363 }
8364#endif
8365#endif
8366 if (pts_unstable && (hevc->frame_dur > 0))
8367 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8368
8369 fill_frame_info(hevc, pic, frame_size, vf->pts);
8370
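/*
 * PTS sanity check: while in PTS_NORMAL mode, a run of
 * PTS_MODE_SWITCHING_THRESHOLD frames whose looked-up pts goes
 * backwards switches to PTS_NONE_REF_USE_DURATION, in which non-I
 * frames derive their pts from frame_dur instead; a long enough run
 * of increasing pts values resets the switching counter.
 */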
8371 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8372 && hevc->get_frame_dur) {
8373 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8374
8375 if (pts_diff < 0) {
8376 hevc->pts_mode_switching_count++;
8377 hevc->pts_mode_recovery_count = 0;
8378
8379 if (hevc->pts_mode_switching_count >=
8380 PTS_MODE_SWITCHING_THRESHOLD) {
8381 hevc->pts_mode =
8382 PTS_NONE_REF_USE_DURATION;
8383 hevc_print(hevc, 0,
8384 "HEVC: switch to n_d mode.\n");
8385 }
8386
8387 } else {
8388 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8389
8390 hevc->pts_mode_recovery_count++;
8391 if (hevc->pts_mode_recovery_count > p) {
8392 hevc->pts_mode_switching_count = 0;
8393 hevc->pts_mode_recovery_count = 0;
8394 }
8395 }
8396 }
8397
8398 if (vf->pts != 0)
8399 hevc->last_lookup_pts = vf->pts;
8400
8401 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8402 && (slice_type != 2))
8403 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8404 hevc->last_pts = vf->pts;
8405
8406 if (vf->pts_us64 != 0)
8407 hevc->last_lookup_pts_us64 = vf->pts_us64;
8408
8409 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8410 && (slice_type != 2)) {
8411 vf->pts_us64 =
8412 hevc->last_pts_us64 +
8413 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8414 }
8415 hevc->last_pts_us64 = vf->pts_us64;
8416 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8417 hevc_print(hevc, 0,
8418 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8419 vf->pts, vf->pts_us64);
8420 }
8421
8422 /*
8423 *vf->index:
8424 *(1) vf->type is VIDTYPE_PROGRESSIVE
8425 * and vf->canvas0Addr != vf->canvas1Addr,
8426 * vf->index[7:0] is the index of top pic
8427 * vf->index[15:8] is the index of bot pic
8428 *(2) other cases,
8429 * only vf->index[7:0] is used
8430 * vf->index[15:8] == 0xff
8431 */
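/*
 * For example, a progressive frame from pic->index 5 is published as
 * vf->index == 0xff05 (no bottom field), while a paired field output
 * carries the bottom picture index in bits [15:8].
 */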
8432 vf->index = 0xff00 | pic->index;
8433#if 1
8434/*SUPPORT_10BIT*/
8435 if (pic->double_write_mode & 0x10) {
8436 /* double write only */
8437 vf->compBodyAddr = 0;
8438 vf->compHeadAddr = 0;
8439 } else {
8440
8441 if (hevc->mmu_enable) {
8442 vf->compBodyAddr = 0;
8443 vf->compHeadAddr = pic->header_adr;
8444 } else {
8445 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8446 vf->compHeadAddr = pic->mc_y_adr +
8447 pic->losless_comp_body_size;
8448 vf->mem_head_handle = NULL;
8449 }
8450
8451 /*head adr*/
8452 vf->canvas0Addr = vf->canvas1Addr = 0;
8453 }
8454 if (pic->double_write_mode) {
8455 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8456 vf->type |= VIDTYPE_VIU_NV21;
8457 if ((pic->double_write_mode == 3) &&
8458 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8459 vf->type |= VIDTYPE_COMPRESS;
8460 if (hevc->mmu_enable)
8461 vf->type |= VIDTYPE_SCATTER;
8462 }
8463#ifdef MULTI_INSTANCE_SUPPORT
8464 if (hevc->m_ins_flag &&
8465 (get_dbg_flag(hevc)
8466 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8467 vf->canvas0Addr = vf->canvas1Addr = -1;
8468 vf->plane_num = 2;
8469 vf->canvas0_config[0] =
8470 pic->canvas_config[0];
8471 vf->canvas0_config[1] =
8472 pic->canvas_config[1];
8473
8474 vf->canvas1_config[0] =
8475 pic->canvas_config[0];
8476 vf->canvas1_config[1] =
8477 pic->canvas_config[1];
8478
8479 } else
8480#endif
8481 vf->canvas0Addr = vf->canvas1Addr
8482 = spec2canvas(pic);
8483 } else {
8484 vf->canvas0Addr = vf->canvas1Addr = 0;
8485 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8486 if (hevc->mmu_enable)
8487 vf->type |= VIDTYPE_SCATTER;
8488 }
8489 vf->compWidth = pic->width;
8490 vf->compHeight = pic->height;
8491 update_vf_memhandle(hevc, vf, pic);
8492 switch (pic->bit_depth_luma) {
8493 case 9:
8494 vf->bitdepth = BITDEPTH_Y9;
8495 break;
8496 case 10:
8497 vf->bitdepth = BITDEPTH_Y10;
8498 break;
8499 default:
8500 vf->bitdepth = BITDEPTH_Y8;
8501 break;
8502 }
8503 switch (pic->bit_depth_chroma) {
8504 case 9:
8505 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8506 break;
8507 case 10:
8508 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8509 break;
8510 default:
8511 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8512 break;
8513 }
8514 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8515 vf->bitdepth =
8516 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8517 if (pic->mem_saving_mode == 1)
8518 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8519#else
8520 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8521 vf->type |= VIDTYPE_VIU_NV21;
8522 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8523#endif
8524 set_frame_info(hevc, vf, pic);
8525 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8526 /* hevc_print(hevc, 0,
8527 "aaa: %d/%d, %d/%d\n",
8528 vf->width,vf->height, pic->width, pic->height); */
8529 vf->width = pic->width;
8530 vf->height = pic->height;
8531
8532 if (force_w_h != 0) {
8533 vf->width = (force_w_h >> 16) & 0xffff;
8534 vf->height = force_w_h & 0xffff;
8535 }
8536 if (force_fps & 0x100) {
8537 u32 rate = force_fps & 0xff;
8538
8539 if (rate)
8540 vf->duration = 96000/rate;
8541 else
8542 vf->duration = 0;
8543 }
8544 if (force_fps & 0x200) {
8545 vf->pts = 0;
8546 vf->pts_us64 = 0;
8547 }
8548 /*
8549 * !!! to do ...
8550 * need to move the code below into get_new_pic();
8551 * hevc->xxx fields can only be used by the currently decoded pic
8552 */
8553 if (pic->conformance_window_flag &&
8554 (get_dbg_flag(hevc) &
8555 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
8556 unsigned int SubWidthC, SubHeightC;
8557
8558 switch (pic->chroma_format_idc) {
8559 case 1:
8560 SubWidthC = 2;
8561 SubHeightC = 2;
8562 break;
8563 case 2:
8564 SubWidthC = 2;
8565 SubHeightC = 1;
8566 break;
8567 default:
8568 SubWidthC = 1;
8569 SubHeightC = 1;
8570 break;
8571 }
8572 vf->width -= SubWidthC *
8573 (pic->conf_win_left_offset +
8574 pic->conf_win_right_offset);
8575 vf->height -= SubHeightC *
8576 (pic->conf_win_top_offset +
8577 pic->conf_win_bottom_offset);
8578
8579 vf->compWidth -= SubWidthC *
8580 (pic->conf_win_left_offset +
8581 pic->conf_win_right_offset);
8582 vf->compHeight -= SubHeightC *
8583 (pic->conf_win_top_offset +
8584 pic->conf_win_bottom_offset);
8585
8586 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
8587 hevc_print(hevc, 0,
8588 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
8589 pic->chroma_format_idc,
8590 pic->conf_win_left_offset,
8591 pic->conf_win_right_offset,
8592 pic->conf_win_top_offset,
8593 pic->conf_win_bottom_offset,
8594 vf->width, vf->height, vf->compWidth, vf->compHeight);
8595 }
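/*
 * Example (illustrative values): 4:2:0 content (chroma_format_idc == 1,
 * SubWidthC == SubHeightC == 2) coded as 1920x1088 with
 * conf_win_bottom_offset == 4 is cropped to a display height of
 * 1088 - 2 * 4 = 1080; the width is unchanged.
 */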
8596
8597 vf->width = vf->width /
8598 get_double_write_ratio(hevc, pic->double_write_mode);
8599 vf->height = vf->height /
8600 get_double_write_ratio(hevc, pic->double_write_mode);
8601#ifdef HEVC_PIC_STRUCT_SUPPORT
8602 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
8603 struct vframe_s *vf2;
8604
8605 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8606 hevc_print(hevc, 0,
8607 "pic_struct = %d index 0x%x\n",
8608 pic->pic_struct,
8609 pic->index);
8610
8611 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8612 hevc_print(hevc, 0,
8613 "fatal error, no available buffer slot.");
8614 return -1;
8615 }
8616 pic->vf_ref = 2;
8617 vf->duration = vf->duration>>1;
8618 memcpy(vf2, vf, sizeof(struct vframe_s));
8619
8620 if (pic->pic_struct == 3) {
8621 vf->type = VIDTYPE_INTERLACE_TOP
8622 | VIDTYPE_VIU_NV21;
8623 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8624 | VIDTYPE_VIU_NV21;
8625 } else {
8626 vf->type = VIDTYPE_INTERLACE_BOTTOM
8627 | VIDTYPE_VIU_NV21;
8628 vf2->type = VIDTYPE_INTERLACE_TOP
8629 | VIDTYPE_VIU_NV21;
8630 }
8631 hevc->vf_pre_count++;
8632 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8633 kfifo_put(&hevc->display_q,
8634 (const struct vframe_s *)vf);
8635 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8636 hevc->vf_pre_count++;
8637 kfifo_put(&hevc->display_q,
8638 (const struct vframe_s *)vf2);
8639 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8640 } else if (pic->pic_struct == 5
8641 || pic->pic_struct == 6) {
8642 struct vframe_s *vf2, *vf3;
8643
8644 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8645 hevc_print(hevc, 0,
8646 "pic_struct = %d index 0x%x\n",
8647 pic->pic_struct,
8648 pic->index);
8649
8650 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8651 hevc_print(hevc, 0,
8652 "fatal error, no available buffer slot.");
8653 return -1;
8654 }
8655 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
8656 hevc_print(hevc, 0,
8657 "fatal error, no available buffer slot.");
8658 return -1;
8659 }
8660 pic->vf_ref = 3;
8661 vf->duration = vf->duration/3;
8662 memcpy(vf2, vf, sizeof(struct vframe_s));
8663 memcpy(vf3, vf, sizeof(struct vframe_s));
8664
8665 if (pic->pic_struct == 5) {
8666 vf->type = VIDTYPE_INTERLACE_TOP
8667 | VIDTYPE_VIU_NV21;
8668 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8669 | VIDTYPE_VIU_NV21;
8670 vf3->type = VIDTYPE_INTERLACE_TOP
8671 | VIDTYPE_VIU_NV21;
8672 } else {
8673 vf->type = VIDTYPE_INTERLACE_BOTTOM
8674 | VIDTYPE_VIU_NV21;
8675 vf2->type = VIDTYPE_INTERLACE_TOP
8676 | VIDTYPE_VIU_NV21;
8677 vf3->type = VIDTYPE_INTERLACE_BOTTOM
8678 | VIDTYPE_VIU_NV21;
8679 }
8680 hevc->vf_pre_count++;
8681 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8682 kfifo_put(&hevc->display_q,
8683 (const struct vframe_s *)vf);
8684 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8685 hevc->vf_pre_count++;
8686 kfifo_put(&hevc->display_q,
8687 (const struct vframe_s *)vf2);
8688 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8689 hevc->vf_pre_count++;
8690 kfifo_put(&hevc->display_q,
8691 (const struct vframe_s *)vf3);
8692 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
8693
8694 } else if (pic->pic_struct == 9
8695 || pic->pic_struct == 10) {
8696 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8697 hevc_print(hevc, 0,
8698 "pic_struct = %d index 0x%x\n",
8699 pic->pic_struct,
8700 pic->index);
8701
8702 pic->vf_ref = 1;
8703 /* process previous pending vf*/
8704 process_pending_vframe(hevc,
8705 pic, (pic->pic_struct == 9));
8706
8707 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8708 /* process current vf */
8709 kfifo_put(&hevc->pending_q,
8710 (const struct vframe_s *)vf);
8711 vf->height <<= 1;
8712 if (pic->pic_struct == 9) {
8713 vf->type = VIDTYPE_INTERLACE_TOP
8714 | VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8715 process_pending_vframe(hevc,
8716 hevc->pre_bot_pic, 0);
8717 } else {
8718 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8719 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8720 vf->index = (pic->index << 8) | 0xff;
8721 process_pending_vframe(hevc,
8722 hevc->pre_top_pic, 1);
8723 }
8724
8725 if (hevc->vf_pre_count == 0)
8726 hevc->vf_pre_count++;
8727
8728 /**/
8729 if (pic->pic_struct == 9)
8730 hevc->pre_top_pic = pic;
8731 else
8732 hevc->pre_bot_pic = pic;
8733
8734 } else if (pic->pic_struct == 11
8735 || pic->pic_struct == 12) {
8736 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8737 hevc_print(hevc, 0,
8738 "pic_struct = %d index 0x%x\n",
8739 pic->pic_struct,
8740 pic->index);
8741 pic->vf_ref = 1;
8742 /* process previous pending vf*/
8743 process_pending_vframe(hevc, pic,
8744 (pic->pic_struct == 11));
8745
8746 /* put current into pending q */
8747 vf->height <<= 1;
8748 if (pic->pic_struct == 11)
8749 vf->type = VIDTYPE_INTERLACE_TOP |
8750 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8751 else {
8752 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8753 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8754 vf->index = (pic->index << 8) | 0xff;
8755 }
8756 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8757 kfifo_put(&hevc->pending_q,
8758 (const struct vframe_s *)vf);
8759 if (hevc->vf_pre_count == 0)
8760 hevc->vf_pre_count++;
8761
8762 /**/
8763 if (pic->pic_struct == 11)
8764 hevc->pre_top_pic = pic;
8765 else
8766 hevc->pre_bot_pic = pic;
8767
8768 } else {
8769 pic->vf_ref = 1;
8770
8771 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8772 hevc_print(hevc, 0,
8773 "pic_struct = %d index 0x%x\n",
8774 pic->pic_struct,
8775 pic->index);
8776
8777 switch (pic->pic_struct) {
8778 case 7:
8779 vf->duration <<= 1;
8780 break;
8781 case 8:
8782 vf->duration = vf->duration * 3;
8783 break;
8784 case 1:
8785 vf->height <<= 1;
8786 vf->type = VIDTYPE_INTERLACE_TOP |
8787 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8788 process_pending_vframe(hevc, pic, 1);
8789 hevc->pre_top_pic = pic;
8790 break;
8791 case 2:
8792 vf->height <<= 1;
8793 vf->type = VIDTYPE_INTERLACE_BOTTOM
8794 | VIDTYPE_VIU_NV21
8795 | VIDTYPE_VIU_FIELD;
8796 process_pending_vframe(hevc, pic, 0);
8797 hevc->pre_bot_pic = pic;
8798 break;
8799 }
8800 hevc->vf_pre_count++;
8801 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8802 kfifo_put(&hevc->display_q,
8803 (const struct vframe_s *)vf);
8804 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8805 }
8806#else
8807 vf->type_original = vf->type;
8808 pic->vf_ref = 1;
8809 hevc->vf_pre_count++;
8810 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8811 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
8812 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8813
8814 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8815 hevc_print(hevc, 0,
8816 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8817 __func__, vf->type, vf->index,
8818 get_pic_poc(hevc, vf->index & 0xff),
8819 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8820 vf->pts, vf->pts_us64,
8821 vf->duration);
8822#endif
8823#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8824 /*count info*/
8825 vdec_count_info(gvs, 0, stream_offset);
8826#endif
8827 hw_to_vdec(hevc)->vdec_fps_detec(hw_to_vdec(hevc)->id);
8828 if (without_display_mode == 0) {
8829 vf_notify_receiver(hevc->provider_name,
8830 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
8831 }
8832 else
8833 vh265_vf_put(vh265_vf_get(vdec), vdec);
8834 }
8835
8836 return 0;
8837}
8838
8839static int notify_v4l_eos(struct vdec_s *vdec)
8840{
8841 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
8842 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
8843 struct vframe_s *vf = NULL;
8844 struct vdec_v4l2_buffer *fb = NULL;
8845
8846 if (hw->is_used_v4l && hw->eos) {
8847 if (kfifo_get(&hw->newframe_q, &vf) == 0 || vf == NULL) {
8848 hevc_print(hw, 0,
8849 "%s fatal error, no available buffer slot.\n",
8850 __func__);
8851 return -1;
8852 }
8853
8854 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb)) {
8855 pr_err("[%d] get fb fail.\n", ctx->id);
8856 return -1;
8857 }
8858
8859 vf->type |= VIDTYPE_V4L_EOS;
8860 vf->timestamp = ULONG_MAX;
8861 vf->v4l_mem_handle = (unsigned long)fb;
8862 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
8863
8864 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
8865 vf_notify_receiver(vdec->vf_provider_name,
8866 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
8867
8868 pr_info("[%d] H265 EOS notify.\n", ctx->id);
8869 }
8870
8871 return 0;
8872}
8873
8874static void process_nal_sei(struct hevc_state_s *hevc,
8875 int payload_type, int payload_size)
8876{
8877 unsigned short data;
8878
8879 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8880 hevc_print(hevc, 0,
8881 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
8882 payload_type, payload_size);
8883
8884 if (payload_type == 137) {
8885 int i, j;
8886 /* MASTERING_DISPLAY_COLOUR_VOLUME */
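/*
 * The SEI payload is consumed through a shift register: the next
 * value sits in the top bits of HEVC_SHIFTED_DATA, and writing
 * (1 << 7) | N to HEVC_SHIFT_COMMAND advances the shifter by N bits
 * (16 for the colour volume fields below, 8 when trailing bytes are
 * skipped).
 */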
8887 if (payload_size >= 24) {
8888 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8889 hevc_print(hevc, 0,
8890 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
8891 for (i = 0; i < 3; i++) {
8892 for (j = 0; j < 2; j++) {
8893 data =
8894 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8895 hevc->primaries[i][j] = data;
8896 WRITE_HREG(HEVC_SHIFT_COMMAND,
8897 (1<<7)|16);
8898 if (get_dbg_flag(hevc) &
8899 H265_DEBUG_PRINT_SEI)
8900 hevc_print(hevc, 0,
8901 "\t\tprimaries[%1d][%1d] = %04x\n",
8902 i, j, hevc->primaries[i][j]);
8903 }
8904 }
8905 for (i = 0; i < 2; i++) {
8906 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8907 hevc->white_point[i] = data;
8908 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
8909 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8910 hevc_print(hevc, 0,
8911 "\t\twhite_point[%1d] = %04x\n",
8912 i, hevc->white_point[i]);
8913 }
8914 for (i = 0; i < 2; i++) {
8915 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8916 hevc->luminance[i] = data << 16;
8917 WRITE_HREG(HEVC_SHIFT_COMMAND,
8918 (1<<7)|16);
8919 data =
8920 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
8921 hevc->luminance[i] |= data;
8922 WRITE_HREG(HEVC_SHIFT_COMMAND,
8923 (1<<7)|16);
8924 if (get_dbg_flag(hevc) &
8925 H265_DEBUG_PRINT_SEI)
8926 hevc_print(hevc, 0,
8927 "\t\tluminance[%1d] = %08x\n",
8928 i, hevc->luminance[i]);
8929 }
8930 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
8931 }
8932 payload_size -= 24;
8933 while (payload_size > 0) {
8934 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
8935 payload_size--;
8936 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
8937 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
8938 }
8939 }
8940}
8941
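/*
 * Recover from a stream error without discarding the ES buffer: stop
 * the HEVC core, realign the 64-bit shift byte count to the current
 * read pointer, reset the decoder hardware and restart searching from
 * the next start code.
 */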
8942static int hevc_recover(struct hevc_state_s *hevc)
8943{
8944 int ret = -1;
8945 u32 rem;
8946 u64 shift_byte_count64;
8947 unsigned int hevc_shift_byte_count;
8948 unsigned int hevc_stream_start_addr;
8949 unsigned int hevc_stream_end_addr;
8950 unsigned int hevc_stream_rd_ptr;
8951 unsigned int hevc_stream_wr_ptr;
8952 unsigned int hevc_stream_control;
8953 unsigned int hevc_stream_fifo_ctl;
8954 unsigned int hevc_stream_buf_size;
8955
8956 mutex_lock(&vh265_mutex);
8957#if 0
8958 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
8959 int ii;
8960
8961 for (ii = 0; ii < 4; ii++)
8962 hevc_print(hevc, 0,
8963 "%04x ", hevc->debug_ptr[i + 3 - ii]);
8964 if (((i + ii) & 0xf) == 0)
8965 hevc_print(hevc, 0, "\n");
8966 }
8967#endif
8968#define ES_VID_MAN_RD_PTR (1<<0)
8969 if (!hevc->init_flag) {
8970 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
8971 mutex_unlock(&vh265_mutex);
8972 return ret;
8973 }
8974 amhevc_stop();
8975 msleep(20);
8976 ret = 0;
8977 /* reset */
8978 WRITE_PARSER_REG(PARSER_VIDEO_RP, READ_VREG(HEVC_STREAM_RD_PTR));
8979 SET_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
8980
8981 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
8982 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
8983 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
8984 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
8985 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
8986 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
8987 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
8988
8989 /* The HEVC stream buffer will be reset and restarted
8990 * from the current hevc_stream_rd_ptr position
8991 */
8992 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
8993 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
8994 if ((hevc->shift_byte_count_lo & (1 << 31))
8995 && ((hevc_shift_byte_count & (1 << 31)) == 0))
8996 hevc->shift_byte_count_hi++;
8997
8998 hevc->shift_byte_count_lo = hevc_shift_byte_count;
8999 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9000 hevc->shift_byte_count_lo;
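/*
 * Realign the 64-bit byte count to the new read pointer: round it
 * down to a whole number of stream-buffer laps, add back the offset
 * of hevc_stream_rd_ptr within the buffer, and add one extra lap if
 * the old remainder had already passed the read pointer.
 */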
9001 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9002 shift_byte_count64 -= rem;
9003 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9004
9005 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9006 shift_byte_count64 += hevc_stream_buf_size;
9007
9008 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9009 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
9010
9011 WRITE_VREG(DOS_SW_RESET3,
9012 /* (1<<2)| */
9013 (1 << 3) | (1 << 4) | (1 << 8) |
9014 (1 << 11) | (1 << 12) | (1 << 14)
9015 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9016 WRITE_VREG(DOS_SW_RESET3, 0);
9017
9018 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9019 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9020 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9021 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9022 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9023 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9024 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9025
9026 hevc_config_work_space_hw(hevc);
9027 decoder_hw_reset();
9028
9029 hevc->have_vps = 0;
9030 hevc->have_sps = 0;
9031 hevc->have_pps = 0;
9032
9033 hevc->have_valid_start_slice = 0;
9034
9035 if (get_double_write_mode(hevc) & 0x10)
9036 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9037 0x1 << 31 /* Enable NV21 reference read mode for MC */
9038 );
9039
9040 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9041 /* clear mailbox interrupt */
9042 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9043 /* enable mailbox interrupt */
9044 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9045 /* disable PSCALE for hardware sharing */
9046 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9047
9048 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9049
9050 WRITE_VREG(DEBUG_REG1, 0x0);
9051
9052 if ((error_handle_policy & 1) == 0) {
9053 if ((error_handle_policy & 4) == 0) {
9054 /* ucode auto mode, and do not check vps/sps/pps/idr */
9055 WRITE_VREG(NAL_SEARCH_CTL,
9056 0xc);
9057 } else {
9058 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9059 }
9060 } else {
9061 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9062 }
9063
9064 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9065 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9066 WRITE_VREG(NAL_SEARCH_CTL,
9067 READ_VREG(NAL_SEARCH_CTL)
9068 | ((parser_sei_enable & 0x7) << 17));
9069#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9070 WRITE_VREG(NAL_SEARCH_CTL,
9071 READ_VREG(NAL_SEARCH_CTL) |
9072 ((parser_dolby_vision_enable & 0x1) << 20));
9073#endif
9074 config_decode_mode(hevc);
9075 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9076
9077 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9078 /* amhevc_disable(); */
9079 /* return -EBUSY; */
9080 /* } */
9081#if 0
9082 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9083 int ii;
9084
9085 for (ii = 0; ii < 4; ii++) {
9086 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9087 hevc_print(hevc, 0,
9088 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9089 }
9090 if (((i + ii) & 0xf) == 0)
9091 hevc_print(hevc, 0, "\n");
9092 }
9093#endif
9094 init_pic_list_hw(hevc);
9095
9096 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9097 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9098
9099#ifdef SWAP_HEVC_UCODE
9100 if (!tee_enabled() && hevc->is_swap &&
9101 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9102 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9103 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9104 }
9105#endif
9106 amhevc_start();
9107
9108 /* skip, search next start code */
9109 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9110 hevc->skip_flag = 1;
9111#ifdef ERROR_HANDLE_DEBUG
9112 if (dbg_nal_skip_count & 0x20000) {
9113 dbg_nal_skip_count &= ~0x20000;
9114 mutex_unlock(&vh265_mutex);
9115 return ret;
9116 }
9117#endif
9118 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9119 /* Interrupt Amrisc to execute */
9120 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9121#ifdef MULTI_INSTANCE_SUPPORT
9122 if (!hevc->m_ins_flag)
9123#endif
9124 hevc->first_pic_after_recover = 1;
9125 mutex_unlock(&vh265_mutex);
9126 return ret;
9127}
9128
9129static void dump_aux_buf(struct hevc_state_s *hevc)
9130{
9131 int i;
9132 unsigned short *aux_adr =
9133 (unsigned short *)
9134 hevc->aux_addr;
9135 unsigned int aux_size =
9136 (READ_VREG(HEVC_AUX_DATA_SIZE)
9137 >> 16) << 4;
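/*
 * HEVC_AUX_DATA_SIZE packs both sizes in 16-byte units: the prefix
 * aux size in bits [31:16] and the suffix aux size in bits [15:0],
 * hence the ">> 16" / "& 0xffff" and "<< 4" conversions used here.
 */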
9138
9139 if (hevc->prefix_aux_size > 0) {
9140 hevc_print(hevc, 0,
9141 "prefix aux: (size %d)\n",
9142 aux_size);
9143 for (i = 0; i <
9144 (aux_size >> 1); i++) {
9145 hevc_print_cont(hevc, 0,
9146 "%04x ",
9147 *(aux_adr + i));
9148 if (((i + 1) & 0xf)
9149 == 0)
9150 hevc_print_cont(hevc,
9151 0, "\n");
9152 }
9153 }
9154 if (hevc->suffix_aux_size > 0) {
9155 aux_adr = (unsigned short *)
9156 (hevc->aux_addr +
9157 hevc->prefix_aux_size);
9158 aux_size =
9159 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9160 << 4;
9161 hevc_print(hevc, 0,
9162 "suffix aux: (size %d)\n",
9163 aux_size);
9164 for (i = 0; i <
9165 (aux_size >> 1); i++) {
9166 hevc_print_cont(hevc, 0,
9167 "%04x ", *(aux_adr + i));
9168 if (((i + 1) & 0xf) == 0)
9169 hevc_print_cont(hevc, 0, "\n");
9170 }
9171 }
9172}
9173
9174#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9175static void dolby_get_meta(struct hevc_state_s *hevc)
9176{
9177 struct vdec_s *vdec = hw_to_vdec(hevc);
9178
9179 if (get_dbg_flag(hevc) &
9180 H265_DEBUG_BUFMGR_MORE)
9181 dump_aux_buf(hevc);
9182 if (vdec->dolby_meta_with_el || vdec->slave) {
9183 set_aux_data(hevc,
9184 hevc->cur_pic, 0, 0);
9185 } else if (vdec->master) {
9186 struct hevc_state_s *hevc_ba =
9187 (struct hevc_state_s *)
9188 vdec->master->private;
9189 /*do not use hevc_ba*/
9190 set_aux_data(hevc,
9191 hevc_ba->cur_pic,
9192 0, 1);
9193 set_aux_data(hevc,
9194 hevc->cur_pic, 0, 2);
9195 }
9196}
9197#endif
9198
9199static void read_decode_info(struct hevc_state_s *hevc)
9200{
9201 uint32_t decode_info =
9202 READ_HREG(HEVC_DECODE_INFO);
9203 hevc->start_decoding_flag |=
9204 (decode_info & 0xff);
9205 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9206}
9207
9208static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9209{
9210 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9211 unsigned int dec_status = hevc->dec_status;
9212 int i, ret;
9213
9214#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9215 struct vdec_s *vdec = hw_to_vdec(hevc);
9216#endif
9217
9218 if (hevc->eos)
9219 return IRQ_HANDLED;
9220 if (
9221#ifdef MULTI_INSTANCE_SUPPORT
9222 (!hevc->m_ins_flag) &&
9223#endif
9224 hevc->error_flag == 1) {
9225 if ((error_handle_policy & 0x10) == 0) {
9226 if (hevc->cur_pic) {
9227 int current_lcu_idx =
9228 READ_VREG(HEVC_PARSER_LCU_START)
9229 & 0xffffff;
9230 if (current_lcu_idx <
9231 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9232 hevc->cur_pic->error_mark = 1;
9233
9234 }
9235 }
9236 if ((error_handle_policy & 1) == 0) {
9237 hevc->error_skip_nal_count = 1;
9238 /* manually search for NALs, skip error_skip_nal_count
9239 * NALs, then trigger the HEVC_NAL_SEARCH_DONE irq
9240 */
9241 WRITE_VREG(NAL_SEARCH_CTL,
9242 (error_skip_nal_count << 4) | 0x1);
9243 } else {
9244 hevc->error_skip_nal_count = error_skip_nal_count;
9245 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9246 }
9247 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9248#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9249 || vdec->master
9250 || vdec->slave
9251#endif
9252 ) {
9253 WRITE_VREG(NAL_SEARCH_CTL,
9254 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9255 }
9256 WRITE_VREG(NAL_SEARCH_CTL,
9257 READ_VREG(NAL_SEARCH_CTL)
9258 | ((parser_sei_enable & 0x7) << 17));
9259#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9260 WRITE_VREG(NAL_SEARCH_CTL,
9261 READ_VREG(NAL_SEARCH_CTL) |
9262 ((parser_dolby_vision_enable & 0x1) << 20));
9263#endif
9264 config_decode_mode(hevc);
9265 /* search new nal */
9266 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9267 /* Interrupt Amrisc to execute */
9268 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9269
9270 /* hevc_print(hevc, 0,
9271 *"%s: error handle\n", __func__);
9272 */
9273 hevc->error_flag = 2;
9274 return IRQ_HANDLED;
9275 } else if (
9276#ifdef MULTI_INSTANCE_SUPPORT
9277 (!hevc->m_ins_flag) &&
9278#endif
9279 hevc->error_flag == 3) {
9280 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9281 hevc_recover(hevc);
9282 hevc->error_flag = 0;
9283
9284 if ((error_handle_policy & 0x10) == 0) {
9285 if (hevc->cur_pic) {
9286 int current_lcu_idx =
9287 READ_VREG(HEVC_PARSER_LCU_START)
9288 & 0xffffff;
9289 if (current_lcu_idx <
9290 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9291 hevc->cur_pic->error_mark = 1;
9292
9293 }
9294 }
9295 if ((error_handle_policy & 1) == 0) {
9296 /* need to skip some data when
9297 * error_flag 3 is triggered,
9298 */
9299 /* to avoid hevc_recover() being called
9300 * many times at the same bitstream position
9301 */
9302 hevc->error_skip_nal_count = 1;
9303 /* manually search for NALs, skip error_skip_nal_count
9304 * NALs, then trigger the HEVC_NAL_SEARCH_DONE irq
9305 */
9306 WRITE_VREG(NAL_SEARCH_CTL,
9307 (error_skip_nal_count << 4) | 0x1);
9308 }
9309
9310 if ((error_handle_policy & 0x2) == 0) {
9311 hevc->have_vps = 1;
9312 hevc->have_sps = 1;
9313 hevc->have_pps = 1;
9314 }
9315 return IRQ_HANDLED;
9316 }
9317 if (!hevc->m_ins_flag) {
9318 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9319 if ((hevc->shift_byte_count_lo & (1 << 31))
9320 && ((i & (1 << 31)) == 0))
9321 hevc->shift_byte_count_hi++;
9322 hevc->shift_byte_count_lo = i;
9323 }
9324#ifdef MULTI_INSTANCE_SUPPORT
9325 mutex_lock(&hevc->chunks_mutex);
9326 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9327 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9328 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9329 && (hevc->chunk)) {
9330 hevc->cur_pic->pts = hevc->chunk->pts;
9331 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9332 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9333 }
9334 mutex_unlock(&hevc->chunks_mutex);
9335
9336 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9337 dec_status == HEVC_DECODE_BUFEMPTY2) {
9338 if (hevc->m_ins_flag) {
9339 read_decode_info(hevc);
9340 if (vdec_frame_based(hw_to_vdec(hevc))) {
9341 hevc->empty_flag = 1;
9342 goto pic_done;
9343 } else {
9344 if (
9345#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9346 vdec->master ||
9347 vdec->slave ||
9348#endif
9349 (data_resend_policy & 0x1)) {
9350 hevc->dec_result = DEC_RESULT_AGAIN;
9351 amhevc_stop();
9352 restore_decode_state(hevc);
9353 } else
9354 hevc->dec_result = DEC_RESULT_GET_DATA;
9355 }
9356 reset_process_time(hevc);
9357 vdec_schedule_work(&hevc->work);
9358 }
9359 return IRQ_HANDLED;
9360 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9361 (dec_status == HEVC_NAL_DECODE_DONE)
9362 ) {
9363 if (hevc->m_ins_flag) {
9364 read_decode_info(hevc);
9365 if (vdec_frame_based(hw_to_vdec(hevc))) {
9366 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9367 hevc->empty_flag = 1;
9368 goto pic_done;
9369 } else {
9370 hevc->dec_result = DEC_RESULT_AGAIN;
9371 amhevc_stop();
9372 restore_decode_state(hevc);
9373 }
9374
9375 reset_process_time(hevc);
9376 vdec_schedule_work(&hevc->work);
9377 }
9378
9379 return IRQ_HANDLED;
9380 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9381 if (hevc->m_ins_flag) {
9382 struct PIC_s *pic;
9383 struct PIC_s *pic_display;
9384 int decoded_poc;
9385#ifdef DETREFILL_ENABLE
9386 if (hevc->is_swap &&
9387 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9388 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9389 && READ_VREG(HEVC_SAO_DBG_MODE0))
9390 hevc->delrefill_check = 2;
9391 }
9392#endif
9393 hevc->empty_flag = 0;
9394pic_done:
9395 if (input_frame_based(hw_to_vdec(hevc)) &&
9396 frmbase_cont_bitlevel != 0 &&
9397 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9398 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9399 > frmbase_cont_bitlevel)) {
9400 /* handle the case of multiple pictures in one packet */
9401 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9402 "%s has more data index= %d, size=0x%x shiftcnt=0x%x)\n",
9403 __func__,
9404 hevc->decode_idx, hevc->decode_size,
9405 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9406 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9407 start_process_time(hevc);
9408 return IRQ_HANDLED;
9409 }
9410
9411 read_decode_info(hevc);
9412 get_picture_qos_info(hevc);
9413#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9414 hevc->start_parser_type = 0;
9415 hevc->switch_dvlayer_flag = 0;
9416#endif
9417 hevc->decoded_poc = hevc->curr_POC;
9418 hevc->decoding_pic = NULL;
9419 hevc->dec_result = DEC_RESULT_DONE;
9420#ifdef DETREFILL_ENABLE
9421 if (hevc->is_swap &&
9422 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9423 if (hevc->delrefill_check != 2)
9424#endif
9425
9426 amhevc_stop();
9427
9428 reset_process_time(hevc);
9429
9430 if (hevc->vf_pre_count == 0) {
9431 decoded_poc = hevc->curr_POC;
9432 pic = get_pic_by_POC(hevc, decoded_poc);
9433 if (pic && (pic->POC != INVALID_POC)) {
9434 /*PB skip control */
9435 if (pic->error_mark == 0
9436 && hevc->PB_skip_mode == 1) {
9437 /* start decoding after
9438 * first I
9439 */
9440 hevc->ignore_bufmgr_error |= 0x1;
9441 }
9442 if (hevc->ignore_bufmgr_error & 1) {
9443 if (hevc->PB_skip_count_after_decoding > 0) {
9444 hevc->PB_skip_count_after_decoding--;
9445 } else {
9446 /* start displaying */
9447 hevc->ignore_bufmgr_error |= 0x2;
9448 }
9449 }
9450 if (hevc->mmu_enable
9451 && ((hevc->double_write_mode & 0x10) == 0)) {
9452 if (!hevc->m_ins_flag) {
9453 hevc->used_4k_num =
9454 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9455
9456 if ((!is_skip_decoding(hevc, pic)) &&
9457 (hevc->used_4k_num >= 0) &&
9458 (hevc->cur_pic->scatter_alloc
9459 == 1)) {
9460 hevc_print(hevc,
9461 H265_DEBUG_BUFMGR_MORE,
9462 "%s pic index %d scatter_alloc %d page_start %d\n",
9463 "decoder_mmu_box_free_idx_tail",
9464 hevc->cur_pic->index,
9465 hevc->cur_pic->scatter_alloc,
9466 hevc->used_4k_num);
9467 decoder_mmu_box_free_idx_tail(
9468 hevc->mmu_box,
9469 hevc->cur_pic->index,
9470 hevc->used_4k_num);
9471 hevc->cur_pic->scatter_alloc
9472 = 2;
9473 }
9474 hevc->used_4k_num = -1;
9475 }
9476 }
9477
9478 pic->output_mark = 1;
9479 pic->recon_mark = 1;
9480 }
9481force_output:
9482 pic_display = output_pic(hevc, 1);
9483
9484 if (pic_display) {
9485 if ((pic_display->error_mark &&
9486 ((hevc->ignore_bufmgr_error &
9487 0x2) == 0))
9488 || (get_dbg_flag(hevc) &
9489 H265_DEBUG_DISPLAY_CUR_FRAME)
9490 || (get_dbg_flag(hevc) &
9491 H265_DEBUG_NO_DISPLAY)) {
9492 pic_display->output_ready = 0;
9493 if (get_dbg_flag(hevc) &
9494 H265_DEBUG_BUFMGR) {
9495 hevc_print(hevc, 0,
9496 "[BM] Display: POC %d, ",
9497 pic_display->POC);
9498 hevc_print_cont(hevc, 0,
9499 "decoding index %d ==> ",
9500 pic_display->
9501 decode_idx);
9502 hevc_print_cont(hevc, 0,
9503 "Debug or err,recycle it\n");
9504 }
9505 } else {
9506 if (pic_display->
9507 slice_type != 2) {
9508 pic_display->output_ready = 0;
9509 } else {
9510 prepare_display_buf
9511 (hevc,
9512 pic_display);
9513 hevc->first_pic_flag = 1;
9514 }
9515 }
9516 }
9517 }
9518
9519 vdec_schedule_work(&hevc->work);
9520 }
9521
9522 return IRQ_HANDLED;
9523#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9524 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9525 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9526 if (hevc->m_ins_flag) {
9527 unsigned char next_parser_type =
9528 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
9529 read_decode_info(hevc);
9530
9531 if (vdec->slave &&
9532 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9533 /*cur is base, found enhance*/
9534 struct hevc_state_s *hevc_el =
9535 (struct hevc_state_s *)
9536 vdec->slave->private;
9537 hevc->switch_dvlayer_flag = 1;
9538 hevc->no_switch_dvlayer_count = 0;
9539 hevc_el->start_parser_type =
9540 next_parser_type;
9541 hevc_print(hevc, H265_DEBUG_DV,
9542 "switch (poc %d) to el\n",
9543 hevc->cur_pic ?
9544 hevc->cur_pic->POC :
9545 INVALID_POC);
9546 } else if (vdec->master &&
9547 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
9548 /*cur is enhance, found base*/
9549 struct hevc_state_s *hevc_ba =
9550 (struct hevc_state_s *)
9551 vdec->master->private;
9552 hevc->switch_dvlayer_flag = 1;
9553 hevc->no_switch_dvlayer_count = 0;
9554 hevc_ba->start_parser_type =
9555 next_parser_type;
9556 hevc_print(hevc, H265_DEBUG_DV,
9557 "switch (poc %d) to bl\n",
9558 hevc->cur_pic ?
9559 hevc->cur_pic->POC :
9560 INVALID_POC);
9561 } else {
9562 hevc->switch_dvlayer_flag = 0;
9563 hevc->start_parser_type =
9564 next_parser_type;
9565 hevc->no_switch_dvlayer_count++;
9566 hevc_print(hevc, H265_DEBUG_DV,
9567 "%s: no_switch_dvlayer_count = %d\n",
9568 vdec->master ? "el" : "bl",
9569 hevc->no_switch_dvlayer_count);
9570 if (vdec->slave &&
9571 dolby_el_flush_th != 0 &&
9572 hevc->no_switch_dvlayer_count >
9573 dolby_el_flush_th) {
9574 struct hevc_state_s *hevc_el =
9575 (struct hevc_state_s *)
9576 vdec->slave->private;
9577 struct PIC_s *el_pic;
9578 check_pic_decoded_error(hevc_el,
9579 hevc_el->pic_decoded_lcu_idx);
9580 el_pic = get_pic_by_POC(hevc_el,
9581 hevc_el->curr_POC);
9582 hevc_el->curr_POC = INVALID_POC;
9583 hevc_el->m_pocRandomAccess = MAX_INT;
9584 flush_output(hevc_el, el_pic);
9585 hevc_el->decoded_poc = INVALID_POC; /*
9586 flush_output already called */
9587 hevc_el->decoding_pic = NULL;
9588 hevc->no_switch_dvlayer_count = 0;
9589 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
9590 hevc_print(hevc, 0,
9591 "no el anymore, flush_output el\n");
9592 }
9593 }
9594 hevc->decoded_poc = hevc->curr_POC;
9595 hevc->decoding_pic = NULL;
9596 hevc->dec_result = DEC_RESULT_DONE;
9597 amhevc_stop();
9598 reset_process_time(hevc);
9599 if (aux_data_is_avaible(hevc))
9600 dolby_get_meta(hevc);
9601 if (hevc->cur_pic->slice_type == 2 &&
9602 hevc->vf_pre_count == 0) {
9603 hevc_print(hevc, 0,
9604 "first slice_type %x no_switch_dvlayer_count %x\n",
9605 hevc->cur_pic->slice_type,
9606 hevc->no_switch_dvlayer_count);
9607 goto force_output;
9608 }
9609 vdec_schedule_work(&hevc->work);
9610 }
9611
9612 return IRQ_HANDLED;
9613#endif
9614 }
9615
9616#endif
9617
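 /*
 * SEI payload reported by the ucode: in single-instance mode parse it
 * right here, then acknowledge with HEVC_SEI_DAT_DONE.
 */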
9618 if (dec_status == HEVC_SEI_DAT) {
9619 if (!hevc->m_ins_flag) {
9620 int payload_type =
9621 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
9622 int payload_size =
9623 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
9624 process_nal_sei(hevc,
9625 payload_type, payload_size);
9626 }
9627 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
9628 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
9629 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
9630 int parse_type = HEVC_DISCARD_NAL;
9631
9632 hevc->error_watchdog_count = 0;
9633 hevc->error_skip_nal_wt_cnt = 0;
9634#ifdef MULTI_INSTANCE_SUPPORT
9635 if (hevc->m_ins_flag)
9636 reset_process_time(hevc);
9637#endif
9638 if (slice_parse_begin > 0 &&
9639 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
9640 hevc_print(hevc, 0,
9641 "nal type %d, discard %d\n", naltype,
9642 slice_parse_begin);
9643 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
9644 slice_parse_begin--;
9645 }
9646 if (naltype == NAL_UNIT_EOS) {
9647 struct PIC_s *pic;
9648
9649 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
9650#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9651 if ((vdec->master || vdec->slave) &&
9652 aux_data_is_avaible(hevc)) {
9653 if (hevc->decoding_pic)
9654 dolby_get_meta(hevc);
9655 }
9656#endif
9657 check_pic_decoded_error(hevc,
9658 hevc->pic_decoded_lcu_idx);
9659 pic = get_pic_by_POC(hevc, hevc->curr_POC);
9660 hevc->curr_POC = INVALID_POC;
9661 /* add to fix RAP_B_Bossen_1 */
9662 hevc->m_pocRandomAccess = MAX_INT;
9663 flush_output(hevc, pic);
9664 clear_poc_flag(hevc);
9665 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
9666 /* Interrupt Amrisc to execute */
9667 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9668#ifdef MULTI_INSTANCE_SUPPORT
9669 if (hevc->m_ins_flag) {
9670 hevc->decoded_poc = INVALID_POC; /*
9671 flush_output already called */
9672 hevc->decoding_pic = NULL;
9673 hevc->dec_result = DEC_RESULT_DONE;
9674 amhevc_stop();
9675
9676 vdec_schedule_work(&hevc->work);
9677 }
9678#endif
9679 return IRQ_HANDLED;
9680 }
9681
9682 if (
9683#ifdef MULTI_INSTANCE_SUPPORT
9684 (!hevc->m_ins_flag) &&
9685#endif
9686 hevc->error_skip_nal_count > 0) {
9687 hevc_print(hevc, 0,
9688 "nal type %d, discard %d\n", naltype,
9689 hevc->error_skip_nal_count);
9690 hevc->error_skip_nal_count--;
9691 if (hevc->error_skip_nal_count == 0) {
9692 hevc_recover(hevc);
9693 hevc->error_flag = 0;
9694 if ((error_handle_policy & 0x2) == 0) {
9695 hevc->have_vps = 1;
9696 hevc->have_sps = 1;
9697 hevc->have_pps = 1;
9698 }
9699 return IRQ_HANDLED;
9700 }
9701 } else if (naltype == NAL_UNIT_VPS) {
9702 parse_type = HEVC_NAL_UNIT_VPS;
9703 hevc->have_vps = 1;
9704#ifdef ERROR_HANDLE_DEBUG
9705 if (dbg_nal_skip_flag & 1)
9706 parse_type = HEVC_DISCARD_NAL;
9707#endif
9708 } else if (hevc->have_vps) {
9709 if (naltype == NAL_UNIT_SPS) {
9710 parse_type = HEVC_NAL_UNIT_SPS;
9711 hevc->have_sps = 1;
9712#ifdef ERROR_HANDLE_DEBUG
9713 if (dbg_nal_skip_flag & 2)
9714 parse_type = HEVC_DISCARD_NAL;
9715#endif
9716 } else if (naltype == NAL_UNIT_PPS) {
9717 parse_type = HEVC_NAL_UNIT_PPS;
9718 hevc->have_pps = 1;
9719#ifdef ERROR_HANDLE_DEBUG
9720 if (dbg_nal_skip_flag & 4)
9721 parse_type = HEVC_DISCARD_NAL;
9722#endif
9723 } else if (hevc->have_sps && hevc->have_pps) {
9724 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
9725
9726 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
9727 (naltype ==
9728 NAL_UNIT_CODED_SLICE_IDR_N_LP)
9729 || (naltype ==
9730 NAL_UNIT_CODED_SLICE_CRA)
9731 || (naltype ==
9732 NAL_UNIT_CODED_SLICE_BLA)
9733 || (naltype ==
9734 NAL_UNIT_CODED_SLICE_BLANT)
9735 || (naltype ==
9736 NAL_UNIT_CODED_SLICE_BLA_N_LP)
9737 ) {
9738 if (slice_parse_begin > 0) {
9739 hevc_print(hevc, 0,
9740 "discard %d, for debugging\n",
9741 slice_parse_begin);
9742 slice_parse_begin--;
9743 } else {
9744 parse_type = seg;
9745 }
9746 hevc->have_valid_start_slice = 1;
9747 } else if (naltype <=
9748 NAL_UNIT_CODED_SLICE_CRA
9749 && (hevc->have_valid_start_slice
9750 || (hevc->PB_skip_mode != 3))) {
9751 if (slice_parse_begin > 0) {
9752 hevc_print(hevc, 0,
9753 "discard %d, dd\n",
9754 slice_parse_begin);
9755 slice_parse_begin--;
9756 } else
9757 parse_type = seg;
9758
9759 }
9760 }
9761 }
9762 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
9763 && hevc->have_valid_start_slice &&
9764 hevc->error_flag == 0) {
9765 if ((get_dbg_flag(hevc) &
9766 H265_DEBUG_MAN_SEARCH_NAL) == 0
9767 /* && (!hevc->m_ins_flag)*/) {
9768 /* auto parse NAL; do not check
9769 * vps/sps/pps/idr
9770 */
9771 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
9772 }
9773
9774 if ((get_dbg_flag(hevc) &
9775 H265_DEBUG_NO_EOS_SEARCH_DONE)
9776#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9777 || vdec->master
9778 || vdec->slave
9779#endif
9780 ) {
9781 WRITE_VREG(NAL_SEARCH_CTL,
9782 READ_VREG(NAL_SEARCH_CTL) |
9783 0x10000);
9784 }
9785 WRITE_VREG(NAL_SEARCH_CTL,
9786 READ_VREG(NAL_SEARCH_CTL)
9787 | ((parser_sei_enable & 0x7) << 17));
9788#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9789 WRITE_VREG(NAL_SEARCH_CTL,
9790 READ_VREG(NAL_SEARCH_CTL) |
9791 ((parser_dolby_vision_enable & 0x1) << 20));
9792#endif
9793 config_decode_mode(hevc);
9794 }
9795
9796 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
9797 hevc_print(hevc, 0,
9798 "naltype = %d parse_type %d\n %d %d %d %d\n",
9799 naltype, parse_type, hevc->have_vps,
9800 hevc->have_sps, hevc->have_pps,
9801 hevc->have_valid_start_slice);
9802 }
9803
9804 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
9805 /* Interrupt Amrisc to execute */
9806 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9807#ifdef MULTI_INSTANCE_SUPPORT
9808 if (hevc->m_ins_flag)
9809 start_process_time(hevc);
9810#endif
9811 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
9812#ifdef MULTI_INSTANCE_SUPPORT
9813 if (hevc->m_ins_flag) {
9814 reset_process_time(hevc);
9815 read_decode_info(hevc);
9816
9817 }
9818#endif
9819 if (hevc->start_decoding_time > 0) {
9820 u32 process_time = 1000*
9821 (jiffies - hevc->start_decoding_time)/HZ;
9822 if (process_time > max_decoding_time)
9823 max_decoding_time = process_time;
9824 }
9825
9826 hevc->error_watchdog_count = 0;
9827 if (hevc->pic_list_init_flag == 2) {
9828 hevc->pic_list_init_flag = 3;
9829 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
9830 } else if (hevc->wait_buf == 0) {
9831 u32 vui_time_scale;
9832 u32 vui_num_units_in_tick;
9833 unsigned char reconfig_flag = 0;
9834
9835 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
9836 get_rpm_param(&hevc->param);
9837 else {
9838
9839 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
9840 int ii;
9841
9842 for (ii = 0; ii < 4; ii++) {
9843 hevc->param.l.data[i + ii] =
9844 hevc->rpm_ptr[i + 3
9845 - ii];
9846 }
9847 }
9848#ifdef SEND_LMEM_WITH_RPM
9849 check_head_error(hevc);
9850#endif
9851 }
9852 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
9853 hevc_print(hevc, 0,
9854 "rpm_param: (%d)\n", hevc->slice_idx);
9855 hevc->slice_idx++;
9856 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
9857 hevc_print_cont(hevc, 0,
9858 "%04x ", hevc->param.l.data[i]);
9859 if (((i + 1) & 0xf) == 0)
9860 hevc_print_cont(hevc, 0, "\n");
9861 }
9862
9863 hevc_print(hevc, 0,
9864 "vui_timing_info: %x, %x, %x, %x\n",
9865 hevc->param.p.vui_num_units_in_tick_hi,
9866 hevc->param.p.vui_num_units_in_tick_lo,
9867 hevc->param.p.vui_time_scale_hi,
9868 hevc->param.p.vui_time_scale_lo);
9869 }
9870
9871 if (hevc->is_used_v4l) {
9872 struct aml_vcodec_ctx *ctx =
9873 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
9874
9875 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
9876 struct aml_vdec_pic_infos info;
9877
9878 hevc->frame_width = hevc->param.p.pic_width_in_luma_samples;
9879 hevc->frame_height = hevc->param.p.pic_height_in_luma_samples;
9880 info.visible_width = hevc->frame_width;
9881 info.visible_height = hevc->frame_height;
9882 info.coded_width = ALIGN(hevc->frame_width, 32);
9883 info.coded_height = ALIGN(hevc->frame_height, 32);
9884 info.dpb_size = get_work_pic_num(hevc);
9885 hevc->v4l_params_parsed = true;
9886 /* notify the v4l2 codec */
9887 vdec_v4l_set_pic_infos(ctx, &info);
9888 }
9889 }
9890
9891 if (
9892#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9893 vdec->master == NULL &&
9894 vdec->slave == NULL &&
9895#endif
9896 aux_data_is_avaible(hevc)
9897 ) {
9898
9899 if (get_dbg_flag(hevc) &
9900 H265_DEBUG_BUFMGR_MORE)
9901 dump_aux_buf(hevc);
9902 }
9903
9904 vui_time_scale =
9905 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
9906 hevc->param.p.vui_time_scale_lo;
9907 vui_num_units_in_tick =
9908 (u32)(hevc->param.
9909 p.vui_num_units_in_tick_hi << 16) |
9910 hevc->param.
9911 p.vui_num_units_in_tick_lo;
9912 if (hevc->bit_depth_luma !=
9913 ((hevc->param.p.bit_depth & 0xf) + 8)) {
9914 reconfig_flag = 1;
9915 hevc_print(hevc, 0, "Bit depth luma = %d\n",
9916 (hevc->param.p.bit_depth & 0xf) + 8);
9917 }
9918 if (hevc->bit_depth_chroma !=
9919 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
9920 reconfig_flag = 1;
9921 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
9922 ((hevc->param.p.bit_depth >> 4) &
9923 0xf) + 8);
9924 }
9925 hevc->bit_depth_luma =
9926 (hevc->param.p.bit_depth & 0xf) + 8;
9927 hevc->bit_depth_chroma =
9928 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
9929 bit_depth_luma = hevc->bit_depth_luma;
9930 bit_depth_chroma = hevc->bit_depth_chroma;
9931#ifdef SUPPORT_10BIT
9932 if (hevc->bit_depth_luma == 8 &&
9933 hevc->bit_depth_chroma == 8 &&
9934 enable_mem_saving)
9935 hevc->mem_saving_mode = 1;
9936 else
9937 hevc->mem_saving_mode = 0;
9938#endif
9939 if (reconfig_flag &&
9940 (get_double_write_mode(hevc) & 0x10) == 0)
9941 init_decode_head_hw(hevc);
9942
9943 if ((vui_time_scale != 0)
9944 && (vui_num_units_in_tick != 0)) {
9945 hevc->frame_dur =
9946 div_u64(96000ULL *
9947 vui_num_units_in_tick,
9948 vui_time_scale);
9949 if (hevc->get_frame_dur != true)
9950 vdec_schedule_work(
9951 &hevc->notify_work);
9952
9953 hevc->get_frame_dur = true;
9954#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
9955 gvs->frame_dur = hevc->frame_dur;
9956#endif
9957 }
9958
9959 if (hevc->video_signal_type !=
9960 ((hevc->param.p.video_signal_type << 16)
9961 | hevc->param.p.color_description)) {
9962 u32 v = hevc->param.p.video_signal_type;
9963 u32 c = hevc->param.p.color_description;
9964#if 0
9965 if (v & 0x2000) {
9966 hevc_print(hevc, 0,
9967 "video_signal_type present:\n");
9968 hevc_print(hevc, 0, " %s %s\n",
9969 video_format_names[(v >> 10) & 7],
9970 ((v >> 9) & 1) ?
9971 "full_range" : "limited");
9972 if (v & 0x100) {
9973 hevc_print(hevc, 0,
9974 " color_description present:\n");
9975 hevc_print(hevc, 0,
9976 " color_primarie = %s\n",
9977 color_primaries_names
9978 [v & 0xff]);
9979 hevc_print(hevc, 0,
9980 " transfer_characteristic = %s\n",
9981 transfer_characteristics_names
9982 [(c >> 8) & 0xff]);
9983 hevc_print(hevc, 0,
9984 " matrix_coefficient = %s\n",
9985 matrix_coeffs_names[c & 0xff]);
9986 }
9987 }
9988#endif
9989 hevc->video_signal_type = (v << 16) | c;
9990 video_signal_type = hevc->video_signal_type;
9991 }
9992
9993 if (use_cma &&
9994 (hevc->param.p.slice_segment_address == 0)
9995 && (hevc->pic_list_init_flag == 0)) {
9996 int log = hevc->param.p.log2_min_coding_block_size_minus3;
9997 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
9998
9999 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10000 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
10001 hevc->lcu_size = 1 << (log + 3 + log_s);
10002 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
10003 if (hevc->pic_w == 0 || hevc->pic_h == 0
10004 || hevc->lcu_size == 0
10005 || is_oversize(hevc->pic_w, hevc->pic_h)
10006 || (!hevc->skip_first_nal &&
10007 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
10008 /* skip search next start code */
10009 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10010 & (~0x2));
10011 if (!hevc->skip_first_nal &&
10012 (hevc->pic_h == 96) && (hevc->pic_w == 160))
10013 hevc->skip_first_nal = 1;
10014 hevc->skip_flag = 1;
10015 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10016 /* Interrupt Amrisc to execute */
10017 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10018#ifdef MULTI_INSTANCE_SUPPORT
10019 if (hevc->m_ins_flag)
10020 start_process_time(hevc);
10021#endif
10022 } else {
10023 hevc->sps_num_reorder_pics_0 =
10024 hevc->param.p.sps_num_reorder_pics_0;
10025 hevc->pic_list_init_flag = 1;
10026#ifdef MULTI_INSTANCE_SUPPORT
10027 if (hevc->m_ins_flag) {
10028 vdec_schedule_work(&hevc->work);
10029 } else
10030#endif
10031 up(&h265_sema);
10032 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10033 }
10034 return IRQ_HANDLED;
10035 }
10036
10037}
10038 ret =
10039 hevc_slice_segment_header_process(hevc,
10040 &hevc->param, decode_pic_begin);
10041 if (ret < 0) {
10042#ifdef MULTI_INSTANCE_SUPPORT
10043 if (hevc->m_ins_flag) {
10044 hevc->wait_buf = 0;
10045 hevc->dec_result = DEC_RESULT_AGAIN;
10046 amhevc_stop();
10047 restore_decode_state(hevc);
10048 reset_process_time(hevc);
10049 vdec_schedule_work(&hevc->work);
10050 return IRQ_HANDLED;
10051 }
10052#else
10053 ;
10054#endif
10055 } else if (ret == 0) {
10056 if ((hevc->new_pic) && (hevc->cur_pic)) {
10057 hevc->cur_pic->stream_offset =
10058 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10059 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10060 "read stream_offset = 0x%x\n",
10061 hevc->cur_pic->stream_offset);
10062 hevc->cur_pic->aspect_ratio_idc =
10063 hevc->param.p.aspect_ratio_idc;
10064 hevc->cur_pic->sar_width =
10065 hevc->param.p.sar_width;
10066 hevc->cur_pic->sar_height =
10067 hevc->param.p.sar_height;
10068 }
10069
10070 WRITE_VREG(HEVC_DEC_STATUS_REG,
10071 HEVC_CODED_SLICE_SEGMENT_DAT);
10072 /* Interrupt Amrisc to execute */
10073 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10074
10075 hevc->start_decoding_time = jiffies;
10076#ifdef MULTI_INSTANCE_SUPPORT
10077 if (hevc->m_ins_flag)
10078 start_process_time(hevc);
10079#endif
10080#if 1
10081 /*to do..., copy aux data to hevc->cur_pic*/
10082#endif
10083#ifdef MULTI_INSTANCE_SUPPORT
10084 } else if (hevc->m_ins_flag) {
10085 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10086 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10087 __func__, ret);
10088 hevc->decoded_poc = INVALID_POC;
10089 hevc->decoding_pic = NULL;
10090 hevc->dec_result = DEC_RESULT_DONE;
10091 amhevc_stop();
10092 reset_process_time(hevc);
10093 vdec_schedule_work(&hevc->work);
10094#endif
10095 } else {
10096 /* skip, search next start code */
10097#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10098 gvs->drop_frame_count++;
10099#endif
10100 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10101 hevc->skip_flag = 1;
10102 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10103 /* Interrupt Amrisc to execute */
10104 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10105 }
10106
10107 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10108 hevc_print(hevc, 0, "hevc decode oversize !!\n");
10109#ifdef MULTI_INSTANCE_SUPPORT
10110 if (!hevc->m_ins_flag)
10111 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10112 H265_DEBUG_DIS_SYS_ERROR_PROC);
10113#endif
10114 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10115 }
10116 return IRQ_HANDLED;
10117}
10118
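/*
 * Wait for the HEVC stream fetch engine to stop (HEVC_STREAM_CONTROL bit 1
 * clear) after clearing HEVC_SHIFT_STATUS; give up after roughly 2 seconds
 * with a timeout message.
 */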
10119static void wait_hevc_search_done(struct hevc_state_s *hevc)
10120{
10121 int count = 0;
10122 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10123 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10124 msleep(20);
10125 count++;
10126 if (count > 100) {
10127 hevc_print(hevc, 0, "%s timeout\n", __func__);
10128 break;
10129 }
10130 }
10131}
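/*
 * Top-half interrupt handler: latch HEVC_DEC_STATUS_REG, dump LMEM and the
 * debug registers when the ucode raised a debug tag, handle over-decode in
 * the single-instance case, and defer everything else to the threaded
 * handler (vh265_isr_thread_fn).
 */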
10132static irqreturn_t vh265_isr(int irq, void *data)
10133{
10134 int i, temp;
10135 unsigned int dec_status;
10136 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10137 u32 debug_tag;
10138 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10139
10140 if (hevc->init_flag == 0)
10141 return IRQ_HANDLED;
10142 hevc->dec_status = dec_status;
10143 if (is_log_enable(hevc))
10144 add_log(hevc,
10145 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10146 dec_status, READ_HREG(HEVC_DECODE_INFO),
10147 READ_VREG(HEVC_MPRED_CURR_LCU),
10148 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10149 READ_VREG(HEVC_SHIFT_STATUS));
10150
10151 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10152 hevc_print(hevc, 0,
10153 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10154 dec_status, READ_HREG(HEVC_DECODE_INFO),
10155 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10156 READ_VREG(HEVC_SHIFT_STATUS));
10157
10158 debug_tag = READ_HREG(DEBUG_REG1);
10159 if (debug_tag & 0x10000) {
10160 hevc_print(hevc, 0,
10161 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10162
10163 if (hevc->mmu_enable)
10164 temp = 0x500;
10165 else
10166 temp = 0x400;
10167 for (i = 0; i < temp; i += 4) {
10168 int ii;
10169 if ((i & 0xf) == 0)
10170 hevc_print_cont(hevc, 0, "%03x: ", i);
10171 for (ii = 0; ii < 4; ii++) {
10172 hevc_print_cont(hevc, 0, "%04x ",
10173 hevc->lmem_ptr[i + 3 - ii]);
10174 }
10175 if (((i + ii) & 0xf) == 0)
10176 hevc_print_cont(hevc, 0, "\n");
10177 }
10178
10179 if (((udebug_pause_pos & 0xffff)
10180 == (debug_tag & 0xffff)) &&
10181 (udebug_pause_decode_idx == 0 ||
10182 udebug_pause_decode_idx == hevc->decode_idx) &&
10183 (udebug_pause_val == 0 ||
10184 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10185 udebug_pause_pos &= 0xffff;
10186 hevc->ucode_pause_pos = udebug_pause_pos;
10187 }
10188 else if (debug_tag & 0x20000)
10189 hevc->ucode_pause_pos = 0xffffffff;
10190 if (hevc->ucode_pause_pos)
10191 reset_process_time(hevc);
10192 else
10193 WRITE_HREG(DEBUG_REG1, 0);
10194 } else if (debug_tag != 0) {
10195 hevc_print(hevc, 0,
10196 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10197 READ_HREG(DEBUG_REG2),
10198 READ_VREG(HEVC_STREAM_LEVEL),
10199 READ_VREG(HEVC_STREAM_WR_PTR),
10200 READ_VREG(HEVC_STREAM_RD_PTR));
10201 if (((udebug_pause_pos & 0xffff)
10202 == (debug_tag & 0xffff)) &&
10203 (udebug_pause_decode_idx == 0 ||
10204 udebug_pause_decode_idx == hevc->decode_idx) &&
10205 (udebug_pause_val == 0 ||
10206 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10207 udebug_pause_pos &= 0xffff;
10208 hevc->ucode_pause_pos = udebug_pause_pos;
10209 }
10210 if (hevc->ucode_pause_pos)
10211 reset_process_time(hevc);
10212 else
10213 WRITE_HREG(DEBUG_REG1, 0);
10214 return IRQ_HANDLED;
10215 }
10216
10217
10218 if (hevc->pic_list_init_flag == 1)
10219 return IRQ_HANDLED;
10220
10221 if (!hevc->m_ins_flag) {
10222 if (dec_status == HEVC_OVER_DECODE) {
10223 hevc->over_decode = 1;
10224 hevc_print(hevc, 0,
10225 "isr: over decode\n"),
10226 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10227 return IRQ_HANDLED;
10228 }
10229 }
10230
10231 return IRQ_WAKE_THREAD;
10232
10233}
10234
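/* Worker: re-evaluate the decoder clock after a resolution/frame-rate change. */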
10235static void vh265_set_clk(struct work_struct *work)
10236{
10237 struct hevc_state_s *hevc = container_of(work,
10238 struct hevc_state_s, set_clk_work);
10239
10240 int fps = 96000 / hevc->frame_dur;
10241
10242 if (hevc_source_changed(VFORMAT_HEVC,
10243 hevc->frame_width, hevc->frame_height, fps) > 0)
10244 hevc->saved_resolution = hevc->frame_width *
10245 hevc->frame_height * fps;
10246}
10247
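/*
 * Periodic watchdog timer (PUT_INTERVAL): checks for decode timeouts in
 * multi-instance mode, drives the error/no-progress watchdogs in single
 * instance mode, services the debug hooks (register peek/poke, pic list
 * dump, forced interrupts) and schedules a clock update when needed.
 */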
10248static void vh265_check_timer_func(unsigned long arg)
10249{
10250 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10251 struct timer_list *timer = &hevc->timer;
10252 unsigned char empty_flag;
10253 unsigned int buf_level;
10254
10255 enum receviver_start_e state = RECEIVER_INACTIVE;
10256
10257 if (hevc->init_flag == 0) {
10258 if (hevc->stat & STAT_TIMER_ARM) {
10259 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10260 }
10261 return;
10262 }
10263#ifdef MULTI_INSTANCE_SUPPORT
10264 if (hevc->m_ins_flag &&
10265 (get_dbg_flag(hevc) &
10266 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10267 hw_to_vdec(hevc)->next_status ==
10268 VDEC_STATUS_DISCONNECTED) {
10269 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10270 vdec_schedule_work(&hevc->work);
10271 hevc_print(hevc,
10272 0, "vdec requested to be disconnected\n");
10273 return;
10274 }
10275
10276 if (hevc->m_ins_flag) {
10277 if ((input_frame_based(hw_to_vdec(hevc)) ||
10278 (READ_VREG(HEVC_STREAM_LEVEL) > 0xb0)) &&
10279 ((get_dbg_flag(hevc) &
10280 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10281 (decode_timeout_val > 0) &&
10282 (hevc->start_process_time > 0) &&
10283 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10284 > decode_timeout_val)
10285 ) {
10286 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10287 int current_lcu_idx =
10288 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10289 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10290 if (hevc->last_lcu_idx == current_lcu_idx) {
10291 if (hevc->decode_timeout_count > 0)
10292 hevc->decode_timeout_count--;
10293 if (hevc->decode_timeout_count == 0)
10294 timeout_process(hevc);
10295 } else
10296 restart_process_time(hevc);
10297 hevc->last_lcu_idx = current_lcu_idx;
10298 } else {
10299 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10300 timeout_process(hevc);
10301 }
10302 }
10303 } else {
10304#endif
10305 if (hevc->m_ins_flag == 0 &&
10306 vf_get_receiver(hevc->provider_name)) {
10307 state =
10308 vf_notify_receiver(hevc->provider_name,
10309 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10310 NULL);
10311 if ((state == RECEIVER_STATE_NULL)
10312 || (state == RECEIVER_STATE_NONE))
10313 state = RECEIVER_INACTIVE;
10314 } else
10315 state = RECEIVER_INACTIVE;
10316
10317 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10318 /* error watchdog */
10319 if (hevc->m_ins_flag == 0 &&
10320 (empty_flag == 0)
10321 && (hevc->pic_list_init_flag == 0
10322 || hevc->pic_list_init_flag
10323 == 3)) {
10324 /* decoder has input */
10325 if ((get_dbg_flag(hevc) &
10326 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10327
10328 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10329 /* receiver has no buffer to recycle */
10330 if ((state == RECEIVER_INACTIVE) &&
10331 (kfifo_is_empty(&hevc->display_q) &&
10332 buf_level > 0x200)
10333 ) {
10334 if (hevc->error_flag == 0) {
10335 hevc->error_watchdog_count++;
10336 if (hevc->error_watchdog_count ==
10337 error_handle_threshold) {
10338 hevc_print(hevc, 0,
10339 "H265 dec err local reset.\n");
10340 hevc->error_flag = 1;
10341 hevc->error_watchdog_count = 0;
10342 hevc->error_skip_nal_wt_cnt = 0;
10343 hevc->
10344 error_system_watchdog_count++;
10345 WRITE_VREG
10346 (HEVC_ASSIST_MBOX0_IRQ_REG,
10347 0x1);
10348 }
10349 } else if (hevc->error_flag == 2) {
10350 int th =
10351 error_handle_nal_skip_threshold;
10352 hevc->error_skip_nal_wt_cnt++;
10353 if (hevc->error_skip_nal_wt_cnt
10354 == th) {
10355 hevc->error_flag = 3;
10356 hevc->error_watchdog_count = 0;
10357 hevc->
10358 error_skip_nal_wt_cnt = 0;
10359 WRITE_VREG
10360 (HEVC_ASSIST_MBOX0_IRQ_REG,
10361 0x1);
10362 }
10363 }
10364 }
10365 }
10366
10367 if ((get_dbg_flag(hevc)
10368 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10369 /* receiver has no buffer to recycle */
10370 if ((state == RECEIVER_INACTIVE) &&
10371 (kfifo_is_empty(&hevc->display_q))
10372 ) { /* no buffer to recycle */
10373 if ((get_dbg_flag(hevc) &
10374 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10375 0)
10376 hevc->error_system_watchdog_count++;
10377 if (hevc->error_system_watchdog_count ==
10378 error_handle_system_threshold) {
10379 /* and it lasts for a while */
10380 hevc_print(hevc, 0,
10381 "H265 dec fatal error watchdog.\n");
10382 hevc->
10383 error_system_watchdog_count = 0;
10384 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10385 }
10386 }
10387 } else {
10388 hevc->error_watchdog_count = 0;
10389 hevc->error_system_watchdog_count = 0;
10390 }
10391#ifdef MULTI_INSTANCE_SUPPORT
10392 }
10393#endif
10394 if ((hevc->ucode_pause_pos != 0) &&
10395 (hevc->ucode_pause_pos != 0xffffffff) &&
10396 udebug_pause_pos != hevc->ucode_pause_pos) {
10397 hevc->ucode_pause_pos = 0;
10398 WRITE_HREG(DEBUG_REG1, 0);
10399 }
10400
10401 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10402 dump_pic_list(hevc);
10403 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10404 }
10405 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10406 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10407 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10408 }
10409#ifdef TEST_NO_BUF
10410 if (hevc->wait_buf)
10411 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10412#endif
10413 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
10414 hevc->error_skip_nal_count = error_skip_nal_count;
10415 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10416
10417 debug &= ~H265_DEBUG_HW_RESET;
10418 }
10419
10420#ifdef ERROR_HANDLE_DEBUG
10421 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
10422 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
10423 dbg_nal_skip_count &= ~0x10000;
10424 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10425 }
10426#endif
10427
10428 if (radr != 0) {
10429 if (rval != 0) {
10430 WRITE_VREG(radr, rval);
10431 hevc_print(hevc, 0,
10432 "WRITE_VREG(%x,%x)\n", radr, rval);
10433 } else
10434 hevc_print(hevc, 0,
10435 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
10436 rval = 0;
10437 radr = 0;
10438 }
10439 if (dbg_cmd != 0) {
10440 if (dbg_cmd == 1) {
10441 u32 disp_laddr;
10442
10443 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
10444 get_double_write_mode(hevc) == 0) {
10445 disp_laddr =
10446 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
10447 } else {
10448 struct canvas_s cur_canvas;
10449
10450 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
10451 & 0xff), &cur_canvas);
10452 disp_laddr = cur_canvas.addr;
10453 }
10454 hevc_print(hevc, 0,
10455 "current displayed buffer address %x\r\n",
10456 disp_laddr);
10457 }
10458 dbg_cmd = 0;
10459 }
10460 /* don't change the clock at startup */
10461 if (hevc->m_ins_flag == 0 &&
10462 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
10463 hevc->frame_dur > 0 && hevc->saved_resolution !=
10464 hevc->frame_width * hevc->frame_height *
10465 (96000 / hevc->frame_dur))
10466 vdec_schedule_work(&hevc->set_clk_work);
10467
10468 mod_timer(timer, jiffies + PUT_INTERVAL);
10469}
10470
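/*
 * Kernel thread used in use_cma mode: waits on h265_sema and performs the
 * picture list init (pic_list_init_flag 1 -> 2) and the uninit request
 * outside of interrupt context.
 */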
10471static int h265_task_handle(void *data)
10472{
10473 int ret = 0;
10474 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10475
10476 set_user_nice(current, -10);
10477 while (1) {
10478 if (use_cma == 0) {
10479 hevc_print(hevc, 0,
10480 "ERROR: use_cma can not be changed dynamically\n");
10481 }
10482 ret = down_interruptible(&h265_sema);
10483 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
10484 init_pic_list(hevc);
10485 init_pic_list_hw(hevc);
10486 init_buf_spec(hevc);
10487 hevc->pic_list_init_flag = 2;
10488 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
10489
10490 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10491
10492 }
10493
10494 if (hevc->uninit_list) {
10495 /*USE_BUF_BLOCK*/
10496 uninit_pic_list(hevc);
10497 hevc_print(hevc, 0, "uninit list\n");
10498 hevc->uninit_list = 0;
10499#ifdef USE_UNINIT_SEMA
10500 if (use_cma) {
10501 up(&hevc->h265_uninit_done_sema);
10502 while (!kthread_should_stop())
10503 msleep(1);
10504 break;
10505 }
10506#endif
10507 }
10508 }
10509
10510 return 0;
10511}
10512
10513void vh265_free_cmabuf(void)
10514{
10515 struct hevc_state_s *hevc = gHevc;
10516
10517 mutex_lock(&vh265_mutex);
10518
10519 if (hevc->init_flag) {
10520 mutex_unlock(&vh265_mutex);
10521 return;
10522 }
10523
10524 mutex_unlock(&vh265_mutex);
10525}
10526
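/* Report the current decoder status (resolution, frame rate, error state and,
 * in single-decoder builds, the gvs counters) to the caller.
 */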
10527#ifdef MULTI_INSTANCE_SUPPORT
10528int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
10529#else
10530int vh265_dec_status(struct vdec_info *vstatus)
10531#endif
10532{
10533#ifdef MULTI_INSTANCE_SUPPORT
10534 struct hevc_state_s *hevc =
10535 (struct hevc_state_s *)vdec->private;
10536#else
10537 struct hevc_state_s *hevc = gHevc;
10538#endif
10539 if (!hevc)
10540 return -1;
10541
10542 vstatus->frame_width = hevc->frame_width;
10543 vstatus->frame_height = hevc->frame_height;
10544 if (hevc->frame_dur != 0)
10545 vstatus->frame_rate = 96000 / hevc->frame_dur;
10546 else
10547 vstatus->frame_rate = -1;
10548 vstatus->error_count = 0;
10549 vstatus->status = hevc->stat | hevc->fatal_error;
10550#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10552 vstatus->frame_dur = hevc->frame_dur;
10553 if (gvs) {
10554 vstatus->bit_rate = gvs->bit_rate;
10555 vstatus->frame_data = gvs->frame_data;
10556 vstatus->total_data = gvs->total_data;
10557 vstatus->frame_count = gvs->frame_count;
10558 vstatus->error_frame_count = gvs->error_frame_count;
10559 vstatus->drop_frame_count = gvs->drop_frame_count;
10561 vstatus->samp_cnt = gvs->samp_cnt;
10562 vstatus->offset = gvs->offset;
10563 }
10564 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
10565 "%s", DRIVER_NAME);
10566#endif
10567 vstatus->ratio_control = hevc->ratio_control;
10568 return 0;
10569}
10570
10571int vh265_set_isreset(struct vdec_s *vdec, int isreset)
10572{
10573 is_reset = isreset;
10574 return 0;
10575}
10576
10577static int vh265_vdec_info_init(void)
10578{
10579 gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
10580 if (NULL == gvs) {
10581 pr_info("the struct of vdec status malloc failed.\n");
10582 return -ENOMEM;
10583 }
10584 return 0;
10585}
10586
10587#if 0
10588static void H265_DECODE_INIT(void)
10589{
10590 /* enable hevc clocks */
10591 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
10592 /* *************************************************************** */
10593 /* Power ON HEVC */
10594 /* *************************************************************** */
10595 /* Powerup HEVC */
10596 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
10597 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
10598 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
10599 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
10600 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
10601 /* remove isolations */
10602 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
10603 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
10604
10605}
10606#endif
10607
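/*
 * Program HEVC_DECODE_MODE/HEVC_DECODE_MODE2 according to single vs. multi
 * instance operation, frame- vs. stream-based input and, with Dolby Vision
 * enabled, whether this instance decodes the base or the enhancement layer.
 */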
10608static void config_decode_mode(struct hevc_state_s *hevc)
10609{
10610#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10611 struct vdec_s *vdec = hw_to_vdec(hevc);
10612#endif
10613 unsigned decode_mode;
10614 if (!hevc->m_ins_flag)
10615 decode_mode = DECODE_MODE_SINGLE;
10616 else if (vdec_frame_based(hw_to_vdec(hevc)))
10617 decode_mode =
10618 DECODE_MODE_MULTI_FRAMEBASE;
10619#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10620 else if (vdec->slave) {
10621 if (force_bypass_dvenl & 0x80000000)
10622 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
10623 else
10624 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
10625 if (dolby_meta_with_el && hevc->bypass_dvenl) {
10626 hevc->bypass_dvenl = 0;
10627 hevc_print(hevc, 0,
10628 "NOT support bypass_dvenl when meta_with_el\n");
10629 }
10630 if (hevc->bypass_dvenl)
10631 decode_mode =
10632 (hevc->start_parser_type << 8)
10633 | DECODE_MODE_MULTI_STREAMBASE;
10634 else
10635 decode_mode =
10636 (hevc->start_parser_type << 8)
10637 | DECODE_MODE_MULTI_DVBAL;
10638 } else if (vdec->master)
10639 decode_mode =
10640 (hevc->start_parser_type << 8)
10641 | DECODE_MODE_MULTI_DVENL;
10642#endif
10643 else
10644 decode_mode =
10645 DECODE_MODE_MULTI_STREAMBASE;
10646
10647 if (hevc->m_ins_flag)
10648 decode_mode |=
10649 (hevc->start_decoding_flag << 16);
10650 /* set MBX0 interrupt flag */
10651 decode_mode |= (0x80 << 24);
10652 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
10653 WRITE_VREG(HEVC_DECODE_MODE2,
10654 hevc->rps_set_id);
10655}
10656
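/*
 * Hardware protocol init before starting the ucode: configure the work space,
 * clear/enable the mailbox interrupt, set up the NAL search and SEI/DV
 * parsing control bits, then program the decode mode and the aux buffers.
 */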
10657static void vh265_prot_init(struct hevc_state_s *hevc)
10658{
10659#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10660 struct vdec_s *vdec = hw_to_vdec(hevc);
10661#endif
10662 /* H265_DECODE_INIT(); */
10663
10664 hevc_config_work_space_hw(hevc);
10665
10666 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
10667
10668 WRITE_VREG(HEVC_WAIT_FLAG, 1);
10669
10670 /* WRITE_VREG(P_HEVC_MPSR, 1); */
10671
10672 /* clear mailbox interrupt */
10673 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
10674
10675 /* enable mailbox interrupt */
10676 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
10677
10678 /* disable PSCALE for hardware sharing */
10679 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
10680
10681 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
10682
10683 if ((get_dbg_flag(hevc) &
10684 (H265_DEBUG_MAN_SKIP_NAL |
10685 H265_DEBUG_MAN_SEARCH_NAL))
10686 /*||hevc->m_ins_flag*/
10687 ) {
10688 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
10689 } else {
10690 /* check vps/sps/pps/i-slice in ucode */
10691 unsigned ctl_val = 0x8;
10692 if (hevc->PB_skip_mode == 0)
10693 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
10694 else if (hevc->PB_skip_mode == 3)
10695 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
10696 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
10697 }
10698 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
10699#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10700 || vdec->master
10701 || vdec->slave
10702#endif
10703 )
10704 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
10705
10706 WRITE_VREG(NAL_SEARCH_CTL,
10707 READ_VREG(NAL_SEARCH_CTL)
10708 | ((parser_sei_enable & 0x7) << 17));
10709#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10710 WRITE_VREG(NAL_SEARCH_CTL,
10711 READ_VREG(NAL_SEARCH_CTL) |
10712 ((parser_dolby_vision_enable & 0x1) << 20));
10713#endif
10714 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
10715
10716 config_decode_mode(hevc);
10717 config_aux_buf(hevc);
10718#ifdef SWAP_HEVC_UCODE
10719 if (!tee_enabled() && hevc->is_swap &&
10720 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10721 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
10722 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
10723 }
10724#endif
10725#ifdef DETREFILL_ENABLE
10726 if (hevc->is_swap &&
10727 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10728 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
10729 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
10730 }
10731#endif
10732}
10733
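/*
 * Software-side init: validate the configured resolution, derive frame
 * duration, aspect ratio and the i_only policy from the amstream dec info,
 * and initialize the vframe kfifos before calling hevc_local_init().
 */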
10734static int vh265_local_init(struct hevc_state_s *hevc)
10735{
10736 int i;
10737 int ret = -1;
10738
10739#ifdef DEBUG_PTS
10740 hevc->pts_missed = 0;
10741 hevc->pts_hit = 0;
10742#endif
10743
10744 hevc->saved_resolution = 0;
10745 hevc->get_frame_dur = false;
10746 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
10747 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
10748 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
10749 pr_info("over size : %u x %u.\n",
10750 hevc->frame_width, hevc->frame_height);
10751 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10752 return ret;
10753 }
10754
10755 if (hevc->max_pic_w && hevc->max_pic_h) {
10756 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
10757 ((hevc->max_pic_w * hevc->max_pic_h) >
10758 1920 * 1088) ? true : false;
10759 } else {
10760 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
10761 ((hevc->frame_width * hevc->frame_height) >
10762 1920 * 1088) ? true : false;
10763 }
10764
10765 hevc->frame_dur =
10766 (hevc->vh265_amstream_dec_info.rate ==
10767 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
10768#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10769 gvs->frame_dur = hevc->frame_dur;
10770#endif
10771 if (hevc->frame_width && hevc->frame_height)
10772 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
10773
10774 if (i_only_flag)
10775 hevc->i_only = i_only_flag & 0xff;
10776 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
10777 & 0x08)
10778 hevc->i_only = 0x7;
10779 else
10780 hevc->i_only = 0x0;
10781 hevc->error_watchdog_count = 0;
10782 hevc->sei_present_flag = 0;
10783 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
10784 & 0x40) >> 6;
10785 hevc_print(hevc, 0,
10786 "h265:pts_unstable=%d\n", pts_unstable);
10787/*
10788 *TODO:FOR VERSION
10789 */
10790 hevc_print(hevc, 0,
10791 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
10792 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
10793
10794 if (hevc->frame_dur == 0)
10795 hevc->frame_dur = 96000 / 24;
10796
10797 INIT_KFIFO(hevc->display_q);
10798 INIT_KFIFO(hevc->newframe_q);
10799 INIT_KFIFO(hevc->pending_q);
10800
10801 for (i = 0; i < VF_POOL_SIZE; i++) {
10802 const struct vframe_s *vf = &hevc->vfpool[i];
10803
10804 hevc->vfpool[i].index = -1;
10805 kfifo_put(&hevc->newframe_q, vf);
10806 }
10807
10808
10809 ret = hevc_local_init(hevc);
10810
10811 return ret;
10812}
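/*
 * Decoder init: load the firmware (trying the swap variant on GXM and older
 * SoCs when MMU is enabled), and in single-instance mode also register the
 * ISR, vframe provider and watchdog timer and start the AMRISC ucode. In
 * multi-instance mode only the timer and firmware are prepared here; the
 * rest happens per run.
 */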
10813#ifdef MULTI_INSTANCE_SUPPORT
10814static s32 vh265_init(struct vdec_s *vdec)
10815{
10816 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
10817#else
10818static s32 vh265_init(struct hevc_state_s *hevc)
10819{
10820
10821#endif
10822 int ret, size = -1;
10823 int fw_size = 0x1000 * 16;
10824 struct firmware_s *fw = NULL;
10825
10826 init_timer(&hevc->timer);
10827
10828 hevc->stat |= STAT_TIMER_INIT;
10829
10830 if (hevc->m_ins_flag) {
10831#ifdef USE_UNINIT_SEMA
10832 sema_init(&hevc->h265_uninit_done_sema, 0);
10833#endif
10834 INIT_WORK(&hevc->work, vh265_work);
10835 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
10836 }
10837
10838 if (vh265_local_init(hevc) < 0)
10839 return -EBUSY;
10840
10841 mutex_init(&hevc->chunks_mutex);
10842 INIT_WORK(&hevc->notify_work, vh265_notify_work);
10843 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
10844
10845 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
10846 if (IS_ERR_OR_NULL(fw))
10847 return -ENOMEM;
10848
10849 if (hevc->mmu_enable)
10850 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
10851 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
10852 else {
10853 if (!hevc->is_4k) {
10854 /* if an older version of the fw was loaded, */
10855 /* we need to try the noswap fw because the */
10856 /* old fw package does not contain the swap fw. */
10857 size = get_firmware_data(
10858 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
10859 if (size < 0)
10860 size = get_firmware_data(
10861 VIDEO_DEC_HEVC_MMU, fw->data);
10862 else if (size)
10863 hevc->is_swap = true;
10864 } else
10865 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
10866 fw->data);
10867 }
10868 else
10869 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
10870
10871 if (size < 0) {
10872 pr_err("get firmware fail.\n");
10873 vfree(fw);
10874 return -1;
10875 }
10876
10877 fw->len = size;
10878
10879#ifdef SWAP_HEVC_UCODE
10880 if (!tee_enabled() && hevc->is_swap &&
10881 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10882 if (hevc->mmu_enable) {
10883 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
10884 hevc->mc_cpu_addr =
10885 dma_alloc_coherent(amports_get_dma_device(),
10886 hevc->swap_size,
10887 &hevc->mc_dma_handle, GFP_KERNEL);
10888 if (!hevc->mc_cpu_addr) {
10889 amhevc_disable();
10890 pr_info("vh265 mmu swap ucode loaded fail.\n");
10891 return -ENOMEM;
10892 }
10893
10894 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
10895 hevc->swap_size);
10896
10897 hevc_print(hevc, 0,
10898 "vh265 mmu ucode swap loaded %x\n",
10899 hevc->mc_dma_handle);
10900 }
10901 }
10902#endif
10903
10904#ifdef MULTI_INSTANCE_SUPPORT
10905 if (hevc->m_ins_flag) {
10906 hevc->timer.data = (ulong) hevc;
10907 hevc->timer.function = vh265_check_timer_func;
10908 hevc->timer.expires = jiffies + PUT_INTERVAL;
10909
10910 hevc->fw = fw;
10911
10912 return 0;
10913 }
10914#endif
10915 amhevc_enable();
10916
10917 if (hevc->mmu_enable)
10918 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
10919 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
10920 else {
10921 if (!hevc->is_4k) {
10922 /* if an older version of the fw was loaded, */
10923 /* we need to try the noswap fw because the */
10924 /* old fw package does not contain the swap fw. */
10925 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
10926 "hevc_mmu_swap", fw->data);
10927 if (ret < 0)
10928 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
10929 "h265_mmu", fw->data);
10930 else
10931 hevc->is_swap = true;
10932 } else
10933 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
10934 "h265_mmu", fw->data);
10935 }
10936 else
10937 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
10938
10939 if (ret < 0) {
10940 amhevc_disable();
10941 vfree(fw);
10942 pr_err("H265: the %s fw loading failed, err: %x\n",
10943 tee_enabled() ? "TEE" : "local", ret);
10944 return -EBUSY;
10945 }
10946
10947 vfree(fw);
10948
10949 hevc->stat |= STAT_MC_LOAD;
10950
10951#ifdef DETREFILL_ENABLE
10952 if (hevc->is_swap &&
10953 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
10954 init_detrefill_buf(hevc);
10955#endif
10956 /* enable AMRISC side protocol */
10957 vh265_prot_init(hevc);
10958
10959 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
10960 vh265_isr_thread_fn,
10961 IRQF_ONESHOT,/* run threaded handler with this irq disabled */
10962 "vh265-irq", (void *)hevc)) {
10963 hevc_print(hevc, 0, "vh265 irq register error.\n");
10964 amhevc_disable();
10965 return -ENOENT;
10966 }
10967
10968 hevc->stat |= STAT_ISR_REG;
10969 hevc->provider_name = PROVIDER_NAME;
10970
10971#ifdef MULTI_INSTANCE_SUPPORT
10972 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
10973 &vh265_vf_provider, vdec);
10974 vf_reg_provider(&vh265_vf_prov);
10975 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
10976 NULL);
10977 if (hevc->frame_dur != 0) {
10978 if (!is_reset) {
10979 vf_notify_receiver(hevc->provider_name,
10980 VFRAME_EVENT_PROVIDER_FR_HINT,
10981 (void *)
10982 ((unsigned long)hevc->frame_dur));
10983 fr_hint_status = VDEC_HINTED;
10984 }
10985 } else
10986 fr_hint_status = VDEC_NEED_HINT;
10987#else
10988 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
10989 hevc);
10990 vf_reg_provider(&vh265_vf_prov);
10991 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
10992 if (hevc->frame_dur != 0) {
10993 vf_notify_receiver(PROVIDER_NAME,
10994 VFRAME_EVENT_PROVIDER_FR_HINT,
10995 (void *)
10996 ((unsigned long)hevc->frame_dur));
10997 fr_hint_status = VDEC_HINTED;
10998 } else
10999 fr_hint_status = VDEC_NEED_HINT;
11000#endif
11001 hevc->stat |= STAT_VF_HOOK;
11002
11003 hevc->timer.data = (ulong) hevc;
11004 hevc->timer.function = vh265_check_timer_func;
11005 hevc->timer.expires = jiffies + PUT_INTERVAL;
11006
11007 add_timer(&hevc->timer);
11008
11009 hevc->stat |= STAT_TIMER_ARM;
11010
11011 if (use_cma) {
11012#ifdef USE_UNINIT_SEMA
11013 sema_init(&hevc->h265_uninit_done_sema, 0);
11014#endif
11015 if (h265_task == NULL) {
11016 sema_init(&h265_sema, 1);
11017 h265_task =
11018 kthread_run(h265_task_handle, hevc,
11019 "kthread_h265");
11020 }
11021 }
11022 /* hevc->stat |= STAT_KTHREAD; */
11023#if 0
11024 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11025 hevc_print(hevc, 0, "%s force clk\n", __func__);
11026 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11027 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11028 ((1 << 2) | (1 << 1)));
11029 WRITE_VREG(HEVC_DBLK_CFG0,
11030 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11031 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11032 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11033 (1 << 2)); /* 2 */
11034 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11035 (1 << 24)); /* 24 */
11036 WRITE_VREG(HEVC_STREAM_CONTROL,
11037 READ_VREG(HEVC_STREAM_CONTROL) |
11038 (1 << 15)); /* 15 */
11039 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11040 (1 << 13)); /* 13 */
11041 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11042 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11043 (1 << 15)); /* 15 */
11044 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11045 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11046 (1 << 15)); /* 15 */
11047 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11048 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11049 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11050 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11051 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11052 (1 << 3)); /* 3 */
11053 }
11054#endif
11055#ifdef SWAP_HEVC_UCODE
11056 if (!tee_enabled() && hevc->is_swap &&
11057 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11058 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11059 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11060 }
11061#endif
11062
11063#ifndef MULTI_INSTANCE_SUPPORT
11064 set_vdec_func(&vh265_dec_status);
11065#endif
11066 amhevc_start();
11067 hevc->stat |= STAT_VDEC_RUN;
11068 hevc->init_flag = 1;
11069 error_handle_threshold = 30;
11070 /* pr_info("%d, vh265_init, RP=0x%x\n",
11071 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11072 */
11073
11074 return 0;
11075}
11076
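/* Stop path for single-instance mode: halt the ucode, unregister the IRQ,
 * timer and vframe provider, then release the buffers and the gvs state.
 */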
11077static int vh265_stop(struct hevc_state_s *hevc)
11078{
11079 if (get_dbg_flag(hevc) &
11080 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11081 int wait_timeout_count = 0;
11082
11083 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11084 HEVC_CODED_SLICE_SEGMENT_DAT &&
11085 wait_timeout_count < 10){
11086 wait_timeout_count++;
11087 msleep(20);
11088 }
11089 }
11090 if (hevc->stat & STAT_VDEC_RUN) {
11091 amhevc_stop();
11092 hevc->stat &= ~STAT_VDEC_RUN;
11093 }
11094
11095 if (hevc->stat & STAT_ISR_REG) {
11096#ifdef MULTI_INSTANCE_SUPPORT
11097 if (!hevc->m_ins_flag)
11098#endif
11099 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11100 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11101 hevc->stat &= ~STAT_ISR_REG;
11102 }
11103
11104 hevc->stat &= ~STAT_TIMER_INIT;
11105 if (hevc->stat & STAT_TIMER_ARM) {
11106 del_timer_sync(&hevc->timer);
11107 hevc->stat &= ~STAT_TIMER_ARM;
11108 }
11109
11110 if (hevc->stat & STAT_VF_HOOK) {
11111 if (fr_hint_status == VDEC_HINTED) {
11112 vf_notify_receiver(hevc->provider_name,
11113 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11114 NULL);
11115 }
11116 fr_hint_status = VDEC_NO_NEED_HINT;
11117 vf_unreg_provider(&vh265_vf_prov);
11118 hevc->stat &= ~STAT_VF_HOOK;
11119 }
11120
11121 hevc_local_uninit(hevc);
11122
11123 if (use_cma) {
11124 hevc->uninit_list = 1;
11125 up(&h265_sema);
11126#ifdef USE_UNINIT_SEMA
11127 down(&hevc->h265_uninit_done_sema);
11128 if (!IS_ERR(h265_task)) {
11129 kthread_stop(h265_task);
11130 h265_task = NULL;
11131 }
11132#else
11133 while (hevc->uninit_list) /* wait uninit complete */
11134 msleep(20);
11135#endif
11136
11137 }
11138 hevc->init_flag = 0;
11139 hevc->first_sc_checked = 0;
11140 cancel_work_sync(&hevc->notify_work);
11141 cancel_work_sync(&hevc->set_clk_work);
11142 uninit_mmu_buffers(hevc);
11143 amhevc_disable();
11144
11145 kfree(gvs);
11146 gvs = NULL;
11147
11148 return 0;
11149}
11150
11151#ifdef MULTI_INSTANCE_SUPPORT
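/* Decode time bookkeeping for the per-instance watchdog; reset_process_time()
 * also records the longest observed decode time in max_process_time[].
 */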
11152static void reset_process_time(struct hevc_state_s *hevc)
11153{
11154 if (hevc->start_process_time) {
11155 unsigned int process_time =
11156 1000 * (jiffies - hevc->start_process_time) / HZ;
11157 hevc->start_process_time = 0;
11158 if (process_time > max_process_time[hevc->index])
11159 max_process_time[hevc->index] = process_time;
11160 }
11161}
11162
11163static void start_process_time(struct hevc_state_s *hevc)
11164{
11165 hevc->start_process_time = jiffies;
11166 hevc->decode_timeout_count = 2;
11167 hevc->last_lcu_idx = 0;
11168}
11169
11170static void restart_process_time(struct hevc_state_s *hevc)
11171{
11172 hevc->start_process_time = jiffies;
11173 hevc->decode_timeout_count = 2;
11174}
11175
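/* Handle a decode timeout: stop the ucode, mark the current picture as done
 * and hand off to the timeout work item, unless vh265_work is already pending.
 */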
11176static void timeout_process(struct hevc_state_s *hevc)
11177{
11178 /*
11179 * If vh265_work is already pending at this timeout point,
11180 * let it handle the scenario.
11181 */
11182 if (work_pending(&hevc->work))
11183 return;
11184
11185 hevc->timeout_num++;
11186 amhevc_stop();
11187 read_decode_info(hevc);
11188
11189 hevc_print(hevc,
11190 0, "%s decoder timeout\n", __func__);
11191 check_pic_decoded_error(hevc,
11192 hevc->pic_decoded_lcu_idx);
11193 hevc->decoded_poc = hevc->curr_POC;
11194 hevc->decoding_pic = NULL;
11195 hevc->dec_result = DEC_RESULT_DONE;
11196 reset_process_time(hevc);
11197
11198 if (work_pending(&hevc->work))
11199 return;
11200 vdec_schedule_work(&hevc->timeout_work);
11201}
11202
11203#ifdef CONSTRAIN_MAX_BUF_NUM
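/* Helpers for the max-buffer constraint: count buffers held only for display
 * and buffers that are still in use in any way.
 */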
11204static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11205{
11206 struct PIC_s *pic;
11207 int i;
11208 int count = 0;
11209 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11210 pic = hevc->m_PIC[i];
11211 if (pic == NULL || pic->index == -1)
11212 continue;
11213 if (pic->output_mark == 0 && pic->referenced == 0
11214 && pic->output_ready == 1)
11215 count++;
11216 }
11217
11218 return count;
11219}
11220
11221static int get_used_buf_count(struct hevc_state_s *hevc)
11222{
11223 struct PIC_s *pic;
11224 int i;
11225 int count = 0;
11226 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11227 pic = hevc->m_PIC[i];
11228 if (pic == NULL || pic->index == -1)
11229 continue;
11230 if (pic->output_mark != 0 || pic->referenced != 0
11231 || pic->output_ready != 0)
11232 count++;
11233 }
11234
11235 return count;
11236}
11237#endif
11238
11239
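/*
 * Check whether a free PIC slot is available for the next frame. If none is
 * free while nearly all slots are referenced and the receiver is inactive,
 * error-marked references are released and every picture is flagged as in
 * error so that decoding can make progress (see the comment below).
 */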
11240static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11241{
11242 struct PIC_s *new_pic = NULL;
11243 struct PIC_s *pic;
11244 /* recycle un-used pic */
11245 int i;
11246 int ref_pic = 0;
11247 struct vdec_s *vdec = hw_to_vdec(hevc);
11248 /*return 1 if pic_list is not initialized yet*/
11249 if (hevc->pic_list_init_flag != 3)
11250 return 1;
11251
11252 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11253 pic = hevc->m_PIC[i];
11254 if (pic == NULL || pic->index == -1)
11255 continue;
11256 if (pic->referenced == 1)
11257 ref_pic++;
11258 if (pic->output_mark == 0 && pic->referenced == 0
11259 && pic->output_ready == 0
11260 ) {
11261 if (new_pic) {
11262 if (pic->POC < new_pic->POC)
11263 new_pic = pic;
11264 } else
11265 new_pic = pic;
11266 }
11267 }
11268/* If the number of referenced frames in the DPB >= (DPB buffer size - number of reorder pics - 3) */
11269/* and the back-end state is RECEIVER_INACTIVE, the decoder is left with no buffer to decode into; */
11270/* in that case all reference frames are released and the error flag is set. */
11271/* The 3 accounts for the 2 fields needed by the back-end display and the 1 field needed for */
11272/* decoding when the stream is interlaced. */
11273 if ((new_pic == NULL) &&
11274 (ref_pic >=
11275 get_work_pic_num(hevc) -
11276 hevc->sps_num_reorder_pics_0 - 3)) {
11277 enum receviver_start_e state = RECEIVER_INACTIVE;
11278 if (vf_get_receiver(vdec->vf_provider_name)) {
11279 state =
11280 vf_notify_receiver(vdec->vf_provider_name,
11281 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11282 NULL);
11283 if ((state == RECEIVER_STATE_NULL)
11284 || (state == RECEIVER_STATE_NONE))
11285 state = RECEIVER_INACTIVE;
11286 }
11287 if (state == RECEIVER_INACTIVE) {
11288 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11289 pic = hevc->m_PIC[i];
11290 if (pic == NULL || pic->index == -1)
11291 continue;
11292
11293 if ((pic->referenced == 1) &&
11294 (pic->error_mark == 1)) {
11295 pic->referenced = 0;
11296 put_mv_buf(hevc, pic);
11297 }
11298 pic->error_mark = 1;
11299 }
11300 }
11301 }
11302
11303 return (new_pic != NULL) ? 1 : 0;
11304}
11305
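/* Stop path for multi-instance mode: tear down the timer, ucode and ISR,
 * flush the work items, then free the MMU buffers and the firmware copy.
 */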
11306static int vmh265_stop(struct hevc_state_s *hevc)
11307{
11308 if (hevc->stat & STAT_TIMER_ARM) {
11309 del_timer_sync(&hevc->timer);
11310 hevc->stat &= ~STAT_TIMER_ARM;
11311 }
11312 if (hevc->stat & STAT_VDEC_RUN) {
11313 amhevc_stop();
11314 hevc->stat &= ~STAT_VDEC_RUN;
11315 }
11316 if (hevc->stat & STAT_ISR_REG) {
11317 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11318 hevc->stat &= ~STAT_ISR_REG;
11319 }
11320
11321 if (hevc->stat & STAT_VF_HOOK) {
11322 if (fr_hint_status == VDEC_HINTED)
11323 vf_notify_receiver(hevc->provider_name,
11324 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11325 NULL);
11326 fr_hint_status = VDEC_NO_NEED_HINT;
11327 vf_unreg_provider(&vh265_vf_prov);
11328 hevc->stat &= ~STAT_VF_HOOK;
11329 }
11330
11331 hevc_local_uninit(hevc);
11332
11333 if (use_cma) {
11334 hevc->uninit_list = 1;
11335 reset_process_time(hevc);
11336 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
11337 vdec_schedule_work(&hevc->work);
11338 flush_work(&hevc->work);
11339#ifdef USE_UNINIT_SEMA
11340 if (hevc->init_flag) {
11341 down(&hevc->h265_uninit_done_sema);
11342 }
11343#else
11344 while (hevc->uninit_list) /* wait uninit complete */
11345 msleep(20);
11346#endif
11347 }
11348 hevc->init_flag = 0;
11349 hevc->first_sc_checked = 0;
11350 cancel_work_sync(&hevc->notify_work);
11351 cancel_work_sync(&hevc->set_clk_work);
11352 cancel_work_sync(&hevc->work);
11353 cancel_work_sync(&hevc->timeout_work);
11354 uninit_mmu_buffers(hevc);
11355
11356 vfree(hevc->fw);
11357 hevc->fw = NULL;
11358
11359 dump_log(hevc);
11360 return 0;
11361}
11362
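/* Byte-wise checksum of the current input chunk, used only for debug logging. */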
11363static unsigned char get_data_check_sum
11364 (struct hevc_state_s *hevc, int size)
11365{
11366 int jj;
11367 int sum = 0;
11368 u8 *data = NULL;
11369
11370 if (!hevc->chunk->block->is_mapped)
11371 data = codec_mm_vmap(hevc->chunk->block->start +
11372 hevc->chunk->offset, size);
11373 else
11374 data = ((u8 *)hevc->chunk->block->start_virt) +
11375 hevc->chunk->offset;
11376
11377 for (jj = 0; jj < size; jj++)
11378 sum += data[jj];
11379
11380 if (!hevc->chunk->block->is_mapped)
11381 codec_mm_unmap_phyaddr(data);
11382 return sum;
11383}
11384
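/* Worker: send the frame-rate hint to the vframe receiver once frame_dur is
 * known.
 */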
11385static void vh265_notify_work(struct work_struct *work)
11386{
11387 struct hevc_state_s *hevc =
11388 container_of(work,
11389 struct hevc_state_s,
11390 notify_work);
11391 struct vdec_s *vdec = hw_to_vdec(hevc);
11392#ifdef MULTI_INSTANCE_SUPPORT
11393 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
11394 vf_notify_receiver(hevc->provider_name,
11395 VFRAME_EVENT_PROVIDER_FR_HINT,
11396 (void *)
11397 ((unsigned long)hevc->frame_dur));
11398 vdec->fr_hint_state = VDEC_HINTED;
11399 } else if (fr_hint_status == VDEC_NEED_HINT) {
11400 vf_notify_receiver(hevc->provider_name,
11401 VFRAME_EVENT_PROVIDER_FR_HINT,
11402 (void *)
11403 ((unsigned long)hevc->frame_dur));
11404 fr_hint_status = VDEC_HINTED;
11405 }
11406#else
11407 if (fr_hint_status == VDEC_NEED_HINT) {
11408 vf_notify_receiver(PROVIDER_NAME,
11409 VFRAME_EVENT_PROVIDER_FR_HINT,
11410 (void *)
11411 ((unsigned long)hevc->frame_dur));
11412 fr_hint_status = VDEC_HINTED;
11413 }
11414#endif
11415
11416 return;
11417}
11418
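/*
 * Bottom-half work, run after the ISR or the timeout path has chosen a
 * dec_result: handles buffer list (un)init, refills input for the GET_DATA
 * results, and processes the completion paths before handing the hardware
 * back to the vdec core.
 */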
11419static void vh265_work_implement(struct hevc_state_s *hevc,
11420 struct vdec_s *vdec,int from)
11421{
11422 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
11423 /*USE_BUF_BLOCK*/
11424 uninit_pic_list(hevc);
11425 hevc_print(hevc, 0, "uninit list\n");
11426 hevc->uninit_list = 0;
11427#ifdef USE_UNINIT_SEMA
11428 up(&hevc->h265_uninit_done_sema);
11429#endif
11430 return;
11431 }
11432
11433 /* finished decoding one frame or error,
11434 * notify vdec core to switch context
11435 */
11436 if (hevc->pic_list_init_flag == 1
11437 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
11438 hevc->pic_list_init_flag = 2;
11439 init_pic_list(hevc);
11440 init_pic_list_hw(hevc);
11441 init_buf_spec(hevc);
11442 hevc_print(hevc, 0,
11443 "set pic_list_init_flag to 2\n");
11444
11445 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11446 return;
11447 }
11448
11449 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11450 "%s dec_result %d %x %x %x\n",
11451 __func__,
11452 hevc->dec_result,
11453 READ_VREG(HEVC_STREAM_LEVEL),
11454 READ_VREG(HEVC_STREAM_WR_PTR),
11455 READ_VREG(HEVC_STREAM_RD_PTR));
11456
11457 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
11458 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
11459 && (hw_to_vdec(hevc)->next_status !=
11460 VDEC_STATUS_DISCONNECTED)) {
11461 if (!vdec_has_more_input(vdec)) {
11462 hevc->dec_result = DEC_RESULT_EOS;
11463 vdec_schedule_work(&hevc->work);
11464 return;
11465 }
11466 if (!input_frame_based(vdec)) {
11467 int r = vdec_sync_input(vdec);
11468 if (r >= 0x200) {
11469 WRITE_VREG(HEVC_DECODE_SIZE,
11470 READ_VREG(HEVC_DECODE_SIZE) + r);
11471
11472 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11473 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
11474 __func__,
11475 READ_VREG(HEVC_STREAM_LEVEL),
11476 READ_VREG(HEVC_STREAM_WR_PTR),
11477 READ_VREG(HEVC_STREAM_RD_PTR),
11478 READ_VREG(HEVC_MPC_E), r);
11479
11480 start_process_time(hevc);
11481 if (READ_VREG(HEVC_DEC_STATUS_REG)
11482 == HEVC_DECODE_BUFEMPTY2)
11483 WRITE_VREG(HEVC_DEC_STATUS_REG,
11484 HEVC_ACTION_DONE);
11485 else
11486 WRITE_VREG(HEVC_DEC_STATUS_REG,
11487 HEVC_ACTION_DEC_CONT);
11488 } else {
11489 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11490 vdec_schedule_work(&hevc->work);
11491 }
11492 return;
11493 }
11494
11495 /*below for frame_base*/
11496 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
11497 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11498 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
11499 __func__,
11500 READ_VREG(HEVC_STREAM_LEVEL),
11501 READ_VREG(HEVC_STREAM_WR_PTR),
11502 READ_VREG(HEVC_STREAM_RD_PTR),
11503 READ_VREG(HEVC_MPC_E));
11504 mutex_lock(&hevc->chunks_mutex);
11505 vdec_vframe_dirty(vdec, hevc->chunk);
11506 hevc->chunk = NULL;
11507 mutex_unlock(&hevc->chunks_mutex);
11508 vdec_clean_input(vdec);
11509 }
11510
11511 /*if (is_new_pic_available(hevc)) {*/
11512 if (run_ready(vdec, VDEC_HEVC)) {
11513 int r;
11514 int decode_size;
11515 r = vdec_prepare_input(vdec, &hevc->chunk);
11516 if (r < 0) {
11517 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11518
11519 hevc_print(hevc,
11520 PRINT_FLAG_VDEC_DETAIL,
11521 "amvdec_vh265: Insufficient data\n");
11522
11523 vdec_schedule_work(&hevc->work);
11524 return;
11525 }
11526 hevc->dec_result = DEC_RESULT_NONE;
11527 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11528 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
11529 __func__, r,
11530 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
11531 get_data_check_sum(hevc, r) : 0,
11532 READ_VREG(HEVC_MPC_E));
11533
11534 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
11535 int jj;
11536 u8 *data = NULL;
11537
11538 if (!hevc->chunk->block->is_mapped)
11539 data = codec_mm_vmap(
11540 hevc->chunk->block->start +
11541 hevc->chunk->offset, r);
11542 else
11543 data = ((u8 *)
11544 hevc->chunk->block->start_virt)
11545 + hevc->chunk->offset;
11546
11547 for (jj = 0; jj < r; jj++) {
11548 if ((jj & 0xf) == 0)
11549 hevc_print(hevc,
11550 PRINT_FRAMEBASE_DATA,
11551 "%06x:", jj);
11552 hevc_print_cont(hevc,
11553 PRINT_FRAMEBASE_DATA,
11554 "%02x ", data[jj]);
11555 if (((jj + 1) & 0xf) == 0)
11556 hevc_print_cont(hevc,
11557 PRINT_FRAMEBASE_DATA,
11558 "\n");
11559 }
11560
11561 if (!hevc->chunk->block->is_mapped)
11562 codec_mm_unmap_phyaddr(data);
11563 }
11564
11565 decode_size = hevc->chunk->size +
11566 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
11567 WRITE_VREG(HEVC_DECODE_SIZE,
11568 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
11569
11570 vdec_enable_input(vdec);
11571
11572 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11573 "%s: mpc %x\n",
11574 __func__, READ_VREG(HEVC_MPC_E));
11575
11576 start_process_time(hevc);
11577 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11578		} else {
11579 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11580
11581 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11582 * "amvdec_vh265: Insufficient data\n");
11583 */
11584
11585 vdec_schedule_work(&hevc->work);
11586 }
11587 return;
11588 } else if (hevc->dec_result == DEC_RESULT_DONE) {
11589 /* if (!hevc->ctx_valid)
11590 hevc->ctx_valid = 1; */
11591 decode_frame_count[hevc->index]++;
11592#ifdef DETREFILL_ENABLE
11593 if (hevc->is_swap &&
11594 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11595 if (hevc->delrefill_check == 2) {
11596 delrefill(hevc);
11597 amhevc_stop();
11598 }
11599 }
11600#endif
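		/*
		 * Note (added commentary): with the frame MMU enabled and not
		 * in double-write-only mode (0x10), read back how many 4K
		 * pages the SAO actually used for the current picture and
		 * return the unused tail pages to the mmu box.
		 */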
11601 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
11602 hevc->used_4k_num =
11603 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
11604 if (hevc->used_4k_num >= 0 &&
11605 hevc->cur_pic &&
11606 hevc->cur_pic->scatter_alloc
11607 == 1) {
11608 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
11609 "%s pic index %d scatter_alloc %d page_start %d\n",
11610 "decoder_mmu_box_free_idx_tail",
11611 hevc->cur_pic->index,
11612 hevc->cur_pic->scatter_alloc,
11613 hevc->used_4k_num);
11614 if (hevc->m_ins_flag)
11615 hevc_mmu_dma_check(hw_to_vdec(hevc));
11616 decoder_mmu_box_free_idx_tail(
11617 hevc->mmu_box,
11618 hevc->cur_pic->index,
11619 hevc->used_4k_num);
11620 hevc->cur_pic->scatter_alloc = 2;
11621 }
11622 }
11623 hevc->pic_decoded_lcu_idx =
11624 READ_VREG(HEVC_PARSER_LCU_START)
11625 & 0xffffff;
11626
11627 if (vdec->master == NULL && vdec->slave == NULL &&
11628 hevc->empty_flag == 0) {
11629 hevc->over_decode =
11630 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
11631 if (hevc->over_decode)
11632 hevc_print(hevc, 0,
11633 "!!!Over decode\n");
11634 }
11635
11636 if (is_log_enable(hevc))
11637 add_log(hevc,
11638 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
11639 __func__,
11640 hevc->dec_result,
11641 hevc->pic_decoded_lcu_idx,
11642 hevc->used_4k_num,
11643 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11644 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11645 hevc->start_shift_bytes
11646 );
11647
11648 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11649 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
11650 __func__,
11651 hevc->dec_result,
11652 READ_VREG(HEVC_STREAM_LEVEL),
11653 READ_VREG(HEVC_STREAM_WR_PTR),
11654 READ_VREG(HEVC_STREAM_RD_PTR),
11655 hevc->pic_decoded_lcu_idx,
11656 hevc->used_4k_num,
11657 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11658 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11659 hevc->start_shift_bytes
11660 );
11661
11662 hevc->used_4k_num = -1;
11663
11664 check_pic_decoded_error(hevc,
11665 hevc->pic_decoded_lcu_idx);
11666#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11667#if 1
11668 if (vdec->slave) {
11669 if (dv_debug & 0x1)
11670 vdec_set_flag(vdec->slave,
11671 VDEC_FLAG_SELF_INPUT_CONTEXT);
11672 else
11673 vdec_set_flag(vdec->slave,
11674 VDEC_FLAG_OTHER_INPUT_CONTEXT);
11675 }
11676#else
11677 if (vdec->slave) {
11678 if (no_interleaved_el_slice)
11679 vdec_set_flag(vdec->slave,
11680 VDEC_FLAG_INPUT_KEEP_CONTEXT);
11681 /* this will move real HW pointer for input */
11682 else
11683 vdec_set_flag(vdec->slave, 0);
11684 /* this will not move real HW pointer
11685 *and SL layer decoding
11686 *will start from same stream position
11687 *as current BL decoder
11688 */
11689 }
11690#endif
11691#endif
11692#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11693 hevc->shift_byte_count_lo
11694 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11695 if (vdec->slave) {
11696			/* cur is the base layer; propagate to the enhancement layer */
11697 struct hevc_state_s *hevc_el =
11698 (struct hevc_state_s *)
11699 vdec->slave->private;
11700 if (hevc_el)
11701 hevc_el->shift_byte_count_lo =
11702 hevc->shift_byte_count_lo;
11703 } else if (vdec->master) {
11704			/* cur is the enhancement layer; propagate to the base layer */
11705 struct hevc_state_s *hevc_ba =
11706 (struct hevc_state_s *)
11707 vdec->master->private;
11708 if (hevc_ba)
11709 hevc_ba->shift_byte_count_lo =
11710 hevc->shift_byte_count_lo;
11711 }
11712#endif
11713 mutex_lock(&hevc->chunks_mutex);
11714 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11715 hevc->chunk = NULL;
11716 mutex_unlock(&hevc->chunks_mutex);
11717 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
11718		/*
11719		 * stream based: stream buffer empty or decode timeout
11720		 * frame based: vdec_prepare_input failed
11721		 */
11722 if (!vdec_has_more_input(vdec)) {
11723 hevc->dec_result = DEC_RESULT_EOS;
11724 vdec_schedule_work(&hevc->work);
11725 return;
11726 }
11727#ifdef AGAIN_HAS_THRESHOLD
11728 hevc->next_again_flag = 1;
11729#endif
11730 } else if (hevc->dec_result == DEC_RESULT_EOS) {
11731 struct PIC_s *pic;
11732 hevc->eos = 1;
11733#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11734 if ((vdec->master || vdec->slave) &&
11735 aux_data_is_avaible(hevc))
11736 dolby_get_meta(hevc);
11737#endif
11738 check_pic_decoded_error(hevc,
11739 hevc->pic_decoded_lcu_idx);
11740 pic = get_pic_by_POC(hevc, hevc->curr_POC);
11741 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11742 "%s: end of stream, last dec poc %d => 0x%pf\n",
11743 __func__, hevc->curr_POC, pic);
11744 flush_output(hevc, pic);
11745
11746 if (hevc->is_used_v4l)
11747 notify_v4l_eos(hw_to_vdec(hevc));
11748#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11749 hevc->shift_byte_count_lo
11750 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11751 if (vdec->slave) {
11752			/* cur is the base layer; propagate to the enhancement layer */
11753 struct hevc_state_s *hevc_el =
11754 (struct hevc_state_s *)
11755 vdec->slave->private;
11756 if (hevc_el)
11757 hevc_el->shift_byte_count_lo =
11758 hevc->shift_byte_count_lo;
11759 } else if (vdec->master) {
11760			/* cur is the enhancement layer; propagate to the base layer */
11761 struct hevc_state_s *hevc_ba =
11762 (struct hevc_state_s *)
11763 vdec->master->private;
11764 if (hevc_ba)
11765 hevc_ba->shift_byte_count_lo =
11766 hevc->shift_byte_count_lo;
11767 }
11768#endif
11769 mutex_lock(&hevc->chunks_mutex);
11770 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11771 hevc->chunk = NULL;
11772 mutex_unlock(&hevc->chunks_mutex);
11773 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
11774 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11775 "%s: force exit\n",
11776 __func__);
11777 if (hevc->stat & STAT_VDEC_RUN) {
11778 amhevc_stop();
11779 hevc->stat &= ~STAT_VDEC_RUN;
11780 }
11781 if (hevc->stat & STAT_ISR_REG) {
11782 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11783 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11784 hevc->stat &= ~STAT_ISR_REG;
11785 }
11786 hevc_print(hevc, 0, "%s: force exit end\n",
11787 __func__);
11788 }
11789
11790 if (hevc->stat & STAT_VDEC_RUN) {
11791 amhevc_stop();
11792 hevc->stat &= ~STAT_VDEC_RUN;
11793 }
11794
11795 if (hevc->stat & STAT_TIMER_ARM) {
11796 del_timer_sync(&hevc->timer);
11797 hevc->stat &= ~STAT_TIMER_ARM;
11798 }
11799
11800 wait_hevc_search_done(hevc);
11801#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11802 if (hevc->switch_dvlayer_flag) {
11803 if (vdec->slave)
11804 vdec_set_next_sched(vdec, vdec->slave);
11805 else if (vdec->master)
11806 vdec_set_next_sched(vdec, vdec->master);
11807 } else if (vdec->slave || vdec->master)
11808 vdec_set_next_sched(vdec, vdec);
11809#endif
11810
11811 if (from == 1) {
11812 /* This is a timeout work */
11813 if (work_pending(&hevc->work)) {
11814 /*
11815 * The vh265_work arrives at the last second,
11816 * give it a chance to handle the scenario.
11817 */
11818 return;
11819			//cancel_work_sync(&hevc->work);//reserved for future consideration
11820 }
11821 }
11822
11823	/* mark that all HW resources and the input have been released */
11824 if (vdec->parallel_dec == 1)
11825 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
11826 else
11827 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
11828
11829 if (hevc->is_used_v4l) {
11830 struct aml_vcodec_ctx *ctx =
11831 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
11832
11833 if (ctx->param_sets_from_ucode &&
11834 !hevc->v4l_params_parsed)
11835 vdec_v4l_write_frame_sync(ctx);
11836 }
11837
11838 if (hevc->vdec_cb)
11839 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
11840}
11841
11842static void vh265_work(struct work_struct *work)
11843{
11844 struct hevc_state_s *hevc = container_of(work,
11845 struct hevc_state_s, work);
11846 struct vdec_s *vdec = hw_to_vdec(hevc);
11847
11848 vh265_work_implement(hevc, vdec, 0);
11849}
11850
11851static void vh265_timeout_work(struct work_struct *work)
11852{
11853 struct hevc_state_s *hevc = container_of(work,
11854 struct hevc_state_s, timeout_work);
11855 struct vdec_s *vdec = hw_to_vdec(hevc);
11856
11857 if (work_pending(&hevc->work))
11858 return;
11859 vh265_work_implement(hevc, vdec, 1);
11860}
11861
11862
11863static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
11864{
11865 /* new to do ... */
11866 vh265_prot_init(hevc);
11867 return 0;
11868}
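/*
 * Note (added commentary): run_ready() is polled by the vdec core scheduler.
 * It returns the core mask this instance can occupy (CORE_MASK_HEVC, plus
 * CORE_MASK_VDEC_1 when not in parallel mode) when a new picture buffer is
 * available and none of the throttling conditions below apply, or 0 to skip
 * this scheduling round.
 */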
11869static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
11870{
11871 struct hevc_state_s *hevc =
11872 (struct hevc_state_s *)vdec->private;
11873 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
11874 CODEC_MM_FLAGS_TVP : 0;
11875 bool ret = 0;
11876 if (step == 0x12)
11877 return 0;
11878 else if (step == 0x11)
11879 step = 0x12;
11880
11881 if (hevc->eos)
11882 return 0;
11883 if (!hevc->first_sc_checked && hevc->mmu_enable) {
11884 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
11885		hevc->first_sc_checked = 1;
11886 hevc_print(hevc, 0,
11887 "vh265 cached=%d need_size=%d speed= %d ms\n",
11888 size, (hevc->need_cache_size >> PAGE_SHIFT),
11889 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
11890 }
11891 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
11892 && pre_decode_buf_level != 0) {
11893 u32 rp, wp, level;
11894
11895 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
11896 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
11897 if (wp < rp)
11898 level = vdec->input.size + wp - rp;
11899 else
11900 level = wp - rp;
11901
11902 if (level < pre_decode_buf_level)
11903 return 0;
11904 }
11905
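	/*
	 * Note (added commentary): after a DEC_RESULT_AGAIN in stream mode,
	 * do not reschedule until the parser write pointer has advanced by
	 * at least again_threshold bytes since the last run, to avoid
	 * spinning on an almost-empty stream buffer.
	 */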
11906#ifdef AGAIN_HAS_THRESHOLD
11907 if (hevc->next_again_flag &&
11908 (!vdec_frame_based(vdec))) {
11909 u32 parser_wr_ptr =
11910 READ_PARSER_REG(PARSER_VIDEO_WP);
11911 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
11912 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
11913 again_threshold) {
11914 int r = vdec_sync_input(vdec);
11915 hevc_print(hevc,
11916			PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
11917 return 0;
11918 }
11919 }
11920#endif
11921
11922 if (disp_vframe_valve_level &&
11923 kfifo_len(&hevc->display_q) >=
11924 disp_vframe_valve_level) {
11925 hevc->valve_count--;
11926 if (hevc->valve_count <= 0)
11927 hevc->valve_count = 2;
11928 else
11929 return 0;
11930 }
11931
11932 ret = is_new_pic_available(hevc);
11933 if (!ret) {
11934 hevc_print(hevc,
11935 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
11936 __func__, ret);
11937 }
11938
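	/*
	 * Note (added commentary): optional back-pressure caps, each active
	 * only when its module parameter is non-zero: limit the number of
	 * VF-only reference buffers, the display queue depth, and the total
	 * number of buffers in use (0xff means "cap at the working pic num").
	 */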
11939#ifdef CONSTRAIN_MAX_BUF_NUM
11940 if (hevc->pic_list_init_flag == 3) {
11941 if (run_ready_max_vf_only_num > 0 &&
11942 get_vf_ref_only_buf_count(hevc) >=
11943 run_ready_max_vf_only_num
11944 )
11945 ret = 0;
11946 if (run_ready_display_q_num > 0 &&
11947 kfifo_len(&hevc->display_q) >=
11948 run_ready_display_q_num)
11949 ret = 0;
11950
11951 /*avoid more buffers consumed when
11952 switching resolution*/
11953 if (run_ready_max_buf_num == 0xff &&
11954 get_used_buf_count(hevc) >=
11955 get_work_pic_num(hevc))
11956 ret = 0;
11957 else if (run_ready_max_buf_num &&
11958 get_used_buf_count(hevc) >=
11959 run_ready_max_buf_num)
11960 ret = 0;
11961 }
11962#endif
11963
11964 if (hevc->is_used_v4l) {
11965 struct aml_vcodec_ctx *ctx =
11966 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
11967
11968 if (ctx->param_sets_from_ucode &&
11969 !ctx->v4l_codec_ready &&
11970 hevc->v4l_params_parsed)
11971			ret = 0; /* params parsed; wait for v4l codec ready */
11972 }
11973
11974
11975 if (ret)
11976 not_run_ready[hevc->index] = 0;
11977 else
11978 not_run_ready[hevc->index]++;
11979 if (vdec->parallel_dec == 1)
11980 return ret ? (CORE_MASK_HEVC) : 0;
11981 else
11982 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
11983}
11984
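/*
 * Note (added commentary): run() is entered once run_ready() has granted the
 * core mask. It prepares one input chunk, loads or reuses the HEVC firmware,
 * restores the hardware context, programs the decode size and then starts
 * the HEVC core with the watchdog timer armed.
 */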
11985static void run(struct vdec_s *vdec, unsigned long mask,
11986 void (*callback)(struct vdec_s *, void *), void *arg)
11987{
11988 struct hevc_state_s *hevc =
11989 (struct hevc_state_s *)vdec->private;
11990 int r, loadr = 0;
11991 unsigned char check_sum = 0;
11992
11993 run_count[hevc->index]++;
11994 hevc->vdec_cb_arg = arg;
11995 hevc->vdec_cb = callback;
11996 hevc->aux_data_dirty = 1;
11997 hevc_reset_core(vdec);
11998
11999#ifdef AGAIN_HAS_THRESHOLD
12000 hevc->pre_parser_wr_ptr =
12001 READ_PARSER_REG(PARSER_VIDEO_WP);
12002 hevc->next_again_flag = 0;
12003#endif
12004 r = vdec_prepare_input(vdec, &hevc->chunk);
12005 if (r < 0) {
12006 input_empty[hevc->index]++;
12007 hevc->dec_result = DEC_RESULT_AGAIN;
12008 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12009 "ammvdec_vh265: Insufficient data\n");
12010
12011 vdec_schedule_work(&hevc->work);
12012 return;
12013 }
12014 input_empty[hevc->index] = 0;
12015 hevc->dec_result = DEC_RESULT_NONE;
12016 if (vdec_frame_based(vdec) &&
12017 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12018 || is_log_enable(hevc)))
12019 check_sum = get_data_check_sum(hevc, r);
12020
12021 if (is_log_enable(hevc))
12022 add_log(hevc,
12023 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12024 __func__, r,
12025 check_sum,
12026 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12027 );
12028 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12029 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12030 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12031 __func__, r,
12032 check_sum,
12033 READ_VREG(HEVC_STREAM_LEVEL),
12034 READ_VREG(HEVC_STREAM_WR_PTR),
12035 READ_VREG(HEVC_STREAM_RD_PTR),
12036 READ_PARSER_REG(PARSER_VIDEO_RP),
12037 READ_PARSER_REG(PARSER_VIDEO_WP),
12038 hevc->start_shift_bytes
12039 );
12040 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12041 input_frame_based(vdec)) {
12042 int jj;
12043 u8 *data = NULL;
12044
12045 if (!hevc->chunk->block->is_mapped)
12046 data = codec_mm_vmap(hevc->chunk->block->start +
12047 hevc->chunk->offset, r);
12048 else
12049 data = ((u8 *)hevc->chunk->block->start_virt)
12050 + hevc->chunk->offset;
12051
12052 for (jj = 0; jj < r; jj++) {
12053 if ((jj & 0xf) == 0)
12054 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12055 "%06x:", jj);
12056 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12057 "%02x ", data[jj]);
12058 if (((jj + 1) & 0xf) == 0)
12059 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12060 "\n");
12061 }
12062
12063 if (!hevc->chunk->block->is_mapped)
12064 codec_mm_unmap_phyaddr(data);
12065 }
12066 if (vdec->mc_loaded) {
12067		/*
12068		 * the firmware has already been loaded and has not
12069		 * changed to another one; skip the reload.
12070		 */
12071 if (tee_enabled() && hevc->is_swap &&
12072 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12073 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12074 } else {
12075 if (hevc->mmu_enable)
12076 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12077 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12078 "h265_mmu", hevc->fw->data);
12079 else {
12080 if (!hevc->is_4k) {
12081				/* try the swap fw first; if that fails, fall back */
12082				/* to the non-swap fw, since an older fw package */
12083				/* does not contain the swap fw. */
12084 loadr = amhevc_vdec_loadmc_ex(
12085 VFORMAT_HEVC, vdec,
12086 "hevc_mmu_swap",
12087 hevc->fw->data);
12088 if (loadr < 0)
12089 loadr = amhevc_vdec_loadmc_ex(
12090 VFORMAT_HEVC, vdec,
12091 "h265_mmu",
12092 hevc->fw->data);
12093 else
12094 hevc->is_swap = true;
12095 } else
12096 loadr = amhevc_vdec_loadmc_ex(
12097 VFORMAT_HEVC, vdec,
12098 "h265_mmu", hevc->fw->data);
12099 }
12100 else
12101 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12102 NULL, hevc->fw->data);
12103 if (loadr < 0) {
12104 amhevc_disable();
12105 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12106 tee_enabled() ? "TEE" : "local", loadr);
12107 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12108 vdec_schedule_work(&hevc->work);
12109 return;
12110 }
12111
12112 if (tee_enabled() && hevc->is_swap &&
12113 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12114 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12115#ifdef DETREFILL_ENABLE
12116 if (hevc->is_swap &&
12117 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12118 init_detrefill_buf(hevc);
12119#endif
12120 vdec->mc_loaded = 1;
12121 vdec->mc_type = VFORMAT_HEVC;
12122 }
12123 if (vh265_hw_ctx_restore(hevc) < 0) {
12124 vdec_schedule_work(&hevc->work);
12125 return;
12126 }
12127 vdec_enable_input(vdec);
12128
12129 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12130
12131 if (vdec_frame_based(vdec)) {
12132 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12133 r = hevc->chunk->size +
12134 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12135 hevc->decode_size = r;
12136 }
12137#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12138 else {
12139 if (vdec->master || vdec->slave)
12140 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12141 hevc->shift_byte_count_lo);
12142 }
12143#endif
12144 WRITE_VREG(HEVC_DECODE_SIZE, r);
12145 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12146 hevc->init_flag = 1;
12147
12148 if (hevc->pic_list_init_flag == 3)
12149 init_pic_list_hw(hevc);
12150
12151 backup_decode_state(hevc);
12152
12153 start_process_time(hevc);
12154 mod_timer(&hevc->timer, jiffies);
12155 hevc->stat |= STAT_TIMER_ARM;
12156 hevc->stat |= STAT_ISR_REG;
12157 amhevc_start();
12158 hevc->stat |= STAT_VDEC_RUN;
12159}
12160
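/*
 * Note (added commentary): reset() is invoked by the vdec core on a flush or
 * seek. It cancels pending works, stops the HEVC core and timer, drops the
 * current picture/MV buffers and re-runs the local init so decoding can
 * restart from a clean state.
 */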
12161static void reset(struct vdec_s *vdec)
12162{
12163
12164 struct hevc_state_s *hevc =
12165 (struct hevc_state_s *)vdec->private;
12166 int i;
12167
12168 cancel_work_sync(&hevc->work);
12169 cancel_work_sync(&hevc->notify_work);
12170 if (hevc->stat & STAT_VDEC_RUN) {
12171 amhevc_stop();
12172 hevc->stat &= ~STAT_VDEC_RUN;
12173 }
12174
12175 if (hevc->stat & STAT_TIMER_ARM) {
12176 del_timer_sync(&hevc->timer);
12177 hevc->stat &= ~STAT_TIMER_ARM;
12178 }
12179 hevc->dec_result = DEC_RESULT_NONE;
12180 reset_process_time(hevc);
12181 hevc->init_flag = 0;
12182 hevc->pic_list_init_flag = 0;
12183 dealloc_mv_bufs(hevc);
12184 hevc_local_uninit(hevc);
12185 if (vh265_local_init(hevc) < 0)
12186 pr_debug(" %s local init fail\n", __func__);
12187 for (i = 0; i < BUF_POOL_SIZE; i++) {
12188 hevc->m_BUF[i].start_adr = 0;
12189 }
12190
12191 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12192}
12193
12194static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12195{
12196 struct hevc_state_s *hevc =
12197 (struct hevc_state_s *)vdec->private;
12198
12199 return vh265_isr(0, hevc);
12200}
12201
12202static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12203{
12204 struct hevc_state_s *hevc =
12205 (struct hevc_state_s *)vdec->private;
12206
12207 return vh265_isr_thread_fn(0, hevc);
12208}
12209#endif
12210
12211static int amvdec_h265_probe(struct platform_device *pdev)
12212{
12213#ifdef MULTI_INSTANCE_SUPPORT
12214 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12215#else
12216 struct vdec_dev_reg_s *pdata =
12217 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12218#endif
12219 char *tmpbuf;
12220 int ret;
12221 struct hevc_state_s *hevc;
12222
12223 hevc = vmalloc(sizeof(struct hevc_state_s));
12224 if (hevc == NULL) {
12225 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12226 return -ENOMEM;
12227 }
12228 gHevc = hevc;
12229 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12230 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12231 H265_DEBUG_DIS_SYS_ERROR_PROC));
12232 memset(hevc, 0, sizeof(struct hevc_state_s));
12233 if (get_dbg_flag(hevc))
12234 hevc_print(hevc, 0, "%s\r\n", __func__);
12235 mutex_lock(&vh265_mutex);
12236
12237 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12238 (parser_sei_enable & 0x100) == 0)
12239 parser_sei_enable = 7; /*old 1*/
12240 hevc->m_ins_flag = 0;
12241 hevc->init_flag = 0;
12242 hevc->first_sc_checked = 0;
12243 hevc->uninit_list = 0;
12244 hevc->fatal_error = 0;
12245 hevc->show_frame_num = 0;
12246 hevc->frameinfo_enable = 1;
12247#ifdef MULTI_INSTANCE_SUPPORT
12248 hevc->platform_dev = pdev;
12249 platform_set_drvdata(pdev, pdata);
12250#endif
12251
12252 if (pdata == NULL) {
12253 hevc_print(hevc, 0,
12254 "\namvdec_h265 memory resource undefined.\n");
12255 vfree(hevc);
12256 mutex_unlock(&vh265_mutex);
12257 return -EFAULT;
12258 }
12259 if (mmu_enable_force == 0) {
12260 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12261 || double_write_mode == 0x10)
12262 hevc->mmu_enable = 0;
12263 else
12264 hevc->mmu_enable = 1;
12265 }
12266 if (init_mmu_buffers(hevc)) {
12267 hevc_print(hevc, 0,
12268 "\n 265 mmu init failed!\n");
12269 vfree(hevc);
12270 mutex_unlock(&vh265_mutex);
12271 return -EFAULT;
12272 }
12273
12274 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12275 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12276 if (ret < 0) {
12277 uninit_mmu_buffers(hevc);
12278 vfree(hevc);
12279 mutex_unlock(&vh265_mutex);
12280 return ret;
12281 }
12282 hevc->buf_size = work_buf_size;
12283
12284
12285 if (!vdec_secure(pdata)) {
12286 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12287 if (tmpbuf) {
12288 memset(tmpbuf, 0, work_buf_size);
12289 dma_sync_single_for_device(amports_get_dma_device(),
12290 hevc->buf_start,
12291 work_buf_size, DMA_TO_DEVICE);
12292 } else {
12293 tmpbuf = codec_mm_vmap(hevc->buf_start,
12294 work_buf_size);
12295 if (tmpbuf) {
12296 memset(tmpbuf, 0, work_buf_size);
12297 dma_sync_single_for_device(
12298 amports_get_dma_device(),
12299 hevc->buf_start,
12300 work_buf_size,
12301 DMA_TO_DEVICE);
12302 codec_mm_unmap_phyaddr(tmpbuf);
12303 }
12304 }
12305 }
12306
12307 if (get_dbg_flag(hevc)) {
12308 hevc_print(hevc, 0,
12309 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12310 hevc->buf_start, hevc->buf_size);
12311 }
12312
12313 if (pdata->sys_info)
12314 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12315 else {
12316 hevc->vh265_amstream_dec_info.width = 0;
12317 hevc->vh265_amstream_dec_info.height = 0;
12318 hevc->vh265_amstream_dec_info.rate = 30;
12319 }
12320#ifndef MULTI_INSTANCE_SUPPORT
12321 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12322 workaround_enable |= 3;
12323 hevc_print(hevc, 0,
12324 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
12325 } else
12326 workaround_enable &= ~3;
12327#endif
12328 hevc->cma_dev = pdata->cma_dev;
12329 vh265_vdec_info_init();
12330
12331#ifdef MULTI_INSTANCE_SUPPORT
12332 pdata->private = hevc;
12333 pdata->dec_status = vh265_dec_status;
12334 pdata->set_isreset = vh265_set_isreset;
12335 is_reset = 0;
12336 if (vh265_init(pdata) < 0) {
12337#else
12338 if (vh265_init(hevc) < 0) {
12339#endif
12340 hevc_print(hevc, 0,
12341 "\namvdec_h265 init failed.\n");
12342 hevc_local_uninit(hevc);
12343 uninit_mmu_buffers(hevc);
12344 vfree(hevc);
12345 pdata->dec_status = NULL;
12346 mutex_unlock(&vh265_mutex);
12347 return -ENODEV;
12348 }
12349	/*set the max clk for smooth playback...*/
12350 hevc_source_changed(VFORMAT_HEVC,
12351 3840, 2160, 60);
12352 mutex_unlock(&vh265_mutex);
12353
12354 return 0;
12355}
12356
12357static int amvdec_h265_remove(struct platform_device *pdev)
12358{
12359 struct hevc_state_s *hevc = gHevc;
12360
12361 if (get_dbg_flag(hevc))
12362 hevc_print(hevc, 0, "%s\r\n", __func__);
12363
12364 mutex_lock(&vh265_mutex);
12365
12366 vh265_stop(hevc);
12367
12368 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
12369
12370
12371#ifdef DEBUG_PTS
12372 hevc_print(hevc, 0,
12373 "pts missed %ld, pts hit %ld, duration %d\n",
12374 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
12375#endif
12376
12377 vfree(hevc);
12378 hevc = NULL;
12379 gHevc = NULL;
12380
12381 mutex_unlock(&vh265_mutex);
12382
12383 return 0;
12384}
12385/****************************************/
12386#ifdef CONFIG_PM
12387static int h265_suspend(struct device *dev)
12388{
12389 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
12390 return 0;
12391}
12392
12393static int h265_resume(struct device *dev)
12394{
12395 amhevc_resume(to_platform_device(dev));
12396 return 0;
12397}
12398
12399static const struct dev_pm_ops h265_pm_ops = {
12400 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
12401};
12402#endif
12403
12404static struct platform_driver amvdec_h265_driver = {
12405 .probe = amvdec_h265_probe,
12406 .remove = amvdec_h265_remove,
12407 .driver = {
12408 .name = DRIVER_NAME,
12409#ifdef CONFIG_PM
12410 .pm = &h265_pm_ops,
12411#endif
12412 }
12413};
12414
12415#ifdef MULTI_INSTANCE_SUPPORT
12416static void vh265_dump_state(struct vdec_s *vdec)
12417{
12418 int i;
12419 struct hevc_state_s *hevc =
12420 (struct hevc_state_s *)vdec->private;
12421 hevc_print(hevc, 0,
12422 "====== %s\n", __func__);
12423
12424 hevc_print(hevc, 0,
12425 "width/height (%d/%d), reorder_pic_num %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
12426 hevc->frame_width,
12427 hevc->frame_height,
12428 hevc->sps_num_reorder_pics_0,
12429 get_work_pic_num(hevc),
12430 hevc->video_signal_type_debug,
12431 hevc->is_swap
12432 );
12433
12434 hevc_print(hevc, 0,
12435 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
12436 input_frame_based(vdec),
12437 hevc->eos,
12438 hevc->dec_result,
12439 decode_frame_count[hevc->index],
12440 display_frame_count[hevc->index],
12441 run_count[hevc->index],
12442 not_run_ready[hevc->index],
12443 input_empty[hevc->index]
12444 );
12445
12446 if (vf_get_receiver(vdec->vf_provider_name)) {
12447 enum receviver_start_e state =
12448 vf_notify_receiver(vdec->vf_provider_name,
12449 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12450 NULL);
12451 hevc_print(hevc, 0,
12452 "\nreceiver(%s) state %d\n",
12453 vdec->vf_provider_name,
12454 state);
12455 }
12456
12457 hevc_print(hevc, 0,
12458 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
12459 __func__,
12460 kfifo_len(&hevc->newframe_q),
12461 VF_POOL_SIZE,
12462 kfifo_len(&hevc->display_q),
12463 VF_POOL_SIZE,
12464 hevc->vf_pre_count,
12465 hevc->vf_get_count,
12466 hevc->vf_put_count,
12467 hevc->pic_list_init_flag,
12468 is_new_pic_available(hevc)
12469 );
12470
12471 dump_pic_list(hevc);
12472
12473 for (i = 0; i < BUF_POOL_SIZE; i++) {
12474 hevc_print(hevc, 0,
12475 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12476 i,
12477 hevc->m_BUF[i].start_adr,
12478 hevc->m_BUF[i].size,
12479 hevc->m_BUF[i].used_flag);
12480 }
12481
12482 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12483 hevc_print(hevc, 0,
12484 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12485 i,
12486 hevc->m_mv_BUF[i].start_adr,
12487 hevc->m_mv_BUF[i].size,
12488 hevc->m_mv_BUF[i].used_flag);
12489 }
12490
12491 hevc_print(hevc, 0,
12492 "HEVC_DEC_STATUS_REG=0x%x\n",
12493 READ_VREG(HEVC_DEC_STATUS_REG));
12494 hevc_print(hevc, 0,
12495 "HEVC_MPC_E=0x%x\n",
12496 READ_VREG(HEVC_MPC_E));
12497 hevc_print(hevc, 0,
12498 "HEVC_DECODE_MODE=0x%x\n",
12499 READ_VREG(HEVC_DECODE_MODE));
12500 hevc_print(hevc, 0,
12501 "HEVC_DECODE_MODE2=0x%x\n",
12502 READ_VREG(HEVC_DECODE_MODE2));
12503 hevc_print(hevc, 0,
12504 "NAL_SEARCH_CTL=0x%x\n",
12505 READ_VREG(NAL_SEARCH_CTL));
12506 hevc_print(hevc, 0,
12507 "HEVC_PARSER_LCU_START=0x%x\n",
12508 READ_VREG(HEVC_PARSER_LCU_START));
12509 hevc_print(hevc, 0,
12510 "HEVC_DECODE_SIZE=0x%x\n",
12511 READ_VREG(HEVC_DECODE_SIZE));
12512 hevc_print(hevc, 0,
12513 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
12514 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
12515 hevc_print(hevc, 0,
12516 "HEVC_STREAM_START_ADDR=0x%x\n",
12517 READ_VREG(HEVC_STREAM_START_ADDR));
12518 hevc_print(hevc, 0,
12519 "HEVC_STREAM_END_ADDR=0x%x\n",
12520 READ_VREG(HEVC_STREAM_END_ADDR));
12521 hevc_print(hevc, 0,
12522 "HEVC_STREAM_LEVEL=0x%x\n",
12523 READ_VREG(HEVC_STREAM_LEVEL));
12524 hevc_print(hevc, 0,
12525 "HEVC_STREAM_WR_PTR=0x%x\n",
12526 READ_VREG(HEVC_STREAM_WR_PTR));
12527 hevc_print(hevc, 0,
12528 "HEVC_STREAM_RD_PTR=0x%x\n",
12529 READ_VREG(HEVC_STREAM_RD_PTR));
12530 hevc_print(hevc, 0,
12531 "PARSER_VIDEO_RP=0x%x\n",
12532 READ_PARSER_REG(PARSER_VIDEO_RP));
12533 hevc_print(hevc, 0,
12534 "PARSER_VIDEO_WP=0x%x\n",
12535 READ_PARSER_REG(PARSER_VIDEO_WP));
12536
12537 if (input_frame_based(vdec) &&
12538 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
12539 ) {
12540 int jj;
12541 if (hevc->chunk && hevc->chunk->block &&
12542 hevc->chunk->size > 0) {
12543 u8 *data = NULL;
12544 if (!hevc->chunk->block->is_mapped)
12545 data = codec_mm_vmap(hevc->chunk->block->start +
12546 hevc->chunk->offset, hevc->chunk->size);
12547 else
12548 data = ((u8 *)hevc->chunk->block->start_virt)
12549 + hevc->chunk->offset;
12550 hevc_print(hevc, 0,
12551 "frame data size 0x%x\n",
12552 hevc->chunk->size);
12553 for (jj = 0; jj < hevc->chunk->size; jj++) {
12554 if ((jj & 0xf) == 0)
12555 hevc_print(hevc,
12556 PRINT_FRAMEBASE_DATA,
12557 "%06x:", jj);
12558 hevc_print_cont(hevc,
12559 PRINT_FRAMEBASE_DATA,
12560 "%02x ", data[jj]);
12561 if (((jj + 1) & 0xf) == 0)
12562 hevc_print_cont(hevc,
12563 PRINT_FRAMEBASE_DATA,
12564 "\n");
12565 }
12566
12567 if (!hevc->chunk->block->is_mapped)
12568 codec_mm_unmap_phyaddr(data);
12569 }
12570 }
12571
12572}
12573
12574
12575static int ammvdec_h265_probe(struct platform_device *pdev)
12576{
12577
12578 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12579 struct hevc_state_s *hevc = NULL;
12580 int ret;
12581#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12582 int config_val;
12583#endif
12584 if (pdata == NULL) {
12585 pr_info("\nammvdec_h265 memory resource undefined.\n");
12586 return -EFAULT;
12587 }
12588
12589 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
12590 sizeof(struct hevc_state_s), GFP_KERNEL); */
12591 hevc = vmalloc(sizeof(struct hevc_state_s));
12592 if (hevc == NULL) {
12593 pr_info("\nammvdec_h265 device data allocation failed\n");
12594 return -ENOMEM;
12595 }
12596 memset(hevc, 0, sizeof(struct hevc_state_s));
12597
12598 /* the ctx from v4l2 driver. */
12599 hevc->v4l2_ctx = pdata->private;
12600
12601 pdata->private = hevc;
12602 pdata->dec_status = vh265_dec_status;
12603 /* pdata->set_trickmode = set_trickmode; */
12604 pdata->run_ready = run_ready;
12605 pdata->run = run;
12606 pdata->reset = reset;
12607 pdata->irq_handler = vh265_irq_cb;
12608 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
12609 pdata->dump_state = vh265_dump_state;
12610
12611 hevc->index = pdev->id;
12612 hevc->m_ins_flag = 1;
12613
12614 if (pdata->use_vfm_path) {
12615 snprintf(pdata->vf_provider_name,
12616 VDEC_PROVIDER_NAME_SIZE,
12617 VFM_DEC_PROVIDER_NAME);
12618 hevc->frameinfo_enable = 1;
12619 }
12620#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12621 else if (vdec_dual(pdata)) {
12622 struct hevc_state_s *hevc_pair = NULL;
12623
12624 if (dv_toggle_prov_name) /*debug purpose*/
12625 snprintf(pdata->vf_provider_name,
12626 VDEC_PROVIDER_NAME_SIZE,
12627 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
12628 VFM_DEC_DVEL_PROVIDER_NAME);
12629 else
12630 snprintf(pdata->vf_provider_name,
12631 VDEC_PROVIDER_NAME_SIZE,
12632 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
12633 VFM_DEC_DVBL_PROVIDER_NAME);
12634 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
12635 if (pdata->master)
12636 hevc_pair = (struct hevc_state_s *)
12637 pdata->master->private;
12638 else if (pdata->slave)
12639 hevc_pair = (struct hevc_state_s *)
12640 pdata->slave->private;
12641 if (hevc_pair)
12642 hevc->shift_byte_count_lo =
12643 hevc_pair->shift_byte_count_lo;
12644 }
12645#endif
12646 else
12647 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
12648 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
12649
12650 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
12651 &vh265_vf_provider, pdata);
12652
12653 hevc->provider_name = pdata->vf_provider_name;
12654 platform_set_drvdata(pdev, pdata);
12655
12656 hevc->platform_dev = pdev;
12657
12658 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
12659 pdata->config && pdata->config_len) {
12660#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12661		/*use ptr config for double_write_mode, etc*/
12662 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
12663
12664 if (get_config_int(pdata->config, "hevc_double_write_mode",
12665 &config_val) == 0)
12666 hevc->double_write_mode = config_val;
12667 else
12668 hevc->double_write_mode = double_write_mode;
12669
12670 if (get_config_int(pdata->config, "save_buffer_mode",
12671 &config_val) == 0)
12672 hevc->save_buffer_mode = config_val;
12673 else
12674 hevc->save_buffer_mode = 0;
12675
12676 /*use ptr config for max_pic_w, etc*/
12677 if (get_config_int(pdata->config, "hevc_buf_width",
12678 &config_val) == 0) {
12679 hevc->max_pic_w = config_val;
12680 }
12681 if (get_config_int(pdata->config, "hevc_buf_height",
12682 &config_val) == 0) {
12683 hevc->max_pic_h = config_val;
12684 }
12685
12686#endif
12687 } else {
12688 if (pdata->sys_info)
12689 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12690 else {
12691 hevc->vh265_amstream_dec_info.width = 0;
12692 hevc->vh265_amstream_dec_info.height = 0;
12693 hevc->vh265_amstream_dec_info.rate = 30;
12694 }
12695 hevc->double_write_mode = double_write_mode;
12696 }
12697 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
12698		hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
12699 else
12700 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
12701
12702 if (mmu_enable_force == 0) {
12703 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
12704 hevc->mmu_enable = 0;
12705 else
12706 hevc->mmu_enable = 1;
12707 }
12708
12709 hevc->is_used_v4l = (((unsigned long)
12710 hevc->vh265_amstream_dec_info.param & 0x80) >> 7);
12711 if (hevc->is_used_v4l) {
12712 hevc->mmu_enable = (((unsigned long) // scatter mem
12713 hevc->vh265_amstream_dec_info.param & 0x100) >> 8);
12714 if (!hevc->mmu_enable)
12715 hevc->double_write_mode = 0x10;
12716
12717 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
12718 "%s v4: enable mmu %d.\n",
12719 __func__, hevc->mmu_enable);
12720 }
12721
12722 if (init_mmu_buffers(hevc) < 0) {
12723 hevc_print(hevc, 0,
12724 "\n 265 mmu init failed!\n");
12725 mutex_unlock(&vh265_mutex);
12726 /* devm_kfree(&pdev->dev, (void *)hevc);*/
12727 if (hevc)
12728 vfree((void *)hevc);
12729 pdata->dec_status = NULL;
12730 return -EFAULT;
12731 }
12732#if 0
12733 hevc->buf_start = pdata->mem_start;
12734 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
12735#else
12736
12737 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
12738 BMMU_WORKSPACE_ID, work_buf_size,
12739 DRIVER_NAME, &hevc->buf_start);
12740 if (ret < 0) {
12741 uninit_mmu_buffers(hevc);
12742 /* devm_kfree(&pdev->dev, (void *)hevc); */
12743 if (hevc)
12744 vfree((void *)hevc);
12745 pdata->dec_status = NULL;
12746 mutex_unlock(&vh265_mutex);
12747 return ret;
12748 }
12749 hevc->buf_size = work_buf_size;
12750#endif
12751 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12752 (parser_sei_enable & 0x100) == 0)
12753 parser_sei_enable = 7;
12754 hevc->init_flag = 0;
12755 hevc->first_sc_checked = 0;
12756 hevc->uninit_list = 0;
12757 hevc->fatal_error = 0;
12758 hevc->show_frame_num = 0;
12759
12760 /*
12761 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
12762 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
12763 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
12764 */
12765 if (get_dbg_flag(hevc)) {
12766 hevc_print(hevc, 0,
12767 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12768 hevc->buf_start, hevc->buf_size);
12769 }
12770
12771 hevc_print(hevc, 0,
12772 "dynamic_buf_num_margin=%d\n",
12773 hevc->dynamic_buf_num_margin);
12774 hevc_print(hevc, 0,
12775 "double_write_mode=%d\n",
12776 hevc->double_write_mode);
12777
12778 hevc->cma_dev = pdata->cma_dev;
12779
12780 if (vh265_init(pdata) < 0) {
12781 hevc_print(hevc, 0,
12782 "\namvdec_h265 init failed.\n");
12783 hevc_local_uninit(hevc);
12784 uninit_mmu_buffers(hevc);
12785 /* devm_kfree(&pdev->dev, (void *)hevc); */
12786 if (hevc)
12787 vfree((void *)hevc);
12788 pdata->dec_status = NULL;
12789 return -ENODEV;
12790 }
12791
12792 vdec_set_prepare_level(pdata, start_decode_buf_level);
12793
12794	/*set the max clk for smooth playback...*/
12795 hevc_source_changed(VFORMAT_HEVC,
12796 3840, 2160, 60);
12797 if (pdata->parallel_dec == 1)
12798 vdec_core_request(pdata, CORE_MASK_HEVC);
12799 else
12800 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
12801 | CORE_MASK_COMBINE);
12802
12803 return 0;
12804}
12805
12806static int ammvdec_h265_remove(struct platform_device *pdev)
12807{
12808 struct hevc_state_s *hevc =
12809 (struct hevc_state_s *)
12810 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
12811 struct vdec_s *vdec = hw_to_vdec(hevc);
12812
12813 if (hevc == NULL)
12814 return 0;
12815
12816 if (get_dbg_flag(hevc))
12817 hevc_print(hevc, 0, "%s\r\n", __func__);
12818
12819 vmh265_stop(hevc);
12820
12821 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
12822 if (vdec->parallel_dec == 1)
12823 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
12824 else
12825		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12826
12827 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
12828
12829 vfree((void *)hevc);
12830 return 0;
12831}
12832
12833static struct platform_driver ammvdec_h265_driver = {
12834 .probe = ammvdec_h265_probe,
12835 .remove = ammvdec_h265_remove,
12836 .driver = {
12837 .name = MULTI_DRIVER_NAME,
12838#ifdef CONFIG_PM
12839 .pm = &h265_pm_ops,
12840#endif
12841 }
12842};
12843#endif
12844
12845static struct codec_profile_t amvdec_h265_profile = {
12846 .name = "hevc",
12847 .profile = ""
12848};
12849
12850static struct codec_profile_t amvdec_h265_profile_single,
12851 amvdec_h265_profile_mult;
12852
12853static struct mconfig h265_configs[] = {
12854 MC_PU32("use_cma", &use_cma),
12855 MC_PU32("bit_depth_luma", &bit_depth_luma),
12856 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
12857 MC_PU32("video_signal_type", &video_signal_type),
12858#ifdef ERROR_HANDLE_DEBUG
12859 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
12860 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
12861#endif
12862 MC_PU32("radr", &radr),
12863 MC_PU32("rval", &rval),
12864 MC_PU32("dbg_cmd", &dbg_cmd),
12865 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
12866 MC_PU32("endian", &endian),
12867 MC_PU32("step", &step),
12868 MC_PU32("udebug_flag", &udebug_flag),
12869 MC_PU32("decode_pic_begin", &decode_pic_begin),
12870 MC_PU32("slice_parse_begin", &slice_parse_begin),
12871 MC_PU32("nal_skip_policy", &nal_skip_policy),
12872 MC_PU32("i_only_flag", &i_only_flag),
12873 MC_PU32("error_handle_policy", &error_handle_policy),
12874 MC_PU32("error_handle_threshold", &error_handle_threshold),
12875 MC_PU32("error_handle_nal_skip_threshold",
12876 &error_handle_nal_skip_threshold),
12877 MC_PU32("error_handle_system_threshold",
12878 &error_handle_system_threshold),
12879 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
12880 MC_PU32("debug", &debug),
12881 MC_PU32("debug_mask", &debug_mask),
12882 MC_PU32("buffer_mode", &buffer_mode),
12883 MC_PU32("double_write_mode", &double_write_mode),
12884 MC_PU32("buf_alloc_width", &buf_alloc_width),
12885 MC_PU32("buf_alloc_height", &buf_alloc_height),
12886 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
12887 MC_PU32("max_buf_num", &max_buf_num),
12888 MC_PU32("buf_alloc_size", &buf_alloc_size),
12889 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
12890 MC_PU32("mem_map_mode", &mem_map_mode),
12891 MC_PU32("enable_mem_saving", &enable_mem_saving),
12892 MC_PU32("force_w_h", &force_w_h),
12893 MC_PU32("force_fps", &force_fps),
12894 MC_PU32("max_decoding_time", &max_decoding_time),
12895 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
12896 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
12897 MC_PU32("interlace_enable", &interlace_enable),
12898 MC_PU32("pts_unstable", &pts_unstable),
12899 MC_PU32("parser_sei_enable", &parser_sei_enable),
12900 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
12901 MC_PU32("decode_timeout_val", &decode_timeout_val),
12902#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12903 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
12904 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
12905 MC_PU32("dv_debug", &dv_debug),
12906#endif
12907};
12908static struct mconfig_node decoder_265_node;
12909
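/*
 * Note (added commentary): module init selects the work-buffer spec by the
 * platform's 4K/8K capability, registers the single-instance and
 * multi-instance platform drivers, and exports the "hevc"/"h265"/"mh265"
 * codec profiles plus the media.decoder config node.
 */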
12910static int __init amvdec_h265_driver_init_module(void)
12911{
12912 struct BuffInfo_s *p_buf_info;
12913
12914 if (vdec_is_support_4k()) {
12915 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
12916 p_buf_info = &amvh265_workbuff_spec[2];
12917 else
12918 p_buf_info = &amvh265_workbuff_spec[1];
12919 } else
12920 p_buf_info = &amvh265_workbuff_spec[0];
12921
12922 init_buff_spec(NULL, p_buf_info);
12923 work_buf_size =
12924 (p_buf_info->end_adr - p_buf_info->start_adr
12925 + 0xffff) & (~0xffff);
12926
12927 pr_debug("amvdec_h265 module init\n");
12928 error_handle_policy = 0;
12929
12930#ifdef ERROR_HANDLE_DEBUG
12931 dbg_nal_skip_flag = 0;
12932 dbg_nal_skip_count = 0;
12933#endif
12934 udebug_flag = 0;
12935 decode_pic_begin = 0;
12936 slice_parse_begin = 0;
12937 step = 0;
12938 buf_alloc_size = 0;
12939
12940#ifdef MULTI_INSTANCE_SUPPORT
12941 if (platform_driver_register(&ammvdec_h265_driver))
12942 pr_err("failed to register ammvdec_h265 driver\n");
12943
12944#endif
12945 if (platform_driver_register(&amvdec_h265_driver)) {
12946 pr_err("failed to register amvdec_h265 driver\n");
12947 return -ENODEV;
12948 }
12949#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
12950 if (!has_hevc_vdec()) {
12951		/* hevc not supported */
12952 amvdec_h265_profile.name = "hevc_unsupport";
12953 }
12954 if (vdec_is_support_4k()) {
12955 if (is_meson_m8m2_cpu()) {
12956			/* m8m2 supports 4k */
12957 amvdec_h265_profile.profile = "4k";
12958 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
12959 amvdec_h265_profile.profile =
12960 "8k, 8bit, 10bit, dwrite, compressed";
12961		} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
12962 amvdec_h265_profile.profile =
12963 "4k, 8bit, 10bit, dwrite, compressed";
12964 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
12965 amvdec_h265_profile.profile = "4k";
12966 }
12967#endif
12968 if (codec_mm_get_total_size() < 80 * SZ_1M) {
12969 pr_info("amvdec_h265 default mmu enabled.\n");
12970 mmu_enable = 1;
12971 }
12972
12973 vcodec_profile_register(&amvdec_h265_profile);
12974 amvdec_h265_profile_single = amvdec_h265_profile;
12975 amvdec_h265_profile_single.name = "h265";
12976 vcodec_profile_register(&amvdec_h265_profile_single);
12977 amvdec_h265_profile_mult = amvdec_h265_profile;
12978 amvdec_h265_profile_mult.name = "mh265";
12979 vcodec_profile_register(&amvdec_h265_profile_mult);
12980 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
12981 "h265", h265_configs, CONFIG_FOR_RW);
12982 return 0;
12983}
12984
12985static void __exit amvdec_h265_driver_remove_module(void)
12986{
12987 pr_debug("amvdec_h265 module remove.\n");
12988
12989#ifdef MULTI_INSTANCE_SUPPORT
12990 platform_driver_unregister(&ammvdec_h265_driver);
12991#endif
12992 platform_driver_unregister(&amvdec_h265_driver);
12993}
12994
12995/****************************************/
12996/*
12997 *module_param(stat, uint, 0664);
12998 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
12999 */
13000module_param(use_cma, uint, 0664);
13001MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13002
13003module_param(bit_depth_luma, uint, 0664);
13004MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13005
13006module_param(bit_depth_chroma, uint, 0664);
13007MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13008
13009module_param(video_signal_type, uint, 0664);
13010MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13011
13012#ifdef ERROR_HANDLE_DEBUG
13013module_param(dbg_nal_skip_flag, uint, 0664);
13014MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13015
13016module_param(dbg_nal_skip_count, uint, 0664);
13017MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13018#endif
13019
13020module_param(radr, uint, 0664);
13021MODULE_PARM_DESC(radr, "\n radr\n");
13022
13023module_param(rval, uint, 0664);
13024MODULE_PARM_DESC(rval, "\n rval\n");
13025
13026module_param(dbg_cmd, uint, 0664);
13027MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13028
13029module_param(dump_nal, uint, 0664);
13030MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13031
13032module_param(dbg_skip_decode_index, uint, 0664);
13033MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13034
13035module_param(endian, uint, 0664);
13036MODULE_PARM_DESC(endian, "\n endian\n");
13037
13038module_param(step, uint, 0664);
13039MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13040
13041module_param(decode_pic_begin, uint, 0664);
13042MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13043
13044module_param(slice_parse_begin, uint, 0664);
13045MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13046
13047module_param(nal_skip_policy, uint, 0664);
13048MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
13049
13050module_param(i_only_flag, uint, 0664);
13051MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
13052
13053module_param(fast_output_enable, uint, 0664);
13054MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
13055
13056module_param(error_handle_policy, uint, 0664);
13057MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
13058
13059module_param(error_handle_threshold, uint, 0664);
13060MODULE_PARM_DESC(error_handle_threshold,
13061 "\n amvdec_h265 error_handle_threshold\n");
13062
13063module_param(error_handle_nal_skip_threshold, uint, 0664);
13064MODULE_PARM_DESC(error_handle_nal_skip_threshold,
13065 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
13066
13067module_param(error_handle_system_threshold, uint, 0664);
13068MODULE_PARM_DESC(error_handle_system_threshold,
13069 "\n amvdec_h265 error_handle_system_threshold\n");
13070
13071module_param(error_skip_nal_count, uint, 0664);
13072MODULE_PARM_DESC(error_skip_nal_count,
13073 "\n amvdec_h265 error_skip_nal_count\n");
13074
13075module_param(debug, uint, 0664);
13076MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
13077
13078module_param(debug_mask, uint, 0664);
13079MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
13080
13081module_param(log_mask, uint, 0664);
13082MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
13083
13084module_param(buffer_mode, uint, 0664);
13085MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
13086
13087module_param(double_write_mode, uint, 0664);
13088MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
13089
13090module_param(buf_alloc_width, uint, 0664);
13091MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
13092
13093module_param(buf_alloc_height, uint, 0664);
13094MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
13095
13096module_param(dynamic_buf_num_margin, uint, 0664);
13097MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
13098
13099module_param(max_buf_num, uint, 0664);
13100MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
13101
13102module_param(buf_alloc_size, uint, 0664);
13103MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
13104
13105#ifdef CONSTRAIN_MAX_BUF_NUM
13106module_param(run_ready_max_vf_only_num, uint, 0664);
13107MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");
13108
13109module_param(run_ready_display_q_num, uint, 0664);
13110MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");
13111
13112module_param(run_ready_max_buf_num, uint, 0664);
13113MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
13114#endif
13115
13116#if 0
13117module_param(re_config_pic_flag, uint, 0664);
13118MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
13119#endif
13120
13121module_param(buffer_mode_dbg, uint, 0664);
13122MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");
13123
13124module_param(mem_map_mode, uint, 0664);
13125MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");
13126
13127module_param(enable_mem_saving, uint, 0664);
13128MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");
13129
13130module_param(force_w_h, uint, 0664);
13131MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
13132
13133module_param(force_fps, uint, 0664);
13134MODULE_PARM_DESC(force_fps, "\n force_fps\n");
13135
13136module_param(max_decoding_time, uint, 0664);
13137MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");
13138
13139module_param(prefix_aux_buf_size, uint, 0664);
13140MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");
13141
13142module_param(suffix_aux_buf_size, uint, 0664);
13143MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");
13144
13145module_param(interlace_enable, uint, 0664);
13146MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
13147module_param(pts_unstable, uint, 0664);
13148MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
13149module_param(parser_sei_enable, uint, 0664);
13150MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");
13151
13152#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13153module_param(parser_dolby_vision_enable, uint, 0664);
13154MODULE_PARM_DESC(parser_dolby_vision_enable,
13155 "\n parser_dolby_vision_enable\n");
13156
13157module_param(dolby_meta_with_el, uint, 0664);
13158MODULE_PARM_DESC(dolby_meta_with_el,
13159 "\n dolby_meta_with_el\n");
13160
13161module_param(dolby_el_flush_th, uint, 0664);
13162MODULE_PARM_DESC(dolby_el_flush_th,
13163 "\n dolby_el_flush_th\n");
13164#endif
13165module_param(mmu_enable, uint, 0664);
13166MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");
13167
13168module_param(mmu_enable_force, uint, 0664);
13169MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");
13170
13171#ifdef MULTI_INSTANCE_SUPPORT
13172module_param(start_decode_buf_level, int, 0664);
13173MODULE_PARM_DESC(start_decode_buf_level,
13174 "\n h265 start_decode_buf_level\n");
13175
13176module_param(decode_timeout_val, uint, 0664);
13177MODULE_PARM_DESC(decode_timeout_val,
13178 "\n h265 decode_timeout_val\n");
13179
13180module_param(data_resend_policy, uint, 0664);
13181MODULE_PARM_DESC(data_resend_policy,
13182 "\n h265 data_resend_policy\n");
13183
13184module_param_array(decode_frame_count, uint,
13185 &max_decode_instance_num, 0664);
13186
13187module_param_array(display_frame_count, uint,
13188 &max_decode_instance_num, 0664);
13189
13190module_param_array(max_process_time, uint,
13191 &max_decode_instance_num, 0664);
13192
13193module_param_array(max_get_frame_interval,
13194 uint, &max_decode_instance_num, 0664);
13195
13196module_param_array(run_count, uint,
13197 &max_decode_instance_num, 0664);
13198
13199module_param_array(input_empty, uint,
13200 &max_decode_instance_num, 0664);
13201
13202module_param_array(not_run_ready, uint,
13203 &max_decode_instance_num, 0664);
13204
13205module_param_array(ref_frame_mark_flag, uint,
13206 &max_decode_instance_num, 0664);
13207
13208#endif
13209#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13210module_param(dv_toggle_prov_name, uint, 0664);
13211MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");
13212
13213module_param(dv_debug, uint, 0664);
13214MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");
13215
13216module_param(force_bypass_dvenl, uint, 0664);
13217MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
13218#endif
13219
13220#ifdef AGAIN_HAS_THRESHOLD
13221module_param(again_threshold, uint, 0664);
13222MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
13223#endif
13224
13225module_param(force_disp_pic_index, int, 0664);
13226MODULE_PARM_DESC(force_disp_pic_index,
13227 "\n amvdec_h265 force_disp_pic_index\n");
13228
13229module_param(frmbase_cont_bitlevel, uint, 0664);
13230MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");
13231
13232module_param(udebug_flag, uint, 0664);
13233MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
13234
13235module_param(udebug_pause_pos, uint, 0664);
13236MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
13237
13238module_param(udebug_pause_val, uint, 0664);
13239MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
13240
13241module_param(pre_decode_buf_level, int, 0664);
13242MODULE_PARM_DESC(pre_decode_buf_level, "\n ammvdec_h265 pre_decode_buf_level\n");
13243
13244module_param(udebug_pause_decode_idx, uint, 0664);
13245MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
13246
13247module_param(disp_vframe_valve_level, uint, 0664);
13248MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");
13249
13250module_param(pic_list_debug, uint, 0664);
13251MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");
13252
13253module_param(without_display_mode, uint, 0664);
13254MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");
13255
13256module_init(amvdec_h265_driver_init_module);
13257module_exit(amvdec_h265_driver_remove_module);
13258
13259MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
13260MODULE_LICENSE("GPL");
13261MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");
13262