path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
blob: 53aeb106631608fbe43967a22e685875140a0272
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50#include <media/v4l2-mem2mem.h>
51
52#define HEVC_8K_LFTOFFSET_FIX
53
54#define CONSTRAIN_MAX_BUF_NUM
55
56#define SWAP_HEVC_UCODE
57#define DETREFILL_ENABLE
58
59#define AGAIN_HAS_THRESHOLD
60/*#define TEST_NO_BUF*/
61#define HEVC_PIC_STRUCT_SUPPORT
62#define MULTI_INSTANCE_SUPPORT
63#define USE_UNINIT_SEMA
64
65 /* .buf_size = 0x100000*16,
66 //4k2k , 0x100000 per buffer */
67 /* 4096x2304 , 0x120000 per buffer */
68#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
69#define MPRED_4K_MV_BUF_SIZE (0x120000)
70#define MPRED_MV_BUF_SIZE (0x40000)
71
72#define MMU_COMPRESS_HEADER_SIZE 0x48000
73#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
74
75#define MAX_FRAME_4K_NUM 0x1200
76#define MAX_FRAME_8K_NUM (0x1200*4)
77
78//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
79#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
80
81#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
82
83#define HEVC_CM_HEADER_START_ADDR 0x3628
84#define HEVC_SAO_MMU_VH1_ADDR 0x363b
85#define HEVC_SAO_MMU_VH0_ADDR 0x363a
86
87#define HEVC_DBLK_CFGB 0x350b
88#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
89#define SWAP_HEVC_OFFSET (3 * 0x1000)
90
91#define MEM_NAME "codec_265"
92/* #include <mach/am_regs.h> */
93#include <linux/amlogic/media/utils/vdec_reg.h>
94
95#include "../utils/vdec.h"
96#include "../utils/amvdec.h"
97#include <linux/amlogic/media/video_sink/video.h>
98#include <linux/amlogic/media/codec_mm/configs.h>
99
100#define SEND_LMEM_WITH_RPM
101#define SUPPORT_10BIT
102/* #define ERROR_HANDLE_DEBUG */
103
104#ifndef STAT_KTHREAD
105#define STAT_KTHREAD 0x40
106#endif
107
108#ifdef MULTI_INSTANCE_SUPPORT
109#define MAX_DECODE_INSTANCE_NUM 9
110#define MULTI_DRIVER_NAME "ammvdec_h265"
111#endif
112#define DRIVER_NAME "amvdec_h265"
113#define MODULE_NAME "amvdec_h265"
114#define DRIVER_HEADER_NAME "amvdec_h265_header"
115
116#define PUT_INTERVAL (HZ/100)
117#define ERROR_SYSTEM_RESET_COUNT 200
118
119#define PTS_NORMAL 0
120#define PTS_NONE_REF_USE_DURATION 1
121
122#define PTS_MODE_SWITCHING_THRESHOLD 3
123#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
124
125#define DUR2PTS(x) ((x)*90/96)
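/*
 * Worked example (assuming frame_dur is in 1/96000 s units, as the 90/96
 * factor suggests): a 30 fps stream has frame_dur = 96000/30 = 3200, and
 * DUR2PTS(3200) = 3200 * 90 / 96 = 3000, i.e. one frame period of the
 * 90 kHz PTS clock (90000/30).
 */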
126
127#define MAX_SIZE_8K (8192 * 4608)
128#define MAX_SIZE_4K (4096 * 2304)
129
130#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
131#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
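/*
 * Both checks classify by pixel count rather than by individual dimensions:
 * anything above 4096x2304 pixels counts as 8K-size, anything above
 * 1920x1088 as 4K-size (so e.g. 2560x1440 is already a 4K-size frame).
 */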
132
133#define SEI_UserDataITU_T_T35 4
134#define INVALID_IDX -1 /* Invalid buffer index.*/
135
136static struct semaphore h265_sema;
137
138struct hevc_state_s;
139static int hevc_print(struct hevc_state_s *hevc,
140 int debug_flag, const char *fmt, ...);
141static int hevc_print_cont(struct hevc_state_s *hevc,
142 int debug_flag, const char *fmt, ...);
143static int vh265_vf_states(struct vframe_states *states, void *);
144static struct vframe_s *vh265_vf_peek(void *);
145static struct vframe_s *vh265_vf_get(void *);
146static void vh265_vf_put(struct vframe_s *, void *);
147static int vh265_event_cb(int type, void *data, void *private_data);
148
149static int vh265_stop(struct hevc_state_s *hevc);
150#ifdef MULTI_INSTANCE_SUPPORT
151static int vmh265_stop(struct hevc_state_s *hevc);
152static s32 vh265_init(struct vdec_s *vdec);
153static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
154static void reset_process_time(struct hevc_state_s *hevc);
155static void start_process_time(struct hevc_state_s *hevc);
156static void restart_process_time(struct hevc_state_s *hevc);
157static void timeout_process(struct hevc_state_s *hevc);
158#else
159static s32 vh265_init(struct hevc_state_s *hevc);
160#endif
161static void vh265_prot_init(struct hevc_state_s *hevc);
162static int vh265_local_init(struct hevc_state_s *hevc);
163static void vh265_check_timer_func(unsigned long arg);
164static void config_decode_mode(struct hevc_state_s *hevc);
165
166static const char vh265_dec_id[] = "vh265-dev";
167
168#define PROVIDER_NAME "decoder.h265"
169#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
170
171static const struct vframe_operations_s vh265_vf_provider = {
172 .peek = vh265_vf_peek,
173 .get = vh265_vf_get,
174 .put = vh265_vf_put,
175 .event_cb = vh265_event_cb,
176 .vf_states = vh265_vf_states,
177};
178
179static struct vframe_provider_s vh265_vf_prov;
180
181static u32 bit_depth_luma;
182static u32 bit_depth_chroma;
183static u32 video_signal_type;
184
185static int start_decode_buf_level = 0x8000;
186
187static unsigned int decode_timeout_val = 200;
188
189static u32 run_ready_min_buf_num = 2;
190static u32 disable_ip_mode;
191/*data_resend_policy:
 192 bit 0, for stream-based input, resend data when the decoding buf is empty
193*/
194static u32 data_resend_policy = 1;
195
196#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
197/*
198static const char * const video_format_names[] = {
199 "component", "PAL", "NTSC", "SECAM",
200 "MAC", "unspecified", "unspecified", "unspecified"
201};
202
203static const char * const color_primaries_names[] = {
204 "unknown", "bt709", "undef", "unknown",
205 "bt470m", "bt470bg", "smpte170m", "smpte240m",
206 "film", "bt2020"
207};
208
209static const char * const transfer_characteristics_names[] = {
210 "unknown", "bt709", "undef", "unknown",
211 "bt470m", "bt470bg", "smpte170m", "smpte240m",
212 "linear", "log100", "log316", "iec61966-2-4",
213 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
214 "smpte-st-2084", "smpte-st-428"
215};
216
217static const char * const matrix_coeffs_names[] = {
218 "GBR", "bt709", "undef", "unknown",
219 "fcc", "bt470bg", "smpte170m", "smpte240m",
220 "YCgCo", "bt2020nc", "bt2020c"
221};
222*/
223#ifdef SUPPORT_10BIT
224#define HEVC_CM_BODY_START_ADDR 0x3626
225#define HEVC_CM_BODY_LENGTH 0x3627
226#define HEVC_CM_HEADER_LENGTH 0x3629
227#define HEVC_CM_HEADER_OFFSET 0x362b
228#define HEVC_SAO_CTRL9 0x362d
229#define LOSLESS_COMPRESS_MODE
230/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
231/* double_write_mode:
232 * 0, no double write;
233 * 1, 1:1 ratio;
234 * 2, (1/4):(1/4) ratio;
 235 * 3, (1/4):(1/4) ratio, with the compressed frame also included;
236 * 4, (1/2):(1/2) ratio;
237 * 0x10, double write only
 238 * 0x100, if > 1080p, use mode 4, else use mode 1;
 239 * 0x200, if > 1080p, use mode 2, else use mode 1;
240 * 0x300, if > 720p, use mode 4, else use mode 1;
241 */
242static u32 double_write_mode;
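/*
 * Example of the adaptive modes above (see get_double_write_mode() below
 * for the exact thresholds): with double_write_mode = 0x100, a 3840x2160
 * stream is decoded with mode 4 ((1/2):(1/2) double write), while a
 * 1920x1080 stream falls back to mode 1 (full-size double write).
 */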
243
244/*#define DECOMP_HEADR_SURGENT*/
245
246static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
247static u32 enable_mem_saving = 1;
248static u32 workaround_enable;
249static u32 force_w_h;
250#endif
251static u32 force_fps;
252static u32 pts_unstable;
253#define H265_DEBUG_BUFMGR 0x01
254#define H265_DEBUG_BUFMGR_MORE 0x02
255#define H265_DEBUG_DETAIL 0x04
256#define H265_DEBUG_REG 0x08
257#define H265_DEBUG_MAN_SEARCH_NAL 0x10
258#define H265_DEBUG_MAN_SKIP_NAL 0x20
259#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
260#define H265_DEBUG_FORCE_CLK 0x80
261#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
262#define H265_DEBUG_NO_DISPLAY 0x200
263#define H265_DEBUG_DISCARD_NAL 0x400
264#define H265_DEBUG_OUT_PTS 0x800
265#define H265_DEBUG_DUMP_PIC_LIST 0x1000
266#define H265_DEBUG_PRINT_SEI 0x2000
267#define H265_DEBUG_PIC_STRUCT 0x4000
268#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
269#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
270#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
271#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
272#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
273#define H265_DEBUG_HW_RESET 0x100000
274#define H265_CFG_CANVAS_IN_DECODE 0x200000
275#define H265_DEBUG_DV 0x400000
276#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
277#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
278#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
279#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
280#ifdef MULTI_INSTANCE_SUPPORT
281#define PRINT_FLAG_ERROR 0x0
282#define IGNORE_PARAM_FROM_CONFIG 0x08000000
283#define PRINT_FRAMEBASE_DATA 0x10000000
284#define PRINT_FLAG_VDEC_STATUS 0x20000000
285#define PRINT_FLAG_VDEC_DETAIL 0x40000000
286#define PRINT_FLAG_V4L_DETAIL 0x80000000
287#endif
288
289#define BUF_POOL_SIZE 32
290#define MAX_BUF_NUM 24
291#define MAX_REF_PIC_NUM 24
292#define MAX_REF_ACTIVE 16
293
294#ifdef MV_USE_FIXED_BUF
295#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
296#define VF_BUFFER_IDX(n) (n)
297#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
298#else
299#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
300#define VF_BUFFER_IDX(n) (n)
301#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
302#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
303#endif
304
305#define HEVC_MV_INFO 0x310d
306#define HEVC_QP_INFO 0x3137
307#define HEVC_SKIP_INFO 0x3136
308
309const u32 h265_version = 201602101;
310static u32 debug_mask = 0xffffffff;
311static u32 log_mask;
312static u32 debug;
313static u32 radr;
314static u32 rval;
315static u32 dbg_cmd;
316static u32 dump_nal;
317static u32 dbg_skip_decode_index;
318static u32 endian = 0xff0;
319#ifdef ERROR_HANDLE_DEBUG
320static u32 dbg_nal_skip_flag;
321 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
322static u32 dbg_nal_skip_count;
323#endif
324/*for debug*/
325/*
326 udebug_flag:
327 bit 0, enable ucode print
328 bit 1, enable ucode detail print
329 bit [31:16] not 0, pos to dump lmem
330 bit 2, pop bits to lmem
331 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
332*/
333static u32 udebug_flag;
334/*
335 when udebug_flag[1:0] is not 0
336 udebug_pause_pos not 0,
337 pause position
338*/
339static u32 udebug_pause_pos;
340/*
341 when udebug_flag[1:0] is not 0
342 and udebug_pause_pos is not 0,
343 pause only when DEBUG_REG2 is equal to this val
344*/
345static u32 udebug_pause_val;
346
347static u32 udebug_pause_decode_idx;
348
349static u32 decode_pic_begin;
350static uint slice_parse_begin;
351static u32 step;
352static bool is_reset;
353
354#ifdef CONSTRAIN_MAX_BUF_NUM
355static u32 run_ready_max_vf_only_num;
356static u32 run_ready_display_q_num;
357 /*0: not check
358 0xff: work_pic_num
359 */
360static u32 run_ready_max_buf_num = 0xff;
361#endif
362
363static u32 dynamic_buf_num_margin = 7;
364static u32 buf_alloc_width;
365static u32 buf_alloc_height;
366
367static u32 max_buf_num = 16;
368static u32 buf_alloc_size;
369/*static u32 re_config_pic_flag;*/
370/*
371 *bit[0]: 0,
372 *bit[1]: 0, always release cma buffer when stop
373 *bit[1]: 1, never release cma buffer when stop
374 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
 375 *do not release cma buffer if blackout is not 1
376 *
377 *bit[2]: 0, when start decoding, check current displayed buffer
378 * (only for buffer decoded by h265) if blackout is 0
379 * 1, do not check current displayed buffer
380 *
381 *bit[3]: 1, if blackout is not 1, do not release current
382 * displayed cma buffer always.
383 */
384/* set to 1 for fast play;
385 * set to 8 for other case of "keep last frame"
386 */
387static u32 buffer_mode = 1;
388
389/* buffer_mode_dbg: debug only*/
390static u32 buffer_mode_dbg = 0xffff0000;
391/**/
392/*
 393 *bit[1:0] PB_skip_mode: 0, start decoding from the beginning;
 394 *1, start decoding after the first I picture;
 395 *2, only decode and display error-free pictures;
 396 *3, start decoding and display after IDR, etc.
397 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
398 *only for mode 0 and 1.
399 */
400static u32 nal_skip_policy = 2;
401
402/*
403 *bit 0, 1: only display I picture;
404 *bit 1, 1: only decode I picture;
405 */
406static u32 i_only_flag;
407
408/*
409bit 0, fast output first I picture
410*/
411static u32 fast_output_enable = 1;
412
413static u32 frmbase_cont_bitlevel = 0x60;
414
415/*
416use_cma: 1, use both reserved memory and cma for buffers
4172, only use cma for buffers
418*/
419static u32 use_cma = 2;
420
421#define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
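/* Rounds adr up to the next 16-byte boundary, e.g. AUX_BUF_ALIGN(0x1001) == 0x1010. */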
422static u32 prefix_aux_buf_size = (16 * 1024);
423static u32 suffix_aux_buf_size;
424
425static u32 max_decoding_time;
426/*
427 *error handling
428 */
429/*error_handle_policy:
430 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
431 *1, skip error_skip_nal_count nals before error recovery;
432 *bit 1 (valid only when bit0 == 1):
433 *1, wait vps/sps/pps after error recovery;
434 *bit 2 (valid only when bit0 == 0):
435 *0, auto search after error recovery (hevc_recover() called);
436 *1, manual search after error recovery
437 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
438 *
439 *bit 4: 0, set error_mark after reset/recover
440 * 1, do not set error_mark after reset/recover
441 *bit 5: 0, check total lcu for every picture
442 * 1, do not check total lcu
443 *bit 6: 0, do not check head error
444 * 1, check head error
445 *
446 */
447
448static u32 error_handle_policy;
449static u32 error_skip_nal_count = 6;
450static u32 error_handle_threshold = 30;
451static u32 error_handle_nal_skip_threshold = 10;
452static u32 error_handle_system_threshold = 30;
453static u32 interlace_enable = 1;
454static u32 fr_hint_status;
455
456 /*
457 *parser_sei_enable:
458 * bit 0, sei;
459 * bit 1, sei_suffix (fill aux buf)
460 * bit 2, fill sei to aux buf (when bit 0 is 1)
461 * bit 8, debug flag
462 */
463static u32 parser_sei_enable;
464#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
465static u32 parser_dolby_vision_enable = 1;
466static u32 dolby_meta_with_el;
467static u32 dolby_el_flush_th = 2;
468#endif
469/* this is only for h265 mmu enable */
470
471static u32 mmu_enable = 1;
472static u32 mmu_enable_force;
473static u32 work_buf_size;
474static unsigned int force_disp_pic_index;
475static unsigned int disp_vframe_valve_level;
476static int pre_decode_buf_level = 0x1000;
477static unsigned int pic_list_debug;
478#ifdef HEVC_8K_LFTOFFSET_FIX
479 /* performance_profile: bit 0, multi slice in ucode
480 */
481static unsigned int performance_profile = 1;
482#endif
483#ifdef MULTI_INSTANCE_SUPPORT
484static unsigned int max_decode_instance_num
485 = MAX_DECODE_INSTANCE_NUM;
486static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
487static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
488static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
489static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
490static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
491static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
492static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
493static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
494{1, 1, 1, 1, 1, 1, 1, 1, 1};
495
496#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
497static unsigned char get_idx(struct hevc_state_s *hevc);
498#endif
499
500#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
501static u32 dv_toggle_prov_name;
502
503static u32 dv_debug;
504
505static u32 force_bypass_dvenl;
506#endif
507#endif
508
509
510#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
511#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
512#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
513#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
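/*
 * Per-instance gating: instance n prints only when bit n of debug_mask
 * (or of log_mask, for is_log_enable) is set.
 */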
514#else
515#define get_dbg_flag(hevc) debug
516#define get_dbg_flag2(hevc) debug
517#define is_log_enable(hevc) (log_mask ? 1 : 0)
518#define get_valid_double_write_mode(hevc) double_write_mode
519#define get_buf_alloc_width(hevc) buf_alloc_width
520#define get_buf_alloc_height(hevc) buf_alloc_height
521#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
522#endif
523#define get_buffer_mode(hevc) buffer_mode
524
525
526DEFINE_SPINLOCK(lock);
527struct task_struct *h265_task = NULL;
528#undef DEBUG_REG
529#ifdef DEBUG_REG
530void WRITE_VREG_DBG(unsigned adr, unsigned val)
531{
532 if (debug & H265_DEBUG_REG)
533 pr_info("%s(%x, %x)\n", __func__, adr, val);
534 WRITE_VREG(adr, val);
535}
536
537#undef WRITE_VREG
538#define WRITE_VREG WRITE_VREG_DBG
539#endif
540extern u32 trickmode_i;
541
542static DEFINE_MUTEX(vh265_mutex);
543
544static DEFINE_MUTEX(vh265_log_mutex);
545
546//static struct vdec_info *gvs;
547
548static u32 without_display_mode;
549
550/**************************************************
551 *
552 *h265 buffer management include
553 *
554 ***************************************************
555 */
556enum NalUnitType {
557 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
558 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
559
560 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
561 /* Current name in the spec: TSA_R */
562 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
563
564 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
565 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
566
567 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
568 /* Current name in the spec: RADL_R */
569 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
570
571 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
572 /* Current name in the spec: RASL_R */
573 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
574
575 NAL_UNIT_RESERVED_10,
576 NAL_UNIT_RESERVED_11,
577 NAL_UNIT_RESERVED_12,
578 NAL_UNIT_RESERVED_13,
579 NAL_UNIT_RESERVED_14,
580 NAL_UNIT_RESERVED_15,
581
582 /* Current name in the spec: BLA_W_LP */
583 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
584 /* Current name in the spec: BLA_W_DLP */
585 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
586 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
587 /* Current name in the spec: IDR_W_DLP */
588 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
589 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
590 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
591 NAL_UNIT_RESERVED_22,
592 NAL_UNIT_RESERVED_23,
593
594 NAL_UNIT_RESERVED_24,
595 NAL_UNIT_RESERVED_25,
596 NAL_UNIT_RESERVED_26,
597 NAL_UNIT_RESERVED_27,
598 NAL_UNIT_RESERVED_28,
599 NAL_UNIT_RESERVED_29,
600 NAL_UNIT_RESERVED_30,
601 NAL_UNIT_RESERVED_31,
602
603 NAL_UNIT_VPS, /* 32 */
604 NAL_UNIT_SPS, /* 33 */
605 NAL_UNIT_PPS, /* 34 */
606 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
607 NAL_UNIT_EOS, /* 36 */
608 NAL_UNIT_EOB, /* 37 */
609 NAL_UNIT_FILLER_DATA, /* 38 */
610 NAL_UNIT_SEI, /* 39 Prefix SEI */
611 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
612 NAL_UNIT_RESERVED_41,
613 NAL_UNIT_RESERVED_42,
614 NAL_UNIT_RESERVED_43,
615 NAL_UNIT_RESERVED_44,
616 NAL_UNIT_RESERVED_45,
617 NAL_UNIT_RESERVED_46,
618 NAL_UNIT_RESERVED_47,
619 NAL_UNIT_UNSPECIFIED_48,
620 NAL_UNIT_UNSPECIFIED_49,
621 NAL_UNIT_UNSPECIFIED_50,
622 NAL_UNIT_UNSPECIFIED_51,
623 NAL_UNIT_UNSPECIFIED_52,
624 NAL_UNIT_UNSPECIFIED_53,
625 NAL_UNIT_UNSPECIFIED_54,
626 NAL_UNIT_UNSPECIFIED_55,
627 NAL_UNIT_UNSPECIFIED_56,
628 NAL_UNIT_UNSPECIFIED_57,
629 NAL_UNIT_UNSPECIFIED_58,
630 NAL_UNIT_UNSPECIFIED_59,
631 NAL_UNIT_UNSPECIFIED_60,
632 NAL_UNIT_UNSPECIFIED_61,
633 NAL_UNIT_UNSPECIFIED_62,
634 NAL_UNIT_UNSPECIFIED_63,
635 NAL_UNIT_INVALID,
636};
637
638/* --------------------------------------------------- */
639/* Amrisc Software Interrupt */
640/* --------------------------------------------------- */
641#define AMRISC_STREAM_EMPTY_REQ 0x01
642#define AMRISC_PARSER_REQ 0x02
643#define AMRISC_MAIN_REQ 0x04
644
645/* --------------------------------------------------- */
646/* HEVC_DEC_STATUS define */
647/* --------------------------------------------------- */
648#define HEVC_DEC_IDLE 0x0
649#define HEVC_NAL_UNIT_VPS 0x1
650#define HEVC_NAL_UNIT_SPS 0x2
651#define HEVC_NAL_UNIT_PPS 0x3
652#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
653#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
654#define HEVC_SLICE_DECODING 0x6
655#define HEVC_NAL_UNIT_SEI 0x7
656#define HEVC_SLICE_SEGMENT_DONE 0x8
657#define HEVC_NAL_SEARCH_DONE 0x9
658#define HEVC_DECPIC_DATA_DONE 0xa
659#define HEVC_DECPIC_DATA_ERROR 0xb
660#define HEVC_SEI_DAT 0xc
661#define HEVC_SEI_DAT_DONE 0xd
662#define HEVC_NAL_DECODE_DONE 0xe
663#define HEVC_OVER_DECODE 0xf
664
665#define HEVC_DATA_REQUEST 0x12
666
667#define HEVC_DECODE_BUFEMPTY 0x20
668#define HEVC_DECODE_TIMEOUT 0x21
669#define HEVC_SEARCH_BUFEMPTY 0x22
670#define HEVC_DECODE_OVER_SIZE 0x23
671#define HEVC_DECODE_BUFEMPTY2 0x24
672#define HEVC_FIND_NEXT_PIC_NAL 0x50
673#define HEVC_FIND_NEXT_DVEL_NAL 0x51
674
675#define HEVC_DUMP_LMEM 0x30
676
677#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
678#define HEVC_DISCARD_NAL 0xf0
679#define HEVC_ACTION_DEC_CONT 0xfd
680#define HEVC_ACTION_ERROR 0xfe
681#define HEVC_ACTION_DONE 0xff
682
683/* --------------------------------------------------- */
684/* Include "parser_cmd.h" */
685/* --------------------------------------------------- */
686#define PARSER_CMD_SKIP_CFG_0 0x0000090b
687
688#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
689
690#define PARSER_CMD_SKIP_CFG_2 0x001b1910
691
692#define PARSER_CMD_NUMBER 37
693
694/**************************************************
695 *
696 *h265 buffer management
697 *
698 ***************************************************
699 */
700/* #define BUFFER_MGR_ONLY */
701/* #define CONFIG_HEVC_CLK_FORCED_ON */
702/* #define ENABLE_SWAP_TEST */
703#define MCRCC_ENABLE
704#define INVALID_POC 0x80000000
705
706#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
707#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
708#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
709#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
710#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
711#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
712#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
713#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
714#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
715#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
716#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
717#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
718#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
719#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
720#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
721#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
722#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
723#ifdef ENABLE_SWAP_TEST
724#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
725#endif
726
727/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
728/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
729#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
730 /*do not define ENABLE_SWAP_TEST*/
731#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
732#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
733
734#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
735#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
736/*
737 *ucode parser/search control
738 *bit 0: 0, header auto parse; 1, header manual parse
 739 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
740 *bit [3:2]: valid when bit1==0;
741 *0, auto skip nal before first vps/sps/pps/idr;
742 *1, auto skip nal before first vps/sps/pps
743 *2, auto skip nal before first vps/sps/pps,
744 * and not decode until the first I slice (with slice address of 0)
745 *
746 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
747 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
748 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
749 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
750 *bit [17]: for NAL_SEI when bit0 is 0:
751 * 0, do not parse/fetch SEI in ucode;
752 * 1, parse/fetch SEI in ucode
753 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
754 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
 755 * 1, fetch NAL_SEI_SUFFIX data to aux buf
756 *bit [19]:
757 * 0, parse NAL_SEI in ucode
758 * 1, fetch NAL_SEI to aux buf
759 *bit [20]: for DOLBY_VISION_META
760 * 0, do not fetch DOLBY_VISION_META to aux buf
761 * 1, fetch DOLBY_VISION_META to aux buf
762 */
763#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
764 /*read only*/
765#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
766 /*
767 [15 : 8] rps_set_id
768 [7 : 0] start_decoding_flag
769 */
770#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
771 /*set before start decoder*/
772#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
773#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
774#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
775
776#define DECODE_MODE_SINGLE 0x0
777#define DECODE_MODE_MULTI_FRAMEBASE 0x1
778#define DECODE_MODE_MULTI_STREAMBASE 0x2
779#define DECODE_MODE_MULTI_DVBAL 0x3
780#define DECODE_MODE_MULTI_DVENL 0x4
781
782#define MAX_INT 0x7FFFFFFF
783
784#define RPM_BEGIN 0x100
785#define modification_list_cur 0x148
786#define RPM_END 0x180
787
788#define RPS_USED_BIT 14
789/* MISC_FLAG0 */
790#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
791#define PCM_ENABLE_FLAG_BIT 1
792#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
793#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
794#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
795#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
796#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
797#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
798#define SLICE_SAO_LUMA_FLAG_BIT 8
799#define SLICE_SAO_CHROMA_FLAG_BIT 9
800#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
801
802union param_u {
803 struct {
804 unsigned short data[RPM_END - RPM_BEGIN];
805 } l;
806 struct {
807 /* from ucode lmem, do not change this struct */
808 unsigned short CUR_RPS[0x10];
809 unsigned short num_ref_idx_l0_active;
810 unsigned short num_ref_idx_l1_active;
811 unsigned short slice_type;
812 unsigned short slice_temporal_mvp_enable_flag;
813 unsigned short dependent_slice_segment_flag;
814 unsigned short slice_segment_address;
815 unsigned short num_title_rows_minus1;
816 unsigned short pic_width_in_luma_samples;
817 unsigned short pic_height_in_luma_samples;
818 unsigned short log2_min_coding_block_size_minus3;
819 unsigned short log2_diff_max_min_coding_block_size;
820 unsigned short log2_max_pic_order_cnt_lsb_minus4;
821 unsigned short POClsb;
822 unsigned short collocated_from_l0_flag;
823 unsigned short collocated_ref_idx;
824 unsigned short log2_parallel_merge_level;
825 unsigned short five_minus_max_num_merge_cand;
826 unsigned short sps_num_reorder_pics_0;
827 unsigned short modification_flag;
828 unsigned short tiles_enabled_flag;
829 unsigned short num_tile_columns_minus1;
830 unsigned short num_tile_rows_minus1;
831 unsigned short tile_width[12];
832 unsigned short tile_height[8];
833 unsigned short misc_flag0;
834 unsigned short pps_beta_offset_div2;
835 unsigned short pps_tc_offset_div2;
836 unsigned short slice_beta_offset_div2;
837 unsigned short slice_tc_offset_div2;
838 unsigned short pps_cb_qp_offset;
839 unsigned short pps_cr_qp_offset;
840 unsigned short first_slice_segment_in_pic_flag;
841 unsigned short m_temporalId;
842 unsigned short m_nalUnitType;
843
844 unsigned short vui_num_units_in_tick_hi;
845 unsigned short vui_num_units_in_tick_lo;
846 unsigned short vui_time_scale_hi;
847 unsigned short vui_time_scale_lo;
848 unsigned short bit_depth;
849 unsigned short profile_etc;
850 unsigned short sei_frame_field_info;
851 unsigned short video_signal_type;
852 unsigned short modification_list[0x20];
853 unsigned short conformance_window_flag;
854 unsigned short conf_win_left_offset;
855 unsigned short conf_win_right_offset;
856 unsigned short conf_win_top_offset;
857 unsigned short conf_win_bottom_offset;
858 unsigned short chroma_format_idc;
859 unsigned short color_description;
860 unsigned short aspect_ratio_idc;
861 unsigned short sar_width;
862 unsigned short sar_height;
863 unsigned short sps_max_dec_pic_buffering_minus1_0;
864 } p;
865};
866
867#define RPM_BUF_SIZE (0x80*2)
868/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
869#define LMEM_BUF_SIZE (0x500 * 2)
870
871struct buff_s {
872 u32 buf_start;
873 u32 buf_size;
874 u32 buf_end;
875};
876
877struct BuffInfo_s {
878 u32 max_width;
879 u32 max_height;
880 unsigned int start_adr;
881 unsigned int end_adr;
882 struct buff_s ipp;
883 struct buff_s sao_abv;
884 struct buff_s sao_vb;
885 struct buff_s short_term_rps;
886 struct buff_s vps;
887 struct buff_s sps;
888 struct buff_s pps;
889 struct buff_s sao_up;
890 struct buff_s swap_buf;
891 struct buff_s swap_buf2;
892 struct buff_s scalelut;
893 struct buff_s dblk_para;
894 struct buff_s dblk_data;
895 struct buff_s dblk_data2;
896 struct buff_s mmu_vbh;
897 struct buff_s cm_header;
898 struct buff_s mpred_above;
899#ifdef MV_USE_FIXED_BUF
900 struct buff_s mpred_mv;
901#endif
902 struct buff_s rpm;
903 struct buff_s lmem;
904};
905#define WORK_BUF_SPEC_NUM 3
906static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
907 {
908 /* 8M bytes */
909 .max_width = 1920,
910 .max_height = 1088,
911 .ipp = {
912 /* IPP work space calculation :
913 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
914 */
915 .buf_size = 0x4000,
916 },
917 .sao_abv = {
918 .buf_size = 0x30000,
919 },
920 .sao_vb = {
921 .buf_size = 0x30000,
922 },
923 .short_term_rps = {
924 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
925 * total 64x16x2 = 2048 bytes (0x800)
926 */
927 .buf_size = 0x800,
928 },
929 .vps = {
930 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
931 * total 0x0800 bytes
932 */
933 .buf_size = 0x800,
934 },
935 .sps = {
936 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
937 * total 0x0800 bytes
938 */
939 .buf_size = 0x800,
940 },
941 .pps = {
942 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
943 * total 0x2000 bytes
944 */
945 .buf_size = 0x2000,
946 },
947 .sao_up = {
948 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
949 * each has 16 bytes total 0x2800 bytes
950 */
951 .buf_size = 0x2800,
952 },
953 .swap_buf = {
954 /* 256cyclex64bit = 2K bytes 0x800
955 * (only 144 cycles valid)
956 */
957 .buf_size = 0x800,
958 },
959 .swap_buf2 = {
960 .buf_size = 0x800,
961 },
962 .scalelut = {
963 /* support up to 32 SCALELUT 1024x32 =
964 * 32Kbytes (0x8000)
965 */
966 .buf_size = 0x8000,
967 },
968 .dblk_para = {
969#ifdef SUPPORT_10BIT
970 .buf_size = 0x40000,
971#else
972 /* DBLK -> Max 256(4096/16) LCU, each para
973 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
974 */
975 .buf_size = 0x20000,
976#endif
977 },
978 .dblk_data = {
979 .buf_size = 0x40000,
980 },
981 .dblk_data2 = {
982 .buf_size = 0x40000,
983 }, /*dblk data for adapter*/
984 .mmu_vbh = {
985 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
986 },
987#if 0
988 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
989 .buf_size = MMU_COMPRESS_HEADER_SIZE *
990 (MAX_REF_PIC_NUM + 1),
991 },
992#endif
993 .mpred_above = {
994 .buf_size = 0x8000,
995 },
996#ifdef MV_USE_FIXED_BUF
997 .mpred_mv = {/* 1080p, 0x40000 per buffer */
998 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
999 },
1000#endif
1001 .rpm = {
1002 .buf_size = RPM_BUF_SIZE,
1003 },
1004 .lmem = {
1005 .buf_size = 0x500 * 2,
1006 }
1007 },
1008 {
1009 .max_width = 4096,
1010 .max_height = 2048,
1011 .ipp = {
1012 /* IPP work space calculation :
1013 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1014 */
1015 .buf_size = 0x4000,
1016 },
1017 .sao_abv = {
1018 .buf_size = 0x30000,
1019 },
1020 .sao_vb = {
1021 .buf_size = 0x30000,
1022 },
1023 .short_term_rps = {
1024 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1025 * total 64x16x2 = 2048 bytes (0x800)
1026 */
1027 .buf_size = 0x800,
1028 },
1029 .vps = {
1030 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1031 * total 0x0800 bytes
1032 */
1033 .buf_size = 0x800,
1034 },
1035 .sps = {
1036 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1037 * total 0x0800 bytes
1038 */
1039 .buf_size = 0x800,
1040 },
1041 .pps = {
1042 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1043 * total 0x2000 bytes
1044 */
1045 .buf_size = 0x2000,
1046 },
1047 .sao_up = {
1048 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1049 * each has 16 bytes total 0x2800 bytes
1050 */
1051 .buf_size = 0x2800,
1052 },
1053 .swap_buf = {
1054 /* 256cyclex64bit = 2K bytes 0x800
1055 * (only 144 cycles valid)
1056 */
1057 .buf_size = 0x800,
1058 },
1059 .swap_buf2 = {
1060 .buf_size = 0x800,
1061 },
1062 .scalelut = {
1063 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1064 * (0x8000)
1065 */
1066 .buf_size = 0x8000,
1067 },
1068 .dblk_para = {
1069 /* DBLK -> Max 256(4096/16) LCU, each para
1070 * 512bytes(total:0x20000),
1071 * data 1024bytes(total:0x40000)
1072 */
1073 .buf_size = 0x20000,
1074 },
1075 .dblk_data = {
1076 .buf_size = 0x80000,
1077 },
1078 .dblk_data2 = {
1079 .buf_size = 0x80000,
1080 }, /*dblk data for adapter*/
1081 .mmu_vbh = {
1082 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1083 },
1084#if 0
1085 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1086 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1087 (MAX_REF_PIC_NUM + 1),
1088 },
1089#endif
1090 .mpred_above = {
1091 .buf_size = 0x8000,
1092 },
1093#ifdef MV_USE_FIXED_BUF
1094 .mpred_mv = {
1095 /* .buf_size = 0x100000*16,
1096 //4k2k , 0x100000 per buffer */
1097 /* 4096x2304 , 0x120000 per buffer */
1098 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1099 },
1100#endif
1101 .rpm = {
1102 .buf_size = RPM_BUF_SIZE,
1103 },
1104 .lmem = {
1105 .buf_size = 0x500 * 2,
1106 }
1107 },
1108
1109 {
1110 .max_width = 4096*2,
1111 .max_height = 2048*2,
1112 .ipp = {
1113 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1114 .buf_size = 0x4000*2,
1115 },
1116 .sao_abv = {
1117 .buf_size = 0x30000*2,
1118 },
1119 .sao_vb = {
1120 .buf_size = 0x30000*2,
1121 },
1122 .short_term_rps = {
1123 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1124 .buf_size = 0x800,
1125 },
1126 .vps = {
1127 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1128 .buf_size = 0x800,
1129 },
1130 .sps = {
1131 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1132 .buf_size = 0x800,
1133 },
1134 .pps = {
1135 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1136 .buf_size = 0x2000,
1137 },
1138 .sao_up = {
1139 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1140 .buf_size = 0x2800*2,
1141 },
1142 .swap_buf = {
1143 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1144 .buf_size = 0x800,
1145 },
1146 .swap_buf2 = {
1147 .buf_size = 0x800,
1148 },
1149 .scalelut = {
1150 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1151 .buf_size = 0x8000*2,
1152 },
1153 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1154 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1155 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1156 .mmu_vbh = {
1157 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1158 },
1159#if 0
1160 .cm_header = {
1161 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1162 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1163 },
1164#endif
1165 .mpred_above = {
1166 .buf_size = 0x8000*2,
1167 },
1168#ifdef MV_USE_FIXED_BUF
1169 .mpred_mv = {
1170 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1171 },
1172#endif
1173 .rpm = {
1174 .buf_size = RPM_BUF_SIZE,
1175 },
1176 .lmem = {
1177 .buf_size = 0x500 * 2,
1178 },
1179 }
1180};
1181
1182static void init_buff_spec(struct hevc_state_s *hevc,
1183 struct BuffInfo_s *buf_spec)
1184{
1185 buf_spec->ipp.buf_start = buf_spec->start_adr;
1186 buf_spec->sao_abv.buf_start =
1187 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1188
1189 buf_spec->sao_vb.buf_start =
1190 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1191 buf_spec->short_term_rps.buf_start =
1192 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1193 buf_spec->vps.buf_start =
1194 buf_spec->short_term_rps.buf_start +
1195 buf_spec->short_term_rps.buf_size;
1196 buf_spec->sps.buf_start =
1197 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1198 buf_spec->pps.buf_start =
1199 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1200 buf_spec->sao_up.buf_start =
1201 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1202 buf_spec->swap_buf.buf_start =
1203 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1204 buf_spec->swap_buf2.buf_start =
1205 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1206 buf_spec->scalelut.buf_start =
1207 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1208 buf_spec->dblk_para.buf_start =
1209 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1210 buf_spec->dblk_data.buf_start =
1211 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1212 buf_spec->dblk_data2.buf_start =
1213 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1214 buf_spec->mmu_vbh.buf_start =
1215 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1216 buf_spec->mpred_above.buf_start =
1217 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1218#ifdef MV_USE_FIXED_BUF
1219 buf_spec->mpred_mv.buf_start =
1220 buf_spec->mpred_above.buf_start +
1221 buf_spec->mpred_above.buf_size;
1222
1223 buf_spec->rpm.buf_start =
1224 buf_spec->mpred_mv.buf_start +
1225 buf_spec->mpred_mv.buf_size;
1226#else
1227 buf_spec->rpm.buf_start =
1228 buf_spec->mpred_above.buf_start +
1229 buf_spec->mpred_above.buf_size;
1230#endif
1231 buf_spec->lmem.buf_start =
1232 buf_spec->rpm.buf_start +
1233 buf_spec->rpm.buf_size;
1234 buf_spec->end_adr =
1235 buf_spec->lmem.buf_start +
1236 buf_spec->lmem.buf_size;
1237
1238 if (hevc && get_dbg_flag2(hevc)) {
1239 hevc_print(hevc, 0,
1240 "%s workspace (%x %x) size = %x\n", __func__,
1241 buf_spec->start_adr, buf_spec->end_adr,
1242 buf_spec->end_adr - buf_spec->start_adr);
1243
1244 hevc_print(hevc, 0,
1245 "ipp.buf_start :%x\n",
1246 buf_spec->ipp.buf_start);
1247 hevc_print(hevc, 0,
1248 "sao_abv.buf_start :%x\n",
1249 buf_spec->sao_abv.buf_start);
1250 hevc_print(hevc, 0,
1251 "sao_vb.buf_start :%x\n",
1252 buf_spec->sao_vb.buf_start);
1253 hevc_print(hevc, 0,
1254 "short_term_rps.buf_start :%x\n",
1255 buf_spec->short_term_rps.buf_start);
1256 hevc_print(hevc, 0,
1257 "vps.buf_start :%x\n",
1258 buf_spec->vps.buf_start);
1259 hevc_print(hevc, 0,
1260 "sps.buf_start :%x\n",
1261 buf_spec->sps.buf_start);
1262 hevc_print(hevc, 0,
1263 "pps.buf_start :%x\n",
1264 buf_spec->pps.buf_start);
1265 hevc_print(hevc, 0,
1266 "sao_up.buf_start :%x\n",
1267 buf_spec->sao_up.buf_start);
1268 hevc_print(hevc, 0,
1269 "swap_buf.buf_start :%x\n",
1270 buf_spec->swap_buf.buf_start);
1271 hevc_print(hevc, 0,
1272 "swap_buf2.buf_start :%x\n",
1273 buf_spec->swap_buf2.buf_start);
1274 hevc_print(hevc, 0,
1275 "scalelut.buf_start :%x\n",
1276 buf_spec->scalelut.buf_start);
1277 hevc_print(hevc, 0,
1278 "dblk_para.buf_start :%x\n",
1279 buf_spec->dblk_para.buf_start);
1280 hevc_print(hevc, 0,
1281 "dblk_data.buf_start :%x\n",
1282 buf_spec->dblk_data.buf_start);
1283 hevc_print(hevc, 0,
1284 "dblk_data2.buf_start :%x\n",
1285 buf_spec->dblk_data2.buf_start);
1286 hevc_print(hevc, 0,
1287 "mpred_above.buf_start :%x\n",
1288 buf_spec->mpred_above.buf_start);
1289#ifdef MV_USE_FIXED_BUF
1290 hevc_print(hevc, 0,
1291 "mpred_mv.buf_start :%x\n",
1292 buf_spec->mpred_mv.buf_start);
1293#endif
1294 if ((get_dbg_flag2(hevc)
1295 &
1296 H265_DEBUG_SEND_PARAM_WITH_REG)
1297 == 0) {
1298 hevc_print(hevc, 0,
1299 "rpm.buf_start :%x\n",
1300 buf_spec->rpm.buf_start);
1301 }
1302 }
1303
1304}
1305
1306enum SliceType {
1307 B_SLICE,
1308 P_SLICE,
1309 I_SLICE
1310};
1311
1312/*USE_BUF_BLOCK*/
1313struct BUF_s {
1314 ulong start_adr;
1315 u32 size;
1316 u32 luma_size;
1317 ulong header_addr;
1318 u32 header_size;
1319 int used_flag;
1320 ulong v4l_ref_buf_addr;
1321 ulong chroma_addr;
1322 u32 chroma_size;
1323} /*BUF_t */;
1324
1325/* levels 6 and 6.1 allow a maximum of 800 slices; other levels allow 200 */
1326#define MAX_SLICE_NUM 800
1327struct PIC_s {
1328 int index;
1329 int scatter_alloc;
1330 int BUF_index;
1331 int mv_buf_index;
1332 int POC;
1333 int decode_idx;
1334 int slice_type;
1335 int RefNum_L0;
1336 int RefNum_L1;
1337 int num_reorder_pic;
1338 int stream_offset;
1339 unsigned char referenced;
1340 unsigned char output_mark;
1341 unsigned char recon_mark;
1342 unsigned char output_ready;
1343 unsigned char error_mark;
1344 //dis_mark = 0: discard mark, dis_mark = 1: no discard mark
1345 unsigned char dis_mark;
1346 /**/ int slice_idx;
1347 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1348 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1349 /*buffer */
1350 unsigned int header_adr;
1351#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1352 unsigned char dv_enhance_exist;
1353#endif
1354 char *aux_data_buf;
1355 int aux_data_size;
1356 unsigned long cma_alloc_addr;
1357 struct page *alloc_pages;
1358 unsigned int mpred_mv_wr_start_addr;
1359 unsigned int mc_y_adr;
1360 unsigned int mc_u_v_adr;
1361#ifdef SUPPORT_10BIT
1362 /*unsigned int comp_body_size;*/
1363 unsigned int dw_y_adr;
1364 unsigned int dw_u_v_adr;
1365#endif
1366 int mc_canvas_y;
1367 int mc_canvas_u_v;
1368 int width;
1369 int height;
1370
1371 int y_canvas_index;
1372 int uv_canvas_index;
1373#ifdef MULTI_INSTANCE_SUPPORT
1374 struct canvas_config_s canvas_config[2];
1375#endif
1376#ifdef SUPPORT_10BIT
1377 int mem_saving_mode;
1378 u32 bit_depth_luma;
1379 u32 bit_depth_chroma;
1380#endif
1381#ifdef LOSLESS_COMPRESS_MODE
1382 unsigned int losless_comp_body_size;
1383#endif
1384 unsigned char pic_struct;
1385 int vf_ref;
1386
1387 u32 pts;
1388 u64 pts64;
1389 u64 timestamp;
1390
1391 u32 aspect_ratio_idc;
1392 u32 sar_width;
1393 u32 sar_height;
1394 u32 double_write_mode;
1395 u32 video_signal_type;
1396 unsigned short conformance_window_flag;
1397 unsigned short conf_win_left_offset;
1398 unsigned short conf_win_right_offset;
1399 unsigned short conf_win_top_offset;
1400 unsigned short conf_win_bottom_offset;
1401 unsigned short chroma_format_idc;
1402
1403 /* picture qos information */
1404 int max_qp;
1405 int avg_qp;
1406 int min_qp;
1407 int max_skip;
1408 int avg_skip;
1409 int min_skip;
1410 int max_mv;
1411 int min_mv;
1412 int avg_mv;
1413
1414 u32 hw_decode_time;
1415 u32 frame_size; // For frame base mode
1416 bool vframe_bound;
1417 bool ip_mode;
1418} /*PIC_t */;
1419
1420#define MAX_TILE_COL_NUM 10
1421#define MAX_TILE_ROW_NUM 20
1422struct tile_s {
1423 int width;
1424 int height;
1425 int start_cu_x;
1426 int start_cu_y;
1427
1428 unsigned int sao_vb_start_addr;
1429 unsigned int sao_abv_start_addr;
1430};
1431
1432#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1433#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1434#define SEI_HDR10PLUS_MASK 0x00000004
1435
1436#define VF_POOL_SIZE 32
1437
1438#ifdef MULTI_INSTANCE_SUPPORT
1439#define DEC_RESULT_NONE 0
1440#define DEC_RESULT_DONE 1
1441#define DEC_RESULT_AGAIN 2
1442#define DEC_RESULT_CONFIG_PARAM 3
1443#define DEC_RESULT_ERROR 4
1444#define DEC_INIT_PICLIST 5
1445#define DEC_UNINIT_PICLIST 6
1446#define DEC_RESULT_GET_DATA 7
1447#define DEC_RESULT_GET_DATA_RETRY 8
1448#define DEC_RESULT_EOS 9
1449#define DEC_RESULT_FORCE_EXIT 10
1450#define DEC_RESULT_FREE_CANVAS 11
1451
1452static void vh265_work(struct work_struct *work);
1453static void vh265_timeout_work(struct work_struct *work);
1454static void vh265_notify_work(struct work_struct *work);
1455
1456#endif
1457
1458struct debug_log_s {
1459 struct list_head list;
1460 uint8_t data; /*will alloc more size*/
1461};
1462
1463struct hevc_state_s {
1464#ifdef MULTI_INSTANCE_SUPPORT
1465 struct platform_device *platform_dev;
1466 void (*vdec_cb)(struct vdec_s *, void *);
1467 void *vdec_cb_arg;
1468 struct vframe_chunk_s *chunk;
1469 int dec_result;
1470 struct work_struct work;
1471 struct work_struct timeout_work;
1472 struct work_struct notify_work;
1473 struct work_struct set_clk_work;
1474 /* timeout handle */
1475 unsigned long int start_process_time;
1476 unsigned int last_lcu_idx;
1477 unsigned int decode_timeout_count;
1478 unsigned int timeout_num;
1479#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1480 unsigned char switch_dvlayer_flag;
1481 unsigned char no_switch_dvlayer_count;
1482 unsigned char bypass_dvenl_enable;
1483 unsigned char bypass_dvenl;
1484#endif
1485 unsigned char start_parser_type;
1486 /*start_decoding_flag:
1487 vps/pps/sps/idr info from ucode*/
1488 unsigned char start_decoding_flag;
1489 unsigned char rps_set_id;
1490 unsigned char eos;
1491 int pic_decoded_lcu_idx;
1492 u8 over_decode;
1493 u8 empty_flag;
1494#endif
1495 struct vframe_s vframe_dummy;
1496 char *provider_name;
1497 int index;
1498 struct device *cma_dev;
1499 unsigned char m_ins_flag;
1500 unsigned char dolby_enhance_flag;
1501 unsigned long buf_start;
1502 u32 buf_size;
1503 u32 mv_buf_size;
1504
1505 struct BuffInfo_s work_space_buf_store;
1506 struct BuffInfo_s *work_space_buf;
1507
1508 u8 aux_data_dirty;
1509 u32 prefix_aux_size;
1510 u32 suffix_aux_size;
1511 void *aux_addr;
1512 void *rpm_addr;
1513 void *lmem_addr;
1514 dma_addr_t aux_phy_addr;
1515 dma_addr_t rpm_phy_addr;
1516 dma_addr_t lmem_phy_addr;
1517
1518 unsigned int pic_list_init_flag;
1519 unsigned int use_cma_flag;
1520
1521 unsigned short *rpm_ptr;
1522 unsigned short *lmem_ptr;
1523 unsigned short *debug_ptr;
1524 int debug_ptr_size;
1525 int pic_w;
1526 int pic_h;
1527 int lcu_x_num;
1528 int lcu_y_num;
1529 int lcu_total;
1530 int lcu_size;
1531 int lcu_size_log2;
1532 int lcu_x_num_pre;
1533 int lcu_y_num_pre;
1534 int first_pic_after_recover;
1535
1536 int num_tile_col;
1537 int num_tile_row;
1538 int tile_enabled;
1539 int tile_x;
1540 int tile_y;
1541 int tile_y_x;
1542 int tile_start_lcu_x;
1543 int tile_start_lcu_y;
1544 int tile_width_lcu;
1545 int tile_height_lcu;
1546
1547 int slice_type;
1548 unsigned int slice_addr;
1549 unsigned int slice_segment_addr;
1550
1551 unsigned char interlace_flag;
1552 unsigned char curr_pic_struct;
1553 unsigned char frame_field_info_present_flag;
1554
1555 unsigned short sps_num_reorder_pics_0;
1556 unsigned short misc_flag0;
1557 int m_temporalId;
1558 int m_nalUnitType;
1559 int TMVPFlag;
1560 int isNextSliceSegment;
1561 int LDCFlag;
1562 int m_pocRandomAccess;
1563 int plevel;
1564 int MaxNumMergeCand;
1565
1566 int new_pic;
1567 int new_tile;
1568 int curr_POC;
1569 int iPrevPOC;
1570#ifdef MULTI_INSTANCE_SUPPORT
1571 int decoded_poc;
1572 struct PIC_s *decoding_pic;
1573#endif
1574 int iPrevTid0POC;
1575 int list_no;
1576 int RefNum_L0;
1577 int RefNum_L1;
1578 int ColFromL0Flag;
1579 int LongTerm_Curr;
1580 int LongTerm_Col;
1581 int Col_POC;
1582 int LongTerm_Ref;
1583#ifdef MULTI_INSTANCE_SUPPORT
1584 int m_pocRandomAccess_bak;
1585 int curr_POC_bak;
1586 int iPrevPOC_bak;
1587 int iPrevTid0POC_bak;
1588 unsigned char start_parser_type_bak;
1589 unsigned char start_decoding_flag_bak;
1590 unsigned char rps_set_id_bak;
1591 int pic_decoded_lcu_idx_bak;
1592 int decode_idx_bak;
1593#endif
1594 struct PIC_s *cur_pic;
1595 struct PIC_s *col_pic;
1596 int skip_flag;
1597 int decode_idx;
1598 int slice_idx;
1599 unsigned char have_vps;
1600 unsigned char have_sps;
1601 unsigned char have_pps;
1602 unsigned char have_valid_start_slice;
1603 unsigned char wait_buf;
1604 unsigned char error_flag;
1605 unsigned int error_skip_nal_count;
1606 long used_4k_num;
1607
1608 unsigned char
1609 ignore_bufmgr_error; /* bit 0, for decoding;
1610 bit 1, for displaying
1611 bit 1 must be set if bit 0 is 1*/
1612 int PB_skip_mode;
1613 int PB_skip_count_after_decoding;
1614#ifdef SUPPORT_10BIT
1615 int mem_saving_mode;
1616#endif
1617#ifdef LOSLESS_COMPRESS_MODE
1618 unsigned int losless_comp_body_size;
1619#endif
1620 int pts_mode;
1621 int last_lookup_pts;
1622 int last_pts;
1623 u64 last_lookup_pts_us64;
1624 u64 last_pts_us64;
1625 u32 shift_byte_count_lo;
1626 u32 shift_byte_count_hi;
1627 int pts_mode_switching_count;
1628 int pts_mode_recovery_count;
1629
1630 int pic_num;
1631
1632 /**/
1633 union param_u param;
1634
1635 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1636
1637 struct timer_list timer;
1638 struct BUF_s m_BUF[BUF_POOL_SIZE];
1639 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1640 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1641
1642 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1643 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1644 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1645 struct vframe_s vfpool[VF_POOL_SIZE];
1646
1647 u32 stat;
1648 u32 frame_width;
1649 u32 frame_height;
1650 u32 frame_dur;
1651 u32 frame_ar;
1652 u32 bit_depth_luma;
1653 u32 bit_depth_chroma;
1654 u32 video_signal_type;
1655 u32 video_signal_type_debug;
1656 u32 saved_resolution;
1657 bool get_frame_dur;
1658 u32 error_watchdog_count;
1659 u32 error_skip_nal_wt_cnt;
1660 u32 error_system_watchdog_count;
1661
1662#ifdef DEBUG_PTS
1663 unsigned long pts_missed;
1664 unsigned long pts_hit;
1665#endif
1666 struct dec_sysinfo vh265_amstream_dec_info;
1667 unsigned char init_flag;
1668 unsigned char first_sc_checked;
1669 unsigned char uninit_list;
1670 u32 start_decoding_time;
1671
1672 int show_frame_num;
1673#ifdef USE_UNINIT_SEMA
1674 struct semaphore h265_uninit_done_sema;
1675#endif
1676 int fatal_error;
1677
1678
1679 u32 sei_present_flag;
1680 void *frame_mmu_map_addr;
1681 dma_addr_t frame_mmu_map_phy_addr;
1682 unsigned int mmu_mc_buf_start;
1683 unsigned int mmu_mc_buf_end;
1684 unsigned int mmu_mc_start_4k_adr;
1685 void *mmu_box;
1686 void *bmmu_box;
1687 int mmu_enable;
1688
1689 unsigned int dec_status;
1690
1691 /* data for SEI_MASTER_DISPLAY_COLOR */
1692 unsigned int primaries[3][2];
1693 unsigned int white_point[2];
1694 unsigned int luminance[2];
1695 /* data for SEI_CONTENT_LIGHT_LEVEL */
1696 unsigned int content_light_level[2];
1697
1698 struct PIC_s *pre_top_pic;
1699 struct PIC_s *pre_bot_pic;
1700
1701#ifdef MULTI_INSTANCE_SUPPORT
1702 int double_write_mode;
1703 int dynamic_buf_num_margin;
1704 int start_action;
1705 int save_buffer_mode;
1706#endif
1707 u32 i_only;
1708 struct list_head log_list;
1709 u32 ucode_pause_pos;
1710 u32 start_shift_bytes;
1711
1712 u32 vf_pre_count;
1713 u32 vf_get_count;
1714 u32 vf_put_count;
1715#ifdef SWAP_HEVC_UCODE
1716 dma_addr_t mc_dma_handle;
1717 void *mc_cpu_addr;
1718 int swap_size;
1719 ulong swap_addr;
1720#endif
1721#ifdef DETREFILL_ENABLE
1722 dma_addr_t detbuf_adr;
1723 u16 *detbuf_adr_virt;
1724 u8 delrefill_check;
1725#endif
1726 u8 head_error_flag;
1727 int valve_count;
1728 struct firmware_s *fw;
1729 int max_pic_w;
1730 int max_pic_h;
1731#ifdef AGAIN_HAS_THRESHOLD
1732 u8 next_again_flag;
1733 u32 pre_parser_wr_ptr;
1734#endif
1735 u32 ratio_control;
1736 u32 first_pic_flag;
1737 u32 decode_size;
1738 struct mutex chunks_mutex;
1739 int need_cache_size;
1740 u64 sc_start_time;
1741 u32 skip_first_nal;
1742 bool is_swap;
1743 bool is_4k;
1744 int frameinfo_enable;
1745 struct vframe_qos_s vframe_qos;
1746 bool is_used_v4l;
1747 void *v4l2_ctx;
1748 bool v4l_params_parsed;
1749 u32 mem_map_mode;
1750 u32 performance_profile;
1751 struct vdec_info *gvs;
1752 unsigned int res_ch_flag;
1753 bool ip_mode;
1754} /*hevc_stru_t */;
1755
1756#ifdef AGAIN_HAS_THRESHOLD
1757u32 again_threshold;
1758#endif
1759#ifdef SEND_LMEM_WITH_RPM
1760#define get_lmem_params(hevc, ladr) \
1761 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
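/*
 * The index arithmetic mirrors the 16-bit words within each aligned group
 * of four (offset 0<->3, 1<->2): the ucode appears to write lmem data in
 * 64-bit units with the four shorts reversed, so this restores parameter
 * order when reading back from lmem_ptr.
 */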
1762
1763
1764static int get_frame_mmu_map_size(void)
1765{
1766 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1767 return (MAX_FRAME_8K_NUM * 4);
1768
1769 return (MAX_FRAME_4K_NUM * 4);
1770}
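/*
 * Per-frame MMU map size: MAX_FRAME_4K_NUM (0x1200) four-byte entries on
 * 4K-capable chips, four times that on SM1 and later, which also support 8K.
 */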
1771
1772static int is_oversize(int w, int h)
1773{
1774 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1775 MAX_SIZE_8K : MAX_SIZE_4K;
1776
1777 if (w < 0 || h < 0)
1778 return true;
1779
1780 if (h != 0 && (w > max / h))
1781 return true;
1782
1783 return false;
1784}
1785
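/*
 * Sanity-check the PCM fields read back from lmem: if pcm_enabled_flag is
 * set and the PCM sample bit depths exceed the stream's nominal luma/chroma
 * bit depths, mark a header error (only when error_handle_policy bit 6
 * enables head-error checking).
 */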
1786void check_head_error(struct hevc_state_s *hevc)
1787{
1788#define pcm_enabled_flag 0x040
1789#define pcm_sample_bit_depth_luma 0x041
1790#define pcm_sample_bit_depth_chroma 0x042
1791 hevc->head_error_flag = 0;
1792 if ((error_handle_policy & 0x40) == 0)
1793 return;
1794 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1795 uint16_t pcm_depth_luma = get_lmem_params(
1796 hevc, pcm_sample_bit_depth_luma);
1797 uint16_t pcm_sample_chroma = get_lmem_params(
1798 hevc, pcm_sample_bit_depth_chroma);
1799 if (pcm_depth_luma >
1800 hevc->bit_depth_luma ||
1801 pcm_sample_chroma >
1802 hevc->bit_depth_chroma) {
1803 hevc_print(hevc, 0,
1804 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1805 pcm_depth_luma,
1806 pcm_sample_chroma,
1807 hevc->bit_depth_luma,
1808 hevc->bit_depth_chroma);
1809 hevc->head_error_flag = 1;
1810 }
1811 }
1812}
1813#endif
1814
1815#ifdef SUPPORT_10BIT
1816/* Lossless compression body buffer size: 4K per 64x32 block (jt) */
1817static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1818 int width, int height, int mem_saving_mode)
1819{
1820 int width_x64;
1821 int height_x32;
1822 int bsize;
1823
1824 width_x64 = width + 63;
1825 width_x64 >>= 6;
1826
1827 height_x32 = height + 31;
1828 height_x32 >>= 5;
1829 if (mem_saving_mode == 1 && hevc->mmu_enable)
1830 bsize = 3200 * width_x64 * height_x32;
1831 else if (mem_saving_mode == 1)
1832 bsize = 3072 * width_x64 * height_x32;
1833 else
1834 bsize = 4096 * width_x64 * height_x32;
1835
1836 return bsize;
1837}
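/*
 * Example: with mem_saving_mode off, a 3840x2160 frame needs
 * 4096 * ceil(3840/64) * ceil(2160/32) = 4096 * 60 * 68 bytes, i.e. about
 * 16 MB of compressed body buffer.
 */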
1838
1839/* Lossless compression header buffer size: 32 bytes per 128x64 block (jt) */
1840static int compute_losless_comp_header_size(int width, int height)
1841{
1842 int width_x128;
1843 int height_x64;
1844 int hsize;
1845
1846 width_x128 = width + 127;
1847 width_x128 >>= 7;
1848
1849 height_x64 = height + 63;
1850 height_x64 >>= 6;
1851
1852 hsize = 32*width_x128*height_x64;
1853
1854 return hsize;
1855}
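/*
 * Example: for 3840x2160, 32 * ceil(3840/128) * ceil(2160/64) =
 * 32 * 30 * 34 = 32640 bytes of header.
 */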
1856#endif
1857
1858static int add_log(struct hevc_state_s *hevc,
1859 const char *fmt, ...)
1860{
1861#define HEVC_LOG_BUF 196
1862 struct debug_log_s *log_item;
1863 unsigned char buf[HEVC_LOG_BUF];
1864 int len = 0;
1865 va_list args;
1866 mutex_lock(&vh265_log_mutex);
1867 va_start(args, fmt);
1868 len = sprintf(buf, "<%ld> <%05d> ",
1869 jiffies, hevc->decode_idx);
1870 len += vsnprintf(buf + len,
1871 HEVC_LOG_BUF - len, fmt, args);
1872 va_end(args);
1873 log_item = kmalloc(
1874 sizeof(struct debug_log_s) + len,
1875 GFP_KERNEL);
1876 if (log_item) {
1877 INIT_LIST_HEAD(&log_item->list);
1878 strcpy(&log_item->data, buf);
1879 list_add_tail(&log_item->list,
1880 &hevc->log_list);
1881 }
1882 mutex_unlock(&vh265_log_mutex);
1883 return 0;
1884}
1885
1886static void dump_log(struct hevc_state_s *hevc)
1887{
1888 int i = 0;
1889 struct debug_log_s *log_item, *tmp;
1890 mutex_lock(&vh265_log_mutex);
1891 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1892 hevc_print(hevc, 0,
1893 "[LOG%04d]%s\n",
1894 i++,
1895 &log_item->data);
1896 list_del(&log_item->list);
1897 kfree(log_item);
1898 }
1899 mutex_unlock(&vh265_log_mutex);
1900}
1901
1902static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1903 struct PIC_s *pic)
1904{
1905 if (pic->error_mark
1906 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1907 return 1;
1908 return 0;
1909}
1910
1911static int get_pic_poc(struct hevc_state_s *hevc,
1912 unsigned int idx)
1913{
1914 if (idx != 0xff
1915 && idx < MAX_REF_PIC_NUM
1916 && hevc->m_PIC[idx])
1917 return hevc->m_PIC[idx]->POC;
1918 return INVALID_POC;
1919}
1920
1921#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1922static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1923{
1924 return (hevc->m_ins_flag &&
1925 ((double_write_mode & 0x80000000) == 0)) ?
1926 hevc->double_write_mode :
1927 (double_write_mode & 0x7fffffff);
1928}
1929
1930static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1931{
1932 return (hevc->m_ins_flag &&
1933 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1934 hevc->dynamic_buf_num_margin :
1935 (dynamic_buf_num_margin & 0x7fffffff);
1936}
1937#endif
1938
1939static int get_double_write_mode(struct hevc_state_s *hevc)
1940{
1941 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1942 int w = hevc->pic_w;
1943 int h = hevc->pic_h;
1944 u32 dw = 0x1; /*1:1*/
1945 switch (valid_dw_mode) {
1946 case 0x100:
1947 if (w > 1920 && h > 1088)
1948 dw = 0x4; /*1:2*/
1949 break;
1950 case 0x200:
1951 if (w > 1920 && h > 1088)
1952 dw = 0x2; /*1:4*/
1953 break;
1954 case 0x300:
1955 if (w > 1280 && h > 720)
1956 dw = 0x4; /*1:2*/
1957 break;
1958 default:
1959 dw = valid_dw_mode;
1960 break;
1961 }
1962 return dw;
1963}
1964
1965static int v4l_parser_get_double_write_mode(struct hevc_state_s *hevc, int w, int h)
1966{
1967 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1968 u32 dw = 0x1; /*1:1*/
1969 switch (valid_dw_mode) {
1970 case 0x100:
1971 if (w > 1920 && h > 1088)
1972 dw = 0x4; /*1:2*/
1973 break;
1974 case 0x200:
1975 if (w > 1920 && h > 1088)
1976 dw = 0x2; /*1:4*/
1977 break;
1978 case 0x300:
1979 if (w > 1280 && h > 720)
1980 dw = 0x4; /*1:2*/
1981 break;
1982 default:
1983 dw = valid_dw_mode;
1984 break;
1985 }
1986 return dw;
1987}
1988
1989
1990static int get_double_write_ratio(struct hevc_state_s *hevc,
1991 int dw_mode)
1992{
1993 int ratio = 1;
1994 if ((dw_mode == 2) ||
1995 (dw_mode == 3))
1996 ratio = 4;
1997 else if (dw_mode == 4)
1998 ratio = 2;
1999 return ratio;
2000}
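/*
 * Note, as implied by the two helpers above: get_double_write_ratio() maps
 * a resolved double-write mode to the per-axis down-scale factor of the
 * double-write planes -- modes 2 and 3 scale by 4, mode 4 scales by 2, and
 * everything else (including 1 and 0x10) stays 1:1. The 0x100/0x200/0x300
 * modes are resolution-dependent and are resolved by
 * get_double_write_mode() before the ratio is looked up.
 */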
2001#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2002static unsigned char get_idx(struct hevc_state_s *hevc)
2003{
2004 return hevc->index;
2005}
2006#endif
2007
2008#undef pr_info
2009#define pr_info printk
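/*
 * hevc_print()/hevc_print_cont(): per-instance debug output. A message is
 * emitted when flag is 0 (unconditional) or when both this instance's bit
 * in debug_mask and the requested bit in debug are set.
 */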
2010static int hevc_print(struct hevc_state_s *hevc,
2011 int flag, const char *fmt, ...)
2012{
2013#define HEVC_PRINT_BUF 256
2014 unsigned char buf[HEVC_PRINT_BUF];
2015 int len = 0;
2016#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2017 if (hevc == NULL ||
2018 (flag == 0) ||
2019 ((debug_mask &
2020 (1 << hevc->index))
2021 && (debug & flag))) {
2022#endif
2023 va_list args;
2024
2025 va_start(args, fmt);
2026 if (hevc)
2027 len = sprintf(buf, "[%d]", hevc->index);
2028 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2029 pr_debug("%s", buf);
2030 va_end(args);
2031#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2032 }
2033#endif
2034 return 0;
2035}
2036
2037static int hevc_print_cont(struct hevc_state_s *hevc,
2038 int flag, const char *fmt, ...)
2039{
2040 unsigned char buf[HEVC_PRINT_BUF];
2041 int len = 0;
2042#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2043 if (hevc == NULL ||
2044 (flag == 0) ||
2045 ((debug_mask &
2046 (1 << hevc->index))
2047 && (debug & flag))) {
2048#endif
2049 va_list args;
2050
2051 va_start(args, fmt);
2052 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2053 pr_info("%s", buf);
2054 va_end(args);
2055#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2056 }
2057#endif
2058 return 0;
2059}
2060
2061static void put_mv_buf(struct hevc_state_s *hevc,
2062 struct PIC_s *pic);
2063
2064static void update_vf_memhandle(struct hevc_state_s *hevc,
2065 struct vframe_s *vf, struct PIC_s *pic);
2066
2067static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2068
2069static void release_aux_data(struct hevc_state_s *hevc,
2070 struct PIC_s *pic);
2071static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2072
2073#ifdef MULTI_INSTANCE_SUPPORT
2074static void backup_decode_state(struct hevc_state_s *hevc)
2075{
2076 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2077 hevc->curr_POC_bak = hevc->curr_POC;
2078 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2079 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2080 hevc->start_parser_type_bak = hevc->start_parser_type;
2081 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2082 hevc->rps_set_id_bak = hevc->rps_set_id;
2083 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2084 hevc->decode_idx_bak = hevc->decode_idx;
2085
2086}
2087
2088static void restore_decode_state(struct hevc_state_s *hevc)
2089{
2090 struct vdec_s *vdec = hw_to_vdec(hevc);
2091 if (!vdec_has_more_input(vdec)) {
2092 hevc->pic_decoded_lcu_idx =
2093 READ_VREG(HEVC_PARSER_LCU_START)
2094 & 0xffffff;
2095 return;
2096 }
2097 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2098 "%s: discard pic index 0x%x\n",
2099 __func__, hevc->decoding_pic ?
2100 hevc->decoding_pic->index : 0xff);
2101 if (hevc->decoding_pic) {
2102 hevc->decoding_pic->error_mark = 0;
2103 hevc->decoding_pic->output_ready = 0;
2104 hevc->decoding_pic->output_mark = 0;
2105 hevc->decoding_pic->referenced = 0;
2106 hevc->decoding_pic->POC = INVALID_POC;
2107 put_mv_buf(hevc, hevc->decoding_pic);
2108 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2109 release_aux_data(hevc, hevc->decoding_pic);
2110 hevc->decoding_pic = NULL;
2111 }
2112 hevc->decode_idx = hevc->decode_idx_bak;
2113 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2114 hevc->curr_POC = hevc->curr_POC_bak;
2115 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2116 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2117 hevc->start_parser_type = hevc->start_parser_type_bak;
2118 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2119 hevc->rps_set_id = hevc->rps_set_id_bak;
2120 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2121
2122 if (hevc->pic_list_init_flag == 1)
2123 hevc->pic_list_init_flag = 0;
2124 /*if (hevc->decode_idx == 0)
2125 hevc->start_decoding_flag = 0;*/
2126
2127 hevc->slice_idx = 0;
2128 hevc->used_4k_num = -1;
2129}
2130#endif
2131
2132static void hevc_init_stru(struct hevc_state_s *hevc,
2133 struct BuffInfo_s *buf_spec_i)
2134{
2135 int i;
2136 INIT_LIST_HEAD(&hevc->log_list);
2137 hevc->work_space_buf = buf_spec_i;
2138 hevc->prefix_aux_size = 0;
2139 hevc->suffix_aux_size = 0;
2140 hevc->aux_addr = NULL;
2141 hevc->rpm_addr = NULL;
2142 hevc->lmem_addr = NULL;
2143
2144 hevc->curr_POC = INVALID_POC;
2145
2146 hevc->pic_list_init_flag = 0;
2147 hevc->use_cma_flag = 0;
2148 hevc->decode_idx = 0;
2149 hevc->slice_idx = 0;
2150 hevc->new_pic = 0;
2151 hevc->new_tile = 0;
2152 hevc->iPrevPOC = 0;
2153 hevc->list_no = 0;
2154 /* int m_uiMaxCUWidth = 1<<7; */
2155 /* int m_uiMaxCUHeight = 1<<7; */
2156 hevc->m_pocRandomAccess = MAX_INT;
2157 hevc->tile_enabled = 0;
2158 hevc->tile_x = 0;
2159 hevc->tile_y = 0;
2160 hevc->iPrevTid0POC = 0;
2161 hevc->slice_addr = 0;
2162 hevc->slice_segment_addr = 0;
2163 hevc->skip_flag = 0;
2164 hevc->misc_flag0 = 0;
2165
2166 hevc->cur_pic = NULL;
2167 hevc->col_pic = NULL;
2168 hevc->wait_buf = 0;
2169 hevc->error_flag = 0;
2170 hevc->head_error_flag = 0;
2171 hevc->error_skip_nal_count = 0;
2172 hevc->have_vps = 0;
2173 hevc->have_sps = 0;
2174 hevc->have_pps = 0;
2175 hevc->have_valid_start_slice = 0;
2176
2177 hevc->pts_mode = PTS_NORMAL;
2178 hevc->last_pts = 0;
2179 hevc->last_lookup_pts = 0;
2180 hevc->last_pts_us64 = 0;
2181 hevc->last_lookup_pts_us64 = 0;
2182 hevc->pts_mode_switching_count = 0;
2183 hevc->pts_mode_recovery_count = 0;
2184
2185 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2186 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2187 if (hevc->PB_skip_mode == 0)
2188 hevc->ignore_bufmgr_error = 0x1;
2189 else
2190 hevc->ignore_bufmgr_error = 0x0;
2191
2192 if (hevc->is_used_v4l) {
2193 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2194 if (hevc->m_PIC[i] != NULL) {
2195				memset(hevc->m_PIC[i], 0, sizeof(struct PIC_s));
2196 hevc->m_PIC[i]->index = i;
2197 }
2198 }
2199 }
2200
2201 hevc->pic_num = 0;
2202 hevc->lcu_x_num_pre = 0;
2203 hevc->lcu_y_num_pre = 0;
2204 hevc->first_pic_after_recover = 0;
2205
2206 hevc->pre_top_pic = NULL;
2207 hevc->pre_bot_pic = NULL;
2208
2209 hevc->sei_present_flag = 0;
2210 hevc->valve_count = 0;
2211 hevc->first_pic_flag = 0;
2212#ifdef MULTI_INSTANCE_SUPPORT
2213 hevc->decoded_poc = INVALID_POC;
2214 hevc->start_process_time = 0;
2215 hevc->last_lcu_idx = 0;
2216 hevc->decode_timeout_count = 0;
2217 hevc->timeout_num = 0;
2218 hevc->eos = 0;
2219 hevc->pic_decoded_lcu_idx = -1;
2220 hevc->over_decode = 0;
2221 hevc->used_4k_num = -1;
2222 hevc->start_decoding_flag = 0;
2223 hevc->rps_set_id = 0;
2224 backup_decode_state(hevc);
2225#endif
2226#ifdef DETREFILL_ENABLE
2227 hevc->detbuf_adr = 0;
2228 hevc->detbuf_adr_virt = NULL;
2229#endif
2230}
2231
2232static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2233static int H265_alloc_mmu(struct hevc_state_s *hevc,
2234 struct PIC_s *new_pic, unsigned short bit_depth,
2235 unsigned int *mmu_index_adr);
2236
2237#ifdef DETREFILL_ENABLE
2238#define DETREFILL_BUF_SIZE (4 * 0x4000)
2239#define HEVC_SAO_DBG_MODE0 0x361e
2240#define HEVC_SAO_DBG_MODE1 0x361f
2241#define HEVC_SAO_CTRL10 0x362e
2242#define HEVC_SAO_CTRL11 0x362f
2243static int init_detrefill_buf(struct hevc_state_s *hevc)
2244{
2245 if (hevc->detbuf_adr_virt)
2246 return 0;
2247
2248 hevc->detbuf_adr_virt =
2249 (void *)dma_alloc_coherent(amports_get_dma_device(),
2250 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2251 GFP_KERNEL);
2252
2253 if (hevc->detbuf_adr_virt == NULL) {
2254		pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2255 return -1;
2256 }
2257 return 0;
2258}
2259
2260static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2261{
2262 if (hevc->detbuf_adr_virt) {
2263 dma_free_coherent(amports_get_dma_device(),
2264 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2265 hevc->detbuf_adr);
2266
2267 hevc->detbuf_adr_virt = NULL;
2268 hevc->detbuf_adr = 0;
2269 }
2270}
2271
2272/*
2273 * convert uncompressed frame buffer data from/to ddr
2274 */
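/*
 * Layout note, inferred from the unpacking below: each group of eight
 * 16-bit body words (128 bits) carries twelve 10-bit samples -- the first
 * group holds eight luma plus four chroma samples, the next two hold
 * twelve luma each, and the last holds twelve chroma -- with the top bits
 * of the final word in each group left unused.
 */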
2275static void convUnc8x4blk(uint16_t* blk8x4Luma,
2276 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2277{
2278 if (direction == 0) {
2279 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2280 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2281 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2282 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2283 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2284 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2285 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2286 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2287 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2288 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2289 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2290 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2291 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2292 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2293 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2294 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2295 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2296 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2297
2298 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2299 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2300 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2301 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2302 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2303 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2304 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2305 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2306 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2307 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2308 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2309 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2310 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2311 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2312 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2313 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2314 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2315 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2316
2317 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2318 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2319 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2320 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2321 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2322 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2323 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2324 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2325 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2326 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2327 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2328 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2329 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2330 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2331 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2332 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2333 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2334 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2335
2336 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2337 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2338 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2339 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2340 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2341 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2342 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2343 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2344 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2345 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2346 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2347 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2348 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2349 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2350 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2351 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2352 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2353 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2354 } else {
2355 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2356 blk8x4Luma[3 + 0 * 8];
2357 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2358 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2359 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2360 (blk8x4Luma[3 + 3 * 8] >> 2);
2361 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2362 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2363 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2364 (blk8x4Luma[7 + 2 * 8] >>4);
2365 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2366 blk8x4Cb[0 + 0 * 4];
2367 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2368 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2369 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2370
2371 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2372 blk8x4Luma[0 + 0 * 8];
2373 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2374 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2375 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2376 (blk8x4Luma[0 + 1 * 8] >> 2);
2377 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2378 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2379 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2380 (blk8x4Luma[0 + 2 * 8] >>4);
2381 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2382 blk8x4Luma[2 + 2 * 8];
2383 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2384 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2385 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2386
2387 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2388 blk8x4Luma[4 + 0 * 8];
2389 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2390 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2391 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2392 (blk8x4Luma[4 + 1 * 8] >> 2);
2393 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2394 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2395 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2396 (blk8x4Luma[4 + 2 * 8] >>4);
2397 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2398 blk8x4Luma[6 + 2 * 8];
2399 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2400 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2401 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2402
2403 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2404 blk8x4Cb[1 + 0 * 4];
2405 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2406 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2407 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2408 (blk8x4Cr[2 + 0 * 4] >> 2);
2409 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2410 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2411 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2412 (blk8x4Cb[1 + 1 * 4] >>4);
2413 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2414 blk8x4Cb[2 + 1 * 4];
2415 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2416 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2417 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2418 }
2419}
2420
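/*
 * corrRefillWithAmrisc(): a sketch of the flow below, as read from the
 * register writes. Program HEVC_SAO_CTRL10/11 with the compressed-header
 * addresses of the above/current 64x64 CTU, ask the AMRISC to dump the
 * body data into detbuf (HEVC_SAO_DBG_MODE1 = 2), patch the 8x4 blocks in
 * software via convUnc8x4blk(), then request the write-back
 * (HEVC_SAO_DBG_MODE1 = 3) and wait for it to finish.
 */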
2421static void corrRefillWithAmrisc (
2422 struct hevc_state_s *hevc,
2423 uint32_t cmHeaderBaseAddr,
2424 uint32_t picWidth,
2425 uint32_t ctuPosition)
2426{
2427 int32_t i;
2428 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2429 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2430 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2431
2432 uint16_t cmBodyBuf[32 * 18];
2433
2434 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2435 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2436 uint32_t stride64x64 = pic_width_x64 * 128;
2437 uint32_t addr_offset64x64_abv = stride64x64 *
2438 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2439 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2440 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2441 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2442 unsigned int tmpData32;
2443
2444 uint16_t blkBuf0Y[32];
2445 uint16_t blkBuf0Cb[8];
2446 uint16_t blkBuf0Cr[8];
2447 uint16_t blkBuf1Y[32];
2448 uint16_t blkBuf1Cb[8];
2449 uint16_t blkBuf1Cr[8];
2450 int32_t blkBufCnt = 0;
2451
2452 int32_t blkIdx;
2453
2454 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2455 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2456 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2457 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2458
2459 for (i = 0; i < 32 * 18; i++)
2460 cmBodyBuf[i] = 0;
2461
2462 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2463 "%s, %d\n", __func__, __LINE__);
2464 do {
2465 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2466 } while (tmpData32);
2467 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2468 "%s, %d\n", __func__, __LINE__);
2469
2470 hevc_print(hevc, H265_DEBUG_DETAIL,
2471 "cmBodyBuf from detbuf:\n");
2472 for (i = 0; i < 32 * 18; i++) {
2473 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2474 if (get_dbg_flag(hevc) &
2475 H265_DEBUG_DETAIL) {
2476 if ((i & 0xf) == 0)
2477 hevc_print_cont(hevc, 0, "\n");
2478 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2479 }
2480 }
2481 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2482
2483 for (i = 0; i < 32; i++)
2484 blkBuf0Y[i] = 0;
2485 for (i = 0; i < 8; i++)
2486 blkBuf0Cb[i] = 0;
2487 for (i = 0; i < 8; i++)
2488 blkBuf0Cr[i] = 0;
2489 for (i = 0; i < 32; i++)
2490 blkBuf1Y[i] = 0;
2491 for (i = 0; i < 8; i++)
2492 blkBuf1Cb[i] = 0;
2493 for (i = 0; i < 8; i++)
2494 blkBuf1Cr[i] = 0;
2495
2496 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2497 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2498 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2499 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2500 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2501 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2502 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2503
2504 if (!aboveCtuAvailable && inAboveCtu)
2505 continue;
2506
2507 /* detRefillBuf --> 8x4block*/
2508 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2509
2510 if (restoreEnable) {
2511 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2512 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2513 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2514 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2515 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2516 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2517 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2518 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2519 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2520 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2521 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2522 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2523 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2524 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2525 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2526 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2527 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2528 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2529 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2530 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2531 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2532 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2533 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2534 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2535 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2536 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2537 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2538 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2539 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2540 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2541 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2542 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2543
2544 /*Store data back to DDR*/
2545 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2546 }
2547
2548 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2549 }
2550
2551 hevc_print(hevc, H265_DEBUG_DETAIL,
2552 "cmBodyBuf to detbuf:\n");
2553 for (i = 0; i < 32 * 18; i++) {
2554 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2555 if (get_dbg_flag(hevc) &
2556 H265_DEBUG_DETAIL) {
2557 if ((i & 0xf) == 0)
2558 hevc_print_cont(hevc, 0, "\n");
2559 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2560 }
2561 }
2562 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2563
2564 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2565 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2566 "%s, %d\n", __func__, __LINE__);
2567 do {
2568 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2569 } while (tmpData32);
2570 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2571 "%s, %d\n", __func__, __LINE__);
2572}
2573
2574static void delrefill(struct hevc_state_s *hevc)
2575{
2576 /*
2577 * corrRefill
2578 */
2579	/* HEVC_SAO_DBG_MODE0: picGlobalVariable
2580	 *   [31:30] error number
2581	 *   [29:20] error2 ([9:7] tilex, [6:0] ctuy)
2582	 *   [19:10] error1, [9:0] error0 */
2583 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2584 uint32_t errorIdx;
2585 uint32_t errorNum = (detResult>>30);
2586
2587 if (detResult) {
2588 hevc_print(hevc, H265_DEBUG_BUFMGR,
2589 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2590 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2591 uint32_t errorPos = errorIdx * 10;
2592 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2593 uint32_t tilex = (errorResult >> 7) - 1;
2594 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2595 + hevc->m_tile[0][tilex].width - 1;
2596 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2597 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2598 hevc_print(hevc, H265_DEBUG_BUFMGR,
2599 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2600				errorIdx, tilex, ctux, ctux, ctuy, ctuy);
2601 corrRefillWithAmrisc(
2602 hevc,
2603 (uint32_t)hevc->cur_pic->header_adr,
2604 hevc->pic_w,
2605 ctuPosition);
2606 }
2607
2608 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2609 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2610 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2611 }
2612}
2613#endif
2614
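/*
 * get_rpm_param(): reads the 128 parameter words handed over by the ucode.
 * For each word the loop below polls RPM_CMD_REG until bit 16 (data valid)
 * is set, keeps the low 16 bits, and writes 0 back as an acknowledge so
 * the next word can be posted.
 */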
2615static void get_rpm_param(union param_u *params)
2616{
2617 int i;
2618 unsigned int data32;
2619
2620 for (i = 0; i < 128; i++) {
2621 do {
2622 data32 = READ_VREG(RPM_CMD_REG);
2623 /* hevc_print(hevc, 0, "%x\n", data32); */
2624 } while ((data32 & 0x10000) == 0);
2625 params->l.data[i] = data32 & 0xffff;
2626 /* hevc_print(hevc, 0, "%x\n", data32); */
2627 WRITE_VREG(RPM_CMD_REG, 0);
2628 }
2629}
2630
2631static int get_free_buf_idx(struct hevc_state_s *hevc)
2632{
2633 int index = INVALID_IDX;
2634 struct PIC_s *pic;
2635 int i;
2636
2637 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2638 pic = hevc->m_PIC[i];
2639 if (pic == NULL ||
2640 pic->index == -1 ||
2641 pic->BUF_index == -1)
2642 continue;
2643
2644 if (pic->output_mark == 0 &&
2645 pic->referenced == 0 &&
2646 pic->output_ready == 0 &&
2647 pic->cma_alloc_addr) {
2648 pic->output_ready = 1;
2649 index = i;
2650 break;
2651 }
2652 }
2653
2654 return index;
2655}
2656
2657static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2658{
2659 int i;
2660 struct PIC_s *pic;
2661 struct PIC_s *ret_pic = NULL;
2662 if (POC == INVALID_POC)
2663 return NULL;
2664 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2665 pic = hevc->m_PIC[i];
2666 if (pic == NULL || pic->index == -1 ||
2667 pic->BUF_index == -1)
2668 continue;
2669 if (pic->POC == POC) {
2670 if (ret_pic == NULL)
2671 ret_pic = pic;
2672 else {
2673 if (pic->decode_idx > ret_pic->decode_idx)
2674 ret_pic = pic;
2675 }
2676 }
2677 }
2678 return ret_pic;
2679}
2680
2681static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2682{
2683 int i;
2684 struct PIC_s *pic;
2685 struct PIC_s *ret_pic = NULL;
2686
2687 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2688 pic = hevc->m_PIC[i];
2689 if (pic == NULL || pic->index == -1 ||
2690 pic->BUF_index == -1)
2691 continue;
2692 if ((pic->POC == POC) && (pic->referenced)) {
2693 if (ret_pic == NULL)
2694 ret_pic = pic;
2695 else {
2696 if (pic->decode_idx > ret_pic->decode_idx)
2697 ret_pic = pic;
2698 }
2699 }
2700 }
2701
2702 if (ret_pic == NULL) {
2703 if (get_dbg_flag(hevc)) {
2704 hevc_print(hevc, 0,
2705 "Wrong, POC of %d is not in referenced list\n",
2706 POC);
2707 }
2708 ret_pic = get_pic_by_POC(hevc, POC);
2709 }
2710 return ret_pic;
2711}
2712
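/*
 * log2i() returns floor(log2(val)) by counting right shifts, e.g.
 * log2i(64) = 6 and log2i(100) = 6; log2i(0) yields (unsigned)-1.
 */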
2713static unsigned int log2i(unsigned int val)
2714{
2715 unsigned int ret = -1;
2716
2717 while (val != 0) {
2718 val >>= 1;
2719 ret++;
2720 }
2721 return ret;
2722}
2723
2724static int init_buf_spec(struct hevc_state_s *hevc);
2725
2726static bool v4l_is_there_vframe_bound(struct hevc_state_s *hevc)
2727{
2728 int i;
2729
2730 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2731 struct PIC_s *pic = hevc->m_PIC[i];
2732
2733 if (pic && pic->vframe_bound)
2734 return true;
2735 }
2736
2737 return false;
2738}
2739
2740static void v4l_mmu_buffer_release(struct hevc_state_s *hevc)
2741{
2742 int i;
2743
2744 /* release workspace */
2745 if (hevc->bmmu_box)
2746 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2747 BMMU_WORKSPACE_ID);
2748 /*
2749	 * Only when a vframe comes back to the driver can we be sure that
2750	 * the vframe and its fd are still related. If playback exits, the
2751	 * buffers held by capture must be released by the upper app when the
2752	 * fd is closed; the other buffers are released by the driver here.
2753 */
2754 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2755 struct PIC_s *pic = hevc->m_PIC[i];
2756
2757 if (pic && !pic->vframe_bound) {
2758 if (hevc->bmmu_box)
2759 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2760 VF_BUFFER_IDX(i));
2761 if (hevc->mmu_box)
2762 decoder_mmu_box_free_idx(hevc->mmu_box, i);
2763
2764 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
2765 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
2766 __func__, i, hevc->bmmu_box, hevc->mmu_box);
2767 }
2768 }
2769}
2770
2771static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2772{
2773 if (hevc->is_used_v4l &&
2774 v4l_is_there_vframe_bound(hevc)) {
2775 if (get_double_write_mode(hevc) != 0x10) {
2776 v4l_mmu_buffer_release(hevc);
2777 return;
2778 }
2779 }
2780
2781 if (hevc->mmu_box)
2782 decoder_mmu_box_free(hevc->mmu_box);
2783 hevc->mmu_box = NULL;
2784
2785 if (hevc->bmmu_box)
2786 decoder_bmmu_box_free(hevc->bmmu_box);
2787 hevc->bmmu_box = NULL;
2788}
2789static int init_mmu_buffers(struct hevc_state_s *hevc)
2790{
2791 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2792 CODEC_MM_FLAGS_TVP : 0;
2793 int buf_size = 64;
2794
2795 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2796 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2797 buf_size = 24;
2798 }
2799
2800 if (get_dbg_flag(hevc)) {
2801 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2802 __func__, hevc->max_pic_w, hevc->max_pic_h);
2803 }
2804
2805 hevc->need_cache_size = buf_size * SZ_1M;
2806 hevc->sc_start_time = get_jiffies_64();
2807 if (hevc->mmu_enable
2808 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2809 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2810 hevc->index,
2811 MAX_REF_PIC_NUM,
2812 buf_size * SZ_1M,
2813 tvp_flag
2814 );
2815 if (!hevc->mmu_box) {
2816 pr_err("h265 alloc mmu box failed!!\n");
2817 return -1;
2818 }
2819 }
2820
2821 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2822 hevc->index,
2823 BMMU_MAX_BUFFERS,
2824 4 + PAGE_SHIFT,
2825 CODEC_MM_FLAGS_CMA_CLEAR |
2826 CODEC_MM_FLAGS_FOR_VDECODER |
2827 tvp_flag);
2828 if (!hevc->bmmu_box) {
2829 if (hevc->mmu_box)
2830 decoder_mmu_box_free(hevc->mmu_box);
2831 hevc->mmu_box = NULL;
2832 pr_err("h265 alloc mmu box failed!!\n");
2833 return -1;
2834 }
2835 return 0;
2836}
2837
2838struct buf_stru_s
2839{
2840 int lcu_total;
2841 int mc_buffer_size_h;
2842 int mc_buffer_size_u_v_h;
2843};
2844
2845#ifndef MV_USE_FIXED_BUF
2846static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2847{
2848 int i;
2849 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2850 if (hevc->m_mv_BUF[i].start_adr) {
2851 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2852 hevc_print(hevc, 0,
2853 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2854 i, hevc->m_mv_BUF[i].start_adr,
2855 hevc->m_mv_BUF[i].size,
2856 hevc->m_mv_BUF[i].used_flag);
2857 decoder_bmmu_box_free_idx(
2858 hevc->bmmu_box,
2859 MV_BUFFER_IDX(i));
2860 hevc->m_mv_BUF[i].start_adr = 0;
2861 hevc->m_mv_BUF[i].size = 0;
2862 hevc->m_mv_BUF[i].used_flag = 0;
2863 }
2864 }
2865 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2866 if (hevc->m_PIC[i] != NULL)
2867 hevc->m_PIC[i]->mv_buf_index = -1;
2868 }
2869}
2870
2871static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2872{
2873 int ret = 0;
2874 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2875 if (decoder_bmmu_box_alloc_buf_phy
2876 (hevc->bmmu_box,
2877 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2878 DRIVER_NAME,
2879 &hevc->m_mv_BUF[i].start_adr) < 0) {
2880 hevc->m_mv_BUF[i].start_adr = 0;
2881 ret = -1;
2882 } else {
2883 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2884 hevc->m_mv_BUF[i].used_flag = 0;
2885 ret = 0;
2886 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2887 hevc_print(hevc, 0,
2888 "MV Buffer %d: start_adr %p size %x\n",
2889 i,
2890 (void *)hevc->m_mv_BUF[i].start_adr,
2891 hevc->m_mv_BUF[i].size);
2892 }
2893 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2894 void *mem_start_virt;
2895 mem_start_virt =
2896 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2897 if (mem_start_virt) {
2898 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2899 codec_mm_dma_flush(mem_start_virt,
2900 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2901 } else {
2902 mem_start_virt = codec_mm_vmap(
2903 hevc->m_mv_BUF[i].start_adr,
2904 hevc->m_mv_BUF[i].size);
2905 if (mem_start_virt) {
2906 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2907 codec_mm_dma_flush(mem_start_virt,
2908 hevc->m_mv_BUF[i].size,
2909 DMA_TO_DEVICE);
2910 codec_mm_unmap_phyaddr(mem_start_virt);
2911 } else {
2912					/* no virtual mapping for TVP playback;
2913					 * clearing may need to be done by ucode. */
2914 pr_err("ref %s mem_start_virt failed\n", __func__);
2915 }
2916 }
2917 }
2918 }
2919 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2920 return ret;
2921}
2922#endif
2923
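/*
 * get_mv_buf(): with MV_USE_FIXED_BUF the MV area lives at a fixed
 * per-index offset inside the workspace; otherwise a buffer is taken from
 * (or added to) the m_mv_BUF pool, sized by resolution (MPRED_MV_BUF_SIZE,
 * MPRED_4K_MV_BUF_SIZE or MPRED_8K_MV_BUF_SIZE) plus 0x10000 of slack so
 * the write start address can be rounded up to a 64 KB boundary.
 */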
2924static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2925{
2926#ifdef MV_USE_FIXED_BUF
2927 if (pic && pic->index >= 0) {
2928 if (IS_8K_SIZE(pic->width, pic->height)) {
2929 pic->mpred_mv_wr_start_addr =
2930 hevc->work_space_buf->mpred_mv.buf_start
2931 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2932 } else {
2933 pic->mpred_mv_wr_start_addr =
2934 hevc->work_space_buf->mpred_mv.buf_start
2935 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2936 }
2937 }
2938 return 0;
2939#else
2940 int i;
2941 int ret = -1;
2942 int new_size;
2943 if (IS_8K_SIZE(pic->width, pic->height))
2944 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2945 else if (IS_4K_SIZE(pic->width, pic->height))
2946 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2947 else
2948 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2949 if (new_size != hevc->mv_buf_size) {
2950 dealloc_mv_bufs(hevc);
2951 hevc->mv_buf_size = new_size;
2952 }
2953 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2954 if (hevc->m_mv_BUF[i].start_adr &&
2955 hevc->m_mv_BUF[i].used_flag == 0) {
2956 hevc->m_mv_BUF[i].used_flag = 1;
2957 ret = i;
2958 break;
2959 }
2960 }
2961 if (ret < 0) {
2962 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2963 if (hevc->m_mv_BUF[i].start_adr == 0) {
2964 if (alloc_mv_buf(hevc, i) >= 0) {
2965 hevc->m_mv_BUF[i].used_flag = 1;
2966 ret = i;
2967 }
2968 break;
2969 }
2970 }
2971 }
2972
2973 if (ret >= 0) {
2974 pic->mv_buf_index = ret;
2975 pic->mpred_mv_wr_start_addr =
2976 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2977 (~0xffff);
2978 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2979 "%s => %d (0x%x) size 0x%x\n",
2980 __func__, ret,
2981 pic->mpred_mv_wr_start_addr,
2982 hevc->m_mv_BUF[ret].size);
2983
2984 } else {
2985 hevc_print(hevc, 0,
2986			"%s: Error, no free mv buf available\n",
2987 __func__);
2988 }
2989 return ret;
2990
2991#endif
2992}
2993
2994static void put_mv_buf(struct hevc_state_s *hevc,
2995 struct PIC_s *pic)
2996{
2997#ifndef MV_USE_FIXED_BUF
2998 int i = pic->mv_buf_index;
2999 if (i < 0 || i >= MAX_REF_PIC_NUM) {
3000 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
3001 "%s: index %d beyond range\n",
3002 __func__, i);
3003 return;
3004 }
3005 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
3006 "%s(%d): used_flag(%d)\n",
3007 __func__, i,
3008 hevc->m_mv_BUF[i].used_flag);
3009
3010 if (hevc->m_mv_BUF[i].start_adr &&
3011 hevc->m_mv_BUF[i].used_flag)
3012 hevc->m_mv_BUF[i].used_flag = 0;
3013 pic->mv_buf_index = -1;
3014#endif
3015}
3016
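/*
 * Rough example of the sizing below (assuming MMU enabled, double write
 * disabled and a sub-8K picture): the only contribution is the 64 KB
 * aligned compressed header, ((0x48000 + 0xffff) >> 16) << 16 = 0x50000,
 * plus the 0x10000 alignment slack, i.e. buf_size = 0x60000 per picture.
 */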
3017static int cal_current_buf_size(struct hevc_state_s *hevc,
3018 struct buf_stru_s *buf_stru)
3019{
3020
3021 int buf_size;
3022 int pic_width = hevc->pic_w;
3023 int pic_height = hevc->pic_h;
3024 int lcu_size = hevc->lcu_size;
3025 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
3026 1 : pic_width / lcu_size;
3027 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
3028 1 : pic_height / lcu_size;
3029 /*SUPPORT_10BIT*/
3030 int losless_comp_header_size = compute_losless_comp_header_size
3031 (pic_width, pic_height);
3032 /*always alloc buf for 10bit*/
3033 int losless_comp_body_size = compute_losless_comp_body_size
3034 (hevc, pic_width, pic_height, 0);
3035 int mc_buffer_size = losless_comp_header_size
3036 + losless_comp_body_size;
3037 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
3038 int mc_buffer_size_u_v_h = 0;
3039
3040 int dw_mode = get_double_write_mode(hevc);
3041
3042 if (hevc->mmu_enable) {
3043 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3044 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3045 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
3046 << 16;
3047 else
3048 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
3049 << 16;
3050 } else
3051 buf_size = 0;
3052
3053 if (dw_mode) {
3054 int pic_width_dw = pic_width /
3055 get_double_write_ratio(hevc, dw_mode);
3056 int pic_height_dw = pic_height /
3057 get_double_write_ratio(hevc, dw_mode);
3058
3059 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
3060 pic_width_dw / lcu_size + 1 :
3061 pic_width_dw / lcu_size;
3062 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
3063 pic_height_dw / lcu_size + 1 :
3064 pic_height_dw / lcu_size;
3065 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
3066
3067 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
3068 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
3069 /*64k alignment*/
3070 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
3071 }
3072
3073 if ((!hevc->mmu_enable) &&
3074 ((dw_mode & 0x10) == 0)) {
3075		/* compressed mode without MMU:
3076		 * a buffer is needed for the compressed reference data */
3077 buf_size += (mc_buffer_size_h << 16);
3078 }
3079
3080	/* in case the start address is not 64 KB aligned */
3081 if (buf_size > 0)
3082 buf_size += 0x10000;
3083
3084 if (buf_stru) {
3085 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
3086 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
3087 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
3088 }
3089
3090	hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
3091 pic_width, pic_height, losless_comp_header_size,
3092 losless_comp_body_size, mc_buffer_size_h,
3093 mc_buffer_size_u_v_h, buf_size);
3094
3095 return buf_size;
3096}
3097
3098static int v4l_alloc_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
3099{
3100 int ret = -1;
3101 int i = pic->index;
3102 struct vdec_v4l2_buffer *fb = NULL;
3103
3104 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3105 return ret;
3106
3107 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3108 if (ret < 0) {
3109 hevc_print(hevc, 0, "[%d] H265 get buffer fail.\n",
3110 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id);
3111 return ret;
3112 }
3113
3114 if (hevc->mmu_enable) {
3115 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3116 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3117 hevc->m_BUF[i].header_size =
3118 ALIGN(MMU_COMPRESS_8K_HEADER_SIZE, 0x10000);
3119 else
3120 hevc->m_BUF[i].header_size =
3121 ALIGN(MMU_COMPRESS_HEADER_SIZE, 0x10000);
3122
3123 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
3124 VF_BUFFER_IDX(i), hevc->m_BUF[i].header_size,
3125 DRIVER_NAME, &hevc->m_BUF[i].header_addr);
3126 if (ret < 0) {
3127 hevc_print(hevc, PRINT_FLAG_ERROR,
3128 "%s[%d], header size: %d, no mem fatal err\n",
3129 __func__, i, hevc->m_BUF[i].header_size);
3130 return ret;
3131 }
3132 }
3133
3134 hevc->m_BUF[i].used_flag = 0;
3135 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3136 pic->cma_alloc_addr = hevc->m_BUF[i].v4l_ref_buf_addr;
3137 if (fb->num_planes == 1) {
3138 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3139 hevc->m_BUF[i].luma_size = fb->m.mem[0].offset;
3140 hevc->m_BUF[i].size = fb->m.mem[0].size;
3141 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3142 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3143 pic->dw_u_v_adr = pic->dw_y_adr + hevc->m_BUF[i].luma_size;
3144 } else if (fb->num_planes == 2) {
3145 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3146 hevc->m_BUF[i].luma_size = fb->m.mem[0].size;
3147 hevc->m_BUF[i].chroma_addr = fb->m.mem[1].addr;
3148 hevc->m_BUF[i].chroma_size = fb->m.mem[1].size;
3149 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3150 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3151 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3152 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3153 pic->dw_u_v_adr = hevc->m_BUF[i].chroma_addr;
3154 }
3155
3156 return ret;
3157}
3158
3159static int alloc_buf(struct hevc_state_s *hevc)
3160{
3161 int i;
3162 int ret = -1;
3163 int buf_size = cal_current_buf_size(hevc, NULL);
3164
3165 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3166 return ret;
3167
3168 for (i = 0; i < BUF_POOL_SIZE; i++) {
3169 if (hevc->m_BUF[i].start_adr == 0)
3170 break;
3171 }
3172 if (i < BUF_POOL_SIZE) {
3173 if (buf_size > 0) {
3174 ret = decoder_bmmu_box_alloc_buf_phy
3175 (hevc->bmmu_box,
3176 VF_BUFFER_IDX(i), buf_size,
3177 DRIVER_NAME,
3178 &hevc->m_BUF[i].start_adr);
3179 if (ret < 0) {
3180 hevc->m_BUF[i].start_adr = 0;
3181 if (i <= 8) {
3182 hevc->fatal_error |=
3183 DECODER_FATAL_ERROR_NO_MEM;
3184 hevc_print(hevc, PRINT_FLAG_ERROR,
3185 "%s[%d], size: %d, no mem fatal err\n",
3186 __func__, i, buf_size);
3187 }
3188 }
3189
3190 if (ret >= 0) {
3191 hevc->m_BUF[i].size = buf_size;
3192 hevc->m_BUF[i].used_flag = 0;
3193 ret = 0;
3194
3195 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3196 hevc_print(hevc, 0,
3197 "Buffer %d: start_adr %p size %x\n",
3198 i,
3199 (void *)hevc->m_BUF[i].start_adr,
3200 hevc->m_BUF[i].size);
3201 }
3202 /*flush the buffer make sure no cache dirty*/
3203 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3204 void *mem_start_virt;
3205 mem_start_virt =
3206 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3207 if (mem_start_virt) {
3208 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3209 codec_mm_dma_flush(mem_start_virt,
3210 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3211 } else {
3212 mem_start_virt = codec_mm_vmap(
3213 hevc->m_BUF[i].start_adr,
3214 hevc->m_BUF[i].size);
3215 if (mem_start_virt) {
3216 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3217 codec_mm_dma_flush(mem_start_virt,
3218 hevc->m_BUF[i].size,
3219 DMA_TO_DEVICE);
3220 codec_mm_unmap_phyaddr(mem_start_virt);
3221 } else {
3222						/* no virtual mapping for TVP playback;
3223						 * clearing may need to be done by ucode. */
3224 pr_err("ref %s mem_start_virt failed\n", __func__);
3225 }
3226 }
3227 }
3228 }
3229 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3230 } else
3231 ret = 0;
3232 }
3233
3234 if (ret >= 0) {
3235 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3236 hevc_print(hevc, 0,
3237				"alloc buf(%d) for %d/%d size 0x%x => %p\n",
3238 i, hevc->pic_w, hevc->pic_h,
3239 buf_size,
3240 hevc->m_BUF[i].start_adr);
3241 }
3242 } else {
3243 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3244 hevc_print(hevc, 0,
3245				"alloc buf(%d) for %d/%d size 0x%x => Fail!!!\n",
3246 i, hevc->pic_w, hevc->pic_h,
3247 buf_size);
3248 }
3249 }
3250 return ret;
3251}
3252
3253static void set_buf_unused(struct hevc_state_s *hevc, int i)
3254{
3255 if (i >= 0 && i < BUF_POOL_SIZE)
3256 hevc->m_BUF[i].used_flag = 0;
3257}
3258
3259static void dealloc_unused_buf(struct hevc_state_s *hevc)
3260{
3261 int i;
3262 for (i = 0; i < BUF_POOL_SIZE; i++) {
3263 if (hevc->m_BUF[i].start_adr &&
3264 hevc->m_BUF[i].used_flag == 0) {
3265 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3266 hevc_print(hevc, 0,
3267 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3268 i, hevc->m_BUF[i].start_adr,
3269 hevc->m_BUF[i].size);
3270 }
3271 if (!hevc->is_used_v4l)
3272 decoder_bmmu_box_free_idx(
3273 hevc->bmmu_box,
3274 VF_BUFFER_IDX(i));
3275 hevc->m_BUF[i].start_adr = 0;
3276 hevc->m_BUF[i].size = 0;
3277 }
3278 }
3279}
3280
3281static void dealloc_pic_buf(struct hevc_state_s *hevc,
3282 struct PIC_s *pic)
3283{
3284 int i = pic->BUF_index;
3285 pic->BUF_index = -1;
3286 if (i >= 0 &&
3287 i < BUF_POOL_SIZE &&
3288 hevc->m_BUF[i].start_adr) {
3289 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3290 hevc_print(hevc, 0,
3291 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3292 i, hevc->m_BUF[i].start_adr,
3293 hevc->m_BUF[i].size);
3294 }
3295
3296 if (!hevc->is_used_v4l)
3297 decoder_bmmu_box_free_idx(
3298 hevc->bmmu_box,
3299 VF_BUFFER_IDX(i));
3300 hevc->m_BUF[i].used_flag = 0;
3301 hevc->m_BUF[i].start_adr = 0;
3302 hevc->m_BUF[i].size = 0;
3303 }
3304}
3305
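/*
 * get_work_pic_num(): a worked example of the policy below, using
 * hypothetical stream values: with sps_num_reorder_pics_0 = 4,
 * sps_max_dec_pic_buffering_minus1_0 = 5 and a dynamic margin of 7, the
 * base count is 4 + 7 = 11; multi-instance mode adds one more, a
 * reorder/buffering gap of 4 or larger would add another, and the result
 * is capped at MAX_BUF_NUM.
 */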
3306static int get_work_pic_num(struct hevc_state_s *hevc)
3307{
3308 int used_buf_num = 0;
3309 int sps_pic_buf_diff = 0;
3310
3311 if (get_dynamic_buf_num_margin(hevc) > 0) {
3312 if ((!hevc->sps_num_reorder_pics_0) &&
3313 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3314 /* the range of sps_num_reorder_pics_0 is in
3315 [0, sps_max_dec_pic_buffering_minus1_0] */
3316 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3317 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3318 } else
3319 used_buf_num = hevc->sps_num_reorder_pics_0
3320 + get_dynamic_buf_num_margin(hevc);
3321
3322 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3323 - hevc->sps_num_reorder_pics_0;
3324#ifdef MULTI_INSTANCE_SUPPORT
3325 /*
3326		need one more buffer for multi-instance, as
3327		apply_ref_pic_set() has no chance to run and
3328		clear the referenced flag in some cases
3329 */
3330 if (hevc->m_ins_flag)
3331 used_buf_num++;
3332#endif
3333 } else
3334 used_buf_num = max_buf_num;
3335
3336 if (hevc->save_buffer_mode)
3337 hevc_print(hevc, 0,
3338			"save buf mode: dynamic_buf_num_margin %d ----> %d\n",
3339 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3340
3341 if (sps_pic_buf_diff >= 4)
3342 {
3343 used_buf_num += 1;
3344 }
3345
3346 if (hevc->is_used_v4l) {
3347 /* for eos add more buffer to flush.*/
3348 used_buf_num++;
3349 }
3350
3351 if (used_buf_num > MAX_BUF_NUM)
3352 used_buf_num = MAX_BUF_NUM;
3353 return used_buf_num;
3354}
3355
3356static int v4l_parser_work_pic_num(struct hevc_state_s *hevc)
3357{
3358 int used_buf_num = 0;
3359 int sps_pic_buf_diff = 0;
3360 pr_debug("margin = %d, sps_max_dec_pic_buffering_minus1_0 = %d, sps_num_reorder_pics_0 = %d\n",
3361 get_dynamic_buf_num_margin(hevc),
3362 hevc->param.p.sps_max_dec_pic_buffering_minus1_0,
3363 hevc->param.p.sps_num_reorder_pics_0);
3364 if (get_dynamic_buf_num_margin(hevc) > 0) {
3365 if ((!hevc->param.p.sps_num_reorder_pics_0) &&
3366 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3367 /* the range of sps_num_reorder_pics_0 is in
3368 [0, sps_max_dec_pic_buffering_minus1_0] */
3369 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3370 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3371 } else
3372 used_buf_num = hevc->param.p.sps_num_reorder_pics_0
3373 + get_dynamic_buf_num_margin(hevc);
3374
3375 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3376 - hevc->param.p.sps_num_reorder_pics_0;
3377#ifdef MULTI_INSTANCE_SUPPORT
3378 /*
3379		need one more buffer for multi-instance, as
3380		apply_ref_pic_set() has no chance to run and
3381		clear the referenced flag in some cases
3382 */
3383 if (hevc->m_ins_flag)
3384 used_buf_num++;
3385#endif
3386 } else
3387 used_buf_num = max_buf_num;
3388
3389 if (hevc->save_buffer_mode)
3390 hevc_print(hevc, 0,
3391			"save buf mode: dynamic_buf_num_margin %d ----> %d\n",
3392 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3393
3394 if (sps_pic_buf_diff >= 4)
3395 {
3396 used_buf_num += 1;
3397 }
3398
3399 /* for eos add more buffer to flush.*/
3400 used_buf_num++;
3401
3402 if (used_buf_num > MAX_BUF_NUM)
3403 used_buf_num = MAX_BUF_NUM;
3404 return used_buf_num;
3405}
3406
3407
3408static int get_alloc_pic_count(struct hevc_state_s *hevc)
3409{
3410 int alloc_pic_count = 0;
3411 int i;
3412 struct PIC_s *pic;
3413 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3414 pic = hevc->m_PIC[i];
3415 if (pic && pic->index >= 0)
3416 alloc_pic_count++;
3417 }
3418 return alloc_pic_count;
3419}
3420
3421static int v4l_config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3422{
3423 int i = pic->index;
3424 int dw_mode = get_double_write_mode(hevc);
3425
3426 if (hevc->mmu_enable)
3427 pic->header_adr = hevc->m_BUF[i].header_addr;
3428
3429 pic->BUF_index = i;
3430 pic->POC = INVALID_POC;
3431 pic->mc_canvas_y = pic->index;
3432 pic->mc_canvas_u_v = pic->index;
3433
3434 if (dw_mode & 0x10) {
3435 pic->mc_canvas_y = (pic->index << 1);
3436 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3437 pic->mc_y_adr = pic->dw_y_adr;
3438 pic->mc_u_v_adr = pic->dw_u_v_adr;
3439 }
3440
3441 return 0;
3442}
3443
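/*
 * config_pic() (non-v4l path): lays one picture out inside a single m_BUF
 * slot, as read from the code below. The optional MMU compressed-header
 * area comes first, then -- 64 KB aligned -- the compressed reference data
 * when the MMU is off, and finally the double-write Y plane followed by
 * its U/V plane.
 */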
3444static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3445{
3446 int ret = -1;
3447 int i;
3448 /*int lcu_size_log2 = hevc->lcu_size_log2;
3449 int MV_MEM_UNIT=lcu_size_log2==
3450 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3451 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3452 5 ? 0x80 : 0x20;
3453 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3454 hevc->work_space_buf->mpred_mv.buf_size;*/
3455 unsigned int y_adr = 0;
3456 struct buf_stru_s buf_stru;
3457 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3458 int dw_mode = get_double_write_mode(hevc);
3459
3460 for (i = 0; i < BUF_POOL_SIZE; i++) {
3461 if (hevc->m_BUF[i].start_adr != 0 &&
3462 hevc->m_BUF[i].used_flag == 0 &&
3463 buf_size <= hevc->m_BUF[i].size) {
3464 hevc->m_BUF[i].used_flag = 1;
3465 break;
3466 }
3467 }
3468
3469 if (i >= BUF_POOL_SIZE)
3470 return -1;
3471
3472 if (hevc->mmu_enable) {
3473 pic->header_adr = hevc->m_BUF[i].start_adr;
3474 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3475 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3476 y_adr = hevc->m_BUF[i].start_adr +
3477 MMU_COMPRESS_8K_HEADER_SIZE;
3478 else
3479 y_adr = hevc->m_BUF[i].start_adr +
3480 MMU_COMPRESS_HEADER_SIZE;
3481 } else
3482 y_adr = hevc->m_BUF[i].start_adr;
3483
3484 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3485
3486 pic->POC = INVALID_POC;
3487	/* ensure get_pic_by_POC() does not return
3488	 * a buffer that has not yet been decoded */
3489 pic->BUF_index = i;
3490
3491 if ((!hevc->mmu_enable) &&
3492 ((dw_mode & 0x10) == 0)
3493 ) {
3494 pic->mc_y_adr = y_adr;
3495 y_adr += (buf_stru.mc_buffer_size_h << 16);
3496 }
3497 pic->mc_canvas_y = pic->index;
3498 pic->mc_canvas_u_v = pic->index;
3499 if (dw_mode & 0x10) {
3500 pic->mc_y_adr = y_adr;
3501 pic->mc_u_v_adr = y_adr +
3502 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3503 pic->mc_canvas_y = (pic->index << 1);
3504 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3505
3506 pic->dw_y_adr = pic->mc_y_adr;
3507 pic->dw_u_v_adr = pic->mc_u_v_adr;
3508 } else if (dw_mode) {
3509 pic->dw_y_adr = y_adr;
3510 pic->dw_u_v_adr = pic->dw_y_adr +
3511 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3512 }
3513
3514 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3515 hevc_print(hevc, 0,
3516 "%s index %d BUF_index %d mc_y_adr %x\n",
3517 __func__, pic->index,
3518 pic->BUF_index, pic->mc_y_adr);
3519 if (hevc->mmu_enable &&
3520 dw_mode)
3521 hevc_print(hevc, 0,
3522 "mmu double write adr %ld\n",
3523 pic->cma_alloc_addr);
3524 }
3525 ret = 0;
3526
3527 return ret;
3528}
3529
3530static void init_pic_list(struct hevc_state_s *hevc)
3531{
3532 int i;
3533 int init_buf_num = get_work_pic_num(hevc);
3534 int dw_mode = get_double_write_mode(hevc);
3535 struct vdec_s *vdec = hw_to_vdec(hevc);
3536	/* allocation of decoder buffers is delayed when working in v4l mode */
3537 if (!hevc->is_used_v4l) {
3538 for (i = 0; i < init_buf_num; i++) {
3539 if (alloc_buf(hevc) < 0) {
3540 if (i <= 8) {
3541				/* if (i + 1) >= 9 buffers were already
3542				 * allocated, don't report a fatal error. */
3543 hevc->fatal_error |=
3544 DECODER_FATAL_ERROR_NO_MEM;
3545 }
3546 break;
3547 }
3548 }
3549 }
3550
3551 for (i = 0; i < init_buf_num; i++) {
3552 struct PIC_s *pic = hevc->m_PIC[i];
3553
3554 if (!pic) {
3555 pic = vmalloc(sizeof(struct PIC_s));
3556 if (pic == NULL) {
3557 hevc_print(hevc, 0,
3558 "%s: alloc pic %d fail!!!\n",
3559 __func__, i);
3560 break;
3561 }
3562 hevc->m_PIC[i] = pic;
3563 }
3564 memset(pic, 0, sizeof(struct PIC_s));
3565
3566 pic->index = i;
3567 pic->BUF_index = -1;
3568 pic->mv_buf_index = -1;
3569 if (vdec->parallel_dec == 1) {
3570 pic->y_canvas_index = -1;
3571 pic->uv_canvas_index = -1;
3572 }
3573
3574 pic->width = hevc->pic_w;
3575 pic->height = hevc->pic_h;
3576 pic->double_write_mode = dw_mode;
3577
3578		/* canvas config is delayed when working in v4l mode */
3579 if (!hevc->is_used_v4l) {
3580 if (config_pic(hevc, pic) < 0) {
3581 if (get_dbg_flag(hevc))
3582 hevc_print(hevc, 0,
3583 "Config_pic %d fail\n", pic->index);
3584 pic->index = -1;
3585 i++;
3586 break;
3587 }
3588
3589 if (pic->double_write_mode)
3590 set_canvas(hevc, pic);
3591 }
3592 }
3593}
3594
3595static void uninit_pic_list(struct hevc_state_s *hevc)
3596{
3597 struct vdec_s *vdec = hw_to_vdec(hevc);
3598 int i;
3599#ifndef MV_USE_FIXED_BUF
3600 dealloc_mv_bufs(hevc);
3601#endif
3602 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3603 struct PIC_s *pic = hevc->m_PIC[i];
3604
3605 if (pic) {
3606 if (vdec->parallel_dec == 1) {
3607 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3608 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3609 }
3610 release_aux_data(hevc, pic);
3611 vfree(pic);
3612 hevc->m_PIC[i] = NULL;
3613 }
3614 }
3615}
3616
3617#ifdef LOSLESS_COMPRESS_MODE
3618static void init_decode_head_hw(struct hevc_state_s *hevc)
3619{
3620
3621 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3622 unsigned int data32;
3623
3624 int losless_comp_header_size =
3625 compute_losless_comp_header_size(hevc->pic_w,
3626 hevc->pic_h);
3627 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3628 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3629
3630 hevc->losless_comp_body_size = losless_comp_body_size;
3631
3632
3633 if (hevc->mmu_enable) {
3634 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3635 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3636 } else {
3637 if (hevc->mem_saving_mode == 1)
3638 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3639 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3640 else
3641 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3642 ((workaround_enable & 2) ? 1 : 0));
3643 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3644 /*
3645 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3646 * //8-bit mode
3647 */
3648 }
3649 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3650 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3651 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3652
3653 if (hevc->mmu_enable) {
3654 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3655 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3656 buf_spec->mmu_vbh.buf_start +
3657 buf_spec->mmu_vbh.buf_size/2);
3658 data32 = READ_VREG(HEVC_SAO_CTRL9);
3659 data32 |= 0x1;
3660 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3661
3662 /* use HEVC_CM_HEADER_START_ADDR */
3663 data32 = READ_VREG(HEVC_SAO_CTRL5);
3664 data32 |= (1<<10);
3665 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3666 }
3667
3668 if (!hevc->m_ins_flag)
3669 hevc_print(hevc, 0,
3670 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3671 __func__, hevc->pic_w, hevc->pic_h,
3672 losless_comp_body_size, losless_comp_header_size);
3673
3674}
3675#endif
3676#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3677
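/*
 * init_pic_list_hw(): walks the allocated pictures and programs the MPP
 * ANC2AXI translation table with each picture's frame-buffer address (or
 * compressed-header address when the MMU is used without double write),
 * then zeroes the IPP canvas config registers.
 */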
3678static void init_pic_list_hw(struct hevc_state_s *hevc)
3679{
3680 int i;
3681 int cur_pic_num = MAX_REF_PIC_NUM;
3682 int dw_mode = get_double_write_mode(hevc);
3683 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3684 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3685 (0x1 << 1) | (0x1 << 2));
3686 else
3687 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3688
3689 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3690 if (hevc->m_PIC[i] == NULL ||
3691 hevc->m_PIC[i]->index == -1) {
3692 cur_pic_num = i;
3693 break;
3694 }
3695 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3696 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3697 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3698 hevc->m_PIC[i]->header_adr>>5);
3699 else
3700 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3701 hevc->m_PIC[i]->mc_y_adr >> 5);
3702 } else
3703 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3704 hevc->m_PIC[i]->mc_y_adr |
3705 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3706 if (dw_mode & 0x10) {
3707 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3708 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3709 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3710 }
3711 else
3712 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3713 hevc->m_PIC[i]->mc_u_v_adr |
3714 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3715 | 0x1);
3716 }
3717 }
3718 if (cur_pic_num == 0)
3719 return;
3720
3721 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3722
3723 /* Zero out canvas registers in IPP -- avoid simulation X */
3724 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3725 (0 << 8) | (0 << 1) | 1);
3726 for (i = 0; i < 32; i++)
3727 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3728
3729#ifdef LOSLESS_COMPRESS_MODE
3730 if ((dw_mode & 0x10) == 0)
3731 init_decode_head_hw(hevc);
3732#endif
3733
3734}
3735
3736
3737static void dump_pic_list(struct hevc_state_s *hevc)
3738{
3739 int i;
3740 struct PIC_s *pic;
3741
3742 hevc_print(hevc, 0,
3743 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3744 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3745 pic = hevc->m_PIC[i];
3746 if (pic == NULL || pic->index == -1)
3747 continue;
3748 hevc_print_cont(hevc, 0,
3749 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3750 pic->index, pic->BUF_index,
3751#ifndef MV_USE_FIXED_BUF
3752 pic->mv_buf_index,
3753#else
3754 -1,
3755#endif
3756 pic->decode_idx, pic->POC, pic->referenced);
3757 hevc_print_cont(hevc, 0,
3758 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3759 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3760 pic->width, pic->height);
3761 hevc_print_cont(hevc, 0,
3762 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3763 pic->output_ready, pic->mpred_mv_wr_start_addr,
3764 pic->vf_ref);
3765 }
3766}
3767
3768static void clear_referenced_flag(struct hevc_state_s *hevc)
3769{
3770 int i;
3771 struct PIC_s *pic;
3772 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3773 pic = hevc->m_PIC[i];
3774 if (pic == NULL || pic->index == -1)
3775 continue;
3776 if (pic->referenced) {
3777 pic->referenced = 0;
3778 put_mv_buf(hevc, pic);
3779 }
3780 }
3781}
3782
3783static void clear_poc_flag(struct hevc_state_s *hevc)
3784{
3785 int i;
3786 struct PIC_s *pic;
3787 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3788 pic = hevc->m_PIC[i];
3789 if (pic == NULL || pic->index == -1)
3790 continue;
3791 pic->POC = INVALID_POC;
3792 }
3793}
3794
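/*
 * output_pic(): chooses the next picture for display, as read from the
 * logic below. In i_only trick mode the output-marked picture with the
 * smallest decode_idx wins; otherwise the candidate is the one with the
 * smallest POC, and it is only released once more pictures are pending
 * than its num_reorder_pic allows (or on flush / fast first-frame output).
 */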
3795static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3796 unsigned char flush_flag)
3797{
3798 int num_pic_not_yet_display = 0;
3799 int i, first_pic_flag = 0;
3800 struct PIC_s *pic;
3801 struct PIC_s *pic_display = NULL;
3802 struct vdec_s *vdec = hw_to_vdec(hevc);
3803
3804 if (hevc->i_only & 0x4) {
3805 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3806 pic = hevc->m_PIC[i];
3807 if (pic == NULL ||
3808 (pic->index == -1) ||
3809 (pic->BUF_index == -1) ||
3810 (pic->POC == INVALID_POC))
3811 continue;
3812 if (pic->output_mark) {
3813 if (pic_display) {
3814 if (pic->decode_idx <
3815 pic_display->decode_idx)
3816 pic_display = pic;
3817
3818 } else
3819 pic_display = pic;
3820
3821 }
3822 }
3823 if (pic_display) {
3824 pic_display->output_mark = 0;
3825 pic_display->recon_mark = 0;
3826 pic_display->output_ready = 1;
3827 pic_display->referenced = 0;
3828 put_mv_buf(hevc, pic_display);
3829 }
3830 } else {
3831 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3832 pic = hevc->m_PIC[i];
3833 if (pic == NULL ||
3834 (pic->index == -1) ||
3835 (pic->BUF_index == -1) ||
3836 (pic->POC == INVALID_POC))
3837 continue;
3838 if (pic->output_mark)
3839 num_pic_not_yet_display++;
3840 if (pic->slice_type == 2 &&
3841 hevc->vf_pre_count == 0 &&
3842 fast_output_enable & 0x1) {
3843 /*fast output for first I picture*/
3844 pic->num_reorder_pic = 0;
3845 if (vdec->master || vdec->slave)
3846 pic_display = pic;
3847 first_pic_flag = 1;
3848 hevc_print(hevc, 0, "VH265: output first frame\n");
3849 }
3850 }
3851
3852 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3853 pic = hevc->m_PIC[i];
3854 if (pic == NULL ||
3855 (pic->index == -1) ||
3856 (pic->BUF_index == -1) ||
3857 (pic->POC == INVALID_POC))
3858 continue;
3859 if (pic->output_mark) {
3860 if (pic_display) {
3861 if (pic->POC < pic_display->POC)
3862 pic_display = pic;
3863 else if ((pic->POC == pic_display->POC)
3864 && (pic->decode_idx <
3865 pic_display->
3866 decode_idx))
3867 pic_display
3868 = pic;
3869
3870 } else
3871 pic_display = pic;
3872
3873 }
3874 }
3875 /* for dv, wait until cur_pic has received all of its data;
3876 some data may arrive after the picture has been output */
3877 if ((vdec->master || vdec->slave)
3878 && (pic_display == hevc->cur_pic) &&
3879 (!flush_flag) &&
3880 (hevc->bypass_dvenl && !dolby_meta_with_el)
3881 && (!first_pic_flag))
3882 pic_display = NULL;
3883
3884 if (pic_display) {
3885 if ((num_pic_not_yet_display >
3886 pic_display->num_reorder_pic)
3887 || flush_flag) {
3888 pic_display->output_mark = 0;
3889 pic_display->recon_mark = 0;
3890 pic_display->output_ready = 1;
3891 } else if (num_pic_not_yet_display >=
3892 (MAX_REF_PIC_NUM - 1)) {
3893 pic_display->output_mark = 0;
3894 pic_display->recon_mark = 0;
3895 pic_display->output_ready = 1;
3896 hevc_print(hevc, 0,
3897 "Warning, num_reorder_pic %d is byeond buf num\n",
3898 pic_display->num_reorder_pic);
3899 } else
3900 pic_display = NULL;
3901 }
3902 }
3903
3904 if (pic_display && hevc->sps_num_reorder_pics_0 &&
3905 (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3906 pic_display = NULL;
3907 hevc->first_pic_flag = 0;
3908 }
3909 return pic_display;
3910}
3911
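/*
 * config_mc_buffer() (summary inferred from the code below): for P/B
 * slices, write the canvas indexes of every list0 (and, for B slices,
 * list1) reference picture into the MC anchor canvas registers, setting
 * error_mark on the current picture when a reference is missing, has the
 * wrong size, or is itself marked as erroneous.
 */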
3912static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3913{
3914 int i;
3915 struct PIC_s *pic;
3916
3917 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3918 hevc_print(hevc, 0,
3919 "config_mc_buffer entered .....\n");
3920 if (cur_pic->slice_type != 2) { /* P and B pic */
3921 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3922 (0 << 8) | (0 << 1) | 1);
3923 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3924 pic =
3925 get_ref_pic_by_POC(hevc,
3926 cur_pic->
3927 m_aiRefPOCList0[cur_pic->
3928 slice_idx][i]);
3929 if (pic) {
3930 if ((pic->width != hevc->pic_w) ||
3931 (pic->height != hevc->pic_h)) {
3932 hevc_print(hevc, 0,
3933 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3934 __func__, pic->POC,
3935 pic->width, pic->height);
3936 cur_pic->error_mark = 1;
3937 }
3938 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3939 cur_pic->error_mark = 1;
3940 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3941 (pic->mc_canvas_u_v << 16)
3942 | (pic->mc_canvas_u_v
3943 << 8) |
3944 pic->mc_canvas_y);
3945 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3946 hevc_print_cont(hevc, 0,
3947 "refid %x mc_canvas_u_v %x",
3948 i, pic->mc_canvas_u_v);
3949 hevc_print_cont(hevc, 0,
3950 " mc_canvas_y %x\n",
3951 pic->mc_canvas_y);
3952 }
3953 } else
3954 cur_pic->error_mark = 1;
3955
3956 if (pic == NULL || pic->error_mark) {
3957 hevc_print(hevc, 0,
3958 "Error %s, %dth poc (%d) %s",
3959 __func__, i,
3960 cur_pic->m_aiRefPOCList0[cur_pic->
3961 slice_idx][i],
3962 pic ? "has error" :
3963 "not in list0");
3964 }
3965 }
3966 }
3967 if (cur_pic->slice_type == 0) { /* B pic */
3968 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3969 hevc_print(hevc, 0,
3970 "config_mc_buffer RefNum_L1\n");
3971 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3972 (16 << 8) | (0 << 1) | 1);
3973
3974 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3975 pic =
3976 get_ref_pic_by_POC(hevc,
3977 cur_pic->
3978 m_aiRefPOCList1[cur_pic->
3979 slice_idx][i]);
3980 if (pic) {
3981 if ((pic->width != hevc->pic_w) ||
3982 (pic->height != hevc->pic_h)) {
3983 hevc_print(hevc, 0,
3984 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3985 __func__, pic->POC,
3986 pic->width, pic->height);
3987 cur_pic->error_mark = 1;
3988 }
3989
3990 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3991 cur_pic->error_mark = 1;
3992 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3993 (pic->mc_canvas_u_v << 16)
3994 | (pic->mc_canvas_u_v
3995 << 8) |
3996 pic->mc_canvas_y);
3997 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3998 hevc_print_cont(hevc, 0,
3999 "refid %x mc_canvas_u_v %x",
4000 i, pic->mc_canvas_u_v);
4001 hevc_print_cont(hevc, 0,
4002 " mc_canvas_y %x\n",
4003 pic->mc_canvas_y);
4004 }
4005 } else
4006 cur_pic->error_mark = 1;
4007
4008 if (pic == NULL || pic->error_mark) {
4009 hevc_print(hevc, 0,
4010 "Error %s, %dth poc (%d) %s",
4011 __func__, i,
4012 cur_pic->m_aiRefPOCList1[cur_pic->
4013 slice_idx][i],
4014 pic ? "has error" :
4015 "not in list1");
4016 }
4017 }
4018 }
4019 return 0;
4020}
4021
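/*
 * apply_ref_pic_set() (summary inferred from the code below): scan the
 * short-term RPS delta-POC entries for the current slice and clear the
 * "referenced" flag (returning the MV buffer) of any decoded picture
 * whose POC no longer belongs to the set.
 */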
4022static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
4023 union param_u *params)
4024{
4025 int ii, i;
4026 int poc_tmp;
4027 struct PIC_s *pic;
4028 unsigned char is_referenced;
4029 /* hevc_print(hevc, 0,
4030 "%s cur_poc %d\n", __func__, cur_poc); */
4031 if (pic_list_debug & 0x2) {
4032 pr_err("cur poc %d\n", cur_poc);
4033 }
4034 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
4035 pic = hevc->m_PIC[ii];
4036 if (pic == NULL ||
4037 pic->index == -1 ||
4038 pic->BUF_index == -1
4039 )
4040 continue;
4041
4042 if ((pic->referenced == 0 || pic->POC == cur_poc))
4043 continue;
4044 is_referenced = 0;
4045 for (i = 0; i < 16; i++) {
4046 int delt;
4047
4048 if (params->p.CUR_RPS[i] & 0x8000)
4049 break;
4050 delt =
4051 params->p.CUR_RPS[i] &
4052 ((1 << (RPS_USED_BIT - 1)) - 1);
4053 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
4054 poc_tmp =
4055 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
4056 delt);
4057 } else
4058 poc_tmp = cur_poc + delt;
4059 if (poc_tmp == pic->POC) {
4060 is_referenced = 1;
4061 /* hevc_print(hevc, 0, "i is %d\n", i); */
4062 break;
4063 }
4064 }
4065 if (is_referenced == 0) {
4066 pic->referenced = 0;
4067 put_mv_buf(hevc, pic);
4068 /* hevc_print(hevc, 0,
4069 "set poc %d reference to 0\n", pic->POC); */
4070 if (pic_list_debug & 0x2) {
4071 pr_err("set poc %d reference to 0\n", pic->POC);
4072 }
4073 }
4074 }
4075
4076}
4077
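/*
 * set_ref_pic_list() (summary inferred from the code below): decode the
 * CUR_RPS entries into the RefPicSetStCurr0/1 POC arrays (negative and
 * positive deltas), build m_aiRefPOCList0/1 for the current slice while
 * honouring the optional reference-list modification commands, and store
 * the resulting slice type and active reference counts in the picture.
 */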
4078static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
4079{
4080 struct PIC_s *pic = hevc->cur_pic;
4081 int i, rIdx;
4082 int num_neg = 0;
4083 int num_pos = 0;
4084 int total_num;
4085 int num_ref_idx_l0_active =
4086 (params->p.num_ref_idx_l0_active >
4087 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4088 params->p.num_ref_idx_l0_active;
4089 int num_ref_idx_l1_active =
4090 (params->p.num_ref_idx_l1_active >
4091 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4092 params->p.num_ref_idx_l1_active;
4093
4094 int RefPicSetStCurr0[16];
4095 int RefPicSetStCurr1[16];
4096
4097 for (i = 0; i < 16; i++) {
4098 RefPicSetStCurr0[i] = 0;
4099 RefPicSetStCurr1[i] = 0;
4100 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
4101 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
4102 }
4103 for (i = 0; i < 16; i++) {
4104 if (params->p.CUR_RPS[i] & 0x8000)
4105 break;
4106 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
4107 int delt =
4108 params->p.CUR_RPS[i] &
4109 ((1 << (RPS_USED_BIT - 1)) - 1);
4110
4111 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
4112 RefPicSetStCurr0[num_neg] =
4113 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
4114 delt);
4115 /* hevc_print(hevc, 0,
4116 * "RefPicSetStCurr0 %x %x %x\n",
4117 * RefPicSetStCurr0[num_neg], pic->POC,
4118 * (0x800-(params[i]&0x7ff)));
4119 */
4120 num_neg++;
4121 } else {
4122 RefPicSetStCurr1[num_pos] = pic->POC + delt;
4123 /* hevc_print(hevc, 0,
4124 * "RefPicSetStCurr1 %d\n",
4125 * RefPicSetStCurr1[num_pos]);
4126 */
4127 num_pos++;
4128 }
4129 }
4130 }
4131 total_num = num_neg + num_pos;
4132 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4133 hevc_print(hevc, 0,
4134 "%s: curpoc %d slice_type %d, total %d ",
4135 __func__, pic->POC, params->p.slice_type, total_num);
4136 hevc_print_cont(hevc, 0,
4137 "num_neg %d num_list0 %d num_list1 %d\n",
4138 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
4139 }
4140
4141 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4142 hevc_print(hevc, 0,
4143 "HEVC Stream buf start ");
4144 hevc_print_cont(hevc, 0,
4145 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
4146 READ_VREG(HEVC_STREAM_START_ADDR),
4147 READ_VREG(HEVC_STREAM_END_ADDR),
4148 READ_VREG(HEVC_STREAM_WR_PTR),
4149 READ_VREG(HEVC_STREAM_RD_PTR),
4150 READ_VREG(HEVC_STREAM_LEVEL),
4151 READ_VREG(HEVC_STREAM_FIFO_CTL),
4152 READ_VREG(HEVC_PARSER_INT_CONTROL));
4153 }
4154
4155 if (total_num > 0) {
4156 if (params->p.modification_flag & 0x1) {
4157 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4158 hevc_print(hevc, 0, "ref0 POC (modification):");
4159 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4160 int cIdx = params->p.modification_list[rIdx];
4161
4162 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4163 cIdx >=
4164 num_neg ? RefPicSetStCurr1[cIdx -
4165 num_neg] :
4166 RefPicSetStCurr0[cIdx];
4167 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4168 hevc_print_cont(hevc, 0, "%d ",
4169 pic->m_aiRefPOCList0[pic->
4170 slice_idx]
4171 [rIdx]);
4172 }
4173 }
4174 } else {
4175 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4176 hevc_print(hevc, 0, "ref0 POC:");
4177 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4178 int cIdx = rIdx % total_num;
4179
4180 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4181 cIdx >=
4182 num_neg ? RefPicSetStCurr1[cIdx -
4183 num_neg] :
4184 RefPicSetStCurr0[cIdx];
4185 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4186 hevc_print_cont(hevc, 0, "%d ",
4187 pic->m_aiRefPOCList0[pic->
4188 slice_idx]
4189 [rIdx]);
4190 }
4191 }
4192 }
4193 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4194 hevc_print_cont(hevc, 0, "\n");
4195 if (params->p.slice_type == B_SLICE) {
4196 if (params->p.modification_flag & 0x2) {
4197 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4198 hevc_print(hevc, 0,
4199 "ref1 POC (modification):");
4200 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4201 rIdx++) {
4202 int cIdx;
4203
4204 if (params->p.modification_flag & 0x1) {
4205 cIdx =
4206 params->p.
4207 modification_list
4208 [num_ref_idx_l0_active +
4209 rIdx];
4210 } else {
4211 cIdx =
4212 params->p.
4213 modification_list[rIdx];
4214 }
4215 pic->m_aiRefPOCList1[pic->
4216 slice_idx][rIdx] =
4217 cIdx >=
4218 num_pos ?
4219 RefPicSetStCurr0[cIdx - num_pos]
4220 : RefPicSetStCurr1[cIdx];
4221 if (get_dbg_flag(hevc) &
4222 H265_DEBUG_BUFMGR) {
4223 hevc_print_cont(hevc, 0, "%d ",
4224 pic->
4225 m_aiRefPOCList1[pic->
4226 slice_idx]
4227 [rIdx]);
4228 }
4229 }
4230 } else {
4231 if (get_dbg_flag(hevc) &
4232 H265_DEBUG_BUFMGR)
4233 hevc_print(hevc, 0, "ref1 POC:");
4234 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4235 rIdx++) {
4236 int cIdx = rIdx % total_num;
4237
4238 pic->m_aiRefPOCList1[pic->
4239 slice_idx][rIdx] =
4240 cIdx >=
4241 num_pos ?
4242 RefPicSetStCurr0[cIdx -
4243 num_pos]
4244 : RefPicSetStCurr1[cIdx];
4245 if (get_dbg_flag(hevc) &
4246 H265_DEBUG_BUFMGR) {
4247 hevc_print_cont(hevc, 0, "%d ",
4248 pic->
4249 m_aiRefPOCList1[pic->
4250 slice_idx]
4251 [rIdx]);
4252 }
4253 }
4254 }
4255 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4256 hevc_print_cont(hevc, 0, "\n");
4257 }
4258 }
4259 /*set m_PIC */
4260 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4261 (params->p.slice_type == P_SLICE) ? 1 :
4262 (params->p.slice_type == B_SLICE) ? 0 : 3;
4263 pic->RefNum_L0 = num_ref_idx_l0_active;
4264 pic->RefNum_L1 = num_ref_idx_l1_active;
4265}
4266
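/*
 * update_tile_info() (summary inferred from the code below): derive the
 * tile grid for the current picture -- either uniformly spaced or from
 * the explicit tile_width/tile_height arrays -- and record each tile's
 * size, starting CU coordinates and per-tile SAO vertical/above buffer
 * addresses in hevc->m_tile[][]; a single full-picture tile is used when
 * tiles are disabled.
 */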
4267static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4268 int pic_height_cu, int sao_mem_unit,
4269 union param_u *params)
4270{
4271 int i, j;
4272 int start_cu_x, start_cu_y;
4273 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4274 int sao_abv_size = sao_mem_unit * pic_width_cu;
4275#ifdef DETREFILL_ENABLE
4276 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4277 int tmpRefillLcuSize = 1 <<
4278 (params->p.log2_min_coding_block_size_minus3 +
4279 3 + params->p.log2_diff_max_min_coding_block_size);
4280 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4281 "%x, %x, %x, %x\n",
4282 params->p.slice_segment_address,
4283 params->p.bit_depth,
4284 params->p.tiles_enabled_flag,
4285 tmpRefillLcuSize);
4286 if (params->p.slice_segment_address == 0 &&
4287 params->p.bit_depth != 0 &&
4288 (params->p.tiles_enabled_flag & 1) &&
4289 tmpRefillLcuSize == 64)
4290 hevc->delrefill_check = 1;
4291 else
4292 hevc->delrefill_check = 0;
4293 }
4294#endif
4295
4296 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4297 if (params->p.tiles_enabled_flag & 1) {
4298 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4299 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4300
4301 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4302 || hevc->num_tile_row <= 0) {
4303 hevc->num_tile_row = 1;
4304 hevc_print(hevc, 0,
4305 "%s: num_tile_rows_minus1 (%d) error!!\n",
4306 __func__, params->p.num_tile_rows_minus1);
4307 }
4308 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4309 || hevc->num_tile_col <= 0) {
4310 hevc->num_tile_col = 1;
4311 hevc_print(hevc, 0,
4312 "%s: num_tile_columns_minus1 (%d) error!!\n",
4313 __func__, params->p.num_tile_columns_minus1);
4314 }
4315 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4316 hevc_print(hevc, 0,
4317 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4318 __func__, pic_width_cu, pic_height_cu);
4319 hevc_print_cont(hevc, 0,
4320 "num_tile_col %d num_tile_row %d:\n",
4321 hevc->num_tile_col, hevc->num_tile_row);
4322 }
4323
4324 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4325 int w = pic_width_cu / hevc->num_tile_col;
4326 int h = pic_height_cu / hevc->num_tile_row;
4327
4328 start_cu_y = 0;
4329 for (i = 0; i < hevc->num_tile_row; i++) {
4330 start_cu_x = 0;
4331 for (j = 0; j < hevc->num_tile_col; j++) {
4332 if (j == (hevc->num_tile_col - 1)) {
4333 hevc->m_tile[i][j].width =
4334 pic_width_cu -
4335 start_cu_x;
4336 } else
4337 hevc->m_tile[i][j].width = w;
4338 if (i == (hevc->num_tile_row - 1)) {
4339 hevc->m_tile[i][j].height =
4340 pic_height_cu -
4341 start_cu_y;
4342 } else
4343 hevc->m_tile[i][j].height = h;
4344 hevc->m_tile[i][j].start_cu_x
4345 = start_cu_x;
4346 hevc->m_tile[i][j].start_cu_y
4347 = start_cu_y;
4348 hevc->m_tile[i][j].sao_vb_start_addr =
4349 hevc->work_space_buf->sao_vb.
4350 buf_start + j * sao_vb_size;
4351 hevc->m_tile[i][j].sao_abv_start_addr =
4352 hevc->work_space_buf->sao_abv.
4353 buf_start + i * sao_abv_size;
4354 if (get_dbg_flag(hevc) &
4355 H265_DEBUG_BUFMGR) {
4356 hevc_print_cont(hevc, 0,
4357 "{y=%d, x=%d w %d h %d ",
4358 i, j, hevc->m_tile[i][j].width,
4359 hevc->m_tile[i][j].height);
4360 hevc_print_cont(hevc, 0,
4361 "start_x %d start_y %d ",
4362 hevc->m_tile[i][j].start_cu_x,
4363 hevc->m_tile[i][j].start_cu_y);
4364 hevc_print_cont(hevc, 0,
4365 "sao_vb_start 0x%x ",
4366 hevc->m_tile[i][j].
4367 sao_vb_start_addr);
4368 hevc_print_cont(hevc, 0,
4369 "sao_abv_start 0x%x}\n",
4370 hevc->m_tile[i][j].
4371 sao_abv_start_addr);
4372 }
4373 start_cu_x += hevc->m_tile[i][j].width;
4374
4375 }
4376 start_cu_y += hevc->m_tile[i][0].height;
4377 }
4378 } else {
4379 start_cu_y = 0;
4380 for (i = 0; i < hevc->num_tile_row; i++) {
4381 start_cu_x = 0;
4382 for (j = 0; j < hevc->num_tile_col; j++) {
4383 if (j == (hevc->num_tile_col - 1)) {
4384 hevc->m_tile[i][j].width =
4385 pic_width_cu -
4386 start_cu_x;
4387 } else {
4388 hevc->m_tile[i][j].width =
4389 params->p.tile_width[j];
4390 }
4391 if (i == (hevc->num_tile_row - 1)) {
4392 hevc->m_tile[i][j].height =
4393 pic_height_cu -
4394 start_cu_y;
4395 } else {
4396 hevc->m_tile[i][j].height =
4397 params->
4398 p.tile_height[i];
4399 }
4400 hevc->m_tile[i][j].start_cu_x
4401 = start_cu_x;
4402 hevc->m_tile[i][j].start_cu_y
4403 = start_cu_y;
4404 hevc->m_tile[i][j].sao_vb_start_addr =
4405 hevc->work_space_buf->sao_vb.
4406 buf_start + j * sao_vb_size;
4407 hevc->m_tile[i][j].sao_abv_start_addr =
4408 hevc->work_space_buf->sao_abv.
4409 buf_start + i * sao_abv_size;
4410 if (get_dbg_flag(hevc) &
4411 H265_DEBUG_BUFMGR) {
4412 hevc_print_cont(hevc, 0,
4413 "{y=%d, x=%d w %d h %d ",
4414 i, j, hevc->m_tile[i][j].width,
4415 hevc->m_tile[i][j].height);
4416 hevc_print_cont(hevc, 0,
4417 "start_x %d start_y %d ",
4418 hevc->m_tile[i][j].start_cu_x,
4419 hevc->m_tile[i][j].start_cu_y);
4420 hevc_print_cont(hevc, 0,
4421 "sao_vb_start 0x%x ",
4422 hevc->m_tile[i][j].
4423 sao_vb_start_addr);
4424 hevc_print_cont(hevc, 0,
4425 "sao_abv_start 0x%x}\n",
4426 hevc->m_tile[i][j].
4427 sao_abv_start_addr);
4428
4429 }
4430 start_cu_x += hevc->m_tile[i][j].width;
4431 }
4432 start_cu_y += hevc->m_tile[i][0].height;
4433 }
4434 }
4435 } else {
4436 hevc->num_tile_col = 1;
4437 hevc->num_tile_row = 1;
4438 hevc->m_tile[0][0].width = pic_width_cu;
4439 hevc->m_tile[0][0].height = pic_height_cu;
4440 hevc->m_tile[0][0].start_cu_x = 0;
4441 hevc->m_tile[0][0].start_cu_y = 0;
4442 hevc->m_tile[0][0].sao_vb_start_addr =
4443 hevc->work_space_buf->sao_vb.buf_start;
4444 hevc->m_tile[0][0].sao_abv_start_addr =
4445 hevc->work_space_buf->sao_abv.buf_start;
4446 }
4447}
4448
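/*
 * get_tile_index() (summary inferred from the code below): translate an
 * LCU address into (tile_x | tile_y << 8) by comparing its CU x/y against
 * the recorded tile start coordinates; returns -1 if pic_width_lcu is 0.
 */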
4449static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4450 int pic_width_lcu)
4451{
4452 int cu_x;
4453 int cu_y;
4454 int tile_x = 0;
4455 int tile_y = 0;
4456 int i;
4457
4458 if (pic_width_lcu == 0) {
4459 if (get_dbg_flag(hevc)) {
4460 hevc_print(hevc, 0,
4461 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4462 __func__, hevc->pic_w, hevc->pic_h);
4463 }
4464 return -1;
4465 }
4466 cu_x = cu_adr % pic_width_lcu;
4467 cu_y = cu_adr / pic_width_lcu;
4468 if (hevc->tile_enabled) {
4469 for (i = 0; i < hevc->num_tile_col; i++) {
4470 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4471 tile_x = i;
4472 else
4473 break;
4474 }
4475 for (i = 0; i < hevc->num_tile_row; i++) {
4476 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4477 tile_y = i;
4478 else
4479 break;
4480 }
4481 }
4482 return (tile_x) | (tile_y << 8);
4483}
4484
4485static void print_scratch_error(int error_num)
4486{
4487#if 0
4488 if (get_dbg_flag(hevc)) {
4489 hevc_print(hevc, 0,
4490 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4491 error_num);
4492 }
4493#endif
4494}
4495
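/*
 * hevc_config_work_space_hw() (summary inferred from the code below):
 * point the decoder's working buffers (IPP line buffer, RPM, short-term
 * RPS, VPS/SPS/PPS, SAO up, scale LUT, deblock parameter/data buffers,
 * LMEM dump and, when the MMU is used, the frame map) at the addresses
 * held in the BuffInfo_s work-space descriptor.
 */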
4496static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4497{
4498 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4499
4500 if (get_dbg_flag(hevc))
4501 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4502 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4503 __func__,
4504 buf_spec->ipp.buf_start,
4505 buf_spec->start_adr,
4506 buf_spec->short_term_rps.buf_start,
4507 buf_spec->vps.buf_start,
4508 buf_spec->sps.buf_start,
4509 buf_spec->pps.buf_start,
4510 buf_spec->sao_up.buf_start,
4511 buf_spec->swap_buf.buf_start,
4512 buf_spec->swap_buf2.buf_start,
4513 buf_spec->scalelut.buf_start,
4514 buf_spec->dblk_para.buf_start,
4515 buf_spec->dblk_data.buf_start,
4516 buf_spec->dblk_data2.buf_start);
4517 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4518 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4519 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4520 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4521 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4522 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4523 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4524 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4525 if (hevc->mmu_enable) {
4526 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4527 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4528 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4529 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4530 } else
4531 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4532 } /*else
4533 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4534 buf_spec->swap_buf.buf_start);
4535 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4536 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4537#ifdef HEVC_8K_LFTOFFSET_FIX
4538 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
4539 WRITE_VREG(HEVC_DBLK_CFG3, 0x808020); /*offset should x2 if 8k*/
4540 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4541 "write HEVC_DBLK_CFG3\n");
4542 }
4543#endif
4544 /* cfg_p_addr */
4545 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4546 /* cfg_d_addr */
4547 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4548
4549 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4550
4551 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4552}
4553
4554static void parser_cmd_write(void)
4555{
4556 u32 i;
4557 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4558 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4559 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4560 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4561 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4562 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4563 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4564 0x7C00
4565 };
4566 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4567 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4568}
4569
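/*
 * hevc_init_decoder_hw() (summary inferred from the code below): sanity-
 * check the parser scratch registers, reset the IQIT block, enable stream
 * fetch/shift, CABAC and the parser core with the interrupt sources used
 * by the driver, clear the scale LUT, download the parser command table,
 * and bring up the IPP/MPP pipeline (NV21 reference read mode when double
 * write only is selected).
 */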
4570static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4571 int decode_pic_begin, int decode_pic_num)
4572{
4573 unsigned int data32;
4574 int i;
4575#if 0
4576 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4577 /* Set MCR fetch priorities*/
4578 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4579 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4580 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4581 }
4582#endif
4583#if 1
4584 /* m8baby test1902 */
4585 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4586 hevc_print(hevc, 0,
4587 "%s\n", __func__);
4588 data32 = READ_VREG(HEVC_PARSER_VERSION);
4589 if (data32 != 0x00010001) {
4590 print_scratch_error(25);
4591 return;
4592 }
4593 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4594 data32 = READ_VREG(HEVC_PARSER_VERSION);
4595 if (data32 != 0x5a5a55aa) {
4596 print_scratch_error(26);
4597 return;
4598 }
4599#if 0
4600 /* test Parser Reset */
4601 /* reset iqit to start mem init again */
4602 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4603 (1 << 3) /* reset_whole parser */
4604 );
4605 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4606 data32 = READ_VREG(HEVC_PARSER_VERSION);
4607 if (data32 != 0x00010001)
4608 hevc_print(hevc, 0,
4609 "Test Parser Fatal Error\n");
4610#endif
4611 /* reset iqit to start mem init again */
4612 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4613 );
4614 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4615 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4616
4617#endif
4618 if (!hevc->m_ins_flag) {
4619 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4620 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4621 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4622 data32 |= (0xf << 25); /*arwlen_axi_max*/
4623 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4624 }
4625 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4626 if (data32 != 0x00000100) {
4627 print_scratch_error(29);
4628 return;
4629 }
4630 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4631 if (data32 != 0x00000300) {
4632 print_scratch_error(30);
4633 return;
4634 }
4635 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4636 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4637 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4638 if (data32 != 0x12345678) {
4639 print_scratch_error(31);
4640 return;
4641 }
4642 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4643 if (data32 != 0x9abcdef0) {
4644 print_scratch_error(32);
4645 return;
4646 }
4647 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4648 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4649
4650 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4651 data32 &= 0x03ffffff;
4652 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4653 | /* stream_buffer_empty_int_amrisc_enable */
4654 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4655 (1 << 7) | /* dec_done_int_cpu_enable */
4656 (1 << 4) | /* startcode_found_int_cpu_enable */
4657 (0 << 3) | /* startcode_found_int_amrisc_enable */
4658 (1 << 0) /* parser_int_enable */
4659 ;
4660 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4661
4662 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4663 data32 = data32 | (1 << 1) | /* emulation_check_on */
4664 (1 << 0) /* startcode_check_on */
4665 ;
4666 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4667
4668 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4669 (2 << 4) | /* emulate_code_length_sub_1 */
4670 (2 << 1) | /* start_code_length_sub_1 */
4671 (1 << 0) /* stream_shift_enable */
4672 );
4673
4674 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4675 );
4676 /* hevc_parser_core_clk_en */
4677 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4678 );
4679
4680 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4681
4682 /* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4683 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4684 for (i = 0; i < 1024; i++)
4685 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4686
4687#ifdef ENABLE_SWAP_TEST
4688 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4689#endif
4690
4691 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4692 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4693 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4694 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4695 /* Send parser_cmd */
4696 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4697
4698 parser_cmd_write();
4699
4700 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4701 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4702 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4703
4704 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4705 /* (1 << 8) | // sao_sw_pred_enable */
4706 (1 << 5) | /* parser_sao_if_en */
4707 (1 << 2) | /* parser_mpred_if_en */
4708 (1 << 0) /* parser_scaler_if_en */
4709 );
4710
4711 /* Changed to Start MPRED in microcode */
4712 /*
4713 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4714 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4715 * (1<<31)
4716 * );
4717 */
4718
4719 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4720 (1 << 0) /* software reset ipp and mpp */
4721 );
4722 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4723 (0 << 0) /* software reset ipp and mpp */
4724 );
4725
4726 if (get_double_write_mode(hevc) & 0x10)
4727 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4728 0x1 << 31 /* Enable NV21 reference read mode for MC */
4729 );
4730
4731}
4732
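/*
 * decoder_hw_reset() (summary inferred from the code below): re-run
 * essentially the same programming sequence as hevc_init_decoder_hw()
 * after a hardware reset, without the parser version check or the
 * m_ins_flag gating of the stream-fetch enable.
 */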
4733static void decoder_hw_reset(void)
4734{
4735 int i;
4736 unsigned int data32;
4737 /* reset iqit to start mem init again */
4738 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4739 );
4740 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4741 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4742
4743 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4744 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4745 ;
4746 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4747
4748 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4749 if (data32 != 0x00000100) {
4750 print_scratch_error(29);
4751 return;
4752 }
4753 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4754 if (data32 != 0x00000300) {
4755 print_scratch_error(30);
4756 return;
4757 }
4758 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4759 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4760 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4761 if (data32 != 0x12345678) {
4762 print_scratch_error(31);
4763 return;
4764 }
4765 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4766 if (data32 != 0x9abcdef0) {
4767 print_scratch_error(32);
4768 return;
4769 }
4770 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4771 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4772
4773 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4774 data32 &= 0x03ffffff;
4775 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4776 | /* stream_buffer_empty_int_amrisc_enable */
4777 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4778 (1 << 7) | /* dec_done_int_cpu_enable */
4779 (1 << 4) | /* startcode_found_int_cpu_enable */
4780 (0 << 3) | /* startcode_found_int_amrisc_enable */
4781 (1 << 0) /* parser_int_enable */
4782 ;
4783 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4784
4785 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4786 data32 = data32 | (1 << 1) | /* emulation_check_on */
4787 (1 << 0) /* startcode_check_on */
4788 ;
4789 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4790
4791 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4792 (2 << 4) | /* emulate_code_length_sub_1 */
4793 (2 << 1) | /* start_code_length_sub_1 */
4794 (1 << 0) /* stream_shift_enable */
4795 );
4796
4797 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4798 );
4799 /* hevc_parser_core_clk_en */
4800 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4801 );
4802
4803 /* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4804 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4805 for (i = 0; i < 1024; i++)
4806 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4807
4808 /* Send parser_cmd */
4809 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4810
4811 parser_cmd_write();
4812
4813 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4814 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4815 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4816
4817 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4818 /* (1 << 8) | // sao_sw_pred_enable */
4819 (1 << 5) | /* parser_sao_if_en */
4820 (1 << 2) | /* parser_mpred_if_en */
4821 (1 << 0) /* parser_scaler_if_en */
4822 );
4823
4824 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4825 (1 << 0) /* software reset ipp and mpp */
4826 );
4827 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4828 (0 << 0) /* software reset ipp and mpp */
4829 );
4830}
4831
4832#ifdef CONFIG_HEVC_CLK_FORCED_ON
4833static void config_hevc_clk_forced_on(void)
4834{
4835 unsigned int rdata32;
4836 /* IQIT */
4837 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4838 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4839
4840 /* DBLK */
4841 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4842 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4843
4844 /* SAO */
4845 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4846 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4847
4848 /* MPRED */
4849 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4850 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4851
4852 /* PARSER */
4853 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4854 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4855 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4856 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4857 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4858 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4859 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4860 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4861 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4862 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4863 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4864 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4865 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4866
4867 /* IPP */
4868 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4869 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4870
4871 /* MCRCC */
4872 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4873 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4874}
4875#endif
4876
4877#ifdef MCRCC_ENABLE
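/*
 * config_mcrcc_axi_hw() (summary inferred from the code below): reset the
 * motion-compensation reference cache, leave it disabled for I pictures,
 * and otherwise seed HEVCD_MCRCC_CTL2/CTL3 with the canvas numbers of the
 * leading list0 (and, for B pictures, list1) references before
 * re-enabling the cache in progressive mode.
 */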
4878static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4879{
4880 unsigned int rdata32;
4881 unsigned int rdata32_2;
4882 int l0_cnt = 0;
4883 int l1_cnt = 0x7fff;
4884
4885 if (get_double_write_mode(hevc) & 0x10) {
4886 l0_cnt = hevc->cur_pic->RefNum_L0;
4887 l1_cnt = hevc->cur_pic->RefNum_L1;
4888 }
4889
4890 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4891
4892 if (slice_type == 2) { /* I-PIC */
4893 /* remove reset -- disables clock */
4894 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4895 return;
4896 }
4897
4898 if (slice_type == 0) { /* B-PIC */
4899 /* Programme canvas0 */
4900 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4901 (0 << 8) | (0 << 1) | 0);
4902 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4903 rdata32 = rdata32 & 0xffff;
4904 rdata32 = rdata32 | (rdata32 << 16);
4905 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4906
4907 /* Programme canvas1 */
4908 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4909 (16 << 8) | (1 << 1) | 0);
4910 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4911 rdata32_2 = rdata32_2 & 0xffff;
4912 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4913 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4914 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4915 rdata32_2 = rdata32_2 & 0xffff;
4916 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4917 }
4918 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4919 } else { /* P-PIC */
4920 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4921 (0 << 8) | (1 << 1) | 0);
4922 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4923 rdata32 = rdata32 & 0xffff;
4924 rdata32 = rdata32 | (rdata32 << 16);
4925 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4926
4927 if (l0_cnt == 1) {
4928 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4929 } else {
4930 /* Programme canvas1 */
4931 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4932 rdata32 = rdata32 & 0xffff;
4933 rdata32 = rdata32 | (rdata32 << 16);
4934 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4935 }
4936 }
4937 /* enable mcrcc progressive-mode */
4938 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4939}
4940#endif
4941
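/*
 * config_title_hw() (summary inferred from the code below; despite the
 * name it configures the per-tile SAO buffers): program the SAO memory
 * unit and vertical-buffer sizes plus the above/vertical buffer base
 * addresses from the work-space descriptor.
 */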
4942static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4943 int sao_mem_unit)
4944{
4945 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4946 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4947 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4948 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4949}
4950
4951static u32 init_aux_size;
4952static int aux_data_is_avaible(struct hevc_state_s *hevc)
4953{
4954 u32 reg_val;
4955
4956 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4957 if (reg_val != 0 && reg_val != init_aux_size)
4958 return 1;
4959 else
4960 return 0;
4961}
4962
4963static void config_aux_buf(struct hevc_state_s *hevc)
4964{
4965 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4966 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4967 (hevc->suffix_aux_size >> 4);
4968 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4969}
4970
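/*
 * config_mpred_hw() (summary inferred from the code below): program the
 * motion-prediction block for the current slice -- colocated MV read/
 * write addresses and row jumps, picture/tile geometry, merge and AMVP
 * candidate limits, per-list reference enables and the 16 reference POC
 * register pairs -- and update the MV read/write pointers when a new
 * picture (or a non-dependent slice segment) starts.
 */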
4971static void config_mpred_hw(struct hevc_state_s *hevc)
4972{
4973 int i;
4974 unsigned int data32;
4975 struct PIC_s *cur_pic = hevc->cur_pic;
4976 struct PIC_s *col_pic = hevc->col_pic;
4977 int AMVP_MAX_NUM_CANDS_MEM = 3;
4978 int AMVP_MAX_NUM_CANDS = 2;
4979 int NUM_CHROMA_MODE = 5;
4980 int DM_CHROMA_IDX = 36;
4981 int above_ptr_ctrl = 0;
4982 int buffer_linear = 1;
4983 int cu_size_log2 = 3;
4984
4985 int mpred_mv_rd_start_addr;
4986 int mpred_curr_lcu_x;
4987 int mpred_curr_lcu_y;
4988 int mpred_above_buf_start;
4989 int mpred_mv_rd_ptr;
4990 int mpred_mv_rd_ptr_p1;
4991 int mpred_mv_rd_end_addr;
4992 int MV_MEM_UNIT;
4993 int mpred_mv_wr_ptr;
4994 int *ref_poc_L0, *ref_poc_L1;
4995
4996 int above_en;
4997 int mv_wr_en;
4998 int mv_rd_en;
4999 int col_isIntra;
5000
5001 if (hevc->slice_type != 2) {
5002 above_en = 1;
5003 mv_wr_en = 1;
5004 mv_rd_en = 1;
5005 col_isIntra = 0;
5006 } else {
5007 above_en = 1;
5008 mv_wr_en = 1;
5009 mv_rd_en = 0;
5010 col_isIntra = 0;
5011 }
5012
5013 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
5014 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
5015 mpred_curr_lcu_x = data32 & 0xffff;
5016 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
5017
5018 MV_MEM_UNIT =
5019 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
5020 5 ? 0x80 : 0x20;
5021 mpred_mv_rd_ptr =
5022 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
5023
5024 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
5025 mpred_mv_rd_end_addr =
5026 mpred_mv_rd_start_addr +
5027 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
5028
5029 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
5030
5031 mpred_mv_wr_ptr =
5032 cur_pic->mpred_mv_wr_start_addr +
5033 (hevc->slice_addr * MV_MEM_UNIT);
5034
5035 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5036 hevc_print(hevc, 0,
5037 "cur pic index %d col pic index %d\n", cur_pic->index,
5038 col_pic->index);
5039 }
5040
5041 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
5042 cur_pic->mpred_mv_wr_start_addr);
5043 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
5044
5045 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
5046 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
5047 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
5048
5049 data32 = READ_VREG(HEVC_MPRED_CTRL0);
5050 data32 = (hevc->slice_type |
5051 hevc->new_pic << 2 |
5052 hevc->new_tile << 3 |
5053 hevc->isNextSliceSegment << 4 |
5054 hevc->TMVPFlag << 5 |
5055 hevc->LDCFlag << 6 |
5056 hevc->ColFromL0Flag << 7 |
5057 above_ptr_ctrl << 8 |
5058 above_en << 9 |
5059 mv_wr_en << 10 |
5060 mv_rd_en << 11 |
5061 col_isIntra << 12 |
5062 buffer_linear << 13 |
5063 hevc->LongTerm_Curr << 14 |
5064 hevc->LongTerm_Col << 15 |
5065 hevc->lcu_size_log2 << 16 |
5066 cu_size_log2 << 20 | hevc->plevel << 24);
5067 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
5068
5069 data32 = READ_VREG(HEVC_MPRED_CTRL1);
5070 data32 = (
5071#if 0
5072 /* not set in m8baby test1902 */
5073 /* Don't override clk_forced_on , */
5074 (data32 & (0x1 << 24)) |
5075#endif
5076 hevc->MaxNumMergeCand |
5077 AMVP_MAX_NUM_CANDS << 4 |
5078 AMVP_MAX_NUM_CANDS_MEM << 8 |
5079 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
5080 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
5081
5082 data32 = (hevc->pic_w | hevc->pic_h << 16);
5083 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
5084
5085 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5086 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
5087
5088 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
5089 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
5090
5091 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
5092 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
5093
5094 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
5095 /* col_RefNum_L0<<16| */
5096 /* col_RefNum_L1<<24 */
5097 );
5098 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
5099
5100 data32 = (hevc->LongTerm_Ref);
5101 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
5102
5103 data32 = 0;
5104 for (i = 0; i < hevc->RefNum_L0; i++)
5105 data32 = data32 | (1 << i);
5106 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
5107
5108 data32 = 0;
5109 for (i = 0; i < hevc->RefNum_L1; i++)
5110 data32 = data32 | (1 << i);
5111 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
5112
5113 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
5114 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
5115
5116 /* below MPRED Ref_POC_xx_Lx registers must follow Ref_POC_xx_L0 ->
5117 * Ref_POC_xx_L1 in pair write order!!!
5118 */
5119 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
5120 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
5121
5122 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
5123 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
5124
5125 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
5126 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
5127
5128 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
5129 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
5130
5131 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
5132 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
5133
5134 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
5135 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
5136
5137 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
5138 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
5139
5140 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
5141 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
5142
5143 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
5144 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
5145
5146 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
5147 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
5148
5149 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
5150 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
5151
5152 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
5153 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
5154
5155 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
5156 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
5157
5158 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
5159 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
5160
5161 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
5162 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
5163
5164 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
5165 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
5166
5167 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
5168 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
5169
5170 if (hevc->new_pic) {
5171 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
5172 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
5173 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
5174 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
5175 } else if (!hevc->isNextSliceSegment) {
5176 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
5177 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
5178 }
5179
5180 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
5181}
5182
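/*
 * config_sao_hw() (summary inferred from the code below): set up the SAO
 * and deblocking stages for the current picture -- output addresses for
 * the compressed body/header and/or double-write planes, endian and
 * memory-map mode (with NV12/NV21 selection on the v4l2 path), the DBLK
 * CFG1/CFG2/CFGB/CFG9 fields derived from the PPS/slice deblocking and
 * loop-filter syntax, and the matching SAO_CTRL0 bits.
 */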
5183static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
5184{
5185 unsigned int data32, data32_2;
5186 int misc_flag0 = hevc->misc_flag0;
5187 int slice_deblocking_filter_disabled_flag = 0;
5188
5189 int mc_buffer_size_u_v =
5190 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
5191 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
5192 struct PIC_s *cur_pic = hevc->cur_pic;
5193 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
5194
5195 data32 = READ_VREG(HEVC_SAO_CTRL0);
5196 data32 &= (~0xf);
5197 data32 |= hevc->lcu_size_log2;
5198 WRITE_VREG(HEVC_SAO_CTRL0, data32);
5199
5200 data32 = (hevc->pic_w | hevc->pic_h << 16);
5201 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5202
5203 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5204 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5205
5206 if (hevc->new_pic)
5207 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5208#ifdef LOSLESS_COMPRESS_MODE
5209/*SUPPORT_10BIT*/
5210 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5211 data32 = READ_VREG(HEVC_SAO_CTRL5);
5212 data32 &= (~(0xff << 16));
5213
5214 if (get_double_write_mode(hevc) == 2 ||
5215 get_double_write_mode(hevc) == 3)
5216 data32 |= (0xff<<16);
5217 else if (get_double_write_mode(hevc) == 4)
5218 data32 |= (0x33<<16);
5219
5220 if (hevc->mem_saving_mode == 1)
5221 data32 |= (1 << 9);
5222 else
5223 data32 &= ~(1 << 9);
5224 if (workaround_enable & 1)
5225 data32 |= (1 << 7);
5226 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5227 }
5228 data32 = cur_pic->mc_y_adr;
5229 if (get_double_write_mode(hevc))
5230 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5231
5232 if ((get_double_write_mode(hevc) & 0x10) == 0)
5233 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5234
5235 if (hevc->mmu_enable)
5236 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5237#else
5238 data32 = cur_pic->mc_y_adr;
5239 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5240#endif
5241 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5242 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5243
5244#ifdef LOSLESS_COMPRESS_MODE
5245/*SUPPORT_10BIT*/
5246 if (get_double_write_mode(hevc))
5247 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5248#else
5249 data32 = cur_pic->mc_u_v_adr;
5250 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5251#endif
5252 data32 = (mc_buffer_size_u_v_h << 16);
5253 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5254
5255#ifdef LOSLESS_COMPRESS_MODE
5256/*SUPPORT_10BIT*/
5257 if (get_double_write_mode(hevc)) {
5258 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5259 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5260 }
5261#else
5262 /* multi tile to do... */
5263 data32 = cur_pic->mc_y_adr;
5264 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5265
5266 data32 = cur_pic->mc_u_v_adr;
5267 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5268#endif
5269 /* DBLK CONFIG HERE */
5270 if (hevc->new_pic) {
5271 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5272 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5273 data32 = (0xff << 8) | (0x0 << 0);
5274 else
5275 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5276 (0x0 << 0); /* h265 video format*/
5277
5278 if (hevc->pic_w >= 1280)
5279 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5280 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5281 if (get_double_write_mode(hevc) == 0)
5282 data32 |= (0x1 << 8); /*enable first write*/
5283 else if (get_double_write_mode(hevc) == 0x10)
5284 data32 |= (0x1 << 9); /*double write only*/
5285 else
5286 data32 |= ((0x1 << 8) |(0x1 << 9));
5287
5288 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5289 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5290 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5291 }
5292 data32 = (hevc->pic_w | hevc->pic_h << 16);
5293 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5294
5295 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5296 data32 =
5297 ((misc_flag0 >>
5298 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5299 0x1) << 3;
5300 } else
5301 data32 = 0;
5302 data32 |=
5303 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5304 ((params->p.pps_cr_qp_offset
5305 & 0x1f) <<
5306 9));
5307 data32 |=
5308 (hevc->lcu_size ==
5309 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5310
5311 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5312
5313 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5314 /*if (debug & 0x80) {*/
5315 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5316 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5317 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5318 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5319 data32);
5320 /*}*/
5321 }
5322 }
5323#if 0
5324 data32 = READ_VREG(HEVC_SAO_CTRL1);
5325 data32 &= (~0x3000);
5326 data32 |= (hevc->mem_map_mode <<
5327 12);
5328
5329/* [13:12] axi_aformat,
5330 * 0-Linear, 1-32x32, 2-64x32
5331 */
5332 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5333
5334 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5335 data32 &= (~0x30);
5336 data32 |= (hevc->mem_map_mode <<
5337 4);
5338
5339/* [5:4] -- address_format
5340 * 00:linear 01:32x32 10:64x32
5341 */
5342 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5343#else
5344 /* m8baby test1902 */
5345 data32 = READ_VREG(HEVC_SAO_CTRL1);
5346 data32 &= (~0x3000);
5347 data32 |= (hevc->mem_map_mode <<
5348 12);
5349
5350/* [13:12] axi_aformat, 0-Linear,
5351 * 1-32x32, 2-64x32
5352 */
5353 data32 &= (~0xff0);
5354 /* data32 |= 0x670; // Big-Endian per 64-bit */
5355 data32 |= endian; /* Big-Endian per 64-bit */
5356 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5357 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5358 if (get_double_write_mode(hevc) == 0)
5359 data32 |= 0x2; /*disable double write*/
5360 else if (get_double_write_mode(hevc) & 0x10)
5361 data32 |= 0x1; /*disable cm*/
5362 } else {
5363 unsigned int data;
5364 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5365 (0x0 << 0); /* h265 video format*/
5366 if (hevc->pic_w >= 1280)
5367 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5368 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5369 if (get_double_write_mode(hevc) == 0)
5370 data |= (0x1 << 8); /*enable first write*/
5371 else if (get_double_write_mode(hevc) & 0x10)
5372 data |= (0x1 << 9); /*double write only*/
5373 else
5374 data |= ((0x1 << 8) |(0x1 << 9));
5375
5376 WRITE_VREG(HEVC_DBLK_CFGB, data);
5377 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5378 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5379 }
5380
5381 /* swap uv */
5382 if (hevc->is_used_v4l) {
5383 if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21) ||
5384 (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21M))
5385 data32 &= ~(1 << 8); /* NV21 */
5386 else
5387 data32 |= (1 << 8); /* NV12 */
5388 }
5389
5390 /*
5391 * [31:24] ar_fifo1_axi_thred
5392 * [23:16] ar_fifo0_axi_thred
5393 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5394 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5395 * [11:08] axi_lendian_C
5396 * [07:04] axi_lendian_Y
5397 * [3] reserved
5398 * [2] clk_forceon
5399 * [1] dw_disable:disable double write output
5400 * [0] cm_disable:disable compress output
5401 */
5402 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5403 if (get_double_write_mode(hevc) & 0x10) {
5404 /* [23:22] dw_v1_ctrl
5405 *[21:20] dw_v0_ctrl
5406 *[19:18] dw_h1_ctrl
5407 *[17:16] dw_h0_ctrl
5408 */
5409 data32 = READ_VREG(HEVC_SAO_CTRL5);
5410 /*set them all 0 for H265_NV21 (no down-scale)*/
5411 data32 &= ~(0xff << 16);
5412 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5413 }
5414
5415 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5416 data32 &= (~0x30);
5417 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5418 data32 |= (hevc->mem_map_mode <<
5419 4);
5420 data32 &= (~0xF);
5421 data32 |= 0xf; /* valid only when double write only */
5422 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5423
5424 /* swap uv */
5425 if (hevc->is_used_v4l) {
5426 if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21) ||
5427 (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21M))
5428 data32 |= (1 << 12); /* NV21 */
5429 else
5430 data32 &= ~(1 << 12); /* NV12 */
5431 }
5432
5433 /*
5434 * [3:0] little_endian
5435 * [5:4] address_format 00:linear 01:32x32 10:64x32
5436 * [7:6] reserved
5437 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5438 * [11:10] reserved
5439 * [12] CbCr_byte_swap
5440 * [31:13] reserved
5441 */
5442 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5443#endif
5444 data32 = 0;
5445 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5446 data32_2 &= (~0x300);
5447 /* slice_deblocking_filter_disabled_flag = 0;
5448 * ucode has handle it , so read it from ucode directly
5449 */
5450 if (hevc->tile_enabled) {
5451 data32 |=
5452 ((misc_flag0 >>
5453 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5454 0x1) << 0;
5455 data32_2 |=
5456 ((misc_flag0 >>
5457 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5458 0x1) << 8;
5459 }
5460 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5461 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5462 0x1; /* ucode has handle it,so read it from ucode directly */
5463 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5464 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5465 /* slice_deblocking_filter_disabled_flag =
5466 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5467 * //ucode has handle it , so read it from ucode directly
5468 */
5469 data32 |= slice_deblocking_filter_disabled_flag << 2;
5470 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5471 hevc_print_cont(hevc, 0,
5472 "(1,%x)", data32);
5473 if (!slice_deblocking_filter_disabled_flag) {
5474 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5475 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5476 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5477 hevc_print_cont(hevc, 0,
5478 "(2,%x)", data32);
5479 }
5480 } else {
5481 data32 |=
5482 ((misc_flag0 >>
5483 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5484 0x1) << 2;
5485 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5486 hevc_print_cont(hevc, 0,
5487 "(3,%x)", data32);
5488 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5489 0x1) == 0) {
5490 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5491 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5492 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5493 hevc_print_cont(hevc, 0,
5494 "(4,%x)", data32);
5495 }
5496 }
5497 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5498 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5499 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5500 || (!slice_deblocking_filter_disabled_flag))) {
5501 data32 |=
5502 ((misc_flag0 >>
5503 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5504 & 0x1) << 1;
5505 data32_2 |=
5506 ((misc_flag0 >>
5507 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5508 & 0x1) << 9;
5509 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5510 hevc_print_cont(hevc, 0,
5511 "(5,%x)\n", data32);
5512 } else {
5513 data32 |=
5514 ((misc_flag0 >>
5515 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5516 & 0x1) << 1;
5517 data32_2 |=
5518 ((misc_flag0 >>
5519 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5520 & 0x1) << 9;
5521 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5522 hevc_print_cont(hevc, 0,
5523 "(6,%x)\n", data32);
5524 }
5525 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5526 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5527}
5528
5529#ifdef TEST_NO_BUF
5530static unsigned char test_flag = 1;
5531#endif
5532
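/*
 * pic_list_process() (summary inferred from the code below): after a
 * resolution change, detach buffers from idle pictures whose size no
 * longer matches, retire surplus picture slots beyond the working set,
 * re-activate free slots at the new size until get_work_pic_num()
 * pictures are available, and free any buffers left unused.
 */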
5533static void pic_list_process(struct hevc_state_s *hevc)
5534{
5535 int work_pic_num = get_work_pic_num(hevc);
5536 int alloc_pic_count = 0;
5537 int i;
5538 struct PIC_s *pic;
5539 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5540 pic = hevc->m_PIC[i];
5541 if (pic == NULL || pic->index == -1)
5542 continue;
5543 alloc_pic_count++;
5544 if (pic->output_mark == 0 && pic->referenced == 0
5545 && pic->output_ready == 0
5546 && (pic->width != hevc->pic_w ||
5547 pic->height != hevc->pic_h)
5548 ) {
5549 set_buf_unused(hevc, pic->BUF_index);
5550 pic->BUF_index = -1;
5551 if (alloc_pic_count > work_pic_num) {
5552 pic->width = 0;
5553 pic->height = 0;
5554 pic->index = -1;
5555 } else {
5556 pic->width = hevc->pic_w;
5557 pic->height = hevc->pic_h;
5558 }
5559 }
5560 }
5561 if (alloc_pic_count < work_pic_num) {
5562 int new_count = alloc_pic_count;
5563 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5564 pic = hevc->m_PIC[i];
5565 if (pic && pic->index == -1) {
5566 pic->index = i;
5567 pic->BUF_index = -1;
5568 pic->width = hevc->pic_w;
5569 pic->height = hevc->pic_h;
5570 new_count++;
5571 if (new_count >=
5572 work_pic_num)
5573 break;
5574 }
5575 }
5576
5577 }
5578 dealloc_unused_buf(hevc);
5579 if (get_alloc_pic_count(hevc)
5580 != alloc_pic_count) {
5581 hevc_print_cont(hevc, 0,
5582 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5583 __func__,
5584 work_pic_num,
5585 alloc_pic_count,
5586 get_alloc_pic_count(hevc));
5587 }
5588}
5589
5590static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5591{
5592 int i;
5593 struct PIC_s *pic;
5594 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5595 pic = hevc->m_PIC[i];
5596 if (pic == NULL || pic->index == -1)
5597 continue;
5598
5599 if (pic->output_mark == 0 && pic->referenced == 0
5600 && pic->output_ready == 0
5601 && pic->scatter_alloc
5602 )
5603 release_pic_mmu_buf(hevc, pic);
5604 }
5605
5606}
5607
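/*
 * get_new_pic() (summary inferred from the code below): choose a free
 * picture slot (unreferenced, not pending output, matching the current
 * size, lowest POC), allocate its frame/MV/MMU buffers on demand, and
 * initialise its per-picture state (POC, decode index, bit depth,
 * conformance window, etc.) from the parsed parameters.
 */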
5608static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5609 union param_u *rpm_param)
5610{
5611 struct PIC_s *new_pic = NULL;
5612 struct PIC_s *pic;
5613 int i;
5614 int ret;
5615
5616 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5617 pic = hevc->m_PIC[i];
5618 if (pic == NULL || pic->index == -1)
5619 continue;
5620
5621 if (pic->output_mark == 0 && pic->referenced == 0
5622 && pic->output_ready == 0
5623 && pic->width == hevc->pic_w
5624 && pic->height == hevc->pic_h
5625 && pic->vf_ref == 0
5626 ) {
5627 if (new_pic) {
5628 if (new_pic->POC != INVALID_POC) {
5629 if (pic->POC == INVALID_POC ||
5630 pic->POC < new_pic->POC)
5631 new_pic = pic;
5632 }
5633 } else
5634 new_pic = pic;
5635 }
5636 }
5637
5638 if (new_pic == NULL)
5639 return NULL;
5640
5641 if (new_pic->BUF_index < 0) {
5642 if (alloc_buf(hevc) < 0)
5643 return NULL;
5644 else {
5645 if (config_pic(hevc, new_pic) < 0) {
5646 dealloc_pic_buf(hevc, new_pic);
5647 return NULL;
5648 }
5649 }
5650 new_pic->width = hevc->pic_w;
5651 new_pic->height = hevc->pic_h;
5652 set_canvas(hevc, new_pic);
5653
5654 init_pic_list_hw(hevc);
5655 }
5656
5657 if (new_pic) {
5658 new_pic->double_write_mode =
5659 get_double_write_mode(hevc);
5660 if (new_pic->double_write_mode)
5661 set_canvas(hevc, new_pic);
5662
5663#ifdef TEST_NO_BUF
5664 if (test_flag) {
5665 test_flag = 0;
5666 return NULL;
5667 } else
5668 test_flag = 1;
5669#endif
5670 if (get_mv_buf(hevc, new_pic) < 0)
5671 return NULL;
5672
5673 if (hevc->mmu_enable) {
5674 ret = H265_alloc_mmu(hevc, new_pic,
5675 rpm_param->p.bit_depth,
5676 hevc->frame_mmu_map_addr);
5677 if (ret != 0) {
5678 put_mv_buf(hevc, new_pic);
5679 hevc_print(hevc, 0,
5680				"can't alloc needed mmu, idx %d ret = %d\n",
5681 new_pic->decode_idx,
5682 ret);
5683 return NULL;
5684 }
5685 }
5686 new_pic->referenced = 1;
5687 new_pic->decode_idx = hevc->decode_idx;
5688 new_pic->slice_idx = 0;
5689 new_pic->referenced = 1;
5690 new_pic->output_mark = 0;
5691 new_pic->recon_mark = 0;
5692 new_pic->error_mark = 0;
5693 new_pic->dis_mark = 0;
5694 /* new_pic->output_ready = 0; */
5695 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5696 new_pic->ip_mode = (!new_pic->num_reorder_pic && !disable_ip_mode) ? true : false;
5697 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5698 new_pic->POC = hevc->curr_POC;
5699 new_pic->pic_struct = hevc->curr_pic_struct;
5700 if (new_pic->aux_data_buf)
5701 release_aux_data(hevc, new_pic);
5702 new_pic->mem_saving_mode =
5703 hevc->mem_saving_mode;
5704 new_pic->bit_depth_luma =
5705 hevc->bit_depth_luma;
5706 new_pic->bit_depth_chroma =
5707 hevc->bit_depth_chroma;
5708 new_pic->video_signal_type =
5709 hevc->video_signal_type;
5710
5711 new_pic->conformance_window_flag =
5712 hevc->param.p.conformance_window_flag;
5713 new_pic->conf_win_left_offset =
5714 hevc->param.p.conf_win_left_offset;
5715 new_pic->conf_win_right_offset =
5716 hevc->param.p.conf_win_right_offset;
5717 new_pic->conf_win_top_offset =
5718 hevc->param.p.conf_win_top_offset;
5719 new_pic->conf_win_bottom_offset =
5720 hevc->param.p.conf_win_bottom_offset;
5721 new_pic->chroma_format_idc =
5722 hevc->param.p.chroma_format_idc;
5723
5724 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5725 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5726 __func__, new_pic->index,
5727 new_pic->BUF_index, new_pic->decode_idx,
5728 new_pic->POC);
5729
5730 }
5731 if (pic_list_debug & 0x1) {
5732 dump_pic_list(hevc);
5733 pr_err("\n*******************************************\n");
5734 }
5735
5736 return new_pic;
5737}
5738
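/*
 * v4l2 variant of get_new_pic(): walk the capture buffer pool and reuse an
 * idle picture already owned by the decoder, or take a buffer still queued
 * on the m2m side, bind and configure it, then initialize the picture state
 * the same way as the non-v4l path.
 */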
5739static struct PIC_s *v4l_get_new_pic(struct hevc_state_s *hevc,
5740 union param_u *rpm_param)
5741{
5742 int ret;
5743 struct aml_vcodec_ctx * v4l = hevc->v4l2_ctx;
5744 struct v4l_buff_pool *pool = &v4l->cap_pool;
5745 struct PIC_s *new_pic = NULL;
5746 struct PIC_s *pic = NULL;
5747 int i;
5748
5749 for (i = 0; i < pool->in; ++i) {
5750 u32 state = (pool->seq[i] >> 16);
5751 u32 index = (pool->seq[i] & 0xffff);
5752
5753 switch (state) {
5754 case V4L_CAP_BUFF_IN_DEC:
5755 pic = hevc->m_PIC[i];
5756 if (pic && (pic->index != -1) &&
5757 (pic->output_mark == 0) &&
5758 (pic->referenced == 0) &&
5759 (pic->output_ready == 0) &&
5760 (pic->width == hevc->pic_w) &&
5761 (pic->height == hevc->pic_h) &&
5762 (pic->vf_ref == 0) &&
5763 pic->cma_alloc_addr) {
5764 new_pic = pic;
5765 }
5766 break;
5767 case V4L_CAP_BUFF_IN_M2M:
5768 pic = hevc->m_PIC[index];
5769 pic->width = hevc->pic_w;
5770 pic->height = hevc->pic_h;
5771 if ((pic->index != -1) &&
5772 !v4l_alloc_buf(hevc, pic)) {
5773 v4l_config_pic(hevc, pic);
5774 init_pic_list_hw(hevc);
5775 new_pic = pic;
5776 }
5777 break;
5778 default:
5779 pr_err("v4l buffer state err %d.\n", state);
5780 break;
5781 }
5782
5783 if (new_pic)
5784 break;
5785 }
5786
5787 if (new_pic == NULL)
5788 return NULL;
5789
5790 new_pic->double_write_mode = get_double_write_mode(hevc);
5791 if (new_pic->double_write_mode)
5792 set_canvas(hevc, new_pic);
5793
5794 if (get_mv_buf(hevc, new_pic) < 0)
5795 return NULL;
5796
5797 if (hevc->mmu_enable) {
5798 ret = H265_alloc_mmu(hevc, new_pic,
5799 rpm_param->p.bit_depth,
5800 hevc->frame_mmu_map_addr);
5801 if (ret != 0) {
5802 put_mv_buf(hevc, new_pic);
5803 hevc_print(hevc, 0,
5804				"can't alloc needed mmu, idx %d ret = %d\n",
5805 new_pic->decode_idx, ret);
5806 return NULL;
5807 }
5808 }
5809
5810 new_pic->referenced = 1;
5811 new_pic->decode_idx = hevc->decode_idx;
5812 new_pic->slice_idx = 0;
5813 new_pic->referenced = 1;
5814 new_pic->output_mark = 0;
5815 new_pic->recon_mark = 0;
5816 new_pic->error_mark = 0;
5817 new_pic->dis_mark = 0;
5818 /* new_pic->output_ready = 0; */
5819 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5820 new_pic->ip_mode = (!new_pic->num_reorder_pic && !disable_ip_mode) ? true : false;
5821 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5822 new_pic->POC = hevc->curr_POC;
5823 new_pic->pic_struct = hevc->curr_pic_struct;
5824
5825 if (new_pic->aux_data_buf)
5826 release_aux_data(hevc, new_pic);
5827 new_pic->mem_saving_mode =
5828 hevc->mem_saving_mode;
5829 new_pic->bit_depth_luma =
5830 hevc->bit_depth_luma;
5831 new_pic->bit_depth_chroma =
5832 hevc->bit_depth_chroma;
5833 new_pic->video_signal_type =
5834 hevc->video_signal_type;
5835
5836 new_pic->conformance_window_flag =
5837 hevc->param.p.conformance_window_flag;
5838 new_pic->conf_win_left_offset =
5839 hevc->param.p.conf_win_left_offset;
5840 new_pic->conf_win_right_offset =
5841 hevc->param.p.conf_win_right_offset;
5842 new_pic->conf_win_top_offset =
5843 hevc->param.p.conf_win_top_offset;
5844 new_pic->conf_win_bottom_offset =
5845 hevc->param.p.conf_win_bottom_offset;
5846 new_pic->chroma_format_idc =
5847 hevc->param.p.chroma_format_idc;
5848
5849 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5850 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5851 __func__, new_pic->index,
5852 new_pic->BUF_index, new_pic->decode_idx,
5853 new_pic->POC);
5854
5855 return new_pic;
5856}
5857
5858static int get_display_pic_num(struct hevc_state_s *hevc)
5859{
5860 int i;
5861 struct PIC_s *pic;
5862 int num = 0;
5863
5864 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5865 pic = hevc->m_PIC[i];
5866 if (pic == NULL ||
5867 pic->index == -1)
5868 continue;
5869
5870 if (pic->output_ready == 1)
5871 num++;
5872 }
5873 return num;
5874}
5875
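/*
 * Flush path: mark the passed picture as reconstructed/outputtable (POC
 * permitting), then repeatedly pull displayable pictures with output_pic()
 * and either hand them to the display queue or recycle them when they are
 * error-marked or display is disabled by debug flags.
 */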
5876static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5877{
5878 struct PIC_s *pic_display;
5879
5880 if (pic) {
5881 /*PB skip control */
5882 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5883 /* start decoding after first I */
5884 hevc->ignore_bufmgr_error |= 0x1;
5885 }
5886 if (hevc->ignore_bufmgr_error & 1) {
5887 if (hevc->PB_skip_count_after_decoding > 0)
5888 hevc->PB_skip_count_after_decoding--;
5889 else {
5890 /* start displaying */
5891 hevc->ignore_bufmgr_error |= 0x2;
5892 }
5893 }
5894 if (pic->POC != INVALID_POC && !pic->ip_mode)
5895 pic->output_mark = 1;
5896 pic->recon_mark = 1;
5897 }
5898 do {
5899 pic_display = output_pic(hevc, 1);
5900
5901 if (pic_display) {
5902 pic_display->referenced = 0;
5903 put_mv_buf(hevc, pic_display);
5904 if ((pic_display->error_mark
5905 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5906 || (get_dbg_flag(hevc) &
5907 H265_DEBUG_DISPLAY_CUR_FRAME)
5908 || (get_dbg_flag(hevc) &
5909 H265_DEBUG_NO_DISPLAY)) {
5910 pic_display->output_ready = 0;
5911 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5912 hevc_print(hevc, 0,
5913 "[BM] Display: POC %d, ",
5914 pic_display->POC);
5915 hevc_print_cont(hevc, 0,
5916 "decoding index %d ==> ",
5917 pic_display->decode_idx);
5918 hevc_print_cont(hevc, 0,
5919 "Debug mode or error, recycle it\n");
5920 }
5921 /*
5922 * Here the pic/frame error_mark is 1,
5923 * and it won't be displayed, so increase
5924 * the drop count
5925 */
5926 hevc->gvs->drop_frame_count++;
5927 /* error frame count also need increase */
5928 hevc->gvs->error_frame_count++;
5929 } else {
5930 if (hevc->i_only & 0x1
5931 && pic_display->slice_type != 2) {
5932 pic_display->output_ready = 0;
5933 } else {
5934 prepare_display_buf(hevc, pic_display);
5935 if (get_dbg_flag(hevc)
5936 & H265_DEBUG_BUFMGR) {
5937 hevc_print(hevc, 0,
5938 "[BM] flush Display: POC %d, ",
5939 pic_display->POC);
5940 hevc_print_cont(hevc, 0,
5941 "decoding index %d\n",
5942 pic_display->decode_idx);
5943 }
5944 }
5945 }
5946 }
5947 } while (pic_display);
5948 clear_referenced_flag(hevc);
5949}
5950
5951/*
5952 * dv_meta_flag: 0, all aux data; 1, dolby meta only; 2, exclude dolby meta
5953 */
5954static void set_aux_data(struct hevc_state_s *hevc,
5955 struct PIC_s *pic, unsigned char suffix_flag,
5956 unsigned char dv_meta_flag)
5957{
5958 int i;
5959 unsigned short *aux_adr;
5960 unsigned int size_reg_val =
5961 READ_VREG(HEVC_AUX_DATA_SIZE);
5962 unsigned int aux_count = 0;
5963 int aux_size = 0;
5964 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
5965 return;
5966
5967 if (hevc->aux_data_dirty ||
5968 hevc->m_ins_flag == 0) {
5969
5970 hevc->aux_data_dirty = 0;
5971 }
5972
5973 if (suffix_flag) {
5974 aux_adr = (unsigned short *)
5975 (hevc->aux_addr +
5976 hevc->prefix_aux_size);
5977 aux_count =
5978 ((size_reg_val & 0xffff) << 4)
5979 >> 1;
5980 aux_size =
5981 hevc->suffix_aux_size;
5982 } else {
5983 aux_adr =
5984 (unsigned short *)hevc->aux_addr;
5985 aux_count =
5986 ((size_reg_val >> 16) << 4)
5987 >> 1;
5988 aux_size =
5989 hevc->prefix_aux_size;
5990 }
5991 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5992 hevc_print(hevc, 0,
5993 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
5994 __func__, pic, pic->aux_data_size,
5995 aux_count, suffix_flag, dv_meta_flag);
5996 }
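	/*
	 * Each aux record written below is an 8-byte header followed by the
	 * payload: bytes 0-3 carry the payload length (big endian), byte 4
	 * the tag, byte 5 is zero and bytes 6-7 the padding length; the
	 * payload is built from the low byte of each 16-bit word read from
	 * the HEVC_AUX buffer, four words at a time.
	 */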
5997 if (aux_size > 0 && aux_count > 0) {
5998 int heads_size = 0;
5999 int new_size;
6000 char *new_buf;
6001
6002 for (i = 0; i < aux_count; i++) {
6003 unsigned char tag = aux_adr[i] >> 8;
6004 if (tag != 0 && tag != 0xff) {
6005 if (dv_meta_flag == 0)
6006 heads_size += 8;
6007 else if (dv_meta_flag == 1 && tag == 0x1)
6008 heads_size += 8;
6009 else if (dv_meta_flag == 2 && tag != 0x1)
6010 heads_size += 8;
6011 }
6012 }
6013 new_size = pic->aux_data_size + aux_count + heads_size;
6014 new_buf = vzalloc(new_size);
6015 if (new_buf) {
6016 unsigned char valid_tag = 0;
6017 unsigned char *h =
6018 new_buf +
6019 pic->aux_data_size;
6020 unsigned char *p = h + 8;
6021 int len = 0;
6022 int padding_len = 0;
6023
6024 if (pic->aux_data_buf) {
6025 memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
6026 vfree(pic->aux_data_buf);
6027 }
6028 pic->aux_data_buf = new_buf;
6029
6030 for (i = 0; i < aux_count; i += 4) {
6031 int ii;
6032 unsigned char tag = aux_adr[i + 3] >> 8;
6033 if (tag != 0 && tag != 0xff) {
6034 if (dv_meta_flag == 0)
6035 valid_tag = 1;
6036 else if (dv_meta_flag == 1
6037 && tag == 0x1)
6038 valid_tag = 1;
6039 else if (dv_meta_flag == 2
6040 && tag != 0x1)
6041 valid_tag = 1;
6042 else
6043 valid_tag = 0;
6044 if (valid_tag && len > 0) {
6045 pic->aux_data_size +=
6046 (len + 8);
6047 h[0] = (len >> 24)
6048 & 0xff;
6049 h[1] = (len >> 16)
6050 & 0xff;
6051 h[2] = (len >> 8)
6052 & 0xff;
6053 h[3] = (len >> 0)
6054 & 0xff;
6055 h[6] =
6056 (padding_len >> 8)
6057 & 0xff;
6058 h[7] = (padding_len)
6059 & 0xff;
6060 h += (len + 8);
6061 p += 8;
6062 len = 0;
6063 padding_len = 0;
6064 }
6065 if (valid_tag) {
6066 h[4] = tag;
6067 h[5] = 0;
6068 h[6] = 0;
6069 h[7] = 0;
6070 }
6071 }
6072 if (valid_tag) {
6073 for (ii = 0; ii < 4; ii++) {
6074 unsigned short aa =
6075 aux_adr[i + 3
6076 - ii];
6077 *p = aa & 0xff;
6078 p++;
6079 len++;
6080 /*if ((aa >> 8) == 0xff)
6081 padding_len++;*/
6082 }
6083 }
6084 }
6085 if (len > 0) {
6086 pic->aux_data_size += (len + 8);
6087 h[0] = (len >> 24) & 0xff;
6088 h[1] = (len >> 16) & 0xff;
6089 h[2] = (len >> 8) & 0xff;
6090 h[3] = (len >> 0) & 0xff;
6091 h[6] = (padding_len >> 8) & 0xff;
6092 h[7] = (padding_len) & 0xff;
6093 }
6094 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
6095 hevc_print(hevc, 0,
6096 "aux: (size %d) suffix_flag %d\n",
6097 pic->aux_data_size, suffix_flag);
6098 for (i = 0; i < pic->aux_data_size; i++) {
6099 hevc_print_cont(hevc, 0,
6100 "%02x ", pic->aux_data_buf[i]);
6101 if (((i + 1) & 0xf) == 0)
6102 hevc_print_cont(hevc, 0, "\n");
6103 }
6104 hevc_print_cont(hevc, 0, "\n");
6105 }
6106
6107 } else {
6108 hevc_print(hevc, 0, "new buf alloc failed\n");
6109 if (pic->aux_data_buf)
6110 vfree(pic->aux_data_buf);
6111 pic->aux_data_buf = NULL;
6112 pic->aux_data_size = 0;
6113 }
6114 }
6115
6116}
6117
6118static void release_aux_data(struct hevc_state_s *hevc,
6119 struct PIC_s *pic)
6120{
6121 if (pic->aux_data_buf)
6122 vfree(pic->aux_data_buf);
6123 pic->aux_data_buf = NULL;
6124 pic->aux_data_size = 0;
6125}
6126
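/*
 * Finish the previously decoded picture: mark it for output/reconstruction,
 * trim its MMU allocation to the pages actually used (single-instance,
 * MMU-enabled case), drain any pictures that became displayable, and on an
 * IDR simply flush everything that is still queued.
 */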
6127static inline void hevc_pre_pic(struct hevc_state_s *hevc,
6128 struct PIC_s *pic)
6129{
6130
6131 /* prev pic */
6132 /*if (hevc->curr_POC != 0) {*/
6133 int decoded_poc = hevc->iPrevPOC;
6134#ifdef MULTI_INSTANCE_SUPPORT
6135 if (hevc->m_ins_flag) {
6136 decoded_poc = hevc->decoded_poc;
6137 hevc->decoded_poc = INVALID_POC;
6138 }
6139#endif
6140 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
6141 && hevc->m_nalUnitType !=
6142 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6143 struct PIC_s *pic_display;
6144
6145 pic = get_pic_by_POC(hevc, decoded_poc);
6146 if (pic && (pic->POC != INVALID_POC)) {
6147 struct vdec_s *vdec = hw_to_vdec(hevc);
6148
6149 /*PB skip control */
6150 if (pic->error_mark == 0
6151 && hevc->PB_skip_mode == 1) {
6152 /* start decoding after
6153 * first I
6154 */
6155 hevc->ignore_bufmgr_error |= 0x1;
6156 }
6157 if (hevc->ignore_bufmgr_error & 1) {
6158 if (hevc->PB_skip_count_after_decoding > 0) {
6159 hevc->PB_skip_count_after_decoding--;
6160 } else {
6161 /* start displaying */
6162 hevc->ignore_bufmgr_error |= 0x2;
6163 }
6164 }
6165 if (hevc->mmu_enable
6166 && ((hevc->double_write_mode & 0x10) == 0)) {
6167 if (!hevc->m_ins_flag) {
6168 hevc->used_4k_num =
6169 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
6170
6171 if ((!is_skip_decoding(hevc, pic)) &&
6172 (hevc->used_4k_num >= 0) &&
6173 (hevc->cur_pic->scatter_alloc
6174 == 1)) {
6175 hevc_print(hevc,
6176 H265_DEBUG_BUFMGR_MORE,
6177 "%s pic index %d scatter_alloc %d page_start %d\n",
6178 "decoder_mmu_box_free_idx_tail",
6179 hevc->cur_pic->index,
6180 hevc->cur_pic->scatter_alloc,
6181 hevc->used_4k_num);
6182 hevc_mmu_dma_check(hw_to_vdec(hevc));
6183 decoder_mmu_box_free_idx_tail(
6184 hevc->mmu_box,
6185 hevc->cur_pic->index,
6186 hevc->used_4k_num);
6187 hevc->cur_pic->scatter_alloc
6188 = 2;
6189 }
6190 hevc->used_4k_num = -1;
6191 }
6192 }
6193 if (!pic->ip_mode)
6194 pic->output_mark = 1;
6195 pic->recon_mark = 1;
6196 pic->dis_mark = 1;
6197 if (vdec->mvfrm) {
6198 pic->frame_size = vdec->mvfrm->frame_size;
6199 pic->hw_decode_time = (u32)vdec->mvfrm->hw_decode_time;
6200 }
6201 }
6202 do {
6203 pic_display = output_pic(hevc, 0);
6204
6205 if (pic_display) {
6206 if ((pic_display->error_mark &&
6207 ((hevc->ignore_bufmgr_error &
6208 0x2) == 0))
6209 || (get_dbg_flag(hevc) &
6210 H265_DEBUG_DISPLAY_CUR_FRAME)
6211 || (get_dbg_flag(hevc) &
6212 H265_DEBUG_NO_DISPLAY)) {
6213 pic_display->output_ready = 0;
6214 if (get_dbg_flag(hevc) &
6215 H265_DEBUG_BUFMGR) {
6216 hevc_print(hevc, 0,
6217 "[BM] Display: POC %d, ",
6218 pic_display->POC);
6219 hevc_print_cont(hevc, 0,
6220 "decoding index %d ==> ",
6221 pic_display->
6222 decode_idx);
6223 hevc_print_cont(hevc, 0,
6224 "Debug or err,recycle it\n");
6225 }
6226 /*
6227 * Here the pic/frame error_mark is 1,
6228 * and it won't be displayed, so increase
6229 * the drop count
6230 */
6231 hevc->gvs->drop_frame_count++;
6232 /* error frame count also need increase */
6233 hevc->gvs->error_frame_count++;
6234 } else {
6235 if (hevc->i_only & 0x1
6236 && pic_display->
6237 slice_type != 2) {
6238 pic_display->output_ready = 0;
6239 } else {
6240 prepare_display_buf
6241 (hevc,
6242 pic_display);
6243 if (get_dbg_flag(hevc) &
6244 H265_DEBUG_BUFMGR) {
6245 hevc_print(hevc, 0,
6246 "[BM] Display: POC %d, ",
6247 pic_display->POC);
6248 hevc_print_cont(hevc, 0,
6249 "decoding index %d\n",
6250 pic_display->
6251 decode_idx);
6252 }
6253 }
6254 }
6255 }
6256 } while (pic_display);
6257 } else {
6258 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6259 hevc_print(hevc, 0,
6260 "[BM] current pic is IDR, ");
6261 hevc_print(hevc, 0,
6262 "clear referenced flag of all buffers\n");
6263 }
6264 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6265 dump_pic_list(hevc);
6266 pic = get_pic_by_POC(hevc, decoded_poc);
6267 flush_output(hevc, pic);
6268 }
6269
6270}
6271
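/*
 * Error check run before starting a new picture: if the previous picture's
 * decoded LCU count (read from the parser) stops short of the full
 * lcu_x_num_pre * lcu_y_num_pre grid, mark it as erroneous, subject to
 * error_handle_policy; head errors and over-decode conditions also mark it.
 */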
6272static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
6273 int decoded_lcu)
6274{
6275 int current_lcu_idx = decoded_lcu;
6276 if (decoded_lcu < 0)
6277 return;
6278
6279 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6280 hevc_print(hevc, 0,
6281 "cur lcu idx = %d, (total %d)\n",
6282 current_lcu_idx, hevc->lcu_total);
6283 }
6284 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6285 if (hevc->first_pic_after_recover) {
6286 if (current_lcu_idx !=
6287 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6288 hevc->cur_pic->error_mark = 1;
6289 } else {
6290 if (hevc->lcu_x_num_pre != 0
6291 && hevc->lcu_y_num_pre != 0
6292 && current_lcu_idx != 0
6293 && current_lcu_idx <
6294 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6295 hevc->cur_pic->error_mark = 1;
6296 }
6297 if (hevc->cur_pic->error_mark) {
6298 hevc_print(hevc, 0,
6299 "cur lcu idx = %d, (total %d), set error_mark\n",
6300 current_lcu_idx,
6301 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
6302 if (is_log_enable(hevc))
6303 add_log(hevc,
6304 "cur lcu idx = %d, (total %d), set error_mark",
6305 current_lcu_idx,
6306 hevc->lcu_x_num_pre *
6307 hevc->lcu_y_num_pre);
6308
6309 }
6310
6311 }
6312 if (hevc->cur_pic && hevc->head_error_flag) {
6313 hevc->cur_pic->error_mark = 1;
6314 hevc_print(hevc, 0,
6315 "head has error, set error_mark\n");
6316 }
6317
6318 if ((error_handle_policy & 0x80) == 0) {
6319 if (hevc->over_decode && hevc->cur_pic) {
6320 hevc_print(hevc, 0,
6321 "over decode, set error_mark\n");
6322 hevc->cur_pic->error_mark = 1;
6323 }
6324 }
6325
6326 hevc->lcu_x_num_pre = hevc->lcu_x_num;
6327 hevc->lcu_y_num_pre = hevc->lcu_y_num;
6328}
6329
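/*
 * Same check as above but against the current lcu_x_num/lcu_y_num values;
 * in addition, an error-marked B picture (slice_type 0) whose POC lags more
 * than MAX_BUF_NUM behind iPrevPOC triggers a flush of the output queue.
 */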
6330static void check_pic_decoded_error(struct hevc_state_s *hevc,
6331 int decoded_lcu)
6332{
6333 int current_lcu_idx = decoded_lcu;
6334 if (decoded_lcu < 0)
6335 return;
6336
6337 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6338 hevc_print(hevc, 0,
6339 "cur lcu idx = %d, (total %d)\n",
6340 current_lcu_idx, hevc->lcu_total);
6341 }
6342 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6343 if (hevc->lcu_x_num != 0
6344 && hevc->lcu_y_num != 0
6345 && current_lcu_idx != 0
6346 && current_lcu_idx <
6347 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6348 hevc->cur_pic->error_mark = 1;
6349 if (hevc->cur_pic->error_mark) {
6350 hevc_print(hevc, 0,
6351 "cur lcu idx = %d, (total %d), set error_mark\n",
6352 current_lcu_idx,
6353 hevc->lcu_x_num*hevc->lcu_y_num);
6354			if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6355				&& ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6356				hevc_print(hevc, 0,
6357					"Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6358					hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6359				flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6360 }
6361 if (is_log_enable(hevc))
6362 add_log(hevc,
6363 "cur lcu idx = %d, (total %d), set error_mark",
6364 current_lcu_idx,
6365 hevc->lcu_x_num *
6366 hevc->lcu_y_num);
6367
6368 }
6369
6370 }
6371 if (hevc->cur_pic && hevc->head_error_flag) {
6372 hevc->cur_pic->error_mark = 1;
6373 hevc_print(hevc, 0,
6374 "head has error, set error_mark\n");
6375 }
6376
6377 if ((error_handle_policy & 0x80) == 0) {
6378 if (hevc->over_decode && hevc->cur_pic) {
6379 hevc_print(hevc, 0,
6380 "over decode, set error_mark\n");
6381 hevc->cur_pic->error_mark = 1;
6382 }
6383 }
6384}
6385
6386/* Only after one field or one frame has been decoded can this
6387 * function be called to collect the per-picture QoS statistics. */
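/*
 * Two hardware paths: on chips before G12A the ucode leaves aggregated
 * min/avg/max MV, QP and skip statistics in HEVC_MV_INFO, HEVC_QP_INFO and
 * HEVC_SKIP_INFO; from G12A on, the per-picture counters are streamed out
 * of HEVC_PIC_QUALITY_DATA and the block is re-armed by writing bit 8 of
 * HEVC_PIC_QUALITY_CTRL.
 */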
6388static void get_picture_qos_info(struct hevc_state_s *hevc)
6389{
6390 struct PIC_s *picture = hevc->cur_pic;
6391
6392/*
6393#define DEBUG_QOS
6394*/
6395
6396 if (!hevc->cur_pic)
6397 return;
6398
6399 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6400 unsigned char a[3];
6401 unsigned char i, j, t;
6402 unsigned long data;
6403
6404 data = READ_VREG(HEVC_MV_INFO);
6405 if (picture->slice_type == I_SLICE)
6406 data = 0;
6407 a[0] = data & 0xff;
6408 a[1] = (data >> 8) & 0xff;
6409 a[2] = (data >> 16) & 0xff;
6410
6411 for (i = 0; i < 3; i++)
6412 for (j = i+1; j < 3; j++) {
6413 if (a[j] < a[i]) {
6414 t = a[j];
6415 a[j] = a[i];
6416 a[i] = t;
6417 } else if (a[j] == a[i]) {
6418 a[i]++;
6419 t = a[j];
6420 a[j] = a[i];
6421 a[i] = t;
6422 }
6423 }
6424 picture->max_mv = a[2];
6425 picture->avg_mv = a[1];
6426 picture->min_mv = a[0];
6427#ifdef DEBUG_QOS
6428 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6429 data, a[0], a[1], a[2]);
6430#endif
6431
6432 data = READ_VREG(HEVC_QP_INFO);
6433 a[0] = data & 0x1f;
6434 a[1] = (data >> 8) & 0x3f;
6435 a[2] = (data >> 16) & 0x7f;
6436
6437 for (i = 0; i < 3; i++)
6438 for (j = i+1; j < 3; j++) {
6439 if (a[j] < a[i]) {
6440 t = a[j];
6441 a[j] = a[i];
6442 a[i] = t;
6443 } else if (a[j] == a[i]) {
6444 a[i]++;
6445 t = a[j];
6446 a[j] = a[i];
6447 a[i] = t;
6448 }
6449 }
6450 picture->max_qp = a[2];
6451 picture->avg_qp = a[1];
6452 picture->min_qp = a[0];
6453#ifdef DEBUG_QOS
6454 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6455 data, a[0], a[1], a[2]);
6456#endif
6457
6458 data = READ_VREG(HEVC_SKIP_INFO);
6459 a[0] = data & 0x1f;
6460 a[1] = (data >> 8) & 0x3f;
6461 a[2] = (data >> 16) & 0x7f;
6462
6463 for (i = 0; i < 3; i++)
6464 for (j = i+1; j < 3; j++) {
6465 if (a[j] < a[i]) {
6466 t = a[j];
6467 a[j] = a[i];
6468 a[i] = t;
6469 } else if (a[j] == a[i]) {
6470 a[i]++;
6471 t = a[j];
6472 a[j] = a[i];
6473 a[i] = t;
6474 }
6475 }
6476 picture->max_skip = a[2];
6477 picture->avg_skip = a[1];
6478 picture->min_skip = a[0];
6479
6480#ifdef DEBUG_QOS
6481 hevc_print(hevc, 0,
6482 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6483 data, a[0], a[1], a[2]);
6484#endif
6485 } else {
6486 uint32_t blk88_y_count;
6487 uint32_t blk88_c_count;
6488 uint32_t blk22_mv_count;
6489 uint32_t rdata32;
6490 int32_t mv_hi;
6491 int32_t mv_lo;
6492 uint32_t rdata32_l;
6493 uint32_t mvx_L0_hi;
6494 uint32_t mvy_L0_hi;
6495 uint32_t mvx_L1_hi;
6496 uint32_t mvy_L1_hi;
6497 int64_t value;
6498 uint64_t temp_value;
6499#ifdef DEBUG_QOS
6500 int pic_number = picture->POC;
6501#endif
6502
6503 picture->max_mv = 0;
6504 picture->avg_mv = 0;
6505 picture->min_mv = 0;
6506
6507 picture->max_skip = 0;
6508 picture->avg_skip = 0;
6509 picture->min_skip = 0;
6510
6511 picture->max_qp = 0;
6512 picture->avg_qp = 0;
6513 picture->min_qp = 0;
6514
6515
6516
6517#ifdef DEBUG_QOS
6518 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6519 picture->slice_type,
6520 picture->POC);
6521#endif
6522 /* set rd_idx to 0 */
6523 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6524
6525 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6526 if (blk88_y_count == 0) {
6527#ifdef DEBUG_QOS
6528 hevc_print(hevc, 0,
6529 "[Picture %d Quality] NO Data yet.\n",
6530 pic_number);
6531#endif
6532 /* reset all counts */
6533 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6534 return;
6535 }
6536 /* qp_y_sum */
6537 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6538#ifdef DEBUG_QOS
6539 hevc_print(hevc, 0,
6540 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6541 pic_number, rdata32/blk88_y_count,
6542 rdata32, blk88_y_count);
6543#endif
6544 picture->avg_qp = rdata32/blk88_y_count;
6545 /* intra_y_count */
6546 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6547#ifdef DEBUG_QOS
6548 hevc_print(hevc, 0,
6549 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6550 pic_number, rdata32*100/blk88_y_count,
6551 '%', rdata32);
6552#endif
6553 /* skipped_y_count */
6554 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6555#ifdef DEBUG_QOS
6556 hevc_print(hevc, 0,
6557 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6558 pic_number, rdata32*100/blk88_y_count,
6559 '%', rdata32);
6560#endif
6561 picture->avg_skip = rdata32*100/blk88_y_count;
6562 /* coeff_non_zero_y_count */
6563 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6564#ifdef DEBUG_QOS
6565 hevc_print(hevc, 0,
6566 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6567 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6568 '%', rdata32);
6569#endif
6570 /* blk66_c_count */
6571 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6572 if (blk88_c_count == 0) {
6573#ifdef DEBUG_QOS
6574 hevc_print(hevc, 0,
6575 "[Picture %d Quality] NO Data yet.\n",
6576 pic_number);
6577#endif
6578 /* reset all counts */
6579 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6580 return;
6581 }
6582 /* qp_c_sum */
6583 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6584#ifdef DEBUG_QOS
6585 hevc_print(hevc, 0,
6586 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6587 pic_number, rdata32/blk88_c_count,
6588 rdata32, blk88_c_count);
6589#endif
6590 /* intra_c_count */
6591 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6592#ifdef DEBUG_QOS
6593 hevc_print(hevc, 0,
6594 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6595 pic_number, rdata32*100/blk88_c_count,
6596 '%', rdata32);
6597#endif
6598 /* skipped_cu_c_count */
6599 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6600#ifdef DEBUG_QOS
6601 hevc_print(hevc, 0,
6602 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6603 pic_number, rdata32*100/blk88_c_count,
6604 '%', rdata32);
6605#endif
6606 /* coeff_non_zero_c_count */
6607 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6608#ifdef DEBUG_QOS
6609 hevc_print(hevc, 0,
6610 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6611 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6612 '%', rdata32);
6613#endif
6614
6615 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6616 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6617 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6618#ifdef DEBUG_QOS
6619 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6620 pic_number, (rdata32>>0)&0xff);
6621#endif
6622 picture->min_qp = (rdata32>>0)&0xff;
6623
6624#ifdef DEBUG_QOS
6625 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6626 pic_number, (rdata32>>8)&0xff);
6627#endif
6628 picture->max_qp = (rdata32>>8)&0xff;
6629
6630#ifdef DEBUG_QOS
6631 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6632 pic_number, (rdata32>>16)&0xff);
6633 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6634 pic_number, (rdata32>>24)&0xff);
6635#endif
6636
6637 /* blk22_mv_count */
6638 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6639 if (blk22_mv_count == 0) {
6640#ifdef DEBUG_QOS
6641 hevc_print(hevc, 0,
6642 "[Picture %d Quality] NO MV Data yet.\n",
6643 pic_number);
6644#endif
6645 /* reset all counts */
6646 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6647 return;
6648 }
6649 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6650 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6651 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6652 /* should all be 0x00 or 0xff */
6653#ifdef DEBUG_QOS
6654 hevc_print(hevc, 0,
6655 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6656 pic_number, rdata32);
6657#endif
6658 mvx_L0_hi = ((rdata32>>0)&0xff);
6659 mvy_L0_hi = ((rdata32>>8)&0xff);
6660 mvx_L1_hi = ((rdata32>>16)&0xff);
6661 mvy_L1_hi = ((rdata32>>24)&0xff);
6662
6663 /* mvx_L0_count[31:0] */
6664 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6665 temp_value = mvx_L0_hi;
6666 temp_value = (temp_value << 32) | rdata32_l;
6667
6668 if (mvx_L0_hi & 0x80)
6669			value = 0xFFFFFF0000000000 | temp_value; /* sign-extend the 40-bit sum */
6670 else
6671 value = temp_value;
6672 value = div_s64(value, blk22_mv_count);
6673#ifdef DEBUG_QOS
6674 hevc_print(hevc, 0,
6675 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6676 pic_number, (int)value,
6677 value, blk22_mv_count);
6678#endif
6679 picture->avg_mv = value;
6680
6681 /* mvy_L0_count[31:0] */
6682 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6683 temp_value = mvy_L0_hi;
6684 temp_value = (temp_value << 32) | rdata32_l;
6685
6686 if (mvy_L0_hi & 0x80)
6687			value = 0xFFFFFF0000000000 | temp_value;
6688 else
6689 value = temp_value;
6690#ifdef DEBUG_QOS
6691 hevc_print(hevc, 0,
6692 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6693 pic_number, rdata32_l/blk22_mv_count,
6694 value, blk22_mv_count);
6695#endif
6696
6697 /* mvx_L1_count[31:0] */
6698 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6699 temp_value = mvx_L1_hi;
6700 temp_value = (temp_value << 32) | rdata32_l;
6701 if (mvx_L1_hi & 0x80)
6702			value = 0xFFFFFF0000000000 | temp_value;
6703 else
6704 value = temp_value;
6705#ifdef DEBUG_QOS
6706 hevc_print(hevc, 0,
6707 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6708 pic_number, rdata32_l/blk22_mv_count,
6709 value, blk22_mv_count);
6710#endif
6711
6712 /* mvy_L1_count[31:0] */
6713 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6714 temp_value = mvy_L1_hi;
6715 temp_value = (temp_value << 32) | rdata32_l;
6716 if (mvy_L1_hi & 0x80)
6717			value = 0xFFFFFF0000000000 | temp_value;
6718 else
6719 value = temp_value;
6720#ifdef DEBUG_QOS
6721 hevc_print(hevc, 0,
6722 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6723 pic_number, rdata32_l/blk22_mv_count,
6724 value, blk22_mv_count);
6725#endif
6726
6727 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6728 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6729 mv_hi = (rdata32>>16)&0xffff;
6730 if (mv_hi & 0x8000)
6731 mv_hi = 0x8000 - mv_hi;
6732#ifdef DEBUG_QOS
6733 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6734 pic_number, mv_hi);
6735#endif
6736 picture->max_mv = mv_hi;
6737
6738 mv_lo = (rdata32>>0)&0xffff;
6739 if (mv_lo & 0x8000)
6740 mv_lo = 0x8000 - mv_lo;
6741#ifdef DEBUG_QOS
6742 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6743 pic_number, mv_lo);
6744#endif
6745 picture->min_mv = mv_lo;
6746
6747#ifdef DEBUG_QOS
6748 /* {mvy_L0_max, mvy_L0_min} */
6749 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6750 mv_hi = (rdata32>>16)&0xffff;
6751 if (mv_hi & 0x8000)
6752 mv_hi = 0x8000 - mv_hi;
6753 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6754 pic_number, mv_hi);
6755
6756
6757 mv_lo = (rdata32>>0)&0xffff;
6758 if (mv_lo & 0x8000)
6759 mv_lo = 0x8000 - mv_lo;
6760
6761 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6762 pic_number, mv_lo);
6763
6764
6765 /* {mvx_L1_max, mvx_L1_min} */
6766 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6767 mv_hi = (rdata32>>16)&0xffff;
6768 if (mv_hi & 0x8000)
6769 mv_hi = 0x8000 - mv_hi;
6770
6771 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6772 pic_number, mv_hi);
6773
6774
6775 mv_lo = (rdata32>>0)&0xffff;
6776 if (mv_lo & 0x8000)
6777 mv_lo = 0x8000 - mv_lo;
6778
6779 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6780 pic_number, mv_lo);
6781
6782
6783 /* {mvy_L1_max, mvy_L1_min} */
6784 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6785 mv_hi = (rdata32>>16)&0xffff;
6786 if (mv_hi & 0x8000)
6787 mv_hi = 0x8000 - mv_hi;
6788
6789 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6790 pic_number, mv_hi);
6791
6792 mv_lo = (rdata32>>0)&0xffff;
6793 if (mv_lo & 0x8000)
6794 mv_lo = 0x8000 - mv_lo;
6795
6796 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6797 pic_number, mv_lo);
6798#endif
6799
6800 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6801#ifdef DEBUG_QOS
6802 hevc_print(hevc, 0,
6803 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6804 pic_number, rdata32);
6805#endif
6806 /* reset all counts */
6807 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6808 }
6809}
6810
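/*
 * Slice segment header processing: consumes the parameters the ucode wrote
 * into the RPM area, derives the POC, (re)builds the picture list on a new
 * picture, allocates the target picture, sets up tiles and reference lists,
 * and programs the MC/MPRED/SAO hardware. Returns 0 to continue normal
 * decoding, a non-zero code to skip or abort this slice, or -1 when the
 * decoder must wait for a free buffer.
 */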
6811static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6812 union param_u *rpm_param,
6813 int decode_pic_begin)
6814{
6815#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6816 struct vdec_s *vdec = hw_to_vdec(hevc);
6817#endif
6818 int i;
6819 int lcu_x_num_div;
6820 int lcu_y_num_div;
6821 int Col_ref;
6822 int dbg_skip_flag = 0;
6823 struct aml_vcodec_ctx *ctx =
6824 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
6825
6826 if (hevc->is_used_v4l && ctx->param_sets_from_ucode)
6827 hevc->res_ch_flag = 0;
6828
6829 if (hevc->wait_buf == 0) {
6830 hevc->sps_num_reorder_pics_0 =
6831 rpm_param->p.sps_num_reorder_pics_0;
6832 hevc->ip_mode = (!hevc->sps_num_reorder_pics_0 && !disable_ip_mode) ? true : false;
6833 hevc->m_temporalId = rpm_param->p.m_temporalId;
6834 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6835 hevc->interlace_flag =
6836 (rpm_param->p.profile_etc >> 2) & 0x1;
6837 hevc->curr_pic_struct =
6838 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6839 if (parser_sei_enable & 0x4) {
6840 hevc->frame_field_info_present_flag =
6841 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6842 }
6843
6844 /* if (interlace_enable == 0 || hevc->m_ins_flag) */
6845 if (interlace_enable == 0)
6846 hevc->interlace_flag = 0;
6847 if (interlace_enable & 0x100)
6848 hevc->interlace_flag = interlace_enable & 0x1;
6849 if (hevc->interlace_flag == 0)
6850 hevc->curr_pic_struct = 0;
6851 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6852 /*
6853 *hevc->m_pocRandomAccess = MAX_INT;
6854 * //add to fix RAP_B_Bossen_1
6855 */
6856 /* } */
6857 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6858 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6859 hevc->slice_segment_addr =
6860 rpm_param->p.slice_segment_address;
6861 if (!rpm_param->p.dependent_slice_segment_flag)
6862 hevc->slice_addr = hevc->slice_segment_addr;
6863 } else {
6864 hevc->slice_segment_addr = 0;
6865 hevc->slice_addr = 0;
6866 }
6867
6868 hevc->iPrevPOC = hevc->curr_POC;
6869 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6870 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6871 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6872 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6873 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6874 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6875 hevc->isNextSliceSegment =
6876 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6877 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6878 || hevc->pic_h !=
6879 rpm_param->p.pic_height_in_luma_samples) {
6880 hevc_print(hevc, 0,
6881 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6882 hevc->pic_w, hevc->pic_h,
6883 rpm_param->p.pic_width_in_luma_samples,
6884 rpm_param->p.pic_height_in_luma_samples,
6885 hevc->interlace_flag);
6886
6887 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6888 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6889 hevc->frame_width = hevc->pic_w;
6890 hevc->frame_height = hevc->pic_h;
6891#ifdef LOSLESS_COMPRESS_MODE
6892 if (/*re_config_pic_flag == 0 &&*/
6893 (get_double_write_mode(hevc) & 0x10) == 0)
6894 init_decode_head_hw(hevc);
6895#endif
6896 }
6897
6898 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6899 hevc_print(hevc, 0, "over size : %u x %u.\n",
6900 hevc->pic_w, hevc->pic_h);
6901 if ((!hevc->m_ins_flag) &&
6902 ((debug &
6903 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6904 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6905 H265_DEBUG_DIS_SYS_ERROR_PROC);
6906 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6907 return 3;
6908 }
6909 if (hevc->bit_depth_chroma > 10 ||
6910 hevc->bit_depth_luma > 10) {
6911			hevc_print(hevc, 0, "unsupported bit depth: %u,%u\n",
6912 hevc->bit_depth_chroma,
6913 hevc->bit_depth_luma);
6914 if (!hevc->m_ins_flag)
6915 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6916 H265_DEBUG_DIS_SYS_ERROR_PROC);
6917 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6918 return 4;
6919 }
6920
6921			/* a zero width or height would cause a divide-by-zero below */
6922 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6923 if (get_dbg_flag(hevc)) {
6924 hevc_print(hevc, 0,
6925 "Fatal Error, pic_w = %d, pic_h = %d\n",
6926 hevc->pic_w, hevc->pic_h);
6927 }
6928 return 3;
6929 }
6930 pic_list_process(hevc);
6931
6932 hevc->lcu_size =
6933 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6934 3 + rpm_param->
6935 p.log2_diff_max_min_coding_block_size);
6936 if (hevc->lcu_size == 0) {
6937 hevc_print(hevc, 0,
6938 "Error, lcu_size = 0 (%d,%d)\n",
6939 rpm_param->p.
6940 log2_min_coding_block_size_minus3,
6941 rpm_param->p.
6942 log2_diff_max_min_coding_block_size);
6943 return 3;
6944 }
6945 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6946 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6947 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6948 hevc->lcu_x_num =
6949 ((hevc->pic_w % hevc->lcu_size) ==
6950 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6951 hevc->lcu_y_num =
6952 ((hevc->pic_h % hevc->lcu_size) ==
6953 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6954 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
6955
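		/*
		 * POC derivation follows the HEVC spec: PicOrderCntMsb is
		 * carried over from the previous TemporalId-0 picture and
		 * wrapped by MaxPicOrderCntLsb, except that IDR pictures reset
		 * the POC to 0 and BLA pictures force the MSB part to 0.
		 */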
6956 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6957 || hevc->m_nalUnitType ==
6958 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6959 hevc->curr_POC = 0;
6960 if ((hevc->m_temporalId - 1) == 0)
6961 hevc->iPrevTid0POC = hevc->curr_POC;
6962 } else {
6963 int iMaxPOClsb =
6964 1 << (rpm_param->p.
6965 log2_max_pic_order_cnt_lsb_minus4 + 4);
6966 int iPrevPOClsb;
6967 int iPrevPOCmsb;
6968 int iPOCmsb;
6969 int iPOClsb = rpm_param->p.POClsb;
6970
6971 if (iMaxPOClsb == 0) {
6972 hevc_print(hevc, 0,
6973 "error iMaxPOClsb is 0\n");
6974 return 3;
6975 }
6976
6977 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
6978 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
6979
6980 if ((iPOClsb < iPrevPOClsb)
6981 && ((iPrevPOClsb - iPOClsb) >=
6982 (iMaxPOClsb / 2)))
6983 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
6984 else if ((iPOClsb > iPrevPOClsb)
6985 && ((iPOClsb - iPrevPOClsb) >
6986 (iMaxPOClsb / 2)))
6987 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
6988 else
6989 iPOCmsb = iPrevPOCmsb;
6990 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6991 hevc_print(hevc, 0,
6992 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
6993 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
6994 iPOClsb);
6995 }
6996 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6997 || hevc->m_nalUnitType ==
6998 NAL_UNIT_CODED_SLICE_BLANT
6999 || hevc->m_nalUnitType ==
7000 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
7001 /* For BLA picture types, POCmsb is set to 0. */
7002 iPOCmsb = 0;
7003 }
7004 hevc->curr_POC = (iPOCmsb + iPOClsb);
7005 if ((hevc->m_temporalId - 1) == 0)
7006 hevc->iPrevTid0POC = hevc->curr_POC;
7007 else {
7008 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7009 hevc_print(hevc, 0,
7010 "m_temporalID is %d\n",
7011 hevc->m_temporalId);
7012 }
7013 }
7014 }
7015 hevc->RefNum_L0 =
7016 (rpm_param->p.num_ref_idx_l0_active >
7017 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
7018 num_ref_idx_l0_active;
7019 hevc->RefNum_L1 =
7020 (rpm_param->p.num_ref_idx_l1_active >
7021 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
7022 num_ref_idx_l1_active;
7023
7024 /* if(curr_POC==0x10) dump_lmem(); */
7025
7026 /* skip RASL pictures after CRA/BLA pictures */
7027 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
7028 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
7029 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
7030 || hevc->m_nalUnitType ==
7031 NAL_UNIT_CODED_SLICE_BLANT
7032 || hevc->m_nalUnitType ==
7033 NAL_UNIT_CODED_SLICE_BLA_N_LP)
7034 hevc->m_pocRandomAccess = hevc->curr_POC;
7035 else
7036 hevc->m_pocRandomAccess = -MAX_INT;
7037 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
7038 || hevc->m_nalUnitType ==
7039 NAL_UNIT_CODED_SLICE_BLANT
7040 || hevc->m_nalUnitType ==
7041 NAL_UNIT_CODED_SLICE_BLA_N_LP)
7042 hevc->m_pocRandomAccess = hevc->curr_POC;
7043 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
7044 (nal_skip_policy >= 3) &&
7045 (hevc->m_nalUnitType ==
7046 NAL_UNIT_CODED_SLICE_RASL_N ||
7047 hevc->m_nalUnitType ==
7048 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
7049 if (get_dbg_flag(hevc)) {
7050 hevc_print(hevc, 0,
7051 "RASL picture with POC %d < %d ",
7052 hevc->curr_POC, hevc->m_pocRandomAccess);
7053 hevc_print(hevc, 0,
7054					"(RandomAccess point POC), skip it\n");
7055 }
7056 return 1;
7057 }
7058
7059 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
7060 hevc->skip_flag = 0;
7061 /**/
7062 /* if((iPrevPOC != curr_POC)){ */
7063 if (rpm_param->p.slice_segment_address == 0) {
7064 struct PIC_s *pic;
7065
7066 hevc->new_pic = 1;
7067#ifdef MULTI_INSTANCE_SUPPORT
7068 if (!hevc->m_ins_flag)
7069#endif
7070 check_pic_decoded_error_pre(hevc,
7071 READ_VREG(HEVC_PARSER_LCU_START)
7072 & 0xffffff);
7073 /**/ if (use_cma == 0) {
7074 if (hevc->pic_list_init_flag == 0) {
7075 init_pic_list(hevc);
7076 init_pic_list_hw(hevc);
7077 init_buf_spec(hevc);
7078 hevc->pic_list_init_flag = 3;
7079 }
7080 }
7081 if (!hevc->m_ins_flag) {
7082 if (hevc->cur_pic)
7083 get_picture_qos_info(hevc);
7084 }
7085 hevc->first_pic_after_recover = 0;
7086 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7087 dump_pic_list(hevc);
7088 /* prev pic */
7089 hevc_pre_pic(hevc, pic);
7090 /*
7091 *update referenced of old pictures
7092 *(cur_pic->referenced is 1 and not updated)
7093 */
7094 apply_ref_pic_set(hevc, hevc->curr_POC,
7095 rpm_param);
7096
7097 if (hevc->mmu_enable)
7098 recycle_mmu_bufs(hevc);
7099
7100#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7101 if (vdec->master) {
7102 struct hevc_state_s *hevc_ba =
7103 (struct hevc_state_s *)
7104 vdec->master->private;
7105 if (hevc_ba->cur_pic != NULL) {
7106 hevc_ba->cur_pic->dv_enhance_exist = 1;
7107 hevc_print(hevc, H265_DEBUG_DV,
7108 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
7109 hevc->curr_POC, hevc_ba->cur_pic->POC);
7110 }
7111 }
7112 if (vdec->master == NULL &&
7113 vdec->slave == NULL)
7114 set_aux_data(hevc,
7115 hevc->cur_pic, 1, 0); /*suffix*/
7116 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7117 set_aux_data(hevc,
7118 hevc->cur_pic, 0, 1); /*dv meta only*/
7119#else
7120 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7121#endif
7122 /* new pic */
7123 hevc->cur_pic = hevc->is_used_v4l ?
7124 v4l_get_new_pic(hevc, rpm_param) :
7125 get_new_pic(hevc, rpm_param);
7126 if (hevc->cur_pic == NULL) {
7127 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
7128 dump_pic_list(hevc);
7129 hevc->wait_buf = 1;
7130 return -1;
7131 }
7132#ifdef MULTI_INSTANCE_SUPPORT
7133 hevc->decoding_pic = hevc->cur_pic;
7134 if (!hevc->m_ins_flag)
7135 hevc->over_decode = 0;
7136#endif
7137#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7138 hevc->cur_pic->dv_enhance_exist = 0;
7139 if (vdec->slave)
7140 hevc_print(hevc, H265_DEBUG_DV,
7141 "Clear bl (poc %d) dv_enhance_exist flag\n",
7142 hevc->curr_POC);
7143 if (vdec->master == NULL &&
7144 vdec->slave == NULL)
7145 set_aux_data(hevc,
7146 hevc->cur_pic, 0, 0); /*prefix*/
7147
7148 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7149 set_aux_data(hevc,
7150 hevc->cur_pic, 0, 2); /*pre sei only*/
7151#else
7152 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7153#endif
7154 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
7155 hevc->cur_pic->output_ready = 1;
7156 hevc->cur_pic->stream_offset =
7157 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
7158 prepare_display_buf(hevc, hevc->cur_pic);
7159 hevc->wait_buf = 2;
7160 return -1;
7161 }
7162 } else {
7163 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
7164#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7165 if (vdec->master == NULL &&
7166 vdec->slave == NULL) {
7167 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7168 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7169 }
7170#else
7171 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7172 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7173#endif
7174 }
7175 if (hevc->pic_list_init_flag != 3
7176 || hevc->cur_pic == NULL) {
7177				/* make it decode from the first slice segment */
7178 return 3;
7179 }
7180 hevc->cur_pic->slice_idx++;
7181 hevc->new_pic = 0;
7182 }
7183 } else {
7184 if (hevc->wait_buf == 1) {
7185 pic_list_process(hevc);
7186 hevc->cur_pic = hevc->is_used_v4l ?
7187 v4l_get_new_pic(hevc, rpm_param) :
7188 get_new_pic(hevc, rpm_param);
7189 if (hevc->cur_pic == NULL)
7190 return -1;
7191
7192 if (!hevc->m_ins_flag)
7193 hevc->over_decode = 0;
7194
7195#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7196 hevc->cur_pic->dv_enhance_exist = 0;
7197 if (vdec->master == NULL &&
7198 vdec->slave == NULL)
7199 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7200#else
7201 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7202#endif
7203 hevc->wait_buf = 0;
7204 } else if (hevc->wait_buf ==
7205 2) {
7206 if (get_display_pic_num(hevc) >
7207 1)
7208 return -1;
7209 hevc->wait_buf = 0;
7210 }
7211 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7212 dump_pic_list(hevc);
7213 }
7214
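	/*
	 * For a new picture, recompute the SAO work-buffer sizes from the LCU
	 * size and picture dimensions (sao_mem_unit is the per-LCU unit and
	 * sao_vb_size presumably the vertical line buffer covering one LCU
	 * column), then refresh the tile layout and program the tile hardware.
	 */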
7215 if (hevc->new_pic) {
7216#if 1
7217 /*SUPPORT_10BIT*/
7218 int sao_mem_unit =
7219 (hevc->lcu_size == 16 ? 9 :
7220 hevc->lcu_size ==
7221 32 ? 14 : 24) << 4;
7222#else
7223 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
7224#endif
7225 int pic_height_cu =
7226 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
7227 int pic_width_cu =
7228 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
7229 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
7230
7231 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
7232 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7233 hevc_print(hevc, 0,
7234 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
7235 __func__,
7236 hevc->decode_idx,
7237 hevc->curr_pic_struct,
7238 hevc->interlace_flag,
7239 hevc->cur_pic->index);
7240 }
7241 if (dbg_skip_decode_index != 0 &&
7242 hevc->decode_idx == dbg_skip_decode_index)
7243 dbg_skip_flag = 1;
7244
7245 hevc->decode_idx++;
7246 update_tile_info(hevc, pic_width_cu, pic_height_cu,
7247 sao_mem_unit, rpm_param);
7248
7249 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
7250 }
7251
7252 if (hevc->iPrevPOC != hevc->curr_POC) {
7253 hevc->new_tile = 1;
7254 hevc->tile_x = 0;
7255 hevc->tile_y = 0;
7256 hevc->tile_y_x = 0;
7257 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7258 hevc_print(hevc, 0,
7259 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
7260 hevc->tile_x, hevc->tile_y);
7261 }
7262 } else if (hevc->tile_enabled) {
7263 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7264 hevc_print(hevc, 0,
7265 "slice_segment_address is %d\n",
7266 rpm_param->p.slice_segment_address);
7267 }
7268 hevc->tile_y_x =
7269 get_tile_index(hevc, rpm_param->p.slice_segment_address,
7270 (hevc->pic_w +
7271 hevc->lcu_size -
7272 1) / hevc->lcu_size);
7273 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
7274 && (hevc->tile_y_x != -1)) {
7275 hevc->new_tile = 1;
7276 hevc->tile_x = hevc->tile_y_x & 0xff;
7277 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
7278 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7279 hevc_print(hevc, 0,
7280 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
7281 rpm_param->p.slice_segment_address,
7282 hevc->tile_x, hevc->tile_y);
7283 }
7284 } else
7285 hevc->new_tile = 0;
7286 } else
7287 hevc->new_tile = 0;
7288
7289 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
7290 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
7291 hevc->new_tile = 0;
7292
7293 if (hevc->new_tile) {
7294 hevc->tile_start_lcu_x =
7295 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
7296 hevc->tile_start_lcu_y =
7297 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
7298 hevc->tile_width_lcu =
7299 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
7300 hevc->tile_height_lcu =
7301 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
7302 }
7303
7304 set_ref_pic_list(hevc, rpm_param);
7305
7306 Col_ref = rpm_param->p.collocated_ref_idx;
7307
7308 hevc->LDCFlag = 0;
7309 if (rpm_param->p.slice_type != I_SLICE) {
7310 hevc->LDCFlag = 1;
7311 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
7312 if (hevc->cur_pic->
7313 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
7314 hevc->curr_POC)
7315 hevc->LDCFlag = 0;
7316 }
7317 if (rpm_param->p.slice_type == B_SLICE) {
7318 for (i = 0; (i < hevc->RefNum_L1)
7319 && hevc->LDCFlag; i++) {
7320 if (hevc->cur_pic->
7321 m_aiRefPOCList1[hevc->cur_pic->
7322 slice_idx][i] >
7323 hevc->curr_POC)
7324 hevc->LDCFlag = 0;
7325 }
7326 }
7327 }
7328
7329 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
7330
7331 hevc->plevel =
7332 rpm_param->p.log2_parallel_merge_level;
7333 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
7334
7335 hevc->LongTerm_Curr = 0; /* to do ... */
7336 hevc->LongTerm_Col = 0; /* to do ... */
7337
7338 hevc->list_no = 0;
7339 if (rpm_param->p.slice_type == B_SLICE)
7340 hevc->list_no = 1 - hevc->ColFromL0Flag;
7341 if (hevc->list_no == 0) {
7342 if (Col_ref < hevc->RefNum_L0) {
7343 hevc->Col_POC =
7344 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
7345 slice_idx][Col_ref];
7346 } else
7347 hevc->Col_POC = INVALID_POC;
7348 } else {
7349 if (Col_ref < hevc->RefNum_L1) {
7350 hevc->Col_POC =
7351 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
7352 slice_idx][Col_ref];
7353 } else
7354 hevc->Col_POC = INVALID_POC;
7355 }
7356
7357 hevc->LongTerm_Ref = 0; /* to do ... */
7358
7359 if (hevc->slice_type != 2) {
7360 /* if(hevc->i_only==1){ */
7361 /* return 0xf; */
7362 /* } */
7363
7364 if (hevc->Col_POC != INVALID_POC) {
7365 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7366 if (hevc->col_pic == NULL) {
7367 hevc->cur_pic->error_mark = 1;
7368 if (get_dbg_flag(hevc)) {
7369 hevc_print(hevc, 0,
7370						"WRONG, failed to get the pic for Col_POC\n");
7371 }
7372 if (is_log_enable(hevc))
7373 add_log(hevc,
7374						"WRONG, failed to get the pic for Col_POC");
7375 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7376 hevc->cur_pic->error_mark = 1;
7377 if (get_dbg_flag(hevc)) {
7378 hevc_print(hevc, 0,
7379 "WRONG, Col_POC error_mark is 1\n");
7380 }
7381 if (is_log_enable(hevc))
7382 add_log(hevc,
7383 "WRONG, Col_POC error_mark is 1");
7384 } else {
7385 if ((hevc->col_pic->width
7386 != hevc->pic_w) ||
7387 (hevc->col_pic->height
7388 != hevc->pic_h)) {
7389 hevc_print(hevc, 0,
7390 "Wrong reference pic (poc %d) width/height %d/%d\n",
7391 hevc->col_pic->POC,
7392 hevc->col_pic->width,
7393 hevc->col_pic->height);
7394 hevc->cur_pic->error_mark = 1;
7395 }
7396
7397 }
7398
7399 if (hevc->cur_pic->error_mark
7400 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7401 /*count info*/
7402 vdec_count_info(hevc->gvs, hevc->cur_pic->error_mark,
7403 hevc->cur_pic->stream_offset);
7404 if (hevc->PB_skip_mode == 2)
7405 hevc->gvs->drop_frame_count++;
7406 }
7407
7408 if (is_skip_decoding(hevc,
7409 hevc->cur_pic)) {
7410 return 2;
7411 }
7412 } else
7413 hevc->col_pic = hevc->cur_pic;
7414 } /* */
7415 if (hevc->col_pic == NULL)
7416 hevc->col_pic = hevc->cur_pic;
7417#ifdef BUFFER_MGR_ONLY
7418 return 0xf;
7419#else
7420 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7421 || (dbg_skip_flag))
7422 return 0xf;
7423#endif
7424
7425 config_mc_buffer(hevc, hevc->cur_pic);
7426
7427 if (is_skip_decoding(hevc,
7428 hevc->cur_pic)) {
7429 if (get_dbg_flag(hevc))
7430 hevc_print(hevc, 0,
7431 "Discard this picture index %d\n",
7432 hevc->cur_pic->index);
7433 /*count info*/
7434 vdec_count_info(hevc->gvs, hevc->cur_pic->error_mark,
7435 hevc->cur_pic->stream_offset);
7436 if (hevc->PB_skip_mode == 2)
7437 hevc->gvs->drop_frame_count++;
7438 return 2;
7439 }
7440#ifdef MCRCC_ENABLE
7441 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7442#endif
7443 config_mpred_hw(hevc);
7444
7445 config_sao_hw(hevc, rpm_param);
7446
7447 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7448 return 0xf;
7449
7450 return 0;
7451}
7452
7453
7454
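/*
 * Allocate the MMU scatter pages backing one compressed frame: the lossless
 * compressed body size is converted to a 4K-page count and the pages are
 * taken from the decoder's mmu_box; skipped entirely when double write mode
 * 0x10 (no compressed output) is in use.
 */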
7455static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7456 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7457 int cur_buf_idx = new_pic->index;
7458 int bit_depth_10 = (bit_depth != 0x00);
7459 int picture_size;
7460 int cur_mmu_4k_number;
7461 int ret, max_frame_num;
7462 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7463 new_pic->height, !bit_depth_10);
7464 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7465 if (get_double_write_mode(hevc) == 0x10)
7466 return 0;
7467 /*hevc_print(hevc, 0,
7468 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7469 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7470 if (new_pic->scatter_alloc) {
7471 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7472 new_pic->scatter_alloc = 0;
7473 }
7474 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7475 max_frame_num = MAX_FRAME_8K_NUM;
7476 else
7477 max_frame_num = MAX_FRAME_4K_NUM;
7478 if (cur_mmu_4k_number > max_frame_num) {
7479 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7480 cur_mmu_4k_number,
7481 new_pic->width,
7482 new_pic->height);
7483 return -1;
7484 }
7485 ret = decoder_mmu_box_alloc_idx(
7486 hevc->mmu_box,
7487 cur_buf_idx,
7488 cur_mmu_4k_number,
7489 mmu_index_adr);
7490 if (ret == 0)
7491 new_pic->scatter_alloc = 1;
7492
7493 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7494 "%s pic index %d page count(%d) ret =%d\n",
7495 __func__, cur_buf_idx,
7496 cur_mmu_4k_number, ret);
7497 return ret;
7498}
7499
7500
7501static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7502 struct PIC_s *pic)
7503{
7504 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7505 "%s pic index %d scatter_alloc %d\n",
7506 __func__, pic->index,
7507 pic->scatter_alloc);
7508
7509 if (hevc->mmu_enable
7510 && ((hevc->double_write_mode & 0x10) == 0)
7511 && pic->scatter_alloc)
7512 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7513 pic->scatter_alloc = 0;
7514}
7515
7516/*
7517 *************************************************
7518 *
7519 *h265 buffer management end
7520 *
7521 **************************************************
7522 */
7523static struct hevc_state_s *gHevc;
7524
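/*
 * Release the DMA-coherent working buffers that hevc_local_init() set up:
 * the swapped ucode area, the aux (SEI/DV metadata) buffer, the RPM and
 * LMEM parameter buffers, and the frame MMU map.
 */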
7525static void hevc_local_uninit(struct hevc_state_s *hevc)
7526{
7527 hevc->rpm_ptr = NULL;
7528 hevc->lmem_ptr = NULL;
7529
7530#ifdef SWAP_HEVC_UCODE
7531 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7532 if (hevc->mc_cpu_addr != NULL) {
7533 dma_free_coherent(amports_get_dma_device(),
7534 hevc->swap_size, hevc->mc_cpu_addr,
7535 hevc->mc_dma_handle);
7536 hevc->mc_cpu_addr = NULL;
7537 }
7538
7539 }
7540#endif
7541#ifdef DETREFILL_ENABLE
7542 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7543 uninit_detrefill_buf(hevc);
7544#endif
7545 if (hevc->aux_addr) {
7546 dma_free_coherent(amports_get_dma_device(),
7547 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7548 hevc->aux_phy_addr);
7549 hevc->aux_addr = NULL;
7550 }
7551 if (hevc->rpm_addr) {
7552 dma_free_coherent(amports_get_dma_device(),
7553 RPM_BUF_SIZE, hevc->rpm_addr,
7554 hevc->rpm_phy_addr);
7555 hevc->rpm_addr = NULL;
7556 }
7557 if (hevc->lmem_addr) {
7558 dma_free_coherent(amports_get_dma_device(),
7559			LMEM_BUF_SIZE, hevc->lmem_addr,
7560 hevc->lmem_phy_addr);
7561 hevc->lmem_addr = NULL;
7562 }
7563
7564 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7565 if (hevc->frame_mmu_map_phy_addr)
7566 dma_free_coherent(amports_get_dma_device(),
7567 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7568 hevc->frame_mmu_map_phy_addr);
7569
7570 hevc->frame_mmu_map_addr = NULL;
7571 }
7572
7573 //pr_err("[%s line %d] hevc->gvs=0x%p operation\n",__func__, __LINE__, hevc->gvs);
7574}
7575
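/*
 * Per-instance init: choose the workspace spec (1080p or one of the 4K
 * layouts depending on the chip), lay out the work buffers, and allocate
 * the DMA-coherent RPM, aux and LMEM buffers plus the frame MMU map used
 * when the MMU is enabled.
 */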
7576static int hevc_local_init(struct hevc_state_s *hevc)
7577{
7578 int ret = -1;
7579 struct BuffInfo_s *cur_buf_info = NULL;
7580
7581 memset(&hevc->param, 0, sizeof(union param_u));
7582
7583 cur_buf_info = &hevc->work_space_buf_store;
7584
7585 if (vdec_is_support_4k()) {
7586 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7587 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7588 sizeof(struct BuffInfo_s));
7589 else
7590 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7591 sizeof(struct BuffInfo_s));
7592 } else
7593 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7594 sizeof(struct BuffInfo_s));
7595
7596 cur_buf_info->start_adr = hevc->buf_start;
7597 init_buff_spec(hevc, cur_buf_info);
7598
7599 hevc_init_stru(hevc, cur_buf_info);
7600
7601 hevc->bit_depth_luma = 8;
7602 hevc->bit_depth_chroma = 8;
7603 hevc->video_signal_type = 0;
7604 hevc->video_signal_type_debug = 0;
7605 bit_depth_luma = hevc->bit_depth_luma;
7606 bit_depth_chroma = hevc->bit_depth_chroma;
7607 video_signal_type = hevc->video_signal_type;
7608
7609 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7610 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7611 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7612 if (hevc->rpm_addr == NULL) {
7613 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7614 return -1;
7615 }
7616 hevc->rpm_ptr = hevc->rpm_addr;
7617 }
7618
7619 if (prefix_aux_buf_size > 0 ||
7620 suffix_aux_buf_size > 0) {
7621 u32 aux_buf_size;
7622
7623 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7624 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7625 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7626		hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7627				aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7628		if (hevc->aux_addr == NULL) {
7629			pr_err("%s: failed to alloc aux buffer\n", __func__);
7630 return -1;
7631 }
7632 }
7633
7634 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7635 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7636 if (hevc->lmem_addr == NULL) {
7637 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7638 return -1;
7639 }
7640 hevc->lmem_ptr = hevc->lmem_addr;
7641
7642 if (hevc->mmu_enable) {
7643 hevc->frame_mmu_map_addr =
7644 dma_alloc_coherent(amports_get_dma_device(),
7645 get_frame_mmu_map_size(),
7646 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7647 if (hevc->frame_mmu_map_addr == NULL) {
7648			pr_err("%s: failed to alloc frame_mmu_map buffer\n", __func__);
7649 return -1;
7650 }
7651 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7652 }
7653 ret = 0;
7654 return ret;
7655}
7656
7657/*
7658 *******************************************
7659 * Mailbox command
7660 *******************************************
7661 */
7662#define CMD_FINISHED 0
7663#define CMD_ALLOC_VIEW 1
7664#define CMD_FRAME_DISPLAY 3
7665#define CMD_DEBUG 10
7666
7667
7668#define DECODE_BUFFER_NUM_MAX 32
7669#define DISPLAY_BUFFER_NUM 6
7670
7671#define video_domain_addr(adr) (adr&0x7fffffff)
7672#define DECODER_WORK_SPACE_SIZE 0x800000
7673
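/*
 * spec2canvas() packs the per-picture canvas indexes into the canvas-address
 * word stored in vframe canvas0Addr/canvas1Addr: bits [23:16] and [15:8]
 * carry the UV canvas index, bits [7:0] the Y canvas index.
 */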
7674#define spec2canvas(x) \
7675 (((x)->uv_canvas_index << 16) | \
7676 ((x)->uv_canvas_index << 8) | \
7677 ((x)->y_canvas_index << 0))
7678
7679
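/*
 * set_canvas() programs (or, for multi-instance decode, records in
 * pic->canvas_config[]) the display canvases of one picture.  With double
 * write enabled the canvases point at the NV21/NV12 double-write surfaces
 * (dw_y_adr/dw_u_v_adr); without double write and with the MMU disabled they
 * point at the uncompressed reference buffers (mc_y_adr/mc_u_v_adr).
 */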
7680static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7681{
7682 struct vdec_s *vdec = hw_to_vdec(hevc);
7683 int canvas_w = ALIGN(pic->width, 64)/4;
7684 int canvas_h = ALIGN(pic->height, 32)/4;
7685 int blkmode = hevc->mem_map_mode;
7686
7687 /*CANVAS_BLKMODE_64X32*/
7688#ifdef SUPPORT_10BIT
7689 if (pic->double_write_mode) {
7690 canvas_w = pic->width /
7691 get_double_write_ratio(hevc, pic->double_write_mode);
7692 canvas_h = pic->height /
7693 get_double_write_ratio(hevc, pic->double_write_mode);
7694
7695 if (hevc->mem_map_mode == 0)
7696 canvas_w = ALIGN(canvas_w, 32);
7697 else
7698 canvas_w = ALIGN(canvas_w, 64);
7699 canvas_h = ALIGN(canvas_h, 32);
7700
7701 if (vdec->parallel_dec == 1) {
7702 if (pic->y_canvas_index == -1)
7703 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7704 if (pic->uv_canvas_index == -1)
7705 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7706 } else {
7707 pic->y_canvas_index = 128 + pic->index * 2;
7708 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7709 }
7710
7711 canvas_config_ex(pic->y_canvas_index,
7712 pic->dw_y_adr, canvas_w, canvas_h,
7713 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7714 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7715 canvas_w, canvas_h,
7716 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7717#ifdef MULTI_INSTANCE_SUPPORT
7718 pic->canvas_config[0].phy_addr =
7719 pic->dw_y_adr;
7720 pic->canvas_config[0].width =
7721 canvas_w;
7722 pic->canvas_config[0].height =
7723 canvas_h;
7724 pic->canvas_config[0].block_mode =
7725 blkmode;
7726 pic->canvas_config[0].endian = hevc->is_used_v4l ? 0 : 7;
7727
7728 pic->canvas_config[1].phy_addr =
7729 pic->dw_u_v_adr;
7730 pic->canvas_config[1].width =
7731 canvas_w;
7732 pic->canvas_config[1].height =
7733 canvas_h;
7734 pic->canvas_config[1].block_mode =
7735 blkmode;
7736 pic->canvas_config[1].endian = hevc->is_used_v4l ? 0 : 7;
7737#endif
7738 } else {
7739 if (!hevc->mmu_enable) {
7740 /* to change after 10bit VPU is ready ... */
7741 if (vdec->parallel_dec == 1) {
7742 if (pic->y_canvas_index == -1)
7743 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7744 pic->uv_canvas_index = pic->y_canvas_index;
7745 } else {
7746 pic->y_canvas_index = 128 + pic->index;
7747 pic->uv_canvas_index = 128 + pic->index;
7748 }
7749
7750 canvas_config_ex(pic->y_canvas_index,
7751 pic->mc_y_adr, canvas_w, canvas_h,
7752 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7753 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7754 canvas_w, canvas_h,
7755 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7756 }
7757 }
7758#else
7759 if (vdec->parallel_dec == 1) {
7760 if (pic->y_canvas_index == -1)
7761 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7762 if (pic->uv_canvas_index == -1)
7763 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7764 } else {
7765 pic->y_canvas_index = 128 + pic->index * 2;
7766 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7767 }
7768
7769
7770 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7771 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7772 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7773 canvas_w, canvas_h,
7774 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7775#endif
7776}
7777
7778static int init_buf_spec(struct hevc_state_s *hevc)
7779{
7780 int pic_width = hevc->pic_w;
7781 int pic_height = hevc->pic_h;
7782
7783 /* hevc_print(hevc, 0,
7784 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7785 */
7786 hevc_print(hevc, 0,
7787 "%s2 %d %d\n", __func__, pic_width, pic_height);
7788 /* pic_width = hevc->pic_w; */
7789 /* pic_height = hevc->pic_h; */
7790
7791 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7792 hevc->frame_width = pic_width;
7793 hevc->frame_height = pic_height;
7794
7795 }
7796
7797 return 0;
7798}
7799
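/*
 * parse_sei() walks one SEI NAL unit found in the picture's aux data buffer:
 * a 16-bit NAL unit header followed by (payload_type, payload_size) byte
 * pairs.  This simplified walker assumes type and size each fit in a single
 * byte (no 0xff extension coding).  Recognized payloads update the HDR state
 * kept in hevc_state_s: picture timing, the HDR10+ T.35 marker, mastering
 * display colour volume and content light level.
 */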
7800static int parse_sei(struct hevc_state_s *hevc,
7801 struct PIC_s *pic, char *sei_buf, uint32_t size)
7802{
7803 char *p = sei_buf;
7804 char *p_sei;
7805 uint16_t header;
7806 uint8_t nal_unit_type;
7807 uint8_t payload_type, payload_size;
7808 int i, j;
7809
7810 if (size < 2)
7811 return 0;
7812 header = *p++;
7813 header <<= 8;
7814 header += *p++;
7815 nal_unit_type = header >> 9;
7816 if ((nal_unit_type != NAL_UNIT_SEI)
7817 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7818 return 0;
7819 while (p+2 <= sei_buf+size) {
7820 payload_type = *p++;
7821 payload_size = *p++;
7822 if (p+payload_size <= sei_buf+size) {
7823 switch (payload_type) {
7824 case SEI_PicTiming:
7825 if ((parser_sei_enable & 0x4) &&
7826 hevc->frame_field_info_present_flag) {
7827 p_sei = p;
7828 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7829 pic->pic_struct = hevc->curr_pic_struct;
7830 if (get_dbg_flag(hevc) &
7831 H265_DEBUG_PIC_STRUCT) {
7832 hevc_print(hevc, 0,
7833 "parse result pic_struct = %d\n",
7834 hevc->curr_pic_struct);
7835 }
7836 }
7837 break;
7838 case SEI_UserDataITU_T_T35:
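			/*
			 * The byte pattern checked below (country code 0xB5,
			 * provider code 0x003C, provider-oriented code 0x0001,
			 * application id 0x04) matches the ITU-T T.35 header
			 * used by HDR10+ (SMPTE ST 2094-40) dynamic metadata,
			 * so only the HDR10+ presence flag is latched here.
			 */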
7839 p_sei = p;
7840 if (p_sei[0] == 0xB5
7841 && p_sei[1] == 0x00
7842 && p_sei[2] == 0x3C
7843 && p_sei[3] == 0x00
7844 && p_sei[4] == 0x01
7845 && p_sei[5] == 0x04)
7846 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7847
7848 break;
7849 case SEI_MasteringDisplayColorVolume:
7850 /*hevc_print(hevc, 0,
7851 "sei type: primary display color volume %d, size %d\n",
7852 payload_type,
7853 payload_size);*/
7854 /* master_display_colour */
7855 p_sei = p;
7856 for (i = 0; i < 3; i++) {
7857 for (j = 0; j < 2; j++) {
7858 hevc->primaries[i][j]
7859 = (*p_sei<<8)
7860 | *(p_sei+1);
7861 p_sei += 2;
7862 }
7863 }
7864 for (i = 0; i < 2; i++) {
7865 hevc->white_point[i]
7866 = (*p_sei<<8)
7867 | *(p_sei+1);
7868 p_sei += 2;
7869 }
7870 for (i = 0; i < 2; i++) {
7871 hevc->luminance[i]
7872 = (*p_sei<<24)
7873 | (*(p_sei+1)<<16)
7874 | (*(p_sei+2)<<8)
7875 | *(p_sei+3);
7876 p_sei += 4;
7877 }
7878 hevc->sei_present_flag |=
7879 SEI_MASTER_DISPLAY_COLOR_MASK;
7880 /*for (i = 0; i < 3; i++)
7881 for (j = 0; j < 2; j++)
7882 hevc_print(hevc, 0,
7883 "\tprimaries[%1d][%1d] = %04x\n",
7884 i, j,
7885 hevc->primaries[i][j]);
7886 hevc_print(hevc, 0,
7887 "\twhite_point = (%04x, %04x)\n",
7888 hevc->white_point[0],
7889 hevc->white_point[1]);
7890 hevc_print(hevc, 0,
7891 "\tmax,min luminance = %08x, %08x\n",
7892 hevc->luminance[0],
7893 hevc->luminance[1]);*/
7894 break;
7895 case SEI_ContentLightLevel:
7896 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7897 hevc_print(hevc, 0,
7898 "sei type: max content light level %d, size %d\n",
7899 payload_type, payload_size);
7900 /* content_light_level */
7901 p_sei = p;
7902 hevc->content_light_level[0]
7903 = (*p_sei<<8) | *(p_sei+1);
7904 p_sei += 2;
7905 hevc->content_light_level[1]
7906 = (*p_sei<<8) | *(p_sei+1);
7907 p_sei += 2;
7908 hevc->sei_present_flag |=
7909 SEI_CONTENT_LIGHT_LEVEL_MASK;
7910 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7911 hevc_print(hevc, 0,
7912 "\tmax cll = %04x, max_pa_cll = %04x\n",
7913 hevc->content_light_level[0],
7914 hevc->content_light_level[1]);
7915 break;
7916 default:
7917 break;
7918 }
7919 }
7920 p += payload_size;
7921 }
7922 return 0;
7923}
7924
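/*
 * calc_ar() returns the display aspect ratio as height/width in U8.8 fixed
 * point (0x100 == 1.0).  idc indexes the aspect_ratio_idc table from the
 * HEVC VUI; idc 255 (EXTENDED_SAR) uses the explicit sar_w/sar_h signalled
 * in the stream.  Example: 1920x1080 with square pixels (idc 1) gives
 * 0x100 * 1080 / 1920 = 0x90.
 */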
7925static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7926 unsigned w, unsigned h)
7927{
7928 unsigned ar;
7929
7930 if (idc == 255) {
7931 ar = div_u64(256ULL * sar_h * h,
7932 sar_w * w);
7933 } else {
7934 switch (idc) {
7935 case 1:
7936 ar = 0x100 * h / w;
7937 break;
7938 case 2:
7939 ar = 0x100 * h * 11 / (w * 12);
7940 break;
7941 case 3:
7942 ar = 0x100 * h * 11 / (w * 10);
7943 break;
7944 case 4:
7945 ar = 0x100 * h * 11 / (w * 16);
7946 break;
7947 case 5:
7948 ar = 0x100 * h * 33 / (w * 40);
7949 break;
7950 case 6:
7951 ar = 0x100 * h * 11 / (w * 24);
7952 break;
7953 case 7:
7954 ar = 0x100 * h * 11 / (w * 20);
7955 break;
7956 case 8:
7957 ar = 0x100 * h * 11 / (w * 32);
7958 break;
7959 case 9:
7960 ar = 0x100 * h * 33 / (w * 80);
7961 break;
7962 case 10:
7963 ar = 0x100 * h * 11 / (w * 18);
7964 break;
7965 case 11:
7966 ar = 0x100 * h * 11 / (w * 15);
7967 break;
7968 case 12:
7969 ar = 0x100 * h * 33 / (w * 64);
7970 break;
7971 case 13:
7972 ar = 0x100 * h * 99 / (w * 160);
7973 break;
7974 case 14:
7975 ar = 0x100 * h * 3 / (w * 4);
7976 break;
7977 case 15:
7978 ar = 0x100 * h * 2 / (w * 3);
7979 break;
7980 case 16:
7981 ar = 0x100 * h * 1 / (w * 2);
7982 break;
7983 default:
7984 ar = h * 0x100 / w;
7985 break;
7986 }
7987 }
7988
7989 return ar;
7990}
7991
7992static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
7993 struct PIC_s *pic)
7994{
7995 unsigned int ar;
7996 int i, j;
7997 char *p;
7998 unsigned size = 0;
7999 unsigned type = 0;
8000 struct vframe_master_display_colour_s *vf_dp
8001 = &vf->prop.master_display_colour;
8002
8003 vf->width = pic->width /
8004 get_double_write_ratio(hevc, pic->double_write_mode);
8005 vf->height = pic->height /
8006 get_double_write_ratio(hevc, pic->double_write_mode);
8007
8008 vf->duration = hevc->frame_dur;
8009 vf->duration_pulldown = 0;
8010 vf->flag = 0;
8011
8012 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
8013 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
8014
8015
8016 if (((pic->aspect_ratio_idc == 255) &&
8017 pic->sar_width &&
8018 pic->sar_height) ||
8019 ((pic->aspect_ratio_idc != 255) &&
8020 (pic->width))) {
8021 ar = min_t(u32,
8022 calc_ar(pic->aspect_ratio_idc,
8023 pic->sar_width,
8024 pic->sar_height,
8025 pic->width,
8026 pic->height),
8027 DISP_RATIO_ASPECT_RATIO_MAX);
8028 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
8029 vf->ratio_control <<= hevc->interlace_flag;
8030 }
8031 hevc->ratio_control = vf->ratio_control;
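	/*
	 * Each record in aux_data_buf starts with a 32-bit big-endian size
	 * followed by a 32-bit big-endian type; records of type 0x02000000
	 * carry a raw SEI NAL unit and are handed to parse_sei() above.
	 */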
8032 if (pic->aux_data_buf
8033 && pic->aux_data_size) {
8034 /* parser sei */
8035 p = pic->aux_data_buf;
8036 while (p < pic->aux_data_buf
8037 + pic->aux_data_size - 8) {
8038 size = *p++;
8039 size = (size << 8) | *p++;
8040 size = (size << 8) | *p++;
8041 size = (size << 8) | *p++;
8042 type = *p++;
8043 type = (type << 8) | *p++;
8044 type = (type << 8) | *p++;
8045 type = (type << 8) | *p++;
8046 if (type == 0x02000000) {
8047 /* hevc_print(hevc, 0,
8048 "sei(%d)\n", size); */
8049 parse_sei(hevc, pic, p, size);
8050 }
8051 p += size;
8052 }
8053 }
8054 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
8055 vf->signal_type = pic->video_signal_type;
8056 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
8057 u32 data;
8058 data = vf->signal_type;
8059 data = data & 0xFFFF00FF;
8060 data = data | (0x30<<8);
8061 vf->signal_type = data;
8062 }
8063 }
8064 else
8065 vf->signal_type = 0;
8066 hevc->video_signal_type_debug = vf->signal_type;
8067
8068 /* master_display_colour */
8069 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
8070 for (i = 0; i < 3; i++)
8071 for (j = 0; j < 2; j++)
8072 vf_dp->primaries[i][j] = hevc->primaries[i][j];
8073 for (i = 0; i < 2; i++) {
8074 vf_dp->white_point[i] = hevc->white_point[i];
8075 vf_dp->luminance[i]
8076 = hevc->luminance[i];
8077 }
8078 vf_dp->present_flag = 1;
8079 } else
8080 vf_dp->present_flag = 0;
8081
8082 /* content_light_level */
8083 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
8084 vf_dp->content_light_level.max_content
8085 = hevc->content_light_level[0];
8086 vf_dp->content_light_level.max_pic_average
8087 = hevc->content_light_level[1];
8088 vf_dp->content_light_level.present_flag = 1;
8089 } else
8090 vf_dp->content_light_level.present_flag = 0;
8091
8092 if (hevc->is_used_v4l &&
8093 ((hevc->sei_present_flag & SEI_HDR10PLUS_MASK) ||
8094 (vf_dp->present_flag) ||
8095 (vf_dp->content_light_level.present_flag))) {
8096 struct aml_vdec_hdr_infos hdr;
8097 struct aml_vcodec_ctx *ctx =
8098 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
8099
8100 memset(&hdr, 0, sizeof(hdr));
8101 hdr.signal_type = vf->signal_type;
8102 hdr.color_parms = *vf_dp;
8103 vdec_v4l_set_hdr_infos(ctx, &hdr);
8104 }
8105}
8106
8107static int vh265_vf_states(struct vframe_states *states, void *op_arg)
8108{
8109 unsigned long flags;
8110#ifdef MULTI_INSTANCE_SUPPORT
8111 struct vdec_s *vdec = op_arg;
8112 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8113#else
8114 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8115#endif
8116
8117 spin_lock_irqsave(&lock, flags);
8118
8119 states->vf_pool_size = VF_POOL_SIZE;
8120 states->buf_free_num = kfifo_len(&hevc->newframe_q);
8121 states->buf_avail_num = kfifo_len(&hevc->display_q);
8122
8123 if (step == 2)
8124 states->buf_avail_num = 0;
8125 spin_unlock_irqrestore(&lock, flags);
8126 return 0;
8127}
8128
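/*
 * vh265_vf_peek() peeks up to two entries of display_q so that, when a
 * second frame is already queued, next_vf_pts_valid/next_vf_pts of the
 * frame about to be returned can tell the receiver the pts of the frame
 * that will follow it.
 */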
8129static struct vframe_s *vh265_vf_peek(void *op_arg)
8130{
8131 struct vframe_s *vf[2] = {0, 0};
8132#ifdef MULTI_INSTANCE_SUPPORT
8133 struct vdec_s *vdec = op_arg;
8134 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8135#else
8136 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8137#endif
8138
8139 if (step == 2)
8140 return NULL;
8141
8142 if (force_disp_pic_index & 0x100) {
8143 if (force_disp_pic_index & 0x200)
8144 return NULL;
8145 return &hevc->vframe_dummy;
8146 }
8147
8148
8149 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
8150 if (vf[1]) {
8151 vf[0]->next_vf_pts_valid = true;
8152 vf[0]->next_vf_pts = vf[1]->pts;
8153 } else
8154 vf[0]->next_vf_pts_valid = false;
8155 return vf[0];
8156 }
8157
8158 return NULL;
8159}
8160
8161static struct vframe_s *vh265_vf_get(void *op_arg)
8162{
8163 struct vframe_s *vf;
8164#ifdef MULTI_INSTANCE_SUPPORT
8165 struct vdec_s *vdec = op_arg;
8166 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8167#else
8168 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8169#endif
8170
8171 if (step == 2)
8172 return NULL;
8173 else if (step == 1)
8174 step = 2;
8175
8176#if 0
8177 if (force_disp_pic_index & 0x100) {
8178 int buffer_index = force_disp_pic_index & 0xff;
8179 struct PIC_s *pic = NULL;
8180 if (buffer_index >= 0
8181 && buffer_index < MAX_REF_PIC_NUM)
8182 pic = hevc->m_PIC[buffer_index];
8183 if (pic == NULL)
8184 return NULL;
8185 if (force_disp_pic_index & 0x200)
8186 return NULL;
8187
8188 vf = &hevc->vframe_dummy;
8189 if (get_double_write_mode(hevc)) {
8190 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
8191 VIDTYPE_VIU_NV21;
8192 if (hevc->m_ins_flag) {
8193 vf->canvas0Addr = vf->canvas1Addr = -1;
8194 vf->plane_num = 2;
8195 vf->canvas0_config[0] =
8196 pic->canvas_config[0];
8197 vf->canvas0_config[1] =
8198 pic->canvas_config[1];
8199
8200 vf->canvas1_config[0] =
8201 pic->canvas_config[0];
8202 vf->canvas1_config[1] =
8203 pic->canvas_config[1];
8204 } else {
8205 vf->canvas0Addr = vf->canvas1Addr
8206 = spec2canvas(pic);
8207 }
8208 } else {
8209 vf->canvas0Addr = vf->canvas1Addr = 0;
8210 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8211 if (hevc->mmu_enable)
8212 vf->type |= VIDTYPE_SCATTER;
8213 }
8214 vf->compWidth = pic->width;
8215 vf->compHeight = pic->height;
8216 update_vf_memhandle(hevc, vf, pic);
8217 switch (hevc->bit_depth_luma) {
8218 case 9:
8219 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
8220 break;
8221 case 10:
8222 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
8223 | BITDEPTH_V10;
8224 break;
8225 default:
8226 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8227 break;
8228 }
8229 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8230 vf->bitdepth =
8231 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8232 if (hevc->mem_saving_mode == 1)
8233 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8234 vf->duration_pulldown = 0;
8235 vf->pts = 0;
8236 vf->pts_us64 = 0;
8237 set_frame_info(hevc, vf);
8238
8239 vf->width = pic->width /
8240 get_double_write_ratio(hevc, pic->double_write_mode);
8241 vf->height = pic->height /
8242 get_double_write_ratio(hevc, pic->double_write_mode);
8243
8244 force_disp_pic_index |= 0x200;
8245 return vf;
8246 }
8247#endif
8248
8249 if (kfifo_get(&hevc->display_q, &vf)) {
8250 struct vframe_s *next_vf;
8251 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8252 hevc_print(hevc, 0,
8253 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8254 __func__, vf, vf->type, vf->index,
8255 get_pic_poc(hevc, vf->index & 0xff),
8256 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8257 vf->pts, vf->pts_us64,
8258 vf->duration);
8259#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8260 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
8261 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
8262 if (pic->aux_data_buf && pic->aux_data_size > 0) {
8263 int i;
8264 struct PIC_s *pic =
8265 hevc->m_PIC[vf->index & 0xff];
8266 hevc_print(hevc, 0,
8267 "pic 0x%p aux size %d:\n",
8268 pic, pic->aux_data_size);
8269 for (i = 0; i < pic->aux_data_size; i++) {
8270 hevc_print_cont(hevc, 0,
8271 "%02x ", pic->aux_data_buf[i]);
8272 if (((i + 1) & 0xf) == 0)
8273 hevc_print_cont(hevc, 0, "\n");
8274 }
8275 hevc_print_cont(hevc, 0, "\n");
8276 }
8277 }
8278#endif
8279 hevc->show_frame_num++;
8280 vf->index_disp = hevc->vf_get_count;
8281 hevc->vf_get_count++;
8282
8283 if (kfifo_peek(&hevc->display_q, &next_vf)) {
8284 vf->next_vf_pts_valid = true;
8285 vf->next_vf_pts = next_vf->pts;
8286 } else
8287 vf->next_vf_pts_valid = false;
8288
8289 return vf;
8290 }
8291
8292 return NULL;
8293}
8294static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
8295 int i;
8296 for (i = 0; i < VF_POOL_SIZE; i++) {
8297 if (vf == &hevc->vfpool[i])
8298 return true;
8299 }
8300	pr_info("h265: invalid vf put, vf = %p\n", vf);
8301	for (i = 0; i < VF_POOL_SIZE; i++) {
8302		pr_info("valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
8303 }
8304 return false;
8305}
8306
8307static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
8308{
8309 unsigned long flags;
8310#ifdef MULTI_INSTANCE_SUPPORT
8311 struct vdec_s *vdec = op_arg;
8312 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8313#else
8314 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8315#endif
8316 unsigned char index_top;
8317 unsigned char index_bot;
8318
8319 if (vf && (vf_valid_check(vf, hevc) == false))
8320 return;
8321 if (vf == (&hevc->vframe_dummy))
8322 return;
8323 if (!vf)
8324 return;
8325 index_top = vf->index & 0xff;
8326 index_bot = (vf->index >> 8) & 0xff;
8327 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8328 hevc_print(hevc, 0,
8329 "%s(type %d index 0x%x)\n",
8330 __func__, vf->type, vf->index);
8331 hevc->vf_put_count++;
8332 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8333 spin_lock_irqsave(&lock, flags);
8334
8335 if (index_top != 0xff
8336 && index_top < MAX_REF_PIC_NUM
8337 && hevc->m_PIC[index_top]) {
8338 if (hevc->is_used_v4l)
8339 hevc->m_PIC[index_top]->vframe_bound = true;
8340 if (hevc->m_PIC[index_top]->vf_ref > 0) {
8341 hevc->m_PIC[index_top]->vf_ref--;
8342
8343 if (hevc->m_PIC[index_top]->vf_ref == 0) {
8344 hevc->m_PIC[index_top]->output_ready = 0;
8345
8346 if (hevc->wait_buf != 0)
8347 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8348 0x1);
8349 }
8350 }
8351 }
8352
8353 if (index_bot != 0xff
8354 && index_bot < MAX_REF_PIC_NUM
8355 && hevc->m_PIC[index_bot]) {
8356 if (hevc->is_used_v4l)
8357 hevc->m_PIC[index_bot]->vframe_bound = true;
8358 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
8359 hevc->m_PIC[index_bot]->vf_ref--;
8360
8361 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
8362 hevc->m_PIC[index_bot]->output_ready = 0;
8363 if (hevc->wait_buf != 0)
8364 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8365 0x1);
8366 }
8367 }
8368 }
8369 spin_unlock_irqrestore(&lock, flags);
8370}
8371
8372static int vh265_event_cb(int type, void *data, void *op_arg)
8373{
8374 unsigned long flags;
8375#ifdef MULTI_INSTANCE_SUPPORT
8376 struct vdec_s *vdec = op_arg;
8377 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8378#else
8379 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8380#endif
8381 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8382#if 0
8383 amhevc_stop();
8384#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8385 vf_light_unreg_provider(&vh265_vf_prov);
8386#endif
8387 spin_lock_irqsave(&hevc->lock, flags);
8388 vh265_local_init();
8389 vh265_prot_init();
8390 spin_unlock_irqrestore(&hevc->lock, flags);
8391#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8392 vf_reg_provider(&vh265_vf_prov);
8393#endif
8394 amhevc_start();
8395#endif
8396 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8397 struct provider_aux_req_s *req =
8398 (struct provider_aux_req_s *)data;
8399 unsigned char index;
8400
8401 if (!req->vf) {
8402 req->aux_size = hevc->vf_put_count;
8403 return 0;
8404 }
8405 spin_lock_irqsave(&lock, flags);
8406 index = req->vf->index & 0xff;
8407 req->aux_buf = NULL;
8408 req->aux_size = 0;
8409 if (req->bot_flag)
8410 index = (req->vf->index >> 8) & 0xff;
8411 if (index != 0xff
8412 && index < MAX_REF_PIC_NUM
8413 && hevc->m_PIC[index]) {
8414 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8415 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8416#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8417 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8418 req->dv_enhance_exist = false;
8419 else
8420 req->dv_enhance_exist =
8421 hevc->m_PIC[index]->dv_enhance_exist;
8422 hevc_print(hevc, H265_DEBUG_DV,
8423			"query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux size 0x%x\n",
8424 req->vf,
8425 hevc->m_PIC[index]->POC, index,
8426 req->dv_enhance_exist, req->aux_size);
8427#else
8428 req->dv_enhance_exist = 0;
8429#endif
8430 }
8431 spin_unlock_irqrestore(&lock, flags);
8432
8433 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8434 hevc_print(hevc, 0,
8435 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8436 __func__, type, index, req->aux_size);
8437#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8438 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8439 if ((force_bypass_dvenl & 0x80000000) == 0) {
8440 hevc_print(hevc, 0,
8441 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8442 __func__);
8443 hevc->bypass_dvenl_enable = 1;
8444 }
8445
8446#endif
8447 }
8448 return 0;
8449}
8450
8451#ifdef HEVC_PIC_STRUCT_SUPPORT
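/*
 * process_pending_vframe() services the interlaced pic_struct paths: a field
 * vframe may sit in pending_q waiting for its pairing field.  If more than
 * one entry is pending, the oldest is recycled back to newframe_q; otherwise
 * the pending field is either completed with pair_pic's canvas (top or bottom
 * slot, depending on pair_frame_top_flag) and pushed to display_q, or pushed
 * out as-is when pair_pic has already been recycled.
 */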
8452static int process_pending_vframe(struct hevc_state_s *hevc,
8453 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8454{
8455 struct vframe_s *vf;
8456
8457 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8458 hevc_print(hevc, 0,
8459 "%s: pair_pic index 0x%x %s\n",
8460 __func__, pair_pic->index,
8461 pair_frame_top_flag ?
8462 "top" : "bot");
8463
8464 if (kfifo_len(&hevc->pending_q) > 1) {
8465 unsigned long flags;
8466		/* do not keep more than one pending frame */
8467 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8468 hevc_print(hevc, 0,
8469 "fatal error, no available buffer slot.");
8470 return -1;
8471 }
8472 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8473 hevc_print(hevc, 0,
8474 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8475 __func__, vf->index);
8476 if ((hevc->double_write_mode == 3) &&
8477 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8478 vf->type |= VIDTYPE_COMPRESS;
8479 if (hevc->mmu_enable)
8480 vf->type |= VIDTYPE_SCATTER;
8481 }
8482 hevc->vf_pre_count++;
8483 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8484 spin_lock_irqsave(&lock, flags);
8485 vf->index &= 0xff;
8486 hevc->m_PIC[vf->index]->output_ready = 0;
8487 if (hevc->wait_buf != 0)
8488 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8489 0x1);
8490 spin_unlock_irqrestore(&lock, flags);
8491
8492 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8493 }
8494
8495 if (kfifo_peek(&hevc->pending_q, &vf)) {
8496 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8497 /*
8498 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8499 *do not use it
8500 */
8501 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8502 hevc_print(hevc, 0,
8503 "fatal error, no available buffer slot.");
8504 return -1;
8505 }
8506 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8507 hevc_print(hevc, 0,
8508 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8509 __func__, vf->index);
8510 if (vf) {
8511 if ((hevc->double_write_mode == 3) &&
8512 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8513 vf->type |= VIDTYPE_COMPRESS;
8514 if (hevc->mmu_enable)
8515 vf->type |= VIDTYPE_SCATTER;
8516 }
8517 hevc->vf_pre_count++;
8518 kfifo_put(&hevc->display_q,
8519 (const struct vframe_s *)vf);
8520 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8521 }
8522 } else if ((!pair_frame_top_flag) &&
8523 (((vf->index >> 8) & 0xff) == 0xff)) {
8524 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8525 hevc_print(hevc, 0,
8526 "fatal error, no available buffer slot.");
8527 return -1;
8528 }
8529 if (vf) {
8530 if ((hevc->double_write_mode == 3) &&
8531 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8532 vf->type |= VIDTYPE_COMPRESS;
8533 if (hevc->mmu_enable)
8534 vf->type |= VIDTYPE_SCATTER;
8535 }
8536 vf->index &= 0xff;
8537 vf->index |= (pair_pic->index << 8);
8538 vf->canvas1Addr = spec2canvas(pair_pic);
8539 pair_pic->vf_ref++;
8540 kfifo_put(&hevc->display_q,
8541 (const struct vframe_s *)vf);
8542 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8543 hevc->vf_pre_count++;
8544 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8545 hevc_print(hevc, 0,
8546 "%s vf => display_q: (index 0x%x)\n",
8547 __func__, vf->index);
8548 }
8549 } else if (pair_frame_top_flag &&
8550 ((vf->index & 0xff) == 0xff)) {
8551 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8552 hevc_print(hevc, 0,
8553 "fatal error, no available buffer slot.");
8554 return -1;
8555 }
8556 if (vf) {
8557 if ((hevc->double_write_mode == 3) &&
8558 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8559 vf->type |= VIDTYPE_COMPRESS;
8560 if (hevc->mmu_enable)
8561 vf->type |= VIDTYPE_SCATTER;
8562 }
8563 vf->index &= 0xff00;
8564 vf->index |= pair_pic->index;
8565 vf->canvas0Addr = spec2canvas(pair_pic);
8566 pair_pic->vf_ref++;
8567 kfifo_put(&hevc->display_q,
8568 (const struct vframe_s *)vf);
8569 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8570 hevc->vf_pre_count++;
8571 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8572 hevc_print(hevc, 0,
8573 "%s vf => display_q: (index 0x%x)\n",
8574 __func__, vf->index);
8575 }
8576 }
8577 }
8578 return 0;
8579}
8580#endif
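/*
 * update_vf_memhandle() attaches the memory-box handles backing this picture
 * to the vframe: scatter (MMU) frames get the mmu_box handle for the
 * compressed body and the bmmu handle for the header buffer, non-scatter
 * frames carry only the bmmu handle.
 */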
8581static void update_vf_memhandle(struct hevc_state_s *hevc,
8582 struct vframe_s *vf, struct PIC_s *pic)
8583{
8584 if (pic->index < 0) {
8585 vf->mem_handle = NULL;
8586 vf->mem_head_handle = NULL;
8587 } else if (vf->type & VIDTYPE_SCATTER) {
8588 vf->mem_handle =
8589 decoder_mmu_box_get_mem_handle(
8590 hevc->mmu_box, pic->index);
8591 vf->mem_head_handle =
8592 decoder_bmmu_box_get_mem_handle(
8593 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8594 } else {
8595 vf->mem_handle =
8596 decoder_bmmu_box_get_mem_handle(
8597 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8598 vf->mem_head_handle = NULL;
8599 /*vf->mem_head_handle =
8600 decoder_bmmu_box_get_mem_handle(
8601 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8602 }
8603 return;
8604}
8605
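/*
 * fill_frame_info() snapshots the per-frame QoS statistics gathered during
 * decode (slice type, frame size, pts, min/avg/max of MV, QP and skip) into
 * hevc->vframe_qos so they can be reported along with the output frame.
 */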
8606static void fill_frame_info(struct hevc_state_s *hevc,
8607 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8608{
8609 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8610 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8611 vframe_qos->type = 4;
8612 else if (pic->slice_type == I_SLICE)
8613 vframe_qos->type = 1;
8614 else if (pic->slice_type == P_SLICE)
8615 vframe_qos->type = 2;
8616 else if (pic->slice_type == B_SLICE)
8617 vframe_qos->type = 3;
8618/*
8619#define SHOW_QOS_INFO
8620*/
8621 if (input_frame_based(hw_to_vdec(hevc)))
8622 vframe_qos->size = pic->frame_size;
8623 else
8624 vframe_qos->size = framesize;
8625 vframe_qos->pts = pts;
8626#ifdef SHOW_QOS_INFO
8627 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8628#endif
8629
8630
8631 vframe_qos->max_mv = pic->max_mv;
8632 vframe_qos->avg_mv = pic->avg_mv;
8633 vframe_qos->min_mv = pic->min_mv;
8634#ifdef SHOW_QOS_INFO
8635 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8636 vframe_qos->max_mv,
8637 vframe_qos->avg_mv,
8638 vframe_qos->min_mv);
8639#endif
8640
8641 vframe_qos->max_qp = pic->max_qp;
8642 vframe_qos->avg_qp = pic->avg_qp;
8643 vframe_qos->min_qp = pic->min_qp;
8644#ifdef SHOW_QOS_INFO
8645 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8646 vframe_qos->max_qp,
8647 vframe_qos->avg_qp,
8648 vframe_qos->min_qp);
8649#endif
8650
8651 vframe_qos->max_skip = pic->max_skip;
8652 vframe_qos->avg_skip = pic->avg_skip;
8653 vframe_qos->min_skip = pic->min_skip;
8654#ifdef SHOW_QOS_INFO
8655 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8656 vframe_qos->max_skip,
8657 vframe_qos->avg_skip,
8658 vframe_qos->min_skip);
8659#endif
8660
8661 vframe_qos->num++;
8662
8663}
8664
8665static inline void hevc_update_gvs(struct hevc_state_s *hevc)
8666{
8667 if (hevc->gvs->frame_height != hevc->frame_height) {
8668 hevc->gvs->frame_width = hevc->frame_width;
8669 hevc->gvs->frame_height = hevc->frame_height;
8670 }
8671 if (hevc->gvs->frame_dur != hevc->frame_dur) {
8672 hevc->gvs->frame_dur = hevc->frame_dur;
8673 if (hevc->frame_dur != 0)
8674 hevc->gvs->frame_rate = 96000 / hevc->frame_dur;
8675 else
8676 hevc->gvs->frame_rate = -1;
8677 }
8678 hevc->gvs->error_count = hevc->gvs->error_frame_count;
8679 hevc->gvs->status = hevc->stat | hevc->fatal_error;
8680 if (hevc->gvs->ratio_control != hevc->ratio_control)
8681 hevc->gvs->ratio_control = hevc->ratio_control;
8682}
8683
8684static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8685{
8686 struct vdec_s *vdec = hw_to_vdec(hevc);
8687 struct vframe_s *vf = NULL;
8688 int stream_offset = pic->stream_offset;
8689 unsigned short slice_type = pic->slice_type;
8690 ulong nv_order = VIDTYPE_VIU_NV21;
8691 u32 frame_size = 0;
8692 struct vdec_info tmp4x;
8693 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
8694
8695 /* swap uv */
8696 if (hevc->is_used_v4l) {
8697 if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV12) ||
8698 (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV12M))
8699 nv_order = VIDTYPE_VIU_NV12;
8700 }
8701
8702 if (force_disp_pic_index & 0x100) {
8703 /*recycle directly*/
8704 pic->output_ready = 0;
8705 return -1;
8706 }
8707 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8708 hevc_print(hevc, 0,
8709 "fatal error, no available buffer slot.");
8710 return -1;
8711 }
8712 display_frame_count[hevc->index]++;
8713 if (vf) {
8714 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8715 "%s: pic index 0x%x\n",
8716 __func__, pic->index);*/
8717
8718 if (hevc->is_used_v4l) {
8719 vf->v4l_mem_handle
8720 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8721 if (hevc->mmu_enable) {
8722 vf->mm_box.bmmu_box = hevc->bmmu_box;
8723 vf->mm_box.bmmu_idx = VF_BUFFER_IDX(pic->BUF_index);
8724 vf->mm_box.mmu_box = hevc->mmu_box;
8725 vf->mm_box.mmu_idx = pic->index;
8726 }
8727 }
8728
8729#ifdef MULTI_INSTANCE_SUPPORT
8730 if (vdec_frame_based(vdec)) {
8731 vf->pts = pic->pts;
8732 vf->pts_us64 = pic->pts64;
8733 vf->timestamp = pic->timestamp;
8734 }
8735 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8736 stream_offset, &vf->pts, 0) != 0) { */
8737#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8738 else if (vdec->master == NULL) {
8739#else
8740 else {
8741#endif
8742#endif
8743 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8744 "call pts_lookup_offset_us64(0x%x)\n",
8745 stream_offset);
8746 if (pts_lookup_offset_us64
8747 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8748 &frame_size, 0,
8749 &vf->pts_us64) != 0) {
8750#ifdef DEBUG_PTS
8751 hevc->pts_missed++;
8752#endif
8753 vf->pts = 0;
8754 vf->pts_us64 = 0;
8755 }
8756#ifdef DEBUG_PTS
8757 else
8758 hevc->pts_hit++;
8759#endif
8760#ifdef MULTI_INSTANCE_SUPPORT
8761#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8762 } else {
8763 vf->pts = 0;
8764 vf->pts_us64 = 0;
8765 }
8766#else
8767 }
8768#endif
8769#endif
8770 if (pts_unstable && (hevc->frame_dur > 0))
8771 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8772
8773 fill_frame_info(hevc, pic, frame_size, vf->pts);
8774
8775 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8776 && hevc->get_frame_dur) {
8777 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8778
8779 if (pts_diff < 0) {
8780 hevc->pts_mode_switching_count++;
8781 hevc->pts_mode_recovery_count = 0;
8782
8783 if (hevc->pts_mode_switching_count >=
8784 PTS_MODE_SWITCHING_THRESHOLD) {
8785 hevc->pts_mode =
8786 PTS_NONE_REF_USE_DURATION;
8787 hevc_print(hevc, 0,
8788 "HEVC: switch to n_d mode.\n");
8789 }
8790
8791 } else {
8792 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8793
8794 hevc->pts_mode_recovery_count++;
8795 if (hevc->pts_mode_recovery_count > p) {
8796 hevc->pts_mode_switching_count = 0;
8797 hevc->pts_mode_recovery_count = 0;
8798 }
8799 }
8800 }
8801
8802 if (vf->pts != 0)
8803 hevc->last_lookup_pts = vf->pts;
8804
8805 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8806 && (slice_type != 2))
8807 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8808 hevc->last_pts = vf->pts;
8809
8810 if (vf->pts_us64 != 0)
8811 hevc->last_lookup_pts_us64 = vf->pts_us64;
8812
8813 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8814 && (slice_type != 2)) {
8815 vf->pts_us64 =
8816 hevc->last_pts_us64 +
8817 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8818 }
8819 hevc->last_pts_us64 = vf->pts_us64;
8820 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8821 hevc_print(hevc, 0,
8822 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8823 vf->pts, vf->pts_us64);
8824 }
8825
8826 /*
8827 *vf->index:
8828 *(1) vf->type is VIDTYPE_PROGRESSIVE
8829 * and vf->canvas0Addr != vf->canvas1Addr,
8830 * vf->index[7:0] is the index of top pic
8831 * vf->index[15:8] is the index of bot pic
8832 *(2) other cases,
8833 * only vf->index[7:0] is used
8834 * vf->index[15:8] == 0xff
8835 */
8836 vf->index = 0xff00 | pic->index;
8837#if 1
8838/*SUPPORT_10BIT*/
8839 if (pic->double_write_mode & 0x10) {
8840 /* double write only */
8841 vf->compBodyAddr = 0;
8842 vf->compHeadAddr = 0;
8843 } else {
8844
8845 if (hevc->mmu_enable) {
8846 vf->compBodyAddr = 0;
8847 vf->compHeadAddr = pic->header_adr;
8848 } else {
8849 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8850 vf->compHeadAddr = pic->mc_y_adr +
8851 pic->losless_comp_body_size;
8852 vf->mem_head_handle = NULL;
8853 }
8854
8855 /*head adr*/
8856 vf->canvas0Addr = vf->canvas1Addr = 0;
8857 }
8858 if (pic->double_write_mode) {
8859 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8860 vf->type |= nv_order;
8861
8862 if ((pic->double_write_mode == 3) &&
8863 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8864 vf->type |= VIDTYPE_COMPRESS;
8865 if (hevc->mmu_enable)
8866 vf->type |= VIDTYPE_SCATTER;
8867 }
8868#ifdef MULTI_INSTANCE_SUPPORT
8869 if (hevc->m_ins_flag &&
8870 (get_dbg_flag(hevc)
8871 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8872 vf->canvas0Addr = vf->canvas1Addr = -1;
8873 vf->plane_num = 2;
8874 vf->canvas0_config[0] =
8875 pic->canvas_config[0];
8876 vf->canvas0_config[1] =
8877 pic->canvas_config[1];
8878
8879 vf->canvas1_config[0] =
8880 pic->canvas_config[0];
8881 vf->canvas1_config[1] =
8882 pic->canvas_config[1];
8883
8884 } else
8885#endif
8886 vf->canvas0Addr = vf->canvas1Addr
8887 = spec2canvas(pic);
8888 } else {
8889 vf->canvas0Addr = vf->canvas1Addr = 0;
8890 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8891 if (hevc->mmu_enable)
8892 vf->type |= VIDTYPE_SCATTER;
8893 }
8894 vf->compWidth = pic->width;
8895 vf->compHeight = pic->height;
8896 update_vf_memhandle(hevc, vf, pic);
8897 switch (pic->bit_depth_luma) {
8898 case 9:
8899 vf->bitdepth = BITDEPTH_Y9;
8900 break;
8901 case 10:
8902 vf->bitdepth = BITDEPTH_Y10;
8903 break;
8904 default:
8905 vf->bitdepth = BITDEPTH_Y8;
8906 break;
8907 }
8908 switch (pic->bit_depth_chroma) {
8909 case 9:
8910 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8911 break;
8912 case 10:
8913 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8914 break;
8915 default:
8916 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8917 break;
8918 }
8919 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8920 vf->bitdepth =
8921 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8922 if (pic->mem_saving_mode == 1)
8923 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8924#else
8925 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8926 vf->type |= nv_order;
8927 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8928#endif
8929 set_frame_info(hevc, vf, pic);
8930 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8931 /* hevc_print(hevc, 0,
8932 "aaa: %d/%d, %d/%d\n",
8933 vf->width,vf->height, pic->width, pic->height); */
8934 vf->width = pic->width;
8935 vf->height = pic->height;
8936
8937 if (force_w_h != 0) {
8938 vf->width = (force_w_h >> 16) & 0xffff;
8939 vf->height = force_w_h & 0xffff;
8940 }
8941 if (force_fps & 0x100) {
8942 u32 rate = force_fps & 0xff;
8943
8944 if (rate)
8945 vf->duration = 96000/rate;
8946 else
8947 vf->duration = 0;
8948 }
8949 if (force_fps & 0x200) {
8950 vf->pts = 0;
8951 vf->pts_us64 = 0;
8952 }
8953 /*
8954 * !!! to do ...
8955 * need move below code to get_new_pic(),
8956 * hevc->xxx can only be used by current decoded pic
8957 */
8958 if (pic->conformance_window_flag &&
8959 (get_dbg_flag(hevc) &
8960 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
8961 unsigned int SubWidthC, SubHeightC;
8962
8963 switch (pic->chroma_format_idc) {
8964 case 1:
8965 SubWidthC = 2;
8966 SubHeightC = 2;
8967 break;
8968 case 2:
8969 SubWidthC = 2;
8970 SubHeightC = 1;
8971 break;
8972 default:
8973 SubWidthC = 1;
8974 SubHeightC = 1;
8975 break;
8976 }
8977 vf->width -= SubWidthC *
8978 (pic->conf_win_left_offset +
8979 pic->conf_win_right_offset);
8980 vf->height -= SubHeightC *
8981 (pic->conf_win_top_offset +
8982 pic->conf_win_bottom_offset);
8983
8984 vf->compWidth -= SubWidthC *
8985 (pic->conf_win_left_offset +
8986 pic->conf_win_right_offset);
8987 vf->compHeight -= SubHeightC *
8988 (pic->conf_win_top_offset +
8989 pic->conf_win_bottom_offset);
8990
8991 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
8992 hevc_print(hevc, 0,
8993 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
8994 pic->chroma_format_idc,
8995 pic->conf_win_left_offset,
8996 pic->conf_win_right_offset,
8997 pic->conf_win_top_offset,
8998 pic->conf_win_bottom_offset,
8999 vf->width, vf->height, vf->compWidth, vf->compHeight);
9000 }
9001
9002 vf->width = vf->width /
9003 get_double_write_ratio(hevc, pic->double_write_mode);
9004 vf->height = vf->height /
9005 get_double_write_ratio(hevc, pic->double_write_mode);
9006#ifdef HEVC_PIC_STRUCT_SUPPORT
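	/*
	 * pic_struct corresponds to the HEVC picture timing SEI (Table D.2):
	 * 1/2 are single top/bottom fields, 3/4 emit two field vframes from
	 * one decoded picture, 5/6 emit three (field repetition), 7/8 are
	 * frame doubling/tripling (duration scaled instead), and 9-12 are
	 * fields paired with the previous/next picture, handled through the
	 * pending_q logic.
	 */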
9007 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
9008 struct vframe_s *vf2;
9009
9010 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9011 hevc_print(hevc, 0,
9012 "pic_struct = %d index 0x%x\n",
9013 pic->pic_struct,
9014 pic->index);
9015
9016 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
9017 hevc_print(hevc, 0,
9018 "fatal error, no available buffer slot.");
9019 return -1;
9020 }
9021 pic->vf_ref = 2;
9022 vf->duration = vf->duration>>1;
9023 memcpy(vf2, vf, sizeof(struct vframe_s));
9024
9025 if (pic->pic_struct == 3) {
9026 vf->type = VIDTYPE_INTERLACE_TOP
9027 | nv_order;
9028 vf2->type = VIDTYPE_INTERLACE_BOTTOM
9029 | nv_order;
9030 } else {
9031 vf->type = VIDTYPE_INTERLACE_BOTTOM
9032 | nv_order;
9033 vf2->type = VIDTYPE_INTERLACE_TOP
9034 | nv_order;
9035 }
9036 hevc->vf_pre_count++;
9037 decoder_do_frame_check(vdec, vf);
9038 kfifo_put(&hevc->display_q,
9039 (const struct vframe_s *)vf);
9040 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9041 hevc->vf_pre_count++;
9042 kfifo_put(&hevc->display_q,
9043 (const struct vframe_s *)vf2);
9044 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
9045 } else if (pic->pic_struct == 5
9046 || pic->pic_struct == 6) {
9047 struct vframe_s *vf2, *vf3;
9048
9049 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9050 hevc_print(hevc, 0,
9051 "pic_struct = %d index 0x%x\n",
9052 pic->pic_struct,
9053 pic->index);
9054
9055 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
9056 hevc_print(hevc, 0,
9057 "fatal error, no available buffer slot.");
9058 return -1;
9059 }
9060 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
9061 hevc_print(hevc, 0,
9062 "fatal error, no available buffer slot.");
9063 return -1;
9064 }
9065 pic->vf_ref = 3;
9066 vf->duration = vf->duration/3;
9067 memcpy(vf2, vf, sizeof(struct vframe_s));
9068 memcpy(vf3, vf, sizeof(struct vframe_s));
9069
9070 if (pic->pic_struct == 5) {
9071 vf->type = VIDTYPE_INTERLACE_TOP
9072 | nv_order;
9073 vf2->type = VIDTYPE_INTERLACE_BOTTOM
9074 | nv_order;
9075 vf3->type = VIDTYPE_INTERLACE_TOP
9076 | nv_order;
9077 } else {
9078 vf->type = VIDTYPE_INTERLACE_BOTTOM
9079 | nv_order;
9080 vf2->type = VIDTYPE_INTERLACE_TOP
9081 | nv_order;
9082 vf3->type = VIDTYPE_INTERLACE_BOTTOM
9083 | nv_order;
9084 }
9085 hevc->vf_pre_count++;
9086 decoder_do_frame_check(vdec, vf);
9087 kfifo_put(&hevc->display_q,
9088 (const struct vframe_s *)vf);
9089 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9090 hevc->vf_pre_count++;
9091 kfifo_put(&hevc->display_q,
9092 (const struct vframe_s *)vf2);
9093 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
9094 hevc->vf_pre_count++;
9095 kfifo_put(&hevc->display_q,
9096 (const struct vframe_s *)vf3);
9097 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
9098
9099 } else if (pic->pic_struct == 9
9100 || pic->pic_struct == 10) {
9101 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9102 hevc_print(hevc, 0,
9103 "pic_struct = %d index 0x%x\n",
9104 pic->pic_struct,
9105 pic->index);
9106
9107 pic->vf_ref = 1;
9108 /* process previous pending vf*/
9109 process_pending_vframe(hevc,
9110 pic, (pic->pic_struct == 9));
9111
9112 decoder_do_frame_check(vdec, vf);
9113 /* process current vf */
9114 kfifo_put(&hevc->pending_q,
9115 (const struct vframe_s *)vf);
9116 vf->height <<= 1;
9117 if (pic->pic_struct == 9) {
9118 vf->type = VIDTYPE_INTERLACE_TOP
9119 | nv_order | VIDTYPE_VIU_FIELD;
9120 process_pending_vframe(hevc,
9121 hevc->pre_bot_pic, 0);
9122 } else {
9123 vf->type = VIDTYPE_INTERLACE_BOTTOM |
9124 nv_order | VIDTYPE_VIU_FIELD;
9125 vf->index = (pic->index << 8) | 0xff;
9126 process_pending_vframe(hevc,
9127 hevc->pre_top_pic, 1);
9128 }
9129
9130 if (hevc->vf_pre_count == 0)
9131 hevc->vf_pre_count++;
9132
9133 /**/
9134 if (pic->pic_struct == 9)
9135 hevc->pre_top_pic = pic;
9136 else
9137 hevc->pre_bot_pic = pic;
9138
9139 } else if (pic->pic_struct == 11
9140 || pic->pic_struct == 12) {
9141 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9142 hevc_print(hevc, 0,
9143 "pic_struct = %d index 0x%x\n",
9144 pic->pic_struct,
9145 pic->index);
9146 pic->vf_ref = 1;
9147 /* process previous pending vf*/
9148 process_pending_vframe(hevc, pic,
9149 (pic->pic_struct == 11));
9150
9151 /* put current into pending q */
9152 vf->height <<= 1;
9153 if (pic->pic_struct == 11)
9154 vf->type = VIDTYPE_INTERLACE_TOP |
9155 nv_order | VIDTYPE_VIU_FIELD;
9156 else {
9157 vf->type = VIDTYPE_INTERLACE_BOTTOM |
9158 nv_order | VIDTYPE_VIU_FIELD;
9159 vf->index = (pic->index << 8) | 0xff;
9160 }
9161 decoder_do_frame_check(vdec, vf);
9162 kfifo_put(&hevc->pending_q,
9163 (const struct vframe_s *)vf);
9164 if (hevc->vf_pre_count == 0)
9165 hevc->vf_pre_count++;
9166
9167 /**/
9168 if (pic->pic_struct == 11)
9169 hevc->pre_top_pic = pic;
9170 else
9171 hevc->pre_bot_pic = pic;
9172
9173 } else {
9174 pic->vf_ref = 1;
9175
9176 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9177 hevc_print(hevc, 0,
9178 "pic_struct = %d index 0x%x\n",
9179 pic->pic_struct,
9180 pic->index);
9181
9182 switch (pic->pic_struct) {
9183 case 7:
9184 vf->duration <<= 1;
9185 break;
9186 case 8:
9187 vf->duration = vf->duration * 3;
9188 break;
9189 case 1:
9190 vf->height <<= 1;
9191 vf->type = VIDTYPE_INTERLACE_TOP |
9192 nv_order | VIDTYPE_VIU_FIELD;
9193 process_pending_vframe(hevc, pic, 1);
9194 hevc->pre_top_pic = pic;
9195 break;
9196 case 2:
9197 vf->height <<= 1;
9198 vf->type = VIDTYPE_INTERLACE_BOTTOM
9199 | nv_order
9200 | VIDTYPE_VIU_FIELD;
9201 process_pending_vframe(hevc, pic, 0);
9202 hevc->pre_bot_pic = pic;
9203 break;
9204 }
9205 hevc->vf_pre_count++;
9206 decoder_do_frame_check(vdec, vf);
9207 kfifo_put(&hevc->display_q,
9208 (const struct vframe_s *)vf);
9209 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9210 }
9211#else
9212 vf->type_original = vf->type;
9213 pic->vf_ref = 1;
9214 hevc->vf_pre_count++;
9215 decoder_do_frame_check(vdec, vf);
9216 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
9217 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9218#endif
9219 /*count info*/
9220 vdec_count_info(hevc->gvs, 0, stream_offset);
9221 hevc_update_gvs(hevc);
9222 memcpy(&tmp4x, hevc->gvs, sizeof(struct vdec_info));
9223 tmp4x.bit_depth_luma = hevc->bit_depth_luma;
9224 tmp4x.bit_depth_chroma = hevc->bit_depth_chroma;
9225 tmp4x.double_write_mode = get_double_write_mode(hevc);
9226 vdec_fill_vdec_frame(vdec, &hevc->vframe_qos, &tmp4x, vf, pic->hw_decode_time);
9227 vdec->vdec_fps_detec(vdec->id);
9228 hevc_print(hevc, H265_DEBUG_BUFMGR,
9229 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
9230 __func__, vf->type, vf->index,
9231 get_pic_poc(hevc, vf->index & 0xff),
9232 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
9233 vf->pts, vf->pts_us64,
9234 vf->duration);
9235 hw_to_vdec(hevc)->vdec_fps_detec(hw_to_vdec(hevc)->id);
9236 if (without_display_mode == 0) {
9237 vf_notify_receiver(hevc->provider_name,
9238 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9239 }
9240 else
9241 vh265_vf_put(vh265_vf_get(vdec), vdec);
9242 }
9243
9244 return 0;
9245}
9246
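/*
 * notify_v4l_eos() queues a dummy vframe flagged VFRAME_FLAG_EMPTY_FRAME_V4L
 * so the v4l2 receiver sees an explicit end-of-stream marker.  It waits up to
 * ~2s for a free capture buffer index to attach the marker to and otherwise
 * falls back to asking the v4l2 core for a buffer directly.
 */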
9247static int notify_v4l_eos(struct vdec_s *vdec)
9248{
9249 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
9250 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
9251 struct vframe_s *vf = &hw->vframe_dummy;
9252 struct vdec_v4l2_buffer *fb = NULL;
9253 int index = INVALID_IDX;
9254 ulong expires;
9255
9256 if (hw->is_used_v4l && hw->eos) {
9257 expires = jiffies + msecs_to_jiffies(2000);
9258 while (INVALID_IDX == (index = get_free_buf_idx(hw))) {
9259 if (time_after(jiffies, expires) ||
9260 v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx))
9261 break;
9262 }
9263
9264 if (index == INVALID_IDX) {
9265 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb) < 0) {
9266				pr_err("[%d] EOS get free buffer fail.\n", ctx->id);
9267 return -1;
9268 }
9269 }
9270
9271 vf->type |= VIDTYPE_V4L_EOS;
9272 vf->timestamp = ULONG_MAX;
9273 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
9274 vf->v4l_mem_handle = (index == INVALID_IDX) ? (ulong)fb :
9275 hw->m_BUF[index].v4l_ref_buf_addr;
9276 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
9277 vf_notify_receiver(vdec->vf_provider_name,
9278 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9279
9280 pr_info("[%d] H265 EOS notify.\n", ctx->id);
9281 }
9282
9283 return 0;
9284}
9285
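/*
 * process_nal_sei() consumes SEI payload bytes straight from the HEVC shift
 * register: READ_HREG(HEVC_SHIFTED_DATA) exposes the next bits and the
 * following WRITE_HREG(HEVC_SHIFT_COMMAND, (1 << 7) | n) appears to shift n
 * bits out.  Payload type 137 (mastering display colour volume) is decoded
 * into primaries/white_point/luminance; any trailing bytes are skipped one
 * at a time.
 */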
9286static void process_nal_sei(struct hevc_state_s *hevc,
9287 int payload_type, int payload_size)
9288{
9289 unsigned short data;
9290
9291 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9292 hevc_print(hevc, 0,
9293 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
9294 payload_type, payload_size);
9295
9296 if (payload_type == 137) {
9297 int i, j;
9298 /* MASTERING_DISPLAY_COLOUR_VOLUME */
9299 if (payload_size >= 24) {
9300 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9301 hevc_print(hevc, 0,
9302 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
9303 for (i = 0; i < 3; i++) {
9304 for (j = 0; j < 2; j++) {
9305 data =
9306 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9307 hevc->primaries[i][j] = data;
9308 WRITE_HREG(HEVC_SHIFT_COMMAND,
9309 (1<<7)|16);
9310 if (get_dbg_flag(hevc) &
9311 H265_DEBUG_PRINT_SEI)
9312 hevc_print(hevc, 0,
9313 "\t\tprimaries[%1d][%1d] = %04x\n",
9314 i, j, hevc->primaries[i][j]);
9315 }
9316 }
9317 for (i = 0; i < 2; i++) {
9318 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9319 hevc->white_point[i] = data;
9320 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
9321 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9322 hevc_print(hevc, 0,
9323 "\t\twhite_point[%1d] = %04x\n",
9324 i, hevc->white_point[i]);
9325 }
9326 for (i = 0; i < 2; i++) {
9327 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9328 hevc->luminance[i] = data << 16;
9329 WRITE_HREG(HEVC_SHIFT_COMMAND,
9330 (1<<7)|16);
9331 data =
9332 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9333 hevc->luminance[i] |= data;
9334 WRITE_HREG(HEVC_SHIFT_COMMAND,
9335 (1<<7)|16);
9336 if (get_dbg_flag(hevc) &
9337 H265_DEBUG_PRINT_SEI)
9338 hevc_print(hevc, 0,
9339 "\t\tluminance[%1d] = %08x\n",
9340 i, hevc->luminance[i]);
9341 }
9342 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
9343 }
9344 payload_size -= 24;
9345 while (payload_size > 0) {
9346 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
9347 payload_size--;
9348 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
9349 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
9350 }
9351 }
9352}
9353
9354static int hevc_recover(struct hevc_state_s *hevc)
9355{
9356 int ret = -1;
9357 u32 rem;
9358 u64 shift_byte_count64;
9359 unsigned int hevc_shift_byte_count;
9360 unsigned int hevc_stream_start_addr;
9361 unsigned int hevc_stream_end_addr;
9362 unsigned int hevc_stream_rd_ptr;
9363 unsigned int hevc_stream_wr_ptr;
9364 unsigned int hevc_stream_control;
9365 unsigned int hevc_stream_fifo_ctl;
9366 unsigned int hevc_stream_buf_size;
9367
9368 mutex_lock(&vh265_mutex);
9369#if 0
9370 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9371 int ii;
9372
9373 for (ii = 0; ii < 4; ii++)
9374 hevc_print(hevc, 0,
9375 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9376 if (((i + ii) & 0xf) == 0)
9377 hevc_print(hevc, 0, "\n");
9378 }
9379#endif
9380#define ES_VID_MAN_RD_PTR (1<<0)
9381 if (!hevc->init_flag) {
9382 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
9383 mutex_unlock(&vh265_mutex);
9384 return ret;
9385 }
9386 amhevc_stop();
9387 msleep(20);
9388 ret = 0;
9389 /* reset */
9390 WRITE_PARSER_REG(PARSER_VIDEO_RP, READ_VREG(HEVC_STREAM_RD_PTR));
9391 SET_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9392
9393 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
9394 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
9395 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
9396 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
9397 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
9398 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
9399 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
9400
9401 /* HEVC streaming buffer will reset and restart
9402 * from current hevc_stream_rd_ptr position
9403 */
9404 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
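	/*
	 * The 32-bit HEVC_SHIFT_BYTE_COUNT wraps, so a 64-bit running count is
	 * kept in shift_byte_count_hi/lo.  The count is rounded down to a
	 * multiple of the stream buffer size and then re-offset by the current
	 * read pointer; if the old remainder was already past that offset, one
	 * extra buffer length is added so the 64-bit count never moves
	 * backwards.
	 */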
9405 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9406 if ((hevc->shift_byte_count_lo & (1 << 31))
9407 && ((hevc_shift_byte_count & (1 << 31)) == 0))
9408 hevc->shift_byte_count_hi++;
9409
9410 hevc->shift_byte_count_lo = hevc_shift_byte_count;
9411 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9412 hevc->shift_byte_count_lo;
9413 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9414 shift_byte_count64 -= rem;
9415 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9416
9417 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9418 shift_byte_count64 += hevc_stream_buf_size;
9419
9420 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9421 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
9422
9423 WRITE_VREG(DOS_SW_RESET3,
9424 /* (1<<2)| */
9425 (1 << 3) | (1 << 4) | (1 << 8) |
9426 (1 << 11) | (1 << 12) | (1 << 14)
9427 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9428 WRITE_VREG(DOS_SW_RESET3, 0);
9429
9430 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9431 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9432 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9433 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9434 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9435 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9436 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9437
9438 hevc_config_work_space_hw(hevc);
9439 decoder_hw_reset();
9440
9441 hevc->have_vps = 0;
9442 hevc->have_sps = 0;
9443 hevc->have_pps = 0;
9444
9445 hevc->have_valid_start_slice = 0;
9446
9447 if (get_double_write_mode(hevc) & 0x10)
9448 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9449 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
9450 );
9451
9452 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9453 /* clear mailbox interrupt */
9454 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9455 /* enable mailbox interrupt */
9456 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9457 /* disable PSCALE for hardware sharing */
9458 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9459
9460 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9461
9462 WRITE_VREG(DEBUG_REG1, 0x0);
9463
9464 if ((error_handle_policy & 1) == 0) {
9465 if ((error_handle_policy & 4) == 0) {
9466 /* ucode auto mode, and do not check vps/sps/pps/idr */
9467 WRITE_VREG(NAL_SEARCH_CTL,
9468 0xc);
9469 } else {
9470 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9471 }
9472 } else {
9473 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9474 }
9475
9476 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9477 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9478 WRITE_VREG(NAL_SEARCH_CTL,
9479 READ_VREG(NAL_SEARCH_CTL)
9480 | ((parser_sei_enable & 0x7) << 17));
9481#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9482 WRITE_VREG(NAL_SEARCH_CTL,
9483 READ_VREG(NAL_SEARCH_CTL) |
9484 ((parser_dolby_vision_enable & 0x1) << 20));
9485#endif
9486 config_decode_mode(hevc);
9487 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9488
9489 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9490 /* amhevc_disable(); */
9491 /* return -EBUSY; */
9492 /* } */
9493#if 0
9494 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9495 int ii;
9496
9497 for (ii = 0; ii < 4; ii++) {
9498 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9499 hevc_print(hevc, 0,
9500 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9501 }
9502 if (((i + ii) & 0xf) == 0)
9503 hevc_print(hevc, 0, "\n");
9504 }
9505#endif
9506 init_pic_list_hw(hevc);
9507
9508 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9509 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9510
9511#ifdef SWAP_HEVC_UCODE
9512 if (!tee_enabled() && hevc->is_swap &&
9513 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9514 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9515 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9516 }
9517#endif
9518 amhevc_start();
9519
9520 /* skip, search next start code */
9521 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9522 hevc->skip_flag = 1;
9523#ifdef ERROR_HANDLE_DEBUG
9524 if (dbg_nal_skip_count & 0x20000) {
9525 dbg_nal_skip_count &= ~0x20000;
9526 mutex_unlock(&vh265_mutex);
9527 return ret;
9528 }
9529#endif
9530 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9531	/* Interrupt Amrisc to execute */
9532 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9533#ifdef MULTI_INSTANCE_SUPPORT
9534 if (!hevc->m_ins_flag)
9535#endif
9536 hevc->first_pic_after_recover = 1;
9537 mutex_unlock(&vh265_mutex);
9538 return ret;
9539}
9540
9541static void dump_aux_buf(struct hevc_state_s *hevc)
9542{
9543 int i;
9544 unsigned short *aux_adr =
9545 (unsigned short *)
9546 hevc->aux_addr;
9547 unsigned int aux_size =
9548 (READ_VREG(HEVC_AUX_DATA_SIZE)
9549 >> 16) << 4;
9550
9551 if (hevc->prefix_aux_size > 0) {
9552 hevc_print(hevc, 0,
9553 "prefix aux: (size %d)\n",
9554 aux_size);
9555 for (i = 0; i <
9556 (aux_size >> 1); i++) {
9557 hevc_print_cont(hevc, 0,
9558 "%04x ",
9559 *(aux_adr + i));
9560 if (((i + 1) & 0xf)
9561 == 0)
9562 hevc_print_cont(hevc,
9563 0, "\n");
9564 }
9565 }
9566 if (hevc->suffix_aux_size > 0) {
9567 aux_adr = (unsigned short *)
9568 (hevc->aux_addr +
9569 hevc->prefix_aux_size);
9570 aux_size =
9571 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9572 << 4;
9573 hevc_print(hevc, 0,
9574 "suffix aux: (size %d)\n",
9575 aux_size);
9576 for (i = 0; i <
9577 (aux_size >> 1); i++) {
9578 hevc_print_cont(hevc, 0,
9579 "%04x ", *(aux_adr + i));
9580 if (((i + 1) & 0xf) == 0)
9581 hevc_print_cont(hevc, 0, "\n");
9582 }
9583 }
9584}
9585
9586#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
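/*
 * Note: attach the Dolby Vision metadata carried in the AUX buffer to the
 * current picture(s).  Which picture receives it depends on whether this
 * instance carries metadata with the EL, has a slave (EL present), or is
 * the EL itself (vdec->master set); see the set_aux_data() calls below.
 */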
9587static void dolby_get_meta(struct hevc_state_s *hevc)
9588{
9589 struct vdec_s *vdec = hw_to_vdec(hevc);
9590
9591 if (get_dbg_flag(hevc) &
9592 H265_DEBUG_BUFMGR_MORE)
9593 dump_aux_buf(hevc);
9594 if (vdec->dolby_meta_with_el || vdec->slave) {
9595 set_aux_data(hevc,
9596 hevc->cur_pic, 0, 0);
9597 } else if (vdec->master) {
9598 struct hevc_state_s *hevc_ba =
9599 (struct hevc_state_s *)
9600 vdec->master->private;
9601 /*do not use hevc_ba*/
9602 set_aux_data(hevc,
9603 hevc_ba->cur_pic,
9604 0, 1);
9605 set_aux_data(hevc,
9606 hevc->cur_pic, 0, 2);
9607 }
9608}
9609#endif
9610
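/*
 * Note: latch the per-slice info the ucode reports in HEVC_DECODE_INFO:
 * bits [7:0] are or-ed into start_decoding_flag and bits [15:8] become
 * rps_set_id (later written to HEVC_DECODE_MODE2 in config_decode_mode()).
 */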
9611static void read_decode_info(struct hevc_state_s *hevc)
9612{
9613 uint32_t decode_info =
9614 READ_HREG(HEVC_DECODE_INFO);
9615 hevc->start_decoding_flag |=
9616 (decode_info & 0xff);
9617 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9618}
9619
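/*
 * Note: fill a v4l2 ps-info block from the parsed luma size.  Visible and
 * coded sizes are divided by the double-write ratio (the coded size is
 * first aligned up to 32), and dpb_size comes from
 * v4l_parser_work_pic_num().
 */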
9620static int vh265_get_ps_info(struct hevc_state_s *hevc, int width, int height, struct aml_vdec_ps_infos *ps)
9621{
9622 int dw_mode = v4l_parser_get_double_write_mode(hevc, width, height);
9623
9624 ps->visible_width = width / get_double_write_ratio(hevc, dw_mode);
9625 ps->visible_height = height / get_double_write_ratio(hevc, dw_mode);
9626 ps->coded_width = ALIGN(width, 32) / get_double_write_ratio(hevc, dw_mode);
9627 ps->coded_height = ALIGN(height, 32) / get_double_write_ratio(hevc, dw_mode);
9628 ps->dpb_size = v4l_parser_work_pic_num(hevc);
9629
9630 return 0;
9631}
9632
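/*
 * Note: detect a mid-stream resolution change on the v4l path when
 * parameter sets are parsed by the ucode.  On a size change the new
 * ps-info is pushed to the v4l2 layer, output is flushed and an EOS event
 * is signalled so userspace can renegotiate buffers.  Returns 1 when a
 * change was handled, 0 otherwise.
 */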
9633static int v4l_res_change(struct hevc_state_s *hevc, union param_u *rpm_param)
9634{
9635 struct aml_vcodec_ctx *ctx =
9636 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
9637 int ret = 0;
9638
9639 if (ctx->param_sets_from_ucode &&
9640 hevc->res_ch_flag == 0) {
9641 struct aml_vdec_ps_infos ps;
9642 int width = rpm_param->p.pic_width_in_luma_samples;
9643 int height = rpm_param->p.pic_height_in_luma_samples;
9644 if ((hevc->pic_w != 0 &&
9645 hevc->pic_h != 0) &&
9646 (hevc->pic_w != width ||
9647 hevc->pic_h != height)) {
9648 hevc_print(hevc, 0,
9649 "v4l_res_change Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
9650 hevc->pic_w, hevc->pic_h,
9651 width,
9652 height,
9653 hevc->interlace_flag);
9654
9655 vh265_get_ps_info(hevc, width, height, &ps);
9656 vdec_v4l_set_ps_infos(ctx, &ps);
9657 vdec_v4l_res_ch_event(ctx);
9658 hevc->v4l_params_parsed = false;
9659 hevc->res_ch_flag = 1;
9660 hevc->eos = 1;
9661 flush_output(hevc, NULL);
9662 //del_timer_sync(&hevc->timer);
9663 notify_v4l_eos(hw_to_vdec(hevc));
9664
9665 ret = 1;
9666 }
9667 }
9668
9669 return ret;
9670}
9671
9672
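/*
 * Threaded half of the HEVC interrupt.  It dispatches on hevc->dec_status
 * saved by the top half; roughly: error recovery for the legacy
 * single-instance path, buffer-empty/AGAIN handling, picture-done
 * bookkeeping and output, Dolby Vision layer switching, SEI payloads,
 * NAL-search results and slice-segment header processing.
 */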
9673static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9674{
9675 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9676 unsigned int dec_status = hevc->dec_status;
9677 int i, ret;
9678
9679#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9680 struct vdec_s *vdec = hw_to_vdec(hevc);
9681#endif
9682
9683 if (hevc->eos)
9684 return IRQ_HANDLED;
9685 if (
9686#ifdef MULTI_INSTANCE_SUPPORT
9687 (!hevc->m_ins_flag) &&
9688#endif
9689 hevc->error_flag == 1) {
9690 if ((error_handle_policy & 0x10) == 0) {
9691 if (hevc->cur_pic) {
9692 int current_lcu_idx =
9693 READ_VREG(HEVC_PARSER_LCU_START)
9694 & 0xffffff;
9695 if (current_lcu_idx <
9696 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9697 hevc->cur_pic->error_mark = 1;
9698
9699 }
9700 }
9701 if ((error_handle_policy & 1) == 0) {
9702 hevc->error_skip_nal_count = 1;
9703 /* manual NAL search: skip error_skip_nal_count
9704 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9705 */
9706 WRITE_VREG(NAL_SEARCH_CTL,
9707 (error_skip_nal_count << 4) | 0x1);
9708 } else {
9709 hevc->error_skip_nal_count = error_skip_nal_count;
9710 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9711 }
9712 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9713#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9714 || vdec->master
9715 || vdec->slave
9716#endif
9717 ) {
9718 WRITE_VREG(NAL_SEARCH_CTL,
9719 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9720 }
9721 WRITE_VREG(NAL_SEARCH_CTL,
9722 READ_VREG(NAL_SEARCH_CTL)
9723 | ((parser_sei_enable & 0x7) << 17));
9724#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9725 WRITE_VREG(NAL_SEARCH_CTL,
9726 READ_VREG(NAL_SEARCH_CTL) |
9727 ((parser_dolby_vision_enable & 0x1) << 20));
9728#endif
9729 config_decode_mode(hevc);
9730 /* search new nal */
9731 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9732 /* Interrupt Amrisc to execute */
9733 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9734
9735 /* hevc_print(hevc, 0,
9736 *"%s: error handle\n", __func__);
9737 */
9738 hevc->error_flag = 2;
9739 return IRQ_HANDLED;
9740 } else if (
9741#ifdef MULTI_INSTANCE_SUPPORT
9742 (!hevc->m_ins_flag) &&
9743#endif
9744 hevc->error_flag == 3) {
9745 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9746 hevc_recover(hevc);
9747 hevc->error_flag = 0;
9748
9749 if ((error_handle_policy & 0x10) == 0) {
9750 if (hevc->cur_pic) {
9751 int current_lcu_idx =
9752 READ_VREG(HEVC_PARSER_LCU_START)
9753 & 0xffffff;
9754 if (current_lcu_idx <
9755 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9756 hevc->cur_pic->error_mark = 1;
9757
9758 }
9759 }
9760 if ((error_handle_policy & 1) == 0) {
9761 /* need to skip some data when
9762 * error_flag 3 is triggered,
9763 */
9764 /* to avoid hevc_recover() being called
9765 * many times at the same bitstream position
9766 */
9767 hevc->error_skip_nal_count = 1;
9768 /* manual NAL search: skip error_skip_nal_count
9769 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9770 */
9771 WRITE_VREG(NAL_SEARCH_CTL,
9772 (error_skip_nal_count << 4) | 0x1);
9773 }
9774
9775 if ((error_handle_policy & 0x2) == 0) {
9776 hevc->have_vps = 1;
9777 hevc->have_sps = 1;
9778 hevc->have_pps = 1;
9779 }
9780 return IRQ_HANDLED;
9781 }
9782 if (!hevc->m_ins_flag) {
9783 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9784 if ((hevc->shift_byte_count_lo & (1 << 31))
9785 && ((i & (1 << 31)) == 0))
9786 hevc->shift_byte_count_hi++;
9787 hevc->shift_byte_count_lo = i;
9788 }
9789#ifdef MULTI_INSTANCE_SUPPORT
9790 mutex_lock(&hevc->chunks_mutex);
9791 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9792 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9793 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9794 && (hevc->chunk)) {
9795 hevc->cur_pic->pts = hevc->chunk->pts;
9796 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9797 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9798 }
9799 mutex_unlock(&hevc->chunks_mutex);
9800
9801 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9802 dec_status == HEVC_DECODE_BUFEMPTY2) {
9803 if (hevc->m_ins_flag) {
9804 read_decode_info(hevc);
9805 if (vdec_frame_based(hw_to_vdec(hevc))) {
9806 hevc->empty_flag = 1;
9807 goto pic_done;
9808 } else {
9809 if (
9810#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9811 vdec->master ||
9812 vdec->slave ||
9813#endif
9814 (data_resend_policy & 0x1)) {
9815 hevc->dec_result = DEC_RESULT_AGAIN;
9816 amhevc_stop();
9817 restore_decode_state(hevc);
9818 } else
9819 hevc->dec_result = DEC_RESULT_GET_DATA;
9820 }
9821 reset_process_time(hevc);
9822 vdec_schedule_work(&hevc->work);
9823 }
9824 return IRQ_HANDLED;
9825 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9826 (dec_status == HEVC_NAL_DECODE_DONE)
9827 ) {
9828 if (hevc->m_ins_flag) {
9829 read_decode_info(hevc);
9830 if (vdec_frame_based(hw_to_vdec(hevc))) {
9831 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9832 hevc->empty_flag = 1;
9833 goto pic_done;
9834 } else {
9835 hevc->dec_result = DEC_RESULT_AGAIN;
9836 amhevc_stop();
9837 restore_decode_state(hevc);
9838 }
9839
9840 reset_process_time(hevc);
9841 vdec_schedule_work(&hevc->work);
9842 }
9843
9844 return IRQ_HANDLED;
9845 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9846 if (hevc->m_ins_flag) {
9847 struct PIC_s *pic;
9848 struct PIC_s *pic_display;
9849 int decoded_poc;
9850
9851 if (vdec->mvfrm)
9852 vdec->mvfrm->hw_decode_time =
9853 local_clock() - vdec->mvfrm->hw_decode_start;
9854#ifdef DETREFILL_ENABLE
9855 if (hevc->is_swap &&
9856 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9857 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9858 && READ_VREG(HEVC_SAO_DBG_MODE0))
9859 hevc->delrefill_check = 2;
9860 }
9861#endif
9862 hevc->empty_flag = 0;
9863pic_done:
9864 if (input_frame_based(hw_to_vdec(hevc)) &&
9865 frmbase_cont_bitlevel != 0 &&
9866 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9867 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9868 > frmbase_cont_bitlevel)) {
9869 /*handle the case: multi pictures in one packet*/
9870 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9871 "%s has more data index= %d, size=0x%x shiftcnt=0x%x)\n",
9872 __func__,
9873 hevc->decode_idx, hevc->decode_size,
9874 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9875 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9876 start_process_time(hevc);
9877 return IRQ_HANDLED;
9878 }
9879
9880 read_decode_info(hevc);
9881 get_picture_qos_info(hevc);
9882#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9883 hevc->start_parser_type = 0;
9884 hevc->switch_dvlayer_flag = 0;
9885#endif
9886 hevc->decoded_poc = hevc->curr_POC;
9887 hevc->decoding_pic = NULL;
9888 hevc->dec_result = DEC_RESULT_DONE;
9889#ifdef DETREFILL_ENABLE
9890 if (hevc->is_swap &&
9891 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9892 if (hevc->delrefill_check != 2)
9893#endif
9894
9895 amhevc_stop();
9896
9897 reset_process_time(hevc);
9898
9899 if (hevc->vf_pre_count == 0 || hevc->ip_mode) {
9900 decoded_poc = hevc->curr_POC;
9901 pic = get_pic_by_POC(hevc, decoded_poc);
9902 if (pic && (pic->POC != INVALID_POC)) {
9903 /*PB skip control */
9904 if (pic->error_mark == 0
9905 && hevc->PB_skip_mode == 1) {
9906 /* start decoding after
9907 * first I
9908 */
9909 hevc->ignore_bufmgr_error |= 0x1;
9910 }
9911 if (hevc->ignore_bufmgr_error & 1) {
9912 if (hevc->PB_skip_count_after_decoding > 0) {
9913 hevc->PB_skip_count_after_decoding--;
9914 } else {
9915 /* start displaying */
9916 hevc->ignore_bufmgr_error |= 0x2;
9917 }
9918 }
9919 if (hevc->mmu_enable
9920 && ((hevc->double_write_mode & 0x10) == 0)) {
9921 if (!hevc->m_ins_flag) {
9922 hevc->used_4k_num =
9923 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9924
9925 if ((!is_skip_decoding(hevc, pic)) &&
9926 (hevc->used_4k_num >= 0) &&
9927 (hevc->cur_pic->scatter_alloc
9928 == 1)) {
9929 hevc_print(hevc,
9930 H265_DEBUG_BUFMGR_MORE,
9931 "%s pic index %d scatter_alloc %d page_start %d\n",
9932 "decoder_mmu_box_free_idx_tail",
9933 hevc->cur_pic->index,
9934 hevc->cur_pic->scatter_alloc,
9935 hevc->used_4k_num);
9936 decoder_mmu_box_free_idx_tail(
9937 hevc->mmu_box,
9938 hevc->cur_pic->index,
9939 hevc->used_4k_num);
9940 hevc->cur_pic->scatter_alloc
9941 = 2;
9942 }
9943 hevc->used_4k_num = -1;
9944 }
9945 }
9946
9947 pic->output_mark = 1;
9948 pic->recon_mark = 1;
9949 if (vdec->mvfrm) {
9950 pic->frame_size =
9951 vdec->mvfrm->frame_size;
9952 pic->hw_decode_time =
9953 (u32)vdec->mvfrm->hw_decode_time;
9954 }
9955 }
9956 check_pic_decoded_error(hevc,
9957 READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff);
9958 if (hevc->cur_pic != NULL &&
9959 (READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff) == 0
9960 && (hevc->lcu_x_num * hevc->lcu_y_num != 1))
9961 hevc->cur_pic->error_mark = 1;
9962force_output:
9963 pic_display = output_pic(hevc, 1);
9964 if (pic_display) {
9965 if ((pic_display->error_mark &&
9966 ((hevc->ignore_bufmgr_error &
9967 0x2) == 0))
9968 || (get_dbg_flag(hevc) &
9969 H265_DEBUG_DISPLAY_CUR_FRAME)
9970 || (get_dbg_flag(hevc) &
9971 H265_DEBUG_NO_DISPLAY)) {
9972 pic_display->output_ready = 0;
9973 if (get_dbg_flag(hevc) &
9974 H265_DEBUG_BUFMGR) {
9975 hevc_print(hevc, 0,
9976 "[BM] Display: POC %d, ",
9977 pic_display->POC);
9978 hevc_print_cont(hevc, 0,
9979 "decoding index %d ==> ",
9980 pic_display->
9981 decode_idx);
9982 hevc_print_cont(hevc, 0,
9983 "Debug or err,recycle it\n");
9984 }
9985 } else {
9986 if ((pic_display->
9987 slice_type != 2) && !pic_display->ip_mode) {
9988 pic_display->output_ready = 0;
9989 } else {
9990 prepare_display_buf
9991 (hevc,
9992 pic_display);
9993 hevc->first_pic_flag = 1;
9994 }
9995 }
9996 }
9997 }
9998
9999 vdec_schedule_work(&hevc->work);
10000 }
10001
10002 return IRQ_HANDLED;
10003#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10004 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
10005 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
10006 if (hevc->m_ins_flag) {
10007 unsigned char next_parser_type =
10008 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
10009 read_decode_info(hevc);
10010
10011 if (vdec->slave &&
10012 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
10013 /*cur is base, found enhance*/
10014 struct hevc_state_s *hevc_el =
10015 (struct hevc_state_s *)
10016 vdec->slave->private;
10017 hevc->switch_dvlayer_flag = 1;
10018 hevc->no_switch_dvlayer_count = 0;
10019 hevc_el->start_parser_type =
10020 next_parser_type;
10021 hevc_print(hevc, H265_DEBUG_DV,
10022 "switch (poc %d) to el\n",
10023 hevc->cur_pic ?
10024 hevc->cur_pic->POC :
10025 INVALID_POC);
10026 } else if (vdec->master &&
10027 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
10028 /*cur is enhance, found base*/
10029 struct hevc_state_s *hevc_ba =
10030 (struct hevc_state_s *)
10031 vdec->master->private;
10032 hevc->switch_dvlayer_flag = 1;
10033 hevc->no_switch_dvlayer_count = 0;
10034 hevc_ba->start_parser_type =
10035 next_parser_type;
10036 hevc_print(hevc, H265_DEBUG_DV,
10037 "switch (poc %d) to bl\n",
10038 hevc->cur_pic ?
10039 hevc->cur_pic->POC :
10040 INVALID_POC);
10041 } else {
10042 hevc->switch_dvlayer_flag = 0;
10043 hevc->start_parser_type =
10044 next_parser_type;
10045 hevc->no_switch_dvlayer_count++;
10046 hevc_print(hevc, H265_DEBUG_DV,
10047 "%s: no_switch_dvlayer_count = %d\n",
10048 vdec->master ? "el" : "bl",
10049 hevc->no_switch_dvlayer_count);
10050 if (vdec->slave &&
10051 dolby_el_flush_th != 0 &&
10052 hevc->no_switch_dvlayer_count >
10053 dolby_el_flush_th) {
10054 struct hevc_state_s *hevc_el =
10055 (struct hevc_state_s *)
10056 vdec->slave->private;
10057 struct PIC_s *el_pic;
10058 check_pic_decoded_error(hevc_el,
10059 hevc_el->pic_decoded_lcu_idx);
10060 el_pic = get_pic_by_POC(hevc_el,
10061 hevc_el->curr_POC);
10062 hevc_el->curr_POC = INVALID_POC;
10063 hevc_el->m_pocRandomAccess = MAX_INT;
10064 flush_output(hevc_el, el_pic);
10065 hevc_el->decoded_poc = INVALID_POC; /*
10066 flush_output already called */
10067 hevc_el->decoding_pic = NULL;
10068 hevc->no_switch_dvlayer_count = 0;
10069 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
10070 hevc_print(hevc, 0,
10071 "no el anymore, flush_output el\n");
10072 }
10073 }
10074 hevc->decoded_poc = hevc->curr_POC;
10075 hevc->decoding_pic = NULL;
10076 hevc->dec_result = DEC_RESULT_DONE;
10077 amhevc_stop();
10078 reset_process_time(hevc);
10079 if (aux_data_is_avaible(hevc))
10080 dolby_get_meta(hevc);
10081 if (hevc->cur_pic->slice_type == 2 &&
10082 hevc->vf_pre_count == 0) {
10083 hevc_print(hevc, 0,
10084 "first slice_type %x no_switch_dvlayer_count %x\n",
10085 hevc->cur_pic->slice_type,
10086 hevc->no_switch_dvlayer_count);
10087 goto force_output;
10088 }
10089 vdec_schedule_work(&hevc->work);
10090 }
10091
10092 return IRQ_HANDLED;
10093#endif
10094 }
10095
10096#endif
10097
10098 if (dec_status == HEVC_SEI_DAT) {
10099 if (!hevc->m_ins_flag) {
10100 int payload_type =
10101 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
10102 int payload_size =
10103 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
10104 process_nal_sei(hevc,
10105 payload_type, payload_size);
10106 }
10107 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
10108 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
10109 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
10110 int parse_type = HEVC_DISCARD_NAL;
10111
10112 hevc->error_watchdog_count = 0;
10113 hevc->error_skip_nal_wt_cnt = 0;
10114#ifdef MULTI_INSTANCE_SUPPORT
10115 if (hevc->m_ins_flag)
10116 reset_process_time(hevc);
10117#endif
10118 if (slice_parse_begin > 0 &&
10119 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
10120 hevc_print(hevc, 0,
10121 "nal type %d, discard %d\n", naltype,
10122 slice_parse_begin);
10123 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
10124 slice_parse_begin--;
10125 }
10126 if (naltype == NAL_UNIT_EOS) {
10127 struct PIC_s *pic;
10128
10129 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
10130#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10131 if ((vdec->master || vdec->slave) &&
10132 aux_data_is_avaible(hevc)) {
10133 if (hevc->decoding_pic)
10134 dolby_get_meta(hevc);
10135 }
10136#endif
10137 check_pic_decoded_error(hevc,
10138 hevc->pic_decoded_lcu_idx);
10139 pic = get_pic_by_POC(hevc, hevc->curr_POC);
10140 hevc->curr_POC = INVALID_POC;
10141 /* add to fix RAP_B_Bossen_1 */
10142 hevc->m_pocRandomAccess = MAX_INT;
10143 flush_output(hevc, pic);
10144 clear_poc_flag(hevc);
10145 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
10146 /* Interrupt Amrisc to execute */
10147 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10148#ifdef MULTI_INSTANCE_SUPPORT
10149 if (hevc->m_ins_flag) {
10150 hevc->decoded_poc = INVALID_POC; /*
10151 flush_output already called */
10152 hevc->decoding_pic = NULL;
10153 hevc->dec_result = DEC_RESULT_DONE;
10154 amhevc_stop();
10155
10156 vdec_schedule_work(&hevc->work);
10157 }
10158#endif
10159 return IRQ_HANDLED;
10160 }
10161
10162 if (
10163#ifdef MULTI_INSTANCE_SUPPORT
10164 (!hevc->m_ins_flag) &&
10165#endif
10166 hevc->error_skip_nal_count > 0) {
10167 hevc_print(hevc, 0,
10168 "nal type %d, discard %d\n", naltype,
10169 hevc->error_skip_nal_count);
10170 hevc->error_skip_nal_count--;
10171 if (hevc->error_skip_nal_count == 0) {
10172 hevc_recover(hevc);
10173 hevc->error_flag = 0;
10174 if ((error_handle_policy & 0x2) == 0) {
10175 hevc->have_vps = 1;
10176 hevc->have_sps = 1;
10177 hevc->have_pps = 1;
10178 }
10179 return IRQ_HANDLED;
10180 }
10181 } else if (naltype == NAL_UNIT_VPS) {
10182 parse_type = HEVC_NAL_UNIT_VPS;
10183 hevc->have_vps = 1;
10184#ifdef ERROR_HANDLE_DEBUG
10185 if (dbg_nal_skip_flag & 1)
10186 parse_type = HEVC_DISCARD_NAL;
10187#endif
10188 } else if (hevc->have_vps) {
10189 if (naltype == NAL_UNIT_SPS) {
10190 parse_type = HEVC_NAL_UNIT_SPS;
10191 hevc->have_sps = 1;
10192#ifdef ERROR_HANDLE_DEBUG
10193 if (dbg_nal_skip_flag & 2)
10194 parse_type = HEVC_DISCARD_NAL;
10195#endif
10196 } else if (naltype == NAL_UNIT_PPS) {
10197 parse_type = HEVC_NAL_UNIT_PPS;
10198 hevc->have_pps = 1;
10199#ifdef ERROR_HANDLE_DEBUG
10200 if (dbg_nal_skip_flag & 4)
10201 parse_type = HEVC_DISCARD_NAL;
10202#endif
10203 } else if (hevc->have_sps && hevc->have_pps) {
10204 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
10205
10206 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
10207 (naltype ==
10208 NAL_UNIT_CODED_SLICE_IDR_N_LP)
10209 || (naltype ==
10210 NAL_UNIT_CODED_SLICE_CRA)
10211 || (naltype ==
10212 NAL_UNIT_CODED_SLICE_BLA)
10213 || (naltype ==
10214 NAL_UNIT_CODED_SLICE_BLANT)
10215 || (naltype ==
10216 NAL_UNIT_CODED_SLICE_BLA_N_LP)
10217 ) {
10218 if (slice_parse_begin > 0) {
10219 hevc_print(hevc, 0,
10220 "discard %d, for debugging\n",
10221 slice_parse_begin);
10222 slice_parse_begin--;
10223 } else {
10224 parse_type = seg;
10225 }
10226 hevc->have_valid_start_slice = 1;
10227 } else if (naltype <=
10228 NAL_UNIT_CODED_SLICE_CRA
10229 && (hevc->have_valid_start_slice
10230 || (hevc->PB_skip_mode != 3))) {
10231 if (slice_parse_begin > 0) {
10232 hevc_print(hevc, 0,
10233 "discard %d, dd\n",
10234 slice_parse_begin);
10235 slice_parse_begin--;
10236 } else
10237 parse_type = seg;
10238
10239 }
10240 }
10241 }
10242 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
10243 && hevc->have_valid_start_slice &&
10244 hevc->error_flag == 0) {
10245 if ((get_dbg_flag(hevc) &
10246 H265_DEBUG_MAN_SEARCH_NAL) == 0
10247 /* && (!hevc->m_ins_flag)*/) {
10248 /* auto parser NAL; do not check
10249 * vps/sps/pps/idr
10250 */
10251 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
10252 }
10253
10254 if ((get_dbg_flag(hevc) &
10255 H265_DEBUG_NO_EOS_SEARCH_DONE)
10256#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10257 || vdec->master
10258 || vdec->slave
10259#endif
10260 ) {
10261 WRITE_VREG(NAL_SEARCH_CTL,
10262 READ_VREG(NAL_SEARCH_CTL) |
10263 0x10000);
10264 }
10265 WRITE_VREG(NAL_SEARCH_CTL,
10266 READ_VREG(NAL_SEARCH_CTL)
10267 | ((parser_sei_enable & 0x7) << 17));
10268#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10269 WRITE_VREG(NAL_SEARCH_CTL,
10270 READ_VREG(NAL_SEARCH_CTL) |
10271 ((parser_dolby_vision_enable & 0x1) << 20));
10272#endif
10273 config_decode_mode(hevc);
10274 }
10275
10276 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
10277 hevc_print(hevc, 0,
10278 "naltype = %d parse_type %d\n %d %d %d %d\n",
10279 naltype, parse_type, hevc->have_vps,
10280 hevc->have_sps, hevc->have_pps,
10281 hevc->have_valid_start_slice);
10282 }
10283
10284 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
10285 /* Interrupt Amrisc to execute */
10286 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10287#ifdef MULTI_INSTANCE_SUPPORT
10288 if (hevc->m_ins_flag)
10289 start_process_time(hevc);
10290#endif
10291 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
10292#ifdef MULTI_INSTANCE_SUPPORT
10293 if (hevc->m_ins_flag) {
10294 reset_process_time(hevc);
10295 read_decode_info(hevc);
10296
10297 }
10298#endif
10299 if (hevc->start_decoding_time > 0) {
10300 u32 process_time = 1000*
10301 (jiffies - hevc->start_decoding_time)/HZ;
10302 if (process_time > max_decoding_time)
10303 max_decoding_time = process_time;
10304 }
10305
10306 hevc->error_watchdog_count = 0;
10307 if (hevc->pic_list_init_flag == 2) {
10308 hevc->pic_list_init_flag = 3;
10309 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
10310 } else if (hevc->wait_buf == 0) {
10311 u32 vui_time_scale;
10312 u32 vui_num_units_in_tick;
10313 unsigned char reconfig_flag = 0;
10314
10315 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
10316 get_rpm_param(&hevc->param);
10317 else {
10318
10319 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
10320 int ii;
10321
10322 for (ii = 0; ii < 4; ii++) {
10323 hevc->param.l.data[i + ii] =
10324 hevc->rpm_ptr[i + 3
10325 - ii];
10326 }
10327 }
10328#ifdef SEND_LMEM_WITH_RPM
10329 check_head_error(hevc);
10330#endif
10331 }
10332 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
10333 hevc_print(hevc, 0,
10334 "rpm_param: (%d)\n", hevc->slice_idx);
10335 hevc->slice_idx++;
10336 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
10337 hevc_print_cont(hevc, 0,
10338 "%04x ", hevc->param.l.data[i]);
10339 if (((i + 1) & 0xf) == 0)
10340 hevc_print_cont(hevc, 0, "\n");
10341 }
10342
10343 hevc_print(hevc, 0,
10344 "vui_timing_info: %x, %x, %x, %x\n",
10345 hevc->param.p.vui_num_units_in_tick_hi,
10346 hevc->param.p.vui_num_units_in_tick_lo,
10347 hevc->param.p.vui_time_scale_hi,
10348 hevc->param.p.vui_time_scale_lo);
10349 }
10350
10351 if (hevc->is_used_v4l) {
10352 struct aml_vcodec_ctx *ctx =
10353 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
10354 if (!v4l_res_change(hevc, &hevc->param)) {
10355 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
10356 struct aml_vdec_ps_infos ps;
10357 int width = hevc->param.p.pic_width_in_luma_samples;
10358 int height = hevc->param.p.pic_height_in_luma_samples;
10359
10360 pr_debug("set ucode parse\n");
10361 vh265_get_ps_info(hevc, width, height, &ps);
10362 /*notice the v4l2 codec.*/
10363 vdec_v4l_set_ps_infos(ctx, &ps);
10364 hevc->v4l_params_parsed = true;
10365 hevc->dec_result = DEC_RESULT_AGAIN;
10366 amhevc_stop();
10367 restore_decode_state(hevc);
10368 reset_process_time(hevc);
10369 vdec_schedule_work(&hevc->work);
10370 return IRQ_HANDLED;
10371 }
10372 } else {
10373 pr_debug("resolution change\n");
10374 hevc->dec_result = DEC_RESULT_AGAIN;
10375 amhevc_stop();
10376 restore_decode_state(hevc);
10377 reset_process_time(hevc);
10378 vdec_schedule_work(&hevc->work);
10379 return IRQ_HANDLED;
10380
10381 }
10382 }
10383
10384 if (
10385#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10386 vdec->master == NULL &&
10387 vdec->slave == NULL &&
10388#endif
10389 aux_data_is_avaible(hevc)
10390 ) {
10391
10392 if (get_dbg_flag(hevc) &
10393 H265_DEBUG_BUFMGR_MORE)
10394 dump_aux_buf(hevc);
10395 }
10396
10397 vui_time_scale =
10398 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
10399 hevc->param.p.vui_time_scale_lo;
10400 vui_num_units_in_tick =
10401 (u32)(hevc->param.
10402 p.vui_num_units_in_tick_hi << 16) |
10403 hevc->param.
10404 p.vui_num_units_in_tick_lo;
10405 if (hevc->bit_depth_luma !=
10406 ((hevc->param.p.bit_depth & 0xf) + 8)) {
10407 reconfig_flag = 1;
10408 hevc_print(hevc, 0, "Bit depth luma = %d\n",
10409 (hevc->param.p.bit_depth & 0xf) + 8);
10410 }
10411 if (hevc->bit_depth_chroma !=
10412 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
10413 reconfig_flag = 1;
10414 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
10415 ((hevc->param.p.bit_depth >> 4) &
10416 0xf) + 8);
10417 }
10418 hevc->bit_depth_luma =
10419 (hevc->param.p.bit_depth & 0xf) + 8;
10420 hevc->bit_depth_chroma =
10421 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
10422 bit_depth_luma = hevc->bit_depth_luma;
10423 bit_depth_chroma = hevc->bit_depth_chroma;
10424#ifdef SUPPORT_10BIT
10425 if (hevc->bit_depth_luma == 8 &&
10426 hevc->bit_depth_chroma == 8 &&
10427 enable_mem_saving)
10428 hevc->mem_saving_mode = 1;
10429 else
10430 hevc->mem_saving_mode = 0;
10431#endif
10432 if (reconfig_flag &&
10433 (get_double_write_mode(hevc) & 0x10) == 0)
10434 init_decode_head_hw(hevc);
10435
10436 if ((vui_time_scale != 0)
10437 && (vui_num_units_in_tick != 0)) {
10438 hevc->frame_dur =
10439 div_u64(96000ULL *
10440 vui_num_units_in_tick,
10441 vui_time_scale);
10442 if (hevc->get_frame_dur != true)
10443 vdec_schedule_work(
10444 &hevc->notify_work);
10445
10446 hevc->get_frame_dur = true;
10447 //hevc->gvs->frame_dur = hevc->frame_dur;
10448 }
10449
10450 if (hevc->video_signal_type !=
10451 ((hevc->param.p.video_signal_type << 16)
10452 | hevc->param.p.color_description)) {
10453 u32 v = hevc->param.p.video_signal_type;
10454 u32 c = hevc->param.p.color_description;
10455#if 0
10456 if (v & 0x2000) {
10457 hevc_print(hevc, 0,
10458 "video_signal_type present:\n");
10459 hevc_print(hevc, 0, " %s %s\n",
10460 video_format_names[(v >> 10) & 7],
10461 ((v >> 9) & 1) ?
10462 "full_range" : "limited");
10463 if (v & 0x100) {
10464 hevc_print(hevc, 0,
10465 " color_description present:\n");
10466 hevc_print(hevc, 0,
10467 " color_primarie = %s\n",
10468 color_primaries_names
10469 [v & 0xff]);
10470 hevc_print(hevc, 0,
10471 " transfer_characteristic = %s\n",
10472 transfer_characteristics_names
10473 [(c >> 8) & 0xff]);
10474 hevc_print(hevc, 0,
10475 " matrix_coefficient = %s\n",
10476 matrix_coeffs_names[c & 0xff]);
10477 }
10478 }
10479#endif
10480 hevc->video_signal_type = (v << 16) | c;
10481 video_signal_type = hevc->video_signal_type;
10482 }
10483
10484 if (use_cma &&
10485 (hevc->param.p.slice_segment_address == 0)
10486 && (hevc->pic_list_init_flag == 0)) {
10487 int log = hevc->param.p.log2_min_coding_block_size_minus3;
10488 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
10489
10490 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10491 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
10492 hevc->lcu_size = 1 << (log + 3 + log_s);
10493 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
10494 if (performance_profile && ((!is_oversize(hevc->pic_w, hevc->pic_h)) && IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
10495 hevc->performance_profile = 1;
10496 else
10497 hevc->performance_profile = 0;
10498 hevc_print(hevc, 0, "hevc->performance_profile %d\n", hevc->performance_profile);
10499 if (hevc->pic_w == 0 || hevc->pic_h == 0
10500 || hevc->lcu_size == 0
10501 || is_oversize(hevc->pic_w, hevc->pic_h)
10502 || (!hevc->skip_first_nal &&
10503 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
10504 /* skip search next start code */
10505 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10506 & (~0x2));
10507 if (!hevc->skip_first_nal &&
10508 (hevc->pic_h == 96) && (hevc->pic_w == 160))
10509 hevc->skip_first_nal = 1;
10510 hevc->skip_flag = 1;
10511 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10512 /* Interrupt Amrisc to execute */
10513 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10514#ifdef MULTI_INSTANCE_SUPPORT
10515 if (hevc->m_ins_flag)
10516 start_process_time(hevc);
10517#endif
10518 } else {
10519 hevc->sps_num_reorder_pics_0 =
10520 hevc->param.p.sps_num_reorder_pics_0;
10521 hevc->ip_mode = (!hevc->sps_num_reorder_pics_0 && !disable_ip_mode) ? true : false;
10522 hevc->pic_list_init_flag = 1;
10523 if ((!IS_4K_SIZE(hevc->pic_w, hevc->pic_h)) &&
10524 ((hevc->param.p.profile_etc & 0xc) == 0x4)
10525 && (interlace_enable != 0)) {
10526 hevc->double_write_mode = 1;
10527 hevc->interlace_flag = 1;
10528 hevc->frame_ar = (hevc->pic_h * 0x100 / hevc->pic_w) * 2;
10529 hevc_print(hevc, 0,
10530 "interlace (%d, %d), profile_etc %x, ar 0x%x, dw %d\n",
10531 hevc->pic_w, hevc->pic_h, hevc->param.p.profile_etc, hevc->frame_ar,
10532 get_double_write_mode(hevc));
10533 }
10534#ifdef MULTI_INSTANCE_SUPPORT
10535 if (hevc->m_ins_flag) {
10536 vdec_schedule_work(&hevc->work);
10537 } else
10538#endif
10539 up(&h265_sema);
10540 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10541 }
10542 return IRQ_HANDLED;
10543 }
10544
10545}
10546 ret =
10547 hevc_slice_segment_header_process(hevc,
10548 &hevc->param, decode_pic_begin);
10549 if (ret < 0) {
10550#ifdef MULTI_INSTANCE_SUPPORT
10551 if (hevc->m_ins_flag) {
10552 hevc->wait_buf = 0;
10553 hevc->dec_result = DEC_RESULT_AGAIN;
10554 amhevc_stop();
10555 restore_decode_state(hevc);
10556 reset_process_time(hevc);
10557 vdec_schedule_work(&hevc->work);
10558 return IRQ_HANDLED;
10559 }
10560#else
10561 ;
10562#endif
10563 } else if (ret == 0) {
10564 if ((hevc->new_pic) && (hevc->cur_pic)) {
10565 hevc->cur_pic->stream_offset =
10566 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10567 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10568 "read stream_offset = 0x%x\n",
10569 hevc->cur_pic->stream_offset);
10570 hevc->cur_pic->aspect_ratio_idc =
10571 hevc->param.p.aspect_ratio_idc;
10572 hevc->cur_pic->sar_width =
10573 hevc->param.p.sar_width;
10574 hevc->cur_pic->sar_height =
10575 hevc->param.p.sar_height;
10576 }
10577
10578 WRITE_VREG(HEVC_DEC_STATUS_REG,
10579 HEVC_CODED_SLICE_SEGMENT_DAT);
10580 /* Interrupt Amrisc to execute */
10581 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10582
10583 hevc->start_decoding_time = jiffies;
10584#ifdef MULTI_INSTANCE_SUPPORT
10585 if (hevc->m_ins_flag)
10586 start_process_time(hevc);
10587#endif
10588#if 1
10589 /*to do..., copy aux data to hevc->cur_pic*/
10590#endif
10591#ifdef MULTI_INSTANCE_SUPPORT
10592 } else if (hevc->m_ins_flag) {
10593 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10594 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10595 __func__, ret);
10596 hevc->decoded_poc = INVALID_POC;
10597 hevc->decoding_pic = NULL;
10598 hevc->dec_result = DEC_RESULT_DONE;
10599 amhevc_stop();
10600 reset_process_time(hevc);
10601 vdec_schedule_work(&hevc->work);
10602#endif
10603 } else {
10604 /* skip, search next start code */
10605 hevc->gvs->drop_frame_count++;
10606 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10607 hevc->skip_flag = 1;
10608 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10609 /* Interrupt Amrisc to execute */
10610 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10611 }
10612
10613 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10614 hevc_print(hevc, 0 , "hevc decode oversize !!\n");
10615#ifdef MULTI_INSTANCE_SUPPORT
10616 if (!hevc->m_ins_flag)
10617 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10618 H265_DEBUG_DIS_SYS_ERROR_PROC);
10619#endif
10620 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10621 }
10622 return IRQ_HANDLED;
10623}
10624
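/*
 * Note: clear HEVC_SHIFT_STATUS and poll bit 1 of HEVC_STREAM_CONTROL
 * until the ucode's stream search stops, giving up after roughly two
 * seconds (100 iterations of 20 ms).
 */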
10625static void wait_hevc_search_done(struct hevc_state_s *hevc)
10626{
10627 int count = 0;
10628 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10629 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10630 msleep(20);
10631 count++;
10632 if (count > 100) {
10633 hevc_print(hevc, 0, "%s timeout\n", __func__);
10634 break;
10635 }
10636 }
10637}
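/*
 * Top half of the HEVC interrupt.  It snapshots HEVC_DEC_STATUS_REG,
 * services the ucode debug/pause hooks carried in DEBUG_REG1/DEBUG_REG2,
 * handles the legacy over-decode case, and defers everything else to
 * vh265_isr_thread_fn() via IRQ_WAKE_THREAD.
 */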
10638static irqreturn_t vh265_isr(int irq, void *data)
10639{
10640 int i, temp;
10641 unsigned int dec_status;
10642 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10643 u32 debug_tag;
10644 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10645
10646 if (hevc->init_flag == 0)
10647 return IRQ_HANDLED;
10648 hevc->dec_status = dec_status;
10649 if (is_log_enable(hevc))
10650 add_log(hevc,
10651 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10652 dec_status, READ_HREG(HEVC_DECODE_INFO),
10653 READ_VREG(HEVC_MPRED_CURR_LCU),
10654 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10655 READ_VREG(HEVC_SHIFT_STATUS));
10656
10657 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10658 hevc_print(hevc, 0,
10659 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10660 dec_status, READ_HREG(HEVC_DECODE_INFO),
10661 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10662 READ_VREG(HEVC_SHIFT_STATUS));
10663
10664 debug_tag = READ_HREG(DEBUG_REG1);
10665 if (debug_tag & 0x10000) {
10666 hevc_print(hevc, 0,
10667 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10668
10669 if (hevc->mmu_enable)
10670 temp = 0x500;
10671 else
10672 temp = 0x400;
10673 for (i = 0; i < temp; i += 4) {
10674 int ii;
10675 if ((i & 0xf) == 0)
10676 hevc_print_cont(hevc, 0, "%03x: ", i);
10677 for (ii = 0; ii < 4; ii++) {
10678 hevc_print_cont(hevc, 0, "%04x ",
10679 hevc->lmem_ptr[i + 3 - ii]);
10680 }
10681 if (((i + ii) & 0xf) == 0)
10682 hevc_print_cont(hevc, 0, "\n");
10683 }
10684
10685 if (((udebug_pause_pos & 0xffff)
10686 == (debug_tag & 0xffff)) &&
10687 (udebug_pause_decode_idx == 0 ||
10688 udebug_pause_decode_idx == hevc->decode_idx) &&
10689 (udebug_pause_val == 0 ||
10690 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10691 udebug_pause_pos &= 0xffff;
10692 hevc->ucode_pause_pos = udebug_pause_pos;
10693 }
10694 else if (debug_tag & 0x20000)
10695 hevc->ucode_pause_pos = 0xffffffff;
10696 if (hevc->ucode_pause_pos)
10697 reset_process_time(hevc);
10698 else
10699 WRITE_HREG(DEBUG_REG1, 0);
10700 } else if (debug_tag != 0) {
10701 hevc_print(hevc, 0,
10702 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10703 READ_HREG(DEBUG_REG2),
10704 READ_VREG(HEVC_STREAM_LEVEL),
10705 READ_VREG(HEVC_STREAM_WR_PTR),
10706 READ_VREG(HEVC_STREAM_RD_PTR));
10707 if (((udebug_pause_pos & 0xffff)
10708 == (debug_tag & 0xffff)) &&
10709 (udebug_pause_decode_idx == 0 ||
10710 udebug_pause_decode_idx == hevc->decode_idx) &&
10711 (udebug_pause_val == 0 ||
10712 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10713 udebug_pause_pos &= 0xffff;
10714 hevc->ucode_pause_pos = udebug_pause_pos;
10715 }
10716 if (hevc->ucode_pause_pos)
10717 reset_process_time(hevc);
10718 else
10719 WRITE_HREG(DEBUG_REG1, 0);
10720 return IRQ_HANDLED;
10721 }
10722
10723
10724 if (hevc->pic_list_init_flag == 1)
10725 return IRQ_HANDLED;
10726
10727 if (!hevc->m_ins_flag) {
10728 if (dec_status == HEVC_OVER_DECODE) {
10729 hevc->over_decode = 1;
10730 hevc_print(hevc, 0,
10731 "isr: over decode\n"),
10732 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10733 return IRQ_HANDLED;
10734 }
10735 }
10736
10737 return IRQ_WAKE_THREAD;
10738
10739}
10740
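/*
 * Note: deferred clock adjustment.  fps is derived from frame_dur (in
 * 1/96000 s units); when hevc_source_changed() accepts the new
 * resolution/rate, saved_resolution is updated so the watchdog timer stops
 * rescheduling this work.
 */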
10741static void vh265_set_clk(struct work_struct *work)
10742{
10743 struct hevc_state_s *hevc = container_of(work,
10744 struct hevc_state_s, set_clk_work);
10745
10746 int fps = 96000 / hevc->frame_dur;
10747
10748 if (hevc_source_changed(VFORMAT_HEVC,
10749 hevc->frame_width, hevc->frame_height, fps) > 0)
10750 hevc->saved_resolution = hevc->frame_width *
10751 hevc->frame_height * fps;
10752}
10753
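/*
 * Note: periodic watchdog, re-armed every PUT_INTERVAL jiffies.  For
 * multi-instance decode it checks LCU progress against decode_timeout_val
 * and calls timeout_process(); on the legacy path it drives the
 * local/system error watchdogs, plus assorted debug hooks (register
 * poke/peek, pic list dump, forced HW reset).
 */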
10754static void vh265_check_timer_func(unsigned long arg)
10755{
10756 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10757 struct timer_list *timer = &hevc->timer;
10758 unsigned char empty_flag;
10759 unsigned int buf_level;
10760
10761 enum receviver_start_e state = RECEIVER_INACTIVE;
10762
10763 if (hevc->init_flag == 0) {
10764 if (hevc->stat & STAT_TIMER_ARM) {
10765 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10766 }
10767 return;
10768 }
10769#ifdef MULTI_INSTANCE_SUPPORT
10770 if (hevc->m_ins_flag &&
10771 (get_dbg_flag(hevc) &
10772 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10773 hw_to_vdec(hevc)->next_status ==
10774 VDEC_STATUS_DISCONNECTED &&
10775 !hevc->is_used_v4l) {
10776 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10777 vdec_schedule_work(&hevc->work);
10778 hevc_print(hevc,
10779 0, "vdec requested to be disconnected\n");
10780 return;
10781 }
10782
10783 if (hevc->m_ins_flag) {
10784 if (((get_dbg_flag(hevc) &
10785 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10786 (decode_timeout_val > 0) &&
10787 (hevc->start_process_time > 0) &&
10788 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10789 > decode_timeout_val)
10790 ) {
10791 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10792 int current_lcu_idx =
10793 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10794 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10795 if (hevc->last_lcu_idx == current_lcu_idx) {
10796 if (hevc->decode_timeout_count > 0)
10797 hevc->decode_timeout_count--;
10798 if (hevc->decode_timeout_count == 0)
10799 timeout_process(hevc);
10800 } else
10801 restart_process_time(hevc);
10802 hevc->last_lcu_idx = current_lcu_idx;
10803 } else {
10804 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10805 timeout_process(hevc);
10806 }
10807 }
10808 } else {
10809#endif
10810 if (hevc->m_ins_flag == 0 &&
10811 vf_get_receiver(hevc->provider_name)) {
10812 state =
10813 vf_notify_receiver(hevc->provider_name,
10814 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10815 NULL);
10816 if ((state == RECEIVER_STATE_NULL)
10817 || (state == RECEIVER_STATE_NONE))
10818 state = RECEIVER_INACTIVE;
10819 } else
10820 state = RECEIVER_INACTIVE;
10821
10822 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10823 /* error watchdog */
10824 if (hevc->m_ins_flag == 0 &&
10825 (empty_flag == 0)
10826 && (hevc->pic_list_init_flag == 0
10827 || hevc->pic_list_init_flag
10828 == 3)) {
10829 /* decoder has input */
10830 if ((get_dbg_flag(hevc) &
10831 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10832
10833 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10834 /* receiver has no buffer to recycle */
10835 if ((state == RECEIVER_INACTIVE) &&
10836 (kfifo_is_empty(&hevc->display_q) &&
10837 buf_level > 0x200)
10838 ) {
10839 if (hevc->error_flag == 0) {
10840 hevc->error_watchdog_count++;
10841 if (hevc->error_watchdog_count ==
10842 error_handle_threshold) {
10843 hevc_print(hevc, 0,
10844 "H265 dec err local reset.\n");
10845 hevc->error_flag = 1;
10846 hevc->error_watchdog_count = 0;
10847 hevc->error_skip_nal_wt_cnt = 0;
10848 hevc->
10849 error_system_watchdog_count++;
10850 WRITE_VREG
10851 (HEVC_ASSIST_MBOX0_IRQ_REG,
10852 0x1);
10853 }
10854 } else if (hevc->error_flag == 2) {
10855 int th =
10856 error_handle_nal_skip_threshold;
10857 hevc->error_skip_nal_wt_cnt++;
10858 if (hevc->error_skip_nal_wt_cnt
10859 == th) {
10860 hevc->error_flag = 3;
10861 hevc->error_watchdog_count = 0;
10862 hevc->
10863 error_skip_nal_wt_cnt = 0;
10864 WRITE_VREG
10865 (HEVC_ASSIST_MBOX0_IRQ_REG,
10866 0x1);
10867 }
10868 }
10869 }
10870 }
10871
10872 if ((get_dbg_flag(hevc)
10873 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10874 /* receiver has no buffer to recycle */
10875 if ((state == RECEIVER_INACTIVE) &&
10876 (kfifo_is_empty(&hevc->display_q))
10877 ) { /* no buffer to recycle */
10878 if ((get_dbg_flag(hevc) &
10879 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10880 0)
10881 hevc->error_system_watchdog_count++;
10882 if (hevc->error_system_watchdog_count ==
10883 error_handle_system_threshold) {
10884 /* and it lasts for a while */
10885 hevc_print(hevc, 0,
10886 "H265 dec fatal error watchdog.\n");
10887 hevc->
10888 error_system_watchdog_count = 0;
10889 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10890 }
10891 }
10892 } else {
10893 hevc->error_watchdog_count = 0;
10894 hevc->error_system_watchdog_count = 0;
10895 }
10896#ifdef MULTI_INSTANCE_SUPPORT
10897 }
10898#endif
10899 if ((hevc->ucode_pause_pos != 0) &&
10900 (hevc->ucode_pause_pos != 0xffffffff) &&
10901 udebug_pause_pos != hevc->ucode_pause_pos) {
10902 hevc->ucode_pause_pos = 0;
10903 WRITE_HREG(DEBUG_REG1, 0);
10904 }
10905
10906 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10907 dump_pic_list(hevc);
10908 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10909 }
10910 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10911 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10912 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10913 }
10914#ifdef TEST_NO_BUF
10915 if (hevc->wait_buf)
10916 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10917#endif
10918 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
10919 hevc->error_skip_nal_count = error_skip_nal_count;
10920 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10921
10922 debug &= ~H265_DEBUG_HW_RESET;
10923 }
10924
10925#ifdef ERROR_HANDLE_DEBUG
10926 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
10927 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
10928 dbg_nal_skip_count &= ~0x10000;
10929 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10930 }
10931#endif
10932
10933 if (radr != 0) {
10934 if (rval != 0) {
10935 WRITE_VREG(radr, rval);
10936 hevc_print(hevc, 0,
10937 "WRITE_VREG(%x,%x)\n", radr, rval);
10938 } else
10939 hevc_print(hevc, 0,
10940 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
10941 rval = 0;
10942 radr = 0;
10943 }
10944 if (dbg_cmd != 0) {
10945 if (dbg_cmd == 1) {
10946 u32 disp_laddr;
10947
10948 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
10949 get_double_write_mode(hevc) == 0) {
10950 disp_laddr =
10951 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
10952 } else {
10953 struct canvas_s cur_canvas;
10954
10955 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
10956 & 0xff), &cur_canvas);
10957 disp_laddr = cur_canvas.addr;
10958 }
10959 hevc_print(hevc, 0,
10960 "current displayed buffer address %x\r\n",
10961 disp_laddr);
10962 }
10963 dbg_cmd = 0;
10964 }
10965 /* don't adjust the clock right after start (wait for >60 shown frames). */
10966 if (hevc->m_ins_flag == 0 &&
10967 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
10968 hevc->frame_dur > 0 && hevc->saved_resolution !=
10969 hevc->frame_width * hevc->frame_height *
10970 (96000 / hevc->frame_dur))
10971 vdec_schedule_work(&hevc->set_clk_work);
10972
10973 mod_timer(timer, jiffies + PUT_INTERVAL);
10974}
10975
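/*
 * Note: kernel thread used on the legacy (non multi-instance) path.  It is
 * woken through h265_sema to build the picture lists once the first
 * sequence header has been parsed (pic_list_init_flag 1 -> 2) and to tear
 * the buffers down when uninit_list is set.
 */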
10976static int h265_task_handle(void *data)
10977{
10978 int ret = 0;
10979 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10980
10981 set_user_nice(current, -10);
10982 while (1) {
10983 if (use_cma == 0) {
10984 hevc_print(hevc, 0,
10985 "ERROR: use_cma can not be changed dynamically\n");
10986 }
10987 ret = down_interruptible(&h265_sema);
10988 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
10989 init_pic_list(hevc);
10990 init_pic_list_hw(hevc);
10991 init_buf_spec(hevc);
10992 hevc->pic_list_init_flag = 2;
10993 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
10994
10995 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10996
10997 }
10998
10999 if (hevc->uninit_list) {
11000 /*USE_BUF_BLOCK*/
11001 uninit_pic_list(hevc);
11002 hevc_print(hevc, 0, "uninit list\n");
11003 hevc->uninit_list = 0;
11004#ifdef USE_UNINIT_SEMA
11005 if (use_cma) {
11006 up(&hevc->h265_uninit_done_sema);
11007 while (!kthread_should_stop())
11008 msleep(1);
11009 break;
11010 }
11011#endif
11012 }
11013 }
11014
11015 return 0;
11016}
11017
11018void vh265_free_cmabuf(void)
11019{
11020 struct hevc_state_s *hevc = gHevc;
11021
11022 mutex_lock(&vh265_mutex);
11023
11024 if (hevc->init_flag) {
11025 mutex_unlock(&vh265_mutex);
11026 return;
11027 }
11028
11029 mutex_unlock(&vh265_mutex);
11030}
11031
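/*
 * Note: report decoder status to the vdec/amstream layer: frame geometry
 * (height doubled when interlaced), frame rate derived from frame_dur, and
 * the statistics accumulated in hevc->gvs.
 */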
11032#ifdef MULTI_INSTANCE_SUPPORT
11033int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
11034#else
11035int vh265_dec_status(struct vdec_info *vstatus)
11036#endif
11037{
11038#ifdef MULTI_INSTANCE_SUPPORT
11039 struct hevc_state_s *hevc =
11040 (struct hevc_state_s *)vdec->private;
11041#else
11042 struct hevc_state_s *hevc = gHevc;
11043#endif
11044 if (!hevc)
11045 return -1;
11046
11047 vstatus->frame_width = hevc->frame_width;
11048 /* for hevc interlace for disp height x2 */
11049 vstatus->frame_height =
11050 (hevc->frame_height << hevc->interlace_flag);
11051 if (hevc->frame_dur != 0)
11052 vstatus->frame_rate = 96000 / hevc->frame_dur;
11053 else
11054 vstatus->frame_rate = -1;
11055 vstatus->error_count = hevc->gvs->error_frame_count;
11056 vstatus->status = hevc->stat | hevc->fatal_error;
11057 vstatus->bit_rate = hevc->gvs->bit_rate;
11058 vstatus->frame_dur = hevc->frame_dur;
11059 if (hevc->gvs) {
11060 vstatus->bit_rate = hevc->gvs->bit_rate;
11061 vstatus->frame_data = hevc->gvs->frame_data;
11062 vstatus->total_data = hevc->gvs->total_data;
11063 vstatus->frame_count = hevc->gvs->frame_count;
11064 vstatus->error_frame_count = hevc->gvs->error_frame_count;
11065 vstatus->drop_frame_count = hevc->gvs->drop_frame_count;
11066 vstatus->samp_cnt = hevc->gvs->samp_cnt;
11067 vstatus->offset = hevc->gvs->offset;
11068 }
11069
11070 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
11071 "%s", DRIVER_NAME);
11072 vstatus->ratio_control = hevc->ratio_control;
11073 return 0;
11074}
11075
11076int vh265_set_isreset(struct vdec_s *vdec, int isreset)
11077{
11078 is_reset = isreset;
11079 return 0;
11080}
11081
11082static int vh265_vdec_info_init(struct hevc_state_s *hevc)
11083{
11084 hevc->gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
11085 //pr_err("[%s line %d] hevc->gvs=0x%p operation\n",__func__, __LINE__, hevc->gvs);
11086 if (NULL == hevc->gvs) {
11087 pr_info("the struct of vdec status malloc failed.\n");
11088 return -ENOMEM;
11089 }
11090 vdec_set_vframe_comm(hw_to_vdec(hevc), DRIVER_NAME);
11091 return 0;
11092}
11093
11094#if 0
11095static void H265_DECODE_INIT(void)
11096{
11097 /* enable hevc clocks */
11098 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
11099 /* *************************************************************** */
11100 /* Power ON HEVC */
11101 /* *************************************************************** */
11102 /* Powerup HEVC */
11103 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
11104 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
11105 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
11106 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
11107 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
11108 /* remove isolations */
11109 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
11110 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
11111
11112}
11113#endif
11114
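/*
 * Note: map the requested trick mode onto the module-level
 * trickmode_i/i_only_flag controls; values 0x02/0x03/0x07 appear to select
 * progressively more aggressive I-only behaviour.
 */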
11115int vh265_set_trickmode(struct vdec_s *vdec, unsigned long trickmode)
11116{
11117 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
11118 hevc_print(hevc, 0, "[%s %d] trickmode:%lu\n", __func__, __LINE__, trickmode);
11119
11120 if (trickmode == TRICKMODE_I) {
11121 trickmode_i = 1;
11122 i_only_flag = 0x1;
11123 } else if (trickmode == TRICKMODE_NONE) {
11124 trickmode_i = 0;
11125 i_only_flag = 0x0;
11126 } else if (trickmode == 0x02) {
11127 trickmode_i = 0;
11128 i_only_flag = 0x02;
11129 } else if (trickmode == 0x03) {
11130 trickmode_i = 1;
11131 i_only_flag = 0x03;
11132 } else if (trickmode == 0x07) {
11133 trickmode_i = 1;
11134 i_only_flag = 0x07;
11135 }
11136 //hevc_print(hevc, 0, "i_only_flag: %d trickmode_i:%d\n", i_only_flag, trickmode_i);
11137
11138 return 0;
11139}
11140
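/*
 * Note: program HEVC_DECODE_MODE/HEVC_DECODE_MODE2 for this instance:
 * single vs. multi instance, frame- vs. stream-based input, and (for Dolby
 * Vision dual-layer pipelines) whether the enhancement layer is decoded or
 * bypassed.  start_parser_type and start_decoding_flag are folded into the
 * mode word, and rps_set_id goes to HEVC_DECODE_MODE2.
 */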
11141static void config_decode_mode(struct hevc_state_s *hevc)
11142{
11143#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11144 struct vdec_s *vdec = hw_to_vdec(hevc);
11145#endif
11146 unsigned decode_mode;
11147#ifdef HEVC_8K_LFTOFFSET_FIX
11148 if (hevc->performance_profile)
11149 WRITE_VREG(NAL_SEARCH_CTL,
11150 READ_VREG(NAL_SEARCH_CTL) | (1 << 21));
11151#endif
11152 if (!hevc->m_ins_flag)
11153 decode_mode = DECODE_MODE_SINGLE;
11154 else if (vdec_frame_based(hw_to_vdec(hevc)))
11155 decode_mode =
11156 DECODE_MODE_MULTI_FRAMEBASE;
11157#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11158 else if (vdec->slave) {
11159 if (force_bypass_dvenl & 0x80000000)
11160 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
11161 else
11162 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
11163 if (dolby_meta_with_el && hevc->bypass_dvenl) {
11164 hevc->bypass_dvenl = 0;
11165 hevc_print(hevc, 0,
11166 "NOT support bypass_dvenl when meta_with_el\n");
11167 }
11168 if (hevc->bypass_dvenl)
11169 decode_mode =
11170 (hevc->start_parser_type << 8)
11171 | DECODE_MODE_MULTI_STREAMBASE;
11172 else
11173 decode_mode =
11174 (hevc->start_parser_type << 8)
11175 | DECODE_MODE_MULTI_DVBAL;
11176 } else if (vdec->master)
11177 decode_mode =
11178 (hevc->start_parser_type << 8)
11179 | DECODE_MODE_MULTI_DVENL;
11180#endif
11181 else
11182 decode_mode =
11183 DECODE_MODE_MULTI_STREAMBASE;
11184
11185 if (hevc->m_ins_flag)
11186 decode_mode |=
11187 (hevc->start_decoding_flag << 16);
11188 /* set MBX0 interrupt flag */
11189 decode_mode |= (0x80 << 24);
11190 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
11191 WRITE_VREG(HEVC_DECODE_MODE2,
11192 hevc->rps_set_id);
11193}
11194
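/*
 * Note: one-time protocol init before the ucode runs: configure the
 * workspace and mailbox interrupts, choose auto vs. manual NAL search
 * (depending on debug flags and PB_skip_mode), then set the decode mode,
 * AUX buffers, and the ucode swap/detrefill handling used on older
 * (<= GXM) SoCs.
 */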
11195static void vh265_prot_init(struct hevc_state_s *hevc)
11196{
11197#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11198 struct vdec_s *vdec = hw_to_vdec(hevc);
11199#endif
11200 /* H265_DECODE_INIT(); */
11201
11202 hevc_config_work_space_hw(hevc);
11203
11204 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
11205
11206 WRITE_VREG(HEVC_WAIT_FLAG, 1);
11207
11208 /* WRITE_VREG(P_HEVC_MPSR, 1); */
11209
11210 /* clear mailbox interrupt */
11211 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
11212
11213 /* enable mailbox interrupt */
11214 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
11215
11216 /* disable PSCALE for hardware sharing */
11217 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
11218
11219 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
11220
11221 if ((get_dbg_flag(hevc) &
11222 (H265_DEBUG_MAN_SKIP_NAL |
11223 H265_DEBUG_MAN_SEARCH_NAL))
11224 /*||hevc->m_ins_flag*/
11225 ) {
11226 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
11227 } else {
11228 /* check vps/sps/pps/i-slice in ucode */
11229 unsigned ctl_val = 0x8;
11230 if (hevc->PB_skip_mode == 0)
11231 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
11232 else if (hevc->PB_skip_mode == 3)
11233 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
11234 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
11235 }
11236 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
11237#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11238 || vdec->master
11239 || vdec->slave
11240#endif
11241 )
11242 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
11243
11244 WRITE_VREG(NAL_SEARCH_CTL,
11245 READ_VREG(NAL_SEARCH_CTL)
11246 | ((parser_sei_enable & 0x7) << 17));
11247#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11248 WRITE_VREG(NAL_SEARCH_CTL,
11249 READ_VREG(NAL_SEARCH_CTL) |
11250 ((parser_dolby_vision_enable & 0x1) << 20));
11251#endif
11252 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
11253
11254 config_decode_mode(hevc);
11255 config_aux_buf(hevc);
11256#ifdef SWAP_HEVC_UCODE
11257 if (!tee_enabled() && hevc->is_swap &&
11258 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11259 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11260 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11261 }
11262#endif
11263#ifdef DETREFILL_ENABLE
11264 if (hevc->is_swap &&
11265 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11266 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
11267 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
11268 }
11269#endif
11270}
11271
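/*
 * Note: software-side init.  It sanity-checks the configured size, derives
 * is_4k and the default frame duration/aspect ratio, honours an I-only
 * trick-mode request, and primes the vframe kfifos before
 * hevc_local_init() sets up the buffer-manager state.
 */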
11272static int vh265_local_init(struct hevc_state_s *hevc)
11273{
11274 int i;
11275 int ret = -1;
11276
11277#ifdef DEBUG_PTS
11278 hevc->pts_missed = 0;
11279 hevc->pts_hit = 0;
11280#endif
11281
11282 hevc->saved_resolution = 0;
11283 hevc->get_frame_dur = false;
11284 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
11285 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
11286 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
11287 pr_info("over size : %u x %u.\n",
11288 hevc->frame_width, hevc->frame_height);
11289 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
11290 return ret;
11291 }
11292
11293 if (hevc->max_pic_w && hevc->max_pic_h) {
11294 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
11295 ((hevc->max_pic_w * hevc->max_pic_h) >
11296 1920 * 1088) ? true : false;
11297 } else {
11298 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
11299 ((hevc->frame_width * hevc->frame_height) >
11300 1920 * 1088) ? true : false;
11301 }
11302
11303 hevc->frame_dur =
11304 (hevc->vh265_amstream_dec_info.rate ==
11305 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
11306 //hevc->gvs->frame_dur = hevc->frame_dur;
11307 if (hevc->frame_width && hevc->frame_height)
11308 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
11309
11310 if (i_only_flag)
11311 hevc->i_only = i_only_flag & 0xff;
11312 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
11313 & 0x08)
11314 hevc->i_only = 0x7;
11315 else
11316 hevc->i_only = 0x0;
11317 hevc->error_watchdog_count = 0;
11318 hevc->sei_present_flag = 0;
11319 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
11320 & 0x40) >> 6;
11321 hevc_print(hevc, 0,
11322 "h265:pts_unstable=%d\n", pts_unstable);
11323/*
11324 *TODO:FOR VERSION
11325 */
11326 hevc_print(hevc, 0,
11327 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
11328 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
11329
11330 if (hevc->frame_dur == 0)
11331 hevc->frame_dur = 96000 / 24;
11332
11333 INIT_KFIFO(hevc->display_q);
11334 INIT_KFIFO(hevc->newframe_q);
11335 INIT_KFIFO(hevc->pending_q);
11336
11337 for (i = 0; i < VF_POOL_SIZE; i++) {
11338 const struct vframe_s *vf = &hevc->vfpool[i];
11339
11340 hevc->vfpool[i].index = -1;
11341 kfifo_put(&hevc->newframe_q, vf);
11342 }
11343
11344
11345 ret = hevc_local_init(hevc);
11346
11347 return ret;
11348}
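/*
 * Note: main bring-up.  Loads (and, on <= GXM with MMU enabled, swaps) the
 * HEVC firmware and sets up timers and work items; on the legacy
 * single-instance path it also programs the hardware, registers the
 * threaded IRQ and the vframe provider.
 */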
11349#ifdef MULTI_INSTANCE_SUPPORT
11350static s32 vh265_init(struct vdec_s *vdec)
11351{
11352 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
11353#else
11354static s32 vh265_init(struct hevc_state_s *hevc)
11355{
11356
11357#endif
11358 int ret, size = -1;
11359 int fw_size = 0x1000 * 16;
11360 struct firmware_s *fw = NULL;
11361
11362 init_timer(&hevc->timer);
11363
11364 hevc->stat |= STAT_TIMER_INIT;
11365
11366 if (hevc->m_ins_flag) {
11367#ifdef USE_UNINIT_SEMA
11368 sema_init(&hevc->h265_uninit_done_sema, 0);
11369#endif
11370 INIT_WORK(&hevc->work, vh265_work);
11371 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
11372 }
11373
11374 if (vh265_local_init(hevc) < 0)
11375 return -EBUSY;
11376
11377 mutex_init(&hevc->chunks_mutex);
11378 INIT_WORK(&hevc->notify_work, vh265_notify_work);
11379 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
11380
11381 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
11382 if (IS_ERR_OR_NULL(fw))
11383 return -ENOMEM;
11384
11385 if (hevc->mmu_enable)
11386 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11387 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
11388 else {
11389 if (!hevc->is_4k) {
11390 /* if an older version of the fw was loaded, */
11391 /* need to try the noswap fw because the */
11392 /* old fw package does not contain the swap fw. */
11393 size = get_firmware_data(
11394 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
11395 if (size < 0)
11396 size = get_firmware_data(
11397 VIDEO_DEC_HEVC_MMU, fw->data);
11398 else if (size)
11399 hevc->is_swap = true;
11400 } else
11401 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
11402 fw->data);
11403 }
11404 else
11405 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
11406
11407 if (size < 0) {
11408 pr_err("get firmware fail.\n");
11409 vfree(fw);
11410 return -1;
11411 }
11412
11413 fw->len = size;
11414
11415#ifdef SWAP_HEVC_UCODE
11416 if (!tee_enabled() && hevc->is_swap &&
11417 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11418 if (hevc->mmu_enable) {
11419 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
11420 hevc->mc_cpu_addr =
11421 dma_alloc_coherent(amports_get_dma_device(),
11422 hevc->swap_size,
11423 &hevc->mc_dma_handle, GFP_KERNEL);
11424 if (!hevc->mc_cpu_addr) {
11425 amhevc_disable();
11426 pr_info("vh265 mmu swap ucode load failed.\n");
11427 return -ENOMEM;
11428 }
11429
11430 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
11431 hevc->swap_size);
11432
11433 hevc_print(hevc, 0,
11434 "vh265 mmu ucode swap loaded %x\n",
11435 hevc->mc_dma_handle);
11436 }
11437 }
11438#endif
11439
11440#ifdef MULTI_INSTANCE_SUPPORT
11441 if (hevc->m_ins_flag) {
11442 hevc->timer.data = (ulong) hevc;
11443 hevc->timer.function = vh265_check_timer_func;
11444 hevc->timer.expires = jiffies + PUT_INTERVAL;
11445
11446 hevc->fw = fw;
11447 hevc->init_flag = 1;
11448
11449 return 0;
11450 }
11451#endif
11452 amhevc_enable();
11453
11454 if (hevc->mmu_enable)
11455 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11456 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
11457 else {
11458 if (!hevc->is_4k) {
11459 /* if an older version of the fw was loaded, */
11460 /* need to try to load the noswap fw because the */
11461 /* old fw package does not contain the swap fw. */
11462 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11463 "hevc_mmu_swap", fw->data);
11464 if (ret < 0)
11465 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11466 "h265_mmu", fw->data);
11467 else
11468 hevc->is_swap = true;
11469 } else
11470 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11471 "h265_mmu", fw->data);
11472 }
11473 else
11474 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
11475
11476 if (ret < 0) {
11477 amhevc_disable();
11478 vfree(fw);
11479 pr_err("H265: the %s fw loading failed, err: %x\n",
11480 tee_enabled() ? "TEE" : "local", ret);
11481 return -EBUSY;
11482 }
11483
11484 vfree(fw);
11485
11486 hevc->stat |= STAT_MC_LOAD;
11487
11488#ifdef DETREFILL_ENABLE
11489 if (hevc->is_swap &&
11490 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11491 init_detrefill_buf(hevc);
11492#endif
11493 /* enable AMRISC side protocol */
11494 vh265_prot_init(hevc);
11495
11496 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
11497 vh265_isr_thread_fn,
11498 IRQF_ONESHOT,/*run thread on this irq disabled*/
11499 "vh265-irq", (void *)hevc)) {
11500 hevc_print(hevc, 0, "vh265 irq register error.\n");
11501 amhevc_disable();
11502 return -ENOENT;
11503 }
11504
11505 hevc->stat |= STAT_ISR_REG;
11506 hevc->provider_name = PROVIDER_NAME;
11507
11508#ifdef MULTI_INSTANCE_SUPPORT
11509 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
11510 &vh265_vf_provider, vdec);
11511 vf_reg_provider(&vh265_vf_prov);
11512 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
11513 NULL);
11514 if (hevc->frame_dur != 0) {
11515 if (!is_reset) {
11516 vf_notify_receiver(hevc->provider_name,
11517 VFRAME_EVENT_PROVIDER_FR_HINT,
11518 (void *)
11519 ((unsigned long)hevc->frame_dur));
11520 fr_hint_status = VDEC_HINTED;
11521 }
11522 } else
11523 fr_hint_status = VDEC_NEED_HINT;
11524#else
11525 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
11526 hevc);
11527 vf_reg_provider(&vh265_vf_prov);
11528 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
11529 if (hevc->frame_dur != 0) {
11530 vf_notify_receiver(PROVIDER_NAME,
11531 VFRAME_EVENT_PROVIDER_FR_HINT,
11532 (void *)
11533 ((unsigned long)hevc->frame_dur));
11534 fr_hint_status = VDEC_HINTED;
11535 } else
11536 fr_hint_status = VDEC_NEED_HINT;
11537#endif
11538 hevc->stat |= STAT_VF_HOOK;
11539
11540 hevc->timer.data = (ulong) hevc;
11541 hevc->timer.function = vh265_check_timer_func;
11542 hevc->timer.expires = jiffies + PUT_INTERVAL;
11543
11544 add_timer(&hevc->timer);
11545
11546 hevc->stat |= STAT_TIMER_ARM;
11547
11548 if (use_cma) {
11549#ifdef USE_UNINIT_SEMA
11550 sema_init(&hevc->h265_uninit_done_sema, 0);
11551#endif
11552 if (h265_task == NULL) {
11553 sema_init(&h265_sema, 1);
11554 h265_task =
11555 kthread_run(h265_task_handle, hevc,
11556 "kthread_h265");
11557 }
11558 }
11559 /* hevc->stat |= STAT_KTHREAD; */
11560#if 0
11561 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11562 hevc_print(hevc, 0, "%s force clk\n", __func__);
11563 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11564 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11565 ((1 << 2) | (1 << 1)));
11566 WRITE_VREG(HEVC_DBLK_CFG0,
11567 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11568 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11569 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11570 (1 << 2)); /* 2 */
11571 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11572 (1 << 24)); /* 24 */
11573 WRITE_VREG(HEVC_STREAM_CONTROL,
11574 READ_VREG(HEVC_STREAM_CONTROL) |
11575 (1 << 15)); /* 15 */
11576 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11577 (1 << 13)); /* 13 */
11578 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11579 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11580 (1 << 15)); /* 15 */
11581 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11582 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11583 (1 << 15)); /* 15 */
11584 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11585 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11586 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11587 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11588 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11589 (1 << 3)); /* 3 */
11590 }
11591#endif
11592#ifdef SWAP_HEVC_UCODE
11593 if (!tee_enabled() && hevc->is_swap &&
11594 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11595 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11596 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11597 }
11598#endif
11599
11600#ifndef MULTI_INSTANCE_SUPPORT
11601 set_vdec_func(&vh265_dec_status);
11602#endif
11603 amhevc_start();
11604 hevc->stat |= STAT_VDEC_RUN;
11605 hevc->init_flag = 1;
11606 error_handle_threshold = 30;
11607 /* pr_info("%d, vh265_init, RP=0x%x\n",
11608 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11609 */
11610
11611 return 0;
11612}
11613
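/* Single-instance teardown: optionally wait for the current slice to finish,
 * stop the AMRISC core, release the irq/timer/vframe-provider hooks and the
 * working buffers, then free the decoder statistics. */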
11614static int vh265_stop(struct hevc_state_s *hevc)
11615{
11616 if (get_dbg_flag(hevc) &
11617 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11618 int wait_timeout_count = 0;
11619
11620 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11621 HEVC_CODED_SLICE_SEGMENT_DAT &&
11622 wait_timeout_count < 10){
11623 wait_timeout_count++;
11624 msleep(20);
11625 }
11626 }
11627 if (hevc->stat & STAT_VDEC_RUN) {
11628 amhevc_stop();
11629 hevc->stat &= ~STAT_VDEC_RUN;
11630 }
11631
11632 if (hevc->stat & STAT_ISR_REG) {
11633#ifdef MULTI_INSTANCE_SUPPORT
11634 if (!hevc->m_ins_flag)
11635#endif
11636 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11637 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11638 hevc->stat &= ~STAT_ISR_REG;
11639 }
11640
11641 hevc->stat &= ~STAT_TIMER_INIT;
11642 if (hevc->stat & STAT_TIMER_ARM) {
11643 del_timer_sync(&hevc->timer);
11644 hevc->stat &= ~STAT_TIMER_ARM;
11645 }
11646
11647 if (hevc->stat & STAT_VF_HOOK) {
11648 if (fr_hint_status == VDEC_HINTED) {
11649 vf_notify_receiver(hevc->provider_name,
11650 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11651 NULL);
11652 }
11653 fr_hint_status = VDEC_NO_NEED_HINT;
11654 vf_unreg_provider(&vh265_vf_prov);
11655 hevc->stat &= ~STAT_VF_HOOK;
11656 }
11657
11658 hevc_local_uninit(hevc);
11659
11660 if (use_cma) {
11661 hevc->uninit_list = 1;
11662 up(&h265_sema);
11663#ifdef USE_UNINIT_SEMA
11664 down(&hevc->h265_uninit_done_sema);
11665 if (!IS_ERR(h265_task)) {
11666 kthread_stop(h265_task);
11667 h265_task = NULL;
11668 }
11669#else
11670 while (hevc->uninit_list) /* wait uninit complete */
11671 msleep(20);
11672#endif
11673
11674 }
11675 hevc->init_flag = 0;
11676 hevc->first_sc_checked = 0;
11677 cancel_work_sync(&hevc->notify_work);
11678 cancel_work_sync(&hevc->set_clk_work);
11679 uninit_mmu_buffers(hevc);
11680 amhevc_disable();
11681
11682 //pr_err("[%s line %d] hevc->gvs=0x%p operation\n",__func__, __LINE__, hevc->gvs);
11683 if (hevc->gvs)
11684 kfree(hevc->gvs);
11685 hevc->gvs = NULL;
11686
11687 return 0;
11688}
11689
11690#ifdef MULTI_INSTANCE_SUPPORT
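/* Decode watchdog helpers: track how long the current hardware decode has
 * been running so the timer callback can detect a stalled decode. */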
11691static void reset_process_time(struct hevc_state_s *hevc)
11692{
11693 if (hevc->start_process_time) {
11694 unsigned int process_time =
11695 1000 * (jiffies - hevc->start_process_time) / HZ;
11696 hevc->start_process_time = 0;
11697 if (process_time > max_process_time[hevc->index])
11698 max_process_time[hevc->index] = process_time;
11699 }
11700}
11701
11702static void start_process_time(struct hevc_state_s *hevc)
11703{
11704 hevc->start_process_time = jiffies;
11705 hevc->decode_timeout_count = 2;
11706 hevc->last_lcu_idx = 0;
11707}
11708
11709static void restart_process_time(struct hevc_state_s *hevc)
11710{
11711 hevc->start_process_time = jiffies;
11712 hevc->decode_timeout_count = 2;
11713}
11714
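/* Called when the decode watchdog expires: stop the core, mark the current
 * picture as done and defer the rest of the recovery to the timeout work. */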
11715static void timeout_process(struct hevc_state_s *hevc)
11716{
11717 /*
11718 * If vh265_work has already arrived at this timeout point,
11719 * let it handle the scenario.
11720 */
11721 if (work_pending(&hevc->work))
11722 return;
11723
11724 hevc->timeout_num++;
11725 amhevc_stop();
11726 read_decode_info(hevc);
11727
11728 hevc_print(hevc,
11729 0, "%s decoder timeout\n", __func__);
11730 check_pic_decoded_error(hevc,
11731 hevc->pic_decoded_lcu_idx);
11732 hevc->decoded_poc = hevc->curr_POC;
11733 hevc->decoding_pic = NULL;
11734 hevc->dec_result = DEC_RESULT_DONE;
11735 reset_process_time(hevc);
11736
11737 if (work_pending(&hevc->work))
11738 return;
11739 vdec_schedule_work(&hevc->timeout_work);
11740}
11741
11742#ifdef CONSTRAIN_MAX_BUF_NUM
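/* Buffer accounting helpers used by run_ready() to limit how many DPB
 * buffers can be held by the display path at any one time. */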
11743static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11744{
11745 struct PIC_s *pic;
11746 int i;
11747 int count = 0;
11748 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11749 pic = hevc->m_PIC[i];
11750 if (pic == NULL || pic->index == -1)
11751 continue;
11752 if (pic->output_mark == 0 && pic->referenced == 0
11753 && pic->output_ready == 1)
11754 count++;
11755 }
11756
11757 return count;
11758}
11759
11760static int get_used_buf_count(struct hevc_state_s *hevc)
11761{
11762 struct PIC_s *pic;
11763 int i;
11764 int count = 0;
11765 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11766 pic = hevc->m_PIC[i];
11767 if (pic == NULL || pic->index == -1)
11768 continue;
11769 if (pic->output_mark != 0 || pic->referenced != 0
11770 || pic->output_ready != 0)
11771 count++;
11772 }
11773
11774 return count;
11775}
11776#endif
11777
11778
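/* Check whether a free DPB slot exists for the next decode; if none and the
 * receiver is inactive, try to reclaim or flush error pictures. */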
11779static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11780{
11781 struct PIC_s *new_pic = NULL;
11782 struct PIC_s *pic;
11783 /* recycle un-used pic */
11784 int i;
11785 int ref_pic = 0;
11786 struct vdec_s *vdec = hw_to_vdec(hevc);
11787 /*return 1 if pic_list is not initialized yet*/
11788 if (hevc->pic_list_init_flag != 3)
11789 return 1;
11790
11791 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11792 pic = hevc->m_PIC[i];
11793 if (pic == NULL || pic->index == -1)
11794 continue;
11795 if (pic->referenced == 1)
11796 ref_pic++;
11797 if (pic->output_mark == 0 && pic->referenced == 0
11798 && pic->output_ready == 0
11799 && pic->vf_ref == 0
11800 ) {
11801 if (new_pic) {
11802 if (pic->POC < new_pic->POC)
11803 new_pic = pic;
11804 } else
11805 new_pic = pic;
11806 }
11807 }
11808 if (new_pic == NULL) {
11809 enum receviver_start_e state = RECEIVER_INACTIVE;
11810 if (vf_get_receiver(vdec->vf_provider_name)) {
11811 state =
11812 vf_notify_receiver(vdec->vf_provider_name,
11813 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11814 NULL);
11815 if ((state == RECEIVER_STATE_NULL)
11816 || (state == RECEIVER_STATE_NONE))
11817 state = RECEIVER_INACTIVE;
11818 }
11819 if (state == RECEIVER_INACTIVE) {
11820 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11821 int poc = INVALID_POC;
11822 pic = hevc->m_PIC[i];
11823 if (pic == NULL || pic->index == -1)
11824 continue;
11825 if ((pic->referenced == 0) &&
11826 (pic->error_mark == 1) &&
11827 (pic->output_mark == 1)) {
11828 if (poc == INVALID_POC || (pic->POC < poc)) {
11829 new_pic = pic;
11830 poc = pic->POC;
11831 }
11832 }
11833 }
11834 if (new_pic) {
11835 new_pic->referenced = 0;
11836 new_pic->output_mark = 0;
11837 put_mv_buf(hevc, new_pic);
11838 hevc_print(hevc, 0, "force release error pic %d receive_state %d\n", new_pic->POC, state);
11839 } else {
11840 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11841 pic = hevc->m_PIC[i];
11842 if (pic == NULL || pic->index == -1)
11843 continue;
11844 if ((pic->referenced == 1) && (pic->error_mark == 1)) {
11845 flush_output(hevc, pic);
11846 hevc_print(hevc, 0, "DPB error, need force flush, receive_state %d\n", state);
11847 break;
11848 }
11849 }
11850 }
11851 }
11852 }
11853 return (new_pic != NULL) ? 1 : 0;
11854}
11855
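/* Multi-instance teardown: stop the timer, core and irq, unhook the vframe
 * provider, free the picture list via the work queue, then release the mmu
 * buffers and the firmware copy. */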
11856static int vmh265_stop(struct hevc_state_s *hevc)
11857{
11858 if (hevc->stat & STAT_TIMER_ARM) {
11859 del_timer_sync(&hevc->timer);
11860 hevc->stat &= ~STAT_TIMER_ARM;
11861 }
11862 if (hevc->stat & STAT_VDEC_RUN) {
11863 amhevc_stop();
11864 hevc->stat &= ~STAT_VDEC_RUN;
11865 }
11866 if (hevc->stat & STAT_ISR_REG) {
11867 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11868 hevc->stat &= ~STAT_ISR_REG;
11869 }
11870
11871 if (hevc->stat & STAT_VF_HOOK) {
11872 if (fr_hint_status == VDEC_HINTED)
11873 vf_notify_receiver(hevc->provider_name,
11874 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11875 NULL);
11876 fr_hint_status = VDEC_NO_NEED_HINT;
11877 vf_unreg_provider(&vh265_vf_prov);
11878 hevc->stat &= ~STAT_VF_HOOK;
11879 }
11880
11881 hevc_local_uninit(hevc);
11882
11883 if (hevc->gvs)
11884 kfree(hevc->gvs);
11885 hevc->gvs = NULL;
11886
11887 if (use_cma) {
11888 hevc->uninit_list = 1;
11889 reset_process_time(hevc);
11890 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
11891 vdec_schedule_work(&hevc->work);
11892 flush_work(&hevc->work);
11893#ifdef USE_UNINIT_SEMA
11894 if (hevc->init_flag) {
11895 down(&hevc->h265_uninit_done_sema);
11896 }
11897#else
11898 while (hevc->uninit_list) /* wait uninit complete */
11899 msleep(20);
11900#endif
11901 }
11902 hevc->init_flag = 0;
11903 hevc->first_sc_checked = 0;
11904 cancel_work_sync(&hevc->notify_work);
11905 cancel_work_sync(&hevc->set_clk_work);
11906 cancel_work_sync(&hevc->timeout_work);
11907 cancel_work_sync(&hevc->work);
11908 uninit_mmu_buffers(hevc);
11909
11910 vfree(hevc->fw);
11911 hevc->fw = NULL;
11912
11913 dump_log(hevc);
11914 return 0;
11915}
11916
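/* Debug helper: byte-sum the current input chunk so frame data can be
 * cross-checked in the logs. */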
11917static unsigned char get_data_check_sum
11918 (struct hevc_state_s *hevc, int size)
11919{
11920 int jj;
11921 int sum = 0;
11922 u8 *data = NULL;
11923
11924 if (!hevc->chunk->block->is_mapped)
11925 data = codec_mm_vmap(hevc->chunk->block->start +
11926 hevc->chunk->offset, size);
11927 else
11928 data = ((u8 *)hevc->chunk->block->start_virt) +
11929 hevc->chunk->offset;
11930
11931 for (jj = 0; jj < size; jj++)
11932 sum += data[jj];
11933
11934 if (!hevc->chunk->block->is_mapped)
11935 codec_mm_unmap_phyaddr(data);
11936 return sum;
11937}
11938
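/* Deferred notify work: send the frame-rate hint to the vframe receiver
 * once the frame duration is known. */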
11939static void vh265_notify_work(struct work_struct *work)
11940{
11941 struct hevc_state_s *hevc =
11942 container_of(work,
11943 struct hevc_state_s,
11944 notify_work);
11945 struct vdec_s *vdec = hw_to_vdec(hevc);
11946#ifdef MULTI_INSTANCE_SUPPORT
11947 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
11948 vf_notify_receiver(hevc->provider_name,
11949 VFRAME_EVENT_PROVIDER_FR_HINT,
11950 (void *)
11951 ((unsigned long)hevc->frame_dur));
11952 vdec->fr_hint_state = VDEC_HINTED;
11953 } else if (fr_hint_status == VDEC_NEED_HINT) {
11954 vf_notify_receiver(hevc->provider_name,
11955 VFRAME_EVENT_PROVIDER_FR_HINT,
11956 (void *)
11957 ((unsigned long)hevc->frame_dur));
11958 fr_hint_status = VDEC_HINTED;
11959 }
11960#else
11961 if (fr_hint_status == VDEC_NEED_HINT) {
11962 vf_notify_receiver(PROVIDER_NAME,
11963 VFRAME_EVENT_PROVIDER_FR_HINT,
11964 (void *)
11965 ((unsigned long)hevc->frame_dur));
11966 fr_hint_status = VDEC_HINTED;
11967 }
11968#endif
11969
11970 return;
11971}
11972
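/* Bottom half of the decoder: dispatch on hevc->dec_result (get data, done,
 * again, EOS, force exit), recycle the input chunk and hand the HEVC core
 * back to the vdec scheduler. */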
11973static void vh265_work_implement(struct hevc_state_s *hevc,
11974 struct vdec_s *vdec,int from)
11975{
11976 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
11977 /*USE_BUF_BLOCK*/
11978 uninit_pic_list(hevc);
11979 hevc->uninit_list = 0;
11980#ifdef USE_UNINIT_SEMA
11981 up(&hevc->h265_uninit_done_sema);
11982#endif
11983 return;
11984 }
11985
11986 /* finished decoding one frame or error,
11987 * notify vdec core to switch context
11988 */
11989 if (hevc->pic_list_init_flag == 1
11990 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
11991 hevc->pic_list_init_flag = 2;
11992 init_pic_list(hevc);
11993 init_pic_list_hw(hevc);
11994 init_buf_spec(hevc);
11995 hevc_print(hevc, 0,
11996 "set pic_list_init_flag to 2\n");
11997
11998 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11999 return;
12000 }
12001
12002 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12003 "%s dec_result %d %x %x %x\n",
12004 __func__,
12005 hevc->dec_result,
12006 READ_VREG(HEVC_STREAM_LEVEL),
12007 READ_VREG(HEVC_STREAM_WR_PTR),
12008 READ_VREG(HEVC_STREAM_RD_PTR));
12009
12010 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
12011 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
12012 && (hw_to_vdec(hevc)->next_status !=
12013 VDEC_STATUS_DISCONNECTED)) {
12014 if (!vdec_has_more_input(vdec)) {
12015 hevc->dec_result = DEC_RESULT_EOS;
12016 vdec_schedule_work(&hevc->work);
12017 return;
12018 }
12019 if (!input_frame_based(vdec)) {
12020 int r = vdec_sync_input(vdec);
12021 if (r >= 0x200) {
12022 WRITE_VREG(HEVC_DECODE_SIZE,
12023 READ_VREG(HEVC_DECODE_SIZE) + r);
12024
12025 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12026 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
12027 __func__,
12028 READ_VREG(HEVC_STREAM_LEVEL),
12029 READ_VREG(HEVC_STREAM_WR_PTR),
12030 READ_VREG(HEVC_STREAM_RD_PTR),
12031 READ_VREG(HEVC_MPC_E), r);
12032
12033 start_process_time(hevc);
12034 if (READ_VREG(HEVC_DEC_STATUS_REG)
12035 == HEVC_DECODE_BUFEMPTY2)
12036 WRITE_VREG(HEVC_DEC_STATUS_REG,
12037 HEVC_ACTION_DONE);
12038 else
12039 WRITE_VREG(HEVC_DEC_STATUS_REG,
12040 HEVC_ACTION_DEC_CONT);
12041 } else {
12042 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
12043 vdec_schedule_work(&hevc->work);
12044 }
12045 return;
12046 }
12047
12048 /*below for frame_base*/
12049 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
12050 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12051 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
12052 __func__,
12053 READ_VREG(HEVC_STREAM_LEVEL),
12054 READ_VREG(HEVC_STREAM_WR_PTR),
12055 READ_VREG(HEVC_STREAM_RD_PTR),
12056 READ_VREG(HEVC_MPC_E));
12057 mutex_lock(&hevc->chunks_mutex);
12058 vdec_vframe_dirty(vdec, hevc->chunk);
12059 hevc->chunk = NULL;
12060 mutex_unlock(&hevc->chunks_mutex);
12061 vdec_clean_input(vdec);
12062 }
12063
12064 /*if (is_new_pic_available(hevc)) {*/
12065 if (run_ready(vdec, VDEC_HEVC)) {
12066 int r;
12067 int decode_size;
12068 r = vdec_prepare_input(vdec, &hevc->chunk);
12069 if (r < 0) {
12070 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
12071
12072 hevc_print(hevc,
12073 PRINT_FLAG_VDEC_DETAIL,
12074 "amvdec_vh265: Insufficient data\n");
12075
12076 vdec_schedule_work(&hevc->work);
12077 return;
12078 }
12079 hevc->dec_result = DEC_RESULT_NONE;
12080 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12081 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
12082 __func__, r,
12083 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
12084 get_data_check_sum(hevc, r) : 0,
12085 READ_VREG(HEVC_MPC_E));
12086
12087 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
12088 int jj;
12089 u8 *data = NULL;
12090
12091 if (!hevc->chunk->block->is_mapped)
12092 data = codec_mm_vmap(
12093 hevc->chunk->block->start +
12094 hevc->chunk->offset, r);
12095 else
12096 data = ((u8 *)
12097 hevc->chunk->block->start_virt)
12098 + hevc->chunk->offset;
12099
12100 for (jj = 0; jj < r; jj++) {
12101 if ((jj & 0xf) == 0)
12102 hevc_print(hevc,
12103 PRINT_FRAMEBASE_DATA,
12104 "%06x:", jj);
12105 hevc_print_cont(hevc,
12106 PRINT_FRAMEBASE_DATA,
12107 "%02x ", data[jj]);
12108 if (((jj + 1) & 0xf) == 0)
12109 hevc_print_cont(hevc,
12110 PRINT_FRAMEBASE_DATA,
12111 "\n");
12112 }
12113
12114 if (!hevc->chunk->block->is_mapped)
12115 codec_mm_unmap_phyaddr(data);
12116 }
12117
12118 decode_size = hevc->chunk->size +
12119 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12120 WRITE_VREG(HEVC_DECODE_SIZE,
12121 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
12122
12123 vdec_enable_input(vdec);
12124
12125 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12126 "%s: mpc %x\n",
12127 __func__, READ_VREG(HEVC_MPC_E));
12128
12129 start_process_time(hevc);
12130 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12131 } else{
12132 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
12133
12134 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12135 * "amvdec_vh265: Insufficient data\n");
12136 */
12137
12138 vdec_schedule_work(&hevc->work);
12139 }
12140 return;
12141 } else if (hevc->dec_result == DEC_RESULT_DONE) {
12142 /* if (!hevc->ctx_valid)
12143 hevc->ctx_valid = 1; */
12144 decode_frame_count[hevc->index]++;
12145#ifdef DETREFILL_ENABLE
12146 if (hevc->is_swap &&
12147 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
12148 if (hevc->delrefill_check == 2) {
12149 delrefill(hevc);
12150 amhevc_stop();
12151 }
12152 }
12153#endif
12154 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
12155 hevc->used_4k_num =
12156 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
12157 if (hevc->used_4k_num >= 0 &&
12158 hevc->cur_pic &&
12159 hevc->cur_pic->scatter_alloc
12160 == 1) {
12161 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
12162 "%s pic index %d scatter_alloc %d page_start %d\n",
12163 "decoder_mmu_box_free_idx_tail",
12164 hevc->cur_pic->index,
12165 hevc->cur_pic->scatter_alloc,
12166 hevc->used_4k_num);
12167 if (hevc->m_ins_flag)
12168 hevc_mmu_dma_check(hw_to_vdec(hevc));
12169 decoder_mmu_box_free_idx_tail(
12170 hevc->mmu_box,
12171 hevc->cur_pic->index,
12172 hevc->used_4k_num);
12173 hevc->cur_pic->scatter_alloc = 2;
12174 }
12175 }
12176 hevc->pic_decoded_lcu_idx =
12177 READ_VREG(HEVC_PARSER_LCU_START)
12178 & 0xffffff;
12179
12180 if (vdec->master == NULL && vdec->slave == NULL &&
12181 hevc->empty_flag == 0) {
12182 hevc->over_decode =
12183 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
12184 if (hevc->over_decode)
12185 hevc_print(hevc, 0,
12186 "!!!Over decode\n");
12187 }
12188
12189 if (is_log_enable(hevc))
12190 add_log(hevc,
12191 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
12192 __func__,
12193 hevc->dec_result,
12194 hevc->pic_decoded_lcu_idx,
12195 hevc->used_4k_num,
12196 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
12197 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
12198 hevc->start_shift_bytes
12199 );
12200
12201 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12202 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
12203 __func__,
12204 hevc->dec_result,
12205 READ_VREG(HEVC_STREAM_LEVEL),
12206 READ_VREG(HEVC_STREAM_WR_PTR),
12207 READ_VREG(HEVC_STREAM_RD_PTR),
12208 hevc->pic_decoded_lcu_idx,
12209 hevc->used_4k_num,
12210 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
12211 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
12212 hevc->start_shift_bytes
12213 );
12214
12215 hevc->used_4k_num = -1;
12216
12217 check_pic_decoded_error(hevc,
12218 hevc->pic_decoded_lcu_idx);
12219#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12220#if 1
12221 if (vdec->slave) {
12222 if (dv_debug & 0x1)
12223 vdec_set_flag(vdec->slave,
12224 VDEC_FLAG_SELF_INPUT_CONTEXT);
12225 else
12226 vdec_set_flag(vdec->slave,
12227 VDEC_FLAG_OTHER_INPUT_CONTEXT);
12228 }
12229#else
12230 if (vdec->slave) {
12231 if (no_interleaved_el_slice)
12232 vdec_set_flag(vdec->slave,
12233 VDEC_FLAG_INPUT_KEEP_CONTEXT);
12234 /* this will move real HW pointer for input */
12235 else
12236 vdec_set_flag(vdec->slave, 0);
12237 /* this will not move real HW pointer
12238 *and SL layer decoding
12239 *will start from same stream position
12240 *as current BL decoder
12241 */
12242 }
12243#endif
12244#endif
12245#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12246 hevc->shift_byte_count_lo
12247 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12248 if (vdec->slave) {
12249 /*cur is base, found enhance*/
12250 struct hevc_state_s *hevc_el =
12251 (struct hevc_state_s *)
12252 vdec->slave->private;
12253 if (hevc_el)
12254 hevc_el->shift_byte_count_lo =
12255 hevc->shift_byte_count_lo;
12256 } else if (vdec->master) {
12257 /*cur is enhance, found base*/
12258 struct hevc_state_s *hevc_ba =
12259 (struct hevc_state_s *)
12260 vdec->master->private;
12261 if (hevc_ba)
12262 hevc_ba->shift_byte_count_lo =
12263 hevc->shift_byte_count_lo;
12264 }
12265#endif
12266 mutex_lock(&hevc->chunks_mutex);
12267 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12268 hevc->chunk = NULL;
12269 mutex_unlock(&hevc->chunks_mutex);
12270 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
12271 /*
12272 stream base: stream buf empty or timeout
12273 frame base: vdec_prepare_input fail
12274 */
12275 if (!vdec_has_more_input(vdec)) {
12276 hevc->dec_result = DEC_RESULT_EOS;
12277 vdec_schedule_work(&hevc->work);
12278 return;
12279 }
12280#ifdef AGAIN_HAS_THRESHOLD
12281 hevc->next_again_flag = 1;
12282#endif
12283 } else if (hevc->dec_result == DEC_RESULT_EOS) {
12284 struct PIC_s *pic;
12285 hevc->eos = 1;
12286#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12287 if ((vdec->master || vdec->slave) &&
12288 aux_data_is_avaible(hevc))
12289 dolby_get_meta(hevc);
12290#endif
12291 check_pic_decoded_error(hevc,
12292 hevc->pic_decoded_lcu_idx);
12293 pic = get_pic_by_POC(hevc, hevc->curr_POC);
12294 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12295 "%s: end of stream, last dec poc %d => 0x%pf\n",
12296 __func__, hevc->curr_POC, pic);
12297 flush_output(hevc, pic);
12298
12299 if (hevc->is_used_v4l)
12300 notify_v4l_eos(hw_to_vdec(hevc));
12301#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12302 hevc->shift_byte_count_lo
12303 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12304 if (vdec->slave) {
12305 /*cur is base, found enhance*/
12306 struct hevc_state_s *hevc_el =
12307 (struct hevc_state_s *)
12308 vdec->slave->private;
12309 if (hevc_el)
12310 hevc_el->shift_byte_count_lo =
12311 hevc->shift_byte_count_lo;
12312 } else if (vdec->master) {
12313 /*cur is enhance, found base*/
12314 struct hevc_state_s *hevc_ba =
12315 (struct hevc_state_s *)
12316 vdec->master->private;
12317 if (hevc_ba)
12318 hevc_ba->shift_byte_count_lo =
12319 hevc->shift_byte_count_lo;
12320 }
12321#endif
12322 mutex_lock(&hevc->chunks_mutex);
12323 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12324 hevc->chunk = NULL;
12325 mutex_unlock(&hevc->chunks_mutex);
12326 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
12327 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12328 "%s: force exit\n",
12329 __func__);
12330 if (hevc->stat & STAT_VDEC_RUN) {
12331 amhevc_stop();
12332 hevc->stat &= ~STAT_VDEC_RUN;
12333 }
12334 if (hevc->stat & STAT_ISR_REG) {
12335 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
12336 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
12337 hevc->stat &= ~STAT_ISR_REG;
12338 }
12339 hevc_print(hevc, 0, "%s: force exit end\n",
12340 __func__);
12341 }
12342
12343 if (hevc->stat & STAT_VDEC_RUN) {
12344 amhevc_stop();
12345 hevc->stat &= ~STAT_VDEC_RUN;
12346 }
12347
12348 if (hevc->stat & STAT_TIMER_ARM) {
12349 del_timer_sync(&hevc->timer);
12350 hevc->stat &= ~STAT_TIMER_ARM;
12351 }
12352
12353 wait_hevc_search_done(hevc);
12354#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12355 if (hevc->switch_dvlayer_flag) {
12356 if (vdec->slave)
12357 vdec_set_next_sched(vdec, vdec->slave);
12358 else if (vdec->master)
12359 vdec_set_next_sched(vdec, vdec->master);
12360 } else if (vdec->slave || vdec->master)
12361 vdec_set_next_sched(vdec, vdec);
12362#endif
12363
12364 if (from == 1) {
12365 /* This is a timeout work */
12366 if (work_pending(&hevc->work)) {
12367 /*
12368 * The vh265_work arrives at the last second,
12369 * give it a chance to handle the scenario.
12370 */
12371 return;
12372 //cancel_work_sync(&hevc->work);//reserved for future consideration
12373 }
12374 }
12375
12376 /* mark itself has all HW resource released and input released */
12377 if (vdec->parallel_dec == 1)
12378 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
12379 else
12380 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12381
12382 if (hevc->is_used_v4l) {
12383 struct aml_vcodec_ctx *ctx =
12384 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12385
12386 if (ctx->param_sets_from_ucode &&
12387 !hevc->v4l_params_parsed)
12388 vdec_v4l_write_frame_sync(ctx);
12389 }
12390
12391 if (hevc->vdec_cb)
12392 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
12393}
12394
12395static void vh265_work(struct work_struct *work)
12396{
12397 struct hevc_state_s *hevc = container_of(work,
12398 struct hevc_state_s, work);
12399 struct vdec_s *vdec = hw_to_vdec(hevc);
12400
12401 vh265_work_implement(hevc, vdec, 0);
12402}
12403
12404static void vh265_timeout_work(struct work_struct *work)
12405{
12406 struct hevc_state_s *hevc = container_of(work,
12407 struct hevc_state_s, timeout_work);
12408 struct vdec_s *vdec = hw_to_vdec(hevc);
12409
12410 if (work_pending(&hevc->work))
12411 return;
12412 vh265_work_implement(hevc, vdec, 1);
12413}
12414
12415
12416static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
12417{
12418 /* new to do ... */
12419 vh265_prot_init(hevc);
12420 return 0;
12421}
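/* Scheduler callback: return the core mask this instance can run on, or 0
 * when input, stream level, free DPB slots or v4l2 buffers are not ready. */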
12422static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
12423{
12424 struct hevc_state_s *hevc =
12425 (struct hevc_state_s *)vdec->private;
12426 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
12427 CODEC_MM_FLAGS_TVP : 0;
12428 bool ret = 0;
12429 if (step == 0x12)
12430 return 0;
12431 else if (step == 0x11)
12432 step = 0x12;
12433
12434 if (hevc->eos)
12435 return 0;
12436 if (!hevc->first_sc_checked && hevc->mmu_enable) {
12437 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
12438 hevc->first_sc_checked =1;
12439 hevc_print(hevc, 0,
12440 "vh265 cached=%d need_size=%d speed= %d ms\n",
12441 size, (hevc->need_cache_size >> PAGE_SHIFT),
12442 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
12443 }
12444 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
12445 && pre_decode_buf_level != 0) {
12446 u32 rp, wp, level;
12447
12448 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
12449 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
12450 if (wp < rp)
12451 level = vdec->input.size + wp - rp;
12452 else
12453 level = wp - rp;
12454
12455 if (level < pre_decode_buf_level)
12456 return 0;
12457 }
12458
12459#ifdef AGAIN_HAS_THRESHOLD
12460 if (hevc->next_again_flag &&
12461 (!vdec_frame_based(vdec))) {
12462 u32 parser_wr_ptr =
12463 READ_PARSER_REG(PARSER_VIDEO_WP);
12464 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
12465 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
12466 again_threshold) {
12467 int r = vdec_sync_input(vdec);
12468 hevc_print(hevc,
12469 PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
12470 return 0;
12471 }
12472 }
12473#endif
12474
12475 if (disp_vframe_valve_level &&
12476 kfifo_len(&hevc->display_q) >=
12477 disp_vframe_valve_level) {
12478 hevc->valve_count--;
12479 if (hevc->valve_count <= 0)
12480 hevc->valve_count = 2;
12481 else
12482 return 0;
12483 }
12484
12485 ret = is_new_pic_available(hevc);
12486 if (!ret) {
12487 hevc_print(hevc,
12488 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
12489 __func__, ret);
12490 }
12491
12492#ifdef CONSTRAIN_MAX_BUF_NUM
12493 if (hevc->pic_list_init_flag == 3) {
12494 if (run_ready_max_vf_only_num > 0 &&
12495 get_vf_ref_only_buf_count(hevc) >=
12496 run_ready_max_vf_only_num
12497 )
12498 ret = 0;
12499 if (run_ready_display_q_num > 0 &&
12500 kfifo_len(&hevc->display_q) >=
12501 run_ready_display_q_num)
12502 ret = 0;
12503
12504 /*avoid more buffers consumed when
12505 switching resolution*/
12506 if (run_ready_max_buf_num == 0xff &&
12507 get_used_buf_count(hevc) >=
12508 get_work_pic_num(hevc))
12509 ret = 0;
12510 else if (run_ready_max_buf_num &&
12511 get_used_buf_count(hevc) >=
12512 run_ready_max_buf_num)
12513 ret = 0;
12514 }
12515#endif
12516
12517 if (hevc->is_used_v4l) {
12518 struct aml_vcodec_ctx *ctx =
12519 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12520
12521 if (ctx->param_sets_from_ucode) {
12522 if (hevc->v4l_params_parsed) {
12523 if (!ctx->v4l_codec_dpb_ready &&
12524 v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
12525 run_ready_min_buf_num)
12526 ret = 0;
12527 } else {
12528 if ((hevc->res_ch_flag == 1) &&
12529 ((ctx->state <= AML_STATE_INIT) ||
12530 (ctx->state >= AML_STATE_FLUSHING)))
12531 ret = 0;
12532 }
12533 } else if (!ctx->v4l_codec_dpb_ready) {
12534 if (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
12535 run_ready_min_buf_num)
12536 ret = 0;
12537 }
12538 }
12539
12540 if (ret)
12541 not_run_ready[hevc->index] = 0;
12542 else
12543 not_run_ready[hevc->index]++;
12544 if (vdec->parallel_dec == 1)
12545 return ret ? (CORE_MASK_HEVC) : 0;
12546 else
12547 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
12548}
12549
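/* Scheduler callback: prepare the next input chunk, (re)load the firmware
 * if needed, restore the hardware context and start the AMRISC core. */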
12550static void run(struct vdec_s *vdec, unsigned long mask,
12551 void (*callback)(struct vdec_s *, void *), void *arg)
12552{
12553 struct hevc_state_s *hevc =
12554 (struct hevc_state_s *)vdec->private;
12555 int r, loadr = 0;
12556 unsigned char check_sum = 0;
12557
12558 run_count[hevc->index]++;
12559 hevc->vdec_cb_arg = arg;
12560 hevc->vdec_cb = callback;
12561 hevc->aux_data_dirty = 1;
12562 hevc_reset_core(vdec);
12563
12564#ifdef AGAIN_HAS_THRESHOLD
12565 hevc->pre_parser_wr_ptr =
12566 READ_PARSER_REG(PARSER_VIDEO_WP);
12567 hevc->next_again_flag = 0;
12568#endif
12569 r = vdec_prepare_input(vdec, &hevc->chunk);
12570 if (r < 0) {
12571 input_empty[hevc->index]++;
12572 hevc->dec_result = DEC_RESULT_AGAIN;
12573 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12574 "ammvdec_vh265: Insufficient data\n");
12575
12576 vdec_schedule_work(&hevc->work);
12577 return;
12578 }
12579 input_empty[hevc->index] = 0;
12580 hevc->dec_result = DEC_RESULT_NONE;
12581 if (vdec_frame_based(vdec) &&
12582 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12583 || is_log_enable(hevc)))
12584 check_sum = get_data_check_sum(hevc, r);
12585
12586 if (is_log_enable(hevc))
12587 add_log(hevc,
12588 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12589 __func__, r,
12590 check_sum,
12591 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12592 );
12593 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12594 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12595 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12596 __func__, r,
12597 check_sum,
12598 READ_VREG(HEVC_STREAM_LEVEL),
12599 READ_VREG(HEVC_STREAM_WR_PTR),
12600 READ_VREG(HEVC_STREAM_RD_PTR),
12601 READ_PARSER_REG(PARSER_VIDEO_RP),
12602 READ_PARSER_REG(PARSER_VIDEO_WP),
12603 hevc->start_shift_bytes
12604 );
12605 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12606 input_frame_based(vdec)) {
12607 int jj;
12608 u8 *data = NULL;
12609
12610 if (!hevc->chunk->block->is_mapped)
12611 data = codec_mm_vmap(hevc->chunk->block->start +
12612 hevc->chunk->offset, r);
12613 else
12614 data = ((u8 *)hevc->chunk->block->start_virt)
12615 + hevc->chunk->offset;
12616
12617 for (jj = 0; jj < r; jj++) {
12618 if ((jj & 0xf) == 0)
12619 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12620 "%06x:", jj);
12621 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12622 "%02x ", data[jj]);
12623 if (((jj + 1) & 0xf) == 0)
12624 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12625 "\n");
12626 }
12627
12628 if (!hevc->chunk->block->is_mapped)
12629 codec_mm_unmap_phyaddr(data);
12630 }
12631 if (vdec->mc_loaded) {
12632 /*firmware has been loaded before
12633 and has not changed to another;
12634 skip reloading.
12635 */
12636 if (tee_enabled() && hevc->is_swap &&
12637 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12638 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12639 } else {
12640 if (hevc->mmu_enable)
12641 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12642 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12643 "h265_mmu", hevc->fw->data);
12644 else {
12645 if (!hevc->is_4k) {
12646 /* if an older version of the fw was loaded, */
12647 /* need to try to load the noswap fw because the */
12648 /* old fw package does not contain the swap fw.*/
12649 loadr = amhevc_vdec_loadmc_ex(
12650 VFORMAT_HEVC, vdec,
12651 "hevc_mmu_swap",
12652 hevc->fw->data);
12653 if (loadr < 0)
12654 loadr = amhevc_vdec_loadmc_ex(
12655 VFORMAT_HEVC, vdec,
12656 "h265_mmu",
12657 hevc->fw->data);
12658 else
12659 hevc->is_swap = true;
12660 } else
12661 loadr = amhevc_vdec_loadmc_ex(
12662 VFORMAT_HEVC, vdec,
12663 "h265_mmu", hevc->fw->data);
12664 }
12665 else
12666 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12667 NULL, hevc->fw->data);
12668 if (loadr < 0) {
12669 amhevc_disable();
12670 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12671 tee_enabled() ? "TEE" : "local", loadr);
12672 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12673 vdec_schedule_work(&hevc->work);
12674 return;
12675 }
12676
12677 if (tee_enabled() && hevc->is_swap &&
12678 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12679 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12680#ifdef DETREFILL_ENABLE
12681 if (hevc->is_swap &&
12682 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12683 init_detrefill_buf(hevc);
12684#endif
12685 vdec->mc_loaded = 1;
12686 vdec->mc_type = VFORMAT_HEVC;
12687 }
12688 if (vh265_hw_ctx_restore(hevc) < 0) {
12689 vdec_schedule_work(&hevc->work);
12690 return;
12691 }
12692 vdec_enable_input(vdec);
12693
12694 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12695
12696 if (vdec_frame_based(vdec)) {
12697 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12698 r = hevc->chunk->size +
12699 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12700 hevc->decode_size = r;
12701 if (vdec->mvfrm)
12702 vdec->mvfrm->frame_size = hevc->chunk->size;
12703 }
12704#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12705 else {
12706 if (vdec->master || vdec->slave)
12707 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12708 hevc->shift_byte_count_lo);
12709 }
12710#endif
12711 WRITE_VREG(HEVC_DECODE_SIZE, r);
12712 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12713 hevc->init_flag = 1;
12714
12715 if (hevc->pic_list_init_flag == 3)
12716 init_pic_list_hw(hevc);
12717
12718 backup_decode_state(hevc);
12719
12720 start_process_time(hevc);
12721 mod_timer(&hevc->timer, jiffies);
12722 hevc->stat |= STAT_TIMER_ARM;
12723 hevc->stat |= STAT_ISR_REG;
12724 if (vdec->mvfrm)
12725 vdec->mvfrm->hw_decode_start = local_clock();
12726 amhevc_start();
12727 hevc->stat |= STAT_VDEC_RUN;
12728}
12729
12730static void aml_free_canvas(struct vdec_s *vdec)
12731{
12732 int i;
12733 struct hevc_state_s *hevc =
12734 (struct hevc_state_s *)vdec->private;
12735
12736 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12737 struct PIC_s *pic = hevc->m_PIC[i];
12738
12739 if (pic) {
12740 if (vdec->parallel_dec == 1) {
12741 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
12742 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
12743 }
12744 }
12745 }
12746}
12747
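/* Scheduler reset callback: cancel pending work, stop the core and rebuild
 * the local decoder state so the instance can be reused. */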
12748static void reset(struct vdec_s *vdec)
12749{
12750 struct hevc_state_s *hevc =
12751 (struct hevc_state_s *)vdec->private;
12752 int i;
12753
12754 cancel_work_sync(&hevc->work);
12755 cancel_work_sync(&hevc->notify_work);
12756 if (hevc->stat & STAT_VDEC_RUN) {
12757 amhevc_stop();
12758 hevc->stat &= ~STAT_VDEC_RUN;
12759 }
12760
12761 if (hevc->stat & STAT_TIMER_ARM) {
12762 del_timer_sync(&hevc->timer);
12763 hevc->stat &= ~STAT_TIMER_ARM;
12764 }
12765 hevc->dec_result = DEC_RESULT_NONE;
12766 reset_process_time(hevc);
12767 hevc->pic_list_init_flag = 0;
12768 dealloc_mv_bufs(hevc);
12769 aml_free_canvas(vdec);
12770 hevc_local_uninit(hevc);
12771 if (vh265_local_init(hevc) < 0)
12772 pr_debug(" %s local init fail\n", __func__);
12773 for (i = 0; i < BUF_POOL_SIZE; i++) {
12774 hevc->m_BUF[i].start_adr = 0;
12775 }
12776
12777 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12778}
12779
12780static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12781{
12782 struct hevc_state_s *hevc =
12783 (struct hevc_state_s *)vdec->private;
12784
12785 return vh265_isr(0, hevc);
12786}
12787
12788static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12789{
12790 struct hevc_state_s *hevc =
12791 (struct hevc_state_s *)vdec->private;
12792
12793 return vh265_isr_thread_fn(0, hevc);
12794}
12795#endif
12796
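/* Probe for the legacy single-instance decoder: allocate the global hevc
 * state, the workspace buffer and the mmu boxes, then bring the decoder up. */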
12797static int amvdec_h265_probe(struct platform_device *pdev)
12798{
12799#ifdef MULTI_INSTANCE_SUPPORT
12800 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12801#else
12802 struct vdec_dev_reg_s *pdata =
12803 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12804#endif
12805 char *tmpbuf;
12806 int ret;
12807 struct hevc_state_s *hevc;
12808
12809 hevc = vmalloc(sizeof(struct hevc_state_s));
12810 if (hevc == NULL) {
12811 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12812 return -ENOMEM;
12813 }
12814 gHevc = hevc;
12815 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12816 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12817 H265_DEBUG_DIS_SYS_ERROR_PROC));
12818 memset(hevc, 0, sizeof(struct hevc_state_s));
12819 if (get_dbg_flag(hevc))
12820 hevc_print(hevc, 0, "%s\r\n", __func__);
12821 mutex_lock(&vh265_mutex);
12822
12823 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12824 (parser_sei_enable & 0x100) == 0)
12825 parser_sei_enable = 7; /*old 1*/
12826 hevc->m_ins_flag = 0;
12827 hevc->init_flag = 0;
12828 hevc->first_sc_checked = 0;
12829 hevc->uninit_list = 0;
12830 hevc->fatal_error = 0;
12831 hevc->show_frame_num = 0;
12832 hevc->frameinfo_enable = 1;
12833#ifdef MULTI_INSTANCE_SUPPORT
12834 hevc->platform_dev = pdev;
12835 platform_set_drvdata(pdev, pdata);
12836#endif
12837
12838 if (pdata == NULL) {
12839 hevc_print(hevc, 0,
12840 "\namvdec_h265 memory resource undefined.\n");
12841 vfree(hevc);
12842 mutex_unlock(&vh265_mutex);
12843 return -EFAULT;
12844 }
12845 if (mmu_enable_force == 0) {
12846 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12847 || double_write_mode == 0x10)
12848 hevc->mmu_enable = 0;
12849 else
12850 hevc->mmu_enable = 1;
12851 }
12852 if (init_mmu_buffers(hevc)) {
12853 hevc_print(hevc, 0,
12854 "\n 265 mmu init failed!\n");
12855 vfree(hevc);
12856 mutex_unlock(&vh265_mutex);
12857 return -EFAULT;
12858 }
12859
12860 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12861 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12862 if (ret < 0) {
12863 uninit_mmu_buffers(hevc);
12864 vfree(hevc);
12865 mutex_unlock(&vh265_mutex);
12866 return ret;
12867 }
12868 hevc->buf_size = work_buf_size;
12869
12870
12871 if (!vdec_secure(pdata)) {
12872 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12873 if (tmpbuf) {
12874 memset(tmpbuf, 0, work_buf_size);
12875 dma_sync_single_for_device(amports_get_dma_device(),
12876 hevc->buf_start,
12877 work_buf_size, DMA_TO_DEVICE);
12878 } else {
12879 tmpbuf = codec_mm_vmap(hevc->buf_start,
12880 work_buf_size);
12881 if (tmpbuf) {
12882 memset(tmpbuf, 0, work_buf_size);
12883 dma_sync_single_for_device(
12884 amports_get_dma_device(),
12885 hevc->buf_start,
12886 work_buf_size,
12887 DMA_TO_DEVICE);
12888 codec_mm_unmap_phyaddr(tmpbuf);
12889 }
12890 }
12891 }
12892
12893 if (get_dbg_flag(hevc)) {
12894 hevc_print(hevc, 0,
12895 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12896 hevc->buf_start, hevc->buf_size);
12897 }
12898
12899 if (pdata->sys_info)
12900 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12901 else {
12902 hevc->vh265_amstream_dec_info.width = 0;
12903 hevc->vh265_amstream_dec_info.height = 0;
12904 hevc->vh265_amstream_dec_info.rate = 30;
12905 }
12906#ifndef MULTI_INSTANCE_SUPPORT
12907 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12908 workaround_enable |= 3;
12909 hevc_print(hevc, 0,
12910 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
12911 } else
12912 workaround_enable &= ~3;
12913#endif
12914 hevc->cma_dev = pdata->cma_dev;
12915 vh265_vdec_info_init(hevc);
12916
12917#ifdef MULTI_INSTANCE_SUPPORT
12918 pdata->private = hevc;
12919 pdata->dec_status = vh265_dec_status;
12920 pdata->set_trickmode = vh265_set_trickmode;
12921 pdata->set_isreset = vh265_set_isreset;
12922 is_reset = 0;
12923 if (vh265_init(pdata) < 0) {
12924#else
12925 if (vh265_init(hevc) < 0) {
12926#endif
12927 hevc_print(hevc, 0,
12928 "\namvdec_h265 init failed.\n");
12929 hevc_local_uninit(hevc);
12930 if (hevc->gvs)
12931 kfree(hevc->gvs);
12932 hevc->gvs = NULL;
12933 uninit_mmu_buffers(hevc);
12934 vfree(hevc);
12935 pdata->dec_status = NULL;
12936 mutex_unlock(&vh265_mutex);
12937 return -ENODEV;
12938 }
12939 /*set the max clk for smooth playing...*/
12940 hevc_source_changed(VFORMAT_HEVC,
12941 3840, 2160, 60);
12942 mutex_unlock(&vh265_mutex);
12943
12944 return 0;
12945}
12946
12947static int amvdec_h265_remove(struct platform_device *pdev)
12948{
12949 struct hevc_state_s *hevc = gHevc;
12950
12951 if (get_dbg_flag(hevc))
12952 hevc_print(hevc, 0, "%s\r\n", __func__);
12953
12954 mutex_lock(&vh265_mutex);
12955
12956 vh265_stop(hevc);
12957
12958 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
12959
12960
12961#ifdef DEBUG_PTS
12962 hevc_print(hevc, 0,
12963 "pts missed %ld, pts hit %ld, duration %d\n",
12964 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
12965#endif
12966
12967 vfree(hevc);
12968 hevc = NULL;
12969 gHevc = NULL;
12970
12971 mutex_unlock(&vh265_mutex);
12972
12973 return 0;
12974}
12975/****************************************/
12976#ifdef CONFIG_PM
12977static int h265_suspend(struct device *dev)
12978{
12979 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
12980 return 0;
12981}
12982
12983static int h265_resume(struct device *dev)
12984{
12985 amhevc_resume(to_platform_device(dev));
12986 return 0;
12987}
12988
12989static const struct dev_pm_ops h265_pm_ops = {
12990 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
12991};
12992#endif
12993
12994static struct platform_driver amvdec_h265_driver = {
12995 .probe = amvdec_h265_probe,
12996 .remove = amvdec_h265_remove,
12997 .driver = {
12998 .name = DRIVER_NAME,
12999#ifdef CONFIG_PM
13000 .pm = &h265_pm_ops,
13001#endif
13002 }
13003};
13004
13005#ifdef MULTI_INSTANCE_SUPPORT
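/* Debug hook: dump decoder state, buffer pools and key HEVC registers. */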
13006static void vh265_dump_state(struct vdec_s *vdec)
13007{
13008 int i;
13009 struct hevc_state_s *hevc =
13010 (struct hevc_state_s *)vdec->private;
13011 hevc_print(hevc, 0,
13012 "====== %s\n", __func__);
13013
13014 hevc_print(hevc, 0,
13015 "width/height (%d/%d), reorder_pic_num %d ip_mode %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
13016 hevc->frame_width,
13017 hevc->frame_height,
13018 hevc->sps_num_reorder_pics_0,
13019 hevc->ip_mode,
13020 get_work_pic_num(hevc),
13021 hevc->video_signal_type_debug,
13022 hevc->is_swap
13023 );
13024
13025 hevc_print(hevc, 0,
13026 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
13027 input_frame_based(vdec),
13028 hevc->eos,
13029 hevc->dec_result,
13030 decode_frame_count[hevc->index],
13031 display_frame_count[hevc->index],
13032 run_count[hevc->index],
13033 not_run_ready[hevc->index],
13034 input_empty[hevc->index]
13035 );
13036
13037 if (vf_get_receiver(vdec->vf_provider_name)) {
13038 enum receviver_start_e state =
13039 vf_notify_receiver(vdec->vf_provider_name,
13040 VFRAME_EVENT_PROVIDER_QUREY_STATE,
13041 NULL);
13042 hevc_print(hevc, 0,
13043 "\nreceiver(%s) state %d\n",
13044 vdec->vf_provider_name,
13045 state);
13046 }
13047
13048 hevc_print(hevc, 0,
13049 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
13050 __func__,
13051 kfifo_len(&hevc->newframe_q),
13052 VF_POOL_SIZE,
13053 kfifo_len(&hevc->display_q),
13054 VF_POOL_SIZE,
13055 hevc->vf_pre_count,
13056 hevc->vf_get_count,
13057 hevc->vf_put_count,
13058 hevc->pic_list_init_flag,
13059 is_new_pic_available(hevc)
13060 );
13061
13062 dump_pic_list(hevc);
13063
13064 for (i = 0; i < BUF_POOL_SIZE; i++) {
13065 hevc_print(hevc, 0,
13066 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
13067 i,
13068 hevc->m_BUF[i].start_adr,
13069 hevc->m_BUF[i].size,
13070 hevc->m_BUF[i].used_flag);
13071 }
13072
13073 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
13074 hevc_print(hevc, 0,
13075 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
13076 i,
13077 hevc->m_mv_BUF[i].start_adr,
13078 hevc->m_mv_BUF[i].size,
13079 hevc->m_mv_BUF[i].used_flag);
13080 }
13081
13082 hevc_print(hevc, 0,
13083 "HEVC_DEC_STATUS_REG=0x%x\n",
13084 READ_VREG(HEVC_DEC_STATUS_REG));
13085 hevc_print(hevc, 0,
13086 "HEVC_MPC_E=0x%x\n",
13087 READ_VREG(HEVC_MPC_E));
13088 hevc_print(hevc, 0,
13089 "HEVC_DECODE_MODE=0x%x\n",
13090 READ_VREG(HEVC_DECODE_MODE));
13091 hevc_print(hevc, 0,
13092 "HEVC_DECODE_MODE2=0x%x\n",
13093 READ_VREG(HEVC_DECODE_MODE2));
13094 hevc_print(hevc, 0,
13095 "NAL_SEARCH_CTL=0x%x\n",
13096 READ_VREG(NAL_SEARCH_CTL));
13097 hevc_print(hevc, 0,
13098 "HEVC_PARSER_LCU_START=0x%x\n",
13099 READ_VREG(HEVC_PARSER_LCU_START));
13100 hevc_print(hevc, 0,
13101 "HEVC_DECODE_SIZE=0x%x\n",
13102 READ_VREG(HEVC_DECODE_SIZE));
13103 hevc_print(hevc, 0,
13104 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
13105 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
13106 hevc_print(hevc, 0,
13107 "HEVC_STREAM_START_ADDR=0x%x\n",
13108 READ_VREG(HEVC_STREAM_START_ADDR));
13109 hevc_print(hevc, 0,
13110 "HEVC_STREAM_END_ADDR=0x%x\n",
13111 READ_VREG(HEVC_STREAM_END_ADDR));
13112 hevc_print(hevc, 0,
13113 "HEVC_STREAM_LEVEL=0x%x\n",
13114 READ_VREG(HEVC_STREAM_LEVEL));
13115 hevc_print(hevc, 0,
13116 "HEVC_STREAM_WR_PTR=0x%x\n",
13117 READ_VREG(HEVC_STREAM_WR_PTR));
13118 hevc_print(hevc, 0,
13119 "HEVC_STREAM_RD_PTR=0x%x\n",
13120 READ_VREG(HEVC_STREAM_RD_PTR));
13121 hevc_print(hevc, 0,
13122 "PARSER_VIDEO_RP=0x%x\n",
13123 READ_PARSER_REG(PARSER_VIDEO_RP));
13124 hevc_print(hevc, 0,
13125 "PARSER_VIDEO_WP=0x%x\n",
13126 READ_PARSER_REG(PARSER_VIDEO_WP));
13127
13128 if (input_frame_based(vdec) &&
13129 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
13130 ) {
13131 int jj;
13132 if (hevc->chunk && hevc->chunk->block &&
13133 hevc->chunk->size > 0) {
13134 u8 *data = NULL;
13135 if (!hevc->chunk->block->is_mapped)
13136 data = codec_mm_vmap(hevc->chunk->block->start +
13137 hevc->chunk->offset, hevc->chunk->size);
13138 else
13139 data = ((u8 *)hevc->chunk->block->start_virt)
13140 + hevc->chunk->offset;
13141 hevc_print(hevc, 0,
13142 "frame data size 0x%x\n",
13143 hevc->chunk->size);
13144 for (jj = 0; jj < hevc->chunk->size; jj++) {
13145 if ((jj & 0xf) == 0)
13146 hevc_print(hevc,
13147 PRINT_FRAMEBASE_DATA,
13148 "%06x:", jj);
13149 hevc_print_cont(hevc,
13150 PRINT_FRAMEBASE_DATA,
13151 "%02x ", data[jj]);
13152 if (((jj + 1) & 0xf) == 0)
13153 hevc_print_cont(hevc,
13154 PRINT_FRAMEBASE_DATA,
13155 "\n");
13156 }
13157
13158 if (!hevc->chunk->block->is_mapped)
13159 codec_mm_unmap_phyaddr(data);
13160 }
13161 }
13162
13163}
13164
13165
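/* Probe for the multi-instance decoder: allocate a per-instance hevc state,
 * wire up the vdec callbacks, parse the configure string and init buffers. */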
13166static int ammvdec_h265_probe(struct platform_device *pdev)
13167{
13168
13169 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
13170 struct hevc_state_s *hevc = NULL;
13171 int ret;
13172#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
13173 int config_val;
13174#endif
13175 //pr_err("[%s pid=%d tgid=%d] \n",__func__, current->pid, current->tgid);
13176 if (pdata == NULL) {
13177 pr_info("\nammvdec_h265 memory resource undefined.\n");
13178 return -EFAULT;
13179 }
13180
13181 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
13182 sizeof(struct hevc_state_s), GFP_KERNEL); */
13183 hevc = vmalloc(sizeof(struct hevc_state_s));
13184 if (hevc == NULL) {
13185 pr_info("\nammvdec_h265 device data allocation failed\n");
13186 return -ENOMEM;
13187 }
13188 memset(hevc, 0, sizeof(struct hevc_state_s));
13189
13190 /* the ctx from v4l2 driver. */
13191 hevc->v4l2_ctx = pdata->private;
13192
13193 pdata->private = hevc;
13194 pdata->dec_status = vh265_dec_status;
13195 pdata->set_trickmode = vh265_set_trickmode;
13196 pdata->run_ready = run_ready;
13197 pdata->run = run;
13198 pdata->reset = reset;
13199 pdata->irq_handler = vh265_irq_cb;
13200 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
13201 pdata->dump_state = vh265_dump_state;
13202
13203 hevc->index = pdev->id;
13204 hevc->m_ins_flag = 1;
13205
13206 if (pdata->use_vfm_path) {
13207 snprintf(pdata->vf_provider_name,
13208 VDEC_PROVIDER_NAME_SIZE,
13209 VFM_DEC_PROVIDER_NAME);
13210 hevc->frameinfo_enable = 1;
13211 }
13212#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13213 else if (vdec_dual(pdata)) {
13214 struct hevc_state_s *hevc_pair = NULL;
13215
13216 if (dv_toggle_prov_name) /*debug purpose*/
13217 snprintf(pdata->vf_provider_name,
13218 VDEC_PROVIDER_NAME_SIZE,
13219 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
13220 VFM_DEC_DVEL_PROVIDER_NAME);
13221 else
13222 snprintf(pdata->vf_provider_name,
13223 VDEC_PROVIDER_NAME_SIZE,
13224 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
13225 VFM_DEC_DVBL_PROVIDER_NAME);
13226 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
13227 if (pdata->master)
13228 hevc_pair = (struct hevc_state_s *)
13229 pdata->master->private;
13230 else if (pdata->slave)
13231 hevc_pair = (struct hevc_state_s *)
13232 pdata->slave->private;
13233 if (hevc_pair)
13234 hevc->shift_byte_count_lo =
13235 hevc_pair->shift_byte_count_lo;
13236 }
13237#endif
13238 else
13239 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
13240 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
13241
13242 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
13243 &vh265_vf_provider, pdata);
13244
13245 hevc->provider_name = pdata->vf_provider_name;
13246 platform_set_drvdata(pdev, pdata);
13247
13248 hevc->platform_dev = pdev;
13249
13250 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
13251 pdata->config && pdata->config_len) {
13252#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
13253 /*use ptr config for double_write_mode, etc*/
13254 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
13255
13256 if (get_config_int(pdata->config, "hevc_double_write_mode",
13257 &config_val) == 0)
13258 hevc->double_write_mode = config_val;
13259 else
13260 hevc->double_write_mode = double_write_mode;
13261
13262 if (get_config_int(pdata->config, "save_buffer_mode",
13263 &config_val) == 0)
13264 hevc->save_buffer_mode = config_val;
13265 else
13266 hevc->save_buffer_mode = 0;
13267
13268 /*use ptr config for max_pic_w, etc*/
13269 if (get_config_int(pdata->config, "hevc_buf_width",
13270 &config_val) == 0) {
13271 hevc->max_pic_w = config_val;
13272 }
13273 if (get_config_int(pdata->config, "hevc_buf_height",
13274 &config_val) == 0) {
13275 hevc->max_pic_h = config_val;
13276 }
13277
13278 if (get_config_int(pdata->config,
13279 "parm_v4l_codec_enable",
13280 &config_val) == 0)
13281 hevc->is_used_v4l = config_val;
13282
13283 if (get_config_int(pdata->config,
13284 "parm_v4l_buffer_margin",
13285 &config_val) == 0)
13286 hevc->dynamic_buf_num_margin = config_val;
13287
13288 if (get_config_int(pdata->config,
13289 "parm_v4l_canvas_mem_mode",
13290 &config_val) == 0)
13291 hevc->mem_map_mode = config_val;
13292#endif
13293 } else {
13294 if (pdata->sys_info)
13295 hevc->vh265_amstream_dec_info = *pdata->sys_info;
13296 else {
13297 hevc->vh265_amstream_dec_info.width = 0;
13298 hevc->vh265_amstream_dec_info.height = 0;
13299 hevc->vh265_amstream_dec_info.rate = 30;
13300 }
13301 hevc->double_write_mode = double_write_mode;
13302 }
13303 /* get valid double write from configure or node */
13304 hevc->double_write_mode = get_double_write_mode(hevc);
13305
13306 if (!hevc->is_used_v4l) {
13307 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
13308 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin -2;
13309 else
13310 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
13311
13312 hevc->mem_map_mode = mem_map_mode;
13313 }
13314
13315 if (mmu_enable_force == 0) {
13316 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
13317 hevc->mmu_enable = 0;
13318 else
13319 hevc->mmu_enable = 1;
13320 }
13321
13322 if (init_mmu_buffers(hevc) < 0) {
13323 hevc_print(hevc, 0,
13324 "\n 265 mmu init failed!\n");
13325 mutex_unlock(&vh265_mutex);
13326 /* devm_kfree(&pdev->dev, (void *)hevc);*/
13327 if (hevc)
13328 vfree((void *)hevc);
13329 pdata->dec_status = NULL;
13330 return -EFAULT;
13331 }
13332#if 0
13333 hevc->buf_start = pdata->mem_start;
13334 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
13335#else
13336
13337 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
13338 BMMU_WORKSPACE_ID, work_buf_size,
13339 DRIVER_NAME, &hevc->buf_start);
13340 if (ret < 0) {
13341 uninit_mmu_buffers(hevc);
13342 /* devm_kfree(&pdev->dev, (void *)hevc); */
13343 if (hevc)
13344 vfree((void *)hevc);
13345 pdata->dec_status = NULL;
13346 mutex_unlock(&vh265_mutex);
13347 return ret;
13348 }
13349 hevc->buf_size = work_buf_size;
13350#endif
13351 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
13352 (parser_sei_enable & 0x100) == 0)
13353 parser_sei_enable = 7;
13354 hevc->init_flag = 0;
13355 hevc->first_sc_checked = 0;
13356 hevc->uninit_list = 0;
13357 hevc->fatal_error = 0;
13358 hevc->show_frame_num = 0;
13359
13360 /*
13361 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
13362 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
13363 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
13364 */
13365 if (get_dbg_flag(hevc)) {
13366 hevc_print(hevc, 0,
13367 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
13368 hevc->buf_start, hevc->buf_size);
13369 }
13370
13371 hevc_print(hevc, 0,
13372 "dynamic_buf_num_margin=%d\n",
13373 hevc->dynamic_buf_num_margin);
13374 hevc_print(hevc, 0,
13375 "double_write_mode=%d\n",
13376 hevc->double_write_mode);
13377
13378 hevc->cma_dev = pdata->cma_dev;
13379 vh265_vdec_info_init(hevc);
13380
13381 if (vh265_init(pdata) < 0) {
13382 hevc_print(hevc, 0,
13383 "\namvdec_h265 init failed.\n");
13384 hevc_local_uninit(hevc);
13385 if (hevc->gvs)
13386 kfree(hevc->gvs);
13387 hevc->gvs = NULL;
13388 uninit_mmu_buffers(hevc);
13389 /* devm_kfree(&pdev->dev, (void *)hevc); */
13390 if (hevc)
13391 vfree((void *)hevc);
13392 pdata->dec_status = NULL;
13393 return -ENODEV;
13394 }
13395
13396 vdec_set_prepare_level(pdata, start_decode_buf_level);
13397
13398 /*set the max clk for smooth playing...*/
13399 hevc_source_changed(VFORMAT_HEVC,
13400 3840, 2160, 60);
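	/*
	 * Core scheduling: a parallel (frame-based multi-instance) decoder
	 * only needs the HEVC core; otherwise VDEC_1 and HEVC are requested
	 * as a combined unit and must be released together on remove.
	 */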
13401 if (pdata->parallel_dec == 1)
13402 vdec_core_request(pdata, CORE_MASK_HEVC);
13403 else
13404 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
13405 | CORE_MASK_COMBINE);
13406
13407 return 0;
13408}
13409
13410static int ammvdec_h265_remove(struct platform_device *pdev)
13411{
13412 struct hevc_state_s *hevc =
13413 (struct hevc_state_s *)
13414 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
13415 struct vdec_s *vdec;
13416
13417 if (hevc == NULL)
13418 return 0;
13419 vdec = hw_to_vdec(hevc);
13420
13421 //pr_err("%s [pid=%d,tgid=%d]\n", __func__, current->pid, current->tgid);
13422 if (get_dbg_flag(hevc))
13423 hevc_print(hevc, 0, "%s\r\n", __func__);
13424
13425 vmh265_stop(hevc);
13426
13427 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
13428 if (vdec->parallel_dec == 1)
13429 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
13430 else
13431 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
13432
13433 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
13434
13435 vfree((void *)hevc);
13436
13437 return 0;
13438}
13439
13440static struct platform_driver ammvdec_h265_driver = {
13441 .probe = ammvdec_h265_probe,
13442 .remove = ammvdec_h265_remove,
13443 .driver = {
13444 .name = MULTI_DRIVER_NAME,
13445#ifdef CONFIG_PM
13446 .pm = &h265_pm_ops,
13447#endif
13448 }
13449};
13450#endif
13451
13452static struct codec_profile_t amvdec_h265_profile = {
13453 .name = "hevc",
13454 .profile = ""
13455};
13456
13457static struct codec_profile_t amvdec_h265_profile_single,
13458 amvdec_h265_profile_mult;
13459
13460static struct mconfig h265_configs[] = {
13461 MC_PU32("use_cma", &use_cma),
13462 MC_PU32("bit_depth_luma", &bit_depth_luma),
13463 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
13464 MC_PU32("video_signal_type", &video_signal_type),
13465#ifdef ERROR_HANDLE_DEBUG
13466 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
13467 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
13468#endif
13469 MC_PU32("radr", &radr),
13470 MC_PU32("rval", &rval),
13471 MC_PU32("dbg_cmd", &dbg_cmd),
13472 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
13473 MC_PU32("endian", &endian),
13474 MC_PU32("step", &step),
13475 MC_PU32("udebug_flag", &udebug_flag),
13476 MC_PU32("decode_pic_begin", &decode_pic_begin),
13477 MC_PU32("slice_parse_begin", &slice_parse_begin),
13478 MC_PU32("nal_skip_policy", &nal_skip_policy),
13479 MC_PU32("i_only_flag", &i_only_flag),
13480 MC_PU32("error_handle_policy", &error_handle_policy),
13481 MC_PU32("error_handle_threshold", &error_handle_threshold),
13482 MC_PU32("error_handle_nal_skip_threshold",
13483 &error_handle_nal_skip_threshold),
13484 MC_PU32("error_handle_system_threshold",
13485 &error_handle_system_threshold),
13486 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
13487 MC_PU32("debug", &debug),
13488 MC_PU32("debug_mask", &debug_mask),
13489 MC_PU32("buffer_mode", &buffer_mode),
13490 MC_PU32("double_write_mode", &double_write_mode),
13491 MC_PU32("buf_alloc_width", &buf_alloc_width),
13492 MC_PU32("buf_alloc_height", &buf_alloc_height),
13493 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
13494 MC_PU32("max_buf_num", &max_buf_num),
13495 MC_PU32("buf_alloc_size", &buf_alloc_size),
13496 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
13497 MC_PU32("mem_map_mode", &mem_map_mode),
13498 MC_PU32("enable_mem_saving", &enable_mem_saving),
13499 MC_PU32("force_w_h", &force_w_h),
13500 MC_PU32("force_fps", &force_fps),
13501 MC_PU32("max_decoding_time", &max_decoding_time),
13502 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
13503 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
13504 MC_PU32("interlace_enable", &interlace_enable),
13505 MC_PU32("pts_unstable", &pts_unstable),
13506 MC_PU32("parser_sei_enable", &parser_sei_enable),
13507 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
13508 MC_PU32("decode_timeout_val", &decode_timeout_val),
13509#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13510 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
13511 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
13512 MC_PU32("dv_debug", &dv_debug),
13513#endif
13514};
13515static struct mconfig_node decoder_265_node;
13516
13517static int __init amvdec_h265_driver_init_module(void)
13518{
13519 struct BuffInfo_s *p_buf_info;
13520
13521 if (vdec_is_support_4k()) {
13522 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
13523 p_buf_info = &amvh265_workbuff_spec[2];
13524 else
13525 p_buf_info = &amvh265_workbuff_spec[1];
13526 } else
13527 p_buf_info = &amvh265_workbuff_spec[0];
13528
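	/*
	 * Compute the workspace size from the selected buffer spec and round
	 * it up to the next 64 KB (0x10000) boundary.
	 */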
13529 init_buff_spec(NULL, p_buf_info);
13530 work_buf_size =
13531 (p_buf_info->end_adr - p_buf_info->start_adr
13532 + 0xffff) & (~0xffff);
13533
13534 pr_debug("amvdec_h265 module init\n");
13535 error_handle_policy = 0;
13536
13537#ifdef ERROR_HANDLE_DEBUG
13538 dbg_nal_skip_flag = 0;
13539 dbg_nal_skip_count = 0;
13540#endif
13541 udebug_flag = 0;
13542 decode_pic_begin = 0;
13543 slice_parse_begin = 0;
13544 step = 0;
13545 buf_alloc_size = 0;
13546
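	/*
	 * Register both front ends: the multi-instance ammvdec_h265 platform
	 * driver (when MULTI_INSTANCE_SUPPORT is built in) and the legacy
	 * single-instance amvdec_h265 driver.
	 */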
13547#ifdef MULTI_INSTANCE_SUPPORT
13548 if (platform_driver_register(&ammvdec_h265_driver))
13549 pr_err("failed to register ammvdec_h265 driver\n");
13550
13551#endif
13552 if (platform_driver_register(&amvdec_h265_driver)) {
13553 pr_err("failed to register amvdec_h265 driver\n");
13554 return -ENODEV;
13555 }
13556#if 1 /*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
13557 if (!has_hevc_vdec()) {
13558 /* hevc not supported */
13559 amvdec_h265_profile.name = "hevc_unsupport";
13560 }
13561 if (vdec_is_support_4k()) {
13562 if (is_meson_m8m2_cpu()) {
13563 /* m8m2 support 4k */
13564 amvdec_h265_profile.profile = "4k";
13565 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
13566 amvdec_h265_profile.profile =
13567 "8k, 8bit, 10bit, dwrite, compressed";
13568 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
13569 amvdec_h265_profile.profile =
13570 "4k, 8bit, 10bit, dwrite, compressed";
13571 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
13572 amvdec_h265_profile.profile = "4k";
13573 }
13574#endif
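	/*
	 * With less than 80 MB of codec_mm memory available, enable MMU mode
	 * by default to keep the decoder's buffer footprint smaller.
	 */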
13575 if (codec_mm_get_total_size() < 80 * SZ_1M) {
13576 pr_info("amvdec_h265 default mmu enabled.\n");
13577 mmu_enable = 1;
13578 }
13579
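	/*
	 * The same capability string is exported three times: under the
	 * legacy "hevc" name, the single-instance "h265" name and the
	 * multi-instance "mh265" name.
	 */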
13580 vcodec_profile_register(&amvdec_h265_profile);
13581 amvdec_h265_profile_single = amvdec_h265_profile;
13582 amvdec_h265_profile_single.name = "h265";
13583 vcodec_profile_register(&amvdec_h265_profile_single);
13584 amvdec_h265_profile_mult = amvdec_h265_profile;
13585 amvdec_h265_profile_mult.name = "mh265";
13586 vcodec_profile_register(&amvdec_h265_profile_mult);
13587 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
13588 "h265", h265_configs, CONFIG_FOR_RW);
13589 return 0;
13590}
13591
13592static void __exit amvdec_h265_driver_remove_module(void)
13593{
13594 pr_debug("amvdec_h265 module remove.\n");
13595
13596#ifdef MULTI_INSTANCE_SUPPORT
13597 platform_driver_unregister(&ammvdec_h265_driver);
13598#endif
13599 platform_driver_unregister(&amvdec_h265_driver);
13600}
13601
13602/****************************************/
13603/*
13604 *module_param(stat, uint, 0664);
13605 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
13606 */
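/*
 * Every module_param() below uses mode 0664, so (assuming the module is
 * built as amvdec_h265.ko) each value can also be read or changed at run
 * time through /sys/module/amvdec_h265/parameters/<name>, for example:
 *   echo 0x1 > /sys/module/amvdec_h265/parameters/debug
 */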
13607module_param(use_cma, uint, 0664);
13608MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13609
13610module_param(bit_depth_luma, uint, 0664);
13611MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13612
13613module_param(bit_depth_chroma, uint, 0664);
13614MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13615
13616module_param(video_signal_type, uint, 0664);
13617MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13618
13619#ifdef ERROR_HANDLE_DEBUG
13620module_param(dbg_nal_skip_flag, uint, 0664);
13621MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13622
13623module_param(dbg_nal_skip_count, uint, 0664);
13624MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13625#endif
13626
13627module_param(radr, uint, 0664);
13628MODULE_PARM_DESC(radr, "\n radr\n");
13629
13630module_param(rval, uint, 0664);
13631MODULE_PARM_DESC(rval, "\n rval\n");
13632
13633module_param(dbg_cmd, uint, 0664);
13634MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13635
13636module_param(dump_nal, uint, 0664);
13637MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13638
13639module_param(dbg_skip_decode_index, uint, 0664);
13640MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13641
13642module_param(endian, uint, 0664);
13643MODULE_PARM_DESC(endian, "\n endian\n");
13644
13645module_param(step, uint, 0664);
13646MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13647
13648module_param(decode_pic_begin, uint, 0664);
13649MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13650
13651module_param(slice_parse_begin, uint, 0664);
13652MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13653
13654module_param(nal_skip_policy, uint, 0664);
13655MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
13656
13657module_param(i_only_flag, uint, 0664);
13658MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
13659
13660module_param(fast_output_enable, uint, 0664);
13661MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
13662
13663module_param(error_handle_policy, uint, 0664);
13664MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
13665
13666module_param(error_handle_threshold, uint, 0664);
13667MODULE_PARM_DESC(error_handle_threshold,
13668 "\n amvdec_h265 error_handle_threshold\n");
13669
13670module_param(error_handle_nal_skip_threshold, uint, 0664);
13671MODULE_PARM_DESC(error_handle_nal_skip_threshold,
13672 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
13673
13674module_param(error_handle_system_threshold, uint, 0664);
13675MODULE_PARM_DESC(error_handle_system_threshold,
13676 "\n amvdec_h265 error_handle_system_threshold\n");
13677
13678module_param(error_skip_nal_count, uint, 0664);
13679MODULE_PARM_DESC(error_skip_nal_count,
13680 "\n amvdec_h265 error_skip_nal_count\n");
13681
13682module_param(debug, uint, 0664);
13683MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
13684
13685module_param(debug_mask, uint, 0664);
13686MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
13687
13688module_param(log_mask, uint, 0664);
13689MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
13690
13691module_param(buffer_mode, uint, 0664);
13692MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
13693
13694module_param(double_write_mode, uint, 0664);
13695MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
13696
13697module_param(buf_alloc_width, uint, 0664);
13698MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
13699
13700module_param(buf_alloc_height, uint, 0664);
13701MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
13702
13703module_param(dynamic_buf_num_margin, uint, 0664);
13704MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
13705
13706module_param(max_buf_num, uint, 0664);
13707MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
13708
13709module_param(buf_alloc_size, uint, 0664);
13710MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
13711
13712#ifdef CONSTRAIN_MAX_BUF_NUM
13713module_param(run_ready_max_vf_only_num, uint, 0664);
13714MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");
13715
13716module_param(run_ready_display_q_num, uint, 0664);
13717MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");
13718
13719module_param(run_ready_max_buf_num, uint, 0664);
13720MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
13721#endif
13722
13723#if 0
13724module_param(re_config_pic_flag, uint, 0664);
13725MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
13726#endif
13727
13728module_param(buffer_mode_dbg, uint, 0664);
13729MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");
13730
13731module_param(mem_map_mode, uint, 0664);
13732MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");
13733
13734module_param(enable_mem_saving, uint, 0664);
13735MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");
13736
13737module_param(force_w_h, uint, 0664);
13738MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
13739
13740module_param(force_fps, uint, 0664);
13741MODULE_PARM_DESC(force_fps, "\n force_fps\n");
13742
13743module_param(max_decoding_time, uint, 0664);
13744MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");
13745
13746module_param(prefix_aux_buf_size, uint, 0664);
13747MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");
13748
13749module_param(suffix_aux_buf_size, uint, 0664);
13750MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");
13751
13752module_param(interlace_enable, uint, 0664);
13753MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
13754module_param(pts_unstable, uint, 0664);
13755MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
13756module_param(parser_sei_enable, uint, 0664);
13757MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");
13758
13759#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13760module_param(parser_dolby_vision_enable, uint, 0664);
13761MODULE_PARM_DESC(parser_dolby_vision_enable,
13762 "\n parser_dolby_vision_enable\n");
13763
13764module_param(dolby_meta_with_el, uint, 0664);
13765MODULE_PARM_DESC(dolby_meta_with_el,
13766 "\n dolby_meta_with_el\n");
13767
13768module_param(dolby_el_flush_th, uint, 0664);
13769MODULE_PARM_DESC(dolby_el_flush_th,
13770 "\n dolby_el_flush_th\n");
13771#endif
13772module_param(mmu_enable, uint, 0664);
13773MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");
13774
13775module_param(mmu_enable_force, uint, 0664);
13776MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");
13777
13778#ifdef MULTI_INSTANCE_SUPPORT
13779module_param(start_decode_buf_level, int, 0664);
13780MODULE_PARM_DESC(start_decode_buf_level,
13781 "\n h265 start_decode_buf_level\n");
13782
13783module_param(decode_timeout_val, uint, 0664);
13784MODULE_PARM_DESC(decode_timeout_val,
13785 "\n h265 decode_timeout_val\n");
13786
13787module_param(data_resend_policy, uint, 0664);
13788MODULE_PARM_DESC(data_resend_policy,
13789 "\n h265 data_resend_policy\n");
13790
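/*
 * Per-instance statistics and tuning arrays: one element per decoder
 * instance, written back through max_decode_instance_num.
 */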
13791module_param_array(decode_frame_count, uint,
13792 &max_decode_instance_num, 0664);
13793
13794module_param_array(display_frame_count, uint,
13795 &max_decode_instance_num, 0664);
13796
13797module_param_array(max_process_time, uint,
13798 &max_decode_instance_num, 0664);
13799
13800module_param_array(max_get_frame_interval,
13801 uint, &max_decode_instance_num, 0664);
13802
13803module_param_array(run_count, uint,
13804 &max_decode_instance_num, 0664);
13805
13806module_param_array(input_empty, uint,
13807 &max_decode_instance_num, 0664);
13808
13809module_param_array(not_run_ready, uint,
13810 &max_decode_instance_num, 0664);
13811
13812module_param_array(ref_frame_mark_flag, uint,
13813 &max_decode_instance_num, 0664);
13814
13815#endif
13816#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13817module_param(dv_toggle_prov_name, uint, 0664);
13818MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");
13819
13820module_param(dv_debug, uint, 0664);
13821MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");
13822
13823module_param(force_bypass_dvenl, uint, 0664);
13824MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
13825#endif
13826
13827#ifdef AGAIN_HAS_THRESHOLD
13828module_param(again_threshold, uint, 0664);
13829MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
13830#endif
13831
13832module_param(force_disp_pic_index, int, 0664);
13833MODULE_PARM_DESC(force_disp_pic_index,
13834 "\n amvdec_h265 force_disp_pic_index\n");
13835
13836module_param(frmbase_cont_bitlevel, uint, 0664);
13837MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");
13838
13839module_param(udebug_flag, uint, 0664);
13840MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
13841
13842module_param(udebug_pause_pos, uint, 0664);
13843MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
13844
13845module_param(udebug_pause_val, uint, 0664);
13846MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
13847
13848module_param(pre_decode_buf_level, int, 0664);
13849MODULE_PARM_DESC(pre_decode_buf_level, "\n ammvdec_h265 pre_decode_buf_level\n");
13850
13851module_param(udebug_pause_decode_idx, uint, 0664);
13852MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
13853
13854module_param(disp_vframe_valve_level, uint, 0664);
13855MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");
13856
13857module_param(pic_list_debug, uint, 0664);
13858MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");
13859
13860module_param(without_display_mode, uint, 0664);
13861MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");
13862
13863#ifdef HEVC_8K_LFTOFFSET_FIX
13864module_param(performance_profile, uint, 0664);
13865MODULE_PARM_DESC(performance_profile, "\n amvdec_h265 performance_profile\n");
13866#endif
13867module_param(disable_ip_mode, uint, 0664);
13868MODULE_PARM_DESC(disable_ip_mode, "\n amvdec_h265 disable ip_mode\n");
13869
13870module_init(amvdec_h265_driver_init_module);
13871module_exit(amvdec_h265_driver_remove_module);
13872
13873MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
13874MODULE_LICENSE("GPL");
13875MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");
13876