path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
blob: ea12ba155ce1d2494dab02a766d5a82a537bbb25
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50
51#define CONSTRAIN_MAX_BUF_NUM
52
53#define SWAP_HEVC_UCODE
54#define DETREFILL_ENABLE
55
56#define AGAIN_HAS_THRESHOLD
57/*#define TEST_NO_BUF*/
58#define HEVC_PIC_STRUCT_SUPPORT
59#define MULTI_INSTANCE_SUPPORT
60#define USE_UNINIT_SEMA
61
62 /* .buf_size = 0x100000*16,
63 //4k2k , 0x100000 per buffer */
64 /* 4096x2304 , 0x120000 per buffer */
65#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
66#define MPRED_4K_MV_BUF_SIZE (0x120000)
67#define MPRED_MV_BUF_SIZE (0x40000)
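/*
 * Editor's note: these per-buffer sizes work out to 256KB for the default
 * (1080p) MV buffer, 0x120000 (1.125MB) for a 4K buffer and 0x480000
 * (4.5MB) for an 8K buffer.
 */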
68
69#define MMU_COMPRESS_HEADER_SIZE 0x48000
70#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
71
72#define MAX_FRAME_4K_NUM 0x1200
73#define MAX_FRAME_8K_NUM (0x1200*4)
74
75//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
76#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
77
78#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
79
80#define HEVC_CM_HEADER_START_ADDR 0x3628
81#define HEVC_SAO_MMU_VH1_ADDR 0x363b
82#define HEVC_SAO_MMU_VH0_ADDR 0x363a
83
84#define HEVC_DBLK_CFGB 0x350b
85#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
86#define SWAP_HEVC_OFFSET (3 * 0x1000)
87
88#define MEM_NAME "codec_265"
89/* #include <mach/am_regs.h> */
90#include <linux/amlogic/media/utils/vdec_reg.h>
91
92#include "../utils/vdec.h"
93#include "../utils/amvdec.h"
94#include <linux/amlogic/media/video_sink/video.h>
95#include <linux/amlogic/media/codec_mm/configs.h>
96
97#define SEND_LMEM_WITH_RPM
98#define SUPPORT_10BIT
99/* #define ERROR_HANDLE_DEBUG */
100
101#ifndef STAT_KTHREAD
102#define STAT_KTHREAD 0x40
103#endif
104
105#ifdef MULTI_INSTANCE_SUPPORT
106#define MAX_DECODE_INSTANCE_NUM 9
107#define MULTI_DRIVER_NAME "ammvdec_h265"
108#endif
109#define DRIVER_NAME "amvdec_h265"
110#define MODULE_NAME "amvdec_h265"
111#define DRIVER_HEADER_NAME "amvdec_h265_header"
112
113#define PUT_INTERVAL (HZ/100)
114#define ERROR_SYSTEM_RESET_COUNT 200
115
116#define PTS_NORMAL 0
117#define PTS_NONE_REF_USE_DURATION 1
118
119#define PTS_MODE_SWITCHING_THRESHOLD 3
120#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
121
122#define DUR2PTS(x) ((x)*90/96)
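/*
 * Worked example (editor's note, assuming frame_dur is in 1/96000s units
 * as elsewhere in amports): a 25fps stream has frame_dur = 3840, so
 * DUR2PTS(3840) = 3840 * 90 / 96 = 3600 ticks of the 90kHz PTS clock.
 */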
123
124#define MAX_SIZE_8K (8192 * 4608)
125#define MAX_SIZE_4K (4096 * 2304)
126
127#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
128#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
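/*
 * Illustration: IS_4K_SIZE(3840, 2160) is true because 3840*2160 exceeds
 * 1920*1088, while IS_8K_SIZE(3840, 2160) is false because it does not
 * exceed MAX_SIZE_4K (4096*2304).
 */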
129
130#define SEI_UserDataITU_T_T35 4
131#define INVALID_IDX -1 /* Invalid buffer index.*/
132
133static struct semaphore h265_sema;
134
135struct hevc_state_s;
136static int hevc_print(struct hevc_state_s *hevc,
137 int debug_flag, const char *fmt, ...);
138static int hevc_print_cont(struct hevc_state_s *hevc,
139 int debug_flag, const char *fmt, ...);
140static int vh265_vf_states(struct vframe_states *states, void *);
141static struct vframe_s *vh265_vf_peek(void *);
142static struct vframe_s *vh265_vf_get(void *);
143static void vh265_vf_put(struct vframe_s *, void *);
144static int vh265_event_cb(int type, void *data, void *private_data);
145
146static int vh265_stop(struct hevc_state_s *hevc);
147#ifdef MULTI_INSTANCE_SUPPORT
148static int vmh265_stop(struct hevc_state_s *hevc);
149static s32 vh265_init(struct vdec_s *vdec);
150static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
151static void reset_process_time(struct hevc_state_s *hevc);
152static void start_process_time(struct hevc_state_s *hevc);
153static void restart_process_time(struct hevc_state_s *hevc);
154static void timeout_process(struct hevc_state_s *hevc);
155#else
156static s32 vh265_init(struct hevc_state_s *hevc);
157#endif
158static void vh265_prot_init(struct hevc_state_s *hevc);
159static int vh265_local_init(struct hevc_state_s *hevc);
160static void vh265_check_timer_func(unsigned long arg);
161static void config_decode_mode(struct hevc_state_s *hevc);
162
163static const char vh265_dec_id[] = "vh265-dev";
164
165#define PROVIDER_NAME "decoder.h265"
166#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
167
168static const struct vframe_operations_s vh265_vf_provider = {
169 .peek = vh265_vf_peek,
170 .get = vh265_vf_get,
171 .put = vh265_vf_put,
172 .event_cb = vh265_event_cb,
173 .vf_states = vh265_vf_states,
174};
175
176static struct vframe_provider_s vh265_vf_prov;
177
178static u32 bit_depth_luma;
179static u32 bit_depth_chroma;
180static u32 video_signal_type;
181
182static int start_decode_buf_level = 0x8000;
183
184static unsigned int decode_timeout_val = 200;
185
186/*data_resend_policy:
187 bit 0, for stream-based input, resend the data when the decoding buffer is empty
188*/
189static u32 data_resend_policy = 1;
190
191#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
192/*
193static const char * const video_format_names[] = {
194 "component", "PAL", "NTSC", "SECAM",
195 "MAC", "unspecified", "unspecified", "unspecified"
196};
197
198static const char * const color_primaries_names[] = {
199 "unknown", "bt709", "undef", "unknown",
200 "bt470m", "bt470bg", "smpte170m", "smpte240m",
201 "film", "bt2020"
202};
203
204static const char * const transfer_characteristics_names[] = {
205 "unknown", "bt709", "undef", "unknown",
206 "bt470m", "bt470bg", "smpte170m", "smpte240m",
207 "linear", "log100", "log316", "iec61966-2-4",
208 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
209 "smpte-st-2084", "smpte-st-428"
210};
211
212static const char * const matrix_coeffs_names[] = {
213 "GBR", "bt709", "undef", "unknown",
214 "fcc", "bt470bg", "smpte170m", "smpte240m",
215 "YCgCo", "bt2020nc", "bt2020c"
216};
217*/
218#ifdef SUPPORT_10BIT
219#define HEVC_CM_BODY_START_ADDR 0x3626
220#define HEVC_CM_BODY_LENGTH 0x3627
221#define HEVC_CM_HEADER_LENGTH 0x3629
222#define HEVC_CM_HEADER_OFFSET 0x362b
223#define HEVC_SAO_CTRL9 0x362d
224#define LOSLESS_COMPRESS_MODE
225/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
226/* double_write_mode:
227 * 0, no double write;
228 * 1, 1:1 ratio;
229 * 2, (1/4):(1/4) ratio;
230 * 3, (1/4):(1/4) ratio, with both compressed frame included
231 * 4, (1/2):(1/2) ratio;
232 * 0x10, double write only
233 * 0x100, if > 1080p, use mode 4, else use mode 1;
234 * 0x200, if > 1080p, use mode 2, else use mode 1;
235 * 0x300, if > 720p, use mode 4, else use mode 1;
236 */
237static u32 double_write_mode;
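/*
 * Example (consistent with get_double_write_mode() below): with
 * double_write_mode = 0x100, a 3840x2160 stream uses double write mode 4
 * (a 1/2 x 1/2 scaled copy), while a 1920x1080 stream falls back to
 * mode 1 (a full size 1:1 copy).
 */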
238
239/*#define DECOMP_HEADR_SURGENT*/
240
241static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
242static u32 enable_mem_saving = 1;
243static u32 workaround_enable;
244static u32 force_w_h;
245#endif
246static u32 force_fps;
247static u32 pts_unstable;
248#define H265_DEBUG_BUFMGR 0x01
249#define H265_DEBUG_BUFMGR_MORE 0x02
250#define H265_DEBUG_DETAIL 0x04
251#define H265_DEBUG_REG 0x08
252#define H265_DEBUG_MAN_SEARCH_NAL 0x10
253#define H265_DEBUG_MAN_SKIP_NAL 0x20
254#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
255#define H265_DEBUG_FORCE_CLK 0x80
256#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
257#define H265_DEBUG_NO_DISPLAY 0x200
258#define H265_DEBUG_DISCARD_NAL 0x400
259#define H265_DEBUG_OUT_PTS 0x800
260#define H265_DEBUG_DUMP_PIC_LIST 0x1000
261#define H265_DEBUG_PRINT_SEI 0x2000
262#define H265_DEBUG_PIC_STRUCT 0x4000
263#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
264#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
265#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
266#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
267#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
268#define H265_DEBUG_HW_RESET 0x100000
269#define H265_CFG_CANVAS_IN_DECODE 0x200000
270#define H265_DEBUG_DV 0x400000
271#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
272#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
273#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
274#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
275#ifdef MULTI_INSTANCE_SUPPORT
276#define PRINT_FLAG_ERROR 0x0
277#define IGNORE_PARAM_FROM_CONFIG 0x08000000
278#define PRINT_FRAMEBASE_DATA 0x10000000
279#define PRINT_FLAG_VDEC_STATUS 0x20000000
280#define PRINT_FLAG_VDEC_DETAIL 0x40000000
281#define PRINT_FLAG_V4L_DETAIL 0x80000000
282#endif
283
284#define BUF_POOL_SIZE 32
285#define MAX_BUF_NUM 24
286#define MAX_REF_PIC_NUM 24
287#define MAX_REF_ACTIVE 16
288
289#ifdef MV_USE_FIXED_BUF
290#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
291#define VF_BUFFER_IDX(n) (n)
292#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
293#else
294#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
295#define VF_BUFFER_IDX(n) (n)
296#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
297#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
298#endif
299
300#define HEVC_MV_INFO 0x310d
301#define HEVC_QP_INFO 0x3137
302#define HEVC_SKIP_INFO 0x3136
303
304const u32 h265_version = 201602101;
305static u32 debug_mask = 0xffffffff;
306static u32 log_mask;
307static u32 debug;
308static u32 radr;
309static u32 rval;
310static u32 dbg_cmd;
311static u32 dump_nal;
312static u32 dbg_skip_decode_index;
313static u32 endian = 0xff0;
314#ifdef ERROR_HANDLE_DEBUG
315static u32 dbg_nal_skip_flag;
316 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
317static u32 dbg_nal_skip_count;
318#endif
319/*for debug*/
320/*
321 udebug_flag:
322 bit 0, enable ucode print
323 bit 1, enable ucode detail print
324 bit 2, pop bits to lmem
325 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
326 bit [31:16], if not 0, the lmem position to dump
327*/
328static u32 udebug_flag;
329/*
330 when udebug_flag[1:0] is not 0 and
331 udebug_pause_pos is not 0,
332 udebug_pause_pos is the position at which to pause
333*/
334static u32 udebug_pause_pos;
335/*
336 when udebug_flag[1:0] is not 0
337 and udebug_pause_pos is not 0,
338 pause only when DEBUG_REG2 is equal to this val
339*/
340static u32 udebug_pause_val;
341
342static u32 udebug_pause_decode_idx;
343
344static u32 decode_pic_begin;
345static uint slice_parse_begin;
346static u32 step;
347static bool is_reset;
348
349#ifdef CONSTRAIN_MAX_BUF_NUM
350static u32 run_ready_max_vf_only_num;
351static u32 run_ready_display_q_num;
352 /* 0: do not check
353 * 0xff: use work_pic_num
354 */
355static u32 run_ready_max_buf_num = 0xff;
356#endif
357
358static u32 dynamic_buf_num_margin = 7;
359static u32 buf_alloc_width;
360static u32 buf_alloc_height;
361
362static u32 max_buf_num = 16;
363static u32 buf_alloc_size;
364/*static u32 re_config_pic_flag;*/
365/*
366 *bit[0]: 0, release behavior on stop is selected by bit[1]:
367 *    bit[1]: 0, always release cma buffer when stop
368 *    bit[1]: 1, never release cma buffer when stop
369 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
370 *    do not release cma buffer if blackout is not 1
371 *
372 *bit[2]: 0, when start decoding, check current displayed buffer
373 * (only for buffer decoded by h265) if blackout is 0
374 * 1, do not check current displayed buffer
375 *
376 *bit[3]: 1, if blackout is not 1, never release the currently
377 * displayed cma buffer.
378 */
379/* set to 1 for fast play;
380 * set to 8 for other case of "keep last frame"
381 */
382static u32 buffer_mode = 1;
383
384/* buffer_mode_dbg: debug only*/
385static u32 buffer_mode_dbg = 0xffff0000;
386/**/
387/*
388 *bit[1:0] PB_skip_mode: 0, start decoding from the beginning;
389 *1, start decoding after the first I picture;
390 *2, only decode and display error-free pictures;
391 *3, start decoding and displaying after an IDR, etc.;
392 *bit[31:16] PB_skip_count_after_decoding (decoded but not displayed),
393 *only for mode 0 and 1.
394 */
395static u32 nal_skip_policy = 2;
396
397/*
398 *bit 0, 1: only display I picture;
399 *bit 1, 1: only decode I picture;
400 */
401static u32 i_only_flag;
402
403/*
404bit 0, fast output first I picture
405*/
406static u32 fast_output_enable = 1;
407
408static u32 frmbase_cont_bitlevel = 0x60;
409
410/*
411use_cma: 1, use both reserved memory and cma for buffers
4122, use only cma for buffers
413*/
414static u32 use_cma = 2;
415
416#define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
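/*
 * Worked example: AUX_BUF_ALIGN(0x1001) = (0x1001 + 0xf) & ~0xf = 0x1010,
 * i.e. aux buffer addresses are rounded up to the next 16-byte boundary.
 */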
417static u32 prefix_aux_buf_size = (16 * 1024);
418static u32 suffix_aux_buf_size;
419
420static u32 max_decoding_time;
421/*
422 *error handling
423 */
424/*error_handle_policy:
425 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
426 *1, skip error_skip_nal_count nals before error recovery;
427 *bit 1 (valid only when bit0 == 1):
428 *1, wait vps/sps/pps after error recovery;
429 *bit 2 (valid only when bit0 == 0):
430 *0, auto search after error recovery (hevc_recover() called);
431 *1, manual search after error recovery
432 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
433 *
434 *bit 4: 0, set error_mark after reset/recover
435 * 1, do not set error_mark after reset/recover
436 *bit 5: 0, check total lcu for every picture
437 * 1, do not check total lcu
438 *bit 6: 0, do not check head error
439 * 1, check head error
440 *
441 */
442
443static u32 error_handle_policy;
444static u32 error_skip_nal_count = 6;
445static u32 error_handle_threshold = 30;
446static u32 error_handle_nal_skip_threshold = 10;
447static u32 error_handle_system_threshold = 30;
448static u32 interlace_enable = 1;
449static u32 fr_hint_status;
450
451 /*
452 *parser_sei_enable:
453 * bit 0, sei;
454 * bit 1, sei_suffix (fill aux buf)
455 * bit 2, fill sei to aux buf (when bit 0 is 1)
456 * bit 8, debug flag
457 */
458static u32 parser_sei_enable;
459#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
460static u32 parser_dolby_vision_enable = 1;
461static u32 dolby_meta_with_el;
462static u32 dolby_el_flush_th = 2;
463#endif
464/* this is only for h265 mmu enable */
465
466static u32 mmu_enable = 1;
467static u32 mmu_enable_force;
468static u32 work_buf_size;
469static unsigned int force_disp_pic_index;
470static unsigned int disp_vframe_valve_level;
471static int pre_decode_buf_level = 0x1000;
472static unsigned int pic_list_debug;
473
474
475#ifdef MULTI_INSTANCE_SUPPORT
476static unsigned int max_decode_instance_num
477 = MAX_DECODE_INSTANCE_NUM;
478static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
479static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
480static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
481static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
482static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
483static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
484static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
485static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
486{1, 1, 1, 1, 1, 1, 1, 1, 1};
487
488#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
489static unsigned char get_idx(struct hevc_state_s *hevc);
490#endif
491
492#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
493static u32 dv_toggle_prov_name;
494
495static u32 dv_debug;
496
497static u32 force_bypass_dvenl;
498#endif
499#endif
500
501
502#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
503#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
504#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
505#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
506#else
507#define get_dbg_flag(hevc) debug
508#define get_dbg_flag2(hevc) debug
509#define is_log_enable(hevc) (log_mask ? 1 : 0)
510#define get_valid_double_write_mode(hevc) double_write_mode
511#define get_buf_alloc_width(hevc) buf_alloc_width
512#define get_buf_alloc_height(hevc) buf_alloc_height
513#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
514#endif
515#define get_buffer_mode(hevc) buffer_mode
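/*
 * Example of the per-instance masking above (multi-dec build): with
 * debug_mask = 0x2 only the instance with index 1 sees the debug flags;
 * get_dbg_flag() returns 0 for every other instance.
 */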
516
517
518DEFINE_SPINLOCK(lock);
519struct task_struct *h265_task = NULL;
520#undef DEBUG_REG
521#ifdef DEBUG_REG
522void WRITE_VREG_DBG(unsigned adr, unsigned val)
523{
524 if (debug & H265_DEBUG_REG)
525 pr_info("%s(%x, %x)\n", __func__, adr, val);
526 WRITE_VREG(adr, val);
527}
528
529#undef WRITE_VREG
530#define WRITE_VREG WRITE_VREG_DBG
531#endif
532
533static DEFINE_MUTEX(vh265_mutex);
534
535static DEFINE_MUTEX(vh265_log_mutex);
536
537static struct vdec_info *gvs;
538
539static u32 without_display_mode;
540
541/**************************************************
542 *
543 *h265 buffer management include
544 *
545 ***************************************************
546 */
547enum NalUnitType {
548 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
549 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
550
551 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
552 /* Current name in the spec: TSA_R */
553 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
554
555 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
556 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
557
558 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
559 /* Current name in the spec: RADL_R */
560 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
561
562 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
563 /* Current name in the spec: RASL_R */
564 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
565
566 NAL_UNIT_RESERVED_10,
567 NAL_UNIT_RESERVED_11,
568 NAL_UNIT_RESERVED_12,
569 NAL_UNIT_RESERVED_13,
570 NAL_UNIT_RESERVED_14,
571 NAL_UNIT_RESERVED_15,
572
573 /* Current name in the spec: BLA_W_LP */
574 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
575 /* Current name in the spec: BLA_W_DLP */
576 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
577 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
578 /* Current name in the spec: IDR_W_DLP */
579 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
580 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
581 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
582 NAL_UNIT_RESERVED_22,
583 NAL_UNIT_RESERVED_23,
584
585 NAL_UNIT_RESERVED_24,
586 NAL_UNIT_RESERVED_25,
587 NAL_UNIT_RESERVED_26,
588 NAL_UNIT_RESERVED_27,
589 NAL_UNIT_RESERVED_28,
590 NAL_UNIT_RESERVED_29,
591 NAL_UNIT_RESERVED_30,
592 NAL_UNIT_RESERVED_31,
593
594 NAL_UNIT_VPS, /* 32 */
595 NAL_UNIT_SPS, /* 33 */
596 NAL_UNIT_PPS, /* 34 */
597 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
598 NAL_UNIT_EOS, /* 36 */
599 NAL_UNIT_EOB, /* 37 */
600 NAL_UNIT_FILLER_DATA, /* 38 */
601 NAL_UNIT_SEI, /* 39 Prefix SEI */
602 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
603 NAL_UNIT_RESERVED_41,
604 NAL_UNIT_RESERVED_42,
605 NAL_UNIT_RESERVED_43,
606 NAL_UNIT_RESERVED_44,
607 NAL_UNIT_RESERVED_45,
608 NAL_UNIT_RESERVED_46,
609 NAL_UNIT_RESERVED_47,
610 NAL_UNIT_UNSPECIFIED_48,
611 NAL_UNIT_UNSPECIFIED_49,
612 NAL_UNIT_UNSPECIFIED_50,
613 NAL_UNIT_UNSPECIFIED_51,
614 NAL_UNIT_UNSPECIFIED_52,
615 NAL_UNIT_UNSPECIFIED_53,
616 NAL_UNIT_UNSPECIFIED_54,
617 NAL_UNIT_UNSPECIFIED_55,
618 NAL_UNIT_UNSPECIFIED_56,
619 NAL_UNIT_UNSPECIFIED_57,
620 NAL_UNIT_UNSPECIFIED_58,
621 NAL_UNIT_UNSPECIFIED_59,
622 NAL_UNIT_UNSPECIFIED_60,
623 NAL_UNIT_UNSPECIFIED_61,
624 NAL_UNIT_UNSPECIFIED_62,
625 NAL_UNIT_UNSPECIFIED_63,
626 NAL_UNIT_INVALID,
627};
628
629/* --------------------------------------------------- */
630/* Amrisc Software Interrupt */
631/* --------------------------------------------------- */
632#define AMRISC_STREAM_EMPTY_REQ 0x01
633#define AMRISC_PARSER_REQ 0x02
634#define AMRISC_MAIN_REQ 0x04
635
636/* --------------------------------------------------- */
637/* HEVC_DEC_STATUS define */
638/* --------------------------------------------------- */
639#define HEVC_DEC_IDLE 0x0
640#define HEVC_NAL_UNIT_VPS 0x1
641#define HEVC_NAL_UNIT_SPS 0x2
642#define HEVC_NAL_UNIT_PPS 0x3
643#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
644#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
645#define HEVC_SLICE_DECODING 0x6
646#define HEVC_NAL_UNIT_SEI 0x7
647#define HEVC_SLICE_SEGMENT_DONE 0x8
648#define HEVC_NAL_SEARCH_DONE 0x9
649#define HEVC_DECPIC_DATA_DONE 0xa
650#define HEVC_DECPIC_DATA_ERROR 0xb
651#define HEVC_SEI_DAT 0xc
652#define HEVC_SEI_DAT_DONE 0xd
653#define HEVC_NAL_DECODE_DONE 0xe
654#define HEVC_OVER_DECODE 0xf
655
656#define HEVC_DATA_REQUEST 0x12
657
658#define HEVC_DECODE_BUFEMPTY 0x20
659#define HEVC_DECODE_TIMEOUT 0x21
660#define HEVC_SEARCH_BUFEMPTY 0x22
661#define HEVC_DECODE_OVER_SIZE 0x23
662#define HEVC_DECODE_BUFEMPTY2 0x24
663#define HEVC_FIND_NEXT_PIC_NAL 0x50
664#define HEVC_FIND_NEXT_DVEL_NAL 0x51
665
666#define HEVC_DUMP_LMEM 0x30
667
668#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
669#define HEVC_DISCARD_NAL 0xf0
670#define HEVC_ACTION_DEC_CONT 0xfd
671#define HEVC_ACTION_ERROR 0xfe
672#define HEVC_ACTION_DONE 0xff
673
674/* --------------------------------------------------- */
675/* Include "parser_cmd.h" */
676/* --------------------------------------------------- */
677#define PARSER_CMD_SKIP_CFG_0 0x0000090b
678
679#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
680
681#define PARSER_CMD_SKIP_CFG_2 0x001b1910
682
683#define PARSER_CMD_NUMBER 37
684
685/**************************************************
686 *
687 *h265 buffer management
688 *
689 ***************************************************
690 */
691/* #define BUFFER_MGR_ONLY */
692/* #define CONFIG_HEVC_CLK_FORCED_ON */
693/* #define ENABLE_SWAP_TEST */
694#define MCRCC_ENABLE
695#define INVALID_POC 0x80000000
696
697#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
698#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
699#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
700#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
701#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
702#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
703#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
704#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
705#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
706#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
707#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
708#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
709#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
710#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
711#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
712#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
713#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
714#ifdef ENABLE_SWAP_TEST
715#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
716#endif
717
718/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
719/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
720#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
721 /*do not define ENABLE_SWAP_TEST*/
722#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
723#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
724
725#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
726#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
727/*
728 *ucode parser/search control
729 *bit 0: 0, header auto parse; 1, header manual parse
730 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
731 *bit [3:2]: valid when bit1==0;
732 *0, auto skip nal before first vps/sps/pps/idr;
733 *1, auto skip nal before first vps/sps/pps
734 *2, auto skip nal before first vps/sps/pps,
735 * and not decode until the first I slice (with slice address of 0)
736 *
737 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
738 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
739 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
740 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
741 *bit [17]: for NAL_SEI when bit0 is 0:
742 * 0, do not parse/fetch SEI in ucode;
743 * 1, parse/fetch SEI in ucode
744 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
745 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
746 * 1, fetch NAL_SEI_SUFFIX data to aux buf
747 *bit [19]:
748 * 0, parse NAL_SEI in ucode
749 * 1, fetch NAL_SEI to aux buf
750 *bit [20]: for DOLBY_VISION_META
751 * 0, do not fetch DOLBY_VISION_META to aux buf
752 * 1, fetch DOLBY_VISION_META to aux buf
753 */
754#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
755 /*read only*/
756#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
757 /*
758 [15 : 8] rps_set_id
759 [7 : 0] start_decoding_flag
760 */
761#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
762 /* set before starting the decoder */
763#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
764#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
765#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
766
767#define DECODE_MODE_SINGLE 0x0
768#define DECODE_MODE_MULTI_FRAMEBASE 0x1
769#define DECODE_MODE_MULTI_STREAMBASE 0x2
770#define DECODE_MODE_MULTI_DVBAL 0x3
771#define DECODE_MODE_MULTI_DVENL 0x4
772
773#define MAX_INT 0x7FFFFFFF
774
775#define RPM_BEGIN 0x100
776#define modification_list_cur 0x148
777#define RPM_END 0x180
778
779#define RPS_USED_BIT 14
780/* MISC_FLAG0 */
781#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
782#define PCM_ENABLE_FLAG_BIT 1
783#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
784#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
785#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
786#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
787#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
788#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
789#define SLICE_SAO_LUMA_FLAG_BIT 8
790#define SLICE_SAO_CHROMA_FLAG_BIT 9
791#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
792
793union param_u {
794 struct {
795 unsigned short data[RPM_END - RPM_BEGIN];
796 } l;
797 struct {
798 /* from ucode lmem, do not change this struct */
799 unsigned short CUR_RPS[0x10];
800 unsigned short num_ref_idx_l0_active;
801 unsigned short num_ref_idx_l1_active;
802 unsigned short slice_type;
803 unsigned short slice_temporal_mvp_enable_flag;
804 unsigned short dependent_slice_segment_flag;
805 unsigned short slice_segment_address;
806 unsigned short num_title_rows_minus1;
807 unsigned short pic_width_in_luma_samples;
808 unsigned short pic_height_in_luma_samples;
809 unsigned short log2_min_coding_block_size_minus3;
810 unsigned short log2_diff_max_min_coding_block_size;
811 unsigned short log2_max_pic_order_cnt_lsb_minus4;
812 unsigned short POClsb;
813 unsigned short collocated_from_l0_flag;
814 unsigned short collocated_ref_idx;
815 unsigned short log2_parallel_merge_level;
816 unsigned short five_minus_max_num_merge_cand;
817 unsigned short sps_num_reorder_pics_0;
818 unsigned short modification_flag;
819 unsigned short tiles_enabled_flag;
820 unsigned short num_tile_columns_minus1;
821 unsigned short num_tile_rows_minus1;
822 unsigned short tile_width[8];
823 unsigned short tile_height[8];
824 unsigned short misc_flag0;
825 unsigned short pps_beta_offset_div2;
826 unsigned short pps_tc_offset_div2;
827 unsigned short slice_beta_offset_div2;
828 unsigned short slice_tc_offset_div2;
829 unsigned short pps_cb_qp_offset;
830 unsigned short pps_cr_qp_offset;
831 unsigned short first_slice_segment_in_pic_flag;
832 unsigned short m_temporalId;
833 unsigned short m_nalUnitType;
834
835 unsigned short vui_num_units_in_tick_hi;
836 unsigned short vui_num_units_in_tick_lo;
837 unsigned short vui_time_scale_hi;
838 unsigned short vui_time_scale_lo;
839 unsigned short bit_depth;
840 unsigned short profile_etc;
841 unsigned short sei_frame_field_info;
842 unsigned short video_signal_type;
843 unsigned short modification_list[0x20];
844 unsigned short conformance_window_flag;
845 unsigned short conf_win_left_offset;
846 unsigned short conf_win_right_offset;
847 unsigned short conf_win_top_offset;
848 unsigned short conf_win_bottom_offset;
849 unsigned short chroma_format_idc;
850 unsigned short color_description;
851 unsigned short aspect_ratio_idc;
852 unsigned short sar_width;
853 unsigned short sar_height;
854 unsigned short sps_max_dec_pic_buffering_minus1_0;
855 } p;
856};
857
858#define RPM_BUF_SIZE (0x80*2)
859/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
860#define LMEM_BUF_SIZE (0x500 * 2)
861
862struct buff_s {
863 u32 buf_start;
864 u32 buf_size;
865 u32 buf_end;
866};
867
868struct BuffInfo_s {
869 u32 max_width;
870 u32 max_height;
871 unsigned int start_adr;
872 unsigned int end_adr;
873 struct buff_s ipp;
874 struct buff_s sao_abv;
875 struct buff_s sao_vb;
876 struct buff_s short_term_rps;
877 struct buff_s vps;
878 struct buff_s sps;
879 struct buff_s pps;
880 struct buff_s sao_up;
881 struct buff_s swap_buf;
882 struct buff_s swap_buf2;
883 struct buff_s scalelut;
884 struct buff_s dblk_para;
885 struct buff_s dblk_data;
886 struct buff_s dblk_data2;
887 struct buff_s mmu_vbh;
888 struct buff_s cm_header;
889 struct buff_s mpred_above;
890#ifdef MV_USE_FIXED_BUF
891 struct buff_s mpred_mv;
892#endif
893 struct buff_s rpm;
894 struct buff_s lmem;
895};
896#define WORK_BUF_SPEC_NUM 3
897static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
898 {
899 /* 8M bytes */
900 .max_width = 1920,
901 .max_height = 1088,
902 .ipp = {
903 /* IPP work space calculation :
904 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
905 */
906 .buf_size = 0x4000,
907 },
908 .sao_abv = {
909 .buf_size = 0x30000,
910 },
911 .sao_vb = {
912 .buf_size = 0x30000,
913 },
914 .short_term_rps = {
915 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
916 * total 64x16x2 = 2048 bytes (0x800)
917 */
918 .buf_size = 0x800,
919 },
920 .vps = {
921 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
922 * total 0x0800 bytes
923 */
924 .buf_size = 0x800,
925 },
926 .sps = {
927 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
928 * total 0x0800 bytes
929 */
930 .buf_size = 0x800,
931 },
932 .pps = {
933 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
934 * total 0x2000 bytes
935 */
936 .buf_size = 0x2000,
937 },
938 .sao_up = {
939 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
940 * each has 16 bytes total 0x2800 bytes
941 */
942 .buf_size = 0x2800,
943 },
944 .swap_buf = {
945 /* 256cyclex64bit = 2K bytes 0x800
946 * (only 144 cycles valid)
947 */
948 .buf_size = 0x800,
949 },
950 .swap_buf2 = {
951 .buf_size = 0x800,
952 },
953 .scalelut = {
954 /* support up to 32 SCALELUT 1024x32 =
955 * 32Kbytes (0x8000)
956 */
957 .buf_size = 0x8000,
958 },
959 .dblk_para = {
960#ifdef SUPPORT_10BIT
961 .buf_size = 0x40000,
962#else
963 /* DBLK -> Max 256(4096/16) LCU, each para
964 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
965 */
966 .buf_size = 0x20000,
967#endif
968 },
969 .dblk_data = {
970 .buf_size = 0x40000,
971 },
972 .dblk_data2 = {
973 .buf_size = 0x40000,
974 }, /*dblk data for adapter*/
975 .mmu_vbh = {
976 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
977 },
978#if 0
979 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
980 .buf_size = MMU_COMPRESS_HEADER_SIZE *
981 (MAX_REF_PIC_NUM + 1),
982 },
983#endif
984 .mpred_above = {
985 .buf_size = 0x8000,
986 },
987#ifdef MV_USE_FIXED_BUF
988 .mpred_mv = {/* 1080p, 0x40000 per buffer */
989 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
990 },
991#endif
992 .rpm = {
993 .buf_size = RPM_BUF_SIZE,
994 },
995 .lmem = {
996 .buf_size = 0x500 * 2,
997 }
998 },
999 {
1000 .max_width = 4096,
1001 .max_height = 2048,
1002 .ipp = {
1003 /* IPP work space calculation :
1004 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1005 */
1006 .buf_size = 0x4000,
1007 },
1008 .sao_abv = {
1009 .buf_size = 0x30000,
1010 },
1011 .sao_vb = {
1012 .buf_size = 0x30000,
1013 },
1014 .short_term_rps = {
1015 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1016 * total 64x16x2 = 2048 bytes (0x800)
1017 */
1018 .buf_size = 0x800,
1019 },
1020 .vps = {
1021 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1022 * total 0x0800 bytes
1023 */
1024 .buf_size = 0x800,
1025 },
1026 .sps = {
1027 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1028 * total 0x0800 bytes
1029 */
1030 .buf_size = 0x800,
1031 },
1032 .pps = {
1033 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1034 * total 0x2000 bytes
1035 */
1036 .buf_size = 0x2000,
1037 },
1038 .sao_up = {
1039 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1040 * each has 16 bytes total 0x2800 bytes
1041 */
1042 .buf_size = 0x2800,
1043 },
1044 .swap_buf = {
1045 /* 256cyclex64bit = 2K bytes 0x800
1046 * (only 144 cycles valid)
1047 */
1048 .buf_size = 0x800,
1049 },
1050 .swap_buf2 = {
1051 .buf_size = 0x800,
1052 },
1053 .scalelut = {
1054 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1055 * (0x8000)
1056 */
1057 .buf_size = 0x8000,
1058 },
1059 .dblk_para = {
1060 /* DBLK -> Max 256(4096/16) LCU, each para
1061 * 512bytes(total:0x20000),
1062 * data 1024bytes(total:0x40000)
1063 */
1064 .buf_size = 0x20000,
1065 },
1066 .dblk_data = {
1067 .buf_size = 0x80000,
1068 },
1069 .dblk_data2 = {
1070 .buf_size = 0x80000,
1071 }, /*dblk data for adapter*/
1072 .mmu_vbh = {
1073 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1074 },
1075#if 0
1076 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1077 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1078 (MAX_REF_PIC_NUM + 1),
1079 },
1080#endif
1081 .mpred_above = {
1082 .buf_size = 0x8000,
1083 },
1084#ifdef MV_USE_FIXED_BUF
1085 .mpred_mv = {
1086 /* .buf_size = 0x100000*16,
1087 //4k2k , 0x100000 per buffer */
1088 /* 4096x2304 , 0x120000 per buffer */
1089 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1090 },
1091#endif
1092 .rpm = {
1093 .buf_size = RPM_BUF_SIZE,
1094 },
1095 .lmem = {
1096 .buf_size = 0x500 * 2,
1097 }
1098 },
1099
1100 {
1101 .max_width = 4096*2,
1102 .max_height = 2048*2,
1103 .ipp = {
1104 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1105 .buf_size = 0x4000*2,
1106 },
1107 .sao_abv = {
1108 .buf_size = 0x30000*2,
1109 },
1110 .sao_vb = {
1111 .buf_size = 0x30000*2,
1112 },
1113 .short_term_rps = {
1114 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1115 .buf_size = 0x800,
1116 },
1117 .vps = {
1118 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1119 .buf_size = 0x800,
1120 },
1121 .sps = {
1122 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1123 .buf_size = 0x800,
1124 },
1125 .pps = {
1126 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1127 .buf_size = 0x2000,
1128 },
1129 .sao_up = {
1130 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1131 .buf_size = 0x2800*2,
1132 },
1133 .swap_buf = {
1134 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1135 .buf_size = 0x800,
1136 },
1137 .swap_buf2 = {
1138 .buf_size = 0x800,
1139 },
1140 .scalelut = {
1141 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1142 .buf_size = 0x8000*2,
1143 },
1144 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1145 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1146 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1147 .mmu_vbh = {
1148 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1149 },
1150#if 0
1151 .cm_header = {
1152 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1153 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1154 },
1155#endif
1156 .mpred_above = {
1157 .buf_size = 0x8000*2,
1158 },
1159#ifdef MV_USE_FIXED_BUF
1160 .mpred_mv = {
1161 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1162 },
1163#endif
1164 .rpm = {
1165 .buf_size = RPM_BUF_SIZE,
1166 },
1167 .lmem = {
1168 .buf_size = 0x500 * 2,
1169 },
1170 }
1171};
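/*
 * Editor's note: the three specs above cover 1080p (1920x1088),
 * 4K (4096x2048) and 8K (8192x4096) streams; init_buff_spec() below lays
 * the sub-buffers out back to back starting from start_adr.
 */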
1172
1173static void init_buff_spec(struct hevc_state_s *hevc,
1174 struct BuffInfo_s *buf_spec)
1175{
1176 buf_spec->ipp.buf_start = buf_spec->start_adr;
1177 buf_spec->sao_abv.buf_start =
1178 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1179
1180 buf_spec->sao_vb.buf_start =
1181 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1182 buf_spec->short_term_rps.buf_start =
1183 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1184 buf_spec->vps.buf_start =
1185 buf_spec->short_term_rps.buf_start +
1186 buf_spec->short_term_rps.buf_size;
1187 buf_spec->sps.buf_start =
1188 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1189 buf_spec->pps.buf_start =
1190 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1191 buf_spec->sao_up.buf_start =
1192 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1193 buf_spec->swap_buf.buf_start =
1194 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1195 buf_spec->swap_buf2.buf_start =
1196 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1197 buf_spec->scalelut.buf_start =
1198 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1199 buf_spec->dblk_para.buf_start =
1200 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1201 buf_spec->dblk_data.buf_start =
1202 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1203 buf_spec->dblk_data2.buf_start =
1204 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1205 buf_spec->mmu_vbh.buf_start =
1206 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1207 buf_spec->mpred_above.buf_start =
1208 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1209#ifdef MV_USE_FIXED_BUF
1210 buf_spec->mpred_mv.buf_start =
1211 buf_spec->mpred_above.buf_start +
1212 buf_spec->mpred_above.buf_size;
1213
1214 buf_spec->rpm.buf_start =
1215 buf_spec->mpred_mv.buf_start +
1216 buf_spec->mpred_mv.buf_size;
1217#else
1218 buf_spec->rpm.buf_start =
1219 buf_spec->mpred_above.buf_start +
1220 buf_spec->mpred_above.buf_size;
1221#endif
1222 buf_spec->lmem.buf_start =
1223 buf_spec->rpm.buf_start +
1224 buf_spec->rpm.buf_size;
1225 buf_spec->end_adr =
1226 buf_spec->lmem.buf_start +
1227 buf_spec->lmem.buf_size;
1228
1229 if (hevc && get_dbg_flag2(hevc)) {
1230 hevc_print(hevc, 0,
1231 "%s workspace (%x %x) size = %x\n", __func__,
1232 buf_spec->start_adr, buf_spec->end_adr,
1233 buf_spec->end_adr - buf_spec->start_adr);
1234
1235 hevc_print(hevc, 0,
1236 "ipp.buf_start :%x\n",
1237 buf_spec->ipp.buf_start);
1238 hevc_print(hevc, 0,
1239 "sao_abv.buf_start :%x\n",
1240 buf_spec->sao_abv.buf_start);
1241 hevc_print(hevc, 0,
1242 "sao_vb.buf_start :%x\n",
1243 buf_spec->sao_vb.buf_start);
1244 hevc_print(hevc, 0,
1245 "short_term_rps.buf_start :%x\n",
1246 buf_spec->short_term_rps.buf_start);
1247 hevc_print(hevc, 0,
1248 "vps.buf_start :%x\n",
1249 buf_spec->vps.buf_start);
1250 hevc_print(hevc, 0,
1251 "sps.buf_start :%x\n",
1252 buf_spec->sps.buf_start);
1253 hevc_print(hevc, 0,
1254 "pps.buf_start :%x\n",
1255 buf_spec->pps.buf_start);
1256 hevc_print(hevc, 0,
1257 "sao_up.buf_start :%x\n",
1258 buf_spec->sao_up.buf_start);
1259 hevc_print(hevc, 0,
1260 "swap_buf.buf_start :%x\n",
1261 buf_spec->swap_buf.buf_start);
1262 hevc_print(hevc, 0,
1263 "swap_buf2.buf_start :%x\n",
1264 buf_spec->swap_buf2.buf_start);
1265 hevc_print(hevc, 0,
1266 "scalelut.buf_start :%x\n",
1267 buf_spec->scalelut.buf_start);
1268 hevc_print(hevc, 0,
1269 "dblk_para.buf_start :%x\n",
1270 buf_spec->dblk_para.buf_start);
1271 hevc_print(hevc, 0,
1272 "dblk_data.buf_start :%x\n",
1273 buf_spec->dblk_data.buf_start);
1274 hevc_print(hevc, 0,
1275 "dblk_data2.buf_start :%x\n",
1276 buf_spec->dblk_data2.buf_start);
1277 hevc_print(hevc, 0,
1278 "mpred_above.buf_start :%x\n",
1279 buf_spec->mpred_above.buf_start);
1280#ifdef MV_USE_FIXED_BUF
1281 hevc_print(hevc, 0,
1282 "mpred_mv.buf_start :%x\n",
1283 buf_spec->mpred_mv.buf_start);
1284#endif
1285 if ((get_dbg_flag2(hevc)
1286 &
1287 H265_DEBUG_SEND_PARAM_WITH_REG)
1288 == 0) {
1289 hevc_print(hevc, 0,
1290 "rpm.buf_start :%x\n",
1291 buf_spec->rpm.buf_start);
1292 }
1293 }
1294
1295}
1296
1297enum SliceType {
1298 B_SLICE,
1299 P_SLICE,
1300 I_SLICE
1301};
1302
1303/*USE_BUF_BLOCK*/
1304struct BUF_s {
1305 ulong start_adr;
1306 u32 size;
1307 u32 luma_size;
1308 ulong header_addr;
1309 u32 header_size;
1310 int used_flag;
1311 ulong v4l_ref_buf_addr;
1312} /*BUF_t */;
1313
1314/* for levels 6 and 6.1 the maximum slice number is 800; otherwise it is 200 */
1315#define MAX_SLICE_NUM 800
1316struct PIC_s {
1317 int index;
1318 int scatter_alloc;
1319 int BUF_index;
1320 int mv_buf_index;
1321 int POC;
1322 int decode_idx;
1323 int slice_type;
1324 int RefNum_L0;
1325 int RefNum_L1;
1326 int num_reorder_pic;
1327 int stream_offset;
1328 unsigned char referenced;
1329 unsigned char output_mark;
1330 unsigned char recon_mark;
1331 unsigned char output_ready;
1332 unsigned char error_mark;
1333 //dis_mark = 0: discard mark; dis_mark = 1: no discard mark
1334 unsigned char dis_mark;
1335 /**/ int slice_idx;
1336 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1337 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1338 /*buffer */
1339 unsigned int header_adr;
1340#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1341 unsigned char dv_enhance_exist;
1342#endif
1343 char *aux_data_buf;
1344 int aux_data_size;
1345 unsigned long cma_alloc_addr;
1346 struct page *alloc_pages;
1347 unsigned int mpred_mv_wr_start_addr;
1348 unsigned int mc_y_adr;
1349 unsigned int mc_u_v_adr;
1350#ifdef SUPPORT_10BIT
1351 /*unsigned int comp_body_size;*/
1352 unsigned int dw_y_adr;
1353 unsigned int dw_u_v_adr;
1354#endif
1355 int mc_canvas_y;
1356 int mc_canvas_u_v;
1357 int width;
1358 int height;
1359
1360 int y_canvas_index;
1361 int uv_canvas_index;
1362#ifdef MULTI_INSTANCE_SUPPORT
1363 struct canvas_config_s canvas_config[2];
1364#endif
1365#ifdef SUPPORT_10BIT
1366 int mem_saving_mode;
1367 u32 bit_depth_luma;
1368 u32 bit_depth_chroma;
1369#endif
1370#ifdef LOSLESS_COMPRESS_MODE
1371 unsigned int losless_comp_body_size;
1372#endif
1373 unsigned char pic_struct;
1374 int vf_ref;
1375
1376 u32 pts;
1377 u64 pts64;
1378 u64 timestamp;
1379
1380 u32 aspect_ratio_idc;
1381 u32 sar_width;
1382 u32 sar_height;
1383 u32 double_write_mode;
1384 u32 video_signal_type;
1385 unsigned short conformance_window_flag;
1386 unsigned short conf_win_left_offset;
1387 unsigned short conf_win_right_offset;
1388 unsigned short conf_win_top_offset;
1389 unsigned short conf_win_bottom_offset;
1390 unsigned short chroma_format_idc;
1391
1392 /* picture qos information */
1393 int max_qp;
1394 int avg_qp;
1395 int min_qp;
1396 int max_skip;
1397 int avg_skip;
1398 int min_skip;
1399 int max_mv;
1400 int min_mv;
1401 int avg_mv;
1402
1403 bool vframe_bound;
1404} /*PIC_t */;
1405
1406#define MAX_TILE_COL_NUM 10
1407#define MAX_TILE_ROW_NUM 20
1408struct tile_s {
1409 int width;
1410 int height;
1411 int start_cu_x;
1412 int start_cu_y;
1413
1414 unsigned int sao_vb_start_addr;
1415 unsigned int sao_abv_start_addr;
1416};
1417
1418#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1419#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1420#define SEI_HDR10PLUS_MASK 0x00000004
1421
1422#define VF_POOL_SIZE 32
1423
1424#ifdef MULTI_INSTANCE_SUPPORT
1425#define DEC_RESULT_NONE 0
1426#define DEC_RESULT_DONE 1
1427#define DEC_RESULT_AGAIN 2
1428#define DEC_RESULT_CONFIG_PARAM 3
1429#define DEC_RESULT_ERROR 4
1430#define DEC_INIT_PICLIST 5
1431#define DEC_UNINIT_PICLIST 6
1432#define DEC_RESULT_GET_DATA 7
1433#define DEC_RESULT_GET_DATA_RETRY 8
1434#define DEC_RESULT_EOS 9
1435#define DEC_RESULT_FORCE_EXIT 10
1436#define DEC_RESULT_FREE_CANVAS 11
1437
1438static void vh265_work(struct work_struct *work);
1439static void vh265_timeout_work(struct work_struct *work);
1440static void vh265_notify_work(struct work_struct *work);
1441
1442#endif
1443
1444struct debug_log_s {
1445 struct list_head list;
1446 uint8_t data; /* first byte of the log string; allocated with extra space */
1447};
1448
1449struct hevc_state_s {
1450#ifdef MULTI_INSTANCE_SUPPORT
1451 struct platform_device *platform_dev;
1452 void (*vdec_cb)(struct vdec_s *, void *);
1453 void *vdec_cb_arg;
1454 struct vframe_chunk_s *chunk;
1455 int dec_result;
1456 struct work_struct work;
1457 struct work_struct timeout_work;
1458 struct work_struct notify_work;
1459 struct work_struct set_clk_work;
1460 /* timeout handle */
1461 unsigned long int start_process_time;
1462 unsigned int last_lcu_idx;
1463 unsigned int decode_timeout_count;
1464 unsigned int timeout_num;
1465#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1466 unsigned char switch_dvlayer_flag;
1467 unsigned char no_switch_dvlayer_count;
1468 unsigned char bypass_dvenl_enable;
1469 unsigned char bypass_dvenl;
1470#endif
1471 unsigned char start_parser_type;
1472 /*start_decoding_flag:
1473 vps/pps/sps/idr info from ucode*/
1474 unsigned char start_decoding_flag;
1475 unsigned char rps_set_id;
1476 unsigned char eos;
1477 int pic_decoded_lcu_idx;
1478 u8 over_decode;
1479 u8 empty_flag;
1480#endif
1481 struct vframe_s vframe_dummy;
1482 char *provider_name;
1483 int index;
1484 struct device *cma_dev;
1485 unsigned char m_ins_flag;
1486 unsigned char dolby_enhance_flag;
1487 unsigned long buf_start;
1488 u32 buf_size;
1489 u32 mv_buf_size;
1490
1491 struct BuffInfo_s work_space_buf_store;
1492 struct BuffInfo_s *work_space_buf;
1493
1494 u8 aux_data_dirty;
1495 u32 prefix_aux_size;
1496 u32 suffix_aux_size;
1497 void *aux_addr;
1498 void *rpm_addr;
1499 void *lmem_addr;
1500 dma_addr_t aux_phy_addr;
1501 dma_addr_t rpm_phy_addr;
1502 dma_addr_t lmem_phy_addr;
1503
1504 unsigned int pic_list_init_flag;
1505 unsigned int use_cma_flag;
1506
1507 unsigned short *rpm_ptr;
1508 unsigned short *lmem_ptr;
1509 unsigned short *debug_ptr;
1510 int debug_ptr_size;
1511 int pic_w;
1512 int pic_h;
1513 int lcu_x_num;
1514 int lcu_y_num;
1515 int lcu_total;
1516 int lcu_size;
1517 int lcu_size_log2;
1518 int lcu_x_num_pre;
1519 int lcu_y_num_pre;
1520 int first_pic_after_recover;
1521
1522 int num_tile_col;
1523 int num_tile_row;
1524 int tile_enabled;
1525 int tile_x;
1526 int tile_y;
1527 int tile_y_x;
1528 int tile_start_lcu_x;
1529 int tile_start_lcu_y;
1530 int tile_width_lcu;
1531 int tile_height_lcu;
1532
1533 int slice_type;
1534 unsigned int slice_addr;
1535 unsigned int slice_segment_addr;
1536
1537 unsigned char interlace_flag;
1538 unsigned char curr_pic_struct;
1539 unsigned char frame_field_info_present_flag;
1540
1541 unsigned short sps_num_reorder_pics_0;
1542 unsigned short misc_flag0;
1543 int m_temporalId;
1544 int m_nalUnitType;
1545 int TMVPFlag;
1546 int isNextSliceSegment;
1547 int LDCFlag;
1548 int m_pocRandomAccess;
1549 int plevel;
1550 int MaxNumMergeCand;
1551
1552 int new_pic;
1553 int new_tile;
1554 int curr_POC;
1555 int iPrevPOC;
1556#ifdef MULTI_INSTANCE_SUPPORT
1557 int decoded_poc;
1558 struct PIC_s *decoding_pic;
1559#endif
1560 int iPrevTid0POC;
1561 int list_no;
1562 int RefNum_L0;
1563 int RefNum_L1;
1564 int ColFromL0Flag;
1565 int LongTerm_Curr;
1566 int LongTerm_Col;
1567 int Col_POC;
1568 int LongTerm_Ref;
1569#ifdef MULTI_INSTANCE_SUPPORT
1570 int m_pocRandomAccess_bak;
1571 int curr_POC_bak;
1572 int iPrevPOC_bak;
1573 int iPrevTid0POC_bak;
1574 unsigned char start_parser_type_bak;
1575 unsigned char start_decoding_flag_bak;
1576 unsigned char rps_set_id_bak;
1577 int pic_decoded_lcu_idx_bak;
1578 int decode_idx_bak;
1579#endif
1580 struct PIC_s *cur_pic;
1581 struct PIC_s *col_pic;
1582 int skip_flag;
1583 int decode_idx;
1584 int slice_idx;
1585 unsigned char have_vps;
1586 unsigned char have_sps;
1587 unsigned char have_pps;
1588 unsigned char have_valid_start_slice;
1589 unsigned char wait_buf;
1590 unsigned char error_flag;
1591 unsigned int error_skip_nal_count;
1592 long used_4k_num;
1593
1594 unsigned char
1595 ignore_bufmgr_error; /* bit 0, for decoding;
1596 bit 1, for displaying
1597 bit 1 must be set if bit 0 is 1*/
1598 int PB_skip_mode;
1599 int PB_skip_count_after_decoding;
1600#ifdef SUPPORT_10BIT
1601 int mem_saving_mode;
1602#endif
1603#ifdef LOSLESS_COMPRESS_MODE
1604 unsigned int losless_comp_body_size;
1605#endif
1606 int pts_mode;
1607 int last_lookup_pts;
1608 int last_pts;
1609 u64 last_lookup_pts_us64;
1610 u64 last_pts_us64;
1611 u32 shift_byte_count_lo;
1612 u32 shift_byte_count_hi;
1613 int pts_mode_switching_count;
1614 int pts_mode_recovery_count;
1615
1616 int pic_num;
1617
1618 /**/
1619 union param_u param;
1620
1621 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1622
1623 struct timer_list timer;
1624 struct BUF_s m_BUF[BUF_POOL_SIZE];
1625 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1626 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1627
1628 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1629 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1630 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1631 struct vframe_s vfpool[VF_POOL_SIZE];
1632
1633 u32 stat;
1634 u32 frame_width;
1635 u32 frame_height;
1636 u32 frame_dur;
1637 u32 frame_ar;
1638 u32 bit_depth_luma;
1639 u32 bit_depth_chroma;
1640 u32 video_signal_type;
1641 u32 video_signal_type_debug;
1642 u32 saved_resolution;
1643 bool get_frame_dur;
1644 u32 error_watchdog_count;
1645 u32 error_skip_nal_wt_cnt;
1646 u32 error_system_watchdog_count;
1647
1648#ifdef DEBUG_PTS
1649 unsigned long pts_missed;
1650 unsigned long pts_hit;
1651#endif
1652 struct dec_sysinfo vh265_amstream_dec_info;
1653 unsigned char init_flag;
1654 unsigned char first_sc_checked;
1655 unsigned char uninit_list;
1656 u32 start_decoding_time;
1657
1658 int show_frame_num;
1659#ifdef USE_UNINIT_SEMA
1660 struct semaphore h265_uninit_done_sema;
1661#endif
1662 int fatal_error;
1663
1664
1665 u32 sei_present_flag;
1666 void *frame_mmu_map_addr;
1667 dma_addr_t frame_mmu_map_phy_addr;
1668 unsigned int mmu_mc_buf_start;
1669 unsigned int mmu_mc_buf_end;
1670 unsigned int mmu_mc_start_4k_adr;
1671 void *mmu_box;
1672 void *bmmu_box;
1673 int mmu_enable;
1674
1675 unsigned int dec_status;
1676
1677 /* data for SEI_MASTER_DISPLAY_COLOR */
1678 unsigned int primaries[3][2];
1679 unsigned int white_point[2];
1680 unsigned int luminance[2];
1681 /* data for SEI_CONTENT_LIGHT_LEVEL */
1682 unsigned int content_light_level[2];
1683
1684 struct PIC_s *pre_top_pic;
1685 struct PIC_s *pre_bot_pic;
1686
1687#ifdef MULTI_INSTANCE_SUPPORT
1688 int double_write_mode;
1689 int dynamic_buf_num_margin;
1690 int start_action;
1691 int save_buffer_mode;
1692#endif
1693 u32 i_only;
1694 struct list_head log_list;
1695 u32 ucode_pause_pos;
1696 u32 start_shift_bytes;
1697
1698 u32 vf_pre_count;
1699 u32 vf_get_count;
1700 u32 vf_put_count;
1701#ifdef SWAP_HEVC_UCODE
1702 dma_addr_t mc_dma_handle;
1703 void *mc_cpu_addr;
1704 int swap_size;
1705 ulong swap_addr;
1706#endif
1707#ifdef DETREFILL_ENABLE
1708 dma_addr_t detbuf_adr;
1709 u16 *detbuf_adr_virt;
1710 u8 delrefill_check;
1711#endif
1712 u8 head_error_flag;
1713 int valve_count;
1714 struct firmware_s *fw;
1715 int max_pic_w;
1716 int max_pic_h;
1717#ifdef AGAIN_HAS_THRESHOLD
1718 u8 next_again_flag;
1719 u32 pre_parser_wr_ptr;
1720#endif
1721 u32 ratio_control;
1722 u32 first_pic_flag;
1723 u32 decode_size;
1724 struct mutex chunks_mutex;
1725 int need_cache_size;
1726 u64 sc_start_time;
1727 u32 skip_first_nal;
1728 bool is_swap;
1729 bool is_4k;
1730 int frameinfo_enable;
1731 struct vframe_qos_s vframe_qos;
1732 bool is_used_v4l;
1733 void *v4l2_ctx;
1734 bool v4l_params_parsed;
1735 u32 mem_map_mode;
1736} /*hevc_stru_t */;
1737
1738#ifdef AGAIN_HAS_THRESHOLD
1739u32 again_threshold;
1740#endif
1741#ifdef SEND_LMEM_WITH_RPM
1742#define get_lmem_params(hevc, ladr) \
1743 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
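/*
 * Editor's note on the index swizzle above: the lmem data is stored as
 * 64-bit words whose four 16-bit halves are in reverse order, so for
 * ladr = base + k (k in 0..3) the macro reads lmem_ptr[base + 3 - k];
 * e.g. ladr = 0x41 resolves to lmem_ptr[0x42].
 */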
1744
1745
1746static int get_frame_mmu_map_size(void)
1747{
1748 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1749 return (MAX_FRAME_8K_NUM * 4);
1750
1751 return (MAX_FRAME_4K_NUM * 4);
1752}
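/*
 * Example: on SM1 and newer the MMU map covers 8K frames,
 * MAX_FRAME_8K_NUM * 4 = 73728 bytes; older chips only need
 * MAX_FRAME_4K_NUM * 4 = 18432 bytes.
 */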
1753
1754static int is_oversize(int w, int h)
1755{
1756 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1757 MAX_SIZE_8K : MAX_SIZE_4K;
1758
1759 if (w < 0 || h < 0)
1760 return true;
1761
1762 if (h != 0 && (w > max / h))
1763 return true;
1764
1765 return false;
1766}
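/*
 * Note on the check above: comparing (w > max / h) instead of (w * h > max)
 * avoids integer overflow for large dimensions; e.g. on a pre-SM1 chip
 * (max = 4096*2304 = 9437184) an 8192x4608 stream is rejected because
 * 8192 > 9437184 / 4608 = 2048.
 */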
1767
1768void check_head_error(struct hevc_state_s *hevc)
1769{
1770#define pcm_enabled_flag 0x040
1771#define pcm_sample_bit_depth_luma 0x041
1772#define pcm_sample_bit_depth_chroma 0x042
1773 hevc->head_error_flag = 0;
1774 if ((error_handle_policy & 0x40) == 0)
1775 return;
1776 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1777 uint16_t pcm_depth_luma = get_lmem_params(
1778 hevc, pcm_sample_bit_depth_luma);
1779 uint16_t pcm_sample_chroma = get_lmem_params(
1780 hevc, pcm_sample_bit_depth_chroma);
1781 if (pcm_depth_luma >
1782 hevc->bit_depth_luma ||
1783 pcm_sample_chroma >
1784 hevc->bit_depth_chroma) {
1785 hevc_print(hevc, 0,
1786 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1787 pcm_depth_luma,
1788 pcm_sample_chroma,
1789 hevc->bit_depth_luma,
1790 hevc->bit_depth_chroma);
1791 hevc->head_error_flag = 1;
1792 }
1793 }
1794}
1795#endif
1796
1797#ifdef SUPPORT_10BIT
1798/* Lossless compression body buffer size: 4K per 64x32 block (jt) */
1799static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1800 int width, int height, int mem_saving_mode)
1801{
1802 int width_x64;
1803 int height_x32;
1804 int bsize;
1805
1806 width_x64 = width + 63;
1807 width_x64 >>= 6;
1808
1809 height_x32 = height + 31;
1810 height_x32 >>= 5;
1811 if (mem_saving_mode == 1 && hevc->mmu_enable)
1812 bsize = 3200 * width_x64 * height_x32;
1813 else if (mem_saving_mode == 1)
1814 bsize = 3072 * width_x64 * height_x32;
1815 else
1816 bsize = 4096 * width_x64 * height_x32;
1817
1818 return bsize;
1819}
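/*
 * Worked example (default mode, mem_saving_mode == 0): for a 3840x2160
 * picture, width_x64 = 60 and height_x32 = 68, so
 * bsize = 4096 * 60 * 68 = 16711680 bytes (~16MB) of compressed body
 * space per frame buffer.
 */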
1820
1821/* Lossless compression header buffer size: 32 bytes per 128x64 block (jt) */
1822static int compute_losless_comp_header_size(int width, int height)
1823{
1824 int width_x128;
1825 int height_x64;
1826 int hsize;
1827
1828 width_x128 = width + 127;
1829 width_x128 >>= 7;
1830
1831 height_x64 = height + 63;
1832 height_x64 >>= 6;
1833
1834 hsize = 32*width_x128*height_x64;
1835
1836 return hsize;
1837}
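/*
 * Worked example: for a 3840x2160 picture, width_x128 = 30 and
 * height_x64 = 34, so hsize = 32 * 30 * 34 = 32640 bytes of compression
 * header per frame buffer.
 */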
1838#endif
1839
1840static int add_log(struct hevc_state_s *hevc,
1841 const char *fmt, ...)
1842{
1843#define HEVC_LOG_BUF 196
1844 struct debug_log_s *log_item;
1845 unsigned char buf[HEVC_LOG_BUF];
1846 int len = 0;
1847 va_list args;
1848 mutex_lock(&vh265_log_mutex);
1849 va_start(args, fmt);
1850 len = sprintf(buf, "<%ld> <%05d> ",
1851 jiffies, hevc->decode_idx);
1852 len += vsnprintf(buf + len,
1853 HEVC_LOG_BUF - len, fmt, args);
1854 va_end(args);
1855 log_item = kmalloc(
1856 sizeof(struct debug_log_s) + len,
1857 GFP_KERNEL);
1858 if (log_item) {
1859 INIT_LIST_HEAD(&log_item->list);
1860 strcpy(&log_item->data, buf);
1861 list_add_tail(&log_item->list,
1862 &hevc->log_list);
1863 }
1864 mutex_unlock(&vh265_log_mutex);
1865 return 0;
1866}
1867
1868static void dump_log(struct hevc_state_s *hevc)
1869{
1870 int i = 0;
1871 struct debug_log_s *log_item, *tmp;
1872 mutex_lock(&vh265_log_mutex);
1873 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1874 hevc_print(hevc, 0,
1875 "[LOG%04d]%s\n",
1876 i++,
1877 &log_item->data);
1878 list_del(&log_item->list);
1879 kfree(log_item);
1880 }
1881 mutex_unlock(&vh265_log_mutex);
1882}
1883
1884static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1885 struct PIC_s *pic)
1886{
1887 if (pic->error_mark
1888 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1889 return 1;
1890 return 0;
1891}
1892
1893static int get_pic_poc(struct hevc_state_s *hevc,
1894 unsigned int idx)
1895{
1896 if (idx != 0xff
1897 && idx < MAX_REF_PIC_NUM
1898 && hevc->m_PIC[idx])
1899 return hevc->m_PIC[idx]->POC;
1900 return INVALID_POC;
1901}
1902
1903#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1904static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1905{
1906 return (hevc->m_ins_flag &&
1907 ((double_write_mode & 0x80000000) == 0)) ?
1908 hevc->double_write_mode :
1909 (double_write_mode & 0x7fffffff);
1910}
1911
1912static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1913{
1914 return (hevc->m_ins_flag &&
1915 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1916 hevc->dynamic_buf_num_margin :
1917 (dynamic_buf_num_margin & 0x7fffffff);
1918}
1919#endif
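/*
 * Example of the bit31 override above (editor's note): setting the module
 * parameter double_write_mode = 0x80000003 forces mode 3 for every
 * instance, even in multi-instance mode; without bit31 set, a
 * multi-instance decoder uses its per-instance hevc->double_write_mode.
 */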
1920
1921static int get_double_write_mode(struct hevc_state_s *hevc)
1922{
1923 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1924 int w = hevc->pic_w;
1925 int h = hevc->pic_h;
1926 u32 dw = 0x1; /*1:1*/
1927 switch (valid_dw_mode) {
1928 case 0x100:
1929 if (w > 1920 && h > 1088)
1930 dw = 0x4; /*1:2*/
1931 break;
1932 case 0x200:
1933 if (w > 1920 && h > 1088)
1934 dw = 0x2; /*1:4*/
1935 break;
1936 case 0x300:
1937 if (w > 1280 && h > 720)
1938 dw = 0x4; /*1:2*/
1939 break;
1940 default:
1941 dw = valid_dw_mode;
1942 break;
1943 }
1944 return dw;
1945}
1946
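/*
 * Down-scale ratio implied by a double-write mode: modes 2 and 3 write
 * the planes at 1:4, mode 4 at 1:2, everything else at full size.
 */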
1947static int get_double_write_ratio(struct hevc_state_s *hevc,
1948 int dw_mode)
1949{
1950 int ratio = 1;
1951 if ((dw_mode == 2) ||
1952 (dw_mode == 3))
1953 ratio = 4;
1954 else if (dw_mode == 4)
1955 ratio = 2;
1956 return ratio;
1957}
1958#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1959static unsigned char get_idx(struct hevc_state_s *hevc)
1960{
1961 return hevc->index;
1962}
1963#endif
1964
1965#undef pr_info
1966#define pr_info printk
1967static int hevc_print(struct hevc_state_s *hevc,
1968 int flag, const char *fmt, ...)
1969{
1970#define HEVC_PRINT_BUF 256
1971 unsigned char buf[HEVC_PRINT_BUF];
1972 int len = 0;
1973#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1974 if (hevc == NULL ||
1975 (flag == 0) ||
1976 ((debug_mask &
1977 (1 << hevc->index))
1978 && (debug & flag))) {
1979#endif
1980 va_list args;
1981
1982 va_start(args, fmt);
1983 if (hevc)
1984 len = sprintf(buf, "[%d]", hevc->index);
1985 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1986 pr_debug("%s", buf);
1987 va_end(args);
1988#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1989 }
1990#endif
1991 return 0;
1992}
1993
1994static int hevc_print_cont(struct hevc_state_s *hevc,
1995 int flag, const char *fmt, ...)
1996{
1997 unsigned char buf[HEVC_PRINT_BUF];
1998 int len = 0;
1999#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2000 if (hevc == NULL ||
2001 (flag == 0) ||
2002 ((debug_mask &
2003 (1 << hevc->index))
2004 && (debug & flag))) {
2005#endif
2006 va_list args;
2007
2008 va_start(args, fmt);
2009 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2010 pr_info("%s", buf);
2011 va_end(args);
2012#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2013 }
2014#endif
2015 return 0;
2016}
2017
2018static void put_mv_buf(struct hevc_state_s *hevc,
2019 struct PIC_s *pic);
2020
2021static void update_vf_memhandle(struct hevc_state_s *hevc,
2022 struct vframe_s *vf, struct PIC_s *pic);
2023
2024static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2025
2026static void release_aux_data(struct hevc_state_s *hevc,
2027 struct PIC_s *pic);
2028static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2029
2030#ifdef MULTI_INSTANCE_SUPPORT
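/*
 * Snapshot the parser/decoder state (POC tracking, parser type, RPS set,
 * decode index) so that restore_decode_state() can discard a partially
 * decoded picture and roll the front end back to this point when decoding
 * has to be retried.
 */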
2031static void backup_decode_state(struct hevc_state_s *hevc)
2032{
2033 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2034 hevc->curr_POC_bak = hevc->curr_POC;
2035 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2036 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2037 hevc->start_parser_type_bak = hevc->start_parser_type;
2038 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2039 hevc->rps_set_id_bak = hevc->rps_set_id;
2040 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2041 hevc->decode_idx_bak = hevc->decode_idx;
2042
2043}
2044
2045static void restore_decode_state(struct hevc_state_s *hevc)
2046{
2047 struct vdec_s *vdec = hw_to_vdec(hevc);
2048 if (!vdec_has_more_input(vdec)) {
2049 hevc->pic_decoded_lcu_idx =
2050 READ_VREG(HEVC_PARSER_LCU_START)
2051 & 0xffffff;
2052 return;
2053 }
2054 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2055 "%s: discard pic index 0x%x\n",
2056 __func__, hevc->decoding_pic ?
2057 hevc->decoding_pic->index : 0xff);
2058 if (hevc->decoding_pic) {
2059 hevc->decoding_pic->error_mark = 0;
2060 hevc->decoding_pic->output_ready = 0;
2061 hevc->decoding_pic->output_mark = 0;
2062 hevc->decoding_pic->referenced = 0;
2063 hevc->decoding_pic->POC = INVALID_POC;
2064 put_mv_buf(hevc, hevc->decoding_pic);
2065 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2066 release_aux_data(hevc, hevc->decoding_pic);
2067 hevc->decoding_pic = NULL;
2068 }
2069 hevc->decode_idx = hevc->decode_idx_bak;
2070 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2071 hevc->curr_POC = hevc->curr_POC_bak;
2072 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2073 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2074 hevc->start_parser_type = hevc->start_parser_type_bak;
2075 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2076 hevc->rps_set_id = hevc->rps_set_id_bak;
2077 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2078
2079 if (hevc->pic_list_init_flag == 1)
2080 hevc->pic_list_init_flag = 0;
2081 /*if (hevc->decode_idx == 0)
2082 hevc->start_decoding_flag = 0;*/
2083
2084 hevc->slice_idx = 0;
2085 hevc->used_4k_num = -1;
2086}
2087#endif
2088
2089static void hevc_init_stru(struct hevc_state_s *hevc,
2090 struct BuffInfo_s *buf_spec_i)
2091{
2092 int i;
2093 INIT_LIST_HEAD(&hevc->log_list);
2094 hevc->work_space_buf = buf_spec_i;
2095 hevc->prefix_aux_size = 0;
2096 hevc->suffix_aux_size = 0;
2097 hevc->aux_addr = NULL;
2098 hevc->rpm_addr = NULL;
2099 hevc->lmem_addr = NULL;
2100
2101 hevc->curr_POC = INVALID_POC;
2102
2103 hevc->pic_list_init_flag = 0;
2104 hevc->use_cma_flag = 0;
2105 hevc->decode_idx = 0;
2106 hevc->slice_idx = 0;
2107 hevc->new_pic = 0;
2108 hevc->new_tile = 0;
2109 hevc->iPrevPOC = 0;
2110 hevc->list_no = 0;
2111 /* int m_uiMaxCUWidth = 1<<7; */
2112 /* int m_uiMaxCUHeight = 1<<7; */
2113 hevc->m_pocRandomAccess = MAX_INT;
2114 hevc->tile_enabled = 0;
2115 hevc->tile_x = 0;
2116 hevc->tile_y = 0;
2117 hevc->iPrevTid0POC = 0;
2118 hevc->slice_addr = 0;
2119 hevc->slice_segment_addr = 0;
2120 hevc->skip_flag = 0;
2121 hevc->misc_flag0 = 0;
2122
2123 hevc->cur_pic = NULL;
2124 hevc->col_pic = NULL;
2125 hevc->wait_buf = 0;
2126 hevc->error_flag = 0;
2127 hevc->head_error_flag = 0;
2128 hevc->error_skip_nal_count = 0;
2129 hevc->have_vps = 0;
2130 hevc->have_sps = 0;
2131 hevc->have_pps = 0;
2132 hevc->have_valid_start_slice = 0;
2133
2134 hevc->pts_mode = PTS_NORMAL;
2135 hevc->last_pts = 0;
2136 hevc->last_lookup_pts = 0;
2137 hevc->last_pts_us64 = 0;
2138 hevc->last_lookup_pts_us64 = 0;
2139 hevc->pts_mode_switching_count = 0;
2140 hevc->pts_mode_recovery_count = 0;
2141
2142 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2143 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2144 if (hevc->PB_skip_mode == 0)
2145 hevc->ignore_bufmgr_error = 0x1;
2146 else
2147 hevc->ignore_bufmgr_error = 0x0;
2148
2149 if (hevc->is_used_v4l) {
2150 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2151 if (hevc->m_PIC[i] != NULL) {
2152				memset(hevc->m_PIC[i], 0, sizeof(struct PIC_s));
2153 hevc->m_PIC[i]->index = -1;
2154 }
2155 }
2156 }
2157
2158 hevc->pic_num = 0;
2159 hevc->lcu_x_num_pre = 0;
2160 hevc->lcu_y_num_pre = 0;
2161 hevc->first_pic_after_recover = 0;
2162
2163 hevc->pre_top_pic = NULL;
2164 hevc->pre_bot_pic = NULL;
2165
2166 hevc->sei_present_flag = 0;
2167 hevc->valve_count = 0;
2168 hevc->first_pic_flag = 0;
2169#ifdef MULTI_INSTANCE_SUPPORT
2170 hevc->decoded_poc = INVALID_POC;
2171 hevc->start_process_time = 0;
2172 hevc->last_lcu_idx = 0;
2173 hevc->decode_timeout_count = 0;
2174 hevc->timeout_num = 0;
2175 hevc->eos = 0;
2176 hevc->pic_decoded_lcu_idx = -1;
2177 hevc->over_decode = 0;
2178 hevc->used_4k_num = -1;
2179 hevc->start_decoding_flag = 0;
2180 hevc->rps_set_id = 0;
2181 backup_decode_state(hevc);
2182#endif
2183#ifdef DETREFILL_ENABLE
2184 hevc->detbuf_adr = 0;
2185 hevc->detbuf_adr_virt = NULL;
2186#endif
2187}
2188
2189static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2190static int H265_alloc_mmu(struct hevc_state_s *hevc,
2191 struct PIC_s *new_pic, unsigned short bit_depth,
2192 unsigned int *mmu_index_adr);
2193
2194#ifdef DETREFILL_ENABLE
2195#define DETREFILL_BUF_SIZE (4 * 0x4000)
2196#define HEVC_SAO_DBG_MODE0 0x361e
2197#define HEVC_SAO_DBG_MODE1 0x361f
2198#define HEVC_SAO_CTRL10 0x362e
2199#define HEVC_SAO_CTRL11 0x362f
2200static int init_detrefill_buf(struct hevc_state_s *hevc)
2201{
2202 if (hevc->detbuf_adr_virt)
2203 return 0;
2204
2205 hevc->detbuf_adr_virt =
2206 (void *)dma_alloc_coherent(amports_get_dma_device(),
2207 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2208 GFP_KERNEL);
2209
2210 if (hevc->detbuf_adr_virt == NULL) {
2211		pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2212 return -1;
2213 }
2214 return 0;
2215}
2216
2217static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2218{
2219 if (hevc->detbuf_adr_virt) {
2220 dma_free_coherent(amports_get_dma_device(),
2221 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2222 hevc->detbuf_adr);
2223
2224 hevc->detbuf_adr_virt = NULL;
2225 hevc->detbuf_adr = 0;
2226 }
2227}
2228
2229/*
2230 * convert uncompressed 8x4 block data from/to DDR (direction 0: unpack from the packed CM body buffer; otherwise pack back)
2231 */
2232static void convUnc8x4blk(uint16_t* blk8x4Luma,
2233 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2234{
2235 if (direction == 0) {
2236 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2237 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2238 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2239 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2240 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2241 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2242 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2243 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2244 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2245 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2246 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2247 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2248 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2249 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2250 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2251 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2252 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2253 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2254
2255 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2256 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2257 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2258 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2259 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2260 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2261 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2262 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2263 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2264 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2265 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2266 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2267 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2268 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2269 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2270 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2271 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2272 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2273
2274 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2275 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2276 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2277 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2278 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2279 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2280 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2281 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2282 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2283 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2284 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2285 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2286 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2287 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2288 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2289 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2290 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2291 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2292
2293 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2294 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2295 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2296 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2297 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2298 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2299 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2300 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2301 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2302 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2303 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2304 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2305 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2306 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2307 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2308 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2309 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2310 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2311 } else {
2312 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2313 blk8x4Luma[3 + 0 * 8];
2314 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2315 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2316 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2317 (blk8x4Luma[3 + 3 * 8] >> 2);
2318 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2319 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2320 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2321 (blk8x4Luma[7 + 2 * 8] >>4);
2322 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2323 blk8x4Cb[0 + 0 * 4];
2324 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2325 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2326 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2327
2328 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2329 blk8x4Luma[0 + 0 * 8];
2330 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2331 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2332 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2333 (blk8x4Luma[0 + 1 * 8] >> 2);
2334 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2335 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2336 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2337 (blk8x4Luma[0 + 2 * 8] >>4);
2338 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2339 blk8x4Luma[2 + 2 * 8];
2340 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2341 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2342 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2343
2344 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2345 blk8x4Luma[4 + 0 * 8];
2346 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2347 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2348 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2349 (blk8x4Luma[4 + 1 * 8] >> 2);
2350 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2351 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2352 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2353 (blk8x4Luma[4 + 2 * 8] >>4);
2354 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2355 blk8x4Luma[6 + 2 * 8];
2356 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2357 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2358 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2359
2360 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2361 blk8x4Cb[1 + 0 * 4];
2362 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2363 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2364 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2365 (blk8x4Cr[2 + 0 * 4] >> 2);
2366 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2367 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2368 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2369 (blk8x4Cb[1 + 1 * 4] >>4);
2370 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2371 blk8x4Cb[2 + 1 * 4];
2372 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2373 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2374 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2375 }
2376}
2377
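/*
 * Repair the compressed (CM) body data around one CTU.  Writing 2 to
 * HEVC_SAO_DBG_MODE1 asks the decoder to dump the affected 8x4 blocks into
 * detbuf; the driver unpacks them with convUnc8x4blk(), patches the
 * samples, packs them back, and writing 3 asks the decoder to store the
 * result back to DDR.
 */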
2378static void corrRefillWithAmrisc (
2379 struct hevc_state_s *hevc,
2380 uint32_t cmHeaderBaseAddr,
2381 uint32_t picWidth,
2382 uint32_t ctuPosition)
2383{
2384 int32_t i;
2385 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2386 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2387 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2388
2389 uint16_t cmBodyBuf[32 * 18];
2390
2391 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2392 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2393 uint32_t stride64x64 = pic_width_x64 * 128;
2394 uint32_t addr_offset64x64_abv = stride64x64 *
2395 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2396 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2397 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2398 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2399 unsigned int tmpData32;
2400
2401 uint16_t blkBuf0Y[32];
2402 uint16_t blkBuf0Cb[8];
2403 uint16_t blkBuf0Cr[8];
2404 uint16_t blkBuf1Y[32];
2405 uint16_t blkBuf1Cb[8];
2406 uint16_t blkBuf1Cr[8];
2407 int32_t blkBufCnt = 0;
2408
2409 int32_t blkIdx;
2410
2411 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2412 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2413 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2414 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2415
2416 for (i = 0; i < 32 * 18; i++)
2417 cmBodyBuf[i] = 0;
2418
2419 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2420 "%s, %d\n", __func__, __LINE__);
2421 do {
2422 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2423 } while (tmpData32);
2424 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2425 "%s, %d\n", __func__, __LINE__);
2426
2427 hevc_print(hevc, H265_DEBUG_DETAIL,
2428 "cmBodyBuf from detbuf:\n");
2429 for (i = 0; i < 32 * 18; i++) {
2430 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2431 if (get_dbg_flag(hevc) &
2432 H265_DEBUG_DETAIL) {
2433 if ((i & 0xf) == 0)
2434 hevc_print_cont(hevc, 0, "\n");
2435 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2436 }
2437 }
2438 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2439
2440 for (i = 0; i < 32; i++)
2441 blkBuf0Y[i] = 0;
2442 for (i = 0; i < 8; i++)
2443 blkBuf0Cb[i] = 0;
2444 for (i = 0; i < 8; i++)
2445 blkBuf0Cr[i] = 0;
2446 for (i = 0; i < 32; i++)
2447 blkBuf1Y[i] = 0;
2448 for (i = 0; i < 8; i++)
2449 blkBuf1Cb[i] = 0;
2450 for (i = 0; i < 8; i++)
2451 blkBuf1Cr[i] = 0;
2452
2453 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2454 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2455 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2456 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2457 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2458 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2459 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2460
2461 if (!aboveCtuAvailable && inAboveCtu)
2462 continue;
2463
2464 /* detRefillBuf --> 8x4block*/
2465 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2466
2467 if (restoreEnable) {
2468 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2469 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2470 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2471 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2472 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2473 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2474 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2475 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2476 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2477 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2478 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2479 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2480 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2481 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2482 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2483 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2484 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2485 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2486 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2487 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2488 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2489 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2490 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2491 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2492 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2493 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2494 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2495 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2496 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2497 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2498 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2499 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2500
2501 /*Store data back to DDR*/
2502 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2503 }
2504
2505 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2506 }
2507
2508 hevc_print(hevc, H265_DEBUG_DETAIL,
2509 "cmBodyBuf to detbuf:\n");
2510 for (i = 0; i < 32 * 18; i++) {
2511 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2512 if (get_dbg_flag(hevc) &
2513 H265_DEBUG_DETAIL) {
2514 if ((i & 0xf) == 0)
2515 hevc_print_cont(hevc, 0, "\n");
2516 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2517 }
2518 }
2519 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2520
2521 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2522 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2523 "%s, %d\n", __func__, __LINE__);
2524 do {
2525 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2526 } while (tmpData32);
2527 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2528 "%s, %d\n", __func__, __LINE__);
2529}
2530
2531static void delrefill(struct hevc_state_s *hevc)
2532{
2533	/*
2534	 * corrRefill
2535	 * HEVC_SAO_DBG_MODE0: picGlobalVariable
2536	 * [31:30] error number
2537	 * [29:20] error2 ([9:7] tilex, [6:0] ctuy)
2538	 * [19:10] error1, [9:0] error0
2539	 */
2540 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2541 uint32_t errorIdx;
2542 uint32_t errorNum = (detResult>>30);
2543
2544 if (detResult) {
2545 hevc_print(hevc, H265_DEBUG_BUFMGR,
2546 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2547 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2548 uint32_t errorPos = errorIdx * 10;
2549 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2550 uint32_t tilex = (errorResult >> 7) - 1;
2551 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2552 + hevc->m_tile[0][tilex].width - 1;
2553 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2554 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2555 hevc_print(hevc, H265_DEBUG_BUFMGR,
2556 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2557				errorIdx, tilex, ctux, ctux, ctuy, ctuy);
2558 corrRefillWithAmrisc(
2559 hevc,
2560 (uint32_t)hevc->cur_pic->header_adr,
2561 hevc->pic_w,
2562 ctuPosition);
2563 }
2564
2565 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2566 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2567 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2568 }
2569}
2570#endif
2571
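/*
 * Read the 128 parameter words that the ucode pushes through RPM_CMD_REG:
 * bit 16 flags a valid word, the low 16 bits carry the data, and writing 0
 * back acknowledges each word.
 */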
2572static void get_rpm_param(union param_u *params)
2573{
2574 int i;
2575 unsigned int data32;
2576
2577 for (i = 0; i < 128; i++) {
2578 do {
2579 data32 = READ_VREG(RPM_CMD_REG);
2580 /* hevc_print(hevc, 0, "%x\n", data32); */
2581 } while ((data32 & 0x10000) == 0);
2582 params->l.data[i] = data32 & 0xffff;
2583 /* hevc_print(hevc, 0, "%x\n", data32); */
2584 WRITE_VREG(RPM_CMD_REG, 0);
2585 }
2586}
2587
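/*
 * Pick the first allocated picture that is neither referenced nor pending
 * output, mark it output_ready and return its index (INVALID_IDX if none
 * is free).
 */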
2588static int get_free_buf_idx(struct hevc_state_s *hevc)
2589{
2590 int index = INVALID_IDX;
2591 struct PIC_s *pic;
2592 int i;
2593
2594 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2595 pic = hevc->m_PIC[i];
2596 if (pic == NULL ||
2597 pic->index == -1 ||
2598 pic->BUF_index == -1)
2599 continue;
2600
2601 if (pic->output_mark == 0 &&
2602 pic->referenced == 0 &&
2603 pic->output_ready == 0) {
2604 pic->output_ready = 1;
2605 index = i;
2606 break;
2607 }
2608 }
2609
2610 return index;
2611}
2612
2613static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2614{
2615 int i;
2616 struct PIC_s *pic;
2617 struct PIC_s *ret_pic = NULL;
2618 if (POC == INVALID_POC)
2619 return NULL;
2620 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2621 pic = hevc->m_PIC[i];
2622 if (pic == NULL || pic->index == -1 ||
2623 pic->BUF_index == -1)
2624 continue;
2625 if (pic->POC == POC) {
2626 if (ret_pic == NULL)
2627 ret_pic = pic;
2628 else {
2629 if (pic->decode_idx > ret_pic->decode_idx)
2630 ret_pic = pic;
2631 }
2632 }
2633 }
2634 return ret_pic;
2635}
2636
2637static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2638{
2639 int i;
2640 struct PIC_s *pic;
2641 struct PIC_s *ret_pic = NULL;
2642
2643 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2644 pic = hevc->m_PIC[i];
2645 if (pic == NULL || pic->index == -1 ||
2646 pic->BUF_index == -1)
2647 continue;
2648 if ((pic->POC == POC) && (pic->referenced)) {
2649 if (ret_pic == NULL)
2650 ret_pic = pic;
2651 else {
2652 if (pic->decode_idx > ret_pic->decode_idx)
2653 ret_pic = pic;
2654 }
2655 }
2656 }
2657
2658 if (ret_pic == NULL) {
2659 if (get_dbg_flag(hevc)) {
2660 hevc_print(hevc, 0,
2661 "Wrong, POC of %d is not in referenced list\n",
2662 POC);
2663 }
2664 ret_pic = get_pic_by_POC(hevc, POC);
2665 }
2666 return ret_pic;
2667}
2668
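/* Integer log2 (floor); returns (unsigned int)-1 for val == 0. */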
2669static unsigned int log2i(unsigned int val)
2670{
2671 unsigned int ret = -1;
2672
2673 while (val != 0) {
2674 val >>= 1;
2675 ret++;
2676 }
2677 return ret;
2678}
2679
2680static int init_buf_spec(struct hevc_state_s *hevc);
2681
2682static bool v4l_is_there_vframe_bound(struct hevc_state_s *hevc)
2683{
2684 int i;
2685
2686 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2687 struct PIC_s *pic = hevc->m_PIC[i];
2688
2689 if (pic && pic->vframe_bound)
2690 return true;
2691 }
2692
2693 return false;
2694}
2695
2696static void v4l_mmu_buffer_release(struct hevc_state_s *hevc)
2697{
2698 int i;
2699
2700 /* release workspace */
2701 if (hevc->bmmu_box)
2702 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2703 BMMU_WORKSPACE_ID);
2704	/*
2705	 * Only when a vframe has come back to the driver can we be sure that
2706	 * the vframe and its fd are still related. If playback exits, buffers
2707	 * captured by the upper app are released when the fd is closed, and
2708	 * the remaining buffers are released by the driver here.
2709	 */
2710 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2711 struct PIC_s *pic = hevc->m_PIC[i];
2712
2713 if (pic && !pic->vframe_bound) {
2714 if (hevc->bmmu_box)
2715 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2716 VF_BUFFER_IDX(i));
2717 if (hevc->mmu_box)
2718 decoder_mmu_box_free_idx(hevc->mmu_box, i);
2719
2720 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
2721 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
2722 __func__, i, hevc->bmmu_box, hevc->mmu_box);
2723 }
2724 }
2725}
2726
2727static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2728{
2729 if (hevc->is_used_v4l &&
2730 v4l_is_there_vframe_bound(hevc)) {
2731 if (get_double_write_mode(hevc) != 0x10) {
2732 v4l_mmu_buffer_release(hevc);
2733 return;
2734 }
2735 }
2736
2737 if (hevc->mmu_box)
2738 decoder_mmu_box_free(hevc->mmu_box);
2739 hevc->mmu_box = NULL;
2740
2741 if (hevc->bmmu_box)
2742 decoder_bmmu_box_free(hevc->bmmu_box);
2743 hevc->bmmu_box = NULL;
2744}
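
/*
 * Allocate the MMU (compressed frame) and BMMU (linear buffer) boxes used
 * by this decoder instance.  buf_size is 24MB for streams up to 1920x1088
 * and 64MB otherwise; TVP (secure) flags are propagated from the vdec core.
 */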
2745static int init_mmu_buffers(struct hevc_state_s *hevc)
2746{
2747 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2748 CODEC_MM_FLAGS_TVP : 0;
2749 int buf_size = 64;
2750
2751 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2752 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2753 buf_size = 24;
2754 }
2755
2756 if (get_dbg_flag(hevc)) {
2757 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2758 __func__, hevc->max_pic_w, hevc->max_pic_h);
2759 }
2760
2761 hevc->need_cache_size = buf_size * SZ_1M;
2762 hevc->sc_start_time = get_jiffies_64();
2763 if (hevc->mmu_enable
2764 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2765 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2766 hevc->index,
2767 MAX_REF_PIC_NUM,
2768 buf_size * SZ_1M,
2769 tvp_flag
2770 );
2771 if (!hevc->mmu_box) {
2772 pr_err("h265 alloc mmu box failed!!\n");
2773 return -1;
2774 }
2775 }
2776
2777 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2778 hevc->index,
2779 BMMU_MAX_BUFFERS,
2780 4 + PAGE_SHIFT,
2781 CODEC_MM_FLAGS_CMA_CLEAR |
2782 CODEC_MM_FLAGS_FOR_VDECODER |
2783 tvp_flag);
2784 if (!hevc->bmmu_box) {
2785 if (hevc->mmu_box)
2786 decoder_mmu_box_free(hevc->mmu_box);
2787 hevc->mmu_box = NULL;
2788 pr_err("h265 alloc mmu box failed!!\n");
2789 return -1;
2790 }
2791 return 0;
2792}
2793
2794struct buf_stru_s
2795{
2796 int lcu_total;
2797 int mc_buffer_size_h;
2798 int mc_buffer_size_u_v_h;
2799};
2800
2801#ifndef MV_USE_FIXED_BUF
2802static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2803{
2804 int i;
2805 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2806 if (hevc->m_mv_BUF[i].start_adr) {
2807 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2808 hevc_print(hevc, 0,
2809 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2810 i, hevc->m_mv_BUF[i].start_adr,
2811 hevc->m_mv_BUF[i].size,
2812 hevc->m_mv_BUF[i].used_flag);
2813 decoder_bmmu_box_free_idx(
2814 hevc->bmmu_box,
2815 MV_BUFFER_IDX(i));
2816 hevc->m_mv_BUF[i].start_adr = 0;
2817 hevc->m_mv_BUF[i].size = 0;
2818 hevc->m_mv_BUF[i].used_flag = 0;
2819 }
2820 }
2821 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2822 if (hevc->m_PIC[i] != NULL)
2823 hevc->m_PIC[i]->mv_buf_index = -1;
2824 }
2825}
2826
2827static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2828{
2829 int ret = 0;
2830 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2831 if (decoder_bmmu_box_alloc_buf_phy
2832 (hevc->bmmu_box,
2833 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2834 DRIVER_NAME,
2835 &hevc->m_mv_BUF[i].start_adr) < 0) {
2836 hevc->m_mv_BUF[i].start_adr = 0;
2837 ret = -1;
2838 } else {
2839 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2840 hevc->m_mv_BUF[i].used_flag = 0;
2841 ret = 0;
2842 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2843 hevc_print(hevc, 0,
2844 "MV Buffer %d: start_adr %p size %x\n",
2845 i,
2846 (void *)hevc->m_mv_BUF[i].start_adr,
2847 hevc->m_mv_BUF[i].size);
2848 }
2849 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2850 void *mem_start_virt;
2851 mem_start_virt =
2852 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2853 if (mem_start_virt) {
2854 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2855 codec_mm_dma_flush(mem_start_virt,
2856 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2857 } else {
2858 mem_start_virt = codec_mm_vmap(
2859 hevc->m_mv_BUF[i].start_adr,
2860 hevc->m_mv_BUF[i].size);
2861 if (mem_start_virt) {
2862 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2863 codec_mm_dma_flush(mem_start_virt,
2864 hevc->m_mv_BUF[i].size,
2865 DMA_TO_DEVICE);
2866 codec_mm_unmap_phyaddr(mem_start_virt);
2867 } else {
2868					/*no virtual address for TVP (secure) playback;
2869					may need to be cleared by ucode.*/
2870 pr_err("ref %s mem_start_virt failed\n", __func__);
2871 }
2872 }
2873 }
2874 }
2875 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2876 return ret;
2877}
2878#endif
2879
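/*
 * Attach a motion-vector buffer to the picture.  With MV_USE_FIXED_BUF the
 * address is a fixed offset into the workspace; otherwise a buffer of the
 * size the picture needs (8K/4K/normal) is taken from, or added to, the
 * m_mv_BUF pool.
 */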
2880static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2881{
2882#ifdef MV_USE_FIXED_BUF
2883 if (pic && pic->index >= 0) {
2884 if (IS_8K_SIZE(pic->width, pic->height)) {
2885 pic->mpred_mv_wr_start_addr =
2886 hevc->work_space_buf->mpred_mv.buf_start
2887 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2888 } else {
2889 pic->mpred_mv_wr_start_addr =
2890 hevc->work_space_buf->mpred_mv.buf_start
2891 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2892 }
2893 }
2894 return 0;
2895#else
2896 int i;
2897 int ret = -1;
2898 int new_size;
2899 if (IS_8K_SIZE(pic->width, pic->height))
2900 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2901 else if (IS_4K_SIZE(pic->width, pic->height))
2902 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2903 else
2904 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2905 if (new_size != hevc->mv_buf_size) {
2906 dealloc_mv_bufs(hevc);
2907 hevc->mv_buf_size = new_size;
2908 }
2909 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2910 if (hevc->m_mv_BUF[i].start_adr &&
2911 hevc->m_mv_BUF[i].used_flag == 0) {
2912 hevc->m_mv_BUF[i].used_flag = 1;
2913 ret = i;
2914 break;
2915 }
2916 }
2917 if (ret < 0) {
2918 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2919 if (hevc->m_mv_BUF[i].start_adr == 0) {
2920 if (alloc_mv_buf(hevc, i) >= 0) {
2921 hevc->m_mv_BUF[i].used_flag = 1;
2922 ret = i;
2923 }
2924 break;
2925 }
2926 }
2927 }
2928
2929 if (ret >= 0) {
2930 pic->mv_buf_index = ret;
2931 pic->mpred_mv_wr_start_addr =
2932 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2933 (~0xffff);
2934 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2935 "%s => %d (0x%x) size 0x%x\n",
2936 __func__, ret,
2937 pic->mpred_mv_wr_start_addr,
2938 hevc->m_mv_BUF[ret].size);
2939
2940 } else {
2941 hevc_print(hevc, 0,
2942 "%s: Error, mv buf is not enough\n",
2943 __func__);
2944 }
2945 return ret;
2946
2947#endif
2948}
2949
2950static void put_mv_buf(struct hevc_state_s *hevc,
2951 struct PIC_s *pic)
2952{
2953#ifndef MV_USE_FIXED_BUF
2954 int i = pic->mv_buf_index;
2955 if (i < 0 || i >= MAX_REF_PIC_NUM) {
2956 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2957 "%s: index %d beyond range\n",
2958 __func__, i);
2959 return;
2960 }
2961 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2962 "%s(%d): used_flag(%d)\n",
2963 __func__, i,
2964 hevc->m_mv_BUF[i].used_flag);
2965
2966 if (hevc->m_mv_BUF[i].start_adr &&
2967 hevc->m_mv_BUF[i].used_flag)
2968 hevc->m_mv_BUF[i].used_flag = 0;
2969 pic->mv_buf_index = -1;
2970#endif
2971}
2972
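/*
 * Size of one decoded-picture buffer for the current stream: the compressed
 * frame header (when the MMU is enabled), the double-write luma/chroma
 * planes (when double write is enabled), and the compressed header plus
 * body when running compressed without the MMU, all rounded up to 64KB
 * units.
 */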
2973static int cal_current_buf_size(struct hevc_state_s *hevc,
2974 struct buf_stru_s *buf_stru)
2975{
2976
2977 int buf_size;
2978 int pic_width = hevc->pic_w;
2979 int pic_height = hevc->pic_h;
2980 int lcu_size = hevc->lcu_size;
2981 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
2982 1 : pic_width / lcu_size;
2983 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
2984 1 : pic_height / lcu_size;
2985 /*SUPPORT_10BIT*/
2986 int losless_comp_header_size = compute_losless_comp_header_size
2987 (pic_width, pic_height);
2988 /*always alloc buf for 10bit*/
2989 int losless_comp_body_size = compute_losless_comp_body_size
2990 (hevc, pic_width, pic_height, 0);
2991 int mc_buffer_size = losless_comp_header_size
2992 + losless_comp_body_size;
2993 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
2994 int mc_buffer_size_u_v_h = 0;
2995
2996 int dw_mode = get_double_write_mode(hevc);
2997
2998 if (hevc->mmu_enable) {
2999 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3000 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3001 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
3002 << 16;
3003 else
3004 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
3005 << 16;
3006 } else
3007 buf_size = 0;
3008
3009 if (dw_mode) {
3010 int pic_width_dw = pic_width /
3011 get_double_write_ratio(hevc, dw_mode);
3012 int pic_height_dw = pic_height /
3013 get_double_write_ratio(hevc, dw_mode);
3014
3015 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
3016 pic_width_dw / lcu_size + 1 :
3017 pic_width_dw / lcu_size;
3018 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
3019 pic_height_dw / lcu_size + 1 :
3020 pic_height_dw / lcu_size;
3021 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
3022
3023 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
3024 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
3025 /*64k alignment*/
3026 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
3027 }
3028
3029 if ((!hevc->mmu_enable) &&
3030 ((dw_mode & 0x10) == 0)) {
3031		/* compressed mode without MMU:
3032		need a buffer for the compressed frame data */
3033 buf_size += (mc_buffer_size_h << 16);
3034 }
3035
3036	/*in case the start address is not 64KB aligned*/
3037 if (buf_size > 0)
3038 buf_size += 0x10000;
3039
3040 if (buf_stru) {
3041 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
3042 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
3043 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
3044 }
3045
3046	hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
3047 pic_width, pic_height, losless_comp_header_size,
3048 losless_comp_body_size, mc_buffer_size_h,
3049 mc_buffer_size_u_v_h, buf_size);
3050
3051 return buf_size;
3052}
3053
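/*
 * v4l2 path: get a capture buffer from the v4l2 context and bind it to a
 * free m_BUF slot; when the MMU is enabled a compressed-header buffer is
 * also allocated from the BMMU box for the same slot.
 */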
3054static int v4l_alloc_buf(struct hevc_state_s *hevc)
3055{
3056 int i;
3057 int ret = -1;
3058 struct vdec_v4l2_buffer *fb = NULL;
3059
3060 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3061 return ret;
3062
3063 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3064 if (ret < 0) {
3065 hevc_print(hevc, 0, "[%d] H265 get buffer fail.\n",
3066 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id);
3067 return ret;
3068 }
3069
3070 for (i = 0; i < BUF_POOL_SIZE; i++)
3071 if (hevc->m_BUF[i].start_adr == 0)
3072 break;
3073
3074 if (hevc->mmu_enable) {
3075 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3076 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3077 hevc->m_BUF[i].header_size =
3078 ALIGN(MMU_COMPRESS_8K_HEADER_SIZE, 0x10000);
3079 else
3080 hevc->m_BUF[i].header_size =
3081 ALIGN(MMU_COMPRESS_HEADER_SIZE, 0x10000);
3082
3083 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
3084 VF_BUFFER_IDX(i), hevc->m_BUF[i].header_size,
3085 DRIVER_NAME, &hevc->m_BUF[i].header_addr);
3086 if (ret < 0) {
3087 hevc_print(hevc, PRINT_FLAG_ERROR,
3088 "%s[%d], header size: %d, no mem fatal err\n",
3089 __func__, i, hevc->m_BUF[i].header_size);
3090 return ret;
3091 }
3092 }
3093
3094 hevc->m_BUF[i].used_flag = 0;
3095 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3096 if (fb->num_planes == 1) {
3097 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3098 hevc->m_BUF[i].size = fb->m.mem[0].size;
3099 hevc->m_BUF[i].luma_size = fb->m.mem[0].offset;
3100 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3101 } else if (fb->num_planes == 2) {
3102 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3103 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3104 hevc->m_BUF[i].luma_size = fb->m.mem[0].size;
3105 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3106 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3107 }
3108
3109 return ret;
3110}
3111
3112static int alloc_buf(struct hevc_state_s *hevc)
3113{
3114 int i;
3115 int ret = -1;
3116 int buf_size = cal_current_buf_size(hevc, NULL);
3117
3118 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3119 return ret;
3120
3121 for (i = 0; i < BUF_POOL_SIZE; i++) {
3122 if (hevc->m_BUF[i].start_adr == 0)
3123 break;
3124 }
3125 if (i < BUF_POOL_SIZE) {
3126 if (buf_size > 0) {
3127 ret = decoder_bmmu_box_alloc_buf_phy
3128 (hevc->bmmu_box,
3129 VF_BUFFER_IDX(i), buf_size,
3130 DRIVER_NAME,
3131 &hevc->m_BUF[i].start_adr);
3132 if (ret < 0) {
3133 hevc->m_BUF[i].start_adr = 0;
3134 if (i <= 8) {
3135 hevc->fatal_error |=
3136 DECODER_FATAL_ERROR_NO_MEM;
3137 hevc_print(hevc, PRINT_FLAG_ERROR,
3138 "%s[%d], size: %d, no mem fatal err\n",
3139 __func__, i, buf_size);
3140 }
3141 }
3142
3143 if (ret >= 0) {
3144 hevc->m_BUF[i].size = buf_size;
3145 hevc->m_BUF[i].used_flag = 0;
3146 ret = 0;
3147
3148 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3149 hevc_print(hevc, 0,
3150 "Buffer %d: start_adr %p size %x\n",
3151 i,
3152 (void *)hevc->m_BUF[i].start_adr,
3153 hevc->m_BUF[i].size);
3154 }
3155 /*flush the buffer make sure no cache dirty*/
3156 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3157 void *mem_start_virt;
3158 mem_start_virt =
3159 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3160 if (mem_start_virt) {
3161 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3162 codec_mm_dma_flush(mem_start_virt,
3163 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3164 } else {
3165 mem_start_virt = codec_mm_vmap(
3166 hevc->m_BUF[i].start_adr,
3167 hevc->m_BUF[i].size);
3168 if (mem_start_virt) {
3169 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3170 codec_mm_dma_flush(mem_start_virt,
3171 hevc->m_BUF[i].size,
3172 DMA_TO_DEVICE);
3173 codec_mm_unmap_phyaddr(mem_start_virt);
3174 } else {
3175						/*no virtual address for TVP (secure) playback;
3176						may need to be cleared by ucode.*/
3177 pr_err("ref %s mem_start_virt failed\n", __func__);
3178 }
3179 }
3180 }
3181 }
3182 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3183 } else
3184 ret = 0;
3185 }
3186
3187 if (ret >= 0) {
3188 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3189 hevc_print(hevc, 0,
3190 "alloc buf(%d) for %d/%d size 0x%x) => %p\n",
3191 i, hevc->pic_w, hevc->pic_h,
3192 buf_size,
3193 hevc->m_BUF[i].start_adr);
3194 }
3195 } else {
3196 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3197 hevc_print(hevc, 0,
3198 "alloc buf(%d) for %d/%d size 0x%x) => Fail!!!\n",
3199 i, hevc->pic_w, hevc->pic_h,
3200 buf_size);
3201 }
3202 }
3203 return ret;
3204}
3205
3206static void set_buf_unused(struct hevc_state_s *hevc, int i)
3207{
3208 if (i >= 0 && i < BUF_POOL_SIZE)
3209 hevc->m_BUF[i].used_flag = 0;
3210}
3211
3212static void dealloc_unused_buf(struct hevc_state_s *hevc)
3213{
3214 int i;
3215 for (i = 0; i < BUF_POOL_SIZE; i++) {
3216 if (hevc->m_BUF[i].start_adr &&
3217 hevc->m_BUF[i].used_flag == 0) {
3218 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3219 hevc_print(hevc, 0,
3220 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3221 i, hevc->m_BUF[i].start_adr,
3222 hevc->m_BUF[i].size);
3223 }
3224 if (!hevc->is_used_v4l)
3225 decoder_bmmu_box_free_idx(
3226 hevc->bmmu_box,
3227 VF_BUFFER_IDX(i));
3228 hevc->m_BUF[i].start_adr = 0;
3229 hevc->m_BUF[i].size = 0;
3230 }
3231 }
3232}
3233
3234static void dealloc_pic_buf(struct hevc_state_s *hevc,
3235 struct PIC_s *pic)
3236{
3237 int i = pic->BUF_index;
3238 pic->BUF_index = -1;
3239 if (i >= 0 &&
3240 i < BUF_POOL_SIZE &&
3241 hevc->m_BUF[i].start_adr) {
3242 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3243 hevc_print(hevc, 0,
3244 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3245 i, hevc->m_BUF[i].start_adr,
3246 hevc->m_BUF[i].size);
3247 }
3248
3249 if (!hevc->is_used_v4l)
3250 decoder_bmmu_box_free_idx(
3251 hevc->bmmu_box,
3252 VF_BUFFER_IDX(i));
3253 hevc->m_BUF[i].used_flag = 0;
3254 hevc->m_BUF[i].start_adr = 0;
3255 hevc->m_BUF[i].size = 0;
3256 }
3257}
3258
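/*
 * Number of picture buffers to use: sps_num_reorder_pics_0 (or
 * sps_max_dec_pic_buffering_minus1_0 when the former is 0) plus the dynamic
 * buffer margin, one more for multi-instance decoding, one more when the
 * SPS buffering depth exceeds the reorder depth by 4 or more, capped at
 * MAX_BUF_NUM.
 */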
3259static int get_work_pic_num(struct hevc_state_s *hevc)
3260{
3261 int used_buf_num = 0;
3262 int sps_pic_buf_diff = 0;
3263
3264 if (get_dynamic_buf_num_margin(hevc) > 0) {
3265 if ((!hevc->sps_num_reorder_pics_0) &&
3266 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3267 /* the range of sps_num_reorder_pics_0 is in
3268 [0, sps_max_dec_pic_buffering_minus1_0] */
3269 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3270 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3271 } else
3272 used_buf_num = hevc->sps_num_reorder_pics_0
3273 + get_dynamic_buf_num_margin(hevc);
3274
3275 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3276 - hevc->sps_num_reorder_pics_0;
3277#ifdef MULTI_INSTANCE_SUPPORT
3278 /*
3279 need one more for multi instance, as
3280		apply_ref_pic_set() has no chance to run
3281		to clear the referenced flag in some cases
3282 */
3283 if (hevc->m_ins_flag)
3284 used_buf_num++;
3285#endif
3286 } else
3287 used_buf_num = max_buf_num;
3288
3289 if (hevc->save_buffer_mode)
3290 hevc_print(hevc, 0,
3291			"save_buffer_mode: dynamic_buf_num_margin %d ----> %d\n",
3292 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3293
3294 if (sps_pic_buf_diff >= 4)
3295 {
3296 used_buf_num += 1;
3297 }
3298
3299 if (used_buf_num > MAX_BUF_NUM)
3300 used_buf_num = MAX_BUF_NUM;
3301 return used_buf_num;
3302}
3303
3304static int get_alloc_pic_count(struct hevc_state_s *hevc)
3305{
3306 int alloc_pic_count = 0;
3307 int i;
3308 struct PIC_s *pic;
3309 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3310 pic = hevc->m_PIC[i];
3311 if (pic && pic->index >= 0)
3312 alloc_pic_count++;
3313 }
3314 return alloc_pic_count;
3315}
3316
3317static int v4l_config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3318{
3319 int i;
3320 int dw_mode = get_double_write_mode(hevc);
3321
3322 for (i = 0; i < BUF_POOL_SIZE; i++) {
3323 if (hevc->m_BUF[i].start_adr != 0 &&
3324 hevc->m_BUF[i].used_flag == 0) {
3325 hevc->m_BUF[i].used_flag = 1;
3326 break;
3327 }
3328 }
3329
3330 if (i >= BUF_POOL_SIZE)
3331 return -1;
3332
3333 if (hevc->mmu_enable)
3334 pic->header_adr = hevc->m_BUF[i].header_addr;
3335
3336 pic->BUF_index = i;
3337 pic->POC = INVALID_POC;
3338 pic->mc_canvas_y = pic->index;
3339 pic->mc_canvas_u_v = pic->index;
3340
3341 if (dw_mode & 0x10) {
3342 pic->mc_y_adr = hevc->m_BUF[i].start_adr;
3343 pic->mc_u_v_adr = pic->mc_y_adr + hevc->m_BUF[i].luma_size;
3344 pic->mc_canvas_y = (pic->index << 1);
3345 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3346
3347 pic->dw_y_adr = pic->mc_y_adr;
3348 pic->dw_u_v_adr = pic->mc_u_v_adr;
3349 } else if (dw_mode) {
3350 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3351 pic->dw_u_v_adr = pic->dw_y_adr + hevc->m_BUF[i].luma_size;
3352 }
3353
3354 return 0;
3355}
3356
3357static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3358{
3359 int ret = -1;
3360 int i;
3361 /*int lcu_size_log2 = hevc->lcu_size_log2;
3362 int MV_MEM_UNIT=lcu_size_log2==
3363 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3364 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3365 5 ? 0x80 : 0x20;
3366 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3367 hevc->work_space_buf->mpred_mv.buf_size;*/
3368 unsigned int y_adr = 0;
3369 struct buf_stru_s buf_stru;
3370 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3371 int dw_mode = get_double_write_mode(hevc);
3372
3373 for (i = 0; i < BUF_POOL_SIZE; i++) {
3374 if (hevc->m_BUF[i].start_adr != 0 &&
3375 hevc->m_BUF[i].used_flag == 0 &&
3376 buf_size <= hevc->m_BUF[i].size) {
3377 hevc->m_BUF[i].used_flag = 1;
3378 break;
3379 }
3380 }
3381
3382 if (i >= BUF_POOL_SIZE)
3383 return -1;
3384
3385 if (hevc->mmu_enable) {
3386 pic->header_adr = hevc->m_BUF[i].start_adr;
3387 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3388 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3389 y_adr = hevc->m_BUF[i].start_adr +
3390 MMU_COMPRESS_8K_HEADER_SIZE;
3391 else
3392 y_adr = hevc->m_BUF[i].start_adr +
3393 MMU_COMPRESS_HEADER_SIZE;
3394 } else
3395 y_adr = hevc->m_BUF[i].start_adr;
3396
3397 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3398
3399 pic->POC = INVALID_POC;
3400	/*ensure get_pic_by_POC()
3401	does not return a buffer that is not yet decoded*/
3402 pic->BUF_index = i;
3403
3404 if ((!hevc->mmu_enable) &&
3405 ((dw_mode & 0x10) == 0)
3406 ) {
3407 pic->mc_y_adr = y_adr;
3408 y_adr += (buf_stru.mc_buffer_size_h << 16);
3409 }
3410 pic->mc_canvas_y = pic->index;
3411 pic->mc_canvas_u_v = pic->index;
3412 if (dw_mode & 0x10) {
3413 pic->mc_y_adr = y_adr;
3414 pic->mc_u_v_adr = y_adr +
3415 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3416 pic->mc_canvas_y = (pic->index << 1);
3417 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3418
3419 pic->dw_y_adr = pic->mc_y_adr;
3420 pic->dw_u_v_adr = pic->mc_u_v_adr;
3421 } else if (dw_mode) {
3422 pic->dw_y_adr = y_adr;
3423 pic->dw_u_v_adr = pic->dw_y_adr +
3424 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3425 }
3426
3427 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3428 hevc_print(hevc, 0,
3429 "%s index %d BUF_index %d mc_y_adr %x\n",
3430 __func__, pic->index,
3431 pic->BUF_index, pic->mc_y_adr);
3432 if (hevc->mmu_enable &&
3433 dw_mode)
3434 hevc_print(hevc, 0,
3435 "mmu double write adr %ld\n",
3436 pic->cma_alloc_addr);
3437 }
3438 ret = 0;
3439
3440 return ret;
3441}
3442
3443static void init_pic_list(struct hevc_state_s *hevc)
3444{
3445 int i;
3446 int init_buf_num = get_work_pic_num(hevc);
3447 int dw_mode = get_double_write_mode(hevc);
3448 struct vdec_s *vdec = hw_to_vdec(hevc);
3449	/*allocation of decoder buffers is deferred when working in v4l mode. */
3450 if (!hevc->is_used_v4l) {
3451 for (i = 0; i < init_buf_num; i++) {
3452 if (alloc_buf(hevc) < 0) {
3453 if (i <= 8) {
3454				/*if (i + 1) >= 9 buffers were already
3455				allocated, don't report a fatal error.*/
3456 hevc->fatal_error |=
3457 DECODER_FATAL_ERROR_NO_MEM;
3458 }
3459 break;
3460 }
3461 }
3462 }
3463
3464 for (i = 0; i < init_buf_num; i++) {
3465 struct PIC_s *pic = hevc->m_PIC[i];
3466
3467 if (!pic) {
3468 pic = vmalloc(sizeof(struct PIC_s));
3469 if (pic == NULL) {
3470 hevc_print(hevc, 0,
3471 "%s: alloc pic %d fail!!!\n",
3472 __func__, i);
3473 break;
3474 }
3475 hevc->m_PIC[i] = pic;
3476 }
3477 memset(pic, 0, sizeof(struct PIC_s));
3478
3479 pic->index = i;
3480 pic->BUF_index = -1;
3481 pic->mv_buf_index = -1;
3482 if (vdec->parallel_dec == 1) {
3483 pic->y_canvas_index = -1;
3484 pic->uv_canvas_index = -1;
3485 }
3486
3487 pic->width = hevc->pic_w;
3488 pic->height = hevc->pic_h;
3489 pic->double_write_mode = dw_mode;
3490
3491		/*canvas configuration is deferred when working in v4l mode. */
3492 if (!hevc->is_used_v4l) {
3493 if (config_pic(hevc, pic) < 0) {
3494 if (get_dbg_flag(hevc))
3495 hevc_print(hevc, 0,
3496 "Config_pic %d fail\n", pic->index);
3497 pic->index = -1;
3498 i++;
3499 break;
3500 }
3501
3502 if (pic->double_write_mode)
3503 set_canvas(hevc, pic);
3504 }
3505 }
3506
3507 for (; i < MAX_REF_PIC_NUM; i++) {
3508 struct PIC_s *pic = hevc->m_PIC[i];
3509
3510 if (!pic) {
3511 pic = vmalloc(sizeof(struct PIC_s));
3512 if (pic == NULL) {
3513 hevc_print(hevc, 0,
3514 "%s: alloc pic %d fail!!!\n",
3515 __func__, i);
3516 break;
3517 }
3518 hevc->m_PIC[i] = pic;
3519 }
3520 memset(pic, 0, sizeof(struct PIC_s));
3521
3522 pic->index = -1;
3523 pic->BUF_index = -1;
3524 if (vdec->parallel_dec == 1) {
3525 pic->y_canvas_index = -1;
3526 pic->uv_canvas_index = -1;
3527 }
3528 }
3529
3530}
3531
3532static void uninit_pic_list(struct hevc_state_s *hevc)
3533{
3534 struct vdec_s *vdec = hw_to_vdec(hevc);
3535 int i;
3536#ifndef MV_USE_FIXED_BUF
3537 dealloc_mv_bufs(hevc);
3538#endif
3539 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3540 struct PIC_s *pic = hevc->m_PIC[i];
3541
3542 if (pic) {
3543 if (vdec->parallel_dec == 1) {
3544 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3545 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3546 }
3547 release_aux_data(hevc, pic);
3548 vfree(pic);
3549 hevc->m_PIC[i] = NULL;
3550 }
3551 }
3552}
3553
3554#ifdef LOSLESS_COMPRESS_MODE
3555static void init_decode_head_hw(struct hevc_state_s *hevc)
3556{
3557
3558 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3559 unsigned int data32;
3560
3561 int losless_comp_header_size =
3562 compute_losless_comp_header_size(hevc->pic_w,
3563 hevc->pic_h);
3564 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3565 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3566
3567 hevc->losless_comp_body_size = losless_comp_body_size;
3568
3569
3570 if (hevc->mmu_enable) {
3571 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3572 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3573 } else {
3574 if (hevc->mem_saving_mode == 1)
3575 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3576 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3577 else
3578 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3579 ((workaround_enable & 2) ? 1 : 0));
3580 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3581 /*
3582 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3583 * //8-bit mode
3584 */
3585 }
3586 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3587 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3588 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3589
3590 if (hevc->mmu_enable) {
3591 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3592 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3593 buf_spec->mmu_vbh.buf_start +
3594 buf_spec->mmu_vbh.buf_size/2);
3595 data32 = READ_VREG(HEVC_SAO_CTRL9);
3596 data32 |= 0x1;
3597 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3598
3599 /* use HEVC_CM_HEADER_START_ADDR */
3600 data32 = READ_VREG(HEVC_SAO_CTRL5);
3601 data32 |= (1<<10);
3602 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3603 }
3604
3605 if (!hevc->m_ins_flag)
3606 hevc_print(hevc, 0,
3607 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3608 __func__, hevc->pic_w, hevc->pic_h,
3609 losless_comp_body_size, losless_comp_header_size);
3610
3611}
3612#endif
3613#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3614
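/*
 * Load the per-picture frame buffer addresses (compressed header addresses
 * when the MMU is used) into the decoder's ANC2AXI table; the remaining
 * table slots are filled with the last valid picture so that every entry
 * still points at a valid buffer.
 */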
3615static void init_pic_list_hw(struct hevc_state_s *hevc)
3616{
3617 int i;
3618 int cur_pic_num = MAX_REF_PIC_NUM;
3619 int dw_mode = get_double_write_mode(hevc);
3620 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3621 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3622 (0x1 << 1) | (0x1 << 2));
3623 else
3624 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3625
3626 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3627 if (hevc->m_PIC[i] == NULL ||
3628 hevc->m_PIC[i]->index == -1) {
3629 cur_pic_num = i;
3630 break;
3631 }
3632 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3633 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3634 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3635 hevc->m_PIC[i]->header_adr>>5);
3636 else
3637 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3638 hevc->m_PIC[i]->mc_y_adr >> 5);
3639 } else
3640 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3641 hevc->m_PIC[i]->mc_y_adr |
3642 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3643 if (dw_mode & 0x10) {
3644 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3645 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3646 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3647 }
3648 else
3649 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3650 hevc->m_PIC[i]->mc_u_v_adr |
3651 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3652 | 0x1);
3653 }
3654 }
3655 if (cur_pic_num == 0)
3656 return;
3657 for (; i < MAX_REF_PIC_NUM; i++) {
3658 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3659 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3660 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3661 hevc->m_PIC[cur_pic_num-1]->header_adr>>5);
3662 else
3663 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3664 hevc->m_PIC[cur_pic_num-1]->mc_y_adr >> 5);
3665#ifndef LOSLESS_COMPRESS_MODE
3666 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3667 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr >> 5);
3668#endif
3669 } else {
3670 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3671 hevc->m_PIC[cur_pic_num-1]->mc_y_adr|
3672 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_y<<8)
3673 | 0x1);
3674#ifndef LOSLESS_COMPRESS_MODE
3675 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3676 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr|
3677 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_u_v<<8)
3678 | 0x1);
3679#endif
3680 }
3681 }
3682
3683 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3684
3685 /* Zero out canvas registers in IPP -- avoid simulation X */
3686 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3687 (0 << 8) | (0 << 1) | 1);
3688 for (i = 0; i < 32; i++)
3689 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3690
3691#ifdef LOSLESS_COMPRESS_MODE
3692 if ((dw_mode & 0x10) == 0)
3693 init_decode_head_hw(hevc);
3694#endif
3695
3696}
3697
3698
3699static void dump_pic_list(struct hevc_state_s *hevc)
3700{
3701 int i;
3702 struct PIC_s *pic;
3703
3704 hevc_print(hevc, 0,
3705 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3706 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3707 pic = hevc->m_PIC[i];
3708 if (pic == NULL || pic->index == -1)
3709 continue;
3710 hevc_print_cont(hevc, 0,
3711 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3712 pic->index, pic->BUF_index,
3713#ifndef MV_USE_FIXED_BUF
3714 pic->mv_buf_index,
3715#else
3716 -1,
3717#endif
3718 pic->decode_idx, pic->POC, pic->referenced);
3719 hevc_print_cont(hevc, 0,
3720 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3721 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3722 pic->width, pic->height);
3723 hevc_print_cont(hevc, 0,
3724 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3725 pic->output_ready, pic->mpred_mv_wr_start_addr,
3726 pic->vf_ref);
3727 }
3728}
3729
3730static void clear_referenced_flag(struct hevc_state_s *hevc)
3731{
3732 int i;
3733 struct PIC_s *pic;
3734 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3735 pic = hevc->m_PIC[i];
3736 if (pic == NULL || pic->index == -1)
3737 continue;
3738 if (pic->referenced) {
3739 pic->referenced = 0;
3740 put_mv_buf(hevc, pic);
3741 }
3742 }
3743}
3744
3745static void clear_poc_flag(struct hevc_state_s *hevc)
3746{
3747 int i;
3748 struct PIC_s *pic;
3749 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3750 pic = hevc->m_PIC[i];
3751 if (pic == NULL || pic->index == -1)
3752 continue;
3753 pic->POC = INVALID_POC;
3754 }
3755}
3756
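/*
 * Select the next picture to output: the lowest-POC picture (lowest
 * decode_idx in I-only trick mode) among those with output_mark set, but
 * only once more pictures are pending than num_reorder_pic allows, unless
 * flush_flag forces output.
 */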
3757static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3758 unsigned char flush_flag)
3759{
3760 int num_pic_not_yet_display = 0;
3761 int i;
3762 struct PIC_s *pic;
3763 struct PIC_s *pic_display = NULL;
3764 struct vdec_s *vdec = hw_to_vdec(hevc);
3765
3766 if (hevc->i_only & 0x4) {
3767 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3768 pic = hevc->m_PIC[i];
3769 if (pic == NULL ||
3770 (pic->index == -1) ||
3771 (pic->BUF_index == -1) ||
3772 (pic->POC == INVALID_POC))
3773 continue;
3774 if (pic->output_mark) {
3775 if (pic_display) {
3776 if (pic->decode_idx <
3777 pic_display->decode_idx)
3778 pic_display = pic;
3779
3780 } else
3781 pic_display = pic;
3782
3783 }
3784 }
3785 if (pic_display) {
3786 pic_display->output_mark = 0;
3787 pic_display->recon_mark = 0;
3788 pic_display->output_ready = 1;
3789 pic_display->referenced = 0;
3790 put_mv_buf(hevc, pic_display);
3791 }
3792 } else {
3793 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3794 pic = hevc->m_PIC[i];
3795 if (pic == NULL ||
3796 (pic->index == -1) ||
3797 (pic->BUF_index == -1) ||
3798 (pic->POC == INVALID_POC))
3799 continue;
3800 if (pic->output_mark)
3801 num_pic_not_yet_display++;
3802 if (pic->slice_type == 2 &&
3803 hevc->vf_pre_count == 0 &&
3804 fast_output_enable & 0x1) {
3805 /*fast output for first I picture*/
3806 pic->num_reorder_pic = 0;
3807 if (vdec->master || vdec->slave)
3808 pic_display = pic;
3809 hevc_print(hevc, 0, "VH265: output first frame\n");
3810 }
3811 }
3812
3813 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3814 pic = hevc->m_PIC[i];
3815 if (pic == NULL ||
3816 (pic->index == -1) ||
3817 (pic->BUF_index == -1) ||
3818 (pic->POC == INVALID_POC))
3819 continue;
3820 if (pic->output_mark) {
3821 if (pic_display) {
3822 if (pic->POC < pic_display->POC)
3823 pic_display = pic;
3824 else if ((pic->POC == pic_display->POC)
3825 && (pic->decode_idx <
3826 pic_display->
3827 decode_idx))
3828 pic_display
3829 = pic;
3830 } else
3831 pic_display = pic;
3832 }
3833 }
3834 if (pic_display) {
3835 if ((num_pic_not_yet_display >
3836 pic_display->num_reorder_pic)
3837 || flush_flag) {
3838 pic_display->output_mark = 0;
3839 pic_display->recon_mark = 0;
3840 pic_display->output_ready = 1;
3841 } else if (num_pic_not_yet_display >=
3842 (MAX_REF_PIC_NUM - 1)) {
3843 pic_display->output_mark = 0;
3844 pic_display->recon_mark = 0;
3845 pic_display->output_ready = 1;
3846 hevc_print(hevc, 0,
3847					"Warning, num_reorder_pic %d is beyond buf num\n",
3848 pic_display->num_reorder_pic);
3849 } else
3850 pic_display = NULL;
3851 }
3852 }
3853
3854 if (pic_display && (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3855 pic_display = NULL;
3856 hevc->first_pic_flag = 0;
3857 }
3858 return pic_display;
3859}
3860
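/*
 * Program the motion-compensation canvas table with the reference
 * pictures of the current slice: list0 entries start at table index 0,
 * list1 entries (B slices) at index 16.  A missing reference, a
 * reference with mismatched width/height, or (when ref_frame_mark_flag
 * is set) an error-marked reference propagates error_mark to the
 * current picture.
 */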
3861static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3862{
3863 int i;
3864 struct PIC_s *pic;
3865
3866 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3867 hevc_print(hevc, 0,
3868 "config_mc_buffer entered .....\n");
3869 if (cur_pic->slice_type != 2) { /* P and B pic */
3870 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3871 (0 << 8) | (0 << 1) | 1);
3872 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3873 pic =
3874 get_ref_pic_by_POC(hevc,
3875 cur_pic->
3876 m_aiRefPOCList0[cur_pic->
3877 slice_idx][i]);
3878 if (pic) {
3879 if ((pic->width != hevc->pic_w) ||
3880 (pic->height != hevc->pic_h)) {
3881 hevc_print(hevc, 0,
3882 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3883 __func__, pic->POC,
3884 pic->width, pic->height);
3885 cur_pic->error_mark = 1;
3886 }
3887 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3888 cur_pic->error_mark = 1;
3889 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3890 (pic->mc_canvas_u_v << 16)
3891 | (pic->mc_canvas_u_v
3892 << 8) |
3893 pic->mc_canvas_y);
3894 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3895 hevc_print_cont(hevc, 0,
3896 "refid %x mc_canvas_u_v %x",
3897 i, pic->mc_canvas_u_v);
3898 hevc_print_cont(hevc, 0,
3899 " mc_canvas_y %x\n",
3900 pic->mc_canvas_y);
3901 }
3902 } else
3903 cur_pic->error_mark = 1;
3904
3905 if (pic == NULL || pic->error_mark) {
3906 hevc_print(hevc, 0,
3907 "Error %s, %dth poc (%d) %s",
3908 __func__, i,
3909 cur_pic->m_aiRefPOCList0[cur_pic->
3910 slice_idx][i],
3911 pic ? "has error" :
3912 "not in list0");
3913 }
3914 }
3915 }
3916 if (cur_pic->slice_type == 0) { /* B pic */
3917 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3918 hevc_print(hevc, 0,
3919 "config_mc_buffer RefNum_L1\n");
3920 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3921 (16 << 8) | (0 << 1) | 1);
3922
3923 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3924 pic =
3925 get_ref_pic_by_POC(hevc,
3926 cur_pic->
3927 m_aiRefPOCList1[cur_pic->
3928 slice_idx][i]);
3929 if (pic) {
3930 if ((pic->width != hevc->pic_w) ||
3931 (pic->height != hevc->pic_h)) {
3932 hevc_print(hevc, 0,
3933 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3934 __func__, pic->POC,
3935 pic->width, pic->height);
3936 cur_pic->error_mark = 1;
3937 }
3938
3939 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3940 cur_pic->error_mark = 1;
3941 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3942 (pic->mc_canvas_u_v << 16)
3943 | (pic->mc_canvas_u_v
3944 << 8) |
3945 pic->mc_canvas_y);
3946 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3947 hevc_print_cont(hevc, 0,
3948 "refid %x mc_canvas_u_v %x",
3949 i, pic->mc_canvas_u_v);
3950 hevc_print_cont(hevc, 0,
3951 " mc_canvas_y %x\n",
3952 pic->mc_canvas_y);
3953 }
3954 } else
3955 cur_pic->error_mark = 1;
3956
3957 if (pic == NULL || pic->error_mark) {
3958 hevc_print(hevc, 0,
3959 "Error %s, %dth poc (%d) %s",
3960 __func__, i,
3961 cur_pic->m_aiRefPOCList1[cur_pic->
3962 slice_idx][i],
3963 pic ? "has error" :
3964 "not in list1");
3965 }
3966 }
3967 }
3968 return 0;
3969}
3970
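/*
 * Apply the current slice's reference picture set: any picture in the
 * DPB whose POC cannot be derived from the CUR_RPS deltas (and that is
 * not the current picture) has its referenced flag cleared and its MV
 * buffer released.
 */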
3971static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
3972 union param_u *params)
3973{
3974 int ii, i;
3975 int poc_tmp;
3976 struct PIC_s *pic;
3977 unsigned char is_referenced;
3978 /* hevc_print(hevc, 0,
3979 "%s cur_poc %d\n", __func__, cur_poc); */
3980 if (pic_list_debug & 0x2) {
3981 pr_err("cur poc %d\n", cur_poc);
3982 }
3983 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
3984 pic = hevc->m_PIC[ii];
3985 if (pic == NULL ||
3986 pic->index == -1 ||
3987 pic->BUF_index == -1
3988 )
3989 continue;
3990
3991 if ((pic->referenced == 0 || pic->POC == cur_poc))
3992 continue;
3993 is_referenced = 0;
3994 for (i = 0; i < 16; i++) {
3995 int delt;
3996
3997 if (params->p.CUR_RPS[i] & 0x8000)
3998 break;
3999 delt =
4000 params->p.CUR_RPS[i] &
4001 ((1 << (RPS_USED_BIT - 1)) - 1);
4002 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
4003 poc_tmp =
4004 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
4005 delt);
4006 } else
4007 poc_tmp = cur_poc + delt;
4008 if (poc_tmp == pic->POC) {
4009 is_referenced = 1;
4010 /* hevc_print(hevc, 0, "i is %d\n", i); */
4011 break;
4012 }
4013 }
4014 if (is_referenced == 0) {
4015 pic->referenced = 0;
4016 put_mv_buf(hevc, pic);
4017 /* hevc_print(hevc, 0,
4018 "set poc %d reference to 0\n", pic->POC); */
4019 if (pic_list_debug & 0x2) {
4020 pr_err("set poc %d reference to 0\n", pic->POC);
4021 }
4022 }
4023 }
4024
4025}
4026
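/*
 * Build the reference POC lists of the current slice from CUR_RPS:
 * negative deltas go to RefPicSetStCurr0, positive ones to
 * RefPicSetStCurr1, then m_aiRefPOCList0/1 are filled for
 * num_ref_idx_lX_active entries, honoring the reference picture list
 * modification syntax when modification_flag is set.  Finally the
 * slice type and RefNum_L0/L1 are recorded on the current picture.
 */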
4027static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
4028{
4029 struct PIC_s *pic = hevc->cur_pic;
4030 int i, rIdx;
4031 int num_neg = 0;
4032 int num_pos = 0;
4033 int total_num;
4034 int num_ref_idx_l0_active =
4035 (params->p.num_ref_idx_l0_active >
4036 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4037 params->p.num_ref_idx_l0_active;
4038 int num_ref_idx_l1_active =
4039 (params->p.num_ref_idx_l1_active >
4040 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4041 params->p.num_ref_idx_l1_active;
4042
4043 int RefPicSetStCurr0[16];
4044 int RefPicSetStCurr1[16];
4045
4046 for (i = 0; i < 16; i++) {
4047 RefPicSetStCurr0[i] = 0;
4048 RefPicSetStCurr1[i] = 0;
4049 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
4050 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
4051 }
4052 for (i = 0; i < 16; i++) {
4053 if (params->p.CUR_RPS[i] & 0x8000)
4054 break;
4055 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
4056 int delt =
4057 params->p.CUR_RPS[i] &
4058 ((1 << (RPS_USED_BIT - 1)) - 1);
4059
4060 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
4061 RefPicSetStCurr0[num_neg] =
4062 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
4063 delt);
4064 /* hevc_print(hevc, 0,
4065 * "RefPicSetStCurr0 %x %x %x\n",
4066 * RefPicSetStCurr0[num_neg], pic->POC,
4067 * (0x800-(params[i]&0x7ff)));
4068 */
4069 num_neg++;
4070 } else {
4071 RefPicSetStCurr1[num_pos] = pic->POC + delt;
4072 /* hevc_print(hevc, 0,
4073 * "RefPicSetStCurr1 %d\n",
4074 * RefPicSetStCurr1[num_pos]);
4075 */
4076 num_pos++;
4077 }
4078 }
4079 }
4080 total_num = num_neg + num_pos;
4081 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4082 hevc_print(hevc, 0,
4083 "%s: curpoc %d slice_type %d, total %d ",
4084 __func__, pic->POC, params->p.slice_type, total_num);
4085 hevc_print_cont(hevc, 0,
4086 "num_neg %d num_list0 %d num_list1 %d\n",
4087 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
4088 }
4089
4090 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4091 hevc_print(hevc, 0,
4092 "HEVC Stream buf start ");
4093 hevc_print_cont(hevc, 0,
4094 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
4095 READ_VREG(HEVC_STREAM_START_ADDR),
4096 READ_VREG(HEVC_STREAM_END_ADDR),
4097 READ_VREG(HEVC_STREAM_WR_PTR),
4098 READ_VREG(HEVC_STREAM_RD_PTR),
4099 READ_VREG(HEVC_STREAM_LEVEL),
4100 READ_VREG(HEVC_STREAM_FIFO_CTL),
4101 READ_VREG(HEVC_PARSER_INT_CONTROL));
4102 }
4103
4104 if (total_num > 0) {
4105 if (params->p.modification_flag & 0x1) {
4106 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4107 hevc_print(hevc, 0, "ref0 POC (modification):");
4108 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4109 int cIdx = params->p.modification_list[rIdx];
4110
4111 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4112 cIdx >=
4113 num_neg ? RefPicSetStCurr1[cIdx -
4114 num_neg] :
4115 RefPicSetStCurr0[cIdx];
4116 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4117 hevc_print_cont(hevc, 0, "%d ",
4118 pic->m_aiRefPOCList0[pic->
4119 slice_idx]
4120 [rIdx]);
4121 }
4122 }
4123 } else {
4124 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4125 hevc_print(hevc, 0, "ref0 POC:");
4126 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4127 int cIdx = rIdx % total_num;
4128
4129 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4130 cIdx >=
4131 num_neg ? RefPicSetStCurr1[cIdx -
4132 num_neg] :
4133 RefPicSetStCurr0[cIdx];
4134 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4135 hevc_print_cont(hevc, 0, "%d ",
4136 pic->m_aiRefPOCList0[pic->
4137 slice_idx]
4138 [rIdx]);
4139 }
4140 }
4141 }
4142 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4143 hevc_print_cont(hevc, 0, "\n");
4144 if (params->p.slice_type == B_SLICE) {
4145 if (params->p.modification_flag & 0x2) {
4146 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4147 hevc_print(hevc, 0,
4148 "ref1 POC (modification):");
4149 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4150 rIdx++) {
4151 int cIdx;
4152
4153 if (params->p.modification_flag & 0x1) {
4154 cIdx =
4155 params->p.
4156 modification_list
4157 [num_ref_idx_l0_active +
4158 rIdx];
4159 } else {
4160 cIdx =
4161 params->p.
4162 modification_list[rIdx];
4163 }
4164 pic->m_aiRefPOCList1[pic->
4165 slice_idx][rIdx] =
4166 cIdx >=
4167 num_pos ?
4168 RefPicSetStCurr0[cIdx - num_pos]
4169 : RefPicSetStCurr1[cIdx];
4170 if (get_dbg_flag(hevc) &
4171 H265_DEBUG_BUFMGR) {
4172 hevc_print_cont(hevc, 0, "%d ",
4173 pic->
4174 m_aiRefPOCList1[pic->
4175 slice_idx]
4176 [rIdx]);
4177 }
4178 }
4179 } else {
4180 if (get_dbg_flag(hevc) &
4181 H265_DEBUG_BUFMGR)
4182 hevc_print(hevc, 0, "ref1 POC:");
4183 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4184 rIdx++) {
4185 int cIdx = rIdx % total_num;
4186
4187 pic->m_aiRefPOCList1[pic->
4188 slice_idx][rIdx] =
4189 cIdx >=
4190 num_pos ?
4191 RefPicSetStCurr0[cIdx -
4192 num_pos]
4193 : RefPicSetStCurr1[cIdx];
4194 if (get_dbg_flag(hevc) &
4195 H265_DEBUG_BUFMGR) {
4196 hevc_print_cont(hevc, 0, "%d ",
4197 pic->
4198 m_aiRefPOCList1[pic->
4199 slice_idx]
4200 [rIdx]);
4201 }
4202 }
4203 }
4204 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4205 hevc_print_cont(hevc, 0, "\n");
4206 }
4207 }
4208 /*set m_PIC */
4209 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4210 (params->p.slice_type == P_SLICE) ? 1 :
4211 (params->p.slice_type == B_SLICE) ? 0 : 3;
4212 pic->RefNum_L0 = num_ref_idx_l0_active;
4213 pic->RefNum_L1 = num_ref_idx_l1_active;
4214}
4215
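/*
 * Rebuild the tile map for the current picture: when tiles are enabled
 * each m_tile[row][col] gets its width/height (uniform or explicit
 * spacing), start CU coordinates and per-tile SAO vb/abv buffer
 * addresses; otherwise a single tile covering the whole picture is set
 * up.
 */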
4216static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4217 int pic_height_cu, int sao_mem_unit,
4218 union param_u *params)
4219{
4220 int i, j;
4221 int start_cu_x, start_cu_y;
4222 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4223 int sao_abv_size = sao_mem_unit * pic_width_cu;
4224#ifdef DETREFILL_ENABLE
4225 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4226 int tmpRefillLcuSize = 1 <<
4227 (params->p.log2_min_coding_block_size_minus3 +
4228 3 + params->p.log2_diff_max_min_coding_block_size);
4229 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4230 "%x, %x, %x, %x\n",
4231 params->p.slice_segment_address,
4232 params->p.bit_depth,
4233 params->p.tiles_enabled_flag,
4234 tmpRefillLcuSize);
4235 if (params->p.slice_segment_address == 0 &&
4236 params->p.bit_depth != 0 &&
4237 (params->p.tiles_enabled_flag & 1) &&
4238 tmpRefillLcuSize == 64)
4239 hevc->delrefill_check = 1;
4240 else
4241 hevc->delrefill_check = 0;
4242 }
4243#endif
4244
4245 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4246 if (params->p.tiles_enabled_flag & 1) {
4247 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4248 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4249
4250 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4251 || hevc->num_tile_row <= 0) {
4252 hevc->num_tile_row = 1;
4253 hevc_print(hevc, 0,
4254 "%s: num_tile_rows_minus1 (%d) error!!\n",
4255 __func__, params->p.num_tile_rows_minus1);
4256 }
4257 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4258 || hevc->num_tile_col <= 0) {
4259 hevc->num_tile_col = 1;
4260 hevc_print(hevc, 0,
4261 "%s: num_tile_columns_minus1 (%d) error!!\n",
4262 __func__, params->p.num_tile_columns_minus1);
4263 }
4264 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4265 hevc_print(hevc, 0,
4266 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4267 __func__, pic_width_cu, pic_height_cu);
4268 hevc_print_cont(hevc, 0,
4269 "num_tile_col %d num_tile_row %d:\n",
4270 hevc->num_tile_col, hevc->num_tile_row);
4271 }
4272
4273 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4274 int w = pic_width_cu / hevc->num_tile_col;
4275 int h = pic_height_cu / hevc->num_tile_row;
4276
4277 start_cu_y = 0;
4278 for (i = 0; i < hevc->num_tile_row; i++) {
4279 start_cu_x = 0;
4280 for (j = 0; j < hevc->num_tile_col; j++) {
4281 if (j == (hevc->num_tile_col - 1)) {
4282 hevc->m_tile[i][j].width =
4283 pic_width_cu -
4284 start_cu_x;
4285 } else
4286 hevc->m_tile[i][j].width = w;
4287 if (i == (hevc->num_tile_row - 1)) {
4288 hevc->m_tile[i][j].height =
4289 pic_height_cu -
4290 start_cu_y;
4291 } else
4292 hevc->m_tile[i][j].height = h;
4293 hevc->m_tile[i][j].start_cu_x
4294 = start_cu_x;
4295 hevc->m_tile[i][j].start_cu_y
4296 = start_cu_y;
4297 hevc->m_tile[i][j].sao_vb_start_addr =
4298 hevc->work_space_buf->sao_vb.
4299 buf_start + j * sao_vb_size;
4300 hevc->m_tile[i][j].sao_abv_start_addr =
4301 hevc->work_space_buf->sao_abv.
4302 buf_start + i * sao_abv_size;
4303 if (get_dbg_flag(hevc) &
4304 H265_DEBUG_BUFMGR) {
4305 hevc_print_cont(hevc, 0,
4306 "{y=%d, x=%d w %d h %d ",
4307 i, j, hevc->m_tile[i][j].width,
4308 hevc->m_tile[i][j].height);
4309 hevc_print_cont(hevc, 0,
4310 "start_x %d start_y %d ",
4311 hevc->m_tile[i][j].start_cu_x,
4312 hevc->m_tile[i][j].start_cu_y);
4313 hevc_print_cont(hevc, 0,
4314 "sao_vb_start 0x%x ",
4315 hevc->m_tile[i][j].
4316 sao_vb_start_addr);
4317 hevc_print_cont(hevc, 0,
4318 "sao_abv_start 0x%x}\n",
4319 hevc->m_tile[i][j].
4320 sao_abv_start_addr);
4321 }
4322 start_cu_x += hevc->m_tile[i][j].width;
4323
4324 }
4325 start_cu_y += hevc->m_tile[i][0].height;
4326 }
4327 } else {
4328 start_cu_y = 0;
4329 for (i = 0; i < hevc->num_tile_row; i++) {
4330 start_cu_x = 0;
4331 for (j = 0; j < hevc->num_tile_col; j++) {
4332 if (j == (hevc->num_tile_col - 1)) {
4333 hevc->m_tile[i][j].width =
4334 pic_width_cu -
4335 start_cu_x;
4336 } else {
4337 hevc->m_tile[i][j].width =
4338 params->p.tile_width[j];
4339 }
4340 if (i == (hevc->num_tile_row - 1)) {
4341 hevc->m_tile[i][j].height =
4342 pic_height_cu -
4343 start_cu_y;
4344 } else {
4345 hevc->m_tile[i][j].height =
4346 params->
4347 p.tile_height[i];
4348 }
4349 hevc->m_tile[i][j].start_cu_x
4350 = start_cu_x;
4351 hevc->m_tile[i][j].start_cu_y
4352 = start_cu_y;
4353 hevc->m_tile[i][j].sao_vb_start_addr =
4354 hevc->work_space_buf->sao_vb.
4355 buf_start + j * sao_vb_size;
4356 hevc->m_tile[i][j].sao_abv_start_addr =
4357 hevc->work_space_buf->sao_abv.
4358 buf_start + i * sao_abv_size;
4359 if (get_dbg_flag(hevc) &
4360 H265_DEBUG_BUFMGR) {
4361 hevc_print_cont(hevc, 0,
4362 "{y=%d, x=%d w %d h %d ",
4363 i, j, hevc->m_tile[i][j].width,
4364 hevc->m_tile[i][j].height);
4365 hevc_print_cont(hevc, 0,
4366 "start_x %d start_y %d ",
4367 hevc->m_tile[i][j].start_cu_x,
4368 hevc->m_tile[i][j].start_cu_y);
4369 hevc_print_cont(hevc, 0,
4370 "sao_vb_start 0x%x ",
4371 hevc->m_tile[i][j].
4372 sao_vb_start_addr);
4373 hevc_print_cont(hevc, 0,
4374 "sao_abv_start 0x%x}\n",
4375 hevc->m_tile[i][j].
4376 sao_abv_start_addr);
4377
4378 }
4379 start_cu_x += hevc->m_tile[i][j].width;
4380 }
4381 start_cu_y += hevc->m_tile[i][0].height;
4382 }
4383 }
4384 } else {
4385 hevc->num_tile_col = 1;
4386 hevc->num_tile_row = 1;
4387 hevc->m_tile[0][0].width = pic_width_cu;
4388 hevc->m_tile[0][0].height = pic_height_cu;
4389 hevc->m_tile[0][0].start_cu_x = 0;
4390 hevc->m_tile[0][0].start_cu_y = 0;
4391 hevc->m_tile[0][0].sao_vb_start_addr =
4392 hevc->work_space_buf->sao_vb.buf_start;
4393 hevc->m_tile[0][0].sao_abv_start_addr =
4394 hevc->work_space_buf->sao_abv.buf_start;
4395 }
4396}
4397
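/*
 * Translate an LCU address into tile coordinates; the return value
 * packs tile_x in bits [7:0] and tile_y in bits [15:8], or -1 when
 * pic_width_lcu is 0.
 */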
4398static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4399 int pic_width_lcu)
4400{
4401 int cu_x;
4402 int cu_y;
4403 int tile_x = 0;
4404 int tile_y = 0;
4405 int i;
4406
4407 if (pic_width_lcu == 0) {
4408 if (get_dbg_flag(hevc)) {
4409 hevc_print(hevc, 0,
4410 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4411 __func__, hevc->pic_w, hevc->pic_h);
4412 }
4413 return -1;
4414 }
4415 cu_x = cu_adr % pic_width_lcu;
4416 cu_y = cu_adr / pic_width_lcu;
4417 if (hevc->tile_enabled) {
4418 for (i = 0; i < hevc->num_tile_col; i++) {
4419 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4420 tile_x = i;
4421 else
4422 break;
4423 }
4424 for (i = 0; i < hevc->num_tile_row; i++) {
4425 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4426 tile_y = i;
4427 else
4428 break;
4429 }
4430 }
4431 return (tile_x) | (tile_y << 8);
4432}
4433
4434static void print_scratch_error(int error_num)
4435{
4436#if 0
4437 if (get_dbg_flag(hevc)) {
4438 hevc_print(hevc, 0,
4439 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4440 error_num);
4441 }
4442#endif
4443}
4444
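/*
 * Program the workspace buffer base addresses (IPP line buffer, RPM,
 * short-term RPS, VPS/SPS/PPS, SAO, scale LUT, deblock parameter/data
 * buffers, the frame MMU map when mmu_enable is set, and the LMEM dump
 * address) into the decoder registers.
 */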
4445static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4446{
4447 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4448
4449 if (get_dbg_flag(hevc))
4450 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4451 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4452 __func__,
4453 buf_spec->ipp.buf_start,
4454 buf_spec->start_adr,
4455 buf_spec->short_term_rps.buf_start,
4456 buf_spec->vps.buf_start,
4457 buf_spec->sps.buf_start,
4458 buf_spec->pps.buf_start,
4459 buf_spec->sao_up.buf_start,
4460 buf_spec->swap_buf.buf_start,
4461 buf_spec->swap_buf2.buf_start,
4462 buf_spec->scalelut.buf_start,
4463 buf_spec->dblk_para.buf_start,
4464 buf_spec->dblk_data.buf_start,
4465 buf_spec->dblk_data2.buf_start);
4466 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4467 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4468 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4469 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4470 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4471 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4472 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4473 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4474 if (hevc->mmu_enable) {
4475 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4476 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4477 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4478 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4479 } else
4480 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4481 } /*else
4482 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4483 buf_spec->swap_buf.buf_start);
4484 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4485 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4486 /* cfg_p_addr */
4487 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4488 /* cfg_d_addr */
4489 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4490
4491 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4492
4493 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4494}
4495
4496static void parser_cmd_write(void)
4497{
4498 u32 i;
4499 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4500 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4501 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4502 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4503 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4504 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4505 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4506 0x7C00
4507 };
4508 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4509 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4510}
4511
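/*
 * One-time parser/decoder hardware init: sanity-check the parser
 * scratch registers, reset IQIT, enable stream fetch (non-multi-
 * instance mode), configure startcode/emulation checking and parser
 * interrupts, clear the IQIT scale LUT, download the parser command
 * table and reset the IPP/MPP pipeline.
 */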
4512static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4513 int decode_pic_begin, int decode_pic_num)
4514{
4515 unsigned int data32;
4516 int i;
4517#if 0
4518 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4519 /* Set MCR fetch priorities*/
4520 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4521 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4522 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4523 }
4524#endif
4525#if 1
4526 /* m8baby test1902 */
4527 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4528 hevc_print(hevc, 0,
4529 "%s\n", __func__);
4530 data32 = READ_VREG(HEVC_PARSER_VERSION);
4531 if (data32 != 0x00010001) {
4532 print_scratch_error(25);
4533 return;
4534 }
4535 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4536 data32 = READ_VREG(HEVC_PARSER_VERSION);
4537 if (data32 != 0x5a5a55aa) {
4538 print_scratch_error(26);
4539 return;
4540 }
4541#if 0
4542 /* test Parser Reset */
4543 /* reset iqit to start mem init again */
4544 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4545 (1 << 3) /* reset_whole parser */
4546 );
4547 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4548 data32 = READ_VREG(HEVC_PARSER_VERSION);
4549 if (data32 != 0x00010001)
4550 hevc_print(hevc, 0,
4551 "Test Parser Fatal Error\n");
4552#endif
4553 /* reset iqit to start mem init again */
4554 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4555 );
4556 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4557 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4558
4559#endif
4560 if (!hevc->m_ins_flag) {
4561 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4562 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4563 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4564 data32 |= (0xf << 25); /*arwlen_axi_max*/
4565 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4566 }
4567 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4568 if (data32 != 0x00000100) {
4569 print_scratch_error(29);
4570 return;
4571 }
4572 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4573 if (data32 != 0x00000300) {
4574 print_scratch_error(30);
4575 return;
4576 }
4577 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4578 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4579 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4580 if (data32 != 0x12345678) {
4581 print_scratch_error(31);
4582 return;
4583 }
4584 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4585 if (data32 != 0x9abcdef0) {
4586 print_scratch_error(32);
4587 return;
4588 }
4589 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4590 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4591
4592 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4593 data32 &= 0x03ffffff;
4594 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4595 | /* stream_buffer_empty_int_amrisc_enable */
4596 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4597 (1 << 7) | /* dec_done_int_cpu_enable */
4598 (1 << 4) | /* startcode_found_int_cpu_enable */
4599 (0 << 3) | /* startcode_found_int_amrisc_enable */
4600 (1 << 0) /* parser_int_enable */
4601 ;
4602 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4603
4604 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4605 data32 = data32 | (1 << 1) | /* emulation_check_on */
4606 (1 << 0) /* startcode_check_on */
4607 ;
4608 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4609
4610 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4611 (2 << 4) | /* emulate_code_length_sub_1 */
4612 (2 << 1) | /* start_code_length_sub_1 */
4613 (1 << 0) /* stream_shift_enable */
4614 );
4615
4616 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4617 );
4618 /* hevc_parser_core_clk_en */
4619 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4620 );
4621
4622 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4623
4624	/* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4625 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4626 for (i = 0; i < 1024; i++)
4627 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4628
4629#ifdef ENABLE_SWAP_TEST
4630 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4631#endif
4632
4633 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4634 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4635 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4636 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4637 /* Send parser_cmd */
4638 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4639
4640 parser_cmd_write();
4641
4642 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4643 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4644 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4645
4646 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4647 /* (1 << 8) | // sao_sw_pred_enable */
4648 (1 << 5) | /* parser_sao_if_en */
4649 (1 << 2) | /* parser_mpred_if_en */
4650 (1 << 0) /* parser_scaler_if_en */
4651 );
4652
4653 /* Changed to Start MPRED in microcode */
4654 /*
4655 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4656 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4657 * (1<<31)
4658 * );
4659 */
4660
4661 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4662 (1 << 0) /* software reset ipp and mpp */
4663 );
4664 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4665 (0 << 0) /* software reset ipp and mpp */
4666 );
4667
4668 if (get_double_write_mode(hevc) & 0x10)
4669 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4670			0x1 << 31 /* Enable NV21 reference read mode for MC */
4671 );
4672
4673}
4674
4675static void decoder_hw_reset(void)
4676{
4677 int i;
4678 unsigned int data32;
4679 /* reset iqit to start mem init again */
4680 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4681 );
4682 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4683 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4684
4685 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4686 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4687 ;
4688 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4689
4690 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4691 if (data32 != 0x00000100) {
4692 print_scratch_error(29);
4693 return;
4694 }
4695 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4696 if (data32 != 0x00000300) {
4697 print_scratch_error(30);
4698 return;
4699 }
4700 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4701 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4702 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4703 if (data32 != 0x12345678) {
4704 print_scratch_error(31);
4705 return;
4706 }
4707 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4708 if (data32 != 0x9abcdef0) {
4709 print_scratch_error(32);
4710 return;
4711 }
4712 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4713 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4714
4715 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4716 data32 &= 0x03ffffff;
4717 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4718 | /* stream_buffer_empty_int_amrisc_enable */
4719 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4720 (1 << 7) | /* dec_done_int_cpu_enable */
4721 (1 << 4) | /* startcode_found_int_cpu_enable */
4722 (0 << 3) | /* startcode_found_int_amrisc_enable */
4723 (1 << 0) /* parser_int_enable */
4724 ;
4725 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4726
4727 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4728 data32 = data32 | (1 << 1) | /* emulation_check_on */
4729 (1 << 0) /* startcode_check_on */
4730 ;
4731 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4732
4733 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4734 (2 << 4) | /* emulate_code_length_sub_1 */
4735 (2 << 1) | /* start_code_length_sub_1 */
4736 (1 << 0) /* stream_shift_enable */
4737 );
4738
4739 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4740 );
4741 /* hevc_parser_core_clk_en */
4742 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4743 );
4744
4745	/* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4746 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4747 for (i = 0; i < 1024; i++)
4748 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4749
4750 /* Send parser_cmd */
4751 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4752
4753 parser_cmd_write();
4754
4755 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4756 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4757 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4758
4759 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4760 /* (1 << 8) | // sao_sw_pred_enable */
4761 (1 << 5) | /* parser_sao_if_en */
4762 (1 << 2) | /* parser_mpred_if_en */
4763 (1 << 0) /* parser_scaler_if_en */
4764 );
4765
4766 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4767 (1 << 0) /* software reset ipp and mpp */
4768 );
4769 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4770 (0 << 0) /* software reset ipp and mpp */
4771 );
4772}
4773
4774#ifdef CONFIG_HEVC_CLK_FORCED_ON
4775static void config_hevc_clk_forced_on(void)
4776{
4777 unsigned int rdata32;
4778 /* IQIT */
4779 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4780 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4781
4782 /* DBLK */
4783 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4784 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4785
4786 /* SAO */
4787 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4788 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4789
4790 /* MPRED */
4791 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4792 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4793
4794 /* PARSER */
4795 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4796 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4797 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4798 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4799 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4800 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4801 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4802 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4803 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4804 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4805 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4806 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4807 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4808
4809 /* IPP */
4810 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4811 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4812
4813 /* MCRCC */
4814 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4815 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4816}
4817#endif
4818
4819#ifdef MCRCC_ENABLE
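/*
 * Configure the MCRCC block for the slice type: it is held in reset
 * for I slices, while for P/B slices it is primed with the first
 * canvas entries of list0/list1 from the ANC canvas table before
 * progressive mode is enabled.
 */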
4820static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4821{
4822 unsigned int rdata32;
4823 unsigned int rdata32_2;
4824 int l0_cnt = 0;
4825 int l1_cnt = 0x7fff;
4826
4827 if (get_double_write_mode(hevc) & 0x10) {
4828 l0_cnt = hevc->cur_pic->RefNum_L0;
4829 l1_cnt = hevc->cur_pic->RefNum_L1;
4830 }
4831
4832 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4833
4834 if (slice_type == 2) { /* I-PIC */
4835 /* remove reset -- disables clock */
4836 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4837 return;
4838 }
4839
4840 if (slice_type == 0) { /* B-PIC */
4841 /* Programme canvas0 */
4842 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4843 (0 << 8) | (0 << 1) | 0);
4844 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4845 rdata32 = rdata32 & 0xffff;
4846 rdata32 = rdata32 | (rdata32 << 16);
4847 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4848
4849 /* Programme canvas1 */
4850 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4851 (16 << 8) | (1 << 1) | 0);
4852 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4853 rdata32_2 = rdata32_2 & 0xffff;
4854 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4855 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4856 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4857 rdata32_2 = rdata32_2 & 0xffff;
4858 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4859 }
4860 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4861 } else { /* P-PIC */
4862 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4863 (0 << 8) | (1 << 1) | 0);
4864 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4865 rdata32 = rdata32 & 0xffff;
4866 rdata32 = rdata32 | (rdata32 << 16);
4867 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4868
4869 if (l0_cnt == 1) {
4870 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4871 } else {
4872 /* Programme canvas1 */
4873 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4874 rdata32 = rdata32 & 0xffff;
4875 rdata32 = rdata32 | (rdata32 << 16);
4876 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4877 }
4878 }
4879 /* enable mcrcc progressive-mode */
4880 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4881}
4882#endif
4883
4884static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4885 int sao_mem_unit)
4886{
4887 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4888 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4889 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4890 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4891}
4892
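/*
 * Auxiliary data (prefix/suffix SEI-style metadata) handling:
 * config_aux_buf() programs the aux buffer address and size, and
 * aux_data_is_avaible() reports new data when HEVC_AUX_DATA_SIZE
 * differs from the value that was programmed.
 */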
4893static u32 init_aux_size;
4894static int aux_data_is_avaible(struct hevc_state_s *hevc)
4895{
4896 u32 reg_val;
4897
4898 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4899 if (reg_val != 0 && reg_val != init_aux_size)
4900 return 1;
4901 else
4902 return 0;
4903}
4904
4905static void config_aux_buf(struct hevc_state_s *hevc)
4906{
4907 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4908 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4909 (hevc->suffix_aux_size >> 4);
4910 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4911}
4912
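/*
 * Configure the motion-vector prediction (MPRED) block for the current
 * slice: MV write/read buffer addresses for the current and collocated
 * pictures, LCU geometry, tile position, reference counts, long-term
 * flags and the per-list reference POC registers, which must be
 * written in L0/L1 pairs.
 */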
4913static void config_mpred_hw(struct hevc_state_s *hevc)
4914{
4915 int i;
4916 unsigned int data32;
4917 struct PIC_s *cur_pic = hevc->cur_pic;
4918 struct PIC_s *col_pic = hevc->col_pic;
4919 int AMVP_MAX_NUM_CANDS_MEM = 3;
4920 int AMVP_MAX_NUM_CANDS = 2;
4921 int NUM_CHROMA_MODE = 5;
4922 int DM_CHROMA_IDX = 36;
4923 int above_ptr_ctrl = 0;
4924 int buffer_linear = 1;
4925 int cu_size_log2 = 3;
4926
4927 int mpred_mv_rd_start_addr;
4928 int mpred_curr_lcu_x;
4929 int mpred_curr_lcu_y;
4930 int mpred_above_buf_start;
4931 int mpred_mv_rd_ptr;
4932 int mpred_mv_rd_ptr_p1;
4933 int mpred_mv_rd_end_addr;
4934 int MV_MEM_UNIT;
4935 int mpred_mv_wr_ptr;
4936 int *ref_poc_L0, *ref_poc_L1;
4937
4938 int above_en;
4939 int mv_wr_en;
4940 int mv_rd_en;
4941 int col_isIntra;
4942
4943 if (hevc->slice_type != 2) {
4944 above_en = 1;
4945 mv_wr_en = 1;
4946 mv_rd_en = 1;
4947 col_isIntra = 0;
4948 } else {
4949 above_en = 1;
4950 mv_wr_en = 1;
4951 mv_rd_en = 0;
4952 col_isIntra = 0;
4953 }
4954
4955 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
4956 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
4957 mpred_curr_lcu_x = data32 & 0xffff;
4958 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
4959
4960 MV_MEM_UNIT =
4961 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
4962 5 ? 0x80 : 0x20;
4963 mpred_mv_rd_ptr =
4964 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
4965
4966 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
4967 mpred_mv_rd_end_addr =
4968 mpred_mv_rd_start_addr +
4969 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
4970
4971 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
4972
4973 mpred_mv_wr_ptr =
4974 cur_pic->mpred_mv_wr_start_addr +
4975 (hevc->slice_addr * MV_MEM_UNIT);
4976
4977 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4978 hevc_print(hevc, 0,
4979 "cur pic index %d col pic index %d\n", cur_pic->index,
4980 col_pic->index);
4981 }
4982
4983 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
4984 cur_pic->mpred_mv_wr_start_addr);
4985 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
4986
4987 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
4988 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
4989 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
4990
4991 data32 = READ_VREG(HEVC_MPRED_CTRL0);
4992 data32 = (hevc->slice_type |
4993 hevc->new_pic << 2 |
4994 hevc->new_tile << 3 |
4995 hevc->isNextSliceSegment << 4 |
4996 hevc->TMVPFlag << 5 |
4997 hevc->LDCFlag << 6 |
4998 hevc->ColFromL0Flag << 7 |
4999 above_ptr_ctrl << 8 |
5000 above_en << 9 |
5001 mv_wr_en << 10 |
5002 mv_rd_en << 11 |
5003 col_isIntra << 12 |
5004 buffer_linear << 13 |
5005 hevc->LongTerm_Curr << 14 |
5006 hevc->LongTerm_Col << 15 |
5007 hevc->lcu_size_log2 << 16 |
5008 cu_size_log2 << 20 | hevc->plevel << 24);
5009 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
5010
5011 data32 = READ_VREG(HEVC_MPRED_CTRL1);
5012 data32 = (
5013#if 0
5014 /* no set in m8baby test1902 */
5015 /* Don't override clk_forced_on , */
5016 (data32 & (0x1 << 24)) |
5017#endif
5018 hevc->MaxNumMergeCand |
5019 AMVP_MAX_NUM_CANDS << 4 |
5020 AMVP_MAX_NUM_CANDS_MEM << 8 |
5021 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
5022 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
5023
5024 data32 = (hevc->pic_w | hevc->pic_h << 16);
5025 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
5026
5027 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5028 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
5029
5030 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
5031 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
5032
5033 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
5034 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
5035
5036 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
5037 /* col_RefNum_L0<<16| */
5038 /* col_RefNum_L1<<24 */
5039 );
5040 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
5041
5042 data32 = (hevc->LongTerm_Ref);
5043 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
5044
5045 data32 = 0;
5046 for (i = 0; i < hevc->RefNum_L0; i++)
5047 data32 = data32 | (1 << i);
5048 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
5049
5050 data32 = 0;
5051 for (i = 0; i < hevc->RefNum_L1; i++)
5052 data32 = data32 | (1 << i);
5053 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
5054
5055 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
5056 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
5057
5058 /* below MPRED Ref_POC_xx_Lx registers must follow Ref_POC_xx_L0 ->
5059 * Ref_POC_xx_L1 in pair write order!!!
5060 */
5061 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
5062 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
5063
5064 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
5065 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
5066
5067 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
5068 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
5069
5070 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
5071 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
5072
5073 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
5074 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
5075
5076 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
5077 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
5078
5079 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
5080 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
5081
5082 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
5083 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
5084
5085 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
5086 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
5087
5088 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
5089 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
5090
5091 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
5092 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
5093
5094 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
5095 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
5096
5097 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
5098 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
5099
5100 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
5101 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
5102
5103 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
5104 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
5105
5106 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
5107 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
5108
5109 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
5110 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
5111
5112 if (hevc->new_pic) {
5113 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
5114 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
5115 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
5116 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
5117 } else if (!hevc->isNextSliceSegment) {
5118 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
5119 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
5120 }
5121
5122 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
5123}
5124
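/*
 * Per-slice SAO and output path setup: program the output (double
 * write and/or compressed) buffer addresses and sizes, the deblocking
 * CFG registers, endian/memory-mapping fields in HEVC_SAO_CTRL1 and
 * HEVCD_IPP_AXIIF_CONFIG, and the loop-filter flags derived from
 * misc_flag0.
 */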
5125static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
5126{
5127 unsigned int data32, data32_2;
5128 int misc_flag0 = hevc->misc_flag0;
5129 int slice_deblocking_filter_disabled_flag = 0;
5130
5131 int mc_buffer_size_u_v =
5132 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
5133 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
5134 struct PIC_s *cur_pic = hevc->cur_pic;
5135 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
5136
5137 data32 = READ_VREG(HEVC_SAO_CTRL0);
5138 data32 &= (~0xf);
5139 data32 |= hevc->lcu_size_log2;
5140 WRITE_VREG(HEVC_SAO_CTRL0, data32);
5141
5142 data32 = (hevc->pic_w | hevc->pic_h << 16);
5143 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5144
5145 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5146 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5147
5148 if (hevc->new_pic)
5149 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5150#ifdef LOSLESS_COMPRESS_MODE
5151/*SUPPORT_10BIT*/
5152 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5153 data32 = READ_VREG(HEVC_SAO_CTRL5);
5154 data32 &= (~(0xff << 16));
5155
5156 if (get_double_write_mode(hevc) == 2 ||
5157 get_double_write_mode(hevc) == 3)
5158 data32 |= (0xff<<16);
5159 else if (get_double_write_mode(hevc) == 4)
5160 data32 |= (0x33<<16);
5161
5162 if (hevc->mem_saving_mode == 1)
5163 data32 |= (1 << 9);
5164 else
5165 data32 &= ~(1 << 9);
5166 if (workaround_enable & 1)
5167 data32 |= (1 << 7);
5168 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5169 }
5170 data32 = cur_pic->mc_y_adr;
5171 if (get_double_write_mode(hevc))
5172 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5173
5174 if ((get_double_write_mode(hevc) & 0x10) == 0)
5175 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5176
5177 if (hevc->mmu_enable)
5178 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5179#else
5180 data32 = cur_pic->mc_y_adr;
5181 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5182#endif
5183 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5184 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5185
5186#ifdef LOSLESS_COMPRESS_MODE
5187/*SUPPORT_10BIT*/
5188 if (get_double_write_mode(hevc))
5189 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5190#else
5191 data32 = cur_pic->mc_u_v_adr;
5192 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5193#endif
5194 data32 = (mc_buffer_size_u_v_h << 16);
5195 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5196
5197#ifdef LOSLESS_COMPRESS_MODE
5198/*SUPPORT_10BIT*/
5199 if (get_double_write_mode(hevc)) {
5200 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5201 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5202 }
5203#else
5204 /* multi tile to do... */
5205 data32 = cur_pic->mc_y_adr;
5206 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5207
5208 data32 = cur_pic->mc_u_v_adr;
5209 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5210#endif
5211 /* DBLK CONFIG HERE */
5212 if (hevc->new_pic) {
5213 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5214 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5215 data32 = (0xff << 8) | (0x0 << 0);
5216 else
5217 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5218 (0x0 << 0); /* h265 video format*/
5219
5220 if (hevc->pic_w >= 1280)
5221 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5222 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5223 if (get_double_write_mode(hevc) == 0)
5224 data32 |= (0x1 << 8); /*enable first write*/
5225 else if (get_double_write_mode(hevc) == 0x10)
5226 data32 |= (0x1 << 9); /*double write only*/
5227 else
5228 data32 |= ((0x1 << 8) |(0x1 << 9));
5229
5230 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5231 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5232 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5233 }
5234 data32 = (hevc->pic_w | hevc->pic_h << 16);
5235 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5236
5237 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5238 data32 =
5239 ((misc_flag0 >>
5240 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5241 0x1) << 3;
5242 } else
5243 data32 = 0;
5244 data32 |=
5245 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5246 ((params->p.pps_cr_qp_offset
5247 & 0x1f) <<
5248 9));
5249 data32 |=
5250 (hevc->lcu_size ==
5251 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5252
5253 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5254
5255 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5256 /*if (debug & 0x80) {*/
5257 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5258 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5259 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5260 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5261 data32);
5262 /*}*/
5263 }
5264 }
5265#if 0
5266 data32 = READ_VREG(HEVC_SAO_CTRL1);
5267 data32 &= (~0x3000);
5268 data32 |= (hevc->mem_map_mode <<
5269 12);
5270
5271/* [13:12] axi_aformat,
5272 * 0-Linear, 1-32x32, 2-64x32
5273 */
5274 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5275
5276 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5277 data32 &= (~0x30);
5278 data32 |= (hevc->mem_map_mode <<
5279 4);
5280
5281/* [5:4] -- address_format
5282 * 00:linear 01:32x32 10:64x32
5283 */
5284 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5285#else
5286 /* m8baby test1902 */
5287 data32 = READ_VREG(HEVC_SAO_CTRL1);
5288 data32 &= (~0x3000);
5289 data32 |= (hevc->mem_map_mode <<
5290 12);
5291
5292/* [13:12] axi_aformat, 0-Linear,
5293 * 1-32x32, 2-64x32
5294 */
5295 data32 &= (~0xff0);
5296 /* data32 |= 0x670; // Big-Endian per 64-bit */
5297 data32 |= endian; /* Big-Endian per 64-bit */
5298 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5299 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5300 if (get_double_write_mode(hevc) == 0)
5301 data32 |= 0x2; /*disable double write*/
5302 else if (get_double_write_mode(hevc) & 0x10)
5303 data32 |= 0x1; /*disable cm*/
5304 } else {
5305 unsigned int data;
5306 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5307 (0x0 << 0); /* h265 video format*/
5308 if (hevc->pic_w >= 1280)
5309 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5310 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5311 if (get_double_write_mode(hevc) == 0)
5312 data |= (0x1 << 8); /*enable first write*/
5313 else if (get_double_write_mode(hevc) & 0x10)
5314 data |= (0x1 << 9); /*double write only*/
5315 else
5316 data |= ((0x1 << 8) |(0x1 << 9));
5317
5318 WRITE_VREG(HEVC_DBLK_CFGB, data);
5319 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5320 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5321 }
5322
5323 /* swap uv */
5324 if (hevc->is_used_v4l) {
5325 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5326 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5327 data32 &= ~(1 << 8); /* NV21 */
5328 else
5329 data32 |= (1 << 8); /* NV12 */
5330 }
5331
5332 /*
5333 * [31:24] ar_fifo1_axi_thred
5334 * [23:16] ar_fifo0_axi_thred
5335 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5336 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5337 * [11:08] axi_lendian_C
5338 * [07:04] axi_lendian_Y
5339 * [3] reserved
5340 * [2] clk_forceon
5341 * [1] dw_disable:disable double write output
5342 * [0] cm_disable:disable compress output
5343 */
5344 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5345 if (get_double_write_mode(hevc) & 0x10) {
5346 /* [23:22] dw_v1_ctrl
5347 *[21:20] dw_v0_ctrl
5348 *[19:18] dw_h1_ctrl
5349 *[17:16] dw_h0_ctrl
5350 */
5351 data32 = READ_VREG(HEVC_SAO_CTRL5);
5352 /*set them all 0 for H265_NV21 (no down-scale)*/
5353 data32 &= ~(0xff << 16);
5354 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5355 }
5356
5357 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5358 data32 &= (~0x30);
5359 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5360 data32 |= (hevc->mem_map_mode <<
5361 4);
5362 data32 &= (~0xF);
5363 data32 |= 0xf; /* valid only when double write only */
5364 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5365
5366 /* swap uv */
5367 if (hevc->is_used_v4l) {
5368 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5369 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5370 data32 |= (1 << 12); /* NV21 */
5371 else
5372 data32 &= ~(1 << 12); /* NV12 */
5373 }
5374
5375 /*
5376 * [3:0] little_endian
5377 * [5:4] address_format 00:linear 01:32x32 10:64x32
5378 * [7:6] reserved
5379 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5380 * [11:10] reserved
5381 * [12] CbCr_byte_swap
5382 * [31:13] reserved
5383 */
5384 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5385#endif
5386 data32 = 0;
5387 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5388 data32_2 &= (~0x300);
5389 /* slice_deblocking_filter_disabled_flag = 0;
5390	 * ucode has handled it, so read it from ucode directly
5391 */
5392 if (hevc->tile_enabled) {
5393 data32 |=
5394 ((misc_flag0 >>
5395 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5396 0x1) << 0;
5397 data32_2 |=
5398 ((misc_flag0 >>
5399 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5400 0x1) << 8;
5401 }
5402 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5403 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5404		0x1; /* ucode has handled it, so read it from ucode directly */
5405 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5406 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5407 /* slice_deblocking_filter_disabled_flag =
5408 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5409		 * ucode has handled it, so read it from ucode directly
5410 */
5411 data32 |= slice_deblocking_filter_disabled_flag << 2;
5412 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5413 hevc_print_cont(hevc, 0,
5414 "(1,%x)", data32);
5415 if (!slice_deblocking_filter_disabled_flag) {
5416 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5417 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5418 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5419 hevc_print_cont(hevc, 0,
5420 "(2,%x)", data32);
5421 }
5422 } else {
5423 data32 |=
5424 ((misc_flag0 >>
5425 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5426 0x1) << 2;
5427 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5428 hevc_print_cont(hevc, 0,
5429 "(3,%x)", data32);
5430 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5431 0x1) == 0) {
5432 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5433 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5434 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5435 hevc_print_cont(hevc, 0,
5436 "(4,%x)", data32);
5437 }
5438 }
5439 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5440 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5441 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5442 || (!slice_deblocking_filter_disabled_flag))) {
5443 data32 |=
5444 ((misc_flag0 >>
5445 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5446 & 0x1) << 1;
5447 data32_2 |=
5448 ((misc_flag0 >>
5449 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5450 & 0x1) << 9;
5451 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5452 hevc_print_cont(hevc, 0,
5453 "(5,%x)\n", data32);
5454 } else {
5455 data32 |=
5456 ((misc_flag0 >>
5457 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5458 & 0x1) << 1;
5459 data32_2 |=
5460 ((misc_flag0 >>
5461 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5462 & 0x1) << 9;
5463 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5464 hevc_print_cont(hevc, 0,
5465 "(6,%x)\n", data32);
5466 }
5467 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5468 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5469}
5470
5471#ifdef TEST_NO_BUF
5472static unsigned char test_flag = 1;
5473#endif
5474
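/*
 * Reconcile the picture list with the current picture size: buffers of
 * idle pictures whose dimensions no longer match are released (and
 * surplus entries invalidated once work_pic_num is exceeded), new
 * entries are activated when too few pictures are allocated, and
 * unused buffers are freed.
 */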
5475static void pic_list_process(struct hevc_state_s *hevc)
5476{
5477 int work_pic_num = get_work_pic_num(hevc);
5478 int alloc_pic_count = 0;
5479 int i;
5480 struct PIC_s *pic;
5481 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5482 pic = hevc->m_PIC[i];
5483 if (pic == NULL || pic->index == -1)
5484 continue;
5485 alloc_pic_count++;
5486 if (pic->output_mark == 0 && pic->referenced == 0
5487 && pic->output_ready == 0
5488 && (pic->width != hevc->pic_w ||
5489 pic->height != hevc->pic_h)
5490 ) {
5491 set_buf_unused(hevc, pic->BUF_index);
5492 pic->BUF_index = -1;
5493 if (alloc_pic_count > work_pic_num) {
5494 pic->width = 0;
5495 pic->height = 0;
5496 pic->index = -1;
5497 } else {
5498 pic->width = hevc->pic_w;
5499 pic->height = hevc->pic_h;
5500 }
5501 }
5502 }
5503 if (alloc_pic_count < work_pic_num) {
5504 int new_count = alloc_pic_count;
5505 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5506 pic = hevc->m_PIC[i];
5507 if (pic && pic->index == -1) {
5508 pic->index = i;
5509 pic->BUF_index = -1;
5510 pic->width = hevc->pic_w;
5511 pic->height = hevc->pic_h;
5512 new_count++;
5513 if (new_count >=
5514 work_pic_num)
5515 break;
5516 }
5517 }
5518
5519 }
5520 dealloc_unused_buf(hevc);
5521 if (get_alloc_pic_count(hevc)
5522 != alloc_pic_count) {
5523 hevc_print_cont(hevc, 0,
5524 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5525 __func__,
5526 work_pic_num,
5527 alloc_pic_count,
5528 get_alloc_pic_count(hevc));
5529 }
5530}
5531
5532static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5533{
5534 int i;
5535 struct PIC_s *pic;
5536 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5537 pic = hevc->m_PIC[i];
5538 if (pic == NULL || pic->index == -1)
5539 continue;
5540
5541 if (pic->output_mark == 0 && pic->referenced == 0
5542 && pic->output_ready == 0
5543 && pic->scatter_alloc
5544 )
5545 release_pic_mmu_buf(hevc, pic);
5546 }
5547
5548}
5549
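/*
 * Grab a free picture for the next decode: the idle entry with the
 * smallest POC is reused, its frame buffer, canvas, MV buffer and
 * (when mmu_enable) compressed-frame MMU pages are allocated on
 * demand, and the per-picture state (POC, decode_idx, reorder depth,
 * conformance window, bit depth, ...) is initialized from the parsed
 * parameters.  Returns NULL when no buffer is available.
 */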
5550static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5551 union param_u *rpm_param)
5552{
5553 struct PIC_s *new_pic = NULL;
5554 struct PIC_s *pic;
5555 int i;
5556 int ret;
5557
5558 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5559 pic = hevc->m_PIC[i];
5560 if (pic == NULL || pic->index == -1)
5561 continue;
5562
5563 if (pic->output_mark == 0 && pic->referenced == 0
5564 && pic->output_ready == 0
5565 && pic->width == hevc->pic_w
5566 && pic->height == hevc->pic_h
5567 ) {
5568 if (new_pic) {
5569 if (new_pic->POC != INVALID_POC) {
5570 if (pic->POC == INVALID_POC ||
5571 pic->POC < new_pic->POC)
5572 new_pic = pic;
5573 }
5574 } else
5575 new_pic = pic;
5576 }
5577 }
5578
5579 if (new_pic == NULL)
5580 return NULL;
5581
5582 if (new_pic->BUF_index < 0) {
5583 ret = hevc->is_used_v4l ?
5584 v4l_alloc_buf(hevc) :
5585 alloc_buf(hevc);
5586 if (ret < 0)
5587 return NULL;
5588
5589 ret = hevc->is_used_v4l ?
5590 v4l_config_pic(hevc, new_pic) :
5591 config_pic(hevc, new_pic);
5592 if (ret < 0) {
5593 dealloc_pic_buf(hevc, new_pic);
5594 return NULL;
5595 }
5596
5597 new_pic->width = hevc->pic_w;
5598 new_pic->height = hevc->pic_h;
5599 set_canvas(hevc, new_pic);
5600
5601 init_pic_list_hw(hevc);
5602 }
5603
5604 if (new_pic) {
5605 new_pic->double_write_mode =
5606 get_double_write_mode(hevc);
5607 if (new_pic->double_write_mode)
5608 set_canvas(hevc, new_pic);
5609
5610#ifdef TEST_NO_BUF
5611 if (test_flag) {
5612 test_flag = 0;
5613 return NULL;
5614 } else
5615 test_flag = 1;
5616#endif
5617 if (get_mv_buf(hevc, new_pic) < 0)
5618 return NULL;
5619
5620 if (hevc->mmu_enable) {
5621 ret = H265_alloc_mmu(hevc, new_pic,
5622 rpm_param->p.bit_depth,
5623 hevc->frame_mmu_map_addr);
5624 if (ret != 0) {
5625 put_mv_buf(hevc, new_pic);
5626 hevc_print(hevc, 0,
5627					"can't alloc needed mmu1, idx %d ret=%d\n",
5628 new_pic->decode_idx,
5629 ret);
5630 return NULL;
5631 }
5632 }
5633 new_pic->referenced = 1;
5634 new_pic->decode_idx = hevc->decode_idx;
5635 new_pic->slice_idx = 0;
5636 new_pic->referenced = 1;
5637 new_pic->output_mark = 0;
5638 new_pic->recon_mark = 0;
5639 new_pic->error_mark = 0;
5640 new_pic->dis_mark = 0;
5641 /* new_pic->output_ready = 0; */
5642 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5643 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5644 new_pic->POC = hevc->curr_POC;
5645 new_pic->pic_struct = hevc->curr_pic_struct;
5646 if (new_pic->aux_data_buf)
5647 release_aux_data(hevc, new_pic);
5648 new_pic->mem_saving_mode =
5649 hevc->mem_saving_mode;
5650 new_pic->bit_depth_luma =
5651 hevc->bit_depth_luma;
5652 new_pic->bit_depth_chroma =
5653 hevc->bit_depth_chroma;
5654 new_pic->video_signal_type =
5655 hevc->video_signal_type;
5656
5657 new_pic->conformance_window_flag =
5658 hevc->param.p.conformance_window_flag;
5659 new_pic->conf_win_left_offset =
5660 hevc->param.p.conf_win_left_offset;
5661 new_pic->conf_win_right_offset =
5662 hevc->param.p.conf_win_right_offset;
5663 new_pic->conf_win_top_offset =
5664 hevc->param.p.conf_win_top_offset;
5665 new_pic->conf_win_bottom_offset =
5666 hevc->param.p.conf_win_bottom_offset;
5667 new_pic->chroma_format_idc =
5668 hevc->param.p.chroma_format_idc;
5669
5670 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5671 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5672 __func__, new_pic->index,
5673 new_pic->BUF_index, new_pic->decode_idx,
5674 new_pic->POC);
5675
5676 }
5677 if (pic_list_debug & 0x1) {
5678 dump_pic_list(hevc);
5679 pr_err("\n*******************************************\n");
5680 }
5681
5682 return new_pic;
5683}
5684
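/* Count the pictures currently queued for display (output_ready == 1). */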
5685static int get_display_pic_num(struct hevc_state_s *hevc)
5686{
5687 int i;
5688 struct PIC_s *pic;
5689 int num = 0;
5690
5691 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5692 pic = hevc->m_PIC[i];
5693 if (pic == NULL ||
5694 pic->index == -1)
5695 continue;
5696
5697 if (pic->output_ready == 1)
5698 num++;
5699 }
5700 return num;
5701}
5702
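/*
 * Mark the given picture for output, then drain every displayable picture
 * through prepare_display_buf() (or recycle it in error/debug modes) and
 * finally clear all reference flags.
 */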
5703static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5704{
5705 struct PIC_s *pic_display;
5706
5707 if (pic) {
5708 /*PB skip control */
5709 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5710 /* start decoding after first I */
5711 hevc->ignore_bufmgr_error |= 0x1;
5712 }
5713 if (hevc->ignore_bufmgr_error & 1) {
5714 if (hevc->PB_skip_count_after_decoding > 0)
5715 hevc->PB_skip_count_after_decoding--;
5716 else {
5717 /* start displaying */
5718 hevc->ignore_bufmgr_error |= 0x2;
5719 }
5720 }
5721 /**/
5722 if (pic->POC != INVALID_POC) {
5723 pic->output_mark = 1;
5724 pic->recon_mark = 1;
5725 }
5726 pic->recon_mark = 1;
5727 }
5728 do {
5729 pic_display = output_pic(hevc, 1);
5730
5731 if (pic_display) {
5732 pic_display->referenced = 0;
5733 put_mv_buf(hevc, pic_display);
5734 if ((pic_display->error_mark
5735 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5736 || (get_dbg_flag(hevc) &
5737 H265_DEBUG_DISPLAY_CUR_FRAME)
5738 || (get_dbg_flag(hevc) &
5739 H265_DEBUG_NO_DISPLAY)) {
5740 pic_display->output_ready = 0;
5741 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5742 hevc_print(hevc, 0,
5743 "[BM] Display: POC %d, ",
5744 pic_display->POC);
5745 hevc_print_cont(hevc, 0,
5746 "decoding index %d ==> ",
5747 pic_display->decode_idx);
5748 hevc_print_cont(hevc, 0,
5749 "Debug mode or error, recycle it\n");
5750 }
5751 } else {
5752 if (hevc->i_only & 0x1
5753 && pic_display->slice_type != 2) {
5754 pic_display->output_ready = 0;
5755 } else {
5756 prepare_display_buf(hevc, pic_display);
5757 if (get_dbg_flag(hevc)
5758 & H265_DEBUG_BUFMGR) {
5759 hevc_print(hevc, 0,
5760 "[BM] flush Display: POC %d, ",
5761 pic_display->POC);
5762 hevc_print_cont(hevc, 0,
5763 "decoding index %d\n",
5764 pic_display->decode_idx);
5765 }
5766 }
5767 }
5768 }
5769 } while (pic_display);
5770 clear_referenced_flag(hevc);
5771}
5772
5773/*
5774* dv_meta_flag: 0, all aux data; 1, Dolby Vision meta only; 2, everything except Dolby Vision meta
5775*/
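/*
 * Each record appended to pic->aux_data_buf carries an 8-byte header
 * (4-byte big-endian payload length, 1-byte tag, 1 reserved byte, 2-byte
 * padding length) followed by the payload bytes taken from the low byte of
 * each aux word.
 */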
5776static void set_aux_data(struct hevc_state_s *hevc,
5777 struct PIC_s *pic, unsigned char suffix_flag,
5778 unsigned char dv_meta_flag)
5779{
5780 int i;
5781 unsigned short *aux_adr;
5782 unsigned int size_reg_val =
5783 READ_VREG(HEVC_AUX_DATA_SIZE);
5784 unsigned int aux_count = 0;
5785 int aux_size = 0;
5786 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
5787 return;
5788
5789 if (hevc->aux_data_dirty ||
5790 hevc->m_ins_flag == 0) {
5791
5792 hevc->aux_data_dirty = 0;
5793 }
5794
5795 if (suffix_flag) {
5796 aux_adr = (unsigned short *)
5797 (hevc->aux_addr +
5798 hevc->prefix_aux_size);
5799 aux_count =
5800 ((size_reg_val & 0xffff) << 4)
5801 >> 1;
5802 aux_size =
5803 hevc->suffix_aux_size;
5804 } else {
5805 aux_adr =
5806 (unsigned short *)hevc->aux_addr;
5807 aux_count =
5808 ((size_reg_val >> 16) << 4)
5809 >> 1;
5810 aux_size =
5811 hevc->prefix_aux_size;
5812 }
5813 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5814 hevc_print(hevc, 0,
5815 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
5816 __func__, pic, pic->aux_data_size,
5817 aux_count, suffix_flag, dv_meta_flag);
5818 }
5819 if (aux_size > 0 && aux_count > 0) {
5820 int heads_size = 0;
5821 int new_size;
5822 char *new_buf;
5823
5824 for (i = 0; i < aux_count; i++) {
5825 unsigned char tag = aux_adr[i] >> 8;
5826 if (tag != 0 && tag != 0xff) {
5827 if (dv_meta_flag == 0)
5828 heads_size += 8;
5829 else if (dv_meta_flag == 1 && tag == 0x1)
5830 heads_size += 8;
5831 else if (dv_meta_flag == 2 && tag != 0x1)
5832 heads_size += 8;
5833 }
5834 }
5835 new_size = pic->aux_data_size + aux_count + heads_size;
5836 new_buf = vmalloc(new_size);
5837 if (new_buf) {
5838 unsigned char valid_tag = 0;
5839 unsigned char *h =
5840 new_buf +
5841 pic->aux_data_size;
5842 unsigned char *p = h + 8;
5843 int len = 0;
5844 int padding_len = 0;
5845 memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
5846 if (pic->aux_data_buf)
5847 vfree(pic->aux_data_buf);
5848 pic->aux_data_buf = new_buf;
5849 for (i = 0; i < aux_count; i += 4) {
5850 int ii;
5851 unsigned char tag = aux_adr[i + 3] >> 8;
5852 if (tag != 0 && tag != 0xff) {
5853 if (dv_meta_flag == 0)
5854 valid_tag = 1;
5855 else if (dv_meta_flag == 1
5856 && tag == 0x1)
5857 valid_tag = 1;
5858 else if (dv_meta_flag == 2
5859 && tag != 0x1)
5860 valid_tag = 1;
5861 else
5862 valid_tag = 0;
5863 if (valid_tag && len > 0) {
5864 pic->aux_data_size +=
5865 (len + 8);
5866 h[0] = (len >> 24)
5867 & 0xff;
5868 h[1] = (len >> 16)
5869 & 0xff;
5870 h[2] = (len >> 8)
5871 & 0xff;
5872 h[3] = (len >> 0)
5873 & 0xff;
5874 h[6] =
5875 (padding_len >> 8)
5876 & 0xff;
5877 h[7] = (padding_len)
5878 & 0xff;
5879 h += (len + 8);
5880 p += 8;
5881 len = 0;
5882 padding_len = 0;
5883 }
5884 if (valid_tag) {
5885 h[4] = tag;
5886 h[5] = 0;
5887 h[6] = 0;
5888 h[7] = 0;
5889 }
5890 }
5891 if (valid_tag) {
5892 for (ii = 0; ii < 4; ii++) {
5893 unsigned short aa =
5894 aux_adr[i + 3
5895 - ii];
5896 *p = aa & 0xff;
5897 p++;
5898 len++;
5899 /*if ((aa >> 8) == 0xff)
5900 padding_len++;*/
5901 }
5902 }
5903 }
5904 if (len > 0) {
5905 pic->aux_data_size += (len + 8);
5906 h[0] = (len >> 24) & 0xff;
5907 h[1] = (len >> 16) & 0xff;
5908 h[2] = (len >> 8) & 0xff;
5909 h[3] = (len >> 0) & 0xff;
5910 h[6] = (padding_len >> 8) & 0xff;
5911 h[7] = (padding_len) & 0xff;
5912 }
5913 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5914 hevc_print(hevc, 0,
5915 "aux: (size %d) suffix_flag %d\n",
5916 pic->aux_data_size, suffix_flag);
5917 for (i = 0; i < pic->aux_data_size; i++) {
5918 hevc_print_cont(hevc, 0,
5919 "%02x ", pic->aux_data_buf[i]);
5920 if (((i + 1) & 0xf) == 0)
5921 hevc_print_cont(hevc, 0, "\n");
5922 }
5923 hevc_print_cont(hevc, 0, "\n");
5924 }
5925
5926 } else {
5927 hevc_print(hevc, 0, "new buf alloc failed\n");
5928 if (pic->aux_data_buf)
5929 vfree(pic->aux_data_buf);
5930 pic->aux_data_buf = NULL;
5931 pic->aux_data_size = 0;
5932 }
5933 }
5934
5935}
5936
5937static void release_aux_data(struct hevc_state_s *hevc,
5938 struct PIC_s *pic)
5939{
5940 if (pic->aux_data_buf)
5941 vfree(pic->aux_data_buf);
5942 pic->aux_data_buf = NULL;
5943 pic->aux_data_size = 0;
5944}
5945
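/*
 * Finish off the previously decoded picture: mark it for output, release
 * its tail MMU pages when possible, and push any displayable pictures to
 * the output queue.  For IDR pictures the whole list is flushed instead.
 */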
5946static inline void hevc_pre_pic(struct hevc_state_s *hevc,
5947 struct PIC_s *pic)
5948{
5949
5950 /* prev pic */
5951 /*if (hevc->curr_POC != 0) {*/
5952 int decoded_poc = hevc->iPrevPOC;
5953#ifdef MULTI_INSTANCE_SUPPORT
5954 if (hevc->m_ins_flag) {
5955 decoded_poc = hevc->decoded_poc;
5956 hevc->decoded_poc = INVALID_POC;
5957 }
5958#endif
5959 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
5960 && hevc->m_nalUnitType !=
5961 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
5962 struct PIC_s *pic_display;
5963
5964 pic = get_pic_by_POC(hevc, decoded_poc);
5965 if (pic && (pic->POC != INVALID_POC)) {
5966 /*PB skip control */
5967 if (pic->error_mark == 0
5968 && hevc->PB_skip_mode == 1) {
5969 /* start decoding after
5970 * first I
5971 */
5972 hevc->ignore_bufmgr_error |= 0x1;
5973 }
5974 if (hevc->ignore_bufmgr_error & 1) {
5975 if (hevc->PB_skip_count_after_decoding > 0) {
5976 hevc->PB_skip_count_after_decoding--;
5977 } else {
5978 /* start displaying */
5979 hevc->ignore_bufmgr_error |= 0x2;
5980 }
5981 }
5982 if (hevc->mmu_enable
5983 && ((hevc->double_write_mode & 0x10) == 0)) {
5984 if (!hevc->m_ins_flag) {
5985 hevc->used_4k_num =
5986 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
5987
5988 if ((!is_skip_decoding(hevc, pic)) &&
5989 (hevc->used_4k_num >= 0) &&
5990 (hevc->cur_pic->scatter_alloc
5991 == 1)) {
5992 hevc_print(hevc,
5993 H265_DEBUG_BUFMGR_MORE,
5994 "%s pic index %d scatter_alloc %d page_start %d\n",
5995 "decoder_mmu_box_free_idx_tail",
5996 hevc->cur_pic->index,
5997 hevc->cur_pic->scatter_alloc,
5998 hevc->used_4k_num);
5999 hevc_mmu_dma_check(hw_to_vdec(hevc));
6000 decoder_mmu_box_free_idx_tail(
6001 hevc->mmu_box,
6002 hevc->cur_pic->index,
6003 hevc->used_4k_num);
6004 hevc->cur_pic->scatter_alloc
6005 = 2;
6006 }
6007 hevc->used_4k_num = -1;
6008 }
6009 }
6010
6011 pic->output_mark = 1;
6012 pic->recon_mark = 1;
6013 pic->dis_mark = 1;
6014 }
6015 do {
6016 pic_display = output_pic(hevc, 0);
6017
6018 if (pic_display) {
6019 if ((pic_display->error_mark &&
6020 ((hevc->ignore_bufmgr_error &
6021 0x2) == 0))
6022 || (get_dbg_flag(hevc) &
6023 H265_DEBUG_DISPLAY_CUR_FRAME)
6024 || (get_dbg_flag(hevc) &
6025 H265_DEBUG_NO_DISPLAY)) {
6026 pic_display->output_ready = 0;
6027 if (get_dbg_flag(hevc) &
6028 H265_DEBUG_BUFMGR) {
6029 hevc_print(hevc, 0,
6030 "[BM] Display: POC %d, ",
6031 pic_display->POC);
6032 hevc_print_cont(hevc, 0,
6033 "decoding index %d ==> ",
6034 pic_display->
6035 decode_idx);
6036 hevc_print_cont(hevc, 0,
6037 "Debug or err,recycle it\n");
6038 }
6039 } else {
6040 if (hevc->i_only & 0x1
6041 && pic_display->
6042 slice_type != 2) {
6043 pic_display->output_ready = 0;
6044 } else {
6045 prepare_display_buf
6046 (hevc,
6047 pic_display);
6048 if (get_dbg_flag(hevc) &
6049 H265_DEBUG_BUFMGR) {
6050 hevc_print(hevc, 0,
6051 "[BM] Display: POC %d, ",
6052 pic_display->POC);
6053 hevc_print_cont(hevc, 0,
6054 "decoding index %d\n",
6055 pic_display->
6056 decode_idx);
6057 }
6058 }
6059 }
6060 }
6061 } while (pic_display);
6062 } else {
6063 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6064 hevc_print(hevc, 0,
6065 "[BM] current pic is IDR, ");
6066 hevc_print(hevc, 0,
6067 "clear referenced flag of all buffers\n");
6068 }
6069 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6070 dump_pic_list(hevc);
6071 pic = get_pic_by_POC(hevc, decoded_poc);
6072 flush_output(hevc, pic);
6073 }
6074
6075}
6076
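/*
 * Compare the number of LCUs decoded for the previous picture against the
 * expected lcu_x_num_pre * lcu_y_num_pre and set error_mark when the
 * picture is incomplete (also on header errors or over-decode, unless the
 * corresponding error_handle_policy bits disable the check).
 */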
6077static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
6078 int decoded_lcu)
6079{
6080 int current_lcu_idx = decoded_lcu;
6081 if (decoded_lcu < 0)
6082 return;
6083
6084 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6085 hevc_print(hevc, 0,
6086 "cur lcu idx = %d, (total %d)\n",
6087 current_lcu_idx, hevc->lcu_total);
6088 }
6089 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6090 if (hevc->first_pic_after_recover) {
6091 if (current_lcu_idx !=
6092 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6093 hevc->cur_pic->error_mark = 1;
6094 } else {
6095 if (hevc->lcu_x_num_pre != 0
6096 && hevc->lcu_y_num_pre != 0
6097 && current_lcu_idx != 0
6098 && current_lcu_idx <
6099 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6100 hevc->cur_pic->error_mark = 1;
6101 }
6102 if (hevc->cur_pic->error_mark) {
6103 hevc_print(hevc, 0,
6104 "cur lcu idx = %d, (total %d), set error_mark\n",
6105 current_lcu_idx,
6106 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
6107 if (is_log_enable(hevc))
6108 add_log(hevc,
6109 "cur lcu idx = %d, (total %d), set error_mark",
6110 current_lcu_idx,
6111 hevc->lcu_x_num_pre *
6112 hevc->lcu_y_num_pre);
6113
6114 }
6115
6116 }
6117 if (hevc->cur_pic && hevc->head_error_flag) {
6118 hevc->cur_pic->error_mark = 1;
6119 hevc_print(hevc, 0,
6120 "head has error, set error_mark\n");
6121 }
6122
6123 if ((error_handle_policy & 0x80) == 0) {
6124 if (hevc->over_decode && hevc->cur_pic) {
6125 hevc_print(hevc, 0,
6126 "over decode, set error_mark\n");
6127 hevc->cur_pic->error_mark = 1;
6128 }
6129 }
6130
6131 hevc->lcu_x_num_pre = hevc->lcu_x_num;
6132 hevc->lcu_y_num_pre = hevc->lcu_y_num;
6133}
6134
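/*
 * Same check as above, but against the current picture's lcu_x_num /
 * lcu_y_num; on error it may also flush stale pictures whose POC lags far
 * behind iPrevPOC.
 */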
6135static void check_pic_decoded_error(struct hevc_state_s *hevc,
6136 int decoded_lcu)
6137{
6138 int current_lcu_idx = decoded_lcu;
6139 if (decoded_lcu < 0)
6140 return;
6141
6142 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6143 hevc_print(hevc, 0,
6144 "cur lcu idx = %d, (total %d)\n",
6145 current_lcu_idx, hevc->lcu_total);
6146 }
6147 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6148 if (hevc->lcu_x_num != 0
6149 && hevc->lcu_y_num != 0
6150 && current_lcu_idx != 0
6151 && current_lcu_idx <
6152 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6153 hevc->cur_pic->error_mark = 1;
6154 if (hevc->cur_pic->error_mark) {
6155 hevc_print(hevc, 0,
6156 "cur lcu idx = %d, (total %d), set error_mark\n",
6157 current_lcu_idx,
6158 hevc->lcu_x_num*hevc->lcu_y_num);
6159			if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6160					&& ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6161				hevc_print(hevc, 0,
6162					"Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6163					hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6164				flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6165 }
6166 if (is_log_enable(hevc))
6167 add_log(hevc,
6168 "cur lcu idx = %d, (total %d), set error_mark",
6169 current_lcu_idx,
6170 hevc->lcu_x_num *
6171 hevc->lcu_y_num);
6172
6173 }
6174
6175 }
6176 if (hevc->cur_pic && hevc->head_error_flag) {
6177 hevc->cur_pic->error_mark = 1;
6178 hevc_print(hevc, 0,
6179 "head has error, set error_mark\n");
6180 }
6181
6182 if ((error_handle_policy & 0x80) == 0) {
6183 if (hevc->over_decode && hevc->cur_pic) {
6184 hevc_print(hevc, 0,
6185 "over decode, set error_mark\n");
6186 hevc->cur_pic->error_mark = 1;
6187 }
6188 }
6189}
6190
6191/* Call this only after a full field or frame has been decoded,
6192 * so the QoS statistics read back from the hardware are complete. */
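/*
 * Pre-G12A parts expose min/avg/max MV, QP and skip statistics through the
 * HEVC_MV_INFO / HEVC_QP_INFO / HEVC_SKIP_INFO registers; newer parts are
 * read back sequentially through HEVC_PIC_QUALITY_CTRL /
 * HEVC_PIC_QUALITY_DATA.
 */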
6193static void get_picture_qos_info(struct hevc_state_s *hevc)
6194{
6195 struct PIC_s *picture = hevc->cur_pic;
6196
6197/*
6198#define DEBUG_QOS
6199*/
6200
6201 if (!hevc->cur_pic)
6202 return;
6203
6204 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6205 unsigned char a[3];
6206 unsigned char i, j, t;
6207 unsigned long data;
6208
6209 data = READ_VREG(HEVC_MV_INFO);
6210 if (picture->slice_type == I_SLICE)
6211 data = 0;
6212 a[0] = data & 0xff;
6213 a[1] = (data >> 8) & 0xff;
6214 a[2] = (data >> 16) & 0xff;
6215
6216 for (i = 0; i < 3; i++)
6217 for (j = i+1; j < 3; j++) {
6218 if (a[j] < a[i]) {
6219 t = a[j];
6220 a[j] = a[i];
6221 a[i] = t;
6222 } else if (a[j] == a[i]) {
6223 a[i]++;
6224 t = a[j];
6225 a[j] = a[i];
6226 a[i] = t;
6227 }
6228 }
6229 picture->max_mv = a[2];
6230 picture->avg_mv = a[1];
6231 picture->min_mv = a[0];
6232#ifdef DEBUG_QOS
6233 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6234 data, a[0], a[1], a[2]);
6235#endif
6236
6237 data = READ_VREG(HEVC_QP_INFO);
6238 a[0] = data & 0x1f;
6239 a[1] = (data >> 8) & 0x3f;
6240 a[2] = (data >> 16) & 0x7f;
6241
6242 for (i = 0; i < 3; i++)
6243 for (j = i+1; j < 3; j++) {
6244 if (a[j] < a[i]) {
6245 t = a[j];
6246 a[j] = a[i];
6247 a[i] = t;
6248 } else if (a[j] == a[i]) {
6249 a[i]++;
6250 t = a[j];
6251 a[j] = a[i];
6252 a[i] = t;
6253 }
6254 }
6255 picture->max_qp = a[2];
6256 picture->avg_qp = a[1];
6257 picture->min_qp = a[0];
6258#ifdef DEBUG_QOS
6259 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6260 data, a[0], a[1], a[2]);
6261#endif
6262
6263 data = READ_VREG(HEVC_SKIP_INFO);
6264 a[0] = data & 0x1f;
6265 a[1] = (data >> 8) & 0x3f;
6266 a[2] = (data >> 16) & 0x7f;
6267
6268 for (i = 0; i < 3; i++)
6269 for (j = i+1; j < 3; j++) {
6270 if (a[j] < a[i]) {
6271 t = a[j];
6272 a[j] = a[i];
6273 a[i] = t;
6274 } else if (a[j] == a[i]) {
6275 a[i]++;
6276 t = a[j];
6277 a[j] = a[i];
6278 a[i] = t;
6279 }
6280 }
6281 picture->max_skip = a[2];
6282 picture->avg_skip = a[1];
6283 picture->min_skip = a[0];
6284
6285#ifdef DEBUG_QOS
6286 hevc_print(hevc, 0,
6287 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6288 data, a[0], a[1], a[2]);
6289#endif
6290 } else {
6291 uint32_t blk88_y_count;
6292 uint32_t blk88_c_count;
6293 uint32_t blk22_mv_count;
6294 uint32_t rdata32;
6295 int32_t mv_hi;
6296 int32_t mv_lo;
6297 uint32_t rdata32_l;
6298 uint32_t mvx_L0_hi;
6299 uint32_t mvy_L0_hi;
6300 uint32_t mvx_L1_hi;
6301 uint32_t mvy_L1_hi;
6302 int64_t value;
6303 uint64_t temp_value;
6304#ifdef DEBUG_QOS
6305 int pic_number = picture->POC;
6306#endif
6307
6308 picture->max_mv = 0;
6309 picture->avg_mv = 0;
6310 picture->min_mv = 0;
6311
6312 picture->max_skip = 0;
6313 picture->avg_skip = 0;
6314 picture->min_skip = 0;
6315
6316 picture->max_qp = 0;
6317 picture->avg_qp = 0;
6318 picture->min_qp = 0;
6319
6320
6321
6322#ifdef DEBUG_QOS
6323 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6324 picture->slice_type,
6325 picture->POC);
6326#endif
6327 /* set rd_idx to 0 */
6328 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6329
6330 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6331 if (blk88_y_count == 0) {
6332#ifdef DEBUG_QOS
6333 hevc_print(hevc, 0,
6334 "[Picture %d Quality] NO Data yet.\n",
6335 pic_number);
6336#endif
6337 /* reset all counts */
6338 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6339 return;
6340 }
6341 /* qp_y_sum */
6342 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6343#ifdef DEBUG_QOS
6344 hevc_print(hevc, 0,
6345 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6346 pic_number, rdata32/blk88_y_count,
6347 rdata32, blk88_y_count);
6348#endif
6349 picture->avg_qp = rdata32/blk88_y_count;
6350 /* intra_y_count */
6351 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6352#ifdef DEBUG_QOS
6353 hevc_print(hevc, 0,
6354 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6355 pic_number, rdata32*100/blk88_y_count,
6356 '%', rdata32);
6357#endif
6358 /* skipped_y_count */
6359 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6360#ifdef DEBUG_QOS
6361 hevc_print(hevc, 0,
6362 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6363 pic_number, rdata32*100/blk88_y_count,
6364 '%', rdata32);
6365#endif
6366 picture->avg_skip = rdata32*100/blk88_y_count;
6367 /* coeff_non_zero_y_count */
6368 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6369#ifdef DEBUG_QOS
6370 hevc_print(hevc, 0,
6371 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6372 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6373 '%', rdata32);
6374#endif
6375 /* blk66_c_count */
6376 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6377 if (blk88_c_count == 0) {
6378#ifdef DEBUG_QOS
6379 hevc_print(hevc, 0,
6380 "[Picture %d Quality] NO Data yet.\n",
6381 pic_number);
6382#endif
6383 /* reset all counts */
6384 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6385 return;
6386 }
6387 /* qp_c_sum */
6388 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6389#ifdef DEBUG_QOS
6390 hevc_print(hevc, 0,
6391 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6392 pic_number, rdata32/blk88_c_count,
6393 rdata32, blk88_c_count);
6394#endif
6395 /* intra_c_count */
6396 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6397#ifdef DEBUG_QOS
6398 hevc_print(hevc, 0,
6399 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6400 pic_number, rdata32*100/blk88_c_count,
6401 '%', rdata32);
6402#endif
6403 /* skipped_cu_c_count */
6404 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6405#ifdef DEBUG_QOS
6406 hevc_print(hevc, 0,
6407 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6408 pic_number, rdata32*100/blk88_c_count,
6409 '%', rdata32);
6410#endif
6411 /* coeff_non_zero_c_count */
6412 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6413#ifdef DEBUG_QOS
6414 hevc_print(hevc, 0,
6415 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6416 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6417 '%', rdata32);
6418#endif
6419
6420 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6421 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6422 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6423#ifdef DEBUG_QOS
6424 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6425 pic_number, (rdata32>>0)&0xff);
6426#endif
6427 picture->min_qp = (rdata32>>0)&0xff;
6428
6429#ifdef DEBUG_QOS
6430 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6431 pic_number, (rdata32>>8)&0xff);
6432#endif
6433 picture->max_qp = (rdata32>>8)&0xff;
6434
6435#ifdef DEBUG_QOS
6436 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6437 pic_number, (rdata32>>16)&0xff);
6438 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6439 pic_number, (rdata32>>24)&0xff);
6440#endif
6441
6442 /* blk22_mv_count */
6443 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6444 if (blk22_mv_count == 0) {
6445#ifdef DEBUG_QOS
6446 hevc_print(hevc, 0,
6447 "[Picture %d Quality] NO MV Data yet.\n",
6448 pic_number);
6449#endif
6450 /* reset all counts */
6451 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6452 return;
6453 }
6454 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6455 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6456 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6457 /* should all be 0x00 or 0xff */
6458#ifdef DEBUG_QOS
6459 hevc_print(hevc, 0,
6460 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6461 pic_number, rdata32);
6462#endif
6463 mvx_L0_hi = ((rdata32>>0)&0xff);
6464 mvy_L0_hi = ((rdata32>>8)&0xff);
6465 mvx_L1_hi = ((rdata32>>16)&0xff);
6466 mvy_L1_hi = ((rdata32>>24)&0xff);
6467
6468 /* mvx_L0_count[31:0] */
6469 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6470 temp_value = mvx_L0_hi;
6471 temp_value = (temp_value << 32) | rdata32_l;
6472
6473 if (mvx_L0_hi & 0x80)
6474 value = 0xFFFFFFF000000000 | temp_value;
6475 else
6476 value = temp_value;
6477 value = div_s64(value, blk22_mv_count);
6478#ifdef DEBUG_QOS
6479 hevc_print(hevc, 0,
6480 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6481 pic_number, (int)value,
6482 value, blk22_mv_count);
6483#endif
6484 picture->avg_mv = value;
6485
6486 /* mvy_L0_count[31:0] */
6487 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6488 temp_value = mvy_L0_hi;
6489 temp_value = (temp_value << 32) | rdata32_l;
6490
6491 if (mvy_L0_hi & 0x80)
6492 value = 0xFFFFFFF000000000 | temp_value;
6493 else
6494 value = temp_value;
6495#ifdef DEBUG_QOS
6496 hevc_print(hevc, 0,
6497 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6498 pic_number, rdata32_l/blk22_mv_count,
6499 value, blk22_mv_count);
6500#endif
6501
6502 /* mvx_L1_count[31:0] */
6503 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6504 temp_value = mvx_L1_hi;
6505 temp_value = (temp_value << 32) | rdata32_l;
6506 if (mvx_L1_hi & 0x80)
6507 value = 0xFFFFFFF000000000 | temp_value;
6508 else
6509 value = temp_value;
6510#ifdef DEBUG_QOS
6511 hevc_print(hevc, 0,
6512 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6513 pic_number, rdata32_l/blk22_mv_count,
6514 value, blk22_mv_count);
6515#endif
6516
6517 /* mvy_L1_count[31:0] */
6518 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6519 temp_value = mvy_L1_hi;
6520 temp_value = (temp_value << 32) | rdata32_l;
6521 if (mvy_L1_hi & 0x80)
6522 value = 0xFFFFFFF000000000 | temp_value;
6523 else
6524 value = temp_value;
6525#ifdef DEBUG_QOS
6526 hevc_print(hevc, 0,
6527 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6528 pic_number, rdata32_l/blk22_mv_count,
6529 value, blk22_mv_count);
6530#endif
6531
6532 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6533 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6534 mv_hi = (rdata32>>16)&0xffff;
6535 if (mv_hi & 0x8000)
6536 mv_hi = 0x8000 - mv_hi;
6537#ifdef DEBUG_QOS
6538 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6539 pic_number, mv_hi);
6540#endif
6541 picture->max_mv = mv_hi;
6542
6543 mv_lo = (rdata32>>0)&0xffff;
6544 if (mv_lo & 0x8000)
6545 mv_lo = 0x8000 - mv_lo;
6546#ifdef DEBUG_QOS
6547 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6548 pic_number, mv_lo);
6549#endif
6550 picture->min_mv = mv_lo;
6551
6552 /* {mvy_L0_max, mvy_L0_min} */
6553 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6554 mv_hi = (rdata32>>16)&0xffff;
6555 if (mv_hi & 0x8000)
6556 mv_hi = 0x8000 - mv_hi;
6557#ifdef DEBUG_QOS
6558 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6559 pic_number, mv_hi);
6560#endif
6561
6562 mv_lo = (rdata32>>0)&0xffff;
6563 if (mv_lo & 0x8000)
6564 mv_lo = 0x8000 - mv_lo;
6565#ifdef DEBUG_QOS
6566 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6567 pic_number, mv_lo);
6568#endif
6569
6570 /* {mvx_L1_max, mvx_L1_min} */
6571 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6572 mv_hi = (rdata32>>16)&0xffff;
6573 if (mv_hi & 0x8000)
6574 mv_hi = 0x8000 - mv_hi;
6575#ifdef DEBUG_QOS
6576 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6577 pic_number, mv_hi);
6578#endif
6579
6580 mv_lo = (rdata32>>0)&0xffff;
6581 if (mv_lo & 0x8000)
6582 mv_lo = 0x8000 - mv_lo;
6583#ifdef DEBUG_QOS
6584 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6585 pic_number, mv_lo);
6586#endif
6587
6588 /* {mvy_L1_max, mvy_L1_min} */
6589 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6590 mv_hi = (rdata32>>16)&0xffff;
6591 if (mv_hi & 0x8000)
6592 mv_hi = 0x8000 - mv_hi;
6593#ifdef DEBUG_QOS
6594 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6595 pic_number, mv_hi);
6596#endif
6597 mv_lo = (rdata32>>0)&0xffff;
6598 if (mv_lo & 0x8000)
6599 mv_lo = 0x8000 - mv_lo;
6600#ifdef DEBUG_QOS
6601 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6602 pic_number, mv_lo);
6603#endif
6604
6605 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6606#ifdef DEBUG_QOS
6607 hevc_print(hevc, 0,
6608 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6609 pic_number, rdata32);
6610#endif
6611 /* reset all counts */
6612 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6613 }
6614}
6615
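/*
 * Parse the slice-segment level parameters delivered through the RPM area:
 * track resolution / LCU geometry changes, derive the picture order count,
 * allocate a new picture on the first slice segment of a frame, set up the
 * reference lists and tile state, and program the MC/MPRED/SAO hardware.
 * Non-zero return values ask the caller to skip or retry (e.g. -1 means
 * wait for a free buffer).
 */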
6616static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6617 union param_u *rpm_param,
6618 int decode_pic_begin)
6619{
6620#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6621 struct vdec_s *vdec = hw_to_vdec(hevc);
6622#endif
6623 int i;
6624 int lcu_x_num_div;
6625 int lcu_y_num_div;
6626 int Col_ref;
6627 int dbg_skip_flag = 0;
6628
6629 if (hevc->wait_buf == 0) {
6630 hevc->sps_num_reorder_pics_0 =
6631 rpm_param->p.sps_num_reorder_pics_0;
6632 hevc->m_temporalId = rpm_param->p.m_temporalId;
6633 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6634 hevc->interlace_flag =
6635 (rpm_param->p.profile_etc >> 2) & 0x1;
6636 hevc->curr_pic_struct =
6637 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6638 if (parser_sei_enable & 0x4) {
6639 hevc->frame_field_info_present_flag =
6640 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6641 }
6642
6643 if (interlace_enable == 0 || hevc->m_ins_flag)
6644 hevc->interlace_flag = 0;
6645 if (interlace_enable & 0x100)
6646 hevc->interlace_flag = interlace_enable & 0x1;
6647 if (hevc->interlace_flag == 0)
6648 hevc->curr_pic_struct = 0;
6649 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6650 /*
6651 *hevc->m_pocRandomAccess = MAX_INT;
6652 * //add to fix RAP_B_Bossen_1
6653 */
6654 /* } */
6655 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6656 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6657 hevc->slice_segment_addr =
6658 rpm_param->p.slice_segment_address;
6659 if (!rpm_param->p.dependent_slice_segment_flag)
6660 hevc->slice_addr = hevc->slice_segment_addr;
6661 } else {
6662 hevc->slice_segment_addr = 0;
6663 hevc->slice_addr = 0;
6664 }
6665
6666 hevc->iPrevPOC = hevc->curr_POC;
6667 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6668 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6669 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6670 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6671 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6672 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6673 hevc->isNextSliceSegment =
6674 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6675 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6676 || hevc->pic_h !=
6677 rpm_param->p.pic_height_in_luma_samples) {
6678 hevc_print(hevc, 0,
6679 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6680 hevc->pic_w, hevc->pic_h,
6681 rpm_param->p.pic_width_in_luma_samples,
6682 rpm_param->p.pic_height_in_luma_samples,
6683 hevc->interlace_flag);
6684
6685 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6686 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6687 hevc->frame_width = hevc->pic_w;
6688 hevc->frame_height = hevc->pic_h;
6689#ifdef LOSLESS_COMPRESS_MODE
6690 if (/*re_config_pic_flag == 0 &&*/
6691 (get_double_write_mode(hevc) & 0x10) == 0)
6692 init_decode_head_hw(hevc);
6693#endif
6694 }
6695
6696 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6697 hevc_print(hevc, 0, "over size : %u x %u.\n",
6698 hevc->pic_w, hevc->pic_h);
6699 if ((!hevc->m_ins_flag) &&
6700 ((debug &
6701 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6702 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6703 H265_DEBUG_DIS_SYS_ERROR_PROC);
6704 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6705 return 3;
6706 }
6707 if (hevc->bit_depth_chroma > 10 ||
6708 hevc->bit_depth_luma > 10) {
6709			hevc_print(hevc, 0, "unsupported bit depth: %u,%u\n",
6710 hevc->bit_depth_chroma,
6711 hevc->bit_depth_luma);
6712 if (!hevc->m_ins_flag)
6713 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6714 H265_DEBUG_DIS_SYS_ERROR_PROC);
6715 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6716 return 4;
6717 }
6718
6719 /* it will cause divide 0 error */
6720 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6721 if (get_dbg_flag(hevc)) {
6722 hevc_print(hevc, 0,
6723 "Fatal Error, pic_w = %d, pic_h = %d\n",
6724 hevc->pic_w, hevc->pic_h);
6725 }
6726 return 3;
6727 }
6728 pic_list_process(hevc);
6729
6730 hevc->lcu_size =
6731 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6732 3 + rpm_param->
6733 p.log2_diff_max_min_coding_block_size);
6734 if (hevc->lcu_size == 0) {
6735 hevc_print(hevc, 0,
6736 "Error, lcu_size = 0 (%d,%d)\n",
6737 rpm_param->p.
6738 log2_min_coding_block_size_minus3,
6739 rpm_param->p.
6740 log2_diff_max_min_coding_block_size);
6741 return 3;
6742 }
6743 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6744 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6745 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6746 hevc->lcu_x_num =
6747 ((hevc->pic_w % hevc->lcu_size) ==
6748 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6749 hevc->lcu_y_num =
6750 ((hevc->pic_h % hevc->lcu_size) ==
6751 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6752 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
6753
6754 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6755 || hevc->m_nalUnitType ==
6756 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6757 hevc->curr_POC = 0;
6758 if ((hevc->m_temporalId - 1) == 0)
6759 hevc->iPrevTid0POC = hevc->curr_POC;
6760 } else {
6761 int iMaxPOClsb =
6762 1 << (rpm_param->p.
6763 log2_max_pic_order_cnt_lsb_minus4 + 4);
6764 int iPrevPOClsb;
6765 int iPrevPOCmsb;
6766 int iPOCmsb;
6767 int iPOClsb = rpm_param->p.POClsb;
6768
6769 if (iMaxPOClsb == 0) {
6770 hevc_print(hevc, 0,
6771 "error iMaxPOClsb is 0\n");
6772 return 3;
6773 }
6774
6775 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
6776 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
6777
6778 if ((iPOClsb < iPrevPOClsb)
6779 && ((iPrevPOClsb - iPOClsb) >=
6780 (iMaxPOClsb / 2)))
6781 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
6782 else if ((iPOClsb > iPrevPOClsb)
6783 && ((iPOClsb - iPrevPOClsb) >
6784 (iMaxPOClsb / 2)))
6785 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
6786 else
6787 iPOCmsb = iPrevPOCmsb;
6788 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6789 hevc_print(hevc, 0,
6790 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
6791 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
6792 iPOClsb);
6793 }
6794 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6795 || hevc->m_nalUnitType ==
6796 NAL_UNIT_CODED_SLICE_BLANT
6797 || hevc->m_nalUnitType ==
6798 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
6799 /* For BLA picture types, POCmsb is set to 0. */
6800 iPOCmsb = 0;
6801 }
6802 hevc->curr_POC = (iPOCmsb + iPOClsb);
6803 if ((hevc->m_temporalId - 1) == 0)
6804 hevc->iPrevTid0POC = hevc->curr_POC;
6805 else {
6806 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6807 hevc_print(hevc, 0,
6808 "m_temporalID is %d\n",
6809 hevc->m_temporalId);
6810 }
6811 }
6812 }
6813 hevc->RefNum_L0 =
6814 (rpm_param->p.num_ref_idx_l0_active >
6815 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6816 num_ref_idx_l0_active;
6817 hevc->RefNum_L1 =
6818 (rpm_param->p.num_ref_idx_l1_active >
6819 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6820 num_ref_idx_l1_active;
6821
6822 /* if(curr_POC==0x10) dump_lmem(); */
6823
6824 /* skip RASL pictures after CRA/BLA pictures */
6825 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
6826 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
6827 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6828 || hevc->m_nalUnitType ==
6829 NAL_UNIT_CODED_SLICE_BLANT
6830 || hevc->m_nalUnitType ==
6831 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6832 hevc->m_pocRandomAccess = hevc->curr_POC;
6833 else
6834 hevc->m_pocRandomAccess = -MAX_INT;
6835 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6836 || hevc->m_nalUnitType ==
6837 NAL_UNIT_CODED_SLICE_BLANT
6838 || hevc->m_nalUnitType ==
6839 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6840 hevc->m_pocRandomAccess = hevc->curr_POC;
6841 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
6842 (nal_skip_policy >= 3) &&
6843 (hevc->m_nalUnitType ==
6844 NAL_UNIT_CODED_SLICE_RASL_N ||
6845 hevc->m_nalUnitType ==
6846 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
6847 if (get_dbg_flag(hevc)) {
6848 hevc_print(hevc, 0,
6849 "RASL picture with POC %d < %d ",
6850 hevc->curr_POC, hevc->m_pocRandomAccess);
6851 hevc_print(hevc, 0,
6852					"(RandomAccess point POC), skip it\n");
6853 }
6854 return 1;
6855 }
6856
6857 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
6858 hevc->skip_flag = 0;
6859 /**/
6860 /* if((iPrevPOC != curr_POC)){ */
6861 if (rpm_param->p.slice_segment_address == 0) {
6862 struct PIC_s *pic;
6863
6864 hevc->new_pic = 1;
6865#ifdef MULTI_INSTANCE_SUPPORT
6866 if (!hevc->m_ins_flag)
6867#endif
6868 check_pic_decoded_error_pre(hevc,
6869 READ_VREG(HEVC_PARSER_LCU_START)
6870 & 0xffffff);
6871 /**/ if (use_cma == 0) {
6872 if (hevc->pic_list_init_flag == 0) {
6873 init_pic_list(hevc);
6874 init_pic_list_hw(hevc);
6875 init_buf_spec(hevc);
6876 hevc->pic_list_init_flag = 3;
6877 }
6878 }
6879 if (!hevc->m_ins_flag) {
6880 if (hevc->cur_pic)
6881 get_picture_qos_info(hevc);
6882 }
6883 hevc->first_pic_after_recover = 0;
6884 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6885 dump_pic_list(hevc);
6886 /* prev pic */
6887 hevc_pre_pic(hevc, pic);
6888 /*
6889 *update referenced of old pictures
6890 *(cur_pic->referenced is 1 and not updated)
6891 */
6892 apply_ref_pic_set(hevc, hevc->curr_POC,
6893 rpm_param);
6894
6895 if (hevc->mmu_enable)
6896 recycle_mmu_bufs(hevc);
6897
6898#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6899 if (vdec->master) {
6900 struct hevc_state_s *hevc_ba =
6901 (struct hevc_state_s *)
6902 vdec->master->private;
6903 if (hevc_ba->cur_pic != NULL) {
6904 hevc_ba->cur_pic->dv_enhance_exist = 1;
6905 hevc_print(hevc, H265_DEBUG_DV,
6906 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
6907 hevc->curr_POC, hevc_ba->cur_pic->POC);
6908 }
6909 }
6910 if (vdec->master == NULL &&
6911 vdec->slave == NULL)
6912 set_aux_data(hevc,
6913 hevc->cur_pic, 1, 0); /*suffix*/
6914 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6915 set_aux_data(hevc,
6916 hevc->cur_pic, 0, 1); /*dv meta only*/
6917#else
6918 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6919#endif
6920 /* new pic */
6921 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6922 if (hevc->cur_pic == NULL) {
6923 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6924 dump_pic_list(hevc);
6925 hevc->wait_buf = 1;
6926 return -1;
6927 }
6928#ifdef MULTI_INSTANCE_SUPPORT
6929 hevc->decoding_pic = hevc->cur_pic;
6930 if (!hevc->m_ins_flag)
6931 hevc->over_decode = 0;
6932#endif
6933#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6934 hevc->cur_pic->dv_enhance_exist = 0;
6935 if (vdec->slave)
6936 hevc_print(hevc, H265_DEBUG_DV,
6937 "Clear bl (poc %d) dv_enhance_exist flag\n",
6938 hevc->curr_POC);
6939 if (vdec->master == NULL &&
6940 vdec->slave == NULL)
6941 set_aux_data(hevc,
6942 hevc->cur_pic, 0, 0); /*prefix*/
6943
6944 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6945 set_aux_data(hevc,
6946 hevc->cur_pic, 0, 2); /*pre sei only*/
6947#else
6948 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6949#endif
6950 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
6951 hevc->cur_pic->output_ready = 1;
6952 hevc->cur_pic->stream_offset =
6953 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
6954 prepare_display_buf(hevc, hevc->cur_pic);
6955 hevc->wait_buf = 2;
6956 return -1;
6957 }
6958 } else {
6959 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
6960#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6961 if (vdec->master == NULL &&
6962 vdec->slave == NULL) {
6963 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6964 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6965 }
6966#else
6967 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6968 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6969#endif
6970 }
6971 if (hevc->pic_list_init_flag != 3
6972 || hevc->cur_pic == NULL) {
6973 /* make it dec from the first slice segment */
6974 return 3;
6975 }
6976 hevc->cur_pic->slice_idx++;
6977 hevc->new_pic = 0;
6978 }
6979 } else {
6980 if (hevc->wait_buf == 1) {
6981 pic_list_process(hevc);
6982 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6983 if (hevc->cur_pic == NULL)
6984 return -1;
6985
6986 if (!hevc->m_ins_flag)
6987 hevc->over_decode = 0;
6988
6989#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6990 hevc->cur_pic->dv_enhance_exist = 0;
6991 if (vdec->master == NULL &&
6992 vdec->slave == NULL)
6993 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6994#else
6995 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6996#endif
6997 hevc->wait_buf = 0;
6998 } else if (hevc->wait_buf ==
6999 2) {
7000 if (get_display_pic_num(hevc) >
7001 1)
7002 return -1;
7003 hevc->wait_buf = 0;
7004 }
7005 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7006 dump_pic_list(hevc);
7007 }
7008
7009 if (hevc->new_pic) {
7010#if 1
7011 /*SUPPORT_10BIT*/
7012 int sao_mem_unit =
7013 (hevc->lcu_size == 16 ? 9 :
7014 hevc->lcu_size ==
7015 32 ? 14 : 24) << 4;
7016#else
7017 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
7018#endif
7019 int pic_height_cu =
7020 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
7021 int pic_width_cu =
7022 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
7023 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
7024
7025 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
7026 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7027 hevc_print(hevc, 0,
7028 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
7029 __func__,
7030 hevc->decode_idx,
7031 hevc->curr_pic_struct,
7032 hevc->interlace_flag,
7033 hevc->cur_pic->index);
7034 }
7035 if (dbg_skip_decode_index != 0 &&
7036 hevc->decode_idx == dbg_skip_decode_index)
7037 dbg_skip_flag = 1;
7038
7039 hevc->decode_idx++;
7040 update_tile_info(hevc, pic_width_cu, pic_height_cu,
7041 sao_mem_unit, rpm_param);
7042
7043 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
7044 }
7045
7046 if (hevc->iPrevPOC != hevc->curr_POC) {
7047 hevc->new_tile = 1;
7048 hevc->tile_x = 0;
7049 hevc->tile_y = 0;
7050 hevc->tile_y_x = 0;
7051 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7052 hevc_print(hevc, 0,
7053 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
7054 hevc->tile_x, hevc->tile_y);
7055 }
7056 } else if (hevc->tile_enabled) {
7057 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7058 hevc_print(hevc, 0,
7059 "slice_segment_address is %d\n",
7060 rpm_param->p.slice_segment_address);
7061 }
7062 hevc->tile_y_x =
7063 get_tile_index(hevc, rpm_param->p.slice_segment_address,
7064 (hevc->pic_w +
7065 hevc->lcu_size -
7066 1) / hevc->lcu_size);
7067 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
7068 && (hevc->tile_y_x != -1)) {
7069 hevc->new_tile = 1;
7070 hevc->tile_x = hevc->tile_y_x & 0xff;
7071 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
7072 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7073 hevc_print(hevc, 0,
7074 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
7075 rpm_param->p.slice_segment_address,
7076 hevc->tile_x, hevc->tile_y);
7077 }
7078 } else
7079 hevc->new_tile = 0;
7080 } else
7081 hevc->new_tile = 0;
7082
7083 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
7084 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
7085 hevc->new_tile = 0;
7086
7087 if (hevc->new_tile) {
7088 hevc->tile_start_lcu_x =
7089 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
7090 hevc->tile_start_lcu_y =
7091 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
7092 hevc->tile_width_lcu =
7093 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
7094 hevc->tile_height_lcu =
7095 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
7096 }
7097
7098 set_ref_pic_list(hevc, rpm_param);
7099
7100 Col_ref = rpm_param->p.collocated_ref_idx;
7101
7102 hevc->LDCFlag = 0;
7103 if (rpm_param->p.slice_type != I_SLICE) {
7104 hevc->LDCFlag = 1;
7105 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
7106 if (hevc->cur_pic->
7107 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
7108 hevc->curr_POC)
7109 hevc->LDCFlag = 0;
7110 }
7111 if (rpm_param->p.slice_type == B_SLICE) {
7112 for (i = 0; (i < hevc->RefNum_L1)
7113 && hevc->LDCFlag; i++) {
7114 if (hevc->cur_pic->
7115 m_aiRefPOCList1[hevc->cur_pic->
7116 slice_idx][i] >
7117 hevc->curr_POC)
7118 hevc->LDCFlag = 0;
7119 }
7120 }
7121 }
7122
7123 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
7124
7125 hevc->plevel =
7126 rpm_param->p.log2_parallel_merge_level;
7127 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
7128
7129 hevc->LongTerm_Curr = 0; /* to do ... */
7130 hevc->LongTerm_Col = 0; /* to do ... */
7131
7132 hevc->list_no = 0;
7133 if (rpm_param->p.slice_type == B_SLICE)
7134 hevc->list_no = 1 - hevc->ColFromL0Flag;
7135 if (hevc->list_no == 0) {
7136 if (Col_ref < hevc->RefNum_L0) {
7137 hevc->Col_POC =
7138 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
7139 slice_idx][Col_ref];
7140 } else
7141 hevc->Col_POC = INVALID_POC;
7142 } else {
7143 if (Col_ref < hevc->RefNum_L1) {
7144 hevc->Col_POC =
7145 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
7146 slice_idx][Col_ref];
7147 } else
7148 hevc->Col_POC = INVALID_POC;
7149 }
7150
7151 hevc->LongTerm_Ref = 0; /* to do ... */
7152
7153 if (hevc->slice_type != 2) {
7154 /* if(hevc->i_only==1){ */
7155 /* return 0xf; */
7156 /* } */
7157
7158 if (hevc->Col_POC != INVALID_POC) {
7159 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7160 if (hevc->col_pic == NULL) {
7161 hevc->cur_pic->error_mark = 1;
7162 if (get_dbg_flag(hevc)) {
7163 hevc_print(hevc, 0,
7164 "WRONG,fail to get the pic Col_POC\n");
7165 }
7166 if (is_log_enable(hevc))
7167 add_log(hevc,
7168 "WRONG,fail to get the pic Col_POC");
7169 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7170 hevc->cur_pic->error_mark = 1;
7171 if (get_dbg_flag(hevc)) {
7172 hevc_print(hevc, 0,
7173 "WRONG, Col_POC error_mark is 1\n");
7174 }
7175 if (is_log_enable(hevc))
7176 add_log(hevc,
7177 "WRONG, Col_POC error_mark is 1");
7178 } else {
7179 if ((hevc->col_pic->width
7180 != hevc->pic_w) ||
7181 (hevc->col_pic->height
7182 != hevc->pic_h)) {
7183 hevc_print(hevc, 0,
7184 "Wrong reference pic (poc %d) width/height %d/%d\n",
7185 hevc->col_pic->POC,
7186 hevc->col_pic->width,
7187 hevc->col_pic->height);
7188 hevc->cur_pic->error_mark = 1;
7189 }
7190
7191 }
7192
7193 if (hevc->cur_pic->error_mark
7194 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7195#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7196 /*count info*/
7197 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7198 hevc->cur_pic->stream_offset);
7199#endif
7200 }
7201
7202 if (is_skip_decoding(hevc,
7203 hevc->cur_pic)) {
7204 return 2;
7205 }
7206 } else
7207 hevc->col_pic = hevc->cur_pic;
7208 } /* */
7209 if (hevc->col_pic == NULL)
7210 hevc->col_pic = hevc->cur_pic;
7211#ifdef BUFFER_MGR_ONLY
7212 return 0xf;
7213#else
7214 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7215 || (dbg_skip_flag))
7216 return 0xf;
7217#endif
7218
7219 config_mc_buffer(hevc, hevc->cur_pic);
7220
7221 if (is_skip_decoding(hevc,
7222 hevc->cur_pic)) {
7223 if (get_dbg_flag(hevc))
7224 hevc_print(hevc, 0,
7225 "Discard this picture index %d\n",
7226 hevc->cur_pic->index);
7227#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7228 /*count info*/
7229 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7230 hevc->cur_pic->stream_offset);
7231#endif
7232 return 2;
7233 }
7234#ifdef MCRCC_ENABLE
7235 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7236#endif
7237 config_mpred_hw(hevc);
7238
7239 config_sao_hw(hevc, rpm_param);
7240
7241 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7242 return 0xf;
7243
7244 return 0;
7245}
7246
7247
7248
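/*
 * Allocate the compressed-frame MMU pages for one picture.  The page count
 * is the losless_comp body size rounded up to 4K pages, e.g. a body of
 * 0x300800 bytes needs ((0x300800 + 0xfff) >> 12) = 0x301 pages.
 */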
7249static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7250 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7251 int cur_buf_idx = new_pic->index;
7252 int bit_depth_10 = (bit_depth != 0x00);
7253 int picture_size;
7254 int cur_mmu_4k_number;
7255 int ret, max_frame_num;
7256 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7257 new_pic->height, !bit_depth_10);
7258 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7259 if (hevc->double_write_mode & 0x10)
7260 return 0;
7261 /*hevc_print(hevc, 0,
7262 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7263 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7264 if (new_pic->scatter_alloc) {
7265 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7266 new_pic->scatter_alloc = 0;
7267 }
7268 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7269 max_frame_num = MAX_FRAME_8K_NUM;
7270 else
7271 max_frame_num = MAX_FRAME_4K_NUM;
7272 if (cur_mmu_4k_number > max_frame_num) {
7273 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7274 cur_mmu_4k_number,
7275 new_pic->width,
7276 new_pic->height);
7277 return -1;
7278 }
7279 ret = decoder_mmu_box_alloc_idx(
7280 hevc->mmu_box,
7281 cur_buf_idx,
7282 cur_mmu_4k_number,
7283 mmu_index_adr);
7284 if (ret == 0)
7285 new_pic->scatter_alloc = 1;
7286
7287 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7288 "%s pic index %d page count(%d) ret =%d\n",
7289 __func__, cur_buf_idx,
7290 cur_mmu_4k_number, ret);
7291 return ret;
7292}
7293
7294
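/* Return a picture's scatter-allocated MMU pages to the mmu_box. */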
7295static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7296 struct PIC_s *pic)
7297{
7298 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7299 "%s pic index %d scatter_alloc %d\n",
7300 __func__, pic->index,
7301 pic->scatter_alloc);
7302
7303 if (hevc->mmu_enable
7304 && ((hevc->double_write_mode & 0x10) == 0)
7305 && pic->scatter_alloc)
7306 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7307 pic->scatter_alloc = 0;
7308}
7309
7310/*
7311 *************************************************
7312 *
7313 *h265 buffer management end
7314 *
7315 **************************************************
7316 */
7317static struct hevc_state_s *gHevc;
7318
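/*
 * Release the per-instance DMA buffers: ucode swap area, aux buffer,
 * RPM/LMEM buffers and the frame MMU map.
 */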
7319static void hevc_local_uninit(struct hevc_state_s *hevc)
7320{
7321 hevc->rpm_ptr = NULL;
7322 hevc->lmem_ptr = NULL;
7323
7324#ifdef SWAP_HEVC_UCODE
7325 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7326 if (hevc->mc_cpu_addr != NULL) {
7327 dma_free_coherent(amports_get_dma_device(),
7328 hevc->swap_size, hevc->mc_cpu_addr,
7329 hevc->mc_dma_handle);
7330 hevc->mc_cpu_addr = NULL;
7331 }
7332
7333 }
7334#endif
7335#ifdef DETREFILL_ENABLE
7336 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7337 uninit_detrefill_buf(hevc);
7338#endif
7339 if (hevc->aux_addr) {
7340 dma_free_coherent(amports_get_dma_device(),
7341 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7342 hevc->aux_phy_addr);
7343 hevc->aux_addr = NULL;
7344 }
7345 if (hevc->rpm_addr) {
7346 dma_free_coherent(amports_get_dma_device(),
7347 RPM_BUF_SIZE, hevc->rpm_addr,
7348 hevc->rpm_phy_addr);
7349 hevc->rpm_addr = NULL;
7350 }
7351 if (hevc->lmem_addr) {
7352 dma_free_coherent(amports_get_dma_device(),
7353			LMEM_BUF_SIZE, hevc->lmem_addr,
7354 hevc->lmem_phy_addr);
7355 hevc->lmem_addr = NULL;
7356 }
7357
7358 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7359 if (hevc->frame_mmu_map_phy_addr)
7360 dma_free_coherent(amports_get_dma_device(),
7361 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7362 hevc->frame_mmu_map_phy_addr);
7363
7364 hevc->frame_mmu_map_addr = NULL;
7365 }
7366
7367 kfree(gvs);
7368 gvs = NULL;
7369}
7370
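/*
 * Set up the per-instance state: pick a workspace layout (1080p or 4K),
 * reset the bit-depth / signal-type defaults and allocate the RPM, aux,
 * LMEM and frame-MMU-map DMA buffers.
 */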
7371static int hevc_local_init(struct hevc_state_s *hevc)
7372{
7373 int ret = -1;
7374 struct BuffInfo_s *cur_buf_info = NULL;
7375
7376 memset(&hevc->param, 0, sizeof(union param_u));
7377
7378 cur_buf_info = &hevc->work_space_buf_store;
7379
7380 if (vdec_is_support_4k()) {
7381 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7382 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7383 sizeof(struct BuffInfo_s));
7384 else
7385 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7386 sizeof(struct BuffInfo_s));
7387 } else
7388 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7389 sizeof(struct BuffInfo_s));
7390
7391 cur_buf_info->start_adr = hevc->buf_start;
7392 init_buff_spec(hevc, cur_buf_info);
7393
7394 hevc_init_stru(hevc, cur_buf_info);
7395
7396 hevc->bit_depth_luma = 8;
7397 hevc->bit_depth_chroma = 8;
7398 hevc->video_signal_type = 0;
7399 hevc->video_signal_type_debug = 0;
7400 bit_depth_luma = hevc->bit_depth_luma;
7401 bit_depth_chroma = hevc->bit_depth_chroma;
7402 video_signal_type = hevc->video_signal_type;
7403
7404 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7405 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7406 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7407 if (hevc->rpm_addr == NULL) {
7408 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7409 return -1;
7410 }
7411 hevc->rpm_ptr = hevc->rpm_addr;
7412 }
7413
7414 if (prefix_aux_buf_size > 0 ||
7415 suffix_aux_buf_size > 0) {
7416 u32 aux_buf_size;
7417
7418 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7419 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7420 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7421		hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7422 aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7423 if (hevc->aux_addr == NULL) {
7424			pr_err("%s: failed to alloc aux buffer\n", __func__);
7425 return -1;
7426 }
7427 }
7428
7429 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7430 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7431 if (hevc->lmem_addr == NULL) {
7432 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7433 return -1;
7434 }
7435 hevc->lmem_ptr = hevc->lmem_addr;
7436
7437 if (hevc->mmu_enable) {
7438 hevc->frame_mmu_map_addr =
7439 dma_alloc_coherent(amports_get_dma_device(),
7440 get_frame_mmu_map_size(),
7441 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7442 if (hevc->frame_mmu_map_addr == NULL) {
7443			pr_err("%s: failed to alloc frame mmu map buffer\n", __func__);
7444 return -1;
7445 }
7446 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7447 }
7448 ret = 0;
7449 return ret;
7450}
7451
7452/*
7453 *******************************************
7454 * Mailbox command
7455 *******************************************
7456 */
7457#define CMD_FINISHED 0
7458#define CMD_ALLOC_VIEW 1
7459#define CMD_FRAME_DISPLAY 3
7460#define CMD_DEBUG 10
7461
7462
7463#define DECODE_BUFFER_NUM_MAX 32
7464#define DISPLAY_BUFFER_NUM 6
7465
7466#define video_domain_addr(adr) (adr&0x7fffffff)
7467#define DECODER_WORK_SPACE_SIZE 0x800000
7468
7469#define spec2canvas(x) \
7470 (((x)->uv_canvas_index << 16) | \
7471 ((x)->uv_canvas_index << 8) | \
7472 ((x)->y_canvas_index << 0))
7473
7474
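/*
 * Program the display canvases for a picture.  With double write enabled
 * the canvas geometry is derived from the double-write ratio; otherwise
 * the full-size decode buffers are mapped directly (non-MMU case only).
 */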
7475static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7476{
7477 struct vdec_s *vdec = hw_to_vdec(hevc);
7478 int canvas_w = ALIGN(pic->width, 64)/4;
7479 int canvas_h = ALIGN(pic->height, 32)/4;
7480 int blkmode = hevc->mem_map_mode;
7481
7482 /*CANVAS_BLKMODE_64X32*/
7483#ifdef SUPPORT_10BIT
7484 if (pic->double_write_mode) {
7485 canvas_w = pic->width /
7486 get_double_write_ratio(hevc, pic->double_write_mode);
7487 canvas_h = pic->height /
7488 get_double_write_ratio(hevc, pic->double_write_mode);
7489
7490 if (hevc->mem_map_mode == 0)
7491 canvas_w = ALIGN(canvas_w, 32);
7492 else
7493 canvas_w = ALIGN(canvas_w, 64);
7494 canvas_h = ALIGN(canvas_h, 32);
7495
7496 if (vdec->parallel_dec == 1) {
7497 if (pic->y_canvas_index == -1)
7498 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7499 if (pic->uv_canvas_index == -1)
7500 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7501 } else {
7502 pic->y_canvas_index = 128 + pic->index * 2;
7503 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7504 }
7505
7506 canvas_config_ex(pic->y_canvas_index,
7507 pic->dw_y_adr, canvas_w, canvas_h,
7508 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7509 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7510 canvas_w, canvas_h,
7511 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7512#ifdef MULTI_INSTANCE_SUPPORT
7513 pic->canvas_config[0].phy_addr =
7514 pic->dw_y_adr;
7515 pic->canvas_config[0].width =
7516 canvas_w;
7517 pic->canvas_config[0].height =
7518 canvas_h;
7519 pic->canvas_config[0].block_mode =
7520 blkmode;
7521 pic->canvas_config[0].endian = hevc->is_used_v4l ? 0 : 7;
7522
7523 pic->canvas_config[1].phy_addr =
7524 pic->dw_u_v_adr;
7525 pic->canvas_config[1].width =
7526 canvas_w;
7527 pic->canvas_config[1].height =
7528 canvas_h;
7529 pic->canvas_config[1].block_mode =
7530 blkmode;
7531 pic->canvas_config[1].endian = hevc->is_used_v4l ? 0 : 7;
7532#endif
7533 } else {
7534 if (!hevc->mmu_enable) {
7535 /* to change after 10bit VPU is ready ... */
7536 if (vdec->parallel_dec == 1) {
7537 if (pic->y_canvas_index == -1)
7538 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7539 pic->uv_canvas_index = pic->y_canvas_index;
7540 } else {
7541 pic->y_canvas_index = 128 + pic->index;
7542 pic->uv_canvas_index = 128 + pic->index;
7543 }
7544
7545 canvas_config_ex(pic->y_canvas_index,
7546 pic->mc_y_adr, canvas_w, canvas_h,
7547 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7548 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7549 canvas_w, canvas_h,
7550 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7551 }
7552 }
7553#else
7554 if (vdec->parallel_dec == 1) {
7555 if (pic->y_canvas_index == -1)
7556 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7557 if (pic->uv_canvas_index == -1)
7558 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7559 } else {
7560 pic->y_canvas_index = 128 + pic->index * 2;
7561 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7562 }
7563
7564
7565 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7566 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7567 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7568 canvas_w, canvas_h,
7569 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7570#endif
7571}
7572
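/* Record the coded picture size as the frame size when none was set yet. */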
7573static int init_buf_spec(struct hevc_state_s *hevc)
7574{
7575 int pic_width = hevc->pic_w;
7576 int pic_height = hevc->pic_h;
7577
7578 /* hevc_print(hevc, 0,
7579 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7580 */
7581 hevc_print(hevc, 0,
7582 "%s2 %d %d\n", __func__, pic_width, pic_height);
7583 /* pic_width = hevc->pic_w; */
7584 /* pic_height = hevc->pic_h; */
7585
7586 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7587 hevc->frame_width = pic_width;
7588 hevc->frame_height = pic_height;
7589
7590 }
7591
7592 return 0;
7593}
7594
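/*
 * Minimal SEI parser: walks the payloads of a (suffix) SEI NAL and picks
 * out pic_timing, HDR10+ user data, mastering display colour volume and
 * content light level information.
 */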
7595static int parse_sei(struct hevc_state_s *hevc,
7596 struct PIC_s *pic, char *sei_buf, uint32_t size)
7597{
7598 char *p = sei_buf;
7599 char *p_sei;
7600 uint16_t header;
7601 uint8_t nal_unit_type;
7602 uint8_t payload_type, payload_size;
7603 int i, j;
7604
7605 if (size < 2)
7606 return 0;
7607 header = *p++;
7608 header <<= 8;
7609 header += *p++;
7610 nal_unit_type = header >> 9;
7611 if ((nal_unit_type != NAL_UNIT_SEI)
7612 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7613 return 0;
7614 while (p+2 <= sei_buf+size) {
7615 payload_type = *p++;
7616 payload_size = *p++;
7617 if (p+payload_size <= sei_buf+size) {
7618 switch (payload_type) {
7619 case SEI_PicTiming:
7620 if ((parser_sei_enable & 0x4) &&
7621 hevc->frame_field_info_present_flag) {
7622 p_sei = p;
7623 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7624 pic->pic_struct = hevc->curr_pic_struct;
7625 if (get_dbg_flag(hevc) &
7626 H265_DEBUG_PIC_STRUCT) {
7627 hevc_print(hevc, 0,
7628 "parse result pic_struct = %d\n",
7629 hevc->curr_pic_struct);
7630 }
7631 }
7632 break;
7633 case SEI_UserDataITU_T_T35:
7634 p_sei = p;
7635 if (p_sei[0] == 0xB5
7636 && p_sei[1] == 0x00
7637 && p_sei[2] == 0x3C
7638 && p_sei[3] == 0x00
7639 && p_sei[4] == 0x01
7640 && p_sei[5] == 0x04)
7641 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7642
7643 break;
7644 case SEI_MasteringDisplayColorVolume:
7645 /*hevc_print(hevc, 0,
7646 "sei type: primary display color volume %d, size %d\n",
7647 payload_type,
7648 payload_size);*/
7649 /* master_display_colour */
7650 p_sei = p;
7651 for (i = 0; i < 3; i++) {
7652 for (j = 0; j < 2; j++) {
7653 hevc->primaries[i][j]
7654 = (*p_sei<<8)
7655 | *(p_sei+1);
7656 p_sei += 2;
7657 }
7658 }
7659 for (i = 0; i < 2; i++) {
7660 hevc->white_point[i]
7661 = (*p_sei<<8)
7662 | *(p_sei+1);
7663 p_sei += 2;
7664 }
7665 for (i = 0; i < 2; i++) {
7666 hevc->luminance[i]
7667 = (*p_sei<<24)
7668 | (*(p_sei+1)<<16)
7669 | (*(p_sei+2)<<8)
7670 | *(p_sei+3);
7671 p_sei += 4;
7672 }
7673 hevc->sei_present_flag |=
7674 SEI_MASTER_DISPLAY_COLOR_MASK;
7675 /*for (i = 0; i < 3; i++)
7676 for (j = 0; j < 2; j++)
7677 hevc_print(hevc, 0,
7678 "\tprimaries[%1d][%1d] = %04x\n",
7679 i, j,
7680 hevc->primaries[i][j]);
7681 hevc_print(hevc, 0,
7682 "\twhite_point = (%04x, %04x)\n",
7683 hevc->white_point[0],
7684 hevc->white_point[1]);
7685 hevc_print(hevc, 0,
7686 "\tmax,min luminance = %08x, %08x\n",
7687 hevc->luminance[0],
7688 hevc->luminance[1]);*/
7689 break;
7690 case SEI_ContentLightLevel:
7691 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7692 hevc_print(hevc, 0,
7693 "sei type: max content light level %d, size %d\n",
7694 payload_type, payload_size);
7695 /* content_light_level */
7696 p_sei = p;
7697 hevc->content_light_level[0]
7698 = (*p_sei<<8) | *(p_sei+1);
7699 p_sei += 2;
7700 hevc->content_light_level[1]
7701 = (*p_sei<<8) | *(p_sei+1);
7702 p_sei += 2;
7703 hevc->sei_present_flag |=
7704 SEI_CONTENT_LIGHT_LEVEL_MASK;
7705 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7706 hevc_print(hevc, 0,
7707 "\tmax cll = %04x, max_pa_cll = %04x\n",
7708 hevc->content_light_level[0],
7709 hevc->content_light_level[1]);
7710 break;
7711 default:
7712 break;
7713 }
7714 }
7715 p += payload_size;
7716 }
7717 return 0;
7718}
7719
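/*
 * Convert aspect_ratio_idc/SAR into a height/width display ratio
 * in 8.8 fixed point, following the HEVC VUI sample aspect ratio
 * table (idc 255 = extended SAR taken from sar_w/sar_h).
 * Example: idc 1 (square pixels) with a 1920x1080 picture gives
 * ar = 256 * 1080 / 1920 = 144 (0x90).
 */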
7720static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7721 unsigned w, unsigned h)
7722{
7723 unsigned ar;
7724
7725 if (idc == 255) {
7726 ar = div_u64(256ULL * sar_h * h,
7727 sar_w * w);
7728 } else {
7729 switch (idc) {
7730 case 1:
7731 ar = 0x100 * h / w;
7732 break;
7733 case 2:
7734 ar = 0x100 * h * 11 / (w * 12);
7735 break;
7736 case 3:
7737 ar = 0x100 * h * 11 / (w * 10);
7738 break;
7739 case 4:
7740 ar = 0x100 * h * 11 / (w * 16);
7741 break;
7742 case 5:
7743 ar = 0x100 * h * 33 / (w * 40);
7744 break;
7745 case 6:
7746 ar = 0x100 * h * 11 / (w * 24);
7747 break;
7748 case 7:
7749 ar = 0x100 * h * 11 / (w * 20);
7750 break;
7751 case 8:
7752 ar = 0x100 * h * 11 / (w * 32);
7753 break;
7754 case 9:
7755 ar = 0x100 * h * 33 / (w * 80);
7756 break;
7757 case 10:
7758 ar = 0x100 * h * 11 / (w * 18);
7759 break;
7760 case 11:
7761 ar = 0x100 * h * 11 / (w * 15);
7762 break;
7763 case 12:
7764 ar = 0x100 * h * 33 / (w * 64);
7765 break;
7766 case 13:
7767 ar = 0x100 * h * 99 / (w * 160);
7768 break;
7769 case 14:
7770 ar = 0x100 * h * 3 / (w * 4);
7771 break;
7772 case 15:
7773 ar = 0x100 * h * 2 / (w * 3);
7774 break;
7775 case 16:
7776 ar = 0x100 * h * 1 / (w * 2);
7777 break;
7778 default:
7779 ar = h * 0x100 / w;
7780 break;
7781 }
7782 }
7783
7784 return ar;
7785}
7786
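/*
 * Fill the vframe from the decoded picture: width/height scaled
 * by the double write ratio, duration, aspect ratio control,
 * signal type, and HDR metadata (mastering display colour volume
 * and content light level) recovered from the SEI data parsed
 * out of pic->aux_data_buf.
 */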
7787static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
7788 struct PIC_s *pic)
7789{
7790 unsigned int ar;
7791 int i, j;
7792 char *p;
7793 unsigned size = 0;
7794 unsigned type = 0;
7795 struct vframe_master_display_colour_s *vf_dp
7796 = &vf->prop.master_display_colour;
7797
7798 vf->width = pic->width /
7799 get_double_write_ratio(hevc, pic->double_write_mode);
7800 vf->height = pic->height /
7801 get_double_write_ratio(hevc, pic->double_write_mode);
7802
7803 vf->duration = hevc->frame_dur;
7804 vf->duration_pulldown = 0;
7805 vf->flag = 0;
7806
7807 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
7808 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7809
7810
7811 if (((pic->aspect_ratio_idc == 255) &&
7812 pic->sar_width &&
7813 pic->sar_height) ||
7814 ((pic->aspect_ratio_idc != 255) &&
7815 (pic->width))) {
7816 ar = min_t(u32,
7817 calc_ar(pic->aspect_ratio_idc,
7818 pic->sar_width,
7819 pic->sar_height,
7820 pic->width,
7821 pic->height),
7822 DISP_RATIO_ASPECT_RATIO_MAX);
7823 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7824 }
7825 hevc->ratio_control = vf->ratio_control;
7826 if (pic->aux_data_buf
7827 && pic->aux_data_size) {
7828 /* parser sei */
7829 p = pic->aux_data_buf;
7830 while (p < pic->aux_data_buf
7831 + pic->aux_data_size - 8) {
7832 size = *p++;
7833 size = (size << 8) | *p++;
7834 size = (size << 8) | *p++;
7835 size = (size << 8) | *p++;
7836 type = *p++;
7837 type = (type << 8) | *p++;
7838 type = (type << 8) | *p++;
7839 type = (type << 8) | *p++;
7840 if (type == 0x02000000) {
7841 /* hevc_print(hevc, 0,
7842 "sei(%d)\n", size); */
7843 parse_sei(hevc, pic, p, size);
7844 }
7845 p += size;
7846 }
7847 }
7848 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
7849 vf->signal_type = pic->video_signal_type;
7850 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
7851 u32 data;
7852 data = vf->signal_type;
7853 data = data & 0xFFFF00FF;
7854 data = data | (0x30<<8);
7855 vf->signal_type = data;
7856 }
7857 }
7858 else
7859 vf->signal_type = 0;
7860 hevc->video_signal_type_debug = vf->signal_type;
7861
7862 /* master_display_colour */
7863 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
7864 for (i = 0; i < 3; i++)
7865 for (j = 0; j < 2; j++)
7866 vf_dp->primaries[i][j] = hevc->primaries[i][j];
7867 for (i = 0; i < 2; i++) {
7868 vf_dp->white_point[i] = hevc->white_point[i];
7869 vf_dp->luminance[i]
7870 = hevc->luminance[i];
7871 }
7872 vf_dp->present_flag = 1;
7873 } else
7874 vf_dp->present_flag = 0;
7875
7876 /* content_light_level */
7877 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
7878 vf_dp->content_light_level.max_content
7879 = hevc->content_light_level[0];
7880 vf_dp->content_light_level.max_pic_average
7881 = hevc->content_light_level[1];
7882 vf_dp->content_light_level.present_flag = 1;
7883 } else
7884 vf_dp->content_light_level.present_flag = 0;
7885
7886 if (hevc->is_used_v4l &&
7887 ((hevc->sei_present_flag & SEI_HDR10PLUS_MASK) ||
7888 (vf_dp->present_flag) ||
7889 (vf_dp->content_light_level.present_flag))) {
7890 struct aml_vdec_hdr_infos hdr;
7891 struct aml_vcodec_ctx *ctx =
7892 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
7893
7894 memset(&hdr, 0, sizeof(hdr));
7895 hdr.signal_type = vf->signal_type;
7896 hdr.color_parms = *vf_dp;
7897 vdec_v4l_set_hdr_infos(ctx, &hdr);
7898 }
7899}
7900
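/*
 * vframe provider callbacks: display_q holds frames ready for the
 * receiver, newframe_q holds recycled struct vframe_s slots; the
 * "step" and force_disp_pic_index debug controls can block or
 * redirect peek/get.
 */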
7901static int vh265_vf_states(struct vframe_states *states, void *op_arg)
7902{
7903 unsigned long flags;
7904#ifdef MULTI_INSTANCE_SUPPORT
7905 struct vdec_s *vdec = op_arg;
7906 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7907#else
7908 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7909#endif
7910
7911 spin_lock_irqsave(&lock, flags);
7912
7913 states->vf_pool_size = VF_POOL_SIZE;
7914 states->buf_free_num = kfifo_len(&hevc->newframe_q);
7915 states->buf_avail_num = kfifo_len(&hevc->display_q);
7916
7917 if (step == 2)
7918 states->buf_avail_num = 0;
7919 spin_unlock_irqrestore(&lock, flags);
7920 return 0;
7921}
7922
7923static struct vframe_s *vh265_vf_peek(void *op_arg)
7924{
7925 struct vframe_s *vf[2] = {0, 0};
7926#ifdef MULTI_INSTANCE_SUPPORT
7927 struct vdec_s *vdec = op_arg;
7928 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7929#else
7930 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7931#endif
7932
7933 if (step == 2)
7934 return NULL;
7935
7936 if (force_disp_pic_index & 0x100) {
7937 if (force_disp_pic_index & 0x200)
7938 return NULL;
7939 return &hevc->vframe_dummy;
7940 }
7941
7942
7943 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
7944 if (vf[1]) {
7945 vf[0]->next_vf_pts_valid = true;
7946 vf[0]->next_vf_pts = vf[1]->pts;
7947 } else
7948 vf[0]->next_vf_pts_valid = false;
7949 return vf[0];
7950 }
7951
7952 return NULL;
7953}
7954
7955static struct vframe_s *vh265_vf_get(void *op_arg)
7956{
7957 struct vframe_s *vf;
7958#ifdef MULTI_INSTANCE_SUPPORT
7959 struct vdec_s *vdec = op_arg;
7960 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7961#else
7962 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7963#endif
7964
7965 if (step == 2)
7966 return NULL;
7967 else if (step == 1)
7968 step = 2;
7969
7970#if 0
7971 if (force_disp_pic_index & 0x100) {
7972 int buffer_index = force_disp_pic_index & 0xff;
7973 struct PIC_s *pic = NULL;
7974 if (buffer_index >= 0
7975 && buffer_index < MAX_REF_PIC_NUM)
7976 pic = hevc->m_PIC[buffer_index];
7977 if (pic == NULL)
7978 return NULL;
7979 if (force_disp_pic_index & 0x200)
7980 return NULL;
7981
7982 vf = &hevc->vframe_dummy;
7983 if (get_double_write_mode(hevc)) {
7984 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
7985 VIDTYPE_VIU_NV21;
7986 if (hevc->m_ins_flag) {
7987 vf->canvas0Addr = vf->canvas1Addr = -1;
7988 vf->plane_num = 2;
7989 vf->canvas0_config[0] =
7990 pic->canvas_config[0];
7991 vf->canvas0_config[1] =
7992 pic->canvas_config[1];
7993
7994 vf->canvas1_config[0] =
7995 pic->canvas_config[0];
7996 vf->canvas1_config[1] =
7997 pic->canvas_config[1];
7998 } else {
7999 vf->canvas0Addr = vf->canvas1Addr
8000 = spec2canvas(pic);
8001 }
8002 } else {
8003 vf->canvas0Addr = vf->canvas1Addr = 0;
8004 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8005 if (hevc->mmu_enable)
8006 vf->type |= VIDTYPE_SCATTER;
8007 }
8008 vf->compWidth = pic->width;
8009 vf->compHeight = pic->height;
8010 update_vf_memhandle(hevc, vf, pic);
8011 switch (hevc->bit_depth_luma) {
8012 case 9:
8013 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
8014 break;
8015 case 10:
8016 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
8017 | BITDEPTH_V10;
8018 break;
8019 default:
8020 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8021 break;
8022 }
8023 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8024 vf->bitdepth =
8025 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8026 if (hevc->mem_saving_mode == 1)
8027 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8028 vf->duration_pulldown = 0;
8029 vf->pts = 0;
8030 vf->pts_us64 = 0;
8031		set_frame_info(hevc, vf, pic);
8032
8033 vf->width = pic->width /
8034 get_double_write_ratio(hevc, pic->double_write_mode);
8035 vf->height = pic->height /
8036 get_double_write_ratio(hevc, pic->double_write_mode);
8037
8038 force_disp_pic_index |= 0x200;
8039 return vf;
8040 }
8041#endif
8042
8043 if (kfifo_get(&hevc->display_q, &vf)) {
8044 struct vframe_s *next_vf;
8045 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8046 hevc_print(hevc, 0,
8047 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8048 __func__, vf, vf->type, vf->index,
8049 get_pic_poc(hevc, vf->index & 0xff),
8050 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8051 vf->pts, vf->pts_us64,
8052 vf->duration);
8053#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8054 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
8055 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
8056 if (pic->aux_data_buf && pic->aux_data_size > 0) {
8057 int i;
8058 struct PIC_s *pic =
8059 hevc->m_PIC[vf->index & 0xff];
8060 hevc_print(hevc, 0,
8061 "pic 0x%p aux size %d:\n",
8062 pic, pic->aux_data_size);
8063 for (i = 0; i < pic->aux_data_size; i++) {
8064 hevc_print_cont(hevc, 0,
8065 "%02x ", pic->aux_data_buf[i]);
8066 if (((i + 1) & 0xf) == 0)
8067 hevc_print_cont(hevc, 0, "\n");
8068 }
8069 hevc_print_cont(hevc, 0, "\n");
8070 }
8071 }
8072#endif
8073 hevc->show_frame_num++;
8074 hevc->vf_get_count++;
8075
8076 if (kfifo_peek(&hevc->display_q, &next_vf)) {
8077 vf->next_vf_pts_valid = true;
8078 vf->next_vf_pts = next_vf->pts;
8079 } else
8080 vf->next_vf_pts_valid = false;
8081
8082 return vf;
8083 }
8084
8085 return NULL;
8086}
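/*
 * Sanity check that a vframe handed back by the receiver really
 * belongs to this instance's vfpool before it is recycled.
 */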
8087static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
8088 int i;
8089 for (i = 0; i < VF_POOL_SIZE; i++) {
8090 if (vf == &hevc->vfpool[i])
8091 return true;
8092 }
8093	pr_info("h265: invalid vf has been put, vf = %p\n", vf);
8094	for (i = 0; i < VF_POOL_SIZE; i++) {
8095		pr_info("valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
8096 }
8097 return false;
8098}
8099
8100static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
8101{
8102 unsigned long flags;
8103#ifdef MULTI_INSTANCE_SUPPORT
8104 struct vdec_s *vdec = op_arg;
8105 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8106#else
8107 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8108#endif
8109 unsigned char index_top;
8110 unsigned char index_bot;
8111
8112 if (vf && (vf_valid_check(vf, hevc) == false))
8113 return;
8114 if (vf == (&hevc->vframe_dummy))
8115 return;
8116 index_top = vf->index & 0xff;
8117 index_bot = (vf->index >> 8) & 0xff;
8118 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8119 hevc_print(hevc, 0,
8120 "%s(type %d index 0x%x)\n",
8121 __func__, vf->type, vf->index);
8122 hevc->vf_put_count++;
8123 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8124 spin_lock_irqsave(&lock, flags);
8125
8126 if (index_top != 0xff
8127 && index_top < MAX_REF_PIC_NUM
8128 && hevc->m_PIC[index_top]) {
8129 if (hevc->is_used_v4l)
8130 hevc->m_PIC[index_top]->vframe_bound = true;
8131 if (hevc->m_PIC[index_top]->vf_ref > 0) {
8132 hevc->m_PIC[index_top]->vf_ref--;
8133
8134 if (hevc->m_PIC[index_top]->vf_ref == 0) {
8135 hevc->m_PIC[index_top]->output_ready = 0;
8136
8137 if (hevc->wait_buf != 0)
8138 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8139 0x1);
8140 }
8141 }
8142 }
8143
8144 if (index_bot != 0xff
8145 && index_bot < MAX_REF_PIC_NUM
8146 && hevc->m_PIC[index_bot]) {
8147 if (hevc->is_used_v4l)
8148 hevc->m_PIC[index_bot]->vframe_bound = true;
8149 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
8150 hevc->m_PIC[index_bot]->vf_ref--;
8151
8152 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
8153 hevc->m_PIC[index_bot]->output_ready = 0;
8154 if (hevc->wait_buf != 0)
8155 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8156 0x1);
8157 }
8158 }
8159 }
8160 spin_unlock_irqrestore(&lock, flags);
8161}
8162
8163static int vh265_event_cb(int type, void *data, void *op_arg)
8164{
8165 unsigned long flags;
8166#ifdef MULTI_INSTANCE_SUPPORT
8167 struct vdec_s *vdec = op_arg;
8168 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8169#else
8170 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8171#endif
8172 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8173#if 0
8174 amhevc_stop();
8175#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8176 vf_light_unreg_provider(&vh265_vf_prov);
8177#endif
8178 spin_lock_irqsave(&hevc->lock, flags);
8179 vh265_local_init();
8180 vh265_prot_init();
8181 spin_unlock_irqrestore(&hevc->lock, flags);
8182#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8183 vf_reg_provider(&vh265_vf_prov);
8184#endif
8185 amhevc_start();
8186#endif
8187 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8188 struct provider_aux_req_s *req =
8189 (struct provider_aux_req_s *)data;
8190 unsigned char index;
8191
8192 spin_lock_irqsave(&lock, flags);
8193 index = req->vf->index & 0xff;
8194 req->aux_buf = NULL;
8195 req->aux_size = 0;
8196 if (req->bot_flag)
8197 index = (req->vf->index >> 8) & 0xff;
8198 if (index != 0xff
8199 && index < MAX_REF_PIC_NUM
8200 && hevc->m_PIC[index]) {
8201 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8202 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8203#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8204 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8205 req->dv_enhance_exist = false;
8206 else
8207 req->dv_enhance_exist =
8208 hevc->m_PIC[index]->dv_enhance_exist;
8209 hevc_print(hevc, H265_DEBUG_DV,
8210				"query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux size 0x%x\n",
8211 req->vf,
8212 hevc->m_PIC[index]->POC, index,
8213 req->dv_enhance_exist, req->aux_size);
8214#else
8215 req->dv_enhance_exist = 0;
8216#endif
8217 }
8218 spin_unlock_irqrestore(&lock, flags);
8219
8220 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8221 hevc_print(hevc, 0,
8222 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8223 __func__, type, index, req->aux_size);
8224#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8225 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8226 if ((force_bypass_dvenl & 0x80000000) == 0) {
8227 hevc_print(hevc, 0,
8228 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8229 __func__);
8230 hevc->bypass_dvenl_enable = 1;
8231 }
8232
8233#endif
8234 }
8235 return 0;
8236}
8237
8238#ifdef HEVC_PIC_STRUCT_SUPPORT
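/*
 * Interlaced handling: a single-field vframe may sit in pending_q
 * waiting for its opposite-parity field. Flush anything pending
 * beyond one entry, and either pair the pending field with
 * pair_pic (patching index and canvas0/canvas1) or push it to
 * display_q on its own when pairing is not possible.
 */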
8239static int process_pending_vframe(struct hevc_state_s *hevc,
8240 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8241{
8242 struct vframe_s *vf;
8243
8244 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8245 hevc_print(hevc, 0,
8246 "%s: pair_pic index 0x%x %s\n",
8247 __func__, pair_pic->index,
8248 pair_frame_top_flag ?
8249 "top" : "bot");
8250
8251 if (kfifo_len(&hevc->pending_q) > 1) {
8252 unsigned long flags;
8253		/* do not keep more than 1 frame pending */
8254 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8255 hevc_print(hevc, 0,
8256 "fatal error, no available buffer slot.");
8257 return -1;
8258 }
8259 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8260 hevc_print(hevc, 0,
8261 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8262 __func__, vf->index);
8263 if ((hevc->double_write_mode == 3) &&
8264 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8265 vf->type |= VIDTYPE_COMPRESS;
8266 if (hevc->mmu_enable)
8267 vf->type |= VIDTYPE_SCATTER;
8268 }
8269 hevc->vf_pre_count++;
8270 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8271 spin_lock_irqsave(&lock, flags);
8272 vf->index &= 0xff;
8273 hevc->m_PIC[vf->index]->output_ready = 0;
8274 if (hevc->wait_buf != 0)
8275 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8276 0x1);
8277 spin_unlock_irqrestore(&lock, flags);
8278
8279 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8280 }
8281
8282 if (kfifo_peek(&hevc->pending_q, &vf)) {
8283 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8284 /*
8285 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8286 *do not use it
8287 */
8288 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8289 hevc_print(hevc, 0,
8290 "fatal error, no available buffer slot.");
8291 return -1;
8292 }
8293 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8294 hevc_print(hevc, 0,
8295 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8296 __func__, vf->index);
8297 if (vf) {
8298 if ((hevc->double_write_mode == 3) &&
8299 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8300 vf->type |= VIDTYPE_COMPRESS;
8301 if (hevc->mmu_enable)
8302 vf->type |= VIDTYPE_SCATTER;
8303 }
8304 hevc->vf_pre_count++;
8305 kfifo_put(&hevc->display_q,
8306 (const struct vframe_s *)vf);
8307 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8308 }
8309 } else if ((!pair_frame_top_flag) &&
8310 (((vf->index >> 8) & 0xff) == 0xff)) {
8311 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8312 hevc_print(hevc, 0,
8313 "fatal error, no available buffer slot.");
8314 return -1;
8315 }
8316 if (vf) {
8317 if ((hevc->double_write_mode == 3) &&
8318 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8319 vf->type |= VIDTYPE_COMPRESS;
8320 if (hevc->mmu_enable)
8321 vf->type |= VIDTYPE_SCATTER;
8322 }
8323 vf->index &= 0xff;
8324 vf->index |= (pair_pic->index << 8);
8325 vf->canvas1Addr = spec2canvas(pair_pic);
8326 pair_pic->vf_ref++;
8327 kfifo_put(&hevc->display_q,
8328 (const struct vframe_s *)vf);
8329 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8330 hevc->vf_pre_count++;
8331 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8332 hevc_print(hevc, 0,
8333 "%s vf => display_q: (index 0x%x)\n",
8334 __func__, vf->index);
8335 }
8336 } else if (pair_frame_top_flag &&
8337 ((vf->index & 0xff) == 0xff)) {
8338 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8339 hevc_print(hevc, 0,
8340 "fatal error, no available buffer slot.");
8341 return -1;
8342 }
8343 if (vf) {
8344 if ((hevc->double_write_mode == 3) &&
8345 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8346 vf->type |= VIDTYPE_COMPRESS;
8347 if (hevc->mmu_enable)
8348 vf->type |= VIDTYPE_SCATTER;
8349 }
8350 vf->index &= 0xff00;
8351 vf->index |= pair_pic->index;
8352 vf->canvas0Addr = spec2canvas(pair_pic);
8353 pair_pic->vf_ref++;
8354 kfifo_put(&hevc->display_q,
8355 (const struct vframe_s *)vf);
8356 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8357 hevc->vf_pre_count++;
8358 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8359 hevc_print(hevc, 0,
8360 "%s vf => display_q: (index 0x%x)\n",
8361 __func__, vf->index);
8362 }
8363 }
8364 }
8365 return 0;
8366}
8367#endif
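/*
 * Attach the mmu/bmmu box memory handles to the vframe so the
 * underlying buffers stay referenced while the frame is on
 * display.
 */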
8368static void update_vf_memhandle(struct hevc_state_s *hevc,
8369 struct vframe_s *vf, struct PIC_s *pic)
8370{
8371 if (pic->index < 0) {
8372 vf->mem_handle = NULL;
8373 vf->mem_head_handle = NULL;
8374 } else if (vf->type & VIDTYPE_SCATTER) {
8375 vf->mem_handle =
8376 decoder_mmu_box_get_mem_handle(
8377 hevc->mmu_box, pic->index);
8378 vf->mem_head_handle =
8379 decoder_bmmu_box_get_mem_handle(
8380 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8381 } else {
8382 vf->mem_handle =
8383 decoder_bmmu_box_get_mem_handle(
8384 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8385 vf->mem_head_handle = NULL;
8386 /*vf->mem_head_handle =
8387 decoder_bmmu_box_get_mem_handle(
8388 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8389 }
8390 return;
8391}
8392
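/*
 * Record per-picture QoS statistics (frame type, min/avg/max of
 * motion vectors, QP and skip counts) and report them through
 * vdec_fill_frame_info() when frameinfo_enable is set.
 */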
8393static void fill_frame_info(struct hevc_state_s *hevc,
8394 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8395{
8396 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8397 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8398 vframe_qos->type = 4;
8399 else if (pic->slice_type == I_SLICE)
8400 vframe_qos->type = 1;
8401 else if (pic->slice_type == P_SLICE)
8402 vframe_qos->type = 2;
8403 else if (pic->slice_type == B_SLICE)
8404 vframe_qos->type = 3;
8405/*
8406#define SHOW_QOS_INFO
8407*/
8408 vframe_qos->size = framesize;
8409 vframe_qos->pts = pts;
8410#ifdef SHOW_QOS_INFO
8411 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8412#endif
8413
8414
8415 vframe_qos->max_mv = pic->max_mv;
8416 vframe_qos->avg_mv = pic->avg_mv;
8417 vframe_qos->min_mv = pic->min_mv;
8418#ifdef SHOW_QOS_INFO
8419 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8420 vframe_qos->max_mv,
8421 vframe_qos->avg_mv,
8422 vframe_qos->min_mv);
8423#endif
8424
8425 vframe_qos->max_qp = pic->max_qp;
8426 vframe_qos->avg_qp = pic->avg_qp;
8427 vframe_qos->min_qp = pic->min_qp;
8428#ifdef SHOW_QOS_INFO
8429 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8430 vframe_qos->max_qp,
8431 vframe_qos->avg_qp,
8432 vframe_qos->min_qp);
8433#endif
8434
8435 vframe_qos->max_skip = pic->max_skip;
8436 vframe_qos->avg_skip = pic->avg_skip;
8437 vframe_qos->min_skip = pic->min_skip;
8438#ifdef SHOW_QOS_INFO
8439 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8440 vframe_qos->max_skip,
8441 vframe_qos->avg_skip,
8442 vframe_qos->min_skip);
8443#endif
8444
8445 vframe_qos->num++;
8446
8447 if (hevc->frameinfo_enable)
8448 vdec_fill_frame_info(vframe_qos, 1);
8449}
8450
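/*
 * Build and queue the display vframe for a decoded picture:
 * look up or interpolate the PTS, program compressed-header or
 * double-write addresses and bit depth, apply the conformance
 * window crop, expand interlaced pic_structs into two or three
 * field vframes, then push to display_q (or pending_q for paired
 * fields) and notify the receiver.
 */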
8451static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8452{
8453#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8454 struct vdec_s *vdec = hw_to_vdec(hevc);
8455#endif
8456 struct vframe_s *vf = NULL;
8457 int stream_offset = pic->stream_offset;
8458 unsigned short slice_type = pic->slice_type;
8459 u32 frame_size;
8460
8461 if (force_disp_pic_index & 0x100) {
8462 /*recycle directly*/
8463 pic->output_ready = 0;
8464 return -1;
8465 }
8466 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8467 hevc_print(hevc, 0,
8468 "fatal error, no available buffer slot.");
8469 return -1;
8470 }
8471 display_frame_count[hevc->index]++;
8472 if (vf) {
8473 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8474 "%s: pic index 0x%x\n",
8475 __func__, pic->index);*/
8476
8477 if (hevc->is_used_v4l) {
8478 vf->v4l_mem_handle
8479 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8480 if (hevc->mmu_enable) {
8481 vf->mm_box.bmmu_box = hevc->bmmu_box;
8482 vf->mm_box.bmmu_idx = VF_BUFFER_IDX(pic->BUF_index);
8483 vf->mm_box.mmu_box = hevc->mmu_box;
8484 vf->mm_box.mmu_idx = pic->index;
8485 }
8486 }
8487
8488#ifdef MULTI_INSTANCE_SUPPORT
8489 if (vdec_frame_based(hw_to_vdec(hevc))) {
8490 vf->pts = pic->pts;
8491 vf->pts_us64 = pic->pts64;
8492 vf->timestamp = pic->timestamp;
8493 }
8494 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8495 stream_offset, &vf->pts, 0) != 0) { */
8496#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8497 else if (vdec->master == NULL) {
8498#else
8499 else {
8500#endif
8501#endif
8502 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8503 "call pts_lookup_offset_us64(0x%x)\n",
8504 stream_offset);
8505 if (pts_lookup_offset_us64
8506 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8507 &frame_size, 0,
8508 &vf->pts_us64) != 0) {
8509#ifdef DEBUG_PTS
8510 hevc->pts_missed++;
8511#endif
8512 vf->pts = 0;
8513 vf->pts_us64 = 0;
8514 }
8515#ifdef DEBUG_PTS
8516 else
8517 hevc->pts_hit++;
8518#endif
8519#ifdef MULTI_INSTANCE_SUPPORT
8520#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8521 } else {
8522 vf->pts = 0;
8523 vf->pts_us64 = 0;
8524 }
8525#else
8526 }
8527#endif
8528#endif
8529 if (pts_unstable && (hevc->frame_dur > 0))
8530 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8531
8532 fill_frame_info(hevc, pic, frame_size, vf->pts);
8533
8534 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8535 && hevc->get_frame_dur) {
8536 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8537
8538 if (pts_diff < 0) {
8539 hevc->pts_mode_switching_count++;
8540 hevc->pts_mode_recovery_count = 0;
8541
8542 if (hevc->pts_mode_switching_count >=
8543 PTS_MODE_SWITCHING_THRESHOLD) {
8544 hevc->pts_mode =
8545 PTS_NONE_REF_USE_DURATION;
8546 hevc_print(hevc, 0,
8547 "HEVC: switch to n_d mode.\n");
8548 }
8549
8550 } else {
8551 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8552
8553 hevc->pts_mode_recovery_count++;
8554 if (hevc->pts_mode_recovery_count > p) {
8555 hevc->pts_mode_switching_count = 0;
8556 hevc->pts_mode_recovery_count = 0;
8557 }
8558 }
8559 }
8560
8561 if (vf->pts != 0)
8562 hevc->last_lookup_pts = vf->pts;
8563
8564 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8565 && (slice_type != 2))
8566 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8567 hevc->last_pts = vf->pts;
8568
8569 if (vf->pts_us64 != 0)
8570 hevc->last_lookup_pts_us64 = vf->pts_us64;
8571
8572 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8573 && (slice_type != 2)) {
8574 vf->pts_us64 =
8575 hevc->last_pts_us64 +
8576 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8577 }
8578 hevc->last_pts_us64 = vf->pts_us64;
8579 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8580 hevc_print(hevc, 0,
8581 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8582 vf->pts, vf->pts_us64);
8583 }
8584
8585 /*
8586 *vf->index:
8587 *(1) vf->type is VIDTYPE_PROGRESSIVE
8588 * and vf->canvas0Addr != vf->canvas1Addr,
8589 * vf->index[7:0] is the index of top pic
8590 * vf->index[15:8] is the index of bot pic
8591 *(2) other cases,
8592 * only vf->index[7:0] is used
8593 * vf->index[15:8] == 0xff
8594 */
8595 vf->index = 0xff00 | pic->index;
8596#if 1
8597/*SUPPORT_10BIT*/
8598 if (pic->double_write_mode & 0x10) {
8599 /* double write only */
8600 vf->compBodyAddr = 0;
8601 vf->compHeadAddr = 0;
8602 } else {
8603
8604 if (hevc->mmu_enable) {
8605 vf->compBodyAddr = 0;
8606 vf->compHeadAddr = pic->header_adr;
8607 } else {
8608 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8609 vf->compHeadAddr = pic->mc_y_adr +
8610 pic->losless_comp_body_size;
8611 vf->mem_head_handle = NULL;
8612 }
8613
8614 /*head adr*/
8615 vf->canvas0Addr = vf->canvas1Addr = 0;
8616 }
8617 if (pic->double_write_mode) {
8618 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8619 vf->type |= VIDTYPE_VIU_NV21;
8620
8621 if ((pic->double_write_mode == 3) &&
8622 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8623 vf->type |= VIDTYPE_COMPRESS;
8624 if (hevc->mmu_enable)
8625 vf->type |= VIDTYPE_SCATTER;
8626 }
8627#ifdef MULTI_INSTANCE_SUPPORT
8628 if (hevc->m_ins_flag &&
8629 (get_dbg_flag(hevc)
8630 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8631 vf->canvas0Addr = vf->canvas1Addr = -1;
8632 vf->plane_num = 2;
8633 vf->canvas0_config[0] =
8634 pic->canvas_config[0];
8635 vf->canvas0_config[1] =
8636 pic->canvas_config[1];
8637
8638 vf->canvas1_config[0] =
8639 pic->canvas_config[0];
8640 vf->canvas1_config[1] =
8641 pic->canvas_config[1];
8642
8643 } else
8644#endif
8645 vf->canvas0Addr = vf->canvas1Addr
8646 = spec2canvas(pic);
8647 } else {
8648 vf->canvas0Addr = vf->canvas1Addr = 0;
8649 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8650 if (hevc->mmu_enable)
8651 vf->type |= VIDTYPE_SCATTER;
8652 }
8653 vf->compWidth = pic->width;
8654 vf->compHeight = pic->height;
8655 update_vf_memhandle(hevc, vf, pic);
8656 switch (pic->bit_depth_luma) {
8657 case 9:
8658 vf->bitdepth = BITDEPTH_Y9;
8659 break;
8660 case 10:
8661 vf->bitdepth = BITDEPTH_Y10;
8662 break;
8663 default:
8664 vf->bitdepth = BITDEPTH_Y8;
8665 break;
8666 }
8667 switch (pic->bit_depth_chroma) {
8668 case 9:
8669 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8670 break;
8671 case 10:
8672 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8673 break;
8674 default:
8675 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8676 break;
8677 }
8678 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8679 vf->bitdepth =
8680 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8681 if (pic->mem_saving_mode == 1)
8682 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8683#else
8684 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8685 vf->type |= VIDTYPE_VIU_NV21;
8686 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8687#endif
8688 set_frame_info(hevc, vf, pic);
8689 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8690 /* hevc_print(hevc, 0,
8691 "aaa: %d/%d, %d/%d\n",
8692 vf->width,vf->height, pic->width, pic->height); */
8693 vf->width = pic->width;
8694 vf->height = pic->height;
8695
8696 if (force_w_h != 0) {
8697 vf->width = (force_w_h >> 16) & 0xffff;
8698 vf->height = force_w_h & 0xffff;
8699 }
8700 if (force_fps & 0x100) {
8701 u32 rate = force_fps & 0xff;
8702
8703 if (rate)
8704 vf->duration = 96000/rate;
8705 else
8706 vf->duration = 0;
8707 }
8708 if (force_fps & 0x200) {
8709 vf->pts = 0;
8710 vf->pts_us64 = 0;
8711 }
8712 /*
8713 * !!! to do ...
8714	 * need to move the code below to get_new_pic();
8715	 * hevc->xxx can only be used by the currently decoded pic
8716 */
8717 if (pic->conformance_window_flag &&
8718 (get_dbg_flag(hevc) &
8719 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
8720 unsigned int SubWidthC, SubHeightC;
8721
8722 switch (pic->chroma_format_idc) {
8723 case 1:
8724 SubWidthC = 2;
8725 SubHeightC = 2;
8726 break;
8727 case 2:
8728 SubWidthC = 2;
8729 SubHeightC = 1;
8730 break;
8731 default:
8732 SubWidthC = 1;
8733 SubHeightC = 1;
8734 break;
8735 }
8736 vf->width -= SubWidthC *
8737 (pic->conf_win_left_offset +
8738 pic->conf_win_right_offset);
8739 vf->height -= SubHeightC *
8740 (pic->conf_win_top_offset +
8741 pic->conf_win_bottom_offset);
8742
8743 vf->compWidth -= SubWidthC *
8744 (pic->conf_win_left_offset +
8745 pic->conf_win_right_offset);
8746 vf->compHeight -= SubHeightC *
8747 (pic->conf_win_top_offset +
8748 pic->conf_win_bottom_offset);
8749
8750 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
8751 hevc_print(hevc, 0,
8752 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
8753 pic->chroma_format_idc,
8754 pic->conf_win_left_offset,
8755 pic->conf_win_right_offset,
8756 pic->conf_win_top_offset,
8757 pic->conf_win_bottom_offset,
8758 vf->width, vf->height, vf->compWidth, vf->compHeight);
8759 }
8760
8761 vf->width = vf->width /
8762 get_double_write_ratio(hevc, pic->double_write_mode);
8763 vf->height = vf->height /
8764 get_double_write_ratio(hevc, pic->double_write_mode);
8765#ifdef HEVC_PIC_STRUCT_SUPPORT
8766 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
8767 struct vframe_s *vf2;
8768
8769 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8770 hevc_print(hevc, 0,
8771 "pic_struct = %d index 0x%x\n",
8772 pic->pic_struct,
8773 pic->index);
8774
8775 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8776 hevc_print(hevc, 0,
8777 "fatal error, no available buffer slot.");
8778 return -1;
8779 }
8780 pic->vf_ref = 2;
8781 vf->duration = vf->duration>>1;
8782 memcpy(vf2, vf, sizeof(struct vframe_s));
8783
8784 if (pic->pic_struct == 3) {
8785 vf->type = VIDTYPE_INTERLACE_TOP
8786 | VIDTYPE_VIU_NV21;
8787 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8788 | VIDTYPE_VIU_NV21;
8789 } else {
8790 vf->type = VIDTYPE_INTERLACE_BOTTOM
8791 | VIDTYPE_VIU_NV21;
8792 vf2->type = VIDTYPE_INTERLACE_TOP
8793 | VIDTYPE_VIU_NV21;
8794 }
8795 hevc->vf_pre_count++;
8796 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8797 kfifo_put(&hevc->display_q,
8798 (const struct vframe_s *)vf);
8799 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8800 hevc->vf_pre_count++;
8801 kfifo_put(&hevc->display_q,
8802 (const struct vframe_s *)vf2);
8803 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8804 } else if (pic->pic_struct == 5
8805 || pic->pic_struct == 6) {
8806 struct vframe_s *vf2, *vf3;
8807
8808 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8809 hevc_print(hevc, 0,
8810 "pic_struct = %d index 0x%x\n",
8811 pic->pic_struct,
8812 pic->index);
8813
8814 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8815 hevc_print(hevc, 0,
8816 "fatal error, no available buffer slot.");
8817 return -1;
8818 }
8819 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
8820 hevc_print(hevc, 0,
8821 "fatal error, no available buffer slot.");
8822 return -1;
8823 }
8824 pic->vf_ref = 3;
8825 vf->duration = vf->duration/3;
8826 memcpy(vf2, vf, sizeof(struct vframe_s));
8827 memcpy(vf3, vf, sizeof(struct vframe_s));
8828
8829 if (pic->pic_struct == 5) {
8830 vf->type = VIDTYPE_INTERLACE_TOP
8831 | VIDTYPE_VIU_NV21;
8832 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8833 | VIDTYPE_VIU_NV21;
8834 vf3->type = VIDTYPE_INTERLACE_TOP
8835 | VIDTYPE_VIU_NV21;
8836 } else {
8837 vf->type = VIDTYPE_INTERLACE_BOTTOM
8838 | VIDTYPE_VIU_NV21;
8839 vf2->type = VIDTYPE_INTERLACE_TOP
8840 | VIDTYPE_VIU_NV21;
8841 vf3->type = VIDTYPE_INTERLACE_BOTTOM
8842 | VIDTYPE_VIU_NV21;
8843 }
8844 hevc->vf_pre_count++;
8845 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8846 kfifo_put(&hevc->display_q,
8847 (const struct vframe_s *)vf);
8848 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8849 hevc->vf_pre_count++;
8850 kfifo_put(&hevc->display_q,
8851 (const struct vframe_s *)vf2);
8852 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8853 hevc->vf_pre_count++;
8854 kfifo_put(&hevc->display_q,
8855 (const struct vframe_s *)vf3);
8856 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
8857
8858 } else if (pic->pic_struct == 9
8859 || pic->pic_struct == 10) {
8860 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8861 hevc_print(hevc, 0,
8862 "pic_struct = %d index 0x%x\n",
8863 pic->pic_struct,
8864 pic->index);
8865
8866 pic->vf_ref = 1;
8867 /* process previous pending vf*/
8868 process_pending_vframe(hevc,
8869 pic, (pic->pic_struct == 9));
8870
8871 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8872 /* process current vf */
8873 kfifo_put(&hevc->pending_q,
8874 (const struct vframe_s *)vf);
8875 vf->height <<= 1;
8876 if (pic->pic_struct == 9) {
8877 vf->type = VIDTYPE_INTERLACE_TOP
8878 | VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8879 process_pending_vframe(hevc,
8880 hevc->pre_bot_pic, 0);
8881 } else {
8882 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8883 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8884 vf->index = (pic->index << 8) | 0xff;
8885 process_pending_vframe(hevc,
8886 hevc->pre_top_pic, 1);
8887 }
8888
8889 if (hevc->vf_pre_count == 0)
8890 hevc->vf_pre_count++;
8891
8892 /**/
8893 if (pic->pic_struct == 9)
8894 hevc->pre_top_pic = pic;
8895 else
8896 hevc->pre_bot_pic = pic;
8897
8898 } else if (pic->pic_struct == 11
8899 || pic->pic_struct == 12) {
8900 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8901 hevc_print(hevc, 0,
8902 "pic_struct = %d index 0x%x\n",
8903 pic->pic_struct,
8904 pic->index);
8905 pic->vf_ref = 1;
8906 /* process previous pending vf*/
8907 process_pending_vframe(hevc, pic,
8908 (pic->pic_struct == 11));
8909
8910 /* put current into pending q */
8911 vf->height <<= 1;
8912 if (pic->pic_struct == 11)
8913 vf->type = VIDTYPE_INTERLACE_TOP |
8914 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8915 else {
8916 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8917 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8918 vf->index = (pic->index << 8) | 0xff;
8919 }
8920 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8921 kfifo_put(&hevc->pending_q,
8922 (const struct vframe_s *)vf);
8923 if (hevc->vf_pre_count == 0)
8924 hevc->vf_pre_count++;
8925
8926 /**/
8927 if (pic->pic_struct == 11)
8928 hevc->pre_top_pic = pic;
8929 else
8930 hevc->pre_bot_pic = pic;
8931
8932 } else {
8933 pic->vf_ref = 1;
8934
8935 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8936 hevc_print(hevc, 0,
8937 "pic_struct = %d index 0x%x\n",
8938 pic->pic_struct,
8939 pic->index);
8940
8941 switch (pic->pic_struct) {
8942 case 7:
8943 vf->duration <<= 1;
8944 break;
8945 case 8:
8946 vf->duration = vf->duration * 3;
8947 break;
8948 case 1:
8949 vf->height <<= 1;
8950 vf->type = VIDTYPE_INTERLACE_TOP |
8951 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8952 process_pending_vframe(hevc, pic, 1);
8953 hevc->pre_top_pic = pic;
8954 break;
8955 case 2:
8956 vf->height <<= 1;
8957 vf->type = VIDTYPE_INTERLACE_BOTTOM
8958 | VIDTYPE_VIU_NV21
8959 | VIDTYPE_VIU_FIELD;
8960 process_pending_vframe(hevc, pic, 0);
8961 hevc->pre_bot_pic = pic;
8962 break;
8963 }
8964 hevc->vf_pre_count++;
8965 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8966 kfifo_put(&hevc->display_q,
8967 (const struct vframe_s *)vf);
8968 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8969 }
8970#else
8971 vf->type_original = vf->type;
8972 pic->vf_ref = 1;
8973 hevc->vf_pre_count++;
8974 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8975 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
8976 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8977
8978 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8979 hevc_print(hevc, 0,
8980 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8981 __func__, vf->type, vf->index,
8982 get_pic_poc(hevc, vf->index & 0xff),
8983 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8984 vf->pts, vf->pts_us64,
8985 vf->duration);
8986#endif
8987#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8988 /*count info*/
8989 vdec_count_info(gvs, 0, stream_offset);
8990#endif
8991 hw_to_vdec(hevc)->vdec_fps_detec(hw_to_vdec(hevc)->id);
8992 if (without_display_mode == 0) {
8993 vf_notify_receiver(hevc->provider_name,
8994 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
8995 }
8996 else
8997 vh265_vf_put(vh265_vf_get(vdec), vdec);
8998 }
8999
9000 return 0;
9001}
9002
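/*
 * For the v4l2 path, signal EOS by queuing a dummy vframe flagged
 * VFRAME_FLAG_EMPTY_FRAME_V4L; wait (up to 2 seconds) for a free
 * buffer index, or fall back to asking v4l for a buffer.
 */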
9003static int notify_v4l_eos(struct vdec_s *vdec)
9004{
9005 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
9006 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
9007 struct vframe_s *vf = &hw->vframe_dummy;
9008 struct vdec_v4l2_buffer *fb = NULL;
9009 int index = INVALID_IDX;
9010 ulong expires;
9011
9012 if (hw->is_used_v4l && hw->eos) {
9013 expires = jiffies + msecs_to_jiffies(2000);
9014 while (INVALID_IDX == (index = get_free_buf_idx(hw))) {
9015 if (time_after(jiffies, expires))
9016 break;
9017 }
9018
9019 if (index == INVALID_IDX) {
9020 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb) < 0) {
9021				pr_err("[%d] EOS get free buffer fail.\n", ctx->id);
9022 return -1;
9023 }
9024 }
9025
9026 vf->type |= VIDTYPE_V4L_EOS;
9027 vf->timestamp = ULONG_MAX;
9028 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
9029 vf->v4l_mem_handle = (index == INVALID_IDX) ? (ulong)fb :
9030 hw->m_BUF[index].v4l_ref_buf_addr;
9031 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
9032 vf_notify_receiver(vdec->vf_provider_name,
9033 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9034
9035 pr_info("[%d] H265 EOS notify.\n", ctx->id);
9036 }
9037
9038 return 0;
9039}
9040
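/*
 * Parse a mastering display colour volume SEI (payload type 137)
 * directly from the bitstream via HEVC_SHIFTED_DATA, consuming
 * 16 bits per HEVC_SHIFT_COMMAND, then skip any trailing bytes.
 */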
9041static void process_nal_sei(struct hevc_state_s *hevc,
9042 int payload_type, int payload_size)
9043{
9044 unsigned short data;
9045
9046 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9047 hevc_print(hevc, 0,
9048 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
9049 payload_type, payload_size);
9050
9051 if (payload_type == 137) {
9052 int i, j;
9053 /* MASTERING_DISPLAY_COLOUR_VOLUME */
9054 if (payload_size >= 24) {
9055 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9056 hevc_print(hevc, 0,
9057 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
9058 for (i = 0; i < 3; i++) {
9059 for (j = 0; j < 2; j++) {
9060 data =
9061 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9062 hevc->primaries[i][j] = data;
9063 WRITE_HREG(HEVC_SHIFT_COMMAND,
9064 (1<<7)|16);
9065 if (get_dbg_flag(hevc) &
9066 H265_DEBUG_PRINT_SEI)
9067 hevc_print(hevc, 0,
9068 "\t\tprimaries[%1d][%1d] = %04x\n",
9069 i, j, hevc->primaries[i][j]);
9070 }
9071 }
9072 for (i = 0; i < 2; i++) {
9073 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9074 hevc->white_point[i] = data;
9075 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
9076 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9077 hevc_print(hevc, 0,
9078 "\t\twhite_point[%1d] = %04x\n",
9079 i, hevc->white_point[i]);
9080 }
9081 for (i = 0; i < 2; i++) {
9082 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9083 hevc->luminance[i] = data << 16;
9084 WRITE_HREG(HEVC_SHIFT_COMMAND,
9085 (1<<7)|16);
9086 data =
9087 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9088 hevc->luminance[i] |= data;
9089 WRITE_HREG(HEVC_SHIFT_COMMAND,
9090 (1<<7)|16);
9091 if (get_dbg_flag(hevc) &
9092 H265_DEBUG_PRINT_SEI)
9093 hevc_print(hevc, 0,
9094 "\t\tluminance[%1d] = %08x\n",
9095 i, hevc->luminance[i]);
9096 }
9097 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
9098 }
9099 payload_size -= 24;
9100 while (payload_size > 0) {
9101 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
9102 payload_size--;
9103 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
9104 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
9105 }
9106 }
9107}
9108
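/*
 * Error recovery for the non-multi-instance path: stop the ucode,
 * rebuild the 64-bit shift byte count modulo the stream buffer
 * size, reset the HEVC stream/decoder hardware registers, restore
 * the workspace and picture list, and restart searching from the
 * next start code.
 */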
9109static int hevc_recover(struct hevc_state_s *hevc)
9110{
9111 int ret = -1;
9112 u32 rem;
9113 u64 shift_byte_count64;
9114 unsigned int hevc_shift_byte_count;
9115 unsigned int hevc_stream_start_addr;
9116 unsigned int hevc_stream_end_addr;
9117 unsigned int hevc_stream_rd_ptr;
9118 unsigned int hevc_stream_wr_ptr;
9119 unsigned int hevc_stream_control;
9120 unsigned int hevc_stream_fifo_ctl;
9121 unsigned int hevc_stream_buf_size;
9122
9123 mutex_lock(&vh265_mutex);
9124#if 0
9125 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9126 int ii;
9127
9128 for (ii = 0; ii < 4; ii++)
9129 hevc_print(hevc, 0,
9130 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9131 if (((i + ii) & 0xf) == 0)
9132 hevc_print(hevc, 0, "\n");
9133 }
9134#endif
9135#define ES_VID_MAN_RD_PTR (1<<0)
9136 if (!hevc->init_flag) {
9137 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
9138 mutex_unlock(&vh265_mutex);
9139 return ret;
9140 }
9141 amhevc_stop();
9142 msleep(20);
9143 ret = 0;
9144 /* reset */
9145 WRITE_PARSER_REG(PARSER_VIDEO_RP, READ_VREG(HEVC_STREAM_RD_PTR));
9146 SET_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9147
9148 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
9149 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
9150 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
9151 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
9152 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
9153 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
9154 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
9155
9156 /* HEVC streaming buffer will reset and restart
9157 * from current hevc_stream_rd_ptr position
9158 */
9159 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
9160 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9161 if ((hevc->shift_byte_count_lo & (1 << 31))
9162 && ((hevc_shift_byte_count & (1 << 31)) == 0))
9163 hevc->shift_byte_count_hi++;
9164
9165 hevc->shift_byte_count_lo = hevc_shift_byte_count;
9166 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9167 hevc->shift_byte_count_lo;
9168 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9169 shift_byte_count64 -= rem;
9170 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9171
9172 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9173 shift_byte_count64 += hevc_stream_buf_size;
9174
9175 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9176 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
9177
9178 WRITE_VREG(DOS_SW_RESET3,
9179 /* (1<<2)| */
9180 (1 << 3) | (1 << 4) | (1 << 8) |
9181 (1 << 11) | (1 << 12) | (1 << 14)
9182 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9183 WRITE_VREG(DOS_SW_RESET3, 0);
9184
9185 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9186 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9187 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9188 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9189 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9190 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9191 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9192
9193 hevc_config_work_space_hw(hevc);
9194 decoder_hw_reset();
9195
9196 hevc->have_vps = 0;
9197 hevc->have_sps = 0;
9198 hevc->have_pps = 0;
9199
9200 hevc->have_valid_start_slice = 0;
9201
9202 if (get_double_write_mode(hevc) & 0x10)
9203 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9204 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
9205 );
9206
9207 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9208 /* clear mailbox interrupt */
9209 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9210 /* enable mailbox interrupt */
9211 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9212 /* disable PSCALE for hardware sharing */
9213 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9214
9215 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9216
9217 WRITE_VREG(DEBUG_REG1, 0x0);
9218
9219 if ((error_handle_policy & 1) == 0) {
9220 if ((error_handle_policy & 4) == 0) {
9221 /* ucode auto mode, and do not check vps/sps/pps/idr */
9222 WRITE_VREG(NAL_SEARCH_CTL,
9223 0xc);
9224 } else {
9225 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9226 }
9227 } else {
9228 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9229 }
9230
9231 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9232 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9233 WRITE_VREG(NAL_SEARCH_CTL,
9234 READ_VREG(NAL_SEARCH_CTL)
9235 | ((parser_sei_enable & 0x7) << 17));
9236#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9237 WRITE_VREG(NAL_SEARCH_CTL,
9238 READ_VREG(NAL_SEARCH_CTL) |
9239 ((parser_dolby_vision_enable & 0x1) << 20));
9240#endif
9241 config_decode_mode(hevc);
9242 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9243
9244 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9245 /* amhevc_disable(); */
9246 /* return -EBUSY; */
9247 /* } */
9248#if 0
9249 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9250 int ii;
9251
9252 for (ii = 0; ii < 4; ii++) {
9253 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9254 hevc_print(hevc, 0,
9255 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9256 }
9257 if (((i + ii) & 0xf) == 0)
9258 hevc_print(hevc, 0, "\n");
9259 }
9260#endif
9261 init_pic_list_hw(hevc);
9262
9263 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9264 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9265
9266#ifdef SWAP_HEVC_UCODE
9267 if (!tee_enabled() && hevc->is_swap &&
9268 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9269 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9270 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9271 }
9272#endif
9273 amhevc_start();
9274
9275 /* skip, search next start code */
9276 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9277 hevc->skip_flag = 1;
9278#ifdef ERROR_HANDLE_DEBUG
9279 if (dbg_nal_skip_count & 0x20000) {
9280 dbg_nal_skip_count &= ~0x20000;
9281 mutex_unlock(&vh265_mutex);
9282 return ret;
9283 }
9284#endif
9285 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9286	/* Interrupt Amrisc to execute */
9287 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9288#ifdef MULTI_INSTANCE_SUPPORT
9289 if (!hevc->m_ins_flag)
9290#endif
9291 hevc->first_pic_after_recover = 1;
9292 mutex_unlock(&vh265_mutex);
9293 return ret;
9294}
9295
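/* Hex dump of the prefix/suffix aux (SEI metadata) buffers for debug. */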
9296static void dump_aux_buf(struct hevc_state_s *hevc)
9297{
9298 int i;
9299 unsigned short *aux_adr =
9300 (unsigned short *)
9301 hevc->aux_addr;
9302 unsigned int aux_size =
9303 (READ_VREG(HEVC_AUX_DATA_SIZE)
9304 >> 16) << 4;
9305
9306 if (hevc->prefix_aux_size > 0) {
9307 hevc_print(hevc, 0,
9308 "prefix aux: (size %d)\n",
9309 aux_size);
9310 for (i = 0; i <
9311 (aux_size >> 1); i++) {
9312 hevc_print_cont(hevc, 0,
9313 "%04x ",
9314 *(aux_adr + i));
9315 if (((i + 1) & 0xf)
9316 == 0)
9317 hevc_print_cont(hevc,
9318 0, "\n");
9319 }
9320 }
9321 if (hevc->suffix_aux_size > 0) {
9322 aux_adr = (unsigned short *)
9323 (hevc->aux_addr +
9324 hevc->prefix_aux_size);
9325 aux_size =
9326 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9327 << 4;
9328 hevc_print(hevc, 0,
9329 "suffix aux: (size %d)\n",
9330 aux_size);
9331 for (i = 0; i <
9332 (aux_size >> 1); i++) {
9333 hevc_print_cont(hevc, 0,
9334 "%04x ", *(aux_adr + i));
9335 if (((i + 1) & 0xf) == 0)
9336 hevc_print_cont(hevc, 0, "\n");
9337 }
9338 }
9339}
9340
9341#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9342static void dolby_get_meta(struct hevc_state_s *hevc)
9343{
9344 struct vdec_s *vdec = hw_to_vdec(hevc);
9345
9346 if (get_dbg_flag(hevc) &
9347 H265_DEBUG_BUFMGR_MORE)
9348 dump_aux_buf(hevc);
9349 if (vdec->dolby_meta_with_el || vdec->slave) {
9350 set_aux_data(hevc,
9351 hevc->cur_pic, 0, 0);
9352 } else if (vdec->master) {
9353 struct hevc_state_s *hevc_ba =
9354 (struct hevc_state_s *)
9355 vdec->master->private;
9356 /*do not use hevc_ba*/
9357 set_aux_data(hevc,
9358 hevc_ba->cur_pic,
9359 0, 1);
9360 set_aux_data(hevc,
9361 hevc->cur_pic, 0, 2);
9362 }
9363}
9364#endif
9365
9366static void read_decode_info(struct hevc_state_s *hevc)
9367{
9368 uint32_t decode_info =
9369 READ_HREG(HEVC_DECODE_INFO);
9370 hevc->start_decoding_flag |=
9371 (decode_info & 0xff);
9372 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9373}
9374
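/*
 * Threaded half of the decoder interrupt: handles the error
 * recovery paths first, then dispatches on dec_status (buffer
 * empty, NAL search done, picture data done, next PIC/DVEL NAL
 * found) and schedules the work function with the matching
 * dec_result.
 */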
9375static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9376{
9377 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9378 unsigned int dec_status = hevc->dec_status;
9379 int i, ret;
9380
9381#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9382 struct vdec_s *vdec = hw_to_vdec(hevc);
9383#endif
9384
9385 if (hevc->eos)
9386 return IRQ_HANDLED;
9387 if (
9388#ifdef MULTI_INSTANCE_SUPPORT
9389 (!hevc->m_ins_flag) &&
9390#endif
9391 hevc->error_flag == 1) {
9392 if ((error_handle_policy & 0x10) == 0) {
9393 if (hevc->cur_pic) {
9394 int current_lcu_idx =
9395 READ_VREG(HEVC_PARSER_LCU_START)
9396 & 0xffffff;
9397 if (current_lcu_idx <
9398 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9399 hevc->cur_pic->error_mark = 1;
9400
9401 }
9402 }
9403 if ((error_handle_policy & 1) == 0) {
9404 hevc->error_skip_nal_count = 1;
9405 /* manual search nal, skip error_skip_nal_count
9406			/* manual NAL search, skip error_skip_nal_count
9407			 * NALs and trigger the HEVC_NAL_SEARCH_DONE irq
9408 WRITE_VREG(NAL_SEARCH_CTL,
9409 (error_skip_nal_count << 4) | 0x1);
9410 } else {
9411 hevc->error_skip_nal_count = error_skip_nal_count;
9412 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9413 }
9414 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9415#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9416 || vdec->master
9417 || vdec->slave
9418#endif
9419 ) {
9420 WRITE_VREG(NAL_SEARCH_CTL,
9421 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9422 }
9423 WRITE_VREG(NAL_SEARCH_CTL,
9424 READ_VREG(NAL_SEARCH_CTL)
9425 | ((parser_sei_enable & 0x7) << 17));
9426#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9427 WRITE_VREG(NAL_SEARCH_CTL,
9428 READ_VREG(NAL_SEARCH_CTL) |
9429 ((parser_dolby_vision_enable & 0x1) << 20));
9430#endif
9431 config_decode_mode(hevc);
9432 /* search new nal */
9433 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9434 /* Interrupt Amrisc to excute */
9435		/* Interrupt Amrisc to execute */
9436
9437 /* hevc_print(hevc, 0,
9438 *"%s: error handle\n", __func__);
9439 */
9440 hevc->error_flag = 2;
9441 return IRQ_HANDLED;
9442 } else if (
9443#ifdef MULTI_INSTANCE_SUPPORT
9444 (!hevc->m_ins_flag) &&
9445#endif
9446 hevc->error_flag == 3) {
9447 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9448 hevc_recover(hevc);
9449 hevc->error_flag = 0;
9450
9451 if ((error_handle_policy & 0x10) == 0) {
9452 if (hevc->cur_pic) {
9453 int current_lcu_idx =
9454 READ_VREG(HEVC_PARSER_LCU_START)
9455 & 0xffffff;
9456 if (current_lcu_idx <
9457 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9458 hevc->cur_pic->error_mark = 1;
9459
9460 }
9461 }
9462 if ((error_handle_policy & 1) == 0) {
9463			/* need to skip some data when
9464			 * error_flag 3 is triggered,
9465			 * to avoid hevc_recover() being called
9466			 * many times at the same
9467			 * bitstream position
9468			 */
9469 hevc->error_skip_nal_count = 1;
9470 /* manual search nal, skip error_skip_nal_count
9471			/* manual NAL search, skip error_skip_nal_count
9472			 * NALs and trigger the HEVC_NAL_SEARCH_DONE irq
9473 WRITE_VREG(NAL_SEARCH_CTL,
9474 (error_skip_nal_count << 4) | 0x1);
9475 }
9476
9477 if ((error_handle_policy & 0x2) == 0) {
9478 hevc->have_vps = 1;
9479 hevc->have_sps = 1;
9480 hevc->have_pps = 1;
9481 }
9482 return IRQ_HANDLED;
9483 }
9484 if (!hevc->m_ins_flag) {
9485 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9486 if ((hevc->shift_byte_count_lo & (1 << 31))
9487 && ((i & (1 << 31)) == 0))
9488 hevc->shift_byte_count_hi++;
9489 hevc->shift_byte_count_lo = i;
9490 }
9491#ifdef MULTI_INSTANCE_SUPPORT
9492 mutex_lock(&hevc->chunks_mutex);
9493 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9494 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9495 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9496 && (hevc->chunk)) {
9497 hevc->cur_pic->pts = hevc->chunk->pts;
9498 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9499 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9500 }
9501 mutex_unlock(&hevc->chunks_mutex);
9502
9503 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9504 dec_status == HEVC_DECODE_BUFEMPTY2) {
9505 if (hevc->m_ins_flag) {
9506 read_decode_info(hevc);
9507 if (vdec_frame_based(hw_to_vdec(hevc))) {
9508 hevc->empty_flag = 1;
9509 goto pic_done;
9510 } else {
9511 if (
9512#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9513 vdec->master ||
9514 vdec->slave ||
9515#endif
9516 (data_resend_policy & 0x1)) {
9517 hevc->dec_result = DEC_RESULT_AGAIN;
9518 amhevc_stop();
9519 restore_decode_state(hevc);
9520 } else
9521 hevc->dec_result = DEC_RESULT_GET_DATA;
9522 }
9523 reset_process_time(hevc);
9524 vdec_schedule_work(&hevc->work);
9525 }
9526 return IRQ_HANDLED;
9527 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9528 (dec_status == HEVC_NAL_DECODE_DONE)
9529 ) {
9530 if (hevc->m_ins_flag) {
9531 read_decode_info(hevc);
9532 if (vdec_frame_based(hw_to_vdec(hevc))) {
9533 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9534 hevc->empty_flag = 1;
9535 goto pic_done;
9536 } else {
9537 hevc->dec_result = DEC_RESULT_AGAIN;
9538 amhevc_stop();
9539 restore_decode_state(hevc);
9540 }
9541
9542 reset_process_time(hevc);
9543 vdec_schedule_work(&hevc->work);
9544 }
9545
9546 return IRQ_HANDLED;
9547 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9548 if (hevc->m_ins_flag) {
9549 struct PIC_s *pic;
9550 struct PIC_s *pic_display;
9551 int decoded_poc;
9552#ifdef DETREFILL_ENABLE
9553 if (hevc->is_swap &&
9554 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9555 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9556 && READ_VREG(HEVC_SAO_DBG_MODE0))
9557 hevc->delrefill_check = 2;
9558 }
9559#endif
9560 hevc->empty_flag = 0;
9561pic_done:
9562 if (input_frame_based(hw_to_vdec(hevc)) &&
9563 frmbase_cont_bitlevel != 0 &&
9564 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9565 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9566 > frmbase_cont_bitlevel)) {
9567 /*handle the case: multi pictures in one packet*/
9568 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9569				"%s has more data (index=%d, size=0x%x, shiftcnt=0x%x)\n",
9570 __func__,
9571 hevc->decode_idx, hevc->decode_size,
9572 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9573 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9574 start_process_time(hevc);
9575 return IRQ_HANDLED;
9576 }
9577
9578 read_decode_info(hevc);
9579 get_picture_qos_info(hevc);
9580#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9581 hevc->start_parser_type = 0;
9582 hevc->switch_dvlayer_flag = 0;
9583#endif
9584 hevc->decoded_poc = hevc->curr_POC;
9585 hevc->decoding_pic = NULL;
9586 hevc->dec_result = DEC_RESULT_DONE;
9587#ifdef DETREFILL_ENABLE
9588 if (hevc->is_swap &&
9589 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9590 if (hevc->delrefill_check != 2)
9591#endif
9592
9593 amhevc_stop();
9594
9595 reset_process_time(hevc);
9596
9597 if (hevc->vf_pre_count == 0) {
9598 decoded_poc = hevc->curr_POC;
9599 pic = get_pic_by_POC(hevc, decoded_poc);
9600 if (pic && (pic->POC != INVALID_POC)) {
9601 /*PB skip control */
9602 if (pic->error_mark == 0
9603 && hevc->PB_skip_mode == 1) {
9604 /* start decoding after
9605 * first I
9606 */
9607 hevc->ignore_bufmgr_error |= 0x1;
9608 }
9609 if (hevc->ignore_bufmgr_error & 1) {
9610 if (hevc->PB_skip_count_after_decoding > 0) {
9611 hevc->PB_skip_count_after_decoding--;
9612 } else {
9613 /* start displaying */
9614 hevc->ignore_bufmgr_error |= 0x2;
9615 }
9616 }
9617 if (hevc->mmu_enable
9618 && ((hevc->double_write_mode & 0x10) == 0)) {
9619 if (!hevc->m_ins_flag) {
9620 hevc->used_4k_num =
9621 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9622
9623 if ((!is_skip_decoding(hevc, pic)) &&
9624 (hevc->used_4k_num >= 0) &&
9625 (hevc->cur_pic->scatter_alloc
9626 == 1)) {
9627 hevc_print(hevc,
9628 H265_DEBUG_BUFMGR_MORE,
9629 "%s pic index %d scatter_alloc %d page_start %d\n",
9630 "decoder_mmu_box_free_idx_tail",
9631 hevc->cur_pic->index,
9632 hevc->cur_pic->scatter_alloc,
9633 hevc->used_4k_num);
9634 decoder_mmu_box_free_idx_tail(
9635 hevc->mmu_box,
9636 hevc->cur_pic->index,
9637 hevc->used_4k_num);
9638 hevc->cur_pic->scatter_alloc
9639 = 2;
9640 }
9641 hevc->used_4k_num = -1;
9642 }
9643 }
9644
9645 pic->output_mark = 1;
9646 pic->recon_mark = 1;
9647 }
9648 check_pic_decoded_error(hevc,
9649 READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff);
9650 if (hevc->cur_pic != NULL &&
9651 (READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff) == 0
9652 && (hevc->lcu_x_num * hevc->lcu_y_num != 1))
9653 hevc->cur_pic->error_mark = 1;
9654force_output:
9655 pic_display = output_pic(hevc, 1);
9656 if (pic_display) {
9657 if ((pic_display->error_mark &&
9658 ((hevc->ignore_bufmgr_error &
9659 0x2) == 0))
9660 || (get_dbg_flag(hevc) &
9661 H265_DEBUG_DISPLAY_CUR_FRAME)
9662 || (get_dbg_flag(hevc) &
9663 H265_DEBUG_NO_DISPLAY)) {
9664 pic_display->output_ready = 0;
9665 if (get_dbg_flag(hevc) &
9666 H265_DEBUG_BUFMGR) {
9667 hevc_print(hevc, 0,
9668 "[BM] Display: POC %d, ",
9669 pic_display->POC);
9670 hevc_print_cont(hevc, 0,
9671 "decoding index %d ==> ",
9672 pic_display->
9673 decode_idx);
9674 hevc_print_cont(hevc, 0,
9675 "Debug or err,recycle it\n");
9676 }
9677 } else {
9678 if (pic_display->
9679 slice_type != 2) {
9680 pic_display->output_ready = 0;
9681 } else {
9682 prepare_display_buf
9683 (hevc,
9684 pic_display);
9685 hevc->first_pic_flag = 1;
9686 }
9687 }
9688 }
9689 }
9690
9691 vdec_schedule_work(&hevc->work);
9692 }
9693
9694 return IRQ_HANDLED;
9695#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9696 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9697 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9698 if (hevc->m_ins_flag) {
9699 unsigned char next_parser_type =
9700 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
9701 read_decode_info(hevc);
9702
9703 if (vdec->slave &&
9704 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9705 /*cur is base, found enhance*/
9706 struct hevc_state_s *hevc_el =
9707 (struct hevc_state_s *)
9708 vdec->slave->private;
9709 hevc->switch_dvlayer_flag = 1;
9710 hevc->no_switch_dvlayer_count = 0;
9711 hevc_el->start_parser_type =
9712 next_parser_type;
9713 hevc_print(hevc, H265_DEBUG_DV,
9714 "switch (poc %d) to el\n",
9715 hevc->cur_pic ?
9716 hevc->cur_pic->POC :
9717 INVALID_POC);
9718 } else if (vdec->master &&
9719 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
9720 /*cur is enhance, found base*/
9721 struct hevc_state_s *hevc_ba =
9722 (struct hevc_state_s *)
9723 vdec->master->private;
9724 hevc->switch_dvlayer_flag = 1;
9725 hevc->no_switch_dvlayer_count = 0;
9726 hevc_ba->start_parser_type =
9727 next_parser_type;
9728 hevc_print(hevc, H265_DEBUG_DV,
9729 "switch (poc %d) to bl\n",
9730 hevc->cur_pic ?
9731 hevc->cur_pic->POC :
9732 INVALID_POC);
9733 } else {
9734 hevc->switch_dvlayer_flag = 0;
9735 hevc->start_parser_type =
9736 next_parser_type;
9737 hevc->no_switch_dvlayer_count++;
9738 hevc_print(hevc, H265_DEBUG_DV,
9739 "%s: no_switch_dvlayer_count = %d\n",
9740 vdec->master ? "el" : "bl",
9741 hevc->no_switch_dvlayer_count);
9742 if (vdec->slave &&
9743 dolby_el_flush_th != 0 &&
9744 hevc->no_switch_dvlayer_count >
9745 dolby_el_flush_th) {
9746 struct hevc_state_s *hevc_el =
9747 (struct hevc_state_s *)
9748 vdec->slave->private;
9749 struct PIC_s *el_pic;
9750 check_pic_decoded_error(hevc_el,
9751 hevc_el->pic_decoded_lcu_idx);
9752 el_pic = get_pic_by_POC(hevc_el,
9753 hevc_el->curr_POC);
9754 hevc_el->curr_POC = INVALID_POC;
9755 hevc_el->m_pocRandomAccess = MAX_INT;
9756 flush_output(hevc_el, el_pic);
9757				hevc_el->decoded_poc = INVALID_POC; /*
9758					flush_output already called */
9759 hevc_el->decoding_pic = NULL;
9760 hevc->no_switch_dvlayer_count = 0;
9761 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
9762 hevc_print(hevc, 0,
9763 "no el anymore, flush_output el\n");
9764 }
9765 }
9766 hevc->decoded_poc = hevc->curr_POC;
9767 hevc->decoding_pic = NULL;
9768 hevc->dec_result = DEC_RESULT_DONE;
9769 amhevc_stop();
9770 reset_process_time(hevc);
9771 if (aux_data_is_avaible(hevc))
9772 dolby_get_meta(hevc);
9773			if (hevc->cur_pic->slice_type == 2 &&
9774 hevc->vf_pre_count == 0) {
9775 hevc_print(hevc, 0,
9776 "first slice_type %x no_switch_dvlayer_count %x\n",
9777 hevc->cur_pic->slice_type,
9778 hevc->no_switch_dvlayer_count);
9779 goto force_output;
9780 }
9781 vdec_schedule_work(&hevc->work);
9782 }
9783
9784 return IRQ_HANDLED;
9785#endif
9786 }
9787
9788#endif
9789
9790 if (dec_status == HEVC_SEI_DAT) {
9791 if (!hevc->m_ins_flag) {
9792 int payload_type =
9793 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
9794 int payload_size =
9795 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
9796 process_nal_sei(hevc,
9797 payload_type, payload_size);
9798 }
9799 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
9800 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
9801 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
9802 int parse_type = HEVC_DISCARD_NAL;
9803
9804 hevc->error_watchdog_count = 0;
9805 hevc->error_skip_nal_wt_cnt = 0;
9806#ifdef MULTI_INSTANCE_SUPPORT
9807 if (hevc->m_ins_flag)
9808 reset_process_time(hevc);
9809#endif
9810 if (slice_parse_begin > 0 &&
9811 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
9812 hevc_print(hevc, 0,
9813 "nal type %d, discard %d\n", naltype,
9814 slice_parse_begin);
9815 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
9816 slice_parse_begin--;
9817 }
9818 if (naltype == NAL_UNIT_EOS) {
9819 struct PIC_s *pic;
9820
9821 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
9822#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9823 if ((vdec->master || vdec->slave) &&
9824 aux_data_is_avaible(hevc)) {
9825 if (hevc->decoding_pic)
9826 dolby_get_meta(hevc);
9827 }
9828#endif
9829 check_pic_decoded_error(hevc,
9830 hevc->pic_decoded_lcu_idx);
9831 pic = get_pic_by_POC(hevc, hevc->curr_POC);
9832 hevc->curr_POC = INVALID_POC;
9833 /* add to fix RAP_B_Bossen_1 */
9834 hevc->m_pocRandomAccess = MAX_INT;
9835 flush_output(hevc, pic);
9836 clear_poc_flag(hevc);
9837 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
9838			/* Interrupt Amrisc to execute */
9839 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9840#ifdef MULTI_INSTANCE_SUPPORT
9841 if (hevc->m_ins_flag) {
9842				hevc->decoded_poc = INVALID_POC; /*
9843					flush_output already called */
9844 hevc->decoding_pic = NULL;
9845 hevc->dec_result = DEC_RESULT_DONE;
9846 amhevc_stop();
9847
9848 vdec_schedule_work(&hevc->work);
9849 }
9850#endif
9851 return IRQ_HANDLED;
9852 }
9853
9854 if (
9855#ifdef MULTI_INSTANCE_SUPPORT
9856 (!hevc->m_ins_flag) &&
9857#endif
9858 hevc->error_skip_nal_count > 0) {
9859 hevc_print(hevc, 0,
9860 "nal type %d, discard %d\n", naltype,
9861 hevc->error_skip_nal_count);
9862 hevc->error_skip_nal_count--;
9863 if (hevc->error_skip_nal_count == 0) {
9864 hevc_recover(hevc);
9865 hevc->error_flag = 0;
9866 if ((error_handle_policy & 0x2) == 0) {
9867 hevc->have_vps = 1;
9868 hevc->have_sps = 1;
9869 hevc->have_pps = 1;
9870 }
9871 return IRQ_HANDLED;
9872 }
9873 } else if (naltype == NAL_UNIT_VPS) {
9874 parse_type = HEVC_NAL_UNIT_VPS;
9875 hevc->have_vps = 1;
9876#ifdef ERROR_HANDLE_DEBUG
9877 if (dbg_nal_skip_flag & 1)
9878 parse_type = HEVC_DISCARD_NAL;
9879#endif
9880 } else if (hevc->have_vps) {
9881 if (naltype == NAL_UNIT_SPS) {
9882 parse_type = HEVC_NAL_UNIT_SPS;
9883 hevc->have_sps = 1;
9884#ifdef ERROR_HANDLE_DEBUG
9885 if (dbg_nal_skip_flag & 2)
9886 parse_type = HEVC_DISCARD_NAL;
9887#endif
9888 } else if (naltype == NAL_UNIT_PPS) {
9889 parse_type = HEVC_NAL_UNIT_PPS;
9890 hevc->have_pps = 1;
9891#ifdef ERROR_HANDLE_DEBUG
9892 if (dbg_nal_skip_flag & 4)
9893 parse_type = HEVC_DISCARD_NAL;
9894#endif
9895 } else if (hevc->have_sps && hevc->have_pps) {
9896 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
9897
9898 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
9899 (naltype ==
9900 NAL_UNIT_CODED_SLICE_IDR_N_LP)
9901 || (naltype ==
9902 NAL_UNIT_CODED_SLICE_CRA)
9903 || (naltype ==
9904 NAL_UNIT_CODED_SLICE_BLA)
9905 || (naltype ==
9906 NAL_UNIT_CODED_SLICE_BLANT)
9907 || (naltype ==
9908 NAL_UNIT_CODED_SLICE_BLA_N_LP)
9909 ) {
9910 if (slice_parse_begin > 0) {
9911 hevc_print(hevc, 0,
9912 "discard %d, for debugging\n",
9913 slice_parse_begin);
9914 slice_parse_begin--;
9915 } else {
9916 parse_type = seg;
9917 }
9918 hevc->have_valid_start_slice = 1;
9919 } else if (naltype <=
9920 NAL_UNIT_CODED_SLICE_CRA
9921 && (hevc->have_valid_start_slice
9922 || (hevc->PB_skip_mode != 3))) {
9923 if (slice_parse_begin > 0) {
9924 hevc_print(hevc, 0,
9925 "discard %d, dd\n",
9926 slice_parse_begin);
9927 slice_parse_begin--;
9928 } else
9929 parse_type = seg;
9930
9931 }
9932 }
9933 }
9934 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
9935 && hevc->have_valid_start_slice &&
9936 hevc->error_flag == 0) {
9937 if ((get_dbg_flag(hevc) &
9938 H265_DEBUG_MAN_SEARCH_NAL) == 0
9939 /* && (!hevc->m_ins_flag)*/) {
9940				/* auto parser NAL; do not check
9941				 * vps/sps/pps/idr
9942				 */
9943 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
9944 }
9945
9946 if ((get_dbg_flag(hevc) &
9947 H265_DEBUG_NO_EOS_SEARCH_DONE)
9948#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9949 || vdec->master
9950 || vdec->slave
9951#endif
9952 ) {
9953 WRITE_VREG(NAL_SEARCH_CTL,
9954 READ_VREG(NAL_SEARCH_CTL) |
9955 0x10000);
9956 }
9957 WRITE_VREG(NAL_SEARCH_CTL,
9958 READ_VREG(NAL_SEARCH_CTL)
9959 | ((parser_sei_enable & 0x7) << 17));
9960#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9961 WRITE_VREG(NAL_SEARCH_CTL,
9962 READ_VREG(NAL_SEARCH_CTL) |
9963 ((parser_dolby_vision_enable & 0x1) << 20));
9964#endif
9965 config_decode_mode(hevc);
9966 }
9967
9968 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
9969 hevc_print(hevc, 0,
9970 "naltype = %d parse_type %d\n %d %d %d %d\n",
9971 naltype, parse_type, hevc->have_vps,
9972 hevc->have_sps, hevc->have_pps,
9973 hevc->have_valid_start_slice);
9974 }
9975
9976 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
9977		/* Interrupt Amrisc to execute */
9978 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9979#ifdef MULTI_INSTANCE_SUPPORT
9980 if (hevc->m_ins_flag)
9981 start_process_time(hevc);
9982#endif
9983 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
9984#ifdef MULTI_INSTANCE_SUPPORT
9985 if (hevc->m_ins_flag) {
9986 reset_process_time(hevc);
9987 read_decode_info(hevc);
9988
9989 }
9990#endif
9991 if (hevc->start_decoding_time > 0) {
9992 u32 process_time = 1000*
9993 (jiffies - hevc->start_decoding_time)/HZ;
9994 if (process_time > max_decoding_time)
9995 max_decoding_time = process_time;
9996 }
9997
9998 hevc->error_watchdog_count = 0;
9999 if (hevc->pic_list_init_flag == 2) {
10000 hevc->pic_list_init_flag = 3;
10001 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
10002 } else if (hevc->wait_buf == 0) {
10003 u32 vui_time_scale;
10004 u32 vui_num_units_in_tick;
10005 unsigned char reconfig_flag = 0;
10006
10007 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
10008 get_rpm_param(&hevc->param);
10009 else {
10010
10011 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
10012 int ii;
10013
10014 for (ii = 0; ii < 4; ii++) {
10015 hevc->param.l.data[i + ii] =
10016 hevc->rpm_ptr[i + 3
10017 - ii];
10018 }
10019 }
10020#ifdef SEND_LMEM_WITH_RPM
10021 check_head_error(hevc);
10022#endif
10023 }
10024 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
10025 hevc_print(hevc, 0,
10026 "rpm_param: (%d)\n", hevc->slice_idx);
10027 hevc->slice_idx++;
10028 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
10029 hevc_print_cont(hevc, 0,
10030 "%04x ", hevc->param.l.data[i]);
10031 if (((i + 1) & 0xf) == 0)
10032 hevc_print_cont(hevc, 0, "\n");
10033 }
10034
10035 hevc_print(hevc, 0,
10036 "vui_timing_info: %x, %x, %x, %x\n",
10037 hevc->param.p.vui_num_units_in_tick_hi,
10038 hevc->param.p.vui_num_units_in_tick_lo,
10039 hevc->param.p.vui_time_scale_hi,
10040 hevc->param.p.vui_time_scale_lo);
10041 }
10042
10043 if (hevc->is_used_v4l) {
10044 struct aml_vcodec_ctx *ctx =
10045 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
10046
10047 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
10048 struct aml_vdec_ps_infos ps;
10049
10050 hevc->frame_width = hevc->param.p.pic_width_in_luma_samples;
10051 hevc->frame_height = hevc->param.p.pic_height_in_luma_samples;
10052 ps.visible_width = hevc->frame_width;
10053 ps.visible_height = hevc->frame_height;
10054 ps.coded_width = ALIGN(hevc->frame_width, 32);
10055 ps.coded_height = ALIGN(hevc->frame_height, 32);
10056 ps.dpb_size = get_work_pic_num(hevc);
10057 hevc->v4l_params_parsed = true;
10058				/* notify the v4l2 codec. */
10059 vdec_v4l_set_ps_infos(ctx, &ps);
10060 }
10061 }
10062
10063 if (
10064#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10065 vdec->master == NULL &&
10066 vdec->slave == NULL &&
10067#endif
10068 aux_data_is_avaible(hevc)
10069 ) {
10070
10071 if (get_dbg_flag(hevc) &
10072 H265_DEBUG_BUFMGR_MORE)
10073 dump_aux_buf(hevc);
10074 }
10075
10076 vui_time_scale =
10077 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
10078 hevc->param.p.vui_time_scale_lo;
10079 vui_num_units_in_tick =
10080 (u32)(hevc->param.
10081 p.vui_num_units_in_tick_hi << 16) |
10082 hevc->param.
10083 p.vui_num_units_in_tick_lo;
10084 if (hevc->bit_depth_luma !=
10085 ((hevc->param.p.bit_depth & 0xf) + 8)) {
10086 reconfig_flag = 1;
10087 hevc_print(hevc, 0, "Bit depth luma = %d\n",
10088 (hevc->param.p.bit_depth & 0xf) + 8);
10089 }
10090 if (hevc->bit_depth_chroma !=
10091 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
10092 reconfig_flag = 1;
10093 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
10094 ((hevc->param.p.bit_depth >> 4) &
10095 0xf) + 8);
10096 }
10097 hevc->bit_depth_luma =
10098 (hevc->param.p.bit_depth & 0xf) + 8;
10099 hevc->bit_depth_chroma =
10100 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
10101 bit_depth_luma = hevc->bit_depth_luma;
10102 bit_depth_chroma = hevc->bit_depth_chroma;
10103#ifdef SUPPORT_10BIT
10104 if (hevc->bit_depth_luma == 8 &&
10105 hevc->bit_depth_chroma == 8 &&
10106 enable_mem_saving)
10107 hevc->mem_saving_mode = 1;
10108 else
10109 hevc->mem_saving_mode = 0;
10110#endif
10111 if (reconfig_flag &&
10112 (get_double_write_mode(hevc) & 0x10) == 0)
10113 init_decode_head_hw(hevc);
10114
10115 if ((vui_time_scale != 0)
10116 && (vui_num_units_in_tick != 0)) {
10117 hevc->frame_dur =
10118 div_u64(96000ULL *
10119 vui_num_units_in_tick,
10120 vui_time_scale);
10121 if (hevc->get_frame_dur != true)
10122 vdec_schedule_work(
10123 &hevc->notify_work);
10124
10125 hevc->get_frame_dur = true;
10126#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10127 gvs->frame_dur = hevc->frame_dur;
10128#endif
10129 }
10130
10131 if (hevc->video_signal_type !=
10132 ((hevc->param.p.video_signal_type << 16)
10133 | hevc->param.p.color_description)) {
10134 u32 v = hevc->param.p.video_signal_type;
10135 u32 c = hevc->param.p.color_description;
10136#if 0
10137 if (v & 0x2000) {
10138 hevc_print(hevc, 0,
10139 "video_signal_type present:\n");
10140 hevc_print(hevc, 0, " %s %s\n",
10141 video_format_names[(v >> 10) & 7],
10142 ((v >> 9) & 1) ?
10143 "full_range" : "limited");
10144 if (v & 0x100) {
10145 hevc_print(hevc, 0,
10146 " color_description present:\n");
10147 hevc_print(hevc, 0,
10148 " color_primarie = %s\n",
10149 color_primaries_names
10150 [v & 0xff]);
10151 hevc_print(hevc, 0,
10152 " transfer_characteristic = %s\n",
10153 transfer_characteristics_names
10154 [(c >> 8) & 0xff]);
10155 hevc_print(hevc, 0,
10156 " matrix_coefficient = %s\n",
10157 matrix_coeffs_names[c & 0xff]);
10158 }
10159 }
10160#endif
10161 hevc->video_signal_type = (v << 16) | c;
10162 video_signal_type = hevc->video_signal_type;
10163 }
10164
10165 if (use_cma &&
10166 (hevc->param.p.slice_segment_address == 0)
10167 && (hevc->pic_list_init_flag == 0)) {
10168 int log = hevc->param.p.log2_min_coding_block_size_minus3;
10169 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
10170
10171 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10172 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
10173 hevc->lcu_size = 1 << (log + 3 + log_s);
10174 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
10175 if (hevc->pic_w == 0 || hevc->pic_h == 0
10176 || hevc->lcu_size == 0
10177 || is_oversize(hevc->pic_w, hevc->pic_h)
10178 || (!hevc->skip_first_nal &&
10179 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
10180 /* skip search next start code */
10181 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10182 & (~0x2));
10183				if (!hevc->skip_first_nal &&
10184 (hevc->pic_h == 96) && (hevc->pic_w == 160))
10185 hevc->skip_first_nal = 1;
10186 hevc->skip_flag = 1;
10187 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10188				/* Interrupt Amrisc to execute */
10189 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10190#ifdef MULTI_INSTANCE_SUPPORT
10191 if (hevc->m_ins_flag)
10192 start_process_time(hevc);
10193#endif
10194 } else {
10195 hevc->sps_num_reorder_pics_0 =
10196 hevc->param.p.sps_num_reorder_pics_0;
10197 hevc->pic_list_init_flag = 1;
10198#ifdef MULTI_INSTANCE_SUPPORT
10199 if (hevc->m_ins_flag) {
10200 vdec_schedule_work(&hevc->work);
10201 } else
10202#endif
10203 up(&h265_sema);
10204 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10205 }
10206 return IRQ_HANDLED;
10207 }
10208
10209}
10210 ret =
10211 hevc_slice_segment_header_process(hevc,
10212 &hevc->param, decode_pic_begin);
10213 if (ret < 0) {
10214#ifdef MULTI_INSTANCE_SUPPORT
10215 if (hevc->m_ins_flag) {
10216 hevc->wait_buf = 0;
10217 hevc->dec_result = DEC_RESULT_AGAIN;
10218 amhevc_stop();
10219 restore_decode_state(hevc);
10220 reset_process_time(hevc);
10221 vdec_schedule_work(&hevc->work);
10222 return IRQ_HANDLED;
10223 }
10224#else
10225 ;
10226#endif
10227 } else if (ret == 0) {
10228 if ((hevc->new_pic) && (hevc->cur_pic)) {
10229 hevc->cur_pic->stream_offset =
10230 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10231 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10232 "read stream_offset = 0x%x\n",
10233 hevc->cur_pic->stream_offset);
10234 hevc->cur_pic->aspect_ratio_idc =
10235 hevc->param.p.aspect_ratio_idc;
10236 hevc->cur_pic->sar_width =
10237 hevc->param.p.sar_width;
10238 hevc->cur_pic->sar_height =
10239 hevc->param.p.sar_height;
10240 }
10241
10242 WRITE_VREG(HEVC_DEC_STATUS_REG,
10243 HEVC_CODED_SLICE_SEGMENT_DAT);
10244		/* Interrupt Amrisc to execute */
10245 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10246
10247 hevc->start_decoding_time = jiffies;
10248#ifdef MULTI_INSTANCE_SUPPORT
10249 if (hevc->m_ins_flag)
10250 start_process_time(hevc);
10251#endif
10252#if 1
10253 /*to do..., copy aux data to hevc->cur_pic*/
10254#endif
10255#ifdef MULTI_INSTANCE_SUPPORT
10256 } else if (hevc->m_ins_flag) {
10257 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10258 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10259 __func__, ret);
10260 hevc->decoded_poc = INVALID_POC;
10261 hevc->decoding_pic = NULL;
10262 hevc->dec_result = DEC_RESULT_DONE;
10263 amhevc_stop();
10264 reset_process_time(hevc);
10265 vdec_schedule_work(&hevc->work);
10266#endif
10267 } else {
10268 /* skip, search next start code */
10269#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10270 gvs->drop_frame_count++;
10271#endif
10272 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10273 hevc->skip_flag = 1;
10274 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10275		/* Interrupt Amrisc to execute */
10276 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10277 }
10278
10279 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10280		hevc_print(hevc, 0, "hevc decode oversize !!\n");
10281#ifdef MULTI_INSTANCE_SUPPORT
10282 if (!hevc->m_ins_flag)
10283 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10284 H265_DEBUG_DIS_SYS_ERROR_PROC);
10285#endif
10286 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10287 }
10288 return IRQ_HANDLED;
10289}
10290
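/*
 * Wait for the HEVC stream search engine to go idle: clear HEVC_SHIFT_STATUS
 * and poll bit 1 of HEVC_STREAM_CONTROL, giving up after roughly 2 seconds
 * (100 iterations of 20 ms). The register semantics are inferred from the
 * polling loop below.
 */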
10291static void wait_hevc_search_done(struct hevc_state_s *hevc)
10292{
10293 int count = 0;
10294 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10295 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10296 msleep(20);
10297 count++;
10298 if (count > 100) {
10299 hevc_print(hevc, 0, "%s timeout\n", __func__);
10300 break;
10301 }
10302 }
10303}
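/*
 * Top half of the decoder interrupt: latch HEVC_DEC_STATUS_REG, dump the
 * LMEM/debug registers when the ucode raised a debug tag (and honour the
 * udebug pause hooks), handle the single-instance "over decode" case, and
 * otherwise defer the real state machine to the threaded handler by
 * returning IRQ_WAKE_THREAD.
 */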
10304static irqreturn_t vh265_isr(int irq, void *data)
10305{
10306 int i, temp;
10307 unsigned int dec_status;
10308 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10309 u32 debug_tag;
10310 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10311
10312 if (hevc->init_flag == 0)
10313 return IRQ_HANDLED;
10314 hevc->dec_status = dec_status;
10315 if (is_log_enable(hevc))
10316 add_log(hevc,
10317 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10318 dec_status, READ_HREG(HEVC_DECODE_INFO),
10319 READ_VREG(HEVC_MPRED_CURR_LCU),
10320 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10321 READ_VREG(HEVC_SHIFT_STATUS));
10322
10323 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10324 hevc_print(hevc, 0,
10325 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10326 dec_status, READ_HREG(HEVC_DECODE_INFO),
10327 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10328 READ_VREG(HEVC_SHIFT_STATUS));
10329
10330 debug_tag = READ_HREG(DEBUG_REG1);
10331 if (debug_tag & 0x10000) {
10332 hevc_print(hevc, 0,
10333 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10334
10335 if (hevc->mmu_enable)
10336 temp = 0x500;
10337 else
10338 temp = 0x400;
10339 for (i = 0; i < temp; i += 4) {
10340 int ii;
10341 if ((i & 0xf) == 0)
10342 hevc_print_cont(hevc, 0, "%03x: ", i);
10343 for (ii = 0; ii < 4; ii++) {
10344 hevc_print_cont(hevc, 0, "%04x ",
10345 hevc->lmem_ptr[i + 3 - ii]);
10346 }
10347 if (((i + ii) & 0xf) == 0)
10348 hevc_print_cont(hevc, 0, "\n");
10349 }
10350
10351 if (((udebug_pause_pos & 0xffff)
10352 == (debug_tag & 0xffff)) &&
10353 (udebug_pause_decode_idx == 0 ||
10354 udebug_pause_decode_idx == hevc->decode_idx) &&
10355 (udebug_pause_val == 0 ||
10356 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10357 udebug_pause_pos &= 0xffff;
10358 hevc->ucode_pause_pos = udebug_pause_pos;
10359 }
10360 else if (debug_tag & 0x20000)
10361 hevc->ucode_pause_pos = 0xffffffff;
10362 if (hevc->ucode_pause_pos)
10363 reset_process_time(hevc);
10364 else
10365 WRITE_HREG(DEBUG_REG1, 0);
10366 } else if (debug_tag != 0) {
10367 hevc_print(hevc, 0,
10368 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10369 READ_HREG(DEBUG_REG2),
10370 READ_VREG(HEVC_STREAM_LEVEL),
10371 READ_VREG(HEVC_STREAM_WR_PTR),
10372 READ_VREG(HEVC_STREAM_RD_PTR));
10373 if (((udebug_pause_pos & 0xffff)
10374 == (debug_tag & 0xffff)) &&
10375 (udebug_pause_decode_idx == 0 ||
10376 udebug_pause_decode_idx == hevc->decode_idx) &&
10377 (udebug_pause_val == 0 ||
10378 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10379 udebug_pause_pos &= 0xffff;
10380 hevc->ucode_pause_pos = udebug_pause_pos;
10381 }
10382 if (hevc->ucode_pause_pos)
10383 reset_process_time(hevc);
10384 else
10385 WRITE_HREG(DEBUG_REG1, 0);
10386 return IRQ_HANDLED;
10387 }
10388
10389
10390 if (hevc->pic_list_init_flag == 1)
10391 return IRQ_HANDLED;
10392
10393 if (!hevc->m_ins_flag) {
10394 if (dec_status == HEVC_OVER_DECODE) {
10395 hevc->over_decode = 1;
10396			hevc_print(hevc, 0,
10397				"isr: over decode\n");
10398			WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10399 return IRQ_HANDLED;
10400 }
10401 }
10402
10403 return IRQ_WAKE_THREAD;
10404
10405}
10406
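/*
 * Worker that re-evaluates the decoder clock when the stream resolution or
 * frame rate changes; hevc_source_changed() presumably reprograms the clock
 * and returns > 0 when it actually changed something, in which case the new
 * resolution/fps product is remembered in saved_resolution.
 */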
10407static void vh265_set_clk(struct work_struct *work)
10408{
10409 struct hevc_state_s *hevc = container_of(work,
10410 struct hevc_state_s, set_clk_work);
10411
10412 int fps = 96000 / hevc->frame_dur;
10413
10414 if (hevc_source_changed(VFORMAT_HEVC,
10415 hevc->frame_width, hevc->frame_height, fps) > 0)
10416 hevc->saved_resolution = hevc->frame_width *
10417 hevc->frame_height * fps;
10418}
10419
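/*
 * Periodic watchdog, rearmed every PUT_INTERVAL jiffies. In multi-instance
 * mode it detects stalled decodes by watching the current LCU index against
 * decode_timeout_val and handles a pending disconnect request; in
 * single-instance mode it drives the error/recovery watchdog counters. It
 * also services a few debug hooks (register peek/poke, pic list dump) and
 * schedules the clock worker when the resolution or frame rate changed.
 */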
10420static void vh265_check_timer_func(unsigned long arg)
10421{
10422 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10423 struct timer_list *timer = &hevc->timer;
10424 unsigned char empty_flag;
10425 unsigned int buf_level;
10426
10427 enum receviver_start_e state = RECEIVER_INACTIVE;
10428
10429 if (hevc->init_flag == 0) {
10430 if (hevc->stat & STAT_TIMER_ARM) {
10431 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10432 }
10433 return;
10434 }
10435#ifdef MULTI_INSTANCE_SUPPORT
10436 if (hevc->m_ins_flag &&
10437 (get_dbg_flag(hevc) &
10438 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10439 hw_to_vdec(hevc)->next_status ==
10440 VDEC_STATUS_DISCONNECTED) {
10441 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10442 vdec_schedule_work(&hevc->work);
10443 hevc_print(hevc,
10444 0, "vdec requested to be disconnected\n");
10445 return;
10446 }
10447
10448 if (hevc->m_ins_flag) {
10449 if ((input_frame_based(hw_to_vdec(hevc)) ||
10450 (READ_VREG(HEVC_STREAM_LEVEL) > 0xb0)) &&
10451 ((get_dbg_flag(hevc) &
10452 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10453 (decode_timeout_val > 0) &&
10454 (hevc->start_process_time > 0) &&
10455 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10456 > decode_timeout_val)
10457 ) {
10458 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10459 int current_lcu_idx =
10460 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10461 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10462 if (hevc->last_lcu_idx == current_lcu_idx) {
10463 if (hevc->decode_timeout_count > 0)
10464 hevc->decode_timeout_count--;
10465 if (hevc->decode_timeout_count == 0)
10466 timeout_process(hevc);
10467 } else
10468 restart_process_time(hevc);
10469 hevc->last_lcu_idx = current_lcu_idx;
10470 } else {
10471 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10472 timeout_process(hevc);
10473 }
10474 }
10475 } else {
10476#endif
10477 if (hevc->m_ins_flag == 0 &&
10478 vf_get_receiver(hevc->provider_name)) {
10479 state =
10480 vf_notify_receiver(hevc->provider_name,
10481 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10482 NULL);
10483 if ((state == RECEIVER_STATE_NULL)
10484 || (state == RECEIVER_STATE_NONE))
10485 state = RECEIVER_INACTIVE;
10486 } else
10487 state = RECEIVER_INACTIVE;
10488
10489 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10490 /* error watchdog */
10491 if (hevc->m_ins_flag == 0 &&
10492 (empty_flag == 0)
10493 && (hevc->pic_list_init_flag == 0
10494 || hevc->pic_list_init_flag
10495 == 3)) {
10496 /* decoder has input */
10497 if ((get_dbg_flag(hevc) &
10498 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10499
10500 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10501 /* receiver has no buffer to recycle */
10502 if ((state == RECEIVER_INACTIVE) &&
10503 (kfifo_is_empty(&hevc->display_q) &&
10504 buf_level > 0x200)
10505 ) {
10506 if (hevc->error_flag == 0) {
10507 hevc->error_watchdog_count++;
10508 if (hevc->error_watchdog_count ==
10509 error_handle_threshold) {
10510 hevc_print(hevc, 0,
10511 "H265 dec err local reset.\n");
10512 hevc->error_flag = 1;
10513 hevc->error_watchdog_count = 0;
10514 hevc->error_skip_nal_wt_cnt = 0;
10515 hevc->
10516 error_system_watchdog_count++;
10517 WRITE_VREG
10518 (HEVC_ASSIST_MBOX0_IRQ_REG,
10519 0x1);
10520 }
10521 } else if (hevc->error_flag == 2) {
10522 int th =
10523 error_handle_nal_skip_threshold;
10524 hevc->error_skip_nal_wt_cnt++;
10525 if (hevc->error_skip_nal_wt_cnt
10526 == th) {
10527 hevc->error_flag = 3;
10528 hevc->error_watchdog_count = 0;
10529 hevc->
10530 error_skip_nal_wt_cnt = 0;
10531 WRITE_VREG
10532 (HEVC_ASSIST_MBOX0_IRQ_REG,
10533 0x1);
10534 }
10535 }
10536 }
10537 }
10538
10539 if ((get_dbg_flag(hevc)
10540 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10541 /* receiver has no buffer to recycle */
10542 if ((state == RECEIVER_INACTIVE) &&
10543 (kfifo_is_empty(&hevc->display_q))
10544 ) { /* no buffer to recycle */
10545 if ((get_dbg_flag(hevc) &
10546 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10547 0)
10548 hevc->error_system_watchdog_count++;
10549 if (hevc->error_system_watchdog_count ==
10550 error_handle_system_threshold) {
10551 /* and it lasts for a while */
10552 hevc_print(hevc, 0,
10553 "H265 dec fatal error watchdog.\n");
10554 hevc->
10555 error_system_watchdog_count = 0;
10556 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10557 }
10558 }
10559 } else {
10560 hevc->error_watchdog_count = 0;
10561 hevc->error_system_watchdog_count = 0;
10562 }
10563#ifdef MULTI_INSTANCE_SUPPORT
10564 }
10565#endif
10566 if ((hevc->ucode_pause_pos != 0) &&
10567 (hevc->ucode_pause_pos != 0xffffffff) &&
10568 udebug_pause_pos != hevc->ucode_pause_pos) {
10569 hevc->ucode_pause_pos = 0;
10570 WRITE_HREG(DEBUG_REG1, 0);
10571 }
10572
10573 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10574 dump_pic_list(hevc);
10575 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10576 }
10577 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10578 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10579 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10580 }
10581#ifdef TEST_NO_BUF
10582 if (hevc->wait_buf)
10583 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10584#endif
10585 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
10586 hevc->error_skip_nal_count = error_skip_nal_count;
10587 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10588
10589 debug &= ~H265_DEBUG_HW_RESET;
10590 }
10591
10592#ifdef ERROR_HANDLE_DEBUG
10593 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
10594 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
10595 dbg_nal_skip_count &= ~0x10000;
10596 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10597 }
10598#endif
10599
10600 if (radr != 0) {
10601 if (rval != 0) {
10602 WRITE_VREG(radr, rval);
10603 hevc_print(hevc, 0,
10604 "WRITE_VREG(%x,%x)\n", radr, rval);
10605 } else
10606 hevc_print(hevc, 0,
10607 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
10608 rval = 0;
10609 radr = 0;
10610 }
10611 if (dbg_cmd != 0) {
10612 if (dbg_cmd == 1) {
10613 u32 disp_laddr;
10614
10615 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
10616 get_double_write_mode(hevc) == 0) {
10617 disp_laddr =
10618 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
10619 } else {
10620 struct canvas_s cur_canvas;
10621
10622 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
10623 & 0xff), &cur_canvas);
10624 disp_laddr = cur_canvas.addr;
10625 }
10626 hevc_print(hevc, 0,
10627 "current displayed buffer address %x\r\n",
10628 disp_laddr);
10629 }
10630 dbg_cmd = 0;
10631 }
10632	/* don't change the clock at start. */
10633 if (hevc->m_ins_flag == 0 &&
10634 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
10635 hevc->frame_dur > 0 && hevc->saved_resolution !=
10636 hevc->frame_width * hevc->frame_height *
10637 (96000 / hevc->frame_dur))
10638 vdec_schedule_work(&hevc->set_clk_work);
10639
10640 mod_timer(timer, jiffies + PUT_INTERVAL);
10641}
10642
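/*
 * Kernel thread used in the use_cma path: it waits on h265_sema and performs
 * the deferred picture-list initialisation (pic_list_init_flag 1 -> 2) and
 * tear-down (uninit_list) that must not run in interrupt context.
 */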
10643static int h265_task_handle(void *data)
10644{
10645 int ret = 0;
10646 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10647
10648 set_user_nice(current, -10);
10649 while (1) {
10650 if (use_cma == 0) {
10651 hevc_print(hevc, 0,
10652 "ERROR: use_cma can not be changed dynamically\n");
10653 }
10654 ret = down_interruptible(&h265_sema);
10655 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
10656 init_pic_list(hevc);
10657 init_pic_list_hw(hevc);
10658 init_buf_spec(hevc);
10659 hevc->pic_list_init_flag = 2;
10660 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
10661
10662 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10663
10664 }
10665
10666 if (hevc->uninit_list) {
10667 /*USE_BUF_BLOCK*/
10668 uninit_pic_list(hevc);
10669 hevc_print(hevc, 0, "uninit list\n");
10670 hevc->uninit_list = 0;
10671#ifdef USE_UNINIT_SEMA
10672 if (use_cma) {
10673 up(&hevc->h265_uninit_done_sema);
10674 while (!kthread_should_stop())
10675 msleep(1);
10676 break;
10677 }
10678#endif
10679 }
10680 }
10681
10682 return 0;
10683}
10684
10685void vh265_free_cmabuf(void)
10686{
10687 struct hevc_state_s *hevc = gHevc;
10688
10689 mutex_lock(&vh265_mutex);
10690
10691 if (hevc->init_flag) {
10692 mutex_unlock(&vh265_mutex);
10693 return;
10694 }
10695
10696 mutex_unlock(&vh265_mutex);
10697}
10698
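/*
 * Fill a struct vdec_info snapshot (size, frame rate, error status and, in
 * the single-decoder build, the global statistics in gvs) for the status
 * query interface.
 */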
10699#ifdef MULTI_INSTANCE_SUPPORT
10700int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
10701#else
10702int vh265_dec_status(struct vdec_info *vstatus)
10703#endif
10704{
10705#ifdef MULTI_INSTANCE_SUPPORT
10706 struct hevc_state_s *hevc =
10707 (struct hevc_state_s *)vdec->private;
10708#else
10709 struct hevc_state_s *hevc = gHevc;
10710#endif
10711 if (!hevc)
10712 return -1;
10713
10714 vstatus->frame_width = hevc->frame_width;
10715 vstatus->frame_height = hevc->frame_height;
10716 if (hevc->frame_dur != 0)
10717 vstatus->frame_rate = 96000 / hevc->frame_dur;
10718 else
10719 vstatus->frame_rate = -1;
10720 vstatus->error_count = 0;
10721 vstatus->status = hevc->stat | hevc->fatal_error;
10722#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10723 vstatus->bit_rate = gvs->bit_rate;
10724 vstatus->frame_dur = hevc->frame_dur;
10725 if (gvs) {
10726 vstatus->bit_rate = gvs->bit_rate;
10727 vstatus->frame_data = gvs->frame_data;
10728 vstatus->total_data = gvs->total_data;
10729 vstatus->frame_count = gvs->frame_count;
10730 vstatus->error_frame_count = gvs->error_frame_count;
10731 vstatus->drop_frame_count = gvs->drop_frame_count;
10732 vstatus->total_data = gvs->total_data;
10733 vstatus->samp_cnt = gvs->samp_cnt;
10734 vstatus->offset = gvs->offset;
10735 }
10736 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
10737 "%s", DRIVER_NAME);
10738#endif
10739 vstatus->ratio_control = hevc->ratio_control;
10740 return 0;
10741}
10742
10743int vh265_set_isreset(struct vdec_s *vdec, int isreset)
10744{
10745 is_reset = isreset;
10746 return 0;
10747}
10748
10749static int vh265_vdec_info_init(void)
10750{
10751 gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
10752 if (NULL == gvs) {
10753		pr_info("failed to allocate the vdec status struct.\n");
10754 return -ENOMEM;
10755 }
10756 return 0;
10757}
10758
10759#if 0
10760static void H265_DECODE_INIT(void)
10761{
10762 /* enable hevc clocks */
10763 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
10764 /* *************************************************************** */
10765 /* Power ON HEVC */
10766 /* *************************************************************** */
10767 /* Powerup HEVC */
10768 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
10769 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
10770 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
10771 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
10772 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
10773 /* remove isolations */
10774 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
10775 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
10776
10777}
10778#endif
10779
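/*
 * Program HEVC_DECODE_MODE/HEVC_DECODE_MODE2 from the instance state:
 * single vs. multi instance, frame based vs. stream based and, for Dolby
 * Vision dual-layer pipelines, BL/EL routing; the start_decoding_flag goes
 * into bits 16+ and the MBX0 interrupt flag into the top byte.
 */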
10780static void config_decode_mode(struct hevc_state_s *hevc)
10781{
10782#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10783 struct vdec_s *vdec = hw_to_vdec(hevc);
10784#endif
10785 unsigned decode_mode;
10786 if (!hevc->m_ins_flag)
10787 decode_mode = DECODE_MODE_SINGLE;
10788 else if (vdec_frame_based(hw_to_vdec(hevc)))
10789 decode_mode =
10790 DECODE_MODE_MULTI_FRAMEBASE;
10791#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10792 else if (vdec->slave) {
10793 if (force_bypass_dvenl & 0x80000000)
10794 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
10795 else
10796 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
10797 if (dolby_meta_with_el && hevc->bypass_dvenl) {
10798 hevc->bypass_dvenl = 0;
10799 hevc_print(hevc, 0,
10800 "NOT support bypass_dvenl when meta_with_el\n");
10801 }
10802 if (hevc->bypass_dvenl)
10803 decode_mode =
10804 (hevc->start_parser_type << 8)
10805 | DECODE_MODE_MULTI_STREAMBASE;
10806 else
10807 decode_mode =
10808 (hevc->start_parser_type << 8)
10809 | DECODE_MODE_MULTI_DVBAL;
10810 } else if (vdec->master)
10811 decode_mode =
10812 (hevc->start_parser_type << 8)
10813 | DECODE_MODE_MULTI_DVENL;
10814#endif
10815 else
10816 decode_mode =
10817 DECODE_MODE_MULTI_STREAMBASE;
10818
10819 if (hevc->m_ins_flag)
10820 decode_mode |=
10821 (hevc->start_decoding_flag << 16);
10822 /* set MBX0 interrupt flag */
10823 decode_mode |= (0x80 << 24);
10824 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
10825 WRITE_VREG(HEVC_DECODE_MODE2,
10826 hevc->rps_set_id);
10827}
10828
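/*
 * Protocol init run after the firmware is loaded: configure the work space
 * and decoder hardware, set up the mailbox interrupt, choose the NAL search
 * mode (manual vs. ucode-checked vps/sps/pps/idr depending on PB_skip_mode
 * and debug flags), enable the SEI/Dolby Vision parsing bits and program
 * the decode mode and aux buffers.
 */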
10829static void vh265_prot_init(struct hevc_state_s *hevc)
10830{
10831#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10832 struct vdec_s *vdec = hw_to_vdec(hevc);
10833#endif
10834 /* H265_DECODE_INIT(); */
10835
10836 hevc_config_work_space_hw(hevc);
10837
10838 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
10839
10840 WRITE_VREG(HEVC_WAIT_FLAG, 1);
10841
10842 /* WRITE_VREG(P_HEVC_MPSR, 1); */
10843
10844 /* clear mailbox interrupt */
10845 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
10846
10847 /* enable mailbox interrupt */
10848 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
10849
10850 /* disable PSCALE for hardware sharing */
10851 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
10852
10853 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
10854
10855 if ((get_dbg_flag(hevc) &
10856 (H265_DEBUG_MAN_SKIP_NAL |
10857 H265_DEBUG_MAN_SEARCH_NAL))
10858 /*||hevc->m_ins_flag*/
10859 ) {
10860 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
10861 } else {
10862 /* check vps/sps/pps/i-slice in ucode */
10863 unsigned ctl_val = 0x8;
10864 if (hevc->PB_skip_mode == 0)
10865 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
10866 else if (hevc->PB_skip_mode == 3)
10867 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
10868 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
10869 }
10870 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
10871#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10872 || vdec->master
10873 || vdec->slave
10874#endif
10875 )
10876 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
10877
10878 WRITE_VREG(NAL_SEARCH_CTL,
10879 READ_VREG(NAL_SEARCH_CTL)
10880 | ((parser_sei_enable & 0x7) << 17));
10881#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10882 WRITE_VREG(NAL_SEARCH_CTL,
10883 READ_VREG(NAL_SEARCH_CTL) |
10884 ((parser_dolby_vision_enable & 0x1) << 20));
10885#endif
10886 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
10887
10888 config_decode_mode(hevc);
10889 config_aux_buf(hevc);
10890#ifdef SWAP_HEVC_UCODE
10891 if (!tee_enabled() && hevc->is_swap &&
10892 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10893 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
10894 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
10895 }
10896#endif
10897#ifdef DETREFILL_ENABLE
10898 if (hevc->is_swap &&
10899 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10900 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
10901 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
10902 }
10903#endif
10904}
10905
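/*
 * Software-side init of one decoder instance: validate the advertised
 * resolution, derive frame_dur, the 4K flag and the i-only mode from the
 * amstream dec info, and reset the vframe kfifos before hevc_local_init()
 * allocates the low-level resources.
 */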
10906static int vh265_local_init(struct hevc_state_s *hevc)
10907{
10908 int i;
10909 int ret = -1;
10910
10911#ifdef DEBUG_PTS
10912 hevc->pts_missed = 0;
10913 hevc->pts_hit = 0;
10914#endif
10915
10916 hevc->saved_resolution = 0;
10917 hevc->get_frame_dur = false;
10918 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
10919 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
10920 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
10921 pr_info("over size : %u x %u.\n",
10922 hevc->frame_width, hevc->frame_height);
10923 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10924 return ret;
10925 }
10926
10927 if (hevc->max_pic_w && hevc->max_pic_h) {
10928 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
10929 ((hevc->max_pic_w * hevc->max_pic_h) >
10930 1920 * 1088) ? true : false;
10931 } else {
10932 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
10933 ((hevc->frame_width * hevc->frame_height) >
10934 1920 * 1088) ? true : false;
10935 }
10936
10937 hevc->frame_dur =
10938 (hevc->vh265_amstream_dec_info.rate ==
10939 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
10940#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10941 gvs->frame_dur = hevc->frame_dur;
10942#endif
10943 if (hevc->frame_width && hevc->frame_height)
10944 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
10945
10946 if (i_only_flag)
10947 hevc->i_only = i_only_flag & 0xff;
10948 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
10949 & 0x08)
10950 hevc->i_only = 0x7;
10951 else
10952 hevc->i_only = 0x0;
10953 hevc->error_watchdog_count = 0;
10954 hevc->sei_present_flag = 0;
10955 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
10956 & 0x40) >> 6;
10957 hevc_print(hevc, 0,
10958 "h265:pts_unstable=%d\n", pts_unstable);
10959/*
10960 *TODO:FOR VERSION
10961 */
10962 hevc_print(hevc, 0,
10963 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
10964 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
10965
10966 if (hevc->frame_dur == 0)
10967 hevc->frame_dur = 96000 / 24;
10968
10969 INIT_KFIFO(hevc->display_q);
10970 INIT_KFIFO(hevc->newframe_q);
10971 INIT_KFIFO(hevc->pending_q);
10972
10973 for (i = 0; i < VF_POOL_SIZE; i++) {
10974 const struct vframe_s *vf = &hevc->vfpool[i];
10975
10976 hevc->vfpool[i].index = -1;
10977 kfifo_put(&hevc->newframe_q, vf);
10978 }
10979
10980
10981 ret = hevc_local_init(hevc);
10982
10983 return ret;
10984}
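/*
 * Decoder bring-up: timer and work items, firmware selection (MMU, MMU-swap
 * or non-MMU depending on the SoC and 4K support), ucode load, protocol
 * init, vframe provider registration and the optional CMA helper thread.
 * In multi-instance mode only the firmware and timer are prepared here; the
 * rest happens when the instance is scheduled.
 */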
10985#ifdef MULTI_INSTANCE_SUPPORT
10986static s32 vh265_init(struct vdec_s *vdec)
10987{
10988 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
10989#else
10990static s32 vh265_init(struct hevc_state_s *hevc)
10991{
10992
10993#endif
10994 int ret, size = -1;
10995 int fw_size = 0x1000 * 16;
10996 struct firmware_s *fw = NULL;
10997
10998 init_timer(&hevc->timer);
10999
11000 hevc->stat |= STAT_TIMER_INIT;
11001
11002 if (hevc->m_ins_flag) {
11003#ifdef USE_UNINIT_SEMA
11004 sema_init(&hevc->h265_uninit_done_sema, 0);
11005#endif
11006 INIT_WORK(&hevc->work, vh265_work);
11007 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
11008 }
11009
11010 if (vh265_local_init(hevc) < 0)
11011 return -EBUSY;
11012
11013 mutex_init(&hevc->chunks_mutex);
11014 INIT_WORK(&hevc->notify_work, vh265_notify_work);
11015 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
11016
11017 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
11018 if (IS_ERR_OR_NULL(fw))
11019 return -ENOMEM;
11020
11021 if (hevc->mmu_enable)
11022 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11023 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
11024 else {
11025 if (!hevc->is_4k) {
11026				/* if an older version of the fw was loaded, */
11027				/* we need to try the noswap fw because the */
11028				/* old fw package does not contain the swap fw.*/
11029 size = get_firmware_data(
11030 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
11031 if (size < 0)
11032 size = get_firmware_data(
11033 VIDEO_DEC_HEVC_MMU, fw->data);
11034 else if (size)
11035 hevc->is_swap = true;
11036 } else
11037 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
11038 fw->data);
11039 }
11040 else
11041 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
11042
11043 if (size < 0) {
11044 pr_err("get firmware fail.\n");
11045 vfree(fw);
11046 return -1;
11047 }
11048
11049 fw->len = size;
11050
11051#ifdef SWAP_HEVC_UCODE
11052 if (!tee_enabled() && hevc->is_swap &&
11053 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11054 if (hevc->mmu_enable) {
11055 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
11056 hevc->mc_cpu_addr =
11057 dma_alloc_coherent(amports_get_dma_device(),
11058 hevc->swap_size,
11059 &hevc->mc_dma_handle, GFP_KERNEL);
11060 if (!hevc->mc_cpu_addr) {
11061 amhevc_disable();
11062 pr_info("vh265 mmu swap ucode loaded fail.\n");
11063 return -ENOMEM;
11064 }
11065
11066 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
11067 hevc->swap_size);
11068
11069 hevc_print(hevc, 0,
11070 "vh265 mmu ucode swap loaded %x\n",
11071 hevc->mc_dma_handle);
11072 }
11073 }
11074#endif
11075
11076#ifdef MULTI_INSTANCE_SUPPORT
11077 if (hevc->m_ins_flag) {
11078 hevc->timer.data = (ulong) hevc;
11079 hevc->timer.function = vh265_check_timer_func;
11080 hevc->timer.expires = jiffies + PUT_INTERVAL;
11081
11082 hevc->fw = fw;
11083
11084 return 0;
11085 }
11086#endif
11087 amhevc_enable();
11088
11089 if (hevc->mmu_enable)
11090 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11091 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
11092 else {
11093 if (!hevc->is_4k) {
11094				/* if an older version of the fw was loaded, */
11095				/* we need to try the noswap fw because the */
11096				/* old fw package does not contain the swap fw. */
11097 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11098 "hevc_mmu_swap", fw->data);
11099 if (ret < 0)
11100 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11101 "h265_mmu", fw->data);
11102 else
11103 hevc->is_swap = true;
11104 } else
11105 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11106 "h265_mmu", fw->data);
11107 }
11108 else
11109 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
11110
11111 if (ret < 0) {
11112 amhevc_disable();
11113 vfree(fw);
11114 pr_err("H265: the %s fw loading failed, err: %x\n",
11115 tee_enabled() ? "TEE" : "local", ret);
11116 return -EBUSY;
11117 }
11118
11119 vfree(fw);
11120
11121 hevc->stat |= STAT_MC_LOAD;
11122
11123#ifdef DETREFILL_ENABLE
11124 if (hevc->is_swap &&
11125 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11126 init_detrefill_buf(hevc);
11127#endif
11128 /* enable AMRISC side protocol */
11129 vh265_prot_init(hevc);
11130
11131 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
11132 vh265_isr_thread_fn,
11133		IRQF_ONESHOT,/* run the handler thread with this irq disabled */
11134 "vh265-irq", (void *)hevc)) {
11135 hevc_print(hevc, 0, "vh265 irq register error.\n");
11136 amhevc_disable();
11137 return -ENOENT;
11138 }
11139
11140 hevc->stat |= STAT_ISR_REG;
11141 hevc->provider_name = PROVIDER_NAME;
11142
11143#ifdef MULTI_INSTANCE_SUPPORT
11144 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
11145 &vh265_vf_provider, vdec);
11146 vf_reg_provider(&vh265_vf_prov);
11147 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
11148 NULL);
11149 if (hevc->frame_dur != 0) {
11150 if (!is_reset) {
11151 vf_notify_receiver(hevc->provider_name,
11152 VFRAME_EVENT_PROVIDER_FR_HINT,
11153 (void *)
11154 ((unsigned long)hevc->frame_dur));
11155 fr_hint_status = VDEC_HINTED;
11156 }
11157 } else
11158 fr_hint_status = VDEC_NEED_HINT;
11159#else
11160 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
11161 hevc);
11162 vf_reg_provider(&vh265_vf_prov);
11163 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
11164 if (hevc->frame_dur != 0) {
11165 vf_notify_receiver(PROVIDER_NAME,
11166 VFRAME_EVENT_PROVIDER_FR_HINT,
11167 (void *)
11168 ((unsigned long)hevc->frame_dur));
11169 fr_hint_status = VDEC_HINTED;
11170 } else
11171 fr_hint_status = VDEC_NEED_HINT;
11172#endif
11173 hevc->stat |= STAT_VF_HOOK;
11174
11175 hevc->timer.data = (ulong) hevc;
11176 hevc->timer.function = vh265_check_timer_func;
11177 hevc->timer.expires = jiffies + PUT_INTERVAL;
11178
11179 add_timer(&hevc->timer);
11180
11181 hevc->stat |= STAT_TIMER_ARM;
11182
11183 if (use_cma) {
11184#ifdef USE_UNINIT_SEMA
11185 sema_init(&hevc->h265_uninit_done_sema, 0);
11186#endif
11187 if (h265_task == NULL) {
11188 sema_init(&h265_sema, 1);
11189 h265_task =
11190 kthread_run(h265_task_handle, hevc,
11191 "kthread_h265");
11192 }
11193 }
11194 /* hevc->stat |= STAT_KTHREAD; */
11195#if 0
11196 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11197 hevc_print(hevc, 0, "%s force clk\n", __func__);
11198 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11199 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11200 ((1 << 2) | (1 << 1)));
11201 WRITE_VREG(HEVC_DBLK_CFG0,
11202 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11203 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11204 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11205 (1 << 2)); /* 2 */
11206 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11207 (1 << 24)); /* 24 */
11208 WRITE_VREG(HEVC_STREAM_CONTROL,
11209 READ_VREG(HEVC_STREAM_CONTROL) |
11210 (1 << 15)); /* 15 */
11211 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11212 (1 << 13)); /* 13 */
11213 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11214 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11215 (1 << 15)); /* 15 */
11216 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11217 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11218 (1 << 15)); /* 15 */
11219 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11220 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11221 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11222 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11223 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11224 (1 << 3)); /* 3 */
11225 }
11226#endif
11227#ifdef SWAP_HEVC_UCODE
11228 if (!tee_enabled() && hevc->is_swap &&
11229 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11230 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11231 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11232 }
11233#endif
11234
11235#ifndef MULTI_INSTANCE_SUPPORT
11236 set_vdec_func(&vh265_dec_status);
11237#endif
11238 amhevc_start();
11239 hevc->stat |= STAT_VDEC_RUN;
11240 hevc->init_flag = 1;
11241 error_handle_threshold = 30;
11242 /* pr_info("%d, vh265_init, RP=0x%x\n",
11243 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11244 */
11245
11246 return 0;
11247}
11248
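/*
 * Single-instance teardown: optionally wait for the current slice to finish,
 * stop the core, release the IRQ, timer and vframe provider, and hand the
 * picture-list release to the CMA helper thread when use_cma is set.
 */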
11249static int vh265_stop(struct hevc_state_s *hevc)
11250{
11251 if (get_dbg_flag(hevc) &
11252 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11253 int wait_timeout_count = 0;
11254
11255 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11256 HEVC_CODED_SLICE_SEGMENT_DAT &&
11257 wait_timeout_count < 10){
11258 wait_timeout_count++;
11259 msleep(20);
11260 }
11261 }
11262 if (hevc->stat & STAT_VDEC_RUN) {
11263 amhevc_stop();
11264 hevc->stat &= ~STAT_VDEC_RUN;
11265 }
11266
11267 if (hevc->stat & STAT_ISR_REG) {
11268#ifdef MULTI_INSTANCE_SUPPORT
11269 if (!hevc->m_ins_flag)
11270#endif
11271 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11272 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11273 hevc->stat &= ~STAT_ISR_REG;
11274 }
11275
11276 hevc->stat &= ~STAT_TIMER_INIT;
11277 if (hevc->stat & STAT_TIMER_ARM) {
11278 del_timer_sync(&hevc->timer);
11279 hevc->stat &= ~STAT_TIMER_ARM;
11280 }
11281
11282 if (hevc->stat & STAT_VF_HOOK) {
11283 if (fr_hint_status == VDEC_HINTED) {
11284 vf_notify_receiver(hevc->provider_name,
11285 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11286 NULL);
11287 }
11288 fr_hint_status = VDEC_NO_NEED_HINT;
11289 vf_unreg_provider(&vh265_vf_prov);
11290 hevc->stat &= ~STAT_VF_HOOK;
11291 }
11292
11293 hevc_local_uninit(hevc);
11294
11295 if (use_cma) {
11296 hevc->uninit_list = 1;
11297 up(&h265_sema);
11298#ifdef USE_UNINIT_SEMA
11299 down(&hevc->h265_uninit_done_sema);
11300 if (!IS_ERR(h265_task)) {
11301 kthread_stop(h265_task);
11302 h265_task = NULL;
11303 }
11304#else
11305 while (hevc->uninit_list) /* wait uninit complete */
11306 msleep(20);
11307#endif
11308
11309 }
11310 hevc->init_flag = 0;
11311 hevc->first_sc_checked = 0;
11312 cancel_work_sync(&hevc->notify_work);
11313 cancel_work_sync(&hevc->set_clk_work);
11314 uninit_mmu_buffers(hevc);
11315 amhevc_disable();
11316
11317 kfree(gvs);
11318 gvs = NULL;
11319
11320 return 0;
11321}
11322
11323#ifdef MULTI_INSTANCE_SUPPORT
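/*
 * Decode-time bookkeeping for the watchdog: start_process_time() and
 * restart_process_time() arm the per-picture timeout, reset_process_time()
 * records the worst-case duration, and timeout_process() marks the current
 * picture as done and schedules the timeout work when the hardware stalls.
 */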
11324static void reset_process_time(struct hevc_state_s *hevc)
11325{
11326 if (hevc->start_process_time) {
11327 unsigned int process_time =
11328 1000 * (jiffies - hevc->start_process_time) / HZ;
11329 hevc->start_process_time = 0;
11330 if (process_time > max_process_time[hevc->index])
11331 max_process_time[hevc->index] = process_time;
11332 }
11333}
11334
11335static void start_process_time(struct hevc_state_s *hevc)
11336{
11337 hevc->start_process_time = jiffies;
11338 hevc->decode_timeout_count = 2;
11339 hevc->last_lcu_idx = 0;
11340}
11341
11342static void restart_process_time(struct hevc_state_s *hevc)
11343{
11344 hevc->start_process_time = jiffies;
11345 hevc->decode_timeout_count = 2;
11346}
11347
11348static void timeout_process(struct hevc_state_s *hevc)
11349{
11350	/*
11351	 * If vh265_work has already arrived at this timeout point,
11352	 * let it handle the scenario.
11353	 */
11354 if (work_pending(&hevc->work))
11355 return;
11356
11357 hevc->timeout_num++;
11358 amhevc_stop();
11359 read_decode_info(hevc);
11360
11361 hevc_print(hevc,
11362 0, "%s decoder timeout\n", __func__);
11363 check_pic_decoded_error(hevc,
11364 hevc->pic_decoded_lcu_idx);
11365 hevc->decoded_poc = hevc->curr_POC;
11366 hevc->decoding_pic = NULL;
11367 hevc->dec_result = DEC_RESULT_DONE;
11368 reset_process_time(hevc);
11369
11370 if (work_pending(&hevc->work))
11371 return;
11372 vdec_schedule_work(&hevc->timeout_work);
11373}
11374
11375#ifdef CONSTRAIN_MAX_BUF_NUM
11376static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11377{
11378 struct PIC_s *pic;
11379 int i;
11380 int count = 0;
11381 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11382 pic = hevc->m_PIC[i];
11383 if (pic == NULL || pic->index == -1)
11384 continue;
11385 if (pic->output_mark == 0 && pic->referenced == 0
11386 && pic->output_ready == 1)
11387 count++;
11388 }
11389
11390 return count;
11391}
11392
11393static int get_used_buf_count(struct hevc_state_s *hevc)
11394{
11395 struct PIC_s *pic;
11396 int i;
11397 int count = 0;
11398 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11399 pic = hevc->m_PIC[i];
11400 if (pic == NULL || pic->index == -1)
11401 continue;
11402 if (pic->output_mark != 0 || pic->referenced != 0
11403 || pic->output_ready != 0)
11404 count++;
11405 }
11406
11407 return count;
11408}
11409#endif
11410
11411
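/*
 * Check whether a free picture buffer exists. If none is free while nearly
 * all buffers are held as references and the receiver is inactive,
 * error-marked reference pictures are dropped so the decoder cannot
 * deadlock waiting on the display path.
 */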
11412static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11413{
11414 struct PIC_s *new_pic = NULL;
11415 struct PIC_s *pic;
11416 /* recycle un-used pic */
11417 int i;
11418 int ref_pic = 0;
11419 struct vdec_s *vdec = hw_to_vdec(hevc);
11420 /*return 1 if pic_list is not initialized yet*/
11421 if (hevc->pic_list_init_flag != 3)
11422 return 1;
11423
11424 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11425 pic = hevc->m_PIC[i];
11426 if (pic == NULL || pic->index == -1)
11427 continue;
11428 if (pic->referenced == 1)
11429 ref_pic++;
11430 if (pic->output_mark == 0 && pic->referenced == 0
11431 && pic->output_ready == 0
11432 ) {
11433 if (new_pic) {
11434 if (pic->POC < new_pic->POC)
11435 new_pic = pic;
11436 } else
11437 new_pic = pic;
11438 }
11439 }
11440/* If the number of DPB reference frames is >= (DPB buffer size - number of reorders - 3) */
11441/* and the back-end state is RECEIVER_INACTIVE, the decoder is left with no buffer to */
11442/* decode into; drop the error-marked reference frames and set the error flag. */
11443/* The 3 stands for the 2 fields needed by the back-end display plus the 1 field */
11444/* needed for decoding when the stream is interlaced. */
11445 if ((!hevc->is_used_v4l) && (new_pic == NULL) &&
11446 (ref_pic >=
11447 get_work_pic_num(hevc) -
11448 hevc->sps_num_reorder_pics_0 - 3)) {
11449 enum receviver_start_e state = RECEIVER_INACTIVE;
11450 if (vf_get_receiver(vdec->vf_provider_name)) {
11451 state =
11452 vf_notify_receiver(vdec->vf_provider_name,
11453 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11454 NULL);
11455 if ((state == RECEIVER_STATE_NULL)
11456 || (state == RECEIVER_STATE_NONE))
11457 state = RECEIVER_INACTIVE;
11458 }
11459 if (state == RECEIVER_INACTIVE) {
11460 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11461 pic = hevc->m_PIC[i];
11462 if (pic == NULL || pic->index == -1)
11463 continue;
11464
11465 if ((pic->referenced == 1) &&
11466 (pic->error_mark == 1)) {
11467 pic->referenced = 0;
11468 put_mv_buf(hevc, pic);
11469 }
11470 pic->error_mark = 1;
11471 }
11472 }
11473 }
11474
11475 return (new_pic != NULL) ? 1 : 0;
11476}
11477
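/*
 * Multi-instance teardown counterpart of vh265_stop(): cancel the work
 * items, release IRQ/timer/provider and MMU buffers, and defer the
 * picture-list release to the work handler via DEC_RESULT_FREE_CANVAS.
 */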
11478static int vmh265_stop(struct hevc_state_s *hevc)
11479{
11480 if (hevc->stat & STAT_TIMER_ARM) {
11481 del_timer_sync(&hevc->timer);
11482 hevc->stat &= ~STAT_TIMER_ARM;
11483 }
11484 if (hevc->stat & STAT_VDEC_RUN) {
11485 amhevc_stop();
11486 hevc->stat &= ~STAT_VDEC_RUN;
11487 }
11488 if (hevc->stat & STAT_ISR_REG) {
11489 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11490 hevc->stat &= ~STAT_ISR_REG;
11491 }
11492
11493 if (hevc->stat & STAT_VF_HOOK) {
11494 if (fr_hint_status == VDEC_HINTED)
11495 vf_notify_receiver(hevc->provider_name,
11496 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11497 NULL);
11498 fr_hint_status = VDEC_NO_NEED_HINT;
11499 vf_unreg_provider(&vh265_vf_prov);
11500 hevc->stat &= ~STAT_VF_HOOK;
11501 }
11502
11503 hevc_local_uninit(hevc);
11504
11505 hevc->init_flag = 0;
11506 hevc->first_sc_checked = 0;
11507 cancel_work_sync(&hevc->notify_work);
11508 cancel_work_sync(&hevc->set_clk_work);
11509 cancel_work_sync(&hevc->timeout_work);
11510
11511 uninit_mmu_buffers(hevc);
11512
11513 if (use_cma) {
11514 hevc->uninit_list = 1;
11515 reset_process_time(hevc);
11516 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
11517 vdec_schedule_work(&hevc->work);
11518 flush_work(&hevc->work);
11519#ifdef USE_UNINIT_SEMA
11520 if (hevc->init_flag) {
11521 down(&hevc->h265_uninit_done_sema);
11522 }
11523#else
11524		while (hevc->uninit_list) /* wait for uninit to complete */
11525 msleep(20);
11526#endif
11527 }
11528 cancel_work_sync(&hevc->work);
11529
11530 vfree(hevc->fw);
11531 hevc->fw = NULL;
11532
11533 dump_log(hevc);
11534 return 0;
11535}
11536
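/*
 * Debug helper: byte-wise sum of the current input chunk, mapping the block
 * first if it is not already mapped. Only the low 8 bits of the sum are
 * returned (the return type is unsigned char), which is enough for the
 * checksum traces printed by run() and the GET_DATA path.
 */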
11537static unsigned char get_data_check_sum
11538 (struct hevc_state_s *hevc, int size)
11539{
11540 int jj;
11541 int sum = 0;
11542 u8 *data = NULL;
11543
11544 if (!hevc->chunk->block->is_mapped)
11545 data = codec_mm_vmap(hevc->chunk->block->start +
11546 hevc->chunk->offset, size);
11547 else
11548 data = ((u8 *)hevc->chunk->block->start_virt) +
11549 hevc->chunk->offset;
11550
11551 for (jj = 0; jj < size; jj++)
11552 sum += data[jj];
11553
11554 if (!hevc->chunk->block->is_mapped)
11555 codec_mm_unmap_phyaddr(data);
11556 return sum;
11557}
11558
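/*
 * Deferred notification work: send the frame-rate hint (frame_dur) to the
 * vframe receiver from process context instead of the ISR, and mark the
 * hint as delivered so it is sent only once per stream.
 */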
11559static void vh265_notify_work(struct work_struct *work)
11560{
11561 struct hevc_state_s *hevc =
11562 container_of(work,
11563 struct hevc_state_s,
11564 notify_work);
11565 struct vdec_s *vdec = hw_to_vdec(hevc);
11566#ifdef MULTI_INSTANCE_SUPPORT
11567 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
11568 vf_notify_receiver(hevc->provider_name,
11569 VFRAME_EVENT_PROVIDER_FR_HINT,
11570 (void *)
11571 ((unsigned long)hevc->frame_dur));
11572 vdec->fr_hint_state = VDEC_HINTED;
11573 } else if (fr_hint_status == VDEC_NEED_HINT) {
11574 vf_notify_receiver(hevc->provider_name,
11575 VFRAME_EVENT_PROVIDER_FR_HINT,
11576 (void *)
11577 ((unsigned long)hevc->frame_dur));
11578 fr_hint_status = VDEC_HINTED;
11579 }
11580#else
11581	if (fr_hint_status == VDEC_NEED_HINT) {
11582		vf_notify_receiver(PROVIDER_NAME,
11583			VFRAME_EVENT_PROVIDER_FR_HINT,
11584			(void *)
11585			((unsigned long)hevc->frame_dur));
11586		fr_hint_status = VDEC_HINTED;
11587	}
11588#endif
11589
11590 return;
11591}
11592
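/*
 * Core post-decode handler shared by vh265_work() and vh265_timeout_work()
 * ("from" is 1 for the timeout path). It dispatches on hevc->dec_result:
 * FREE_CANVAS releases the picture list, GET_DATA(_RETRY) feeds more input
 * or reschedules, DONE recycles unused MMU pages and marks the chunk dirty,
 * AGAIN arms the next-again threshold (or turns into EOS when input is
 * exhausted), EOS flushes the output queue, FORCE_EXIT tears the core down.
 * It finally stops the ucode and timer, hands the HW core back via
 * vdec_core_finish_run() and invokes the vdec callback.
 */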
11593static void vh265_work_implement(struct hevc_state_s *hevc,
11594	struct vdec_s *vdec, int from)
11595{
11596 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
11597 /*USE_BUF_BLOCK*/
11598 uninit_pic_list(hevc);
11599 hevc_print(hevc, 0, "uninit list\n");
11600 hevc->uninit_list = 0;
11601#ifdef USE_UNINIT_SEMA
11602 up(&hevc->h265_uninit_done_sema);
11603#endif
11604 return;
11605 }
11606
11607 /* finished decoding one frame or error,
11608 * notify vdec core to switch context
11609 */
11610 if (hevc->pic_list_init_flag == 1
11611 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
11612 hevc->pic_list_init_flag = 2;
11613 init_pic_list(hevc);
11614 init_pic_list_hw(hevc);
11615 init_buf_spec(hevc);
11616 hevc_print(hevc, 0,
11617 "set pic_list_init_flag to 2\n");
11618
11619 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11620 return;
11621 }
11622
11623 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11624 "%s dec_result %d %x %x %x\n",
11625 __func__,
11626 hevc->dec_result,
11627 READ_VREG(HEVC_STREAM_LEVEL),
11628 READ_VREG(HEVC_STREAM_WR_PTR),
11629 READ_VREG(HEVC_STREAM_RD_PTR));
11630
11631 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
11632 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
11633 && (hw_to_vdec(hevc)->next_status !=
11634 VDEC_STATUS_DISCONNECTED)) {
11635 if (!vdec_has_more_input(vdec)) {
11636 hevc->dec_result = DEC_RESULT_EOS;
11637 vdec_schedule_work(&hevc->work);
11638 return;
11639 }
11640 if (!input_frame_based(vdec)) {
11641 int r = vdec_sync_input(vdec);
11642 if (r >= 0x200) {
11643 WRITE_VREG(HEVC_DECODE_SIZE,
11644 READ_VREG(HEVC_DECODE_SIZE) + r);
11645
11646 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11647 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
11648 __func__,
11649 READ_VREG(HEVC_STREAM_LEVEL),
11650 READ_VREG(HEVC_STREAM_WR_PTR),
11651 READ_VREG(HEVC_STREAM_RD_PTR),
11652 READ_VREG(HEVC_MPC_E), r);
11653
11654 start_process_time(hevc);
11655 if (READ_VREG(HEVC_DEC_STATUS_REG)
11656 == HEVC_DECODE_BUFEMPTY2)
11657 WRITE_VREG(HEVC_DEC_STATUS_REG,
11658 HEVC_ACTION_DONE);
11659 else
11660 WRITE_VREG(HEVC_DEC_STATUS_REG,
11661 HEVC_ACTION_DEC_CONT);
11662 } else {
11663 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11664 vdec_schedule_work(&hevc->work);
11665 }
11666 return;
11667 }
11668
11669 /*below for frame_base*/
11670 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
11671 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11672 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
11673 __func__,
11674 READ_VREG(HEVC_STREAM_LEVEL),
11675 READ_VREG(HEVC_STREAM_WR_PTR),
11676 READ_VREG(HEVC_STREAM_RD_PTR),
11677 READ_VREG(HEVC_MPC_E));
11678 mutex_lock(&hevc->chunks_mutex);
11679 vdec_vframe_dirty(vdec, hevc->chunk);
11680 hevc->chunk = NULL;
11681 mutex_unlock(&hevc->chunks_mutex);
11682 vdec_clean_input(vdec);
11683 }
11684
11685 /*if (is_new_pic_available(hevc)) {*/
11686 if (run_ready(vdec, VDEC_HEVC)) {
11687 int r;
11688 int decode_size;
11689 r = vdec_prepare_input(vdec, &hevc->chunk);
11690 if (r < 0) {
11691 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11692
11693 hevc_print(hevc,
11694 PRINT_FLAG_VDEC_DETAIL,
11695 "amvdec_vh265: Insufficient data\n");
11696
11697 vdec_schedule_work(&hevc->work);
11698 return;
11699 }
11700 hevc->dec_result = DEC_RESULT_NONE;
11701 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11702 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
11703 __func__, r,
11704 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
11705 get_data_check_sum(hevc, r) : 0,
11706 READ_VREG(HEVC_MPC_E));
11707
11708 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
11709 int jj;
11710 u8 *data = NULL;
11711
11712 if (!hevc->chunk->block->is_mapped)
11713 data = codec_mm_vmap(
11714 hevc->chunk->block->start +
11715 hevc->chunk->offset, r);
11716 else
11717 data = ((u8 *)
11718 hevc->chunk->block->start_virt)
11719 + hevc->chunk->offset;
11720
11721 for (jj = 0; jj < r; jj++) {
11722 if ((jj & 0xf) == 0)
11723 hevc_print(hevc,
11724 PRINT_FRAMEBASE_DATA,
11725 "%06x:", jj);
11726 hevc_print_cont(hevc,
11727 PRINT_FRAMEBASE_DATA,
11728 "%02x ", data[jj]);
11729 if (((jj + 1) & 0xf) == 0)
11730 hevc_print_cont(hevc,
11731 PRINT_FRAMEBASE_DATA,
11732 "\n");
11733 }
11734
11735 if (!hevc->chunk->block->is_mapped)
11736 codec_mm_unmap_phyaddr(data);
11737 }
11738
11739 decode_size = hevc->chunk->size +
11740 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
11741 WRITE_VREG(HEVC_DECODE_SIZE,
11742 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
11743
11744 vdec_enable_input(vdec);
11745
11746 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11747 "%s: mpc %x\n",
11748 __func__, READ_VREG(HEVC_MPC_E));
11749
11750 start_process_time(hevc);
11751 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11752		} else {
11753 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11754
11755 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11756 * "amvdec_vh265: Insufficient data\n");
11757 */
11758
11759 vdec_schedule_work(&hevc->work);
11760 }
11761 return;
11762 } else if (hevc->dec_result == DEC_RESULT_DONE) {
11763 /* if (!hevc->ctx_valid)
11764 hevc->ctx_valid = 1; */
11765 decode_frame_count[hevc->index]++;
11766#ifdef DETREFILL_ENABLE
11767 if (hevc->is_swap &&
11768 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11769 if (hevc->delrefill_check == 2) {
11770 delrefill(hevc);
11771 amhevc_stop();
11772 }
11773 }
11774#endif
11775 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
11776 hevc->used_4k_num =
11777 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
11778 if (hevc->used_4k_num >= 0 &&
11779 hevc->cur_pic &&
11780 hevc->cur_pic->scatter_alloc
11781 == 1) {
11782 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
11783 "%s pic index %d scatter_alloc %d page_start %d\n",
11784 "decoder_mmu_box_free_idx_tail",
11785 hevc->cur_pic->index,
11786 hevc->cur_pic->scatter_alloc,
11787 hevc->used_4k_num);
11788 if (hevc->m_ins_flag)
11789 hevc_mmu_dma_check(hw_to_vdec(hevc));
11790 decoder_mmu_box_free_idx_tail(
11791 hevc->mmu_box,
11792 hevc->cur_pic->index,
11793 hevc->used_4k_num);
11794 hevc->cur_pic->scatter_alloc = 2;
11795 }
11796 }
11797 hevc->pic_decoded_lcu_idx =
11798 READ_VREG(HEVC_PARSER_LCU_START)
11799 & 0xffffff;
11800
11801 if (vdec->master == NULL && vdec->slave == NULL &&
11802 hevc->empty_flag == 0) {
11803 hevc->over_decode =
11804 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
11805 if (hevc->over_decode)
11806 hevc_print(hevc, 0,
11807 "!!!Over decode\n");
11808 }
11809
11810 if (is_log_enable(hevc))
11811 add_log(hevc,
11812 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
11813 __func__,
11814 hevc->dec_result,
11815 hevc->pic_decoded_lcu_idx,
11816 hevc->used_4k_num,
11817 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11818 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11819 hevc->start_shift_bytes
11820 );
11821
11822 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11823 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
11824 __func__,
11825 hevc->dec_result,
11826 READ_VREG(HEVC_STREAM_LEVEL),
11827 READ_VREG(HEVC_STREAM_WR_PTR),
11828 READ_VREG(HEVC_STREAM_RD_PTR),
11829 hevc->pic_decoded_lcu_idx,
11830 hevc->used_4k_num,
11831 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11832 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11833 hevc->start_shift_bytes
11834 );
11835
11836 hevc->used_4k_num = -1;
11837
11838 check_pic_decoded_error(hevc,
11839 hevc->pic_decoded_lcu_idx);
11840#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11841#if 1
11842 if (vdec->slave) {
11843 if (dv_debug & 0x1)
11844 vdec_set_flag(vdec->slave,
11845 VDEC_FLAG_SELF_INPUT_CONTEXT);
11846 else
11847 vdec_set_flag(vdec->slave,
11848 VDEC_FLAG_OTHER_INPUT_CONTEXT);
11849 }
11850#else
11851 if (vdec->slave) {
11852 if (no_interleaved_el_slice)
11853 vdec_set_flag(vdec->slave,
11854 VDEC_FLAG_INPUT_KEEP_CONTEXT);
11855 /* this will move real HW pointer for input */
11856 else
11857 vdec_set_flag(vdec->slave, 0);
11858 /* this will not move real HW pointer
11859 *and SL layer decoding
11860 *will start from same stream position
11861 *as current BL decoder
11862 */
11863 }
11864#endif
11865#endif
11866#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11867 hevc->shift_byte_count_lo
11868 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11869 if (vdec->slave) {
11870 /*cur is base, found enhance*/
11871 struct hevc_state_s *hevc_el =
11872 (struct hevc_state_s *)
11873 vdec->slave->private;
11874 if (hevc_el)
11875 hevc_el->shift_byte_count_lo =
11876 hevc->shift_byte_count_lo;
11877 } else if (vdec->master) {
11878 /*cur is enhance, found base*/
11879 struct hevc_state_s *hevc_ba =
11880 (struct hevc_state_s *)
11881 vdec->master->private;
11882 if (hevc_ba)
11883 hevc_ba->shift_byte_count_lo =
11884 hevc->shift_byte_count_lo;
11885 }
11886#endif
11887 mutex_lock(&hevc->chunks_mutex);
11888 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11889 hevc->chunk = NULL;
11890 mutex_unlock(&hevc->chunks_mutex);
11891 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
11892 /*
11893 stream base: stream buf empty or timeout
11894 frame base: vdec_prepare_input fail
11895 */
11896 if (!vdec_has_more_input(vdec)) {
11897 hevc->dec_result = DEC_RESULT_EOS;
11898 vdec_schedule_work(&hevc->work);
11899 return;
11900 }
11901#ifdef AGAIN_HAS_THRESHOLD
11902 hevc->next_again_flag = 1;
11903#endif
11904 } else if (hevc->dec_result == DEC_RESULT_EOS) {
11905 struct PIC_s *pic;
11906 hevc->eos = 1;
11907#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11908 if ((vdec->master || vdec->slave) &&
11909 aux_data_is_avaible(hevc))
11910 dolby_get_meta(hevc);
11911#endif
11912 check_pic_decoded_error(hevc,
11913 hevc->pic_decoded_lcu_idx);
11914 pic = get_pic_by_POC(hevc, hevc->curr_POC);
11915 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11916 "%s: end of stream, last dec poc %d => 0x%pf\n",
11917 __func__, hevc->curr_POC, pic);
11918 flush_output(hevc, pic);
11919
11920 if (hevc->is_used_v4l)
11921 notify_v4l_eos(hw_to_vdec(hevc));
11922#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11923 hevc->shift_byte_count_lo
11924 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11925 if (vdec->slave) {
11926 /*cur is base, found enhance*/
11927 struct hevc_state_s *hevc_el =
11928 (struct hevc_state_s *)
11929 vdec->slave->private;
11930 if (hevc_el)
11931 hevc_el->shift_byte_count_lo =
11932 hevc->shift_byte_count_lo;
11933 } else if (vdec->master) {
11934 /*cur is enhance, found base*/
11935 struct hevc_state_s *hevc_ba =
11936 (struct hevc_state_s *)
11937 vdec->master->private;
11938 if (hevc_ba)
11939 hevc_ba->shift_byte_count_lo =
11940 hevc->shift_byte_count_lo;
11941 }
11942#endif
11943 mutex_lock(&hevc->chunks_mutex);
11944 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11945 hevc->chunk = NULL;
11946 mutex_unlock(&hevc->chunks_mutex);
11947 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
11948 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11949 "%s: force exit\n",
11950 __func__);
11951 if (hevc->stat & STAT_VDEC_RUN) {
11952 amhevc_stop();
11953 hevc->stat &= ~STAT_VDEC_RUN;
11954 }
11955 if (hevc->stat & STAT_ISR_REG) {
11956 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11957 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11958 hevc->stat &= ~STAT_ISR_REG;
11959 }
11960 hevc_print(hevc, 0, "%s: force exit end\n",
11961 __func__);
11962 }
11963
11964 if (hevc->stat & STAT_VDEC_RUN) {
11965 amhevc_stop();
11966 hevc->stat &= ~STAT_VDEC_RUN;
11967 }
11968
11969 if (hevc->stat & STAT_TIMER_ARM) {
11970 del_timer_sync(&hevc->timer);
11971 hevc->stat &= ~STAT_TIMER_ARM;
11972 }
11973
11974 wait_hevc_search_done(hevc);
11975#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11976 if (hevc->switch_dvlayer_flag) {
11977 if (vdec->slave)
11978 vdec_set_next_sched(vdec, vdec->slave);
11979 else if (vdec->master)
11980 vdec_set_next_sched(vdec, vdec->master);
11981 } else if (vdec->slave || vdec->master)
11982 vdec_set_next_sched(vdec, vdec);
11983#endif
11984
11985 if (from == 1) {
11986 /* This is a timeout work */
11987 if (work_pending(&hevc->work)) {
11988 /*
11989 * The vh265_work arrives at the last second,
11990 * give it a chance to handle the scenario.
11991 */
11992 return;
11993			//cancel_work_sync(&hevc->work);//reserved for future consideration
11994 }
11995 }
11996
11997 /* mark itself has all HW resource released and input released */
11998 if (vdec->parallel_dec == 1)
11999 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
12000 else
12001 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12002
12003 if (hevc->is_used_v4l) {
12004 struct aml_vcodec_ctx *ctx =
12005 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12006
12007 if (ctx->param_sets_from_ucode &&
12008 !hevc->v4l_params_parsed)
12009 vdec_v4l_write_frame_sync(ctx);
12010 }
12011
12012 if (hevc->vdec_cb)
12013 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
12014}
12015
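/*
 * Workqueue entry points. The timeout variant yields to a pending
 * vh265_work so a late "done" interrupt is not processed twice.
 */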
12016static void vh265_work(struct work_struct *work)
12017{
12018 struct hevc_state_s *hevc = container_of(work,
12019 struct hevc_state_s, work);
12020 struct vdec_s *vdec = hw_to_vdec(hevc);
12021
12022 vh265_work_implement(hevc, vdec, 0);
12023}
12024
12025static void vh265_timeout_work(struct work_struct *work)
12026{
12027 struct hevc_state_s *hevc = container_of(work,
12028 struct hevc_state_s, timeout_work);
12029 struct vdec_s *vdec = hw_to_vdec(hevc);
12030
12031 if (work_pending(&hevc->work))
12032 return;
12033 vh265_work_implement(hevc, vdec, 1);
12034}
12035
12036
12037static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
12038{
12039 /* new to do ... */
12040 vh265_prot_init(hevc);
12041 return 0;
12042}
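/*
 * Scheduler gate: return the core mask this instance needs when it can run,
 * 0 otherwise. Checks the single-step debug state, EOS, the stream buffer
 * level against pre_decode_buf_level (level = wp - rp, or size + wp - rp on
 * wrap), the AGAIN threshold, the display-queue valve, free-picture
 * availability and, under CONSTRAIN_MAX_BUF_NUM, the buffer-count caps; for
 * v4l2 it also waits until the capture side (codec/DPB) is ready.
 */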
12043static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
12044{
12045 struct hevc_state_s *hevc =
12046 (struct hevc_state_s *)vdec->private;
12047 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
12048 CODEC_MM_FLAGS_TVP : 0;
12049 bool ret = 0;
12050 if (step == 0x12)
12051 return 0;
12052 else if (step == 0x11)
12053 step = 0x12;
12054
12055 if (hevc->eos)
12056 return 0;
12057 if (!hevc->first_sc_checked && hevc->mmu_enable) {
12058 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
12059		hevc->first_sc_checked = 1;
12060 hevc_print(hevc, 0,
12061 "vh265 cached=%d need_size=%d speed= %d ms\n",
12062 size, (hevc->need_cache_size >> PAGE_SHIFT),
12063 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
12064 }
12065 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
12066 && pre_decode_buf_level != 0) {
12067 u32 rp, wp, level;
12068
12069 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
12070 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
12071 if (wp < rp)
12072 level = vdec->input.size + wp - rp;
12073 else
12074 level = wp - rp;
12075
12076 if (level < pre_decode_buf_level)
12077 return 0;
12078 }
12079
12080#ifdef AGAIN_HAS_THRESHOLD
12081 if (hevc->next_again_flag &&
12082 (!vdec_frame_based(vdec))) {
12083 u32 parser_wr_ptr =
12084 READ_PARSER_REG(PARSER_VIDEO_WP);
12085 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
12086 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
12087 again_threshold) {
12088 int r = vdec_sync_input(vdec);
12089 hevc_print(hevc,
12090			PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
12091 return 0;
12092 }
12093 }
12094#endif
12095
12096 if (disp_vframe_valve_level &&
12097 kfifo_len(&hevc->display_q) >=
12098 disp_vframe_valve_level) {
12099 hevc->valve_count--;
12100 if (hevc->valve_count <= 0)
12101 hevc->valve_count = 2;
12102 else
12103 return 0;
12104 }
12105
12106 ret = is_new_pic_available(hevc);
12107 if (!ret) {
12108 hevc_print(hevc,
12109 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
12110 __func__, ret);
12111 }
12112
12113#ifdef CONSTRAIN_MAX_BUF_NUM
12114 if (hevc->pic_list_init_flag == 3) {
12115 if (run_ready_max_vf_only_num > 0 &&
12116 get_vf_ref_only_buf_count(hevc) >=
12117 run_ready_max_vf_only_num
12118 )
12119 ret = 0;
12120 if (run_ready_display_q_num > 0 &&
12121 kfifo_len(&hevc->display_q) >=
12122 run_ready_display_q_num)
12123 ret = 0;
12124
12125		/*avoid consuming extra buffers when
12126			switching resolution*/
12127 if (run_ready_max_buf_num == 0xff &&
12128 get_used_buf_count(hevc) >=
12129 get_work_pic_num(hevc))
12130 ret = 0;
12131 else if (run_ready_max_buf_num &&
12132 get_used_buf_count(hevc) >=
12133 run_ready_max_buf_num)
12134 ret = 0;
12135 }
12136#endif
12137
12138 if (hevc->is_used_v4l) {
12139 struct aml_vcodec_ctx *ctx =
12140 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12141
12142 if (ctx->param_sets_from_ucode &&
12143 !ctx->v4l_codec_ready &&
12144 hevc->v4l_params_parsed) {
12145			ret = 0; /*params parsed, but v4l codec not ready yet*/
12146 } else if (!ctx->v4l_codec_dpb_ready)
12147 ret = 0;
12148 }
12149
12150 if (ret)
12151 not_run_ready[hevc->index] = 0;
12152 else
12153 not_run_ready[hevc->index]++;
12154 if (vdec->parallel_dec == 1)
12155 return ret ? (CORE_MASK_HEVC) : 0;
12156 else
12157 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
12158}
12159
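/*
 * Scheduler "run" callback: start one decode pass. It resets the HEVC core,
 * prepares the next input chunk (DEC_RESULT_AGAIN on failure), loads or
 * reuses the HEVC ucode (with the swap/no-swap fw fallback on GXM and
 * earlier when MMU is enabled), restores the HW context, programs
 * HEVC_DECODE_SIZE, arms the watchdog timer and starts the core.
 */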
12160static void run(struct vdec_s *vdec, unsigned long mask,
12161 void (*callback)(struct vdec_s *, void *), void *arg)
12162{
12163 struct hevc_state_s *hevc =
12164 (struct hevc_state_s *)vdec->private;
12165 int r, loadr = 0;
12166 unsigned char check_sum = 0;
12167
12168 run_count[hevc->index]++;
12169 hevc->vdec_cb_arg = arg;
12170 hevc->vdec_cb = callback;
12171 hevc->aux_data_dirty = 1;
12172 hevc_reset_core(vdec);
12173
12174#ifdef AGAIN_HAS_THRESHOLD
12175 hevc->pre_parser_wr_ptr =
12176 READ_PARSER_REG(PARSER_VIDEO_WP);
12177 hevc->next_again_flag = 0;
12178#endif
12179 r = vdec_prepare_input(vdec, &hevc->chunk);
12180 if (r < 0) {
12181 input_empty[hevc->index]++;
12182 hevc->dec_result = DEC_RESULT_AGAIN;
12183 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12184 "ammvdec_vh265: Insufficient data\n");
12185
12186 vdec_schedule_work(&hevc->work);
12187 return;
12188 }
12189 input_empty[hevc->index] = 0;
12190 hevc->dec_result = DEC_RESULT_NONE;
12191 if (vdec_frame_based(vdec) &&
12192 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12193 || is_log_enable(hevc)))
12194 check_sum = get_data_check_sum(hevc, r);
12195
12196 if (is_log_enable(hevc))
12197 add_log(hevc,
12198 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12199 __func__, r,
12200 check_sum,
12201 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12202 );
12203 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12204 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12205 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12206 __func__, r,
12207 check_sum,
12208 READ_VREG(HEVC_STREAM_LEVEL),
12209 READ_VREG(HEVC_STREAM_WR_PTR),
12210 READ_VREG(HEVC_STREAM_RD_PTR),
12211 READ_PARSER_REG(PARSER_VIDEO_RP),
12212 READ_PARSER_REG(PARSER_VIDEO_WP),
12213 hevc->start_shift_bytes
12214 );
12215 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12216 input_frame_based(vdec)) {
12217 int jj;
12218 u8 *data = NULL;
12219
12220 if (!hevc->chunk->block->is_mapped)
12221 data = codec_mm_vmap(hevc->chunk->block->start +
12222 hevc->chunk->offset, r);
12223 else
12224 data = ((u8 *)hevc->chunk->block->start_virt)
12225 + hevc->chunk->offset;
12226
12227 for (jj = 0; jj < r; jj++) {
12228 if ((jj & 0xf) == 0)
12229 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12230 "%06x:", jj);
12231 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12232 "%02x ", data[jj]);
12233 if (((jj + 1) & 0xf) == 0)
12234 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12235 "\n");
12236 }
12237
12238 if (!hevc->chunk->block->is_mapped)
12239 codec_mm_unmap_phyaddr(data);
12240 }
12241 if (vdec->mc_loaded) {
12242		/*firmware has been loaded before
12243		and has not changed to another one;
12244		skip reloading.
12245		*/
12246 if (tee_enabled() && hevc->is_swap &&
12247 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12248 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12249 } else {
12250 if (hevc->mmu_enable)
12251 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12252 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12253 "h265_mmu", hevc->fw->data);
12254 else {
12255 if (!hevc->is_4k) {
12256				/* if an older fw package was installed, we need */
12257				/* to try the no-swap fw as a fallback because the */
12258				/* old fw package does not contain the swap fw. */
12259 loadr = amhevc_vdec_loadmc_ex(
12260 VFORMAT_HEVC, vdec,
12261 "hevc_mmu_swap",
12262 hevc->fw->data);
12263 if (loadr < 0)
12264 loadr = amhevc_vdec_loadmc_ex(
12265 VFORMAT_HEVC, vdec,
12266 "h265_mmu",
12267 hevc->fw->data);
12268 else
12269 hevc->is_swap = true;
12270 } else
12271 loadr = amhevc_vdec_loadmc_ex(
12272 VFORMAT_HEVC, vdec,
12273 "h265_mmu", hevc->fw->data);
12274 }
12275 else
12276 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12277 NULL, hevc->fw->data);
12278 if (loadr < 0) {
12279 amhevc_disable();
12280 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12281 tee_enabled() ? "TEE" : "local", loadr);
12282 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12283 vdec_schedule_work(&hevc->work);
12284 return;
12285 }
12286
12287 if (tee_enabled() && hevc->is_swap &&
12288 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12289 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12290#ifdef DETREFILL_ENABLE
12291 if (hevc->is_swap &&
12292 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12293 init_detrefill_buf(hevc);
12294#endif
12295 vdec->mc_loaded = 1;
12296 vdec->mc_type = VFORMAT_HEVC;
12297 }
12298 if (vh265_hw_ctx_restore(hevc) < 0) {
12299 vdec_schedule_work(&hevc->work);
12300 return;
12301 }
12302 vdec_enable_input(vdec);
12303
12304 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12305
12306 if (vdec_frame_based(vdec)) {
12307 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12308 r = hevc->chunk->size +
12309 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12310 hevc->decode_size = r;
12311 }
12312#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12313 else {
12314 if (vdec->master || vdec->slave)
12315 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12316 hevc->shift_byte_count_lo);
12317 }
12318#endif
12319 WRITE_VREG(HEVC_DECODE_SIZE, r);
12320 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12321 hevc->init_flag = 1;
12322
12323 if (hevc->pic_list_init_flag == 3)
12324 init_pic_list_hw(hevc);
12325
12326 backup_decode_state(hevc);
12327
12328 start_process_time(hevc);
12329 mod_timer(&hevc->timer, jiffies);
12330 hevc->stat |= STAT_TIMER_ARM;
12331 hevc->stat |= STAT_ISR_REG;
12332 amhevc_start();
12333 hevc->stat |= STAT_VDEC_RUN;
12334}
12335
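/*
 * Return the per-picture canvas indexes to the vdec core (parallel decode
 * mode only) so reset() can rebuild the picture list with fresh canvases.
 */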
12336static void aml_free_canvas(struct vdec_s *vdec)
12337{
12338 int i;
12339 struct hevc_state_s *hevc =
12340 (struct hevc_state_s *)vdec->private;
12341
12342 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12343 struct PIC_s *pic = hevc->m_PIC[i];
12344
12345 if (pic) {
12346 if (vdec->parallel_dec == 1) {
12347 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
12348 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
12349 }
12350 }
12351 }
12352}
12353
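/*
 * vdec core reset callback: cancel pending work, stop the ucode and timer,
 * release MV buffers and canvases, then re-run the local init so the
 * instance restarts with an empty buffer pool and picture list.
 */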
12354static void reset(struct vdec_s *vdec)
12355{
12356
12357 struct hevc_state_s *hevc =
12358 (struct hevc_state_s *)vdec->private;
12359 int i;
12360
12361 cancel_work_sync(&hevc->work);
12362 cancel_work_sync(&hevc->notify_work);
12363 if (hevc->stat & STAT_VDEC_RUN) {
12364 amhevc_stop();
12365 hevc->stat &= ~STAT_VDEC_RUN;
12366 }
12367
12368 if (hevc->stat & STAT_TIMER_ARM) {
12369 del_timer_sync(&hevc->timer);
12370 hevc->stat &= ~STAT_TIMER_ARM;
12371 }
12372 hevc->dec_result = DEC_RESULT_NONE;
12373 reset_process_time(hevc);
12374 hevc->init_flag = 0;
12375 hevc->pic_list_init_flag = 0;
12376 dealloc_mv_bufs(hevc);
12377 aml_free_canvas(vdec);
12378 hevc_local_uninit(hevc);
12379 if (vh265_local_init(hevc) < 0)
12380 pr_debug(" %s local init fail\n", __func__);
12381 for (i = 0; i < BUF_POOL_SIZE; i++) {
12382 hevc->m_BUF[i].start_adr = 0;
12383 }
12384
12385 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12386}
12387
12388static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12389{
12390 struct hevc_state_s *hevc =
12391 (struct hevc_state_s *)vdec->private;
12392
12393 return vh265_isr(0, hevc);
12394}
12395
12396static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12397{
12398 struct hevc_state_s *hevc =
12399 (struct hevc_state_s *)vdec->private;
12400
12401 return vh265_isr_thread_fn(0, hevc);
12402}
12403#endif
12404
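/*
 * Probe for the legacy single-instance decoder: allocate one global
 * hevc_state_s, set up the MMU/BMMU boxes and the workspace buffer
 * (cleared unless the stream is secure), copy sys_info, register the
 * decoder status callbacks and raise the HEVC clock for smooth 4k60
 * playback.
 */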
12405static int amvdec_h265_probe(struct platform_device *pdev)
12406{
12407#ifdef MULTI_INSTANCE_SUPPORT
12408 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12409#else
12410 struct vdec_dev_reg_s *pdata =
12411 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12412#endif
12413 char *tmpbuf;
12414 int ret;
12415 struct hevc_state_s *hevc;
12416
12417 hevc = vmalloc(sizeof(struct hevc_state_s));
12418 if (hevc == NULL) {
12419 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12420 return -ENOMEM;
12421 }
12422 gHevc = hevc;
12423 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12424 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12425 H265_DEBUG_DIS_SYS_ERROR_PROC));
12426 memset(hevc, 0, sizeof(struct hevc_state_s));
12427 if (get_dbg_flag(hevc))
12428 hevc_print(hevc, 0, "%s\r\n", __func__);
12429 mutex_lock(&vh265_mutex);
12430
12431 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12432 (parser_sei_enable & 0x100) == 0)
12433 parser_sei_enable = 7; /*old 1*/
12434 hevc->m_ins_flag = 0;
12435 hevc->init_flag = 0;
12436 hevc->first_sc_checked = 0;
12437 hevc->uninit_list = 0;
12438 hevc->fatal_error = 0;
12439 hevc->show_frame_num = 0;
12440 hevc->frameinfo_enable = 1;
12441#ifdef MULTI_INSTANCE_SUPPORT
12442 hevc->platform_dev = pdev;
12443 platform_set_drvdata(pdev, pdata);
12444#endif
12445
12446 if (pdata == NULL) {
12447 hevc_print(hevc, 0,
12448 "\namvdec_h265 memory resource undefined.\n");
12449 vfree(hevc);
12450 mutex_unlock(&vh265_mutex);
12451 return -EFAULT;
12452 }
12453 if (mmu_enable_force == 0) {
12454 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12455 || double_write_mode == 0x10)
12456 hevc->mmu_enable = 0;
12457 else
12458 hevc->mmu_enable = 1;
12459 }
12460 if (init_mmu_buffers(hevc)) {
12461 hevc_print(hevc, 0,
12462 "\n 265 mmu init failed!\n");
12463 vfree(hevc);
12464 mutex_unlock(&vh265_mutex);
12465 return -EFAULT;
12466 }
12467
12468 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12469 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12470 if (ret < 0) {
12471 uninit_mmu_buffers(hevc);
12472 vfree(hevc);
12473 mutex_unlock(&vh265_mutex);
12474 return ret;
12475 }
12476 hevc->buf_size = work_buf_size;
12477
12478
12479 if (!vdec_secure(pdata)) {
12480 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12481 if (tmpbuf) {
12482 memset(tmpbuf, 0, work_buf_size);
12483 dma_sync_single_for_device(amports_get_dma_device(),
12484 hevc->buf_start,
12485 work_buf_size, DMA_TO_DEVICE);
12486 } else {
12487 tmpbuf = codec_mm_vmap(hevc->buf_start,
12488 work_buf_size);
12489 if (tmpbuf) {
12490 memset(tmpbuf, 0, work_buf_size);
12491 dma_sync_single_for_device(
12492 amports_get_dma_device(),
12493 hevc->buf_start,
12494 work_buf_size,
12495 DMA_TO_DEVICE);
12496 codec_mm_unmap_phyaddr(tmpbuf);
12497 }
12498 }
12499 }
12500
12501 if (get_dbg_flag(hevc)) {
12502 hevc_print(hevc, 0,
12503 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12504 hevc->buf_start, hevc->buf_size);
12505 }
12506
12507 if (pdata->sys_info)
12508 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12509 else {
12510 hevc->vh265_amstream_dec_info.width = 0;
12511 hevc->vh265_amstream_dec_info.height = 0;
12512 hevc->vh265_amstream_dec_info.rate = 30;
12513 }
12514#ifndef MULTI_INSTANCE_SUPPORT
12515 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12516 workaround_enable |= 3;
12517 hevc_print(hevc, 0,
12518 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
12519 } else
12520 workaround_enable &= ~3;
12521#endif
12522 hevc->cma_dev = pdata->cma_dev;
12523 vh265_vdec_info_init();
12524
12525#ifdef MULTI_INSTANCE_SUPPORT
12526 pdata->private = hevc;
12527 pdata->dec_status = vh265_dec_status;
12528 pdata->set_isreset = vh265_set_isreset;
12529 is_reset = 0;
12530 if (vh265_init(pdata) < 0) {
12531#else
12532 if (vh265_init(hevc) < 0) {
12533#endif
12534 hevc_print(hevc, 0,
12535 "\namvdec_h265 init failed.\n");
12536 hevc_local_uninit(hevc);
12537 uninit_mmu_buffers(hevc);
12538 vfree(hevc);
12539 pdata->dec_status = NULL;
12540 mutex_unlock(&vh265_mutex);
12541 return -ENODEV;
12542 }
12543 /*set the max clk for smooth playing...*/
12544 hevc_source_changed(VFORMAT_HEVC,
12545 3840, 2160, 60);
12546 mutex_unlock(&vh265_mutex);
12547
12548 return 0;
12549}
12550
12551static int amvdec_h265_remove(struct platform_device *pdev)
12552{
12553 struct hevc_state_s *hevc = gHevc;
12554
12555 if (get_dbg_flag(hevc))
12556 hevc_print(hevc, 0, "%s\r\n", __func__);
12557
12558 mutex_lock(&vh265_mutex);
12559
12560 vh265_stop(hevc);
12561
12562 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
12563
12564
12565#ifdef DEBUG_PTS
12566 hevc_print(hevc, 0,
12567 "pts missed %ld, pts hit %ld, duration %d\n",
12568 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
12569#endif
12570
12571 vfree(hevc);
12572 hevc = NULL;
12573 gHevc = NULL;
12574
12575 mutex_unlock(&vh265_mutex);
12576
12577 return 0;
12578}
12579/****************************************/
12580#ifdef CONFIG_PM
12581static int h265_suspend(struct device *dev)
12582{
12583 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
12584 return 0;
12585}
12586
12587static int h265_resume(struct device *dev)
12588{
12589 amhevc_resume(to_platform_device(dev));
12590 return 0;
12591}
12592
12593static const struct dev_pm_ops h265_pm_ops = {
12594 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
12595};
12596#endif
12597
12598static struct platform_driver amvdec_h265_driver = {
12599 .probe = amvdec_h265_probe,
12600 .remove = amvdec_h265_remove,
12601 .driver = {
12602 .name = DRIVER_NAME,
12603#ifdef CONFIG_PM
12604 .pm = &h265_pm_ops,
12605#endif
12606 }
12607};
12608
12609#ifdef MULTI_INSTANCE_SUPPORT
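/*
 * Status dump used by the vdec core: stream geometry, per-instance
 * counters, receiver state, buffer and MV-buffer pools, key HEVC stream
 * registers and, with PRINT_FRAMEBASE_DATA, a hexdump of the current
 * frame-based chunk.
 */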
12610static void vh265_dump_state(struct vdec_s *vdec)
12611{
12612 int i;
12613 struct hevc_state_s *hevc =
12614 (struct hevc_state_s *)vdec->private;
12615 hevc_print(hevc, 0,
12616 "====== %s\n", __func__);
12617
12618 hevc_print(hevc, 0,
12619 "width/height (%d/%d), reorder_pic_num %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
12620 hevc->frame_width,
12621 hevc->frame_height,
12622 hevc->sps_num_reorder_pics_0,
12623 get_work_pic_num(hevc),
12624 hevc->video_signal_type_debug,
12625 hevc->is_swap
12626 );
12627
12628 hevc_print(hevc, 0,
12629 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
12630 input_frame_based(vdec),
12631 hevc->eos,
12632 hevc->dec_result,
12633 decode_frame_count[hevc->index],
12634 display_frame_count[hevc->index],
12635 run_count[hevc->index],
12636 not_run_ready[hevc->index],
12637 input_empty[hevc->index]
12638 );
12639
12640 if (vf_get_receiver(vdec->vf_provider_name)) {
12641 enum receviver_start_e state =
12642 vf_notify_receiver(vdec->vf_provider_name,
12643 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12644 NULL);
12645 hevc_print(hevc, 0,
12646 "\nreceiver(%s) state %d\n",
12647 vdec->vf_provider_name,
12648 state);
12649 }
12650
12651 hevc_print(hevc, 0,
12652 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
12653 __func__,
12654 kfifo_len(&hevc->newframe_q),
12655 VF_POOL_SIZE,
12656 kfifo_len(&hevc->display_q),
12657 VF_POOL_SIZE,
12658 hevc->vf_pre_count,
12659 hevc->vf_get_count,
12660 hevc->vf_put_count,
12661 hevc->pic_list_init_flag,
12662 is_new_pic_available(hevc)
12663 );
12664
12665 dump_pic_list(hevc);
12666
12667 for (i = 0; i < BUF_POOL_SIZE; i++) {
12668 hevc_print(hevc, 0,
12669 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12670 i,
12671 hevc->m_BUF[i].start_adr,
12672 hevc->m_BUF[i].size,
12673 hevc->m_BUF[i].used_flag);
12674 }
12675
12676 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12677 hevc_print(hevc, 0,
12678 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12679 i,
12680 hevc->m_mv_BUF[i].start_adr,
12681 hevc->m_mv_BUF[i].size,
12682 hevc->m_mv_BUF[i].used_flag);
12683 }
12684
12685 hevc_print(hevc, 0,
12686 "HEVC_DEC_STATUS_REG=0x%x\n",
12687 READ_VREG(HEVC_DEC_STATUS_REG));
12688 hevc_print(hevc, 0,
12689 "HEVC_MPC_E=0x%x\n",
12690 READ_VREG(HEVC_MPC_E));
12691 hevc_print(hevc, 0,
12692 "HEVC_DECODE_MODE=0x%x\n",
12693 READ_VREG(HEVC_DECODE_MODE));
12694 hevc_print(hevc, 0,
12695 "HEVC_DECODE_MODE2=0x%x\n",
12696 READ_VREG(HEVC_DECODE_MODE2));
12697 hevc_print(hevc, 0,
12698 "NAL_SEARCH_CTL=0x%x\n",
12699 READ_VREG(NAL_SEARCH_CTL));
12700 hevc_print(hevc, 0,
12701 "HEVC_PARSER_LCU_START=0x%x\n",
12702 READ_VREG(HEVC_PARSER_LCU_START));
12703 hevc_print(hevc, 0,
12704 "HEVC_DECODE_SIZE=0x%x\n",
12705 READ_VREG(HEVC_DECODE_SIZE));
12706 hevc_print(hevc, 0,
12707 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
12708 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
12709 hevc_print(hevc, 0,
12710 "HEVC_STREAM_START_ADDR=0x%x\n",
12711 READ_VREG(HEVC_STREAM_START_ADDR));
12712 hevc_print(hevc, 0,
12713 "HEVC_STREAM_END_ADDR=0x%x\n",
12714 READ_VREG(HEVC_STREAM_END_ADDR));
12715 hevc_print(hevc, 0,
12716 "HEVC_STREAM_LEVEL=0x%x\n",
12717 READ_VREG(HEVC_STREAM_LEVEL));
12718 hevc_print(hevc, 0,
12719 "HEVC_STREAM_WR_PTR=0x%x\n",
12720 READ_VREG(HEVC_STREAM_WR_PTR));
12721 hevc_print(hevc, 0,
12722 "HEVC_STREAM_RD_PTR=0x%x\n",
12723 READ_VREG(HEVC_STREAM_RD_PTR));
12724 hevc_print(hevc, 0,
12725 "PARSER_VIDEO_RP=0x%x\n",
12726 READ_PARSER_REG(PARSER_VIDEO_RP));
12727 hevc_print(hevc, 0,
12728 "PARSER_VIDEO_WP=0x%x\n",
12729 READ_PARSER_REG(PARSER_VIDEO_WP));
12730
12731 if (input_frame_based(vdec) &&
12732 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
12733 ) {
12734 int jj;
12735 if (hevc->chunk && hevc->chunk->block &&
12736 hevc->chunk->size > 0) {
12737 u8 *data = NULL;
12738 if (!hevc->chunk->block->is_mapped)
12739 data = codec_mm_vmap(hevc->chunk->block->start +
12740 hevc->chunk->offset, hevc->chunk->size);
12741 else
12742 data = ((u8 *)hevc->chunk->block->start_virt)
12743 + hevc->chunk->offset;
12744 hevc_print(hevc, 0,
12745 "frame data size 0x%x\n",
12746 hevc->chunk->size);
12747 for (jj = 0; jj < hevc->chunk->size; jj++) {
12748 if ((jj & 0xf) == 0)
12749 hevc_print(hevc,
12750 PRINT_FRAMEBASE_DATA,
12751 "%06x:", jj);
12752 hevc_print_cont(hevc,
12753 PRINT_FRAMEBASE_DATA,
12754 "%02x ", data[jj]);
12755 if (((jj + 1) & 0xf) == 0)
12756 hevc_print_cont(hevc,
12757 PRINT_FRAMEBASE_DATA,
12758 "\n");
12759 }
12760
12761 if (!hevc->chunk->block->is_mapped)
12762 codec_mm_unmap_phyaddr(data);
12763 }
12764 }
12765
12766}
12767
12768
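/*
 * Probe for the multi-instance decoder: allocate a per-instance
 * hevc_state_s, wire the run/run_ready/reset/irq/dump callbacks, pick the
 * vframe provider name (VFM path, DolbyVision dual layer, or indexed
 * multi-instance), apply the per-instance config string (double write
 * mode, buffer margins, v4l options), set up the MMU/BMMU buffers and the
 * workspace, then request the HEVC core (plus VDEC_1 and COMBINE when not
 * in parallel mode).
 */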
12769static int ammvdec_h265_probe(struct platform_device *pdev)
12770{
12771
12772 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12773 struct hevc_state_s *hevc = NULL;
12774 int ret;
12775#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12776 int config_val;
12777#endif
12778 if (pdata == NULL) {
12779 pr_info("\nammvdec_h265 memory resource undefined.\n");
12780 return -EFAULT;
12781 }
12782
12783 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
12784 sizeof(struct hevc_state_s), GFP_KERNEL); */
12785 hevc = vmalloc(sizeof(struct hevc_state_s));
12786 if (hevc == NULL) {
12787 pr_info("\nammvdec_h265 device data allocation failed\n");
12788 return -ENOMEM;
12789 }
12790 memset(hevc, 0, sizeof(struct hevc_state_s));
12791
12792 /* the ctx from v4l2 driver. */
12793 hevc->v4l2_ctx = pdata->private;
12794
12795 pdata->private = hevc;
12796 pdata->dec_status = vh265_dec_status;
12797 /* pdata->set_trickmode = set_trickmode; */
12798 pdata->run_ready = run_ready;
12799 pdata->run = run;
12800 pdata->reset = reset;
12801 pdata->irq_handler = vh265_irq_cb;
12802 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
12803 pdata->dump_state = vh265_dump_state;
12804
12805 hevc->index = pdev->id;
12806 hevc->m_ins_flag = 1;
12807
12808 if (pdata->use_vfm_path) {
12809 snprintf(pdata->vf_provider_name,
12810 VDEC_PROVIDER_NAME_SIZE,
12811 VFM_DEC_PROVIDER_NAME);
12812 hevc->frameinfo_enable = 1;
12813 }
12814#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12815 else if (vdec_dual(pdata)) {
12816 struct hevc_state_s *hevc_pair = NULL;
12817
12818 if (dv_toggle_prov_name) /*debug purpose*/
12819 snprintf(pdata->vf_provider_name,
12820 VDEC_PROVIDER_NAME_SIZE,
12821 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
12822 VFM_DEC_DVEL_PROVIDER_NAME);
12823 else
12824 snprintf(pdata->vf_provider_name,
12825 VDEC_PROVIDER_NAME_SIZE,
12826 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
12827 VFM_DEC_DVBL_PROVIDER_NAME);
12828 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
12829 if (pdata->master)
12830 hevc_pair = (struct hevc_state_s *)
12831 pdata->master->private;
12832 else if (pdata->slave)
12833 hevc_pair = (struct hevc_state_s *)
12834 pdata->slave->private;
12835 if (hevc_pair)
12836 hevc->shift_byte_count_lo =
12837 hevc_pair->shift_byte_count_lo;
12838 }
12839#endif
12840 else
12841 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
12842 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
12843
12844 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
12845 &vh265_vf_provider, pdata);
12846
12847 hevc->provider_name = pdata->vf_provider_name;
12848 platform_set_drvdata(pdev, pdata);
12849
12850 hevc->platform_dev = pdev;
12851
12852 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
12853 pdata->config && pdata->config_len) {
12854#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12855		/*use ptr config for double_write_mode, etc*/
12856 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
12857
12858 if (get_config_int(pdata->config, "hevc_double_write_mode",
12859 &config_val) == 0)
12860 hevc->double_write_mode = config_val;
12861 else
12862 hevc->double_write_mode = double_write_mode;
12863
12864 if (get_config_int(pdata->config, "save_buffer_mode",
12865 &config_val) == 0)
12866 hevc->save_buffer_mode = config_val;
12867 else
12868 hevc->save_buffer_mode = 0;
12869
12870 /*use ptr config for max_pic_w, etc*/
12871 if (get_config_int(pdata->config, "hevc_buf_width",
12872 &config_val) == 0) {
12873 hevc->max_pic_w = config_val;
12874 }
12875 if (get_config_int(pdata->config, "hevc_buf_height",
12876 &config_val) == 0) {
12877 hevc->max_pic_h = config_val;
12878 }
12879
12880 if (get_config_int(pdata->config,
12881 "parm_v4l_codec_enable",
12882 &config_val) == 0)
12883 hevc->is_used_v4l = config_val;
12884
12885 if (get_config_int(pdata->config,
12886 "parm_v4l_buffer_margin",
12887 &config_val) == 0)
12888 hevc->dynamic_buf_num_margin = config_val;
12889
12890 if (get_config_int(pdata->config,
12891 "parm_v4l_canvas_mem_mode",
12892 &config_val) == 0)
12893 hevc->mem_map_mode = config_val;
12894#endif
12895 } else {
12896 if (pdata->sys_info)
12897 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12898 else {
12899 hevc->vh265_amstream_dec_info.width = 0;
12900 hevc->vh265_amstream_dec_info.height = 0;
12901 hevc->vh265_amstream_dec_info.rate = 30;
12902 }
12903 hevc->double_write_mode = double_write_mode;
12904 }
12905 if (!hevc->is_used_v4l) {
12906 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
12907			hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
12908 else
12909 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
12910
12911 hevc->mem_map_mode = mem_map_mode;
12912 }
12913
12914 if (mmu_enable_force == 0) {
12915 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
12916 hevc->mmu_enable = 0;
12917 else
12918 hevc->mmu_enable = 1;
12919 }
12920
12921 if (init_mmu_buffers(hevc) < 0) {
12922 hevc_print(hevc, 0,
12923 "\n 265 mmu init failed!\n");
12924 mutex_unlock(&vh265_mutex);
12925 /* devm_kfree(&pdev->dev, (void *)hevc);*/
12926 if (hevc)
12927 vfree((void *)hevc);
12928 pdata->dec_status = NULL;
12929 return -EFAULT;
12930 }
12931#if 0
12932 hevc->buf_start = pdata->mem_start;
12933 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
12934#else
12935
12936 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
12937 BMMU_WORKSPACE_ID, work_buf_size,
12938 DRIVER_NAME, &hevc->buf_start);
12939 if (ret < 0) {
12940 uninit_mmu_buffers(hevc);
12941 /* devm_kfree(&pdev->dev, (void *)hevc); */
12942 if (hevc)
12943 vfree((void *)hevc);
12944 pdata->dec_status = NULL;
12945 mutex_unlock(&vh265_mutex);
12946 return ret;
12947 }
12948 hevc->buf_size = work_buf_size;
12949#endif
12950 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12951 (parser_sei_enable & 0x100) == 0)
12952 parser_sei_enable = 7;
12953 hevc->init_flag = 0;
12954 hevc->first_sc_checked = 0;
12955 hevc->uninit_list = 0;
12956 hevc->fatal_error = 0;
12957 hevc->show_frame_num = 0;
12958
12959 /*
12960 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
12961 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
12962 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
12963 */
12964 if (get_dbg_flag(hevc)) {
12965 hevc_print(hevc, 0,
12966 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12967 hevc->buf_start, hevc->buf_size);
12968 }
12969
12970 hevc_print(hevc, 0,
12971 "dynamic_buf_num_margin=%d\n",
12972 hevc->dynamic_buf_num_margin);
12973 hevc_print(hevc, 0,
12974 "double_write_mode=%d\n",
12975 hevc->double_write_mode);
12976
12977 hevc->cma_dev = pdata->cma_dev;
12978
12979 if (vh265_init(pdata) < 0) {
12980 hevc_print(hevc, 0,
12981 "\namvdec_h265 init failed.\n");
12982 hevc_local_uninit(hevc);
12983 uninit_mmu_buffers(hevc);
12984 /* devm_kfree(&pdev->dev, (void *)hevc); */
12985 if (hevc)
12986 vfree((void *)hevc);
12987 pdata->dec_status = NULL;
12988 return -ENODEV;
12989 }
12990
12991 vdec_set_prepare_level(pdata, start_decode_buf_level);
12992
12993 /*set the max clk for smooth playing...*/
12994 hevc_source_changed(VFORMAT_HEVC,
12995 3840, 2160, 60);
12996 if (pdata->parallel_dec == 1)
12997 vdec_core_request(pdata, CORE_MASK_HEVC);
12998 else
12999 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
13000 | CORE_MASK_COMBINE);
13001
13002 return 0;
13003}
13004
13005static int ammvdec_h265_remove(struct platform_device *pdev)
13006{
13007 struct hevc_state_s *hevc =
13008 (struct hevc_state_s *)
13009 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
13010 struct vdec_s *vdec = hw_to_vdec(hevc);
13011
13012 if (hevc == NULL)
13013 return 0;
13014
13015 if (get_dbg_flag(hevc))
13016 hevc_print(hevc, 0, "%s\r\n", __func__);
13017
13018 vmh265_stop(hevc);
13019
13020 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
13021 if (vdec->parallel_dec == 1)
13022 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
13023 else
13024		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
13025
13026 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
13027
13028 vfree((void *)hevc);
13029 return 0;
13030}
13031
13032static struct platform_driver ammvdec_h265_driver = {
13033 .probe = ammvdec_h265_probe,
13034 .remove = ammvdec_h265_remove,
13035 .driver = {
13036 .name = MULTI_DRIVER_NAME,
13037#ifdef CONFIG_PM
13038 .pm = &h265_pm_ops,
13039#endif
13040 }
13041};
13042#endif
13043
13044static struct codec_profile_t amvdec_h265_profile = {
13045 .name = "hevc",
13046 .profile = ""
13047};
13048
13049static struct codec_profile_t amvdec_h265_profile_single,
13050 amvdec_h265_profile_mult;
13051
13052static struct mconfig h265_configs[] = {
13053 MC_PU32("use_cma", &use_cma),
13054 MC_PU32("bit_depth_luma", &bit_depth_luma),
13055 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
13056 MC_PU32("video_signal_type", &video_signal_type),
13057#ifdef ERROR_HANDLE_DEBUG
13058 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
13059 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
13060#endif
13061 MC_PU32("radr", &radr),
13062 MC_PU32("rval", &rval),
13063 MC_PU32("dbg_cmd", &dbg_cmd),
13064 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
13065 MC_PU32("endian", &endian),
13066 MC_PU32("step", &step),
13067 MC_PU32("udebug_flag", &udebug_flag),
13068 MC_PU32("decode_pic_begin", &decode_pic_begin),
13069 MC_PU32("slice_parse_begin", &slice_parse_begin),
13070 MC_PU32("nal_skip_policy", &nal_skip_policy),
13071 MC_PU32("i_only_flag", &i_only_flag),
13072 MC_PU32("error_handle_policy", &error_handle_policy),
13073 MC_PU32("error_handle_threshold", &error_handle_threshold),
13074 MC_PU32("error_handle_nal_skip_threshold",
13075 &error_handle_nal_skip_threshold),
13076 MC_PU32("error_handle_system_threshold",
13077 &error_handle_system_threshold),
13078 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
13079 MC_PU32("debug", &debug),
13080 MC_PU32("debug_mask", &debug_mask),
13081 MC_PU32("buffer_mode", &buffer_mode),
13082 MC_PU32("double_write_mode", &double_write_mode),
13083 MC_PU32("buf_alloc_width", &buf_alloc_width),
13084 MC_PU32("buf_alloc_height", &buf_alloc_height),
13085 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
13086 MC_PU32("max_buf_num", &max_buf_num),
13087 MC_PU32("buf_alloc_size", &buf_alloc_size),
13088 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
13089 MC_PU32("mem_map_mode", &mem_map_mode),
13090 MC_PU32("enable_mem_saving", &enable_mem_saving),
13091 MC_PU32("force_w_h", &force_w_h),
13092 MC_PU32("force_fps", &force_fps),
13093 MC_PU32("max_decoding_time", &max_decoding_time),
13094 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
13095 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
13096 MC_PU32("interlace_enable", &interlace_enable),
13097 MC_PU32("pts_unstable", &pts_unstable),
13098 MC_PU32("parser_sei_enable", &parser_sei_enable),
13099 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
13100 MC_PU32("decode_timeout_val", &decode_timeout_val),
13101#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13102 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
13103 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
13104 MC_PU32("dv_debug", &dv_debug),
13105#endif
13106};
13107static struct mconfig_node decoder_265_node;
13108
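/*
 * Module init: pick the workspace spec by chip capability (a larger spec
 * for SM1 and newer when 4k is supported, otherwise the 4k or 1080p spec)
 * and round its size up to a 64 KiB boundary via
 * (end_adr - start_adr + 0xffff) & ~0xffff, register the multi-instance
 * and single-instance platform drivers, advertise the "hevc"/"h265"/
 * "mh265" profiles according to the chip, and register the h265 debug
 * configs node.
 */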
13109static int __init amvdec_h265_driver_init_module(void)
13110{
13111 struct BuffInfo_s *p_buf_info;
13112
13113 if (vdec_is_support_4k()) {
13114 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
13115 p_buf_info = &amvh265_workbuff_spec[2];
13116 else
13117 p_buf_info = &amvh265_workbuff_spec[1];
13118 } else
13119 p_buf_info = &amvh265_workbuff_spec[0];
13120
13121 init_buff_spec(NULL, p_buf_info);
13122 work_buf_size =
13123 (p_buf_info->end_adr - p_buf_info->start_adr
13124 + 0xffff) & (~0xffff);
13125
13126 pr_debug("amvdec_h265 module init\n");
13127 error_handle_policy = 0;
13128
13129#ifdef ERROR_HANDLE_DEBUG
13130 dbg_nal_skip_flag = 0;
13131 dbg_nal_skip_count = 0;
13132#endif
13133 udebug_flag = 0;
13134 decode_pic_begin = 0;
13135 slice_parse_begin = 0;
13136 step = 0;
13137 buf_alloc_size = 0;
13138
13139#ifdef MULTI_INSTANCE_SUPPORT
13140 if (platform_driver_register(&ammvdec_h265_driver))
13141 pr_err("failed to register ammvdec_h265 driver\n");
13142
13143#endif
13144 if (platform_driver_register(&amvdec_h265_driver)) {
13145 pr_err("failed to register amvdec_h265 driver\n");
13146 return -ENODEV;
13147 }
13148#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
13149 if (!has_hevc_vdec()) {
13150		/* hevc not supported */
13151 amvdec_h265_profile.name = "hevc_unsupport";
13152 }
13153 if (vdec_is_support_4k()) {
13154 if (is_meson_m8m2_cpu()) {
13155 /* m8m2 support 4k */
13156 amvdec_h265_profile.profile = "4k";
13157 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
13158 amvdec_h265_profile.profile =
13159 "8k, 8bit, 10bit, dwrite, compressed";
13160		} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
13161 amvdec_h265_profile.profile =
13162 "4k, 8bit, 10bit, dwrite, compressed";
13163 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
13164 amvdec_h265_profile.profile = "4k";
13165 }
13166#endif
13167 if (codec_mm_get_total_size() < 80 * SZ_1M) {
13168 pr_info("amvdec_h265 default mmu enabled.\n");
13169 mmu_enable = 1;
13170 }
13171
13172 vcodec_profile_register(&amvdec_h265_profile);
13173 amvdec_h265_profile_single = amvdec_h265_profile;
13174 amvdec_h265_profile_single.name = "h265";
13175 vcodec_profile_register(&amvdec_h265_profile_single);
13176 amvdec_h265_profile_mult = amvdec_h265_profile;
13177 amvdec_h265_profile_mult.name = "mh265";
13178 vcodec_profile_register(&amvdec_h265_profile_mult);
13179 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
13180 "h265", h265_configs, CONFIG_FOR_RW);
13181 return 0;
13182}
13183
13184static void __exit amvdec_h265_driver_remove_module(void)
13185{
13186 pr_debug("amvdec_h265 module remove.\n");
13187
13188#ifdef MULTI_INSTANCE_SUPPORT
13189 platform_driver_unregister(&ammvdec_h265_driver);
13190#endif
13191 platform_driver_unregister(&amvdec_h265_driver);
13192}
13193
13194/****************************************/
13195/*
13196 *module_param(stat, uint, 0664);
13197 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
13198 */
13199module_param(use_cma, uint, 0664);
13200MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13201
13202module_param(bit_depth_luma, uint, 0664);
13203MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13204
13205module_param(bit_depth_chroma, uint, 0664);
13206MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13207
13208module_param(video_signal_type, uint, 0664);
13209MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13210
13211#ifdef ERROR_HANDLE_DEBUG
13212module_param(dbg_nal_skip_flag, uint, 0664);
13213MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13214
13215module_param(dbg_nal_skip_count, uint, 0664);
13216MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13217#endif
13218
13219module_param(radr, uint, 0664);
13220MODULE_PARM_DESC(radr, "\n radr\n");
13221
13222module_param(rval, uint, 0664);
13223MODULE_PARM_DESC(rval, "\n rval\n");
13224
13225module_param(dbg_cmd, uint, 0664);
13226MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13227
13228module_param(dump_nal, uint, 0664);
13229MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13230
13231module_param(dbg_skip_decode_index, uint, 0664);
13232MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13233
13234module_param(endian, uint, 0664);
13235MODULE_PARM_DESC(endian, "\n endian\n");
13236
13237module_param(step, uint, 0664);
13238MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13239
13240module_param(decode_pic_begin, uint, 0664);
13241MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13242
13243module_param(slice_parse_begin, uint, 0664);
13244MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13245
13246module_param(nal_skip_policy, uint, 0664);
13247MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
13248
13249module_param(i_only_flag, uint, 0664);
13250MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
13251
13252module_param(fast_output_enable, uint, 0664);
13253MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
13254
13255module_param(error_handle_policy, uint, 0664);
13256MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
13257
13258module_param(error_handle_threshold, uint, 0664);
13259MODULE_PARM_DESC(error_handle_threshold,
13260 "\n amvdec_h265 error_handle_threshold\n");
13261
13262module_param(error_handle_nal_skip_threshold, uint, 0664);
13263MODULE_PARM_DESC(error_handle_nal_skip_threshold,
13264 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
13265
13266module_param(error_handle_system_threshold, uint, 0664);
13267MODULE_PARM_DESC(error_handle_system_threshold,
13268 "\n amvdec_h265 error_handle_system_threshold\n");
13269
13270module_param(error_skip_nal_count, uint, 0664);
13271MODULE_PARM_DESC(error_skip_nal_count,
13272 "\n amvdec_h265 error_skip_nal_count\n");
13273
13274module_param(debug, uint, 0664);
13275MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
13276
13277module_param(debug_mask, uint, 0664);
13278MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
13279
13280module_param(log_mask, uint, 0664);
13281MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
13282
13283module_param(buffer_mode, uint, 0664);
13284MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
13285
13286module_param(double_write_mode, uint, 0664);
13287MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
13288
13289module_param(buf_alloc_width, uint, 0664);
13290MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
13291
13292module_param(buf_alloc_height, uint, 0664);
13293MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
13294
13295module_param(dynamic_buf_num_margin, uint, 0664);
13296MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
13297
13298module_param(max_buf_num, uint, 0664);
13299MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
13300
13301module_param(buf_alloc_size, uint, 0664);
13302MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
13303
#ifdef CONSTRAIN_MAX_BUF_NUM
module_param(run_ready_max_vf_only_num, uint, 0664);
MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");

module_param(run_ready_display_q_num, uint, 0664);
MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");

module_param(run_ready_max_buf_num, uint, 0664);
MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
#endif

#if 0
module_param(re_config_pic_flag, uint, 0664);
MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
#endif

module_param(buffer_mode_dbg, uint, 0664);
MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");

module_param(mem_map_mode, uint, 0664);
MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");

module_param(enable_mem_saving, uint, 0664);
MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");

module_param(force_w_h, uint, 0664);
MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");

module_param(force_fps, uint, 0664);
MODULE_PARM_DESC(force_fps, "\n force_fps\n");

module_param(max_decoding_time, uint, 0664);
MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");

module_param(prefix_aux_buf_size, uint, 0664);
MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");

module_param(suffix_aux_buf_size, uint, 0664);
MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");

module_param(interlace_enable, uint, 0664);
MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
module_param(pts_unstable, uint, 0664);
MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
module_param(parser_sei_enable, uint, 0664);
MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");

#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
module_param(parser_dolby_vision_enable, uint, 0664);
MODULE_PARM_DESC(parser_dolby_vision_enable,
	"\n parser_dolby_vision_enable\n");

module_param(dolby_meta_with_el, uint, 0664);
MODULE_PARM_DESC(dolby_meta_with_el,
	"\n dolby_meta_with_el\n");

module_param(dolby_el_flush_th, uint, 0664);
MODULE_PARM_DESC(dolby_el_flush_th,
	"\n dolby_el_flush_th\n");
#endif
module_param(mmu_enable, uint, 0664);
MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");

module_param(mmu_enable_force, uint, 0664);
MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");

#ifdef MULTI_INSTANCE_SUPPORT
module_param(start_decode_buf_level, int, 0664);
MODULE_PARM_DESC(start_decode_buf_level,
	"\n h265 start_decode_buf_level\n");

module_param(decode_timeout_val, uint, 0664);
MODULE_PARM_DESC(decode_timeout_val,
	"\n h265 decode_timeout_val\n");

module_param(data_resend_policy, uint, 0664);
MODULE_PARM_DESC(data_resend_policy,
	"\n h265 data_resend_policy\n");

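/*
 * The parameters below are exported as arrays with one element per decoder
 * instance; the usable length is bounded by max_decode_instance_num. When
 * the driver is built as a module they can be read back through sysfs as
 * comma-separated lists, which is mainly useful for per-instance statistics
 * such as decode/display frame counts and run counts.
 */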
module_param_array(decode_frame_count, uint,
	&max_decode_instance_num, 0664);

module_param_array(display_frame_count, uint,
	&max_decode_instance_num, 0664);

module_param_array(max_process_time, uint,
	&max_decode_instance_num, 0664);

module_param_array(max_get_frame_interval,
	uint, &max_decode_instance_num, 0664);

module_param_array(run_count, uint,
	&max_decode_instance_num, 0664);

module_param_array(input_empty, uint,
	&max_decode_instance_num, 0664);

module_param_array(not_run_ready, uint,
	&max_decode_instance_num, 0664);

module_param_array(ref_frame_mark_flag, uint,
	&max_decode_instance_num, 0664);

#endif
#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
module_param(dv_toggle_prov_name, uint, 0664);
MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");

module_param(dv_debug, uint, 0664);
MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");

module_param(force_bypass_dvenl, uint, 0664);
MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
#endif

#ifdef AGAIN_HAS_THRESHOLD
module_param(again_threshold, uint, 0664);
MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
#endif

module_param(force_disp_pic_index, int, 0664);
MODULE_PARM_DESC(force_disp_pic_index,
	"\n amvdec_h265 force_disp_pic_index\n");

module_param(frmbase_cont_bitlevel, uint, 0664);
MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");

module_param(udebug_flag, uint, 0664);
MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");

module_param(udebug_pause_pos, uint, 0664);
MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");

module_param(udebug_pause_val, uint, 0664);
MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");

module_param(pre_decode_buf_level, int, 0664);
MODULE_PARM_DESC(pre_decode_buf_level, "\n amvdec_h265 pre_decode_buf_level\n");

module_param(udebug_pause_decode_idx, uint, 0664);
MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");

module_param(disp_vframe_valve_level, uint, 0664);
MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");

module_param(pic_list_debug, uint, 0664);
MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");

module_param(without_display_mode, uint, 0664);
MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");

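/*
 * Usage sketch for the parameters declared above. The module name
 * "amvdec_h265" is assumed here for illustration; substitute the actual
 * .ko name produced by the build.
 *
 *   # set parameters at load time
 *   modprobe amvdec_h265 debug=1 double_write_mode=1
 *
 *   # or adjust them at runtime through sysfs; parameters declared with
 *   # mode 0664 are writable by owner/group and readable by everyone
 *   echo 16 > /sys/module/amvdec_h265/parameters/error_handle_policy
 *   cat /sys/module/amvdec_h265/parameters/decode_frame_count
 *
 * When the driver is built into the kernel, the same parameters can be set
 * on the kernel command line as <modname>.<param>=<value>.
 */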
module_init(amvdec_h265_driver_init_module);
module_exit(amvdec_h265_driver_remove_module);

MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");