path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
blob: 714ae42e0e71c8e03ea92eb70cefa67d751e0a4c
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50
51#define CONSTRAIN_MAX_BUF_NUM
52
53#define SWAP_HEVC_UCODE
54#define DETREFILL_ENABLE
55
56#define AGAIN_HAS_THRESHOLD
57/*#define TEST_NO_BUF*/
58#define HEVC_PIC_STRUCT_SUPPORT
59#define MULTI_INSTANCE_SUPPORT
60#define USE_UNINIT_SEMA
61
62 /* .buf_size = 0x100000*16,
63 //4k2k , 0x100000 per buffer */
64 /* 4096x2304 , 0x120000 per buffer */
65#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
66#define MPRED_4K_MV_BUF_SIZE (0x120000)
67#define MPRED_MV_BUF_SIZE (0x40000)
68
69#define MMU_COMPRESS_HEADER_SIZE 0x48000
70#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
71
72#define MAX_FRAME_4K_NUM 0x1200
73#define MAX_FRAME_8K_NUM (0x1200*4)
74
75//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
76#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
77
78#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
79
80#define HEVC_CM_HEADER_START_ADDR 0x3628
81#define HEVC_SAO_MMU_VH1_ADDR 0x363b
82#define HEVC_SAO_MMU_VH0_ADDR 0x363a
83
84#define HEVC_DBLK_CFGB 0x350b
85#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
86#define SWAP_HEVC_OFFSET (3 * 0x1000)
87
88#define MEM_NAME "codec_265"
89/* #include <mach/am_regs.h> */
90#include <linux/amlogic/media/utils/vdec_reg.h>
91
92#include "../utils/vdec.h"
93#include "../utils/amvdec.h"
94#include <linux/amlogic/media/video_sink/video.h>
95#include <linux/amlogic/media/codec_mm/configs.h>
96
97#define SEND_LMEM_WITH_RPM
98#define SUPPORT_10BIT
99/* #define ERROR_HANDLE_DEBUG */
100
101#ifndef STAT_KTHREAD
102#define STAT_KTHREAD 0x40
103#endif
104
105#ifdef MULTI_INSTANCE_SUPPORT
106#define MAX_DECODE_INSTANCE_NUM 9
107#define MULTI_DRIVER_NAME "ammvdec_h265"
108#endif
109#define DRIVER_NAME "amvdec_h265"
110#define MODULE_NAME "amvdec_h265"
111#define DRIVER_HEADER_NAME "amvdec_h265_header"
112
113#define PUT_INTERVAL (HZ/100)
114#define ERROR_SYSTEM_RESET_COUNT 200
115
116#define PTS_NORMAL 0
117#define PTS_NONE_REF_USE_DURATION 1
118
119#define PTS_MODE_SWITCHING_THRESHOLD 3
120#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
121
122#define DUR2PTS(x) ((x)*90/96)
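/*
 * Editor's note (assumption, based on the amports convention that frame
 * duration is counted in 1/96000-second units while PTS runs at 90kHz):
 * DUR2PTS() simply rescales 96kHz ticks to 90kHz ticks.  For example a
 * 25fps stream has a duration of 96000/25 = 3840, and
 * DUR2PTS(3840) == 3600 == 90000/25.
 */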
123
124#define MAX_SIZE_8K (8192 * 4608)
125#define MAX_SIZE_4K (4096 * 2304)
126
127#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
128#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
129
130#define SEI_UserDataITU_T_T35 4
131#define INVALID_IDX -1 /* Invalid buffer index.*/
132
133static struct semaphore h265_sema;
134
135struct hevc_state_s;
136static int hevc_print(struct hevc_state_s *hevc,
137 int debug_flag, const char *fmt, ...);
138static int hevc_print_cont(struct hevc_state_s *hevc,
139 int debug_flag, const char *fmt, ...);
140static int vh265_vf_states(struct vframe_states *states, void *);
141static struct vframe_s *vh265_vf_peek(void *);
142static struct vframe_s *vh265_vf_get(void *);
143static void vh265_vf_put(struct vframe_s *, void *);
144static int vh265_event_cb(int type, void *data, void *private_data);
145
146static int vh265_stop(struct hevc_state_s *hevc);
147#ifdef MULTI_INSTANCE_SUPPORT
148static int vmh265_stop(struct hevc_state_s *hevc);
149static s32 vh265_init(struct vdec_s *vdec);
150static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
151static void reset_process_time(struct hevc_state_s *hevc);
152static void start_process_time(struct hevc_state_s *hevc);
153static void restart_process_time(struct hevc_state_s *hevc);
154static void timeout_process(struct hevc_state_s *hevc);
155#else
156static s32 vh265_init(struct hevc_state_s *hevc);
157#endif
158static void vh265_prot_init(struct hevc_state_s *hevc);
159static int vh265_local_init(struct hevc_state_s *hevc);
160static void vh265_check_timer_func(unsigned long arg);
161static void config_decode_mode(struct hevc_state_s *hevc);
162
163static const char vh265_dec_id[] = "vh265-dev";
164
165#define PROVIDER_NAME "decoder.h265"
166#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
167
168static const struct vframe_operations_s vh265_vf_provider = {
169 .peek = vh265_vf_peek,
170 .get = vh265_vf_get,
171 .put = vh265_vf_put,
172 .event_cb = vh265_event_cb,
173 .vf_states = vh265_vf_states,
174};
175
176static struct vframe_provider_s vh265_vf_prov;
177
178static u32 bit_depth_luma;
179static u32 bit_depth_chroma;
180static u32 video_signal_type;
181
182static int start_decode_buf_level = 0x8000;
183
184static unsigned int decode_timeout_val = 200;
185
186/*data_resend_policy:
187 bit 0, for stream-based input, resend data when the decoding buffer is empty
188*/
189static u32 data_resend_policy = 1;
190
191#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
192/*
193static const char * const video_format_names[] = {
194 "component", "PAL", "NTSC", "SECAM",
195 "MAC", "unspecified", "unspecified", "unspecified"
196};
197
198static const char * const color_primaries_names[] = {
199 "unknown", "bt709", "undef", "unknown",
200 "bt470m", "bt470bg", "smpte170m", "smpte240m",
201 "film", "bt2020"
202};
203
204static const char * const transfer_characteristics_names[] = {
205 "unknown", "bt709", "undef", "unknown",
206 "bt470m", "bt470bg", "smpte170m", "smpte240m",
207 "linear", "log100", "log316", "iec61966-2-4",
208 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
209 "smpte-st-2084", "smpte-st-428"
210};
211
212static const char * const matrix_coeffs_names[] = {
213 "GBR", "bt709", "undef", "unknown",
214 "fcc", "bt470bg", "smpte170m", "smpte240m",
215 "YCgCo", "bt2020nc", "bt2020c"
216};
217*/
218#ifdef SUPPORT_10BIT
219#define HEVC_CM_BODY_START_ADDR 0x3626
220#define HEVC_CM_BODY_LENGTH 0x3627
221#define HEVC_CM_HEADER_LENGTH 0x3629
222#define HEVC_CM_HEADER_OFFSET 0x362b
223#define HEVC_SAO_CTRL9 0x362d
224#define LOSLESS_COMPRESS_MODE
225/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
226/* double_write_mode:
227 * 0, no double write;
228 * 1, 1:1 ratio;
229 * 2, (1/4):(1/4) ratio;
230 * 3, (1/4):(1/4) ratio, with the compressed frame also included
231 * 4, (1/2):(1/2) ratio;
232 * 0x10, double write only
233 * 0x100, if > 1080p, use mode 4, else use mode 1;
234 * 0x200, if > 1080p, use mode 2, else use mode 1;
235 * 0x300, if > 720p, use mode 4, else use mode 1;
236 */
237static u32 double_write_mode;
238
239/*#define DECOMP_HEADR_SURGENT*/
240
241static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
242static u32 enable_mem_saving = 1;
243static u32 workaround_enable;
244static u32 force_w_h;
245#endif
246static u32 force_fps;
247static u32 pts_unstable;
248#define H265_DEBUG_BUFMGR 0x01
249#define H265_DEBUG_BUFMGR_MORE 0x02
250#define H265_DEBUG_DETAIL 0x04
251#define H265_DEBUG_REG 0x08
252#define H265_DEBUG_MAN_SEARCH_NAL 0x10
253#define H265_DEBUG_MAN_SKIP_NAL 0x20
254#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
255#define H265_DEBUG_FORCE_CLK 0x80
256#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
257#define H265_DEBUG_NO_DISPLAY 0x200
258#define H265_DEBUG_DISCARD_NAL 0x400
259#define H265_DEBUG_OUT_PTS 0x800
260#define H265_DEBUG_DUMP_PIC_LIST 0x1000
261#define H265_DEBUG_PRINT_SEI 0x2000
262#define H265_DEBUG_PIC_STRUCT 0x4000
263#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
264#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
265#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
266#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
267#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
268#define H265_DEBUG_HW_RESET 0x100000
269#define H265_CFG_CANVAS_IN_DECODE 0x200000
270#define H265_DEBUG_DV 0x400000
271#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
272#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
273#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
274#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
275#ifdef MULTI_INSTANCE_SUPPORT
276#define PRINT_FLAG_ERROR 0x0
277#define IGNORE_PARAM_FROM_CONFIG 0x08000000
278#define PRINT_FRAMEBASE_DATA 0x10000000
279#define PRINT_FLAG_VDEC_STATUS 0x20000000
280#define PRINT_FLAG_VDEC_DETAIL 0x40000000
281#define PRINT_FLAG_V4L_DETAIL 0x80000000
282#endif
283
284#define BUF_POOL_SIZE 32
285#define MAX_BUF_NUM 24
286#define MAX_REF_PIC_NUM 24
287#define MAX_REF_ACTIVE 16
288
289#ifdef MV_USE_FIXED_BUF
290#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
291#define VF_BUFFER_IDX(n) (n)
292#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
293#else
294#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
295#define VF_BUFFER_IDX(n) (n)
296#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
297#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
298#endif
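/*
 * Editor's note on the BMMU slot layout implied by the macros above:
 * slots 0 .. BUF_POOL_SIZE-1 hold the frame buffers (VF_BUFFER_IDX),
 * slot BUF_POOL_SIZE is the decoder workspace, and when MV_USE_FIXED_BUF
 * is not defined, slots BUF_POOL_SIZE+1 .. BUF_POOL_SIZE+MAX_REF_PIC_NUM
 * hold the per-picture MV buffers, giving BMMU_MAX_BUFFERS slots in total.
 */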
299
300#define HEVC_MV_INFO 0x310d
301#define HEVC_QP_INFO 0x3137
302#define HEVC_SKIP_INFO 0x3136
303
304const u32 h265_version = 201602101;
305static u32 debug_mask = 0xffffffff;
306static u32 log_mask;
307static u32 debug;
308static u32 radr;
309static u32 rval;
310static u32 dbg_cmd;
311static u32 dump_nal;
312static u32 dbg_skip_decode_index;
313static u32 endian = 0xff0;
314#ifdef ERROR_HANDLE_DEBUG
315static u32 dbg_nal_skip_flag;
316 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
317static u32 dbg_nal_skip_count;
318#endif
319/*for debug*/
320/*
321 udebug_flag:
322 bit 0, enable ucode print
323 bit 1, enable ucode detail print
324 bit [31:16] not 0, pos to dump lmem
325 bit 2, pop bits to lmem
326 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
327*/
328static u32 udebug_flag;
329/*
330 when udebug_flag[1:0] is not 0
331 udebug_pause_pos not 0,
332 pause position
333*/
334static u32 udebug_pause_pos;
335/*
336 when udebug_flag[1:0] is not 0
337 and udebug_pause_pos is not 0,
338 pause only when DEBUG_REG2 is equal to this val
339*/
340static u32 udebug_pause_val;
341
342static u32 udebug_pause_decode_idx;
343
344static u32 decode_pic_begin;
345static uint slice_parse_begin;
346static u32 step;
347static bool is_reset;
348
349#ifdef CONSTRAIN_MAX_BUF_NUM
350static u32 run_ready_max_vf_only_num;
351static u32 run_ready_display_q_num;
352 /*0: do not check
353 0xff: use work_pic_num
354 */
355static u32 run_ready_max_buf_num = 0xff;
356#endif
357
358static u32 dynamic_buf_num_margin = 7;
359static u32 buf_alloc_width;
360static u32 buf_alloc_height;
361
362static u32 max_buf_num = 16;
363static u32 buf_alloc_size;
364/*static u32 re_config_pic_flag;*/
365/*
366 *bit[0]: 0,
367 *bit[1]: 0, always release cma buffer when stop
368 *bit[1]: 1, never release cma buffer when stop
369 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
370 *do not release cma buffer if blackout is not 1
371 *
372 *bit[2]: 0, when start decoding, check current displayed buffer
373 * (only for buffer decoded by h265) if blackout is 0
374 * 1, do not check current displayed buffer
375 *
376 *bit[3]: 1, if blackout is not 1, do not release current
377 * displayed cma buffer always.
378 */
379/* set to 1 for fast play;
380 * set to 8 for other case of "keep last frame"
381 */
382static u32 buffer_mode = 1;
383
384/* buffer_mode_dbg: debug only*/
385static u32 buffer_mode_dbg = 0xffff0000;
386/**/
387/*
388 *bit[1:0]PB_skip_mode: 0, start decoding at begin;
389 *1, start decoding after first I;
390 *2, only decode and display error-free pictures;
391 *3, start decoding and display after IDR,etc
392 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
393 *only for mode 0 and 1.
394 */
395static u32 nal_skip_policy = 2;
396
397/*
398 *bit 0, 1: only display I picture;
399 *bit 1, 1: only decode I picture;
400 */
401static u32 i_only_flag;
402
403/*
404bit 0, fast output first I picture
405*/
406static u32 fast_output_enable = 1;
407
408static u32 frmbase_cont_bitlevel = 0x60;
409
410/*
411use_cma: 1, use both reserved memory and cma for buffers
4122, only use cma for buffers
413*/
414static u32 use_cma = 2;
415
416#define AUX_BUF_ALIGN(adr) (((adr) + 0xf) & (~0xf))
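/* rounds adr up to the next 16-byte boundary, e.g.
 * AUX_BUF_ALIGN(0x1001) == 0x1010 and AUX_BUF_ALIGN(0x1000) == 0x1000
 */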
417static u32 prefix_aux_buf_size = (16 * 1024);
418static u32 suffix_aux_buf_size;
419
420static u32 max_decoding_time;
421/*
422 *error handling
423 */
424/*error_handle_policy:
425 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
426 *1, skip error_skip_nal_count nals before error recovery;
427 *bit 1 (valid only when bit0 == 1):
428 *1, wait vps/sps/pps after error recovery;
429 *bit 2 (valid only when bit0 == 0):
430 *0, auto search after error recovery (hevc_recover() called);
431 *1, manual search after error recovery
432 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
433 *
434 *bit 4: 0, set error_mark after reset/recover
435 * 1, do not set error_mark after reset/recover
436 *bit 5: 0, check total lcu for every picture
437 * 1, do not check total lcu
438 *bit 6: 0, do not check head error
439 * 1, check head error
440 *
441 */
442
443static u32 error_handle_policy;
444static u32 error_skip_nal_count = 6;
445static u32 error_handle_threshold = 30;
446static u32 error_handle_nal_skip_threshold = 10;
447static u32 error_handle_system_threshold = 30;
448static u32 interlace_enable = 1;
449static u32 fr_hint_status;
450
451 /*
452 *parser_sei_enable:
453 * bit 0, sei;
454 * bit 1, sei_suffix (fill aux buf)
455 * bit 2, fill sei to aux buf (when bit 0 is 1)
456 * bit 8, debug flag
457 */
458static u32 parser_sei_enable;
459#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
460static u32 parser_dolby_vision_enable = 1;
461static u32 dolby_meta_with_el;
462static u32 dolby_el_flush_th = 2;
463#endif
464/* this is only for h265 mmu enable */
465
466static u32 mmu_enable = 1;
467static u32 mmu_enable_force;
468static u32 work_buf_size;
469static unsigned int force_disp_pic_index;
470static unsigned int disp_vframe_valve_level;
471static int pre_decode_buf_level = 0x1000;
472static unsigned int pic_list_debug;
473
474
475#ifdef MULTI_INSTANCE_SUPPORT
476static unsigned int max_decode_instance_num
477 = MAX_DECODE_INSTANCE_NUM;
478static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
479static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
480static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
481static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
482static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
483static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
484static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
485static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
486{1, 1, 1, 1, 1, 1, 1, 1, 1};
487
488#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
489static unsigned char get_idx(struct hevc_state_s *hevc);
490#endif
491
492#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
493static u32 dv_toggle_prov_name;
494
495static u32 dv_debug;
496
497static u32 force_bypass_dvenl;
498#endif
499#endif
500
501
502#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
503#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
504#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
505#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
506#else
507#define get_dbg_flag(hevc) debug
508#define get_dbg_flag2(hevc) debug
509#define is_log_enable(hevc) (log_mask ? 1 : 0)
510#define get_valid_double_write_mode(hevc) double_write_mode
511#define get_buf_alloc_width(hevc) buf_alloc_width
512#define get_buf_alloc_height(hevc) buf_alloc_height
513#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
514#endif
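/*
 * Editor's note: in the CONFIG_AMLOGIC_MEDIA_MULTI_DEC build the per-instance
 * debug/log output is gated by debug_mask/log_mask -- bit N enables the
 * "debug" flags (and logging) for decoder instance N; the single-instance
 * build ignores the masks and uses "debug"/"log_mask" directly.
 */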
515#define get_buffer_mode(hevc) buffer_mode
516
517
518DEFINE_SPINLOCK(lock);
519struct task_struct *h265_task = NULL;
520#undef DEBUG_REG
521#ifdef DEBUG_REG
522void WRITE_VREG_DBG(unsigned adr, unsigned val)
523{
524 if (debug & H265_DEBUG_REG)
525 pr_info("%s(%x, %x)\n", __func__, adr, val);
526 WRITE_VREG(adr, val);
527}
528
529#undef WRITE_VREG
530#define WRITE_VREG WRITE_VREG_DBG
531#endif
532
533static DEFINE_MUTEX(vh265_mutex);
534
535static DEFINE_MUTEX(vh265_log_mutex);
536
537static struct vdec_info *gvs;
538
539static u32 without_display_mode;
540
541/**************************************************
542 *
543 *h265 buffer management include
544 *
545 ***************************************************
546 */
547enum NalUnitType {
548 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
549 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
550
551 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
552 /* Current name in the spec: TSA_R */
553 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
554
555 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
556 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
557
558 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
559 /* Current name in the spec: RADL_R */
560 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
561
562 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
563 /* Current name in the spec: RASL_R */
564 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
565
566 NAL_UNIT_RESERVED_10,
567 NAL_UNIT_RESERVED_11,
568 NAL_UNIT_RESERVED_12,
569 NAL_UNIT_RESERVED_13,
570 NAL_UNIT_RESERVED_14,
571 NAL_UNIT_RESERVED_15,
572
573 /* Current name in the spec: BLA_W_LP */
574 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
575 /* Current name in the spec: BLA_W_DLP */
576 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
577 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
578 /* Current name in the spec: IDR_W_DLP */
579 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
580 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
581 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
582 NAL_UNIT_RESERVED_22,
583 NAL_UNIT_RESERVED_23,
584
585 NAL_UNIT_RESERVED_24,
586 NAL_UNIT_RESERVED_25,
587 NAL_UNIT_RESERVED_26,
588 NAL_UNIT_RESERVED_27,
589 NAL_UNIT_RESERVED_28,
590 NAL_UNIT_RESERVED_29,
591 NAL_UNIT_RESERVED_30,
592 NAL_UNIT_RESERVED_31,
593
594 NAL_UNIT_VPS, /* 32 */
595 NAL_UNIT_SPS, /* 33 */
596 NAL_UNIT_PPS, /* 34 */
597 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
598 NAL_UNIT_EOS, /* 36 */
599 NAL_UNIT_EOB, /* 37 */
600 NAL_UNIT_FILLER_DATA, /* 38 */
601 NAL_UNIT_SEI, /* 39 Prefix SEI */
602 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
603 NAL_UNIT_RESERVED_41,
604 NAL_UNIT_RESERVED_42,
605 NAL_UNIT_RESERVED_43,
606 NAL_UNIT_RESERVED_44,
607 NAL_UNIT_RESERVED_45,
608 NAL_UNIT_RESERVED_46,
609 NAL_UNIT_RESERVED_47,
610 NAL_UNIT_UNSPECIFIED_48,
611 NAL_UNIT_UNSPECIFIED_49,
612 NAL_UNIT_UNSPECIFIED_50,
613 NAL_UNIT_UNSPECIFIED_51,
614 NAL_UNIT_UNSPECIFIED_52,
615 NAL_UNIT_UNSPECIFIED_53,
616 NAL_UNIT_UNSPECIFIED_54,
617 NAL_UNIT_UNSPECIFIED_55,
618 NAL_UNIT_UNSPECIFIED_56,
619 NAL_UNIT_UNSPECIFIED_57,
620 NAL_UNIT_UNSPECIFIED_58,
621 NAL_UNIT_UNSPECIFIED_59,
622 NAL_UNIT_UNSPECIFIED_60,
623 NAL_UNIT_UNSPECIFIED_61,
624 NAL_UNIT_UNSPECIFIED_62,
625 NAL_UNIT_UNSPECIFIED_63,
626 NAL_UNIT_INVALID,
627};
628
629/* --------------------------------------------------- */
630/* Amrisc Software Interrupt */
631/* --------------------------------------------------- */
632#define AMRISC_STREAM_EMPTY_REQ 0x01
633#define AMRISC_PARSER_REQ 0x02
634#define AMRISC_MAIN_REQ 0x04
635
636/* --------------------------------------------------- */
637/* HEVC_DEC_STATUS define */
638/* --------------------------------------------------- */
639#define HEVC_DEC_IDLE 0x0
640#define HEVC_NAL_UNIT_VPS 0x1
641#define HEVC_NAL_UNIT_SPS 0x2
642#define HEVC_NAL_UNIT_PPS 0x3
643#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
644#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
645#define HEVC_SLICE_DECODING 0x6
646#define HEVC_NAL_UNIT_SEI 0x7
647#define HEVC_SLICE_SEGMENT_DONE 0x8
648#define HEVC_NAL_SEARCH_DONE 0x9
649#define HEVC_DECPIC_DATA_DONE 0xa
650#define HEVC_DECPIC_DATA_ERROR 0xb
651#define HEVC_SEI_DAT 0xc
652#define HEVC_SEI_DAT_DONE 0xd
653#define HEVC_NAL_DECODE_DONE 0xe
654#define HEVC_OVER_DECODE 0xf
655
656#define HEVC_DATA_REQUEST 0x12
657
658#define HEVC_DECODE_BUFEMPTY 0x20
659#define HEVC_DECODE_TIMEOUT 0x21
660#define HEVC_SEARCH_BUFEMPTY 0x22
661#define HEVC_DECODE_OVER_SIZE 0x23
662#define HEVC_DECODE_BUFEMPTY2 0x24
663#define HEVC_FIND_NEXT_PIC_NAL 0x50
664#define HEVC_FIND_NEXT_DVEL_NAL 0x51
665
666#define HEVC_DUMP_LMEM 0x30
667
668#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
669#define HEVC_DISCARD_NAL 0xf0
670#define HEVC_ACTION_DEC_CONT 0xfd
671#define HEVC_ACTION_ERROR 0xfe
672#define HEVC_ACTION_DONE 0xff
673
674/* --------------------------------------------------- */
675/* Include "parser_cmd.h" */
676/* --------------------------------------------------- */
677#define PARSER_CMD_SKIP_CFG_0 0x0000090b
678
679#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
680
681#define PARSER_CMD_SKIP_CFG_2 0x001b1910
682
683#define PARSER_CMD_NUMBER 37
684
685/**************************************************
686 *
687 *h265 buffer management
688 *
689 ***************************************************
690 */
691/* #define BUFFER_MGR_ONLY */
692/* #define CONFIG_HEVC_CLK_FORCED_ON */
693/* #define ENABLE_SWAP_TEST */
694#define MCRCC_ENABLE
695#define INVALID_POC 0x80000000
696
697#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
698#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
699#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
700#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
701#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
702#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
703#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
704#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
705#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
706#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
707#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
708#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
709#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
710#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
711#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
712#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
713#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
714#ifdef ENABLE_SWAP_TEST
715#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
716#endif
717
718/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
719/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
720#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
721 /*do not define ENABLE_SWAP_TEST*/
722#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
723#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
724
725#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
726#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
727/*
728 *ucode parser/search control
729 *bit 0: 0, header auto parse; 1, header manual parse
730 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
731 *bit [3:2]: valid when bit1==0;
732 *0, auto skip nal before first vps/sps/pps/idr;
733 *1, auto skip nal before first vps/sps/pps
734 *2, auto skip nal before first vps/sps/pps,
735 * and not decode until the first I slice (with slice address of 0)
736 *
737 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
738 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
739 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
740 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
741 *bit [17]: for NAL_SEI when bit0 is 0:
742 * 0, do not parse/fetch SEI in ucode;
743 * 1, parse/fetch SEI in ucode
744 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
745 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
746 * 1, fetch NAL_SEI_SUFFIX data to aux buf
747 *bit [19]:
748 * 0, parse NAL_SEI in ucode
749 * 1, fetch NAL_SEI to aux buf
750 *bit [20]: for DOLBY_VISION_META
751 * 0, do not fetch DOLBY_VISION_META to aux buf
752 * 1, fetch DOLBY_VISION_META to aux buf
753 */
754#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
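/*
 * Illustrative example only (not taken from the original driver): with
 * header auto-parse enabled (bit 0 == 0), writing a value such as
 *	(1 << 17) | (1 << 18)
 * to NAL_SEARCH_CTL would ask the ucode to parse/fetch prefix SEI and to
 * copy NAL_SEI_SUFFIX data into the aux buffer, per the bit description
 * above.
 */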
755 /*read only*/
756#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
757 /*
758 [15 : 8] rps_set_id
759 [7 : 0] start_decoding_flag
760 */
761#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
762 /*set before start decoder*/
763#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
764#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
765#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
766
767#define DECODE_MODE_SINGLE 0x0
768#define DECODE_MODE_MULTI_FRAMEBASE 0x1
769#define DECODE_MODE_MULTI_STREAMBASE 0x2
770#define DECODE_MODE_MULTI_DVBAL 0x3
771#define DECODE_MODE_MULTI_DVENL 0x4
772
773#define MAX_INT 0x7FFFFFFF
774
775#define RPM_BEGIN 0x100
776#define modification_list_cur 0x148
777#define RPM_END 0x180
778
779#define RPS_USED_BIT 14
780/* MISC_FLAG0 */
781#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
782#define PCM_ENABLE_FLAG_BIT 1
783#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
784#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
785#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
786#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
787#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
788#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
789#define SLICE_SAO_LUMA_FLAG_BIT 8
790#define SLICE_SAO_CHROMA_FLAG_BIT 9
791#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
792
793union param_u {
794 struct {
795 unsigned short data[RPM_END - RPM_BEGIN];
796 } l;
797 struct {
798 /* from ucode lmem, do not change this struct */
799 unsigned short CUR_RPS[0x10];
800 unsigned short num_ref_idx_l0_active;
801 unsigned short num_ref_idx_l1_active;
802 unsigned short slice_type;
803 unsigned short slice_temporal_mvp_enable_flag;
804 unsigned short dependent_slice_segment_flag;
805 unsigned short slice_segment_address;
806 unsigned short num_title_rows_minus1;
807 unsigned short pic_width_in_luma_samples;
808 unsigned short pic_height_in_luma_samples;
809 unsigned short log2_min_coding_block_size_minus3;
810 unsigned short log2_diff_max_min_coding_block_size;
811 unsigned short log2_max_pic_order_cnt_lsb_minus4;
812 unsigned short POClsb;
813 unsigned short collocated_from_l0_flag;
814 unsigned short collocated_ref_idx;
815 unsigned short log2_parallel_merge_level;
816 unsigned short five_minus_max_num_merge_cand;
817 unsigned short sps_num_reorder_pics_0;
818 unsigned short modification_flag;
819 unsigned short tiles_enabled_flag;
820 unsigned short num_tile_columns_minus1;
821 unsigned short num_tile_rows_minus1;
822 unsigned short tile_width[8];
823 unsigned short tile_height[8];
824 unsigned short misc_flag0;
825 unsigned short pps_beta_offset_div2;
826 unsigned short pps_tc_offset_div2;
827 unsigned short slice_beta_offset_div2;
828 unsigned short slice_tc_offset_div2;
829 unsigned short pps_cb_qp_offset;
830 unsigned short pps_cr_qp_offset;
831 unsigned short first_slice_segment_in_pic_flag;
832 unsigned short m_temporalId;
833 unsigned short m_nalUnitType;
834
835 unsigned short vui_num_units_in_tick_hi;
836 unsigned short vui_num_units_in_tick_lo;
837 unsigned short vui_time_scale_hi;
838 unsigned short vui_time_scale_lo;
839 unsigned short bit_depth;
840 unsigned short profile_etc;
841 unsigned short sei_frame_field_info;
842 unsigned short video_signal_type;
843 unsigned short modification_list[0x20];
844 unsigned short conformance_window_flag;
845 unsigned short conf_win_left_offset;
846 unsigned short conf_win_right_offset;
847 unsigned short conf_win_top_offset;
848 unsigned short conf_win_bottom_offset;
849 unsigned short chroma_format_idc;
850 unsigned short color_description;
851 unsigned short aspect_ratio_idc;
852 unsigned short sar_width;
853 unsigned short sar_height;
854 unsigned short sps_max_dec_pic_buffering_minus1_0;
855 } p;
856};
857
858#define RPM_BUF_SIZE (0x80*2)
859/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
860#define LMEM_BUF_SIZE (0x500 * 2)
861
862struct buff_s {
863 u32 buf_start;
864 u32 buf_size;
865 u32 buf_end;
866};
867
868struct BuffInfo_s {
869 u32 max_width;
870 u32 max_height;
871 unsigned int start_adr;
872 unsigned int end_adr;
873 struct buff_s ipp;
874 struct buff_s sao_abv;
875 struct buff_s sao_vb;
876 struct buff_s short_term_rps;
877 struct buff_s vps;
878 struct buff_s sps;
879 struct buff_s pps;
880 struct buff_s sao_up;
881 struct buff_s swap_buf;
882 struct buff_s swap_buf2;
883 struct buff_s scalelut;
884 struct buff_s dblk_para;
885 struct buff_s dblk_data;
886 struct buff_s dblk_data2;
887 struct buff_s mmu_vbh;
888 struct buff_s cm_header;
889 struct buff_s mpred_above;
890#ifdef MV_USE_FIXED_BUF
891 struct buff_s mpred_mv;
892#endif
893 struct buff_s rpm;
894 struct buff_s lmem;
895};
896#define WORK_BUF_SPEC_NUM 3
897static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
898 {
899 /* 8M bytes */
900 .max_width = 1920,
901 .max_height = 1088,
902 .ipp = {
903 /* IPP work space calculation :
904 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
905 */
906 .buf_size = 0x4000,
907 },
908 .sao_abv = {
909 .buf_size = 0x30000,
910 },
911 .sao_vb = {
912 .buf_size = 0x30000,
913 },
914 .short_term_rps = {
915 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
916 * total 64x16x2 = 2048 bytes (0x800)
917 */
918 .buf_size = 0x800,
919 },
920 .vps = {
921 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
922 * total 0x0800 bytes
923 */
924 .buf_size = 0x800,
925 },
926 .sps = {
927 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
928 * total 0x0800 bytes
929 */
930 .buf_size = 0x800,
931 },
932 .pps = {
933 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
934 * total 0x2000 bytes
935 */
936 .buf_size = 0x2000,
937 },
938 .sao_up = {
939 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
940 * each has 16 bytes total 0x2800 bytes
941 */
942 .buf_size = 0x2800,
943 },
944 .swap_buf = {
945 /* 256cyclex64bit = 2K bytes 0x800
946 * (only 144 cycles valid)
947 */
948 .buf_size = 0x800,
949 },
950 .swap_buf2 = {
951 .buf_size = 0x800,
952 },
953 .scalelut = {
954 /* support up to 32 SCALELUT 1024x32 =
955 * 32Kbytes (0x8000)
956 */
957 .buf_size = 0x8000,
958 },
959 .dblk_para = {
960#ifdef SUPPORT_10BIT
961 .buf_size = 0x40000,
962#else
963 /* DBLK -> Max 256(4096/16) LCU, each para
964 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
965 */
966 .buf_size = 0x20000,
967#endif
968 },
969 .dblk_data = {
970 .buf_size = 0x40000,
971 },
972 .dblk_data2 = {
973 .buf_size = 0x40000,
974 }, /*dblk data for adapter*/
975 .mmu_vbh = {
976 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
977 },
978#if 0
979 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
980 .buf_size = MMU_COMPRESS_HEADER_SIZE *
981 (MAX_REF_PIC_NUM + 1),
982 },
983#endif
984 .mpred_above = {
985 .buf_size = 0x8000,
986 },
987#ifdef MV_USE_FIXED_BUF
988 .mpred_mv = {/* 1080p, 0x40000 per buffer */
989 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
990 },
991#endif
992 .rpm = {
993 .buf_size = RPM_BUF_SIZE,
994 },
995 .lmem = {
996 .buf_size = 0x500 * 2,
997 }
998 },
999 {
1000 .max_width = 4096,
1001 .max_height = 2048,
1002 .ipp = {
1003 /* IPP work space calculation :
1004 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1005 */
1006 .buf_size = 0x4000,
1007 },
1008 .sao_abv = {
1009 .buf_size = 0x30000,
1010 },
1011 .sao_vb = {
1012 .buf_size = 0x30000,
1013 },
1014 .short_term_rps = {
1015 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1016 * total 64x16x2 = 2048 bytes (0x800)
1017 */
1018 .buf_size = 0x800,
1019 },
1020 .vps = {
1021 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1022 * total 0x0800 bytes
1023 */
1024 .buf_size = 0x800,
1025 },
1026 .sps = {
1027 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1028 * total 0x0800 bytes
1029 */
1030 .buf_size = 0x800,
1031 },
1032 .pps = {
1033 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1034 * total 0x2000 bytes
1035 */
1036 .buf_size = 0x2000,
1037 },
1038 .sao_up = {
1039 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1040 * each has 16 bytes total 0x2800 bytes
1041 */
1042 .buf_size = 0x2800,
1043 },
1044 .swap_buf = {
1045 /* 256cyclex64bit = 2K bytes 0x800
1046 * (only 144 cycles valid)
1047 */
1048 .buf_size = 0x800,
1049 },
1050 .swap_buf2 = {
1051 .buf_size = 0x800,
1052 },
1053 .scalelut = {
1054 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1055 * (0x8000)
1056 */
1057 .buf_size = 0x8000,
1058 },
1059 .dblk_para = {
1060 /* DBLK -> Max 256(4096/16) LCU, each para
1061 * 512bytes(total:0x20000),
1062 * data 1024bytes(total:0x40000)
1063 */
1064 .buf_size = 0x20000,
1065 },
1066 .dblk_data = {
1067 .buf_size = 0x80000,
1068 },
1069 .dblk_data2 = {
1070 .buf_size = 0x80000,
1071 }, /*dblk data for adapter*/
1072 .mmu_vbh = {
1073 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1074 },
1075#if 0
1076 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1077 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1078 (MAX_REF_PIC_NUM + 1),
1079 },
1080#endif
1081 .mpred_above = {
1082 .buf_size = 0x8000,
1083 },
1084#ifdef MV_USE_FIXED_BUF
1085 .mpred_mv = {
1086 /* .buf_size = 0x100000*16,
1087 //4k2k , 0x100000 per buffer */
1088 /* 4096x2304 , 0x120000 per buffer */
1089 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1090 },
1091#endif
1092 .rpm = {
1093 .buf_size = RPM_BUF_SIZE,
1094 },
1095 .lmem = {
1096 .buf_size = 0x500 * 2,
1097 }
1098 },
1099
1100 {
1101 .max_width = 4096*2,
1102 .max_height = 2048*2,
1103 .ipp = {
1104 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1105 .buf_size = 0x4000*2,
1106 },
1107 .sao_abv = {
1108 .buf_size = 0x30000*2,
1109 },
1110 .sao_vb = {
1111 .buf_size = 0x30000*2,
1112 },
1113 .short_term_rps = {
1114 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1115 .buf_size = 0x800,
1116 },
1117 .vps = {
1118 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1119 .buf_size = 0x800,
1120 },
1121 .sps = {
1122 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1123 .buf_size = 0x800,
1124 },
1125 .pps = {
1126 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1127 .buf_size = 0x2000,
1128 },
1129 .sao_up = {
1130 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1131 .buf_size = 0x2800*2,
1132 },
1133 .swap_buf = {
1134 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1135 .buf_size = 0x800,
1136 },
1137 .swap_buf2 = {
1138 .buf_size = 0x800,
1139 },
1140 .scalelut = {
1141 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1142 .buf_size = 0x8000*2,
1143 },
1144 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1145 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1146 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1147 .mmu_vbh = {
1148 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1149 },
1150#if 0
1151 .cm_header = {
1152 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1153 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1154 },
1155#endif
1156 .mpred_above = {
1157 .buf_size = 0x8000*2,
1158 },
1159#ifdef MV_USE_FIXED_BUF
1160 .mpred_mv = {
1161 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1162 },
1163#endif
1164 .rpm = {
1165 .buf_size = RPM_BUF_SIZE,
1166 },
1167 .lmem = {
1168 .buf_size = 0x500 * 2,
1169 },
1170 }
1171};
1172
1173static void init_buff_spec(struct hevc_state_s *hevc,
1174 struct BuffInfo_s *buf_spec)
1175{
1176 buf_spec->ipp.buf_start = buf_spec->start_adr;
1177 buf_spec->sao_abv.buf_start =
1178 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1179
1180 buf_spec->sao_vb.buf_start =
1181 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1182 buf_spec->short_term_rps.buf_start =
1183 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1184 buf_spec->vps.buf_start =
1185 buf_spec->short_term_rps.buf_start +
1186 buf_spec->short_term_rps.buf_size;
1187 buf_spec->sps.buf_start =
1188 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1189 buf_spec->pps.buf_start =
1190 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1191 buf_spec->sao_up.buf_start =
1192 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1193 buf_spec->swap_buf.buf_start =
1194 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1195 buf_spec->swap_buf2.buf_start =
1196 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1197 buf_spec->scalelut.buf_start =
1198 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1199 buf_spec->dblk_para.buf_start =
1200 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1201 buf_spec->dblk_data.buf_start =
1202 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1203 buf_spec->dblk_data2.buf_start =
1204 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1205 buf_spec->mmu_vbh.buf_start =
1206 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1207 buf_spec->mpred_above.buf_start =
1208 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1209#ifdef MV_USE_FIXED_BUF
1210 buf_spec->mpred_mv.buf_start =
1211 buf_spec->mpred_above.buf_start +
1212 buf_spec->mpred_above.buf_size;
1213
1214 buf_spec->rpm.buf_start =
1215 buf_spec->mpred_mv.buf_start +
1216 buf_spec->mpred_mv.buf_size;
1217#else
1218 buf_spec->rpm.buf_start =
1219 buf_spec->mpred_above.buf_start +
1220 buf_spec->mpred_above.buf_size;
1221#endif
1222 buf_spec->lmem.buf_start =
1223 buf_spec->rpm.buf_start +
1224 buf_spec->rpm.buf_size;
1225 buf_spec->end_adr =
1226 buf_spec->lmem.buf_start +
1227 buf_spec->lmem.buf_size;
1228
1229 if (hevc && get_dbg_flag2(hevc)) {
1230 hevc_print(hevc, 0,
1231 "%s workspace (%x %x) size = %x\n", __func__,
1232 buf_spec->start_adr, buf_spec->end_adr,
1233 buf_spec->end_adr - buf_spec->start_adr);
1234
1235 hevc_print(hevc, 0,
1236 "ipp.buf_start :%x\n",
1237 buf_spec->ipp.buf_start);
1238 hevc_print(hevc, 0,
1239 "sao_abv.buf_start :%x\n",
1240 buf_spec->sao_abv.buf_start);
1241 hevc_print(hevc, 0,
1242 "sao_vb.buf_start :%x\n",
1243 buf_spec->sao_vb.buf_start);
1244 hevc_print(hevc, 0,
1245 "short_term_rps.buf_start :%x\n",
1246 buf_spec->short_term_rps.buf_start);
1247 hevc_print(hevc, 0,
1248 "vps.buf_start :%x\n",
1249 buf_spec->vps.buf_start);
1250 hevc_print(hevc, 0,
1251 "sps.buf_start :%x\n",
1252 buf_spec->sps.buf_start);
1253 hevc_print(hevc, 0,
1254 "pps.buf_start :%x\n",
1255 buf_spec->pps.buf_start);
1256 hevc_print(hevc, 0,
1257 "sao_up.buf_start :%x\n",
1258 buf_spec->sao_up.buf_start);
1259 hevc_print(hevc, 0,
1260 "swap_buf.buf_start :%x\n",
1261 buf_spec->swap_buf.buf_start);
1262 hevc_print(hevc, 0,
1263 "swap_buf2.buf_start :%x\n",
1264 buf_spec->swap_buf2.buf_start);
1265 hevc_print(hevc, 0,
1266 "scalelut.buf_start :%x\n",
1267 buf_spec->scalelut.buf_start);
1268 hevc_print(hevc, 0,
1269 "dblk_para.buf_start :%x\n",
1270 buf_spec->dblk_para.buf_start);
1271 hevc_print(hevc, 0,
1272 "dblk_data.buf_start :%x\n",
1273 buf_spec->dblk_data.buf_start);
1274 hevc_print(hevc, 0,
1275 "dblk_data2.buf_start :%x\n",
1276 buf_spec->dblk_data2.buf_start);
1277 hevc_print(hevc, 0,
1278 "mpred_above.buf_start :%x\n",
1279 buf_spec->mpred_above.buf_start);
1280#ifdef MV_USE_FIXED_BUF
1281 hevc_print(hevc, 0,
1282 "mpred_mv.buf_start :%x\n",
1283 buf_spec->mpred_mv.buf_start);
1284#endif
1285 if ((get_dbg_flag2(hevc)
1286 &
1287 H265_DEBUG_SEND_PARAM_WITH_REG)
1288 == 0) {
1289 hevc_print(hevc, 0,
1290 "rpm.buf_start :%x\n",
1291 buf_spec->rpm.buf_start);
1292 }
1293 }
1294
1295}
1296
1297enum SliceType {
1298 B_SLICE,
1299 P_SLICE,
1300 I_SLICE
1301};
1302
1303/*USE_BUF_BLOCK*/
1304struct BUF_s {
1305 ulong start_adr;
1306 u32 size;
1307 u32 luma_size;
1308 ulong header_addr;
1309 u32 header_size;
1310 int used_flag;
1311 ulong v4l_ref_buf_addr;
1312} /*BUF_t */;
1313
1314/* level 6, 6.1 maximum slice number is 800; other is 200 */
1315#define MAX_SLICE_NUM 800
1316struct PIC_s {
1317 int index;
1318 int scatter_alloc;
1319 int BUF_index;
1320 int mv_buf_index;
1321 int POC;
1322 int decode_idx;
1323 int slice_type;
1324 int RefNum_L0;
1325 int RefNum_L1;
1326 int num_reorder_pic;
1327 int stream_offset;
1328 unsigned char referenced;
1329 unsigned char output_mark;
1330 unsigned char recon_mark;
1331 unsigned char output_ready;
1332 unsigned char error_mark;
1333 //dis_mark = 0: discard mark; dis_mark = 1: no discard mark
1334 unsigned char dis_mark;
1335 /**/ int slice_idx;
1336 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1337 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1338 /*buffer */
1339 unsigned int header_adr;
1340#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1341 unsigned char dv_enhance_exist;
1342#endif
1343 char *aux_data_buf;
1344 int aux_data_size;
1345 unsigned long cma_alloc_addr;
1346 struct page *alloc_pages;
1347 unsigned int mpred_mv_wr_start_addr;
1348 unsigned int mc_y_adr;
1349 unsigned int mc_u_v_adr;
1350#ifdef SUPPORT_10BIT
1351 /*unsigned int comp_body_size;*/
1352 unsigned int dw_y_adr;
1353 unsigned int dw_u_v_adr;
1354#endif
1355 int mc_canvas_y;
1356 int mc_canvas_u_v;
1357 int width;
1358 int height;
1359
1360 int y_canvas_index;
1361 int uv_canvas_index;
1362#ifdef MULTI_INSTANCE_SUPPORT
1363 struct canvas_config_s canvas_config[2];
1364#endif
1365#ifdef SUPPORT_10BIT
1366 int mem_saving_mode;
1367 u32 bit_depth_luma;
1368 u32 bit_depth_chroma;
1369#endif
1370#ifdef LOSLESS_COMPRESS_MODE
1371 unsigned int losless_comp_body_size;
1372#endif
1373 unsigned char pic_struct;
1374 int vf_ref;
1375
1376 u32 pts;
1377 u64 pts64;
1378 u64 timestamp;
1379
1380 u32 aspect_ratio_idc;
1381 u32 sar_width;
1382 u32 sar_height;
1383 u32 double_write_mode;
1384 u32 video_signal_type;
1385 unsigned short conformance_window_flag;
1386 unsigned short conf_win_left_offset;
1387 unsigned short conf_win_right_offset;
1388 unsigned short conf_win_top_offset;
1389 unsigned short conf_win_bottom_offset;
1390 unsigned short chroma_format_idc;
1391
1392 /* picture qos information */
1393 int max_qp;
1394 int avg_qp;
1395 int min_qp;
1396 int max_skip;
1397 int avg_skip;
1398 int min_skip;
1399 int max_mv;
1400 int min_mv;
1401 int avg_mv;
1402
1403 bool vframe_bound;
1404} /*PIC_t */;
1405
1406#define MAX_TILE_COL_NUM 10
1407#define MAX_TILE_ROW_NUM 20
1408struct tile_s {
1409 int width;
1410 int height;
1411 int start_cu_x;
1412 int start_cu_y;
1413
1414 unsigned int sao_vb_start_addr;
1415 unsigned int sao_abv_start_addr;
1416};
1417
1418#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1419#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1420#define SEI_HDR10PLUS_MASK 0x00000004
1421
1422#define VF_POOL_SIZE 32
1423
1424#ifdef MULTI_INSTANCE_SUPPORT
1425#define DEC_RESULT_NONE 0
1426#define DEC_RESULT_DONE 1
1427#define DEC_RESULT_AGAIN 2
1428#define DEC_RESULT_CONFIG_PARAM 3
1429#define DEC_RESULT_ERROR 4
1430#define DEC_INIT_PICLIST 5
1431#define DEC_UNINIT_PICLIST 6
1432#define DEC_RESULT_GET_DATA 7
1433#define DEC_RESULT_GET_DATA_RETRY 8
1434#define DEC_RESULT_EOS 9
1435#define DEC_RESULT_FORCE_EXIT 10
1436#define DEC_RESULT_FREE_CANVAS 11
1437
1438static void vh265_work(struct work_struct *work);
1439static void vh265_timeout_work(struct work_struct *work);
1440static void vh265_notify_work(struct work_struct *work);
1441
1442#endif
1443
1444struct debug_log_s {
1445 struct list_head list;
1446 uint8_t data; /*will alloc more size*/
1447};
1448
1449struct hevc_state_s {
1450#ifdef MULTI_INSTANCE_SUPPORT
1451 struct platform_device *platform_dev;
1452 void (*vdec_cb)(struct vdec_s *, void *);
1453 void *vdec_cb_arg;
1454 struct vframe_chunk_s *chunk;
1455 int dec_result;
1456 struct work_struct work;
1457 struct work_struct timeout_work;
1458 struct work_struct notify_work;
1459 struct work_struct set_clk_work;
1460 /* timeout handle */
1461 unsigned long int start_process_time;
1462 unsigned int last_lcu_idx;
1463 unsigned int decode_timeout_count;
1464 unsigned int timeout_num;
1465#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1466 unsigned char switch_dvlayer_flag;
1467 unsigned char no_switch_dvlayer_count;
1468 unsigned char bypass_dvenl_enable;
1469 unsigned char bypass_dvenl;
1470#endif
1471 unsigned char start_parser_type;
1472 /*start_decoding_flag:
1473 vps/pps/sps/idr info from ucode*/
1474 unsigned char start_decoding_flag;
1475 unsigned char rps_set_id;
1476 unsigned char eos;
1477 int pic_decoded_lcu_idx;
1478 u8 over_decode;
1479 u8 empty_flag;
1480#endif
1481 struct vframe_s vframe_dummy;
1482 char *provider_name;
1483 int index;
1484 struct device *cma_dev;
1485 unsigned char m_ins_flag;
1486 unsigned char dolby_enhance_flag;
1487 unsigned long buf_start;
1488 u32 buf_size;
1489 u32 mv_buf_size;
1490
1491 struct BuffInfo_s work_space_buf_store;
1492 struct BuffInfo_s *work_space_buf;
1493
1494 u8 aux_data_dirty;
1495 u32 prefix_aux_size;
1496 u32 suffix_aux_size;
1497 void *aux_addr;
1498 void *rpm_addr;
1499 void *lmem_addr;
1500 dma_addr_t aux_phy_addr;
1501 dma_addr_t rpm_phy_addr;
1502 dma_addr_t lmem_phy_addr;
1503
1504 unsigned int pic_list_init_flag;
1505 unsigned int use_cma_flag;
1506
1507 unsigned short *rpm_ptr;
1508 unsigned short *lmem_ptr;
1509 unsigned short *debug_ptr;
1510 int debug_ptr_size;
1511 int pic_w;
1512 int pic_h;
1513 int lcu_x_num;
1514 int lcu_y_num;
1515 int lcu_total;
1516 int lcu_size;
1517 int lcu_size_log2;
1518 int lcu_x_num_pre;
1519 int lcu_y_num_pre;
1520 int first_pic_after_recover;
1521
1522 int num_tile_col;
1523 int num_tile_row;
1524 int tile_enabled;
1525 int tile_x;
1526 int tile_y;
1527 int tile_y_x;
1528 int tile_start_lcu_x;
1529 int tile_start_lcu_y;
1530 int tile_width_lcu;
1531 int tile_height_lcu;
1532
1533 int slice_type;
1534 unsigned int slice_addr;
1535 unsigned int slice_segment_addr;
1536
1537 unsigned char interlace_flag;
1538 unsigned char curr_pic_struct;
1539 unsigned char frame_field_info_present_flag;
1540
1541 unsigned short sps_num_reorder_pics_0;
1542 unsigned short misc_flag0;
1543 int m_temporalId;
1544 int m_nalUnitType;
1545 int TMVPFlag;
1546 int isNextSliceSegment;
1547 int LDCFlag;
1548 int m_pocRandomAccess;
1549 int plevel;
1550 int MaxNumMergeCand;
1551
1552 int new_pic;
1553 int new_tile;
1554 int curr_POC;
1555 int iPrevPOC;
1556#ifdef MULTI_INSTANCE_SUPPORT
1557 int decoded_poc;
1558 struct PIC_s *decoding_pic;
1559#endif
1560 int iPrevTid0POC;
1561 int list_no;
1562 int RefNum_L0;
1563 int RefNum_L1;
1564 int ColFromL0Flag;
1565 int LongTerm_Curr;
1566 int LongTerm_Col;
1567 int Col_POC;
1568 int LongTerm_Ref;
1569#ifdef MULTI_INSTANCE_SUPPORT
1570 int m_pocRandomAccess_bak;
1571 int curr_POC_bak;
1572 int iPrevPOC_bak;
1573 int iPrevTid0POC_bak;
1574 unsigned char start_parser_type_bak;
1575 unsigned char start_decoding_flag_bak;
1576 unsigned char rps_set_id_bak;
1577 int pic_decoded_lcu_idx_bak;
1578 int decode_idx_bak;
1579#endif
1580 struct PIC_s *cur_pic;
1581 struct PIC_s *col_pic;
1582 int skip_flag;
1583 int decode_idx;
1584 int slice_idx;
1585 unsigned char have_vps;
1586 unsigned char have_sps;
1587 unsigned char have_pps;
1588 unsigned char have_valid_start_slice;
1589 unsigned char wait_buf;
1590 unsigned char error_flag;
1591 unsigned int error_skip_nal_count;
1592 long used_4k_num;
1593
1594 unsigned char
1595 ignore_bufmgr_error; /* bit 0, for decoding;
1596 bit 1, for displaying
1597 bit 1 must be set if bit 0 is 1*/
1598 int PB_skip_mode;
1599 int PB_skip_count_after_decoding;
1600#ifdef SUPPORT_10BIT
1601 int mem_saving_mode;
1602#endif
1603#ifdef LOSLESS_COMPRESS_MODE
1604 unsigned int losless_comp_body_size;
1605#endif
1606 int pts_mode;
1607 int last_lookup_pts;
1608 int last_pts;
1609 u64 last_lookup_pts_us64;
1610 u64 last_pts_us64;
1611 u32 shift_byte_count_lo;
1612 u32 shift_byte_count_hi;
1613 int pts_mode_switching_count;
1614 int pts_mode_recovery_count;
1615
1616 int pic_num;
1617
1618 /**/
1619 union param_u param;
1620
1621 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1622
1623 struct timer_list timer;
1624 struct BUF_s m_BUF[BUF_POOL_SIZE];
1625 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1626 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1627
1628 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1629 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1630 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1631 struct vframe_s vfpool[VF_POOL_SIZE];
1632
1633 u32 stat;
1634 u32 frame_width;
1635 u32 frame_height;
1636 u32 frame_dur;
1637 u32 frame_ar;
1638 u32 bit_depth_luma;
1639 u32 bit_depth_chroma;
1640 u32 video_signal_type;
1641 u32 video_signal_type_debug;
1642 u32 saved_resolution;
1643 bool get_frame_dur;
1644 u32 error_watchdog_count;
1645 u32 error_skip_nal_wt_cnt;
1646 u32 error_system_watchdog_count;
1647
1648#ifdef DEBUG_PTS
1649 unsigned long pts_missed;
1650 unsigned long pts_hit;
1651#endif
1652 struct dec_sysinfo vh265_amstream_dec_info;
1653 unsigned char init_flag;
1654 unsigned char first_sc_checked;
1655 unsigned char uninit_list;
1656 u32 start_decoding_time;
1657
1658 int show_frame_num;
1659#ifdef USE_UNINIT_SEMA
1660 struct semaphore h265_uninit_done_sema;
1661#endif
1662 int fatal_error;
1663
1664
1665 u32 sei_present_flag;
1666 void *frame_mmu_map_addr;
1667 dma_addr_t frame_mmu_map_phy_addr;
1668 unsigned int mmu_mc_buf_start;
1669 unsigned int mmu_mc_buf_end;
1670 unsigned int mmu_mc_start_4k_adr;
1671 void *mmu_box;
1672 void *bmmu_box;
1673 int mmu_enable;
1674
1675 unsigned int dec_status;
1676
1677 /* data for SEI_MASTER_DISPLAY_COLOR */
1678 unsigned int primaries[3][2];
1679 unsigned int white_point[2];
1680 unsigned int luminance[2];
1681 /* data for SEI_CONTENT_LIGHT_LEVEL */
1682 unsigned int content_light_level[2];
1683
1684 struct PIC_s *pre_top_pic;
1685 struct PIC_s *pre_bot_pic;
1686
1687#ifdef MULTI_INSTANCE_SUPPORT
1688 int double_write_mode;
1689 int dynamic_buf_num_margin;
1690 int start_action;
1691 int save_buffer_mode;
1692#endif
1693 u32 i_only;
1694 struct list_head log_list;
1695 u32 ucode_pause_pos;
1696 u32 start_shift_bytes;
1697
1698 u32 vf_pre_count;
1699 u32 vf_get_count;
1700 u32 vf_put_count;
1701#ifdef SWAP_HEVC_UCODE
1702 dma_addr_t mc_dma_handle;
1703 void *mc_cpu_addr;
1704 int swap_size;
1705 ulong swap_addr;
1706#endif
1707#ifdef DETREFILL_ENABLE
1708 dma_addr_t detbuf_adr;
1709 u16 *detbuf_adr_virt;
1710 u8 delrefill_check;
1711#endif
1712 u8 head_error_flag;
1713 int valve_count;
1714 struct firmware_s *fw;
1715 int max_pic_w;
1716 int max_pic_h;
1717#ifdef AGAIN_HAS_THRESHOLD
1718 u8 next_again_flag;
1719 u32 pre_parser_wr_ptr;
1720#endif
1721 u32 ratio_control;
1722 u32 first_pic_flag;
1723 u32 decode_size;
1724 struct mutex chunks_mutex;
1725 int need_cache_size;
1726 u64 sc_start_time;
1727 u32 skip_first_nal;
1728 bool is_swap;
1729 bool is_4k;
1730 int frameinfo_enable;
1731 struct vframe_qos_s vframe_qos;
1732 bool is_used_v4l;
1733 void *v4l2_ctx;
1734 bool v4l_params_parsed;
1735 u32 mem_map_mode;
1736} /*hevc_stru_t */;
1737
1738#ifdef AGAIN_HAS_THRESHOLD
1739u32 again_threshold;
1740#endif
1741#ifdef SEND_LMEM_WITH_RPM
1742#define get_lmem_params(hevc, ladr) \
1743 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
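/*
 * Editor's note: the expression above maps ladr = base + off (base 4-aligned,
 * off = 0..3) to base + (3 - off), i.e. the four 16-bit entries inside each
 * group of 4 are read in reverse order -- presumably because the lmem dump is
 * written to memory as 64-bit words.
 */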
1744
1745
1746static int get_frame_mmu_map_size(void)
1747{
1748 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1749 return (MAX_FRAME_8K_NUM * 4);
1750
1751 return (MAX_FRAME_4K_NUM * 4);
1752}
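/*
 * Editor's note (inference from the sizes involved): the frame MMU map holds
 * one 32-bit page entry per 4KB page of the compressed frame body, hence the
 * "* 4" byte multiplier.  A 4096x2304 body is 4096 * 64 * 72 = 18874368 bytes
 * = 4608 pages, matching MAX_FRAME_4K_NUM (0x1200); the 8K case scales by 4.
 */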
1753
1754static int is_oversize(int w, int h)
1755{
1756 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1757 MAX_SIZE_8K : MAX_SIZE_4K;
1758
1759 if (w < 0 || h < 0)
1760 return true;
1761
1762 if (h != 0 && (w > max / h))
1763 return true;
1764
1765 return false;
1766}
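/*
 * The division-based compare (w > max / h) avoids overflowing w * h for large
 * dimensions.  For example on SM1 and later (max = 8192 * 4608), 8192x4608
 * is accepted while 8192x4609 is rejected.
 */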
1767
1768void check_head_error(struct hevc_state_s *hevc)
1769{
1770#define pcm_enabled_flag 0x040
1771#define pcm_sample_bit_depth_luma 0x041
1772#define pcm_sample_bit_depth_chroma 0x042
1773 hevc->head_error_flag = 0;
1774 if ((error_handle_policy & 0x40) == 0)
1775 return;
1776 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1777 uint16_t pcm_depth_luma = get_lmem_params(
1778 hevc, pcm_sample_bit_depth_luma);
1779 uint16_t pcm_sample_chroma = get_lmem_params(
1780 hevc, pcm_sample_bit_depth_chroma);
1781 if (pcm_depth_luma >
1782 hevc->bit_depth_luma ||
1783 pcm_sample_chroma >
1784 hevc->bit_depth_chroma) {
1785 hevc_print(hevc, 0,
1786 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1787 pcm_depth_luma,
1788 pcm_sample_chroma,
1789 hevc->bit_depth_luma,
1790 hevc->bit_depth_chroma);
1791 hevc->head_error_flag = 1;
1792 }
1793 }
1794}
1795#endif
1796
1797#ifdef SUPPORT_10BIT
1798/* Losless compression body buffer size 4K per 64x32 (jt) */
1799static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1800 int width, int height, int mem_saving_mode)
1801{
1802 int width_x64;
1803 int height_x32;
1804 int bsize;
1805
1806 width_x64 = width + 63;
1807 width_x64 >>= 6;
1808
1809 height_x32 = height + 31;
1810 height_x32 >>= 5;
1811 if (mem_saving_mode == 1 && hevc->mmu_enable)
1812 bsize = 3200 * width_x64 * height_x32;
1813 else if (mem_saving_mode == 1)
1814 bsize = 3072 * width_x64 * height_x32;
1815 else
1816 bsize = 4096 * width_x64 * height_x32;
1817
1818 return bsize;
1819}
1820
1821/* Lossless compression header buffer size: 32 bytes per 128x64 block (jt) */
1822static int compute_losless_comp_header_size(int width, int height)
1823{
1824 int width_x128;
1825 int height_x64;
1826 int hsize;
1827
1828 width_x128 = width + 127;
1829 width_x128 >>= 7;
1830
1831 height_x64 = height + 63;
1832 height_x64 >>= 6;
1833
1834 hsize = 32*width_x128*height_x64;
1835
1836 return hsize;
1837}
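/*
 * Worked example (editor's note): for a 1920x1088 picture with
 * mem_saving_mode == 0, compute_losless_comp_body_size() returns
 * 4096 * 30 * 34 = 4177920 bytes and compute_losless_comp_header_size()
 * returns 32 * 15 * 17 = 8160 bytes.
 */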
1838#endif
1839
1840static int add_log(struct hevc_state_s *hevc,
1841 const char *fmt, ...)
1842{
1843#define HEVC_LOG_BUF 196
1844 struct debug_log_s *log_item;
1845 unsigned char buf[HEVC_LOG_BUF];
1846 int len = 0;
1847 va_list args;
1848 mutex_lock(&vh265_log_mutex);
1849 va_start(args, fmt);
1850 len = sprintf(buf, "<%ld> <%05d> ",
1851 jiffies, hevc->decode_idx);
1852 len += vsnprintf(buf + len,
1853 HEVC_LOG_BUF - len, fmt, args);
1854 va_end(args);
1855 log_item = kmalloc(
1856 sizeof(struct debug_log_s) + len,
1857 GFP_KERNEL);
1858 if (log_item) {
1859 INIT_LIST_HEAD(&log_item->list);
1860 strcpy(&log_item->data, buf);
1861 list_add_tail(&log_item->list,
1862 &hevc->log_list);
1863 }
1864 mutex_unlock(&vh265_log_mutex);
1865 return 0;
1866}
1867
1868static void dump_log(struct hevc_state_s *hevc)
1869{
1870 int i = 0;
1871 struct debug_log_s *log_item, *tmp;
1872 mutex_lock(&vh265_log_mutex);
1873 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1874 hevc_print(hevc, 0,
1875 "[LOG%04d]%s\n",
1876 i++,
1877 &log_item->data);
1878 list_del(&log_item->list);
1879 kfree(log_item);
1880 }
1881 mutex_unlock(&vh265_log_mutex);
1882}
1883
1884static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1885 struct PIC_s *pic)
1886{
1887 if (pic->error_mark
1888 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1889 return 1;
1890 return 0;
1891}
1892
1893static int get_pic_poc(struct hevc_state_s *hevc,
1894 unsigned int idx)
1895{
1896 if (idx != 0xff
1897 && idx < MAX_REF_PIC_NUM
1898 && hevc->m_PIC[idx])
1899 return hevc->m_PIC[idx]->POC;
1900 return INVALID_POC;
1901}
1902
1903#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1904static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1905{
1906 return (hevc->m_ins_flag &&
1907 ((double_write_mode & 0x80000000) == 0)) ?
1908 hevc->double_write_mode :
1909 (double_write_mode & 0x7fffffff);
1910}
1911
1912static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1913{
1914 return (hevc->m_ins_flag &&
1915 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1916 hevc->dynamic_buf_num_margin :
1917 (dynamic_buf_num_margin & 0x7fffffff);
1918}
1919#endif
1920
1921static int get_double_write_mode(struct hevc_state_s *hevc)
1922{
1923 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1924 int w = hevc->pic_w;
1925 int h = hevc->pic_h;
1926 u32 dw = 0x1; /*1:1*/
1927 switch (valid_dw_mode) {
1928 case 0x100:
1929 if (w > 1920 && h > 1088)
1930 dw = 0x4; /*1:2*/
1931 break;
1932 case 0x200:
1933 if (w > 1920 && h > 1088)
1934 dw = 0x2; /*1:4*/
1935 break;
1936 case 0x300:
1937 if (w > 1280 && h > 720)
1938 dw = 0x4; /*1:2*/
1939 break;
1940 default:
1941 dw = valid_dw_mode;
1942 break;
1943 }
1944 return dw;
1945}
1946
1947static int get_double_write_ratio(struct hevc_state_s *hevc,
1948 int dw_mode)
1949{
1950 int ratio = 1;
1951 if ((dw_mode == 2) ||
1952 (dw_mode == 3))
1953 ratio = 4;
1954 else if (dw_mode == 4)
1955 ratio = 2;
1956 return ratio;
1957}
1958#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1959static unsigned char get_idx(struct hevc_state_s *hevc)
1960{
1961 return hevc->index;
1962}
1963#endif
1964
1965#undef pr_info
1966#define pr_info printk
1967static int hevc_print(struct hevc_state_s *hevc,
1968 int flag, const char *fmt, ...)
1969{
1970#define HEVC_PRINT_BUF 256
1971 unsigned char buf[HEVC_PRINT_BUF];
1972 int len = 0;
1973#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1974 if (hevc == NULL ||
1975 (flag == 0) ||
1976 ((debug_mask &
1977 (1 << hevc->index))
1978 && (debug & flag))) {
1979#endif
1980 va_list args;
1981
1982 va_start(args, fmt);
1983 if (hevc)
1984 len = sprintf(buf, "[%d]", hevc->index);
1985 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1986 pr_debug("%s", buf);
1987 va_end(args);
1988#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1989 }
1990#endif
1991 return 0;
1992}
1993
1994static int hevc_print_cont(struct hevc_state_s *hevc,
1995 int flag, const char *fmt, ...)
1996{
1997 unsigned char buf[HEVC_PRINT_BUF];
1998 int len = 0;
1999#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2000 if (hevc == NULL ||
2001 (flag == 0) ||
2002 ((debug_mask &
2003 (1 << hevc->index))
2004 && (debug & flag))) {
2005#endif
2006 va_list args;
2007
2008 va_start(args, fmt);
2009 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2010 pr_info("%s", buf);
2011 va_end(args);
2012#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2013 }
2014#endif
2015 return 0;
2016}
2017
2018static void put_mv_buf(struct hevc_state_s *hevc,
2019 struct PIC_s *pic);
2020
2021static void update_vf_memhandle(struct hevc_state_s *hevc,
2022 struct vframe_s *vf, struct PIC_s *pic);
2023
2024static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2025
2026static void release_aux_data(struct hevc_state_s *hevc,
2027 struct PIC_s *pic);
2028static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2029
2030#ifdef MULTI_INSTANCE_SUPPORT
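/*
 * backup_decode_state() snapshots the parser/POC state before a picture
 * is decoded; restore_decode_state() rolls back to that snapshot and
 * drops the partially decoded picture when more input is still expected,
 * so the picture can be decoded again from a clean state.
 */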
2031static void backup_decode_state(struct hevc_state_s *hevc)
2032{
2033 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2034 hevc->curr_POC_bak = hevc->curr_POC;
2035 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2036 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2037 hevc->start_parser_type_bak = hevc->start_parser_type;
2038 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2039 hevc->rps_set_id_bak = hevc->rps_set_id;
2040 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2041 hevc->decode_idx_bak = hevc->decode_idx;
2042
2043}
2044
2045static void restore_decode_state(struct hevc_state_s *hevc)
2046{
2047 struct vdec_s *vdec = hw_to_vdec(hevc);
2048 if (!vdec_has_more_input(vdec)) {
2049 hevc->pic_decoded_lcu_idx =
2050 READ_VREG(HEVC_PARSER_LCU_START)
2051 & 0xffffff;
2052 return;
2053 }
2054 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2055 "%s: discard pic index 0x%x\n",
2056 __func__, hevc->decoding_pic ?
2057 hevc->decoding_pic->index : 0xff);
2058 if (hevc->decoding_pic) {
2059 hevc->decoding_pic->error_mark = 0;
2060 hevc->decoding_pic->output_ready = 0;
2061 hevc->decoding_pic->output_mark = 0;
2062 hevc->decoding_pic->referenced = 0;
2063 hevc->decoding_pic->POC = INVALID_POC;
2064 put_mv_buf(hevc, hevc->decoding_pic);
2065 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2066 release_aux_data(hevc, hevc->decoding_pic);
2067 hevc->decoding_pic = NULL;
2068 }
2069 hevc->decode_idx = hevc->decode_idx_bak;
2070 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2071 hevc->curr_POC = hevc->curr_POC_bak;
2072 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2073 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2074 hevc->start_parser_type = hevc->start_parser_type_bak;
2075 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2076 hevc->rps_set_id = hevc->rps_set_id_bak;
2077 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2078
2079 if (hevc->pic_list_init_flag == 1)
2080 hevc->pic_list_init_flag = 0;
2081 /*if (hevc->decode_idx == 0)
2082 hevc->start_decoding_flag = 0;*/
2083
2084 hevc->slice_idx = 0;
2085 hevc->used_4k_num = -1;
2086}
2087#endif
2088
2089static void hevc_init_stru(struct hevc_state_s *hevc,
2090 struct BuffInfo_s *buf_spec_i)
2091{
2092 int i;
2093 INIT_LIST_HEAD(&hevc->log_list);
2094 hevc->work_space_buf = buf_spec_i;
2095 hevc->prefix_aux_size = 0;
2096 hevc->suffix_aux_size = 0;
2097 hevc->aux_addr = NULL;
2098 hevc->rpm_addr = NULL;
2099 hevc->lmem_addr = NULL;
2100
2101 hevc->curr_POC = INVALID_POC;
2102
2103 hevc->pic_list_init_flag = 0;
2104 hevc->use_cma_flag = 0;
2105 hevc->decode_idx = 0;
2106 hevc->slice_idx = 0;
2107 hevc->new_pic = 0;
2108 hevc->new_tile = 0;
2109 hevc->iPrevPOC = 0;
2110 hevc->list_no = 0;
2111 /* int m_uiMaxCUWidth = 1<<7; */
2112 /* int m_uiMaxCUHeight = 1<<7; */
2113 hevc->m_pocRandomAccess = MAX_INT;
2114 hevc->tile_enabled = 0;
2115 hevc->tile_x = 0;
2116 hevc->tile_y = 0;
2117 hevc->iPrevTid0POC = 0;
2118 hevc->slice_addr = 0;
2119 hevc->slice_segment_addr = 0;
2120 hevc->skip_flag = 0;
2121 hevc->misc_flag0 = 0;
2122
2123 hevc->cur_pic = NULL;
2124 hevc->col_pic = NULL;
2125 hevc->wait_buf = 0;
2126 hevc->error_flag = 0;
2127 hevc->head_error_flag = 0;
2128 hevc->error_skip_nal_count = 0;
2129 hevc->have_vps = 0;
2130 hevc->have_sps = 0;
2131 hevc->have_pps = 0;
2132 hevc->have_valid_start_slice = 0;
2133
2134 hevc->pts_mode = PTS_NORMAL;
2135 hevc->last_pts = 0;
2136 hevc->last_lookup_pts = 0;
2137 hevc->last_pts_us64 = 0;
2138 hevc->last_lookup_pts_us64 = 0;
2139 hevc->pts_mode_switching_count = 0;
2140 hevc->pts_mode_recovery_count = 0;
2141
2142 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2143 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2144 if (hevc->PB_skip_mode == 0)
2145 hevc->ignore_bufmgr_error = 0x1;
2146 else
2147 hevc->ignore_bufmgr_error = 0x0;
2148
2149 if (hevc->is_used_v4l) {
2150 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2151 if (hevc->m_PIC[i] != NULL) {
2152 memset(hevc->m_PIC[i], 0, sizeof(struct PIC_s));
2153 hevc->m_PIC[i]->index = -1;
2154 }
2155 }
2156 }
2157
2158 hevc->pic_num = 0;
2159 hevc->lcu_x_num_pre = 0;
2160 hevc->lcu_y_num_pre = 0;
2161 hevc->first_pic_after_recover = 0;
2162
2163 hevc->pre_top_pic = NULL;
2164 hevc->pre_bot_pic = NULL;
2165
2166 hevc->sei_present_flag = 0;
2167 hevc->valve_count = 0;
2168 hevc->first_pic_flag = 0;
2169#ifdef MULTI_INSTANCE_SUPPORT
2170 hevc->decoded_poc = INVALID_POC;
2171 hevc->start_process_time = 0;
2172 hevc->last_lcu_idx = 0;
2173 hevc->decode_timeout_count = 0;
2174 hevc->timeout_num = 0;
2175 hevc->eos = 0;
2176 hevc->pic_decoded_lcu_idx = -1;
2177 hevc->over_decode = 0;
2178 hevc->used_4k_num = -1;
2179 hevc->start_decoding_flag = 0;
2180 hevc->rps_set_id = 0;
2181 backup_decode_state(hevc);
2182#endif
2183#ifdef DETREFILL_ENABLE
2184 hevc->detbuf_adr = 0;
2185 hevc->detbuf_adr_virt = NULL;
2186#endif
2187}
2188
2189static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2190static int H265_alloc_mmu(struct hevc_state_s *hevc,
2191 struct PIC_s *new_pic, unsigned short bit_depth,
2192 unsigned int *mmu_index_adr);
2193
2194#ifdef DETREFILL_ENABLE
2195#define DETREFILL_BUF_SIZE (4 * 0x4000)
2196#define HEVC_SAO_DBG_MODE0 0x361e
2197#define HEVC_SAO_DBG_MODE1 0x361f
2198#define HEVC_SAO_CTRL10 0x362e
2199#define HEVC_SAO_CTRL11 0x362f
2200static int init_detrefill_buf(struct hevc_state_s *hevc)
2201{
2202 if (hevc->detbuf_adr_virt)
2203 return 0;
2204
2205 hevc->detbuf_adr_virt =
2206 (void *)dma_alloc_coherent(amports_get_dma_device(),
2207 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2208 GFP_KERNEL);
2209
2210 if (hevc->detbuf_adr_virt == NULL) {
2211 pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2212 return -1;
2213 }
2214 return 0;
2215}
2216
2217static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2218{
2219 if (hevc->detbuf_adr_virt) {
2220 dma_free_coherent(amports_get_dma_device(),
2221 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2222 hevc->detbuf_adr);
2223
2224 hevc->detbuf_adr_virt = NULL;
2225 hevc->detbuf_adr = 0;
2226 }
2227}
2228
2229/*
2230 * convert uncompressed frame buffer data from/to ddr
2231 */
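/*
 * Each call handles one 8x4 block of 10-bit luma plus the matching 4x2
 * Cb/Cr samples, packed into 32 16-bit compressed-body words:
 * direction 0 unpacks from cmBodyBuf, direction 1 packs back into it.
 */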
2232static void convUnc8x4blk(uint16_t* blk8x4Luma,
2233 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2234{
2235 if (direction == 0) {
2236 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2237 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2238 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2239 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2240 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2241 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2242 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2243 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2244 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2245 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2246 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2247 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2248 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2249 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2250 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2251 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2252 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2253 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2254
2255 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2256 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2257 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2258 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2259 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2260 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2261 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2262 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2263 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2264 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2265 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2266 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2267 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2268 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2269 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2270 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2271 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2272 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2273
2274 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2275 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2276 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2277 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2278 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2279 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2280 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2281 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2282 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2283 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2284 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2285 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2286 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2287 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2288 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2289 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2290 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2291 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2292
2293 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2294 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2295 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2296 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2297 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2298 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2299 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2300 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2301 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2302 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2303 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2304 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2305 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2306 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2307 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2308 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2309 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2310 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2311 } else {
2312 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2313 blk8x4Luma[3 + 0 * 8];
2314 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2315 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2316 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2317 (blk8x4Luma[3 + 3 * 8] >> 2);
2318 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2319 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2320 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2321 (blk8x4Luma[7 + 2 * 8] >>4);
2322 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2323 blk8x4Cb[0 + 0 * 4];
2324 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2325 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2326 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2327
2328 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2329 blk8x4Luma[0 + 0 * 8];
2330 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2331 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2332 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2333 (blk8x4Luma[0 + 1 * 8] >> 2);
2334 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2335 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2336 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2337 (blk8x4Luma[0 + 2 * 8] >>4);
2338 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2339 blk8x4Luma[2 + 2 * 8];
2340 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2341 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2342 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2343
2344 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2345 blk8x4Luma[4 + 0 * 8];
2346 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2347 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2348 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2349 (blk8x4Luma[4 + 1 * 8] >> 2);
2350 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2351 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2352 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2353 (blk8x4Luma[4 + 2 * 8] >>4);
2354 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2355 blk8x4Luma[6 + 2 * 8];
2356 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2357 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2358 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2359
2360 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2361 blk8x4Cb[1 + 0 * 4];
2362 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2363 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2364 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2365 (blk8x4Cr[2 + 0 * 4] >> 2);
2366 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2367 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2368 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2369 (blk8x4Cb[1 + 1 * 4] >>4);
2370 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2371 blk8x4Cb[2 + 1 * 4];
2372 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2373 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2374 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2375 }
2376}
2377
2378static void corrRefillWithAmrisc (
2379 struct hevc_state_s *hevc,
2380 uint32_t cmHeaderBaseAddr,
2381 uint32_t picWidth,
2382 uint32_t ctuPosition)
2383{
2384 int32_t i;
2385 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2386 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2387 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2388
2389 uint16_t cmBodyBuf[32 * 18];
2390
2391 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2392 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2393 uint32_t stride64x64 = pic_width_x64 * 128;
2394 uint32_t addr_offset64x64_abv = stride64x64 *
2395 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2396 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2397 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2398 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2399 unsigned int tmpData32;
2400
2401 uint16_t blkBuf0Y[32];
2402 uint16_t blkBuf0Cb[8];
2403 uint16_t blkBuf0Cr[8];
2404 uint16_t blkBuf1Y[32];
2405 uint16_t blkBuf1Cb[8];
2406 uint16_t blkBuf1Cr[8];
2407 int32_t blkBufCnt = 0;
2408
2409 int32_t blkIdx;
2410
2411 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2412 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2413 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2414 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2415
2416 for (i = 0; i < 32 * 18; i++)
2417 cmBodyBuf[i] = 0;
2418
2419 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2420 "%s, %d\n", __func__, __LINE__);
2421 do {
2422 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2423 } while (tmpData32);
2424 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2425 "%s, %d\n", __func__, __LINE__);
2426
2427 hevc_print(hevc, H265_DEBUG_DETAIL,
2428 "cmBodyBuf from detbuf:\n");
2429 for (i = 0; i < 32 * 18; i++) {
2430 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2431 if (get_dbg_flag(hevc) &
2432 H265_DEBUG_DETAIL) {
2433 if ((i & 0xf) == 0)
2434 hevc_print_cont(hevc, 0, "\n");
2435 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2436 }
2437 }
2438 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2439
2440 for (i = 0; i < 32; i++)
2441 blkBuf0Y[i] = 0;
2442 for (i = 0; i < 8; i++)
2443 blkBuf0Cb[i] = 0;
2444 for (i = 0; i < 8; i++)
2445 blkBuf0Cr[i] = 0;
2446 for (i = 0; i < 32; i++)
2447 blkBuf1Y[i] = 0;
2448 for (i = 0; i < 8; i++)
2449 blkBuf1Cb[i] = 0;
2450 for (i = 0; i < 8; i++)
2451 blkBuf1Cr[i] = 0;
2452
2453 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2454 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2455 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2456 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2457 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2458 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2459 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2460
2461 if (!aboveCtuAvailable && inAboveCtu)
2462 continue;
2463
2464 /* detRefillBuf --> 8x4block*/
2465 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2466
2467 if (restoreEnable) {
2468 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2469 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2470 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2471 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2472 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2473 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2474 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2475 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2476 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2477 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2478 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2479 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2480 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2481 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2482 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2483 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2484 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2485 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2486 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2487 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2488 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2489 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2490 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2491 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2492 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2493 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2494 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2495 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2496 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2497 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2498 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2499 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2500
2501 /*Store data back to DDR*/
2502 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2503 }
2504
2505 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2506 }
2507
2508 hevc_print(hevc, H265_DEBUG_DETAIL,
2509 "cmBodyBuf to detbuf:\n");
2510 for (i = 0; i < 32 * 18; i++) {
2511 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2512 if (get_dbg_flag(hevc) &
2513 H265_DEBUG_DETAIL) {
2514 if ((i & 0xf) == 0)
2515 hevc_print_cont(hevc, 0, "\n");
2516 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2517 }
2518 }
2519 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2520
2521 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2522 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2523 "%s, %d\n", __func__, __LINE__);
2524 do {
2525 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2526 } while (tmpData32);
2527 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2528 "%s, %d\n", __func__, __LINE__);
2529}
2530
2531static void delrefill(struct hevc_state_s *hevc)
2532{
2533 /* corrRefill */
2534 /*
2535  * HEVC_SAO_DBG_MODE0: picGlobalVariable
2536  * [31:30] error number
2537  * [29:20] error2 ([9:7] tilex, [6:0] ctuy)
2538  * [19:10] error1, [9:0] error0
2539  */
2540 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2541 uint32_t errorIdx;
2542 uint32_t errorNum = (detResult>>30);
2543
2544 if (detResult) {
2545 hevc_print(hevc, H265_DEBUG_BUFMGR,
2546 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2547 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2548 uint32_t errorPos = errorIdx * 10;
2549 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2550 uint32_t tilex = (errorResult >> 7) - 1;
2551 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2552 + hevc->m_tile[0][tilex].width - 1;
2553 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2554 uint32_t ctuPosition = (ctux << 16) + ctuy;
2555 hevc_print(hevc, H265_DEBUG_BUFMGR,
2556 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2557 errorIdx, tilex, ctux, ctux, ctuy, ctuy);
2558 corrRefillWithAmrisc(
2559 hevc,
2560 (uint32_t)hevc->cur_pic->header_adr,
2561 hevc->pic_w,
2562 ctuPosition);
2563 }
2564
2565 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2566 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2567 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2568 }
2569}
2570#endif
2571
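/*
 * Fetch the 128 RPM parameter words posted by the decoder ucode: bit 16
 * of RPM_CMD_REG is the data-valid flag, bits [15:0] hold the value, and
 * writing 0 back acknowledges the word (presumably letting the ucode
 * post the next one).
 */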
2572static void get_rpm_param(union param_u *params)
2573{
2574 int i;
2575 unsigned int data32;
2576
2577 for (i = 0; i < 128; i++) {
2578 do {
2579 data32 = READ_VREG(RPM_CMD_REG);
2580 /* hevc_print(hevc, 0, "%x\n", data32); */
2581 } while ((data32 & 0x10000) == 0);
2582 params->l.data[i] = data32 & 0xffff;
2583 /* hevc_print(hevc, 0, "%x\n", data32); */
2584 WRITE_VREG(RPM_CMD_REG, 0);
2585 }
2586}
2587
2588static int get_free_buf_idx(struct hevc_state_s *hevc)
2589{
2590 int index = INVALID_IDX;
2591 struct PIC_s *pic;
2592 int i;
2593
2594 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2595 pic = hevc->m_PIC[i];
2596 if (pic == NULL ||
2597 pic->index == -1 ||
2598 pic->BUF_index == -1)
2599 continue;
2600
2601 if (pic->output_mark == 0 &&
2602 pic->referenced == 0 &&
2603 pic->output_ready == 0) {
2604 pic->output_ready = 1;
2605 index = i;
2606 break;
2607 }
2608 }
2609
2610 return index;
2611}
2612
2613static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2614{
2615 int i;
2616 struct PIC_s *pic;
2617 struct PIC_s *ret_pic = NULL;
2618 if (POC == INVALID_POC)
2619 return NULL;
2620 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2621 pic = hevc->m_PIC[i];
2622 if (pic == NULL || pic->index == -1 ||
2623 pic->BUF_index == -1)
2624 continue;
2625 if (pic->POC == POC) {
2626 if (ret_pic == NULL)
2627 ret_pic = pic;
2628 else {
2629 if (pic->decode_idx > ret_pic->decode_idx)
2630 ret_pic = pic;
2631 }
2632 }
2633 }
2634 return ret_pic;
2635}
2636
2637static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2638{
2639 int i;
2640 struct PIC_s *pic;
2641 struct PIC_s *ret_pic = NULL;
2642
2643 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2644 pic = hevc->m_PIC[i];
2645 if (pic == NULL || pic->index == -1 ||
2646 pic->BUF_index == -1)
2647 continue;
2648 if ((pic->POC == POC) && (pic->referenced)) {
2649 if (ret_pic == NULL)
2650 ret_pic = pic;
2651 else {
2652 if (pic->decode_idx > ret_pic->decode_idx)
2653 ret_pic = pic;
2654 }
2655 }
2656 }
2657
2658 if (ret_pic == NULL) {
2659 if (get_dbg_flag(hevc)) {
2660 hevc_print(hevc, 0,
2661 "Wrong, POC of %d is not in referenced list\n",
2662 POC);
2663 }
2664 ret_pic = get_pic_by_POC(hevc, POC);
2665 }
2666 return ret_pic;
2667}
2668
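/* returns floor(log2(val)); note that val == 0 yields (unsigned int)-1 */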
2669static unsigned int log2i(unsigned int val)
2670{
2671 unsigned int ret = -1;
2672
2673 while (val != 0) {
2674 val >>= 1;
2675 ret++;
2676 }
2677 return ret;
2678}
2679
2680static int init_buf_spec(struct hevc_state_s *hevc);
2681
2682static bool v4l_is_there_vframe_bound(struct hevc_state_s *hevc)
2683{
2684 int i;
2685
2686 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2687 struct PIC_s *pic = hevc->m_PIC[i];
2688
2689 if (pic && pic->vframe_bound)
2690 return true;
2691 }
2692
2693 return false;
2694}
2695
2696static void v4l_mmu_buffer_release(struct hevc_state_s *hevc)
2697{
2698 int i;
2699
2700 /* release workspace */
2701 if (hevc->bmmu_box)
2702 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2703 BMMU_WORKSPACE_ID);
2704 /*
2705  * Only when a vframe comes back to the driver can we be sure that the
2706  * vframe and its fd are still related. If playback exits, buffers held
2707  * by the capture side must be released by the upper application when
2708  * the fd is closed; the remaining buffers are released by the driver.
2709  */
2710 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2711 struct PIC_s *pic = hevc->m_PIC[i];
2712
2713 if (pic && !pic->vframe_bound) {
2714 if (hevc->bmmu_box)
2715 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2716 VF_BUFFER_IDX(i));
2717 if (hevc->mmu_box)
2718 decoder_mmu_box_free_idx(hevc->mmu_box, i);
2719
2720 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
2721 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
2722 __func__, i, hevc->bmmu_box, hevc->mmu_box);
2723 }
2724 }
2725}
2726
2727static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2728{
2729 if (hevc->is_used_v4l &&
2730 v4l_is_there_vframe_bound(hevc)) {
2731 if (get_double_write_mode(hevc) != 0x10) {
2732 v4l_mmu_buffer_release(hevc);
2733 return;
2734 }
2735 }
2736
2737 if (hevc->mmu_box)
2738 decoder_mmu_box_free(hevc->mmu_box);
2739 hevc->mmu_box = NULL;
2740
2741 if (hevc->bmmu_box)
2742 decoder_bmmu_box_free(hevc->bmmu_box);
2743 hevc->bmmu_box = NULL;
2744}
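/*
 * The MMU box caches up to 64 MB of 4K pages per instance (24 MB when
 * the stream is known to be 1080p or smaller); the bmmu box manages the
 * linear allocations (workspace, MV and frame buffers).
 */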
2745static int init_mmu_buffers(struct hevc_state_s *hevc)
2746{
2747 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2748 CODEC_MM_FLAGS_TVP : 0;
2749 int buf_size = 64;
2750
2751 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2752 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2753 buf_size = 24;
2754 }
2755
2756 if (get_dbg_flag(hevc)) {
2757 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2758 __func__, hevc->max_pic_w, hevc->max_pic_h);
2759 }
2760
2761 hevc->need_cache_size = buf_size * SZ_1M;
2762 hevc->sc_start_time = get_jiffies_64();
2763 if (hevc->mmu_enable
2764 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2765 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2766 hevc->index,
2767 MAX_REF_PIC_NUM,
2768 buf_size * SZ_1M,
2769 tvp_flag
2770 );
2771 if (!hevc->mmu_box) {
2772 pr_err("h265 alloc mmu box failed!!\n");
2773 return -1;
2774 }
2775 }
2776
2777 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2778 hevc->index,
2779 BMMU_MAX_BUFFERS,
2780 4 + PAGE_SHIFT,
2781 CODEC_MM_FLAGS_CMA_CLEAR |
2782 CODEC_MM_FLAGS_FOR_VDECODER |
2783 tvp_flag);
2784 if (!hevc->bmmu_box) {
2785 if (hevc->mmu_box)
2786 decoder_mmu_box_free(hevc->mmu_box);
2787 hevc->mmu_box = NULL;
2788 pr_err("h265 alloc mmu box failed!!\n");
2789 return -1;
2790 }
2791 return 0;
2792}
2793
2794struct buf_stru_s
2795{
2796 int lcu_total;
2797 int mc_buffer_size_h;
2798 int mc_buffer_size_u_v_h;
2799};
2800
2801#ifndef MV_USE_FIXED_BUF
2802static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2803{
2804 int i;
2805 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2806 if (hevc->m_mv_BUF[i].start_adr) {
2807 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2808 hevc_print(hevc, 0,
2809 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2810 i, hevc->m_mv_BUF[i].start_adr,
2811 hevc->m_mv_BUF[i].size,
2812 hevc->m_mv_BUF[i].used_flag);
2813 decoder_bmmu_box_free_idx(
2814 hevc->bmmu_box,
2815 MV_BUFFER_IDX(i));
2816 hevc->m_mv_BUF[i].start_adr = 0;
2817 hevc->m_mv_BUF[i].size = 0;
2818 hevc->m_mv_BUF[i].used_flag = 0;
2819 }
2820 }
2821 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2822 if (hevc->m_PIC[i] != NULL)
2823 hevc->m_PIC[i]->mv_buf_index = -1;
2824 }
2825}
2826
2827static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2828{
2829 int ret = 0;
2830 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2831 if (decoder_bmmu_box_alloc_buf_phy
2832 (hevc->bmmu_box,
2833 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2834 DRIVER_NAME,
2835 &hevc->m_mv_BUF[i].start_adr) < 0) {
2836 hevc->m_mv_BUF[i].start_adr = 0;
2837 ret = -1;
2838 } else {
2839 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2840 hevc->m_mv_BUF[i].used_flag = 0;
2841 ret = 0;
2842 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2843 hevc_print(hevc, 0,
2844 "MV Buffer %d: start_adr %p size %x\n",
2845 i,
2846 (void *)hevc->m_mv_BUF[i].start_adr,
2847 hevc->m_mv_BUF[i].size);
2848 }
2849 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2850 void *mem_start_virt;
2851 mem_start_virt =
2852 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2853 if (mem_start_virt) {
2854 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2855 codec_mm_dma_flush(mem_start_virt,
2856 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2857 } else {
2858 mem_start_virt = codec_mm_vmap(
2859 hevc->m_mv_BUF[i].start_adr,
2860 hevc->m_mv_BUF[i].size);
2861 if (mem_start_virt) {
2862 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2863 codec_mm_dma_flush(mem_start_virt,
2864 hevc->m_mv_BUF[i].size,
2865 DMA_TO_DEVICE);
2866 codec_mm_unmap_phyaddr(mem_start_virt);
2867 } else {
2868 /* no virtual mapping for TVP playback;
2869    the buffer may need to be cleared by ucode */
2870 pr_err("%s: failed to map mem_start_virt\n", __func__);
2871 }
2872 }
2873 }
2874 }
2875 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2876 return ret;
2877}
2878#endif
2879
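/*
 * Pick (or allocate) a motion vector buffer for the picture. Without
 * MV_USE_FIXED_BUF the buffer size tracks the picture size (8K/4K/
 * smaller) with 64 KB of slack, and the write start address is rounded
 * up to the next 64 KB boundary: (start_adr + 0xffff) & ~0xffff.
 */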
2880static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2881{
2882#ifdef MV_USE_FIXED_BUF
2883 if (pic && pic->index >= 0) {
2884 if (IS_8K_SIZE(pic->width, pic->height)) {
2885 pic->mpred_mv_wr_start_addr =
2886 hevc->work_space_buf->mpred_mv.buf_start
2887 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2888 } else {
2889 pic->mpred_mv_wr_start_addr =
2890 hevc->work_space_buf->mpred_mv.buf_start
2891 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2892 }
2893 }
2894 return 0;
2895#else
2896 int i;
2897 int ret = -1;
2898 int new_size;
2899 if (IS_8K_SIZE(pic->width, pic->height))
2900 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2901 else if (IS_4K_SIZE(pic->width, pic->height))
2902 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2903 else
2904 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2905 if (new_size != hevc->mv_buf_size) {
2906 dealloc_mv_bufs(hevc);
2907 hevc->mv_buf_size = new_size;
2908 }
2909 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2910 if (hevc->m_mv_BUF[i].start_adr &&
2911 hevc->m_mv_BUF[i].used_flag == 0) {
2912 hevc->m_mv_BUF[i].used_flag = 1;
2913 ret = i;
2914 break;
2915 }
2916 }
2917 if (ret < 0) {
2918 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2919 if (hevc->m_mv_BUF[i].start_adr == 0) {
2920 if (alloc_mv_buf(hevc, i) >= 0) {
2921 hevc->m_mv_BUF[i].used_flag = 1;
2922 ret = i;
2923 }
2924 break;
2925 }
2926 }
2927 }
2928
2929 if (ret >= 0) {
2930 pic->mv_buf_index = ret;
2931 pic->mpred_mv_wr_start_addr =
2932 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2933 (~0xffff);
2934 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2935 "%s => %d (0x%x) size 0x%x\n",
2936 __func__, ret,
2937 pic->mpred_mv_wr_start_addr,
2938 hevc->m_mv_BUF[ret].size);
2939
2940 } else {
2941 hevc_print(hevc, 0,
2942 "%s: Error, mv buf is not enough\n",
2943 __func__);
2944 }
2945 return ret;
2946
2947#endif
2948}
2949
2950static void put_mv_buf(struct hevc_state_s *hevc,
2951 struct PIC_s *pic)
2952{
2953#ifndef MV_USE_FIXED_BUF
2954 int i = pic->mv_buf_index;
2955 if (i < 0 || i >= MAX_REF_PIC_NUM) {
2956 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2957 "%s: index %d beyond range\n",
2958 __func__, i);
2959 return;
2960 }
2961 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2962 "%s(%d): used_flag(%d)\n",
2963 __func__, i,
2964 hevc->m_mv_BUF[i].used_flag);
2965
2966 if (hevc->m_mv_BUF[i].start_adr &&
2967 hevc->m_mv_BUF[i].used_flag)
2968 hevc->m_mv_BUF[i].used_flag = 0;
2969 pic->mv_buf_index = -1;
2970#endif
2971}
2972
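/*
 * Per-picture buffer budget: the MMU compressed-frame header (when
 * mmu_enable), the down-scaled double-write planes (three 64 KB-aligned
 * half-frame chunks when a double write mode is active), the compressed
 * reference body + header (only without MMU and not in double-write-only
 * mode), plus 64 KB of slack for start-address alignment.
 */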
2973static int cal_current_buf_size(struct hevc_state_s *hevc,
2974 struct buf_stru_s *buf_stru)
2975{
2976
2977 int buf_size;
2978 int pic_width = hevc->pic_w;
2979 int pic_height = hevc->pic_h;
2980 int lcu_size = hevc->lcu_size;
2981 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
2982 1 : pic_width / lcu_size;
2983 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
2984 1 : pic_height / lcu_size;
2985 /*SUPPORT_10BIT*/
2986 int losless_comp_header_size = compute_losless_comp_header_size
2987 (pic_width, pic_height);
2988 /*always alloc buf for 10bit*/
2989 int losless_comp_body_size = compute_losless_comp_body_size
2990 (hevc, pic_width, pic_height, 0);
2991 int mc_buffer_size = losless_comp_header_size
2992 + losless_comp_body_size;
2993 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
2994 int mc_buffer_size_u_v_h = 0;
2995
2996 int dw_mode = get_double_write_mode(hevc);
2997
2998 if (hevc->mmu_enable) {
2999 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3000 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3001 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
3002 << 16;
3003 else
3004 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
3005 << 16;
3006 } else
3007 buf_size = 0;
3008
3009 if (dw_mode) {
3010 int pic_width_dw = pic_width /
3011 get_double_write_ratio(hevc, dw_mode);
3012 int pic_height_dw = pic_height /
3013 get_double_write_ratio(hevc, dw_mode);
3014
3015 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
3016 pic_width_dw / lcu_size + 1 :
3017 pic_width_dw / lcu_size;
3018 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
3019 pic_height_dw / lcu_size + 1 :
3020 pic_height_dw / lcu_size;
3021 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
3022
3023 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
3024 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
3025 /*64k alignment*/
3026 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
3027 }
3028
3029 if ((!hevc->mmu_enable) &&
3030 ((dw_mode & 0x10) == 0)) {
3031 /* using compressed mode without MMU;
3032    need a buffer for compressed decoding */
3033 buf_size += (mc_buffer_size_h << 16);
3034 }
3035
3036 /* in case the start address is not 64 KB aligned */
3037 if (buf_size > 0)
3038 buf_size += 0x10000;
3039
3040 if (buf_stru) {
3041 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
3042 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
3043 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
3044 }
3045
3046 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
3047 pic_width, pic_height, losless_comp_header_size,
3048 losless_comp_body_size, mc_buffer_size_h,
3049 mc_buffer_size_u_v_h, buf_size);
3050
3051 return buf_size;
3052}
3053
3054static int v4l_alloc_buf(struct hevc_state_s *hevc)
3055{
3056 int i;
3057 int ret = -1;
3058 struct vdec_v4l2_buffer *fb = NULL;
3059
3060 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3061 return ret;
3062
3063 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3064 if (ret < 0) {
3065 hevc_print(hevc, 0, "[%d] H265 get buffer fail.\n",
3066 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id);
3067 return ret;
3068 }
3069
3070 for (i = 0; i < BUF_POOL_SIZE; i++)
3071 if (hevc->m_BUF[i].start_adr == 0)
3072 break;
3073
3074 if (hevc->mmu_enable) {
3075 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3076 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3077 hevc->m_BUF[i].header_size =
3078 ALIGN(MMU_COMPRESS_8K_HEADER_SIZE, 0x10000);
3079 else
3080 hevc->m_BUF[i].header_size =
3081 ALIGN(MMU_COMPRESS_HEADER_SIZE, 0x10000);
3082
3083 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
3084 VF_BUFFER_IDX(i), hevc->m_BUF[i].header_size,
3085 DRIVER_NAME, &hevc->m_BUF[i].header_addr);
3086 if (ret < 0) {
3087 hevc_print(hevc, PRINT_FLAG_ERROR,
3088 "%s[%d], header size: %d, no mem fatal err\n",
3089 __func__, i, hevc->m_BUF[i].header_size);
3090 return ret;
3091 }
3092 }
3093
3094 hevc->m_BUF[i].used_flag = 0;
3095 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3096 if (fb->num_planes == 1) {
3097 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3098 hevc->m_BUF[i].size = fb->m.mem[0].size;
3099 hevc->m_BUF[i].luma_size = fb->m.mem[0].offset;
3100 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3101 } else if (fb->num_planes == 2) {
3102 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3103 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3104 hevc->m_BUF[i].luma_size = fb->m.mem[0].size;
3105 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3106 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3107 }
3108
3109 return ret;
3110}
3111
3112static int alloc_buf(struct hevc_state_s *hevc)
3113{
3114 int i;
3115 int ret = -1;
3116 int buf_size = cal_current_buf_size(hevc, NULL);
3117
3118 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3119 return ret;
3120
3121 for (i = 0; i < BUF_POOL_SIZE; i++) {
3122 if (hevc->m_BUF[i].start_adr == 0)
3123 break;
3124 }
3125 if (i < BUF_POOL_SIZE) {
3126 if (buf_size > 0) {
3127 ret = decoder_bmmu_box_alloc_buf_phy
3128 (hevc->bmmu_box,
3129 VF_BUFFER_IDX(i), buf_size,
3130 DRIVER_NAME,
3131 &hevc->m_BUF[i].start_adr);
3132 if (ret < 0) {
3133 hevc->m_BUF[i].start_adr = 0;
3134 if (i <= 8) {
3135 hevc->fatal_error |=
3136 DECODER_FATAL_ERROR_NO_MEM;
3137 hevc_print(hevc, PRINT_FLAG_ERROR,
3138 "%s[%d], size: %d, no mem fatal err\n",
3139 __func__, i, buf_size);
3140 }
3141 }
3142
3143 if (ret >= 0) {
3144 hevc->m_BUF[i].size = buf_size;
3145 hevc->m_BUF[i].used_flag = 0;
3146 ret = 0;
3147
3148 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3149 hevc_print(hevc, 0,
3150 "Buffer %d: start_adr %p size %x\n",
3151 i,
3152 (void *)hevc->m_BUF[i].start_adr,
3153 hevc->m_BUF[i].size);
3154 }
3155 /*flush the buffer make sure no cache dirty*/
3156 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3157 void *mem_start_virt;
3158 mem_start_virt =
3159 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3160 if (mem_start_virt) {
3161 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3162 codec_mm_dma_flush(mem_start_virt,
3163 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3164 } else {
3165 mem_start_virt = codec_mm_vmap(
3166 hevc->m_BUF[i].start_adr,
3167 hevc->m_BUF[i].size);
3168 if (mem_start_virt) {
3169 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3170 codec_mm_dma_flush(mem_start_virt,
3171 hevc->m_BUF[i].size,
3172 DMA_TO_DEVICE);
3173 codec_mm_unmap_phyaddr(mem_start_virt);
3174 } else {
3175 /* no virtual mapping for TVP playback;
3176    the buffer may need to be cleared by ucode */
3177 pr_err("%s: failed to map mem_start_virt\n", __func__);
3178 }
3179 }
3180 }
3181 }
3182 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3183 } else
3184 ret = 0;
3185 }
3186
3187 if (ret >= 0) {
3188 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3189 hevc_print(hevc, 0,
3190 "alloc buf(%d) for %d/%d size 0x%x) => %p\n",
3191 i, hevc->pic_w, hevc->pic_h,
3192 buf_size,
3193 hevc->m_BUF[i].start_adr);
3194 }
3195 } else {
3196 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3197 hevc_print(hevc, 0,
3198 "alloc buf(%d) for %d/%d size 0x%x) => Fail!!!\n",
3199 i, hevc->pic_w, hevc->pic_h,
3200 buf_size);
3201 }
3202 }
3203 return ret;
3204}
3205
3206static void set_buf_unused(struct hevc_state_s *hevc, int i)
3207{
3208 if (i >= 0 && i < BUF_POOL_SIZE)
3209 hevc->m_BUF[i].used_flag = 0;
3210}
3211
3212static void dealloc_unused_buf(struct hevc_state_s *hevc)
3213{
3214 int i;
3215 for (i = 0; i < BUF_POOL_SIZE; i++) {
3216 if (hevc->m_BUF[i].start_adr &&
3217 hevc->m_BUF[i].used_flag == 0) {
3218 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3219 hevc_print(hevc, 0,
3220 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3221 i, hevc->m_BUF[i].start_adr,
3222 hevc->m_BUF[i].size);
3223 }
3224 if (!hevc->is_used_v4l)
3225 decoder_bmmu_box_free_idx(
3226 hevc->bmmu_box,
3227 VF_BUFFER_IDX(i));
3228 hevc->m_BUF[i].start_adr = 0;
3229 hevc->m_BUF[i].size = 0;
3230 }
3231 }
3232}
3233
3234static void dealloc_pic_buf(struct hevc_state_s *hevc,
3235 struct PIC_s *pic)
3236{
3237 int i = pic->BUF_index;
3238 pic->BUF_index = -1;
3239 if (i >= 0 &&
3240 i < BUF_POOL_SIZE &&
3241 hevc->m_BUF[i].start_adr) {
3242 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3243 hevc_print(hevc, 0,
3244 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3245 i, hevc->m_BUF[i].start_adr,
3246 hevc->m_BUF[i].size);
3247 }
3248
3249 if (!hevc->is_used_v4l)
3250 decoder_bmmu_box_free_idx(
3251 hevc->bmmu_box,
3252 VF_BUFFER_IDX(i));
3253 hevc->m_BUF[i].used_flag = 0;
3254 hevc->m_BUF[i].start_adr = 0;
3255 hevc->m_BUF[i].size = 0;
3256 }
3257}
3258
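/*
 * Illustrative example: sps_num_reorder_pics_0 = 4 with a dynamic buffer
 * margin of 7 gives used_buf_num = 11 (12 with MULTI_INSTANCE_SUPPORT),
 * plus one more if sps_max_dec_pic_buffering_minus1_0 exceeds the
 * reorder depth by 4 or more, capped at MAX_BUF_NUM.
 */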
3259static int get_work_pic_num(struct hevc_state_s *hevc)
3260{
3261 int used_buf_num = 0;
3262 int sps_pic_buf_diff = 0;
3263
3264 if (get_dynamic_buf_num_margin(hevc) > 0) {
3265 if ((!hevc->sps_num_reorder_pics_0) &&
3266 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3267 /* the range of sps_num_reorder_pics_0 is in
3268 [0, sps_max_dec_pic_buffering_minus1_0] */
3269 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3270 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3271 } else
3272 used_buf_num = hevc->sps_num_reorder_pics_0
3273 + get_dynamic_buf_num_margin(hevc);
3274
3275 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3276 - hevc->sps_num_reorder_pics_0;
3277#ifdef MULTI_INSTANCE_SUPPORT
3278 /*
3279  * need one more buffer for multi instance, as
3280  * apply_ref_pic_set() has no chance to run to
3281  * clear the referenced flag in some cases
3282  */
3283 if (hevc->m_ins_flag)
3284 used_buf_num++;
3285#endif
3286 } else
3287 used_buf_num = max_buf_num;
3288
3289 if (hevc->save_buffer_mode)
3290 hevc_print(hevc, 0,
3291 "save buf _mode : dynamic_buf_num_margin %d ----> %d \n",
3292 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3293
3294 if (sps_pic_buf_diff >= 4)
3295 {
3296 used_buf_num += 1;
3297 }
3298
3299 if (used_buf_num > MAX_BUF_NUM)
3300 used_buf_num = MAX_BUF_NUM;
3301 return used_buf_num;
3302}
3303
3304static int get_alloc_pic_count(struct hevc_state_s *hevc)
3305{
3306 int alloc_pic_count = 0;
3307 int i;
3308 struct PIC_s *pic;
3309 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3310 pic = hevc->m_PIC[i];
3311 if (pic && pic->index >= 0)
3312 alloc_pic_count++;
3313 }
3314 return alloc_pic_count;
3315}
3316
3317static int v4l_config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3318{
3319 int i;
3320 int dw_mode = get_double_write_mode(hevc);
3321
3322 for (i = 0; i < BUF_POOL_SIZE; i++) {
3323 if (hevc->m_BUF[i].start_adr != 0 &&
3324 hevc->m_BUF[i].used_flag == 0) {
3325 hevc->m_BUF[i].used_flag = 1;
3326 break;
3327 }
3328 }
3329
3330 if (i >= BUF_POOL_SIZE)
3331 return -1;
3332
3333 if (hevc->mmu_enable)
3334 pic->header_adr = hevc->m_BUF[i].header_addr;
3335
3336 pic->BUF_index = i;
3337 pic->POC = INVALID_POC;
3338 pic->mc_canvas_y = pic->index;
3339 pic->mc_canvas_u_v = pic->index;
3340
3341 if (dw_mode & 0x10) {
3342 pic->mc_y_adr = hevc->m_BUF[i].start_adr;
3343 pic->mc_u_v_adr = pic->mc_y_adr + hevc->m_BUF[i].luma_size;
3344 pic->mc_canvas_y = (pic->index << 1);
3345 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3346
3347 pic->dw_y_adr = pic->mc_y_adr;
3348 pic->dw_u_v_adr = pic->mc_u_v_adr;
3349 } else if (dw_mode) {
3350 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3351 pic->dw_u_v_adr = pic->dw_y_adr + hevc->m_BUF[i].luma_size;
3352 }
3353
3354 return 0;
3355}
3356
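/*
 * Carve the picture's buffers out of m_BUF[i]: the optional MMU
 * compressed-frame header at the start, then a 64 KB-aligned area holding
 * the compressed reference frame (non-MMU, non-double-write-only case)
 * followed by the double-write luma and chroma planes.
 */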
3357static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3358{
3359 int ret = -1;
3360 int i;
3361 /*int lcu_size_log2 = hevc->lcu_size_log2;
3362 int MV_MEM_UNIT=lcu_size_log2==
3363 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3364 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3365 5 ? 0x80 : 0x20;
3366 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3367 hevc->work_space_buf->mpred_mv.buf_size;*/
3368 unsigned int y_adr = 0;
3369 struct buf_stru_s buf_stru;
3370 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3371 int dw_mode = get_double_write_mode(hevc);
3372
3373 for (i = 0; i < BUF_POOL_SIZE; i++) {
3374 if (hevc->m_BUF[i].start_adr != 0 &&
3375 hevc->m_BUF[i].used_flag == 0 &&
3376 buf_size <= hevc->m_BUF[i].size) {
3377 hevc->m_BUF[i].used_flag = 1;
3378 break;
3379 }
3380 }
3381
3382 if (i >= BUF_POOL_SIZE)
3383 return -1;
3384
3385 if (hevc->mmu_enable) {
3386 pic->header_adr = hevc->m_BUF[i].start_adr;
3387 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3388 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3389 y_adr = hevc->m_BUF[i].start_adr +
3390 MMU_COMPRESS_8K_HEADER_SIZE;
3391 else
3392 y_adr = hevc->m_BUF[i].start_adr +
3393 MMU_COMPRESS_HEADER_SIZE;
3394 } else
3395 y_adr = hevc->m_BUF[i].start_adr;
3396
3397 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3398
3399 pic->POC = INVALID_POC;
3400 /* ensure get_pic_by_POC() does not
3401    return a buffer that is not yet decoded */
3402 pic->BUF_index = i;
3403
3404 if ((!hevc->mmu_enable) &&
3405 ((dw_mode & 0x10) == 0)
3406 ) {
3407 pic->mc_y_adr = y_adr;
3408 y_adr += (buf_stru.mc_buffer_size_h << 16);
3409 }
3410 pic->mc_canvas_y = pic->index;
3411 pic->mc_canvas_u_v = pic->index;
3412 if (dw_mode & 0x10) {
3413 pic->mc_y_adr = y_adr;
3414 pic->mc_u_v_adr = y_adr +
3415 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3416 pic->mc_canvas_y = (pic->index << 1);
3417 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3418
3419 pic->dw_y_adr = pic->mc_y_adr;
3420 pic->dw_u_v_adr = pic->mc_u_v_adr;
3421 } else if (dw_mode) {
3422 pic->dw_y_adr = y_adr;
3423 pic->dw_u_v_adr = pic->dw_y_adr +
3424 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3425 }
3426
3427 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3428 hevc_print(hevc, 0,
3429 "%s index %d BUF_index %d mc_y_adr %x\n",
3430 __func__, pic->index,
3431 pic->BUF_index, pic->mc_y_adr);
3432 if (hevc->mmu_enable &&
3433 dw_mode)
3434 hevc_print(hevc, 0,
3435 "mmu double write adr %ld\n",
3436 pic->cma_alloc_addr);
3437 }
3438 ret = 0;
3439
3440 return ret;
3441}
3442
3443static void init_pic_list(struct hevc_state_s *hevc)
3444{
3445 int i;
3446 int init_buf_num = get_work_pic_num(hevc);
3447 int dw_mode = get_double_write_mode(hevc);
3448 struct vdec_s *vdec = hw_to_vdec(hevc);
3449 /* decoder buffer allocation is deferred when working in V4L mode */
3450 if (!hevc->is_used_v4l) {
3451 for (i = 0; i < init_buf_num; i++) {
3452 if (alloc_buf(hevc) < 0) {
3453 if (i <= 8) {
3454 /* if (i + 1) >= 9 buffers were already
3455    allocated, don't report a fatal error */
3456 hevc->fatal_error |=
3457 DECODER_FATAL_ERROR_NO_MEM;
3458 }
3459 break;
3460 }
3461 }
3462 }
3463
3464 for (i = 0; i < init_buf_num; i++) {
3465 struct PIC_s *pic = hevc->m_PIC[i];
3466
3467 if (!pic) {
3468 pic = vmalloc(sizeof(struct PIC_s));
3469 if (pic == NULL) {
3470 hevc_print(hevc, 0,
3471 "%s: alloc pic %d fail!!!\n",
3472 __func__, i);
3473 break;
3474 }
3475 hevc->m_PIC[i] = pic;
3476 }
3477 memset(pic, 0, sizeof(struct PIC_s));
3478
3479 pic->index = i;
3480 pic->BUF_index = -1;
3481 pic->mv_buf_index = -1;
3482 if (vdec->parallel_dec == 1) {
3483 pic->y_canvas_index = -1;
3484 pic->uv_canvas_index = -1;
3485 }
3486
3487 pic->width = hevc->pic_w;
3488 pic->height = hevc->pic_h;
3489 pic->double_write_mode = dw_mode;
3490
3491 /* canvas configuration is deferred when working in V4L mode */
3492 if (!hevc->is_used_v4l) {
3493 if (config_pic(hevc, pic) < 0) {
3494 if (get_dbg_flag(hevc))
3495 hevc_print(hevc, 0,
3496 "Config_pic %d fail\n", pic->index);
3497 pic->index = -1;
3498 i++;
3499 break;
3500 }
3501
3502 if (pic->double_write_mode)
3503 set_canvas(hevc, pic);
3504 }
3505 }
3506
3507 for (; i < MAX_REF_PIC_NUM; i++) {
3508 struct PIC_s *pic = hevc->m_PIC[i];
3509
3510 if (!pic) {
3511 pic = vmalloc(sizeof(struct PIC_s));
3512 if (pic == NULL) {
3513 hevc_print(hevc, 0,
3514 "%s: alloc pic %d fail!!!\n",
3515 __func__, i);
3516 break;
3517 }
3518 hevc->m_PIC[i] = pic;
3519 }
3520 memset(pic, 0, sizeof(struct PIC_s));
3521
3522 pic->index = -1;
3523 pic->BUF_index = -1;
3524 if (vdec->parallel_dec == 1) {
3525 pic->y_canvas_index = -1;
3526 pic->uv_canvas_index = -1;
3527 }
3528 }
3529
3530}
3531
3532static void uninit_pic_list(struct hevc_state_s *hevc)
3533{
3534 struct vdec_s *vdec = hw_to_vdec(hevc);
3535 int i;
3536#ifndef MV_USE_FIXED_BUF
3537 dealloc_mv_bufs(hevc);
3538#endif
3539 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3540 struct PIC_s *pic = hevc->m_PIC[i];
3541
3542 if (pic) {
3543 if (vdec->parallel_dec == 1) {
3544 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3545 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3546 }
3547 release_aux_data(hevc, pic);
3548 vfree(pic);
3549 hevc->m_PIC[i] = NULL;
3550 }
3551 }
3552}
3553
3554#ifdef LOSLESS_COMPRESS_MODE
3555static void init_decode_head_hw(struct hevc_state_s *hevc)
3556{
3557
3558 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3559 unsigned int data32;
3560
3561 int losless_comp_header_size =
3562 compute_losless_comp_header_size(hevc->pic_w,
3563 hevc->pic_h);
3564 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3565 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3566
3567 hevc->losless_comp_body_size = losless_comp_body_size;
3568
3569
3570 if (hevc->mmu_enable) {
3571 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3572 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3573 } else {
3574 if (hevc->mem_saving_mode == 1)
3575 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3576 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3577 else
3578 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3579 ((workaround_enable & 2) ? 1 : 0));
3580 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3581 /*
3582 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3583 * //8-bit mode
3584 */
3585 }
3586 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3587 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3588 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3589
3590 if (hevc->mmu_enable) {
3591 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3592 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3593 buf_spec->mmu_vbh.buf_start +
3594 buf_spec->mmu_vbh.buf_size/2);
3595 data32 = READ_VREG(HEVC_SAO_CTRL9);
3596 data32 |= 0x1;
3597 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3598
3599 /* use HEVC_CM_HEADER_START_ADDR */
3600 data32 = READ_VREG(HEVC_SAO_CTRL5);
3601 data32 |= (1<<10);
3602 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3603 }
3604
3605 if (!hevc->m_ins_flag)
3606 hevc_print(hevc, 0,
3607 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3608 __func__, hevc->pic_w, hevc->pic_h,
3609 losless_comp_body_size, losless_comp_header_size);
3610
3611}
3612#endif
3613#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3614
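/*
 * Program the hardware picture address table: each allocated picture's
 * compressed-header (MMU) or luma/chroma base address is written in
 * 32-byte units (addr >> 5) on GXL and later; the remaining entries are
 * padded with the last valid picture, presumably so the hardware never
 * fetches from an unconfigured slot.
 */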
3615static void init_pic_list_hw(struct hevc_state_s *hevc)
3616{
3617 int i;
3618 int cur_pic_num = MAX_REF_PIC_NUM;
3619 int dw_mode = get_double_write_mode(hevc);
3620 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3621 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3622 (0x1 << 1) | (0x1 << 2));
3623 else
3624 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3625
3626 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3627 if (hevc->m_PIC[i] == NULL ||
3628 hevc->m_PIC[i]->index == -1) {
3629 cur_pic_num = i;
3630 break;
3631 }
3632 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3633 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3634 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3635 hevc->m_PIC[i]->header_adr>>5);
3636 else
3637 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3638 hevc->m_PIC[i]->mc_y_adr >> 5);
3639 } else
3640 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3641 hevc->m_PIC[i]->mc_y_adr |
3642 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3643 if (dw_mode & 0x10) {
3644 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3645 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3646 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3647 }
3648 else
3649 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3650 hevc->m_PIC[i]->mc_u_v_adr |
3651 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3652 | 0x1);
3653 }
3654 }
3655 if (cur_pic_num == 0)
3656 return;
3657 for (; i < MAX_REF_PIC_NUM; i++) {
3658 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3659 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3660 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3661 hevc->m_PIC[cur_pic_num-1]->header_adr>>5);
3662 else
3663 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3664 hevc->m_PIC[cur_pic_num-1]->mc_y_adr >> 5);
3665#ifndef LOSLESS_COMPRESS_MODE
3666 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3667 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr >> 5);
3668#endif
3669 } else {
3670 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3671 hevc->m_PIC[cur_pic_num-1]->mc_y_adr|
3672 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_y<<8)
3673 | 0x1);
3674#ifndef LOSLESS_COMPRESS_MODE
3675 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3676 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr|
3677 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_u_v<<8)
3678 | 0x1);
3679#endif
3680 }
3681 }
3682
3683 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3684
3685 /* Zero out canvas registers in IPP -- avoid simulation X */
3686 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3687 (0 << 8) | (0 << 1) | 1);
3688 for (i = 0; i < 32; i++)
3689 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3690
3691#ifdef LOSLESS_COMPRESS_MODE
3692 if ((dw_mode & 0x10) == 0)
3693 init_decode_head_hw(hevc);
3694#endif
3695
3696}
3697
3698
3699static void dump_pic_list(struct hevc_state_s *hevc)
3700{
3701 int i;
3702 struct PIC_s *pic;
3703
3704 hevc_print(hevc, 0,
3705 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3706 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3707 pic = hevc->m_PIC[i];
3708 if (pic == NULL || pic->index == -1)
3709 continue;
3710 hevc_print_cont(hevc, 0,
3711 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3712 pic->index, pic->BUF_index,
3713#ifndef MV_USE_FIXED_BUF
3714 pic->mv_buf_index,
3715#else
3716 -1,
3717#endif
3718 pic->decode_idx, pic->POC, pic->referenced);
3719 hevc_print_cont(hevc, 0,
3720 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3721 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3722 pic->width, pic->height);
3723 hevc_print_cont(hevc, 0,
3724 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3725 pic->output_ready, pic->mpred_mv_wr_start_addr,
3726 pic->vf_ref);
3727 }
3728}
3729
3730static void clear_referenced_flag(struct hevc_state_s *hevc)
3731{
3732 int i;
3733 struct PIC_s *pic;
3734 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3735 pic = hevc->m_PIC[i];
3736 if (pic == NULL || pic->index == -1)
3737 continue;
3738 if (pic->referenced) {
3739 pic->referenced = 0;
3740 put_mv_buf(hevc, pic);
3741 }
3742 }
3743}
3744
3745static void clear_poc_flag(struct hevc_state_s *hevc)
3746{
3747 int i;
3748 struct PIC_s *pic;
3749 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3750 pic = hevc->m_PIC[i];
3751 if (pic == NULL || pic->index == -1)
3752 continue;
3753 pic->POC = INVALID_POC;
3754 }
3755}
3756
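/*
 * Choose the next picture to output: normally the pending picture with
 * the smallest POC, released only once the number of not-yet-displayed
 * pictures exceeds its num_reorder_pic (or on flush); in I-only trick
 * mode (i_only & 0x4) pictures are output in decode order instead.
 */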
3757static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3758 unsigned char flush_flag)
3759{
3760 int num_pic_not_yet_display = 0;
3761 int i;
3762 struct PIC_s *pic;
3763 struct PIC_s *pic_display = NULL;
3764 struct vdec_s *vdec = hw_to_vdec(hevc);
3765
3766 if (hevc->i_only & 0x4) {
3767 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3768 pic = hevc->m_PIC[i];
3769 if (pic == NULL ||
3770 (pic->index == -1) ||
3771 (pic->BUF_index == -1) ||
3772 (pic->POC == INVALID_POC))
3773 continue;
3774 if (pic->output_mark) {
3775 if (pic_display) {
3776 if (pic->decode_idx <
3777 pic_display->decode_idx)
3778 pic_display = pic;
3779
3780 } else
3781 pic_display = pic;
3782
3783 }
3784 }
3785 if (pic_display) {
3786 pic_display->output_mark = 0;
3787 pic_display->recon_mark = 0;
3788 pic_display->output_ready = 1;
3789 pic_display->referenced = 0;
3790 put_mv_buf(hevc, pic_display);
3791 }
3792 } else {
3793 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3794 pic = hevc->m_PIC[i];
3795 if (pic == NULL ||
3796 (pic->index == -1) ||
3797 (pic->BUF_index == -1) ||
3798 (pic->POC == INVALID_POC))
3799 continue;
3800 if (pic->output_mark)
3801 num_pic_not_yet_display++;
3802 if (pic->slice_type == 2 &&
3803 hevc->vf_pre_count == 0 &&
3804 fast_output_enable & 0x1) {
3805 /*fast output for first I picture*/
3806 pic->num_reorder_pic = 0;
3807 if (vdec->master || vdec->slave)
3808 pic_display = pic;
3809 hevc_print(hevc, 0, "VH265: output first frame\n");
3810 }
3811 }
3812
3813 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3814 pic = hevc->m_PIC[i];
3815 if (pic == NULL ||
3816 (pic->index == -1) ||
3817 (pic->BUF_index == -1) ||
3818 (pic->POC == INVALID_POC))
3819 continue;
3820 if (pic->output_mark) {
3821 if (pic_display) {
3822 if (pic->POC < pic_display->POC)
3823 pic_display = pic;
3824 else if ((pic->POC == pic_display->POC)
3825 && (pic->decode_idx <
3826 pic_display->
3827 decode_idx))
3828 pic_display
3829 = pic;
3830 } else
3831 pic_display = pic;
3832 }
3833 }
3834 if (pic_display) {
3835 if ((num_pic_not_yet_display >
3836 pic_display->num_reorder_pic)
3837 || flush_flag) {
3838 pic_display->output_mark = 0;
3839 pic_display->recon_mark = 0;
3840 pic_display->output_ready = 1;
3841 } else if (num_pic_not_yet_display >=
3842 (MAX_REF_PIC_NUM - 1)) {
3843 pic_display->output_mark = 0;
3844 pic_display->recon_mark = 0;
3845 pic_display->output_ready = 1;
3846 hevc_print(hevc, 0,
3847 "Warning, num_reorder_pic %d is byeond buf num\n",
3848 pic_display->num_reorder_pic);
3849 } else
3850 pic_display = NULL;
3851 }
3852 }
3853
3854 if (pic_display && (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3855 pic_display = NULL;
3856 hevc->first_pic_flag = 0;
3857 }
3858 return pic_display;
3859}
3860
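/*
 * Program the motion-compensation reference canvas table for the
 * current slice: walk list0 (and list1 for B slices), write each
 * reference picture's canvas indices into the ANC canvas registers,
 * and set error_mark on the current picture if a reference is missing,
 * has a mismatching width/height, or is itself marked as in error.
 */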
3861static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3862{
3863 int i;
3864 struct PIC_s *pic;
3865
3866 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3867 hevc_print(hevc, 0,
3868 "config_mc_buffer entered .....\n");
3869 if (cur_pic->slice_type != 2) { /* P and B pic */
3870 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3871 (0 << 8) | (0 << 1) | 1);
3872 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3873 pic =
3874 get_ref_pic_by_POC(hevc,
3875 cur_pic->
3876 m_aiRefPOCList0[cur_pic->
3877 slice_idx][i]);
3878 if (pic) {
3879 if ((pic->width != hevc->pic_w) ||
3880 (pic->height != hevc->pic_h)) {
3881 hevc_print(hevc, 0,
3882 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3883 __func__, pic->POC,
3884 pic->width, pic->height);
3885 cur_pic->error_mark = 1;
3886 }
3887 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3888 cur_pic->error_mark = 1;
3889 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3890 (pic->mc_canvas_u_v << 16)
3891 | (pic->mc_canvas_u_v
3892 << 8) |
3893 pic->mc_canvas_y);
3894 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3895 hevc_print_cont(hevc, 0,
3896 "refid %x mc_canvas_u_v %x",
3897 i, pic->mc_canvas_u_v);
3898 hevc_print_cont(hevc, 0,
3899 " mc_canvas_y %x\n",
3900 pic->mc_canvas_y);
3901 }
3902 } else
3903 cur_pic->error_mark = 1;
3904
3905 if (pic == NULL || pic->error_mark) {
3906 hevc_print(hevc, 0,
3907 "Error %s, %dth poc (%d) %s",
3908 __func__, i,
3909 cur_pic->m_aiRefPOCList0[cur_pic->
3910 slice_idx][i],
3911 pic ? "has error" :
3912 "not in list0");
3913 }
3914 }
3915 }
3916 if (cur_pic->slice_type == 0) { /* B pic */
3917 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3918 hevc_print(hevc, 0,
3919 "config_mc_buffer RefNum_L1\n");
3920 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3921 (16 << 8) | (0 << 1) | 1);
3922
3923 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3924 pic =
3925 get_ref_pic_by_POC(hevc,
3926 cur_pic->
3927 m_aiRefPOCList1[cur_pic->
3928 slice_idx][i]);
3929 if (pic) {
3930 if ((pic->width != hevc->pic_w) ||
3931 (pic->height != hevc->pic_h)) {
3932 hevc_print(hevc, 0,
3933 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3934 __func__, pic->POC,
3935 pic->width, pic->height);
3936 cur_pic->error_mark = 1;
3937 }
3938
3939 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3940 cur_pic->error_mark = 1;
3941 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3942 (pic->mc_canvas_u_v << 16)
3943 | (pic->mc_canvas_u_v
3944 << 8) |
3945 pic->mc_canvas_y);
3946 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3947 hevc_print_cont(hevc, 0,
3948 "refid %x mc_canvas_u_v %x",
3949 i, pic->mc_canvas_u_v);
3950 hevc_print_cont(hevc, 0,
3951 " mc_canvas_y %x\n",
3952 pic->mc_canvas_y);
3953 }
3954 } else
3955 cur_pic->error_mark = 1;
3956
3957 if (pic == NULL || pic->error_mark) {
3958 hevc_print(hevc, 0,
3959 "Error %s, %dth poc (%d) %s",
3960 __func__, i,
3961 cur_pic->m_aiRefPOCList1[cur_pic->
3962 slice_idx][i],
3963 pic ? "has error" :
3964 "not in list1");
3965 }
3966 }
3967 }
3968 return 0;
3969}
3970
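/*
 * Drop pictures that are no longer referenced: any picture whose POC
 * does not appear in the short-term RPS (CUR_RPS) of the current
 * picture is marked unreferenced and its MV buffer is returned.
 */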
3971static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
3972 union param_u *params)
3973{
3974 int ii, i;
3975 int poc_tmp;
3976 struct PIC_s *pic;
3977 unsigned char is_referenced;
3978 /* hevc_print(hevc, 0,
3979 "%s cur_poc %d\n", __func__, cur_poc); */
3980 if (pic_list_debug & 0x2) {
3981 pr_err("cur poc %d\n", cur_poc);
3982 }
3983 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
3984 pic = hevc->m_PIC[ii];
3985 if (pic == NULL ||
3986 pic->index == -1 ||
3987 pic->BUF_index == -1
3988 )
3989 continue;
3990
3991 if (pic->referenced == 0 || pic->POC == cur_poc)
3992 continue;
3993 is_referenced = 0;
3994 for (i = 0; i < 16; i++) {
3995 int delt;
3996
3997 if (params->p.CUR_RPS[i] & 0x8000)
3998 break;
3999 delt =
4000 params->p.CUR_RPS[i] &
4001 ((1 << (RPS_USED_BIT - 1)) - 1);
4002 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
4003 poc_tmp =
4004 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
4005 delt);
4006 } else
4007 poc_tmp = cur_poc + delt;
4008 if (poc_tmp == pic->POC) {
4009 is_referenced = 1;
4010 /* hevc_print(hevc, 0, "i is %d\n", i); */
4011 break;
4012 }
4013 }
4014 if (is_referenced == 0) {
4015 pic->referenced = 0;
4016 put_mv_buf(hevc, pic);
4017 /* hevc_print(hevc, 0,
4018 "set poc %d reference to 0\n", pic->POC); */
4019 if (pic_list_debug & 0x2) {
4020 pr_err("set poc %d reference to 0\n", pic->POC);
4021 }
4022 }
4023 }
4024
4025}
4026
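/*
 * Build the reference picture lists for the current slice: derive
 * RefPicSetStCurr0/1 from the CUR_RPS POC deltas, then fill
 * m_aiRefPOCList0/1 (honoring ref_pic_list_modification when present)
 * and record the slice type and active reference counts in the PIC_s.
 */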
4027static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
4028{
4029 struct PIC_s *pic = hevc->cur_pic;
4030 int i, rIdx;
4031 int num_neg = 0;
4032 int num_pos = 0;
4033 int total_num;
4034 int num_ref_idx_l0_active =
4035 (params->p.num_ref_idx_l0_active >
4036 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4037 params->p.num_ref_idx_l0_active;
4038 int num_ref_idx_l1_active =
4039 (params->p.num_ref_idx_l1_active >
4040 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4041 params->p.num_ref_idx_l1_active;
4042
4043 int RefPicSetStCurr0[16];
4044 int RefPicSetStCurr1[16];
4045
4046 for (i = 0; i < 16; i++) {
4047 RefPicSetStCurr0[i] = 0;
4048 RefPicSetStCurr1[i] = 0;
4049 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
4050 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
4051 }
4052 for (i = 0; i < 16; i++) {
4053 if (params->p.CUR_RPS[i] & 0x8000)
4054 break;
4055 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
4056 int delt =
4057 params->p.CUR_RPS[i] &
4058 ((1 << (RPS_USED_BIT - 1)) - 1);
4059
4060 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
4061 RefPicSetStCurr0[num_neg] =
4062 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
4063 delt);
4064 /* hevc_print(hevc, 0,
4065 * "RefPicSetStCurr0 %x %x %x\n",
4066 * RefPicSetStCurr0[num_neg], pic->POC,
4067 * (0x800-(params[i]&0x7ff)));
4068 */
4069 num_neg++;
4070 } else {
4071 RefPicSetStCurr1[num_pos] = pic->POC + delt;
4072 /* hevc_print(hevc, 0,
4073 * "RefPicSetStCurr1 %d\n",
4074 * RefPicSetStCurr1[num_pos]);
4075 */
4076 num_pos++;
4077 }
4078 }
4079 }
4080 total_num = num_neg + num_pos;
4081 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4082 hevc_print(hevc, 0,
4083 "%s: curpoc %d slice_type %d, total %d ",
4084 __func__, pic->POC, params->p.slice_type, total_num);
4085 hevc_print_cont(hevc, 0,
4086 "num_neg %d num_list0 %d num_list1 %d\n",
4087 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
4088 }
4089
4090 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4091 hevc_print(hevc, 0,
4092 "HEVC Stream buf start ");
4093 hevc_print_cont(hevc, 0,
4094 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
4095 READ_VREG(HEVC_STREAM_START_ADDR),
4096 READ_VREG(HEVC_STREAM_END_ADDR),
4097 READ_VREG(HEVC_STREAM_WR_PTR),
4098 READ_VREG(HEVC_STREAM_RD_PTR),
4099 READ_VREG(HEVC_STREAM_LEVEL),
4100 READ_VREG(HEVC_STREAM_FIFO_CTL),
4101 READ_VREG(HEVC_PARSER_INT_CONTROL));
4102 }
4103
4104 if (total_num > 0) {
4105 if (params->p.modification_flag & 0x1) {
4106 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4107 hevc_print(hevc, 0, "ref0 POC (modification):");
4108 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4109 int cIdx = params->p.modification_list[rIdx];
4110
4111 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4112 cIdx >=
4113 num_neg ? RefPicSetStCurr1[cIdx -
4114 num_neg] :
4115 RefPicSetStCurr0[cIdx];
4116 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4117 hevc_print_cont(hevc, 0, "%d ",
4118 pic->m_aiRefPOCList0[pic->
4119 slice_idx]
4120 [rIdx]);
4121 }
4122 }
4123 } else {
4124 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4125 hevc_print(hevc, 0, "ref0 POC:");
4126 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4127 int cIdx = rIdx % total_num;
4128
4129 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4130 cIdx >=
4131 num_neg ? RefPicSetStCurr1[cIdx -
4132 num_neg] :
4133 RefPicSetStCurr0[cIdx];
4134 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4135 hevc_print_cont(hevc, 0, "%d ",
4136 pic->m_aiRefPOCList0[pic->
4137 slice_idx]
4138 [rIdx]);
4139 }
4140 }
4141 }
4142 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4143 hevc_print_cont(hevc, 0, "\n");
4144 if (params->p.slice_type == B_SLICE) {
4145 if (params->p.modification_flag & 0x2) {
4146 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4147 hevc_print(hevc, 0,
4148 "ref1 POC (modification):");
4149 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4150 rIdx++) {
4151 int cIdx;
4152
4153 if (params->p.modification_flag & 0x1) {
4154 cIdx =
4155 params->p.
4156 modification_list
4157 [num_ref_idx_l0_active +
4158 rIdx];
4159 } else {
4160 cIdx =
4161 params->p.
4162 modification_list[rIdx];
4163 }
4164 pic->m_aiRefPOCList1[pic->
4165 slice_idx][rIdx] =
4166 cIdx >=
4167 num_pos ?
4168 RefPicSetStCurr0[cIdx - num_pos]
4169 : RefPicSetStCurr1[cIdx];
4170 if (get_dbg_flag(hevc) &
4171 H265_DEBUG_BUFMGR) {
4172 hevc_print_cont(hevc, 0, "%d ",
4173 pic->
4174 m_aiRefPOCList1[pic->
4175 slice_idx]
4176 [rIdx]);
4177 }
4178 }
4179 } else {
4180 if (get_dbg_flag(hevc) &
4181 H265_DEBUG_BUFMGR)
4182 hevc_print(hevc, 0, "ref1 POC:");
4183 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4184 rIdx++) {
4185 int cIdx = rIdx % total_num;
4186
4187 pic->m_aiRefPOCList1[pic->
4188 slice_idx][rIdx] =
4189 cIdx >=
4190 num_pos ?
4191 RefPicSetStCurr0[cIdx -
4192 num_pos]
4193 : RefPicSetStCurr1[cIdx];
4194 if (get_dbg_flag(hevc) &
4195 H265_DEBUG_BUFMGR) {
4196 hevc_print_cont(hevc, 0, "%d ",
4197 pic->
4198 m_aiRefPOCList1[pic->
4199 slice_idx]
4200 [rIdx]);
4201 }
4202 }
4203 }
4204 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4205 hevc_print_cont(hevc, 0, "\n");
4206 }
4207 }
4208 /*set m_PIC */
4209 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4210 (params->p.slice_type == P_SLICE) ? 1 :
4211 (params->p.slice_type == B_SLICE) ? 0 : 3;
4212 pic->RefNum_L0 = num_ref_idx_l0_active;
4213 pic->RefNum_L1 = num_ref_idx_l1_active;
4214}
4215
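/*
 * Compute the per-tile layout for the current picture: width/height and
 * start CU of each tile (uniform or explicitly sized from the PPS) plus
 * the SAO above/vertical buffer addresses, falling back to a single
 * tile covering the whole picture when tiles are disabled.
 */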
4216static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4217 int pic_height_cu, int sao_mem_unit,
4218 union param_u *params)
4219{
4220 int i, j;
4221 int start_cu_x, start_cu_y;
4222 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4223 int sao_abv_size = sao_mem_unit * pic_width_cu;
4224#ifdef DETREFILL_ENABLE
4225 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4226 int tmpRefillLcuSize = 1 <<
4227 (params->p.log2_min_coding_block_size_minus3 +
4228 3 + params->p.log2_diff_max_min_coding_block_size);
4229 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4230 "%x, %x, %x, %x\n",
4231 params->p.slice_segment_address,
4232 params->p.bit_depth,
4233 params->p.tiles_enabled_flag,
4234 tmpRefillLcuSize);
4235 if (params->p.slice_segment_address == 0 &&
4236 params->p.bit_depth != 0 &&
4237 (params->p.tiles_enabled_flag & 1) &&
4238 tmpRefillLcuSize == 64)
4239 hevc->delrefill_check = 1;
4240 else
4241 hevc->delrefill_check = 0;
4242 }
4243#endif
4244
4245 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4246 if (params->p.tiles_enabled_flag & 1) {
4247 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4248 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4249
4250 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4251 || hevc->num_tile_row <= 0) {
4252 hevc->num_tile_row = 1;
4253 hevc_print(hevc, 0,
4254 "%s: num_tile_rows_minus1 (%d) error!!\n",
4255 __func__, params->p.num_tile_rows_minus1);
4256 }
4257 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4258 || hevc->num_tile_col <= 0) {
4259 hevc->num_tile_col = 1;
4260 hevc_print(hevc, 0,
4261 "%s: num_tile_columns_minus1 (%d) error!!\n",
4262 __func__, params->p.num_tile_columns_minus1);
4263 }
4264 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4265 hevc_print(hevc, 0,
4266 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4267 __func__, pic_width_cu, pic_height_cu);
4268 hevc_print_cont(hevc, 0,
4269 "num_tile_col %d num_tile_row %d:\n",
4270 hevc->num_tile_col, hevc->num_tile_row);
4271 }
4272
4273 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4274 int w = pic_width_cu / hevc->num_tile_col;
4275 int h = pic_height_cu / hevc->num_tile_row;
4276
4277 start_cu_y = 0;
4278 for (i = 0; i < hevc->num_tile_row; i++) {
4279 start_cu_x = 0;
4280 for (j = 0; j < hevc->num_tile_col; j++) {
4281 if (j == (hevc->num_tile_col - 1)) {
4282 hevc->m_tile[i][j].width =
4283 pic_width_cu -
4284 start_cu_x;
4285 } else
4286 hevc->m_tile[i][j].width = w;
4287 if (i == (hevc->num_tile_row - 1)) {
4288 hevc->m_tile[i][j].height =
4289 pic_height_cu -
4290 start_cu_y;
4291 } else
4292 hevc->m_tile[i][j].height = h;
4293 hevc->m_tile[i][j].start_cu_x
4294 = start_cu_x;
4295 hevc->m_tile[i][j].start_cu_y
4296 = start_cu_y;
4297 hevc->m_tile[i][j].sao_vb_start_addr =
4298 hevc->work_space_buf->sao_vb.
4299 buf_start + j * sao_vb_size;
4300 hevc->m_tile[i][j].sao_abv_start_addr =
4301 hevc->work_space_buf->sao_abv.
4302 buf_start + i * sao_abv_size;
4303 if (get_dbg_flag(hevc) &
4304 H265_DEBUG_BUFMGR) {
4305 hevc_print_cont(hevc, 0,
4306 "{y=%d, x=%d w %d h %d ",
4307 i, j, hevc->m_tile[i][j].width,
4308 hevc->m_tile[i][j].height);
4309 hevc_print_cont(hevc, 0,
4310 "start_x %d start_y %d ",
4311 hevc->m_tile[i][j].start_cu_x,
4312 hevc->m_tile[i][j].start_cu_y);
4313 hevc_print_cont(hevc, 0,
4314 "sao_vb_start 0x%x ",
4315 hevc->m_tile[i][j].
4316 sao_vb_start_addr);
4317 hevc_print_cont(hevc, 0,
4318 "sao_abv_start 0x%x}\n",
4319 hevc->m_tile[i][j].
4320 sao_abv_start_addr);
4321 }
4322 start_cu_x += hevc->m_tile[i][j].width;
4323
4324 }
4325 start_cu_y += hevc->m_tile[i][0].height;
4326 }
4327 } else {
4328 start_cu_y = 0;
4329 for (i = 0; i < hevc->num_tile_row; i++) {
4330 start_cu_x = 0;
4331 for (j = 0; j < hevc->num_tile_col; j++) {
4332 if (j == (hevc->num_tile_col - 1)) {
4333 hevc->m_tile[i][j].width =
4334 pic_width_cu -
4335 start_cu_x;
4336 } else {
4337 hevc->m_tile[i][j].width =
4338 params->p.tile_width[j];
4339 }
4340 if (i == (hevc->num_tile_row - 1)) {
4341 hevc->m_tile[i][j].height =
4342 pic_height_cu -
4343 start_cu_y;
4344 } else {
4345 hevc->m_tile[i][j].height =
4346 params->
4347 p.tile_height[i];
4348 }
4349 hevc->m_tile[i][j].start_cu_x
4350 = start_cu_x;
4351 hevc->m_tile[i][j].start_cu_y
4352 = start_cu_y;
4353 hevc->m_tile[i][j].sao_vb_start_addr =
4354 hevc->work_space_buf->sao_vb.
4355 buf_start + j * sao_vb_size;
4356 hevc->m_tile[i][j].sao_abv_start_addr =
4357 hevc->work_space_buf->sao_abv.
4358 buf_start + i * sao_abv_size;
4359 if (get_dbg_flag(hevc) &
4360 H265_DEBUG_BUFMGR) {
4361 hevc_print_cont(hevc, 0,
4362 "{y=%d, x=%d w %d h %d ",
4363 i, j, hevc->m_tile[i][j].width,
4364 hevc->m_tile[i][j].height);
4365 hevc_print_cont(hevc, 0,
4366 "start_x %d start_y %d ",
4367 hevc->m_tile[i][j].start_cu_x,
4368 hevc->m_tile[i][j].start_cu_y);
4369 hevc_print_cont(hevc, 0,
4370 "sao_vb_start 0x%x ",
4371 hevc->m_tile[i][j].
4372 sao_vb_start_addr);
4373 hevc_print_cont(hevc, 0,
4374 "sao_abv_start 0x%x}\n",
4375 hevc->m_tile[i][j].
4376 sao_abv_start_addr);
4377
4378 }
4379 start_cu_x += hevc->m_tile[i][j].width;
4380 }
4381 start_cu_y += hevc->m_tile[i][0].height;
4382 }
4383 }
4384 } else {
4385 hevc->num_tile_col = 1;
4386 hevc->num_tile_row = 1;
4387 hevc->m_tile[0][0].width = pic_width_cu;
4388 hevc->m_tile[0][0].height = pic_height_cu;
4389 hevc->m_tile[0][0].start_cu_x = 0;
4390 hevc->m_tile[0][0].start_cu_y = 0;
4391 hevc->m_tile[0][0].sao_vb_start_addr =
4392 hevc->work_space_buf->sao_vb.buf_start;
4393 hevc->m_tile[0][0].sao_abv_start_addr =
4394 hevc->work_space_buf->sao_abv.buf_start;
4395 }
4396}
4397
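/*
 * Map an LCU address to its tile, returned as tile_x | (tile_y << 8);
 * returns -1 when pic_width_lcu is 0.
 */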
4398static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4399 int pic_width_lcu)
4400{
4401 int cu_x;
4402 int cu_y;
4403 int tile_x = 0;
4404 int tile_y = 0;
4405 int i;
4406
4407 if (pic_width_lcu == 0) {
4408 if (get_dbg_flag(hevc)) {
4409 hevc_print(hevc, 0,
4410 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4411 __func__, hevc->pic_w, hevc->pic_h);
4412 }
4413 return -1;
4414 }
4415 cu_x = cu_adr % pic_width_lcu;
4416 cu_y = cu_adr / pic_width_lcu;
4417 if (hevc->tile_enabled) {
4418 for (i = 0; i < hevc->num_tile_col; i++) {
4419 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4420 tile_x = i;
4421 else
4422 break;
4423 }
4424 for (i = 0; i < hevc->num_tile_row; i++) {
4425 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4426 tile_y = i;
4427 else
4428 break;
4429 }
4430 }
4431 return (tile_x) | (tile_y << 8);
4432}
4433
4434static void print_scratch_error(int error_num)
4435{
4436#if 0
4437 if (get_dbg_flag(hevc)) {
4438 hevc_print(hevc, 0,
4439 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4440 error_num);
4441 }
4442#endif
4443}
4444
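/*
 * Point the decoder at its working buffers: IPP line buffer, RPM,
 * short-term RPS/VPS/SPS/PPS, SAO, frame MMU map, scale LUT,
 * deblocking parameter/data buffers and the LMEM dump area.
 */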
4445static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4446{
4447 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4448
4449 if (get_dbg_flag(hevc))
4450 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4451 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4452 __func__,
4453 buf_spec->ipp.buf_start,
4454 buf_spec->start_adr,
4455 buf_spec->short_term_rps.buf_start,
4456 buf_spec->vps.buf_start,
4457 buf_spec->sps.buf_start,
4458 buf_spec->pps.buf_start,
4459 buf_spec->sao_up.buf_start,
4460 buf_spec->swap_buf.buf_start,
4461 buf_spec->swap_buf2.buf_start,
4462 buf_spec->scalelut.buf_start,
4463 buf_spec->dblk_para.buf_start,
4464 buf_spec->dblk_data.buf_start,
4465 buf_spec->dblk_data2.buf_start);
4466 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4467 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4468 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4469 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4470 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4471 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4472 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4473 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4474 if (hevc->mmu_enable) {
4475 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4476 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4477 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4478 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4479 } else
4480 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4481 } /*else
4482 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4483 buf_spec->swap_buf.buf_start);
4484 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4485 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4486 /* cfg_p_addr */
4487 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4488 /* cfg_d_addr */
4489 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4490
4491 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4492
4493 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4494}
4495
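/* Load the fixed parser micro-command sequence into the HEVC parser. */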
4496static void parser_cmd_write(void)
4497{
4498 u32 i;
4499 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4500 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4501 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4502 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4503 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4504 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4505 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4506 0x7C00
4507 };
4508 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4509 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4510}
4511
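/*
 * One-time front-end initialization: sanity-check the parser scratch
 * registers, reset IQIT, configure stream fetch/shift and CABAC,
 * program the parser interrupt enables, clear the IQIT scale LUT, send
 * the parser command table and take IPP/MPP out of software reset.
 */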
4512static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4513 int decode_pic_begin, int decode_pic_num)
4514{
4515 unsigned int data32;
4516 int i;
4517#if 0
4518 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4519 /* Set MCR fetch priorities*/
4520 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4521 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4522 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4523 }
4524#endif
4525#if 1
4526 /* m8baby test1902 */
4527 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4528 hevc_print(hevc, 0,
4529 "%s\n", __func__);
4530 data32 = READ_VREG(HEVC_PARSER_VERSION);
4531 if (data32 != 0x00010001) {
4532 print_scratch_error(25);
4533 return;
4534 }
4535 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4536 data32 = READ_VREG(HEVC_PARSER_VERSION);
4537 if (data32 != 0x5a5a55aa) {
4538 print_scratch_error(26);
4539 return;
4540 }
4541#if 0
4542 /* test Parser Reset */
4543 /* reset iqit to start mem init again */
4544 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4545 (1 << 3) /* reset_whole parser */
4546 );
4547 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4548 data32 = READ_VREG(HEVC_PARSER_VERSION);
4549 if (data32 != 0x00010001)
4550 hevc_print(hevc, 0,
4551 "Test Parser Fatal Error\n");
4552#endif
4553 /* reset iqit to start mem init again */
4554 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4555 );
4556 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4557 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4558
4559#endif
4560 if (!hevc->m_ins_flag) {
4561 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4562 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4563 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4564 data32 |= (0xf << 25); /*arwlen_axi_max*/
4565 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4566 }
4567 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4568 if (data32 != 0x00000100) {
4569 print_scratch_error(29);
4570 return;
4571 }
4572 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4573 if (data32 != 0x00000300) {
4574 print_scratch_error(30);
4575 return;
4576 }
4577 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4578 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4579 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4580 if (data32 != 0x12345678) {
4581 print_scratch_error(31);
4582 return;
4583 }
4584 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4585 if (data32 != 0x9abcdef0) {
4586 print_scratch_error(32);
4587 return;
4588 }
4589 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4590 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4591
4592 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4593 data32 &= 0x03ffffff;
4594 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4595 | /* stream_buffer_empty_int_amrisc_enable */
4596 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4597 (1 << 7) | /* dec_done_int_cpu_enable */
4598 (1 << 4) | /* startcode_found_int_cpu_enable */
4599 (0 << 3) | /* startcode_found_int_amrisc_enable */
4600 (1 << 0) /* parser_int_enable */
4601 ;
4602 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4603
4604 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4605 data32 = data32 | (1 << 1) | /* emulation_check_on */
4606 (1 << 0) /* startcode_check_on */
4607 ;
4608 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4609
4610 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4611 (2 << 4) | /* emulate_code_length_sub_1 */
4612 (2 << 1) | /* start_code_length_sub_1 */
4613 (1 << 0) /* stream_shift_enable */
4614 );
4615
4616 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4617 );
4618 /* hevc_parser_core_clk_en */
4619 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4620 );
4621
4622 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4623
4624 /* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4625 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4626 for (i = 0; i < 1024; i++)
4627 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4628
4629#ifdef ENABLE_SWAP_TEST
4630 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4631#endif
4632
4633 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4634 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4635 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4636 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4637 /* Send parser_cmd */
4638 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4639
4640 parser_cmd_write();
4641
4642 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4643 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4644 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4645
4646 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4647 /* (1 << 8) | // sao_sw_pred_enable */
4648 (1 << 5) | /* parser_sao_if_en */
4649 (1 << 2) | /* parser_mpred_if_en */
4650 (1 << 0) /* parser_scaler_if_en */
4651 );
4652
4653 /* Changed to Start MPRED in microcode */
4654 /*
4655 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4656 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4657 * (1<<31)
4658 * );
4659 */
4660
4661 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4662 (1 << 0) /* software reset ipp and mpp */
4663 );
4664 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4665 (0 << 0) /* software reset ipp and mpp */
4666 );
4667
4668 if (get_double_write_mode(hevc) & 0x10)
4669 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4670 0x1 << 31 /* Enable NV21 reference read mode for MC */
4671 );
4672
4673}
4674
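/*
 * Reinitialize the parser/IPP front end after a hardware reset; this
 * largely repeats the register setup done in hevc_init_decoder_hw().
 */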
4675static void decoder_hw_reset(void)
4676{
4677 int i;
4678 unsigned int data32;
4679 /* reset iqit to start mem init again */
4680 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4681 );
4682 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4683 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4684
4685 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4686 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4687 ;
4688 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4689
4690 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4691 if (data32 != 0x00000100) {
4692 print_scratch_error(29);
4693 return;
4694 }
4695 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4696 if (data32 != 0x00000300) {
4697 print_scratch_error(30);
4698 return;
4699 }
4700 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4701 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4702 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4703 if (data32 != 0x12345678) {
4704 print_scratch_error(31);
4705 return;
4706 }
4707 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4708 if (data32 != 0x9abcdef0) {
4709 print_scratch_error(32);
4710 return;
4711 }
4712 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4713 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4714
4715 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4716 data32 &= 0x03ffffff;
4717 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4718 | /* stream_buffer_empty_int_amrisc_enable */
4719 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4720 (1 << 7) | /* dec_done_int_cpu_enable */
4721 (1 << 4) | /* startcode_found_int_cpu_enable */
4722 (0 << 3) | /* startcode_found_int_amrisc_enable */
4723 (1 << 0) /* parser_int_enable */
4724 ;
4725 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4726
4727 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4728 data32 = data32 | (1 << 1) | /* emulation_check_on */
4729 (1 << 0) /* startcode_check_on */
4730 ;
4731 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4732
4733 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4734 (2 << 4) | /* emulate_code_length_sub_1 */
4735 (2 << 1) | /* start_code_length_sub_1 */
4736 (1 << 0) /* stream_shift_enable */
4737 );
4738
4739 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4740 );
4741 /* hevc_parser_core_clk_en */
4742 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4743 );
4744
4745 /* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4746 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4747 for (i = 0; i < 1024; i++)
4748 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4749
4750 /* Send parser_cmd */
4751 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4752
4753 parser_cmd_write();
4754
4755 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4756 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4757 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4758
4759 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4760 /* (1 << 8) | // sao_sw_pred_enable */
4761 (1 << 5) | /* parser_sao_if_en */
4762 (1 << 2) | /* parser_mpred_if_en */
4763 (1 << 0) /* parser_scaler_if_en */
4764 );
4765
4766 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4767 (1 << 0) /* software reset ipp and mpp */
4768 );
4769 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4770 (0 << 0) /* software reset ipp and mpp */
4771 );
4772}
4773
4774#ifdef CONFIG_HEVC_CLK_FORCED_ON
4775static void config_hevc_clk_forced_on(void)
4776{
4777 unsigned int rdata32;
4778 /* IQIT */
4779 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4780 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4781
4782 /* DBLK */
4783 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4784 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4785
4786 /* SAO */
4787 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4788 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4789
4790 /* MPRED */
4791 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4792 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4793
4794 /* PARSER */
4795 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4796 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4797 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4798 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4799 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4800 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4801 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4802 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4803 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4804 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4805 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4806 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4807 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4808
4809 /* IPP */
4810 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4811 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4812
4813 /* MCRCC */
4814 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4815 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4816}
4817#endif
4818
4819#ifdef MCRCC_ENABLE
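/*
 * Configure the motion-compensation reference cache: reset it, leave it
 * disabled for I pictures, seed CTL2/CTL3 with the first reference
 * canvases of list0/list1 (list0 only for P pictures) and then enable
 * progressive mode.
 */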
4820static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4821{
4822 unsigned int rdata32;
4823 unsigned int rdata32_2;
4824 int l0_cnt = 0;
4825 int l1_cnt = 0x7fff;
4826
4827 if (get_double_write_mode(hevc) & 0x10) {
4828 l0_cnt = hevc->cur_pic->RefNum_L0;
4829 l1_cnt = hevc->cur_pic->RefNum_L1;
4830 }
4831
4832 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4833
4834 if (slice_type == 2) { /* I-PIC */
4835 /* remove reset -- disables clock */
4836 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4837 return;
4838 }
4839
4840 if (slice_type == 0) { /* B-PIC */
4841 /* Programme canvas0 */
4842 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4843 (0 << 8) | (0 << 1) | 0);
4844 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4845 rdata32 = rdata32 & 0xffff;
4846 rdata32 = rdata32 | (rdata32 << 16);
4847 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4848
4849 /* Programme canvas1 */
4850 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4851 (16 << 8) | (1 << 1) | 0);
4852 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4853 rdata32_2 = rdata32_2 & 0xffff;
4854 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4855 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4856 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4857 rdata32_2 = rdata32_2 & 0xffff;
4858 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4859 }
4860 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4861 } else { /* P-PIC */
4862 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4863 (0 << 8) | (1 << 1) | 0);
4864 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4865 rdata32 = rdata32 & 0xffff;
4866 rdata32 = rdata32 | (rdata32 << 16);
4867 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4868
4869 if (l0_cnt == 1) {
4870 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4871 } else {
4872 /* Programme canvas1 */
4873 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4874 rdata32 = rdata32 & 0xffff;
4875 rdata32 = rdata32 | (rdata32 << 16);
4876 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4877 }
4878 }
4879 /* enable mcrcc progressive-mode */
4880 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4881}
4882#endif
4883
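/* Program the SAO above/vertical working buffer addresses and sizes. */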
4884static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4885 int sao_mem_unit)
4886{
4887 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4888 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4889 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4890 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4891}
4892
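/*
 * init_aux_size caches the prefix/suffix aux buffer sizes written to
 * HEVC_AUX_DATA_SIZE by config_aux_buf(); aux_data_is_avaible() reports
 * new aux data once the firmware has overwritten that register.
 */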
4893static u32 init_aux_size;
4894static int aux_data_is_avaible(struct hevc_state_s *hevc)
4895{
4896 u32 reg_val;
4897
4898 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4899 if (reg_val != 0 && reg_val != init_aux_size)
4900 return 1;
4901 else
4902 return 0;
4903}
4904
4905static void config_aux_buf(struct hevc_state_s *hevc)
4906{
4907 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4908 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4909 (hevc->suffix_aux_size >> 4);
4910 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4911}
4912
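/*
 * Program the motion-prediction block for the current slice: MV
 * write/read buffer pointers for the current and collocated pictures,
 * picture/tile geometry in LCUs, per-slice control flags (TMVP, LDC,
 * long-term, ...), reference counts/enables and the POC of every
 * list0/list1 reference.
 */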
4913static void config_mpred_hw(struct hevc_state_s *hevc)
4914{
4915 int i;
4916 unsigned int data32;
4917 struct PIC_s *cur_pic = hevc->cur_pic;
4918 struct PIC_s *col_pic = hevc->col_pic;
4919 int AMVP_MAX_NUM_CANDS_MEM = 3;
4920 int AMVP_MAX_NUM_CANDS = 2;
4921 int NUM_CHROMA_MODE = 5;
4922 int DM_CHROMA_IDX = 36;
4923 int above_ptr_ctrl = 0;
4924 int buffer_linear = 1;
4925 int cu_size_log2 = 3;
4926
4927 int mpred_mv_rd_start_addr;
4928 int mpred_curr_lcu_x;
4929 int mpred_curr_lcu_y;
4930 int mpred_above_buf_start;
4931 int mpred_mv_rd_ptr;
4932 int mpred_mv_rd_ptr_p1;
4933 int mpred_mv_rd_end_addr;
4934 int MV_MEM_UNIT;
4935 int mpred_mv_wr_ptr;
4936 int *ref_poc_L0, *ref_poc_L1;
4937
4938 int above_en;
4939 int mv_wr_en;
4940 int mv_rd_en;
4941 int col_isIntra;
4942
4943 if (hevc->slice_type != 2) {
4944 above_en = 1;
4945 mv_wr_en = 1;
4946 mv_rd_en = 1;
4947 col_isIntra = 0;
4948 } else {
4949 above_en = 1;
4950 mv_wr_en = 1;
4951 mv_rd_en = 0;
4952 col_isIntra = 0;
4953 }
4954
4955 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
4956 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
4957 mpred_curr_lcu_x = data32 & 0xffff;
4958 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
4959
4960 MV_MEM_UNIT =
4961 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
4962 5 ? 0x80 : 0x20;
4963 mpred_mv_rd_ptr =
4964 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
4965
4966 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
4967 mpred_mv_rd_end_addr =
4968 mpred_mv_rd_start_addr +
4969 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
4970
4971 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
4972
4973 mpred_mv_wr_ptr =
4974 cur_pic->mpred_mv_wr_start_addr +
4975 (hevc->slice_addr * MV_MEM_UNIT);
4976
4977 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4978 hevc_print(hevc, 0,
4979 "cur pic index %d col pic index %d\n", cur_pic->index,
4980 col_pic->index);
4981 }
4982
4983 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
4984 cur_pic->mpred_mv_wr_start_addr);
4985 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
4986
4987 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
4988 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
4989 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
4990
4991 data32 = READ_VREG(HEVC_MPRED_CTRL0);
4992 data32 = (hevc->slice_type |
4993 hevc->new_pic << 2 |
4994 hevc->new_tile << 3 |
4995 hevc->isNextSliceSegment << 4 |
4996 hevc->TMVPFlag << 5 |
4997 hevc->LDCFlag << 6 |
4998 hevc->ColFromL0Flag << 7 |
4999 above_ptr_ctrl << 8 |
5000 above_en << 9 |
5001 mv_wr_en << 10 |
5002 mv_rd_en << 11 |
5003 col_isIntra << 12 |
5004 buffer_linear << 13 |
5005 hevc->LongTerm_Curr << 14 |
5006 hevc->LongTerm_Col << 15 |
5007 hevc->lcu_size_log2 << 16 |
5008 cu_size_log2 << 20 | hevc->plevel << 24);
5009 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
5010
5011 data32 = READ_VREG(HEVC_MPRED_CTRL1);
5012 data32 = (
5013#if 0
5014 /* no set in m8baby test1902 */
5015 /* Don't override clk_forced_on , */
5016 (data32 & (0x1 << 24)) |
5017#endif
5018 hevc->MaxNumMergeCand |
5019 AMVP_MAX_NUM_CANDS << 4 |
5020 AMVP_MAX_NUM_CANDS_MEM << 8 |
5021 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
5022 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
5023
5024 data32 = (hevc->pic_w | hevc->pic_h << 16);
5025 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
5026
5027 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5028 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
5029
5030 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
5031 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
5032
5033 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
5034 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
5035
5036 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
5037 /* col_RefNum_L0<<16| */
5038 /* col_RefNum_L1<<24 */
5039 );
5040 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
5041
5042 data32 = (hevc->LongTerm_Ref);
5043 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
5044
5045 data32 = 0;
5046 for (i = 0; i < hevc->RefNum_L0; i++)
5047 data32 = data32 | (1 << i);
5048 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
5049
5050 data32 = 0;
5051 for (i = 0; i < hevc->RefNum_L1; i++)
5052 data32 = data32 | (1 << i);
5053 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
5054
5055 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
5056 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
5057
5058 /* below MPRED Ref_POC_xx_Lx registers must follow Ref_POC_xx_L0 ->
5059 * Ref_POC_xx_L1 in pair write order!!!
5060 */
5061 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
5062 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
5063
5064 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
5065 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
5066
5067 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
5068 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
5069
5070 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
5071 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
5072
5073 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
5074 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
5075
5076 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
5077 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
5078
5079 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
5080 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
5081
5082 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
5083 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
5084
5085 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
5086 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
5087
5088 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
5089 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
5090
5091 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
5092 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
5093
5094 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
5095 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
5096
5097 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
5098 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
5099
5100 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
5101 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
5102
5103 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
5104 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
5105
5106 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
5107 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
5108
5109 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
5110 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
5111
5112 if (hevc->new_pic) {
5113 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
5114 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
5115 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
5116 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
5117 } else if (!hevc->isNextSliceSegment) {
5118 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
5119 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
5120 }
5121
5122 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
5123}
5124
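/*
 * Program the SAO/output stage for the current picture: compressed
 * body/header and double-write buffer addresses and lengths, the
 * deblocking configuration (QP offsets, beta/tc offsets, cross-tile and
 * cross-slice filtering flags) and the AXI endian/format bits.
 */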
5125static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
5126{
5127 unsigned int data32, data32_2;
5128 int misc_flag0 = hevc->misc_flag0;
5129 int slice_deblocking_filter_disabled_flag = 0;
5130
5131 int mc_buffer_size_u_v =
5132 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
5133 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
5134 struct PIC_s *cur_pic = hevc->cur_pic;
5135 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
5136
5137 data32 = READ_VREG(HEVC_SAO_CTRL0);
5138 data32 &= (~0xf);
5139 data32 |= hevc->lcu_size_log2;
5140 WRITE_VREG(HEVC_SAO_CTRL0, data32);
5141
5142 data32 = (hevc->pic_w | hevc->pic_h << 16);
5143 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5144
5145 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5146 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5147
5148 if (hevc->new_pic)
5149 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5150#ifdef LOSLESS_COMPRESS_MODE
5151/*SUPPORT_10BIT*/
5152 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5153 data32 = READ_VREG(HEVC_SAO_CTRL5);
5154 data32 &= (~(0xff << 16));
5155
5156 if (get_double_write_mode(hevc) == 2 ||
5157 get_double_write_mode(hevc) == 3)
5158 data32 |= (0xff<<16);
5159 else if (get_double_write_mode(hevc) == 4)
5160 data32 |= (0x33<<16);
5161
5162 if (hevc->mem_saving_mode == 1)
5163 data32 |= (1 << 9);
5164 else
5165 data32 &= ~(1 << 9);
5166 if (workaround_enable & 1)
5167 data32 |= (1 << 7);
5168 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5169 }
5170 data32 = cur_pic->mc_y_adr;
5171 if (get_double_write_mode(hevc))
5172 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5173
5174 if ((get_double_write_mode(hevc) & 0x10) == 0)
5175 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5176
5177 if (hevc->mmu_enable)
5178 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5179#else
5180 data32 = cur_pic->mc_y_adr;
5181 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5182#endif
5183 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5184 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5185
5186#ifdef LOSLESS_COMPRESS_MODE
5187/*SUPPORT_10BIT*/
5188 if (get_double_write_mode(hevc))
5189 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5190#else
5191 data32 = cur_pic->mc_u_v_adr;
5192 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5193#endif
5194 data32 = (mc_buffer_size_u_v_h << 16);
5195 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5196
5197#ifdef LOSLESS_COMPRESS_MODE
5198/*SUPPORT_10BIT*/
5199 if (get_double_write_mode(hevc)) {
5200 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5201 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5202 }
5203#else
5204 /* multi tile to do... */
5205 data32 = cur_pic->mc_y_adr;
5206 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5207
5208 data32 = cur_pic->mc_u_v_adr;
5209 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5210#endif
5211 /* DBLK CONFIG HERE */
5212 if (hevc->new_pic) {
5213 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5214 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5215 data32 = (0xff << 8) | (0x0 << 0);
5216 else
5217 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5218 (0x0 << 0); /* h265 video format*/
5219
5220 if (hevc->pic_w >= 1280)
5221 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5222 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5223 if (get_double_write_mode(hevc) == 0)
5224 data32 |= (0x1 << 8); /*enable first write*/
5225 else if (get_double_write_mode(hevc) == 0x10)
5226 data32 |= (0x1 << 9); /*double write only*/
5227 else
5228 data32 |= ((0x1 << 8) |(0x1 << 9));
5229
5230 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5231 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5232 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5233 }
5234 data32 = (hevc->pic_w | hevc->pic_h << 16);
5235 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5236
5237 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5238 data32 =
5239 ((misc_flag0 >>
5240 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5241 0x1) << 3;
5242 } else
5243 data32 = 0;
5244 data32 |=
5245 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5246 ((params->p.pps_cr_qp_offset
5247 & 0x1f) <<
5248 9));
5249 data32 |=
5250 (hevc->lcu_size ==
5251 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5252
5253 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5254
5255 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5256 /*if (debug & 0x80) {*/
5257 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5258 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5259 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5260 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5261 data32);
5262 /*}*/
5263 }
5264 }
5265#if 0
5266 data32 = READ_VREG(HEVC_SAO_CTRL1);
5267 data32 &= (~0x3000);
5268 data32 |= (hevc->mem_map_mode <<
5269 12);
5270
5271/* [13:12] axi_aformat,
5272 * 0-Linear, 1-32x32, 2-64x32
5273 */
5274 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5275
5276 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5277 data32 &= (~0x30);
5278 data32 |= (hevc->mem_map_mode <<
5279 4);
5280
5281/* [5:4] -- address_format
5282 * 00:linear 01:32x32 10:64x32
5283 */
5284 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5285#else
5286 /* m8baby test1902 */
5287 data32 = READ_VREG(HEVC_SAO_CTRL1);
5288 data32 &= (~0x3000);
5289 data32 |= (hevc->mem_map_mode <<
5290 12);
5291
5292/* [13:12] axi_aformat, 0-Linear,
5293 * 1-32x32, 2-64x32
5294 */
5295 data32 &= (~0xff0);
5296 /* data32 |= 0x670; // Big-Endian per 64-bit */
5297 data32 |= endian; /* Big-Endian per 64-bit */
5298 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5299 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5300 if (get_double_write_mode(hevc) == 0)
5301 data32 |= 0x2; /*disable double write*/
5302 else if (get_double_write_mode(hevc) & 0x10)
5303 data32 |= 0x1; /*disable cm*/
5304 } else {
5305 unsigned int data;
5306 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5307 (0x0 << 0); /* h265 video format*/
5308 if (hevc->pic_w >= 1280)
5309 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5310 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5311 if (get_double_write_mode(hevc) == 0)
5312 data |= (0x1 << 8); /*enable first write*/
5313 else if (get_double_write_mode(hevc) & 0x10)
5314 data |= (0x1 << 9); /*double write only*/
5315 else
5316 data |= ((0x1 << 8) |(0x1 << 9));
5317
5318 WRITE_VREG(HEVC_DBLK_CFGB, data);
5319 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5320 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5321 }
5322
5323 /* swap uv */
5324 if (hevc->is_used_v4l) {
5325 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5326 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5327 data32 &= ~(1 << 8); /* NV21 */
5328 else
5329 data32 |= (1 << 8); /* NV12 */
5330 }
5331
5332 /*
5333 * [31:24] ar_fifo1_axi_thred
5334 * [23:16] ar_fifo0_axi_thred
5335 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5336 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5337 * [11:08] axi_lendian_C
5338 * [07:04] axi_lendian_Y
5339 * [3] reserved
5340 * [2] clk_forceon
5341 * [1] dw_disable:disable double write output
5342 * [0] cm_disable:disable compress output
5343 */
5344 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5345 if (get_double_write_mode(hevc) & 0x10) {
5346 /* [23:22] dw_v1_ctrl
5347 *[21:20] dw_v0_ctrl
5348 *[19:18] dw_h1_ctrl
5349 *[17:16] dw_h0_ctrl
5350 */
5351 data32 = READ_VREG(HEVC_SAO_CTRL5);
5352 /*set them all 0 for H265_NV21 (no down-scale)*/
5353 data32 &= ~(0xff << 16);
5354 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5355 }
5356
5357 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5358 data32 &= (~0x30);
5359 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5360 data32 |= (hevc->mem_map_mode <<
5361 4);
5362 data32 &= (~0xF);
5363 data32 |= 0xf; /* valid only when double write only */
5364 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5365
5366 /* swap uv */
5367 if (hevc->is_used_v4l) {
5368 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5369 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5370 data32 |= (1 << 12); /* NV21 */
5371 else
5372 data32 &= ~(1 << 12); /* NV12 */
5373 }
5374
5375 /*
5376 * [3:0] little_endian
5377 * [5:4] address_format 00:linear 01:32x32 10:64x32
5378 * [7:6] reserved
5379 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5380 * [11:10] reserved
5381 * [12] CbCr_byte_swap
5382 * [31:13] reserved
5383 */
5384 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5385#endif
5386 data32 = 0;
5387 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5388 data32_2 &= (~0x300);
5389 /* slice_deblocking_filter_disabled_flag = 0;
5390 * ucode has handle it , so read it from ucode directly
5391 */
5392 if (hevc->tile_enabled) {
5393 data32 |=
5394 ((misc_flag0 >>
5395 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5396 0x1) << 0;
5397 data32_2 |=
5398 ((misc_flag0 >>
5399 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5400 0x1) << 8;
5401 }
5402 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5403 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5404 0x1; /* ucode has handle it,so read it from ucode directly */
5405 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5406 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5407 /* slice_deblocking_filter_disabled_flag =
5408 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5409 * //ucode has handle it , so read it from ucode directly
5410 */
5411 data32 |= slice_deblocking_filter_disabled_flag << 2;
5412 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5413 hevc_print_cont(hevc, 0,
5414 "(1,%x)", data32);
5415 if (!slice_deblocking_filter_disabled_flag) {
5416 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5417 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5418 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5419 hevc_print_cont(hevc, 0,
5420 "(2,%x)", data32);
5421 }
5422 } else {
5423 data32 |=
5424 ((misc_flag0 >>
5425 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5426 0x1) << 2;
5427 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5428 hevc_print_cont(hevc, 0,
5429 "(3,%x)", data32);
5430 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5431 0x1) == 0) {
5432 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5433 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5434 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5435 hevc_print_cont(hevc, 0,
5436 "(4,%x)", data32);
5437 }
5438 }
5439 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5440 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5441 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5442 || (!slice_deblocking_filter_disabled_flag))) {
5443 data32 |=
5444 ((misc_flag0 >>
5445 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5446 & 0x1) << 1;
5447 data32_2 |=
5448 ((misc_flag0 >>
5449 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5450 & 0x1) << 9;
5451 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5452 hevc_print_cont(hevc, 0,
5453 "(5,%x)\n", data32);
5454 } else {
5455 data32 |=
5456 ((misc_flag0 >>
5457 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5458 & 0x1) << 1;
5459 data32_2 |=
5460 ((misc_flag0 >>
5461 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5462 & 0x1) << 9;
5463 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5464 hevc_print_cont(hevc, 0,
5465 "(6,%x)\n", data32);
5466 }
5467 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5468 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5469}
5470
5471#ifdef TEST_NO_BUF
5472static unsigned char test_flag = 1;
5473#endif
5474
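/*
 * Rework the picture pool after a resolution change: idle pictures
 * whose geometry no longer matches give up their data buffer and are
 * either retargeted to the new width/height or freed; additional slots
 * are claimed if fewer than get_work_pic_num() pictures remain usable.
 */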
5475static void pic_list_process(struct hevc_state_s *hevc)
5476{
5477 int work_pic_num = get_work_pic_num(hevc);
5478 int alloc_pic_count = 0;
5479 int i;
5480 struct PIC_s *pic;
5481 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5482 pic = hevc->m_PIC[i];
5483 if (pic == NULL || pic->index == -1)
5484 continue;
5485 alloc_pic_count++;
5486 if (pic->output_mark == 0 && pic->referenced == 0
5487 && pic->output_ready == 0
5488 && (pic->width != hevc->pic_w ||
5489 pic->height != hevc->pic_h)
5490 ) {
5491 set_buf_unused(hevc, pic->BUF_index);
5492 pic->BUF_index = -1;
5493 if (alloc_pic_count > work_pic_num) {
5494 pic->width = 0;
5495 pic->height = 0;
5496 pic->index = -1;
5497 } else {
5498 pic->width = hevc->pic_w;
5499 pic->height = hevc->pic_h;
5500 }
5501 }
5502 }
5503 if (alloc_pic_count < work_pic_num) {
5504 int new_count = alloc_pic_count;
5505 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5506 pic = hevc->m_PIC[i];
5507 if (pic && pic->index == -1) {
5508 pic->index = i;
5509 pic->BUF_index = -1;
5510 pic->width = hevc->pic_w;
5511 pic->height = hevc->pic_h;
5512 new_count++;
5513 if (new_count >=
5514 work_pic_num)
5515 break;
5516 }
5517 }
5518
5519 }
5520 dealloc_unused_buf(hevc);
5521 if (get_alloc_pic_count(hevc)
5522 != alloc_pic_count) {
5523 hevc_print_cont(hevc, 0,
5524 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5525 __func__,
5526 work_pic_num,
5527 alloc_pic_count,
5528 get_alloc_pic_count(hevc));
5529 }
5530}
5531
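/*
 * Release the scatter/MMU pages of pictures that are neither referenced
 * nor pending output.
 */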
5532static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5533{
5534 int i;
5535 struct PIC_s *pic;
5536 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5537 pic = hevc->m_PIC[i];
5538 if (pic == NULL || pic->index == -1)
5539 continue;
5540
5541 if (pic->output_mark == 0 && pic->referenced == 0
5542 && pic->output_ready == 0
5543 && pic->scatter_alloc
5544 )
5545 release_pic_mmu_buf(hevc, pic);
5546 }
5547
5548}
5549
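/*
 * Grab a free picture for the frame being decoded: pick the idle
 * picture with the smallest POC that matches the current geometry,
 * allocate its data/canvas/MV/MMU resources if needed and seed its
 * per-picture state (POC, bit depth, conformance window, ...) from the
 * parsed parameters.  Returns NULL if nothing is available.
 */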
5550static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5551 union param_u *rpm_param)
5552{
5553 struct PIC_s *new_pic = NULL;
5554 struct PIC_s *pic;
5555 int i;
5556 int ret;
5557
5558 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5559 pic = hevc->m_PIC[i];
5560 if (pic == NULL || pic->index == -1)
5561 continue;
5562
5563 if (pic->output_mark == 0 && pic->referenced == 0
5564 && pic->output_ready == 0
5565 && pic->width == hevc->pic_w
5566 && pic->height == hevc->pic_h
5567 ) {
5568 if (new_pic) {
5569 if (new_pic->POC != INVALID_POC) {
5570 if (pic->POC == INVALID_POC ||
5571 pic->POC < new_pic->POC)
5572 new_pic = pic;
5573 }
5574 } else
5575 new_pic = pic;
5576 }
5577 }
5578
5579 if (new_pic == NULL)
5580 return NULL;
5581
5582 if (new_pic->BUF_index < 0) {
5583 ret = hevc->is_used_v4l ?
5584 v4l_alloc_buf(hevc) :
5585 alloc_buf(hevc);
5586 if (ret < 0)
5587 return NULL;
5588
5589 ret = hevc->is_used_v4l ?
5590 v4l_config_pic(hevc, new_pic) :
5591 config_pic(hevc, new_pic);
5592 if (ret < 0) {
5593 dealloc_pic_buf(hevc, new_pic);
5594 return NULL;
5595 }
5596
5597 new_pic->width = hevc->pic_w;
5598 new_pic->height = hevc->pic_h;
5599 set_canvas(hevc, new_pic);
5600
5601 init_pic_list_hw(hevc);
5602 }
5603
5604 if (new_pic) {
5605 new_pic->double_write_mode =
5606 get_double_write_mode(hevc);
5607 if (new_pic->double_write_mode)
5608 set_canvas(hevc, new_pic);
5609
5610#ifdef TEST_NO_BUF
5611 if (test_flag) {
5612 test_flag = 0;
5613 return NULL;
5614 } else
5615 test_flag = 1;
5616#endif
5617 if (get_mv_buf(hevc, new_pic) < 0)
5618 return NULL;
5619
5620 if (hevc->mmu_enable) {
5621 ret = H265_alloc_mmu(hevc, new_pic,
5622 rpm_param->p.bit_depth,
5623 hevc->frame_mmu_map_addr);
5624 if (ret != 0) {
5625 put_mv_buf(hevc, new_pic);
5626 hevc_print(hevc, 0,
5627 "can't alloc need mmu1,idx %d ret =%d\n",
5628 new_pic->decode_idx,
5629 ret);
5630 return NULL;
5631 }
5632 }
5633 new_pic->referenced = 1;
5634 new_pic->decode_idx = hevc->decode_idx;
5635 new_pic->slice_idx = 0;
5636 new_pic->referenced = 1;
5637 new_pic->output_mark = 0;
5638 new_pic->recon_mark = 0;
5639 new_pic->error_mark = 0;
5640 new_pic->dis_mark = 0;
5641 /* new_pic->output_ready = 0; */
5642 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5643 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5644 new_pic->POC = hevc->curr_POC;
5645 new_pic->pic_struct = hevc->curr_pic_struct;
5646 if (new_pic->aux_data_buf)
5647 release_aux_data(hevc, new_pic);
5648 new_pic->mem_saving_mode =
5649 hevc->mem_saving_mode;
5650 new_pic->bit_depth_luma =
5651 hevc->bit_depth_luma;
5652 new_pic->bit_depth_chroma =
5653 hevc->bit_depth_chroma;
5654 new_pic->video_signal_type =
5655 hevc->video_signal_type;
5656
5657 new_pic->conformance_window_flag =
5658 hevc->param.p.conformance_window_flag;
5659 new_pic->conf_win_left_offset =
5660 hevc->param.p.conf_win_left_offset;
5661 new_pic->conf_win_right_offset =
5662 hevc->param.p.conf_win_right_offset;
5663 new_pic->conf_win_top_offset =
5664 hevc->param.p.conf_win_top_offset;
5665 new_pic->conf_win_bottom_offset =
5666 hevc->param.p.conf_win_bottom_offset;
5667 new_pic->chroma_format_idc =
5668 hevc->param.p.chroma_format_idc;
5669
5670 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5671 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5672 __func__, new_pic->index,
5673 new_pic->BUF_index, new_pic->decode_idx,
5674 new_pic->POC);
5675
5676 }
5677 if (pic_list_debug & 0x1) {
5678 dump_pic_list(hevc);
5679 pr_err("\n*******************************************\n");
5680 }
5681
5682 return new_pic;
5683}
5684
5685static int get_display_pic_num(struct hevc_state_s *hevc)
5686{
5687 int i;
5688 struct PIC_s *pic;
5689 int num = 0;
5690
5691 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5692 pic = hevc->m_PIC[i];
5693 if (pic == NULL ||
5694 pic->index == -1)
5695 continue;
5696
5697 if (pic->output_ready == 1)
5698 num++;
5699 }
5700 return num;
5701}
5702
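/*
 * Flush path: mark the passed picture for output, then drain every
 * remaining picture through output_pic() in flush mode, handing each
 * one to the display queue or recycling it when it carries an error
 * mark or a debug flag asks for dropping, and finally clear all
 * referenced flags.
 */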
5703static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5704{
5705 struct PIC_s *pic_display;
5706
5707 if (pic) {
5708 /*PB skip control */
5709 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5710 /* start decoding after first I */
5711 hevc->ignore_bufmgr_error |= 0x1;
5712 }
5713 if (hevc->ignore_bufmgr_error & 1) {
5714 if (hevc->PB_skip_count_after_decoding > 0)
5715 hevc->PB_skip_count_after_decoding--;
5716 else {
5717 /* start displaying */
5718 hevc->ignore_bufmgr_error |= 0x2;
5719 }
5720 }
5721 /**/
5722 if (pic->POC != INVALID_POC) {
5723 pic->output_mark = 1;
5725 }
5726 pic->recon_mark = 1;
5727 }
5728 do {
5729 pic_display = output_pic(hevc, 1);
5730
5731 if (pic_display) {
5732 pic_display->referenced = 0;
5733 put_mv_buf(hevc, pic_display);
5734 if ((pic_display->error_mark
5735 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5736 || (get_dbg_flag(hevc) &
5737 H265_DEBUG_DISPLAY_CUR_FRAME)
5738 || (get_dbg_flag(hevc) &
5739 H265_DEBUG_NO_DISPLAY)) {
5740 pic_display->output_ready = 0;
5741 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5742 hevc_print(hevc, 0,
5743 "[BM] Display: POC %d, ",
5744 pic_display->POC);
5745 hevc_print_cont(hevc, 0,
5746 "decoding index %d ==> ",
5747 pic_display->decode_idx);
5748 hevc_print_cont(hevc, 0,
5749 "Debug mode or error, recycle it\n");
5750 }
5751 } else {
5752 if (hevc->i_only & 0x1
5753 && pic_display->slice_type != 2) {
5754 pic_display->output_ready = 0;
5755 } else {
5756 prepare_display_buf(hevc, pic_display);
5757 if (get_dbg_flag(hevc)
5758 & H265_DEBUG_BUFMGR) {
5759 hevc_print(hevc, 0,
5760 "[BM] flush Display: POC %d, ",
5761 pic_display->POC);
5762 hevc_print_cont(hevc, 0,
5763 "decoding index %d\n",
5764 pic_display->decode_idx);
5765 }
5766 }
5767 }
5768 }
5769 } while (pic_display);
5770 clear_referenced_flag(hevc);
5771}
5772
5773/*
5774* dv_meta_flag: 0, all aux data; 1, dolby meta only; 2, exclude dolby meta
5775*/
5776static void set_aux_data(struct hevc_state_s *hevc,
5777 struct PIC_s *pic, unsigned char suffix_flag,
5778 unsigned char dv_meta_flag)
5779{
5780 int i;
5781 unsigned short *aux_adr;
5782 unsigned int size_reg_val =
5783 READ_VREG(HEVC_AUX_DATA_SIZE);
5784 unsigned int aux_count = 0;
5785 int aux_size = 0;
5786 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
5787 return;
5788
5789 if (hevc->aux_data_dirty ||
5790 hevc->m_ins_flag == 0) {
5791
5792 hevc->aux_data_dirty = 0;
5793 }
5794
5795 if (suffix_flag) {
5796 aux_adr = (unsigned short *)
5797 (hevc->aux_addr +
5798 hevc->prefix_aux_size);
5799 aux_count =
5800 ((size_reg_val & 0xffff) << 4)
5801 >> 1;
5802 aux_size =
5803 hevc->suffix_aux_size;
5804 } else {
5805 aux_adr =
5806 (unsigned short *)hevc->aux_addr;
5807 aux_count =
5808 ((size_reg_val >> 16) << 4)
5809 >> 1;
5810 aux_size =
5811 hevc->prefix_aux_size;
5812 }
5813 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5814 hevc_print(hevc, 0,
5815 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
5816 __func__, pic, pic->aux_data_size,
5817 aux_count, suffix_flag, dv_meta_flag);
5818 }
5819 if (aux_size > 0 && aux_count > 0) {
5820 int heads_size = 0;
5821 int new_size;
5822 char *new_buf;
5823
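		/*
		 * Each aux record appended below is an 8-byte header followed
		 * by the payload: h[0..3] = payload length (big endian),
		 * h[4] = tag, h[5] = 0, h[6..7] = padding length.  Payload
		 * bytes are the low bytes of the 16-bit aux words, copied in
		 * groups of four in reverse word order.
		 */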
5824 for (i = 0; i < aux_count; i++) {
5825 unsigned char tag = aux_adr[i] >> 8;
5826 if (tag != 0 && tag != 0xff) {
5827 if (dv_meta_flag == 0)
5828 heads_size += 8;
5829 else if (dv_meta_flag == 1 && tag == 0x1)
5830 heads_size += 8;
5831 else if (dv_meta_flag == 2 && tag != 0x1)
5832 heads_size += 8;
5833 }
5834 }
5835 new_size = pic->aux_data_size + aux_count + heads_size;
5836 new_buf = vmalloc(new_size);
5837 if (new_buf) {
5838 unsigned char valid_tag = 0;
5839 unsigned char *h =
5840 new_buf +
5841 pic->aux_data_size;
5842 unsigned char *p = h + 8;
5843 int len = 0;
5844 int padding_len = 0;
5845			if (pic->aux_data_buf) {
5846				memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
5847				vfree(pic->aux_data_buf);
			}
5848 pic->aux_data_buf = new_buf;
5849 for (i = 0; i < aux_count; i += 4) {
5850 int ii;
5851 unsigned char tag = aux_adr[i + 3] >> 8;
5852 if (tag != 0 && tag != 0xff) {
5853 if (dv_meta_flag == 0)
5854 valid_tag = 1;
5855 else if (dv_meta_flag == 1
5856 && tag == 0x1)
5857 valid_tag = 1;
5858 else if (dv_meta_flag == 2
5859 && tag != 0x1)
5860 valid_tag = 1;
5861 else
5862 valid_tag = 0;
5863 if (valid_tag && len > 0) {
5864 pic->aux_data_size +=
5865 (len + 8);
5866 h[0] = (len >> 24)
5867 & 0xff;
5868 h[1] = (len >> 16)
5869 & 0xff;
5870 h[2] = (len >> 8)
5871 & 0xff;
5872 h[3] = (len >> 0)
5873 & 0xff;
5874 h[6] =
5875 (padding_len >> 8)
5876 & 0xff;
5877 h[7] = (padding_len)
5878 & 0xff;
5879 h += (len + 8);
5880 p += 8;
5881 len = 0;
5882 padding_len = 0;
5883 }
5884 if (valid_tag) {
5885 h[4] = tag;
5886 h[5] = 0;
5887 h[6] = 0;
5888 h[7] = 0;
5889 }
5890 }
5891 if (valid_tag) {
5892 for (ii = 0; ii < 4; ii++) {
5893 unsigned short aa =
5894 aux_adr[i + 3
5895 - ii];
5896 *p = aa & 0xff;
5897 p++;
5898 len++;
5899 /*if ((aa >> 8) == 0xff)
5900 padding_len++;*/
5901 }
5902 }
5903 }
5904 if (len > 0) {
5905 pic->aux_data_size += (len + 8);
5906 h[0] = (len >> 24) & 0xff;
5907 h[1] = (len >> 16) & 0xff;
5908 h[2] = (len >> 8) & 0xff;
5909 h[3] = (len >> 0) & 0xff;
5910 h[6] = (padding_len >> 8) & 0xff;
5911 h[7] = (padding_len) & 0xff;
5912 }
5913 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5914 hevc_print(hevc, 0,
5915 "aux: (size %d) suffix_flag %d\n",
5916 pic->aux_data_size, suffix_flag);
5917 for (i = 0; i < pic->aux_data_size; i++) {
5918 hevc_print_cont(hevc, 0,
5919 "%02x ", pic->aux_data_buf[i]);
5920 if (((i + 1) & 0xf) == 0)
5921 hevc_print_cont(hevc, 0, "\n");
5922 }
5923 hevc_print_cont(hevc, 0, "\n");
5924 }
5925
5926 } else {
5927 hevc_print(hevc, 0, "new buf alloc failed\n");
5928 if (pic->aux_data_buf)
5929 vfree(pic->aux_data_buf);
5930 pic->aux_data_buf = NULL;
5931 pic->aux_data_size = 0;
5932 }
5933 }
5934
5935}
5936
5937static void release_aux_data(struct hevc_state_s *hevc,
5938 struct PIC_s *pic)
5939{
5940 if (pic->aux_data_buf)
5941 vfree(pic->aux_data_buf);
5942 pic->aux_data_buf = NULL;
5943 pic->aux_data_size = 0;
5944}
5945
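/*
 * Wrap up the previously decoded picture before a new one is started:
 * mark it for output/display, trim its MMU allocation down to the 4k
 * pages the hardware actually used (single instance, MMU mode), and
 * bump out any pictures that are ready for display.  When the new
 * picture is an IDR, the whole picture list is flushed instead.
 */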
5946static inline void hevc_pre_pic(struct hevc_state_s *hevc,
5947 struct PIC_s *pic)
5948{
5949
5950 /* prev pic */
5951 /*if (hevc->curr_POC != 0) {*/
5952 int decoded_poc = hevc->iPrevPOC;
5953#ifdef MULTI_INSTANCE_SUPPORT
5954 if (hevc->m_ins_flag) {
5955 decoded_poc = hevc->decoded_poc;
5956 hevc->decoded_poc = INVALID_POC;
5957 }
5958#endif
5959 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
5960 && hevc->m_nalUnitType !=
5961 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
5962 struct PIC_s *pic_display;
5963
5964 pic = get_pic_by_POC(hevc, decoded_poc);
5965 if (pic && (pic->POC != INVALID_POC)) {
5966 /*PB skip control */
5967 if (pic->error_mark == 0
5968 && hevc->PB_skip_mode == 1) {
5969 /* start decoding after
5970 * first I
5971 */
5972 hevc->ignore_bufmgr_error |= 0x1;
5973 }
5974 if (hevc->ignore_bufmgr_error & 1) {
5975 if (hevc->PB_skip_count_after_decoding > 0) {
5976 hevc->PB_skip_count_after_decoding--;
5977 } else {
5978 /* start displaying */
5979 hevc->ignore_bufmgr_error |= 0x2;
5980 }
5981 }
5982 if (hevc->mmu_enable
5983 && ((hevc->double_write_mode & 0x10) == 0)) {
5984 if (!hevc->m_ins_flag) {
5985 hevc->used_4k_num =
5986 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
5987
5988 if ((!is_skip_decoding(hevc, pic)) &&
5989 (hevc->used_4k_num >= 0) &&
5990 (hevc->cur_pic->scatter_alloc
5991 == 1)) {
5992 hevc_print(hevc,
5993 H265_DEBUG_BUFMGR_MORE,
5994 "%s pic index %d scatter_alloc %d page_start %d\n",
5995 "decoder_mmu_box_free_idx_tail",
5996 hevc->cur_pic->index,
5997 hevc->cur_pic->scatter_alloc,
5998 hevc->used_4k_num);
5999 hevc_mmu_dma_check(hw_to_vdec(hevc));
6000 decoder_mmu_box_free_idx_tail(
6001 hevc->mmu_box,
6002 hevc->cur_pic->index,
6003 hevc->used_4k_num);
6004 hevc->cur_pic->scatter_alloc
6005 = 2;
6006 }
6007 hevc->used_4k_num = -1;
6008 }
6009 }
6010
6011 pic->output_mark = 1;
6012 pic->recon_mark = 1;
6013 pic->dis_mark = 1;
6014 }
6015 do {
6016 pic_display = output_pic(hevc, 0);
6017
6018 if (pic_display) {
6019 if ((pic_display->error_mark &&
6020 ((hevc->ignore_bufmgr_error &
6021 0x2) == 0))
6022 || (get_dbg_flag(hevc) &
6023 H265_DEBUG_DISPLAY_CUR_FRAME)
6024 || (get_dbg_flag(hevc) &
6025 H265_DEBUG_NO_DISPLAY)) {
6026 pic_display->output_ready = 0;
6027 if (get_dbg_flag(hevc) &
6028 H265_DEBUG_BUFMGR) {
6029 hevc_print(hevc, 0,
6030 "[BM] Display: POC %d, ",
6031 pic_display->POC);
6032 hevc_print_cont(hevc, 0,
6033 "decoding index %d ==> ",
6034 pic_display->
6035 decode_idx);
6036 hevc_print_cont(hevc, 0,
6037						"Debug or err, recycle it\n");
6038 }
6039 } else {
6040 if (hevc->i_only & 0x1
6041 && pic_display->
6042 slice_type != 2) {
6043 pic_display->output_ready = 0;
6044 } else {
6045 prepare_display_buf
6046 (hevc,
6047 pic_display);
6048 if (get_dbg_flag(hevc) &
6049 H265_DEBUG_BUFMGR) {
6050 hevc_print(hevc, 0,
6051 "[BM] Display: POC %d, ",
6052 pic_display->POC);
6053 hevc_print_cont(hevc, 0,
6054 "decoding index %d\n",
6055 pic_display->
6056 decode_idx);
6057 }
6058 }
6059 }
6060 }
6061 } while (pic_display);
6062 } else {
6063 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6064 hevc_print(hevc, 0,
6065 "[BM] current pic is IDR, ");
6066 hevc_print(hevc, 0,
6067 "clear referenced flag of all buffers\n");
6068 }
6069 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6070 dump_pic_list(hevc);
6071 pic = get_pic_by_POC(hevc, decoded_poc);
6072 flush_output(hevc, pic);
6073 }
6074
6075}
6076
6077static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
6078 int decoded_lcu)
6079{
6080 int current_lcu_idx = decoded_lcu;
6081 if (decoded_lcu < 0)
6082 return;
6083
6084 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6085 hevc_print(hevc, 0,
6086 "cur lcu idx = %d, (total %d)\n",
6087 current_lcu_idx, hevc->lcu_total);
6088 }
6089 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6090 if (hevc->first_pic_after_recover) {
6091 if (current_lcu_idx !=
6092 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6093 hevc->cur_pic->error_mark = 1;
6094 } else {
6095 if (hevc->lcu_x_num_pre != 0
6096 && hevc->lcu_y_num_pre != 0
6097 && current_lcu_idx != 0
6098 && current_lcu_idx <
6099 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6100 hevc->cur_pic->error_mark = 1;
6101 }
6102 if (hevc->cur_pic->error_mark) {
6103 hevc_print(hevc, 0,
6104 "cur lcu idx = %d, (total %d), set error_mark\n",
6105 current_lcu_idx,
6106 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
6107 if (is_log_enable(hevc))
6108 add_log(hevc,
6109 "cur lcu idx = %d, (total %d), set error_mark",
6110 current_lcu_idx,
6111 hevc->lcu_x_num_pre *
6112 hevc->lcu_y_num_pre);
6113
6114 }
6115
6116 }
6117 if (hevc->cur_pic && hevc->head_error_flag) {
6118 hevc->cur_pic->error_mark = 1;
6119 hevc_print(hevc, 0,
6120 "head has error, set error_mark\n");
6121 }
6122
6123 if ((error_handle_policy & 0x80) == 0) {
6124 if (hevc->over_decode && hevc->cur_pic) {
6125 hevc_print(hevc, 0,
6126 "over decode, set error_mark\n");
6127 hevc->cur_pic->error_mark = 1;
6128 }
6129 }
6130
6131 hevc->lcu_x_num_pre = hevc->lcu_x_num;
6132 hevc->lcu_y_num_pre = hevc->lcu_y_num;
6133}
6134
6135static void check_pic_decoded_error(struct hevc_state_s *hevc,
6136 int decoded_lcu)
6137{
6138 int current_lcu_idx = decoded_lcu;
6139 if (decoded_lcu < 0)
6140 return;
6141
6142 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6143 hevc_print(hevc, 0,
6144 "cur lcu idx = %d, (total %d)\n",
6145 current_lcu_idx, hevc->lcu_total);
6146 }
6147 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6148 if (hevc->lcu_x_num != 0
6149 && hevc->lcu_y_num != 0
6150 && current_lcu_idx != 0
6151 && current_lcu_idx <
6152 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6153 hevc->cur_pic->error_mark = 1;
6154 if (hevc->cur_pic->error_mark) {
6155 hevc_print(hevc, 0,
6156 "cur lcu idx = %d, (total %d), set error_mark\n",
6157 current_lcu_idx,
6158 hevc->lcu_x_num*hevc->lcu_y_num);
6159			if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6160				&& ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6161				hevc_print(hevc, 0,
6162					"Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6163					hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6164				flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6165 }
6166 if (is_log_enable(hevc))
6167 add_log(hevc,
6168 "cur lcu idx = %d, (total %d), set error_mark",
6169 current_lcu_idx,
6170 hevc->lcu_x_num *
6171 hevc->lcu_y_num);
6172
6173 }
6174
6175 }
6176 if (hevc->cur_pic && hevc->head_error_flag) {
6177 hevc->cur_pic->error_mark = 1;
6178 hevc_print(hevc, 0,
6179 "head has error, set error_mark\n");
6180 }
6181
6182 if ((error_handle_policy & 0x80) == 0) {
6183 if (hevc->over_decode && hevc->cur_pic) {
6184 hevc_print(hevc, 0,
6185 "over decode, set error_mark\n");
6186 hevc->cur_pic->error_mark = 1;
6187 }
6188 }
6189}
6190
6191/* Call this only after one field or one frame has been decoded,
6192 * to collect the per-picture QoS (qp/mv/skip) statistics. */
6193static void get_picture_qos_info(struct hevc_state_s *hevc)
6194{
6195 struct PIC_s *picture = hevc->cur_pic;
6196
6197/*
6198#define DEBUG_QOS
6199*/
6200
6201 if (!hevc->cur_pic)
6202 return;
6203
6204 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6205 unsigned char a[3];
6206 unsigned char i, j, t;
6207 unsigned long data;
6208
6209 data = READ_VREG(HEVC_MV_INFO);
6210 if (picture->slice_type == I_SLICE)
6211 data = 0;
6212 a[0] = data & 0xff;
6213 a[1] = (data >> 8) & 0xff;
6214 a[2] = (data >> 16) & 0xff;
6215
6216 for (i = 0; i < 3; i++)
6217 for (j = i+1; j < 3; j++) {
6218 if (a[j] < a[i]) {
6219 t = a[j];
6220 a[j] = a[i];
6221 a[i] = t;
6222 } else if (a[j] == a[i]) {
6223 a[i]++;
6224 t = a[j];
6225 a[j] = a[i];
6226 a[i] = t;
6227 }
6228 }
6229 picture->max_mv = a[2];
6230 picture->avg_mv = a[1];
6231 picture->min_mv = a[0];
6232#ifdef DEBUG_QOS
6233 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6234 data, a[0], a[1], a[2]);
6235#endif
6236
6237 data = READ_VREG(HEVC_QP_INFO);
6238 a[0] = data & 0x1f;
6239 a[1] = (data >> 8) & 0x3f;
6240 a[2] = (data >> 16) & 0x7f;
6241
6242 for (i = 0; i < 3; i++)
6243 for (j = i+1; j < 3; j++) {
6244 if (a[j] < a[i]) {
6245 t = a[j];
6246 a[j] = a[i];
6247 a[i] = t;
6248 } else if (a[j] == a[i]) {
6249 a[i]++;
6250 t = a[j];
6251 a[j] = a[i];
6252 a[i] = t;
6253 }
6254 }
6255 picture->max_qp = a[2];
6256 picture->avg_qp = a[1];
6257 picture->min_qp = a[0];
6258#ifdef DEBUG_QOS
6259 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6260 data, a[0], a[1], a[2]);
6261#endif
6262
6263 data = READ_VREG(HEVC_SKIP_INFO);
6264 a[0] = data & 0x1f;
6265 a[1] = (data >> 8) & 0x3f;
6266 a[2] = (data >> 16) & 0x7f;
6267
6268 for (i = 0; i < 3; i++)
6269 for (j = i+1; j < 3; j++) {
6270 if (a[j] < a[i]) {
6271 t = a[j];
6272 a[j] = a[i];
6273 a[i] = t;
6274 } else if (a[j] == a[i]) {
6275 a[i]++;
6276 t = a[j];
6277 a[j] = a[i];
6278 a[i] = t;
6279 }
6280 }
6281 picture->max_skip = a[2];
6282 picture->avg_skip = a[1];
6283 picture->min_skip = a[0];
6284
6285#ifdef DEBUG_QOS
6286 hevc_print(hevc, 0,
6287 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6288 data, a[0], a[1], a[2]);
6289#endif
6290 } else {
6291 uint32_t blk88_y_count;
6292 uint32_t blk88_c_count;
6293 uint32_t blk22_mv_count;
6294 uint32_t rdata32;
6295 int32_t mv_hi;
6296 int32_t mv_lo;
6297 uint32_t rdata32_l;
6298 uint32_t mvx_L0_hi;
6299 uint32_t mvy_L0_hi;
6300 uint32_t mvx_L1_hi;
6301 uint32_t mvy_L1_hi;
6302 int64_t value;
6303 uint64_t temp_value;
6304#ifdef DEBUG_QOS
6305 int pic_number = picture->POC;
6306#endif
6307
6308 picture->max_mv = 0;
6309 picture->avg_mv = 0;
6310 picture->min_mv = 0;
6311
6312 picture->max_skip = 0;
6313 picture->avg_skip = 0;
6314 picture->min_skip = 0;
6315
6316 picture->max_qp = 0;
6317 picture->avg_qp = 0;
6318 picture->min_qp = 0;
6319
6320
6321
6322#ifdef DEBUG_QOS
6323 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6324 picture->slice_type,
6325 picture->POC);
6326#endif
6327 /* set rd_idx to 0 */
6328 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6329
6330 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6331 if (blk88_y_count == 0) {
6332#ifdef DEBUG_QOS
6333 hevc_print(hevc, 0,
6334 "[Picture %d Quality] NO Data yet.\n",
6335 pic_number);
6336#endif
6337 /* reset all counts */
6338 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6339 return;
6340 }
6341 /* qp_y_sum */
6342 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6343#ifdef DEBUG_QOS
6344 hevc_print(hevc, 0,
6345 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6346 pic_number, rdata32/blk88_y_count,
6347 rdata32, blk88_y_count);
6348#endif
6349 picture->avg_qp = rdata32/blk88_y_count;
6350 /* intra_y_count */
6351 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6352#ifdef DEBUG_QOS
6353 hevc_print(hevc, 0,
6354 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6355 pic_number, rdata32*100/blk88_y_count,
6356 '%', rdata32);
6357#endif
6358 /* skipped_y_count */
6359 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6360#ifdef DEBUG_QOS
6361 hevc_print(hevc, 0,
6362 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6363 pic_number, rdata32*100/blk88_y_count,
6364 '%', rdata32);
6365#endif
6366 picture->avg_skip = rdata32*100/blk88_y_count;
6367 /* coeff_non_zero_y_count */
6368 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6369#ifdef DEBUG_QOS
6370 hevc_print(hevc, 0,
6371 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6372 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6373 '%', rdata32);
6374#endif
6375 /* blk66_c_count */
6376 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6377 if (blk88_c_count == 0) {
6378#ifdef DEBUG_QOS
6379 hevc_print(hevc, 0,
6380 "[Picture %d Quality] NO Data yet.\n",
6381 pic_number);
6382#endif
6383 /* reset all counts */
6384 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6385 return;
6386 }
6387 /* qp_c_sum */
6388 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6389#ifdef DEBUG_QOS
6390 hevc_print(hevc, 0,
6391 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6392 pic_number, rdata32/blk88_c_count,
6393 rdata32, blk88_c_count);
6394#endif
6395 /* intra_c_count */
6396 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6397#ifdef DEBUG_QOS
6398 hevc_print(hevc, 0,
6399 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6400 pic_number, rdata32*100/blk88_c_count,
6401 '%', rdata32);
6402#endif
6403 /* skipped_cu_c_count */
6404 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6405#ifdef DEBUG_QOS
6406 hevc_print(hevc, 0,
6407 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6408 pic_number, rdata32*100/blk88_c_count,
6409 '%', rdata32);
6410#endif
6411 /* coeff_non_zero_c_count */
6412 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6413#ifdef DEBUG_QOS
6414 hevc_print(hevc, 0,
6415 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6416 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6417 '%', rdata32);
6418#endif
6419
6420 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6421 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6422 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6423#ifdef DEBUG_QOS
6424 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6425 pic_number, (rdata32>>0)&0xff);
6426#endif
6427 picture->min_qp = (rdata32>>0)&0xff;
6428
6429#ifdef DEBUG_QOS
6430 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6431 pic_number, (rdata32>>8)&0xff);
6432#endif
6433 picture->max_qp = (rdata32>>8)&0xff;
6434
6435#ifdef DEBUG_QOS
6436 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6437 pic_number, (rdata32>>16)&0xff);
6438 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6439 pic_number, (rdata32>>24)&0xff);
6440#endif
6441
6442 /* blk22_mv_count */
6443 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6444 if (blk22_mv_count == 0) {
6445#ifdef DEBUG_QOS
6446 hevc_print(hevc, 0,
6447 "[Picture %d Quality] NO MV Data yet.\n",
6448 pic_number);
6449#endif
6450 /* reset all counts */
6451 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6452 return;
6453 }
6454 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6455 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6456 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6457 /* should all be 0x00 or 0xff */
6458#ifdef DEBUG_QOS
6459 hevc_print(hevc, 0,
6460 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6461 pic_number, rdata32);
6462#endif
6463 mvx_L0_hi = ((rdata32>>0)&0xff);
6464 mvy_L0_hi = ((rdata32>>8)&0xff);
6465 mvx_L1_hi = ((rdata32>>16)&0xff);
6466 mvy_L1_hi = ((rdata32>>24)&0xff);
6467
6468 /* mvx_L0_count[31:0] */
6469 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6470 temp_value = mvx_L0_hi;
6471 temp_value = (temp_value << 32) | rdata32_l;
6472
6473 if (mvx_L0_hi & 0x80)
6474 value = 0xFFFFFFF000000000 | temp_value;
6475 else
6476 value = temp_value;
6477 value = div_s64(value, blk22_mv_count);
6478#ifdef DEBUG_QOS
6479 hevc_print(hevc, 0,
6480 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6481 pic_number, (int)value,
6482 value, blk22_mv_count);
6483#endif
6484 picture->avg_mv = value;
6485
6486 /* mvy_L0_count[31:0] */
6487 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6488 temp_value = mvy_L0_hi;
6489 temp_value = (temp_value << 32) | rdata32_l;
6490
6491 if (mvy_L0_hi & 0x80)
6492 value = 0xFFFFFFF000000000 | temp_value;
6493 else
6494 value = temp_value;
6495#ifdef DEBUG_QOS
6496 hevc_print(hevc, 0,
6497 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6498 pic_number, rdata32_l/blk22_mv_count,
6499 value, blk22_mv_count);
6500#endif
6501
6502 /* mvx_L1_count[31:0] */
6503 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6504 temp_value = mvx_L1_hi;
6505 temp_value = (temp_value << 32) | rdata32_l;
6506 if (mvx_L1_hi & 0x80)
6507 value = 0xFFFFFFF000000000 | temp_value;
6508 else
6509 value = temp_value;
6510#ifdef DEBUG_QOS
6511 hevc_print(hevc, 0,
6512 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6513 pic_number, rdata32_l/blk22_mv_count,
6514 value, blk22_mv_count);
6515#endif
6516
6517 /* mvy_L1_count[31:0] */
6518 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6519 temp_value = mvy_L1_hi;
6520 temp_value = (temp_value << 32) | rdata32_l;
6521 if (mvy_L1_hi & 0x80)
6522 value = 0xFFFFFFF000000000 | temp_value;
6523 else
6524 value = temp_value;
6525#ifdef DEBUG_QOS
6526 hevc_print(hevc, 0,
6527 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6528 pic_number, rdata32_l/blk22_mv_count,
6529 value, blk22_mv_count);
6530#endif
6531
6532 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6533 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6534 mv_hi = (rdata32>>16)&0xffff;
6535 if (mv_hi & 0x8000)
6536 mv_hi = 0x8000 - mv_hi;
6537#ifdef DEBUG_QOS
6538 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6539 pic_number, mv_hi);
6540#endif
6541 picture->max_mv = mv_hi;
6542
6543 mv_lo = (rdata32>>0)&0xffff;
6544 if (mv_lo & 0x8000)
6545 mv_lo = 0x8000 - mv_lo;
6546#ifdef DEBUG_QOS
6547 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6548 pic_number, mv_lo);
6549#endif
6550 picture->min_mv = mv_lo;
6551
6552 /* {mvy_L0_max, mvy_L0_min} */
6553 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6554 mv_hi = (rdata32>>16)&0xffff;
6555 if (mv_hi & 0x8000)
6556 mv_hi = 0x8000 - mv_hi;
6557#ifdef DEBUG_QOS
6558 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6559 pic_number, mv_hi);
6560#endif
6561
6562 mv_lo = (rdata32>>0)&0xffff;
6563 if (mv_lo & 0x8000)
6564 mv_lo = 0x8000 - mv_lo;
6565#ifdef DEBUG_QOS
6566 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6567 pic_number, mv_lo);
6568#endif
6569
6570 /* {mvx_L1_max, mvx_L1_min} */
6571 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6572 mv_hi = (rdata32>>16)&0xffff;
6573 if (mv_hi & 0x8000)
6574 mv_hi = 0x8000 - mv_hi;
6575#ifdef DEBUG_QOS
6576 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6577 pic_number, mv_hi);
6578#endif
6579
6580 mv_lo = (rdata32>>0)&0xffff;
6581 if (mv_lo & 0x8000)
6582 mv_lo = 0x8000 - mv_lo;
6583#ifdef DEBUG_QOS
6584 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6585 pic_number, mv_lo);
6586#endif
6587
6588 /* {mvy_L1_max, mvy_L1_min} */
6589 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6590 mv_hi = (rdata32>>16)&0xffff;
6591 if (mv_hi & 0x8000)
6592 mv_hi = 0x8000 - mv_hi;
6593#ifdef DEBUG_QOS
6594 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6595 pic_number, mv_hi);
6596#endif
6597 mv_lo = (rdata32>>0)&0xffff;
6598 if (mv_lo & 0x8000)
6599 mv_lo = 0x8000 - mv_lo;
6600#ifdef DEBUG_QOS
6601 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6602 pic_number, mv_lo);
6603#endif
6604
6605 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6606#ifdef DEBUG_QOS
6607 hevc_print(hevc, 0,
6608 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6609 pic_number, rdata32);
6610#endif
6611 /* reset all counts */
6612 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6613 }
6614}
6615
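/*
 * Parse the slice segment header parameters from the rpm data and set
 * up the current picture.  The return value selects the decode action:
 *   0   - continue decoding this slice
 *   1   - skip it (RASL picture before the random access point)
 *   2   - discard the picture (skip decoding)
 *   3/4 - parameter error (oversize, zero lcu size, bit depth > 10, ...)
 *   -1  - no free buffer, wait (hevc->wait_buf is set)
 *   0xf - buffer management only, do not start the hardware decode
 */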
6616static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6617 union param_u *rpm_param,
6618 int decode_pic_begin)
6619{
6620#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6621 struct vdec_s *vdec = hw_to_vdec(hevc);
6622#endif
6623 int i;
6624 int lcu_x_num_div;
6625 int lcu_y_num_div;
6626 int Col_ref;
6627 int dbg_skip_flag = 0;
6628
6629 if (hevc->wait_buf == 0) {
6630 hevc->sps_num_reorder_pics_0 =
6631 rpm_param->p.sps_num_reorder_pics_0;
6632 hevc->m_temporalId = rpm_param->p.m_temporalId;
6633 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6634 hevc->interlace_flag =
6635 (rpm_param->p.profile_etc >> 2) & 0x1;
6636 hevc->curr_pic_struct =
6637 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6638 if (parser_sei_enable & 0x4) {
6639 hevc->frame_field_info_present_flag =
6640 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6641 }
6642
6643 if (interlace_enable == 0 || hevc->m_ins_flag)
6644 hevc->interlace_flag = 0;
6645 if (interlace_enable & 0x100)
6646 hevc->interlace_flag = interlace_enable & 0x1;
6647 if (hevc->interlace_flag == 0)
6648 hevc->curr_pic_struct = 0;
6649 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6650 /*
6651 *hevc->m_pocRandomAccess = MAX_INT;
6652 * //add to fix RAP_B_Bossen_1
6653 */
6654 /* } */
6655 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6656 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6657 hevc->slice_segment_addr =
6658 rpm_param->p.slice_segment_address;
6659 if (!rpm_param->p.dependent_slice_segment_flag)
6660 hevc->slice_addr = hevc->slice_segment_addr;
6661 } else {
6662 hevc->slice_segment_addr = 0;
6663 hevc->slice_addr = 0;
6664 }
6665
6666 hevc->iPrevPOC = hevc->curr_POC;
6667 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6668 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6669 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6670 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6671 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6672 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6673 hevc->isNextSliceSegment =
6674 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6675 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6676 || hevc->pic_h !=
6677 rpm_param->p.pic_height_in_luma_samples) {
6678 hevc_print(hevc, 0,
6679 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6680 hevc->pic_w, hevc->pic_h,
6681 rpm_param->p.pic_width_in_luma_samples,
6682 rpm_param->p.pic_height_in_luma_samples,
6683 hevc->interlace_flag);
6684
6685 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6686 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6687 hevc->frame_width = hevc->pic_w;
6688 hevc->frame_height = hevc->pic_h;
6689#ifdef LOSLESS_COMPRESS_MODE
6690 if (/*re_config_pic_flag == 0 &&*/
6691 (get_double_write_mode(hevc) & 0x10) == 0)
6692 init_decode_head_hw(hevc);
6693#endif
6694 }
6695
6696 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6697 hevc_print(hevc, 0, "over size : %u x %u.\n",
6698 hevc->pic_w, hevc->pic_h);
6699 if ((!hevc->m_ins_flag) &&
6700 ((debug &
6701 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6702 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6703 H265_DEBUG_DIS_SYS_ERROR_PROC);
6704 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6705 return 3;
6706 }
6707 if (hevc->bit_depth_chroma > 10 ||
6708 hevc->bit_depth_luma > 10) {
6709			hevc_print(hevc, 0, "unsupported bit depth : %u,%u\n",
6710 hevc->bit_depth_chroma,
6711 hevc->bit_depth_luma);
6712 if (!hevc->m_ins_flag)
6713 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6714 H265_DEBUG_DIS_SYS_ERROR_PROC);
6715 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6716 return 4;
6717 }
6718
6719 /* it will cause divide 0 error */
6720 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6721 if (get_dbg_flag(hevc)) {
6722 hevc_print(hevc, 0,
6723 "Fatal Error, pic_w = %d, pic_h = %d\n",
6724 hevc->pic_w, hevc->pic_h);
6725 }
6726 return 3;
6727 }
6728 pic_list_process(hevc);
6729
6730 hevc->lcu_size =
6731 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6732 3 + rpm_param->
6733 p.log2_diff_max_min_coding_block_size);
6734 if (hevc->lcu_size == 0) {
6735 hevc_print(hevc, 0,
6736 "Error, lcu_size = 0 (%d,%d)\n",
6737 rpm_param->p.
6738 log2_min_coding_block_size_minus3,
6739 rpm_param->p.
6740 log2_diff_max_min_coding_block_size);
6741 return 3;
6742 }
6743 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6744 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6745 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6746 hevc->lcu_x_num =
6747 ((hevc->pic_w % hevc->lcu_size) ==
6748 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6749 hevc->lcu_y_num =
6750 ((hevc->pic_h % hevc->lcu_size) ==
6751 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6752 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
6753
6754 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6755 || hevc->m_nalUnitType ==
6756 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6757 hevc->curr_POC = 0;
6758 if ((hevc->m_temporalId - 1) == 0)
6759 hevc->iPrevTid0POC = hevc->curr_POC;
6760 } else {
6761 int iMaxPOClsb =
6762 1 << (rpm_param->p.
6763 log2_max_pic_order_cnt_lsb_minus4 + 4);
6764 int iPrevPOClsb;
6765 int iPrevPOCmsb;
6766 int iPOCmsb;
6767 int iPOClsb = rpm_param->p.POClsb;
6768
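			/*
			 * Derive POCmsb from the previous Tid0 POC with
			 * wrap-around handling (HEVC spec 8.3.1).  Example,
			 * assuming iMaxPOClsb = 256: iPrevTid0POC = 255
			 * (msb 0, lsb 255) and POClsb = 0 means the lsb
			 * wrapped, so iPOCmsb becomes 256 and
			 * curr_POC = 256 + 0 = 256.
			 */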
6769 if (iMaxPOClsb == 0) {
6770 hevc_print(hevc, 0,
6771 "error iMaxPOClsb is 0\n");
6772 return 3;
6773 }
6774
6775 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
6776 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
6777
6778 if ((iPOClsb < iPrevPOClsb)
6779 && ((iPrevPOClsb - iPOClsb) >=
6780 (iMaxPOClsb / 2)))
6781 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
6782 else if ((iPOClsb > iPrevPOClsb)
6783 && ((iPOClsb - iPrevPOClsb) >
6784 (iMaxPOClsb / 2)))
6785 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
6786 else
6787 iPOCmsb = iPrevPOCmsb;
6788 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6789 hevc_print(hevc, 0,
6790 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
6791 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
6792 iPOClsb);
6793 }
6794 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6795 || hevc->m_nalUnitType ==
6796 NAL_UNIT_CODED_SLICE_BLANT
6797 || hevc->m_nalUnitType ==
6798 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
6799 /* For BLA picture types, POCmsb is set to 0. */
6800 iPOCmsb = 0;
6801 }
6802 hevc->curr_POC = (iPOCmsb + iPOClsb);
6803 if ((hevc->m_temporalId - 1) == 0)
6804 hevc->iPrevTid0POC = hevc->curr_POC;
6805 else {
6806 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6807 hevc_print(hevc, 0,
6808 "m_temporalID is %d\n",
6809 hevc->m_temporalId);
6810 }
6811 }
6812 }
6813 hevc->RefNum_L0 =
6814 (rpm_param->p.num_ref_idx_l0_active >
6815 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6816 num_ref_idx_l0_active;
6817 hevc->RefNum_L1 =
6818 (rpm_param->p.num_ref_idx_l1_active >
6819 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6820 num_ref_idx_l1_active;
6821
6822 /* if(curr_POC==0x10) dump_lmem(); */
6823
6824 /* skip RASL pictures after CRA/BLA pictures */
6825 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
6826 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
6827 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6828 || hevc->m_nalUnitType ==
6829 NAL_UNIT_CODED_SLICE_BLANT
6830 || hevc->m_nalUnitType ==
6831 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6832 hevc->m_pocRandomAccess = hevc->curr_POC;
6833 else
6834 hevc->m_pocRandomAccess = -MAX_INT;
6835 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6836 || hevc->m_nalUnitType ==
6837 NAL_UNIT_CODED_SLICE_BLANT
6838 || hevc->m_nalUnitType ==
6839 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6840 hevc->m_pocRandomAccess = hevc->curr_POC;
6841 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
6842 (nal_skip_policy >= 3) &&
6843 (hevc->m_nalUnitType ==
6844 NAL_UNIT_CODED_SLICE_RASL_N ||
6845 hevc->m_nalUnitType ==
6846 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
6847 if (get_dbg_flag(hevc)) {
6848 hevc_print(hevc, 0,
6849 "RASL picture with POC %d < %d ",
6850 hevc->curr_POC, hevc->m_pocRandomAccess);
6851 hevc_print(hevc, 0,
6852					"(RandomAccess point POC), skip it\n");
6853 }
6854 return 1;
6855 }
6856
6857 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
6858 hevc->skip_flag = 0;
6859 /**/
6860 /* if((iPrevPOC != curr_POC)){ */
6861 if (rpm_param->p.slice_segment_address == 0) {
6862 struct PIC_s *pic;
6863
6864 hevc->new_pic = 1;
6865#ifdef MULTI_INSTANCE_SUPPORT
6866 if (!hevc->m_ins_flag)
6867#endif
6868 check_pic_decoded_error_pre(hevc,
6869 READ_VREG(HEVC_PARSER_LCU_START)
6870 & 0xffffff);
6871 /**/ if (use_cma == 0) {
6872 if (hevc->pic_list_init_flag == 0) {
6873 init_pic_list(hevc);
6874 init_pic_list_hw(hevc);
6875 init_buf_spec(hevc);
6876 hevc->pic_list_init_flag = 3;
6877 }
6878 }
6879 if (!hevc->m_ins_flag) {
6880 if (hevc->cur_pic)
6881 get_picture_qos_info(hevc);
6882 }
6883 hevc->first_pic_after_recover = 0;
6884 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6885 dump_pic_list(hevc);
6886 /* prev pic */
6887 hevc_pre_pic(hevc, pic);
6888 /*
6889 *update referenced of old pictures
6890 *(cur_pic->referenced is 1 and not updated)
6891 */
6892 apply_ref_pic_set(hevc, hevc->curr_POC,
6893 rpm_param);
6894
6895 if (hevc->mmu_enable)
6896 recycle_mmu_bufs(hevc);
6897
6898#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6899 if (vdec->master) {
6900 struct hevc_state_s *hevc_ba =
6901 (struct hevc_state_s *)
6902 vdec->master->private;
6903 if (hevc_ba->cur_pic != NULL) {
6904 hevc_ba->cur_pic->dv_enhance_exist = 1;
6905 hevc_print(hevc, H265_DEBUG_DV,
6906 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
6907 hevc->curr_POC, hevc_ba->cur_pic->POC);
6908 }
6909 }
6910 if (vdec->master == NULL &&
6911 vdec->slave == NULL)
6912 set_aux_data(hevc,
6913 hevc->cur_pic, 1, 0); /*suffix*/
6914 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6915 set_aux_data(hevc,
6916 hevc->cur_pic, 0, 1); /*dv meta only*/
6917#else
6918 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6919#endif
6920 /* new pic */
6921 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6922 if (hevc->cur_pic == NULL) {
6923 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6924 dump_pic_list(hevc);
6925 hevc->wait_buf = 1;
6926 return -1;
6927 }
6928#ifdef MULTI_INSTANCE_SUPPORT
6929 hevc->decoding_pic = hevc->cur_pic;
6930 if (!hevc->m_ins_flag)
6931 hevc->over_decode = 0;
6932#endif
6933#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6934 hevc->cur_pic->dv_enhance_exist = 0;
6935 if (vdec->slave)
6936 hevc_print(hevc, H265_DEBUG_DV,
6937 "Clear bl (poc %d) dv_enhance_exist flag\n",
6938 hevc->curr_POC);
6939 if (vdec->master == NULL &&
6940 vdec->slave == NULL)
6941 set_aux_data(hevc,
6942 hevc->cur_pic, 0, 0); /*prefix*/
6943
6944 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6945 set_aux_data(hevc,
6946 hevc->cur_pic, 0, 2); /*pre sei only*/
6947#else
6948 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6949#endif
6950 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
6951 hevc->cur_pic->output_ready = 1;
6952 hevc->cur_pic->stream_offset =
6953 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
6954 prepare_display_buf(hevc, hevc->cur_pic);
6955 hevc->wait_buf = 2;
6956 return -1;
6957 }
6958 } else {
6959 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
6960#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6961 if (vdec->master == NULL &&
6962 vdec->slave == NULL) {
6963 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6964 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6965 }
6966#else
6967 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6968 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6969#endif
6970 }
6971 if (hevc->pic_list_init_flag != 3
6972 || hevc->cur_pic == NULL) {
6973 /* make it dec from the first slice segment */
6974 return 3;
6975 }
6976 hevc->cur_pic->slice_idx++;
6977 hevc->new_pic = 0;
6978 }
6979 } else {
6980 if (hevc->wait_buf == 1) {
6981 pic_list_process(hevc);
6982 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6983 if (hevc->cur_pic == NULL)
6984 return -1;
6985
6986 if (!hevc->m_ins_flag)
6987 hevc->over_decode = 0;
6988
6989#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6990 hevc->cur_pic->dv_enhance_exist = 0;
6991 if (vdec->master == NULL &&
6992 vdec->slave == NULL)
6993 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6994#else
6995 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6996#endif
6997 hevc->wait_buf = 0;
6998 } else if (hevc->wait_buf ==
6999 2) {
7000 if (get_display_pic_num(hevc) >
7001 1)
7002 return -1;
7003 hevc->wait_buf = 0;
7004 }
7005 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7006 dump_pic_list(hevc);
7007 }
7008
7009 if (hevc->new_pic) {
7010#if 1
7011 /*SUPPORT_10BIT*/
7012 int sao_mem_unit =
7013 (hevc->lcu_size == 16 ? 9 :
7014 hevc->lcu_size ==
7015 32 ? 14 : 24) << 4;
7016#else
7017 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
7018#endif
7019 int pic_height_cu =
7020 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
7021 int pic_width_cu =
7022 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
7023 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
7024
7025 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
7026 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7027 hevc_print(hevc, 0,
7028 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
7029 __func__,
7030 hevc->decode_idx,
7031 hevc->curr_pic_struct,
7032 hevc->interlace_flag,
7033 hevc->cur_pic->index);
7034 }
7035 if (dbg_skip_decode_index != 0 &&
7036 hevc->decode_idx == dbg_skip_decode_index)
7037 dbg_skip_flag = 1;
7038
7039 hevc->decode_idx++;
7040 update_tile_info(hevc, pic_width_cu, pic_height_cu,
7041 sao_mem_unit, rpm_param);
7042
7043 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
7044 }
7045
7046 if (hevc->iPrevPOC != hevc->curr_POC) {
7047 hevc->new_tile = 1;
7048 hevc->tile_x = 0;
7049 hevc->tile_y = 0;
7050 hevc->tile_y_x = 0;
7051 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7052 hevc_print(hevc, 0,
7053 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
7054 hevc->tile_x, hevc->tile_y);
7055 }
7056 } else if (hevc->tile_enabled) {
7057 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7058 hevc_print(hevc, 0,
7059 "slice_segment_address is %d\n",
7060 rpm_param->p.slice_segment_address);
7061 }
7062 hevc->tile_y_x =
7063 get_tile_index(hevc, rpm_param->p.slice_segment_address,
7064 (hevc->pic_w +
7065 hevc->lcu_size -
7066 1) / hevc->lcu_size);
7067 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
7068 && (hevc->tile_y_x != -1)) {
7069 hevc->new_tile = 1;
7070 hevc->tile_x = hevc->tile_y_x & 0xff;
7071 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
7072 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7073 hevc_print(hevc, 0,
7074 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
7075 rpm_param->p.slice_segment_address,
7076 hevc->tile_x, hevc->tile_y);
7077 }
7078 } else
7079 hevc->new_tile = 0;
7080 } else
7081 hevc->new_tile = 0;
7082
7083 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
7084 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
7085 hevc->new_tile = 0;
7086
7087 if (hevc->new_tile) {
7088 hevc->tile_start_lcu_x =
7089 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
7090 hevc->tile_start_lcu_y =
7091 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
7092 hevc->tile_width_lcu =
7093 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
7094 hevc->tile_height_lcu =
7095 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
7096 }
7097
7098 set_ref_pic_list(hevc, rpm_param);
7099
7100 Col_ref = rpm_param->p.collocated_ref_idx;
7101
7102 hevc->LDCFlag = 0;
7103 if (rpm_param->p.slice_type != I_SLICE) {
7104 hevc->LDCFlag = 1;
7105 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
7106 if (hevc->cur_pic->
7107 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
7108 hevc->curr_POC)
7109 hevc->LDCFlag = 0;
7110 }
7111 if (rpm_param->p.slice_type == B_SLICE) {
7112 for (i = 0; (i < hevc->RefNum_L1)
7113 && hevc->LDCFlag; i++) {
7114 if (hevc->cur_pic->
7115 m_aiRefPOCList1[hevc->cur_pic->
7116 slice_idx][i] >
7117 hevc->curr_POC)
7118 hevc->LDCFlag = 0;
7119 }
7120 }
7121 }
7122
7123 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
7124
7125 hevc->plevel =
7126 rpm_param->p.log2_parallel_merge_level;
7127 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
7128
7129 hevc->LongTerm_Curr = 0; /* to do ... */
7130 hevc->LongTerm_Col = 0; /* to do ... */
7131
7132 hevc->list_no = 0;
7133 if (rpm_param->p.slice_type == B_SLICE)
7134 hevc->list_no = 1 - hevc->ColFromL0Flag;
7135 if (hevc->list_no == 0) {
7136 if (Col_ref < hevc->RefNum_L0) {
7137 hevc->Col_POC =
7138 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
7139 slice_idx][Col_ref];
7140 } else
7141 hevc->Col_POC = INVALID_POC;
7142 } else {
7143 if (Col_ref < hevc->RefNum_L1) {
7144 hevc->Col_POC =
7145 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
7146 slice_idx][Col_ref];
7147 } else
7148 hevc->Col_POC = INVALID_POC;
7149 }
7150
7151 hevc->LongTerm_Ref = 0; /* to do ... */
7152
7153 if (hevc->slice_type != 2) {
7154 /* if(hevc->i_only==1){ */
7155 /* return 0xf; */
7156 /* } */
7157
7158 if (hevc->Col_POC != INVALID_POC) {
7159 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7160 if (hevc->col_pic == NULL) {
7161 hevc->cur_pic->error_mark = 1;
7162 if (get_dbg_flag(hevc)) {
7163 hevc_print(hevc, 0,
7164					"WRONG, failed to get the pic for Col_POC\n");
7165 }
7166 if (is_log_enable(hevc))
7167 add_log(hevc,
7168					"WRONG, failed to get the pic for Col_POC");
7169 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7170 hevc->cur_pic->error_mark = 1;
7171 if (get_dbg_flag(hevc)) {
7172 hevc_print(hevc, 0,
7173 "WRONG, Col_POC error_mark is 1\n");
7174 }
7175 if (is_log_enable(hevc))
7176 add_log(hevc,
7177 "WRONG, Col_POC error_mark is 1");
7178 } else {
7179 if ((hevc->col_pic->width
7180 != hevc->pic_w) ||
7181 (hevc->col_pic->height
7182 != hevc->pic_h)) {
7183 hevc_print(hevc, 0,
7184 "Wrong reference pic (poc %d) width/height %d/%d\n",
7185 hevc->col_pic->POC,
7186 hevc->col_pic->width,
7187 hevc->col_pic->height);
7188 hevc->cur_pic->error_mark = 1;
7189 }
7190
7191 }
7192
7193 if (hevc->cur_pic->error_mark
7194 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7195#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7196 /*count info*/
7197 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7198 hevc->cur_pic->stream_offset);
7199#endif
7200 }
7201
7202 if (is_skip_decoding(hevc,
7203 hevc->cur_pic)) {
7204 return 2;
7205 }
7206 } else
7207 hevc->col_pic = hevc->cur_pic;
7208 } /* */
7209 if (hevc->col_pic == NULL)
7210 hevc->col_pic = hevc->cur_pic;
7211#ifdef BUFFER_MGR_ONLY
7212 return 0xf;
7213#else
7214 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7215 || (dbg_skip_flag))
7216 return 0xf;
7217#endif
7218
7219 config_mc_buffer(hevc, hevc->cur_pic);
7220
7221 if (is_skip_decoding(hevc,
7222 hevc->cur_pic)) {
7223 if (get_dbg_flag(hevc))
7224 hevc_print(hevc, 0,
7225 "Discard this picture index %d\n",
7226 hevc->cur_pic->index);
7227#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7228 /*count info*/
7229 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7230 hevc->cur_pic->stream_offset);
7231#endif
7232 return 2;
7233 }
7234#ifdef MCRCC_ENABLE
7235 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7236#endif
7237 config_mpred_hw(hevc);
7238
7239 config_sao_hw(hevc, rpm_param);
7240
7241 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7242 return 0xf;
7243
7244 return 0;
7245}
7246
7247
7248
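/*
 * Allocate the scatter MMU pages for one picture: the compressed frame
 * body size is computed from the picture size and bit depth, rounded up
 * to 4k pages, and that many pages are taken from the decoder mmu_box
 * and written to mmu_index_adr for the hardware.  Nothing is needed
 * (returns 0) when double write mode 0x10 bypasses the compressed
 * reference output.
 */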
7249static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7250 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7251 int cur_buf_idx = new_pic->index;
7252 int bit_depth_10 = (bit_depth != 0x00);
7253 int picture_size;
7254 int cur_mmu_4k_number;
7255 int ret, max_frame_num;
7256 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7257 new_pic->height, !bit_depth_10);
7258 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7259 if (hevc->double_write_mode & 0x10)
7260 return 0;
7261 /*hevc_print(hevc, 0,
7262 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7263 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7264 if (new_pic->scatter_alloc) {
7265 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7266 new_pic->scatter_alloc = 0;
7267 }
7268 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7269 max_frame_num = MAX_FRAME_8K_NUM;
7270 else
7271 max_frame_num = MAX_FRAME_4K_NUM;
7272 if (cur_mmu_4k_number > max_frame_num) {
7273 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7274 cur_mmu_4k_number,
7275 new_pic->width,
7276 new_pic->height);
7277 return -1;
7278 }
7279 ret = decoder_mmu_box_alloc_idx(
7280 hevc->mmu_box,
7281 cur_buf_idx,
7282 cur_mmu_4k_number,
7283 mmu_index_adr);
7284 if (ret == 0)
7285 new_pic->scatter_alloc = 1;
7286
7287 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7288 "%s pic index %d page count(%d) ret =%d\n",
7289 __func__, cur_buf_idx,
7290 cur_mmu_4k_number, ret);
7291 return ret;
7292}
7293
7294
7295static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7296 struct PIC_s *pic)
7297{
7298 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7299 "%s pic index %d scatter_alloc %d\n",
7300 __func__, pic->index,
7301 pic->scatter_alloc);
7302
7303 if (hevc->mmu_enable
7304 && ((hevc->double_write_mode & 0x10) == 0)
7305 && pic->scatter_alloc)
7306 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7307 pic->scatter_alloc = 0;
7308}
7309
7310/*
7311 *************************************************
7312 *
7313 *h265 buffer management end
7314 *
7315 **************************************************
7316 */
7317static struct hevc_state_s *gHevc;
7318
7319static void hevc_local_uninit(struct hevc_state_s *hevc)
7320{
7321 hevc->rpm_ptr = NULL;
7322 hevc->lmem_ptr = NULL;
7323
7324#ifdef SWAP_HEVC_UCODE
7325 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7326 if (hevc->mc_cpu_addr != NULL) {
7327 dma_free_coherent(amports_get_dma_device(),
7328 hevc->swap_size, hevc->mc_cpu_addr,
7329 hevc->mc_dma_handle);
7330 hevc->mc_cpu_addr = NULL;
7331 }
7332
7333 }
7334#endif
7335#ifdef DETREFILL_ENABLE
7336 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7337 uninit_detrefill_buf(hevc);
7338#endif
7339 if (hevc->aux_addr) {
7340 dma_free_coherent(amports_get_dma_device(),
7341 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7342 hevc->aux_phy_addr);
7343 hevc->aux_addr = NULL;
7344 }
7345 if (hevc->rpm_addr) {
7346 dma_free_coherent(amports_get_dma_device(),
7347 RPM_BUF_SIZE, hevc->rpm_addr,
7348 hevc->rpm_phy_addr);
7349 hevc->rpm_addr = NULL;
7350 }
7351 if (hevc->lmem_addr) {
7352 dma_free_coherent(amports_get_dma_device(),
7353			LMEM_BUF_SIZE, hevc->lmem_addr,
7354 hevc->lmem_phy_addr);
7355 hevc->lmem_addr = NULL;
7356 }
7357
7358 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7359 if (hevc->frame_mmu_map_phy_addr)
7360 dma_free_coherent(amports_get_dma_device(),
7361 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7362 hevc->frame_mmu_map_phy_addr);
7363
7364 hevc->frame_mmu_map_addr = NULL;
7365 }
7366
7367 kfree(gvs);
7368 gvs = NULL;
7369}
7370
7371static int hevc_local_init(struct hevc_state_s *hevc)
7372{
7373 int ret = -1;
7374 struct BuffInfo_s *cur_buf_info = NULL;
7375
7376 memset(&hevc->param, 0, sizeof(union param_u));
7377
7378 cur_buf_info = &hevc->work_space_buf_store;
7379
7380 if (vdec_is_support_4k()) {
7381 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7382 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7383 sizeof(struct BuffInfo_s));
7384 else
7385 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7386 sizeof(struct BuffInfo_s));
7387 } else
7388 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7389 sizeof(struct BuffInfo_s));
7390
7391 cur_buf_info->start_adr = hevc->buf_start;
7392 init_buff_spec(hevc, cur_buf_info);
7393
7394 hevc_init_stru(hevc, cur_buf_info);
7395
7396 hevc->bit_depth_luma = 8;
7397 hevc->bit_depth_chroma = 8;
7398 hevc->video_signal_type = 0;
7399 hevc->video_signal_type_debug = 0;
7400 bit_depth_luma = hevc->bit_depth_luma;
7401 bit_depth_chroma = hevc->bit_depth_chroma;
7402 video_signal_type = hevc->video_signal_type;
7403
7404 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7405 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7406 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7407 if (hevc->rpm_addr == NULL) {
7408 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7409 return -1;
7410 }
7411 hevc->rpm_ptr = hevc->rpm_addr;
7412 }
7413
7414 if (prefix_aux_buf_size > 0 ||
7415 suffix_aux_buf_size > 0) {
7416 u32 aux_buf_size;
7417
7418 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7419 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7420 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7421		hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7422 aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7423 if (hevc->aux_addr == NULL) {
7424			pr_err("%s: failed to alloc aux buffer\n", __func__);
7425 return -1;
7426 }
7427 }
7428
7429 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7430 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7431 if (hevc->lmem_addr == NULL) {
7432 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7433 return -1;
7434 }
7435 hevc->lmem_ptr = hevc->lmem_addr;
7436
7437 if (hevc->mmu_enable) {
7438 hevc->frame_mmu_map_addr =
7439 dma_alloc_coherent(amports_get_dma_device(),
7440 get_frame_mmu_map_size(),
7441 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7442 if (hevc->frame_mmu_map_addr == NULL) {
7443			pr_err("%s: failed to alloc frame mmu map buffer\n", __func__);
7444 return -1;
7445 }
7446 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7447 }
7448 ret = 0;
7449 return ret;
7450}
7451
7452/*
7453 *******************************************
7454 * Mailbox command
7455 *******************************************
7456 */
7457#define CMD_FINISHED 0
7458#define CMD_ALLOC_VIEW 1
7459#define CMD_FRAME_DISPLAY 3
7460#define CMD_DEBUG 10
7461
7462
7463#define DECODE_BUFFER_NUM_MAX 32
7464#define DISPLAY_BUFFER_NUM 6
7465
7466#define video_domain_addr(adr) (adr&0x7fffffff)
7467#define DECODER_WORK_SPACE_SIZE 0x800000
7468
7469#define spec2canvas(x) \
7470 (((x)->uv_canvas_index << 16) | \
7471 ((x)->uv_canvas_index << 8) | \
7472 ((x)->y_canvas_index << 0))
7473
7474
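/*
 * Configure the read canvases for one picture.  With double write the
 * canvas geometry is the full size divided by the double write ratio
 * and aligned (32/64 in width, 32 in height); without double write a
 * canvas is only needed when the MMU is disabled.  Canvas indexes come
 * from the vdec core in parallel decoding mode, otherwise from a fixed
 * range starting at 128.
 */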
7475static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7476{
7477 struct vdec_s *vdec = hw_to_vdec(hevc);
7478 int canvas_w = ALIGN(pic->width, 64)/4;
7479 int canvas_h = ALIGN(pic->height, 32)/4;
7480 int blkmode = hevc->mem_map_mode;
7481
7482 /*CANVAS_BLKMODE_64X32*/
7483#ifdef SUPPORT_10BIT
7484 if (pic->double_write_mode) {
7485 canvas_w = pic->width /
7486 get_double_write_ratio(hevc, pic->double_write_mode);
7487 canvas_h = pic->height /
7488 get_double_write_ratio(hevc, pic->double_write_mode);
7489
7490 if (hevc->mem_map_mode == 0)
7491 canvas_w = ALIGN(canvas_w, 32);
7492 else
7493 canvas_w = ALIGN(canvas_w, 64);
7494 canvas_h = ALIGN(canvas_h, 32);
7495
7496 if (vdec->parallel_dec == 1) {
7497 if (pic->y_canvas_index == -1)
7498 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7499 if (pic->uv_canvas_index == -1)
7500 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7501 } else {
7502 pic->y_canvas_index = 128 + pic->index * 2;
7503 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7504 }
7505
7506 canvas_config_ex(pic->y_canvas_index,
7507 pic->dw_y_adr, canvas_w, canvas_h,
7508 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7509 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7510 canvas_w, canvas_h,
7511 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7512#ifdef MULTI_INSTANCE_SUPPORT
7513 pic->canvas_config[0].phy_addr =
7514 pic->dw_y_adr;
7515 pic->canvas_config[0].width =
7516 canvas_w;
7517 pic->canvas_config[0].height =
7518 canvas_h;
7519 pic->canvas_config[0].block_mode =
7520 blkmode;
7521 pic->canvas_config[0].endian = hevc->is_used_v4l ? 0 : 7;
7522
7523 pic->canvas_config[1].phy_addr =
7524 pic->dw_u_v_adr;
7525 pic->canvas_config[1].width =
7526 canvas_w;
7527 pic->canvas_config[1].height =
7528 canvas_h;
7529 pic->canvas_config[1].block_mode =
7530 blkmode;
7531 pic->canvas_config[1].endian = hevc->is_used_v4l ? 0 : 7;
7532#endif
7533 } else {
7534 if (!hevc->mmu_enable) {
7535 /* to change after 10bit VPU is ready ... */
7536 if (vdec->parallel_dec == 1) {
7537 if (pic->y_canvas_index == -1)
7538 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7539 pic->uv_canvas_index = pic->y_canvas_index;
7540 } else {
7541 pic->y_canvas_index = 128 + pic->index;
7542 pic->uv_canvas_index = 128 + pic->index;
7543 }
7544
7545 canvas_config_ex(pic->y_canvas_index,
7546 pic->mc_y_adr, canvas_w, canvas_h,
7547 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7548 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7549 canvas_w, canvas_h,
7550 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7551 }
7552 }
7553#else
7554 if (vdec->parallel_dec == 1) {
7555 if (pic->y_canvas_index == -1)
7556 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7557 if (pic->uv_canvas_index == -1)
7558 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7559 } else {
7560 pic->y_canvas_index = 128 + pic->index * 2;
7561 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7562 }
7563
7564
7565 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7566 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7567 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7568 canvas_w, canvas_h,
7569 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7570#endif
7571}
7572
7573static int init_buf_spec(struct hevc_state_s *hevc)
7574{
7575 int pic_width = hevc->pic_w;
7576 int pic_height = hevc->pic_h;
7577
7578 /* hevc_print(hevc, 0,
7579 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7580 */
7581 hevc_print(hevc, 0,
7582 "%s2 %d %d\n", __func__, pic_width, pic_height);
7583 /* pic_width = hevc->pic_w; */
7584 /* pic_height = hevc->pic_h; */
7585
7586 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7587 hevc->frame_width = pic_width;
7588 hevc->frame_height = pic_height;
7589
7590 }
7591
7592 return 0;
7593}
7594
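/*
 * Walk the SEI payloads in sei_buf and latch the ones the driver uses:
 * picture timing (pic_struct), the HDR10+ ITU-T T.35 user data marker,
 * mastering display colour volume and content light level.  Unknown
 * payload types are skipped by their signalled size.
 */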
7595static int parse_sei(struct hevc_state_s *hevc,
7596 struct PIC_s *pic, char *sei_buf, uint32_t size)
7597{
7598 char *p = sei_buf;
7599 char *p_sei;
7600 uint16_t header;
7601 uint8_t nal_unit_type;
7602 uint8_t payload_type, payload_size;
7603 int i, j;
7604
7605 if (size < 2)
7606 return 0;
7607 header = *p++;
7608 header <<= 8;
7609 header += *p++;
7610 nal_unit_type = header >> 9;
7611 if ((nal_unit_type != NAL_UNIT_SEI)
7612 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7613 return 0;
7614 while (p+2 <= sei_buf+size) {
7615 payload_type = *p++;
7616 payload_size = *p++;
7617 if (p+payload_size <= sei_buf+size) {
7618 switch (payload_type) {
7619 case SEI_PicTiming:
7620 if ((parser_sei_enable & 0x4) &&
7621 hevc->frame_field_info_present_flag) {
7622 p_sei = p;
7623 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7624 pic->pic_struct = hevc->curr_pic_struct;
7625 if (get_dbg_flag(hevc) &
7626 H265_DEBUG_PIC_STRUCT) {
7627 hevc_print(hevc, 0,
7628 "parse result pic_struct = %d\n",
7629 hevc->curr_pic_struct);
7630 }
7631 }
7632 break;
7633 case SEI_UserDataITU_T_T35:
7634 p_sei = p;
7635 if (p_sei[0] == 0xB5
7636 && p_sei[1] == 0x00
7637 && p_sei[2] == 0x3C
7638 && p_sei[3] == 0x00
7639 && p_sei[4] == 0x01
7640 && p_sei[5] == 0x04)
7641 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7642
7643 break;
7644 case SEI_MasteringDisplayColorVolume:
7645 /*hevc_print(hevc, 0,
7646 "sei type: primary display color volume %d, size %d\n",
7647 payload_type,
7648 payload_size);*/
7649 /* master_display_colour */
7650 p_sei = p;
7651 for (i = 0; i < 3; i++) {
7652 for (j = 0; j < 2; j++) {
7653 hevc->primaries[i][j]
7654 = (*p_sei<<8)
7655 | *(p_sei+1);
7656 p_sei += 2;
7657 }
7658 }
7659 for (i = 0; i < 2; i++) {
7660 hevc->white_point[i]
7661 = (*p_sei<<8)
7662 | *(p_sei+1);
7663 p_sei += 2;
7664 }
7665 for (i = 0; i < 2; i++) {
7666 hevc->luminance[i]
7667 = (*p_sei<<24)
7668 | (*(p_sei+1)<<16)
7669 | (*(p_sei+2)<<8)
7670 | *(p_sei+3);
7671 p_sei += 4;
7672 }
7673 hevc->sei_present_flag |=
7674 SEI_MASTER_DISPLAY_COLOR_MASK;
7675 /*for (i = 0; i < 3; i++)
7676 for (j = 0; j < 2; j++)
7677 hevc_print(hevc, 0,
7678 "\tprimaries[%1d][%1d] = %04x\n",
7679 i, j,
7680 hevc->primaries[i][j]);
7681 hevc_print(hevc, 0,
7682 "\twhite_point = (%04x, %04x)\n",
7683 hevc->white_point[0],
7684 hevc->white_point[1]);
7685 hevc_print(hevc, 0,
7686 "\tmax,min luminance = %08x, %08x\n",
7687 hevc->luminance[0],
7688 hevc->luminance[1]);*/
7689 break;
7690 case SEI_ContentLightLevel:
7691 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7692 hevc_print(hevc, 0,
7693 "sei type: max content light level %d, size %d\n",
7694 payload_type, payload_size);
7695 /* content_light_level */
7696 p_sei = p;
7697 hevc->content_light_level[0]
7698 = (*p_sei<<8) | *(p_sei+1);
7699 p_sei += 2;
7700 hevc->content_light_level[1]
7701 = (*p_sei<<8) | *(p_sei+1);
7702 p_sei += 2;
7703 hevc->sei_present_flag |=
7704 SEI_CONTENT_LIGHT_LEVEL_MASK;
7705 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7706 hevc_print(hevc, 0,
7707 "\tmax cll = %04x, max_pa_cll = %04x\n",
7708 hevc->content_light_level[0],
7709 hevc->content_light_level[1]);
7710 break;
7711 default:
7712 break;
7713 }
7714 }
7715 p += payload_size;
7716 }
7717 return 0;
7718}
7719
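/*
 * calc_ar() converts the coded sample aspect ratio into the value expected
 * by vf->ratio_control: display height/width in 8.8 fixed point, i.e.
 * ar = 256 * DAR_h / DAR_w.  The idc cases map to the predefined SARs of
 * the VUI table (1 => 1:1, 2 => 12:11, ... 16 => 2:1); idc 255 uses the
 * explicit sar_w/sar_h.  Worked example (values chosen for illustration
 * only): a 1920x1080 frame with idc 1 gives
 * ar = 0x100 * 1080 / 1920 = 144 (0x90), i.e. a 16:9 display ratio.
 */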
7720static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7721 unsigned w, unsigned h)
7722{
7723 unsigned ar;
7724
7725 if (idc == 255) {
7726 ar = div_u64(256ULL * sar_h * h,
7727 sar_w * w);
7728 } else {
7729 switch (idc) {
7730 case 1:
7731 ar = 0x100 * h / w;
7732 break;
7733 case 2:
7734 ar = 0x100 * h * 11 / (w * 12);
7735 break;
7736 case 3:
7737 ar = 0x100 * h * 11 / (w * 10);
7738 break;
7739 case 4:
7740 ar = 0x100 * h * 11 / (w * 16);
7741 break;
7742 case 5:
7743 ar = 0x100 * h * 33 / (w * 40);
7744 break;
7745 case 6:
7746 ar = 0x100 * h * 11 / (w * 24);
7747 break;
7748 case 7:
7749 ar = 0x100 * h * 11 / (w * 20);
7750 break;
7751 case 8:
7752 ar = 0x100 * h * 11 / (w * 32);
7753 break;
7754 case 9:
7755 ar = 0x100 * h * 33 / (w * 80);
7756 break;
7757 case 10:
7758 ar = 0x100 * h * 11 / (w * 18);
7759 break;
7760 case 11:
7761 ar = 0x100 * h * 11 / (w * 15);
7762 break;
7763 case 12:
7764 ar = 0x100 * h * 33 / (w * 64);
7765 break;
7766 case 13:
7767 ar = 0x100 * h * 99 / (w * 160);
7768 break;
7769 case 14:
7770 ar = 0x100 * h * 3 / (w * 4);
7771 break;
7772 case 15:
7773 ar = 0x100 * h * 2 / (w * 3);
7774 break;
7775 case 16:
7776 ar = 0x100 * h * 1 / (w * 2);
7777 break;
7778 default:
7779 ar = h * 0x100 / w;
7780 break;
7781 }
7782 }
7783
7784 return ar;
7785}
7786
7787static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
7788 struct PIC_s *pic)
7789{
7790 unsigned int ar;
7791 int i, j;
7792 char *p;
7793 unsigned size = 0;
7794 unsigned type = 0;
7795 struct vframe_master_display_colour_s *vf_dp
7796 = &vf->prop.master_display_colour;
7797
7798 vf->width = pic->width /
7799 get_double_write_ratio(hevc, pic->double_write_mode);
7800 vf->height = pic->height /
7801 get_double_write_ratio(hevc, pic->double_write_mode);
7802
7803 vf->duration = hevc->frame_dur;
7804 vf->duration_pulldown = 0;
7805 vf->flag = 0;
7806
7807 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
7808 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7809
7810
7811 if (((pic->aspect_ratio_idc == 255) &&
7812 pic->sar_width &&
7813 pic->sar_height) ||
7814 ((pic->aspect_ratio_idc != 255) &&
7815 (pic->width))) {
7816 ar = min_t(u32,
7817 calc_ar(pic->aspect_ratio_idc,
7818 pic->sar_width,
7819 pic->sar_height,
7820 pic->width,
7821 pic->height),
7822 DISP_RATIO_ASPECT_RATIO_MAX);
7823 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7824 }
7825 hevc->ratio_control = vf->ratio_control;
7826 if (pic->aux_data_buf
7827 && pic->aux_data_size) {
7828 /* parse sei */
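 /*
 * pic->aux_data_buf is laid out by the ucode as a sequence of records:
 * an 8-byte big-endian header (4-byte payload size, then 4-byte type)
 * followed by the payload itself.  Type 0x02000000 carries a raw SEI
 * NAL, which is handed to parse_sei() below.
 */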
7829 p = pic->aux_data_buf;
7830 while (p < pic->aux_data_buf
7831 + pic->aux_data_size - 8) {
7832 size = *p++;
7833 size = (size << 8) | *p++;
7834 size = (size << 8) | *p++;
7835 size = (size << 8) | *p++;
7836 type = *p++;
7837 type = (type << 8) | *p++;
7838 type = (type << 8) | *p++;
7839 type = (type << 8) | *p++;
7840 if (type == 0x02000000) {
7841 /* hevc_print(hevc, 0,
7842 "sei(%d)\n", size); */
7843 parse_sei(hevc, pic, p, size);
7844 }
7845 p += size;
7846 }
7847 }
7848 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
7849 vf->signal_type = pic->video_signal_type;
7850 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
7851 u32 data;
7852 data = vf->signal_type;
7853 data = data & 0xFFFF00FF;
7854 data = data | (0x30<<8);
7855 vf->signal_type = data;
7856 }
7857 }
7858 else
7859 vf->signal_type = 0;
7860 hevc->video_signal_type_debug = vf->signal_type;
7861
7862 /* master_display_colour */
7863 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
7864 for (i = 0; i < 3; i++)
7865 for (j = 0; j < 2; j++)
7866 vf_dp->primaries[i][j] = hevc->primaries[i][j];
7867 for (i = 0; i < 2; i++) {
7868 vf_dp->white_point[i] = hevc->white_point[i];
7869 vf_dp->luminance[i]
7870 = hevc->luminance[i];
7871 }
7872 vf_dp->present_flag = 1;
7873 } else
7874 vf_dp->present_flag = 0;
7875
7876 /* content_light_level */
7877 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
7878 vf_dp->content_light_level.max_content
7879 = hevc->content_light_level[0];
7880 vf_dp->content_light_level.max_pic_average
7881 = hevc->content_light_level[1];
7882 vf_dp->content_light_level.present_flag = 1;
7883 } else
7884 vf_dp->content_light_level.present_flag = 0;
7885
7886 if (hevc->is_used_v4l &&
7887 ((hevc->sei_present_flag & SEI_HDR10PLUS_MASK) ||
7888 (vf_dp->present_flag) ||
7889 (vf_dp->content_light_level.present_flag))) {
7890 struct aml_vdec_hdr_infos hdr;
7891 struct aml_vcodec_ctx *ctx =
7892 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
7893
7894 memset(&hdr, 0, sizeof(hdr));
7895 hdr.signal_type = vf->signal_type;
7896 hdr.color_parms = *vf_dp;
7897 vdec_v4l_set_hdr_infos(ctx, &hdr);
7898 }
7899}
7900
7901static int vh265_vf_states(struct vframe_states *states, void *op_arg)
7902{
7903 unsigned long flags;
7904#ifdef MULTI_INSTANCE_SUPPORT
7905 struct vdec_s *vdec = op_arg;
7906 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7907#else
7908 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7909#endif
7910
7911 spin_lock_irqsave(&lock, flags);
7912
7913 states->vf_pool_size = VF_POOL_SIZE;
7914 states->buf_free_num = kfifo_len(&hevc->newframe_q);
7915 states->buf_avail_num = kfifo_len(&hevc->display_q);
7916
7917 if (step == 2)
7918 states->buf_avail_num = 0;
7919 spin_unlock_irqrestore(&lock, flags);
7920 return 0;
7921}
7922
7923static struct vframe_s *vh265_vf_peek(void *op_arg)
7924{
7925 struct vframe_s *vf[2] = {0, 0};
7926#ifdef MULTI_INSTANCE_SUPPORT
7927 struct vdec_s *vdec = op_arg;
7928 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7929#else
7930 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7931#endif
7932
7933 if (step == 2)
7934 return NULL;
7935
7936 if (force_disp_pic_index & 0x100) {
7937 if (force_disp_pic_index & 0x200)
7938 return NULL;
7939 return &hevc->vframe_dummy;
7940 }
7941
7942
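 /*
 * Peek up to two entries so the pts of the frame that follows can be
 * attached to the head frame (next_vf_pts), which the receiver can use
 * for display timing.
 */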
7943 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
7944 if (vf[1]) {
7945 vf[0]->next_vf_pts_valid = true;
7946 vf[0]->next_vf_pts = vf[1]->pts;
7947 } else
7948 vf[0]->next_vf_pts_valid = false;
7949 return vf[0];
7950 }
7951
7952 return NULL;
7953}
7954
7955static struct vframe_s *vh265_vf_get(void *op_arg)
7956{
7957 struct vframe_s *vf;
7958#ifdef MULTI_INSTANCE_SUPPORT
7959 struct vdec_s *vdec = op_arg;
7960 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7961#else
7962 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7963#endif
7964
7965 if (step == 2)
7966 return NULL;
7967 else if (step == 1)
7968 step = 2;
7969
7970#if 0
7971 if (force_disp_pic_index & 0x100) {
7972 int buffer_index = force_disp_pic_index & 0xff;
7973 struct PIC_s *pic = NULL;
7974 if (buffer_index >= 0
7975 && buffer_index < MAX_REF_PIC_NUM)
7976 pic = hevc->m_PIC[buffer_index];
7977 if (pic == NULL)
7978 return NULL;
7979 if (force_disp_pic_index & 0x200)
7980 return NULL;
7981
7982 vf = &hevc->vframe_dummy;
7983 if (get_double_write_mode(hevc)) {
7984 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
7985 VIDTYPE_VIU_NV21;
7986 if (hevc->m_ins_flag) {
7987 vf->canvas0Addr = vf->canvas1Addr = -1;
7988 vf->plane_num = 2;
7989 vf->canvas0_config[0] =
7990 pic->canvas_config[0];
7991 vf->canvas0_config[1] =
7992 pic->canvas_config[1];
7993
7994 vf->canvas1_config[0] =
7995 pic->canvas_config[0];
7996 vf->canvas1_config[1] =
7997 pic->canvas_config[1];
7998 } else {
7999 vf->canvas0Addr = vf->canvas1Addr
8000 = spec2canvas(pic);
8001 }
8002 } else {
8003 vf->canvas0Addr = vf->canvas1Addr = 0;
8004 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8005 if (hevc->mmu_enable)
8006 vf->type |= VIDTYPE_SCATTER;
8007 }
8008 vf->compWidth = pic->width;
8009 vf->compHeight = pic->height;
8010 update_vf_memhandle(hevc, vf, pic);
8011 switch (hevc->bit_depth_luma) {
8012 case 9:
8013 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
8014 break;
8015 case 10:
8016 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
8017 | BITDEPTH_V10;
8018 break;
8019 default:
8020 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8021 break;
8022 }
8023 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8024 vf->bitdepth =
8025 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8026 if (hevc->mem_saving_mode == 1)
8027 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8028 vf->duration_pulldown = 0;
8029 vf->pts = 0;
8030 vf->pts_us64 = 0;
8031 set_frame_info(hevc, vf, pic);
8032
8033 vf->width = pic->width /
8034 get_double_write_ratio(hevc, pic->double_write_mode);
8035 vf->height = pic->height /
8036 get_double_write_ratio(hevc, pic->double_write_mode);
8037
8038 force_disp_pic_index |= 0x200;
8039 return vf;
8040 }
8041#endif
8042
8043 if (kfifo_get(&hevc->display_q, &vf)) {
8044 struct vframe_s *next_vf;
8045 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8046 hevc_print(hevc, 0,
8047 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8048 __func__, vf, vf->type, vf->index,
8049 get_pic_poc(hevc, vf->index & 0xff),
8050 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8051 vf->pts, vf->pts_us64,
8052 vf->duration);
8053#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8054 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
8055 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
8056 if (pic->aux_data_buf && pic->aux_data_size > 0) {
8057 int i;
8058 struct PIC_s *pic =
8059 hevc->m_PIC[vf->index & 0xff];
8060 hevc_print(hevc, 0,
8061 "pic 0x%p aux size %d:\n",
8062 pic, pic->aux_data_size);
8063 for (i = 0; i < pic->aux_data_size; i++) {
8064 hevc_print_cont(hevc, 0,
8065 "%02x ", pic->aux_data_buf[i]);
8066 if (((i + 1) & 0xf) == 0)
8067 hevc_print_cont(hevc, 0, "\n");
8068 }
8069 hevc_print_cont(hevc, 0, "\n");
8070 }
8071 }
8072#endif
8073 hevc->show_frame_num++;
8074 hevc->vf_get_count++;
8075
8076 if (kfifo_peek(&hevc->display_q, &next_vf)) {
8077 vf->next_vf_pts_valid = true;
8078 vf->next_vf_pts = next_vf->pts;
8079 } else
8080 vf->next_vf_pts_valid = false;
8081
8082 return vf;
8083 }
8084
8085 return NULL;
8086}
8087static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
8088 int i;
8089 for (i = 0; i < VF_POOL_SIZE; i++) {
8090 if (vf == &hevc->vfpool[i])
8091 return true;
8092 }
8093 pr_info(" h265 invalid vf has been put, vf = %p\n", vf);
8094 for (i = 0; i < VF_POOL_SIZE; i++) {
8095 pr_info("valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
8096 }
8097 return false;
8098}
8099
8100static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
8101{
8102 unsigned long flags;
8103#ifdef MULTI_INSTANCE_SUPPORT
8104 struct vdec_s *vdec = op_arg;
8105 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8106#else
8107 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8108#endif
8109 unsigned char index_top;
8110 unsigned char index_bot;
8111
8112 if (vf && (vf_valid_check(vf, hevc) == false))
8113 return;
8114 if (vf == (&hevc->vframe_dummy))
8115 return;
8116 index_top = vf->index & 0xff;
8117 index_bot = (vf->index >> 8) & 0xff;
8118 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8119 hevc_print(hevc, 0,
8120 "%s(type %d index 0x%x)\n",
8121 __func__, vf->type, vf->index);
8122 hevc->vf_put_count++;
8123 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8124 spin_lock_irqsave(&lock, flags);
8125
8126 if (index_top != 0xff
8127 && index_top < MAX_REF_PIC_NUM
8128 && hevc->m_PIC[index_top]) {
8129 if (hevc->is_used_v4l)
8130 hevc->m_PIC[index_top]->vframe_bound = true;
8131 if (hevc->m_PIC[index_top]->vf_ref > 0) {
8132 hevc->m_PIC[index_top]->vf_ref--;
8133
8134 if (hevc->m_PIC[index_top]->vf_ref == 0) {
8135 hevc->m_PIC[index_top]->output_ready = 0;
8136
8137 if (hevc->wait_buf != 0)
8138 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8139 0x1);
8140 }
8141 }
8142 }
8143
8144 if (index_bot != 0xff
8145 && index_bot < MAX_REF_PIC_NUM
8146 && hevc->m_PIC[index_bot]) {
8147 if (hevc->is_used_v4l)
8148 hevc->m_PIC[index_bot]->vframe_bound = true;
8149 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
8150 hevc->m_PIC[index_bot]->vf_ref--;
8151
8152 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
8153 hevc->m_PIC[index_bot]->output_ready = 0;
8154 if (hevc->wait_buf != 0)
8155 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8156 0x1);
8157 }
8158 }
8159 }
8160 spin_unlock_irqrestore(&lock, flags);
8161}
8162
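/*
 * vh265_event_cb() answers receiver events.  As an illustrative sketch only
 * (the receiver side lives outside this file, and the notify helper named
 * below is an assumption about the vfm API rather than code taken from a
 * real receiver), a consumer of this provider could query the per-frame aux
 * data (DV/HDR metadata) roughly like this:
 *
 *	struct provider_aux_req_s req = {0};
 *
 *	req.vf = vf;		// vframe obtained from vf_get()
 *	req.bot_flag = 0;	// 0: top/frame index, 1: bottom field index
 *	vf_notify_provider(receiver_name,
 *		VFRAME_EVENT_RECEIVER_GET_AUX_DATA, (void *)&req);
 *	// on return, req.aux_buf/req.aux_size describe the metadata and
 *	// req.dv_enhance_exist tells whether a DV enhancement layer exists
 */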
8163static int vh265_event_cb(int type, void *data, void *op_arg)
8164{
8165 unsigned long flags;
8166#ifdef MULTI_INSTANCE_SUPPORT
8167 struct vdec_s *vdec = op_arg;
8168 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8169#else
8170 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8171#endif
8172 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8173#if 0
8174 amhevc_stop();
8175#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8176 vf_light_unreg_provider(&vh265_vf_prov);
8177#endif
8178 spin_lock_irqsave(&hevc->lock, flags);
8179 vh265_local_init();
8180 vh265_prot_init();
8181 spin_unlock_irqrestore(&hevc->lock, flags);
8182#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8183 vf_reg_provider(&vh265_vf_prov);
8184#endif
8185 amhevc_start();
8186#endif
8187 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8188 struct provider_aux_req_s *req =
8189 (struct provider_aux_req_s *)data;
8190 unsigned char index;
8191
8192 if (!req->vf) {
8193 req->aux_size = hevc->vf_put_count;
8194 return 0;
8195 }
8196 spin_lock_irqsave(&lock, flags);
8197 index = req->vf->index & 0xff;
8198 req->aux_buf = NULL;
8199 req->aux_size = 0;
8200 if (req->bot_flag)
8201 index = (req->vf->index >> 8) & 0xff;
8202 if (index != 0xff
8203 && index < MAX_REF_PIC_NUM
8204 && hevc->m_PIC[index]) {
8205 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8206 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8207#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8208 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8209 req->dv_enhance_exist = false;
8210 else
8211 req->dv_enhance_exist =
8212 hevc->m_PIC[index]->dv_enhance_exist;
8213 hevc_print(hevc, H265_DEBUG_DV,
8214 "query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux size 0x%x\n",
8215 req->vf,
8216 hevc->m_PIC[index]->POC, index,
8217 req->dv_enhance_exist, req->aux_size);
8218#else
8219 req->dv_enhance_exist = 0;
8220#endif
8221 }
8222 spin_unlock_irqrestore(&lock, flags);
8223
8224 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8225 hevc_print(hevc, 0,
8226 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8227 __func__, type, index, req->aux_size);
8228#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8229 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8230 if ((force_bypass_dvenl & 0x80000000) == 0) {
8231 hevc_print(hevc, 0,
8232 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8233 __func__);
8234 hevc->bypass_dvenl_enable = 1;
8235 }
8236
8237#endif
8238 }
8239 return 0;
8240}
8241
8242#ifdef HEVC_PIC_STRUCT_SUPPORT
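/*
 * process_pending_vframe() handles field pairing for interlaced streams.
 * pending_q holds at most one vframe carrying a single decoded field that
 * is still waiting for its mate: if more than one entry accumulates, the
 * oldest is recycled back to newframe_q; if the head entry can be paired
 * with pair_pic (top with bottom or vice versa), the missing index byte
 * and canvas address are filled in and the vframe is queued to display_q;
 * if pair_pic has already been recycled (vf_ref <= 0), the pending vframe
 * is sent to display_q unpaired.
 */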
8243static int process_pending_vframe(struct hevc_state_s *hevc,
8244 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8245{
8246 struct vframe_s *vf;
8247
8248 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8249 hevc_print(hevc, 0,
8250 "%s: pair_pic index 0x%x %s\n",
8251 __func__, pair_pic->index,
8252 pair_frame_top_flag ?
8253 "top" : "bot");
8254
8255 if (kfifo_len(&hevc->pending_q) > 1) {
8256 unsigned long flags;
8257 /* do not keep more than 1 frame pending */
8258 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8259 hevc_print(hevc, 0,
8260 "fatal error, no available buffer slot.");
8261 return -1;
8262 }
8263 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8264 hevc_print(hevc, 0,
8265 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8266 __func__, vf->index);
8267 if ((hevc->double_write_mode == 3) &&
8268 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8269 vf->type |= VIDTYPE_COMPRESS;
8270 if (hevc->mmu_enable)
8271 vf->type |= VIDTYPE_SCATTER;
8272 }
8273 hevc->vf_pre_count++;
8274 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8275 spin_lock_irqsave(&lock, flags);
8276 vf->index &= 0xff;
8277 hevc->m_PIC[vf->index]->output_ready = 0;
8278 if (hevc->wait_buf != 0)
8279 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8280 0x1);
8281 spin_unlock_irqrestore(&lock, flags);
8282
8283 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8284 }
8285
8286 if (kfifo_peek(&hevc->pending_q, &vf)) {
8287 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8288 /*
8289 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8290 *do not use it
8291 */
8292 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8293 hevc_print(hevc, 0,
8294 "fatal error, no available buffer slot.");
8295 return -1;
8296 }
8297 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8298 hevc_print(hevc, 0,
8299 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8300 __func__, vf->index);
8301 if (vf) {
8302 if ((hevc->double_write_mode == 3) &&
8303 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8304 vf->type |= VIDTYPE_COMPRESS;
8305 if (hevc->mmu_enable)
8306 vf->type |= VIDTYPE_SCATTER;
8307 }
8308 hevc->vf_pre_count++;
8309 kfifo_put(&hevc->display_q,
8310 (const struct vframe_s *)vf);
8311 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8312 }
8313 } else if ((!pair_frame_top_flag) &&
8314 (((vf->index >> 8) & 0xff) == 0xff)) {
8315 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8316 hevc_print(hevc, 0,
8317 "fatal error, no available buffer slot.");
8318 return -1;
8319 }
8320 if (vf) {
8321 if ((hevc->double_write_mode == 3) &&
8322 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8323 vf->type |= VIDTYPE_COMPRESS;
8324 if (hevc->mmu_enable)
8325 vf->type |= VIDTYPE_SCATTER;
8326 }
8327 vf->index &= 0xff;
8328 vf->index |= (pair_pic->index << 8);
8329 vf->canvas1Addr = spec2canvas(pair_pic);
8330 pair_pic->vf_ref++;
8331 kfifo_put(&hevc->display_q,
8332 (const struct vframe_s *)vf);
8333 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8334 hevc->vf_pre_count++;
8335 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8336 hevc_print(hevc, 0,
8337 "%s vf => display_q: (index 0x%x)\n",
8338 __func__, vf->index);
8339 }
8340 } else if (pair_frame_top_flag &&
8341 ((vf->index & 0xff) == 0xff)) {
8342 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8343 hevc_print(hevc, 0,
8344 "fatal error, no available buffer slot.");
8345 return -1;
8346 }
8347 if (vf) {
8348 if ((hevc->double_write_mode == 3) &&
8349 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8350 vf->type |= VIDTYPE_COMPRESS;
8351 if (hevc->mmu_enable)
8352 vf->type |= VIDTYPE_SCATTER;
8353 }
8354 vf->index &= 0xff00;
8355 vf->index |= pair_pic->index;
8356 vf->canvas0Addr = spec2canvas(pair_pic);
8357 pair_pic->vf_ref++;
8358 kfifo_put(&hevc->display_q,
8359 (const struct vframe_s *)vf);
8360 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8361 hevc->vf_pre_count++;
8362 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8363 hevc_print(hevc, 0,
8364 "%s vf => display_q: (index 0x%x)\n",
8365 __func__, vf->index);
8366 }
8367 }
8368 }
8369 return 0;
8370}
8371#endif
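/*
 * update_vf_memhandle() ties the vframe to the memory that backs it so the
 * underlying buffers can be referenced while the frame is displayed:
 * scatter (MMU) frames expose the mmu_box page handle as mem_handle and the
 * bmmu header buffer as mem_head_handle, while linearly allocated frames
 * only need the bmmu handle.
 */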
8372static void update_vf_memhandle(struct hevc_state_s *hevc,
8373 struct vframe_s *vf, struct PIC_s *pic)
8374{
8375 if (pic->index < 0) {
8376 vf->mem_handle = NULL;
8377 vf->mem_head_handle = NULL;
8378 } else if (vf->type & VIDTYPE_SCATTER) {
8379 vf->mem_handle =
8380 decoder_mmu_box_get_mem_handle(
8381 hevc->mmu_box, pic->index);
8382 vf->mem_head_handle =
8383 decoder_bmmu_box_get_mem_handle(
8384 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8385 } else {
8386 vf->mem_handle =
8387 decoder_bmmu_box_get_mem_handle(
8388 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8389 vf->mem_head_handle = NULL;
8390 /*vf->mem_head_handle =
8391 decoder_bmmu_box_get_mem_handle(
8392 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8393 }
8394 return;
8395}
8396
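/*
 * fill_frame_info() publishes per-frame QoS statistics (frame type, size,
 * pts and the min/avg/max motion vector, QP and skip values collected
 * earlier in the decode path) to the vdec core through
 * vdec_fill_frame_info() when frameinfo reporting is enabled.
 */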
8397static void fill_frame_info(struct hevc_state_s *hevc,
8398 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8399{
8400 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8401 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8402 vframe_qos->type = 4;
8403 else if (pic->slice_type == I_SLICE)
8404 vframe_qos->type = 1;
8405 else if (pic->slice_type == P_SLICE)
8406 vframe_qos->type = 2;
8407 else if (pic->slice_type == B_SLICE)
8408 vframe_qos->type = 3;
8409/*
8410#define SHOW_QOS_INFO
8411*/
8412 vframe_qos->size = framesize;
8413 vframe_qos->pts = pts;
8414#ifdef SHOW_QOS_INFO
8415 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8416#endif
8417
8418
8419 vframe_qos->max_mv = pic->max_mv;
8420 vframe_qos->avg_mv = pic->avg_mv;
8421 vframe_qos->min_mv = pic->min_mv;
8422#ifdef SHOW_QOS_INFO
8423 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8424 vframe_qos->max_mv,
8425 vframe_qos->avg_mv,
8426 vframe_qos->min_mv);
8427#endif
8428
8429 vframe_qos->max_qp = pic->max_qp;
8430 vframe_qos->avg_qp = pic->avg_qp;
8431 vframe_qos->min_qp = pic->min_qp;
8432#ifdef SHOW_QOS_INFO
8433 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8434 vframe_qos->max_qp,
8435 vframe_qos->avg_qp,
8436 vframe_qos->min_qp);
8437#endif
8438
8439 vframe_qos->max_skip = pic->max_skip;
8440 vframe_qos->avg_skip = pic->avg_skip;
8441 vframe_qos->min_skip = pic->min_skip;
8442#ifdef SHOW_QOS_INFO
8443 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8444 vframe_qos->max_skip,
8445 vframe_qos->avg_skip,
8446 vframe_qos->min_skip);
8447#endif
8448
8449 vframe_qos->num++;
8450
8451 if (hevc->frameinfo_enable)
8452 vdec_fill_frame_info(vframe_qos, 1);
8453}
8454
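/*
 * prepare_display_buf() turns a fully decoded PIC into one or more vframes:
 * it takes a free vframe from newframe_q, resolves the pts (lookup by
 * stream offset, or the chunk pts in frame mode), fills the canvas/compress
 * addresses, bit depth and conformance-window crop, and finally queues the
 * result to display_q (or to pending_q when only a single field is
 * available and pairing is still outstanding).
 */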
8455static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8456{
8457#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8458 struct vdec_s *vdec = hw_to_vdec(hevc);
8459#endif
8460 struct vframe_s *vf = NULL;
8461 int stream_offset = pic->stream_offset;
8462 unsigned short slice_type = pic->slice_type;
8463 u32 frame_size;
8464
8465 if (force_disp_pic_index & 0x100) {
8466 /*recycle directly*/
8467 pic->output_ready = 0;
8468 return -1;
8469 }
8470 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8471 hevc_print(hevc, 0,
8472 "fatal error, no available buffer slot.");
8473 return -1;
8474 }
8475 display_frame_count[hevc->index]++;
8476 if (vf) {
8477 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8478 "%s: pic index 0x%x\n",
8479 __func__, pic->index);*/
8480
8481 if (hevc->is_used_v4l) {
8482 vf->v4l_mem_handle
8483 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8484 if (hevc->mmu_enable) {
8485 vf->mm_box.bmmu_box = hevc->bmmu_box;
8486 vf->mm_box.bmmu_idx = VF_BUFFER_IDX(pic->BUF_index);
8487 vf->mm_box.mmu_box = hevc->mmu_box;
8488 vf->mm_box.mmu_idx = pic->index;
8489 }
8490 }
8491
8492#ifdef MULTI_INSTANCE_SUPPORT
8493 if (vdec_frame_based(hw_to_vdec(hevc))) {
8494 vf->pts = pic->pts;
8495 vf->pts_us64 = pic->pts64;
8496 vf->timestamp = pic->timestamp;
8497 }
8498 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8499 stream_offset, &vf->pts, 0) != 0) { */
8500#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8501 else if (vdec->master == NULL) {
8502#else
8503 else {
8504#endif
8505#endif
8506 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8507 "call pts_lookup_offset_us64(0x%x)\n",
8508 stream_offset);
8509 if (pts_lookup_offset_us64
8510 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8511 &frame_size, 0,
8512 &vf->pts_us64) != 0) {
8513#ifdef DEBUG_PTS
8514 hevc->pts_missed++;
8515#endif
8516 vf->pts = 0;
8517 vf->pts_us64 = 0;
8518 }
8519#ifdef DEBUG_PTS
8520 else
8521 hevc->pts_hit++;
8522#endif
8523#ifdef MULTI_INSTANCE_SUPPORT
8524#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8525 } else {
8526 vf->pts = 0;
8527 vf->pts_us64 = 0;
8528 }
8529#else
8530 }
8531#endif
8532#endif
8533 if (pts_unstable && (hevc->frame_dur > 0))
8534 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8535
8536 fill_frame_info(hevc, pic, frame_size, vf->pts);
8537
8538 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8539 && hevc->get_frame_dur) {
8540 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8541
8542 if (pts_diff < 0) {
8543 hevc->pts_mode_switching_count++;
8544 hevc->pts_mode_recovery_count = 0;
8545
8546 if (hevc->pts_mode_switching_count >=
8547 PTS_MODE_SWITCHING_THRESHOLD) {
8548 hevc->pts_mode =
8549 PTS_NONE_REF_USE_DURATION;
8550 hevc_print(hevc, 0,
8551 "HEVC: switch to n_d mode.\n");
8552 }
8553
8554 } else {
8555 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8556
8557 hevc->pts_mode_recovery_count++;
8558 if (hevc->pts_mode_recovery_count > p) {
8559 hevc->pts_mode_switching_count = 0;
8560 hevc->pts_mode_recovery_count = 0;
8561 }
8562 }
8563 }
8564
8565 if (vf->pts != 0)
8566 hevc->last_lookup_pts = vf->pts;
8567
8568 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8569 && (slice_type != 2))
8570 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8571 hevc->last_pts = vf->pts;
8572
8573 if (vf->pts_us64 != 0)
8574 hevc->last_lookup_pts_us64 = vf->pts_us64;
8575
8576 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8577 && (slice_type != 2)) {
8578 vf->pts_us64 =
8579 hevc->last_pts_us64 +
8580 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8581 }
8582 hevc->last_pts_us64 = vf->pts_us64;
8583 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8584 hevc_print(hevc, 0,
8585 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8586 vf->pts, vf->pts_us64);
8587 }
8588
8589 /*
8590 *vf->index:
8591 *(1) vf->type is VIDTYPE_PROGRESSIVE
8592 * and vf->canvas0Addr != vf->canvas1Addr,
8593 * vf->index[7:0] is the index of top pic
8594 * vf->index[15:8] is the index of bot pic
8595 *(2) other cases,
8596 * only vf->index[7:0] is used
8597 * vf->index[15:8] == 0xff
8598 */
8599 vf->index = 0xff00 | pic->index;
8600#if 1
8601/*SUPPORT_10BIT*/
8602 if (pic->double_write_mode & 0x10) {
8603 /* double write only */
8604 vf->compBodyAddr = 0;
8605 vf->compHeadAddr = 0;
8606 } else {
8607
8608 if (hevc->mmu_enable) {
8609 vf->compBodyAddr = 0;
8610 vf->compHeadAddr = pic->header_adr;
8611 } else {
8612 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8613 vf->compHeadAddr = pic->mc_y_adr +
8614 pic->losless_comp_body_size;
8615 vf->mem_head_handle = NULL;
8616 }
8617
8618 /*head adr*/
8619 vf->canvas0Addr = vf->canvas1Addr = 0;
8620 }
8621 if (pic->double_write_mode) {
8622 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8623 vf->type |= VIDTYPE_VIU_NV21;
8624
8625 if ((pic->double_write_mode == 3) &&
8626 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8627 vf->type |= VIDTYPE_COMPRESS;
8628 if (hevc->mmu_enable)
8629 vf->type |= VIDTYPE_SCATTER;
8630 }
8631#ifdef MULTI_INSTANCE_SUPPORT
8632 if (hevc->m_ins_flag &&
8633 (get_dbg_flag(hevc)
8634 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8635 vf->canvas0Addr = vf->canvas1Addr = -1;
8636 vf->plane_num = 2;
8637 vf->canvas0_config[0] =
8638 pic->canvas_config[0];
8639 vf->canvas0_config[1] =
8640 pic->canvas_config[1];
8641
8642 vf->canvas1_config[0] =
8643 pic->canvas_config[0];
8644 vf->canvas1_config[1] =
8645 pic->canvas_config[1];
8646
8647 } else
8648#endif
8649 vf->canvas0Addr = vf->canvas1Addr
8650 = spec2canvas(pic);
8651 } else {
8652 vf->canvas0Addr = vf->canvas1Addr = 0;
8653 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8654 if (hevc->mmu_enable)
8655 vf->type |= VIDTYPE_SCATTER;
8656 }
8657 vf->compWidth = pic->width;
8658 vf->compHeight = pic->height;
8659 update_vf_memhandle(hevc, vf, pic);
8660 switch (pic->bit_depth_luma) {
8661 case 9:
8662 vf->bitdepth = BITDEPTH_Y9;
8663 break;
8664 case 10:
8665 vf->bitdepth = BITDEPTH_Y10;
8666 break;
8667 default:
8668 vf->bitdepth = BITDEPTH_Y8;
8669 break;
8670 }
8671 switch (pic->bit_depth_chroma) {
8672 case 9:
8673 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8674 break;
8675 case 10:
8676 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8677 break;
8678 default:
8679 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8680 break;
8681 }
8682 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8683 vf->bitdepth =
8684 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8685 if (pic->mem_saving_mode == 1)
8686 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8687#else
8688 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8689 vf->type |= VIDTYPE_VIU_NV21;
8690 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8691#endif
8692 set_frame_info(hevc, vf, pic);
8693 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8694 /* hevc_print(hevc, 0,
8695 "aaa: %d/%d, %d/%d\n",
8696 vf->width,vf->height, pic->width, pic->height); */
8697 vf->width = pic->width;
8698 vf->height = pic->height;
8699
8700 if (force_w_h != 0) {
8701 vf->width = (force_w_h >> 16) & 0xffff;
8702 vf->height = force_w_h & 0xffff;
8703 }
8704 if (force_fps & 0x100) {
8705 u32 rate = force_fps & 0xff;
8706
8707 if (rate)
8708 vf->duration = 96000/rate;
8709 else
8710 vf->duration = 0;
8711 }
8712 if (force_fps & 0x200) {
8713 vf->pts = 0;
8714 vf->pts_us64 = 0;
8715 }
8716 /*
8717 * !!! to do ...
8718 * need to move the code below to get_new_pic(),
8719 * hevc->xxx can only be used by the currently decoded pic
8720 */
8721 if (pic->conformance_window_flag &&
8722 (get_dbg_flag(hevc) &
8723 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
8724 unsigned int SubWidthC, SubHeightC;
8725
8726 switch (pic->chroma_format_idc) {
8727 case 1:
8728 SubWidthC = 2;
8729 SubHeightC = 2;
8730 break;
8731 case 2:
8732 SubWidthC = 2;
8733 SubHeightC = 1;
8734 break;
8735 default:
8736 SubWidthC = 1;
8737 SubHeightC = 1;
8738 break;
8739 }
8740 vf->width -= SubWidthC *
8741 (pic->conf_win_left_offset +
8742 pic->conf_win_right_offset);
8743 vf->height -= SubHeightC *
8744 (pic->conf_win_top_offset +
8745 pic->conf_win_bottom_offset);
8746
8747 vf->compWidth -= SubWidthC *
8748 (pic->conf_win_left_offset +
8749 pic->conf_win_right_offset);
8750 vf->compHeight -= SubHeightC *
8751 (pic->conf_win_top_offset +
8752 pic->conf_win_bottom_offset);
8753
8754 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
8755 hevc_print(hevc, 0,
8756 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
8757 pic->chroma_format_idc,
8758 pic->conf_win_left_offset,
8759 pic->conf_win_right_offset,
8760 pic->conf_win_top_offset,
8761 pic->conf_win_bottom_offset,
8762 vf->width, vf->height, vf->compWidth, vf->compHeight);
8763 }
8764
8765 vf->width = vf->width /
8766 get_double_write_ratio(hevc, pic->double_write_mode);
8767 vf->height = vf->height /
8768 get_double_write_ratio(hevc, pic->double_write_mode);
8769#ifdef HEVC_PIC_STRUCT_SUPPORT
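 /*
 * pic_struct follows the HEVC picture timing SEI semantics: 1/2 are single
 * top/bottom fields, 3/4 a top+bottom (or bottom+top) field pair, 5/6 a
 * three-field sequence, 7/8 frame doubling/tripling, and 9-12 fields to be
 * paired with the previous or next picture.  Each case below fans the
 * decoded PIC out into the matching number of vframes and adjusts the
 * duration/height accordingly.
 */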
8770 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
8771 struct vframe_s *vf2;
8772
8773 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8774 hevc_print(hevc, 0,
8775 "pic_struct = %d index 0x%x\n",
8776 pic->pic_struct,
8777 pic->index);
8778
8779 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8780 hevc_print(hevc, 0,
8781 "fatal error, no available buffer slot.");
8782 return -1;
8783 }
8784 pic->vf_ref = 2;
8785 vf->duration = vf->duration>>1;
8786 memcpy(vf2, vf, sizeof(struct vframe_s));
8787
8788 if (pic->pic_struct == 3) {
8789 vf->type = VIDTYPE_INTERLACE_TOP
8790 | VIDTYPE_VIU_NV21;
8791 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8792 | VIDTYPE_VIU_NV21;
8793 } else {
8794 vf->type = VIDTYPE_INTERLACE_BOTTOM
8795 | VIDTYPE_VIU_NV21;
8796 vf2->type = VIDTYPE_INTERLACE_TOP
8797 | VIDTYPE_VIU_NV21;
8798 }
8799 hevc->vf_pre_count++;
8800 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8801 kfifo_put(&hevc->display_q,
8802 (const struct vframe_s *)vf);
8803 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8804 hevc->vf_pre_count++;
8805 kfifo_put(&hevc->display_q,
8806 (const struct vframe_s *)vf2);
8807 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8808 } else if (pic->pic_struct == 5
8809 || pic->pic_struct == 6) {
8810 struct vframe_s *vf2, *vf3;
8811
8812 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8813 hevc_print(hevc, 0,
8814 "pic_struct = %d index 0x%x\n",
8815 pic->pic_struct,
8816 pic->index);
8817
8818 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8819 hevc_print(hevc, 0,
8820 "fatal error, no available buffer slot.");
8821 return -1;
8822 }
8823 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
8824 hevc_print(hevc, 0,
8825 "fatal error, no available buffer slot.");
8826 return -1;
8827 }
8828 pic->vf_ref = 3;
8829 vf->duration = vf->duration/3;
8830 memcpy(vf2, vf, sizeof(struct vframe_s));
8831 memcpy(vf3, vf, sizeof(struct vframe_s));
8832
8833 if (pic->pic_struct == 5) {
8834 vf->type = VIDTYPE_INTERLACE_TOP
8835 | VIDTYPE_VIU_NV21;
8836 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8837 | VIDTYPE_VIU_NV21;
8838 vf3->type = VIDTYPE_INTERLACE_TOP
8839 | VIDTYPE_VIU_NV21;
8840 } else {
8841 vf->type = VIDTYPE_INTERLACE_BOTTOM
8842 | VIDTYPE_VIU_NV21;
8843 vf2->type = VIDTYPE_INTERLACE_TOP
8844 | VIDTYPE_VIU_NV21;
8845 vf3->type = VIDTYPE_INTERLACE_BOTTOM
8846 | VIDTYPE_VIU_NV21;
8847 }
8848 hevc->vf_pre_count++;
8849 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8850 kfifo_put(&hevc->display_q,
8851 (const struct vframe_s *)vf);
8852 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8853 hevc->vf_pre_count++;
8854 kfifo_put(&hevc->display_q,
8855 (const struct vframe_s *)vf2);
8856 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8857 hevc->vf_pre_count++;
8858 kfifo_put(&hevc->display_q,
8859 (const struct vframe_s *)vf3);
8860 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
8861
8862 } else if (pic->pic_struct == 9
8863 || pic->pic_struct == 10) {
8864 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8865 hevc_print(hevc, 0,
8866 "pic_struct = %d index 0x%x\n",
8867 pic->pic_struct,
8868 pic->index);
8869
8870 pic->vf_ref = 1;
8871 /* process previous pending vf*/
8872 process_pending_vframe(hevc,
8873 pic, (pic->pic_struct == 9));
8874
8875 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8876 /* process current vf */
8877 kfifo_put(&hevc->pending_q,
8878 (const struct vframe_s *)vf);
8879 vf->height <<= 1;
8880 if (pic->pic_struct == 9) {
8881 vf->type = VIDTYPE_INTERLACE_TOP
8882 | VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8883 process_pending_vframe(hevc,
8884 hevc->pre_bot_pic, 0);
8885 } else {
8886 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8887 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8888 vf->index = (pic->index << 8) | 0xff;
8889 process_pending_vframe(hevc,
8890 hevc->pre_top_pic, 1);
8891 }
8892
8893 if (hevc->vf_pre_count == 0)
8894 hevc->vf_pre_count++;
8895
8896 /**/
8897 if (pic->pic_struct == 9)
8898 hevc->pre_top_pic = pic;
8899 else
8900 hevc->pre_bot_pic = pic;
8901
8902 } else if (pic->pic_struct == 11
8903 || pic->pic_struct == 12) {
8904 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8905 hevc_print(hevc, 0,
8906 "pic_struct = %d index 0x%x\n",
8907 pic->pic_struct,
8908 pic->index);
8909 pic->vf_ref = 1;
8910 /* process previous pending vf*/
8911 process_pending_vframe(hevc, pic,
8912 (pic->pic_struct == 11));
8913
8914 /* put current into pending q */
8915 vf->height <<= 1;
8916 if (pic->pic_struct == 11)
8917 vf->type = VIDTYPE_INTERLACE_TOP |
8918 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8919 else {
8920 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8921 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8922 vf->index = (pic->index << 8) | 0xff;
8923 }
8924 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8925 kfifo_put(&hevc->pending_q,
8926 (const struct vframe_s *)vf);
8927 if (hevc->vf_pre_count == 0)
8928 hevc->vf_pre_count++;
8929
8930 /**/
8931 if (pic->pic_struct == 11)
8932 hevc->pre_top_pic = pic;
8933 else
8934 hevc->pre_bot_pic = pic;
8935
8936 } else {
8937 pic->vf_ref = 1;
8938
8939 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8940 hevc_print(hevc, 0,
8941 "pic_struct = %d index 0x%x\n",
8942 pic->pic_struct,
8943 pic->index);
8944
8945 switch (pic->pic_struct) {
8946 case 7:
8947 vf->duration <<= 1;
8948 break;
8949 case 8:
8950 vf->duration = vf->duration * 3;
8951 break;
8952 case 1:
8953 vf->height <<= 1;
8954 vf->type = VIDTYPE_INTERLACE_TOP |
8955 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8956 process_pending_vframe(hevc, pic, 1);
8957 hevc->pre_top_pic = pic;
8958 break;
8959 case 2:
8960 vf->height <<= 1;
8961 vf->type = VIDTYPE_INTERLACE_BOTTOM
8962 | VIDTYPE_VIU_NV21
8963 | VIDTYPE_VIU_FIELD;
8964 process_pending_vframe(hevc, pic, 0);
8965 hevc->pre_bot_pic = pic;
8966 break;
8967 }
8968 hevc->vf_pre_count++;
8969 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8970 kfifo_put(&hevc->display_q,
8971 (const struct vframe_s *)vf);
8972 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8973 }
8974#else
8975 vf->type_original = vf->type;
8976 pic->vf_ref = 1;
8977 hevc->vf_pre_count++;
8978 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8979 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
8980 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8981
8982 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8983 hevc_print(hevc, 0,
8984 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8985 __func__, vf->type, vf->index,
8986 get_pic_poc(hevc, vf->index & 0xff),
8987 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8988 vf->pts, vf->pts_us64,
8989 vf->duration);
8990#endif
8991#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8992 /*count info*/
8993 vdec_count_info(gvs, 0, stream_offset);
8994#endif
8995 hw_to_vdec(hevc)->vdec_fps_detec(hw_to_vdec(hevc)->id);
8996 if (without_display_mode == 0) {
8997 vf_notify_receiver(hevc->provider_name,
8998 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
8999 }
9000 else
9001 vh265_vf_put(vh265_vf_get(vdec), vdec);
9002 }
9003
9004 return 0;
9005}
9006
9007static int notify_v4l_eos(struct vdec_s *vdec)
9008{
9009 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
9010 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
9011 struct vframe_s *vf = &hw->vframe_dummy;
9012 struct vdec_v4l2_buffer *fb = NULL;
9013 int index = INVALID_IDX;
9014 ulong expires;
9015
9016 if (hw->is_used_v4l && hw->eos) {
9017 expires = jiffies + msecs_to_jiffies(2000);
9018 while (INVALID_IDX == (index = get_free_buf_idx(hw))) {
9019 if (time_after(jiffies, expires))
9020 break;
9021 }
9022
9023 if (index == INVALID_IDX) {
9024 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb) < 0) {
9025 pr_err("[%d] EOS get free buff fail.\n", ctx->id);
9026 return -1;
9027 }
9028 }
9029
9030 vf->type |= VIDTYPE_V4L_EOS;
9031 vf->timestamp = ULONG_MAX;
9032 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
9033 vf->v4l_mem_handle = (index == INVALID_IDX) ? (ulong)fb :
9034 hw->m_BUF[index].v4l_ref_buf_addr;
9035 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
9036 vf_notify_receiver(vdec->vf_provider_name,
9037 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9038
9039 pr_info("[%d] H265 EOS notify.\n", ctx->id);
9040 }
9041
9042 return 0;
9043}
9044
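/*
 * process_nal_sei() consumes SEI payloads directly from the stream shifter:
 * READ_HREG(HEVC_SHIFTED_DATA) exposes the next bits and the code advances
 * the shifter with WRITE_HREG(HEVC_SHIFT_COMMAND, (1 << 7) | n) after
 * consuming n bits.  Payload type 137 is the mastering display colour
 * volume message; its primaries, white point and luminance fields are
 * stored for later HDR signalling and any trailing bytes are skipped
 * 8 bits at a time.
 */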
9045static void process_nal_sei(struct hevc_state_s *hevc,
9046 int payload_type, int payload_size)
9047{
9048 unsigned short data;
9049
9050 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9051 hevc_print(hevc, 0,
9052 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
9053 payload_type, payload_size);
9054
9055 if (payload_type == 137) {
9056 int i, j;
9057 /* MASTERING_DISPLAY_COLOUR_VOLUME */
9058 if (payload_size >= 24) {
9059 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9060 hevc_print(hevc, 0,
9061 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
9062 for (i = 0; i < 3; i++) {
9063 for (j = 0; j < 2; j++) {
9064 data =
9065 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9066 hevc->primaries[i][j] = data;
9067 WRITE_HREG(HEVC_SHIFT_COMMAND,
9068 (1<<7)|16);
9069 if (get_dbg_flag(hevc) &
9070 H265_DEBUG_PRINT_SEI)
9071 hevc_print(hevc, 0,
9072 "\t\tprimaries[%1d][%1d] = %04x\n",
9073 i, j, hevc->primaries[i][j]);
9074 }
9075 }
9076 for (i = 0; i < 2; i++) {
9077 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9078 hevc->white_point[i] = data;
9079 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
9080 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9081 hevc_print(hevc, 0,
9082 "\t\twhite_point[%1d] = %04x\n",
9083 i, hevc->white_point[i]);
9084 }
9085 for (i = 0; i < 2; i++) {
9086 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9087 hevc->luminance[i] = data << 16;
9088 WRITE_HREG(HEVC_SHIFT_COMMAND,
9089 (1<<7)|16);
9090 data =
9091 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9092 hevc->luminance[i] |= data;
9093 WRITE_HREG(HEVC_SHIFT_COMMAND,
9094 (1<<7)|16);
9095 if (get_dbg_flag(hevc) &
9096 H265_DEBUG_PRINT_SEI)
9097 hevc_print(hevc, 0,
9098 "\t\tluminance[%1d] = %08x\n",
9099 i, hevc->luminance[i]);
9100 }
9101 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
9102 }
9103 payload_size -= 24;
9104 while (payload_size > 0) {
9105 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
9106 payload_size--;
9107 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
9108 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
9109 }
9110 }
9111}
9112
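/*
 * hevc_recover() restarts the front end after a stream error without
 * dropping the whole session: it stops the ucode, rewinds the parser to the
 * current HEVC read pointer, recomputes the 64-bit consumed-byte count,
 * soft-resets the HEVC blocks via DOS_SW_RESET3, reprograms the
 * stream/interrupt registers and restarts the firmware in "search next
 * start code" mode with skip_flag set.
 */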
9113static int hevc_recover(struct hevc_state_s *hevc)
9114{
9115 int ret = -1;
9116 u32 rem;
9117 u64 shift_byte_count64;
9118 unsigned int hevc_shift_byte_count;
9119 unsigned int hevc_stream_start_addr;
9120 unsigned int hevc_stream_end_addr;
9121 unsigned int hevc_stream_rd_ptr;
9122 unsigned int hevc_stream_wr_ptr;
9123 unsigned int hevc_stream_control;
9124 unsigned int hevc_stream_fifo_ctl;
9125 unsigned int hevc_stream_buf_size;
9126
9127 mutex_lock(&vh265_mutex);
9128#if 0
9129 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9130 int ii;
9131
9132 for (ii = 0; ii < 4; ii++)
9133 hevc_print(hevc, 0,
9134 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9135 if (((i + ii) & 0xf) == 0)
9136 hevc_print(hevc, 0, "\n");
9137 }
9138#endif
9139#define ES_VID_MAN_RD_PTR (1<<0)
9140 if (!hevc->init_flag) {
9141 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
9142 mutex_unlock(&vh265_mutex);
9143 return ret;
9144 }
9145 amhevc_stop();
9146 msleep(20);
9147 ret = 0;
9148 /* reset */
9149 WRITE_PARSER_REG(PARSER_VIDEO_RP, READ_VREG(HEVC_STREAM_RD_PTR));
9150 SET_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9151
9152 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
9153 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
9154 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
9155 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
9156 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
9157 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
9158 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
9159
9160 /* The HEVC stream buffer will be reset and restarted
9161 * from the current hevc_stream_rd_ptr position
9162 */
9163 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
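 /*
 * HEVC_SHIFT_BYTE_COUNT is only 32 bits wide, so shift_byte_count_hi is
 * bumped whenever the low word wraps.  The 64-bit total is then snapped
 * down to a whole multiple of the stream buffer size and the read-pointer
 * offset is added back; if the remainder was already past that offset the
 * read pointer has wrapped once more, so one extra buffer size is added to
 * keep the count monotonic.
 */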
9164 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9165 if ((hevc->shift_byte_count_lo & (1 << 31))
9166 && ((hevc_shift_byte_count & (1 << 31)) == 0))
9167 hevc->shift_byte_count_hi++;
9168
9169 hevc->shift_byte_count_lo = hevc_shift_byte_count;
9170 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9171 hevc->shift_byte_count_lo;
9172 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9173 shift_byte_count64 -= rem;
9174 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9175
9176 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9177 shift_byte_count64 += hevc_stream_buf_size;
9178
9179 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9180 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
9181
9182 WRITE_VREG(DOS_SW_RESET3,
9183 /* (1<<2)| */
9184 (1 << 3) | (1 << 4) | (1 << 8) |
9185 (1 << 11) | (1 << 12) | (1 << 14)
9186 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9187 WRITE_VREG(DOS_SW_RESET3, 0);
9188
9189 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9190 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9191 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9192 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9193 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9194 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9195 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9196
9197 hevc_config_work_space_hw(hevc);
9198 decoder_hw_reset();
9199
9200 hevc->have_vps = 0;
9201 hevc->have_sps = 0;
9202 hevc->have_pps = 0;
9203
9204 hevc->have_valid_start_slice = 0;
9205
9206 if (get_double_write_mode(hevc) & 0x10)
9207 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9208 0x1 << 31 /* Enable NV21 reference read mode for MC */
9209 );
9210
9211 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9212 /* clear mailbox interrupt */
9213 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9214 /* enable mailbox interrupt */
9215 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9216 /* disable PSCALE for hardware sharing */
9217 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9218
9219 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9220
9221 WRITE_VREG(DEBUG_REG1, 0x0);
9222
9223 if ((error_handle_policy & 1) == 0) {
9224 if ((error_handle_policy & 4) == 0) {
9225 /* ucode auto mode, and do not check vps/sps/pps/idr */
9226 WRITE_VREG(NAL_SEARCH_CTL,
9227 0xc);
9228 } else {
9229 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9230 }
9231 } else {
9232 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9233 }
9234
9235 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9236 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9237 WRITE_VREG(NAL_SEARCH_CTL,
9238 READ_VREG(NAL_SEARCH_CTL)
9239 | ((parser_sei_enable & 0x7) << 17));
9240#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9241 WRITE_VREG(NAL_SEARCH_CTL,
9242 READ_VREG(NAL_SEARCH_CTL) |
9243 ((parser_dolby_vision_enable & 0x1) << 20));
9244#endif
9245 config_decode_mode(hevc);
9246 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9247
9248 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9249 /* amhevc_disable(); */
9250 /* return -EBUSY; */
9251 /* } */
9252#if 0
9253 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9254 int ii;
9255
9256 for (ii = 0; ii < 4; ii++) {
9257 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9258 hevc_print(hevc, 0,
9259 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9260 }
9261 if (((i + ii) & 0xf) == 0)
9262 hevc_print(hevc, 0, "\n");
9263 }
9264#endif
9265 init_pic_list_hw(hevc);
9266
9267 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9268 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9269
9270#ifdef SWAP_HEVC_UCODE
9271 if (!tee_enabled() && hevc->is_swap &&
9272 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9273 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9274 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9275 }
9276#endif
9277 amhevc_start();
9278
9279 /* skip, search next start code */
9280 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9281 hevc->skip_flag = 1;
9282#ifdef ERROR_HANDLE_DEBUG
9283 if (dbg_nal_skip_count & 0x20000) {
9284 dbg_nal_skip_count &= ~0x20000;
9285 mutex_unlock(&vh265_mutex);
9286 return ret;
9287 }
9288#endif
9289 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9290 /* Interrupt Amrisc to execute */
9291 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9292#ifdef MULTI_INSTANCE_SUPPORT
9293 if (!hevc->m_ins_flag)
9294#endif
9295 hevc->first_pic_after_recover = 1;
9296 mutex_unlock(&vh265_mutex);
9297 return ret;
9298}
9299
9300static void dump_aux_buf(struct hevc_state_s *hevc)
9301{
9302 int i;
9303 unsigned short *aux_adr =
9304 (unsigned short *)
9305 hevc->aux_addr;
9306 unsigned int aux_size =
9307 (READ_VREG(HEVC_AUX_DATA_SIZE)
9308 >> 16) << 4;
9309
9310 if (hevc->prefix_aux_size > 0) {
9311 hevc_print(hevc, 0,
9312 "prefix aux: (size %d)\n",
9313 aux_size);
9314 for (i = 0; i <
9315 (aux_size >> 1); i++) {
9316 hevc_print_cont(hevc, 0,
9317 "%04x ",
9318 *(aux_adr + i));
9319 if (((i + 1) & 0xf)
9320 == 0)
9321 hevc_print_cont(hevc,
9322 0, "\n");
9323 }
9324 }
9325 if (hevc->suffix_aux_size > 0) {
9326 aux_adr = (unsigned short *)
9327 (hevc->aux_addr +
9328 hevc->prefix_aux_size);
9329 aux_size =
9330 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9331 << 4;
9332 hevc_print(hevc, 0,
9333 "suffix aux: (size %d)\n",
9334 aux_size);
9335 for (i = 0; i <
9336 (aux_size >> 1); i++) {
9337 hevc_print_cont(hevc, 0,
9338 "%04x ", *(aux_adr + i));
9339 if (((i + 1) & 0xf) == 0)
9340 hevc_print_cont(hevc, 0, "\n");
9341 }
9342 }
9343}
9344
9345#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9346static void dolby_get_meta(struct hevc_state_s *hevc)
9347{
9348 struct vdec_s *vdec = hw_to_vdec(hevc);
9349
9350 if (get_dbg_flag(hevc) &
9351 H265_DEBUG_BUFMGR_MORE)
9352 dump_aux_buf(hevc);
9353 if (vdec->dolby_meta_with_el || vdec->slave) {
9354 set_aux_data(hevc,
9355 hevc->cur_pic, 0, 0);
9356 } else if (vdec->master) {
9357 struct hevc_state_s *hevc_ba =
9358 (struct hevc_state_s *)
9359 vdec->master->private;
9360 /*do not use hevc_ba*/
9361 set_aux_data(hevc,
9362 hevc_ba->cur_pic,
9363 0, 1);
9364 set_aux_data(hevc,
9365 hevc->cur_pic, 0, 2);
9366 }
9367}
9368#endif
9369
9370static void read_decode_info(struct hevc_state_s *hevc)
9371{
9372 uint32_t decode_info =
9373 READ_HREG(HEVC_DECODE_INFO);
9374 hevc->start_decoding_flag |=
9375 (decode_info & 0xff);
9376 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9377}
9378
9379static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9380{
9381 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9382 unsigned int dec_status = hevc->dec_status;
9383 int i, ret;
9384
9385#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9386 struct vdec_s *vdec = hw_to_vdec(hevc);
9387#endif
9388
9389 if (hevc->eos)
9390 return IRQ_HANDLED;
9391 if (
9392#ifdef MULTI_INSTANCE_SUPPORT
9393 (!hevc->m_ins_flag) &&
9394#endif
9395 hevc->error_flag == 1) {
9396 if ((error_handle_policy & 0x10) == 0) {
9397 if (hevc->cur_pic) {
9398 int current_lcu_idx =
9399 READ_VREG(HEVC_PARSER_LCU_START)
9400 & 0xffffff;
9401 if (current_lcu_idx <
9402 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9403 hevc->cur_pic->error_mark = 1;
9404
9405 }
9406 }
9407 if ((error_handle_policy & 1) == 0) {
9408 hevc->error_skip_nal_count = 1;
9409 /* manually search for NALs, skip error_skip_nal_count
9410 * NALs, then trigger the HEVC_NAL_SEARCH_DONE irq
9411 */
9412 WRITE_VREG(NAL_SEARCH_CTL,
9413 (error_skip_nal_count << 4) | 0x1);
9414 } else {
9415 hevc->error_skip_nal_count = error_skip_nal_count;
9416 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9417 }
9418 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9419#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9420 || vdec->master
9421 || vdec->slave
9422#endif
9423 ) {
9424 WRITE_VREG(NAL_SEARCH_CTL,
9425 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9426 }
9427 WRITE_VREG(NAL_SEARCH_CTL,
9428 READ_VREG(NAL_SEARCH_CTL)
9429 | ((parser_sei_enable & 0x7) << 17));
9430#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9431 WRITE_VREG(NAL_SEARCH_CTL,
9432 READ_VREG(NAL_SEARCH_CTL) |
9433 ((parser_dolby_vision_enable & 0x1) << 20));
9434#endif
9435 config_decode_mode(hevc);
9436 /* search new nal */
9437 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9438 /* Interrupt Amrisc to execute */
9439 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9440
9441 /* hevc_print(hevc, 0,
9442 *"%s: error handle\n", __func__);
9443 */
9444 hevc->error_flag = 2;
9445 return IRQ_HANDLED;
9446 } else if (
9447#ifdef MULTI_INSTANCE_SUPPORT
9448 (!hevc->m_ins_flag) &&
9449#endif
9450 hevc->error_flag == 3) {
9451 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9452 hevc_recover(hevc);
9453 hevc->error_flag = 0;
9454
9455 if ((error_handle_policy & 0x10) == 0) {
9456 if (hevc->cur_pic) {
9457 int current_lcu_idx =
9458 READ_VREG(HEVC_PARSER_LCU_START)
9459 & 0xffffff;
9460 if (current_lcu_idx <
9461 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9462 hevc->cur_pic->error_mark = 1;
9463
9464 }
9465 }
9466 if ((error_handle_policy & 1) == 0) {
9467 /* need to skip some data when
9468 * error_flag 3 is triggered,
9469 */
9470 /* to avoid hevc_recover() being called
9471 * many times at the same bitstream position
9472 */
9473 hevc->error_skip_nal_count = 1;
9474 /* manually search for NALs, skip error_skip_nal_count
9475 * NALs, then trigger the HEVC_NAL_SEARCH_DONE irq
9476 */
9477 WRITE_VREG(NAL_SEARCH_CTL,
9478 (error_skip_nal_count << 4) | 0x1);
9479 }
9480
9481 if ((error_handle_policy & 0x2) == 0) {
9482 hevc->have_vps = 1;
9483 hevc->have_sps = 1;
9484 hevc->have_pps = 1;
9485 }
9486 return IRQ_HANDLED;
9487 }
9488 if (!hevc->m_ins_flag) {
9489 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9490 if ((hevc->shift_byte_count_lo & (1 << 31))
9491 && ((i & (1 << 31)) == 0))
9492 hevc->shift_byte_count_hi++;
9493 hevc->shift_byte_count_lo = i;
9494 }
9495#ifdef MULTI_INSTANCE_SUPPORT
9496 mutex_lock(&hevc->chunks_mutex);
9497 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9498 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9499 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9500 && (hevc->chunk)) {
9501 hevc->cur_pic->pts = hevc->chunk->pts;
9502 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9503 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9504 }
9505 mutex_unlock(&hevc->chunks_mutex);
9506
9507 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9508 dec_status == HEVC_DECODE_BUFEMPTY2) {
9509 if (hevc->m_ins_flag) {
9510 read_decode_info(hevc);
9511 if (vdec_frame_based(hw_to_vdec(hevc))) {
9512 hevc->empty_flag = 1;
9513 goto pic_done;
9514 } else {
9515 if (
9516#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9517 vdec->master ||
9518 vdec->slave ||
9519#endif
9520 (data_resend_policy & 0x1)) {
9521 hevc->dec_result = DEC_RESULT_AGAIN;
9522 amhevc_stop();
9523 restore_decode_state(hevc);
9524 } else
9525 hevc->dec_result = DEC_RESULT_GET_DATA;
9526 }
9527 reset_process_time(hevc);
9528 vdec_schedule_work(&hevc->work);
9529 }
9530 return IRQ_HANDLED;
9531 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9532 (dec_status == HEVC_NAL_DECODE_DONE)
9533 ) {
9534 if (hevc->m_ins_flag) {
9535 read_decode_info(hevc);
9536 if (vdec_frame_based(hw_to_vdec(hevc))) {
9537 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9538 hevc->empty_flag = 1;
9539 goto pic_done;
9540 } else {
9541 hevc->dec_result = DEC_RESULT_AGAIN;
9542 amhevc_stop();
9543 restore_decode_state(hevc);
9544 }
9545
9546 reset_process_time(hevc);
9547 vdec_schedule_work(&hevc->work);
9548 }
9549
9550 return IRQ_HANDLED;
9551 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9552 if (hevc->m_ins_flag) {
9553 struct PIC_s *pic;
9554 struct PIC_s *pic_display;
9555 int decoded_poc;
9556#ifdef DETREFILL_ENABLE
9557 if (hevc->is_swap &&
9558 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9559 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9560 && READ_VREG(HEVC_SAO_DBG_MODE0))
9561 hevc->delrefill_check = 2;
9562 }
9563#endif
9564 hevc->empty_flag = 0;
9565pic_done:
9566 if (input_frame_based(hw_to_vdec(hevc)) &&
9567 frmbase_cont_bitlevel != 0 &&
9568 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9569 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9570 > frmbase_cont_bitlevel)) {
9571 /* handle the case: multiple pictures in one packet */
9572 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9573 "%s has more data index= %d, size=0x%x shiftcnt=0x%x)\n",
9574 __func__,
9575 hevc->decode_idx, hevc->decode_size,
9576 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9577 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9578 start_process_time(hevc);
9579 return IRQ_HANDLED;
9580 }
9581
9582 read_decode_info(hevc);
9583 get_picture_qos_info(hevc);
9584#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9585 hevc->start_parser_type = 0;
9586 hevc->switch_dvlayer_flag = 0;
9587#endif
9588 hevc->decoded_poc = hevc->curr_POC;
9589 hevc->decoding_pic = NULL;
9590 hevc->dec_result = DEC_RESULT_DONE;
9591#ifdef DETREFILL_ENABLE
9592 if (hevc->is_swap &&
9593 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9594 if (hevc->delrefill_check != 2)
9595#endif
9596
9597 amhevc_stop();
9598
9599 reset_process_time(hevc);
9600
9601 if (hevc->vf_pre_count == 0) {
9602 decoded_poc = hevc->curr_POC;
9603 pic = get_pic_by_POC(hevc, decoded_poc);
9604 if (pic && (pic->POC != INVALID_POC)) {
9605 /*PB skip control */
9606 if (pic->error_mark == 0
9607 && hevc->PB_skip_mode == 1) {
9608 /* start decoding after
9609 * first I
9610 */
9611 hevc->ignore_bufmgr_error |= 0x1;
9612 }
9613 if (hevc->ignore_bufmgr_error & 1) {
9614 if (hevc->PB_skip_count_after_decoding > 0) {
9615 hevc->PB_skip_count_after_decoding--;
9616 } else {
9617 /* start displaying */
9618 hevc->ignore_bufmgr_error |= 0x2;
9619 }
9620 }
9621 if (hevc->mmu_enable
9622 && ((hevc->double_write_mode & 0x10) == 0)) {
9623 if (!hevc->m_ins_flag) {
9624 hevc->used_4k_num =
9625 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9626
9627 if ((!is_skip_decoding(hevc, pic)) &&
9628 (hevc->used_4k_num >= 0) &&
9629 (hevc->cur_pic->scatter_alloc
9630 == 1)) {
9631 hevc_print(hevc,
9632 H265_DEBUG_BUFMGR_MORE,
9633 "%s pic index %d scatter_alloc %d page_start %d\n",
9634 "decoder_mmu_box_free_idx_tail",
9635 hevc->cur_pic->index,
9636 hevc->cur_pic->scatter_alloc,
9637 hevc->used_4k_num);
9638 decoder_mmu_box_free_idx_tail(
9639 hevc->mmu_box,
9640 hevc->cur_pic->index,
9641 hevc->used_4k_num);
9642 hevc->cur_pic->scatter_alloc
9643 = 2;
9644 }
9645 hevc->used_4k_num = -1;
9646 }
9647 }
9648
9649 pic->output_mark = 1;
9650 pic->recon_mark = 1;
9651 }
9652 check_pic_decoded_error(hevc,
9653 READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff);
9654 if (hevc->cur_pic != NULL &&
9655 (READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff) == 0
9656 && (hevc->lcu_x_num * hevc->lcu_y_num != 1))
9657 hevc->cur_pic->error_mark = 1;
9658force_output:
9659 pic_display = output_pic(hevc, 1);
9660 if (pic_display) {
9661 if ((pic_display->error_mark &&
9662 ((hevc->ignore_bufmgr_error &
9663 0x2) == 0))
9664 || (get_dbg_flag(hevc) &
9665 H265_DEBUG_DISPLAY_CUR_FRAME)
9666 || (get_dbg_flag(hevc) &
9667 H265_DEBUG_NO_DISPLAY)) {
9668 pic_display->output_ready = 0;
9669 if (get_dbg_flag(hevc) &
9670 H265_DEBUG_BUFMGR) {
9671 hevc_print(hevc, 0,
9672 "[BM] Display: POC %d, ",
9673 pic_display->POC);
9674 hevc_print_cont(hevc, 0,
9675 "decoding index %d ==> ",
9676 pic_display->
9677 decode_idx);
9678 hevc_print_cont(hevc, 0,
9679 "Debug or err,recycle it\n");
9680 }
9681 } else {
9682 if (pic_display->
9683 slice_type != 2) {
9684 pic_display->output_ready = 0;
9685 } else {
9686 prepare_display_buf
9687 (hevc,
9688 pic_display);
9689 hevc->first_pic_flag = 1;
9690 }
9691 }
9692 }
9693 }
9694
9695 vdec_schedule_work(&hevc->work);
9696 }
9697
9698 return IRQ_HANDLED;
9699#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9700 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9701 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9702 if (hevc->m_ins_flag) {
9703 unsigned char next_parser_type =
9704 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
9705 read_decode_info(hevc);
9706
9707 if (vdec->slave &&
9708 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9709 /*cur is base, found enhance*/
9710 struct hevc_state_s *hevc_el =
9711 (struct hevc_state_s *)
9712 vdec->slave->private;
9713 hevc->switch_dvlayer_flag = 1;
9714 hevc->no_switch_dvlayer_count = 0;
9715 hevc_el->start_parser_type =
9716 next_parser_type;
9717 hevc_print(hevc, H265_DEBUG_DV,
9718 "switch (poc %d) to el\n",
9719 hevc->cur_pic ?
9720 hevc->cur_pic->POC :
9721 INVALID_POC);
9722 } else if (vdec->master &&
9723 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
9724 /*cur is enhance, found base*/
9725 struct hevc_state_s *hevc_ba =
9726 (struct hevc_state_s *)
9727 vdec->master->private;
9728 hevc->switch_dvlayer_flag = 1;
9729 hevc->no_switch_dvlayer_count = 0;
9730 hevc_ba->start_parser_type =
9731 next_parser_type;
9732 hevc_print(hevc, H265_DEBUG_DV,
9733 "switch (poc %d) to bl\n",
9734 hevc->cur_pic ?
9735 hevc->cur_pic->POC :
9736 INVALID_POC);
9737 } else {
9738 hevc->switch_dvlayer_flag = 0;
9739 hevc->start_parser_type =
9740 next_parser_type;
9741 hevc->no_switch_dvlayer_count++;
9742 hevc_print(hevc, H265_DEBUG_DV,
9743 "%s: no_switch_dvlayer_count = %d\n",
9744 vdec->master ? "el" : "bl",
9745 hevc->no_switch_dvlayer_count);
9746 if (vdec->slave &&
9747 dolby_el_flush_th != 0 &&
9748 hevc->no_switch_dvlayer_count >
9749 dolby_el_flush_th) {
9750 struct hevc_state_s *hevc_el =
9751 (struct hevc_state_s *)
9752 vdec->slave->private;
9753 struct PIC_s *el_pic;
9754 check_pic_decoded_error(hevc_el,
9755 hevc_el->pic_decoded_lcu_idx);
9756 el_pic = get_pic_by_POC(hevc_el,
9757 hevc_el->curr_POC);
9758 hevc_el->curr_POC = INVALID_POC;
9759 hevc_el->m_pocRandomAccess = MAX_INT;
9760 flush_output(hevc_el, el_pic);
9761 hevc_el->decoded_poc = INVALID_POC; /*
 9762 already called flush_output */
9763 hevc_el->decoding_pic = NULL;
9764 hevc->no_switch_dvlayer_count = 0;
9765 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
9766 hevc_print(hevc, 0,
9767 "no el anymore, flush_output el\n");
9768 }
9769 }
9770 hevc->decoded_poc = hevc->curr_POC;
9771 hevc->decoding_pic = NULL;
9772 hevc->dec_result = DEC_RESULT_DONE;
9773 amhevc_stop();
9774 reset_process_time(hevc);
9775 if (aux_data_is_avaible(hevc))
9776 dolby_get_meta(hevc);
9777 if(hevc->cur_pic->slice_type == 2 &&
9778 hevc->vf_pre_count == 0) {
9779 hevc_print(hevc, 0,
9780 "first slice_type %x no_switch_dvlayer_count %x\n",
9781 hevc->cur_pic->slice_type,
9782 hevc->no_switch_dvlayer_count);
9783 goto force_output;
9784 }
9785 vdec_schedule_work(&hevc->work);
9786 }
9787
9788 return IRQ_HANDLED;
9789#endif
9790 }
9791
9792#endif
9793
9794 if (dec_status == HEVC_SEI_DAT) {
9795 if (!hevc->m_ins_flag) {
9796 int payload_type =
9797 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
9798 int payload_size =
9799 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
9800 process_nal_sei(hevc,
9801 payload_type, payload_size);
9802 }
9803 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
9804 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
9805 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
9806 int parse_type = HEVC_DISCARD_NAL;
9807
9808 hevc->error_watchdog_count = 0;
9809 hevc->error_skip_nal_wt_cnt = 0;
9810#ifdef MULTI_INSTANCE_SUPPORT
9811 if (hevc->m_ins_flag)
9812 reset_process_time(hevc);
9813#endif
9814 if (slice_parse_begin > 0 &&
9815 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
9816 hevc_print(hevc, 0,
9817 "nal type %d, discard %d\n", naltype,
9818 slice_parse_begin);
9819 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
9820 slice_parse_begin--;
9821 }
9822 if (naltype == NAL_UNIT_EOS) {
9823 struct PIC_s *pic;
9824
9825 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
9826#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9827 if ((vdec->master || vdec->slave) &&
9828 aux_data_is_avaible(hevc)) {
9829 if (hevc->decoding_pic)
9830 dolby_get_meta(hevc);
9831 }
9832#endif
9833 check_pic_decoded_error(hevc,
9834 hevc->pic_decoded_lcu_idx);
9835 pic = get_pic_by_POC(hevc, hevc->curr_POC);
9836 hevc->curr_POC = INVALID_POC;
9837 /* add to fix RAP_B_Bossen_1 */
9838 hevc->m_pocRandomAccess = MAX_INT;
9839 flush_output(hevc, pic);
9840 clear_poc_flag(hevc);
9841 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
 9842 /* Interrupt Amrisc to execute */
9843 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9844#ifdef MULTI_INSTANCE_SUPPORT
9845 if (hevc->m_ins_flag) {
9846 hevc->decoded_poc = INVALID_POC; /*
 9847 already called flush_output */
9848 hevc->decoding_pic = NULL;
9849 hevc->dec_result = DEC_RESULT_DONE;
9850 amhevc_stop();
9851
9852 vdec_schedule_work(&hevc->work);
9853 }
9854#endif
9855 return IRQ_HANDLED;
9856 }
9857
9858 if (
9859#ifdef MULTI_INSTANCE_SUPPORT
9860 (!hevc->m_ins_flag) &&
9861#endif
9862 hevc->error_skip_nal_count > 0) {
9863 hevc_print(hevc, 0,
9864 "nal type %d, discard %d\n", naltype,
9865 hevc->error_skip_nal_count);
9866 hevc->error_skip_nal_count--;
9867 if (hevc->error_skip_nal_count == 0) {
9868 hevc_recover(hevc);
9869 hevc->error_flag = 0;
9870 if ((error_handle_policy & 0x2) == 0) {
9871 hevc->have_vps = 1;
9872 hevc->have_sps = 1;
9873 hevc->have_pps = 1;
9874 }
9875 return IRQ_HANDLED;
9876 }
9877 } else if (naltype == NAL_UNIT_VPS) {
9878 parse_type = HEVC_NAL_UNIT_VPS;
9879 hevc->have_vps = 1;
9880#ifdef ERROR_HANDLE_DEBUG
9881 if (dbg_nal_skip_flag & 1)
9882 parse_type = HEVC_DISCARD_NAL;
9883#endif
9884 } else if (hevc->have_vps) {
9885 if (naltype == NAL_UNIT_SPS) {
9886 parse_type = HEVC_NAL_UNIT_SPS;
9887 hevc->have_sps = 1;
9888#ifdef ERROR_HANDLE_DEBUG
9889 if (dbg_nal_skip_flag & 2)
9890 parse_type = HEVC_DISCARD_NAL;
9891#endif
9892 } else if (naltype == NAL_UNIT_PPS) {
9893 parse_type = HEVC_NAL_UNIT_PPS;
9894 hevc->have_pps = 1;
9895#ifdef ERROR_HANDLE_DEBUG
9896 if (dbg_nal_skip_flag & 4)
9897 parse_type = HEVC_DISCARD_NAL;
9898#endif
9899 } else if (hevc->have_sps && hevc->have_pps) {
9900 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
9901
9902 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
9903 (naltype ==
9904 NAL_UNIT_CODED_SLICE_IDR_N_LP)
9905 || (naltype ==
9906 NAL_UNIT_CODED_SLICE_CRA)
9907 || (naltype ==
9908 NAL_UNIT_CODED_SLICE_BLA)
9909 || (naltype ==
9910 NAL_UNIT_CODED_SLICE_BLANT)
9911 || (naltype ==
9912 NAL_UNIT_CODED_SLICE_BLA_N_LP)
9913 ) {
9914 if (slice_parse_begin > 0) {
9915 hevc_print(hevc, 0,
9916 "discard %d, for debugging\n",
9917 slice_parse_begin);
9918 slice_parse_begin--;
9919 } else {
9920 parse_type = seg;
9921 }
9922 hevc->have_valid_start_slice = 1;
9923 } else if (naltype <=
9924 NAL_UNIT_CODED_SLICE_CRA
9925 && (hevc->have_valid_start_slice
9926 || (hevc->PB_skip_mode != 3))) {
9927 if (slice_parse_begin > 0) {
9928 hevc_print(hevc, 0,
9929 "discard %d, dd\n",
9930 slice_parse_begin);
9931 slice_parse_begin--;
9932 } else
9933 parse_type = seg;
9934
9935 }
9936 }
9937 }
9938 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
9939 && hevc->have_valid_start_slice &&
9940 hevc->error_flag == 0) {
9941 if ((get_dbg_flag(hevc) &
9942 H265_DEBUG_MAN_SEARCH_NAL) == 0
9943 /* && (!hevc->m_ins_flag)*/) {
 9944 /* auto parser NAL; do not check
9945 *vps/sps/pps/idr
9946 */
9947 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
9948 }
9949
9950 if ((get_dbg_flag(hevc) &
9951 H265_DEBUG_NO_EOS_SEARCH_DONE)
9952#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9953 || vdec->master
9954 || vdec->slave
9955#endif
9956 ) {
9957 WRITE_VREG(NAL_SEARCH_CTL,
9958 READ_VREG(NAL_SEARCH_CTL) |
9959 0x10000);
9960 }
9961 WRITE_VREG(NAL_SEARCH_CTL,
9962 READ_VREG(NAL_SEARCH_CTL)
9963 | ((parser_sei_enable & 0x7) << 17));
9964#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9965 WRITE_VREG(NAL_SEARCH_CTL,
9966 READ_VREG(NAL_SEARCH_CTL) |
9967 ((parser_dolby_vision_enable & 0x1) << 20));
9968#endif
9969 config_decode_mode(hevc);
9970 }
9971
9972 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
9973 hevc_print(hevc, 0,
9974 "naltype = %d parse_type %d\n %d %d %d %d\n",
9975 naltype, parse_type, hevc->have_vps,
9976 hevc->have_sps, hevc->have_pps,
9977 hevc->have_valid_start_slice);
9978 }
9979
9980 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
 9981 /* Interrupt Amrisc to execute */
9982 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9983#ifdef MULTI_INSTANCE_SUPPORT
9984 if (hevc->m_ins_flag)
9985 start_process_time(hevc);
9986#endif
9987 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
9988#ifdef MULTI_INSTANCE_SUPPORT
9989 if (hevc->m_ins_flag) {
9990 reset_process_time(hevc);
9991 read_decode_info(hevc);
9992
9993 }
9994#endif
9995 if (hevc->start_decoding_time > 0) {
9996 u32 process_time = 1000*
9997 (jiffies - hevc->start_decoding_time)/HZ;
9998 if (process_time > max_decoding_time)
9999 max_decoding_time = process_time;
10000 }
10001
10002 hevc->error_watchdog_count = 0;
10003 if (hevc->pic_list_init_flag == 2) {
10004 hevc->pic_list_init_flag = 3;
10005 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
10006 } else if (hevc->wait_buf == 0) {
10007 u32 vui_time_scale;
10008 u32 vui_num_units_in_tick;
10009 unsigned char reconfig_flag = 0;
10010
10011 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
10012 get_rpm_param(&hevc->param);
10013 else {
10014
10015 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
10016 int ii;
10017
10018 for (ii = 0; ii < 4; ii++) {
10019 hevc->param.l.data[i + ii] =
10020 hevc->rpm_ptr[i + 3
10021 - ii];
10022 }
10023 }
10024#ifdef SEND_LMEM_WITH_RPM
10025 check_head_error(hevc);
10026#endif
10027 }
10028 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
10029 hevc_print(hevc, 0,
10030 "rpm_param: (%d)\n", hevc->slice_idx);
10031 hevc->slice_idx++;
10032 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
10033 hevc_print_cont(hevc, 0,
10034 "%04x ", hevc->param.l.data[i]);
10035 if (((i + 1) & 0xf) == 0)
10036 hevc_print_cont(hevc, 0, "\n");
10037 }
10038
10039 hevc_print(hevc, 0,
10040 "vui_timing_info: %x, %x, %x, %x\n",
10041 hevc->param.p.vui_num_units_in_tick_hi,
10042 hevc->param.p.vui_num_units_in_tick_lo,
10043 hevc->param.p.vui_time_scale_hi,
10044 hevc->param.p.vui_time_scale_lo);
10045 }
10046
10047 if (hevc->is_used_v4l) {
10048 struct aml_vcodec_ctx *ctx =
10049 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
10050
10051 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
10052 struct aml_vdec_ps_infos ps;
10053
10054 hevc->frame_width = hevc->param.p.pic_width_in_luma_samples;
10055 hevc->frame_height = hevc->param.p.pic_height_in_luma_samples;
10056 ps.visible_width = hevc->frame_width;
10057 ps.visible_height = hevc->frame_height;
10058 ps.coded_width = ALIGN(hevc->frame_width, 32);
10059 ps.coded_height = ALIGN(hevc->frame_height, 32);
10060 ps.dpb_size = get_work_pic_num(hevc);
10061 hevc->v4l_params_parsed = true;
10062 /*notice the v4l2 codec.*/
10063 vdec_v4l_set_ps_infos(ctx, &ps);
10064 }
10065 }
10066
10067 if (
10068#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10069 vdec->master == NULL &&
10070 vdec->slave == NULL &&
10071#endif
10072 aux_data_is_avaible(hevc)
10073 ) {
10074
10075 if (get_dbg_flag(hevc) &
10076 H265_DEBUG_BUFMGR_MORE)
10077 dump_aux_buf(hevc);
10078 }
10079
10080 vui_time_scale =
10081 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
10082 hevc->param.p.vui_time_scale_lo;
10083 vui_num_units_in_tick =
10084 (u32)(hevc->param.
10085 p.vui_num_units_in_tick_hi << 16) |
10086 hevc->param.
10087 p.vui_num_units_in_tick_lo;
10088 if (hevc->bit_depth_luma !=
10089 ((hevc->param.p.bit_depth & 0xf) + 8)) {
10090 reconfig_flag = 1;
10091 hevc_print(hevc, 0, "Bit depth luma = %d\n",
10092 (hevc->param.p.bit_depth & 0xf) + 8);
10093 }
10094 if (hevc->bit_depth_chroma !=
10095 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
10096 reconfig_flag = 1;
10097 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
10098 ((hevc->param.p.bit_depth >> 4) &
10099 0xf) + 8);
10100 }
10101 hevc->bit_depth_luma =
10102 (hevc->param.p.bit_depth & 0xf) + 8;
10103 hevc->bit_depth_chroma =
10104 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
10105 bit_depth_luma = hevc->bit_depth_luma;
10106 bit_depth_chroma = hevc->bit_depth_chroma;
10107#ifdef SUPPORT_10BIT
10108 if (hevc->bit_depth_luma == 8 &&
10109 hevc->bit_depth_chroma == 8 &&
10110 enable_mem_saving)
10111 hevc->mem_saving_mode = 1;
10112 else
10113 hevc->mem_saving_mode = 0;
10114#endif
10115 if (reconfig_flag &&
10116 (get_double_write_mode(hevc) & 0x10) == 0)
10117 init_decode_head_hw(hevc);
10118
10119 if ((vui_time_scale != 0)
10120 && (vui_num_units_in_tick != 0)) {
10121 hevc->frame_dur =
10122 div_u64(96000ULL *
10123 vui_num_units_in_tick,
10124 vui_time_scale);
10125 if (hevc->get_frame_dur != true)
10126 vdec_schedule_work(
10127 &hevc->notify_work);
10128
10129 hevc->get_frame_dur = true;
10130#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10131 gvs->frame_dur = hevc->frame_dur;
10132#endif
10133 }
10134
10135 if (hevc->video_signal_type !=
10136 ((hevc->param.p.video_signal_type << 16)
10137 | hevc->param.p.color_description)) {
10138 u32 v = hevc->param.p.video_signal_type;
10139 u32 c = hevc->param.p.color_description;
10140#if 0
10141 if (v & 0x2000) {
10142 hevc_print(hevc, 0,
10143 "video_signal_type present:\n");
10144 hevc_print(hevc, 0, " %s %s\n",
10145 video_format_names[(v >> 10) & 7],
10146 ((v >> 9) & 1) ?
10147 "full_range" : "limited");
10148 if (v & 0x100) {
10149 hevc_print(hevc, 0,
10150 " color_description present:\n");
10151 hevc_print(hevc, 0,
10152 " color_primarie = %s\n",
10153 color_primaries_names
10154 [v & 0xff]);
10155 hevc_print(hevc, 0,
10156 " transfer_characteristic = %s\n",
10157 transfer_characteristics_names
10158 [(c >> 8) & 0xff]);
10159 hevc_print(hevc, 0,
10160 " matrix_coefficient = %s\n",
10161 matrix_coeffs_names[c & 0xff]);
10162 }
10163 }
10164#endif
10165 hevc->video_signal_type = (v << 16) | c;
10166 video_signal_type = hevc->video_signal_type;
10167 }
10168
10169 if (use_cma &&
10170 (hevc->param.p.slice_segment_address == 0)
10171 && (hevc->pic_list_init_flag == 0)) {
10172 int log = hevc->param.p.log2_min_coding_block_size_minus3;
10173 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
10174
10175 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10176 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
10177 hevc->lcu_size = 1 << (log + 3 + log_s);
10178 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
10179 if (hevc->pic_w == 0 || hevc->pic_h == 0
10180 || hevc->lcu_size == 0
10181 || is_oversize(hevc->pic_w, hevc->pic_h)
10182 || (!hevc->skip_first_nal &&
10183 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
 10184 /* skip, search next start code */
10185 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10186 & (~0x2));
10187 if ( !hevc->skip_first_nal &&
10188 (hevc->pic_h == 96) && (hevc->pic_w == 160))
10189 hevc->skip_first_nal = 1;
10190 hevc->skip_flag = 1;
10191 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
 10192 /* Interrupt Amrisc to execute */
10193 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10194#ifdef MULTI_INSTANCE_SUPPORT
10195 if (hevc->m_ins_flag)
10196 start_process_time(hevc);
10197#endif
10198 } else {
10199 hevc->sps_num_reorder_pics_0 =
10200 hevc->param.p.sps_num_reorder_pics_0;
10201 hevc->pic_list_init_flag = 1;
10202#ifdef MULTI_INSTANCE_SUPPORT
10203 if (hevc->m_ins_flag) {
10204 vdec_schedule_work(&hevc->work);
10205 } else
10206#endif
10207 up(&h265_sema);
10208 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10209 }
10210 return IRQ_HANDLED;
10211 }
10212
10213}
10214 ret =
10215 hevc_slice_segment_header_process(hevc,
10216 &hevc->param, decode_pic_begin);
10217 if (ret < 0) {
10218#ifdef MULTI_INSTANCE_SUPPORT
10219 if (hevc->m_ins_flag) {
10220 hevc->wait_buf = 0;
10221 hevc->dec_result = DEC_RESULT_AGAIN;
10222 amhevc_stop();
10223 restore_decode_state(hevc);
10224 reset_process_time(hevc);
10225 vdec_schedule_work(&hevc->work);
10226 return IRQ_HANDLED;
10227 }
10228#else
10229 ;
10230#endif
10231 } else if (ret == 0) {
10232 if ((hevc->new_pic) && (hevc->cur_pic)) {
10233 hevc->cur_pic->stream_offset =
10234 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10235 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10236 "read stream_offset = 0x%x\n",
10237 hevc->cur_pic->stream_offset);
10238 hevc->cur_pic->aspect_ratio_idc =
10239 hevc->param.p.aspect_ratio_idc;
10240 hevc->cur_pic->sar_width =
10241 hevc->param.p.sar_width;
10242 hevc->cur_pic->sar_height =
10243 hevc->param.p.sar_height;
10244 }
10245
10246 WRITE_VREG(HEVC_DEC_STATUS_REG,
10247 HEVC_CODED_SLICE_SEGMENT_DAT);
 10248 /* Interrupt Amrisc to execute */
10249 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10250
10251 hevc->start_decoding_time = jiffies;
10252#ifdef MULTI_INSTANCE_SUPPORT
10253 if (hevc->m_ins_flag)
10254 start_process_time(hevc);
10255#endif
10256#if 1
 10257 /* TODO: copy aux data to hevc->cur_pic */
10258#endif
10259#ifdef MULTI_INSTANCE_SUPPORT
10260 } else if (hevc->m_ins_flag) {
10261 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10262 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10263 __func__, ret);
10264 hevc->decoded_poc = INVALID_POC;
10265 hevc->decoding_pic = NULL;
10266 hevc->dec_result = DEC_RESULT_DONE;
10267 amhevc_stop();
10268 reset_process_time(hevc);
10269 vdec_schedule_work(&hevc->work);
10270#endif
10271 } else {
10272 /* skip, search next start code */
10273#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10274 gvs->drop_frame_count++;
10275#endif
10276 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10277 hevc->skip_flag = 1;
10278 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
 10279 /* Interrupt Amrisc to execute */
10280 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10281 }
10282
10283 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10284 hevc_print(hevc, 0 , "hevc decode oversize !!\n");
10285#ifdef MULTI_INSTANCE_SUPPORT
10286 if (!hevc->m_ins_flag)
10287 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10288 H265_DEBUG_DIS_SYS_ERROR_PROC);
10289#endif
10290 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10291 }
10292 return IRQ_HANDLED;
10293}
10294
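/*
 * Wait for the HEVC stream search engine to go idle: clear
 * HEVC_SHIFT_STATUS and poll bit 1 of HEVC_STREAM_CONTROL,
 * giving up after about 100 * 20ms.
 */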
10295static void wait_hevc_search_done(struct hevc_state_s *hevc)
10296{
10297 int count = 0;
10298 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10299 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10300 msleep(20);
10301 count++;
10302 if (count > 100) {
10303 hevc_print(hevc, 0, "%s timeout\n", __func__);
10304 break;
10305 }
10306 }
10307}
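
/*
 * Interrupt top half: latch the decoder status, service the ucode
 * debug/pause hooks, and (in single instance mode) handle the
 * over-decode case directly; everything else is deferred to the
 * threaded handler vh265_isr_thread_fn().
 */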
10308static irqreturn_t vh265_isr(int irq, void *data)
10309{
10310 int i, temp;
10311 unsigned int dec_status;
10312 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10313 u32 debug_tag;
10314 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10315
10316 if (hevc->init_flag == 0)
10317 return IRQ_HANDLED;
10318 hevc->dec_status = dec_status;
10319 if (is_log_enable(hevc))
10320 add_log(hevc,
10321 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10322 dec_status, READ_HREG(HEVC_DECODE_INFO),
10323 READ_VREG(HEVC_MPRED_CURR_LCU),
10324 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10325 READ_VREG(HEVC_SHIFT_STATUS));
10326
10327 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10328 hevc_print(hevc, 0,
10329 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10330 dec_status, READ_HREG(HEVC_DECODE_INFO),
10331 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10332 READ_VREG(HEVC_SHIFT_STATUS));
10333
10334 debug_tag = READ_HREG(DEBUG_REG1);
10335 if (debug_tag & 0x10000) {
10336 hevc_print(hevc, 0,
10337 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10338
10339 if (hevc->mmu_enable)
10340 temp = 0x500;
10341 else
10342 temp = 0x400;
10343 for (i = 0; i < temp; i += 4) {
10344 int ii;
10345 if ((i & 0xf) == 0)
10346 hevc_print_cont(hevc, 0, "%03x: ", i);
10347 for (ii = 0; ii < 4; ii++) {
10348 hevc_print_cont(hevc, 0, "%04x ",
10349 hevc->lmem_ptr[i + 3 - ii]);
10350 }
10351 if (((i + ii) & 0xf) == 0)
10352 hevc_print_cont(hevc, 0, "\n");
10353 }
10354
10355 if (((udebug_pause_pos & 0xffff)
10356 == (debug_tag & 0xffff)) &&
10357 (udebug_pause_decode_idx == 0 ||
10358 udebug_pause_decode_idx == hevc->decode_idx) &&
10359 (udebug_pause_val == 0 ||
10360 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10361 udebug_pause_pos &= 0xffff;
10362 hevc->ucode_pause_pos = udebug_pause_pos;
10363 }
10364 else if (debug_tag & 0x20000)
10365 hevc->ucode_pause_pos = 0xffffffff;
10366 if (hevc->ucode_pause_pos)
10367 reset_process_time(hevc);
10368 else
10369 WRITE_HREG(DEBUG_REG1, 0);
10370 } else if (debug_tag != 0) {
10371 hevc_print(hevc, 0,
10372 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10373 READ_HREG(DEBUG_REG2),
10374 READ_VREG(HEVC_STREAM_LEVEL),
10375 READ_VREG(HEVC_STREAM_WR_PTR),
10376 READ_VREG(HEVC_STREAM_RD_PTR));
10377 if (((udebug_pause_pos & 0xffff)
10378 == (debug_tag & 0xffff)) &&
10379 (udebug_pause_decode_idx == 0 ||
10380 udebug_pause_decode_idx == hevc->decode_idx) &&
10381 (udebug_pause_val == 0 ||
10382 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10383 udebug_pause_pos &= 0xffff;
10384 hevc->ucode_pause_pos = udebug_pause_pos;
10385 }
10386 if (hevc->ucode_pause_pos)
10387 reset_process_time(hevc);
10388 else
10389 WRITE_HREG(DEBUG_REG1, 0);
10390 return IRQ_HANDLED;
10391 }
10392
10393
10394 if (hevc->pic_list_init_flag == 1)
10395 return IRQ_HANDLED;
10396
10397 if (!hevc->m_ins_flag) {
10398 if (dec_status == HEVC_OVER_DECODE) {
10399 hevc->over_decode = 1;
10400 hevc_print(hevc, 0,
10401 "isr: over decode\n"),
10402 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10403 return IRQ_HANDLED;
10404 }
10405 }
10406
10407 return IRQ_WAKE_THREAD;
10408
10409}
10410
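/*
 * Deferred work: request a decoder clock update when the stream's
 * resolution or frame rate has changed, and remember the setting
 * in saved_resolution.
 */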
10411static void vh265_set_clk(struct work_struct *work)
10412{
10413 struct hevc_state_s *hevc = container_of(work,
10414 struct hevc_state_s, set_clk_work);
10415
10416 int fps = 96000 / hevc->frame_dur;
10417
10418 if (hevc_source_changed(VFORMAT_HEVC,
10419 hevc->frame_width, hevc->frame_height, fps) > 0)
10420 hevc->saved_resolution = hevc->frame_width *
10421 hevc->frame_height * fps;
10422}
10423
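/*
 * Periodic watchdog timer (re-armed every PUT_INTERVAL): detects
 * decode timeouts in multi instance mode, drives the error recovery
 * watchdogs in single instance mode, and services the debug hooks
 * (register poke, pic list dump, forced interrupt, clock update).
 */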
10424static void vh265_check_timer_func(unsigned long arg)
10425{
10426 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10427 struct timer_list *timer = &hevc->timer;
10428 unsigned char empty_flag;
10429 unsigned int buf_level;
10430
10431 enum receviver_start_e state = RECEIVER_INACTIVE;
10432
10433 if (hevc->init_flag == 0) {
10434 if (hevc->stat & STAT_TIMER_ARM) {
10435 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10436 }
10437 return;
10438 }
10439#ifdef MULTI_INSTANCE_SUPPORT
10440 if (hevc->m_ins_flag &&
10441 (get_dbg_flag(hevc) &
10442 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10443 hw_to_vdec(hevc)->next_status ==
10444 VDEC_STATUS_DISCONNECTED) {
10445 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10446 vdec_schedule_work(&hevc->work);
10447 hevc_print(hevc,
10448 0, "vdec requested to be disconnected\n");
10449 return;
10450 }
10451
10452 if (hevc->m_ins_flag) {
10453 if ((input_frame_based(hw_to_vdec(hevc)) ||
10454 (READ_VREG(HEVC_STREAM_LEVEL) > 0xb0)) &&
10455 ((get_dbg_flag(hevc) &
10456 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10457 (decode_timeout_val > 0) &&
10458 (hevc->start_process_time > 0) &&
10459 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10460 > decode_timeout_val)
10461 ) {
10462 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10463 int current_lcu_idx =
10464 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10465 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10466 if (hevc->last_lcu_idx == current_lcu_idx) {
10467 if (hevc->decode_timeout_count > 0)
10468 hevc->decode_timeout_count--;
10469 if (hevc->decode_timeout_count == 0)
10470 timeout_process(hevc);
10471 } else
10472 restart_process_time(hevc);
10473 hevc->last_lcu_idx = current_lcu_idx;
10474 } else {
10475 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10476 timeout_process(hevc);
10477 }
10478 }
10479 } else {
10480#endif
10481 if (hevc->m_ins_flag == 0 &&
10482 vf_get_receiver(hevc->provider_name)) {
10483 state =
10484 vf_notify_receiver(hevc->provider_name,
10485 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10486 NULL);
10487 if ((state == RECEIVER_STATE_NULL)
10488 || (state == RECEIVER_STATE_NONE))
10489 state = RECEIVER_INACTIVE;
10490 } else
10491 state = RECEIVER_INACTIVE;
10492
10493 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10494 /* error watchdog */
10495 if (hevc->m_ins_flag == 0 &&
10496 (empty_flag == 0)
10497 && (hevc->pic_list_init_flag == 0
10498 || hevc->pic_list_init_flag
10499 == 3)) {
10500 /* decoder has input */
10501 if ((get_dbg_flag(hevc) &
10502 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10503
10504 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10505 /* receiver has no buffer to recycle */
10506 if ((state == RECEIVER_INACTIVE) &&
10507 (kfifo_is_empty(&hevc->display_q) &&
10508 buf_level > 0x200)
10509 ) {
10510 if (hevc->error_flag == 0) {
10511 hevc->error_watchdog_count++;
10512 if (hevc->error_watchdog_count ==
10513 error_handle_threshold) {
10514 hevc_print(hevc, 0,
10515 "H265 dec err local reset.\n");
10516 hevc->error_flag = 1;
10517 hevc->error_watchdog_count = 0;
10518 hevc->error_skip_nal_wt_cnt = 0;
10519 hevc->
10520 error_system_watchdog_count++;
10521 WRITE_VREG
10522 (HEVC_ASSIST_MBOX0_IRQ_REG,
10523 0x1);
10524 }
10525 } else if (hevc->error_flag == 2) {
10526 int th =
10527 error_handle_nal_skip_threshold;
10528 hevc->error_skip_nal_wt_cnt++;
10529 if (hevc->error_skip_nal_wt_cnt
10530 == th) {
10531 hevc->error_flag = 3;
10532 hevc->error_watchdog_count = 0;
10533 hevc->
10534 error_skip_nal_wt_cnt = 0;
10535 WRITE_VREG
10536 (HEVC_ASSIST_MBOX0_IRQ_REG,
10537 0x1);
10538 }
10539 }
10540 }
10541 }
10542
10543 if ((get_dbg_flag(hevc)
10544 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10545 /* receiver has no buffer to recycle */
10546 if ((state == RECEIVER_INACTIVE) &&
10547 (kfifo_is_empty(&hevc->display_q))
10548 ) { /* no buffer to recycle */
10549 if ((get_dbg_flag(hevc) &
10550 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10551 0)
10552 hevc->error_system_watchdog_count++;
10553 if (hevc->error_system_watchdog_count ==
10554 error_handle_system_threshold) {
10555 /* and it lasts for a while */
10556 hevc_print(hevc, 0,
10557 "H265 dec fatal error watchdog.\n");
10558 hevc->
10559 error_system_watchdog_count = 0;
10560 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10561 }
10562 }
10563 } else {
10564 hevc->error_watchdog_count = 0;
10565 hevc->error_system_watchdog_count = 0;
10566 }
10567#ifdef MULTI_INSTANCE_SUPPORT
10568 }
10569#endif
10570 if ((hevc->ucode_pause_pos != 0) &&
10571 (hevc->ucode_pause_pos != 0xffffffff) &&
10572 udebug_pause_pos != hevc->ucode_pause_pos) {
10573 hevc->ucode_pause_pos = 0;
10574 WRITE_HREG(DEBUG_REG1, 0);
10575 }
10576
10577 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10578 dump_pic_list(hevc);
10579 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10580 }
10581 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10582 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10583 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10584 }
10585#ifdef TEST_NO_BUF
10586 if (hevc->wait_buf)
10587 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10588#endif
10589 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
10590 hevc->error_skip_nal_count = error_skip_nal_count;
10591 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10592
10593 debug &= ~H265_DEBUG_HW_RESET;
10594 }
10595
10596#ifdef ERROR_HANDLE_DEBUG
10597 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
10598 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
10599 dbg_nal_skip_count &= ~0x10000;
10600 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10601 }
10602#endif
10603
10604 if (radr != 0) {
10605 if (rval != 0) {
10606 WRITE_VREG(radr, rval);
10607 hevc_print(hevc, 0,
10608 "WRITE_VREG(%x,%x)\n", radr, rval);
10609 } else
10610 hevc_print(hevc, 0,
10611 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
10612 rval = 0;
10613 radr = 0;
10614 }
10615 if (dbg_cmd != 0) {
10616 if (dbg_cmd == 1) {
10617 u32 disp_laddr;
10618
10619 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
10620 get_double_write_mode(hevc) == 0) {
10621 disp_laddr =
10622 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
10623 } else {
10624 struct canvas_s cur_canvas;
10625
10626 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
10627 & 0xff), &cur_canvas);
10628 disp_laddr = cur_canvas.addr;
10629 }
10630 hevc_print(hevc, 0,
10631 "current displayed buffer address %x\r\n",
10632 disp_laddr);
10633 }
10634 dbg_cmd = 0;
10635 }
 10636 /* don't change it at start. */
10637 if (hevc->m_ins_flag == 0 &&
10638 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
10639 hevc->frame_dur > 0 && hevc->saved_resolution !=
10640 hevc->frame_width * hevc->frame_height *
10641 (96000 / hevc->frame_dur))
10642 vdec_schedule_work(&hevc->set_clk_work);
10643
10644 mod_timer(timer, jiffies + PUT_INTERVAL);
10645}
10646
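/*
 * Worker thread used in the single instance (use_cma) path: woken
 * through h265_sema to finish picture list setup once the stream
 * parameters are known, and to tear the list down again on uninit.
 */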
10647static int h265_task_handle(void *data)
10648{
10649 int ret = 0;
10650 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10651
10652 set_user_nice(current, -10);
10653 while (1) {
10654 if (use_cma == 0) {
10655 hevc_print(hevc, 0,
10656 "ERROR: use_cma can not be changed dynamically\n");
10657 }
10658 ret = down_interruptible(&h265_sema);
10659 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
10660 init_pic_list(hevc);
10661 init_pic_list_hw(hevc);
10662 init_buf_spec(hevc);
10663 hevc->pic_list_init_flag = 2;
10664 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
10665
10666 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10667
10668 }
10669
10670 if (hevc->uninit_list) {
10671 /*USE_BUF_BLOCK*/
10672 uninit_pic_list(hevc);
10673 hevc_print(hevc, 0, "uninit list\n");
10674 hevc->uninit_list = 0;
10675#ifdef USE_UNINIT_SEMA
10676 if (use_cma) {
10677 up(&hevc->h265_uninit_done_sema);
10678 while (!kthread_should_stop())
10679 msleep(1);
10680 break;
10681 }
10682#endif
10683 }
10684 }
10685
10686 return 0;
10687}
10688
10689void vh265_free_cmabuf(void)
10690{
10691 struct hevc_state_s *hevc = gHevc;
10692
10693 mutex_lock(&vh265_mutex);
10694
10695 if (hevc->init_flag) {
10696 mutex_unlock(&vh265_mutex);
10697 return;
10698 }
10699
10700 mutex_unlock(&vh265_mutex);
10701}
10702
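/*
 * Fill struct vdec_info with the current stream statistics
 * (size, frame rate, error state) reported to the caller.
 */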
10703#ifdef MULTI_INSTANCE_SUPPORT
10704int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
10705#else
10706int vh265_dec_status(struct vdec_info *vstatus)
10707#endif
10708{
10709#ifdef MULTI_INSTANCE_SUPPORT
10710 struct hevc_state_s *hevc =
10711 (struct hevc_state_s *)vdec->private;
10712#else
10713 struct hevc_state_s *hevc = gHevc;
10714#endif
10715 if (!hevc)
10716 return -1;
10717
10718 vstatus->frame_width = hevc->frame_width;
10719 vstatus->frame_height = hevc->frame_height;
10720 if (hevc->frame_dur != 0)
10721 vstatus->frame_rate = 96000 / hevc->frame_dur;
10722 else
10723 vstatus->frame_rate = -1;
10724 vstatus->error_count = 0;
10725 vstatus->status = hevc->stat | hevc->fatal_error;
10726#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10727 vstatus->bit_rate = gvs->bit_rate;
10728 vstatus->frame_dur = hevc->frame_dur;
10729 if (gvs) {
10730 vstatus->bit_rate = gvs->bit_rate;
10731 vstatus->frame_data = gvs->frame_data;
10732 vstatus->total_data = gvs->total_data;
10733 vstatus->frame_count = gvs->frame_count;
10734 vstatus->error_frame_count = gvs->error_frame_count;
10735 vstatus->drop_frame_count = gvs->drop_frame_count;
10736 vstatus->total_data = gvs->total_data;
10737 vstatus->samp_cnt = gvs->samp_cnt;
10738 vstatus->offset = gvs->offset;
10739 }
10740 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
10741 "%s", DRIVER_NAME);
10742#endif
10743 vstatus->ratio_control = hevc->ratio_control;
10744 return 0;
10745}
10746
10747int vh265_set_isreset(struct vdec_s *vdec, int isreset)
10748{
10749 is_reset = isreset;
10750 return 0;
10751}
10752
10753static int vh265_vdec_info_init(void)
10754{
10755 gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
10756 if (NULL == gvs) {
10757 pr_info("the struct of vdec status malloc failed.\n");
10758 return -ENOMEM;
10759 }
10760 return 0;
10761}
10762
10763#if 0
10764static void H265_DECODE_INIT(void)
10765{
10766 /* enable hevc clocks */
10767 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
10768 /* *************************************************************** */
10769 /* Power ON HEVC */
10770 /* *************************************************************** */
10771 /* Powerup HEVC */
10772 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
10773 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
10774 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
10775 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
10776 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
10777 /* remove isolations */
10778 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
10779 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
10780
10781}
10782#endif
10783
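/*
 * Program HEVC_DECODE_MODE/HEVC_DECODE_MODE2 according to the
 * instance type (single vs. multi, frame based vs. stream based)
 * and, for Dolby Vision, whether this instance decodes the base
 * or the enhancement layer.
 */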
10784static void config_decode_mode(struct hevc_state_s *hevc)
10785{
10786#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10787 struct vdec_s *vdec = hw_to_vdec(hevc);
10788#endif
10789 unsigned decode_mode;
10790 if (!hevc->m_ins_flag)
10791 decode_mode = DECODE_MODE_SINGLE;
10792 else if (vdec_frame_based(hw_to_vdec(hevc)))
10793 decode_mode =
10794 DECODE_MODE_MULTI_FRAMEBASE;
10795#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10796 else if (vdec->slave) {
10797 if (force_bypass_dvenl & 0x80000000)
10798 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
10799 else
10800 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
10801 if (dolby_meta_with_el && hevc->bypass_dvenl) {
10802 hevc->bypass_dvenl = 0;
10803 hevc_print(hevc, 0,
10804 "NOT support bypass_dvenl when meta_with_el\n");
10805 }
10806 if (hevc->bypass_dvenl)
10807 decode_mode =
10808 (hevc->start_parser_type << 8)
10809 | DECODE_MODE_MULTI_STREAMBASE;
10810 else
10811 decode_mode =
10812 (hevc->start_parser_type << 8)
10813 | DECODE_MODE_MULTI_DVBAL;
10814 } else if (vdec->master)
10815 decode_mode =
10816 (hevc->start_parser_type << 8)
10817 | DECODE_MODE_MULTI_DVENL;
10818#endif
10819 else
10820 decode_mode =
10821 DECODE_MODE_MULTI_STREAMBASE;
10822
10823 if (hevc->m_ins_flag)
10824 decode_mode |=
10825 (hevc->start_decoding_flag << 16);
10826 /* set MBX0 interrupt flag */
10827 decode_mode |= (0x80 << 24);
10828 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
10829 WRITE_VREG(HEVC_DECODE_MODE2,
10830 hevc->rps_set_id);
10831}
10832
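/*
 * Hardware protocol init done before (re)starting the AMRISC ucode:
 * configure the workspace, mailbox interrupt, NAL search control,
 * decode mode and aux buffers.
 */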
10833static void vh265_prot_init(struct hevc_state_s *hevc)
10834{
10835#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10836 struct vdec_s *vdec = hw_to_vdec(hevc);
10837#endif
10838 /* H265_DECODE_INIT(); */
10839
10840 hevc_config_work_space_hw(hevc);
10841
10842 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
10843
10844 WRITE_VREG(HEVC_WAIT_FLAG, 1);
10845
10846 /* WRITE_VREG(P_HEVC_MPSR, 1); */
10847
10848 /* clear mailbox interrupt */
10849 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
10850
10851 /* enable mailbox interrupt */
10852 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
10853
10854 /* disable PSCALE for hardware sharing */
10855 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
10856
10857 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
10858
10859 if ((get_dbg_flag(hevc) &
10860 (H265_DEBUG_MAN_SKIP_NAL |
10861 H265_DEBUG_MAN_SEARCH_NAL))
10862 /*||hevc->m_ins_flag*/
10863 ) {
10864 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
10865 } else {
10866 /* check vps/sps/pps/i-slice in ucode */
10867 unsigned ctl_val = 0x8;
10868 if (hevc->PB_skip_mode == 0)
10869 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
10870 else if (hevc->PB_skip_mode == 3)
10871 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
10872 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
10873 }
10874 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
10875#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10876 || vdec->master
10877 || vdec->slave
10878#endif
10879 )
10880 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
10881
10882 WRITE_VREG(NAL_SEARCH_CTL,
10883 READ_VREG(NAL_SEARCH_CTL)
10884 | ((parser_sei_enable & 0x7) << 17));
10885#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10886 WRITE_VREG(NAL_SEARCH_CTL,
10887 READ_VREG(NAL_SEARCH_CTL) |
10888 ((parser_dolby_vision_enable & 0x1) << 20));
10889#endif
10890 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
10891
10892 config_decode_mode(hevc);
10893 config_aux_buf(hevc);
10894#ifdef SWAP_HEVC_UCODE
10895 if (!tee_enabled() && hevc->is_swap &&
10896 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10897 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
10898 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
10899 }
10900#endif
10901#ifdef DETREFILL_ENABLE
10902 if (hevc->is_swap &&
10903 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10904 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
10905 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
10906 }
10907#endif
10908}
10909
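/*
 * Per-stream software init: validate the configured frame size,
 * derive frame duration, aspect ratio and i-only mode from the
 * decinfo, and set up the vframe fifos before hevc_local_init().
 */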
10910static int vh265_local_init(struct hevc_state_s *hevc)
10911{
10912 int i;
10913 int ret = -1;
10914
10915#ifdef DEBUG_PTS
10916 hevc->pts_missed = 0;
10917 hevc->pts_hit = 0;
10918#endif
10919
10920 hevc->saved_resolution = 0;
10921 hevc->get_frame_dur = false;
10922 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
10923 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
10924 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
10925 pr_info("over size : %u x %u.\n",
10926 hevc->frame_width, hevc->frame_height);
10927 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10928 return ret;
10929 }
10930
10931 if (hevc->max_pic_w && hevc->max_pic_h) {
10932 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
10933 ((hevc->max_pic_w * hevc->max_pic_h) >
10934 1920 * 1088) ? true : false;
10935 } else {
10936 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
10937 ((hevc->frame_width * hevc->frame_height) >
10938 1920 * 1088) ? true : false;
10939 }
10940
10941 hevc->frame_dur =
10942 (hevc->vh265_amstream_dec_info.rate ==
10943 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
10944#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10945 gvs->frame_dur = hevc->frame_dur;
10946#endif
10947 if (hevc->frame_width && hevc->frame_height)
10948 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
10949
10950 if (i_only_flag)
10951 hevc->i_only = i_only_flag & 0xff;
10952 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
10953 & 0x08)
10954 hevc->i_only = 0x7;
10955 else
10956 hevc->i_only = 0x0;
10957 hevc->error_watchdog_count = 0;
10958 hevc->sei_present_flag = 0;
10959 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
10960 & 0x40) >> 6;
10961 hevc_print(hevc, 0,
10962 "h265:pts_unstable=%d\n", pts_unstable);
10963/*
10964 *TODO:FOR VERSION
10965 */
10966 hevc_print(hevc, 0,
10967 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
10968 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
10969
10970 if (hevc->frame_dur == 0)
10971 hevc->frame_dur = 96000 / 24;
10972
10973 INIT_KFIFO(hevc->display_q);
10974 INIT_KFIFO(hevc->newframe_q);
10975 INIT_KFIFO(hevc->pending_q);
10976
10977 for (i = 0; i < VF_POOL_SIZE; i++) {
10978 const struct vframe_s *vf = &hevc->vfpool[i];
10979
10980 hevc->vfpool[i].index = -1;
10981 kfifo_put(&hevc->newframe_q, vf);
10982 }
10983
10984
10985 ret = hevc_local_init(hevc);
10986
10987 return ret;
10988}
10989#ifdef MULTI_INSTANCE_SUPPORT
10990static s32 vh265_init(struct vdec_s *vdec)
10991{
10992 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
10993#else
10994static s32 vh265_init(struct hevc_state_s *hevc)
10995{
10996
10997#endif
10998 int ret, size = -1;
10999 int fw_size = 0x1000 * 16;
11000 struct firmware_s *fw = NULL;
11001
11002 init_timer(&hevc->timer);
11003
11004 hevc->stat |= STAT_TIMER_INIT;
11005
11006 if (hevc->m_ins_flag) {
11007#ifdef USE_UNINIT_SEMA
11008 sema_init(&hevc->h265_uninit_done_sema, 0);
11009#endif
11010 INIT_WORK(&hevc->work, vh265_work);
11011 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
11012 }
11013
11014 if (vh265_local_init(hevc) < 0)
11015 return -EBUSY;
11016
11017 mutex_init(&hevc->chunks_mutex);
11018 INIT_WORK(&hevc->notify_work, vh265_notify_work);
11019 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
11020
11021 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
11022 if (IS_ERR_OR_NULL(fw))
11023 return -ENOMEM;
11024
11025 if (hevc->mmu_enable)
11026 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11027 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
11028 else {
11029 if (!hevc->is_4k) {
11030 /* if an older version of the fw was loaded, */
 11031 /* need to try the noswap fw because the */
 11032 /* old fw package does not contain the swap fw.*/
11033 size = get_firmware_data(
11034 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
11035 if (size < 0)
11036 size = get_firmware_data(
11037 VIDEO_DEC_HEVC_MMU, fw->data);
11038 else if (size)
11039 hevc->is_swap = true;
11040 } else
11041 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
11042 fw->data);
11043 }
11044 else
11045 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
11046
11047 if (size < 0) {
11048 pr_err("get firmware fail.\n");
11049 vfree(fw);
11050 return -1;
11051 }
11052
11053 fw->len = size;
11054
11055#ifdef SWAP_HEVC_UCODE
11056 if (!tee_enabled() && hevc->is_swap &&
11057 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11058 if (hevc->mmu_enable) {
11059 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
11060 hevc->mc_cpu_addr =
11061 dma_alloc_coherent(amports_get_dma_device(),
11062 hevc->swap_size,
11063 &hevc->mc_dma_handle, GFP_KERNEL);
11064 if (!hevc->mc_cpu_addr) {
11065 amhevc_disable();
11066 pr_info("vh265 mmu swap ucode loaded fail.\n");
11067 return -ENOMEM;
11068 }
11069
11070 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
11071 hevc->swap_size);
11072
11073 hevc_print(hevc, 0,
11074 "vh265 mmu ucode swap loaded %x\n",
11075 hevc->mc_dma_handle);
11076 }
11077 }
11078#endif
11079
11080#ifdef MULTI_INSTANCE_SUPPORT
11081 if (hevc->m_ins_flag) {
11082 hevc->timer.data = (ulong) hevc;
11083 hevc->timer.function = vh265_check_timer_func;
11084 hevc->timer.expires = jiffies + PUT_INTERVAL;
11085
11086 hevc->fw = fw;
11087
11088 return 0;
11089 }
11090#endif
11091 amhevc_enable();
11092
11093 if (hevc->mmu_enable)
11094 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11095 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
11096 else {
11097 if (!hevc->is_4k) {
11098 /* if an older version of the fw was loaded, */
 11099 /* need to try the noswap fw because the */
 11100 /* old fw package does not contain the swap fw. */
11101 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11102 "hevc_mmu_swap", fw->data);
11103 if (ret < 0)
11104 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11105 "h265_mmu", fw->data);
11106 else
11107 hevc->is_swap = true;
11108 } else
11109 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11110 "h265_mmu", fw->data);
11111 }
11112 else
11113 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
11114
11115 if (ret < 0) {
11116 amhevc_disable();
11117 vfree(fw);
11118 pr_err("H265: the %s fw loading failed, err: %x\n",
11119 tee_enabled() ? "TEE" : "local", ret);
11120 return -EBUSY;
11121 }
11122
11123 vfree(fw);
11124
11125 hevc->stat |= STAT_MC_LOAD;
11126
11127#ifdef DETREFILL_ENABLE
11128 if (hevc->is_swap &&
11129 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11130 init_detrefill_buf(hevc);
11131#endif
11132 /* enable AMRISC side protocol */
11133 vh265_prot_init(hevc);
11134
11135 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
11136 vh265_isr_thread_fn,
11137 IRQF_ONESHOT,/*run thread on this irq disabled*/
11138 "vh265-irq", (void *)hevc)) {
11139 hevc_print(hevc, 0, "vh265 irq register error.\n");
11140 amhevc_disable();
11141 return -ENOENT;
11142 }
11143
11144 hevc->stat |= STAT_ISR_REG;
11145 hevc->provider_name = PROVIDER_NAME;
11146
11147#ifdef MULTI_INSTANCE_SUPPORT
11148 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
11149 &vh265_vf_provider, vdec);
11150 vf_reg_provider(&vh265_vf_prov);
11151 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
11152 NULL);
11153 if (hevc->frame_dur != 0) {
11154 if (!is_reset) {
11155 vf_notify_receiver(hevc->provider_name,
11156 VFRAME_EVENT_PROVIDER_FR_HINT,
11157 (void *)
11158 ((unsigned long)hevc->frame_dur));
11159 fr_hint_status = VDEC_HINTED;
11160 }
11161 } else
11162 fr_hint_status = VDEC_NEED_HINT;
11163#else
11164 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
11165 hevc);
11166 vf_reg_provider(&vh265_vf_prov);
11167 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
11168 if (hevc->frame_dur != 0) {
11169 vf_notify_receiver(PROVIDER_NAME,
11170 VFRAME_EVENT_PROVIDER_FR_HINT,
11171 (void *)
11172 ((unsigned long)hevc->frame_dur));
11173 fr_hint_status = VDEC_HINTED;
11174 } else
11175 fr_hint_status = VDEC_NEED_HINT;
11176#endif
11177 hevc->stat |= STAT_VF_HOOK;
11178
11179 hevc->timer.data = (ulong) hevc;
11180 hevc->timer.function = vh265_check_timer_func;
11181 hevc->timer.expires = jiffies + PUT_INTERVAL;
11182
11183 add_timer(&hevc->timer);
11184
11185 hevc->stat |= STAT_TIMER_ARM;
11186
11187 if (use_cma) {
11188#ifdef USE_UNINIT_SEMA
11189 sema_init(&hevc->h265_uninit_done_sema, 0);
11190#endif
11191 if (h265_task == NULL) {
11192 sema_init(&h265_sema, 1);
11193 h265_task =
11194 kthread_run(h265_task_handle, hevc,
11195 "kthread_h265");
11196 }
11197 }
11198 /* hevc->stat |= STAT_KTHREAD; */
11199#if 0
11200 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11201 hevc_print(hevc, 0, "%s force clk\n", __func__);
11202 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11203 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11204 ((1 << 2) | (1 << 1)));
11205 WRITE_VREG(HEVC_DBLK_CFG0,
11206 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11207 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11208 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11209 (1 << 2)); /* 2 */
11210 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11211 (1 << 24)); /* 24 */
11212 WRITE_VREG(HEVC_STREAM_CONTROL,
11213 READ_VREG(HEVC_STREAM_CONTROL) |
11214 (1 << 15)); /* 15 */
11215 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11216 (1 << 13)); /* 13 */
11217 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11218 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11219 (1 << 15)); /* 15 */
11220 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11221 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11222 (1 << 15)); /* 15 */
11223 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11224 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11225 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11226 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11227 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11228 (1 << 3)); /* 3 */
11229 }
11230#endif
11231#ifdef SWAP_HEVC_UCODE
11232 if (!tee_enabled() && hevc->is_swap &&
11233 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11234 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11235 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11236 }
11237#endif
11238
11239#ifndef MULTI_INSTANCE_SUPPORT
11240 set_vdec_func(&vh265_dec_status);
11241#endif
11242 amhevc_start();
11243 hevc->stat |= STAT_VDEC_RUN;
11244 hevc->init_flag = 1;
11245 error_handle_threshold = 30;
11246 /* pr_info("%d, vh265_init, RP=0x%x\n",
11247 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11248 */
11249
11250 return 0;
11251}
11252
11253static int vh265_stop(struct hevc_state_s *hevc)
11254{
11255 if (get_dbg_flag(hevc) &
11256 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11257 int wait_timeout_count = 0;
11258
11259 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11260 HEVC_CODED_SLICE_SEGMENT_DAT &&
11261 wait_timeout_count < 10){
11262 wait_timeout_count++;
11263 msleep(20);
11264 }
11265 }
11266 if (hevc->stat & STAT_VDEC_RUN) {
11267 amhevc_stop();
11268 hevc->stat &= ~STAT_VDEC_RUN;
11269 }
11270
11271 if (hevc->stat & STAT_ISR_REG) {
11272#ifdef MULTI_INSTANCE_SUPPORT
11273 if (!hevc->m_ins_flag)
11274#endif
11275 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11276 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11277 hevc->stat &= ~STAT_ISR_REG;
11278 }
11279
11280 hevc->stat &= ~STAT_TIMER_INIT;
11281 if (hevc->stat & STAT_TIMER_ARM) {
11282 del_timer_sync(&hevc->timer);
11283 hevc->stat &= ~STAT_TIMER_ARM;
11284 }
11285
11286 if (hevc->stat & STAT_VF_HOOK) {
11287 if (fr_hint_status == VDEC_HINTED) {
11288 vf_notify_receiver(hevc->provider_name,
11289 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11290 NULL);
11291 }
11292 fr_hint_status = VDEC_NO_NEED_HINT;
11293 vf_unreg_provider(&vh265_vf_prov);
11294 hevc->stat &= ~STAT_VF_HOOK;
11295 }
11296
11297 hevc_local_uninit(hevc);
11298
11299 if (use_cma) {
11300 hevc->uninit_list = 1;
11301 up(&h265_sema);
11302#ifdef USE_UNINIT_SEMA
11303 down(&hevc->h265_uninit_done_sema);
11304 if (!IS_ERR(h265_task)) {
11305 kthread_stop(h265_task);
11306 h265_task = NULL;
11307 }
11308#else
11309 while (hevc->uninit_list) /* wait uninit complete */
11310 msleep(20);
11311#endif
11312
11313 }
11314 hevc->init_flag = 0;
11315 hevc->first_sc_checked = 0;
11316 cancel_work_sync(&hevc->notify_work);
11317 cancel_work_sync(&hevc->set_clk_work);
11318 uninit_mmu_buffers(hevc);
11319 amhevc_disable();
11320
11321 kfree(gvs);
11322 gvs = NULL;
11323
11324 return 0;
11325}
11326
11327#ifdef MULTI_INSTANCE_SUPPORT
11328static void reset_process_time(struct hevc_state_s *hevc)
11329{
11330 if (hevc->start_process_time) {
11331 unsigned int process_time =
11332 1000 * (jiffies - hevc->start_process_time) / HZ;
11333 hevc->start_process_time = 0;
11334 if (process_time > max_process_time[hevc->index])
11335 max_process_time[hevc->index] = process_time;
11336 }
11337}
11338
11339static void start_process_time(struct hevc_state_s *hevc)
11340{
11341 hevc->start_process_time = jiffies;
11342 hevc->decode_timeout_count = 2;
11343 hevc->last_lcu_idx = 0;
11344}
11345
11346static void restart_process_time(struct hevc_state_s *hevc)
11347{
11348 hevc->start_process_time = jiffies;
11349 hevc->decode_timeout_count = 2;
11350}
11351
11352static void timeout_process(struct hevc_state_s *hevc)
11353{
11354 /*
 11355 * If vh265_work arrives right at this timeout point,
 11356 * let it handle the scenario.
11357 */
11358 if (work_pending(&hevc->work))
11359 return;
11360
11361 hevc->timeout_num++;
11362 amhevc_stop();
11363 read_decode_info(hevc);
11364
11365 hevc_print(hevc,
11366 0, "%s decoder timeout\n", __func__);
11367 check_pic_decoded_error(hevc,
11368 hevc->pic_decoded_lcu_idx);
11369 hevc->decoded_poc = hevc->curr_POC;
11370 hevc->decoding_pic = NULL;
11371 hevc->dec_result = DEC_RESULT_DONE;
11372 reset_process_time(hevc);
11373
11374 if (work_pending(&hevc->work))
11375 return;
11376 vdec_schedule_work(&hevc->timeout_work);
11377}
11378
11379#ifdef CONSTRAIN_MAX_BUF_NUM
11380static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11381{
11382 struct PIC_s *pic;
11383 int i;
11384 int count = 0;
11385 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11386 pic = hevc->m_PIC[i];
11387 if (pic == NULL || pic->index == -1)
11388 continue;
11389 if (pic->output_mark == 0 && pic->referenced == 0
11390 && pic->output_ready == 1)
11391 count++;
11392 }
11393
11394 return count;
11395}
11396
11397static int get_used_buf_count(struct hevc_state_s *hevc)
11398{
11399 struct PIC_s *pic;
11400 int i;
11401 int count = 0;
11402 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11403 pic = hevc->m_PIC[i];
11404 if (pic == NULL || pic->index == -1)
11405 continue;
11406 if (pic->output_mark != 0 || pic->referenced != 0
11407 || pic->output_ready != 0)
11408 count++;
11409 }
11410
11411 return count;
11412}
11413#endif
11414
11415
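/*
 * Return 1 when a free PIC slot is available for the next picture
 * (or the pic list is not initialized yet). When the DPB is starved
 * while the receiver is inactive, errored reference pictures are
 * released so that decoding can continue (see the comment below).
 */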
11416static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11417{
11418 struct PIC_s *new_pic = NULL;
11419 struct PIC_s *pic;
11420 /* recycle un-used pic */
11421 int i;
11422 int ref_pic = 0;
11423 struct vdec_s *vdec = hw_to_vdec(hevc);
11424 /*return 1 if pic_list is not initialized yet*/
11425 if (hevc->pic_list_init_flag != 3)
11426 return 1;
11427
11428 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11429 pic = hevc->m_PIC[i];
11430 if (pic == NULL || pic->index == -1)
11431 continue;
11432 if (pic->referenced == 1)
11433 ref_pic++;
11434 if (pic->output_mark == 0 && pic->referenced == 0
11435 && pic->output_ready == 0
11436 ) {
11437 if (new_pic) {
11438 if (pic->POC < new_pic->POC)
11439 new_pic = pic;
11440 } else
11441 new_pic = pic;
11442 }
11443 }
11444/*If the number of reference frames in the DPB >= (the DPB buffer size - the number of reorders - 3)*/
11445/*and the back-end state is RECEIVER_INACTIVE, the decoder will have no buffer to decode into.*/
11446/*In that case the errored reference frames are released and the error flag is set.*/
11447/*The 3 accounts for 2 fields needed by the back-end for display and 1 field needed for decoding*/
11448/*when the stream is interlaced.*/
11449 if ((!hevc->is_used_v4l) && (new_pic == NULL) &&
11450 (ref_pic >=
11451 get_work_pic_num(hevc) -
11452 hevc->sps_num_reorder_pics_0 - 3)) {
11453 enum receviver_start_e state = RECEIVER_INACTIVE;
11454 if (vf_get_receiver(vdec->vf_provider_name)) {
11455 state =
11456 vf_notify_receiver(vdec->vf_provider_name,
11457 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11458 NULL);
11459 if ((state == RECEIVER_STATE_NULL)
11460 || (state == RECEIVER_STATE_NONE))
11461 state = RECEIVER_INACTIVE;
11462 }
11463 if (state == RECEIVER_INACTIVE) {
11464 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11465 pic = hevc->m_PIC[i];
11466 if (pic == NULL || pic->index == -1)
11467 continue;
11468
11469 if ((pic->referenced == 1) &&
11470 (pic->error_mark == 1)) {
11471 pic->referenced = 0;
11472 put_mv_buf(hevc, pic);
11473 }
11474 pic->error_mark = 1;
11475 }
11476 }
11477 }
11478
11479 return (new_pic != NULL) ? 1 : 0;
11480}
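
/*
 * Summary (descriptive only): is_new_pic_available() reports whether a free
 * picture buffer exists, i.e. one with none of output_mark, referenced or
 * output_ready set, preferring the lowest POC among the free ones. On the
 * non-v4l path, if no buffer is free, the reference count has reached
 * (work pic num - sps_num_reorder_pics_0 - 3) and the receiver is inactive,
 * error-marked reference pictures are released and every picture is flagged
 * as an error so decoding can recover instead of stalling.
 */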
11481
11482static int vmh265_stop(struct hevc_state_s *hevc)
11483{
11484 if (hevc->stat & STAT_TIMER_ARM) {
11485 del_timer_sync(&hevc->timer);
11486 hevc->stat &= ~STAT_TIMER_ARM;
11487 }
11488 if (hevc->stat & STAT_VDEC_RUN) {
11489 amhevc_stop();
11490 hevc->stat &= ~STAT_VDEC_RUN;
11491 }
11492 if (hevc->stat & STAT_ISR_REG) {
11493 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11494 hevc->stat &= ~STAT_ISR_REG;
11495 }
11496
11497 if (hevc->stat & STAT_VF_HOOK) {
11498 if (fr_hint_status == VDEC_HINTED)
11499 vf_notify_receiver(hevc->provider_name,
11500 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11501 NULL);
11502 fr_hint_status = VDEC_NO_NEED_HINT;
11503 vf_unreg_provider(&vh265_vf_prov);
11504 hevc->stat &= ~STAT_VF_HOOK;
11505 }
11506
11507 hevc_local_uninit(hevc);
11508
11509 hevc->init_flag = 0;
11510 hevc->first_sc_checked = 0;
11511 cancel_work_sync(&hevc->notify_work);
11512 cancel_work_sync(&hevc->set_clk_work);
11513 cancel_work_sync(&hevc->timeout_work);
11514
11515 uninit_mmu_buffers(hevc);
11516
11517 if (use_cma) {
11518 hevc->uninit_list = 1;
11519 reset_process_time(hevc);
11520 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
11521 vdec_schedule_work(&hevc->work);
11522 flush_work(&hevc->work);
11523#ifdef USE_UNINIT_SEMA
11524 if (hevc->init_flag) {
11525 down(&hevc->h265_uninit_done_sema);
11526 }
11527#else
11528 while (hevc->uninit_list) /* wait uninit complete */
11529 msleep(20);
11530#endif
11531 }
11532 cancel_work_sync(&hevc->work);
11533
11534 vfree(hevc->fw);
11535 hevc->fw = NULL;
11536
11537 dump_log(hevc);
11538 return 0;
11539}
11540
11541static unsigned char get_data_check_sum
11542 (struct hevc_state_s *hevc, int size)
11543{
11544 int jj;
11545 int sum = 0;
11546 u8 *data = NULL;
11547
11548 if (!hevc->chunk->block->is_mapped)
11549 data = codec_mm_vmap(hevc->chunk->block->start +
11550 hevc->chunk->offset, size);
11551 else
11552 data = ((u8 *)hevc->chunk->block->start_virt) +
11553 hevc->chunk->offset;
11554
11555 for (jj = 0; jj < size; jj++)
11556 sum += data[jj];
11557
11558 if (!hevc->chunk->block->is_mapped)
11559 codec_mm_unmap_phyaddr(data);
11560 return sum;
11561}
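
/*
 * Note: get_data_check_sum() is a plain byte sum over the current input
 * chunk, truncated to 8 bits by the return type; it is only used to
 * cross-check frame-based input data in the debug/trace prints.
 */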
11562
11563static void vh265_notify_work(struct work_struct *work)
11564{
11565 struct hevc_state_s *hevc =
11566 container_of(work,
11567 struct hevc_state_s,
11568 notify_work);
11569 struct vdec_s *vdec = hw_to_vdec(hevc);
11570#ifdef MULTI_INSTANCE_SUPPORT
11571 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
11572 vf_notify_receiver(hevc->provider_name,
11573 VFRAME_EVENT_PROVIDER_FR_HINT,
11574 (void *)
11575 ((unsigned long)hevc->frame_dur));
11576 vdec->fr_hint_state = VDEC_HINTED;
11577 } else if (fr_hint_status == VDEC_NEED_HINT) {
11578 vf_notify_receiver(hevc->provider_name,
11579 VFRAME_EVENT_PROVIDER_FR_HINT,
11580 (void *)
11581 ((unsigned long)hevc->frame_dur));
11582 fr_hint_status = VDEC_HINTED;
11583 }
11584#else
11585	if (fr_hint_status == VDEC_NEED_HINT) {
11586		vf_notify_receiver(PROVIDER_NAME,
11587			VFRAME_EVENT_PROVIDER_FR_HINT,
11588			(void *)
11589			((unsigned long)hevc->frame_dur));
11590		fr_hint_status = VDEC_HINTED;
11591 }
11592#endif
11593
11594 return;
11595}
11596
11597static void vh265_work_implement(struct hevc_state_s *hevc,
11598	struct vdec_s *vdec, int from)
11599{
11600 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
11601 /*USE_BUF_BLOCK*/
11602 uninit_pic_list(hevc);
11603 hevc_print(hevc, 0, "uninit list\n");
11604 hevc->uninit_list = 0;
11605#ifdef USE_UNINIT_SEMA
11606 up(&hevc->h265_uninit_done_sema);
11607#endif
11608 return;
11609 }
11610
11611 /* finished decoding one frame or error,
11612 * notify vdec core to switch context
11613 */
11614 if (hevc->pic_list_init_flag == 1
11615 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
11616 hevc->pic_list_init_flag = 2;
11617 init_pic_list(hevc);
11618 init_pic_list_hw(hevc);
11619 init_buf_spec(hevc);
11620 hevc_print(hevc, 0,
11621 "set pic_list_init_flag to 2\n");
11622
11623 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11624 return;
11625 }
11626
11627 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11628 "%s dec_result %d %x %x %x\n",
11629 __func__,
11630 hevc->dec_result,
11631 READ_VREG(HEVC_STREAM_LEVEL),
11632 READ_VREG(HEVC_STREAM_WR_PTR),
11633 READ_VREG(HEVC_STREAM_RD_PTR));
11634
11635 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
11636 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
11637 && (hw_to_vdec(hevc)->next_status !=
11638 VDEC_STATUS_DISCONNECTED)) {
11639 if (!vdec_has_more_input(vdec)) {
11640 hevc->dec_result = DEC_RESULT_EOS;
11641 vdec_schedule_work(&hevc->work);
11642 return;
11643 }
11644 if (!input_frame_based(vdec)) {
11645 int r = vdec_sync_input(vdec);
11646 if (r >= 0x200) {
11647 WRITE_VREG(HEVC_DECODE_SIZE,
11648 READ_VREG(HEVC_DECODE_SIZE) + r);
11649
11650 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11651 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
11652 __func__,
11653 READ_VREG(HEVC_STREAM_LEVEL),
11654 READ_VREG(HEVC_STREAM_WR_PTR),
11655 READ_VREG(HEVC_STREAM_RD_PTR),
11656 READ_VREG(HEVC_MPC_E), r);
11657
11658 start_process_time(hevc);
11659 if (READ_VREG(HEVC_DEC_STATUS_REG)
11660 == HEVC_DECODE_BUFEMPTY2)
11661 WRITE_VREG(HEVC_DEC_STATUS_REG,
11662 HEVC_ACTION_DONE);
11663 else
11664 WRITE_VREG(HEVC_DEC_STATUS_REG,
11665 HEVC_ACTION_DEC_CONT);
11666 } else {
11667 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11668 vdec_schedule_work(&hevc->work);
11669 }
11670 return;
11671 }
11672
11673 /*below for frame_base*/
11674 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
11675 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11676 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
11677 __func__,
11678 READ_VREG(HEVC_STREAM_LEVEL),
11679 READ_VREG(HEVC_STREAM_WR_PTR),
11680 READ_VREG(HEVC_STREAM_RD_PTR),
11681 READ_VREG(HEVC_MPC_E));
11682 mutex_lock(&hevc->chunks_mutex);
11683 vdec_vframe_dirty(vdec, hevc->chunk);
11684 hevc->chunk = NULL;
11685 mutex_unlock(&hevc->chunks_mutex);
11686 vdec_clean_input(vdec);
11687 }
11688
11689 /*if (is_new_pic_available(hevc)) {*/
11690 if (run_ready(vdec, VDEC_HEVC)) {
11691 int r;
11692 int decode_size;
11693 r = vdec_prepare_input(vdec, &hevc->chunk);
11694 if (r < 0) {
11695 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11696
11697 hevc_print(hevc,
11698 PRINT_FLAG_VDEC_DETAIL,
11699 "amvdec_vh265: Insufficient data\n");
11700
11701 vdec_schedule_work(&hevc->work);
11702 return;
11703 }
11704 hevc->dec_result = DEC_RESULT_NONE;
11705 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11706 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
11707 __func__, r,
11708 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
11709 get_data_check_sum(hevc, r) : 0,
11710 READ_VREG(HEVC_MPC_E));
11711
11712 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
11713 int jj;
11714 u8 *data = NULL;
11715
11716 if (!hevc->chunk->block->is_mapped)
11717 data = codec_mm_vmap(
11718 hevc->chunk->block->start +
11719 hevc->chunk->offset, r);
11720 else
11721 data = ((u8 *)
11722 hevc->chunk->block->start_virt)
11723 + hevc->chunk->offset;
11724
11725 for (jj = 0; jj < r; jj++) {
11726 if ((jj & 0xf) == 0)
11727 hevc_print(hevc,
11728 PRINT_FRAMEBASE_DATA,
11729 "%06x:", jj);
11730 hevc_print_cont(hevc,
11731 PRINT_FRAMEBASE_DATA,
11732 "%02x ", data[jj]);
11733 if (((jj + 1) & 0xf) == 0)
11734 hevc_print_cont(hevc,
11735 PRINT_FRAMEBASE_DATA,
11736 "\n");
11737 }
11738
11739 if (!hevc->chunk->block->is_mapped)
11740 codec_mm_unmap_phyaddr(data);
11741 }
11742
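			/*
			 * decode_size is the chunk size plus the low bits of
			 * the chunk offset, presumably because the HW fetches
			 * from a VDEC_FIFO_ALIGN aligned address and the
			 * unaligned leading bytes must be counted as well.
			 */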
11743 decode_size = hevc->chunk->size +
11744 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
11745 WRITE_VREG(HEVC_DECODE_SIZE,
11746 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
11747
11748 vdec_enable_input(vdec);
11749
11750 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11751 "%s: mpc %x\n",
11752 __func__, READ_VREG(HEVC_MPC_E));
11753
11754 start_process_time(hevc);
11755 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11756		} else {
11757 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11758
11759 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11760 * "amvdec_vh265: Insufficient data\n");
11761 */
11762
11763 vdec_schedule_work(&hevc->work);
11764 }
11765 return;
11766 } else if (hevc->dec_result == DEC_RESULT_DONE) {
11767 /* if (!hevc->ctx_valid)
11768 hevc->ctx_valid = 1; */
11769 decode_frame_count[hevc->index]++;
11770#ifdef DETREFILL_ENABLE
11771 if (hevc->is_swap &&
11772 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11773 if (hevc->delrefill_check == 2) {
11774 delrefill(hevc);
11775 amhevc_stop();
11776 }
11777 }
11778#endif
11779 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
11780 hevc->used_4k_num =
11781 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
11782 if (hevc->used_4k_num >= 0 &&
11783 hevc->cur_pic &&
11784 hevc->cur_pic->scatter_alloc
11785 == 1) {
11786 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
11787 "%s pic index %d scatter_alloc %d page_start %d\n",
11788 "decoder_mmu_box_free_idx_tail",
11789 hevc->cur_pic->index,
11790 hevc->cur_pic->scatter_alloc,
11791 hevc->used_4k_num);
11792 if (hevc->m_ins_flag)
11793 hevc_mmu_dma_check(hw_to_vdec(hevc));
11794 decoder_mmu_box_free_idx_tail(
11795 hevc->mmu_box,
11796 hevc->cur_pic->index,
11797 hevc->used_4k_num);
11798 hevc->cur_pic->scatter_alloc = 2;
11799 }
11800 }
11801 hevc->pic_decoded_lcu_idx =
11802 READ_VREG(HEVC_PARSER_LCU_START)
11803 & 0xffffff;
11804
11805 if (vdec->master == NULL && vdec->slave == NULL &&
11806 hevc->empty_flag == 0) {
11807 hevc->over_decode =
11808 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
11809 if (hevc->over_decode)
11810 hevc_print(hevc, 0,
11811 "!!!Over decode\n");
11812 }
11813
11814 if (is_log_enable(hevc))
11815 add_log(hevc,
11816 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
11817 __func__,
11818 hevc->dec_result,
11819 hevc->pic_decoded_lcu_idx,
11820 hevc->used_4k_num,
11821 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11822 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11823 hevc->start_shift_bytes
11824 );
11825
11826 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11827 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
11828 __func__,
11829 hevc->dec_result,
11830 READ_VREG(HEVC_STREAM_LEVEL),
11831 READ_VREG(HEVC_STREAM_WR_PTR),
11832 READ_VREG(HEVC_STREAM_RD_PTR),
11833 hevc->pic_decoded_lcu_idx,
11834 hevc->used_4k_num,
11835 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11836 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11837 hevc->start_shift_bytes
11838 );
11839
11840 hevc->used_4k_num = -1;
11841
11842 check_pic_decoded_error(hevc,
11843 hevc->pic_decoded_lcu_idx);
11844#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11845#if 1
11846 if (vdec->slave) {
11847 if (dv_debug & 0x1)
11848 vdec_set_flag(vdec->slave,
11849 VDEC_FLAG_SELF_INPUT_CONTEXT);
11850 else
11851 vdec_set_flag(vdec->slave,
11852 VDEC_FLAG_OTHER_INPUT_CONTEXT);
11853 }
11854#else
11855 if (vdec->slave) {
11856 if (no_interleaved_el_slice)
11857 vdec_set_flag(vdec->slave,
11858 VDEC_FLAG_INPUT_KEEP_CONTEXT);
11859 /* this will move real HW pointer for input */
11860 else
11861 vdec_set_flag(vdec->slave, 0);
11862 /* this will not move real HW pointer
11863 *and SL layer decoding
11864 *will start from same stream position
11865 *as current BL decoder
11866 */
11867 }
11868#endif
11869#endif
11870#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11871 hevc->shift_byte_count_lo
11872 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11873 if (vdec->slave) {
11874 /*cur is base, found enhance*/
11875 struct hevc_state_s *hevc_el =
11876 (struct hevc_state_s *)
11877 vdec->slave->private;
11878 if (hevc_el)
11879 hevc_el->shift_byte_count_lo =
11880 hevc->shift_byte_count_lo;
11881 } else if (vdec->master) {
11882 /*cur is enhance, found base*/
11883 struct hevc_state_s *hevc_ba =
11884 (struct hevc_state_s *)
11885 vdec->master->private;
11886 if (hevc_ba)
11887 hevc_ba->shift_byte_count_lo =
11888 hevc->shift_byte_count_lo;
11889 }
11890#endif
11891 mutex_lock(&hevc->chunks_mutex);
11892 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11893 hevc->chunk = NULL;
11894 mutex_unlock(&hevc->chunks_mutex);
11895 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
11896 /*
11897 stream base: stream buf empty or timeout
11898 frame base: vdec_prepare_input fail
11899 */
11900 if (!vdec_has_more_input(vdec)) {
11901 hevc->dec_result = DEC_RESULT_EOS;
11902 vdec_schedule_work(&hevc->work);
11903 return;
11904 }
11905#ifdef AGAIN_HAS_THRESHOLD
11906 hevc->next_again_flag = 1;
11907#endif
11908 } else if (hevc->dec_result == DEC_RESULT_EOS) {
11909 struct PIC_s *pic;
11910 hevc->eos = 1;
11911#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11912 if ((vdec->master || vdec->slave) &&
11913 aux_data_is_avaible(hevc))
11914 dolby_get_meta(hevc);
11915#endif
11916 check_pic_decoded_error(hevc,
11917 hevc->pic_decoded_lcu_idx);
11918 pic = get_pic_by_POC(hevc, hevc->curr_POC);
11919 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11920 "%s: end of stream, last dec poc %d => 0x%pf\n",
11921 __func__, hevc->curr_POC, pic);
11922 flush_output(hevc, pic);
11923
11924 if (hevc->is_used_v4l)
11925 notify_v4l_eos(hw_to_vdec(hevc));
11926#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11927 hevc->shift_byte_count_lo
11928 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11929 if (vdec->slave) {
11930 /*cur is base, found enhance*/
11931 struct hevc_state_s *hevc_el =
11932 (struct hevc_state_s *)
11933 vdec->slave->private;
11934 if (hevc_el)
11935 hevc_el->shift_byte_count_lo =
11936 hevc->shift_byte_count_lo;
11937 } else if (vdec->master) {
11938 /*cur is enhance, found base*/
11939 struct hevc_state_s *hevc_ba =
11940 (struct hevc_state_s *)
11941 vdec->master->private;
11942 if (hevc_ba)
11943 hevc_ba->shift_byte_count_lo =
11944 hevc->shift_byte_count_lo;
11945 }
11946#endif
11947 mutex_lock(&hevc->chunks_mutex);
11948 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11949 hevc->chunk = NULL;
11950 mutex_unlock(&hevc->chunks_mutex);
11951 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
11952 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11953 "%s: force exit\n",
11954 __func__);
11955 if (hevc->stat & STAT_VDEC_RUN) {
11956 amhevc_stop();
11957 hevc->stat &= ~STAT_VDEC_RUN;
11958 }
11959 if (hevc->stat & STAT_ISR_REG) {
11960 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11961 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11962 hevc->stat &= ~STAT_ISR_REG;
11963 }
11964 hevc_print(hevc, 0, "%s: force exit end\n",
11965 __func__);
11966 }
11967
11968 if (hevc->stat & STAT_VDEC_RUN) {
11969 amhevc_stop();
11970 hevc->stat &= ~STAT_VDEC_RUN;
11971 }
11972
11973 if (hevc->stat & STAT_TIMER_ARM) {
11974 del_timer_sync(&hevc->timer);
11975 hevc->stat &= ~STAT_TIMER_ARM;
11976 }
11977
11978 wait_hevc_search_done(hevc);
11979#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11980 if (hevc->switch_dvlayer_flag) {
11981 if (vdec->slave)
11982 vdec_set_next_sched(vdec, vdec->slave);
11983 else if (vdec->master)
11984 vdec_set_next_sched(vdec, vdec->master);
11985 } else if (vdec->slave || vdec->master)
11986 vdec_set_next_sched(vdec, vdec);
11987#endif
11988
11989 if (from == 1) {
11990 /* This is a timeout work */
11991 if (work_pending(&hevc->work)) {
11992 /*
11993 * The vh265_work arrives at the last second,
11994 * give it a chance to handle the scenario.
11995 */
11996 return;
11997			//cancel_work_sync(&hevc->work);//reserved for future consideration
11998 }
11999 }
12000
12001 /* mark itself has all HW resource released and input released */
12002 if (vdec->parallel_dec == 1)
12003 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
12004 else
12005 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12006
12007 if (hevc->is_used_v4l) {
12008 struct aml_vcodec_ctx *ctx =
12009 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12010
12011 if (ctx->param_sets_from_ucode &&
12012 !hevc->v4l_params_parsed)
12013 vdec_v4l_write_frame_sync(ctx);
12014 }
12015
12016 if (hevc->vdec_cb)
12017 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
12018}
12019
12020static void vh265_work(struct work_struct *work)
12021{
12022 struct hevc_state_s *hevc = container_of(work,
12023 struct hevc_state_s, work);
12024 struct vdec_s *vdec = hw_to_vdec(hevc);
12025
12026 vh265_work_implement(hevc, vdec, 0);
12027}
12028
12029static void vh265_timeout_work(struct work_struct *work)
12030{
12031 struct hevc_state_s *hevc = container_of(work,
12032 struct hevc_state_s, timeout_work);
12033 struct vdec_s *vdec = hw_to_vdec(hevc);
12034
12035 if (work_pending(&hevc->work))
12036 return;
12037 vh265_work_implement(hevc, vdec, 1);
12038}
12039
12040
12041static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
12042{
12043 /* new to do ... */
12044 vh265_prot_init(hevc);
12045 return 0;
12046}
12047static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
12048{
12049 struct hevc_state_s *hevc =
12050 (struct hevc_state_s *)vdec->private;
12051 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
12052 CODEC_MM_FLAGS_TVP : 0;
12053 bool ret = 0;
12054 if (step == 0x12)
12055 return 0;
12056 else if (step == 0x11)
12057 step = 0x12;
12058
12059 if (hevc->eos)
12060 return 0;
12061 if (!hevc->first_sc_checked && hevc->mmu_enable) {
12062 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
12063		hevc->first_sc_checked = 1;
12064 hevc_print(hevc, 0,
12065 "vh265 cached=%d need_size=%d speed= %d ms\n",
12066 size, (hevc->need_cache_size >> PAGE_SHIFT),
12067 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
12068 }
12069 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
12070 && pre_decode_buf_level != 0) {
12071 u32 rp, wp, level;
12072
12073 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
12074 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
12075 if (wp < rp)
12076 level = vdec->input.size + wp - rp;
12077 else
12078 level = wp - rp;
12079
12080 if (level < pre_decode_buf_level)
12081 return 0;
12082 }
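
	/*
	 * Fill-level sketch (illustrative only): the stream buffer is a ring,
	 * so the occupied level computed above is (wp - rp) with a wrap
	 * correction, i.e.
	 *
	 *	level = (wp < rp) ? (vdec->input.size + wp - rp) : (wp - rp);
	 *
	 * Decoding is deferred until at least pre_decode_buf_level bytes are
	 * buffered, but only before the instance has completed its first init.
	 */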
12083
12084#ifdef AGAIN_HAS_THRESHOLD
12085 if (hevc->next_again_flag &&
12086 (!vdec_frame_based(vdec))) {
12087 u32 parser_wr_ptr =
12088 READ_PARSER_REG(PARSER_VIDEO_WP);
12089 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
12090 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
12091 again_threshold) {
12092 int r = vdec_sync_input(vdec);
12093 hevc_print(hevc,
12094				PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
12095 return 0;
12096 }
12097 }
12098#endif
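
	/*
	 * With AGAIN_HAS_THRESHOLD, a stream-based instance whose last run
	 * ended with DEC_RESULT_AGAIN is not rescheduled until the parser
	 * write pointer has advanced by at least again_threshold bytes since
	 * that run, which avoids busy-looping on an almost-empty stream.
	 */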
12099
12100 if (disp_vframe_valve_level &&
12101 kfifo_len(&hevc->display_q) >=
12102 disp_vframe_valve_level) {
12103 hevc->valve_count--;
12104 if (hevc->valve_count <= 0)
12105 hevc->valve_count = 2;
12106 else
12107 return 0;
12108 }
12109
12110 ret = is_new_pic_available(hevc);
12111 if (!ret) {
12112 hevc_print(hevc,
12113 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
12114 __func__, ret);
12115 }
12116
12117#ifdef CONSTRAIN_MAX_BUF_NUM
12118 if (hevc->pic_list_init_flag == 3) {
12119 if (run_ready_max_vf_only_num > 0 &&
12120 get_vf_ref_only_buf_count(hevc) >=
12121 run_ready_max_vf_only_num
12122 )
12123 ret = 0;
12124 if (run_ready_display_q_num > 0 &&
12125 kfifo_len(&hevc->display_q) >=
12126 run_ready_display_q_num)
12127 ret = 0;
12128
12129 /*avoid more buffers consumed when
12130 switching resolution*/
12131 if (run_ready_max_buf_num == 0xff &&
12132 get_used_buf_count(hevc) >=
12133 get_work_pic_num(hevc))
12134 ret = 0;
12135 else if (run_ready_max_buf_num &&
12136 get_used_buf_count(hevc) >=
12137 run_ready_max_buf_num)
12138 ret = 0;
12139 }
12140#endif
12141
12142 if (hevc->is_used_v4l) {
12143 struct aml_vcodec_ctx *ctx =
12144 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12145
12146 if (ctx->param_sets_from_ucode &&
12147 !ctx->v4l_codec_ready &&
12148 hevc->v4l_params_parsed) {
12149 ret = 0; /*the params has parsed.*/
12150 } else if (!ctx->v4l_codec_dpb_ready)
12151 ret = 0;
12152 }
12153
12154 if (ret)
12155 not_run_ready[hevc->index] = 0;
12156 else
12157 not_run_ready[hevc->index]++;
12158 if (vdec->parallel_dec == 1)
12159 return ret ? (CORE_MASK_HEVC) : 0;
12160 else
12161 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
12162}
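
/*
 * run_ready() returns the set of decoder cores this instance needs for the
 * next run: CORE_MASK_HEVC alone when parallel decoding is enabled,
 * otherwise HEVC plus VDEC_1 as a combined reservation; 0 means not ready,
 * do not schedule.
 */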
12163
12164static void run(struct vdec_s *vdec, unsigned long mask,
12165 void (*callback)(struct vdec_s *, void *), void *arg)
12166{
12167 struct hevc_state_s *hevc =
12168 (struct hevc_state_s *)vdec->private;
12169 int r, loadr = 0;
12170 unsigned char check_sum = 0;
12171
12172 run_count[hevc->index]++;
12173 hevc->vdec_cb_arg = arg;
12174 hevc->vdec_cb = callback;
12175 hevc->aux_data_dirty = 1;
12176 hevc_reset_core(vdec);
12177
12178#ifdef AGAIN_HAS_THRESHOLD
12179 hevc->pre_parser_wr_ptr =
12180 READ_PARSER_REG(PARSER_VIDEO_WP);
12181 hevc->next_again_flag = 0;
12182#endif
12183 r = vdec_prepare_input(vdec, &hevc->chunk);
12184 if (r < 0) {
12185 input_empty[hevc->index]++;
12186 hevc->dec_result = DEC_RESULT_AGAIN;
12187 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12188 "ammvdec_vh265: Insufficient data\n");
12189
12190 vdec_schedule_work(&hevc->work);
12191 return;
12192 }
12193 input_empty[hevc->index] = 0;
12194 hevc->dec_result = DEC_RESULT_NONE;
12195 if (vdec_frame_based(vdec) &&
12196 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12197 || is_log_enable(hevc)))
12198 check_sum = get_data_check_sum(hevc, r);
12199
12200 if (is_log_enable(hevc))
12201 add_log(hevc,
12202 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12203 __func__, r,
12204 check_sum,
12205 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12206 );
12207 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12208 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12209 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12210 __func__, r,
12211 check_sum,
12212 READ_VREG(HEVC_STREAM_LEVEL),
12213 READ_VREG(HEVC_STREAM_WR_PTR),
12214 READ_VREG(HEVC_STREAM_RD_PTR),
12215 READ_PARSER_REG(PARSER_VIDEO_RP),
12216 READ_PARSER_REG(PARSER_VIDEO_WP),
12217 hevc->start_shift_bytes
12218 );
12219 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12220 input_frame_based(vdec)) {
12221 int jj;
12222 u8 *data = NULL;
12223
12224 if (!hevc->chunk->block->is_mapped)
12225 data = codec_mm_vmap(hevc->chunk->block->start +
12226 hevc->chunk->offset, r);
12227 else
12228 data = ((u8 *)hevc->chunk->block->start_virt)
12229 + hevc->chunk->offset;
12230
12231 for (jj = 0; jj < r; jj++) {
12232 if ((jj & 0xf) == 0)
12233 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12234 "%06x:", jj);
12235 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12236 "%02x ", data[jj]);
12237 if (((jj + 1) & 0xf) == 0)
12238 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12239 "\n");
12240 }
12241
12242 if (!hevc->chunk->block->is_mapped)
12243 codec_mm_unmap_phyaddr(data);
12244 }
12245 if (vdec->mc_loaded) {
12246	/*
12247	 * firmware has already been loaded and has not changed
12248	 * to another type, so skip reloading it.
12249	 */
12250 if (tee_enabled() && hevc->is_swap &&
12251 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12252 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12253 } else {
12254 if (hevc->mmu_enable)
12255 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12256 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12257 "h265_mmu", hevc->fw->data);
12258 else {
12259 if (!hevc->is_4k) {
12260				/* try the swap fw first; if an older fw */
12261				/* package is installed it does not contain */
12262				/* the swap fw, so fall back to the no-swap fw. */
12263 loadr = amhevc_vdec_loadmc_ex(
12264 VFORMAT_HEVC, vdec,
12265 "hevc_mmu_swap",
12266 hevc->fw->data);
12267 if (loadr < 0)
12268 loadr = amhevc_vdec_loadmc_ex(
12269 VFORMAT_HEVC, vdec,
12270 "h265_mmu",
12271 hevc->fw->data);
12272 else
12273 hevc->is_swap = true;
12274 } else
12275 loadr = amhevc_vdec_loadmc_ex(
12276 VFORMAT_HEVC, vdec,
12277 "h265_mmu", hevc->fw->data);
12278 }
12279 else
12280 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12281 NULL, hevc->fw->data);
12282 if (loadr < 0) {
12283 amhevc_disable();
12284 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12285 tee_enabled() ? "TEE" : "local", loadr);
12286 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12287 vdec_schedule_work(&hevc->work);
12288 return;
12289 }
12290
12291 if (tee_enabled() && hevc->is_swap &&
12292 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12293 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12294#ifdef DETREFILL_ENABLE
12295 if (hevc->is_swap &&
12296 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12297 init_detrefill_buf(hevc);
12298#endif
12299 vdec->mc_loaded = 1;
12300 vdec->mc_type = VFORMAT_HEVC;
12301 }
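
	/*
	 * Firmware selection above (descriptive): with the MMU enabled,
	 * GXM-and-earlier SoCs decoding a non-4K stream try "hevc_mmu_swap"
	 * first and fall back to "h265_mmu" for older firmware packages;
	 * 4K streams on those SoCs and all newer SoCs load "h265_mmu"
	 * directly, and the non-MMU path loads the default HEVC firmware.
	 */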
12302 if (vh265_hw_ctx_restore(hevc) < 0) {
12303 vdec_schedule_work(&hevc->work);
12304 return;
12305 }
12306 vdec_enable_input(vdec);
12307
12308 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12309
12310 if (vdec_frame_based(vdec)) {
12311 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12312 r = hevc->chunk->size +
12313 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12314 hevc->decode_size = r;
12315 }
12316#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12317 else {
12318 if (vdec->master || vdec->slave)
12319 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12320 hevc->shift_byte_count_lo);
12321 }
12322#endif
12323 WRITE_VREG(HEVC_DECODE_SIZE, r);
12324 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12325 hevc->init_flag = 1;
12326
12327 if (hevc->pic_list_init_flag == 3)
12328 init_pic_list_hw(hevc);
12329
12330 backup_decode_state(hevc);
12331
12332 start_process_time(hevc);
12333 mod_timer(&hevc->timer, jiffies);
12334 hevc->stat |= STAT_TIMER_ARM;
12335 hevc->stat |= STAT_ISR_REG;
12336 amhevc_start();
12337 hevc->stat |= STAT_VDEC_RUN;
12338}
12339
12340static void aml_free_canvas(struct vdec_s *vdec)
12341{
12342 int i;
12343 struct hevc_state_s *hevc =
12344 (struct hevc_state_s *)vdec->private;
12345
12346 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12347 struct PIC_s *pic = hevc->m_PIC[i];
12348
12349 if (pic) {
12350 if (vdec->parallel_dec == 1) {
12351 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
12352 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
12353 }
12354 }
12355 }
12356}
12357
12358static void reset(struct vdec_s *vdec)
12359{
12360
12361 struct hevc_state_s *hevc =
12362 (struct hevc_state_s *)vdec->private;
12363 int i;
12364
12365 cancel_work_sync(&hevc->work);
12366 cancel_work_sync(&hevc->notify_work);
12367 if (hevc->stat & STAT_VDEC_RUN) {
12368 amhevc_stop();
12369 hevc->stat &= ~STAT_VDEC_RUN;
12370 }
12371
12372 if (hevc->stat & STAT_TIMER_ARM) {
12373 del_timer_sync(&hevc->timer);
12374 hevc->stat &= ~STAT_TIMER_ARM;
12375 }
12376 hevc->dec_result = DEC_RESULT_NONE;
12377 reset_process_time(hevc);
12378 hevc->init_flag = 0;
12379 hevc->pic_list_init_flag = 0;
12380 dealloc_mv_bufs(hevc);
12381 aml_free_canvas(vdec);
12382 hevc_local_uninit(hevc);
12383 if (vh265_local_init(hevc) < 0)
12384 pr_debug(" %s local init fail\n", __func__);
12385 for (i = 0; i < BUF_POOL_SIZE; i++) {
12386 hevc->m_BUF[i].start_adr = 0;
12387 }
12388
12389 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12390}
12391
12392static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12393{
12394 struct hevc_state_s *hevc =
12395 (struct hevc_state_s *)vdec->private;
12396
12397 return vh265_isr(0, hevc);
12398}
12399
12400static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12401{
12402 struct hevc_state_s *hevc =
12403 (struct hevc_state_s *)vdec->private;
12404
12405 return vh265_isr_thread_fn(0, hevc);
12406}
12407#endif
12408
12409static int amvdec_h265_probe(struct platform_device *pdev)
12410{
12411#ifdef MULTI_INSTANCE_SUPPORT
12412 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12413#else
12414 struct vdec_dev_reg_s *pdata =
12415 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12416#endif
12417 char *tmpbuf;
12418 int ret;
12419 struct hevc_state_s *hevc;
12420
12421 hevc = vmalloc(sizeof(struct hevc_state_s));
12422 if (hevc == NULL) {
12423 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12424 return -ENOMEM;
12425 }
12426 gHevc = hevc;
12427 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12428 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12429 H265_DEBUG_DIS_SYS_ERROR_PROC));
12430 memset(hevc, 0, sizeof(struct hevc_state_s));
12431 if (get_dbg_flag(hevc))
12432 hevc_print(hevc, 0, "%s\r\n", __func__);
12433 mutex_lock(&vh265_mutex);
12434
12435 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12436 (parser_sei_enable & 0x100) == 0)
12437 parser_sei_enable = 7; /*old 1*/
12438 hevc->m_ins_flag = 0;
12439 hevc->init_flag = 0;
12440 hevc->first_sc_checked = 0;
12441 hevc->uninit_list = 0;
12442 hevc->fatal_error = 0;
12443 hevc->show_frame_num = 0;
12444 hevc->frameinfo_enable = 1;
12445#ifdef MULTI_INSTANCE_SUPPORT
12446 hevc->platform_dev = pdev;
12447 platform_set_drvdata(pdev, pdata);
12448#endif
12449
12450 if (pdata == NULL) {
12451 hevc_print(hevc, 0,
12452 "\namvdec_h265 memory resource undefined.\n");
12453 vfree(hevc);
12454 mutex_unlock(&vh265_mutex);
12455 return -EFAULT;
12456 }
12457 if (mmu_enable_force == 0) {
12458 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12459 || double_write_mode == 0x10)
12460 hevc->mmu_enable = 0;
12461 else
12462 hevc->mmu_enable = 1;
12463 }
12464 if (init_mmu_buffers(hevc)) {
12465 hevc_print(hevc, 0,
12466 "\n 265 mmu init failed!\n");
12467 vfree(hevc);
12468 mutex_unlock(&vh265_mutex);
12469 return -EFAULT;
12470 }
12471
12472 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12473 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12474 if (ret < 0) {
12475 uninit_mmu_buffers(hevc);
12476 vfree(hevc);
12477 mutex_unlock(&vh265_mutex);
12478 return ret;
12479 }
12480 hevc->buf_size = work_buf_size;
12481
12482
12483 if (!vdec_secure(pdata)) {
12484 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12485 if (tmpbuf) {
12486 memset(tmpbuf, 0, work_buf_size);
12487 dma_sync_single_for_device(amports_get_dma_device(),
12488 hevc->buf_start,
12489 work_buf_size, DMA_TO_DEVICE);
12490 } else {
12491 tmpbuf = codec_mm_vmap(hevc->buf_start,
12492 work_buf_size);
12493 if (tmpbuf) {
12494 memset(tmpbuf, 0, work_buf_size);
12495 dma_sync_single_for_device(
12496 amports_get_dma_device(),
12497 hevc->buf_start,
12498 work_buf_size,
12499 DMA_TO_DEVICE);
12500 codec_mm_unmap_phyaddr(tmpbuf);
12501 }
12502 }
12503 }
12504
12505 if (get_dbg_flag(hevc)) {
12506 hevc_print(hevc, 0,
12507 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12508 hevc->buf_start, hevc->buf_size);
12509 }
12510
12511 if (pdata->sys_info)
12512 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12513 else {
12514 hevc->vh265_amstream_dec_info.width = 0;
12515 hevc->vh265_amstream_dec_info.height = 0;
12516 hevc->vh265_amstream_dec_info.rate = 30;
12517 }
12518#ifndef MULTI_INSTANCE_SUPPORT
12519 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12520 workaround_enable |= 3;
12521 hevc_print(hevc, 0,
12522 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
12523 } else
12524 workaround_enable &= ~3;
12525#endif
12526 hevc->cma_dev = pdata->cma_dev;
12527 vh265_vdec_info_init();
12528
12529#ifdef MULTI_INSTANCE_SUPPORT
12530 pdata->private = hevc;
12531 pdata->dec_status = vh265_dec_status;
12532 pdata->set_isreset = vh265_set_isreset;
12533 is_reset = 0;
12534 if (vh265_init(pdata) < 0) {
12535#else
12536 if (vh265_init(hevc) < 0) {
12537#endif
12538 hevc_print(hevc, 0,
12539 "\namvdec_h265 init failed.\n");
12540 hevc_local_uninit(hevc);
12541 uninit_mmu_buffers(hevc);
12542 vfree(hevc);
12543 pdata->dec_status = NULL;
12544 mutex_unlock(&vh265_mutex);
12545 return -ENODEV;
12546 }
12547 /*set the max clk for smooth playing...*/
12548 hevc_source_changed(VFORMAT_HEVC,
12549 3840, 2160, 60);
12550 mutex_unlock(&vh265_mutex);
12551
12552 return 0;
12553}
12554
12555static int amvdec_h265_remove(struct platform_device *pdev)
12556{
12557 struct hevc_state_s *hevc = gHevc;
12558
12559 if (get_dbg_flag(hevc))
12560 hevc_print(hevc, 0, "%s\r\n", __func__);
12561
12562 mutex_lock(&vh265_mutex);
12563
12564 vh265_stop(hevc);
12565
12566 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
12567
12568
12569#ifdef DEBUG_PTS
12570 hevc_print(hevc, 0,
12571 "pts missed %ld, pts hit %ld, duration %d\n",
12572 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
12573#endif
12574
12575 vfree(hevc);
12576 hevc = NULL;
12577 gHevc = NULL;
12578
12579 mutex_unlock(&vh265_mutex);
12580
12581 return 0;
12582}
12583/****************************************/
12584#ifdef CONFIG_PM
12585static int h265_suspend(struct device *dev)
12586{
12587 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
12588 return 0;
12589}
12590
12591static int h265_resume(struct device *dev)
12592{
12593 amhevc_resume(to_platform_device(dev));
12594 return 0;
12595}
12596
12597static const struct dev_pm_ops h265_pm_ops = {
12598 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
12599};
12600#endif
12601
12602static struct platform_driver amvdec_h265_driver = {
12603 .probe = amvdec_h265_probe,
12604 .remove = amvdec_h265_remove,
12605 .driver = {
12606 .name = DRIVER_NAME,
12607#ifdef CONFIG_PM
12608 .pm = &h265_pm_ops,
12609#endif
12610 }
12611};
12612
12613#ifdef MULTI_INSTANCE_SUPPORT
12614static void vh265_dump_state(struct vdec_s *vdec)
12615{
12616 int i;
12617 struct hevc_state_s *hevc =
12618 (struct hevc_state_s *)vdec->private;
12619 hevc_print(hevc, 0,
12620 "====== %s\n", __func__);
12621
12622 hevc_print(hevc, 0,
12623 "width/height (%d/%d), reorder_pic_num %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
12624 hevc->frame_width,
12625 hevc->frame_height,
12626 hevc->sps_num_reorder_pics_0,
12627 get_work_pic_num(hevc),
12628 hevc->video_signal_type_debug,
12629 hevc->is_swap
12630 );
12631
12632 hevc_print(hevc, 0,
12633 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
12634 input_frame_based(vdec),
12635 hevc->eos,
12636 hevc->dec_result,
12637 decode_frame_count[hevc->index],
12638 display_frame_count[hevc->index],
12639 run_count[hevc->index],
12640 not_run_ready[hevc->index],
12641 input_empty[hevc->index]
12642 );
12643
12644 if (vf_get_receiver(vdec->vf_provider_name)) {
12645 enum receviver_start_e state =
12646 vf_notify_receiver(vdec->vf_provider_name,
12647 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12648 NULL);
12649 hevc_print(hevc, 0,
12650 "\nreceiver(%s) state %d\n",
12651 vdec->vf_provider_name,
12652 state);
12653 }
12654
12655 hevc_print(hevc, 0,
12656 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
12657 __func__,
12658 kfifo_len(&hevc->newframe_q),
12659 VF_POOL_SIZE,
12660 kfifo_len(&hevc->display_q),
12661 VF_POOL_SIZE,
12662 hevc->vf_pre_count,
12663 hevc->vf_get_count,
12664 hevc->vf_put_count,
12665 hevc->pic_list_init_flag,
12666 is_new_pic_available(hevc)
12667 );
12668
12669 dump_pic_list(hevc);
12670
12671 for (i = 0; i < BUF_POOL_SIZE; i++) {
12672 hevc_print(hevc, 0,
12673 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12674 i,
12675 hevc->m_BUF[i].start_adr,
12676 hevc->m_BUF[i].size,
12677 hevc->m_BUF[i].used_flag);
12678 }
12679
12680 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12681 hevc_print(hevc, 0,
12682 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12683 i,
12684 hevc->m_mv_BUF[i].start_adr,
12685 hevc->m_mv_BUF[i].size,
12686 hevc->m_mv_BUF[i].used_flag);
12687 }
12688
12689 hevc_print(hevc, 0,
12690 "HEVC_DEC_STATUS_REG=0x%x\n",
12691 READ_VREG(HEVC_DEC_STATUS_REG));
12692 hevc_print(hevc, 0,
12693 "HEVC_MPC_E=0x%x\n",
12694 READ_VREG(HEVC_MPC_E));
12695 hevc_print(hevc, 0,
12696 "HEVC_DECODE_MODE=0x%x\n",
12697 READ_VREG(HEVC_DECODE_MODE));
12698 hevc_print(hevc, 0,
12699 "HEVC_DECODE_MODE2=0x%x\n",
12700 READ_VREG(HEVC_DECODE_MODE2));
12701 hevc_print(hevc, 0,
12702 "NAL_SEARCH_CTL=0x%x\n",
12703 READ_VREG(NAL_SEARCH_CTL));
12704 hevc_print(hevc, 0,
12705 "HEVC_PARSER_LCU_START=0x%x\n",
12706 READ_VREG(HEVC_PARSER_LCU_START));
12707 hevc_print(hevc, 0,
12708 "HEVC_DECODE_SIZE=0x%x\n",
12709 READ_VREG(HEVC_DECODE_SIZE));
12710 hevc_print(hevc, 0,
12711 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
12712 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
12713 hevc_print(hevc, 0,
12714 "HEVC_STREAM_START_ADDR=0x%x\n",
12715 READ_VREG(HEVC_STREAM_START_ADDR));
12716 hevc_print(hevc, 0,
12717 "HEVC_STREAM_END_ADDR=0x%x\n",
12718 READ_VREG(HEVC_STREAM_END_ADDR));
12719 hevc_print(hevc, 0,
12720 "HEVC_STREAM_LEVEL=0x%x\n",
12721 READ_VREG(HEVC_STREAM_LEVEL));
12722 hevc_print(hevc, 0,
12723 "HEVC_STREAM_WR_PTR=0x%x\n",
12724 READ_VREG(HEVC_STREAM_WR_PTR));
12725 hevc_print(hevc, 0,
12726 "HEVC_STREAM_RD_PTR=0x%x\n",
12727 READ_VREG(HEVC_STREAM_RD_PTR));
12728 hevc_print(hevc, 0,
12729 "PARSER_VIDEO_RP=0x%x\n",
12730 READ_PARSER_REG(PARSER_VIDEO_RP));
12731 hevc_print(hevc, 0,
12732 "PARSER_VIDEO_WP=0x%x\n",
12733 READ_PARSER_REG(PARSER_VIDEO_WP));
12734
12735 if (input_frame_based(vdec) &&
12736 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
12737 ) {
12738 int jj;
12739 if (hevc->chunk && hevc->chunk->block &&
12740 hevc->chunk->size > 0) {
12741 u8 *data = NULL;
12742 if (!hevc->chunk->block->is_mapped)
12743 data = codec_mm_vmap(hevc->chunk->block->start +
12744 hevc->chunk->offset, hevc->chunk->size);
12745 else
12746 data = ((u8 *)hevc->chunk->block->start_virt)
12747 + hevc->chunk->offset;
12748 hevc_print(hevc, 0,
12749 "frame data size 0x%x\n",
12750 hevc->chunk->size);
12751 for (jj = 0; jj < hevc->chunk->size; jj++) {
12752 if ((jj & 0xf) == 0)
12753 hevc_print(hevc,
12754 PRINT_FRAMEBASE_DATA,
12755 "%06x:", jj);
12756 hevc_print_cont(hevc,
12757 PRINT_FRAMEBASE_DATA,
12758 "%02x ", data[jj]);
12759 if (((jj + 1) & 0xf) == 0)
12760 hevc_print_cont(hevc,
12761 PRINT_FRAMEBASE_DATA,
12762 "\n");
12763 }
12764
12765 if (!hevc->chunk->block->is_mapped)
12766 codec_mm_unmap_phyaddr(data);
12767 }
12768 }
12769
12770}
12771
12772
12773static int ammvdec_h265_probe(struct platform_device *pdev)
12774{
12775
12776 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12777 struct hevc_state_s *hevc = NULL;
12778 int ret;
12779#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12780 int config_val;
12781#endif
12782 if (pdata == NULL) {
12783 pr_info("\nammvdec_h265 memory resource undefined.\n");
12784 return -EFAULT;
12785 }
12786
12787 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
12788 sizeof(struct hevc_state_s), GFP_KERNEL); */
12789 hevc = vmalloc(sizeof(struct hevc_state_s));
12790 if (hevc == NULL) {
12791 pr_info("\nammvdec_h265 device data allocation failed\n");
12792 return -ENOMEM;
12793 }
12794 memset(hevc, 0, sizeof(struct hevc_state_s));
12795
12796 /* the ctx from v4l2 driver. */
12797 hevc->v4l2_ctx = pdata->private;
12798
12799 pdata->private = hevc;
12800 pdata->dec_status = vh265_dec_status;
12801 /* pdata->set_trickmode = set_trickmode; */
12802 pdata->run_ready = run_ready;
12803 pdata->run = run;
12804 pdata->reset = reset;
12805 pdata->irq_handler = vh265_irq_cb;
12806 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
12807 pdata->dump_state = vh265_dump_state;
12808
12809 hevc->index = pdev->id;
12810 hevc->m_ins_flag = 1;
12811
12812 if (pdata->use_vfm_path) {
12813 snprintf(pdata->vf_provider_name,
12814 VDEC_PROVIDER_NAME_SIZE,
12815 VFM_DEC_PROVIDER_NAME);
12816 hevc->frameinfo_enable = 1;
12817 }
12818#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12819 else if (vdec_dual(pdata)) {
12820 struct hevc_state_s *hevc_pair = NULL;
12821
12822 if (dv_toggle_prov_name) /*debug purpose*/
12823 snprintf(pdata->vf_provider_name,
12824 VDEC_PROVIDER_NAME_SIZE,
12825 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
12826 VFM_DEC_DVEL_PROVIDER_NAME);
12827 else
12828 snprintf(pdata->vf_provider_name,
12829 VDEC_PROVIDER_NAME_SIZE,
12830 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
12831 VFM_DEC_DVBL_PROVIDER_NAME);
12832 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
12833 if (pdata->master)
12834 hevc_pair = (struct hevc_state_s *)
12835 pdata->master->private;
12836 else if (pdata->slave)
12837 hevc_pair = (struct hevc_state_s *)
12838 pdata->slave->private;
12839 if (hevc_pair)
12840 hevc->shift_byte_count_lo =
12841 hevc_pair->shift_byte_count_lo;
12842 }
12843#endif
12844 else
12845 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
12846 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
12847
12848 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
12849 &vh265_vf_provider, pdata);
12850
12851 hevc->provider_name = pdata->vf_provider_name;
12852 platform_set_drvdata(pdev, pdata);
12853
12854 hevc->platform_dev = pdev;
12855
12856 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
12857 pdata->config && pdata->config_len) {
12858#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12859		/*use ptr config for double_write_mode, etc*/
12860 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
12861
12862 if (get_config_int(pdata->config, "hevc_double_write_mode",
12863 &config_val) == 0)
12864 hevc->double_write_mode = config_val;
12865 else
12866 hevc->double_write_mode = double_write_mode;
12867
12868 if (get_config_int(pdata->config, "save_buffer_mode",
12869 &config_val) == 0)
12870 hevc->save_buffer_mode = config_val;
12871 else
12872 hevc->save_buffer_mode = 0;
12873
12874 /*use ptr config for max_pic_w, etc*/
12875 if (get_config_int(pdata->config, "hevc_buf_width",
12876 &config_val) == 0) {
12877 hevc->max_pic_w = config_val;
12878 }
12879 if (get_config_int(pdata->config, "hevc_buf_height",
12880 &config_val) == 0) {
12881 hevc->max_pic_h = config_val;
12882 }
12883
12884 if (get_config_int(pdata->config,
12885 "parm_v4l_codec_enable",
12886 &config_val) == 0)
12887 hevc->is_used_v4l = config_val;
12888
12889 if (get_config_int(pdata->config,
12890 "parm_v4l_buffer_margin",
12891 &config_val) == 0)
12892 hevc->dynamic_buf_num_margin = config_val;
12893
12894 if (get_config_int(pdata->config,
12895 "parm_v4l_canvas_mem_mode",
12896 &config_val) == 0)
12897 hevc->mem_map_mode = config_val;
12898#endif
12899 } else {
12900 if (pdata->sys_info)
12901 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12902 else {
12903 hevc->vh265_amstream_dec_info.width = 0;
12904 hevc->vh265_amstream_dec_info.height = 0;
12905 hevc->vh265_amstream_dec_info.rate = 30;
12906 }
12907 hevc->double_write_mode = double_write_mode;
12908 }
12909 if (!hevc->is_used_v4l) {
12910 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
12911			hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
12912 else
12913 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
12914
12915 hevc->mem_map_mode = mem_map_mode;
12916 }
12917
12918 if (mmu_enable_force == 0) {
12919 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
12920 hevc->mmu_enable = 0;
12921 else
12922 hevc->mmu_enable = 1;
12923 }
12924
12925 if (init_mmu_buffers(hevc) < 0) {
12926 hevc_print(hevc, 0,
12927 "\n 265 mmu init failed!\n");
12928 mutex_unlock(&vh265_mutex);
12929 /* devm_kfree(&pdev->dev, (void *)hevc);*/
12930 if (hevc)
12931 vfree((void *)hevc);
12932 pdata->dec_status = NULL;
12933 return -EFAULT;
12934 }
12935#if 0
12936 hevc->buf_start = pdata->mem_start;
12937 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
12938#else
12939
12940 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
12941 BMMU_WORKSPACE_ID, work_buf_size,
12942 DRIVER_NAME, &hevc->buf_start);
12943 if (ret < 0) {
12944 uninit_mmu_buffers(hevc);
12945 /* devm_kfree(&pdev->dev, (void *)hevc); */
12946 if (hevc)
12947 vfree((void *)hevc);
12948 pdata->dec_status = NULL;
12949 mutex_unlock(&vh265_mutex);
12950 return ret;
12951 }
12952 hevc->buf_size = work_buf_size;
12953#endif
12954 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12955 (parser_sei_enable & 0x100) == 0)
12956 parser_sei_enable = 7;
12957 hevc->init_flag = 0;
12958 hevc->first_sc_checked = 0;
12959 hevc->uninit_list = 0;
12960 hevc->fatal_error = 0;
12961 hevc->show_frame_num = 0;
12962
12963 /*
12964 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
12965 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
12966 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
12967 */
12968 if (get_dbg_flag(hevc)) {
12969 hevc_print(hevc, 0,
12970 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12971 hevc->buf_start, hevc->buf_size);
12972 }
12973
12974 hevc_print(hevc, 0,
12975 "dynamic_buf_num_margin=%d\n",
12976 hevc->dynamic_buf_num_margin);
12977 hevc_print(hevc, 0,
12978 "double_write_mode=%d\n",
12979 hevc->double_write_mode);
12980
12981 hevc->cma_dev = pdata->cma_dev;
12982
12983 if (vh265_init(pdata) < 0) {
12984 hevc_print(hevc, 0,
12985 "\namvdec_h265 init failed.\n");
12986 hevc_local_uninit(hevc);
12987 uninit_mmu_buffers(hevc);
12988 /* devm_kfree(&pdev->dev, (void *)hevc); */
12989 if (hevc)
12990 vfree((void *)hevc);
12991 pdata->dec_status = NULL;
12992 return -ENODEV;
12993 }
12994
12995 vdec_set_prepare_level(pdata, start_decode_buf_level);
12996
12997 /*set the max clk for smooth playing...*/
12998 hevc_source_changed(VFORMAT_HEVC,
12999 3840, 2160, 60);
13000 if (pdata->parallel_dec == 1)
13001 vdec_core_request(pdata, CORE_MASK_HEVC);
13002 else
13003 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
13004 | CORE_MASK_COMBINE);
13005
13006 return 0;
13007}
13008
13009static int ammvdec_h265_remove(struct platform_device *pdev)
13010{
13011 struct hevc_state_s *hevc =
13012 (struct hevc_state_s *)
13013 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
13014 struct vdec_s *vdec = hw_to_vdec(hevc);
13015
13016 if (hevc == NULL)
13017 return 0;
13018
13019 if (get_dbg_flag(hevc))
13020 hevc_print(hevc, 0, "%s\r\n", __func__);
13021
13022 vmh265_stop(hevc);
13023
13024 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
13025 if (vdec->parallel_dec == 1)
13026 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
13027 else
13028		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
13029
13030 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
13031
13032 vfree((void *)hevc);
13033 return 0;
13034}
13035
13036static struct platform_driver ammvdec_h265_driver = {
13037 .probe = ammvdec_h265_probe,
13038 .remove = ammvdec_h265_remove,
13039 .driver = {
13040 .name = MULTI_DRIVER_NAME,
13041#ifdef CONFIG_PM
13042 .pm = &h265_pm_ops,
13043#endif
13044 }
13045};
13046#endif
13047
13048static struct codec_profile_t amvdec_h265_profile = {
13049 .name = "hevc",
13050 .profile = ""
13051};
13052
13053static struct codec_profile_t amvdec_h265_profile_single,
13054 amvdec_h265_profile_mult;
13055
13056static struct mconfig h265_configs[] = {
13057 MC_PU32("use_cma", &use_cma),
13058 MC_PU32("bit_depth_luma", &bit_depth_luma),
13059 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
13060 MC_PU32("video_signal_type", &video_signal_type),
13061#ifdef ERROR_HANDLE_DEBUG
13062 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
13063 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
13064#endif
13065 MC_PU32("radr", &radr),
13066 MC_PU32("rval", &rval),
13067 MC_PU32("dbg_cmd", &dbg_cmd),
13068 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
13069 MC_PU32("endian", &endian),
13070 MC_PU32("step", &step),
13071 MC_PU32("udebug_flag", &udebug_flag),
13072 MC_PU32("decode_pic_begin", &decode_pic_begin),
13073 MC_PU32("slice_parse_begin", &slice_parse_begin),
13074 MC_PU32("nal_skip_policy", &nal_skip_policy),
13075 MC_PU32("i_only_flag", &i_only_flag),
13076 MC_PU32("error_handle_policy", &error_handle_policy),
13077 MC_PU32("error_handle_threshold", &error_handle_threshold),
13078 MC_PU32("error_handle_nal_skip_threshold",
13079 &error_handle_nal_skip_threshold),
13080 MC_PU32("error_handle_system_threshold",
13081 &error_handle_system_threshold),
13082 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
13083 MC_PU32("debug", &debug),
13084 MC_PU32("debug_mask", &debug_mask),
13085 MC_PU32("buffer_mode", &buffer_mode),
13086 MC_PU32("double_write_mode", &double_write_mode),
13087 MC_PU32("buf_alloc_width", &buf_alloc_width),
13088 MC_PU32("buf_alloc_height", &buf_alloc_height),
13089 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
13090 MC_PU32("max_buf_num", &max_buf_num),
13091 MC_PU32("buf_alloc_size", &buf_alloc_size),
13092 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
13093 MC_PU32("mem_map_mode", &mem_map_mode),
13094 MC_PU32("enable_mem_saving", &enable_mem_saving),
13095 MC_PU32("force_w_h", &force_w_h),
13096 MC_PU32("force_fps", &force_fps),
13097 MC_PU32("max_decoding_time", &max_decoding_time),
13098 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
13099 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
13100 MC_PU32("interlace_enable", &interlace_enable),
13101 MC_PU32("pts_unstable", &pts_unstable),
13102 MC_PU32("parser_sei_enable", &parser_sei_enable),
13103 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
13104 MC_PU32("decode_timeout_val", &decode_timeout_val),
13105#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13106 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
13107 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
13108 MC_PU32("dv_debug", &dv_debug),
13109#endif
13110};
13111static struct mconfig_node decoder_265_node;
13112
13113static int __init amvdec_h265_driver_init_module(void)
13114{
13115 struct BuffInfo_s *p_buf_info;
13116
13117 if (vdec_is_support_4k()) {
13118 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
13119 p_buf_info = &amvh265_workbuff_spec[2];
13120 else
13121 p_buf_info = &amvh265_workbuff_spec[1];
13122 } else
13123 p_buf_info = &amvh265_workbuff_spec[0];
13124
13125 init_buff_spec(NULL, p_buf_info);
13126 work_buf_size =
13127 (p_buf_info->end_adr - p_buf_info->start_adr
13128 + 0xffff) & (~0xffff);
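	/*
	 * The workspace size computed above is rounded up to a 64 KiB
	 * boundary, e.g. a span of 0x12345 bytes becomes 0x20000.
	 */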
13129
13130 pr_debug("amvdec_h265 module init\n");
13131 error_handle_policy = 0;
13132
13133#ifdef ERROR_HANDLE_DEBUG
13134 dbg_nal_skip_flag = 0;
13135 dbg_nal_skip_count = 0;
13136#endif
13137 udebug_flag = 0;
13138 decode_pic_begin = 0;
13139 slice_parse_begin = 0;
13140 step = 0;
13141 buf_alloc_size = 0;
13142
13143#ifdef MULTI_INSTANCE_SUPPORT
13144 if (platform_driver_register(&ammvdec_h265_driver))
13145 pr_err("failed to register ammvdec_h265 driver\n");
13146
13147#endif
13148 if (platform_driver_register(&amvdec_h265_driver)) {
13149 pr_err("failed to register amvdec_h265 driver\n");
13150 return -ENODEV;
13151 }
13152#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
13153 if (!has_hevc_vdec()) {
13154 /* not support hevc */
13155 amvdec_h265_profile.name = "hevc_unsupport";
13156 }
13157 if (vdec_is_support_4k()) {
13158 if (is_meson_m8m2_cpu()) {
13159 /* m8m2 support 4k */
13160 amvdec_h265_profile.profile = "4k";
13161 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
13162 amvdec_h265_profile.profile =
13163 "8k, 8bit, 10bit, dwrite, compressed";
13164		} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
13165 amvdec_h265_profile.profile =
13166 "4k, 8bit, 10bit, dwrite, compressed";
13167 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
13168 amvdec_h265_profile.profile = "4k";
13169 }
13170#endif
13171 if (codec_mm_get_total_size() < 80 * SZ_1M) {
13172 pr_info("amvdec_h265 default mmu enabled.\n");
13173 mmu_enable = 1;
13174 }
13175
13176 vcodec_profile_register(&amvdec_h265_profile);
13177 amvdec_h265_profile_single = amvdec_h265_profile;
13178 amvdec_h265_profile_single.name = "h265";
13179 vcodec_profile_register(&amvdec_h265_profile_single);
13180 amvdec_h265_profile_mult = amvdec_h265_profile;
13181 amvdec_h265_profile_mult.name = "mh265";
13182 vcodec_profile_register(&amvdec_h265_profile_mult);
13183 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
13184 "h265", h265_configs, CONFIG_FOR_RW);
13185 return 0;
13186}
13187
13188static void __exit amvdec_h265_driver_remove_module(void)
13189{
13190 pr_debug("amvdec_h265 module remove.\n");
13191
13192#ifdef MULTI_INSTANCE_SUPPORT
13193 platform_driver_unregister(&ammvdec_h265_driver);
13194#endif
13195 platform_driver_unregister(&amvdec_h265_driver);
13196}
13197
13198/****************************************/
13199/*
13200 *module_param(stat, uint, 0664);
13201 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
13202 */
13203module_param(use_cma, uint, 0664);
13204MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13205
13206module_param(bit_depth_luma, uint, 0664);
13207MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13208
13209module_param(bit_depth_chroma, uint, 0664);
13210MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13211
13212module_param(video_signal_type, uint, 0664);
13213MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13214
13215#ifdef ERROR_HANDLE_DEBUG
13216module_param(dbg_nal_skip_flag, uint, 0664);
13217MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13218
13219module_param(dbg_nal_skip_count, uint, 0664);
13220MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13221#endif
13222
13223module_param(radr, uint, 0664);
13224MODULE_PARM_DESC(radr, "\n radr\n");
13225
13226module_param(rval, uint, 0664);
13227MODULE_PARM_DESC(rval, "\n rval\n");
13228
13229module_param(dbg_cmd, uint, 0664);
13230MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13231
13232module_param(dump_nal, uint, 0664);
13233MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13234
13235module_param(dbg_skip_decode_index, uint, 0664);
13236MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13237
13238module_param(endian, uint, 0664);
13239MODULE_PARM_DESC(endian, "\n endian\n");
13240
module_param(step, uint, 0664);
MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");

module_param(decode_pic_begin, uint, 0664);
MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");

module_param(slice_parse_begin, uint, 0664);
MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");

module_param(nal_skip_policy, uint, 0664);
MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");

module_param(i_only_flag, uint, 0664);
MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");

module_param(fast_output_enable, uint, 0664);
MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");

module_param(error_handle_policy, uint, 0664);
MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");

module_param(error_handle_threshold, uint, 0664);
MODULE_PARM_DESC(error_handle_threshold,
	"\n amvdec_h265 error_handle_threshold\n");

module_param(error_handle_nal_skip_threshold, uint, 0664);
MODULE_PARM_DESC(error_handle_nal_skip_threshold,
	"\n amvdec_h265 error_handle_nal_skip_threshold\n");

module_param(error_handle_system_threshold, uint, 0664);
MODULE_PARM_DESC(error_handle_system_threshold,
	"\n amvdec_h265 error_handle_system_threshold\n");

module_param(error_skip_nal_count, uint, 0664);
MODULE_PARM_DESC(error_skip_nal_count,
	"\n amvdec_h265 error_skip_nal_count\n");

module_param(debug, uint, 0664);
MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");

module_param(debug_mask, uint, 0664);
MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug_mask\n");

module_param(log_mask, uint, 0664);
MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");

module_param(buffer_mode, uint, 0664);
MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");

module_param(double_write_mode, uint, 0664);
MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");

module_param(buf_alloc_width, uint, 0664);
MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");

module_param(buf_alloc_height, uint, 0664);
MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");

module_param(dynamic_buf_num_margin, uint, 0664);
MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");

module_param(max_buf_num, uint, 0664);
MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");

module_param(buf_alloc_size, uint, 0664);
MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");

#ifdef CONSTRAIN_MAX_BUF_NUM
module_param(run_ready_max_vf_only_num, uint, 0664);
MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");

module_param(run_ready_display_q_num, uint, 0664);
MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");

module_param(run_ready_max_buf_num, uint, 0664);
MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
#endif

#if 0
module_param(re_config_pic_flag, uint, 0664);
MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
#endif

module_param(buffer_mode_dbg, uint, 0664);
MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");

module_param(mem_map_mode, uint, 0664);
MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");

module_param(enable_mem_saving, uint, 0664);
MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");

module_param(force_w_h, uint, 0664);
MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");

module_param(force_fps, uint, 0664);
MODULE_PARM_DESC(force_fps, "\n force_fps\n");

module_param(max_decoding_time, uint, 0664);
MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");

module_param(prefix_aux_buf_size, uint, 0664);
MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");

module_param(suffix_aux_buf_size, uint, 0664);
MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");

module_param(interlace_enable, uint, 0664);
MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
module_param(pts_unstable, uint, 0664);
MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
module_param(parser_sei_enable, uint, 0664);
MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");

#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
module_param(parser_dolby_vision_enable, uint, 0664);
MODULE_PARM_DESC(parser_dolby_vision_enable,
	"\n parser_dolby_vision_enable\n");

module_param(dolby_meta_with_el, uint, 0664);
MODULE_PARM_DESC(dolby_meta_with_el,
	"\n dolby_meta_with_el\n");

module_param(dolby_el_flush_th, uint, 0664);
MODULE_PARM_DESC(dolby_el_flush_th,
	"\n dolby_el_flush_th\n");
#endif
module_param(mmu_enable, uint, 0664);
MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");

module_param(mmu_enable_force, uint, 0664);
MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");

#ifdef MULTI_INSTANCE_SUPPORT
module_param(start_decode_buf_level, int, 0664);
MODULE_PARM_DESC(start_decode_buf_level,
	"\n h265 start_decode_buf_level\n");

module_param(decode_timeout_val, uint, 0664);
MODULE_PARM_DESC(decode_timeout_val,
	"\n h265 decode_timeout_val\n");

module_param(data_resend_policy, uint, 0664);
MODULE_PARM_DESC(data_resend_policy,
	"\n h265 data_resend_policy\n");

module_param_array(decode_frame_count, uint,
	&max_decode_instance_num, 0664);

module_param_array(display_frame_count, uint,
	&max_decode_instance_num, 0664);

module_param_array(max_process_time, uint,
	&max_decode_instance_num, 0664);

module_param_array(max_get_frame_interval,
	uint, &max_decode_instance_num, 0664);

module_param_array(run_count, uint,
	&max_decode_instance_num, 0664);

module_param_array(input_empty, uint,
	&max_decode_instance_num, 0664);

module_param_array(not_run_ready, uint,
	&max_decode_instance_num, 0664);

module_param_array(ref_frame_mark_flag, uint,
	&max_decode_instance_num, 0664);

#endif
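
/*
 * module_param_array() pairs each exported array with the element-count
 * variable passed by address (&max_decode_instance_num above); both are
 * expected to be defined earlier in this file. A minimal sketch of the
 * assumed pairing, with the array size taken from MAX_DECODE_INSTANCE_NUM:
 *
 *	static unsigned int max_decode_instance_num = MAX_DECODE_INSTANCE_NUM;
 *	static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
 *
 * When values are supplied at load time (e.g. decode_frame_count=1,2,3),
 * the kernel stores the number of parsed elements in
 * max_decode_instance_num; with mode 0664 the array also stays readable
 * and writable through sysfs.
 */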
#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
module_param(dv_toggle_prov_name, uint, 0664);
MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");

module_param(dv_debug, uint, 0664);
MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");

module_param(force_bypass_dvenl, uint, 0664);
MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
#endif

#ifdef AGAIN_HAS_THRESHOLD
module_param(again_threshold, uint, 0664);
MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
#endif

module_param(force_disp_pic_index, int, 0664);
MODULE_PARM_DESC(force_disp_pic_index,
	"\n amvdec_h265 force_disp_pic_index\n");

module_param(frmbase_cont_bitlevel, uint, 0664);
MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");

module_param(udebug_flag, uint, 0664);
MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");

module_param(udebug_pause_pos, uint, 0664);
MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");

module_param(udebug_pause_val, uint, 0664);
MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");

module_param(pre_decode_buf_level, int, 0664);
MODULE_PARM_DESC(pre_decode_buf_level, "\n amvdec_h265 pre_decode_buf_level\n");

module_param(udebug_pause_decode_idx, uint, 0664);
MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");

module_param(disp_vframe_valve_level, uint, 0664);
MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");

module_param(pic_list_debug, uint, 0664);
MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");

module_param(without_display_mode, uint, 0664);
MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");

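/*
 * All of the parameters above are registered with mode 0664, so once the
 * module is loaded they should also be readable and writable at runtime
 * under /sys/module/<module name>/parameters/ (for example
 * .../parameters/debug). The exact module name depends on how this file
 * is built; amvdec_h265 is assumed here.
 */
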
module_init(amvdec_h265_driver_init_module);
module_exit(amvdec_h265_driver_remove_module);

MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");