path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50
51#define CONSTRAIN_MAX_BUF_NUM
52
53#define SWAP_HEVC_UCODE
54#define DETREFILL_ENABLE
55
56#define AGAIN_HAS_THRESHOLD
57/*#define TEST_NO_BUF*/
58#define HEVC_PIC_STRUCT_SUPPORT
59#define MULTI_INSTANCE_SUPPORT
60#define USE_UNINIT_SEMA
61
62 /* .buf_size = 0x100000*16,
63 //4k2k , 0x100000 per buffer */
64 /* 4096x2304 , 0x120000 per buffer */
65#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
66#define MPRED_4K_MV_BUF_SIZE (0x120000)
67#define MPRED_MV_BUF_SIZE (0x40000)
68
69#define MMU_COMPRESS_HEADER_SIZE 0x48000
70#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
71
72#define MAX_FRAME_4K_NUM 0x1200
73#define MAX_FRAME_8K_NUM (0x1200*4)
74
75//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
76#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
77
78#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
79
80#define HEVC_CM_HEADER_START_ADDR 0x3628
81#define HEVC_SAO_MMU_VH1_ADDR 0x363b
82#define HEVC_SAO_MMU_VH0_ADDR 0x363a
83
84#define HEVC_DBLK_CFGB 0x350b
85#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
86#define SWAP_HEVC_OFFSET (3 * 0x1000)
87
88#define MEM_NAME "codec_265"
89/* #include <mach/am_regs.h> */
90#include <linux/amlogic/media/utils/vdec_reg.h>
91
92#include "../utils/vdec.h"
93#include "../utils/amvdec.h"
94#include <linux/amlogic/media/video_sink/video.h>
95#include <linux/amlogic/media/codec_mm/configs.h>
96
97#define SEND_LMEM_WITH_RPM
98#define SUPPORT_10BIT
99/* #define ERROR_HANDLE_DEBUG */
100
101#ifndef STAT_KTHREAD
102#define STAT_KTHREAD 0x40
103#endif
104
105#ifdef MULTI_INSTANCE_SUPPORT
106#define MAX_DECODE_INSTANCE_NUM 9
107#define MULTI_DRIVER_NAME "ammvdec_h265"
108#endif
109#define DRIVER_NAME "amvdec_h265"
110#define MODULE_NAME "amvdec_h265"
111#define DRIVER_HEADER_NAME "amvdec_h265_header"
112
113#define PUT_INTERVAL (HZ/100)
114#define ERROR_SYSTEM_RESET_COUNT 200
115
116#define PTS_NORMAL 0
117#define PTS_NONE_REF_USE_DURATION 1
118
119#define PTS_MODE_SWITCHING_THRESHOLD 3
120#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
121
122#define DUR2PTS(x) ((x)*90/96)
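/* Worked example (assuming the usual amstream convention that frame_dur
 * is expressed in 1/96000 s units and PTS in 90 kHz ticks): a 25 fps
 * stream has frame_dur = 3840, and DUR2PTS(3840) = 3840 * 90 / 96 = 3600
 * ticks, i.e. 3600 / 90000 = 40 ms per frame.
 */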
123
124#define MAX_SIZE_8K (8192 * 4608)
125#define MAX_SIZE_4K (4096 * 2304)
126
127#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
128#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
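/* Example (pure arithmetic from the macros above): 3840x2160 is
 * 8,294,400 pixels, which is larger than 1920*1088 (2,088,960) but not
 * larger than MAX_SIZE_4K (4096*2304 = 9,437,184), so IS_4K_SIZE() is
 * true and IS_8K_SIZE() is false for that resolution.
 */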
129
130#define SEI_UserDataITU_T_T35 4
131#define INVALID_IDX -1 /* Invalid buffer index.*/
132
133static struct semaphore h265_sema;
134
135struct hevc_state_s;
136static int hevc_print(struct hevc_state_s *hevc,
137 int debug_flag, const char *fmt, ...);
138static int hevc_print_cont(struct hevc_state_s *hevc,
139 int debug_flag, const char *fmt, ...);
140static int vh265_vf_states(struct vframe_states *states, void *);
141static struct vframe_s *vh265_vf_peek(void *);
142static struct vframe_s *vh265_vf_get(void *);
143static void vh265_vf_put(struct vframe_s *, void *);
144static int vh265_event_cb(int type, void *data, void *private_data);
145
146static int vh265_stop(struct hevc_state_s *hevc);
147#ifdef MULTI_INSTANCE_SUPPORT
148static int vmh265_stop(struct hevc_state_s *hevc);
149static s32 vh265_init(struct vdec_s *vdec);
150static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
151static void reset_process_time(struct hevc_state_s *hevc);
152static void start_process_time(struct hevc_state_s *hevc);
153static void restart_process_time(struct hevc_state_s *hevc);
154static void timeout_process(struct hevc_state_s *hevc);
155#else
156static s32 vh265_init(struct hevc_state_s *hevc);
157#endif
158static void vh265_prot_init(struct hevc_state_s *hevc);
159static int vh265_local_init(struct hevc_state_s *hevc);
160static void vh265_check_timer_func(unsigned long arg);
161static void config_decode_mode(struct hevc_state_s *hevc);
162
163static const char vh265_dec_id[] = "vh265-dev";
164
165#define PROVIDER_NAME "decoder.h265"
166#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
167
168static const struct vframe_operations_s vh265_vf_provider = {
169 .peek = vh265_vf_peek,
170 .get = vh265_vf_get,
171 .put = vh265_vf_put,
172 .event_cb = vh265_event_cb,
173 .vf_states = vh265_vf_states,
174};
175
176static struct vframe_provider_s vh265_vf_prov;
177
178static u32 bit_depth_luma;
179static u32 bit_depth_chroma;
180static u32 video_signal_type;
181
182static int start_decode_buf_level = 0x8000;
183
184static unsigned int decode_timeout_val = 200;
185
186/*data_resend_policy:
187 bit 0, for stream-based input, resend data when the decode buffer is empty
188*/
189static u32 data_resend_policy = 1;
190
191#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
192/*
193static const char * const video_format_names[] = {
194 "component", "PAL", "NTSC", "SECAM",
195 "MAC", "unspecified", "unspecified", "unspecified"
196};
197
198static const char * const color_primaries_names[] = {
199 "unknown", "bt709", "undef", "unknown",
200 "bt470m", "bt470bg", "smpte170m", "smpte240m",
201 "film", "bt2020"
202};
203
204static const char * const transfer_characteristics_names[] = {
205 "unknown", "bt709", "undef", "unknown",
206 "bt470m", "bt470bg", "smpte170m", "smpte240m",
207 "linear", "log100", "log316", "iec61966-2-4",
208 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
209 "smpte-st-2084", "smpte-st-428"
210};
211
212static const char * const matrix_coeffs_names[] = {
213 "GBR", "bt709", "undef", "unknown",
214 "fcc", "bt470bg", "smpte170m", "smpte240m",
215 "YCgCo", "bt2020nc", "bt2020c"
216};
217*/
218#ifdef SUPPORT_10BIT
219#define HEVC_CM_BODY_START_ADDR 0x3626
220#define HEVC_CM_BODY_LENGTH 0x3627
221#define HEVC_CM_HEADER_LENGTH 0x3629
222#define HEVC_CM_HEADER_OFFSET 0x362b
223#define HEVC_SAO_CTRL9 0x362d
224#define LOSLESS_COMPRESS_MODE
225/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
226/* double_write_mode:
227 * 0, no double write;
228 * 1, 1:1 ratio;
229 * 2, (1/4):(1/4) ratio;
230 * 3, (1/4):(1/4) ratio, with the compressed frame also included
231 * 4, (1/2):(1/2) ratio;
232 * 0x10, double write only
233 * 0x100, if > 1080p, use mode 4, else use mode 1;
234 * 0x200, if > 1080p, use mode 2, else use mode 1;
235 * 0x300, if > 720p, use mode 4, else use mode 1;
236 */
237static u32 double_write_mode;
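/* Example (derived from the table above and from get_double_write_mode()
 * later in this file): with double_write_mode = 0x100, a 3840x2160 stream
 * resolves to mode 4 ((1/2):(1/2) ratio), while a 1920x1080 stream
 * resolves to mode 1 (1:1).
 */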
238
239/*#define DECOMP_HEADR_SURGENT*/
240
241static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
242static u32 enable_mem_saving = 1;
243static u32 workaround_enable;
244static u32 force_w_h;
245#endif
246static u32 force_fps;
247static u32 pts_unstable;
248#define H265_DEBUG_BUFMGR 0x01
249#define H265_DEBUG_BUFMGR_MORE 0x02
250#define H265_DEBUG_DETAIL 0x04
251#define H265_DEBUG_REG 0x08
252#define H265_DEBUG_MAN_SEARCH_NAL 0x10
253#define H265_DEBUG_MAN_SKIP_NAL 0x20
254#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
255#define H265_DEBUG_FORCE_CLK 0x80
256#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
257#define H265_DEBUG_NO_DISPLAY 0x200
258#define H265_DEBUG_DISCARD_NAL 0x400
259#define H265_DEBUG_OUT_PTS 0x800
260#define H265_DEBUG_DUMP_PIC_LIST 0x1000
261#define H265_DEBUG_PRINT_SEI 0x2000
262#define H265_DEBUG_PIC_STRUCT 0x4000
263#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
264#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
265#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
266#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
267#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
268#define H265_DEBUG_HW_RESET 0x100000
269#define H265_CFG_CANVAS_IN_DECODE 0x200000
270#define H265_DEBUG_DV 0x400000
271#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
272#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
273#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
274#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
275#ifdef MULTI_INSTANCE_SUPPORT
276#define PRINT_FLAG_ERROR 0x0
277#define IGNORE_PARAM_FROM_CONFIG 0x08000000
278#define PRINT_FRAMEBASE_DATA 0x10000000
279#define PRINT_FLAG_VDEC_STATUS 0x20000000
280#define PRINT_FLAG_VDEC_DETAIL 0x40000000
281#define PRINT_FLAG_V4L_DETAIL 0x80000000
282#endif
283
284#define BUF_POOL_SIZE 32
285#define MAX_BUF_NUM 24
286#define MAX_REF_PIC_NUM 24
287#define MAX_REF_ACTIVE 16
288
289#ifdef MV_USE_FIXED_BUF
290#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
291#define VF_BUFFER_IDX(n) (n)
292#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
293#else
294#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
295#define VF_BUFFER_IDX(n) (n)
296#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
297#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
298#endif
299
300#define HEVC_MV_INFO 0x310d
301#define HEVC_QP_INFO 0x3137
302#define HEVC_SKIP_INFO 0x3136
303
304const u32 h265_version = 201602101;
305static u32 debug_mask = 0xffffffff;
306static u32 log_mask;
307static u32 debug;
308static u32 radr;
309static u32 rval;
310static u32 dbg_cmd;
311static u32 dump_nal;
312static u32 dbg_skip_decode_index;
313static u32 endian = 0xff0;
314#ifdef ERROR_HANDLE_DEBUG
315static u32 dbg_nal_skip_flag;
316 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
317static u32 dbg_nal_skip_count;
318#endif
319/*for debug*/
320/*
321 udebug_flag:
322 bit 0, enable ucode print
323 bit 1, enable ucode detail print
324 bit [31:16] not 0, pos to dump lmem
325 bit 2, pop bits to lmem
326 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
327*/
328static u32 udebug_flag;
329/*
330 when udebug_flag[1:0] is not 0
331 udebug_pause_pos not 0,
332 pause position
333*/
334static u32 udebug_pause_pos;
335/*
336 when udebug_flag[1:0] is not 0
337 and udebug_pause_pos is not 0,
338 pause only when DEBUG_REG2 is equal to this val
339*/
340static u32 udebug_pause_val;
341
342static u32 udebug_pause_decode_idx;
343
344static u32 decode_pic_begin;
345static uint slice_parse_begin;
346static u32 step;
347static bool is_reset;
348
349#ifdef CONSTRAIN_MAX_BUF_NUM
350static u32 run_ready_max_vf_only_num;
351static u32 run_ready_display_q_num;
352 /*0: not check
353 0xff: work_pic_num
354 */
355static u32 run_ready_max_buf_num = 0xff;
356#endif
357
358static u32 dynamic_buf_num_margin = 7;
359static u32 buf_alloc_width;
360static u32 buf_alloc_height;
361
362static u32 max_buf_num = 16;
363static u32 buf_alloc_size;
364/*static u32 re_config_pic_flag;*/
365/*
366 *bit[0]: 0,
367 *bit[1]: 0, always release cma buffer when stop
368 *bit[1]: 1, never release cma buffer when stop
369 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
370 *do not release cma buffer if blackout is not 1
371 *
372 *bit[2]: 0, when start decoding, check current displayed buffer
373 * (only for buffer decoded by h265) if blackout is 0
374 * 1, do not check current displayed buffer
375 *
376 *bit[3]: 1, if blackout is not 1, do not release current
377 * displayed cma buffer always.
378 */
379/* set to 1 for fast play;
380 * set to 8 for other case of "keep last frame"
381 */
382static u32 buffer_mode = 1;
383
384/* buffer_mode_dbg: debug only*/
385static u32 buffer_mode_dbg = 0xffff0000;
386/**/
387/*
388 *bit[1:0] PB_skip_mode: 0, start decoding from the beginning;
389 *1, start decoding after the first I picture;
390 *2, only decode and display non-error pictures;
391 *3, start decoding and displaying after an IDR, etc.
392 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
393 *only for mode 0 and 1.
394 */
395static u32 nal_skip_policy = 2;
396
397/*
398 *bit 0, 1: only display I picture;
399 *bit 1, 1: only decode I picture;
400 */
401static u32 i_only_flag;
402
403/*
404bit 0, fast output first I picture
405*/
406static u32 fast_output_enable = 1;
407
408static u32 frmbase_cont_bitlevel = 0x60;
409
410/*
411use_cma: 1, use both reserved memory and cma for buffers
4122, only use cma for buffers
413*/
414static u32 use_cma = 2;
415
416#define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
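/* AUX_BUF_ALIGN() rounds an address up to the next 16-byte boundary,
 * e.g. AUX_BUF_ALIGN(0x1001) == 0x1010 and AUX_BUF_ALIGN(0x1000) == 0x1000.
 */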
417static u32 prefix_aux_buf_size = (16 * 1024);
418static u32 suffix_aux_buf_size;
419
420static u32 max_decoding_time;
421/*
422 *error handling
423 */
424/*error_handle_policy:
425 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
426 *1, skip error_skip_nal_count nals before error recovery;
427 *bit 1 (valid only when bit0 == 1):
428 *1, wait vps/sps/pps after error recovery;
429 *bit 2 (valid only when bit0 == 0):
430 *0, auto search after error recovery (hevc_recover() called);
431 *1, manual search after error recovery
432 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
433 *
434 *bit 4: 0, set error_mark after reset/recover
435 * 1, do not set error_mark after reset/recover
436 *bit 5: 0, check total lcu for every picture
437 * 1, do not check total lcu
438 *bit 6: 0, do not check head error
439 * 1, check head error
440 *
441 */
442
443static u32 error_handle_policy;
444static u32 error_skip_nal_count = 6;
445static u32 error_handle_threshold = 30;
446static u32 error_handle_nal_skip_threshold = 10;
447static u32 error_handle_system_threshold = 30;
448static u32 interlace_enable = 1;
449static u32 fr_hint_status;
450
451 /*
452 *parser_sei_enable:
453 * bit 0, sei;
454 * bit 1, sei_suffix (fill aux buf)
455 * bit 2, fill sei to aux buf (when bit 0 is 1)
456 * bit 8, debug flag
457 */
458static u32 parser_sei_enable;
459#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
460static u32 parser_dolby_vision_enable = 1;
461static u32 dolby_meta_with_el;
462static u32 dolby_el_flush_th = 2;
463#endif
464/* this is only for h265 mmu enable */
465
466static u32 mmu_enable = 1;
467static u32 mmu_enable_force;
468static u32 work_buf_size;
469static unsigned int force_disp_pic_index;
470static unsigned int disp_vframe_valve_level;
471static int pre_decode_buf_level = 0x1000;
472static unsigned int pic_list_debug;
473
474
475#ifdef MULTI_INSTANCE_SUPPORT
476static unsigned int max_decode_instance_num
477 = MAX_DECODE_INSTANCE_NUM;
478static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
479static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
480static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
481static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
482static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
483static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
484static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
485static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
486{1, 1, 1, 1, 1, 1, 1, 1, 1};
487
488#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
489static unsigned char get_idx(struct hevc_state_s *hevc);
490#endif
491
492#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
493static u32 dv_toggle_prov_name;
494
495static u32 dv_debug;
496
497static u32 force_bypass_dvenl;
498#endif
499#endif
500
501
502#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
503#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
504#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
505#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
506#else
507#define get_dbg_flag(hevc) debug
508#define get_dbg_flag2(hevc) debug
509#define is_log_enable(hevc) (log_mask ? 1 : 0)
510#define get_valid_double_write_mode(hevc) double_write_mode
511#define get_buf_alloc_width(hevc) buf_alloc_width
512#define get_buf_alloc_height(hevc) buf_alloc_height
513#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
514#endif
515#define get_buffer_mode(hevc) buffer_mode
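/* Example: in a CONFIG_AMLOGIC_MEDIA_MULTI_DEC build with debug_mask = 0x2
 * and debug = H265_DEBUG_BUFMGR, get_dbg_flag() returns H265_DEBUG_BUFMGR
 * only for the decoder instance whose index is 1; every other instance
 * gets 0, so logging can be enabled per instance.
 */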
516
517
518DEFINE_SPINLOCK(lock);
519struct task_struct *h265_task = NULL;
520#undef DEBUG_REG
521#ifdef DEBUG_REG
522void WRITE_VREG_DBG(unsigned adr, unsigned val)
523{
524 if (debug & H265_DEBUG_REG)
525 pr_info("%s(%x, %x)\n", __func__, adr, val);
526 WRITE_VREG(adr, val);
527}
528
529#undef WRITE_VREG
530#define WRITE_VREG WRITE_VREG_DBG
531#endif
532
533static DEFINE_MUTEX(vh265_mutex);
534
535static DEFINE_MUTEX(vh265_log_mutex);
536
537static struct vdec_info *gvs;
538
539static u32 without_display_mode;
540
541/**************************************************
542 *
543 *h265 buffer management include
544 *
545 ***************************************************
546 */
547enum NalUnitType {
548 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
549 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
550
551 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
552 /* Current name in the spec: TSA_R */
553 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
554
555 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
556 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
557
558 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
559 /* Current name in the spec: RADL_R */
560 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
561
562 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
563 /* Current name in the spec: RASL_R */
564 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
565
566 NAL_UNIT_RESERVED_10,
567 NAL_UNIT_RESERVED_11,
568 NAL_UNIT_RESERVED_12,
569 NAL_UNIT_RESERVED_13,
570 NAL_UNIT_RESERVED_14,
571 NAL_UNIT_RESERVED_15,
572
573 /* Current name in the spec: BLA_W_LP */
574 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
575 /* Current name in the spec: BLA_W_DLP */
576 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
577 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
578 /* Current name in the spec: IDR_W_DLP */
579 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
580 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
581 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
582 NAL_UNIT_RESERVED_22,
583 NAL_UNIT_RESERVED_23,
584
585 NAL_UNIT_RESERVED_24,
586 NAL_UNIT_RESERVED_25,
587 NAL_UNIT_RESERVED_26,
588 NAL_UNIT_RESERVED_27,
589 NAL_UNIT_RESERVED_28,
590 NAL_UNIT_RESERVED_29,
591 NAL_UNIT_RESERVED_30,
592 NAL_UNIT_RESERVED_31,
593
594 NAL_UNIT_VPS, /* 32 */
595 NAL_UNIT_SPS, /* 33 */
596 NAL_UNIT_PPS, /* 34 */
597 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
598 NAL_UNIT_EOS, /* 36 */
599 NAL_UNIT_EOB, /* 37 */
600 NAL_UNIT_FILLER_DATA, /* 38 */
601 NAL_UNIT_SEI, /* 39 Prefix SEI */
602 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
603 NAL_UNIT_RESERVED_41,
604 NAL_UNIT_RESERVED_42,
605 NAL_UNIT_RESERVED_43,
606 NAL_UNIT_RESERVED_44,
607 NAL_UNIT_RESERVED_45,
608 NAL_UNIT_RESERVED_46,
609 NAL_UNIT_RESERVED_47,
610 NAL_UNIT_UNSPECIFIED_48,
611 NAL_UNIT_UNSPECIFIED_49,
612 NAL_UNIT_UNSPECIFIED_50,
613 NAL_UNIT_UNSPECIFIED_51,
614 NAL_UNIT_UNSPECIFIED_52,
615 NAL_UNIT_UNSPECIFIED_53,
616 NAL_UNIT_UNSPECIFIED_54,
617 NAL_UNIT_UNSPECIFIED_55,
618 NAL_UNIT_UNSPECIFIED_56,
619 NAL_UNIT_UNSPECIFIED_57,
620 NAL_UNIT_UNSPECIFIED_58,
621 NAL_UNIT_UNSPECIFIED_59,
622 NAL_UNIT_UNSPECIFIED_60,
623 NAL_UNIT_UNSPECIFIED_61,
624 NAL_UNIT_UNSPECIFIED_62,
625 NAL_UNIT_UNSPECIFIED_63,
626 NAL_UNIT_INVALID,
627};
628
629/* --------------------------------------------------- */
630/* Amrisc Software Interrupt */
631/* --------------------------------------------------- */
632#define AMRISC_STREAM_EMPTY_REQ 0x01
633#define AMRISC_PARSER_REQ 0x02
634#define AMRISC_MAIN_REQ 0x04
635
636/* --------------------------------------------------- */
637/* HEVC_DEC_STATUS define */
638/* --------------------------------------------------- */
639#define HEVC_DEC_IDLE 0x0
640#define HEVC_NAL_UNIT_VPS 0x1
641#define HEVC_NAL_UNIT_SPS 0x2
642#define HEVC_NAL_UNIT_PPS 0x3
643#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
644#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
645#define HEVC_SLICE_DECODING 0x6
646#define HEVC_NAL_UNIT_SEI 0x7
647#define HEVC_SLICE_SEGMENT_DONE 0x8
648#define HEVC_NAL_SEARCH_DONE 0x9
649#define HEVC_DECPIC_DATA_DONE 0xa
650#define HEVC_DECPIC_DATA_ERROR 0xb
651#define HEVC_SEI_DAT 0xc
652#define HEVC_SEI_DAT_DONE 0xd
653#define HEVC_NAL_DECODE_DONE 0xe
654#define HEVC_OVER_DECODE 0xf
655
656#define HEVC_DATA_REQUEST 0x12
657
658#define HEVC_DECODE_BUFEMPTY 0x20
659#define HEVC_DECODE_TIMEOUT 0x21
660#define HEVC_SEARCH_BUFEMPTY 0x22
661#define HEVC_DECODE_OVER_SIZE 0x23
662#define HEVC_DECODE_BUFEMPTY2 0x24
663#define HEVC_FIND_NEXT_PIC_NAL 0x50
664#define HEVC_FIND_NEXT_DVEL_NAL 0x51
665
666#define HEVC_DUMP_LMEM 0x30
667
668#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
669#define HEVC_DISCARD_NAL 0xf0
670#define HEVC_ACTION_DEC_CONT 0xfd
671#define HEVC_ACTION_ERROR 0xfe
672#define HEVC_ACTION_DONE 0xff
673
674/* --------------------------------------------------- */
675/* Include "parser_cmd.h" */
676/* --------------------------------------------------- */
677#define PARSER_CMD_SKIP_CFG_0 0x0000090b
678
679#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
680
681#define PARSER_CMD_SKIP_CFG_2 0x001b1910
682
683#define PARSER_CMD_NUMBER 37
684
685/**************************************************
686 *
687 *h265 buffer management
688 *
689 ***************************************************
690 */
691/* #define BUFFER_MGR_ONLY */
692/* #define CONFIG_HEVC_CLK_FORCED_ON */
693/* #define ENABLE_SWAP_TEST */
694#define MCRCC_ENABLE
695#define INVALID_POC 0x80000000
696
697#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
698#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
699#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
700#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
701#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
702#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
703#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
704#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
705#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
706#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
707#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
708#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
709#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
710#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
711#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
712#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
713#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
714#ifdef ENABLE_SWAP_TEST
715#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
716#endif
717
718/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
719/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
720#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
721 /*do not define ENABLE_SWAP_TEST*/
722#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
723#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
724
725#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
726#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
727/*
728 *ucode parser/search control
729 *bit 0: 0, header auto parse; 1, header manual parse
730 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
731 *bit [3:2]: valid when bit1==0;
732 *0, auto skip nal before first vps/sps/pps/idr;
733 *1, auto skip nal before first vps/sps/pps
734 *2, auto skip nal before first vps/sps/pps,
735 * and not decode until the first I slice (with slice address of 0)
736 *
737 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
738 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
739 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
740 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
741 *bit [17]: for NAL_SEI when bit0 is 0:
742 * 0, do not parse/fetch SEI in ucode;
743 * 1, parse/fetch SEI in ucode
744 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
745 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
746 * 1, fetch NAL_SEI_SUFFIX data to aux buf
747 *bit [19]:
748 * 0, parse NAL_SEI in ucode
749 * 1, fetch NAL_SEI to aux buf
750 *bit [20]: for DOLBY_VISION_META
751 * 0, do not fetch DOLBY_VISION_META to aux buf
752 * 1, fetch DOLBY_VISION_META to aux buf
753 */
754#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
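/* Illustrative example only (composed from the bit layout documented
 * above, not a value taken from this driver): leaving bit 0 at 0 keeps
 * header auto-parse, while bits 17 and 18 ask the ucode to parse SEI and
 * to fetch NAL_SEI_SUFFIX data into the aux buffer:
 *
 *	WRITE_VREG(NAL_SEARCH_CTL, (1 << 17) | (1 << 18));  (i.e. 0x60000)
 */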
755 /*read only*/
756#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
757 /*
758 [15 : 8] rps_set_id
759 [7 : 0] start_decoding_flag
760 */
761#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
762 /*set before start decoder*/
763#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
764#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
765#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
766
767#define DECODE_MODE_SINGLE 0x0
768#define DECODE_MODE_MULTI_FRAMEBASE 0x1
769#define DECODE_MODE_MULTI_STREAMBASE 0x2
770#define DECODE_MODE_MULTI_DVBAL 0x3
771#define DECODE_MODE_MULTI_DVENL 0x4
772
773#define MAX_INT 0x7FFFFFFF
774
775#define RPM_BEGIN 0x100
776#define modification_list_cur 0x148
777#define RPM_END 0x180
778
779#define RPS_USED_BIT 14
780/* MISC_FLAG0 */
781#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
782#define PCM_ENABLE_FLAG_BIT 1
783#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
784#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
785#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
786#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
787#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
788#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
789#define SLICE_SAO_LUMA_FLAG_BIT 8
790#define SLICE_SAO_CHROMA_FLAG_BIT 9
791#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
792
793union param_u {
794 struct {
795 unsigned short data[RPM_END - RPM_BEGIN];
796 } l;
797 struct {
798 /* from ucode lmem, do not change this struct */
799 unsigned short CUR_RPS[0x10];
800 unsigned short num_ref_idx_l0_active;
801 unsigned short num_ref_idx_l1_active;
802 unsigned short slice_type;
803 unsigned short slice_temporal_mvp_enable_flag;
804 unsigned short dependent_slice_segment_flag;
805 unsigned short slice_segment_address;
806 unsigned short num_title_rows_minus1;
807 unsigned short pic_width_in_luma_samples;
808 unsigned short pic_height_in_luma_samples;
809 unsigned short log2_min_coding_block_size_minus3;
810 unsigned short log2_diff_max_min_coding_block_size;
811 unsigned short log2_max_pic_order_cnt_lsb_minus4;
812 unsigned short POClsb;
813 unsigned short collocated_from_l0_flag;
814 unsigned short collocated_ref_idx;
815 unsigned short log2_parallel_merge_level;
816 unsigned short five_minus_max_num_merge_cand;
817 unsigned short sps_num_reorder_pics_0;
818 unsigned short modification_flag;
819 unsigned short tiles_enabled_flag;
820 unsigned short num_tile_columns_minus1;
821 unsigned short num_tile_rows_minus1;
822 unsigned short tile_width[8];
823 unsigned short tile_height[8];
824 unsigned short misc_flag0;
825 unsigned short pps_beta_offset_div2;
826 unsigned short pps_tc_offset_div2;
827 unsigned short slice_beta_offset_div2;
828 unsigned short slice_tc_offset_div2;
829 unsigned short pps_cb_qp_offset;
830 unsigned short pps_cr_qp_offset;
831 unsigned short first_slice_segment_in_pic_flag;
832 unsigned short m_temporalId;
833 unsigned short m_nalUnitType;
834
835 unsigned short vui_num_units_in_tick_hi;
836 unsigned short vui_num_units_in_tick_lo;
837 unsigned short vui_time_scale_hi;
838 unsigned short vui_time_scale_lo;
839 unsigned short bit_depth;
840 unsigned short profile_etc;
841 unsigned short sei_frame_field_info;
842 unsigned short video_signal_type;
843 unsigned short modification_list[0x20];
844 unsigned short conformance_window_flag;
845 unsigned short conf_win_left_offset;
846 unsigned short conf_win_right_offset;
847 unsigned short conf_win_top_offset;
848 unsigned short conf_win_bottom_offset;
849 unsigned short chroma_format_idc;
850 unsigned short color_description;
851 unsigned short aspect_ratio_idc;
852 unsigned short sar_width;
853 unsigned short sar_height;
854 unsigned short sps_max_dec_pic_buffering_minus1_0;
855 } p;
856};
857
858#define RPM_BUF_SIZE (0x80*2)
859/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
860#define LMEM_BUF_SIZE (0x500 * 2)
861
862struct buff_s {
863 u32 buf_start;
864 u32 buf_size;
865 u32 buf_end;
866};
867
868struct BuffInfo_s {
869 u32 max_width;
870 u32 max_height;
871 unsigned int start_adr;
872 unsigned int end_adr;
873 struct buff_s ipp;
874 struct buff_s sao_abv;
875 struct buff_s sao_vb;
876 struct buff_s short_term_rps;
877 struct buff_s vps;
878 struct buff_s sps;
879 struct buff_s pps;
880 struct buff_s sao_up;
881 struct buff_s swap_buf;
882 struct buff_s swap_buf2;
883 struct buff_s scalelut;
884 struct buff_s dblk_para;
885 struct buff_s dblk_data;
886 struct buff_s dblk_data2;
887 struct buff_s mmu_vbh;
888 struct buff_s cm_header;
889 struct buff_s mpred_above;
890#ifdef MV_USE_FIXED_BUF
891 struct buff_s mpred_mv;
892#endif
893 struct buff_s rpm;
894 struct buff_s lmem;
895};
896#define WORK_BUF_SPEC_NUM 3
897static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
898 {
899 /* 8M bytes */
900 .max_width = 1920,
901 .max_height = 1088,
902 .ipp = {
903 /* IPP work space calculation :
904 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
905 */
906 .buf_size = 0x4000,
907 },
908 .sao_abv = {
909 .buf_size = 0x30000,
910 },
911 .sao_vb = {
912 .buf_size = 0x30000,
913 },
914 .short_term_rps = {
915 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
916 * total 64x16x2 = 2048 bytes (0x800)
917 */
918 .buf_size = 0x800,
919 },
920 .vps = {
921 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
922 * total 0x0800 bytes
923 */
924 .buf_size = 0x800,
925 },
926 .sps = {
927 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
928 * total 0x0800 bytes
929 */
930 .buf_size = 0x800,
931 },
932 .pps = {
933 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
934 * total 0x2000 bytes
935 */
936 .buf_size = 0x2000,
937 },
938 .sao_up = {
939 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
940 * each has 16 bytes total 0x2800 bytes
941 */
942 .buf_size = 0x2800,
943 },
944 .swap_buf = {
945 /* 256cyclex64bit = 2K bytes 0x800
946 * (only 144 cycles valid)
947 */
948 .buf_size = 0x800,
949 },
950 .swap_buf2 = {
951 .buf_size = 0x800,
952 },
953 .scalelut = {
954 /* support up to 32 SCALELUT 1024x32 =
955 * 32Kbytes (0x8000)
956 */
957 .buf_size = 0x8000,
958 },
959 .dblk_para = {
960#ifdef SUPPORT_10BIT
961 .buf_size = 0x40000,
962#else
963 /* DBLK -> Max 256(4096/16) LCU, each para
964 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
965 */
966 .buf_size = 0x20000,
967#endif
968 },
969 .dblk_data = {
970 .buf_size = 0x40000,
971 },
972 .dblk_data2 = {
973 .buf_size = 0x40000,
974 }, /*dblk data for adapter*/
975 .mmu_vbh = {
976 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
977 },
978#if 0
979 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
980 .buf_size = MMU_COMPRESS_HEADER_SIZE *
981 (MAX_REF_PIC_NUM + 1),
982 },
983#endif
984 .mpred_above = {
985 .buf_size = 0x8000,
986 },
987#ifdef MV_USE_FIXED_BUF
988 .mpred_mv = {/* 1080p, 0x40000 per buffer */
989 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
990 },
991#endif
992 .rpm = {
993 .buf_size = RPM_BUF_SIZE,
994 },
995 .lmem = {
996 .buf_size = 0x500 * 2,
997 }
998 },
999 {
1000 .max_width = 4096,
1001 .max_height = 2048,
1002 .ipp = {
1003 /* IPP work space calculation :
1004 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1005 */
1006 .buf_size = 0x4000,
1007 },
1008 .sao_abv = {
1009 .buf_size = 0x30000,
1010 },
1011 .sao_vb = {
1012 .buf_size = 0x30000,
1013 },
1014 .short_term_rps = {
1015 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1016 * total 64x16x2 = 2048 bytes (0x800)
1017 */
1018 .buf_size = 0x800,
1019 },
1020 .vps = {
1021 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1022 * total 0x0800 bytes
1023 */
1024 .buf_size = 0x800,
1025 },
1026 .sps = {
1027 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1028 * total 0x0800 bytes
1029 */
1030 .buf_size = 0x800,
1031 },
1032 .pps = {
1033 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1034 * total 0x2000 bytes
1035 */
1036 .buf_size = 0x2000,
1037 },
1038 .sao_up = {
1039 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1040 * each has 16 bytes total 0x2800 bytes
1041 */
1042 .buf_size = 0x2800,
1043 },
1044 .swap_buf = {
1045 /* 256cyclex64bit = 2K bytes 0x800
1046 * (only 144 cycles valid)
1047 */
1048 .buf_size = 0x800,
1049 },
1050 .swap_buf2 = {
1051 .buf_size = 0x800,
1052 },
1053 .scalelut = {
1054 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1055 * (0x8000)
1056 */
1057 .buf_size = 0x8000,
1058 },
1059 .dblk_para = {
1060 /* DBLK -> Max 256(4096/16) LCU, each para
1061 * 512bytes(total:0x20000),
1062 * data 1024bytes(total:0x40000)
1063 */
1064 .buf_size = 0x20000,
1065 },
1066 .dblk_data = {
1067 .buf_size = 0x80000,
1068 },
1069 .dblk_data2 = {
1070 .buf_size = 0x80000,
1071 }, /*dblk data for adapter*/
1072 .mmu_vbh = {
1073 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1074 },
1075#if 0
1076 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1077 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1078 (MAX_REF_PIC_NUM + 1),
1079 },
1080#endif
1081 .mpred_above = {
1082 .buf_size = 0x8000,
1083 },
1084#ifdef MV_USE_FIXED_BUF
1085 .mpred_mv = {
1086 /* .buf_size = 0x100000*16,
1087 //4k2k , 0x100000 per buffer */
1088 /* 4096x2304 , 0x120000 per buffer */
1089 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1090 },
1091#endif
1092 .rpm = {
1093 .buf_size = RPM_BUF_SIZE,
1094 },
1095 .lmem = {
1096 .buf_size = 0x500 * 2,
1097 }
1098 },
1099
1100 {
1101 .max_width = 4096*2,
1102 .max_height = 2048*2,
1103 .ipp = {
1104 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1105 .buf_size = 0x4000*2,
1106 },
1107 .sao_abv = {
1108 .buf_size = 0x30000*2,
1109 },
1110 .sao_vb = {
1111 .buf_size = 0x30000*2,
1112 },
1113 .short_term_rps = {
1114 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1115 .buf_size = 0x800,
1116 },
1117 .vps = {
1118 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1119 .buf_size = 0x800,
1120 },
1121 .sps = {
1122 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1123 .buf_size = 0x800,
1124 },
1125 .pps = {
1126 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1127 .buf_size = 0x2000,
1128 },
1129 .sao_up = {
1130 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1131 .buf_size = 0x2800*2,
1132 },
1133 .swap_buf = {
1134 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1135 .buf_size = 0x800,
1136 },
1137 .swap_buf2 = {
1138 .buf_size = 0x800,
1139 },
1140 .scalelut = {
1141 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1142 .buf_size = 0x8000*2,
1143 },
1144 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1145 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1146 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1147 .mmu_vbh = {
1148 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1149 },
1150#if 0
1151 .cm_header = {
1152 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1153 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1154 },
1155#endif
1156 .mpred_above = {
1157 .buf_size = 0x8000*2,
1158 },
1159#ifdef MV_USE_FIXED_BUF
1160 .mpred_mv = {
1161 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1162 },
1163#endif
1164 .rpm = {
1165 .buf_size = RPM_BUF_SIZE,
1166 },
1167 .lmem = {
1168 .buf_size = 0x500 * 2,
1169 },
1170 }
1171};
1172
1173static void init_buff_spec(struct hevc_state_s *hevc,
1174 struct BuffInfo_s *buf_spec)
1175{
1176 buf_spec->ipp.buf_start = buf_spec->start_adr;
1177 buf_spec->sao_abv.buf_start =
1178 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1179
1180 buf_spec->sao_vb.buf_start =
1181 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1182 buf_spec->short_term_rps.buf_start =
1183 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1184 buf_spec->vps.buf_start =
1185 buf_spec->short_term_rps.buf_start +
1186 buf_spec->short_term_rps.buf_size;
1187 buf_spec->sps.buf_start =
1188 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1189 buf_spec->pps.buf_start =
1190 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1191 buf_spec->sao_up.buf_start =
1192 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1193 buf_spec->swap_buf.buf_start =
1194 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1195 buf_spec->swap_buf2.buf_start =
1196 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1197 buf_spec->scalelut.buf_start =
1198 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1199 buf_spec->dblk_para.buf_start =
1200 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1201 buf_spec->dblk_data.buf_start =
1202 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1203 buf_spec->dblk_data2.buf_start =
1204 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1205 buf_spec->mmu_vbh.buf_start =
1206 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1207 buf_spec->mpred_above.buf_start =
1208 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1209#ifdef MV_USE_FIXED_BUF
1210 buf_spec->mpred_mv.buf_start =
1211 buf_spec->mpred_above.buf_start +
1212 buf_spec->mpred_above.buf_size;
1213
1214 buf_spec->rpm.buf_start =
1215 buf_spec->mpred_mv.buf_start +
1216 buf_spec->mpred_mv.buf_size;
1217#else
1218 buf_spec->rpm.buf_start =
1219 buf_spec->mpred_above.buf_start +
1220 buf_spec->mpred_above.buf_size;
1221#endif
1222 buf_spec->lmem.buf_start =
1223 buf_spec->rpm.buf_start +
1224 buf_spec->rpm.buf_size;
1225 buf_spec->end_adr =
1226 buf_spec->lmem.buf_start +
1227 buf_spec->lmem.buf_size;
1228
1229 if (hevc && get_dbg_flag2(hevc)) {
1230 hevc_print(hevc, 0,
1231 "%s workspace (%x %x) size = %x\n", __func__,
1232 buf_spec->start_adr, buf_spec->end_adr,
1233 buf_spec->end_adr - buf_spec->start_adr);
1234
1235 hevc_print(hevc, 0,
1236 "ipp.buf_start :%x\n",
1237 buf_spec->ipp.buf_start);
1238 hevc_print(hevc, 0,
1239 "sao_abv.buf_start :%x\n",
1240 buf_spec->sao_abv.buf_start);
1241 hevc_print(hevc, 0,
1242 "sao_vb.buf_start :%x\n",
1243 buf_spec->sao_vb.buf_start);
1244 hevc_print(hevc, 0,
1245 "short_term_rps.buf_start :%x\n",
1246 buf_spec->short_term_rps.buf_start);
1247 hevc_print(hevc, 0,
1248 "vps.buf_start :%x\n",
1249 buf_spec->vps.buf_start);
1250 hevc_print(hevc, 0,
1251 "sps.buf_start :%x\n",
1252 buf_spec->sps.buf_start);
1253 hevc_print(hevc, 0,
1254 "pps.buf_start :%x\n",
1255 buf_spec->pps.buf_start);
1256 hevc_print(hevc, 0,
1257 "sao_up.buf_start :%x\n",
1258 buf_spec->sao_up.buf_start);
1259 hevc_print(hevc, 0,
1260 "swap_buf.buf_start :%x\n",
1261 buf_spec->swap_buf.buf_start);
1262 hevc_print(hevc, 0,
1263 "swap_buf2.buf_start :%x\n",
1264 buf_spec->swap_buf2.buf_start);
1265 hevc_print(hevc, 0,
1266 "scalelut.buf_start :%x\n",
1267 buf_spec->scalelut.buf_start);
1268 hevc_print(hevc, 0,
1269 "dblk_para.buf_start :%x\n",
1270 buf_spec->dblk_para.buf_start);
1271 hevc_print(hevc, 0,
1272 "dblk_data.buf_start :%x\n",
1273 buf_spec->dblk_data.buf_start);
1274 hevc_print(hevc, 0,
1275 "dblk_data2.buf_start :%x\n",
1276 buf_spec->dblk_data2.buf_start);
1277 hevc_print(hevc, 0,
1278 "mpred_above.buf_start :%x\n",
1279 buf_spec->mpred_above.buf_start);
1280#ifdef MV_USE_FIXED_BUF
1281 hevc_print(hevc, 0,
1282 "mpred_mv.buf_start :%x\n",
1283 buf_spec->mpred_mv.buf_start);
1284#endif
1285 if ((get_dbg_flag2(hevc)
1286 &
1287 H265_DEBUG_SEND_PARAM_WITH_REG)
1288 == 0) {
1289 hevc_print(hevc, 0,
1290 "rpm.buf_start :%x\n",
1291 buf_spec->rpm.buf_start);
1292 }
1293 }
1294
1295}
1296
1297enum SliceType {
1298 B_SLICE,
1299 P_SLICE,
1300 I_SLICE
1301};
1302
1303/*USE_BUF_BLOCK*/
1304struct BUF_s {
1305 ulong start_adr;
1306 u32 size;
1307 u32 luma_size;
1308 ulong header_addr;
1309 u32 header_size;
1310 int used_flag;
1311 ulong v4l_ref_buf_addr;
1312} /*BUF_t */;
1313
1314/* level 6, 6.1 maximum slice number is 800; other is 200 */
1315#define MAX_SLICE_NUM 800
1316struct PIC_s {
1317 int index;
1318 int scatter_alloc;
1319 int BUF_index;
1320 int mv_buf_index;
1321 int POC;
1322 int decode_idx;
1323 int slice_type;
1324 int RefNum_L0;
1325 int RefNum_L1;
1326 int num_reorder_pic;
1327 int stream_offset;
1328 unsigned char referenced;
1329 unsigned char output_mark;
1330 unsigned char recon_mark;
1331 unsigned char output_ready;
1332 unsigned char error_mark;
1333 // dis_mark = 0: discard mark; dis_mark = 1: no discard mark
1334 unsigned char dis_mark;
1335 /**/ int slice_idx;
1336 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1337 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1338 /*buffer */
1339 unsigned int header_adr;
1340#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1341 unsigned char dv_enhance_exist;
1342#endif
1343 char *aux_data_buf;
1344 int aux_data_size;
1345 unsigned long cma_alloc_addr;
1346 struct page *alloc_pages;
1347 unsigned int mpred_mv_wr_start_addr;
1348 unsigned int mc_y_adr;
1349 unsigned int mc_u_v_adr;
1350#ifdef SUPPORT_10BIT
1351 /*unsigned int comp_body_size;*/
1352 unsigned int dw_y_adr;
1353 unsigned int dw_u_v_adr;
1354#endif
1355 int mc_canvas_y;
1356 int mc_canvas_u_v;
1357 int width;
1358 int height;
1359
1360 int y_canvas_index;
1361 int uv_canvas_index;
1362#ifdef MULTI_INSTANCE_SUPPORT
1363 struct canvas_config_s canvas_config[2];
1364#endif
1365#ifdef SUPPORT_10BIT
1366 int mem_saving_mode;
1367 u32 bit_depth_luma;
1368 u32 bit_depth_chroma;
1369#endif
1370#ifdef LOSLESS_COMPRESS_MODE
1371 unsigned int losless_comp_body_size;
1372#endif
1373 unsigned char pic_struct;
1374 int vf_ref;
1375
1376 u32 pts;
1377 u64 pts64;
1378 u64 timestamp;
1379
1380 u32 aspect_ratio_idc;
1381 u32 sar_width;
1382 u32 sar_height;
1383 u32 double_write_mode;
1384 u32 video_signal_type;
1385 unsigned short conformance_window_flag;
1386 unsigned short conf_win_left_offset;
1387 unsigned short conf_win_right_offset;
1388 unsigned short conf_win_top_offset;
1389 unsigned short conf_win_bottom_offset;
1390 unsigned short chroma_format_idc;
1391
1392 /* picture qos information */
1393 int max_qp;
1394 int avg_qp;
1395 int min_qp;
1396 int max_skip;
1397 int avg_skip;
1398 int min_skip;
1399 int max_mv;
1400 int min_mv;
1401 int avg_mv;
1402
1403 bool vframe_bound;
1404} /*PIC_t */;
1405
1406#define MAX_TILE_COL_NUM 10
1407#define MAX_TILE_ROW_NUM 20
1408struct tile_s {
1409 int width;
1410 int height;
1411 int start_cu_x;
1412 int start_cu_y;
1413
1414 unsigned int sao_vb_start_addr;
1415 unsigned int sao_abv_start_addr;
1416};
1417
1418#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1419#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1420#define SEI_HDR10PLUS_MASK 0x00000004
1421
1422#define VF_POOL_SIZE 32
1423
1424#ifdef MULTI_INSTANCE_SUPPORT
1425#define DEC_RESULT_NONE 0
1426#define DEC_RESULT_DONE 1
1427#define DEC_RESULT_AGAIN 2
1428#define DEC_RESULT_CONFIG_PARAM 3
1429#define DEC_RESULT_ERROR 4
1430#define DEC_INIT_PICLIST 5
1431#define DEC_UNINIT_PICLIST 6
1432#define DEC_RESULT_GET_DATA 7
1433#define DEC_RESULT_GET_DATA_RETRY 8
1434#define DEC_RESULT_EOS 9
1435#define DEC_RESULT_FORCE_EXIT 10
1436#define DEC_RESULT_FREE_CANVAS 11
1437
1438static void vh265_work(struct work_struct *work);
1439static void vh265_timeout_work(struct work_struct *work);
1440static void vh265_notify_work(struct work_struct *work);
1441
1442#endif
1443
1444struct debug_log_s {
1445 struct list_head list;
1446 uint8_t data; /*will alloc more size*/
1447};
1448
1449struct hevc_state_s {
1450#ifdef MULTI_INSTANCE_SUPPORT
1451 struct platform_device *platform_dev;
1452 void (*vdec_cb)(struct vdec_s *, void *);
1453 void *vdec_cb_arg;
1454 struct vframe_chunk_s *chunk;
1455 int dec_result;
1456 struct work_struct work;
1457 struct work_struct timeout_work;
1458 struct work_struct notify_work;
1459 struct work_struct set_clk_work;
1460 /* timeout handle */
1461 unsigned long int start_process_time;
1462 unsigned int last_lcu_idx;
1463 unsigned int decode_timeout_count;
1464 unsigned int timeout_num;
1465#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1466 unsigned char switch_dvlayer_flag;
1467 unsigned char no_switch_dvlayer_count;
1468 unsigned char bypass_dvenl_enable;
1469 unsigned char bypass_dvenl;
1470#endif
1471 unsigned char start_parser_type;
1472 /*start_decoding_flag:
1473 vps/pps/sps/idr info from ucode*/
1474 unsigned char start_decoding_flag;
1475 unsigned char rps_set_id;
1476 unsigned char eos;
1477 int pic_decoded_lcu_idx;
1478 u8 over_decode;
1479 u8 empty_flag;
1480#endif
1481 struct vframe_s vframe_dummy;
1482 char *provider_name;
1483 int index;
1484 struct device *cma_dev;
1485 unsigned char m_ins_flag;
1486 unsigned char dolby_enhance_flag;
1487 unsigned long buf_start;
1488 u32 buf_size;
1489 u32 mv_buf_size;
1490
1491 struct BuffInfo_s work_space_buf_store;
1492 struct BuffInfo_s *work_space_buf;
1493
1494 u8 aux_data_dirty;
1495 u32 prefix_aux_size;
1496 u32 suffix_aux_size;
1497 void *aux_addr;
1498 void *rpm_addr;
1499 void *lmem_addr;
1500 dma_addr_t aux_phy_addr;
1501 dma_addr_t rpm_phy_addr;
1502 dma_addr_t lmem_phy_addr;
1503
1504 unsigned int pic_list_init_flag;
1505 unsigned int use_cma_flag;
1506
1507 unsigned short *rpm_ptr;
1508 unsigned short *lmem_ptr;
1509 unsigned short *debug_ptr;
1510 int debug_ptr_size;
1511 int pic_w;
1512 int pic_h;
1513 int lcu_x_num;
1514 int lcu_y_num;
1515 int lcu_total;
1516 int lcu_size;
1517 int lcu_size_log2;
1518 int lcu_x_num_pre;
1519 int lcu_y_num_pre;
1520 int first_pic_after_recover;
1521
1522 int num_tile_col;
1523 int num_tile_row;
1524 int tile_enabled;
1525 int tile_x;
1526 int tile_y;
1527 int tile_y_x;
1528 int tile_start_lcu_x;
1529 int tile_start_lcu_y;
1530 int tile_width_lcu;
1531 int tile_height_lcu;
1532
1533 int slice_type;
1534 unsigned int slice_addr;
1535 unsigned int slice_segment_addr;
1536
1537 unsigned char interlace_flag;
1538 unsigned char curr_pic_struct;
1539 unsigned char frame_field_info_present_flag;
1540
1541 unsigned short sps_num_reorder_pics_0;
1542 unsigned short misc_flag0;
1543 int m_temporalId;
1544 int m_nalUnitType;
1545 int TMVPFlag;
1546 int isNextSliceSegment;
1547 int LDCFlag;
1548 int m_pocRandomAccess;
1549 int plevel;
1550 int MaxNumMergeCand;
1551
1552 int new_pic;
1553 int new_tile;
1554 int curr_POC;
1555 int iPrevPOC;
1556#ifdef MULTI_INSTANCE_SUPPORT
1557 int decoded_poc;
1558 struct PIC_s *decoding_pic;
1559#endif
1560 int iPrevTid0POC;
1561 int list_no;
1562 int RefNum_L0;
1563 int RefNum_L1;
1564 int ColFromL0Flag;
1565 int LongTerm_Curr;
1566 int LongTerm_Col;
1567 int Col_POC;
1568 int LongTerm_Ref;
1569#ifdef MULTI_INSTANCE_SUPPORT
1570 int m_pocRandomAccess_bak;
1571 int curr_POC_bak;
1572 int iPrevPOC_bak;
1573 int iPrevTid0POC_bak;
1574 unsigned char start_parser_type_bak;
1575 unsigned char start_decoding_flag_bak;
1576 unsigned char rps_set_id_bak;
1577 int pic_decoded_lcu_idx_bak;
1578 int decode_idx_bak;
1579#endif
1580 struct PIC_s *cur_pic;
1581 struct PIC_s *col_pic;
1582 int skip_flag;
1583 int decode_idx;
1584 int slice_idx;
1585 unsigned char have_vps;
1586 unsigned char have_sps;
1587 unsigned char have_pps;
1588 unsigned char have_valid_start_slice;
1589 unsigned char wait_buf;
1590 unsigned char error_flag;
1591 unsigned int error_skip_nal_count;
1592 long used_4k_num;
1593
1594 unsigned char
1595 ignore_bufmgr_error; /* bit 0, for decoding;
1596 bit 1, for displaying
1597 bit 1 must be set if bit 0 is 1*/
1598 int PB_skip_mode;
1599 int PB_skip_count_after_decoding;
1600#ifdef SUPPORT_10BIT
1601 int mem_saving_mode;
1602#endif
1603#ifdef LOSLESS_COMPRESS_MODE
1604 unsigned int losless_comp_body_size;
1605#endif
1606 int pts_mode;
1607 int last_lookup_pts;
1608 int last_pts;
1609 u64 last_lookup_pts_us64;
1610 u64 last_pts_us64;
1611 u32 shift_byte_count_lo;
1612 u32 shift_byte_count_hi;
1613 int pts_mode_switching_count;
1614 int pts_mode_recovery_count;
1615
1616 int pic_num;
1617
1618 /**/
1619 union param_u param;
1620
1621 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1622
1623 struct timer_list timer;
1624 struct BUF_s m_BUF[BUF_POOL_SIZE];
1625 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1626 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1627
1628 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1629 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1630 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1631 struct vframe_s vfpool[VF_POOL_SIZE];
1632
1633 u32 stat;
1634 u32 frame_width;
1635 u32 frame_height;
1636 u32 frame_dur;
1637 u32 frame_ar;
1638 u32 bit_depth_luma;
1639 u32 bit_depth_chroma;
1640 u32 video_signal_type;
1641 u32 video_signal_type_debug;
1642 u32 saved_resolution;
1643 bool get_frame_dur;
1644 u32 error_watchdog_count;
1645 u32 error_skip_nal_wt_cnt;
1646 u32 error_system_watchdog_count;
1647
1648#ifdef DEBUG_PTS
1649 unsigned long pts_missed;
1650 unsigned long pts_hit;
1651#endif
1652 struct dec_sysinfo vh265_amstream_dec_info;
1653 unsigned char init_flag;
1654 unsigned char first_sc_checked;
1655 unsigned char uninit_list;
1656 u32 start_decoding_time;
1657
1658 int show_frame_num;
1659#ifdef USE_UNINIT_SEMA
1660 struct semaphore h265_uninit_done_sema;
1661#endif
1662 int fatal_error;
1663
1664
1665 u32 sei_present_flag;
1666 void *frame_mmu_map_addr;
1667 dma_addr_t frame_mmu_map_phy_addr;
1668 unsigned int mmu_mc_buf_start;
1669 unsigned int mmu_mc_buf_end;
1670 unsigned int mmu_mc_start_4k_adr;
1671 void *mmu_box;
1672 void *bmmu_box;
1673 int mmu_enable;
1674
1675 unsigned int dec_status;
1676
1677 /* data for SEI_MASTER_DISPLAY_COLOR */
1678 unsigned int primaries[3][2];
1679 unsigned int white_point[2];
1680 unsigned int luminance[2];
1681 /* data for SEI_CONTENT_LIGHT_LEVEL */
1682 unsigned int content_light_level[2];
1683
1684 struct PIC_s *pre_top_pic;
1685 struct PIC_s *pre_bot_pic;
1686
1687#ifdef MULTI_INSTANCE_SUPPORT
1688 int double_write_mode;
1689 int dynamic_buf_num_margin;
1690 int start_action;
1691 int save_buffer_mode;
1692#endif
1693 u32 i_only;
1694 struct list_head log_list;
1695 u32 ucode_pause_pos;
1696 u32 start_shift_bytes;
1697
1698 u32 vf_pre_count;
1699 u32 vf_get_count;
1700 u32 vf_put_count;
1701#ifdef SWAP_HEVC_UCODE
1702 dma_addr_t mc_dma_handle;
1703 void *mc_cpu_addr;
1704 int swap_size;
1705 ulong swap_addr;
1706#endif
1707#ifdef DETREFILL_ENABLE
1708 dma_addr_t detbuf_adr;
1709 u16 *detbuf_adr_virt;
1710 u8 delrefill_check;
1711#endif
1712 u8 head_error_flag;
1713 int valve_count;
1714 struct firmware_s *fw;
1715 int max_pic_w;
1716 int max_pic_h;
1717#ifdef AGAIN_HAS_THRESHOLD
1718 u8 next_again_flag;
1719 u32 pre_parser_wr_ptr;
1720#endif
1721 u32 ratio_control;
1722 u32 first_pic_flag;
1723 u32 decode_size;
1724 struct mutex chunks_mutex;
1725 int need_cache_size;
1726 u64 sc_start_time;
1727 u32 skip_first_nal;
1728 bool is_swap;
1729 bool is_4k;
1730 int frameinfo_enable;
1731 struct vframe_qos_s vframe_qos;
1732 bool is_used_v4l;
1733 void *v4l2_ctx;
1734 bool v4l_params_parsed;
1735 u32 mem_map_mode;
1736} /*hevc_stru_t */;
1737
1738#ifdef AGAIN_HAS_THRESHOLD
1739u32 again_threshold;
1740#endif
1741#ifdef SEND_LMEM_WITH_RPM
1742#define get_lmem_params(hevc, ladr) \
1743 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
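/* The index arithmetic above maps ladr to (ladr & ~3) + (3 - (ladr & 3)),
 * i.e. the four 16-bit entries inside each group of four are read in
 * reverse order (presumably because the ucode dumps lmem in 64-bit words).
 * For example, ladr 0x41 is read from lmem_ptr[0x42] and ladr 0x43 from
 * lmem_ptr[0x40].
 */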
1744
1745
1746static int get_frame_mmu_map_size(void)
1747{
1748 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1749 return (MAX_FRAME_8K_NUM * 4);
1750
1751 return (MAX_FRAME_4K_NUM * 4);
1752}
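/* Resulting map sizes: MAX_FRAME_4K_NUM * 4 = 0x1200 * 4 = 18,432 bytes
 * before SM1, and MAX_FRAME_8K_NUM * 4 = 73,728 bytes on SM1 and later,
 * which also accept 8K streams (see is_oversize() below).
 */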
1753
1754static int is_oversize(int w, int h)
1755{
1756 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1757 MAX_SIZE_8K : MAX_SIZE_4K;
1758
1759 if (w < 0 || h < 0)
1760 return true;
1761
1762 if (h != 0 && (w > max / h))
1763 return true;
1764
1765 return false;
1766}
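/* Note: the size check is written as (w > max / h) rather than
 * (w * h > max), which also keeps a bogus header with huge dimensions
 * from overflowing the 32-bit product before the comparison.
 */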
1767
1768void check_head_error(struct hevc_state_s *hevc)
1769{
1770#define pcm_enabled_flag 0x040
1771#define pcm_sample_bit_depth_luma 0x041
1772#define pcm_sample_bit_depth_chroma 0x042
1773 hevc->head_error_flag = 0;
1774 if ((error_handle_policy & 0x40) == 0)
1775 return;
1776 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1777 uint16_t pcm_depth_luma = get_lmem_params(
1778 hevc, pcm_sample_bit_depth_luma);
1779 uint16_t pcm_sample_chroma = get_lmem_params(
1780 hevc, pcm_sample_bit_depth_chroma);
1781 if (pcm_depth_luma >
1782 hevc->bit_depth_luma ||
1783 pcm_sample_chroma >
1784 hevc->bit_depth_chroma) {
1785 hevc_print(hevc, 0,
1786 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1787 pcm_depth_luma,
1788 pcm_sample_chroma,
1789 hevc->bit_depth_luma,
1790 hevc->bit_depth_chroma);
1791 hevc->head_error_flag = 1;
1792 }
1793 }
1794}
1795#endif
1796
1797#ifdef SUPPORT_10BIT
1798/* Lossless compression body buffer size 4K per 64x32 (jt) */
1799static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1800 int width, int height, int mem_saving_mode)
1801{
1802 int width_x64;
1803 int height_x32;
1804 int bsize;
1805
1806 width_x64 = width + 63;
1807 width_x64 >>= 6;
1808
1809 height_x32 = height + 31;
1810 height_x32 >>= 5;
1811 if (mem_saving_mode == 1 && hevc->mmu_enable)
1812 bsize = 3200 * width_x64 * height_x32;
1813 else if (mem_saving_mode == 1)
1814 bsize = 3072 * width_x64 * height_x32;
1815 else
1816 bsize = 4096 * width_x64 * height_x32;
1817
1818 return bsize;
1819}
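/* Example (pure arithmetic from the function above): a 3840x2160 frame
 * with mem_saving_mode off needs 4096 * ceil(3840/64) * ceil(2160/32) =
 * 4096 * 60 * 68 = 16,711,680 bytes (~16 MB) of compressed body space.
 */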
1820
1821/* Lossless compression header buffer size 32 bytes per 128x64 (jt) */
1822static int compute_losless_comp_header_size(int width, int height)
1823{
1824 int width_x128;
1825 int height_x64;
1826 int hsize;
1827
1828 width_x128 = width + 127;
1829 width_x128 >>= 7;
1830
1831 height_x64 = height + 63;
1832 height_x64 >>= 6;
1833
1834 hsize = 32*width_x128*height_x64;
1835
1836 return hsize;
1837}
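/* Example: for the same 3840x2160 frame the compressed header buffer is
 * 32 * ceil(3840/128) * ceil(2160/64) = 32 * 30 * 34 = 32,640 bytes.
 */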
1838#endif
1839
1840static int add_log(struct hevc_state_s *hevc,
1841 const char *fmt, ...)
1842{
1843#define HEVC_LOG_BUF 196
1844 struct debug_log_s *log_item;
1845 unsigned char buf[HEVC_LOG_BUF];
1846 int len = 0;
1847 va_list args;
1848 mutex_lock(&vh265_log_mutex);
1849 va_start(args, fmt);
1850 len = sprintf(buf, "<%ld> <%05d> ",
1851 jiffies, hevc->decode_idx);
1852 len += vsnprintf(buf + len,
1853 HEVC_LOG_BUF - len, fmt, args);
1854 va_end(args);
1855 log_item = kmalloc(
1856 sizeof(struct debug_log_s) + len,
1857 GFP_KERNEL);
1858 if (log_item) {
1859 INIT_LIST_HEAD(&log_item->list);
1860 strcpy(&log_item->data, buf);
1861 list_add_tail(&log_item->list,
1862 &hevc->log_list);
1863 }
1864 mutex_unlock(&vh265_log_mutex);
1865 return 0;
1866}
1867
1868static void dump_log(struct hevc_state_s *hevc)
1869{
1870 int i = 0;
1871 struct debug_log_s *log_item, *tmp;
1872 mutex_lock(&vh265_log_mutex);
1873 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1874 hevc_print(hevc, 0,
1875 "[LOG%04d]%s\n",
1876 i++,
1877 &log_item->data);
1878 list_del(&log_item->list);
1879 kfree(log_item);
1880 }
1881 mutex_unlock(&vh265_log_mutex);
1882}
1883
1884static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1885 struct PIC_s *pic)
1886{
1887 if (pic->error_mark
1888 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1889 return 1;
1890 return 0;
1891}
1892
1893static int get_pic_poc(struct hevc_state_s *hevc,
1894 unsigned int idx)
1895{
1896 if (idx != 0xff
1897 && idx < MAX_REF_PIC_NUM
1898 && hevc->m_PIC[idx])
1899 return hevc->m_PIC[idx]->POC;
1900 return INVALID_POC;
1901}
1902
1903#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1904static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1905{
1906 return (hevc->m_ins_flag &&
1907 ((double_write_mode & 0x80000000) == 0)) ?
1908 hevc->double_write_mode :
1909 (double_write_mode & 0x7fffffff);
1910}
1911
1912static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1913{
1914 return (hevc->m_ins_flag &&
1915 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1916 hevc->dynamic_buf_num_margin :
1917 (dynamic_buf_num_margin & 0x7fffffff);
1918}
1919#endif
1920
1921static int get_double_write_mode(struct hevc_state_s *hevc)
1922{
1923 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1924 int w = hevc->pic_w;
1925 int h = hevc->pic_h;
1926 u32 dw = 0x1; /*1:1*/
1927 switch (valid_dw_mode) {
1928 case 0x100:
1929 if (w > 1920 && h > 1088)
1930 dw = 0x4; /*1:2*/
1931 break;
1932 case 0x200:
1933 if (w > 1920 && h > 1088)
1934 dw = 0x2; /*1:4*/
1935 break;
1936 case 0x300:
1937 if (w > 1280 && h > 720)
1938 dw = 0x4; /*1:2*/
1939 break;
1940 default:
1941 dw = valid_dw_mode;
1942 break;
1943 }
1944 return dw;
1945}
1946
1947static int get_double_write_ratio(struct hevc_state_s *hevc,
1948 int dw_mode)
1949{
1950 int ratio = 1;
1951 if ((dw_mode == 2) ||
1952 (dw_mode == 3))
1953 ratio = 4;
1954 else if (dw_mode == 4)
1955 ratio = 2;
1956 return ratio;
1957}
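
/*
 * How get_double_write_mode() and get_double_write_ratio() combine
 * (illustrative values only): with double_write_mode 0x200 and a 3840x2160
 * stream the mode helper returns 0x2 (since w > 1920 && h > 1088) and the
 * ratio helper maps 0x2 to 4, i.e. a 960x540 double-write plane; for a
 * 1920x1080 stream the resolution test fails, the mode stays 0x1 and the
 * double-write plane keeps full size.
 */
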
1958#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1959static unsigned char get_idx(struct hevc_state_s *hevc)
1960{
1961 return hevc->index;
1962}
1963#endif
1964
1965#undef pr_info
1966#define pr_info printk
1967static int hevc_print(struct hevc_state_s *hevc,
1968 int flag, const char *fmt, ...)
1969{
1970#define HEVC_PRINT_BUF 256
1971 unsigned char buf[HEVC_PRINT_BUF];
1972 int len = 0;
1973#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1974 if (hevc == NULL ||
1975 (flag == 0) ||
1976 ((debug_mask &
1977 (1 << hevc->index))
1978 && (debug & flag))) {
1979#endif
1980 va_list args;
1981
1982 va_start(args, fmt);
1983 if (hevc)
1984 len = sprintf(buf, "[%d]", hevc->index);
1985 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1986 pr_debug("%s", buf);
1987 va_end(args);
1988#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1989 }
1990#endif
1991 return 0;
1992}
1993
1994static int hevc_print_cont(struct hevc_state_s *hevc,
1995 int flag, const char *fmt, ...)
1996{
1997 unsigned char buf[HEVC_PRINT_BUF];
1998 int len = 0;
1999#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2000 if (hevc == NULL ||
2001 (flag == 0) ||
2002 ((debug_mask &
2003 (1 << hevc->index))
2004 && (debug & flag))) {
2005#endif
2006 va_list args;
2007
2008 va_start(args, fmt);
2009 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2010 pr_info("%s", buf);
2011 va_end(args);
2012#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2013 }
2014#endif
2015 return 0;
2016}
2017
2018static void put_mv_buf(struct hevc_state_s *hevc,
2019 struct PIC_s *pic);
2020
2021static void update_vf_memhandle(struct hevc_state_s *hevc,
2022 struct vframe_s *vf, struct PIC_s *pic);
2023
2024static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2025
2026static void release_aux_data(struct hevc_state_s *hevc,
2027 struct PIC_s *pic);
2028static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2029
2030#ifdef MULTI_INSTANCE_SUPPORT
2031static void backup_decode_state(struct hevc_state_s *hevc)
2032{
2033 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2034 hevc->curr_POC_bak = hevc->curr_POC;
2035 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2036 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2037 hevc->start_parser_type_bak = hevc->start_parser_type;
2038 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2039 hevc->rps_set_id_bak = hevc->rps_set_id;
2040 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2041 hevc->decode_idx_bak = hevc->decode_idx;
2042
2043}
2044
2045static void restore_decode_state(struct hevc_state_s *hevc)
2046{
2047 struct vdec_s *vdec = hw_to_vdec(hevc);
2048 if (!vdec_has_more_input(vdec)) {
2049 hevc->pic_decoded_lcu_idx =
2050 READ_VREG(HEVC_PARSER_LCU_START)
2051 & 0xffffff;
2052 return;
2053 }
2054 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2055 "%s: discard pic index 0x%x\n",
2056 __func__, hevc->decoding_pic ?
2057 hevc->decoding_pic->index : 0xff);
2058 if (hevc->decoding_pic) {
2059 hevc->decoding_pic->error_mark = 0;
2060 hevc->decoding_pic->output_ready = 0;
2061 hevc->decoding_pic->output_mark = 0;
2062 hevc->decoding_pic->referenced = 0;
2063 hevc->decoding_pic->POC = INVALID_POC;
2064 put_mv_buf(hevc, hevc->decoding_pic);
2065 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2066 release_aux_data(hevc, hevc->decoding_pic);
2067 hevc->decoding_pic = NULL;
2068 }
2069 hevc->decode_idx = hevc->decode_idx_bak;
2070 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2071 hevc->curr_POC = hevc->curr_POC_bak;
2072 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2073 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2074 hevc->start_parser_type = hevc->start_parser_type_bak;
2075 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2076 hevc->rps_set_id = hevc->rps_set_id_bak;
2077 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2078
2079 if (hevc->pic_list_init_flag == 1)
2080 hevc->pic_list_init_flag = 0;
2081 /*if (hevc->decode_idx == 0)
2082 hevc->start_decoding_flag = 0;*/
2083
2084 hevc->slice_idx = 0;
2085 hevc->used_4k_num = -1;
2086}
2087#endif
2088
2089static void hevc_init_stru(struct hevc_state_s *hevc,
2090 struct BuffInfo_s *buf_spec_i)
2091{
2092 //int i;
2093 INIT_LIST_HEAD(&hevc->log_list);
2094 hevc->work_space_buf = buf_spec_i;
2095 hevc->prefix_aux_size = 0;
2096 hevc->suffix_aux_size = 0;
2097 hevc->aux_addr = NULL;
2098 hevc->rpm_addr = NULL;
2099 hevc->lmem_addr = NULL;
2100
2101 hevc->curr_POC = INVALID_POC;
2102
2103 hevc->pic_list_init_flag = 0;
2104 hevc->use_cma_flag = 0;
2105 hevc->decode_idx = 0;
2106 hevc->slice_idx = 0;
2107 hevc->new_pic = 0;
2108 hevc->new_tile = 0;
2109 hevc->iPrevPOC = 0;
2110 hevc->list_no = 0;
2111 /* int m_uiMaxCUWidth = 1<<7; */
2112 /* int m_uiMaxCUHeight = 1<<7; */
2113 hevc->m_pocRandomAccess = MAX_INT;
2114 hevc->tile_enabled = 0;
2115 hevc->tile_x = 0;
2116 hevc->tile_y = 0;
2117 hevc->iPrevTid0POC = 0;
2118 hevc->slice_addr = 0;
2119 hevc->slice_segment_addr = 0;
2120 hevc->skip_flag = 0;
2121 hevc->misc_flag0 = 0;
2122
2123 hevc->cur_pic = NULL;
2124 hevc->col_pic = NULL;
2125 hevc->wait_buf = 0;
2126 hevc->error_flag = 0;
2127 hevc->head_error_flag = 0;
2128 hevc->error_skip_nal_count = 0;
2129 hevc->have_vps = 0;
2130 hevc->have_sps = 0;
2131 hevc->have_pps = 0;
2132 hevc->have_valid_start_slice = 0;
2133
2134 hevc->pts_mode = PTS_NORMAL;
2135 hevc->last_pts = 0;
2136 hevc->last_lookup_pts = 0;
2137 hevc->last_pts_us64 = 0;
2138 hevc->last_lookup_pts_us64 = 0;
2139 hevc->pts_mode_switching_count = 0;
2140 hevc->pts_mode_recovery_count = 0;
2141
2142 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2143 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2144 if (hevc->PB_skip_mode == 0)
2145 hevc->ignore_bufmgr_error = 0x1;
2146 else
2147 hevc->ignore_bufmgr_error = 0x0;
2148
2149 /*for (i = 0; i < MAX_REF_PIC_NUM; i++)
2150 hevc->m_PIC[i] = NULL;*/
2151
2152 hevc->pic_num = 0;
2153 hevc->lcu_x_num_pre = 0;
2154 hevc->lcu_y_num_pre = 0;
2155 hevc->first_pic_after_recover = 0;
2156
2157 hevc->pre_top_pic = NULL;
2158 hevc->pre_bot_pic = NULL;
2159
2160 hevc->sei_present_flag = 0;
2161 hevc->valve_count = 0;
2162 hevc->first_pic_flag = 0;
2163#ifdef MULTI_INSTANCE_SUPPORT
2164 hevc->decoded_poc = INVALID_POC;
2165 hevc->start_process_time = 0;
2166 hevc->last_lcu_idx = 0;
2167 hevc->decode_timeout_count = 0;
2168 hevc->timeout_num = 0;
2169 hevc->eos = 0;
2170 hevc->pic_decoded_lcu_idx = -1;
2171 hevc->over_decode = 0;
2172 hevc->used_4k_num = -1;
2173 hevc->start_decoding_flag = 0;
2174 hevc->rps_set_id = 0;
2175 backup_decode_state(hevc);
2176#endif
2177#ifdef DETREFILL_ENABLE
2178 hevc->detbuf_adr = 0;
2179 hevc->detbuf_adr_virt = NULL;
2180#endif
2181}
2182
2183static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2184static int H265_alloc_mmu(struct hevc_state_s *hevc,
2185 struct PIC_s *new_pic, unsigned short bit_depth,
2186 unsigned int *mmu_index_adr);
2187
2188#ifdef DETREFILL_ENABLE
2189#define DETREFILL_BUF_SIZE (4 * 0x4000)
2190#define HEVC_SAO_DBG_MODE0 0x361e
2191#define HEVC_SAO_DBG_MODE1 0x361f
2192#define HEVC_SAO_CTRL10 0x362e
2193#define HEVC_SAO_CTRL11 0x362f
2194static int init_detrefill_buf(struct hevc_state_s *hevc)
2195{
2196 if (hevc->detbuf_adr_virt)
2197 return 0;
2198
2199 hevc->detbuf_adr_virt =
2200 (void *)dma_alloc_coherent(amports_get_dma_device(),
2201 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2202 GFP_KERNEL);
2203
2204 if (hevc->detbuf_adr_virt == NULL) {
2205		pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2206 return -1;
2207 }
2208 return 0;
2209}
2210
2211static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2212{
2213 if (hevc->detbuf_adr_virt) {
2214 dma_free_coherent(amports_get_dma_device(),
2215 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2216 hevc->detbuf_adr);
2217
2218 hevc->detbuf_adr_virt = NULL;
2219 hevc->detbuf_adr = 0;
2220 }
2221}
2222
2223/*
2224 * convert uncompressed 8x4 block data from/to DDR (direction 0: unpack from DDR, 1: pack back to DDR)
2225 */
2226static void convUnc8x4blk(uint16_t* blk8x4Luma,
2227 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2228{
2229 if (direction == 0) {
2230 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2231 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2232 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2233 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2234 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2235 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2236 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2237 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2238 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2239 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2240 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2241 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2242 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2243 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2244 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2245 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2246 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2247 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2248
2249 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2250 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2251 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2252 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2253 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2254 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2255 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2256 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2257 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2258 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2259 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2260 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2261 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2262 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2263 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2264 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2265 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2266 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2267
2268 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2269 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2270 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2271 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2272 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2273 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2274 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2275 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2276 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2277 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2278 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2279 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2280 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2281 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2282 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2283 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2284 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2285 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2286
2287 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2288 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2289 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2290 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2291 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2292 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2293 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2294 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2295 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2296 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2297 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2298 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2299 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2300 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2301 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2302 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2303 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2304 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2305 } else {
2306 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2307 blk8x4Luma[3 + 0 * 8];
2308 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2309 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2310 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2311 (blk8x4Luma[3 + 3 * 8] >> 2);
2312 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2313 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2314 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2315 (blk8x4Luma[7 + 2 * 8] >>4);
2316 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2317 blk8x4Cb[0 + 0 * 4];
2318 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2319 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2320 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2321
2322 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2323 blk8x4Luma[0 + 0 * 8];
2324 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2325 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2326 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2327 (blk8x4Luma[0 + 1 * 8] >> 2);
2328 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2329 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2330 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2331 (blk8x4Luma[0 + 2 * 8] >>4);
2332 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2333 blk8x4Luma[2 + 2 * 8];
2334 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2335 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2336 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2337
2338 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2339 blk8x4Luma[4 + 0 * 8];
2340 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2341 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2342 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2343 (blk8x4Luma[4 + 1 * 8] >> 2);
2344 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2345 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2346 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2347 (blk8x4Luma[4 + 2 * 8] >>4);
2348 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2349 blk8x4Luma[6 + 2 * 8];
2350 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2351 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2352 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2353
2354 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2355 blk8x4Cb[1 + 0 * 4];
2356 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2357 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2358 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2359 (blk8x4Cr[2 + 0 * 4] >> 2);
2360 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2361 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2362 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2363 (blk8x4Cb[1 + 1 * 4] >>4);
2364 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2365 blk8x4Cb[2 + 1 * 4];
2366 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2367 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2368 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2369 }
2370}
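
/*
 * The tables above hard-code one 8x4 block layout: each 8-word group of
 * cmBodyBuf carries 12 ten-bit samples packed back to back (LSB first)
 * across consecutive 16-bit words, with 8 padding bits at the top of the
 * last word.  As a minimal illustrative sketch (not driver code), a
 * hypothetical unpack_10bit() helper extracting the samples of one group
 * could look like this:
 */
#if 0
static void unpack_10bit(const uint16_t *words, uint16_t *out, int n)
{
	int i;

	for (i = 0; i < n; i++) {	/* n <= 12 samples per 8-word group */
		int bit = i * 10;	/* absolute bit position */
		int w = bit >> 4;	/* 16-bit word index */
		int off = bit & 0xf;	/* bit offset inside that word */
		uint32_t v = words[w];

		if (off + 10 > 16)	/* sample straddles two words */
			v |= (uint32_t)words[w + 1] << 16;
		out[i] = (v >> off) & 0x3ff;
	}
}
#endif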
2371
2372static void corrRefillWithAmrisc (
2373 struct hevc_state_s *hevc,
2374 uint32_t cmHeaderBaseAddr,
2375 uint32_t picWidth,
2376 uint32_t ctuPosition)
2377{
2378 int32_t i;
2379 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2380 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2381 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2382
2383 uint16_t cmBodyBuf[32 * 18];
2384
2385 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2386 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2387 uint32_t stride64x64 = pic_width_x64 * 128;
2388 uint32_t addr_offset64x64_abv = stride64x64 *
2389 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2390 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2391 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2392 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2393 unsigned int tmpData32;
2394
2395 uint16_t blkBuf0Y[32];
2396 uint16_t blkBuf0Cb[8];
2397 uint16_t blkBuf0Cr[8];
2398 uint16_t blkBuf1Y[32];
2399 uint16_t blkBuf1Cb[8];
2400 uint16_t blkBuf1Cr[8];
2401 int32_t blkBufCnt = 0;
2402
2403 int32_t blkIdx;
2404
2405 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2406 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2407 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2408 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2409
2410 for (i = 0; i < 32 * 18; i++)
2411 cmBodyBuf[i] = 0;
2412
2413 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2414 "%s, %d\n", __func__, __LINE__);
2415 do {
2416 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2417 } while (tmpData32);
2418 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2419 "%s, %d\n", __func__, __LINE__);
2420
2421 hevc_print(hevc, H265_DEBUG_DETAIL,
2422 "cmBodyBuf from detbuf:\n");
2423 for (i = 0; i < 32 * 18; i++) {
2424 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2425 if (get_dbg_flag(hevc) &
2426 H265_DEBUG_DETAIL) {
2427 if ((i & 0xf) == 0)
2428 hevc_print_cont(hevc, 0, "\n");
2429 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2430 }
2431 }
2432 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2433
2434 for (i = 0; i < 32; i++)
2435 blkBuf0Y[i] = 0;
2436 for (i = 0; i < 8; i++)
2437 blkBuf0Cb[i] = 0;
2438 for (i = 0; i < 8; i++)
2439 blkBuf0Cr[i] = 0;
2440 for (i = 0; i < 32; i++)
2441 blkBuf1Y[i] = 0;
2442 for (i = 0; i < 8; i++)
2443 blkBuf1Cb[i] = 0;
2444 for (i = 0; i < 8; i++)
2445 blkBuf1Cr[i] = 0;
2446
2447 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2448 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2449 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2450 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2451 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2452 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2453 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2454
2455 if (!aboveCtuAvailable && inAboveCtu)
2456 continue;
2457
2458 /* detRefillBuf --> 8x4block*/
2459 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2460
2461 if (restoreEnable) {
2462 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2463 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2464 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2465 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2466 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2467 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2468 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2469 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2470 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2471 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2472 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2473 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2474 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2475 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2476 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2477 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2478 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2479 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2480 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2481 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2482 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2483 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2484 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2485 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2486 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2487 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2488 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2489 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2490 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2491 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2492 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2493 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2494
2495 /*Store data back to DDR*/
2496 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2497 }
2498
2499 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2500 }
2501
2502 hevc_print(hevc, H265_DEBUG_DETAIL,
2503 "cmBodyBuf to detbuf:\n");
2504 for (i = 0; i < 32 * 18; i++) {
2505 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2506 if (get_dbg_flag(hevc) &
2507 H265_DEBUG_DETAIL) {
2508 if ((i & 0xf) == 0)
2509 hevc_print_cont(hevc, 0, "\n");
2510 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2511 }
2512 }
2513 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2514
2515 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2516 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2517 "%s, %d\n", __func__, __LINE__);
2518 do {
2519 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2520 } while (tmpData32);
2521 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2522 "%s, %d\n", __func__, __LINE__);
2523}
2524
2525static void delrefill(struct hevc_state_s *hevc)
2526{
2527 /*
2528 * corrRefill
2529 */
2530 /*HEVC_SAO_DBG_MODE0: picGlobalVariable
2531 [31:30]error number
2532 [29:20]error2([9:7]tilex[6:0]ctuy)
2533 [19:10]error1 [9:0]error0*/
2534 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2535 uint32_t errorIdx;
2536 uint32_t errorNum = (detResult>>30);
2537
2538 if (detResult) {
2539 hevc_print(hevc, H265_DEBUG_BUFMGR,
2540 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2541 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2542 uint32_t errorPos = errorIdx * 10;
2543 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2544 uint32_t tilex = (errorResult >> 7) - 1;
2545 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2546 + hevc->m_tile[0][tilex].width - 1;
2547 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2548 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2549 hevc_print(hevc, H265_DEBUG_BUFMGR,
2550 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2551 errorIdx,tilex,ctux,ctux, ctuy,ctuy);
2552 corrRefillWithAmrisc(
2553 hevc,
2554 (uint32_t)hevc->cur_pic->header_adr,
2555 hevc->pic_w,
2556 ctuPosition);
2557 }
2558
2559 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2560 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2561 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2562 }
2563}
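
/*
 * Example decode of the detResult layout handled above (illustrative value):
 * detResult = 0x40000115 gives errorNum = detResult >> 30 = 1; the single
 * 10-bit error field 0x115 then splits into tilex = (0x115 >> 7) - 1 = 1 and
 * ctuy = 0x115 & 0x7f = 21.
 */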
2564#endif
2565
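/*
 * Read the 128 RPM parameter words posted by the ucode: bit 16 of
 * RPM_CMD_REG flags that a word is available, the low 16 bits carry the
 * value, and 0 is written back before polling for the next word.
 */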
2566static void get_rpm_param(union param_u *params)
2567{
2568 int i;
2569 unsigned int data32;
2570
2571 for (i = 0; i < 128; i++) {
2572 do {
2573 data32 = READ_VREG(RPM_CMD_REG);
2574 /* hevc_print(hevc, 0, "%x\n", data32); */
2575 } while ((data32 & 0x10000) == 0);
2576 params->l.data[i] = data32 & 0xffff;
2577 /* hevc_print(hevc, 0, "%x\n", data32); */
2578 WRITE_VREG(RPM_CMD_REG, 0);
2579 }
2580}
2581
2582static int get_free_buf_idx(struct hevc_state_s *hevc)
2583{
2584 int index = INVALID_IDX;
2585 struct PIC_s *pic;
2586 int i;
2587
2588 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2589 pic = hevc->m_PIC[i];
2590 if (pic == NULL ||
2591 pic->index == -1 ||
2592 pic->BUF_index == -1)
2593 continue;
2594
2595 if (pic->output_mark == 0 &&
2596 pic->referenced == 0 &&
2597 pic->output_ready == 0) {
2598 pic->output_ready = 1;
2599 index = i;
2600 break;
2601 }
2602 }
2603
2604 return index;
2605}
2606
2607static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2608{
2609 int i;
2610 struct PIC_s *pic;
2611 struct PIC_s *ret_pic = NULL;
2612 if (POC == INVALID_POC)
2613 return NULL;
2614 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2615 pic = hevc->m_PIC[i];
2616 if (pic == NULL || pic->index == -1 ||
2617 pic->BUF_index == -1)
2618 continue;
2619 if (pic->POC == POC) {
2620 if (ret_pic == NULL)
2621 ret_pic = pic;
2622 else {
2623 if (pic->decode_idx > ret_pic->decode_idx)
2624 ret_pic = pic;
2625 }
2626 }
2627 }
2628 return ret_pic;
2629}
2630
2631static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2632{
2633 int i;
2634 struct PIC_s *pic;
2635 struct PIC_s *ret_pic = NULL;
2636
2637 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2638 pic = hevc->m_PIC[i];
2639 if (pic == NULL || pic->index == -1 ||
2640 pic->BUF_index == -1)
2641 continue;
2642 if ((pic->POC == POC) && (pic->referenced)) {
2643 if (ret_pic == NULL)
2644 ret_pic = pic;
2645 else {
2646 if (pic->decode_idx > ret_pic->decode_idx)
2647 ret_pic = pic;
2648 }
2649 }
2650 }
2651
2652 if (ret_pic == NULL) {
2653 if (get_dbg_flag(hevc)) {
2654 hevc_print(hevc, 0,
2655 "Wrong, POC of %d is not in referenced list\n",
2656 POC);
2657 }
2658 ret_pic = get_pic_by_POC(hevc, POC);
2659 }
2660 return ret_pic;
2661}
2662
2663static unsigned int log2i(unsigned int val)
2664{
2665 unsigned int ret = -1;
2666
2667 while (val != 0) {
2668 val >>= 1;
2669 ret++;
2670 }
2671 return ret;
2672}
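
/*
 * log2i() returns the position of the highest set bit, e.g. log2i(64) = 6
 * and log2i(1) = 0; for val == 0 the loop never runs and (unsigned int)-1
 * is returned.
 */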
2673
2674static int init_buf_spec(struct hevc_state_s *hevc);
2675
2676static bool v4l_is_there_vframe_bound(struct hevc_state_s *hevc)
2677{
2678 int i;
2679
2680 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2681 struct PIC_s *pic = hevc->m_PIC[i];
2682
2683 if (pic && pic->vframe_bound)
2684 return true;
2685 }
2686
2687 return false;
2688}
2689
2690static void v4l_mmu_buffer_release(struct hevc_state_s *hevc)
2691{
2692 int i;
2693
2694 /* release workspace */
2695 if (hevc->bmmu_box)
2696 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2697 BMMU_WORKSPACE_ID);
2698 /*
2699	 * Only when a vframe comes back to the driver can we be sure that the
2700	 * vframe and its fd are still related. If playback exits, buffers still
2701	 * bound to a vframe must be released by the upper app when the fd is
2702	 * closed; the remaining buffers are released by the driver here.
2703 */
2704 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2705 struct PIC_s *pic = hevc->m_PIC[i];
2706
2707 if (pic && !pic->vframe_bound) {
2708 if (hevc->bmmu_box)
2709 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2710 VF_BUFFER_IDX(i));
2711 if (hevc->mmu_box)
2712 decoder_mmu_box_free_idx(hevc->mmu_box, i);
2713
2714 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
2715 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
2716 __func__, i, hevc->bmmu_box, hevc->mmu_box);
2717 }
2718 }
2719}
2720
2721static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2722{
2723 if (hevc->is_used_v4l &&
2724 v4l_is_there_vframe_bound(hevc)) {
2725 if (get_double_write_mode(hevc) != 0x10) {
2726 v4l_mmu_buffer_release(hevc);
2727 return;
2728 }
2729 }
2730
2731 if (hevc->mmu_box)
2732 decoder_mmu_box_free(hevc->mmu_box);
2733 hevc->mmu_box = NULL;
2734
2735 if (hevc->bmmu_box)
2736 decoder_bmmu_box_free(hevc->bmmu_box);
2737 hevc->bmmu_box = NULL;
2738}
2739static int init_mmu_buffers(struct hevc_state_s *hevc)
2740{
2741 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2742 CODEC_MM_FLAGS_TVP : 0;
2743 int buf_size = 64;
2744
2745 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2746 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2747 buf_size = 24;
2748 }
2749
2750 if (get_dbg_flag(hevc)) {
2751 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2752 __func__, hevc->max_pic_w, hevc->max_pic_h);
2753 }
2754
2755 hevc->need_cache_size = buf_size * SZ_1M;
2756 hevc->sc_start_time = get_jiffies_64();
2757 if (hevc->mmu_enable
2758 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2759 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2760 hevc->index,
2761 MAX_REF_PIC_NUM,
2762 buf_size * SZ_1M,
2763 tvp_flag
2764 );
2765 if (!hevc->mmu_box) {
2766 pr_err("h265 alloc mmu box failed!!\n");
2767 return -1;
2768 }
2769 }
2770
2771 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2772 hevc->index,
2773 BMMU_MAX_BUFFERS,
2774 4 + PAGE_SHIFT,
2775 CODEC_MM_FLAGS_CMA_CLEAR |
2776 CODEC_MM_FLAGS_FOR_VDECODER |
2777 tvp_flag);
2778 if (!hevc->bmmu_box) {
2779 if (hevc->mmu_box)
2780 decoder_mmu_box_free(hevc->mmu_box);
2781 hevc->mmu_box = NULL;
2782		pr_err("h265 alloc bmmu box failed!!\n");
2783 return -1;
2784 }
2785 return 0;
2786}
2787
2788struct buf_stru_s
2789{
2790 int lcu_total;
2791 int mc_buffer_size_h;
2792 int mc_buffer_size_u_v_h;
2793};
2794
2795#ifndef MV_USE_FIXED_BUF
2796static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2797{
2798 int i;
2799 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2800 if (hevc->m_mv_BUF[i].start_adr) {
2801 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2802 hevc_print(hevc, 0,
2803 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2804 i, hevc->m_mv_BUF[i].start_adr,
2805 hevc->m_mv_BUF[i].size,
2806 hevc->m_mv_BUF[i].used_flag);
2807 decoder_bmmu_box_free_idx(
2808 hevc->bmmu_box,
2809 MV_BUFFER_IDX(i));
2810 hevc->m_mv_BUF[i].start_adr = 0;
2811 hevc->m_mv_BUF[i].size = 0;
2812 hevc->m_mv_BUF[i].used_flag = 0;
2813 }
2814 }
2815 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2816 if (hevc->m_PIC[i] != NULL)
2817 hevc->m_PIC[i]->mv_buf_index = -1;
2818 }
2819}
2820
2821static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2822{
2823 int ret = 0;
2824 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2825 if (decoder_bmmu_box_alloc_buf_phy
2826 (hevc->bmmu_box,
2827 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2828 DRIVER_NAME,
2829 &hevc->m_mv_BUF[i].start_adr) < 0) {
2830 hevc->m_mv_BUF[i].start_adr = 0;
2831 ret = -1;
2832 } else {
2833 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2834 hevc->m_mv_BUF[i].used_flag = 0;
2835 ret = 0;
2836 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2837 hevc_print(hevc, 0,
2838 "MV Buffer %d: start_adr %p size %x\n",
2839 i,
2840 (void *)hevc->m_mv_BUF[i].start_adr,
2841 hevc->m_mv_BUF[i].size);
2842 }
2843 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2844 void *mem_start_virt;
2845 mem_start_virt =
2846 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2847 if (mem_start_virt) {
2848 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2849 codec_mm_dma_flush(mem_start_virt,
2850 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2851 } else {
2852 mem_start_virt = codec_mm_vmap(
2853 hevc->m_mv_BUF[i].start_adr,
2854 hevc->m_mv_BUF[i].size);
2855 if (mem_start_virt) {
2856 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2857 codec_mm_dma_flush(mem_start_virt,
2858 hevc->m_mv_BUF[i].size,
2859 DMA_TO_DEVICE);
2860 codec_mm_unmap_phyaddr(mem_start_virt);
2861 } else {
2862					/* no virtual address for TVP (secure)
2863					   playback; may need clearing by ucode */
2864 pr_err("ref %s mem_start_virt failed\n", __func__);
2865 }
2866 }
2867 }
2868 }
2869 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2870 return ret;
2871}
2872#endif
2873
2874static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2875{
2876#ifdef MV_USE_FIXED_BUF
2877 if (pic && pic->index >= 0) {
2878 if (IS_8K_SIZE(pic->width, pic->height)) {
2879 pic->mpred_mv_wr_start_addr =
2880 hevc->work_space_buf->mpred_mv.buf_start
2881 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2882 } else {
2883 pic->mpred_mv_wr_start_addr =
2884 hevc->work_space_buf->mpred_mv.buf_start
2885 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2886 }
2887 }
2888 return 0;
2889#else
2890 int i;
2891 int ret = -1;
2892 int new_size;
2893 if (IS_8K_SIZE(pic->width, pic->height))
2894 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2895 else if (IS_4K_SIZE(pic->width, pic->height))
2896 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2897 else
2898 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2899 if (new_size != hevc->mv_buf_size) {
2900 dealloc_mv_bufs(hevc);
2901 hevc->mv_buf_size = new_size;
2902 }
2903 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2904 if (hevc->m_mv_BUF[i].start_adr &&
2905 hevc->m_mv_BUF[i].used_flag == 0) {
2906 hevc->m_mv_BUF[i].used_flag = 1;
2907 ret = i;
2908 break;
2909 }
2910 }
2911 if (ret < 0) {
2912 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2913 if (hevc->m_mv_BUF[i].start_adr == 0) {
2914 if (alloc_mv_buf(hevc, i) >= 0) {
2915 hevc->m_mv_BUF[i].used_flag = 1;
2916 ret = i;
2917 }
2918 break;
2919 }
2920 }
2921 }
2922
2923 if (ret >= 0) {
2924 pic->mv_buf_index = ret;
2925 pic->mpred_mv_wr_start_addr =
2926 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2927 (~0xffff);
2928 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2929 "%s => %d (0x%x) size 0x%x\n",
2930 __func__, ret,
2931 pic->mpred_mv_wr_start_addr,
2932 hevc->m_mv_BUF[ret].size);
2933
2934 } else {
2935 hevc_print(hevc, 0,
2936 "%s: Error, mv buf is not enough\n",
2937 __func__);
2938 }
2939 return ret;
2940
2941#endif
2942}
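
/*
 * Worked numbers for the non-fixed MV buffer path above (illustrative):
 * a 1080p picture selects new_size = MPRED_MV_BUF_SIZE + 0x10000 = 0x50000,
 * a 4K picture 0x120000 + 0x10000 = 0x130000 and an 8K picture
 * 4 * 0x120000 + 0x10000 = 0x490000; the MV write start address is then
 * rounded up to the next 64 KiB boundary, e.g.
 * (0x12345678 + 0xffff) & ~0xffff = 0x12350000.
 */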
2943
2944static void put_mv_buf(struct hevc_state_s *hevc,
2945 struct PIC_s *pic)
2946{
2947#ifndef MV_USE_FIXED_BUF
2948 int i = pic->mv_buf_index;
2949 if (i < 0 || i >= MAX_REF_PIC_NUM) {
2950 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2951 "%s: index %d beyond range\n",
2952 __func__, i);
2953 return;
2954 }
2955 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2956 "%s(%d): used_flag(%d)\n",
2957 __func__, i,
2958 hevc->m_mv_BUF[i].used_flag);
2959
2960 if (hevc->m_mv_BUF[i].start_adr &&
2961 hevc->m_mv_BUF[i].used_flag)
2962 hevc->m_mv_BUF[i].used_flag = 0;
2963 pic->mv_buf_index = -1;
2964#endif
2965}
2966
2967static int cal_current_buf_size(struct hevc_state_s *hevc,
2968 struct buf_stru_s *buf_stru)
2969{
2970
2971 int buf_size;
2972 int pic_width = hevc->pic_w;
2973 int pic_height = hevc->pic_h;
2974 int lcu_size = hevc->lcu_size;
2975 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
2976 1 : pic_width / lcu_size;
2977 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
2978 1 : pic_height / lcu_size;
2979 /*SUPPORT_10BIT*/
2980 int losless_comp_header_size = compute_losless_comp_header_size
2981 (pic_width, pic_height);
2982 /*always alloc buf for 10bit*/
2983 int losless_comp_body_size = compute_losless_comp_body_size
2984 (hevc, pic_width, pic_height, 0);
2985 int mc_buffer_size = losless_comp_header_size
2986 + losless_comp_body_size;
2987 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
2988 int mc_buffer_size_u_v_h = 0;
2989
2990 int dw_mode = get_double_write_mode(hevc);
2991
2992 if (hevc->mmu_enable) {
2993 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
2994 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
2995 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
2996 << 16;
2997 else
2998 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
2999 << 16;
3000 } else
3001 buf_size = 0;
3002
3003 if (dw_mode) {
3004 int pic_width_dw = pic_width /
3005 get_double_write_ratio(hevc, dw_mode);
3006 int pic_height_dw = pic_height /
3007 get_double_write_ratio(hevc, dw_mode);
3008
3009 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
3010 pic_width_dw / lcu_size + 1 :
3011 pic_width_dw / lcu_size;
3012 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
3013 pic_height_dw / lcu_size + 1 :
3014 pic_height_dw / lcu_size;
3015 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
3016
3017 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
3018 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
3019 /*64k alignment*/
3020 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
3021 }
3022
3023 if ((!hevc->mmu_enable) &&
3024 ((dw_mode & 0x10) == 0)) {
3025		/* compressed mode without MMU: an extra buffer
3026		   is needed for the compressed reference data */
3027 buf_size += (mc_buffer_size_h << 16);
3028 }
3029
3030	/* in case the start address is not 64k aligned */
3031 if (buf_size > 0)
3032 buf_size += 0x10000;
3033
3034 if (buf_stru) {
3035 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
3036 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
3037 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
3038 }
3039
3040	hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
3041 pic_width, pic_height, losless_comp_header_size,
3042 losless_comp_body_size, mc_buffer_size_h,
3043 mc_buffer_size_u_v_h, buf_size);
3044
3045 return buf_size;
3046}
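
/*
 * A worked example of cal_current_buf_size() (illustrative only), assuming
 * a 1920x1088 picture, lcu_size 64, mmu disabled and double write mode 1:
 * the compressed reference needs 8160 + 0x3fc000 bytes, rounded up to
 * 64 x 64 KiB; the double-write planes need 510 LCUs * 2048 bytes, rounded
 * up to 16 x 64 KiB and multiplied by 3 (luma is twice the chroma size);
 * with the extra 64 KiB alignment pad the total is
 * (64 + 16 * 3 + 1) * 0x10000 = 0x710000 bytes.
 */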
3047
3048static int v4l_alloc_buf(struct hevc_state_s *hevc)
3049{
3050 int i;
3051 int ret = -1;
3052 struct vdec_v4l2_buffer *fb = NULL;
3053
3054 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3055 return ret;
3056
3057 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3058 if (ret < 0) {
3059 hevc_print(hevc, 0, "[%d] H265 get buffer fail.\n",
3060 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id);
3061 return ret;
3062 }
3063
3064 for (i = 0; i < BUF_POOL_SIZE; i++)
3065 if (hevc->m_BUF[i].start_adr == 0)
3066 break;
3067
3068 if (hevc->mmu_enable) {
3069 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3070 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3071 hevc->m_BUF[i].header_size =
3072 ALIGN(MMU_COMPRESS_8K_HEADER_SIZE, 0x10000);
3073 else
3074 hevc->m_BUF[i].header_size =
3075 ALIGN(MMU_COMPRESS_HEADER_SIZE, 0x10000);
3076
3077 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
3078 VF_BUFFER_IDX(i), hevc->m_BUF[i].header_size,
3079 DRIVER_NAME, &hevc->m_BUF[i].header_addr);
3080 if (ret < 0) {
3081 hevc_print(hevc, PRINT_FLAG_ERROR,
3082 "%s[%d], header size: %d, no mem fatal err\n",
3083 __func__, i, hevc->m_BUF[i].header_size);
3084 return ret;
3085 }
3086 }
3087
3088 hevc->m_BUF[i].used_flag = 0;
3089 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3090 if (fb->num_planes == 1) {
3091 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3092 hevc->m_BUF[i].size = fb->m.mem[0].size;
3093 hevc->m_BUF[i].luma_size = fb->m.mem[0].offset;
3094 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3095 } else if (fb->num_planes == 2) {
3096 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3097 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3098 hevc->m_BUF[i].luma_size = fb->m.mem[0].size;
3099 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3100 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3101 }
3102
3103 return ret;
3104}
3105
3106static int alloc_buf(struct hevc_state_s *hevc)
3107{
3108 int i;
3109 int ret = -1;
3110 int buf_size = cal_current_buf_size(hevc, NULL);
3111
3112 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3113 return ret;
3114
3115 for (i = 0; i < BUF_POOL_SIZE; i++) {
3116 if (hevc->m_BUF[i].start_adr == 0)
3117 break;
3118 }
3119 if (i < BUF_POOL_SIZE) {
3120 if (buf_size > 0) {
3121 ret = decoder_bmmu_box_alloc_buf_phy
3122 (hevc->bmmu_box,
3123 VF_BUFFER_IDX(i), buf_size,
3124 DRIVER_NAME,
3125 &hevc->m_BUF[i].start_adr);
3126 if (ret < 0) {
3127 hevc->m_BUF[i].start_adr = 0;
3128 if (i <= 8) {
3129 hevc->fatal_error |=
3130 DECODER_FATAL_ERROR_NO_MEM;
3131 hevc_print(hevc, PRINT_FLAG_ERROR,
3132 "%s[%d], size: %d, no mem fatal err\n",
3133 __func__, i, buf_size);
3134 }
3135 }
3136
3137 if (ret >= 0) {
3138 hevc->m_BUF[i].size = buf_size;
3139 hevc->m_BUF[i].used_flag = 0;
3140 ret = 0;
3141
3142 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3143 hevc_print(hevc, 0,
3144 "Buffer %d: start_adr %p size %x\n",
3145 i,
3146 (void *)hevc->m_BUF[i].start_adr,
3147 hevc->m_BUF[i].size);
3148 }
3149				/* flush the buffer to make sure there is no dirty cache */
3150 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3151 void *mem_start_virt;
3152 mem_start_virt =
3153 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3154 if (mem_start_virt) {
3155 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3156 codec_mm_dma_flush(mem_start_virt,
3157 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3158 } else {
3159 mem_start_virt = codec_mm_vmap(
3160 hevc->m_BUF[i].start_adr,
3161 hevc->m_BUF[i].size);
3162 if (mem_start_virt) {
3163 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3164 codec_mm_dma_flush(mem_start_virt,
3165 hevc->m_BUF[i].size,
3166 DMA_TO_DEVICE);
3167 codec_mm_unmap_phyaddr(mem_start_virt);
3168 } else {
3169						/* no virtual address for TVP (secure)
3170						   playback; may need clearing by ucode */
3171 pr_err("ref %s mem_start_virt failed\n", __func__);
3172 }
3173 }
3174 }
3175 }
3176 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3177 } else
3178 ret = 0;
3179 }
3180
3181 if (ret >= 0) {
3182 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3183 hevc_print(hevc, 0,
3184				"alloc buf(%d) for %d/%d size 0x%x => %p\n",
3185 i, hevc->pic_w, hevc->pic_h,
3186 buf_size,
3187 hevc->m_BUF[i].start_adr);
3188 }
3189 } else {
3190 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3191 hevc_print(hevc, 0,
3192				"alloc buf(%d) for %d/%d size 0x%x => Fail!!!\n",
3193 i, hevc->pic_w, hevc->pic_h,
3194 buf_size);
3195 }
3196 }
3197 return ret;
3198}
3199
3200static void set_buf_unused(struct hevc_state_s *hevc, int i)
3201{
3202 if (i >= 0 && i < BUF_POOL_SIZE)
3203 hevc->m_BUF[i].used_flag = 0;
3204}
3205
3206static void dealloc_unused_buf(struct hevc_state_s *hevc)
3207{
3208 int i;
3209 for (i = 0; i < BUF_POOL_SIZE; i++) {
3210 if (hevc->m_BUF[i].start_adr &&
3211 hevc->m_BUF[i].used_flag == 0) {
3212 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3213 hevc_print(hevc, 0,
3214 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3215 i, hevc->m_BUF[i].start_adr,
3216 hevc->m_BUF[i].size);
3217 }
3218 if (!hevc->is_used_v4l)
3219 decoder_bmmu_box_free_idx(
3220 hevc->bmmu_box,
3221 VF_BUFFER_IDX(i));
3222 hevc->m_BUF[i].start_adr = 0;
3223 hevc->m_BUF[i].size = 0;
3224 }
3225 }
3226}
3227
3228static void dealloc_pic_buf(struct hevc_state_s *hevc,
3229 struct PIC_s *pic)
3230{
3231 int i = pic->BUF_index;
3232 pic->BUF_index = -1;
3233 if (i >= 0 &&
3234 i < BUF_POOL_SIZE &&
3235 hevc->m_BUF[i].start_adr) {
3236 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3237 hevc_print(hevc, 0,
3238 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3239 i, hevc->m_BUF[i].start_adr,
3240 hevc->m_BUF[i].size);
3241 }
3242
3243 if (!hevc->is_used_v4l)
3244 decoder_bmmu_box_free_idx(
3245 hevc->bmmu_box,
3246 VF_BUFFER_IDX(i));
3247 hevc->m_BUF[i].used_flag = 0;
3248 hevc->m_BUF[i].start_adr = 0;
3249 hevc->m_BUF[i].size = 0;
3250 }
3251}
3252
3253static int get_work_pic_num(struct hevc_state_s *hevc)
3254{
3255 int used_buf_num = 0;
3256 int sps_pic_buf_diff = 0;
3257
3258 if (get_dynamic_buf_num_margin(hevc) > 0) {
3259 if ((!hevc->sps_num_reorder_pics_0) &&
3260 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3261 /* the range of sps_num_reorder_pics_0 is in
3262 [0, sps_max_dec_pic_buffering_minus1_0] */
3263 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3264 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3265 } else
3266 used_buf_num = hevc->sps_num_reorder_pics_0
3267 + get_dynamic_buf_num_margin(hevc);
3268
3269 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3270 - hevc->sps_num_reorder_pics_0;
3271#ifdef MULTI_INSTANCE_SUPPORT
3272 /*
3273		 need one more buffer for multi-instance mode, as
3274		 apply_ref_pic_set() has no chance to run to
3275		 clear the referenced flag in some cases
3276 */
3277 if (hevc->m_ins_flag)
3278 used_buf_num++;
3279#endif
3280 } else
3281 used_buf_num = max_buf_num;
3282
3283 if (hevc->save_buffer_mode)
3284 hevc_print(hevc, 0,
3285			"save_buffer_mode: dynamic_buf_num_margin %d ----> %d\n",
3286 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3287
3288 if (sps_pic_buf_diff >= 4)
3289 {
3290 used_buf_num += 1;
3291 }
3292
3293 if (used_buf_num > MAX_BUF_NUM)
3294 used_buf_num = MAX_BUF_NUM;
3295 return used_buf_num;
3296}
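
/*
 * Example of the buffer count above (illustrative stream values): with
 * sps_num_reorder_pics_0 = 4, a dynamic margin of 7 and multi-instance mode
 * enabled, used_buf_num = 4 + 7 + 1 = 12; one more buffer is added when
 * sps_max_dec_pic_buffering_minus1_0 exceeds sps_num_reorder_pics_0 by 4 or
 * more, and the result is finally capped at MAX_BUF_NUM.
 */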
3297
3298static int get_alloc_pic_count(struct hevc_state_s *hevc)
3299{
3300 int alloc_pic_count = 0;
3301 int i;
3302 struct PIC_s *pic;
3303 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3304 pic = hevc->m_PIC[i];
3305 if (pic && pic->index >= 0)
3306 alloc_pic_count++;
3307 }
3308 return alloc_pic_count;
3309}
3310
3311static int v4l_config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3312{
3313 int i;
3314 int dw_mode = get_double_write_mode(hevc);
3315
3316 for (i = 0; i < BUF_POOL_SIZE; i++) {
3317 if (hevc->m_BUF[i].start_adr != 0 &&
3318 hevc->m_BUF[i].used_flag == 0) {
3319 hevc->m_BUF[i].used_flag = 1;
3320 break;
3321 }
3322 }
3323
3324 if (i >= BUF_POOL_SIZE)
3325 return -1;
3326
3327 if (hevc->mmu_enable)
3328 pic->header_adr = hevc->m_BUF[i].header_addr;
3329
3330 pic->BUF_index = i;
3331 pic->POC = INVALID_POC;
3332 pic->mc_canvas_y = pic->index;
3333 pic->mc_canvas_u_v = pic->index;
3334
3335 if (dw_mode & 0x10) {
3336 pic->mc_y_adr = hevc->m_BUF[i].start_adr;
3337 pic->mc_u_v_adr = pic->mc_y_adr + hevc->m_BUF[i].luma_size;
3338 pic->mc_canvas_y = (pic->index << 1);
3339 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3340
3341 pic->dw_y_adr = pic->mc_y_adr;
3342 pic->dw_u_v_adr = pic->mc_u_v_adr;
3343 } else if (dw_mode) {
3344 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3345 pic->dw_u_v_adr = pic->dw_y_adr + hevc->m_BUF[i].luma_size;
3346 }
3347
3348 return 0;
3349}
3350
3351static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3352{
3353 int ret = -1;
3354 int i;
3355 /*int lcu_size_log2 = hevc->lcu_size_log2;
3356 int MV_MEM_UNIT=lcu_size_log2==
3357 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3358 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3359 5 ? 0x80 : 0x20;
3360 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3361 hevc->work_space_buf->mpred_mv.buf_size;*/
3362 unsigned int y_adr = 0;
3363 struct buf_stru_s buf_stru;
3364 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3365 int dw_mode = get_double_write_mode(hevc);
3366
3367 for (i = 0; i < BUF_POOL_SIZE; i++) {
3368 if (hevc->m_BUF[i].start_adr != 0 &&
3369 hevc->m_BUF[i].used_flag == 0 &&
3370 buf_size <= hevc->m_BUF[i].size) {
3371 hevc->m_BUF[i].used_flag = 1;
3372 break;
3373 }
3374 }
3375
3376 if (i >= BUF_POOL_SIZE)
3377 return -1;
3378
3379 if (hevc->mmu_enable) {
3380 pic->header_adr = hevc->m_BUF[i].start_adr;
3381 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3382 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3383 y_adr = hevc->m_BUF[i].start_adr +
3384 MMU_COMPRESS_8K_HEADER_SIZE;
3385 else
3386 y_adr = hevc->m_BUF[i].start_adr +
3387 MMU_COMPRESS_HEADER_SIZE;
3388 } else
3389 y_adr = hevc->m_BUF[i].start_adr;
3390
3391 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3392
3393 pic->POC = INVALID_POC;
3394	/* ensure get_pic_by_POC() does not pick up
3395	   a buffer that has not been decoded yet */
3396 pic->BUF_index = i;
3397
3398 if ((!hevc->mmu_enable) &&
3399 ((dw_mode & 0x10) == 0)
3400 ) {
3401 pic->mc_y_adr = y_adr;
3402 y_adr += (buf_stru.mc_buffer_size_h << 16);
3403 }
3404 pic->mc_canvas_y = pic->index;
3405 pic->mc_canvas_u_v = pic->index;
3406 if (dw_mode & 0x10) {
3407 pic->mc_y_adr = y_adr;
3408 pic->mc_u_v_adr = y_adr +
3409 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3410 pic->mc_canvas_y = (pic->index << 1);
3411 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3412
3413 pic->dw_y_adr = pic->mc_y_adr;
3414 pic->dw_u_v_adr = pic->mc_u_v_adr;
3415 } else if (dw_mode) {
3416 pic->dw_y_adr = y_adr;
3417 pic->dw_u_v_adr = pic->dw_y_adr +
3418 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3419 }
3420
3421 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3422 hevc_print(hevc, 0,
3423 "%s index %d BUF_index %d mc_y_adr %x\n",
3424 __func__, pic->index,
3425 pic->BUF_index, pic->mc_y_adr);
3426 if (hevc->mmu_enable &&
3427 dw_mode)
3428 hevc_print(hevc, 0,
3429 "mmu double write adr %ld\n",
3430 pic->cma_alloc_addr);
3431 }
3432 ret = 0;
3433
3434 return ret;
3435}
3436
3437static void init_pic_list(struct hevc_state_s *hevc)
3438{
3439 int i;
3440 int init_buf_num = get_work_pic_num(hevc);
3441 int dw_mode = get_double_write_mode(hevc);
3442 struct vdec_s *vdec = hw_to_vdec(hevc);
3443	/* decoder buffer allocation is delayed when working in V4L mode */
3444 if (!hevc->is_used_v4l) {
3445 for (i = 0; i < init_buf_num; i++) {
3446 if (alloc_buf(hevc) < 0) {
3447 if (i <= 8) {
3448				/* if (i + 1) >= 9 buffers were already
3449				   allocated, don't report a fatal error */
3450 hevc->fatal_error |=
3451 DECODER_FATAL_ERROR_NO_MEM;
3452 }
3453 break;
3454 }
3455 }
3456 }
3457
3458 for (i = 0; i < init_buf_num; i++) {
3459 struct PIC_s *pic = hevc->m_PIC[i];
3460
3461 if (!pic) {
3462 pic = vmalloc(sizeof(struct PIC_s));
3463 if (pic == NULL) {
3464 hevc_print(hevc, 0,
3465 "%s: alloc pic %d fail!!!\n",
3466 __func__, i);
3467 break;
3468 }
3469 hevc->m_PIC[i] = pic;
3470 }
3471 memset(pic, 0, sizeof(struct PIC_s));
3472
3473 pic->index = i;
3474 pic->BUF_index = -1;
3475 pic->mv_buf_index = -1;
3476 if (vdec->parallel_dec == 1) {
3477 pic->y_canvas_index = -1;
3478 pic->uv_canvas_index = -1;
3479 }
3480
3481 pic->width = hevc->pic_w;
3482 pic->height = hevc->pic_h;
3483 pic->double_write_mode = dw_mode;
3484
3485		/* canvas configuration is delayed when working in V4L mode */
3486 if (!hevc->is_used_v4l) {
3487 if (config_pic(hevc, pic) < 0) {
3488 if (get_dbg_flag(hevc))
3489 hevc_print(hevc, 0,
3490 "Config_pic %d fail\n", pic->index);
3491 pic->index = -1;
3492 i++;
3493 break;
3494 }
3495
3496 if (pic->double_write_mode)
3497 set_canvas(hevc, pic);
3498 }
3499 }
3500
3501 for (; i < MAX_REF_PIC_NUM; i++) {
3502 struct PIC_s *pic = hevc->m_PIC[i];
3503
3504 if (!pic) {
3505 pic = vmalloc(sizeof(struct PIC_s));
3506 if (pic == NULL) {
3507 hevc_print(hevc, 0,
3508 "%s: alloc pic %d fail!!!\n",
3509 __func__, i);
3510 break;
3511 }
3512 hevc->m_PIC[i] = pic;
3513 }
3514 memset(pic, 0, sizeof(struct PIC_s));
3515
3516 pic->index = -1;
3517 pic->BUF_index = -1;
3518 if (vdec->parallel_dec == 1) {
3519 pic->y_canvas_index = -1;
3520 pic->uv_canvas_index = -1;
3521 }
3522 }
3523
3524}
3525
3526static void uninit_pic_list(struct hevc_state_s *hevc)
3527{
3528 struct vdec_s *vdec = hw_to_vdec(hevc);
3529 int i;
3530#ifndef MV_USE_FIXED_BUF
3531 dealloc_mv_bufs(hevc);
3532#endif
3533 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3534 struct PIC_s *pic = hevc->m_PIC[i];
3535
3536 if (pic) {
3537 if (vdec->parallel_dec == 1) {
3538 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3539 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3540 }
3541 release_aux_data(hevc, pic);
3542 vfree(pic);
3543 hevc->m_PIC[i] = NULL;
3544 }
3545 }
3546}
3547
3548#ifdef LOSLESS_COMPRESS_MODE
3549static void init_decode_head_hw(struct hevc_state_s *hevc)
3550{
3551
3552 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3553 unsigned int data32;
3554
3555 int losless_comp_header_size =
3556 compute_losless_comp_header_size(hevc->pic_w,
3557 hevc->pic_h);
3558 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3559 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3560
3561 hevc->losless_comp_body_size = losless_comp_body_size;
3562
3563
3564 if (hevc->mmu_enable) {
3565 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3566 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3567 } else {
3568 if (hevc->mem_saving_mode == 1)
3569 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3570 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3571 else
3572 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3573 ((workaround_enable & 2) ? 1 : 0));
3574 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3575 /*
3576 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3577 * //8-bit mode
3578 */
3579 }
3580 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3581 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3582 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3583
3584 if (hevc->mmu_enable) {
3585 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3586 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3587 buf_spec->mmu_vbh.buf_start +
3588 buf_spec->mmu_vbh.buf_size/2);
3589 data32 = READ_VREG(HEVC_SAO_CTRL9);
3590 data32 |= 0x1;
3591 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3592
3593 /* use HEVC_CM_HEADER_START_ADDR */
3594 data32 = READ_VREG(HEVC_SAO_CTRL5);
3595 data32 |= (1<<10);
3596 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3597 }
3598
3599 if (!hevc->m_ins_flag)
3600 hevc_print(hevc, 0,
3601 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3602 __func__, hevc->pic_w, hevc->pic_h,
3603 losless_comp_body_size, losless_comp_header_size);
3604
3605}
3606#endif
3607#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3608
3609static void init_pic_list_hw(struct hevc_state_s *hevc)
3610{
3611 int i;
3612 int cur_pic_num = MAX_REF_PIC_NUM;
3613 int dw_mode = get_double_write_mode(hevc);
3614 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3615 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3616 (0x1 << 1) | (0x1 << 2));
3617 else
3618 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3619
3620 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3621 if (hevc->m_PIC[i] == NULL ||
3622 hevc->m_PIC[i]->index == -1) {
3623 cur_pic_num = i;
3624 break;
3625 }
3626 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3627 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3628 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3629 hevc->m_PIC[i]->header_adr>>5);
3630 else
3631 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3632 hevc->m_PIC[i]->mc_y_adr >> 5);
3633 } else
3634 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3635 hevc->m_PIC[i]->mc_y_adr |
3636 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3637 if (dw_mode & 0x10) {
3638 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3639 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3640 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3641 }
3642 else
3643 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3644 hevc->m_PIC[i]->mc_u_v_adr |
3645 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3646 | 0x1);
3647 }
3648 }
3649 if (cur_pic_num == 0)
3650 return;
3651 for (; i < MAX_REF_PIC_NUM; i++) {
3652 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3653 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3654 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3655 hevc->m_PIC[cur_pic_num-1]->header_adr>>5);
3656 else
3657 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3658 hevc->m_PIC[cur_pic_num-1]->mc_y_adr >> 5);
3659#ifndef LOSLESS_COMPRESS_MODE
3660 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3661 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr >> 5);
3662#endif
3663 } else {
3664 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3665 hevc->m_PIC[cur_pic_num-1]->mc_y_adr|
3666 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_y<<8)
3667 | 0x1);
3668#ifndef LOSLESS_COMPRESS_MODE
3669 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3670 hevc->m_PIC[cur_pic_num-1]->mc_u_v_adr|
3671 (hevc->m_PIC[cur_pic_num-1]->mc_canvas_u_v<<8)
3672 | 0x1);
3673#endif
3674 }
3675 }
3676
3677 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3678
3679 /* Zero out canvas registers in IPP -- avoid simulation X */
3680 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3681 (0 << 8) | (0 << 1) | 1);
3682 for (i = 0; i < 32; i++)
3683 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3684
3685#ifdef LOSLESS_COMPRESS_MODE
3686 if ((dw_mode & 0x10) == 0)
3687 init_decode_head_hw(hevc);
3688#endif
3689
3690}
3691
3692
3693static void dump_pic_list(struct hevc_state_s *hevc)
3694{
3695 int i;
3696 struct PIC_s *pic;
3697
3698 hevc_print(hevc, 0,
3699 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3700 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3701 pic = hevc->m_PIC[i];
3702 if (pic == NULL || pic->index == -1)
3703 continue;
3704 hevc_print_cont(hevc, 0,
3705 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3706 pic->index, pic->BUF_index,
3707#ifndef MV_USE_FIXED_BUF
3708 pic->mv_buf_index,
3709#else
3710 -1,
3711#endif
3712 pic->decode_idx, pic->POC, pic->referenced);
3713 hevc_print_cont(hevc, 0,
3714 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3715 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3716 pic->width, pic->height);
3717 hevc_print_cont(hevc, 0,
3718 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3719 pic->output_ready, pic->mpred_mv_wr_start_addr,
3720 pic->vf_ref);
3721 }
3722}
3723
3724static void clear_referenced_flag(struct hevc_state_s *hevc)
3725{
3726 int i;
3727 struct PIC_s *pic;
3728 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3729 pic = hevc->m_PIC[i];
3730 if (pic == NULL || pic->index == -1)
3731 continue;
3732 if (pic->referenced) {
3733 pic->referenced = 0;
3734 put_mv_buf(hevc, pic);
3735 }
3736 }
3737}
3738
3739static void clear_poc_flag(struct hevc_state_s *hevc)
3740{
3741 int i;
3742 struct PIC_s *pic;
3743 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3744 pic = hevc->m_PIC[i];
3745 if (pic == NULL || pic->index == -1)
3746 continue;
3747 pic->POC = INVALID_POC;
3748 }
3749}
3750
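/*
 * Pick the next picture to output from the DPB: among decoded pictures still
 * marked for output, choose the one with the smallest POC (smallest
 * decode_idx on a tie, or simply the oldest decode_idx in i_only mode) and
 * release it only when the number of pending pictures exceeds its
 * num_reorder_pic, on a flush, or when the pending count approaches
 * MAX_REF_PIC_NUM.
 */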
3751static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3752 unsigned char flush_flag)
3753{
3754 int num_pic_not_yet_display = 0;
3755 int i;
3756 struct PIC_s *pic;
3757 struct PIC_s *pic_display = NULL;
3758 struct vdec_s *vdec = hw_to_vdec(hevc);
3759
3760 if (hevc->i_only & 0x4) {
3761 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3762 pic = hevc->m_PIC[i];
3763 if (pic == NULL ||
3764 (pic->index == -1) ||
3765 (pic->BUF_index == -1) ||
3766 (pic->POC == INVALID_POC))
3767 continue;
3768 if (pic->output_mark) {
3769 if (pic_display) {
3770 if (pic->decode_idx <
3771 pic_display->decode_idx)
3772 pic_display = pic;
3773
3774 } else
3775 pic_display = pic;
3776
3777 }
3778 }
3779 if (pic_display) {
3780 pic_display->output_mark = 0;
3781 pic_display->recon_mark = 0;
3782 pic_display->output_ready = 1;
3783 pic_display->referenced = 0;
3784 put_mv_buf(hevc, pic_display);
3785 }
3786 } else {
3787 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3788 pic = hevc->m_PIC[i];
3789 if (pic == NULL ||
3790 (pic->index == -1) ||
3791 (pic->BUF_index == -1) ||
3792 (pic->POC == INVALID_POC))
3793 continue;
3794 if (pic->output_mark)
3795 num_pic_not_yet_display++;
3796 if (pic->slice_type == 2 &&
3797 hevc->vf_pre_count == 0 &&
3798 fast_output_enable & 0x1) {
3799 /*fast output for first I picture*/
3800 pic->num_reorder_pic = 0;
3801 if (vdec->master || vdec->slave)
3802 pic_display = pic;
3803 hevc_print(hevc, 0, "VH265: output first frame\n");
3804 }
3805 }
3806
3807 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3808 pic = hevc->m_PIC[i];
3809 if (pic == NULL ||
3810 (pic->index == -1) ||
3811 (pic->BUF_index == -1) ||
3812 (pic->POC == INVALID_POC))
3813 continue;
3814 if (pic->output_mark) {
3815 if (pic_display) {
3816 if (pic->POC < pic_display->POC)
3817 pic_display = pic;
3818 else if ((pic->POC == pic_display->POC)
3819 && (pic->decode_idx <
3820 pic_display->
3821 decode_idx))
3822 pic_display
3823 = pic;
3824 } else
3825 pic_display = pic;
3826 }
3827 }
3828 if (pic_display) {
3829 if ((num_pic_not_yet_display >
3830 pic_display->num_reorder_pic)
3831 || flush_flag) {
3832 pic_display->output_mark = 0;
3833 pic_display->recon_mark = 0;
3834 pic_display->output_ready = 1;
3835 } else if (num_pic_not_yet_display >=
3836 (MAX_REF_PIC_NUM - 1)) {
3837 pic_display->output_mark = 0;
3838 pic_display->recon_mark = 0;
3839 pic_display->output_ready = 1;
3840 hevc_print(hevc, 0,
 3841					"Warning, num_reorder_pic %d is beyond buf num\n",
3842 pic_display->num_reorder_pic);
3843 } else
3844 pic_display = NULL;
3845 }
3846 }
3847
3848 if (pic_display && (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3849 pic_display = NULL;
3850 hevc->first_pic_flag = 0;
3851 }
3852 return pic_display;
3853}
3854
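/*
 * config_mc_buffer(): program the reference-picture canvas table used
 * by motion compensation. For P/B slices the L0 list is written from
 * canvas index 0, and for B slices the L1 list from index 16 (see the
 * ACCCONFIG writes below). Each entry packs the canvas indices as
 * (mc_canvas_u_v << 16) | (mc_canvas_u_v << 8) | mc_canvas_y. A
 * reference with mismatched width/height, a missing reference, or one
 * already flagged bad marks the current picture with error_mark.
 */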
3855static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3856{
3857 int i;
3858 struct PIC_s *pic;
3859
3860 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3861 hevc_print(hevc, 0,
3862 "config_mc_buffer entered .....\n");
3863 if (cur_pic->slice_type != 2) { /* P and B pic */
3864 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3865 (0 << 8) | (0 << 1) | 1);
3866 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3867 pic =
3868 get_ref_pic_by_POC(hevc,
3869 cur_pic->
3870 m_aiRefPOCList0[cur_pic->
3871 slice_idx][i]);
3872 if (pic) {
3873 if ((pic->width != hevc->pic_w) ||
3874 (pic->height != hevc->pic_h)) {
3875 hevc_print(hevc, 0,
3876 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3877 __func__, pic->POC,
3878 pic->width, pic->height);
3879 cur_pic->error_mark = 1;
3880 }
3881 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3882 cur_pic->error_mark = 1;
3883 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3884 (pic->mc_canvas_u_v << 16)
3885 | (pic->mc_canvas_u_v
3886 << 8) |
3887 pic->mc_canvas_y);
3888 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3889 hevc_print_cont(hevc, 0,
3890 "refid %x mc_canvas_u_v %x",
3891 i, pic->mc_canvas_u_v);
3892 hevc_print_cont(hevc, 0,
3893 " mc_canvas_y %x\n",
3894 pic->mc_canvas_y);
3895 }
3896 } else
3897 cur_pic->error_mark = 1;
3898
3899 if (pic == NULL || pic->error_mark) {
3900 hevc_print(hevc, 0,
3901 "Error %s, %dth poc (%d) %s",
3902 __func__, i,
3903 cur_pic->m_aiRefPOCList0[cur_pic->
3904 slice_idx][i],
3905 pic ? "has error" :
3906 "not in list0");
3907 }
3908 }
3909 }
3910 if (cur_pic->slice_type == 0) { /* B pic */
3911 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3912 hevc_print(hevc, 0,
3913 "config_mc_buffer RefNum_L1\n");
3914 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3915 (16 << 8) | (0 << 1) | 1);
3916
3917 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3918 pic =
3919 get_ref_pic_by_POC(hevc,
3920 cur_pic->
3921 m_aiRefPOCList1[cur_pic->
3922 slice_idx][i]);
3923 if (pic) {
3924 if ((pic->width != hevc->pic_w) ||
3925 (pic->height != hevc->pic_h)) {
3926 hevc_print(hevc, 0,
3927 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3928 __func__, pic->POC,
3929 pic->width, pic->height);
3930 cur_pic->error_mark = 1;
3931 }
3932
3933 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3934 cur_pic->error_mark = 1;
3935 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3936 (pic->mc_canvas_u_v << 16)
3937 | (pic->mc_canvas_u_v
3938 << 8) |
3939 pic->mc_canvas_y);
3940 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3941 hevc_print_cont(hevc, 0,
3942 "refid %x mc_canvas_u_v %x",
3943 i, pic->mc_canvas_u_v);
3944 hevc_print_cont(hevc, 0,
3945 " mc_canvas_y %x\n",
3946 pic->mc_canvas_y);
3947 }
3948 } else
3949 cur_pic->error_mark = 1;
3950
3951 if (pic == NULL || pic->error_mark) {
3952 hevc_print(hevc, 0,
3953 "Error %s, %dth poc (%d) %s",
3954 __func__, i,
3955 cur_pic->m_aiRefPOCList1[cur_pic->
3956 slice_idx][i],
3957 pic ? "has error" :
3958 "not in list1");
3959 }
3960 }
3961 }
3962 return 0;
3963}
3964
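/*
 * apply_ref_pic_set(): drop pictures no longer referenced by the
 * current RPS. Each CUR_RPS[i] entry encodes one delta POC: bit 15
 * (0x8000) terminates the list, bit (RPS_USED_BIT - 1) is the sign and
 * the low (RPS_USED_BIT - 1) bits hold the magnitude field. A negative
 * delta yields cur_poc - ((1 << (RPS_USED_BIT - 1)) - delt), a positive
 * one cur_poc + delt. Illustration (assuming, purely for the example,
 * RPS_USED_BIT is 14): cur_poc = 8 with the sign bit set and
 * delt = 0x1FFF gives 8 - (0x2000 - 0x1FFF) = 7, i.e. the previous
 * picture. Any decoded picture whose POC matches none of these values
 * has its referenced flag cleared and its MV buffer returned.
 */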
3965static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
3966 union param_u *params)
3967{
3968 int ii, i;
3969 int poc_tmp;
3970 struct PIC_s *pic;
3971 unsigned char is_referenced;
3972 /* hevc_print(hevc, 0,
3973 "%s cur_poc %d\n", __func__, cur_poc); */
3974 if (pic_list_debug & 0x2) {
3975 pr_err("cur poc %d\n", cur_poc);
3976 }
3977 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
3978 pic = hevc->m_PIC[ii];
3979 if (pic == NULL ||
3980 pic->index == -1 ||
3981 pic->BUF_index == -1
3982 )
3983 continue;
3984
3985 if ((pic->referenced == 0 || pic->POC == cur_poc))
3986 continue;
3987 is_referenced = 0;
3988 for (i = 0; i < 16; i++) {
3989 int delt;
3990
3991 if (params->p.CUR_RPS[i] & 0x8000)
3992 break;
3993 delt =
3994 params->p.CUR_RPS[i] &
3995 ((1 << (RPS_USED_BIT - 1)) - 1);
3996 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
3997 poc_tmp =
3998 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
3999 delt);
4000 } else
4001 poc_tmp = cur_poc + delt;
4002 if (poc_tmp == pic->POC) {
4003 is_referenced = 1;
4004 /* hevc_print(hevc, 0, "i is %d\n", i); */
4005 break;
4006 }
4007 }
4008 if (is_referenced == 0) {
4009 pic->referenced = 0;
4010 put_mv_buf(hevc, pic);
4011 /* hevc_print(hevc, 0,
4012 "set poc %d reference to 0\n", pic->POC); */
4013 if (pic_list_debug & 0x2) {
4014 pr_err("set poc %d reference to 0\n", pic->POC);
4015 }
4016 }
4017 }
4018
4019}
4020
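/*
 * set_ref_pic_list(): build the POC-based reference lists for the
 * current slice. CUR_RPS entries marked "used" are split into
 * RefPicSetStCurr0 (negative deltas, earlier POCs) and RefPicSetStCurr1
 * (positive deltas). List 0 takes Curr0 followed by Curr1, list 1 the
 * reverse, repeating modulo the total entry count when
 * num_ref_idx_lX_active is larger; when ref_pic_list_modification is
 * signalled, modification_list[] supplies the indices instead. The
 * result is stored per slice in m_aiRefPOCList0/1 plus RefNum_L0/L1.
 */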
4021static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
4022{
4023 struct PIC_s *pic = hevc->cur_pic;
4024 int i, rIdx;
4025 int num_neg = 0;
4026 int num_pos = 0;
4027 int total_num;
4028 int num_ref_idx_l0_active =
4029 (params->p.num_ref_idx_l0_active >
4030 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4031 params->p.num_ref_idx_l0_active;
4032 int num_ref_idx_l1_active =
4033 (params->p.num_ref_idx_l1_active >
4034 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4035 params->p.num_ref_idx_l1_active;
4036
4037 int RefPicSetStCurr0[16];
4038 int RefPicSetStCurr1[16];
4039
4040 for (i = 0; i < 16; i++) {
4041 RefPicSetStCurr0[i] = 0;
4042 RefPicSetStCurr1[i] = 0;
4043 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
4044 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
4045 }
4046 for (i = 0; i < 16; i++) {
4047 if (params->p.CUR_RPS[i] & 0x8000)
4048 break;
4049 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
4050 int delt =
4051 params->p.CUR_RPS[i] &
4052 ((1 << (RPS_USED_BIT - 1)) - 1);
4053
4054 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
4055 RefPicSetStCurr0[num_neg] =
4056 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
4057 delt);
4058 /* hevc_print(hevc, 0,
4059 * "RefPicSetStCurr0 %x %x %x\n",
4060 * RefPicSetStCurr0[num_neg], pic->POC,
4061 * (0x800-(params[i]&0x7ff)));
4062 */
4063 num_neg++;
4064 } else {
4065 RefPicSetStCurr1[num_pos] = pic->POC + delt;
4066 /* hevc_print(hevc, 0,
4067 * "RefPicSetStCurr1 %d\n",
4068 * RefPicSetStCurr1[num_pos]);
4069 */
4070 num_pos++;
4071 }
4072 }
4073 }
4074 total_num = num_neg + num_pos;
4075 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4076 hevc_print(hevc, 0,
4077 "%s: curpoc %d slice_type %d, total %d ",
4078 __func__, pic->POC, params->p.slice_type, total_num);
4079 hevc_print_cont(hevc, 0,
4080 "num_neg %d num_list0 %d num_list1 %d\n",
4081 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
4082 }
4083
4084 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4085 hevc_print(hevc, 0,
4086 "HEVC Stream buf start ");
4087 hevc_print_cont(hevc, 0,
4088 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
4089 READ_VREG(HEVC_STREAM_START_ADDR),
4090 READ_VREG(HEVC_STREAM_END_ADDR),
4091 READ_VREG(HEVC_STREAM_WR_PTR),
4092 READ_VREG(HEVC_STREAM_RD_PTR),
4093 READ_VREG(HEVC_STREAM_LEVEL),
4094 READ_VREG(HEVC_STREAM_FIFO_CTL),
4095 READ_VREG(HEVC_PARSER_INT_CONTROL));
4096 }
4097
4098 if (total_num > 0) {
4099 if (params->p.modification_flag & 0x1) {
4100 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4101 hevc_print(hevc, 0, "ref0 POC (modification):");
4102 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4103 int cIdx = params->p.modification_list[rIdx];
4104
4105 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4106 cIdx >=
4107 num_neg ? RefPicSetStCurr1[cIdx -
4108 num_neg] :
4109 RefPicSetStCurr0[cIdx];
4110 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4111 hevc_print_cont(hevc, 0, "%d ",
4112 pic->m_aiRefPOCList0[pic->
4113 slice_idx]
4114 [rIdx]);
4115 }
4116 }
4117 } else {
4118 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4119 hevc_print(hevc, 0, "ref0 POC:");
4120 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4121 int cIdx = rIdx % total_num;
4122
4123 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4124 cIdx >=
4125 num_neg ? RefPicSetStCurr1[cIdx -
4126 num_neg] :
4127 RefPicSetStCurr0[cIdx];
4128 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4129 hevc_print_cont(hevc, 0, "%d ",
4130 pic->m_aiRefPOCList0[pic->
4131 slice_idx]
4132 [rIdx]);
4133 }
4134 }
4135 }
4136 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4137 hevc_print_cont(hevc, 0, "\n");
4138 if (params->p.slice_type == B_SLICE) {
4139 if (params->p.modification_flag & 0x2) {
4140 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4141 hevc_print(hevc, 0,
4142 "ref1 POC (modification):");
4143 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4144 rIdx++) {
4145 int cIdx;
4146
4147 if (params->p.modification_flag & 0x1) {
4148 cIdx =
4149 params->p.
4150 modification_list
4151 [num_ref_idx_l0_active +
4152 rIdx];
4153 } else {
4154 cIdx =
4155 params->p.
4156 modification_list[rIdx];
4157 }
4158 pic->m_aiRefPOCList1[pic->
4159 slice_idx][rIdx] =
4160 cIdx >=
4161 num_pos ?
4162 RefPicSetStCurr0[cIdx - num_pos]
4163 : RefPicSetStCurr1[cIdx];
4164 if (get_dbg_flag(hevc) &
4165 H265_DEBUG_BUFMGR) {
4166 hevc_print_cont(hevc, 0, "%d ",
4167 pic->
4168 m_aiRefPOCList1[pic->
4169 slice_idx]
4170 [rIdx]);
4171 }
4172 }
4173 } else {
4174 if (get_dbg_flag(hevc) &
4175 H265_DEBUG_BUFMGR)
4176 hevc_print(hevc, 0, "ref1 POC:");
4177 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4178 rIdx++) {
4179 int cIdx = rIdx % total_num;
4180
4181 pic->m_aiRefPOCList1[pic->
4182 slice_idx][rIdx] =
4183 cIdx >=
4184 num_pos ?
4185 RefPicSetStCurr0[cIdx -
4186 num_pos]
4187 : RefPicSetStCurr1[cIdx];
4188 if (get_dbg_flag(hevc) &
4189 H265_DEBUG_BUFMGR) {
4190 hevc_print_cont(hevc, 0, "%d ",
4191 pic->
4192 m_aiRefPOCList1[pic->
4193 slice_idx]
4194 [rIdx]);
4195 }
4196 }
4197 }
4198 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4199 hevc_print_cont(hevc, 0, "\n");
4200 }
4201 }
4202 /*set m_PIC */
4203 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4204 (params->p.slice_type == P_SLICE) ? 1 :
4205 (params->p.slice_type == B_SLICE) ? 0 : 3;
4206 pic->RefNum_L0 = num_ref_idx_l0_active;
4207 pic->RefNum_L1 = num_ref_idx_l1_active;
4208}
4209
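/*
 * update_tile_info(): rebuild the m_tile[][] grid for the new picture.
 * With uniform spacing each tile gets pic_width_cu / num_tile_col
 * columns (pic_height_cu / num_tile_row rows) and the last column/row
 * absorbs the remainder; otherwise widths/heights come from
 * params->p.tile_width[] / tile_height[]. Each tile also gets its SAO
 * vertical/above line-buffer addresses, strided by sao_vb_size per
 * column and sao_abv_size per row. Example of the uniform case: a
 * 3-column split of a 20-CU-wide picture yields widths 6, 6 and 8.
 * With tiles disabled the whole picture is a single tile.
 */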
4210static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4211 int pic_height_cu, int sao_mem_unit,
4212 union param_u *params)
4213{
4214 int i, j;
4215 int start_cu_x, start_cu_y;
4216 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4217 int sao_abv_size = sao_mem_unit * pic_width_cu;
4218#ifdef DETREFILL_ENABLE
4219 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4220 int tmpRefillLcuSize = 1 <<
4221 (params->p.log2_min_coding_block_size_minus3 +
4222 3 + params->p.log2_diff_max_min_coding_block_size);
4223 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4224 "%x, %x, %x, %x\n",
4225 params->p.slice_segment_address,
4226 params->p.bit_depth,
4227 params->p.tiles_enabled_flag,
4228 tmpRefillLcuSize);
4229 if (params->p.slice_segment_address == 0 &&
4230 params->p.bit_depth != 0 &&
4231 (params->p.tiles_enabled_flag & 1) &&
4232 tmpRefillLcuSize == 64)
4233 hevc->delrefill_check = 1;
4234 else
4235 hevc->delrefill_check = 0;
4236 }
4237#endif
4238
4239 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4240 if (params->p.tiles_enabled_flag & 1) {
4241 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4242 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4243
4244 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4245 || hevc->num_tile_row <= 0) {
4246 hevc->num_tile_row = 1;
4247 hevc_print(hevc, 0,
4248 "%s: num_tile_rows_minus1 (%d) error!!\n",
4249 __func__, params->p.num_tile_rows_minus1);
4250 }
4251 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4252 || hevc->num_tile_col <= 0) {
4253 hevc->num_tile_col = 1;
4254 hevc_print(hevc, 0,
4255 "%s: num_tile_columns_minus1 (%d) error!!\n",
4256 __func__, params->p.num_tile_columns_minus1);
4257 }
4258 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4259 hevc_print(hevc, 0,
4260 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4261 __func__, pic_width_cu, pic_height_cu);
4262 hevc_print_cont(hevc, 0,
4263 "num_tile_col %d num_tile_row %d:\n",
4264 hevc->num_tile_col, hevc->num_tile_row);
4265 }
4266
4267 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4268 int w = pic_width_cu / hevc->num_tile_col;
4269 int h = pic_height_cu / hevc->num_tile_row;
4270
4271 start_cu_y = 0;
4272 for (i = 0; i < hevc->num_tile_row; i++) {
4273 start_cu_x = 0;
4274 for (j = 0; j < hevc->num_tile_col; j++) {
4275 if (j == (hevc->num_tile_col - 1)) {
4276 hevc->m_tile[i][j].width =
4277 pic_width_cu -
4278 start_cu_x;
4279 } else
4280 hevc->m_tile[i][j].width = w;
4281 if (i == (hevc->num_tile_row - 1)) {
4282 hevc->m_tile[i][j].height =
4283 pic_height_cu -
4284 start_cu_y;
4285 } else
4286 hevc->m_tile[i][j].height = h;
4287 hevc->m_tile[i][j].start_cu_x
4288 = start_cu_x;
4289 hevc->m_tile[i][j].start_cu_y
4290 = start_cu_y;
4291 hevc->m_tile[i][j].sao_vb_start_addr =
4292 hevc->work_space_buf->sao_vb.
4293 buf_start + j * sao_vb_size;
4294 hevc->m_tile[i][j].sao_abv_start_addr =
4295 hevc->work_space_buf->sao_abv.
4296 buf_start + i * sao_abv_size;
4297 if (get_dbg_flag(hevc) &
4298 H265_DEBUG_BUFMGR) {
4299 hevc_print_cont(hevc, 0,
4300 "{y=%d, x=%d w %d h %d ",
4301 i, j, hevc->m_tile[i][j].width,
4302 hevc->m_tile[i][j].height);
4303 hevc_print_cont(hevc, 0,
4304 "start_x %d start_y %d ",
4305 hevc->m_tile[i][j].start_cu_x,
4306 hevc->m_tile[i][j].start_cu_y);
4307 hevc_print_cont(hevc, 0,
4308 "sao_vb_start 0x%x ",
4309 hevc->m_tile[i][j].
4310 sao_vb_start_addr);
4311 hevc_print_cont(hevc, 0,
4312 "sao_abv_start 0x%x}\n",
4313 hevc->m_tile[i][j].
4314 sao_abv_start_addr);
4315 }
4316 start_cu_x += hevc->m_tile[i][j].width;
4317
4318 }
4319 start_cu_y += hevc->m_tile[i][0].height;
4320 }
4321 } else {
4322 start_cu_y = 0;
4323 for (i = 0; i < hevc->num_tile_row; i++) {
4324 start_cu_x = 0;
4325 for (j = 0; j < hevc->num_tile_col; j++) {
4326 if (j == (hevc->num_tile_col - 1)) {
4327 hevc->m_tile[i][j].width =
4328 pic_width_cu -
4329 start_cu_x;
4330 } else {
4331 hevc->m_tile[i][j].width =
4332 params->p.tile_width[j];
4333 }
4334 if (i == (hevc->num_tile_row - 1)) {
4335 hevc->m_tile[i][j].height =
4336 pic_height_cu -
4337 start_cu_y;
4338 } else {
4339 hevc->m_tile[i][j].height =
4340 params->
4341 p.tile_height[i];
4342 }
4343 hevc->m_tile[i][j].start_cu_x
4344 = start_cu_x;
4345 hevc->m_tile[i][j].start_cu_y
4346 = start_cu_y;
4347 hevc->m_tile[i][j].sao_vb_start_addr =
4348 hevc->work_space_buf->sao_vb.
4349 buf_start + j * sao_vb_size;
4350 hevc->m_tile[i][j].sao_abv_start_addr =
4351 hevc->work_space_buf->sao_abv.
4352 buf_start + i * sao_abv_size;
4353 if (get_dbg_flag(hevc) &
4354 H265_DEBUG_BUFMGR) {
4355 hevc_print_cont(hevc, 0,
4356 "{y=%d, x=%d w %d h %d ",
4357 i, j, hevc->m_tile[i][j].width,
4358 hevc->m_tile[i][j].height);
4359 hevc_print_cont(hevc, 0,
4360 "start_x %d start_y %d ",
4361 hevc->m_tile[i][j].start_cu_x,
4362 hevc->m_tile[i][j].start_cu_y);
4363 hevc_print_cont(hevc, 0,
4364 "sao_vb_start 0x%x ",
4365 hevc->m_tile[i][j].
4366 sao_vb_start_addr);
4367 hevc_print_cont(hevc, 0,
4368 "sao_abv_start 0x%x}\n",
4369 hevc->m_tile[i][j].
4370 sao_abv_start_addr);
4371
4372 }
4373 start_cu_x += hevc->m_tile[i][j].width;
4374 }
4375 start_cu_y += hevc->m_tile[i][0].height;
4376 }
4377 }
4378 } else {
4379 hevc->num_tile_col = 1;
4380 hevc->num_tile_row = 1;
4381 hevc->m_tile[0][0].width = pic_width_cu;
4382 hevc->m_tile[0][0].height = pic_height_cu;
4383 hevc->m_tile[0][0].start_cu_x = 0;
4384 hevc->m_tile[0][0].start_cu_y = 0;
4385 hevc->m_tile[0][0].sao_vb_start_addr =
4386 hevc->work_space_buf->sao_vb.buf_start;
4387 hevc->m_tile[0][0].sao_abv_start_addr =
4388 hevc->work_space_buf->sao_abv.buf_start;
4389 }
4390}
4391
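/*
 * get_tile_index(): map an LCU address to its tile. The address is
 * converted to (cu_x, cu_y) in LCU units, then the tile column/row is
 * found by scanning start_cu_x of row 0 and start_cu_y of column 0.
 * The result is packed as tile_x | (tile_y << 8); -1 is returned if
 * pic_width_lcu is 0 (geometry not yet configured).
 */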
4392static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4393 int pic_width_lcu)
4394{
4395 int cu_x;
4396 int cu_y;
4397 int tile_x = 0;
4398 int tile_y = 0;
4399 int i;
4400
4401 if (pic_width_lcu == 0) {
4402 if (get_dbg_flag(hevc)) {
4403 hevc_print(hevc, 0,
4404 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4405 __func__, hevc->pic_w, hevc->pic_h);
4406 }
4407 return -1;
4408 }
4409 cu_x = cu_adr % pic_width_lcu;
4410 cu_y = cu_adr / pic_width_lcu;
4411 if (hevc->tile_enabled) {
4412 for (i = 0; i < hevc->num_tile_col; i++) {
4413 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4414 tile_x = i;
4415 else
4416 break;
4417 }
4418 for (i = 0; i < hevc->num_tile_row; i++) {
4419 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4420 tile_y = i;
4421 else
4422 break;
4423 }
4424 }
4425 return (tile_x) | (tile_y << 8);
4426}
4427
4428static void print_scratch_error(int error_num)
4429{
4430#if 0
4431 if (get_dbg_flag(hevc)) {
4432 hevc_print(hevc, 0,
4433 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4434 error_num);
4435 }
4436#endif
4437}
4438
4439static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4440{
4441 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4442
4443 if (get_dbg_flag(hevc))
4444 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4445 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4446 __func__,
4447 buf_spec->ipp.buf_start,
4448 buf_spec->start_adr,
4449 buf_spec->short_term_rps.buf_start,
4450 buf_spec->vps.buf_start,
4451 buf_spec->sps.buf_start,
4452 buf_spec->pps.buf_start,
4453 buf_spec->sao_up.buf_start,
4454 buf_spec->swap_buf.buf_start,
4455 buf_spec->swap_buf2.buf_start,
4456 buf_spec->scalelut.buf_start,
4457 buf_spec->dblk_para.buf_start,
4458 buf_spec->dblk_data.buf_start,
4459 buf_spec->dblk_data2.buf_start);
4460 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4461 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4462 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4463 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4464 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4465 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4466 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4467 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4468 if (hevc->mmu_enable) {
4469 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4470 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4471 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4472 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4473 } else
4474 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4475 } /*else
4476 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4477 buf_spec->swap_buf.buf_start);
4478 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4479 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4480 /* cfg_p_addr */
4481 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4482 /* cfg_d_addr */
4483 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4484
4485 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4486
4487 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4488}
4489
4490static void parser_cmd_write(void)
4491{
4492 u32 i;
4493 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4494 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4495 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4496 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4497 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4498 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4499 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4500 0x7C00
4501 };
4502 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4503 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4504}
4505
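/*
 * hevc_init_decoder_hw(): parser/IPP bring-up for a decode session.
 * It sanity-checks the parser scratch registers with write/read-back
 * patterns (print_scratch_error() on mismatch), resets the IQIT block,
 * enables stream fetch for the non-multi-instance path, programs the
 * parser interrupt/shift/CABAC controls, zero-fills the IQIT scale LUT,
 * downloads the parser command table and finally toggles the IPP/MPP
 * software reset.
 */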
4506static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4507 int decode_pic_begin, int decode_pic_num)
4508{
4509 unsigned int data32;
4510 int i;
4511#if 0
4512 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4513 /* Set MCR fetch priorities*/
4514 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4515 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4516 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4517 }
4518#endif
4519#if 1
4520 /* m8baby test1902 */
4521 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4522 hevc_print(hevc, 0,
4523 "%s\n", __func__);
4524 data32 = READ_VREG(HEVC_PARSER_VERSION);
4525 if (data32 != 0x00010001) {
4526 print_scratch_error(25);
4527 return;
4528 }
4529 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4530 data32 = READ_VREG(HEVC_PARSER_VERSION);
4531 if (data32 != 0x5a5a55aa) {
4532 print_scratch_error(26);
4533 return;
4534 }
4535#if 0
4536 /* test Parser Reset */
4537 /* reset iqit to start mem init again */
4538 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4539 (1 << 3) /* reset_whole parser */
4540 );
4541 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4542 data32 = READ_VREG(HEVC_PARSER_VERSION);
4543 if (data32 != 0x00010001)
4544 hevc_print(hevc, 0,
4545 "Test Parser Fatal Error\n");
4546#endif
4547 /* reset iqit to start mem init again */
4548 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4549 );
4550 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4551 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4552
4553#endif
4554 if (!hevc->m_ins_flag) {
4555 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4556 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4557 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4558 data32 |= (0xf << 25); /*arwlen_axi_max*/
4559 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4560 }
4561 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4562 if (data32 != 0x00000100) {
4563 print_scratch_error(29);
4564 return;
4565 }
4566 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4567 if (data32 != 0x00000300) {
4568 print_scratch_error(30);
4569 return;
4570 }
4571 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4572 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4573 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4574 if (data32 != 0x12345678) {
4575 print_scratch_error(31);
4576 return;
4577 }
4578 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4579 if (data32 != 0x9abcdef0) {
4580 print_scratch_error(32);
4581 return;
4582 }
4583 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4584 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4585
4586 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4587 data32 &= 0x03ffffff;
4588 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4589 | /* stream_buffer_empty_int_amrisc_enable */
4590 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4591 (1 << 7) | /* dec_done_int_cpu_enable */
4592 (1 << 4) | /* startcode_found_int_cpu_enable */
4593 (0 << 3) | /* startcode_found_int_amrisc_enable */
4594 (1 << 0) /* parser_int_enable */
4595 ;
4596 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4597
4598 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4599 data32 = data32 | (1 << 1) | /* emulation_check_on */
4600 (1 << 0) /* startcode_check_on */
4601 ;
4602 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4603
4604 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4605 (2 << 4) | /* emulate_code_length_sub_1 */
4606 (2 << 1) | /* start_code_length_sub_1 */
4607 (1 << 0) /* stream_shift_enable */
4608 );
4609
4610 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4611 );
4612 /* hevc_parser_core_clk_en */
4613 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4614 );
4615
4616 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4617
4618 /* Initial IQIT_SCALELUT memory -- just to avoid X in simulation */
4619 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4620 for (i = 0; i < 1024; i++)
4621 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4622
4623#ifdef ENABLE_SWAP_TEST
4624 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4625#endif
4626
4627 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4628 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4629 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4630 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4631 /* Send parser_cmd */
4632 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4633
4634 parser_cmd_write();
4635
4636 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4637 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4638 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4639
4640 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4641 /* (1 << 8) | // sao_sw_pred_enable */
4642 (1 << 5) | /* parser_sao_if_en */
4643 (1 << 2) | /* parser_mpred_if_en */
4644 (1 << 0) /* parser_scaler_if_en */
4645 );
4646
4647 /* Changed to Start MPRED in microcode */
4648 /*
4649 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4650 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4651 * (1<<31)
4652 * );
4653 */
4654
4655 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4656 (1 << 0) /* software reset ipp and mpp */
4657 );
4658 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4659 (0 << 0) /* software reset ipp and mpp */
4660 );
4661
4662 if (get_double_write_mode(hevc) & 0x10)
4663 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4664 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
4665 );
4666
4667}
4668
4669static void decoder_hw_reset(void)
4670{
4671 int i;
4672 unsigned int data32;
4673 /* reset iqit to start mem init again */
4674 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4675 );
4676 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4677 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4678
4679 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4680 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4681 ;
4682 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4683
4684 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4685 if (data32 != 0x00000100) {
4686 print_scratch_error(29);
4687 return;
4688 }
4689 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4690 if (data32 != 0x00000300) {
4691 print_scratch_error(30);
4692 return;
4693 }
4694 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4695 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4696 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4697 if (data32 != 0x12345678) {
4698 print_scratch_error(31);
4699 return;
4700 }
4701 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4702 if (data32 != 0x9abcdef0) {
4703 print_scratch_error(32);
4704 return;
4705 }
4706 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4707 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4708
4709 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4710 data32 &= 0x03ffffff;
4711 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4712 | /* stream_buffer_empty_int_amrisc_enable */
4713 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4714 (1 << 7) | /* dec_done_int_cpu_enable */
4715 (1 << 4) | /* startcode_found_int_cpu_enable */
4716 (0 << 3) | /* startcode_found_int_amrisc_enable */
4717 (1 << 0) /* parser_int_enable */
4718 ;
4719 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4720
4721 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4722 data32 = data32 | (1 << 1) | /* emulation_check_on */
4723 (1 << 0) /* startcode_check_on */
4724 ;
4725 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4726
4727 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4728 (2 << 4) | /* emulate_code_length_sub_1 */
4729 (2 << 1) | /* start_code_length_sub_1 */
4730 (1 << 0) /* stream_shift_enable */
4731 );
4732
4733 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4734 );
4735 /* hevc_parser_core_clk_en */
4736 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4737 );
4738
4739 /* Initial IQIT_SCALELUT memory -- just to avoid X in simulation */
4740 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4741 for (i = 0; i < 1024; i++)
4742 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4743
4744 /* Send parser_cmd */
4745 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4746
4747 parser_cmd_write();
4748
4749 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4750 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4751 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4752
4753 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4754 /* (1 << 8) | // sao_sw_pred_enable */
4755 (1 << 5) | /* parser_sao_if_en */
4756 (1 << 2) | /* parser_mpred_if_en */
4757 (1 << 0) /* parser_scaler_if_en */
4758 );
4759
4760 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4761 (1 << 0) /* software reset ipp and mpp */
4762 );
4763 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4764 (0 << 0) /* software reset ipp and mpp */
4765 );
4766}
4767
4768#ifdef CONFIG_HEVC_CLK_FORCED_ON
4769static void config_hevc_clk_forced_on(void)
4770{
4771 unsigned int rdata32;
4772 /* IQIT */
4773 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4774 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4775
4776 /* DBLK */
4777 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4778 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4779
4780 /* SAO */
4781 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4782 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4783
4784 /* MPRED */
4785 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4786 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4787
4788 /* PARSER */
4789 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4790 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4791 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4792 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4793 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4794 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4795 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4796 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4797 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4798 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4799 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4800 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4801 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4802
4803 /* IPP */
4804 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4805 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4806
4807 /* MCRCC */
4808 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4809 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4810}
4811#endif
4812
4813#ifdef MCRCC_ENABLE
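/*
 * config_mcrcc_axi_hw(): set up the motion-compensation reference
 * cache (MCRCC) for the coming slice. I slices leave the cache
 * disabled. For P slices both HEVCD_MCRCC_CTL2 and CTL3 are primed
 * from list 0 canvases; for B slices CTL2 comes from list 0 and CTL3
 * from list 1, where the second read from
 * HEVCD_MPP_ANC_CANVAS_DATA_ADDR presumably returns the next canvas
 * entry when the first two are identical. Each canvas id is duplicated
 * into the high and low halves of its CTL register.
 */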
4814static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4815{
4816 unsigned int rdata32;
4817 unsigned int rdata32_2;
4818 int l0_cnt = 0;
4819 int l1_cnt = 0x7fff;
4820
4821 if (get_double_write_mode(hevc) & 0x10) {
4822 l0_cnt = hevc->cur_pic->RefNum_L0;
4823 l1_cnt = hevc->cur_pic->RefNum_L1;
4824 }
4825
4826 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4827
4828 if (slice_type == 2) { /* I-PIC */
4829 /* remove reset -- disables clock */
4830 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4831 return;
4832 }
4833
4834 if (slice_type == 0) { /* B-PIC */
4835 /* Programme canvas0 */
4836 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4837 (0 << 8) | (0 << 1) | 0);
4838 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4839 rdata32 = rdata32 & 0xffff;
4840 rdata32 = rdata32 | (rdata32 << 16);
4841 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4842
4843 /* Programme canvas1 */
4844 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4845 (16 << 8) | (1 << 1) | 0);
4846 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4847 rdata32_2 = rdata32_2 & 0xffff;
4848 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4849 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4850 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4851 rdata32_2 = rdata32_2 & 0xffff;
4852 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4853 }
4854 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4855 } else { /* P-PIC */
4856 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4857 (0 << 8) | (1 << 1) | 0);
4858 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4859 rdata32 = rdata32 & 0xffff;
4860 rdata32 = rdata32 | (rdata32 << 16);
4861 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4862
4863 if (l0_cnt == 1) {
4864 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4865 } else {
4866 /* Programme canvas1 */
4867 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4868 rdata32 = rdata32 & 0xffff;
4869 rdata32 = rdata32 | (rdata32 << 16);
4870 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4871 }
4872 }
4873 /* enable mcrcc progressive-mode */
4874 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4875}
4876#endif
4877
4878static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4879 int sao_mem_unit)
4880{
4881 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4882 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4883 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4884 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4885}
4886
4887static u32 init_aux_size;
4888static int aux_data_is_avaible(struct hevc_state_s *hevc)
4889{
4890 u32 reg_val;
4891
4892 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4893 if (reg_val != 0 && reg_val != init_aux_size)
4894 return 1;
4895 else
4896 return 0;
4897}
4898
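/*
 * config_aux_buf(): point the core at the SEI/aux buffer and record the
 * programmed size. HEVC_AUX_DATA_SIZE packs the prefix size (in
 * 16-byte units) in the upper 16 bits and the suffix size in the lower
 * 16 bits; aux_data_is_avaible() above treats any non-zero readback
 * that differs from this value as "new aux data present".
 */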
4899static void config_aux_buf(struct hevc_state_s *hevc)
4900{
4901 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4902 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4903 (hevc->suffix_aux_size >> 4);
4904 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4905}
4906
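/*
 * config_mpred_hw(): program the motion-vector prediction block for
 * the current slice. MV_MEM_UNIT is the per-LCU MV footprint selected
 * from lcu_size_log2 (64x64 -> 0x200, 32x32 -> 0x80, 16x16 -> 0x20
 * bytes), so e.g. a 3840x2160 picture with 64x64 LCUs needs
 * 60 * 34 * 0x200 = 0xff000 bytes of MV storage. Write pointers come
 * from the current picture's MV buffer, read pointers from the
 * co-located picture's. Note the L0/L1 reference POC registers below
 * must be written in L0 -> L1 pairs.
 */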
4907static void config_mpred_hw(struct hevc_state_s *hevc)
4908{
4909 int i;
4910 unsigned int data32;
4911 struct PIC_s *cur_pic = hevc->cur_pic;
4912 struct PIC_s *col_pic = hevc->col_pic;
4913 int AMVP_MAX_NUM_CANDS_MEM = 3;
4914 int AMVP_MAX_NUM_CANDS = 2;
4915 int NUM_CHROMA_MODE = 5;
4916 int DM_CHROMA_IDX = 36;
4917 int above_ptr_ctrl = 0;
4918 int buffer_linear = 1;
4919 int cu_size_log2 = 3;
4920
4921 int mpred_mv_rd_start_addr;
4922 int mpred_curr_lcu_x;
4923 int mpred_curr_lcu_y;
4924 int mpred_above_buf_start;
4925 int mpred_mv_rd_ptr;
4926 int mpred_mv_rd_ptr_p1;
4927 int mpred_mv_rd_end_addr;
4928 int MV_MEM_UNIT;
4929 int mpred_mv_wr_ptr;
4930 int *ref_poc_L0, *ref_poc_L1;
4931
4932 int above_en;
4933 int mv_wr_en;
4934 int mv_rd_en;
4935 int col_isIntra;
4936
4937 if (hevc->slice_type != 2) {
4938 above_en = 1;
4939 mv_wr_en = 1;
4940 mv_rd_en = 1;
4941 col_isIntra = 0;
4942 } else {
4943 above_en = 1;
4944 mv_wr_en = 1;
4945 mv_rd_en = 0;
4946 col_isIntra = 0;
4947 }
4948
4949 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
4950 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
4951 mpred_curr_lcu_x = data32 & 0xffff;
4952 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
4953
4954 MV_MEM_UNIT =
4955 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
4956 5 ? 0x80 : 0x20;
4957 mpred_mv_rd_ptr =
4958 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
4959
4960 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
4961 mpred_mv_rd_end_addr =
4962 mpred_mv_rd_start_addr +
4963 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
4964
4965 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
4966
4967 mpred_mv_wr_ptr =
4968 cur_pic->mpred_mv_wr_start_addr +
4969 (hevc->slice_addr * MV_MEM_UNIT);
4970
4971 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4972 hevc_print(hevc, 0,
4973 "cur pic index %d col pic index %d\n", cur_pic->index,
4974 col_pic->index);
4975 }
4976
4977 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
4978 cur_pic->mpred_mv_wr_start_addr);
4979 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
4980
4981 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
4982 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
4983 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
4984
4985 data32 = READ_VREG(HEVC_MPRED_CTRL0);
4986 data32 = (hevc->slice_type |
4987 hevc->new_pic << 2 |
4988 hevc->new_tile << 3 |
4989 hevc->isNextSliceSegment << 4 |
4990 hevc->TMVPFlag << 5 |
4991 hevc->LDCFlag << 6 |
4992 hevc->ColFromL0Flag << 7 |
4993 above_ptr_ctrl << 8 |
4994 above_en << 9 |
4995 mv_wr_en << 10 |
4996 mv_rd_en << 11 |
4997 col_isIntra << 12 |
4998 buffer_linear << 13 |
4999 hevc->LongTerm_Curr << 14 |
5000 hevc->LongTerm_Col << 15 |
5001 hevc->lcu_size_log2 << 16 |
5002 cu_size_log2 << 20 | hevc->plevel << 24);
5003 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
5004
5005 data32 = READ_VREG(HEVC_MPRED_CTRL1);
5006 data32 = (
5007#if 0
5008 /* no set in m8baby test1902 */
5009 /* Don't override clk_forced_on , */
5010 (data32 & (0x1 << 24)) |
5011#endif
5012 hevc->MaxNumMergeCand |
5013 AMVP_MAX_NUM_CANDS << 4 |
5014 AMVP_MAX_NUM_CANDS_MEM << 8 |
5015 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
5016 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
5017
5018 data32 = (hevc->pic_w | hevc->pic_h << 16);
5019 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
5020
5021 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5022 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
5023
5024 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
5025 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
5026
5027 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
5028 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
5029
5030 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
5031 /* col_RefNum_L0<<16| */
5032 /* col_RefNum_L1<<24 */
5033 );
5034 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
5035
5036 data32 = (hevc->LongTerm_Ref);
5037 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
5038
5039 data32 = 0;
5040 for (i = 0; i < hevc->RefNum_L0; i++)
5041 data32 = data32 | (1 << i);
5042 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
5043
5044 data32 = 0;
5045 for (i = 0; i < hevc->RefNum_L1; i++)
5046 data32 = data32 | (1 << i);
5047 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
5048
5049 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
5050 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
5051
5052 /* below MPRED Ref_POC_xx_Lx registers must follow Ref_POC_xx_L0 ->
5053 * Ref_POC_xx_L1 in pair write order!!!
5054 */
5055 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
5056 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
5057
5058 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
5059 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
5060
5061 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
5062 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
5063
5064 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
5065 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
5066
5067 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
5068 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
5069
5070 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
5071 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
5072
5073 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
5074 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
5075
5076 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
5077 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
5078
5079 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
5080 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
5081
5082 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
5083 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
5084
5085 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
5086 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
5087
5088 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
5089 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
5090
5091 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
5092 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
5093
5094 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
5095 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
5096
5097 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
5098 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
5099
5100 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
5101 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
5102
5103 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
5104 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
5105
5106 if (hevc->new_pic) {
5107 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
5108 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
5109 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
5110 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
5111 } else if (!hevc->isNextSliceSegment) {
5112 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
5113 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
5114 }
5115
5116 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
5117}
5118
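/*
 * config_sao_hw(): per-picture SAO/DBLK output configuration. It
 * programs the picture/LCU geometry, the compressed (CM body/header)
 * and double-write (dw_y/dw_u_v) output addresses according to the
 * double write mode, the AXI endian/format bits in HEVC_SAO_CTRL1 and
 * HEVCD_IPP_AXIIF_CONFIG (including the NV12/NV21 byte swap on the
 * v4l path), and the deblocking/SAO cross-tile and cross-slice filter
 * flags taken from misc_flag0 into HEVC_DBLK_CFG9 / HEVC_SAO_CTRL0.
 */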
5119static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
5120{
5121 unsigned int data32, data32_2;
5122 int misc_flag0 = hevc->misc_flag0;
5123 int slice_deblocking_filter_disabled_flag = 0;
5124
5125 int mc_buffer_size_u_v =
5126 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
5127 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
5128 struct PIC_s *cur_pic = hevc->cur_pic;
5129 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
5130
5131 data32 = READ_VREG(HEVC_SAO_CTRL0);
5132 data32 &= (~0xf);
5133 data32 |= hevc->lcu_size_log2;
5134 WRITE_VREG(HEVC_SAO_CTRL0, data32);
5135
5136 data32 = (hevc->pic_w | hevc->pic_h << 16);
5137 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5138
5139 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5140 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5141
5142 if (hevc->new_pic)
5143 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5144#ifdef LOSLESS_COMPRESS_MODE
5145/*SUPPORT_10BIT*/
5146 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5147 data32 = READ_VREG(HEVC_SAO_CTRL5);
5148 data32 &= (~(0xff << 16));
5149
5150 if (get_double_write_mode(hevc) == 2 ||
5151 get_double_write_mode(hevc) == 3)
5152 data32 |= (0xff<<16);
5153 else if (get_double_write_mode(hevc) == 4)
5154 data32 |= (0x33<<16);
5155
5156 if (hevc->mem_saving_mode == 1)
5157 data32 |= (1 << 9);
5158 else
5159 data32 &= ~(1 << 9);
5160 if (workaround_enable & 1)
5161 data32 |= (1 << 7);
5162 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5163 }
5164 data32 = cur_pic->mc_y_adr;
5165 if (get_double_write_mode(hevc))
5166 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5167
5168 if ((get_double_write_mode(hevc) & 0x10) == 0)
5169 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5170
5171 if (hevc->mmu_enable)
5172 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5173#else
5174 data32 = cur_pic->mc_y_adr;
5175 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5176#endif
5177 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5178 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5179
5180#ifdef LOSLESS_COMPRESS_MODE
5181/*SUPPORT_10BIT*/
5182 if (get_double_write_mode(hevc))
5183 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5184#else
5185 data32 = cur_pic->mc_u_v_adr;
5186 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5187#endif
5188 data32 = (mc_buffer_size_u_v_h << 16);
5189 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5190
5191#ifdef LOSLESS_COMPRESS_MODE
5192/*SUPPORT_10BIT*/
5193 if (get_double_write_mode(hevc)) {
5194 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5195 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5196 }
5197#else
5198 /* multi tile to do... */
5199 data32 = cur_pic->mc_y_adr;
5200 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5201
5202 data32 = cur_pic->mc_u_v_adr;
5203 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5204#endif
5205 /* DBLK CONFIG HERE */
5206 if (hevc->new_pic) {
5207 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5208 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5209 data32 = (0xff << 8) | (0x0 << 0);
5210 else
5211 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5212 (0x0 << 0); /* h265 video format*/
5213
5214 if (hevc->pic_w >= 1280)
5215 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5216 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5217 if (get_double_write_mode(hevc) == 0)
5218 data32 |= (0x1 << 8); /*enable first write*/
5219 else if (get_double_write_mode(hevc) == 0x10)
5220 data32 |= (0x1 << 9); /*double write only*/
5221 else
5222 data32 |= ((0x1 << 8) |(0x1 << 9));
5223
5224 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5225 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5226 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5227 }
5228 data32 = (hevc->pic_w | hevc->pic_h << 16);
5229 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5230
5231 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5232 data32 =
5233 ((misc_flag0 >>
5234 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5235 0x1) << 3;
5236 } else
5237 data32 = 0;
5238 data32 |=
5239 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5240 ((params->p.pps_cr_qp_offset
5241 & 0x1f) <<
5242 9));
5243 data32 |=
5244 (hevc->lcu_size ==
5245 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5246
5247 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5248
5249 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5250 /*if (debug & 0x80) {*/
5251 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5252 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5253 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5254 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5255 data32);
5256 /*}*/
5257 }
5258 }
5259#if 0
5260 data32 = READ_VREG(HEVC_SAO_CTRL1);
5261 data32 &= (~0x3000);
5262 data32 |= (hevc->mem_map_mode <<
5263 12);
5264
5265/* [13:12] axi_aformat,
5266 * 0-Linear, 1-32x32, 2-64x32
5267 */
5268 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5269
5270 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5271 data32 &= (~0x30);
5272 data32 |= (hevc->mem_map_mode <<
5273 4);
5274
5275/* [5:4] -- address_format
5276 * 00:linear 01:32x32 10:64x32
5277 */
5278 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5279#else
5280 /* m8baby test1902 */
5281 data32 = READ_VREG(HEVC_SAO_CTRL1);
5282 data32 &= (~0x3000);
5283 data32 |= (hevc->mem_map_mode <<
5284 12);
5285
5286/* [13:12] axi_aformat, 0-Linear,
5287 * 1-32x32, 2-64x32
5288 */
5289 data32 &= (~0xff0);
5290 /* data32 |= 0x670; // Big-Endian per 64-bit */
5291 data32 |= endian; /* Big-Endian per 64-bit */
5292 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5293 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5294 if (get_double_write_mode(hevc) == 0)
5295 data32 |= 0x2; /*disable double write*/
5296 else if (get_double_write_mode(hevc) & 0x10)
5297 data32 |= 0x1; /*disable cm*/
5298 } else {
5299 unsigned int data;
5300 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5301 (0x0 << 0); /* h265 video format*/
5302 if (hevc->pic_w >= 1280)
5303 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5304 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5305 if (get_double_write_mode(hevc) == 0)
5306 data |= (0x1 << 8); /*enable first write*/
5307 else if (get_double_write_mode(hevc) & 0x10)
5308 data |= (0x1 << 9); /*double write only*/
5309 else
5310 data |= ((0x1 << 8) |(0x1 << 9));
5311
5312 WRITE_VREG(HEVC_DBLK_CFGB, data);
5313 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5314 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5315 }
5316
5317 /* swap uv */
5318 if (hevc->is_used_v4l) {
5319 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5320 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5321 data32 &= ~(1 << 8); /* NV21 */
5322 else
5323 data32 |= (1 << 8); /* NV12 */
5324 }
5325
5326 /*
5327 * [31:24] ar_fifo1_axi_thred
5328 * [23:16] ar_fifo0_axi_thred
5329 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5330 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5331 * [11:08] axi_lendian_C
5332 * [07:04] axi_lendian_Y
5333 * [3] reserved
5334 * [2] clk_forceon
5335 * [1] dw_disable:disable double write output
5336 * [0] cm_disable:disable compress output
5337 */
5338 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5339 if (get_double_write_mode(hevc) & 0x10) {
5340 /* [23:22] dw_v1_ctrl
5341 *[21:20] dw_v0_ctrl
5342 *[19:18] dw_h1_ctrl
5343 *[17:16] dw_h0_ctrl
5344 */
5345 data32 = READ_VREG(HEVC_SAO_CTRL5);
5346 /*set them all 0 for H265_NV21 (no down-scale)*/
5347 data32 &= ~(0xff << 16);
5348 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5349 }
5350
5351 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5352 data32 &= (~0x30);
5353 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5354 data32 |= (hevc->mem_map_mode <<
5355 4);
5356 data32 &= (~0xF);
5357 data32 |= 0xf; /* valid only when double write only */
5358 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5359
5360 /* swap uv */
5361 if (hevc->is_used_v4l) {
5362 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5363 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5364 data32 |= (1 << 12); /* NV21 */
5365 else
5366 data32 &= ~(1 << 12); /* NV12 */
5367 }
5368
5369 /*
5370 * [3:0] little_endian
5371 * [5:4] address_format 00:linear 01:32x32 10:64x32
5372 * [7:6] reserved
5373 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5374 * [11:10] reserved
5375 * [12] CbCr_byte_swap
5376 * [31:13] reserved
5377 */
5378 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5379#endif
5380 data32 = 0;
5381 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5382 data32_2 &= (~0x300);
5383 /* slice_deblocking_filter_disabled_flag = 0;
 5384	 * ucode has handled it, so read it from ucode directly
5385 */
5386 if (hevc->tile_enabled) {
5387 data32 |=
5388 ((misc_flag0 >>
5389 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5390 0x1) << 0;
5391 data32_2 |=
5392 ((misc_flag0 >>
5393 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5394 0x1) << 8;
5395 }
5396 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5397 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
 5398		0x1; /* ucode has handled it, so read it from ucode directly */
5399 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5400 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5401 /* slice_deblocking_filter_disabled_flag =
5402 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
 5403	 * //ucode has handled it, so read it from ucode directly
5404 */
5405 data32 |= slice_deblocking_filter_disabled_flag << 2;
5406 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5407 hevc_print_cont(hevc, 0,
5408 "(1,%x)", data32);
5409 if (!slice_deblocking_filter_disabled_flag) {
5410 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5411 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5412 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5413 hevc_print_cont(hevc, 0,
5414 "(2,%x)", data32);
5415 }
5416 } else {
5417 data32 |=
5418 ((misc_flag0 >>
5419 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5420 0x1) << 2;
5421 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5422 hevc_print_cont(hevc, 0,
5423 "(3,%x)", data32);
5424 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5425 0x1) == 0) {
5426 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5427 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5428 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5429 hevc_print_cont(hevc, 0,
5430 "(4,%x)", data32);
5431 }
5432 }
5433 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5434 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5435 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5436 || (!slice_deblocking_filter_disabled_flag))) {
5437 data32 |=
5438 ((misc_flag0 >>
5439 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5440 & 0x1) << 1;
5441 data32_2 |=
5442 ((misc_flag0 >>
5443 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5444 & 0x1) << 9;
5445 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5446 hevc_print_cont(hevc, 0,
5447 "(5,%x)\n", data32);
5448 } else {
5449 data32 |=
5450 ((misc_flag0 >>
5451 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5452 & 0x1) << 1;
5453 data32_2 |=
5454 ((misc_flag0 >>
5455 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5456 & 0x1) << 9;
5457 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5458 hevc_print_cont(hevc, 0,
5459 "(6,%x)\n", data32);
5460 }
5461 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5462 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5463}
5464
5465#ifdef TEST_NO_BUF
5466static unsigned char test_flag = 1;
5467#endif
5468
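/*
 * pic_list_process(): rebalance the picture list after a resolution
 * change. Idle pictures whose width/height no longer match the stream
 * give up their buffers; entries beyond the new working set are
 * invalidated, free slots are re-activated until work_pic_num pictures
 * are available, and unused buffers are then released.
 */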
5469static void pic_list_process(struct hevc_state_s *hevc)
5470{
5471 int work_pic_num = get_work_pic_num(hevc);
5472 int alloc_pic_count = 0;
5473 int i;
5474 struct PIC_s *pic;
5475 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5476 pic = hevc->m_PIC[i];
5477 if (pic == NULL || pic->index == -1)
5478 continue;
5479 alloc_pic_count++;
5480 if (pic->output_mark == 0 && pic->referenced == 0
5481 && pic->output_ready == 0
5482 && (pic->width != hevc->pic_w ||
5483 pic->height != hevc->pic_h)
5484 ) {
5485 set_buf_unused(hevc, pic->BUF_index);
5486 pic->BUF_index = -1;
5487 if (alloc_pic_count > work_pic_num) {
5488 pic->width = 0;
5489 pic->height = 0;
5490 pic->index = -1;
5491 } else {
5492 pic->width = hevc->pic_w;
5493 pic->height = hevc->pic_h;
5494 }
5495 }
5496 }
5497 if (alloc_pic_count < work_pic_num) {
5498 int new_count = alloc_pic_count;
5499 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5500 pic = hevc->m_PIC[i];
5501 if (pic && pic->index == -1) {
5502 pic->index = i;
5503 pic->BUF_index = -1;
5504 pic->width = hevc->pic_w;
5505 pic->height = hevc->pic_h;
5506 new_count++;
5507 if (new_count >=
5508 work_pic_num)
5509 break;
5510 }
5511 }
5512
5513 }
5514 dealloc_unused_buf(hevc);
5515 if (get_alloc_pic_count(hevc)
5516 != alloc_pic_count) {
5517 hevc_print_cont(hevc, 0,
5518 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5519 __func__,
5520 work_pic_num,
5521 alloc_pic_count,
5522 get_alloc_pic_count(hevc));
5523 }
5524}
5525
5526static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5527{
5528 int i;
5529 struct PIC_s *pic;
5530 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5531 pic = hevc->m_PIC[i];
5532 if (pic == NULL || pic->index == -1)
5533 continue;
5534
5535 if (pic->output_mark == 0 && pic->referenced == 0
5536 && pic->output_ready == 0
5537 && pic->scatter_alloc
5538 )
5539 release_pic_mmu_buf(hevc, pic);
5540 }
5541
5542}
5543
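/*
 * get_new_pic(): grab a free picture for the frame about to be
 * decoded. Among idle pictures matching the current width/height the
 * one with the smallest POC (never-used entries first) is preferred.
 * If it has no buffer yet, a frame buffer (v4l or internal) and canvas
 * are allocated; an MV buffer and, when MMU is enabled, the
 * compressed-frame page table are always taken. The picture is then
 * initialized from the rpm params (POC, reorder depth, bit depth,
 * conformance window, ...).
 */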
5544static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5545 union param_u *rpm_param)
5546{
5547 struct PIC_s *new_pic = NULL;
5548 struct PIC_s *pic;
5549 int i;
5550 int ret;
5551
5552 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5553 pic = hevc->m_PIC[i];
5554 if (pic == NULL || pic->index == -1)
5555 continue;
5556
5557 if (pic->output_mark == 0 && pic->referenced == 0
5558 && pic->output_ready == 0
5559 && pic->width == hevc->pic_w
5560 && pic->height == hevc->pic_h
5561 ) {
5562 if (new_pic) {
5563 if (new_pic->POC != INVALID_POC) {
5564 if (pic->POC == INVALID_POC ||
5565 pic->POC < new_pic->POC)
5566 new_pic = pic;
5567 }
5568 } else
5569 new_pic = pic;
5570 }
5571 }
5572
5573 if (new_pic == NULL)
5574 return NULL;
5575
5576 if (new_pic->BUF_index < 0) {
5577 ret = hevc->is_used_v4l ?
5578 v4l_alloc_buf(hevc) :
5579 alloc_buf(hevc);
5580 if (ret < 0)
5581 return NULL;
5582
5583 ret = hevc->is_used_v4l ?
5584 v4l_config_pic(hevc, new_pic) :
5585 config_pic(hevc, new_pic);
5586 if (ret < 0) {
5587 dealloc_pic_buf(hevc, new_pic);
5588 return NULL;
5589 }
5590
5591 new_pic->width = hevc->pic_w;
5592 new_pic->height = hevc->pic_h;
5593 set_canvas(hevc, new_pic);
5594
5595 init_pic_list_hw(hevc);
5596 }
5597
5598 if (new_pic) {
5599 new_pic->double_write_mode =
5600 get_double_write_mode(hevc);
5601 if (new_pic->double_write_mode)
5602 set_canvas(hevc, new_pic);
5603
5604#ifdef TEST_NO_BUF
5605 if (test_flag) {
5606 test_flag = 0;
5607 return NULL;
5608 } else
5609 test_flag = 1;
5610#endif
5611 if (get_mv_buf(hevc, new_pic) < 0)
5612 return NULL;
5613
5614 if (hevc->mmu_enable) {
5615 ret = H265_alloc_mmu(hevc, new_pic,
5616 rpm_param->p.bit_depth,
5617 hevc->frame_mmu_map_addr);
5618 if (ret != 0) {
5619 put_mv_buf(hevc, new_pic);
5620 hevc_print(hevc, 0,
 5621					"can't alloc needed mmu1, idx %d, ret = %d\n",
5622 new_pic->decode_idx,
5623 ret);
5624 return NULL;
5625 }
5626 }
5627 new_pic->referenced = 1;
5628 new_pic->decode_idx = hevc->decode_idx;
5629 new_pic->slice_idx = 0;
5630 new_pic->referenced = 1;
5631 new_pic->output_mark = 0;
5632 new_pic->recon_mark = 0;
5633 new_pic->error_mark = 0;
5634 new_pic->dis_mark = 0;
5635 /* new_pic->output_ready = 0; */
5636 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5637 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5638 new_pic->POC = hevc->curr_POC;
5639 new_pic->pic_struct = hevc->curr_pic_struct;
5640 if (new_pic->aux_data_buf)
5641 release_aux_data(hevc, new_pic);
5642 new_pic->mem_saving_mode =
5643 hevc->mem_saving_mode;
5644 new_pic->bit_depth_luma =
5645 hevc->bit_depth_luma;
5646 new_pic->bit_depth_chroma =
5647 hevc->bit_depth_chroma;
5648 new_pic->video_signal_type =
5649 hevc->video_signal_type;
5650
5651 new_pic->conformance_window_flag =
5652 hevc->param.p.conformance_window_flag;
5653 new_pic->conf_win_left_offset =
5654 hevc->param.p.conf_win_left_offset;
5655 new_pic->conf_win_right_offset =
5656 hevc->param.p.conf_win_right_offset;
5657 new_pic->conf_win_top_offset =
5658 hevc->param.p.conf_win_top_offset;
5659 new_pic->conf_win_bottom_offset =
5660 hevc->param.p.conf_win_bottom_offset;
5661 new_pic->chroma_format_idc =
5662 hevc->param.p.chroma_format_idc;
5663
5664 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5665 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5666 __func__, new_pic->index,
5667 new_pic->BUF_index, new_pic->decode_idx,
5668 new_pic->POC);
5669
5670 }
5671 if (pic_list_debug & 0x1) {
5672 dump_pic_list(hevc);
5673 pr_err("\n*******************************************\n");
5674 }
5675
5676 return new_pic;
5677}
5678
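/* Number of pictures currently queued for display (output_ready set). */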
5679static int get_display_pic_num(struct hevc_state_s *hevc)
5680{
5681 int i;
5682 struct PIC_s *pic;
5683 int num = 0;
5684
5685 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5686 pic = hevc->m_PIC[i];
5687 if (pic == NULL ||
5688 pic->index == -1)
5689 continue;
5690
5691 if (pic->output_ready == 1)
5692 num++;
5693 }
5694 return num;
5695}
5696
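/*
 * Flush the display path: optionally mark 'pic' for output, then
 * repeatedly pull displayable pictures via output_pic().  Pictures with
 * an error mark (unless bufmgr errors are being ignored), debug-discard
 * flags, or non-I frames in i_only mode are recycled; everything else
 * is handed to prepare_display_buf().  All referenced flags are cleared
 * at the end.
 */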
5697static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5698{
5699 struct PIC_s *pic_display;
5700
5701 if (pic) {
5702 /*PB skip control */
5703 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5704 /* start decoding after first I */
5705 hevc->ignore_bufmgr_error |= 0x1;
5706 }
5707 if (hevc->ignore_bufmgr_error & 1) {
5708 if (hevc->PB_skip_count_after_decoding > 0)
5709 hevc->PB_skip_count_after_decoding--;
5710 else {
5711 /* start displaying */
5712 hevc->ignore_bufmgr_error |= 0x2;
5713 }
5714 }
5715 /**/
5716 if (pic->POC != INVALID_POC) {
5717 pic->output_mark = 1;
5718 pic->recon_mark = 1;
5719 }
5720 pic->recon_mark = 1;
5721 }
5722 do {
5723 pic_display = output_pic(hevc, 1);
5724
5725 if (pic_display) {
5726 pic_display->referenced = 0;
5727 put_mv_buf(hevc, pic_display);
5728 if ((pic_display->error_mark
5729 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5730 || (get_dbg_flag(hevc) &
5731 H265_DEBUG_DISPLAY_CUR_FRAME)
5732 || (get_dbg_flag(hevc) &
5733 H265_DEBUG_NO_DISPLAY)) {
5734 pic_display->output_ready = 0;
5735 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5736 hevc_print(hevc, 0,
5737 "[BM] Display: POC %d, ",
5738 pic_display->POC);
5739 hevc_print_cont(hevc, 0,
5740 "decoding index %d ==> ",
5741 pic_display->decode_idx);
5742 hevc_print_cont(hevc, 0,
5743 "Debug mode or error, recycle it\n");
5744 }
5745 } else {
5746 if (hevc->i_only & 0x1
5747 && pic_display->slice_type != 2) {
5748 pic_display->output_ready = 0;
5749 } else {
5750 prepare_display_buf(hevc, pic_display);
5751 if (get_dbg_flag(hevc)
5752 & H265_DEBUG_BUFMGR) {
5753 hevc_print(hevc, 0,
5754 "[BM] flush Display: POC %d, ",
5755 pic_display->POC);
5756 hevc_print_cont(hevc, 0,
5757 "decoding index %d\n",
5758 pic_display->decode_idx);
5759 }
5760 }
5761 }
5762 }
5763 } while (pic_display);
5764 clear_referenced_flag(hevc);
5765}
5766
5767/*
5768* dv_meta_flag: 1, dolby meta only; 2, exclude dolby meta
5769*/
5770static void set_aux_data(struct hevc_state_s *hevc,
5771 struct PIC_s *pic, unsigned char suffix_flag,
5772 unsigned char dv_meta_flag)
5773{
5774 int i;
5775 unsigned short *aux_adr;
5776 unsigned int size_reg_val =
5777 READ_VREG(HEVC_AUX_DATA_SIZE);
5778 unsigned int aux_count = 0;
5779 int aux_size = 0;
5780 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
5781 return;
5782
5783 if (hevc->aux_data_dirty ||
5784 hevc->m_ins_flag == 0) {
5785
5786 hevc->aux_data_dirty = 0;
5787 }
5788
5789 if (suffix_flag) {
5790 aux_adr = (unsigned short *)
5791 (hevc->aux_addr +
5792 hevc->prefix_aux_size);
5793 aux_count =
5794 ((size_reg_val & 0xffff) << 4)
5795 >> 1;
5796 aux_size =
5797 hevc->suffix_aux_size;
5798 } else {
5799 aux_adr =
5800 (unsigned short *)hevc->aux_addr;
5801 aux_count =
5802 ((size_reg_val >> 16) << 4)
5803 >> 1;
5804 aux_size =
5805 hevc->prefix_aux_size;
5806 }
5807 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5808 hevc_print(hevc, 0,
5809 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
5810 __func__, pic, pic->aux_data_size,
5811 aux_count, suffix_flag, dv_meta_flag);
5812 }
5813 if (aux_size > 0 && aux_count > 0) {
5814 int heads_size = 0;
5815 int new_size;
5816 char *new_buf;
5817
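		/*
		 * First pass: size the headers.  Every payload copied into
		 * pic->aux_data_buf below is prefixed with an 8-byte header:
		 *   bytes 0-3: payload length (big endian)
		 *   byte  4  : tag (high byte of the RPM word)
		 *   byte  5  : 0
		 *   bytes 6-7: padding length (big endian)
		 * dv_meta_flag selects which tags are kept; payload bytes are
		 * taken from the low byte of each RPM word, four words at a
		 * time in reverse order.
		 */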
5818 for (i = 0; i < aux_count; i++) {
5819 unsigned char tag = aux_adr[i] >> 8;
5820 if (tag != 0 && tag != 0xff) {
5821 if (dv_meta_flag == 0)
5822 heads_size += 8;
5823 else if (dv_meta_flag == 1 && tag == 0x1)
5824 heads_size += 8;
5825 else if (dv_meta_flag == 2 && tag != 0x1)
5826 heads_size += 8;
5827 }
5828 }
5829 new_size = pic->aux_data_size + aux_count + heads_size;
5830 new_buf = vmalloc(new_size);
5831 if (new_buf) {
5832 unsigned char valid_tag = 0;
5833 unsigned char *h =
5834 new_buf +
5835 pic->aux_data_size;
5836 unsigned char *p = h + 8;
5837 int len = 0;
5838 int padding_len = 0;
5839			if (pic->aux_data_buf) {
5840				memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
5841				vfree(pic->aux_data_buf);
			}
5842 pic->aux_data_buf = new_buf;
5843 for (i = 0; i < aux_count; i += 4) {
5844 int ii;
5845 unsigned char tag = aux_adr[i + 3] >> 8;
5846 if (tag != 0 && tag != 0xff) {
5847 if (dv_meta_flag == 0)
5848 valid_tag = 1;
5849 else if (dv_meta_flag == 1
5850 && tag == 0x1)
5851 valid_tag = 1;
5852 else if (dv_meta_flag == 2
5853 && tag != 0x1)
5854 valid_tag = 1;
5855 else
5856 valid_tag = 0;
5857 if (valid_tag && len > 0) {
5858 pic->aux_data_size +=
5859 (len + 8);
5860 h[0] = (len >> 24)
5861 & 0xff;
5862 h[1] = (len >> 16)
5863 & 0xff;
5864 h[2] = (len >> 8)
5865 & 0xff;
5866 h[3] = (len >> 0)
5867 & 0xff;
5868 h[6] =
5869 (padding_len >> 8)
5870 & 0xff;
5871 h[7] = (padding_len)
5872 & 0xff;
5873 h += (len + 8);
5874 p += 8;
5875 len = 0;
5876 padding_len = 0;
5877 }
5878 if (valid_tag) {
5879 h[4] = tag;
5880 h[5] = 0;
5881 h[6] = 0;
5882 h[7] = 0;
5883 }
5884 }
5885 if (valid_tag) {
5886 for (ii = 0; ii < 4; ii++) {
5887 unsigned short aa =
5888 aux_adr[i + 3
5889 - ii];
5890 *p = aa & 0xff;
5891 p++;
5892 len++;
5893 /*if ((aa >> 8) == 0xff)
5894 padding_len++;*/
5895 }
5896 }
5897 }
5898 if (len > 0) {
5899 pic->aux_data_size += (len + 8);
5900 h[0] = (len >> 24) & 0xff;
5901 h[1] = (len >> 16) & 0xff;
5902 h[2] = (len >> 8) & 0xff;
5903 h[3] = (len >> 0) & 0xff;
5904 h[6] = (padding_len >> 8) & 0xff;
5905 h[7] = (padding_len) & 0xff;
5906 }
5907 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
5908 hevc_print(hevc, 0,
5909 "aux: (size %d) suffix_flag %d\n",
5910 pic->aux_data_size, suffix_flag);
5911 for (i = 0; i < pic->aux_data_size; i++) {
5912 hevc_print_cont(hevc, 0,
5913 "%02x ", pic->aux_data_buf[i]);
5914 if (((i + 1) & 0xf) == 0)
5915 hevc_print_cont(hevc, 0, "\n");
5916 }
5917 hevc_print_cont(hevc, 0, "\n");
5918 }
5919
5920 } else {
5921 hevc_print(hevc, 0, "new buf alloc failed\n");
5922 if (pic->aux_data_buf)
5923 vfree(pic->aux_data_buf);
5924 pic->aux_data_buf = NULL;
5925 pic->aux_data_size = 0;
5926 }
5927 }
5928
5929}
5930
5931static void release_aux_data(struct hevc_state_s *hevc,
5932 struct PIC_s *pic)
5933{
5934 if (pic->aux_data_buf)
5935 vfree(pic->aux_data_buf);
5936 pic->aux_data_buf = NULL;
5937 pic->aux_data_size = 0;
5938}
5939
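/*
 * Finish bookkeeping for the previously decoded picture (iPrevPOC, or
 * decoded_poc in multi-instance mode).  For non-IDR slices that picture
 * is marked as reconstructed/displayable, its unused MMU 4k pages are
 * trimmed back (single-instance MMU mode only), and the output queue is
 * drained via output_pic().  For IDR / IDR_N_LP slices the whole output
 * queue is flushed instead.
 */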
5940static inline void hevc_pre_pic(struct hevc_state_s *hevc,
5941 struct PIC_s *pic)
5942{
5943
5944 /* prev pic */
5945 /*if (hevc->curr_POC != 0) {*/
5946 int decoded_poc = hevc->iPrevPOC;
5947#ifdef MULTI_INSTANCE_SUPPORT
5948 if (hevc->m_ins_flag) {
5949 decoded_poc = hevc->decoded_poc;
5950 hevc->decoded_poc = INVALID_POC;
5951 }
5952#endif
5953 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
5954 && hevc->m_nalUnitType !=
5955 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
5956 struct PIC_s *pic_display;
5957
5958 pic = get_pic_by_POC(hevc, decoded_poc);
5959 if (pic && (pic->POC != INVALID_POC)) {
5960 /*PB skip control */
5961 if (pic->error_mark == 0
5962 && hevc->PB_skip_mode == 1) {
5963 /* start decoding after
5964 * first I
5965 */
5966 hevc->ignore_bufmgr_error |= 0x1;
5967 }
5968 if (hevc->ignore_bufmgr_error & 1) {
5969 if (hevc->PB_skip_count_after_decoding > 0) {
5970 hevc->PB_skip_count_after_decoding--;
5971 } else {
5972 /* start displaying */
5973 hevc->ignore_bufmgr_error |= 0x2;
5974 }
5975 }
5976 if (hevc->mmu_enable
5977 && ((hevc->double_write_mode & 0x10) == 0)) {
5978 if (!hevc->m_ins_flag) {
5979 hevc->used_4k_num =
5980 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
5981
5982 if ((!is_skip_decoding(hevc, pic)) &&
5983 (hevc->used_4k_num >= 0) &&
5984 (hevc->cur_pic->scatter_alloc
5985 == 1)) {
5986 hevc_print(hevc,
5987 H265_DEBUG_BUFMGR_MORE,
5988 "%s pic index %d scatter_alloc %d page_start %d\n",
5989 "decoder_mmu_box_free_idx_tail",
5990 hevc->cur_pic->index,
5991 hevc->cur_pic->scatter_alloc,
5992 hevc->used_4k_num);
5993 hevc_mmu_dma_check(hw_to_vdec(hevc));
5994 decoder_mmu_box_free_idx_tail(
5995 hevc->mmu_box,
5996 hevc->cur_pic->index,
5997 hevc->used_4k_num);
5998 hevc->cur_pic->scatter_alloc
5999 = 2;
6000 }
6001 hevc->used_4k_num = -1;
6002 }
6003 }
6004
6005 pic->output_mark = 1;
6006 pic->recon_mark = 1;
6007 pic->dis_mark = 1;
6008 }
6009 do {
6010 pic_display = output_pic(hevc, 0);
6011
6012 if (pic_display) {
6013 if ((pic_display->error_mark &&
6014 ((hevc->ignore_bufmgr_error &
6015 0x2) == 0))
6016 || (get_dbg_flag(hevc) &
6017 H265_DEBUG_DISPLAY_CUR_FRAME)
6018 || (get_dbg_flag(hevc) &
6019 H265_DEBUG_NO_DISPLAY)) {
6020 pic_display->output_ready = 0;
6021 if (get_dbg_flag(hevc) &
6022 H265_DEBUG_BUFMGR) {
6023 hevc_print(hevc, 0,
6024 "[BM] Display: POC %d, ",
6025 pic_display->POC);
6026 hevc_print_cont(hevc, 0,
6027 "decoding index %d ==> ",
6028 pic_display->
6029 decode_idx);
6030 hevc_print_cont(hevc, 0,
6031 "Debug or err,recycle it\n");
6032 }
6033 } else {
6034 if (hevc->i_only & 0x1
6035 && pic_display->
6036 slice_type != 2) {
6037 pic_display->output_ready = 0;
6038 } else {
6039 prepare_display_buf
6040 (hevc,
6041 pic_display);
6042 if (get_dbg_flag(hevc) &
6043 H265_DEBUG_BUFMGR) {
6044 hevc_print(hevc, 0,
6045 "[BM] Display: POC %d, ",
6046 pic_display->POC);
6047 hevc_print_cont(hevc, 0,
6048 "decoding index %d\n",
6049 pic_display->
6050 decode_idx);
6051 }
6052 }
6053 }
6054 }
6055 } while (pic_display);
6056 } else {
6057 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6058 hevc_print(hevc, 0,
6059 "[BM] current pic is IDR, ");
6060 hevc_print(hevc, 0,
6061 "clear referenced flag of all buffers\n");
6062 }
6063 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6064 dump_pic_list(hevc);
6065 pic = get_pic_by_POC(hevc, decoded_poc);
6066 flush_output(hevc, pic);
6067 }
6068
6069}
6070
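/*
 * Error detection for the picture that just finished decoding: if the
 * last decoded LCU index stops short of lcu_x_num * lcu_y_num - 1 the
 * picture gets error_mark, unless error_handle_policy bit 0x20 masks
 * this check.  Header errors always set error_mark, and over-decode
 * does too unless policy bit 0x80 masks it.  This *_pre variant checks
 * against the previous picture's LCU geometry; check_pic_decoded_error()
 * below uses the current one.
 */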
6071static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
6072 int decoded_lcu)
6073{
6074 int current_lcu_idx = decoded_lcu;
6075 if (decoded_lcu < 0)
6076 return;
6077
6078 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6079 hevc_print(hevc, 0,
6080 "cur lcu idx = %d, (total %d)\n",
6081 current_lcu_idx, hevc->lcu_total);
6082 }
6083 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6084 if (hevc->first_pic_after_recover) {
6085 if (current_lcu_idx !=
6086 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6087 hevc->cur_pic->error_mark = 1;
6088 } else {
6089 if (hevc->lcu_x_num_pre != 0
6090 && hevc->lcu_y_num_pre != 0
6091 && current_lcu_idx != 0
6092 && current_lcu_idx <
6093 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6094 hevc->cur_pic->error_mark = 1;
6095 }
6096 if (hevc->cur_pic->error_mark) {
6097 hevc_print(hevc, 0,
6098 "cur lcu idx = %d, (total %d), set error_mark\n",
6099 current_lcu_idx,
6100 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
6101 if (is_log_enable(hevc))
6102 add_log(hevc,
6103 "cur lcu idx = %d, (total %d), set error_mark",
6104 current_lcu_idx,
6105 hevc->lcu_x_num_pre *
6106 hevc->lcu_y_num_pre);
6107
6108 }
6109
6110 }
6111 if (hevc->cur_pic && hevc->head_error_flag) {
6112 hevc->cur_pic->error_mark = 1;
6113 hevc_print(hevc, 0,
6114 "head has error, set error_mark\n");
6115 }
6116
6117 if ((error_handle_policy & 0x80) == 0) {
6118 if (hevc->over_decode && hevc->cur_pic) {
6119 hevc_print(hevc, 0,
6120 "over decode, set error_mark\n");
6121 hevc->cur_pic->error_mark = 1;
6122 }
6123 }
6124
6125 hevc->lcu_x_num_pre = hevc->lcu_x_num;
6126 hevc->lcu_y_num_pre = hevc->lcu_y_num;
6127}
6128
6129static void check_pic_decoded_error(struct hevc_state_s *hevc,
6130 int decoded_lcu)
6131{
6132 int current_lcu_idx = decoded_lcu;
6133 if (decoded_lcu < 0)
6134 return;
6135
6136 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6137 hevc_print(hevc, 0,
6138 "cur lcu idx = %d, (total %d)\n",
6139 current_lcu_idx, hevc->lcu_total);
6140 }
6141 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6142 if (hevc->lcu_x_num != 0
6143 && hevc->lcu_y_num != 0
6144 && current_lcu_idx != 0
6145 && current_lcu_idx <
6146 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6147 hevc->cur_pic->error_mark = 1;
6148 if (hevc->cur_pic->error_mark) {
6149 hevc_print(hevc, 0,
6150 "cur lcu idx = %d, (total %d), set error_mark\n",
6151 current_lcu_idx,
6152 hevc->lcu_x_num*hevc->lcu_y_num);
6153			if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6154					&& ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6155				hevc_print(hevc, 0,
6156					"Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6157					hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6158				flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6159 }
6160 if (is_log_enable(hevc))
6161 add_log(hevc,
6162 "cur lcu idx = %d, (total %d), set error_mark",
6163 current_lcu_idx,
6164 hevc->lcu_x_num *
6165 hevc->lcu_y_num);
6166
6167 }
6168
6169 }
6170 if (hevc->cur_pic && hevc->head_error_flag) {
6171 hevc->cur_pic->error_mark = 1;
6172 hevc_print(hevc, 0,
6173 "head has error, set error_mark\n");
6174 }
6175
6176 if ((error_handle_policy & 0x80) == 0) {
6177 if (hevc->over_decode && hevc->cur_pic) {
6178 hevc_print(hevc, 0,
6179 "over decode, set error_mark\n");
6180 hevc->cur_pic->error_mark = 1;
6181 }
6182 }
6183}
6184
6185/* Only call this after one field or one frame has been fully decoded;
6186 * it collects the per-picture QoS info from the decoder hardware. */
6187static void get_picture_qos_info(struct hevc_state_s *hevc)
6188{
6189 struct PIC_s *picture = hevc->cur_pic;
6190
6191/*
6192#define DEBUG_QOS
6193*/
6194
6195 if (!hevc->cur_pic)
6196 return;
6197
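	/*
	 * Two hardware layouts for the quality counters: pre-G12A parts
	 * pack min/avg/max into HEVC_MV_INFO / HEVC_QP_INFO /
	 * HEVC_SKIP_INFO (three bytes sorted below), while G12A and later
	 * stream the counters sequentially out of HEVC_PIC_QUALITY_DATA
	 * after the read index is reset through HEVC_PIC_QUALITY_CTRL.
	 */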
6198 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6199 unsigned char a[3];
6200 unsigned char i, j, t;
6201 unsigned long data;
6202
6203 data = READ_VREG(HEVC_MV_INFO);
6204 if (picture->slice_type == I_SLICE)
6205 data = 0;
6206 a[0] = data & 0xff;
6207 a[1] = (data >> 8) & 0xff;
6208 a[2] = (data >> 16) & 0xff;
6209
6210 for (i = 0; i < 3; i++)
6211 for (j = i+1; j < 3; j++) {
6212 if (a[j] < a[i]) {
6213 t = a[j];
6214 a[j] = a[i];
6215 a[i] = t;
6216 } else if (a[j] == a[i]) {
6217 a[i]++;
6218 t = a[j];
6219 a[j] = a[i];
6220 a[i] = t;
6221 }
6222 }
6223 picture->max_mv = a[2];
6224 picture->avg_mv = a[1];
6225 picture->min_mv = a[0];
6226#ifdef DEBUG_QOS
6227 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6228 data, a[0], a[1], a[2]);
6229#endif
6230
6231 data = READ_VREG(HEVC_QP_INFO);
6232 a[0] = data & 0x1f;
6233 a[1] = (data >> 8) & 0x3f;
6234 a[2] = (data >> 16) & 0x7f;
6235
6236 for (i = 0; i < 3; i++)
6237 for (j = i+1; j < 3; j++) {
6238 if (a[j] < a[i]) {
6239 t = a[j];
6240 a[j] = a[i];
6241 a[i] = t;
6242 } else if (a[j] == a[i]) {
6243 a[i]++;
6244 t = a[j];
6245 a[j] = a[i];
6246 a[i] = t;
6247 }
6248 }
6249 picture->max_qp = a[2];
6250 picture->avg_qp = a[1];
6251 picture->min_qp = a[0];
6252#ifdef DEBUG_QOS
6253 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6254 data, a[0], a[1], a[2]);
6255#endif
6256
6257 data = READ_VREG(HEVC_SKIP_INFO);
6258 a[0] = data & 0x1f;
6259 a[1] = (data >> 8) & 0x3f;
6260 a[2] = (data >> 16) & 0x7f;
6261
6262 for (i = 0; i < 3; i++)
6263 for (j = i+1; j < 3; j++) {
6264 if (a[j] < a[i]) {
6265 t = a[j];
6266 a[j] = a[i];
6267 a[i] = t;
6268 } else if (a[j] == a[i]) {
6269 a[i]++;
6270 t = a[j];
6271 a[j] = a[i];
6272 a[i] = t;
6273 }
6274 }
6275 picture->max_skip = a[2];
6276 picture->avg_skip = a[1];
6277 picture->min_skip = a[0];
6278
6279#ifdef DEBUG_QOS
6280 hevc_print(hevc, 0,
6281 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6282 data, a[0], a[1], a[2]);
6283#endif
6284 } else {
6285 uint32_t blk88_y_count;
6286 uint32_t blk88_c_count;
6287 uint32_t blk22_mv_count;
6288 uint32_t rdata32;
6289 int32_t mv_hi;
6290 int32_t mv_lo;
6291 uint32_t rdata32_l;
6292 uint32_t mvx_L0_hi;
6293 uint32_t mvy_L0_hi;
6294 uint32_t mvx_L1_hi;
6295 uint32_t mvy_L1_hi;
6296 int64_t value;
6297 uint64_t temp_value;
6298#ifdef DEBUG_QOS
6299 int pic_number = picture->POC;
6300#endif
6301
6302 picture->max_mv = 0;
6303 picture->avg_mv = 0;
6304 picture->min_mv = 0;
6305
6306 picture->max_skip = 0;
6307 picture->avg_skip = 0;
6308 picture->min_skip = 0;
6309
6310 picture->max_qp = 0;
6311 picture->avg_qp = 0;
6312 picture->min_qp = 0;
6313
6314
6315
6316#ifdef DEBUG_QOS
6317 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6318 picture->slice_type,
6319 picture->POC);
6320#endif
6321 /* set rd_idx to 0 */
6322 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6323
6324 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6325 if (blk88_y_count == 0) {
6326#ifdef DEBUG_QOS
6327 hevc_print(hevc, 0,
6328 "[Picture %d Quality] NO Data yet.\n",
6329 pic_number);
6330#endif
6331 /* reset all counts */
6332 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6333 return;
6334 }
6335 /* qp_y_sum */
6336 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6337#ifdef DEBUG_QOS
6338 hevc_print(hevc, 0,
6339 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6340 pic_number, rdata32/blk88_y_count,
6341 rdata32, blk88_y_count);
6342#endif
6343 picture->avg_qp = rdata32/blk88_y_count;
6344 /* intra_y_count */
6345 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6346#ifdef DEBUG_QOS
6347 hevc_print(hevc, 0,
6348 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6349 pic_number, rdata32*100/blk88_y_count,
6350 '%', rdata32);
6351#endif
6352 /* skipped_y_count */
6353 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6354#ifdef DEBUG_QOS
6355 hevc_print(hevc, 0,
6356 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6357 pic_number, rdata32*100/blk88_y_count,
6358 '%', rdata32);
6359#endif
6360 picture->avg_skip = rdata32*100/blk88_y_count;
6361 /* coeff_non_zero_y_count */
6362 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6363#ifdef DEBUG_QOS
6364 hevc_print(hevc, 0,
6365 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6366 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6367 '%', rdata32);
6368#endif
6369 /* blk66_c_count */
6370 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6371 if (blk88_c_count == 0) {
6372#ifdef DEBUG_QOS
6373 hevc_print(hevc, 0,
6374 "[Picture %d Quality] NO Data yet.\n",
6375 pic_number);
6376#endif
6377 /* reset all counts */
6378 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6379 return;
6380 }
6381 /* qp_c_sum */
6382 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6383#ifdef DEBUG_QOS
6384 hevc_print(hevc, 0,
6385 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6386 pic_number, rdata32/blk88_c_count,
6387 rdata32, blk88_c_count);
6388#endif
6389 /* intra_c_count */
6390 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6391#ifdef DEBUG_QOS
6392 hevc_print(hevc, 0,
6393 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6394 pic_number, rdata32*100/blk88_c_count,
6395 '%', rdata32);
6396#endif
6397 /* skipped_cu_c_count */
6398 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6399#ifdef DEBUG_QOS
6400 hevc_print(hevc, 0,
6401 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6402 pic_number, rdata32*100/blk88_c_count,
6403 '%', rdata32);
6404#endif
6405 /* coeff_non_zero_c_count */
6406 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6407#ifdef DEBUG_QOS
6408 hevc_print(hevc, 0,
6409 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6410 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6411 '%', rdata32);
6412#endif
6413
6414 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6415 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6416 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6417#ifdef DEBUG_QOS
6418 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6419 pic_number, (rdata32>>0)&0xff);
6420#endif
6421 picture->min_qp = (rdata32>>0)&0xff;
6422
6423#ifdef DEBUG_QOS
6424 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6425 pic_number, (rdata32>>8)&0xff);
6426#endif
6427 picture->max_qp = (rdata32>>8)&0xff;
6428
6429#ifdef DEBUG_QOS
6430 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6431 pic_number, (rdata32>>16)&0xff);
6432 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6433 pic_number, (rdata32>>24)&0xff);
6434#endif
6435
6436 /* blk22_mv_count */
6437 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6438 if (blk22_mv_count == 0) {
6439#ifdef DEBUG_QOS
6440 hevc_print(hevc, 0,
6441 "[Picture %d Quality] NO MV Data yet.\n",
6442 pic_number);
6443#endif
6444 /* reset all counts */
6445 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6446 return;
6447 }
6448 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6449 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6450 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6451 /* should all be 0x00 or 0xff */
6452#ifdef DEBUG_QOS
6453 hevc_print(hevc, 0,
6454 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6455 pic_number, rdata32);
6456#endif
6457 mvx_L0_hi = ((rdata32>>0)&0xff);
6458 mvy_L0_hi = ((rdata32>>8)&0xff);
6459 mvx_L1_hi = ((rdata32>>16)&0xff);
6460 mvy_L1_hi = ((rdata32>>24)&0xff);
6461
6462 /* mvx_L0_count[31:0] */
6463 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6464 temp_value = mvx_L0_hi;
6465 temp_value = (temp_value << 32) | rdata32_l;
6466
6467 if (mvx_L0_hi & 0x80)
6468 value = 0xFFFFFFF000000000 | temp_value;
6469 else
6470 value = temp_value;
6471 value = div_s64(value, blk22_mv_count);
6472#ifdef DEBUG_QOS
6473 hevc_print(hevc, 0,
6474 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6475 pic_number, (int)value,
6476 value, blk22_mv_count);
6477#endif
6478 picture->avg_mv = value;
6479
6480 /* mvy_L0_count[31:0] */
6481 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6482 temp_value = mvy_L0_hi;
6483 temp_value = (temp_value << 32) | rdata32_l;
6484
6485 if (mvy_L0_hi & 0x80)
6486 value = 0xFFFFFFF000000000 | temp_value;
6487 else
6488 value = temp_value;
6489#ifdef DEBUG_QOS
6490 hevc_print(hevc, 0,
6491 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6492 pic_number, rdata32_l/blk22_mv_count,
6493 value, blk22_mv_count);
6494#endif
6495
6496 /* mvx_L1_count[31:0] */
6497 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6498 temp_value = mvx_L1_hi;
6499 temp_value = (temp_value << 32) | rdata32_l;
6500 if (mvx_L1_hi & 0x80)
6501 value = 0xFFFFFFF000000000 | temp_value;
6502 else
6503 value = temp_value;
6504#ifdef DEBUG_QOS
6505 hevc_print(hevc, 0,
6506 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6507 pic_number, rdata32_l/blk22_mv_count,
6508 value, blk22_mv_count);
6509#endif
6510
6511 /* mvy_L1_count[31:0] */
6512 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6513 temp_value = mvy_L1_hi;
6514 temp_value = (temp_value << 32) | rdata32_l;
6515 if (mvy_L1_hi & 0x80)
6516 value = 0xFFFFFFF000000000 | temp_value;
6517 else
6518 value = temp_value;
6519#ifdef DEBUG_QOS
6520 hevc_print(hevc, 0,
6521 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6522 pic_number, rdata32_l/blk22_mv_count,
6523 value, blk22_mv_count);
6524#endif
6525
6526 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6527 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6528 mv_hi = (rdata32>>16)&0xffff;
6529 if (mv_hi & 0x8000)
6530 mv_hi = 0x8000 - mv_hi;
6531#ifdef DEBUG_QOS
6532 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6533 pic_number, mv_hi);
6534#endif
6535 picture->max_mv = mv_hi;
6536
6537 mv_lo = (rdata32>>0)&0xffff;
6538 if (mv_lo & 0x8000)
6539 mv_lo = 0x8000 - mv_lo;
6540#ifdef DEBUG_QOS
6541 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6542 pic_number, mv_lo);
6543#endif
6544 picture->min_mv = mv_lo;
6545
6546 /* {mvy_L0_max, mvy_L0_min} */
6547 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6548 mv_hi = (rdata32>>16)&0xffff;
6549 if (mv_hi & 0x8000)
6550 mv_hi = 0x8000 - mv_hi;
6551#ifdef DEBUG_QOS
6552 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6553 pic_number, mv_hi);
6554#endif
6555
6556 mv_lo = (rdata32>>0)&0xffff;
6557 if (mv_lo & 0x8000)
6558 mv_lo = 0x8000 - mv_lo;
6559#ifdef DEBUG_QOS
6560 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6561 pic_number, mv_lo);
6562#endif
6563
6564 /* {mvx_L1_max, mvx_L1_min} */
6565 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6566 mv_hi = (rdata32>>16)&0xffff;
6567 if (mv_hi & 0x8000)
6568 mv_hi = 0x8000 - mv_hi;
6569#ifdef DEBUG_QOS
6570 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6571 pic_number, mv_hi);
6572#endif
6573
6574 mv_lo = (rdata32>>0)&0xffff;
6575 if (mv_lo & 0x8000)
6576 mv_lo = 0x8000 - mv_lo;
6577#ifdef DEBUG_QOS
6578 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6579 pic_number, mv_lo);
6580#endif
6581
6582 /* {mvy_L1_max, mvy_L1_min} */
6583 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6584 mv_hi = (rdata32>>16)&0xffff;
6585 if (mv_hi & 0x8000)
6586 mv_hi = 0x8000 - mv_hi;
6587#ifdef DEBUG_QOS
6588 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6589 pic_number, mv_hi);
6590#endif
6591 mv_lo = (rdata32>>0)&0xffff;
6592 if (mv_lo & 0x8000)
6593 mv_lo = 0x8000 - mv_lo;
6594#ifdef DEBUG_QOS
6595 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6596 pic_number, mv_lo);
6597#endif
6598
6599 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6600#ifdef DEBUG_QOS
6601 hevc_print(hevc, 0,
6602 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6603 pic_number, rdata32);
6604#endif
6605 /* reset all counts */
6606 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6607 }
6608}
6609
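/*
 * Parse the slice segment header parameters delivered through the RPM
 * block and set up picture, tile and reference list state for decoding.
 * Return values, as used in this function:
 *   0   - proceed with decoding this slice
 *   1   - skip this picture (RASL/TFD before the random access point)
 *   2   - discard this picture (is_skip_decoding())
 *   3/4 - parameter error (oversize, zero size, bad lcu_size,
 *         unsupported bit depth, ...)
 *   -1  - no free picture buffer yet, wait (hevc->wait_buf)
 *   0xf - buffer management only, no MC decode (BUFFER_MGR_ONLY,
 *         decode_pic_begin/debug skip, or i_only filtering)
 */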
6610static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6611 union param_u *rpm_param,
6612 int decode_pic_begin)
6613{
6614#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6615 struct vdec_s *vdec = hw_to_vdec(hevc);
6616#endif
6617 int i;
6618 int lcu_x_num_div;
6619 int lcu_y_num_div;
6620 int Col_ref;
6621 int dbg_skip_flag = 0;
6622
6623 if (hevc->wait_buf == 0) {
6624 hevc->sps_num_reorder_pics_0 =
6625 rpm_param->p.sps_num_reorder_pics_0;
6626 hevc->m_temporalId = rpm_param->p.m_temporalId;
6627 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6628 hevc->interlace_flag =
6629 (rpm_param->p.profile_etc >> 2) & 0x1;
6630 hevc->curr_pic_struct =
6631 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6632 if (parser_sei_enable & 0x4) {
6633 hevc->frame_field_info_present_flag =
6634 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6635 }
6636
6637 if (interlace_enable == 0 || hevc->m_ins_flag)
6638 hevc->interlace_flag = 0;
6639 if (interlace_enable & 0x100)
6640 hevc->interlace_flag = interlace_enable & 0x1;
6641 if (hevc->interlace_flag == 0)
6642 hevc->curr_pic_struct = 0;
6643 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6644 /*
6645 *hevc->m_pocRandomAccess = MAX_INT;
6646 * //add to fix RAP_B_Bossen_1
6647 */
6648 /* } */
6649 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6650 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6651 hevc->slice_segment_addr =
6652 rpm_param->p.slice_segment_address;
6653 if (!rpm_param->p.dependent_slice_segment_flag)
6654 hevc->slice_addr = hevc->slice_segment_addr;
6655 } else {
6656 hevc->slice_segment_addr = 0;
6657 hevc->slice_addr = 0;
6658 }
6659
6660 hevc->iPrevPOC = hevc->curr_POC;
6661 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6662 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6663 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6664 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6665 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6666 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6667 hevc->isNextSliceSegment =
6668 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6669 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6670 || hevc->pic_h !=
6671 rpm_param->p.pic_height_in_luma_samples) {
6672 hevc_print(hevc, 0,
6673 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6674 hevc->pic_w, hevc->pic_h,
6675 rpm_param->p.pic_width_in_luma_samples,
6676 rpm_param->p.pic_height_in_luma_samples,
6677 hevc->interlace_flag);
6678
6679 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6680 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6681 hevc->frame_width = hevc->pic_w;
6682 hevc->frame_height = hevc->pic_h;
6683#ifdef LOSLESS_COMPRESS_MODE
6684 if (/*re_config_pic_flag == 0 &&*/
6685 (get_double_write_mode(hevc) & 0x10) == 0)
6686 init_decode_head_hw(hevc);
6687#endif
6688 }
6689
6690 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6691 hevc_print(hevc, 0, "over size : %u x %u.\n",
6692 hevc->pic_w, hevc->pic_h);
6693 if ((!hevc->m_ins_flag) &&
6694 ((debug &
6695 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6696 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6697 H265_DEBUG_DIS_SYS_ERROR_PROC);
6698 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6699 return 3;
6700 }
6701 if (hevc->bit_depth_chroma > 10 ||
6702 hevc->bit_depth_luma > 10) {
6703			hevc_print(hevc, 0, "unsupported bit depth : %u,%u\n",
6704 hevc->bit_depth_chroma,
6705 hevc->bit_depth_luma);
6706 if (!hevc->m_ins_flag)
6707 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6708 H265_DEBUG_DIS_SYS_ERROR_PROC);
6709 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6710 return 4;
6711 }
6712
6713 /* it will cause divide 0 error */
6714 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6715 if (get_dbg_flag(hevc)) {
6716 hevc_print(hevc, 0,
6717 "Fatal Error, pic_w = %d, pic_h = %d\n",
6718 hevc->pic_w, hevc->pic_h);
6719 }
6720 return 3;
6721 }
6722 pic_list_process(hevc);
6723
6724 hevc->lcu_size =
6725 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6726 3 + rpm_param->
6727 p.log2_diff_max_min_coding_block_size);
6728 if (hevc->lcu_size == 0) {
6729 hevc_print(hevc, 0,
6730 "Error, lcu_size = 0 (%d,%d)\n",
6731 rpm_param->p.
6732 log2_min_coding_block_size_minus3,
6733 rpm_param->p.
6734 log2_diff_max_min_coding_block_size);
6735 return 3;
6736 }
6737 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6738 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6739 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6740 hevc->lcu_x_num =
6741 ((hevc->pic_w % hevc->lcu_size) ==
6742 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6743 hevc->lcu_y_num =
6744 ((hevc->pic_h % hevc->lcu_size) ==
6745 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6746 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
6747
6748 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6749 || hevc->m_nalUnitType ==
6750 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6751 hevc->curr_POC = 0;
6752 if ((hevc->m_temporalId - 1) == 0)
6753 hevc->iPrevTid0POC = hevc->curr_POC;
6754 } else {
6755 int iMaxPOClsb =
6756 1 << (rpm_param->p.
6757 log2_max_pic_order_cnt_lsb_minus4 + 4);
6758 int iPrevPOClsb;
6759 int iPrevPOCmsb;
6760 int iPOCmsb;
6761 int iPOClsb = rpm_param->p.POClsb;
6762
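			/*
			 * Standard HEVC POC reconstruction (spec 8.3.1):
			 * extend the transmitted poc_lsb with the MSB of the
			 * previous TemporalId-0 picture, adding or
			 * subtracting iMaxPOClsb when the lsb wraps around.
			 */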
6763 if (iMaxPOClsb == 0) {
6764 hevc_print(hevc, 0,
6765 "error iMaxPOClsb is 0\n");
6766 return 3;
6767 }
6768
6769 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
6770 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
6771
6772 if ((iPOClsb < iPrevPOClsb)
6773 && ((iPrevPOClsb - iPOClsb) >=
6774 (iMaxPOClsb / 2)))
6775 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
6776 else if ((iPOClsb > iPrevPOClsb)
6777 && ((iPOClsb - iPrevPOClsb) >
6778 (iMaxPOClsb / 2)))
6779 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
6780 else
6781 iPOCmsb = iPrevPOCmsb;
6782 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6783 hevc_print(hevc, 0,
6784 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
6785 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
6786 iPOClsb);
6787 }
6788 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6789 || hevc->m_nalUnitType ==
6790 NAL_UNIT_CODED_SLICE_BLANT
6791 || hevc->m_nalUnitType ==
6792 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
6793 /* For BLA picture types, POCmsb is set to 0. */
6794 iPOCmsb = 0;
6795 }
6796 hevc->curr_POC = (iPOCmsb + iPOClsb);
6797 if ((hevc->m_temporalId - 1) == 0)
6798 hevc->iPrevTid0POC = hevc->curr_POC;
6799 else {
6800 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6801 hevc_print(hevc, 0,
6802 "m_temporalID is %d\n",
6803 hevc->m_temporalId);
6804 }
6805 }
6806 }
6807 hevc->RefNum_L0 =
6808 (rpm_param->p.num_ref_idx_l0_active >
6809 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6810 num_ref_idx_l0_active;
6811 hevc->RefNum_L1 =
6812 (rpm_param->p.num_ref_idx_l1_active >
6813 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
6814 num_ref_idx_l1_active;
6815
6816 /* if(curr_POC==0x10) dump_lmem(); */
6817
6818 /* skip RASL pictures after CRA/BLA pictures */
6819 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
6820 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
6821 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6822 || hevc->m_nalUnitType ==
6823 NAL_UNIT_CODED_SLICE_BLANT
6824 || hevc->m_nalUnitType ==
6825 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6826 hevc->m_pocRandomAccess = hevc->curr_POC;
6827 else
6828 hevc->m_pocRandomAccess = -MAX_INT;
6829 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
6830 || hevc->m_nalUnitType ==
6831 NAL_UNIT_CODED_SLICE_BLANT
6832 || hevc->m_nalUnitType ==
6833 NAL_UNIT_CODED_SLICE_BLA_N_LP)
6834 hevc->m_pocRandomAccess = hevc->curr_POC;
6835 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
6836 (nal_skip_policy >= 3) &&
6837 (hevc->m_nalUnitType ==
6838 NAL_UNIT_CODED_SLICE_RASL_N ||
6839 hevc->m_nalUnitType ==
6840 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
6841 if (get_dbg_flag(hevc)) {
6842 hevc_print(hevc, 0,
6843 "RASL picture with POC %d < %d ",
6844 hevc->curr_POC, hevc->m_pocRandomAccess);
6845 hevc_print(hevc, 0,
6846					"(RandomAccess point POC), skip it\n");
6847 }
6848 return 1;
6849 }
6850
6851 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
6852 hevc->skip_flag = 0;
6853 /**/
6854 /* if((iPrevPOC != curr_POC)){ */
6855 if (rpm_param->p.slice_segment_address == 0) {
6856 struct PIC_s *pic;
6857
6858 hevc->new_pic = 1;
6859#ifdef MULTI_INSTANCE_SUPPORT
6860 if (!hevc->m_ins_flag)
6861#endif
6862 check_pic_decoded_error_pre(hevc,
6863 READ_VREG(HEVC_PARSER_LCU_START)
6864 & 0xffffff);
6865			if (use_cma == 0) {
6866 if (hevc->pic_list_init_flag == 0) {
6867 init_pic_list(hevc);
6868 init_pic_list_hw(hevc);
6869 init_buf_spec(hevc);
6870 hevc->pic_list_init_flag = 3;
6871 }
6872 }
6873 if (!hevc->m_ins_flag) {
6874 if (hevc->cur_pic)
6875 get_picture_qos_info(hevc);
6876 }
6877 hevc->first_pic_after_recover = 0;
6878 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
6879 dump_pic_list(hevc);
6880 /* prev pic */
6881 hevc_pre_pic(hevc, pic);
6882 /*
6883 *update referenced of old pictures
6884 *(cur_pic->referenced is 1 and not updated)
6885 */
6886 apply_ref_pic_set(hevc, hevc->curr_POC,
6887 rpm_param);
6888
6889 if (hevc->mmu_enable)
6890 recycle_mmu_bufs(hevc);
6891
6892#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6893 if (vdec->master) {
6894 struct hevc_state_s *hevc_ba =
6895 (struct hevc_state_s *)
6896 vdec->master->private;
6897 if (hevc_ba->cur_pic != NULL) {
6898 hevc_ba->cur_pic->dv_enhance_exist = 1;
6899 hevc_print(hevc, H265_DEBUG_DV,
6900 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
6901 hevc->curr_POC, hevc_ba->cur_pic->POC);
6902 }
6903 }
6904 if (vdec->master == NULL &&
6905 vdec->slave == NULL)
6906 set_aux_data(hevc,
6907 hevc->cur_pic, 1, 0); /*suffix*/
6908 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6909 set_aux_data(hevc,
6910 hevc->cur_pic, 0, 1); /*dv meta only*/
6911#else
6912 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6913#endif
6914 /* new pic */
6915 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6916 if (hevc->cur_pic == NULL) {
6917 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6918 dump_pic_list(hevc);
6919 hevc->wait_buf = 1;
6920 return -1;
6921 }
6922#ifdef MULTI_INSTANCE_SUPPORT
6923 hevc->decoding_pic = hevc->cur_pic;
6924 if (!hevc->m_ins_flag)
6925 hevc->over_decode = 0;
6926#endif
6927#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6928 hevc->cur_pic->dv_enhance_exist = 0;
6929 if (vdec->slave)
6930 hevc_print(hevc, H265_DEBUG_DV,
6931 "Clear bl (poc %d) dv_enhance_exist flag\n",
6932 hevc->curr_POC);
6933 if (vdec->master == NULL &&
6934 vdec->slave == NULL)
6935 set_aux_data(hevc,
6936 hevc->cur_pic, 0, 0); /*prefix*/
6937
6938 if (hevc->bypass_dvenl && !dolby_meta_with_el)
6939 set_aux_data(hevc,
6940 hevc->cur_pic, 0, 2); /*pre sei only*/
6941#else
6942 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6943#endif
6944 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
6945 hevc->cur_pic->output_ready = 1;
6946 hevc->cur_pic->stream_offset =
6947 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
6948 prepare_display_buf(hevc, hevc->cur_pic);
6949 hevc->wait_buf = 2;
6950 return -1;
6951 }
6952 } else {
6953 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
6954#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6955 if (vdec->master == NULL &&
6956 vdec->slave == NULL) {
6957 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6958 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6959 }
6960#else
6961 set_aux_data(hevc, hevc->cur_pic, 1, 0);
6962 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6963#endif
6964 }
6965 if (hevc->pic_list_init_flag != 3
6966 || hevc->cur_pic == NULL) {
6967 /* make it dec from the first slice segment */
6968 return 3;
6969 }
6970 hevc->cur_pic->slice_idx++;
6971 hevc->new_pic = 0;
6972 }
6973 } else {
6974 if (hevc->wait_buf == 1) {
6975 pic_list_process(hevc);
6976 hevc->cur_pic = get_new_pic(hevc, rpm_param);
6977 if (hevc->cur_pic == NULL)
6978 return -1;
6979
6980 if (!hevc->m_ins_flag)
6981 hevc->over_decode = 0;
6982
6983#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6984 hevc->cur_pic->dv_enhance_exist = 0;
6985 if (vdec->master == NULL &&
6986 vdec->slave == NULL)
6987 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6988#else
6989 set_aux_data(hevc, hevc->cur_pic, 0, 0);
6990#endif
6991 hevc->wait_buf = 0;
6992 } else if (hevc->wait_buf ==
6993 2) {
6994 if (get_display_pic_num(hevc) >
6995 1)
6996 return -1;
6997 hevc->wait_buf = 0;
6998 }
6999 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7000 dump_pic_list(hevc);
7001 }
7002
7003 if (hevc->new_pic) {
7004#if 1
7005 /*SUPPORT_10BIT*/
7006 int sao_mem_unit =
7007 (hevc->lcu_size == 16 ? 9 :
7008 hevc->lcu_size ==
7009 32 ? 14 : 24) << 4;
7010#else
7011 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
7012#endif
7013 int pic_height_cu =
7014 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
7015 int pic_width_cu =
7016 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
7017 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
7018
7019 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
7020 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7021 hevc_print(hevc, 0,
7022 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
7023 __func__,
7024 hevc->decode_idx,
7025 hevc->curr_pic_struct,
7026 hevc->interlace_flag,
7027 hevc->cur_pic->index);
7028 }
7029 if (dbg_skip_decode_index != 0 &&
7030 hevc->decode_idx == dbg_skip_decode_index)
7031 dbg_skip_flag = 1;
7032
7033 hevc->decode_idx++;
7034 update_tile_info(hevc, pic_width_cu, pic_height_cu,
7035 sao_mem_unit, rpm_param);
7036
7037 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
7038 }
7039
7040 if (hevc->iPrevPOC != hevc->curr_POC) {
7041 hevc->new_tile = 1;
7042 hevc->tile_x = 0;
7043 hevc->tile_y = 0;
7044 hevc->tile_y_x = 0;
7045 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7046 hevc_print(hevc, 0,
7047 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
7048 hevc->tile_x, hevc->tile_y);
7049 }
7050 } else if (hevc->tile_enabled) {
7051 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7052 hevc_print(hevc, 0,
7053 "slice_segment_address is %d\n",
7054 rpm_param->p.slice_segment_address);
7055 }
7056 hevc->tile_y_x =
7057 get_tile_index(hevc, rpm_param->p.slice_segment_address,
7058 (hevc->pic_w +
7059 hevc->lcu_size -
7060 1) / hevc->lcu_size);
7061 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
7062 && (hevc->tile_y_x != -1)) {
7063 hevc->new_tile = 1;
7064 hevc->tile_x = hevc->tile_y_x & 0xff;
7065 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
7066 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7067 hevc_print(hevc, 0,
7068 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
7069 rpm_param->p.slice_segment_address,
7070 hevc->tile_x, hevc->tile_y);
7071 }
7072 } else
7073 hevc->new_tile = 0;
7074 } else
7075 hevc->new_tile = 0;
7076
7077 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
7078 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
7079 hevc->new_tile = 0;
7080
7081 if (hevc->new_tile) {
7082 hevc->tile_start_lcu_x =
7083 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
7084 hevc->tile_start_lcu_y =
7085 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
7086 hevc->tile_width_lcu =
7087 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
7088 hevc->tile_height_lcu =
7089 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
7090 }
7091
7092 set_ref_pic_list(hevc, rpm_param);
7093
7094 Col_ref = rpm_param->p.collocated_ref_idx;
7095
7096 hevc->LDCFlag = 0;
7097 if (rpm_param->p.slice_type != I_SLICE) {
7098 hevc->LDCFlag = 1;
7099 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
7100 if (hevc->cur_pic->
7101 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
7102 hevc->curr_POC)
7103 hevc->LDCFlag = 0;
7104 }
7105 if (rpm_param->p.slice_type == B_SLICE) {
7106 for (i = 0; (i < hevc->RefNum_L1)
7107 && hevc->LDCFlag; i++) {
7108 if (hevc->cur_pic->
7109 m_aiRefPOCList1[hevc->cur_pic->
7110 slice_idx][i] >
7111 hevc->curr_POC)
7112 hevc->LDCFlag = 0;
7113 }
7114 }
7115 }
7116
7117 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
7118
7119 hevc->plevel =
7120 rpm_param->p.log2_parallel_merge_level;
7121 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
7122
7123 hevc->LongTerm_Curr = 0; /* to do ... */
7124 hevc->LongTerm_Col = 0; /* to do ... */
7125
7126 hevc->list_no = 0;
7127 if (rpm_param->p.slice_type == B_SLICE)
7128 hevc->list_no = 1 - hevc->ColFromL0Flag;
7129 if (hevc->list_no == 0) {
7130 if (Col_ref < hevc->RefNum_L0) {
7131 hevc->Col_POC =
7132 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
7133 slice_idx][Col_ref];
7134 } else
7135 hevc->Col_POC = INVALID_POC;
7136 } else {
7137 if (Col_ref < hevc->RefNum_L1) {
7138 hevc->Col_POC =
7139 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
7140 slice_idx][Col_ref];
7141 } else
7142 hevc->Col_POC = INVALID_POC;
7143 }
7144
7145 hevc->LongTerm_Ref = 0; /* to do ... */
7146
7147 if (hevc->slice_type != 2) {
7148 /* if(hevc->i_only==1){ */
7149 /* return 0xf; */
7150 /* } */
7151
7152 if (hevc->Col_POC != INVALID_POC) {
7153 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7154 if (hevc->col_pic == NULL) {
7155 hevc->cur_pic->error_mark = 1;
7156 if (get_dbg_flag(hevc)) {
7157 hevc_print(hevc, 0,
7158 "WRONG,fail to get the pic Col_POC\n");
7159 }
7160 if (is_log_enable(hevc))
7161 add_log(hevc,
7162 "WRONG,fail to get the pic Col_POC");
7163 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7164 hevc->cur_pic->error_mark = 1;
7165 if (get_dbg_flag(hevc)) {
7166 hevc_print(hevc, 0,
7167 "WRONG, Col_POC error_mark is 1\n");
7168 }
7169 if (is_log_enable(hevc))
7170 add_log(hevc,
7171 "WRONG, Col_POC error_mark is 1");
7172 } else {
7173 if ((hevc->col_pic->width
7174 != hevc->pic_w) ||
7175 (hevc->col_pic->height
7176 != hevc->pic_h)) {
7177 hevc_print(hevc, 0,
7178 "Wrong reference pic (poc %d) width/height %d/%d\n",
7179 hevc->col_pic->POC,
7180 hevc->col_pic->width,
7181 hevc->col_pic->height);
7182 hevc->cur_pic->error_mark = 1;
7183 }
7184
7185 }
7186
7187 if (hevc->cur_pic->error_mark
7188 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7189#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7190 /*count info*/
7191 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7192 hevc->cur_pic->stream_offset);
7193#endif
7194 }
7195
7196 if (is_skip_decoding(hevc,
7197 hevc->cur_pic)) {
7198 return 2;
7199 }
7200 } else
7201 hevc->col_pic = hevc->cur_pic;
7202 } /* */
7203 if (hevc->col_pic == NULL)
7204 hevc->col_pic = hevc->cur_pic;
7205#ifdef BUFFER_MGR_ONLY
7206 return 0xf;
7207#else
7208 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7209 || (dbg_skip_flag))
7210 return 0xf;
7211#endif
7212
7213 config_mc_buffer(hevc, hevc->cur_pic);
7214
7215 if (is_skip_decoding(hevc,
7216 hevc->cur_pic)) {
7217 if (get_dbg_flag(hevc))
7218 hevc_print(hevc, 0,
7219 "Discard this picture index %d\n",
7220 hevc->cur_pic->index);
7221#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7222 /*count info*/
7223 vdec_count_info(gvs, hevc->cur_pic->error_mark,
7224 hevc->cur_pic->stream_offset);
7225#endif
7226 return 2;
7227 }
7228#ifdef MCRCC_ENABLE
7229 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7230#endif
7231 config_mpred_hw(hevc);
7232
7233 config_sao_hw(hevc, rpm_param);
7234
7235 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7236 return 0xf;
7237
7238 return 0;
7239}
7240
7241
7242
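/*
 * Allocate the scatter MMU pages backing one compressed frame: the
 * compressed body size (compute_losless_comp_body_size()) is rounded up
 * to 4KB pages, checked against the per-SoC limit (MAX_FRAME_8K_NUM on
 * SM1 and later, MAX_FRAME_4K_NUM otherwise) and mapped through
 * decoder_mmu_box_alloc_idx().  Nothing is allocated when
 * double_write_mode & 0x10 is set.
 */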
7243static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7244 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7245 int cur_buf_idx = new_pic->index;
7246 int bit_depth_10 = (bit_depth != 0x00);
7247 int picture_size;
7248 int cur_mmu_4k_number;
7249 int ret, max_frame_num;
7250 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7251 new_pic->height, !bit_depth_10);
7252 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7253 if (hevc->double_write_mode & 0x10)
7254 return 0;
7255 /*hevc_print(hevc, 0,
7256 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7257 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7258 if (new_pic->scatter_alloc) {
7259 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7260 new_pic->scatter_alloc = 0;
7261 }
7262 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7263 max_frame_num = MAX_FRAME_8K_NUM;
7264 else
7265 max_frame_num = MAX_FRAME_4K_NUM;
7266 if (cur_mmu_4k_number > max_frame_num) {
7267 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7268 cur_mmu_4k_number,
7269 new_pic->width,
7270 new_pic->height);
7271 return -1;
7272 }
7273 ret = decoder_mmu_box_alloc_idx(
7274 hevc->mmu_box,
7275 cur_buf_idx,
7276 cur_mmu_4k_number,
7277 mmu_index_adr);
7278 if (ret == 0)
7279 new_pic->scatter_alloc = 1;
7280
7281 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7282 "%s pic index %d page count(%d) ret =%d\n",
7283 __func__, cur_buf_idx,
7284 cur_mmu_4k_number, ret);
7285 return ret;
7286}
7287
7288
7289static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7290 struct PIC_s *pic)
7291{
7292 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7293 "%s pic index %d scatter_alloc %d\n",
7294 __func__, pic->index,
7295 pic->scatter_alloc);
7296
7297 if (hevc->mmu_enable
7298 && ((hevc->double_write_mode & 0x10) == 0)
7299 && pic->scatter_alloc)
7300 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7301 pic->scatter_alloc = 0;
7302}
7303
7304/*
7305 *************************************************
7306 *
7307 *h265 buffer management end
7308 *
7309 **************************************************
7310 */
7311static struct hevc_state_s *gHevc;
7312
7313static void hevc_local_uninit(struct hevc_state_s *hevc)
7314{
7315 hevc->rpm_ptr = NULL;
7316 hevc->lmem_ptr = NULL;
7317
7318#ifdef SWAP_HEVC_UCODE
7319 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7320 if (hevc->mc_cpu_addr != NULL) {
7321 dma_free_coherent(amports_get_dma_device(),
7322 hevc->swap_size, hevc->mc_cpu_addr,
7323 hevc->mc_dma_handle);
7324 hevc->mc_cpu_addr = NULL;
7325 }
7326
7327 }
7328#endif
7329#ifdef DETREFILL_ENABLE
7330 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7331 uninit_detrefill_buf(hevc);
7332#endif
7333 if (hevc->aux_addr) {
7334 dma_free_coherent(amports_get_dma_device(),
7335 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7336 hevc->aux_phy_addr);
7337 hevc->aux_addr = NULL;
7338 }
7339 if (hevc->rpm_addr) {
7340 dma_free_coherent(amports_get_dma_device(),
7341 RPM_BUF_SIZE, hevc->rpm_addr,
7342 hevc->rpm_phy_addr);
7343 hevc->rpm_addr = NULL;
7344 }
7345 if (hevc->lmem_addr) {
7346 dma_free_coherent(amports_get_dma_device(),
7347			LMEM_BUF_SIZE, hevc->lmem_addr,
7348 hevc->lmem_phy_addr);
7349 hevc->lmem_addr = NULL;
7350 }
7351
7352 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7353 if (hevc->frame_mmu_map_phy_addr)
7354 dma_free_coherent(amports_get_dma_device(),
7355 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7356 hevc->frame_mmu_map_phy_addr);
7357
7358 hevc->frame_mmu_map_addr = NULL;
7359 }
7360
7361 kfree(gvs);
7362 gvs = NULL;
7363}
7364
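/*
 * Per-instance allocation of the coherent work buffers: pick the
 * workspace spec (1080p vs 4k, with a separate 4k spec for SM1 and
 * later), then allocate the RPM parameter buffer, the prefix/suffix aux
 * buffer, the LMEM buffer and, in MMU mode, the frame MMU map.
 */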
7365static int hevc_local_init(struct hevc_state_s *hevc)
7366{
7367 int ret = -1;
7368 struct BuffInfo_s *cur_buf_info = NULL;
7369
7370 memset(&hevc->param, 0, sizeof(union param_u));
7371
7372 cur_buf_info = &hevc->work_space_buf_store;
7373
7374 if (vdec_is_support_4k()) {
7375 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7376 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7377 sizeof(struct BuffInfo_s));
7378 else
7379 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7380 sizeof(struct BuffInfo_s));
7381 } else
7382 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7383 sizeof(struct BuffInfo_s));
7384
7385 cur_buf_info->start_adr = hevc->buf_start;
7386 init_buff_spec(hevc, cur_buf_info);
7387
7388 hevc_init_stru(hevc, cur_buf_info);
7389
7390 hevc->bit_depth_luma = 8;
7391 hevc->bit_depth_chroma = 8;
7392 hevc->video_signal_type = 0;
7393 hevc->video_signal_type_debug = 0;
7394 bit_depth_luma = hevc->bit_depth_luma;
7395 bit_depth_chroma = hevc->bit_depth_chroma;
7396 video_signal_type = hevc->video_signal_type;
7397
7398 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7399 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7400 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7401 if (hevc->rpm_addr == NULL) {
7402 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7403 return -1;
7404 }
7405 hevc->rpm_ptr = hevc->rpm_addr;
7406 }
7407
7408 if (prefix_aux_buf_size > 0 ||
7409 suffix_aux_buf_size > 0) {
7410 u32 aux_buf_size;
7411
7412 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7413 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7414 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7415		hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7416			aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7417		if (hevc->aux_addr == NULL) {
7418			pr_err("%s: failed to alloc aux buffer\n", __func__);
7419 return -1;
7420 }
7421 }
7422
7423 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7424 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7425 if (hevc->lmem_addr == NULL) {
7426 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7427 return -1;
7428 }
7429 hevc->lmem_ptr = hevc->lmem_addr;
7430
7431 if (hevc->mmu_enable) {
7432 hevc->frame_mmu_map_addr =
7433 dma_alloc_coherent(amports_get_dma_device(),
7434 get_frame_mmu_map_size(),
7435 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7436 if (hevc->frame_mmu_map_addr == NULL) {
7437			pr_err("%s: failed to alloc frame mmu map buffer\n", __func__);
7438 return -1;
7439 }
7440 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7441 }
7442 ret = 0;
7443 return ret;
7444}
7445
7446/*
7447 *******************************************
7448 * Mailbox command
7449 *******************************************
7450 */
7451#define CMD_FINISHED 0
7452#define CMD_ALLOC_VIEW 1
7453#define CMD_FRAME_DISPLAY 3
7454#define CMD_DEBUG 10
7455
7456
7457#define DECODE_BUFFER_NUM_MAX 32
7458#define DISPLAY_BUFFER_NUM 6
7459
7460#define video_domain_addr(adr) (adr&0x7fffffff)
7461#define DECODER_WORK_SPACE_SIZE 0x800000
7462
7463#define spec2canvas(x) \
7464 (((x)->uv_canvas_index << 16) | \
7465 ((x)->uv_canvas_index << 8) | \
7466 ((x)->y_canvas_index << 0))
7467
7468
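/*
 * Program the display canvases for one picture.  With double write the
 * canvas covers the down-scaled DW buffer (width/height divided by
 * get_double_write_ratio(), 32/64-byte aligned); without MMU the
 * full-size reference buffer is mapped directly.  Canvas indexes come
 * from the per-vdec pool when parallel_dec is used, otherwise from the
 * fixed 128 + index range.
 */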
7469static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7470{
7471 struct vdec_s *vdec = hw_to_vdec(hevc);
7472 int canvas_w = ALIGN(pic->width, 64)/4;
7473 int canvas_h = ALIGN(pic->height, 32)/4;
7474 int blkmode = hevc->mem_map_mode;
7475
7476 /*CANVAS_BLKMODE_64X32*/
7477#ifdef SUPPORT_10BIT
7478 if (pic->double_write_mode) {
7479 canvas_w = pic->width /
7480 get_double_write_ratio(hevc, pic->double_write_mode);
7481 canvas_h = pic->height /
7482 get_double_write_ratio(hevc, pic->double_write_mode);
7483
7484 if (hevc->mem_map_mode == 0)
7485 canvas_w = ALIGN(canvas_w, 32);
7486 else
7487 canvas_w = ALIGN(canvas_w, 64);
7488 canvas_h = ALIGN(canvas_h, 32);
7489
7490 if (vdec->parallel_dec == 1) {
7491 if (pic->y_canvas_index == -1)
7492 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7493 if (pic->uv_canvas_index == -1)
7494 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7495 } else {
7496 pic->y_canvas_index = 128 + pic->index * 2;
7497 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7498 }
7499
7500 canvas_config_ex(pic->y_canvas_index,
7501 pic->dw_y_adr, canvas_w, canvas_h,
7502 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7503 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7504 canvas_w, canvas_h,
7505 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7506#ifdef MULTI_INSTANCE_SUPPORT
7507 pic->canvas_config[0].phy_addr =
7508 pic->dw_y_adr;
7509 pic->canvas_config[0].width =
7510 canvas_w;
7511 pic->canvas_config[0].height =
7512 canvas_h;
7513 pic->canvas_config[0].block_mode =
7514 blkmode;
7515 pic->canvas_config[0].endian = hevc->is_used_v4l ? 0 : 7;
7516
7517 pic->canvas_config[1].phy_addr =
7518 pic->dw_u_v_adr;
7519 pic->canvas_config[1].width =
7520 canvas_w;
7521 pic->canvas_config[1].height =
7522 canvas_h;
7523 pic->canvas_config[1].block_mode =
7524 blkmode;
7525 pic->canvas_config[1].endian = hevc->is_used_v4l ? 0 : 7;
7526#endif
7527 } else {
7528 if (!hevc->mmu_enable) {
7529 /* to change after 10bit VPU is ready ... */
7530 if (vdec->parallel_dec == 1) {
7531 if (pic->y_canvas_index == -1)
7532 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7533 pic->uv_canvas_index = pic->y_canvas_index;
7534 } else {
7535 pic->y_canvas_index = 128 + pic->index;
7536 pic->uv_canvas_index = 128 + pic->index;
7537 }
7538
7539 canvas_config_ex(pic->y_canvas_index,
7540 pic->mc_y_adr, canvas_w, canvas_h,
7541 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7542 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7543 canvas_w, canvas_h,
7544 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7545 }
7546 }
7547#else
7548 if (vdec->parallel_dec == 1) {
7549 if (pic->y_canvas_index == -1)
7550 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7551 if (pic->uv_canvas_index == -1)
7552 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7553 } else {
7554 pic->y_canvas_index = 128 + pic->index * 2;
7555 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7556 }
7557
7558
7559 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7560 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7561 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7562 canvas_w, canvas_h,
7563 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7564#endif
7565}
7566
7567static int init_buf_spec(struct hevc_state_s *hevc)
7568{
7569 int pic_width = hevc->pic_w;
7570 int pic_height = hevc->pic_h;
7571
7572 /* hevc_print(hevc, 0,
7573 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7574 */
7575 hevc_print(hevc, 0,
7576 "%s2 %d %d\n", __func__, pic_width, pic_height);
7577 /* pic_width = hevc->pic_w; */
7578 /* pic_height = hevc->pic_h; */
7579
7580 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7581 hevc->frame_width = pic_width;
7582 hevc->frame_height = pic_height;
7583
7584 }
7585
7586 return 0;
7587}
7588
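/*
 * Minimal SEI parser: walk the payloads in sei_buf and cache pic_timing
 * (pic_struct), the ITU-T T.35 HDR10+ marker, mastering display colour
 * volume and content light level data in the hevc state.  Note that
 * only single-byte payload type/size values are handled here (no 0xff
 * extension bytes).
 */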
7589static int parse_sei(struct hevc_state_s *hevc,
7590 struct PIC_s *pic, char *sei_buf, uint32_t size)
7591{
7592 char *p = sei_buf;
7593 char *p_sei;
7594 uint16_t header;
7595 uint8_t nal_unit_type;
7596 uint8_t payload_type, payload_size;
7597 int i, j;
7598
7599 if (size < 2)
7600 return 0;
7601 header = *p++;
7602 header <<= 8;
7603 header += *p++;
7604 nal_unit_type = header >> 9;
7605 if ((nal_unit_type != NAL_UNIT_SEI)
7606 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7607 return 0;
7608 while (p+2 <= sei_buf+size) {
7609 payload_type = *p++;
7610 payload_size = *p++;
7611 if (p+payload_size <= sei_buf+size) {
7612 switch (payload_type) {
7613 case SEI_PicTiming:
7614 if ((parser_sei_enable & 0x4) &&
7615 hevc->frame_field_info_present_flag) {
7616 p_sei = p;
7617 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7618 pic->pic_struct = hevc->curr_pic_struct;
7619 if (get_dbg_flag(hevc) &
7620 H265_DEBUG_PIC_STRUCT) {
7621 hevc_print(hevc, 0,
7622 "parse result pic_struct = %d\n",
7623 hevc->curr_pic_struct);
7624 }
7625 }
7626 break;
7627 case SEI_UserDataITU_T_T35:
7628 p_sei = p;
7629 if (p_sei[0] == 0xB5
7630 && p_sei[1] == 0x00
7631 && p_sei[2] == 0x3C
7632 && p_sei[3] == 0x00
7633 && p_sei[4] == 0x01
7634 && p_sei[5] == 0x04)
7635 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7636
7637 break;
7638 case SEI_MasteringDisplayColorVolume:
7639 /*hevc_print(hevc, 0,
7640 "sei type: primary display color volume %d, size %d\n",
7641 payload_type,
7642 payload_size);*/
7643 /* master_display_colour */
7644 p_sei = p;
7645 for (i = 0; i < 3; i++) {
7646 for (j = 0; j < 2; j++) {
7647 hevc->primaries[i][j]
7648 = (*p_sei<<8)
7649 | *(p_sei+1);
7650 p_sei += 2;
7651 }
7652 }
7653 for (i = 0; i < 2; i++) {
7654 hevc->white_point[i]
7655 = (*p_sei<<8)
7656 | *(p_sei+1);
7657 p_sei += 2;
7658 }
7659 for (i = 0; i < 2; i++) {
7660 hevc->luminance[i]
7661 = (*p_sei<<24)
7662 | (*(p_sei+1)<<16)
7663 | (*(p_sei+2)<<8)
7664 | *(p_sei+3);
7665 p_sei += 4;
7666 }
7667 hevc->sei_present_flag |=
7668 SEI_MASTER_DISPLAY_COLOR_MASK;
7669 /*for (i = 0; i < 3; i++)
7670 for (j = 0; j < 2; j++)
7671 hevc_print(hevc, 0,
7672 "\tprimaries[%1d][%1d] = %04x\n",
7673 i, j,
7674 hevc->primaries[i][j]);
7675 hevc_print(hevc, 0,
7676 "\twhite_point = (%04x, %04x)\n",
7677 hevc->white_point[0],
7678 hevc->white_point[1]);
7679 hevc_print(hevc, 0,
7680 "\tmax,min luminance = %08x, %08x\n",
7681 hevc->luminance[0],
7682 hevc->luminance[1]);*/
7683 break;
7684 case SEI_ContentLightLevel:
7685 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7686 hevc_print(hevc, 0,
7687 "sei type: max content light level %d, size %d\n",
7688 payload_type, payload_size);
7689 /* content_light_level */
7690 p_sei = p;
7691 hevc->content_light_level[0]
7692 = (*p_sei<<8) | *(p_sei+1);
7693 p_sei += 2;
7694 hevc->content_light_level[1]
7695 = (*p_sei<<8) | *(p_sei+1);
7696 p_sei += 2;
7697 hevc->sei_present_flag |=
7698 SEI_CONTENT_LIGHT_LEVEL_MASK;
7699 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7700 hevc_print(hevc, 0,
7701 "\tmax cll = %04x, max_pa_cll = %04x\n",
7702 hevc->content_light_level[0],
7703 hevc->content_light_level[1]);
7704 break;
7705 default:
7706 break;
7707 }
7708 }
7709 p += payload_size;
7710 }
7711 return 0;
7712}
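/*
 * Layout of the aux data consumed by parse_sei() above, for illustration:
 * a two-byte NAL header (nal_unit_type in its upper bits) followed by
 * repeated (payload_type, payload_size, payload bytes) pairs. The
 * mastering display colour volume payload is read as 3x2 primaries and
 * one white point as 16-bit big-endian values, then two 32-bit luminance
 * values, which is how the SEI_MasteringDisplayColorVolume case walks
 * p_sei.
 */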
7713
7714static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7715 unsigned w, unsigned h)
7716{
7717 unsigned ar;
7718
7719 if (idc == 255) {
7720 ar = div_u64(256ULL * sar_h * h,
7721 sar_w * w);
7722 } else {
7723 switch (idc) {
7724 case 1:
7725 ar = 0x100 * h / w;
7726 break;
7727 case 2:
7728 ar = 0x100 * h * 11 / (w * 12);
7729 break;
7730 case 3:
7731 ar = 0x100 * h * 11 / (w * 10);
7732 break;
7733 case 4:
7734 ar = 0x100 * h * 11 / (w * 16);
7735 break;
7736 case 5:
7737 ar = 0x100 * h * 33 / (w * 40);
7738 break;
7739 case 6:
7740 ar = 0x100 * h * 11 / (w * 24);
7741 break;
7742 case 7:
7743 ar = 0x100 * h * 11 / (w * 20);
7744 break;
7745 case 8:
7746 ar = 0x100 * h * 11 / (w * 32);
7747 break;
7748 case 9:
7749 ar = 0x100 * h * 33 / (w * 80);
7750 break;
7751 case 10:
7752 ar = 0x100 * h * 11 / (w * 18);
7753 break;
7754 case 11:
7755 ar = 0x100 * h * 11 / (w * 15);
7756 break;
7757 case 12:
7758 ar = 0x100 * h * 33 / (w * 64);
7759 break;
7760 case 13:
7761 ar = 0x100 * h * 99 / (w * 160);
7762 break;
7763 case 14:
7764 ar = 0x100 * h * 3 / (w * 4);
7765 break;
7766 case 15:
7767 ar = 0x100 * h * 2 / (w * 3);
7768 break;
7769 case 16:
7770 ar = 0x100 * h * 1 / (w * 2);
7771 break;
7772 default:
7773 ar = h * 0x100 / w;
7774 break;
7775 }
7776 }
7777
7778 return ar;
7779}
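/*
 * Worked example with illustrative values: a 1920x1080 picture with
 * aspect_ratio_idc 1 (square samples) gives
 * ar = 0x100 * 1080 / 1920 = 0x90, i.e. height/width in 8.8 fixed point.
 * The same picture with idc 255 and an explicit SAR of 16:11 gives
 * ar = 256 * 11 * 1080 / (16 * 1920) = 0x63.
 */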
7780
7781static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
7782 struct PIC_s *pic)
7783{
7784 unsigned int ar;
7785 int i, j;
7786 char *p;
7787 unsigned size = 0;
7788 unsigned type = 0;
7789 struct vframe_master_display_colour_s *vf_dp
7790 = &vf->prop.master_display_colour;
7791
7792 vf->width = pic->width /
7793 get_double_write_ratio(hevc, pic->double_write_mode);
7794 vf->height = pic->height /
7795 get_double_write_ratio(hevc, pic->double_write_mode);
7796
7797 vf->duration = hevc->frame_dur;
7798 vf->duration_pulldown = 0;
7799 vf->flag = 0;
7800
7801 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
7802 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7803
7804
7805 if (((pic->aspect_ratio_idc == 255) &&
7806 pic->sar_width &&
7807 pic->sar_height) ||
7808 ((pic->aspect_ratio_idc != 255) &&
7809 (pic->width))) {
7810 ar = min_t(u32,
7811 calc_ar(pic->aspect_ratio_idc,
7812 pic->sar_width,
7813 pic->sar_height,
7814 pic->width,
7815 pic->height),
7816 DISP_RATIO_ASPECT_RATIO_MAX);
7817 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
7818 }
7819 hevc->ratio_control = vf->ratio_control;
7820 if (pic->aux_data_buf
7821 && pic->aux_data_size) {
7822		/* parse sei */
7823 p = pic->aux_data_buf;
7824 while (p < pic->aux_data_buf
7825 + pic->aux_data_size - 8) {
7826 size = *p++;
7827 size = (size << 8) | *p++;
7828 size = (size << 8) | *p++;
7829 size = (size << 8) | *p++;
7830 type = *p++;
7831 type = (type << 8) | *p++;
7832 type = (type << 8) | *p++;
7833 type = (type << 8) | *p++;
7834 if (type == 0x02000000) {
7835 /* hevc_print(hevc, 0,
7836 "sei(%d)\n", size); */
7837 parse_sei(hevc, pic, p, size);
7838 }
7839 p += size;
7840 }
7841 }
7842 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
7843 vf->signal_type = pic->video_signal_type;
7844 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
7845 u32 data;
7846 data = vf->signal_type;
7847 data = data & 0xFFFF00FF;
7848 data = data | (0x30<<8);
7849 vf->signal_type = data;
7850 }
7851 }
7852 else
7853 vf->signal_type = 0;
7854 hevc->video_signal_type_debug = vf->signal_type;
7855
7856 /* master_display_colour */
7857 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
7858 for (i = 0; i < 3; i++)
7859 for (j = 0; j < 2; j++)
7860 vf_dp->primaries[i][j] = hevc->primaries[i][j];
7861 for (i = 0; i < 2; i++) {
7862 vf_dp->white_point[i] = hevc->white_point[i];
7863 vf_dp->luminance[i]
7864 = hevc->luminance[i];
7865 }
7866 vf_dp->present_flag = 1;
7867 } else
7868 vf_dp->present_flag = 0;
7869
7870 /* content_light_level */
7871 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
7872 vf_dp->content_light_level.max_content
7873 = hevc->content_light_level[0];
7874 vf_dp->content_light_level.max_pic_average
7875 = hevc->content_light_level[1];
7876 vf_dp->content_light_level.present_flag = 1;
7877 } else
7878 vf_dp->content_light_level.present_flag = 0;
7879
7880 if (hevc->is_used_v4l &&
7881 ((hevc->sei_present_flag & SEI_HDR10PLUS_MASK) ||
7882 (vf_dp->present_flag) ||
7883 (vf_dp->content_light_level.present_flag))) {
7884 struct aml_vdec_hdr_infos hdr;
7885 struct aml_vcodec_ctx *ctx =
7886 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
7887
7888 memset(&hdr, 0, sizeof(hdr));
7889 hdr.signal_type = vf->signal_type;
7890 hdr.color_parms = *vf_dp;
7891 vdec_v4l_set_hdr_infos(ctx, &hdr);
7892 }
7893}
7894
7895static int vh265_vf_states(struct vframe_states *states, void *op_arg)
7896{
7897 unsigned long flags;
7898#ifdef MULTI_INSTANCE_SUPPORT
7899 struct vdec_s *vdec = op_arg;
7900 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7901#else
7902 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7903#endif
7904
7905 spin_lock_irqsave(&lock, flags);
7906
7907 states->vf_pool_size = VF_POOL_SIZE;
7908 states->buf_free_num = kfifo_len(&hevc->newframe_q);
7909 states->buf_avail_num = kfifo_len(&hevc->display_q);
7910
7911 if (step == 2)
7912 states->buf_avail_num = 0;
7913 spin_unlock_irqrestore(&lock, flags);
7914 return 0;
7915}
7916
7917static struct vframe_s *vh265_vf_peek(void *op_arg)
7918{
7919 struct vframe_s *vf[2] = {0, 0};
7920#ifdef MULTI_INSTANCE_SUPPORT
7921 struct vdec_s *vdec = op_arg;
7922 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7923#else
7924 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7925#endif
7926
7927 if (step == 2)
7928 return NULL;
7929
7930 if (force_disp_pic_index & 0x100) {
7931 if (force_disp_pic_index & 0x200)
7932 return NULL;
7933 return &hevc->vframe_dummy;
7934 }
7935
7936
7937 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
7938 if (vf[1]) {
7939 vf[0]->next_vf_pts_valid = true;
7940 vf[0]->next_vf_pts = vf[1]->pts;
7941 } else
7942 vf[0]->next_vf_pts_valid = false;
7943 return vf[0];
7944 }
7945
7946 return NULL;
7947}
7948
7949static struct vframe_s *vh265_vf_get(void *op_arg)
7950{
7951 struct vframe_s *vf;
7952#ifdef MULTI_INSTANCE_SUPPORT
7953 struct vdec_s *vdec = op_arg;
7954 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
7955#else
7956 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
7957#endif
7958
7959 if (step == 2)
7960 return NULL;
7961 else if (step == 1)
7962 step = 2;
7963
7964#if 0
7965 if (force_disp_pic_index & 0x100) {
7966 int buffer_index = force_disp_pic_index & 0xff;
7967 struct PIC_s *pic = NULL;
7968 if (buffer_index >= 0
7969 && buffer_index < MAX_REF_PIC_NUM)
7970 pic = hevc->m_PIC[buffer_index];
7971 if (pic == NULL)
7972 return NULL;
7973 if (force_disp_pic_index & 0x200)
7974 return NULL;
7975
7976 vf = &hevc->vframe_dummy;
7977 if (get_double_write_mode(hevc)) {
7978 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
7979 VIDTYPE_VIU_NV21;
7980 if (hevc->m_ins_flag) {
7981 vf->canvas0Addr = vf->canvas1Addr = -1;
7982 vf->plane_num = 2;
7983 vf->canvas0_config[0] =
7984 pic->canvas_config[0];
7985 vf->canvas0_config[1] =
7986 pic->canvas_config[1];
7987
7988 vf->canvas1_config[0] =
7989 pic->canvas_config[0];
7990 vf->canvas1_config[1] =
7991 pic->canvas_config[1];
7992 } else {
7993 vf->canvas0Addr = vf->canvas1Addr
7994 = spec2canvas(pic);
7995 }
7996 } else {
7997 vf->canvas0Addr = vf->canvas1Addr = 0;
7998 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
7999 if (hevc->mmu_enable)
8000 vf->type |= VIDTYPE_SCATTER;
8001 }
8002 vf->compWidth = pic->width;
8003 vf->compHeight = pic->height;
8004 update_vf_memhandle(hevc, vf, pic);
8005 switch (hevc->bit_depth_luma) {
8006 case 9:
8007 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
8008 break;
8009 case 10:
8010 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
8011 | BITDEPTH_V10;
8012 break;
8013 default:
8014 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8015 break;
8016 }
8017 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8018 vf->bitdepth =
8019 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8020 if (hevc->mem_saving_mode == 1)
8021 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8022 vf->duration_pulldown = 0;
8023 vf->pts = 0;
8024 vf->pts_us64 = 0;
8025 set_frame_info(hevc, vf);
8026
8027 vf->width = pic->width /
8028 get_double_write_ratio(hevc, pic->double_write_mode);
8029 vf->height = pic->height /
8030 get_double_write_ratio(hevc, pic->double_write_mode);
8031
8032 force_disp_pic_index |= 0x200;
8033 return vf;
8034 }
8035#endif
8036
8037 if (kfifo_get(&hevc->display_q, &vf)) {
8038 struct vframe_s *next_vf;
8039 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8040 hevc_print(hevc, 0,
8041				"%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%lld) dur %d\n",
8042 __func__, vf, vf->type, vf->index,
8043 get_pic_poc(hevc, vf->index & 0xff),
8044 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8045 vf->pts, vf->pts_us64,
8046 vf->duration);
8047#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8048 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
8049 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
8050 if (pic->aux_data_buf && pic->aux_data_size > 0) {
8051 int i;
8052 struct PIC_s *pic =
8053 hevc->m_PIC[vf->index & 0xff];
8054 hevc_print(hevc, 0,
8055 "pic 0x%p aux size %d:\n",
8056 pic, pic->aux_data_size);
8057 for (i = 0; i < pic->aux_data_size; i++) {
8058 hevc_print_cont(hevc, 0,
8059 "%02x ", pic->aux_data_buf[i]);
8060 if (((i + 1) & 0xf) == 0)
8061 hevc_print_cont(hevc, 0, "\n");
8062 }
8063 hevc_print_cont(hevc, 0, "\n");
8064 }
8065 }
8066#endif
8067 hevc->show_frame_num++;
8068 hevc->vf_get_count++;
8069
8070 if (kfifo_peek(&hevc->display_q, &next_vf)) {
8071 vf->next_vf_pts_valid = true;
8072 vf->next_vf_pts = next_vf->pts;
8073 } else
8074 vf->next_vf_pts_valid = false;
8075
8076 return vf;
8077 }
8078
8079 return NULL;
8080}
8081static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
8082 int i;
8083 for (i = 0; i < VF_POOL_SIZE; i++) {
8084 if (vf == &hevc->vfpool[i])
8085 return true;
8086 }
8087	pr_info(" h265 invalid vf has been put, vf = %p\n", vf);
8088 for (i = 0; i < VF_POOL_SIZE; i++) {
8089		pr_info("valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
8090 }
8091 return false;
8092}
8093
8094static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
8095{
8096 unsigned long flags;
8097#ifdef MULTI_INSTANCE_SUPPORT
8098 struct vdec_s *vdec = op_arg;
8099 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8100#else
8101 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8102#endif
8103 unsigned char index_top;
8104 unsigned char index_bot;
8105
8106 if (vf && (vf_valid_check(vf, hevc) == false))
8107 return;
8108 if (vf == (&hevc->vframe_dummy))
8109 return;
8110 index_top = vf->index & 0xff;
8111 index_bot = (vf->index >> 8) & 0xff;
8112 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8113 hevc_print(hevc, 0,
8114 "%s(type %d index 0x%x)\n",
8115 __func__, vf->type, vf->index);
8116 hevc->vf_put_count++;
8117 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8118 spin_lock_irqsave(&lock, flags);
8119
8120 if (index_top != 0xff
8121 && index_top < MAX_REF_PIC_NUM
8122 && hevc->m_PIC[index_top]) {
8123 if (hevc->is_used_v4l)
8124 hevc->m_PIC[index_top]->vframe_bound = true;
8125 if (hevc->m_PIC[index_top]->vf_ref > 0) {
8126 hevc->m_PIC[index_top]->vf_ref--;
8127
8128 if (hevc->m_PIC[index_top]->vf_ref == 0) {
8129 hevc->m_PIC[index_top]->output_ready = 0;
8130
8131 if (hevc->wait_buf != 0)
8132 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8133 0x1);
8134 }
8135 }
8136 }
8137
8138 if (index_bot != 0xff
8139 && index_bot < MAX_REF_PIC_NUM
8140 && hevc->m_PIC[index_bot]) {
8141 if (hevc->is_used_v4l)
8142 hevc->m_PIC[index_bot]->vframe_bound = true;
8143 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
8144 hevc->m_PIC[index_bot]->vf_ref--;
8145
8146 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
8147 hevc->m_PIC[index_bot]->output_ready = 0;
8148 if (hevc->wait_buf != 0)
8149 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8150 0x1);
8151 }
8152 }
8153 }
8154 spin_unlock_irqrestore(&lock, flags);
8155}
8156
8157static int vh265_event_cb(int type, void *data, void *op_arg)
8158{
8159 unsigned long flags;
8160#ifdef MULTI_INSTANCE_SUPPORT
8161 struct vdec_s *vdec = op_arg;
8162 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8163#else
8164 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8165#endif
8166 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8167#if 0
8168 amhevc_stop();
8169#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8170 vf_light_unreg_provider(&vh265_vf_prov);
8171#endif
8172 spin_lock_irqsave(&hevc->lock, flags);
8173 vh265_local_init();
8174 vh265_prot_init();
8175 spin_unlock_irqrestore(&hevc->lock, flags);
8176#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8177 vf_reg_provider(&vh265_vf_prov);
8178#endif
8179 amhevc_start();
8180#endif
8181 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8182 struct provider_aux_req_s *req =
8183 (struct provider_aux_req_s *)data;
8184 unsigned char index;
8185
8186 spin_lock_irqsave(&lock, flags);
8187 index = req->vf->index & 0xff;
8188 req->aux_buf = NULL;
8189 req->aux_size = 0;
8190 if (req->bot_flag)
8191 index = (req->vf->index >> 8) & 0xff;
8192 if (index != 0xff
8193 && index < MAX_REF_PIC_NUM
8194 && hevc->m_PIC[index]) {
8195 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8196 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8197#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8198 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8199 req->dv_enhance_exist = false;
8200 else
8201 req->dv_enhance_exist =
8202 hevc->m_PIC[index]->dv_enhance_exist;
8203 hevc_print(hevc, H265_DEBUG_DV,
8204				"query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux size 0x%x\n",
8205 req->vf,
8206 hevc->m_PIC[index]->POC, index,
8207 req->dv_enhance_exist, req->aux_size);
8208#else
8209 req->dv_enhance_exist = 0;
8210#endif
8211 }
8212 spin_unlock_irqrestore(&lock, flags);
8213
8214 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8215 hevc_print(hevc, 0,
8216 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8217 __func__, type, index, req->aux_size);
8218#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8219 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8220 if ((force_bypass_dvenl & 0x80000000) == 0) {
8221 hevc_print(hevc, 0,
8222 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8223 __func__);
8224 hevc->bypass_dvenl_enable = 1;
8225 }
8226
8227#endif
8228 }
8229 return 0;
8230}
8231
8232#ifdef HEVC_PIC_STRUCT_SUPPORT
8233static int process_pending_vframe(struct hevc_state_s *hevc,
8234 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8235{
8236 struct vframe_s *vf;
8237
8238 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8239 hevc_print(hevc, 0,
8240 "%s: pair_pic index 0x%x %s\n",
8241 __func__, pair_pic->index,
8242 pair_frame_top_flag ?
8243 "top" : "bot");
8244
8245 if (kfifo_len(&hevc->pending_q) > 1) {
8246 unsigned long flags;
8247		/* do not keep more than 1 frame pending */
8248 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8249 hevc_print(hevc, 0,
8250 "fatal error, no available buffer slot.");
8251 return -1;
8252 }
8253 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8254 hevc_print(hevc, 0,
8255 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8256 __func__, vf->index);
8257 if ((hevc->double_write_mode == 3) &&
8258 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8259 vf->type |= VIDTYPE_COMPRESS;
8260 if (hevc->mmu_enable)
8261 vf->type |= VIDTYPE_SCATTER;
8262 }
8263 hevc->vf_pre_count++;
8264 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8265 spin_lock_irqsave(&lock, flags);
8266 vf->index &= 0xff;
8267 hevc->m_PIC[vf->index]->output_ready = 0;
8268 if (hevc->wait_buf != 0)
8269 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8270 0x1);
8271 spin_unlock_irqrestore(&lock, flags);
8272
8273 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8274 }
8275
8276 if (kfifo_peek(&hevc->pending_q, &vf)) {
8277 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8278 /*
8279 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8280 *do not use it
8281 */
8282 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8283 hevc_print(hevc, 0,
8284 "fatal error, no available buffer slot.");
8285 return -1;
8286 }
8287 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8288 hevc_print(hevc, 0,
8289 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8290 __func__, vf->index);
8291 if (vf) {
8292 if ((hevc->double_write_mode == 3) &&
8293 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8294 vf->type |= VIDTYPE_COMPRESS;
8295 if (hevc->mmu_enable)
8296 vf->type |= VIDTYPE_SCATTER;
8297 }
8298 hevc->vf_pre_count++;
8299 kfifo_put(&hevc->display_q,
8300 (const struct vframe_s *)vf);
8301 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8302 }
8303 } else if ((!pair_frame_top_flag) &&
8304 (((vf->index >> 8) & 0xff) == 0xff)) {
8305 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8306 hevc_print(hevc, 0,
8307 "fatal error, no available buffer slot.");
8308 return -1;
8309 }
8310 if (vf) {
8311 if ((hevc->double_write_mode == 3) &&
8312 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8313 vf->type |= VIDTYPE_COMPRESS;
8314 if (hevc->mmu_enable)
8315 vf->type |= VIDTYPE_SCATTER;
8316 }
8317 vf->index &= 0xff;
8318 vf->index |= (pair_pic->index << 8);
8319 vf->canvas1Addr = spec2canvas(pair_pic);
8320 pair_pic->vf_ref++;
8321 kfifo_put(&hevc->display_q,
8322 (const struct vframe_s *)vf);
8323 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8324 hevc->vf_pre_count++;
8325 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8326 hevc_print(hevc, 0,
8327 "%s vf => display_q: (index 0x%x)\n",
8328 __func__, vf->index);
8329 }
8330 } else if (pair_frame_top_flag &&
8331 ((vf->index & 0xff) == 0xff)) {
8332 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8333 hevc_print(hevc, 0,
8334 "fatal error, no available buffer slot.");
8335 return -1;
8336 }
8337 if (vf) {
8338 if ((hevc->double_write_mode == 3) &&
8339 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8340 vf->type |= VIDTYPE_COMPRESS;
8341 if (hevc->mmu_enable)
8342 vf->type |= VIDTYPE_SCATTER;
8343 }
8344 vf->index &= 0xff00;
8345 vf->index |= pair_pic->index;
8346 vf->canvas0Addr = spec2canvas(pair_pic);
8347 pair_pic->vf_ref++;
8348 kfifo_put(&hevc->display_q,
8349 (const struct vframe_s *)vf);
8350 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8351 hevc->vf_pre_count++;
8352 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8353 hevc_print(hevc, 0,
8354 "%s vf => display_q: (index 0x%x)\n",
8355 __func__, vf->index);
8356 }
8357 }
8358 }
8359 return 0;
8360}
8361#endif
8362static void update_vf_memhandle(struct hevc_state_s *hevc,
8363 struct vframe_s *vf, struct PIC_s *pic)
8364{
8365 if (pic->index < 0) {
8366 vf->mem_handle = NULL;
8367 vf->mem_head_handle = NULL;
8368 } else if (vf->type & VIDTYPE_SCATTER) {
8369 vf->mem_handle =
8370 decoder_mmu_box_get_mem_handle(
8371 hevc->mmu_box, pic->index);
8372 vf->mem_head_handle =
8373 decoder_bmmu_box_get_mem_handle(
8374 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8375 } else {
8376 vf->mem_handle =
8377 decoder_bmmu_box_get_mem_handle(
8378 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8379 vf->mem_head_handle = NULL;
8380 /*vf->mem_head_handle =
8381 decoder_bmmu_box_get_mem_handle(
8382 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8383 }
8384 return;
8385}
8386
8387static void fill_frame_info(struct hevc_state_s *hevc,
8388 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8389{
8390 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8391 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8392 vframe_qos->type = 4;
8393 else if (pic->slice_type == I_SLICE)
8394 vframe_qos->type = 1;
8395 else if (pic->slice_type == P_SLICE)
8396 vframe_qos->type = 2;
8397 else if (pic->slice_type == B_SLICE)
8398 vframe_qos->type = 3;
8399/*
8400#define SHOW_QOS_INFO
8401*/
8402 vframe_qos->size = framesize;
8403 vframe_qos->pts = pts;
8404#ifdef SHOW_QOS_INFO
8405 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8406#endif
8407
8408
8409 vframe_qos->max_mv = pic->max_mv;
8410 vframe_qos->avg_mv = pic->avg_mv;
8411 vframe_qos->min_mv = pic->min_mv;
8412#ifdef SHOW_QOS_INFO
8413 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8414 vframe_qos->max_mv,
8415 vframe_qos->avg_mv,
8416 vframe_qos->min_mv);
8417#endif
8418
8419 vframe_qos->max_qp = pic->max_qp;
8420 vframe_qos->avg_qp = pic->avg_qp;
8421 vframe_qos->min_qp = pic->min_qp;
8422#ifdef SHOW_QOS_INFO
8423 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8424 vframe_qos->max_qp,
8425 vframe_qos->avg_qp,
8426 vframe_qos->min_qp);
8427#endif
8428
8429 vframe_qos->max_skip = pic->max_skip;
8430 vframe_qos->avg_skip = pic->avg_skip;
8431 vframe_qos->min_skip = pic->min_skip;
8432#ifdef SHOW_QOS_INFO
8433 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8434 vframe_qos->max_skip,
8435 vframe_qos->avg_skip,
8436 vframe_qos->min_skip);
8437#endif
8438
8439 vframe_qos->num++;
8440
8441 if (hevc->frameinfo_enable)
8442 vdec_fill_frame_info(vframe_qos, 1);
8443}
8444
8445static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8446{
8447#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8448 struct vdec_s *vdec = hw_to_vdec(hevc);
8449#endif
8450 struct vframe_s *vf = NULL;
8451 int stream_offset = pic->stream_offset;
8452 unsigned short slice_type = pic->slice_type;
8453 u32 frame_size;
8454
8455 if (force_disp_pic_index & 0x100) {
8456 /*recycle directly*/
8457 pic->output_ready = 0;
8458 return -1;
8459 }
8460 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8461 hevc_print(hevc, 0,
8462 "fatal error, no available buffer slot.");
8463 return -1;
8464 }
8465 display_frame_count[hevc->index]++;
8466 if (vf) {
8467 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8468 "%s: pic index 0x%x\n",
8469 __func__, pic->index);*/
8470
8471 if (hevc->is_used_v4l) {
8472 vf->v4l_mem_handle
8473 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8474 if (hevc->mmu_enable) {
8475 vf->mm_box.bmmu_box = hevc->bmmu_box;
8476 vf->mm_box.bmmu_idx = VF_BUFFER_IDX(pic->BUF_index);
8477 vf->mm_box.mmu_box = hevc->mmu_box;
8478 vf->mm_box.mmu_idx = pic->index;
8479 }
8480 }
8481
8482#ifdef MULTI_INSTANCE_SUPPORT
8483 if (vdec_frame_based(hw_to_vdec(hevc))) {
8484 vf->pts = pic->pts;
8485 vf->pts_us64 = pic->pts64;
8486 vf->timestamp = pic->timestamp;
8487 }
8488 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8489 stream_offset, &vf->pts, 0) != 0) { */
8490#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8491 else if (vdec->master == NULL) {
8492#else
8493 else {
8494#endif
8495#endif
8496 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8497 "call pts_lookup_offset_us64(0x%x)\n",
8498 stream_offset);
8499 if (pts_lookup_offset_us64
8500 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8501 &frame_size, 0,
8502 &vf->pts_us64) != 0) {
8503#ifdef DEBUG_PTS
8504 hevc->pts_missed++;
8505#endif
8506 vf->pts = 0;
8507 vf->pts_us64 = 0;
8508 }
8509#ifdef DEBUG_PTS
8510 else
8511 hevc->pts_hit++;
8512#endif
8513#ifdef MULTI_INSTANCE_SUPPORT
8514#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8515 } else {
8516 vf->pts = 0;
8517 vf->pts_us64 = 0;
8518 }
8519#else
8520 }
8521#endif
8522#endif
8523 if (pts_unstable && (hevc->frame_dur > 0))
8524 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8525
8526 fill_frame_info(hevc, pic, frame_size, vf->pts);
8527
8528 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8529 && hevc->get_frame_dur) {
8530 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8531
8532 if (pts_diff < 0) {
8533 hevc->pts_mode_switching_count++;
8534 hevc->pts_mode_recovery_count = 0;
8535
8536 if (hevc->pts_mode_switching_count >=
8537 PTS_MODE_SWITCHING_THRESHOLD) {
8538 hevc->pts_mode =
8539 PTS_NONE_REF_USE_DURATION;
8540 hevc_print(hevc, 0,
8541 "HEVC: switch to n_d mode.\n");
8542 }
8543
8544 } else {
8545 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8546
8547 hevc->pts_mode_recovery_count++;
8548 if (hevc->pts_mode_recovery_count > p) {
8549 hevc->pts_mode_switching_count = 0;
8550 hevc->pts_mode_recovery_count = 0;
8551 }
8552 }
8553 }
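		/*
		 * Each backward pts lookup above bumps pts_mode_switching_count;
		 * once it reaches PTS_MODE_SWITCHING_THRESHOLD the decoder stops
		 * trusting the lookup and derives timestamps from frame_dur
		 * (PTS_NONE_REF_USE_DURATION). A run of more than
		 * PTS_MODE_SWITCHING_RECOVERY_THREASHOLD consecutive forward
		 * lookups clears both counters.
		 */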
8554
8555 if (vf->pts != 0)
8556 hevc->last_lookup_pts = vf->pts;
8557
8558 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8559 && (slice_type != 2))
8560 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8561 hevc->last_pts = vf->pts;
8562
8563 if (vf->pts_us64 != 0)
8564 hevc->last_lookup_pts_us64 = vf->pts_us64;
8565
8566 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8567 && (slice_type != 2)) {
8568 vf->pts_us64 =
8569 hevc->last_pts_us64 +
8570 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8571 }
8572 hevc->last_pts_us64 = vf->pts_us64;
8573 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8574 hevc_print(hevc, 0,
8575 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8576 vf->pts, vf->pts_us64);
8577 }
8578
8579 /*
8580 *vf->index:
8581 *(1) vf->type is VIDTYPE_PROGRESSIVE
8582 * and vf->canvas0Addr != vf->canvas1Addr,
8583 * vf->index[7:0] is the index of top pic
8584 * vf->index[15:8] is the index of bot pic
8585 *(2) other cases,
8586 * only vf->index[7:0] is used
8587 * vf->index[15:8] == 0xff
8588 */
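		/*
		 * For example, vf->index == 0xff02 describes a vf backed only by
		 * pic index 2, while 0x0302 (built up in process_pending_vframe)
		 * pairs top pic index 2 with bottom pic index 3.
		 */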
8589 vf->index = 0xff00 | pic->index;
8590#if 1
8591/*SUPPORT_10BIT*/
8592 if (pic->double_write_mode & 0x10) {
8593 /* double write only */
8594 vf->compBodyAddr = 0;
8595 vf->compHeadAddr = 0;
8596 } else {
8597
8598 if (hevc->mmu_enable) {
8599 vf->compBodyAddr = 0;
8600 vf->compHeadAddr = pic->header_adr;
8601 } else {
8602 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8603 vf->compHeadAddr = pic->mc_y_adr +
8604 pic->losless_comp_body_size;
8605 vf->mem_head_handle = NULL;
8606 }
8607
8608 /*head adr*/
8609 vf->canvas0Addr = vf->canvas1Addr = 0;
8610 }
8611 if (pic->double_write_mode) {
8612 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8613 vf->type |= VIDTYPE_VIU_NV21;
8614
8615 if ((pic->double_write_mode == 3) &&
8616 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8617 vf->type |= VIDTYPE_COMPRESS;
8618 if (hevc->mmu_enable)
8619 vf->type |= VIDTYPE_SCATTER;
8620 }
8621#ifdef MULTI_INSTANCE_SUPPORT
8622 if (hevc->m_ins_flag &&
8623 (get_dbg_flag(hevc)
8624 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8625 vf->canvas0Addr = vf->canvas1Addr = -1;
8626 vf->plane_num = 2;
8627 vf->canvas0_config[0] =
8628 pic->canvas_config[0];
8629 vf->canvas0_config[1] =
8630 pic->canvas_config[1];
8631
8632 vf->canvas1_config[0] =
8633 pic->canvas_config[0];
8634 vf->canvas1_config[1] =
8635 pic->canvas_config[1];
8636
8637 } else
8638#endif
8639 vf->canvas0Addr = vf->canvas1Addr
8640 = spec2canvas(pic);
8641 } else {
8642 vf->canvas0Addr = vf->canvas1Addr = 0;
8643 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8644 if (hevc->mmu_enable)
8645 vf->type |= VIDTYPE_SCATTER;
8646 }
8647 vf->compWidth = pic->width;
8648 vf->compHeight = pic->height;
8649 update_vf_memhandle(hevc, vf, pic);
8650 switch (pic->bit_depth_luma) {
8651 case 9:
8652 vf->bitdepth = BITDEPTH_Y9;
8653 break;
8654 case 10:
8655 vf->bitdepth = BITDEPTH_Y10;
8656 break;
8657 default:
8658 vf->bitdepth = BITDEPTH_Y8;
8659 break;
8660 }
8661 switch (pic->bit_depth_chroma) {
8662 case 9:
8663 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8664 break;
8665 case 10:
8666 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8667 break;
8668 default:
8669 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8670 break;
8671 }
8672 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8673 vf->bitdepth =
8674 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8675 if (pic->mem_saving_mode == 1)
8676 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8677#else
8678 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8679 vf->type |= VIDTYPE_VIU_NV21;
8680 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8681#endif
8682 set_frame_info(hevc, vf, pic);
8683 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8684 /* hevc_print(hevc, 0,
8685 "aaa: %d/%d, %d/%d\n",
8686 vf->width,vf->height, pic->width, pic->height); */
8687 vf->width = pic->width;
8688 vf->height = pic->height;
8689
8690 if (force_w_h != 0) {
8691 vf->width = (force_w_h >> 16) & 0xffff;
8692 vf->height = force_w_h & 0xffff;
8693 }
8694 if (force_fps & 0x100) {
8695 u32 rate = force_fps & 0xff;
8696
8697 if (rate)
8698 vf->duration = 96000/rate;
8699 else
8700 vf->duration = 0;
8701 }
8702 if (force_fps & 0x200) {
8703 vf->pts = 0;
8704 vf->pts_us64 = 0;
8705 }
8706 /*
8707 * !!! to do ...
8708		 * need to move the code below into get_new_pic(),
8709		 * since hevc->xxx may only be used by the currently decoded pic
8710 */
8711 if (pic->conformance_window_flag &&
8712 (get_dbg_flag(hevc) &
8713 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
8714 unsigned int SubWidthC, SubHeightC;
8715
8716 switch (pic->chroma_format_idc) {
8717 case 1:
8718 SubWidthC = 2;
8719 SubHeightC = 2;
8720 break;
8721 case 2:
8722 SubWidthC = 2;
8723 SubHeightC = 1;
8724 break;
8725 default:
8726 SubWidthC = 1;
8727 SubHeightC = 1;
8728 break;
8729 }
8730 vf->width -= SubWidthC *
8731 (pic->conf_win_left_offset +
8732 pic->conf_win_right_offset);
8733 vf->height -= SubHeightC *
8734 (pic->conf_win_top_offset +
8735 pic->conf_win_bottom_offset);
8736
8737 vf->compWidth -= SubWidthC *
8738 (pic->conf_win_left_offset +
8739 pic->conf_win_right_offset);
8740 vf->compHeight -= SubHeightC *
8741 (pic->conf_win_top_offset +
8742 pic->conf_win_bottom_offset);
8743
8744 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
8745 hevc_print(hevc, 0,
8746 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
8747 pic->chroma_format_idc,
8748 pic->conf_win_left_offset,
8749 pic->conf_win_right_offset,
8750 pic->conf_win_top_offset,
8751 pic->conf_win_bottom_offset,
8752 vf->width, vf->height, vf->compWidth, vf->compHeight);
8753 }
8754
8755 vf->width = vf->width /
8756 get_double_write_ratio(hevc, pic->double_write_mode);
8757 vf->height = vf->height /
8758 get_double_write_ratio(hevc, pic->double_write_mode);
8759#ifdef HEVC_PIC_STRUCT_SUPPORT
8760 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
8761 struct vframe_s *vf2;
8762
8763 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8764 hevc_print(hevc, 0,
8765 "pic_struct = %d index 0x%x\n",
8766 pic->pic_struct,
8767 pic->index);
8768
8769 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8770 hevc_print(hevc, 0,
8771 "fatal error, no available buffer slot.");
8772 return -1;
8773 }
8774 pic->vf_ref = 2;
8775 vf->duration = vf->duration>>1;
8776 memcpy(vf2, vf, sizeof(struct vframe_s));
8777
8778 if (pic->pic_struct == 3) {
8779 vf->type = VIDTYPE_INTERLACE_TOP
8780 | VIDTYPE_VIU_NV21;
8781 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8782 | VIDTYPE_VIU_NV21;
8783 } else {
8784 vf->type = VIDTYPE_INTERLACE_BOTTOM
8785 | VIDTYPE_VIU_NV21;
8786 vf2->type = VIDTYPE_INTERLACE_TOP
8787 | VIDTYPE_VIU_NV21;
8788 }
8789 hevc->vf_pre_count++;
8790 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8791 kfifo_put(&hevc->display_q,
8792 (const struct vframe_s *)vf);
8793 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8794 hevc->vf_pre_count++;
8795 kfifo_put(&hevc->display_q,
8796 (const struct vframe_s *)vf2);
8797 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8798 } else if (pic->pic_struct == 5
8799 || pic->pic_struct == 6) {
8800 struct vframe_s *vf2, *vf3;
8801
8802 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8803 hevc_print(hevc, 0,
8804 "pic_struct = %d index 0x%x\n",
8805 pic->pic_struct,
8806 pic->index);
8807
8808 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
8809 hevc_print(hevc, 0,
8810 "fatal error, no available buffer slot.");
8811 return -1;
8812 }
8813 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
8814 hevc_print(hevc, 0,
8815 "fatal error, no available buffer slot.");
8816 return -1;
8817 }
8818 pic->vf_ref = 3;
8819 vf->duration = vf->duration/3;
8820 memcpy(vf2, vf, sizeof(struct vframe_s));
8821 memcpy(vf3, vf, sizeof(struct vframe_s));
8822
8823 if (pic->pic_struct == 5) {
8824 vf->type = VIDTYPE_INTERLACE_TOP
8825 | VIDTYPE_VIU_NV21;
8826 vf2->type = VIDTYPE_INTERLACE_BOTTOM
8827 | VIDTYPE_VIU_NV21;
8828 vf3->type = VIDTYPE_INTERLACE_TOP
8829 | VIDTYPE_VIU_NV21;
8830 } else {
8831 vf->type = VIDTYPE_INTERLACE_BOTTOM
8832 | VIDTYPE_VIU_NV21;
8833 vf2->type = VIDTYPE_INTERLACE_TOP
8834 | VIDTYPE_VIU_NV21;
8835 vf3->type = VIDTYPE_INTERLACE_BOTTOM
8836 | VIDTYPE_VIU_NV21;
8837 }
8838 hevc->vf_pre_count++;
8839 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8840 kfifo_put(&hevc->display_q,
8841 (const struct vframe_s *)vf);
8842 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8843 hevc->vf_pre_count++;
8844 kfifo_put(&hevc->display_q,
8845 (const struct vframe_s *)vf2);
8846 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
8847 hevc->vf_pre_count++;
8848 kfifo_put(&hevc->display_q,
8849 (const struct vframe_s *)vf3);
8850 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
8851
8852 } else if (pic->pic_struct == 9
8853 || pic->pic_struct == 10) {
8854 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8855 hevc_print(hevc, 0,
8856 "pic_struct = %d index 0x%x\n",
8857 pic->pic_struct,
8858 pic->index);
8859
8860 pic->vf_ref = 1;
8861 /* process previous pending vf*/
8862 process_pending_vframe(hevc,
8863 pic, (pic->pic_struct == 9));
8864
8865 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8866 /* process current vf */
8867 kfifo_put(&hevc->pending_q,
8868 (const struct vframe_s *)vf);
8869 vf->height <<= 1;
8870 if (pic->pic_struct == 9) {
8871 vf->type = VIDTYPE_INTERLACE_TOP
8872 | VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8873 process_pending_vframe(hevc,
8874 hevc->pre_bot_pic, 0);
8875 } else {
8876 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8877 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8878 vf->index = (pic->index << 8) | 0xff;
8879 process_pending_vframe(hevc,
8880 hevc->pre_top_pic, 1);
8881 }
8882
8883 if (hevc->vf_pre_count == 0)
8884 hevc->vf_pre_count++;
8885
8886 /**/
8887 if (pic->pic_struct == 9)
8888 hevc->pre_top_pic = pic;
8889 else
8890 hevc->pre_bot_pic = pic;
8891
8892 } else if (pic->pic_struct == 11
8893 || pic->pic_struct == 12) {
8894 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8895 hevc_print(hevc, 0,
8896 "pic_struct = %d index 0x%x\n",
8897 pic->pic_struct,
8898 pic->index);
8899 pic->vf_ref = 1;
8900 /* process previous pending vf*/
8901 process_pending_vframe(hevc, pic,
8902 (pic->pic_struct == 11));
8903
8904 /* put current into pending q */
8905 vf->height <<= 1;
8906 if (pic->pic_struct == 11)
8907 vf->type = VIDTYPE_INTERLACE_TOP |
8908 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8909 else {
8910 vf->type = VIDTYPE_INTERLACE_BOTTOM |
8911 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8912 vf->index = (pic->index << 8) | 0xff;
8913 }
8914 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8915 kfifo_put(&hevc->pending_q,
8916 (const struct vframe_s *)vf);
8917 if (hevc->vf_pre_count == 0)
8918 hevc->vf_pre_count++;
8919
8920 /**/
8921 if (pic->pic_struct == 11)
8922 hevc->pre_top_pic = pic;
8923 else
8924 hevc->pre_bot_pic = pic;
8925
8926 } else {
8927 pic->vf_ref = 1;
8928
8929 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8930 hevc_print(hevc, 0,
8931 "pic_struct = %d index 0x%x\n",
8932 pic->pic_struct,
8933 pic->index);
8934
8935 switch (pic->pic_struct) {
8936 case 7:
8937 vf->duration <<= 1;
8938 break;
8939 case 8:
8940 vf->duration = vf->duration * 3;
8941 break;
8942 case 1:
8943 vf->height <<= 1;
8944 vf->type = VIDTYPE_INTERLACE_TOP |
8945 VIDTYPE_VIU_NV21 | VIDTYPE_VIU_FIELD;
8946 process_pending_vframe(hevc, pic, 1);
8947 hevc->pre_top_pic = pic;
8948 break;
8949 case 2:
8950 vf->height <<= 1;
8951 vf->type = VIDTYPE_INTERLACE_BOTTOM
8952 | VIDTYPE_VIU_NV21
8953 | VIDTYPE_VIU_FIELD;
8954 process_pending_vframe(hevc, pic, 0);
8955 hevc->pre_bot_pic = pic;
8956 break;
8957 }
8958 hevc->vf_pre_count++;
8959 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8960 kfifo_put(&hevc->display_q,
8961 (const struct vframe_s *)vf);
8962 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8963 }
8964#else
8965 vf->type_original = vf->type;
8966 pic->vf_ref = 1;
8967 hevc->vf_pre_count++;
8968 decoder_do_frame_check(hw_to_vdec(hevc), vf);
8969 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
8970 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8971
8972 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8973 hevc_print(hevc, 0,
8974			"%s(type %d index 0x%x poc %d/%d) pts(%d,%lld) dur %d\n",
8975 __func__, vf->type, vf->index,
8976 get_pic_poc(hevc, vf->index & 0xff),
8977 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8978 vf->pts, vf->pts_us64,
8979 vf->duration);
8980#endif
8981#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8982 /*count info*/
8983 vdec_count_info(gvs, 0, stream_offset);
8984#endif
8985 hw_to_vdec(hevc)->vdec_fps_detec(hw_to_vdec(hevc)->id);
8986 if (without_display_mode == 0) {
8987 vf_notify_receiver(hevc->provider_name,
8988 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
8989 }
8990 else
8991 vh265_vf_put(vh265_vf_get(vdec), vdec);
8992 }
8993
8994 return 0;
8995}
8996
8997static int notify_v4l_eos(struct vdec_s *vdec)
8998{
8999 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
9000 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
9001 struct vframe_s *vf = &hw->vframe_dummy;
9002 struct vdec_v4l2_buffer *fb = NULL;
9003 int index = INVALID_IDX;
9004 ulong expires;
9005
9006 if (hw->is_used_v4l && hw->eos) {
9007 expires = jiffies + msecs_to_jiffies(2000);
9008 while (INVALID_IDX == (index = get_free_buf_idx(hw))) {
9009 if (time_after(jiffies, expires))
9010 break;
9011 }
9012
9013 if (index == INVALID_IDX) {
9014 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb) < 0) {
9015 pr_err("[%d] EOS get free buff fail.\n", ctx->id);
9016 return -1;
9017 }
9018 }
9019
9020 vf->type |= VIDTYPE_V4L_EOS;
9021 vf->timestamp = ULONG_MAX;
9022 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
9023 vf->v4l_mem_handle = (index == INVALID_IDX) ? (ulong)fb :
9024 hw->m_BUF[index].v4l_ref_buf_addr;
9025 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
9026 vf_notify_receiver(vdec->vf_provider_name,
9027 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9028
9029 pr_info("[%d] H265 EOS notify.\n", ctx->id);
9030 }
9031
9032 return 0;
9033}
9034
9035static void process_nal_sei(struct hevc_state_s *hevc,
9036 int payload_type, int payload_size)
9037{
9038 unsigned short data;
9039
9040 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9041 hevc_print(hevc, 0,
9042 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
9043 payload_type, payload_size);
9044
9045 if (payload_type == 137) {
9046 int i, j;
9047 /* MASTERING_DISPLAY_COLOUR_VOLUME */
9048 if (payload_size >= 24) {
9049 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9050 hevc_print(hevc, 0,
9051 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
9052 for (i = 0; i < 3; i++) {
9053 for (j = 0; j < 2; j++) {
9054 data =
9055 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9056 hevc->primaries[i][j] = data;
9057 WRITE_HREG(HEVC_SHIFT_COMMAND,
9058 (1<<7)|16);
9059 if (get_dbg_flag(hevc) &
9060 H265_DEBUG_PRINT_SEI)
9061 hevc_print(hevc, 0,
9062 "\t\tprimaries[%1d][%1d] = %04x\n",
9063 i, j, hevc->primaries[i][j]);
9064 }
9065 }
9066 for (i = 0; i < 2; i++) {
9067 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9068 hevc->white_point[i] = data;
9069 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
9070 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9071 hevc_print(hevc, 0,
9072 "\t\twhite_point[%1d] = %04x\n",
9073 i, hevc->white_point[i]);
9074 }
9075 for (i = 0; i < 2; i++) {
9076 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9077 hevc->luminance[i] = data << 16;
9078 WRITE_HREG(HEVC_SHIFT_COMMAND,
9079 (1<<7)|16);
9080 data =
9081 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9082 hevc->luminance[i] |= data;
9083 WRITE_HREG(HEVC_SHIFT_COMMAND,
9084 (1<<7)|16);
9085 if (get_dbg_flag(hevc) &
9086 H265_DEBUG_PRINT_SEI)
9087 hevc_print(hevc, 0,
9088 "\t\tluminance[%1d] = %08x\n",
9089 i, hevc->luminance[i]);
9090 }
9091 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
9092 }
9093 payload_size -= 24;
9094 while (payload_size > 0) {
9095 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
9096 payload_size--;
9097 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
9098 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
9099 }
9100 }
9101}
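/*
 * Note: process_nal_sei() reads its payload through the HEVC_SHIFTED_DATA
 * window; the (1 << 7) | N values written to HEVC_SHIFT_COMMAND appear to
 * encode how many bits to consume (16 after each 16-bit field, 8 when
 * skipping a byte), judging from the usage above.
 */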
9102
9103static int hevc_recover(struct hevc_state_s *hevc)
9104{
9105 int ret = -1;
9106 u32 rem;
9107 u64 shift_byte_count64;
9108 unsigned int hevc_shift_byte_count;
9109 unsigned int hevc_stream_start_addr;
9110 unsigned int hevc_stream_end_addr;
9111 unsigned int hevc_stream_rd_ptr;
9112 unsigned int hevc_stream_wr_ptr;
9113 unsigned int hevc_stream_control;
9114 unsigned int hevc_stream_fifo_ctl;
9115 unsigned int hevc_stream_buf_size;
9116
9117 mutex_lock(&vh265_mutex);
9118#if 0
9119 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9120 int ii;
9121
9122 for (ii = 0; ii < 4; ii++)
9123 hevc_print(hevc, 0,
9124 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9125 if (((i + ii) & 0xf) == 0)
9126 hevc_print(hevc, 0, "\n");
9127 }
9128#endif
9129#define ES_VID_MAN_RD_PTR (1<<0)
9130 if (!hevc->init_flag) {
9131		hevc_print(hevc, 0, "h265 has stopped, abort recovery!\n");
9132 mutex_unlock(&vh265_mutex);
9133 return ret;
9134 }
9135 amhevc_stop();
9136 msleep(20);
9137 ret = 0;
9138 /* reset */
9139 WRITE_PARSER_REG(PARSER_VIDEO_RP, READ_VREG(HEVC_STREAM_RD_PTR));
9140 SET_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9141
9142 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
9143 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
9144 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
9145 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
9146 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
9147 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
9148 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
9149
9150	/* The HEVC stream buffer will be reset and restarted
9151	 * from the current hevc_stream_rd_ptr position.
9152	 */
9153	/* Recalculate the HEVC_SHIFT_BYTE_COUNT value for the new position. */
9154 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9155 if ((hevc->shift_byte_count_lo & (1 << 31))
9156 && ((hevc_shift_byte_count & (1 << 31)) == 0))
9157 hevc->shift_byte_count_hi++;
9158
9159 hevc->shift_byte_count_lo = hevc_shift_byte_count;
9160 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9161 hevc->shift_byte_count_lo;
9162 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9163 shift_byte_count64 -= rem;
9164 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9165
9166 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9167 shift_byte_count64 += hevc_stream_buf_size;
9168
9169 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9170 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
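	/*
	 * Example with illustrative numbers: for buf_size 0x100000,
	 * shift_byte_count64 0x5234567 (rem 0x34567) and
	 * rd_ptr - start_addr == 0x2000, rem > 0x2000 means the read pointer
	 * has already wrapped past the buffer start, so the byte count is
	 * rounded up to the next lap:
	 * 0x5200000 + 0x2000 + 0x100000 = 0x5302000.
	 */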
9171
9172 WRITE_VREG(DOS_SW_RESET3,
9173 /* (1<<2)| */
9174 (1 << 3) | (1 << 4) | (1 << 8) |
9175 (1 << 11) | (1 << 12) | (1 << 14)
9176 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9177 WRITE_VREG(DOS_SW_RESET3, 0);
9178
9179 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9180 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9181 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9182 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9183 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9184 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9185 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9186
9187 hevc_config_work_space_hw(hevc);
9188 decoder_hw_reset();
9189
9190 hevc->have_vps = 0;
9191 hevc->have_sps = 0;
9192 hevc->have_pps = 0;
9193
9194 hevc->have_valid_start_slice = 0;
9195
9196 if (get_double_write_mode(hevc) & 0x10)
9197 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9198			0x1 << 31 /* Enable NV21 reference read mode for MC */
9199 );
9200
9201 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9202 /* clear mailbox interrupt */
9203 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9204 /* enable mailbox interrupt */
9205 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9206 /* disable PSCALE for hardware sharing */
9207 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9208
9209 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9210
9211 WRITE_VREG(DEBUG_REG1, 0x0);
9212
9213 if ((error_handle_policy & 1) == 0) {
9214 if ((error_handle_policy & 4) == 0) {
9215 /* ucode auto mode, and do not check vps/sps/pps/idr */
9216 WRITE_VREG(NAL_SEARCH_CTL,
9217 0xc);
9218 } else {
9219			WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual NAL parsing */
9220 }
9221 } else {
9222		WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual NAL parsing */
9223 }
9224
9225 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9226 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9227 WRITE_VREG(NAL_SEARCH_CTL,
9228 READ_VREG(NAL_SEARCH_CTL)
9229 | ((parser_sei_enable & 0x7) << 17));
9230#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9231 WRITE_VREG(NAL_SEARCH_CTL,
9232 READ_VREG(NAL_SEARCH_CTL) |
9233 ((parser_dolby_vision_enable & 0x1) << 20));
9234#endif
9235 config_decode_mode(hevc);
9236 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9237
9238 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9239 /* amhevc_disable(); */
9240 /* return -EBUSY; */
9241 /* } */
9242#if 0
9243 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9244 int ii;
9245
9246 for (ii = 0; ii < 4; ii++) {
9247 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9248 hevc_print(hevc, 0,
9249 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9250 }
9251 if (((i + ii) & 0xf) == 0)
9252 hevc_print(hevc, 0, "\n");
9253 }
9254#endif
9255 init_pic_list_hw(hevc);
9256
9257 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9258 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9259
9260#ifdef SWAP_HEVC_UCODE
9261 if (!tee_enabled() && hevc->is_swap &&
9262 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9263 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9264 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9265 }
9266#endif
9267 amhevc_start();
9268
9269 /* skip, search next start code */
9270 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9271 hevc->skip_flag = 1;
9272#ifdef ERROR_HANDLE_DEBUG
9273 if (dbg_nal_skip_count & 0x20000) {
9274 dbg_nal_skip_count &= ~0x20000;
9275 mutex_unlock(&vh265_mutex);
9276 return ret;
9277 }
9278#endif
9279 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9280	/* Interrupt Amrisc to execute */
9281 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9282#ifdef MULTI_INSTANCE_SUPPORT
9283 if (!hevc->m_ins_flag)
9284#endif
9285 hevc->first_pic_after_recover = 1;
9286 mutex_unlock(&vh265_mutex);
9287 return ret;
9288}
9289
9290static void dump_aux_buf(struct hevc_state_s *hevc)
9291{
9292 int i;
9293 unsigned short *aux_adr =
9294 (unsigned short *)
9295 hevc->aux_addr;
9296 unsigned int aux_size =
9297 (READ_VREG(HEVC_AUX_DATA_SIZE)
9298 >> 16) << 4;
9299
9300 if (hevc->prefix_aux_size > 0) {
9301 hevc_print(hevc, 0,
9302 "prefix aux: (size %d)\n",
9303 aux_size);
9304 for (i = 0; i <
9305 (aux_size >> 1); i++) {
9306 hevc_print_cont(hevc, 0,
9307 "%04x ",
9308 *(aux_adr + i));
9309 if (((i + 1) & 0xf)
9310 == 0)
9311 hevc_print_cont(hevc,
9312 0, "\n");
9313 }
9314 }
9315 if (hevc->suffix_aux_size > 0) {
9316 aux_adr = (unsigned short *)
9317 (hevc->aux_addr +
9318 hevc->prefix_aux_size);
9319 aux_size =
9320 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9321 << 4;
9322 hevc_print(hevc, 0,
9323 "suffix aux: (size %d)\n",
9324 aux_size);
9325 for (i = 0; i <
9326 (aux_size >> 1); i++) {
9327 hevc_print_cont(hevc, 0,
9328 "%04x ", *(aux_adr + i));
9329 if (((i + 1) & 0xf) == 0)
9330 hevc_print_cont(hevc, 0, "\n");
9331 }
9332 }
9333}
9334
9335#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9336static void dolby_get_meta(struct hevc_state_s *hevc)
9337{
9338 struct vdec_s *vdec = hw_to_vdec(hevc);
9339
9340 if (get_dbg_flag(hevc) &
9341 H265_DEBUG_BUFMGR_MORE)
9342 dump_aux_buf(hevc);
9343 if (vdec->dolby_meta_with_el || vdec->slave) {
9344 set_aux_data(hevc,
9345 hevc->cur_pic, 0, 0);
9346 } else if (vdec->master) {
9347 struct hevc_state_s *hevc_ba =
9348 (struct hevc_state_s *)
9349 vdec->master->private;
9350 /*do not use hevc_ba*/
9351 set_aux_data(hevc,
9352 hevc_ba->cur_pic,
9353 0, 1);
9354 set_aux_data(hevc,
9355 hevc->cur_pic, 0, 2);
9356 }
9357}
9358#endif
9359
9360static void read_decode_info(struct hevc_state_s *hevc)
9361{
9362 uint32_t decode_info =
9363 READ_HREG(HEVC_DECODE_INFO);
9364 hevc->start_decoding_flag |=
9365 (decode_info & 0xff);
9366 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9367}
9368
9369static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9370{
9371 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9372 unsigned int dec_status = hevc->dec_status;
9373 int i, ret;
9374
9375#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9376 struct vdec_s *vdec = hw_to_vdec(hevc);
9377#endif
9378
9379 if (hevc->eos)
9380 return IRQ_HANDLED;
9381 if (
9382#ifdef MULTI_INSTANCE_SUPPORT
9383 (!hevc->m_ins_flag) &&
9384#endif
9385 hevc->error_flag == 1) {
9386 if ((error_handle_policy & 0x10) == 0) {
9387 if (hevc->cur_pic) {
9388 int current_lcu_idx =
9389 READ_VREG(HEVC_PARSER_LCU_START)
9390 & 0xffffff;
9391 if (current_lcu_idx <
9392 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9393 hevc->cur_pic->error_mark = 1;
9394
9395 }
9396 }
9397 if ((error_handle_policy & 1) == 0) {
9398 hevc->error_skip_nal_count = 1;
9399			/* manually search for NAL units; skip error_skip_nal_count
9400			 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9401			 */
9402 WRITE_VREG(NAL_SEARCH_CTL,
9403 (error_skip_nal_count << 4) | 0x1);
9404 } else {
9405 hevc->error_skip_nal_count = error_skip_nal_count;
9406			WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual NAL parsing */
9407 }
9408 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9409#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9410 || vdec->master
9411 || vdec->slave
9412#endif
9413 ) {
9414 WRITE_VREG(NAL_SEARCH_CTL,
9415 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9416 }
9417 WRITE_VREG(NAL_SEARCH_CTL,
9418 READ_VREG(NAL_SEARCH_CTL)
9419 | ((parser_sei_enable & 0x7) << 17));
9420#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9421 WRITE_VREG(NAL_SEARCH_CTL,
9422 READ_VREG(NAL_SEARCH_CTL) |
9423 ((parser_dolby_vision_enable & 0x1) << 20));
9424#endif
9425 config_decode_mode(hevc);
9426 /* search new nal */
9427 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9428		/* Interrupt Amrisc to execute */
9429 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9430
9431 /* hevc_print(hevc, 0,
9432 *"%s: error handle\n", __func__);
9433 */
9434 hevc->error_flag = 2;
9435 return IRQ_HANDLED;
9436 } else if (
9437#ifdef MULTI_INSTANCE_SUPPORT
9438 (!hevc->m_ins_flag) &&
9439#endif
9440 hevc->error_flag == 3) {
9441 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9442 hevc_recover(hevc);
9443 hevc->error_flag = 0;
9444
9445 if ((error_handle_policy & 0x10) == 0) {
9446 if (hevc->cur_pic) {
9447 int current_lcu_idx =
9448 READ_VREG(HEVC_PARSER_LCU_START)
9449 & 0xffffff;
9450 if (current_lcu_idx <
9451 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9452 hevc->cur_pic->error_mark = 1;
9453
9454 }
9455 }
9456 if ((error_handle_policy & 1) == 0) {
9457			/* need to skip some data when
9458			 * error_flag == 3 is triggered,
9459			 */
9460			/* to avoid hevc_recover() being called
9461			 * many times at the same bitstream position
9462			 */
9463 hevc->error_skip_nal_count = 1;
9464			/* manually search for NAL units; skip error_skip_nal_count
9465			 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9466			 */
9467 WRITE_VREG(NAL_SEARCH_CTL,
9468 (error_skip_nal_count << 4) | 0x1);
9469 }
9470
9471 if ((error_handle_policy & 0x2) == 0) {
9472 hevc->have_vps = 1;
9473 hevc->have_sps = 1;
9474 hevc->have_pps = 1;
9475 }
9476 return IRQ_HANDLED;
9477 }
9478 if (!hevc->m_ins_flag) {
9479 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9480 if ((hevc->shift_byte_count_lo & (1 << 31))
9481 && ((i & (1 << 31)) == 0))
9482 hevc->shift_byte_count_hi++;
9483 hevc->shift_byte_count_lo = i;
9484 }
9485#ifdef MULTI_INSTANCE_SUPPORT
9486 mutex_lock(&hevc->chunks_mutex);
9487 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9488 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9489 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9490 && (hevc->chunk)) {
9491 hevc->cur_pic->pts = hevc->chunk->pts;
9492 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9493 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9494 }
9495 mutex_unlock(&hevc->chunks_mutex);
9496
9497 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9498 dec_status == HEVC_DECODE_BUFEMPTY2) {
9499 if (hevc->m_ins_flag) {
9500 read_decode_info(hevc);
9501 if (vdec_frame_based(hw_to_vdec(hevc))) {
9502 hevc->empty_flag = 1;
9503 goto pic_done;
9504 } else {
9505 if (
9506#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9507 vdec->master ||
9508 vdec->slave ||
9509#endif
9510 (data_resend_policy & 0x1)) {
9511 hevc->dec_result = DEC_RESULT_AGAIN;
9512 amhevc_stop();
9513 restore_decode_state(hevc);
9514 } else
9515 hevc->dec_result = DEC_RESULT_GET_DATA;
9516 }
9517 reset_process_time(hevc);
9518 vdec_schedule_work(&hevc->work);
9519 }
9520 return IRQ_HANDLED;
9521 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9522 (dec_status == HEVC_NAL_DECODE_DONE)
9523 ) {
9524 if (hevc->m_ins_flag) {
9525 read_decode_info(hevc);
9526 if (vdec_frame_based(hw_to_vdec(hevc))) {
9527 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9528 hevc->empty_flag = 1;
9529 goto pic_done;
9530 } else {
9531 hevc->dec_result = DEC_RESULT_AGAIN;
9532 amhevc_stop();
9533 restore_decode_state(hevc);
9534 }
9535
9536 reset_process_time(hevc);
9537 vdec_schedule_work(&hevc->work);
9538 }
9539
9540 return IRQ_HANDLED;
9541 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9542 if (hevc->m_ins_flag) {
9543 struct PIC_s *pic;
9544 struct PIC_s *pic_display;
9545 int decoded_poc;
9546#ifdef DETREFILL_ENABLE
9547 if (hevc->is_swap &&
9548 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9549 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9550 && READ_VREG(HEVC_SAO_DBG_MODE0))
9551 hevc->delrefill_check = 2;
9552 }
9553#endif
9554 hevc->empty_flag = 0;
9555pic_done:
9556 if (input_frame_based(hw_to_vdec(hevc)) &&
9557 frmbase_cont_bitlevel != 0 &&
9558 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9559 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9560 > frmbase_cont_bitlevel)) {
9561				/* handle the case of multiple pictures in one packet */
9562 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9563					"%s has more data (index=%d, size=0x%x, shiftcnt=0x%x)\n",
9564 __func__,
9565 hevc->decode_idx, hevc->decode_size,
9566 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9567 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9568 start_process_time(hevc);
9569 return IRQ_HANDLED;
9570 }
9571
9572 read_decode_info(hevc);
9573 get_picture_qos_info(hevc);
9574#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9575 hevc->start_parser_type = 0;
9576 hevc->switch_dvlayer_flag = 0;
9577#endif
9578 hevc->decoded_poc = hevc->curr_POC;
9579 hevc->decoding_pic = NULL;
9580 hevc->dec_result = DEC_RESULT_DONE;
9581#ifdef DETREFILL_ENABLE
9582 if (hevc->is_swap &&
9583 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9584 if (hevc->delrefill_check != 2)
9585#endif
9586
9587 amhevc_stop();
9588
9589 reset_process_time(hevc);
9590
9591 if (hevc->vf_pre_count == 0) {
9592 decoded_poc = hevc->curr_POC;
9593 pic = get_pic_by_POC(hevc, decoded_poc);
9594 if (pic && (pic->POC != INVALID_POC)) {
9595 /*PB skip control */
9596 if (pic->error_mark == 0
9597 && hevc->PB_skip_mode == 1) {
9598 /* start decoding after
9599 * first I
9600 */
9601 hevc->ignore_bufmgr_error |= 0x1;
9602 }
9603 if (hevc->ignore_bufmgr_error & 1) {
9604 if (hevc->PB_skip_count_after_decoding > 0) {
9605 hevc->PB_skip_count_after_decoding--;
9606 } else {
9607 /* start displaying */
9608 hevc->ignore_bufmgr_error |= 0x2;
9609 }
9610 }
9611 if (hevc->mmu_enable
9612 && ((hevc->double_write_mode & 0x10) == 0)) {
9613 if (!hevc->m_ins_flag) {
9614 hevc->used_4k_num =
9615 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9616
9617 if ((!is_skip_decoding(hevc, pic)) &&
9618 (hevc->used_4k_num >= 0) &&
9619 (hevc->cur_pic->scatter_alloc
9620 == 1)) {
9621 hevc_print(hevc,
9622 H265_DEBUG_BUFMGR_MORE,
9623 "%s pic index %d scatter_alloc %d page_start %d\n",
9624 "decoder_mmu_box_free_idx_tail",
9625 hevc->cur_pic->index,
9626 hevc->cur_pic->scatter_alloc,
9627 hevc->used_4k_num);
9628 decoder_mmu_box_free_idx_tail(
9629 hevc->mmu_box,
9630 hevc->cur_pic->index,
9631 hevc->used_4k_num);
9632 hevc->cur_pic->scatter_alloc
9633 = 2;
9634 }
9635 hevc->used_4k_num = -1;
9636 }
9637 }
9638
9639 pic->output_mark = 1;
9640 pic->recon_mark = 1;
9641 }
9642 check_pic_decoded_error(hevc,
9643 READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff);
9644 if (hevc->cur_pic != NULL &&
9645 (READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff) == 0
9646 && (hevc->lcu_x_num * hevc->lcu_y_num != 1))
9647 hevc->cur_pic->error_mark = 1;
9648force_output:
9649 pic_display = output_pic(hevc, 1);
9650 if (pic_display) {
9651 if ((pic_display->error_mark &&
9652 ((hevc->ignore_bufmgr_error &
9653 0x2) == 0))
9654 || (get_dbg_flag(hevc) &
9655 H265_DEBUG_DISPLAY_CUR_FRAME)
9656 || (get_dbg_flag(hevc) &
9657 H265_DEBUG_NO_DISPLAY)) {
9658 pic_display->output_ready = 0;
9659 if (get_dbg_flag(hevc) &
9660 H265_DEBUG_BUFMGR) {
9661 hevc_print(hevc, 0,
9662 "[BM] Display: POC %d, ",
9663 pic_display->POC);
9664 hevc_print_cont(hevc, 0,
9665 "decoding index %d ==> ",
9666 pic_display->
9667 decode_idx);
9668 hevc_print_cont(hevc, 0,
9669 "Debug or err,recycle it\n");
9670 }
9671 } else {
9672 if (pic_display->
9673 slice_type != 2) {
9674 pic_display->output_ready = 0;
9675 } else {
9676 prepare_display_buf
9677 (hevc,
9678 pic_display);
9679 hevc->first_pic_flag = 1;
9680 }
9681 }
9682 }
9683 }
9684
9685 vdec_schedule_work(&hevc->work);
9686 }
9687
9688 return IRQ_HANDLED;
9689#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9690 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9691 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9692 if (hevc->m_ins_flag) {
9693 unsigned char next_parser_type =
9694 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
9695 read_decode_info(hevc);
9696
9697 if (vdec->slave &&
9698 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
9699 /*cur is base, found enhance*/
9700 struct hevc_state_s *hevc_el =
9701 (struct hevc_state_s *)
9702 vdec->slave->private;
9703 hevc->switch_dvlayer_flag = 1;
9704 hevc->no_switch_dvlayer_count = 0;
9705 hevc_el->start_parser_type =
9706 next_parser_type;
9707 hevc_print(hevc, H265_DEBUG_DV,
9708 "switch (poc %d) to el\n",
9709 hevc->cur_pic ?
9710 hevc->cur_pic->POC :
9711 INVALID_POC);
9712 } else if (vdec->master &&
9713 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
9714 /*cur is enhance, found base*/
9715 struct hevc_state_s *hevc_ba =
9716 (struct hevc_state_s *)
9717 vdec->master->private;
9718 hevc->switch_dvlayer_flag = 1;
9719 hevc->no_switch_dvlayer_count = 0;
9720 hevc_ba->start_parser_type =
9721 next_parser_type;
9722 hevc_print(hevc, H265_DEBUG_DV,
9723 "switch (poc %d) to bl\n",
9724 hevc->cur_pic ?
9725 hevc->cur_pic->POC :
9726 INVALID_POC);
9727 } else {
9728 hevc->switch_dvlayer_flag = 0;
9729 hevc->start_parser_type =
9730 next_parser_type;
9731 hevc->no_switch_dvlayer_count++;
9732 hevc_print(hevc, H265_DEBUG_DV,
9733 "%s: no_switch_dvlayer_count = %d\n",
9734 vdec->master ? "el" : "bl",
9735 hevc->no_switch_dvlayer_count);
9736 if (vdec->slave &&
9737 dolby_el_flush_th != 0 &&
9738 hevc->no_switch_dvlayer_count >
9739 dolby_el_flush_th) {
9740 struct hevc_state_s *hevc_el =
9741 (struct hevc_state_s *)
9742 vdec->slave->private;
9743 struct PIC_s *el_pic;
9744 check_pic_decoded_error(hevc_el,
9745 hevc_el->pic_decoded_lcu_idx);
9746 el_pic = get_pic_by_POC(hevc_el,
9747 hevc_el->curr_POC);
9748 hevc_el->curr_POC = INVALID_POC;
9749 hevc_el->m_pocRandomAccess = MAX_INT;
9750 flush_output(hevc_el, el_pic);
9751 				/* already called flush_output */
9752 				hevc_el->decoded_poc = INVALID_POC;
9753 hevc_el->decoding_pic = NULL;
9754 hevc->no_switch_dvlayer_count = 0;
9755 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
9756 hevc_print(hevc, 0,
9757 "no el anymore, flush_output el\n");
9758 }
9759 }
9760 hevc->decoded_poc = hevc->curr_POC;
9761 hevc->decoding_pic = NULL;
9762 hevc->dec_result = DEC_RESULT_DONE;
9763 amhevc_stop();
9764 reset_process_time(hevc);
9765 if (aux_data_is_avaible(hevc))
9766 dolby_get_meta(hevc);
9767 			if (hevc->cur_pic->slice_type == 2 &&
9768 hevc->vf_pre_count == 0) {
9769 hevc_print(hevc, 0,
9770 "first slice_type %x no_switch_dvlayer_count %x\n",
9771 hevc->cur_pic->slice_type,
9772 hevc->no_switch_dvlayer_count);
9773 goto force_output;
9774 }
9775 vdec_schedule_work(&hevc->work);
9776 }
9777
9778 return IRQ_HANDLED;
9779#endif
9780 }
9781
9782#endif
9783
9784 if (dec_status == HEVC_SEI_DAT) {
9785 if (!hevc->m_ins_flag) {
9786 int payload_type =
9787 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
9788 int payload_size =
9789 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
9790 process_nal_sei(hevc,
9791 payload_type, payload_size);
9792 }
9793 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
9794 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
9795 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
9796 int parse_type = HEVC_DISCARD_NAL;
9797
9798 hevc->error_watchdog_count = 0;
9799 hevc->error_skip_nal_wt_cnt = 0;
9800#ifdef MULTI_INSTANCE_SUPPORT
9801 if (hevc->m_ins_flag)
9802 reset_process_time(hevc);
9803#endif
9804 if (slice_parse_begin > 0 &&
9805 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
9806 hevc_print(hevc, 0,
9807 "nal type %d, discard %d\n", naltype,
9808 slice_parse_begin);
9809 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
9810 slice_parse_begin--;
9811 }
9812 if (naltype == NAL_UNIT_EOS) {
9813 struct PIC_s *pic;
9814
9815 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
9816#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9817 if ((vdec->master || vdec->slave) &&
9818 aux_data_is_avaible(hevc)) {
9819 if (hevc->decoding_pic)
9820 dolby_get_meta(hevc);
9821 }
9822#endif
9823 check_pic_decoded_error(hevc,
9824 hevc->pic_decoded_lcu_idx);
9825 pic = get_pic_by_POC(hevc, hevc->curr_POC);
9826 hevc->curr_POC = INVALID_POC;
9827 /* add to fix RAP_B_Bossen_1 */
9828 hevc->m_pocRandomAccess = MAX_INT;
9829 flush_output(hevc, pic);
9830 clear_poc_flag(hevc);
9831 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
9832 			/* Interrupt Amrisc to execute */
9833 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9834#ifdef MULTI_INSTANCE_SUPPORT
9835 if (hevc->m_ins_flag) {
9836 				/* already called flush_output */
9837 				hevc->decoded_poc = INVALID_POC;
9838 hevc->decoding_pic = NULL;
9839 hevc->dec_result = DEC_RESULT_DONE;
9840 amhevc_stop();
9841
9842 vdec_schedule_work(&hevc->work);
9843 }
9844#endif
9845 return IRQ_HANDLED;
9846 }
9847
9848 if (
9849#ifdef MULTI_INSTANCE_SUPPORT
9850 (!hevc->m_ins_flag) &&
9851#endif
9852 hevc->error_skip_nal_count > 0) {
9853 hevc_print(hevc, 0,
9854 "nal type %d, discard %d\n", naltype,
9855 hevc->error_skip_nal_count);
9856 hevc->error_skip_nal_count--;
9857 if (hevc->error_skip_nal_count == 0) {
9858 hevc_recover(hevc);
9859 hevc->error_flag = 0;
9860 if ((error_handle_policy & 0x2) == 0) {
9861 hevc->have_vps = 1;
9862 hevc->have_sps = 1;
9863 hevc->have_pps = 1;
9864 }
9865 return IRQ_HANDLED;
9866 }
9867 } else if (naltype == NAL_UNIT_VPS) {
9868 parse_type = HEVC_NAL_UNIT_VPS;
9869 hevc->have_vps = 1;
9870#ifdef ERROR_HANDLE_DEBUG
9871 if (dbg_nal_skip_flag & 1)
9872 parse_type = HEVC_DISCARD_NAL;
9873#endif
9874 } else if (hevc->have_vps) {
9875 if (naltype == NAL_UNIT_SPS) {
9876 parse_type = HEVC_NAL_UNIT_SPS;
9877 hevc->have_sps = 1;
9878#ifdef ERROR_HANDLE_DEBUG
9879 if (dbg_nal_skip_flag & 2)
9880 parse_type = HEVC_DISCARD_NAL;
9881#endif
9882 } else if (naltype == NAL_UNIT_PPS) {
9883 parse_type = HEVC_NAL_UNIT_PPS;
9884 hevc->have_pps = 1;
9885#ifdef ERROR_HANDLE_DEBUG
9886 if (dbg_nal_skip_flag & 4)
9887 parse_type = HEVC_DISCARD_NAL;
9888#endif
9889 } else if (hevc->have_sps && hevc->have_pps) {
9890 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
9891
9892 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
9893 (naltype ==
9894 NAL_UNIT_CODED_SLICE_IDR_N_LP)
9895 || (naltype ==
9896 NAL_UNIT_CODED_SLICE_CRA)
9897 || (naltype ==
9898 NAL_UNIT_CODED_SLICE_BLA)
9899 || (naltype ==
9900 NAL_UNIT_CODED_SLICE_BLANT)
9901 || (naltype ==
9902 NAL_UNIT_CODED_SLICE_BLA_N_LP)
9903 ) {
9904 if (slice_parse_begin > 0) {
9905 hevc_print(hevc, 0,
9906 "discard %d, for debugging\n",
9907 slice_parse_begin);
9908 slice_parse_begin--;
9909 } else {
9910 parse_type = seg;
9911 }
9912 hevc->have_valid_start_slice = 1;
9913 } else if (naltype <=
9914 NAL_UNIT_CODED_SLICE_CRA
9915 && (hevc->have_valid_start_slice
9916 || (hevc->PB_skip_mode != 3))) {
9917 if (slice_parse_begin > 0) {
9918 hevc_print(hevc, 0,
9919 "discard %d, dd\n",
9920 slice_parse_begin);
9921 slice_parse_begin--;
9922 } else
9923 parse_type = seg;
9924
9925 }
9926 }
9927 }
9928 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
9929 && hevc->have_valid_start_slice &&
9930 hevc->error_flag == 0) {
9931 if ((get_dbg_flag(hevc) &
9932 H265_DEBUG_MAN_SEARCH_NAL) == 0
9933 /* && (!hevc->m_ins_flag)*/) {
9934 				/* auto parse NAL; do not check
9935 				 * vps/sps/pps/idr
9936 				 */
9937 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
9938 }
9939
9940 if ((get_dbg_flag(hevc) &
9941 H265_DEBUG_NO_EOS_SEARCH_DONE)
9942#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9943 || vdec->master
9944 || vdec->slave
9945#endif
9946 ) {
9947 WRITE_VREG(NAL_SEARCH_CTL,
9948 READ_VREG(NAL_SEARCH_CTL) |
9949 0x10000);
9950 }
9951 WRITE_VREG(NAL_SEARCH_CTL,
9952 READ_VREG(NAL_SEARCH_CTL)
9953 | ((parser_sei_enable & 0x7) << 17));
9954#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9955 WRITE_VREG(NAL_SEARCH_CTL,
9956 READ_VREG(NAL_SEARCH_CTL) |
9957 ((parser_dolby_vision_enable & 0x1) << 20));
9958#endif
9959 config_decode_mode(hevc);
9960 }
9961
9962 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
9963 hevc_print(hevc, 0,
9964 "naltype = %d parse_type %d\n %d %d %d %d\n",
9965 naltype, parse_type, hevc->have_vps,
9966 hevc->have_sps, hevc->have_pps,
9967 hevc->have_valid_start_slice);
9968 }
9969
9970 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
9971 		/* Interrupt Amrisc to execute */
9972 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9973#ifdef MULTI_INSTANCE_SUPPORT
9974 if (hevc->m_ins_flag)
9975 start_process_time(hevc);
9976#endif
9977 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
9978#ifdef MULTI_INSTANCE_SUPPORT
9979 if (hevc->m_ins_flag) {
9980 reset_process_time(hevc);
9981 read_decode_info(hevc);
9982
9983 }
9984#endif
9985 if (hevc->start_decoding_time > 0) {
9986 u32 process_time = 1000*
9987 (jiffies - hevc->start_decoding_time)/HZ;
9988 if (process_time > max_decoding_time)
9989 max_decoding_time = process_time;
9990 }
9991
9992 hevc->error_watchdog_count = 0;
9993 if (hevc->pic_list_init_flag == 2) {
9994 hevc->pic_list_init_flag = 3;
9995 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
9996 } else if (hevc->wait_buf == 0) {
9997 u32 vui_time_scale;
9998 u32 vui_num_units_in_tick;
9999 unsigned char reconfig_flag = 0;
10000
10001 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
10002 get_rpm_param(&hevc->param);
10003 else {
10004
10005 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
10006 int ii;
10007
10008 for (ii = 0; ii < 4; ii++) {
10009 hevc->param.l.data[i + ii] =
10010 hevc->rpm_ptr[i + 3
10011 - ii];
10012 }
10013 }
10014#ifdef SEND_LMEM_WITH_RPM
10015 check_head_error(hevc);
10016#endif
10017 }
10018 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
10019 hevc_print(hevc, 0,
10020 "rpm_param: (%d)\n", hevc->slice_idx);
10021 hevc->slice_idx++;
10022 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
10023 hevc_print_cont(hevc, 0,
10024 "%04x ", hevc->param.l.data[i]);
10025 if (((i + 1) & 0xf) == 0)
10026 hevc_print_cont(hevc, 0, "\n");
10027 }
10028
10029 hevc_print(hevc, 0,
10030 "vui_timing_info: %x, %x, %x, %x\n",
10031 hevc->param.p.vui_num_units_in_tick_hi,
10032 hevc->param.p.vui_num_units_in_tick_lo,
10033 hevc->param.p.vui_time_scale_hi,
10034 hevc->param.p.vui_time_scale_lo);
10035 }
10036
10037 if (hevc->is_used_v4l) {
10038 struct aml_vcodec_ctx *ctx =
10039 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
10040
10041 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
10042 struct aml_vdec_ps_infos ps;
10043
10044 hevc->frame_width = hevc->param.p.pic_width_in_luma_samples;
10045 hevc->frame_height = hevc->param.p.pic_height_in_luma_samples;
10046 ps.visible_width = hevc->frame_width;
10047 ps.visible_height = hevc->frame_height;
10048 ps.coded_width = ALIGN(hevc->frame_width, 32);
10049 ps.coded_height = ALIGN(hevc->frame_height, 32);
10050 ps.dpb_size = get_work_pic_num(hevc);
10051 hevc->v4l_params_parsed = true;
10052 /*notice the v4l2 codec.*/
10053 vdec_v4l_set_ps_infos(ctx, &ps);
10054 }
10055 }
10056
10057 if (
10058#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10059 vdec->master == NULL &&
10060 vdec->slave == NULL &&
10061#endif
10062 aux_data_is_avaible(hevc)
10063 ) {
10064
10065 if (get_dbg_flag(hevc) &
10066 H265_DEBUG_BUFMGR_MORE)
10067 dump_aux_buf(hevc);
10068 }
10069
10070 vui_time_scale =
10071 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
10072 hevc->param.p.vui_time_scale_lo;
10073 vui_num_units_in_tick =
10074 (u32)(hevc->param.
10075 p.vui_num_units_in_tick_hi << 16) |
10076 hevc->param.
10077 p.vui_num_units_in_tick_lo;
10078 if (hevc->bit_depth_luma !=
10079 ((hevc->param.p.bit_depth & 0xf) + 8)) {
10080 reconfig_flag = 1;
10081 hevc_print(hevc, 0, "Bit depth luma = %d\n",
10082 (hevc->param.p.bit_depth & 0xf) + 8);
10083 }
10084 if (hevc->bit_depth_chroma !=
10085 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
10086 reconfig_flag = 1;
10087 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
10088 ((hevc->param.p.bit_depth >> 4) &
10089 0xf) + 8);
10090 }
10091 hevc->bit_depth_luma =
10092 (hevc->param.p.bit_depth & 0xf) + 8;
10093 hevc->bit_depth_chroma =
10094 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
10095 bit_depth_luma = hevc->bit_depth_luma;
10096 bit_depth_chroma = hevc->bit_depth_chroma;
10097#ifdef SUPPORT_10BIT
10098 if (hevc->bit_depth_luma == 8 &&
10099 hevc->bit_depth_chroma == 8 &&
10100 enable_mem_saving)
10101 hevc->mem_saving_mode = 1;
10102 else
10103 hevc->mem_saving_mode = 0;
10104#endif
10105 if (reconfig_flag &&
10106 (get_double_write_mode(hevc) & 0x10) == 0)
10107 init_decode_head_hw(hevc);
10108
10109 if ((vui_time_scale != 0)
10110 && (vui_num_units_in_tick != 0)) {
10111 hevc->frame_dur =
10112 div_u64(96000ULL *
10113 vui_num_units_in_tick,
10114 vui_time_scale);
10115 if (hevc->get_frame_dur != true)
10116 vdec_schedule_work(
10117 &hevc->notify_work);
10118
10119 hevc->get_frame_dur = true;
10120#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10121 gvs->frame_dur = hevc->frame_dur;
10122#endif
10123 }
10124
10125 if (hevc->video_signal_type !=
10126 ((hevc->param.p.video_signal_type << 16)
10127 | hevc->param.p.color_description)) {
10128 u32 v = hevc->param.p.video_signal_type;
10129 u32 c = hevc->param.p.color_description;
10130#if 0
10131 if (v & 0x2000) {
10132 hevc_print(hevc, 0,
10133 "video_signal_type present:\n");
10134 hevc_print(hevc, 0, " %s %s\n",
10135 video_format_names[(v >> 10) & 7],
10136 ((v >> 9) & 1) ?
10137 "full_range" : "limited");
10138 if (v & 0x100) {
10139 hevc_print(hevc, 0,
10140 " color_description present:\n");
10141 hevc_print(hevc, 0,
10142 " color_primarie = %s\n",
10143 color_primaries_names
10144 [v & 0xff]);
10145 hevc_print(hevc, 0,
10146 " transfer_characteristic = %s\n",
10147 transfer_characteristics_names
10148 [(c >> 8) & 0xff]);
10149 hevc_print(hevc, 0,
10150 " matrix_coefficient = %s\n",
10151 matrix_coeffs_names[c & 0xff]);
10152 }
10153 }
10154#endif
10155 hevc->video_signal_type = (v << 16) | c;
10156 video_signal_type = hevc->video_signal_type;
10157 }
10158
10159 if (use_cma &&
10160 (hevc->param.p.slice_segment_address == 0)
10161 && (hevc->pic_list_init_flag == 0)) {
10162 int log = hevc->param.p.log2_min_coding_block_size_minus3;
10163 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
10164
10165 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10166 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
10167 hevc->lcu_size = 1 << (log + 3 + log_s);
10168 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
10169 if (hevc->pic_w == 0 || hevc->pic_h == 0
10170 || hevc->lcu_size == 0
10171 || is_oversize(hevc->pic_w, hevc->pic_h)
10172 || (!hevc->skip_first_nal &&
10173 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
10174 /* skip search next start code */
10175 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10176 & (~0x2));
10177 					if (!hevc->skip_first_nal &&
10178 (hevc->pic_h == 96) && (hevc->pic_w == 160))
10179 hevc->skip_first_nal = 1;
10180 hevc->skip_flag = 1;
10181 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10182 					/* Interrupt Amrisc to execute */
10183 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10184#ifdef MULTI_INSTANCE_SUPPORT
10185 if (hevc->m_ins_flag)
10186 start_process_time(hevc);
10187#endif
10188 } else {
10189 hevc->sps_num_reorder_pics_0 =
10190 hevc->param.p.sps_num_reorder_pics_0;
10191 hevc->pic_list_init_flag = 1;
10192#ifdef MULTI_INSTANCE_SUPPORT
10193 if (hevc->m_ins_flag) {
10194 vdec_schedule_work(&hevc->work);
10195 } else
10196#endif
10197 up(&h265_sema);
10198 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10199 }
10200 return IRQ_HANDLED;
10201 }
10202
10203}
10204 ret =
10205 hevc_slice_segment_header_process(hevc,
10206 &hevc->param, decode_pic_begin);
10207 if (ret < 0) {
10208#ifdef MULTI_INSTANCE_SUPPORT
10209 if (hevc->m_ins_flag) {
10210 hevc->wait_buf = 0;
10211 hevc->dec_result = DEC_RESULT_AGAIN;
10212 amhevc_stop();
10213 restore_decode_state(hevc);
10214 reset_process_time(hevc);
10215 vdec_schedule_work(&hevc->work);
10216 return IRQ_HANDLED;
10217 }
10218#else
10219 ;
10220#endif
10221 } else if (ret == 0) {
10222 if ((hevc->new_pic) && (hevc->cur_pic)) {
10223 hevc->cur_pic->stream_offset =
10224 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10225 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10226 "read stream_offset = 0x%x\n",
10227 hevc->cur_pic->stream_offset);
10228 hevc->cur_pic->aspect_ratio_idc =
10229 hevc->param.p.aspect_ratio_idc;
10230 hevc->cur_pic->sar_width =
10231 hevc->param.p.sar_width;
10232 hevc->cur_pic->sar_height =
10233 hevc->param.p.sar_height;
10234 }
10235
10236 WRITE_VREG(HEVC_DEC_STATUS_REG,
10237 HEVC_CODED_SLICE_SEGMENT_DAT);
10238 		/* Interrupt Amrisc to execute */
10239 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10240
10241 hevc->start_decoding_time = jiffies;
10242#ifdef MULTI_INSTANCE_SUPPORT
10243 if (hevc->m_ins_flag)
10244 start_process_time(hevc);
10245#endif
10246#if 1
10247 /*to do..., copy aux data to hevc->cur_pic*/
10248#endif
10249#ifdef MULTI_INSTANCE_SUPPORT
10250 } else if (hevc->m_ins_flag) {
10251 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10252 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10253 __func__, ret);
10254 hevc->decoded_poc = INVALID_POC;
10255 hevc->decoding_pic = NULL;
10256 hevc->dec_result = DEC_RESULT_DONE;
10257 amhevc_stop();
10258 reset_process_time(hevc);
10259 vdec_schedule_work(&hevc->work);
10260#endif
10261 } else {
10262 /* skip, search next start code */
10263#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10264 gvs->drop_frame_count++;
10265#endif
10266 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10267 hevc->skip_flag = 1;
10268 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10269 		/* Interrupt Amrisc to execute */
10270 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10271 }
10272
10273 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10274 hevc_print(hevc, 0 , "hevc decode oversize !!\n");
10275#ifdef MULTI_INSTANCE_SUPPORT
10276 if (!hevc->m_ins_flag)
10277 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10278 H265_DEBUG_DIS_SYS_ERROR_PROC);
10279#endif
10280 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10281 }
10282 return IRQ_HANDLED;
10283}
10284
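/*
 * Clear the shift status, then poll HEVC_STREAM_CONTROL bit 1 until the
 * stream search logic reports idle; give up after about 2 seconds
 * (100 * 20 ms) and log a timeout.
 */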
10285static void wait_hevc_search_done(struct hevc_state_s *hevc)
10286{
10287 int count = 0;
10288 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10289 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10290 msleep(20);
10291 count++;
10292 if (count > 100) {
10293 hevc_print(hevc, 0, "%s timeout\n", __func__);
10294 break;
10295 }
10296 }
10297}
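/*
 * Top-half interrupt handler: latch HEVC_DEC_STATUS_REG, service ucode
 * debug tags (LMEM / debug register dumps), handle the single-instance
 * over-decode case, and defer everything else to the threaded handler
 * by returning IRQ_WAKE_THREAD.
 */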
10298static irqreturn_t vh265_isr(int irq, void *data)
10299{
10300 int i, temp;
10301 unsigned int dec_status;
10302 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10303 u32 debug_tag;
10304 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10305
10306 if (hevc->init_flag == 0)
10307 return IRQ_HANDLED;
10308 hevc->dec_status = dec_status;
10309 if (is_log_enable(hevc))
10310 add_log(hevc,
10311 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10312 dec_status, READ_HREG(HEVC_DECODE_INFO),
10313 READ_VREG(HEVC_MPRED_CURR_LCU),
10314 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10315 READ_VREG(HEVC_SHIFT_STATUS));
10316
10317 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10318 hevc_print(hevc, 0,
10319 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10320 dec_status, READ_HREG(HEVC_DECODE_INFO),
10321 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10322 READ_VREG(HEVC_SHIFT_STATUS));
10323
10324 debug_tag = READ_HREG(DEBUG_REG1);
10325 if (debug_tag & 0x10000) {
10326 hevc_print(hevc, 0,
10327 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10328
10329 if (hevc->mmu_enable)
10330 temp = 0x500;
10331 else
10332 temp = 0x400;
10333 for (i = 0; i < temp; i += 4) {
10334 int ii;
10335 if ((i & 0xf) == 0)
10336 hevc_print_cont(hevc, 0, "%03x: ", i);
10337 for (ii = 0; ii < 4; ii++) {
10338 hevc_print_cont(hevc, 0, "%04x ",
10339 hevc->lmem_ptr[i + 3 - ii]);
10340 }
10341 if (((i + ii) & 0xf) == 0)
10342 hevc_print_cont(hevc, 0, "\n");
10343 }
10344
10345 if (((udebug_pause_pos & 0xffff)
10346 == (debug_tag & 0xffff)) &&
10347 (udebug_pause_decode_idx == 0 ||
10348 udebug_pause_decode_idx == hevc->decode_idx) &&
10349 (udebug_pause_val == 0 ||
10350 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10351 udebug_pause_pos &= 0xffff;
10352 hevc->ucode_pause_pos = udebug_pause_pos;
10353 }
10354 else if (debug_tag & 0x20000)
10355 hevc->ucode_pause_pos = 0xffffffff;
10356 if (hevc->ucode_pause_pos)
10357 reset_process_time(hevc);
10358 else
10359 WRITE_HREG(DEBUG_REG1, 0);
10360 } else if (debug_tag != 0) {
10361 hevc_print(hevc, 0,
10362 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10363 READ_HREG(DEBUG_REG2),
10364 READ_VREG(HEVC_STREAM_LEVEL),
10365 READ_VREG(HEVC_STREAM_WR_PTR),
10366 READ_VREG(HEVC_STREAM_RD_PTR));
10367 if (((udebug_pause_pos & 0xffff)
10368 == (debug_tag & 0xffff)) &&
10369 (udebug_pause_decode_idx == 0 ||
10370 udebug_pause_decode_idx == hevc->decode_idx) &&
10371 (udebug_pause_val == 0 ||
10372 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10373 udebug_pause_pos &= 0xffff;
10374 hevc->ucode_pause_pos = udebug_pause_pos;
10375 }
10376 if (hevc->ucode_pause_pos)
10377 reset_process_time(hevc);
10378 else
10379 WRITE_HREG(DEBUG_REG1, 0);
10380 return IRQ_HANDLED;
10381 }
10382
10383
10384 if (hevc->pic_list_init_flag == 1)
10385 return IRQ_HANDLED;
10386
10387 if (!hevc->m_ins_flag) {
10388 if (dec_status == HEVC_OVER_DECODE) {
10389 hevc->over_decode = 1;
10390 			hevc_print(hevc, 0,
10391 				"isr: over decode\n");
10392 			WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10393 return IRQ_HANDLED;
10394 }
10395 }
10396
10397 return IRQ_WAKE_THREAD;
10398
10399}
10400
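/*
 * Worker: let the clock code re-evaluate the source via
 * hevc_source_changed() and cache the new width * height * fps product.
 */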
10401static void vh265_set_clk(struct work_struct *work)
10402{
10403 struct hevc_state_s *hevc = container_of(work,
10404 struct hevc_state_s, set_clk_work);
10405
10406 int fps = 96000 / hevc->frame_dur;
10407
10408 if (hevc_source_changed(VFORMAT_HEVC,
10409 hevc->frame_width, hevc->frame_height, fps) > 0)
10410 hevc->saved_resolution = hevc->frame_width *
10411 hevc->frame_height * fps;
10412}
10413
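/*
 * Periodic watchdog, re-armed every PUT_INTERVAL: detects decode
 * timeouts in multi-instance mode, drives the error recovery watchdogs
 * in single-instance mode, services debug hooks (register read/write,
 * pic list dump, forced HW reset) and schedules a clock update when
 * the resolution or frame rate has changed.
 */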
10414static void vh265_check_timer_func(unsigned long arg)
10415{
10416 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10417 struct timer_list *timer = &hevc->timer;
10418 unsigned char empty_flag;
10419 unsigned int buf_level;
10420
10421 enum receviver_start_e state = RECEIVER_INACTIVE;
10422
10423 if (hevc->init_flag == 0) {
10424 if (hevc->stat & STAT_TIMER_ARM) {
10425 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10426 }
10427 return;
10428 }
10429#ifdef MULTI_INSTANCE_SUPPORT
10430 if (hevc->m_ins_flag &&
10431 (get_dbg_flag(hevc) &
10432 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10433 hw_to_vdec(hevc)->next_status ==
10434 VDEC_STATUS_DISCONNECTED) {
10435 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10436 vdec_schedule_work(&hevc->work);
10437 hevc_print(hevc,
10438 0, "vdec requested to be disconnected\n");
10439 return;
10440 }
10441
10442 if (hevc->m_ins_flag) {
10443 if ((input_frame_based(hw_to_vdec(hevc)) ||
10444 (READ_VREG(HEVC_STREAM_LEVEL) > 0xb0)) &&
10445 ((get_dbg_flag(hevc) &
10446 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10447 (decode_timeout_val > 0) &&
10448 (hevc->start_process_time > 0) &&
10449 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10450 > decode_timeout_val)
10451 ) {
10452 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10453 int current_lcu_idx =
10454 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10455 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10456 if (hevc->last_lcu_idx == current_lcu_idx) {
10457 if (hevc->decode_timeout_count > 0)
10458 hevc->decode_timeout_count--;
10459 if (hevc->decode_timeout_count == 0)
10460 timeout_process(hevc);
10461 } else
10462 restart_process_time(hevc);
10463 hevc->last_lcu_idx = current_lcu_idx;
10464 } else {
10465 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10466 timeout_process(hevc);
10467 }
10468 }
10469 } else {
10470#endif
10471 if (hevc->m_ins_flag == 0 &&
10472 vf_get_receiver(hevc->provider_name)) {
10473 state =
10474 vf_notify_receiver(hevc->provider_name,
10475 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10476 NULL);
10477 if ((state == RECEIVER_STATE_NULL)
10478 || (state == RECEIVER_STATE_NONE))
10479 state = RECEIVER_INACTIVE;
10480 } else
10481 state = RECEIVER_INACTIVE;
10482
10483 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10484 /* error watchdog */
10485 if (hevc->m_ins_flag == 0 &&
10486 (empty_flag == 0)
10487 && (hevc->pic_list_init_flag == 0
10488 || hevc->pic_list_init_flag
10489 == 3)) {
10490 /* decoder has input */
10491 if ((get_dbg_flag(hevc) &
10492 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10493
10494 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10495 /* receiver has no buffer to recycle */
10496 if ((state == RECEIVER_INACTIVE) &&
10497 (kfifo_is_empty(&hevc->display_q) &&
10498 buf_level > 0x200)
10499 ) {
10500 if (hevc->error_flag == 0) {
10501 hevc->error_watchdog_count++;
10502 if (hevc->error_watchdog_count ==
10503 error_handle_threshold) {
10504 hevc_print(hevc, 0,
10505 "H265 dec err local reset.\n");
10506 hevc->error_flag = 1;
10507 hevc->error_watchdog_count = 0;
10508 hevc->error_skip_nal_wt_cnt = 0;
10509 hevc->
10510 error_system_watchdog_count++;
10511 WRITE_VREG
10512 (HEVC_ASSIST_MBOX0_IRQ_REG,
10513 0x1);
10514 }
10515 } else if (hevc->error_flag == 2) {
10516 int th =
10517 error_handle_nal_skip_threshold;
10518 hevc->error_skip_nal_wt_cnt++;
10519 if (hevc->error_skip_nal_wt_cnt
10520 == th) {
10521 hevc->error_flag = 3;
10522 hevc->error_watchdog_count = 0;
10523 hevc->
10524 error_skip_nal_wt_cnt = 0;
10525 WRITE_VREG
10526 (HEVC_ASSIST_MBOX0_IRQ_REG,
10527 0x1);
10528 }
10529 }
10530 }
10531 }
10532
10533 if ((get_dbg_flag(hevc)
10534 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10535 /* receiver has no buffer to recycle */
10536 if ((state == RECEIVER_INACTIVE) &&
10537 (kfifo_is_empty(&hevc->display_q))
10538 ) { /* no buffer to recycle */
10539 if ((get_dbg_flag(hevc) &
10540 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10541 0)
10542 hevc->error_system_watchdog_count++;
10543 if (hevc->error_system_watchdog_count ==
10544 error_handle_system_threshold) {
10545 /* and it lasts for a while */
10546 hevc_print(hevc, 0,
10547 "H265 dec fatal error watchdog.\n");
10548 hevc->
10549 error_system_watchdog_count = 0;
10550 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10551 }
10552 }
10553 } else {
10554 hevc->error_watchdog_count = 0;
10555 hevc->error_system_watchdog_count = 0;
10556 }
10557#ifdef MULTI_INSTANCE_SUPPORT
10558 }
10559#endif
10560 if ((hevc->ucode_pause_pos != 0) &&
10561 (hevc->ucode_pause_pos != 0xffffffff) &&
10562 udebug_pause_pos != hevc->ucode_pause_pos) {
10563 hevc->ucode_pause_pos = 0;
10564 WRITE_HREG(DEBUG_REG1, 0);
10565 }
10566
10567 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10568 dump_pic_list(hevc);
10569 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10570 }
10571 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10572 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10573 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10574 }
10575#ifdef TEST_NO_BUF
10576 if (hevc->wait_buf)
10577 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10578#endif
10579 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
10580 hevc->error_skip_nal_count = error_skip_nal_count;
10581 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10582
10583 debug &= ~H265_DEBUG_HW_RESET;
10584 }
10585
10586#ifdef ERROR_HANDLE_DEBUG
10587 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
10588 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
10589 dbg_nal_skip_count &= ~0x10000;
10590 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10591 }
10592#endif
10593
10594 if (radr != 0) {
10595 if (rval != 0) {
10596 WRITE_VREG(radr, rval);
10597 hevc_print(hevc, 0,
10598 "WRITE_VREG(%x,%x)\n", radr, rval);
10599 } else
10600 hevc_print(hevc, 0,
10601 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
10602 rval = 0;
10603 radr = 0;
10604 }
10605 if (dbg_cmd != 0) {
10606 if (dbg_cmd == 1) {
10607 u32 disp_laddr;
10608
10609 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
10610 get_double_write_mode(hevc) == 0) {
10611 disp_laddr =
10612 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
10613 } else {
10614 struct canvas_s cur_canvas;
10615
10616 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
10617 & 0xff), &cur_canvas);
10618 disp_laddr = cur_canvas.addr;
10619 }
10620 hevc_print(hevc, 0,
10621 "current displayed buffer address %x\r\n",
10622 disp_laddr);
10623 }
10624 dbg_cmd = 0;
10625 }
10626 	/* don't change the clock right after start */
10627 if (hevc->m_ins_flag == 0 &&
10628 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
10629 hevc->frame_dur > 0 && hevc->saved_resolution !=
10630 hevc->frame_width * hevc->frame_height *
10631 (96000 / hevc->frame_dur))
10632 vdec_schedule_work(&hevc->set_clk_work);
10633
10634 mod_timer(timer, jiffies + PUT_INTERVAL);
10635}
10636
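/*
 * Kernel thread used on the use_cma path: waits on h265_sema, finishes
 * the picture list initialization requested by the ISR (stage 1 -> 2)
 * and tears the list down again when uninit_list is set.
 */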
10637static int h265_task_handle(void *data)
10638{
10639 int ret = 0;
10640 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10641
10642 set_user_nice(current, -10);
10643 while (1) {
10644 if (use_cma == 0) {
10645 hevc_print(hevc, 0,
10646 "ERROR: use_cma can not be changed dynamically\n");
10647 }
10648 ret = down_interruptible(&h265_sema);
10649 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
10650 init_pic_list(hevc);
10651 init_pic_list_hw(hevc);
10652 init_buf_spec(hevc);
10653 hevc->pic_list_init_flag = 2;
10654 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
10655
10656 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10657
10658 }
10659
10660 if (hevc->uninit_list) {
10661 /*USE_BUF_BLOCK*/
10662 uninit_pic_list(hevc);
10663 hevc_print(hevc, 0, "uninit list\n");
10664 hevc->uninit_list = 0;
10665#ifdef USE_UNINIT_SEMA
10666 if (use_cma) {
10667 up(&hevc->h265_uninit_done_sema);
10668 while (!kthread_should_stop())
10669 msleep(1);
10670 break;
10671 }
10672#endif
10673 }
10674 }
10675
10676 return 0;
10677}
10678
10679void vh265_free_cmabuf(void)
10680{
10681 struct hevc_state_s *hevc = gHevc;
10682
10683 mutex_lock(&vh265_mutex);
10684
10685 if (hevc->init_flag) {
10686 mutex_unlock(&vh265_mutex);
10687 return;
10688 }
10689
10690 mutex_unlock(&vh265_mutex);
10691}
10692
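/* Report decoder status (size, frame rate, error state, gvs counters). */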
10693#ifdef MULTI_INSTANCE_SUPPORT
10694int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
10695#else
10696int vh265_dec_status(struct vdec_info *vstatus)
10697#endif
10698{
10699#ifdef MULTI_INSTANCE_SUPPORT
10700 struct hevc_state_s *hevc =
10701 (struct hevc_state_s *)vdec->private;
10702#else
10703 struct hevc_state_s *hevc = gHevc;
10704#endif
10705 if (!hevc)
10706 return -1;
10707
10708 vstatus->frame_width = hevc->frame_width;
10709 vstatus->frame_height = hevc->frame_height;
10710 if (hevc->frame_dur != 0)
10711 vstatus->frame_rate = 96000 / hevc->frame_dur;
10712 else
10713 vstatus->frame_rate = -1;
10714 vstatus->error_count = 0;
10715 vstatus->status = hevc->stat | hevc->fatal_error;
10716#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10718 vstatus->frame_dur = hevc->frame_dur;
10719 if (gvs) {
10720 vstatus->bit_rate = gvs->bit_rate;
10721 vstatus->frame_data = gvs->frame_data;
10722 vstatus->total_data = gvs->total_data;
10723 vstatus->frame_count = gvs->frame_count;
10724 vstatus->error_frame_count = gvs->error_frame_count;
10725 vstatus->drop_frame_count = gvs->drop_frame_count;
10727 vstatus->samp_cnt = gvs->samp_cnt;
10728 vstatus->offset = gvs->offset;
10729 }
10730 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
10731 "%s", DRIVER_NAME);
10732#endif
10733 vstatus->ratio_control = hevc->ratio_control;
10734 return 0;
10735}
10736
10737int vh265_set_isreset(struct vdec_s *vdec, int isreset)
10738{
10739 is_reset = isreset;
10740 return 0;
10741}
10742
10743static int vh265_vdec_info_init(void)
10744{
10745 gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
10746 if (NULL == gvs) {
10747 		pr_info("failed to allocate the vdec status struct.\n");
10748 return -ENOMEM;
10749 }
10750 return 0;
10751}
10752
10753#if 0
10754static void H265_DECODE_INIT(void)
10755{
10756 /* enable hevc clocks */
10757 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
10758 /* *************************************************************** */
10759 /* Power ON HEVC */
10760 /* *************************************************************** */
10761 /* Powerup HEVC */
10762 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
10763 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
10764 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
10765 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
10766 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
10767 /* remove isolations */
10768 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
10769 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
10770
10771}
10772#endif
10773
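/*
 * Program HEVC_DECODE_MODE/HEVC_DECODE_MODE2 according to the input
 * path (single, multi frame-based, multi stream-based or the Dolby
 * Vision BL/EL variants) plus the start_decoding/start_parser flags.
 */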
10774static void config_decode_mode(struct hevc_state_s *hevc)
10775{
10776#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10777 struct vdec_s *vdec = hw_to_vdec(hevc);
10778#endif
10779 unsigned decode_mode;
10780 if (!hevc->m_ins_flag)
10781 decode_mode = DECODE_MODE_SINGLE;
10782 else if (vdec_frame_based(hw_to_vdec(hevc)))
10783 decode_mode =
10784 DECODE_MODE_MULTI_FRAMEBASE;
10785#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10786 else if (vdec->slave) {
10787 if (force_bypass_dvenl & 0x80000000)
10788 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
10789 else
10790 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
10791 if (dolby_meta_with_el && hevc->bypass_dvenl) {
10792 hevc->bypass_dvenl = 0;
10793 hevc_print(hevc, 0,
10794 "NOT support bypass_dvenl when meta_with_el\n");
10795 }
10796 if (hevc->bypass_dvenl)
10797 decode_mode =
10798 (hevc->start_parser_type << 8)
10799 | DECODE_MODE_MULTI_STREAMBASE;
10800 else
10801 decode_mode =
10802 (hevc->start_parser_type << 8)
10803 | DECODE_MODE_MULTI_DVBAL;
10804 } else if (vdec->master)
10805 decode_mode =
10806 (hevc->start_parser_type << 8)
10807 | DECODE_MODE_MULTI_DVENL;
10808#endif
10809 else
10810 decode_mode =
10811 DECODE_MODE_MULTI_STREAMBASE;
10812
10813 if (hevc->m_ins_flag)
10814 decode_mode |=
10815 (hevc->start_decoding_flag << 16);
10816 /* set MBX0 interrupt flag */
10817 decode_mode |= (0x80 << 24);
10818 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
10819 WRITE_VREG(HEVC_DECODE_MODE2,
10820 hevc->rps_set_id);
10821}
10822
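/*
 * Hardware protocol init done before starting the AMRISC ucode: work
 * space and decoder core setup, mailbox interrupt, NAL search control,
 * decode mode and aux buffer configuration.
 */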
10823static void vh265_prot_init(struct hevc_state_s *hevc)
10824{
10825#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10826 struct vdec_s *vdec = hw_to_vdec(hevc);
10827#endif
10828 /* H265_DECODE_INIT(); */
10829
10830 hevc_config_work_space_hw(hevc);
10831
10832 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
10833
10834 WRITE_VREG(HEVC_WAIT_FLAG, 1);
10835
10836 /* WRITE_VREG(P_HEVC_MPSR, 1); */
10837
10838 /* clear mailbox interrupt */
10839 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
10840
10841 /* enable mailbox interrupt */
10842 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
10843
10844 /* disable PSCALE for hardware sharing */
10845 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
10846
10847 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
10848
10849 if ((get_dbg_flag(hevc) &
10850 (H265_DEBUG_MAN_SKIP_NAL |
10851 H265_DEBUG_MAN_SEARCH_NAL))
10852 /*||hevc->m_ins_flag*/
10853 ) {
10854 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
10855 } else {
10856 /* check vps/sps/pps/i-slice in ucode */
10857 unsigned ctl_val = 0x8;
10858 if (hevc->PB_skip_mode == 0)
10859 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
10860 else if (hevc->PB_skip_mode == 3)
10861 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
10862 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
10863 }
10864 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
10865#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10866 || vdec->master
10867 || vdec->slave
10868#endif
10869 )
10870 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
10871
10872 WRITE_VREG(NAL_SEARCH_CTL,
10873 READ_VREG(NAL_SEARCH_CTL)
10874 | ((parser_sei_enable & 0x7) << 17));
10875#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10876 WRITE_VREG(NAL_SEARCH_CTL,
10877 READ_VREG(NAL_SEARCH_CTL) |
10878 ((parser_dolby_vision_enable & 0x1) << 20));
10879#endif
10880 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
10881
10882 config_decode_mode(hevc);
10883 config_aux_buf(hevc);
10884#ifdef SWAP_HEVC_UCODE
10885 if (!tee_enabled() && hevc->is_swap &&
10886 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10887 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
10888 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
10889 }
10890#endif
10891#ifdef DETREFILL_ENABLE
10892 if (hevc->is_swap &&
10893 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10894 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
10895 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
10896 }
10897#endif
10898}
10899
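/*
 * Software-side init: validate the configured size, derive frame_dur,
 * i_only and 4k flags from the amstream dec info, and prime the
 * newframe/display/pending kfifos before calling hevc_local_init().
 */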
10900static int vh265_local_init(struct hevc_state_s *hevc)
10901{
10902 int i;
10903 int ret = -1;
10904
10905#ifdef DEBUG_PTS
10906 hevc->pts_missed = 0;
10907 hevc->pts_hit = 0;
10908#endif
10909
10910 hevc->saved_resolution = 0;
10911 hevc->get_frame_dur = false;
10912 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
10913 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
10914 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
10915 pr_info("over size : %u x %u.\n",
10916 hevc->frame_width, hevc->frame_height);
10917 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10918 return ret;
10919 }
10920
10921 if (hevc->max_pic_w && hevc->max_pic_h) {
10922 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
10923 ((hevc->max_pic_w * hevc->max_pic_h) >
10924 1920 * 1088) ? true : false;
10925 } else {
10926 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
10927 ((hevc->frame_width * hevc->frame_height) >
10928 1920 * 1088) ? true : false;
10929 }
10930
10931 hevc->frame_dur =
10932 (hevc->vh265_amstream_dec_info.rate ==
10933 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
10934#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
10935 gvs->frame_dur = hevc->frame_dur;
10936#endif
10937 if (hevc->frame_width && hevc->frame_height)
10938 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
10939
10940 if (i_only_flag)
10941 hevc->i_only = i_only_flag & 0xff;
10942 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
10943 & 0x08)
10944 hevc->i_only = 0x7;
10945 else
10946 hevc->i_only = 0x0;
10947 hevc->error_watchdog_count = 0;
10948 hevc->sei_present_flag = 0;
10949 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
10950 & 0x40) >> 6;
10951 hevc_print(hevc, 0,
10952 "h265:pts_unstable=%d\n", pts_unstable);
10953/*
10954 *TODO:FOR VERSION
10955 */
10956 hevc_print(hevc, 0,
10957 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
10958 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
10959
10960 if (hevc->frame_dur == 0)
10961 hevc->frame_dur = 96000 / 24;
10962
10963 INIT_KFIFO(hevc->display_q);
10964 INIT_KFIFO(hevc->newframe_q);
10965 INIT_KFIFO(hevc->pending_q);
10966
10967 for (i = 0; i < VF_POOL_SIZE; i++) {
10968 const struct vframe_s *vf = &hevc->vfpool[i];
10969
10970 hevc->vfpool[i].index = -1;
10971 kfifo_put(&hevc->newframe_q, vf);
10972 }
10973
10974
10975 ret = hevc_local_init(hevc);
10976
10977 return ret;
10978}
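/*
 * Bring-up entry: load the (optionally swap/MMU) firmware and set up
 * the timer and work items. In multi-instance mode the function stops
 * after stashing the firmware; the legacy single-instance path goes on
 * to load the ucode, register the ISR and vframe provider, arm the
 * timer and start the AMRISC core.
 */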
10979#ifdef MULTI_INSTANCE_SUPPORT
10980static s32 vh265_init(struct vdec_s *vdec)
10981{
10982 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
10983#else
10984static s32 vh265_init(struct hevc_state_s *hevc)
10985{
10986
10987#endif
10988 int ret, size = -1;
10989 int fw_size = 0x1000 * 16;
10990 struct firmware_s *fw = NULL;
10991
10992 init_timer(&hevc->timer);
10993
10994 hevc->stat |= STAT_TIMER_INIT;
10995
10996 if (hevc->m_ins_flag) {
10997#ifdef USE_UNINIT_SEMA
10998 sema_init(&hevc->h265_uninit_done_sema, 0);
10999#endif
11000 INIT_WORK(&hevc->work, vh265_work);
11001 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
11002 }
11003
11004 if (vh265_local_init(hevc) < 0)
11005 return -EBUSY;
11006
11007 mutex_init(&hevc->chunks_mutex);
11008 INIT_WORK(&hevc->notify_work, vh265_notify_work);
11009 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
11010
11011 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
11012 if (IS_ERR_OR_NULL(fw))
11013 return -ENOMEM;
11014
11015 if (hevc->mmu_enable)
11016 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11017 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
11018 else {
11019 if (!hevc->is_4k) {
11020 /* if an older version of the fw was loaded, */
11021 				/* we need to try the no-swap fw because the */
11022 				/* old fw package does not contain the swap fw. */
11023 size = get_firmware_data(
11024 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
11025 if (size < 0)
11026 size = get_firmware_data(
11027 VIDEO_DEC_HEVC_MMU, fw->data);
11028 else if (size)
11029 hevc->is_swap = true;
11030 } else
11031 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
11032 fw->data);
11033 }
11034 else
11035 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
11036
11037 if (size < 0) {
11038 pr_err("get firmware fail.\n");
11039 vfree(fw);
11040 return -1;
11041 }
11042
11043 fw->len = size;
11044
11045#ifdef SWAP_HEVC_UCODE
11046 if (!tee_enabled() && hevc->is_swap &&
11047 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11048 if (hevc->mmu_enable) {
11049 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
11050 hevc->mc_cpu_addr =
11051 dma_alloc_coherent(amports_get_dma_device(),
11052 hevc->swap_size,
11053 &hevc->mc_dma_handle, GFP_KERNEL);
11054 if (!hevc->mc_cpu_addr) {
11055 amhevc_disable();
11056 pr_info("vh265 mmu swap ucode loaded fail.\n");
11057 return -ENOMEM;
11058 }
11059
11060 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
11061 hevc->swap_size);
11062
11063 hevc_print(hevc, 0,
11064 "vh265 mmu ucode swap loaded %x\n",
11065 hevc->mc_dma_handle);
11066 }
11067 }
11068#endif
11069
11070#ifdef MULTI_INSTANCE_SUPPORT
11071 if (hevc->m_ins_flag) {
11072 hevc->timer.data = (ulong) hevc;
11073 hevc->timer.function = vh265_check_timer_func;
11074 hevc->timer.expires = jiffies + PUT_INTERVAL;
11075
11076 hevc->fw = fw;
11077
11078 return 0;
11079 }
11080#endif
11081 amhevc_enable();
11082
11083 if (hevc->mmu_enable)
11084 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11085 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
11086 else {
11087 if (!hevc->is_4k) {
11088 /* if an older version of the fw was loaded, */
11089 				/* we need to try the no-swap fw because the */
11090 				/* old fw package does not contain the swap fw. */
11091 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11092 "hevc_mmu_swap", fw->data);
11093 if (ret < 0)
11094 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11095 "h265_mmu", fw->data);
11096 else
11097 hevc->is_swap = true;
11098 } else
11099 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11100 "h265_mmu", fw->data);
11101 }
11102 else
11103 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
11104
11105 if (ret < 0) {
11106 amhevc_disable();
11107 vfree(fw);
11108 pr_err("H265: the %s fw loading failed, err: %x\n",
11109 tee_enabled() ? "TEE" : "local", ret);
11110 return -EBUSY;
11111 }
11112
11113 vfree(fw);
11114
11115 hevc->stat |= STAT_MC_LOAD;
11116
11117#ifdef DETREFILL_ENABLE
11118 if (hevc->is_swap &&
11119 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11120 init_detrefill_buf(hevc);
11121#endif
11122 /* enable AMRISC side protocol */
11123 vh265_prot_init(hevc);
11124
11125 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
11126 vh265_isr_thread_fn,
11127 IRQF_ONESHOT,/*run thread on this irq disabled*/
11128 "vh265-irq", (void *)hevc)) {
11129 hevc_print(hevc, 0, "vh265 irq register error.\n");
11130 amhevc_disable();
11131 return -ENOENT;
11132 }
11133
11134 hevc->stat |= STAT_ISR_REG;
11135 hevc->provider_name = PROVIDER_NAME;
11136
11137#ifdef MULTI_INSTANCE_SUPPORT
11138 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
11139 &vh265_vf_provider, vdec);
11140 vf_reg_provider(&vh265_vf_prov);
11141 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
11142 NULL);
11143 if (hevc->frame_dur != 0) {
11144 if (!is_reset) {
11145 vf_notify_receiver(hevc->provider_name,
11146 VFRAME_EVENT_PROVIDER_FR_HINT,
11147 (void *)
11148 ((unsigned long)hevc->frame_dur));
11149 fr_hint_status = VDEC_HINTED;
11150 }
11151 } else
11152 fr_hint_status = VDEC_NEED_HINT;
11153#else
11154 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
11155 hevc);
11156 vf_reg_provider(&vh265_vf_prov);
11157 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
11158 if (hevc->frame_dur != 0) {
11159 vf_notify_receiver(PROVIDER_NAME,
11160 VFRAME_EVENT_PROVIDER_FR_HINT,
11161 (void *)
11162 ((unsigned long)hevc->frame_dur));
11163 fr_hint_status = VDEC_HINTED;
11164 } else
11165 fr_hint_status = VDEC_NEED_HINT;
11166#endif
11167 hevc->stat |= STAT_VF_HOOK;
11168
11169 hevc->timer.data = (ulong) hevc;
11170 hevc->timer.function = vh265_check_timer_func;
11171 hevc->timer.expires = jiffies + PUT_INTERVAL;
11172
11173 add_timer(&hevc->timer);
11174
11175 hevc->stat |= STAT_TIMER_ARM;
11176
11177 if (use_cma) {
11178#ifdef USE_UNINIT_SEMA
11179 sema_init(&hevc->h265_uninit_done_sema, 0);
11180#endif
11181 if (h265_task == NULL) {
11182 sema_init(&h265_sema, 1);
11183 h265_task =
11184 kthread_run(h265_task_handle, hevc,
11185 "kthread_h265");
11186 }
11187 }
11188 /* hevc->stat |= STAT_KTHREAD; */
11189#if 0
11190 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11191 hevc_print(hevc, 0, "%s force clk\n", __func__);
11192 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11193 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11194 ((1 << 2) | (1 << 1)));
11195 WRITE_VREG(HEVC_DBLK_CFG0,
11196 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11197 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11198 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11199 (1 << 2)); /* 2 */
11200 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11201 (1 << 24)); /* 24 */
11202 WRITE_VREG(HEVC_STREAM_CONTROL,
11203 READ_VREG(HEVC_STREAM_CONTROL) |
11204 (1 << 15)); /* 15 */
11205 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11206 (1 << 13)); /* 13 */
11207 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11208 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11209 (1 << 15)); /* 15 */
11210 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11211 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11212 (1 << 15)); /* 15 */
11213 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11214 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11215 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11216 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11217 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11218 (1 << 3)); /* 3 */
11219 }
11220#endif
11221#ifdef SWAP_HEVC_UCODE
11222 if (!tee_enabled() && hevc->is_swap &&
11223 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11224 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11225 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11226 }
11227#endif
11228
11229#ifndef MULTI_INSTANCE_SUPPORT
11230 set_vdec_func(&vh265_dec_status);
11231#endif
11232 amhevc_start();
11233 hevc->stat |= STAT_VDEC_RUN;
11234 hevc->init_flag = 1;
11235 error_handle_threshold = 30;
11236 /* pr_info("%d, vh265_init, RP=0x%x\n",
11237 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11238 */
11239
11240 return 0;
11241}
11242
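/*
 * Legacy single-instance teardown: optionally wait for the current
 * slice to finish, stop the core, release IRQ/timer/provider, let the
 * helper thread free the CMA picture list and disable the IP.
 */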
11243static int vh265_stop(struct hevc_state_s *hevc)
11244{
11245 if (get_dbg_flag(hevc) &
11246 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11247 int wait_timeout_count = 0;
11248
11249 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11250 HEVC_CODED_SLICE_SEGMENT_DAT &&
11251 wait_timeout_count < 10){
11252 wait_timeout_count++;
11253 msleep(20);
11254 }
11255 }
11256 if (hevc->stat & STAT_VDEC_RUN) {
11257 amhevc_stop();
11258 hevc->stat &= ~STAT_VDEC_RUN;
11259 }
11260
11261 if (hevc->stat & STAT_ISR_REG) {
11262#ifdef MULTI_INSTANCE_SUPPORT
11263 if (!hevc->m_ins_flag)
11264#endif
11265 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11266 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11267 hevc->stat &= ~STAT_ISR_REG;
11268 }
11269
11270 hevc->stat &= ~STAT_TIMER_INIT;
11271 if (hevc->stat & STAT_TIMER_ARM) {
11272 del_timer_sync(&hevc->timer);
11273 hevc->stat &= ~STAT_TIMER_ARM;
11274 }
11275
11276 if (hevc->stat & STAT_VF_HOOK) {
11277 if (fr_hint_status == VDEC_HINTED) {
11278 vf_notify_receiver(hevc->provider_name,
11279 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11280 NULL);
11281 }
11282 fr_hint_status = VDEC_NO_NEED_HINT;
11283 vf_unreg_provider(&vh265_vf_prov);
11284 hevc->stat &= ~STAT_VF_HOOK;
11285 }
11286
11287 hevc_local_uninit(hevc);
11288
11289 if (use_cma) {
11290 hevc->uninit_list = 1;
11291 up(&h265_sema);
11292#ifdef USE_UNINIT_SEMA
11293 down(&hevc->h265_uninit_done_sema);
11294 if (!IS_ERR(h265_task)) {
11295 kthread_stop(h265_task);
11296 h265_task = NULL;
11297 }
11298#else
11299 while (hevc->uninit_list) /* wait uninit complete */
11300 msleep(20);
11301#endif
11302
11303 }
11304 hevc->init_flag = 0;
11305 hevc->first_sc_checked = 0;
11306 cancel_work_sync(&hevc->notify_work);
11307 cancel_work_sync(&hevc->set_clk_work);
11308 uninit_mmu_buffers(hevc);
11309 amhevc_disable();
11310
11311 kfree(gvs);
11312 gvs = NULL;
11313
11314 return 0;
11315}
11316
11317#ifdef MULTI_INSTANCE_SUPPORT
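/* Decode-time bookkeeping used by the per-instance timeout watchdog. */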
11318static void reset_process_time(struct hevc_state_s *hevc)
11319{
11320 if (hevc->start_process_time) {
11321 unsigned int process_time =
11322 1000 * (jiffies - hevc->start_process_time) / HZ;
11323 hevc->start_process_time = 0;
11324 if (process_time > max_process_time[hevc->index])
11325 max_process_time[hevc->index] = process_time;
11326 }
11327}
11328
11329static void start_process_time(struct hevc_state_s *hevc)
11330{
11331 hevc->start_process_time = jiffies;
11332 hevc->decode_timeout_count = 2;
11333 hevc->last_lcu_idx = 0;
11334}
11335
11336static void restart_process_time(struct hevc_state_s *hevc)
11337{
11338 hevc->start_process_time = jiffies;
11339 hevc->decode_timeout_count = 2;
11340}
11341
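/*
 * Called from the watchdog when decoding of one picture stalls: stop
 * the core, mark the current picture as decoded (with error checking)
 * and hand the rest of the handling to the timeout work item.
 */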
11342static void timeout_process(struct hevc_state_s *hevc)
11343{
11344 /*
11345 	 * If vh265_work is already pending at this timeout point,
11346 	 * let it handle the scenario instead.
11347 */
11348 if (work_pending(&hevc->work))
11349 return;
11350
11351 hevc->timeout_num++;
11352 amhevc_stop();
11353 read_decode_info(hevc);
11354
11355 hevc_print(hevc,
11356 0, "%s decoder timeout\n", __func__);
11357 check_pic_decoded_error(hevc,
11358 hevc->pic_decoded_lcu_idx);
11359 hevc->decoded_poc = hevc->curr_POC;
11360 hevc->decoding_pic = NULL;
11361 hevc->dec_result = DEC_RESULT_DONE;
11362 reset_process_time(hevc);
11363
11364 if (work_pending(&hevc->work))
11365 return;
11366 vdec_schedule_work(&hevc->timeout_work);
11367}
11368
11369#ifdef CONSTRAIN_MAX_BUF_NUM
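/*
 * Count pictures that are held only for display: output_ready set,
 * no longer referenced and with output_mark already cleared.
 */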
11370static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11371{
11372 struct PIC_s *pic;
11373 int i;
11374 int count = 0;
11375 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11376 pic = hevc->m_PIC[i];
11377 if (pic == NULL || pic->index == -1)
11378 continue;
11379 if (pic->output_mark == 0 && pic->referenced == 0
11380 && pic->output_ready == 1)
11381 count++;
11382 }
11383
11384 return count;
11385}
11386
11387static int get_used_buf_count(struct hevc_state_s *hevc)
11388{
11389 struct PIC_s *pic;
11390 int i;
11391 int count = 0;
11392 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11393 pic = hevc->m_PIC[i];
11394 if (pic == NULL || pic->index == -1)
11395 continue;
11396 if (pic->output_mark != 0 || pic->referenced != 0
11397 || pic->output_ready != 0)
11398 count++;
11399 }
11400
11401 return count;
11402}
11403#endif
11404
11405
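/*
 * Return 1 when a free picture buffer (unreferenced, not queued for
 * output) exists, or when the pic list is not initialized yet. On the
 * non-V4L path, if the DPB is exhausted by error-marked reference
 * pictures while the receiver is inactive, drop those references so
 * decoding can make progress.
 */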
11406static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11407{
11408 struct PIC_s *new_pic = NULL;
11409 struct PIC_s *pic;
11410 /* recycle un-used pic */
11411 int i;
11412 int ref_pic = 0;
11413 struct vdec_s *vdec = hw_to_vdec(hevc);
11414 /*return 1 if pic_list is not initialized yet*/
11415 if (hevc->pic_list_init_flag != 3)
11416 return 1;
11417
11418 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11419 pic = hevc->m_PIC[i];
11420 if (pic == NULL || pic->index == -1)
11421 continue;
11422 if (pic->referenced == 1)
11423 ref_pic++;
11424 if (pic->output_mark == 0 && pic->referenced == 0
11425 && pic->output_ready == 0
11426 ) {
11427 if (new_pic) {
11428 if (pic->POC < new_pic->POC)
11429 new_pic = pic;
11430 } else
11431 new_pic = pic;
11432 }
11433 }
11434/* If the number of reference frames in the DPB >= (DPB buffer size - number of reorders - 3) */
11435/* and the back-end state is RECEIVER_INACTIVE, the decoder is left with no buffer to decode */
11436/* into; remove all error-marked reference frames and set the error flag. */
11437/* The 3 accounts for the 2 fields the back-end needs for display and the 1 field needed for */
11438/* decoding when the stream is interlaced. */
11439 if ((!hevc->is_used_v4l) && (new_pic == NULL) &&
11440 (ref_pic >=
11441 get_work_pic_num(hevc) -
11442 hevc->sps_num_reorder_pics_0 - 3)) {
11443 enum receviver_start_e state = RECEIVER_INACTIVE;
11444 if (vf_get_receiver(vdec->vf_provider_name)) {
11445 state =
11446 vf_notify_receiver(vdec->vf_provider_name,
11447 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11448 NULL);
11449 if ((state == RECEIVER_STATE_NULL)
11450 || (state == RECEIVER_STATE_NONE))
11451 state = RECEIVER_INACTIVE;
11452 }
11453 if (state == RECEIVER_INACTIVE) {
11454 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11455 pic = hevc->m_PIC[i];
11456 if (pic == NULL || pic->index == -1)
11457 continue;
11458
11459 if ((pic->referenced == 1) &&
11460 (pic->error_mark == 1)) {
11461 pic->referenced = 0;
11462 put_mv_buf(hevc, pic);
11463 }
11464 pic->error_mark = 1;
11465 }
11466 }
11467 }
11468
11469 return (new_pic != NULL) ? 1 : 0;
11470}
11471
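/* Multi-instance teardown counterpart of vh265_stop(). */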
11472static int vmh265_stop(struct hevc_state_s *hevc)
11473{
11474 if (hevc->stat & STAT_TIMER_ARM) {
11475 del_timer_sync(&hevc->timer);
11476 hevc->stat &= ~STAT_TIMER_ARM;
11477 }
11478 if (hevc->stat & STAT_VDEC_RUN) {
11479 amhevc_stop();
11480 hevc->stat &= ~STAT_VDEC_RUN;
11481 }
11482 if (hevc->stat & STAT_ISR_REG) {
11483 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11484 hevc->stat &= ~STAT_ISR_REG;
11485 }
11486
11487 if (hevc->stat & STAT_VF_HOOK) {
11488 if (fr_hint_status == VDEC_HINTED)
11489 vf_notify_receiver(hevc->provider_name,
11490 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11491 NULL);
11492 fr_hint_status = VDEC_NO_NEED_HINT;
11493 vf_unreg_provider(&vh265_vf_prov);
11494 hevc->stat &= ~STAT_VF_HOOK;
11495 }
11496
11497 hevc_local_uninit(hevc);
11498
11499 hevc->init_flag = 0;
11500 hevc->first_sc_checked = 0;
11501 cancel_work_sync(&hevc->notify_work);
11502 cancel_work_sync(&hevc->set_clk_work);
11503 cancel_work_sync(&hevc->timeout_work);
11504
11505 uninit_mmu_buffers(hevc);
11506
11507 if (use_cma) {
11508 hevc->uninit_list = 1;
11509 reset_process_time(hevc);
11510 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
11511 vdec_schedule_work(&hevc->work);
11512 flush_work(&hevc->work);
11513#ifdef USE_UNINIT_SEMA
11514 if (hevc->init_flag) {
11515 down(&hevc->h265_uninit_done_sema);
11516 }
11517#else
11518 while (hevc->uninit_list) /* wait uninit complete */
11519 msleep(20);
11520#endif
11521 }
11522 cancel_work_sync(&hevc->work);
11523
11524 vfree(hevc->fw);
11525 hevc->fw = NULL;
11526
11527 dump_log(hevc);
11528 return 0;
11529}
11530
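/*
 * Sum the bytes of the current input chunk (mapping the block first when it
 * is not already mapped). Used only for debug prints and logs; the return
 * type truncates the sum to 8 bits.
 */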
11531static unsigned char get_data_check_sum
11532 (struct hevc_state_s *hevc, int size)
11533{
11534 int jj;
11535 int sum = 0;
11536 u8 *data = NULL;
11537
11538 if (!hevc->chunk->block->is_mapped)
11539 data = codec_mm_vmap(hevc->chunk->block->start +
11540 hevc->chunk->offset, size);
11541 else
11542 data = ((u8 *)hevc->chunk->block->start_virt) +
11543 hevc->chunk->offset;
11544
11545 for (jj = 0; jj < size; jj++)
11546 sum += data[jj];
11547
11548 if (!hevc->chunk->block->is_mapped)
11549 codec_mm_unmap_phyaddr(data);
11550 return sum;
11551}
11552
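/*
 * Deferred work that reports the stream frame rate (frame_dur) to the
 * vframe receiver once, via the FR_HINT provider event.
 */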
11553static void vh265_notify_work(struct work_struct *work)
11554{
11555 struct hevc_state_s *hevc =
11556 container_of(work,
11557 struct hevc_state_s,
11558 notify_work);
11559 struct vdec_s *vdec = hw_to_vdec(hevc);
11560#ifdef MULTI_INSTANCE_SUPPORT
11561 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
11562 vf_notify_receiver(hevc->provider_name,
11563 VFRAME_EVENT_PROVIDER_FR_HINT,
11564 (void *)
11565 ((unsigned long)hevc->frame_dur));
11566 vdec->fr_hint_state = VDEC_HINTED;
11567 } else if (fr_hint_status == VDEC_NEED_HINT) {
11568 vf_notify_receiver(hevc->provider_name,
11569 VFRAME_EVENT_PROVIDER_FR_HINT,
11570 (void *)
11571 ((unsigned long)hevc->frame_dur));
11572 fr_hint_status = VDEC_HINTED;
11573 }
11574#else
11575	if (fr_hint_status == VDEC_NEED_HINT) {
11576		vf_notify_receiver(PROVIDER_NAME,
11577		VFRAME_EVENT_PROVIDER_FR_HINT,
11578		(void *)
11579		((unsigned long)hevc->frame_dur));
11580		fr_hint_status = VDEC_HINTED;
11581	}
11582#endif
11583
11584 return;
11585}
11586
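/*
 * Bottom half of the decode state machine. Handles hevc->dec_result after
 * the front end finishes or times out: frees the pic list on FREE_CANVAS,
 * completes pic list init, feeds more data for GET_DATA/GET_DATA_RETRY,
 * accounts a decoded frame on DONE, flushes output on EOS and shuts the
 * core down on FORCE_EXIT, before handing the hardware back to the vdec
 * core and notifying the scheduler callback.
 */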
11587static void vh265_work_implement(struct hevc_state_s *hevc,
11588	struct vdec_s *vdec, int from)
11589{
11590 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
11591 /*USE_BUF_BLOCK*/
11592 uninit_pic_list(hevc);
11593 hevc_print(hevc, 0, "uninit list\n");
11594 hevc->uninit_list = 0;
11595#ifdef USE_UNINIT_SEMA
11596 up(&hevc->h265_uninit_done_sema);
11597#endif
11598 return;
11599 }
11600
11601 /* finished decoding one frame or error,
11602 * notify vdec core to switch context
11603 */
11604 if (hevc->pic_list_init_flag == 1
11605 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
11606 hevc->pic_list_init_flag = 2;
11607 init_pic_list(hevc);
11608 init_pic_list_hw(hevc);
11609 init_buf_spec(hevc);
11610 hevc_print(hevc, 0,
11611 "set pic_list_init_flag to 2\n");
11612
11613 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11614 return;
11615 }
11616
11617 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11618 "%s dec_result %d %x %x %x\n",
11619 __func__,
11620 hevc->dec_result,
11621 READ_VREG(HEVC_STREAM_LEVEL),
11622 READ_VREG(HEVC_STREAM_WR_PTR),
11623 READ_VREG(HEVC_STREAM_RD_PTR));
11624
11625 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
11626 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
11627 && (hw_to_vdec(hevc)->next_status !=
11628 VDEC_STATUS_DISCONNECTED)) {
11629 if (!vdec_has_more_input(vdec)) {
11630 hevc->dec_result = DEC_RESULT_EOS;
11631 vdec_schedule_work(&hevc->work);
11632 return;
11633 }
11634 if (!input_frame_based(vdec)) {
11635 int r = vdec_sync_input(vdec);
11636 if (r >= 0x200) {
11637 WRITE_VREG(HEVC_DECODE_SIZE,
11638 READ_VREG(HEVC_DECODE_SIZE) + r);
11639
11640 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11641 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
11642 __func__,
11643 READ_VREG(HEVC_STREAM_LEVEL),
11644 READ_VREG(HEVC_STREAM_WR_PTR),
11645 READ_VREG(HEVC_STREAM_RD_PTR),
11646 READ_VREG(HEVC_MPC_E), r);
11647
11648 start_process_time(hevc);
11649 if (READ_VREG(HEVC_DEC_STATUS_REG)
11650 == HEVC_DECODE_BUFEMPTY2)
11651 WRITE_VREG(HEVC_DEC_STATUS_REG,
11652 HEVC_ACTION_DONE);
11653 else
11654 WRITE_VREG(HEVC_DEC_STATUS_REG,
11655 HEVC_ACTION_DEC_CONT);
11656 } else {
11657 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11658 vdec_schedule_work(&hevc->work);
11659 }
11660 return;
11661 }
11662
11663 /*below for frame_base*/
11664 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
11665 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11666 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
11667 __func__,
11668 READ_VREG(HEVC_STREAM_LEVEL),
11669 READ_VREG(HEVC_STREAM_WR_PTR),
11670 READ_VREG(HEVC_STREAM_RD_PTR),
11671 READ_VREG(HEVC_MPC_E));
11672 mutex_lock(&hevc->chunks_mutex);
11673 vdec_vframe_dirty(vdec, hevc->chunk);
11674 hevc->chunk = NULL;
11675 mutex_unlock(&hevc->chunks_mutex);
11676 vdec_clean_input(vdec);
11677 }
11678
11679 /*if (is_new_pic_available(hevc)) {*/
11680 if (run_ready(vdec, VDEC_HEVC)) {
11681 int r;
11682 int decode_size;
11683 r = vdec_prepare_input(vdec, &hevc->chunk);
11684 if (r < 0) {
11685 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11686
11687 hevc_print(hevc,
11688 PRINT_FLAG_VDEC_DETAIL,
11689 "amvdec_vh265: Insufficient data\n");
11690
11691 vdec_schedule_work(&hevc->work);
11692 return;
11693 }
11694 hevc->dec_result = DEC_RESULT_NONE;
11695 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11696 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
11697 __func__, r,
11698 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
11699 get_data_check_sum(hevc, r) : 0,
11700 READ_VREG(HEVC_MPC_E));
11701
11702 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
11703 int jj;
11704 u8 *data = NULL;
11705
11706 if (!hevc->chunk->block->is_mapped)
11707 data = codec_mm_vmap(
11708 hevc->chunk->block->start +
11709 hevc->chunk->offset, r);
11710 else
11711 data = ((u8 *)
11712 hevc->chunk->block->start_virt)
11713 + hevc->chunk->offset;
11714
11715 for (jj = 0; jj < r; jj++) {
11716 if ((jj & 0xf) == 0)
11717 hevc_print(hevc,
11718 PRINT_FRAMEBASE_DATA,
11719 "%06x:", jj);
11720 hevc_print_cont(hevc,
11721 PRINT_FRAMEBASE_DATA,
11722 "%02x ", data[jj]);
11723 if (((jj + 1) & 0xf) == 0)
11724 hevc_print_cont(hevc,
11725 PRINT_FRAMEBASE_DATA,
11726 "\n");
11727 }
11728
11729 if (!hevc->chunk->block->is_mapped)
11730 codec_mm_unmap_phyaddr(data);
11731 }
11732
11733 decode_size = hevc->chunk->size +
11734 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
11735 WRITE_VREG(HEVC_DECODE_SIZE,
11736 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
11737
11738 vdec_enable_input(vdec);
11739
11740 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11741 "%s: mpc %x\n",
11742 __func__, READ_VREG(HEVC_MPC_E));
11743
11744 start_process_time(hevc);
11745 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11746	} else {
11747 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
11748
11749 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
11750 * "amvdec_vh265: Insufficient data\n");
11751 */
11752
11753 vdec_schedule_work(&hevc->work);
11754 }
11755 return;
11756 } else if (hevc->dec_result == DEC_RESULT_DONE) {
11757 /* if (!hevc->ctx_valid)
11758 hevc->ctx_valid = 1; */
11759 decode_frame_count[hevc->index]++;
11760#ifdef DETREFILL_ENABLE
11761 if (hevc->is_swap &&
11762 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11763 if (hevc->delrefill_check == 2) {
11764 delrefill(hevc);
11765 amhevc_stop();
11766 }
11767 }
11768#endif
11769 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
11770 hevc->used_4k_num =
11771 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
11772 if (hevc->used_4k_num >= 0 &&
11773 hevc->cur_pic &&
11774 hevc->cur_pic->scatter_alloc
11775 == 1) {
11776 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
11777 "%s pic index %d scatter_alloc %d page_start %d\n",
11778 "decoder_mmu_box_free_idx_tail",
11779 hevc->cur_pic->index,
11780 hevc->cur_pic->scatter_alloc,
11781 hevc->used_4k_num);
11782 if (hevc->m_ins_flag)
11783 hevc_mmu_dma_check(hw_to_vdec(hevc));
11784 decoder_mmu_box_free_idx_tail(
11785 hevc->mmu_box,
11786 hevc->cur_pic->index,
11787 hevc->used_4k_num);
11788 hevc->cur_pic->scatter_alloc = 2;
11789 }
11790 }
11791 hevc->pic_decoded_lcu_idx =
11792 READ_VREG(HEVC_PARSER_LCU_START)
11793 & 0xffffff;
11794
11795 if (vdec->master == NULL && vdec->slave == NULL &&
11796 hevc->empty_flag == 0) {
11797 hevc->over_decode =
11798 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
11799 if (hevc->over_decode)
11800 hevc_print(hevc, 0,
11801 "!!!Over decode\n");
11802 }
11803
11804 if (is_log_enable(hevc))
11805 add_log(hevc,
11806 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
11807 __func__,
11808 hevc->dec_result,
11809 hevc->pic_decoded_lcu_idx,
11810 hevc->used_4k_num,
11811 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11812 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11813 hevc->start_shift_bytes
11814 );
11815
11816 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11817 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
11818 __func__,
11819 hevc->dec_result,
11820 READ_VREG(HEVC_STREAM_LEVEL),
11821 READ_VREG(HEVC_STREAM_WR_PTR),
11822 READ_VREG(HEVC_STREAM_RD_PTR),
11823 hevc->pic_decoded_lcu_idx,
11824 hevc->used_4k_num,
11825 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
11826 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
11827 hevc->start_shift_bytes
11828 );
11829
11830 hevc->used_4k_num = -1;
11831
11832 check_pic_decoded_error(hevc,
11833 hevc->pic_decoded_lcu_idx);
11834#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11835#if 1
11836 if (vdec->slave) {
11837 if (dv_debug & 0x1)
11838 vdec_set_flag(vdec->slave,
11839 VDEC_FLAG_SELF_INPUT_CONTEXT);
11840 else
11841 vdec_set_flag(vdec->slave,
11842 VDEC_FLAG_OTHER_INPUT_CONTEXT);
11843 }
11844#else
11845 if (vdec->slave) {
11846 if (no_interleaved_el_slice)
11847 vdec_set_flag(vdec->slave,
11848 VDEC_FLAG_INPUT_KEEP_CONTEXT);
11849 /* this will move real HW pointer for input */
11850 else
11851 vdec_set_flag(vdec->slave, 0);
11852 /* this will not move real HW pointer
11853 *and SL layer decoding
11854 *will start from same stream position
11855 *as current BL decoder
11856 */
11857 }
11858#endif
11859#endif
11860#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11861 hevc->shift_byte_count_lo
11862 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11863 if (vdec->slave) {
11864 /*cur is base, found enhance*/
11865 struct hevc_state_s *hevc_el =
11866 (struct hevc_state_s *)
11867 vdec->slave->private;
11868 if (hevc_el)
11869 hevc_el->shift_byte_count_lo =
11870 hevc->shift_byte_count_lo;
11871 } else if (vdec->master) {
11872 /*cur is enhance, found base*/
11873 struct hevc_state_s *hevc_ba =
11874 (struct hevc_state_s *)
11875 vdec->master->private;
11876 if (hevc_ba)
11877 hevc_ba->shift_byte_count_lo =
11878 hevc->shift_byte_count_lo;
11879 }
11880#endif
11881 mutex_lock(&hevc->chunks_mutex);
11882 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11883 hevc->chunk = NULL;
11884 mutex_unlock(&hevc->chunks_mutex);
11885 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
11886 /*
11887 stream base: stream buf empty or timeout
11888 frame base: vdec_prepare_input fail
11889 */
11890 if (!vdec_has_more_input(vdec)) {
11891 hevc->dec_result = DEC_RESULT_EOS;
11892 vdec_schedule_work(&hevc->work);
11893 return;
11894 }
11895#ifdef AGAIN_HAS_THRESHOLD
11896 hevc->next_again_flag = 1;
11897#endif
11898 } else if (hevc->dec_result == DEC_RESULT_EOS) {
11899 struct PIC_s *pic;
11900 hevc->eos = 1;
11901#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11902 if ((vdec->master || vdec->slave) &&
11903 aux_data_is_avaible(hevc))
11904 dolby_get_meta(hevc);
11905#endif
11906 check_pic_decoded_error(hevc,
11907 hevc->pic_decoded_lcu_idx);
11908 pic = get_pic_by_POC(hevc, hevc->curr_POC);
11909 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11910 "%s: end of stream, last dec poc %d => 0x%pf\n",
11911 __func__, hevc->curr_POC, pic);
11912 flush_output(hevc, pic);
11913
11914 if (hevc->is_used_v4l)
11915 notify_v4l_eos(hw_to_vdec(hevc));
11916#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11917 hevc->shift_byte_count_lo
11918 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
11919 if (vdec->slave) {
11920 /*cur is base, found enhance*/
11921 struct hevc_state_s *hevc_el =
11922 (struct hevc_state_s *)
11923 vdec->slave->private;
11924 if (hevc_el)
11925 hevc_el->shift_byte_count_lo =
11926 hevc->shift_byte_count_lo;
11927 } else if (vdec->master) {
11928 /*cur is enhance, found base*/
11929 struct hevc_state_s *hevc_ba =
11930 (struct hevc_state_s *)
11931 vdec->master->private;
11932 if (hevc_ba)
11933 hevc_ba->shift_byte_count_lo =
11934 hevc->shift_byte_count_lo;
11935 }
11936#endif
11937 mutex_lock(&hevc->chunks_mutex);
11938 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
11939 hevc->chunk = NULL;
11940 mutex_unlock(&hevc->chunks_mutex);
11941 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
11942 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
11943 "%s: force exit\n",
11944 __func__);
11945 if (hevc->stat & STAT_VDEC_RUN) {
11946 amhevc_stop();
11947 hevc->stat &= ~STAT_VDEC_RUN;
11948 }
11949 if (hevc->stat & STAT_ISR_REG) {
11950 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11951 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11952 hevc->stat &= ~STAT_ISR_REG;
11953 }
11954 hevc_print(hevc, 0, "%s: force exit end\n",
11955 __func__);
11956 }
11957
11958 if (hevc->stat & STAT_VDEC_RUN) {
11959 amhevc_stop();
11960 hevc->stat &= ~STAT_VDEC_RUN;
11961 }
11962
11963 if (hevc->stat & STAT_TIMER_ARM) {
11964 del_timer_sync(&hevc->timer);
11965 hevc->stat &= ~STAT_TIMER_ARM;
11966 }
11967
11968 wait_hevc_search_done(hevc);
11969#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11970 if (hevc->switch_dvlayer_flag) {
11971 if (vdec->slave)
11972 vdec_set_next_sched(vdec, vdec->slave);
11973 else if (vdec->master)
11974 vdec_set_next_sched(vdec, vdec->master);
11975 } else if (vdec->slave || vdec->master)
11976 vdec_set_next_sched(vdec, vdec);
11977#endif
11978
11979 if (from == 1) {
11980 /* This is a timeout work */
11981 if (work_pending(&hevc->work)) {
11982			/*
11983			 * The normal vh265_work arrived at the last moment;
11984			 * give it the chance to handle this scenario.
11985			 */
11986			return;
11987			//cancel_work_sync(&hevc->work);//reserved for future consideration
11988 }
11989 }
11990
11991 /* mark itself has all HW resource released and input released */
11992 if (vdec->parallel_dec == 1)
11993 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
11994 else
11995 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
11996
11997 if (hevc->is_used_v4l) {
11998 struct aml_vcodec_ctx *ctx =
11999 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12000
12001 if (ctx->param_sets_from_ucode &&
12002 !hevc->v4l_params_parsed)
12003 vdec_v4l_write_frame_sync(ctx);
12004 }
12005
12006 if (hevc->vdec_cb)
12007 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
12008}
12009
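/*
 * vh265_work and vh265_timeout_work are thin wrappers around
 * vh265_work_implement(); "from" distinguishes the normal work item (0)
 * from the decode-timeout path (1).
 */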
12010static void vh265_work(struct work_struct *work)
12011{
12012 struct hevc_state_s *hevc = container_of(work,
12013 struct hevc_state_s, work);
12014 struct vdec_s *vdec = hw_to_vdec(hevc);
12015
12016 vh265_work_implement(hevc, vdec, 0);
12017}
12018
12019static void vh265_timeout_work(struct work_struct *work)
12020{
12021 struct hevc_state_s *hevc = container_of(work,
12022 struct hevc_state_s, timeout_work);
12023 struct vdec_s *vdec = hw_to_vdec(hevc);
12024
12025 if (work_pending(&hevc->work))
12026 return;
12027 vh265_work_implement(hevc, vdec, 1);
12028}
12029
12030
12031static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
12032{
12033 /* new to do ... */
12034 vh265_prot_init(hevc);
12035 return 0;
12036}
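
/*
 * Decide whether this instance can be scheduled. Bails out while stepping,
 * after EOS, when the stream buffer level or the AGAIN threshold is not
 * met, when too many buffers are tied up in the display path
 * (CONSTRAIN_MAX_BUF_NUM), or when the v4l2 capture side is not ready.
 * Returns the core mask to claim, or 0 if not ready to run.
 */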
12037static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
12038{
12039 struct hevc_state_s *hevc =
12040 (struct hevc_state_s *)vdec->private;
12041 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
12042 CODEC_MM_FLAGS_TVP : 0;
12043	bool ret = false;
12044 if (step == 0x12)
12045 return 0;
12046 else if (step == 0x11)
12047 step = 0x12;
12048
12049 if (hevc->eos)
12050 return 0;
12051 if (!hevc->first_sc_checked && hevc->mmu_enable) {
12052 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
12053		hevc->first_sc_checked = 1;
12054 hevc_print(hevc, 0,
12055 "vh265 cached=%d need_size=%d speed= %d ms\n",
12056 size, (hevc->need_cache_size >> PAGE_SHIFT),
12057 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
12058 }
12059 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
12060 && pre_decode_buf_level != 0) {
12061 u32 rp, wp, level;
12062
12063 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
12064 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
12065 if (wp < rp)
12066 level = vdec->input.size + wp - rp;
12067 else
12068 level = wp - rp;
12069
12070 if (level < pre_decode_buf_level)
12071 return 0;
12072 }
12073
12074#ifdef AGAIN_HAS_THRESHOLD
12075 if (hevc->next_again_flag &&
12076 (!vdec_frame_based(vdec))) {
12077 u32 parser_wr_ptr =
12078 READ_PARSER_REG(PARSER_VIDEO_WP);
12079 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
12080 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
12081 again_threshold) {
12082 int r = vdec_sync_input(vdec);
12083 hevc_print(hevc,
12084				PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
12085 return 0;
12086 }
12087 }
12088#endif
12089
12090 if (disp_vframe_valve_level &&
12091 kfifo_len(&hevc->display_q) >=
12092 disp_vframe_valve_level) {
12093 hevc->valve_count--;
12094 if (hevc->valve_count <= 0)
12095 hevc->valve_count = 2;
12096 else
12097 return 0;
12098 }
12099
12100 ret = is_new_pic_available(hevc);
12101 if (!ret) {
12102 hevc_print(hevc,
12103 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
12104 __func__, ret);
12105 }
12106
12107#ifdef CONSTRAIN_MAX_BUF_NUM
12108 if (hevc->pic_list_init_flag == 3) {
12109 if (run_ready_max_vf_only_num > 0 &&
12110 get_vf_ref_only_buf_count(hevc) >=
12111 run_ready_max_vf_only_num
12112 )
12113 ret = 0;
12114 if (run_ready_display_q_num > 0 &&
12115 kfifo_len(&hevc->display_q) >=
12116 run_ready_display_q_num)
12117 ret = 0;
12118
12119		/* avoid consuming more buffers when
12120		 * switching resolution */
12121 if (run_ready_max_buf_num == 0xff &&
12122 get_used_buf_count(hevc) >=
12123 get_work_pic_num(hevc))
12124 ret = 0;
12125 else if (run_ready_max_buf_num &&
12126 get_used_buf_count(hevc) >=
12127 run_ready_max_buf_num)
12128 ret = 0;
12129 }
12130#endif
12131
12132 if (hevc->is_used_v4l) {
12133 struct aml_vcodec_ctx *ctx =
12134 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12135
12136 if (ctx->param_sets_from_ucode &&
12137 !ctx->v4l_codec_ready &&
12138 hevc->v4l_params_parsed) {
12139			ret = 0; /*the params have been parsed.*/
12140 } else if (!ctx->v4l_codec_dpb_ready)
12141 ret = 0;
12142 }
12143
12144 if (ret)
12145 not_run_ready[hevc->index] = 0;
12146 else
12147 not_run_ready[hevc->index]++;
12148 if (vdec->parallel_dec == 1)
12149 return ret ? (CORE_MASK_HEVC) : 0;
12150 else
12151 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
12152}
12153
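/*
 * Schedule one decode pass: prepare the next input chunk, (re)load the
 * HEVC firmware if needed, restore the hardware context, program the
 * decode size, then kick the core and arm the watchdog timer.
 */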
12154static void run(struct vdec_s *vdec, unsigned long mask,
12155 void (*callback)(struct vdec_s *, void *), void *arg)
12156{
12157 struct hevc_state_s *hevc =
12158 (struct hevc_state_s *)vdec->private;
12159 int r, loadr = 0;
12160 unsigned char check_sum = 0;
12161
12162 run_count[hevc->index]++;
12163 hevc->vdec_cb_arg = arg;
12164 hevc->vdec_cb = callback;
12165 hevc->aux_data_dirty = 1;
12166 hevc_reset_core(vdec);
12167
12168#ifdef AGAIN_HAS_THRESHOLD
12169 hevc->pre_parser_wr_ptr =
12170 READ_PARSER_REG(PARSER_VIDEO_WP);
12171 hevc->next_again_flag = 0;
12172#endif
12173 r = vdec_prepare_input(vdec, &hevc->chunk);
12174 if (r < 0) {
12175 input_empty[hevc->index]++;
12176 hevc->dec_result = DEC_RESULT_AGAIN;
12177 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12178 "ammvdec_vh265: Insufficient data\n");
12179
12180 vdec_schedule_work(&hevc->work);
12181 return;
12182 }
12183 input_empty[hevc->index] = 0;
12184 hevc->dec_result = DEC_RESULT_NONE;
12185 if (vdec_frame_based(vdec) &&
12186 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12187 || is_log_enable(hevc)))
12188 check_sum = get_data_check_sum(hevc, r);
12189
12190 if (is_log_enable(hevc))
12191 add_log(hevc,
12192 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12193 __func__, r,
12194 check_sum,
12195 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12196 );
12197 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12198 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12199 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12200 __func__, r,
12201 check_sum,
12202 READ_VREG(HEVC_STREAM_LEVEL),
12203 READ_VREG(HEVC_STREAM_WR_PTR),
12204 READ_VREG(HEVC_STREAM_RD_PTR),
12205 READ_PARSER_REG(PARSER_VIDEO_RP),
12206 READ_PARSER_REG(PARSER_VIDEO_WP),
12207 hevc->start_shift_bytes
12208 );
12209 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12210 input_frame_based(vdec)) {
12211 int jj;
12212 u8 *data = NULL;
12213
12214 if (!hevc->chunk->block->is_mapped)
12215 data = codec_mm_vmap(hevc->chunk->block->start +
12216 hevc->chunk->offset, r);
12217 else
12218 data = ((u8 *)hevc->chunk->block->start_virt)
12219 + hevc->chunk->offset;
12220
12221 for (jj = 0; jj < r; jj++) {
12222 if ((jj & 0xf) == 0)
12223 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12224 "%06x:", jj);
12225 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12226 "%02x ", data[jj]);
12227 if (((jj + 1) & 0xf) == 0)
12228 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12229 "\n");
12230 }
12231
12232 if (!hevc->chunk->block->is_mapped)
12233 codec_mm_unmap_phyaddr(data);
12234 }
12235 if (vdec->mc_loaded) {
12236	/* firmware has been loaded before
12237	 * and has not changed to another one,
12238	 * so skip reloading.
12239	 */
12240 if (tee_enabled() && hevc->is_swap &&
12241 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12242 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12243 } else {
12244 if (hevc->mmu_enable)
12245 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12246 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12247 "h265_mmu", hevc->fw->data);
12248 else {
12249 if (!hevc->is_4k) {
12250				/* try the swap fw first; if an older fw package */
12251				/* is installed it does not contain the swap fw, */
12252				/* so fall back to the no-swap fw on failure. */
12253 loadr = amhevc_vdec_loadmc_ex(
12254 VFORMAT_HEVC, vdec,
12255 "hevc_mmu_swap",
12256 hevc->fw->data);
12257 if (loadr < 0)
12258 loadr = amhevc_vdec_loadmc_ex(
12259 VFORMAT_HEVC, vdec,
12260 "h265_mmu",
12261 hevc->fw->data);
12262 else
12263 hevc->is_swap = true;
12264 } else
12265 loadr = amhevc_vdec_loadmc_ex(
12266 VFORMAT_HEVC, vdec,
12267 "h265_mmu", hevc->fw->data);
12268 }
12269 else
12270 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12271 NULL, hevc->fw->data);
12272 if (loadr < 0) {
12273 amhevc_disable();
12274 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12275 tee_enabled() ? "TEE" : "local", loadr);
12276 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12277 vdec_schedule_work(&hevc->work);
12278 return;
12279 }
12280
12281 if (tee_enabled() && hevc->is_swap &&
12282 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12283 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12284#ifdef DETREFILL_ENABLE
12285 if (hevc->is_swap &&
12286 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12287 init_detrefill_buf(hevc);
12288#endif
12289 vdec->mc_loaded = 1;
12290 vdec->mc_type = VFORMAT_HEVC;
12291 }
12292 if (vh265_hw_ctx_restore(hevc) < 0) {
12293 vdec_schedule_work(&hevc->work);
12294 return;
12295 }
12296 vdec_enable_input(vdec);
12297
12298 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12299
12300 if (vdec_frame_based(vdec)) {
12301 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12302 r = hevc->chunk->size +
12303 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12304 hevc->decode_size = r;
12305 }
12306#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12307 else {
12308 if (vdec->master || vdec->slave)
12309 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12310 hevc->shift_byte_count_lo);
12311 }
12312#endif
12313 WRITE_VREG(HEVC_DECODE_SIZE, r);
12314 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12315 hevc->init_flag = 1;
12316
12317 if (hevc->pic_list_init_flag == 3)
12318 init_pic_list_hw(hevc);
12319
12320 backup_decode_state(hevc);
12321
12322 start_process_time(hevc);
12323 mod_timer(&hevc->timer, jiffies);
12324 hevc->stat |= STAT_TIMER_ARM;
12325 hevc->stat |= STAT_ISR_REG;
12326 amhevc_start();
12327 hevc->stat |= STAT_VDEC_RUN;
12328}
12329
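/*
 * Return each picture's Y/UV canvas indexes to the vdec core. This is only
 * done when parallel_dec is set (canvases are presumably owned per
 * instance in that mode).
 */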
12330static void aml_free_canvas(struct vdec_s *vdec)
12331{
12332 int i;
12333 struct hevc_state_s *hevc =
12334 (struct hevc_state_s *)vdec->private;
12335
12336 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12337 struct PIC_s *pic = hevc->m_PIC[i];
12338
12339 if (pic) {
12340 if (vdec->parallel_dec == 1) {
12341 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
12342 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
12343 }
12344 }
12345 }
12346}
12347
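/*
 * vdec reset callback: cancel pending work, stop the core and timer, drop
 * MV buffers, canvases and local state, then re-run the local init so the
 * instance starts again from a clean pic list.
 */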
12348static void reset(struct vdec_s *vdec)
12349{
12350
12351 struct hevc_state_s *hevc =
12352 (struct hevc_state_s *)vdec->private;
12353 int i;
12354
12355 cancel_work_sync(&hevc->work);
12356 cancel_work_sync(&hevc->notify_work);
12357 if (hevc->stat & STAT_VDEC_RUN) {
12358 amhevc_stop();
12359 hevc->stat &= ~STAT_VDEC_RUN;
12360 }
12361
12362 if (hevc->stat & STAT_TIMER_ARM) {
12363 del_timer_sync(&hevc->timer);
12364 hevc->stat &= ~STAT_TIMER_ARM;
12365 }
12366 hevc->dec_result = DEC_RESULT_NONE;
12367 reset_process_time(hevc);
12368 hevc->init_flag = 0;
12369 hevc->pic_list_init_flag = 0;
12370 dealloc_mv_bufs(hevc);
12371 aml_free_canvas(vdec);
12372 hevc_local_uninit(hevc);
12373 if (vh265_local_init(hevc) < 0)
12374 pr_debug(" %s local init fail\n", __func__);
12375 for (i = 0; i < BUF_POOL_SIZE; i++) {
12376 hevc->m_BUF[i].start_adr = 0;
12377 }
12378
12379 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12380}
12381
12382static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12383{
12384 struct hevc_state_s *hevc =
12385 (struct hevc_state_s *)vdec->private;
12386
12387 return vh265_isr(0, hevc);
12388}
12389
12390static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12391{
12392 struct hevc_state_s *hevc =
12393 (struct hevc_state_s *)vdec->private;
12394
12395 return vh265_isr_thread_fn(0, hevc);
12396}
12397#endif
12398
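/*
 * Probe path for the legacy single-instance decoder: allocates the global
 * hevc state and the workspace via the BMMU box, clears the workspace for
 * non-secure playback, then brings the decoder up at the maximum clock.
 */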
12399static int amvdec_h265_probe(struct platform_device *pdev)
12400{
12401#ifdef MULTI_INSTANCE_SUPPORT
12402 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12403#else
12404 struct vdec_dev_reg_s *pdata =
12405 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12406#endif
12407 char *tmpbuf;
12408 int ret;
12409 struct hevc_state_s *hevc;
12410
12411 hevc = vmalloc(sizeof(struct hevc_state_s));
12412 if (hevc == NULL) {
12413 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12414 return -ENOMEM;
12415 }
12416 gHevc = hevc;
12417 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12418 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12419 H265_DEBUG_DIS_SYS_ERROR_PROC));
12420 memset(hevc, 0, sizeof(struct hevc_state_s));
12421 if (get_dbg_flag(hevc))
12422 hevc_print(hevc, 0, "%s\r\n", __func__);
12423 mutex_lock(&vh265_mutex);
12424
12425 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12426 (parser_sei_enable & 0x100) == 0)
12427 parser_sei_enable = 7; /*old 1*/
12428 hevc->m_ins_flag = 0;
12429 hevc->init_flag = 0;
12430 hevc->first_sc_checked = 0;
12431 hevc->uninit_list = 0;
12432 hevc->fatal_error = 0;
12433 hevc->show_frame_num = 0;
12434 hevc->frameinfo_enable = 1;
12435#ifdef MULTI_INSTANCE_SUPPORT
12436 hevc->platform_dev = pdev;
12437 platform_set_drvdata(pdev, pdata);
12438#endif
12439
12440 if (pdata == NULL) {
12441 hevc_print(hevc, 0,
12442 "\namvdec_h265 memory resource undefined.\n");
12443 vfree(hevc);
12444 mutex_unlock(&vh265_mutex);
12445 return -EFAULT;
12446 }
12447 if (mmu_enable_force == 0) {
12448 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12449 || double_write_mode == 0x10)
12450 hevc->mmu_enable = 0;
12451 else
12452 hevc->mmu_enable = 1;
12453 }
12454 if (init_mmu_buffers(hevc)) {
12455 hevc_print(hevc, 0,
12456 "\n 265 mmu init failed!\n");
12457 vfree(hevc);
12458 mutex_unlock(&vh265_mutex);
12459 return -EFAULT;
12460 }
12461
12462 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12463 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12464 if (ret < 0) {
12465 uninit_mmu_buffers(hevc);
12466 vfree(hevc);
12467 mutex_unlock(&vh265_mutex);
12468 return ret;
12469 }
12470 hevc->buf_size = work_buf_size;
12471
12472
12473 if (!vdec_secure(pdata)) {
12474 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12475 if (tmpbuf) {
12476 memset(tmpbuf, 0, work_buf_size);
12477 dma_sync_single_for_device(amports_get_dma_device(),
12478 hevc->buf_start,
12479 work_buf_size, DMA_TO_DEVICE);
12480 } else {
12481 tmpbuf = codec_mm_vmap(hevc->buf_start,
12482 work_buf_size);
12483 if (tmpbuf) {
12484 memset(tmpbuf, 0, work_buf_size);
12485 dma_sync_single_for_device(
12486 amports_get_dma_device(),
12487 hevc->buf_start,
12488 work_buf_size,
12489 DMA_TO_DEVICE);
12490 codec_mm_unmap_phyaddr(tmpbuf);
12491 }
12492 }
12493 }
12494
12495 if (get_dbg_flag(hevc)) {
12496 hevc_print(hevc, 0,
12497 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12498 hevc->buf_start, hevc->buf_size);
12499 }
12500
12501 if (pdata->sys_info)
12502 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12503 else {
12504 hevc->vh265_amstream_dec_info.width = 0;
12505 hevc->vh265_amstream_dec_info.height = 0;
12506 hevc->vh265_amstream_dec_info.rate = 30;
12507 }
12508#ifndef MULTI_INSTANCE_SUPPORT
12509 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12510 workaround_enable |= 3;
12511 hevc_print(hevc, 0,
12512 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
12513 } else
12514 workaround_enable &= ~3;
12515#endif
12516 hevc->cma_dev = pdata->cma_dev;
12517 vh265_vdec_info_init();
12518
12519#ifdef MULTI_INSTANCE_SUPPORT
12520 pdata->private = hevc;
12521 pdata->dec_status = vh265_dec_status;
12522 pdata->set_isreset = vh265_set_isreset;
12523 is_reset = 0;
12524 if (vh265_init(pdata) < 0) {
12525#else
12526 if (vh265_init(hevc) < 0) {
12527#endif
12528 hevc_print(hevc, 0,
12529 "\namvdec_h265 init failed.\n");
12530 hevc_local_uninit(hevc);
12531 uninit_mmu_buffers(hevc);
12532 vfree(hevc);
12533 pdata->dec_status = NULL;
12534 mutex_unlock(&vh265_mutex);
12535 return -ENODEV;
12536 }
12537 /*set the max clk for smooth playing...*/
12538 hevc_source_changed(VFORMAT_HEVC,
12539 3840, 2160, 60);
12540 mutex_unlock(&vh265_mutex);
12541
12542 return 0;
12543}
12544
12545static int amvdec_h265_remove(struct platform_device *pdev)
12546{
12547 struct hevc_state_s *hevc = gHevc;
12548
12549 if (get_dbg_flag(hevc))
12550 hevc_print(hevc, 0, "%s\r\n", __func__);
12551
12552 mutex_lock(&vh265_mutex);
12553
12554 vh265_stop(hevc);
12555
12556 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
12557
12558
12559#ifdef DEBUG_PTS
12560 hevc_print(hevc, 0,
12561 "pts missed %ld, pts hit %ld, duration %d\n",
12562 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
12563#endif
12564
12565 vfree(hevc);
12566 hevc = NULL;
12567 gHevc = NULL;
12568
12569 mutex_unlock(&vh265_mutex);
12570
12571 return 0;
12572}
12573/****************************************/
12574#ifdef CONFIG_PM
12575static int h265_suspend(struct device *dev)
12576{
12577 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
12578 return 0;
12579}
12580
12581static int h265_resume(struct device *dev)
12582{
12583 amhevc_resume(to_platform_device(dev));
12584 return 0;
12585}
12586
12587static const struct dev_pm_ops h265_pm_ops = {
12588 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
12589};
12590#endif
12591
12592static struct platform_driver amvdec_h265_driver = {
12593 .probe = amvdec_h265_probe,
12594 .remove = amvdec_h265_remove,
12595 .driver = {
12596 .name = DRIVER_NAME,
12597#ifdef CONFIG_PM
12598 .pm = &h265_pm_ops,
12599#endif
12600 }
12601};
12602
12603#ifdef MULTI_INSTANCE_SUPPORT
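/*
 * State dump callback registered with the vdec core (pdata->dump_state):
 * prints stream geometry, per-instance counters, receiver state, buffer
 * and MV-buffer usage and the key HEVC stream/parser registers, plus the
 * current chunk contents when frame-based data debugging is enabled.
 */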
12604static void vh265_dump_state(struct vdec_s *vdec)
12605{
12606 int i;
12607 struct hevc_state_s *hevc =
12608 (struct hevc_state_s *)vdec->private;
12609 hevc_print(hevc, 0,
12610 "====== %s\n", __func__);
12611
12612 hevc_print(hevc, 0,
12613 "width/height (%d/%d), reorder_pic_num %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
12614 hevc->frame_width,
12615 hevc->frame_height,
12616 hevc->sps_num_reorder_pics_0,
12617 get_work_pic_num(hevc),
12618 hevc->video_signal_type_debug,
12619 hevc->is_swap
12620 );
12621
12622 hevc_print(hevc, 0,
12623 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
12624 input_frame_based(vdec),
12625 hevc->eos,
12626 hevc->dec_result,
12627 decode_frame_count[hevc->index],
12628 display_frame_count[hevc->index],
12629 run_count[hevc->index],
12630 not_run_ready[hevc->index],
12631 input_empty[hevc->index]
12632 );
12633
12634 if (vf_get_receiver(vdec->vf_provider_name)) {
12635 enum receviver_start_e state =
12636 vf_notify_receiver(vdec->vf_provider_name,
12637 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12638 NULL);
12639 hevc_print(hevc, 0,
12640 "\nreceiver(%s) state %d\n",
12641 vdec->vf_provider_name,
12642 state);
12643 }
12644
12645 hevc_print(hevc, 0,
12646 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
12647 __func__,
12648 kfifo_len(&hevc->newframe_q),
12649 VF_POOL_SIZE,
12650 kfifo_len(&hevc->display_q),
12651 VF_POOL_SIZE,
12652 hevc->vf_pre_count,
12653 hevc->vf_get_count,
12654 hevc->vf_put_count,
12655 hevc->pic_list_init_flag,
12656 is_new_pic_available(hevc)
12657 );
12658
12659 dump_pic_list(hevc);
12660
12661 for (i = 0; i < BUF_POOL_SIZE; i++) {
12662 hevc_print(hevc, 0,
12663 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12664 i,
12665 hevc->m_BUF[i].start_adr,
12666 hevc->m_BUF[i].size,
12667 hevc->m_BUF[i].used_flag);
12668 }
12669
12670 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12671 hevc_print(hevc, 0,
12672 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
12673 i,
12674 hevc->m_mv_BUF[i].start_adr,
12675 hevc->m_mv_BUF[i].size,
12676 hevc->m_mv_BUF[i].used_flag);
12677 }
12678
12679 hevc_print(hevc, 0,
12680 "HEVC_DEC_STATUS_REG=0x%x\n",
12681 READ_VREG(HEVC_DEC_STATUS_REG));
12682 hevc_print(hevc, 0,
12683 "HEVC_MPC_E=0x%x\n",
12684 READ_VREG(HEVC_MPC_E));
12685 hevc_print(hevc, 0,
12686 "HEVC_DECODE_MODE=0x%x\n",
12687 READ_VREG(HEVC_DECODE_MODE));
12688 hevc_print(hevc, 0,
12689 "HEVC_DECODE_MODE2=0x%x\n",
12690 READ_VREG(HEVC_DECODE_MODE2));
12691 hevc_print(hevc, 0,
12692 "NAL_SEARCH_CTL=0x%x\n",
12693 READ_VREG(NAL_SEARCH_CTL));
12694 hevc_print(hevc, 0,
12695 "HEVC_PARSER_LCU_START=0x%x\n",
12696 READ_VREG(HEVC_PARSER_LCU_START));
12697 hevc_print(hevc, 0,
12698 "HEVC_DECODE_SIZE=0x%x\n",
12699 READ_VREG(HEVC_DECODE_SIZE));
12700 hevc_print(hevc, 0,
12701 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
12702 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
12703 hevc_print(hevc, 0,
12704 "HEVC_STREAM_START_ADDR=0x%x\n",
12705 READ_VREG(HEVC_STREAM_START_ADDR));
12706 hevc_print(hevc, 0,
12707 "HEVC_STREAM_END_ADDR=0x%x\n",
12708 READ_VREG(HEVC_STREAM_END_ADDR));
12709 hevc_print(hevc, 0,
12710 "HEVC_STREAM_LEVEL=0x%x\n",
12711 READ_VREG(HEVC_STREAM_LEVEL));
12712 hevc_print(hevc, 0,
12713 "HEVC_STREAM_WR_PTR=0x%x\n",
12714 READ_VREG(HEVC_STREAM_WR_PTR));
12715 hevc_print(hevc, 0,
12716 "HEVC_STREAM_RD_PTR=0x%x\n",
12717 READ_VREG(HEVC_STREAM_RD_PTR));
12718 hevc_print(hevc, 0,
12719 "PARSER_VIDEO_RP=0x%x\n",
12720 READ_PARSER_REG(PARSER_VIDEO_RP));
12721 hevc_print(hevc, 0,
12722 "PARSER_VIDEO_WP=0x%x\n",
12723 READ_PARSER_REG(PARSER_VIDEO_WP));
12724
12725 if (input_frame_based(vdec) &&
12726 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
12727 ) {
12728 int jj;
12729 if (hevc->chunk && hevc->chunk->block &&
12730 hevc->chunk->size > 0) {
12731 u8 *data = NULL;
12732 if (!hevc->chunk->block->is_mapped)
12733 data = codec_mm_vmap(hevc->chunk->block->start +
12734 hevc->chunk->offset, hevc->chunk->size);
12735 else
12736 data = ((u8 *)hevc->chunk->block->start_virt)
12737 + hevc->chunk->offset;
12738 hevc_print(hevc, 0,
12739 "frame data size 0x%x\n",
12740 hevc->chunk->size);
12741 for (jj = 0; jj < hevc->chunk->size; jj++) {
12742 if ((jj & 0xf) == 0)
12743 hevc_print(hevc,
12744 PRINT_FRAMEBASE_DATA,
12745 "%06x:", jj);
12746 hevc_print_cont(hevc,
12747 PRINT_FRAMEBASE_DATA,
12748 "%02x ", data[jj]);
12749 if (((jj + 1) & 0xf) == 0)
12750 hevc_print_cont(hevc,
12751 PRINT_FRAMEBASE_DATA,
12752 "\n");
12753 }
12754
12755 if (!hevc->chunk->block->is_mapped)
12756 codec_mm_unmap_phyaddr(data);
12757 }
12758 }
12759
12760}
12761
12762
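/*
 * Probe path for the multi-instance decoder: wires the vdec_s callbacks
 * (run/run_ready/reset/irq/dump_state), picks the vframe provider name
 * (VFM, Dolby Vision dual-layer or multi-instance), parses the optional
 * per-instance config string, allocates the workspace and requests the
 * decoder cores.
 */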
12763static int ammvdec_h265_probe(struct platform_device *pdev)
12764{
12765
12766 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12767 struct hevc_state_s *hevc = NULL;
12768 int ret;
12769#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12770 int config_val;
12771#endif
12772 if (pdata == NULL) {
12773 pr_info("\nammvdec_h265 memory resource undefined.\n");
12774 return -EFAULT;
12775 }
12776
12777 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
12778 sizeof(struct hevc_state_s), GFP_KERNEL); */
12779 hevc = vmalloc(sizeof(struct hevc_state_s));
12780 if (hevc == NULL) {
12781 pr_info("\nammvdec_h265 device data allocation failed\n");
12782 return -ENOMEM;
12783 }
12784 memset(hevc, 0, sizeof(struct hevc_state_s));
12785
12786 /* the ctx from v4l2 driver. */
12787 hevc->v4l2_ctx = pdata->private;
12788
12789 pdata->private = hevc;
12790 pdata->dec_status = vh265_dec_status;
12791 /* pdata->set_trickmode = set_trickmode; */
12792 pdata->run_ready = run_ready;
12793 pdata->run = run;
12794 pdata->reset = reset;
12795 pdata->irq_handler = vh265_irq_cb;
12796 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
12797 pdata->dump_state = vh265_dump_state;
12798
12799 hevc->index = pdev->id;
12800 hevc->m_ins_flag = 1;
12801
12802 if (pdata->use_vfm_path) {
12803 snprintf(pdata->vf_provider_name,
12804 VDEC_PROVIDER_NAME_SIZE,
12805 VFM_DEC_PROVIDER_NAME);
12806 hevc->frameinfo_enable = 1;
12807 }
12808#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12809 else if (vdec_dual(pdata)) {
12810 struct hevc_state_s *hevc_pair = NULL;
12811
12812 if (dv_toggle_prov_name) /*debug purpose*/
12813 snprintf(pdata->vf_provider_name,
12814 VDEC_PROVIDER_NAME_SIZE,
12815 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
12816 VFM_DEC_DVEL_PROVIDER_NAME);
12817 else
12818 snprintf(pdata->vf_provider_name,
12819 VDEC_PROVIDER_NAME_SIZE,
12820 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
12821 VFM_DEC_DVBL_PROVIDER_NAME);
12822 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
12823 if (pdata->master)
12824 hevc_pair = (struct hevc_state_s *)
12825 pdata->master->private;
12826 else if (pdata->slave)
12827 hevc_pair = (struct hevc_state_s *)
12828 pdata->slave->private;
12829 if (hevc_pair)
12830 hevc->shift_byte_count_lo =
12831 hevc_pair->shift_byte_count_lo;
12832 }
12833#endif
12834 else
12835 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
12836 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
12837
12838 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
12839 &vh265_vf_provider, pdata);
12840
12841 hevc->provider_name = pdata->vf_provider_name;
12842 platform_set_drvdata(pdev, pdata);
12843
12844 hevc->platform_dev = pdev;
12845
12846 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
12847 pdata->config && pdata->config_len) {
12848#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
12849		/*use ptr config for double_write_mode, etc*/
12850 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
12851
12852 if (get_config_int(pdata->config, "hevc_double_write_mode",
12853 &config_val) == 0)
12854 hevc->double_write_mode = config_val;
12855 else
12856 hevc->double_write_mode = double_write_mode;
12857
12858 if (get_config_int(pdata->config, "save_buffer_mode",
12859 &config_val) == 0)
12860 hevc->save_buffer_mode = config_val;
12861 else
12862 hevc->save_buffer_mode = 0;
12863
12864 /*use ptr config for max_pic_w, etc*/
12865 if (get_config_int(pdata->config, "hevc_buf_width",
12866 &config_val) == 0) {
12867 hevc->max_pic_w = config_val;
12868 }
12869 if (get_config_int(pdata->config, "hevc_buf_height",
12870 &config_val) == 0) {
12871 hevc->max_pic_h = config_val;
12872 }
12873
12874 if (get_config_int(pdata->config,
12875 "parm_v4l_codec_enable",
12876 &config_val) == 0)
12877 hevc->is_used_v4l = config_val;
12878
12879 if (get_config_int(pdata->config,
12880 "parm_v4l_buffer_margin",
12881 &config_val) == 0)
12882 hevc->dynamic_buf_num_margin = config_val;
12883
12884 if (get_config_int(pdata->config,
12885 "parm_v4l_canvas_mem_mode",
12886 &config_val) == 0)
12887 hevc->mem_map_mode = config_val;
12888#endif
12889 } else {
12890 if (pdata->sys_info)
12891 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12892 else {
12893 hevc->vh265_amstream_dec_info.width = 0;
12894 hevc->vh265_amstream_dec_info.height = 0;
12895 hevc->vh265_amstream_dec_info.rate = 30;
12896 }
12897 hevc->double_write_mode = double_write_mode;
12898 }
12899 if (!hevc->is_used_v4l) {
12900 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
12901			hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
12902 else
12903 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
12904
12905 hevc->mem_map_mode = mem_map_mode;
12906 }
12907
12908 if (mmu_enable_force == 0) {
12909 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
12910 hevc->mmu_enable = 0;
12911 else
12912 hevc->mmu_enable = 1;
12913 }
12914
12915 if (init_mmu_buffers(hevc) < 0) {
12916 hevc_print(hevc, 0,
12917 "\n 265 mmu init failed!\n");
12918 mutex_unlock(&vh265_mutex);
12919 /* devm_kfree(&pdev->dev, (void *)hevc);*/
12920 if (hevc)
12921 vfree((void *)hevc);
12922 pdata->dec_status = NULL;
12923 return -EFAULT;
12924 }
12925#if 0
12926 hevc->buf_start = pdata->mem_start;
12927 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
12928#else
12929
12930 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
12931 BMMU_WORKSPACE_ID, work_buf_size,
12932 DRIVER_NAME, &hevc->buf_start);
12933 if (ret < 0) {
12934 uninit_mmu_buffers(hevc);
12935 /* devm_kfree(&pdev->dev, (void *)hevc); */
12936 if (hevc)
12937 vfree((void *)hevc);
12938 pdata->dec_status = NULL;
12939 mutex_unlock(&vh265_mutex);
12940 return ret;
12941 }
12942 hevc->buf_size = work_buf_size;
12943#endif
12944 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12945 (parser_sei_enable & 0x100) == 0)
12946 parser_sei_enable = 7;
12947 hevc->init_flag = 0;
12948 hevc->first_sc_checked = 0;
12949 hevc->uninit_list = 0;
12950 hevc->fatal_error = 0;
12951 hevc->show_frame_num = 0;
12952
12953 /*
12954 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
12955 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
12956 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
12957 */
12958 if (get_dbg_flag(hevc)) {
12959 hevc_print(hevc, 0,
12960 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12961 hevc->buf_start, hevc->buf_size);
12962 }
12963
12964 hevc_print(hevc, 0,
12965 "dynamic_buf_num_margin=%d\n",
12966 hevc->dynamic_buf_num_margin);
12967 hevc_print(hevc, 0,
12968 "double_write_mode=%d\n",
12969 hevc->double_write_mode);
12970
12971 hevc->cma_dev = pdata->cma_dev;
12972
12973 if (vh265_init(pdata) < 0) {
12974 hevc_print(hevc, 0,
12975 "\namvdec_h265 init failed.\n");
12976 hevc_local_uninit(hevc);
12977 uninit_mmu_buffers(hevc);
12978 /* devm_kfree(&pdev->dev, (void *)hevc); */
12979 if (hevc)
12980 vfree((void *)hevc);
12981 pdata->dec_status = NULL;
12982 return -ENODEV;
12983 }
12984
12985 vdec_set_prepare_level(pdata, start_decode_buf_level);
12986
12987 /*set the max clk for smooth playing...*/
12988 hevc_source_changed(VFORMAT_HEVC,
12989 3840, 2160, 60);
12990 if (pdata->parallel_dec == 1)
12991 vdec_core_request(pdata, CORE_MASK_HEVC);
12992 else
12993 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
12994 | CORE_MASK_COMBINE);
12995
12996 return 0;
12997}
12998
12999static int ammvdec_h265_remove(struct platform_device *pdev)
13000{
13001 struct hevc_state_s *hevc =
13002 (struct hevc_state_s *)
13003 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
13004 struct vdec_s *vdec = hw_to_vdec(hevc);
13005
13006 if (hevc == NULL)
13007 return 0;
13008
13009 if (get_dbg_flag(hevc))
13010 hevc_print(hevc, 0, "%s\r\n", __func__);
13011
13012 vmh265_stop(hevc);
13013
13014 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
13015	if (vdec->parallel_dec == 1)
13016		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
13017	else
13018		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
13019
13020 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
13021
13022 vfree((void *)hevc);
13023 return 0;
13024}
13025
13026static struct platform_driver ammvdec_h265_driver = {
13027 .probe = ammvdec_h265_probe,
13028 .remove = ammvdec_h265_remove,
13029 .driver = {
13030 .name = MULTI_DRIVER_NAME,
13031#ifdef CONFIG_PM
13032 .pm = &h265_pm_ops,
13033#endif
13034 }
13035};
13036#endif
13037
13038static struct codec_profile_t amvdec_h265_profile = {
13039 .name = "hevc",
13040 .profile = ""
13041};
13042
13043static struct codec_profile_t amvdec_h265_profile_single,
13044 amvdec_h265_profile_mult;
13045
13046static struct mconfig h265_configs[] = {
13047 MC_PU32("use_cma", &use_cma),
13048 MC_PU32("bit_depth_luma", &bit_depth_luma),
13049 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
13050 MC_PU32("video_signal_type", &video_signal_type),
13051#ifdef ERROR_HANDLE_DEBUG
13052 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
13053 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
13054#endif
13055 MC_PU32("radr", &radr),
13056 MC_PU32("rval", &rval),
13057 MC_PU32("dbg_cmd", &dbg_cmd),
13058 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
13059 MC_PU32("endian", &endian),
13060 MC_PU32("step", &step),
13061 MC_PU32("udebug_flag", &udebug_flag),
13062 MC_PU32("decode_pic_begin", &decode_pic_begin),
13063 MC_PU32("slice_parse_begin", &slice_parse_begin),
13064 MC_PU32("nal_skip_policy", &nal_skip_policy),
13065 MC_PU32("i_only_flag", &i_only_flag),
13066 MC_PU32("error_handle_policy", &error_handle_policy),
13067 MC_PU32("error_handle_threshold", &error_handle_threshold),
13068 MC_PU32("error_handle_nal_skip_threshold",
13069 &error_handle_nal_skip_threshold),
13070 MC_PU32("error_handle_system_threshold",
13071 &error_handle_system_threshold),
13072 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
13073 MC_PU32("debug", &debug),
13074 MC_PU32("debug_mask", &debug_mask),
13075 MC_PU32("buffer_mode", &buffer_mode),
13076 MC_PU32("double_write_mode", &double_write_mode),
13077 MC_PU32("buf_alloc_width", &buf_alloc_width),
13078 MC_PU32("buf_alloc_height", &buf_alloc_height),
13079 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
13080 MC_PU32("max_buf_num", &max_buf_num),
13081 MC_PU32("buf_alloc_size", &buf_alloc_size),
13082 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
13083 MC_PU32("mem_map_mode", &mem_map_mode),
13084 MC_PU32("enable_mem_saving", &enable_mem_saving),
13085 MC_PU32("force_w_h", &force_w_h),
13086 MC_PU32("force_fps", &force_fps),
13087 MC_PU32("max_decoding_time", &max_decoding_time),
13088 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
13089 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
13090 MC_PU32("interlace_enable", &interlace_enable),
13091 MC_PU32("pts_unstable", &pts_unstable),
13092 MC_PU32("parser_sei_enable", &parser_sei_enable),
13093 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
13094 MC_PU32("decode_timeout_val", &decode_timeout_val),
13095#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13096 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
13097 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
13098 MC_PU32("dv_debug", &dv_debug),
13099#endif
13100};
13101static struct mconfig_node decoder_265_node;
13102
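/*
 * Module init: pick the work-buffer spec for the chip's maximum supported
 * resolution (8K on SM1 and later, otherwise 4K or 1080p), register both
 * platform drivers and export the "hevc"/"h265"/"mh265" codec profiles.
 */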
13103static int __init amvdec_h265_driver_init_module(void)
13104{
13105 struct BuffInfo_s *p_buf_info;
13106
13107 if (vdec_is_support_4k()) {
13108 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
13109 p_buf_info = &amvh265_workbuff_spec[2];
13110 else
13111 p_buf_info = &amvh265_workbuff_spec[1];
13112 } else
13113 p_buf_info = &amvh265_workbuff_spec[0];
13114
13115 init_buff_spec(NULL, p_buf_info);
13116 work_buf_size =
13117 (p_buf_info->end_adr - p_buf_info->start_adr
13118 + 0xffff) & (~0xffff);
13119
13120 pr_debug("amvdec_h265 module init\n");
13121 error_handle_policy = 0;
13122
13123#ifdef ERROR_HANDLE_DEBUG
13124 dbg_nal_skip_flag = 0;
13125 dbg_nal_skip_count = 0;
13126#endif
13127 udebug_flag = 0;
13128 decode_pic_begin = 0;
13129 slice_parse_begin = 0;
13130 step = 0;
13131 buf_alloc_size = 0;
13132
13133#ifdef MULTI_INSTANCE_SUPPORT
13134 if (platform_driver_register(&ammvdec_h265_driver))
13135 pr_err("failed to register ammvdec_h265 driver\n");
13136
13137#endif
13138 if (platform_driver_register(&amvdec_h265_driver)) {
13139 pr_err("failed to register amvdec_h265 driver\n");
13140 return -ENODEV;
13141 }
13142#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
13143 if (!has_hevc_vdec()) {
13144		/* hevc is not supported */
13145 amvdec_h265_profile.name = "hevc_unsupport";
13146 }
13147 if (vdec_is_support_4k()) {
13148 if (is_meson_m8m2_cpu()) {
13149			/* m8m2 supports 4k */
13150 amvdec_h265_profile.profile = "4k";
13151 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
13152 amvdec_h265_profile.profile =
13153 "8k, 8bit, 10bit, dwrite, compressed";
13154		} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
13155 amvdec_h265_profile.profile =
13156 "4k, 8bit, 10bit, dwrite, compressed";
13157 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
13158 amvdec_h265_profile.profile = "4k";
13159 }
13160#endif
13161 if (codec_mm_get_total_size() < 80 * SZ_1M) {
13162 pr_info("amvdec_h265 default mmu enabled.\n");
13163 mmu_enable = 1;
13164 }
13165
13166 vcodec_profile_register(&amvdec_h265_profile);
13167 amvdec_h265_profile_single = amvdec_h265_profile;
13168 amvdec_h265_profile_single.name = "h265";
13169 vcodec_profile_register(&amvdec_h265_profile_single);
13170 amvdec_h265_profile_mult = amvdec_h265_profile;
13171 amvdec_h265_profile_mult.name = "mh265";
13172 vcodec_profile_register(&amvdec_h265_profile_mult);
13173 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
13174 "h265", h265_configs, CONFIG_FOR_RW);
13175 return 0;
13176}
13177
13178static void __exit amvdec_h265_driver_remove_module(void)
13179{
13180 pr_debug("amvdec_h265 module remove.\n");
13181
13182#ifdef MULTI_INSTANCE_SUPPORT
13183 platform_driver_unregister(&ammvdec_h265_driver);
13184#endif
13185 platform_driver_unregister(&amvdec_h265_driver);
13186}
13187
13188/****************************************/
13189/*
13190 *module_param(stat, uint, 0664);
13191 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
13192 */
13193module_param(use_cma, uint, 0664);
13194MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13195
13196module_param(bit_depth_luma, uint, 0664);
13197MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13198
13199module_param(bit_depth_chroma, uint, 0664);
13200MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13201
13202module_param(video_signal_type, uint, 0664);
13203MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13204
13205#ifdef ERROR_HANDLE_DEBUG
13206module_param(dbg_nal_skip_flag, uint, 0664);
13207MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13208
13209module_param(dbg_nal_skip_count, uint, 0664);
13210MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13211#endif
13212
13213module_param(radr, uint, 0664);
13214MODULE_PARM_DESC(radr, "\n radr\n");
13215
13216module_param(rval, uint, 0664);
13217MODULE_PARM_DESC(rval, "\n rval\n");
13218
13219module_param(dbg_cmd, uint, 0664);
13220MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13221
13222module_param(dump_nal, uint, 0664);
13223MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13224
13225module_param(dbg_skip_decode_index, uint, 0664);
13226MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13227
13228module_param(endian, uint, 0664);
13229MODULE_PARM_DESC(endian, "\n endian\n");
13230
13231module_param(step, uint, 0664);
13232MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13233
13234module_param(decode_pic_begin, uint, 0664);
13235MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13236
13237module_param(slice_parse_begin, uint, 0664);
13238MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13239
13240module_param(nal_skip_policy, uint, 0664);
13241MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
13242
13243module_param(i_only_flag, uint, 0664);
13244MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
13245
13246module_param(fast_output_enable, uint, 0664);
13247MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
13248
13249module_param(error_handle_policy, uint, 0664);
13250MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
13251
13252module_param(error_handle_threshold, uint, 0664);
13253MODULE_PARM_DESC(error_handle_threshold,
13254 "\n amvdec_h265 error_handle_threshold\n");
13255
13256module_param(error_handle_nal_skip_threshold, uint, 0664);
13257MODULE_PARM_DESC(error_handle_nal_skip_threshold,
13258 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
13259
13260module_param(error_handle_system_threshold, uint, 0664);
13261MODULE_PARM_DESC(error_handle_system_threshold,
13262 "\n amvdec_h265 error_handle_system_threshold\n");
13263
13264module_param(error_skip_nal_count, uint, 0664);
13265MODULE_PARM_DESC(error_skip_nal_count,
13266 "\n amvdec_h265 error_skip_nal_count\n");
13267
13268module_param(debug, uint, 0664);
13269MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
13270
13271module_param(debug_mask, uint, 0664);
13272MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
13273
13274module_param(log_mask, uint, 0664);
13275MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
13276
13277module_param(buffer_mode, uint, 0664);
13278MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
13279
13280module_param(double_write_mode, uint, 0664);
13281MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
13282
13283module_param(buf_alloc_width, uint, 0664);
13284MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
13285
13286module_param(buf_alloc_height, uint, 0664);
13287MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
13288
13289module_param(dynamic_buf_num_margin, uint, 0664);
13290MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
13291
13292module_param(max_buf_num, uint, 0664);
13293MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
13294
13295module_param(buf_alloc_size, uint, 0664);
13296MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
13297
#ifdef CONSTRAIN_MAX_BUF_NUM
module_param(run_ready_max_vf_only_num, uint, 0664);
MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");

module_param(run_ready_display_q_num, uint, 0664);
MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");

module_param(run_ready_max_buf_num, uint, 0664);
MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
#endif

#if 0
module_param(re_config_pic_flag, uint, 0664);
MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
#endif

module_param(buffer_mode_dbg, uint, 0664);
MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");

module_param(mem_map_mode, uint, 0664);
MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");

module_param(enable_mem_saving, uint, 0664);
MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");

module_param(force_w_h, uint, 0664);
MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");

module_param(force_fps, uint, 0664);
MODULE_PARM_DESC(force_fps, "\n force_fps\n");

module_param(max_decoding_time, uint, 0664);
MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");

module_param(prefix_aux_buf_size, uint, 0664);
MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");

module_param(suffix_aux_buf_size, uint, 0664);
MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");

module_param(interlace_enable, uint, 0664);
MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");

module_param(pts_unstable, uint, 0664);
MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");

module_param(parser_sei_enable, uint, 0664);
MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");

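/*
 * Dolby Vision parser controls; only registered when the kernel is built with
 * CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION.
 */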
#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
module_param(parser_dolby_vision_enable, uint, 0664);
MODULE_PARM_DESC(parser_dolby_vision_enable,
        "\n parser_dolby_vision_enable\n");

module_param(dolby_meta_with_el, uint, 0664);
MODULE_PARM_DESC(dolby_meta_with_el,
        "\n dolby_meta_with_el\n");

module_param(dolby_el_flush_th, uint, 0664);
MODULE_PARM_DESC(dolby_el_flush_th,
        "\n dolby_el_flush_th\n");
#endif

module_param(mmu_enable, uint, 0664);
MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");

module_param(mmu_enable_force, uint, 0664);
MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");

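/*
 * Multi-instance tuning knobs plus per-instance statistics arrays; the array
 * parameters use max_decode_instance_num as their element count.
 */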
#ifdef MULTI_INSTANCE_SUPPORT
module_param(start_decode_buf_level, int, 0664);
MODULE_PARM_DESC(start_decode_buf_level,
        "\n h265 start_decode_buf_level\n");

module_param(decode_timeout_val, uint, 0664);
MODULE_PARM_DESC(decode_timeout_val,
        "\n h265 decode_timeout_val\n");

module_param(data_resend_policy, uint, 0664);
MODULE_PARM_DESC(data_resend_policy,
        "\n h265 data_resend_policy\n");

module_param_array(decode_frame_count, uint,
        &max_decode_instance_num, 0664);

module_param_array(display_frame_count, uint,
        &max_decode_instance_num, 0664);

module_param_array(max_process_time, uint,
        &max_decode_instance_num, 0664);

module_param_array(max_get_frame_interval,
        uint, &max_decode_instance_num, 0664);

module_param_array(run_count, uint,
        &max_decode_instance_num, 0664);

module_param_array(input_empty, uint,
        &max_decode_instance_num, 0664);

module_param_array(not_run_ready, uint,
        &max_decode_instance_num, 0664);

module_param_array(ref_frame_mark_flag, uint,
        &max_decode_instance_num, 0664);

#endif
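/* Further Dolby Vision debug/bypass controls, gated by the same Kconfig option. */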
#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
module_param(dv_toggle_prov_name, uint, 0664);
MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");

module_param(dv_debug, uint, 0664);
MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");

module_param(force_bypass_dvenl, uint, 0664);
MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
#endif

#ifdef AGAIN_HAS_THRESHOLD
module_param(again_threshold, uint, 0664);
MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
#endif

module_param(force_disp_pic_index, int, 0664);
MODULE_PARM_DESC(force_disp_pic_index,
        "\n amvdec_h265 force_disp_pic_index\n");

module_param(frmbase_cont_bitlevel, uint, 0664);
MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");

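/* Low-level udebug_* trace/pause knobs and display-path debug parameters. */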
module_param(udebug_flag, uint, 0664);
MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");

module_param(udebug_pause_pos, uint, 0664);
MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");

module_param(udebug_pause_val, uint, 0664);
MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");

module_param(pre_decode_buf_level, int, 0664);
MODULE_PARM_DESC(pre_decode_buf_level, "\n amvdec_h265 pre_decode_buf_level\n");

module_param(udebug_pause_decode_idx, uint, 0664);
MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");

module_param(disp_vframe_valve_level, uint, 0664);
MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");

module_param(pic_list_debug, uint, 0664);
MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");

module_param(without_display_mode, uint, 0664);
MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");

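/* Module entry/exit points and metadata. */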
module_init(amvdec_h265_driver_init_module);
module_exit(amvdec_h265_driver_remove_module);

MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");