path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
blob: 1ef70dddc2e821ed5982ee8908b30cfd47de09b1
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50#include <media/v4l2-mem2mem.h>
51
52/*
53To enable DV (Dolby Vision) in frame mode,
54#define DOLBY_META_SUPPORT in the ucode
55*/
56
57#define HEVC_8K_LFTOFFSET_FIX
58
59#define CONSTRAIN_MAX_BUF_NUM
60
61#define SWAP_HEVC_UCODE
62#define DETREFILL_ENABLE
63
64#define AGAIN_HAS_THRESHOLD
65/*#define TEST_NO_BUF*/
66#define HEVC_PIC_STRUCT_SUPPORT
67#define MULTI_INSTANCE_SUPPORT
68#define USE_UNINIT_SEMA
69
70 /* .buf_size = 0x100000*16,
71 //4k2k , 0x100000 per buffer */
72 /* 4096x2304 , 0x120000 per buffer */
73#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
74#define MPRED_4K_MV_BUF_SIZE (0x120000)
75#define MPRED_MV_BUF_SIZE (0x40000)
76
77#define MMU_COMPRESS_HEADER_SIZE 0x48000
78#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
79
80#define MAX_FRAME_4K_NUM 0x1200
81#define MAX_FRAME_8K_NUM (0x1200*4)
82
83//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
84#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
85
86#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
87
88#define HEVC_CM_HEADER_START_ADDR 0x3628
89#define HEVC_SAO_MMU_VH1_ADDR 0x363b
90#define HEVC_SAO_MMU_VH0_ADDR 0x363a
91
92#define HEVC_DBLK_CFGB 0x350b
93#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
94#define SWAP_HEVC_OFFSET (3 * 0x1000)
95
96#define MEM_NAME "codec_265"
97/* #include <mach/am_regs.h> */
98#include <linux/amlogic/media/utils/vdec_reg.h>
99
100#include "../utils/vdec.h"
101#include "../utils/amvdec.h"
102#include <linux/amlogic/media/video_sink/video.h>
103#include <linux/amlogic/media/codec_mm/configs.h>
104
105#define SEND_LMEM_WITH_RPM
106#define SUPPORT_10BIT
107/* #define ERROR_HANDLE_DEBUG */
108
109#ifndef STAT_KTHREAD
110#define STAT_KTHREAD 0x40
111#endif
112
113#ifdef MULTI_INSTANCE_SUPPORT
114#define MAX_DECODE_INSTANCE_NUM 9
115#define MULTI_DRIVER_NAME "ammvdec_h265"
116#endif
117#define DRIVER_NAME "amvdec_h265"
118#define MODULE_NAME "amvdec_h265"
119#define DRIVER_HEADER_NAME "amvdec_h265_header"
120
121#define PUT_INTERVAL (HZ/100)
122#define ERROR_SYSTEM_RESET_COUNT 200
123
124#define PTS_NORMAL 0
125#define PTS_NONE_REF_USE_DURATION 1
126
127#define PTS_MODE_SWITCHING_THRESHOLD 3
128#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
129
130#define DUR2PTS(x) ((x)*90/96)
131
132#define MAX_SIZE_8K (8192 * 4608)
133#define MAX_SIZE_4K (4096 * 2304)
134
135#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
136#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
137
138#define SEI_UserDataITU_T_T35 4
139#define INVALID_IDX -1 /* Invalid buffer index.*/
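/*
 * Editorial note (not from the original source): DUR2PTS() appears to
 * rescale a frame duration counted in 96 kHz ticks into the 90 kHz PTS
 * domain (the unit assumption is the editor's). Sanity check at 25 fps:
 *   dur = 96000/25 = 3840  ->  DUR2PTS(3840) = 3840*90/96 = 3600
 *   3600 ticks at 90 kHz is exactly 1/25 s.
 * IS_4K_SIZE()/IS_8K_SIZE() classify by pixel count, e.g. for 3840x2160:
 *   3840*2160 = 8294400 >  1920*1088 (2088960)   -> counted as 4K
 *   3840*2160 = 8294400 <= MAX_SIZE_4K (9437184) -> not counted as 8K
 */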
140
141static struct semaphore h265_sema;
142
143struct hevc_state_s;
144static int hevc_print(struct hevc_state_s *hevc,
145 int debug_flag, const char *fmt, ...);
146static int hevc_print_cont(struct hevc_state_s *hevc,
147 int debug_flag, const char *fmt, ...);
148static int vh265_vf_states(struct vframe_states *states, void *);
149static struct vframe_s *vh265_vf_peek(void *);
150static struct vframe_s *vh265_vf_get(void *);
151static void vh265_vf_put(struct vframe_s *, void *);
152static int vh265_event_cb(int type, void *data, void *private_data);
153
154static int vh265_stop(struct hevc_state_s *hevc);
155#ifdef MULTI_INSTANCE_SUPPORT
156static int vmh265_stop(struct hevc_state_s *hevc);
157static s32 vh265_init(struct vdec_s *vdec);
158static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
159static void reset_process_time(struct hevc_state_s *hevc);
160static void start_process_time(struct hevc_state_s *hevc);
161static void restart_process_time(struct hevc_state_s *hevc);
162static void timeout_process(struct hevc_state_s *hevc);
163#else
164static s32 vh265_init(struct hevc_state_s *hevc);
165#endif
166static void vh265_prot_init(struct hevc_state_s *hevc);
167static int vh265_local_init(struct hevc_state_s *hevc);
168static void vh265_check_timer_func(unsigned long arg);
169static void config_decode_mode(struct hevc_state_s *hevc);
170
171static const char vh265_dec_id[] = "vh265-dev";
172
173#define PROVIDER_NAME "decoder.h265"
174#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
175
176static const struct vframe_operations_s vh265_vf_provider = {
177 .peek = vh265_vf_peek,
178 .get = vh265_vf_get,
179 .put = vh265_vf_put,
180 .event_cb = vh265_event_cb,
181 .vf_states = vh265_vf_states,
182};
183
184static struct vframe_provider_s vh265_vf_prov;
185
186static u32 bit_depth_luma;
187static u32 bit_depth_chroma;
188static u32 video_signal_type;
189static int start_decode_buf_level = 0x8000;
190static unsigned int decode_timeout_val = 200;
191
192static u32 run_ready_min_buf_num = 2;
193static u32 disable_ip_mode;
194static u32 print_lcu_error = 1;
195/*data_resend_policy:
196 bit 0, for stream-based input, resend data when the decode buffer is empty
197*/
198static u32 data_resend_policy = 1;
199static u32 poc_num_margin = 1000;
200static u32 poc_error_limit = 30;
201
202static u32 dirty_time_threshold = 2000;
203static u32 dirty_count_threshold = 200;
204static u32 dirty_buffersize_threshold = 0x800000;
205
206
207#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
208/*
209static const char * const video_format_names[] = {
210 "component", "PAL", "NTSC", "SECAM",
211 "MAC", "unspecified", "unspecified", "unspecified"
212};
213
214static const char * const color_primaries_names[] = {
215 "unknown", "bt709", "undef", "unknown",
216 "bt470m", "bt470bg", "smpte170m", "smpte240m",
217 "film", "bt2020"
218};
219
220static const char * const transfer_characteristics_names[] = {
221 "unknown", "bt709", "undef", "unknown",
222 "bt470m", "bt470bg", "smpte170m", "smpte240m",
223 "linear", "log100", "log316", "iec61966-2-4",
224 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
225 "smpte-st-2084", "smpte-st-428"
226};
227
228static const char * const matrix_coeffs_names[] = {
229 "GBR", "bt709", "undef", "unknown",
230 "fcc", "bt470bg", "smpte170m", "smpte240m",
231 "YCgCo", "bt2020nc", "bt2020c"
232};
233*/
234#ifdef SUPPORT_10BIT
235#define HEVC_CM_BODY_START_ADDR 0x3626
236#define HEVC_CM_BODY_LENGTH 0x3627
237#define HEVC_CM_HEADER_LENGTH 0x3629
238#define HEVC_CM_HEADER_OFFSET 0x362b
239#define HEVC_SAO_CTRL9 0x362d
240#define LOSLESS_COMPRESS_MODE
241/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
242/* double_write_mode:
243 * 0, no double write;
244 * 1, 1:1 ratio;
245 * 2, (1/4):(1/4) ratio;
246 * 3, (1/4):(1/4) ratio, with the compressed frame also included;
247 * 4, (1/2):(1/2) ratio;
248 * 0x10, double write only
249 * 0x100, if > 1080p, use mode 4, else use mode 1;
250 * 0x200, if > 1080p, use mode 2, else use mode 1;
251 * 0x300, if > 720p, use mode 4, else use mode 1;
252 */
253static u32 double_write_mode;
254
255/*#define DECOMP_HEADR_SURGENT*/
256
257static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
258static u32 enable_mem_saving = 1;
259static u32 workaround_enable;
260static u32 force_w_h;
261#endif
262static u32 force_fps;
263static u32 pts_unstable;
264#define H265_DEBUG_BUFMGR 0x01
265#define H265_DEBUG_BUFMGR_MORE 0x02
266#define H265_DEBUG_DETAIL 0x04
267#define H265_DEBUG_REG 0x08
268#define H265_DEBUG_MAN_SEARCH_NAL 0x10
269#define H265_DEBUG_MAN_SKIP_NAL 0x20
270#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
271#define H265_DEBUG_FORCE_CLK 0x80
272#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
273#define H265_DEBUG_NO_DISPLAY 0x200
274#define H265_DEBUG_DISCARD_NAL 0x400
275#define H265_DEBUG_OUT_PTS 0x800
276#define H265_DEBUG_DUMP_PIC_LIST 0x1000
277#define H265_DEBUG_PRINT_SEI 0x2000
278#define H265_DEBUG_PIC_STRUCT 0x4000
279#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
280#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
281#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
282#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
283#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
284#define H265_DEBUG_HW_RESET 0x100000
285#define H265_CFG_CANVAS_IN_DECODE 0x200000
286#define H265_DEBUG_DV 0x400000
287#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
288#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
289#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
290#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
291#ifdef MULTI_INSTANCE_SUPPORT
292#define PRINT_FLAG_ERROR 0x0
293#define IGNORE_PARAM_FROM_CONFIG 0x08000000
294#define PRINT_FRAMEBASE_DATA 0x10000000
295#define PRINT_FLAG_VDEC_STATUS 0x20000000
296#define PRINT_FLAG_VDEC_DETAIL 0x40000000
297#define PRINT_FLAG_V4L_DETAIL 0x80000000
298#endif
299
300#define BUF_POOL_SIZE 32
301#define MAX_BUF_NUM 24
302#define MAX_REF_PIC_NUM 24
303#define MAX_REF_ACTIVE 16
304
305#ifdef MV_USE_FIXED_BUF
306#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
307#define VF_BUFFER_IDX(n) (n)
308#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
309#else
310#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
311#define VF_BUFFER_IDX(n) (n)
312#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
313#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
314#endif
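/*
 * Editorial note (not from the original source): when MV_USE_FIXED_BUF is
 * not defined (the #else branch above), the BMMU index space works out to
 *   0 .. 31   frame buffers       (VF_BUFFER_IDX(n) = n)
 *   32        workspace           (BMMU_WORKSPACE_ID)
 *   33 .. 56  motion-vector bufs  (MV_BUFFER_IDX(n) = 33 + n)
 * i.e. BMMU_MAX_BUFFERS = 32 + 1 + 24 = 57 slots.
 */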
315
316#define HEVC_MV_INFO 0x310d
317#define HEVC_QP_INFO 0x3137
318#define HEVC_SKIP_INFO 0x3136
319
320const u32 h265_version = 201602101;
321static u32 debug_mask = 0xffffffff;
322static u32 log_mask;
323static u32 debug;
324static u32 radr;
325static u32 rval;
326static u32 dbg_cmd;
327static u32 dump_nal;
328static u32 dbg_skip_decode_index;
329static u32 endian = 0xff0;
330#ifdef ERROR_HANDLE_DEBUG
331static u32 dbg_nal_skip_flag;
332 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
333static u32 dbg_nal_skip_count;
334#endif
335/*for debug*/
336/*
337 udebug_flag:
338 bit 0, enable ucode print
339 bit 1, enable ucode detail print
340 bit [31:16] not 0, pos to dump lmem
341 bit 2, pop bits to lmem
342 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
343*/
344static u32 udebug_flag;
345/*
346 when udebug_flag[1:0] is not 0
347 and udebug_pause_pos is not 0,
348 udebug_pause_pos is the pause position
349*/
350static u32 udebug_pause_pos;
351/*
352 when udebug_flag[1:0] is not 0
353 and udebug_pause_pos is not 0,
354 pause only when DEBUG_REG2 is equal to this val
355*/
356static u32 udebug_pause_val;
357
358static u32 udebug_pause_decode_idx;
359
360static u32 decode_pic_begin;
361static uint slice_parse_begin;
362static u32 step;
363static bool is_reset;
364
365#ifdef CONSTRAIN_MAX_BUF_NUM
366static u32 run_ready_max_vf_only_num;
367static u32 run_ready_display_q_num;
368 /*0: do not check
369 0xff: use work_pic_num
370 */
371static u32 run_ready_max_buf_num = 0xff;
372#endif
373
374static u32 dynamic_buf_num_margin = 7;
375static u32 buf_alloc_width;
376static u32 buf_alloc_height;
377
378static u32 max_buf_num = 16;
379static u32 buf_alloc_size;
380/*static u32 re_config_pic_flag;*/
381/*
382 *bit[0]: 0,
383 *bit[1]: 0, always release cma buffer when stop
384 *bit[1]: 1, never release cma buffer when stop
385 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
386 *do not release cma buffer if blackout is not 1
387 *
388 *bit[2]: 0, when start decoding, check current displayed buffer
389 * (only for buffer decoded by h265) if blackout is 0
390 * 1, do not check current displayed buffer
391 *
392 *bit[3]: 1, if blackout is not 1, do not release current
393 * displayed cma buffer always.
394 */
395/* set to 1 for fast play;
396 * set to 8 for other cases of "keep last frame"
397 */
398static u32 buffer_mode = 1;
399
400/* buffer_mode_dbg: debug only*/
401static u32 buffer_mode_dbg = 0xffff0000;
402/**/
403/*
404 *bit[1:0] PB_skip_mode: 0, start decoding from the beginning;
405 *1, start decoding after the first I picture;
406 *2, only decode and display non-error pictures;
407 *3, start decoding and displaying after IDR, etc.
408 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
409 *only for mode 0 and 1.
410 */
411static u32 nal_skip_policy = 2;
412
413/*
414 *bit 0, 1: only display I picture;
415 *bit 1, 1: only decode I picture;
416 */
417static u32 i_only_flag;
418static u32 skip_nal_count = 500;
419/*
420bit 0, fast output first I picture
421*/
422static u32 fast_output_enable = 1;
423
424static u32 frmbase_cont_bitlevel = 0x60;
425
426/*
427use_cma: 1, use both reserved memory and cma for buffers
4282, use only cma for buffers
429*/
430static u32 use_cma = 2;
431
432#define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
433/*
434static u32 prefix_aux_buf_size = (16 * 1024);
435static u32 suffix_aux_buf_size;
436*/
437static u32 prefix_aux_buf_size = (12 * 1024);
438static u32 suffix_aux_buf_size = (12 * 1024);
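/*
 * Editorial note (not from the original source): AUX_BUF_ALIGN() rounds an
 * address up to the next 16-byte boundary, for example
 *   AUX_BUF_ALIGN(0x1001) = (0x1001 + 0xf) & ~0xf = 0x1010
 *   AUX_BUF_ALIGN(0x1010) = 0x1010 (already aligned)
 */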
439
440static u32 max_decoding_time;
441/*
442 *error handling
443 */
444/*error_handle_policy:
445 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
446 *1, skip error_skip_nal_count nals before error recovery;
447 *bit 1 (valid only when bit0 == 1):
448 *1, wait vps/sps/pps after error recovery;
449 *bit 2 (valid only when bit0 == 0):
450 *0, auto search after error recovery (hevc_recover() called);
451 *1, manual search after error recovery
452 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
453 *
454 *bit 4: 0, set error_mark after reset/recover
455 * 1, do not set error_mark after reset/recover
456 *
457 *bit 5: 0, check total lcu for every picture
458 * 1, do not check total lcu
459 *
460 *bit 6: 0, do not check head error
461 * 1, check head error
462 *
463 *bit 7: 0, allow to print over decode
464 * 1, NOT allow to print over decode
465 *
466 *bit 8: 0, use interlace policy
467 * 1, NOT use interlace policy
468 *
469 */
470
471static u32 error_handle_policy;
472static u32 error_skip_nal_count = 6;
473static u32 error_handle_threshold = 30;
474static u32 error_handle_nal_skip_threshold = 10;
475static u32 error_handle_system_threshold = 30;
476static u32 interlace_enable = 1;
477static u32 fr_hint_status;
478
479 /*
480 *parser_sei_enable:
481 * bit 0, sei;
482 * bit 1, sei_suffix (fill aux buf)
483 * bit 2, fill sei to aux buf (when bit 0 is 1)
484 * bit 8, debug flag
485 */
486static u32 parser_sei_enable;
487static u32 parser_dolby_vision_enable = 1;
488#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
489static u32 dolby_meta_with_el;
490static u32 dolby_el_flush_th = 2;
491#endif
492/* this is only for h265 mmu enable */
493
494static u32 mmu_enable = 1;
495static u32 mmu_enable_force;
496static u32 work_buf_size;
497static unsigned int force_disp_pic_index;
498static unsigned int disp_vframe_valve_level;
499static int pre_decode_buf_level = 0x1000;
500static unsigned int pic_list_debug;
501#ifdef HEVC_8K_LFTOFFSET_FIX
502 /* performance_profile: bit 0, multi slice in ucode
503 */
504static unsigned int performance_profile = 1;
505#endif
506#ifdef MULTI_INSTANCE_SUPPORT
507static unsigned int max_decode_instance_num
508 = MAX_DECODE_INSTANCE_NUM;
509static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
510static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
511static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
512static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
513static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
514static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
515static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
516static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
517{1, 1, 1, 1, 1, 1, 1, 1, 1};
518
519#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
520static unsigned char get_idx(struct hevc_state_s *hevc);
521#endif
522
523#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
524static u32 dv_toggle_prov_name;
525
526static u32 dv_debug;
527
528static u32 force_bypass_dvenl;
529#endif
530#endif
531
532
533#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
534#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
535#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
536#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
537#else
538#define get_dbg_flag(hevc) debug
539#define get_dbg_flag2(hevc) debug
540#define is_log_enable(hevc) (log_mask ? 1 : 0)
541#define get_valid_double_write_mode(hevc) double_write_mode
542#define get_buf_alloc_width(hevc) buf_alloc_width
543#define get_buf_alloc_height(hevc) buf_alloc_height
544#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
545#endif
546#define get_buffer_mode(hevc) buffer_mode
547
548
549static DEFINE_SPINLOCK(lock);
550struct task_struct *h265_task = NULL;
551#undef DEBUG_REG
552#ifdef DEBUG_REG
553void WRITE_VREG_DBG(unsigned adr, unsigned val)
554{
555 if (debug & H265_DEBUG_REG)
556 pr_info("%s(%x, %x)\n", __func__, adr, val);
557 WRITE_VREG(adr, val);
558}
559
560#undef WRITE_VREG
561#define WRITE_VREG WRITE_VREG_DBG
562#endif
563extern u32 trickmode_i;
564
565static DEFINE_MUTEX(vh265_mutex);
566
567static DEFINE_MUTEX(vh265_log_mutex);
568
569//static struct vdec_info *gvs;
570
571static u32 without_display_mode;
572
573/**************************************************
574 *
575 *h265 buffer management include
576 *
577 ***************************************************
578 */
579enum NalUnitType {
580 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
581 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
582
583 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
584 /* Current name in the spec: TSA_R */
585 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
586
587 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
588 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
589
590 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
591 /* Current name in the spec: RADL_R */
592 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
593
594 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
595 /* Current name in the spec: RASL_R */
596 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
597
598 NAL_UNIT_RESERVED_10,
599 NAL_UNIT_RESERVED_11,
600 NAL_UNIT_RESERVED_12,
601 NAL_UNIT_RESERVED_13,
602 NAL_UNIT_RESERVED_14,
603 NAL_UNIT_RESERVED_15,
604
605 /* Current name in the spec: BLA_W_LP */
606 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
607 /* Current name in the spec: BLA_W_DLP */
608 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
609 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
610 /* Current name in the spec: IDR_W_DLP */
611 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
612 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
613 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
614 NAL_UNIT_RESERVED_22,
615 NAL_UNIT_RESERVED_23,
616
617 NAL_UNIT_RESERVED_24,
618 NAL_UNIT_RESERVED_25,
619 NAL_UNIT_RESERVED_26,
620 NAL_UNIT_RESERVED_27,
621 NAL_UNIT_RESERVED_28,
622 NAL_UNIT_RESERVED_29,
623 NAL_UNIT_RESERVED_30,
624 NAL_UNIT_RESERVED_31,
625
626 NAL_UNIT_VPS, /* 32 */
627 NAL_UNIT_SPS, /* 33 */
628 NAL_UNIT_PPS, /* 34 */
629 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
630 NAL_UNIT_EOS, /* 36 */
631 NAL_UNIT_EOB, /* 37 */
632 NAL_UNIT_FILLER_DATA, /* 38 */
633 NAL_UNIT_SEI, /* 39 Prefix SEI */
634 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
635 NAL_UNIT_RESERVED_41,
636 NAL_UNIT_RESERVED_42,
637 NAL_UNIT_RESERVED_43,
638 NAL_UNIT_RESERVED_44,
639 NAL_UNIT_RESERVED_45,
640 NAL_UNIT_RESERVED_46,
641 NAL_UNIT_RESERVED_47,
642 NAL_UNIT_UNSPECIFIED_48,
643 NAL_UNIT_UNSPECIFIED_49,
644 NAL_UNIT_UNSPECIFIED_50,
645 NAL_UNIT_UNSPECIFIED_51,
646 NAL_UNIT_UNSPECIFIED_52,
647 NAL_UNIT_UNSPECIFIED_53,
648 NAL_UNIT_UNSPECIFIED_54,
649 NAL_UNIT_UNSPECIFIED_55,
650 NAL_UNIT_UNSPECIFIED_56,
651 NAL_UNIT_UNSPECIFIED_57,
652 NAL_UNIT_UNSPECIFIED_58,
653 NAL_UNIT_UNSPECIFIED_59,
654 NAL_UNIT_UNSPECIFIED_60,
655 NAL_UNIT_UNSPECIFIED_61,
656 NAL_UNIT_UNSPECIFIED_62,
657 NAL_UNIT_UNSPECIFIED_63,
658 NAL_UNIT_INVALID,
659};
660
661/* --------------------------------------------------- */
662/* Amrisc Software Interrupt */
663/* --------------------------------------------------- */
664#define AMRISC_STREAM_EMPTY_REQ 0x01
665#define AMRISC_PARSER_REQ 0x02
666#define AMRISC_MAIN_REQ 0x04
667
668/* --------------------------------------------------- */
669/* HEVC_DEC_STATUS define */
670/* --------------------------------------------------- */
671#define HEVC_DEC_IDLE 0x0
672#define HEVC_NAL_UNIT_VPS 0x1
673#define HEVC_NAL_UNIT_SPS 0x2
674#define HEVC_NAL_UNIT_PPS 0x3
675#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
676#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
677#define HEVC_SLICE_DECODING 0x6
678#define HEVC_NAL_UNIT_SEI 0x7
679#define HEVC_SLICE_SEGMENT_DONE 0x8
680#define HEVC_NAL_SEARCH_DONE 0x9
681#define HEVC_DECPIC_DATA_DONE 0xa
682#define HEVC_DECPIC_DATA_ERROR 0xb
683#define HEVC_SEI_DAT 0xc
684#define HEVC_SEI_DAT_DONE 0xd
685#define HEVC_NAL_DECODE_DONE 0xe
686#define HEVC_OVER_DECODE 0xf
687
688#define HEVC_DATA_REQUEST 0x12
689
690#define HEVC_DECODE_BUFEMPTY 0x20
691#define HEVC_DECODE_TIMEOUT 0x21
692#define HEVC_SEARCH_BUFEMPTY 0x22
693#define HEVC_DECODE_OVER_SIZE 0x23
694#define HEVC_DECODE_BUFEMPTY2 0x24
695#define HEVC_FIND_NEXT_PIC_NAL 0x50
696#define HEVC_FIND_NEXT_DVEL_NAL 0x51
697
698#define HEVC_DUMP_LMEM 0x30
699
700#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
701#define HEVC_DISCARD_NAL 0xf0
702#define HEVC_ACTION_DEC_CONT 0xfd
703#define HEVC_ACTION_ERROR 0xfe
704#define HEVC_ACTION_DONE 0xff
705
706/* --------------------------------------------------- */
707/* Include "parser_cmd.h" */
708/* --------------------------------------------------- */
709#define PARSER_CMD_SKIP_CFG_0 0x0000090b
710
711#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
712
713#define PARSER_CMD_SKIP_CFG_2 0x001b1910
714
715#define PARSER_CMD_NUMBER 37
716
717/**************************************************
718 *
719 *h265 buffer management
720 *
721 ***************************************************
722 */
723/* #define BUFFER_MGR_ONLY */
724/* #define CONFIG_HEVC_CLK_FORCED_ON */
725/* #define ENABLE_SWAP_TEST */
726#define MCRCC_ENABLE
727#define INVALID_POC 0x80000000
728
729#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
730#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
731#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
732#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
733#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
734#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
735#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
736#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
737#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
738#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
739#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
740#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
741#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
742#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
743#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
744#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
745#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
746#ifdef ENABLE_SWAP_TEST
747#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
748#endif
749
750/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
751/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
752#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
753 /*do not define ENABLE_SWAP_TEST*/
754#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
755#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
756
757#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
758#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
759/*
760 *ucode parser/search control
761 *bit 0: 0, header auto parse; 1, header manual parse
763 *bit 1: 0, auto skip for non-seamless streams; 1, no skip
763 *bit [3:2]: valid when bit1==0;
764 *0, auto skip nal before first vps/sps/pps/idr;
765 *1, auto skip nal before first vps/sps/pps
766 *2, auto skip nal before first vps/sps/pps,
767 * and not decode until the first I slice (with slice address of 0)
768 *
769 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
770 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
771 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
772 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
773 *bit [17]: for NAL_SEI when bit0 is 0:
774 * 0, do not parse/fetch SEI in ucode;
775 * 1, parse/fetch SEI in ucode
776 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
777 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
778 * 1, fetch NAL_SEI_SUFFIX data to aux buf
779 *bit [19]:
780 * 0, parse NAL_SEI in ucode
781 * 1, fetch NAL_SEI to aux buf
782 *bit [20]: for DOLBY_VISION_META
783 * 0, do not fetch DOLBY_VISION_META to aux buf
784 * 1, fetch DOLBY_VISION_META to aux buf
785 */
786#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
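/*
 * Editorial note (not from the original source): an illustrative value
 * built from the bit layout documented above; the combination is
 * hypothetical and only shows how the fields pack together:
 *   bit 0     = 0  header auto parse
 *   bit 1     = 0  auto skip enabled
 *   bits[3:2] = 2  skip NALs until vps/sps/pps, wait for the first I slice
 *   bit 17    = 1  parse/fetch SEI in ucode
 *   bit 18    = 1  fetch NAL_SEI_SUFFIX data to aux buf
 *   => (2 << 2) | (1 << 17) | (1 << 18) = 0x60008
 */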
787 /*read only*/
788#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
789 /*
790 [15 : 8] rps_set_id
791 [7 : 0] start_decoding_flag
792 */
793#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
794 /*set before start decoder*/
795#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
796#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
797#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
798
799#define DECODE_MODE_SINGLE 0x0
800#define DECODE_MODE_MULTI_FRAMEBASE 0x1
801#define DECODE_MODE_MULTI_STREAMBASE 0x2
802#define DECODE_MODE_MULTI_DVBAL 0x3
803#define DECODE_MODE_MULTI_DVENL 0x4
804
805#define MAX_INT 0x7FFFFFFF
806
807#define RPM_BEGIN 0x100
808#define modification_list_cur 0x148
809#define RPM_END 0x180
810
811#define RPS_USED_BIT 14
812/* MISC_FLAG0 */
813#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
814#define PCM_ENABLE_FLAG_BIT 1
815#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
816#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
817#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
818#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
819#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
820#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
821#define SLICE_SAO_LUMA_FLAG_BIT 8
822#define SLICE_SAO_CHROMA_FLAG_BIT 9
823#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
824
825union param_u {
826 struct {
827 unsigned short data[RPM_END - RPM_BEGIN];
828 } l;
829 struct {
830 /* from ucode lmem, do not change this struct */
831 unsigned short CUR_RPS[0x10];
832 unsigned short num_ref_idx_l0_active;
833 unsigned short num_ref_idx_l1_active;
834 unsigned short slice_type;
835 unsigned short slice_temporal_mvp_enable_flag;
836 unsigned short dependent_slice_segment_flag;
837 unsigned short slice_segment_address;
838 unsigned short num_title_rows_minus1;
839 unsigned short pic_width_in_luma_samples;
840 unsigned short pic_height_in_luma_samples;
841 unsigned short log2_min_coding_block_size_minus3;
842 unsigned short log2_diff_max_min_coding_block_size;
843 unsigned short log2_max_pic_order_cnt_lsb_minus4;
844 unsigned short POClsb;
845 unsigned short collocated_from_l0_flag;
846 unsigned short collocated_ref_idx;
847 unsigned short log2_parallel_merge_level;
848 unsigned short five_minus_max_num_merge_cand;
849 unsigned short sps_num_reorder_pics_0;
850 unsigned short modification_flag;
851 unsigned short tiles_enabled_flag;
852 unsigned short num_tile_columns_minus1;
853 unsigned short num_tile_rows_minus1;
854 unsigned short tile_width[12];
855 unsigned short tile_height[8];
856 unsigned short misc_flag0;
857 unsigned short pps_beta_offset_div2;
858 unsigned short pps_tc_offset_div2;
859 unsigned short slice_beta_offset_div2;
860 unsigned short slice_tc_offset_div2;
861 unsigned short pps_cb_qp_offset;
862 unsigned short pps_cr_qp_offset;
863 unsigned short first_slice_segment_in_pic_flag;
864 unsigned short m_temporalId;
865 unsigned short m_nalUnitType;
866
867 unsigned short vui_num_units_in_tick_hi;
868 unsigned short vui_num_units_in_tick_lo;
869 unsigned short vui_time_scale_hi;
870 unsigned short vui_time_scale_lo;
871 unsigned short bit_depth;
872 unsigned short profile_etc;
873 unsigned short sei_frame_field_info;
874 unsigned short video_signal_type;
875 unsigned short modification_list[0x20];
876 unsigned short conformance_window_flag;
877 unsigned short conf_win_left_offset;
878 unsigned short conf_win_right_offset;
879 unsigned short conf_win_top_offset;
880 unsigned short conf_win_bottom_offset;
881 unsigned short chroma_format_idc;
882 unsigned short color_description;
883 unsigned short aspect_ratio_idc;
884 unsigned short sar_width;
885 unsigned short sar_height;
886 unsigned short sps_max_dec_pic_buffering_minus1_0;
887 } p;
888};
889
890#define RPM_BUF_SIZE (0x80*2)
891/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
892#define LMEM_BUF_SIZE (0x500 * 2)
893
894struct buff_s {
895 u32 buf_start;
896 u32 buf_size;
897 u32 buf_end;
898};
899
900struct BuffInfo_s {
901 u32 max_width;
902 u32 max_height;
903 unsigned int start_adr;
904 unsigned int end_adr;
905 struct buff_s ipp;
906 struct buff_s sao_abv;
907 struct buff_s sao_vb;
908 struct buff_s short_term_rps;
909 struct buff_s vps;
910 struct buff_s sps;
911 struct buff_s pps;
912 struct buff_s sao_up;
913 struct buff_s swap_buf;
914 struct buff_s swap_buf2;
915 struct buff_s scalelut;
916 struct buff_s dblk_para;
917 struct buff_s dblk_data;
918 struct buff_s dblk_data2;
919 struct buff_s mmu_vbh;
920 struct buff_s cm_header;
921 struct buff_s mpred_above;
922#ifdef MV_USE_FIXED_BUF
923 struct buff_s mpred_mv;
924#endif
925 struct buff_s rpm;
926 struct buff_s lmem;
927};
928#define WORK_BUF_SPEC_NUM 3
929static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
930 {
931 /* 8M bytes */
932 .max_width = 1920,
933 .max_height = 1088,
934 .ipp = {
935 /* IPP work space calculation :
936 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
937 */
938 .buf_size = 0x4000,
939 },
940 .sao_abv = {
941 .buf_size = 0x30000,
942 },
943 .sao_vb = {
944 .buf_size = 0x30000,
945 },
946 .short_term_rps = {
947 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
948 * total 64x16x2 = 2048 bytes (0x800)
949 */
950 .buf_size = 0x800,
951 },
952 .vps = {
953 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
954 * total 0x0800 bytes
955 */
956 .buf_size = 0x800,
957 },
958 .sps = {
959 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
960 * total 0x0800 bytes
961 */
962 .buf_size = 0x800,
963 },
964 .pps = {
965 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
966 * total 0x2000 bytes
967 */
968 .buf_size = 0x2000,
969 },
970 .sao_up = {
971 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
972 * each has 16 bytes total 0x2800 bytes
973 */
974 .buf_size = 0x2800,
975 },
976 .swap_buf = {
977 /* 256cyclex64bit = 2K bytes 0x800
978 * (only 144 cycles valid)
979 */
980 .buf_size = 0x800,
981 },
982 .swap_buf2 = {
983 .buf_size = 0x800,
984 },
985 .scalelut = {
986 /* support up to 32 SCALELUT 1024x32 =
987 * 32Kbytes (0x8000)
988 */
989 .buf_size = 0x8000,
990 },
991 .dblk_para = {
992#ifdef SUPPORT_10BIT
993 .buf_size = 0x40000,
994#else
995 /* DBLK -> Max 256(4096/16) LCU, each para
996 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
997 */
998 .buf_size = 0x20000,
999#endif
1000 },
1001 .dblk_data = {
1002 .buf_size = 0x40000,
1003 },
1004 .dblk_data2 = {
1005 .buf_size = 0x40000,
1006 }, /*dblk data for adapter*/
1007 .mmu_vbh = {
1008 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1009 },
1010#if 0
1011 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1012 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1013 (MAX_REF_PIC_NUM + 1),
1014 },
1015#endif
1016 .mpred_above = {
1017 .buf_size = 0x8000,
1018 },
1019#ifdef MV_USE_FIXED_BUF
1020 .mpred_mv = {/* 1080p, 0x40000 per buffer */
1021 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
1022 },
1023#endif
1024 .rpm = {
1025 .buf_size = RPM_BUF_SIZE,
1026 },
1027 .lmem = {
1028 .buf_size = 0x500 * 2,
1029 }
1030 },
1031 {
1032 .max_width = 4096,
1033 .max_height = 2048,
1034 .ipp = {
1035 /* IPP work space calculation :
1036 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1037 */
1038 .buf_size = 0x4000,
1039 },
1040 .sao_abv = {
1041 .buf_size = 0x30000,
1042 },
1043 .sao_vb = {
1044 .buf_size = 0x30000,
1045 },
1046 .short_term_rps = {
1047 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1048 * total 64x16x2 = 2048 bytes (0x800)
1049 */
1050 .buf_size = 0x800,
1051 },
1052 .vps = {
1053 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1054 * total 0x0800 bytes
1055 */
1056 .buf_size = 0x800,
1057 },
1058 .sps = {
1059 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1060 * total 0x0800 bytes
1061 */
1062 .buf_size = 0x800,
1063 },
1064 .pps = {
1065 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1066 * total 0x2000 bytes
1067 */
1068 .buf_size = 0x2000,
1069 },
1070 .sao_up = {
1071 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1072 * each has 16 bytes total 0x2800 bytes
1073 */
1074 .buf_size = 0x2800,
1075 },
1076 .swap_buf = {
1077 /* 256cyclex64bit = 2K bytes 0x800
1078 * (only 144 cycles valid)
1079 */
1080 .buf_size = 0x800,
1081 },
1082 .swap_buf2 = {
1083 .buf_size = 0x800,
1084 },
1085 .scalelut = {
1086 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1087 * (0x8000)
1088 */
1089 .buf_size = 0x8000,
1090 },
1091 .dblk_para = {
1092 /* DBLK -> Max 256(4096/16) LCU, each para
1093 * 512bytes(total:0x20000),
1094 * data 1024bytes(total:0x40000)
1095 */
1096 .buf_size = 0x20000,
1097 },
1098 .dblk_data = {
1099 .buf_size = 0x80000,
1100 },
1101 .dblk_data2 = {
1102 .buf_size = 0x80000,
1103 }, /*dblk data for adapter*/
1104 .mmu_vbh = {
1105 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1106 },
1107#if 0
1108 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1109 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1110 (MAX_REF_PIC_NUM + 1),
1111 },
1112#endif
1113 .mpred_above = {
1114 .buf_size = 0x8000,
1115 },
1116#ifdef MV_USE_FIXED_BUF
1117 .mpred_mv = {
1118 /* .buf_size = 0x100000*16,
1119 //4k2k , 0x100000 per buffer */
1120 /* 4096x2304 , 0x120000 per buffer */
1121 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1122 },
1123#endif
1124 .rpm = {
1125 .buf_size = RPM_BUF_SIZE,
1126 },
1127 .lmem = {
1128 .buf_size = 0x500 * 2,
1129 }
1130 },
1131
1132 {
1133 .max_width = 4096*2,
1134 .max_height = 2048*2,
1135 .ipp = {
1136 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1137 .buf_size = 0x4000*2,
1138 },
1139 .sao_abv = {
1140 .buf_size = 0x30000*2,
1141 },
1142 .sao_vb = {
1143 .buf_size = 0x30000*2,
1144 },
1145 .short_term_rps = {
1146 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1147 .buf_size = 0x800,
1148 },
1149 .vps = {
1150 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1151 .buf_size = 0x800,
1152 },
1153 .sps = {
1154 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1155 .buf_size = 0x800,
1156 },
1157 .pps = {
1158 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1159 .buf_size = 0x2000,
1160 },
1161 .sao_up = {
1162 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1163 .buf_size = 0x2800*2,
1164 },
1165 .swap_buf = {
1166 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1167 .buf_size = 0x800,
1168 },
1169 .swap_buf2 = {
1170 .buf_size = 0x800,
1171 },
1172 .scalelut = {
1173 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1174 .buf_size = 0x8000*2,
1175 },
1176 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1177 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1178 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1179 .mmu_vbh = {
1180 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1181 },
1182#if 0
1183 .cm_header = {
1184 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1185 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1186 },
1187#endif
1188 .mpred_above = {
1189 .buf_size = 0x8000*2,
1190 },
1191#ifdef MV_USE_FIXED_BUF
1192 .mpred_mv = {
1193 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1194 },
1195#endif
1196 .rpm = {
1197 .buf_size = RPM_BUF_SIZE,
1198 },
1199 .lmem = {
1200 .buf_size = 0x500 * 2,
1201 },
1202 }
1203};
1204
1205static void init_buff_spec(struct hevc_state_s *hevc,
1206 struct BuffInfo_s *buf_spec)
1207{
1208 buf_spec->ipp.buf_start = buf_spec->start_adr;
1209 buf_spec->sao_abv.buf_start =
1210 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1211
1212 buf_spec->sao_vb.buf_start =
1213 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1214 buf_spec->short_term_rps.buf_start =
1215 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1216 buf_spec->vps.buf_start =
1217 buf_spec->short_term_rps.buf_start +
1218 buf_spec->short_term_rps.buf_size;
1219 buf_spec->sps.buf_start =
1220 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1221 buf_spec->pps.buf_start =
1222 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1223 buf_spec->sao_up.buf_start =
1224 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1225 buf_spec->swap_buf.buf_start =
1226 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1227 buf_spec->swap_buf2.buf_start =
1228 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1229 buf_spec->scalelut.buf_start =
1230 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1231 buf_spec->dblk_para.buf_start =
1232 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1233 buf_spec->dblk_data.buf_start =
1234 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1235 buf_spec->dblk_data2.buf_start =
1236 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1237 buf_spec->mmu_vbh.buf_start =
1238 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1239 buf_spec->mpred_above.buf_start =
1240 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1241#ifdef MV_USE_FIXED_BUF
1242 buf_spec->mpred_mv.buf_start =
1243 buf_spec->mpred_above.buf_start +
1244 buf_spec->mpred_above.buf_size;
1245
1246 buf_spec->rpm.buf_start =
1247 buf_spec->mpred_mv.buf_start +
1248 buf_spec->mpred_mv.buf_size;
1249#else
1250 buf_spec->rpm.buf_start =
1251 buf_spec->mpred_above.buf_start +
1252 buf_spec->mpred_above.buf_size;
1253#endif
1254 buf_spec->lmem.buf_start =
1255 buf_spec->rpm.buf_start +
1256 buf_spec->rpm.buf_size;
1257 buf_spec->end_adr =
1258 buf_spec->lmem.buf_start +
1259 buf_spec->lmem.buf_size;
1260
1261 if (hevc && get_dbg_flag2(hevc)) {
1262 hevc_print(hevc, 0,
1263 "%s workspace (%x %x) size = %x\n", __func__,
1264 buf_spec->start_adr, buf_spec->end_adr,
1265 buf_spec->end_adr - buf_spec->start_adr);
1266
1267 hevc_print(hevc, 0,
1268 "ipp.buf_start :%x\n",
1269 buf_spec->ipp.buf_start);
1270 hevc_print(hevc, 0,
1271 "sao_abv.buf_start :%x\n",
1272 buf_spec->sao_abv.buf_start);
1273 hevc_print(hevc, 0,
1274 "sao_vb.buf_start :%x\n",
1275 buf_spec->sao_vb.buf_start);
1276 hevc_print(hevc, 0,
1277 "short_term_rps.buf_start :%x\n",
1278 buf_spec->short_term_rps.buf_start);
1279 hevc_print(hevc, 0,
1280 "vps.buf_start :%x\n",
1281 buf_spec->vps.buf_start);
1282 hevc_print(hevc, 0,
1283 "sps.buf_start :%x\n",
1284 buf_spec->sps.buf_start);
1285 hevc_print(hevc, 0,
1286 "pps.buf_start :%x\n",
1287 buf_spec->pps.buf_start);
1288 hevc_print(hevc, 0,
1289 "sao_up.buf_start :%x\n",
1290 buf_spec->sao_up.buf_start);
1291 hevc_print(hevc, 0,
1292 "swap_buf.buf_start :%x\n",
1293 buf_spec->swap_buf.buf_start);
1294 hevc_print(hevc, 0,
1295 "swap_buf2.buf_start :%x\n",
1296 buf_spec->swap_buf2.buf_start);
1297 hevc_print(hevc, 0,
1298 "scalelut.buf_start :%x\n",
1299 buf_spec->scalelut.buf_start);
1300 hevc_print(hevc, 0,
1301 "dblk_para.buf_start :%x\n",
1302 buf_spec->dblk_para.buf_start);
1303 hevc_print(hevc, 0,
1304 "dblk_data.buf_start :%x\n",
1305 buf_spec->dblk_data.buf_start);
1306 hevc_print(hevc, 0,
1307 "dblk_data2.buf_start :%x\n",
1308 buf_spec->dblk_data2.buf_start);
1309 hevc_print(hevc, 0,
1310 "mpred_above.buf_start :%x\n",
1311 buf_spec->mpred_above.buf_start);
1312#ifdef MV_USE_FIXED_BUF
1313 hevc_print(hevc, 0,
1314 "mpred_mv.buf_start :%x\n",
1315 buf_spec->mpred_mv.buf_start);
1316#endif
1317 if ((get_dbg_flag2(hevc)
1318 &
1319 H265_DEBUG_SEND_PARAM_WITH_REG)
1320 == 0) {
1321 hevc_print(hevc, 0,
1322 "rpm.buf_start :%x\n",
1323 buf_spec->rpm.buf_start);
1324 }
1325 }
1326
1327}
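/*
 * Editorial note (not from the original source): init_buff_spec() carves a
 * single contiguous workspace into the sub-buffers of struct BuffInfo_s in
 * declaration order:
 *   ipp.buf_start  = start_adr
 *   next.buf_start = prev.buf_start + prev.buf_size
 *   ...
 *   end_adr        = lmem.buf_start + lmem.buf_size
 * so the workspace footprint is the sum of the .buf_size fields of the
 * selected amvh265_workbuff_spec[] entry.
 */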
1328
1329enum SliceType {
1330 B_SLICE,
1331 P_SLICE,
1332 I_SLICE
1333};
1334
1335/*USE_BUF_BLOCK*/
1336struct BUF_s {
1337 ulong start_adr;
1338 u32 size;
1339 u32 luma_size;
1340 ulong header_addr;
1341 u32 header_size;
1342 int used_flag;
1343 ulong v4l_ref_buf_addr;
1344 ulong chroma_addr;
1345 u32 chroma_size;
1346} /*BUF_t */;
1347
1348/* levels 6 and 6.1 allow a maximum of 800 slices; other levels allow 200 */
1349#define MAX_SLICE_NUM 800
1350struct PIC_s {
1351 int index;
1352 int scatter_alloc;
1353 int BUF_index;
1354 int mv_buf_index;
1355 int POC;
1356 int decode_idx;
1357 int slice_type;
1358 int RefNum_L0;
1359 int RefNum_L1;
1360 int num_reorder_pic;
1361 int stream_offset;
1362 unsigned char referenced;
1363 unsigned char output_mark;
1364 unsigned char recon_mark;
1365 unsigned char output_ready;
1366 unsigned char error_mark;
1367 //dis_mark = 0: discard mark, dis_mark = 1: no discard mark
1368 unsigned char dis_mark;
1369 /**/ int slice_idx;
1370 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1371 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1372 /*buffer */
1373 unsigned int header_adr;
1374#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1375 unsigned char dv_enhance_exist;
1376#endif
1377 char *aux_data_buf;
1378 int aux_data_size;
1379 unsigned long cma_alloc_addr;
1380 struct page *alloc_pages;
1381 unsigned int mpred_mv_wr_start_addr;
1382 unsigned int mc_y_adr;
1383 unsigned int mc_u_v_adr;
1384#ifdef SUPPORT_10BIT
1385 /*unsigned int comp_body_size;*/
1386 unsigned int dw_y_adr;
1387 unsigned int dw_u_v_adr;
1388#endif
1389 int mc_canvas_y;
1390 int mc_canvas_u_v;
1391 int width;
1392 int height;
1393
1394 int y_canvas_index;
1395 int uv_canvas_index;
1396#ifdef MULTI_INSTANCE_SUPPORT
1397 struct canvas_config_s canvas_config[2];
1398#endif
1399#ifdef SUPPORT_10BIT
1400 int mem_saving_mode;
1401 u32 bit_depth_luma;
1402 u32 bit_depth_chroma;
1403#endif
1404#ifdef LOSLESS_COMPRESS_MODE
1405 unsigned int losless_comp_body_size;
1406#endif
1407 unsigned char pic_struct;
1408 int vf_ref;
1409
1410 u32 pts;
1411 u64 pts64;
1412 u64 timestamp;
1413
1414 u32 aspect_ratio_idc;
1415 u32 sar_width;
1416 u32 sar_height;
1417 u32 double_write_mode;
1418 u32 video_signal_type;
1419 unsigned short conformance_window_flag;
1420 unsigned short conf_win_left_offset;
1421 unsigned short conf_win_right_offset;
1422 unsigned short conf_win_top_offset;
1423 unsigned short conf_win_bottom_offset;
1424 unsigned short chroma_format_idc;
1425
1426 /* picture qos information */
1427 int max_qp;
1428 int avg_qp;
1429 int min_qp;
1430 int max_skip;
1431 int avg_skip;
1432 int min_skip;
1433 int max_mv;
1434 int min_mv;
1435 int avg_mv;
1436
1437 u32 hw_decode_time;
1438 u32 frame_size; // For frame base mode
1439 bool vframe_bound;
1440 bool ip_mode;
1441 u32 stream_frame_size; //for stream base
1442 u32 hdr10p_data_size;
1443 char *hdr10p_data_buf;
1444} /*PIC_t */;
1445
1446#define MAX_TILE_COL_NUM 10
1447#define MAX_TILE_ROW_NUM 20
1448struct tile_s {
1449 int width;
1450 int height;
1451 int start_cu_x;
1452 int start_cu_y;
1453
1454 unsigned int sao_vb_start_addr;
1455 unsigned int sao_abv_start_addr;
1456};
1457
1458#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1459#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1460#define SEI_HDR10PLUS_MASK 0x00000004
1461
1462#define VF_POOL_SIZE 32
1463
1464#ifdef MULTI_INSTANCE_SUPPORT
1465#define DEC_RESULT_NONE 0
1466#define DEC_RESULT_DONE 1
1467#define DEC_RESULT_AGAIN 2
1468#define DEC_RESULT_CONFIG_PARAM 3
1469#define DEC_RESULT_ERROR 4
1470#define DEC_INIT_PICLIST 5
1471#define DEC_UNINIT_PICLIST 6
1472#define DEC_RESULT_GET_DATA 7
1473#define DEC_RESULT_GET_DATA_RETRY 8
1474#define DEC_RESULT_EOS 9
1475#define DEC_RESULT_FORCE_EXIT 10
1476#define DEC_RESULT_FREE_CANVAS 11
1477
1478static void vh265_work(struct work_struct *work);
1479static void vh265_timeout_work(struct work_struct *work);
1480static void vh265_notify_work(struct work_struct *work);
1481
1482#endif
1483
1484struct debug_log_s {
1485 struct list_head list;
1486 uint8_t data; /*will alloc more size*/
1487};
1488
1489struct hevc_state_s {
1490#ifdef MULTI_INSTANCE_SUPPORT
1491 struct platform_device *platform_dev;
1492 void (*vdec_cb)(struct vdec_s *, void *);
1493 void *vdec_cb_arg;
1494 struct vframe_chunk_s *chunk;
1495 int dec_result;
1496 struct work_struct work;
1497 struct work_struct timeout_work;
1498 struct work_struct notify_work;
1499 struct work_struct set_clk_work;
1500 /* timeout handle */
1501 unsigned long int start_process_time;
1502 unsigned int last_lcu_idx;
1503 unsigned int decode_timeout_count;
1504 unsigned int timeout_num;
1505#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1506 unsigned char switch_dvlayer_flag;
1507 unsigned char no_switch_dvlayer_count;
1508 unsigned char bypass_dvenl_enable;
1509 unsigned char bypass_dvenl;
1510#endif
1511 unsigned char start_parser_type;
1512 /*start_decoding_flag:
1513 vps/pps/sps/idr info from ucode*/
1514 unsigned char start_decoding_flag;
1515 unsigned char rps_set_id;
1516 unsigned char eos;
1517 int pic_decoded_lcu_idx;
1518 u8 over_decode;
1519 u8 empty_flag;
1520#endif
1521 struct vframe_s vframe_dummy;
1522 char *provider_name;
1523 int index;
1524 struct device *cma_dev;
1525 unsigned char m_ins_flag;
1526 unsigned char dolby_enhance_flag;
1527 unsigned long buf_start;
1528 u32 buf_size;
1529 u32 mv_buf_size;
1530
1531 struct BuffInfo_s work_space_buf_store;
1532 struct BuffInfo_s *work_space_buf;
1533
1534 u8 aux_data_dirty;
1535 u32 prefix_aux_size;
1536 u32 suffix_aux_size;
1537 void *aux_addr;
1538 void *rpm_addr;
1539 void *lmem_addr;
1540 dma_addr_t aux_phy_addr;
1541 dma_addr_t rpm_phy_addr;
1542 dma_addr_t lmem_phy_addr;
1543
1544 unsigned int pic_list_init_flag;
1545 unsigned int use_cma_flag;
1546
1547 unsigned short *rpm_ptr;
1548 unsigned short *lmem_ptr;
1549 unsigned short *debug_ptr;
1550 int debug_ptr_size;
1551 int pic_w;
1552 int pic_h;
1553 int lcu_x_num;
1554 int lcu_y_num;
1555 int lcu_total;
1556 int lcu_size;
1557 int lcu_size_log2;
1558 int lcu_x_num_pre;
1559 int lcu_y_num_pre;
1560 int first_pic_after_recover;
1561
1562 int num_tile_col;
1563 int num_tile_row;
1564 int tile_enabled;
1565 int tile_x;
1566 int tile_y;
1567 int tile_y_x;
1568 int tile_start_lcu_x;
1569 int tile_start_lcu_y;
1570 int tile_width_lcu;
1571 int tile_height_lcu;
1572
1573 int slice_type;
1574 unsigned int slice_addr;
1575 unsigned int slice_segment_addr;
1576
1577 unsigned char interlace_flag;
1578 unsigned char curr_pic_struct;
1579 unsigned char frame_field_info_present_flag;
1580
1581 unsigned short sps_num_reorder_pics_0;
1582 unsigned short misc_flag0;
1583 int m_temporalId;
1584 int m_nalUnitType;
1585 int TMVPFlag;
1586 int isNextSliceSegment;
1587 int LDCFlag;
1588 int m_pocRandomAccess;
1589 int plevel;
1590 int MaxNumMergeCand;
1591
1592 int new_pic;
1593 int new_tile;
1594 int curr_POC;
1595 int iPrevPOC;
1596#ifdef MULTI_INSTANCE_SUPPORT
1597 int decoded_poc;
1598 struct PIC_s *decoding_pic;
1599#endif
1600 int iPrevTid0POC;
1601 int list_no;
1602 int RefNum_L0;
1603 int RefNum_L1;
1604 int ColFromL0Flag;
1605 int LongTerm_Curr;
1606 int LongTerm_Col;
1607 int Col_POC;
1608 int LongTerm_Ref;
1609#ifdef MULTI_INSTANCE_SUPPORT
1610 int m_pocRandomAccess_bak;
1611 int curr_POC_bak;
1612 int iPrevPOC_bak;
1613 int iPrevTid0POC_bak;
1614 unsigned char start_parser_type_bak;
1615 unsigned char start_decoding_flag_bak;
1616 unsigned char rps_set_id_bak;
1617 int pic_decoded_lcu_idx_bak;
1618 int decode_idx_bak;
1619#endif
1620 struct PIC_s *cur_pic;
1621 struct PIC_s *col_pic;
1622 int skip_flag;
1623 int decode_idx;
1624 int slice_idx;
1625 unsigned char have_vps;
1626 unsigned char have_sps;
1627 unsigned char have_pps;
1628 unsigned char have_valid_start_slice;
1629 unsigned char wait_buf;
1630 unsigned char error_flag;
1631 unsigned int error_skip_nal_count;
1632 long used_4k_num;
1633
1634 unsigned char
1635 ignore_bufmgr_error; /* bit 0, for decoding;
1636 bit 1, for displaying
1637 bit 1 must be set if bit 0 is 1*/
1638 int PB_skip_mode;
1639 int PB_skip_count_after_decoding;
1640#ifdef SUPPORT_10BIT
1641 int mem_saving_mode;
1642#endif
1643#ifdef LOSLESS_COMPRESS_MODE
1644 unsigned int losless_comp_body_size;
1645#endif
1646 int pts_mode;
1647 int last_lookup_pts;
1648 int last_pts;
1649 u64 last_lookup_pts_us64;
1650 u64 last_pts_us64;
1651 u32 shift_byte_count_lo;
1652 u32 shift_byte_count_hi;
1653 int pts_mode_switching_count;
1654 int pts_mode_recovery_count;
1655
1656 int pic_num;
1657
1658 /**/
1659 union param_u param;
1660
1661 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1662
1663 struct timer_list timer;
1664 struct BUF_s m_BUF[BUF_POOL_SIZE];
1665 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1666 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1667
1668 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1669 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1670 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1671 struct vframe_s vfpool[VF_POOL_SIZE];
1672
1673 u32 stat;
1674 u32 frame_width;
1675 u32 frame_height;
1676 u32 frame_dur;
1677 u32 frame_ar;
1678 u32 bit_depth_luma;
1679 u32 bit_depth_chroma;
1680 u32 video_signal_type;
1681 u32 video_signal_type_debug;
1682 u32 saved_resolution;
1683 bool get_frame_dur;
1684 u32 error_watchdog_count;
1685 u32 error_skip_nal_wt_cnt;
1686 u32 error_system_watchdog_count;
1687
1688#ifdef DEBUG_PTS
1689 unsigned long pts_missed;
1690 unsigned long pts_hit;
1691#endif
1692 struct dec_sysinfo vh265_amstream_dec_info;
1693 unsigned char init_flag;
1694 unsigned char first_sc_checked;
1695 unsigned char uninit_list;
1696 u32 start_decoding_time;
1697
1698 int show_frame_num;
1699#ifdef USE_UNINIT_SEMA
1700 struct semaphore h265_uninit_done_sema;
1701#endif
1702 int fatal_error;
1703
1704
1705 u32 sei_present_flag;
1706 void *frame_mmu_map_addr;
1707 dma_addr_t frame_mmu_map_phy_addr;
1708 unsigned int mmu_mc_buf_start;
1709 unsigned int mmu_mc_buf_end;
1710 unsigned int mmu_mc_start_4k_adr;
1711 void *mmu_box;
1712 void *bmmu_box;
1713 int mmu_enable;
1714
1715 unsigned int dec_status;
1716
1717 /* data for SEI_MASTER_DISPLAY_COLOR */
1718 unsigned int primaries[3][2];
1719 unsigned int white_point[2];
1720 unsigned int luminance[2];
1721 /* data for SEI_CONTENT_LIGHT_LEVEL */
1722 unsigned int content_light_level[2];
1723
1724 struct PIC_s *pre_top_pic;
1725 struct PIC_s *pre_bot_pic;
1726
1727#ifdef MULTI_INSTANCE_SUPPORT
1728 int double_write_mode;
1729 int dynamic_buf_num_margin;
1730 int start_action;
1731 int save_buffer_mode;
1732#endif
1733 u32 i_only;
1734 struct list_head log_list;
1735 u32 ucode_pause_pos;
1736 u32 start_shift_bytes;
1737
1738 u32 vf_pre_count;
1739 u32 vf_get_count;
1740 u32 vf_put_count;
1741#ifdef SWAP_HEVC_UCODE
1742 dma_addr_t mc_dma_handle;
1743 void *mc_cpu_addr;
1744 int swap_size;
1745 ulong swap_addr;
1746#endif
1747#ifdef DETREFILL_ENABLE
1748 dma_addr_t detbuf_adr;
1749 u16 *detbuf_adr_virt;
1750 u8 delrefill_check;
1751#endif
1752 u8 head_error_flag;
1753 int valve_count;
1754 struct firmware_s *fw;
1755 int max_pic_w;
1756 int max_pic_h;
1757#ifdef AGAIN_HAS_THRESHOLD
1758 u8 next_again_flag;
1759 u32 pre_parser_wr_ptr;
1760#endif
1761 u32 ratio_control;
1762 u32 first_pic_flag;
1763 u32 decode_size;
1764 struct mutex chunks_mutex;
1765 int need_cache_size;
1766 u64 sc_start_time;
1767 u32 skip_nal_count;
1768 bool is_swap;
1769 bool is_4k;
1770 int frameinfo_enable;
1771 struct vframe_qos_s vframe_qos;
1772 bool is_used_v4l;
1773 void *v4l2_ctx;
1774 bool v4l_params_parsed;
1775 u32 mem_map_mode;
1776 u32 performance_profile;
1777 struct vdec_info *gvs;
1778 unsigned int res_ch_flag;
1779 bool ip_mode;
1780 u32 kpi_first_i_comming;
1781 u32 kpi_first_i_decoded;
1782 int sidebind_type;
1783 int sidebind_channel_id;
1784 u32 last_dec_pic_offset;
1785 u32 min_pic_size;
1786 u32 pts_continue_miss;
1787 u32 pts_lookup_margin;
1788 u32 again_count;
1789 u64 again_timeout_jiffies;
1790 u32 pre_parser_video_rp;
1791 u32 pre_parser_video_wp;
1792 bool dv_duallayer;
1793 u32 poc_error_count;
1794} /*hevc_stru_t */;
1795
1796#ifdef AGAIN_HAS_THRESHOLD
1797static u32 again_threshold;
1798#endif
1799#ifdef SEND_LMEM_WITH_RPM
1800#define get_lmem_params(hevc, ladr) \
1801 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
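/*
 * Editorial note (not from the original source): the index expression
 * reverses the four 16-bit words inside each aligned group of four
 * (apparently an endianness fix-up of the lmem dump), e.g.
 *   ladr 0x40 -> 0x40 - 0 + 3 - 0 = 0x43
 *   ladr 0x41 -> 0x41 - 1 + 3 - 1 = 0x42
 *   ladr 0x42 -> 0x42 - 2 + 3 - 2 = 0x41
 *   ladr 0x43 -> 0x43 - 3 + 3 - 3 = 0x40
 */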
1802
1803
1804static int get_frame_mmu_map_size(void)
1805{
1806 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1807 return (MAX_FRAME_8K_NUM * 4);
1808
1809 return (MAX_FRAME_4K_NUM * 4);
1810}
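/*
 * Editorial note (not from the original source): the frame MMU map appears
 * to hold one 32-bit entry per 4K page of a compressed frame, so
 *   SM1 and later: MAX_FRAME_8K_NUM * 4 = 0x4800 * 4 = 0x12000 bytes
 *   older chips:   MAX_FRAME_4K_NUM * 4 = 0x1200 * 4 = 0x4800 bytes
 */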
1811
1812static int is_oversize(int w, int h)
1813{
1814 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1815 MAX_SIZE_8K : MAX_SIZE_4K;
1816
1817 if (w < 0 || h < 0)
1818 return true;
1819
1820 if (h != 0 && (w > max / h))
1821 return true;
1822
1823 return false;
1824}
1825
1826int is_oversize_ex(int w, int h)
1827{
1828 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) ?
1829 MAX_SIZE_8K : MAX_SIZE_4K;
1830
1831 if (w == 0 || h == 0)
1832 return true;
1833 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
1834 if (w > 8192 || h > 4608)
1835 return true;
1836 } else {
1837 if (w > 4096 || h > 2304)
1838 return true;
1839 }
1840
1841 if (w < 0 || h < 0)
1842 return true;
1843
1844 if (h != 0 && (w > max / h))
1845 return true;
1846
1847 return false;
1848}
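/*
 * Editorial note (not from the original source): sample inputs for the
 * checks above, assuming an SM1-or-later chip (max = MAX_SIZE_8K):
 *   8192 x 4608 -> w <= 8192, h <= 4608, w <= MAX_SIZE_8K/h (8192) -> allowed
 *   8192 x 4864 -> h > 4608                                        -> oversize
 *   4096 x 2304 on an older chip -> exactly MAX_SIZE_4K            -> allowed
 */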
1849
1850
1851void check_head_error(struct hevc_state_s *hevc)
1852{
1853#define pcm_enabled_flag 0x040
1854#define pcm_sample_bit_depth_luma 0x041
1855#define pcm_sample_bit_depth_chroma 0x042
1856 hevc->head_error_flag = 0;
1857 if ((error_handle_policy & 0x40) == 0)
1858 return;
1859 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1860 uint16_t pcm_depth_luma = get_lmem_params(
1861 hevc, pcm_sample_bit_depth_luma);
1862 uint16_t pcm_sample_chroma = get_lmem_params(
1863 hevc, pcm_sample_bit_depth_chroma);
1864 if (pcm_depth_luma >
1865 hevc->bit_depth_luma ||
1866 pcm_sample_chroma >
1867 hevc->bit_depth_chroma) {
1868 hevc_print(hevc, 0,
1869 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1870 pcm_depth_luma,
1871 pcm_sample_chroma,
1872 hevc->bit_depth_luma,
1873 hevc->bit_depth_chroma);
1874 hevc->head_error_flag = 1;
1875 }
1876 }
1877}
1878#endif
1879
1880#ifdef SUPPORT_10BIT
1881/* Lossless compression body buffer size: 4K per 64x32 block (jt) */
1882static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1883 int width, int height, int mem_saving_mode)
1884{
1885 int width_x64;
1886 int height_x32;
1887 int bsize;
1888
1889 width_x64 = width + 63;
1890 width_x64 >>= 6;
1891
1892 height_x32 = height + 31;
1893 height_x32 >>= 5;
1894 if (mem_saving_mode == 1 && hevc->mmu_enable)
1895 bsize = 3200 * width_x64 * height_x32;
1896 else if (mem_saving_mode == 1)
1897 bsize = 3072 * width_x64 * height_x32;
1898 else
1899 bsize = 4096 * width_x64 * height_x32;
1900
1901 return bsize;
1902}
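/*
 * Editorial note (not from the original source): worked example for a
 * 3840x2160 picture with mem_saving_mode disabled:
 *   width_x64  = (3840 + 63) >> 6 = 60
 *   height_x32 = (2160 + 31) >> 5 = 68
 *   bsize      = 4096 * 60 * 68   = 16711680 bytes (~16 MB)
 */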
1903
1904/* Lossless compression header buffer size: 32 bytes per 128x64 block (jt) */
1905static int compute_losless_comp_header_size(int width, int height)
1906{
1907 int width_x128;
1908 int height_x64;
1909 int hsize;
1910
1911 width_x128 = width + 127;
1912 width_x128 >>= 7;
1913
1914 height_x64 = height + 63;
1915 height_x64 >>= 6;
1916
1917 hsize = 32*width_x128*height_x64;
1918
1919 return hsize;
1920}
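/*
 * Editorial note (not from the original source): worked example for the
 * same 3840x2160 picture:
 *   width_x128 = (3840 + 127) >> 7 = 30
 *   height_x64 = (2160 + 63) >> 6  = 34
 *   hsize      = 32 * 30 * 34      = 32640 bytes
 */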
1921#endif
1922
1923static int add_log(struct hevc_state_s *hevc,
1924 const char *fmt, ...)
1925{
1926#define HEVC_LOG_BUF 196
1927 struct debug_log_s *log_item;
1928 unsigned char buf[HEVC_LOG_BUF];
1929 int len = 0;
1930 va_list args;
1931 mutex_lock(&vh265_log_mutex);
1932 va_start(args, fmt);
1933 len = sprintf(buf, "<%ld> <%05d> ",
1934 jiffies, hevc->decode_idx);
1935 len += vsnprintf(buf + len,
1936 HEVC_LOG_BUF - len, fmt, args);
1937 va_end(args);
1938 log_item = kmalloc(
1939 sizeof(struct debug_log_s) + len,
1940 GFP_KERNEL);
1941 if (log_item) {
1942 INIT_LIST_HEAD(&log_item->list);
1943 strcpy(&log_item->data, buf);
1944 list_add_tail(&log_item->list,
1945 &hevc->log_list);
1946 }
1947 mutex_unlock(&vh265_log_mutex);
1948 return 0;
1949}
1950
1951static void dump_log(struct hevc_state_s *hevc)
1952{
1953 int i = 0;
1954 struct debug_log_s *log_item, *tmp;
1955 mutex_lock(&vh265_log_mutex);
1956 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1957 hevc_print(hevc, 0,
1958 "[LOG%04d]%s\n",
1959 i++,
1960 &log_item->data);
1961 list_del(&log_item->list);
1962 kfree(log_item);
1963 }
1964 mutex_unlock(&vh265_log_mutex);
1965}
1966
1967static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1968 struct PIC_s *pic)
1969{
1970 if (pic->error_mark
1971 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1972 return 1;
1973 return 0;
1974}
1975
1976static int get_pic_poc(struct hevc_state_s *hevc,
1977 unsigned int idx)
1978{
1979 if (idx != 0xff
1980 && idx < MAX_REF_PIC_NUM
1981 && hevc->m_PIC[idx])
1982 return hevc->m_PIC[idx]->POC;
1983 return INVALID_POC;
1984}
1985
1986#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1987static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1988{
1989 return (hevc->m_ins_flag &&
1990 ((double_write_mode & 0x80000000) == 0)) ?
1991 hevc->double_write_mode :
1992 (double_write_mode & 0x7fffffff);
1993}
1994
1995static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1996{
1997 return (hevc->m_ins_flag &&
1998 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1999 hevc->dynamic_buf_num_margin :
2000 (dynamic_buf_num_margin & 0x7fffffff);
2001}
2002#endif
2003
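/*
 * Double write mode selection: 0x100/0x200/0x300 are "auto" modes that
 * keep 1:1 double write for small streams and switch to 1:2 (0x100,
 * 0x300) or 1:4 (0x200) once the stream is larger than 1080p
 * (0x100/0x200) or 720p (0x300); any other value is used as the
 * double write mode directly.
 */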
2004static int get_double_write_mode(struct hevc_state_s *hevc)
2005{
2006 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
2007 int w = hevc->pic_w;
2008 int h = hevc->pic_h;
2009 u32 dw = 0x1; /*1:1*/
2010 switch (valid_dw_mode) {
2011 case 0x100:
2012 if (w > 1920 && h > 1088)
2013 dw = 0x4; /*1:2*/
2014 break;
2015 case 0x200:
2016 if (w > 1920 && h > 1088)
2017 dw = 0x2; /*1:4*/
2018 break;
2019 case 0x300:
2020 if (w > 1280 && h > 720)
2021 dw = 0x4; /*1:2*/
2022 break;
2023 default:
2024 dw = valid_dw_mode;
2025 break;
2026 }
2027 return dw;
2028}
2029
2030static int v4l_parser_get_double_write_mode(struct hevc_state_s *hevc, int w, int h)
2031{
2032 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
2033 u32 dw = 0x1; /*1:1*/
2034 switch (valid_dw_mode) {
2035 case 0x100:
2036 if (w > 1920 && h > 1088)
2037 dw = 0x4; /*1:2*/
2038 break;
2039 case 0x200:
2040 if (w > 1920 && h > 1088)
2041 dw = 0x2; /*1:4*/
2042 break;
2043 case 0x300:
2044 if (w > 1280 && h > 720)
2045 dw = 0x4; /*1:2*/
2046 break;
2047 default:
2048 dw = valid_dw_mode;
2049 break;
2050 }
2051 return dw;
2052}
2053
2054
2055static int get_double_write_ratio(struct hevc_state_s *hevc,
2056 int dw_mode)
2057{
2058 int ratio = 1;
2059 if ((dw_mode == 2) ||
2060 (dw_mode == 3))
2061 ratio = 4;
2062 else if (dw_mode == 4)
2063 ratio = 2;
2064 return ratio;
2065}
2066#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2067static unsigned char get_idx(struct hevc_state_s *hevc)
2068{
2069 return hevc->index;
2070}
2071#endif
2072
2073#undef pr_info
2074#define pr_info printk
2075static int hevc_print(struct hevc_state_s *hevc,
2076 int flag, const char *fmt, ...)
2077{
2078#define HEVC_PRINT_BUF 256
2079 unsigned char buf[HEVC_PRINT_BUF];
2080 int len = 0;
2081#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2082 if (hevc == NULL ||
2083 (flag == 0) ||
2084 ((debug_mask &
2085 (1 << hevc->index))
2086 && (debug & flag))) {
2087#endif
2088 va_list args;
2089
2090 va_start(args, fmt);
2091 if (hevc)
2092 len = sprintf(buf, "[%d]", hevc->index);
2093 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2094 pr_debug("%s", buf);
2095 va_end(args);
2096#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2097 }
2098#endif
2099 return 0;
2100}
2101
2102static int hevc_print_cont(struct hevc_state_s *hevc,
2103 int flag, const char *fmt, ...)
2104{
2105 unsigned char buf[HEVC_PRINT_BUF];
2106 int len = 0;
2107#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2108 if (hevc == NULL ||
2109 (flag == 0) ||
2110 ((debug_mask &
2111 (1 << hevc->index))
2112 && (debug & flag))) {
2113#endif
2114 va_list args;
2115
2116 va_start(args, fmt);
2117 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2118 pr_info("%s", buf);
2119 va_end(args);
2120#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2121 }
2122#endif
2123 return 0;
2124}
2125
2126static void put_mv_buf(struct hevc_state_s *hevc,
2127 struct PIC_s *pic);
2128
2129static void update_vf_memhandle(struct hevc_state_s *hevc,
2130 struct vframe_s *vf, struct PIC_s *pic);
2131
2132static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2133
2134static void release_aux_data(struct hevc_state_s *hevc,
2135 struct PIC_s *pic);
2136static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2137
2138#ifdef MULTI_INSTANCE_SUPPORT
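/*
 * backup_decode_state()/restore_decode_state(): snapshot the parser
 * and POC state before a picture is decoded so that, when a partially
 * decoded picture has to be discarded, the saved state can be restored
 * and the picture's mv/aux buffers returned.
 */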
2139static void backup_decode_state(struct hevc_state_s *hevc)
2140{
2141 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2142 hevc->curr_POC_bak = hevc->curr_POC;
2143 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2144 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2145 hevc->start_parser_type_bak = hevc->start_parser_type;
2146 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2147 hevc->rps_set_id_bak = hevc->rps_set_id;
2148 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2149 hevc->decode_idx_bak = hevc->decode_idx;
2150
2151}
2152
2153static void restore_decode_state(struct hevc_state_s *hevc)
2154{
2155 struct vdec_s *vdec = hw_to_vdec(hevc);
2156 if (!vdec_has_more_input(vdec)) {
2157 hevc->pic_decoded_lcu_idx =
2158 READ_VREG(HEVC_PARSER_LCU_START)
2159 & 0xffffff;
2160 return;
2161 }
2162 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2163 "%s: discard pic index 0x%x\n",
2164 __func__, hevc->decoding_pic ?
2165 hevc->decoding_pic->index : 0xff);
2166 if (hevc->decoding_pic) {
2167 hevc->decoding_pic->error_mark = 0;
2168 hevc->decoding_pic->output_ready = 0;
2169 hevc->decoding_pic->output_mark = 0;
2170 hevc->decoding_pic->referenced = 0;
2171 hevc->decoding_pic->POC = INVALID_POC;
2172 put_mv_buf(hevc, hevc->decoding_pic);
2173 release_aux_data(hevc, hevc->decoding_pic);
2174 hevc->decoding_pic = NULL;
2175 }
2176 hevc->decode_idx = hevc->decode_idx_bak;
2177 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2178 hevc->curr_POC = hevc->curr_POC_bak;
2179 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2180 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2181 hevc->start_parser_type = hevc->start_parser_type_bak;
2182 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2183 hevc->rps_set_id = hevc->rps_set_id_bak;
2184 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2185
2186 if (hevc->pic_list_init_flag == 1)
2187 hevc->pic_list_init_flag = 0;
2188 /*if (hevc->decode_idx == 0)
2189 hevc->start_decoding_flag = 0;*/
2190
2191 hevc->slice_idx = 0;
2192 hevc->used_4k_num = -1;
2193}
2194#endif
2195
2196static void hevc_init_stru(struct hevc_state_s *hevc,
2197 struct BuffInfo_s *buf_spec_i)
2198{
2199 int i;
2200 INIT_LIST_HEAD(&hevc->log_list);
2201 hevc->work_space_buf = buf_spec_i;
2202 hevc->prefix_aux_size = 0;
2203 hevc->suffix_aux_size = 0;
2204 hevc->aux_addr = NULL;
2205 hevc->rpm_addr = NULL;
2206 hevc->lmem_addr = NULL;
2207
2208 hevc->curr_POC = INVALID_POC;
2209
2210 hevc->pic_list_init_flag = 0;
2211 hevc->use_cma_flag = 0;
2212 hevc->decode_idx = 0;
2213 hevc->slice_idx = 0;
2214 hevc->new_pic = 0;
2215 hevc->new_tile = 0;
2216 hevc->iPrevPOC = 0;
2217 hevc->list_no = 0;
2218 /* int m_uiMaxCUWidth = 1<<7; */
2219 /* int m_uiMaxCUHeight = 1<<7; */
2220 hevc->m_pocRandomAccess = MAX_INT;
2221 hevc->tile_enabled = 0;
2222 hevc->tile_x = 0;
2223 hevc->tile_y = 0;
2224 hevc->iPrevTid0POC = 0;
2225 hevc->slice_addr = 0;
2226 hevc->slice_segment_addr = 0;
2227 hevc->skip_flag = 0;
2228 hevc->misc_flag0 = 0;
2229
2230 hevc->cur_pic = NULL;
2231 hevc->col_pic = NULL;
2232 hevc->wait_buf = 0;
2233 hevc->error_flag = 0;
2234 hevc->head_error_flag = 0;
2235 hevc->error_skip_nal_count = 0;
2236 hevc->have_vps = 0;
2237 hevc->have_sps = 0;
2238 hevc->have_pps = 0;
2239 hevc->have_valid_start_slice = 0;
2240
2241 hevc->pts_mode = PTS_NORMAL;
2242 hevc->last_pts = 0;
2243 hevc->last_lookup_pts = 0;
2244 hevc->last_pts_us64 = 0;
2245 hevc->last_lookup_pts_us64 = 0;
2246 hevc->pts_mode_switching_count = 0;
2247 hevc->pts_mode_recovery_count = 0;
2248
2249 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2250 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2251 if (hevc->PB_skip_mode == 0)
2252 hevc->ignore_bufmgr_error = 0x1;
2253 else
2254 hevc->ignore_bufmgr_error = 0x0;
2255
2256 if (hevc->is_used_v4l) {
2257 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2258 if (hevc->m_PIC[i] != NULL) {
2259				memset(hevc->m_PIC[i], 0, sizeof(struct PIC_s));
2260 hevc->m_PIC[i]->index = i;
2261 }
2262 }
2263 }
2264
2265 hevc->pic_num = 0;
2266 hevc->lcu_x_num_pre = 0;
2267 hevc->lcu_y_num_pre = 0;
2268 hevc->first_pic_after_recover = 0;
2269
2270 hevc->pre_top_pic = NULL;
2271 hevc->pre_bot_pic = NULL;
2272
2273 hevc->sei_present_flag = 0;
2274 hevc->valve_count = 0;
2275 hevc->first_pic_flag = 0;
2276#ifdef MULTI_INSTANCE_SUPPORT
2277 hevc->decoded_poc = INVALID_POC;
2278 hevc->start_process_time = 0;
2279 hevc->last_lcu_idx = 0;
2280 hevc->decode_timeout_count = 0;
2281 hevc->timeout_num = 0;
2282 hevc->eos = 0;
2283 hevc->pic_decoded_lcu_idx = -1;
2284 hevc->over_decode = 0;
2285 hevc->used_4k_num = -1;
2286 hevc->start_decoding_flag = 0;
2287 hevc->rps_set_id = 0;
2288 backup_decode_state(hevc);
2289#endif
2290#ifdef DETREFILL_ENABLE
2291 hevc->detbuf_adr = 0;
2292 hevc->detbuf_adr_virt = NULL;
2293#endif
2294}
2295
2296static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2297static int H265_alloc_mmu(struct hevc_state_s *hevc,
2298 struct PIC_s *new_pic, unsigned short bit_depth,
2299 unsigned int *mmu_index_adr);
2300
2301#ifdef DETREFILL_ENABLE
2302#define DETREFILL_BUF_SIZE (4 * 0x4000)
2303#define HEVC_SAO_DBG_MODE0 0x361e
2304#define HEVC_SAO_DBG_MODE1 0x361f
2305#define HEVC_SAO_CTRL10 0x362e
2306#define HEVC_SAO_CTRL11 0x362f
2307static int init_detrefill_buf(struct hevc_state_s *hevc)
2308{
2309 if (hevc->detbuf_adr_virt)
2310 return 0;
2311
2312 hevc->detbuf_adr_virt =
2313 (void *)dma_alloc_coherent(amports_get_dma_device(),
2314 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2315 GFP_KERNEL);
2316
2317 if (hevc->detbuf_adr_virt == NULL) {
2318		pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2319 return -1;
2320 }
2321 return 0;
2322}
2323
2324static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2325{
2326 if (hevc->detbuf_adr_virt) {
2327 dma_free_coherent(amports_get_dma_device(),
2328 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2329 hevc->detbuf_adr);
2330
2331 hevc->detbuf_adr_virt = NULL;
2332 hevc->detbuf_adr = 0;
2333 }
2334}
2335
2336/*
2337 * convert uncompressed frame buffer data from/to ddr
2338 */
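/*
 * direction == 0: unpack the 10-bit samples of one 8x4 luma block and
 * its 4x2 Cb/Cr blocks from the packed 16-bit words in cmBodyBuf;
 * direction != 0: pack them back into cmBodyBuf.
 */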
2339static void convUnc8x4blk(uint16_t* blk8x4Luma,
2340 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2341{
2342 if (direction == 0) {
2343 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2344 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2345 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2346 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2347 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2348 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2349 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2350 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2351 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2352 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2353 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2354 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2355 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2356 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2357 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2358 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2359 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2360 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2361
2362 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2363 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2364 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2365 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2366 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2367 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2368 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2369 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2370 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2371 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2372 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2373 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2374 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2375 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2376 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2377 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2378 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2379 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2380
2381 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2382 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2383 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2384 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2385 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2386 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2387 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2388 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2389 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2390 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2391 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2392 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2393 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2394 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2395 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2396 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2397 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2398 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2399
2400 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2401 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2402 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2403 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2404 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2405 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2406 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2407 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2408 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2409 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2410 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2411 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2412 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2413 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2414 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2415 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2416 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2417 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2418 } else {
2419 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2420 blk8x4Luma[3 + 0 * 8];
2421 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2422 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2423 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2424 (blk8x4Luma[3 + 3 * 8] >> 2);
2425 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2426 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2427 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2428 (blk8x4Luma[7 + 2 * 8] >>4);
2429 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2430 blk8x4Cb[0 + 0 * 4];
2431 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2432 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2433 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2434
2435 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2436 blk8x4Luma[0 + 0 * 8];
2437 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2438 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2439 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2440 (blk8x4Luma[0 + 1 * 8] >> 2);
2441 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2442 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2443 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2444 (blk8x4Luma[0 + 2 * 8] >>4);
2445 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2446 blk8x4Luma[2 + 2 * 8];
2447 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2448 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2449 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2450
2451 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2452 blk8x4Luma[4 + 0 * 8];
2453 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2454 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2455 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2456 (blk8x4Luma[4 + 1 * 8] >> 2);
2457 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2458 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2459 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2460 (blk8x4Luma[4 + 2 * 8] >>4);
2461 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2462 blk8x4Luma[6 + 2 * 8];
2463 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2464 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2465 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2466
2467 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2468 blk8x4Cb[1 + 0 * 4];
2469 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2470 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2471 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2472 (blk8x4Cr[2 + 0 * 4] >> 2);
2473 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2474 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2475 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2476 (blk8x4Cb[1 + 1 * 4] >>4);
2477 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2478 blk8x4Cb[2 + 1 * 4];
2479 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2480 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2481 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2482 }
2483}
2484
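/*
 * corrRefillWithAmrisc(): use the SAO debug registers to have the
 * AMRISC dump the compressed body data for the CTU at ctuPosition
 * (and the CTU above it, when available) into detbuf, rebuild the
 * damaged samples of each unpacked 8x4 block from its neighbours,
 * then write the buffer back and trigger the restore to DDR.
 */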
2485static void corrRefillWithAmrisc (
2486 struct hevc_state_s *hevc,
2487 uint32_t cmHeaderBaseAddr,
2488 uint32_t picWidth,
2489 uint32_t ctuPosition)
2490{
2491 int32_t i;
2492 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2493 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2494 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2495
2496 uint16_t cmBodyBuf[32 * 18];
2497
2498 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2499 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2500 uint32_t stride64x64 = pic_width_x64 * 128;
2501 uint32_t addr_offset64x64_abv = stride64x64 *
2502 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2503 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2504 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2505 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2506 unsigned int tmpData32;
2507
2508 uint16_t blkBuf0Y[32];
2509 uint16_t blkBuf0Cb[8];
2510 uint16_t blkBuf0Cr[8];
2511 uint16_t blkBuf1Y[32];
2512 uint16_t blkBuf1Cb[8];
2513 uint16_t blkBuf1Cr[8];
2514 int32_t blkBufCnt = 0;
2515
2516 int32_t blkIdx;
2517
2518 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2519 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2520 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2521 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2522
2523 for (i = 0; i < 32 * 18; i++)
2524 cmBodyBuf[i] = 0;
2525
2526 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2527 "%s, %d\n", __func__, __LINE__);
2528 do {
2529 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2530 } while (tmpData32);
2531 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2532 "%s, %d\n", __func__, __LINE__);
2533
2534 hevc_print(hevc, H265_DEBUG_DETAIL,
2535 "cmBodyBuf from detbuf:\n");
2536 for (i = 0; i < 32 * 18; i++) {
2537 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2538 if (get_dbg_flag(hevc) &
2539 H265_DEBUG_DETAIL) {
2540 if ((i & 0xf) == 0)
2541 hevc_print_cont(hevc, 0, "\n");
2542 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2543 }
2544 }
2545 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2546
2547 for (i = 0; i < 32; i++)
2548 blkBuf0Y[i] = 0;
2549 for (i = 0; i < 8; i++)
2550 blkBuf0Cb[i] = 0;
2551 for (i = 0; i < 8; i++)
2552 blkBuf0Cr[i] = 0;
2553 for (i = 0; i < 32; i++)
2554 blkBuf1Y[i] = 0;
2555 for (i = 0; i < 8; i++)
2556 blkBuf1Cb[i] = 0;
2557 for (i = 0; i < 8; i++)
2558 blkBuf1Cr[i] = 0;
2559
2560 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2561 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2562 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2563 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2564 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2565 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2566 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2567
2568 if (!aboveCtuAvailable && inAboveCtu)
2569 continue;
2570
2571 /* detRefillBuf --> 8x4block*/
2572 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2573
2574 if (restoreEnable) {
2575 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2576 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2577 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2578 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2579 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2580 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2581 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2582 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2583 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2584 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2585 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2586 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2587 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2588 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2589 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2590 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2591 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2592 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2593 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2594 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2595 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2596 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2597 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2598 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2599 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2600 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2601 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2602 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2603 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2604 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2605 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2606 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2607
2608 /*Store data back to DDR*/
2609 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2610 }
2611
2612 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2613 }
2614
2615 hevc_print(hevc, H265_DEBUG_DETAIL,
2616 "cmBodyBuf to detbuf:\n");
2617 for (i = 0; i < 32 * 18; i++) {
2618 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2619 if (get_dbg_flag(hevc) &
2620 H265_DEBUG_DETAIL) {
2621 if ((i & 0xf) == 0)
2622 hevc_print_cont(hevc, 0, "\n");
2623 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2624 }
2625 }
2626 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2627
2628 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2629 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2630 "%s, %d\n", __func__, __LINE__);
2631 do {
2632 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2633 } while (tmpData32);
2634 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2635 "%s, %d\n", __func__, __LINE__);
2636}
2637
2638static void delrefill(struct hevc_state_s *hevc)
2639{
2640 /*
2641 * corrRefill
2642 */
2643 /*HEVC_SAO_DBG_MODE0: picGlobalVariable
2644 [31:30]error number
2645 [29:20]error2([9:7]tilex[6:0]ctuy)
2646 [19:10]error1 [9:0]error0*/
2647 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2648 uint32_t errorIdx;
2649 uint32_t errorNum = (detResult>>30);
2650
2651 if (detResult) {
2652 hevc_print(hevc, H265_DEBUG_BUFMGR,
2653 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2654 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2655 uint32_t errorPos = errorIdx * 10;
2656 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2657 uint32_t tilex = (errorResult >> 7) - 1;
2658 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2659 + hevc->m_tile[0][tilex].width - 1;
2660 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2661 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2662 hevc_print(hevc, H265_DEBUG_BUFMGR,
2663 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2664 errorIdx,tilex,ctux,ctux, ctuy,ctuy);
2665 corrRefillWithAmrisc(
2666 hevc,
2667 (uint32_t)hevc->cur_pic->header_adr,
2668 hevc->pic_w,
2669 ctuPosition);
2670 }
2671
2672 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2673 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2674 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2675 }
2676}
2677#endif
2678
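/*
 * get_rpm_param(): read the 128 16-bit RPM parameters that the ucode
 * pushes through RPM_CMD_REG. Bit 16 flags "data valid", the low 16
 * bits carry the payload, and writing 0 acks each word.
 */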
2679static void get_rpm_param(union param_u *params)
2680{
2681 int i;
2682 unsigned int data32;
2683
2684 for (i = 0; i < 128; i++) {
2685 do {
2686 data32 = READ_VREG(RPM_CMD_REG);
2687 /* hevc_print(hevc, 0, "%x\n", data32); */
2688 } while ((data32 & 0x10000) == 0);
2689 params->l.data[i] = data32 & 0xffff;
2690 /* hevc_print(hevc, 0, "%x\n", data32); */
2691 WRITE_VREG(RPM_CMD_REG, 0);
2692 }
2693}
2694
2695static int get_free_buf_idx(struct hevc_state_s *hevc)
2696{
2697 int index = INVALID_IDX;
2698 struct PIC_s *pic;
2699 int i;
2700
2701 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2702 pic = hevc->m_PIC[i];
2703 if (pic == NULL ||
2704 pic->index == -1 ||
2705 pic->BUF_index == -1)
2706 continue;
2707
2708 if (pic->output_mark == 0 &&
2709 pic->referenced == 0 &&
2710 pic->output_ready == 0 &&
2711 pic->cma_alloc_addr) {
2712 pic->output_ready = 1;
2713 index = i;
2714 break;
2715 }
2716 }
2717
2718 return index;
2719}
2720
2721static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2722{
2723 int i;
2724 struct PIC_s *pic;
2725 struct PIC_s *ret_pic = NULL;
2726 if (POC == INVALID_POC)
2727 return NULL;
2728 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2729 pic = hevc->m_PIC[i];
2730 if (pic == NULL || pic->index == -1 ||
2731 pic->BUF_index == -1)
2732 continue;
2733 if (pic->POC == POC) {
2734 if (ret_pic == NULL)
2735 ret_pic = pic;
2736 else {
2737 if (pic->decode_idx > ret_pic->decode_idx)
2738 ret_pic = pic;
2739 }
2740 }
2741 }
2742 return ret_pic;
2743}
2744
2745static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2746{
2747 int i;
2748 struct PIC_s *pic;
2749 struct PIC_s *ret_pic = NULL;
2750
2751 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2752 pic = hevc->m_PIC[i];
2753 if (pic == NULL || pic->index == -1 ||
2754 pic->BUF_index == -1)
2755 continue;
2756		/* Also check the ref picture's width and height so that a
2757		frame with a mismatched resolution is not used as a reference. */
2758 if ((pic->POC == POC) && (pic->referenced) &&
2759 (hevc->pic_w == pic->width) &&
2760 (hevc->pic_h == pic->height)) {
2761 if (ret_pic == NULL)
2762 ret_pic = pic;
2763 else {
2764 if (pic->decode_idx > ret_pic->decode_idx)
2765 ret_pic = pic;
2766 }
2767 }
2768 }
2769
2770 return ret_pic;
2771}
2772
2773static unsigned int log2i(unsigned int val)
2774{
2775 unsigned int ret = -1;
2776
2777 while (val != 0) {
2778 val >>= 1;
2779 ret++;
2780 }
2781 return ret;
2782}
2783
2784static int init_buf_spec(struct hevc_state_s *hevc);
2785
2786static bool v4l_is_there_vframe_bound(struct hevc_state_s *hevc)
2787{
2788 int i;
2789
2790 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2791 struct PIC_s *pic = hevc->m_PIC[i];
2792
2793 if (pic && pic->vframe_bound)
2794 return true;
2795 }
2796
2797 return false;
2798}
2799
2800static void v4l_mmu_buffer_release(struct hevc_state_s *hevc)
2801{
2802 int i;
2803
2804 /* release workspace */
2805 if (hevc->bmmu_box)
2806 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2807 BMMU_WORKSPACE_ID);
2808	/*
2809	 * Only when a vframe comes back to the driver can we be sure the
2810	 * vframe is still tied to a user fd. If playback exits, buffers still
2811	 * bound to a vframe are released by the upper app when the fd is
2812	 * closed; all other buffers are released here by the driver.
2813	 */
2814 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2815 struct PIC_s *pic = hevc->m_PIC[i];
2816
2817 if (pic && !pic->vframe_bound) {
2818 if (hevc->bmmu_box)
2819 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2820 VF_BUFFER_IDX(i));
2821 if (hevc->mmu_box)
2822 decoder_mmu_box_free_idx(hevc->mmu_box, i);
2823
2824 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
2825 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
2826 __func__, i, hevc->bmmu_box, hevc->mmu_box);
2827 }
2828 }
2829}
2830
2831static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2832{
2833 if (hevc->is_used_v4l &&
2834 v4l_is_there_vframe_bound(hevc)) {
2835 if (get_double_write_mode(hevc) != 0x10) {
2836 v4l_mmu_buffer_release(hevc);
2837 return;
2838 }
2839 }
2840
2841 if (hevc->mmu_box)
2842 decoder_mmu_box_free(hevc->mmu_box);
2843 hevc->mmu_box = NULL;
2844
2845 if (hevc->bmmu_box)
2846 decoder_bmmu_box_free(hevc->bmmu_box);
2847 hevc->bmmu_box = NULL;
2848}
2849
2850static int init_mmu_box(struct hevc_state_s *hevc)
2851{
2852 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2853 CODEC_MM_FLAGS_TVP : 0;
2854 int buf_size = 64;
2855
2856 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2857 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2858 buf_size = 24;
2859 }
2860
2861 if (get_dbg_flag(hevc)) {
2862 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2863 __func__, hevc->max_pic_w, hevc->max_pic_h);
2864 }
2865
2866 hevc->need_cache_size = buf_size * SZ_1M;
2867 hevc->sc_start_time = get_jiffies_64();
2868 if (hevc->mmu_enable
2869 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2870 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2871 hevc->index,
2872 MAX_REF_PIC_NUM,
2873 buf_size * SZ_1M,
2874 tvp_flag
2875 );
2876 if (!hevc->mmu_box) {
2877 hevc_print(hevc, 0, "h265 alloc mmu box failed!!\n");
2878 return -1;
2879 }
2880 }
2881
2882 return 0;
2883}
2884
2885static int init_mmu_buffers(struct hevc_state_s *hevc)
2886{
2887 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2888 CODEC_MM_FLAGS_TVP : 0;
2889 int buf_size = 64;
2890
2891 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2892 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2893 buf_size = 24;
2894 }
2895
2896 if (get_dbg_flag(hevc)) {
2897 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2898 __func__, hevc->max_pic_w, hevc->max_pic_h);
2899 }
2900
2901 hevc->need_cache_size = buf_size * SZ_1M;
2902 hevc->sc_start_time = get_jiffies_64();
2903 if (hevc->mmu_enable
2904 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2905 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2906 hevc->index,
2907 MAX_REF_PIC_NUM,
2908 buf_size * SZ_1M,
2909 tvp_flag
2910 );
2911 if (!hevc->mmu_box) {
2912 pr_err("h265 alloc mmu box failed!!\n");
2913 return -1;
2914 }
2915 }
2916
2917 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2918 hevc->index,
2919 BMMU_MAX_BUFFERS,
2920 4 + PAGE_SHIFT,
2921 CODEC_MM_FLAGS_CMA_CLEAR |
2922 CODEC_MM_FLAGS_FOR_VDECODER |
2923 tvp_flag);
2924 if (!hevc->bmmu_box) {
2925 if (hevc->mmu_box)
2926 decoder_mmu_box_free(hevc->mmu_box);
2927 hevc->mmu_box = NULL;
2928 pr_err("h265 alloc mmu box failed!!\n");
2929 return -1;
2930 }
2931 return 0;
2932}
2933
2934struct buf_stru_s
2935{
2936 int lcu_total;
2937 int mc_buffer_size_h;
2938 int mc_buffer_size_u_v_h;
2939};
2940
2941#ifndef MV_USE_FIXED_BUF
2942static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2943{
2944 int i;
2945 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2946 if (hevc->m_mv_BUF[i].start_adr) {
2947 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2948 hevc_print(hevc, 0,
2949 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2950 i, hevc->m_mv_BUF[i].start_adr,
2951 hevc->m_mv_BUF[i].size,
2952 hevc->m_mv_BUF[i].used_flag);
2953 decoder_bmmu_box_free_idx(
2954 hevc->bmmu_box,
2955 MV_BUFFER_IDX(i));
2956 hevc->m_mv_BUF[i].start_adr = 0;
2957 hevc->m_mv_BUF[i].size = 0;
2958 hevc->m_mv_BUF[i].used_flag = 0;
2959 }
2960 }
2961 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2962 if (hevc->m_PIC[i] != NULL)
2963 hevc->m_PIC[i]->mv_buf_index = -1;
2964 }
2965}
2966
2967static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2968{
2969 int ret = 0;
2970 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2971 if (decoder_bmmu_box_alloc_buf_phy
2972 (hevc->bmmu_box,
2973 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2974 DRIVER_NAME,
2975 &hevc->m_mv_BUF[i].start_adr) < 0) {
2976 hevc->m_mv_BUF[i].start_adr = 0;
2977 ret = -1;
2978 } else {
2979 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2980 hevc->m_mv_BUF[i].used_flag = 0;
2981 ret = 0;
2982 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2983 hevc_print(hevc, 0,
2984 "MV Buffer %d: start_adr %p size %x\n",
2985 i,
2986 (void *)hevc->m_mv_BUF[i].start_adr,
2987 hevc->m_mv_BUF[i].size);
2988 }
2989 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2990 void *mem_start_virt;
2991 mem_start_virt =
2992 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2993 if (mem_start_virt) {
2994 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2995 codec_mm_dma_flush(mem_start_virt,
2996 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2997 } else {
2998 mem_start_virt = codec_mm_vmap(
2999 hevc->m_mv_BUF[i].start_adr,
3000 hevc->m_mv_BUF[i].size);
3001 if (mem_start_virt) {
3002 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
3003 codec_mm_dma_flush(mem_start_virt,
3004 hevc->m_mv_BUF[i].size,
3005 DMA_TO_DEVICE);
3006 codec_mm_unmap_phyaddr(mem_start_virt);
3007 } else {
3008				/* no virtual mapping when playing TVP (secure)
3009				content; may need to be cleared by the ucode. */
3010 pr_err("ref %s mem_start_virt failed\n", __func__);
3011 }
3012 }
3013 }
3014 }
3015 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3016 return ret;
3017}
3018#endif
3019
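/*
 * get_mv_buf(): with MV_USE_FIXED_BUF the motion-vector buffer is a
 * fixed slice of the workspace selected by picture index; otherwise a
 * buffer of the size needed for the current resolution (8K/4K/smaller)
 * is taken from, or allocated into, the m_mv_BUF pool and marked used.
 */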
3020static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
3021{
3022#ifdef MV_USE_FIXED_BUF
3023 if (pic && pic->index >= 0) {
3024 if (IS_8K_SIZE(pic->width, pic->height)) {
3025 pic->mpred_mv_wr_start_addr =
3026 hevc->work_space_buf->mpred_mv.buf_start
3027 + (pic->index * MPRED_8K_MV_BUF_SIZE);
3028 } else {
3029 pic->mpred_mv_wr_start_addr =
3030 hevc->work_space_buf->mpred_mv.buf_start
3031 + (pic->index * MPRED_4K_MV_BUF_SIZE);
3032 }
3033 }
3034 return 0;
3035#else
3036 int i;
3037 int ret = -1;
3038 int new_size;
3039 if (IS_8K_SIZE(pic->width, pic->height))
3040 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
3041 else if (IS_4K_SIZE(pic->width, pic->height))
3042 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
3043 else
3044 new_size = MPRED_MV_BUF_SIZE + 0x10000;
3045 if (new_size != hevc->mv_buf_size) {
3046 dealloc_mv_bufs(hevc);
3047 hevc->mv_buf_size = new_size;
3048 }
3049 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3050 if (hevc->m_mv_BUF[i].start_adr &&
3051 hevc->m_mv_BUF[i].used_flag == 0) {
3052 hevc->m_mv_BUF[i].used_flag = 1;
3053 ret = i;
3054 break;
3055 }
3056 }
3057 if (ret < 0) {
3058 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3059 if (hevc->m_mv_BUF[i].start_adr == 0) {
3060 if (alloc_mv_buf(hevc, i) >= 0) {
3061 hevc->m_mv_BUF[i].used_flag = 1;
3062 ret = i;
3063 }
3064 break;
3065 }
3066 }
3067 }
3068
3069 if (ret >= 0) {
3070 pic->mv_buf_index = ret;
3071 pic->mpred_mv_wr_start_addr =
3072 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
3073 (~0xffff);
3074 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
3075 "%s => %d (0x%x) size 0x%x\n",
3076 __func__, ret,
3077 pic->mpred_mv_wr_start_addr,
3078 hevc->m_mv_BUF[ret].size);
3079
3080 } else {
3081 hevc_print(hevc, 0,
3082 "%s: Error, mv buf is not enough\n",
3083 __func__);
3084 }
3085 return ret;
3086
3087#endif
3088}
3089
3090static void put_mv_buf(struct hevc_state_s *hevc,
3091 struct PIC_s *pic)
3092{
3093#ifndef MV_USE_FIXED_BUF
3094 int i = pic->mv_buf_index;
3095 if (i < 0 || i >= MAX_REF_PIC_NUM) {
3096 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
3097 "%s: index %d beyond range\n",
3098 __func__, i);
3099 return;
3100 }
3101 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
3102 "%s(%d): used_flag(%d)\n",
3103 __func__, i,
3104 hevc->m_mv_BUF[i].used_flag);
3105
3106 if (hevc->m_mv_BUF[i].start_adr &&
3107 hevc->m_mv_BUF[i].used_flag)
3108 hevc->m_mv_BUF[i].used_flag = 0;
3109 pic->mv_buf_index = -1;
3110#endif
3111}
3112
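/*
 * cal_current_buf_size(): size of one frame buffer for the current
 * pic_w/pic_h. It adds the compressed-frame header when the MMU is
 * enabled, the double-write luma/chroma planes when double write is
 * on, the whole compressed frame when compression is used without the
 * MMU, plus 64 KB of slack so the start address can be 64 KB aligned.
 */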
3113static int cal_current_buf_size(struct hevc_state_s *hevc,
3114 struct buf_stru_s *buf_stru)
3115{
3116
3117 int buf_size;
3118 int pic_width = hevc->pic_w;
3119 int pic_height = hevc->pic_h;
3120 int lcu_size = hevc->lcu_size;
3121 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
3122 1 : pic_width / lcu_size;
3123 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
3124 1 : pic_height / lcu_size;
3125 /*SUPPORT_10BIT*/
3126 int losless_comp_header_size = compute_losless_comp_header_size
3127 (pic_width, pic_height);
3128 /*always alloc buf for 10bit*/
3129 int losless_comp_body_size = compute_losless_comp_body_size
3130 (hevc, pic_width, pic_height, 0);
3131 int mc_buffer_size = losless_comp_header_size
3132 + losless_comp_body_size;
3133 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
3134 int mc_buffer_size_u_v_h = 0;
3135
3136 int dw_mode = get_double_write_mode(hevc);
3137
3138 if (hevc->mmu_enable) {
3139 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3140 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3141 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
3142 << 16;
3143 else
3144 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
3145 << 16;
3146 } else
3147 buf_size = 0;
3148
3149 if (dw_mode) {
3150 int pic_width_dw = pic_width /
3151 get_double_write_ratio(hevc, dw_mode);
3152 int pic_height_dw = pic_height /
3153 get_double_write_ratio(hevc, dw_mode);
3154
3155 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
3156 pic_width_dw / lcu_size + 1 :
3157 pic_width_dw / lcu_size;
3158 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
3159 pic_height_dw / lcu_size + 1 :
3160 pic_height_dw / lcu_size;
3161 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
3162
3163 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
3164 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
3165 /*64k alignment*/
3166 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
3167 }
3168
3169 if ((!hevc->mmu_enable) &&
3170 ((dw_mode & 0x10) == 0)) {
3171		/* compressed mode without MMU: a buffer is also
3172		needed for the compressed frame data */
3173 buf_size += (mc_buffer_size_h << 16);
3174 }
3175
3176	/* in case the start address is not 64k aligned */
3177 if (buf_size > 0)
3178 buf_size += 0x10000;
3179
3180 if (buf_stru) {
3181 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
3182 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
3183 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
3184 }
3185
3186	hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
3187 pic_width, pic_height, losless_comp_header_size,
3188 losless_comp_body_size, mc_buffer_size_h,
3189 mc_buffer_size_u_v_h, buf_size);
3190
3191 return buf_size;
3192}
3193
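/*
 * v4l_alloc_buf(): obtain a capture buffer from the v4l2 context,
 * allocate a compressed-frame header from the bmmu box when the MMU
 * is enabled, and record the plane addresses for single-plane
 * (contiguous Y/UV) or two-plane buffers.
 */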
3194static int v4l_alloc_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
3195{
3196 int ret = -1;
3197 int i = pic->index;
3198 struct vdec_v4l2_buffer *fb = NULL;
3199
3200 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3201 return ret;
3202
3203 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3204 if (ret < 0) {
3205 hevc_print(hevc, 0, "[%d] H265 get buffer fail.\n",
3206 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id);
3207 return ret;
3208 }
3209
3210 if (hevc->mmu_enable) {
3211 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3212 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3213 hevc->m_BUF[i].header_size =
3214 ALIGN(MMU_COMPRESS_8K_HEADER_SIZE, 0x10000);
3215 else
3216 hevc->m_BUF[i].header_size =
3217 ALIGN(MMU_COMPRESS_HEADER_SIZE, 0x10000);
3218
3219 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
3220 VF_BUFFER_IDX(i), hevc->m_BUF[i].header_size,
3221 DRIVER_NAME, &hevc->m_BUF[i].header_addr);
3222 if (ret < 0) {
3223 hevc_print(hevc, PRINT_FLAG_ERROR,
3224 "%s[%d], header size: %d, no mem fatal err\n",
3225 __func__, i, hevc->m_BUF[i].header_size);
3226 return ret;
3227 }
3228 }
3229
3230 hevc->m_BUF[i].used_flag = 0;
3231 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3232 pic->cma_alloc_addr = hevc->m_BUF[i].v4l_ref_buf_addr;
3233 if (fb->num_planes == 1) {
3234 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3235 hevc->m_BUF[i].luma_size = fb->m.mem[0].offset;
3236 hevc->m_BUF[i].size = fb->m.mem[0].size;
3237 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3238 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3239 pic->dw_u_v_adr = pic->dw_y_adr + hevc->m_BUF[i].luma_size;
3240 } else if (fb->num_planes == 2) {
3241 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3242 hevc->m_BUF[i].luma_size = fb->m.mem[0].size;
3243 hevc->m_BUF[i].chroma_addr = fb->m.mem[1].addr;
3244 hevc->m_BUF[i].chroma_size = fb->m.mem[1].size;
3245 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3246 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3247 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3248 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3249 pic->dw_u_v_adr = hevc->m_BUF[i].chroma_addr;
3250 }
3251
3252 return ret;
3253}
3254
3255static int alloc_buf(struct hevc_state_s *hevc)
3256{
3257 int i;
3258 int ret = -1;
3259 int buf_size = cal_current_buf_size(hevc, NULL);
3260
3261 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3262 return ret;
3263
3264 for (i = 0; i < BUF_POOL_SIZE; i++) {
3265 if (hevc->m_BUF[i].start_adr == 0)
3266 break;
3267 }
3268 if (i < BUF_POOL_SIZE) {
3269 if (buf_size > 0) {
3270 ret = decoder_bmmu_box_alloc_buf_phy
3271 (hevc->bmmu_box,
3272 VF_BUFFER_IDX(i), buf_size,
3273 DRIVER_NAME,
3274 &hevc->m_BUF[i].start_adr);
3275 if (ret < 0) {
3276 hevc->m_BUF[i].start_adr = 0;
3277 if (i <= 8) {
3278 hevc->fatal_error |=
3279 DECODER_FATAL_ERROR_NO_MEM;
3280 hevc_print(hevc, PRINT_FLAG_ERROR,
3281 "%s[%d], size: %d, no mem fatal err\n",
3282 __func__, i, buf_size);
3283 }
3284 }
3285
3286 if (ret >= 0) {
3287 hevc->m_BUF[i].size = buf_size;
3288 hevc->m_BUF[i].used_flag = 0;
3289 ret = 0;
3290
3291 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3292 hevc_print(hevc, 0,
3293 "Buffer %d: start_adr %p size %x\n",
3294 i,
3295 (void *)hevc->m_BUF[i].start_adr,
3296 hevc->m_BUF[i].size);
3297 }
3298			/* flush the buffer to make sure no cache lines are dirty */
3299 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3300 void *mem_start_virt;
3301 mem_start_virt =
3302 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3303 if (mem_start_virt) {
3304 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3305 codec_mm_dma_flush(mem_start_virt,
3306 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3307 } else {
3308 mem_start_virt = codec_mm_vmap(
3309 hevc->m_BUF[i].start_adr,
3310 hevc->m_BUF[i].size);
3311 if (mem_start_virt) {
3312 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3313 codec_mm_dma_flush(mem_start_virt,
3314 hevc->m_BUF[i].size,
3315 DMA_TO_DEVICE);
3316 codec_mm_unmap_phyaddr(mem_start_virt);
3317 } else {
3318					/* no virtual mapping when playing TVP (secure)
3319					content; may need to be cleared by the ucode. */
3320 pr_err("ref %s mem_start_virt failed\n", __func__);
3321 }
3322 }
3323 }
3324 }
3325 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3326 } else
3327 ret = 0;
3328 }
3329
3330 if (ret >= 0) {
3331 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3332 hevc_print(hevc, 0,
3333 "alloc buf(%d) for %d/%d size 0x%x) => %p\n",
3334 i, hevc->pic_w, hevc->pic_h,
3335 buf_size,
3336 hevc->m_BUF[i].start_adr);
3337 }
3338 } else {
3339 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3340 hevc_print(hevc, 0,
3341 "alloc buf(%d) for %d/%d size 0x%x) => Fail!!!\n",
3342 i, hevc->pic_w, hevc->pic_h,
3343 buf_size);
3344 }
3345 }
3346 return ret;
3347}
3348
3349static void set_buf_unused(struct hevc_state_s *hevc, int i)
3350{
3351 if (i >= 0 && i < BUF_POOL_SIZE)
3352 hevc->m_BUF[i].used_flag = 0;
3353}
3354
3355static void dealloc_unused_buf(struct hevc_state_s *hevc)
3356{
3357 int i;
3358 for (i = 0; i < BUF_POOL_SIZE; i++) {
3359 if (hevc->m_BUF[i].start_adr &&
3360 hevc->m_BUF[i].used_flag == 0) {
3361 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3362 hevc_print(hevc, 0,
3363 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3364 i, hevc->m_BUF[i].start_adr,
3365 hevc->m_BUF[i].size);
3366 }
3367 if (!hevc->is_used_v4l)
3368 decoder_bmmu_box_free_idx(
3369 hevc->bmmu_box,
3370 VF_BUFFER_IDX(i));
3371 hevc->m_BUF[i].start_adr = 0;
3372 hevc->m_BUF[i].size = 0;
3373 }
3374 }
3375}
3376
3377static void dealloc_pic_buf(struct hevc_state_s *hevc,
3378 struct PIC_s *pic)
3379{
3380 int i = pic->BUF_index;
3381 pic->BUF_index = -1;
3382 if (i >= 0 &&
3383 i < BUF_POOL_SIZE &&
3384 hevc->m_BUF[i].start_adr) {
3385 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3386 hevc_print(hevc, 0,
3387 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3388 i, hevc->m_BUF[i].start_adr,
3389 hevc->m_BUF[i].size);
3390 }
3391
3392 if (!hevc->is_used_v4l)
3393 decoder_bmmu_box_free_idx(
3394 hevc->bmmu_box,
3395 VF_BUFFER_IDX(i));
3396 hevc->m_BUF[i].used_flag = 0;
3397 hevc->m_BUF[i].start_adr = 0;
3398 hevc->m_BUF[i].size = 0;
3399 }
3400}
3401
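/*
 * get_work_pic_num(): number of frame buffers to allocate, normally
 * sps_num_reorder_pics_0 plus the dynamic buffer margin (falling back
 * to sps_max_dec_pic_buffering_minus1_0 when the reorder count is 0),
 * with one extra buffer for multi-instance decoding and for v4l EOS
 * flushing, clamped to MAX_BUF_NUM.
 */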
3402static int get_work_pic_num(struct hevc_state_s *hevc)
3403{
3404 int used_buf_num = 0;
3405 int sps_pic_buf_diff = 0;
3406
3407 if (get_dynamic_buf_num_margin(hevc) > 0) {
3408 if ((!hevc->sps_num_reorder_pics_0) &&
3409 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3410 /* the range of sps_num_reorder_pics_0 is in
3411 [0, sps_max_dec_pic_buffering_minus1_0] */
3412 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3413 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3414 } else
3415 used_buf_num = hevc->sps_num_reorder_pics_0
3416 + get_dynamic_buf_num_margin(hevc);
3417
3418 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3419 - hevc->sps_num_reorder_pics_0;
3420#ifdef MULTI_INSTANCE_SUPPORT
3421		/*
3422		 need one more for multi instance, as
3423		 apply_ref_pic_set() has no chance to run
3424		 to clear the referenced flag in some cases
3425		 */
3426 if (hevc->m_ins_flag)
3427 used_buf_num++;
3428#endif
3429 } else
3430 used_buf_num = max_buf_num;
3431
3432 if (hevc->save_buffer_mode)
3433 hevc_print(hevc, 0,
3434			"save_buffer_mode: dynamic_buf_num_margin %d ----> %d\n",
3435 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3436
3437 if (sps_pic_buf_diff >= 4)
3438 used_buf_num += sps_pic_buf_diff;
3439
3440 if (hevc->is_used_v4l) {
3441 /* for eos add more buffer to flush.*/
3442 used_buf_num++;
3443 }
3444
3445 if (used_buf_num > MAX_BUF_NUM)
3446 used_buf_num = MAX_BUF_NUM;
3447 return used_buf_num;
3448}
3449
3450static int v4l_parser_work_pic_num(struct hevc_state_s *hevc)
3451{
3452 int used_buf_num = 0;
3453 int sps_pic_buf_diff = 0;
3454 pr_debug("margin = %d, sps_max_dec_pic_buffering_minus1_0 = %d, sps_num_reorder_pics_0 = %d\n",
3455 get_dynamic_buf_num_margin(hevc),
3456 hevc->param.p.sps_max_dec_pic_buffering_minus1_0,
3457 hevc->param.p.sps_num_reorder_pics_0);
3458 if (get_dynamic_buf_num_margin(hevc) > 0) {
3459 if ((!hevc->param.p.sps_num_reorder_pics_0) &&
3460 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3461 /* the range of sps_num_reorder_pics_0 is in
3462 [0, sps_max_dec_pic_buffering_minus1_0] */
3463 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3464 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3465 } else
3466 used_buf_num = hevc->param.p.sps_num_reorder_pics_0
3467 + get_dynamic_buf_num_margin(hevc);
3468
3469 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3470 - hevc->param.p.sps_num_reorder_pics_0;
3471#ifdef MULTI_INSTANCE_SUPPORT
3472		/*
3473		 need one more for multi instance, as
3474		 apply_ref_pic_set() has no chance to run
3475		 to clear the referenced flag in some cases
3476		 */
3477 if (hevc->m_ins_flag)
3478 used_buf_num++;
3479#endif
3480 } else
3481 used_buf_num = max_buf_num;
3482
3483 if (hevc->save_buffer_mode)
3484 hevc_print(hevc, 0,
3485			"save_buffer_mode: dynamic_buf_num_margin %d ----> %d\n",
3486 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3487
3488 if (sps_pic_buf_diff >= 4)
3489 {
3490 used_buf_num += 1;
3491 }
3492
3493 /* for eos add more buffer to flush.*/
3494 used_buf_num++;
3495
3496 if (used_buf_num > MAX_BUF_NUM)
3497 used_buf_num = MAX_BUF_NUM;
3498 return used_buf_num;
3499}
3500
3501
3502static int get_alloc_pic_count(struct hevc_state_s *hevc)
3503{
3504 int alloc_pic_count = 0;
3505 int i;
3506 struct PIC_s *pic;
3507 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3508 pic = hevc->m_PIC[i];
3509 if (pic && pic->index >= 0)
3510 alloc_pic_count++;
3511 }
3512 return alloc_pic_count;
3513}
3514
3515static int v4l_config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3516{
3517 int i = pic->index;
3518 int dw_mode = get_double_write_mode(hevc);
3519
3520 if (hevc->mmu_enable)
3521 pic->header_adr = hevc->m_BUF[i].header_addr;
3522
3523 pic->BUF_index = i;
3524 pic->POC = INVALID_POC;
3525 pic->mc_canvas_y = pic->index;
3526 pic->mc_canvas_u_v = pic->index;
3527
3528 if (dw_mode & 0x10) {
3529 pic->mc_canvas_y = (pic->index << 1);
3530 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3531 pic->mc_y_adr = pic->dw_y_adr;
3532 pic->mc_u_v_adr = pic->dw_u_v_adr;
3533 }
3534
3535 return 0;
3536}
3537
3538static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3539{
3540 int ret = -1;
3541 int i;
3542 /*int lcu_size_log2 = hevc->lcu_size_log2;
3543 int MV_MEM_UNIT=lcu_size_log2==
3544 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3545 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3546 5 ? 0x80 : 0x20;
3547 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3548 hevc->work_space_buf->mpred_mv.buf_size;*/
3549 unsigned int y_adr = 0;
3550 struct buf_stru_s buf_stru;
3551 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3552 int dw_mode = get_double_write_mode(hevc);
3553
3554 for (i = 0; i < BUF_POOL_SIZE; i++) {
3555 if (hevc->m_BUF[i].start_adr != 0 &&
3556 hevc->m_BUF[i].used_flag == 0 &&
3557 buf_size <= hevc->m_BUF[i].size) {
3558 hevc->m_BUF[i].used_flag = 1;
3559 break;
3560 }
3561 }
3562
3563 if (i >= BUF_POOL_SIZE)
3564 return -1;
3565
3566 if (hevc->mmu_enable) {
3567 pic->header_adr = hevc->m_BUF[i].start_adr;
3568 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3569 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3570 y_adr = hevc->m_BUF[i].start_adr +
3571 MMU_COMPRESS_8K_HEADER_SIZE;
3572 else
3573 y_adr = hevc->m_BUF[i].start_adr +
3574 MMU_COMPRESS_HEADER_SIZE;
3575 } else
3576 y_adr = hevc->m_BUF[i].start_adr;
3577
3578 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3579
3580 pic->POC = INVALID_POC;
3581	/* ensure get_pic_by_POC() does not
3582	return a buffer that has not been decoded yet */
3583 pic->BUF_index = i;
3584
3585 if ((!hevc->mmu_enable) &&
3586 ((dw_mode & 0x10) == 0)
3587 ) {
3588 pic->mc_y_adr = y_adr;
3589 y_adr += (buf_stru.mc_buffer_size_h << 16);
3590 }
3591 pic->mc_canvas_y = pic->index;
3592 pic->mc_canvas_u_v = pic->index;
3593 if (dw_mode & 0x10) {
3594 pic->mc_y_adr = y_adr;
3595 pic->mc_u_v_adr = y_adr +
3596 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3597 pic->mc_canvas_y = (pic->index << 1);
3598 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3599
3600 pic->dw_y_adr = pic->mc_y_adr;
3601 pic->dw_u_v_adr = pic->mc_u_v_adr;
3602 } else if (dw_mode) {
3603 pic->dw_y_adr = y_adr;
3604 pic->dw_u_v_adr = pic->dw_y_adr +
3605 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3606 }
3607
3608 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3609 hevc_print(hevc, 0,
3610 "%s index %d BUF_index %d mc_y_adr %x\n",
3611 __func__, pic->index,
3612 pic->BUF_index, pic->mc_y_adr);
3613 if (hevc->mmu_enable &&
3614 dw_mode)
3615 hevc_print(hevc, 0,
3616 "mmu double write adr %ld\n",
3617 pic->cma_alloc_addr);
3618 }
3619 ret = 0;
3620
3621 return ret;
3622}
3623
3624static void init_pic_list(struct hevc_state_s *hevc)
3625{
3626 int i;
3627 int init_buf_num = get_work_pic_num(hevc);
3628 int dw_mode = get_double_write_mode(hevc);
3629 struct vdec_s *vdec = hw_to_vdec(hevc);
3630	/* decoder buffer allocation is delayed when working in v4l mode. */
3631 if (!hevc->is_used_v4l) {
3632 for (i = 0; i < init_buf_num; i++) {
3633 if (alloc_buf(hevc) < 0) {
3634 if (i <= 8) {
3635				/* if enough buffers were already allocated
3636				(i > 8), don't report a fatal error. */
3637 hevc->fatal_error |=
3638 DECODER_FATAL_ERROR_NO_MEM;
3639 }
3640 break;
3641 }
3642 }
3643 }
3644
3645 for (i = 0; i < init_buf_num; i++) {
3646 struct PIC_s *pic = hevc->m_PIC[i];
3647
3648 if (!pic) {
3649 pic = vmalloc(sizeof(struct PIC_s));
3650 if (pic == NULL) {
3651 hevc_print(hevc, 0,
3652 "%s: alloc pic %d fail!!!\n",
3653 __func__, i);
3654 break;
3655 }
3656 hevc->m_PIC[i] = pic;
3657 }
3658 memset(pic, 0, sizeof(struct PIC_s));
3659
3660 pic->index = i;
3661 pic->BUF_index = -1;
3662 pic->mv_buf_index = -1;
3663 if (vdec->parallel_dec == 1) {
3664 pic->y_canvas_index = -1;
3665 pic->uv_canvas_index = -1;
3666 }
3667
3668 pic->width = hevc->pic_w;
3669 pic->height = hevc->pic_h;
3670 pic->double_write_mode = dw_mode;
3671
3672		/* canvas configuration is delayed when working in v4l mode. */
3673 if (!hevc->is_used_v4l) {
3674 if (config_pic(hevc, pic) < 0) {
3675 if (get_dbg_flag(hevc))
3676 hevc_print(hevc, 0,
3677 "Config_pic %d fail\n", pic->index);
3678 pic->index = -1;
3679 i++;
3680 break;
3681 }
3682
3683 if (pic->double_write_mode)
3684 set_canvas(hevc, pic);
3685 }
3686 }
3687}
3688
3689static void uninit_pic_list(struct hevc_state_s *hevc)
3690{
3691 struct vdec_s *vdec = hw_to_vdec(hevc);
3692 int i;
3693#ifndef MV_USE_FIXED_BUF
3694 dealloc_mv_bufs(hevc);
3695#endif
3696 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3697 struct PIC_s *pic = hevc->m_PIC[i];
3698
3699 if (pic) {
3700 if (vdec->parallel_dec == 1) {
3701 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3702 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3703 }
3704 release_aux_data(hevc, pic);
3705 vfree(pic);
3706 hevc->m_PIC[i] = NULL;
3707 }
3708 }
3709}
3710
3711#ifdef LOSLESS_COMPRESS_MODE
3712static void init_decode_head_hw(struct hevc_state_s *hevc)
3713{
3714
3715 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3716 unsigned int data32;
3717
3718 int losless_comp_header_size =
3719 compute_losless_comp_header_size(hevc->pic_w,
3720 hevc->pic_h);
3721 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3722 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3723
3724 hevc->losless_comp_body_size = losless_comp_body_size;
3725
3726
3727 if (hevc->mmu_enable) {
3728 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3729 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3730 } else {
3731 if (hevc->mem_saving_mode == 1)
3732 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3733 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3734 else
3735 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3736 ((workaround_enable & 2) ? 1 : 0));
3737 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3738 /*
3739 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3740 * //8-bit mode
3741 */
3742 }
3743 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3744 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3745 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3746
3747 if (hevc->mmu_enable) {
3748 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3749 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3750 buf_spec->mmu_vbh.buf_start +
3751 buf_spec->mmu_vbh.buf_size/2);
3752 data32 = READ_VREG(HEVC_SAO_CTRL9);
3753 data32 |= 0x1;
3754 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3755
3756 /* use HEVC_CM_HEADER_START_ADDR */
3757 data32 = READ_VREG(HEVC_SAO_CTRL5);
3758 data32 |= (1<<10);
3759 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3760 }
3761
3762 if (!hevc->m_ins_flag)
3763 hevc_print(hevc, 0,
3764 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3765 __func__, hevc->pic_w, hevc->pic_h,
3766 losless_comp_body_size, losless_comp_header_size);
3767
3768}
3769#endif
3770#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3771
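/*
 * init_pic_list_hw(): program the ANC2AXI table with one entry per
 * allocated picture (the compressed-frame header address when the MMU
 * is used, otherwise the luma and, for double write, chroma addresses),
 * then clear the IPP canvas registers.
 */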
3772static void init_pic_list_hw(struct hevc_state_s *hevc)
3773{
3774 int i;
3775 int cur_pic_num = MAX_REF_PIC_NUM;
3776 int dw_mode = get_double_write_mode(hevc);
3777 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3778 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3779 (0x1 << 1) | (0x1 << 2));
3780 else
3781 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3782
3783 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3784 if (hevc->m_PIC[i] == NULL ||
3785 hevc->m_PIC[i]->index == -1) {
3786 cur_pic_num = i;
3787 break;
3788 }
3789 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3790 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3791 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3792 hevc->m_PIC[i]->header_adr>>5);
3793 else
3794 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3795 hevc->m_PIC[i]->mc_y_adr >> 5);
3796 } else
3797 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3798 hevc->m_PIC[i]->mc_y_adr |
3799 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3800 if (dw_mode & 0x10) {
3801 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3802 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3803 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3804 }
3805 else
3806 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3807 hevc->m_PIC[i]->mc_u_v_adr |
3808 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3809 | 0x1);
3810 }
3811 }
3812 if (cur_pic_num == 0)
3813 return;
3814
3815 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3816
3817 /* Zero out canvas registers in IPP -- avoid simulation X */
3818 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3819 (0 << 8) | (0 << 1) | 1);
3820 for (i = 0; i < 32; i++)
3821 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3822
3823#ifdef LOSLESS_COMPRESS_MODE
3824 if ((dw_mode & 0x10) == 0)
3825 init_decode_head_hw(hevc);
3826#endif
3827
3828}
3829
3830
3831static void dump_pic_list(struct hevc_state_s *hevc)
3832{
3833 int i;
3834 struct PIC_s *pic;
3835
3836 hevc_print(hevc, 0,
3837 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3838 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3839 pic = hevc->m_PIC[i];
3840 if (pic == NULL || pic->index == -1)
3841 continue;
3842 hevc_print_cont(hevc, 0,
3843 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3844 pic->index, pic->BUF_index,
3845#ifndef MV_USE_FIXED_BUF
3846 pic->mv_buf_index,
3847#else
3848 -1,
3849#endif
3850 pic->decode_idx, pic->POC, pic->referenced);
3851 hevc_print_cont(hevc, 0,
3852 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3853 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3854 pic->width, pic->height);
3855 hevc_print_cont(hevc, 0,
3856 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3857 pic->output_ready, pic->mpred_mv_wr_start_addr,
3858 pic->vf_ref);
3859 }
3860}
3861
3862static void clear_referenced_flag(struct hevc_state_s *hevc)
3863{
3864 int i;
3865 struct PIC_s *pic;
3866 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3867 pic = hevc->m_PIC[i];
3868 if (pic == NULL || pic->index == -1)
3869 continue;
3870 if (pic->referenced) {
3871 pic->referenced = 0;
3872 put_mv_buf(hevc, pic);
3873 }
3874 }
3875}
3876
3877static void clear_poc_flag(struct hevc_state_s *hevc)
3878{
3879 int i;
3880 struct PIC_s *pic;
3881 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3882 pic = hevc->m_PIC[i];
3883 if (pic == NULL || pic->index == -1)
3884 continue;
3885 pic->POC = INVALID_POC;
3886 }
3887}
3888
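/*
 * Pick the next picture to send out for display.  In I-only trick mode
 * (i_only & 0x4) the output-marked picture with the lowest decode_idx
 * wins; otherwise the lowest POC wins, and it is only released once more
 * pictures are pending than its num_reorder_pic allows (or on flush, or
 * when the DPB is about to overflow).  The first I picture can be pushed
 * out early when fast_output_enable bit 0 is set.
 */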
3889static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3890 unsigned char flush_flag)
3891{
3892 int num_pic_not_yet_display = 0;
3893 int i, first_pic_flag = 0;
3894 struct PIC_s *pic;
3895 struct PIC_s *pic_display = NULL;
3896 struct vdec_s *vdec = hw_to_vdec(hevc);
3897
3898 if (hevc->i_only & 0x4) {
3899 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3900 pic = hevc->m_PIC[i];
3901 if (pic == NULL ||
3902 (pic->index == -1) ||
3903 (pic->BUF_index == -1) ||
3904 (pic->POC == INVALID_POC))
3905 continue;
3906 if (pic->output_mark) {
3907 if (pic_display) {
3908 if (pic->decode_idx <
3909 pic_display->decode_idx)
3910 pic_display = pic;
3911
3912 } else
3913 pic_display = pic;
3914
3915 }
3916 }
3917 if (pic_display) {
3918 pic_display->output_mark = 0;
3919 pic_display->recon_mark = 0;
3920 pic_display->output_ready = 1;
3921 pic_display->referenced = 0;
3922 put_mv_buf(hevc, pic_display);
3923 }
3924 } else {
3925 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3926 pic = hevc->m_PIC[i];
3927 if (pic == NULL ||
3928 (pic->index == -1) ||
3929 (pic->BUF_index == -1) ||
3930 (pic->POC == INVALID_POC))
3931 continue;
3932 if (pic->output_mark)
3933 num_pic_not_yet_display++;
3934 if (pic->slice_type == 2 &&
3935 hevc->vf_pre_count == 0 &&
3936 fast_output_enable & 0x1) {
3937 /*fast output for first I picture*/
3938 pic->num_reorder_pic = 0;
3939 if (vdec->master || vdec->slave)
3940 pic_display = pic;
3941 first_pic_flag = 1;
3942 hevc_print(hevc, 0, "VH265: output first frame\n");
3943 }
3944 }
3945
3946 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3947 pic = hevc->m_PIC[i];
3948 if (pic == NULL ||
3949 (pic->index == -1) ||
3950 (pic->BUF_index == -1) ||
3951 (pic->POC == INVALID_POC))
3952 continue;
3953 if (pic->output_mark) {
3954 if (pic_display) {
3955 if (pic->POC < pic_display->POC)
3956 pic_display = pic;
3957 else if ((pic->POC == pic_display->POC)
3958 && (pic->decode_idx <
3959 pic_display->
3960 decode_idx))
3961 pic_display
3962 = pic;
3963
3964 } else
3965 pic_display = pic;
3966
3967 }
3968 }
3969#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
3970 /* for DV, wait until all of cur_pic's data has arrived;
3971 some data may still come in after the picture is output */
3972 if ((vdec->master || vdec->slave)
3973 && (pic_display == hevc->cur_pic) &&
3974 (!flush_flag) &&
3975 (hevc->bypass_dvenl && !dolby_meta_with_el)
3976 && (!first_pic_flag))
3977 pic_display = NULL;
3978#endif
3979 if (pic_display) {
3980 if ((num_pic_not_yet_display >
3981 pic_display->num_reorder_pic)
3982 || flush_flag) {
3983 pic_display->output_mark = 0;
3984 pic_display->recon_mark = 0;
3985 pic_display->output_ready = 1;
3986 } else if (num_pic_not_yet_display >=
3987 (MAX_REF_PIC_NUM - 1)) {
3988 pic_display->output_mark = 0;
3989 pic_display->recon_mark = 0;
3990 pic_display->output_ready = 1;
3991 hevc_print(hevc, 0,
3992 "Warning, num_reorder_pic %d is byeond buf num\n",
3993 pic_display->num_reorder_pic);
3994 } else
3995 pic_display = NULL;
3996 }
3997 }
3998
3999 if (pic_display && hevc->sps_num_reorder_pics_0 &&
4000 (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
4001 pic_display = NULL;
4002 hevc->first_pic_flag = 2;
4003 }
4004 return pic_display;
4005}
4006
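/*
 * Write the reference-picture canvas indexes for list0 (P/B slices) and
 * list1 (B slices) into the MPP canvas table, looking each reference up
 * by POC.  The current picture gets error_mark set when a reference is
 * missing, has mismatching dimensions, or is itself marked erroneous
 * (with ref_frame_mark_flag enabled).
 */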
4007static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
4008{
4009 int i;
4010 struct PIC_s *pic;
4011
4012 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4013 hevc_print(hevc, 0,
4014 "config_mc_buffer entered .....\n");
4015 if (cur_pic->slice_type != 2) { /* P and B pic */
4016 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4017 (0 << 8) | (0 << 1) | 1);
4018 for (i = 0; i < cur_pic->RefNum_L0; i++) {
4019 pic =
4020 get_ref_pic_by_POC(hevc,
4021 cur_pic->
4022 m_aiRefPOCList0[cur_pic->
4023 slice_idx][i]);
4024 if (pic) {
4025 if ((pic->width != hevc->pic_w) ||
4026 (pic->height != hevc->pic_h)) {
4027 hevc_print(hevc, 0,
4028 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
4029 __func__, pic->POC,
4030 pic->width, pic->height);
4031 cur_pic->error_mark = 1;
4032 }
4033 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
4034 cur_pic->error_mark = 1;
4035 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
4036 (pic->mc_canvas_u_v << 16)
4037 | (pic->mc_canvas_u_v
4038 << 8) |
4039 pic->mc_canvas_y);
4040 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4041 hevc_print_cont(hevc, 0,
4042 "refid %x mc_canvas_u_v %x",
4043 i, pic->mc_canvas_u_v);
4044 hevc_print_cont(hevc, 0,
4045 " mc_canvas_y %x\n",
4046 pic->mc_canvas_y);
4047 }
4048 } else
4049 cur_pic->error_mark = 1;
4050
4051 if (pic == NULL || pic->error_mark) {
4052 hevc_print(hevc, 0,
4053 "Error %s, %dth poc (%d) %s",
4054 __func__, i,
4055 cur_pic->m_aiRefPOCList0[cur_pic->
4056 slice_idx][i],
4057 pic ? "has error" :
4058 "not in list0");
4059 }
4060 }
4061 }
4062 if (cur_pic->slice_type == 0) { /* B pic */
4063 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4064 hevc_print(hevc, 0,
4065 "config_mc_buffer RefNum_L1\n");
4066 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4067 (16 << 8) | (0 << 1) | 1);
4068
4069 for (i = 0; i < cur_pic->RefNum_L1; i++) {
4070 pic =
4071 get_ref_pic_by_POC(hevc,
4072 cur_pic->
4073 m_aiRefPOCList1[cur_pic->
4074 slice_idx][i]);
4075 if (pic) {
4076 if ((pic->width != hevc->pic_w) ||
4077 (pic->height != hevc->pic_h)) {
4078 hevc_print(hevc, 0,
4079 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
4080 __func__, pic->POC,
4081 pic->width, pic->height);
4082 cur_pic->error_mark = 1;
4083 }
4084
4085 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
4086 cur_pic->error_mark = 1;
4087 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
4088 (pic->mc_canvas_u_v << 16)
4089 | (pic->mc_canvas_u_v
4090 << 8) |
4091 pic->mc_canvas_y);
4092 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4093 hevc_print_cont(hevc, 0,
4094 "refid %x mc_canvas_u_v %x",
4095 i, pic->mc_canvas_u_v);
4096 hevc_print_cont(hevc, 0,
4097 " mc_canvas_y %x\n",
4098 pic->mc_canvas_y);
4099 }
4100 } else
4101 cur_pic->error_mark = 1;
4102
4103 if (pic == NULL || pic->error_mark) {
4104 hevc_print(hevc, 0,
4105 "Error %s, %dth poc (%d) %s",
4106 __func__, i,
4107 cur_pic->m_aiRefPOCList1[cur_pic->
4108 slice_idx][i],
4109 pic ? "has error" :
4110 "not in list1");
4111 }
4112 }
4113 }
4114 return 0;
4115}
4116
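/*
 * Drop the referenced flag (and return the MV buffer) of every picture
 * whose POC is no longer part of the current reference picture set.
 * Each CUR_RPS entry carries a delta POC in its low bits and a sign in
 * bit (RPS_USED_BIT - 1); an entry with 0x8000 set terminates the list.
 */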
4117static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
4118 union param_u *params)
4119{
4120 int ii, i;
4121 int poc_tmp;
4122 struct PIC_s *pic;
4123 unsigned char is_referenced;
4124 /* hevc_print(hevc, 0,
4125 "%s cur_poc %d\n", __func__, cur_poc); */
4126 if (pic_list_debug & 0x2) {
4127 pr_err("cur poc %d\n", cur_poc);
4128 }
4129 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
4130 pic = hevc->m_PIC[ii];
4131 if (pic == NULL ||
4132 pic->index == -1 ||
4133 pic->BUF_index == -1
4134 )
4135 continue;
4136
4137 if ((pic->referenced == 0 || pic->POC == cur_poc))
4138 continue;
4139 is_referenced = 0;
4140 for (i = 0; i < 16; i++) {
4141 int delt;
4142
4143 if (params->p.CUR_RPS[i] & 0x8000)
4144 break;
4145 delt =
4146 params->p.CUR_RPS[i] &
4147 ((1 << (RPS_USED_BIT - 1)) - 1);
4148 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
4149 poc_tmp =
4150 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
4151 delt);
4152 } else
4153 poc_tmp = cur_poc + delt;
4154 if (poc_tmp == pic->POC) {
4155 is_referenced = 1;
4156 /* hevc_print(hevc, 0, "i is %d\n", i); */
4157 break;
4158 }
4159 }
4160 if (is_referenced == 0) {
4161 pic->referenced = 0;
4162 put_mv_buf(hevc, pic);
4163 /* hevc_print(hevc, 0,
4164 "set poc %d reference to 0\n", pic->POC); */
4165 if (pic_list_debug & 0x2) {
4166 pr_err("set poc %d reference to 0\n", pic->POC);
4167 }
4168 }
4169 }
4170
4171}
4172
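/*
 * Build RefPicSetStCurr0/1 from the CUR_RPS entries, then fill the
 * per-slice POC lists m_aiRefPOCList0/1 for the current picture,
 * applying ref_pic_list_modification when the corresponding
 * modification_flag bits are set, and record the slice type and the
 * active reference counts.
 */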
4173static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
4174{
4175 struct PIC_s *pic = hevc->cur_pic;
4176 int i, rIdx;
4177 int num_neg = 0;
4178 int num_pos = 0;
4179 int total_num;
4180 int num_ref_idx_l0_active =
4181 (params->p.num_ref_idx_l0_active >
4182 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4183 params->p.num_ref_idx_l0_active;
4184 int num_ref_idx_l1_active =
4185 (params->p.num_ref_idx_l1_active >
4186 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4187 params->p.num_ref_idx_l1_active;
4188
4189 int RefPicSetStCurr0[16];
4190 int RefPicSetStCurr1[16];
4191
4192 for (i = 0; i < 16; i++) {
4193 RefPicSetStCurr0[i] = 0;
4194 RefPicSetStCurr1[i] = 0;
4195 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
4196 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
4197 }
4198 for (i = 0; i < 16; i++) {
4199 if (params->p.CUR_RPS[i] & 0x8000)
4200 break;
4201 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
4202 int delt =
4203 params->p.CUR_RPS[i] &
4204 ((1 << (RPS_USED_BIT - 1)) - 1);
4205
4206 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
4207 RefPicSetStCurr0[num_neg] =
4208 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
4209 delt);
4210 /* hevc_print(hevc, 0,
4211 * "RefPicSetStCurr0 %x %x %x\n",
4212 * RefPicSetStCurr0[num_neg], pic->POC,
4213 * (0x800-(params[i]&0x7ff)));
4214 */
4215 num_neg++;
4216 } else {
4217 RefPicSetStCurr1[num_pos] = pic->POC + delt;
4218 /* hevc_print(hevc, 0,
4219 * "RefPicSetStCurr1 %d\n",
4220 * RefPicSetStCurr1[num_pos]);
4221 */
4222 num_pos++;
4223 }
4224 }
4225 }
4226 total_num = num_neg + num_pos;
4227 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4228 hevc_print(hevc, 0,
4229 "%s: curpoc %d slice_type %d, total %d ",
4230 __func__, pic->POC, params->p.slice_type, total_num);
4231 hevc_print_cont(hevc, 0,
4232 "num_neg %d num_list0 %d num_list1 %d\n",
4233 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
4234 }
4235
4236 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4237 hevc_print(hevc, 0,
4238 "HEVC Stream buf start ");
4239 hevc_print_cont(hevc, 0,
4240 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
4241 READ_VREG(HEVC_STREAM_START_ADDR),
4242 READ_VREG(HEVC_STREAM_END_ADDR),
4243 READ_VREG(HEVC_STREAM_WR_PTR),
4244 READ_VREG(HEVC_STREAM_RD_PTR),
4245 READ_VREG(HEVC_STREAM_LEVEL),
4246 READ_VREG(HEVC_STREAM_FIFO_CTL),
4247 READ_VREG(HEVC_PARSER_INT_CONTROL));
4248 }
4249
4250 if (total_num > 0) {
4251 if (params->p.modification_flag & 0x1) {
4252 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4253 hevc_print(hevc, 0, "ref0 POC (modification):");
4254 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4255 int cIdx = params->p.modification_list[rIdx];
4256
4257 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4258 cIdx >=
4259 num_neg ? RefPicSetStCurr1[cIdx -
4260 num_neg] :
4261 RefPicSetStCurr0[cIdx];
4262 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4263 hevc_print_cont(hevc, 0, "%d ",
4264 pic->m_aiRefPOCList0[pic->
4265 slice_idx]
4266 [rIdx]);
4267 }
4268 }
4269 } else {
4270 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4271 hevc_print(hevc, 0, "ref0 POC:");
4272 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4273 int cIdx = rIdx % total_num;
4274
4275 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4276 cIdx >=
4277 num_neg ? RefPicSetStCurr1[cIdx -
4278 num_neg] :
4279 RefPicSetStCurr0[cIdx];
4280 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4281 hevc_print_cont(hevc, 0, "%d ",
4282 pic->m_aiRefPOCList0[pic->
4283 slice_idx]
4284 [rIdx]);
4285 }
4286 }
4287 }
4288 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4289 hevc_print_cont(hevc, 0, "\n");
4290 if (params->p.slice_type == B_SLICE) {
4291 if (params->p.modification_flag & 0x2) {
4292 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4293 hevc_print(hevc, 0,
4294 "ref1 POC (modification):");
4295 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4296 rIdx++) {
4297 int cIdx;
4298
4299 if (params->p.modification_flag & 0x1) {
4300 cIdx =
4301 params->p.
4302 modification_list
4303 [num_ref_idx_l0_active +
4304 rIdx];
4305 } else {
4306 cIdx =
4307 params->p.
4308 modification_list[rIdx];
4309 }
4310 pic->m_aiRefPOCList1[pic->
4311 slice_idx][rIdx] =
4312 cIdx >=
4313 num_pos ?
4314 RefPicSetStCurr0[cIdx - num_pos]
4315 : RefPicSetStCurr1[cIdx];
4316 if (get_dbg_flag(hevc) &
4317 H265_DEBUG_BUFMGR) {
4318 hevc_print_cont(hevc, 0, "%d ",
4319 pic->
4320 m_aiRefPOCList1[pic->
4321 slice_idx]
4322 [rIdx]);
4323 }
4324 }
4325 } else {
4326 if (get_dbg_flag(hevc) &
4327 H265_DEBUG_BUFMGR)
4328 hevc_print(hevc, 0, "ref1 POC:");
4329 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4330 rIdx++) {
4331 int cIdx = rIdx % total_num;
4332
4333 pic->m_aiRefPOCList1[pic->
4334 slice_idx][rIdx] =
4335 cIdx >=
4336 num_pos ?
4337 RefPicSetStCurr0[cIdx -
4338 num_pos]
4339 : RefPicSetStCurr1[cIdx];
4340 if (get_dbg_flag(hevc) &
4341 H265_DEBUG_BUFMGR) {
4342 hevc_print_cont(hevc, 0, "%d ",
4343 pic->
4344 m_aiRefPOCList1[pic->
4345 slice_idx]
4346 [rIdx]);
4347 }
4348 }
4349 }
4350 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4351 hevc_print_cont(hevc, 0, "\n");
4352 }
4353 }
4354 /*set m_PIC */
4355 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4356 (params->p.slice_type == P_SLICE) ? 1 :
4357 (params->p.slice_type == B_SLICE) ? 0 : 3;
4358 pic->RefNum_L0 = num_ref_idx_l0_active;
4359 pic->RefNum_L1 = num_ref_idx_l1_active;
4360}
4361
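/*
 * Compute per-tile geometry (width/height/start CU) and the SAO
 * above/vertical line-buffer addresses for each tile, using a uniform
 * split when tiles_enabled_flag bit 1 is set or the explicit
 * tile_width[]/tile_height[] arrays otherwise; with tiles disabled the
 * whole picture is treated as a single tile.
 */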
4362static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4363 int pic_height_cu, int sao_mem_unit,
4364 union param_u *params)
4365{
4366 int i, j;
4367 int start_cu_x, start_cu_y;
4368 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4369 int sao_abv_size = sao_mem_unit * pic_width_cu;
4370#ifdef DETREFILL_ENABLE
4371 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4372 int tmpRefillLcuSize = 1 <<
4373 (params->p.log2_min_coding_block_size_minus3 +
4374 3 + params->p.log2_diff_max_min_coding_block_size);
4375 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4376 "%x, %x, %x, %x\n",
4377 params->p.slice_segment_address,
4378 params->p.bit_depth,
4379 params->p.tiles_enabled_flag,
4380 tmpRefillLcuSize);
4381 if (params->p.slice_segment_address == 0 &&
4382 params->p.bit_depth != 0 &&
4383 (params->p.tiles_enabled_flag & 1) &&
4384 tmpRefillLcuSize == 64)
4385 hevc->delrefill_check = 1;
4386 else
4387 hevc->delrefill_check = 0;
4388 }
4389#endif
4390
4391 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4392 if (params->p.tiles_enabled_flag & 1) {
4393 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4394 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4395
4396 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4397 || hevc->num_tile_row <= 0) {
4398 hevc->num_tile_row = 1;
4399 hevc_print(hevc, 0,
4400 "%s: num_tile_rows_minus1 (%d) error!!\n",
4401 __func__, params->p.num_tile_rows_minus1);
4402 }
4403 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4404 || hevc->num_tile_col <= 0) {
4405 hevc->num_tile_col = 1;
4406 hevc_print(hevc, 0,
4407 "%s: num_tile_columns_minus1 (%d) error!!\n",
4408 __func__, params->p.num_tile_columns_minus1);
4409 }
4410 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4411 hevc_print(hevc, 0,
4412 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4413 __func__, pic_width_cu, pic_height_cu);
4414 hevc_print_cont(hevc, 0,
4415 "num_tile_col %d num_tile_row %d:\n",
4416 hevc->num_tile_col, hevc->num_tile_row);
4417 }
4418
4419 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4420 int w = pic_width_cu / hevc->num_tile_col;
4421 int h = pic_height_cu / hevc->num_tile_row;
4422
4423 start_cu_y = 0;
4424 for (i = 0; i < hevc->num_tile_row; i++) {
4425 start_cu_x = 0;
4426 for (j = 0; j < hevc->num_tile_col; j++) {
4427 if (j == (hevc->num_tile_col - 1)) {
4428 hevc->m_tile[i][j].width =
4429 pic_width_cu -
4430 start_cu_x;
4431 } else
4432 hevc->m_tile[i][j].width = w;
4433 if (i == (hevc->num_tile_row - 1)) {
4434 hevc->m_tile[i][j].height =
4435 pic_height_cu -
4436 start_cu_y;
4437 } else
4438 hevc->m_tile[i][j].height = h;
4439 hevc->m_tile[i][j].start_cu_x
4440 = start_cu_x;
4441 hevc->m_tile[i][j].start_cu_y
4442 = start_cu_y;
4443 hevc->m_tile[i][j].sao_vb_start_addr =
4444 hevc->work_space_buf->sao_vb.
4445 buf_start + j * sao_vb_size;
4446 hevc->m_tile[i][j].sao_abv_start_addr =
4447 hevc->work_space_buf->sao_abv.
4448 buf_start + i * sao_abv_size;
4449 if (get_dbg_flag(hevc) &
4450 H265_DEBUG_BUFMGR) {
4451 hevc_print_cont(hevc, 0,
4452 "{y=%d, x=%d w %d h %d ",
4453 i, j, hevc->m_tile[i][j].width,
4454 hevc->m_tile[i][j].height);
4455 hevc_print_cont(hevc, 0,
4456 "start_x %d start_y %d ",
4457 hevc->m_tile[i][j].start_cu_x,
4458 hevc->m_tile[i][j].start_cu_y);
4459 hevc_print_cont(hevc, 0,
4460 "sao_vb_start 0x%x ",
4461 hevc->m_tile[i][j].
4462 sao_vb_start_addr);
4463 hevc_print_cont(hevc, 0,
4464 "sao_abv_start 0x%x}\n",
4465 hevc->m_tile[i][j].
4466 sao_abv_start_addr);
4467 }
4468 start_cu_x += hevc->m_tile[i][j].width;
4469
4470 }
4471 start_cu_y += hevc->m_tile[i][0].height;
4472 }
4473 } else {
4474 start_cu_y = 0;
4475 for (i = 0; i < hevc->num_tile_row; i++) {
4476 start_cu_x = 0;
4477 for (j = 0; j < hevc->num_tile_col; j++) {
4478 if (j == (hevc->num_tile_col - 1)) {
4479 hevc->m_tile[i][j].width =
4480 pic_width_cu -
4481 start_cu_x;
4482 } else {
4483 hevc->m_tile[i][j].width =
4484 params->p.tile_width[j];
4485 }
4486 if (i == (hevc->num_tile_row - 1)) {
4487 hevc->m_tile[i][j].height =
4488 pic_height_cu -
4489 start_cu_y;
4490 } else {
4491 hevc->m_tile[i][j].height =
4492 params->
4493 p.tile_height[i];
4494 }
4495 hevc->m_tile[i][j].start_cu_x
4496 = start_cu_x;
4497 hevc->m_tile[i][j].start_cu_y
4498 = start_cu_y;
4499 hevc->m_tile[i][j].sao_vb_start_addr =
4500 hevc->work_space_buf->sao_vb.
4501 buf_start + j * sao_vb_size;
4502 hevc->m_tile[i][j].sao_abv_start_addr =
4503 hevc->work_space_buf->sao_abv.
4504 buf_start + i * sao_abv_size;
4505 if (get_dbg_flag(hevc) &
4506 H265_DEBUG_BUFMGR) {
4507 hevc_print_cont(hevc, 0,
4508 "{y=%d, x=%d w %d h %d ",
4509 i, j, hevc->m_tile[i][j].width,
4510 hevc->m_tile[i][j].height);
4511 hevc_print_cont(hevc, 0,
4512 "start_x %d start_y %d ",
4513 hevc->m_tile[i][j].start_cu_x,
4514 hevc->m_tile[i][j].start_cu_y);
4515 hevc_print_cont(hevc, 0,
4516 "sao_vb_start 0x%x ",
4517 hevc->m_tile[i][j].
4518 sao_vb_start_addr);
4519 hevc_print_cont(hevc, 0,
4520 "sao_abv_start 0x%x}\n",
4521 hevc->m_tile[i][j].
4522 sao_abv_start_addr);
4523
4524 }
4525 start_cu_x += hevc->m_tile[i][j].width;
4526 }
4527 start_cu_y += hevc->m_tile[i][0].height;
4528 }
4529 }
4530 } else {
4531 hevc->num_tile_col = 1;
4532 hevc->num_tile_row = 1;
4533 hevc->m_tile[0][0].width = pic_width_cu;
4534 hevc->m_tile[0][0].height = pic_height_cu;
4535 hevc->m_tile[0][0].start_cu_x = 0;
4536 hevc->m_tile[0][0].start_cu_y = 0;
4537 hevc->m_tile[0][0].sao_vb_start_addr =
4538 hevc->work_space_buf->sao_vb.buf_start;
4539 hevc->m_tile[0][0].sao_abv_start_addr =
4540 hevc->work_space_buf->sao_abv.buf_start;
4541 }
4542}
4543
4544static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4545 int pic_width_lcu)
4546{
4547 int cu_x;
4548 int cu_y;
4549 int tile_x = 0;
4550 int tile_y = 0;
4551 int i;
4552
4553 if (pic_width_lcu == 0) {
4554 if (get_dbg_flag(hevc)) {
4555 hevc_print(hevc, 0,
4556 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4557 __func__, hevc->pic_w, hevc->pic_h);
4558 }
4559 return -1;
4560 }
4561 cu_x = cu_adr % pic_width_lcu;
4562 cu_y = cu_adr / pic_width_lcu;
4563 if (hevc->tile_enabled) {
4564 for (i = 0; i < hevc->num_tile_col; i++) {
4565 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4566 tile_x = i;
4567 else
4568 break;
4569 }
4570 for (i = 0; i < hevc->num_tile_row; i++) {
4571 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4572 tile_y = i;
4573 else
4574 break;
4575 }
4576 }
4577 return (tile_x) | (tile_y << 8);
4578}
4579
4580static void print_scratch_error(int error_num)
4581{
4582#if 0
4583 if (get_dbg_flag(hevc)) {
4584 hevc_print(hevc, 0,
4585 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4586 error_num);
4587 }
4588#endif
4589}
4590
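/*
 * Point the decoder workspace registers (IPP line buffer, RPM,
 * RPS/VPS/SPS/PPS buffers, SAO line buffer, scale LUT, deblock
 * parameter/data buffers and the LMEM dump area) at the allocated
 * BuffInfo_s spec, and set the frame MMU map address when the MMU path
 * is enabled.
 */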
4591static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4592{
4593 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4594
4595 if (get_dbg_flag(hevc))
4596 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4597 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4598 __func__,
4599 buf_spec->ipp.buf_start,
4600 buf_spec->start_adr,
4601 buf_spec->short_term_rps.buf_start,
4602 buf_spec->vps.buf_start,
4603 buf_spec->sps.buf_start,
4604 buf_spec->pps.buf_start,
4605 buf_spec->sao_up.buf_start,
4606 buf_spec->swap_buf.buf_start,
4607 buf_spec->swap_buf2.buf_start,
4608 buf_spec->scalelut.buf_start,
4609 buf_spec->dblk_para.buf_start,
4610 buf_spec->dblk_data.buf_start,
4611 buf_spec->dblk_data2.buf_start);
4612 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4613 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4614 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4615 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4616 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4617 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4618 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4619 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4620 if (hevc->mmu_enable) {
4621 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4622 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4623 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4624 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4625 } else
4626 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4627 } /*else
4628 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4629 buf_spec->swap_buf.buf_start);
4630 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4631 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4632#ifdef HEVC_8K_LFTOFFSET_FIX
4633 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
4634 WRITE_VREG(HEVC_DBLK_CFG3, 0x808020); /*offset should x2 if 8k*/
4635 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4636 "write HEVC_DBLK_CFG3\n");
4637 }
4638#endif
4639 /* cfg_p_addr */
4640 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4641 /* cfg_d_addr */
4642 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4643
4644 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4645
4646 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4647}
4648
4649static void parser_cmd_write(void)
4650{
4651 u32 i;
4652 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4653 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4654 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4655 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4656 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4657 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4658 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4659 0x7C00
4660 };
4661 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4662 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4663}
4664
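/*
 * Bring up the HEVC front end: sanity-check the parser scratch
 * registers, reset IQIT, enable stream fetch when not running in
 * multi-instance mode, program the parser interrupts, start-code and
 * emulation checks, shift control and CABAC, zero the IQIT scale LUT,
 * load the parser command list and soft-reset the IPP/MPP blocks.
 */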
4665static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4666 int decode_pic_begin, int decode_pic_num)
4667{
4668 unsigned int data32;
4669 int i;
4670#if 0
4671 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4672 /* Set MCR fetch priorities*/
4673 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4674 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4675 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4676 }
4677#endif
4678#if 1
4679 /* m8baby test1902 */
4680 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4681 hevc_print(hevc, 0,
4682 "%s\n", __func__);
4683 data32 = READ_VREG(HEVC_PARSER_VERSION);
4684 if (data32 != 0x00010001) {
4685 print_scratch_error(25);
4686 return;
4687 }
4688 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4689 data32 = READ_VREG(HEVC_PARSER_VERSION);
4690 if (data32 != 0x5a5a55aa) {
4691 print_scratch_error(26);
4692 return;
4693 }
4694#if 0
4695 /* test Parser Reset */
4696 /* reset iqit to start mem init again */
4697 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4698 (1 << 3) /* reset_whole parser */
4699 );
4700 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4701 data32 = READ_VREG(HEVC_PARSER_VERSION);
4702 if (data32 != 0x00010001)
4703 hevc_print(hevc, 0,
4704 "Test Parser Fatal Error\n");
4705#endif
4706 /* reset iqit to start mem init again */
4707 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4708 );
4709 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4710 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4711
4712#endif
4713 if (!hevc->m_ins_flag) {
4714 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4715 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4716 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4717 data32 |= (0xf << 25); /*arwlen_axi_max*/
4718 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4719 }
4720 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4721 if (data32 != 0x00000100) {
4722 print_scratch_error(29);
4723 return;
4724 }
4725 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4726 if (data32 != 0x00000300) {
4727 print_scratch_error(30);
4728 return;
4729 }
4730 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4731 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4732 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4733 if (data32 != 0x12345678) {
4734 print_scratch_error(31);
4735 return;
4736 }
4737 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4738 if (data32 != 0x9abcdef0) {
4739 print_scratch_error(32);
4740 return;
4741 }
4742 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4743 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4744
4745 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4746 data32 &= 0x03ffffff;
4747 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4748 | /* stream_buffer_empty_int_amrisc_enable */
4749 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4750 (1 << 7) | /* dec_done_int_cpu_enable */
4751 (1 << 4) | /* startcode_found_int_cpu_enable */
4752 (0 << 3) | /* startcode_found_int_amrisc_enable */
4753 (1 << 0) /* parser_int_enable */
4754 ;
4755 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4756
4757 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4758 data32 = data32 | (1 << 1) | /* emulation_check_on */
4759 (1 << 0) /* startcode_check_on */
4760 ;
4761 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4762
4763 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4764 (2 << 4) | /* emulate_code_length_sub_1 */
4765 (2 << 1) | /* start_code_length_sub_1 */
4766 (1 << 0) /* stream_shift_enable */
4767 );
4768
4769 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4770 );
4771 /* hevc_parser_core_clk_en */
4772 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4773 );
4774
4775 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4776
4777 /* Initial IQIT_SCALELUT memory -- just to avoid X in simulation */
4778 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4779 for (i = 0; i < 1024; i++)
4780 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4781
4782#ifdef ENABLE_SWAP_TEST
4783 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4784#endif
4785
4786 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4787 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4788 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4789 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4790 /* Send parser_cmd */
4791 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4792
4793 parser_cmd_write();
4794
4795 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4796 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4797 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4798
4799 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4800 /* (1 << 8) | // sao_sw_pred_enable */
4801 (1 << 5) | /* parser_sao_if_en */
4802 (1 << 2) | /* parser_mpred_if_en */
4803 (1 << 0) /* parser_scaler_if_en */
4804 );
4805
4806 /* Changed to Start MPRED in microcode */
4807 /*
4808 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4809 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4810 * (1<<31)
4811 * );
4812 */
4813
4814 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4815 (1 << 0) /* software reset ipp and mpp */
4816 );
4817 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4818 (0 << 0) /* software reset ipp and mpp */
4819 );
4820
4821 if (get_double_write_mode(hevc) & 0x10)
4822 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4823 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
4824 );
4825
4826}
4827
4828static void decoder_hw_reset(void)
4829{
4830 int i;
4831 unsigned int data32;
4832
4833 /* reset iqit to start mem init again */
4834 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4835 );
4836 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4837 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4838
4839 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4840 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4841 ;
4842 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4843
4844 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4845 if (data32 != 0x00000100) {
4846 print_scratch_error(29);
4847 return;
4848 }
4849 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4850 if (data32 != 0x00000300) {
4851 print_scratch_error(30);
4852 return;
4853 }
4854 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4855 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4856 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4857 if (data32 != 0x12345678) {
4858 print_scratch_error(31);
4859 return;
4860 }
4861 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4862 if (data32 != 0x9abcdef0) {
4863 print_scratch_error(32);
4864 return;
4865 }
4866 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4867 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4868
4869 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4870 data32 &= 0x03ffffff;
4871 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4872 | /* stream_buffer_empty_int_amrisc_enable */
4873 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4874 (1 << 7) | /* dec_done_int_cpu_enable */
4875 (1 << 4) | /* startcode_found_int_cpu_enable */
4876 (0 << 3) | /* startcode_found_int_amrisc_enable */
4877 (1 << 0) /* parser_int_enable */
4878 ;
4879 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4880
4881 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4882 data32 = data32 | (1 << 1) | /* emulation_check_on */
4883 (1 << 0) /* startcode_check_on */
4884 ;
4885 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4886
4887 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4888 (2 << 4) | /* emulate_code_length_sub_1 */
4889 (2 << 1) | /* start_code_length_sub_1 */
4890 (1 << 0) /* stream_shift_enable */
4891 );
4892
4893 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4894 );
4895 /* hevc_parser_core_clk_en */
4896 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4897 );
4898
4899 /* Initial IQIT_SCALELUT memory -- just to avoid X in simulation */
4900 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4901 for (i = 0; i < 1024; i++)
4902 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4903
4904 /* Send parser_cmd */
4905 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4906
4907 parser_cmd_write();
4908
4909 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4910 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4911 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4912
4913 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4914 /* (1 << 8) | // sao_sw_pred_enable */
4915 (1 << 5) | /* parser_sao_if_en */
4916 (1 << 2) | /* parser_mpred_if_en */
4917 (1 << 0) /* parser_scaler_if_en */
4918 );
4919
4920 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4921 (1 << 0) /* software reset ipp and mpp */
4922 );
4923 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4924 (0 << 0) /* software reset ipp and mpp */
4925 );
4926
4927}
4928
4929#ifdef CONFIG_HEVC_CLK_FORCED_ON
4930static void config_hevc_clk_forced_on(void)
4931{
4932 unsigned int rdata32;
4933 /* IQIT */
4934 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4935 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4936
4937 /* DBLK */
4938 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4939 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4940
4941 /* SAO */
4942 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4943 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4944
4945 /* MPRED */
4946 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4947 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4948
4949 /* PARSER */
4950 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4951 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4952 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4953 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4954 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4955 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4956 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4957 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4958 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4959 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4960 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4961 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4962 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4963
4964 /* IPP */
4965 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4966 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4967
4968 /* MCRCC */
4969 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4970 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4971}
4972#endif
4973
4974#ifdef MCRCC_ENABLE
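/*
 * Configure the motion-compensation reference cache (MCRCC): it stays
 * disabled for I pictures, while for P/B pictures the cache canvases
 * are taken from the first entries of list0/list1 before
 * progressive mode is enabled.
 */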
4975static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4976{
4977 unsigned int rdata32;
4978 unsigned int rdata32_2;
4979 int l0_cnt = 0;
4980 int l1_cnt = 0x7fff;
4981
4982 if (get_double_write_mode(hevc) & 0x10) {
4983 l0_cnt = hevc->cur_pic->RefNum_L0;
4984 l1_cnt = hevc->cur_pic->RefNum_L1;
4985 }
4986
4987 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4988
4989 if (slice_type == 2) { /* I-PIC */
4990 /* remove reset -- disables clock */
4991 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4992 return;
4993 }
4994
4995 if (slice_type == 0) { /* B-PIC */
4996 /* Programme canvas0 */
4997 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4998 (0 << 8) | (0 << 1) | 0);
4999 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
5000 rdata32 = rdata32 & 0xffff;
5001 rdata32 = rdata32 | (rdata32 << 16);
5002 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
5003
5004 /* Programme canvas1 */
5005 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
5006 (16 << 8) | (1 << 1) | 0);
5007 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
5008 rdata32_2 = rdata32_2 & 0xffff;
5009 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
5010 if (rdata32 == rdata32_2 && l1_cnt > 1) {
5011 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
5012 rdata32_2 = rdata32_2 & 0xffff;
5013 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
5014 }
5015 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
5016 } else { /* P-PIC */
5017 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
5018 (0 << 8) | (1 << 1) | 0);
5019 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
5020 rdata32 = rdata32 & 0xffff;
5021 rdata32 = rdata32 | (rdata32 << 16);
5022 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
5023
5024 if (l0_cnt == 1) {
5025 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
5026 } else {
5027 /* Programme canvas1 */
5028 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
5029 rdata32 = rdata32 & 0xffff;
5030 rdata32 = rdata32 | (rdata32 << 16);
5031 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
5032 }
5033 }
5034 /* enable mcrcc progressive-mode */
5035 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
5036}
5037#endif
5038
5039static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
5040 int sao_mem_unit)
5041{
5042 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
5043 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
5044 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
5045 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
5046}
5047
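/*
 * Aux data buffer handling: config_aux_buf() programs the aux buffer
 * address and the packed prefix/suffix sizes, while
 * aux_data_is_avaible() reports that the ucode has written new aux data
 * by checking whether HEVC_AUX_DATA_SIZE differs from the value that
 * was programmed.
 */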
5048static u32 init_aux_size;
5049static int aux_data_is_avaible(struct hevc_state_s *hevc)
5050{
5051 u32 reg_val;
5052
5053 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
5054 if (reg_val != 0 && reg_val != init_aux_size)
5055 return 1;
5056 else
5057 return 0;
5058}
5059
5060static void config_aux_buf(struct hevc_state_s *hevc)
5061{
5062 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
5063 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
5064 (hevc->suffix_aux_size >> 4);
5065 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
5066}
5067
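/*
 * Program the motion-vector prediction block for the current slice:
 * MV write pointers for the current picture, MV read pointers for the
 * colocated picture, row-jump strides, per-list reference POCs and
 * enables, tile/LCU geometry and the TMVP/merge-candidate parameters.
 */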
5068static void config_mpred_hw(struct hevc_state_s *hevc)
5069{
5070 int i;
5071 unsigned int data32;
5072 struct PIC_s *cur_pic = hevc->cur_pic;
5073 struct PIC_s *col_pic = hevc->col_pic;
5074 int AMVP_MAX_NUM_CANDS_MEM = 3;
5075 int AMVP_MAX_NUM_CANDS = 2;
5076 int NUM_CHROMA_MODE = 5;
5077 int DM_CHROMA_IDX = 36;
5078 int above_ptr_ctrl = 0;
5079 int buffer_linear = 1;
5080 int cu_size_log2 = 3;
5081
5082 int mpred_mv_rd_start_addr;
5083 int mpred_curr_lcu_x;
5084 int mpred_curr_lcu_y;
5085 int mpred_above_buf_start;
5086 int mpred_mv_rd_ptr;
5087 int mpred_mv_rd_ptr_p1;
5088 int mpred_mv_rd_end_addr;
5089 int MV_MEM_UNIT;
5090 int mpred_mv_wr_ptr;
5091 int *ref_poc_L0, *ref_poc_L1;
5092
5093 int above_en;
5094 int mv_wr_en;
5095 int mv_rd_en;
5096 int col_isIntra;
5097
5098 if (hevc->slice_type != 2) {
5099 above_en = 1;
5100 mv_wr_en = 1;
5101 mv_rd_en = 1;
5102 col_isIntra = 0;
5103 } else {
5104 above_en = 1;
5105 mv_wr_en = 1;
5106 mv_rd_en = 0;
5107 col_isIntra = 0;
5108 }
5109
5110 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
5111 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
5112 mpred_curr_lcu_x = data32 & 0xffff;
5113 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
5114
5115 MV_MEM_UNIT =
5116 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
5117 5 ? 0x80 : 0x20;
5118 mpred_mv_rd_ptr =
5119 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
5120
5121 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
5122 mpred_mv_rd_end_addr =
5123 mpred_mv_rd_start_addr +
5124 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
5125
5126 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
5127
5128 mpred_mv_wr_ptr =
5129 cur_pic->mpred_mv_wr_start_addr +
5130 (hevc->slice_addr * MV_MEM_UNIT);
5131
5132 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5133 hevc_print(hevc, 0,
5134 "cur pic index %d col pic index %d\n", cur_pic->index,
5135 col_pic->index);
5136 }
5137
5138 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
5139 cur_pic->mpred_mv_wr_start_addr);
5140 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
5141
5142 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
5143 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
5144 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
5145
5146 data32 = READ_VREG(HEVC_MPRED_CTRL0);
5147 data32 = ((hevc->slice_type & 3) |
5148 (hevc->new_pic & 1) << 2 |
5149 (hevc->new_tile & 1) << 3 |
5150 (hevc->isNextSliceSegment & 1)<< 4 |
5151 (hevc->TMVPFlag & 1)<< 5 |
5152 (hevc->LDCFlag & 1) << 6 |
5153 (hevc->ColFromL0Flag & 1)<< 7 |
5154 (above_ptr_ctrl & 1)<< 8 |
5155 (above_en & 1) << 9 |
5156 (mv_wr_en & 1) << 10 |
5157 (mv_rd_en & 1)<< 11 |
5158 (col_isIntra & 1)<< 12 |
5159 (buffer_linear & 1)<< 13 |
5160 (hevc->LongTerm_Curr & 1) << 14 |
5161 (hevc->LongTerm_Col & 1) << 15 |
5162 (hevc->lcu_size_log2 & 0xf) << 16 |
5163 (cu_size_log2 & 0xf) << 20 | (hevc->plevel & 0x7) << 24);
5164 data32 &= ~(1<< 28);
5165 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
5166
5167 data32 = READ_VREG(HEVC_MPRED_CTRL1);
5168 data32 = (
5169#if 0
5170 /* not set in m8baby test1902 */
5171 /* Don't override clk_forced_on , */
5172 (data32 & (0x1 << 24)) |
5173#endif
5174 hevc->MaxNumMergeCand |
5175 AMVP_MAX_NUM_CANDS << 4 |
5176 AMVP_MAX_NUM_CANDS_MEM << 8 |
5177 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
5178 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
5179
5180 data32 = (hevc->pic_w | hevc->pic_h << 16);
5181 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
5182
5183 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5184 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
5185
5186 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
5187 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
5188
5189 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
5190 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
5191
5192 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
5193 /* col_RefNum_L0<<16| */
5194 /* col_RefNum_L1<<24 */
5195 );
5196 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
5197
5198 data32 = (hevc->LongTerm_Ref);
5199 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
5200
5201 data32 = 0;
5202 for (i = 0; i < hevc->RefNum_L0; i++)
5203 data32 = data32 | (1 << i);
5204 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
5205
5206 data32 = 0;
5207 for (i = 0; i < hevc->RefNum_L1; i++)
5208 data32 = data32 | (1 << i);
5209 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
5210
5211 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
5212 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
5213
5214 /* the MPRED Ref_POC_xx_Lx registers below must be written in
5215 * Ref_POC_xx_L0 -> Ref_POC_xx_L1 pair order!!!
5216 */
5217 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
5218 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
5219
5220 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
5221 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
5222
5223 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
5224 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
5225
5226 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
5227 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
5228
5229 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
5230 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
5231
5232 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
5233 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
5234
5235 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
5236 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
5237
5238 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
5239 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
5240
5241 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
5242 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
5243
5244 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
5245 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
5246
5247 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
5248 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
5249
5250 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
5251 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
5252
5253 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
5254 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
5255
5256 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
5257 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
5258
5259 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
5260 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
5261
5262 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
5263 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
5264
5265 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
5266 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
5267
5268 if (hevc->new_pic) {
5269 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
5270 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
5271 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
5272 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
5273 } else if (!hevc->isNextSliceSegment) {
5274 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
5275 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
5276 }
5277
5278 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
5279}
5280
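/*
 * Program the SAO/deblocking output path for the current picture:
 * compressed body/header and/or double-write Y/C addresses, endianness
 * and memory-map mode, NV12/NV21 swap for the v4l2 path, and the
 * deblocking-filter parameters taken from the PPS/slice flags carried
 * in misc_flag0.
 */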
5281static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
5282{
5283 unsigned int data32, data32_2;
5284 int misc_flag0 = hevc->misc_flag0;
5285 int slice_deblocking_filter_disabled_flag = 0;
5286
5287 int mc_buffer_size_u_v =
5288 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
5289 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
5290 struct PIC_s *cur_pic = hevc->cur_pic;
5291 struct aml_vcodec_ctx *v4l2_ctx = hevc->v4l2_ctx;
5292
5293 data32 = READ_VREG(HEVC_SAO_CTRL0);
5294 data32 &= (~0xf);
5295 data32 |= hevc->lcu_size_log2;
5296 WRITE_VREG(HEVC_SAO_CTRL0, data32);
5297
5298 data32 = (hevc->pic_w | hevc->pic_h << 16);
5299 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5300
5301 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5302 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5303
5304 if (hevc->new_pic)
5305 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5306#ifdef LOSLESS_COMPRESS_MODE
5307/*SUPPORT_10BIT*/
5308 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5309 data32 = READ_VREG(HEVC_SAO_CTRL5);
5310 data32 &= (~(0xff << 16));
5311
5312 if (get_double_write_mode(hevc) == 2 ||
5313 get_double_write_mode(hevc) == 3)
5314 data32 |= (0xff<<16);
5315 else if (get_double_write_mode(hevc) == 4)
5316 data32 |= (0x33<<16);
5317
5318 if (hevc->mem_saving_mode == 1)
5319 data32 |= (1 << 9);
5320 else
5321 data32 &= ~(1 << 9);
5322 if (workaround_enable & 1)
5323 data32 |= (1 << 7);
5324 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5325 }
5326 data32 = cur_pic->mc_y_adr;
5327 if (get_double_write_mode(hevc))
5328 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5329
5330 if ((get_double_write_mode(hevc) & 0x10) == 0)
5331 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5332
5333 if (hevc->mmu_enable)
5334 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5335#else
5336 data32 = cur_pic->mc_y_adr;
5337 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5338#endif
5339 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5340 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5341
5342#ifdef LOSLESS_COMPRESS_MODE
5343/*SUPPORT_10BIT*/
5344 if (get_double_write_mode(hevc))
5345 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5346#else
5347 data32 = cur_pic->mc_u_v_adr;
5348 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5349#endif
5350 data32 = (mc_buffer_size_u_v_h << 16);
5351 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5352
5353#ifdef LOSLESS_COMPRESS_MODE
5354/*SUPPORT_10BIT*/
5355 if (get_double_write_mode(hevc)) {
5356 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5357 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5358 }
5359#else
5360 /* multi tile to do... */
5361 data32 = cur_pic->mc_y_adr;
5362 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5363
5364 data32 = cur_pic->mc_u_v_adr;
5365 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5366#endif
5367 /* DBLK CONFIG HERE */
5368 if (hevc->new_pic) {
5369 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5370 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5371 data32 = (0xff << 8) | (0x0 << 0);
5372 else
5373 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5374 (0x0 << 0); /* h265 video format*/
5375
5376 if (hevc->pic_w >= 1280)
5377 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5378 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5379 if (get_double_write_mode(hevc) == 0)
5380 data32 |= (0x1 << 8); /*enable first write*/
5381 else if (get_double_write_mode(hevc) == 0x10)
5382 data32 |= (0x1 << 9); /*double write only*/
5383 else
5384 data32 |= ((0x1 << 8) |(0x1 << 9));
5385
5386 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5387 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5388 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5389 }
5390 data32 = (hevc->pic_w | hevc->pic_h << 16);
5391 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5392
5393 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5394 data32 =
5395 ((misc_flag0 >>
5396 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5397 0x1) << 3;
5398 } else
5399 data32 = 0;
5400 data32 |=
5401 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5402 ((params->p.pps_cr_qp_offset
5403 & 0x1f) <<
5404 9));
5405 data32 |=
5406 (hevc->lcu_size ==
5407 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5408
5409 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5410
5411 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5412 /*if (debug & 0x80) {*/
5413 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5414 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5415 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5416 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5417 data32);
5418 /*}*/
5419 }
5420 }
5421#if 0
5422 data32 = READ_VREG(HEVC_SAO_CTRL1);
5423 data32 &= (~0x3000);
5424 data32 |= (hevc->mem_map_mode <<
5425 12);
5426
5427/* [13:12] axi_aformat,
5428 * 0-Linear, 1-32x32, 2-64x32
5429 */
5430 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5431
5432 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5433 data32 &= (~0x30);
5434 data32 |= (hevc->mem_map_mode <<
5435 4);
5436
5437/* [5:4] -- address_format
5438 * 00:linear 01:32x32 10:64x32
5439 */
5440 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5441#else
5442 /* m8baby test1902 */
5443 data32 = READ_VREG(HEVC_SAO_CTRL1);
5444 data32 &= (~0x3000);
5445 data32 |= (hevc->mem_map_mode <<
5446 12);
5447
5448/* [13:12] axi_aformat, 0-Linear,
5449 * 1-32x32, 2-64x32
5450 */
5451 data32 &= (~0xff0);
5452 /* data32 |= 0x670; // Big-Endian per 64-bit */
5453 data32 |= endian; /* Big-Endian per 64-bit */
5454 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5455 if (get_double_write_mode(hevc) == 0)
5456 data32 |= 0x2; /*disable double write*/
5457 else if (get_double_write_mode(hevc) & 0x10)
5458 data32 |= 0x1; /*disable cm*/
5459 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5460 unsigned int data;
5461 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5462 (0x0 << 0); /* h265 video format*/
5463 if (hevc->pic_w >= 1280)
5464 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5465 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5466 if (get_double_write_mode(hevc) == 0)
5467 data |= (0x1 << 8); /*enable first write*/
5468 else if (get_double_write_mode(hevc) & 0x10)
5469 data |= (0x1 << 9); /*double write only*/
5470 else
5471 data |= ((0x1 << 8) |(0x1 << 9));
5472 WRITE_VREG(HEVC_DBLK_CFGB, data);
5473 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5474 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5475 }
5476
5477 /* swap uv */
5478 if (hevc->is_used_v4l) {
5479 if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21) ||
5480 (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21M))
5481 data32 &= ~(1 << 8); /* NV21 */
5482 else
5483 data32 |= (1 << 8); /* NV12 */
5484 }
5485
5486 /*
5487 * [31:24] ar_fifo1_axi_thred
5488 * [23:16] ar_fifo0_axi_thred
5489 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5490 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5491 * [11:08] axi_lendian_C
5492 * [07:04] axi_lendian_Y
5493 * [3] reserved
5494 * [2] clk_forceon
5495 * [1] dw_disable:disable double write output
5496 * [0] cm_disable:disable compress output
5497 */
5498
5499 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5500 if (get_double_write_mode(hevc) & 0x10) {
5501 /* [23:22] dw_v1_ctrl
5502 *[21:20] dw_v0_ctrl
5503 *[19:18] dw_h1_ctrl
5504 *[17:16] dw_h0_ctrl
5505 */
5506 data32 = READ_VREG(HEVC_SAO_CTRL5);
5507 /*set them all 0 for H265_NV21 (no down-scale)*/
5508 data32 &= ~(0xff << 16);
5509 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5510 }
5511
5512 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5513 data32 &= (~0x30);
5514 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5515 data32 |= (hevc->mem_map_mode <<
5516 4);
5517 data32 &= (~0xF);
5518 data32 |= 0xf; /* valid only when double write only */
5519 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5520
5521 /* swap uv */
5522 if (hevc->is_used_v4l) {
5523 if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21) ||
5524 (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21M))
5525 data32 |= (1 << 12); /* NV21 */
5526 else
5527 data32 &= ~(1 << 12); /* NV12 */
5528 }
5529
5530 /*
5531 * [3:0] little_endian
5532 * [5:4] address_format 00:linear 01:32x32 10:64x32
5533 * [7:6] reserved
5534 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5535 * [11:10] reserved
5536 * [12] CbCr_byte_swap
5537 * [31:13] reserved
5538 */
5539 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5540#endif
5541 data32 = 0;
5542 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5543 data32_2 &= (~0x300);
5544 /* slice_deblocking_filter_disabled_flag = 0;
5545 * the ucode has already handled it, so read it from the ucode directly
5546 */
5547 if (hevc->tile_enabled) {
5548 data32 |=
5549 ((misc_flag0 >>
5550 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5551 0x1) << 0;
5552 data32_2 |=
5553 ((misc_flag0 >>
5554 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5555 0x1) << 8;
5556 }
5557 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5558 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5559 0x1; /* the ucode has already handled it, so read it from the ucode directly */
5560 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5561 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5562 /* slice_deblocking_filter_disabled_flag =
5563 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5564 * //the ucode has already handled it, so read it from the ucode directly
5565 */
5566 data32 |= slice_deblocking_filter_disabled_flag << 2;
5567 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5568 hevc_print_cont(hevc, 0,
5569 "(1,%x)", data32);
5570 if (!slice_deblocking_filter_disabled_flag) {
5571 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5572 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5573 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5574 hevc_print_cont(hevc, 0,
5575 "(2,%x)", data32);
5576 }
5577 } else {
5578 data32 |=
5579 ((misc_flag0 >>
5580 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5581 0x1) << 2;
5582 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5583 hevc_print_cont(hevc, 0,
5584 "(3,%x)", data32);
5585 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5586 0x1) == 0) {
5587 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5588 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5589 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5590 hevc_print_cont(hevc, 0,
5591 "(4,%x)", data32);
5592 }
5593 }
5594 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5595 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5596 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5597 || (!slice_deblocking_filter_disabled_flag))) {
5598 data32 |=
5599 ((misc_flag0 >>
5600 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5601 & 0x1) << 1;
5602 data32_2 |=
5603 ((misc_flag0 >>
5604 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5605 & 0x1) << 9;
5606 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5607 hevc_print_cont(hevc, 0,
5608 "(5,%x)\n", data32);
5609 } else {
5610 data32 |=
5611 ((misc_flag0 >>
5612 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5613 & 0x1) << 1;
5614 data32_2 |=
5615 ((misc_flag0 >>
5616 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5617 & 0x1) << 9;
5618 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5619 hevc_print_cont(hevc, 0,
5620 "(6,%x)\n", data32);
5621 }
5622 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5623 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5624}
5625
5626#ifdef TEST_NO_BUF
5627static unsigned char test_flag = 1;
5628#endif
5629
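/*
 * Re-shape the picture pool after a resolution change: idle pictures
 * that still carry the old dimensions give their buffers back, surplus
 * entries beyond get_work_pic_num() are retired completely, and retired
 * slots are revived when fewer pictures than needed remain.
 */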
5630static void pic_list_process(struct hevc_state_s *hevc)
5631{
5632 int work_pic_num = get_work_pic_num(hevc);
5633 int alloc_pic_count = 0;
5634 int i;
5635 struct PIC_s *pic;
5636 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5637 pic = hevc->m_PIC[i];
5638 if (pic == NULL || pic->index == -1)
5639 continue;
5640 alloc_pic_count++;
5641 if (pic->output_mark == 0 && pic->referenced == 0
5642 && pic->output_ready == 0
5643 && (pic->width != hevc->pic_w ||
5644 pic->height != hevc->pic_h)
5645 ) {
5646 set_buf_unused(hevc, pic->BUF_index);
5647 pic->BUF_index = -1;
5648 if (alloc_pic_count > work_pic_num) {
5649 pic->width = 0;
5650 pic->height = 0;
5651 release_pic_mmu_buf(hevc, pic);
5652 pic->index = -1;
5653 } else {
5654 pic->width = hevc->pic_w;
5655 pic->height = hevc->pic_h;
5656 }
5657 }
5658 }
5659 if (alloc_pic_count < work_pic_num) {
5660 int new_count = alloc_pic_count;
5661 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5662 pic = hevc->m_PIC[i];
5663 if (pic && pic->index == -1) {
5664 pic->index = i;
5665 pic->BUF_index = -1;
5666 pic->width = hevc->pic_w;
5667 pic->height = hevc->pic_h;
5668 new_count++;
5669 if (new_count >=
5670 work_pic_num)
5671 break;
5672 }
5673 }
5674
5675 }
5676 dealloc_unused_buf(hevc);
5677 if (get_alloc_pic_count(hevc)
5678 != alloc_pic_count) {
5679 hevc_print_cont(hevc, 0,
5680 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5681 __func__,
5682 work_pic_num,
5683 alloc_pic_count,
5684 get_alloc_pic_count(hevc));
5685 }
5686}
5687
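/*
 * get_new_pic() - pick a free decoded picture buffer for the new frame.
 *
 * Summary derived from the body below: among pictures that are neither
 * referenced nor output-marked/ready and that match the current
 * pic_w/pic_h, the one with the lowest POC is chosen (entries with
 * INVALID_POC are preferred). Its data buffer (alloc_buf/config_pic),
 * MV buffer (get_mv_buf) and, when mmu_enable is set, its MMU pages
 * (H265_alloc_mmu) are allocated on demand before the per-picture
 * state is re-initialized from rpm_param.
 */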
5688static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5689 union param_u *rpm_param)
5690{
5691 struct vdec_s *vdec = hw_to_vdec(hevc);
5692 struct PIC_s *new_pic = NULL;
5693 struct PIC_s *pic;
5694 int i;
5695 int ret;
5696
5697 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5698 pic = hevc->m_PIC[i];
5699 if (pic == NULL || pic->index == -1)
5700 continue;
5701
5702 if (pic->output_mark == 0 && pic->referenced == 0
5703 && pic->output_ready == 0
5704 && pic->width == hevc->pic_w
5705 && pic->height == hevc->pic_h
5706 && pic->vf_ref == 0
5707 ) {
5708 if (new_pic) {
5709 if (new_pic->POC != INVALID_POC) {
5710 if (pic->POC == INVALID_POC ||
5711 pic->POC < new_pic->POC)
5712 new_pic = pic;
5713 }
5714 } else
5715 new_pic = pic;
5716 }
5717 }
5718
5719 if (new_pic == NULL)
5720 return NULL;
5721
5722 if (new_pic->BUF_index < 0) {
5723 if (alloc_buf(hevc) < 0)
5724 return NULL;
5725 else {
5726 if (config_pic(hevc, new_pic) < 0) {
5727 dealloc_pic_buf(hevc, new_pic);
5728 return NULL;
5729 }
5730 }
5731 new_pic->width = hevc->pic_w;
5732 new_pic->height = hevc->pic_h;
5733 set_canvas(hevc, new_pic);
5734
5735 init_pic_list_hw(hevc);
5736 }
5737
5738 if (new_pic) {
5739 new_pic->double_write_mode =
5740 get_double_write_mode(hevc);
5741 if (new_pic->double_write_mode)
5742 set_canvas(hevc, new_pic);
5743
5744#ifdef TEST_NO_BUF
5745 if (test_flag) {
5746 test_flag = 0;
5747 return NULL;
5748 } else
5749 test_flag = 1;
5750#endif
5751 if (get_mv_buf(hevc, new_pic) < 0)
5752 return NULL;
5753
5754 if (hevc->mmu_enable) {
5755 ret = H265_alloc_mmu(hevc, new_pic,
5756 rpm_param->p.bit_depth,
5757 hevc->frame_mmu_map_addr);
5758 if (ret != 0) {
5759 put_mv_buf(hevc, new_pic);
5760 hevc_print(hevc, 0,
5761					"can't alloc needed mmu, idx %d ret = %d\n",
5762 new_pic->decode_idx,
5763 ret);
5764 return NULL;
5765 }
5766 }
5767 new_pic->referenced = 1;
5768 new_pic->decode_idx = hevc->decode_idx;
5769 new_pic->slice_idx = 0;
5770 new_pic->referenced = 1;
5771 new_pic->output_mark = 0;
5772 new_pic->recon_mark = 0;
5773 new_pic->error_mark = 0;
5774 new_pic->dis_mark = 0;
5775 /* new_pic->output_ready = 0; */
5776 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5777 new_pic->ip_mode = (!new_pic->num_reorder_pic &&
5778 !(vdec->slave || vdec->master) &&
5779 !disable_ip_mode) ? true : false;
5780 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5781 new_pic->POC = hevc->curr_POC;
5782 new_pic->pic_struct = hevc->curr_pic_struct;
5783 if (new_pic->aux_data_buf)
5784 release_aux_data(hevc, new_pic);
5785 new_pic->mem_saving_mode =
5786 hevc->mem_saving_mode;
5787 new_pic->bit_depth_luma =
5788 hevc->bit_depth_luma;
5789 new_pic->bit_depth_chroma =
5790 hevc->bit_depth_chroma;
5791 new_pic->video_signal_type =
5792 hevc->video_signal_type;
5793
5794 new_pic->conformance_window_flag =
5795 hevc->param.p.conformance_window_flag;
5796 new_pic->conf_win_left_offset =
5797 hevc->param.p.conf_win_left_offset;
5798 new_pic->conf_win_right_offset =
5799 hevc->param.p.conf_win_right_offset;
5800 new_pic->conf_win_top_offset =
5801 hevc->param.p.conf_win_top_offset;
5802 new_pic->conf_win_bottom_offset =
5803 hevc->param.p.conf_win_bottom_offset;
5804 new_pic->chroma_format_idc =
5805 hevc->param.p.chroma_format_idc;
5806
5807 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5808 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5809 __func__, new_pic->index,
5810 new_pic->BUF_index, new_pic->decode_idx,
5811 new_pic->POC);
5812
5813 }
5814 if (pic_list_debug & 0x1) {
5815 dump_pic_list(hevc);
5816 pr_err("\n*******************************************\n");
5817 }
5818
5819 return new_pic;
5820}
5821
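/*
 * v4l_get_new_pic() - v4l2 variant of get_new_pic().
 *
 * Each cap_pool.seq[] entry packs the capture buffer state in bits
 * [31:16] and the buffer index in bits [15:0]. Buffers already owned by
 * the decoder (V4L_CAP_BUFF_IN_DEC) are reused directly, while buffers
 * still held by the m2m framework (V4L_CAP_BUFF_IN_M2M) are allocated
 * (v4l_alloc_buf) and configured (v4l_config_pic) here before use.
 */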
5822static struct PIC_s *v4l_get_new_pic(struct hevc_state_s *hevc,
5823 union param_u *rpm_param)
5824{
5825 struct vdec_s *vdec = hw_to_vdec(hevc);
5826 int ret;
5827	struct aml_vcodec_ctx *v4l = hevc->v4l2_ctx;
5828 struct v4l_buff_pool *pool = &v4l->cap_pool;
5829 struct PIC_s *new_pic = NULL;
5830 struct PIC_s *pic = NULL;
5831 int i;
5832
5833 for (i = 0; i < pool->in; ++i) {
5834 u32 state = (pool->seq[i] >> 16);
5835 u32 index = (pool->seq[i] & 0xffff);
5836
5837 switch (state) {
5838 case V4L_CAP_BUFF_IN_DEC:
5839 pic = hevc->m_PIC[i];
5840 if (pic && (pic->index != -1) &&
5841 (pic->output_mark == 0) &&
5842 (pic->referenced == 0) &&
5843 (pic->output_ready == 0) &&
5844 (pic->width == hevc->pic_w) &&
5845 (pic->height == hevc->pic_h) &&
5846 (pic->vf_ref == 0) &&
5847 pic->cma_alloc_addr) {
5848 new_pic = pic;
5849 }
5850 break;
5851 case V4L_CAP_BUFF_IN_M2M:
5852 pic = hevc->m_PIC[index];
5853 pic->width = hevc->pic_w;
5854 pic->height = hevc->pic_h;
5855 if ((pic->index != -1) &&
5856 !v4l_alloc_buf(hevc, pic)) {
5857 v4l_config_pic(hevc, pic);
5858 init_pic_list_hw(hevc);
5859 new_pic = pic;
5860 }
5861 break;
5862 default:
5863 pr_err("v4l buffer state err %d.\n", state);
5864 break;
5865 }
5866
5867 if (new_pic)
5868 break;
5869 }
5870
5871 if (new_pic == NULL)
5872 return NULL;
5873
5874 new_pic->double_write_mode = get_double_write_mode(hevc);
5875 if (new_pic->double_write_mode)
5876 set_canvas(hevc, new_pic);
5877
5878 if (get_mv_buf(hevc, new_pic) < 0)
5879 return NULL;
5880
5881 if (hevc->mmu_enable) {
5882 ret = H265_alloc_mmu(hevc, new_pic,
5883 rpm_param->p.bit_depth,
5884 hevc->frame_mmu_map_addr);
5885 if (ret != 0) {
5886 put_mv_buf(hevc, new_pic);
5887 hevc_print(hevc, 0,
5888				"can't alloc needed mmu, idx %d ret = %d\n",
5889 new_pic->decode_idx, ret);
5890 return NULL;
5891 }
5892 }
5893
5894 new_pic->referenced = 1;
5895 new_pic->decode_idx = hevc->decode_idx;
5896 new_pic->slice_idx = 0;
5897 new_pic->referenced = 1;
5898 new_pic->output_mark = 0;
5899 new_pic->recon_mark = 0;
5900 new_pic->error_mark = 0;
5901 new_pic->dis_mark = 0;
5902 /* new_pic->output_ready = 0; */
5903 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5904 new_pic->ip_mode = (!new_pic->num_reorder_pic &&
5905 !(vdec->slave || vdec->master) &&
5906 !disable_ip_mode) ? true : false;
5907 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5908 new_pic->POC = hevc->curr_POC;
5909 new_pic->pic_struct = hevc->curr_pic_struct;
5910
5911 if (new_pic->aux_data_buf)
5912 release_aux_data(hevc, new_pic);
5913 new_pic->mem_saving_mode =
5914 hevc->mem_saving_mode;
5915 new_pic->bit_depth_luma =
5916 hevc->bit_depth_luma;
5917 new_pic->bit_depth_chroma =
5918 hevc->bit_depth_chroma;
5919 new_pic->video_signal_type =
5920 hevc->video_signal_type;
5921
5922 new_pic->conformance_window_flag =
5923 hevc->param.p.conformance_window_flag;
5924 new_pic->conf_win_left_offset =
5925 hevc->param.p.conf_win_left_offset;
5926 new_pic->conf_win_right_offset =
5927 hevc->param.p.conf_win_right_offset;
5928 new_pic->conf_win_top_offset =
5929 hevc->param.p.conf_win_top_offset;
5930 new_pic->conf_win_bottom_offset =
5931 hevc->param.p.conf_win_bottom_offset;
5932 new_pic->chroma_format_idc =
5933 hevc->param.p.chroma_format_idc;
5934
5935 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5936 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5937 __func__, new_pic->index,
5938 new_pic->BUF_index, new_pic->decode_idx,
5939 new_pic->POC);
5940
5941 return new_pic;
5942}
5943
5944static int get_display_pic_num(struct hevc_state_s *hevc)
5945{
5946 int i;
5947 struct PIC_s *pic;
5948 int num = 0;
5949
5950 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5951 pic = hevc->m_PIC[i];
5952 if (pic == NULL ||
5953 pic->index == -1)
5954 continue;
5955
5956 if (pic->output_ready == 1)
5957 num++;
5958 }
5959 return num;
5960}
5961
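/*
 * flush_output() - mark @pic for output and then drain every remaining
 * displayable picture via output_pic(hevc, 1). Error-marked frames
 * (unless a debug flag forces their display) are recycled and counted
 * as dropped/error frames; all referenced flags are cleared at the end.
 */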
5962static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5963{
5964 struct PIC_s *pic_display;
5965
5966 if (pic) {
5967 /*PB skip control */
5968 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5969 /* start decoding after first I */
5970 hevc->ignore_bufmgr_error |= 0x1;
5971 }
5972 if (hevc->ignore_bufmgr_error & 1) {
5973 if (hevc->PB_skip_count_after_decoding > 0)
5974 hevc->PB_skip_count_after_decoding--;
5975 else {
5976 /* start displaying */
5977 hevc->ignore_bufmgr_error |= 0x2;
5978 }
5979 }
5980 if (pic->POC != INVALID_POC && !pic->ip_mode)
5981 pic->output_mark = 1;
5982 pic->recon_mark = 1;
5983 }
5984 do {
5985 pic_display = output_pic(hevc, 1);
5986
5987 if (pic_display) {
5988 pic_display->referenced = 0;
5989 put_mv_buf(hevc, pic_display);
5990 if ((pic_display->error_mark
5991 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5992 || (get_dbg_flag(hevc) &
5993 H265_DEBUG_DISPLAY_CUR_FRAME)
5994 || (get_dbg_flag(hevc) &
5995 H265_DEBUG_NO_DISPLAY)) {
5996 pic_display->output_ready = 0;
5997 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5998 hevc_print(hevc, 0,
5999 "[BM] Display: POC %d, ",
6000 pic_display->POC);
6001 hevc_print_cont(hevc, 0,
6002 "decoding index %d ==> ",
6003 pic_display->decode_idx);
6004 hevc_print_cont(hevc, 0,
6005 "Debug mode or error, recycle it\n");
6006 }
6007 /*
6008 * Here the pic/frame error_mark is 1,
6009 * and it won't be displayed, so increase
6010 * the drop count
6011 */
6012 hevc->gvs->drop_frame_count++;
6013				/* error frame count also needs to increase */
6014 hevc->gvs->error_frame_count++;
6015 } else {
6016 if (hevc->i_only & 0x1
6017 && pic_display->slice_type != 2) {
6018 pic_display->output_ready = 0;
6019 } else {
6020 prepare_display_buf(hevc, pic_display);
6021 if (get_dbg_flag(hevc)
6022 & H265_DEBUG_BUFMGR) {
6023 hevc_print(hevc, 0,
6024 "[BM] flush Display: POC %d, ",
6025 pic_display->POC);
6026 hevc_print_cont(hevc, 0,
6027 "decoding index %d\n",
6028 pic_display->decode_idx);
6029 }
6030 }
6031 }
6032 }
6033 } while (pic_display);
6034 clear_referenced_flag(hevc);
6035}
6036
6037/*
6038* dv_meta_flag: 1, Dolby meta only; 2, exclude Dolby meta
6039*/
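/*
 * Layout of the aux data records assembled below (a descriptive sketch
 * only; the struct is illustrative and not used by the driver): each
 * record is an 8-byte header followed by the payload, with the payload
 * length stored big-endian and tag 0x1 carrying Dolby Vision metadata.
 */
#if 0
struct aux_record_header {
	u8 len_be[4];		/* payload length, big-endian (h[0..3]) */
	u8 tag;			/* 0x1: DV metadata, other: non-DV aux (h[4]) */
	u8 reserved;		/* always written as 0 (h[5]) */
	u8 padding_len_be[2];	/* padding length, big-endian (h[6..7]) */
};
#endif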
6040static void set_aux_data(struct hevc_state_s *hevc,
6041 struct PIC_s *pic, unsigned char suffix_flag,
6042 unsigned char dv_meta_flag)
6043{
6044 int i;
6045 unsigned short *aux_adr;
6046 unsigned int size_reg_val =
6047 READ_VREG(HEVC_AUX_DATA_SIZE);
6048 unsigned int aux_count = 0;
6049 int aux_size = 0;
6050 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
6051 return;
6052
6053 if (hevc->aux_data_dirty ||
6054 hevc->m_ins_flag == 0) {
6055
6056 hevc->aux_data_dirty = 0;
6057 }
6058
6059 if (suffix_flag) {
6060 aux_adr = (unsigned short *)
6061 (hevc->aux_addr +
6062 hevc->prefix_aux_size);
6063 aux_count =
6064 ((size_reg_val & 0xffff) << 4)
6065 >> 1;
6066 aux_size =
6067 hevc->suffix_aux_size;
6068 } else {
6069 aux_adr =
6070 (unsigned short *)hevc->aux_addr;
6071 aux_count =
6072 ((size_reg_val >> 16) << 4)
6073 >> 1;
6074 aux_size =
6075 hevc->prefix_aux_size;
6076 }
6077 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
6078 hevc_print(hevc, 0,
6079 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
6080 __func__, pic, pic->aux_data_size,
6081 aux_count, suffix_flag, dv_meta_flag);
6082 }
6083 if (aux_size > 0 && aux_count > 0) {
6084 int heads_size = 0;
6085 int new_size;
6086 char *new_buf;
6087
6088 for (i = 0; i < aux_count; i++) {
6089 unsigned char tag = aux_adr[i] >> 8;
6090 if (tag != 0 && tag != 0xff) {
6091 if (dv_meta_flag == 0)
6092 heads_size += 8;
6093 else if (dv_meta_flag == 1 && tag == 0x1)
6094 heads_size += 8;
6095 else if (dv_meta_flag == 2 && tag != 0x1)
6096 heads_size += 8;
6097 }
6098 }
6099 new_size = pic->aux_data_size + aux_count + heads_size;
6100 new_buf = vzalloc(new_size);
6101 if (new_buf) {
6102 unsigned char valid_tag = 0;
6103 unsigned char *h =
6104 new_buf +
6105 pic->aux_data_size;
6106 unsigned char *p = h + 8;
6107 int len = 0;
6108 int padding_len = 0;
6109
6110 if (pic->aux_data_buf) {
6111 memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
6112 vfree(pic->aux_data_buf);
6113 }
6114 pic->aux_data_buf = new_buf;
6115
6116 for (i = 0; i < aux_count; i += 4) {
6117 int ii;
6118 unsigned char tag = aux_adr[i + 3] >> 8;
6119 if (tag != 0 && tag != 0xff) {
6120 if (dv_meta_flag == 0)
6121 valid_tag = 1;
6122 else if (dv_meta_flag == 1
6123 && tag == 0x1)
6124 valid_tag = 1;
6125 else if (dv_meta_flag == 2
6126 && tag != 0x1)
6127 valid_tag = 1;
6128 else
6129 valid_tag = 0;
6130 if (valid_tag && len > 0) {
6131 pic->aux_data_size +=
6132 (len + 8);
6133 h[0] = (len >> 24)
6134 & 0xff;
6135 h[1] = (len >> 16)
6136 & 0xff;
6137 h[2] = (len >> 8)
6138 & 0xff;
6139 h[3] = (len >> 0)
6140 & 0xff;
6141 h[6] =
6142 (padding_len >> 8)
6143 & 0xff;
6144 h[7] = (padding_len)
6145 & 0xff;
6146 h += (len + 8);
6147 p += 8;
6148 len = 0;
6149 padding_len = 0;
6150 }
6151 if (valid_tag) {
6152 h[4] = tag;
6153 h[5] = 0;
6154 h[6] = 0;
6155 h[7] = 0;
6156 }
6157 }
6158 if (valid_tag) {
6159 for (ii = 0; ii < 4; ii++) {
6160 unsigned short aa =
6161 aux_adr[i + 3
6162 - ii];
6163 *p = aa & 0xff;
6164 p++;
6165 len++;
6166 /*if ((aa >> 8) == 0xff)
6167 padding_len++;*/
6168 }
6169 }
6170 }
6171 if (len > 0) {
6172 pic->aux_data_size += (len + 8);
6173 h[0] = (len >> 24) & 0xff;
6174 h[1] = (len >> 16) & 0xff;
6175 h[2] = (len >> 8) & 0xff;
6176 h[3] = (len >> 0) & 0xff;
6177 h[6] = (padding_len >> 8) & 0xff;
6178 h[7] = (padding_len) & 0xff;
6179 }
6180 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
6181 hevc_print(hevc, 0,
6182 "aux: (size %d) suffix_flag %d\n",
6183 pic->aux_data_size, suffix_flag);
6184 for (i = 0; i < pic->aux_data_size; i++) {
6185 hevc_print_cont(hevc, 0,
6186 "%02x ", pic->aux_data_buf[i]);
6187 if (((i + 1) & 0xf) == 0)
6188 hevc_print_cont(hevc, 0, "\n");
6189 }
6190 hevc_print_cont(hevc, 0, "\n");
6191 }
6192
6193 } else {
6194 hevc_print(hevc, 0, "new buf alloc failed\n");
6195 if (pic->aux_data_buf)
6196 vfree(pic->aux_data_buf);
6197 pic->aux_data_buf = NULL;
6198 pic->aux_data_size = 0;
6199 }
6200 }
6201
6202}
6203
6204static void release_aux_data(struct hevc_state_s *hevc,
6205 struct PIC_s *pic)
6206{
6207 if (pic->aux_data_buf) {
6208 vfree(pic->aux_data_buf);
6209 if ((run_count[hevc->index] & 63) == 0)
6210 vm_unmap_aliases();
6211 }
6212 pic->aux_data_buf = NULL;
6213 pic->aux_data_size = 0;
6214}
6215
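/*
 * hevc_pre_pic() - finalize the previously decoded picture before a new
 * one starts: mark it for output/reconstruction, optionally trim its
 * unused MMU pages via decoder_mmu_box_free_idx_tail(), and run the
 * output loop. For IDR pictures the whole buffer list is flushed
 * through flush_output() instead.
 */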
6216static inline void hevc_pre_pic(struct hevc_state_s *hevc,
6217 struct PIC_s *pic)
6218{
6219
6220 /* prev pic */
6221 /*if (hevc->curr_POC != 0) {*/
6222 int decoded_poc = hevc->iPrevPOC;
6223#ifdef MULTI_INSTANCE_SUPPORT
6224 if (hevc->m_ins_flag) {
6225 decoded_poc = hevc->decoded_poc;
6226 hevc->decoded_poc = INVALID_POC;
6227 }
6228#endif
6229 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
6230 && hevc->m_nalUnitType !=
6231 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6232 struct PIC_s *pic_display;
6233
6234 pic = get_pic_by_POC(hevc, decoded_poc);
6235 if (pic && (pic->POC != INVALID_POC)) {
6236 struct vdec_s *vdec = hw_to_vdec(hevc);
6237
6238 /*PB skip control */
6239 if (pic->error_mark == 0
6240 && hevc->PB_skip_mode == 1) {
6241 /* start decoding after
6242 * first I
6243 */
6244 hevc->ignore_bufmgr_error |= 0x1;
6245 }
6246 if (hevc->ignore_bufmgr_error & 1) {
6247 if (hevc->PB_skip_count_after_decoding > 0) {
6248 hevc->PB_skip_count_after_decoding--;
6249 } else {
6250 /* start displaying */
6251 hevc->ignore_bufmgr_error |= 0x2;
6252 }
6253 }
6254 if (hevc->mmu_enable
6255 && ((hevc->double_write_mode & 0x10) == 0)) {
6256 if (!hevc->m_ins_flag) {
6257 hevc->used_4k_num =
6258 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
6259
6260 if ((!is_skip_decoding(hevc, pic)) &&
6261 (hevc->used_4k_num >= 0) &&
6262 (hevc->cur_pic->scatter_alloc
6263 == 1)) {
6264 hevc_print(hevc,
6265 H265_DEBUG_BUFMGR_MORE,
6266 "%s pic index %d scatter_alloc %d page_start %d\n",
6267 "decoder_mmu_box_free_idx_tail",
6268 hevc->cur_pic->index,
6269 hevc->cur_pic->scatter_alloc,
6270 hevc->used_4k_num);
6271 hevc_mmu_dma_check(hw_to_vdec(hevc));
6272 decoder_mmu_box_free_idx_tail(
6273 hevc->mmu_box,
6274 hevc->cur_pic->index,
6275 hevc->used_4k_num);
6276 hevc->cur_pic->scatter_alloc
6277 = 2;
6278 }
6279 hevc->used_4k_num = -1;
6280 }
6281 }
6282 if (!pic->ip_mode)
6283 pic->output_mark = 1;
6284 pic->recon_mark = 1;
6285 pic->dis_mark = 1;
6286 if (vdec->mvfrm) {
6287 pic->frame_size = vdec->mvfrm->frame_size;
6288 pic->hw_decode_time = (u32)vdec->mvfrm->hw_decode_time;
6289 }
6290 }
6291 do {
6292 pic_display = output_pic(hevc, 0);
6293
6294 if (pic_display) {
6295 if ((pic_display->error_mark &&
6296 ((hevc->ignore_bufmgr_error &
6297 0x2) == 0))
6298 || (get_dbg_flag(hevc) &
6299 H265_DEBUG_DISPLAY_CUR_FRAME)
6300 || (get_dbg_flag(hevc) &
6301 H265_DEBUG_NO_DISPLAY)) {
6302 pic_display->output_ready = 0;
6303 if (get_dbg_flag(hevc) &
6304 H265_DEBUG_BUFMGR) {
6305 hevc_print(hevc, 0,
6306 "[BM] Display: POC %d, ",
6307 pic_display->POC);
6308 hevc_print_cont(hevc, 0,
6309 "decoding index %d ==> ",
6310 pic_display->
6311 decode_idx);
6312 hevc_print_cont(hevc, 0,
6313 "Debug or err,recycle it\n");
6314 }
6315 /*
6316 * Here the pic/frame error_mark is 1,
6317 * and it won't be displayed, so increase
6318 * the drop count
6319 */
6320 hevc->gvs->drop_frame_count++;
6321					/* error frame count also needs to increase */
6322 hevc->gvs->error_frame_count++;
6323 } else {
6324 if (hevc->i_only & 0x1
6325 && pic_display->
6326 slice_type != 2) {
6327 pic_display->output_ready = 0;
6328 } else {
6329 prepare_display_buf
6330 (hevc,
6331 pic_display);
6332 if (get_dbg_flag(hevc) &
6333 H265_DEBUG_BUFMGR) {
6334 hevc_print(hevc, 0,
6335 "[BM] Display: POC %d, ",
6336 pic_display->POC);
6337 hevc_print_cont(hevc, 0,
6338 "decoding index %d\n",
6339 pic_display->
6340 decode_idx);
6341 }
6342 }
6343 }
6344 }
6345 } while (pic_display);
6346 } else {
6347 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6348 hevc_print(hevc, 0,
6349 "[BM] current pic is IDR, ");
6350 hevc_print(hevc, 0,
6351 "clear referenced flag of all buffers\n");
6352 }
6353 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6354 dump_pic_list(hevc);
6355 if (hevc->vf_pre_count == 1 &&
6356 hevc->first_pic_flag == 1) {
6357 hevc->first_pic_flag = 2;
6358 pic = NULL;
6359 }
6360 else
6361 pic = get_pic_by_POC(hevc, decoded_poc);
6362
6363 flush_output(hevc, pic);
6364 }
6365
6366}
6367
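/*
 * check_pic_decoded_error_pre()/check_pic_decoded_error(): if the last
 * decoded LCU index stops short of lcu_x_num * lcu_y_num - 1, the
 * picture was not fully decoded and cur_pic->error_mark is set (unless
 * error_handle_policy bit 0x20 disables the check); head errors and
 * over-decode conditions also mark the current picture as erroneous.
 */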
6368static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
6369 int decoded_lcu)
6370{
6371 int current_lcu_idx = decoded_lcu;
6372 if (decoded_lcu < 0)
6373 return;
6374
6375 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6376 hevc_print(hevc, 0,
6377 "cur lcu idx = %d, (total %d)\n",
6378 current_lcu_idx, hevc->lcu_total);
6379 }
6380 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6381 if (hevc->first_pic_after_recover) {
6382 if (current_lcu_idx !=
6383 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6384 hevc->cur_pic->error_mark = 1;
6385 } else {
6386 if (hevc->lcu_x_num_pre != 0
6387 && hevc->lcu_y_num_pre != 0
6388 && current_lcu_idx != 0
6389 && current_lcu_idx <
6390 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6391 hevc->cur_pic->error_mark = 1;
6392 }
6393 if (hevc->cur_pic->error_mark) {
6394 if (print_lcu_error)
6395 hevc_print(hevc, 0,
6396 "cur lcu idx = %d, (total %d), set error_mark\n",
6397 current_lcu_idx,
6398 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
6399 if (is_log_enable(hevc))
6400 add_log(hevc,
6401 "cur lcu idx = %d, (total %d), set error_mark",
6402 current_lcu_idx,
6403 hevc->lcu_x_num_pre *
6404 hevc->lcu_y_num_pre);
6405
6406 }
6407
6408 }
6409 if (hevc->cur_pic && hevc->head_error_flag) {
6410 hevc->cur_pic->error_mark = 1;
6411 hevc_print(hevc, 0,
6412 "head has error, set error_mark\n");
6413 }
6414
6415 if ((error_handle_policy & 0x80) == 0) {
6416 if (hevc->over_decode && hevc->cur_pic) {
6417 hevc_print(hevc, 0,
6418 "over decode, set error_mark\n");
6419 hevc->cur_pic->error_mark = 1;
6420 }
6421 }
6422
6423 hevc->lcu_x_num_pre = hevc->lcu_x_num;
6424 hevc->lcu_y_num_pre = hevc->lcu_y_num;
6425}
6426
6427static void check_pic_decoded_error(struct hevc_state_s *hevc,
6428 int decoded_lcu)
6429{
6430 int current_lcu_idx = decoded_lcu;
6431 if (decoded_lcu < 0)
6432 return;
6433
6434 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6435 hevc_print(hevc, 0,
6436 "cur lcu idx = %d, (total %d)\n",
6437 current_lcu_idx, hevc->lcu_total);
6438 }
6439 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6440 if (hevc->lcu_x_num != 0
6441 && hevc->lcu_y_num != 0
6442 && current_lcu_idx != 0
6443 && current_lcu_idx <
6444 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6445 hevc->cur_pic->error_mark = 1;
6446
6447 if (hevc->cur_pic->error_mark) {
6448 if (print_lcu_error)
6449 hevc_print(hevc, 0,
6450 "cur lcu idx = %d, (total %d), set error_mark\n",
6451 current_lcu_idx,
6452 hevc->lcu_x_num*hevc->lcu_y_num);
6453			if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6454 && ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6455 hevc_print(hevc, 0,
6456 "Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6457					hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6458				flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6459 }
6460 if (is_log_enable(hevc))
6461 add_log(hevc,
6462 "cur lcu idx = %d, (total %d), set error_mark",
6463 current_lcu_idx,
6464 hevc->lcu_x_num *
6465 hevc->lcu_y_num);
6466
6467 }
6468
6469 }
6470 if (hevc->cur_pic && hevc->head_error_flag) {
6471 hevc->cur_pic->error_mark = 1;
6472 hevc_print(hevc, 0,
6473 "head has error, set error_mark\n");
6474 }
6475
6476 if ((error_handle_policy & 0x80) == 0) {
6477 if (hevc->over_decode && hevc->cur_pic) {
6478 hevc_print(hevc, 0,
6479 "over decode, set error_mark\n");
6480 hevc->cur_pic->error_mark = 1;
6481 }
6482 }
6483}
6484
6485/* Only after one field or one frame has been decoded can this
6486 * function be called to collect QoS info. */
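/*
 * Two hardware paths are handled below: pre-G12A parts expose packed
 * min/avg/max triplets in HEVC_MV_INFO/HEVC_QP_INFO/HEVC_SKIP_INFO,
 * while G12A and later stream per-picture statistics through the
 * HEVC_PIC_QUALITY_CTRL/DATA read port (block counts, QP sums and MV
 * sums whose 40-bit accumulators are split across two reads).
 */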
6487static void get_picture_qos_info(struct hevc_state_s *hevc)
6488{
6489 struct PIC_s *picture = hevc->cur_pic;
6490
6491/*
6492#define DEBUG_QOS
6493*/
6494
6495 if (!hevc->cur_pic)
6496 return;
6497
6498 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6499 unsigned char a[3];
6500 unsigned char i, j, t;
6501 unsigned long data;
6502
6503 data = READ_VREG(HEVC_MV_INFO);
6504 if (picture->slice_type == I_SLICE)
6505 data = 0;
6506 a[0] = data & 0xff;
6507 a[1] = (data >> 8) & 0xff;
6508 a[2] = (data >> 16) & 0xff;
6509
6510 for (i = 0; i < 3; i++)
6511 for (j = i+1; j < 3; j++) {
6512 if (a[j] < a[i]) {
6513 t = a[j];
6514 a[j] = a[i];
6515 a[i] = t;
6516 } else if (a[j] == a[i]) {
6517 a[i]++;
6518 t = a[j];
6519 a[j] = a[i];
6520 a[i] = t;
6521 }
6522 }
6523 picture->max_mv = a[2];
6524 picture->avg_mv = a[1];
6525 picture->min_mv = a[0];
6526#ifdef DEBUG_QOS
6527 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6528 data, a[0], a[1], a[2]);
6529#endif
6530
6531 data = READ_VREG(HEVC_QP_INFO);
6532 a[0] = data & 0x1f;
6533 a[1] = (data >> 8) & 0x3f;
6534 a[2] = (data >> 16) & 0x7f;
6535
6536 for (i = 0; i < 3; i++)
6537 for (j = i+1; j < 3; j++) {
6538 if (a[j] < a[i]) {
6539 t = a[j];
6540 a[j] = a[i];
6541 a[i] = t;
6542 } else if (a[j] == a[i]) {
6543 a[i]++;
6544 t = a[j];
6545 a[j] = a[i];
6546 a[i] = t;
6547 }
6548 }
6549 picture->max_qp = a[2];
6550 picture->avg_qp = a[1];
6551 picture->min_qp = a[0];
6552#ifdef DEBUG_QOS
6553 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6554 data, a[0], a[1], a[2]);
6555#endif
6556
6557 data = READ_VREG(HEVC_SKIP_INFO);
6558 a[0] = data & 0x1f;
6559 a[1] = (data >> 8) & 0x3f;
6560 a[2] = (data >> 16) & 0x7f;
6561
6562 for (i = 0; i < 3; i++)
6563 for (j = i+1; j < 3; j++) {
6564 if (a[j] < a[i]) {
6565 t = a[j];
6566 a[j] = a[i];
6567 a[i] = t;
6568 } else if (a[j] == a[i]) {
6569 a[i]++;
6570 t = a[j];
6571 a[j] = a[i];
6572 a[i] = t;
6573 }
6574 }
6575 picture->max_skip = a[2];
6576 picture->avg_skip = a[1];
6577 picture->min_skip = a[0];
6578
6579#ifdef DEBUG_QOS
6580 hevc_print(hevc, 0,
6581 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6582 data, a[0], a[1], a[2]);
6583#endif
6584 } else {
6585 uint32_t blk88_y_count;
6586 uint32_t blk88_c_count;
6587 uint32_t blk22_mv_count;
6588 uint32_t rdata32;
6589 int32_t mv_hi;
6590 int32_t mv_lo;
6591 uint32_t rdata32_l;
6592 uint32_t mvx_L0_hi;
6593 uint32_t mvy_L0_hi;
6594 uint32_t mvx_L1_hi;
6595 uint32_t mvy_L1_hi;
6596 int64_t value;
6597 uint64_t temp_value;
6598#ifdef DEBUG_QOS
6599 int pic_number = picture->POC;
6600#endif
6601
6602 picture->max_mv = 0;
6603 picture->avg_mv = 0;
6604 picture->min_mv = 0;
6605
6606 picture->max_skip = 0;
6607 picture->avg_skip = 0;
6608 picture->min_skip = 0;
6609
6610 picture->max_qp = 0;
6611 picture->avg_qp = 0;
6612 picture->min_qp = 0;
6613
6614
6615
6616#ifdef DEBUG_QOS
6617 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6618 picture->slice_type,
6619 picture->POC);
6620#endif
6621 /* set rd_idx to 0 */
6622 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6623
6624 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6625 if (blk88_y_count == 0) {
6626#ifdef DEBUG_QOS
6627 hevc_print(hevc, 0,
6628 "[Picture %d Quality] NO Data yet.\n",
6629 pic_number);
6630#endif
6631 /* reset all counts */
6632 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6633 return;
6634 }
6635 /* qp_y_sum */
6636 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6637#ifdef DEBUG_QOS
6638 hevc_print(hevc, 0,
6639 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6640 pic_number, rdata32/blk88_y_count,
6641 rdata32, blk88_y_count);
6642#endif
6643 picture->avg_qp = rdata32/blk88_y_count;
6644 /* intra_y_count */
6645 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6646#ifdef DEBUG_QOS
6647 hevc_print(hevc, 0,
6648 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6649 pic_number, rdata32*100/blk88_y_count,
6650 '%', rdata32);
6651#endif
6652 /* skipped_y_count */
6653 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6654#ifdef DEBUG_QOS
6655 hevc_print(hevc, 0,
6656 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6657 pic_number, rdata32*100/blk88_y_count,
6658 '%', rdata32);
6659#endif
6660 picture->avg_skip = rdata32*100/blk88_y_count;
6661 /* coeff_non_zero_y_count */
6662 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6663#ifdef DEBUG_QOS
6664 hevc_print(hevc, 0,
6665 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6666 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6667 '%', rdata32);
6668#endif
6669 /* blk66_c_count */
6670 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6671 if (blk88_c_count == 0) {
6672#ifdef DEBUG_QOS
6673 hevc_print(hevc, 0,
6674 "[Picture %d Quality] NO Data yet.\n",
6675 pic_number);
6676#endif
6677 /* reset all counts */
6678 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6679 return;
6680 }
6681 /* qp_c_sum */
6682 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6683#ifdef DEBUG_QOS
6684 hevc_print(hevc, 0,
6685 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6686 pic_number, rdata32/blk88_c_count,
6687 rdata32, blk88_c_count);
6688#endif
6689 /* intra_c_count */
6690 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6691#ifdef DEBUG_QOS
6692 hevc_print(hevc, 0,
6693 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6694 pic_number, rdata32*100/blk88_c_count,
6695 '%', rdata32);
6696#endif
6697 /* skipped_cu_c_count */
6698 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6699#ifdef DEBUG_QOS
6700 hevc_print(hevc, 0,
6701 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6702 pic_number, rdata32*100/blk88_c_count,
6703 '%', rdata32);
6704#endif
6705 /* coeff_non_zero_c_count */
6706 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6707#ifdef DEBUG_QOS
6708 hevc_print(hevc, 0,
6709 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6710 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6711 '%', rdata32);
6712#endif
6713
6714 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6715 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6716 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6717#ifdef DEBUG_QOS
6718 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6719 pic_number, (rdata32>>0)&0xff);
6720#endif
6721 picture->min_qp = (rdata32>>0)&0xff;
6722
6723#ifdef DEBUG_QOS
6724 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6725 pic_number, (rdata32>>8)&0xff);
6726#endif
6727 picture->max_qp = (rdata32>>8)&0xff;
6728
6729#ifdef DEBUG_QOS
6730 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6731 pic_number, (rdata32>>16)&0xff);
6732 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6733 pic_number, (rdata32>>24)&0xff);
6734#endif
6735
6736 /* blk22_mv_count */
6737 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6738 if (blk22_mv_count == 0) {
6739#ifdef DEBUG_QOS
6740 hevc_print(hevc, 0,
6741 "[Picture %d Quality] NO MV Data yet.\n",
6742 pic_number);
6743#endif
6744 /* reset all counts */
6745 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6746 return;
6747 }
6748 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6749 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6750 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6751 /* should all be 0x00 or 0xff */
6752#ifdef DEBUG_QOS
6753 hevc_print(hevc, 0,
6754 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6755 pic_number, rdata32);
6756#endif
6757 mvx_L0_hi = ((rdata32>>0)&0xff);
6758 mvy_L0_hi = ((rdata32>>8)&0xff);
6759 mvx_L1_hi = ((rdata32>>16)&0xff);
6760 mvy_L1_hi = ((rdata32>>24)&0xff);
6761
6762 /* mvx_L0_count[31:0] */
6763 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6764 temp_value = mvx_L0_hi;
6765 temp_value = (temp_value << 32) | rdata32_l;
6766
6767 if (mvx_L0_hi & 0x80)
6768 value = 0xFFFFFFF000000000 | temp_value;
6769 else
6770 value = temp_value;
6771 value = div_s64(value, blk22_mv_count);
6772#ifdef DEBUG_QOS
6773 hevc_print(hevc, 0,
6774 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6775 pic_number, (int)value,
6776 value, blk22_mv_count);
6777#endif
6778 picture->avg_mv = value;
6779
6780 /* mvy_L0_count[31:0] */
6781 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6782 temp_value = mvy_L0_hi;
6783 temp_value = (temp_value << 32) | rdata32_l;
6784
6785 if (mvy_L0_hi & 0x80)
6786 value = 0xFFFFFFF000000000 | temp_value;
6787 else
6788 value = temp_value;
6789#ifdef DEBUG_QOS
6790 hevc_print(hevc, 0,
6791 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6792 pic_number, rdata32_l/blk22_mv_count,
6793 value, blk22_mv_count);
6794#endif
6795
6796 /* mvx_L1_count[31:0] */
6797 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6798 temp_value = mvx_L1_hi;
6799 temp_value = (temp_value << 32) | rdata32_l;
6800 if (mvx_L1_hi & 0x80)
6801 value = 0xFFFFFFF000000000 | temp_value;
6802 else
6803 value = temp_value;
6804#ifdef DEBUG_QOS
6805 hevc_print(hevc, 0,
6806 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6807 pic_number, rdata32_l/blk22_mv_count,
6808 value, blk22_mv_count);
6809#endif
6810
6811 /* mvy_L1_count[31:0] */
6812 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6813 temp_value = mvy_L1_hi;
6814 temp_value = (temp_value << 32) | rdata32_l;
6815 if (mvy_L1_hi & 0x80)
6816 value = 0xFFFFFFF000000000 | temp_value;
6817 else
6818 value = temp_value;
6819#ifdef DEBUG_QOS
6820 hevc_print(hevc, 0,
6821 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6822 pic_number, rdata32_l/blk22_mv_count,
6823 value, blk22_mv_count);
6824#endif
6825
6826 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6827 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6828 mv_hi = (rdata32>>16)&0xffff;
6829 if (mv_hi & 0x8000)
6830 mv_hi = 0x8000 - mv_hi;
6831#ifdef DEBUG_QOS
6832 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6833 pic_number, mv_hi);
6834#endif
6835 picture->max_mv = mv_hi;
6836
6837 mv_lo = (rdata32>>0)&0xffff;
6838 if (mv_lo & 0x8000)
6839 mv_lo = 0x8000 - mv_lo;
6840#ifdef DEBUG_QOS
6841 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6842 pic_number, mv_lo);
6843#endif
6844 picture->min_mv = mv_lo;
6845
6846#ifdef DEBUG_QOS
6847 /* {mvy_L0_max, mvy_L0_min} */
6848 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6849 mv_hi = (rdata32>>16)&0xffff;
6850 if (mv_hi & 0x8000)
6851 mv_hi = 0x8000 - mv_hi;
6852 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6853 pic_number, mv_hi);
6854
6855
6856 mv_lo = (rdata32>>0)&0xffff;
6857 if (mv_lo & 0x8000)
6858 mv_lo = 0x8000 - mv_lo;
6859
6860 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6861 pic_number, mv_lo);
6862
6863
6864 /* {mvx_L1_max, mvx_L1_min} */
6865 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6866 mv_hi = (rdata32>>16)&0xffff;
6867 if (mv_hi & 0x8000)
6868 mv_hi = 0x8000 - mv_hi;
6869
6870 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6871 pic_number, mv_hi);
6872
6873
6874 mv_lo = (rdata32>>0)&0xffff;
6875 if (mv_lo & 0x8000)
6876 mv_lo = 0x8000 - mv_lo;
6877
6878 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6879 pic_number, mv_lo);
6880
6881
6882 /* {mvy_L1_max, mvy_L1_min} */
6883 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6884 mv_hi = (rdata32>>16)&0xffff;
6885 if (mv_hi & 0x8000)
6886 mv_hi = 0x8000 - mv_hi;
6887
6888 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6889 pic_number, mv_hi);
6890
6891 mv_lo = (rdata32>>0)&0xffff;
6892 if (mv_lo & 0x8000)
6893 mv_lo = 0x8000 - mv_lo;
6894
6895 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6896 pic_number, mv_lo);
6897#endif
6898
6899 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6900#ifdef DEBUG_QOS
6901 hevc_print(hevc, 0,
6902 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6903 pic_number, rdata32);
6904#endif
6905 /* reset all counts */
6906 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6907 }
6908}
6909
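/*
 * Return codes of hevc_slice_segment_header_process(), as used in the
 * body below:
 *  0   - slice accepted, decoding may proceed
 *  1   - RASL picture after a random access point, skip it
 *  2   - picture is skipped (PB skip / error policy)
 *  3   - parameter error (oversize, zero size, bad lcu/POC config),
 *        restart from the first slice segment
 *  4   - unsupported bit depth, fatal error flagged
 *  0xf - buffer management only (BUFFER_MGR_ONLY, debug skip, i_only)
 *  -1  - no free picture buffer available, wait_buf is set
 */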
6910static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6911 union param_u *rpm_param,
6912 int decode_pic_begin)
6913{
6914 struct vdec_s *vdec = hw_to_vdec(hevc);
6915 int i;
6916 int lcu_x_num_div;
6917 int lcu_y_num_div;
6918 int Col_ref;
6919 int dbg_skip_flag = 0;
6920 struct aml_vcodec_ctx *ctx =
6921 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
6922
6923 if (hevc->is_used_v4l && ctx->param_sets_from_ucode)
6924 hevc->res_ch_flag = 0;
6925
6926 if (hevc->wait_buf == 0) {
6927 hevc->sps_num_reorder_pics_0 =
6928 rpm_param->p.sps_num_reorder_pics_0;
6929 hevc->ip_mode = (!hevc->sps_num_reorder_pics_0 &&
6930 !(vdec->slave || vdec->master) &&
6931 !disable_ip_mode) ? true : false;
6932 hevc->m_temporalId = rpm_param->p.m_temporalId;
6933 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6934 hevc->interlace_flag =
6935 (rpm_param->p.profile_etc >> 2) & 0x1;
6936 hevc->curr_pic_struct =
6937 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6938 if (parser_sei_enable & 0x4) {
6939 hevc->frame_field_info_present_flag =
6940 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6941 }
6942
6943 /* if (interlace_enable == 0 || hevc->m_ins_flag) */
6944 if (interlace_enable == 0)
6945 hevc->interlace_flag = 0;
6946 if (interlace_enable & 0x100)
6947 hevc->interlace_flag = interlace_enable & 0x1;
6948 if (hevc->interlace_flag == 0)
6949 hevc->curr_pic_struct = 0;
6950 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6951 /*
6952 *hevc->m_pocRandomAccess = MAX_INT;
6953 * //add to fix RAP_B_Bossen_1
6954 */
6955 /* } */
6956 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6957 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6958 hevc->slice_segment_addr =
6959 rpm_param->p.slice_segment_address;
6960 if (!rpm_param->p.dependent_slice_segment_flag)
6961 hevc->slice_addr = hevc->slice_segment_addr;
6962 } else {
6963 hevc->slice_segment_addr = 0;
6964 hevc->slice_addr = 0;
6965 }
6966
6967 hevc->iPrevPOC = hevc->curr_POC;
6968 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6969 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6970 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6971 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6972 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6973 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6974 hevc->isNextSliceSegment =
6975 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6976 if (is_oversize_ex(rpm_param->p.pic_width_in_luma_samples,
6977 rpm_param->p.pic_height_in_luma_samples)) {
6978 hevc_print(hevc, 0, "over size : %u x %u.\n",
6979 rpm_param->p.pic_width_in_luma_samples, rpm_param->p.pic_height_in_luma_samples);
6980 if ((!hevc->m_ins_flag) &&
6981 ((debug &
6982 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6983 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6984 H265_DEBUG_DIS_SYS_ERROR_PROC);
6985 return 3;
6986 }
6987
6988 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6989 || hevc->pic_h !=
6990 rpm_param->p.pic_height_in_luma_samples) {
6991 hevc_print(hevc, 0,
6992 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6993 hevc->pic_w, hevc->pic_h,
6994 rpm_param->p.pic_width_in_luma_samples,
6995 rpm_param->p.pic_height_in_luma_samples,
6996 hevc->interlace_flag);
6997 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6998 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6999 hevc->frame_width = hevc->pic_w;
7000 hevc->frame_height = hevc->pic_h;
7001#ifdef LOSLESS_COMPRESS_MODE
7002 if (/*re_config_pic_flag == 0 &&*/
7003 (get_double_write_mode(hevc) & 0x10) == 0)
7004 init_decode_head_hw(hevc);
7005#endif
7006 }
7007
7008 if (hevc->bit_depth_chroma > 10 ||
7009 hevc->bit_depth_luma > 10) {
7010			hevc_print(hevc, 0, "unsupported bitdepth: %u,%u\n",
7011 hevc->bit_depth_chroma,
7012 hevc->bit_depth_luma);
7013 if (!hevc->m_ins_flag)
7014 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
7015 H265_DEBUG_DIS_SYS_ERROR_PROC);
7016 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
7017 return 4;
7018 }
7019
7020		/* a zero pic_w/pic_h would cause a divide-by-zero error below */
7021 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
7022 if (get_dbg_flag(hevc)) {
7023 hevc_print(hevc, 0,
7024 "Fatal Error, pic_w = %d, pic_h = %d\n",
7025 hevc->pic_w, hevc->pic_h);
7026 }
7027 return 3;
7028 }
7029 pic_list_process(hevc);
7030
7031 hevc->lcu_size =
7032 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
7033 3 + rpm_param->
7034 p.log2_diff_max_min_coding_block_size);
7035 if (hevc->lcu_size == 0) {
7036 hevc_print(hevc, 0,
7037 "Error, lcu_size = 0 (%d,%d)\n",
7038 rpm_param->p.
7039 log2_min_coding_block_size_minus3,
7040 rpm_param->p.
7041 log2_diff_max_min_coding_block_size);
7042 return 3;
7043 }
7044 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
7045 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
7046 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
7047 hevc->lcu_x_num =
7048 ((hevc->pic_w % hevc->lcu_size) ==
7049 0) ? lcu_x_num_div : lcu_x_num_div + 1;
7050 hevc->lcu_y_num =
7051 ((hevc->pic_h % hevc->lcu_size) ==
7052 0) ? lcu_y_num_div : lcu_y_num_div + 1;
7053 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
7054
7055 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
7056 || hevc->m_nalUnitType ==
7057 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
7058 hevc->curr_POC = 0;
7059 if ((hevc->m_temporalId - 1) == 0)
7060 hevc->iPrevTid0POC = hevc->curr_POC;
7061 } else {
7062 int iMaxPOClsb =
7063 1 << (rpm_param->p.
7064 log2_max_pic_order_cnt_lsb_minus4 + 4);
7065 int iPrevPOClsb;
7066 int iPrevPOCmsb;
7067 int iPOCmsb;
7068 int iPOClsb = rpm_param->p.POClsb;
7069
7070 if (iMaxPOClsb == 0) {
7071 hevc_print(hevc, 0,
7072 "error iMaxPOClsb is 0\n");
7073 return 3;
7074 }
7075
7076 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
7077 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
7078
7079 if ((iPOClsb < iPrevPOClsb)
7080 && ((iPrevPOClsb - iPOClsb) >=
7081 (iMaxPOClsb / 2)))
7082 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
7083 else if ((iPOClsb > iPrevPOClsb)
7084 && ((iPOClsb - iPrevPOClsb) >
7085 (iMaxPOClsb / 2)))
7086 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
7087 else
7088 iPOCmsb = iPrevPOCmsb;
7089 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7090 hevc_print(hevc, 0,
7091 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
7092 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
7093 iPOClsb);
7094 }
7095 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
7096 || hevc->m_nalUnitType ==
7097 NAL_UNIT_CODED_SLICE_BLANT
7098 || hevc->m_nalUnitType ==
7099 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
7100 /* For BLA picture types, POCmsb is set to 0. */
7101 iPOCmsb = 0;
7102 }
7103 hevc->curr_POC = (iPOCmsb + iPOClsb);
7104 if ((hevc->m_temporalId - 1) == 0)
7105 hevc->iPrevTid0POC = hevc->curr_POC;
7106 else {
7107 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7108 hevc_print(hevc, 0,
7109 "m_temporalID is %d\n",
7110 hevc->m_temporalId);
7111 }
7112 }
7113 }
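		/*
		 * Worked example of the POC reconstruction above (values are
		 * illustrative): with log2_max_pic_order_cnt_lsb_minus4 = 4,
		 * iMaxPOClsb = 256. If iPrevTid0POC = 250 (lsb 250, msb 0) and
		 * the new iPOClsb = 5, then 250 - 5 >= 128, so
		 * iPOCmsb = 0 + 256 and curr_POC = 256 + 5 = 261.
		 */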
7114 hevc->RefNum_L0 =
7115 (rpm_param->p.num_ref_idx_l0_active >
7116 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
7117 num_ref_idx_l0_active;
7118 hevc->RefNum_L1 =
7119 (rpm_param->p.num_ref_idx_l1_active >
7120 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
7121 num_ref_idx_l1_active;
7122
7123 /* if(curr_POC==0x10) dump_lmem(); */
7124
7125 /* skip RASL pictures after CRA/BLA pictures */
7126 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
7127 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
7128 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
7129 || hevc->m_nalUnitType ==
7130 NAL_UNIT_CODED_SLICE_BLANT
7131 || hevc->m_nalUnitType ==
7132 NAL_UNIT_CODED_SLICE_BLA_N_LP)
7133 hevc->m_pocRandomAccess = hevc->curr_POC;
7134 else
7135 hevc->m_pocRandomAccess = -MAX_INT;
7136 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
7137 || hevc->m_nalUnitType ==
7138 NAL_UNIT_CODED_SLICE_BLANT
7139 || hevc->m_nalUnitType ==
7140 NAL_UNIT_CODED_SLICE_BLA_N_LP)
7141 hevc->m_pocRandomAccess = hevc->curr_POC;
7142 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
7143 (nal_skip_policy >= 3) &&
7144 (hevc->m_nalUnitType ==
7145 NAL_UNIT_CODED_SLICE_RASL_N ||
7146 hevc->m_nalUnitType ==
7147 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
7148 if (get_dbg_flag(hevc)) {
7149 hevc_print(hevc, 0,
7150 "RASL picture with POC %d < %d ",
7151 hevc->curr_POC, hevc->m_pocRandomAccess);
7152 hevc_print(hevc, 0,
7153					"(RandomAccess point POC), skip it\n");
7154 }
7155 return 1;
7156 }
7157
7158 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
7159 hevc->skip_flag = 0;
7160 /**/
7161 /* if((iPrevPOC != curr_POC)){ */
7162 if (rpm_param->p.slice_segment_address == 0) {
7163 struct PIC_s *pic;
7164
7165 hevc->new_pic = 1;
7166#ifdef MULTI_INSTANCE_SUPPORT
7167 if (!hevc->m_ins_flag)
7168#endif
7169 check_pic_decoded_error_pre(hevc,
7170 READ_VREG(HEVC_PARSER_LCU_START)
7171 & 0xffffff);
7172		if (use_cma == 0) {
7173 if (hevc->pic_list_init_flag == 0) {
7174 init_pic_list(hevc);
7175 init_pic_list_hw(hevc);
7176 init_buf_spec(hevc);
7177 hevc->pic_list_init_flag = 3;
7178 }
7179 }
7180 if (!hevc->m_ins_flag) {
7181 if (hevc->cur_pic)
7182 get_picture_qos_info(hevc);
7183 }
7184 hevc->first_pic_after_recover = 0;
7185 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7186 dump_pic_list(hevc);
7187 /* prev pic */
7188 hevc_pre_pic(hevc, pic);
7189 /*
7190			 * update the referenced flags of the old pictures
7191			 * (cur_pic->referenced stays 1 and is not updated here)
7192 */
7193 apply_ref_pic_set(hevc, hevc->curr_POC,
7194 rpm_param);
7195
7196 /*if (hevc->mmu_enable)
7197 recycle_mmu_bufs(hevc);*/
7198
7199#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7200 if (vdec->master) {
7201 struct hevc_state_s *hevc_ba =
7202 (struct hevc_state_s *)
7203 vdec->master->private;
7204 if (hevc_ba->cur_pic != NULL) {
7205 hevc_ba->cur_pic->dv_enhance_exist = 1;
7206 hevc_print(hevc, H265_DEBUG_DV,
7207 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
7208 hevc->curr_POC, hevc_ba->cur_pic->POC);
7209 }
7210 }
7211 if (vdec->master == NULL &&
7212 vdec->slave == NULL)
7213 set_aux_data(hevc,
7214 hevc->cur_pic, 1, 0); /*suffix*/
7215 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7216 set_aux_data(hevc,
7217 hevc->cur_pic, 0, 1); /*dv meta only*/
7218#else
7219 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7220#endif
7221 /* new pic */
7222 hevc->cur_pic = hevc->is_used_v4l ?
7223 v4l_get_new_pic(hevc, rpm_param) :
7224 get_new_pic(hevc, rpm_param);
7225 if (hevc->cur_pic == NULL) {
7226 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
7227 dump_pic_list(hevc);
7228 hevc->wait_buf = 1;
7229 return -1;
7230 }
7231#ifdef MULTI_INSTANCE_SUPPORT
7232 hevc->decoding_pic = hevc->cur_pic;
7233 if (!hevc->m_ins_flag)
7234 hevc->over_decode = 0;
7235#endif
7236#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7237 hevc->cur_pic->dv_enhance_exist = 0;
7238 if (vdec->slave)
7239 hevc_print(hevc, H265_DEBUG_DV,
7240 "Clear bl (poc %d) dv_enhance_exist flag\n",
7241 hevc->curr_POC);
7242 if (vdec->master == NULL &&
7243 vdec->slave == NULL)
7244 set_aux_data(hevc,
7245 hevc->cur_pic, 0, 0); /*prefix*/
7246
7247 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7248 set_aux_data(hevc,
7249 hevc->cur_pic, 0, 2); /*pre sei only*/
7250#else
7251 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7252#endif
7253 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
7254 hevc->cur_pic->output_ready = 1;
7255 hevc->cur_pic->stream_offset =
7256 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
7257 prepare_display_buf(hevc, hevc->cur_pic);
7258 hevc->wait_buf = 2;
7259 return -1;
7260 }
7261 } else {
7262 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
7263#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7264 if (vdec->master == NULL &&
7265 vdec->slave == NULL) {
7266 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7267 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7268 }
7269#else
7270 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7271 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7272#endif
7273 }
7274 if (hevc->pic_list_init_flag != 3
7275 || hevc->cur_pic == NULL) {
7276				/* make it decode from the first slice segment */
7277 return 3;
7278 }
7279 hevc->cur_pic->slice_idx++;
7280 hevc->new_pic = 0;
7281 }
7282 } else {
7283 if (hevc->wait_buf == 1) {
7284 pic_list_process(hevc);
7285 hevc->cur_pic = hevc->is_used_v4l ?
7286 v4l_get_new_pic(hevc, rpm_param) :
7287 get_new_pic(hevc, rpm_param);
7288 if (hevc->cur_pic == NULL)
7289 return -1;
7290
7291 if (!hevc->m_ins_flag)
7292 hevc->over_decode = 0;
7293
7294#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7295 hevc->cur_pic->dv_enhance_exist = 0;
7296 if (vdec->master == NULL &&
7297 vdec->slave == NULL)
7298 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7299#else
7300 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7301#endif
7302 hevc->wait_buf = 0;
7303 } else if (hevc->wait_buf ==
7304 2) {
7305 if (get_display_pic_num(hevc) >
7306 1)
7307 return -1;
7308 hevc->wait_buf = 0;
7309 }
7310 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7311 dump_pic_list(hevc);
7312 }
7313
7314 if (hevc->new_pic) {
7315#if 1
7316 /*SUPPORT_10BIT*/
7317 int sao_mem_unit =
7318 (hevc->lcu_size == 16 ? 9 :
7319 hevc->lcu_size ==
7320 32 ? 14 : 24) << 4;
7321#else
7322 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
7323#endif
7324 int pic_height_cu =
7325 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
7326 int pic_width_cu =
7327 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
7328 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
7329
7330 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
7331 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7332 hevc_print(hevc, 0,
7333 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
7334 __func__,
7335 hevc->decode_idx,
7336 hevc->curr_pic_struct,
7337 hevc->interlace_flag,
7338 hevc->cur_pic->index);
7339 }
7340 if (dbg_skip_decode_index != 0 &&
7341 hevc->decode_idx == dbg_skip_decode_index)
7342 dbg_skip_flag = 1;
7343
7344 hevc->decode_idx++;
7345 update_tile_info(hevc, pic_width_cu, pic_height_cu,
7346 sao_mem_unit, rpm_param);
7347
7348 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
7349 }
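	/*
	 * Sizing example for the SAO buffers above (illustrative numbers):
	 * with lcu_size = 64, sao_mem_unit = 24 << 4 = 384; for a 3840x2160
	 * picture, pic_height_cu = (2160 + 63) / 64 = 34, so
	 * sao_vb_size = (384 + 32) * 34 = 14144 bytes.
	 */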
7350
7351 if (hevc->iPrevPOC != hevc->curr_POC) {
7352 hevc->new_tile = 1;
7353 hevc->tile_x = 0;
7354 hevc->tile_y = 0;
7355 hevc->tile_y_x = 0;
7356 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7357 hevc_print(hevc, 0,
7358 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
7359 hevc->tile_x, hevc->tile_y);
7360 }
7361 } else if (hevc->tile_enabled) {
7362 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7363 hevc_print(hevc, 0,
7364 "slice_segment_address is %d\n",
7365 rpm_param->p.slice_segment_address);
7366 }
7367 hevc->tile_y_x =
7368 get_tile_index(hevc, rpm_param->p.slice_segment_address,
7369 (hevc->pic_w +
7370 hevc->lcu_size -
7371 1) / hevc->lcu_size);
7372 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
7373 && (hevc->tile_y_x != -1)) {
7374 hevc->new_tile = 1;
7375 hevc->tile_x = hevc->tile_y_x & 0xff;
7376 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
7377 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7378 hevc_print(hevc, 0,
7379 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
7380 rpm_param->p.slice_segment_address,
7381 hevc->tile_x, hevc->tile_y);
7382 }
7383 } else
7384 hevc->new_tile = 0;
7385 } else
7386 hevc->new_tile = 0;
7387
7388 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
7389 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
7390 hevc->new_tile = 0;
7391
7392 if (hevc->new_tile) {
7393 hevc->tile_start_lcu_x =
7394 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
7395 hevc->tile_start_lcu_y =
7396 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
7397 hevc->tile_width_lcu =
7398 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
7399 hevc->tile_height_lcu =
7400 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
7401 }
7402
7403 set_ref_pic_list(hevc, rpm_param);
7404
7405 Col_ref = rpm_param->p.collocated_ref_idx;
7406
7407 hevc->LDCFlag = 0;
7408 if (rpm_param->p.slice_type != I_SLICE) {
7409 hevc->LDCFlag = 1;
7410 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
7411 if (hevc->cur_pic->
7412 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
7413 hevc->curr_POC)
7414 hevc->LDCFlag = 0;
7415 }
7416 if (rpm_param->p.slice_type == B_SLICE) {
7417 for (i = 0; (i < hevc->RefNum_L1)
7418 && hevc->LDCFlag; i++) {
7419 if (hevc->cur_pic->
7420 m_aiRefPOCList1[hevc->cur_pic->
7421 slice_idx][i] >
7422 hevc->curr_POC)
7423 hevc->LDCFlag = 0;
7424 }
7425 }
7426 }
7427
7428 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
7429
7430 hevc->plevel =
7431 rpm_param->p.log2_parallel_merge_level;
7432 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
7433
7434 hevc->LongTerm_Curr = 0; /* to do ... */
7435 hevc->LongTerm_Col = 0; /* to do ... */
7436
7437 hevc->list_no = 0;
7438 if (rpm_param->p.slice_type == B_SLICE)
7439 hevc->list_no = 1 - hevc->ColFromL0Flag;
7440 if (hevc->list_no == 0) {
7441 if (Col_ref < hevc->RefNum_L0) {
7442 hevc->Col_POC =
7443 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
7444 slice_idx][Col_ref];
7445 } else
7446 hevc->Col_POC = INVALID_POC;
7447 } else {
7448 if (Col_ref < hevc->RefNum_L1) {
7449 hevc->Col_POC =
7450 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
7451 slice_idx][Col_ref];
7452 } else
7453 hevc->Col_POC = INVALID_POC;
7454 }
7455
7456 hevc->LongTerm_Ref = 0; /* to do ... */
7457
7458 if (hevc->slice_type != 2) {
7459 /* if(hevc->i_only==1){ */
7460 /* return 0xf; */
7461 /* } */
7462
7463 if (hevc->Col_POC != INVALID_POC) {
7464 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7465 if (hevc->col_pic == NULL) {
7466 hevc->cur_pic->error_mark = 1;
7467 if (get_dbg_flag(hevc)) {
7468 hevc_print(hevc, 0,
7469 "WRONG,fail to get the pic Col_POC\n");
7470 }
7471 if (is_log_enable(hevc))
7472 add_log(hevc,
7473 "WRONG,fail to get the pic Col_POC");
7474 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7475 hevc->col_pic->error_mark = 1;
7476 hevc->cur_pic->error_mark = 1;
7477 if (get_dbg_flag(hevc)) {
7478 hevc_print(hevc, 0,
7479 "WRONG, Col_POC error_mark is 1\n");
7480 }
7481 if (is_log_enable(hevc))
7482 add_log(hevc,
7483 "WRONG, Col_POC error_mark is 1");
7484 } else {
7485 if ((hevc->col_pic->width
7486 != hevc->pic_w) ||
7487 (hevc->col_pic->height
7488 != hevc->pic_h)) {
7489 hevc_print(hevc, 0,
7490 "Wrong reference pic (poc %d) width/height %d/%d\n",
7491 hevc->col_pic->POC,
7492 hevc->col_pic->width,
7493 hevc->col_pic->height);
7494 hevc->cur_pic->error_mark = 1;
7495 }
7496
7497 }
7498
7499 if (hevc->cur_pic->error_mark
7500 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7501 /*count info*/
7502 vdec_count_info(hevc->gvs, hevc->cur_pic->error_mark,
7503 hevc->cur_pic->stream_offset);
7504 if (hevc->PB_skip_mode == 2)
7505 hevc->gvs->drop_frame_count++;
7506 }
7507
7508 if (is_skip_decoding(hevc,
7509 hevc->cur_pic)) {
7510 return 2;
7511 }
7512 } else
7513 hevc->col_pic = hevc->cur_pic;
7514 } /* */
7515 if (hevc->col_pic == NULL)
7516 hevc->col_pic = hevc->cur_pic;
7517#ifdef BUFFER_MGR_ONLY
7518 return 0xf;
7519#else
7520 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7521 || (dbg_skip_flag))
7522 return 0xf;
7523#endif
7524
7525 config_mc_buffer(hevc, hevc->cur_pic);
7526
7527 if (is_skip_decoding(hevc,
7528 hevc->cur_pic)) {
7529 if (get_dbg_flag(hevc))
7530 hevc_print(hevc, 0,
7531 "Discard this picture index %d\n",
7532 hevc->cur_pic->index);
7533 /*count info*/
7534 vdec_count_info(hevc->gvs, hevc->cur_pic->error_mark,
7535 hevc->cur_pic->stream_offset);
7536 if (hevc->PB_skip_mode == 2)
7537 hevc->gvs->drop_frame_count++;
7538 return 2;
7539 }
7540#ifdef MCRCC_ENABLE
7541 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7542#endif
7543 config_mpred_hw(hevc);
7544
7545 config_sao_hw(hevc, rpm_param);
7546
7547 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7548 return 0xf;
7549
7550 return 0;
7551}
7552
7553
7554
7555static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7556 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7557 int cur_buf_idx = new_pic->index;
7558 int bit_depth_10 = (bit_depth != 0x00);
7559 int picture_size;
7560 int cur_mmu_4k_number;
7561 int ret, max_frame_num;
7562 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7563 new_pic->height, !bit_depth_10);
7564 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7565 if (get_double_write_mode(hevc) == 0x10)
7566 return 0;
7567 /*hevc_print(hevc, 0,
7568 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7569 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7570
7571 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7572 max_frame_num = MAX_FRAME_8K_NUM;
7573 else
7574 max_frame_num = MAX_FRAME_4K_NUM;
7575 if (cur_mmu_4k_number > max_frame_num) {
7576 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7577 cur_mmu_4k_number,
7578 new_pic->width,
7579 new_pic->height);
7580 return -1;
7581 }
7582 ret = decoder_mmu_box_alloc_idx(
7583 hevc->mmu_box,
7584 cur_buf_idx,
7585 cur_mmu_4k_number,
7586 mmu_index_adr);
7587
7588 new_pic->scatter_alloc = 1;
7589
7590 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7591 "%s pic index %d page count(%d) ret =%d\n",
7592 __func__, cur_buf_idx,
7593 cur_mmu_4k_number, ret);
7594 return ret;
7595}
7596
7597
7598static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7599 struct PIC_s *pic)
7600{
7601 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7602 "%s pic index %d scatter_alloc %d\n",
7603 __func__, pic->index,
7604 pic->scatter_alloc);
7605
7606 if (hevc->mmu_enable
7607 && ((hevc->double_write_mode & 0x10) == 0)
7608 && pic->scatter_alloc)
7609 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7610 pic->scatter_alloc = 0;
7611}
7612
7613/*
7614 *************************************************
7615 *
7616 *h265 buffer management end
7617 *
7618 **************************************************
7619 */
7620static struct hevc_state_s *gHevc;
7621
7622static void hevc_local_uninit(struct hevc_state_s *hevc)
7623{
7624 hevc->rpm_ptr = NULL;
7625 hevc->lmem_ptr = NULL;
7626
7627#ifdef SWAP_HEVC_UCODE
7628 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7629 if (hevc->mc_cpu_addr != NULL) {
7630 dma_free_coherent(amports_get_dma_device(),
7631 hevc->swap_size, hevc->mc_cpu_addr,
7632 hevc->mc_dma_handle);
7633 hevc->mc_cpu_addr = NULL;
7634 }
7635
7636 }
7637#endif
7638#ifdef DETREFILL_ENABLE
7639 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7640 uninit_detrefill_buf(hevc);
7641#endif
7642 if (hevc->aux_addr) {
7643 dma_free_coherent(amports_get_dma_device(),
7644 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7645 hevc->aux_phy_addr);
7646 hevc->aux_addr = NULL;
7647 }
7648 if (hevc->rpm_addr) {
7649 dma_free_coherent(amports_get_dma_device(),
7650 RPM_BUF_SIZE, hevc->rpm_addr,
7651 hevc->rpm_phy_addr);
7652 hevc->rpm_addr = NULL;
7653 }
7654 if (hevc->lmem_addr) {
7655 dma_free_coherent(amports_get_dma_device(),
7656 LMEM_BUF_SIZE, hevc->lmem_addr,
7657 hevc->lmem_phy_addr);
7658 hevc->lmem_addr = NULL;
7659 }
7660
7661 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7662 if (hevc->frame_mmu_map_phy_addr)
7663 dma_free_coherent(amports_get_dma_device(),
7664 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7665 hevc->frame_mmu_map_phy_addr);
7666
7667 hevc->frame_mmu_map_addr = NULL;
7668 }
7669
7670 //pr_err("[%s line %d] hevc->gvs=0x%p operation\n",__func__, __LINE__, hevc->gvs);
7671}
7672
7673static int hevc_local_init(struct hevc_state_s *hevc)
7674{
7675 int ret = -1;
7676 struct BuffInfo_s *cur_buf_info = NULL;
7677
7678 memset(&hevc->param, 0, sizeof(union param_u));
7679
7680 cur_buf_info = &hevc->work_space_buf_store;
7681
7682 if (vdec_is_support_4k()) {
7683 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7684 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7685 sizeof(struct BuffInfo_s));
7686 else
7687 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7688 sizeof(struct BuffInfo_s));
7689 } else
7690 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7691 sizeof(struct BuffInfo_s));
7692
7693 cur_buf_info->start_adr = hevc->buf_start;
7694 init_buff_spec(hevc, cur_buf_info);
7695
7696 hevc_init_stru(hevc, cur_buf_info);
7697
7698 hevc->bit_depth_luma = 8;
7699 hevc->bit_depth_chroma = 8;
7700 hevc->video_signal_type = 0;
7701 hevc->video_signal_type_debug = 0;
7702 bit_depth_luma = hevc->bit_depth_luma;
7703 bit_depth_chroma = hevc->bit_depth_chroma;
7704 video_signal_type = hevc->video_signal_type;
7705
7706 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7707 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7708 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7709 if (hevc->rpm_addr == NULL) {
7710 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7711 return -1;
7712 }
7713 hevc->rpm_ptr = hevc->rpm_addr;
7714 }
7715
7716 if (prefix_aux_buf_size > 0 ||
7717 suffix_aux_buf_size > 0) {
7718 u32 aux_buf_size;
7719
7720 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7721 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7722 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7723 hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7724 aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7725 if (hevc->aux_addr == NULL) {
7726 pr_err("%s: failed to alloc aux buffer\n", __func__);
7727 return -1;
7728 }
7729 }
7730
7731 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7732 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7733 if (hevc->lmem_addr == NULL) {
7734 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7735 return -1;
7736 }
7737 hevc->lmem_ptr = hevc->lmem_addr;
7738
7739 if (hevc->mmu_enable) {
7740 hevc->frame_mmu_map_addr =
7741 dma_alloc_coherent(amports_get_dma_device(),
7742 get_frame_mmu_map_size(),
7743 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7744 if (hevc->frame_mmu_map_addr == NULL) {
7745 pr_err("%s: failed to alloc count_buffer\n", __func__);
7746 return -1;
7747 }
7748 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7749 }
7750 ret = 0;
7751 return ret;
7752}
7753
7754/*
7755 *******************************************
7756 * Mailbox command
7757 *******************************************
7758 */
7759#define CMD_FINISHED 0
7760#define CMD_ALLOC_VIEW 1
7761#define CMD_FRAME_DISPLAY 3
7762#define CMD_DEBUG 10
7763
7764
7765#define DECODE_BUFFER_NUM_MAX 32
7766#define DISPLAY_BUFFER_NUM 6
7767
7768#define video_domain_addr(adr) (adr&0x7fffffff)
7769#define DECODER_WORK_SPACE_SIZE 0x800000
7770
7771#define spec2canvas(x) \
7772 (((x)->uv_canvas_index << 16) | \
7773 ((x)->uv_canvas_index << 8) | \
7774 ((x)->y_canvas_index << 0))
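/*
 * Illustrative example (not part of the original source): with the
 * non-parallel indices assigned in set_canvas() below (y = 128 + 2 * index,
 * uv = y + 1), decode buffer 0 packs as
 *     spec2canvas(pic) = (129 << 16) | (129 << 8) | 128 = 0x00818180
 * i.e. bits [7:0] carry the Y canvas and bits [15:8]/[23:16] both carry the
 * UV canvas, presumably because U and V share a single canvas in the
 * NV21/NV12 semi-planar layout used here.
 */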
7775
7776
7777static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7778{
7779 struct vdec_s *vdec = hw_to_vdec(hevc);
7780 int canvas_w = ALIGN(pic->width, 64)/4;
7781 int canvas_h = ALIGN(pic->height, 32)/4;
7782 int blkmode = hevc->mem_map_mode;
7783
7784 /*CANVAS_BLKMODE_64X32*/
7785#ifdef SUPPORT_10BIT
7786 if (pic->double_write_mode) {
7787 canvas_w = pic->width /
7788 get_double_write_ratio(hevc, pic->double_write_mode);
7789 canvas_h = pic->height /
7790 get_double_write_ratio(hevc, pic->double_write_mode);
7791
7792 if (hevc->mem_map_mode == 0)
7793 canvas_w = ALIGN(canvas_w, 32);
7794 else
7795 canvas_w = ALIGN(canvas_w, 64);
7796 canvas_h = ALIGN(canvas_h, 32);
7797
7798 if (vdec->parallel_dec == 1) {
7799 if (pic->y_canvas_index == -1)
7800 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7801 if (pic->uv_canvas_index == -1)
7802 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7803 } else {
7804 pic->y_canvas_index = 128 + pic->index * 2;
7805 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7806 }
7807
7808 canvas_config_ex(pic->y_canvas_index,
7809 pic->dw_y_adr, canvas_w, canvas_h,
7810 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7811 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7812 canvas_w, canvas_h,
7813 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7814#ifdef MULTI_INSTANCE_SUPPORT
7815 pic->canvas_config[0].phy_addr =
7816 pic->dw_y_adr;
7817 pic->canvas_config[0].width =
7818 canvas_w;
7819 pic->canvas_config[0].height =
7820 canvas_h;
7821 pic->canvas_config[0].block_mode =
7822 blkmode;
7823 pic->canvas_config[0].endian = hevc->is_used_v4l ? 0 : 7;
7824
7825 pic->canvas_config[1].phy_addr =
7826 pic->dw_u_v_adr;
7827 pic->canvas_config[1].width =
7828 canvas_w;
7829 pic->canvas_config[1].height =
7830 canvas_h;
7831 pic->canvas_config[1].block_mode =
7832 blkmode;
7833 pic->canvas_config[1].endian = hevc->is_used_v4l ? 0 : 7;
7834#endif
7835 } else {
7836 if (!hevc->mmu_enable) {
7837 /* to change after 10bit VPU is ready ... */
7838 if (vdec->parallel_dec == 1) {
7839 if (pic->y_canvas_index == -1)
7840 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7841 pic->uv_canvas_index = pic->y_canvas_index;
7842 } else {
7843 pic->y_canvas_index = 128 + pic->index;
7844 pic->uv_canvas_index = 128 + pic->index;
7845 }
7846
7847 canvas_config_ex(pic->y_canvas_index,
7848 pic->mc_y_adr, canvas_w, canvas_h,
7849 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7850 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7851 canvas_w, canvas_h,
7852 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7853 }
7854 }
7855#else
7856 if (vdec->parallel_dec == 1) {
7857 if (pic->y_canvas_index == -1)
7858 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7859 if (pic->uv_canvas_index == -1)
7860 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7861 } else {
7862 pic->y_canvas_index = 128 + pic->index * 2;
7863 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7864 }
7865
7866
7867 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7868 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7869 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7870 canvas_w, canvas_h,
7871 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7872#endif
7873}
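/*
 * Illustrative note (not from the original source): for the double-write
 * path the canvas geometry is the scaled-down picture size rounded up to
 * the canvas alignment. Assuming get_double_write_ratio() returns 1 (full
 * size double write) and mem_map_mode != 0, a 1920x1080 picture gives
 *     canvas_w = ALIGN(1920, 64) = 1920, canvas_h = ALIGN(1080, 32) = 1088.
 */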
7874
7875static int init_buf_spec(struct hevc_state_s *hevc)
7876{
7877 int pic_width = hevc->pic_w;
7878 int pic_height = hevc->pic_h;
7879
7880 /* hevc_print(hevc, 0,
7881 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7882 */
7883 hevc_print(hevc, 0,
7884 "%s2 %d %d\n", __func__, pic_width, pic_height);
7885 /* pic_width = hevc->pic_w; */
7886 /* pic_height = hevc->pic_h; */
7887
7888 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7889 hevc->frame_width = pic_width;
7890 hevc->frame_height = pic_height;
7891
7892 }
7893
7894 return 0;
7895}
7896
7897static int parse_sei(struct hevc_state_s *hevc,
7898 struct PIC_s *pic, char *sei_buf, uint32_t size)
7899{
7900 char *p = sei_buf;
7901 char *p_sei;
7902 uint16_t header;
7903 uint8_t nal_unit_type;
7904 uint8_t payload_type, payload_size;
7905 int i, j;
7906
7907 if (size < 2)
7908 return 0;
7909 header = *p++;
7910 header <<= 8;
7911 header += *p++;
7912 nal_unit_type = header >> 9;
7913 if ((nal_unit_type != NAL_UNIT_SEI)
7914 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7915 return 0;
7916 while (p+2 <= sei_buf+size) {
7917 payload_type = *p++;
7918 payload_size = *p++;
7919 if (p+payload_size <= sei_buf+size) {
7920 switch (payload_type) {
7921 case SEI_PicTiming:
7922 if ((parser_sei_enable & 0x4) &&
7923 hevc->frame_field_info_present_flag) {
7924 p_sei = p;
7925 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7926 pic->pic_struct = hevc->curr_pic_struct;
7927 if (get_dbg_flag(hevc) &
7928 H265_DEBUG_PIC_STRUCT) {
7929 hevc_print(hevc, 0,
7930 "parse result pic_struct = %d\n",
7931 hevc->curr_pic_struct);
7932 }
7933 }
7934 break;
7935 case SEI_UserDataITU_T_T35:
7936 p_sei = p;
7937 if (p_sei[0] == 0xB5
7938 && p_sei[1] == 0x00
7939 && p_sei[2] == 0x3C
7940 && p_sei[3] == 0x00
7941 && p_sei[4] == 0x01
7942 && p_sei[5] == 0x04) {
7943 char *new_buf;
7944 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7945 new_buf = vzalloc(payload_size);
7946 if (new_buf) {
7947 memcpy(new_buf, p_sei, payload_size);
7948 pic->hdr10p_data_buf = new_buf;
7949 pic->hdr10p_data_size = payload_size;
7950 } else {
7951 hevc_print(hevc, 0,
7952 "%s:hdr10p data vzalloc size(%d) fail\n",
7953 __func__, payload_size);
7954 pic->hdr10p_data_buf = NULL;
7955 pic->hdr10p_data_size = 0;
7956 }
7957 }
7958
7959 break;
7960 case SEI_MasteringDisplayColorVolume:
7961 /*hevc_print(hevc, 0,
7962 "sei type: primary display color volume %d, size %d\n",
7963 payload_type,
7964 payload_size);*/
7965 /* master_display_colour */
7966 p_sei = p;
7967 for (i = 0; i < 3; i++) {
7968 for (j = 0; j < 2; j++) {
7969 hevc->primaries[i][j]
7970 = (*p_sei<<8)
7971 | *(p_sei+1);
7972 p_sei += 2;
7973 }
7974 }
7975 for (i = 0; i < 2; i++) {
7976 hevc->white_point[i]
7977 = (*p_sei<<8)
7978 | *(p_sei+1);
7979 p_sei += 2;
7980 }
7981 for (i = 0; i < 2; i++) {
7982 hevc->luminance[i]
7983 = (*p_sei<<24)
7984 | (*(p_sei+1)<<16)
7985 | (*(p_sei+2)<<8)
7986 | *(p_sei+3);
7987 p_sei += 4;
7988 }
7989 hevc->sei_present_flag |=
7990 SEI_MASTER_DISPLAY_COLOR_MASK;
7991 /*for (i = 0; i < 3; i++)
7992 for (j = 0; j < 2; j++)
7993 hevc_print(hevc, 0,
7994 "\tprimaries[%1d][%1d] = %04x\n",
7995 i, j,
7996 hevc->primaries[i][j]);
7997 hevc_print(hevc, 0,
7998 "\twhite_point = (%04x, %04x)\n",
7999 hevc->white_point[0],
8000 hevc->white_point[1]);
8001 hevc_print(hevc, 0,
8002 "\tmax,min luminance = %08x, %08x\n",
8003 hevc->luminance[0],
8004 hevc->luminance[1]);*/
8005 break;
8006 case SEI_ContentLightLevel:
8007 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8008 hevc_print(hevc, 0,
8009 "sei type: max content light level %d, size %d\n",
8010 payload_type, payload_size);
8011 /* content_light_level */
8012 p_sei = p;
8013 hevc->content_light_level[0]
8014 = (*p_sei<<8) | *(p_sei+1);
8015 p_sei += 2;
8016 hevc->content_light_level[1]
8017 = (*p_sei<<8) | *(p_sei+1);
8018 p_sei += 2;
8019 hevc->sei_present_flag |=
8020 SEI_CONTENT_LIGHT_LEVEL_MASK;
8021 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8022 hevc_print(hevc, 0,
8023 "\tmax cll = %04x, max_pa_cll = %04x\n",
8024 hevc->content_light_level[0],
8025 hevc->content_light_level[1]);
8026 break;
8027 default:
8028 break;
8029 }
8030 }
8031 p += payload_size;
8032 }
8033 return 0;
8034}
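/*
 * Layout of the buffer consumed above, as implied by the parsing logic
 * itself (illustrative summary, not taken from a separate spec dump):
 *
 *   [ 2-byte NAL unit header, nal_unit_type in bits 14..9 ]
 *   [ payload_type (1 byte) ][ payload_size (1 byte) ][ payload bytes ] ...
 *
 * Note that payload_type/payload_size are read as single bytes here, so the
 * 0xFF extension bytes used by the full SEI syntax for values >= 255 are
 * not expected on this path.
 */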
8035
8036static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
8037 unsigned w, unsigned h)
8038{
8039 unsigned ar;
8040
8041 if (idc == 255) {
8042 ar = div_u64(256ULL * sar_h * h,
8043 sar_w * w);
8044 } else {
8045 switch (idc) {
8046 case 1:
8047 ar = 0x100 * h / w;
8048 break;
8049 case 2:
8050 ar = 0x100 * h * 11 / (w * 12);
8051 break;
8052 case 3:
8053 ar = 0x100 * h * 11 / (w * 10);
8054 break;
8055 case 4:
8056 ar = 0x100 * h * 11 / (w * 16);
8057 break;
8058 case 5:
8059 ar = 0x100 * h * 33 / (w * 40);
8060 break;
8061 case 6:
8062 ar = 0x100 * h * 11 / (w * 24);
8063 break;
8064 case 7:
8065 ar = 0x100 * h * 11 / (w * 20);
8066 break;
8067 case 8:
8068 ar = 0x100 * h * 11 / (w * 32);
8069 break;
8070 case 9:
8071 ar = 0x100 * h * 33 / (w * 80);
8072 break;
8073 case 10:
8074 ar = 0x100 * h * 11 / (w * 18);
8075 break;
8076 case 11:
8077 ar = 0x100 * h * 11 / (w * 15);
8078 break;
8079 case 12:
8080 ar = 0x100 * h * 33 / (w * 64);
8081 break;
8082 case 13:
8083 ar = 0x100 * h * 99 / (w * 160);
8084 break;
8085 case 14:
8086 ar = 0x100 * h * 3 / (w * 4);
8087 break;
8088 case 15:
8089 ar = 0x100 * h * 2 / (w * 3);
8090 break;
8091 case 16:
8092 ar = 0x100 * h * 1 / (w * 2);
8093 break;
8094 default:
8095 ar = h * 0x100 / w;
8096 break;
8097 }
8098 }
8099
8100 return ar;
8101}
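/*
 * Worked example (illustrative, not from the original source): calc_ar()
 * returns height/width in 8.8 fixed point, scaled by the sample aspect
 * ratio implied by aspect_ratio_idc. For idc == 1 (square samples) and a
 * 1920x1080 picture:
 *     ar = 0x100 * 1080 / 1920 = 144 = 0x90   (0.5625 == 9/16)
 * For idc == 255 the SAR is taken from the sar_width/sar_height values
 * signalled in the stream instead of the fixed table above.
 */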
8102
8103static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
8104 struct PIC_s *pic)
8105{
8106 unsigned int ar;
8107 int i, j;
8108 char *p;
8109 unsigned size = 0;
8110 unsigned type = 0;
8111 struct vframe_master_display_colour_s *vf_dp
8112 = &vf->prop.master_display_colour;
8113
8114 vf->width = pic->width /
8115 get_double_write_ratio(hevc, pic->double_write_mode);
8116 vf->height = pic->height /
8117 get_double_write_ratio(hevc, pic->double_write_mode);
8118
8119 vf->duration = hevc->frame_dur;
8120 vf->duration_pulldown = 0;
8121 vf->flag = 0;
8122
8123 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
8124 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
8125
8126
8127 if (((pic->aspect_ratio_idc == 255) &&
8128 pic->sar_width &&
8129 pic->sar_height) ||
8130 ((pic->aspect_ratio_idc != 255) &&
8131 (pic->width))) {
8132 ar = min_t(u32,
8133 calc_ar(pic->aspect_ratio_idc,
8134 pic->sar_width,
8135 pic->sar_height,
8136 pic->width,
8137 pic->height),
8138 DISP_RATIO_ASPECT_RATIO_MAX);
8139 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
8140 vf->ratio_control <<= hevc->interlace_flag;
8141 }
8142 hevc->ratio_control = vf->ratio_control;
8143 if (pic->aux_data_buf
8144 && pic->aux_data_size) {
8145 /* parse sei */
8146 p = pic->aux_data_buf;
8147 while (p < pic->aux_data_buf
8148 + pic->aux_data_size - 8) {
8149 size = *p++;
8150 size = (size << 8) | *p++;
8151 size = (size << 8) | *p++;
8152 size = (size << 8) | *p++;
8153 type = *p++;
8154 type = (type << 8) | *p++;
8155 type = (type << 8) | *p++;
8156 type = (type << 8) | *p++;
8157 if (type == 0x02000000) {
8158 /* hevc_print(hevc, 0,
8159 "sei(%d)\n", size); */
8160 parse_sei(hevc, pic, p, size);
8161 }
8162 p += size;
8163 }
8164 }
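/*
 * Illustrative note (not from the original source): each record in
 * aux_data_buf walked above is a [4-byte big-endian size][4-byte big-endian
 * type][size bytes of payload] envelope; only records of type 0x02000000
 * (raw SEI NAL payload) are handed to parse_sei(), everything else is
 * skipped by the p += size step.
 */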
8165 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
8166 vf->signal_type = pic->video_signal_type;
8167 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
8168 u32 data;
8169 data = vf->signal_type;
8170 data = data & 0xFFFF00FF;
8171 data = data | (0x30<<8);
8172 vf->signal_type = data;
8173 }
8174 } else {
8175 vf->signal_type = 0;
8176 }
8177 hevc->video_signal_type_debug = vf->signal_type;
8178
8179 /* master_display_colour */
8180 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
8181 for (i = 0; i < 3; i++)
8182 for (j = 0; j < 2; j++)
8183 vf_dp->primaries[i][j] = hevc->primaries[i][j];
8184 for (i = 0; i < 2; i++) {
8185 vf_dp->white_point[i] = hevc->white_point[i];
8186 vf_dp->luminance[i]
8187 = hevc->luminance[i];
8188 }
8189 vf_dp->present_flag = 1;
8190 } else
8191 vf_dp->present_flag = 0;
8192
8193 /* content_light_level */
8194 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
8195 vf_dp->content_light_level.max_content
8196 = hevc->content_light_level[0];
8197 vf_dp->content_light_level.max_pic_average
8198 = hevc->content_light_level[1];
8199 vf_dp->content_light_level.present_flag = 1;
8200 } else
8201 vf_dp->content_light_level.present_flag = 0;
8202
8203 if (hevc->is_used_v4l &&
8204 ((hevc->sei_present_flag & SEI_HDR10PLUS_MASK) ||
8205 (vf_dp->present_flag) ||
8206 (vf_dp->content_light_level.present_flag))) {
8207 struct aml_vdec_hdr_infos hdr;
8208 struct aml_vcodec_ctx *ctx =
8209 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
8210
8211 memset(&hdr, 0, sizeof(hdr));
8212 hdr.signal_type = vf->signal_type;
8213 hdr.color_parms = *vf_dp;
8214 vdec_v4l_set_hdr_infos(ctx, &hdr);
8215 }
8216
8217 if ((hevc->sei_present_flag & SEI_HDR10PLUS_MASK) && (pic->hdr10p_data_buf != NULL)
8218 && (pic->hdr10p_data_size != 0)) {
8219 char *new_buf;
8220 new_buf = vzalloc(pic->hdr10p_data_size);
8221
8222 if (new_buf) {
8223 memcpy(new_buf, pic->hdr10p_data_buf, pic->hdr10p_data_size);
8224 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
8225 hevc_print(hevc, 0,
8226 "hdr10p data: (size %d)\n",
8227 pic->hdr10p_data_size);
8228 for (i = 0; i < pic->hdr10p_data_size; i++) {
8229 hevc_print_cont(hevc, 0,
8230 "%02x ", pic->hdr10p_data_buf[i]);
8231 if (((i + 1) & 0xf) == 0)
8232 hevc_print_cont(hevc, 0, "\n");
8233 }
8234 hevc_print_cont(hevc, 0, "\n");
8235 }
8236
8237 vf->hdr10p_data_size = pic->hdr10p_data_size;
8238 vf->hdr10p_data_buf = new_buf;
8239 } else {
8240 hevc_print(hevc, 0,
8241 "%s:hdr10p data vzalloc size(%d) fail\n",
8242 __func__, pic->hdr10p_data_size);
8243 vf->hdr10p_data_buf = NULL;
8244 vf->hdr10p_data_size = 0;
8245 }
8246
8247 vfree(pic->hdr10p_data_buf);
8248 pic->hdr10p_data_buf = NULL;
8249 pic->hdr10p_data_size = 0;
8250 }
8251
8252 vf->sidebind_type = hevc->sidebind_type;
8253 vf->sidebind_channel_id = hevc->sidebind_channel_id;
8254}
8255
8256static int vh265_vf_states(struct vframe_states *states, void *op_arg)
8257{
8258 unsigned long flags;
8259#ifdef MULTI_INSTANCE_SUPPORT
8260 struct vdec_s *vdec = op_arg;
8261 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8262#else
8263 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8264#endif
8265
8266 spin_lock_irqsave(&lock, flags);
8267
8268 states->vf_pool_size = VF_POOL_SIZE;
8269 states->buf_free_num = kfifo_len(&hevc->newframe_q);
8270 states->buf_avail_num = kfifo_len(&hevc->display_q);
8271
8272 if (step == 2)
8273 states->buf_avail_num = 0;
8274 spin_unlock_irqrestore(&lock, flags);
8275 return 0;
8276}
8277
8278static struct vframe_s *vh265_vf_peek(void *op_arg)
8279{
8280 struct vframe_s *vf[2] = {0, 0};
8281#ifdef MULTI_INSTANCE_SUPPORT
8282 struct vdec_s *vdec = op_arg;
8283 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8284#else
8285 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8286#endif
8287
8288 if (step == 2)
8289 return NULL;
8290
8291 if (force_disp_pic_index & 0x100) {
8292 if (force_disp_pic_index & 0x200)
8293 return NULL;
8294 return &hevc->vframe_dummy;
8295 }
8296
8297
8298 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
8299 if (vf[1]) {
8300 vf[0]->next_vf_pts_valid = true;
8301 vf[0]->next_vf_pts = vf[1]->pts;
8302 } else
8303 vf[0]->next_vf_pts_valid = false;
8304 return vf[0];
8305 }
8306
8307 return NULL;
8308}
8309
8310static struct vframe_s *vh265_vf_get(void *op_arg)
8311{
8312 struct vframe_s *vf;
8313#ifdef MULTI_INSTANCE_SUPPORT
8314 struct vdec_s *vdec = op_arg;
8315 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8316#else
8317 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8318#endif
8319
8320 if (step == 2)
8321 return NULL;
8322 else if (step == 1)
8323 step = 2;
8324
8325#if 0
8326 if (force_disp_pic_index & 0x100) {
8327 int buffer_index = force_disp_pic_index & 0xff;
8328 struct PIC_s *pic = NULL;
8329 if (buffer_index >= 0
8330 && buffer_index < MAX_REF_PIC_NUM)
8331 pic = hevc->m_PIC[buffer_index];
8332 if (pic == NULL)
8333 return NULL;
8334 if (force_disp_pic_index & 0x200)
8335 return NULL;
8336
8337 vf = &hevc->vframe_dummy;
8338 if (get_double_write_mode(hevc)) {
8339 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
8340 VIDTYPE_VIU_NV21;
8341 if (hevc->m_ins_flag) {
8342 vf->canvas0Addr = vf->canvas1Addr = -1;
8343 vf->plane_num = 2;
8344 vf->canvas0_config[0] =
8345 pic->canvas_config[0];
8346 vf->canvas0_config[1] =
8347 pic->canvas_config[1];
8348
8349 vf->canvas1_config[0] =
8350 pic->canvas_config[0];
8351 vf->canvas1_config[1] =
8352 pic->canvas_config[1];
8353 } else {
8354 vf->canvas0Addr = vf->canvas1Addr
8355 = spec2canvas(pic);
8356 }
8357 } else {
8358 vf->canvas0Addr = vf->canvas1Addr = 0;
8359 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8360 if (hevc->mmu_enable)
8361 vf->type |= VIDTYPE_SCATTER;
8362 }
8363 vf->compWidth = pic->width;
8364 vf->compHeight = pic->height;
8365 update_vf_memhandle(hevc, vf, pic);
8366 switch (hevc->bit_depth_luma) {
8367 case 9:
8368 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
8369 break;
8370 case 10:
8371 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
8372 | BITDEPTH_V10;
8373 break;
8374 default:
8375 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8376 break;
8377 }
8378 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8379 vf->bitdepth =
8380 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8381 if (hevc->mem_saving_mode == 1)
8382 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8383 vf->duration_pulldown = 0;
8384 vf->pts = 0;
8385 vf->pts_us64 = 0;
8386 set_frame_info(hevc, vf);
8387
8388 vf->width = pic->width /
8389 get_double_write_ratio(hevc, pic->double_write_mode);
8390 vf->height = pic->height /
8391 get_double_write_ratio(hevc, pic->double_write_mode);
8392
8393 force_disp_pic_index |= 0x200;
8394 return vf;
8395 }
8396#endif
8397
8398 if (kfifo_get(&hevc->display_q, &vf)) {
8399 struct vframe_s *next_vf;
8400 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8401 hevc_print(hevc, 0,
8402 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8403 __func__, vf, vf->type, vf->index,
8404 get_pic_poc(hevc, vf->index & 0xff),
8405 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8406 vf->pts, vf->pts_us64,
8407 vf->duration);
8408#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8409 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
8410 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
8411 hevc_print(hevc, 0, "pic 0x%p aux size %d:\n",
8412 pic, pic->aux_data_size);
8413 if (pic->aux_data_buf && pic->aux_data_size > 0) {
8414 int i;
8415 for (i = 0; i < pic->aux_data_size; i++) {
8416 hevc_print_cont(hevc, 0,
8417 "%02x ", pic->aux_data_buf[i]);
8418 if (((i + 1) & 0xf) == 0)
8419 hevc_print_cont(hevc, 0, "\n");
8420 }
8421 hevc_print_cont(hevc, 0, "\n");
8422 }
8423 }
8424#endif
8425 hevc->show_frame_num++;
8426 vf->index_disp = hevc->vf_get_count;
8427 hevc->vf_get_count++;
8428
8429 if (kfifo_peek(&hevc->display_q, &next_vf)) {
8430 vf->next_vf_pts_valid = true;
8431 vf->next_vf_pts = next_vf->pts;
8432 } else
8433 vf->next_vf_pts_valid = false;
8434
8435 return vf;
8436 }
8437
8438 return NULL;
8439}
8440static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
8441 int i;
8442 for (i = 0; i < VF_POOL_SIZE; i++) {
8443 if (vf == &hevc->vfpool[i] || vf == &hevc->vframe_dummy)
8444 return true;
8445 }
8446 hevc_print(hevc, 0," h265 invalid vf been put, vf = %p\n", vf);
8447 for (i = 0; i < VF_POOL_SIZE; i++) {
8448 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,"valid vf[%d]= %p \n", i, &hevc->vfpool[i]);
8449 }
8450 return false;
8451}
8452
8453static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
8454{
8455 unsigned long flags;
8456#ifdef MULTI_INSTANCE_SUPPORT
8457 struct vdec_s *vdec = op_arg;
8458 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8459#else
8460 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8461#endif
8462 unsigned char index_top;
8463 unsigned char index_bot;
8464
8465 if (vf && (vf_valid_check(vf, hevc) == false))
8466 return;
8467 if (vf == (&hevc->vframe_dummy))
8468 return;
8469 if (!vf)
8470 return;
8471 index_top = vf->index & 0xff;
8472 index_bot = (vf->index >> 8) & 0xff;
8473 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8474 hevc_print(hevc, 0,
8475 "%s(type %d index 0x%x)\n",
8476 __func__, vf->type, vf->index);
8477 hevc->vf_put_count++;
8478 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8479 spin_lock_irqsave(&lock, flags);
8480
8481 if (vf->hdr10p_data_buf) {
8482 vfree(vf->hdr10p_data_buf);
8483 vf->hdr10p_data_buf = NULL;
8484 vf->hdr10p_data_size = 0;
8485 }
8486
8487 if (index_top != 0xff
8488 && index_top < MAX_REF_PIC_NUM
8489 && hevc->m_PIC[index_top]) {
8490 if (hevc->is_used_v4l)
8491 hevc->m_PIC[index_top]->vframe_bound = true;
8492 if (hevc->m_PIC[index_top]->vf_ref > 0) {
8493 hevc->m_PIC[index_top]->vf_ref--;
8494
8495 if (hevc->m_PIC[index_top]->vf_ref == 0) {
8496 hevc->m_PIC[index_top]->output_ready = 0;
8497
8498 if (hevc->wait_buf != 0)
8499 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8500 0x1);
8501 }
8502 }
8503 }
8504
8505 if (index_bot != 0xff
8506 && index_bot < MAX_REF_PIC_NUM
8507 && hevc->m_PIC[index_bot]) {
8508 if (hevc->is_used_v4l)
8509 hevc->m_PIC[index_bot]->vframe_bound = true;
8510 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
8511 hevc->m_PIC[index_bot]->vf_ref--;
8512
8513 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
8514 hevc->m_PIC[index_bot]->output_ready = 0;
8515 if (hevc->wait_buf != 0)
8516 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8517 0x1);
8518 }
8519 }
8520 }
8521 spin_unlock_irqrestore(&lock, flags);
8522}
8523
8524static int vh265_event_cb(int type, void *data, void *op_arg)
8525{
8526 unsigned long flags;
8527#ifdef MULTI_INSTANCE_SUPPORT
8528 struct vdec_s *vdec = op_arg;
8529 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8530#else
8531 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8532#endif
8533 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8534#if 0
8535 amhevc_stop();
8536#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8537 vf_light_unreg_provider(&vh265_vf_prov);
8538#endif
8539 spin_lock_irqsave(&hevc->lock, flags);
8540 vh265_local_init();
8541 vh265_prot_init();
8542 spin_unlock_irqrestore(&hevc->lock, flags);
8543#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8544 vf_reg_provider(&vh265_vf_prov);
8545#endif
8546 amhevc_start();
8547#endif
8548 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8549 struct provider_aux_req_s *req =
8550 (struct provider_aux_req_s *)data;
8551 unsigned char index;
8552
8553 if (!req->vf) {
8554 req->aux_size = hevc->vf_put_count;
8555 return 0;
8556 }
8557 spin_lock_irqsave(&lock, flags);
8558 index = req->vf->index & 0xff;
8559 req->aux_buf = NULL;
8560 req->aux_size = 0;
8561 if (req->bot_flag)
8562 index = (req->vf->index >> 8) & 0xff;
8563 if (index != 0xff
8564 && index < MAX_REF_PIC_NUM
8565 && hevc->m_PIC[index]) {
8566 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8567 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8568#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8569 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8570 req->dv_enhance_exist = false;
8571 else
8572 req->dv_enhance_exist =
8573 hevc->m_PIC[index]->dv_enhance_exist;
8574 if (vdec_frame_based(vdec) && (hevc->dv_duallayer == true))
8575 req->dv_enhance_exist = 1;
8576 hevc_print(hevc, H265_DEBUG_DV,
8577 "query dv_enhance_exist for (pic 0x%p, vf 0x%p, poc %d index %d) flag => %d, aux sizd 0x%x\n",
8578 hevc->m_PIC[index],
8579 req->vf,
8580 hevc->m_PIC[index]->POC, index,
8581 req->dv_enhance_exist, req->aux_size);
8582#else
8583 req->dv_enhance_exist = 0;
8584#endif
8585 }
8586 spin_unlock_irqrestore(&lock, flags);
8587
8588 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8589 hevc_print(hevc, 0,
8590 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8591 __func__, type, index, req->aux_size);
8592#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8593 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8594 if ((force_bypass_dvenl & 0x80000000) == 0) {
8595 hevc_print(hevc, 0,
8596 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8597 __func__);
8598 hevc->bypass_dvenl_enable = 1;
8599 }
8600
8601#endif
8602 }
8603 return 0;
8604}
8605
8606#ifdef HEVC_PIC_STRUCT_SUPPORT
8607static int process_pending_vframe(struct hevc_state_s *hevc,
8608 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8609{
8610 struct vframe_s *vf;
8611
8612 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8613 hevc_print(hevc, 0,
8614 "%s: pair_pic index 0x%x %s\n",
8615 __func__, pair_pic->index,
8616 pair_frame_top_flag ?
8617 "top" : "bot");
8618
8619 if (kfifo_len(&hevc->pending_q) > 1) {
8620 unsigned long flags;
8621 /* do not keep more than 1 frame pending */
8622 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8623 hevc_print(hevc, 0,
8624 "fatal error, no available buffer slot.");
8625 return -1;
8626 }
8627 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8628 hevc_print(hevc, 0,
8629 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8630 __func__, vf->index);
8631 if ((hevc->double_write_mode == 3) &&
8632 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8633 vf->type |= VIDTYPE_COMPRESS;
8634 if (hevc->mmu_enable)
8635 vf->type |= VIDTYPE_SCATTER;
8636 }
8637 hevc->vf_pre_count++;
8638 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8639 spin_lock_irqsave(&lock, flags);
8640 vf->index &= 0xff;
8641 hevc->m_PIC[vf->index]->vf_ref = 0;
8642 hevc->m_PIC[vf->index]->output_ready = 0;
8643 if (hevc->wait_buf != 0)
8644 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8645 0x1);
8646 spin_unlock_irqrestore(&lock, flags);
8647
8648 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8649 }
8650
8651 if (kfifo_peek(&hevc->pending_q, &vf)) {
8652 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8653 /*
8654 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8655 *do not use it
8656 */
8657 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8658 hevc_print(hevc, 0,
8659 "fatal error, no available buffer slot.");
8660 return -1;
8661 }
8662 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8663 hevc_print(hevc, 0,
8664 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8665 __func__, vf->index);
8666 if (vf) {
8667 if ((hevc->double_write_mode == 3) &&
8668 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8669 vf->type |= VIDTYPE_COMPRESS;
8670 if (hevc->mmu_enable)
8671 vf->type |= VIDTYPE_SCATTER;
8672 }
8673 hevc->vf_pre_count++;
8674 kfifo_put(&hevc->display_q,
8675 (const struct vframe_s *)vf);
8676 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8677 }
8678 } else if ((!pair_frame_top_flag) &&
8679 (((vf->index >> 8) & 0xff) == 0xff)) {
8680 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8681 hevc_print(hevc, 0,
8682 "fatal error, no available buffer slot.");
8683 return -1;
8684 }
8685 if (vf) {
8686 if ((hevc->double_write_mode == 3) &&
8687 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8688 vf->type |= VIDTYPE_COMPRESS;
8689 if (hevc->mmu_enable)
8690 vf->type |= VIDTYPE_SCATTER;
8691 }
8692 vf->index &= 0xff;
8693 vf->index |= (pair_pic->index << 8);
8694 vf->canvas1Addr = spec2canvas(pair_pic);
8695 pair_pic->vf_ref++;
8696 kfifo_put(&hevc->display_q,
8697 (const struct vframe_s *)vf);
8698 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8699 hevc->vf_pre_count++;
8700 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8701 hevc_print(hevc, 0,
8702 "%s vf => display_q: (index 0x%x)\n",
8703 __func__, vf->index);
8704 }
8705 } else if (pair_frame_top_flag &&
8706 ((vf->index & 0xff) == 0xff)) {
8707 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8708 hevc_print(hevc, 0,
8709 "fatal error, no available buffer slot.");
8710 return -1;
8711 }
8712 if (vf) {
8713 if ((hevc->double_write_mode == 3) &&
8714 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8715 vf->type |= VIDTYPE_COMPRESS;
8716 if (hevc->mmu_enable)
8717 vf->type |= VIDTYPE_SCATTER;
8718 }
8719 vf->index &= 0xff00;
8720 vf->index |= pair_pic->index;
8721 vf->canvas0Addr = spec2canvas(pair_pic);
8722 pair_pic->vf_ref++;
8723 kfifo_put(&hevc->display_q,
8724 (const struct vframe_s *)vf);
8725 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8726 hevc->vf_pre_count++;
8727 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8728 hevc_print(hevc, 0,
8729 "%s vf => display_q: (index 0x%x)\n",
8730 __func__, vf->index);
8731 }
8732 }
8733 }
8734 return 0;
8735}
8736#endif
8737static void update_vf_memhandle(struct hevc_state_s *hevc,
8738 struct vframe_s *vf, struct PIC_s *pic)
8739{
8740 if (pic->index < 0) {
8741 vf->mem_handle = NULL;
8742 vf->mem_head_handle = NULL;
8743 } else if (vf->type & VIDTYPE_SCATTER) {
8744 vf->mem_handle =
8745 decoder_mmu_box_get_mem_handle(
8746 hevc->mmu_box, pic->index);
8747 vf->mem_head_handle =
8748 decoder_bmmu_box_get_mem_handle(
8749 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8750 } else {
8751 vf->mem_handle =
8752 decoder_bmmu_box_get_mem_handle(
8753 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8754 vf->mem_head_handle = NULL;
8755 /*vf->mem_head_handle =
8756 decoder_bmmu_box_get_mem_handle(
8757 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8758 }
8759 return;
8760}
8761
8762static void fill_frame_info(struct hevc_state_s *hevc,
8763 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8764{
8765 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8766 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8767 vframe_qos->type = 4;
8768 else if (pic->slice_type == I_SLICE)
8769 vframe_qos->type = 1;
8770 else if (pic->slice_type == P_SLICE)
8771 vframe_qos->type = 2;
8772 else if (pic->slice_type == B_SLICE)
8773 vframe_qos->type = 3;
8774/*
8775#define SHOW_QOS_INFO
8776*/
8777 if (input_frame_based(hw_to_vdec(hevc)))
8778 vframe_qos->size = pic->frame_size;
8779 else
8780 vframe_qos->size = framesize;
8781 vframe_qos->pts = pts;
8782#ifdef SHOW_QOS_INFO
8783 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8784#endif
8785
8786
8787 vframe_qos->max_mv = pic->max_mv;
8788 vframe_qos->avg_mv = pic->avg_mv;
8789 vframe_qos->min_mv = pic->min_mv;
8790#ifdef SHOW_QOS_INFO
8791 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8792 vframe_qos->max_mv,
8793 vframe_qos->avg_mv,
8794 vframe_qos->min_mv);
8795#endif
8796
8797 vframe_qos->max_qp = pic->max_qp;
8798 vframe_qos->avg_qp = pic->avg_qp;
8799 vframe_qos->min_qp = pic->min_qp;
8800#ifdef SHOW_QOS_INFO
8801 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8802 vframe_qos->max_qp,
8803 vframe_qos->avg_qp,
8804 vframe_qos->min_qp);
8805#endif
8806
8807 vframe_qos->max_skip = pic->max_skip;
8808 vframe_qos->avg_skip = pic->avg_skip;
8809 vframe_qos->min_skip = pic->min_skip;
8810#ifdef SHOW_QOS_INFO
8811 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8812 vframe_qos->max_skip,
8813 vframe_qos->avg_skip,
8814 vframe_qos->min_skip);
8815#endif
8816
8817 vframe_qos->num++;
8818
8819}
8820
8821static inline void hevc_update_gvs(struct hevc_state_s *hevc)
8822{
8823 if (hevc->gvs->frame_height != hevc->frame_height) {
8824 hevc->gvs->frame_width = hevc->frame_width;
8825 hevc->gvs->frame_height = hevc->frame_height;
8826 }
8827 if (hevc->gvs->frame_dur != hevc->frame_dur) {
8828 hevc->gvs->frame_dur = hevc->frame_dur;
8829 if (hevc->frame_dur != 0)
8830 hevc->gvs->frame_rate = 96000 / hevc->frame_dur;
8831 else
8832 hevc->gvs->frame_rate = -1;
8833 }
8834 hevc->gvs->error_count = hevc->gvs->error_frame_count;
8835 hevc->gvs->status = hevc->stat | hevc->fatal_error;
8836 if (hevc->gvs->ratio_control != hevc->ratio_control)
8837 hevc->gvs->ratio_control = hevc->ratio_control;
8838}
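/*
 * Illustrative note (not from the original source): frame_dur is kept in
 * 1/96000 s units, so 96000 / frame_dur above is the frame rate in fps,
 * e.g. frame_dur = 4000 -> 24 fps, 3840 -> 25 fps, 3200 -> 30 fps,
 * 1600 -> 60 fps.
 */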
8839
8840static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8841{
8842 struct vdec_s *vdec = hw_to_vdec(hevc);
8843 struct vframe_s *vf = NULL;
8844 int stream_offset = pic->stream_offset;
8845 unsigned short slice_type = pic->slice_type;
8846 ulong nv_order = VIDTYPE_VIU_NV21;
8847 u32 frame_size = 0;
8848 struct vdec_info tmp4x;
8849 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
8850
8851 /* swap uv */
8852 if (hevc->is_used_v4l) {
8853 if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV12) ||
8854 (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV12M))
8855 nv_order = VIDTYPE_VIU_NV12;
8856 }
8857
8858 if (force_disp_pic_index & 0x100) {
8859 /*recycle directly*/
8860 pic->output_ready = 0;
8861 return -1;
8862 }
8863 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8864 hevc_print(hevc, 0,
8865 "fatal error, no available buffer slot.");
8866 return -1;
8867 }
8868 display_frame_count[hevc->index]++;
8869 if (vf) {
8870 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8871 "%s: pic index 0x%x\n",
8872 __func__, pic->index);*/
8873
8874 if (hevc->is_used_v4l) {
8875 vf->v4l_mem_handle
8876 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8877 if (hevc->mmu_enable) {
8878 vf->mm_box.bmmu_box = hevc->bmmu_box;
8879 vf->mm_box.bmmu_idx = VF_BUFFER_IDX(pic->BUF_index);
8880 vf->mm_box.mmu_box = hevc->mmu_box;
8881 vf->mm_box.mmu_idx = pic->index;
8882 }
8883 }
8884
8885#ifdef MULTI_INSTANCE_SUPPORT
8886 if (vdec_frame_based(vdec)) {
8887 vf->pts = pic->pts;
8888 vf->pts_us64 = pic->pts64;
8889 vf->timestamp = pic->timestamp;
8890 }
8891 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8892 stream_offset, &vf->pts, 0) != 0) { */
8893#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8894 else if (vdec->master == NULL) {
8895#else
8896 else {
8897#endif
8898#endif
8899 if (!vdec_dual(vdec) && pic->stream_frame_size > 50 &&
8900 (hevc->min_pic_size > pic->stream_frame_size ||
8901 (hevc->min_pic_size == 0))) {
8902 hevc->min_pic_size = pic->stream_frame_size;
8903
8904 if (hevc->min_pic_size < 1024 &&
8905 ((hevc->pts_lookup_margin > hevc->min_pic_size)
8906 || (hevc->pts_lookup_margin == 0)))
8907 hevc->pts_lookup_margin = hevc->min_pic_size;
8908 }
8909
8910 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8911 "call pts_lookup_offset_us64(0x%x)\n",
8912 stream_offset);
8913 if (pts_lookup_offset_us64
8914 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8915 &frame_size, hevc->pts_lookup_margin,
8916 &vf->pts_us64) != 0) {
8917#ifdef DEBUG_PTS
8918 hevc->pts_missed++;
8919#endif
8920 vf->pts = 0;
8921 vf->pts_us64 = 0;
8922 hevc->pts_continue_miss++;
8923 } else {
8924 hevc->pts_continue_miss = 0;
8925#ifdef DEBUG_PTS
8926 hevc->pts_hit++;
8927#endif
8928 }
8929#ifdef MULTI_INSTANCE_SUPPORT
8930#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8931 } else {
8932 vf->pts = 0;
8933 vf->pts_us64 = 0;
8934 }
8935#else
8936 }
8937#endif
8938#endif
8939 if (!vdec_dual(vdec) &&
8940 vdec_stream_based(vdec) && (vf->duration > 0)) {
8941 if ((vf->pts != 0) && (hevc->last_pts != 0)) {
8942 int diff = vf->pts - hevc->last_pts;
8943 if (diff > ((hevc->pts_continue_miss + 2)
8944 * DUR2PTS(vf->duration))) {
8945 vf->pts = 0;
8946 vf->pts_us64 = 0;
8947 }
8948 }
8949 }
8950
8951 if (pts_unstable && (hevc->frame_dur > 0))
8952 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8953
8954 fill_frame_info(hevc, pic, frame_size, vf->pts);
8955
8956 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8957 && hevc->get_frame_dur) {
8958 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8959
8960 if (pts_diff < 0) {
8961 hevc->pts_mode_switching_count++;
8962 hevc->pts_mode_recovery_count = 0;
8963
8964 if (hevc->pts_mode_switching_count >=
8965 PTS_MODE_SWITCHING_THRESHOLD) {
8966 hevc->pts_mode =
8967 PTS_NONE_REF_USE_DURATION;
8968 hevc_print(hevc, 0,
8969 "HEVC: switch to n_d mode.\n");
8970 }
8971
8972 } else {
8973 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8974
8975 hevc->pts_mode_recovery_count++;
8976 if (hevc->pts_mode_recovery_count > p) {
8977 hevc->pts_mode_switching_count = 0;
8978 hevc->pts_mode_recovery_count = 0;
8979 }
8980 }
8981 }
8982
8983 if (vf->pts != 0)
8984 hevc->last_lookup_pts = vf->pts;
8985
8986 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8987 && (slice_type != 2))
8988 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8989 hevc->last_pts = vf->pts;
8990
8991 if (vf->pts_us64 != 0)
8992 hevc->last_lookup_pts_us64 = vf->pts_us64;
8993
8994 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8995 && (slice_type != 2)) {
8996 vf->pts_us64 =
8997 hevc->last_pts_us64 +
8998 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8999 }
9000 hevc->last_pts_us64 = vf->pts_us64;
9001 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
9002 hevc_print(hevc, 0,
9003 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
9004 vf->pts, vf->pts_us64);
9005 }
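/*
 * Illustrative note (not from the original source): assuming DUR2PTS()
 * rescales the 1/96000 s duration units to 90 kHz PTS ticks (x * 90 / 96),
 * the "* 100 / 9" above converts those ticks to microseconds, since one
 * 90 kHz tick is 1/90000 s = 100/9 us. E.g. a 30 fps frame_dur of 3200
 * becomes 3000 ticks, i.e. 33333 us between frames.
 */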
9006
9007 /*
9008 *vf->index:
9009 *(1) vf->type is VIDTYPE_PROGRESSIVE
9010 * and vf->canvas0Addr != vf->canvas1Addr,
9011 * vf->index[7:0] is the index of top pic
9012 * vf->index[15:8] is the index of bot pic
9013 *(2) other cases,
9014 * only vf->index[7:0] is used
9015 * vf->index[15:8] == 0xff
9016 */
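/*
 * Example (illustrative): a progressive frame held in decode buffer 5 gets
 * vf->index = 0xff05 from the assignment below, i.e. a valid top index in
 * the low byte and 0xff ("no pic") in the bottom-field byte.
 */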
9017 vf->index = 0xff00 | pic->index;
9018#if 1
9019/*SUPPORT_10BIT*/
9020 if (pic->double_write_mode & 0x10) {
9021 /* double write only */
9022 vf->compBodyAddr = 0;
9023 vf->compHeadAddr = 0;
9024 } else {
9025
9026 if (hevc->mmu_enable) {
9027 vf->compBodyAddr = 0;
9028 vf->compHeadAddr = pic->header_adr;
9029 } else {
9030 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
9031 vf->compHeadAddr = pic->mc_y_adr +
9032 pic->losless_comp_body_size;
9033 vf->mem_head_handle = NULL;
9034 }
9035
9036 /*head adr*/
9037 vf->canvas0Addr = vf->canvas1Addr = 0;
9038 }
9039 if (pic->double_write_mode) {
9040 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
9041 vf->type |= nv_order;
9042
9043 if ((pic->double_write_mode == 3) &&
9044 (!(IS_8K_SIZE(pic->width, pic->height)))) {
9045 vf->type |= VIDTYPE_COMPRESS;
9046 if (hevc->mmu_enable)
9047 vf->type |= VIDTYPE_SCATTER;
9048 }
9049#ifdef MULTI_INSTANCE_SUPPORT
9050 if (hevc->m_ins_flag &&
9051 (get_dbg_flag(hevc)
9052 & H265_CFG_CANVAS_IN_DECODE) == 0) {
9053 vf->canvas0Addr = vf->canvas1Addr = -1;
9054 vf->plane_num = 2;
9055 vf->canvas0_config[0] =
9056 pic->canvas_config[0];
9057 vf->canvas0_config[1] =
9058 pic->canvas_config[1];
9059
9060 vf->canvas1_config[0] =
9061 pic->canvas_config[0];
9062 vf->canvas1_config[1] =
9063 pic->canvas_config[1];
9064
9065 } else
9066#endif
9067 vf->canvas0Addr = vf->canvas1Addr
9068 = spec2canvas(pic);
9069 } else {
9070 vf->canvas0Addr = vf->canvas1Addr = 0;
9071 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
9072 if (hevc->mmu_enable)
9073 vf->type |= VIDTYPE_SCATTER;
9074 }
9075 vf->compWidth = pic->width;
9076 vf->compHeight = pic->height;
9077 update_vf_memhandle(hevc, vf, pic);
9078 switch (pic->bit_depth_luma) {
9079 case 9:
9080 vf->bitdepth = BITDEPTH_Y9;
9081 break;
9082 case 10:
9083 vf->bitdepth = BITDEPTH_Y10;
9084 break;
9085 default:
9086 vf->bitdepth = BITDEPTH_Y8;
9087 break;
9088 }
9089 switch (pic->bit_depth_chroma) {
9090 case 9:
9091 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
9092 break;
9093 case 10:
9094 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
9095 break;
9096 default:
9097 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
9098 break;
9099 }
9100 if ((vf->type & VIDTYPE_COMPRESS) == 0)
9101 vf->bitdepth =
9102 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
9103 if (pic->mem_saving_mode == 1)
9104 vf->bitdepth |= BITDEPTH_SAVING_MODE;
9105#else
9106 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
9107 vf->type |= nv_order;
9108 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
9109#endif
9110 set_frame_info(hevc, vf, pic);
9111 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
9112 /* hevc_print(hevc, 0,
9113 "aaa: %d/%d, %d/%d\n",
9114 vf->width,vf->height, pic->width, pic->height); */
9115 vf->width = pic->width;
9116 vf->height = pic->height;
9117
9118 if (force_w_h != 0) {
9119 vf->width = (force_w_h >> 16) & 0xffff;
9120 vf->height = force_w_h & 0xffff;
9121 }
9122 if (force_fps & 0x100) {
9123 u32 rate = force_fps & 0xff;
9124
9125 if (rate)
9126 vf->duration = 96000/rate;
9127 else
9128 vf->duration = 0;
9129 }
9130 if (force_fps & 0x200) {
9131 vf->pts = 0;
9132 vf->pts_us64 = 0;
9133 }
9134 /*
9135 * !!! to do ...
9136 * need to move the code below into get_new_pic();
9137 * hevc->xxx fields can only be used by the currently decoded pic
9138 */
9139 if (pic->conformance_window_flag &&
9140 (get_dbg_flag(hevc) &
9141 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
9142 unsigned int SubWidthC, SubHeightC;
9143
9144 switch (pic->chroma_format_idc) {
9145 case 1:
9146 SubWidthC = 2;
9147 SubHeightC = 2;
9148 break;
9149 case 2:
9150 SubWidthC = 2;
9151 SubHeightC = 1;
9152 break;
9153 default:
9154 SubWidthC = 1;
9155 SubHeightC = 1;
9156 break;
9157 }
9158 vf->width -= SubWidthC *
9159 (pic->conf_win_left_offset +
9160 pic->conf_win_right_offset);
9161 vf->height -= SubHeightC *
9162 (pic->conf_win_top_offset +
9163 pic->conf_win_bottom_offset);
9164
9165 vf->compWidth -= SubWidthC *
9166 (pic->conf_win_left_offset +
9167 pic->conf_win_right_offset);
9168 vf->compHeight -= SubHeightC *
9169 (pic->conf_win_top_offset +
9170 pic->conf_win_bottom_offset);
9171
9172 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
9173 hevc_print(hevc, 0,
9174 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
9175 pic->chroma_format_idc,
9176 pic->conf_win_left_offset,
9177 pic->conf_win_right_offset,
9178 pic->conf_win_top_offset,
9179 pic->conf_win_bottom_offset,
9180 vf->width, vf->height, vf->compWidth, vf->compHeight);
9181 }
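/*
 * Illustrative example (not from the original source): for 4:2:0 content
 * (chroma_format_idc == 1, SubWidthC == SubHeightC == 2) coded as 1920x1088
 * with conf_win_bottom_offset == 4 and the other offsets zero, the cropping
 * above yields 1920x1080: height -= 2 * (0 + 4) = 8.
 */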
9182
9183 vf->width = vf->width /
9184 get_double_write_ratio(hevc, pic->double_write_mode);
9185 vf->height = vf->height /
9186 get_double_write_ratio(hevc, pic->double_write_mode);
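/*
 * Reference for the pic_struct handling below (based on the HEVC pic_timing
 * SEI semantics; the mapping of each value to field output is an
 * interpretation of the code, not original commentary):
 *   0: frame             1: top field           2: bottom field
 *   3: top+bottom        4: bottom+top          5: top+bottom+top
 *   6: bottom+top+bottom 7: frame doubling      8: frame tripling
 *   9/10: top/bottom field paired with the previous field
 *   11/12: top/bottom field paired with the next field
 */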
9187#ifdef HEVC_PIC_STRUCT_SUPPORT
9188 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
9189 struct vframe_s *vf2;
9190
9191 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9192 hevc_print(hevc, 0,
9193 "pic_struct = %d index 0x%x\n",
9194 pic->pic_struct,
9195 pic->index);
9196
9197 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
9198 hevc_print(hevc, 0,
9199 "fatal error, no available buffer slot.");
9200 return -1;
9201 }
9202 pic->vf_ref = 2;
9203 vf->duration = vf->duration>>1;
9204 memcpy(vf2, vf, sizeof(struct vframe_s));
9205
9206 if (pic->pic_struct == 3) {
9207 vf->type = VIDTYPE_INTERLACE_TOP
9208 | nv_order;
9209 vf2->type = VIDTYPE_INTERLACE_BOTTOM
9210 | nv_order;
9211 } else {
9212 vf->type = VIDTYPE_INTERLACE_BOTTOM
9213 | nv_order;
9214 vf2->type = VIDTYPE_INTERLACE_TOP
9215 | nv_order;
9216 }
9217 hevc->vf_pre_count++;
9218 decoder_do_frame_check(vdec, vf);
9219 kfifo_put(&hevc->display_q,
9220 (const struct vframe_s *)vf);
9221 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9222 hevc->vf_pre_count++;
9223 kfifo_put(&hevc->display_q,
9224 (const struct vframe_s *)vf2);
9225 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
9226 } else if (pic->pic_struct == 5
9227 || pic->pic_struct == 6) {
9228 struct vframe_s *vf2, *vf3;
9229
9230 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9231 hevc_print(hevc, 0,
9232 "pic_struct = %d index 0x%x\n",
9233 pic->pic_struct,
9234 pic->index);
9235
9236 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
9237 hevc_print(hevc, 0,
9238 "fatal error, no available buffer slot.");
9239 return -1;
9240 }
9241 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
9242 hevc_print(hevc, 0,
9243 "fatal error, no available buffer slot.");
9244 return -1;
9245 }
9246 pic->vf_ref = 3;
9247 vf->duration = vf->duration/3;
9248 memcpy(vf2, vf, sizeof(struct vframe_s));
9249 memcpy(vf3, vf, sizeof(struct vframe_s));
9250
9251 if (pic->pic_struct == 5) {
9252 vf->type = VIDTYPE_INTERLACE_TOP
9253 | nv_order;
9254 vf2->type = VIDTYPE_INTERLACE_BOTTOM
9255 | nv_order;
9256 vf3->type = VIDTYPE_INTERLACE_TOP
9257 | nv_order;
9258 } else {
9259 vf->type = VIDTYPE_INTERLACE_BOTTOM
9260 | nv_order;
9261 vf2->type = VIDTYPE_INTERLACE_TOP
9262 | nv_order;
9263 vf3->type = VIDTYPE_INTERLACE_BOTTOM
9264 | nv_order;
9265 }
9266 hevc->vf_pre_count++;
9267 decoder_do_frame_check(vdec, vf);
9268 kfifo_put(&hevc->display_q,
9269 (const struct vframe_s *)vf);
9270 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9271 hevc->vf_pre_count++;
9272 kfifo_put(&hevc->display_q,
9273 (const struct vframe_s *)vf2);
9274 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
9275 hevc->vf_pre_count++;
9276 kfifo_put(&hevc->display_q,
9277 (const struct vframe_s *)vf3);
9278 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
9279
9280 } else if (pic->pic_struct == 9
9281 || pic->pic_struct == 10) {
9282 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9283 hevc_print(hevc, 0,
9284 "pic_struct = %d index 0x%x\n",
9285 pic->pic_struct,
9286 pic->index);
9287
9288 pic->vf_ref = 1;
9289 /* process previous pending vf*/
9290 process_pending_vframe(hevc,
9291 pic, (pic->pic_struct == 9));
9292
9293 decoder_do_frame_check(vdec, vf);
9294 /* process current vf */
9295 kfifo_put(&hevc->pending_q,
9296 (const struct vframe_s *)vf);
9297 vf->height <<= 1;
9298 if (pic->pic_struct == 9) {
9299 vf->type = VIDTYPE_INTERLACE_TOP
9300 | nv_order | VIDTYPE_VIU_FIELD;
9301 process_pending_vframe(hevc,
9302 hevc->pre_bot_pic, 0);
9303 } else {
9304 vf->type = VIDTYPE_INTERLACE_BOTTOM |
9305 nv_order | VIDTYPE_VIU_FIELD;
9306 vf->index = (pic->index << 8) | 0xff;
9307 process_pending_vframe(hevc,
9308 hevc->pre_top_pic, 1);
9309 }
9310
9311 if (hevc->vf_pre_count == 0)
9312 hevc->vf_pre_count++;
9313
9314 /**/
9315 if (pic->pic_struct == 9)
9316 hevc->pre_top_pic = pic;
9317 else
9318 hevc->pre_bot_pic = pic;
9319
9320 } else if (pic->pic_struct == 11
9321 || pic->pic_struct == 12) {
9322 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9323 hevc_print(hevc, 0,
9324 "pic_struct = %d index 0x%x\n",
9325 pic->pic_struct,
9326 pic->index);
9327 pic->vf_ref = 1;
9328 /* process previous pending vf*/
9329 process_pending_vframe(hevc, pic,
9330 (pic->pic_struct == 11));
9331
9332 /* put current into pending q */
9333 vf->height <<= 1;
9334 if (pic->pic_struct == 11)
9335 vf->type = VIDTYPE_INTERLACE_TOP |
9336 nv_order | VIDTYPE_VIU_FIELD;
9337 else {
9338 vf->type = VIDTYPE_INTERLACE_BOTTOM |
9339 nv_order | VIDTYPE_VIU_FIELD;
9340 vf->index = (pic->index << 8) | 0xff;
9341 }
9342 decoder_do_frame_check(vdec, vf);
9343 kfifo_put(&hevc->pending_q,
9344 (const struct vframe_s *)vf);
9345 if (hevc->vf_pre_count == 0)
9346 hevc->vf_pre_count++;
9347
9348 /**/
9349 if (pic->pic_struct == 11)
9350 hevc->pre_top_pic = pic;
9351 else
9352 hevc->pre_bot_pic = pic;
9353
9354 } else {
9355 pic->vf_ref = 1;
9356
9357 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9358 hevc_print(hevc, 0,
9359 "pic_struct = %d index 0x%x\n",
9360 pic->pic_struct,
9361 pic->index);
9362
9363 switch (pic->pic_struct) {
9364 case 7:
9365 vf->duration <<= 1;
9366 break;
9367 case 8:
9368 vf->duration = vf->duration * 3;
9369 break;
9370 case 1:
9371 vf->height <<= 1;
9372 vf->type = VIDTYPE_INTERLACE_TOP |
9373 nv_order | VIDTYPE_VIU_FIELD;
9374 process_pending_vframe(hevc, pic, 1);
9375 hevc->pre_top_pic = pic;
9376 break;
9377 case 2:
9378 vf->height <<= 1;
9379 vf->type = VIDTYPE_INTERLACE_BOTTOM
9380 | nv_order
9381 | VIDTYPE_VIU_FIELD;
9382 process_pending_vframe(hevc, pic, 0);
9383 hevc->pre_bot_pic = pic;
9384 break;
9385 }
9386 hevc->vf_pre_count++;
9387 decoder_do_frame_check(vdec, vf);
9388 kfifo_put(&hevc->display_q,
9389 (const struct vframe_s *)vf);
9390 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9391 }
9392#else
9393 vf->type_original = vf->type;
9394 pic->vf_ref = 1;
9395 hevc->vf_pre_count++;
9396 decoder_do_frame_check(vdec, vf);
9397 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
9398 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9399#endif
9400 /*count info*/
9401 vdec_count_info(hevc->gvs, 0, stream_offset);
9402 hevc_update_gvs(hevc);
9403 memcpy(&tmp4x, hevc->gvs, sizeof(struct vdec_info));
9404 tmp4x.bit_depth_luma = hevc->bit_depth_luma;
9405 tmp4x.bit_depth_chroma = hevc->bit_depth_chroma;
9406 tmp4x.double_write_mode = get_double_write_mode(hevc);
9407 vdec_fill_vdec_frame(vdec, &hevc->vframe_qos, &tmp4x, vf, pic->hw_decode_time);
9408 vdec->vdec_fps_detec(vdec->id);
9409 hevc_print(hevc, H265_DEBUG_BUFMGR,
9410 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
9411 __func__, vf->type, vf->index,
9412 get_pic_poc(hevc, vf->index & 0xff),
9413 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
9414 vf->pts, vf->pts_us64,
9415 vf->duration);
9416
9417 /*if (pic->vf_ref == hevc->vf_pre_count) {*/
9418 if (hevc->kpi_first_i_decoded == 0) {
9419 hevc->kpi_first_i_decoded = 1;
9420 pr_debug("[vdec_kpi][%s] First I frame decoded.\n",
9421 __func__);
9422 }
9423
9424 if (without_display_mode == 0) {
9425 vf_notify_receiver(hevc->provider_name,
9426 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9427 }
9428 else
9429 vh265_vf_put(vh265_vf_get(vdec), vdec);
9430 }
9431
9432 return 0;
9433}
9434
9435static int notify_v4l_eos(struct vdec_s *vdec)
9436{
9437 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
9438 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
9439 struct vframe_s *vf = &hw->vframe_dummy;
9440 struct vdec_v4l2_buffer *fb = NULL;
9441 int index = INVALID_IDX;
9442 ulong expires;
9443
9444 if (hw->eos) {
9445 if (hw->is_used_v4l) {
9446 expires = jiffies + msecs_to_jiffies(2000);
9447 while (INVALID_IDX == (index = get_free_buf_idx(hw))) {
9448 if (time_after(jiffies, expires) ||
9449 v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx))
9450 break;
9451 }
9452
9453 if (index == INVALID_IDX) {
9454 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb) < 0) {
9455 pr_err("[%d] EOS get free buff fail.\n", ctx->id);
9456 return -1;
9457 }
9458 }
9459 }
9460
9461 vf->type |= VIDTYPE_V4L_EOS;
9462 vf->timestamp = ULONG_MAX;
9463 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
9464 vf->v4l_mem_handle = (index == INVALID_IDX) ? (ulong)fb :
9465 hw->m_BUF[index].v4l_ref_buf_addr;
9466 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
9467 vf_notify_receiver(vdec->vf_provider_name,
9468 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9469
9470 pr_info("[%d] H265 EOS notify.\n", (hw->is_used_v4l)?ctx->id:vdec->id);
9471 }
9472
9473 return 0;
9474}
9475
9476static void process_nal_sei(struct hevc_state_s *hevc,
9477 int payload_type, int payload_size)
9478{
9479 unsigned short data;
9480
9481 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9482 hevc_print(hevc, 0,
9483 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
9484 payload_type, payload_size);
9485
9486 if (payload_type == 137) {
9487 int i, j;
9488 /* MASTERING_DISPLAY_COLOUR_VOLUME */
9489 if (payload_size >= 24) {
9490 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9491 hevc_print(hevc, 0,
9492 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
9493 for (i = 0; i < 3; i++) {
9494 for (j = 0; j < 2; j++) {
9495 data =
9496 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9497 hevc->primaries[i][j] = data;
9498 WRITE_HREG(HEVC_SHIFT_COMMAND,
9499 (1<<7)|16);
9500 if (get_dbg_flag(hevc) &
9501 H265_DEBUG_PRINT_SEI)
9502 hevc_print(hevc, 0,
9503 "\t\tprimaries[%1d][%1d] = %04x\n",
9504 i, j, hevc->primaries[i][j]);
9505 }
9506 }
9507 for (i = 0; i < 2; i++) {
9508 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9509 hevc->white_point[i] = data;
9510 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
9511 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9512 hevc_print(hevc, 0,
9513 "\t\twhite_point[%1d] = %04x\n",
9514 i, hevc->white_point[i]);
9515 }
9516 for (i = 0; i < 2; i++) {
9517 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9518 hevc->luminance[i] = data << 16;
9519 WRITE_HREG(HEVC_SHIFT_COMMAND,
9520 (1<<7)|16);
9521 data =
9522 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9523 hevc->luminance[i] |= data;
9524 WRITE_HREG(HEVC_SHIFT_COMMAND,
9525 (1<<7)|16);
9526 if (get_dbg_flag(hevc) &
9527 H265_DEBUG_PRINT_SEI)
9528 hevc_print(hevc, 0,
9529 "\t\tluminance[%1d] = %08x\n",
9530 i, hevc->luminance[i]);
9531 }
9532 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
9533 }
9534 payload_size -= 24;
9535 while (payload_size > 0) {
9536 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
9537 payload_size--;
9538 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
9539 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
9540 }
9541 }
9542}
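/*
 * Illustrative note (inferred from the register usage above, not original
 * commentary): HEVC_SHIFTED_DATA exposes the next bits of the raw SEI
 * payload in its upper half-word, and writing (1 << 7) | n to
 * HEVC_SHIFT_COMMAND advances the ucode bitstream shifter by n bits (16 for
 * each field read, 8 for the trailing skip loop). The 24-byte minimum
 * matches the mastering-display payload: 3x2 16-bit primaries + 2 16-bit
 * white point values + 2 32-bit luminance values = 12 + 4 + 8 bytes.
 */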
9543
9544static int hevc_recover(struct hevc_state_s *hevc)
9545{
9546 int ret = -1;
9547 u32 rem;
9548 u64 shift_byte_count64;
9549 unsigned int hevc_shift_byte_count;
9550 unsigned int hevc_stream_start_addr;
9551 unsigned int hevc_stream_end_addr;
9552 unsigned int hevc_stream_rd_ptr;
9553 unsigned int hevc_stream_wr_ptr;
9554 unsigned int hevc_stream_control;
9555 unsigned int hevc_stream_fifo_ctl;
9556 unsigned int hevc_stream_buf_size;
9557 struct vdec_s *vdec = hw_to_vdec(hevc);
9558
9559 mutex_lock(&vh265_mutex);
9560#if 0
9561 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9562 int ii;
9563
9564 for (ii = 0; ii < 4; ii++)
9565 hevc_print(hevc, 0,
9566 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9567 if (((i + ii) & 0xf) == 0)
9568 hevc_print(hevc, 0, "\n");
9569 }
9570#endif
9571#define ES_VID_MAN_RD_PTR (1<<0)
9572 if (!hevc->init_flag) {
9573 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
9574 mutex_unlock(&vh265_mutex);
9575 return ret;
9576 }
9577 amhevc_stop();
9578 msleep(20);
9579 ret = 0;
9580 /* reset */
9581 if (vdec_stream_based(vdec)) {
9582 STBUF_WRITE(&vdec->vbuf, set_rp,
9583 READ_VREG(HEVC_STREAM_RD_PTR));
9584
9585 if (!vdec->vbuf.no_parser)
9586 SET_PARSER_REG_MASK(PARSER_ES_CONTROL,
9587 ES_VID_MAN_RD_PTR);
9588 }
9589
9590 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
9591 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
9592 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
9593 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
9594 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
9595 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
9596 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
9597
9598 /* The HEVC stream buffer will be reset and restarted
9599 * from the current hevc_stream_rd_ptr position
9600 */
9601 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
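	/* The low 32 bits of the count live in HEVC_SHIFT_BYTE_COUNT and can
	 * wrap: a 1->0 transition of bit 31 bumps the high word. The 64-bit
	 * count is then rounded down to a whole number of buffer laps and
	 * re-aligned to the read pointer offset, adding one extra lap when
	 * the read pointer sits behind the old position within its lap.
	 */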
9602 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9603 if ((hevc->shift_byte_count_lo & (1 << 31))
9604 && ((hevc_shift_byte_count & (1 << 31)) == 0))
9605 hevc->shift_byte_count_hi++;
9606
9607 hevc->shift_byte_count_lo = hevc_shift_byte_count;
9608 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9609 hevc->shift_byte_count_lo;
9610 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9611 shift_byte_count64 -= rem;
9612 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9613
9614 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9615 shift_byte_count64 += hevc_stream_buf_size;
9616
9617 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9618 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
9619
9620 WRITE_VREG(DOS_SW_RESET3,
9621 /* (1<<2)| */
9622 (1 << 3) | (1 << 4) | (1 << 8) |
9623 (1 << 11) | (1 << 12) | (1 << 14)
9624 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9625 WRITE_VREG(DOS_SW_RESET3, 0);
9626
9627 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9628 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9629 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9630 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9631 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9632 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9633 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9634
9635 hevc_config_work_space_hw(hevc);
9636 decoder_hw_reset();
9637
9638 hevc->have_vps = 0;
9639 hevc->have_sps = 0;
9640 hevc->have_pps = 0;
9641
9642 hevc->have_valid_start_slice = 0;
9643
9644 if (get_double_write_mode(hevc) & 0x10)
9645 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9646 0x1 << 31 /* Enable NV21 reference read mode for MC */
9647 );
9648
9649 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9650 /* clear mailbox interrupt */
9651 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9652 /* enable mailbox interrupt */
9653 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9654 /* disable PSCALE for hardware sharing */
9655 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9656
9657 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9658
9659 WRITE_VREG(DEBUG_REG1, 0x0);
9660
9661 if ((error_handle_policy & 1) == 0) {
9662 if ((error_handle_policy & 4) == 0) {
9663 /* ucode auto mode, and do not check vps/sps/pps/idr */
9664 WRITE_VREG(NAL_SEARCH_CTL,
9665 0xc);
9666 } else {
9667 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9668 }
9669 } else {
9670 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9671 }
9672
9673 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9674 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9675 WRITE_VREG(NAL_SEARCH_CTL,
9676 READ_VREG(NAL_SEARCH_CTL)
9677 | ((parser_sei_enable & 0x7) << 17));
9678/*#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION*/
9679 WRITE_VREG(NAL_SEARCH_CTL,
9680 READ_VREG(NAL_SEARCH_CTL) |
9681 ((parser_dolby_vision_enable & 0x1) << 20));
9682/*#endif*/
9683 config_decode_mode(hevc);
9684 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9685
9686 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9687 /* amhevc_disable(); */
9688 /* return -EBUSY; */
9689 /* } */
9690#if 0
9691 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9692 int ii;
9693
9694 for (ii = 0; ii < 4; ii++) {
9695 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9696 hevc_print(hevc, 0,
9697 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9698 }
9699 if (((i + ii) & 0xf) == 0)
9700 hevc_print(hevc, 0, "\n");
9701 }
9702#endif
9703 init_pic_list_hw(hevc);
9704
9705 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9706 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9707
9708#ifdef SWAP_HEVC_UCODE
9709 if (!tee_enabled() && hevc->is_swap &&
9710 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9711 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9712 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9713 }
9714#endif
9715 amhevc_start();
9716
9717 /* skip, search next start code */
9718 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9719 hevc->skip_flag = 1;
9720#ifdef ERROR_HANDLE_DEBUG
9721 if (dbg_nal_skip_count & 0x20000) {
9722 dbg_nal_skip_count &= ~0x20000;
9723 mutex_unlock(&vh265_mutex);
9724 return ret;
9725 }
9726#endif
9727 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9728 /* Interrupt Amrisc to execute */
9729 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9730#ifdef MULTI_INSTANCE_SUPPORT
9731 if (!hevc->m_ins_flag)
9732#endif
9733 hevc->first_pic_after_recover = 1;
9734 mutex_unlock(&vh265_mutex);
9735 return ret;
9736}
9737
9738static void dump_aux_buf(struct hevc_state_s *hevc)
9739{
9740 int i;
9741 unsigned short *aux_adr =
9742 (unsigned short *)
9743 hevc->aux_addr;
9744 unsigned int aux_size =
9745 (READ_VREG(HEVC_AUX_DATA_SIZE)
9746 >> 16) << 4;
9747
9748 if (hevc->prefix_aux_size > 0) {
9749 hevc_print(hevc, 0,
9750 "prefix aux: (size %d)\n",
9751 aux_size);
9752 for (i = 0; i <
9753 (aux_size >> 1); i++) {
9754 hevc_print_cont(hevc, 0,
9755 "%04x ",
9756 *(aux_adr + i));
9757 if (((i + 1) & 0xf)
9758 == 0)
9759 hevc_print_cont(hevc,
9760 0, "\n");
9761 }
9762 }
9763 if (hevc->suffix_aux_size > 0) {
9764 aux_adr = (unsigned short *)
9765 (hevc->aux_addr +
9766 hevc->prefix_aux_size);
9767 aux_size =
9768 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9769 << 4;
9770 hevc_print(hevc, 0,
9771 "suffix aux: (size %d)\n",
9772 aux_size);
9773 for (i = 0; i <
9774 (aux_size >> 1); i++) {
9775 hevc_print_cont(hevc, 0,
9776 "%04x ", *(aux_adr + i));
9777 if (((i + 1) & 0xf) == 0)
9778 hevc_print_cont(hevc, 0, "\n");
9779 }
9780 }
9781}
9782
9783#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9784static void dolby_get_meta(struct hevc_state_s *hevc)
9785{
9786 struct vdec_s *vdec = hw_to_vdec(hevc);
9787
9788 if (get_dbg_flag(hevc) &
9789 H265_DEBUG_BUFMGR_MORE)
9790 dump_aux_buf(hevc);
9791 if (vdec->dolby_meta_with_el || vdec->slave) {
9792 set_aux_data(hevc,
9793 hevc->cur_pic, 0, 0);
9794 } else if (vdec->master) {
9795 struct hevc_state_s *hevc_ba =
9796 (struct hevc_state_s *)
9797 vdec->master->private;
9798 /*do not use hevc_ba*/
9799 set_aux_data(hevc,
9800 hevc_ba->cur_pic,
9801 0, 1);
9802 set_aux_data(hevc,
9803 hevc->cur_pic, 0, 2);
9804 } else if (vdec_frame_based(vdec)) {
9805 set_aux_data(hevc,
9806 hevc->cur_pic, 1, 0);
9807 }
9808}
9809#endif
9810
9811static void read_decode_info(struct hevc_state_s *hevc)
9812{
9813 uint32_t decode_info =
9814 READ_HREG(HEVC_DECODE_INFO);
9815 hevc->start_decoding_flag |=
9816 (decode_info & 0xff);
9817 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9818}
9819
9820static int vh265_get_ps_info(struct hevc_state_s *hevc, int width, int height, struct aml_vdec_ps_infos *ps)
9821{
9822 int dw_mode = v4l_parser_get_double_write_mode(hevc, width, height);
9823
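	/* Note: the geometry reported to the v4l2 layer is divided by the
	 * double-write ratio, so the client presumably sees the size of the
	 * double-write output rather than of the full compressed frame
	 * (get_double_write_ratio() is defined elsewhere in this file).
	 */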
9824 ps->visible_width = width / get_double_write_ratio(hevc, dw_mode);
9825 ps->visible_height = height / get_double_write_ratio(hevc, dw_mode);
9826 ps->coded_width = ALIGN(width, 32) / get_double_write_ratio(hevc, dw_mode);
9827 ps->coded_height = ALIGN(height, 32) / get_double_write_ratio(hevc, dw_mode);
9828 ps->dpb_size = v4l_parser_work_pic_num(hevc);
9829
9830 return 0;
9831}
9832
9833static int v4l_res_change(struct hevc_state_s *hevc, union param_u *rpm_param)
9834{
9835 struct aml_vcodec_ctx *ctx =
9836 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
9837 int ret = 0;
9838
9839 if (ctx->param_sets_from_ucode &&
9840 hevc->res_ch_flag == 0) {
9841 struct aml_vdec_ps_infos ps;
9842 int width = rpm_param->p.pic_width_in_luma_samples;
9843 int height = rpm_param->p.pic_height_in_luma_samples;
9844 if ((hevc->pic_w != 0 &&
9845 hevc->pic_h != 0) &&
9846 (hevc->pic_w != width ||
9847 hevc->pic_h != height)) {
9848 hevc_print(hevc, 0,
9849 "v4l_res_change Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
9850 hevc->pic_w, hevc->pic_h,
9851 width,
9852 height,
9853 hevc->interlace_flag);
9854
9855 vh265_get_ps_info(hevc, width, height, &ps);
9856 vdec_v4l_set_ps_infos(ctx, &ps);
9857 vdec_v4l_res_ch_event(ctx);
9858 hevc->v4l_params_parsed = false;
9859 hevc->res_ch_flag = 1;
9860 hevc->eos = 1;
9861 flush_output(hevc, NULL);
9862 //del_timer_sync(&hevc->timer);
9863 notify_v4l_eos(hw_to_vdec(hevc));
9864
9865 ret = 1;
9866 }
9867 }
9868
9869 return ret;
9870}
9871
9872static int hevc_skip_nal(struct hevc_state_s *hevc)
9873{
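	/* Appears to be a workaround for a particular 160x96 stream decoded
	 * with double_write_mode 0x10: the first NALs are skipped before
	 * decoding starts (skip_nal_count of them on TXLX and older chips,
	 * a single one on newer chips).
	 */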
9874 if ((hevc->pic_h == 96) && (hevc->pic_w == 160) &&
9875 (get_double_write_mode(hevc) == 0x10)) {
9876 if (get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_TXLX) {
9877 if (hevc->skip_nal_count < skip_nal_count)
9878 return 1;
9879 } else {
9880 if (hevc->skip_nal_count < 1)
9881 return 1;
9882 }
9883 }
9884 return 0;
9885}
9886
9887static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9888{
9889 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9890 unsigned int dec_status = hevc->dec_status;
9891 int i, ret;
9892
9893 struct vdec_s *vdec = hw_to_vdec(hevc);
9894
9895 if (hevc->eos)
9896 return IRQ_HANDLED;
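	/* error_handle_policy bits used below: with bit0 clear the ucode is
	 * told to skip error_skip_nal_count NALs by itself through
	 * NAL_SEARCH_CTL, with bit0 set the driver counts and discards them;
	 * with bit1 clear VPS/SPS/PPS are treated as already seen after a
	 * recovery; with bit4 clear a partially decoded current picture is
	 * marked as an error.
	 */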
9897 if (
9898#ifdef MULTI_INSTANCE_SUPPORT
9899 (!hevc->m_ins_flag) &&
9900#endif
9901 hevc->error_flag == 1) {
9902 if ((error_handle_policy & 0x10) == 0) {
9903 if (hevc->cur_pic) {
9904 int current_lcu_idx =
9905 READ_VREG(HEVC_PARSER_LCU_START)
9906 & 0xffffff;
9907 if (current_lcu_idx <
9908 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9909 hevc->cur_pic->error_mark = 1;
9910
9911 }
9912 }
9913 if ((error_handle_policy & 1) == 0) {
9914 hevc->error_skip_nal_count = 1;
9915 /* manually search NALs: skip error_skip_nal_count
9916 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9917 */
9918 WRITE_VREG(NAL_SEARCH_CTL,
9919 (error_skip_nal_count << 4) | 0x1);
9920 } else {
9921 hevc->error_skip_nal_count = error_skip_nal_count;
9922 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9923 }
9924 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9925#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9926 || vdec->master
9927 || vdec->slave
9928#endif
9929 ) {
9930 WRITE_VREG(NAL_SEARCH_CTL,
9931 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9932 }
9933 WRITE_VREG(NAL_SEARCH_CTL,
9934 READ_VREG(NAL_SEARCH_CTL)
9935 | ((parser_sei_enable & 0x7) << 17));
9936/*#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION*/
9937 WRITE_VREG(NAL_SEARCH_CTL,
9938 READ_VREG(NAL_SEARCH_CTL) |
9939 ((parser_dolby_vision_enable & 0x1) << 20));
9940/*#endif*/
9941 config_decode_mode(hevc);
9942 /* search new nal */
9943 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9945 /* Interrupt Amrisc to execute */
9945 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9946
9947 /* hevc_print(hevc, 0,
9948 *"%s: error handle\n", __func__);
9949 */
9950 hevc->error_flag = 2;
9951 return IRQ_HANDLED;
9952 } else if (
9953#ifdef MULTI_INSTANCE_SUPPORT
9954 (!hevc->m_ins_flag) &&
9955#endif
9956 hevc->error_flag == 3) {
9957 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9958 hevc_recover(hevc);
9959 hevc->error_flag = 0;
9960
9961 if ((error_handle_policy & 0x10) == 0) {
9962 if (hevc->cur_pic) {
9963 int current_lcu_idx =
9964 READ_VREG(HEVC_PARSER_LCU_START)
9965 & 0xffffff;
9966 if (current_lcu_idx <
9967 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9968 hevc->cur_pic->error_mark = 1;
9969
9970 }
9971 }
9972 if ((error_handle_policy & 1) == 0) {
9973 /* need to skip some data when
9974 * error_flag 3 is triggered,
9975 */
9976 /* to avoid hevc_recover() being called
9977 * many times at the same bitstream position
9978 */
9979 hevc->error_skip_nal_count = 1;
9980 /* manually search NALs: skip error_skip_nal_count
9981 * NALs and then trigger the HEVC_NAL_SEARCH_DONE irq
9982 */
9983 WRITE_VREG(NAL_SEARCH_CTL,
9984 (error_skip_nal_count << 4) | 0x1);
9985 }
9986
9987 if ((error_handle_policy & 0x2) == 0) {
9988 hevc->have_vps = 1;
9989 hevc->have_sps = 1;
9990 hevc->have_pps = 1;
9991 }
9992 return IRQ_HANDLED;
9993 }
9994 if (!hevc->m_ins_flag) {
9995 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9996 if ((hevc->shift_byte_count_lo & (1 << 31))
9997 && ((i & (1 << 31)) == 0))
9998 hevc->shift_byte_count_hi++;
9999 hevc->shift_byte_count_lo = i;
10000 }
10001#ifdef MULTI_INSTANCE_SUPPORT
10002 mutex_lock(&hevc->chunks_mutex);
10003 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
10004 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
10005 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
10006 && (hevc->chunk)) {
10007 hevc->cur_pic->pts = hevc->chunk->pts;
10008 hevc->cur_pic->pts64 = hevc->chunk->pts64;
10009 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
10010 }
10011 mutex_unlock(&hevc->chunks_mutex);
10012
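	/* Buffer empty while decoding: for frame based input the current
	 * picture is finalized as if complete (fall through to pic_done);
	 * for stream based input the hardware is stopped and either the same
	 * data is resent (DEC_RESULT_AGAIN) or more data is requested
	 * (DEC_RESULT_GET_DATA), depending on data_resend_policy and
	 * dual-layer use.
	 */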
10013 if (dec_status == HEVC_DECODE_BUFEMPTY ||
10014 dec_status == HEVC_DECODE_BUFEMPTY2) {
10015 if (hevc->m_ins_flag) {
10016 read_decode_info(hevc);
10017 if (vdec_frame_based(hw_to_vdec(hevc))) {
10018 hevc->empty_flag = 1;
10019 /*suffix sei or dv meta*/
10020 set_aux_data(hevc, hevc->cur_pic, 1, 0);
10021 goto pic_done;
10022 } else {
10023 if (
10024#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10025 vdec->master ||
10026 vdec->slave ||
10027#endif
10028 (data_resend_policy & 0x1)) {
10029 hevc->dec_result = DEC_RESULT_AGAIN;
10030 amhevc_stop();
10031 restore_decode_state(hevc);
10032 } else
10033 hevc->dec_result = DEC_RESULT_GET_DATA;
10034 }
10035 reset_process_time(hevc);
10036 vdec_schedule_work(&hevc->work);
10037 }
10038 return IRQ_HANDLED;
10039 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
10040 (dec_status == HEVC_NAL_DECODE_DONE)
10041 ) {
10042 if (hevc->m_ins_flag) {
10043 read_decode_info(hevc);
10044 if (vdec_frame_based(hw_to_vdec(hevc))) {
10045 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
10046 hevc->empty_flag = 1;
10047 /*suffix sei or dv meta*/
10048 set_aux_data(hevc, hevc->cur_pic, 1, 0);
10049 goto pic_done;
10050 } else {
10051 hevc->dec_result = DEC_RESULT_AGAIN;
10052 amhevc_stop();
10053 restore_decode_state(hevc);
10054 }
10055
10056 reset_process_time(hevc);
10057 vdec_schedule_work(&hevc->work);
10058 }
10059
10060 return IRQ_HANDLED;
10061 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
10062 if (hevc->m_ins_flag) {
10063 struct PIC_s *pic;
10064 struct PIC_s *pic_display;
10065 int decoded_poc;
10066
10067 if (vdec->mvfrm)
10068 vdec->mvfrm->hw_decode_time =
10069 local_clock() - vdec->mvfrm->hw_decode_start;
10070#ifdef DETREFILL_ENABLE
10071 if (hevc->is_swap &&
10072 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10073 if (hevc->detbuf_adr_virt && hevc->delrefill_check
10074 && READ_VREG(HEVC_SAO_DBG_MODE0))
10075 hevc->delrefill_check = 2;
10076 }
10077#endif
10078 hevc->empty_flag = 0;
10079pic_done:
10080 if (input_frame_based(hw_to_vdec(hevc)) &&
10081 frmbase_cont_bitlevel != 0 &&
10082 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
10083 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
10084 > frmbase_cont_bitlevel)) {
10085 /* handle the case: multiple pictures in one packet */
10086 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10087 "%s has more data, index=%d, size=0x%x, shiftcnt=0x%x\n",
10088 __func__,
10089 hevc->decode_idx, hevc->decode_size,
10090 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
10091 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10092 start_process_time(hevc);
10093 return IRQ_HANDLED;
10094 }
10095
10096 read_decode_info(hevc);
10097 get_picture_qos_info(hevc);
10098#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10099 hevc->start_parser_type = 0;
10100 hevc->switch_dvlayer_flag = 0;
10101#endif
10102 hevc->decoded_poc = hevc->curr_POC;
10103 hevc->decoding_pic = NULL;
10104 hevc->dec_result = DEC_RESULT_DONE;
10105#ifdef DETREFILL_ENABLE
10106 if (hevc->is_swap &&
10107 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
10108 if (hevc->delrefill_check != 2)
10109#endif
10110
10111 amhevc_stop();
10112
10113 reset_process_time(hevc);
10114
10115 if (hevc->vf_pre_count == 0 || hevc->ip_mode) {
10116 decoded_poc = hevc->curr_POC;
10117 pic = get_pic_by_POC(hevc, decoded_poc);
10118 if (pic && (pic->POC != INVALID_POC)) {
10119 /*PB skip control */
10120 if (pic->error_mark == 0
10121 && hevc->PB_skip_mode == 1) {
10122 /* start decoding after
10123 * first I
10124 */
10125 hevc->ignore_bufmgr_error |= 0x1;
10126 }
10127 if (hevc->ignore_bufmgr_error & 1) {
10128 if (hevc->PB_skip_count_after_decoding > 0) {
10129 hevc->PB_skip_count_after_decoding--;
10130 } else {
10131 /* start displaying */
10132 hevc->ignore_bufmgr_error |= 0x2;
10133 }
10134 }
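				/* With the frame MMU in use, the high bits of
				 * HEVC_SAO_MMU_STATUS give the number of 4K
				 * pages the compressed frame actually used;
				 * the unused tail of the scatter allocation is
				 * returned to the mmu_box right after decoding
				 * (single-instance mode only here).
				 */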
10135 if (hevc->mmu_enable
10136 && ((hevc->double_write_mode & 0x10) == 0)) {
10137 if (!hevc->m_ins_flag) {
10138 hevc->used_4k_num =
10139 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
10140
10141 if ((!is_skip_decoding(hevc, pic)) &&
10142 (hevc->used_4k_num >= 0) &&
10143 (hevc->cur_pic->scatter_alloc
10144 == 1)) {
10145 hevc_print(hevc,
10146 H265_DEBUG_BUFMGR_MORE,
10147 "%s pic index %d scatter_alloc %d page_start %d\n",
10148 "decoder_mmu_box_free_idx_tail",
10149 hevc->cur_pic->index,
10150 hevc->cur_pic->scatter_alloc,
10151 hevc->used_4k_num);
10152 decoder_mmu_box_free_idx_tail(
10153 hevc->mmu_box,
10154 hevc->cur_pic->index,
10155 hevc->used_4k_num);
10156 hevc->cur_pic->scatter_alloc
10157 = 2;
10158 }
10159 hevc->used_4k_num = -1;
10160 }
10161 }
10162
10163 pic->output_mark = 1;
10164 pic->recon_mark = 1;
10165 if (vdec->mvfrm) {
10166 pic->frame_size =
10167 vdec->mvfrm->frame_size;
10168 pic->hw_decode_time =
10169 (u32)vdec->mvfrm->hw_decode_time;
10170 }
10171 }
10172 /* Detect whether the first frame has an over-decode error */
10173 if ((!vdec_dual(vdec)) &&
10174 hevc->empty_flag == 0) {
10175 hevc->over_decode =
10176 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
10177 if (hevc->over_decode)
10178 hevc_print(hevc, 0,
10179 "!!!Over decode %d\n", __LINE__);
10180 }
10181 check_pic_decoded_error(hevc,
10182 READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff);
10183 if (hevc->cur_pic != NULL &&
10184 (READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff) == 0
10185 && (hevc->lcu_x_num * hevc->lcu_y_num != 1))
10186 hevc->cur_pic->error_mark = 1;
10187#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10188force_output:
10189#endif
10190 pic_display = output_pic(hevc, 1);
10191 if (pic_display) {
10192 if ((pic_display->error_mark &&
10193 ((hevc->ignore_bufmgr_error &
10194 0x2) == 0))
10195 || (get_dbg_flag(hevc) &
10196 H265_DEBUG_DISPLAY_CUR_FRAME)
10197 || (get_dbg_flag(hevc) &
10198 H265_DEBUG_NO_DISPLAY)) {
10199 pic_display->output_ready = 0;
10200 if (get_dbg_flag(hevc) &
10201 H265_DEBUG_BUFMGR) {
10202 hevc_print(hevc, 0,
10203 "[BM] Display: POC %d, ",
10204 pic_display->POC);
10205 hevc_print_cont(hevc, 0,
10206 "decoding index %d ==> ",
10207 pic_display->
10208 decode_idx);
10209 hevc_print_cont(hevc, 0,
10210 "Debug or err, recycle it\n");
10211 }
10212 } else {
10213 if ((pic_display->
10214 slice_type != 2) && !pic_display->ip_mode) {
10215 pic_display->output_ready = 0;
10216 } else {
10217 prepare_display_buf
10218 (hevc,
10219 pic_display);
10220 hevc->first_pic_flag = 1;
10221 }
10222 }
10223 }
10224 }
10225
10226 vdec_schedule_work(&hevc->work);
10227 }
10228
10229 return IRQ_HANDLED;
10230#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10231 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
10232 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
10233 if (hevc->m_ins_flag) {
10234 unsigned char next_parser_type =
10235 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
10236 read_decode_info(hevc);
10237
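			/* Dolby Vision dual-layer handoff: when the ucode
			 * finds a NAL belonging to the other layer, the next
			 * parser type is handed to the peer (base/enhancement)
			 * instance and switch_dvlayer_flag is set; if no
			 * switch happens for more than dolby_el_flush_th
			 * pictures, the enhancement-layer instance is assumed
			 * stalled and is flushed.
			 */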
10238 if (vdec->slave &&
10239 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
10240 /*cur is base, found enhance*/
10241 struct hevc_state_s *hevc_el =
10242 (struct hevc_state_s *)
10243 vdec->slave->private;
10244 hevc->switch_dvlayer_flag = 1;
10245 hevc->no_switch_dvlayer_count = 0;
10246 hevc_el->start_parser_type =
10247 next_parser_type;
10248 hevc_print(hevc, H265_DEBUG_DV,
10249 "switch (poc %d) to el\n",
10250 hevc->cur_pic ?
10251 hevc->cur_pic->POC :
10252 INVALID_POC);
10253 } else if (vdec->master &&
10254 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
10255 /*cur is enhance, found base*/
10256 struct hevc_state_s *hevc_ba =
10257 (struct hevc_state_s *)
10258 vdec->master->private;
10259 hevc->switch_dvlayer_flag = 1;
10260 hevc->no_switch_dvlayer_count = 0;
10261 hevc_ba->start_parser_type =
10262 next_parser_type;
10263 hevc_print(hevc, H265_DEBUG_DV,
10264 "switch (poc %d) to bl\n",
10265 hevc->cur_pic ?
10266 hevc->cur_pic->POC :
10267 INVALID_POC);
10268 } else {
10269 hevc->switch_dvlayer_flag = 0;
10270 hevc->start_parser_type =
10271 next_parser_type;
10272 hevc->no_switch_dvlayer_count++;
10273 hevc_print(hevc, H265_DEBUG_DV,
10274 "%s: no_switch_dvlayer_count = %d\n",
10275 vdec->master ? "el" : "bl",
10276 hevc->no_switch_dvlayer_count);
10277 if (vdec->slave &&
10278 dolby_el_flush_th != 0 &&
10279 hevc->no_switch_dvlayer_count >
10280 dolby_el_flush_th) {
10281 struct hevc_state_s *hevc_el =
10282 (struct hevc_state_s *)
10283 vdec->slave->private;
10284 struct PIC_s *el_pic;
10285 check_pic_decoded_error(hevc_el,
10286 hevc_el->pic_decoded_lcu_idx);
10287 el_pic = get_pic_by_POC(hevc_el,
10288 hevc_el->curr_POC);
10289 hevc_el->curr_POC = INVALID_POC;
10290 hevc_el->m_pocRandomAccess = MAX_INT;
10291 flush_output(hevc_el, el_pic);
10292 hevc_el->decoded_poc = INVALID_POC; /*
10293 already called flush_output */
10294 hevc_el->decoding_pic = NULL;
10295 hevc->no_switch_dvlayer_count = 0;
10296 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
10297 hevc_print(hevc, 0,
10298 "no el anymore, flush_output el\n");
10299 }
10300 }
10301 hevc->decoded_poc = hevc->curr_POC;
10302 hevc->decoding_pic = NULL;
10303 hevc->dec_result = DEC_RESULT_DONE;
10304 amhevc_stop();
10305 reset_process_time(hevc);
10306 if (aux_data_is_avaible(hevc))
10307 dolby_get_meta(hevc);
10308 if (hevc->cur_pic && hevc->cur_pic->slice_type == 2 &&
10309 hevc->vf_pre_count == 0) {
10310 hevc_print(hevc, 0,
10311 "first slice_type %x no_switch_dvlayer_count %x\n",
10312 hevc->cur_pic->slice_type,
10313 hevc->no_switch_dvlayer_count);
10314 goto force_output;
10315 }
10316 vdec_schedule_work(&hevc->work);
10317 }
10318
10319 return IRQ_HANDLED;
10320#endif
10321 }
10322
10323#endif
10324
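	/* HEVC_SEI_DAT: the ucode packs the SEI payload type into the low 16
	 * bits of CUR_NAL_UNIT_TYPE and the payload size into the high 16
	 * bits. The payload is parsed on the CPU only in single-instance
	 * mode, then the ucode is resumed with HEVC_SEI_DAT_DONE.
	 */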
10325 if (dec_status == HEVC_SEI_DAT) {
10326 if (!hevc->m_ins_flag) {
10327 int payload_type =
10328 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
10329 int payload_size =
10330 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
10331 process_nal_sei(hevc,
10332 payload_type, payload_size);
10333 }
10334 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
10335 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
10336 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
10337 int parse_type = HEVC_DISCARD_NAL;
10338
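		/* Manual NAL search result: parse_type tells the ucode what
		 * to do with the NAL just found. Slices are only accepted
		 * once VPS, SPS and PPS have all been seen (and, depending on
		 * PB_skip_mode, a valid random-access start slice); anything
		 * else is dropped with HEVC_DISCARD_NAL.
		 */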
10339 hevc->error_watchdog_count = 0;
10340 hevc->error_skip_nal_wt_cnt = 0;
10341#ifdef MULTI_INSTANCE_SUPPORT
10342 if (hevc->m_ins_flag)
10343 reset_process_time(hevc);
10344#endif
10345 if (slice_parse_begin > 0 &&
10346 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
10347 hevc_print(hevc, 0,
10348 "nal type %d, discard %d\n", naltype,
10349 slice_parse_begin);
10350 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
10351 slice_parse_begin--;
10352 }
10353 if (naltype == NAL_UNIT_EOS) {
10354 struct PIC_s *pic;
10355
10356 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
10357#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10358 if ((vdec_dual(vdec)) && aux_data_is_avaible(hevc)) {
10359 if (hevc->decoding_pic)
10360 dolby_get_meta(hevc);
10361 }
10362#endif
10363 /* Detect whether the frame has an over-decode error */
10364 if ((!vdec_dual(vdec)) &&
10365 hevc->empty_flag == 0) {
10366 hevc->over_decode =
10367 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
10368 if (hevc->over_decode)
10369 hevc_print(hevc, 0,
10370 "!!!Over decode %d\n", __LINE__);
10371 }
10372 check_pic_decoded_error(hevc,
10373 hevc->pic_decoded_lcu_idx);
10374 pic = get_pic_by_POC(hevc, hevc->curr_POC);
10375 hevc->curr_POC = INVALID_POC;
10376 /* add to fix RAP_B_Bossen_1 */
10377 hevc->m_pocRandomAccess = MAX_INT;
10378 flush_output(hevc, pic);
10379 clear_poc_flag(hevc);
10380 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
10382 /* Interrupt Amrisc to execute */
10382 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10383#ifdef MULTI_INSTANCE_SUPPORT
10384 if (hevc->m_ins_flag) {
10385 hevc->decoded_poc = INVALID_POC; /*
10386 already called flush_output */
10387 hevc->decoding_pic = NULL;
10388 hevc->dec_result = DEC_RESULT_DONE;
10389 amhevc_stop();
10390
10391 vdec_schedule_work(&hevc->work);
10392 }
10393#endif
10394 return IRQ_HANDLED;
10395 }
10396
10397 if (
10398#ifdef MULTI_INSTANCE_SUPPORT
10399 (!hevc->m_ins_flag) &&
10400#endif
10401 hevc->error_skip_nal_count > 0) {
10402 hevc_print(hevc, 0,
10403 "nal type %d, discard %d\n", naltype,
10404 hevc->error_skip_nal_count);
10405 hevc->error_skip_nal_count--;
10406 if (hevc->error_skip_nal_count == 0) {
10407 hevc_recover(hevc);
10408 hevc->error_flag = 0;
10409 if ((error_handle_policy & 0x2) == 0) {
10410 hevc->have_vps = 1;
10411 hevc->have_sps = 1;
10412 hevc->have_pps = 1;
10413 }
10414 return IRQ_HANDLED;
10415 }
10416 } else if (naltype == NAL_UNIT_VPS) {
10417 parse_type = HEVC_NAL_UNIT_VPS;
10418 hevc->have_vps = 1;
10419#ifdef ERROR_HANDLE_DEBUG
10420 if (dbg_nal_skip_flag & 1)
10421 parse_type = HEVC_DISCARD_NAL;
10422#endif
10423 } else if (hevc->have_vps) {
10424 if (naltype == NAL_UNIT_SPS) {
10425 parse_type = HEVC_NAL_UNIT_SPS;
10426 hevc->have_sps = 1;
10427#ifdef ERROR_HANDLE_DEBUG
10428 if (dbg_nal_skip_flag & 2)
10429 parse_type = HEVC_DISCARD_NAL;
10430#endif
10431 } else if (naltype == NAL_UNIT_PPS) {
10432 parse_type = HEVC_NAL_UNIT_PPS;
10433 hevc->have_pps = 1;
10434#ifdef ERROR_HANDLE_DEBUG
10435 if (dbg_nal_skip_flag & 4)
10436 parse_type = HEVC_DISCARD_NAL;
10437#endif
10438 } else if (hevc->have_sps && hevc->have_pps) {
10439 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
10440
10441 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
10442 (naltype ==
10443 NAL_UNIT_CODED_SLICE_IDR_N_LP)
10444 || (naltype ==
10445 NAL_UNIT_CODED_SLICE_CRA)
10446 || (naltype ==
10447 NAL_UNIT_CODED_SLICE_BLA)
10448 || (naltype ==
10449 NAL_UNIT_CODED_SLICE_BLANT)
10450 || (naltype ==
10451 NAL_UNIT_CODED_SLICE_BLA_N_LP)
10452 ) {
10453 if (slice_parse_begin > 0) {
10454 hevc_print(hevc, 0,
10455 "discard %d, for debugging\n",
10456 slice_parse_begin);
10457 slice_parse_begin--;
10458 } else {
10459 parse_type = seg;
10460 }
10461 hevc->have_valid_start_slice = 1;
10462 } else if (naltype <=
10463 NAL_UNIT_CODED_SLICE_CRA
10464 && (hevc->have_valid_start_slice
10465 || (hevc->PB_skip_mode != 3))) {
10466 if (slice_parse_begin > 0) {
10467 hevc_print(hevc, 0,
10468 "discard %d, dd\n",
10469 slice_parse_begin);
10470 slice_parse_begin--;
10471 } else
10472 parse_type = seg;
10473
10474 }
10475 }
10476 }
10477 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
10478 && hevc->have_valid_start_slice &&
10479 hevc->error_flag == 0) {
10480 if ((get_dbg_flag(hevc) &
10481 H265_DEBUG_MAN_SEARCH_NAL) == 0
10482 /* && (!hevc->m_ins_flag)*/) {
10483 /* auto parser NAL; do not check
10484 * vps/sps/pps/idr
10485 */
10486 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
10487 }
10488
10489 if ((get_dbg_flag(hevc) &
10490 H265_DEBUG_NO_EOS_SEARCH_DONE)
10491#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10492 || vdec->master
10493 || vdec->slave
10494#endif
10495 ) {
10496 WRITE_VREG(NAL_SEARCH_CTL,
10497 READ_VREG(NAL_SEARCH_CTL) |
10498 0x10000);
10499 }
10500 WRITE_VREG(NAL_SEARCH_CTL,
10501 READ_VREG(NAL_SEARCH_CTL)
10502 | ((parser_sei_enable & 0x7) << 17));
10503/*#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION*/
10504 WRITE_VREG(NAL_SEARCH_CTL,
10505 READ_VREG(NAL_SEARCH_CTL) |
10506 ((parser_dolby_vision_enable & 0x1) << 20));
10507/*#endif*/
10508 config_decode_mode(hevc);
10509 }
10510
10511 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
10512 hevc_print(hevc, 0,
10513 "naltype = %d parse_type %d\n %d %d %d %d\n",
10514 naltype, parse_type, hevc->have_vps,
10515 hevc->have_sps, hevc->have_pps,
10516 hevc->have_valid_start_slice);
10517 }
10518
10519 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
10521 /* Interrupt Amrisc to execute */
10521 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10522#ifdef MULTI_INSTANCE_SUPPORT
10523 if (hevc->m_ins_flag)
10524 start_process_time(hevc);
10525#endif
10526 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
10527#ifdef MULTI_INSTANCE_SUPPORT
10528 if (hevc->m_ins_flag) {
10529 reset_process_time(hevc);
10530 read_decode_info(hevc);
10531
10532 }
10533#endif
10534 if (hevc->start_decoding_time > 0) {
10535 u32 process_time = 1000*
10536 (jiffies - hevc->start_decoding_time)/HZ;
10537 if (process_time > max_decoding_time)
10538 max_decoding_time = process_time;
10539 }
10540
10541 hevc->error_watchdog_count = 0;
10542 if (hevc->pic_list_init_flag == 2) {
10543 hevc->pic_list_init_flag = 3;
10544 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
10545 if (hevc->kpi_first_i_comming == 0) {
10546 hevc->kpi_first_i_comming = 1;
10547 pr_debug("[vdec_kpi][%s] First I frame coming.\n",
10548 __func__);
10549 }
10550 } else if (hevc->wait_buf == 0) {
10551 u32 vui_time_scale;
10552 u32 vui_num_units_in_tick;
10553 unsigned char reconfig_flag = 0;
10554
10555 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
10556 get_rpm_param(&hevc->param);
10557 else {
10558
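				/* Copy the RPM parameters from the LMEM
				 * mirror in groups of four 16-bit words; the
				 * "i + 3 - ii" index reverses each group,
				 * presumably undoing the 64-bit word order of
				 * the DMA'd LMEM image.
				 */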
10559 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
10560 int ii;
10561
10562 for (ii = 0; ii < 4; ii++) {
10563 hevc->param.l.data[i + ii] =
10564 hevc->rpm_ptr[i + 3
10565 - ii];
10566 }
10567 }
10568#ifdef SEND_LMEM_WITH_RPM
10569 check_head_error(hevc);
10570#endif
10571 }
10572 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
10573 hevc_print(hevc, 0,
10574 "rpm_param: (%d)\n", hevc->slice_idx);
10575 hevc->slice_idx++;
10576 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
10577 hevc_print_cont(hevc, 0,
10578 "%04x ", hevc->param.l.data[i]);
10579 if (((i + 1) & 0xf) == 0)
10580 hevc_print_cont(hevc, 0, "\n");
10581 }
10582
10583 hevc_print(hevc, 0,
10584 "vui_timing_info: %x, %x, %x, %x\n",
10585 hevc->param.p.vui_num_units_in_tick_hi,
10586 hevc->param.p.vui_num_units_in_tick_lo,
10587 hevc->param.p.vui_time_scale_hi,
10588 hevc->param.p.vui_time_scale_lo);
10589 }
10590
10591 if (hevc->is_used_v4l) {
10592 struct aml_vcodec_ctx *ctx =
10593 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
10594 if (!v4l_res_change(hevc, &hevc->param)) {
10595 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
10596 struct aml_vdec_ps_infos ps;
10597 int width = hevc->param.p.pic_width_in_luma_samples;
10598 int height = hevc->param.p.pic_height_in_luma_samples;
10599
10600 pr_debug("set ucode parse\n");
10601 vh265_get_ps_info(hevc, width, height, &ps);
10602 /*notice the v4l2 codec.*/
10603 vdec_v4l_set_ps_infos(ctx, &ps);
10604 hevc->v4l_params_parsed = true;
10605 hevc->dec_result = DEC_RESULT_AGAIN;
10606 amhevc_stop();
10607 restore_decode_state(hevc);
10608 reset_process_time(hevc);
10609 vdec_schedule_work(&hevc->work);
10610 return IRQ_HANDLED;
10611 }
10612 } else {
10613 pr_debug("resolution change\n");
10614 hevc->dec_result = DEC_RESULT_AGAIN;
10615 amhevc_stop();
10616 restore_decode_state(hevc);
10617 reset_process_time(hevc);
10618 vdec_schedule_work(&hevc->work);
10619 return IRQ_HANDLED;
10620
10621 }
10622 }
10623
10624 if (
10625#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10626 vdec->master == NULL &&
10627 vdec->slave == NULL &&
10628#endif
10629 aux_data_is_avaible(hevc)
10630 ) {
10631
10632 if (get_dbg_flag(hevc) &
10633 H265_DEBUG_BUFMGR_MORE)
10634 dump_aux_buf(hevc);
10635 }
10636
10637 vui_time_scale =
10638 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
10639 hevc->param.p.vui_time_scale_lo;
10640 vui_num_units_in_tick =
10641 (u32)(hevc->param.
10642 p.vui_num_units_in_tick_hi << 16) |
10643 hevc->param.
10644 p.vui_num_units_in_tick_lo;
10645 if (hevc->bit_depth_luma !=
10646 ((hevc->param.p.bit_depth & 0xf) + 8)) {
10647 reconfig_flag = 1;
10648 hevc_print(hevc, 0, "Bit depth luma = %d\n",
10649 (hevc->param.p.bit_depth & 0xf) + 8);
10650 }
10651 if (hevc->bit_depth_chroma !=
10652 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
10653 reconfig_flag = 1;
10654 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
10655 ((hevc->param.p.bit_depth >> 4) &
10656 0xf) + 8);
10657 }
10658 hevc->bit_depth_luma =
10659 (hevc->param.p.bit_depth & 0xf) + 8;
10660 hevc->bit_depth_chroma =
10661 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
10662 bit_depth_luma = hevc->bit_depth_luma;
10663 bit_depth_chroma = hevc->bit_depth_chroma;
10664#ifdef SUPPORT_10BIT
10665 if (hevc->bit_depth_luma == 8 &&
10666 hevc->bit_depth_chroma == 8 &&
10667 enable_mem_saving)
10668 hevc->mem_saving_mode = 1;
10669 else
10670 hevc->mem_saving_mode = 0;
10671#endif
10672 if (reconfig_flag &&
10673 (get_double_write_mode(hevc) & 0x10) == 0)
10674 init_decode_head_hw(hevc);
10675
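			/* frame_dur is in 1/96000 s units:
			 * frame_dur = 96000 * num_units_in_tick / time_scale,
			 * e.g. 1001/60000 (59.94 fps) gives
			 * 96000 * 1001 / 60000 = 1601 (integer division).
			 */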
10676 if ((vui_time_scale != 0)
10677 && (vui_num_units_in_tick != 0)) {
10678 hevc->frame_dur =
10679 div_u64(96000ULL *
10680 vui_num_units_in_tick,
10681 vui_time_scale);
10682 if (hevc->get_frame_dur != true)
10683 vdec_schedule_work(
10684 &hevc->notify_work);
10685
10686 hevc->get_frame_dur = true;
10687 //hevc->gvs->frame_dur = hevc->frame_dur;
10688 }
10689
10690 if (hevc->video_signal_type !=
10691 ((hevc->param.p.video_signal_type << 16)
10692 | hevc->param.p.color_description)) {
10693 u32 v = hevc->param.p.video_signal_type;
10694 u32 c = hevc->param.p.color_description;
10695#if 0
10696 if (v & 0x2000) {
10697 hevc_print(hevc, 0,
10698 "video_signal_type present:\n");
10699 hevc_print(hevc, 0, " %s %s\n",
10700 video_format_names[(v >> 10) & 7],
10701 ((v >> 9) & 1) ?
10702 "full_range" : "limited");
10703 if (v & 0x100) {
10704 hevc_print(hevc, 0,
10705 " color_description present:\n");
10706 hevc_print(hevc, 0,
10707 " color_primarie = %s\n",
10708 color_primaries_names
10709 [v & 0xff]);
10710 hevc_print(hevc, 0,
10711 " transfer_characteristic = %s\n",
10712 transfer_characteristics_names
10713 [(c >> 8) & 0xff]);
10714 hevc_print(hevc, 0,
10715 " matrix_coefficient = %s\n",
10716 matrix_coeffs_names[c & 0xff]);
10717 }
10718 }
10719#endif
10720 hevc->video_signal_type = (v << 16) | c;
10721 video_signal_type = hevc->video_signal_type;
10722 }
10723
10724 if (use_cma &&
10725 (hevc->param.p.slice_segment_address == 0)
10726 && (hevc->pic_list_init_flag == 0)) {
10727 int log = hevc->param.p.log2_min_coding_block_size_minus3;
10728 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
10729
10730 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10731 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
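				/* LCU size = min CB size << log_s
				 * = 1 << (log + 3 + log_s); e.g. log = 0
				 * (8x8 minimum CB) and log_s = 3 give
				 * 64x64 LCUs.
				 */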
10732 hevc->lcu_size = 1 << (log + 3 + log_s);
10733 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
10734 if (performance_profile && ((!is_oversize(hevc->pic_w, hevc->pic_h)) && IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
10735 hevc->performance_profile = 1;
10736 else
10737 hevc->performance_profile = 0;
10738 hevc_print(hevc, 0, "hevc->performance_profile %d\n", hevc->performance_profile);
10739 if (hevc->pic_w == 0 || hevc->pic_h == 0
10740 || hevc->lcu_size == 0
10741 || is_oversize(hevc->pic_w, hevc->pic_h)
10742 || hevc_skip_nal(hevc)) {
10743 /* skip search next start code */
10744 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10745 & (~0x2));
10746 if ((hevc->pic_h == 96) && (hevc->pic_w == 160))
10747 hevc->skip_nal_count++;
10748 hevc->skip_flag = 1;
10749 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10751 /* Interrupt Amrisc to execute */
10751 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10752#ifdef MULTI_INSTANCE_SUPPORT
10753 if (hevc->m_ins_flag)
10754 start_process_time(hevc);
10755#endif
10756 } else {
10757 hevc->sps_num_reorder_pics_0 =
10758 hevc->param.p.sps_num_reorder_pics_0;
10759 hevc->ip_mode = (!hevc->sps_num_reorder_pics_0 &&
10760 !(vdec->slave || vdec->master) &&
10761 !disable_ip_mode) ? true : false;
10762 hevc->pic_list_init_flag = 1;
10763 if ((!IS_4K_SIZE(hevc->pic_w, hevc->pic_h)) &&
10764 ((hevc->param.p.profile_etc & 0xc) == 0x4)
10765 && (interlace_enable != 0)) {
10766 hevc->double_write_mode = 1;
10767 hevc->interlace_flag = 1;
10768 hevc->frame_ar = (hevc->pic_h * 0x100 / hevc->pic_w) * 2;
10769 hevc_print(hevc, 0,
10770 "interlace (%d, %d), profile_etc %x, ar 0x%x, dw %d\n",
10771 hevc->pic_w, hevc->pic_h, hevc->param.p.profile_etc, hevc->frame_ar,
10772 get_double_write_mode(hevc));
10773 /* When dw changed from 0x10 to 1, the mmu_box is NULL */
10774 if (!hevc->mmu_box && init_mmu_box(hevc) != 0) {
10775 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10776 hevc->fatal_error |=
10777 DECODER_FATAL_ERROR_NO_MEM;
10778 vdec_schedule_work(&hevc->work);
10779 hevc_print(hevc,
10780 0, "can not alloc mmu box, force exit\n");
10781 return IRQ_HANDLED;
10782 }
10783 }
10784#ifdef MULTI_INSTANCE_SUPPORT
10785 if (hevc->m_ins_flag) {
10786 vdec_schedule_work(&hevc->work);
10787 } else
10788#endif
10789 up(&h265_sema);
10790 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10791 }
10792 return IRQ_HANDLED;
10793 }
10794
10795}
10796 ret =
10797 hevc_slice_segment_header_process(hevc,
10798 &hevc->param, decode_pic_begin);
10799 if (ret < 0) {
10800#ifdef MULTI_INSTANCE_SUPPORT
10801 if (hevc->m_ins_flag) {
10802 hevc->wait_buf = 0;
10803 hevc->dec_result = DEC_RESULT_AGAIN;
10804 amhevc_stop();
10805 restore_decode_state(hevc);
10806 reset_process_time(hevc);
10807 vdec_schedule_work(&hevc->work);
10808 return IRQ_HANDLED;
10809 }
10810#else
10811 ;
10812#endif
10813 } else if (ret == 0) {
10814 if ((hevc->new_pic) && (hevc->cur_pic)) {
10815 hevc->cur_pic->stream_offset =
10816 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10817 hevc->cur_pic->stream_frame_size =
10818 hevc->cur_pic->stream_offset - hevc->last_dec_pic_offset;
10819 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10820 "read stream_offset = 0x%x, frame_size = 0x%x\n",
10821 hevc->cur_pic->stream_offset, hevc->cur_pic->stream_frame_size);
10822 hevc->last_dec_pic_offset = hevc->cur_pic->stream_offset;
10823
10824
10825 hevc->cur_pic->aspect_ratio_idc =
10826 hevc->param.p.aspect_ratio_idc;
10827 hevc->cur_pic->sar_width =
10828 hevc->param.p.sar_width;
10829 hevc->cur_pic->sar_height =
10830 hevc->param.p.sar_height;
10831 }
10832
10833 WRITE_VREG(HEVC_DEC_STATUS_REG,
10834 HEVC_CODED_SLICE_SEGMENT_DAT);
10836 /* Interrupt Amrisc to execute */
10836 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10837
10838 hevc->start_decoding_time = jiffies;
10839#ifdef MULTI_INSTANCE_SUPPORT
10840 if (hevc->m_ins_flag)
10841 start_process_time(hevc);
10842#endif
10843#if 1
10844 /*to do..., copy aux data to hevc->cur_pic*/
10845#endif
10846#ifdef MULTI_INSTANCE_SUPPORT
10847 } else if (hevc->m_ins_flag) {
10848 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10849 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10850 __func__, ret);
10851 hevc->decoded_poc = INVALID_POC;
10852 hevc->decoding_pic = NULL;
10853 hevc->dec_result = DEC_RESULT_DONE;
10854 amhevc_stop();
10855 reset_process_time(hevc);
10856 vdec_schedule_work(&hevc->work);
10857#endif
10858 } else {
10859 /* skip, search next start code */
10860 hevc->gvs->drop_frame_count++;
10861 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10862 hevc->skip_flag = 1;
10863 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10865 /* Interrupt Amrisc to execute */
10865 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10866 }
10867
10868 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10869 hevc_print(hevc, 0 , "hevc decode oversize !!\n");
10870#ifdef MULTI_INSTANCE_SUPPORT
10871 if (!hevc->m_ins_flag)
10872 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10873 H265_DEBUG_DIS_SYS_ERROR_PROC);
10874#endif
10875 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10876 }
10877 return IRQ_HANDLED;
10878}
10879
10880static void wait_hevc_search_done(struct hevc_state_s *hevc)
10881{
10882 int count = 0;
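	/* Poll HEVC_STREAM_CONTROL bit 1 until it clears, giving up after
	 * roughly 2 seconds (100 * 20 ms).
	 */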
10883 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10884 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10885 msleep(20);
10886 count++;
10887 if (count > 100) {
10888 hevc_print(hevc, 0, "%s timeout\n", __func__);
10889 break;
10890 }
10891 }
10892}
10893static irqreturn_t vh265_isr(int irq, void *data)
10894{
10895 int i, temp;
10896 unsigned int dec_status;
10897 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10898 u32 debug_tag;
10899 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10900
10901 if (hevc->init_flag == 0)
10902 return IRQ_HANDLED;
10903 hevc->dec_status = dec_status;
10904 if (is_log_enable(hevc))
10905 add_log(hevc,
10906 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10907 dec_status, READ_HREG(HEVC_DECODE_INFO),
10908 READ_VREG(HEVC_MPRED_CURR_LCU),
10909 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10910 READ_VREG(HEVC_SHIFT_STATUS));
10911
10912 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10913 hevc_print(hevc, 0,
10914 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10915 dec_status, READ_HREG(HEVC_DECODE_INFO),
10916 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10917 READ_VREG(HEVC_SHIFT_STATUS));
10918
10919 debug_tag = READ_HREG(DEBUG_REG1);
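	/* DEBUG_REG1 carries a debug tag from the ucode: bit 16 triggers an
	 * LMEM dump and an optional pause (matched against
	 * udebug_pause_pos/decode_idx/val); any other non-zero tag is just
	 * logged together with the stream pointers.
	 */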
10920 if (debug_tag & 0x10000) {
10921 hevc_print(hevc, 0,
10922 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10923
10924 if (hevc->mmu_enable)
10925 temp = 0x500;
10926 else
10927 temp = 0x400;
10928 for (i = 0; i < temp; i += 4) {
10929 int ii;
10930 if ((i & 0xf) == 0)
10931 hevc_print_cont(hevc, 0, "%03x: ", i);
10932 for (ii = 0; ii < 4; ii++) {
10933 hevc_print_cont(hevc, 0, "%04x ",
10934 hevc->lmem_ptr[i + 3 - ii]);
10935 }
10936 if (((i + ii) & 0xf) == 0)
10937 hevc_print_cont(hevc, 0, "\n");
10938 }
10939
10940 if (((udebug_pause_pos & 0xffff)
10941 == (debug_tag & 0xffff)) &&
10942 (udebug_pause_decode_idx == 0 ||
10943 udebug_pause_decode_idx == hevc->decode_idx) &&
10944 (udebug_pause_val == 0 ||
10945 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10946 udebug_pause_pos &= 0xffff;
10947 hevc->ucode_pause_pos = udebug_pause_pos;
10948 }
10949 else if (debug_tag & 0x20000)
10950 hevc->ucode_pause_pos = 0xffffffff;
10951 if (hevc->ucode_pause_pos)
10952 reset_process_time(hevc);
10953 else
10954 WRITE_HREG(DEBUG_REG1, 0);
10955 } else if (debug_tag != 0) {
10956 hevc_print(hevc, 0,
10957 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10958 READ_HREG(DEBUG_REG2),
10959 READ_VREG(HEVC_STREAM_LEVEL),
10960 READ_VREG(HEVC_STREAM_WR_PTR),
10961 READ_VREG(HEVC_STREAM_RD_PTR));
10962 if (((udebug_pause_pos & 0xffff)
10963 == (debug_tag & 0xffff)) &&
10964 (udebug_pause_decode_idx == 0 ||
10965 udebug_pause_decode_idx == hevc->decode_idx) &&
10966 (udebug_pause_val == 0 ||
10967 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10968 udebug_pause_pos &= 0xffff;
10969 hevc->ucode_pause_pos = udebug_pause_pos;
10970 }
10971 if (hevc->ucode_pause_pos)
10972 reset_process_time(hevc);
10973 else
10974 WRITE_HREG(DEBUG_REG1, 0);
10975 return IRQ_HANDLED;
10976 }
10977
10978
10979 if (hevc->pic_list_init_flag == 1)
10980 return IRQ_HANDLED;
10981
10982 if (!hevc->m_ins_flag) {
10983 if (dec_status == HEVC_OVER_DECODE) {
10984 hevc->over_decode = 1;
10985 hevc_print(hevc, 0,
10986 "isr: over decode\n");
10987 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10988 return IRQ_HANDLED;
10989 }
10990 }
10991
10992 return IRQ_WAKE_THREAD;
10993
10994}
10995
10996static void vh265_set_clk(struct work_struct *work)
10997{
10998 struct hevc_state_s *hevc = container_of(work,
10999 struct hevc_state_s, set_clk_work);
11000
11001 int fps = 96000 / hevc->frame_dur;
11002
11003 if (hevc_source_changed(VFORMAT_HEVC,
11004 hevc->frame_width, hevc->frame_height, fps) > 0)
11005 hevc->saved_resolution = hevc->frame_width *
11006 hevc->frame_height * fps;
11007}
11008
11009static void vh265_check_timer_func(unsigned long arg)
11010{
11011 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
11012 struct timer_list *timer = &hevc->timer;
11013 unsigned char empty_flag;
11014 unsigned int buf_level;
11015
11016 enum receviver_start_e state = RECEIVER_INACTIVE;
11017
11018 if (hevc->init_flag == 0) {
11019 if (hevc->stat & STAT_TIMER_ARM) {
11020 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
11021 }
11022 return;
11023 }
11024#ifdef MULTI_INSTANCE_SUPPORT
11025 if (hevc->m_ins_flag &&
11026 (get_dbg_flag(hevc) &
11027 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
11028 hw_to_vdec(hevc)->next_status ==
11029 VDEC_STATUS_DISCONNECTED &&
11030 !hevc->is_used_v4l) {
11031 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
11032 vdec_schedule_work(&hevc->work);
11033 hevc_print(hevc,
11034 0, "vdec requested to be disconnected\n");
11035 return;
11036 }
11037
11038 if (hevc->m_ins_flag) {
11039 if (((get_dbg_flag(hevc) &
11040 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
11041 (decode_timeout_val > 0) &&
11042 (hevc->start_process_time > 0) &&
11043 ((1000 * (jiffies - hevc->start_process_time) / HZ)
11044 > decode_timeout_val)
11045 ) {
11046 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
11047 int current_lcu_idx =
11048 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
11049 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
11050 if (hevc->last_lcu_idx == current_lcu_idx) {
11051 if (hevc->decode_timeout_count > 0)
11052 hevc->decode_timeout_count--;
11053 if (hevc->decode_timeout_count == 0)
11054 timeout_process(hevc);
11055 } else
11056 restart_process_time(hevc);
11057 hevc->last_lcu_idx = current_lcu_idx;
11058 } else {
11059 hevc->pic_decoded_lcu_idx = current_lcu_idx;
11060 timeout_process(hevc);
11061 }
11062 }
11063 } else {
11064#endif
11065 if (hevc->m_ins_flag == 0 &&
11066 vf_get_receiver(hevc->provider_name)) {
11067 state =
11068 vf_notify_receiver(hevc->provider_name,
11069 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11070 NULL);
11071 if ((state == RECEIVER_STATE_NULL)
11072 || (state == RECEIVER_STATE_NONE))
11073 state = RECEIVER_INACTIVE;
11074 } else
11075 state = RECEIVER_INACTIVE;
11076
11077 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
11078 /* error watchdog */
11079 if (hevc->m_ins_flag == 0 &&
11080 (empty_flag == 0)
11081 && (hevc->pic_list_init_flag == 0
11082 || hevc->pic_list_init_flag
11083 == 3)) {
11084 /* decoder has input */
11085 if ((get_dbg_flag(hevc) &
11086 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
11087
11088 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
11089 /* receiver has no buffer to recycle */
11090 if ((state == RECEIVER_INACTIVE) &&
11091 (kfifo_is_empty(&hevc->display_q) &&
11092 buf_level > 0x200)
11093 ) {
11094 if (hevc->error_flag == 0) {
11095 hevc->error_watchdog_count++;
11096 if (hevc->error_watchdog_count ==
11097 error_handle_threshold) {
11098 hevc_print(hevc, 0,
11099 "H265 dec err local reset.\n");
11100 hevc->error_flag = 1;
11101 hevc->error_watchdog_count = 0;
11102 hevc->error_skip_nal_wt_cnt = 0;
11103 hevc->
11104 error_system_watchdog_count++;
11105 WRITE_VREG
11106 (HEVC_ASSIST_MBOX0_IRQ_REG,
11107 0x1);
11108 }
11109 } else if (hevc->error_flag == 2) {
11110 int th =
11111 error_handle_nal_skip_threshold;
11112 hevc->error_skip_nal_wt_cnt++;
11113 if (hevc->error_skip_nal_wt_cnt
11114 == th) {
11115 hevc->error_flag = 3;
11116 hevc->error_watchdog_count = 0;
11117 hevc->
11118 error_skip_nal_wt_cnt = 0;
11119 WRITE_VREG
11120 (HEVC_ASSIST_MBOX0_IRQ_REG,
11121 0x1);
11122 }
11123 }
11124 }
11125 }
11126
11127 if ((get_dbg_flag(hevc)
11128 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
11129 /* receiver has no buffer to recycle */
11130 if ((state == RECEIVER_INACTIVE) &&
11131 (kfifo_is_empty(&hevc->display_q))
11132 ) { /* no buffer to recycle */
11133 if ((get_dbg_flag(hevc) &
11134 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
11135 0)
11136 hevc->error_system_watchdog_count++;
11137 if (hevc->error_system_watchdog_count ==
11138 error_handle_system_threshold) {
11139 /* and it lasts for a while */
11140 hevc_print(hevc, 0,
11141 "H265 dec fatal error watchdog.\n");
11142 hevc->
11143 error_system_watchdog_count = 0;
11144 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
11145 }
11146 }
11147 } else {
11148 hevc->error_watchdog_count = 0;
11149 hevc->error_system_watchdog_count = 0;
11150 }
11151#ifdef MULTI_INSTANCE_SUPPORT
11152 }
11153#endif
11154 if ((hevc->ucode_pause_pos != 0) &&
11155 (hevc->ucode_pause_pos != 0xffffffff) &&
11156 udebug_pause_pos != hevc->ucode_pause_pos) {
11157 hevc->ucode_pause_pos = 0;
11158 WRITE_HREG(DEBUG_REG1, 0);
11159 }
11160
11161 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
11162 dump_pic_list(hevc);
11163 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
11164 }
11165 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
11166 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11167 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
11168 }
11169#ifdef TEST_NO_BUF
11170 if (hevc->wait_buf)
11171 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11172#endif
11173 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
11174 hevc->error_skip_nal_count = error_skip_nal_count;
11175 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11176
11177 debug &= ~H265_DEBUG_HW_RESET;
11178 }
11179
11180#ifdef ERROR_HANDLE_DEBUG
11181 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
11182 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
11183 dbg_nal_skip_count &= ~0x10000;
11184 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11185 }
11186#endif
11187
11188 if (radr != 0) {
11189 if (rval != 0) {
11190 WRITE_VREG(radr, rval);
11191 hevc_print(hevc, 0,
11192 "WRITE_VREG(%x,%x)\n", radr, rval);
11193 } else
11194 hevc_print(hevc, 0,
11195 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
11196 rval = 0;
11197 radr = 0;
11198 }
11199 if (dbg_cmd != 0) {
11200 if (dbg_cmd == 1) {
11201 u32 disp_laddr;
11202
11203 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
11204 get_double_write_mode(hevc) == 0) {
11205 disp_laddr =
11206 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
11207 } else {
11208 struct canvas_s cur_canvas;
11209
11210 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
11211 & 0xff), &cur_canvas);
11212 disp_laddr = cur_canvas.addr;
11213 }
11214 hevc_print(hevc, 0,
11215 "current displayed buffer address %x\r\n",
11216 disp_laddr);
11217 }
11218 dbg_cmd = 0;
11219 }
11220 /* don't change the clock at start */
11221 if (hevc->m_ins_flag == 0 &&
11222 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
11223 hevc->frame_dur > 0 && hevc->saved_resolution !=
11224 hevc->frame_width * hevc->frame_height *
11225 (96000 / hevc->frame_dur))
11226 vdec_schedule_work(&hevc->set_clk_work);
11227
11228 mod_timer(timer, jiffies + PUT_INTERVAL);
11229}
11230
11231static int h265_task_handle(void *data)
11232{
11233 int ret = 0;
11234 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
11235
11236 set_user_nice(current, -10);
11237 while (1) {
11238 if (use_cma == 0) {
11239 hevc_print(hevc, 0,
11240 "ERROR: use_cma can not be changed dynamically\n");
11241 }
11242 ret = down_interruptible(&h265_sema);
11243 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
11244 init_pic_list(hevc);
11245 init_pic_list_hw(hevc);
11246 init_buf_spec(hevc);
11247 hevc->pic_list_init_flag = 2;
11248 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
11249
11250 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11251
11252 }
11253
11254 if (hevc->uninit_list) {
11255 /*USE_BUF_BLOCK*/
11256 uninit_pic_list(hevc);
11257 hevc_print(hevc, 0, "uninit list\n");
11258 hevc->uninit_list = 0;
11259#ifdef USE_UNINIT_SEMA
11260 if (use_cma) {
11261 up(&hevc->h265_uninit_done_sema);
11262 while (!kthread_should_stop())
11263 msleep(1);
11264 break;
11265 }
11266#endif
11267 }
11268 }
11269
11270 return 0;
11271}
11272
11273void vh265_free_cmabuf(void)
11274{
11275 struct hevc_state_s *hevc = gHevc;
11276
11277 mutex_lock(&vh265_mutex);
11278
11279 if (hevc->init_flag) {
11280 mutex_unlock(&vh265_mutex);
11281 return;
11282 }
11283
11284 mutex_unlock(&vh265_mutex);
11285}
11286
11287#ifdef MULTI_INSTANCE_SUPPORT
11288int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
11289#else
11290int vh265_dec_status(struct vdec_info *vstatus)
11291#endif
11292{
11293#ifdef MULTI_INSTANCE_SUPPORT
11294 struct hevc_state_s *hevc =
11295 (struct hevc_state_s *)vdec->private;
11296#else
11297 struct hevc_state_s *hevc = gHevc;
11298#endif
11299 if (!hevc)
11300 return -1;
11301
11302 vstatus->frame_width = hevc->frame_width;
11303 /* for hevc interlace for disp height x2 */
11304 vstatus->frame_height =
11305 (hevc->frame_height << hevc->interlace_flag);
11306 if (hevc->frame_dur != 0)
11307 vstatus->frame_rate = 96000 / hevc->frame_dur;
11308 else
11309 vstatus->frame_rate = -1;
11310 vstatus->error_count = hevc->gvs->error_frame_count;
11311 vstatus->status = hevc->stat | hevc->fatal_error;
11312 vstatus->bit_rate = hevc->gvs->bit_rate;
11313 vstatus->frame_dur = hevc->frame_dur;
11314 if (hevc->gvs) {
11315 vstatus->bit_rate = hevc->gvs->bit_rate;
11316 vstatus->frame_data = hevc->gvs->frame_data;
11317 vstatus->total_data = hevc->gvs->total_data;
11318 vstatus->frame_count = hevc->gvs->frame_count;
11319 vstatus->error_frame_count = hevc->gvs->error_frame_count;
11320 vstatus->drop_frame_count = hevc->gvs->drop_frame_count;
11321 vstatus->samp_cnt = hevc->gvs->samp_cnt;
11322 vstatus->offset = hevc->gvs->offset;
11323 }
11324
11325 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
11326 "%s", DRIVER_NAME);
11327 vstatus->ratio_control = hevc->ratio_control;
11328 return 0;
11329}
11330
11331int vh265_set_isreset(struct vdec_s *vdec, int isreset)
11332{
11333 is_reset = isreset;
11334 return 0;
11335}
11336
11337static int vh265_vdec_info_init(struct hevc_state_s *hevc)
11338{
11339 hevc->gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
11340 //pr_err("[%s line %d] hevc->gvs=0x%p operation\n",__func__, __LINE__, hevc->gvs);
11341 if (NULL == hevc->gvs) {
11342 pr_info("failed to allocate the vdec status struct.\n");
11343 return -ENOMEM;
11344 }
11345 vdec_set_vframe_comm(hw_to_vdec(hevc), DRIVER_NAME);
11346 return 0;
11347}
11348
11349#if 0
11350static void H265_DECODE_INIT(void)
11351{
11352 /* enable hevc clocks */
11353 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
11354 /* *************************************************************** */
11355 /* Power ON HEVC */
11356 /* *************************************************************** */
11357 /* Powerup HEVC */
11358 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
11359 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
11360 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
11361 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
11362 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
11363 /* remove isolations */
11364 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
11365 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
11366
11367}
11368#endif
11369
11370int vh265_set_trickmode(struct vdec_s *vdec, unsigned long trickmode)
11371{
11372 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
11373 hevc_print(hevc, 0, "[%s %d] trickmode:%lu\n", __func__, __LINE__, trickmode);
11374
11375 if (trickmode == TRICKMODE_I) {
11376 trickmode_i = 1;
11377 i_only_flag = 0x1;
11378 } else if (trickmode == TRICKMODE_NONE) {
11379 trickmode_i = 0;
11380 i_only_flag = 0x0;
11381 } else if (trickmode == 0x02) {
11382 trickmode_i = 0;
11383 i_only_flag = 0x02;
11384 } else if (trickmode == 0x03) {
11385 trickmode_i = 1;
11386 i_only_flag = 0x03;
11387 } else if (trickmode == 0x07) {
11388 trickmode_i = 1;
11389 i_only_flag = 0x07;
11390 }
11391 //hevc_print(hevc, 0, "i_only_flag: %d trickmode_i:%d\n", i_only_flag, trickmode_i);
11392
11393 return 0;
11394}
11395
11396static void config_decode_mode(struct hevc_state_s *hevc)
11397{
11398#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11399 struct vdec_s *vdec = hw_to_vdec(hevc);
11400#endif
11401 unsigned decode_mode;
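	/* HEVC_DECODE_MODE layout as assembled below: bits[7:0] decode mode,
	 * bits[15:8] start_parser_type (Dolby Vision cases), bits[23:16]
	 * start_decoding_flag, and 0x80 in bits[31:24] to select the MBX0
	 * interrupt.
	 */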
11402#ifdef HEVC_8K_LFTOFFSET_FIX
11403 if (hevc->performance_profile)
11404 WRITE_VREG(NAL_SEARCH_CTL,
11405 READ_VREG(NAL_SEARCH_CTL) | (1 << 21));
11406#endif
11407 if (!hevc->m_ins_flag)
11408 decode_mode = DECODE_MODE_SINGLE;
11409 else if (vdec_frame_based(hw_to_vdec(hevc)))
11410 decode_mode =
11411 DECODE_MODE_MULTI_FRAMEBASE;
11412#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11413 else if (vdec->slave) {
11414 if (force_bypass_dvenl & 0x80000000)
11415 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
11416 else
11417 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
11418 if (dolby_meta_with_el && hevc->bypass_dvenl) {
11419 hevc->bypass_dvenl = 0;
11420 hevc_print(hevc, 0,
11421 "NOT support bypass_dvenl when meta_with_el\n");
11422 }
11423 if (hevc->bypass_dvenl)
11424 decode_mode =
11425 (hevc->start_parser_type << 8)
11426 | DECODE_MODE_MULTI_STREAMBASE;
11427 else
11428 decode_mode =
11429 (hevc->start_parser_type << 8)
11430 | DECODE_MODE_MULTI_DVBAL;
11431 } else if (vdec->master)
11432 decode_mode =
11433 (hevc->start_parser_type << 8)
11434 | DECODE_MODE_MULTI_DVENL;
11435#endif
11436 else
11437 decode_mode =
11438 DECODE_MODE_MULTI_STREAMBASE;
11439
11440 if (hevc->m_ins_flag)
11441 decode_mode |=
11442 (hevc->start_decoding_flag << 16);
11443 /* set MBX0 interrupt flag */
11444 decode_mode |= (0x80 << 24);
11445 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
11446 WRITE_VREG(HEVC_DECODE_MODE2,
11447 hevc->rps_set_id);
11448}
11449
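/*
 * Protocol init for the decoder side: configure the work space and
 * decoder hardware, clear and enable the MBOX0 interrupt, disable
 * PSCALE, program NAL_SEARCH_CTL from PB_skip_mode and the debug flags
 * (manual NAL search, SEI/Dolby Vision parsing, no-EOS search), then
 * set up the decode mode and the aux buffers.
 */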
11450static void vh265_prot_init(struct hevc_state_s *hevc)
11451{
11452#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11453 struct vdec_s *vdec = hw_to_vdec(hevc);
11454#endif
11455 /* H265_DECODE_INIT(); */
11456
11457 hevc_config_work_space_hw(hevc);
11458
11459 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
11460
11461 WRITE_VREG(HEVC_WAIT_FLAG, 1);
11462
11463 /* WRITE_VREG(P_HEVC_MPSR, 1); */
11464
11465 /* clear mailbox interrupt */
11466 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
11467
11468 /* enable mailbox interrupt */
11469 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
11470
11471 /* disable PSCALE for hardware sharing */
11472 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
11473
11474 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
11475
11476 if ((get_dbg_flag(hevc) &
11477 (H265_DEBUG_MAN_SKIP_NAL |
11478 H265_DEBUG_MAN_SEARCH_NAL))
11479 /*||hevc->m_ins_flag*/
11480 ) {
11481 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
11482 } else {
11483 /* check vps/sps/pps/i-slice in ucode */
11484 unsigned ctl_val = 0x8;
11485 if (hevc->PB_skip_mode == 0)
11486 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
11487 else if (hevc->PB_skip_mode == 3)
11488 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
11489 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
11490 }
11491 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
11492#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11493 || vdec->master
11494 || vdec->slave
11495#endif
11496 )
11497 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
11498
11499 WRITE_VREG(NAL_SEARCH_CTL,
11500 READ_VREG(NAL_SEARCH_CTL)
11501 | ((parser_sei_enable & 0x7) << 17));
11502/*#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION*/
11503 WRITE_VREG(NAL_SEARCH_CTL,
11504 READ_VREG(NAL_SEARCH_CTL) |
11505 ((parser_dolby_vision_enable & 0x1) << 20));
11506/*#endif*/
11507 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
11508
11509 config_decode_mode(hevc);
11510 config_aux_buf(hevc);
11511#ifdef SWAP_HEVC_UCODE
11512 if (!tee_enabled() && hevc->is_swap &&
11513 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11514 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11515 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11516 }
11517#endif
11518#ifdef DETREFILL_ENABLE
11519 if (hevc->is_swap &&
11520 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11521 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
11522 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
11523 }
11524#endif
11525}
11526
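/*
 * Per-stream software init: reset the pts and duration bookkeeping,
 * reject oversized streams, derive is_4k and the i_only mode from the
 * decinfo parameters, pick a default frame_dur when no rate is given,
 * and (re)initialize the display/newframe/pending kfifos and the
 * vframe pool before calling hevc_local_init().
 */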
11527static int vh265_local_init(struct hevc_state_s *hevc)
11528{
11529 int i;
11530 int ret = -1;
11531
11532#ifdef DEBUG_PTS
11533 hevc->pts_missed = 0;
11534 hevc->pts_hit = 0;
11535#endif
11536 hevc->pts_lookup_margin = 0;
11537 hevc->pts_continue_miss = 0;
11538 hevc->min_pic_size = 0;
11539
11540 hevc->saved_resolution = 0;
11541 hevc->get_frame_dur = false;
11542 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
11543 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
11544 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
11545 pr_info("over size : %u x %u.\n",
11546 hevc->frame_width, hevc->frame_height);
11547 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
11548 return ret;
11549 }
11550
11551 if (hevc->max_pic_w && hevc->max_pic_h) {
11552 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
11553 ((hevc->max_pic_w * hevc->max_pic_h) >
11554 1920 * 1088) ? true : false;
11555 } else {
11556 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
11557 ((hevc->frame_width * hevc->frame_height) >
11558 1920 * 1088) ? true : false;
11559 }
11560
11561 hevc->frame_dur =
11562 (hevc->vh265_amstream_dec_info.rate ==
11563 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
11564 //hevc->gvs->frame_dur = hevc->frame_dur;
11565 if (hevc->frame_width && hevc->frame_height)
11566 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
11567
11568 if (i_only_flag)
11569 hevc->i_only = i_only_flag & 0xff;
11570 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
11571 & 0x08)
11572 hevc->i_only = 0x7;
11573 else
11574 hevc->i_only = 0x0;
11575 hevc->error_watchdog_count = 0;
11576 hevc->sei_present_flag = 0;
11577 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
11578 & 0x40) >> 6;
11579 hevc_print(hevc, 0,
11580 "h265:pts_unstable=%d\n", pts_unstable);
11581/*
11582 *TODO:FOR VERSION
11583 */
11584 hevc_print(hevc, 0,
11585 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
11586 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
11587
11588 if (hevc->frame_dur == 0)
11589 hevc->frame_dur = 96000 / 24;
11590
11591 INIT_KFIFO(hevc->display_q);
11592 INIT_KFIFO(hevc->newframe_q);
11593 INIT_KFIFO(hevc->pending_q);
11594
11595 for (i = 0; i < VF_POOL_SIZE; i++) {
11596 const struct vframe_s *vf = &hevc->vfpool[i];
11597
11598 hevc->vfpool[i].index = -1;
11599 kfifo_put(&hevc->newframe_q, vf);
11600 }
11601
11602
11603 ret = hevc_local_init(hevc);
11604
11605 return ret;
11606}
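/*
 * vh265_init(): load the firmware image that matches this SoC and mode
 * (MMU or non-MMU, optional "swap" ucode on GXM and older).  In the
 * multi-instance case the firmware is only stashed for later run()
 * calls; in the legacy single-instance path the ucode is started here
 * and the ISR, vframe provider and check timer are registered directly.
 */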
11607#ifdef MULTI_INSTANCE_SUPPORT
11608static s32 vh265_init(struct vdec_s *vdec)
11609{
11610 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
11611#else
11612static s32 vh265_init(struct hevc_state_s *hevc)
11613{
11614
11615#endif
11616 int ret, size = -1;
11617 int fw_size = 0x1000 * 16;
11618 struct firmware_s *fw = NULL;
11619
11620 init_timer(&hevc->timer);
11621
11622 hevc->stat |= STAT_TIMER_INIT;
11623
11624 if (hevc->m_ins_flag) {
11625#ifdef USE_UNINIT_SEMA
11626 sema_init(&hevc->h265_uninit_done_sema, 0);
11627#endif
11628 INIT_WORK(&hevc->work, vh265_work);
11629 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
11630 }
11631
11632 if (vh265_local_init(hevc) < 0)
11633 return -EBUSY;
11634
11635 mutex_init(&hevc->chunks_mutex);
11636 INIT_WORK(&hevc->notify_work, vh265_notify_work);
11637 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
11638
11639 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
11640 if (IS_ERR_OR_NULL(fw))
11641 return -ENOMEM;
11642
11643 if (hevc->mmu_enable)
11644 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11645 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
11646 else {
11647 if (!hevc->is_4k) {
11648 /* if an older version of the fw was loaded, */
11649				/* need to try to load the no-swap fw because the */
11650				/* old fw package does not contain the swap fw. */
11651 size = get_firmware_data(
11652 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
11653 if (size < 0)
11654 size = get_firmware_data(
11655 VIDEO_DEC_HEVC_MMU, fw->data);
11656 else if (size)
11657 hevc->is_swap = true;
11658 } else
11659 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
11660 fw->data);
11661 }
11662 else
11663 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
11664
11665 if (size < 0) {
11666 pr_err("get firmware fail.\n");
11667 vfree(fw);
11668 return -1;
11669 }
11670
11671 fw->len = size;
11672
11673#ifdef SWAP_HEVC_UCODE
11674 if (!tee_enabled() && hevc->is_swap &&
11675 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11676 if (hevc->mmu_enable) {
11677 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
11678 hevc->mc_cpu_addr =
11679 dma_alloc_coherent(amports_get_dma_device(),
11680 hevc->swap_size,
11681 &hevc->mc_dma_handle, GFP_KERNEL);
11682 if (!hevc->mc_cpu_addr) {
11683 amhevc_disable();
11684				pr_info("vh265 mmu swap ucode load failed.\n");
11685 return -ENOMEM;
11686 }
11687
11688 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
11689 hevc->swap_size);
11690
11691 hevc_print(hevc, 0,
11692 "vh265 mmu ucode swap loaded %x\n",
11693 hevc->mc_dma_handle);
11694 }
11695 }
11696#endif
11697
11698#ifdef MULTI_INSTANCE_SUPPORT
11699 if (hevc->m_ins_flag) {
11700 hevc->timer.data = (ulong) hevc;
11701 hevc->timer.function = vh265_check_timer_func;
11702 hevc->timer.expires = jiffies + PUT_INTERVAL;
11703
11704 hevc->fw = fw;
11705 hevc->init_flag = 1;
11706
11707 return 0;
11708 }
11709#endif
11710 amhevc_enable();
11711
11712 if (hevc->mmu_enable)
11713 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11714 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
11715 else {
11716 if (!hevc->is_4k) {
11717 /* if an older version of the fw was loaded, */
11718				/* need to try to load the no-swap fw because the */
11719				/* old fw package does not contain the swap fw. */
11720 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11721 "hevc_mmu_swap", fw->data);
11722 if (ret < 0)
11723 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11724 "h265_mmu", fw->data);
11725 else
11726 hevc->is_swap = true;
11727 } else
11728 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11729 "h265_mmu", fw->data);
11730 }
11731 else
11732 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
11733
11734 if (ret < 0) {
11735 amhevc_disable();
11736 vfree(fw);
11737 pr_err("H265: the %s fw loading failed, err: %x\n",
11738 tee_enabled() ? "TEE" : "local", ret);
11739 return -EBUSY;
11740 }
11741
11742 vfree(fw);
11743
11744 hevc->stat |= STAT_MC_LOAD;
11745
11746#ifdef DETREFILL_ENABLE
11747 if (hevc->is_swap &&
11748 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11749 init_detrefill_buf(hevc);
11750#endif
11751 /* enable AMRISC side protocol */
11752 vh265_prot_init(hevc);
11753
11754 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
11755 vh265_isr_thread_fn,
11756 IRQF_ONESHOT,/*run thread on this irq disabled*/
11757 "vh265-irq", (void *)hevc)) {
11758 hevc_print(hevc, 0, "vh265 irq register error.\n");
11759 amhevc_disable();
11760 return -ENOENT;
11761 }
11762
11763 hevc->stat |= STAT_ISR_REG;
11764 hevc->provider_name = PROVIDER_NAME;
11765
11766#ifdef MULTI_INSTANCE_SUPPORT
11767 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
11768 &vh265_vf_provider, vdec);
11769 vf_reg_provider(&vh265_vf_prov);
11770 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
11771 NULL);
11772 if (hevc->frame_dur != 0) {
11773 if (!is_reset) {
11774 vf_notify_receiver(hevc->provider_name,
11775 VFRAME_EVENT_PROVIDER_FR_HINT,
11776 (void *)
11777 ((unsigned long)hevc->frame_dur));
11778 fr_hint_status = VDEC_HINTED;
11779 }
11780 } else
11781 fr_hint_status = VDEC_NEED_HINT;
11782#else
11783 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
11784 hevc);
11785 vf_reg_provider(&vh265_vf_prov);
11786 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
11787 if (hevc->frame_dur != 0) {
11788 vf_notify_receiver(PROVIDER_NAME,
11789 VFRAME_EVENT_PROVIDER_FR_HINT,
11790 (void *)
11791 ((unsigned long)hevc->frame_dur));
11792 fr_hint_status = VDEC_HINTED;
11793 } else
11794 fr_hint_status = VDEC_NEED_HINT;
11795#endif
11796 hevc->stat |= STAT_VF_HOOK;
11797
11798 hevc->timer.data = (ulong) hevc;
11799 hevc->timer.function = vh265_check_timer_func;
11800 hevc->timer.expires = jiffies + PUT_INTERVAL;
11801
11802 add_timer(&hevc->timer);
11803
11804 hevc->stat |= STAT_TIMER_ARM;
11805
11806 if (use_cma) {
11807#ifdef USE_UNINIT_SEMA
11808 sema_init(&hevc->h265_uninit_done_sema, 0);
11809#endif
11810 if (h265_task == NULL) {
11811 sema_init(&h265_sema, 1);
11812 h265_task =
11813 kthread_run(h265_task_handle, hevc,
11814 "kthread_h265");
11815 }
11816 }
11817 /* hevc->stat |= STAT_KTHREAD; */
11818#if 0
11819 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11820 hevc_print(hevc, 0, "%s force clk\n", __func__);
11821 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11822 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11823 ((1 << 2) | (1 << 1)));
11824 WRITE_VREG(HEVC_DBLK_CFG0,
11825 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11826 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11827 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11828 (1 << 2)); /* 2 */
11829 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11830 (1 << 24)); /* 24 */
11831 WRITE_VREG(HEVC_STREAM_CONTROL,
11832 READ_VREG(HEVC_STREAM_CONTROL) |
11833 (1 << 15)); /* 15 */
11834 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11835 (1 << 13)); /* 13 */
11836 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11837 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11838 (1 << 15)); /* 15 */
11839 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11840 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11841 (1 << 15)); /* 15 */
11842 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11843 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11844 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11845 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11846 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11847 (1 << 3)); /* 3 */
11848 }
11849#endif
11850#ifdef SWAP_HEVC_UCODE
11851 if (!tee_enabled() && hevc->is_swap &&
11852 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11853 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11854 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11855 }
11856#endif
11857
11858#ifndef MULTI_INSTANCE_SUPPORT
11859 set_vdec_func(&vh265_dec_status);
11860#endif
11861 amhevc_start();
11862 hevc->stat |= STAT_VDEC_RUN;
11863 hevc->init_flag = 1;
11864 error_handle_threshold = 30;
11865 /* pr_info("%d, vh265_init, RP=0x%x\n",
11866 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11867 */
11868
11869 return 0;
11870}
11871
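/*
 * Tear-down for the single-instance path: optionally wait for the
 * current slice to finish, stop the ucode, free the IRQ, delete the
 * timer, unregister the vframe provider, wake the uninit thread when
 * CMA is used, and release the MMU buffers and the status structure.
 */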
11872static int vh265_stop(struct hevc_state_s *hevc)
11873{
11874 if (get_dbg_flag(hevc) &
11875 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11876 int wait_timeout_count = 0;
11877
11878 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11879 HEVC_CODED_SLICE_SEGMENT_DAT &&
11880 wait_timeout_count < 10){
11881 wait_timeout_count++;
11882 msleep(20);
11883 }
11884 }
11885 if (hevc->stat & STAT_VDEC_RUN) {
11886 amhevc_stop();
11887 hevc->stat &= ~STAT_VDEC_RUN;
11888 }
11889
11890 if (hevc->stat & STAT_ISR_REG) {
11891#ifdef MULTI_INSTANCE_SUPPORT
11892 if (!hevc->m_ins_flag)
11893#endif
11894 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11895 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11896 hevc->stat &= ~STAT_ISR_REG;
11897 }
11898
11899 hevc->stat &= ~STAT_TIMER_INIT;
11900 if (hevc->stat & STAT_TIMER_ARM) {
11901 del_timer_sync(&hevc->timer);
11902 hevc->stat &= ~STAT_TIMER_ARM;
11903 }
11904
11905 if (hevc->stat & STAT_VF_HOOK) {
11906 if (fr_hint_status == VDEC_HINTED) {
11907 vf_notify_receiver(hevc->provider_name,
11908 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11909 NULL);
11910 }
11911 fr_hint_status = VDEC_NO_NEED_HINT;
11912 vf_unreg_provider(&vh265_vf_prov);
11913 hevc->stat &= ~STAT_VF_HOOK;
11914 }
11915
11916 hevc_local_uninit(hevc);
11917
11918 if (use_cma) {
11919 hevc->uninit_list = 1;
11920 up(&h265_sema);
11921#ifdef USE_UNINIT_SEMA
11922 down(&hevc->h265_uninit_done_sema);
11923 if (!IS_ERR(h265_task)) {
11924 kthread_stop(h265_task);
11925 h265_task = NULL;
11926 }
11927#else
11928 while (hevc->uninit_list) /* wait uninit complete */
11929 msleep(20);
11930#endif
11931
11932 }
11933 hevc->init_flag = 0;
11934 hevc->first_sc_checked = 0;
11935 cancel_work_sync(&hevc->notify_work);
11936 cancel_work_sync(&hevc->set_clk_work);
11937 uninit_mmu_buffers(hevc);
11938 amhevc_disable();
11939
11940 //pr_err("[%s line %d] hevc->gvs=0x%p operation\n",__func__, __LINE__, hevc->gvs);
11941 if (hevc->gvs)
11942 kfree(hevc->gvs);
11943 hevc->gvs = NULL;
11944
11945 return 0;
11946}
11947
11948#ifdef MULTI_INSTANCE_SUPPORT
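/*
 * Decode watchdog bookkeeping: start_process_time() and
 * restart_process_time() record the run start time (jiffies) and re-arm
 * decode_timeout_count, while reset_process_time() clears the start
 * time and keeps the longest observed decode duration per instance.
 */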
11949static void reset_process_time(struct hevc_state_s *hevc)
11950{
11951 if (hevc->start_process_time) {
11952 unsigned int process_time =
11953 1000 * (jiffies - hevc->start_process_time) / HZ;
11954 hevc->start_process_time = 0;
11955 if (process_time > max_process_time[hevc->index])
11956 max_process_time[hevc->index] = process_time;
11957 }
11958}
11959
11960static void start_process_time(struct hevc_state_s *hevc)
11961{
11962 hevc->start_process_time = jiffies;
11963 hevc->decode_timeout_count = 2;
11964 hevc->last_lcu_idx = 0;
11965}
11966
11967static void restart_process_time(struct hevc_state_s *hevc)
11968{
11969 hevc->start_process_time = jiffies;
11970 hevc->decode_timeout_count = 2;
11971}
11972
11973static void timeout_process(struct hevc_state_s *hevc)
11974{
11975 /*
11976	 * If the vh265_work arrives at this very timeout point,
11977	 * let it handle the scenario.
11978 */
11979 if (work_pending(&hevc->work) ||
11980 work_busy(&hevc->work) ||
11981 work_pending(&hevc->timeout_work) ||
11982 work_busy(&hevc->timeout_work))
11983 return;
11984
11985 hevc->timeout_num++;
11986 amhevc_stop();
11987 read_decode_info(hevc);
11988
11989 hevc_print(hevc,
11990 0, "%s decoder timeout\n", __func__);
11991 check_pic_decoded_error(hevc,
11992 hevc->pic_decoded_lcu_idx);
11993	/* The current decoded frame is marked as an
11994	 * error when the decode times out */
11995 if (hevc->cur_pic != NULL)
11996 hevc->cur_pic->error_mark = 1;
11997 hevc->decoded_poc = hevc->curr_POC;
11998 hevc->decoding_pic = NULL;
11999 hevc->dec_result = DEC_RESULT_DONE;
12000 reset_process_time(hevc);
12001
12002 if (work_pending(&hevc->work))
12003 return;
12004 vdec_schedule_work(&hevc->timeout_work);
12005}
12006
12007#ifdef CONSTRAIN_MAX_BUF_NUM
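/*
 * Buffer accounting used by run_ready(): count the pictures that are
 * held only by the display path (output_ready, no reference, no pending
 * output) and the pictures that are still in use for any reason, so
 * scheduling can be throttled when too many buffers are tied up.
 */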
12008static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
12009{
12010 struct PIC_s *pic;
12011 int i;
12012 int count = 0;
12013 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12014 pic = hevc->m_PIC[i];
12015 if (pic == NULL || pic->index == -1)
12016 continue;
12017 if (pic->output_mark == 0 && pic->referenced == 0
12018 && pic->output_ready == 1)
12019 count++;
12020 }
12021
12022 return count;
12023}
12024
12025static int get_used_buf_count(struct hevc_state_s *hevc)
12026{
12027 struct PIC_s *pic;
12028 int i;
12029 int count = 0;
12030 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12031 pic = hevc->m_PIC[i];
12032 if (pic == NULL || pic->index == -1)
12033 continue;
12034 if (pic->output_mark != 0 || pic->referenced != 0
12035 || pic->output_ready != 0)
12036 count++;
12037 }
12038
12039 return count;
12040}
12041#endif
12042
12043
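/*
 * Look for a free DPB slot: pick the un-referenced, un-output picture
 * with the smallest POC.  If none exists and the receiver is inactive,
 * force release the lowest-POC error picture still marked for output,
 * or as a last resort flush around a referenced error picture, so a
 * broken DPB cannot stall scheduling forever.
 */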
12044static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
12045{
12046 struct PIC_s *new_pic = NULL;
12047 struct PIC_s *pic;
12048 /* recycle un-used pic */
12049 int i;
12050 int ref_pic = 0;
12051 struct vdec_s *vdec = hw_to_vdec(hevc);
12052 /*return 1 if pic_list is not initialized yet*/
12053 if (hevc->pic_list_init_flag != 3)
12054 return 1;
12055
12056 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12057 pic = hevc->m_PIC[i];
12058 if (pic == NULL || pic->index == -1)
12059 continue;
12060 if (pic->referenced == 1)
12061 ref_pic++;
12062 if (pic->output_mark == 0 && pic->referenced == 0
12063 && pic->output_ready == 0
12064 && pic->vf_ref == 0
12065 ) {
12066 if (new_pic) {
12067 if (pic->POC < new_pic->POC)
12068 new_pic = pic;
12069 } else
12070 new_pic = pic;
12071 }
12072 }
12073 if (new_pic == NULL) {
12074 enum receviver_start_e state = RECEIVER_INACTIVE;
12075 if (vf_get_receiver(vdec->vf_provider_name)) {
12076 state =
12077 vf_notify_receiver(vdec->vf_provider_name,
12078 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12079 NULL);
12080 if ((state == RECEIVER_STATE_NULL)
12081 || (state == RECEIVER_STATE_NONE))
12082 state = RECEIVER_INACTIVE;
12083 }
12084 if (state == RECEIVER_INACTIVE) {
12085			int poc = INVALID_POC;
12086			for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12087 pic = hevc->m_PIC[i];
12088 if (pic == NULL || pic->index == -1)
12089 continue;
12090 if ((pic->referenced == 0) &&
12091 (pic->error_mark == 1) &&
12092 (pic->output_mark == 1)) {
12093 if (poc == INVALID_POC || (pic->POC < poc)) {
12094 new_pic = pic;
12095 poc = pic->POC;
12096 }
12097 }
12098 }
12099 if (new_pic) {
12100 new_pic->referenced = 0;
12101 new_pic->output_mark = 0;
12102 put_mv_buf(hevc, new_pic);
12103				hevc_print(hevc, 0, "force release error pic %d receive_state %d\n", new_pic->POC, state);
12104 } else {
12105 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12106 pic = hevc->m_PIC[i];
12107 if (pic == NULL || pic->index == -1)
12108 continue;
12109 if ((pic->referenced == 1) && (pic->error_mark == 1)) {
12110 flush_output(hevc, pic);
12111						hevc_print(hevc, 0, "DPB error, need force flush, receive_state %d\n", state);
12112 break;
12113 }
12114 }
12115 }
12116 }
12117 }
12118 return (new_pic != NULL) ? 1 : 0;
12119}
12120
12121static int vmh265_stop(struct hevc_state_s *hevc)
12122{
12123 if (hevc->stat & STAT_TIMER_ARM) {
12124 del_timer_sync(&hevc->timer);
12125 hevc->stat &= ~STAT_TIMER_ARM;
12126 }
12127 if (hevc->stat & STAT_VDEC_RUN) {
12128 amhevc_stop();
12129 hevc->stat &= ~STAT_VDEC_RUN;
12130 }
12131 if (hevc->stat & STAT_ISR_REG) {
12132 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
12133 hevc->stat &= ~STAT_ISR_REG;
12134 }
12135
12136 if (hevc->stat & STAT_VF_HOOK) {
12137 if (fr_hint_status == VDEC_HINTED)
12138 vf_notify_receiver(hevc->provider_name,
12139 VFRAME_EVENT_PROVIDER_FR_END_HINT,
12140 NULL);
12141 fr_hint_status = VDEC_NO_NEED_HINT;
12142 vf_unreg_provider(&vh265_vf_prov);
12143 hevc->stat &= ~STAT_VF_HOOK;
12144 }
12145
12146 hevc_local_uninit(hevc);
12147
12148 if (hevc->gvs)
12149 kfree(hevc->gvs);
12150 hevc->gvs = NULL;
12151
12152 if (use_cma) {
12153 hevc->uninit_list = 1;
12154 reset_process_time(hevc);
12155 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
12156 vdec_schedule_work(&hevc->work);
12157 flush_work(&hevc->work);
12158#ifdef USE_UNINIT_SEMA
12159 if (hevc->init_flag) {
12160 down(&hevc->h265_uninit_done_sema);
12161 }
12162#else
12163 while (hevc->uninit_list) /* wait uninit complete */
12164 msleep(20);
12165#endif
12166 }
12167 hevc->init_flag = 0;
12168 hevc->first_sc_checked = 0;
12169 cancel_work_sync(&hevc->notify_work);
12170 cancel_work_sync(&hevc->set_clk_work);
12171 cancel_work_sync(&hevc->timeout_work);
12172 cancel_work_sync(&hevc->work);
12173 uninit_mmu_buffers(hevc);
12174
12175 vfree(hevc->fw);
12176 hevc->fw = NULL;
12177
12178 dump_log(hevc);
12179 return 0;
12180}
12181
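/*
 * Debug helper: byte-sum the current input chunk (mapping it first when
 * the block is not permanently mapped) so the checksum can be printed in
 * the frame-based input traces.  Only the low 8 bits survive the
 * unsigned char return type.
 */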
12182static unsigned char get_data_check_sum
12183 (struct hevc_state_s *hevc, int size)
12184{
12185 int jj;
12186 int sum = 0;
12187 u8 *data = NULL;
12188
12189 if (!hevc->chunk->block->is_mapped)
12190 data = codec_mm_vmap(hevc->chunk->block->start +
12191 hevc->chunk->offset, size);
12192 else
12193 data = ((u8 *)hevc->chunk->block->start_virt) +
12194 hevc->chunk->offset;
12195
12196 for (jj = 0; jj < size; jj++)
12197 sum += data[jj];
12198
12199 if (!hevc->chunk->block->is_mapped)
12200 codec_mm_unmap_phyaddr(data);
12201 return sum;
12202}
12203
12204static void vh265_notify_work(struct work_struct *work)
12205{
12206 struct hevc_state_s *hevc =
12207 container_of(work,
12208 struct hevc_state_s,
12209 notify_work);
12210 struct vdec_s *vdec = hw_to_vdec(hevc);
12211#ifdef MULTI_INSTANCE_SUPPORT
12212 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
12213 vf_notify_receiver(hevc->provider_name,
12214 VFRAME_EVENT_PROVIDER_FR_HINT,
12215 (void *)
12216 ((unsigned long)hevc->frame_dur));
12217 vdec->fr_hint_state = VDEC_HINTED;
12218 } else if (fr_hint_status == VDEC_NEED_HINT) {
12219 vf_notify_receiver(hevc->provider_name,
12220 VFRAME_EVENT_PROVIDER_FR_HINT,
12221 (void *)
12222 ((unsigned long)hevc->frame_dur));
12223 fr_hint_status = VDEC_HINTED;
12224 }
12225#else
12226	if (fr_hint_status == VDEC_NEED_HINT) {
12227 vf_notify_receiver(PROVIDER_NAME,
12228 VFRAME_EVENT_PROVIDER_FR_HINT,
12229 (void *)
12230 ((unsigned long)hevc->frame_dur));
12231 fr_hint_status = VDEC_HINTED;
12232 }
12233#endif
12234
12235 return;
12236}
12237
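/*
 * Bottom half shared by the normal and the timeout work items: finish
 * the pending picture-list init handshake if needed, then act on
 * dec_result (GET_DATA/GET_DATA_RETRY, DONE, AGAIN, EOS, FORCE_EXIT),
 * stop the ucode and timer, hand the HEVC core(s) back to the vdec
 * scheduler and invoke the completion callback.
 */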
12238static void vh265_work_implement(struct hevc_state_s *hevc,
12239 struct vdec_s *vdec,int from)
12240{
12241 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
12242 /*USE_BUF_BLOCK*/
12243 uninit_pic_list(hevc);
12244 hevc->uninit_list = 0;
12245#ifdef USE_UNINIT_SEMA
12246 up(&hevc->h265_uninit_done_sema);
12247#endif
12248 return;
12249 }
12250
12251 /* finished decoding one frame or error,
12252 * notify vdec core to switch context
12253 */
12254 if (hevc->pic_list_init_flag == 1
12255 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
12256 hevc->pic_list_init_flag = 2;
12257 init_pic_list(hevc);
12258 init_pic_list_hw(hevc);
12259 init_buf_spec(hevc);
12260 hevc_print(hevc, 0,
12261 "set pic_list_init_flag to 2\n");
12262
12263 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
12264 return;
12265 }
12266
12267 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12268 "%s dec_result %d %x %x %x\n",
12269 __func__,
12270 hevc->dec_result,
12271 READ_VREG(HEVC_STREAM_LEVEL),
12272 READ_VREG(HEVC_STREAM_WR_PTR),
12273 READ_VREG(HEVC_STREAM_RD_PTR));
12274
12275 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
12276 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
12277 && (hw_to_vdec(hevc)->next_status !=
12278 VDEC_STATUS_DISCONNECTED)) {
12279 if (!vdec_has_more_input(vdec)) {
12280 hevc->dec_result = DEC_RESULT_EOS;
12281 vdec_schedule_work(&hevc->work);
12282 return;
12283 }
12284 if (!input_frame_based(vdec)) {
12285 int r = vdec_sync_input(vdec);
12286 if (r >= 0x200) {
12287 WRITE_VREG(HEVC_DECODE_SIZE,
12288 READ_VREG(HEVC_DECODE_SIZE) + r);
12289
12290 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12291 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
12292 __func__,
12293 READ_VREG(HEVC_STREAM_LEVEL),
12294 READ_VREG(HEVC_STREAM_WR_PTR),
12295 READ_VREG(HEVC_STREAM_RD_PTR),
12296 READ_VREG(HEVC_MPC_E), r);
12297
12298 start_process_time(hevc);
12299 if (READ_VREG(HEVC_DEC_STATUS_REG)
12300 == HEVC_DECODE_BUFEMPTY2)
12301 WRITE_VREG(HEVC_DEC_STATUS_REG,
12302 HEVC_ACTION_DONE);
12303 else
12304 WRITE_VREG(HEVC_DEC_STATUS_REG,
12305 HEVC_ACTION_DEC_CONT);
12306 } else {
12307 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
12308 vdec_schedule_work(&hevc->work);
12309 }
12310 return;
12311 }
12312
12313 /*below for frame_base*/
12314 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
12315 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12316 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
12317 __func__,
12318 READ_VREG(HEVC_STREAM_LEVEL),
12319 READ_VREG(HEVC_STREAM_WR_PTR),
12320 READ_VREG(HEVC_STREAM_RD_PTR),
12321 READ_VREG(HEVC_MPC_E));
12322 mutex_lock(&hevc->chunks_mutex);
12323 vdec_vframe_dirty(vdec, hevc->chunk);
12324 hevc->chunk = NULL;
12325 mutex_unlock(&hevc->chunks_mutex);
12326 vdec_clean_input(vdec);
12327 }
12328
12329 /*if (is_new_pic_available(hevc)) {*/
12330 if (run_ready(vdec, VDEC_HEVC)) {
12331 int r;
12332 int decode_size;
12333 r = vdec_prepare_input(vdec, &hevc->chunk);
12334 if (r < 0) {
12335 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
12336
12337 hevc_print(hevc,
12338 PRINT_FLAG_VDEC_DETAIL,
12339 "amvdec_vh265: Insufficient data\n");
12340
12341 vdec_schedule_work(&hevc->work);
12342 return;
12343 }
12344 hevc->dec_result = DEC_RESULT_NONE;
12345 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12346 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
12347 __func__, r,
12348 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
12349 get_data_check_sum(hevc, r) : 0,
12350 READ_VREG(HEVC_MPC_E));
12351
12352 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
12353 int jj;
12354 u8 *data = NULL;
12355
12356 if (!hevc->chunk->block->is_mapped)
12357 data = codec_mm_vmap(
12358 hevc->chunk->block->start +
12359 hevc->chunk->offset, r);
12360 else
12361 data = ((u8 *)
12362 hevc->chunk->block->start_virt)
12363 + hevc->chunk->offset;
12364
12365 for (jj = 0; jj < r; jj++) {
12366 if ((jj & 0xf) == 0)
12367 hevc_print(hevc,
12368 PRINT_FRAMEBASE_DATA,
12369 "%06x:", jj);
12370 hevc_print_cont(hevc,
12371 PRINT_FRAMEBASE_DATA,
12372 "%02x ", data[jj]);
12373 if (((jj + 1) & 0xf) == 0)
12374 hevc_print_cont(hevc,
12375 PRINT_FRAMEBASE_DATA,
12376 "\n");
12377 }
12378
12379 if (!hevc->chunk->block->is_mapped)
12380 codec_mm_unmap_phyaddr(data);
12381 }
12382
12383 decode_size = hevc->chunk->size +
12384 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12385 WRITE_VREG(HEVC_DECODE_SIZE,
12386 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
12387
12388 vdec_enable_input(vdec);
12389
12390 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12391 "%s: mpc %x\n",
12392 __func__, READ_VREG(HEVC_MPC_E));
12393
12394 start_process_time(hevc);
12395 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12396 } else{
12397 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
12398
12399 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12400 * "amvdec_vh265: Insufficient data\n");
12401 */
12402
12403 vdec_schedule_work(&hevc->work);
12404 }
12405 return;
12406 } else if (hevc->dec_result == DEC_RESULT_DONE) {
12407 /* if (!hevc->ctx_valid)
12408 hevc->ctx_valid = 1; */
12409 int i;
12410 decode_frame_count[hevc->index]++;
12411#ifdef DETREFILL_ENABLE
12412 if (hevc->is_swap &&
12413 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
12414 if (hevc->delrefill_check == 2) {
12415 delrefill(hevc);
12416 amhevc_stop();
12417 }
12418 }
12419#endif
12420 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
12421 hevc->used_4k_num =
12422 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
12423 if (hevc->used_4k_num >= 0 &&
12424 hevc->cur_pic &&
12425 hevc->cur_pic->scatter_alloc
12426 == 1) {
12427 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
12428 "%s pic index %d scatter_alloc %d page_start %d\n",
12429 "decoder_mmu_box_free_idx_tail",
12430 hevc->cur_pic->index,
12431 hevc->cur_pic->scatter_alloc,
12432 hevc->used_4k_num);
12433 if (hevc->m_ins_flag)
12434 hevc_mmu_dma_check(hw_to_vdec(hevc));
12435 decoder_mmu_box_free_idx_tail(
12436 hevc->mmu_box,
12437 hevc->cur_pic->index,
12438 hevc->used_4k_num);
12439 hevc->cur_pic->scatter_alloc = 2;
12440 }
12441 }
12442 hevc->pic_decoded_lcu_idx =
12443 READ_VREG(HEVC_PARSER_LCU_START)
12444 & 0xffffff;
12445
12446 if ((!vdec_dual(vdec)) &&
12447 hevc->empty_flag == 0) {
12448 hevc->over_decode =
12449 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
12450 if (hevc->over_decode)
12451 hevc_print(hevc, 0,
12452 "!!!Over decode\n");
12453 }
12454
12455 if (is_log_enable(hevc))
12456 add_log(hevc,
12457 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
12458 __func__,
12459 hevc->dec_result,
12460 hevc->pic_decoded_lcu_idx,
12461 hevc->used_4k_num,
12462 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
12463 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
12464 hevc->start_shift_bytes
12465 );
12466
12467 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12468 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
12469 __func__,
12470 hevc->dec_result,
12471 READ_VREG(HEVC_STREAM_LEVEL),
12472 READ_VREG(HEVC_STREAM_WR_PTR),
12473 READ_VREG(HEVC_STREAM_RD_PTR),
12474 hevc->pic_decoded_lcu_idx,
12475 hevc->used_4k_num,
12476 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
12477 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
12478 hevc->start_shift_bytes
12479 );
12480
12481 hevc->used_4k_num = -1;
12482
12483 check_pic_decoded_error(hevc,
12484 hevc->pic_decoded_lcu_idx);
12485 if ((error_handle_policy & 0x100) == 0 && hevc->cur_pic) {
12486 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12487 struct PIC_s *pic;
12488 pic = hevc->m_PIC[i];
12489 if (!pic || pic->index == -1)
12490 continue;
12491 if ((hevc->cur_pic->POC + poc_num_margin < pic->POC) && (pic->referenced == 0) &&
12492 (pic->output_mark == 1) && (pic->output_ready == 0)) {
12493 hevc->poc_error_count++;
12494 break;
12495 }
12496 }
12497 if (i == MAX_REF_PIC_NUM)
12498 hevc->poc_error_count = 0;
12499 if (hevc->poc_error_count >= poc_error_limit) {
12500 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12501 struct PIC_s *pic;
12502 pic = hevc->m_PIC[i];
12503 if (!pic || pic->index == -1)
12504 continue;
12505 if ((hevc->cur_pic->POC + poc_num_margin < pic->POC) && (pic->referenced == 0) &&
12506 (pic->output_mark == 1) && (pic->output_ready == 0)) {
12507 pic->output_mark = 0;
12508 hevc_print(hevc, 0, "DPB poc error, remove error frame\n");
12509 }
12510 }
12511 }
12512 }
12513
12514#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12515#if 1
12516 if (vdec->slave) {
12517 if (dv_debug & 0x1)
12518 vdec_set_flag(vdec->slave,
12519 VDEC_FLAG_SELF_INPUT_CONTEXT);
12520 else
12521 vdec_set_flag(vdec->slave,
12522 VDEC_FLAG_OTHER_INPUT_CONTEXT);
12523 }
12524#else
12525 if (vdec->slave) {
12526 if (no_interleaved_el_slice)
12527 vdec_set_flag(vdec->slave,
12528 VDEC_FLAG_INPUT_KEEP_CONTEXT);
12529 /* this will move real HW pointer for input */
12530 else
12531 vdec_set_flag(vdec->slave, 0);
12532 /* this will not move real HW pointer
12533 *and SL layer decoding
12534 *will start from same stream position
12535 *as current BL decoder
12536 */
12537 }
12538#endif
12539#endif
12540#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12541 hevc->shift_byte_count_lo
12542 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12543 if (vdec->slave) {
12544 /*cur is base, found enhance*/
12545 struct hevc_state_s *hevc_el =
12546 (struct hevc_state_s *)
12547 vdec->slave->private;
12548 if (hevc_el)
12549 hevc_el->shift_byte_count_lo =
12550 hevc->shift_byte_count_lo;
12551 } else if (vdec->master) {
12552 /*cur is enhance, found base*/
12553 struct hevc_state_s *hevc_ba =
12554 (struct hevc_state_s *)
12555 vdec->master->private;
12556 if (hevc_ba)
12557 hevc_ba->shift_byte_count_lo =
12558 hevc->shift_byte_count_lo;
12559 }
12560#endif
12561 mutex_lock(&hevc->chunks_mutex);
12562 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12563 hevc->chunk = NULL;
12564 mutex_unlock(&hevc->chunks_mutex);
12565 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
12566 /*
12567 stream base: stream buf empty or timeout
12568 frame base: vdec_prepare_input fail
12569 */
12570
12571 if (!vdec_has_more_input(vdec)) {
12572 hevc->dec_result = DEC_RESULT_EOS;
12573 vdec_schedule_work(&hevc->work);
12574 return;
12575 }
12576#ifdef AGAIN_HAS_THRESHOLD
12577 hevc->next_again_flag = 1;
12578#endif
12579 if (input_stream_based(vdec)) {
12580 u32 rp, wp, level;
12581 struct vdec_input_s *input = &vdec->input;
12582			rp = STBUF_READ(&vdec->vbuf, get_rp);
12583 wp = STBUF_READ(&vdec->vbuf, get_wp);
12584 if (wp < rp)
12585 level = input->size + wp - rp;
12586 else
12587 level = wp - rp;
12588 if ((level >= dirty_buffersize_threshold) &&
12589 (hevc->pre_parser_video_rp ==
12590 STBUF_READ(&vdec->vbuf, get_rp)) &&
12591 (hevc->pre_parser_video_wp ==
12592 STBUF_READ(&vdec->vbuf, get_wp))) {
12593 if (hevc->again_count == 0) {
12594 hevc->again_timeout_jiffies =
12595 get_jiffies_64() + dirty_time_threshold * HZ/1000;
12596 }
12597 hevc->again_count++;
12598 }
12599 else
12600 hevc->again_count = 0;
12601
12602			hevc->pre_parser_video_rp = STBUF_READ(&vdec->vbuf, get_rp);
12603 hevc->pre_parser_video_wp = STBUF_READ(&vdec->vbuf, get_wp);
12604
12605 if ((hevc->again_count > dirty_count_threshold) &&
12606 time_after64(get_jiffies_64(), hevc->again_timeout_jiffies)) {
12607 mutex_lock(&hevc->chunks_mutex);
12608 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12609 hevc->chunk = NULL;
12610 mutex_unlock(&hevc->chunks_mutex);
12611 }
12612 }
12613 } else if (hevc->dec_result == DEC_RESULT_EOS) {
12614 struct PIC_s *pic;
12615 hevc->eos = 1;
12616#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12617 if ((vdec_dual(vdec)) && aux_data_is_avaible(hevc))
12618 dolby_get_meta(hevc);
12619#endif
12620 check_pic_decoded_error(hevc,
12621 hevc->pic_decoded_lcu_idx);
12622 pic = get_pic_by_POC(hevc, hevc->curr_POC);
12623 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12624 "%s: end of stream, last dec poc %d => 0x%pf\n",
12625 __func__, hevc->curr_POC, pic);
12626 flush_output(hevc, pic);
12627 /* dummy vf with eos flag to backend */
12628 notify_v4l_eos(hw_to_vdec(hevc));
12629#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12630 hevc->shift_byte_count_lo
12631 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12632 if (vdec->slave) {
12633 /*cur is base, found enhance*/
12634 struct hevc_state_s *hevc_el =
12635 (struct hevc_state_s *)
12636 vdec->slave->private;
12637 if (hevc_el)
12638 hevc_el->shift_byte_count_lo =
12639 hevc->shift_byte_count_lo;
12640 } else if (vdec->master) {
12641 /*cur is enhance, found base*/
12642 struct hevc_state_s *hevc_ba =
12643 (struct hevc_state_s *)
12644 vdec->master->private;
12645 if (hevc_ba)
12646 hevc_ba->shift_byte_count_lo =
12647 hevc->shift_byte_count_lo;
12648 }
12649#endif
12650 mutex_lock(&hevc->chunks_mutex);
12651 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12652 hevc->chunk = NULL;
12653 mutex_unlock(&hevc->chunks_mutex);
12654 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
12655 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12656 "%s: force exit\n",
12657 __func__);
12658 if (hevc->stat & STAT_VDEC_RUN) {
12659 amhevc_stop();
12660 hevc->stat &= ~STAT_VDEC_RUN;
12661 }
12662 if (hevc->stat & STAT_ISR_REG) {
12663 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
12664 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
12665 hevc->stat &= ~STAT_ISR_REG;
12666 }
12667 hevc_print(hevc, 0, "%s: force exit end\n",
12668 __func__);
12669 }
12670
12671 if (hevc->stat & STAT_VDEC_RUN) {
12672 amhevc_stop();
12673 hevc->stat &= ~STAT_VDEC_RUN;
12674 }
12675
12676 if (hevc->stat & STAT_TIMER_ARM) {
12677 del_timer_sync(&hevc->timer);
12678 hevc->stat &= ~STAT_TIMER_ARM;
12679 }
12680
12681 wait_hevc_search_done(hevc);
12682#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12683 if (hevc->switch_dvlayer_flag) {
12684 if (vdec->slave)
12685 vdec_set_next_sched(vdec, vdec->slave);
12686 else if (vdec->master)
12687 vdec_set_next_sched(vdec, vdec->master);
12688 } else if (vdec->slave || vdec->master)
12689 vdec_set_next_sched(vdec, vdec);
12690#endif
12691
12692 if (from == 1) {
12693 /* This is a timeout work */
12694 if (work_pending(&hevc->work)) {
12695 /*
12696 * The vh265_work arrives at the last second,
12697 * give it a chance to handle the scenario.
12698 */
12699 return;
12700			//cancel_work_sync(&hevc->work);//reserved for future consideration
12701 }
12702 }
12703
12704 /* mark itself has all HW resource released and input released */
12705 if (vdec->parallel_dec == 1)
12706 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
12707 else
12708 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12709
12710 if (hevc->is_used_v4l) {
12711 struct aml_vcodec_ctx *ctx =
12712 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12713
12714 if (ctx->param_sets_from_ucode &&
12715 !hevc->v4l_params_parsed)
12716 vdec_v4l_write_frame_sync(ctx);
12717 }
12718
12719 if (hevc->vdec_cb)
12720 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
12721}
12722
12723static void vh265_work(struct work_struct *work)
12724{
12725 struct hevc_state_s *hevc = container_of(work,
12726 struct hevc_state_s, work);
12727 struct vdec_s *vdec = hw_to_vdec(hevc);
12728
12729 vh265_work_implement(hevc, vdec, 0);
12730}
12731
12732static void vh265_timeout_work(struct work_struct *work)
12733{
12734 struct hevc_state_s *hevc = container_of(work,
12735 struct hevc_state_s, timeout_work);
12736 struct vdec_s *vdec = hw_to_vdec(hevc);
12737
12738 if (work_pending(&hevc->work))
12739 return;
12740 vh265_work_implement(hevc, vdec, 1);
12741}
12742
12743
12744static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
12745{
12746 /* new to do ... */
12747 vh265_prot_init(hevc);
12748 return 0;
12749}
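/*
 * Scheduler gate: return the core mask this instance may run on, or 0
 * to stay idle.  The checks cover single-step debugging, fatal errors,
 * EOS, the pre-decode stream level, the "again" write-pointer threshold,
 * the display-queue valve, DPB availability, the CONSTRAIN_MAX_BUF_NUM
 * limits and, for v4l2 instances, whether enough capture buffers are
 * ready.
 */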
12750static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
12751{
12752 struct hevc_state_s *hevc =
12753 (struct hevc_state_s *)vdec->private;
12754 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
12755 CODEC_MM_FLAGS_TVP : 0;
12756 bool ret = 0;
12757 if (step == 0x12)
12758 return 0;
12759 else if (step == 0x11)
12760 step = 0x12;
12761
12762 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
12763 return 0;
12764
12765 if (hevc->eos)
12766 return 0;
12767 if (!hevc->first_sc_checked && hevc->mmu_enable) {
12768 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
12769		hevc->first_sc_checked = 1;
12770 hevc_print(hevc, 0,
12771 "vh265 cached=%d need_size=%d speed= %d ms\n",
12772 size, (hevc->need_cache_size >> PAGE_SHIFT),
12773 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
12774 }
12775 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
12776 && pre_decode_buf_level != 0) {
12777 u32 rp, wp, level;
12778
12779 rp = STBUF_READ(&vdec->vbuf, get_rp);
12780 wp = STBUF_READ(&vdec->vbuf, get_wp);
12781 if (wp < rp)
12782 level = vdec->input.size + wp - rp;
12783 else
12784 level = wp - rp;
12785
12786 if (level < pre_decode_buf_level)
12787 return 0;
12788 }
12789
12790#ifdef AGAIN_HAS_THRESHOLD
12791 if (hevc->next_again_flag &&
12792 (!vdec_frame_based(vdec))) {
12793 u32 parser_wr_ptr =
12794 STBUF_READ(&vdec->vbuf, get_wp);
12795 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
12796 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
12797 again_threshold) {
12798 int r = vdec_sync_input(vdec);
12799 hevc_print(hevc,
12800			PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
12801 return 0;
12802 }
12803 }
12804#endif
12805
12806 if (disp_vframe_valve_level &&
12807 kfifo_len(&hevc->display_q) >=
12808 disp_vframe_valve_level) {
12809 hevc->valve_count--;
12810 if (hevc->valve_count <= 0)
12811 hevc->valve_count = 2;
12812 else
12813 return 0;
12814 }
12815
12816 ret = is_new_pic_available(hevc);
12817 if (!ret) {
12818 hevc_print(hevc,
12819 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
12820 __func__, ret);
12821 }
12822
12823#ifdef CONSTRAIN_MAX_BUF_NUM
12824 if (hevc->pic_list_init_flag == 3) {
12825 if (run_ready_max_vf_only_num > 0 &&
12826 get_vf_ref_only_buf_count(hevc) >=
12827 run_ready_max_vf_only_num
12828 )
12829 ret = 0;
12830 if (run_ready_display_q_num > 0 &&
12831 kfifo_len(&hevc->display_q) >=
12832 run_ready_display_q_num)
12833 ret = 0;
12834
12835 /*avoid more buffers consumed when
12836 switching resolution*/
12837 if (run_ready_max_buf_num == 0xff &&
12838 get_used_buf_count(hevc) >=
12839 get_work_pic_num(hevc))
12840 ret = 0;
12841 else if (run_ready_max_buf_num &&
12842 get_used_buf_count(hevc) >=
12843 run_ready_max_buf_num)
12844 ret = 0;
12845 }
12846#endif
12847
12848 if (hevc->is_used_v4l) {
12849 struct aml_vcodec_ctx *ctx =
12850 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12851
12852 if (ctx->param_sets_from_ucode) {
12853 if (hevc->v4l_params_parsed) {
12854 if (!ctx->v4l_codec_dpb_ready &&
12855 v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
12856 run_ready_min_buf_num)
12857 ret = 0;
12858 } else {
12859 if ((hevc->res_ch_flag == 1) &&
12860 ((ctx->state <= AML_STATE_INIT) ||
12861 (ctx->state >= AML_STATE_FLUSHING)))
12862 ret = 0;
12863 }
12864 } else if (!ctx->v4l_codec_dpb_ready) {
12865 if (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
12866 run_ready_min_buf_num)
12867 ret = 0;
12868 }
12869 }
12870
12871 if (ret)
12872 not_run_ready[hevc->index] = 0;
12873 else
12874 not_run_ready[hevc->index]++;
12875 if (vdec->parallel_dec == 1)
12876 return ret ? (CORE_MASK_HEVC) : 0;
12877 else
12878 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
12879}
12880
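/*
 * run(): called once run_ready() has granted the HEVC core.  Prepare
 * the next input chunk, reload the firmware unless vdec->mc_loaded says
 * it is already on the core, restore the hardware context through
 * vh265_prot_init(), program HEVC_DECODE_SIZE, then arm the watchdog
 * timer and start the ucode.
 */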
12881static void run(struct vdec_s *vdec, unsigned long mask,
12882 void (*callback)(struct vdec_s *, void *), void *arg)
12883{
12884 struct hevc_state_s *hevc =
12885 (struct hevc_state_s *)vdec->private;
12886 int r, loadr = 0;
12887 unsigned char check_sum = 0;
12888
12889 run_count[hevc->index]++;
12890 hevc->vdec_cb_arg = arg;
12891 hevc->vdec_cb = callback;
12892 hevc->aux_data_dirty = 1;
12893 hevc_reset_core(vdec);
12894
12895#ifdef AGAIN_HAS_THRESHOLD
12896 if (vdec_stream_based(vdec)) {
12897 hevc->pre_parser_wr_ptr =
12898 STBUF_READ(&vdec->vbuf, get_wp);
12899 hevc->next_again_flag = 0;
12900 }
12901#endif
12902 r = vdec_prepare_input(vdec, &hevc->chunk);
12903 if (r < 0) {
12904 input_empty[hevc->index]++;
12905 hevc->dec_result = DEC_RESULT_AGAIN;
12906 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12907			"amvdec_vh265: Insufficient data\n");
12908
12909 vdec_schedule_work(&hevc->work);
12910 return;
12911 }
12912 input_empty[hevc->index] = 0;
12913 hevc->dec_result = DEC_RESULT_NONE;
12914 if (vdec_frame_based(vdec) &&
12915 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12916 || is_log_enable(hevc)))
12917 check_sum = get_data_check_sum(hevc, r);
12918
12919 if (is_log_enable(hevc))
12920 add_log(hevc,
12921 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12922 __func__, r,
12923 check_sum,
12924 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12925 );
12926 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12927 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12928 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12929 __func__, r,
12930 check_sum,
12931 READ_VREG(HEVC_STREAM_LEVEL),
12932 READ_VREG(HEVC_STREAM_WR_PTR),
12933 READ_VREG(HEVC_STREAM_RD_PTR),
12934 STBUF_READ(&vdec->vbuf, get_rp),
12935 STBUF_READ(&vdec->vbuf, get_wp),
12936 hevc->start_shift_bytes
12937 );
12938 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12939 input_frame_based(vdec)) {
12940 int jj;
12941 u8 *data = NULL;
12942
12943 if (!hevc->chunk->block->is_mapped)
12944 data = codec_mm_vmap(hevc->chunk->block->start +
12945 hevc->chunk->offset, r);
12946 else
12947 data = ((u8 *)hevc->chunk->block->start_virt)
12948 + hevc->chunk->offset;
12949
12950 for (jj = 0; jj < r; jj++) {
12951 if ((jj & 0xf) == 0)
12952 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12953 "%06x:", jj);
12954 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12955 "%02x ", data[jj]);
12956 if (((jj + 1) & 0xf) == 0)
12957 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12958 "\n");
12959 }
12960
12961 if (!hevc->chunk->block->is_mapped)
12962 codec_mm_unmap_phyaddr(data);
12963 }
12964 if (vdec->mc_loaded) {
12965		/*firmware has been loaded before
12966		and has not changed to another;
12967		skip reloading.
12968 */
12969 if (tee_enabled() && hevc->is_swap &&
12970 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12971 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12972 } else {
12973 if (hevc->mmu_enable)
12974 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12975 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12976 "h265_mmu", hevc->fw->data);
12977 else {
12978 if (!hevc->is_4k) {
12979 /* if an older version of the fw was loaded, */
12980				/* need to try to load the no-swap fw because the */
12981				/* old fw package does not contain the swap fw. */
12982 loadr = amhevc_vdec_loadmc_ex(
12983 VFORMAT_HEVC, vdec,
12984 "hevc_mmu_swap",
12985 hevc->fw->data);
12986 if (loadr < 0)
12987 loadr = amhevc_vdec_loadmc_ex(
12988 VFORMAT_HEVC, vdec,
12989 "h265_mmu",
12990 hevc->fw->data);
12991 else
12992 hevc->is_swap = true;
12993 } else
12994 loadr = amhevc_vdec_loadmc_ex(
12995 VFORMAT_HEVC, vdec,
12996 "h265_mmu", hevc->fw->data);
12997 }
12998 else
12999 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
13000 NULL, hevc->fw->data);
13001 if (loadr < 0) {
13002 amhevc_disable();
13003 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
13004 tee_enabled() ? "TEE" : "local", loadr);
13005 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
13006 vdec_schedule_work(&hevc->work);
13007 return;
13008 }
13009
13010 if (tee_enabled() && hevc->is_swap &&
13011 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
13012 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
13013#ifdef DETREFILL_ENABLE
13014 if (hevc->is_swap &&
13015 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
13016 init_detrefill_buf(hevc);
13017#endif
13018 vdec->mc_loaded = 1;
13019 vdec->mc_type = VFORMAT_HEVC;
13020 }
13021 if (vh265_hw_ctx_restore(hevc) < 0) {
13022 vdec_schedule_work(&hevc->work);
13023 return;
13024 }
13025 vdec_enable_input(vdec);
13026
13027 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
13028
13029 if (vdec_frame_based(vdec)) {
13030 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
13031 r = hevc->chunk->size +
13032 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
13033 hevc->decode_size = r;
13034 if (vdec->mvfrm)
13035 vdec->mvfrm->frame_size = hevc->chunk->size;
13036 }
13037#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13038 else {
13039 if (vdec->master || vdec->slave)
13040 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
13041 hevc->shift_byte_count_lo);
13042 }
13043#endif
13044 WRITE_VREG(HEVC_DECODE_SIZE, r);
13045 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
13046 hevc->init_flag = 1;
13047
13048 if (hevc->pic_list_init_flag == 3)
13049 init_pic_list_hw(hevc);
13050
13051 backup_decode_state(hevc);
13052 start_process_time(hevc);
13053 mod_timer(&hevc->timer, jiffies);
13054 hevc->stat |= STAT_TIMER_ARM;
13055 hevc->stat |= STAT_ISR_REG;
13056 if (vdec->mvfrm)
13057 vdec->mvfrm->hw_decode_start = local_clock();
13058 amhevc_start();
13059 hevc->stat |= STAT_VDEC_RUN;
13060}
13061
13062static void aml_free_canvas(struct vdec_s *vdec)
13063{
13064 int i;
13065 struct hevc_state_s *hevc =
13066 (struct hevc_state_s *)vdec->private;
13067
13068 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
13069 struct PIC_s *pic = hevc->m_PIC[i];
13070
13071 if (pic) {
13072 if (vdec->parallel_dec == 1) {
13073 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
13074 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
13075 }
13076 }
13077 }
13078}
13079
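/*
 * Full instance reset requested by the vdec core: cancel pending work,
 * stop the ucode and timer, drop the MV buffers, canvases and local
 * state, then rebuild the software state with vh265_local_init() so the
 * next run() starts from a clean picture list.
 */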
13080static void reset(struct vdec_s *vdec)
13081{
13082 struct hevc_state_s *hevc =
13083 (struct hevc_state_s *)vdec->private;
13084 int i;
13085
13086 cancel_work_sync(&hevc->work);
13087 cancel_work_sync(&hevc->notify_work);
13088 if (hevc->stat & STAT_VDEC_RUN) {
13089 amhevc_stop();
13090 hevc->stat &= ~STAT_VDEC_RUN;
13091 }
13092
13093 if (hevc->stat & STAT_TIMER_ARM) {
13094 del_timer_sync(&hevc->timer);
13095 hevc->stat &= ~STAT_TIMER_ARM;
13096 }
13097 hevc->dec_result = DEC_RESULT_NONE;
13098 reset_process_time(hevc);
13099 hevc->pic_list_init_flag = 0;
13100 dealloc_mv_bufs(hevc);
13101 aml_free_canvas(vdec);
13102 hevc_local_uninit(hevc);
13103 if (vh265_local_init(hevc) < 0)
13104		pr_debug("%s local init failed\n", __func__);
13105 for (i = 0; i < BUF_POOL_SIZE; i++) {
13106 hevc->m_BUF[i].start_adr = 0;
13107 }
13108
13109 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
13110}
13111
13112static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
13113{
13114 struct hevc_state_s *hevc =
13115 (struct hevc_state_s *)vdec->private;
13116
13117 return vh265_isr(0, hevc);
13118}
13119
13120static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
13121{
13122 struct hevc_state_s *hevc =
13123 (struct hevc_state_s *)vdec->private;
13124
13125 return vh265_isr_thread_fn(0, hevc);
13126}
13127#endif
13128
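/*
 * Platform probe for the legacy single-instance driver: allocate the
 * global hevc state, decide whether the MMU path is usable on this SoC,
 * claim the workspace from the BMMU box, clear it for non-secure
 * playback, copy the stream info from the platform data and start the
 * decoder through vh265_init().
 */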
13129static int amvdec_h265_probe(struct platform_device *pdev)
13130{
13131#ifdef MULTI_INSTANCE_SUPPORT
13132 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
13133#else
13134 struct vdec_dev_reg_s *pdata =
13135 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
13136#endif
13137 char *tmpbuf;
13138 int ret;
13139 struct hevc_state_s *hevc;
13140
13141 hevc = vmalloc(sizeof(struct hevc_state_s));
13142 if (hevc == NULL) {
13143 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
13144 return -ENOMEM;
13145 }
13146 gHevc = hevc;
13147 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
13148 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
13149 H265_DEBUG_DIS_SYS_ERROR_PROC));
13150 memset(hevc, 0, sizeof(struct hevc_state_s));
13151 if (get_dbg_flag(hevc))
13152 hevc_print(hevc, 0, "%s\r\n", __func__);
13153 mutex_lock(&vh265_mutex);
13154
13155 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
13156 (parser_sei_enable & 0x100) == 0)
13157 parser_sei_enable = 7; /*old 1*/
13158 hevc->m_ins_flag = 0;
13159 hevc->init_flag = 0;
13160 hevc->first_sc_checked = 0;
13161 hevc->uninit_list = 0;
13162 hevc->fatal_error = 0;
13163 hevc->show_frame_num = 0;
13164 hevc->frameinfo_enable = 1;
13165#ifdef MULTI_INSTANCE_SUPPORT
13166 hevc->platform_dev = pdev;
13167 platform_set_drvdata(pdev, pdata);
13168#endif
13169
13170 if (pdata == NULL) {
13171 hevc_print(hevc, 0,
13172 "\namvdec_h265 memory resource undefined.\n");
13173 vfree(hevc);
13174 mutex_unlock(&vh265_mutex);
13175 return -EFAULT;
13176 }
13177 if (mmu_enable_force == 0) {
13178 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
13179 || double_write_mode == 0x10)
13180 hevc->mmu_enable = 0;
13181 else
13182 hevc->mmu_enable = 1;
13183 }
13184 if (init_mmu_buffers(hevc)) {
13185 hevc_print(hevc, 0,
13186 "\n 265 mmu init failed!\n");
13187 vfree(hevc);
13188 mutex_unlock(&vh265_mutex);
13189 return -EFAULT;
13190 }
13191
13192 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
13193 work_buf_size, DRIVER_NAME, &hevc->buf_start);
13194 if (ret < 0) {
13195 uninit_mmu_buffers(hevc);
13196 vfree(hevc);
13197 mutex_unlock(&vh265_mutex);
13198 return ret;
13199 }
13200 hevc->buf_size = work_buf_size;
13201
13202
13203 if (!vdec_secure(pdata)) {
13204 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
13205 if (tmpbuf) {
13206 memset(tmpbuf, 0, work_buf_size);
13207 dma_sync_single_for_device(amports_get_dma_device(),
13208 hevc->buf_start,
13209 work_buf_size, DMA_TO_DEVICE);
13210 } else {
13211 tmpbuf = codec_mm_vmap(hevc->buf_start,
13212 work_buf_size);
13213 if (tmpbuf) {
13214 memset(tmpbuf, 0, work_buf_size);
13215 dma_sync_single_for_device(
13216 amports_get_dma_device(),
13217 hevc->buf_start,
13218 work_buf_size,
13219 DMA_TO_DEVICE);
13220 codec_mm_unmap_phyaddr(tmpbuf);
13221 }
13222 }
13223 }
13224
13225 if (get_dbg_flag(hevc)) {
13226 hevc_print(hevc, 0,
13227 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
13228 hevc->buf_start, hevc->buf_size);
13229 }
13230
13231 if (pdata->sys_info)
13232 hevc->vh265_amstream_dec_info = *pdata->sys_info;
13233 else {
13234 hevc->vh265_amstream_dec_info.width = 0;
13235 hevc->vh265_amstream_dec_info.height = 0;
13236 hevc->vh265_amstream_dec_info.rate = 30;
13237 }
13238#ifndef MULTI_INSTANCE_SUPPORT
13239 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
13240 workaround_enable |= 3;
13241 hevc_print(hevc, 0,
13242 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
13243 } else
13244 workaround_enable &= ~3;
13245#endif
13246 hevc->cma_dev = pdata->cma_dev;
13247 vh265_vdec_info_init(hevc);
13248
13249#ifdef MULTI_INSTANCE_SUPPORT
13250 pdata->private = hevc;
13251 pdata->dec_status = vh265_dec_status;
13252 pdata->set_trickmode = vh265_set_trickmode;
13253 pdata->set_isreset = vh265_set_isreset;
13254 is_reset = 0;
13255 if (vh265_init(pdata) < 0) {
13256#else
13257 if (vh265_init(hevc) < 0) {
13258#endif
13259 hevc_print(hevc, 0,
13260 "\namvdec_h265 init failed.\n");
13261 hevc_local_uninit(hevc);
13262 if (hevc->gvs)
13263 kfree(hevc->gvs);
13264 hevc->gvs = NULL;
13265 uninit_mmu_buffers(hevc);
13266 vfree(hevc);
13267 pdata->dec_status = NULL;
13268 mutex_unlock(&vh265_mutex);
13269 return -ENODEV;
13270 }
13271 /*set the max clk for smooth playing...*/
13272 hevc_source_changed(VFORMAT_HEVC,
13273 3840, 2160, 60);
13274 mutex_unlock(&vh265_mutex);
13275
13276 return 0;
13277}
13278
13279static int amvdec_h265_remove(struct platform_device *pdev)
13280{
13281 struct hevc_state_s *hevc = gHevc;
13282
13283 if (get_dbg_flag(hevc))
13284 hevc_print(hevc, 0, "%s\r\n", __func__);
13285
13286 mutex_lock(&vh265_mutex);
13287
13288 vh265_stop(hevc);
13289
13290 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
13291
13292
13293#ifdef DEBUG_PTS
13294 hevc_print(hevc, 0,
13295 "pts missed %ld, pts hit %ld, duration %d\n",
13296 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
13297#endif
13298
13299 vfree(hevc);
13300 hevc = NULL;
13301 gHevc = NULL;
13302
13303 mutex_unlock(&vh265_mutex);
13304
13305 return 0;
13306}
13307/****************************************/
13308#ifdef CONFIG_PM
13309static int h265_suspend(struct device *dev)
13310{
13311 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
13312 return 0;
13313}
13314
13315static int h265_resume(struct device *dev)
13316{
13317 amhevc_resume(to_platform_device(dev));
13318 return 0;
13319}
13320
13321static const struct dev_pm_ops h265_pm_ops = {
13322 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
13323};
13324#endif
13325
13326static struct platform_driver amvdec_h265_driver = {
13327 .probe = amvdec_h265_probe,
13328 .remove = amvdec_h265_remove,
13329 .driver = {
13330 .name = DRIVER_NAME,
13331#ifdef CONFIG_PM
13332 .pm = &h265_pm_ops,
13333#endif
13334 }
13335};
13336
13337#ifdef MULTI_INSTANCE_SUPPORT
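/*
 * Status dump for debugging: print the stream geometry, per-instance
 * counters, receiver state, picture list, buffer and MV-buffer tables
 * and the key HEVC stream registers; with PRINT_FRAMEBASE_DATA set it
 * also hexdumps the current input chunk.
 */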
13338static void vh265_dump_state(struct vdec_s *vdec)
13339{
13340 int i;
13341 struct hevc_state_s *hevc =
13342 (struct hevc_state_s *)vdec->private;
13343 hevc_print(hevc, 0,
13344 "====== %s\n", __func__);
13345
13346 hevc_print(hevc, 0,
13347 "width/height (%d/%d), reorder_pic_num %d ip_mode %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
13348 hevc->frame_width,
13349 hevc->frame_height,
13350 hevc->sps_num_reorder_pics_0,
13351 hevc->ip_mode,
13352 get_work_pic_num(hevc),
13353 hevc->video_signal_type_debug,
13354 hevc->is_swap
13355 );
13356
13357 hevc_print(hevc, 0,
13358 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
13359 input_frame_based(vdec),
13360 hevc->eos,
13361 hevc->dec_result,
13362 decode_frame_count[hevc->index],
13363 display_frame_count[hevc->index],
13364 run_count[hevc->index],
13365 not_run_ready[hevc->index],
13366 input_empty[hevc->index]
13367 );
13368
13369 if (vf_get_receiver(vdec->vf_provider_name)) {
13370 enum receviver_start_e state =
13371 vf_notify_receiver(vdec->vf_provider_name,
13372 VFRAME_EVENT_PROVIDER_QUREY_STATE,
13373 NULL);
13374 hevc_print(hevc, 0,
13375 "\nreceiver(%s) state %d\n",
13376 vdec->vf_provider_name,
13377 state);
13378 }
13379
13380 hevc_print(hevc, 0,
13381 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
13382 __func__,
13383 kfifo_len(&hevc->newframe_q),
13384 VF_POOL_SIZE,
13385 kfifo_len(&hevc->display_q),
13386 VF_POOL_SIZE,
13387 hevc->vf_pre_count,
13388 hevc->vf_get_count,
13389 hevc->vf_put_count,
13390 hevc->pic_list_init_flag,
13391 is_new_pic_available(hevc)
13392 );
13393
13394 dump_pic_list(hevc);
13395
13396 for (i = 0; i < BUF_POOL_SIZE; i++) {
13397 hevc_print(hevc, 0,
13398 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
13399 i,
13400 hevc->m_BUF[i].start_adr,
13401 hevc->m_BUF[i].size,
13402 hevc->m_BUF[i].used_flag);
13403 }
13404
13405 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
13406 hevc_print(hevc, 0,
13407 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
13408 i,
13409 hevc->m_mv_BUF[i].start_adr,
13410 hevc->m_mv_BUF[i].size,
13411 hevc->m_mv_BUF[i].used_flag);
13412 }
13413
13414 hevc_print(hevc, 0,
13415 "HEVC_DEC_STATUS_REG=0x%x\n",
13416 READ_VREG(HEVC_DEC_STATUS_REG));
13417 hevc_print(hevc, 0,
13418 "HEVC_MPC_E=0x%x\n",
13419 READ_VREG(HEVC_MPC_E));
13420 hevc_print(hevc, 0,
13421 "HEVC_DECODE_MODE=0x%x\n",
13422 READ_VREG(HEVC_DECODE_MODE));
13423 hevc_print(hevc, 0,
13424 "HEVC_DECODE_MODE2=0x%x\n",
13425 READ_VREG(HEVC_DECODE_MODE2));
13426 hevc_print(hevc, 0,
13427 "NAL_SEARCH_CTL=0x%x\n",
13428 READ_VREG(NAL_SEARCH_CTL));
13429 hevc_print(hevc, 0,
13430 "HEVC_PARSER_LCU_START=0x%x\n",
13431 READ_VREG(HEVC_PARSER_LCU_START));
13432 hevc_print(hevc, 0,
13433 "HEVC_DECODE_SIZE=0x%x\n",
13434 READ_VREG(HEVC_DECODE_SIZE));
13435 hevc_print(hevc, 0,
13436 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
13437 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
13438 hevc_print(hevc, 0,
13439 "HEVC_STREAM_START_ADDR=0x%x\n",
13440 READ_VREG(HEVC_STREAM_START_ADDR));
13441 hevc_print(hevc, 0,
13442 "HEVC_STREAM_END_ADDR=0x%x\n",
13443 READ_VREG(HEVC_STREAM_END_ADDR));
13444 hevc_print(hevc, 0,
13445 "HEVC_STREAM_LEVEL=0x%x\n",
13446 READ_VREG(HEVC_STREAM_LEVEL));
13447 hevc_print(hevc, 0,
13448 "HEVC_STREAM_WR_PTR=0x%x\n",
13449 READ_VREG(HEVC_STREAM_WR_PTR));
13450 hevc_print(hevc, 0,
13451 "HEVC_STREAM_RD_PTR=0x%x\n",
13452 READ_VREG(HEVC_STREAM_RD_PTR));
13453 hevc_print(hevc, 0,
13454 "PARSER_VIDEO_RP=0x%x\n",
13455 STBUF_READ(&vdec->vbuf, get_rp));
13456 hevc_print(hevc, 0,
13457 "PARSER_VIDEO_WP=0x%x\n",
13458 STBUF_READ(&vdec->vbuf, get_wp));
13459
13460 if (input_frame_based(vdec) &&
13461 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
13462 ) {
13463 int jj;
13464 if (hevc->chunk && hevc->chunk->block &&
13465 hevc->chunk->size > 0) {
13466 u8 *data = NULL;
13467 if (!hevc->chunk->block->is_mapped)
13468 data = codec_mm_vmap(hevc->chunk->block->start +
13469 hevc->chunk->offset, hevc->chunk->size);
13470 else
13471 data = ((u8 *)hevc->chunk->block->start_virt)
13472 + hevc->chunk->offset;
13473 hevc_print(hevc, 0,
13474 "frame data size 0x%x\n",
13475 hevc->chunk->size);
13476 for (jj = 0; jj < hevc->chunk->size; jj++) {
13477 if ((jj & 0xf) == 0)
13478 hevc_print(hevc,
13479 PRINT_FRAMEBASE_DATA,
13480 "%06x:", jj);
13481 hevc_print_cont(hevc,
13482 PRINT_FRAMEBASE_DATA,
13483 "%02x ", data[jj]);
13484 if (((jj + 1) & 0xf) == 0)
13485 hevc_print_cont(hevc,
13486 PRINT_FRAMEBASE_DATA,
13487 "\n");
13488 }
13489
13490 if (!hevc->chunk->block->is_mapped)
13491 codec_mm_unmap_phyaddr(data);
13492 }
13493 }
13494
13495}
13496
13497
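/*
 * Multi-instance probe: allocate the hevc state, hook the vdec_s
 * callbacks (dec_status, run/run_ready, reset, irq and dump_state),
 * pick a vframe provider name, parse per-instance configuration,
 * set up the MMU/BMMU buffers and workspace, then initialize the
 * decoder and request the decoder core(s).
 */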
13498static int ammvdec_h265_probe(struct platform_device *pdev)
13499{
13500
13501 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
13502 struct hevc_state_s *hevc = NULL;
13503 int ret;
13504#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
13505 int config_val;
13506#endif
13507 //pr_err("[%s pid=%d tgid=%d] \n",__func__, current->pid, current->tgid);
13508 if (pdata == NULL) {
13509 pr_info("\nammvdec_h265 memory resource undefined.\n");
13510 return -EFAULT;
13511 }
13512
13513 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
13514 sizeof(struct hevc_state_s), GFP_KERNEL); */
13515 hevc = vmalloc(sizeof(struct hevc_state_s));
13516 if (hevc == NULL) {
13517 pr_info("\nammvdec_h265 device data allocation failed\n");
13518 return -ENOMEM;
13519 }
13520 memset(hevc, 0, sizeof(struct hevc_state_s));
13521
13522 /* the ctx from v4l2 driver. */
13523 hevc->v4l2_ctx = pdata->private;
13524
13525 pdata->private = hevc;
13526 pdata->dec_status = vh265_dec_status;
13527 pdata->set_trickmode = vh265_set_trickmode;
13528 pdata->run_ready = run_ready;
13529 pdata->run = run;
13530 pdata->reset = reset;
13531 pdata->irq_handler = vh265_irq_cb;
13532 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
13533 pdata->dump_state = vh265_dump_state;
13534
13535 hevc->index = pdev->id;
13536 hevc->m_ins_flag = 1;
13537
13538 if (pdata->use_vfm_path) {
13539 snprintf(pdata->vf_provider_name,
13540 VDEC_PROVIDER_NAME_SIZE,
13541 VFM_DEC_PROVIDER_NAME);
13542 hevc->frameinfo_enable = 1;
13543 }
13544#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
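	/*
	 * Dual-layer Dolby Vision: pick the BL or EL provider name
	 * depending on whether this is the master or slave instance
	 * (optionally toggled via dv_toggle_prov_name for debug) and
	 * inherit shift_byte_count_lo from the paired instance.
	 */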
13545 else if (vdec_dual(pdata)) {
13546 struct hevc_state_s *hevc_pair = NULL;
13547
13548 if (dv_toggle_prov_name) /*debug purpose*/
13549 snprintf(pdata->vf_provider_name,
13550 VDEC_PROVIDER_NAME_SIZE,
13551 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
13552 VFM_DEC_DVEL_PROVIDER_NAME);
13553 else
13554 snprintf(pdata->vf_provider_name,
13555 VDEC_PROVIDER_NAME_SIZE,
13556 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
13557 VFM_DEC_DVBL_PROVIDER_NAME);
13558 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
13559 if (pdata->master)
13560 hevc_pair = (struct hevc_state_s *)
13561 pdata->master->private;
13562 else if (pdata->slave)
13563 hevc_pair = (struct hevc_state_s *)
13564 pdata->slave->private;
13565 if (hevc_pair)
13566 hevc->shift_byte_count_lo =
13567 hevc_pair->shift_byte_count_lo;
13568 }
13569#endif
13570 else
13571 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
13572 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
13573
13574 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
13575 &vh265_vf_provider, pdata);
13576
13577 hevc->provider_name = pdata->vf_provider_name;
13578 platform_set_drvdata(pdev, pdata);
13579
13580 hevc->platform_dev = pdev;
13581
13582 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
13583 pdata->config && pdata->config_len) {
13584#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
13585		/*use ptr config for double_write_mode, etc*/
13586 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
13587
13588 if (get_config_int(pdata->config, "hevc_double_write_mode",
13589 &config_val) == 0)
13590 hevc->double_write_mode = config_val;
13591 else
13592 hevc->double_write_mode = double_write_mode;
13593
13594 if (get_config_int(pdata->config, "save_buffer_mode",
13595 &config_val) == 0)
13596 hevc->save_buffer_mode = config_val;
13597 else
13598 hevc->save_buffer_mode = 0;
13599
13600 /*use ptr config for max_pic_w, etc*/
13601 if (get_config_int(pdata->config, "hevc_buf_width",
13602 &config_val) == 0) {
13603 hevc->max_pic_w = config_val;
13604 }
13605 if (get_config_int(pdata->config, "hevc_buf_height",
13606 &config_val) == 0) {
13607 hevc->max_pic_h = config_val;
13608 }
13609
13610 if (get_config_int(pdata->config, "sidebind_type",
13611 &config_val) == 0)
13612 hevc->sidebind_type = config_val;
13613
13614 if (get_config_int(pdata->config, "sidebind_channel_id",
13615 &config_val) == 0)
13616 hevc->sidebind_channel_id = config_val;
13617
13618 if (get_config_int(pdata->config,
13619 "parm_v4l_codec_enable",
13620 &config_val) == 0)
13621 hevc->is_used_v4l = config_val;
13622
13623 if (get_config_int(pdata->config,
13624 "parm_v4l_buffer_margin",
13625 &config_val) == 0)
13626 hevc->dynamic_buf_num_margin = config_val;
13627
13628 if (get_config_int(pdata->config,
13629 "parm_v4l_canvas_mem_mode",
13630 &config_val) == 0)
13631 hevc->mem_map_mode = config_val;
13632
13633 if (get_config_int(pdata->config, "dv_duallayer",
13634 &config_val) == 0)
13635 hevc->dv_duallayer = config_val;
13636 else
13637 hevc->dv_duallayer = false;
13638#endif
13639 } else {
13640 if (pdata->sys_info)
13641 hevc->vh265_amstream_dec_info = *pdata->sys_info;
13642 else {
13643 hevc->vh265_amstream_dec_info.width = 0;
13644 hevc->vh265_amstream_dec_info.height = 0;
13645 hevc->vh265_amstream_dec_info.rate = 30;
13646 }
13647 hevc->double_write_mode = double_write_mode;
13648 }
13649 /* get valid double write from configure or node */
13650 hevc->double_write_mode = get_double_write_mode(hevc);
13651
13652 if (!hevc->is_used_v4l) {
13653 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
13654			hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
13655 else
13656 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
13657
13658 hevc->mem_map_mode = mem_map_mode;
13659 }
13660
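	/*
	 * Unless overridden by mmu_enable_force, enable the frame MMU
	 * (compressed frame buffers) only on GXL and later SoCs.
	 */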
13661 if (mmu_enable_force == 0) {
13662 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
13663 hevc->mmu_enable = 0;
13664 else
13665 hevc->mmu_enable = 1;
13666 }
13667
13668 if (init_mmu_buffers(hevc) < 0) {
13669 hevc_print(hevc, 0,
13670 "\n 265 mmu init failed!\n");
13671 mutex_unlock(&vh265_mutex);
13672 /* devm_kfree(&pdev->dev, (void *)hevc);*/
13673 if (hevc)
13674 vfree((void *)hevc);
13675 pdata->dec_status = NULL;
13676 return -EFAULT;
13677 }
13678#if 0
13679 hevc->buf_start = pdata->mem_start;
13680 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
13681#else
13682
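	/* Allocate the fixed decoder workspace from the BMMU box. */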
13683 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
13684 BMMU_WORKSPACE_ID, work_buf_size,
13685 DRIVER_NAME, &hevc->buf_start);
13686 if (ret < 0) {
13687 uninit_mmu_buffers(hevc);
13688 /* devm_kfree(&pdev->dev, (void *)hevc); */
13689 if (hevc)
13690 vfree((void *)hevc);
13691 pdata->dec_status = NULL;
13692 mutex_unlock(&vh265_mutex);
13693 return ret;
13694 }
13695 hevc->buf_size = work_buf_size;
13696#endif
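	/*
	 * On GXTVBB and later, default parser_sei_enable to 7 unless
	 * bit 8 is set as an explicit override.
	 */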
13697 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
13698 (parser_sei_enable & 0x100) == 0)
13699 parser_sei_enable = 7;
13700 hevc->init_flag = 0;
13701 hevc->first_sc_checked = 0;
13702 hevc->uninit_list = 0;
13703 hevc->fatal_error = 0;
13704 hevc->show_frame_num = 0;
13705
13706 /*
13707 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
13708 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
13709 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
13710 */
13711 if (get_dbg_flag(hevc)) {
13712 hevc_print(hevc, 0,
13713 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
13714 hevc->buf_start, hevc->buf_size);
13715 }
13716
13717 hevc_print(hevc, 0,
13718 "dynamic_buf_num_margin=%d\n",
13719 hevc->dynamic_buf_num_margin);
13720 hevc_print(hevc, 0,
13721 "double_write_mode=%d\n",
13722 hevc->double_write_mode);
13723
13724 hevc->cma_dev = pdata->cma_dev;
13725 vh265_vdec_info_init(hevc);
13726
13727 if (vh265_init(pdata) < 0) {
13728 hevc_print(hevc, 0,
13729			"\nammvdec_h265 init failed.\n");
13730 hevc_local_uninit(hevc);
13731 if (hevc->gvs)
13732 kfree(hevc->gvs);
13733 hevc->gvs = NULL;
13734 uninit_mmu_buffers(hevc);
13735 /* devm_kfree(&pdev->dev, (void *)hevc); */
13736 if (hevc)
13737 vfree((void *)hevc);
13738 pdata->dec_status = NULL;
13739 return -ENODEV;
13740 }
13741
13742 vdec_set_prepare_level(pdata, start_decode_buf_level);
13743
13744 /*set the max clk for smooth playing...*/
13745 hevc_source_changed(VFORMAT_HEVC,
13746 3840, 2160, 60);
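	/*
	 * In parallel decode mode only the HEVC core is claimed;
	 * otherwise VDEC_1 and HEVC are claimed as a combined unit.
	 */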
13747 if (pdata->parallel_dec == 1)
13748 vdec_core_request(pdata, CORE_MASK_HEVC);
13749 else
13750 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
13751 | CORE_MASK_COMBINE);
13752
13753 return 0;
13754}
13755
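/*
 * Multi-instance remove: stop the decoder, release the cores claimed
 * at probe time and free the per-instance state.
 */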
13756static int ammvdec_h265_remove(struct platform_device *pdev)
13757{
13758 struct hevc_state_s *hevc =
13759 (struct hevc_state_s *)
13760 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
13761 struct vdec_s *vdec;
13762
13763 if (hevc == NULL)
13764 return 0;
13765 vdec = hw_to_vdec(hevc);
13766
13767 //pr_err("%s [pid=%d,tgid=%d]\n", __func__, current->pid, current->tgid);
13768 if (get_dbg_flag(hevc))
13769 hevc_print(hevc, 0, "%s\r\n", __func__);
13770
13771 vmh265_stop(hevc);
13772
13773 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
13774	if (vdec->parallel_dec == 1)
13775		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
13776	else
13777		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
13778
13779 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
13780
13781 vfree((void *)hevc);
13782
13783 return 0;
13784}
13785
13786static struct platform_driver ammvdec_h265_driver = {
13787 .probe = ammvdec_h265_probe,
13788 .remove = ammvdec_h265_remove,
13789 .driver = {
13790 .name = MULTI_DRIVER_NAME,
13791#ifdef CONFIG_PM
13792 .pm = &h265_pm_ops,
13793#endif
13794 }
13795};
13796#endif
13797
13798static struct codec_profile_t amvdec_h265_profile = {
13799 .name = "hevc",
13800 .profile = ""
13801};
13802
13803static struct codec_profile_t amvdec_h265_profile_single,
13804 amvdec_h265_profile_mult;
13805
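/* Debug/config entries exposed under media.decoder/h265 (registered at module init). */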
13806static struct mconfig h265_configs[] = {
13807 MC_PU32("use_cma", &use_cma),
13808 MC_PU32("bit_depth_luma", &bit_depth_luma),
13809 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
13810 MC_PU32("video_signal_type", &video_signal_type),
13811#ifdef ERROR_HANDLE_DEBUG
13812 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
13813 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
13814#endif
13815 MC_PU32("radr", &radr),
13816 MC_PU32("rval", &rval),
13817 MC_PU32("dbg_cmd", &dbg_cmd),
13818 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
13819 MC_PU32("endian", &endian),
13820 MC_PU32("step", &step),
13821 MC_PU32("udebug_flag", &udebug_flag),
13822 MC_PU32("decode_pic_begin", &decode_pic_begin),
13823 MC_PU32("slice_parse_begin", &slice_parse_begin),
13824 MC_PU32("nal_skip_policy", &nal_skip_policy),
13825 MC_PU32("i_only_flag", &i_only_flag),
13826 MC_PU32("error_handle_policy", &error_handle_policy),
13827 MC_PU32("error_handle_threshold", &error_handle_threshold),
13828 MC_PU32("error_handle_nal_skip_threshold",
13829 &error_handle_nal_skip_threshold),
13830 MC_PU32("error_handle_system_threshold",
13831 &error_handle_system_threshold),
13832 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
13833 MC_PU32("debug", &debug),
13834 MC_PU32("debug_mask", &debug_mask),
13835 MC_PU32("buffer_mode", &buffer_mode),
13836 MC_PU32("double_write_mode", &double_write_mode),
13837 MC_PU32("buf_alloc_width", &buf_alloc_width),
13838 MC_PU32("buf_alloc_height", &buf_alloc_height),
13839 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
13840 MC_PU32("max_buf_num", &max_buf_num),
13841 MC_PU32("buf_alloc_size", &buf_alloc_size),
13842 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
13843 MC_PU32("mem_map_mode", &mem_map_mode),
13844 MC_PU32("enable_mem_saving", &enable_mem_saving),
13845 MC_PU32("force_w_h", &force_w_h),
13846 MC_PU32("force_fps", &force_fps),
13847 MC_PU32("max_decoding_time", &max_decoding_time),
13848 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
13849 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
13850 MC_PU32("interlace_enable", &interlace_enable),
13851 MC_PU32("pts_unstable", &pts_unstable),
13852 MC_PU32("parser_sei_enable", &parser_sei_enable),
13853 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
13854 MC_PU32("decode_timeout_val", &decode_timeout_val),
13855 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
13856#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13857 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
13858 MC_PU32("dv_debug", &dv_debug),
13859#endif
13860};
13861static struct mconfig_node decoder_265_node;
13862
13863static int __init amvdec_h265_driver_init_module(void)
13864{
13865 struct BuffInfo_s *p_buf_info;
13866
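	/*
	 * Pick the workspace spec by platform capability: the SM1-and-later
	 * spec, the general 4K spec, or the default spec otherwise.
	 */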
13867 if (vdec_is_support_4k()) {
13868 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
13869 p_buf_info = &amvh265_workbuff_spec[2];
13870 else
13871 p_buf_info = &amvh265_workbuff_spec[1];
13872 } else
13873 p_buf_info = &amvh265_workbuff_spec[0];
13874
13875 init_buff_spec(NULL, p_buf_info);
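	/* Round the workspace size up to a 64KB boundary. */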
13876 work_buf_size =
13877 (p_buf_info->end_adr - p_buf_info->start_adr
13878 + 0xffff) & (~0xffff);
13879
13880 pr_debug("amvdec_h265 module init\n");
13881 error_handle_policy = 0;
13882
13883#ifdef ERROR_HANDLE_DEBUG
13884 dbg_nal_skip_flag = 0;
13885 dbg_nal_skip_count = 0;
13886#endif
13887 udebug_flag = 0;
13888 decode_pic_begin = 0;
13889 slice_parse_begin = 0;
13890 step = 0;
13891 buf_alloc_size = 0;
13892
13893#ifdef MULTI_INSTANCE_SUPPORT
13894 if (platform_driver_register(&ammvdec_h265_driver))
13895 pr_err("failed to register ammvdec_h265 driver\n");
13896
13897#endif
13898 if (platform_driver_register(&amvdec_h265_driver)) {
13899 pr_err("failed to register amvdec_h265 driver\n");
13900 return -ENODEV;
13901 }
13902#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
13903 if (!has_hevc_vdec()) {
13904 /* not support hevc */
13905 amvdec_h265_profile.name = "hevc_unsupport";
13906 }
13907 if (vdec_is_support_4k()) {
13908 if (is_meson_m8m2_cpu()) {
13909 /* m8m2 support 4k */
13910 amvdec_h265_profile.profile = "4k";
13911 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
13912 amvdec_h265_profile.profile =
13913 "8k, 8bit, 10bit, dwrite, compressed, frame_dv";
13914		} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
13915 amvdec_h265_profile.profile =
13916 "4k, 8bit, 10bit, dwrite, compressed, frame_dv";
13917 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
13918 amvdec_h265_profile.profile = "4k";
13919 }
13920#endif
13921 if (codec_mm_get_total_size() < 80 * SZ_1M) {
13922 pr_info("amvdec_h265 default mmu enabled.\n");
13923 mmu_enable = 1;
13924 }
13925
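	/*
	 * Register the same capability profile under the "hevc", "h265"
	 * (single instance) and "mh265" (multi instance) names, and
	 * register the debug configs for this decoder.
	 */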
13926 vcodec_profile_register(&amvdec_h265_profile);
13927 amvdec_h265_profile_single = amvdec_h265_profile;
13928 amvdec_h265_profile_single.name = "h265";
13929 vcodec_profile_register(&amvdec_h265_profile_single);
13930 amvdec_h265_profile_mult = amvdec_h265_profile;
13931 amvdec_h265_profile_mult.name = "mh265";
13932 vcodec_profile_register(&amvdec_h265_profile_mult);
13933 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
13934 "h265", h265_configs, CONFIG_FOR_RW);
13935 return 0;
13936}
13937
13938static void __exit amvdec_h265_driver_remove_module(void)
13939{
13940 pr_debug("amvdec_h265 module remove.\n");
13941
13942#ifdef MULTI_INSTANCE_SUPPORT
13943 platform_driver_unregister(&ammvdec_h265_driver);
13944#endif
13945 platform_driver_unregister(&amvdec_h265_driver);
13946}
13947
13948/****************************************/
13949/*
13950 *module_param(stat, uint, 0664);
13951 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
13952 */
13953module_param(use_cma, uint, 0664);
13954MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13955
13956module_param(bit_depth_luma, uint, 0664);
13957MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13958
13959module_param(bit_depth_chroma, uint, 0664);
13960MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13961
13962module_param(video_signal_type, uint, 0664);
13963MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13964
13965#ifdef ERROR_HANDLE_DEBUG
13966module_param(dbg_nal_skip_flag, uint, 0664);
13967MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13968
13969module_param(dbg_nal_skip_count, uint, 0664);
13970MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13971#endif
13972
13973module_param(radr, uint, 0664);
13974MODULE_PARM_DESC(radr, "\n radr\n");
13975
13976module_param(rval, uint, 0664);
13977MODULE_PARM_DESC(rval, "\n rval\n");
13978
13979module_param(dbg_cmd, uint, 0664);
13980MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13981
13982module_param(dump_nal, uint, 0664);
13983MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13984
13985module_param(dbg_skip_decode_index, uint, 0664);
13986MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13987
13988module_param(endian, uint, 0664);
13989MODULE_PARM_DESC(endian, "\n endian\n");
13990
13991module_param(step, uint, 0664);
13992MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13993
13994module_param(decode_pic_begin, uint, 0664);
13995MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13996
13997module_param(slice_parse_begin, uint, 0664);
13998MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13999
14000module_param(nal_skip_policy, uint, 0664);
14001MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
14002
14003module_param(i_only_flag, uint, 0664);
14004MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
14005
14006module_param(fast_output_enable, uint, 0664);
14007MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
14008
14009module_param(error_handle_policy, uint, 0664);
14010MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
14011
14012module_param(error_handle_threshold, uint, 0664);
14013MODULE_PARM_DESC(error_handle_threshold,
14014 "\n amvdec_h265 error_handle_threshold\n");
14015
14016module_param(error_handle_nal_skip_threshold, uint, 0664);
14017MODULE_PARM_DESC(error_handle_nal_skip_threshold,
14018 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
14019
14020module_param(error_handle_system_threshold, uint, 0664);
14021MODULE_PARM_DESC(error_handle_system_threshold,
14022 "\n amvdec_h265 error_handle_system_threshold\n");
14023
14024module_param(error_skip_nal_count, uint, 0664);
14025MODULE_PARM_DESC(error_skip_nal_count,
14026 "\n amvdec_h265 error_skip_nal_count\n");
14027
14028module_param(skip_nal_count, uint, 0664);
14029MODULE_PARM_DESC(skip_nal_count, "\n skip_nal_count\n");
14030
14031module_param(debug, uint, 0664);
14032MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
14033
14034module_param(debug_mask, uint, 0664);
14035MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
14036
14037module_param(log_mask, uint, 0664);
14038MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
14039
14040module_param(buffer_mode, uint, 0664);
14041MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
14042
14043module_param(double_write_mode, uint, 0664);
14044MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
14045
14046module_param(buf_alloc_width, uint, 0664);
14047MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
14048
14049module_param(buf_alloc_height, uint, 0664);
14050MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
14051
14052module_param(dynamic_buf_num_margin, uint, 0664);
14053MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
14054
14055module_param(max_buf_num, uint, 0664);
14056MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
14057
14058module_param(buf_alloc_size, uint, 0664);
14059MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
14060
14061#ifdef CONSTRAIN_MAX_BUF_NUM
14062module_param(run_ready_max_vf_only_num, uint, 0664);
14063MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");
14064
14065module_param(run_ready_display_q_num, uint, 0664);
14066MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");
14067
14068module_param(run_ready_max_buf_num, uint, 0664);
14069MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
14070#endif
14071
14072#if 0
14073module_param(re_config_pic_flag, uint, 0664);
14074MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
14075#endif
14076
14077module_param(buffer_mode_dbg, uint, 0664);
14078MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");
14079
14080module_param(mem_map_mode, uint, 0664);
14081MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");
14082
14083module_param(enable_mem_saving, uint, 0664);
14084MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");
14085
14086module_param(force_w_h, uint, 0664);
14087MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
14088
14089module_param(force_fps, uint, 0664);
14090MODULE_PARM_DESC(force_fps, "\n force_fps\n");
14091
14092module_param(max_decoding_time, uint, 0664);
14093MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");
14094
14095module_param(prefix_aux_buf_size, uint, 0664);
14096MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");
14097
14098module_param(suffix_aux_buf_size, uint, 0664);
14099MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");
14100
14101module_param(interlace_enable, uint, 0664);
14102MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
14103module_param(pts_unstable, uint, 0664);
14104MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
14105module_param(parser_sei_enable, uint, 0664);
14106MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");
14107
14108module_param(parser_dolby_vision_enable, uint, 0664);
14109MODULE_PARM_DESC(parser_dolby_vision_enable,
14110 "\n parser_dolby_vision_enable\n");
14111
14112#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
14113module_param(dolby_meta_with_el, uint, 0664);
14114MODULE_PARM_DESC(dolby_meta_with_el,
14115 "\n dolby_meta_with_el\n");
14116
14117module_param(dolby_el_flush_th, uint, 0664);
14118MODULE_PARM_DESC(dolby_el_flush_th,
14119 "\n dolby_el_flush_th\n");
14120#endif
14121module_param(mmu_enable, uint, 0664);
14122MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");
14123
14124module_param(mmu_enable_force, uint, 0664);
14125MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");
14126
14127#ifdef MULTI_INSTANCE_SUPPORT
14128module_param(start_decode_buf_level, int, 0664);
14129MODULE_PARM_DESC(start_decode_buf_level,
14130 "\n h265 start_decode_buf_level\n");
14131
14132module_param(decode_timeout_val, uint, 0664);
14133MODULE_PARM_DESC(decode_timeout_val,
14134 "\n h265 decode_timeout_val\n");
14135
14136module_param(print_lcu_error, uint, 0664);
14137MODULE_PARM_DESC(print_lcu_error,
14138 "\n h265 print_lcu_error\n");
14139
14140module_param(data_resend_policy, uint, 0664);
14141MODULE_PARM_DESC(data_resend_policy,
14142 "\n h265 data_resend_policy\n");
14143
14144module_param(poc_num_margin, uint, 0664);
14145MODULE_PARM_DESC(poc_num_margin,
14146 "\n h265 poc_num_margin\n");
14147
14148module_param(poc_error_limit, uint, 0664);
14149MODULE_PARM_DESC(poc_error_limit,
14150 "\n h265 poc_error_limit\n");
14151
14152module_param_array(decode_frame_count, uint,
14153 &max_decode_instance_num, 0664);
14154
14155module_param_array(display_frame_count, uint,
14156 &max_decode_instance_num, 0664);
14157
14158module_param_array(max_process_time, uint,
14159 &max_decode_instance_num, 0664);
14160
14161module_param_array(max_get_frame_interval,
14162 uint, &max_decode_instance_num, 0664);
14163
14164module_param_array(run_count, uint,
14165 &max_decode_instance_num, 0664);
14166
14167module_param_array(input_empty, uint,
14168 &max_decode_instance_num, 0664);
14169
14170module_param_array(not_run_ready, uint,
14171 &max_decode_instance_num, 0664);
14172
14173module_param_array(ref_frame_mark_flag, uint,
14174 &max_decode_instance_num, 0664);
14175
14176#endif
14177#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
14178module_param(dv_toggle_prov_name, uint, 0664);
14179MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");
14180
14181module_param(dv_debug, uint, 0664);
14182MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");
14183
14184module_param(force_bypass_dvenl, uint, 0664);
14185MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
14186#endif
14187
14188#ifdef AGAIN_HAS_THRESHOLD
14189module_param(again_threshold, uint, 0664);
14190MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
14191#endif
14192
14193module_param(force_disp_pic_index, int, 0664);
14194MODULE_PARM_DESC(force_disp_pic_index,
14195 "\n amvdec_h265 force_disp_pic_index\n");
14196
14197module_param(frmbase_cont_bitlevel, uint, 0664);
14198MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");
14199
14200module_param(udebug_flag, uint, 0664);
14201MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
14202
14203module_param(udebug_pause_pos, uint, 0664);
14204MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
14205
14206module_param(udebug_pause_val, uint, 0664);
14207MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
14208
14209module_param(pre_decode_buf_level, int, 0664);
14210MODULE_PARM_DESC(pre_decode_buf_level, "\n ammvdec_h265 pre_decode_buf_level\n");
14211
14212module_param(udebug_pause_decode_idx, uint, 0664);
14213MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
14214
14215module_param(disp_vframe_valve_level, uint, 0664);
14216MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");
14217
14218module_param(pic_list_debug, uint, 0664);
14219MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");
14220
14221module_param(without_display_mode, uint, 0664);
14222MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");
14223
14224#ifdef HEVC_8K_LFTOFFSET_FIX
14225module_param(performance_profile, uint, 0664);
14226MODULE_PARM_DESC(performance_profile, "\n amvdec_h265 performance_profile\n");
14227#endif
14228module_param(disable_ip_mode, uint, 0664);
14229MODULE_PARM_DESC(disable_ip_mode, "\n amvdec_h265 disable ip_mode\n");
14230
14231module_param(dirty_time_threshold, uint, 0664);
14232MODULE_PARM_DESC(dirty_time_threshold, "\n dirty_time_threshold\n");
14233
14234module_param(dirty_count_threshold, uint, 0664);
14235MODULE_PARM_DESC(dirty_count_threshold, "\n dirty_count_threshold\n");
14236
14237module_param(dirty_buffersize_threshold, uint, 0664);
14238MODULE_PARM_DESC(dirty_buffersize_threshold, "\n dirty_buffersize_threshold\n");
14239
14240
14241
14242module_init(amvdec_h265_driver_init_module);
14243module_exit(amvdec_h265_driver_remove_module);
14244
14245MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
14246MODULE_LICENSE("GPL");
14247MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");
14248