path: root/drivers/frame_provider/decoder/h265/vh265.c (plain)
blob: 091935dd5029ae5d4810fce736143644f56bc777
1/*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/platform_device.h>
29#include <linux/amlogic/media/vfm/vframe.h>
30#include <linux/amlogic/media/utils/amstream.h>
31#include <linux/amlogic/media/utils/vformat.h>
32#include <linux/amlogic/media/frame_sync/ptsserv.h>
33#include <linux/amlogic/media/canvas/canvas.h>
34#include <linux/amlogic/media/vfm/vframe.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/mm.h>
41#include <linux/amlogic/tee.h>
42#include "../../../stream_input/amports/amports_priv.h"
43#include <linux/amlogic/media/codec_mm/codec_mm.h>
44#include "../utils/decoder_mmu_box.h"
45#include "../utils/decoder_bmmu_box.h"
46#include "../utils/config_parser.h"
47#include "../utils/firmware.h"
48#include "../../../common/chips/decoder_cpu_ver_info.h"
49#include "../utils/vdec_v4l2_buffer_ops.h"
50#include <media/v4l2-mem2mem.h>
51
52/*
53To enable DV in frame mode,
54#define DOLBY_META_SUPPORT in the ucode
55*/
56
57#define HEVC_8K_LFTOFFSET_FIX
58
59#define CONSTRAIN_MAX_BUF_NUM
60
61#define SWAP_HEVC_UCODE
62#define DETREFILL_ENABLE
63
64#define AGAIN_HAS_THRESHOLD
65/*#define TEST_NO_BUF*/
66#define HEVC_PIC_STRUCT_SUPPORT
67#define MULTI_INSTANCE_SUPPORT
68#define USE_UNINIT_SEMA
69
70 /* .buf_size = 0x100000*16,
71 //4k2k , 0x100000 per buffer */
72 /* 4096x2304 , 0x120000 per buffer */
73#define MPRED_8K_MV_BUF_SIZE (0x120000*4)
74#define MPRED_4K_MV_BUF_SIZE (0x120000)
75#define MPRED_MV_BUF_SIZE (0x40000)
76
77#define MMU_COMPRESS_HEADER_SIZE 0x48000
78#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
79
80#define MAX_FRAME_4K_NUM 0x1200
81#define MAX_FRAME_8K_NUM (0x1200*4)
82
83//#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
84#define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
85
86#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
87
88#define HEVC_CM_HEADER_START_ADDR 0x3628
89#define HEVC_SAO_MMU_VH1_ADDR 0x363b
90#define HEVC_SAO_MMU_VH0_ADDR 0x363a
91
92#define HEVC_DBLK_CFGB 0x350b
93#define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
94#define SWAP_HEVC_OFFSET (3 * 0x1000)
95
96#define MEM_NAME "codec_265"
97/* #include <mach/am_regs.h> */
98#include <linux/amlogic/media/utils/vdec_reg.h>
99
100#include "../utils/vdec.h"
101#include "../utils/amvdec.h"
102#include <linux/amlogic/media/video_sink/video.h>
103#include <linux/amlogic/media/codec_mm/configs.h>
104
105#define SEND_LMEM_WITH_RPM
106#define SUPPORT_10BIT
107/* #define ERROR_HANDLE_DEBUG */
108
109#ifndef STAT_KTHREAD
110#define STAT_KTHREAD 0x40
111#endif
112
113#ifdef MULTI_INSTANCE_SUPPORT
114#define MAX_DECODE_INSTANCE_NUM 9
115#define MULTI_DRIVER_NAME "ammvdec_h265"
116#endif
117#define DRIVER_NAME "amvdec_h265"
118#define MODULE_NAME "amvdec_h265"
119#define DRIVER_HEADER_NAME "amvdec_h265_header"
120
121#define PUT_INTERVAL (HZ/100)
122#define ERROR_SYSTEM_RESET_COUNT 200
123
124#define PTS_NORMAL 0
125#define PTS_NONE_REF_USE_DURATION 1
126
127#define PTS_MODE_SWITCHING_THRESHOLD 3
128#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
129
130#define DUR2PTS(x) ((x)*90/96)
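/*
 * Worked example (a sketch, assuming frame_dur is in 1/96000-second units as
 * elsewhere in amports and PTS runs at 90 kHz): a 30 fps stream has
 * frame_dur = 3200, so DUR2PTS(3200) = 3200 * 90 / 96 = 3000 ticks per frame;
 * a 25 fps stream has frame_dur = 3840 and DUR2PTS(3840) = 3600 ticks.
 */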
131
132#define MAX_SIZE_8K (8192 * 4608)
133#define MAX_SIZE_4K (4096 * 2304)
134
135#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
136#define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
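/*
 * Example: IS_4K_SIZE(3840, 2160) is true since 8294400 > 1920*1088;
 * IS_8K_SIZE(4096, 2304) is false because 4096*2304 equals MAX_SIZE_4K and
 * the comparison is strict, while IS_8K_SIZE(7680, 4320) is true.
 */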
137
138#define SEI_UserDataITU_T_T35 4
139#define INVALID_IDX -1 /* Invalid buffer index.*/
140
141static struct semaphore h265_sema;
142
143struct hevc_state_s;
144static int hevc_print(struct hevc_state_s *hevc,
145 int debug_flag, const char *fmt, ...);
146static int hevc_print_cont(struct hevc_state_s *hevc,
147 int debug_flag, const char *fmt, ...);
148static int vh265_vf_states(struct vframe_states *states, void *);
149static struct vframe_s *vh265_vf_peek(void *);
150static struct vframe_s *vh265_vf_get(void *);
151static void vh265_vf_put(struct vframe_s *, void *);
152static int vh265_event_cb(int type, void *data, void *private_data);
153
154static int vh265_stop(struct hevc_state_s *hevc);
155#ifdef MULTI_INSTANCE_SUPPORT
156static int vmh265_stop(struct hevc_state_s *hevc);
157static s32 vh265_init(struct vdec_s *vdec);
158static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
159static void reset_process_time(struct hevc_state_s *hevc);
160static void start_process_time(struct hevc_state_s *hevc);
161static void restart_process_time(struct hevc_state_s *hevc);
162static void timeout_process(struct hevc_state_s *hevc);
163#else
164static s32 vh265_init(struct hevc_state_s *hevc);
165#endif
166static void vh265_prot_init(struct hevc_state_s *hevc);
167static int vh265_local_init(struct hevc_state_s *hevc);
168static void vh265_check_timer_func(unsigned long arg);
169static void config_decode_mode(struct hevc_state_s *hevc);
170
171static const char vh265_dec_id[] = "vh265-dev";
172
173#define PROVIDER_NAME "decoder.h265"
174#define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
175
176static const struct vframe_operations_s vh265_vf_provider = {
177 .peek = vh265_vf_peek,
178 .get = vh265_vf_get,
179 .put = vh265_vf_put,
180 .event_cb = vh265_event_cb,
181 .vf_states = vh265_vf_states,
182};
183
184static struct vframe_provider_s vh265_vf_prov;
185
186static u32 bit_depth_luma;
187static u32 bit_depth_chroma;
188static u32 video_signal_type;
189static int start_decode_buf_level = 0x8000;
190static unsigned int decode_timeout_val = 200;
191
192static u32 run_ready_min_buf_num = 2;
193static u32 disable_ip_mode;
194static u32 print_lcu_error = 1;
195/*data_resend_policy:
196 bit 0, in stream-based mode, resend data when the decode buffer is empty
197*/
198static u32 data_resend_policy = 1;
199static u32 poc_num_margin = 1000;
200static u32 poc_error_limit = 30;
201
202static u32 dirty_time_threshold = 2000;
203static u32 dirty_count_threshold = 200;
204static u32 dirty_buffersize_threshold = 0x800000;
205
206
207#define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
208/*
209static const char * const video_format_names[] = {
210 "component", "PAL", "NTSC", "SECAM",
211 "MAC", "unspecified", "unspecified", "unspecified"
212};
213
214static const char * const color_primaries_names[] = {
215 "unknown", "bt709", "undef", "unknown",
216 "bt470m", "bt470bg", "smpte170m", "smpte240m",
217 "film", "bt2020"
218};
219
220static const char * const transfer_characteristics_names[] = {
221 "unknown", "bt709", "undef", "unknown",
222 "bt470m", "bt470bg", "smpte170m", "smpte240m",
223 "linear", "log100", "log316", "iec61966-2-4",
224 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
225 "smpte-st-2084", "smpte-st-428"
226};
227
228static const char * const matrix_coeffs_names[] = {
229 "GBR", "bt709", "undef", "unknown",
230 "fcc", "bt470bg", "smpte170m", "smpte240m",
231 "YCgCo", "bt2020nc", "bt2020c"
232};
233*/
234#ifdef SUPPORT_10BIT
235#define HEVC_CM_BODY_START_ADDR 0x3626
236#define HEVC_CM_BODY_LENGTH 0x3627
237#define HEVC_CM_HEADER_LENGTH 0x3629
238#define HEVC_CM_HEADER_OFFSET 0x362b
239#define HEVC_SAO_CTRL9 0x362d
240#define LOSLESS_COMPRESS_MODE
241/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
242/* double_write_mode:
243 * 0, no double write;
244 * 1, 1:1 ratio;
245 * 2, (1/4):(1/4) ratio;
246 * 3, (1/4):(1/4) ratio, with both compressed frame included
247 * 4, (1/2):(1/2) ratio;
248 * 0x10, double write only
249 * 0x100, if > 1080p, use mode 4, else use mode 1;
250 * 0x200, if > 1080p, use mode 2, else use mode 1;
251 * 0x300, if > 720p, use mode 4, else use mode 1;
252 */
253static u32 double_write_mode;
254
255/*#define DECOMP_HEADR_SURGENT*/
256
257static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
258static u32 enable_mem_saving = 1;
259static u32 workaround_enable;
260static u32 force_w_h;
261#endif
262static u32 force_fps;
263static u32 pts_unstable;
264#define H265_DEBUG_BUFMGR 0x01
265#define H265_DEBUG_BUFMGR_MORE 0x02
266#define H265_DEBUG_DETAIL 0x04
267#define H265_DEBUG_REG 0x08
268#define H265_DEBUG_MAN_SEARCH_NAL 0x10
269#define H265_DEBUG_MAN_SKIP_NAL 0x20
270#define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
271#define H265_DEBUG_FORCE_CLK 0x80
272#define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
273#define H265_DEBUG_NO_DISPLAY 0x200
274#define H265_DEBUG_DISCARD_NAL 0x400
275#define H265_DEBUG_OUT_PTS 0x800
276#define H265_DEBUG_DUMP_PIC_LIST 0x1000
277#define H265_DEBUG_PRINT_SEI 0x2000
278#define H265_DEBUG_PIC_STRUCT 0x4000
279#define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
280#define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
281#define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
282#define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
283#define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
284#define H265_DEBUG_HW_RESET 0x100000
285#define H265_CFG_CANVAS_IN_DECODE 0x200000
286#define H265_DEBUG_DV 0x400000
287#define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
288#define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
289#define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
290#define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
291#ifdef MULTI_INSTANCE_SUPPORT
292#define PRINT_FLAG_ERROR 0x0
293#define IGNORE_PARAM_FROM_CONFIG 0x08000000
294#define PRINT_FRAMEBASE_DATA 0x10000000
295#define PRINT_FLAG_VDEC_STATUS 0x20000000
296#define PRINT_FLAG_VDEC_DETAIL 0x40000000
297#define PRINT_FLAG_V4L_DETAIL 0x80000000
298#endif
299
300#define BUF_POOL_SIZE 32
301#define MAX_BUF_NUM 24
302#define MAX_REF_PIC_NUM 24
303#define MAX_REF_ACTIVE 16
304
305#ifdef MV_USE_FIXED_BUF
306#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
307#define VF_BUFFER_IDX(n) (n)
308#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
309#else
310#define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
311#define VF_BUFFER_IDX(n) (n)
312#define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
313#define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
314#endif
315
316#define HEVC_MV_INFO 0x310d
317#define HEVC_QP_INFO 0x3137
318#define HEVC_SKIP_INFO 0x3136
319
320const u32 h265_version = 201602101;
321static u32 debug_mask = 0xffffffff;
322static u32 log_mask;
323static u32 debug;
324static u32 radr;
325static u32 rval;
326static u32 dbg_cmd;
327static u32 dump_nal;
328static u32 dbg_skip_decode_index;
329static u32 endian = 0xff0;
330#ifdef ERROR_HANDLE_DEBUG
331static u32 dbg_nal_skip_flag;
332 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
333static u32 dbg_nal_skip_count;
334#endif
335/*for debug*/
336/*
337 udebug_flag:
338 bit 0, enable ucode print
339 bit 1, enable ucode detail print
340 bit [31:16] not 0, pos to dump lmem
341 bit 2, pop bits to lmem
342 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
343*/
344static u32 udebug_flag;
345/*
346 when udebug_flag[1:0] is not 0
347 udebug_pause_pos not 0,
348 pause position
349*/
350static u32 udebug_pause_pos;
351/*
352 when udebug_flag[1:0] is not 0
353 and udebug_pause_pos is not 0,
354 pause only when DEBUG_REG2 is equal to this val
355*/
356static u32 udebug_pause_val;
357
358static u32 udebug_pause_decode_idx;
359
360static u32 decode_pic_begin;
361static uint slice_parse_begin;
362static u32 step;
363static bool is_reset;
364
365#ifdef CONSTRAIN_MAX_BUF_NUM
366static u32 run_ready_max_vf_only_num;
367static u32 run_ready_display_q_num;
368 /*0: not check
369 0xff: work_pic_num
370 */
371static u32 run_ready_max_buf_num = 0xff;
372#endif
373
374static u32 dynamic_buf_num_margin = 7;
375static u32 buf_alloc_width;
376static u32 buf_alloc_height;
377
378static u32 max_buf_num = 16;
379static u32 buf_alloc_size;
380/*static u32 re_config_pic_flag;*/
381/*
382 *bit[0]: 0,
383 *bit[1]: 0, always release cma buffer when stop
384 *bit[1]: 1, never release cma buffer when stop
385 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
386 *do not release cma buffer if blackout is not 1
387 *
388 *bit[2]: 0, when start decoding, check current displayed buffer
389 * (only for buffer decoded by h265) if blackout is 0
390 * 1, do not check current displayed buffer
391 *
392 *bit[3]: 1, if blackout is not 1, do not release current
393 * displayed cma buffer always.
394 */
395/* set to 1 for fast play;
396 * set to 8 for other cases of "keep last frame"
397 */
398static u32 buffer_mode = 1;
399
400/* buffer_mode_dbg: debug only*/
401static u32 buffer_mode_dbg = 0xffff0000;
402/**/
403/*
404 *bit[1:0]PB_skip_mode: 0, start decoding at begin;
405 *1, start decoding after first I;
406 *2, only decode and display non-error pictures;
407 *3, start decoding and display after IDR, etc.
408 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
409 *only for mode 0 and 1.
410 */
411static u32 nal_skip_policy = 2;
412
413/*
414 *bit 0, 1: only display I picture;
415 *bit 1, 1: only decode I picture;
416 */
417static u32 i_only_flag;
418static u32 skip_nal_count = 500;
419/*
420bit 0, fast output first I picture
421*/
422static u32 fast_output_enable = 1;
423
424static u32 frmbase_cont_bitlevel = 0x60;
425
426/*
427use_cma: 1, use both reserved memory and cma for buffers
4282, only use cma for buffers
429*/
430static u32 use_cma = 2;
431
432#define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
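/*
 * Example: AUX_BUF_ALIGN() rounds an address up to the next 16-byte boundary,
 * e.g. AUX_BUF_ALIGN(0x1001) = 0x1010 and AUX_BUF_ALIGN(0x1000) = 0x1000.
 */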
433/*
434static u32 prefix_aux_buf_size = (16 * 1024);
435static u32 suffix_aux_buf_size;
436*/
437static u32 prefix_aux_buf_size = (12 * 1024);
438static u32 suffix_aux_buf_size = (12 * 1024);
439
440static u32 max_decoding_time;
441/*
442 *error handling
443 */
444/*error_handle_policy:
445 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery (see bit 2);
446 *1, skip error_skip_nal_count nals before error recovery (see bit 1);
447 *bit 1 (valid only when bit0 == 1):
448 *1, wait vps/sps/pps after error recovery;
449 *bit 2 (valid only when bit0 == 0):
450 *0, auto search after error recovery (hevc_recover() called);
451 *1, manual search after error recovery
452 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
453 *
454 *bit 4: 0, set error_mark after reset/recover
455 * 1, do not set error_mark after reset/recover
456 *
457 *bit 5: 0, check total lcu for every picture
458 * 1, do not check total lcu
459 *
460 *bit 6: 0, do not check head error
461 * 1, check head error
462 *
463 *bit 7: 0, allow printing of over-decode errors
464 * 1, do NOT allow printing of over-decode errors
465 *
466 *bit 8: 0, use the interlace policy
467 * 1, do NOT use the interlace policy
468 *
469 */
470
471static u32 error_handle_policy;
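/*
 * Example (per the bit definitions above): error_handle_policy = 0x41 sets
 * bit 0 (skip error_skip_nal_count nals before error recovery) and bit 6
 * (check head error); all other bits keep their default behavior.
 */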
472static u32 error_skip_nal_count = 6;
473static u32 error_handle_threshold = 30;
474static u32 error_handle_nal_skip_threshold = 10;
475static u32 error_handle_system_threshold = 30;
476static u32 interlace_enable = 1;
477static u32 fr_hint_status;
478
479 /*
480 *parser_sei_enable:
481 * bit 0, sei;
482 * bit 1, sei_suffix (fill aux buf)
483 * bit 2, fill sei to aux buf (when bit 0 is 1)
484 * bit 8, debug flag
485 */
486static u32 parser_sei_enable;
487static u32 parser_dolby_vision_enable = 1;
488#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
489static u32 dolby_meta_with_el;
490static u32 dolby_el_flush_th = 2;
491#endif
492/* this is only for h265 mmu enable */
493
494static u32 mmu_enable = 1;
495static u32 mmu_enable_force;
496static u32 work_buf_size;
497static unsigned int force_disp_pic_index;
498static unsigned int disp_vframe_valve_level;
499static int pre_decode_buf_level = 0x1000;
500static unsigned int pic_list_debug;
501#ifdef HEVC_8K_LFTOFFSET_FIX
502 /* performance_profile: bit 0, multi slice in ucode
503 */
504static unsigned int performance_profile = 1;
505#endif
506#ifdef MULTI_INSTANCE_SUPPORT
507static unsigned int max_decode_instance_num
508 = MAX_DECODE_INSTANCE_NUM;
509static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
510static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
511static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
512static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
513static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
514static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
515static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
516static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
517{1, 1, 1, 1, 1, 1, 1, 1, 1};
518
519#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
520static unsigned char get_idx(struct hevc_state_s *hevc);
521#endif
522
523#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
524static u32 dv_toggle_prov_name;
525
526static u32 dv_debug;
527
528static u32 force_bypass_dvenl;
529#endif
530#endif
531
532
533#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
534#define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
535#define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
536#define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
537#else
538#define get_dbg_flag(hevc) debug
539#define get_dbg_flag2(hevc) debug
540#define is_log_enable(hevc) (log_mask ? 1 : 0)
541#define get_valid_double_write_mode(hevc) double_write_mode
542#define get_buf_alloc_width(hevc) buf_alloc_width
543#define get_buf_alloc_height(hevc) buf_alloc_height
544#define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
545#endif
546#define get_buffer_mode(hevc) buffer_mode
547
548
549static DEFINE_SPINLOCK(lock);
550struct task_struct *h265_task = NULL;
551#undef DEBUG_REG
552#ifdef DEBUG_REG
553void WRITE_VREG_DBG(unsigned adr, unsigned val)
554{
555 if (debug & H265_DEBUG_REG)
556 pr_info("%s(%x, %x)\n", __func__, adr, val);
557 WRITE_VREG(adr, val);
558}
559
560#undef WRITE_VREG
561#define WRITE_VREG WRITE_VREG_DBG
562#endif
563extern u32 trickmode_i;
564
565static DEFINE_MUTEX(vh265_mutex);
566
567static DEFINE_MUTEX(vh265_log_mutex);
568
569//static struct vdec_info *gvs;
570
571static u32 without_display_mode;
572
573/**************************************************
574 *
575 *h265 buffer management include
576 *
577 ***************************************************
578 */
579enum NalUnitType {
580 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
581 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
582
583 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
584 /* Current name in the spec: TSA_R */
585 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
586
587 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
588 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
589
590 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
591 /* Current name in the spec: RADL_R */
592 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
593
594 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
595 /* Current name in the spec: RASL_R */
596 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
597
598 NAL_UNIT_RESERVED_10,
599 NAL_UNIT_RESERVED_11,
600 NAL_UNIT_RESERVED_12,
601 NAL_UNIT_RESERVED_13,
602 NAL_UNIT_RESERVED_14,
603 NAL_UNIT_RESERVED_15,
604
605 /* Current name in the spec: BLA_W_LP */
606 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
607 /* Current name in the spec: BLA_W_DLP */
608 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
609 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
610 /* Current name in the spec: IDR_W_DLP */
611 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
612 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
613 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
614 NAL_UNIT_RESERVED_22,
615 NAL_UNIT_RESERVED_23,
616
617 NAL_UNIT_RESERVED_24,
618 NAL_UNIT_RESERVED_25,
619 NAL_UNIT_RESERVED_26,
620 NAL_UNIT_RESERVED_27,
621 NAL_UNIT_RESERVED_28,
622 NAL_UNIT_RESERVED_29,
623 NAL_UNIT_RESERVED_30,
624 NAL_UNIT_RESERVED_31,
625
626 NAL_UNIT_VPS, /* 32 */
627 NAL_UNIT_SPS, /* 33 */
628 NAL_UNIT_PPS, /* 34 */
629 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
630 NAL_UNIT_EOS, /* 36 */
631 NAL_UNIT_EOB, /* 37 */
632 NAL_UNIT_FILLER_DATA, /* 38 */
633 NAL_UNIT_SEI, /* 39 Prefix SEI */
634 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
635 NAL_UNIT_RESERVED_41,
636 NAL_UNIT_RESERVED_42,
637 NAL_UNIT_RESERVED_43,
638 NAL_UNIT_RESERVED_44,
639 NAL_UNIT_RESERVED_45,
640 NAL_UNIT_RESERVED_46,
641 NAL_UNIT_RESERVED_47,
642 NAL_UNIT_UNSPECIFIED_48,
643 NAL_UNIT_UNSPECIFIED_49,
644 NAL_UNIT_UNSPECIFIED_50,
645 NAL_UNIT_UNSPECIFIED_51,
646 NAL_UNIT_UNSPECIFIED_52,
647 NAL_UNIT_UNSPECIFIED_53,
648 NAL_UNIT_UNSPECIFIED_54,
649 NAL_UNIT_UNSPECIFIED_55,
650 NAL_UNIT_UNSPECIFIED_56,
651 NAL_UNIT_UNSPECIFIED_57,
652 NAL_UNIT_UNSPECIFIED_58,
653 NAL_UNIT_UNSPECIFIED_59,
654 NAL_UNIT_UNSPECIFIED_60,
655 NAL_UNIT_UNSPECIFIED_61,
656 NAL_UNIT_UNSPECIFIED_62,
657 NAL_UNIT_UNSPECIFIED_63,
658 NAL_UNIT_INVALID,
659};
660
661/* --------------------------------------------------- */
662/* Amrisc Software Interrupt */
663/* --------------------------------------------------- */
664#define AMRISC_STREAM_EMPTY_REQ 0x01
665#define AMRISC_PARSER_REQ 0x02
666#define AMRISC_MAIN_REQ 0x04
667
668/* --------------------------------------------------- */
669/* HEVC_DEC_STATUS define */
670/* --------------------------------------------------- */
671#define HEVC_DEC_IDLE 0x0
672#define HEVC_NAL_UNIT_VPS 0x1
673#define HEVC_NAL_UNIT_SPS 0x2
674#define HEVC_NAL_UNIT_PPS 0x3
675#define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
676#define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
677#define HEVC_SLICE_DECODING 0x6
678#define HEVC_NAL_UNIT_SEI 0x7
679#define HEVC_SLICE_SEGMENT_DONE 0x8
680#define HEVC_NAL_SEARCH_DONE 0x9
681#define HEVC_DECPIC_DATA_DONE 0xa
682#define HEVC_DECPIC_DATA_ERROR 0xb
683#define HEVC_SEI_DAT 0xc
684#define HEVC_SEI_DAT_DONE 0xd
685#define HEVC_NAL_DECODE_DONE 0xe
686#define HEVC_OVER_DECODE 0xf
687
688#define HEVC_DATA_REQUEST 0x12
689
690#define HEVC_DECODE_BUFEMPTY 0x20
691#define HEVC_DECODE_TIMEOUT 0x21
692#define HEVC_SEARCH_BUFEMPTY 0x22
693#define HEVC_DECODE_OVER_SIZE 0x23
694#define HEVC_DECODE_BUFEMPTY2 0x24
695#define HEVC_FIND_NEXT_PIC_NAL 0x50
696#define HEVC_FIND_NEXT_DVEL_NAL 0x51
697
698#define HEVC_DUMP_LMEM 0x30
699
700#define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
701#define HEVC_DISCARD_NAL 0xf0
702#define HEVC_ACTION_DEC_CONT 0xfd
703#define HEVC_ACTION_ERROR 0xfe
704#define HEVC_ACTION_DONE 0xff
705
706/* --------------------------------------------------- */
707/* Include "parser_cmd.h" */
708/* --------------------------------------------------- */
709#define PARSER_CMD_SKIP_CFG_0 0x0000090b
710
711#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
712
713#define PARSER_CMD_SKIP_CFG_2 0x001b1910
714
715#define PARSER_CMD_NUMBER 37
716
717/**************************************************
718 *
719 *h265 buffer management
720 *
721 ***************************************************
722 */
723/* #define BUFFER_MGR_ONLY */
724/* #define CONFIG_HEVC_CLK_FORCED_ON */
725/* #define ENABLE_SWAP_TEST */
726#define MCRCC_ENABLE
727#define INVALID_POC 0x80000000
728
729#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
730#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
731#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
732#define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
733#define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
734#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
735#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
736#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
737#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
738#define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
739#define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
740#define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
741#define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
742#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
743#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
744#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
745#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
746#ifdef ENABLE_SWAP_TEST
747#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
748#endif
749
750/*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
751/*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
752#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
753 /*do not define ENABLE_SWAP_TEST*/
754#define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
755#define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
756
757#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
758#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
759/*
760 *ucode parser/search control
761 *bit 0: 0, header auto parse; 1, header manual parse
762 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
763 *bit [3:2]: valid when bit1==0;
764 *0, auto skip nal before first vps/sps/pps/idr;
765 *1, auto skip nal before first vps/sps/pps
766 *2, auto skip nal before first vps/sps/pps,
767 * and not decode until the first I slice (with slice address of 0)
768 *
769 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
770 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
771 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
772 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
773 *bit [17]: for NAL_SEI when bit0 is 0:
774 * 0, do not parse/fetch SEI in ucode;
775 * 1, parse/fetch SEI in ucode
776 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
777 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
778 * 1, fetch NAL_SEI_SUFFIX data to aux buf
779 *bit [19]:
780 * 0, parse NAL_SEI in ucode
781 * 1, fetch NAL_SEI to aux buf
782 *bit [20]: for DOLBY_VISION_META
783 * 0, do not fetch DOLBY_VISION_META to aux buf
784 * 1, fetch DOLBY_VISION_META to aux buf
785 */
786#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
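/*
 * Example (a sketch built from the bit layout documented above, not a value
 * taken from the driver): keeping automatic header parsing (bit 0 = 0) while
 * asking the ucode to fetch prefix SEI and SEI suffix payloads into the aux
 * buffer would be
 *	WRITE_VREG(NAL_SEARCH_CTL, (1 << 17) | (1 << 18) | (1 << 19));
 * i.e. 0xe0000.
 */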
787 /*read only*/
788#define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
789 /*
790 [15 : 8] rps_set_id
791 [7 : 0] start_decoding_flag
792 */
793#define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
794 /*set before start decoder*/
795#define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
796#define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
797#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
798
799#define DECODE_MODE_SINGLE 0x0
800#define DECODE_MODE_MULTI_FRAMEBASE 0x1
801#define DECODE_MODE_MULTI_STREAMBASE 0x2
802#define DECODE_MODE_MULTI_DVBAL 0x3
803#define DECODE_MODE_MULTI_DVENL 0x4
804
805#define MAX_INT 0x7FFFFFFF
806
807#define RPM_BEGIN 0x100
808#define modification_list_cur 0x148
809#define RPM_END 0x180
810
811#define RPS_USED_BIT 14
812/* MISC_FLAG0 */
813#define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
814#define PCM_ENABLE_FLAG_BIT 1
815#define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
816#define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
817#define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
818#define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
819#define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
820#define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
821#define SLICE_SAO_LUMA_FLAG_BIT 8
822#define SLICE_SAO_CHROMA_FLAG_BIT 9
823#define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
824
825union param_u {
826 struct {
827 unsigned short data[RPM_END - RPM_BEGIN];
828 } l;
829 struct {
830 /* from ucode lmem, do not change this struct */
831 unsigned short CUR_RPS[0x10];
832 unsigned short num_ref_idx_l0_active;
833 unsigned short num_ref_idx_l1_active;
834 unsigned short slice_type;
835 unsigned short slice_temporal_mvp_enable_flag;
836 unsigned short dependent_slice_segment_flag;
837 unsigned short slice_segment_address;
838 unsigned short num_title_rows_minus1;
839 unsigned short pic_width_in_luma_samples;
840 unsigned short pic_height_in_luma_samples;
841 unsigned short log2_min_coding_block_size_minus3;
842 unsigned short log2_diff_max_min_coding_block_size;
843 unsigned short log2_max_pic_order_cnt_lsb_minus4;
844 unsigned short POClsb;
845 unsigned short collocated_from_l0_flag;
846 unsigned short collocated_ref_idx;
847 unsigned short log2_parallel_merge_level;
848 unsigned short five_minus_max_num_merge_cand;
849 unsigned short sps_num_reorder_pics_0;
850 unsigned short modification_flag;
851 unsigned short tiles_enabled_flag;
852 unsigned short num_tile_columns_minus1;
853 unsigned short num_tile_rows_minus1;
854 unsigned short tile_width[12];
855 unsigned short tile_height[8];
856 unsigned short misc_flag0;
857 unsigned short pps_beta_offset_div2;
858 unsigned short pps_tc_offset_div2;
859 unsigned short slice_beta_offset_div2;
860 unsigned short slice_tc_offset_div2;
861 unsigned short pps_cb_qp_offset;
862 unsigned short pps_cr_qp_offset;
863 unsigned short first_slice_segment_in_pic_flag;
864 unsigned short m_temporalId;
865 unsigned short m_nalUnitType;
866
867 unsigned short vui_num_units_in_tick_hi;
868 unsigned short vui_num_units_in_tick_lo;
869 unsigned short vui_time_scale_hi;
870 unsigned short vui_time_scale_lo;
871 unsigned short bit_depth;
872 unsigned short profile_etc;
873 unsigned short sei_frame_field_info;
874 unsigned short video_signal_type;
875 unsigned short modification_list[0x20];
876 unsigned short conformance_window_flag;
877 unsigned short conf_win_left_offset;
878 unsigned short conf_win_right_offset;
879 unsigned short conf_win_top_offset;
880 unsigned short conf_win_bottom_offset;
881 unsigned short chroma_format_idc;
882 unsigned short color_description;
883 unsigned short aspect_ratio_idc;
884 unsigned short sar_width;
885 unsigned short sar_height;
886 unsigned short sps_max_dec_pic_buffering_minus1_0;
887 } p;
888};
889
890#define RPM_BUF_SIZE (0x80*2)
891/* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
892#define LMEM_BUF_SIZE (0x500 * 2)
893
894struct buff_s {
895 u32 buf_start;
896 u32 buf_size;
897 u32 buf_end;
898};
899
900struct BuffInfo_s {
901 u32 max_width;
902 u32 max_height;
903 unsigned int start_adr;
904 unsigned int end_adr;
905 struct buff_s ipp;
906 struct buff_s sao_abv;
907 struct buff_s sao_vb;
908 struct buff_s short_term_rps;
909 struct buff_s vps;
910 struct buff_s sps;
911 struct buff_s pps;
912 struct buff_s sao_up;
913 struct buff_s swap_buf;
914 struct buff_s swap_buf2;
915 struct buff_s scalelut;
916 struct buff_s dblk_para;
917 struct buff_s dblk_data;
918 struct buff_s dblk_data2;
919 struct buff_s mmu_vbh;
920 struct buff_s cm_header;
921 struct buff_s mpred_above;
922#ifdef MV_USE_FIXED_BUF
923 struct buff_s mpred_mv;
924#endif
925 struct buff_s rpm;
926 struct buff_s lmem;
927};
928#define WORK_BUF_SPEC_NUM 3
929static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
930 {
931 /* 8M bytes */
932 .max_width = 1920,
933 .max_height = 1088,
934 .ipp = {
935 /* IPP work space calculation :
936 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
937 */
938 .buf_size = 0x4000,
939 },
940 .sao_abv = {
941 .buf_size = 0x30000,
942 },
943 .sao_vb = {
944 .buf_size = 0x30000,
945 },
946 .short_term_rps = {
947 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
948 * total 64x16x2 = 2048 bytes (0x800)
949 */
950 .buf_size = 0x800,
951 },
952 .vps = {
953 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
954 * total 0x0800 bytes
955 */
956 .buf_size = 0x800,
957 },
958 .sps = {
959 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
960 * total 0x0800 bytes
961 */
962 .buf_size = 0x800,
963 },
964 .pps = {
965 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
966 * total 0x2000 bytes
967 */
968 .buf_size = 0x2000,
969 },
970 .sao_up = {
971 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
972 * each has 16 bytes total 0x2800 bytes
973 */
974 .buf_size = 0x2800,
975 },
976 .swap_buf = {
977 /* 256cyclex64bit = 2K bytes 0x800
978 * (only 144 cycles valid)
979 */
980 .buf_size = 0x800,
981 },
982 .swap_buf2 = {
983 .buf_size = 0x800,
984 },
985 .scalelut = {
986 /* support up to 32 SCALELUT 1024x32 =
987 * 32Kbytes (0x8000)
988 */
989 .buf_size = 0x8000,
990 },
991 .dblk_para = {
992#ifdef SUPPORT_10BIT
993 .buf_size = 0x40000,
994#else
995 /* DBLK -> Max 256(4096/16) LCU, each para
996 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
997 */
998 .buf_size = 0x20000,
999#endif
1000 },
1001 .dblk_data = {
1002 .buf_size = 0x40000,
1003 },
1004 .dblk_data2 = {
1005 .buf_size = 0x40000,
1006 }, /*dblk data for adapter*/
1007 .mmu_vbh = {
1008 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1009 },
1010#if 0
1011 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1012 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1013 (MAX_REF_PIC_NUM + 1),
1014 },
1015#endif
1016 .mpred_above = {
1017 .buf_size = 0x8000,
1018 },
1019#ifdef MV_USE_FIXED_BUF
1020 .mpred_mv = {/* 1080p, 0x40000 per buffer */
1021 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
1022 },
1023#endif
1024 .rpm = {
1025 .buf_size = RPM_BUF_SIZE,
1026 },
1027 .lmem = {
1028 .buf_size = 0x500 * 2,
1029 }
1030 },
1031 {
1032 .max_width = 4096,
1033 .max_height = 2048,
1034 .ipp = {
1035 /* IPP work space calculation :
1036 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1037 */
1038 .buf_size = 0x4000,
1039 },
1040 .sao_abv = {
1041 .buf_size = 0x30000,
1042 },
1043 .sao_vb = {
1044 .buf_size = 0x30000,
1045 },
1046 .short_term_rps = {
1047 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1048 * total 64x16x2 = 2048 bytes (0x800)
1049 */
1050 .buf_size = 0x800,
1051 },
1052 .vps = {
1053 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1054 * total 0x0800 bytes
1055 */
1056 .buf_size = 0x800,
1057 },
1058 .sps = {
1059 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1060 * total 0x0800 bytes
1061 */
1062 .buf_size = 0x800,
1063 },
1064 .pps = {
1065 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1066 * total 0x2000 bytes
1067 */
1068 .buf_size = 0x2000,
1069 },
1070 .sao_up = {
1071 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1072 * each has 16 bytes total 0x2800 bytes
1073 */
1074 .buf_size = 0x2800,
1075 },
1076 .swap_buf = {
1077 /* 256cyclex64bit = 2K bytes 0x800
1078 * (only 144 cycles valid)
1079 */
1080 .buf_size = 0x800,
1081 },
1082 .swap_buf2 = {
1083 .buf_size = 0x800,
1084 },
1085 .scalelut = {
1086 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1087 * (0x8000)
1088 */
1089 .buf_size = 0x8000,
1090 },
1091 .dblk_para = {
1092 /* DBLK -> Max 256(4096/16) LCU, each para
1093 * 512bytes(total:0x20000),
1094 * data 1024bytes(total:0x40000)
1095 */
1096 .buf_size = 0x20000,
1097 },
1098 .dblk_data = {
1099 .buf_size = 0x80000,
1100 },
1101 .dblk_data2 = {
1102 .buf_size = 0x80000,
1103 }, /*dblk data for adapter*/
1104 .mmu_vbh = {
1105 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1106 },
1107#if 0
1108 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1109 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1110 (MAX_REF_PIC_NUM + 1),
1111 },
1112#endif
1113 .mpred_above = {
1114 .buf_size = 0x8000,
1115 },
1116#ifdef MV_USE_FIXED_BUF
1117 .mpred_mv = {
1118 /* .buf_size = 0x100000*16,
1119 //4k2k , 0x100000 per buffer */
1120 /* 4096x2304 , 0x120000 per buffer */
1121 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1122 },
1123#endif
1124 .rpm = {
1125 .buf_size = RPM_BUF_SIZE,
1126 },
1127 .lmem = {
1128 .buf_size = 0x500 * 2,
1129 }
1130 },
1131
1132 {
1133 .max_width = 4096*2,
1134 .max_height = 2048*2,
1135 .ipp = {
1136 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1137 .buf_size = 0x4000*2,
1138 },
1139 .sao_abv = {
1140 .buf_size = 0x30000*2,
1141 },
1142 .sao_vb = {
1143 .buf_size = 0x30000*2,
1144 },
1145 .short_term_rps = {
1146 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1147 .buf_size = 0x800,
1148 },
1149 .vps = {
1150 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1151 .buf_size = 0x800,
1152 },
1153 .sps = {
1154 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1155 .buf_size = 0x800,
1156 },
1157 .pps = {
1158 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1159 .buf_size = 0x2000,
1160 },
1161 .sao_up = {
1162 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1163 .buf_size = 0x2800*2,
1164 },
1165 .swap_buf = {
1166 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1167 .buf_size = 0x800,
1168 },
1169 .swap_buf2 = {
1170 .buf_size = 0x800,
1171 },
1172 .scalelut = {
1173 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1174 .buf_size = 0x8000*2,
1175 },
1176 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1177 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1178 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1179 .mmu_vbh = {
1180 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1181 },
1182#if 0
1183 .cm_header = {
1184 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1185 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1186 },
1187#endif
1188 .mpred_above = {
1189 .buf_size = 0x8000*2,
1190 },
1191#ifdef MV_USE_FIXED_BUF
1192 .mpred_mv = {
1193 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1194 },
1195#endif
1196 .rpm = {
1197 .buf_size = RPM_BUF_SIZE,
1198 },
1199 .lmem = {
1200 .buf_size = 0x500 * 2,
1201 },
1202 }
1203};
1204
1205static void init_buff_spec(struct hevc_state_s *hevc,
1206 struct BuffInfo_s *buf_spec)
1207{
1208 buf_spec->ipp.buf_start = buf_spec->start_adr;
1209 buf_spec->sao_abv.buf_start =
1210 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1211
1212 buf_spec->sao_vb.buf_start =
1213 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1214 buf_spec->short_term_rps.buf_start =
1215 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1216 buf_spec->vps.buf_start =
1217 buf_spec->short_term_rps.buf_start +
1218 buf_spec->short_term_rps.buf_size;
1219 buf_spec->sps.buf_start =
1220 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1221 buf_spec->pps.buf_start =
1222 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1223 buf_spec->sao_up.buf_start =
1224 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1225 buf_spec->swap_buf.buf_start =
1226 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1227 buf_spec->swap_buf2.buf_start =
1228 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1229 buf_spec->scalelut.buf_start =
1230 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1231 buf_spec->dblk_para.buf_start =
1232 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1233 buf_spec->dblk_data.buf_start =
1234 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1235 buf_spec->dblk_data2.buf_start =
1236 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1237 buf_spec->mmu_vbh.buf_start =
1238 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1239 buf_spec->mpred_above.buf_start =
1240 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1241#ifdef MV_USE_FIXED_BUF
1242 buf_spec->mpred_mv.buf_start =
1243 buf_spec->mpred_above.buf_start +
1244 buf_spec->mpred_above.buf_size;
1245
1246 buf_spec->rpm.buf_start =
1247 buf_spec->mpred_mv.buf_start +
1248 buf_spec->mpred_mv.buf_size;
1249#else
1250 buf_spec->rpm.buf_start =
1251 buf_spec->mpred_above.buf_start +
1252 buf_spec->mpred_above.buf_size;
1253#endif
1254 buf_spec->lmem.buf_start =
1255 buf_spec->rpm.buf_start +
1256 buf_spec->rpm.buf_size;
1257 buf_spec->end_adr =
1258 buf_spec->lmem.buf_start +
1259 buf_spec->lmem.buf_size;
1260
1261 if (hevc && get_dbg_flag2(hevc)) {
1262 hevc_print(hevc, 0,
1263 "%s workspace (%x %x) size = %x\n", __func__,
1264 buf_spec->start_adr, buf_spec->end_adr,
1265 buf_spec->end_adr - buf_spec->start_adr);
1266
1267 hevc_print(hevc, 0,
1268 "ipp.buf_start :%x\n",
1269 buf_spec->ipp.buf_start);
1270 hevc_print(hevc, 0,
1271 "sao_abv.buf_start :%x\n",
1272 buf_spec->sao_abv.buf_start);
1273 hevc_print(hevc, 0,
1274 "sao_vb.buf_start :%x\n",
1275 buf_spec->sao_vb.buf_start);
1276 hevc_print(hevc, 0,
1277 "short_term_rps.buf_start :%x\n",
1278 buf_spec->short_term_rps.buf_start);
1279 hevc_print(hevc, 0,
1280 "vps.buf_start :%x\n",
1281 buf_spec->vps.buf_start);
1282 hevc_print(hevc, 0,
1283 "sps.buf_start :%x\n",
1284 buf_spec->sps.buf_start);
1285 hevc_print(hevc, 0,
1286 "pps.buf_start :%x\n",
1287 buf_spec->pps.buf_start);
1288 hevc_print(hevc, 0,
1289 "sao_up.buf_start :%x\n",
1290 buf_spec->sao_up.buf_start);
1291 hevc_print(hevc, 0,
1292 "swap_buf.buf_start :%x\n",
1293 buf_spec->swap_buf.buf_start);
1294 hevc_print(hevc, 0,
1295 "swap_buf2.buf_start :%x\n",
1296 buf_spec->swap_buf2.buf_start);
1297 hevc_print(hevc, 0,
1298 "scalelut.buf_start :%x\n",
1299 buf_spec->scalelut.buf_start);
1300 hevc_print(hevc, 0,
1301 "dblk_para.buf_start :%x\n",
1302 buf_spec->dblk_para.buf_start);
1303 hevc_print(hevc, 0,
1304 "dblk_data.buf_start :%x\n",
1305 buf_spec->dblk_data.buf_start);
1306 hevc_print(hevc, 0,
1307 "dblk_data2.buf_start :%x\n",
1308 buf_spec->dblk_data2.buf_start);
1309 hevc_print(hevc, 0,
1310 "mpred_above.buf_start :%x\n",
1311 buf_spec->mpred_above.buf_start);
1312#ifdef MV_USE_FIXED_BUF
1313 hevc_print(hevc, 0,
1314 "mpred_mv.buf_start :%x\n",
1315 buf_spec->mpred_mv.buf_start);
1316#endif
1317 if ((get_dbg_flag2(hevc)
1318 &
1319 H265_DEBUG_SEND_PARAM_WITH_REG)
1320 == 0) {
1321 hevc_print(hevc, 0,
1322 "rpm.buf_start :%x\n",
1323 buf_spec->rpm.buf_start);
1324 }
1325 }
1326
1327}
1328
1329enum SliceType {
1330 B_SLICE,
1331 P_SLICE,
1332 I_SLICE
1333};
1334
1335/*USE_BUF_BLOCK*/
1336struct BUF_s {
1337 ulong start_adr;
1338 u32 size;
1339 u32 luma_size;
1340 ulong header_addr;
1341 u32 header_size;
1342 int used_flag;
1343 ulong v4l_ref_buf_addr;
1344 ulong chroma_addr;
1345 u32 chroma_size;
1346} /*BUF_t */;
1347
1348/* level 6, 6.1 maximum slice number is 800; other is 200 */
1349#define MAX_SLICE_NUM 800
1350struct PIC_s {
1351 int index;
1352 int scatter_alloc;
1353 int BUF_index;
1354 int mv_buf_index;
1355 int POC;
1356 int decode_idx;
1357 int slice_type;
1358 int RefNum_L0;
1359 int RefNum_L1;
1360 int num_reorder_pic;
1361 int stream_offset;
1362 unsigned char referenced;
1363 unsigned char output_mark;
1364 unsigned char recon_mark;
1365 unsigned char output_ready;
1366 unsigned char error_mark;
1367 //dis_mark = 0:discard mark,dis_mark = 1:no discard mark
1368 unsigned char dis_mark;
1369 /**/ int slice_idx;
1370 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1371 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1372 /*buffer */
1373 unsigned int header_adr;
1374#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1375 unsigned char dv_enhance_exist;
1376#endif
1377 char *aux_data_buf;
1378 int aux_data_size;
1379 unsigned long cma_alloc_addr;
1380 struct page *alloc_pages;
1381 unsigned int mpred_mv_wr_start_addr;
1382 unsigned int mc_y_adr;
1383 unsigned int mc_u_v_adr;
1384#ifdef SUPPORT_10BIT
1385 /*unsigned int comp_body_size;*/
1386 unsigned int dw_y_adr;
1387 unsigned int dw_u_v_adr;
1388#endif
1389 int mc_canvas_y;
1390 int mc_canvas_u_v;
1391 int width;
1392 int height;
1393
1394 int y_canvas_index;
1395 int uv_canvas_index;
1396#ifdef MULTI_INSTANCE_SUPPORT
1397 struct canvas_config_s canvas_config[2];
1398#endif
1399#ifdef SUPPORT_10BIT
1400 int mem_saving_mode;
1401 u32 bit_depth_luma;
1402 u32 bit_depth_chroma;
1403#endif
1404#ifdef LOSLESS_COMPRESS_MODE
1405 unsigned int losless_comp_body_size;
1406#endif
1407 unsigned char pic_struct;
1408 int vf_ref;
1409
1410 u32 pts;
1411 u64 pts64;
1412 u64 timestamp;
1413
1414 u32 aspect_ratio_idc;
1415 u32 sar_width;
1416 u32 sar_height;
1417 u32 double_write_mode;
1418 u32 video_signal_type;
1419 unsigned short conformance_window_flag;
1420 unsigned short conf_win_left_offset;
1421 unsigned short conf_win_right_offset;
1422 unsigned short conf_win_top_offset;
1423 unsigned short conf_win_bottom_offset;
1424 unsigned short chroma_format_idc;
1425
1426 /* picture qos information */
1427 int max_qp;
1428 int avg_qp;
1429 int min_qp;
1430 int max_skip;
1431 int avg_skip;
1432 int min_skip;
1433 int max_mv;
1434 int min_mv;
1435 int avg_mv;
1436
1437 u32 hw_decode_time;
1438 u32 frame_size; // For frame base mode
1439 bool vframe_bound;
1440 bool ip_mode;
1441 u32 stream_frame_size; //for stream base
1442} /*PIC_t */;
1443
1444#define MAX_TILE_COL_NUM 10
1445#define MAX_TILE_ROW_NUM 20
1446struct tile_s {
1447 int width;
1448 int height;
1449 int start_cu_x;
1450 int start_cu_y;
1451
1452 unsigned int sao_vb_start_addr;
1453 unsigned int sao_abv_start_addr;
1454};
1455
1456#define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1457#define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1458#define SEI_HDR10PLUS_MASK 0x00000004
1459
1460#define VF_POOL_SIZE 32
1461
1462#ifdef MULTI_INSTANCE_SUPPORT
1463#define DEC_RESULT_NONE 0
1464#define DEC_RESULT_DONE 1
1465#define DEC_RESULT_AGAIN 2
1466#define DEC_RESULT_CONFIG_PARAM 3
1467#define DEC_RESULT_ERROR 4
1468#define DEC_INIT_PICLIST 5
1469#define DEC_UNINIT_PICLIST 6
1470#define DEC_RESULT_GET_DATA 7
1471#define DEC_RESULT_GET_DATA_RETRY 8
1472#define DEC_RESULT_EOS 9
1473#define DEC_RESULT_FORCE_EXIT 10
1474#define DEC_RESULT_FREE_CANVAS 11
1475
1476static void vh265_work(struct work_struct *work);
1477static void vh265_timeout_work(struct work_struct *work);
1478static void vh265_notify_work(struct work_struct *work);
1479
1480#endif
1481
1482struct debug_log_s {
1483 struct list_head list;
1484 uint8_t data; /*will alloc more size*/
1485};
1486
1487struct hevc_state_s {
1488#ifdef MULTI_INSTANCE_SUPPORT
1489 struct platform_device *platform_dev;
1490 void (*vdec_cb)(struct vdec_s *, void *);
1491 void *vdec_cb_arg;
1492 struct vframe_chunk_s *chunk;
1493 int dec_result;
1494 struct work_struct work;
1495 struct work_struct timeout_work;
1496 struct work_struct notify_work;
1497 struct work_struct set_clk_work;
1498 /* timeout handle */
1499 unsigned long int start_process_time;
1500 unsigned int last_lcu_idx;
1501 unsigned int decode_timeout_count;
1502 unsigned int timeout_num;
1503#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1504 unsigned char switch_dvlayer_flag;
1505 unsigned char no_switch_dvlayer_count;
1506 unsigned char bypass_dvenl_enable;
1507 unsigned char bypass_dvenl;
1508#endif
1509 unsigned char start_parser_type;
1510 /*start_decoding_flag:
1511 vps/pps/sps/idr info from ucode*/
1512 unsigned char start_decoding_flag;
1513 unsigned char rps_set_id;
1514 unsigned char eos;
1515 int pic_decoded_lcu_idx;
1516 u8 over_decode;
1517 u8 empty_flag;
1518#endif
1519 struct vframe_s vframe_dummy;
1520 char *provider_name;
1521 int index;
1522 struct device *cma_dev;
1523 unsigned char m_ins_flag;
1524 unsigned char dolby_enhance_flag;
1525 unsigned long buf_start;
1526 u32 buf_size;
1527 u32 mv_buf_size;
1528
1529 struct BuffInfo_s work_space_buf_store;
1530 struct BuffInfo_s *work_space_buf;
1531
1532 u8 aux_data_dirty;
1533 u32 prefix_aux_size;
1534 u32 suffix_aux_size;
1535 void *aux_addr;
1536 void *rpm_addr;
1537 void *lmem_addr;
1538 dma_addr_t aux_phy_addr;
1539 dma_addr_t rpm_phy_addr;
1540 dma_addr_t lmem_phy_addr;
1541
1542 unsigned int pic_list_init_flag;
1543 unsigned int use_cma_flag;
1544
1545 unsigned short *rpm_ptr;
1546 unsigned short *lmem_ptr;
1547 unsigned short *debug_ptr;
1548 int debug_ptr_size;
1549 int pic_w;
1550 int pic_h;
1551 int lcu_x_num;
1552 int lcu_y_num;
1553 int lcu_total;
1554 int lcu_size;
1555 int lcu_size_log2;
1556 int lcu_x_num_pre;
1557 int lcu_y_num_pre;
1558 int first_pic_after_recover;
1559
1560 int num_tile_col;
1561 int num_tile_row;
1562 int tile_enabled;
1563 int tile_x;
1564 int tile_y;
1565 int tile_y_x;
1566 int tile_start_lcu_x;
1567 int tile_start_lcu_y;
1568 int tile_width_lcu;
1569 int tile_height_lcu;
1570
1571 int slice_type;
1572 unsigned int slice_addr;
1573 unsigned int slice_segment_addr;
1574
1575 unsigned char interlace_flag;
1576 unsigned char curr_pic_struct;
1577 unsigned char frame_field_info_present_flag;
1578
1579 unsigned short sps_num_reorder_pics_0;
1580 unsigned short misc_flag0;
1581 int m_temporalId;
1582 int m_nalUnitType;
1583 int TMVPFlag;
1584 int isNextSliceSegment;
1585 int LDCFlag;
1586 int m_pocRandomAccess;
1587 int plevel;
1588 int MaxNumMergeCand;
1589
1590 int new_pic;
1591 int new_tile;
1592 int curr_POC;
1593 int iPrevPOC;
1594#ifdef MULTI_INSTANCE_SUPPORT
1595 int decoded_poc;
1596 struct PIC_s *decoding_pic;
1597#endif
1598 int iPrevTid0POC;
1599 int list_no;
1600 int RefNum_L0;
1601 int RefNum_L1;
1602 int ColFromL0Flag;
1603 int LongTerm_Curr;
1604 int LongTerm_Col;
1605 int Col_POC;
1606 int LongTerm_Ref;
1607#ifdef MULTI_INSTANCE_SUPPORT
1608 int m_pocRandomAccess_bak;
1609 int curr_POC_bak;
1610 int iPrevPOC_bak;
1611 int iPrevTid0POC_bak;
1612 unsigned char start_parser_type_bak;
1613 unsigned char start_decoding_flag_bak;
1614 unsigned char rps_set_id_bak;
1615 int pic_decoded_lcu_idx_bak;
1616 int decode_idx_bak;
1617#endif
1618 struct PIC_s *cur_pic;
1619 struct PIC_s *col_pic;
1620 int skip_flag;
1621 int decode_idx;
1622 int slice_idx;
1623 unsigned char have_vps;
1624 unsigned char have_sps;
1625 unsigned char have_pps;
1626 unsigned char have_valid_start_slice;
1627 unsigned char wait_buf;
1628 unsigned char error_flag;
1629 unsigned int error_skip_nal_count;
1630 long used_4k_num;
1631
1632 unsigned char
1633 ignore_bufmgr_error; /* bit 0, for decoding;
1634 bit 1, for displaying
1635 bit 1 must be set if bit 0 is 1*/
1636 int PB_skip_mode;
1637 int PB_skip_count_after_decoding;
1638#ifdef SUPPORT_10BIT
1639 int mem_saving_mode;
1640#endif
1641#ifdef LOSLESS_COMPRESS_MODE
1642 unsigned int losless_comp_body_size;
1643#endif
1644 int pts_mode;
1645 int last_lookup_pts;
1646 int last_pts;
1647 u64 last_lookup_pts_us64;
1648 u64 last_pts_us64;
1649 u32 shift_byte_count_lo;
1650 u32 shift_byte_count_hi;
1651 int pts_mode_switching_count;
1652 int pts_mode_recovery_count;
1653
1654 int pic_num;
1655
1656 /**/
1657 union param_u param;
1658
1659 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1660
1661 struct timer_list timer;
1662 struct BUF_s m_BUF[BUF_POOL_SIZE];
1663 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1664 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1665
1666 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1667 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1668 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1669 struct vframe_s vfpool[VF_POOL_SIZE];
1670
1671 u32 stat;
1672 u32 frame_width;
1673 u32 frame_height;
1674 u32 frame_dur;
1675 u32 frame_ar;
1676 u32 bit_depth_luma;
1677 u32 bit_depth_chroma;
1678 u32 video_signal_type;
1679 u32 video_signal_type_debug;
1680 u32 saved_resolution;
1681 bool get_frame_dur;
1682 u32 error_watchdog_count;
1683 u32 error_skip_nal_wt_cnt;
1684 u32 error_system_watchdog_count;
1685
1686#ifdef DEBUG_PTS
1687 unsigned long pts_missed;
1688 unsigned long pts_hit;
1689#endif
1690 struct dec_sysinfo vh265_amstream_dec_info;
1691 unsigned char init_flag;
1692 unsigned char first_sc_checked;
1693 unsigned char uninit_list;
1694 u32 start_decoding_time;
1695
1696 int show_frame_num;
1697#ifdef USE_UNINIT_SEMA
1698 struct semaphore h265_uninit_done_sema;
1699#endif
1700 int fatal_error;
1701
1702
1703 u32 sei_present_flag;
1704 void *frame_mmu_map_addr;
1705 dma_addr_t frame_mmu_map_phy_addr;
1706 unsigned int mmu_mc_buf_start;
1707 unsigned int mmu_mc_buf_end;
1708 unsigned int mmu_mc_start_4k_adr;
1709 void *mmu_box;
1710 void *bmmu_box;
1711 int mmu_enable;
1712
1713 unsigned int dec_status;
1714
1715 /* data for SEI_MASTER_DISPLAY_COLOR */
1716 unsigned int primaries[3][2];
1717 unsigned int white_point[2];
1718 unsigned int luminance[2];
1719 /* data for SEI_CONTENT_LIGHT_LEVEL */
1720 unsigned int content_light_level[2];
1721
1722 struct PIC_s *pre_top_pic;
1723 struct PIC_s *pre_bot_pic;
1724
1725#ifdef MULTI_INSTANCE_SUPPORT
1726 int double_write_mode;
1727 int dynamic_buf_num_margin;
1728 int start_action;
1729 int save_buffer_mode;
1730#endif
1731 u32 i_only;
1732 struct list_head log_list;
1733 u32 ucode_pause_pos;
1734 u32 start_shift_bytes;
1735
1736 u32 vf_pre_count;
1737 u32 vf_get_count;
1738 u32 vf_put_count;
1739#ifdef SWAP_HEVC_UCODE
1740 dma_addr_t mc_dma_handle;
1741 void *mc_cpu_addr;
1742 int swap_size;
1743 ulong swap_addr;
1744#endif
1745#ifdef DETREFILL_ENABLE
1746 dma_addr_t detbuf_adr;
1747 u16 *detbuf_adr_virt;
1748 u8 delrefill_check;
1749#endif
1750 u8 head_error_flag;
1751 int valve_count;
1752 struct firmware_s *fw;
1753 int max_pic_w;
1754 int max_pic_h;
1755#ifdef AGAIN_HAS_THRESHOLD
1756 u8 next_again_flag;
1757 u32 pre_parser_wr_ptr;
1758#endif
1759 u32 ratio_control;
1760 u32 first_pic_flag;
1761 u32 decode_size;
1762 struct mutex chunks_mutex;
1763 int need_cache_size;
1764 u64 sc_start_time;
1765 u32 skip_nal_count;
1766 bool is_swap;
1767 bool is_4k;
1768 int frameinfo_enable;
1769 struct vframe_qos_s vframe_qos;
1770 bool is_used_v4l;
1771 void *v4l2_ctx;
1772 bool v4l_params_parsed;
1773 u32 mem_map_mode;
1774 u32 performance_profile;
1775 struct vdec_info *gvs;
1776 unsigned int res_ch_flag;
1777 bool ip_mode;
1778 u32 kpi_first_i_comming;
1779 u32 kpi_first_i_decoded;
1780 int sidebind_type;
1781 int sidebind_channel_id;
1782 u32 last_dec_pic_offset;
1783 u32 min_pic_size;
1784 u32 pts_continue_miss;
1785 u32 pts_lookup_margin;
1786 u32 again_count;
1787 u64 again_timeout_jiffies;
1788 u32 pre_parser_video_rp;
1789 u32 pre_parser_video_wp;
1790 bool dv_duallayer;
1791 u32 poc_error_count;
1792} /*hevc_stru_t */;
1793
1794#ifdef AGAIN_HAS_THRESHOLD
1795static u32 again_threshold;
1796#endif
1797#ifdef SEND_LMEM_WITH_RPM
1798#define get_lmem_params(hevc, ladr) \
1799 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
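/*
 * The index arithmetic above maps ladr to (ladr & ~3) + (3 - (ladr & 3)),
 * i.e. it reverses the four 16-bit halfwords within each 64-bit group,
 * presumably to undo the word order of the ucode lmem dump. Example:
 * get_lmem_params(hevc, 0x41) reads hevc->lmem_ptr[0x42].
 */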
1800
1801
1802static int get_frame_mmu_map_size(void)
1803{
1804 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1805 return (MAX_FRAME_8K_NUM * 4);
1806
1807 return (MAX_FRAME_4K_NUM * 4);
1808}
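/*
 * The map holds 4 bytes per entry (presumably one per 4K page), so it is
 * MAX_FRAME_4K_NUM * 4 = 0x4800 bytes on pre-SM1 SoCs and four times that
 * (0x12000 bytes) on SM1 and later, which can decode 8K.
 */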
1809
1810static int is_oversize(int w, int h)
1811{
1812 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1813 MAX_SIZE_8K : MAX_SIZE_4K;
1814
1815 if (w < 0 || h < 0)
1816 return true;
1817
1818 if (h != 0 && (w > max / h))
1819 return true;
1820
1821 return false;
1822}
1823
1824int is_oversize_ex(int w, int h)
1825{
1826 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) ?
1827 MAX_SIZE_8K : MAX_SIZE_4K;
1828
1829 if (w == 0 || h == 0)
1830 return true;
1831 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
1832 if (w > 8192 || h > 4608)
1833 return true;
1834 } else {
1835 if (w > 4096 || h > 2304)
1836 return true;
1837 }
1838
1839 if (w < 0 || h < 0)
1840 return true;
1841
1842 if (h != 0 && (w > max / h))
1843 return true;
1844
1845 return false;
1846}
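/*
 * Example: on a pre-SM1 SoC, max = MAX_SIZE_4K = 4096 * 2304, so 4096x2304 is
 * accepted (4096 <= max / 2304) while 4112x2304 is rejected; is_oversize_ex()
 * additionally rejects zero dimensions and enforces per-axis limits
 * (8192x4608 on SM1 and later, 4096x2304 otherwise).
 */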
1847
1848
1849void check_head_error(struct hevc_state_s *hevc)
1850{
1851#define pcm_enabled_flag 0x040
1852#define pcm_sample_bit_depth_luma 0x041
1853#define pcm_sample_bit_depth_chroma 0x042
1854 hevc->head_error_flag = 0;
1855 if ((error_handle_policy & 0x40) == 0)
1856 return;
1857 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1858 uint16_t pcm_depth_luma = get_lmem_params(
1859 hevc, pcm_sample_bit_depth_luma);
1860 uint16_t pcm_sample_chroma = get_lmem_params(
1861 hevc, pcm_sample_bit_depth_chroma);
1862 if (pcm_depth_luma >
1863 hevc->bit_depth_luma ||
1864 pcm_sample_chroma >
1865 hevc->bit_depth_chroma) {
1866 hevc_print(hevc, 0,
1867 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1868 pcm_depth_luma,
1869 pcm_sample_chroma,
1870 hevc->bit_depth_luma,
1871 hevc->bit_depth_chroma);
1872 hevc->head_error_flag = 1;
1873 }
1874 }
1875}
1876#endif
1877
1878#ifdef SUPPORT_10BIT
1879/* Lossless compression body buffer size 4K per 64x32 (jt) */
1880static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1881 int width, int height, int mem_saving_mode)
1882{
1883 int width_x64;
1884 int height_x32;
1885 int bsize;
1886
1887 width_x64 = width + 63;
1888 width_x64 >>= 6;
1889
1890 height_x32 = height + 31;
1891 height_x32 >>= 5;
1892 if (mem_saving_mode == 1 && hevc->mmu_enable)
1893 bsize = 3200 * width_x64 * height_x32;
1894 else if (mem_saving_mode == 1)
1895 bsize = 3072 * width_x64 * height_x32;
1896 else
1897 bsize = 4096 * width_x64 * height_x32;
1898
1899 return bsize;
1900}
1901
1902/* Lossless compression header buffer size 32 bytes per 128x64 (jt) */
1903static int compute_losless_comp_header_size(int width, int height)
1904{
1905 int width_x128;
1906 int height_x64;
1907 int hsize;
1908
1909 width_x128 = width + 127;
1910 width_x128 >>= 7;
1911
1912 height_x64 = height + 63;
1913 height_x64 >>= 6;
1914
1915 hsize = 32*width_x128*height_x64;
1916
1917 return hsize;
1918}
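/*
 * Worked example for a 3840x2160 frame with mem_saving_mode off: the body
 * buffer needs ceil(3840/64) * ceil(2160/32) * 4096 = 60 * 68 * 4096 =
 * 16711680 bytes (~16 MB), and the header buffer needs ceil(3840/128) *
 * ceil(2160/64) * 32 = 30 * 34 * 32 = 32640 bytes.
 */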
1919#endif
1920
1921static int add_log(struct hevc_state_s *hevc,
1922 const char *fmt, ...)
1923{
1924#define HEVC_LOG_BUF 196
1925 struct debug_log_s *log_item;
1926 unsigned char buf[HEVC_LOG_BUF];
1927 int len = 0;
1928 va_list args;
1929 mutex_lock(&vh265_log_mutex);
1930 va_start(args, fmt);
1931 len = sprintf(buf, "<%lu> <%05d> ",
1932 jiffies, hevc->decode_idx);
1933 len += vsnprintf(buf + len,
1934 HEVC_LOG_BUF - len, fmt, args);
1935 va_end(args);
1936 log_item = kmalloc(
1937 sizeof(struct debug_log_s) + len,
1938 GFP_KERNEL);
1939 if (log_item) {
1940 INIT_LIST_HEAD(&log_item->list);
1941 strcpy(&log_item->data, buf);
1942 list_add_tail(&log_item->list,
1943 &hevc->log_list);
1944 }
1945 mutex_unlock(&vh265_log_mutex);
1946 return 0;
1947}
1948
1949static void dump_log(struct hevc_state_s *hevc)
1950{
1951 int i = 0;
1952 struct debug_log_s *log_item, *tmp;
1953 mutex_lock(&vh265_log_mutex);
1954 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1955 hevc_print(hevc, 0,
1956 "[LOG%04d]%s\n",
1957 i++,
1958 &log_item->data);
1959 list_del(&log_item->list);
1960 kfree(log_item);
1961 }
1962 mutex_unlock(&vh265_log_mutex);
1963}
1964
1965static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1966 struct PIC_s *pic)
1967{
1968 if (pic->error_mark
1969 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1970 return 1;
1971 return 0;
1972}
1973
1974static int get_pic_poc(struct hevc_state_s *hevc,
1975 unsigned int idx)
1976{
1977 if (idx != 0xff
1978 && idx < MAX_REF_PIC_NUM
1979 && hevc->m_PIC[idx])
1980 return hevc->m_PIC[idx]->POC;
1981 return INVALID_POC;
1982}
1983
1984#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1985static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1986{
1987 return (hevc->m_ins_flag &&
1988 ((double_write_mode & 0x80000000) == 0)) ?
1989 hevc->double_write_mode :
1990 (double_write_mode & 0x7fffffff);
1991}
1992
1993static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1994{
1995 return (hevc->m_ins_flag &&
1996 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1997 hevc->dynamic_buf_num_margin :
1998 (dynamic_buf_num_margin & 0x7fffffff);
1999}
2000#endif
2001
2002static int get_double_write_mode(struct hevc_state_s *hevc)
2003{
2004 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
2005 int w = hevc->pic_w;
2006 int h = hevc->pic_h;
2007 u32 dw = 0x1; /*1:1*/
2008 switch (valid_dw_mode) {
2009 case 0x100:
2010 if (w > 1920 && h > 1088)
2011 dw = 0x4; /*1:2*/
2012 break;
2013 case 0x200:
2014 if (w > 1920 && h > 1088)
2015 dw = 0x2; /*1:4*/
2016 break;
2017 case 0x300:
2018 if (w > 1280 && h > 720)
2019 dw = 0x4; /*1:2*/
2020 break;
2021 default:
2022 dw = valid_dw_mode;
2023 break;
2024 }
2025 return dw;
2026}
2027
2028static int v4l_parser_get_double_write_mode(struct hevc_state_s *hevc, int w, int h)
2029{
2030 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
2031 u32 dw = 0x1; /*1:1*/
2032 switch (valid_dw_mode) {
2033 case 0x100:
2034 if (w > 1920 && h > 1088)
2035 dw = 0x4; /*1:2*/
2036 break;
2037 case 0x200:
2038 if (w > 1920 && h > 1088)
2039 dw = 0x2; /*1:4*/
2040 break;
2041 case 0x300:
2042 if (w > 1280 && h > 720)
2043 dw = 0x4; /*1:2*/
2044 break;
2045 default:
2046 dw = valid_dw_mode;
2047 break;
2048 }
2049 return dw;
2050}
2051
2052
2053static int get_double_write_ratio(struct hevc_state_s *hevc,
2054 int dw_mode)
2055{
2056 int ratio = 1;
2057 if ((dw_mode == 2) ||
2058 (dw_mode == 3))
2059 ratio = 4;
2060 else if (dw_mode == 4)
2061 ratio = 2;
2062 return ratio;
2063}
2064#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2065static unsigned char get_idx(struct hevc_state_s *hevc)
2066{
2067 return hevc->index;
2068}
2069#endif
2070
2071#undef pr_info
2072#define pr_info printk
2073static int hevc_print(struct hevc_state_s *hevc,
2074 int flag, const char *fmt, ...)
2075{
2076#define HEVC_PRINT_BUF 256
2077 unsigned char buf[HEVC_PRINT_BUF];
2078 int len = 0;
2079#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2080 if (hevc == NULL ||
2081 (flag == 0) ||
2082 ((debug_mask &
2083 (1 << hevc->index))
2084 && (debug & flag))) {
2085#endif
2086 va_list args;
2087
2088 va_start(args, fmt);
2089 if (hevc)
2090 len = sprintf(buf, "[%d]", hevc->index);
2091 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2092 pr_debug("%s", buf);
2093 va_end(args);
2094#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2095 }
2096#endif
2097 return 0;
2098}
2099
2100static int hevc_print_cont(struct hevc_state_s *hevc,
2101 int flag, const char *fmt, ...)
2102{
2103 unsigned char buf[HEVC_PRINT_BUF];
2104 int len = 0;
2105#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2106 if (hevc == NULL ||
2107 (flag == 0) ||
2108 ((debug_mask &
2109 (1 << hevc->index))
2110 && (debug & flag))) {
2111#endif
2112 va_list args;
2113
2114 va_start(args, fmt);
2115 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2116 pr_info("%s", buf);
2117 va_end(args);
2118#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2119 }
2120#endif
2121 return 0;
2122}
2123
2124static void put_mv_buf(struct hevc_state_s *hevc,
2125 struct PIC_s *pic);
2126
2127static void update_vf_memhandle(struct hevc_state_s *hevc,
2128 struct vframe_s *vf, struct PIC_s *pic);
2129
2130static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2131
2132static void release_aux_data(struct hevc_state_s *hevc,
2133 struct PIC_s *pic);
2134static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2135
2136#ifdef MULTI_INSTANCE_SUPPORT
2137static void backup_decode_state(struct hevc_state_s *hevc)
2138{
2139 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2140 hevc->curr_POC_bak = hevc->curr_POC;
2141 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2142 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2143 hevc->start_parser_type_bak = hevc->start_parser_type;
2144 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2145 hevc->rps_set_id_bak = hevc->rps_set_id;
2146 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2147 hevc->decode_idx_bak = hevc->decode_idx;
2148
2149}
2150
2151static void restore_decode_state(struct hevc_state_s *hevc)
2152{
2153 struct vdec_s *vdec = hw_to_vdec(hevc);
2154 if (!vdec_has_more_input(vdec)) {
2155 hevc->pic_decoded_lcu_idx =
2156 READ_VREG(HEVC_PARSER_LCU_START)
2157 & 0xffffff;
2158 return;
2159 }
2160 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2161 "%s: discard pic index 0x%x\n",
2162 __func__, hevc->decoding_pic ?
2163 hevc->decoding_pic->index : 0xff);
2164 if (hevc->decoding_pic) {
2165 hevc->decoding_pic->error_mark = 0;
2166 hevc->decoding_pic->output_ready = 0;
2167 hevc->decoding_pic->output_mark = 0;
2168 hevc->decoding_pic->referenced = 0;
2169 hevc->decoding_pic->POC = INVALID_POC;
2170 put_mv_buf(hevc, hevc->decoding_pic);
2171 release_aux_data(hevc, hevc->decoding_pic);
2172 hevc->decoding_pic = NULL;
2173 }
2174 hevc->decode_idx = hevc->decode_idx_bak;
2175 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2176 hevc->curr_POC = hevc->curr_POC_bak;
2177 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2178 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2179 hevc->start_parser_type = hevc->start_parser_type_bak;
2180 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2181 hevc->rps_set_id = hevc->rps_set_id_bak;
2182 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2183
2184 if (hevc->pic_list_init_flag == 1)
2185 hevc->pic_list_init_flag = 0;
2186 /*if (hevc->decode_idx == 0)
2187 hevc->start_decoding_flag = 0;*/
2188
2189 hevc->slice_idx = 0;
2190 hevc->used_4k_num = -1;
2191}
2192#endif
2193
2194static void hevc_init_stru(struct hevc_state_s *hevc,
2195 struct BuffInfo_s *buf_spec_i)
2196{
2197 int i;
2198 INIT_LIST_HEAD(&hevc->log_list);
2199 hevc->work_space_buf = buf_spec_i;
2200 hevc->prefix_aux_size = 0;
2201 hevc->suffix_aux_size = 0;
2202 hevc->aux_addr = NULL;
2203 hevc->rpm_addr = NULL;
2204 hevc->lmem_addr = NULL;
2205
2206 hevc->curr_POC = INVALID_POC;
2207
2208 hevc->pic_list_init_flag = 0;
2209 hevc->use_cma_flag = 0;
2210 hevc->decode_idx = 0;
2211 hevc->slice_idx = 0;
2212 hevc->new_pic = 0;
2213 hevc->new_tile = 0;
2214 hevc->iPrevPOC = 0;
2215 hevc->list_no = 0;
2216 /* int m_uiMaxCUWidth = 1<<7; */
2217 /* int m_uiMaxCUHeight = 1<<7; */
2218 hevc->m_pocRandomAccess = MAX_INT;
2219 hevc->tile_enabled = 0;
2220 hevc->tile_x = 0;
2221 hevc->tile_y = 0;
2222 hevc->iPrevTid0POC = 0;
2223 hevc->slice_addr = 0;
2224 hevc->slice_segment_addr = 0;
2225 hevc->skip_flag = 0;
2226 hevc->misc_flag0 = 0;
2227
2228 hevc->cur_pic = NULL;
2229 hevc->col_pic = NULL;
2230 hevc->wait_buf = 0;
2231 hevc->error_flag = 0;
2232 hevc->head_error_flag = 0;
2233 hevc->error_skip_nal_count = 0;
2234 hevc->have_vps = 0;
2235 hevc->have_sps = 0;
2236 hevc->have_pps = 0;
2237 hevc->have_valid_start_slice = 0;
2238
2239 hevc->pts_mode = PTS_NORMAL;
2240 hevc->last_pts = 0;
2241 hevc->last_lookup_pts = 0;
2242 hevc->last_pts_us64 = 0;
2243 hevc->last_lookup_pts_us64 = 0;
2244 hevc->pts_mode_switching_count = 0;
2245 hevc->pts_mode_recovery_count = 0;
2246
2247 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2248 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2249 if (hevc->PB_skip_mode == 0)
2250 hevc->ignore_bufmgr_error = 0x1;
2251 else
2252 hevc->ignore_bufmgr_error = 0x0;
2253
2254 if (hevc->is_used_v4l) {
2255 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2256 if (hevc->m_PIC[i] != NULL) {
2257 memset(hevc->m_PIC[i], 0, sizeof(struct PIC_s));
2258 hevc->m_PIC[i]->index = i;
2259 }
2260 }
2261 }
2262
2263 hevc->pic_num = 0;
2264 hevc->lcu_x_num_pre = 0;
2265 hevc->lcu_y_num_pre = 0;
2266 hevc->first_pic_after_recover = 0;
2267
2268 hevc->pre_top_pic = NULL;
2269 hevc->pre_bot_pic = NULL;
2270
2271 hevc->sei_present_flag = 0;
2272 hevc->valve_count = 0;
2273 hevc->first_pic_flag = 0;
2274#ifdef MULTI_INSTANCE_SUPPORT
2275 hevc->decoded_poc = INVALID_POC;
2276 hevc->start_process_time = 0;
2277 hevc->last_lcu_idx = 0;
2278 hevc->decode_timeout_count = 0;
2279 hevc->timeout_num = 0;
2280 hevc->eos = 0;
2281 hevc->pic_decoded_lcu_idx = -1;
2282 hevc->over_decode = 0;
2283 hevc->used_4k_num = -1;
2284 hevc->start_decoding_flag = 0;
2285 hevc->rps_set_id = 0;
2286 backup_decode_state(hevc);
2287#endif
2288#ifdef DETREFILL_ENABLE
2289 hevc->detbuf_adr = 0;
2290 hevc->detbuf_adr_virt = NULL;
2291#endif
2292}
2293
2294static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2295static int H265_alloc_mmu(struct hevc_state_s *hevc,
2296 struct PIC_s *new_pic, unsigned short bit_depth,
2297 unsigned int *mmu_index_adr);
2298
2299#ifdef DETREFILL_ENABLE
2300#define DETREFILL_BUF_SIZE (4 * 0x4000)
2301#define HEVC_SAO_DBG_MODE0 0x361e
2302#define HEVC_SAO_DBG_MODE1 0x361f
2303#define HEVC_SAO_CTRL10 0x362e
2304#define HEVC_SAO_CTRL11 0x362f
2305static int init_detrefill_buf(struct hevc_state_s *hevc)
2306{
2307 if (hevc->detbuf_adr_virt)
2308 return 0;
2309
2310 hevc->detbuf_adr_virt =
2311 (void *)dma_alloc_coherent(amports_get_dma_device(),
2312 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2313 GFP_KERNEL);
2314
2315 if (hevc->detbuf_adr_virt == NULL) {
2316 pr_err("%s: failed to alloc DETREFILL_BUF\n", __func__);
2317 return -1;
2318 }
2319 return 0;
2320}
2321
2322static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2323{
2324 if (hevc->detbuf_adr_virt) {
2325 dma_free_coherent(amports_get_dma_device(),
2326 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2327 hevc->detbuf_adr);
2328
2329 hevc->detbuf_adr_virt = NULL;
2330 hevc->detbuf_adr = 0;
2331 }
2332}
2333
2334/*
2335 * convert uncompressed frame buffer data from/to ddr
2336 */
2337static void convUnc8x4blk(uint16_t* blk8x4Luma,
2338 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2339{
2340 if (direction == 0) {
2341 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2342 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2343 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2344 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2345 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2346 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2347 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2348 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2349 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2350 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2351 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2352 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2353 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2354 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2355 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2356 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2357 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2358 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2359
2360 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2361 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2362 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2363 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2364 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2365 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2366 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2367 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2368 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2369 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2370 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2371 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2372 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2373 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2374 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2375 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2376 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2377 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2378
2379 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2380 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2381 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2382 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2383 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2384 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2385 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2386 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2387 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2388 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2389 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2390 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2391 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2392 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2393 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2394 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2395 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2396 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2397
2398 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2399 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2400 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2401 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2402 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2403 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2404 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2405 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2406 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2407 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2408 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2409 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2410 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2411 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2412 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2413 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2414 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2415 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2416 } else {
2417 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2418 blk8x4Luma[3 + 0 * 8];
2419 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2420 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2421 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2422 (blk8x4Luma[3 + 3 * 8] >> 2);
2423 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2424 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2425 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2426 (blk8x4Luma[7 + 2 * 8] >>4);
2427 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2428 blk8x4Cb[0 + 0 * 4];
2429 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2430 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2431 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2432
2433 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2434 blk8x4Luma[0 + 0 * 8];
2435 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2436 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2437 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2438 (blk8x4Luma[0 + 1 * 8] >> 2);
2439 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2440 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2441 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2442 (blk8x4Luma[0 + 2 * 8] >>4);
2443 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2444 blk8x4Luma[2 + 2 * 8];
2445 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2446 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2447 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2448
2449 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2450 blk8x4Luma[4 + 0 * 8];
2451 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2452 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2453 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2454 (blk8x4Luma[4 + 1 * 8] >> 2);
2455 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2456 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2457 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2458 (blk8x4Luma[4 + 2 * 8] >>4);
2459 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2460 blk8x4Luma[6 + 2 * 8];
2461 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2462 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2463 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2464
2465 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2466 blk8x4Cb[1 + 0 * 4];
2467 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2468 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2469 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2470 (blk8x4Cr[2 + 0 * 4] >> 2);
2471 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2472 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2473 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2474 (blk8x4Cb[1 + 1 * 4] >>4);
2475 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2476 blk8x4Cb[2 + 1 * 4];
2477 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2478 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2479 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2480 }
2481}
2482
2483static void corrRefillWithAmrisc (
2484 struct hevc_state_s *hevc,
2485 uint32_t cmHeaderBaseAddr,
2486 uint32_t picWidth,
2487 uint32_t ctuPosition)
2488{
2489 int32_t i;
2490 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2491 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2492 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2493
2494 uint16_t cmBodyBuf[32 * 18];
2495
2496 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2497 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2498 uint32_t stride64x64 = pic_width_x64 * 128;
2499 uint32_t addr_offset64x64_abv = stride64x64 *
2500 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2501 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2502 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2503 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2504 unsigned int tmpData32;
2505
2506 uint16_t blkBuf0Y[32];
2507 uint16_t blkBuf0Cb[8];
2508 uint16_t blkBuf0Cr[8];
2509 uint16_t blkBuf1Y[32];
2510 uint16_t blkBuf1Cb[8];
2511 uint16_t blkBuf1Cr[8];
2512 int32_t blkBufCnt = 0;
2513
2514 int32_t blkIdx;
2515
2516 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2517 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2518 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2519 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2520
2521 for (i = 0; i < 32 * 18; i++)
2522 cmBodyBuf[i] = 0;
2523
2524 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2525 "%s, %d\n", __func__, __LINE__);
2526 do {
2527 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2528 } while (tmpData32);
2529 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2530 "%s, %d\n", __func__, __LINE__);
2531
2532 hevc_print(hevc, H265_DEBUG_DETAIL,
2533 "cmBodyBuf from detbuf:\n");
2534 for (i = 0; i < 32 * 18; i++) {
2535 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2536 if (get_dbg_flag(hevc) &
2537 H265_DEBUG_DETAIL) {
2538 if ((i & 0xf) == 0)
2539 hevc_print_cont(hevc, 0, "\n");
2540 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2541 }
2542 }
2543 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2544
2545 for (i = 0; i < 32; i++)
2546 blkBuf0Y[i] = 0;
2547 for (i = 0; i < 8; i++)
2548 blkBuf0Cb[i] = 0;
2549 for (i = 0; i < 8; i++)
2550 blkBuf0Cr[i] = 0;
2551 for (i = 0; i < 32; i++)
2552 blkBuf1Y[i] = 0;
2553 for (i = 0; i < 8; i++)
2554 blkBuf1Cb[i] = 0;
2555 for (i = 0; i < 8; i++)
2556 blkBuf1Cr[i] = 0;
2557
2558 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2559 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2560 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2561 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2562 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2563 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2564 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2565
2566 if (!aboveCtuAvailable && inAboveCtu)
2567 continue;
2568
2569 /* detRefillBuf --> 8x4block*/
2570 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2571
2572 if (restoreEnable) {
2573 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2574 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2575 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2576 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2577 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2578 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2579 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2580 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2581 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2582 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2583 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2584 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2585 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2586 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2587 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2588 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2589 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2590 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2591 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2592 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2593 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2594 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2595 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2596 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2597 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2598 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2599 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2600 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2601 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2602 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2603 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2604 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2605
2606 /*Store data back to DDR*/
2607 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2608 }
2609
2610 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2611 }
2612
2613 hevc_print(hevc, H265_DEBUG_DETAIL,
2614 "cmBodyBuf to detbuf:\n");
2615 for (i = 0; i < 32 * 18; i++) {
2616 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2617 if (get_dbg_flag(hevc) &
2618 H265_DEBUG_DETAIL) {
2619 if ((i & 0xf) == 0)
2620 hevc_print_cont(hevc, 0, "\n");
2621 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2622 }
2623 }
2624 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2625
2626 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2627 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2628 "%s, %d\n", __func__, __LINE__);
2629 do {
2630 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2631 } while (tmpData32);
2632 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2633 "%s, %d\n", __func__, __LINE__);
2634}
2635
2636static void delrefill(struct hevc_state_s *hevc)
2637{
2638 /*
2639 * corrRefill
2640 */
2641 /* HEVC_SAO_DBG_MODE0: picGlobalVariable
2642 [31:30] error number
2643 [29:20] error2 ([9:7] tilex, [6:0] ctuy)
2644 [19:10] error1, [9:0] error0 */
2645 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2646 uint32_t errorIdx;
2647 uint32_t errorNum = (detResult>>30);
2648
2649 if (detResult) {
2650 hevc_print(hevc, H265_DEBUG_BUFMGR,
2651 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2652 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2653 uint32_t errorPos = errorIdx * 10;
2654 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2655 uint32_t tilex = (errorResult >> 7) - 1;
2656 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2657 + hevc->m_tile[0][tilex].width - 1;
2658 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2659 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2660 hevc_print(hevc, H265_DEBUG_BUFMGR,
2661 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2662 errorIdx, tilex, ctux, ctux, ctuy, ctuy);
2663 corrRefillWithAmrisc(
2664 hevc,
2665 (uint32_t)hevc->cur_pic->header_adr,
2666 hevc->pic_w,
2667 ctuPosition);
2668 }
2669
2670 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2671 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2672 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2673 }
2674}
2675#endif
2676
2677static void get_rpm_param(union param_u *params)
2678{
2679 int i;
2680 unsigned int data32;
2681
2682 for (i = 0; i < 128; i++) {
2683 do {
2684 data32 = READ_VREG(RPM_CMD_REG);
2685 /* hevc_print(hevc, 0, "%x\n", data32); */
2686 } while ((data32 & 0x10000) == 0);
2687 params->l.data[i] = data32 & 0xffff;
2688 /* hevc_print(hevc, 0, "%x\n", data32); */
2689 WRITE_VREG(RPM_CMD_REG, 0);
2690 }
2691}
2692
2693static int get_free_buf_idx(struct hevc_state_s *hevc)
2694{
2695 int index = INVALID_IDX;
2696 struct PIC_s *pic;
2697 int i;
2698
2699 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2700 pic = hevc->m_PIC[i];
2701 if (pic == NULL ||
2702 pic->index == -1 ||
2703 pic->BUF_index == -1)
2704 continue;
2705
2706 if (pic->output_mark == 0 &&
2707 pic->referenced == 0 &&
2708 pic->output_ready == 0 &&
2709 pic->cma_alloc_addr) {
2710 pic->output_ready = 1;
2711 index = i;
2712 break;
2713 }
2714 }
2715
2716 return index;
2717}
2718
2719static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2720{
2721 int i;
2722 struct PIC_s *pic;
2723 struct PIC_s *ret_pic = NULL;
2724 if (POC == INVALID_POC)
2725 return NULL;
2726 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2727 pic = hevc->m_PIC[i];
2728 if (pic == NULL || pic->index == -1 ||
2729 pic->BUF_index == -1)
2730 continue;
2731 if (pic->POC == POC) {
2732 if (ret_pic == NULL)
2733 ret_pic = pic;
2734 else {
2735 if (pic->decode_idx > ret_pic->decode_idx)
2736 ret_pic = pic;
2737 }
2738 }
2739 }
2740 return ret_pic;
2741}
2742
2743static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2744{
2745 int i;
2746 struct PIC_s *pic;
2747 struct PIC_s *ret_pic = NULL;
2748
2749 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2750 pic = hevc->m_PIC[i];
2751 if (pic == NULL || pic->index == -1 ||
2752 pic->BUF_index == -1)
2753 continue;
2754 /* Also check the width and height of the reference picture
2755 to avoid referencing a frame with a mismatched resolution. */
2756 if ((pic->POC == POC) && (pic->referenced) &&
2757 (hevc->pic_w == pic->width) &&
2758 (hevc->pic_h == pic->height)) {
2759 if (ret_pic == NULL)
2760 ret_pic = pic;
2761 else {
2762 if (pic->decode_idx > ret_pic->decode_idx)
2763 ret_pic = pic;
2764 }
2765 }
2766 }
2767
2768 return ret_pic;
2769}
2770
2771static unsigned int log2i(unsigned int val)
2772{
2773 unsigned int ret = -1;
2774
2775 while (val != 0) {
2776 val >>= 1;
2777 ret++;
2778 }
2779 return ret;
2780}
2781
2782static int init_buf_spec(struct hevc_state_s *hevc);
2783
2784static bool v4l_is_there_vframe_bound(struct hevc_state_s *hevc)
2785{
2786 int i;
2787
2788 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2789 struct PIC_s *pic = hevc->m_PIC[i];
2790
2791 if (pic && pic->vframe_bound)
2792 return true;
2793 }
2794
2795 return false;
2796}
2797
2798static void v4l_mmu_buffer_release(struct hevc_state_s *hevc)
2799{
2800 int i;
2801
2802 /* release workspace */
2803 if (hevc->bmmu_box)
2804 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2805 BMMU_WORKSPACE_ID);
2806 /*
2807 * Only when a vframe has come back to the driver can we be sure that
2808 * the vframe and its fd are still related. If playback exits, capture
2809 * buffers still bound to a vframe must be released by the upper app
2810 * when the fd is closed; all other buffers are released by the driver.
2811 */
2812 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2813 struct PIC_s *pic = hevc->m_PIC[i];
2814
2815 if (pic && !pic->vframe_bound) {
2816 if (hevc->bmmu_box)
2817 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2818 VF_BUFFER_IDX(i));
2819 if (hevc->mmu_box)
2820 decoder_mmu_box_free_idx(hevc->mmu_box, i);
2821
2822 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
2823 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
2824 __func__, i, hevc->bmmu_box, hevc->mmu_box);
2825 }
2826 }
2827}
2828
2829static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2830{
2831 if (hevc->is_used_v4l &&
2832 v4l_is_there_vframe_bound(hevc)) {
2833 if (get_double_write_mode(hevc) != 0x10) {
2834 v4l_mmu_buffer_release(hevc);
2835 return;
2836 }
2837 }
2838
2839 if (hevc->mmu_box)
2840 decoder_mmu_box_free(hevc->mmu_box);
2841 hevc->mmu_box = NULL;
2842
2843 if (hevc->bmmu_box)
2844 decoder_bmmu_box_free(hevc->bmmu_box);
2845 hevc->bmmu_box = NULL;
2846}
2847
2848static int init_mmu_box(struct hevc_state_s *hevc)
2849{
2850 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2851 CODEC_MM_FLAGS_TVP : 0;
2852 int buf_size = 64;
2853
2854 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2855 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2856 buf_size = 24;
2857 }
2858
2859 if (get_dbg_flag(hevc)) {
2860 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2861 __func__, hevc->max_pic_w, hevc->max_pic_h);
2862 }
2863
2864 hevc->need_cache_size = buf_size * SZ_1M;
2865 hevc->sc_start_time = get_jiffies_64();
2866 if (hevc->mmu_enable
2867 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2868 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2869 hevc->index,
2870 MAX_REF_PIC_NUM,
2871 buf_size * SZ_1M,
2872 tvp_flag
2873 );
2874 if (!hevc->mmu_box) {
2875 hevc_print(hevc, 0, "h265 alloc mmu box failed!!\n");
2876 return -1;
2877 }
2878 }
2879
2880 return 0;
2881}
2882
2883static int init_mmu_buffers(struct hevc_state_s *hevc)
2884{
2885 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2886 CODEC_MM_FLAGS_TVP : 0;
2887 int buf_size = 64;
2888
2889 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2890 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2891 buf_size = 24;
2892 }
2893
2894 if (get_dbg_flag(hevc)) {
2895 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2896 __func__, hevc->max_pic_w, hevc->max_pic_h);
2897 }
2898
2899 hevc->need_cache_size = buf_size * SZ_1M;
2900 hevc->sc_start_time = get_jiffies_64();
2901 if (hevc->mmu_enable
2902 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2903 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2904 hevc->index,
2905 MAX_REF_PIC_NUM,
2906 buf_size * SZ_1M,
2907 tvp_flag
2908 );
2909 if (!hevc->mmu_box) {
2910 pr_err("h265 alloc mmu box failed!!\n");
2911 return -1;
2912 }
2913 }
2914
2915 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2916 hevc->index,
2917 BMMU_MAX_BUFFERS,
2918 4 + PAGE_SHIFT,
2919 CODEC_MM_FLAGS_CMA_CLEAR |
2920 CODEC_MM_FLAGS_FOR_VDECODER |
2921 tvp_flag);
2922 if (!hevc->bmmu_box) {
2923 if (hevc->mmu_box)
2924 decoder_mmu_box_free(hevc->mmu_box);
2925 hevc->mmu_box = NULL;
2926 pr_err("h265 alloc mmu box failed!!\n");
2927 return -1;
2928 }
2929 return 0;
2930}
2931
2932struct buf_stru_s
2933{
2934 int lcu_total;
2935 int mc_buffer_size_h;
2936 int mc_buffer_size_u_v_h;
2937};
2938
2939#ifndef MV_USE_FIXED_BUF
2940static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2941{
2942 int i;
2943 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2944 if (hevc->m_mv_BUF[i].start_adr) {
2945 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2946 hevc_print(hevc, 0,
2947 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2948 i, hevc->m_mv_BUF[i].start_adr,
2949 hevc->m_mv_BUF[i].size,
2950 hevc->m_mv_BUF[i].used_flag);
2951 decoder_bmmu_box_free_idx(
2952 hevc->bmmu_box,
2953 MV_BUFFER_IDX(i));
2954 hevc->m_mv_BUF[i].start_adr = 0;
2955 hevc->m_mv_BUF[i].size = 0;
2956 hevc->m_mv_BUF[i].used_flag = 0;
2957 }
2958 }
2959 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2960 if (hevc->m_PIC[i] != NULL)
2961 hevc->m_PIC[i]->mv_buf_index = -1;
2962 }
2963}
2964
2965static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2966{
2967 int ret = 0;
2968 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2969 if (decoder_bmmu_box_alloc_buf_phy
2970 (hevc->bmmu_box,
2971 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2972 DRIVER_NAME,
2973 &hevc->m_mv_BUF[i].start_adr) < 0) {
2974 hevc->m_mv_BUF[i].start_adr = 0;
2975 ret = -1;
2976 } else {
2977 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2978 hevc->m_mv_BUF[i].used_flag = 0;
2979 ret = 0;
2980 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2981 hevc_print(hevc, 0,
2982 "MV Buffer %d: start_adr %p size %x\n",
2983 i,
2984 (void *)hevc->m_mv_BUF[i].start_adr,
2985 hevc->m_mv_BUF[i].size);
2986 }
2987 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2988 void *mem_start_virt;
2989 mem_start_virt =
2990 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2991 if (mem_start_virt) {
2992 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2993 codec_mm_dma_flush(mem_start_virt,
2994 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2995 } else {
2996 mem_start_virt = codec_mm_vmap(
2997 hevc->m_mv_BUF[i].start_adr,
2998 hevc->m_mv_BUF[i].size);
2999 if (mem_start_virt) {
3000 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
3001 codec_mm_dma_flush(mem_start_virt,
3002 hevc->m_mv_BUF[i].size,
3003 DMA_TO_DEVICE);
3004 codec_mm_unmap_phyaddr(mem_start_virt);
3005 } else {
3006 /* no virtual mapping when playing TVP (secure) content;
3007 clearing may need to be done by the ucode. */
3008 pr_err("ref %s mem_start_virt failed\n", __func__);
3009 }
3010 }
3011 }
3012 }
3013 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3014 return ret;
3015}
3016#endif
3017
3018static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
3019{
3020#ifdef MV_USE_FIXED_BUF
3021 if (pic && pic->index >= 0) {
3022 if (IS_8K_SIZE(pic->width, pic->height)) {
3023 pic->mpred_mv_wr_start_addr =
3024 hevc->work_space_buf->mpred_mv.buf_start
3025 + (pic->index * MPRED_8K_MV_BUF_SIZE);
3026 } else {
3027 pic->mpred_mv_wr_start_addr =
3028 hevc->work_space_buf->mpred_mv.buf_start
3029 + (pic->index * MPRED_4K_MV_BUF_SIZE);
3030 }
3031 }
3032 return 0;
3033#else
3034 int i;
3035 int ret = -1;
3036 int new_size;
3037 if (IS_8K_SIZE(pic->width, pic->height))
3038 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
3039 else if (IS_4K_SIZE(pic->width, pic->height))
3040 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
3041 else
3042 new_size = MPRED_MV_BUF_SIZE + 0x10000;
3043 if (new_size != hevc->mv_buf_size) {
3044 dealloc_mv_bufs(hevc);
3045 hevc->mv_buf_size = new_size;
3046 }
3047 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3048 if (hevc->m_mv_BUF[i].start_adr &&
3049 hevc->m_mv_BUF[i].used_flag == 0) {
3050 hevc->m_mv_BUF[i].used_flag = 1;
3051 ret = i;
3052 break;
3053 }
3054 }
3055 if (ret < 0) {
3056 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3057 if (hevc->m_mv_BUF[i].start_adr == 0) {
3058 if (alloc_mv_buf(hevc, i) >= 0) {
3059 hevc->m_mv_BUF[i].used_flag = 1;
3060 ret = i;
3061 }
3062 break;
3063 }
3064 }
3065 }
3066
3067 if (ret >= 0) {
3068 pic->mv_buf_index = ret;
3069 pic->mpred_mv_wr_start_addr =
3070 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
3071 (~0xffff);
3072 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
3073 "%s => %d (0x%x) size 0x%x\n",
3074 __func__, ret,
3075 pic->mpred_mv_wr_start_addr,
3076 hevc->m_mv_BUF[ret].size);
3077
3078 } else {
3079 hevc_print(hevc, 0,
3080 "%s: Error, no free mv buf available\n",
3081 __func__);
3082 }
3083 return ret;
3084
3085#endif
3086}
3087
3088static void put_mv_buf(struct hevc_state_s *hevc,
3089 struct PIC_s *pic)
3090{
3091#ifndef MV_USE_FIXED_BUF
3092 int i = pic->mv_buf_index;
3093 if (i < 0 || i >= MAX_REF_PIC_NUM) {
3094 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
3095 "%s: index %d beyond range\n",
3096 __func__, i);
3097 return;
3098 }
3099 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
3100 "%s(%d): used_flag(%d)\n",
3101 __func__, i,
3102 hevc->m_mv_BUF[i].used_flag);
3103
3104 if (hevc->m_mv_BUF[i].start_adr &&
3105 hevc->m_mv_BUF[i].used_flag)
3106 hevc->m_mv_BUF[i].used_flag = 0;
3107 pic->mv_buf_index = -1;
3108#endif
3109}
3110
3111static int cal_current_buf_size(struct hevc_state_s *hevc,
3112 struct buf_stru_s *buf_stru)
3113{
3114
3115 int buf_size;
3116 int pic_width = hevc->pic_w;
3117 int pic_height = hevc->pic_h;
3118 int lcu_size = hevc->lcu_size;
3119 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
3120 1 : pic_width / lcu_size;
3121 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
3122 1 : pic_height / lcu_size;
3123 /*SUPPORT_10BIT*/
3124 int losless_comp_header_size = compute_losless_comp_header_size
3125 (pic_width, pic_height);
3126 /*always alloc buf for 10bit*/
3127 int losless_comp_body_size = compute_losless_comp_body_size
3128 (hevc, pic_width, pic_height, 0);
3129 int mc_buffer_size = losless_comp_header_size
3130 + losless_comp_body_size;
3131 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
3132 int mc_buffer_size_u_v_h = 0;
3133
3134 int dw_mode = get_double_write_mode(hevc);
3135
3136 if (hevc->mmu_enable) {
3137 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3138 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3139 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
3140 << 16;
3141 else
3142 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
3143 << 16;
3144 } else
3145 buf_size = 0;
3146
3147 if (dw_mode) {
3148 int pic_width_dw = pic_width /
3149 get_double_write_ratio(hevc, dw_mode);
3150 int pic_height_dw = pic_height /
3151 get_double_write_ratio(hevc, dw_mode);
3152
3153 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
3154 pic_width_dw / lcu_size + 1 :
3155 pic_width_dw / lcu_size;
3156 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
3157 pic_height_dw / lcu_size + 1 :
3158 pic_height_dw / lcu_size;
3159 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
3160
3161 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
3162 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
3163 /*64k alignment*/
3164 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
3165 }
3166
3167 if ((!hevc->mmu_enable) &&
3168 ((dw_mode & 0x10) == 0)) {
3169 /* compressed mode is used without the MMU,
3170 so a buffer is needed for compressed-frame decoding */
3171 buf_size += (mc_buffer_size_h << 16);
3172 }
3173
3174 /* in case the start address is not 64KB aligned */
3175 if (buf_size > 0)
3176 buf_size += 0x10000;
3177
3178 if (buf_stru) {
3179 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
3180 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
3181 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
3182 }
3183
3184 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL, "pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
3185 pic_width, pic_height, losless_comp_header_size,
3186 losless_comp_body_size, mc_buffer_size_h,
3187 mc_buffer_size_u_v_h, buf_size);
3188
3189 return buf_size;
3190}
3191
3192static int v4l_alloc_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
3193{
3194 int ret = -1;
3195 int i = pic->index;
3196 struct vdec_v4l2_buffer *fb = NULL;
3197
3198 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3199 return ret;
3200
3201 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3202 if (ret < 0) {
3203 hevc_print(hevc, 0, "[%d] H265 get buffer fail.\n",
3204 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id);
3205 return ret;
3206 }
3207
3208 if (hevc->mmu_enable) {
3209 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3210 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3211 hevc->m_BUF[i].header_size =
3212 ALIGN(MMU_COMPRESS_8K_HEADER_SIZE, 0x10000);
3213 else
3214 hevc->m_BUF[i].header_size =
3215 ALIGN(MMU_COMPRESS_HEADER_SIZE, 0x10000);
3216
3217 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
3218 VF_BUFFER_IDX(i), hevc->m_BUF[i].header_size,
3219 DRIVER_NAME, &hevc->m_BUF[i].header_addr);
3220 if (ret < 0) {
3221 hevc_print(hevc, PRINT_FLAG_ERROR,
3222 "%s[%d], header size: %d, no mem fatal err\n",
3223 __func__, i, hevc->m_BUF[i].header_size);
3224 return ret;
3225 }
3226 }
3227
3228 hevc->m_BUF[i].used_flag = 0;
3229 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3230 pic->cma_alloc_addr = hevc->m_BUF[i].v4l_ref_buf_addr;
3231 if (fb->num_planes == 1) {
3232 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3233 hevc->m_BUF[i].luma_size = fb->m.mem[0].offset;
3234 hevc->m_BUF[i].size = fb->m.mem[0].size;
3235 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3236 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3237 pic->dw_u_v_adr = pic->dw_y_adr + hevc->m_BUF[i].luma_size;
3238 } else if (fb->num_planes == 2) {
3239 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3240 hevc->m_BUF[i].luma_size = fb->m.mem[0].size;
3241 hevc->m_BUF[i].chroma_addr = fb->m.mem[1].addr;
3242 hevc->m_BUF[i].chroma_size = fb->m.mem[1].size;
3243 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3244 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3245 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3246 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3247 pic->dw_u_v_adr = hevc->m_BUF[i].chroma_addr;
3248 }
3249
3250 return ret;
3251}
3252
3253static int alloc_buf(struct hevc_state_s *hevc)
3254{
3255 int i;
3256 int ret = -1;
3257 int buf_size = cal_current_buf_size(hevc, NULL);
3258
3259 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3260 return ret;
3261
3262 for (i = 0; i < BUF_POOL_SIZE; i++) {
3263 if (hevc->m_BUF[i].start_adr == 0)
3264 break;
3265 }
3266 if (i < BUF_POOL_SIZE) {
3267 if (buf_size > 0) {
3268 ret = decoder_bmmu_box_alloc_buf_phy
3269 (hevc->bmmu_box,
3270 VF_BUFFER_IDX(i), buf_size,
3271 DRIVER_NAME,
3272 &hevc->m_BUF[i].start_adr);
3273 if (ret < 0) {
3274 hevc->m_BUF[i].start_adr = 0;
3275 if (i <= 8) {
3276 hevc->fatal_error |=
3277 DECODER_FATAL_ERROR_NO_MEM;
3278 hevc_print(hevc, PRINT_FLAG_ERROR,
3279 "%s[%d], size: %d, no mem fatal err\n",
3280 __func__, i, buf_size);
3281 }
3282 }
3283
3284 if (ret >= 0) {
3285 hevc->m_BUF[i].size = buf_size;
3286 hevc->m_BUF[i].used_flag = 0;
3287 ret = 0;
3288
3289 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3290 hevc_print(hevc, 0,
3291 "Buffer %d: start_adr %p size %x\n",
3292 i,
3293 (void *)hevc->m_BUF[i].start_adr,
3294 hevc->m_BUF[i].size);
3295 }
3296 /* flush the buffer to make sure no cache lines are dirty */
3297 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3298 void *mem_start_virt;
3299 mem_start_virt =
3300 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3301 if (mem_start_virt) {
3302 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3303 codec_mm_dma_flush(mem_start_virt,
3304 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3305 } else {
3306 mem_start_virt = codec_mm_vmap(
3307 hevc->m_BUF[i].start_adr,
3308 hevc->m_BUF[i].size);
3309 if (mem_start_virt) {
3310 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3311 codec_mm_dma_flush(mem_start_virt,
3312 hevc->m_BUF[i].size,
3313 DMA_TO_DEVICE);
3314 codec_mm_unmap_phyaddr(mem_start_virt);
3315 } else {
3316 /* no virtual mapping when playing TVP (secure) content;
3317 clearing may need to be done by the ucode. */
3318 pr_err("ref %s mem_start_virt failed\n", __func__);
3319 }
3320 }
3321 }
3322 }
3323 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3324 } else
3325 ret = 0;
3326 }
3327
3328 if (ret >= 0) {
3329 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3330 hevc_print(hevc, 0,
3331 "alloc buf(%d) for %d/%d, size 0x%x => %p\n",
3332 i, hevc->pic_w, hevc->pic_h,
3333 buf_size,
3334 hevc->m_BUF[i].start_adr);
3335 }
3336 } else {
3337 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3338 hevc_print(hevc, 0,
3339 "alloc buf(%d) for %d/%d, size 0x%x => Fail!!!\n",
3340 i, hevc->pic_w, hevc->pic_h,
3341 buf_size);
3342 }
3343 }
3344 return ret;
3345}
3346
3347static void set_buf_unused(struct hevc_state_s *hevc, int i)
3348{
3349 if (i >= 0 && i < BUF_POOL_SIZE)
3350 hevc->m_BUF[i].used_flag = 0;
3351}
3352
3353static void dealloc_unused_buf(struct hevc_state_s *hevc)
3354{
3355 int i;
3356 for (i = 0; i < BUF_POOL_SIZE; i++) {
3357 if (hevc->m_BUF[i].start_adr &&
3358 hevc->m_BUF[i].used_flag == 0) {
3359 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3360 hevc_print(hevc, 0,
3361 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3362 i, hevc->m_BUF[i].start_adr,
3363 hevc->m_BUF[i].size);
3364 }
3365 if (!hevc->is_used_v4l)
3366 decoder_bmmu_box_free_idx(
3367 hevc->bmmu_box,
3368 VF_BUFFER_IDX(i));
3369 hevc->m_BUF[i].start_adr = 0;
3370 hevc->m_BUF[i].size = 0;
3371 }
3372 }
3373}
3374
3375static void dealloc_pic_buf(struct hevc_state_s *hevc,
3376 struct PIC_s *pic)
3377{
3378 int i = pic->BUF_index;
3379 pic->BUF_index = -1;
3380 if (i >= 0 &&
3381 i < BUF_POOL_SIZE &&
3382 hevc->m_BUF[i].start_adr) {
3383 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3384 hevc_print(hevc, 0,
3385 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3386 i, hevc->m_BUF[i].start_adr,
3387 hevc->m_BUF[i].size);
3388 }
3389
3390 if (!hevc->is_used_v4l)
3391 decoder_bmmu_box_free_idx(
3392 hevc->bmmu_box,
3393 VF_BUFFER_IDX(i));
3394 hevc->m_BUF[i].used_flag = 0;
3395 hevc->m_BUF[i].start_adr = 0;
3396 hevc->m_BUF[i].size = 0;
3397 }
3398}
3399
3400static int get_work_pic_num(struct hevc_state_s *hevc)
3401{
3402 int used_buf_num = 0;
3403 int sps_pic_buf_diff = 0;
3404
3405 if (get_dynamic_buf_num_margin(hevc) > 0) {
3406 if ((!hevc->sps_num_reorder_pics_0) &&
3407 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3408 /* the range of sps_num_reorder_pics_0 is in
3409 [0, sps_max_dec_pic_buffering_minus1_0] */
3410 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3411 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3412 } else
3413 used_buf_num = hevc->sps_num_reorder_pics_0
3414 + get_dynamic_buf_num_margin(hevc);
3415
3416 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3417 - hevc->sps_num_reorder_pics_0;
3418#ifdef MULTI_INSTANCE_SUPPORT
3419 /*
3420 need one more for multi instance, as
3421 apply_ref_pic_set() has no chance to run
3422 to clear the referenced flag in some cases
3423 */
3424 if (hevc->m_ins_flag)
3425 used_buf_num++;
3426#endif
3427 } else
3428 used_buf_num = max_buf_num;
3429
3430 if (hevc->save_buffer_mode)
3431 hevc_print(hevc, 0,
3432 "save_buffer_mode: dynamic_buf_num_margin %d ----> %d\n",
3433 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3434
3435 if (sps_pic_buf_diff >= 4)
3436 used_buf_num += sps_pic_buf_diff;
3437
3438 if (hevc->is_used_v4l) {
3439 /* for eos add more buffer to flush.*/
3440 used_buf_num++;
3441 }
3442
3443 if (used_buf_num > MAX_BUF_NUM)
3444 used_buf_num = MAX_BUF_NUM;
3445 return used_buf_num;
3446}
3447
3448static int v4l_parser_work_pic_num(struct hevc_state_s *hevc)
3449{
3450 int used_buf_num = 0;
3451 int sps_pic_buf_diff = 0;
3452 pr_debug("margin = %d, sps_max_dec_pic_buffering_minus1_0 = %d, sps_num_reorder_pics_0 = %d\n",
3453 get_dynamic_buf_num_margin(hevc),
3454 hevc->param.p.sps_max_dec_pic_buffering_minus1_0,
3455 hevc->param.p.sps_num_reorder_pics_0);
3456 if (get_dynamic_buf_num_margin(hevc) > 0) {
3457 if ((!hevc->param.p.sps_num_reorder_pics_0) &&
3458 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3459 /* the range of sps_num_reorder_pics_0 is in
3460 [0, sps_max_dec_pic_buffering_minus1_0] */
3461 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3462 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3463 } else
3464 used_buf_num = hevc->param.p.sps_num_reorder_pics_0
3465 + get_dynamic_buf_num_margin(hevc);
3466
3467 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3468 - hevc->param.p.sps_num_reorder_pics_0;
3469#ifdef MULTI_INSTANCE_SUPPORT
3470 /*
3471 need one more for multi instance, as
3472 apply_ref_pic_set() has no chanch to run to
3473 to clear referenced flag in some case
3474 */
3475 if (hevc->m_ins_flag)
3476 used_buf_num++;
3477#endif
3478 } else
3479 used_buf_num = max_buf_num;
3480
3481 if (hevc->save_buffer_mode)
3482 hevc_print(hevc, 0,
3483 "save_buffer_mode: dynamic_buf_num_margin %d ----> %d\n",
3484 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3485
3486 if (sps_pic_buf_diff >= 4)
3487 {
3488 used_buf_num += 1;
3489 }
3490
3491 /* for eos add more buffer to flush.*/
3492 used_buf_num++;
3493
3494 if (used_buf_num > MAX_BUF_NUM)
3495 used_buf_num = MAX_BUF_NUM;
3496 return used_buf_num;
3497}
3498
3499
3500static int get_alloc_pic_count(struct hevc_state_s *hevc)
3501{
3502 int alloc_pic_count = 0;
3503 int i;
3504 struct PIC_s *pic;
3505 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3506 pic = hevc->m_PIC[i];
3507 if (pic && pic->index >= 0)
3508 alloc_pic_count++;
3509 }
3510 return alloc_pic_count;
3511}
3512
3513static int v4l_config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3514{
3515 int i = pic->index;
3516 int dw_mode = get_double_write_mode(hevc);
3517
3518 if (hevc->mmu_enable)
3519 pic->header_adr = hevc->m_BUF[i].header_addr;
3520
3521 pic->BUF_index = i;
3522 pic->POC = INVALID_POC;
3523 pic->mc_canvas_y = pic->index;
3524 pic->mc_canvas_u_v = pic->index;
3525
3526 if (dw_mode & 0x10) {
3527 pic->mc_canvas_y = (pic->index << 1);
3528 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3529 pic->mc_y_adr = pic->dw_y_adr;
3530 pic->mc_u_v_adr = pic->dw_u_v_adr;
3531 }
3532
3533 return 0;
3534}
3535
3536static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3537{
3538 int ret = -1;
3539 int i;
3540 /*int lcu_size_log2 = hevc->lcu_size_log2;
3541 int MV_MEM_UNIT=lcu_size_log2==
3542 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3543 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3544 5 ? 0x80 : 0x20;
3545 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3546 hevc->work_space_buf->mpred_mv.buf_size;*/
3547 unsigned int y_adr = 0;
3548 struct buf_stru_s buf_stru;
3549 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3550 int dw_mode = get_double_write_mode(hevc);
3551
3552 for (i = 0; i < BUF_POOL_SIZE; i++) {
3553 if (hevc->m_BUF[i].start_adr != 0 &&
3554 hevc->m_BUF[i].used_flag == 0 &&
3555 buf_size <= hevc->m_BUF[i].size) {
3556 hevc->m_BUF[i].used_flag = 1;
3557 break;
3558 }
3559 }
3560
3561 if (i >= BUF_POOL_SIZE)
3562 return -1;
3563
3564 if (hevc->mmu_enable) {
3565 pic->header_adr = hevc->m_BUF[i].start_adr;
3566 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3567 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3568 y_adr = hevc->m_BUF[i].start_adr +
3569 MMU_COMPRESS_8K_HEADER_SIZE;
3570 else
3571 y_adr = hevc->m_BUF[i].start_adr +
3572 MMU_COMPRESS_HEADER_SIZE;
3573 } else
3574 y_adr = hevc->m_BUF[i].start_adr;
3575
3576 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3577
3578 pic->POC = INVALID_POC;
3579 /* ensure get_pic_by_POC() does not
3580 return a buffer that has not been decoded yet */
3581 pic->BUF_index = i;
3582
3583 if ((!hevc->mmu_enable) &&
3584 ((dw_mode & 0x10) == 0)
3585 ) {
3586 pic->mc_y_adr = y_adr;
3587 y_adr += (buf_stru.mc_buffer_size_h << 16);
3588 }
3589 pic->mc_canvas_y = pic->index;
3590 pic->mc_canvas_u_v = pic->index;
3591 if (dw_mode & 0x10) {
3592 pic->mc_y_adr = y_adr;
3593 pic->mc_u_v_adr = y_adr +
3594 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3595 pic->mc_canvas_y = (pic->index << 1);
3596 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3597
3598 pic->dw_y_adr = pic->mc_y_adr;
3599 pic->dw_u_v_adr = pic->mc_u_v_adr;
3600 } else if (dw_mode) {
3601 pic->dw_y_adr = y_adr;
3602 pic->dw_u_v_adr = pic->dw_y_adr +
3603 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3604 }
3605
3606 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3607 hevc_print(hevc, 0,
3608 "%s index %d BUF_index %d mc_y_adr %x\n",
3609 __func__, pic->index,
3610 pic->BUF_index, pic->mc_y_adr);
3611 if (hevc->mmu_enable &&
3612 dw_mode)
3613 hevc_print(hevc, 0,
3614 "mmu double write adr %ld\n",
3615 pic->cma_alloc_addr);
3616 }
3617 ret = 0;
3618
3619 return ret;
3620}
3621
3622static void init_pic_list(struct hevc_state_s *hevc)
3623{
3624 int i;
3625 int init_buf_num = get_work_pic_num(hevc);
3626 int dw_mode = get_double_write_mode(hevc);
3627 struct vdec_s *vdec = hw_to_vdec(hevc);
3628 /* allocation of decoder buffers is delayed when working in v4l mode. */
3629 if (!hevc->is_used_v4l) {
3630 for (i = 0; i < init_buf_num; i++) {
3631 if (alloc_buf(hevc) < 0) {
3632 if (i <= 8) {
3633 /* if (i+1) >= 9 buffers were already allocated,
3634 don't report a fatal error. */
3635 hevc->fatal_error |=
3636 DECODER_FATAL_ERROR_NO_MEM;
3637 }
3638 break;
3639 }
3640 }
3641 }
3642
3643 for (i = 0; i < init_buf_num; i++) {
3644 struct PIC_s *pic = hevc->m_PIC[i];
3645
3646 if (!pic) {
3647 pic = vmalloc(sizeof(struct PIC_s));
3648 if (pic == NULL) {
3649 hevc_print(hevc, 0,
3650 "%s: alloc pic %d fail!!!\n",
3651 __func__, i);
3652 break;
3653 }
3654 hevc->m_PIC[i] = pic;
3655 }
3656 memset(pic, 0, sizeof(struct PIC_s));
3657
3658 pic->index = i;
3659 pic->BUF_index = -1;
3660 pic->mv_buf_index = -1;
3661 if (vdec->parallel_dec == 1) {
3662 pic->y_canvas_index = -1;
3663 pic->uv_canvas_index = -1;
3664 }
3665
3666 pic->width = hevc->pic_w;
3667 pic->height = hevc->pic_h;
3668 pic->double_write_mode = dw_mode;
3669
3670 /* canvas configuration is delayed when working in v4l mode. */
3671 if (!hevc->is_used_v4l) {
3672 if (config_pic(hevc, pic) < 0) {
3673 if (get_dbg_flag(hevc))
3674 hevc_print(hevc, 0,
3675 "Config_pic %d fail\n", pic->index);
3676 pic->index = -1;
3677 i++;
3678 break;
3679 }
3680
3681 if (pic->double_write_mode)
3682 set_canvas(hevc, pic);
3683 }
3684 }
3685}
3686
3687static void uninit_pic_list(struct hevc_state_s *hevc)
3688{
3689 struct vdec_s *vdec = hw_to_vdec(hevc);
3690 int i;
3691#ifndef MV_USE_FIXED_BUF
3692 dealloc_mv_bufs(hevc);
3693#endif
3694 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3695 struct PIC_s *pic = hevc->m_PIC[i];
3696
3697 if (pic) {
3698 if (vdec->parallel_dec == 1) {
3699 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3700 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3701 }
3702 release_aux_data(hevc, pic);
3703 vfree(pic);
3704 hevc->m_PIC[i] = NULL;
3705 }
3706 }
3707}
3708
3709#ifdef LOSLESS_COMPRESS_MODE
3710static void init_decode_head_hw(struct hevc_state_s *hevc)
3711{
3712
3713 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3714 unsigned int data32;
3715
3716 int losless_comp_header_size =
3717 compute_losless_comp_header_size(hevc->pic_w,
3718 hevc->pic_h);
3719 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3720 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3721
3722 hevc->losless_comp_body_size = losless_comp_body_size;
3723
3724
3725 if (hevc->mmu_enable) {
3726 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3727 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3728 } else {
3729 if (hevc->mem_saving_mode == 1)
3730 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3731 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3732 else
3733 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3734 ((workaround_enable & 2) ? 1 : 0));
3735 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3736 /*
3737 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3738 * //8-bit mode
3739 */
3740 }
3741 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3742 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3743 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3744
3745 if (hevc->mmu_enable) {
3746 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3747 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3748 buf_spec->mmu_vbh.buf_start +
3749 buf_spec->mmu_vbh.buf_size/2);
3750 data32 = READ_VREG(HEVC_SAO_CTRL9);
3751 data32 |= 0x1;
3752 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3753
3754 /* use HEVC_CM_HEADER_START_ADDR */
3755 data32 = READ_VREG(HEVC_SAO_CTRL5);
3756 data32 |= (1<<10);
3757 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3758 }
3759
3760 if (!hevc->m_ins_flag)
3761 hevc_print(hevc, 0,
3762 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3763 __func__, hevc->pic_w, hevc->pic_h,
3764 losless_comp_body_size, losless_comp_header_size);
3765
3766}
3767#endif
3768#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3769
3770static void init_pic_list_hw(struct hevc_state_s *hevc)
3771{
3772 int i;
3773 int cur_pic_num = MAX_REF_PIC_NUM;
3774 int dw_mode = get_double_write_mode(hevc);
3775 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3776 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3777 (0x1 << 1) | (0x1 << 2));
3778 else
3779 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3780
3781 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3782 if (hevc->m_PIC[i] == NULL ||
3783 hevc->m_PIC[i]->index == -1) {
3784 cur_pic_num = i;
3785 break;
3786 }
3787 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3788 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3789 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3790 hevc->m_PIC[i]->header_adr>>5);
3791 else
3792 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3793 hevc->m_PIC[i]->mc_y_adr >> 5);
3794 } else
3795 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3796 hevc->m_PIC[i]->mc_y_adr |
3797 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3798 if (dw_mode & 0x10) {
3799 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3800 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3801 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3802 }
3803 else
3804 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3805 hevc->m_PIC[i]->mc_u_v_adr |
3806 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3807 | 0x1);
3808 }
3809 }
3810 if (cur_pic_num == 0)
3811 return;
3812
3813 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3814
3815 /* Zero out canvas registers in IPP -- avoid simulation X */
3816 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3817 (0 << 8) | (0 << 1) | 1);
3818 for (i = 0; i < 32; i++)
3819 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3820
3821#ifdef LOSLESS_COMPRESS_MODE
3822 if ((dw_mode & 0x10) == 0)
3823 init_decode_head_hw(hevc);
3824#endif
3825
3826}
3827
3828
3829static void dump_pic_list(struct hevc_state_s *hevc)
3830{
3831 int i;
3832 struct PIC_s *pic;
3833
3834 hevc_print(hevc, 0,
3835 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3836 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3837 pic = hevc->m_PIC[i];
3838 if (pic == NULL || pic->index == -1)
3839 continue;
3840 hevc_print_cont(hevc, 0,
3841 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3842 pic->index, pic->BUF_index,
3843#ifndef MV_USE_FIXED_BUF
3844 pic->mv_buf_index,
3845#else
3846 -1,
3847#endif
3848 pic->decode_idx, pic->POC, pic->referenced);
3849 hevc_print_cont(hevc, 0,
3850 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3851 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3852 pic->width, pic->height);
3853 hevc_print_cont(hevc, 0,
3854 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3855 pic->output_ready, pic->mpred_mv_wr_start_addr,
3856 pic->vf_ref);
3857 }
3858}
3859
3860static void clear_referenced_flag(struct hevc_state_s *hevc)
3861{
3862 int i;
3863 struct PIC_s *pic;
3864 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3865 pic = hevc->m_PIC[i];
3866 if (pic == NULL || pic->index == -1)
3867 continue;
3868 if (pic->referenced) {
3869 pic->referenced = 0;
3870 put_mv_buf(hevc, pic);
3871 }
3872 }
3873}
3874
3875static void clear_poc_flag(struct hevc_state_s *hevc)
3876{
3877 int i;
3878 struct PIC_s *pic;
3879 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3880 pic = hevc->m_PIC[i];
3881 if (pic == NULL || pic->index == -1)
3882 continue;
3883 pic->POC = INVALID_POC;
3884 }
3885}
3886
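/*
 * Select the next picture to be displayed. In I-only trick mode the
 * output-marked picture with the smallest decode index is taken;
 * otherwise the output-marked picture with the smallest POC is chosen
 * and released only once the number of pending pictures exceeds its
 * num_reorder_pic (or unconditionally when flush_flag is set).
 */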
3887static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3888 unsigned char flush_flag)
3889{
3890 int num_pic_not_yet_display = 0;
3891 int i, first_pic_flag = 0;
3892 struct PIC_s *pic;
3893 struct PIC_s *pic_display = NULL;
3894 struct vdec_s *vdec = hw_to_vdec(hevc);
3895
3896 if (hevc->i_only & 0x4) {
3897 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3898 pic = hevc->m_PIC[i];
3899 if (pic == NULL ||
3900 (pic->index == -1) ||
3901 (pic->BUF_index == -1) ||
3902 (pic->POC == INVALID_POC))
3903 continue;
3904 if (pic->output_mark) {
3905 if (pic_display) {
3906 if (pic->decode_idx <
3907 pic_display->decode_idx)
3908 pic_display = pic;
3909
3910 } else
3911 pic_display = pic;
3912
3913 }
3914 }
3915 if (pic_display) {
3916 pic_display->output_mark = 0;
3917 pic_display->recon_mark = 0;
3918 pic_display->output_ready = 1;
3919 pic_display->referenced = 0;
3920 put_mv_buf(hevc, pic_display);
3921 }
3922 } else {
3923 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3924 pic = hevc->m_PIC[i];
3925 if (pic == NULL ||
3926 (pic->index == -1) ||
3927 (pic->BUF_index == -1) ||
3928 (pic->POC == INVALID_POC))
3929 continue;
3930 if (pic->output_mark)
3931 num_pic_not_yet_display++;
3932 if (pic->slice_type == 2 &&
3933 hevc->vf_pre_count == 0 &&
3934 fast_output_enable & 0x1) {
3935 /*fast output for first I picture*/
3936 pic->num_reorder_pic = 0;
3937 if (vdec->master || vdec->slave)
3938 pic_display = pic;
3939 first_pic_flag = 1;
3940 hevc_print(hevc, 0, "VH265: output first frame\n");
3941 }
3942 }
3943
3944 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3945 pic = hevc->m_PIC[i];
3946 if (pic == NULL ||
3947 (pic->index == -1) ||
3948 (pic->BUF_index == -1) ||
3949 (pic->POC == INVALID_POC))
3950 continue;
3951 if (pic->output_mark) {
3952 if (pic_display) {
3953 if (pic->POC < pic_display->POC)
3954 pic_display = pic;
3955 else if ((pic->POC == pic_display->POC)
3956 && (pic->decode_idx <
3957 pic_display->
3958 decode_idx))
3959 pic_display
3960 = pic;
3961
3962 } else
3963 pic_display = pic;
3964
3965 }
3966 }
3967#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
3968 /* For DV, wait until cur_pic has received all of its data;
3969 some data may arrive after the picture has been output */
3970 if ((vdec->master || vdec->slave)
3971 && (pic_display == hevc->cur_pic) &&
3972 (!flush_flag) &&
3973 (hevc->bypass_dvenl && !dolby_meta_with_el)
3974 && (!first_pic_flag))
3975 pic_display = NULL;
3976#endif
3977 if (pic_display) {
3978 if ((num_pic_not_yet_display >
3979 pic_display->num_reorder_pic)
3980 || flush_flag) {
3981 pic_display->output_mark = 0;
3982 pic_display->recon_mark = 0;
3983 pic_display->output_ready = 1;
3984 } else if (num_pic_not_yet_display >=
3985 (MAX_REF_PIC_NUM - 1)) {
3986 pic_display->output_mark = 0;
3987 pic_display->recon_mark = 0;
3988 pic_display->output_ready = 1;
3989 hevc_print(hevc, 0,
3990 "Warning, num_reorder_pic %d is byeond buf num\n",
3991 pic_display->num_reorder_pic);
3992 } else
3993 pic_display = NULL;
3994 }
3995 }
3996
3997 if (pic_display && hevc->sps_num_reorder_pics_0 &&
3998 (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3999 pic_display = NULL;
4000 hevc->first_pic_flag = 2;
4001 }
4002 return pic_display;
4003}
4004
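/*
 * Load the motion-compensation canvas table for the current slice:
 * for P/B slices the L0 (and, for B slices, the L1) reference pictures
 * are looked up by POC and their canvas indexes written to the IPP.
 * A missing or size-mismatched reference marks the current picture as
 * erroneous.
 */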
4005static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
4006{
4007 int i;
4008 struct PIC_s *pic;
4009
4010 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4011 hevc_print(hevc, 0,
4012 "config_mc_buffer entered .....\n");
4013 if (cur_pic->slice_type != 2) { /* P and B pic */
4014 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4015 (0 << 8) | (0 << 1) | 1);
4016 for (i = 0; i < cur_pic->RefNum_L0; i++) {
4017 pic =
4018 get_ref_pic_by_POC(hevc,
4019 cur_pic->
4020 m_aiRefPOCList0[cur_pic->
4021 slice_idx][i]);
4022 if (pic) {
4023 if ((pic->width != hevc->pic_w) ||
4024 (pic->height != hevc->pic_h)) {
4025 hevc_print(hevc, 0,
4026 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
4027 __func__, pic->POC,
4028 pic->width, pic->height);
4029 cur_pic->error_mark = 1;
4030 }
4031 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
4032 cur_pic->error_mark = 1;
4033 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
4034 (pic->mc_canvas_u_v << 16)
4035 | (pic->mc_canvas_u_v
4036 << 8) |
4037 pic->mc_canvas_y);
4038 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4039 hevc_print_cont(hevc, 0,
4040 "refid %x mc_canvas_u_v %x",
4041 i, pic->mc_canvas_u_v);
4042 hevc_print_cont(hevc, 0,
4043 " mc_canvas_y %x\n",
4044 pic->mc_canvas_y);
4045 }
4046 } else
4047 cur_pic->error_mark = 1;
4048
4049 if (pic == NULL || pic->error_mark) {
4050 hevc_print(hevc, 0,
4051 "Error %s, %dth poc (%d) %s",
4052 __func__, i,
4053 cur_pic->m_aiRefPOCList0[cur_pic->
4054 slice_idx][i],
4055 pic ? "has error" :
4056 "not in list0");
4057 }
4058 }
4059 }
4060 if (cur_pic->slice_type == 0) { /* B pic */
4061 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4062 hevc_print(hevc, 0,
4063 "config_mc_buffer RefNum_L1\n");
4064 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4065 (16 << 8) | (0 << 1) | 1);
4066
4067 for (i = 0; i < cur_pic->RefNum_L1; i++) {
4068 pic =
4069 get_ref_pic_by_POC(hevc,
4070 cur_pic->
4071 m_aiRefPOCList1[cur_pic->
4072 slice_idx][i]);
4073 if (pic) {
4074 if ((pic->width != hevc->pic_w) ||
4075 (pic->height != hevc->pic_h)) {
4076 hevc_print(hevc, 0,
4077 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
4078 __func__, pic->POC,
4079 pic->width, pic->height);
4080 cur_pic->error_mark = 1;
4081 }
4082
4083 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
4084 cur_pic->error_mark = 1;
4085 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
4086 (pic->mc_canvas_u_v << 16)
4087 | (pic->mc_canvas_u_v
4088 << 8) |
4089 pic->mc_canvas_y);
4090 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4091 hevc_print_cont(hevc, 0,
4092 "refid %x mc_canvas_u_v %x",
4093 i, pic->mc_canvas_u_v);
4094 hevc_print_cont(hevc, 0,
4095 " mc_canvas_y %x\n",
4096 pic->mc_canvas_y);
4097 }
4098 } else
4099 cur_pic->error_mark = 1;
4100
4101 if (pic == NULL || pic->error_mark) {
4102 hevc_print(hevc, 0,
4103 "Error %s, %dth poc (%d) %s",
4104 __func__, i,
4105 cur_pic->m_aiRefPOCList1[cur_pic->
4106 slice_idx][i],
4107 pic ? "has error" :
4108 "not in list1");
4109 }
4110 }
4111 }
4112 return 0;
4113}
4114
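/*
 * Walk the current reference picture set: each CUR_RPS entry encodes a
 * delta POC in its low bits plus a direction bit, with 0x8000 marking
 * the end of the list. Any decoded picture whose POC is no longer
 * referenced by the set has its referenced flag cleared and its MV
 * buffer returned.
 */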
4115static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
4116 union param_u *params)
4117{
4118 int ii, i;
4119 int poc_tmp;
4120 struct PIC_s *pic;
4121 unsigned char is_referenced;
4122 /* hevc_print(hevc, 0,
4123 "%s cur_poc %d\n", __func__, cur_poc); */
4124 if (pic_list_debug & 0x2) {
4125 pr_err("cur poc %d\n", cur_poc);
4126 }
4127 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
4128 pic = hevc->m_PIC[ii];
4129 if (pic == NULL ||
4130 pic->index == -1 ||
4131 pic->BUF_index == -1
4132 )
4133 continue;
4134
4135 if ((pic->referenced == 0 || pic->POC == cur_poc))
4136 continue;
4137 is_referenced = 0;
4138 for (i = 0; i < 16; i++) {
4139 int delt;
4140
4141 if (params->p.CUR_RPS[i] & 0x8000)
4142 break;
4143 delt =
4144 params->p.CUR_RPS[i] &
4145 ((1 << (RPS_USED_BIT - 1)) - 1);
4146 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
4147 poc_tmp =
4148 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
4149 delt);
4150 } else
4151 poc_tmp = cur_poc + delt;
4152 if (poc_tmp == pic->POC) {
4153 is_referenced = 1;
4154 /* hevc_print(hevc, 0, "i is %d\n", i); */
4155 break;
4156 }
4157 }
4158 if (is_referenced == 0) {
4159 pic->referenced = 0;
4160 put_mv_buf(hevc, pic);
4161 /* hevc_print(hevc, 0,
4162 "set poc %d reference to 0\n", pic->POC); */
4163 if (pic_list_debug & 0x2) {
4164 pr_err("set poc %d reference to 0\n", pic->POC);
4165 }
4166 }
4167 }
4168
4169}
4170
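/*
 * Build the L0/L1 reference POC lists for the current slice from
 * CUR_RPS: negative deltas go to RefPicSetStCurr0 and positive deltas
 * to RefPicSetStCurr1. The lists are then filled, optionally through
 * the ref_pic_list_modification indices, and the slice type and active
 * reference counts are recorded in the PIC.
 */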
4171static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
4172{
4173 struct PIC_s *pic = hevc->cur_pic;
4174 int i, rIdx;
4175 int num_neg = 0;
4176 int num_pos = 0;
4177 int total_num;
4178 int num_ref_idx_l0_active =
4179 (params->p.num_ref_idx_l0_active >
4180 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4181 params->p.num_ref_idx_l0_active;
4182 int num_ref_idx_l1_active =
4183 (params->p.num_ref_idx_l1_active >
4184 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4185 params->p.num_ref_idx_l1_active;
4186
4187 int RefPicSetStCurr0[16];
4188 int RefPicSetStCurr1[16];
4189
4190 for (i = 0; i < 16; i++) {
4191 RefPicSetStCurr0[i] = 0;
4192 RefPicSetStCurr1[i] = 0;
4193 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
4194 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
4195 }
4196 for (i = 0; i < 16; i++) {
4197 if (params->p.CUR_RPS[i] & 0x8000)
4198 break;
4199 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
4200 int delt =
4201 params->p.CUR_RPS[i] &
4202 ((1 << (RPS_USED_BIT - 1)) - 1);
4203
4204 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
4205 RefPicSetStCurr0[num_neg] =
4206 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
4207 delt);
4208 /* hevc_print(hevc, 0,
4209 * "RefPicSetStCurr0 %x %x %x\n",
4210 * RefPicSetStCurr0[num_neg], pic->POC,
4211 * (0x800-(params[i]&0x7ff)));
4212 */
4213 num_neg++;
4214 } else {
4215 RefPicSetStCurr1[num_pos] = pic->POC + delt;
4216 /* hevc_print(hevc, 0,
4217 * "RefPicSetStCurr1 %d\n",
4218 * RefPicSetStCurr1[num_pos]);
4219 */
4220 num_pos++;
4221 }
4222 }
4223 }
4224 total_num = num_neg + num_pos;
4225 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4226 hevc_print(hevc, 0,
4227 "%s: curpoc %d slice_type %d, total %d ",
4228 __func__, pic->POC, params->p.slice_type, total_num);
4229 hevc_print_cont(hevc, 0,
4230 "num_neg %d num_list0 %d num_list1 %d\n",
4231 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
4232 }
4233
4234 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4235 hevc_print(hevc, 0,
4236 "HEVC Stream buf start ");
4237 hevc_print_cont(hevc, 0,
4238 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
4239 READ_VREG(HEVC_STREAM_START_ADDR),
4240 READ_VREG(HEVC_STREAM_END_ADDR),
4241 READ_VREG(HEVC_STREAM_WR_PTR),
4242 READ_VREG(HEVC_STREAM_RD_PTR),
4243 READ_VREG(HEVC_STREAM_LEVEL),
4244 READ_VREG(HEVC_STREAM_FIFO_CTL),
4245 READ_VREG(HEVC_PARSER_INT_CONTROL));
4246 }
4247
4248 if (total_num > 0) {
4249 if (params->p.modification_flag & 0x1) {
4250 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4251 hevc_print(hevc, 0, "ref0 POC (modification):");
4252 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4253 int cIdx = params->p.modification_list[rIdx];
4254
4255 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4256 cIdx >=
4257 num_neg ? RefPicSetStCurr1[cIdx -
4258 num_neg] :
4259 RefPicSetStCurr0[cIdx];
4260 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4261 hevc_print_cont(hevc, 0, "%d ",
4262 pic->m_aiRefPOCList0[pic->
4263 slice_idx]
4264 [rIdx]);
4265 }
4266 }
4267 } else {
4268 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4269 hevc_print(hevc, 0, "ref0 POC:");
4270 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4271 int cIdx = rIdx % total_num;
4272
4273 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4274 cIdx >=
4275 num_neg ? RefPicSetStCurr1[cIdx -
4276 num_neg] :
4277 RefPicSetStCurr0[cIdx];
4278 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4279 hevc_print_cont(hevc, 0, "%d ",
4280 pic->m_aiRefPOCList0[pic->
4281 slice_idx]
4282 [rIdx]);
4283 }
4284 }
4285 }
4286 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4287 hevc_print_cont(hevc, 0, "\n");
4288 if (params->p.slice_type == B_SLICE) {
4289 if (params->p.modification_flag & 0x2) {
4290 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4291 hevc_print(hevc, 0,
4292 "ref1 POC (modification):");
4293 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4294 rIdx++) {
4295 int cIdx;
4296
4297 if (params->p.modification_flag & 0x1) {
4298 cIdx =
4299 params->p.
4300 modification_list
4301 [num_ref_idx_l0_active +
4302 rIdx];
4303 } else {
4304 cIdx =
4305 params->p.
4306 modification_list[rIdx];
4307 }
4308 pic->m_aiRefPOCList1[pic->
4309 slice_idx][rIdx] =
4310 cIdx >=
4311 num_pos ?
4312 RefPicSetStCurr0[cIdx - num_pos]
4313 : RefPicSetStCurr1[cIdx];
4314 if (get_dbg_flag(hevc) &
4315 H265_DEBUG_BUFMGR) {
4316 hevc_print_cont(hevc, 0, "%d ",
4317 pic->
4318 m_aiRefPOCList1[pic->
4319 slice_idx]
4320 [rIdx]);
4321 }
4322 }
4323 } else {
4324 if (get_dbg_flag(hevc) &
4325 H265_DEBUG_BUFMGR)
4326 hevc_print(hevc, 0, "ref1 POC:");
4327 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4328 rIdx++) {
4329 int cIdx = rIdx % total_num;
4330
4331 pic->m_aiRefPOCList1[pic->
4332 slice_idx][rIdx] =
4333 cIdx >=
4334 num_pos ?
4335 RefPicSetStCurr0[cIdx -
4336 num_pos]
4337 : RefPicSetStCurr1[cIdx];
4338 if (get_dbg_flag(hevc) &
4339 H265_DEBUG_BUFMGR) {
4340 hevc_print_cont(hevc, 0, "%d ",
4341 pic->
4342 m_aiRefPOCList1[pic->
4343 slice_idx]
4344 [rIdx]);
4345 }
4346 }
4347 }
4348 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4349 hevc_print_cont(hevc, 0, "\n");
4350 }
4351 }
4352 /*set m_PIC */
4353 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4354 (params->p.slice_type == P_SLICE) ? 1 :
4355 (params->p.slice_type == B_SLICE) ? 0 : 3;
4356 pic->RefNum_L0 = num_ref_idx_l0_active;
4357 pic->RefNum_L1 = num_ref_idx_l1_active;
4358}
4359
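/*
 * Rebuild the tile map for the current picture: for each tile the
 * width/height and start CU coordinates are computed (uniform spacing
 * or the explicit tile_width/tile_height lists), together with its SAO
 * vertical and above line-buffer addresses inside the work space.
 */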
4360static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4361 int pic_height_cu, int sao_mem_unit,
4362 union param_u *params)
4363{
4364 int i, j;
4365 int start_cu_x, start_cu_y;
4366 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4367 int sao_abv_size = sao_mem_unit * pic_width_cu;
4368#ifdef DETREFILL_ENABLE
4369 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4370 int tmpRefillLcuSize = 1 <<
4371 (params->p.log2_min_coding_block_size_minus3 +
4372 3 + params->p.log2_diff_max_min_coding_block_size);
4373 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4374 "%x, %x, %x, %x\n",
4375 params->p.slice_segment_address,
4376 params->p.bit_depth,
4377 params->p.tiles_enabled_flag,
4378 tmpRefillLcuSize);
4379 if (params->p.slice_segment_address == 0 &&
4380 params->p.bit_depth != 0 &&
4381 (params->p.tiles_enabled_flag & 1) &&
4382 tmpRefillLcuSize == 64)
4383 hevc->delrefill_check = 1;
4384 else
4385 hevc->delrefill_check = 0;
4386 }
4387#endif
4388
4389 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4390 if (params->p.tiles_enabled_flag & 1) {
4391 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4392 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4393
4394 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4395 || hevc->num_tile_row <= 0) {
4396 hevc->num_tile_row = 1;
4397 hevc_print(hevc, 0,
4398 "%s: num_tile_rows_minus1 (%d) error!!\n",
4399 __func__, params->p.num_tile_rows_minus1);
4400 }
4401 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4402 || hevc->num_tile_col <= 0) {
4403 hevc->num_tile_col = 1;
4404 hevc_print(hevc, 0,
4405 "%s: num_tile_columns_minus1 (%d) error!!\n",
4406 __func__, params->p.num_tile_columns_minus1);
4407 }
4408 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4409 hevc_print(hevc, 0,
4410 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4411 __func__, pic_width_cu, pic_height_cu);
4412 hevc_print_cont(hevc, 0,
4413 "num_tile_col %d num_tile_row %d:\n",
4414 hevc->num_tile_col, hevc->num_tile_row);
4415 }
4416
4417 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4418 int w = pic_width_cu / hevc->num_tile_col;
4419 int h = pic_height_cu / hevc->num_tile_row;
4420
4421 start_cu_y = 0;
4422 for (i = 0; i < hevc->num_tile_row; i++) {
4423 start_cu_x = 0;
4424 for (j = 0; j < hevc->num_tile_col; j++) {
4425 if (j == (hevc->num_tile_col - 1)) {
4426 hevc->m_tile[i][j].width =
4427 pic_width_cu -
4428 start_cu_x;
4429 } else
4430 hevc->m_tile[i][j].width = w;
4431 if (i == (hevc->num_tile_row - 1)) {
4432 hevc->m_tile[i][j].height =
4433 pic_height_cu -
4434 start_cu_y;
4435 } else
4436 hevc->m_tile[i][j].height = h;
4437 hevc->m_tile[i][j].start_cu_x
4438 = start_cu_x;
4439 hevc->m_tile[i][j].start_cu_y
4440 = start_cu_y;
4441 hevc->m_tile[i][j].sao_vb_start_addr =
4442 hevc->work_space_buf->sao_vb.
4443 buf_start + j * sao_vb_size;
4444 hevc->m_tile[i][j].sao_abv_start_addr =
4445 hevc->work_space_buf->sao_abv.
4446 buf_start + i * sao_abv_size;
4447 if (get_dbg_flag(hevc) &
4448 H265_DEBUG_BUFMGR) {
4449 hevc_print_cont(hevc, 0,
4450 "{y=%d, x=%d w %d h %d ",
4451 i, j, hevc->m_tile[i][j].width,
4452 hevc->m_tile[i][j].height);
4453 hevc_print_cont(hevc, 0,
4454 "start_x %d start_y %d ",
4455 hevc->m_tile[i][j].start_cu_x,
4456 hevc->m_tile[i][j].start_cu_y);
4457 hevc_print_cont(hevc, 0,
4458 "sao_vb_start 0x%x ",
4459 hevc->m_tile[i][j].
4460 sao_vb_start_addr);
4461 hevc_print_cont(hevc, 0,
4462 "sao_abv_start 0x%x}\n",
4463 hevc->m_tile[i][j].
4464 sao_abv_start_addr);
4465 }
4466 start_cu_x += hevc->m_tile[i][j].width;
4467
4468 }
4469 start_cu_y += hevc->m_tile[i][0].height;
4470 }
4471 } else {
4472 start_cu_y = 0;
4473 for (i = 0; i < hevc->num_tile_row; i++) {
4474 start_cu_x = 0;
4475 for (j = 0; j < hevc->num_tile_col; j++) {
4476 if (j == (hevc->num_tile_col - 1)) {
4477 hevc->m_tile[i][j].width =
4478 pic_width_cu -
4479 start_cu_x;
4480 } else {
4481 hevc->m_tile[i][j].width =
4482 params->p.tile_width[j];
4483 }
4484 if (i == (hevc->num_tile_row - 1)) {
4485 hevc->m_tile[i][j].height =
4486 pic_height_cu -
4487 start_cu_y;
4488 } else {
4489 hevc->m_tile[i][j].height =
4490 params->
4491 p.tile_height[i];
4492 }
4493 hevc->m_tile[i][j].start_cu_x
4494 = start_cu_x;
4495 hevc->m_tile[i][j].start_cu_y
4496 = start_cu_y;
4497 hevc->m_tile[i][j].sao_vb_start_addr =
4498 hevc->work_space_buf->sao_vb.
4499 buf_start + j * sao_vb_size;
4500 hevc->m_tile[i][j].sao_abv_start_addr =
4501 hevc->work_space_buf->sao_abv.
4502 buf_start + i * sao_abv_size;
4503 if (get_dbg_flag(hevc) &
4504 H265_DEBUG_BUFMGR) {
4505 hevc_print_cont(hevc, 0,
4506 "{y=%d, x=%d w %d h %d ",
4507 i, j, hevc->m_tile[i][j].width,
4508 hevc->m_tile[i][j].height);
4509 hevc_print_cont(hevc, 0,
4510 "start_x %d start_y %d ",
4511 hevc->m_tile[i][j].start_cu_x,
4512 hevc->m_tile[i][j].start_cu_y);
4513 hevc_print_cont(hevc, 0,
4514 "sao_vb_start 0x%x ",
4515 hevc->m_tile[i][j].
4516 sao_vb_start_addr);
4517 hevc_print_cont(hevc, 0,
4518 "sao_abv_start 0x%x}\n",
4519 hevc->m_tile[i][j].
4520 sao_abv_start_addr);
4521
4522 }
4523 start_cu_x += hevc->m_tile[i][j].width;
4524 }
4525 start_cu_y += hevc->m_tile[i][0].height;
4526 }
4527 }
4528 } else {
4529 hevc->num_tile_col = 1;
4530 hevc->num_tile_row = 1;
4531 hevc->m_tile[0][0].width = pic_width_cu;
4532 hevc->m_tile[0][0].height = pic_height_cu;
4533 hevc->m_tile[0][0].start_cu_x = 0;
4534 hevc->m_tile[0][0].start_cu_y = 0;
4535 hevc->m_tile[0][0].sao_vb_start_addr =
4536 hevc->work_space_buf->sao_vb.buf_start;
4537 hevc->m_tile[0][0].sao_abv_start_addr =
4538 hevc->work_space_buf->sao_abv.buf_start;
4539 }
4540}
4541
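/*
 * Map an LCU address to its tile coordinates, returned packed as
 * (tile_x | tile_y << 8); -1 is returned if pic_width_lcu is zero.
 */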
4542static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4543 int pic_width_lcu)
4544{
4545 int cu_x;
4546 int cu_y;
4547 int tile_x = 0;
4548 int tile_y = 0;
4549 int i;
4550
4551 if (pic_width_lcu == 0) {
4552 if (get_dbg_flag(hevc)) {
4553 hevc_print(hevc, 0,
4554 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4555 __func__, hevc->pic_w, hevc->pic_h);
4556 }
4557 return -1;
4558 }
4559 cu_x = cu_adr % pic_width_lcu;
4560 cu_y = cu_adr / pic_width_lcu;
4561 if (hevc->tile_enabled) {
4562 for (i = 0; i < hevc->num_tile_col; i++) {
4563 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4564 tile_x = i;
4565 else
4566 break;
4567 }
4568 for (i = 0; i < hevc->num_tile_row; i++) {
4569 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4570 tile_y = i;
4571 else
4572 break;
4573 }
4574 }
4575 return (tile_x) | (tile_y << 8);
4576}
4577
4578static void print_scratch_error(int error_num)
4579{
4580#if 0
4581 if (get_dbg_flag(hevc)) {
4582 hevc_print(hevc, 0,
4583 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4584 error_num);
4585 }
4586#endif
4587}
4588
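/*
 * Point the decoder at the work-space buffers: IPP line buffer, RPM,
 * short-term RPS, VPS/SPS/PPS, SAO line buffer, scale LUT, deblocking
 * parameter/data buffers, the frame MMU map and the LMEM dump area.
 */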
4589static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4590{
4591 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4592
4593 if (get_dbg_flag(hevc))
4594 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4595 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4596 __func__,
4597 buf_spec->ipp.buf_start,
4598 buf_spec->start_adr,
4599 buf_spec->short_term_rps.buf_start,
4600 buf_spec->vps.buf_start,
4601 buf_spec->sps.buf_start,
4602 buf_spec->pps.buf_start,
4603 buf_spec->sao_up.buf_start,
4604 buf_spec->swap_buf.buf_start,
4605 buf_spec->swap_buf2.buf_start,
4606 buf_spec->scalelut.buf_start,
4607 buf_spec->dblk_para.buf_start,
4608 buf_spec->dblk_data.buf_start,
4609 buf_spec->dblk_data2.buf_start);
4610 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4611 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4612 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4613 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4614 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4615 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4616 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4617 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4618 if (hevc->mmu_enable) {
4619 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4620 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4621 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4622 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4623 } else
4624 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4625 } /*else
4626 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4627 buf_spec->swap_buf.buf_start);
4628 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4629 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4630#ifdef HEVC_8K_LFTOFFSET_FIX
4631 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
4632 WRITE_VREG(HEVC_DBLK_CFG3, 0x808020); /*offset should x2 if 8k*/
4633 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4634 "write HEVC_DBLK_CFG3\n");
4635 }
4636#endif
4637 /* cfg_p_addr */
4638 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4639 /* cfg_d_addr */
4640 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4641
4642 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4643
4644 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4645}
4646
4647static void parser_cmd_write(void)
4648{
4649 u32 i;
4650 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4651 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4652 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4653 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4654 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4655 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4656 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4657 0x7C00
4658 };
4659 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4660 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4661}
4662
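/*
 * Bring up the HEVC decoder front end: verify the parser scratch
 * registers, reset the IQIT block, program stream fetch/shift,
 * start-code and emulation detection, parser interrupts, the parser
 * command list and the IPP, and select NV21 reference reads when
 * running in double-write-only mode.
 */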
4663static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4664 int decode_pic_begin, int decode_pic_num)
4665{
4666 unsigned int data32;
4667 int i;
4668#if 0
4669 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4670 /* Set MCR fetch priorities*/
4671 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4672 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4673 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4674 }
4675#endif
4676#if 1
4677 /* m8baby test1902 */
4678 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4679 hevc_print(hevc, 0,
4680 "%s\n", __func__);
4681 data32 = READ_VREG(HEVC_PARSER_VERSION);
4682 if (data32 != 0x00010001) {
4683 print_scratch_error(25);
4684 return;
4685 }
4686 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4687 data32 = READ_VREG(HEVC_PARSER_VERSION);
4688 if (data32 != 0x5a5a55aa) {
4689 print_scratch_error(26);
4690 return;
4691 }
4692#if 0
4693 /* test Parser Reset */
4694 /* reset iqit to start mem init again */
4695 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4696 (1 << 3) /* reset_whole parser */
4697 );
4698 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4699 data32 = READ_VREG(HEVC_PARSER_VERSION);
4700 if (data32 != 0x00010001)
4701 hevc_print(hevc, 0,
4702 "Test Parser Fatal Error\n");
4703#endif
4704 /* reset iqit to start mem init again */
4705 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4706 );
4707 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4708 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4709
4710#endif
4711 if (!hevc->m_ins_flag) {
4712 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4713 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4714 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4715 data32 |= (0xf << 25); /*arwlen_axi_max*/
4716 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4717 }
4718 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4719 if (data32 != 0x00000100) {
4720 print_scratch_error(29);
4721 return;
4722 }
4723 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4724 if (data32 != 0x00000300) {
4725 print_scratch_error(30);
4726 return;
4727 }
4728 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4729 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4730 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4731 if (data32 != 0x12345678) {
4732 print_scratch_error(31);
4733 return;
4734 }
4735 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4736 if (data32 != 0x9abcdef0) {
4737 print_scratch_error(32);
4738 return;
4739 }
4740 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4741 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4742
4743 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4744 data32 &= 0x03ffffff;
4745 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4746 | /* stream_buffer_empty_int_amrisc_enable */
4747 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4748 (1 << 7) | /* dec_done_int_cpu_enable */
4749 (1 << 4) | /* startcode_found_int_cpu_enable */
4750 (0 << 3) | /* startcode_found_int_amrisc_enable */
4751 (1 << 0) /* parser_int_enable */
4752 ;
4753 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4754
4755 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4756 data32 = data32 | (1 << 1) | /* emulation_check_on */
4757 (1 << 0) /* startcode_check_on */
4758 ;
4759 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4760
4761 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4762 (2 << 4) | /* emulate_code_length_sub_1 */
4763 (2 << 1) | /* start_code_length_sub_1 */
4764 (1 << 0) /* stream_shift_enable */
4765 );
4766
4767 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4768 );
4769 /* hevc_parser_core_clk_en */
4770 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4771 );
4772
4773 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4774
4775 /* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4776 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4777 for (i = 0; i < 1024; i++)
4778 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4779
4780#ifdef ENABLE_SWAP_TEST
4781 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4782#endif
4783
4784 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4785 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4786 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4787 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4788 /* Send parser_cmd */
4789 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4790
4791 parser_cmd_write();
4792
4793 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4794 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4795 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4796
4797 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4798 /* (1 << 8) | // sao_sw_pred_enable */
4799 (1 << 5) | /* parser_sao_if_en */
4800 (1 << 2) | /* parser_mpred_if_en */
4801 (1 << 0) /* parser_scaler_if_en */
4802 );
4803
4804 /* Changed to Start MPRED in microcode */
4805 /*
4806 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4807 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4808 * (1<<31)
4809 * );
4810 */
4811
4812 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4813 (1 << 0) /* software reset ipp and mpp */
4814 );
4815 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4816 (0 << 0) /* software reset ipp and mpp */
4817 );
4818
4819 if (get_double_write_mode(hevc) & 0x10)
4820 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4821 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
4822 );
4823
4824}
4825
4826static void decoder_hw_reset(void)
4827{
4828 int i;
4829 unsigned int data32;
4830
4831 /* reset iqit to start mem init again */
4832 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4833 );
4834 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4835 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4836
4837 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4838 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4839 ;
4840 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4841
4842 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4843 if (data32 != 0x00000100) {
4844 print_scratch_error(29);
4845 return;
4846 }
4847 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4848 if (data32 != 0x00000300) {
4849 print_scratch_error(30);
4850 return;
4851 }
4852 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4853 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4854 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4855 if (data32 != 0x12345678) {
4856 print_scratch_error(31);
4857 return;
4858 }
4859 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4860 if (data32 != 0x9abcdef0) {
4861 print_scratch_error(32);
4862 return;
4863 }
4864 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4865 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4866
4867 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4868 data32 &= 0x03ffffff;
4869 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4870 | /* stream_buffer_empty_int_amrisc_enable */
4871 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4872 (1 << 7) | /* dec_done_int_cpu_enable */
4873 (1 << 4) | /* startcode_found_int_cpu_enable */
4874 (0 << 3) | /* startcode_found_int_amrisc_enable */
4875 (1 << 0) /* parser_int_enable */
4876 ;
4877 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4878
4879 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4880 data32 = data32 | (1 << 1) | /* emulation_check_on */
4881 (1 << 0) /* startcode_check_on */
4882 ;
4883 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4884
4885 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4886 (2 << 4) | /* emulate_code_length_sub_1 */
4887 (2 << 1) | /* start_code_length_sub_1 */
4888 (1 << 0) /* stream_shift_enable */
4889 );
4890
4891 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4892 );
4893 /* hevc_parser_core_clk_en */
4894 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4895 );
4896
4897 /* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4898 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4899 for (i = 0; i < 1024; i++)
4900 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4901
4902 /* Send parser_cmd */
4903 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4904
4905 parser_cmd_write();
4906
4907 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4908 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4909 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4910
4911 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4912 /* (1 << 8) | // sao_sw_pred_enable */
4913 (1 << 5) | /* parser_sao_if_en */
4914 (1 << 2) | /* parser_mpred_if_en */
4915 (1 << 0) /* parser_scaler_if_en */
4916 );
4917
4918 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4919 (1 << 0) /* software reset ipp and mpp */
4920 );
4921 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4922 (0 << 0) /* software reset ipp and mpp */
4923 );
4924
4925}
4926
4927#ifdef CONFIG_HEVC_CLK_FORCED_ON
4928static void config_hevc_clk_forced_on(void)
4929{
4930 unsigned int rdata32;
4931 /* IQIT */
4932 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4933 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4934
4935 /* DBLK */
4936 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4937 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4938
4939 /* SAO */
4940 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4941 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4942
4943 /* MPRED */
4944 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4945 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4946
4947 /* PARSER */
4948 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4949 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4950 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4951 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4952 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4953 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4954 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4955 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4956 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4957 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4958 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4959 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4960 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4961
4962 /* IPP */
4963 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4964 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4965
4966 /* MCRCC */
4967 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4968 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4969}
4970#endif
4971
4972#ifdef MCRCC_ENABLE
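/*
 * Configure the motion-compensation reference cache (MCRCC) for the
 * slice about to be decoded: it is held in reset for I slices and
 * otherwise programmed with the canvas pairs of the first L0/L1
 * references before being enabled in progressive mode.
 */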
4973static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4974{
4975 unsigned int rdata32;
4976 unsigned int rdata32_2;
4977 int l0_cnt = 0;
4978 int l1_cnt = 0x7fff;
4979
4980 if (get_double_write_mode(hevc) & 0x10) {
4981 l0_cnt = hevc->cur_pic->RefNum_L0;
4982 l1_cnt = hevc->cur_pic->RefNum_L1;
4983 }
4984
4985 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4986
4987 if (slice_type == 2) { /* I-PIC */
4988 /* remove reset -- disables clock */
4989 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4990 return;
4991 }
4992
4993 if (slice_type == 0) { /* B-PIC */
4994 /* Programme canvas0 */
4995 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4996 (0 << 8) | (0 << 1) | 0);
4997 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4998 rdata32 = rdata32 & 0xffff;
4999 rdata32 = rdata32 | (rdata32 << 16);
5000 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
5001
5002 /* Programme canvas1 */
5003 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
5004 (16 << 8) | (1 << 1) | 0);
5005 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
5006 rdata32_2 = rdata32_2 & 0xffff;
5007 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
5008 if (rdata32 == rdata32_2 && l1_cnt > 1) {
5009 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
5010 rdata32_2 = rdata32_2 & 0xffff;
5011 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
5012 }
5013 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
5014 } else { /* P-PIC */
5015 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
5016 (0 << 8) | (1 << 1) | 0);
5017 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
5018 rdata32 = rdata32 & 0xffff;
5019 rdata32 = rdata32 | (rdata32 << 16);
5020 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
5021
5022 if (l0_cnt == 1) {
5023 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
5024 } else {
5025 /* Programme canvas1 */
5026 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
5027 rdata32 = rdata32 & 0xffff;
5028 rdata32 = rdata32 | (rdata32 << 16);
5029 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
5030 }
5031 }
5032 /* enable mcrcc progressive-mode */
5033 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
5034}
5035#endif
5036
5037static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
5038 int sao_mem_unit)
5039{
5040 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
5041 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
5042 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
5043 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
5044}
5045
5046static u32 init_aux_size;
5047static int aux_data_is_avaible(struct hevc_state_s *hevc)
5048{
5049 u32 reg_val;
5050
5051 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
5052 if (reg_val != 0 && reg_val != init_aux_size)
5053 return 1;
5054 else
5055 return 0;
5056}
5057
5058static void config_aux_buf(struct hevc_state_s *hevc)
5059{
5060 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
5061 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
5062 (hevc->suffix_aux_size >> 4);
5063 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
5064}
5065
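/*
 * Program the motion-prediction block for the current slice: MV
 * read/write buffers come from the collocated and current pictures
 * (MV_MEM_UNIT is 0x20/0x80/0x200 bytes per LCU for LCU sizes of
 * 16/32/64), followed by slice flags, tile geometry, reference counts
 * and the L0/L1 reference POC register pairs.
 */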
5066static void config_mpred_hw(struct hevc_state_s *hevc)
5067{
5068 int i;
5069 unsigned int data32;
5070 struct PIC_s *cur_pic = hevc->cur_pic;
5071 struct PIC_s *col_pic = hevc->col_pic;
5072 int AMVP_MAX_NUM_CANDS_MEM = 3;
5073 int AMVP_MAX_NUM_CANDS = 2;
5074 int NUM_CHROMA_MODE = 5;
5075 int DM_CHROMA_IDX = 36;
5076 int above_ptr_ctrl = 0;
5077 int buffer_linear = 1;
5078 int cu_size_log2 = 3;
5079
5080 int mpred_mv_rd_start_addr;
5081 int mpred_curr_lcu_x;
5082 int mpred_curr_lcu_y;
5083 int mpred_above_buf_start;
5084 int mpred_mv_rd_ptr;
5085 int mpred_mv_rd_ptr_p1;
5086 int mpred_mv_rd_end_addr;
5087 int MV_MEM_UNIT;
5088 int mpred_mv_wr_ptr;
5089 int *ref_poc_L0, *ref_poc_L1;
5090
5091 int above_en;
5092 int mv_wr_en;
5093 int mv_rd_en;
5094 int col_isIntra;
5095
5096 if (hevc->slice_type != 2) {
5097 above_en = 1;
5098 mv_wr_en = 1;
5099 mv_rd_en = 1;
5100 col_isIntra = 0;
5101 } else {
5102 above_en = 1;
5103 mv_wr_en = 1;
5104 mv_rd_en = 0;
5105 col_isIntra = 0;
5106 }
5107
5108 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
5109 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
5110 mpred_curr_lcu_x = data32 & 0xffff;
5111 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
5112
5113 MV_MEM_UNIT =
5114 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
5115 5 ? 0x80 : 0x20;
5116 mpred_mv_rd_ptr =
5117 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
5118
5119 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
5120 mpred_mv_rd_end_addr =
5121 mpred_mv_rd_start_addr +
5122 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
5123
5124 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
5125
5126 mpred_mv_wr_ptr =
5127 cur_pic->mpred_mv_wr_start_addr +
5128 (hevc->slice_addr * MV_MEM_UNIT);
5129
5130 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5131 hevc_print(hevc, 0,
5132 "cur pic index %d col pic index %d\n", cur_pic->index,
5133 col_pic->index);
5134 }
5135
5136 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
5137 cur_pic->mpred_mv_wr_start_addr);
5138 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
5139
5140 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
5141 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
5142 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
5143
5144 data32 = READ_VREG(HEVC_MPRED_CTRL0);
5145 data32 = ((hevc->slice_type & 3) |
5146 (hevc->new_pic & 1) << 2 |
5147 (hevc->new_tile & 1) << 3 |
5148 (hevc->isNextSliceSegment & 1)<< 4 |
5149 (hevc->TMVPFlag & 1)<< 5 |
5150 (hevc->LDCFlag & 1) << 6 |
5151 (hevc->ColFromL0Flag & 1)<< 7 |
5152 (above_ptr_ctrl & 1)<< 8 |
5153 (above_en & 1) << 9 |
5154 (mv_wr_en & 1) << 10 |
5155 (mv_rd_en & 1)<< 11 |
5156 (col_isIntra & 1)<< 12 |
5157 (buffer_linear & 1)<< 13 |
5158 (hevc->LongTerm_Curr & 1) << 14 |
5159 (hevc->LongTerm_Col & 1) << 15 |
5160 (hevc->lcu_size_log2 & 0xf) << 16 |
5161 (cu_size_log2 & 0xf) << 20 | (hevc->plevel & 0x7) << 24);
5162 data32 &= ~(1<< 28);
5163 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
5164
5165 data32 = READ_VREG(HEVC_MPRED_CTRL1);
5166 data32 = (
5167#if 0
5168 /* not set in m8baby test1902 */
5169 /* Don't override clk_forced_on , */
5170 (data32 & (0x1 << 24)) |
5171#endif
5172 hevc->MaxNumMergeCand |
5173 AMVP_MAX_NUM_CANDS << 4 |
5174 AMVP_MAX_NUM_CANDS_MEM << 8 |
5175 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
5176 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
5177
5178 data32 = (hevc->pic_w | hevc->pic_h << 16);
5179 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
5180
5181 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5182 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
5183
5184 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
5185 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
5186
5187 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
5188 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
5189
5190 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
5191 /* col_RefNum_L0<<16| */
5192 /* col_RefNum_L1<<24 */
5193 );
5194 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
5195
5196 data32 = (hevc->LongTerm_Ref);
5197 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
5198
5199 data32 = 0;
5200 for (i = 0; i < hevc->RefNum_L0; i++)
5201 data32 = data32 | (1 << i);
5202 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
5203
5204 data32 = 0;
5205 for (i = 0; i < hevc->RefNum_L1; i++)
5206 data32 = data32 | (1 << i);
5207 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
5208
5209 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
5210 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
5211
5212 /* The MPRED Ref_POC_xx_Lx registers below must be written in pairs:
5213 * Ref_POC_xx_L0 immediately followed by Ref_POC_xx_L1.
5214 */
5215 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
5216 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
5217
5218 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
5219 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
5220
5221 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
5222 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
5223
5224 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
5225 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
5226
5227 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
5228 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
5229
5230 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
5231 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
5232
5233 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
5234 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
5235
5236 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
5237 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
5238
5239 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
5240 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
5241
5242 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
5243 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
5244
5245 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
5246 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
5247
5248 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
5249 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
5250
5251 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
5252 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
5253
5254 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
5255 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
5256
5257 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
5258 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
5259
5260 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
5261 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
5262
5263 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
5264 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
5265
5266 if (hevc->new_pic) {
5267 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
5268 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
5269 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
5270 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
5271 } else if (!hevc->isNextSliceSegment) {
5272 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
5273 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
5274 }
5275
5276 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
5277}
5278
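/*
 * Program the SAO and deblocking stage for the current picture: output
 * addresses (compressed body/header and/or double-write luma/chroma),
 * endianness and memory-map mode, plus the deblocking beta/tc offsets
 * and cross-slice/cross-tile loop-filter flags taken from the slice or
 * the PPS.
 */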
5279static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
5280{
5281 unsigned int data32, data32_2;
5282 int misc_flag0 = hevc->misc_flag0;
5283 int slice_deblocking_filter_disabled_flag = 0;
5284
5285 int mc_buffer_size_u_v =
5286 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
5287 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
5288 struct PIC_s *cur_pic = hevc->cur_pic;
5289 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
5290
5291 data32 = READ_VREG(HEVC_SAO_CTRL0);
5292 data32 &= (~0xf);
5293 data32 |= hevc->lcu_size_log2;
5294 WRITE_VREG(HEVC_SAO_CTRL0, data32);
5295
5296 data32 = (hevc->pic_w | hevc->pic_h << 16);
5297 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5298
5299 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5300 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5301
5302 if (hevc->new_pic)
5303 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5304#ifdef LOSLESS_COMPRESS_MODE
5305/*SUPPORT_10BIT*/
5306 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5307 data32 = READ_VREG(HEVC_SAO_CTRL5);
5308 data32 &= (~(0xff << 16));
5309
5310 if (get_double_write_mode(hevc) == 2 ||
5311 get_double_write_mode(hevc) == 3)
5312 data32 |= (0xff<<16);
5313 else if (get_double_write_mode(hevc) == 4)
5314 data32 |= (0x33<<16);
5315
5316 if (hevc->mem_saving_mode == 1)
5317 data32 |= (1 << 9);
5318 else
5319 data32 &= ~(1 << 9);
5320 if (workaround_enable & 1)
5321 data32 |= (1 << 7);
5322 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5323 }
5324 data32 = cur_pic->mc_y_adr;
5325 if (get_double_write_mode(hevc))
5326 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5327
5328 if ((get_double_write_mode(hevc) & 0x10) == 0)
5329 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5330
5331 if (hevc->mmu_enable)
5332 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5333#else
5334 data32 = cur_pic->mc_y_adr;
5335 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5336#endif
5337 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5338 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5339
5340#ifdef LOSLESS_COMPRESS_MODE
5341/*SUPPORT_10BIT*/
5342 if (get_double_write_mode(hevc))
5343 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5344#else
5345 data32 = cur_pic->mc_u_v_adr;
5346 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5347#endif
5348 data32 = (mc_buffer_size_u_v_h << 16);
5349 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5350
5351#ifdef LOSLESS_COMPRESS_MODE
5352/*SUPPORT_10BIT*/
5353 if (get_double_write_mode(hevc)) {
5354 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5355 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5356 }
5357#else
5358 /* multi tile to do... */
5359 data32 = cur_pic->mc_y_adr;
5360 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5361
5362 data32 = cur_pic->mc_u_v_adr;
5363 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5364#endif
5365 /* DBLK CONFIG HERE */
5366 if (hevc->new_pic) {
5367 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5368 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5369 data32 = (0xff << 8) | (0x0 << 0);
5370 else
5371 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5372 (0x0 << 0); /* h265 video format*/
5373
5374 if (hevc->pic_w >= 1280)
5375 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5376 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5377 if (get_double_write_mode(hevc) == 0)
5378 data32 |= (0x1 << 8); /*enable first write*/
5379 else if (get_double_write_mode(hevc) == 0x10)
5380 data32 |= (0x1 << 9); /*double write only*/
5381 else
5382 data32 |= ((0x1 << 8) |(0x1 << 9));
5383
5384 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5385 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5386 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5387 }
5388 data32 = (hevc->pic_w | hevc->pic_h << 16);
5389 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5390
5391 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5392 data32 =
5393 ((misc_flag0 >>
5394 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5395 0x1) << 3;
5396 } else
5397 data32 = 0;
5398 data32 |=
5399 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5400 ((params->p.pps_cr_qp_offset
5401 & 0x1f) <<
5402 9));
5403 data32 |=
5404 (hevc->lcu_size ==
5405 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5406
5407 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5408
5409 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5410 /*if (debug & 0x80) {*/
5411 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5412 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5413 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5414 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5415 data32);
5416 /*}*/
5417 }
5418 }
5419#if 0
5420 data32 = READ_VREG(HEVC_SAO_CTRL1);
5421 data32 &= (~0x3000);
5422 data32 |= (hevc->mem_map_mode <<
5423 12);
5424
5425/* [13:12] axi_aformat,
5426 * 0-Linear, 1-32x32, 2-64x32
5427 */
5428 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5429
5430 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5431 data32 &= (~0x30);
5432 data32 |= (hevc->mem_map_mode <<
5433 4);
5434
5435/* [5:4] -- address_format
5436 * 00:linear 01:32x32 10:64x32
5437 */
5438 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5439#else
5440 /* m8baby test1902 */
5441 data32 = READ_VREG(HEVC_SAO_CTRL1);
5442 data32 &= (~0x3000);
5443 data32 |= (hevc->mem_map_mode <<
5444 12);
5445
5446/* [13:12] axi_aformat, 0-Linear,
5447 * 1-32x32, 2-64x32
5448 */
5449 data32 &= (~0xff0);
5450 /* data32 |= 0x670; // Big-Endian per 64-bit */
5451 data32 |= endian; /* Big-Endian per 64-bit */
5452 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5453 if (get_double_write_mode(hevc) == 0)
5454 data32 |= 0x2; /*disable double write*/
5455 else if (get_double_write_mode(hevc) & 0x10)
5456 data32 |= 0x1; /*disable cm*/
5457 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5458 unsigned int data;
5459 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5460 (0x0 << 0); /* h265 video format*/
5461 if (hevc->pic_w >= 1280)
5462 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5463 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5464 if (get_double_write_mode(hevc) == 0)
5465 data |= (0x1 << 8); /*enable first write*/
5466 else if (get_double_write_mode(hevc) & 0x10)
5467 data |= (0x1 << 9); /*double write only*/
5468 else
5469 data |= ((0x1 << 8) |(0x1 << 9));
5470 WRITE_VREG(HEVC_DBLK_CFGB, data);
5471 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5472 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5473 }
5474
5475 /* swap uv */
5476 if (hevc->is_used_v4l) {
5477 if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21) ||
5478 (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21M))
5479 data32 &= ~(1 << 8); /* NV21 */
5480 else
5481 data32 |= (1 << 8); /* NV12 */
5482 }
5483
5484 /*
5485 * [31:24] ar_fifo1_axi_thred
5486 * [23:16] ar_fifo0_axi_thred
5487 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5488 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5489 * [11:08] axi_lendian_C
5490 * [07:04] axi_lendian_Y
5491 * [3] reserved
5492 * [2] clk_forceon
5493 * [1] dw_disable:disable double write output
5494 * [0] cm_disable:disable compress output
5495 */
5496
5497 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5498 if (get_double_write_mode(hevc) & 0x10) {
5499 /* [23:22] dw_v1_ctrl
5500 *[21:20] dw_v0_ctrl
5501 *[19:18] dw_h1_ctrl
5502 *[17:16] dw_h0_ctrl
5503 */
5504 data32 = READ_VREG(HEVC_SAO_CTRL5);
5505 /*set them all 0 for H265_NV21 (no down-scale)*/
5506 data32 &= ~(0xff << 16);
5507 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5508 }
5509
5510 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5511 data32 &= (~0x30);
5512 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5513 data32 |= (hevc->mem_map_mode <<
5514 4);
5515 data32 &= (~0xF);
5516 data32 |= 0xf; /* valid only when double write only */
5517 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5518
5519 /* swap uv */
5520 if (hevc->is_used_v4l) {
5521 if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21) ||
5522 (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21M))
5523 data32 |= (1 << 12); /* NV21 */
5524 else
5525 data32 &= ~(1 << 12); /* NV12 */
5526 }
5527
5528 /*
5529 * [3:0] little_endian
5530 * [5:4] address_format 00:linear 01:32x32 10:64x32
5531 * [7:6] reserved
5532 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5533 * [11:10] reserved
5534 * [12] CbCr_byte_swap
5535 * [31:13] reserved
5536 */
5537 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5538#endif
5539 data32 = 0;
5540 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5541 data32_2 &= (~0x300);
5542 /* slice_deblocking_filter_disabled_flag = 0;
5543 * the ucode has already handled it, so read it back from the ucode directly
5544 */
5545 if (hevc->tile_enabled) {
5546 data32 |=
5547 ((misc_flag0 >>
5548 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5549 0x1) << 0;
5550 data32_2 |=
5551 ((misc_flag0 >>
5552 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5553 0x1) << 8;
5554 }
5555 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5556 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5557 0x1; /* the ucode has handled it, so read it from the ucode directly */
5558 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5559 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5560 /* slice_deblocking_filter_disabled_flag =
5561 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5562 * // the ucode has handled it, so read it from the ucode directly
5563 */
5564 data32 |= slice_deblocking_filter_disabled_flag << 2;
5565 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5566 hevc_print_cont(hevc, 0,
5567 "(1,%x)", data32);
5568 if (!slice_deblocking_filter_disabled_flag) {
5569 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5570 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5571 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5572 hevc_print_cont(hevc, 0,
5573 "(2,%x)", data32);
5574 }
5575 } else {
5576 data32 |=
5577 ((misc_flag0 >>
5578 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5579 0x1) << 2;
5580 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5581 hevc_print_cont(hevc, 0,
5582 "(3,%x)", data32);
5583 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5584 0x1) == 0) {
5585 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5586 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5587 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5588 hevc_print_cont(hevc, 0,
5589 "(4,%x)", data32);
5590 }
5591 }
5592 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5593 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5594 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5595 || (!slice_deblocking_filter_disabled_flag))) {
5596 data32 |=
5597 ((misc_flag0 >>
5598 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5599 & 0x1) << 1;
5600 data32_2 |=
5601 ((misc_flag0 >>
5602 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5603 & 0x1) << 9;
5604 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5605 hevc_print_cont(hevc, 0,
5606 "(5,%x)\n", data32);
5607 } else {
5608 data32 |=
5609 ((misc_flag0 >>
5610 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5611 & 0x1) << 1;
5612 data32_2 |=
5613 ((misc_flag0 >>
5614 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5615 & 0x1) << 9;
5616 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5617 hevc_print_cont(hevc, 0,
5618 "(6,%x)\n", data32);
5619 }
5620 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5621 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5622}
5623
5624#ifdef TEST_NO_BUF
5625static unsigned char test_flag = 1;
5626#endif
5627
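/*
 * Reconcile the picture pool with the current stream resolution: free
 * pictures whose size no longer matches are released (and dropped
 * entirely once more than work_pic_num pictures exist), and unused
 * slots are re-initialised until work_pic_num pictures are available.
 */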
5628static void pic_list_process(struct hevc_state_s *hevc)
5629{
5630 int work_pic_num = get_work_pic_num(hevc);
5631 int alloc_pic_count = 0;
5632 int i;
5633 struct PIC_s *pic;
5634 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5635 pic = hevc->m_PIC[i];
5636 if (pic == NULL || pic->index == -1)
5637 continue;
5638 alloc_pic_count++;
5639 if (pic->output_mark == 0 && pic->referenced == 0
5640 && pic->output_ready == 0
5641 && (pic->width != hevc->pic_w ||
5642 pic->height != hevc->pic_h)
5643 ) {
5644 set_buf_unused(hevc, pic->BUF_index);
5645 pic->BUF_index = -1;
5646 if (alloc_pic_count > work_pic_num) {
5647 pic->width = 0;
5648 pic->height = 0;
5649 release_pic_mmu_buf(hevc, pic);
5650 pic->index = -1;
5651 } else {
5652 pic->width = hevc->pic_w;
5653 pic->height = hevc->pic_h;
5654 }
5655 }
5656 }
5657 if (alloc_pic_count < work_pic_num) {
5658 int new_count = alloc_pic_count;
5659 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5660 pic = hevc->m_PIC[i];
5661 if (pic && pic->index == -1) {
5662 pic->index = i;
5663 pic->BUF_index = -1;
5664 pic->width = hevc->pic_w;
5665 pic->height = hevc->pic_h;
5666 new_count++;
5667 if (new_count >=
5668 work_pic_num)
5669 break;
5670 }
5671 }
5672
5673 }
5674 dealloc_unused_buf(hevc);
5675 if (get_alloc_pic_count(hevc)
5676 != alloc_pic_count) {
5677 hevc_print_cont(hevc, 0,
5678 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5679 __func__,
5680 work_pic_num,
5681 alloc_pic_count,
5682 get_alloc_pic_count(hevc));
5683 }
5684}
5685
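/*
 * get_new_pic() - pick a free picture for the next frame: choose the
 * unreferenced, not-yet-output picture with the lowest POC that matches
 * the current size, allocate its buffer/canvas on first use, attach an
 * MV buffer (and MMU pages when hevc->mmu_enable is set), and reset its
 * per-frame state from the slice parameters.
 */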
5686static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5687 union param_u *rpm_param)
5688{
5689 struct vdec_s *vdec = hw_to_vdec(hevc);
5690 struct PIC_s *new_pic = NULL;
5691 struct PIC_s *pic;
5692 int i;
5693 int ret;
5694
5695 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5696 pic = hevc->m_PIC[i];
5697 if (pic == NULL || pic->index == -1)
5698 continue;
5699
5700 if (pic->output_mark == 0 && pic->referenced == 0
5701 && pic->output_ready == 0
5702 && pic->width == hevc->pic_w
5703 && pic->height == hevc->pic_h
5704 && pic->vf_ref == 0
5705 ) {
5706 if (new_pic) {
5707 if (new_pic->POC != INVALID_POC) {
5708 if (pic->POC == INVALID_POC ||
5709 pic->POC < new_pic->POC)
5710 new_pic = pic;
5711 }
5712 } else
5713 new_pic = pic;
5714 }
5715 }
5716
5717 if (new_pic == NULL)
5718 return NULL;
5719
5720 if (new_pic->BUF_index < 0) {
5721 if (alloc_buf(hevc) < 0)
5722 return NULL;
5723 else {
5724 if (config_pic(hevc, new_pic) < 0) {
5725 dealloc_pic_buf(hevc, new_pic);
5726 return NULL;
5727 }
5728 }
5729 new_pic->width = hevc->pic_w;
5730 new_pic->height = hevc->pic_h;
5731 set_canvas(hevc, new_pic);
5732
5733 init_pic_list_hw(hevc);
5734 }
5735
5736 if (new_pic) {
5737 new_pic->double_write_mode =
5738 get_double_write_mode(hevc);
5739 if (new_pic->double_write_mode)
5740 set_canvas(hevc, new_pic);
5741
5742#ifdef TEST_NO_BUF
5743 if (test_flag) {
5744 test_flag = 0;
5745 return NULL;
5746 } else
5747 test_flag = 1;
5748#endif
5749 if (get_mv_buf(hevc, new_pic) < 0)
5750 return NULL;
5751
5752 if (hevc->mmu_enable) {
5753 ret = H265_alloc_mmu(hevc, new_pic,
5754 rpm_param->p.bit_depth,
5755 hevc->frame_mmu_map_addr);
5756 if (ret != 0) {
5757 put_mv_buf(hevc, new_pic);
5758 hevc_print(hevc, 0,
5759 "can't alloc need mmu1,idx %d ret =%d\n",
5760 new_pic->decode_idx,
5761 ret);
5762 return NULL;
5763 }
5764 }
5765 new_pic->referenced = 1;
5766 new_pic->decode_idx = hevc->decode_idx;
5767 new_pic->slice_idx = 0;
5768 new_pic->referenced = 1;
5769 new_pic->output_mark = 0;
5770 new_pic->recon_mark = 0;
5771 new_pic->error_mark = 0;
5772 new_pic->dis_mark = 0;
5773 /* new_pic->output_ready = 0; */
5774 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5775 new_pic->ip_mode = (!new_pic->num_reorder_pic &&
5776 !(vdec->slave || vdec->master) &&
5777 !disable_ip_mode) ? true : false;
5778 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5779 new_pic->POC = hevc->curr_POC;
5780 new_pic->pic_struct = hevc->curr_pic_struct;
5781 if (new_pic->aux_data_buf)
5782 release_aux_data(hevc, new_pic);
5783 new_pic->mem_saving_mode =
5784 hevc->mem_saving_mode;
5785 new_pic->bit_depth_luma =
5786 hevc->bit_depth_luma;
5787 new_pic->bit_depth_chroma =
5788 hevc->bit_depth_chroma;
5789 new_pic->video_signal_type =
5790 hevc->video_signal_type;
5791
5792 new_pic->conformance_window_flag =
5793 hevc->param.p.conformance_window_flag;
5794 new_pic->conf_win_left_offset =
5795 hevc->param.p.conf_win_left_offset;
5796 new_pic->conf_win_right_offset =
5797 hevc->param.p.conf_win_right_offset;
5798 new_pic->conf_win_top_offset =
5799 hevc->param.p.conf_win_top_offset;
5800 new_pic->conf_win_bottom_offset =
5801 hevc->param.p.conf_win_bottom_offset;
5802 new_pic->chroma_format_idc =
5803 hevc->param.p.chroma_format_idc;
5804
5805 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5806 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5807 __func__, new_pic->index,
5808 new_pic->BUF_index, new_pic->decode_idx,
5809 new_pic->POC);
5810
5811 }
5812 if (pic_list_debug & 0x1) {
5813 dump_pic_list(hevc);
5814 pr_err("\n*******************************************\n");
5815 }
5816
5817 return new_pic;
5818}
5819
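/*
 * v4l_get_new_pic() - V4L2 variant of get_new_pic(): scan the capture
 * buffer pool, reuse a free buffer already owned by the decoder or take
 * one handed over from the m2m queue (allocating and configuring it
 * first), then attach MV/MMU resources and reset the per-frame state.
 */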
5820static struct PIC_s *v4l_get_new_pic(struct hevc_state_s *hevc,
5821 union param_u *rpm_param)
5822{
5823 struct vdec_s *vdec = hw_to_vdec(hevc);
5824 int ret;
5825 struct aml_vcodec_ctx * v4l = hevc->v4l2_ctx;
5826 struct v4l_buff_pool *pool = &v4l->cap_pool;
5827 struct PIC_s *new_pic = NULL;
5828 struct PIC_s *pic = NULL;
5829 int i;
5830
5831 for (i = 0; i < pool->in; ++i) {
5832 u32 state = (pool->seq[i] >> 16);
5833 u32 index = (pool->seq[i] & 0xffff);
5834
5835 switch (state) {
5836 case V4L_CAP_BUFF_IN_DEC:
5837 pic = hevc->m_PIC[i];
5838 if (pic && (pic->index != -1) &&
5839 (pic->output_mark == 0) &&
5840 (pic->referenced == 0) &&
5841 (pic->output_ready == 0) &&
5842 (pic->width == hevc->pic_w) &&
5843 (pic->height == hevc->pic_h) &&
5844 (pic->vf_ref == 0) &&
5845 pic->cma_alloc_addr) {
5846 new_pic = pic;
5847 }
5848 break;
5849 case V4L_CAP_BUFF_IN_M2M:
5850 pic = hevc->m_PIC[index];
5851 pic->width = hevc->pic_w;
5852 pic->height = hevc->pic_h;
5853 if ((pic->index != -1) &&
5854 !v4l_alloc_buf(hevc, pic)) {
5855 v4l_config_pic(hevc, pic);
5856 init_pic_list_hw(hevc);
5857 new_pic = pic;
5858 }
5859 break;
5860 default:
5861 pr_err("v4l buffer state err %d.\n", state);
5862 break;
5863 }
5864
5865 if (new_pic)
5866 break;
5867 }
5868
5869 if (new_pic == NULL)
5870 return NULL;
5871
5872 new_pic->double_write_mode = get_double_write_mode(hevc);
5873 if (new_pic->double_write_mode)
5874 set_canvas(hevc, new_pic);
5875
5876 if (get_mv_buf(hevc, new_pic) < 0)
5877 return NULL;
5878
5879 if (hevc->mmu_enable) {
5880 ret = H265_alloc_mmu(hevc, new_pic,
5881 rpm_param->p.bit_depth,
5882 hevc->frame_mmu_map_addr);
5883 if (ret != 0) {
5884 put_mv_buf(hevc, new_pic);
5885 hevc_print(hevc, 0,
5886 "can't alloc need mmu1,idx %d ret =%d\n",
5887 new_pic->decode_idx, ret);
5888 return NULL;
5889 }
5890 }
5891
5892 new_pic->referenced = 1;
5893 new_pic->decode_idx = hevc->decode_idx;
5894 new_pic->slice_idx = 0;
5895 new_pic->referenced = 1;
5896 new_pic->output_mark = 0;
5897 new_pic->recon_mark = 0;
5898 new_pic->error_mark = 0;
5899 new_pic->dis_mark = 0;
5900 /* new_pic->output_ready = 0; */
5901 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5902 new_pic->ip_mode = (!new_pic->num_reorder_pic &&
5903 !(vdec->slave || vdec->master) &&
5904 !disable_ip_mode) ? true : false;
5905 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5906 new_pic->POC = hevc->curr_POC;
5907 new_pic->pic_struct = hevc->curr_pic_struct;
5908
5909 if (new_pic->aux_data_buf)
5910 release_aux_data(hevc, new_pic);
5911 new_pic->mem_saving_mode =
5912 hevc->mem_saving_mode;
5913 new_pic->bit_depth_luma =
5914 hevc->bit_depth_luma;
5915 new_pic->bit_depth_chroma =
5916 hevc->bit_depth_chroma;
5917 new_pic->video_signal_type =
5918 hevc->video_signal_type;
5919
5920 new_pic->conformance_window_flag =
5921 hevc->param.p.conformance_window_flag;
5922 new_pic->conf_win_left_offset =
5923 hevc->param.p.conf_win_left_offset;
5924 new_pic->conf_win_right_offset =
5925 hevc->param.p.conf_win_right_offset;
5926 new_pic->conf_win_top_offset =
5927 hevc->param.p.conf_win_top_offset;
5928 new_pic->conf_win_bottom_offset =
5929 hevc->param.p.conf_win_bottom_offset;
5930 new_pic->chroma_format_idc =
5931 hevc->param.p.chroma_format_idc;
5932
5933 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5934 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5935 __func__, new_pic->index,
5936 new_pic->BUF_index, new_pic->decode_idx,
5937 new_pic->POC);
5938
5939 return new_pic;
5940}
5941
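/* Count the pictures currently flagged output_ready (queued for display). */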
5942static int get_display_pic_num(struct hevc_state_s *hevc)
5943{
5944 int i;
5945 struct PIC_s *pic;
5946 int num = 0;
5947
5948 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5949 pic = hevc->m_PIC[i];
5950 if (pic == NULL ||
5951 pic->index == -1)
5952 continue;
5953
5954 if (pic->output_ready == 1)
5955 num++;
5956 }
5957 return num;
5958}
5959
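/*
 * flush_output() - mark the given picture for output, then drain
 * output_pic() until nothing is left: error-marked or debug-filtered
 * frames are recycled and counted as dropped, the rest are sent to
 * prepare_display_buf(). Finally clear the referenced flag of all
 * buffers.
 */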
5960static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5961{
5962 struct PIC_s *pic_display;
5963
5964 if (pic) {
5965 /*PB skip control */
5966 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5967 /* start decoding after first I */
5968 hevc->ignore_bufmgr_error |= 0x1;
5969 }
5970 if (hevc->ignore_bufmgr_error & 1) {
5971 if (hevc->PB_skip_count_after_decoding > 0)
5972 hevc->PB_skip_count_after_decoding--;
5973 else {
5974 /* start displaying */
5975 hevc->ignore_bufmgr_error |= 0x2;
5976 }
5977 }
5978 if (pic->POC != INVALID_POC && !pic->ip_mode)
5979 pic->output_mark = 1;
5980 pic->recon_mark = 1;
5981 }
5982 do {
5983 pic_display = output_pic(hevc, 1);
5984
5985 if (pic_display) {
5986 pic_display->referenced = 0;
5987 put_mv_buf(hevc, pic_display);
5988 if ((pic_display->error_mark
5989 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5990 || (get_dbg_flag(hevc) &
5991 H265_DEBUG_DISPLAY_CUR_FRAME)
5992 || (get_dbg_flag(hevc) &
5993 H265_DEBUG_NO_DISPLAY)) {
5994 pic_display->output_ready = 0;
5995 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5996 hevc_print(hevc, 0,
5997 "[BM] Display: POC %d, ",
5998 pic_display->POC);
5999 hevc_print_cont(hevc, 0,
6000 "decoding index %d ==> ",
6001 pic_display->decode_idx);
6002 hevc_print_cont(hevc, 0,
6003 "Debug mode or error, recycle it\n");
6004 }
6005 /*
6006 * Here the pic/frame error_mark is 1,
6007 * and it won't be displayed, so increase
6008 * the drop count
6009 */
6010 hevc->gvs->drop_frame_count++;
6011 /* error frame count also needs to increase */
6012 hevc->gvs->error_frame_count++;
6013 } else {
6014 if (hevc->i_only & 0x1
6015 && pic_display->slice_type != 2) {
6016 pic_display->output_ready = 0;
6017 } else {
6018 prepare_display_buf(hevc, pic_display);
6019 if (get_dbg_flag(hevc)
6020 & H265_DEBUG_BUFMGR) {
6021 hevc_print(hevc, 0,
6022 "[BM] flush Display: POC %d, ",
6023 pic_display->POC);
6024 hevc_print_cont(hevc, 0,
6025 "decoding index %d\n",
6026 pic_display->decode_idx);
6027 }
6028 }
6029 }
6030 }
6031 } while (pic_display);
6032 clear_referenced_flag(hevc);
6033}
6034
6035/*
6036* dv_meta_flag: 0, all aux data; 1, dolby meta only; 2, exclude dolby meta
6037*/
6038static void set_aux_data(struct hevc_state_s *hevc,
6039 struct PIC_s *pic, unsigned char suffix_flag,
6040 unsigned char dv_meta_flag)
6041{
6042 int i;
6043 unsigned short *aux_adr;
6044 unsigned int size_reg_val =
6045 READ_VREG(HEVC_AUX_DATA_SIZE);
6046 unsigned int aux_count = 0;
6047 int aux_size = 0;
6048 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
6049 return;
6050
6051 if (hevc->aux_data_dirty ||
6052 hevc->m_ins_flag == 0) {
6053
6054 hevc->aux_data_dirty = 0;
6055 }
6056
6057 if (suffix_flag) {
6058 aux_adr = (unsigned short *)
6059 (hevc->aux_addr +
6060 hevc->prefix_aux_size);
6061 aux_count =
6062 ((size_reg_val & 0xffff) << 4)
6063 >> 1;
6064 aux_size =
6065 hevc->suffix_aux_size;
6066 } else {
6067 aux_adr =
6068 (unsigned short *)hevc->aux_addr;
6069 aux_count =
6070 ((size_reg_val >> 16) << 4)
6071 >> 1;
6072 aux_size =
6073 hevc->prefix_aux_size;
6074 }
6075 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
6076 hevc_print(hevc, 0,
6077 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
6078 __func__, pic, pic->aux_data_size,
6079 aux_count, suffix_flag, dv_meta_flag);
6080 }
6081 if (aux_size > 0 && aux_count > 0) {
6082 int heads_size = 0;
6083 int new_size;
6084 char *new_buf;
6085
6086 for (i = 0; i < aux_count; i++) {
6087 unsigned char tag = aux_adr[i] >> 8;
6088 if (tag != 0 && tag != 0xff) {
6089 if (dv_meta_flag == 0)
6090 heads_size += 8;
6091 else if (dv_meta_flag == 1 && tag == 0x1)
6092 heads_size += 8;
6093 else if (dv_meta_flag == 2 && tag != 0x1)
6094 heads_size += 8;
6095 }
6096 }
6097 new_size = pic->aux_data_size + aux_count + heads_size;
6098 new_buf = vzalloc(new_size);
6099 if (new_buf) {
6100 unsigned char valid_tag = 0;
6101 unsigned char *h =
6102 new_buf +
6103 pic->aux_data_size;
6104 unsigned char *p = h + 8;
6105 int len = 0;
6106 int padding_len = 0;
6107
6108 if (pic->aux_data_buf) {
6109 memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
6110 vfree(pic->aux_data_buf);
6111 }
6112 pic->aux_data_buf = new_buf;
6113
6114 for (i = 0; i < aux_count; i += 4) {
6115 int ii;
6116 unsigned char tag = aux_adr[i + 3] >> 8;
6117 if (tag != 0 && tag != 0xff) {
6118 if (dv_meta_flag == 0)
6119 valid_tag = 1;
6120 else if (dv_meta_flag == 1
6121 && tag == 0x1)
6122 valid_tag = 1;
6123 else if (dv_meta_flag == 2
6124 && tag != 0x1)
6125 valid_tag = 1;
6126 else
6127 valid_tag = 0;
6128 if (valid_tag && len > 0) {
6129 pic->aux_data_size +=
6130 (len + 8);
6131 h[0] = (len >> 24)
6132 & 0xff;
6133 h[1] = (len >> 16)
6134 & 0xff;
6135 h[2] = (len >> 8)
6136 & 0xff;
6137 h[3] = (len >> 0)
6138 & 0xff;
6139 h[6] =
6140 (padding_len >> 8)
6141 & 0xff;
6142 h[7] = (padding_len)
6143 & 0xff;
6144 h += (len + 8);
6145 p += 8;
6146 len = 0;
6147 padding_len = 0;
6148 }
6149 if (valid_tag) {
6150 h[4] = tag;
6151 h[5] = 0;
6152 h[6] = 0;
6153 h[7] = 0;
6154 }
6155 }
6156 if (valid_tag) {
6157 for (ii = 0; ii < 4; ii++) {
6158 unsigned short aa =
6159 aux_adr[i + 3
6160 - ii];
6161 *p = aa & 0xff;
6162 p++;
6163 len++;
6164 /*if ((aa >> 8) == 0xff)
6165 padding_len++;*/
6166 }
6167 }
6168 }
6169 if (len > 0) {
6170 pic->aux_data_size += (len + 8);
6171 h[0] = (len >> 24) & 0xff;
6172 h[1] = (len >> 16) & 0xff;
6173 h[2] = (len >> 8) & 0xff;
6174 h[3] = (len >> 0) & 0xff;
6175 h[6] = (padding_len >> 8) & 0xff;
6176 h[7] = (padding_len) & 0xff;
6177 }
6178 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
6179 hevc_print(hevc, 0,
6180 "aux: (size %d) suffix_flag %d\n",
6181 pic->aux_data_size, suffix_flag);
6182 for (i = 0; i < pic->aux_data_size; i++) {
6183 hevc_print_cont(hevc, 0,
6184 "%02x ", pic->aux_data_buf[i]);
6185 if (((i + 1) & 0xf) == 0)
6186 hevc_print_cont(hevc, 0, "\n");
6187 }
6188 hevc_print_cont(hevc, 0, "\n");
6189 }
6190
6191 } else {
6192 hevc_print(hevc, 0, "new buf alloc failed\n");
6193 if (pic->aux_data_buf)
6194 vfree(pic->aux_data_buf);
6195 pic->aux_data_buf = NULL;
6196 pic->aux_data_size = 0;
6197 }
6198 }
6199
6200}
6201
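/* Free the picture's aux data buffer (SEI/DV metadata) and reset its size. */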
6202static void release_aux_data(struct hevc_state_s *hevc,
6203 struct PIC_s *pic)
6204{
6205 if (pic->aux_data_buf) {
6206 vfree(pic->aux_data_buf);
6207 if ((run_count[hevc->index] & 63) == 0)
6208 vm_unmap_aliases();
6209 }
6210 pic->aux_data_buf = NULL;
6211 pic->aux_data_size = 0;
6212}
6213
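/*
 * hevc_pre_pic() - finish the previously decoded picture when a new one
 * starts: mark it for output/display, trim its MMU allocation down to
 * the 4k pages actually used, and drain displayable pictures from the
 * DPB. For IDR pictures the whole output queue is flushed instead.
 */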
6214static inline void hevc_pre_pic(struct hevc_state_s *hevc,
6215 struct PIC_s *pic)
6216{
6217
6218 /* prev pic */
6219 /*if (hevc->curr_POC != 0) {*/
6220 int decoded_poc = hevc->iPrevPOC;
6221#ifdef MULTI_INSTANCE_SUPPORT
6222 if (hevc->m_ins_flag) {
6223 decoded_poc = hevc->decoded_poc;
6224 hevc->decoded_poc = INVALID_POC;
6225 }
6226#endif
6227 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
6228 && hevc->m_nalUnitType !=
6229 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6230 struct PIC_s *pic_display;
6231
6232 pic = get_pic_by_POC(hevc, decoded_poc);
6233 if (pic && (pic->POC != INVALID_POC)) {
6234 struct vdec_s *vdec = hw_to_vdec(hevc);
6235
6236 /*PB skip control */
6237 if (pic->error_mark == 0
6238 && hevc->PB_skip_mode == 1) {
6239 /* start decoding after
6240 * first I
6241 */
6242 hevc->ignore_bufmgr_error |= 0x1;
6243 }
6244 if (hevc->ignore_bufmgr_error & 1) {
6245 if (hevc->PB_skip_count_after_decoding > 0) {
6246 hevc->PB_skip_count_after_decoding--;
6247 } else {
6248 /* start displaying */
6249 hevc->ignore_bufmgr_error |= 0x2;
6250 }
6251 }
6252 if (hevc->mmu_enable
6253 && ((hevc->double_write_mode & 0x10) == 0)) {
6254 if (!hevc->m_ins_flag) {
6255 hevc->used_4k_num =
6256 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
6257
6258 if ((!is_skip_decoding(hevc, pic)) &&
6259 (hevc->used_4k_num >= 0) &&
6260 (hevc->cur_pic->scatter_alloc
6261 == 1)) {
6262 hevc_print(hevc,
6263 H265_DEBUG_BUFMGR_MORE,
6264 "%s pic index %d scatter_alloc %d page_start %d\n",
6265 "decoder_mmu_box_free_idx_tail",
6266 hevc->cur_pic->index,
6267 hevc->cur_pic->scatter_alloc,
6268 hevc->used_4k_num);
6269 hevc_mmu_dma_check(hw_to_vdec(hevc));
6270 decoder_mmu_box_free_idx_tail(
6271 hevc->mmu_box,
6272 hevc->cur_pic->index,
6273 hevc->used_4k_num);
6274 hevc->cur_pic->scatter_alloc
6275 = 2;
6276 }
6277 hevc->used_4k_num = -1;
6278 }
6279 }
6280 if (!pic->ip_mode)
6281 pic->output_mark = 1;
6282 pic->recon_mark = 1;
6283 pic->dis_mark = 1;
6284 if (vdec->mvfrm) {
6285 pic->frame_size = vdec->mvfrm->frame_size;
6286 pic->hw_decode_time = (u32)vdec->mvfrm->hw_decode_time;
6287 }
6288 }
6289 do {
6290 pic_display = output_pic(hevc, 0);
6291
6292 if (pic_display) {
6293 if ((pic_display->error_mark &&
6294 ((hevc->ignore_bufmgr_error &
6295 0x2) == 0))
6296 || (get_dbg_flag(hevc) &
6297 H265_DEBUG_DISPLAY_CUR_FRAME)
6298 || (get_dbg_flag(hevc) &
6299 H265_DEBUG_NO_DISPLAY)) {
6300 pic_display->output_ready = 0;
6301 if (get_dbg_flag(hevc) &
6302 H265_DEBUG_BUFMGR) {
6303 hevc_print(hevc, 0,
6304 "[BM] Display: POC %d, ",
6305 pic_display->POC);
6306 hevc_print_cont(hevc, 0,
6307 "decoding index %d ==> ",
6308 pic_display->
6309 decode_idx);
6310 hevc_print_cont(hevc, 0,
6311 "Debug or err,recycle it\n");
6312 }
6313 /*
6314 * Here the pic/frame error_mark is 1,
6315 * and it won't be displayed, so increase
6316 * the drop count
6317 */
6318 hevc->gvs->drop_frame_count++;
6319 /* error frame count also needs to increase */
6320 hevc->gvs->error_frame_count++;
6321 } else {
6322 if (hevc->i_only & 0x1
6323 && pic_display->
6324 slice_type != 2) {
6325 pic_display->output_ready = 0;
6326 } else {
6327 prepare_display_buf
6328 (hevc,
6329 pic_display);
6330 if (get_dbg_flag(hevc) &
6331 H265_DEBUG_BUFMGR) {
6332 hevc_print(hevc, 0,
6333 "[BM] Display: POC %d, ",
6334 pic_display->POC);
6335 hevc_print_cont(hevc, 0,
6336 "decoding index %d\n",
6337 pic_display->
6338 decode_idx);
6339 }
6340 }
6341 }
6342 }
6343 } while (pic_display);
6344 } else {
6345 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6346 hevc_print(hevc, 0,
6347 "[BM] current pic is IDR, ");
6348 hevc_print(hevc, 0,
6349 "clear referenced flag of all buffers\n");
6350 }
6351 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6352 dump_pic_list(hevc);
6353 if (hevc->vf_pre_count == 1 &&
6354 hevc->first_pic_flag == 1) {
6355 hevc->first_pic_flag = 2;
6356 pic = NULL;
6357 }
6358 else
6359 pic = get_pic_by_POC(hevc, decoded_poc);
6360
6361 flush_output(hevc, pic);
6362 }
6363
6364}
6365
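/*
 * Error check against the previous picture's LCU grid: if the last
 * decoded LCU index did not reach the end of that grid, or the header
 * was broken, or the stream over-decoded, mark the picture as erroneous.
 * The current LCU grid size is latched for the next call.
 */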
6366static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
6367 int decoded_lcu)
6368{
6369 int current_lcu_idx = decoded_lcu;
6370 if (decoded_lcu < 0)
6371 return;
6372
6373 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6374 hevc_print(hevc, 0,
6375 "cur lcu idx = %d, (total %d)\n",
6376 current_lcu_idx, hevc->lcu_total);
6377 }
6378 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6379 if (hevc->first_pic_after_recover) {
6380 if (current_lcu_idx !=
6381 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6382 hevc->cur_pic->error_mark = 1;
6383 } else {
6384 if (hevc->lcu_x_num_pre != 0
6385 && hevc->lcu_y_num_pre != 0
6386 && current_lcu_idx != 0
6387 && current_lcu_idx <
6388 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6389 hevc->cur_pic->error_mark = 1;
6390 }
6391 if (hevc->cur_pic->error_mark) {
6392 if (print_lcu_error)
6393 hevc_print(hevc, 0,
6394 "cur lcu idx = %d, (total %d), set error_mark\n",
6395 current_lcu_idx,
6396 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
6397 if (is_log_enable(hevc))
6398 add_log(hevc,
6399 "cur lcu idx = %d, (total %d), set error_mark",
6400 current_lcu_idx,
6401 hevc->lcu_x_num_pre *
6402 hevc->lcu_y_num_pre);
6403
6404 }
6405
6406 }
6407 if (hevc->cur_pic && hevc->head_error_flag) {
6408 hevc->cur_pic->error_mark = 1;
6409 hevc_print(hevc, 0,
6410 "head has error, set error_mark\n");
6411 }
6412
6413 if ((error_handle_policy & 0x80) == 0) {
6414 if (hevc->over_decode && hevc->cur_pic) {
6415 hevc_print(hevc, 0,
6416 "over decode, set error_mark\n");
6417 hevc->cur_pic->error_mark = 1;
6418 }
6419 }
6420
6421 hevc->lcu_x_num_pre = hevc->lcu_x_num;
6422 hevc->lcu_y_num_pre = hevc->lcu_y_num;
6423}
6424
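/*
 * Same check against the current picture's LCU grid: mark the picture
 * as erroneous when it was not fully decoded, flush stalled output when
 * its POC has fallen too far behind iPrevPOC, and honour head and
 * over-decode errors.
 */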
6425static void check_pic_decoded_error(struct hevc_state_s *hevc,
6426 int decoded_lcu)
6427{
6428 int current_lcu_idx = decoded_lcu;
6429 if (decoded_lcu < 0)
6430 return;
6431
6432 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6433 hevc_print(hevc, 0,
6434 "cur lcu idx = %d, (total %d)\n",
6435 current_lcu_idx, hevc->lcu_total);
6436 }
6437 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6438 if (hevc->lcu_x_num != 0
6439 && hevc->lcu_y_num != 0
6440 && current_lcu_idx != 0
6441 && current_lcu_idx <
6442 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6443 hevc->cur_pic->error_mark = 1;
6444
6445 if (hevc->cur_pic->error_mark) {
6446 if (print_lcu_error)
6447 hevc_print(hevc, 0,
6448 "cur lcu idx = %d, (total %d), set error_mark\n",
6449 current_lcu_idx,
6450 hevc->lcu_x_num*hevc->lcu_y_num);
6451 if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && ( hevc->cur_pic->slice_type == 0)
6452 && ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6453 hevc_print(hevc, 0,
6454 "Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6455 hevc->sps_num_reorder_pics_0,hevc->cur_pic->POC ,hevc->iPrevPOC);
6456 flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC ));
6457 }
6458 if (is_log_enable(hevc))
6459 add_log(hevc,
6460 "cur lcu idx = %d, (total %d), set error_mark",
6461 current_lcu_idx,
6462 hevc->lcu_x_num *
6463 hevc->lcu_y_num);
6464
6465 }
6466
6467 }
6468 if (hevc->cur_pic && hevc->head_error_flag) {
6469 hevc->cur_pic->error_mark = 1;
6470 hevc_print(hevc, 0,
6471 "head has error, set error_mark\n");
6472 }
6473
6474 if ((error_handle_policy & 0x80) == 0) {
6475 if (hevc->over_decode && hevc->cur_pic) {
6476 hevc_print(hevc, 0,
6477 "over decode, set error_mark\n");
6478 hevc->cur_pic->error_mark = 1;
6479 }
6480 }
6481}
6482
6483/* Only after one field or one frame has been decoded
6484 * can we call this function to get QoS info. */
6485static void get_picture_qos_info(struct hevc_state_s *hevc)
6486{
6487 struct PIC_s *picture = hevc->cur_pic;
6488
6489/*
6490#define DEBUG_QOS
6491*/
6492
6493 if (!hevc->cur_pic)
6494 return;
6495
6496 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6497 unsigned char a[3];
6498 unsigned char i, j, t;
6499 unsigned long data;
6500
6501 data = READ_VREG(HEVC_MV_INFO);
6502 if (picture->slice_type == I_SLICE)
6503 data = 0;
6504 a[0] = data & 0xff;
6505 a[1] = (data >> 8) & 0xff;
6506 a[2] = (data >> 16) & 0xff;
6507
6508 for (i = 0; i < 3; i++)
6509 for (j = i+1; j < 3; j++) {
6510 if (a[j] < a[i]) {
6511 t = a[j];
6512 a[j] = a[i];
6513 a[i] = t;
6514 } else if (a[j] == a[i]) {
6515 a[i]++;
6516 t = a[j];
6517 a[j] = a[i];
6518 a[i] = t;
6519 }
6520 }
6521 picture->max_mv = a[2];
6522 picture->avg_mv = a[1];
6523 picture->min_mv = a[0];
6524#ifdef DEBUG_QOS
6525 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6526 data, a[0], a[1], a[2]);
6527#endif
6528
6529 data = READ_VREG(HEVC_QP_INFO);
6530 a[0] = data & 0x1f;
6531 a[1] = (data >> 8) & 0x3f;
6532 a[2] = (data >> 16) & 0x7f;
6533
6534 for (i = 0; i < 3; i++)
6535 for (j = i+1; j < 3; j++) {
6536 if (a[j] < a[i]) {
6537 t = a[j];
6538 a[j] = a[i];
6539 a[i] = t;
6540 } else if (a[j] == a[i]) {
6541 a[i]++;
6542 t = a[j];
6543 a[j] = a[i];
6544 a[i] = t;
6545 }
6546 }
6547 picture->max_qp = a[2];
6548 picture->avg_qp = a[1];
6549 picture->min_qp = a[0];
6550#ifdef DEBUG_QOS
6551 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6552 data, a[0], a[1], a[2]);
6553#endif
6554
6555 data = READ_VREG(HEVC_SKIP_INFO);
6556 a[0] = data & 0x1f;
6557 a[1] = (data >> 8) & 0x3f;
6558 a[2] = (data >> 16) & 0x7f;
6559
6560 for (i = 0; i < 3; i++)
6561 for (j = i+1; j < 3; j++) {
6562 if (a[j] < a[i]) {
6563 t = a[j];
6564 a[j] = a[i];
6565 a[i] = t;
6566 } else if (a[j] == a[i]) {
6567 a[i]++;
6568 t = a[j];
6569 a[j] = a[i];
6570 a[i] = t;
6571 }
6572 }
6573 picture->max_skip = a[2];
6574 picture->avg_skip = a[1];
6575 picture->min_skip = a[0];
6576
6577#ifdef DEBUG_QOS
6578 hevc_print(hevc, 0,
6579 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6580 data, a[0], a[1], a[2]);
6581#endif
6582 } else {
6583 uint32_t blk88_y_count;
6584 uint32_t blk88_c_count;
6585 uint32_t blk22_mv_count;
6586 uint32_t rdata32;
6587 int32_t mv_hi;
6588 int32_t mv_lo;
6589 uint32_t rdata32_l;
6590 uint32_t mvx_L0_hi;
6591 uint32_t mvy_L0_hi;
6592 uint32_t mvx_L1_hi;
6593 uint32_t mvy_L1_hi;
6594 int64_t value;
6595 uint64_t temp_value;
6596#ifdef DEBUG_QOS
6597 int pic_number = picture->POC;
6598#endif
6599
6600 picture->max_mv = 0;
6601 picture->avg_mv = 0;
6602 picture->min_mv = 0;
6603
6604 picture->max_skip = 0;
6605 picture->avg_skip = 0;
6606 picture->min_skip = 0;
6607
6608 picture->max_qp = 0;
6609 picture->avg_qp = 0;
6610 picture->min_qp = 0;
6611
6612
6613
6614#ifdef DEBUG_QOS
6615 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6616 picture->slice_type,
6617 picture->POC);
6618#endif
6619 /* set rd_idx to 0 */
6620 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6621
6622 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6623 if (blk88_y_count == 0) {
6624#ifdef DEBUG_QOS
6625 hevc_print(hevc, 0,
6626 "[Picture %d Quality] NO Data yet.\n",
6627 pic_number);
6628#endif
6629 /* reset all counts */
6630 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6631 return;
6632 }
6633 /* qp_y_sum */
6634 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6635#ifdef DEBUG_QOS
6636 hevc_print(hevc, 0,
6637 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6638 pic_number, rdata32/blk88_y_count,
6639 rdata32, blk88_y_count);
6640#endif
6641 picture->avg_qp = rdata32/blk88_y_count;
6642 /* intra_y_count */
6643 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6644#ifdef DEBUG_QOS
6645 hevc_print(hevc, 0,
6646 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6647 pic_number, rdata32*100/blk88_y_count,
6648 '%', rdata32);
6649#endif
6650 /* skipped_y_count */
6651 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6652#ifdef DEBUG_QOS
6653 hevc_print(hevc, 0,
6654 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6655 pic_number, rdata32*100/blk88_y_count,
6656 '%', rdata32);
6657#endif
6658 picture->avg_skip = rdata32*100/blk88_y_count;
6659 /* coeff_non_zero_y_count */
6660 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6661#ifdef DEBUG_QOS
6662 hevc_print(hevc, 0,
6663 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6664 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6665 '%', rdata32);
6666#endif
6667 /* blk66_c_count */
6668 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6669 if (blk88_c_count == 0) {
6670#ifdef DEBUG_QOS
6671 hevc_print(hevc, 0,
6672 "[Picture %d Quality] NO Data yet.\n",
6673 pic_number);
6674#endif
6675 /* reset all counts */
6676 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6677 return;
6678 }
6679 /* qp_c_sum */
6680 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6681#ifdef DEBUG_QOS
6682 hevc_print(hevc, 0,
6683 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6684 pic_number, rdata32/blk88_c_count,
6685 rdata32, blk88_c_count);
6686#endif
6687 /* intra_c_count */
6688 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6689#ifdef DEBUG_QOS
6690 hevc_print(hevc, 0,
6691 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6692 pic_number, rdata32*100/blk88_c_count,
6693 '%', rdata32);
6694#endif
6695 /* skipped_cu_c_count */
6696 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6697#ifdef DEBUG_QOS
6698 hevc_print(hevc, 0,
6699 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6700 pic_number, rdata32*100/blk88_c_count,
6701 '%', rdata32);
6702#endif
6703 /* coeff_non_zero_c_count */
6704 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6705#ifdef DEBUG_QOS
6706 hevc_print(hevc, 0,
6707 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6708 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6709 '%', rdata32);
6710#endif
6711
6712 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6713 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6714 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6715#ifdef DEBUG_QOS
6716 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6717 pic_number, (rdata32>>0)&0xff);
6718#endif
6719 picture->min_qp = (rdata32>>0)&0xff;
6720
6721#ifdef DEBUG_QOS
6722 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6723 pic_number, (rdata32>>8)&0xff);
6724#endif
6725 picture->max_qp = (rdata32>>8)&0xff;
6726
6727#ifdef DEBUG_QOS
6728 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6729 pic_number, (rdata32>>16)&0xff);
6730 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6731 pic_number, (rdata32>>24)&0xff);
6732#endif
6733
6734 /* blk22_mv_count */
6735 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6736 if (blk22_mv_count == 0) {
6737#ifdef DEBUG_QOS
6738 hevc_print(hevc, 0,
6739 "[Picture %d Quality] NO MV Data yet.\n",
6740 pic_number);
6741#endif
6742 /* reset all counts */
6743 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6744 return;
6745 }
6746 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6747 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6748 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6749 /* should all be 0x00 or 0xff */
6750#ifdef DEBUG_QOS
6751 hevc_print(hevc, 0,
6752 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6753 pic_number, rdata32);
6754#endif
6755 mvx_L0_hi = ((rdata32>>0)&0xff);
6756 mvy_L0_hi = ((rdata32>>8)&0xff);
6757 mvx_L1_hi = ((rdata32>>16)&0xff);
6758 mvy_L1_hi = ((rdata32>>24)&0xff);
6759
6760 /* mvx_L0_count[31:0] */
6761 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6762 temp_value = mvx_L0_hi;
6763 temp_value = (temp_value << 32) | rdata32_l;
6764
6765 if (mvx_L0_hi & 0x80)
6766 value = 0xFFFFFFF000000000 | temp_value;
6767 else
6768 value = temp_value;
6769 value = div_s64(value, blk22_mv_count);
6770#ifdef DEBUG_QOS
6771 hevc_print(hevc, 0,
6772 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6773 pic_number, (int)value,
6774 value, blk22_mv_count);
6775#endif
6776 picture->avg_mv = value;
6777
6778 /* mvy_L0_count[31:0] */
6779 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6780 temp_value = mvy_L0_hi;
6781 temp_value = (temp_value << 32) | rdata32_l;
6782
6783 if (mvy_L0_hi & 0x80)
6784 value = 0xFFFFFFF000000000 | temp_value;
6785 else
6786 value = temp_value;
6787#ifdef DEBUG_QOS
6788 hevc_print(hevc, 0,
6789 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6790 pic_number, rdata32_l/blk22_mv_count,
6791 value, blk22_mv_count);
6792#endif
6793
6794 /* mvx_L1_count[31:0] */
6795 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6796 temp_value = mvx_L1_hi;
6797 temp_value = (temp_value << 32) | rdata32_l;
6798 if (mvx_L1_hi & 0x80)
6799 value = 0xFFFFFFF000000000 | temp_value;
6800 else
6801 value = temp_value;
6802#ifdef DEBUG_QOS
6803 hevc_print(hevc, 0,
6804 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6805 pic_number, rdata32_l/blk22_mv_count,
6806 value, blk22_mv_count);
6807#endif
6808
6809 /* mvy_L1_count[31:0] */
6810 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6811 temp_value = mvy_L1_hi;
6812 temp_value = (temp_value << 32) | rdata32_l;
6813 if (mvy_L1_hi & 0x80)
6814 value = 0xFFFFFFF000000000 | temp_value;
6815 else
6816 value = temp_value;
6817#ifdef DEBUG_QOS
6818 hevc_print(hevc, 0,
6819 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6820 pic_number, rdata32_l/blk22_mv_count,
6821 value, blk22_mv_count);
6822#endif
6823
6824 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6825 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6826 mv_hi = (rdata32>>16)&0xffff;
6827 if (mv_hi & 0x8000)
6828 mv_hi = 0x8000 - mv_hi;
6829#ifdef DEBUG_QOS
6830 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6831 pic_number, mv_hi);
6832#endif
6833 picture->max_mv = mv_hi;
6834
6835 mv_lo = (rdata32>>0)&0xffff;
6836 if (mv_lo & 0x8000)
6837 mv_lo = 0x8000 - mv_lo;
6838#ifdef DEBUG_QOS
6839 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6840 pic_number, mv_lo);
6841#endif
6842 picture->min_mv = mv_lo;
6843
6844#ifdef DEBUG_QOS
6845 /* {mvy_L0_max, mvy_L0_min} */
6846 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6847 mv_hi = (rdata32>>16)&0xffff;
6848 if (mv_hi & 0x8000)
6849 mv_hi = 0x8000 - mv_hi;
6850 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6851 pic_number, mv_hi);
6852
6853
6854 mv_lo = (rdata32>>0)&0xffff;
6855 if (mv_lo & 0x8000)
6856 mv_lo = 0x8000 - mv_lo;
6857
6858 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6859 pic_number, mv_lo);
6860
6861
6862 /* {mvx_L1_max, mvx_L1_min} */
6863 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6864 mv_hi = (rdata32>>16)&0xffff;
6865 if (mv_hi & 0x8000)
6866 mv_hi = 0x8000 - mv_hi;
6867
6868 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6869 pic_number, mv_hi);
6870
6871
6872 mv_lo = (rdata32>>0)&0xffff;
6873 if (mv_lo & 0x8000)
6874 mv_lo = 0x8000 - mv_lo;
6875
6876 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6877 pic_number, mv_lo);
6878
6879
6880 /* {mvy_L1_max, mvy_L1_min} */
6881 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6882 mv_hi = (rdata32>>16)&0xffff;
6883 if (mv_hi & 0x8000)
6884 mv_hi = 0x8000 - mv_hi;
6885
6886 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6887 pic_number, mv_hi);
6888
6889 mv_lo = (rdata32>>0)&0xffff;
6890 if (mv_lo & 0x8000)
6891 mv_lo = 0x8000 - mv_lo;
6892
6893 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6894 pic_number, mv_lo);
6895#endif
6896
6897 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6898#ifdef DEBUG_QOS
6899 hevc_print(hevc, 0,
6900 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6901 pic_number, rdata32);
6902#endif
6903 /* reset all counts */
6904 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6905 }
6906}
6907
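/*
 * hevc_slice_segment_header_process() - handle one slice segment header
 * delivered through the RPM: validate picture size and bit depth,
 * compute the POC, allocate/select the current picture, build the
 * reference lists and program the MC/MPRED/SAO hardware.
 * Return 0 to start decoding, -1 to wait for a free buffer, 1 to skip a
 * RASL picture, 2 to skip a damaged picture, 3/4 on parameter errors,
 * and 0xf when only buffer management should run.
 */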
6908static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6909 union param_u *rpm_param,
6910 int decode_pic_begin)
6911{
6912 struct vdec_s *vdec = hw_to_vdec(hevc);
6913 int i;
6914 int lcu_x_num_div;
6915 int lcu_y_num_div;
6916 int Col_ref;
6917 int dbg_skip_flag = 0;
6918 struct aml_vcodec_ctx *ctx =
6919 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
6920
6921 if (hevc->is_used_v4l && ctx->param_sets_from_ucode)
6922 hevc->res_ch_flag = 0;
6923
6924 if (hevc->wait_buf == 0) {
6925 hevc->sps_num_reorder_pics_0 =
6926 rpm_param->p.sps_num_reorder_pics_0;
6927 hevc->ip_mode = (!hevc->sps_num_reorder_pics_0 &&
6928 !(vdec->slave || vdec->master) &&
6929 !disable_ip_mode) ? true : false;
6930 hevc->m_temporalId = rpm_param->p.m_temporalId;
6931 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6932 hevc->interlace_flag =
6933 (rpm_param->p.profile_etc >> 2) & 0x1;
6934 hevc->curr_pic_struct =
6935 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6936 if (parser_sei_enable & 0x4) {
6937 hevc->frame_field_info_present_flag =
6938 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6939 }
6940
6941 /* if (interlace_enable == 0 || hevc->m_ins_flag) */
6942 if (interlace_enable == 0)
6943 hevc->interlace_flag = 0;
6944 if (interlace_enable & 0x100)
6945 hevc->interlace_flag = interlace_enable & 0x1;
6946 if (hevc->interlace_flag == 0)
6947 hevc->curr_pic_struct = 0;
6948 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6949 /*
6950 *hevc->m_pocRandomAccess = MAX_INT;
6951 * //add to fix RAP_B_Bossen_1
6952 */
6953 /* } */
6954 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6955 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6956 hevc->slice_segment_addr =
6957 rpm_param->p.slice_segment_address;
6958 if (!rpm_param->p.dependent_slice_segment_flag)
6959 hevc->slice_addr = hevc->slice_segment_addr;
6960 } else {
6961 hevc->slice_segment_addr = 0;
6962 hevc->slice_addr = 0;
6963 }
6964
6965 hevc->iPrevPOC = hevc->curr_POC;
6966 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6967 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6968 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6969 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6970 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6971 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6972 hevc->isNextSliceSegment =
6973 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6974 if (is_oversize_ex(rpm_param->p.pic_width_in_luma_samples,
6975 rpm_param->p.pic_height_in_luma_samples)) {
6976 hevc_print(hevc, 0, "over size : %u x %u.\n",
6977 rpm_param->p.pic_width_in_luma_samples, rpm_param->p.pic_height_in_luma_samples);
6978 if ((!hevc->m_ins_flag) &&
6979 ((debug &
6980 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6981 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6982 H265_DEBUG_DIS_SYS_ERROR_PROC);
6983 return 3;
6984 }
6985
6986 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6987 || hevc->pic_h !=
6988 rpm_param->p.pic_height_in_luma_samples) {
6989 hevc_print(hevc, 0,
6990 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6991 hevc->pic_w, hevc->pic_h,
6992 rpm_param->p.pic_width_in_luma_samples,
6993 rpm_param->p.pic_height_in_luma_samples,
6994 hevc->interlace_flag);
6995 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6996 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6997 hevc->frame_width = hevc->pic_w;
6998 hevc->frame_height = hevc->pic_h;
6999#ifdef LOSLESS_COMPRESS_MODE
7000 if (/*re_config_pic_flag == 0 &&*/
7001 (get_double_write_mode(hevc) & 0x10) == 0)
7002 init_decode_head_hw(hevc);
7003#endif
7004 }
7005
7006 if (hevc->bit_depth_chroma > 10 ||
7007 hevc->bit_depth_luma > 10) {
7008 hevc_print(hevc, 0, "unsupport bitdepth : %u,%u\n",
7009 hevc->bit_depth_chroma,
7010 hevc->bit_depth_luma);
7011 if (!hevc->m_ins_flag)
7012 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
7013 H265_DEBUG_DIS_SYS_ERROR_PROC);
7014 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
7015 return 4;
7016 }
7017
7018 /* a zero width/height would cause a divide-by-zero error below */
7019 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
7020 if (get_dbg_flag(hevc)) {
7021 hevc_print(hevc, 0,
7022 "Fatal Error, pic_w = %d, pic_h = %d\n",
7023 hevc->pic_w, hevc->pic_h);
7024 }
7025 return 3;
7026 }
7027 pic_list_process(hevc);
7028
7029 hevc->lcu_size =
7030 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
7031 3 + rpm_param->
7032 p.log2_diff_max_min_coding_block_size);
7033 if (hevc->lcu_size == 0) {
7034 hevc_print(hevc, 0,
7035 "Error, lcu_size = 0 (%d,%d)\n",
7036 rpm_param->p.
7037 log2_min_coding_block_size_minus3,
7038 rpm_param->p.
7039 log2_diff_max_min_coding_block_size);
7040 return 3;
7041 }
7042 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
7043 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
7044 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
7045 hevc->lcu_x_num =
7046 ((hevc->pic_w % hevc->lcu_size) ==
7047 0) ? lcu_x_num_div : lcu_x_num_div + 1;
7048 hevc->lcu_y_num =
7049 ((hevc->pic_h % hevc->lcu_size) ==
7050 0) ? lcu_y_num_div : lcu_y_num_div + 1;
7051 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
7052
7053 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
7054 || hevc->m_nalUnitType ==
7055 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
7056 hevc->curr_POC = 0;
7057 if ((hevc->m_temporalId - 1) == 0)
7058 hevc->iPrevTid0POC = hevc->curr_POC;
7059 } else {
7060 int iMaxPOClsb =
7061 1 << (rpm_param->p.
7062 log2_max_pic_order_cnt_lsb_minus4 + 4);
7063 int iPrevPOClsb;
7064 int iPrevPOCmsb;
7065 int iPOCmsb;
7066 int iPOClsb = rpm_param->p.POClsb;
7067
7068 if (iMaxPOClsb == 0) {
7069 hevc_print(hevc, 0,
7070 "error iMaxPOClsb is 0\n");
7071 return 3;
7072 }
7073
7074 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
7075 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
7076
7077 if ((iPOClsb < iPrevPOClsb)
7078 && ((iPrevPOClsb - iPOClsb) >=
7079 (iMaxPOClsb / 2)))
7080 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
7081 else if ((iPOClsb > iPrevPOClsb)
7082 && ((iPOClsb - iPrevPOClsb) >
7083 (iMaxPOClsb / 2)))
7084 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
7085 else
7086 iPOCmsb = iPrevPOCmsb;
7087 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7088 hevc_print(hevc, 0,
7089 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
7090 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
7091 iPOClsb);
7092 }
7093 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
7094 || hevc->m_nalUnitType ==
7095 NAL_UNIT_CODED_SLICE_BLANT
7096 || hevc->m_nalUnitType ==
7097 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
7098 /* For BLA picture types, POCmsb is set to 0. */
7099 iPOCmsb = 0;
7100 }
7101 hevc->curr_POC = (iPOCmsb + iPOClsb);
7102 if ((hevc->m_temporalId - 1) == 0)
7103 hevc->iPrevTid0POC = hevc->curr_POC;
7104 else {
7105 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7106 hevc_print(hevc, 0,
7107 "m_temporalID is %d\n",
7108 hevc->m_temporalId);
7109 }
7110 }
7111 }
7112 hevc->RefNum_L0 =
7113 (rpm_param->p.num_ref_idx_l0_active >
7114 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
7115 num_ref_idx_l0_active;
7116 hevc->RefNum_L1 =
7117 (rpm_param->p.num_ref_idx_l1_active >
7118 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
7119 num_ref_idx_l1_active;
7120
7121 /* if(curr_POC==0x10) dump_lmem(); */
7122
7123 /* skip RASL pictures after CRA/BLA pictures */
7124 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
7125 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
7126 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
7127 || hevc->m_nalUnitType ==
7128 NAL_UNIT_CODED_SLICE_BLANT
7129 || hevc->m_nalUnitType ==
7130 NAL_UNIT_CODED_SLICE_BLA_N_LP)
7131 hevc->m_pocRandomAccess = hevc->curr_POC;
7132 else
7133 hevc->m_pocRandomAccess = -MAX_INT;
7134 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
7135 || hevc->m_nalUnitType ==
7136 NAL_UNIT_CODED_SLICE_BLANT
7137 || hevc->m_nalUnitType ==
7138 NAL_UNIT_CODED_SLICE_BLA_N_LP)
7139 hevc->m_pocRandomAccess = hevc->curr_POC;
7140 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
7141 (nal_skip_policy >= 3) &&
7142 (hevc->m_nalUnitType ==
7143 NAL_UNIT_CODED_SLICE_RASL_N ||
7144 hevc->m_nalUnitType ==
7145 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
7146 if (get_dbg_flag(hevc)) {
7147 hevc_print(hevc, 0,
7148 "RASL picture with POC %d < %d ",
7149 hevc->curr_POC, hevc->m_pocRandomAccess);
7150 hevc_print(hevc, 0,
7151 "RandomAccess point POC), skip it\n");
7152 }
7153 return 1;
7154 }
7155
7156 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
7157 hevc->skip_flag = 0;
7158 /**/
7159 /* if((iPrevPOC != curr_POC)){ */
7160 if (rpm_param->p.slice_segment_address == 0) {
7161 struct PIC_s *pic;
7162
7163 hevc->new_pic = 1;
7164#ifdef MULTI_INSTANCE_SUPPORT
7165 if (!hevc->m_ins_flag)
7166#endif
7167 check_pic_decoded_error_pre(hevc,
7168 READ_VREG(HEVC_PARSER_LCU_START)
7169 & 0xffffff);
7170 /**/ if (use_cma == 0) {
7171 if (hevc->pic_list_init_flag == 0) {
7172 init_pic_list(hevc);
7173 init_pic_list_hw(hevc);
7174 init_buf_spec(hevc);
7175 hevc->pic_list_init_flag = 3;
7176 }
7177 }
7178 if (!hevc->m_ins_flag) {
7179 if (hevc->cur_pic)
7180 get_picture_qos_info(hevc);
7181 }
7182 hevc->first_pic_after_recover = 0;
7183 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7184 dump_pic_list(hevc);
7185 /* prev pic */
7186 hevc_pre_pic(hevc, pic);
7187 /*
7188 *update the referenced flag of old pictures
7189 *(cur_pic->referenced is 1 and not updated)
7190 */
7191 apply_ref_pic_set(hevc, hevc->curr_POC,
7192 rpm_param);
7193
7194 /*if (hevc->mmu_enable)
7195 recycle_mmu_bufs(hevc);*/
7196
7197#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7198 if (vdec->master) {
7199 struct hevc_state_s *hevc_ba =
7200 (struct hevc_state_s *)
7201 vdec->master->private;
7202 if (hevc_ba->cur_pic != NULL) {
7203 hevc_ba->cur_pic->dv_enhance_exist = 1;
7204 hevc_print(hevc, H265_DEBUG_DV,
7205 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
7206 hevc->curr_POC, hevc_ba->cur_pic->POC);
7207 }
7208 }
7209 if (vdec->master == NULL &&
7210 vdec->slave == NULL)
7211 set_aux_data(hevc,
7212 hevc->cur_pic, 1, 0); /*suffix*/
7213 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7214 set_aux_data(hevc,
7215 hevc->cur_pic, 0, 1); /*dv meta only*/
7216#else
7217 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7218#endif
7219 /* new pic */
7220 hevc->cur_pic = hevc->is_used_v4l ?
7221 v4l_get_new_pic(hevc, rpm_param) :
7222 get_new_pic(hevc, rpm_param);
7223 if (hevc->cur_pic == NULL) {
7224 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
7225 dump_pic_list(hevc);
7226 hevc->wait_buf = 1;
7227 return -1;
7228 }
7229#ifdef MULTI_INSTANCE_SUPPORT
7230 hevc->decoding_pic = hevc->cur_pic;
7231 if (!hevc->m_ins_flag)
7232 hevc->over_decode = 0;
7233#endif
7234#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7235 hevc->cur_pic->dv_enhance_exist = 0;
7236 if (vdec->slave)
7237 hevc_print(hevc, H265_DEBUG_DV,
7238 "Clear bl (poc %d) dv_enhance_exist flag\n",
7239 hevc->curr_POC);
7240 if (vdec->master == NULL &&
7241 vdec->slave == NULL)
7242 set_aux_data(hevc,
7243 hevc->cur_pic, 0, 0); /*prefix*/
7244
7245 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7246 set_aux_data(hevc,
7247 hevc->cur_pic, 0, 2); /*pre sei only*/
7248#else
7249 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7250#endif
7251 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
7252 hevc->cur_pic->output_ready = 1;
7253 hevc->cur_pic->stream_offset =
7254 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
7255 prepare_display_buf(hevc, hevc->cur_pic);
7256 hevc->wait_buf = 2;
7257 return -1;
7258 }
7259 } else {
7260 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
7261#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7262 if (vdec->master == NULL &&
7263 vdec->slave == NULL) {
7264 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7265 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7266 }
7267#else
7268 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7269 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7270#endif
7271 }
7272 if (hevc->pic_list_init_flag != 3
7273 || hevc->cur_pic == NULL) {
7274 /* make it dec from the first slice segment */
7275 return 3;
7276 }
7277 hevc->cur_pic->slice_idx++;
7278 hevc->new_pic = 0;
7279 }
7280 } else {
7281 if (hevc->wait_buf == 1) {
7282 pic_list_process(hevc);
7283 hevc->cur_pic = hevc->is_used_v4l ?
7284 v4l_get_new_pic(hevc, rpm_param) :
7285 get_new_pic(hevc, rpm_param);
7286 if (hevc->cur_pic == NULL)
7287 return -1;
7288
7289 if (!hevc->m_ins_flag)
7290 hevc->over_decode = 0;
7291
7292#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7293 hevc->cur_pic->dv_enhance_exist = 0;
7294 if (vdec->master == NULL &&
7295 vdec->slave == NULL)
7296 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7297#else
7298 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7299#endif
7300 hevc->wait_buf = 0;
7301 } else if (hevc->wait_buf ==
7302 2) {
7303 if (get_display_pic_num(hevc) >
7304 1)
7305 return -1;
7306 hevc->wait_buf = 0;
7307 }
7308 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7309 dump_pic_list(hevc);
7310 }
7311
7312 if (hevc->new_pic) {
7313#if 1
7314 /*SUPPORT_10BIT*/
7315 int sao_mem_unit =
7316 (hevc->lcu_size == 16 ? 9 :
7317 hevc->lcu_size ==
7318 32 ? 14 : 24) << 4;
7319#else
7320 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
7321#endif
7322 int pic_height_cu =
7323 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
7324 int pic_width_cu =
7325 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
7326 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
7327
7328 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
7329 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7330 hevc_print(hevc, 0,
7331 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
7332 __func__,
7333 hevc->decode_idx,
7334 hevc->curr_pic_struct,
7335 hevc->interlace_flag,
7336 hevc->cur_pic->index);
7337 }
7338 if (dbg_skip_decode_index != 0 &&
7339 hevc->decode_idx == dbg_skip_decode_index)
7340 dbg_skip_flag = 1;
7341
7342 hevc->decode_idx++;
7343 update_tile_info(hevc, pic_width_cu, pic_height_cu,
7344 sao_mem_unit, rpm_param);
7345
7346 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
7347 }
7348
7349 if (hevc->iPrevPOC != hevc->curr_POC) {
7350 hevc->new_tile = 1;
7351 hevc->tile_x = 0;
7352 hevc->tile_y = 0;
7353 hevc->tile_y_x = 0;
7354 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7355 hevc_print(hevc, 0,
7356 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
7357 hevc->tile_x, hevc->tile_y);
7358 }
7359 } else if (hevc->tile_enabled) {
7360 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7361 hevc_print(hevc, 0,
7362 "slice_segment_address is %d\n",
7363 rpm_param->p.slice_segment_address);
7364 }
7365 hevc->tile_y_x =
7366 get_tile_index(hevc, rpm_param->p.slice_segment_address,
7367 (hevc->pic_w +
7368 hevc->lcu_size -
7369 1) / hevc->lcu_size);
7370 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
7371 && (hevc->tile_y_x != -1)) {
7372 hevc->new_tile = 1;
7373 hevc->tile_x = hevc->tile_y_x & 0xff;
7374 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
7375 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7376 hevc_print(hevc, 0,
7377 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
7378 rpm_param->p.slice_segment_address,
7379 hevc->tile_x, hevc->tile_y);
7380 }
7381 } else
7382 hevc->new_tile = 0;
7383 } else
7384 hevc->new_tile = 0;
7385
7386 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
7387 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
7388 hevc->new_tile = 0;
7389
7390 if (hevc->new_tile) {
7391 hevc->tile_start_lcu_x =
7392 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
7393 hevc->tile_start_lcu_y =
7394 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
7395 hevc->tile_width_lcu =
7396 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
7397 hevc->tile_height_lcu =
7398 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
7399 }
7400
7401 set_ref_pic_list(hevc, rpm_param);
7402
7403 Col_ref = rpm_param->p.collocated_ref_idx;
7404
7405 hevc->LDCFlag = 0;
7406 if (rpm_param->p.slice_type != I_SLICE) {
7407 hevc->LDCFlag = 1;
7408 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
7409 if (hevc->cur_pic->
7410 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
7411 hevc->curr_POC)
7412 hevc->LDCFlag = 0;
7413 }
7414 if (rpm_param->p.slice_type == B_SLICE) {
7415 for (i = 0; (i < hevc->RefNum_L1)
7416 && hevc->LDCFlag; i++) {
7417 if (hevc->cur_pic->
7418 m_aiRefPOCList1[hevc->cur_pic->
7419 slice_idx][i] >
7420 hevc->curr_POC)
7421 hevc->LDCFlag = 0;
7422 }
7423 }
7424 }
7425
7426 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
7427
7428 hevc->plevel =
7429 rpm_param->p.log2_parallel_merge_level;
7430 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
7431
7432 hevc->LongTerm_Curr = 0; /* to do ... */
7433 hevc->LongTerm_Col = 0; /* to do ... */
7434
7435 hevc->list_no = 0;
7436 if (rpm_param->p.slice_type == B_SLICE)
7437 hevc->list_no = 1 - hevc->ColFromL0Flag;
7438 if (hevc->list_no == 0) {
7439 if (Col_ref < hevc->RefNum_L0) {
7440 hevc->Col_POC =
7441 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
7442 slice_idx][Col_ref];
7443 } else
7444 hevc->Col_POC = INVALID_POC;
7445 } else {
7446 if (Col_ref < hevc->RefNum_L1) {
7447 hevc->Col_POC =
7448 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
7449 slice_idx][Col_ref];
7450 } else
7451 hevc->Col_POC = INVALID_POC;
7452 }
7453
7454 hevc->LongTerm_Ref = 0; /* to do ... */
7455
7456 if (hevc->slice_type != 2) {
7457 /* if(hevc->i_only==1){ */
7458 /* return 0xf; */
7459 /* } */
7460
7461 if (hevc->Col_POC != INVALID_POC) {
7462 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7463 if (hevc->col_pic == NULL) {
7464 hevc->cur_pic->error_mark = 1;
7465 if (get_dbg_flag(hevc)) {
7466 hevc_print(hevc, 0,
7467 "WRONG,fail to get the pic Col_POC\n");
7468 }
7469 if (is_log_enable(hevc))
7470 add_log(hevc,
7471 "WRONG,fail to get the pic Col_POC");
7472 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7473 hevc->col_pic->error_mark = 1;
7474 hevc->cur_pic->error_mark = 1;
7475 if (get_dbg_flag(hevc)) {
7476 hevc_print(hevc, 0,
7477 "WRONG, Col_POC error_mark is 1\n");
7478 }
7479 if (is_log_enable(hevc))
7480 add_log(hevc,
7481 "WRONG, Col_POC error_mark is 1");
7482 } else {
7483 if ((hevc->col_pic->width
7484 != hevc->pic_w) ||
7485 (hevc->col_pic->height
7486 != hevc->pic_h)) {
7487 hevc_print(hevc, 0,
7488 "Wrong reference pic (poc %d) width/height %d/%d\n",
7489 hevc->col_pic->POC,
7490 hevc->col_pic->width,
7491 hevc->col_pic->height);
7492 hevc->cur_pic->error_mark = 1;
7493 }
7494
7495 }
7496
7497 if (hevc->cur_pic->error_mark
7498 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7499 /*count info*/
7500 vdec_count_info(hevc->gvs, hevc->cur_pic->error_mark,
7501 hevc->cur_pic->stream_offset);
7502 if (hevc->PB_skip_mode == 2)
7503 hevc->gvs->drop_frame_count++;
7504 }
7505
7506 if (is_skip_decoding(hevc,
7507 hevc->cur_pic)) {
7508 return 2;
7509 }
7510 } else
7511 hevc->col_pic = hevc->cur_pic;
7512 } /* */
7513 if (hevc->col_pic == NULL)
7514 hevc->col_pic = hevc->cur_pic;
7515#ifdef BUFFER_MGR_ONLY
7516 return 0xf;
7517#else
7518 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7519 || (dbg_skip_flag))
7520 return 0xf;
7521#endif
7522
7523 config_mc_buffer(hevc, hevc->cur_pic);
7524
7525 if (is_skip_decoding(hevc,
7526 hevc->cur_pic)) {
7527 if (get_dbg_flag(hevc))
7528 hevc_print(hevc, 0,
7529 "Discard this picture index %d\n",
7530 hevc->cur_pic->index);
7531 /*count info*/
7532 vdec_count_info(hevc->gvs, hevc->cur_pic->error_mark,
7533 hevc->cur_pic->stream_offset);
7534 if (hevc->PB_skip_mode == 2)
7535 hevc->gvs->drop_frame_count++;
7536 return 2;
7537 }
7538#ifdef MCRCC_ENABLE
7539 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7540#endif
7541 config_mpred_hw(hevc);
7542
7543 config_sao_hw(hevc, rpm_param);
7544
7545 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7546 return 0xf;
7547
7548 return 0;
7549}
7550
7551
7552
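/*
 * H265_alloc_mmu() - compute how many 4K pages the compressed frame
 * body of this picture needs and allocate them from the decoder's
 * mmu_box. Returns 0 without allocating when double write mode 0x10
 * bypasses the compressed buffer, -1 when the page count exceeds the
 * supported maximum, otherwise the decoder_mmu_box_alloc_idx() result.
 */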
7553static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7554 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7555 int cur_buf_idx = new_pic->index;
7556 int bit_depth_10 = (bit_depth != 0x00);
7557 int picture_size;
7558 int cur_mmu_4k_number;
7559 int ret, max_frame_num;
7560 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7561 new_pic->height, !bit_depth_10);
7562 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7563 if (get_double_write_mode(hevc) == 0x10)
7564 return 0;
7565 /*hevc_print(hevc, 0,
7566 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7567 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7568
7569 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7570 max_frame_num = MAX_FRAME_8K_NUM;
7571 else
7572 max_frame_num = MAX_FRAME_4K_NUM;
7573 if (cur_mmu_4k_number > max_frame_num) {
7574 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7575 cur_mmu_4k_number,
7576 new_pic->width,
7577 new_pic->height);
7578 return -1;
7579 }
7580 ret = decoder_mmu_box_alloc_idx(
7581 hevc->mmu_box,
7582 cur_buf_idx,
7583 cur_mmu_4k_number,
7584 mmu_index_adr);
7585
7586 new_pic->scatter_alloc = 1;
7587
7588 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7589 "%s pic index %d page count(%d) ret =%d\n",
7590 __func__, cur_buf_idx,
7591 cur_mmu_4k_number, ret);
7592 return ret;
7593}
7594
7595
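/* Return the picture's scatter-allocated MMU pages to the mmu_box. */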
7596static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7597 struct PIC_s *pic)
7598{
7599 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7600 "%s pic index %d scatter_alloc %d\n",
7601 __func__, pic->index,
7602 pic->scatter_alloc);
7603
7604 if (hevc->mmu_enable
7605 && ((hevc->double_write_mode & 0x10) == 0)
7606 && pic->scatter_alloc)
7607 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7608 pic->scatter_alloc = 0;
7609}
7610
7611/*
7612 *************************************************
7613 *
7614 *h265 buffer management end
7615 *
7616 **************************************************
7617 */
7618static struct hevc_state_s *gHevc;
7619
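/*
 * hevc_local_uninit() - free the coherent DMA buffers set up by
 * hevc_local_init(): the ucode swap area, the aux (prefix/suffix SEI)
 * buffer, the RPM buffer, the lmem buffer and the frame MMU map.
 */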
7620static void hevc_local_uninit(struct hevc_state_s *hevc)
7621{
7622 hevc->rpm_ptr = NULL;
7623 hevc->lmem_ptr = NULL;
7624
7625#ifdef SWAP_HEVC_UCODE
7626 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7627 if (hevc->mc_cpu_addr != NULL) {
7628 dma_free_coherent(amports_get_dma_device(),
7629 hevc->swap_size, hevc->mc_cpu_addr,
7630 hevc->mc_dma_handle);
7631 hevc->mc_cpu_addr = NULL;
7632 }
7633
7634 }
7635#endif
7636#ifdef DETREFILL_ENABLE
7637 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7638 uninit_detrefill_buf(hevc);
7639#endif
7640 if (hevc->aux_addr) {
7641 dma_free_coherent(amports_get_dma_device(),
7642 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7643 hevc->aux_phy_addr);
7644 hevc->aux_addr = NULL;
7645 }
7646 if (hevc->rpm_addr) {
7647 dma_free_coherent(amports_get_dma_device(),
7648 RPM_BUF_SIZE, hevc->rpm_addr,
7649 hevc->rpm_phy_addr);
7650 hevc->rpm_addr = NULL;
7651 }
7652 if (hevc->lmem_addr) {
7653 dma_free_coherent(amports_get_dma_device(),
7654			LMEM_BUF_SIZE, hevc->lmem_addr,
7655 hevc->lmem_phy_addr);
7656 hevc->lmem_addr = NULL;
7657 }
7658
7659 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7660 if (hevc->frame_mmu_map_phy_addr)
7661 dma_free_coherent(amports_get_dma_device(),
7662 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7663 hevc->frame_mmu_map_phy_addr);
7664
7665 hevc->frame_mmu_map_addr = NULL;
7666 }
7667
7668 //pr_err("[%s line %d] hevc->gvs=0x%p operation\n",__func__, __LINE__, hevc->gvs);
7669}
7670
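/*
 * Per-instance buffer setup: pick the workspace spec that matches the
 * chip (1080p, 4K, or the larger SM1+ spec), initialize the buffer
 * layout and decoder state, then allocate the RPM, aux (prefix/suffix)
 * and LMEM coherent buffers plus the frame MMU map when MMU is enabled.
 */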
7671static int hevc_local_init(struct hevc_state_s *hevc)
7672{
7673 int ret = -1;
7674 struct BuffInfo_s *cur_buf_info = NULL;
7675
7676 memset(&hevc->param, 0, sizeof(union param_u));
7677
7678 cur_buf_info = &hevc->work_space_buf_store;
7679
7680 if (vdec_is_support_4k()) {
7681 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7682 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7683 sizeof(struct BuffInfo_s));
7684 else
7685 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7686 sizeof(struct BuffInfo_s));
7687 } else
7688 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7689 sizeof(struct BuffInfo_s));
7690
7691 cur_buf_info->start_adr = hevc->buf_start;
7692 init_buff_spec(hevc, cur_buf_info);
7693
7694 hevc_init_stru(hevc, cur_buf_info);
7695
7696 hevc->bit_depth_luma = 8;
7697 hevc->bit_depth_chroma = 8;
7698 hevc->video_signal_type = 0;
7699 hevc->video_signal_type_debug = 0;
7700 bit_depth_luma = hevc->bit_depth_luma;
7701 bit_depth_chroma = hevc->bit_depth_chroma;
7702 video_signal_type = hevc->video_signal_type;
7703
7704 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7705 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7706 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7707 if (hevc->rpm_addr == NULL) {
7708 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7709 return -1;
7710 }
7711 hevc->rpm_ptr = hevc->rpm_addr;
7712 }
7713
7714 if (prefix_aux_buf_size > 0 ||
7715 suffix_aux_buf_size > 0) {
7716 u32 aux_buf_size;
7717
7718 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7719 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7720 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7721		hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7722 aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7723 if (hevc->aux_addr == NULL) {
7724			pr_err("%s: failed to alloc aux buffer\n", __func__);
7725 return -1;
7726 }
7727 }
7728
7729 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7730 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7731 if (hevc->lmem_addr == NULL) {
7732 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7733 return -1;
7734 }
7735 hevc->lmem_ptr = hevc->lmem_addr;
7736
7737 if (hevc->mmu_enable) {
7738 hevc->frame_mmu_map_addr =
7739 dma_alloc_coherent(amports_get_dma_device(),
7740 get_frame_mmu_map_size(),
7741 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7742 if (hevc->frame_mmu_map_addr == NULL) {
7743			pr_err("%s: failed to alloc frame mmu map buffer\n", __func__);
7744 return -1;
7745 }
7746 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7747 }
7748 ret = 0;
7749 return ret;
7750}
7751
7752/*
7753 *******************************************
7754 * Mailbox command
7755 *******************************************
7756 */
7757#define CMD_FINISHED 0
7758#define CMD_ALLOC_VIEW 1
7759#define CMD_FRAME_DISPLAY 3
7760#define CMD_DEBUG 10
7761
7762
7763#define DECODE_BUFFER_NUM_MAX 32
7764#define DISPLAY_BUFFER_NUM 6
7765
7766#define video_domain_addr(adr) (adr&0x7fffffff)
7767#define DECODER_WORK_SPACE_SIZE 0x800000
7768
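/*
 * Pack the Y/UV canvas indices of a picture into the canvas address
 * word: bits [7:0] Y canvas, bits [15:8] and [23:16] the UV canvas.
 */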
7769#define spec2canvas(x) \
7770 (((x)->uv_canvas_index << 16) | \
7771 ((x)->uv_canvas_index << 8) | \
7772 ((x)->y_canvas_index << 0))
7773
7774
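/*
 * Configure the display canvases for a picture. With double write the
 * canvas geometry is the decoded size divided by the double write ratio
 * and aligned (32/64 byte width, 32 line height); canvas indices come
 * from the core when parallel_dec is used, otherwise from a fixed
 * 128 + index mapping. The resulting configuration is mirrored into
 * pic->canvas_config[] for multi-instance use.
 */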
7775static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7776{
7777 struct vdec_s *vdec = hw_to_vdec(hevc);
7778 int canvas_w = ALIGN(pic->width, 64)/4;
7779 int canvas_h = ALIGN(pic->height, 32)/4;
7780 int blkmode = hevc->mem_map_mode;
7781
7782 /*CANVAS_BLKMODE_64X32*/
7783#ifdef SUPPORT_10BIT
7784 if (pic->double_write_mode) {
7785 canvas_w = pic->width /
7786 get_double_write_ratio(hevc, pic->double_write_mode);
7787 canvas_h = pic->height /
7788 get_double_write_ratio(hevc, pic->double_write_mode);
7789
7790 if (hevc->mem_map_mode == 0)
7791 canvas_w = ALIGN(canvas_w, 32);
7792 else
7793 canvas_w = ALIGN(canvas_w, 64);
7794 canvas_h = ALIGN(canvas_h, 32);
7795
7796 if (vdec->parallel_dec == 1) {
7797 if (pic->y_canvas_index == -1)
7798 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7799 if (pic->uv_canvas_index == -1)
7800 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7801 } else {
7802 pic->y_canvas_index = 128 + pic->index * 2;
7803 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7804 }
7805
7806 canvas_config_ex(pic->y_canvas_index,
7807 pic->dw_y_adr, canvas_w, canvas_h,
7808 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7809 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7810 canvas_w, canvas_h,
7811 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7812#ifdef MULTI_INSTANCE_SUPPORT
7813 pic->canvas_config[0].phy_addr =
7814 pic->dw_y_adr;
7815 pic->canvas_config[0].width =
7816 canvas_w;
7817 pic->canvas_config[0].height =
7818 canvas_h;
7819 pic->canvas_config[0].block_mode =
7820 blkmode;
7821 pic->canvas_config[0].endian = hevc->is_used_v4l ? 0 : 7;
7822
7823 pic->canvas_config[1].phy_addr =
7824 pic->dw_u_v_adr;
7825 pic->canvas_config[1].width =
7826 canvas_w;
7827 pic->canvas_config[1].height =
7828 canvas_h;
7829 pic->canvas_config[1].block_mode =
7830 blkmode;
7831 pic->canvas_config[1].endian = hevc->is_used_v4l ? 0 : 7;
7832#endif
7833 } else {
7834 if (!hevc->mmu_enable) {
7835 /* to change after 10bit VPU is ready ... */
7836 if (vdec->parallel_dec == 1) {
7837 if (pic->y_canvas_index == -1)
7838 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7839 pic->uv_canvas_index = pic->y_canvas_index;
7840 } else {
7841 pic->y_canvas_index = 128 + pic->index;
7842 pic->uv_canvas_index = 128 + pic->index;
7843 }
7844
7845 canvas_config_ex(pic->y_canvas_index,
7846 pic->mc_y_adr, canvas_w, canvas_h,
7847 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7848 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7849 canvas_w, canvas_h,
7850 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7851 }
7852 }
7853#else
7854 if (vdec->parallel_dec == 1) {
7855 if (pic->y_canvas_index == -1)
7856 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7857 if (pic->uv_canvas_index == -1)
7858 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7859 } else {
7860 pic->y_canvas_index = 128 + pic->index * 2;
7861 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7862 }
7863
7864
7865 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7866 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7867 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7868 canvas_w, canvas_h,
7869 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7870#endif
7871}
7872
7873static int init_buf_spec(struct hevc_state_s *hevc)
7874{
7875 int pic_width = hevc->pic_w;
7876 int pic_height = hevc->pic_h;
7877
7878 /* hevc_print(hevc, 0,
7879 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7880 */
7881 hevc_print(hevc, 0,
7882 "%s2 %d %d\n", __func__, pic_width, pic_height);
7883 /* pic_width = hevc->pic_w; */
7884 /* pic_height = hevc->pic_h; */
7885
7886 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7887 hevc->frame_width = pic_width;
7888 hevc->frame_height = pic_height;
7889
7890 }
7891
7892 return 0;
7893}
7894
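/*
 * Parse the SEI payloads stored in the picture aux buffer: picture
 * timing (pic_struct), HDR10+ user data (ITU-T T.35), mastering display
 * colour volume and content light level. The results are latched in the
 * hevc state and flagged through sei_present_flag.
 */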
7895static int parse_sei(struct hevc_state_s *hevc,
7896 struct PIC_s *pic, char *sei_buf, uint32_t size)
7897{
7898 char *p = sei_buf;
7899 char *p_sei;
7900 uint16_t header;
7901 uint8_t nal_unit_type;
7902 uint8_t payload_type, payload_size;
7903 int i, j;
7904
7905 if (size < 2)
7906 return 0;
7907 header = *p++;
7908 header <<= 8;
7909 header += *p++;
7910 nal_unit_type = header >> 9;
7911 if ((nal_unit_type != NAL_UNIT_SEI)
7912 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7913 return 0;
7914 while (p+2 <= sei_buf+size) {
7915 payload_type = *p++;
7916 payload_size = *p++;
7917 if (p+payload_size <= sei_buf+size) {
7918 switch (payload_type) {
7919 case SEI_PicTiming:
7920 if ((parser_sei_enable & 0x4) &&
7921 hevc->frame_field_info_present_flag) {
7922 p_sei = p;
7923 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7924 pic->pic_struct = hevc->curr_pic_struct;
7925 if (get_dbg_flag(hevc) &
7926 H265_DEBUG_PIC_STRUCT) {
7927 hevc_print(hevc, 0,
7928 "parse result pic_struct = %d\n",
7929 hevc->curr_pic_struct);
7930 }
7931 }
7932 break;
7933 case SEI_UserDataITU_T_T35:
7934 p_sei = p;
7935				if (payload_size >= 6 && p_sei[0] == 0xB5
7936 && p_sei[1] == 0x00
7937 && p_sei[2] == 0x3C
7938 && p_sei[3] == 0x00
7939 && p_sei[4] == 0x01
7940 && p_sei[5] == 0x04)
7941 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7942
7943 break;
7944 case SEI_MasteringDisplayColorVolume:
7945 /*hevc_print(hevc, 0,
7946 "sei type: primary display color volume %d, size %d\n",
7947 payload_type,
7948 payload_size);*/
7949 /* master_display_colour */
7950 p_sei = p;
7951 for (i = 0; i < 3; i++) {
7952 for (j = 0; j < 2; j++) {
7953 hevc->primaries[i][j]
7954 = (*p_sei<<8)
7955 | *(p_sei+1);
7956 p_sei += 2;
7957 }
7958 }
7959 for (i = 0; i < 2; i++) {
7960 hevc->white_point[i]
7961 = (*p_sei<<8)
7962 | *(p_sei+1);
7963 p_sei += 2;
7964 }
7965 for (i = 0; i < 2; i++) {
7966 hevc->luminance[i]
7967 = (*p_sei<<24)
7968 | (*(p_sei+1)<<16)
7969 | (*(p_sei+2)<<8)
7970 | *(p_sei+3);
7971 p_sei += 4;
7972 }
7973 hevc->sei_present_flag |=
7974 SEI_MASTER_DISPLAY_COLOR_MASK;
7975 /*for (i = 0; i < 3; i++)
7976 for (j = 0; j < 2; j++)
7977 hevc_print(hevc, 0,
7978 "\tprimaries[%1d][%1d] = %04x\n",
7979 i, j,
7980 hevc->primaries[i][j]);
7981 hevc_print(hevc, 0,
7982 "\twhite_point = (%04x, %04x)\n",
7983 hevc->white_point[0],
7984 hevc->white_point[1]);
7985 hevc_print(hevc, 0,
7986 "\tmax,min luminance = %08x, %08x\n",
7987 hevc->luminance[0],
7988 hevc->luminance[1]);*/
7989 break;
7990 case SEI_ContentLightLevel:
7991 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7992 hevc_print(hevc, 0,
7993 "sei type: max content light level %d, size %d\n",
7994 payload_type, payload_size);
7995 /* content_light_level */
7996 p_sei = p;
7997 hevc->content_light_level[0]
7998 = (*p_sei<<8) | *(p_sei+1);
7999 p_sei += 2;
8000 hevc->content_light_level[1]
8001 = (*p_sei<<8) | *(p_sei+1);
8002 p_sei += 2;
8003 hevc->sei_present_flag |=
8004 SEI_CONTENT_LIGHT_LEVEL_MASK;
8005 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
8006 hevc_print(hevc, 0,
8007 "\tmax cll = %04x, max_pa_cll = %04x\n",
8008 hevc->content_light_level[0],
8009 hevc->content_light_level[1]);
8010 break;
8011 default:
8012 break;
8013 }
8014 }
8015 p += payload_size;
8016 }
8017 return 0;
8018}
8019
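/*
 * Map aspect_ratio_idc (HEVC VUI, Table E-1) plus the coded width and
 * height to a height/width ratio in 8.8 fixed point; idc 255 uses the
 * explicit SAR. Example: idc 1 (square pixels) at 1920x1080 gives
 * 0x100 * 1080 / 1920 = 144 (0x90), i.e. 9/16.
 */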
8020static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
8021 unsigned w, unsigned h)
8022{
8023 unsigned ar;
8024
8025 if (idc == 255) {
8026 ar = div_u64(256ULL * sar_h * h,
8027 sar_w * w);
8028 } else {
8029 switch (idc) {
8030 case 1:
8031 ar = 0x100 * h / w;
8032 break;
8033 case 2:
8034 ar = 0x100 * h * 11 / (w * 12);
8035 break;
8036 case 3:
8037 ar = 0x100 * h * 11 / (w * 10);
8038 break;
8039 case 4:
8040 ar = 0x100 * h * 11 / (w * 16);
8041 break;
8042 case 5:
8043 ar = 0x100 * h * 33 / (w * 40);
8044 break;
8045 case 6:
8046 ar = 0x100 * h * 11 / (w * 24);
8047 break;
8048 case 7:
8049 ar = 0x100 * h * 11 / (w * 20);
8050 break;
8051 case 8:
8052 ar = 0x100 * h * 11 / (w * 32);
8053 break;
8054 case 9:
8055 ar = 0x100 * h * 33 / (w * 80);
8056 break;
8057 case 10:
8058 ar = 0x100 * h * 11 / (w * 18);
8059 break;
8060 case 11:
8061 ar = 0x100 * h * 11 / (w * 15);
8062 break;
8063 case 12:
8064 ar = 0x100 * h * 33 / (w * 64);
8065 break;
8066 case 13:
8067 ar = 0x100 * h * 99 / (w * 160);
8068 break;
8069 case 14:
8070 ar = 0x100 * h * 3 / (w * 4);
8071 break;
8072 case 15:
8073 ar = 0x100 * h * 2 / (w * 3);
8074 break;
8075 case 16:
8076 ar = 0x100 * h * 1 / (w * 2);
8077 break;
8078 default:
8079 ar = h * 0x100 / w;
8080 break;
8081 }
8082 }
8083
8084 return ar;
8085}
8086
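/*
 * Fill the vframe presentation info from the decoded picture: display
 * size scaled by the double write ratio, duration, aspect ratio from
 * calc_ar(), SEI data parsed out of the aux buffer, the video signal
 * type, and HDR metadata (mastering display colour and content light
 * level) passed on via prop.master_display_colour or, for V4L2, through
 * vdec_v4l_set_hdr_infos().
 */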
8087static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
8088 struct PIC_s *pic)
8089{
8090 unsigned int ar;
8091 int i, j;
8092 char *p;
8093 unsigned size = 0;
8094 unsigned type = 0;
8095 struct vframe_master_display_colour_s *vf_dp
8096 = &vf->prop.master_display_colour;
8097
8098 vf->width = pic->width /
8099 get_double_write_ratio(hevc, pic->double_write_mode);
8100 vf->height = pic->height /
8101 get_double_write_ratio(hevc, pic->double_write_mode);
8102
8103 vf->duration = hevc->frame_dur;
8104 vf->duration_pulldown = 0;
8105 vf->flag = 0;
8106
8107 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
8108 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
8109
8110
8111 if (((pic->aspect_ratio_idc == 255) &&
8112 pic->sar_width &&
8113 pic->sar_height) ||
8114 ((pic->aspect_ratio_idc != 255) &&
8115 (pic->width))) {
8116 ar = min_t(u32,
8117 calc_ar(pic->aspect_ratio_idc,
8118 pic->sar_width,
8119 pic->sar_height,
8120 pic->width,
8121 pic->height),
8122 DISP_RATIO_ASPECT_RATIO_MAX);
8123 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
8124 vf->ratio_control <<= hevc->interlace_flag;
8125 }
8126 hevc->ratio_control = vf->ratio_control;
8127 if (pic->aux_data_buf
8128 && pic->aux_data_size) {
8129 /* parser sei */
8130 p = pic->aux_data_buf;
8131 while (p < pic->aux_data_buf
8132 + pic->aux_data_size - 8) {
8133 size = *p++;
8134 size = (size << 8) | *p++;
8135 size = (size << 8) | *p++;
8136 size = (size << 8) | *p++;
8137 type = *p++;
8138 type = (type << 8) | *p++;
8139 type = (type << 8) | *p++;
8140 type = (type << 8) | *p++;
8141 if (type == 0x02000000) {
8142 /* hevc_print(hevc, 0,
8143 "sei(%d)\n", size); */
8144 parse_sei(hevc, pic, p, size);
8145 }
8146 p += size;
8147 }
8148 }
8149 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
8150 vf->signal_type = pic->video_signal_type;
8151 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
8152 u32 data;
8153 data = vf->signal_type;
8154 data = data & 0xFFFF00FF;
8155 data = data | (0x30<<8);
8156 vf->signal_type = data;
8157 }
8158 }
8159 else
8160 vf->signal_type = 0;
8161 hevc->video_signal_type_debug = vf->signal_type;
8162
8163 /* master_display_colour */
8164 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
8165 for (i = 0; i < 3; i++)
8166 for (j = 0; j < 2; j++)
8167 vf_dp->primaries[i][j] = hevc->primaries[i][j];
8168 for (i = 0; i < 2; i++) {
8169 vf_dp->white_point[i] = hevc->white_point[i];
8170 vf_dp->luminance[i]
8171 = hevc->luminance[i];
8172 }
8173 vf_dp->present_flag = 1;
8174 } else
8175 vf_dp->present_flag = 0;
8176
8177 /* content_light_level */
8178 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
8179 vf_dp->content_light_level.max_content
8180 = hevc->content_light_level[0];
8181 vf_dp->content_light_level.max_pic_average
8182 = hevc->content_light_level[1];
8183 vf_dp->content_light_level.present_flag = 1;
8184 } else
8185 vf_dp->content_light_level.present_flag = 0;
8186
8187 if (hevc->is_used_v4l &&
8188 ((hevc->sei_present_flag & SEI_HDR10PLUS_MASK) ||
8189 (vf_dp->present_flag) ||
8190 (vf_dp->content_light_level.present_flag))) {
8191 struct aml_vdec_hdr_infos hdr;
8192 struct aml_vcodec_ctx *ctx =
8193 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
8194
8195 memset(&hdr, 0, sizeof(hdr));
8196 hdr.signal_type = vf->signal_type;
8197 hdr.color_parms = *vf_dp;
8198 vdec_v4l_set_hdr_infos(ctx, &hdr);
8199 }
8200
8201 vf->sidebind_type = hevc->sidebind_type;
8202 vf->sidebind_channel_id = hevc->sidebind_channel_id;
8203}
8204
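/*
 * vframe provider callbacks: states/peek/get report frames queued on
 * display_q, put recycles a frame back to newframe_q and drops the
 * per-field references of the underlying picture(s).
 */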
8205static int vh265_vf_states(struct vframe_states *states, void *op_arg)
8206{
8207 unsigned long flags;
8208#ifdef MULTI_INSTANCE_SUPPORT
8209 struct vdec_s *vdec = op_arg;
8210 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8211#else
8212 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8213#endif
8214
8215 spin_lock_irqsave(&lock, flags);
8216
8217 states->vf_pool_size = VF_POOL_SIZE;
8218 states->buf_free_num = kfifo_len(&hevc->newframe_q);
8219 states->buf_avail_num = kfifo_len(&hevc->display_q);
8220
8221 if (step == 2)
8222 states->buf_avail_num = 0;
8223 spin_unlock_irqrestore(&lock, flags);
8224 return 0;
8225}
8226
8227static struct vframe_s *vh265_vf_peek(void *op_arg)
8228{
8229 struct vframe_s *vf[2] = {0, 0};
8230#ifdef MULTI_INSTANCE_SUPPORT
8231 struct vdec_s *vdec = op_arg;
8232 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8233#else
8234 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8235#endif
8236
8237 if (step == 2)
8238 return NULL;
8239
8240 if (force_disp_pic_index & 0x100) {
8241 if (force_disp_pic_index & 0x200)
8242 return NULL;
8243 return &hevc->vframe_dummy;
8244 }
8245
8246
8247 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
8248 if (vf[1]) {
8249 vf[0]->next_vf_pts_valid = true;
8250 vf[0]->next_vf_pts = vf[1]->pts;
8251 } else
8252 vf[0]->next_vf_pts_valid = false;
8253 return vf[0];
8254 }
8255
8256 return NULL;
8257}
8258
8259static struct vframe_s *vh265_vf_get(void *op_arg)
8260{
8261 struct vframe_s *vf;
8262#ifdef MULTI_INSTANCE_SUPPORT
8263 struct vdec_s *vdec = op_arg;
8264 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8265#else
8266 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8267#endif
8268
8269 if (step == 2)
8270 return NULL;
8271 else if (step == 1)
8272 step = 2;
8273
8274#if 0
8275 if (force_disp_pic_index & 0x100) {
8276 int buffer_index = force_disp_pic_index & 0xff;
8277 struct PIC_s *pic = NULL;
8278 if (buffer_index >= 0
8279 && buffer_index < MAX_REF_PIC_NUM)
8280 pic = hevc->m_PIC[buffer_index];
8281 if (pic == NULL)
8282 return NULL;
8283 if (force_disp_pic_index & 0x200)
8284 return NULL;
8285
8286 vf = &hevc->vframe_dummy;
8287 if (get_double_write_mode(hevc)) {
8288 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
8289 VIDTYPE_VIU_NV21;
8290 if (hevc->m_ins_flag) {
8291 vf->canvas0Addr = vf->canvas1Addr = -1;
8292 vf->plane_num = 2;
8293 vf->canvas0_config[0] =
8294 pic->canvas_config[0];
8295 vf->canvas0_config[1] =
8296 pic->canvas_config[1];
8297
8298 vf->canvas1_config[0] =
8299 pic->canvas_config[0];
8300 vf->canvas1_config[1] =
8301 pic->canvas_config[1];
8302 } else {
8303 vf->canvas0Addr = vf->canvas1Addr
8304 = spec2canvas(pic);
8305 }
8306 } else {
8307 vf->canvas0Addr = vf->canvas1Addr = 0;
8308 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8309 if (hevc->mmu_enable)
8310 vf->type |= VIDTYPE_SCATTER;
8311 }
8312 vf->compWidth = pic->width;
8313 vf->compHeight = pic->height;
8314 update_vf_memhandle(hevc, vf, pic);
8315 switch (hevc->bit_depth_luma) {
8316 case 9:
8317 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
8318 break;
8319 case 10:
8320 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
8321 | BITDEPTH_V10;
8322 break;
8323 default:
8324 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8325 break;
8326 }
8327 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8328 vf->bitdepth =
8329 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8330 if (hevc->mem_saving_mode == 1)
8331 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8332 vf->duration_pulldown = 0;
8333 vf->pts = 0;
8334 vf->pts_us64 = 0;
8335 set_frame_info(hevc, vf);
8336
8337 vf->width = pic->width /
8338 get_double_write_ratio(hevc, pic->double_write_mode);
8339 vf->height = pic->height /
8340 get_double_write_ratio(hevc, pic->double_write_mode);
8341
8342 force_disp_pic_index |= 0x200;
8343 return vf;
8344 }
8345#endif
8346
8347 if (kfifo_get(&hevc->display_q, &vf)) {
8348 struct vframe_s *next_vf;
8349 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8350 hevc_print(hevc, 0,
8351				"%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%lld) dur %d\n",
8352 __func__, vf, vf->type, vf->index,
8353 get_pic_poc(hevc, vf->index & 0xff),
8354 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8355 vf->pts, vf->pts_us64,
8356 vf->duration);
8357#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8358 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
8359 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
8360 hevc_print(hevc, 0, "pic 0x%p aux size %d:\n",
8361 pic, pic->aux_data_size);
8362 if (pic->aux_data_buf && pic->aux_data_size > 0) {
8363 int i;
8364 for (i = 0; i < pic->aux_data_size; i++) {
8365 hevc_print_cont(hevc, 0,
8366 "%02x ", pic->aux_data_buf[i]);
8367 if (((i + 1) & 0xf) == 0)
8368 hevc_print_cont(hevc, 0, "\n");
8369 }
8370 hevc_print_cont(hevc, 0, "\n");
8371 }
8372 }
8373#endif
8374 hevc->show_frame_num++;
8375 vf->index_disp = hevc->vf_get_count;
8376 hevc->vf_get_count++;
8377
8378 if (kfifo_peek(&hevc->display_q, &next_vf)) {
8379 vf->next_vf_pts_valid = true;
8380 vf->next_vf_pts = next_vf->pts;
8381 } else
8382 vf->next_vf_pts_valid = false;
8383
8384 return vf;
8385 }
8386
8387 return NULL;
8388}
8389static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
8390 int i;
8391 for (i = 0; i < VF_POOL_SIZE; i++) {
8392 if (vf == &hevc->vfpool[i] || vf == &hevc->vframe_dummy)
8393 return true;
8394 }
8395	hevc_print(hevc, 0, "h265: invalid vf %p has been put\n", vf);
8396 for (i = 0; i < VF_POOL_SIZE; i++) {
8397		hevc_print(hevc, PRINT_FLAG_VDEC_STATUS, "valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
8398 }
8399 return false;
8400}
8401
8402static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
8403{
8404 unsigned long flags;
8405#ifdef MULTI_INSTANCE_SUPPORT
8406 struct vdec_s *vdec = op_arg;
8407 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8408#else
8409 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8410#endif
8411 unsigned char index_top;
8412 unsigned char index_bot;
8413
8414 if (vf && (vf_valid_check(vf, hevc) == false))
8415 return;
8416 if (vf == (&hevc->vframe_dummy))
8417 return;
8418 if (!vf)
8419 return;
8420 index_top = vf->index & 0xff;
8421 index_bot = (vf->index >> 8) & 0xff;
8422 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8423 hevc_print(hevc, 0,
8424 "%s(type %d index 0x%x)\n",
8425 __func__, vf->type, vf->index);
8426 hevc->vf_put_count++;
8427 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8428 spin_lock_irqsave(&lock, flags);
8429
8430 if (index_top != 0xff
8431 && index_top < MAX_REF_PIC_NUM
8432 && hevc->m_PIC[index_top]) {
8433 if (hevc->is_used_v4l)
8434 hevc->m_PIC[index_top]->vframe_bound = true;
8435 if (hevc->m_PIC[index_top]->vf_ref > 0) {
8436 hevc->m_PIC[index_top]->vf_ref--;
8437
8438 if (hevc->m_PIC[index_top]->vf_ref == 0) {
8439 hevc->m_PIC[index_top]->output_ready = 0;
8440
8441 if (hevc->wait_buf != 0)
8442 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8443 0x1);
8444 }
8445 }
8446 }
8447
8448 if (index_bot != 0xff
8449 && index_bot < MAX_REF_PIC_NUM
8450 && hevc->m_PIC[index_bot]) {
8451 if (hevc->is_used_v4l)
8452 hevc->m_PIC[index_bot]->vframe_bound = true;
8453 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
8454 hevc->m_PIC[index_bot]->vf_ref--;
8455
8456 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
8457 hevc->m_PIC[index_bot]->output_ready = 0;
8458 if (hevc->wait_buf != 0)
8459 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8460 0x1);
8461 }
8462 }
8463 }
8464 spin_unlock_irqrestore(&lock, flags);
8465}
8466
8467static int vh265_event_cb(int type, void *data, void *op_arg)
8468{
8469 unsigned long flags;
8470#ifdef MULTI_INSTANCE_SUPPORT
8471 struct vdec_s *vdec = op_arg;
8472 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8473#else
8474 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8475#endif
8476 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8477#if 0
8478 amhevc_stop();
8479#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8480 vf_light_unreg_provider(&vh265_vf_prov);
8481#endif
8482 spin_lock_irqsave(&hevc->lock, flags);
8483 vh265_local_init();
8484 vh265_prot_init();
8485 spin_unlock_irqrestore(&hevc->lock, flags);
8486#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8487 vf_reg_provider(&vh265_vf_prov);
8488#endif
8489 amhevc_start();
8490#endif
8491 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8492 struct provider_aux_req_s *req =
8493 (struct provider_aux_req_s *)data;
8494 unsigned char index;
8495
8496 if (!req->vf) {
8497 req->aux_size = hevc->vf_put_count;
8498 return 0;
8499 }
8500 spin_lock_irqsave(&lock, flags);
8501 index = req->vf->index & 0xff;
8502 req->aux_buf = NULL;
8503 req->aux_size = 0;
8504 if (req->bot_flag)
8505 index = (req->vf->index >> 8) & 0xff;
8506 if (index != 0xff
8507 && index < MAX_REF_PIC_NUM
8508 && hevc->m_PIC[index]) {
8509 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8510 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8511#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8512 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8513 req->dv_enhance_exist = false;
8514 else
8515 req->dv_enhance_exist =
8516 hevc->m_PIC[index]->dv_enhance_exist;
8517 if (vdec_frame_based(vdec) && (hevc->dv_duallayer == true))
8518 req->dv_enhance_exist = 1;
8519 hevc_print(hevc, H265_DEBUG_DV,
8520			"query dv_enhance_exist for (pic 0x%p, vf 0x%p, poc %d index %d) flag => %d, aux size 0x%x\n",
8521 hevc->m_PIC[index],
8522 req->vf,
8523 hevc->m_PIC[index]->POC, index,
8524 req->dv_enhance_exist, req->aux_size);
8525#else
8526 req->dv_enhance_exist = 0;
8527#endif
8528 }
8529 spin_unlock_irqrestore(&lock, flags);
8530
8531 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8532 hevc_print(hevc, 0,
8533 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8534 __func__, type, index, req->aux_size);
8535#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8536 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8537 if ((force_bypass_dvenl & 0x80000000) == 0) {
8538 hevc_print(hevc, 0,
8539 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8540 __func__);
8541 hevc->bypass_dvenl_enable = 1;
8542 }
8543
8544#endif
8545 }
8546 return 0;
8547}
8548
8549#ifdef HEVC_PIC_STRUCT_SUPPORT
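/*
 * Field pairing helper: a single field kept on pending_q is either
 * flushed to display_q on its own (when its pair was recycled or never
 * arrives) or completed with the opposite field of pair_pic by filling
 * in the missing index byte and canvas before queuing it for display.
 */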
8550static int process_pending_vframe(struct hevc_state_s *hevc,
8551 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8552{
8553 struct vframe_s *vf;
8554
8555 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8556 hevc_print(hevc, 0,
8557 "%s: pair_pic index 0x%x %s\n",
8558 __func__, pair_pic->index,
8559 pair_frame_top_flag ?
8560 "top" : "bot");
8561
8562 if (kfifo_len(&hevc->pending_q) > 1) {
8563 unsigned long flags;
8564 /* do not pending more than 1 frame */
8565 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8566 hevc_print(hevc, 0,
8567 "fatal error, no available buffer slot.");
8568 return -1;
8569 }
8570 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8571 hevc_print(hevc, 0,
8572 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8573 __func__, vf->index);
8574 if ((hevc->double_write_mode == 3) &&
8575 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8576 vf->type |= VIDTYPE_COMPRESS;
8577 if (hevc->mmu_enable)
8578 vf->type |= VIDTYPE_SCATTER;
8579 }
8580 hevc->vf_pre_count++;
8581 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8582 spin_lock_irqsave(&lock, flags);
8583 vf->index &= 0xff;
8584 hevc->m_PIC[vf->index]->vf_ref = 0;
8585 hevc->m_PIC[vf->index]->output_ready = 0;
8586 if (hevc->wait_buf != 0)
8587 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8588 0x1);
8589 spin_unlock_irqrestore(&lock, flags);
8590
8591 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8592 }
8593
8594 if (kfifo_peek(&hevc->pending_q, &vf)) {
8595 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8596 /*
8597 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8598 *do not use it
8599 */
8600 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8601 hevc_print(hevc, 0,
8602 "fatal error, no available buffer slot.");
8603 return -1;
8604 }
8605 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8606 hevc_print(hevc, 0,
8607 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8608 __func__, vf->index);
8609 if (vf) {
8610 if ((hevc->double_write_mode == 3) &&
8611 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8612 vf->type |= VIDTYPE_COMPRESS;
8613 if (hevc->mmu_enable)
8614 vf->type |= VIDTYPE_SCATTER;
8615 }
8616 hevc->vf_pre_count++;
8617 kfifo_put(&hevc->display_q,
8618 (const struct vframe_s *)vf);
8619 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8620 }
8621 } else if ((!pair_frame_top_flag) &&
8622 (((vf->index >> 8) & 0xff) == 0xff)) {
8623 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8624 hevc_print(hevc, 0,
8625 "fatal error, no available buffer slot.");
8626 return -1;
8627 }
8628 if (vf) {
8629 if ((hevc->double_write_mode == 3) &&
8630 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8631 vf->type |= VIDTYPE_COMPRESS;
8632 if (hevc->mmu_enable)
8633 vf->type |= VIDTYPE_SCATTER;
8634 }
8635 vf->index &= 0xff;
8636 vf->index |= (pair_pic->index << 8);
8637 vf->canvas1Addr = spec2canvas(pair_pic);
8638 pair_pic->vf_ref++;
8639 kfifo_put(&hevc->display_q,
8640 (const struct vframe_s *)vf);
8641 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8642 hevc->vf_pre_count++;
8643 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8644 hevc_print(hevc, 0,
8645 "%s vf => display_q: (index 0x%x)\n",
8646 __func__, vf->index);
8647 }
8648 } else if (pair_frame_top_flag &&
8649 ((vf->index & 0xff) == 0xff)) {
8650 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8651 hevc_print(hevc, 0,
8652 "fatal error, no available buffer slot.");
8653 return -1;
8654 }
8655 if (vf) {
8656 if ((hevc->double_write_mode == 3) &&
8657 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8658 vf->type |= VIDTYPE_COMPRESS;
8659 if (hevc->mmu_enable)
8660 vf->type |= VIDTYPE_SCATTER;
8661 }
8662 vf->index &= 0xff00;
8663 vf->index |= pair_pic->index;
8664 vf->canvas0Addr = spec2canvas(pair_pic);
8665 pair_pic->vf_ref++;
8666 kfifo_put(&hevc->display_q,
8667 (const struct vframe_s *)vf);
8668 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8669 hevc->vf_pre_count++;
8670 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8671 hevc_print(hevc, 0,
8672 "%s vf => display_q: (index 0x%x)\n",
8673 __func__, vf->index);
8674 }
8675 }
8676 }
8677 return 0;
8678}
8679#endif
8680static void update_vf_memhandle(struct hevc_state_s *hevc,
8681 struct vframe_s *vf, struct PIC_s *pic)
8682{
8683 if (pic->index < 0) {
8684 vf->mem_handle = NULL;
8685 vf->mem_head_handle = NULL;
8686 } else if (vf->type & VIDTYPE_SCATTER) {
8687 vf->mem_handle =
8688 decoder_mmu_box_get_mem_handle(
8689 hevc->mmu_box, pic->index);
8690 vf->mem_head_handle =
8691 decoder_bmmu_box_get_mem_handle(
8692 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8693 } else {
8694 vf->mem_handle =
8695 decoder_bmmu_box_get_mem_handle(
8696 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8697 vf->mem_head_handle = NULL;
8698 /*vf->mem_head_handle =
8699 decoder_bmmu_box_get_mem_handle(
8700 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8701 }
8702 return;
8703}
8704
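/*
 * Collect per-frame QoS statistics (frame type, size, pts and the
 * min/avg/max motion vector, QP and skip values measured for the
 * picture) into hevc->vframe_qos for reporting.
 */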
8705static void fill_frame_info(struct hevc_state_s *hevc,
8706 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8707{
8708 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8709 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8710 vframe_qos->type = 4;
8711 else if (pic->slice_type == I_SLICE)
8712 vframe_qos->type = 1;
8713 else if (pic->slice_type == P_SLICE)
8714 vframe_qos->type = 2;
8715 else if (pic->slice_type == B_SLICE)
8716 vframe_qos->type = 3;
8717/*
8718#define SHOW_QOS_INFO
8719*/
8720 if (input_frame_based(hw_to_vdec(hevc)))
8721 vframe_qos->size = pic->frame_size;
8722 else
8723 vframe_qos->size = framesize;
8724 vframe_qos->pts = pts;
8725#ifdef SHOW_QOS_INFO
8726 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8727#endif
8728
8729
8730 vframe_qos->max_mv = pic->max_mv;
8731 vframe_qos->avg_mv = pic->avg_mv;
8732 vframe_qos->min_mv = pic->min_mv;
8733#ifdef SHOW_QOS_INFO
8734 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8735 vframe_qos->max_mv,
8736 vframe_qos->avg_mv,
8737 vframe_qos->min_mv);
8738#endif
8739
8740 vframe_qos->max_qp = pic->max_qp;
8741 vframe_qos->avg_qp = pic->avg_qp;
8742 vframe_qos->min_qp = pic->min_qp;
8743#ifdef SHOW_QOS_INFO
8744 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8745 vframe_qos->max_qp,
8746 vframe_qos->avg_qp,
8747 vframe_qos->min_qp);
8748#endif
8749
8750 vframe_qos->max_skip = pic->max_skip;
8751 vframe_qos->avg_skip = pic->avg_skip;
8752 vframe_qos->min_skip = pic->min_skip;
8753#ifdef SHOW_QOS_INFO
8754 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8755 vframe_qos->max_skip,
8756 vframe_qos->avg_skip,
8757 vframe_qos->min_skip);
8758#endif
8759
8760 vframe_qos->num++;
8761
8762}
8763
8764static inline void hevc_update_gvs(struct hevc_state_s *hevc)
8765{
8766 if (hevc->gvs->frame_height != hevc->frame_height) {
8767 hevc->gvs->frame_width = hevc->frame_width;
8768 hevc->gvs->frame_height = hevc->frame_height;
8769 }
8770 if (hevc->gvs->frame_dur != hevc->frame_dur) {
8771 hevc->gvs->frame_dur = hevc->frame_dur;
8772 if (hevc->frame_dur != 0)
8773 hevc->gvs->frame_rate = 96000 / hevc->frame_dur;
8774 else
8775 hevc->gvs->frame_rate = -1;
8776 }
8777 hevc->gvs->error_count = hevc->gvs->error_frame_count;
8778 hevc->gvs->status = hevc->stat | hevc->fatal_error;
8779 if (hevc->gvs->ratio_control != hevc->ratio_control)
8780 hevc->gvs->ratio_control = hevc->ratio_control;
8781}
8782
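/*
 * Turn a decoded picture into one or more vframes: take a free vframe
 * from newframe_q, resolve its pts (frame based timestamp or stream pts
 * lookup with duration based fallback), set the compressed/double write
 * addresses, bit depth and conformance window cropping, then queue it on
 * display_q; interlaced pic_struct values are expanded into the matching
 * top/bottom field vframes (with pending_q pairing) before notifying the
 * receiver.
 */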
8783static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8784{
8785 struct vdec_s *vdec = hw_to_vdec(hevc);
8786 struct vframe_s *vf = NULL;
8787 int stream_offset = pic->stream_offset;
8788 unsigned short slice_type = pic->slice_type;
8789 ulong nv_order = VIDTYPE_VIU_NV21;
8790 u32 frame_size = 0;
8791 struct vdec_info tmp4x;
8792 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
8793
8794 /* swap uv */
8795 if (hevc->is_used_v4l) {
8796 if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV12) ||
8797 (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV12M))
8798 nv_order = VIDTYPE_VIU_NV12;
8799 }
8800
8801 if (force_disp_pic_index & 0x100) {
8802 /*recycle directly*/
8803 pic->output_ready = 0;
8804 return -1;
8805 }
8806 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8807 hevc_print(hevc, 0,
8808 "fatal error, no available buffer slot.");
8809 return -1;
8810 }
8811 display_frame_count[hevc->index]++;
8812 if (vf) {
8813 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8814 "%s: pic index 0x%x\n",
8815 __func__, pic->index);*/
8816
8817 if (hevc->is_used_v4l) {
8818 vf->v4l_mem_handle
8819 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8820 if (hevc->mmu_enable) {
8821 vf->mm_box.bmmu_box = hevc->bmmu_box;
8822 vf->mm_box.bmmu_idx = VF_BUFFER_IDX(pic->BUF_index);
8823 vf->mm_box.mmu_box = hevc->mmu_box;
8824 vf->mm_box.mmu_idx = pic->index;
8825 }
8826 }
8827
8828#ifdef MULTI_INSTANCE_SUPPORT
8829 if (vdec_frame_based(vdec)) {
8830 vf->pts = pic->pts;
8831 vf->pts_us64 = pic->pts64;
8832 vf->timestamp = pic->timestamp;
8833 }
8834 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8835 stream_offset, &vf->pts, 0) != 0) { */
8836#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8837 else if (vdec->master == NULL) {
8838#else
8839 else {
8840#endif
8841#endif
8842 if (!vdec_dual(vdec) && pic->stream_frame_size > 50 &&
8843 (hevc->min_pic_size > pic->stream_frame_size ||
8844 (hevc->min_pic_size == 0))) {
8845 hevc->min_pic_size = pic->stream_frame_size;
8846
8847 if (hevc->min_pic_size < 1024 &&
8848 ((hevc->pts_lookup_margin > hevc->min_pic_size)
8849 || (hevc->pts_lookup_margin == 0)))
8850 hevc->pts_lookup_margin = hevc->min_pic_size;
8851 }
8852
8853 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8854 "call pts_lookup_offset_us64(0x%x)\n",
8855 stream_offset);
8856 if (pts_lookup_offset_us64
8857 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8858 &frame_size, hevc->pts_lookup_margin,
8859 &vf->pts_us64) != 0) {
8860#ifdef DEBUG_PTS
8861 hevc->pts_missed++;
8862#endif
8863 vf->pts = 0;
8864 vf->pts_us64 = 0;
8865 hevc->pts_continue_miss++;
8866 } else {
8867 hevc->pts_continue_miss = 0;
8868#ifdef DEBUG_PTS
8869 hevc->pts_hit++;
8870#endif
8871 }
8872#ifdef MULTI_INSTANCE_SUPPORT
8873#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8874 } else {
8875 vf->pts = 0;
8876 vf->pts_us64 = 0;
8877 }
8878#else
8879 }
8880#endif
8881#endif
8882 if (!vdec_dual(vdec) &&
8883 vdec_stream_based(vdec) && (vf->duration > 0)) {
8884 if ((vf->pts != 0) && (hevc->last_pts != 0)) {
8885 int diff = vf->pts - hevc->last_pts;
8886 if (diff > ((hevc->pts_continue_miss + 2)
8887 * DUR2PTS(vf->duration))) {
8888 vf->pts = 0;
8889 vf->pts_us64 = 0;
8890 }
8891 }
8892 }
8893
8894 if (pts_unstable && (hevc->frame_dur > 0))
8895 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8896
8897 fill_frame_info(hevc, pic, frame_size, vf->pts);
8898
8899 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8900 && hevc->get_frame_dur) {
8901 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8902
8903 if (pts_diff < 0) {
8904 hevc->pts_mode_switching_count++;
8905 hevc->pts_mode_recovery_count = 0;
8906
8907 if (hevc->pts_mode_switching_count >=
8908 PTS_MODE_SWITCHING_THRESHOLD) {
8909 hevc->pts_mode =
8910 PTS_NONE_REF_USE_DURATION;
8911 hevc_print(hevc, 0,
8912 "HEVC: switch to n_d mode.\n");
8913 }
8914
8915 } else {
8916 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8917
8918 hevc->pts_mode_recovery_count++;
8919 if (hevc->pts_mode_recovery_count > p) {
8920 hevc->pts_mode_switching_count = 0;
8921 hevc->pts_mode_recovery_count = 0;
8922 }
8923 }
8924 }
8925
8926 if (vf->pts != 0)
8927 hevc->last_lookup_pts = vf->pts;
8928
8929 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8930 && (slice_type != 2))
8931 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8932 hevc->last_pts = vf->pts;
8933
8934 if (vf->pts_us64 != 0)
8935 hevc->last_lookup_pts_us64 = vf->pts_us64;
8936
8937 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8938 && (slice_type != 2)) {
8939 vf->pts_us64 =
8940 hevc->last_pts_us64 +
8941 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8942 }
8943 hevc->last_pts_us64 = vf->pts_us64;
8944 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8945 hevc_print(hevc, 0,
8946 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8947 vf->pts, vf->pts_us64);
8948 }
8949
8950 /*
8951 *vf->index:
8952 *(1) vf->type is VIDTYPE_PROGRESSIVE
8953 * and vf->canvas0Addr != vf->canvas1Addr,
8954 * vf->index[7:0] is the index of top pic
8955 * vf->index[15:8] is the index of bot pic
8956 *(2) other cases,
8957 * only vf->index[7:0] is used
8958 * vf->index[15:8] == 0xff
8959 */
8960 vf->index = 0xff00 | pic->index;
8961#if 1
8962/*SUPPORT_10BIT*/
8963 if (pic->double_write_mode & 0x10) {
8964 /* double write only */
8965 vf->compBodyAddr = 0;
8966 vf->compHeadAddr = 0;
8967 } else {
8968
8969 if (hevc->mmu_enable) {
8970 vf->compBodyAddr = 0;
8971 vf->compHeadAddr = pic->header_adr;
8972 } else {
8973 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8974 vf->compHeadAddr = pic->mc_y_adr +
8975 pic->losless_comp_body_size;
8976 vf->mem_head_handle = NULL;
8977 }
8978
8979 /*head adr*/
8980 vf->canvas0Addr = vf->canvas1Addr = 0;
8981 }
8982 if (pic->double_write_mode) {
8983 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8984 vf->type |= nv_order;
8985
8986 if ((pic->double_write_mode == 3) &&
8987 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8988 vf->type |= VIDTYPE_COMPRESS;
8989 if (hevc->mmu_enable)
8990 vf->type |= VIDTYPE_SCATTER;
8991 }
8992#ifdef MULTI_INSTANCE_SUPPORT
8993 if (hevc->m_ins_flag &&
8994 (get_dbg_flag(hevc)
8995 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8996 vf->canvas0Addr = vf->canvas1Addr = -1;
8997 vf->plane_num = 2;
8998 vf->canvas0_config[0] =
8999 pic->canvas_config[0];
9000 vf->canvas0_config[1] =
9001 pic->canvas_config[1];
9002
9003 vf->canvas1_config[0] =
9004 pic->canvas_config[0];
9005 vf->canvas1_config[1] =
9006 pic->canvas_config[1];
9007
9008 } else
9009#endif
9010 vf->canvas0Addr = vf->canvas1Addr
9011 = spec2canvas(pic);
9012 } else {
9013 vf->canvas0Addr = vf->canvas1Addr = 0;
9014 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
9015 if (hevc->mmu_enable)
9016 vf->type |= VIDTYPE_SCATTER;
9017 }
9018 vf->compWidth = pic->width;
9019 vf->compHeight = pic->height;
9020 update_vf_memhandle(hevc, vf, pic);
9021 switch (pic->bit_depth_luma) {
9022 case 9:
9023 vf->bitdepth = BITDEPTH_Y9;
9024 break;
9025 case 10:
9026 vf->bitdepth = BITDEPTH_Y10;
9027 break;
9028 default:
9029 vf->bitdepth = BITDEPTH_Y8;
9030 break;
9031 }
9032 switch (pic->bit_depth_chroma) {
9033 case 9:
9034 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
9035 break;
9036 case 10:
9037 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
9038 break;
9039 default:
9040 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
9041 break;
9042 }
9043 if ((vf->type & VIDTYPE_COMPRESS) == 0)
9044 vf->bitdepth =
9045 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
9046 if (pic->mem_saving_mode == 1)
9047 vf->bitdepth |= BITDEPTH_SAVING_MODE;
9048#else
9049 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
9050 vf->type |= nv_order;
9051 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
9052#endif
9053 set_frame_info(hevc, vf, pic);
9054 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
9055 /* hevc_print(hevc, 0,
9056 "aaa: %d/%d, %d/%d\n",
9057 vf->width,vf->height, pic->width, pic->height); */
9058 vf->width = pic->width;
9059 vf->height = pic->height;
9060
9061 if (force_w_h != 0) {
9062 vf->width = (force_w_h >> 16) & 0xffff;
9063 vf->height = force_w_h & 0xffff;
9064 }
9065 if (force_fps & 0x100) {
9066 u32 rate = force_fps & 0xff;
9067
9068 if (rate)
9069 vf->duration = 96000/rate;
9070 else
9071 vf->duration = 0;
9072 }
9073 if (force_fps & 0x200) {
9074 vf->pts = 0;
9075 vf->pts_us64 = 0;
9076 }
9077 /*
9078 * !!! to do ...
9079 * need move below code to get_new_pic(),
9080 * hevc->xxx can only be used by current decoded pic
9081 */
9082 if (pic->conformance_window_flag &&
9083 (get_dbg_flag(hevc) &
9084 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
9085 unsigned int SubWidthC, SubHeightC;
9086
9087 switch (pic->chroma_format_idc) {
9088 case 1:
9089 SubWidthC = 2;
9090 SubHeightC = 2;
9091 break;
9092 case 2:
9093 SubWidthC = 2;
9094 SubHeightC = 1;
9095 break;
9096 default:
9097 SubWidthC = 1;
9098 SubHeightC = 1;
9099 break;
9100 }
9101 vf->width -= SubWidthC *
9102 (pic->conf_win_left_offset +
9103 pic->conf_win_right_offset);
9104 vf->height -= SubHeightC *
9105 (pic->conf_win_top_offset +
9106 pic->conf_win_bottom_offset);
9107
9108 vf->compWidth -= SubWidthC *
9109 (pic->conf_win_left_offset +
9110 pic->conf_win_right_offset);
9111 vf->compHeight -= SubHeightC *
9112 (pic->conf_win_top_offset +
9113 pic->conf_win_bottom_offset);
9114
9115 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
9116 hevc_print(hevc, 0,
9117 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
9118 pic->chroma_format_idc,
9119 pic->conf_win_left_offset,
9120 pic->conf_win_right_offset,
9121 pic->conf_win_top_offset,
9122 pic->conf_win_bottom_offset,
9123 vf->width, vf->height, vf->compWidth, vf->compHeight);
9124 }
9125
9126 vf->width = vf->width /
9127 get_double_write_ratio(hevc, pic->double_write_mode);
9128 vf->height = vf->height /
9129 get_double_write_ratio(hevc, pic->double_write_mode);
9130#ifdef HEVC_PIC_STRUCT_SUPPORT
9131 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
9132 struct vframe_s *vf2;
9133
9134 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9135 hevc_print(hevc, 0,
9136 "pic_struct = %d index 0x%x\n",
9137 pic->pic_struct,
9138 pic->index);
9139
9140 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
9141 hevc_print(hevc, 0,
9142 "fatal error, no available buffer slot.");
9143 return -1;
9144 }
9145 pic->vf_ref = 2;
9146 vf->duration = vf->duration>>1;
9147 memcpy(vf2, vf, sizeof(struct vframe_s));
9148
9149 if (pic->pic_struct == 3) {
9150 vf->type = VIDTYPE_INTERLACE_TOP
9151 | nv_order;
9152 vf2->type = VIDTYPE_INTERLACE_BOTTOM
9153 | nv_order;
9154 } else {
9155 vf->type = VIDTYPE_INTERLACE_BOTTOM
9156 | nv_order;
9157 vf2->type = VIDTYPE_INTERLACE_TOP
9158 | nv_order;
9159 }
9160 hevc->vf_pre_count++;
9161 decoder_do_frame_check(vdec, vf);
9162 kfifo_put(&hevc->display_q,
9163 (const struct vframe_s *)vf);
9164 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9165 hevc->vf_pre_count++;
9166 kfifo_put(&hevc->display_q,
9167 (const struct vframe_s *)vf2);
9168 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
9169 } else if (pic->pic_struct == 5
9170 || pic->pic_struct == 6) {
9171 struct vframe_s *vf2, *vf3;
9172
9173 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9174 hevc_print(hevc, 0,
9175 "pic_struct = %d index 0x%x\n",
9176 pic->pic_struct,
9177 pic->index);
9178
9179 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
9180 hevc_print(hevc, 0,
9181 "fatal error, no available buffer slot.");
9182 return -1;
9183 }
9184 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
9185 hevc_print(hevc, 0,
9186 "fatal error, no available buffer slot.");
9187 return -1;
9188 }
9189 pic->vf_ref = 3;
9190 vf->duration = vf->duration/3;
9191 memcpy(vf2, vf, sizeof(struct vframe_s));
9192 memcpy(vf3, vf, sizeof(struct vframe_s));
9193
9194 if (pic->pic_struct == 5) {
9195 vf->type = VIDTYPE_INTERLACE_TOP
9196 | nv_order;
9197 vf2->type = VIDTYPE_INTERLACE_BOTTOM
9198 | nv_order;
9199 vf3->type = VIDTYPE_INTERLACE_TOP
9200 | nv_order;
9201 } else {
9202 vf->type = VIDTYPE_INTERLACE_BOTTOM
9203 | nv_order;
9204 vf2->type = VIDTYPE_INTERLACE_TOP
9205 | nv_order;
9206 vf3->type = VIDTYPE_INTERLACE_BOTTOM
9207 | nv_order;
9208 }
9209 hevc->vf_pre_count++;
9210 decoder_do_frame_check(vdec, vf);
9211 kfifo_put(&hevc->display_q,
9212 (const struct vframe_s *)vf);
9213 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9214 hevc->vf_pre_count++;
9215 kfifo_put(&hevc->display_q,
9216 (const struct vframe_s *)vf2);
9217 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
9218 hevc->vf_pre_count++;
9219 kfifo_put(&hevc->display_q,
9220 (const struct vframe_s *)vf3);
9221 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
9222
9223 } else if (pic->pic_struct == 9
9224 || pic->pic_struct == 10) {
9225 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9226 hevc_print(hevc, 0,
9227 "pic_struct = %d index 0x%x\n",
9228 pic->pic_struct,
9229 pic->index);
9230
9231 pic->vf_ref = 1;
9232 /* process previous pending vf*/
9233 process_pending_vframe(hevc,
9234 pic, (pic->pic_struct == 9));
9235
9236 decoder_do_frame_check(vdec, vf);
9237 /* process current vf */
9238 kfifo_put(&hevc->pending_q,
9239 (const struct vframe_s *)vf);
9240 vf->height <<= 1;
9241 if (pic->pic_struct == 9) {
9242 vf->type = VIDTYPE_INTERLACE_TOP
9243 | nv_order | VIDTYPE_VIU_FIELD;
9244 process_pending_vframe(hevc,
9245 hevc->pre_bot_pic, 0);
9246 } else {
9247 vf->type = VIDTYPE_INTERLACE_BOTTOM |
9248 nv_order | VIDTYPE_VIU_FIELD;
9249 vf->index = (pic->index << 8) | 0xff;
9250 process_pending_vframe(hevc,
9251 hevc->pre_top_pic, 1);
9252 }
9253
9254 if (hevc->vf_pre_count == 0)
9255 hevc->vf_pre_count++;
9256
9257 /**/
9258 if (pic->pic_struct == 9)
9259 hevc->pre_top_pic = pic;
9260 else
9261 hevc->pre_bot_pic = pic;
9262
9263 } else if (pic->pic_struct == 11
9264 || pic->pic_struct == 12) {
9265 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9266 hevc_print(hevc, 0,
9267 "pic_struct = %d index 0x%x\n",
9268 pic->pic_struct,
9269 pic->index);
9270 pic->vf_ref = 1;
9271 /* process previous pending vf*/
9272 process_pending_vframe(hevc, pic,
9273 (pic->pic_struct == 11));
9274
9275 /* put current into pending q */
9276 vf->height <<= 1;
9277 if (pic->pic_struct == 11)
9278 vf->type = VIDTYPE_INTERLACE_TOP |
9279 nv_order | VIDTYPE_VIU_FIELD;
9280 else {
9281 vf->type = VIDTYPE_INTERLACE_BOTTOM |
9282 nv_order | VIDTYPE_VIU_FIELD;
9283 vf->index = (pic->index << 8) | 0xff;
9284 }
9285 decoder_do_frame_check(vdec, vf);
9286 kfifo_put(&hevc->pending_q,
9287 (const struct vframe_s *)vf);
9288 if (hevc->vf_pre_count == 0)
9289 hevc->vf_pre_count++;
9290
9291 /**/
9292 if (pic->pic_struct == 11)
9293 hevc->pre_top_pic = pic;
9294 else
9295 hevc->pre_bot_pic = pic;
9296
9297 } else {
9298 pic->vf_ref = 1;
9299
9300 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9301 hevc_print(hevc, 0,
9302 "pic_struct = %d index 0x%x\n",
9303 pic->pic_struct,
9304 pic->index);
9305
9306 switch (pic->pic_struct) {
9307 case 7:
9308 vf->duration <<= 1;
9309 break;
9310 case 8:
9311 vf->duration = vf->duration * 3;
9312 break;
9313 case 1:
9314 vf->height <<= 1;
9315 vf->type = VIDTYPE_INTERLACE_TOP |
9316 nv_order | VIDTYPE_VIU_FIELD;
9317 process_pending_vframe(hevc, pic, 1);
9318 hevc->pre_top_pic = pic;
9319 break;
9320 case 2:
9321 vf->height <<= 1;
9322 vf->type = VIDTYPE_INTERLACE_BOTTOM
9323 | nv_order
9324 | VIDTYPE_VIU_FIELD;
9325 process_pending_vframe(hevc, pic, 0);
9326 hevc->pre_bot_pic = pic;
9327 break;
9328 }
9329 hevc->vf_pre_count++;
9330 decoder_do_frame_check(vdec, vf);
9331 kfifo_put(&hevc->display_q,
9332 (const struct vframe_s *)vf);
9333 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9334 }
9335#else
9336 vf->type_original = vf->type;
9337 pic->vf_ref = 1;
9338 hevc->vf_pre_count++;
9339 decoder_do_frame_check(vdec, vf);
9340 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
9341 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9342#endif
9343 /*count info*/
9344 vdec_count_info(hevc->gvs, 0, stream_offset);
9345 hevc_update_gvs(hevc);
9346 memcpy(&tmp4x, hevc->gvs, sizeof(struct vdec_info));
9347 tmp4x.bit_depth_luma = hevc->bit_depth_luma;
9348 tmp4x.bit_depth_chroma = hevc->bit_depth_chroma;
9349 tmp4x.double_write_mode = get_double_write_mode(hevc);
9350 vdec_fill_vdec_frame(vdec, &hevc->vframe_qos, &tmp4x, vf, pic->hw_decode_time);
9351 vdec->vdec_fps_detec(vdec->id);
9352 hevc_print(hevc, H265_DEBUG_BUFMGR,
9353		"%s(type %d index 0x%x poc %d/%d) pts(%d,%lld) dur %d\n",
9354 __func__, vf->type, vf->index,
9355 get_pic_poc(hevc, vf->index & 0xff),
9356 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
9357 vf->pts, vf->pts_us64,
9358 vf->duration);
9359
9360 /*if (pic->vf_ref == hevc->vf_pre_count) {*/
9361 if (hevc->kpi_first_i_decoded == 0) {
9362 hevc->kpi_first_i_decoded = 1;
9363 pr_debug("[vdec_kpi][%s] First I frame decoded.\n",
9364 __func__);
9365 }
9366
9367 if (without_display_mode == 0) {
9368 vf_notify_receiver(hevc->provider_name,
9369 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9370 }
9371 else
9372 vh265_vf_put(vh265_vf_get(vdec), vdec);
9373 }
9374
9375 return 0;
9376}
9377
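/*
 * V4L2 end-of-stream: queue the dummy vframe flagged as an empty EOS
 * frame, backed either by a free capture buffer index or by a buffer
 * fetched directly from the v4l2 context, and signal the receiver.
 */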
9378static int notify_v4l_eos(struct vdec_s *vdec)
9379{
9380 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
9381 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
9382 struct vframe_s *vf = &hw->vframe_dummy;
9383 struct vdec_v4l2_buffer *fb = NULL;
9384 int index = INVALID_IDX;
9385 ulong expires;
9386
9387 if (hw->eos) {
9388 if (hw->is_used_v4l) {
9389 expires = jiffies + msecs_to_jiffies(2000);
9390 while (INVALID_IDX == (index = get_free_buf_idx(hw))) {
9391 if (time_after(jiffies, expires) ||
9392 v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx))
9393 break;
9394 }
9395
9396 if (index == INVALID_IDX) {
9397 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb) < 0) {
9398 pr_err("[%d] EOS get free buff fail.\n", ctx->id);
9399 return -1;
9400 }
9401 }
9402 }
9403
9404 vf->type |= VIDTYPE_V4L_EOS;
9405 vf->timestamp = ULONG_MAX;
9406 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
9407 vf->v4l_mem_handle = (index == INVALID_IDX) ? (ulong)fb :
9408 hw->m_BUF[index].v4l_ref_buf_addr;
9409 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
9410 vf_notify_receiver(vdec->vf_provider_name,
9411 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9412
9413		pr_info("[%d] H265 EOS notify.\n", (hw->is_used_v4l) ? ctx->id : vdec->id);
9414 }
9415
9416 return 0;
9417}
9418
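/*
 * SEI parsed directly from the stream by ucode: payload type 137
 * (mastering display colour volume) is read 16 bits at a time through
 * HEVC_SHIFTED_DATA/HEVC_SHIFT_COMMAND into primaries, white point and
 * luminance, and any trailing payload bytes are skipped.
 */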
9419static void process_nal_sei(struct hevc_state_s *hevc,
9420 int payload_type, int payload_size)
9421{
9422 unsigned short data;
9423
9424 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9425 hevc_print(hevc, 0,
9426 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
9427 payload_type, payload_size);
9428
9429 if (payload_type == 137) {
9430 int i, j;
9431 /* MASTERING_DISPLAY_COLOUR_VOLUME */
9432 if (payload_size >= 24) {
9433 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9434 hevc_print(hevc, 0,
9435 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
9436 for (i = 0; i < 3; i++) {
9437 for (j = 0; j < 2; j++) {
9438 data =
9439 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9440 hevc->primaries[i][j] = data;
9441 WRITE_HREG(HEVC_SHIFT_COMMAND,
9442 (1<<7)|16);
9443 if (get_dbg_flag(hevc) &
9444 H265_DEBUG_PRINT_SEI)
9445 hevc_print(hevc, 0,
9446 "\t\tprimaries[%1d][%1d] = %04x\n",
9447 i, j, hevc->primaries[i][j]);
9448 }
9449 }
9450 for (i = 0; i < 2; i++) {
9451 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9452 hevc->white_point[i] = data;
9453 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
9454 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9455 hevc_print(hevc, 0,
9456 "\t\twhite_point[%1d] = %04x\n",
9457 i, hevc->white_point[i]);
9458 }
9459 for (i = 0; i < 2; i++) {
9460 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9461 hevc->luminance[i] = data << 16;
9462 WRITE_HREG(HEVC_SHIFT_COMMAND,
9463 (1<<7)|16);
9464 data =
9465 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9466 hevc->luminance[i] |= data;
9467 WRITE_HREG(HEVC_SHIFT_COMMAND,
9468 (1<<7)|16);
9469 if (get_dbg_flag(hevc) &
9470 H265_DEBUG_PRINT_SEI)
9471 hevc_print(hevc, 0,
9472 "\t\tluminance[%1d] = %08x\n",
9473 i, hevc->luminance[i]);
9474 }
9475 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
9476 }
9477 payload_size -= 24;
9478 while (payload_size > 0) {
9479 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
9480 payload_size--;
9481 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
9482 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
9483 }
9484 }
9485}
9486
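/*
 * Error recovery for stream based input: stop the HEVC core, rebase the
 * 64-bit consumed byte counter (shift_byte_count) onto the current read
 * pointer so it stays consistent across the ring buffer wrap, soft reset
 * the decoder, reprogram the stream/workspace registers and restart with
 * the "skip to next start code" flag set.
 */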
9487static int hevc_recover(struct hevc_state_s *hevc)
9488{
9489 int ret = -1;
9490 u32 rem;
9491 u64 shift_byte_count64;
9492 unsigned int hevc_shift_byte_count;
9493 unsigned int hevc_stream_start_addr;
9494 unsigned int hevc_stream_end_addr;
9495 unsigned int hevc_stream_rd_ptr;
9496 unsigned int hevc_stream_wr_ptr;
9497 unsigned int hevc_stream_control;
9498 unsigned int hevc_stream_fifo_ctl;
9499 unsigned int hevc_stream_buf_size;
9500 struct vdec_s *vdec = hw_to_vdec(hevc);
9501
9502 mutex_lock(&vh265_mutex);
9503#if 0
9504 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9505 int ii;
9506
9507 for (ii = 0; ii < 4; ii++)
9508 hevc_print(hevc, 0,
9509 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9510 if (((i + ii) & 0xf) == 0)
9511 hevc_print(hevc, 0, "\n");
9512 }
9513#endif
9514#define ES_VID_MAN_RD_PTR (1<<0)
9515 if (!hevc->init_flag) {
9516 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
9517 mutex_unlock(&vh265_mutex);
9518 return ret;
9519 }
9520 amhevc_stop();
9521 msleep(20);
9522 ret = 0;
9523 /* reset */
9524 if (vdec_stream_based(vdec)) {
9525 STBUF_WRITE(&vdec->vbuf, set_rp,
9526 READ_VREG(HEVC_STREAM_RD_PTR));
9527
9528 if (!vdec->vbuf.no_parser)
9529 SET_PARSER_REG_MASK(PARSER_ES_CONTROL,
9530 ES_VID_MAN_RD_PTR);
9531 }
9532
9533 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
9534 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
9535 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
9536 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
9537 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
9538 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
9539 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
9540
9541 /* HEVC streaming buffer will reset and restart
9542 * from current hevc_stream_rd_ptr position
9543 */
9544 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
9545 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9546 if ((hevc->shift_byte_count_lo & (1 << 31))
9547 && ((hevc_shift_byte_count & (1 << 31)) == 0))
9548 hevc->shift_byte_count_hi++;
9549
9550 hevc->shift_byte_count_lo = hevc_shift_byte_count;
9551 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9552 hevc->shift_byte_count_lo;
9553 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9554 shift_byte_count64 -= rem;
9555 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9556
9557 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9558 shift_byte_count64 += hevc_stream_buf_size;
9559
9560 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9561 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
9562
9563 WRITE_VREG(DOS_SW_RESET3,
9564 /* (1<<2)| */
9565 (1 << 3) | (1 << 4) | (1 << 8) |
9566 (1 << 11) | (1 << 12) | (1 << 14)
9567 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9568 WRITE_VREG(DOS_SW_RESET3, 0);
9569
9570 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9571 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9572 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9573 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9574 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9575 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9576 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9577
9578 hevc_config_work_space_hw(hevc);
9579 decoder_hw_reset();
9580
9581 hevc->have_vps = 0;
9582 hevc->have_sps = 0;
9583 hevc->have_pps = 0;
9584
9585 hevc->have_valid_start_slice = 0;
9586
9587 if (get_double_write_mode(hevc) & 0x10)
9588 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
 9589			0x1 << 31 /* Enable NV21 reference read mode for MC */
9590 );
9591
9592 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9593 /* clear mailbox interrupt */
9594 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9595 /* enable mailbox interrupt */
9596 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9597 /* disable PSCALE for hardware sharing */
9598 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9599
9600 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9601
9602 WRITE_VREG(DEBUG_REG1, 0x0);
9603
9604 if ((error_handle_policy & 1) == 0) {
9605 if ((error_handle_policy & 4) == 0) {
9606 /* ucode auto mode, and do not check vps/sps/pps/idr */
9607 WRITE_VREG(NAL_SEARCH_CTL,
9608 0xc);
9609 } else {
9610 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9611 }
9612 } else {
9613 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9614 }
9615
9616 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9617 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9618 WRITE_VREG(NAL_SEARCH_CTL,
9619 READ_VREG(NAL_SEARCH_CTL)
9620 | ((parser_sei_enable & 0x7) << 17));
9621/*#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION*/
9622 WRITE_VREG(NAL_SEARCH_CTL,
9623 READ_VREG(NAL_SEARCH_CTL) |
9624 ((parser_dolby_vision_enable & 0x1) << 20));
9625/*#endif*/
9626 config_decode_mode(hevc);
9627 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9628
9629 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9630 /* amhevc_disable(); */
9631 /* return -EBUSY; */
9632 /* } */
9633#if 0
9634 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9635 int ii;
9636
9637 for (ii = 0; ii < 4; ii++) {
9638 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9639 hevc_print(hevc, 0,
9640 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9641 }
9642 if (((i + ii) & 0xf) == 0)
9643 hevc_print(hevc, 0, "\n");
9644 }
9645#endif
9646 init_pic_list_hw(hevc);
9647
9648 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9649 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9650
9651#ifdef SWAP_HEVC_UCODE
9652 if (!tee_enabled() && hevc->is_swap &&
9653 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9654 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9655 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9656 }
9657#endif
9658 amhevc_start();
9659
9660 /* skip, search next start code */
9661 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9662 hevc->skip_flag = 1;
9663#ifdef ERROR_HANDLE_DEBUG
9664 if (dbg_nal_skip_count & 0x20000) {
9665 dbg_nal_skip_count &= ~0x20000;
9666 mutex_unlock(&vh265_mutex);
9667 return ret;
9668 }
9669#endif
9670 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
 9671	/* Interrupt Amrisc to execute */
9672 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9673#ifdef MULTI_INSTANCE_SUPPORT
9674 if (!hevc->m_ins_flag)
9675#endif
9676 hevc->first_pic_after_recover = 1;
9677 mutex_unlock(&vh265_mutex);
9678 return ret;
9679}
9680
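/*
 * Dump the prefix and suffix AUX buffers (SEI / Dolby Vision metadata)
 * as 16-bit words. HEVC_AUX_DATA_SIZE appears to report the prefix size
 * in its high 16 bits and the suffix size in its low 16 bits, both in
 * 16-byte units (hence the << 4).
 */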
9681static void dump_aux_buf(struct hevc_state_s *hevc)
9682{
9683 int i;
9684 unsigned short *aux_adr =
9685 (unsigned short *)
9686 hevc->aux_addr;
9687 unsigned int aux_size =
9688 (READ_VREG(HEVC_AUX_DATA_SIZE)
9689 >> 16) << 4;
9690
9691 if (hevc->prefix_aux_size > 0) {
9692 hevc_print(hevc, 0,
9693 "prefix aux: (size %d)\n",
9694 aux_size);
9695 for (i = 0; i <
9696 (aux_size >> 1); i++) {
9697 hevc_print_cont(hevc, 0,
9698 "%04x ",
9699 *(aux_adr + i));
9700 if (((i + 1) & 0xf)
9701 == 0)
9702 hevc_print_cont(hevc,
9703 0, "\n");
9704 }
9705 }
9706 if (hevc->suffix_aux_size > 0) {
9707 aux_adr = (unsigned short *)
9708 (hevc->aux_addr +
9709 hevc->prefix_aux_size);
9710 aux_size =
9711 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9712 << 4;
9713 hevc_print(hevc, 0,
9714 "suffix aux: (size %d)\n",
9715 aux_size);
9716 for (i = 0; i <
9717 (aux_size >> 1); i++) {
9718 hevc_print_cont(hevc, 0,
9719 "%04x ", *(aux_adr + i));
9720 if (((i + 1) & 0xf) == 0)
9721 hevc_print_cont(hevc, 0, "\n");
9722 }
9723 }
9724}
9725
9726#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
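/*
 * Attach Dolby Vision metadata from the AUX buffer to the current picture:
 * a BL instance (or one whose metadata travels with the EL) stores it on
 * its own picture, an EL instance (vdec->master set) stores it for both
 * the base layer's current picture and its own, and plain frame-based
 * input stores it on its own picture as well.
 */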
9727static void dolby_get_meta(struct hevc_state_s *hevc)
9728{
9729 struct vdec_s *vdec = hw_to_vdec(hevc);
9730
9731 if (get_dbg_flag(hevc) &
9732 H265_DEBUG_BUFMGR_MORE)
9733 dump_aux_buf(hevc);
9734 if (vdec->dolby_meta_with_el || vdec->slave) {
9735 set_aux_data(hevc,
9736 hevc->cur_pic, 0, 0);
9737 } else if (vdec->master) {
9738 struct hevc_state_s *hevc_ba =
9739 (struct hevc_state_s *)
9740 vdec->master->private;
9741 /*do not use hevc_ba*/
9742 set_aux_data(hevc,
9743 hevc_ba->cur_pic,
9744 0, 1);
9745 set_aux_data(hevc,
9746 hevc->cur_pic, 0, 2);
9747 } else if (vdec_frame_based(vdec)) {
9748 set_aux_data(hevc,
9749 hevc->cur_pic, 1, 0);
9750 }
9751}
9752#endif
9753
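/* Latch the ucode-reported decode info: the low byte accumulates into
 * start_decoding_flag, the next byte selects the RPS set id.
 */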
9754static void read_decode_info(struct hevc_state_s *hevc)
9755{
9756 uint32_t decode_info =
9757 READ_HREG(HEVC_DECODE_INFO);
9758 hevc->start_decoding_flag |=
9759 (decode_info & 0xff);
9760 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9761}
9762
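/*
 * Fill the v4l2 picture-size info: visible and 32-aligned coded dimensions
 * are divided by the double-write ratio for the chosen dw mode, and
 * dpb_size comes from the working picture count.
 */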
9763static int vh265_get_ps_info(struct hevc_state_s *hevc, int width, int height, struct aml_vdec_ps_infos *ps)
9764{
9765 int dw_mode = v4l_parser_get_double_write_mode(hevc, width, height);
9766
9767 ps->visible_width = width / get_double_write_ratio(hevc, dw_mode);
9768 ps->visible_height = height / get_double_write_ratio(hevc, dw_mode);
9769 ps->coded_width = ALIGN(width, 32) / get_double_write_ratio(hevc, dw_mode);
9770 ps->coded_height = ALIGN(height, 32) / get_double_write_ratio(hevc, dw_mode);
9771 ps->dpb_size = v4l_parser_work_pic_num(hevc);
9772
9773 return 0;
9774}
9775
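/*
 * Detect an in-stream resolution change when picture parameters come from
 * the ucode (v4l2 path). On a width/height change the new PS info is pushed
 * to the v4l2 layer, the current output is flushed and an EOS is signalled
 * so userspace can re-negotiate buffers; returns 1 when a change was handled.
 */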
9776static int v4l_res_change(struct hevc_state_s *hevc, union param_u *rpm_param)
9777{
9778 struct aml_vcodec_ctx *ctx =
9779 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
9780 int ret = 0;
9781
9782 if (ctx->param_sets_from_ucode &&
9783 hevc->res_ch_flag == 0) {
9784 struct aml_vdec_ps_infos ps;
9785 int width = rpm_param->p.pic_width_in_luma_samples;
9786 int height = rpm_param->p.pic_height_in_luma_samples;
9787 if ((hevc->pic_w != 0 &&
9788 hevc->pic_h != 0) &&
9789 (hevc->pic_w != width ||
9790 hevc->pic_h != height)) {
9791 hevc_print(hevc, 0,
9792 "v4l_res_change Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
9793 hevc->pic_w, hevc->pic_h,
9794 width,
9795 height,
9796 hevc->interlace_flag);
9797
9798 vh265_get_ps_info(hevc, width, height, &ps);
9799 vdec_v4l_set_ps_infos(ctx, &ps);
9800 vdec_v4l_res_ch_event(ctx);
9801 hevc->v4l_params_parsed = false;
9802 hevc->res_ch_flag = 1;
9803 hevc->eos = 1;
9804 flush_output(hevc, NULL);
9805 //del_timer_sync(&hevc->timer);
9806 notify_v4l_eos(hw_to_vdec(hevc));
9807
9808 ret = 1;
9809 }
9810 }
9811
9812 return ret;
9813}
9814
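/*
 * Apparently a workaround for 160x96 streams decoded with
 * double_write_mode 0x10: request that the first NALs be skipped
 * (up to skip_nal_count on <= TXLX chips, one NAL otherwise)
 * before normal decoding starts.
 */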
9815static int hevc_skip_nal(struct hevc_state_s *hevc)
9816{
9817 if ((hevc->pic_h == 96) && (hevc->pic_w == 160) &&
9818 (get_double_write_mode(hevc) == 0x10)) {
9819 if (get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_TXLX) {
9820 if (hevc->skip_nal_count < skip_nal_count)
9821 return 1;
9822 } else {
9823 if (hevc->skip_nal_count < 1)
9824 return 1;
9825 }
9826 }
9827 return 0;
9828}
9829
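/*
 * Threaded half of the decoder interrupt. Dispatches on the latched
 * dec_status: error recovery for the single-instance path, buffer-empty /
 * "again" handling, picture-done bookkeeping and display queueing, Dolby
 * Vision layer switching, SEI payloads, NAL search results and slice
 * segment headers.
 */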
9830static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9831{
9832 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9833 unsigned int dec_status = hevc->dec_status;
9834 int i, ret;
9835
9836 struct vdec_s *vdec = hw_to_vdec(hevc);
9837
9838 if (hevc->eos)
9839 return IRQ_HANDLED;
9840 if (
9841#ifdef MULTI_INSTANCE_SUPPORT
9842 (!hevc->m_ins_flag) &&
9843#endif
9844 hevc->error_flag == 1) {
9845 if ((error_handle_policy & 0x10) == 0) {
9846 if (hevc->cur_pic) {
9847 int current_lcu_idx =
9848 READ_VREG(HEVC_PARSER_LCU_START)
9849 & 0xffffff;
9850 if (current_lcu_idx <
9851 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9852 hevc->cur_pic->error_mark = 1;
9853
9854 }
9855 }
9856 if ((error_handle_policy & 1) == 0) {
9857 hevc->error_skip_nal_count = 1;
 9858			/* manual search NAL: skip error_skip_nal_count
 9859			 * NALs and trigger the HEVC_NAL_SEARCH_DONE irq
 9860			 */
9861 WRITE_VREG(NAL_SEARCH_CTL,
9862 (error_skip_nal_count << 4) | 0x1);
9863 } else {
9864 hevc->error_skip_nal_count = error_skip_nal_count;
9865 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9866 }
9867 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9868#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9869 || vdec->master
9870 || vdec->slave
9871#endif
9872 ) {
9873 WRITE_VREG(NAL_SEARCH_CTL,
9874 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9875 }
9876 WRITE_VREG(NAL_SEARCH_CTL,
9877 READ_VREG(NAL_SEARCH_CTL)
9878 | ((parser_sei_enable & 0x7) << 17));
9879/*#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION*/
9880 WRITE_VREG(NAL_SEARCH_CTL,
9881 READ_VREG(NAL_SEARCH_CTL) |
9882 ((parser_dolby_vision_enable & 0x1) << 20));
9883/*#endif*/
9884 config_decode_mode(hevc);
9885 /* search new nal */
9886 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
 9887		/* Interrupt Amrisc to execute */
9888 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9889
9890 /* hevc_print(hevc, 0,
9891 *"%s: error handle\n", __func__);
9892 */
9893 hevc->error_flag = 2;
9894 return IRQ_HANDLED;
9895 } else if (
9896#ifdef MULTI_INSTANCE_SUPPORT
9897 (!hevc->m_ins_flag) &&
9898#endif
9899 hevc->error_flag == 3) {
9900 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9901 hevc_recover(hevc);
9902 hevc->error_flag = 0;
9903
9904 if ((error_handle_policy & 0x10) == 0) {
9905 if (hevc->cur_pic) {
9906 int current_lcu_idx =
9907 READ_VREG(HEVC_PARSER_LCU_START)
9908 & 0xffffff;
9909 if (current_lcu_idx <
9910 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9911 hevc->cur_pic->error_mark = 1;
9912
9913 }
9914 }
9915 if ((error_handle_policy & 1) == 0) {
 9916			/* Need to skip some data when
 9917			 * error_flag 3 is triggered,
 9918			 */
 9919			/* to avoid hevc_recover() being called
 9920			 * many times at the same bitstream position
 9921			 */
9922 hevc->error_skip_nal_count = 1;
 9923			/* manual search NAL: skip error_skip_nal_count
 9924			 * NALs and trigger the HEVC_NAL_SEARCH_DONE irq
 9925			 */
9926 WRITE_VREG(NAL_SEARCH_CTL,
9927 (error_skip_nal_count << 4) | 0x1);
9928 }
9929
9930 if ((error_handle_policy & 0x2) == 0) {
9931 hevc->have_vps = 1;
9932 hevc->have_sps = 1;
9933 hevc->have_pps = 1;
9934 }
9935 return IRQ_HANDLED;
9936 }
9937 if (!hevc->m_ins_flag) {
9938 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9939 if ((hevc->shift_byte_count_lo & (1 << 31))
9940 && ((i & (1 << 31)) == 0))
9941 hevc->shift_byte_count_hi++;
9942 hevc->shift_byte_count_lo = i;
9943 }
9944#ifdef MULTI_INSTANCE_SUPPORT
9945 mutex_lock(&hevc->chunks_mutex);
9946 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9947 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9948 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9949 && (hevc->chunk)) {
9950 hevc->cur_pic->pts = hevc->chunk->pts;
9951 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9952 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9953 }
9954 mutex_unlock(&hevc->chunks_mutex);
9955
9956 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9957 dec_status == HEVC_DECODE_BUFEMPTY2) {
9958 if (hevc->m_ins_flag) {
9959 read_decode_info(hevc);
9960 if (vdec_frame_based(hw_to_vdec(hevc))) {
9961 hevc->empty_flag = 1;
9962 /*suffix sei or dv meta*/
9963 set_aux_data(hevc, hevc->cur_pic, 1, 0);
9964 goto pic_done;
9965 } else {
9966 if (
9967#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9968 vdec->master ||
9969 vdec->slave ||
9970#endif
9971 (data_resend_policy & 0x1)) {
9972 hevc->dec_result = DEC_RESULT_AGAIN;
9973 amhevc_stop();
9974 restore_decode_state(hevc);
9975 } else
9976 hevc->dec_result = DEC_RESULT_GET_DATA;
9977 }
9978 reset_process_time(hevc);
9979 vdec_schedule_work(&hevc->work);
9980 }
9981 return IRQ_HANDLED;
9982 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9983 (dec_status == HEVC_NAL_DECODE_DONE)
9984 ) {
9985 if (hevc->m_ins_flag) {
9986 read_decode_info(hevc);
9987 if (vdec_frame_based(hw_to_vdec(hevc))) {
9988 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9989 hevc->empty_flag = 1;
9990 /*suffix sei or dv meta*/
9991 set_aux_data(hevc, hevc->cur_pic, 1, 0);
9992 goto pic_done;
9993 } else {
9994 hevc->dec_result = DEC_RESULT_AGAIN;
9995 amhevc_stop();
9996 restore_decode_state(hevc);
9997 }
9998
9999 reset_process_time(hevc);
10000 vdec_schedule_work(&hevc->work);
10001 }
10002
10003 return IRQ_HANDLED;
10004 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
10005 if (hevc->m_ins_flag) {
10006 struct PIC_s *pic;
10007 struct PIC_s *pic_display;
10008 int decoded_poc;
10009
10010 if (vdec->mvfrm)
10011 vdec->mvfrm->hw_decode_time =
10012 local_clock() - vdec->mvfrm->hw_decode_start;
10013#ifdef DETREFILL_ENABLE
10014 if (hevc->is_swap &&
10015 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
10016 if (hevc->detbuf_adr_virt && hevc->delrefill_check
10017 && READ_VREG(HEVC_SAO_DBG_MODE0))
10018 hevc->delrefill_check = 2;
10019 }
10020#endif
10021 hevc->empty_flag = 0;
10022pic_done:
10023 if (input_frame_based(hw_to_vdec(hevc)) &&
10024 frmbase_cont_bitlevel != 0 &&
10025 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
10026 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
10027 > frmbase_cont_bitlevel)) {
 10028				/* handle the case: multiple pictures in one packet */
 10029				hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
 10030					"%s has more data index=%d, size=0x%x shiftcnt=0x%x\n",
10031 __func__,
10032 hevc->decode_idx, hevc->decode_size,
10033 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
10034 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10035 start_process_time(hevc);
10036 return IRQ_HANDLED;
10037 }
10038
10039 read_decode_info(hevc);
10040 get_picture_qos_info(hevc);
10041#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10042 hevc->start_parser_type = 0;
10043 hevc->switch_dvlayer_flag = 0;
10044#endif
10045 hevc->decoded_poc = hevc->curr_POC;
10046 hevc->decoding_pic = NULL;
10047 hevc->dec_result = DEC_RESULT_DONE;
10048#ifdef DETREFILL_ENABLE
10049 if (hevc->is_swap &&
10050 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
10051 if (hevc->delrefill_check != 2)
10052#endif
10053
10054 amhevc_stop();
10055
10056 reset_process_time(hevc);
10057
10058 if (hevc->vf_pre_count == 0 || hevc->ip_mode) {
10059 decoded_poc = hevc->curr_POC;
10060 pic = get_pic_by_POC(hevc, decoded_poc);
10061 if (pic && (pic->POC != INVALID_POC)) {
10062 /*PB skip control */
10063 if (pic->error_mark == 0
10064 && hevc->PB_skip_mode == 1) {
10065 /* start decoding after
10066 * first I
10067 */
10068 hevc->ignore_bufmgr_error |= 0x1;
10069 }
10070 if (hevc->ignore_bufmgr_error & 1) {
10071 if (hevc->PB_skip_count_after_decoding > 0) {
10072 hevc->PB_skip_count_after_decoding--;
10073 } else {
10074 /* start displaying */
10075 hevc->ignore_bufmgr_error |= 0x2;
10076 }
10077 }
10078 if (hevc->mmu_enable
10079 && ((hevc->double_write_mode & 0x10) == 0)) {
10080 if (!hevc->m_ins_flag) {
10081 hevc->used_4k_num =
10082 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
10083
10084 if ((!is_skip_decoding(hevc, pic)) &&
10085 (hevc->used_4k_num >= 0) &&
10086 (hevc->cur_pic->scatter_alloc
10087 == 1)) {
10088 hevc_print(hevc,
10089 H265_DEBUG_BUFMGR_MORE,
10090 "%s pic index %d scatter_alloc %d page_start %d\n",
10091 "decoder_mmu_box_free_idx_tail",
10092 hevc->cur_pic->index,
10093 hevc->cur_pic->scatter_alloc,
10094 hevc->used_4k_num);
10095 decoder_mmu_box_free_idx_tail(
10096 hevc->mmu_box,
10097 hevc->cur_pic->index,
10098 hevc->used_4k_num);
10099 hevc->cur_pic->scatter_alloc
10100 = 2;
10101 }
10102 hevc->used_4k_num = -1;
10103 }
10104 }
10105
10106 pic->output_mark = 1;
10107 pic->recon_mark = 1;
10108 if (vdec->mvfrm) {
10109 pic->frame_size =
10110 vdec->mvfrm->frame_size;
10111 pic->hw_decode_time =
10112 (u32)vdec->mvfrm->hw_decode_time;
10113 }
10114 }
 10115			/* Detect whether the first frame has an over-decode error */
10116 if ((!vdec_dual(vdec)) &&
10117 hevc->empty_flag == 0) {
10118 hevc->over_decode =
10119 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
10120 if (hevc->over_decode)
10121 hevc_print(hevc, 0,
10122 "!!!Over decode %d\n", __LINE__);
10123 }
10124 check_pic_decoded_error(hevc,
10125 READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff);
10126 if (hevc->cur_pic != NULL &&
10127 (READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff) == 0
10128 && (hevc->lcu_x_num * hevc->lcu_y_num != 1))
10129 hevc->cur_pic->error_mark = 1;
10130#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10131force_output:
10132#endif
10133 pic_display = output_pic(hevc, 1);
10134 if (pic_display) {
10135 if ((pic_display->error_mark &&
10136 ((hevc->ignore_bufmgr_error &
10137 0x2) == 0))
10138 || (get_dbg_flag(hevc) &
10139 H265_DEBUG_DISPLAY_CUR_FRAME)
10140 || (get_dbg_flag(hevc) &
10141 H265_DEBUG_NO_DISPLAY)) {
10142 pic_display->output_ready = 0;
10143 if (get_dbg_flag(hevc) &
10144 H265_DEBUG_BUFMGR) {
10145 hevc_print(hevc, 0,
10146 "[BM] Display: POC %d, ",
10147 pic_display->POC);
10148 hevc_print_cont(hevc, 0,
10149 "decoding index %d ==> ",
10150 pic_display->
10151 decode_idx);
10152 hevc_print_cont(hevc, 0,
 10153					"Debug or err, recycle it\n");
10154 }
10155 } else {
10156 if ((pic_display->
10157 slice_type != 2) && !pic_display->ip_mode) {
10158 pic_display->output_ready = 0;
10159 } else {
10160 prepare_display_buf
10161 (hevc,
10162 pic_display);
10163 hevc->first_pic_flag = 1;
10164 }
10165 }
10166 }
10167 }
10168
10169 vdec_schedule_work(&hevc->work);
10170 }
10171
10172 return IRQ_HANDLED;
10173#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10174 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
10175 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
10176 if (hevc->m_ins_flag) {
10177 unsigned char next_parser_type =
10178 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
10179 read_decode_info(hevc);
10180
10181 if (vdec->slave &&
10182 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
10183 /*cur is base, found enhance*/
10184 struct hevc_state_s *hevc_el =
10185 (struct hevc_state_s *)
10186 vdec->slave->private;
10187 hevc->switch_dvlayer_flag = 1;
10188 hevc->no_switch_dvlayer_count = 0;
10189 hevc_el->start_parser_type =
10190 next_parser_type;
10191 hevc_print(hevc, H265_DEBUG_DV,
10192 "switch (poc %d) to el\n",
10193 hevc->cur_pic ?
10194 hevc->cur_pic->POC :
10195 INVALID_POC);
10196 } else if (vdec->master &&
10197 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
10198 /*cur is enhance, found base*/
10199 struct hevc_state_s *hevc_ba =
10200 (struct hevc_state_s *)
10201 vdec->master->private;
10202 hevc->switch_dvlayer_flag = 1;
10203 hevc->no_switch_dvlayer_count = 0;
10204 hevc_ba->start_parser_type =
10205 next_parser_type;
10206 hevc_print(hevc, H265_DEBUG_DV,
10207 "switch (poc %d) to bl\n",
10208 hevc->cur_pic ?
10209 hevc->cur_pic->POC :
10210 INVALID_POC);
10211 } else {
10212 hevc->switch_dvlayer_flag = 0;
10213 hevc->start_parser_type =
10214 next_parser_type;
10215 hevc->no_switch_dvlayer_count++;
10216 hevc_print(hevc, H265_DEBUG_DV,
10217 "%s: no_switch_dvlayer_count = %d\n",
10218 vdec->master ? "el" : "bl",
10219 hevc->no_switch_dvlayer_count);
10220 if (vdec->slave &&
10221 dolby_el_flush_th != 0 &&
10222 hevc->no_switch_dvlayer_count >
10223 dolby_el_flush_th) {
10224 struct hevc_state_s *hevc_el =
10225 (struct hevc_state_s *)
10226 vdec->slave->private;
10227 struct PIC_s *el_pic;
10228 check_pic_decoded_error(hevc_el,
10229 hevc_el->pic_decoded_lcu_idx);
10230 el_pic = get_pic_by_POC(hevc_el,
10231 hevc_el->curr_POC);
10232 hevc_el->curr_POC = INVALID_POC;
10233 hevc_el->m_pocRandomAccess = MAX_INT;
10234 flush_output(hevc_el, el_pic);
 10235					hevc_el->decoded_poc = INVALID_POC;
 10236					/* flush_output already called */
10237 hevc_el->decoding_pic = NULL;
10238 hevc->no_switch_dvlayer_count = 0;
10239 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
10240 hevc_print(hevc, 0,
10241 "no el anymore, flush_output el\n");
10242 }
10243 }
10244 hevc->decoded_poc = hevc->curr_POC;
10245 hevc->decoding_pic = NULL;
10246 hevc->dec_result = DEC_RESULT_DONE;
10247 amhevc_stop();
10248 reset_process_time(hevc);
10249 if (aux_data_is_avaible(hevc))
10250 dolby_get_meta(hevc);
10251 if(hevc->cur_pic && hevc->cur_pic->slice_type == 2 &&
10252 hevc->vf_pre_count == 0) {
10253 hevc_print(hevc, 0,
10254 "first slice_type %x no_switch_dvlayer_count %x\n",
10255 hevc->cur_pic->slice_type,
10256 hevc->no_switch_dvlayer_count);
10257 goto force_output;
10258 }
10259 vdec_schedule_work(&hevc->work);
10260 }
10261
10262 return IRQ_HANDLED;
10263#endif
10264 }
10265
10266#endif
10267
10268 if (dec_status == HEVC_SEI_DAT) {
10269 if (!hevc->m_ins_flag) {
10270 int payload_type =
10271 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
10272 int payload_size =
10273 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
10274 process_nal_sei(hevc,
10275 payload_type, payload_size);
10276 }
10277 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
10278 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
10279 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
10280 int parse_type = HEVC_DISCARD_NAL;
10281
10282 hevc->error_watchdog_count = 0;
10283 hevc->error_skip_nal_wt_cnt = 0;
10284#ifdef MULTI_INSTANCE_SUPPORT
10285 if (hevc->m_ins_flag)
10286 reset_process_time(hevc);
10287#endif
10288 if (slice_parse_begin > 0 &&
10289 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
10290 hevc_print(hevc, 0,
10291 "nal type %d, discard %d\n", naltype,
10292 slice_parse_begin);
10293 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
10294 slice_parse_begin--;
10295 }
10296 if (naltype == NAL_UNIT_EOS) {
10297 struct PIC_s *pic;
10298
10299 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
10300#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10301 if ((vdec_dual(vdec)) && aux_data_is_avaible(hevc)) {
10302 if (hevc->decoding_pic)
10303 dolby_get_meta(hevc);
10304 }
10305#endif
 10306			/* Detect whether the frame has an over-decode error */
10307 if ((!vdec_dual(vdec)) &&
10308 hevc->empty_flag == 0) {
10309 hevc->over_decode =
10310 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
10311 if (hevc->over_decode)
10312 hevc_print(hevc, 0,
10313 "!!!Over decode %d\n", __LINE__);
10314 }
10315 check_pic_decoded_error(hevc,
10316 hevc->pic_decoded_lcu_idx);
10317 pic = get_pic_by_POC(hevc, hevc->curr_POC);
10318 hevc->curr_POC = INVALID_POC;
10319 /* add to fix RAP_B_Bossen_1 */
10320 hevc->m_pocRandomAccess = MAX_INT;
10321 flush_output(hevc, pic);
10322 clear_poc_flag(hevc);
10323 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
 10324			/* Interrupt Amrisc to execute */
10325 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10326#ifdef MULTI_INSTANCE_SUPPORT
10327 if (hevc->m_ins_flag) {
 10328				hevc->decoded_poc = INVALID_POC;
 10329				/* flush_output already called */
10330 hevc->decoding_pic = NULL;
10331 hevc->dec_result = DEC_RESULT_DONE;
10332 amhevc_stop();
10333
10334 vdec_schedule_work(&hevc->work);
10335 }
10336#endif
10337 return IRQ_HANDLED;
10338 }
10339
10340 if (
10341#ifdef MULTI_INSTANCE_SUPPORT
10342 (!hevc->m_ins_flag) &&
10343#endif
10344 hevc->error_skip_nal_count > 0) {
10345 hevc_print(hevc, 0,
10346 "nal type %d, discard %d\n", naltype,
10347 hevc->error_skip_nal_count);
10348 hevc->error_skip_nal_count--;
10349 if (hevc->error_skip_nal_count == 0) {
10350 hevc_recover(hevc);
10351 hevc->error_flag = 0;
10352 if ((error_handle_policy & 0x2) == 0) {
10353 hevc->have_vps = 1;
10354 hevc->have_sps = 1;
10355 hevc->have_pps = 1;
10356 }
10357 return IRQ_HANDLED;
10358 }
10359 } else if (naltype == NAL_UNIT_VPS) {
10360 parse_type = HEVC_NAL_UNIT_VPS;
10361 hevc->have_vps = 1;
10362#ifdef ERROR_HANDLE_DEBUG
10363 if (dbg_nal_skip_flag & 1)
10364 parse_type = HEVC_DISCARD_NAL;
10365#endif
10366 } else if (hevc->have_vps) {
10367 if (naltype == NAL_UNIT_SPS) {
10368 parse_type = HEVC_NAL_UNIT_SPS;
10369 hevc->have_sps = 1;
10370#ifdef ERROR_HANDLE_DEBUG
10371 if (dbg_nal_skip_flag & 2)
10372 parse_type = HEVC_DISCARD_NAL;
10373#endif
10374 } else if (naltype == NAL_UNIT_PPS) {
10375 parse_type = HEVC_NAL_UNIT_PPS;
10376 hevc->have_pps = 1;
10377#ifdef ERROR_HANDLE_DEBUG
10378 if (dbg_nal_skip_flag & 4)
10379 parse_type = HEVC_DISCARD_NAL;
10380#endif
10381 } else if (hevc->have_sps && hevc->have_pps) {
10382 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
10383
10384 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
10385 (naltype ==
10386 NAL_UNIT_CODED_SLICE_IDR_N_LP)
10387 || (naltype ==
10388 NAL_UNIT_CODED_SLICE_CRA)
10389 || (naltype ==
10390 NAL_UNIT_CODED_SLICE_BLA)
10391 || (naltype ==
10392 NAL_UNIT_CODED_SLICE_BLANT)
10393 || (naltype ==
10394 NAL_UNIT_CODED_SLICE_BLA_N_LP)
10395 ) {
10396 if (slice_parse_begin > 0) {
10397 hevc_print(hevc, 0,
10398 "discard %d, for debugging\n",
10399 slice_parse_begin);
10400 slice_parse_begin--;
10401 } else {
10402 parse_type = seg;
10403 }
10404 hevc->have_valid_start_slice = 1;
10405 } else if (naltype <=
10406 NAL_UNIT_CODED_SLICE_CRA
10407 && (hevc->have_valid_start_slice
10408 || (hevc->PB_skip_mode != 3))) {
10409 if (slice_parse_begin > 0) {
10410 hevc_print(hevc, 0,
10411 "discard %d, dd\n",
10412 slice_parse_begin);
10413 slice_parse_begin--;
10414 } else
10415 parse_type = seg;
10416
10417 }
10418 }
10419 }
10420 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
10421 && hevc->have_valid_start_slice &&
10422 hevc->error_flag == 0) {
10423 if ((get_dbg_flag(hevc) &
10424 H265_DEBUG_MAN_SEARCH_NAL) == 0
10425 /* && (!hevc->m_ins_flag)*/) {
 10426				/* auto parser NAL; do not check
 10427				 * vps/sps/pps/idr
 10428				 */
10429 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
10430 }
10431
10432 if ((get_dbg_flag(hevc) &
10433 H265_DEBUG_NO_EOS_SEARCH_DONE)
10434#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10435 || vdec->master
10436 || vdec->slave
10437#endif
10438 ) {
10439 WRITE_VREG(NAL_SEARCH_CTL,
10440 READ_VREG(NAL_SEARCH_CTL) |
10441 0x10000);
10442 }
10443 WRITE_VREG(NAL_SEARCH_CTL,
10444 READ_VREG(NAL_SEARCH_CTL)
10445 | ((parser_sei_enable & 0x7) << 17));
10446/*#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION*/
10447 WRITE_VREG(NAL_SEARCH_CTL,
10448 READ_VREG(NAL_SEARCH_CTL) |
10449 ((parser_dolby_vision_enable & 0x1) << 20));
10450/*#endif*/
10451 config_decode_mode(hevc);
10452 }
10453
10454 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
10455 hevc_print(hevc, 0,
10456 "naltype = %d parse_type %d\n %d %d %d %d\n",
10457 naltype, parse_type, hevc->have_vps,
10458 hevc->have_sps, hevc->have_pps,
10459 hevc->have_valid_start_slice);
10460 }
10461
10462 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
 10463		/* Interrupt Amrisc to execute */
10464 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10465#ifdef MULTI_INSTANCE_SUPPORT
10466 if (hevc->m_ins_flag)
10467 start_process_time(hevc);
10468#endif
10469 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
10470#ifdef MULTI_INSTANCE_SUPPORT
10471 if (hevc->m_ins_flag) {
10472 reset_process_time(hevc);
10473 read_decode_info(hevc);
10474
10475 }
10476#endif
10477 if (hevc->start_decoding_time > 0) {
10478 u32 process_time = 1000*
10479 (jiffies - hevc->start_decoding_time)/HZ;
10480 if (process_time > max_decoding_time)
10481 max_decoding_time = process_time;
10482 }
10483
10484 hevc->error_watchdog_count = 0;
10485 if (hevc->pic_list_init_flag == 2) {
10486 hevc->pic_list_init_flag = 3;
10487 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
10488 if (hevc->kpi_first_i_comming == 0) {
10489 hevc->kpi_first_i_comming = 1;
10490 pr_debug("[vdec_kpi][%s] First I frame coming.\n",
10491 __func__);
10492 }
10493 } else if (hevc->wait_buf == 0) {
10494 u32 vui_time_scale;
10495 u32 vui_num_units_in_tick;
10496 unsigned char reconfig_flag = 0;
10497
10498 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
10499 get_rpm_param(&hevc->param);
10500 else {
10501
10502 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
10503 int ii;
10504
10505 for (ii = 0; ii < 4; ii++) {
10506 hevc->param.l.data[i + ii] =
10507 hevc->rpm_ptr[i + 3
10508 - ii];
10509 }
10510 }
10511#ifdef SEND_LMEM_WITH_RPM
10512 check_head_error(hevc);
10513#endif
10514 }
10515 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
10516 hevc_print(hevc, 0,
10517 "rpm_param: (%d)\n", hevc->slice_idx);
10518 hevc->slice_idx++;
10519 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
10520 hevc_print_cont(hevc, 0,
10521 "%04x ", hevc->param.l.data[i]);
10522 if (((i + 1) & 0xf) == 0)
10523 hevc_print_cont(hevc, 0, "\n");
10524 }
10525
10526 hevc_print(hevc, 0,
10527 "vui_timing_info: %x, %x, %x, %x\n",
10528 hevc->param.p.vui_num_units_in_tick_hi,
10529 hevc->param.p.vui_num_units_in_tick_lo,
10530 hevc->param.p.vui_time_scale_hi,
10531 hevc->param.p.vui_time_scale_lo);
10532 }
10533
10534 if (hevc->is_used_v4l) {
10535 struct aml_vcodec_ctx *ctx =
10536 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
10537 if (!v4l_res_change(hevc, &hevc->param)) {
10538 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
10539 struct aml_vdec_ps_infos ps;
10540 int width = hevc->param.p.pic_width_in_luma_samples;
10541 int height = hevc->param.p.pic_height_in_luma_samples;
10542
10543 pr_debug("set ucode parse\n");
10544 vh265_get_ps_info(hevc, width, height, &ps);
10545 /*notice the v4l2 codec.*/
10546 vdec_v4l_set_ps_infos(ctx, &ps);
10547 hevc->v4l_params_parsed = true;
10548 hevc->dec_result = DEC_RESULT_AGAIN;
10549 amhevc_stop();
10550 restore_decode_state(hevc);
10551 reset_process_time(hevc);
10552 vdec_schedule_work(&hevc->work);
10553 return IRQ_HANDLED;
10554 }
 10555			} else {
10556 pr_debug("resolution change\n");
10557 hevc->dec_result = DEC_RESULT_AGAIN;
10558 amhevc_stop();
10559 restore_decode_state(hevc);
10560 reset_process_time(hevc);
10561 vdec_schedule_work(&hevc->work);
10562 return IRQ_HANDLED;
10563
10564 }
10565 }
10566
10567 if (
10568#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10569 vdec->master == NULL &&
10570 vdec->slave == NULL &&
10571#endif
10572 aux_data_is_avaible(hevc)
10573 ) {
10574
10575 if (get_dbg_flag(hevc) &
10576 H265_DEBUG_BUFMGR_MORE)
10577 dump_aux_buf(hevc);
10578 }
10579
10580 vui_time_scale =
10581 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
10582 hevc->param.p.vui_time_scale_lo;
10583 vui_num_units_in_tick =
10584 (u32)(hevc->param.
10585 p.vui_num_units_in_tick_hi << 16) |
10586 hevc->param.
10587 p.vui_num_units_in_tick_lo;
10588 if (hevc->bit_depth_luma !=
10589 ((hevc->param.p.bit_depth & 0xf) + 8)) {
10590 reconfig_flag = 1;
10591 hevc_print(hevc, 0, "Bit depth luma = %d\n",
10592 (hevc->param.p.bit_depth & 0xf) + 8);
10593 }
10594 if (hevc->bit_depth_chroma !=
10595 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
10596 reconfig_flag = 1;
10597 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
10598 ((hevc->param.p.bit_depth >> 4) &
10599 0xf) + 8);
10600 }
10601 hevc->bit_depth_luma =
10602 (hevc->param.p.bit_depth & 0xf) + 8;
10603 hevc->bit_depth_chroma =
10604 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
10605 bit_depth_luma = hevc->bit_depth_luma;
10606 bit_depth_chroma = hevc->bit_depth_chroma;
10607#ifdef SUPPORT_10BIT
10608 if (hevc->bit_depth_luma == 8 &&
10609 hevc->bit_depth_chroma == 8 &&
10610 enable_mem_saving)
10611 hevc->mem_saving_mode = 1;
10612 else
10613 hevc->mem_saving_mode = 0;
10614#endif
10615 if (reconfig_flag &&
10616 (get_double_write_mode(hevc) & 0x10) == 0)
10617 init_decode_head_hw(hevc);
10618
10619 if ((vui_time_scale != 0)
10620 && (vui_num_units_in_tick != 0)) {
10621 hevc->frame_dur =
10622 div_u64(96000ULL *
10623 vui_num_units_in_tick,
10624 vui_time_scale);
10625 if (hevc->get_frame_dur != true)
10626 vdec_schedule_work(
10627 &hevc->notify_work);
10628
10629 hevc->get_frame_dur = true;
10630 //hevc->gvs->frame_dur = hevc->frame_dur;
10631 }
10632
10633 if (hevc->video_signal_type !=
10634 ((hevc->param.p.video_signal_type << 16)
10635 | hevc->param.p.color_description)) {
10636 u32 v = hevc->param.p.video_signal_type;
10637 u32 c = hevc->param.p.color_description;
10638#if 0
10639 if (v & 0x2000) {
10640 hevc_print(hevc, 0,
10641 "video_signal_type present:\n");
10642 hevc_print(hevc, 0, " %s %s\n",
10643 video_format_names[(v >> 10) & 7],
10644 ((v >> 9) & 1) ?
10645 "full_range" : "limited");
10646 if (v & 0x100) {
10647 hevc_print(hevc, 0,
10648 " color_description present:\n");
10649 hevc_print(hevc, 0,
 10650						"  color_primaries = %s\n",
10651 color_primaries_names
10652 [v & 0xff]);
10653 hevc_print(hevc, 0,
10654 " transfer_characteristic = %s\n",
10655 transfer_characteristics_names
10656 [(c >> 8) & 0xff]);
10657 hevc_print(hevc, 0,
10658 " matrix_coefficient = %s\n",
10659 matrix_coeffs_names[c & 0xff]);
10660 }
10661 }
10662#endif
10663 hevc->video_signal_type = (v << 16) | c;
10664 video_signal_type = hevc->video_signal_type;
10665 }
10666
10667 if (use_cma &&
10668 (hevc->param.p.slice_segment_address == 0)
10669 && (hevc->pic_list_init_flag == 0)) {
10670 int log = hevc->param.p.log2_min_coding_block_size_minus3;
10671 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
10672
10673 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10674 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
10675 hevc->lcu_size = 1 << (log + 3 + log_s);
10676 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
 10677			if (performance_profile && (!is_oversize(hevc->pic_w, hevc->pic_h)) && IS_8K_SIZE(hevc->pic_w, hevc->pic_h))
10678 hevc->performance_profile = 1;
10679 else
10680 hevc->performance_profile = 0;
10681 hevc_print(hevc, 0, "hevc->performance_profile %d\n", hevc->performance_profile);
10682 if (hevc->pic_w == 0 || hevc->pic_h == 0
10683 || hevc->lcu_size == 0
10684 || is_oversize(hevc->pic_w, hevc->pic_h)
10685 || hevc_skip_nal(hevc)) {
10686 /* skip search next start code */
10687 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10688 & (~0x2));
10689 if ((hevc->pic_h == 96) && (hevc->pic_w == 160))
10690 hevc->skip_nal_count++;
10691 hevc->skip_flag = 1;
10692 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
 10693				/* Interrupt Amrisc to execute */
10694 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10695#ifdef MULTI_INSTANCE_SUPPORT
10696 if (hevc->m_ins_flag)
10697 start_process_time(hevc);
10698#endif
10699 } else {
10700 hevc->sps_num_reorder_pics_0 =
10701 hevc->param.p.sps_num_reorder_pics_0;
10702 hevc->ip_mode = (!hevc->sps_num_reorder_pics_0 &&
10703 !(vdec->slave || vdec->master) &&
10704 !disable_ip_mode) ? true : false;
10705 hevc->pic_list_init_flag = 1;
10706 if ((!IS_4K_SIZE(hevc->pic_w, hevc->pic_h)) &&
10707 ((hevc->param.p.profile_etc & 0xc) == 0x4)
10708 && (interlace_enable != 0)) {
10709 hevc->double_write_mode = 1;
10710 hevc->interlace_flag = 1;
10711 hevc->frame_ar = (hevc->pic_h * 0x100 / hevc->pic_w) * 2;
10712 hevc_print(hevc, 0,
10713 "interlace (%d, %d), profile_etc %x, ar 0x%x, dw %d\n",
10714 hevc->pic_w, hevc->pic_h, hevc->param.p.profile_etc, hevc->frame_ar,
10715 get_double_write_mode(hevc));
10716 /* When dw changed from 0x10 to 1, the mmu_box is NULL */
10717 if (!hevc->mmu_box && init_mmu_box(hevc) != 0) {
10718 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10719 hevc->fatal_error |=
10720 DECODER_FATAL_ERROR_NO_MEM;
10721 vdec_schedule_work(&hevc->work);
10722 hevc_print(hevc,
10723 0, "can not alloc mmu box, force exit\n");
10724 return IRQ_HANDLED;
10725 }
10726 }
10727#ifdef MULTI_INSTANCE_SUPPORT
10728 if (hevc->m_ins_flag) {
10729 vdec_schedule_work(&hevc->work);
10730 } else
10731#endif
10732 up(&h265_sema);
10733 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10734 }
10735 return IRQ_HANDLED;
10736 }
10737
10738}
10739 ret =
10740 hevc_slice_segment_header_process(hevc,
10741 &hevc->param, decode_pic_begin);
10742 if (ret < 0) {
10743#ifdef MULTI_INSTANCE_SUPPORT
10744 if (hevc->m_ins_flag) {
10745 hevc->wait_buf = 0;
10746 hevc->dec_result = DEC_RESULT_AGAIN;
10747 amhevc_stop();
10748 restore_decode_state(hevc);
10749 reset_process_time(hevc);
10750 vdec_schedule_work(&hevc->work);
10751 return IRQ_HANDLED;
10752 }
10753#else
10754 ;
10755#endif
10756 } else if (ret == 0) {
10757 if ((hevc->new_pic) && (hevc->cur_pic)) {
10758 hevc->cur_pic->stream_offset =
10759 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10760 hevc->cur_pic->stream_frame_size =
10761 hevc->cur_pic->stream_offset - hevc->last_dec_pic_offset;
10762 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10763 "read stream_offset = 0x%x, frame_size = 0x%x\n",
10764 hevc->cur_pic->stream_offset, hevc->cur_pic->stream_frame_size);
10765 hevc->last_dec_pic_offset = hevc->cur_pic->stream_offset;
10766
10767
10768 hevc->cur_pic->aspect_ratio_idc =
10769 hevc->param.p.aspect_ratio_idc;
10770 hevc->cur_pic->sar_width =
10771 hevc->param.p.sar_width;
10772 hevc->cur_pic->sar_height =
10773 hevc->param.p.sar_height;
10774 }
10775
10776 WRITE_VREG(HEVC_DEC_STATUS_REG,
10777 HEVC_CODED_SLICE_SEGMENT_DAT);
 10778		/* Interrupt Amrisc to execute */
10779 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10780
10781 hevc->start_decoding_time = jiffies;
10782#ifdef MULTI_INSTANCE_SUPPORT
10783 if (hevc->m_ins_flag)
10784 start_process_time(hevc);
10785#endif
10786#if 1
10787 /*to do..., copy aux data to hevc->cur_pic*/
10788#endif
10789#ifdef MULTI_INSTANCE_SUPPORT
10790 } else if (hevc->m_ins_flag) {
10791 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10792 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10793 __func__, ret);
10794 hevc->decoded_poc = INVALID_POC;
10795 hevc->decoding_pic = NULL;
10796 hevc->dec_result = DEC_RESULT_DONE;
10797 amhevc_stop();
10798 reset_process_time(hevc);
10799 vdec_schedule_work(&hevc->work);
10800#endif
10801 } else {
10802 /* skip, search next start code */
10803 hevc->gvs->drop_frame_count++;
10804 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10805 hevc->skip_flag = 1;
10806 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
 10807		/* Interrupt Amrisc to execute */
10808 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10809 }
10810
10811 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
 10812		hevc_print(hevc, 0, "hevc decode oversize !!\n");
10813#ifdef MULTI_INSTANCE_SUPPORT
10814 if (!hevc->m_ins_flag)
10815 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10816 H265_DEBUG_DIS_SYS_ERROR_PROC);
10817#endif
10818 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10819 }
10820 return IRQ_HANDLED;
10821}
10822
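/*
 * Clear HEVC_SHIFT_STATUS and poll bit 1 of HEVC_STREAM_CONTROL until it
 * drops (presumably the stream search/shift busy flag), giving up after
 * roughly two seconds (100 * 20 ms).
 */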
10823static void wait_hevc_search_done(struct hevc_state_s *hevc)
10824{
10825 int count = 0;
10826 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10827 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10828 msleep(20);
10829 count++;
10830 if (count > 100) {
10831 hevc_print(hevc, 0, "%s timeout\n", __func__);
10832 break;
10833 }
10834 }
10835}
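
/*
 * Top-half decoder interrupt: latch HEVC_DEC_STATUS_REG, service ucode
 * debug tags in DEBUG_REG1 (LMEM dump / pause points), handle the
 * single-instance over-decode case, and defer everything else to
 * vh265_isr_thread_fn() via IRQ_WAKE_THREAD.
 */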
10836static irqreturn_t vh265_isr(int irq, void *data)
10837{
10838 int i, temp;
10839 unsigned int dec_status;
10840 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10841 u32 debug_tag;
10842 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10843
10844 if (hevc->init_flag == 0)
10845 return IRQ_HANDLED;
10846 hevc->dec_status = dec_status;
10847 if (is_log_enable(hevc))
10848 add_log(hevc,
10849 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10850 dec_status, READ_HREG(HEVC_DECODE_INFO),
10851 READ_VREG(HEVC_MPRED_CURR_LCU),
10852 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10853 READ_VREG(HEVC_SHIFT_STATUS));
10854
10855 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10856 hevc_print(hevc, 0,
10857 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10858 dec_status, READ_HREG(HEVC_DECODE_INFO),
10859 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10860 READ_VREG(HEVC_SHIFT_STATUS));
10861
10862 debug_tag = READ_HREG(DEBUG_REG1);
10863 if (debug_tag & 0x10000) {
10864 hevc_print(hevc, 0,
10865 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10866
10867 if (hevc->mmu_enable)
10868 temp = 0x500;
10869 else
10870 temp = 0x400;
10871 for (i = 0; i < temp; i += 4) {
10872 int ii;
10873 if ((i & 0xf) == 0)
10874 hevc_print_cont(hevc, 0, "%03x: ", i);
10875 for (ii = 0; ii < 4; ii++) {
10876 hevc_print_cont(hevc, 0, "%04x ",
10877 hevc->lmem_ptr[i + 3 - ii]);
10878 }
10879 if (((i + ii) & 0xf) == 0)
10880 hevc_print_cont(hevc, 0, "\n");
10881 }
10882
10883 if (((udebug_pause_pos & 0xffff)
10884 == (debug_tag & 0xffff)) &&
10885 (udebug_pause_decode_idx == 0 ||
10886 udebug_pause_decode_idx == hevc->decode_idx) &&
10887 (udebug_pause_val == 0 ||
10888 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10889 udebug_pause_pos &= 0xffff;
10890 hevc->ucode_pause_pos = udebug_pause_pos;
10891 }
10892 else if (debug_tag & 0x20000)
10893 hevc->ucode_pause_pos = 0xffffffff;
10894 if (hevc->ucode_pause_pos)
10895 reset_process_time(hevc);
10896 else
10897 WRITE_HREG(DEBUG_REG1, 0);
10898 } else if (debug_tag != 0) {
10899 hevc_print(hevc, 0,
10900 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10901 READ_HREG(DEBUG_REG2),
10902 READ_VREG(HEVC_STREAM_LEVEL),
10903 READ_VREG(HEVC_STREAM_WR_PTR),
10904 READ_VREG(HEVC_STREAM_RD_PTR));
10905 if (((udebug_pause_pos & 0xffff)
10906 == (debug_tag & 0xffff)) &&
10907 (udebug_pause_decode_idx == 0 ||
10908 udebug_pause_decode_idx == hevc->decode_idx) &&
10909 (udebug_pause_val == 0 ||
10910 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10911 udebug_pause_pos &= 0xffff;
10912 hevc->ucode_pause_pos = udebug_pause_pos;
10913 }
10914 if (hevc->ucode_pause_pos)
10915 reset_process_time(hevc);
10916 else
10917 WRITE_HREG(DEBUG_REG1, 0);
10918 return IRQ_HANDLED;
10919 }
10920
10921
10922 if (hevc->pic_list_init_flag == 1)
10923 return IRQ_HANDLED;
10924
10925 if (!hevc->m_ins_flag) {
10926 if (dec_status == HEVC_OVER_DECODE) {
10927 hevc->over_decode = 1;
 10928			hevc_print(hevc, 0,
 10929				"isr: over decode\n");
 10930			WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10931 return IRQ_HANDLED;
10932 }
10933 }
10934
10935 return IRQ_WAKE_THREAD;
10936
10937}
10938
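/*
 * Clock worker: scheduled from the watchdog timer when width * height * fps
 * no longer matches the saved resolution; lets hevc_source_changed()
 * adjust the HEVC clock and records the new resolution on success.
 */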
10939static void vh265_set_clk(struct work_struct *work)
10940{
10941 struct hevc_state_s *hevc = container_of(work,
10942 struct hevc_state_s, set_clk_work);
10943
10944 int fps = 96000 / hevc->frame_dur;
10945
10946 if (hevc_source_changed(VFORMAT_HEVC,
10947 hevc->frame_width, hevc->frame_height, fps) > 0)
10948 hevc->saved_resolution = hevc->frame_width *
10949 hevc->frame_height * fps;
10950}
10951
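/*
 * Periodic watchdog, re-armed every PUT_INTERVAL. In multi-instance mode
 * it detects stuck decodes via decode_timeout_val and the current LCU
 * index; in single-instance mode it drives the error_flag state machine
 * when the receiver is inactive and no frames are being recycled. It also
 * services the debug hooks (register poke, pic list dump, forced HW reset)
 * and schedules the clock worker on resolution changes.
 */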
10952static void vh265_check_timer_func(unsigned long arg)
10953{
10954 struct hevc_state_s *hevc = (struct hevc_state_s *)arg;
10955 struct timer_list *timer = &hevc->timer;
10956 unsigned char empty_flag;
10957 unsigned int buf_level;
10958
10959 enum receviver_start_e state = RECEIVER_INACTIVE;
10960
10961 if (hevc->init_flag == 0) {
10962 if (hevc->stat & STAT_TIMER_ARM) {
10963 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10964 }
10965 return;
10966 }
10967#ifdef MULTI_INSTANCE_SUPPORT
10968 if (hevc->m_ins_flag &&
10969 (get_dbg_flag(hevc) &
10970 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10971 hw_to_vdec(hevc)->next_status ==
10972 VDEC_STATUS_DISCONNECTED &&
10973 !hevc->is_used_v4l) {
10974 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10975 vdec_schedule_work(&hevc->work);
10976 hevc_print(hevc,
10977 0, "vdec requested to be disconnected\n");
10978 return;
10979 }
10980
10981 if (hevc->m_ins_flag) {
10982 if (((get_dbg_flag(hevc) &
10983 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10984 (decode_timeout_val > 0) &&
10985 (hevc->start_process_time > 0) &&
10986 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10987 > decode_timeout_val)
10988 ) {
10989 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10990 int current_lcu_idx =
10991 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10992 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10993 if (hevc->last_lcu_idx == current_lcu_idx) {
10994 if (hevc->decode_timeout_count > 0)
10995 hevc->decode_timeout_count--;
10996 if (hevc->decode_timeout_count == 0)
10997 timeout_process(hevc);
10998 } else
10999 restart_process_time(hevc);
11000 hevc->last_lcu_idx = current_lcu_idx;
11001 } else {
11002 hevc->pic_decoded_lcu_idx = current_lcu_idx;
11003 timeout_process(hevc);
11004 }
11005 }
11006 } else {
11007#endif
11008 if (hevc->m_ins_flag == 0 &&
11009 vf_get_receiver(hevc->provider_name)) {
11010 state =
11011 vf_notify_receiver(hevc->provider_name,
11012 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11013 NULL);
11014 if ((state == RECEIVER_STATE_NULL)
11015 || (state == RECEIVER_STATE_NONE))
11016 state = RECEIVER_INACTIVE;
11017 } else
11018 state = RECEIVER_INACTIVE;
11019
11020 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
11021 /* error watchdog */
11022 if (hevc->m_ins_flag == 0 &&
11023 (empty_flag == 0)
11024 && (hevc->pic_list_init_flag == 0
11025 || hevc->pic_list_init_flag
11026 == 3)) {
11027 /* decoder has input */
11028 if ((get_dbg_flag(hevc) &
11029 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
11030
11031 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
11032 /* receiver has no buffer to recycle */
11033 if ((state == RECEIVER_INACTIVE) &&
11034 (kfifo_is_empty(&hevc->display_q) &&
11035 buf_level > 0x200)
11036 ) {
11037 if (hevc->error_flag == 0) {
11038 hevc->error_watchdog_count++;
11039 if (hevc->error_watchdog_count ==
11040 error_handle_threshold) {
11041 hevc_print(hevc, 0,
11042 "H265 dec err local reset.\n");
11043 hevc->error_flag = 1;
11044 hevc->error_watchdog_count = 0;
11045 hevc->error_skip_nal_wt_cnt = 0;
11046 hevc->
11047 error_system_watchdog_count++;
11048 WRITE_VREG
11049 (HEVC_ASSIST_MBOX0_IRQ_REG,
11050 0x1);
11051 }
11052 } else if (hevc->error_flag == 2) {
11053 int th =
11054 error_handle_nal_skip_threshold;
11055 hevc->error_skip_nal_wt_cnt++;
11056 if (hevc->error_skip_nal_wt_cnt
11057 == th) {
11058 hevc->error_flag = 3;
11059 hevc->error_watchdog_count = 0;
11060 hevc->
11061 error_skip_nal_wt_cnt = 0;
11062 WRITE_VREG
11063 (HEVC_ASSIST_MBOX0_IRQ_REG,
11064 0x1);
11065 }
11066 }
11067 }
11068 }
11069
11070 if ((get_dbg_flag(hevc)
11071 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
11072 /* receiver has no buffer to recycle */
11073 if ((state == RECEIVER_INACTIVE) &&
11074 (kfifo_is_empty(&hevc->display_q))
11075 ) { /* no buffer to recycle */
11076 if ((get_dbg_flag(hevc) &
11077 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
11078 0)
11079 hevc->error_system_watchdog_count++;
11080 if (hevc->error_system_watchdog_count ==
11081 error_handle_system_threshold) {
11082 /* and it lasts for a while */
11083 hevc_print(hevc, 0,
11084 "H265 dec fatal error watchdog.\n");
11085 hevc->
11086 error_system_watchdog_count = 0;
11087 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
11088 }
11089 }
11090 } else {
11091 hevc->error_watchdog_count = 0;
11092 hevc->error_system_watchdog_count = 0;
11093 }
11094#ifdef MULTI_INSTANCE_SUPPORT
11095 }
11096#endif
11097 if ((hevc->ucode_pause_pos != 0) &&
11098 (hevc->ucode_pause_pos != 0xffffffff) &&
11099 udebug_pause_pos != hevc->ucode_pause_pos) {
11100 hevc->ucode_pause_pos = 0;
11101 WRITE_HREG(DEBUG_REG1, 0);
11102 }
11103
11104 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
11105 dump_pic_list(hevc);
11106 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
11107 }
11108 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
11109 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11110 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
11111 }
11112#ifdef TEST_NO_BUF
11113 if (hevc->wait_buf)
11114 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11115#endif
11116 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
11117 hevc->error_skip_nal_count = error_skip_nal_count;
11118 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11119
11120 debug &= ~H265_DEBUG_HW_RESET;
11121 }
11122
11123#ifdef ERROR_HANDLE_DEBUG
11124 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
11125 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
11126 dbg_nal_skip_count &= ~0x10000;
11127 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11128 }
11129#endif
11130
11131 if (radr != 0) {
11132 if (rval != 0) {
11133 WRITE_VREG(radr, rval);
11134 hevc_print(hevc, 0,
11135 "WRITE_VREG(%x,%x)\n", radr, rval);
11136 } else
11137 hevc_print(hevc, 0,
11138 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
11139 rval = 0;
11140 radr = 0;
11141 }
11142 if (dbg_cmd != 0) {
11143 if (dbg_cmd == 1) {
11144 u32 disp_laddr;
11145
11146 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
11147 get_double_write_mode(hevc) == 0) {
11148 disp_laddr =
11149 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
11150 } else {
11151 struct canvas_s cur_canvas;
11152
11153 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
11154 & 0xff), &cur_canvas);
11155 disp_laddr = cur_canvas.addr;
11156 }
11157 hevc_print(hevc, 0,
11158 "current displayed buffer address %x\r\n",
11159 disp_laddr);
11160 }
11161 dbg_cmd = 0;
11162 }
11163 /*don't changed at start.*/
11164 if (hevc->m_ins_flag == 0 &&
11165 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
11166 hevc->frame_dur > 0 && hevc->saved_resolution !=
11167 hevc->frame_width * hevc->frame_height *
11168 (96000 / hevc->frame_dur))
11169 vdec_schedule_work(&hevc->set_clk_work);
11170
11171 mod_timer(timer, jiffies + PUT_INTERVAL);
11172}
11173
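/*
 * Kernel thread kicked through h265_sema: performs the deferred picture
 * list (re)initialisation requested from the ISR (pic_list_init_flag
 * 1 -> 2) and the uninit path used when the single-instance driver
 * tears down.
 */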
11174static int h265_task_handle(void *data)
11175{
11176 int ret = 0;
11177 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
11178
11179 set_user_nice(current, -10);
11180 while (1) {
11181 if (use_cma == 0) {
11182 hevc_print(hevc, 0,
 11183				"ERROR: use_cma cannot be changed dynamically\n");
11184 }
11185 ret = down_interruptible(&h265_sema);
11186 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
11187 init_pic_list(hevc);
11188 init_pic_list_hw(hevc);
11189 init_buf_spec(hevc);
11190 hevc->pic_list_init_flag = 2;
11191 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
11192
11193 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11194
11195 }
11196
11197 if (hevc->uninit_list) {
11198 /*USE_BUF_BLOCK*/
11199 uninit_pic_list(hevc);
11200 hevc_print(hevc, 0, "uninit list\n");
11201 hevc->uninit_list = 0;
11202#ifdef USE_UNINIT_SEMA
11203 if (use_cma) {
11204 up(&hevc->h265_uninit_done_sema);
11205 while (!kthread_should_stop())
11206 msleep(1);
11207 break;
11208 }
11209#endif
11210 }
11211 }
11212
11213 return 0;
11214}
11215
11216void vh265_free_cmabuf(void)
11217{
11218 struct hevc_state_s *hevc = gHevc;
11219
11220 mutex_lock(&vh265_mutex);
11221
11222 if (hevc->init_flag) {
11223 mutex_unlock(&vh265_mutex);
11224 return;
11225 }
11226
11227 mutex_unlock(&vh265_mutex);
11228}
11229
11230#ifdef MULTI_INSTANCE_SUPPORT
11231int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
11232#else
11233int vh265_dec_status(struct vdec_info *vstatus)
11234#endif
11235{
11236#ifdef MULTI_INSTANCE_SUPPORT
11237 struct hevc_state_s *hevc =
11238 (struct hevc_state_s *)vdec->private;
11239#else
11240 struct hevc_state_s *hevc = gHevc;
11241#endif
11242 if (!hevc)
11243 return -1;
11244
11245 vstatus->frame_width = hevc->frame_width;
11246 /* for hevc interlace for disp height x2 */
11247 vstatus->frame_height =
11248 (hevc->frame_height << hevc->interlace_flag);
11249 if (hevc->frame_dur != 0)
11250 vstatus->frame_rate = 96000 / hevc->frame_dur;
11251 else
11252 vstatus->frame_rate = -1;
11253 vstatus->error_count = hevc->gvs->error_frame_count;
11254 vstatus->status = hevc->stat | hevc->fatal_error;
11255 vstatus->bit_rate = hevc->gvs->bit_rate;
11256 vstatus->frame_dur = hevc->frame_dur;
11257 if (hevc->gvs) {
11258 vstatus->bit_rate = hevc->gvs->bit_rate;
11259 vstatus->frame_data = hevc->gvs->frame_data;
11260 vstatus->total_data = hevc->gvs->total_data;
11261 vstatus->frame_count = hevc->gvs->frame_count;
11262 vstatus->error_frame_count = hevc->gvs->error_frame_count;
11263 vstatus->drop_frame_count = hevc->gvs->drop_frame_count;
11264 vstatus->samp_cnt = hevc->gvs->samp_cnt;
11265 vstatus->offset = hevc->gvs->offset;
11266 }
11267
11268 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
11269 "%s", DRIVER_NAME);
11270 vstatus->ratio_control = hevc->ratio_control;
11271 return 0;
11272}
11273
11274int vh265_set_isreset(struct vdec_s *vdec, int isreset)
11275{
11276 is_reset = isreset;
11277 return 0;
11278}
11279
11280static int vh265_vdec_info_init(struct hevc_state_s *hevc)
11281{
11282 hevc->gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
11283 //pr_err("[%s line %d] hevc->gvs=0x%p operation\n",__func__, __LINE__, hevc->gvs);
11284 if (NULL == hevc->gvs) {
 11285		pr_info("failed to allocate the vdec status struct.\n");
11286 return -ENOMEM;
11287 }
11288 vdec_set_vframe_comm(hw_to_vdec(hevc), DRIVER_NAME);
11289 return 0;
11290}
11291
11292#if 0
11293static void H265_DECODE_INIT(void)
11294{
11295 /* enable hevc clocks */
11296 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
11297 /* *************************************************************** */
11298 /* Power ON HEVC */
11299 /* *************************************************************** */
11300 /* Powerup HEVC */
11301 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
11302 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
11303 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
11304 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
11305 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
11306 /* remove isolations */
11307 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
11308 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
11309
11310}
11311#endif
11312
11313int vh265_set_trickmode(struct vdec_s *vdec, unsigned long trickmode)
11314{
11315 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
11316 hevc_print(hevc, 0, "[%s %d] trickmode:%lu\n", __func__, __LINE__, trickmode);
11317
11318 if (trickmode == TRICKMODE_I) {
11319 trickmode_i = 1;
11320 i_only_flag = 0x1;
11321 } else if (trickmode == TRICKMODE_NONE) {
11322 trickmode_i = 0;
11323 i_only_flag = 0x0;
11324 } else if (trickmode == 0x02) {
11325 trickmode_i = 0;
11326 i_only_flag = 0x02;
11327 } else if (trickmode == 0x03) {
11328 trickmode_i = 1;
11329 i_only_flag = 0x03;
11330 } else if (trickmode == 0x07) {
11331 trickmode_i = 1;
11332 i_only_flag = 0x07;
11333 }
11334 //hevc_print(hevc, 0, "i_only_flag: %d trickmode_i:%d\n", i_only_flag, trickmode_i);
11335
11336 return 0;
11337}
11338
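/*
 * Build the HEVC_DECODE_MODE control word: single vs. multi instance,
 * frame- vs. stream-based input, and the Dolby Vision BL/EL variants
 * (with the next parser type carried in bits 8..15). In multi-instance
 * mode start_decoding_flag goes into bits 16..23 and 0x80 << 24 sets the
 * MBX0 interrupt flag; HEVC_DECODE_MODE2 carries the RPS set id.
 */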
11339static void config_decode_mode(struct hevc_state_s *hevc)
11340{
11341#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11342 struct vdec_s *vdec = hw_to_vdec(hevc);
11343#endif
11344 unsigned decode_mode;
11345#ifdef HEVC_8K_LFTOFFSET_FIX
11346 if (hevc->performance_profile)
11347 WRITE_VREG(NAL_SEARCH_CTL,
11348 READ_VREG(NAL_SEARCH_CTL) | (1 << 21));
11349#endif
11350 if (!hevc->m_ins_flag)
11351 decode_mode = DECODE_MODE_SINGLE;
11352 else if (vdec_frame_based(hw_to_vdec(hevc)))
11353 decode_mode =
11354 DECODE_MODE_MULTI_FRAMEBASE;
11355#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11356 else if (vdec->slave) {
11357 if (force_bypass_dvenl & 0x80000000)
11358 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
11359 else
11360 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
11361 if (dolby_meta_with_el && hevc->bypass_dvenl) {
11362 hevc->bypass_dvenl = 0;
11363 hevc_print(hevc, 0,
11364 "NOT support bypass_dvenl when meta_with_el\n");
11365 }
11366 if (hevc->bypass_dvenl)
11367 decode_mode =
11368 (hevc->start_parser_type << 8)
11369 | DECODE_MODE_MULTI_STREAMBASE;
11370 else
11371 decode_mode =
11372 (hevc->start_parser_type << 8)
11373 | DECODE_MODE_MULTI_DVBAL;
11374 } else if (vdec->master)
11375 decode_mode =
11376 (hevc->start_parser_type << 8)
11377 | DECODE_MODE_MULTI_DVENL;
11378#endif
11379 else
11380 decode_mode =
11381 DECODE_MODE_MULTI_STREAMBASE;
11382
11383 if (hevc->m_ins_flag)
11384 decode_mode |=
11385 (hevc->start_decoding_flag << 16);
11386 /* set MBX0 interrupt flag */
11387 decode_mode |= (0x80 << 24);
11388 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
11389 WRITE_VREG(HEVC_DECODE_MODE2,
11390 hevc->rps_set_id);
11391}
11392
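/*
 * Program the decoder after (re)load: workspace and decoder-HW init,
 * mailbox 0 interrupt setup, NAL_SEARCH_CTL policy (manual vs. ucode
 * parsing, PB skip mode, SEI / Dolby Vision parsing enables), decode
 * mode, AUX buffers and, on GXM and earlier swap-ucode parts, the swap
 * buffer address and detrefill debug registers.
 */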
11393static void vh265_prot_init(struct hevc_state_s *hevc)
11394{
11395#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11396 struct vdec_s *vdec = hw_to_vdec(hevc);
11397#endif
11398 /* H265_DECODE_INIT(); */
11399
11400 hevc_config_work_space_hw(hevc);
11401
11402 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
11403
11404 WRITE_VREG(HEVC_WAIT_FLAG, 1);
11405
11406 /* WRITE_VREG(P_HEVC_MPSR, 1); */
11407
11408 /* clear mailbox interrupt */
11409 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
11410
11411 /* enable mailbox interrupt */
11412 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
11413
11414 /* disable PSCALE for hardware sharing */
11415 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
11416
11417 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
11418
11419 if ((get_dbg_flag(hevc) &
11420 (H265_DEBUG_MAN_SKIP_NAL |
11421 H265_DEBUG_MAN_SEARCH_NAL))
11422 /*||hevc->m_ins_flag*/
11423 ) {
11424 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
11425 } else {
11426 /* check vps/sps/pps/i-slice in ucode */
11427 unsigned ctl_val = 0x8;
11428 if (hevc->PB_skip_mode == 0)
11429 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
11430 else if (hevc->PB_skip_mode == 3)
11431 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
11432 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
11433 }
11434 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
11435#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11436 || vdec->master
11437 || vdec->slave
11438#endif
11439 )
11440 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
11441
11442 WRITE_VREG(NAL_SEARCH_CTL,
11443 READ_VREG(NAL_SEARCH_CTL)
11444 | ((parser_sei_enable & 0x7) << 17));
11445/*#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION*/
11446 WRITE_VREG(NAL_SEARCH_CTL,
11447 READ_VREG(NAL_SEARCH_CTL) |
11448 ((parser_dolby_vision_enable & 0x1) << 20));
11449/*#endif*/
11450 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
11451
11452 config_decode_mode(hevc);
11453 config_aux_buf(hevc);
11454#ifdef SWAP_HEVC_UCODE
11455 if (!tee_enabled() && hevc->is_swap &&
11456 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11457 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11458 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11459 }
11460#endif
11461#ifdef DETREFILL_ENABLE
11462 if (hevc->is_swap &&
11463 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11464 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
11465 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
11466 }
11467#endif
11468}
11469
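/*
 * Software-side (re)initialization: reset pts/duration bookkeeping, reject
 * oversized streams, derive is_4k, frame_dur and i_only from the amstream
 * dec info, and rebuild the vframe kfifos before calling hevc_local_init().
 */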
11470static int vh265_local_init(struct hevc_state_s *hevc)
11471{
11472 int i;
11473 int ret = -1;
11474
11475#ifdef DEBUG_PTS
11476 hevc->pts_missed = 0;
11477 hevc->pts_hit = 0;
11478#endif
11479 hevc->pts_lookup_margin = 0;
11480 hevc->pts_continue_miss = 0;
11481 hevc->min_pic_size = 0;
11482
11483 hevc->saved_resolution = 0;
11484 hevc->get_frame_dur = false;
11485 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
11486 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
11487 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
11488 pr_info("over size : %u x %u.\n",
11489 hevc->frame_width, hevc->frame_height);
11490 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
11491 return ret;
11492 }
11493
11494 if (hevc->max_pic_w && hevc->max_pic_h) {
11495 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
11496 ((hevc->max_pic_w * hevc->max_pic_h) >
11497 1920 * 1088) ? true : false;
11498 } else {
11499 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
11500 ((hevc->frame_width * hevc->frame_height) >
11501 1920 * 1088) ? true : false;
11502 }
11503
11504 hevc->frame_dur =
11505 (hevc->vh265_amstream_dec_info.rate ==
11506 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
11507 //hevc->gvs->frame_dur = hevc->frame_dur;
11508 if (hevc->frame_width && hevc->frame_height)
11509 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
11510
11511 if (i_only_flag)
11512 hevc->i_only = i_only_flag & 0xff;
11513 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
11514 & 0x08)
11515 hevc->i_only = 0x7;
11516 else
11517 hevc->i_only = 0x0;
11518 hevc->error_watchdog_count = 0;
11519 hevc->sei_present_flag = 0;
11520 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
11521 & 0x40) >> 6;
11522 hevc_print(hevc, 0,
11523 "h265:pts_unstable=%d\n", pts_unstable);
11524/*
11525 *TODO:FOR VERSION
11526 */
11527 hevc_print(hevc, 0,
11528 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
11529 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
11530
11531 if (hevc->frame_dur == 0)
11532 hevc->frame_dur = 96000 / 24;
11533
11534 INIT_KFIFO(hevc->display_q);
11535 INIT_KFIFO(hevc->newframe_q);
11536 INIT_KFIFO(hevc->pending_q);
11537
11538 for (i = 0; i < VF_POOL_SIZE; i++) {
11539 const struct vframe_s *vf = &hevc->vfpool[i];
11540
11541 hevc->vfpool[i].index = -1;
11542 kfifo_put(&hevc->newframe_q, vf);
11543 }
11544
11545
11546 ret = hevc_local_init(hevc);
11547
11548 return ret;
11549}
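/*
 * Full decoder bring-up: load the (optionally swap-capable) HEVC firmware,
 * set up timers and work items and, in the non-multi-instance path, register
 * the ISR, vframe provider and watchdog timer before starting the ucode.
 */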
11550#ifdef MULTI_INSTANCE_SUPPORT
11551static s32 vh265_init(struct vdec_s *vdec)
11552{
11553 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
11554#else
11555static s32 vh265_init(struct hevc_state_s *hevc)
11556{
11557
11558#endif
11559 int ret, size = -1;
11560 int fw_size = 0x1000 * 16;
11561 struct firmware_s *fw = NULL;
11562
11563 init_timer(&hevc->timer);
11564
11565 hevc->stat |= STAT_TIMER_INIT;
11566
11567 if (hevc->m_ins_flag) {
11568#ifdef USE_UNINIT_SEMA
11569 sema_init(&hevc->h265_uninit_done_sema, 0);
11570#endif
11571 INIT_WORK(&hevc->work, vh265_work);
11572 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
11573 }
11574
11575 if (vh265_local_init(hevc) < 0)
11576 return -EBUSY;
11577
11578 mutex_init(&hevc->chunks_mutex);
11579 INIT_WORK(&hevc->notify_work, vh265_notify_work);
11580 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
11581
11582 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
11583 if (IS_ERR_OR_NULL(fw))
11584 return -ENOMEM;
11585
11586 if (hevc->mmu_enable)
11587 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11588 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
11589 else {
11590 if (!hevc->is_4k) {
11591				/* if an older version of the fw was loaded, */
11592				/* we need to try the no-swap fw because the */
11593				/* old fw package does not contain the swap fw. */
11594 size = get_firmware_data(
11595 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
11596 if (size < 0)
11597 size = get_firmware_data(
11598 VIDEO_DEC_HEVC_MMU, fw->data);
11599 else if (size)
11600 hevc->is_swap = true;
11601 } else
11602 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
11603 fw->data);
11604 }
11605 else
11606 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
11607
11608 if (size < 0) {
11609 pr_err("get firmware fail.\n");
11610 vfree(fw);
11611 return -1;
11612 }
11613
11614 fw->len = size;
11615
11616#ifdef SWAP_HEVC_UCODE
11617 if (!tee_enabled() && hevc->is_swap &&
11618 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11619 if (hevc->mmu_enable) {
11620 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
11621 hevc->mc_cpu_addr =
11622 dma_alloc_coherent(amports_get_dma_device(),
11623 hevc->swap_size,
11624 &hevc->mc_dma_handle, GFP_KERNEL);
11625 if (!hevc->mc_cpu_addr) {
11626 amhevc_disable();
11627 pr_info("vh265 mmu swap ucode loaded fail.\n");
11628 return -ENOMEM;
11629 }
11630
11631 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
11632 hevc->swap_size);
11633
11634 hevc_print(hevc, 0,
11635 "vh265 mmu ucode swap loaded %x\n",
11636 hevc->mc_dma_handle);
11637 }
11638 }
11639#endif
11640
11641#ifdef MULTI_INSTANCE_SUPPORT
11642 if (hevc->m_ins_flag) {
11643 hevc->timer.data = (ulong) hevc;
11644 hevc->timer.function = vh265_check_timer_func;
11645 hevc->timer.expires = jiffies + PUT_INTERVAL;
11646
11647 hevc->fw = fw;
11648 hevc->init_flag = 1;
11649
11650 return 0;
11651 }
11652#endif
11653 amhevc_enable();
11654
11655 if (hevc->mmu_enable)
11656 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11657 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
11658 else {
11659 if (!hevc->is_4k) {
11660			/* if an older version of the fw was loaded, */
11661			/* we need to try the no-swap fw because the */
11662			/* old fw package does not contain the swap fw. */
11663 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11664 "hevc_mmu_swap", fw->data);
11665 if (ret < 0)
11666 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11667 "h265_mmu", fw->data);
11668 else
11669 hevc->is_swap = true;
11670 } else
11671 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11672 "h265_mmu", fw->data);
11673 }
11674 else
11675 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
11676
11677 if (ret < 0) {
11678 amhevc_disable();
11679 vfree(fw);
11680 pr_err("H265: the %s fw loading failed, err: %x\n",
11681 tee_enabled() ? "TEE" : "local", ret);
11682 return -EBUSY;
11683 }
11684
11685 vfree(fw);
11686
11687 hevc->stat |= STAT_MC_LOAD;
11688
11689#ifdef DETREFILL_ENABLE
11690 if (hevc->is_swap &&
11691 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11692 init_detrefill_buf(hevc);
11693#endif
11694 /* enable AMRISC side protocol */
11695 vh265_prot_init(hevc);
11696
11697 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
11698 vh265_isr_thread_fn,
11699 IRQF_ONESHOT,/*run thread on this irq disabled*/
11700 "vh265-irq", (void *)hevc)) {
11701 hevc_print(hevc, 0, "vh265 irq register error.\n");
11702 amhevc_disable();
11703 return -ENOENT;
11704 }
11705
11706 hevc->stat |= STAT_ISR_REG;
11707 hevc->provider_name = PROVIDER_NAME;
11708
11709#ifdef MULTI_INSTANCE_SUPPORT
11710 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
11711 &vh265_vf_provider, vdec);
11712 vf_reg_provider(&vh265_vf_prov);
11713 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
11714 NULL);
11715 if (hevc->frame_dur != 0) {
11716 if (!is_reset) {
11717 vf_notify_receiver(hevc->provider_name,
11718 VFRAME_EVENT_PROVIDER_FR_HINT,
11719 (void *)
11720 ((unsigned long)hevc->frame_dur));
11721 fr_hint_status = VDEC_HINTED;
11722 }
11723 } else
11724 fr_hint_status = VDEC_NEED_HINT;
11725#else
11726 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
11727 hevc);
11728 vf_reg_provider(&vh265_vf_prov);
11729 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
11730 if (hevc->frame_dur != 0) {
11731 vf_notify_receiver(PROVIDER_NAME,
11732 VFRAME_EVENT_PROVIDER_FR_HINT,
11733 (void *)
11734 ((unsigned long)hevc->frame_dur));
11735 fr_hint_status = VDEC_HINTED;
11736 } else
11737 fr_hint_status = VDEC_NEED_HINT;
11738#endif
11739 hevc->stat |= STAT_VF_HOOK;
11740
11741 hevc->timer.data = (ulong) hevc;
11742 hevc->timer.function = vh265_check_timer_func;
11743 hevc->timer.expires = jiffies + PUT_INTERVAL;
11744
11745 add_timer(&hevc->timer);
11746
11747 hevc->stat |= STAT_TIMER_ARM;
11748
11749 if (use_cma) {
11750#ifdef USE_UNINIT_SEMA
11751 sema_init(&hevc->h265_uninit_done_sema, 0);
11752#endif
11753 if (h265_task == NULL) {
11754 sema_init(&h265_sema, 1);
11755 h265_task =
11756 kthread_run(h265_task_handle, hevc,
11757 "kthread_h265");
11758 }
11759 }
11760 /* hevc->stat |= STAT_KTHREAD; */
11761#if 0
11762 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11763 hevc_print(hevc, 0, "%s force clk\n", __func__);
11764 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11765 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11766 ((1 << 2) | (1 << 1)));
11767 WRITE_VREG(HEVC_DBLK_CFG0,
11768 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11769 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11770 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11771 (1 << 2)); /* 2 */
11772 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11773 (1 << 24)); /* 24 */
11774 WRITE_VREG(HEVC_STREAM_CONTROL,
11775 READ_VREG(HEVC_STREAM_CONTROL) |
11776 (1 << 15)); /* 15 */
11777 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11778 (1 << 13)); /* 13 */
11779 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11780 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11781 (1 << 15)); /* 15 */
11782 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11783 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11784 (1 << 15)); /* 15 */
11785 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11786 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11787 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11788 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11789 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11790 (1 << 3)); /* 3 */
11791 }
11792#endif
11793#ifdef SWAP_HEVC_UCODE
11794 if (!tee_enabled() && hevc->is_swap &&
11795 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11796 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11797 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11798 }
11799#endif
11800
11801#ifndef MULTI_INSTANCE_SUPPORT
11802 set_vdec_func(&vh265_dec_status);
11803#endif
11804 amhevc_start();
11805 hevc->stat |= STAT_VDEC_RUN;
11806 hevc->init_flag = 1;
11807 error_handle_threshold = 30;
11808 /* pr_info("%d, vh265_init, RP=0x%x\n",
11809 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11810 */
11811
11812 return 0;
11813}
11814
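/*
 * Tear down the single-instance decoder: stop the ucode and watchdog timer,
 * release the IRQ and vframe provider, let the h265 task free the picture
 * buffers when use_cma is set, then release the MMU buffers and stats.
 */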
11815static int vh265_stop(struct hevc_state_s *hevc)
11816{
11817 if (get_dbg_flag(hevc) &
11818 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11819 int wait_timeout_count = 0;
11820
11821 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11822 HEVC_CODED_SLICE_SEGMENT_DAT &&
11823 wait_timeout_count < 10){
11824 wait_timeout_count++;
11825 msleep(20);
11826 }
11827 }
11828 if (hevc->stat & STAT_VDEC_RUN) {
11829 amhevc_stop();
11830 hevc->stat &= ~STAT_VDEC_RUN;
11831 }
11832
11833 if (hevc->stat & STAT_ISR_REG) {
11834#ifdef MULTI_INSTANCE_SUPPORT
11835 if (!hevc->m_ins_flag)
11836#endif
11837 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11838 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11839 hevc->stat &= ~STAT_ISR_REG;
11840 }
11841
11842 hevc->stat &= ~STAT_TIMER_INIT;
11843 if (hevc->stat & STAT_TIMER_ARM) {
11844 del_timer_sync(&hevc->timer);
11845 hevc->stat &= ~STAT_TIMER_ARM;
11846 }
11847
11848 if (hevc->stat & STAT_VF_HOOK) {
11849 if (fr_hint_status == VDEC_HINTED) {
11850 vf_notify_receiver(hevc->provider_name,
11851 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11852 NULL);
11853 }
11854 fr_hint_status = VDEC_NO_NEED_HINT;
11855 vf_unreg_provider(&vh265_vf_prov);
11856 hevc->stat &= ~STAT_VF_HOOK;
11857 }
11858
11859 hevc_local_uninit(hevc);
11860
11861 if (use_cma) {
11862 hevc->uninit_list = 1;
11863 up(&h265_sema);
11864#ifdef USE_UNINIT_SEMA
11865 down(&hevc->h265_uninit_done_sema);
11866 if (!IS_ERR(h265_task)) {
11867 kthread_stop(h265_task);
11868 h265_task = NULL;
11869 }
11870#else
11871 while (hevc->uninit_list) /* wait uninit complete */
11872 msleep(20);
11873#endif
11874
11875 }
11876 hevc->init_flag = 0;
11877 hevc->first_sc_checked = 0;
11878 cancel_work_sync(&hevc->notify_work);
11879 cancel_work_sync(&hevc->set_clk_work);
11880 uninit_mmu_buffers(hevc);
11881 amhevc_disable();
11882
11883 //pr_err("[%s line %d] hevc->gvs=0x%p operation\n",__func__, __LINE__, hevc->gvs);
11884 if (hevc->gvs)
11885 kfree(hevc->gvs);
11886 hevc->gvs = NULL;
11887
11888 return 0;
11889}
11890
11891#ifdef MULTI_INSTANCE_SUPPORT
11892static void reset_process_time(struct hevc_state_s *hevc)
11893{
11894 if (hevc->start_process_time) {
11895 unsigned int process_time =
11896 1000 * (jiffies - hevc->start_process_time) / HZ;
11897 hevc->start_process_time = 0;
11898 if (process_time > max_process_time[hevc->index])
11899 max_process_time[hevc->index] = process_time;
11900 }
11901}
11902
11903static void start_process_time(struct hevc_state_s *hevc)
11904{
11905 hevc->start_process_time = jiffies;
11906 hevc->decode_timeout_count = 2;
11907 hevc->last_lcu_idx = 0;
11908}
11909
11910static void restart_process_time(struct hevc_state_s *hevc)
11911{
11912 hevc->start_process_time = jiffies;
11913 hevc->decode_timeout_count = 2;
11914}
11915
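/*
 * Watchdog path when the hardware makes no progress: stop the ucode, mark
 * the current picture as errored and schedule timeout_work, unless
 * vh265_work (or a previous timeout) is already pending or running.
 */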
11916static void timeout_process(struct hevc_state_s *hevc)
11917{
11918	/*
11919	 * If vh265_work arrives right at this timeout point,
11920	 * let it handle the scenario instead.
11921	 */
11922 if (work_pending(&hevc->work) ||
11923 work_busy(&hevc->work) ||
11924 work_pending(&hevc->timeout_work) ||
11925 work_busy(&hevc->timeout_work))
11926 return;
11927
11928 hevc->timeout_num++;
11929 amhevc_stop();
11930 read_decode_info(hevc);
11931
11932 hevc_print(hevc,
11933 0, "%s decoder timeout\n", __func__);
11934 check_pic_decoded_error(hevc,
11935 hevc->pic_decoded_lcu_idx);
11936	/* The currently decoding frame is marked
11937	 * as an error when the decode times out */
11938 if (hevc->cur_pic != NULL)
11939 hevc->cur_pic->error_mark = 1;
11940 hevc->decoded_poc = hevc->curr_POC;
11941 hevc->decoding_pic = NULL;
11942 hevc->dec_result = DEC_RESULT_DONE;
11943 reset_process_time(hevc);
11944
11945 if (work_pending(&hevc->work))
11946 return;
11947 vdec_schedule_work(&hevc->timeout_work);
11948}
11949
11950#ifdef CONSTRAIN_MAX_BUF_NUM
11951static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11952{
11953 struct PIC_s *pic;
11954 int i;
11955 int count = 0;
11956 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11957 pic = hevc->m_PIC[i];
11958 if (pic == NULL || pic->index == -1)
11959 continue;
11960 if (pic->output_mark == 0 && pic->referenced == 0
11961 && pic->output_ready == 1)
11962 count++;
11963 }
11964
11965 return count;
11966}
11967
11968static int get_used_buf_count(struct hevc_state_s *hevc)
11969{
11970 struct PIC_s *pic;
11971 int i;
11972 int count = 0;
11973 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11974 pic = hevc->m_PIC[i];
11975 if (pic == NULL || pic->index == -1)
11976 continue;
11977 if (pic->output_mark != 0 || pic->referenced != 0
11978 || pic->output_ready != 0)
11979 count++;
11980 }
11981
11982 return count;
11983}
11984#endif
11985
11986
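/*
 * Return 1 when the next decode can get a picture buffer: either the pic
 * list is not initialized yet or a slot is completely free. If nothing is
 * free and the receiver is inactive, reclaim the oldest un-referenced error
 * picture or, failing that, force a flush of the DPB.
 */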
11987static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11988{
11989 struct PIC_s *new_pic = NULL;
11990 struct PIC_s *pic;
11991 /* recycle un-used pic */
11992 int i;
11993 int ref_pic = 0;
11994 struct vdec_s *vdec = hw_to_vdec(hevc);
11995 /*return 1 if pic_list is not initialized yet*/
11996 if (hevc->pic_list_init_flag != 3)
11997 return 1;
11998
11999 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12000 pic = hevc->m_PIC[i];
12001 if (pic == NULL || pic->index == -1)
12002 continue;
12003 if (pic->referenced == 1)
12004 ref_pic++;
12005 if (pic->output_mark == 0 && pic->referenced == 0
12006 && pic->output_ready == 0
12007 && pic->vf_ref == 0
12008 ) {
12009 if (new_pic) {
12010 if (pic->POC < new_pic->POC)
12011 new_pic = pic;
12012 } else
12013 new_pic = pic;
12014 }
12015 }
12016 if (new_pic == NULL) {
12017 enum receviver_start_e state = RECEIVER_INACTIVE;
12018 if (vf_get_receiver(vdec->vf_provider_name)) {
12019 state =
12020 vf_notify_receiver(vdec->vf_provider_name,
12021 VFRAME_EVENT_PROVIDER_QUREY_STATE,
12022 NULL);
12023 if ((state == RECEIVER_STATE_NULL)
12024 || (state == RECEIVER_STATE_NONE))
12025 state = RECEIVER_INACTIVE;
12026 }
12027 if (state == RECEIVER_INACTIVE) {
12028 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12029 int poc = INVALID_POC;
12030 pic = hevc->m_PIC[i];
12031 if (pic == NULL || pic->index == -1)
12032 continue;
12033 if ((pic->referenced == 0) &&
12034 (pic->error_mark == 1) &&
12035 (pic->output_mark == 1)) {
12036 if (poc == INVALID_POC || (pic->POC < poc)) {
12037 new_pic = pic;
12038 poc = pic->POC;
12039 }
12040 }
12041 }
12042 if (new_pic) {
12043 new_pic->referenced = 0;
12044 new_pic->output_mark = 0;
12045 put_mv_buf(hevc, new_pic);
12046				hevc_print(hevc, 0, "force release error pic %d receive_state %d\n", new_pic->POC, state);
12047 } else {
12048 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12049 pic = hevc->m_PIC[i];
12050 if (pic == NULL || pic->index == -1)
12051 continue;
12052 if ((pic->referenced == 1) && (pic->error_mark == 1)) {
12053 flush_output(hevc, pic);
12054						hevc_print(hevc, 0, "DPB error, need to force flush, receive_state %d\n", state);
12055 break;
12056 }
12057 }
12058 }
12059 }
12060 }
12061 return (new_pic != NULL) ? 1 : 0;
12062}
12063
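/*
 * Multi-instance counterpart of vh265_stop(): stop the timer, ucode and IRQ,
 * unregister the vframe provider, free the picture list through the work
 * queue (DEC_RESULT_FREE_CANVAS), then drop the firmware copy and MMU buffers.
 */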
12064static int vmh265_stop(struct hevc_state_s *hevc)
12065{
12066 if (hevc->stat & STAT_TIMER_ARM) {
12067 del_timer_sync(&hevc->timer);
12068 hevc->stat &= ~STAT_TIMER_ARM;
12069 }
12070 if (hevc->stat & STAT_VDEC_RUN) {
12071 amhevc_stop();
12072 hevc->stat &= ~STAT_VDEC_RUN;
12073 }
12074 if (hevc->stat & STAT_ISR_REG) {
12075 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
12076 hevc->stat &= ~STAT_ISR_REG;
12077 }
12078
12079 if (hevc->stat & STAT_VF_HOOK) {
12080 if (fr_hint_status == VDEC_HINTED)
12081 vf_notify_receiver(hevc->provider_name,
12082 VFRAME_EVENT_PROVIDER_FR_END_HINT,
12083 NULL);
12084 fr_hint_status = VDEC_NO_NEED_HINT;
12085 vf_unreg_provider(&vh265_vf_prov);
12086 hevc->stat &= ~STAT_VF_HOOK;
12087 }
12088
12089 hevc_local_uninit(hevc);
12090
12091 if (hevc->gvs)
12092 kfree(hevc->gvs);
12093 hevc->gvs = NULL;
12094
12095 if (use_cma) {
12096 hevc->uninit_list = 1;
12097 reset_process_time(hevc);
12098 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
12099 vdec_schedule_work(&hevc->work);
12100 flush_work(&hevc->work);
12101#ifdef USE_UNINIT_SEMA
12102 if (hevc->init_flag) {
12103 down(&hevc->h265_uninit_done_sema);
12104 }
12105#else
12106 while (hevc->uninit_list) /* wait uninit complete */
12107 msleep(20);
12108#endif
12109 }
12110 hevc->init_flag = 0;
12111 hevc->first_sc_checked = 0;
12112 cancel_work_sync(&hevc->notify_work);
12113 cancel_work_sync(&hevc->set_clk_work);
12114 cancel_work_sync(&hevc->timeout_work);
12115 cancel_work_sync(&hevc->work);
12116 uninit_mmu_buffers(hevc);
12117
12118 vfree(hevc->fw);
12119 hevc->fw = NULL;
12120
12121 dump_log(hevc);
12122 return 0;
12123}
12124
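/*
 * Byte-wise sum of the current input chunk (truncated to u8 by the return
 * type); only used for debug prints and the decode log.
 */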
12125static unsigned char get_data_check_sum
12126 (struct hevc_state_s *hevc, int size)
12127{
12128 int jj;
12129 int sum = 0;
12130 u8 *data = NULL;
12131
12132 if (!hevc->chunk->block->is_mapped)
12133 data = codec_mm_vmap(hevc->chunk->block->start +
12134 hevc->chunk->offset, size);
12135 else
12136 data = ((u8 *)hevc->chunk->block->start_virt) +
12137 hevc->chunk->offset;
12138
12139 for (jj = 0; jj < size; jj++)
12140 sum += data[jj];
12141
12142 if (!hevc->chunk->block->is_mapped)
12143 codec_mm_unmap_phyaddr(data);
12144 return sum;
12145}
12146
12147static void vh265_notify_work(struct work_struct *work)
12148{
12149 struct hevc_state_s *hevc =
12150 container_of(work,
12151 struct hevc_state_s,
12152 notify_work);
12153 struct vdec_s *vdec = hw_to_vdec(hevc);
12154#ifdef MULTI_INSTANCE_SUPPORT
12155 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
12156 vf_notify_receiver(hevc->provider_name,
12157 VFRAME_EVENT_PROVIDER_FR_HINT,
12158 (void *)
12159 ((unsigned long)hevc->frame_dur));
12160 vdec->fr_hint_state = VDEC_HINTED;
12161 } else if (fr_hint_status == VDEC_NEED_HINT) {
12162 vf_notify_receiver(hevc->provider_name,
12163 VFRAME_EVENT_PROVIDER_FR_HINT,
12164 (void *)
12165 ((unsigned long)hevc->frame_dur));
12166 fr_hint_status = VDEC_HINTED;
12167 }
12168#else
12169	if (fr_hint_status == VDEC_NEED_HINT) {
12170		vf_notify_receiver(PROVIDER_NAME,
12171				VFRAME_EVENT_PROVIDER_FR_HINT,
12172				(void *)
12173				((unsigned long)hevc->frame_dur));
12174		fr_hint_status = VDEC_HINTED;
12175	}
12176#endif
12177
12178 return;
12179}
12180
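/*
 * Bottom half shared by vh265_work and vh265_timeout_work ("from" is 1 on
 * the timeout path): act on hevc->dec_result (get more data, finish the
 * frame, handle AGAIN/EOS/force-exit), recycle the input chunk, then hand
 * the cores back to the scheduler via vdec_core_finish_run() and vdec_cb.
 */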
12181static void vh265_work_implement(struct hevc_state_s *hevc,
12182 struct vdec_s *vdec,int from)
12183{
12184 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
12185 /*USE_BUF_BLOCK*/
12186 uninit_pic_list(hevc);
12187 hevc->uninit_list = 0;
12188#ifdef USE_UNINIT_SEMA
12189 up(&hevc->h265_uninit_done_sema);
12190#endif
12191 return;
12192 }
12193
12194 /* finished decoding one frame or error,
12195 * notify vdec core to switch context
12196 */
12197 if (hevc->pic_list_init_flag == 1
12198 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
12199 hevc->pic_list_init_flag = 2;
12200 init_pic_list(hevc);
12201 init_pic_list_hw(hevc);
12202 init_buf_spec(hevc);
12203 hevc_print(hevc, 0,
12204 "set pic_list_init_flag to 2\n");
12205
12206 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
12207 return;
12208 }
12209
12210 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12211 "%s dec_result %d %x %x %x\n",
12212 __func__,
12213 hevc->dec_result,
12214 READ_VREG(HEVC_STREAM_LEVEL),
12215 READ_VREG(HEVC_STREAM_WR_PTR),
12216 READ_VREG(HEVC_STREAM_RD_PTR));
12217
12218 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
12219 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
12220 && (hw_to_vdec(hevc)->next_status !=
12221 VDEC_STATUS_DISCONNECTED)) {
12222 if (!vdec_has_more_input(vdec)) {
12223 hevc->dec_result = DEC_RESULT_EOS;
12224 vdec_schedule_work(&hevc->work);
12225 return;
12226 }
12227 if (!input_frame_based(vdec)) {
12228 int r = vdec_sync_input(vdec);
12229 if (r >= 0x200) {
12230 WRITE_VREG(HEVC_DECODE_SIZE,
12231 READ_VREG(HEVC_DECODE_SIZE) + r);
12232
12233 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12234 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
12235 __func__,
12236 READ_VREG(HEVC_STREAM_LEVEL),
12237 READ_VREG(HEVC_STREAM_WR_PTR),
12238 READ_VREG(HEVC_STREAM_RD_PTR),
12239 READ_VREG(HEVC_MPC_E), r);
12240
12241 start_process_time(hevc);
12242 if (READ_VREG(HEVC_DEC_STATUS_REG)
12243 == HEVC_DECODE_BUFEMPTY2)
12244 WRITE_VREG(HEVC_DEC_STATUS_REG,
12245 HEVC_ACTION_DONE);
12246 else
12247 WRITE_VREG(HEVC_DEC_STATUS_REG,
12248 HEVC_ACTION_DEC_CONT);
12249 } else {
12250 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
12251 vdec_schedule_work(&hevc->work);
12252 }
12253 return;
12254 }
12255
12256		/* below is for frame-based input */
12257 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
12258 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12259 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
12260 __func__,
12261 READ_VREG(HEVC_STREAM_LEVEL),
12262 READ_VREG(HEVC_STREAM_WR_PTR),
12263 READ_VREG(HEVC_STREAM_RD_PTR),
12264 READ_VREG(HEVC_MPC_E));
12265 mutex_lock(&hevc->chunks_mutex);
12266 vdec_vframe_dirty(vdec, hevc->chunk);
12267 hevc->chunk = NULL;
12268 mutex_unlock(&hevc->chunks_mutex);
12269 vdec_clean_input(vdec);
12270 }
12271
12272 /*if (is_new_pic_available(hevc)) {*/
12273 if (run_ready(vdec, VDEC_HEVC)) {
12274 int r;
12275 int decode_size;
12276 r = vdec_prepare_input(vdec, &hevc->chunk);
12277 if (r < 0) {
12278 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
12279
12280 hevc_print(hevc,
12281 PRINT_FLAG_VDEC_DETAIL,
12282 "amvdec_vh265: Insufficient data\n");
12283
12284 vdec_schedule_work(&hevc->work);
12285 return;
12286 }
12287 hevc->dec_result = DEC_RESULT_NONE;
12288 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12289 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
12290 __func__, r,
12291 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
12292 get_data_check_sum(hevc, r) : 0,
12293 READ_VREG(HEVC_MPC_E));
12294
12295 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
12296 int jj;
12297 u8 *data = NULL;
12298
12299 if (!hevc->chunk->block->is_mapped)
12300 data = codec_mm_vmap(
12301 hevc->chunk->block->start +
12302 hevc->chunk->offset, r);
12303 else
12304 data = ((u8 *)
12305 hevc->chunk->block->start_virt)
12306 + hevc->chunk->offset;
12307
12308 for (jj = 0; jj < r; jj++) {
12309 if ((jj & 0xf) == 0)
12310 hevc_print(hevc,
12311 PRINT_FRAMEBASE_DATA,
12312 "%06x:", jj);
12313 hevc_print_cont(hevc,
12314 PRINT_FRAMEBASE_DATA,
12315 "%02x ", data[jj]);
12316 if (((jj + 1) & 0xf) == 0)
12317 hevc_print_cont(hevc,
12318 PRINT_FRAMEBASE_DATA,
12319 "\n");
12320 }
12321
12322 if (!hevc->chunk->block->is_mapped)
12323 codec_mm_unmap_phyaddr(data);
12324 }
12325
12326 decode_size = hevc->chunk->size +
12327 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12328 WRITE_VREG(HEVC_DECODE_SIZE,
12329 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
12330
12331 vdec_enable_input(vdec);
12332
12333 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12334 "%s: mpc %x\n",
12335 __func__, READ_VREG(HEVC_MPC_E));
12336
12337 start_process_time(hevc);
12338 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12339		} else {
12340 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
12341
12342 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12343 * "amvdec_vh265: Insufficient data\n");
12344 */
12345
12346 vdec_schedule_work(&hevc->work);
12347 }
12348 return;
12349 } else if (hevc->dec_result == DEC_RESULT_DONE) {
12350 /* if (!hevc->ctx_valid)
12351 hevc->ctx_valid = 1; */
12352 int i;
12353 decode_frame_count[hevc->index]++;
12354#ifdef DETREFILL_ENABLE
12355 if (hevc->is_swap &&
12356 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
12357 if (hevc->delrefill_check == 2) {
12358 delrefill(hevc);
12359 amhevc_stop();
12360 }
12361 }
12362#endif
12363 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
12364 hevc->used_4k_num =
12365 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
12366 if (hevc->used_4k_num >= 0 &&
12367 hevc->cur_pic &&
12368 hevc->cur_pic->scatter_alloc
12369 == 1) {
12370 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
12371 "%s pic index %d scatter_alloc %d page_start %d\n",
12372 "decoder_mmu_box_free_idx_tail",
12373 hevc->cur_pic->index,
12374 hevc->cur_pic->scatter_alloc,
12375 hevc->used_4k_num);
12376 if (hevc->m_ins_flag)
12377 hevc_mmu_dma_check(hw_to_vdec(hevc));
12378 decoder_mmu_box_free_idx_tail(
12379 hevc->mmu_box,
12380 hevc->cur_pic->index,
12381 hevc->used_4k_num);
12382 hevc->cur_pic->scatter_alloc = 2;
12383 }
12384 }
12385 hevc->pic_decoded_lcu_idx =
12386 READ_VREG(HEVC_PARSER_LCU_START)
12387 & 0xffffff;
12388
12389 if ((!vdec_dual(vdec)) &&
12390 hevc->empty_flag == 0) {
12391 hevc->over_decode =
12392 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
12393 if (hevc->over_decode)
12394 hevc_print(hevc, 0,
12395 "!!!Over decode\n");
12396 }
12397
12398 if (is_log_enable(hevc))
12399 add_log(hevc,
12400 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
12401 __func__,
12402 hevc->dec_result,
12403 hevc->pic_decoded_lcu_idx,
12404 hevc->used_4k_num,
12405 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
12406 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
12407 hevc->start_shift_bytes
12408 );
12409
12410 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12411 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
12412 __func__,
12413 hevc->dec_result,
12414 READ_VREG(HEVC_STREAM_LEVEL),
12415 READ_VREG(HEVC_STREAM_WR_PTR),
12416 READ_VREG(HEVC_STREAM_RD_PTR),
12417 hevc->pic_decoded_lcu_idx,
12418 hevc->used_4k_num,
12419 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
12420 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
12421 hevc->start_shift_bytes
12422 );
12423
12424 hevc->used_4k_num = -1;
12425
12426 check_pic_decoded_error(hevc,
12427 hevc->pic_decoded_lcu_idx);
12428 if ((error_handle_policy & 0x100) == 0 && hevc->cur_pic) {
12429 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12430 struct PIC_s *pic;
12431 pic = hevc->m_PIC[i];
12432 if (!pic || pic->index == -1)
12433 continue;
12434 if ((hevc->cur_pic->POC + poc_num_margin < pic->POC) && (pic->referenced == 0) &&
12435 (pic->output_mark == 1) && (pic->output_ready == 0)) {
12436 hevc->poc_error_count++;
12437 break;
12438 }
12439 }
12440 if (i == MAX_REF_PIC_NUM)
12441 hevc->poc_error_count = 0;
12442 if (hevc->poc_error_count >= poc_error_limit) {
12443 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12444 struct PIC_s *pic;
12445 pic = hevc->m_PIC[i];
12446 if (!pic || pic->index == -1)
12447 continue;
12448 if ((hevc->cur_pic->POC + poc_num_margin < pic->POC) && (pic->referenced == 0) &&
12449 (pic->output_mark == 1) && (pic->output_ready == 0)) {
12450 pic->output_mark = 0;
12451 hevc_print(hevc, 0, "DPB poc error, remove error frame\n");
12452 }
12453 }
12454 }
12455 }
12456
12457#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12458#if 1
12459 if (vdec->slave) {
12460 if (dv_debug & 0x1)
12461 vdec_set_flag(vdec->slave,
12462 VDEC_FLAG_SELF_INPUT_CONTEXT);
12463 else
12464 vdec_set_flag(vdec->slave,
12465 VDEC_FLAG_OTHER_INPUT_CONTEXT);
12466 }
12467#else
12468 if (vdec->slave) {
12469 if (no_interleaved_el_slice)
12470 vdec_set_flag(vdec->slave,
12471 VDEC_FLAG_INPUT_KEEP_CONTEXT);
12472 /* this will move real HW pointer for input */
12473 else
12474 vdec_set_flag(vdec->slave, 0);
12475			/* this will not move the real HW pointer,
12476			 * and SL layer decoding
12477			 * will start from the same stream position
12478			 * as the current BL decoder
12479			 */
12480 }
12481#endif
12482#endif
12483#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12484 hevc->shift_byte_count_lo
12485 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12486 if (vdec->slave) {
12487 /*cur is base, found enhance*/
12488 struct hevc_state_s *hevc_el =
12489 (struct hevc_state_s *)
12490 vdec->slave->private;
12491 if (hevc_el)
12492 hevc_el->shift_byte_count_lo =
12493 hevc->shift_byte_count_lo;
12494 } else if (vdec->master) {
12495 /*cur is enhance, found base*/
12496 struct hevc_state_s *hevc_ba =
12497 (struct hevc_state_s *)
12498 vdec->master->private;
12499 if (hevc_ba)
12500 hevc_ba->shift_byte_count_lo =
12501 hevc->shift_byte_count_lo;
12502 }
12503#endif
12504 mutex_lock(&hevc->chunks_mutex);
12505 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12506 hevc->chunk = NULL;
12507 mutex_unlock(&hevc->chunks_mutex);
12508 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
12509 /*
12510 stream base: stream buf empty or timeout
12511 frame base: vdec_prepare_input fail
12512 */
12513
12514 if (!vdec_has_more_input(vdec)) {
12515 hevc->dec_result = DEC_RESULT_EOS;
12516 vdec_schedule_work(&hevc->work);
12517 return;
12518 }
12519#ifdef AGAIN_HAS_THRESHOLD
12520 hevc->next_again_flag = 1;
12521#endif
12522 if (input_stream_based(vdec)) {
12523 u32 rp, wp, level;
12524 struct vdec_input_s *input = &vdec->input;
12525			rp = STBUF_READ(&vdec->vbuf, get_rp);
12526 wp = STBUF_READ(&vdec->vbuf, get_wp);
12527 if (wp < rp)
12528 level = input->size + wp - rp;
12529 else
12530 level = wp - rp;
12531 if ((level >= dirty_buffersize_threshold) &&
12532 (hevc->pre_parser_video_rp ==
12533 STBUF_READ(&vdec->vbuf, get_rp)) &&
12534 (hevc->pre_parser_video_wp ==
12535 STBUF_READ(&vdec->vbuf, get_wp))) {
12536 if (hevc->again_count == 0) {
12537 hevc->again_timeout_jiffies =
12538 get_jiffies_64() + dirty_time_threshold * HZ/1000;
12539 }
12540 hevc->again_count++;
12541 }
12542 else
12543 hevc->again_count = 0;
12544
12545			hevc->pre_parser_video_rp = STBUF_READ(&vdec->vbuf, get_rp);
12546 hevc->pre_parser_video_wp = STBUF_READ(&vdec->vbuf, get_wp);
12547
12548 if ((hevc->again_count > dirty_count_threshold) &&
12549 time_after64(get_jiffies_64(), hevc->again_timeout_jiffies)) {
12550 mutex_lock(&hevc->chunks_mutex);
12551 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12552 hevc->chunk = NULL;
12553 mutex_unlock(&hevc->chunks_mutex);
12554 }
12555 }
12556 } else if (hevc->dec_result == DEC_RESULT_EOS) {
12557 struct PIC_s *pic;
12558 hevc->eos = 1;
12559#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12560 if ((vdec_dual(vdec)) && aux_data_is_avaible(hevc))
12561 dolby_get_meta(hevc);
12562#endif
12563 check_pic_decoded_error(hevc,
12564 hevc->pic_decoded_lcu_idx);
12565 pic = get_pic_by_POC(hevc, hevc->curr_POC);
12566 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12567 "%s: end of stream, last dec poc %d => 0x%pf\n",
12568 __func__, hevc->curr_POC, pic);
12569 flush_output(hevc, pic);
12570 /* dummy vf with eos flag to backend */
12571 notify_v4l_eos(hw_to_vdec(hevc));
12572#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12573 hevc->shift_byte_count_lo
12574 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12575 if (vdec->slave) {
12576 /*cur is base, found enhance*/
12577 struct hevc_state_s *hevc_el =
12578 (struct hevc_state_s *)
12579 vdec->slave->private;
12580 if (hevc_el)
12581 hevc_el->shift_byte_count_lo =
12582 hevc->shift_byte_count_lo;
12583 } else if (vdec->master) {
12584 /*cur is enhance, found base*/
12585 struct hevc_state_s *hevc_ba =
12586 (struct hevc_state_s *)
12587 vdec->master->private;
12588 if (hevc_ba)
12589 hevc_ba->shift_byte_count_lo =
12590 hevc->shift_byte_count_lo;
12591 }
12592#endif
12593 mutex_lock(&hevc->chunks_mutex);
12594 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12595 hevc->chunk = NULL;
12596 mutex_unlock(&hevc->chunks_mutex);
12597 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
12598 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12599 "%s: force exit\n",
12600 __func__);
12601 if (hevc->stat & STAT_VDEC_RUN) {
12602 amhevc_stop();
12603 hevc->stat &= ~STAT_VDEC_RUN;
12604 }
12605 if (hevc->stat & STAT_ISR_REG) {
12606 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
12607 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
12608 hevc->stat &= ~STAT_ISR_REG;
12609 }
12610 hevc_print(hevc, 0, "%s: force exit end\n",
12611 __func__);
12612 }
12613
12614 if (hevc->stat & STAT_VDEC_RUN) {
12615 amhevc_stop();
12616 hevc->stat &= ~STAT_VDEC_RUN;
12617 }
12618
12619 if (hevc->stat & STAT_TIMER_ARM) {
12620 del_timer_sync(&hevc->timer);
12621 hevc->stat &= ~STAT_TIMER_ARM;
12622 }
12623
12624 wait_hevc_search_done(hevc);
12625#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12626 if (hevc->switch_dvlayer_flag) {
12627 if (vdec->slave)
12628 vdec_set_next_sched(vdec, vdec->slave);
12629 else if (vdec->master)
12630 vdec_set_next_sched(vdec, vdec->master);
12631 } else if (vdec->slave || vdec->master)
12632 vdec_set_next_sched(vdec, vdec);
12633#endif
12634
12635 if (from == 1) {
12636 /* This is a timeout work */
12637 if (work_pending(&hevc->work)) {
12638 /*
12639 * The vh265_work arrives at the last second,
12640 * give it a chance to handle the scenario.
12641 */
12642 return;
12643			//cancel_work_sync(&hevc->work);//reserved for future consideration
12644 }
12645 }
12646
12647 /* mark itself has all HW resource released and input released */
12648 if (vdec->parallel_dec == 1)
12649 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
12650 else
12651 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12652
12653 if (hevc->is_used_v4l) {
12654 struct aml_vcodec_ctx *ctx =
12655 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12656
12657 if (ctx->param_sets_from_ucode &&
12658 !hevc->v4l_params_parsed)
12659 vdec_v4l_write_frame_sync(ctx);
12660 }
12661
12662 if (hevc->vdec_cb)
12663 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
12664}
12665
12666static void vh265_work(struct work_struct *work)
12667{
12668 struct hevc_state_s *hevc = container_of(work,
12669 struct hevc_state_s, work);
12670 struct vdec_s *vdec = hw_to_vdec(hevc);
12671
12672 vh265_work_implement(hevc, vdec, 0);
12673}
12674
12675static void vh265_timeout_work(struct work_struct *work)
12676{
12677 struct hevc_state_s *hevc = container_of(work,
12678 struct hevc_state_s, timeout_work);
12679 struct vdec_s *vdec = hw_to_vdec(hevc);
12680
12681 if (work_pending(&hevc->work))
12682 return;
12683 vh265_work_implement(hevc, vdec, 1);
12684}
12685
12686
12687static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
12688{
12689 /* new to do ... */
12690 vh265_prot_init(hevc);
12691 return 0;
12692}
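/*
 * Scheduler "ready" callback: return the core mask this instance may grab
 * now (HEVC only, or VDEC_1 + HEVC when not in parallel mode), or 0 to stay
 * parked, e.g. when the stream level is below pre_decode_buf_level, no free
 * picture is available, or the v4l2 capture buffers are not ready yet.
 */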
12693static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
12694{
12695 struct hevc_state_s *hevc =
12696 (struct hevc_state_s *)vdec->private;
12697 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
12698 CODEC_MM_FLAGS_TVP : 0;
12699 bool ret = 0;
12700 if (step == 0x12)
12701 return 0;
12702 else if (step == 0x11)
12703 step = 0x12;
12704
12705 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
12706 return 0;
12707
12708 if (hevc->eos)
12709 return 0;
12710 if (!hevc->first_sc_checked && hevc->mmu_enable) {
12711 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
12712		hevc->first_sc_checked = 1;
12713 hevc_print(hevc, 0,
12714 "vh265 cached=%d need_size=%d speed= %d ms\n",
12715 size, (hevc->need_cache_size >> PAGE_SHIFT),
12716 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
12717 }
12718 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
12719 && pre_decode_buf_level != 0) {
12720 u32 rp, wp, level;
12721
12722 rp = STBUF_READ(&vdec->vbuf, get_rp);
12723 wp = STBUF_READ(&vdec->vbuf, get_wp);
12724 if (wp < rp)
12725 level = vdec->input.size + wp - rp;
12726 else
12727 level = wp - rp;
12728
12729 if (level < pre_decode_buf_level)
12730 return 0;
12731 }
12732
12733#ifdef AGAIN_HAS_THRESHOLD
12734 if (hevc->next_again_flag &&
12735 (!vdec_frame_based(vdec))) {
12736 u32 parser_wr_ptr =
12737 STBUF_READ(&vdec->vbuf, get_wp);
12738 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
12739 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
12740 again_threshold) {
12741 int r = vdec_sync_input(vdec);
12742 hevc_print(hevc,
12743				PRINT_FLAG_VDEC_DETAIL, "%s buf level:%x\n", __func__, r);
12744 return 0;
12745 }
12746 }
12747#endif
12748
12749 if (disp_vframe_valve_level &&
12750 kfifo_len(&hevc->display_q) >=
12751 disp_vframe_valve_level) {
12752 hevc->valve_count--;
12753 if (hevc->valve_count <= 0)
12754 hevc->valve_count = 2;
12755 else
12756 return 0;
12757 }
12758
12759 ret = is_new_pic_available(hevc);
12760 if (!ret) {
12761 hevc_print(hevc,
12762 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
12763 __func__, ret);
12764 }
12765
12766#ifdef CONSTRAIN_MAX_BUF_NUM
12767 if (hevc->pic_list_init_flag == 3) {
12768 if (run_ready_max_vf_only_num > 0 &&
12769 get_vf_ref_only_buf_count(hevc) >=
12770 run_ready_max_vf_only_num
12771 )
12772 ret = 0;
12773 if (run_ready_display_q_num > 0 &&
12774 kfifo_len(&hevc->display_q) >=
12775 run_ready_display_q_num)
12776 ret = 0;
12777
12778 /*avoid more buffers consumed when
12779 switching resolution*/
12780 if (run_ready_max_buf_num == 0xff &&
12781 get_used_buf_count(hevc) >=
12782 get_work_pic_num(hevc))
12783 ret = 0;
12784 else if (run_ready_max_buf_num &&
12785 get_used_buf_count(hevc) >=
12786 run_ready_max_buf_num)
12787 ret = 0;
12788 }
12789#endif
12790
12791 if (hevc->is_used_v4l) {
12792 struct aml_vcodec_ctx *ctx =
12793 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12794
12795 if (ctx->param_sets_from_ucode) {
12796 if (hevc->v4l_params_parsed) {
12797 if (!ctx->v4l_codec_dpb_ready &&
12798 v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
12799 run_ready_min_buf_num)
12800 ret = 0;
12801 } else {
12802 if ((hevc->res_ch_flag == 1) &&
12803 ((ctx->state <= AML_STATE_INIT) ||
12804 (ctx->state >= AML_STATE_FLUSHING)))
12805 ret = 0;
12806 }
12807 } else if (!ctx->v4l_codec_dpb_ready) {
12808 if (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
12809 run_ready_min_buf_num)
12810 ret = 0;
12811 }
12812 }
12813
12814 if (ret)
12815 not_run_ready[hevc->index] = 0;
12816 else
12817 not_run_ready[hevc->index]++;
12818 if (vdec->parallel_dec == 1)
12819 return ret ? (CORE_MASK_HEVC) : 0;
12820 else
12821 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
12822}
12823
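/*
 * Scheduler "run" callback that starts one decode pass: prepare the input
 * chunk, reload the firmware unless the previously loaded ucode can be
 * reused, restore the HW context via vh265_hw_ctx_restore(), then start
 * the ucode with the watchdog timer armed.
 */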
12824static void run(struct vdec_s *vdec, unsigned long mask,
12825 void (*callback)(struct vdec_s *, void *), void *arg)
12826{
12827 struct hevc_state_s *hevc =
12828 (struct hevc_state_s *)vdec->private;
12829 int r, loadr = 0;
12830 unsigned char check_sum = 0;
12831
12832 run_count[hevc->index]++;
12833 hevc->vdec_cb_arg = arg;
12834 hevc->vdec_cb = callback;
12835 hevc->aux_data_dirty = 1;
12836 hevc_reset_core(vdec);
12837
12838#ifdef AGAIN_HAS_THRESHOLD
12839 if (vdec_stream_based(vdec)) {
12840 hevc->pre_parser_wr_ptr =
12841 STBUF_READ(&vdec->vbuf, get_wp);
12842 hevc->next_again_flag = 0;
12843 }
12844#endif
12845 r = vdec_prepare_input(vdec, &hevc->chunk);
12846 if (r < 0) {
12847 input_empty[hevc->index]++;
12848 hevc->dec_result = DEC_RESULT_AGAIN;
12849 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12850 "ammvdec_vh265: Insufficient data\n");
12851
12852 vdec_schedule_work(&hevc->work);
12853 return;
12854 }
12855 input_empty[hevc->index] = 0;
12856 hevc->dec_result = DEC_RESULT_NONE;
12857 if (vdec_frame_based(vdec) &&
12858 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12859 || is_log_enable(hevc)))
12860 check_sum = get_data_check_sum(hevc, r);
12861
12862 if (is_log_enable(hevc))
12863 add_log(hevc,
12864 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12865 __func__, r,
12866 check_sum,
12867 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12868 );
12869 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12870 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12871 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12872 __func__, r,
12873 check_sum,
12874 READ_VREG(HEVC_STREAM_LEVEL),
12875 READ_VREG(HEVC_STREAM_WR_PTR),
12876 READ_VREG(HEVC_STREAM_RD_PTR),
12877 STBUF_READ(&vdec->vbuf, get_rp),
12878 STBUF_READ(&vdec->vbuf, get_wp),
12879 hevc->start_shift_bytes
12880 );
12881 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12882 input_frame_based(vdec)) {
12883 int jj;
12884 u8 *data = NULL;
12885
12886 if (!hevc->chunk->block->is_mapped)
12887 data = codec_mm_vmap(hevc->chunk->block->start +
12888 hevc->chunk->offset, r);
12889 else
12890 data = ((u8 *)hevc->chunk->block->start_virt)
12891 + hevc->chunk->offset;
12892
12893 for (jj = 0; jj < r; jj++) {
12894 if ((jj & 0xf) == 0)
12895 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12896 "%06x:", jj);
12897 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12898 "%02x ", data[jj]);
12899 if (((jj + 1) & 0xf) == 0)
12900 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12901 "\n");
12902 }
12903
12904 if (!hevc->chunk->block->is_mapped)
12905 codec_mm_unmap_phyaddr(data);
12906 }
12907 if (vdec->mc_loaded) {
12908		/* firmware has been loaded before
12909		 * and has not changed to another one,
12910		 * so skip reloading.
12911		 */
12912 if (tee_enabled() && hevc->is_swap &&
12913 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12914 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12915 } else {
12916 if (hevc->mmu_enable)
12917 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12918 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12919 "h265_mmu", hevc->fw->data);
12920 else {
12921 if (!hevc->is_4k) {
12922					/* if an older version of the fw was loaded, */
12923					/* we need to try the no-swap fw because the */
12924					/* old fw package does not contain the swap fw. */
12925 loadr = amhevc_vdec_loadmc_ex(
12926 VFORMAT_HEVC, vdec,
12927 "hevc_mmu_swap",
12928 hevc->fw->data);
12929 if (loadr < 0)
12930 loadr = amhevc_vdec_loadmc_ex(
12931 VFORMAT_HEVC, vdec,
12932 "h265_mmu",
12933 hevc->fw->data);
12934 else
12935 hevc->is_swap = true;
12936 } else
12937 loadr = amhevc_vdec_loadmc_ex(
12938 VFORMAT_HEVC, vdec,
12939 "h265_mmu", hevc->fw->data);
12940 }
12941 else
12942 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12943 NULL, hevc->fw->data);
12944 if (loadr < 0) {
12945 amhevc_disable();
12946 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12947 tee_enabled() ? "TEE" : "local", loadr);
12948 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12949 vdec_schedule_work(&hevc->work);
12950 return;
12951 }
12952
12953 if (tee_enabled() && hevc->is_swap &&
12954 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12955 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12956#ifdef DETREFILL_ENABLE
12957 if (hevc->is_swap &&
12958 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12959 init_detrefill_buf(hevc);
12960#endif
12961 vdec->mc_loaded = 1;
12962 vdec->mc_type = VFORMAT_HEVC;
12963 }
12964 if (vh265_hw_ctx_restore(hevc) < 0) {
12965 vdec_schedule_work(&hevc->work);
12966 return;
12967 }
12968 vdec_enable_input(vdec);
12969
12970 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12971
12972 if (vdec_frame_based(vdec)) {
12973 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12974 r = hevc->chunk->size +
12975 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12976 hevc->decode_size = r;
12977 if (vdec->mvfrm)
12978 vdec->mvfrm->frame_size = hevc->chunk->size;
12979 }
12980#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12981 else {
12982 if (vdec->master || vdec->slave)
12983 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12984 hevc->shift_byte_count_lo);
12985 }
12986#endif
12987 WRITE_VREG(HEVC_DECODE_SIZE, r);
12988 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12989 hevc->init_flag = 1;
12990
12991 if (hevc->pic_list_init_flag == 3)
12992 init_pic_list_hw(hevc);
12993
12994 backup_decode_state(hevc);
12995 start_process_time(hevc);
12996 mod_timer(&hevc->timer, jiffies);
12997 hevc->stat |= STAT_TIMER_ARM;
12998 hevc->stat |= STAT_ISR_REG;
12999 if (vdec->mvfrm)
13000 vdec->mvfrm->hw_decode_start = local_clock();
13001 amhevc_start();
13002 hevc->stat |= STAT_VDEC_RUN;
13003}
13004
13005static void aml_free_canvas(struct vdec_s *vdec)
13006{
13007 int i;
13008 struct hevc_state_s *hevc =
13009 (struct hevc_state_s *)vdec->private;
13010
13011 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
13012 struct PIC_s *pic = hevc->m_PIC[i];
13013
13014 if (pic) {
13015 if (vdec->parallel_dec == 1) {
13016 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
13017 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
13018 }
13019 }
13020 }
13021}
13022
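/*
 * vdec reset callback: cancel pending work, stop the ucode and timer, then
 * rebuild the local state (picture list, mv buffers, canvases) so the
 * instance can be reused after a seek or error.
 */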
13023static void reset(struct vdec_s *vdec)
13024{
13025 struct hevc_state_s *hevc =
13026 (struct hevc_state_s *)vdec->private;
13027 int i;
13028
13029 cancel_work_sync(&hevc->work);
13030 cancel_work_sync(&hevc->notify_work);
13031 if (hevc->stat & STAT_VDEC_RUN) {
13032 amhevc_stop();
13033 hevc->stat &= ~STAT_VDEC_RUN;
13034 }
13035
13036 if (hevc->stat & STAT_TIMER_ARM) {
13037 del_timer_sync(&hevc->timer);
13038 hevc->stat &= ~STAT_TIMER_ARM;
13039 }
13040 hevc->dec_result = DEC_RESULT_NONE;
13041 reset_process_time(hevc);
13042 hevc->pic_list_init_flag = 0;
13043 dealloc_mv_bufs(hevc);
13044 aml_free_canvas(vdec);
13045 hevc_local_uninit(hevc);
13046 if (vh265_local_init(hevc) < 0)
13047 pr_debug(" %s local init fail\n", __func__);
13048 for (i = 0; i < BUF_POOL_SIZE; i++) {
13049 hevc->m_BUF[i].start_adr = 0;
13050 }
13051
13052 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
13053}
13054
13055static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
13056{
13057 struct hevc_state_s *hevc =
13058 (struct hevc_state_s *)vdec->private;
13059
13060 return vh265_isr(0, hevc);
13061}
13062
13063static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
13064{
13065 struct hevc_state_s *hevc =
13066 (struct hevc_state_s *)vdec->private;
13067
13068 return vh265_isr_thread_fn(0, hevc);
13069}
13070#endif
13071
13072static int amvdec_h265_probe(struct platform_device *pdev)
13073{
13074#ifdef MULTI_INSTANCE_SUPPORT
13075 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
13076#else
13077 struct vdec_dev_reg_s *pdata =
13078 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
13079#endif
13080 char *tmpbuf;
13081 int ret;
13082 struct hevc_state_s *hevc;
13083
13084 hevc = vmalloc(sizeof(struct hevc_state_s));
13085 if (hevc == NULL) {
13086 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
13087 return -ENOMEM;
13088 }
13089 gHevc = hevc;
13090 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
13091 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
13092 H265_DEBUG_DIS_SYS_ERROR_PROC));
13093 memset(hevc, 0, sizeof(struct hevc_state_s));
13094 if (get_dbg_flag(hevc))
13095 hevc_print(hevc, 0, "%s\r\n", __func__);
13096 mutex_lock(&vh265_mutex);
13097
13098 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
13099 (parser_sei_enable & 0x100) == 0)
13100 parser_sei_enable = 7; /*old 1*/
13101 hevc->m_ins_flag = 0;
13102 hevc->init_flag = 0;
13103 hevc->first_sc_checked = 0;
13104 hevc->uninit_list = 0;
13105 hevc->fatal_error = 0;
13106 hevc->show_frame_num = 0;
13107 hevc->frameinfo_enable = 1;
13108#ifdef MULTI_INSTANCE_SUPPORT
13109 hevc->platform_dev = pdev;
13110 platform_set_drvdata(pdev, pdata);
13111#endif
13112
13113 if (pdata == NULL) {
13114 hevc_print(hevc, 0,
13115 "\namvdec_h265 memory resource undefined.\n");
13116 vfree(hevc);
13117 mutex_unlock(&vh265_mutex);
13118 return -EFAULT;
13119 }
13120 if (mmu_enable_force == 0) {
13121 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
13122 || double_write_mode == 0x10)
13123 hevc->mmu_enable = 0;
13124 else
13125 hevc->mmu_enable = 1;
13126 }
13127 if (init_mmu_buffers(hevc)) {
13128 hevc_print(hevc, 0,
13129 "\n 265 mmu init failed!\n");
13130 vfree(hevc);
13131 mutex_unlock(&vh265_mutex);
13132 return -EFAULT;
13133 }
13134
13135 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
13136 work_buf_size, DRIVER_NAME, &hevc->buf_start);
13137 if (ret < 0) {
13138 uninit_mmu_buffers(hevc);
13139 vfree(hevc);
13140 mutex_unlock(&vh265_mutex);
13141 return ret;
13142 }
13143 hevc->buf_size = work_buf_size;
13144
13145
13146 if (!vdec_secure(pdata)) {
13147 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
13148 if (tmpbuf) {
13149 memset(tmpbuf, 0, work_buf_size);
13150 dma_sync_single_for_device(amports_get_dma_device(),
13151 hevc->buf_start,
13152 work_buf_size, DMA_TO_DEVICE);
13153 } else {
13154 tmpbuf = codec_mm_vmap(hevc->buf_start,
13155 work_buf_size);
13156 if (tmpbuf) {
13157 memset(tmpbuf, 0, work_buf_size);
13158 dma_sync_single_for_device(
13159 amports_get_dma_device(),
13160 hevc->buf_start,
13161 work_buf_size,
13162 DMA_TO_DEVICE);
13163 codec_mm_unmap_phyaddr(tmpbuf);
13164 }
13165 }
13166 }
13167
13168 if (get_dbg_flag(hevc)) {
13169 hevc_print(hevc, 0,
13170 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
13171 hevc->buf_start, hevc->buf_size);
13172 }
13173
13174 if (pdata->sys_info)
13175 hevc->vh265_amstream_dec_info = *pdata->sys_info;
13176 else {
13177 hevc->vh265_amstream_dec_info.width = 0;
13178 hevc->vh265_amstream_dec_info.height = 0;
13179 hevc->vh265_amstream_dec_info.rate = 30;
13180 }
13181#ifndef MULTI_INSTANCE_SUPPORT
13182 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
13183 workaround_enable |= 3;
13184 hevc_print(hevc, 0,
13185 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
13186 } else
13187 workaround_enable &= ~3;
13188#endif
13189 hevc->cma_dev = pdata->cma_dev;
13190 vh265_vdec_info_init(hevc);
13191
13192#ifdef MULTI_INSTANCE_SUPPORT
13193 pdata->private = hevc;
13194 pdata->dec_status = vh265_dec_status;
13195 pdata->set_trickmode = vh265_set_trickmode;
13196 pdata->set_isreset = vh265_set_isreset;
13197 is_reset = 0;
13198 if (vh265_init(pdata) < 0) {
13199#else
13200 if (vh265_init(hevc) < 0) {
13201#endif
13202 hevc_print(hevc, 0,
13203 "\namvdec_h265 init failed.\n");
13204 hevc_local_uninit(hevc);
13205 if (hevc->gvs)
13206 kfree(hevc->gvs);
13207 hevc->gvs = NULL;
13208 uninit_mmu_buffers(hevc);
13209 vfree(hevc);
13210 pdata->dec_status = NULL;
13211 mutex_unlock(&vh265_mutex);
13212 return -ENODEV;
13213 }
13214 /*set the max clk for smooth playing...*/
13215 hevc_source_changed(VFORMAT_HEVC,
13216 3840, 2160, 60);
13217 mutex_unlock(&vh265_mutex);
13218
13219 return 0;
13220}
13221
13222static int amvdec_h265_remove(struct platform_device *pdev)
13223{
13224 struct hevc_state_s *hevc = gHevc;
13225
13226 if (get_dbg_flag(hevc))
13227 hevc_print(hevc, 0, "%s\r\n", __func__);
13228
13229 mutex_lock(&vh265_mutex);
13230
13231 vh265_stop(hevc);
13232
13233 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
13234
13235
13236#ifdef DEBUG_PTS
13237 hevc_print(hevc, 0,
13238 "pts missed %ld, pts hit %ld, duration %d\n",
13239 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
13240#endif
13241
13242 vfree(hevc);
13243 hevc = NULL;
13244 gHevc = NULL;
13245
13246 mutex_unlock(&vh265_mutex);
13247
13248 return 0;
13249}
13250/****************************************/
13251#ifdef CONFIG_PM
13252static int h265_suspend(struct device *dev)
13253{
13254 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
13255 return 0;
13256}
13257
13258static int h265_resume(struct device *dev)
13259{
13260 amhevc_resume(to_platform_device(dev));
13261 return 0;
13262}
13263
13264static const struct dev_pm_ops h265_pm_ops = {
13265 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
13266};
13267#endif
13268
13269static struct platform_driver amvdec_h265_driver = {
13270 .probe = amvdec_h265_probe,
13271 .remove = amvdec_h265_remove,
13272 .driver = {
13273 .name = DRIVER_NAME,
13274#ifdef CONFIG_PM
13275 .pm = &h265_pm_ops,
13276#endif
13277 }
13278};
13279
13280#ifdef MULTI_INSTANCE_SUPPORT
13281static void vh265_dump_state(struct vdec_s *vdec)
13282{
13283 int i;
13284 struct hevc_state_s *hevc =
13285 (struct hevc_state_s *)vdec->private;
13286 hevc_print(hevc, 0,
13287 "====== %s\n", __func__);
13288
13289 hevc_print(hevc, 0,
13290 "width/height (%d/%d), reorder_pic_num %d ip_mode %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
13291 hevc->frame_width,
13292 hevc->frame_height,
13293 hevc->sps_num_reorder_pics_0,
13294 hevc->ip_mode,
13295 get_work_pic_num(hevc),
13296 hevc->video_signal_type_debug,
13297 hevc->is_swap
13298 );
13299
13300 hevc_print(hevc, 0,
13301 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
13302 input_frame_based(vdec),
13303 hevc->eos,
13304 hevc->dec_result,
13305 decode_frame_count[hevc->index],
13306 display_frame_count[hevc->index],
13307 run_count[hevc->index],
13308 not_run_ready[hevc->index],
13309 input_empty[hevc->index]
13310 );
13311
13312 if (vf_get_receiver(vdec->vf_provider_name)) {
13313 enum receviver_start_e state =
13314 vf_notify_receiver(vdec->vf_provider_name,
13315 VFRAME_EVENT_PROVIDER_QUREY_STATE,
13316 NULL);
13317 hevc_print(hevc, 0,
13318 "\nreceiver(%s) state %d\n",
13319 vdec->vf_provider_name,
13320 state);
13321 }
13322
13323 hevc_print(hevc, 0,
13324 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
13325 __func__,
13326 kfifo_len(&hevc->newframe_q),
13327 VF_POOL_SIZE,
13328 kfifo_len(&hevc->display_q),
13329 VF_POOL_SIZE,
13330 hevc->vf_pre_count,
13331 hevc->vf_get_count,
13332 hevc->vf_put_count,
13333 hevc->pic_list_init_flag,
13334 is_new_pic_available(hevc)
13335 );
13336
13337 dump_pic_list(hevc);
13338
13339 for (i = 0; i < BUF_POOL_SIZE; i++) {
13340 hevc_print(hevc, 0,
13341 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
13342 i,
13343 hevc->m_BUF[i].start_adr,
13344 hevc->m_BUF[i].size,
13345 hevc->m_BUF[i].used_flag);
13346 }
13347
13348 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
13349 hevc_print(hevc, 0,
13350 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
13351 i,
13352 hevc->m_mv_BUF[i].start_adr,
13353 hevc->m_mv_BUF[i].size,
13354 hevc->m_mv_BUF[i].used_flag);
13355 }
13356
13357 hevc_print(hevc, 0,
13358 "HEVC_DEC_STATUS_REG=0x%x\n",
13359 READ_VREG(HEVC_DEC_STATUS_REG));
13360 hevc_print(hevc, 0,
13361 "HEVC_MPC_E=0x%x\n",
13362 READ_VREG(HEVC_MPC_E));
13363 hevc_print(hevc, 0,
13364 "HEVC_DECODE_MODE=0x%x\n",
13365 READ_VREG(HEVC_DECODE_MODE));
13366 hevc_print(hevc, 0,
13367 "HEVC_DECODE_MODE2=0x%x\n",
13368 READ_VREG(HEVC_DECODE_MODE2));
13369 hevc_print(hevc, 0,
13370 "NAL_SEARCH_CTL=0x%x\n",
13371 READ_VREG(NAL_SEARCH_CTL));
13372 hevc_print(hevc, 0,
13373 "HEVC_PARSER_LCU_START=0x%x\n",
13374 READ_VREG(HEVC_PARSER_LCU_START));
13375 hevc_print(hevc, 0,
13376 "HEVC_DECODE_SIZE=0x%x\n",
13377 READ_VREG(HEVC_DECODE_SIZE));
13378 hevc_print(hevc, 0,
13379 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
13380 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
13381 hevc_print(hevc, 0,
13382 "HEVC_STREAM_START_ADDR=0x%x\n",
13383 READ_VREG(HEVC_STREAM_START_ADDR));
13384 hevc_print(hevc, 0,
13385 "HEVC_STREAM_END_ADDR=0x%x\n",
13386 READ_VREG(HEVC_STREAM_END_ADDR));
13387 hevc_print(hevc, 0,
13388 "HEVC_STREAM_LEVEL=0x%x\n",
13389 READ_VREG(HEVC_STREAM_LEVEL));
13390 hevc_print(hevc, 0,
13391 "HEVC_STREAM_WR_PTR=0x%x\n",
13392 READ_VREG(HEVC_STREAM_WR_PTR));
13393 hevc_print(hevc, 0,
13394 "HEVC_STREAM_RD_PTR=0x%x\n",
13395 READ_VREG(HEVC_STREAM_RD_PTR));
13396 hevc_print(hevc, 0,
13397 "PARSER_VIDEO_RP=0x%x\n",
13398 STBUF_READ(&vdec->vbuf, get_rp));
13399 hevc_print(hevc, 0,
13400 "PARSER_VIDEO_WP=0x%x\n",
13401 STBUF_READ(&vdec->vbuf, get_wp));
13402
13403 if (input_frame_based(vdec) &&
13404 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
13405 ) {
13406 int jj;
13407 if (hevc->chunk && hevc->chunk->block &&
13408 hevc->chunk->size > 0) {
13409 u8 *data = NULL;
13410 if (!hevc->chunk->block->is_mapped)
13411 data = codec_mm_vmap(hevc->chunk->block->start +
13412 hevc->chunk->offset, hevc->chunk->size);
13413 else
13414 data = ((u8 *)hevc->chunk->block->start_virt)
13415 + hevc->chunk->offset;
13416 hevc_print(hevc, 0,
13417 "frame data size 0x%x\n",
13418 hevc->chunk->size);
13419 for (jj = 0; jj < hevc->chunk->size; jj++) {
13420 if ((jj & 0xf) == 0)
13421 hevc_print(hevc,
13422 PRINT_FRAMEBASE_DATA,
13423 "%06x:", jj);
13424 hevc_print_cont(hevc,
13425 PRINT_FRAMEBASE_DATA,
13426 "%02x ", data[jj]);
13427 if (((jj + 1) & 0xf) == 0)
13428 hevc_print_cont(hevc,
13429 PRINT_FRAMEBASE_DATA,
13430 "\n");
13431 }
13432
13433 if (!hevc->chunk->block->is_mapped)
13434 codec_mm_unmap_phyaddr(data);
13435 }
13436 }
13437
13438}
13439
13440
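/*
 * Note: multi-instance probe.  Roughly: allocate and zero the
 * hevc_state_s, hook the vdec_s callbacks (dec_status, run/run_ready,
 * reset, irq handlers, dump_state), choose a vframe provider name
 * (VFM path, DolbyVision dual-layer pair, or the indexed ".%02x"
 * multi-instance name), apply per-instance config overrides, set up
 * the MMU/work buffers and finally request the decoder core masks.
 */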
13441static int ammvdec_h265_probe(struct platform_device *pdev)
13442{
13443
13444 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
13445 struct hevc_state_s *hevc = NULL;
13446 int ret;
13447#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
13448 int config_val;
13449#endif
13450 //pr_err("[%s pid=%d tgid=%d] \n",__func__, current->pid, current->tgid);
13451 if (pdata == NULL) {
13452 pr_info("\nammvdec_h265 memory resource undefined.\n");
13453 return -EFAULT;
13454 }
13455
13456 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
13457 sizeof(struct hevc_state_s), GFP_KERNEL); */
13458 hevc = vmalloc(sizeof(struct hevc_state_s));
13459 if (hevc == NULL) {
13460 pr_info("\nammvdec_h265 device data allocation failed\n");
13461 return -ENOMEM;
13462 }
13463 memset(hevc, 0, sizeof(struct hevc_state_s));
13464
13465 /* the ctx from v4l2 driver. */
13466 hevc->v4l2_ctx = pdata->private;
13467
13468 pdata->private = hevc;
13469 pdata->dec_status = vh265_dec_status;
13470 pdata->set_trickmode = vh265_set_trickmode;
13471 pdata->run_ready = run_ready;
13472 pdata->run = run;
13473 pdata->reset = reset;
13474 pdata->irq_handler = vh265_irq_cb;
13475 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
13476 pdata->dump_state = vh265_dump_state;
13477
13478 hevc->index = pdev->id;
13479 hevc->m_ins_flag = 1;
13480
13481 if (pdata->use_vfm_path) {
13482 snprintf(pdata->vf_provider_name,
13483 VDEC_PROVIDER_NAME_SIZE,
13484 VFM_DEC_PROVIDER_NAME);
13485 hevc->frameinfo_enable = 1;
13486 }
13487#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13488 else if (vdec_dual(pdata)) {
13489 struct hevc_state_s *hevc_pair = NULL;
13490
13491 if (dv_toggle_prov_name) /*debug purpose*/
13492 snprintf(pdata->vf_provider_name,
13493 VDEC_PROVIDER_NAME_SIZE,
13494 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
13495 VFM_DEC_DVEL_PROVIDER_NAME);
13496 else
13497 snprintf(pdata->vf_provider_name,
13498 VDEC_PROVIDER_NAME_SIZE,
13499 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
13500 VFM_DEC_DVBL_PROVIDER_NAME);
13501 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
13502 if (pdata->master)
13503 hevc_pair = (struct hevc_state_s *)
13504 pdata->master->private;
13505 else if (pdata->slave)
13506 hevc_pair = (struct hevc_state_s *)
13507 pdata->slave->private;
13508 if (hevc_pair)
13509 hevc->shift_byte_count_lo =
13510 hevc_pair->shift_byte_count_lo;
13511 }
13512#endif
13513 else
13514 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
13515 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
13516
13517 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
13518 &vh265_vf_provider, pdata);
13519
13520 hevc->provider_name = pdata->vf_provider_name;
13521 platform_set_drvdata(pdev, pdata);
13522
13523 hevc->platform_dev = pdev;
13524
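	/*
	 * Note: unless IGNORE_PARAM_FROM_CONFIG is set in the debug flags,
	 * a per-instance config string supplied by the caller overrides the
	 * module parameters; integer keys parsed below include
	 * hevc_double_write_mode, save_buffer_mode, hevc_buf_width/height,
	 * the sidebind_* and parm_v4l_* options and dv_duallayer (the exact
	 * string syntax is defined by the config_parser helpers).  Without a
	 * config string the legacy sys_info values and module defaults apply.
	 */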
13525 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
13526 pdata->config && pdata->config_len) {
13527#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
13528 /*use ptr config for double_write_mode, etc*/
13529 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
13530
13531 if (get_config_int(pdata->config, "hevc_double_write_mode",
13532 &config_val) == 0)
13533 hevc->double_write_mode = config_val;
13534 else
13535 hevc->double_write_mode = double_write_mode;
13536
13537 if (get_config_int(pdata->config, "save_buffer_mode",
13538 &config_val) == 0)
13539 hevc->save_buffer_mode = config_val;
13540 else
13541 hevc->save_buffer_mode = 0;
13542
13543 /*use ptr config for max_pic_w, etc*/
13544 if (get_config_int(pdata->config, "hevc_buf_width",
13545 &config_val) == 0) {
13546 hevc->max_pic_w = config_val;
13547 }
13548 if (get_config_int(pdata->config, "hevc_buf_height",
13549 &config_val) == 0) {
13550 hevc->max_pic_h = config_val;
13551 }
13552
13553 if (get_config_int(pdata->config, "sidebind_type",
13554 &config_val) == 0)
13555 hevc->sidebind_type = config_val;
13556
13557 if (get_config_int(pdata->config, "sidebind_channel_id",
13558 &config_val) == 0)
13559 hevc->sidebind_channel_id = config_val;
13560
13561 if (get_config_int(pdata->config,
13562 "parm_v4l_codec_enable",
13563 &config_val) == 0)
13564 hevc->is_used_v4l = config_val;
13565
13566 if (get_config_int(pdata->config,
13567 "parm_v4l_buffer_margin",
13568 &config_val) == 0)
13569 hevc->dynamic_buf_num_margin = config_val;
13570
13571 if (get_config_int(pdata->config,
13572 "parm_v4l_canvas_mem_mode",
13573 &config_val) == 0)
13574 hevc->mem_map_mode = config_val;
13575
13576 if (get_config_int(pdata->config, "dv_duallayer",
13577 &config_val) == 0)
13578 hevc->dv_duallayer = config_val;
13579 else
13580 hevc->dv_duallayer = false;
13581#endif
13582 } else {
13583 if (pdata->sys_info)
13584 hevc->vh265_amstream_dec_info = *pdata->sys_info;
13585 else {
13586 hevc->vh265_amstream_dec_info.width = 0;
13587 hevc->vh265_amstream_dec_info.height = 0;
13588 hevc->vh265_amstream_dec_info.rate = 30;
13589 }
13590 hevc->double_write_mode = double_write_mode;
13591 }
13592 /* get valid double write from configure or node */
13593 hevc->double_write_mode = get_double_write_mode(hevc);
13594
13595 if (!hevc->is_used_v4l) {
13596 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
13597 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
13598 else
13599 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
13600
13601 hevc->mem_map_mode = mem_map_mode;
13602 }
13603
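	/*
	 * Note: unless mmu_enable_force is set from user space, the
	 * frame-buffer MMU is enabled only on GXL and newer SoCs; older
	 * chips fall back to the non-MMU buffer path.
	 */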
13604 if (mmu_enable_force == 0) {
13605 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
13606 hevc->mmu_enable = 0;
13607 else
13608 hevc->mmu_enable = 1;
13609 }
13610
13611 if (init_mmu_buffers(hevc) < 0) {
13612 hevc_print(hevc, 0,
13613 "\n 265 mmu init failed!\n");
13614 mutex_unlock(&vh265_mutex);
13615 /* devm_kfree(&pdev->dev, (void *)hevc);*/
13616 if (hevc)
13617 vfree((void *)hevc);
13618 pdata->dec_status = NULL;
13619 return -EFAULT;
13620 }
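	/*
	 * Note: the decoder workspace is carved out of the bmmu box below
	 * rather than taken from a static pdata->mem_start range (the
	 * disabled "#if 0" branch kept for reference).
	 */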
13621#if 0
13622 hevc->buf_start = pdata->mem_start;
13623 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
13624#else
13625
13626 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
13627 BMMU_WORKSPACE_ID, work_buf_size,
13628 DRIVER_NAME, &hevc->buf_start);
13629 if (ret < 0) {
13630 uninit_mmu_buffers(hevc);
13631 /* devm_kfree(&pdev->dev, (void *)hevc); */
13632 if (hevc)
13633 vfree((void *)hevc);
13634 pdata->dec_status = NULL;
13635 mutex_unlock(&vh265_mutex);
13636 return ret;
13637 }
13638 hevc->buf_size = work_buf_size;
13639#endif
13640 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
13641 (parser_sei_enable & 0x100) == 0)
13642 parser_sei_enable = 7;
13643 hevc->init_flag = 0;
13644 hevc->first_sc_checked = 0;
13645 hevc->uninit_list = 0;
13646 hevc->fatal_error = 0;
13647 hevc->show_frame_num = 0;
13648
13649 /*
13650 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
13651 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
13652 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
13653 */
13654 if (get_dbg_flag(hevc)) {
13655 hevc_print(hevc, 0,
13656 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
13657 hevc->buf_start, hevc->buf_size);
13658 }
13659
13660 hevc_print(hevc, 0,
13661 "dynamic_buf_num_margin=%d\n",
13662 hevc->dynamic_buf_num_margin);
13663 hevc_print(hevc, 0,
13664 "double_write_mode=%d\n",
13665 hevc->double_write_mode);
13666
13667 hevc->cma_dev = pdata->cma_dev;
13668 vh265_vdec_info_init(hevc);
13669
13670 if (vh265_init(pdata) < 0) {
13671 hevc_print(hevc, 0,
13672 "\namvdec_h265 init failed.\n");
13673 hevc_local_uninit(hevc);
13674 if (hevc->gvs)
13675 kfree(hevc->gvs);
13676 hevc->gvs = NULL;
13677 uninit_mmu_buffers(hevc);
13678 /* devm_kfree(&pdev->dev, (void *)hevc); */
13679 if (hevc)
13680 vfree((void *)hevc);
13681 pdata->dec_status = NULL;
13682 return -ENODEV;
13683 }
13684
13685 vdec_set_prepare_level(pdata, start_decode_buf_level);
13686
13687 /*set the max clk for smooth playing...*/
13688 hevc_source_changed(VFORMAT_HEVC,
13689 3840, 2160, 60);
13690 if (pdata->parallel_dec == 1)
13691 vdec_core_request(pdata, CORE_MASK_HEVC);
13692 else
13693 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
13694 | CORE_MASK_COMBINE);
13695
13696 return 0;
13697}
13698
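/*
 * Note: teardown mirrors probe: stop the instance, release the decoder
 * core mask(s) that were requested, mark the vdec disconnected and free
 * the per-instance state.
 */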
13699static int ammvdec_h265_remove(struct platform_device *pdev)
13700{
13701 struct hevc_state_s *hevc =
13702 (struct hevc_state_s *)
13703 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
13704 struct vdec_s *vdec;
13705
13706 if (hevc == NULL)
13707 return 0;
13708 vdec = hw_to_vdec(hevc);
13709
13710 //pr_err("%s [pid=%d,tgid=%d]\n", __func__, current->pid, current->tgid);
13711 if (get_dbg_flag(hevc))
13712 hevc_print(hevc, 0, "%s\r\n", __func__);
13713
13714 vmh265_stop(hevc);
13715
13716 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
13717 if (vdec->parallel_dec == 1)
13718 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
13719 else
13720 vdec_core_release(hw_to_vdec(hevc), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
13721
13722 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
13723
13724 vfree((void *)hevc);
13725
13726 return 0;
13727}
13728
13729static struct platform_driver ammvdec_h265_driver = {
13730 .probe = ammvdec_h265_probe,
13731 .remove = ammvdec_h265_remove,
13732 .driver = {
13733 .name = MULTI_DRIVER_NAME,
13734#ifdef CONFIG_PM
13735 .pm = &h265_pm_ops,
13736#endif
13737 }
13738};
13739#endif
13740
13741static struct codec_profile_t amvdec_h265_profile = {
13742 .name = "hevc",
13743 .profile = ""
13744};
13745
13746static struct codec_profile_t amvdec_h265_profile_single,
13747 amvdec_h265_profile_mult;
13748
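/*
 * Note: h265_configs mirrors the writable module parameters into the
 * "media.decoder" config tree; the node is registered as "h265" via
 * INIT_REG_NODE_CONFIGS() at module init.
 */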
13749static struct mconfig h265_configs[] = {
13750 MC_PU32("use_cma", &use_cma),
13751 MC_PU32("bit_depth_luma", &bit_depth_luma),
13752 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
13753 MC_PU32("video_signal_type", &video_signal_type),
13754#ifdef ERROR_HANDLE_DEBUG
13755 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
13756 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
13757#endif
13758 MC_PU32("radr", &radr),
13759 MC_PU32("rval", &rval),
13760 MC_PU32("dbg_cmd", &dbg_cmd),
13761 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
13762 MC_PU32("endian", &endian),
13763 MC_PU32("step", &step),
13764 MC_PU32("udebug_flag", &udebug_flag),
13765 MC_PU32("decode_pic_begin", &decode_pic_begin),
13766 MC_PU32("slice_parse_begin", &slice_parse_begin),
13767 MC_PU32("nal_skip_policy", &nal_skip_policy),
13768 MC_PU32("i_only_flag", &i_only_flag),
13769 MC_PU32("error_handle_policy", &error_handle_policy),
13770 MC_PU32("error_handle_threshold", &error_handle_threshold),
13771 MC_PU32("error_handle_nal_skip_threshold",
13772 &error_handle_nal_skip_threshold),
13773 MC_PU32("error_handle_system_threshold",
13774 &error_handle_system_threshold),
13775 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
13776 MC_PU32("debug", &debug),
13777 MC_PU32("debug_mask", &debug_mask),
13778 MC_PU32("buffer_mode", &buffer_mode),
13779 MC_PU32("double_write_mode", &double_write_mode),
13780 MC_PU32("buf_alloc_width", &buf_alloc_width),
13781 MC_PU32("buf_alloc_height", &buf_alloc_height),
13782 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
13783 MC_PU32("max_buf_num", &max_buf_num),
13784 MC_PU32("buf_alloc_size", &buf_alloc_size),
13785 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
13786 MC_PU32("mem_map_mode", &mem_map_mode),
13787 MC_PU32("enable_mem_saving", &enable_mem_saving),
13788 MC_PU32("force_w_h", &force_w_h),
13789 MC_PU32("force_fps", &force_fps),
13790 MC_PU32("max_decoding_time", &max_decoding_time),
13791 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
13792 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
13793 MC_PU32("interlace_enable", &interlace_enable),
13794 MC_PU32("pts_unstable", &pts_unstable),
13795 MC_PU32("parser_sei_enable", &parser_sei_enable),
13796 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
13797 MC_PU32("decode_timeout_val", &decode_timeout_val),
13798 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
13799#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13800 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
13801 MC_PU32("dv_debug", &dv_debug),
13802#endif
13803};
13804static struct mconfig_node decoder_265_node;
13805
13806static int __init amvdec_h265_driver_init_module(void)
13807{
13808 struct BuffInfo_s *p_buf_info;
13809
13810 if (vdec_is_support_4k()) {
13811 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
13812 p_buf_info = &amvh265_workbuff_spec[2];
13813 else
13814 p_buf_info = &amvh265_workbuff_spec[1];
13815 } else
13816 p_buf_info = &amvh265_workbuff_spec[0];
13817
13818 init_buff_spec(NULL, p_buf_info);
13819 work_buf_size =
13820 (p_buf_info->end_adr - p_buf_info->start_adr
13821 + 0xffff) & (~0xffff);
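	/*
	 * Note: the workspace size is rounded up to the next 64 KiB
	 * boundary: (size + 0xffff) & ~0xffff.  For example, a raw span of
	 * 0x12345 bytes becomes 0x20000.
	 */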
13822
13823 pr_debug("amvdec_h265 module init\n");
13824 error_handle_policy = 0;
13825
13826#ifdef ERROR_HANDLE_DEBUG
13827 dbg_nal_skip_flag = 0;
13828 dbg_nal_skip_count = 0;
13829#endif
13830 udebug_flag = 0;
13831 decode_pic_begin = 0;
13832 slice_parse_begin = 0;
13833 step = 0;
13834 buf_alloc_size = 0;
13835
13836#ifdef MULTI_INSTANCE_SUPPORT
13837 if (platform_driver_register(&ammvdec_h265_driver))
13838 pr_err("failed to register ammvdec_h265 driver\n");
13839
13840#endif
13841 if (platform_driver_register(&amvdec_h265_driver)) {
13842 pr_err("failed to register amvdec_h265 driver\n");
13843 return -ENODEV;
13844 }
13845#if 1/*MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8*/
13846 if (!has_hevc_vdec()) {
13847 /* hevc is not supported */
13848 amvdec_h265_profile.name = "hevc_unsupport";
13849 }
13850 if (vdec_is_support_4k()) {
13851 if (is_meson_m8m2_cpu()) {
13852 /* m8m2 supports 4k */
13853 amvdec_h265_profile.profile = "4k";
13854 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
13855 amvdec_h265_profile.profile =
13856 "8k, 8bit, 10bit, dwrite, compressed, frame_dv";
13857 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
13858 amvdec_h265_profile.profile =
13859 "4k, 8bit, 10bit, dwrite, compressed, frame_dv";
13860 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV)
13861 amvdec_h265_profile.profile = "4k";
13862 }
13863#endif
13864 if (codec_mm_get_total_size() < 80 * SZ_1M) {
13865 pr_info("amvdec_h265 default mmu enabled.\n");
13866 mmu_enable = 1;
13867 }
13868
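	/*
	 * Note: the same capability string is apparently registered under
	 * three profile names, "hevc", "h265" and "mh265", so the different
	 * playback stacks can all query it.
	 */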
13869 vcodec_profile_register(&amvdec_h265_profile);
13870 amvdec_h265_profile_single = amvdec_h265_profile;
13871 amvdec_h265_profile_single.name = "h265";
13872 vcodec_profile_register(&amvdec_h265_profile_single);
13873 amvdec_h265_profile_mult = amvdec_h265_profile;
13874 amvdec_h265_profile_mult.name = "mh265";
13875 vcodec_profile_register(&amvdec_h265_profile_mult);
13876 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
13877 "h265", h265_configs, CONFIG_FOR_RW);
13878 return 0;
13879}
13880
13881static void __exit amvdec_h265_driver_remove_module(void)
13882{
13883 pr_debug("amvdec_h265 module remove.\n");
13884
13885#ifdef MULTI_INSTANCE_SUPPORT
13886 platform_driver_unregister(&ammvdec_h265_driver);
13887#endif
13888 platform_driver_unregister(&amvdec_h265_driver);
13889}
13890
13891/****************************************/
13892/*
13893 *module_param(stat, uint, 0664);
13894 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
13895 */
13896module_param(use_cma, uint, 0664);
13897MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13898
13899module_param(bit_depth_luma, uint, 0664);
13900MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13901
13902module_param(bit_depth_chroma, uint, 0664);
13903MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13904
13905module_param(video_signal_type, uint, 0664);
13906MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13907
13908#ifdef ERROR_HANDLE_DEBUG
13909module_param(dbg_nal_skip_flag, uint, 0664);
13910MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13911
13912module_param(dbg_nal_skip_count, uint, 0664);
13913MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13914#endif
13915
13916module_param(radr, uint, 0664);
13917MODULE_PARM_DESC(radr, "\n radr\n");
13918
13919module_param(rval, uint, 0664);
13920MODULE_PARM_DESC(rval, "\n rval\n");
13921
13922module_param(dbg_cmd, uint, 0664);
13923MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13924
13925module_param(dump_nal, uint, 0664);
13926MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13927
13928module_param(dbg_skip_decode_index, uint, 0664);
13929MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13930
13931module_param(endian, uint, 0664);
13932MODULE_PARM_DESC(endian, "\n endian\n");
13933
13934module_param(step, uint, 0664);
13935MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13936
13937module_param(decode_pic_begin, uint, 0664);
13938MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13939
13940module_param(slice_parse_begin, uint, 0664);
13941MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13942
13943module_param(nal_skip_policy, uint, 0664);
13944MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
13945
13946module_param(i_only_flag, uint, 0664);
13947MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
13948
13949module_param(fast_output_enable, uint, 0664);
13950MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
13951
13952module_param(error_handle_policy, uint, 0664);
13953MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
13954
13955module_param(error_handle_threshold, uint, 0664);
13956MODULE_PARM_DESC(error_handle_threshold,
13957 "\n amvdec_h265 error_handle_threshold\n");
13958
13959module_param(error_handle_nal_skip_threshold, uint, 0664);
13960MODULE_PARM_DESC(error_handle_nal_skip_threshold,
13961 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
13962
13963module_param(error_handle_system_threshold, uint, 0664);
13964MODULE_PARM_DESC(error_handle_system_threshold,
13965 "\n amvdec_h265 error_handle_system_threshold\n");
13966
13967module_param(error_skip_nal_count, uint, 0664);
13968MODULE_PARM_DESC(error_skip_nal_count,
13969 "\n amvdec_h265 error_skip_nal_count\n");
13970
13971module_param(skip_nal_count, uint, 0664);
13972MODULE_PARM_DESC(skip_nal_count, "\n skip_nal_count\n");
13973
13974module_param(debug, uint, 0664);
13975MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
13976
13977module_param(debug_mask, uint, 0664);
13978MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
13979
13980module_param(log_mask, uint, 0664);
13981MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
13982
13983module_param(buffer_mode, uint, 0664);
13984MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
13985
13986module_param(double_write_mode, uint, 0664);
13987MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
13988
13989module_param(buf_alloc_width, uint, 0664);
13990MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
13991
13992module_param(buf_alloc_height, uint, 0664);
13993MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
13994
13995module_param(dynamic_buf_num_margin, uint, 0664);
13996MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
13997
13998module_param(max_buf_num, uint, 0664);
13999MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
14000
14001module_param(buf_alloc_size, uint, 0664);
14002MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
14003
14004#ifdef CONSTRAIN_MAX_BUF_NUM
14005module_param(run_ready_max_vf_only_num, uint, 0664);
14006MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");
14007
14008module_param(run_ready_display_q_num, uint, 0664);
14009MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");
14010
14011module_param(run_ready_max_buf_num, uint, 0664);
14012MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
14013#endif
14014
14015#if 0
14016module_param(re_config_pic_flag, uint, 0664);
14017MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
14018#endif
14019
14020module_param(buffer_mode_dbg, uint, 0664);
14021MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");
14022
14023module_param(mem_map_mode, uint, 0664);
14024MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");
14025
14026module_param(enable_mem_saving, uint, 0664);
14027MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");
14028
14029module_param(force_w_h, uint, 0664);
14030MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
14031
14032module_param(force_fps, uint, 0664);
14033MODULE_PARM_DESC(force_fps, "\n force_fps\n");
14034
14035module_param(max_decoding_time, uint, 0664);
14036MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");
14037
14038module_param(prefix_aux_buf_size, uint, 0664);
14039MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");
14040
14041module_param(suffix_aux_buf_size, uint, 0664);
14042MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");
14043
14044module_param(interlace_enable, uint, 0664);
14045MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
14046module_param(pts_unstable, uint, 0664);
14047MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
14048module_param(parser_sei_enable, uint, 0664);
14049MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");
14050
14051module_param(parser_dolby_vision_enable, uint, 0664);
14052MODULE_PARM_DESC(parser_dolby_vision_enable,
14053 "\n parser_dolby_vision_enable\n");
14054
14055#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
14056module_param(dolby_meta_with_el, uint, 0664);
14057MODULE_PARM_DESC(dolby_meta_with_el,
14058 "\n dolby_meta_with_el\n");
14059
14060module_param(dolby_el_flush_th, uint, 0664);
14061MODULE_PARM_DESC(dolby_el_flush_th,
14062 "\n dolby_el_flush_th\n");
14063#endif
14064module_param(mmu_enable, uint, 0664);
14065MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");
14066
14067module_param(mmu_enable_force, uint, 0664);
14068MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");
14069
14070#ifdef MULTI_INSTANCE_SUPPORT
14071module_param(start_decode_buf_level, int, 0664);
14072MODULE_PARM_DESC(start_decode_buf_level,
14073 "\n h265 start_decode_buf_level\n");
14074
14075module_param(decode_timeout_val, uint, 0664);
14076MODULE_PARM_DESC(decode_timeout_val,
14077 "\n h265 decode_timeout_val\n");
14078
14079module_param(print_lcu_error, uint, 0664);
14080MODULE_PARM_DESC(print_lcu_error,
14081 "\n h265 print_lcu_error\n");
14082
14083module_param(data_resend_policy, uint, 0664);
14084MODULE_PARM_DESC(data_resend_policy,
14085 "\n h265 data_resend_policy\n");
14086
14087module_param(poc_num_margin, uint, 0664);
14088MODULE_PARM_DESC(poc_num_margin,
14089 "\n h265 poc_num_margin\n");
14090
14091module_param(poc_error_limit, uint, 0664);
14092MODULE_PARM_DESC(poc_error_limit,
14093 "\n h265 poc_error_limit\n");
14094
14095module_param_array(decode_frame_count, uint,
14096 &max_decode_instance_num, 0664);
14097
14098module_param_array(display_frame_count, uint,
14099 &max_decode_instance_num, 0664);
14100
14101module_param_array(max_process_time, uint,
14102 &max_decode_instance_num, 0664);
14103
14104module_param_array(max_get_frame_interval,
14105 uint, &max_decode_instance_num, 0664);
14106
14107module_param_array(run_count, uint,
14108 &max_decode_instance_num, 0664);
14109
14110module_param_array(input_empty, uint,
14111 &max_decode_instance_num, 0664);
14112
14113module_param_array(not_run_ready, uint,
14114 &max_decode_instance_num, 0664);
14115
14116module_param_array(ref_frame_mark_flag, uint,
14117 &max_decode_instance_num, 0664);
14118
14119#endif
14120#ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
14121module_param(dv_toggle_prov_name, uint, 0664);
14122MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");
14123
14124module_param(dv_debug, uint, 0664);
14125MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");
14126
14127module_param(force_bypass_dvenl, uint, 0664);
14128MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
14129#endif
14130
14131#ifdef AGAIN_HAS_THRESHOLD
14132module_param(again_threshold, uint, 0664);
14133MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
14134#endif
14135
14136module_param(force_disp_pic_index, int, 0664);
14137MODULE_PARM_DESC(force_disp_pic_index,
14138 "\n amvdec_h265 force_disp_pic_index\n");
14139
14140module_param(frmbase_cont_bitlevel, uint, 0664);
14141MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");
14142
14143module_param(udebug_flag, uint, 0664);
14144MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
14145
14146module_param(udebug_pause_pos, uint, 0664);
14147MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
14148
14149module_param(udebug_pause_val, uint, 0664);
14150MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
14151
14152module_param(pre_decode_buf_level, int, 0664);
14153MODULE_PARM_DESC(pre_decode_buf_level, "\n ammvdec_h265 pre_decode_buf_level\n");
14154
14155module_param(udebug_pause_decode_idx, uint, 0664);
14156MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
14157
14158module_param(disp_vframe_valve_level, uint, 0664);
14159MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");
14160
14161module_param(pic_list_debug, uint, 0664);
14162MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");
14163
14164module_param(without_display_mode, uint, 0664);
14165MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");
14166
14167#ifdef HEVC_8K_LFTOFFSET_FIX
14168module_param(performance_profile, uint, 0664);
14169MODULE_PARM_DESC(performance_profile, "\n amvdec_h265 performance_profile\n");
14170#endif
14171module_param(disable_ip_mode, uint, 0664);
14172MODULE_PARM_DESC(disable_ip_mode, "\n amvdec_h265 disable ip_mode\n");
14173
14174module_param(dirty_time_threshold, uint, 0664);
14175MODULE_PARM_DESC(dirty_time_threshold, "\n dirty_time_threshold\n");
14176
14177module_param(dirty_count_threshold, uint, 0664);
14178MODULE_PARM_DESC(dirty_count_threshold, "\n dirty_count_threshold\n");
14179
14180module_param(dirty_buffersize_threshold, uint, 0664);
14181MODULE_PARM_DESC(dirty_buffersize_threshold, "\n dirty_buffersize_threshold\n");
14182
14183
14184
14185module_init(amvdec_h265_driver_init_module);
14186module_exit(amvdec_h265_driver_remove_module);
14187
14188MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
14189MODULE_LICENSE("GPL");
14190MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");
14191