summaryrefslogtreecommitdiff
path: root/drivers/frame_provider/decoder/vp9/vvp9.c (plain)
blob: 076d00dcb943521e73bb10ed6e1b50f5c733c057
1 /*
2 * drivers/amlogic/amports/vvp9.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/semaphore.h>
24#include <linux/delay.h>
25#include <linux/timer.h>
26#include <linux/kfifo.h>
27#include <linux/kthread.h>
28#include <linux/spinlock.h>
29#include <linux/platform_device.h>
30#include <linux/amlogic/media/vfm/vframe.h>
31#include <linux/amlogic/media/utils/amstream.h>
32#include <linux/amlogic/media/utils/vformat.h>
33#include <linux/amlogic/media/frame_sync/ptsserv.h>
34#include <linux/amlogic/media/canvas/canvas.h>
35#include <linux/amlogic/media/vfm/vframe_provider.h>
36#include <linux/amlogic/media/vfm/vframe_receiver.h>
37#include <linux/dma-mapping.h>
38#include <linux/dma-contiguous.h>
39#include <linux/slab.h>
40#include <linux/amlogic/tee.h>
41#include "../../../stream_input/amports/amports_priv.h"
42#include <linux/amlogic/media/codec_mm/codec_mm.h>
43#include "../utils/decoder_mmu_box.h"
44#include "../utils/decoder_bmmu_box.h"
45
46#define MEM_NAME "codec_vp9"
47/* #include <mach/am_regs.h> */
48#include <linux/amlogic/media/utils/vdec_reg.h>
49#include "../utils/vdec.h"
50#include "../utils/amvdec.h"
51#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
52#include "../utils/vdec_profile.h"
53#endif
54
55#include <linux/amlogic/media/video_sink/video.h>
56#include <linux/amlogic/media/codec_mm/configs.h>
57#include "../utils/config_parser.h"
58#include "../utils/firmware.h"
59#include "../../../common/chips/decoder_cpu_ver_info.h"
60#include "../utils/vdec_v4l2_buffer_ops.h"
61
62#define MIX_STREAM_SUPPORT
63
64#include "vvp9.h"
65
66
67/*#define SUPPORT_FB_DECODING*/
68/*#define FB_DECODING_TEST_SCHEDULE*/
69
70
71#define HW_MASK_FRONT 0x1
72#define HW_MASK_BACK 0x2
73
74#define VP9D_MPP_REFINFO_TBL_ACCCONFIG 0x3442
75#define VP9D_MPP_REFINFO_DATA 0x3443
76#define VP9D_MPP_REF_SCALE_ENBL 0x3441
77#define HEVC_MPRED_CTRL4 0x324c
78#define HEVC_CM_HEADER_START_ADDR 0x3628
79#define HEVC_DBLK_CFGB 0x350b
80#define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
81#define HEVC_SAO_MMU_VH1_ADDR 0x363b
82#define HEVC_SAO_MMU_VH0_ADDR 0x363a
83
84#define HEVC_MV_INFO 0x310d
85#define HEVC_QP_INFO 0x3137
86#define HEVC_SKIP_INFO 0x3136
87
88#define VP9_10B_DEC_IDLE 0
89#define VP9_10B_DEC_FRAME_HEADER 1
90#define VP9_10B_DEC_SLICE_SEGMENT 2
91#define VP9_10B_DECODE_SLICE 5
92#define VP9_10B_DISCARD_NAL 6
93#define VP9_DUMP_LMEM 7
94#define HEVC_DECPIC_DATA_DONE 0xa
95#define HEVC_DECPIC_DATA_ERROR 0xb
96#define HEVC_NAL_DECODE_DONE 0xe
97#define HEVC_DECODE_BUFEMPTY 0x20
98#define HEVC_DECODE_TIMEOUT 0x21
99#define HEVC_SEARCH_BUFEMPTY 0x22
100#define HEVC_DECODE_OVER_SIZE 0x23
101#define HEVC_S2_DECODING_DONE 0x50
102#define VP9_HEAD_PARSER_DONE 0xf0
103#define VP9_HEAD_SEARCH_DONE 0xf1
104#define VP9_EOS 0xf2
105#define HEVC_ACTION_DONE 0xff
106
107#define VF_POOL_SIZE 32
108
109#undef pr_info
110#define pr_info printk
111
112#define DECODE_MODE_SINGLE ((0x80 << 24) | 0)
113#define DECODE_MODE_MULTI_STREAMBASE ((0x80 << 24) | 1)
114#define DECODE_MODE_MULTI_FRAMEBASE ((0x80 << 24) | 2)
115#define DECODE_MODE_SINGLE_LOW_LATENCY ((0x80 << 24) | 3)
116#define DECODE_MODE_MULTI_FRAMEBASE_NOHEAD ((0x80 << 24) | 4)
117
118#define VP9_TRIGGER_FRAME_DONE 0x100
119#define VP9_TRIGGER_FRAME_ENABLE 0x200
120
121#define MV_MEM_UNIT 0x240
122/*---------------------------------------------------
123 * Include "parser_cmd.h"
124 *---------------------------------------------------
125 */
126#define PARSER_CMD_SKIP_CFG_0 0x0000090b
127
128#define PARSER_CMD_SKIP_CFG_1 0x1b14140f
129
130#define PARSER_CMD_SKIP_CFG_2 0x001b1910
131
132#define PARSER_CMD_NUMBER 37
133
134/*#define HEVC_PIC_STRUCT_SUPPORT*/
135/* to remove, fix build error */
136
137/*#define CODEC_MM_FLAGS_FOR_VDECODER 0*/
138
139#define MULTI_INSTANCE_SUPPORT
140#define SUPPORT_10BIT
141/* #define ERROR_HANDLE_DEBUG */
142
143#ifndef STAT_KTHREAD
144#define STAT_KTHREAD 0x40
145#endif
146
147#ifdef MULTI_INSTANCE_SUPPORT
148#define MAX_DECODE_INSTANCE_NUM 9
149#define MULTI_DRIVER_NAME "ammvdec_vp9"
150static unsigned int max_decode_instance_num
151 = MAX_DECODE_INSTANCE_NUM;
152static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
153static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
154static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
155static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
156static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
157static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
158
159static u32 decode_timeout_val = 200;
160static int start_decode_buf_level = 0x8000;
161static u32 work_buf_size;
162
163static u32 mv_buf_margin;
164
165/* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
166/* double_write_mode:
167 * 0, no double write;
168 * 1, 1:1 ratio;
169 * 2, (1/4):(1/4) ratio;
170 * 3, (1/4):(1/4) ratio, with both compressed frame included
171 * 4, (1/2):(1/2) ratio;
172 * 0x10, double write only
173 * 0x100, if > 1080p,use mode 4,else use mode 1;
174 * 0x200, if > 1080p,use mode 2,else use mode 1;
175 * 0x300, if > 720p, use mode 4, else use mode 1;
176 */
177static u32 double_write_mode;
178
179#define DRIVER_NAME "amvdec_vp9"
180#define MODULE_NAME "amvdec_vp9"
181#define DRIVER_HEADER_NAME "amvdec_vp9_header"
182
183
184#define PUT_INTERVAL (HZ/100)
185#define ERROR_SYSTEM_RESET_COUNT 200
186
187#define PTS_NORMAL 0
188#define PTS_NONE_REF_USE_DURATION 1
189
190#define PTS_MODE_SWITCHING_THRESHOLD 3
191#define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
192
193#define DUR2PTS(x) ((x)*90/96)
194
195struct VP9Decoder_s;
196static int vvp9_vf_states(struct vframe_states *states, void *);
197static struct vframe_s *vvp9_vf_peek(void *);
198static struct vframe_s *vvp9_vf_get(void *);
199static void vvp9_vf_put(struct vframe_s *, void *);
200static int vvp9_event_cb(int type, void *data, void *private_data);
201
202static int vvp9_stop(struct VP9Decoder_s *pbi);
203#ifdef MULTI_INSTANCE_SUPPORT
204static s32 vvp9_init(struct vdec_s *vdec);
205#else
206static s32 vvp9_init(struct VP9Decoder_s *pbi);
207#endif
208static void vvp9_prot_init(struct VP9Decoder_s *pbi, u32 mask);
209static int vvp9_local_init(struct VP9Decoder_s *pbi);
210static void vvp9_put_timer_func(unsigned long arg);
211static void dump_data(struct VP9Decoder_s *pbi, int size);
212static unsigned char get_data_check_sum
213 (struct VP9Decoder_s *pbi, int size);
214static void dump_pic_list(struct VP9Decoder_s *pbi);
215static int vp9_alloc_mmu(
216 struct VP9Decoder_s *pbi,
217 int cur_buf_idx,
218 int pic_width,
219 int pic_height,
220 unsigned short bit_depth,
221 unsigned int *mmu_index_adr);
222
223
224static const char vvp9_dec_id[] = "vvp9-dev";
225
226#define PROVIDER_NAME "decoder.vp9"
227#define MULTI_INSTANCE_PROVIDER_NAME "vdec.vp9"
228
229static const struct vframe_operations_s vvp9_vf_provider = {
230 .peek = vvp9_vf_peek,
231 .get = vvp9_vf_get,
232 .put = vvp9_vf_put,
233 .event_cb = vvp9_event_cb,
234 .vf_states = vvp9_vf_states,
235};
236
237static struct vframe_provider_s vvp9_vf_prov;
238
239static u32 bit_depth_luma;
240static u32 bit_depth_chroma;
241static u32 frame_width;
242static u32 frame_height;
243static u32 video_signal_type;
244
245static u32 on_no_keyframe_skiped;
246
247#define PROB_SIZE (496 * 2 * 4)
248#define PROB_BUF_SIZE (0x5000)
249#define COUNT_BUF_SIZE (0x300 * 4 * 4)
250/*compute_losless_comp_body_size(4096, 2304, 1) = 18874368(0x1200000)*/
251#define MAX_FRAME_4K_NUM 0x1200
252#define MAX_FRAME_8K_NUM 0x4800
253
254#define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
255
256#ifdef SUPPORT_FB_DECODING
257/* register define */
258#define HEVC_ASSIST_HED_FB_W_CTL 0x3006
259#define HEVC_ASSIST_HED_FB_R_CTL 0x3007
260#define HEVC_ASSIST_HED_FB_ADDR 0x3008
261#define HEVC_ASSIST_FB_MMU_MAP_ADDR 0x300a
262#define HEVC_ASSIST_FBD_MMU_MAP_ADDR 0x300b
263
264
265#define MAX_STAGE_PAGE_NUM 0x1200
266#define STAGE_MMU_MAP_SIZE (MAX_STAGE_PAGE_NUM * 4)
267#endif
/*
 * Return (int)(m / n) for a 64-bit dividend and 32-bit divisor.
 *
 * On 32-bit kernels a plain "/" on int64_t would need a libgcc helper,
 * so div_s64() is used there; ARM64 divides directly in hardware.
 */
static inline int div_r32(int64_t m, int n)
{
#ifdef CONFIG_ARM64
	return (int)(m / n);
#else
	return (int)div_s64(m, n);
#endif
}
281
282/*USE_BUF_BLOCK*/
/* Bookkeeping for one decoder output buffer allocation (m_BUF[] slot). */
struct BUF_s {
	int index;			/* slot index within m_BUF[] */
	unsigned int alloc_flag;	/* non-zero once backing memory exists */
	/*buffer */
	unsigned int cma_page_count;	/* CMA allocation size, in pages */
	unsigned long alloc_addr;	/* raw address from the allocator */
	unsigned long start_adr;	/* start address used by the decoder */
	unsigned int size;		/* buffer size in bytes */

	unsigned int free_start_adr;	/* next free address inside the buffer */
	ulong v4l_ref_buf_addr;		/* buffer address when supplied via V4L2 */
} /*BUF_t */;
295
/* One motion-vector buffer: address, size and in-use flag. */
struct MVBUF_s {
	unsigned long start_adr;	/* buffer start address */
	unsigned int size;		/* size in bytes */
	int used_flag;			/* non-zero while assigned to a picture */
} /*MVBUF_t */;
301
302 /* #undef BUFMGR_ONLY to enable hardware configuration */
303
304/*#define TEST_WR_PTR_INC*/
305/*#define WR_PTR_INC_NUM 128*/
306#define WR_PTR_INC_NUM 1
307
308#define SIMULATION
309#define DOS_PROJECT
310#undef MEMORY_MAP_IN_REAL_CHIP
311
312/*#undef DOS_PROJECT*/
313/*#define MEMORY_MAP_IN_REAL_CHIP*/
314
315/*#define BUFFER_MGR_ONLY*/
316/*#define CONFIG_HEVC_CLK_FORCED_ON*/
317/*#define ENABLE_SWAP_TEST*/
318#define MCRCC_ENABLE
319
320#define VP9_LPF_LVL_UPDATE
321/*#define DBG_LF_PRINT*/
322
323#ifdef VP9_10B_NV21
324#else
325#define LOSLESS_COMPRESS_MODE
326#endif
327
328#define DOUBLE_WRITE_YSTART_TEMP 0x02000000
329#define DOUBLE_WRITE_CSTART_TEMP 0x02900000
330
331
332
333typedef unsigned int u32;
334typedef unsigned short u16;
335
336#define VP9_DEBUG_BUFMGR 0x01
337#define VP9_DEBUG_BUFMGR_MORE 0x02
338#define VP9_DEBUG_BUFMGR_DETAIL 0x04
339#define VP9_DEBUG_OUT_PTS 0x10
340#define VP9_DEBUG_SEND_PARAM_WITH_REG 0x100
341#define VP9_DEBUG_MERGE 0x200
342#define VP9_DEBUG_DBG_LF_PRINT 0x400
343#define VP9_DEBUG_REG 0x800
344#define VP9_DEBUG_2_STAGE 0x1000
345#define VP9_DEBUG_2_STAGE_MORE 0x2000
346#define VP9_DEBUG_QOS_INFO 0x4000
347#define VP9_DEBUG_DIS_LOC_ERROR_PROC 0x10000
348#define VP9_DEBUG_DIS_SYS_ERROR_PROC 0x20000
349#define VP9_DEBUG_DUMP_PIC_LIST 0x40000
350#define VP9_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
351#define VP9_DEBUG_NO_TRIGGER_FRAME 0x100000
352#define VP9_DEBUG_LOAD_UCODE_FROM_FILE 0x200000
353#define VP9_DEBUG_FORCE_SEND_AGAIN 0x400000
354#define VP9_DEBUG_DUMP_DATA 0x800000
355#define VP9_DEBUG_CACHE 0x1000000
356#define VP9_DEBUG_CACHE_HIT_RATE 0x2000000
357#define IGNORE_PARAM_FROM_CONFIG 0x8000000
358#ifdef MULTI_INSTANCE_SUPPORT
359#define PRINT_FLAG_ERROR 0x0
360#define PRINT_FLAG_V4L_DETAIL 0x10000000
361#define PRINT_FLAG_VDEC_STATUS 0x20000000
362#define PRINT_FLAG_VDEC_DETAIL 0x40000000
363#define PRINT_FLAG_VDEC_DATA 0x80000000
364#endif
365
366static u32 debug;
367static bool is_reset;
368/*for debug*/
369/*
370 udebug_flag:
371 bit 0, enable ucode print
372 bit 1, enable ucode detail print
373 bit [31:16] not 0, pos to dump lmem
374 bit 2, pop bits to lmem
375 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
376*/
377static u32 udebug_flag;
378/*
379 when udebug_flag[1:0] is not 0
380 udebug_pause_pos not 0,
381 pause position
382*/
383static u32 udebug_pause_pos;
384/*
385 when udebug_flag[1:0] is not 0
386 and udebug_pause_pos is not 0,
387 pause only when DEBUG_REG2 is equal to this val
388*/
389static u32 udebug_pause_val;
390
391static u32 udebug_pause_decode_idx;
392
393static u32 without_display_mode;
394
395#define DEBUG_REG
396#ifdef DEBUG_REG
397void WRITE_VREG_DBG2(unsigned int adr, unsigned int val)
398{
399 if (debug & VP9_DEBUG_REG)
400 pr_info("%s(%x, %x)\n", __func__, adr, val);
401 if (adr != 0)
402 WRITE_VREG(adr, val);
403}
404
405#undef WRITE_VREG
406#define WRITE_VREG WRITE_VREG_DBG2
407#endif
408
409#define FRAME_CNT_WINDOW_SIZE 59
410#define RATE_CORRECTION_THRESHOLD 5
411/**************************************************
412
413VP9 buffer management start
414
415***************************************************/
416
417#define MMU_COMPRESS_HEADER_SIZE 0x48000
418#define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
419#define MAX_SIZE_8K (8192 * 4608)
420#define MAX_SIZE_4K (4096 * 2304)
421#define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
422
423#define INVALID_IDX -1 /* Invalid buffer index.*/
424
425#define RPM_BEGIN 0x200
426#define RPM_END 0x280
427
/*
 * Uncompressed VP9 frame-header fields exchanged with the decoder
 * firmware through the RPM area: 'l' views the memory as the raw
 * RPM word array, 'p' views the same words as the parsed header
 * fields (layout is fixed by the ucode — do not reorder).
 */
union param_u {
	struct {
		unsigned short data[RPM_END - RPM_BEGIN];
	} l;
	struct {
		/* from ucode lmem, do not change this struct */
		unsigned short profile;
		unsigned short show_existing_frame;
		unsigned short frame_to_show_idx;
		unsigned short frame_type; /*1 bit*/
		unsigned short show_frame; /*1 bit*/
		unsigned short error_resilient_mode; /*1 bit*/
		unsigned short intra_only; /*1 bit*/
		unsigned short display_size_present; /*1 bit*/
		unsigned short reset_frame_context;
		unsigned short refresh_frame_flags;
		unsigned short width;
		unsigned short height;
		unsigned short display_width;
		unsigned short display_height;
		/*
		 *bit[11:8] - ref_frame_info_0 (ref(3-bits), ref_frame_sign_bias(1-bit))
		 *bit[7:4] - ref_frame_info_1 (ref(3-bits), ref_frame_sign_bias(1-bit))
		 *bit[3:0] - ref_frame_info_2 (ref(3-bits), ref_frame_sign_bias(1-bit))
		 */
		unsigned short ref_info;
		/*
		 *bit[2]: same_frame_size0
		 *bit[1]: same_frame_size1
		 *bit[0]: same_frame_size2
		 */
		unsigned short same_frame_size;

		unsigned short mode_ref_delta_enabled;
		unsigned short ref_deltas[4];
		unsigned short mode_deltas[2];
		unsigned short filter_level;
		unsigned short sharpness_level;
		unsigned short bit_depth;
		unsigned short seg_quant_info[8];
		unsigned short seg_enabled;
		unsigned short seg_abs_delta;
		/* bit 15: feature enabled; bit 8, sign; bit[5:0], data */
		unsigned short seg_lf_info[8];
	} p;
};
474
475
/* A frame buffer as handed to/from the frame-buffer callbacks. */
struct vpx_codec_frame_buffer_s {
	uint8_t *data; /**< Pointer to the data buffer */
	size_t size; /**< Size of data in bytes */
	void *priv; /**< Frame's private data */
};
481
/* Color space signalled for the coded video. */
enum vpx_color_space_t {
	VPX_CS_UNKNOWN = 0, /**< Unknown */
	VPX_CS_BT_601 = 1, /**< BT.601 */
	VPX_CS_BT_709 = 2, /**< BT.709 */
	VPX_CS_SMPTE_170 = 3, /**< SMPTE.170 */
	VPX_CS_SMPTE_240 = 4, /**< SMPTE.240 */
	VPX_CS_BT_2020 = 5, /**< BT.2020 */
	VPX_CS_RESERVED = 6, /**< Reserved */
	VPX_CS_SRGB = 7 /**< sRGB */
}; /**< alias for enum vpx_color_space */
492
/* Coded bit depth of the stream. */
enum vpx_bit_depth_t {
	VPX_BITS_8 = 8, /**< 8 bits */
	VPX_BITS_10 = 10, /**< 10 bits */
	VPX_BITS_12 = 12, /**< 12 bits */
};
498
499#define MAX_SLICE_NUM 1024
/*
 * Per-picture configuration and state: buffer/canvas indices, display
 * timing (pts/timestamp), geometry, and per-frame QoS statistics.
 */
struct PIC_BUFFER_CONFIG_s {
	int index;		/* picture index in the buffer pool */
	int BUF_index;		/* backing m_BUF[] slot */
	int mv_buf_index;	/* backing m_mv_BUF[] slot */
	int comp_body_size;
	int buf_size;
	int vf_ref;		/* reference count held by displayed vframes */
	int y_canvas_index;
	int uv_canvas_index;
#ifdef MULTI_INSTANCE_SUPPORT
	struct canvas_config_s canvas_config[2];
#endif
	int decode_idx;		/* decode order counter */
	int slice_type;
	int stream_offset;
	u32 pts;
	u64 pts64;
	u64 timestamp;
	uint8_t error_mark;	/* set when the picture decoded with errors */
	/**/
	int slice_idx;
	/*buffer*/
	unsigned long header_adr;	/* compressed-frame header buffer */
	unsigned long mpred_mv_wr_start_addr;	/* MV write area for this pic */
	/*unsigned long mc_y_adr;
	 *unsigned long mc_u_v_adr;
	 */
	unsigned int dw_y_adr;		/* double-write luma address */
	unsigned int dw_u_v_adr;	/* double-write chroma address */
	int mc_canvas_y;
	int mc_canvas_u_v;

	int lcu_total;
	/**/
	int y_width;
	int y_height;
	int y_crop_width;
	int y_crop_height;
	int y_stride;

	int uv_width;
	int uv_height;
	int uv_crop_width;
	int uv_crop_height;
	int uv_stride;

	int alpha_width;
	int alpha_height;
	int alpha_stride;

	uint8_t *y_buffer;
	uint8_t *u_buffer;
	uint8_t *v_buffer;
	uint8_t *alpha_buffer;

	uint8_t *buffer_alloc;
	int buffer_alloc_sz;
	int border;
	int frame_size;
	int subsampling_x;
	int subsampling_y;
	unsigned int bit_depth;
	enum vpx_color_space_t color_space;

	int corrupted;
	int flags;
	unsigned long cma_alloc_addr;

	int double_write_mode;

	/* picture qos information */
	int max_qp;
	int avg_qp;
	int min_qp;
	int max_skip;
	int avg_skip;
	int min_skip;
	int max_mv;
	int min_mv;
	int avg_mv;
} PIC_BUFFER_CONFIG;
581
/* VP9 bitstream profiles (0..3). */
enum BITSTREAM_PROFILE {
	PROFILE_0,
	PROFILE_1,
	PROFILE_2,
	PROFILE_3,
	MAX_PROFILES
};
589
/* VP9 frame types: key (intra) or inter. */
enum FRAME_TYPE {
	KEY_FRAME = 0,
	INTER_FRAME = 1,
	FRAME_TYPES,
};
595
/* Inter-prediction reference modes signalled by the frame header. */
enum REFERENCE_MODE {
	SINGLE_REFERENCE = 0,
	COMPOUND_REFERENCE = 1,
	REFERENCE_MODE_SELECT = 2,
	REFERENCE_MODES = 3,
};
602
603#define NONE -1
604#define INTRA_FRAME 0
605#define LAST_FRAME 1
606#define GOLDEN_FRAME 2
607#define ALTREF_FRAME 3
608#define MAX_REF_FRAMES 4
609
610#define REFS_PER_FRAME 3
611
612#define REF_FRAMES_LOG2 3
613#define REF_FRAMES (1 << REF_FRAMES_LOG2)
614#define REF_FRAMES_4K (6)
615
616/*4 scratch frames for the new frames to support a maximum of 4 cores decoding
617 *in parallel, 3 for scaled references on the encoder.
618 *TODO(hkuang): Add ondemand frame buffers instead of hardcoding the number
619 * // of framebuffers.
620 *TODO(jkoleszar): These 3 extra references could probably come from the
621 *normal reference pool.
622 */
623#define FRAME_BUFFERS (REF_FRAMES + 16)
624#define HEADER_FRAME_BUFFERS (FRAME_BUFFERS)
625#define MAX_BUF_NUM (FRAME_BUFFERS)
626#define MV_BUFFER_NUM FRAME_BUFFERS
627#ifdef SUPPORT_FB_DECODING
628#define STAGE_MAX_BUFFERS 16
629#else
630#define STAGE_MAX_BUFFERS 0
631#endif
632
633#define FRAME_CONTEXTS_LOG2 2
634#define FRAME_CONTEXTS (1 << FRAME_CONTEXTS_LOG2)
635/*buffer + header buffer + workspace*/
636#ifdef MV_USE_FIXED_BUF
637#define MAX_BMMU_BUFFER_NUM (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + 1)
638#define VF_BUFFER_IDX(n) (n)
639#define HEADER_BUFFER_IDX(n) (FRAME_BUFFERS + n)
640#define WORK_SPACE_BUF_ID (FRAME_BUFFERS + HEADER_FRAME_BUFFERS)
641#else
642#define MAX_BMMU_BUFFER_NUM \
643 (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + MV_BUFFER_NUM + 1)
644#define VF_BUFFER_IDX(n) (n)
645#define HEADER_BUFFER_IDX(n) (FRAME_BUFFERS + n)
646#define MV_BUFFER_IDX(n) (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + n)
647#define WORK_SPACE_BUF_ID \
648 (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + MV_BUFFER_NUM)
649#endif
650
/* A reference-counted entry in the frame buffer pool. */
struct RefCntBuffer_s {
	int ref_count;	/* number of reference slots pointing at this buffer */
	/*MV_REF *mvs;*/
	int mi_rows;	/* height in MODE_INFO (8-pixel) units */
	int mi_cols;	/* width in MODE_INFO (8-pixel) units */
	struct vpx_codec_frame_buffer_s raw_frame_buffer;
	struct PIC_BUFFER_CONFIG_s buf;

/*The Following variables will only be used in frame parallel decode.
 *
 *frame_worker_owner indicates which FrameWorker owns this buffer. NULL means
 *that no FrameWorker owns, or is decoding, this buffer.
 *VP9Worker *frame_worker_owner;
 *
 *row and col indicate which position frame has been decoded to in real
 *pixel unit. They are reset to -1 when decoding begins and set to INT_MAX
 *when the frame is fully decoded.
 */
	int row;
	int col;
} RefCntBuffer;
672
/* One of the REFS_PER_FRAME active reference frames of the current frame. */
struct RefBuffer_s {
/*TODO(dkovalev): idx is not really required and should be removed, now it
 *is used in vp9_onyxd_if.c
 */
	int idx;
	struct PIC_BUFFER_CONFIG_s *buf;	/* picture used as the reference */
	/*struct scale_factors sf;*/
} RefBuffer;
681
/* A frame buffer allocated internally by the codec. */
struct InternalFrameBuffer_s {
	uint8_t *data;	/* buffer memory */
	size_t size;	/* size of data in bytes */
	int in_use;	/* non-zero while lent out; cleared on release */
} InternalFrameBuffer;
687
/* The list of codec-internal frame buffers. */
struct InternalFrameBufferList_s {
	int num_internal_frame_buffers;	/* number of entries in int_fb */
	struct InternalFrameBuffer_s *int_fb;	/* buffer array */
} InternalFrameBufferList;
692
/* The shared pool of reference-counted frame buffers. */
struct BufferPool_s {
/*Protect BufferPool from being accessed by several FrameWorkers at
 *the same time during frame parallel decode.
 *TODO(hkuang): Try to use atomic variable instead of locking the whole pool.
 *
 *Private data associated with the frame buffer callbacks.
 *void *cb_priv;
 *
 *vpx_get_frame_buffer_cb_fn_t get_fb_cb;
 *vpx_release_frame_buffer_cb_fn_t release_fb_cb;
 */

	struct RefCntBuffer_s frame_bufs[FRAME_BUFFERS];

/*Frame buffers allocated internally by the codec.*/
	struct InternalFrameBufferList_s int_frame_buffers;
	unsigned long flags;	/* saved irq flags for lock_buffer_pool() */
	spinlock_t lock;	/* guards the pool; see lock_buffer_pool() */

} BufferPool;
713
714#define lock_buffer_pool(pool, flags) \
715 spin_lock_irqsave(&pool->lock, flags)
716
717#define unlock_buffer_pool(pool, flags) \
718 spin_unlock_irqrestore(&pool->lock, flags)
719
/*
 * Core VP9 frame/sequence state, modeled after libvpx's VP9_COMMON:
 * current and previous frame geometry, the active reference frame set,
 * and coding flags parsed from the frame header.
 */
struct VP9_Common_s {
	enum vpx_color_space_t color_space;
	int width;		/* coded size of the current frame */
	int height;
	int display_width;	/* display size (see setup_display_size()) */
	int display_height;
	int last_width;		/* coded size of the previous frame */
	int last_height;

	int subsampling_x;
	int subsampling_y;

	int use_highbitdepth;/*Marks if we need to use 16bit frame buffers.*/

	struct PIC_BUFFER_CONFIG_s *frame_to_show;
	struct RefCntBuffer_s *prev_frame;

	/*TODO(hkuang): Combine this with cur_buf in macroblockd.*/
	struct RefCntBuffer_s *cur_frame;

	int ref_frame_map[REF_FRAMES]; /* maps fb_idx to reference slot */

	/*Prepare ref_frame_map for the next frame.
	 *Only used in frame parallel decode.
	 */
	int next_ref_frame_map[REF_FRAMES];

	/* TODO(jkoleszar): could expand active_ref_idx to 4,
	 *with 0 as intra, and roll new_fb_idx into it.
	 */

	/*Each frame can reference REFS_PER_FRAME buffers*/
	struct RefBuffer_s frame_refs[REFS_PER_FRAME];

	int prev_fb_idx;
	int new_fb_idx;		/* pool index of the frame being decoded */
	int cur_fb_idx_mmu;
	/*last frame's frame type for motion search*/
	enum FRAME_TYPE last_frame_type;
	enum FRAME_TYPE frame_type;

	int show_frame;
	int last_show_frame;
	int show_existing_frame;

	/*Flag signaling that the frame is encoded using only INTRA modes.*/
	uint8_t intra_only;
	uint8_t last_intra_only;

	int allow_high_precision_mv;

	/*Flag signaling that the frame context should be reset to default
	 *values. 0 or 1 implies don't reset, 2 reset just the context
	 *specified in the frame header, 3 reset all contexts.
	 */
	int reset_frame_context;

	/*MBs, mb_rows/cols is in 16-pixel units; mi_rows/cols is in
	 * MODE_INFO (8-pixel) units.
	 */
	int MBs;
	int mb_rows, mi_rows;
	int mb_cols, mi_cols;
	int mi_stride;

	/*Whether to use previous frame's motion vectors for prediction.*/
	int use_prev_frame_mvs;

	int refresh_frame_context; /* Two state 0 = NO, 1 = YES */

	int ref_frame_sign_bias[MAX_REF_FRAMES]; /* Two state 0, 1 */

	/*struct loopfilter lf;*/
	/*struct segmentation seg;*/

	/*TODO(hkuang):Remove this as it is the same as frame_parallel_decode*/
	/* in pbi.*/
	int frame_parallel_decode; /* frame-based threading.*/

	/*Context probabilities for reference frame prediction*/
	/*MV_REFERENCE_FRAME comp_fixed_ref;*/
	/*MV_REFERENCE_FRAME comp_var_ref[2];*/
	enum REFERENCE_MODE reference_mode;

	/*FRAME_CONTEXT *fc; */ /* this frame entropy */
	/*FRAME_CONTEXT *frame_contexts; */ /*FRAME_CONTEXTS*/
	/*unsigned int frame_context_idx; *//* Context to use/update */
	/*FRAME_COUNTS counts;*/

	unsigned int current_video_frame;
	enum BITSTREAM_PROFILE profile;

	enum vpx_bit_depth_t bit_depth;

	int error_resilient_mode;
	int frame_parallel_decoding_mode;

	int byte_alignment;
	int skip_loop_filter;

	/*External BufferPool passed from outside.*/
	struct BufferPool_s *buffer_pool;

	int above_context_alloc_cols;

};
826
827static void set_canvas(struct VP9Decoder_s *pbi,
828 struct PIC_BUFFER_CONFIG_s *pic_config);
829static int prepare_display_buf(struct VP9Decoder_s *pbi,
830 struct PIC_BUFFER_CONFIG_s *pic_config);
831
832static void fill_frame_info(struct VP9Decoder_s *pbi,
833 struct PIC_BUFFER_CONFIG_s *frame,
834 unsigned int framesize,
835 unsigned int pts);
836
837static struct PIC_BUFFER_CONFIG_s *get_frame_new_buffer(struct VP9_Common_s *cm)
838{
839 return &cm->buffer_pool->frame_bufs[cm->new_fb_idx].buf;
840}
841
842static void ref_cnt_fb(struct RefCntBuffer_s *bufs, int *idx, int new_idx)
843{
844 const int ref_index = *idx;
845
846 if (ref_index >= 0 && bufs[ref_index].ref_count > 0) {
847 bufs[ref_index].ref_count--;
848 /*pr_info("[MMU DEBUG 2] dec ref_count[%d] : %d\r\n",
849 * ref_index, bufs[ref_index].ref_count);
850 */
851 }
852
853 *idx = new_idx;
854
855 bufs[new_idx].ref_count++;
856 /*pr_info("[MMU DEBUG 3] inc ref_count[%d] : %d\r\n",
857 * new_idx, bufs[new_idx].ref_count);
858 */
859}
860
861int vp9_release_frame_buffer(struct vpx_codec_frame_buffer_s *fb)
862{
863 struct InternalFrameBuffer_s *const int_fb =
864 (struct InternalFrameBuffer_s *)fb->priv;
865 if (int_fb)
866 int_fb->in_use = 0;
867 return 0;
868}
869
870static int compute_losless_comp_body_size(int width, int height,
871 uint8_t is_bit_depth_10);
872
873static void setup_display_size(struct VP9_Common_s *cm, union param_u *params,
874 int print_header_info)
875{
876 cm->display_width = cm->width;
877 cm->display_height = cm->height;
878 if (params->p.display_size_present) {
879 if (print_header_info)
880 pr_info(" * 1-bit display_size_present read : 1\n");
881 cm->display_width = params->p.display_width;
882 cm->display_height = params->p.display_height;
883 /*vp9_read_frame_size(rb, &cm->display_width,
884 * &cm->display_height);
885 */
886 } else {
887 if (print_header_info)
888 pr_info(" * 1-bit display_size_present read : 0\n");
889 }
890}
891
892
893uint8_t print_header_info = 0;
894
/* A region within the decoder work space. */
struct buff_s {
	u32 buf_start;	/* start address */
	u32 buf_size;	/* size in bytes */
	u32 buf_end;	/* end address */
} buff_t;
900
/*
 * Layout of the decoder work space: one buff_s region per hardware
 * data structure (IPP, SAO, deblock, segment map, MMU, RPM, etc.)
 * within [start_adr, end_adr].
 */
struct BuffInfo_s {
	u32 max_width;
	u32 max_height;
	u32 start_adr;
	u32 end_adr;
	struct buff_s ipp;
	struct buff_s sao_abv;
	struct buff_s sao_vb;
	struct buff_s short_term_rps;
	struct buff_s vps;
	struct buff_s sps;
	struct buff_s pps;
	struct buff_s sao_up;
	struct buff_s swap_buf;
	struct buff_s swap_buf2;
	struct buff_s scalelut;
	struct buff_s dblk_para;
	struct buff_s dblk_data;
	struct buff_s seg_map;
	struct buff_s mmu_vbh;
	struct buff_s cm_header;
	struct buff_s mpred_above;
#ifdef MV_USE_FIXED_BUF
	struct buff_s mpred_mv;
#endif
	struct buff_s rpm;
	struct buff_s lmem;
} BuffInfo_t;
929#ifdef MULTI_INSTANCE_SUPPORT
930#define DEC_RESULT_NONE 0
931#define DEC_RESULT_DONE 1
932#define DEC_RESULT_AGAIN 2
933#define DEC_RESULT_CONFIG_PARAM 3
934#define DEC_RESULT_ERROR 4
935#define DEC_INIT_PICLIST 5
936#define DEC_UNINIT_PICLIST 6
937#define DEC_RESULT_GET_DATA 7
938#define DEC_RESULT_GET_DATA_RETRY 8
939#define DEC_RESULT_EOS 9
940#define DEC_RESULT_FORCE_EXIT 10
941#define DEC_V4L2_CONTINUE_DECODING 18
942
943#define DEC_S1_RESULT_NONE 0
944#define DEC_S1_RESULT_DONE 1
945#define DEC_S1_RESULT_FORCE_EXIT 2
946#define DEC_S1_RESULT_TEST_TRIGGER_DONE 0xf0
947
948#ifdef FB_DECODING_TEST_SCHEDULE
949#define TEST_SET_NONE 0
950#define TEST_SET_PIC_DONE 1
951#define TEST_SET_S2_DONE 2
952#endif
953
954static void vp9_work(struct work_struct *work);
955#endif
956struct loop_filter_info_n;
957struct loopfilter;
958struct segmentation;
959
960#ifdef SUPPORT_FB_DECODING
961static void mpred_process(struct VP9Decoder_s *pbi);
962static void vp9_s1_work(struct work_struct *work);
963
/* Per-stage buffer holding a snapshot of the RPM words (FB decoding). */
struct stage_buf_s {
	int index;
	unsigned short rpm[RPM_END - RPM_BEGIN];	/* RPM parameter words */
};
968
969static unsigned int not_run2_ready[MAX_DECODE_INSTANCE_NUM];
970
971static unsigned int run2_count[MAX_DECODE_INSTANCE_NUM];
972
973#ifdef FB_DECODING_TEST_SCHEDULE
974u32 stage_buf_num; /* = 16;*/
975#else
976u32 stage_buf_num;
977#endif
978#endif
979
/*
 * Per-instance decoder state: vdec callbacks and work items, buffer
 * allocations (BMMU/MMU, probability/count/RPM/lmem DMA areas), vframe
 * queues, PTS tracking, the embedded VP9_Common_s bitstream state, and
 * (optionally) front/back two-stage decoding state.
 */
struct VP9Decoder_s {
#ifdef MULTI_INSTANCE_SUPPORT
	unsigned char index;	/* decoder instance number */

	struct device *cma_dev;
	struct platform_device *platform_dev;
	void (*vdec_cb)(struct vdec_s *, void *);	/* run-done callback */
	void *vdec_cb_arg;
	struct vframe_chunk_s *chunk;	/* current frame-based input chunk */
	int dec_result;			/* DEC_RESULT_* for vp9_work() */
	struct work_struct work;
	struct work_struct set_clk_work;
	u32 start_shift_bytes;

	struct BuffInfo_s work_space_buf_store;
	unsigned long buf_start;
	u32 buf_size;
	u32 cma_alloc_count;
	unsigned long cma_alloc_addr;
	uint8_t eos;
	unsigned long int start_process_time;
	unsigned last_lcu_idx;
	int decode_timeout_count;
	unsigned timeout_num;
	int save_buffer_mode;

	int double_write_mode;
#endif
	long used_4k_num;

	unsigned char m_ins_flag;	/* multi-instance mode flag */
	char *provider_name;
	union param_u param;		/* header params read from RPM */
	int frame_count;
	int pic_count;
	u32 stat;			/* STAT_* lifecycle bits */
	struct timer_list timer;
	u32 frame_dur;
	u32 frame_ar;
	int fatal_error;
	uint8_t init_flag;
	uint8_t first_sc_checked;
	uint8_t process_busy;
#define PROC_STATE_INIT 0
#define PROC_STATE_DECODESLICE 1
#define PROC_STATE_SENDAGAIN 2
	uint8_t process_state;
	u32 ucode_pause_pos;

	int show_frame_num;
	struct buff_s mc_buf_spec;
	struct dec_sysinfo vvp9_amstream_dec_info;
	/* DMA areas shared with the decoder hardware/ucode */
	void *rpm_addr;
	void *lmem_addr;
	dma_addr_t rpm_phy_addr;
	dma_addr_t lmem_phy_addr;
	unsigned short *lmem_ptr;
	unsigned short *debug_ptr;

	void *prob_buffer_addr;
	void *count_buffer_addr;
	dma_addr_t prob_buffer_phy_addr;
	dma_addr_t count_buffer_phy_addr;

	void *frame_mmu_map_addr;
	dma_addr_t frame_mmu_map_phy_addr;

	unsigned int use_cma_flag;

	struct BUF_s m_BUF[MAX_BUF_NUM];
	struct MVBUF_s m_mv_BUF[MV_BUFFER_NUM];
	u32 used_buf_num;
	/* vframe queues: free pool, ready-to-display, pending */
	DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
	DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
	DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
	struct vframe_s vfpool[VF_POOL_SIZE];
	u32 vf_pre_count;
	u32 vf_get_count;
	u32 vf_put_count;
	int buf_num;
	int pic_num;
	int lcu_size_log2;
	unsigned int losless_comp_body_size;

	u32 video_signal_type;

	/* PTS bookkeeping */
	int pts_mode;
	int last_lookup_pts;
	int last_pts;
	u64 last_lookup_pts_us64;
	u64 last_pts_us64;
	u64 shift_byte_count;

	u32 pts_unstable;
	u32 frame_cnt_window;
	u32 pts1, pts2;
	u32 last_duration;
	u32 duration_from_pts_done;
	bool vp9_first_pts_ready;

	u32 shift_byte_count_lo;
	u32 shift_byte_count_hi;
	int pts_mode_switching_count;
	int pts_mode_recovery_count;

	bool get_frame_dur;
	u32 saved_resolution;

	/* bitstream-level state */
	struct VP9_Common_s common;
	struct RefCntBuffer_s *cur_buf;
	int refresh_frame_flags;
	uint8_t need_resync;
	uint8_t hold_ref_buf;
	uint8_t ready_for_new_data;
	struct BufferPool_s vp9_buffer_pool;

	struct BuffInfo_s *work_space_buf;

	struct buff_s *mc_buf;

	unsigned int frame_width;
	unsigned int frame_height;

	unsigned short *rpm_ptr;
	int init_pic_w;
	int init_pic_h;
	int lcu_total;
	int lcu_size;

	int slice_type;

	int skip_flag;
	int decode_idx;
	int slice_idx;
	uint8_t has_keyframe;
	uint8_t wait_buf;
	uint8_t error_flag;

	/* bit 0, for decoding; bit 1, for displaying */
	uint8_t ignore_bufmgr_error;
	int PB_skip_mode;
	int PB_skip_count_after_decoding;
	/*hw*/

	/*lf*/
	int default_filt_lvl;
	struct loop_filter_info_n *lfi;
	struct loopfilter *lf;
	struct segmentation *seg_4lf;
	/**/
	struct vdec_info *gvs;

	u32 pre_stream_offset;

	unsigned int dec_status;
	u32 last_put_idx;
	int new_frame_displayed;
	void *mmu_box;
	void *bmmu_box;
	int mmu_enable;
	struct vframe_master_display_colour_s vf_dp;	/* HDR metadata */
	struct firmware_s *fw;
	int max_pic_w;
	int max_pic_h;
#ifdef SUPPORT_FB_DECODING
	/* two-stage (front/back) decoding state */
	int dec_s1_result;
	int s1_test_cmd;
	struct work_struct s1_work;
	int used_stage_buf_num;
	int s1_pos;
	int s2_pos;
	void *stage_mmu_map_addr;
	dma_addr_t stage_mmu_map_phy_addr;
	struct stage_buf_s *s1_buf;
	struct stage_buf_s *s2_buf;
	struct stage_buf_s *stage_bufs
		[STAGE_MAX_BUFFERS];
	unsigned char run2_busy;

	int s1_mv_buf_index;
	int s1_mv_buf_index_pre;
	int s1_mv_buf_index_pre_pre;
	unsigned long s1_mpred_mv_wr_start_addr;
	unsigned long s1_mpred_mv_wr_start_addr_pre;
	unsigned short s1_intra_only;
	unsigned short s1_frame_type;
	unsigned short s1_width;
	unsigned short s1_height;
	unsigned short s1_last_show_frame;
	union param_u s1_param;
	u8 back_not_run_ready;
#endif
	int need_cache_size;
	u64 sc_start_time;
	bool postproc_done;
	int low_latency_flag;
	bool no_head;
	bool pic_list_init_done;
	bool pic_list_init_done2;
	bool is_used_v4l;	/* driven through the V4L2 interface */
	void *v4l2_ctx;
	bool v4l_params_parsed;
	int frameinfo_enable;
	struct vframe_qos_s vframe_qos;
	u32 mem_map_mode;
};
1187
1188static int vp9_print(struct VP9Decoder_s *pbi,
1189 int flag, const char *fmt, ...)
1190{
1191#define HEVC_PRINT_BUF 256
1192 unsigned char buf[HEVC_PRINT_BUF];
1193 int len = 0;
1194
1195 if (pbi == NULL ||
1196 (flag == 0) ||
1197 (debug & flag)) {
1198 va_list args;
1199
1200 va_start(args, fmt);
1201 if (pbi)
1202 len = sprintf(buf, "[%d]", pbi->index);
1203 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1204 pr_debug("%s", buf);
1205 va_end(args);
1206 }
1207 return 0;
1208}
1209
1210static int is_oversize(int w, int h)
1211{
1212 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1213 MAX_SIZE_8K : MAX_SIZE_4K;
1214
1215 if (w <= 0 || h <= 0)
1216 return true;
1217
1218 if (h != 0 && (w > max / h))
1219 return true;
1220
1221 return false;
1222}
1223
1224static int config_pic(struct VP9Decoder_s *pbi,
1225 struct PIC_BUFFER_CONFIG_s *pic_config);
1226
1227static void resize_context_buffers(struct VP9Decoder_s *pbi,
1228 struct VP9_Common_s *cm, int width, int height)
1229{
1230 if (cm->width != width || cm->height != height) {
1231 /* to do ..*/
1232 if (pbi != NULL) {
1233 pbi->vp9_first_pts_ready = 0;
1234 pbi->duration_from_pts_done = 0;
1235 }
1236 pr_info("%s (%d,%d)=>(%d,%d)\r\n", __func__, cm->width,
1237 cm->height, width, height);
1238
1239 if (pbi->is_used_v4l) {
1240 struct PIC_BUFFER_CONFIG_s *pic = &cm->cur_frame->buf;
1241
1242 /* resolution change happend need to reconfig buffs if true. */
1243 if (pic->y_crop_width != width || pic->y_crop_height != height) {
1244 int i;
1245 for (i = 0; i < pbi->used_buf_num; i++) {
1246 pic = &cm->buffer_pool->frame_bufs[i].buf;
1247 pic->y_crop_width = width;
1248 pic->y_crop_height = height;
1249 if (!config_pic(pbi, pic))
1250 set_canvas(pbi, pic);
1251 else
1252 vp9_print(pbi, 0,
1253 "v4l: reconfig buff fail.\n");
1254 }
1255 }
1256 }
1257
1258 cm->width = width;
1259 cm->height = height;
1260 }
1261 /*
1262 *if (cm->cur_frame->mvs == NULL ||
1263 * cm->mi_rows > cm->cur_frame->mi_rows ||
1264 * cm->mi_cols > cm->cur_frame->mi_cols) {
1265 * resize_mv_buffer(cm);
1266 *}
1267 */
1268}
1269
/*
 * A reference frame is usable for scaled prediction only when its
 * dimensions are within the VP9 scaling limits of the current frame:
 * the reference may be at most 2x larger and at most 16x smaller in
 * each dimension. Returns nonzero when the sizes are compatible.
 */
static int valid_ref_frame_size(int ref_width, int ref_height,
	int this_width, int this_height) {
	if (ref_width > 2 * this_width)
		return 0;
	if (ref_height > 2 * this_height)
		return 0;
	if (this_width > 16 * ref_width)
		return 0;
	if (this_height > 16 * ref_height)
		return 0;
	return 1;
}
1277
1278/*
1279 *static int valid_ref_frame_img_fmt(enum vpx_bit_depth_t ref_bit_depth,
1280 * int ref_xss, int ref_yss,
1281 * enum vpx_bit_depth_t this_bit_depth,
1282 * int this_xss, int this_yss) {
1283 * return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
1284 * ref_yss == this_yss;
1285 *}
1286 */
1287
1288
/*
 * Program the size of the current frame from the parsed header
 * parameters: validate the dimensions, tell the parser hardware the
 * coded picture size, allocate compressed-body MMU pages when frame
 * buffer compression is in use, resize decoder context buffers, and
 * record the geometry on the new frame buffer.
 *
 * Returns 0 on success, -1 on an invalid size or missing frame
 * buffer, or the error code from vp9_alloc_mmu().
 */
static int setup_frame_size(
	struct VP9Decoder_s *pbi,
	struct VP9_Common_s *cm, union param_u *params,
	unsigned int *mmu_index_adr,
	int print_header_info) {
	int width, height;
	struct BufferPool_s * const pool = cm->buffer_pool;
	struct PIC_BUFFER_CONFIG_s *ybf;
	int ret = 0;

	width = params->p.width;
	height = params->p.height;
	if (is_oversize(width, height)) {
		vp9_print(pbi, 0, "%s, Error: Invalid frame size\n", __func__);
		return -1;
	}

	/*vp9_read_frame_size(rb, &width, &height);*/
	/* NOTE(review): both traces below pass (width, height) in that
	 * order; the "h read" line looks copy-pasted from the "w read"
	 * line — confirm the intended output.
	 */
	if (print_header_info)
		pr_info(" * 16-bits w read : %d (width : %d)\n", width, height);
	if (print_header_info)
		pr_info
		(" * 16-bits h read : %d (height : %d)\n", width, height);

	/* Coded picture size for the parser: height in the high 16 bits. */
	WRITE_VREG(HEVC_PARSER_PICTURE_SIZE, (height << 16) | width);
#ifdef VP9_10B_HED_FB
	WRITE_VREG(HEVC_ASSIST_PIC_SIZE_FB_READ, (height << 16) | width);
#endif
	/* Compressed reference buffer pages are only needed when the MMU
	 * is enabled and we are not in pure double-write mode (bit 4).
	 */
	if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) {
		ret = vp9_alloc_mmu(pbi,
			cm->new_fb_idx,
			params->p.width,
			params->p.height,
			params->p.bit_depth,
			mmu_index_adr);
		if (ret != 0) {
			pr_err("can't alloc need mmu1,idx %d ret =%d\n",
				cm->new_fb_idx,
				ret);
			return ret;
		}
		cm->cur_fb_idx_mmu = cm->new_fb_idx;
	}

	resize_context_buffers(pbi, cm, width, height);
	setup_display_size(cm, params, print_header_info);
#if 0
	lock_buffer_pool(pool);
	if (vp9_realloc_frame_buffer(
		get_frame_new_buffer(cm), cm->width, cm->height,
		cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
		cm->use_highbitdepth,
#endif
		VP9_DEC_BORDER_IN_PIXELS,
		cm->byte_alignment,
		&pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer,
		pool->get_fb_cb, pool->cb_priv)) {
		unlock_buffer_pool(pool);
		vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
			"Failed to allocate frame buffer");
	}
	unlock_buffer_pool(pool);
#else
	/* porting */
	/* The libvpx realloc path above is replaced by recording the
	 * geometry directly on the hardware-managed frame buffer.
	 */
	ybf = get_frame_new_buffer(cm);
	if (!ybf)
		return -1;

	ybf->y_crop_width = width;
	ybf->y_crop_height = height;
	ybf->bit_depth = params->p.bit_depth;
#endif
	pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
	pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
	pool->frame_bufs[cm->new_fb_idx].buf.bit_depth =
		(unsigned int)cm->bit_depth;
	pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
	return ret;
}
1369
/*
 * Like setup_frame_size(), but for inter frames whose size may be
 * inherited from a reference frame: each same_frame_size bit selects
 * the first reference whose dimensions are reused; otherwise the size
 * comes from the header. Also verifies that at least one reference
 * frame has dimensions compatible with VP9 scaling limits.
 *
 * Returns 0 on success, -1 on invalid/incompatible sizes, or the
 * error code from vp9_alloc_mmu().
 */
static int setup_frame_size_with_refs(
	struct VP9Decoder_s *pbi,
	struct VP9_Common_s *cm,
	union param_u *params,
	unsigned int *mmu_index_adr,
	int print_header_info) {

	int width, height;
	int found = 0, i;
	int has_valid_ref_frame = 0;
	struct PIC_BUFFER_CONFIG_s *ybf;
	struct BufferPool_s * const pool = cm->buffer_pool;
	int ret = 0;

	/* Bit (REFS_PER_FRAME - i - 1) of same_frame_size says "reuse
	 * the size of reference i"; take the first match.
	 */
	for (i = 0; i < REFS_PER_FRAME; ++i) {
		if ((params->p.same_frame_size >>
			(REFS_PER_FRAME - i - 1)) & 0x1) {
			struct PIC_BUFFER_CONFIG_s *const buf =
				cm->frame_refs[i].buf;
			/*if (print_header_info)
			 *	pr_info
			 *	("1-bit same_frame_size[%d] read : 1\n", i);
			 */
			width = buf->y_crop_width;
			height = buf->y_crop_height;
			/*if (print_header_info)
			 *	pr_info
			 *	(" - same_frame_size width : %d\n", width);
			 */
			/*if (print_header_info)
			 *	pr_info
			 *	(" - same_frame_size height : %d\n", height);
			 */
			found = 1;
			break;
		} else {
			/*if (print_header_info)
			 *	pr_info
			 *	("1-bit same_frame_size[%d] read : 0\n", i);
			 */
		}
	}

	if (!found) {
		/* No reference matched: the size was coded explicitly. */
		/*vp9_read_frame_size(rb, &width, &height);*/
		width = params->p.width;
		height = params->p.height;
		/*if (print_header_info)
		 *	pr_info
		 *	(" * 16-bits w read : %d (width : %d)\n",
		 *		width, height);
		 *if (print_header_info)
		 *	pr_info
		 *	(" * 16-bits h read : %d (height : %d)\n",
		 *		width, height);
		 */
	}

	if (is_oversize(width, height)) {
		vp9_print(pbi, 0, "%s, Error: Invalid frame size\n", __func__);
		return -1;
	}

	/* Write the resolved size back so later consumers of the params
	 * see the actual frame dimensions.
	 */
	params->p.width = width;
	params->p.height = height;

	WRITE_VREG(HEVC_PARSER_PICTURE_SIZE, (height << 16) | width);
	if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) {
		/*if(cm->prev_fb_idx >= 0) release_unused_4k(cm->prev_fb_idx);
		 *cm->prev_fb_idx = cm->new_fb_idx;
		 */
		/*	pr_info
		 *	("[DEBUG DEBUG]Before alloc_mmu,
		 *	prev_fb_idx : %d, new_fb_idx : %d\r\n",
		 *	cm->prev_fb_idx, cm->new_fb_idx);
		 */
		ret = vp9_alloc_mmu(pbi, cm->new_fb_idx,
			params->p.width, params->p.height,
			params->p.bit_depth, mmu_index_adr);
		if (ret != 0) {
			pr_err("can't alloc need mmu,idx %d\r\n",
				cm->new_fb_idx);
			return ret;
		}
		cm->cur_fb_idx_mmu = cm->new_fb_idx;
	}

	/*Check to make sure at least one of frames that this frame references
	 *has valid dimensions.
	 */
	for (i = 0; i < REFS_PER_FRAME; ++i) {
		struct RefBuffer_s * const ref_frame = &cm->frame_refs[i];

		has_valid_ref_frame |=
			valid_ref_frame_size(ref_frame->buf->y_crop_width,
				ref_frame->buf->y_crop_height,
				width, height);
	}
	if (!has_valid_ref_frame) {
		pr_err("Error: Referenced frame has invalid size\r\n");
		return -1;
	}
#if 0
	/* NOTE(review): if this block is ever enabled, the if-statement
	 * is missing braces — the 'return -1' would run unconditionally
	 * on the first iteration. Fix before enabling.
	 */
	for (i = 0; i < REFS_PER_FRAME; ++i) {
		struct RefBuffer_s * const ref_frame =
			&cm->frame_refs[i];
		if (!valid_ref_frame_img_fmt(
			ref_frame->buf->bit_depth,
			ref_frame->buf->subsampling_x,
			ref_frame->buf->subsampling_y,
			cm->bit_depth,
			cm->subsampling_x,
			cm->subsampling_y))
			pr_err
			("Referenced frame incompatible color fmt\r\n");
			return -1;
	}
#endif
	resize_context_buffers(pbi, cm, width, height);
	setup_display_size(cm, params, print_header_info);

#if 0
	lock_buffer_pool(pool);
	if (vp9_realloc_frame_buffer(
		get_frame_new_buffer(cm), cm->width, cm->height,
		cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
		cm->use_highbitdepth,
#endif
		VP9_DEC_BORDER_IN_PIXELS,
		cm->byte_alignment,
		&pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer,
		pool->get_fb_cb,
		pool->cb_priv)) {
		unlock_buffer_pool(pool);
		vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
			"Failed to allocate frame buffer");
	}
	unlock_buffer_pool(pool);
#else
	/* porting */
	/* Record the geometry directly on the hardware-managed frame
	 * buffer in place of the libvpx realloc path above.
	 */
	ybf = get_frame_new_buffer(cm);
	if (!ybf)
		return -1;

	ybf->y_crop_width = width;
	ybf->y_crop_height = height;
	ybf->bit_depth = params->p.bit_depth;
#endif
	pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
	pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
	pool->frame_bufs[cm->new_fb_idx].buf.bit_depth =
		(unsigned int)cm->bit_depth;
	pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
	return ret;
}
1526
/* Return true when a and b differ by less than m. */
static inline bool close_to(int a, int b, int m)
{
	int diff = a - b;

	if (diff < 0)
		diff = -diff;
	return diff < m;
}
1531
1532#ifdef MULTI_INSTANCE_SUPPORT
1533static int vp9_print_cont(struct VP9Decoder_s *pbi,
1534 int flag, const char *fmt, ...)
1535{
1536 unsigned char buf[HEVC_PRINT_BUF];
1537 int len = 0;
1538
1539 if (pbi == NULL ||
1540 (flag == 0) ||
1541 (debug & flag)) {
1542 va_list args;
1543
1544 va_start(args, fmt);
1545 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
1546 pr_debug("%s", buf);
1547 va_end(args);
1548 }
1549 return 0;
1550}
1551
1552static void trigger_schedule(struct VP9Decoder_s *pbi)
1553{
1554 if (pbi->is_used_v4l) {
1555 struct aml_vcodec_ctx *ctx =
1556 (struct aml_vcodec_ctx *)(pbi->v4l2_ctx);
1557
1558 if (ctx->param_sets_from_ucode &&
1559 !pbi->v4l_params_parsed)
1560 vdec_v4l_write_frame_sync(ctx);
1561 }
1562
1563 if (pbi->vdec_cb)
1564 pbi->vdec_cb(hw_to_vdec(pbi), pbi->vdec_cb_arg);
1565}
1566
1567static void reset_process_time(struct VP9Decoder_s *pbi)
1568{
1569 if (pbi->start_process_time) {
1570 unsigned process_time =
1571 1000 * (jiffies - pbi->start_process_time) / HZ;
1572 pbi->start_process_time = 0;
1573 if (process_time > max_process_time[pbi->index])
1574 max_process_time[pbi->index] = process_time;
1575 }
1576}
1577
1578static void start_process_time(struct VP9Decoder_s *pbi)
1579{
1580 pbi->start_process_time = jiffies;
1581 pbi->decode_timeout_count = 0;
1582 pbi->last_lcu_idx = 0;
1583}
1584
1585static void timeout_process(struct VP9Decoder_s *pbi)
1586{
1587 pbi->timeout_num++;
1588 amhevc_stop();
1589 vp9_print(pbi,
1590 0, "%s decoder timeout\n", __func__);
1591
1592 pbi->dec_result = DEC_RESULT_DONE;
1593 reset_process_time(pbi);
1594 vdec_schedule_work(&pbi->work);
1595}
1596
1597static u32 get_valid_double_write_mode(struct VP9Decoder_s *pbi)
1598{
1599 return ((double_write_mode & 0x80000000) == 0) ?
1600 pbi->double_write_mode :
1601 (double_write_mode & 0x7fffffff);
1602}
1603
1604static int get_double_write_mode(struct VP9Decoder_s *pbi)
1605{
1606 u32 valid_dw_mode = get_valid_double_write_mode(pbi);
1607 u32 dw;
1608 int w, h;
1609 struct VP9_Common_s *cm = &pbi->common;
1610 struct PIC_BUFFER_CONFIG_s *cur_pic_config;
1611
1612 /* mask for supporting double write value bigger than 0x100 */
1613 if (valid_dw_mode & 0xffffff00) {
1614 if (!cm->cur_frame)
1615 return 1;/*no valid frame,*/
1616 cur_pic_config = &cm->cur_frame->buf;
1617 w = cur_pic_config->y_crop_width;
1618 h = cur_pic_config->y_crop_height;
1619
1620 dw = 0x1; /*1:1*/
1621 switch (valid_dw_mode) {
1622 case 0x100:
1623 if (w > 1920 && h > 1088)
1624 dw = 0x4; /*1:2*/
1625 break;
1626 case 0x200:
1627 if (w > 1920 && h > 1088)
1628 dw = 0x2; /*1:4*/
1629 break;
1630 case 0x300:
1631 if (w > 1280 && h > 720)
1632 dw = 0x4; /*1:2*/
1633 break;
1634 default:
1635 break;
1636 }
1637 return dw;
1638 }
1639
1640 return valid_dw_mode;
1641}
1642
1643/* for double write buf alloc */
1644static int get_double_write_mode_init(struct VP9Decoder_s *pbi)
1645{
1646 u32 valid_dw_mode = get_valid_double_write_mode(pbi);
1647 u32 dw;
1648 int w = pbi->init_pic_w;
1649 int h = pbi->init_pic_h;
1650
1651 dw = 0x1; /*1:1*/
1652 switch (valid_dw_mode) {
1653 case 0x100:
1654 if (w > 1920 && h > 1088)
1655 dw = 0x4; /*1:2*/
1656 break;
1657 case 0x200:
1658 if (w > 1920 && h > 1088)
1659 dw = 0x2; /*1:4*/
1660 break;
1661 case 0x300:
1662 if (w > 1280 && h > 720)
1663 dw = 0x4; /*1:2*/
1664 break;
1665 default:
1666 dw = valid_dw_mode;
1667 break;
1668 }
1669 return dw;
1670}
1671#endif
1672
/*
 * Map a double-write mode to its linear downscale ratio: modes 2 and
 * 3 write at 1/4 width+height, mode 4 at 1/2, anything else 1:1.
 * (pbi is unused but kept for interface compatibility.)
 */
static int get_double_write_ratio(struct VP9Decoder_s *pbi,
	int dw_mode)
{
	switch (dw_mode) {
	case 2:
	case 3:
		return 4;
	case 4:
		return 2;
	default:
		return 1;
	}
}
1684
1685//#define MAX_4K_NUM 0x1200
1686
1687int vp9_alloc_mmu(
1688 struct VP9Decoder_s *pbi,
1689 int cur_buf_idx,
1690 int pic_width,
1691 int pic_height,
1692 unsigned short bit_depth,
1693 unsigned int *mmu_index_adr)
1694{
1695 int bit_depth_10 = (bit_depth == VPX_BITS_10);
1696 int picture_size;
1697 int cur_mmu_4k_number, max_frame_num;
1698 if (!pbi->mmu_box) {
1699 pr_err("error no mmu box!\n");
1700 return -1;
1701 }
1702 if (pbi->double_write_mode & 0x10)
1703 return 0;
1704 if (bit_depth >= VPX_BITS_12) {
1705 pbi->fatal_error = DECODER_FATAL_ERROR_SIZE_OVERFLOW;
1706 pr_err("fatal_error, un support bit depth 12!\n\n");
1707 return -1;
1708 }
1709 picture_size = compute_losless_comp_body_size(pic_width, pic_height,
1710 bit_depth_10);
1711 cur_mmu_4k_number = ((picture_size + (1 << 12) - 1) >> 12);
1712
1713 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1714 max_frame_num = MAX_FRAME_8K_NUM;
1715 else
1716 max_frame_num = MAX_FRAME_4K_NUM;
1717
1718 if (cur_mmu_4k_number > max_frame_num) {
1719 pr_err("over max !! cur_mmu_4k_number 0x%x width %d height %d\n",
1720 cur_mmu_4k_number, pic_width, pic_height);
1721 return -1;
1722 }
1723 return decoder_mmu_box_alloc_idx(
1724 pbi->mmu_box,
1725 cur_buf_idx,
1726 cur_mmu_4k_number,
1727 mmu_index_adr);
1728}
1729
1730
1731#ifndef MV_USE_FIXED_BUF
1732static void dealloc_mv_bufs(struct VP9Decoder_s *pbi)
1733{
1734 int i;
1735 for (i = 0; i < MV_BUFFER_NUM; i++) {
1736 if (pbi->m_mv_BUF[i].start_adr) {
1737 if (debug)
1738 pr_info(
1739 "dealloc mv buf(%d) adr %ld size 0x%x used_flag %d\n",
1740 i, pbi->m_mv_BUF[i].start_adr,
1741 pbi->m_mv_BUF[i].size,
1742 pbi->m_mv_BUF[i].used_flag);
1743 decoder_bmmu_box_free_idx(
1744 pbi->bmmu_box,
1745 MV_BUFFER_IDX(i));
1746 pbi->m_mv_BUF[i].start_adr = 0;
1747 pbi->m_mv_BUF[i].size = 0;
1748 pbi->m_mv_BUF[i].used_flag = 0;
1749 }
1750 }
1751}
1752
1753static int alloc_mv_buf(struct VP9Decoder_s *pbi,
1754 int i, int size)
1755{
1756 int ret = 0;
1757 if (decoder_bmmu_box_alloc_buf_phy
1758 (pbi->bmmu_box,
1759 MV_BUFFER_IDX(i), size,
1760 DRIVER_NAME,
1761 &pbi->m_mv_BUF[i].start_adr) < 0) {
1762 pbi->m_mv_BUF[i].start_adr = 0;
1763 ret = -1;
1764 } else {
1765 pbi->m_mv_BUF[i].size = size;
1766 pbi->m_mv_BUF[i].used_flag = 0;
1767 ret = 0;
1768 if (debug) {
1769 pr_info(
1770 "MV Buffer %d: start_adr %p size %x\n",
1771 i,
1772 (void *)pbi->m_mv_BUF[i].start_adr,
1773 pbi->m_mv_BUF[i].size);
1774 }
1775 }
1776 return ret;
1777}
1778
1779static int init_mv_buf_list(struct VP9Decoder_s *pbi)
1780{
1781 int i;
1782 int ret = 0;
1783 int count = MV_BUFFER_NUM;
1784 int pic_width = pbi->init_pic_w;
1785 int pic_height = pbi->init_pic_h;
1786 int lcu_size = 64; /*fixed 64*/
1787 int pic_width_64 = (pic_width + 63) & (~0x3f);
1788 int pic_height_32 = (pic_height + 31) & (~0x1f);
1789 int pic_width_lcu = (pic_width_64 % lcu_size) ?
1790 pic_width_64 / lcu_size + 1
1791 : pic_width_64 / lcu_size;
1792 int pic_height_lcu = (pic_height_32 % lcu_size) ?
1793 pic_height_32 / lcu_size + 1
1794 : pic_height_32 / lcu_size;
1795 int lcu_total = pic_width_lcu * pic_height_lcu;
1796 int size = ((lcu_total * MV_MEM_UNIT) + 0xffff) &
1797 (~0xffff);
1798 if (mv_buf_margin > 0)
1799 count = REF_FRAMES + mv_buf_margin;
1800
1801 if (pbi->init_pic_w > 2048 && pbi->init_pic_h > 1088)
1802 count = REF_FRAMES_4K + mv_buf_margin;
1803
1804 if (debug) {
1805 pr_info("%s w:%d, h:%d, count: %d\n",
1806 __func__, pbi->init_pic_w, pbi->init_pic_h, count);
1807 }
1808
1809 for (i = 0;
1810 i < count && i < MV_BUFFER_NUM; i++) {
1811 if (alloc_mv_buf(pbi, i, size) < 0) {
1812 ret = -1;
1813 break;
1814 }
1815 }
1816 return ret;
1817}
1818
1819static int get_mv_buf(struct VP9Decoder_s *pbi,
1820 int *mv_buf_index,
1821 unsigned long *mpred_mv_wr_start_addr)
1822{
1823 int i;
1824 int ret = -1;
1825 for (i = 0; i < MV_BUFFER_NUM; i++) {
1826 if (pbi->m_mv_BUF[i].start_adr &&
1827 pbi->m_mv_BUF[i].used_flag == 0) {
1828 pbi->m_mv_BUF[i].used_flag = 1;
1829 ret = i;
1830 break;
1831 }
1832 }
1833
1834 if (ret >= 0) {
1835 *mv_buf_index = ret;
1836 *mpred_mv_wr_start_addr =
1837 (pbi->m_mv_BUF[ret].start_adr + 0xffff) &
1838 (~0xffff);
1839 if (debug & VP9_DEBUG_BUFMGR_MORE)
1840 pr_info(
1841 "%s => %d (%ld) size 0x%x\n",
1842 __func__, ret,
1843 *mpred_mv_wr_start_addr,
1844 pbi->m_mv_BUF[ret].size);
1845 } else {
1846 pr_info(
1847 "%s: Error, mv buf is not enough\n",
1848 __func__);
1849 }
1850 return ret;
1851}
1852
1853static void put_mv_buf(struct VP9Decoder_s *pbi,
1854 int *mv_buf_index)
1855{
1856 int i = *mv_buf_index;
1857 if (i >= MV_BUFFER_NUM) {
1858 if (debug & VP9_DEBUG_BUFMGR_MORE)
1859 pr_info(
1860 "%s: index %d beyond range\n",
1861 __func__, i);
1862 return;
1863 }
1864 if (debug & VP9_DEBUG_BUFMGR_MORE)
1865 pr_info(
1866 "%s(%d): used_flag(%d)\n",
1867 __func__, i,
1868 pbi->m_mv_BUF[i].used_flag);
1869
1870 *mv_buf_index = -1;
1871 if (pbi->m_mv_BUF[i].start_adr &&
1872 pbi->m_mv_BUF[i].used_flag)
1873 pbi->m_mv_BUF[i].used_flag = 0;
1874}
1875
1876static void put_un_used_mv_bufs(struct VP9Decoder_s *pbi)
1877{
1878 struct VP9_Common_s *const cm = &pbi->common;
1879 struct RefCntBuffer_s *const frame_bufs = cm->buffer_pool->frame_bufs;
1880 int i;
1881 for (i = 0; i < pbi->used_buf_num; ++i) {
1882 if ((frame_bufs[i].ref_count == 0) &&
1883 (frame_bufs[i].buf.index != -1) &&
1884 (frame_bufs[i].buf.mv_buf_index >= 0)
1885 )
1886 put_mv_buf(pbi, &frame_bufs[i].buf.mv_buf_index);
1887 }
1888}
1889
1890#ifdef SUPPORT_FB_DECODING
1891static bool mv_buf_available(struct VP9Decoder_s *pbi)
1892{
1893 int i;
1894 bool ret = 0;
1895 for (i = 0; i < MV_BUFFER_NUM; i++) {
1896 if (pbi->m_mv_BUF[i].start_adr &&
1897 pbi->m_mv_BUF[i].used_flag == 0) {
1898 ret = 1;
1899 break;
1900 }
1901 }
1902 return ret;
1903}
1904#endif
1905#endif
1906
1907#ifdef SUPPORT_FB_DECODING
1908static void init_stage_buf(struct VP9Decoder_s *pbi)
1909{
1910 uint i;
1911 for (i = 0; i < STAGE_MAX_BUFFERS
1912 && i < stage_buf_num; i++) {
1913 pbi->stage_bufs[i] =
1914 vmalloc(sizeof(struct stage_buf_s));
1915 if (pbi->stage_bufs[i] == NULL) {
1916 vp9_print(pbi,
1917 0, "%s vmalloc fail\n", __func__);
1918 break;
1919 }
1920 pbi->stage_bufs[i]->index = i;
1921 }
1922 pbi->used_stage_buf_num = i;
1923 pbi->s1_pos = 0;
1924 pbi->s2_pos = 0;
1925 pbi->s1_buf = NULL;
1926 pbi->s2_buf = NULL;
1927 pbi->s1_mv_buf_index = FRAME_BUFFERS;
1928 pbi->s1_mv_buf_index_pre = FRAME_BUFFERS;
1929 pbi->s1_mv_buf_index_pre_pre = FRAME_BUFFERS;
1930
1931 if (pbi->used_stage_buf_num > 0)
1932 vp9_print(pbi,
1933 0, "%s 2 stage decoding buf %d\n",
1934 __func__,
1935 pbi->used_stage_buf_num);
1936}
1937
1938static void uninit_stage_buf(struct VP9Decoder_s *pbi)
1939{
1940 int i;
1941 for (i = 0; i < pbi->used_stage_buf_num; i++) {
1942 if (pbi->stage_bufs[i])
1943 vfree(pbi->stage_bufs[i]);
1944 pbi->stage_bufs[i] = NULL;
1945 }
1946 pbi->used_stage_buf_num = 0;
1947 pbi->s1_pos = 0;
1948 pbi->s2_pos = 0;
1949 pbi->s1_buf = NULL;
1950 pbi->s2_buf = NULL;
1951}
1952
/*
 * Claim the stage buffer at the s1 (front-end) ring position and
 * allocate its MMU pages. Fails (returns -1, pbi->s1_buf = NULL)
 * when advancing s1 would collide with s2, i.e. the ring is full.
 * On success, pbi->s1_buf points at the claimed buffer and 0 is
 * returned.
 *
 * NOTE(review): the MMU pages are allocated here under 'buf->index'
 * but inc_s1_pos()/inc_s2_pos() free them under
 * 'FRAME_BUFFERS + buf->index' — confirm which index space is
 * intended; the mismatch looks like a bug.
 */
static int get_s1_buf(
	struct VP9Decoder_s *pbi)
{
	struct stage_buf_s *buf = NULL;
	int ret = -1;
	int buf_page_num = MAX_STAGE_PAGE_NUM;
	int next_s1_pos = pbi->s1_pos + 1;

	if (next_s1_pos >= pbi->used_stage_buf_num)
		next_s1_pos = 0;
	/* ring full: the slot after s1 is where s2 is still reading */
	if (next_s1_pos == pbi->s2_pos) {
		pbi->s1_buf = NULL;
		return ret;
	}

	buf = pbi->stage_bufs[pbi->s1_pos];
	ret = decoder_mmu_box_alloc_idx(
		pbi->mmu_box,
		buf->index,
		buf_page_num,
		pbi->stage_mmu_map_addr);
	if (ret < 0) {
		vp9_print(pbi, 0,
			"%s decoder_mmu_box_alloc fail for index %d (s1_pos %d s2_pos %d)\n",
			__func__, buf->index,
			pbi->s1_pos, pbi->s2_pos);
		buf = NULL;
	} else {
		vp9_print(pbi, VP9_DEBUG_2_STAGE,
			"%s decoder_mmu_box_alloc %d page for index %d (s1_pos %d s2_pos %d)\n",
			__func__, buf_page_num, buf->index,
			pbi->s1_pos, pbi->s2_pos);
	}
	pbi->s1_buf = buf;
	return ret;
}
1989
/*
 * Front-end finished with the buffer at s1_pos: trim its MMU
 * allocation down to the pages actually written (read back from the
 * HED write-control register, or half the maximum in the test
 * schedule build) and advance the s1 ring position.
 */
static void inc_s1_pos(struct VP9Decoder_s *pbi)
{
	struct stage_buf_s *buf =
		pbi->stage_bufs[pbi->s1_pos];

	int used_page_num =
#ifdef FB_DECODING_TEST_SCHEDULE
		MAX_STAGE_PAGE_NUM/2;
#else
		(READ_VREG(HEVC_ASSIST_HED_FB_W_CTL) >> 16);
#endif
	/* NOTE(review): frees under FRAME_BUFFERS + index while
	 * get_s1_buf() allocated under plain index — confirm.
	 */
	decoder_mmu_box_free_idx_tail(pbi->mmu_box,
		FRAME_BUFFERS + buf->index, used_page_num);

	pbi->s1_pos++;
	if (pbi->s1_pos >= pbi->used_stage_buf_num)
		pbi->s1_pos = 0;

	vp9_print(pbi, VP9_DEBUG_2_STAGE,
		"%s (used_page_num %d) for index %d (s1_pos %d s2_pos %d)\n",
		__func__, used_page_num, buf->index,
		pbi->s1_pos, pbi->s2_pos);
}
2013
2014#define s2_buf_available(pbi) (pbi->s1_pos != pbi->s2_pos)
2015
2016static int get_s2_buf(
2017 struct VP9Decoder_s *pbi)
2018{
2019 int ret = -1;
2020 struct stage_buf_s *buf = NULL;
2021 if (s2_buf_available(pbi)) {
2022 buf = pbi->stage_bufs[pbi->s2_pos];
2023 vp9_print(pbi, VP9_DEBUG_2_STAGE,
2024 "%s for index %d (s1_pos %d s2_pos %d)\n",
2025 __func__, buf->index,
2026 pbi->s1_pos, pbi->s2_pos);
2027 pbi->s2_buf = buf;
2028 ret = 0;
2029 }
2030 return ret;
2031}
2032
2033static void inc_s2_pos(struct VP9Decoder_s *pbi)
2034{
2035 struct stage_buf_s *buf =
2036 pbi->stage_bufs[pbi->s2_pos];
2037 decoder_mmu_box_free_idx(pbi->mmu_box,
2038 FRAME_BUFFERS + buf->index);
2039 pbi->s2_pos++;
2040 if (pbi->s2_pos >= pbi->used_stage_buf_num)
2041 pbi->s2_pos = 0;
2042 vp9_print(pbi, VP9_DEBUG_2_STAGE,
2043 "%s for index %d (s1_pos %d s2_pos %d)\n",
2044 __func__, buf->index,
2045 pbi->s1_pos, pbi->s2_pos);
2046}
2047
2048static int get_free_stage_buf_num(struct VP9Decoder_s *pbi)
2049{
2050 int num;
2051 if (pbi->s1_pos >= pbi->s2_pos)
2052 num = pbi->used_stage_buf_num -
2053 (pbi->s1_pos - pbi->s2_pos) - 1;
2054 else
2055 num = (pbi->s2_pos - pbi->s1_pos) - 1;
2056 return num;
2057}
2058
2059#ifndef FB_DECODING_TEST_SCHEDULE
2060static DEFINE_SPINLOCK(fb_core_spin_lock);
2061
/*
 * Stub: report whether the back-end (s2) stage has finished decoding
 * the last LCU. Always returns 1 pending VLSI review.
 */
static u8 is_s2_decoding_finished(struct VP9Decoder_s *pbi)
{
	/* to do: VLSI review
	completion of last LCU decoding in BACK
	*/
	return 1;
}
2069
/*
 * Stub: kick LCU decoding on the back-end after the front-end parser
 * has finished. Intentionally empty pending VLSI review.
 */
static void start_s1_decoding(struct VP9Decoder_s *pbi)
{
	/* to do: VLSI review
	after parser, how to start LCU decoding in BACK
	*/
}
2076
/*
 * Reset the front (parser) and/or back (pixel pipeline) HEVC cores
 * selected by 'mask'. The DMC request channel for the decoder is
 * disconnected around the reset so no DDR traffic is in flight, then
 * the selected DOS_SW_RESET3 bits are pulsed.
 */
static void fb_reset_core(struct vdec_s *vdec, u32 mask)
{
	/* to do: VLSI review
	1. how to disconnect DMC for FRONT and BACK
	2. reset bit 13, 24, FRONT or BACK ??
	*/

	unsigned long flags;
	u32 reset_bits = 0;
	if (mask & HW_MASK_FRONT)
		WRITE_VREG(HEVC_STREAM_CONTROL, 0);
	/* disconnect the decoder's DMC request port (bit 4) */
	spin_lock_irqsave(&fb_core_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & (~(1 << 4)));
	spin_unlock_irqrestore(&fb_core_spin_lock, flags);

	/* busy-wait until the DMC channel reports idle */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& (1 << 4)))
		;

	if ((mask & HW_MASK_FRONT) &&
		input_frame_based(vdec))
		WRITE_VREG(HEVC_STREAM_CONTROL, 0);

		/*
	 * 2: assist
	 * 3: parser
	 * 4: parser_state
	 * 8: dblk
	 * 11:mcpu
	 * 12:ccpu
	 * 13:ddr
	 * 14:iqit
	 * 15:ipp
	 * 17:qdct
	 * 18:mpred
	 * 19:sao
	 * 24:hevc_afifo
	 */
	/* NOTE(review): when both FRONT and BACK are set in 'mask', the
	 * BACK assignment below overwrites the FRONT bits instead of
	 * OR-ing them — confirm whether both sets should be combined.
	 */
	if (mask & HW_MASK_FRONT) {
		reset_bits =
			(1<<3)|(1<<4)|(1<<11)|
			(1<<12)|(1<<18);
	}
	if (mask & HW_MASK_BACK) {
		reset_bits =
			(1<<8)|(1<<13)|(1<<14)|(1<<15)|
			(1<<17)|(1<<19)|(1<<24);
	}
	WRITE_VREG(DOS_SW_RESET3, reset_bits);
#if 0
		(1<<3)|(1<<4)|(1<<8)|(1<<11)|
		(1<<12)|(1<<13)|(1<<14)|(1<<15)|
		(1<<17)|(1<<18)|(1<<19)|(1<<24);
#endif
	WRITE_VREG(DOS_SW_RESET3, 0);


	/* reconnect the decoder's DMC request port */
	spin_lock_irqsave(&fb_core_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | (1 << 4));
	spin_unlock_irqrestore(&fb_core_spin_lock, flags);

}
2141#endif
2142
2143#endif
2144
2145static void init_pic_list_hw(struct VP9Decoder_s *pbi);
2146
/*
 * Find a completely idle frame buffer (no decoder references, no
 * vframe references, valid index) and take one decoder reference on
 * it. In V4L mode, a buffer whose CMA backing has not been allocated
 * yet is configured on the fly; a configuration failure forces the
 * "none found" path by setting i to used_buf_num. Returns the buffer
 * index, or INVALID_IDX when no buffer is free. Runs under the
 * buffer pool lock.
 */
static int get_free_fb(struct VP9Decoder_s *pbi)
{
	struct VP9_Common_s *const cm = &pbi->common;
	struct RefCntBuffer_s *const frame_bufs = cm->buffer_pool->frame_bufs;
	struct PIC_BUFFER_CONFIG_s *pic = NULL;
	int i;
	unsigned long flags;

	lock_buffer_pool(cm->buffer_pool, flags);
	if (debug & VP9_DEBUG_BUFMGR_MORE) {
		for (i = 0; i < pbi->used_buf_num; ++i) {
			pr_info("%s:%d, ref_count %d vf_ref %d index %d\r\n",
			__func__, i, frame_bufs[i].ref_count,
			frame_bufs[i].buf.vf_ref,
			frame_bufs[i].buf.index);
		}
	}
	for (i = 0; i < pbi->used_buf_num; ++i) {
		pic = &frame_bufs[i].buf;
		if ((frame_bufs[i].ref_count == 0) &&
			(pic->vf_ref == 0) && (pic->index != -1)) {
			if (pbi->is_used_v4l && !pic->cma_alloc_addr) {
				pic->y_crop_width = pbi->frame_width;
				pic->y_crop_height = pbi->frame_height;
				if (!config_pic(pbi, pic)) {
					set_canvas(pbi, pic);
					init_pic_list_hw(pbi);
				} else
					i = pbi->used_buf_num;
			}

			break;
		}
	}
	if (i != pbi->used_buf_num) {
		frame_bufs[i].ref_count = 1;
		/*pr_info("[MMU DEBUG 1] set ref_count[%d] : %d\r\n",
			i, frame_bufs[i].ref_count);*/
	} else {
		/* Reset i to be INVALID_IDX to indicate
			no free buffer found*/
		i = INVALID_IDX;
	}

	unlock_buffer_pool(cm->buffer_pool, flags);
	return i;
}
2194
2195static int get_free_buf_count(struct VP9Decoder_s *pbi)
2196{
2197 struct VP9_Common_s *const cm = &pbi->common;
2198 struct RefCntBuffer_s *const frame_bufs = cm->buffer_pool->frame_bufs;
2199 int i;
2200 int free_buf_count = 0;
2201 for (i = 0; i < pbi->used_buf_num; ++i)
2202 if ((frame_bufs[i].ref_count == 0) &&
2203 (frame_bufs[i].buf.vf_ref == 0) &&
2204 (frame_bufs[i].buf.index != -1)
2205 )
2206 free_buf_count++;
2207 return free_buf_count;
2208}
2209
2210static void decrease_ref_count(int idx, struct RefCntBuffer_s *const frame_bufs,
2211 struct BufferPool_s *const pool)
2212{
2213 if (idx >= 0) {
2214 --frame_bufs[idx].ref_count;
2215 /*pr_info("[MMU DEBUG 7] dec ref_count[%d] : %d\r\n", idx,
2216 * frame_bufs[idx].ref_count);
2217 */
2218 /*A worker may only get a free framebuffer index when
2219 *calling get_free_fb. But the private buffer is not set up
2220 *until finish decoding header. So any error happens during
2221 *decoding header, the frame_bufs will not have valid priv
2222 *buffer.
2223 */
2224
2225 if (frame_bufs[idx].ref_count == 0 &&
2226 frame_bufs[idx].raw_frame_buffer.priv)
2227 vp9_release_frame_buffer
2228 (&frame_bufs[idx].raw_frame_buffer);
2229 }
2230}
2231
/*
 * Build cm->next_ref_frame_map from the current map and the frame's
 * refresh_frame_flags: refreshed slots point at the new frame buffer,
 * the rest keep their current buffer. Each slot in the next map takes
 * a reference (on the new buffer for refreshed slots, on the existing
 * buffer otherwise) so the buffers stay alive until
 * refresh_ref_frames() swaps the maps and drops the old references.
 * Runs under the buffer pool lock.
 */
static void generate_next_ref_frames(struct VP9Decoder_s *pbi)
{
	struct VP9_Common_s *const cm = &pbi->common;
	struct RefCntBuffer_s *frame_bufs = cm->buffer_pool->frame_bufs;
	struct BufferPool_s *const pool = cm->buffer_pool;
	int mask, ref_index = 0;
	unsigned long flags;

	/* Generate next_ref_frame_map.*/
	lock_buffer_pool(pool, flags);
	for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
		if (mask & 1) {
			cm->next_ref_frame_map[ref_index] = cm->new_fb_idx;
			++frame_bufs[cm->new_fb_idx].ref_count;
			/*pr_info("[MMU DEBUG 4] inc ref_count[%d] : %d\r\n",
			 *cm->new_fb_idx, frame_bufs[cm->new_fb_idx].ref_count);
			 */
		} else
			cm->next_ref_frame_map[ref_index] =
				cm->ref_frame_map[ref_index];
		/* Current thread holds the reference frame.*/
		if (cm->ref_frame_map[ref_index] >= 0) {
			++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
			/*pr_info
			 *("[MMU DEBUG 5] inc ref_count[%d] : %d\r\n",
			 *cm->ref_frame_map[ref_index],
			 *frame_bufs[cm->ref_frame_map[ref_index]].ref_count);
			 */
		}
		++ref_index;
	}

	/* Slots beyond the highest refresh bit carry over unchanged. */
	for (; ref_index < REF_FRAMES; ++ref_index) {
		cm->next_ref_frame_map[ref_index] =
			cm->ref_frame_map[ref_index];
		/* Current thread holds the reference frame.*/
		if (cm->ref_frame_map[ref_index] >= 0) {
			++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
			/*pr_info("[MMU DEBUG 6] inc ref_count[%d] : %d\r\n",
			 *cm->ref_frame_map[ref_index],
			 *frame_bufs[cm->ref_frame_map[ref_index]].ref_count);
			 */
		}
	}
	unlock_buffer_pool(pool, flags);
	return;
}
2279
/*
 * Commit the reference map prepared by generate_next_ref_frames():
 * copy next_ref_frame_map into ref_frame_map and drop the references
 * the old map held. Slots being refreshed lose an extra reference
 * (the displaced buffer). Slots past the refresh mask are skipped
 * for show_existing_frame, which reuses an existing buffer without
 * updating the map. Runs under the buffer pool lock.
 */
static void refresh_ref_frames(struct VP9Decoder_s *pbi)

{
	struct VP9_Common_s *const cm = &pbi->common;
	struct BufferPool_s *pool = cm->buffer_pool;
	struct RefCntBuffer_s *frame_bufs = cm->buffer_pool->frame_bufs;
	int mask, ref_index = 0;
	unsigned long flags;

	lock_buffer_pool(pool, flags);
	for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
		const int old_idx = cm->ref_frame_map[ref_index];
		/*Current thread releases the holding of reference frame.*/
		decrease_ref_count(old_idx, frame_bufs, pool);

		/*Release the reference frame in reference map.*/
		if ((mask & 1) && old_idx >= 0)
			decrease_ref_count(old_idx, frame_bufs, pool);
		cm->ref_frame_map[ref_index] =
			cm->next_ref_frame_map[ref_index];
		++ref_index;
	}

	/*Current thread releases the holding of reference frame.*/
	for (; ref_index < REF_FRAMES && !cm->show_existing_frame;
		++ref_index) {
		const int old_idx = cm->ref_frame_map[ref_index];

		decrease_ref_count(old_idx, frame_bufs, pool);
		cm->ref_frame_map[ref_index] =
			cm->next_ref_frame_map[ref_index];
	}
	unlock_buffer_pool(pool, flags);
	return;
}
2315
/*
 * Parse one VP9 uncompressed frame header (delivered by the ucode in
 * *params) and set up frame-buffer / reference-frame state for decode.
 *
 * Return:
 *   0  - header accepted, frame will be decoded
 *   1  - show_existing_frame: nothing to decode, search next start code
 *  -1  - error for this frame (no free buffer, bad profile, resync, ...)
 *  -2  - frame dropped while waiting for the first keyframe
 */
int vp9_bufmgr_process(struct VP9Decoder_s *pbi, union param_u *params)
{
	struct VP9_Common_s *const cm = &pbi->common;
	struct BufferPool_s *pool = cm->buffer_pool;
	struct RefCntBuffer_s *frame_bufs = cm->buffer_pool->frame_bufs;
	struct PIC_BUFFER_CONFIG_s *pic = NULL;
	int i;
	int ret;

	pbi->ready_for_new_data = 0;

	/* Drop all inter frames until the first keyframe is seen. */
	if (pbi->has_keyframe == 0 &&
		params->p.frame_type != KEY_FRAME){
		on_no_keyframe_skiped++;
		return -2;
	}
	pbi->has_keyframe = 1;
	on_no_keyframe_skiped = 0;
#if 0
	if (pbi->mmu_enable) {
		if (!pbi->m_ins_flag)
			pbi->used_4k_num = (READ_VREG(HEVC_SAO_MMU_STATUS) >> 16);
		if (cm->prev_fb_idx >= 0) {
			decoder_mmu_box_free_idx_tail(pbi->mmu_box,
				cm->prev_fb_idx, pbi->used_4k_num);
		}
	}
#endif
	/* Recycle the previous working buffer if nobody references it. */
	if (cm->new_fb_idx >= 0
		&& frame_bufs[cm->new_fb_idx].ref_count == 0){
		vp9_release_frame_buffer
			(&frame_bufs[cm->new_fb_idx].raw_frame_buffer);
	}
	/*pr_info("Before get_free_fb, prev_fb_idx : %d, new_fb_idx : %d\r\n",
		cm->prev_fb_idx, cm->new_fb_idx);*/
#ifndef MV_USE_FIXED_BUF
	put_un_used_mv_bufs(pbi);
	if (debug & VP9_DEBUG_BUFMGR_DETAIL)
		dump_pic_list(pbi);
#endif
	/* Claim a free frame buffer for the frame about to be decoded. */
	cm->new_fb_idx = get_free_fb(pbi);
	if (cm->new_fb_idx == INVALID_IDX) {
		pr_info("get_free_fb error\r\n");
		return -1;
	}
#ifndef MV_USE_FIXED_BUF
#ifdef SUPPORT_FB_DECODING
	if (pbi->used_stage_buf_num == 0) {
#endif
		/* Pair the frame buffer with a motion-vector buffer. */
		if (get_mv_buf(pbi,
			&pool->frame_bufs[cm->new_fb_idx].
			buf.mv_buf_index,
			&pool->frame_bufs[cm->new_fb_idx].
			buf.mpred_mv_wr_start_addr
			) < 0) {
			pr_info("get_mv_buf fail\r\n");
			return -1;
		}
		if (debug & VP9_DEBUG_BUFMGR_DETAIL)
			dump_pic_list(pbi);
#ifdef SUPPORT_FB_DECODING
	}
#endif
#endif
	cm->cur_frame = &pool->frame_bufs[cm->new_fb_idx];
	/*if (debug & VP9_DEBUG_BUFMGR)
		pr_info("[VP9 DEBUG]%s(get_free_fb): %d\r\n", __func__,
		cm->new_fb_idx);*/

	pbi->cur_buf = &frame_bufs[cm->new_fb_idx];
	if (pbi->mmu_enable) {
		/* moved to after picture size ready
		 *alloc_mmu(cm, params->p.width, params->p.height,
		 *params->p.bit_depth, pbi->frame_mmu_map_addr);
		 */
		cm->prev_fb_idx = cm->new_fb_idx;
	}
	/*read_uncompressed_header()*/
	cm->last_frame_type = cm->frame_type;
	cm->last_intra_only = cm->intra_only;
	cm->profile = params->p.profile;
	if (cm->profile >= MAX_PROFILES) {
		pr_err("Error: Unsupported profile %d\r\n", cm->profile);
		return -1;
	}
	cm->show_existing_frame = params->p.show_existing_frame;
	if (cm->show_existing_frame) {
		/* Show an existing frame directly.*/
		int frame_to_show_idx = params->p.frame_to_show_idx;
		int frame_to_show;
		unsigned long flags;
		if (frame_to_show_idx >= REF_FRAMES) {
			pr_info("frame_to_show_idx %d exceed max index\r\n",
					frame_to_show_idx);
			return -1;
		}

		frame_to_show = cm->ref_frame_map[frame_to_show_idx];
		/*pr_info("frame_to_show %d\r\n", frame_to_show);*/
		lock_buffer_pool(pool, flags);
		if (frame_to_show < 0 ||
			frame_bufs[frame_to_show].ref_count < 1) {
			unlock_buffer_pool(pool, flags);
			pr_err
			("Error:Buffer %d does not contain a decoded frame",
			frame_to_show);
			return -1;
		}

		/* Alias new_fb_idx to the shown buffer (takes a ref). */
		ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
		unlock_buffer_pool(pool, flags);
		pbi->refresh_frame_flags = 0;
		/*cm->lf.filter_level = 0;*/
		cm->show_frame = 1;

		/*
		 *if (pbi->frame_parallel_decode) {
		 *	for (i = 0; i < REF_FRAMES; ++i)
		 *		cm->next_ref_frame_map[i] =
		 *		cm->ref_frame_map[i];
		 *}
		 */
		/* do not decode, search next start code */
		return 1;
	}
	cm->frame_type = params->p.frame_type;
	cm->show_frame = params->p.show_frame;
	cm->error_resilient_mode = params->p.error_resilient_mode;


	if (cm->frame_type == KEY_FRAME) {
		/* Keyframe refreshes every reference slot. */
		pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1;

		for (i = 0; i < REFS_PER_FRAME; ++i) {
			cm->frame_refs[i].idx = INVALID_IDX;
			cm->frame_refs[i].buf = NULL;
		}

		ret = setup_frame_size(pbi,
			cm, params, pbi->frame_mmu_map_addr,
			print_header_info);
		if (ret)
			return -1;
		if (pbi->need_resync) {
			memset(&cm->ref_frame_map, -1,
				sizeof(cm->ref_frame_map));
			pbi->need_resync = 0;
		}
	} else {
		cm->intra_only = cm->show_frame ? 0 : params->p.intra_only;
		/*if (print_header_info) {
		 *	if (cm->show_frame)
		 *		pr_info
		 *		("intra_only set to 0 because of show_frame\n");
		 *	else
		 *		pr_info
		 *		("1-bit intra_only read: %d\n", cm->intra_only);
		 *}
		 */


		cm->reset_frame_context = cm->error_resilient_mode ?
			0 : params->p.reset_frame_context;
		if (print_header_info) {
			if (cm->error_resilient_mode)
				pr_info
				("reset to 0 error_resilient_mode\n");
			else
				pr_info
				(" * 2-bits reset_frame_context read : %d\n",
				cm->reset_frame_context);
		}

		if (cm->intra_only) {
			if (cm->profile > PROFILE_0) {
				/*read_bitdepth_colorspace_sampling(cm,
				 * rb, print_header_info);
				 */
			} else {
				/*NOTE: The intra-only frame header
				 *does not include the specification
				 *of either the color format or
				 *color sub-sampling
				 *in profile 0. VP9 specifies that the default
				 *color format should be YUV 4:2:0 in this
				 *case (normative).
				 */
				cm->color_space = VPX_CS_BT_601;
				cm->subsampling_y = cm->subsampling_x = 1;
				cm->bit_depth = VPX_BITS_8;
				cm->use_highbitdepth = 0;
			}

			pbi->refresh_frame_flags =
				params->p.refresh_frame_flags;
			/*if (print_header_info)
			 *	pr_info("*%d-bits refresh_frame read:0x%x\n",
			 *	REF_FRAMES, pbi->refresh_frame_flags);
			 */
			ret = setup_frame_size(pbi,
				cm,
				params,
				pbi->frame_mmu_map_addr,
				print_header_info);
			if (ret)
				return -1;
			if (pbi->need_resync) {
				memset(&cm->ref_frame_map, -1,
					sizeof(cm->ref_frame_map));
				pbi->need_resync = 0;
			}
		} else if (pbi->need_resync != 1) {  /* Skip if need resync */
			pbi->refresh_frame_flags =
				params->p.refresh_frame_flags;
			if (print_header_info)
				pr_info
				("*%d-bits refresh_frame read:0x%x\n",
				REF_FRAMES, pbi->refresh_frame_flags);
			/* Bind the REFS_PER_FRAME active references from the
			 * packed ref_info field (3-bit slot + 1-bit sign bias
			 * per reference).
			 */
			for (i = 0; i < REFS_PER_FRAME; ++i) {
				const int ref =
					(params->p.ref_info >>
					(((REFS_PER_FRAME-i-1)*4)+1))
					& 0x7;
				const int idx =
					cm->ref_frame_map[ref];
				struct RefBuffer_s * const ref_frame =
					&cm->frame_refs[i];
				if (print_header_info)
					pr_info("*%d-bits ref[%d]read:%d\n",
					REF_FRAMES_LOG2, i, ref);
				ref_frame->idx = idx;
				/* NOTE(review): idx may be -1 here (empty
				 * map slot); &frame_bufs[-1].buf is then an
				 * out-of-range address — confirm downstream
				 * users check ref_frame->idx first.
				 */
				ref_frame->buf = &frame_bufs[idx].buf;
				cm->ref_frame_sign_bias[LAST_FRAME + i]
					= (params->p.ref_info >>
					((REFS_PER_FRAME-i-1)*4)) & 0x1;
				if (print_header_info)
					pr_info("1bit ref_frame_sign_bias");
				/*pr_info
				 *("%dread: %d\n",
				 *LAST_FRAME+i,
				 *cm->ref_frame_sign_bias
				 *[LAST_FRAME + i]);
				 */
				/*pr_info
				 *("[VP9 DEBUG]%s(get ref):%d\r\n",
				 *__func__, ref_frame->idx);
				 */

			}

			ret = setup_frame_size_with_refs(
				pbi,
				cm,
				params,
				pbi->frame_mmu_map_addr,
				print_header_info);
			if (ret)
				return -1;
			for (i = 0; i < REFS_PER_FRAME; ++i) {
				/*struct RefBuffer_s *const ref_buf =
				 *&cm->frame_refs[i];
				 */
				/* to do:
				 *vp9_setup_scale_factors_for_frame
				 */
			}
		}
	}

	pic = get_frame_new_buffer(cm);
	if (!pic)
		return -1;

	pic->bit_depth = cm->bit_depth;
	pic->color_space = cm->color_space;
	pic->slice_type = cm->frame_type;

	if (pbi->need_resync) {
		pr_err
		("Error: Keyframe/intra-only frame required to reset\r\n");
		return -1;
	}
	/* Take hold references; released later by refresh_ref_frames(). */
	generate_next_ref_frames(pbi);
	pbi->hold_ref_buf = 1;

#if 0
	if (frame_is_intra_only(cm) || cm->error_resilient_mode)
		vp9_setup_past_independence(cm);
	setup_loopfilter(&cm->lf, rb, print_header_info);
	setup_quantization(cm, &pbi->mb, rb, print_header_info);
	setup_segmentation(&cm->seg, rb, print_header_info);
	setup_segmentation_dequant(cm, print_header_info);

	setup_tile_info(cm, rb, print_header_info);
	sz = vp9_rb_read_literal(rb, 16);
	if (print_header_info)
		pr_info(" * 16-bits size read : %d (0x%x)\n", sz, sz);

	if (sz == 0)
		vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
			"Invalid header size");
#endif
	/*end read_uncompressed_header()*/
	/* Previous-frame MVs are reusable only when geometry and frame
	 * type are unchanged and the last frame was shown.
	 */
	cm->use_prev_frame_mvs = !cm->error_resilient_mode &&
			cm->width == cm->last_width &&
			cm->height == cm->last_height &&
			!cm->last_intra_only &&
			cm->last_show_frame &&
			(cm->last_frame_type != KEY_FRAME);

	/*pr_info
	 *("set use_prev_frame_mvs to %d (last_width %d last_height %d",
	 *cm->use_prev_frame_mvs, cm->last_width, cm->last_height);
	 *pr_info
	 *(" last_intra_only %d last_show_frame %d last_frame_type %d)\n",
	 *cm->last_intra_only, cm->last_show_frame, cm->last_frame_type);
	 */
	return 0;
}
2635
2636
2637void swap_frame_buffers(struct VP9Decoder_s *pbi)
2638{
2639 int ref_index = 0;
2640 struct VP9_Common_s *const cm = &pbi->common;
2641 struct BufferPool_s *const pool = cm->buffer_pool;
2642 struct RefCntBuffer_s *const frame_bufs = cm->buffer_pool->frame_bufs;
2643 unsigned long flags;
2644 refresh_ref_frames(pbi);
2645 pbi->hold_ref_buf = 0;
2646 cm->frame_to_show = get_frame_new_buffer(cm);
2647
2648 if (cm->frame_to_show) {
2649 /*if (!pbi->frame_parallel_decode || !cm->show_frame) {*/
2650 lock_buffer_pool(pool, flags);
2651 --frame_bufs[cm->new_fb_idx].ref_count;
2652 /*pr_info("[MMU DEBUG 8] dec ref_count[%d] : %d\r\n", cm->new_fb_idx,
2653 * frame_bufs[cm->new_fb_idx].ref_count);
2654 */
2655 unlock_buffer_pool(pool, flags);
2656 /*}*/
2657 }
2658
2659 /*Invalidate these references until the next frame starts.*/
2660 for (ref_index = 0; ref_index < 3; ref_index++)
2661 cm->frame_refs[ref_index].idx = -1;
2662}
2663
#if 0
/*
 * Dead code kept for reference (from the libvpx decoder wrapper):
 * clears the caller's resync flag once the decoder has recovered on a
 * keyframe or intra-only frame.
 */
static void check_resync(vpx_codec_alg_priv_t *const ctx,
	const struct VP9Decoder_s *const pbi)
{
	/* Clear resync flag if worker got a key frame or intra only frame.*/
	if (ctx->need_resync == 1 && pbi->need_resync == 0 &&
		(pbi->common.intra_only || pbi->common.frame_type == KEY_FRAME))
		ctx->need_resync = 0;
}
#endif
2674
2675int vp9_get_raw_frame(struct VP9Decoder_s *pbi, struct PIC_BUFFER_CONFIG_s *sd)
2676{
2677 struct VP9_Common_s *const cm = &pbi->common;
2678 int ret = -1;
2679
2680 if (pbi->ready_for_new_data == 1)
2681 return ret;
2682
2683 pbi->ready_for_new_data = 1;
2684
2685 /* no raw frame to show!!! */
2686 if (!cm->show_frame)
2687 return ret;
2688
2689 /* may not be get buff in v4l2 */
2690 if (!cm->frame_to_show)
2691 return ret;
2692
2693 pbi->ready_for_new_data = 1;
2694
2695 *sd = *cm->frame_to_show;
2696 ret = 0;
2697
2698 return ret;
2699}
2700
2701int vp9_bufmgr_init(struct VP9Decoder_s *pbi, struct BuffInfo_s *buf_spec_i,
2702 struct buff_s *mc_buf_i) {
2703 struct VP9_Common_s *cm = &pbi->common;
2704
2705 /*memset(pbi, 0, sizeof(struct VP9Decoder_s));*/
2706 pbi->frame_count = 0;
2707 pbi->pic_count = 0;
2708 pbi->pre_stream_offset = 0;
2709 cm->buffer_pool = &pbi->vp9_buffer_pool;
2710 spin_lock_init(&cm->buffer_pool->lock);
2711 cm->prev_fb_idx = INVALID_IDX;
2712 cm->new_fb_idx = INVALID_IDX;
2713 pbi->used_4k_num = -1;
2714 cm->cur_fb_idx_mmu = INVALID_IDX;
2715 pr_debug
2716 ("After vp9_bufmgr_init, prev_fb_idx : %d, new_fb_idx : %d\r\n",
2717 cm->prev_fb_idx, cm->new_fb_idx);
2718 pbi->need_resync = 1;
2719 /* Initialize the references to not point to any frame buffers.*/
2720 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
2721 memset(&cm->next_ref_frame_map, -1, sizeof(cm->next_ref_frame_map));
2722 cm->current_video_frame = 0;
2723 pbi->ready_for_new_data = 1;
2724
2725 /* private init */
2726 pbi->work_space_buf = buf_spec_i;
2727 if (!pbi->mmu_enable)
2728 pbi->mc_buf = mc_buf_i;
2729
2730 pbi->rpm_addr = NULL;
2731 pbi->lmem_addr = NULL;
2732
2733 pbi->use_cma_flag = 0;
2734 pbi->decode_idx = 0;
2735 pbi->slice_idx = 0;
2736 /*int m_uiMaxCUWidth = 1<<7;*/
2737 /*int m_uiMaxCUHeight = 1<<7;*/
2738 pbi->has_keyframe = 0;
2739 pbi->skip_flag = 0;
2740 pbi->wait_buf = 0;
2741 pbi->error_flag = 0;
2742
2743 pbi->pts_mode = PTS_NORMAL;
2744 pbi->last_pts = 0;
2745 pbi->last_lookup_pts = 0;
2746 pbi->last_pts_us64 = 0;
2747 pbi->last_lookup_pts_us64 = 0;
2748 pbi->shift_byte_count = 0;
2749 pbi->shift_byte_count_lo = 0;
2750 pbi->shift_byte_count_hi = 0;
2751 pbi->pts_mode_switching_count = 0;
2752 pbi->pts_mode_recovery_count = 0;
2753
2754 pbi->buf_num = 0;
2755 pbi->pic_num = 0;
2756
2757 return 0;
2758}
2759
2760int vp9_bufmgr_postproc(struct VP9Decoder_s *pbi)
2761{
2762 struct VP9_Common_s *cm = &pbi->common;
2763 struct PIC_BUFFER_CONFIG_s sd;
2764
2765 if (pbi->postproc_done)
2766 return 0;
2767 pbi->postproc_done = 1;
2768 swap_frame_buffers(pbi);
2769 if (!cm->show_existing_frame) {
2770 cm->last_show_frame = cm->show_frame;
2771 cm->prev_frame = cm->cur_frame;
2772#if 0
2773 if (cm->seg.enabled && !pbi->frame_parallel_decode)
2774 vp9_swap_current_and_last_seg_map(cm);
2775#endif
2776 }
2777 cm->last_width = cm->width;
2778 cm->last_height = cm->height;
2779 if (cm->show_frame)
2780 cm->current_video_frame++;
2781
2782 if (vp9_get_raw_frame(pbi, &sd) == 0) {
2783 /*pr_info("Display frame index %d\r\n", sd.index);*/
2784 sd.stream_offset = pbi->pre_stream_offset;
2785 prepare_display_buf(pbi, &sd);
2786 pbi->pre_stream_offset = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
2787 }
2788
2789/* else
2790 * pr_info
2791 * ("Not display this frame,ready_for_new_data%d show_frame%d\r\n",
2792 * pbi->ready_for_new_data, cm->show_frame);
2793 */
2794 return 0;
2795}
2796
/*struct VP9Decoder_s vp9_decoder;*/
/* Frame header parameters read back from the decoder for the current
 * frame.  NOTE(review): a single shared instance — appears to assume
 * one frame is processed at a time; confirm for multi-instance mode.
 */
union param_u vp9_param;
2799
2800/**************************************************
2801 *
2802 *VP9 buffer management end
2803 *
2804 ***************************************************
2805 */
2806
2807
2808#define HEVC_CM_BODY_START_ADDR 0x3626
2809#define HEVC_CM_BODY_LENGTH 0x3627
2810#define HEVC_CM_HEADER_LENGTH 0x3629
2811#define HEVC_CM_HEADER_OFFSET 0x362b
2812
2813#define LOSLESS_COMPRESS_MODE
2814
2815/*#define DECOMP_HEADR_SURGENT*/
2816#ifdef VP9_10B_NV21
2817static u32 mem_map_mode = 2 /* 0:linear 1:32x32 2:64x32*/
2818#else
2819static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
2820#endif
2821static u32 enable_mem_saving = 1;
2822static u32 force_w_h;
2823
2824static u32 force_fps;
2825
2826
2827const u32 vp9_version = 201602101;
2828static u32 debug;
2829static u32 radr;
2830static u32 rval;
2831static u32 pop_shorts;
2832static u32 dbg_cmd;
2833static u32 dbg_skip_decode_index;
2834static u32 endian = 0xff0;
2835#ifdef ERROR_HANDLE_DEBUG
2836static u32 dbg_nal_skip_flag;
2837 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
2838static u32 dbg_nal_skip_count;
2839#endif
2840/*for debug*/
2841static u32 decode_pic_begin;
2842static uint slice_parse_begin;
2843static u32 step;
2844#ifdef MIX_STREAM_SUPPORT
2845static u32 buf_alloc_width = 4096;
2846static u32 buf_alloc_height = 2304;
2847static u32 vp9_max_pic_w = 4096;
2848static u32 vp9_max_pic_h = 2304;
2849
2850static u32 dynamic_buf_num_margin;
2851#else
2852static u32 buf_alloc_width;
2853static u32 buf_alloc_height;
2854static u32 dynamic_buf_num_margin = 7;
2855#endif
2856static u32 buf_alloc_depth = 10;
2857static u32 buf_alloc_size;
2858/*
2859 *bit[0]: 0,
2860 * bit[1]: 0, always release cma buffer when stop
2861 * bit[1]: 1, never release cma buffer when stop
2862 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
 *do not release cma buffer if blackout is not 1
2864 *
2865 *bit[2]: 0, when start decoding, check current displayed buffer
2866 * (only for buffer decoded by vp9) if blackout is 0
2867 * 1, do not check current displayed buffer
2868 *
2869 *bit[3]: 1, if blackout is not 1, do not release current
2870 * displayed cma buffer always.
2871 */
2872/* set to 1 for fast play;
2873 * set to 8 for other case of "keep last frame"
2874 */
2875static u32 buffer_mode = 1;
2876/* buffer_mode_dbg: debug only*/
2877static u32 buffer_mode_dbg = 0xffff0000;
2878/**/
2879
2880/*
2881 *bit 0, 1: only display I picture;
2882 *bit 1, 1: only decode I picture;
2883 */
2884static u32 i_only_flag;
2885
2886static u32 low_latency_flag;
2887
2888static u32 no_head;
2889
2890static u32 max_decoding_time;
2891/*
2892 *error handling
2893 */
2894/*error_handle_policy:
2895 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
2896 *1, skip error_skip_nal_count nals before error recovery;
2897 *bit 1 (valid only when bit0 == 1):
2898 *1, wait vps/sps/pps after error recovery;
2899 *bit 2 (valid only when bit0 == 0):
2900 *0, auto search after error recovery (vp9_recover() called);
2901 *1, manual search after error recovery
2902 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
2903 *
2904 *bit 4: 0, set error_mark after reset/recover
2905 * 1, do not set error_mark after reset/recover
2906 *bit 5: 0, check total lcu for every picture
2907 * 1, do not check total lcu
2908 *
2909 */
2910
2911static u32 error_handle_policy;
2912/*static u32 parser_sei_enable = 1;*/
2913#define MAX_BUF_NUM_NORMAL 12
2914#define MAX_BUF_NUM_LESS 10
2915static u32 max_buf_num = MAX_BUF_NUM_NORMAL;
2916#define MAX_BUF_NUM_SAVE_BUF 8
2917
2918static u32 run_ready_min_buf_num = 2;
2919
2920
2921static DEFINE_MUTEX(vvp9_mutex);
2922#ifndef MULTI_INSTANCE_SUPPORT
2923static struct device *cma_dev;
2924#endif
2925
2926#define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
2927#define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
2928#define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
2929#define VP9_ADAPT_PROB_REG HEVC_ASSIST_SCRATCH_3
2930#define VP9_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_4
2931#define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
2932#define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
2933#define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
2934#define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
2935#define VP9_PROB_SWAP_BUFFER HEVC_ASSIST_SCRATCH_9
2936#define VP9_COUNT_SWAP_BUFFER HEVC_ASSIST_SCRATCH_A
2937#define VP9_SEG_MAP_BUFFER HEVC_ASSIST_SCRATCH_B
2938#define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
2939#define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
2940#define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
2941#define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
2942#define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
2943#ifdef MULTI_INSTANCE_SUPPORT
2944#define HEVC_DECODE_COUNT HEVC_ASSIST_SCRATCH_M
2945#define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
2946#else
2947#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M
2948#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N
2949#endif
2950#define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
2951#define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
2952
2953
2954/*
2955 *ucode parser/search control
2956 *bit 0: 0, header auto parse; 1, header manual parse
 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
2958 *bit [3:2]: valid when bit1==0;
2959 *0, auto skip nal before first vps/sps/pps/idr;
2960 *1, auto skip nal before first vps/sps/pps
2961 *2, auto skip nal before first vps/sps/pps,
2962 * and not decode until the first I slice (with slice address of 0)
2963 *
2964 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
2965 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
2966 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
2967 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
2968 *bit [17]: for NAL_SEI when bit0 is 0:
2969 * 0, do not parse SEI in ucode; 1, parse SEI in ucode
2970 *bit [31:20]: used by ucode for debug purpose
2971 */
2972#define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
2973 /*[31:24] chip feature
2974 31: 0, use MBOX1; 1, use MBOX0
2975 */
2976#define DECODE_MODE HEVC_ASSIST_SCRATCH_J
2977#define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
2978
2979#ifdef MULTI_INSTANCE_SUPPORT
2980#define RPM_BUF_SIZE (0x400 * 2)
2981#else
2982#define RPM_BUF_SIZE (0x80*2)
2983#endif
2984#define LMEM_BUF_SIZE (0x400 * 2)
2985
2986#define WORK_BUF_SPEC_NUM 3
/*
 * Working-buffer layouts, one per supported stream class:
 *   [0] up to 1920x1088, [1] up to 4096x2304, [2] up to 8192x4608.
 * Only .buf_size is filled in here; init_buff_spec() lays the sections
 * out contiguously and assigns the buf_start addresses.
 */
static struct BuffInfo_s amvvp9_workbuff_spec[WORK_BUF_SPEC_NUM] = {
	{
		/* 8M bytes */
		.max_width = 1920,
		.max_height = 1088,
		.ipp = {
			/* IPP work space calculation :
			 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
			 */
			.buf_size = 0x4000,
		},
		.sao_abv = {
			.buf_size = 0x30000,
		},
		.sao_vb = {
			.buf_size = 0x30000,
		},
		.short_term_rps = {
			/* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
			 * total 64x16x2 = 2048 bytes (0x800)
			 */
			.buf_size = 0x800,
		},
		.vps = {
			/* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
			 * total 0x0800 bytes
			 */
			.buf_size = 0x800,
		},
		.sps = {
			/* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
			 * total 0x0800 bytes
			 */
			.buf_size = 0x800,
		},
		.pps = {
			/* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
			 * total 0x2000 bytes
			 */
			.buf_size = 0x2000,
		},
		.sao_up = {
			/* SAO UP STORE AREA - Max 640(10240/16) LCU,
			 * each has 16 bytes total 0x2800 bytes
			 */
			.buf_size = 0x2800,
		},
		.swap_buf = {
			/* 256cyclex64bit = 2K bytes 0x800
			 * (only 144 cycles valid)
			 */
			.buf_size = 0x800,
		},
		.swap_buf2 = {
			.buf_size = 0x800,
		},
		.scalelut = {
			/* support up to 32 SCALELUT 1024x32 =
			 * 32Kbytes (0x8000)
			 */
			.buf_size = 0x8000,
		},
		.dblk_para = {
			/* DBLK -> Max 256(4096/16) LCU,
			 *each para 1024bytes(total:0x40000),
			 *data 1024bytes(total:0x40000)
			 */
			.buf_size = 0x80000,
		},
		.dblk_data = {
			.buf_size = 0x80000,
		},
		.seg_map = {
			/*4096x2304/64/64 *24 = 0xd800 Bytes*/
			.buf_size = 0xd800,
		},
		.mmu_vbh = {
			.buf_size = 0x5000, /*2*16*(more than 2304)/4, 4K*/
		},
#if 0
		.cm_header = {
			/*add one for keeper.*/
			.buf_size = MMU_COMPRESS_HEADER_SIZE *
				(FRAME_BUFFERS + 1),
			/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8) */
		},
#endif
		.mpred_above = {
			.buf_size = 0x10000, /* 2 * size of hevc*/
		},
#ifdef MV_USE_FIXED_BUF
		.mpred_mv = {/* 1080p, 0x40000 per buffer */
			.buf_size = 0x40000 * FRAME_BUFFERS,
		},
#endif
		.rpm = {
			.buf_size = RPM_BUF_SIZE,
		},
		.lmem = {
			.buf_size = 0x400 * 2,
		}
	},
	{
		.max_width = 4096,
		.max_height = 2304,
		.ipp = {
			/* IPP work space calculation :
			 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
			 */
			.buf_size = 0x4000,
		},
		.sao_abv = {
			.buf_size = 0x30000,
		},
		.sao_vb = {
			.buf_size = 0x30000,
		},
		.short_term_rps = {
			/* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
			 * total 64x16x2 = 2048 bytes (0x800)
			 */
			.buf_size = 0x800,
		},
		.vps = {
			/* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
			 * total 0x0800 bytes
			 */
			.buf_size = 0x800,
		},
		.sps = {
			/* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
			 * total 0x0800 bytes
			 */
			.buf_size = 0x800,
		},
		.pps = {
			/* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
			 * total 0x2000 bytes
			 */
			.buf_size = 0x2000,
		},
		.sao_up = {
			/* SAO UP STORE AREA - Max 640(10240/16) LCU,
			 * each has 16 bytes total 0x2800 bytes
			 */
			.buf_size = 0x2800,
		},
		.swap_buf = {
			/* 256cyclex64bit = 2K bytes 0x800
			 * (only 144 cycles valid)
			 */
			.buf_size = 0x800,
		},
		.swap_buf2 = {
			.buf_size = 0x800,
		},
		.scalelut = {
			/* support up to 32 SCALELUT 1024x32 = 32Kbytes
			 * (0x8000)
			 */
			.buf_size = 0x8000,
		},
		.dblk_para = {
			/* DBLK -> Max 256(4096/16) LCU,
			 *each para 1024bytes(total:0x40000),
			 *data 1024bytes(total:0x40000)
			 */
			.buf_size = 0x80000,
		},
		.dblk_data = {
			.buf_size = 0x80000,
		},
		.seg_map = {
			/*4096x2304/64/64 *24 = 0xd800 Bytes*/
			.buf_size = 0xd800,
		},
		.mmu_vbh = {
			.buf_size = 0x5000,/*2*16*(more than 2304)/4, 4K*/
		},
#if 0
		.cm_header = {
			/*add one for keeper.*/
			.buf_size = MMU_COMPRESS_HEADER_SIZE *
				(FRAME_BUFFERS + 1),
			/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8) */
		},
#endif
		.mpred_above = {
			.buf_size = 0x10000, /* 2 * size of hevc*/
		},
#ifdef MV_USE_FIXED_BUF
		.mpred_mv = {
			/* .buf_size = 0x100000*16,
			 * //4k2k , 0x100000 per buffer
			 */
			/* 4096x2304 , 0x120000 per buffer */
			.buf_size = 0x120000 * FRAME_BUFFERS,
		},
#endif
		.rpm = {
			.buf_size = RPM_BUF_SIZE,
		},
		.lmem = {
			.buf_size = 0x400 * 2,
		}
	},
	{
		/* 8K spec: double the 4K dimensions. */
		.max_width = 4096*2,
		.max_height = 2304*2,
		.ipp = {
			// IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
			.buf_size = 0x4000*2,
		},
		.sao_abv = {
			.buf_size = 0x30000*2,
		},
		.sao_vb = {
			.buf_size = 0x30000*2,
		},
		.short_term_rps = {
			// SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
			.buf_size = 0x800,
		},
		.vps = {
			// VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
			.buf_size = 0x800,
		},
		.sps = {
			// SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
			.buf_size = 0x800,
		},
		.pps = {
			// PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
			.buf_size = 0x2000,
		},
		.sao_up = {
			// SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
			.buf_size = 0x2800*2,
		},
		.swap_buf = {
			// 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
			.buf_size = 0x800,
		},
		.swap_buf2 = {
			.buf_size = 0x800,
		},
		.scalelut = {
			// support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
			.buf_size = 0x8000*2,
		},
		.dblk_para = {
			// DBLK -> Max 256(4096/16) LCU, each para 1024bytes(total:0x40000), data 1024bytes(total:0x40000)
			.buf_size = 0x80000*2,
		},
		.dblk_data = {
			.buf_size = 0x80000*2,
		},
		.seg_map = {
			/*4096x2304/64/64 *24 = 0xd800 Bytes*/
			.buf_size = 0xd800*4,
		},
		.mmu_vbh = {
			.buf_size = 0x5000*2, //2*16*(more than 2304)/4, 4K
		},
#if 0
		.cm_header = {
			//.buf_size = MMU_COMPRESS_HEADER_SIZE*8, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
			.buf_size = MMU_COMPRESS_HEADER_SIZE*16, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
		},
#endif
		.mpred_above = {
			.buf_size = 0x10000*2, /* 2 * size of hevc*/
		},
#ifdef MV_USE_FIXED_BUF
		.mpred_mv = {
			//4k2k , 0x100000 per buffer */
			/* 4096x2304 , 0x120000 per buffer */
			.buf_size = 0x120000 * FRAME_BUFFERS * 4,
		},
#endif
		.rpm = {
			.buf_size = RPM_BUF_SIZE,
		},
		.lmem = {
			.buf_size = 0x400 * 2,
		}
	}
};
3275
3276
3277/*Losless compression body buffer size 4K per 64x32 (jt)*/
3278int compute_losless_comp_body_size(int width, int height,
3279 uint8_t is_bit_depth_10)
3280{
3281 int width_x64;
3282 int height_x32;
3283 int bsize;
3284
3285 width_x64 = width + 63;
3286 width_x64 >>= 6;
3287 height_x32 = height + 31;
3288 height_x32 >>= 5;
3289 bsize = (is_bit_depth_10?4096:3200)*width_x64*height_x32;
3290 if (debug & VP9_DEBUG_BUFMGR_MORE)
3291 pr_info("%s(%d,%d,%d)=>%d\n",
3292 __func__, width, height,
3293 is_bit_depth_10, bsize);
3294
3295 return bsize;
3296}
3297
3298/* Losless compression header buffer size 32bytes per 128x64 (jt)*/
3299static int compute_losless_comp_header_size(int width, int height)
3300{
3301 int width_x128;
3302 int height_x64;
3303 int hsize;
3304
3305 width_x128 = width + 127;
3306 width_x128 >>= 7;
3307 height_x64 = height + 63;
3308 height_x64 >>= 6;
3309
3310 hsize = 32 * width_x128 * height_x64;
3311 if (debug & VP9_DEBUG_BUFMGR_MORE)
3312 pr_info("%s(%d,%d)=>%d\n",
3313 __func__, width, height,
3314 hsize);
3315
3316 return hsize;
3317}
3318
3319static void init_buff_spec(struct VP9Decoder_s *pbi,
3320 struct BuffInfo_s *buf_spec)
3321{
3322 void *mem_start_virt;
3323
3324 buf_spec->ipp.buf_start = buf_spec->start_adr;
3325 buf_spec->sao_abv.buf_start =
3326 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
3327
3328 buf_spec->sao_vb.buf_start =
3329 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
3330 buf_spec->short_term_rps.buf_start =
3331 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
3332 buf_spec->vps.buf_start =
3333 buf_spec->short_term_rps.buf_start +
3334 buf_spec->short_term_rps.buf_size;
3335 buf_spec->sps.buf_start =
3336 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
3337 buf_spec->pps.buf_start =
3338 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
3339 buf_spec->sao_up.buf_start =
3340 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
3341 buf_spec->swap_buf.buf_start =
3342 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
3343 buf_spec->swap_buf2.buf_start =
3344 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
3345 buf_spec->scalelut.buf_start =
3346 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
3347 buf_spec->dblk_para.buf_start =
3348 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
3349 buf_spec->dblk_data.buf_start =
3350 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
3351 buf_spec->seg_map.buf_start =
3352 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
3353 if (pbi == NULL || pbi->mmu_enable) {
3354 buf_spec->mmu_vbh.buf_start =
3355 buf_spec->seg_map.buf_start +
3356 buf_spec->seg_map.buf_size;
3357 buf_spec->mpred_above.buf_start =
3358 buf_spec->mmu_vbh.buf_start +
3359 buf_spec->mmu_vbh.buf_size;
3360 } else {
3361 buf_spec->mpred_above.buf_start =
3362 buf_spec->seg_map.buf_start + buf_spec->seg_map.buf_size;
3363 }
3364#ifdef MV_USE_FIXED_BUF
3365 buf_spec->mpred_mv.buf_start =
3366 buf_spec->mpred_above.buf_start +
3367 buf_spec->mpred_above.buf_size;
3368
3369 buf_spec->rpm.buf_start =
3370 buf_spec->mpred_mv.buf_start +
3371 buf_spec->mpred_mv.buf_size;
3372#else
3373 buf_spec->rpm.buf_start =
3374 buf_spec->mpred_above.buf_start +
3375 buf_spec->mpred_above.buf_size;
3376
3377#endif
3378 buf_spec->lmem.buf_start =
3379 buf_spec->rpm.buf_start +
3380 buf_spec->rpm.buf_size;
3381 buf_spec->end_adr =
3382 buf_spec->lmem.buf_start +
3383 buf_spec->lmem.buf_size;
3384
3385 if (!pbi)
3386 return;
3387
3388 if (!vdec_secure(hw_to_vdec(pbi))) {
3389 mem_start_virt =
3390 codec_mm_phys_to_virt(buf_spec->dblk_para.buf_start);
3391 if (mem_start_virt) {
3392 memset(mem_start_virt, 0,
3393 buf_spec->dblk_para.buf_size);
3394 codec_mm_dma_flush(mem_start_virt,
3395 buf_spec->dblk_para.buf_size,
3396 DMA_TO_DEVICE);
3397 } else {
3398 mem_start_virt = codec_mm_vmap(
3399 buf_spec->dblk_para.buf_start,
3400 buf_spec->dblk_para.buf_size);
3401 if (mem_start_virt) {
3402 memset(mem_start_virt, 0,
3403 buf_spec->dblk_para.buf_size);
3404 codec_mm_dma_flush(mem_start_virt,
3405 buf_spec->dblk_para.buf_size,
3406 DMA_TO_DEVICE);
3407 codec_mm_unmap_phyaddr(mem_start_virt);
3408 } else {
3409 /*not virt for tvp playing,
3410 may need clear on ucode.*/
3411 pr_err("mem_start_virt failed\n");
3412 }
3413 }
3414 }
3415
3416 if (debug) {
3417 pr_info("%s workspace (%x %x) size = %x\n", __func__,
3418 buf_spec->start_adr, buf_spec->end_adr,
3419 buf_spec->end_adr - buf_spec->start_adr);
3420 }
3421
3422 if (debug) {
3423 pr_info("ipp.buf_start :%x\n",
3424 buf_spec->ipp.buf_start);
3425 pr_info("sao_abv.buf_start :%x\n",
3426 buf_spec->sao_abv.buf_start);
3427 pr_info("sao_vb.buf_start :%x\n",
3428 buf_spec->sao_vb.buf_start);
3429 pr_info("short_term_rps.buf_start :%x\n",
3430 buf_spec->short_term_rps.buf_start);
3431 pr_info("vps.buf_start :%x\n",
3432 buf_spec->vps.buf_start);
3433 pr_info("sps.buf_start :%x\n",
3434 buf_spec->sps.buf_start);
3435 pr_info("pps.buf_start :%x\n",
3436 buf_spec->pps.buf_start);
3437 pr_info("sao_up.buf_start :%x\n",
3438 buf_spec->sao_up.buf_start);
3439 pr_info("swap_buf.buf_start :%x\n",
3440 buf_spec->swap_buf.buf_start);
3441 pr_info("swap_buf2.buf_start :%x\n",
3442 buf_spec->swap_buf2.buf_start);
3443 pr_info("scalelut.buf_start :%x\n",
3444 buf_spec->scalelut.buf_start);
3445 pr_info("dblk_para.buf_start :%x\n",
3446 buf_spec->dblk_para.buf_start);
3447 pr_info("dblk_data.buf_start :%x\n",
3448 buf_spec->dblk_data.buf_start);
3449 pr_info("seg_map.buf_start :%x\n",
3450 buf_spec->seg_map.buf_start);
3451 if (pbi->mmu_enable) {
3452 pr_info("mmu_vbh.buf_start :%x\n",
3453 buf_spec->mmu_vbh.buf_start);
3454 }
3455 pr_info("mpred_above.buf_start :%x\n",
3456 buf_spec->mpred_above.buf_start);
3457#ifdef MV_USE_FIXED_BUF
3458 pr_info("mpred_mv.buf_start :%x\n",
3459 buf_spec->mpred_mv.buf_start);
3460#endif
3461 if ((debug & VP9_DEBUG_SEND_PARAM_WITH_REG) == 0) {
3462 pr_info("rpm.buf_start :%x\n",
3463 buf_spec->rpm.buf_start);
3464 }
3465 }
3466}
3467
3468/* cache_util.c */
/* MCRCC canvas-mapping algorithm selector; value 4 selects the ALGX
 * scheme — NOTE(review): exact algorithm semantics not visible here,
 * confirm against HEVCD MCRCC hardware documentation.
 */
#define THODIYIL_MCRCC_CANVAS_ALGX 4

static u32 mcrcc_cache_alg_flag = THODIYIL_MCRCC_CANVAS_ALGX;
3472
3473static void mcrcc_perfcount_reset(void)
3474{
3475 if (debug & VP9_DEBUG_CACHE)
3476 pr_info("[cache_util.c] Entered mcrcc_perfcount_reset...\n");
3477 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)0x1);
3478 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)0x0);
3479 return;
3480}
3481
/*
 * Counter totals latched at the previous mcrcc_get_hitrate() call;
 * used there to derive per-interval deltas for the rate printouts.
 */
static unsigned raw_mcr_cnt_total_prev;
static unsigned hit_mcr_0_cnt_total_prev;
static unsigned hit_mcr_1_cnt_total_prev;
static unsigned byp_mcr_cnt_nchcanv_total_prev;
static unsigned byp_mcr_cnt_nchoutwin_total_prev;
3487
/*
 * Read the MCRCC performance counters, print the raw totals plus
 * derived hit/bypass rates, and latch the totals so the next call can
 * report per-call (delta) rates.  All output is gated on the
 * VP9_DEBUG_CACHE debug flag.
 *
 * @reset_pre: when non-zero, clear the latched totals first so this
 *             call's delta rates are computed against zero.
 */
static void mcrcc_get_hitrate(unsigned reset_pre)
{
	unsigned delta_hit_mcr_0_cnt;
	unsigned delta_hit_mcr_1_cnt;
	unsigned delta_raw_mcr_cnt;
	unsigned delta_mcr_cnt_nchcanv;
	unsigned delta_mcr_cnt_nchoutwin;

	unsigned tmp;
	unsigned raw_mcr_cnt;
	unsigned hit_mcr_cnt;
	unsigned byp_mcr_cnt_nchoutwin;
	unsigned byp_mcr_cnt_nchcanv;
	int hitrate;
	if (reset_pre) {
		/* restart delta accounting from zero */
		raw_mcr_cnt_total_prev = 0;
		hit_mcr_0_cnt_total_prev = 0;
		hit_mcr_1_cnt_total_prev = 0;
		byp_mcr_cnt_nchcanv_total_prev = 0;
		byp_mcr_cnt_nchoutwin_total_prev = 0;
	}
	if (debug & VP9_DEBUG_CACHE)
		pr_info("[cache_util.c] Entered mcrcc_get_hitrate...\n");
	/* A counter is selected by writing its index (shifted left by
	 * one) to the perfmon control register, then read back from the
	 * perfmon data register.
	 */
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x0<<1));
	raw_mcr_cnt = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x1<<1));
	hit_mcr_cnt = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x2<<1));
	byp_mcr_cnt_nchoutwin = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x3<<1));
	byp_mcr_cnt_nchcanv = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);

	if (debug & VP9_DEBUG_CACHE)
		pr_info("raw_mcr_cnt_total: %d\n",
			raw_mcr_cnt);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("hit_mcr_cnt_total: %d\n",
			hit_mcr_cnt);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("byp_mcr_cnt_nchoutwin_total: %d\n",
			byp_mcr_cnt_nchoutwin);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("byp_mcr_cnt_nchcanv_total: %d\n",
			byp_mcr_cnt_nchcanv);

	/* deltas since the previous call (HW counters are cumulative) */
	delta_raw_mcr_cnt = raw_mcr_cnt -
		raw_mcr_cnt_total_prev;
	delta_mcr_cnt_nchcanv = byp_mcr_cnt_nchcanv -
		byp_mcr_cnt_nchcanv_total_prev;
	delta_mcr_cnt_nchoutwin = byp_mcr_cnt_nchoutwin -
		byp_mcr_cnt_nchoutwin_total_prev;
	raw_mcr_cnt_total_prev = raw_mcr_cnt;
	byp_mcr_cnt_nchcanv_total_prev = byp_mcr_cnt_nchcanv;
	byp_mcr_cnt_nchoutwin_total_prev = byp_mcr_cnt_nchoutwin;

	/* per-canvas miss/hit counters (indices 4..7) */
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x4<<1));
	tmp = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("miss_mcr_0_cnt_total: %d\n", tmp);
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x5<<1));
	tmp = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("miss_mcr_1_cnt_total: %d\n", tmp);
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x6<<1));
	tmp = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("hit_mcr_0_cnt_total: %d\n", tmp);
	delta_hit_mcr_0_cnt = tmp - hit_mcr_0_cnt_total_prev;
	hit_mcr_0_cnt_total_prev = tmp;
	WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL, (unsigned int)(0x7<<1));
	tmp = READ_VREG(HEVCD_MCRCC_PERFMON_DATA);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("hit_mcr_1_cnt_total: %d\n", tmp);
	delta_hit_mcr_1_cnt = tmp - hit_mcr_1_cnt_total_prev;
	hit_mcr_1_cnt_total_prev = tmp;

	/* per-interval rates, as integer percentages of raw accesses */
	if (delta_raw_mcr_cnt != 0) {
		hitrate = 100 * delta_hit_mcr_0_cnt
			/ delta_raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("CANV0_HIT_RATE : %d\n", hitrate);
		hitrate = 100 * delta_hit_mcr_1_cnt
			/ delta_raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("CANV1_HIT_RATE : %d\n", hitrate);
		hitrate = 100 * delta_mcr_cnt_nchcanv
			/ delta_raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("NONCACH_CANV_BYP_RATE : %d\n", hitrate);
		hitrate = 100 * delta_mcr_cnt_nchoutwin
			/ delta_raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("CACHE_OUTWIN_BYP_RATE : %d\n", hitrate);
	}


	/* cumulative rates since the last counter reset */
	if (raw_mcr_cnt != 0) {
		hitrate = 100 * hit_mcr_cnt / raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("MCRCC_HIT_RATE : %d\n", hitrate);
		hitrate = 100 * (byp_mcr_cnt_nchoutwin + byp_mcr_cnt_nchcanv)
			/ raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("MCRCC_BYP_RATE : %d\n", hitrate);
	} else {
		if (debug & VP9_DEBUG_CACHE)
			pr_info("MCRCC_HIT_RATE : na\n");
		if (debug & VP9_DEBUG_CACHE)
			pr_info("MCRCC_BYP_RATE : na\n");
	}
	return;
}
3600
3601
3602static void decomp_perfcount_reset(void)
3603{
3604 if (debug & VP9_DEBUG_CACHE)
3605 pr_info("[cache_util.c] Entered decomp_perfcount_reset...\n");
3606 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)0x1);
3607 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)0x0);
3608 return;
3609}
3610
/*
 * Read and print the decompressor header-cache (hcache) and data-cache
 * (dcache) access/hit counters along with derived hit rates.  Counters
 * are selected by writing an index (shifted left by one) to the perfmon
 * control register, then reading the perfmon data register.  All output
 * is gated on VP9_DEBUG_CACHE.
 */
static void decomp_get_hitrate(void)
{
	unsigned raw_mcr_cnt;
	unsigned hit_mcr_cnt;
	int hitrate;
	if (debug & VP9_DEBUG_CACHE)
		pr_info("[cache_util.c] Entered decomp_get_hitrate...\n");
	/* indices 0/1: hcache raw accesses and hits */
	WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x0<<1));
	raw_mcr_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA);
	WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x1<<1));
	hit_mcr_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA);

	if (debug & VP9_DEBUG_CACHE)
		pr_info("hcache_raw_cnt_total: %d\n", raw_mcr_cnt);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("hcache_hit_cnt_total: %d\n", hit_mcr_cnt);

	/* guard against div-by-zero when nothing was counted */
	if (raw_mcr_cnt != 0) {
		hitrate = hit_mcr_cnt * 100 / raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("DECOMP_HCACHE_HIT_RATE : %d\n", hitrate);
	} else {
		if (debug & VP9_DEBUG_CACHE)
			pr_info("DECOMP_HCACHE_HIT_RATE : na\n");
	}
	/* indices 2/3: dcache raw accesses and hits */
	WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x2<<1));
	raw_mcr_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA);
	WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x3<<1));
	hit_mcr_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA);

	if (debug & VP9_DEBUG_CACHE)
		pr_info("dcache_raw_cnt_total: %d\n", raw_mcr_cnt);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("dcache_hit_cnt_total: %d\n", hit_mcr_cnt);

	if (raw_mcr_cnt != 0) {
		hitrate = hit_mcr_cnt * 100 / raw_mcr_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("DECOMP_DCACHE_HIT_RATE : %d\n", hitrate);
	} else {
		if (debug & VP9_DEBUG_CACHE)
			pr_info("DECOMP_DCACHE_HIT_RATE : na\n");
	}
	return;
}
3656
/*
 * Read the decompressor compression-statistics counters and print the
 * ratio of compressed blocks (fast + slow paths) to raw uncompressed
 * blocks, as an integer percentage.  Output is gated on
 * VP9_DEBUG_CACHE.
 */
static void decomp_get_comprate(void)
{
	unsigned raw_ucomp_cnt;
	unsigned fast_comp_cnt;
	unsigned slow_comp_cnt;
	int comprate;

	if (debug & VP9_DEBUG_CACHE)
		pr_info("[cache_util.c] Entered decomp_get_comprate...\n");
	/* counter index is written shifted left by one bit */
	WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x4<<1));
	fast_comp_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA);
	WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x5<<1));
	slow_comp_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA);
	WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL, (unsigned int)(0x6<<1));
	raw_ucomp_cnt = READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA);

	if (debug & VP9_DEBUG_CACHE)
		pr_info("decomp_fast_comp_total: %d\n", fast_comp_cnt);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("decomp_slow_comp_total: %d\n", slow_comp_cnt);
	if (debug & VP9_DEBUG_CACHE)
		pr_info("decomp_raw_uncomp_total: %d\n", raw_ucomp_cnt);

	/* guard against div-by-zero when no raw blocks were counted */
	if (raw_ucomp_cnt != 0) {
		comprate = (fast_comp_cnt + slow_comp_cnt)
			* 100 / raw_ucomp_cnt;
		if (debug & VP9_DEBUG_CACHE)
			pr_info("DECOMP_COMP_RATIO : %d\n", comprate);
	} else {
		if (debug & VP9_DEBUG_CACHE)
			pr_info("DECOMP_COMP_RATIO : na\n");
	}
	return;
}
3691/* cache_util.c end */
3692
3693/*====================================================
3694 *========================================================================
3695 *vp9_prob define
3696 *========================================================================
3697 */
3698#define VP9_PARTITION_START 0
3699#define VP9_PARTITION_SIZE_STEP (3 * 4)
3700#define VP9_PARTITION_ONE_SIZE (4 * VP9_PARTITION_SIZE_STEP)
3701#define VP9_PARTITION_KEY_START 0
3702#define VP9_PARTITION_P_START VP9_PARTITION_ONE_SIZE
3703#define VP9_PARTITION_SIZE (2 * VP9_PARTITION_ONE_SIZE)
3704#define VP9_SKIP_START (VP9_PARTITION_START + VP9_PARTITION_SIZE)
3705#define VP9_SKIP_SIZE 4 /* only use 3*/
3706#define VP9_TX_MODE_START (VP9_SKIP_START+VP9_SKIP_SIZE)
3707#define VP9_TX_MODE_8_0_OFFSET 0
3708#define VP9_TX_MODE_8_1_OFFSET 1
3709#define VP9_TX_MODE_16_0_OFFSET 2
3710#define VP9_TX_MODE_16_1_OFFSET 4
3711#define VP9_TX_MODE_32_0_OFFSET 6
3712#define VP9_TX_MODE_32_1_OFFSET 9
3713#define VP9_TX_MODE_SIZE 12
3714#define VP9_COEF_START (VP9_TX_MODE_START+VP9_TX_MODE_SIZE)
3715#define VP9_COEF_BAND_0_OFFSET 0
3716#define VP9_COEF_BAND_1_OFFSET (VP9_COEF_BAND_0_OFFSET + 3 * 3 + 1)
3717#define VP9_COEF_BAND_2_OFFSET (VP9_COEF_BAND_1_OFFSET + 6 * 3)
3718#define VP9_COEF_BAND_3_OFFSET (VP9_COEF_BAND_2_OFFSET + 6 * 3)
3719#define VP9_COEF_BAND_4_OFFSET (VP9_COEF_BAND_3_OFFSET + 6 * 3)
3720#define VP9_COEF_BAND_5_OFFSET (VP9_COEF_BAND_4_OFFSET + 6 * 3)
3721#define VP9_COEF_SIZE_ONE_SET 100 /* ((3 +5*6)*3 + 1 padding)*/
3722#define VP9_COEF_4X4_START (VP9_COEF_START + 0 * VP9_COEF_SIZE_ONE_SET)
3723#define VP9_COEF_8X8_START (VP9_COEF_START + 4 * VP9_COEF_SIZE_ONE_SET)
3724#define VP9_COEF_16X16_START (VP9_COEF_START + 8 * VP9_COEF_SIZE_ONE_SET)
3725#define VP9_COEF_32X32_START (VP9_COEF_START + 12 * VP9_COEF_SIZE_ONE_SET)
3726#define VP9_COEF_SIZE_PLANE (2 * VP9_COEF_SIZE_ONE_SET)
3727#define VP9_COEF_SIZE (4 * 2 * 2 * VP9_COEF_SIZE_ONE_SET)
3728#define VP9_INTER_MODE_START (VP9_COEF_START+VP9_COEF_SIZE)
3729#define VP9_INTER_MODE_SIZE 24 /* only use 21 ( #*7)*/
3730#define VP9_INTERP_START (VP9_INTER_MODE_START+VP9_INTER_MODE_SIZE)
3731#define VP9_INTERP_SIZE 8
3732#define VP9_INTRA_INTER_START (VP9_INTERP_START+VP9_INTERP_SIZE)
3733#define VP9_INTRA_INTER_SIZE 4
3734#define VP9_INTERP_INTRA_INTER_START VP9_INTERP_START
3735#define VP9_INTERP_INTRA_INTER_SIZE (VP9_INTERP_SIZE + VP9_INTRA_INTER_SIZE)
3736#define VP9_COMP_INTER_START \
3737 (VP9_INTERP_INTRA_INTER_START+VP9_INTERP_INTRA_INTER_SIZE)
3738#define VP9_COMP_INTER_SIZE 5
3739#define VP9_COMP_REF_START (VP9_COMP_INTER_START+VP9_COMP_INTER_SIZE)
3740#define VP9_COMP_REF_SIZE 5
3741#define VP9_SINGLE_REF_START (VP9_COMP_REF_START+VP9_COMP_REF_SIZE)
3742#define VP9_SINGLE_REF_SIZE 10
3743#define VP9_REF_MODE_START VP9_COMP_INTER_START
3744#define VP9_REF_MODE_SIZE \
3745 (VP9_COMP_INTER_SIZE+VP9_COMP_REF_SIZE+VP9_SINGLE_REF_SIZE)
3746#define VP9_IF_Y_MODE_START (VP9_REF_MODE_START+VP9_REF_MODE_SIZE)
3747#define VP9_IF_Y_MODE_SIZE 36
3748#define VP9_IF_UV_MODE_START (VP9_IF_Y_MODE_START+VP9_IF_Y_MODE_SIZE)
3749#define VP9_IF_UV_MODE_SIZE 92 /* only use 90*/
3750#define VP9_MV_JOINTS_START (VP9_IF_UV_MODE_START+VP9_IF_UV_MODE_SIZE)
3751#define VP9_MV_JOINTS_SIZE 3
3752#define VP9_MV_SIGN_0_START (VP9_MV_JOINTS_START+VP9_MV_JOINTS_SIZE)
3753#define VP9_MV_SIGN_0_SIZE 1
3754#define VP9_MV_CLASSES_0_START (VP9_MV_SIGN_0_START+VP9_MV_SIGN_0_SIZE)
3755#define VP9_MV_CLASSES_0_SIZE 10
3756#define VP9_MV_CLASS0_0_START (VP9_MV_CLASSES_0_START+VP9_MV_CLASSES_0_SIZE)
3757#define VP9_MV_CLASS0_0_SIZE 1
3758#define VP9_MV_BITS_0_START (VP9_MV_CLASS0_0_START+VP9_MV_CLASS0_0_SIZE)
3759#define VP9_MV_BITS_0_SIZE 10
3760#define VP9_MV_SIGN_1_START (VP9_MV_BITS_0_START+VP9_MV_BITS_0_SIZE)
3761#define VP9_MV_SIGN_1_SIZE 1
3762#define VP9_MV_CLASSES_1_START \
3763 (VP9_MV_SIGN_1_START+VP9_MV_SIGN_1_SIZE)
3764#define VP9_MV_CLASSES_1_SIZE 10
3765#define VP9_MV_CLASS0_1_START \
3766 (VP9_MV_CLASSES_1_START+VP9_MV_CLASSES_1_SIZE)
3767#define VP9_MV_CLASS0_1_SIZE 1
3768#define VP9_MV_BITS_1_START \
3769 (VP9_MV_CLASS0_1_START+VP9_MV_CLASS0_1_SIZE)
3770#define VP9_MV_BITS_1_SIZE 10
3771#define VP9_MV_CLASS0_FP_0_START \
3772 (VP9_MV_BITS_1_START+VP9_MV_BITS_1_SIZE)
3773#define VP9_MV_CLASS0_FP_0_SIZE 9
3774#define VP9_MV_CLASS0_FP_1_START \
3775 (VP9_MV_CLASS0_FP_0_START+VP9_MV_CLASS0_FP_0_SIZE)
3776#define VP9_MV_CLASS0_FP_1_SIZE 9
3777#define VP9_MV_CLASS0_HP_0_START \
3778 (VP9_MV_CLASS0_FP_1_START+VP9_MV_CLASS0_FP_1_SIZE)
3779#define VP9_MV_CLASS0_HP_0_SIZE 2
3780#define VP9_MV_CLASS0_HP_1_START \
3781 (VP9_MV_CLASS0_HP_0_START+VP9_MV_CLASS0_HP_0_SIZE)
3782#define VP9_MV_CLASS0_HP_1_SIZE 2
3783#define VP9_MV_START VP9_MV_JOINTS_START
3784#define VP9_MV_SIZE 72 /*only use 69*/
3785
3786#define VP9_TOTAL_SIZE (VP9_MV_START + VP9_MV_SIZE)
3787
3788
3789/*========================================================================
3790 * vp9_count_mem define
3791 *========================================================================
3792 */
3793#define VP9_COEF_COUNT_START 0
3794#define VP9_COEF_COUNT_BAND_0_OFFSET 0
3795#define VP9_COEF_COUNT_BAND_1_OFFSET \
3796 (VP9_COEF_COUNT_BAND_0_OFFSET + 3*5)
3797#define VP9_COEF_COUNT_BAND_2_OFFSET \
3798 (VP9_COEF_COUNT_BAND_1_OFFSET + 6*5)
3799#define VP9_COEF_COUNT_BAND_3_OFFSET \
3800 (VP9_COEF_COUNT_BAND_2_OFFSET + 6*5)
3801#define VP9_COEF_COUNT_BAND_4_OFFSET \
3802 (VP9_COEF_COUNT_BAND_3_OFFSET + 6*5)
3803#define VP9_COEF_COUNT_BAND_5_OFFSET \
3804 (VP9_COEF_COUNT_BAND_4_OFFSET + 6*5)
3805#define VP9_COEF_COUNT_SIZE_ONE_SET 165 /* ((3 +5*6)*5 */
3806#define VP9_COEF_COUNT_4X4_START \
3807 (VP9_COEF_COUNT_START + 0*VP9_COEF_COUNT_SIZE_ONE_SET)
3808#define VP9_COEF_COUNT_8X8_START \
3809 (VP9_COEF_COUNT_START + 4*VP9_COEF_COUNT_SIZE_ONE_SET)
3810#define VP9_COEF_COUNT_16X16_START \
3811 (VP9_COEF_COUNT_START + 8*VP9_COEF_COUNT_SIZE_ONE_SET)
3812#define VP9_COEF_COUNT_32X32_START \
3813 (VP9_COEF_COUNT_START + 12*VP9_COEF_COUNT_SIZE_ONE_SET)
3814#define VP9_COEF_COUNT_SIZE_PLANE (2 * VP9_COEF_COUNT_SIZE_ONE_SET)
3815#define VP9_COEF_COUNT_SIZE (4 * 2 * 2 * VP9_COEF_COUNT_SIZE_ONE_SET)
3816
3817#define VP9_INTRA_INTER_COUNT_START \
3818 (VP9_COEF_COUNT_START+VP9_COEF_COUNT_SIZE)
3819#define VP9_INTRA_INTER_COUNT_SIZE (4*2)
3820#define VP9_COMP_INTER_COUNT_START \
3821 (VP9_INTRA_INTER_COUNT_START+VP9_INTRA_INTER_COUNT_SIZE)
3822#define VP9_COMP_INTER_COUNT_SIZE (5*2)
3823#define VP9_COMP_REF_COUNT_START \
3824 (VP9_COMP_INTER_COUNT_START+VP9_COMP_INTER_COUNT_SIZE)
3825#define VP9_COMP_REF_COUNT_SIZE (5*2)
3826#define VP9_SINGLE_REF_COUNT_START \
3827 (VP9_COMP_REF_COUNT_START+VP9_COMP_REF_COUNT_SIZE)
3828#define VP9_SINGLE_REF_COUNT_SIZE (10*2)
3829#define VP9_TX_MODE_COUNT_START \
3830 (VP9_SINGLE_REF_COUNT_START+VP9_SINGLE_REF_COUNT_SIZE)
3831#define VP9_TX_MODE_COUNT_SIZE (12*2)
3832#define VP9_SKIP_COUNT_START \
3833 (VP9_TX_MODE_COUNT_START+VP9_TX_MODE_COUNT_SIZE)
3834#define VP9_SKIP_COUNT_SIZE (3*2)
3835#define VP9_MV_SIGN_0_COUNT_START \
3836 (VP9_SKIP_COUNT_START+VP9_SKIP_COUNT_SIZE)
3837#define VP9_MV_SIGN_0_COUNT_SIZE (1*2)
3838#define VP9_MV_SIGN_1_COUNT_START \
3839 (VP9_MV_SIGN_0_COUNT_START+VP9_MV_SIGN_0_COUNT_SIZE)
3840#define VP9_MV_SIGN_1_COUNT_SIZE (1*2)
3841#define VP9_MV_BITS_0_COUNT_START \
3842 (VP9_MV_SIGN_1_COUNT_START+VP9_MV_SIGN_1_COUNT_SIZE)
3843#define VP9_MV_BITS_0_COUNT_SIZE (10*2)
3844#define VP9_MV_BITS_1_COUNT_START \
3845 (VP9_MV_BITS_0_COUNT_START+VP9_MV_BITS_0_COUNT_SIZE)
3846#define VP9_MV_BITS_1_COUNT_SIZE (10*2)
3847#define VP9_MV_CLASS0_HP_0_COUNT_START \
3848 (VP9_MV_BITS_1_COUNT_START+VP9_MV_BITS_1_COUNT_SIZE)
3849#define VP9_MV_CLASS0_HP_0_COUNT_SIZE (2*2)
3850#define VP9_MV_CLASS0_HP_1_COUNT_START \
3851 (VP9_MV_CLASS0_HP_0_COUNT_START+VP9_MV_CLASS0_HP_0_COUNT_SIZE)
3852#define VP9_MV_CLASS0_HP_1_COUNT_SIZE (2*2)
3853/* Start merge_tree*/
3854#define VP9_INTER_MODE_COUNT_START \
3855 (VP9_MV_CLASS0_HP_1_COUNT_START+VP9_MV_CLASS0_HP_1_COUNT_SIZE)
3856#define VP9_INTER_MODE_COUNT_SIZE (7*4)
3857#define VP9_IF_Y_MODE_COUNT_START \
3858 (VP9_INTER_MODE_COUNT_START+VP9_INTER_MODE_COUNT_SIZE)
3859#define VP9_IF_Y_MODE_COUNT_SIZE (10*4)
3860#define VP9_IF_UV_MODE_COUNT_START \
3861 (VP9_IF_Y_MODE_COUNT_START+VP9_IF_Y_MODE_COUNT_SIZE)
3862#define VP9_IF_UV_MODE_COUNT_SIZE (10*10)
3863#define VP9_PARTITION_P_COUNT_START \
3864 (VP9_IF_UV_MODE_COUNT_START+VP9_IF_UV_MODE_COUNT_SIZE)
3865#define VP9_PARTITION_P_COUNT_SIZE (4*4*4)
3866#define VP9_INTERP_COUNT_START \
3867 (VP9_PARTITION_P_COUNT_START+VP9_PARTITION_P_COUNT_SIZE)
3868#define VP9_INTERP_COUNT_SIZE (4*3)
3869#define VP9_MV_JOINTS_COUNT_START \
3870 (VP9_INTERP_COUNT_START+VP9_INTERP_COUNT_SIZE)
3871#define VP9_MV_JOINTS_COUNT_SIZE (1 * 4)
3872#define VP9_MV_CLASSES_0_COUNT_START \
3873 (VP9_MV_JOINTS_COUNT_START+VP9_MV_JOINTS_COUNT_SIZE)
3874#define VP9_MV_CLASSES_0_COUNT_SIZE (1*11)
3875#define VP9_MV_CLASS0_0_COUNT_START \
3876 (VP9_MV_CLASSES_0_COUNT_START+VP9_MV_CLASSES_0_COUNT_SIZE)
3877#define VP9_MV_CLASS0_0_COUNT_SIZE (1*2)
3878#define VP9_MV_CLASSES_1_COUNT_START \
3879 (VP9_MV_CLASS0_0_COUNT_START+VP9_MV_CLASS0_0_COUNT_SIZE)
3880#define VP9_MV_CLASSES_1_COUNT_SIZE (1*11)
3881#define VP9_MV_CLASS0_1_COUNT_START \
3882 (VP9_MV_CLASSES_1_COUNT_START+VP9_MV_CLASSES_1_COUNT_SIZE)
3883#define VP9_MV_CLASS0_1_COUNT_SIZE (1*2)
3884#define VP9_MV_CLASS0_FP_0_COUNT_START \
3885 (VP9_MV_CLASS0_1_COUNT_START+VP9_MV_CLASS0_1_COUNT_SIZE)
3886#define VP9_MV_CLASS0_FP_0_COUNT_SIZE (3*4)
3887#define VP9_MV_CLASS0_FP_1_COUNT_START \
3888 (VP9_MV_CLASS0_FP_0_COUNT_START+VP9_MV_CLASS0_FP_0_COUNT_SIZE)
3889#define VP9_MV_CLASS0_FP_1_COUNT_SIZE (3*4)
3890
3891
3892#define DC_PRED 0 /* Average of above and left pixels*/
3893#define V_PRED 1 /* Vertical*/
3894#define H_PRED 2 /* Horizontal*/
3895#define D45_PRED 3 /*Directional 45 deg = round(arctan(1/1) * 180/pi)*/
3896#define D135_PRED 4 /* Directional 135 deg = 180 - 45*/
3897#define D117_PRED 5 /* Directional 117 deg = 180 - 63*/
3898#define D153_PRED 6 /* Directional 153 deg = 180 - 27*/
3899#define D207_PRED 7 /* Directional 207 deg = 180 + 27*/
3900#define D63_PRED 8 /*Directional 63 deg = round(arctan(2/1) * 180/pi)*/
3901#define TM_PRED 9 /*True-motion*/
3902
/*
 * Clamp a probability value to the valid VP9 range [1, 255].
 */
int clip_prob(int p)
{
	if (p < 1)
		return 1;
	if (p > 255)
		return 255;
	return p;
}
3907
/* Rounding right shift: (value + 2^(n-1)) >> n. */
#define ROUND_POWER_OF_TWO(value, n) \
	(((value) + (1 << ((n) - 1))) >> (n))

/* Saturation point for mode/MV sample counts during prob adaptation. */
#define MODE_MV_COUNT_SAT 20
/* Blend weight (out of 256) indexed by the saturated sample count. */
static const int count_to_update_factor[MODE_MV_COUNT_SAT + 1] = {
	0, 6, 12, 19, 25, 32, 38, 44, 51, 57, 64,
	70, 76, 83, 89, 96, 102, 108, 115, 121, 128
};
3916
3917void vp9_tree_merge_probs(unsigned int *prev_prob, unsigned int *cur_prob,
3918 int coef_node_start, int tree_left, int tree_right, int tree_i,
3919 int node) {
3920
3921 int prob_32, prob_res, prob_shift;
3922 int pre_prob, new_prob;
3923 int den, m_count, get_prob, factor;
3924
3925 prob_32 = prev_prob[coef_node_start / 4 * 2];
3926 prob_res = coef_node_start & 3;
3927 prob_shift = prob_res * 8;
3928 pre_prob = (prob_32 >> prob_shift) & 0xff;
3929
3930 den = tree_left + tree_right;
3931
3932 if (den == 0)
3933 new_prob = pre_prob;
3934 else {
3935 m_count = (den < MODE_MV_COUNT_SAT) ?
3936 den : MODE_MV_COUNT_SAT;
3937 get_prob = clip_prob(
3938 div_r32(((int64_t)tree_left * 256 + (den >> 1)),
3939 den));
3940 /*weighted_prob*/
3941 factor = count_to_update_factor[m_count];
3942 new_prob = ROUND_POWER_OF_TWO(pre_prob * (256 - factor)
3943 + get_prob * factor, 8);
3944 }
3945 cur_prob[coef_node_start / 4 * 2] = (cur_prob[coef_node_start / 4 * 2]
3946 & (~(0xff << prob_shift))) | (new_prob << prob_shift);
3947
3948 /*pr_info(" - [%d][%d] 0x%02X --> 0x%02X (0x%X 0x%X) (%X)\n",
3949 *tree_i, node, pre_prob, new_prob, tree_left, tree_right,
3950 *cur_prob[coef_node_start/4*2]);
3951 */
3952}
3953
3954
3955/*void adapt_coef_probs(void)*/
3956void adapt_coef_probs(int pic_count, int prev_kf, int cur_kf, int pre_fc,
3957 unsigned int *prev_prob, unsigned int *cur_prob, unsigned int *count)
3958{
3959 /* 80 * 64bits = 0xF00 ( use 0x1000 4K bytes)
3960 *unsigned int prev_prob[496*2];
3961 *unsigned int cur_prob[496*2];
3962 *0x300 * 128bits = 0x3000 (32K Bytes)
3963 *unsigned int count[0x300*4];
3964 */
3965
3966 int tx_size, coef_tx_size_start, coef_count_tx_size_start;
3967 int plane, coef_plane_start, coef_count_plane_start;
3968 int type, coef_type_start, coef_count_type_start;
3969 int band, coef_band_start, coef_count_band_start;
3970 int cxt_num;
3971 int cxt, coef_cxt_start, coef_count_cxt_start;
3972 int node, coef_node_start, coef_count_node_start;
3973
3974 int tree_i, tree_left, tree_right;
3975 int mvd_i;
3976
3977 int count_sat = 24;
3978 /*int update_factor = 112;*/ /*If COEF_MAX_UPDATE_FACTOR_AFTER_KEY,
3979 *use 128
3980 */
3981 /* If COEF_MAX_UPDATE_FACTOR_AFTER_KEY, use 128*/
3982 /*int update_factor = (pic_count == 1) ? 128 : 112;*/
3983 int update_factor = cur_kf ? 112 :
3984 prev_kf ? 128 : 112;
3985
3986 int prob_32;
3987 int prob_res;
3988 int prob_shift;
3989 int pre_prob;
3990
3991 int num, den;
3992 int get_prob;
3993 int m_count;
3994 int factor;
3995
3996 int new_prob;
3997
3998 if (debug & VP9_DEBUG_MERGE)
3999 pr_info
4000 ("\n ##adapt_coef_probs (pre_fc : %d ,prev_kf : %d,cur_kf : %d)##\n\n",
4001 pre_fc, prev_kf, cur_kf);
4002
4003 /*adapt_coef_probs*/
4004 for (tx_size = 0; tx_size < 4; tx_size++) {
4005 coef_tx_size_start = VP9_COEF_START
4006 + tx_size * 4 * VP9_COEF_SIZE_ONE_SET;
4007 coef_count_tx_size_start = VP9_COEF_COUNT_START
4008 + tx_size * 4 * VP9_COEF_COUNT_SIZE_ONE_SET;
4009 coef_plane_start = coef_tx_size_start;
4010 coef_count_plane_start = coef_count_tx_size_start;
4011 for (plane = 0; plane < 2; plane++) {
4012 coef_type_start = coef_plane_start;
4013 coef_count_type_start = coef_count_plane_start;
4014 for (type = 0; type < 2; type++) {
4015 coef_band_start = coef_type_start;
4016 coef_count_band_start = coef_count_type_start;
4017 for (band = 0; band < 6; band++) {
4018 if (band == 0)
4019 cxt_num = 3;
4020 else
4021 cxt_num = 6;
4022 coef_cxt_start = coef_band_start;
4023 coef_count_cxt_start =
4024 coef_count_band_start;
4025 for (cxt = 0; cxt < cxt_num; cxt++) {
4026 const int n0 =
4027 count[coef_count_cxt_start];
4028 const int n1 =
4029 count[coef_count_cxt_start + 1];
4030 const int n2 =
4031 count[coef_count_cxt_start + 2];
4032 const int neob =
4033 count[coef_count_cxt_start + 3];
4034 const int nneob =
4035 count[coef_count_cxt_start + 4];
4036 const unsigned int
4037 branch_ct[3][2] = {
4038 { neob, nneob },
4039 { n0, n1 + n2 },
4040 { n1, n2 }
4041 };
4042 coef_node_start =
4043 coef_cxt_start;
4044 for
4045 (node = 0; node < 3; node++) {
4046 prob_32 =
4047 prev_prob[
4048 coef_node_start
4049 / 4 * 2];
4050 prob_res =
4051 coef_node_start & 3;
4052 prob_shift =
4053 prob_res * 8;
4054 pre_prob =
4055 (prob_32 >> prob_shift)
4056 & 0xff;
4057
4058 /*get_binary_prob*/
4059 num =
4060 branch_ct[node][0];
4061 den =
4062 branch_ct[node][0] +
4063 branch_ct[node][1];
4064 m_count = (den <
4065 count_sat)
4066 ? den : count_sat;
4067
4068 get_prob =
4069 (den == 0) ? 128u :
4070 clip_prob(
4071 div_r32(((int64_t)
4072 num * 256
4073 + (den >> 1)),
4074 den));
4075
4076 factor =
4077 update_factor * m_count
4078 / count_sat;
4079 new_prob =
4080 ROUND_POWER_OF_TWO
4081 (pre_prob *
4082 (256 - factor) +
4083 get_prob * factor, 8);
4084
4085 cur_prob[coef_node_start
4086 / 4 * 2] =
4087 (cur_prob
4088 [coef_node_start
4089 / 4 * 2] & (~(0xff <<
4090 prob_shift))) |
4091 (new_prob <<
4092 prob_shift);
4093
4094 coef_node_start += 1;
4095 }
4096
4097 coef_cxt_start =
4098 coef_cxt_start + 3;
4099 coef_count_cxt_start =
4100 coef_count_cxt_start
4101 + 5;
4102 }
4103 if (band == 0) {
4104 coef_band_start += 10;
4105 coef_count_band_start += 15;
4106 } else {
4107 coef_band_start += 18;
4108 coef_count_band_start += 30;
4109 }
4110 }
4111 coef_type_start += VP9_COEF_SIZE_ONE_SET;
4112 coef_count_type_start +=
4113 VP9_COEF_COUNT_SIZE_ONE_SET;
4114 }
4115 coef_plane_start += 2 * VP9_COEF_SIZE_ONE_SET;
4116 coef_count_plane_start +=
4117 2 * VP9_COEF_COUNT_SIZE_ONE_SET;
4118 }
4119 }
4120
4121 if (cur_kf == 0) {
4122 /*mode_mv_merge_probs - merge_intra_inter_prob*/
4123 for (coef_count_node_start = VP9_INTRA_INTER_COUNT_START;
4124 coef_count_node_start < (VP9_MV_CLASS0_HP_1_COUNT_START +
4125 VP9_MV_CLASS0_HP_1_COUNT_SIZE); coef_count_node_start += 2) {
4126
4127 if (coef_count_node_start ==
4128 VP9_INTRA_INTER_COUNT_START) {
4129 if (debug & VP9_DEBUG_MERGE)
4130 pr_info(" # merge_intra_inter_prob\n");
4131 coef_node_start = VP9_INTRA_INTER_START;
4132 } else if (coef_count_node_start ==
4133 VP9_COMP_INTER_COUNT_START) {
4134 if (debug & VP9_DEBUG_MERGE)
4135 pr_info(" # merge_comp_inter_prob\n");
4136 coef_node_start = VP9_COMP_INTER_START;
4137 }
4138 /*
4139 *else if (coef_count_node_start ==
4140 * VP9_COMP_REF_COUNT_START) {
4141 * pr_info(" # merge_comp_inter_prob\n");
4142 * coef_node_start = VP9_COMP_REF_START;
4143 *}
4144 *else if (coef_count_node_start ==
4145 * VP9_SINGLE_REF_COUNT_START) {
4146 * pr_info(" # merge_comp_inter_prob\n");
4147 * coef_node_start = VP9_SINGLE_REF_START;
4148 *}
4149 */
4150 else if (coef_count_node_start ==
4151 VP9_TX_MODE_COUNT_START) {
4152 if (debug & VP9_DEBUG_MERGE)
4153 pr_info(" # merge_tx_mode_probs\n");
4154 coef_node_start = VP9_TX_MODE_START;
4155 } else if (coef_count_node_start ==
4156 VP9_SKIP_COUNT_START) {
4157 if (debug & VP9_DEBUG_MERGE)
4158 pr_info(" # merge_skip_probs\n");
4159 coef_node_start = VP9_SKIP_START;
4160 } else if (coef_count_node_start ==
4161 VP9_MV_SIGN_0_COUNT_START) {
4162 if (debug & VP9_DEBUG_MERGE)
4163 pr_info(" # merge_sign_0\n");
4164 coef_node_start = VP9_MV_SIGN_0_START;
4165 } else if (coef_count_node_start ==
4166 VP9_MV_SIGN_1_COUNT_START) {
4167 if (debug & VP9_DEBUG_MERGE)
4168 pr_info(" # merge_sign_1\n");
4169 coef_node_start = VP9_MV_SIGN_1_START;
4170 } else if (coef_count_node_start ==
4171 VP9_MV_BITS_0_COUNT_START) {
4172 if (debug & VP9_DEBUG_MERGE)
4173 pr_info(" # merge_bits_0\n");
4174 coef_node_start = VP9_MV_BITS_0_START;
4175 } else if (coef_count_node_start ==
4176 VP9_MV_BITS_1_COUNT_START) {
4177 if (debug & VP9_DEBUG_MERGE)
4178 pr_info(" # merge_bits_1\n");
4179 coef_node_start = VP9_MV_BITS_1_START;
4180 } else if (coef_count_node_start ==
4181 VP9_MV_CLASS0_HP_0_COUNT_START) {
4182 if (debug & VP9_DEBUG_MERGE)
4183 pr_info(" # merge_class0_hp\n");
4184 coef_node_start = VP9_MV_CLASS0_HP_0_START;
4185 }
4186
4187
4188 den = count[coef_count_node_start] +
4189 count[coef_count_node_start + 1];
4190
4191 prob_32 = prev_prob[coef_node_start / 4 * 2];
4192 prob_res = coef_node_start & 3;
4193 prob_shift = prob_res * 8;
4194 pre_prob = (prob_32 >> prob_shift) & 0xff;
4195
4196 if (den == 0)
4197 new_prob = pre_prob;
4198 else {
4199 m_count = (den < MODE_MV_COUNT_SAT) ?
4200 den : MODE_MV_COUNT_SAT;
4201 get_prob =
4202 clip_prob(
4203 div_r32(((int64_t)count[coef_count_node_start]
4204 * 256 + (den >> 1)),
4205 den));
4206 /*weighted_prob*/
4207 factor = count_to_update_factor[m_count];
4208 new_prob =
4209 ROUND_POWER_OF_TWO(pre_prob * (256 - factor)
4210 + get_prob * factor, 8);
4211 }
4212 cur_prob[coef_node_start / 4 * 2] =
4213 (cur_prob[coef_node_start / 4 * 2] &
4214 (~(0xff << prob_shift)))
4215 | (new_prob << prob_shift);
4216
4217 coef_node_start = coef_node_start + 1;
4218 }
4219 if (debug & VP9_DEBUG_MERGE)
4220 pr_info(" # merge_vp9_inter_mode_tree\n");
4221 coef_node_start = VP9_INTER_MODE_START;
4222 coef_count_node_start = VP9_INTER_MODE_COUNT_START;
4223 for (tree_i = 0; tree_i < 7; tree_i++) {
4224 for (node = 0; node < 3; node++) {
4225 switch (node) {
4226 case 2:
4227 tree_left =
4228 count[coef_count_node_start + 1];
4229 tree_right =
4230 count[coef_count_node_start + 3];
4231 break;
4232 case 1:
4233 tree_left =
4234 count[coef_count_node_start + 0];
4235 tree_right =
4236 count[coef_count_node_start + 1]
4237 + count[coef_count_node_start + 3];
4238 break;
4239 default:
4240 tree_left =
4241 count[coef_count_node_start + 2];
4242 tree_right =
4243 count[coef_count_node_start + 0]
4244 + count[coef_count_node_start + 1]
4245 + count[coef_count_node_start + 3];
4246 break;
4247
4248 }
4249
4250 vp9_tree_merge_probs(prev_prob, cur_prob,
4251 coef_node_start, tree_left, tree_right,
4252 tree_i, node);
4253
4254 coef_node_start = coef_node_start + 1;
4255 }
4256 coef_count_node_start = coef_count_node_start + 4;
4257 }
4258 if (debug & VP9_DEBUG_MERGE)
4259 pr_info(" # merge_vp9_intra_mode_tree\n");
4260 coef_node_start = VP9_IF_Y_MODE_START;
4261 coef_count_node_start = VP9_IF_Y_MODE_COUNT_START;
4262 for (tree_i = 0; tree_i < 14; tree_i++) {
4263 for (node = 0; node < 9; node++) {
4264 switch (node) {
4265 case 8:
4266 tree_left =
4267 count[coef_count_node_start+D153_PRED];
4268 tree_right =
4269 count[coef_count_node_start+D207_PRED];
4270 break;
4271 case 7:
4272 tree_left =
4273 count[coef_count_node_start+D63_PRED];
4274 tree_right =
4275 count[coef_count_node_start+D207_PRED] +
4276 count[coef_count_node_start+D153_PRED];
4277 break;
4278 case 6:
4279 tree_left =
4280 count[coef_count_node_start + D45_PRED];
4281 tree_right =
4282 count[coef_count_node_start+D207_PRED] +
4283 count[coef_count_node_start+D153_PRED] +
4284 count[coef_count_node_start+D63_PRED];
4285 break;
4286 case 5:
4287 tree_left =
4288 count[coef_count_node_start+D135_PRED];
4289 tree_right =
4290 count[coef_count_node_start+D117_PRED];
4291 break;
4292 case 4:
4293 tree_left =
4294 count[coef_count_node_start+H_PRED];
4295 tree_right =
4296 count[coef_count_node_start+D117_PRED] +
4297 count[coef_count_node_start+D135_PRED];
4298 break;
4299 case 3:
4300 tree_left =
4301 count[coef_count_node_start+H_PRED] +
4302 count[coef_count_node_start+D117_PRED] +
4303 count[coef_count_node_start+D135_PRED];
4304 tree_right =
4305 count[coef_count_node_start+D45_PRED] +
4306 count[coef_count_node_start+D207_PRED] +
4307 count[coef_count_node_start+D153_PRED] +
4308 count[coef_count_node_start+D63_PRED];
4309 break;
4310 case 2:
4311 tree_left =
4312 count[coef_count_node_start+V_PRED];
4313 tree_right =
4314 count[coef_count_node_start+H_PRED] +
4315 count[coef_count_node_start+D117_PRED] +
4316 count[coef_count_node_start+D135_PRED] +
4317 count[coef_count_node_start+D45_PRED] +
4318 count[coef_count_node_start+D207_PRED] +
4319 count[coef_count_node_start+D153_PRED] +
4320 count[coef_count_node_start+D63_PRED];
4321 break;
4322 case 1:
4323 tree_left =
4324 count[coef_count_node_start+TM_PRED];
4325 tree_right =
4326 count[coef_count_node_start+V_PRED] +
4327 count[coef_count_node_start+H_PRED] +
4328 count[coef_count_node_start+D117_PRED] +
4329 count[coef_count_node_start+D135_PRED] +
4330 count[coef_count_node_start+D45_PRED] +
4331 count[coef_count_node_start+D207_PRED] +
4332 count[coef_count_node_start+D153_PRED] +
4333 count[coef_count_node_start+D63_PRED];
4334 break;
4335 default:
4336 tree_left =
4337 count[coef_count_node_start+DC_PRED];
4338 tree_right =
4339 count[coef_count_node_start+TM_PRED] +
4340 count[coef_count_node_start+V_PRED] +
4341 count[coef_count_node_start+H_PRED] +
4342 count[coef_count_node_start+D117_PRED] +
4343 count[coef_count_node_start+D135_PRED] +
4344 count[coef_count_node_start+D45_PRED] +
4345 count[coef_count_node_start+D207_PRED] +
4346 count[coef_count_node_start+D153_PRED] +
4347 count[coef_count_node_start+D63_PRED];
4348 break;
4349
4350 }
4351
4352 vp9_tree_merge_probs(prev_prob, cur_prob,
4353 coef_node_start, tree_left, tree_right,
4354 tree_i, node);
4355
4356 coef_node_start = coef_node_start + 1;
4357 }
4358 coef_count_node_start = coef_count_node_start + 10;
4359 }
4360
4361 if (debug & VP9_DEBUG_MERGE)
4362 pr_info(" # merge_vp9_partition_tree\n");
4363 coef_node_start = VP9_PARTITION_P_START;
4364 coef_count_node_start = VP9_PARTITION_P_COUNT_START;
4365 for (tree_i = 0; tree_i < 16; tree_i++) {
4366 for (node = 0; node < 3; node++) {
4367 switch (node) {
4368 case 2:
4369 tree_left =
4370 count[coef_count_node_start + 2];
4371 tree_right =
4372 count[coef_count_node_start + 3];
4373 break;
4374 case 1:
4375 tree_left =
4376 count[coef_count_node_start + 1];
4377 tree_right =
4378 count[coef_count_node_start + 2] +
4379 count[coef_count_node_start + 3];
4380 break;
4381 default:
4382 tree_left =
4383 count[coef_count_node_start + 0];
4384 tree_right =
4385 count[coef_count_node_start + 1] +
4386 count[coef_count_node_start + 2] +
4387 count[coef_count_node_start + 3];
4388 break;
4389
4390 }
4391
4392 vp9_tree_merge_probs(prev_prob, cur_prob,
4393 coef_node_start,
4394 tree_left, tree_right, tree_i, node);
4395
4396 coef_node_start = coef_node_start + 1;
4397 }
4398 coef_count_node_start = coef_count_node_start + 4;
4399 }
4400
4401 if (debug & VP9_DEBUG_MERGE)
4402 pr_info(" # merge_vp9_switchable_interp_tree\n");
4403 coef_node_start = VP9_INTERP_START;
4404 coef_count_node_start = VP9_INTERP_COUNT_START;
4405 for (tree_i = 0; tree_i < 4; tree_i++) {
4406 for (node = 0; node < 2; node++) {
4407 switch (node) {
4408 case 1:
4409 tree_left =
4410 count[coef_count_node_start + 1];
4411 tree_right =
4412 count[coef_count_node_start + 2];
4413 break;
4414 default:
4415 tree_left =
4416 count[coef_count_node_start + 0];
4417 tree_right =
4418 count[coef_count_node_start + 1] +
4419 count[coef_count_node_start + 2];
4420 break;
4421
4422 }
4423
4424 vp9_tree_merge_probs(prev_prob, cur_prob,
4425 coef_node_start,
4426 tree_left, tree_right, tree_i, node);
4427
4428 coef_node_start = coef_node_start + 1;
4429 }
4430 coef_count_node_start = coef_count_node_start + 3;
4431 }
4432
4433 if (debug & VP9_DEBUG_MERGE)
4434 pr_info("# merge_vp9_mv_joint_tree\n");
4435 coef_node_start = VP9_MV_JOINTS_START;
4436 coef_count_node_start = VP9_MV_JOINTS_COUNT_START;
4437 for (tree_i = 0; tree_i < 1; tree_i++) {
4438 for (node = 0; node < 3; node++) {
4439 switch (node) {
4440 case 2:
4441 tree_left =
4442 count[coef_count_node_start + 2];
4443 tree_right =
4444 count[coef_count_node_start + 3];
4445 break;
4446 case 1:
4447 tree_left =
4448 count[coef_count_node_start + 1];
4449 tree_right =
4450 count[coef_count_node_start + 2] +
4451 count[coef_count_node_start + 3];
4452 break;
4453 default:
4454 tree_left =
4455 count[coef_count_node_start + 0];
4456 tree_right =
4457 count[coef_count_node_start + 1] +
4458 count[coef_count_node_start + 2] +
4459 count[coef_count_node_start + 3];
4460 break;
4461 }
4462
4463 vp9_tree_merge_probs(prev_prob, cur_prob,
4464 coef_node_start,
4465 tree_left, tree_right, tree_i, node);
4466
4467 coef_node_start = coef_node_start + 1;
4468 }
4469 coef_count_node_start = coef_count_node_start + 4;
4470 }
4471
4472 for (mvd_i = 0; mvd_i < 2; mvd_i++) {
4473 if (debug & VP9_DEBUG_MERGE)
4474 pr_info(" # merge_vp9_mv_class_tree [%d] -\n", mvd_i);
4475 coef_node_start =
4476 mvd_i ? VP9_MV_CLASSES_1_START : VP9_MV_CLASSES_0_START;
4477 coef_count_node_start =
4478 mvd_i ? VP9_MV_CLASSES_1_COUNT_START
4479 : VP9_MV_CLASSES_0_COUNT_START;
4480 tree_i = 0;
4481 for (node = 0; node < 10; node++) {
4482 switch (node) {
4483 case 9:
4484 tree_left =
4485 count[coef_count_node_start + 9];
4486 tree_right =
4487 count[coef_count_node_start + 10];
4488 break;
4489 case 8:
4490 tree_left =
4491 count[coef_count_node_start + 7];
4492 tree_right =
4493 count[coef_count_node_start + 8];
4494 break;
4495 case 7:
4496 tree_left =
4497 count[coef_count_node_start + 7] +
4498 count[coef_count_node_start + 8];
4499 tree_right =
4500 count[coef_count_node_start + 9] +
4501 count[coef_count_node_start + 10];
4502 break;
4503 case 6:
4504 tree_left =
4505 count[coef_count_node_start + 6];
4506 tree_right =
4507 count[coef_count_node_start + 7] +
4508 count[coef_count_node_start + 8] +
4509 count[coef_count_node_start + 9] +
4510 count[coef_count_node_start + 10];
4511 break;
4512 case 5:
4513 tree_left =
4514 count[coef_count_node_start + 4];
4515 tree_right =
4516 count[coef_count_node_start + 5];
4517 break;
4518 case 4:
4519 tree_left =
4520 count[coef_count_node_start + 4] +
4521 count[coef_count_node_start + 5];
4522 tree_right =
4523 count[coef_count_node_start + 6] +
4524 count[coef_count_node_start + 7] +
4525 count[coef_count_node_start + 8] +
4526 count[coef_count_node_start + 9] +
4527 count[coef_count_node_start + 10];
4528 break;
4529 case 3:
4530 tree_left =
4531 count[coef_count_node_start + 2];
4532 tree_right =
4533 count[coef_count_node_start + 3];
4534 break;
4535 case 2:
4536 tree_left =
4537 count[coef_count_node_start + 2] +
4538 count[coef_count_node_start + 3];
4539 tree_right =
4540 count[coef_count_node_start + 4] +
4541 count[coef_count_node_start + 5] +
4542 count[coef_count_node_start + 6] +
4543 count[coef_count_node_start + 7] +
4544 count[coef_count_node_start + 8] +
4545 count[coef_count_node_start + 9] +
4546 count[coef_count_node_start + 10];
4547 break;
4548 case 1:
4549 tree_left =
4550 count[coef_count_node_start + 1];
4551 tree_right =
4552 count[coef_count_node_start + 2] +
4553 count[coef_count_node_start + 3] +
4554 count[coef_count_node_start + 4] +
4555 count[coef_count_node_start + 5] +
4556 count[coef_count_node_start + 6] +
4557 count[coef_count_node_start + 7] +
4558 count[coef_count_node_start + 8] +
4559 count[coef_count_node_start + 9] +
4560 count[coef_count_node_start + 10];
4561 break;
4562 default:
4563 tree_left =
4564 count[coef_count_node_start + 0];
4565 tree_right =
4566 count[coef_count_node_start + 1] +
4567 count[coef_count_node_start + 2] +
4568 count[coef_count_node_start + 3] +
4569 count[coef_count_node_start + 4] +
4570 count[coef_count_node_start + 5] +
4571 count[coef_count_node_start + 6] +
4572 count[coef_count_node_start + 7] +
4573 count[coef_count_node_start + 8] +
4574 count[coef_count_node_start + 9] +
4575 count[coef_count_node_start + 10];
4576 break;
4577
4578 }
4579
4580 vp9_tree_merge_probs(prev_prob, cur_prob,
4581 coef_node_start, tree_left, tree_right,
4582 tree_i, node);
4583
4584 coef_node_start = coef_node_start + 1;
4585 }
4586
4587 if (debug & VP9_DEBUG_MERGE)
4588 pr_info(" # merge_vp9_mv_class0_tree [%d] -\n", mvd_i);
4589 coef_node_start =
4590 mvd_i ? VP9_MV_CLASS0_1_START : VP9_MV_CLASS0_0_START;
4591 coef_count_node_start =
4592 mvd_i ? VP9_MV_CLASS0_1_COUNT_START :
4593 VP9_MV_CLASS0_0_COUNT_START;
4594 tree_i = 0;
4595 node = 0;
4596 tree_left = count[coef_count_node_start + 0];
4597 tree_right = count[coef_count_node_start + 1];
4598
4599 vp9_tree_merge_probs(prev_prob, cur_prob, coef_node_start,
4600 tree_left, tree_right, tree_i, node);
4601 if (debug & VP9_DEBUG_MERGE)
4602 pr_info(" # merge_vp9_mv_fp_tree_class0_fp [%d] -\n",
4603 mvd_i);
4604 coef_node_start =
4605 mvd_i ? VP9_MV_CLASS0_FP_1_START :
4606 VP9_MV_CLASS0_FP_0_START;
4607 coef_count_node_start =
4608 mvd_i ? VP9_MV_CLASS0_FP_1_COUNT_START :
4609 VP9_MV_CLASS0_FP_0_COUNT_START;
4610 for (tree_i = 0; tree_i < 3; tree_i++) {
4611 for (node = 0; node < 3; node++) {
4612 switch (node) {
4613 case 2:
4614 tree_left =
4615 count[coef_count_node_start + 2];
4616 tree_right =
4617 count[coef_count_node_start + 3];
4618 break;
4619 case 1:
4620 tree_left =
4621 count[coef_count_node_start + 1];
4622 tree_right =
4623 count[coef_count_node_start + 2]
4624 + count[coef_count_node_start + 3];
4625 break;
4626 default:
4627 tree_left =
4628 count[coef_count_node_start + 0];
4629 tree_right =
4630 count[coef_count_node_start + 1]
4631 + count[coef_count_node_start + 2]
4632 + count[coef_count_node_start + 3];
4633 break;
4634
4635 }
4636
4637 vp9_tree_merge_probs(prev_prob, cur_prob,
4638 coef_node_start, tree_left, tree_right,
4639 tree_i, node);
4640
4641 coef_node_start = coef_node_start + 1;
4642 }
4643 coef_count_node_start = coef_count_node_start + 4;
4644 }
4645
4646 } /* for mvd_i (mvd_y or mvd_x)*/
4647}
4648
4649}
4650
4651
4652static void uninit_mmu_buffers(struct VP9Decoder_s *pbi)
4653{
4654#ifndef MV_USE_FIXED_BUF
4655 dealloc_mv_bufs(pbi);
4656#endif
4657 if (pbi->mmu_box)
4658 decoder_mmu_box_free(pbi->mmu_box);
4659 pbi->mmu_box = NULL;
4660
4661 if (pbi->bmmu_box)
4662 decoder_bmmu_box_free(pbi->bmmu_box);
4663 pbi->bmmu_box = NULL;
4664}
4665
4666
4667static int config_pic(struct VP9Decoder_s *pbi,
4668 struct PIC_BUFFER_CONFIG_s *pic_config)
4669{
4670 int ret = -1;
4671 int i;
4672 int pic_width = pbi->is_used_v4l ? pbi->frame_width : pbi->init_pic_w;
4673 int pic_height = pbi->is_used_v4l ? pbi->frame_height : pbi->init_pic_h;
4674 int lcu_size = 64; /*fixed 64*/
4675 int pic_width_64 = (pic_width + 63) & (~0x3f);
4676 int pic_height_32 = (pic_height + 31) & (~0x1f);
4677 int pic_width_lcu = (pic_width_64 % lcu_size) ?
4678 pic_width_64 / lcu_size + 1
4679 : pic_width_64 / lcu_size;
4680 int pic_height_lcu = (pic_height_32 % lcu_size) ?
4681 pic_height_32 / lcu_size + 1
4682 : pic_height_32 / lcu_size;
4683 int lcu_total = pic_width_lcu * pic_height_lcu;
4684#ifdef MV_USE_FIXED_BUF
4685 u32 mpred_mv_end = pbi->work_space_buf->mpred_mv.buf_start +
4686 pbi->work_space_buf->mpred_mv.buf_size;
4687#endif
4688 u32 y_adr = 0;
4689 int buf_size = 0;
4690
4691 int losless_comp_header_size =
4692 compute_losless_comp_header_size(pic_width,
4693 pic_height);
4694 int losless_comp_body_size = compute_losless_comp_body_size(pic_width,
4695 pic_height, buf_alloc_depth == 10);
4696 int mc_buffer_size = losless_comp_header_size + losless_comp_body_size;
4697 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
4698 int mc_buffer_size_u_v = 0;
4699 int mc_buffer_size_u_v_h = 0;
4700 int dw_mode = get_double_write_mode_init(pbi);
4701 int pic_width_align = 0;
4702 int pic_height_align = 0;
4703 struct vdec_v4l2_buffer *fb = NULL;
4704
4705 pbi->lcu_total = lcu_total;
4706
4707 if (dw_mode) {
4708 int pic_width_dw = pic_width /
4709 get_double_write_ratio(pbi, dw_mode);
4710 int pic_height_dw = pic_height /
4711 get_double_write_ratio(pbi, dw_mode);
4712
4713 int pic_width_64_dw = (pic_width_dw + 63) & (~0x3f);
4714 int pic_height_32_dw = (pic_height_dw + 31) & (~0x1f);
4715 int pic_width_lcu_dw = (pic_width_64_dw % lcu_size) ?
4716 pic_width_64_dw / lcu_size + 1
4717 : pic_width_64_dw / lcu_size;
4718 int pic_height_lcu_dw = (pic_height_32_dw % lcu_size) ?
4719 pic_height_32_dw / lcu_size + 1
4720 : pic_height_32_dw / lcu_size;
4721 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
4722 mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
4723 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
4724 /*64k alignment*/
4725 buf_size = ((mc_buffer_size_u_v_h << 16) * 3);
4726 buf_size = ((buf_size + 0xffff) >> 16) << 16;
4727
4728 if (pbi->is_used_v4l) {
4729 pic_width_align = ALIGN(pic_width_dw, 32);
4730 pic_height_align = ALIGN(pic_height_dw, 32);
4731 }
4732 }
4733
4734 if (mc_buffer_size & 0xffff) /*64k alignment*/
4735 mc_buffer_size_h += 1;
4736 if ((!pbi->mmu_enable) && ((dw_mode & 0x10) == 0))
4737 buf_size += (mc_buffer_size_h << 16);
4738
4739 if (pbi->mmu_enable) {
4740 pic_config->header_adr = decoder_bmmu_box_get_phy_addr(
4741 pbi->bmmu_box, HEADER_BUFFER_IDX(pic_config->index));
4742
4743 if (debug & VP9_DEBUG_BUFMGR_MORE) {
4744 pr_info("MMU header_adr %d: %ld\n",
4745 pic_config->index, pic_config->header_adr);
4746 }
4747 }
4748
4749 i = pic_config->index;
4750#ifdef MV_USE_FIXED_BUF
4751 if ((pbi->work_space_buf->mpred_mv.buf_start +
4752 (((i + 1) * lcu_total) * MV_MEM_UNIT))
4753 <= mpred_mv_end
4754 ) {
4755#endif
4756 if (buf_size > 0) {
4757 if (pbi->is_used_v4l) {
4758 ret = vdec_v4l_get_buffer(pbi->v4l2_ctx, &fb);
4759 if (ret) {
4760 vp9_print(pbi, PRINT_FLAG_V4L_DETAIL,
4761 "[%d] get fb fail.\n",
4762 ((struct aml_vcodec_ctx *)
4763 (pbi->v4l2_ctx))->id);
4764 return ret;
4765 }
4766
4767 pbi->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
4768 pic_config->cma_alloc_addr = fb->m.mem[0].addr;
4769 vp9_print(pbi, PRINT_FLAG_V4L_DETAIL,
4770 "[%d] %s(), v4l ref buf addr: 0x%x\n",
4771 ((struct aml_vcodec_ctx *)
4772 (pbi->v4l2_ctx))->id, __func__, fb);
4773 } else {
4774 ret = decoder_bmmu_box_alloc_buf_phy(pbi->bmmu_box,
4775 VF_BUFFER_IDX(i),
4776 buf_size, DRIVER_NAME,
4777 &pic_config->cma_alloc_addr);
4778 if (ret < 0) {
4779 pr_info(
4780 "decoder_bmmu_box_alloc_buf_phy idx %d size %d fail\n",
4781 VF_BUFFER_IDX(i),
4782 buf_size
4783 );
4784 return ret;
4785 }
4786 }
4787
4788 if (pic_config->cma_alloc_addr)
4789 y_adr = pic_config->cma_alloc_addr;
4790 else {
4791 pr_info(
4792 "decoder_bmmu_box_alloc_buf_phy idx %d size %d return null\n",
4793 VF_BUFFER_IDX(i),
4794 buf_size
4795 );
4796 return -1;
4797 }
4798 }
4799 {
4800 /*ensure get_pic_by_POC()
4801 not get the buffer not decoded*/
4802 pic_config->BUF_index = i;
4803 pic_config->lcu_total = lcu_total;
4804
4805 pic_config->comp_body_size = losless_comp_body_size;
4806 pic_config->buf_size = buf_size;
4807
4808 pic_config->mc_canvas_y = pic_config->index;
4809 pic_config->mc_canvas_u_v = pic_config->index;
4810 if (dw_mode & 0x10) {
4811 if (pbi->is_used_v4l) {
4812 if (fb->num_planes == 1) {
4813 fb->m.mem[0].bytes_used =
4814 pic_width_align * pic_height_align;
4815 pic_config->dw_y_adr = y_adr;
4816 pic_config->dw_u_v_adr = y_adr +
4817 fb->m.mem[0].bytes_used;
4818 } else if (fb->num_planes == 2) {
4819 fb->m.mem[0].bytes_used =
4820 pic_width_align * pic_height_align;
4821 fb->m.mem[1].bytes_used =
4822 fb->m.mem[1].size;
4823 pic_config->dw_y_adr = y_adr;
4824 pic_config->dw_u_v_adr = y_adr +
4825 fb->m.mem[0].bytes_used;
4826 }
4827 } else {
4828 pic_config->dw_y_adr = y_adr;
4829 pic_config->dw_u_v_adr = y_adr +
4830 ((mc_buffer_size_u_v_h << 16) << 1);
4831 }
4832
4833 pic_config->mc_canvas_y =
4834 (pic_config->index << 1);
4835 pic_config->mc_canvas_u_v =
4836 (pic_config->index << 1) + 1;
4837 } else if (dw_mode) {
4838 pic_config->dw_y_adr = y_adr;
4839 pic_config->dw_u_v_adr = pic_config->dw_y_adr +
4840 ((mc_buffer_size_u_v_h << 16) << 1);
4841 }
4842#ifdef MV_USE_FIXED_BUF
4843 pic_config->mpred_mv_wr_start_addr =
4844 pbi->work_space_buf->mpred_mv.buf_start +
4845 ((pic_config->index * lcu_total)
4846 * MV_MEM_UNIT);
4847#endif
4848 if (debug) {
4849 pr_info
4850 ("%s index %d BUF_index %d ",
4851 __func__, pic_config->index,
4852 pic_config->BUF_index);
4853 pr_info
4854 ("comp_body_size %x comp_buf_size %x ",
4855 pic_config->comp_body_size,
4856 pic_config->buf_size);
4857 pr_info
4858 ("mpred_mv_wr_start_adr %ld\n",
4859 pic_config->mpred_mv_wr_start_addr);
4860 pr_info("dw_y_adr %d, pic_config->dw_u_v_adr =%d\n",
4861 pic_config->dw_y_adr,
4862 pic_config->dw_u_v_adr);
4863 }
4864 ret = 0;
4865 }
4866#ifdef MV_USE_FIXED_BUF
4867 }
4868#endif
4869 return ret;
4870}
4871
4872static int vvp9_mmu_compress_header_size(struct VP9Decoder_s *pbi)
4873{
4874 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
4875 IS_8K_SIZE(pbi->max_pic_w, pbi->max_pic_h))
4876 return (MMU_COMPRESS_8K_HEADER_SIZE);
4877
4878 return (MMU_COMPRESS_HEADER_SIZE);
4879}
4880
4881/*#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)*/
4882static int vvp9_frame_mmu_map_size(struct VP9Decoder_s *pbi)
4883{
4884 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
4885 IS_8K_SIZE(pbi->max_pic_w, pbi->max_pic_h))
4886 return (MAX_FRAME_8K_NUM * 4);
4887
4888 return (MAX_FRAME_4K_NUM * 4);
4889}
4890
/*
 * Initialize the frame buffer pool.
 *
 * In MMU mode (and not double-write-only) first pre-allocates one
 * compressed-header buffer per frame.  Then every
 * PIC_BUFFER_CONFIG_s is reset and, in non-v4l mode, its pixel memory
 * is allocated via config_pic().  If a config_pic() call fails, that
 * buffer and all remaining ones are marked invalid (index = -1).
 */
static void init_pic_list(struct VP9Decoder_s *pbi)
{
	int i;
	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *pic_config;
	u32 header_size;
	struct vdec_s *vdec = hw_to_vdec(pbi);

	if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) {
		header_size = vvp9_mmu_compress_header_size(pbi);
		/*alloc VP9 compress header first*/
		for (i = 0; i < pbi->used_buf_num; i++) {
			unsigned long buf_addr;
			if (decoder_bmmu_box_alloc_buf_phy
				(pbi->bmmu_box,
				HEADER_BUFFER_IDX(i), header_size,
				DRIVER_HEADER_NAME,
				&buf_addr) < 0) {
				pr_info("%s malloc compress header failed %d\n",
				DRIVER_HEADER_NAME, i);
				/* without headers decoding cannot proceed */
				pbi->fatal_error |= DECODER_FATAL_ERROR_NO_MEM;
				return;
			}
		}
	}
	for (i = 0; i < pbi->used_buf_num; i++) {
		pic_config = &cm->buffer_pool->frame_bufs[i].buf;
		pic_config->index = i;
		pic_config->BUF_index = -1;
		pic_config->mv_buf_index = -1;
		if (vdec->parallel_dec == 1) {
			pic_config->y_canvas_index = -1;
			pic_config->uv_canvas_index = -1;
		}
		pic_config->y_crop_width = pbi->init_pic_w;
		pic_config->y_crop_height = pbi->init_pic_h;
		pic_config->double_write_mode = get_double_write_mode(pbi);

		/* in v4l mode the pixel buffers come from the capture
		 * queue later, so no allocation happens here
		 */
		if (!pbi->is_used_v4l) {
			if (config_pic(pbi, pic_config) < 0) {
				if (debug)
					pr_info("Config_pic %d fail\n",
						pic_config->index);
				pic_config->index = -1;
				break;
			}

			if (pic_config->double_write_mode) {
				set_canvas(pbi, pic_config);
			}
		}
	}
	/* mark any remaining (unconfigured) buffers as invalid */
	for (; i < pbi->used_buf_num; i++) {
		pic_config = &cm->buffer_pool->frame_bufs[i].buf;
		pic_config->index = -1;
		pic_config->BUF_index = -1;
		pic_config->mv_buf_index = -1;
		if (vdec->parallel_dec == 1) {
			pic_config->y_canvas_index = -1;
			pic_config->uv_canvas_index = -1;
		}
	}
	pr_info("%s ok, used_buf_num = %d\n",
		__func__, pbi->used_buf_num);
}
4956
/*
 * Program the decoder's ANC2AXI address translation table with the
 * base address of every valid frame buffer (compressed-header address
 * in MMU mode, double-write luma address otherwise; both stored
 * right-shifted by 5, i.e. in 32-byte units), then zero out the IPP
 * canvas registers.  The table data port auto-increments, so write
 * order matters.
 */
static void init_pic_list_hw(struct VP9Decoder_s *pbi)
{
	int i;
	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *pic_config;
	/*WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);*/
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
		(0x1 << 1) | (0x1 << 2));


	for (i = 0; i < pbi->used_buf_num; i++) {
		pic_config = &cm->buffer_pool->frame_bufs[i].buf;
		/* index < 0 marks the end of the configured buffers */
		if (pic_config->index < 0)
			break;

		if (pbi->mmu_enable && ((pic_config->double_write_mode & 0x10) == 0)) {

			WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
				pic_config->header_adr >> 5);
		} else {
			/*WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
			 * pic_config->mc_y_adr
			 * | (pic_config->mc_canvas_y << 8) | 0x1);
			 */
			WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
				pic_config->dw_y_adr >> 5);
		}
#ifndef LOSLESS_COMPRESS_MODE
		/*WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
		 * pic_config->mc_u_v_adr
		 * | (pic_config->mc_canvas_u_v << 8)| 0x1);
		 */
		WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
			pic_config->header_adr >> 5);
#else
		/* double-write-only mode also needs the chroma address */
		if (pic_config->double_write_mode & 0x10) {
			WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
				pic_config->dw_u_v_adr >> 5);
		}
#endif
	}
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);

	/*Zero out canvas registers in IPP -- avoid simulation X*/
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
		(0 << 8) | (0 << 1) | 1);
	for (i = 0; i < 32; i++)
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
}
5006
5007
5008static void dump_pic_list(struct VP9Decoder_s *pbi)
5009{
5010 struct VP9_Common_s *const cm = &pbi->common;
5011 struct PIC_BUFFER_CONFIG_s *pic_config;
5012 int i;
5013 for (i = 0; i < FRAME_BUFFERS; i++) {
5014 pic_config = &cm->buffer_pool->frame_bufs[i].buf;
5015 vp9_print(pbi, 0,
5016 "Buf(%d) index %d mv_buf_index %d ref_count %d vf_ref %d dec_idx %d slice_type %d w/h %d/%d adr%ld\n",
5017 i,
5018 pic_config->index,
5019#ifndef MV_USE_FIXED_BUF
5020 pic_config->mv_buf_index,
5021#else
5022 -1,
5023#endif
5024 cm->buffer_pool->
5025 frame_bufs[i].ref_count,
5026 pic_config->vf_ref,
5027 pic_config->decode_idx,
5028 pic_config->slice_type,
5029 pic_config->y_crop_width,
5030 pic_config->y_crop_height,
5031 pic_config->cma_alloc_addr
5032 );
5033 }
5034 return;
5035}
5036
5037static int config_pic_size(struct VP9Decoder_s *pbi, unsigned short bit_depth)
5038{
5039#ifdef LOSLESS_COMPRESS_MODE
5040 unsigned int data32;
5041#endif
5042 int losless_comp_header_size, losless_comp_body_size;
5043 struct VP9_Common_s *cm = &pbi->common;
5044 struct PIC_BUFFER_CONFIG_s *cur_pic_config = &cm->cur_frame->buf;
5045
5046 frame_width = cur_pic_config->y_crop_width;
5047 frame_height = cur_pic_config->y_crop_height;
5048 cur_pic_config->bit_depth = bit_depth;
5049 cur_pic_config->double_write_mode = get_double_write_mode(pbi);
5050 losless_comp_header_size =
5051 compute_losless_comp_header_size(cur_pic_config->y_crop_width,
5052 cur_pic_config->y_crop_height);
5053 losless_comp_body_size =
5054 compute_losless_comp_body_size(cur_pic_config->y_crop_width,
5055 cur_pic_config->y_crop_height, (bit_depth == VPX_BITS_10));
5056 cur_pic_config->comp_body_size = losless_comp_body_size;
5057#ifdef LOSLESS_COMPRESS_MODE
5058 data32 = READ_VREG(HEVC_SAO_CTRL5);
5059 if (bit_depth == VPX_BITS_10)
5060 data32 &= ~(1 << 9);
5061 else
5062 data32 |= (1 << 9);
5063
5064 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5065
5066 if (pbi->mmu_enable) {
5067 /*bit[4] : paged_mem_mode*/
5068 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
5069 } else {
5070 /*bit[3] smem mdoe*/
5071 if (bit_depth == VPX_BITS_10)
5072 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0 << 3));
5073 else
5074 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (1 << 3));
5075 }
5076 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_SM1)
5077 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
5078 /*WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);*/
5079 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
5080 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
5081 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
5082 if (get_double_write_mode(pbi) & 0x10)
5083 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x1 << 31);
5084#else
5085 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x1 << 31);
5086#endif
5087 return 0;
5088}
5089
/*
 * Program the motion-compensation reference information for the
 * current frame: reference canvas ids (two table copies, at index 0
 * and index 16), per-reference geometry, the width/height scaling
 * ratios, and the scale-enable mask for any reference whose size
 * differs from the current picture.  The canvas and refinfo data
 * ports auto-increment, so write order matters.  Always returns 0.
 */
static int config_mc_buffer(struct VP9Decoder_s *pbi, unsigned short bit_depth)
{
	int i;
	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *cur_pic_config = &cm->cur_frame->buf;
	uint8_t scale_enable = 0;

	if (debug&VP9_DEBUG_BUFMGR_MORE)
		pr_info("config_mc_buffer entered .....\n");

	/* canvas table write pointer: start index 0, auto-increment on */
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
			(0 << 8) | (0 << 1) | 1);
	for (i = 0; i < REFS_PER_FRAME; ++i) {
		struct PIC_BUFFER_CONFIG_s *pic_config = cm->frame_refs[i].buf;
		if (!pic_config)
			continue;
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
			(pic_config->mc_canvas_u_v << 16)
			| (pic_config->mc_canvas_u_v << 8)
			| pic_config->mc_canvas_y);
		if (debug & VP9_DEBUG_BUFMGR_MORE)
			pr_info("refid %x mc_canvas_u_v %x mc_canvas_y %x\n",
				i, pic_config->mc_canvas_u_v,
				pic_config->mc_canvas_y);
	}
	/* second copy of the same table starting at index 16 */
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
			(16 << 8) | (0 << 1) | 1);
	for (i = 0; i < REFS_PER_FRAME; ++i) {
		struct PIC_BUFFER_CONFIG_s *pic_config = cm->frame_refs[i].buf;
		if (!pic_config)
			continue;
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
			(pic_config->mc_canvas_u_v << 16)
			| (pic_config->mc_canvas_u_v << 8)
			| pic_config->mc_canvas_y);
	}

	/*auto_inc start index:0 field:0*/
	WRITE_VREG(VP9D_MPP_REFINFO_TBL_ACCCONFIG, 0x1 << 2);
	/*index 0:last 1:golden 2:altref*/
	for (i = 0; i < REFS_PER_FRAME; i++) {
		int ref_pic_body_size;
		struct PIC_BUFFER_CONFIG_s *pic_config = cm->frame_refs[i].buf;
		if (!pic_config)
			continue;
		WRITE_VREG(VP9D_MPP_REFINFO_DATA, pic_config->y_crop_width);
		WRITE_VREG(VP9D_MPP_REFINFO_DATA, pic_config->y_crop_height);

		/* references of a different size need hardware scaling */
		if (pic_config->y_crop_width != cur_pic_config->y_crop_width ||
			pic_config->y_crop_height != cur_pic_config->y_crop_height) {
			scale_enable |= (1 << i);
		}
		ref_pic_body_size =
			compute_losless_comp_body_size(pic_config->y_crop_width,
			pic_config->y_crop_height, (bit_depth == VPX_BITS_10));
		/* scale ratios: ref dimension << 14 divided by current */
		WRITE_VREG(VP9D_MPP_REFINFO_DATA,
			(pic_config->y_crop_width << 14)
			/ cur_pic_config->y_crop_width);
		WRITE_VREG(VP9D_MPP_REFINFO_DATA,
			(pic_config->y_crop_height << 14)
			/ cur_pic_config->y_crop_height);
		if (pbi->mmu_enable)
			WRITE_VREG(VP9D_MPP_REFINFO_DATA, 0);
		else
			WRITE_VREG(VP9D_MPP_REFINFO_DATA, ref_pic_body_size >> 5);
	}
	WRITE_VREG(VP9D_MPP_REF_SCALE_ENBL, scale_enable);
	return 0;
}
5159
5160static void clear_mpred_hw(struct VP9Decoder_s *pbi)
5161{
5162 unsigned int data32;
5163
5164 data32 = READ_VREG(HEVC_MPRED_CTRL4);
5165 data32 &= (~(1 << 6));
5166 WRITE_VREG(HEVC_MPRED_CTRL4, data32);
5167}
5168
/*
 * Configure motion-vector prediction hardware for the current frame:
 * points the MV write buffer at the current picture, the MV read
 * buffer at the previous (co-located) picture, and propagates
 * cm->use_prev_frame_mvs into HEVC_MPRED_CTRL4 bit 6.
 */
static void config_mpred_hw(struct VP9Decoder_s *pbi)
{
	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *cur_pic_config = &cm->cur_frame->buf;
	struct PIC_BUFFER_CONFIG_s *last_frame_pic_config =
						&cm->prev_frame->buf;

	unsigned int data32;
	int mpred_curr_lcu_x;
	int mpred_curr_lcu_y;
	int mpred_mv_rd_end_addr;


	/* end of the previous frame's MV area (MV_MEM_UNIT per LCU) */
	mpred_mv_rd_end_addr = last_frame_pic_config->mpred_mv_wr_start_addr
			+ (last_frame_pic_config->lcu_total * MV_MEM_UNIT);

	/* NOTE(review): mpred_curr_lcu_x/y are read out below but never
	 * used in this function
	 */
	data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
	mpred_curr_lcu_x = data32 & 0xffff;
	mpred_curr_lcu_y = (data32 >> 16) & 0xffff;

	if (debug & VP9_DEBUG_BUFMGR)
		pr_info("cur pic_config index %d col pic_config index %d\n",
			cur_pic_config->index, last_frame_pic_config->index);
	WRITE_VREG(HEVC_MPRED_CTRL3, 0x24122412);
	WRITE_VREG(HEVC_MPRED_ABV_START_ADDR,
		pbi->work_space_buf->mpred_above.buf_start);

	data32 = READ_VREG(HEVC_MPRED_CTRL4);
	/* bit 6: enable temporal MV prediction from the previous frame */
	data32 &= (~(1 << 6));
	data32 |= (cm->use_prev_frame_mvs << 6);
	WRITE_VREG(HEVC_MPRED_CTRL4, data32);

	WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
		cur_pic_config->mpred_mv_wr_start_addr);
	WRITE_VREG(HEVC_MPRED_MV_WPTR, cur_pic_config->mpred_mv_wr_start_addr);

	WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR,
		last_frame_pic_config->mpred_mv_wr_start_addr);
	WRITE_VREG(HEVC_MPRED_MV_RPTR,
		last_frame_pic_config->mpred_mv_wr_start_addr);
	/*data32 = ((pbi->lcu_x_num - pbi->tile_width_lcu)*MV_MEM_UNIT);*/
	/*WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP,data32);*/
	/*WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP,data32);*/
	WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);

}
5216
5217static void config_sao_hw(struct VP9Decoder_s *pbi, union param_u *params)
5218{
5219 struct VP9_Common_s *cm = &pbi->common;
5220 struct PIC_BUFFER_CONFIG_s *pic_config = &cm->cur_frame->buf;
5221
5222 unsigned int data32;
5223 int lcu_size = 64;
5224 int mc_buffer_size_u_v =
5225 pic_config->lcu_total * lcu_size*lcu_size/2;
5226 int mc_buffer_size_u_v_h =
5227 (mc_buffer_size_u_v + 0xffff) >> 16;/*64k alignment*/
5228 struct aml_vcodec_ctx * v4l2_ctx = pbi->v4l2_ctx;
5229
5230 if (get_double_write_mode(pbi)) {
5231 WRITE_VREG(HEVC_SAO_Y_START_ADDR, pic_config->dw_y_adr);
5232 WRITE_VREG(HEVC_SAO_C_START_ADDR, pic_config->dw_u_v_adr);
5233 WRITE_VREG(HEVC_SAO_Y_WPTR, pic_config->dw_y_adr);
5234 WRITE_VREG(HEVC_SAO_C_WPTR, pic_config->dw_u_v_adr);
5235 } else {
5236 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5237 WRITE_VREG(HEVC_SAO_C_START_ADDR, 0xffffffff);
5238 }
5239 if (pbi->mmu_enable)
5240 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, pic_config->header_adr);
5241
5242 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5243 /*pr_info("data32=%x,mc_buffer_size_u_v_h=%x,lcu_total=%x\n",
5244 * data32, mc_buffer_size_u_v_h, pic_config->lcu_total);
5245 */
5246 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5247
5248 data32 = (mc_buffer_size_u_v_h << 16);
5249 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5250
5251#ifdef VP9_10B_NV21
5252#ifdef DOS_PROJECT
5253 data32 = READ_VREG(HEVC_SAO_CTRL1);
5254 data32 &= (~0x3000);
5255 /*[13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32*/
5256 data32 |= (pbi->mem_map_mode << 12);
5257 data32 &= (~0x3);
5258 data32 |= 0x1; /* [1]:dw_disable [0]:cm_disable*/
5259 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5260 /*[23:22] dw_v1_ctrl [21:20] dw_v0_ctrl [19:18] dw_h1_ctrl
5261 * [17:16] dw_h0_ctrl
5262 */
5263 data32 = READ_VREG(HEVC_SAO_CTRL5);
5264 /*set them all 0 for H265_NV21 (no down-scale)*/
5265 data32 &= ~(0xff << 16);
5266 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5267 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5268 data32 &= (~0x30);
5269 /*[5:4] address_format 00:linear 01:32x32 10:64x32*/
5270 data32 |= (pbi->mem_map_mode << 4);
5271 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5272#else
5273 /*m8baby test1902*/
5274 data32 = READ_VREG(HEVC_SAO_CTRL1);
5275 data32 &= (~0x3000);
5276 /*[13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32*/
5277 data32 |= (pbi->mem_map_mode << 12);
5278 data32 &= (~0xff0);
5279 /*data32 |= 0x670;*/ /*Big-Endian per 64-bit*/
5280 data32 |= 0x880; /*.Big-Endian per 64-bit */
5281 data32 &= (~0x3);
5282 data32 |= 0x1; /*[1]:dw_disable [0]:cm_disable*/
5283 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5284 /* [23:22] dw_v1_ctrl [21:20] dw_v0_ctrl
5285 *[19:18] dw_h1_ctrl [17:16] dw_h0_ctrl
5286 */
5287 data32 = READ_VREG(HEVC_SAO_CTRL5);
5288 /* set them all 0 for H265_NV21 (no down-scale)*/
5289 data32 &= ~(0xff << 16);
5290 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5291
5292 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5293 data32 &= (~0x30);
5294 /*[5:4] address_format 00:linear 01:32x32 10:64x32*/
5295 data32 |= (pbi->mem_map_mode << 4);
5296 data32 &= (~0xF);
5297 data32 |= 0x8; /*Big-Endian per 64-bit*/
5298 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5299#endif
5300#else
5301 data32 = READ_VREG(HEVC_SAO_CTRL1);
5302 data32 &= (~0x3000);
5303 data32 |= (pbi->mem_map_mode <<
5304 12);
5305
5306/* [13:12] axi_aformat, 0-Linear,
5307 * 1-32x32, 2-64x32
5308 */
5309 data32 &= (~0xff0);
5310 /* data32 |= 0x670; // Big-Endian per 64-bit */
5311 data32 |= endian; /* Big-Endian per 64-bit */
5312 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
5313 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5314 if (get_double_write_mode(pbi) == 0)
5315 data32 |= 0x2; /*disable double write*/
5316 else if (get_double_write_mode(pbi) & 0x10)
5317 data32 |= 0x1; /*disable cm*/
5318 } else { /* >= G12A dw write control */
5319 unsigned int data;
5320 data = READ_VREG(HEVC_DBLK_CFGB);
5321 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5322 if (get_double_write_mode(pbi) == 0)
5323 data |= (0x1 << 8); /*enable first write*/
5324 else if (get_double_write_mode(pbi) & 0x10)
5325 data |= (0x1 << 9); /*double write only*/
5326 else
5327 data |= ((0x1 << 8) |(0x1 << 9));
5328 WRITE_VREG(HEVC_DBLK_CFGB, data);
5329 }
5330
5331 /* swap uv */
5332 if (pbi->is_used_v4l) {
5333 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5334 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5335 data32 &= ~(1 << 8); /* NV21 */
5336 else
5337 data32 |= (1 << 8); /* NV12 */
5338 }
5339
5340 /*
5341 * [31:24] ar_fifo1_axi_thred
5342 * [23:16] ar_fifo0_axi_thred
5343 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5344 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5345 * [11:08] axi_lendian_C
5346 * [07:04] axi_lendian_Y
5347 * [3] reserved
5348 * [2] clk_forceon
5349 * [1] dw_disable:disable double write output
5350 * [0] cm_disable:disable compress output
5351 */
5352 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5353
5354 if (get_double_write_mode(pbi) & 0x10) {
5355 /* [23:22] dw_v1_ctrl
5356 *[21:20] dw_v0_ctrl
5357 *[19:18] dw_h1_ctrl
5358 *[17:16] dw_h0_ctrl
5359 */
5360 data32 = READ_VREG(HEVC_SAO_CTRL5);
5361 /*set them all 0 for H265_NV21 (no down-scale)*/
5362 data32 &= ~(0xff << 16);
5363 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5364 } else {
5365 data32 = READ_VREG(HEVC_SAO_CTRL5);
5366 data32 &= (~(0xff << 16));
5367 if (get_double_write_mode(pbi) == 2 ||
5368 get_double_write_mode(pbi) == 3)
5369 data32 |= (0xff<<16);
5370 else if (get_double_write_mode(pbi) == 4)
5371 data32 |= (0x33<<16);
5372 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5373 }
5374
5375 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5376 data32 &= (~0x30);
5377 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5378 data32 |= (pbi->mem_map_mode <<
5379 4);
5380 data32 &= (~0xF);
5381 data32 |= 0xf; /* valid only when double write only */
5382 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5383
5384 /* swap uv */
5385 if (pbi->is_used_v4l) {
5386 if ((v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21) ||
5387 (v4l2_ctx->q_data[AML_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_NV21M))
5388 data32 |= (1 << 12); /* NV21 */
5389 else
5390 data32 &= ~(1 << 12); /* NV12 */
5391 }
5392
5393 /*
5394 * [3:0] little_endian
5395 * [5:4] address_format 00:linear 01:32x32 10:64x32
5396 * [7:6] reserved
5397 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5398 * [11:10] reserved
5399 * [12] CbCr_byte_swap
5400 * [31:13] reserved
5401 */
5402 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5403#endif
5404}
5405
/*
 * Program the base addresses of the decoder working buffers (described
 * by pbi->work_space_buf) into the HEVC/VP9 hardware registers.
 *
 * @pbi:  decoder instance
 * @mask: which half of the pipeline to configure; HW_MASK_FRONT covers
 *        the parser/front-end buffers (RPM, RPS, PPS, stream swap,
 *        LMEM dump), HW_MASK_BACK covers the pixel back-end buffers
 *        (IPP line buffer, SAO, scale LUT, deblock, compressed-frame
 *        and MMU buffers).  Both bits may be set.
 */
static void vp9_config_work_space_hw(struct VP9Decoder_s *pbi, u32 mask)
{
	struct BuffInfo_s *buf_spec = pbi->work_space_buf;
	unsigned int data32;

	/* Dump the workspace layout once, before the first init. */
	if (debug && pbi->init_flag == 0)
		pr_info("%s %x %x %x %x %x %x %x %x %x %x %x %x\n",
			__func__,
			buf_spec->ipp.buf_start,
			buf_spec->start_adr,
			buf_spec->short_term_rps.buf_start,
			buf_spec->vps.buf_start,
			buf_spec->sps.buf_start,
			buf_spec->pps.buf_start,
			buf_spec->sao_up.buf_start,
			buf_spec->swap_buf.buf_start,
			buf_spec->swap_buf2.buf_start,
			buf_spec->scalelut.buf_start,
			buf_spec->dblk_para.buf_start,
			buf_spec->dblk_data.buf_start);

	if (mask & HW_MASK_FRONT) {
		/*
		 * RPM buffer is only used when parameters are DMA'd out by
		 * the firmware, not when they are sent via register writes.
		 */
		if ((debug & VP9_DEBUG_SEND_PARAM_WITH_REG) == 0)
			WRITE_VREG(HEVC_RPM_BUFFER, (u32)pbi->rpm_phy_addr);

		WRITE_VREG(HEVC_SHORT_TERM_RPS,
			buf_spec->short_term_rps.buf_start);
		/*WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);*/
		/*WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);*/
		WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
		WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
			buf_spec->swap_buf.buf_start);
		WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2,
			buf_spec->swap_buf2.buf_start);
		WRITE_VREG(LMEM_DUMP_ADR, (u32)pbi->lmem_phy_addr);

	}

	if (mask & HW_MASK_BACK) {
#ifdef LOSLESS_COMPRESS_MODE
		/* Sizes of the lossless (frame-compression) header/body
		 * areas, derived from the initial picture dimensions.
		 */
		int losless_comp_header_size =
			compute_losless_comp_header_size(pbi->init_pic_w,
			pbi->init_pic_h);
		int losless_comp_body_size =
			compute_losless_comp_body_size(pbi->init_pic_w,
			pbi->init_pic_h, buf_alloc_depth == 10);
#endif
		WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE,
			buf_spec->ipp.buf_start);
		WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
		WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
			/* cfg_addr_adp*/
			WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_para.buf_start);
			if (debug & VP9_DEBUG_BUFMGR_MORE)
				pr_info("Write HEVC_DBLK_CFGE\n");
		}
		/* cfg_p_addr */
		WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
		/* cfg_d_addr */
		WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);

		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
			/*
			 * data32 = (READ_VREG(P_HEVC_DBLK_CFG3)>>8) & 0xff; // xio left offset, default is 0x40
			 * data32 = data32 * 2;
			 * data32 = (READ_VREG(P_HEVC_DBLK_CFG3)>>16) & 0xff; // adp left offset, default is 0x040
			 * data32 = data32 * 2;
			 */
			WRITE_VREG(HEVC_DBLK_CFG3, 0x808010); // make left storage 2 x 4k]
		}
#ifdef LOSLESS_COMPRESS_MODE
		if (pbi->mmu_enable) {
			/*bit[4] : paged_mem_mode*/
			WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
			if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_SM1)
				WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0);
		} else {
			/*if(cur_pic_config->bit_depth == VPX_BITS_10)
			 * WRITE_VREG(P_HEVCD_MPP_DECOMP_CTL1, (0<<3));
			 */
			/*bit[3] smem mode*/
			/*else WRITE_VREG(P_HEVCD_MPP_DECOMP_CTL1, (1<<3));*/
			/*bit[3] smem mode*/
			WRITE_VREG(HEVCD_MPP_DECOMP_CTL2,
				(losless_comp_body_size >> 5));
		}
		/*WRITE_VREG(HEVCD_MPP_DECOMP_CTL2,
			(losless_comp_body_size >> 5));*/
		/*WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,
			(0xff<<20) | (0xff<<10) | 0xff);*/
		/*8-bit mode */
		WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
		WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
		WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
		/* double-write-only mode (0x10): bypass compressed output */
		if (get_double_write_mode(pbi) & 0x10)
			WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x1 << 31);
#else
		WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x1 << 31);
#endif

		if (pbi->mmu_enable) {
			/* Split the MMU VBH buffer in half for VH0/VH1. */
			WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
			WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR, buf_spec->mmu_vbh.buf_start
				+ buf_spec->mmu_vbh.buf_size/2);
			/*data32 = READ_VREG(P_HEVC_SAO_CTRL9);*/
			/*data32 |= 0x1;*/
			/*WRITE_VREG(P_HEVC_SAO_CTRL9, data32);*/

			/* use HEVC_CM_HEADER_START_ADDR */
			data32 = READ_VREG(HEVC_SAO_CTRL5);
			data32 |= (1<<10);
			WRITE_VREG(HEVC_SAO_CTRL5, data32);
		}

		WRITE_VREG(VP9_SEG_MAP_BUFFER, buf_spec->seg_map.buf_start);

		WRITE_VREG(LMEM_DUMP_ADR, (u32)pbi->lmem_phy_addr);
		/**/
		WRITE_VREG(VP9_PROB_SWAP_BUFFER, pbi->prob_buffer_phy_addr);
		WRITE_VREG(VP9_COUNT_SWAP_BUFFER, pbi->count_buffer_phy_addr);
		if (pbi->mmu_enable) {
			/* MMU map register moved on G12A and newer parts. */
			if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
				WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, pbi->frame_mmu_map_phy_addr);
			else
				WRITE_VREG(VP9_MMU_MAP_BUFFER, pbi->frame_mmu_map_phy_addr);
		}
	}
}
5535
5536
5537#ifdef VP9_LPF_LVL_UPDATE
5538/*
5539 * Defines, declarations, sub-functions for vp9 de-block loop
5540 filter Thr/Lvl table update
5541 * - struct segmentation is for loop filter only (removed something)
5542 * - function "vp9_loop_filter_init" and "vp9_loop_filter_frame_init" will
5543 be instantiated in C_Entry
5544 * - vp9_loop_filter_init run once before decoding start
5545 * - vp9_loop_filter_frame_init run before every frame decoding start
5546 * - set video format to VP9 is in vp9_loop_filter_init
5547 */
5548#define MAX_LOOP_FILTER 63
5549#define MAX_REF_LF_DELTAS 4
5550#define MAX_MODE_LF_DELTAS 2
5551/*#define INTRA_FRAME 0*/
5552/*#define LAST_FRAME 1*/
5553/*#define MAX_REF_FRAMES 4*/
5554#define SEGMENT_DELTADATA 0
5555#define SEGMENT_ABSDATA 1
5556#define MAX_SEGMENTS 8
/*#define SEG_TREE_PROBS (MAX_SEGMENTS-1)*/
/* not used by the loop filter; if this struct becomes common, please add it back */
/*#define PREDICTION_PROBS 3*/
/* not used by the loop filter; if this struct becomes common, please add it back */
5561
/* Per-segment feature indices (the loop filter only uses SEG_LVL_ALT_LF). */
enum SEG_LVL_FEATURES {
	SEG_LVL_ALT_Q = 0, /*Use alternate Quantizer ....*/
	SEG_LVL_ALT_LF = 1, /*Use alternate loop filter value...*/
	SEG_LVL_REF_FRAME = 2, /*Optional Segment reference frame*/
	SEG_LVL_SKIP = 3, /*Optional Segment (0,0) + skip mode*/
	SEG_LVL_MAX = 4 /*Number of features supported*/
};
5569
/*
 * Segmentation state, trimmed to the fields the loop filter needs
 * (the probability tables of the full bitstream struct were removed).
 */
struct segmentation {
	uint8_t enabled;	/* non-zero: segment features may be active */
	uint8_t update_map;
	uint8_t update_data;
	uint8_t abs_delta;	/* SEGMENT_ABSDATA or SEGMENT_DELTADATA */
	uint8_t temporal_update;

	/*no use for loop filter, if this struct
	 *for common use, pls add it back
	 */
	/*vp9_prob tree_probs[SEG_TREE_PROBS]; */
	/* no use for loop filter, if this struct
	 * for common use, pls add it back
	 */
	/*vp9_prob pred_probs[PREDICTION_PROBS];*/

	/* per-segment value for each SEG_LVL_* feature */
	int16_t feature_data[MAX_SEGMENTS][SEG_LVL_MAX];
	/* bit i set => feature i active for that segment */
	unsigned int feature_mask[MAX_SEGMENTS];
};
5589
/* Thresholds for one loop-filter level (see vp9_update_sharpness). */
struct loop_filter_thresh {
	uint8_t mblim;		/* edge limit: 2 * (lvl + 2) + lim */
	uint8_t lim;		/* inside limit, clamped by sharpness */
	uint8_t hev_thr;	/* high-edge-variance thr; left 0 here */
};
5595
/* Complete per-frame loop-filter tables programmed into the hardware. */
struct loop_filter_info_n {
	struct loop_filter_thresh lfthr[MAX_LOOP_FILTER + 1];
	/* filter level indexed by [segment][reference frame][mode delta] */
	uint8_t lvl[MAX_SEGMENTS][MAX_REF_FRAMES][MAX_MODE_LF_DELTAS];
};
5600
/* Frame-level loop-filter parameters parsed from the bitstream. */
struct loopfilter {
	int filter_level;

	int sharpness_level;
	int last_sharpness_level;	/* to detect sharpness changes */

	uint8_t mode_ref_delta_enabled;
	uint8_t mode_ref_delta_update;

	/*0 = Intra, Last, GF, ARF*/
	signed char ref_deltas[MAX_REF_LF_DELTAS];
	signed char last_ref_deltas[MAX_REF_LF_DELTAS];

	/*0 = ZERO_MV, MV*/
	signed char mode_deltas[MAX_MODE_LF_DELTAS];
	signed char last_mode_deltas[MAX_MODE_LF_DELTAS];
};
5618
/* Clamp value into the inclusive range [low, high]. */
static int vp9_clamp(int value, int low, int high)
{
	if (value < low)
		return low;
	if (value > high)
		return high;
	return value;
}
5623
5624int segfeature_active(struct segmentation *seg,
5625 int segment_id,
5626 enum SEG_LVL_FEATURES feature_id) {
5627 return seg->enabled &&
5628 (seg->feature_mask[segment_id] & (1 << feature_id));
5629}
5630
5631int get_segdata(struct segmentation *seg, int segment_id,
5632 enum SEG_LVL_FEATURES feature_id) {
5633 return seg->feature_data[segment_id][feature_id];
5634}
5635
5636static void vp9_update_sharpness(struct loop_filter_info_n *lfi,
5637 int sharpness_lvl)
5638{
5639 int lvl;
5640 /*For each possible value for the loop filter fill out limits*/
5641 for (lvl = 0; lvl <= MAX_LOOP_FILTER; lvl++) {
5642 /*Set loop filter parameters that control sharpness.*/
5643 int block_inside_limit = lvl >> ((sharpness_lvl > 0) +
5644 (sharpness_lvl > 4));
5645
5646 if (sharpness_lvl > 0) {
5647 if (block_inside_limit > (9 - sharpness_lvl))
5648 block_inside_limit = (9 - sharpness_lvl);
5649 }
5650
5651 if (block_inside_limit < 1)
5652 block_inside_limit = 1;
5653
5654 lfi->lfthr[lvl].lim = (uint8_t)block_inside_limit;
5655 lfi->lfthr[lvl].mblim = (uint8_t)(2 * (lvl + 2) +
5656 block_inside_limit);
5657 }
5658}
5659
/*
 * One-shot loop-filter initialisation, run once when decode starts.
 *
 * Zeroes the loop-filter state (pbi->lfi / pbi->lf / pbi->seg_4lf),
 * builds the limit tables for sharpness level 0, loads them into
 * HEVC_DBLK_CFG9, and selects the VP9 video format (plus per-SoC
 * write-enable bits) in HEVC_DBLK_CFGB.
 */
void vp9_loop_filter_init(struct VP9Decoder_s *pbi)
{
	struct loop_filter_info_n *lfi = pbi->lfi;
	struct loopfilter *lf = pbi->lf;
	struct segmentation *seg_4lf = pbi->seg_4lf;
	int i;
	unsigned int data32;

	memset(lfi, 0, sizeof(struct loop_filter_info_n));
	memset(lf, 0, sizeof(struct loopfilter));
	memset(seg_4lf, 0, sizeof(struct segmentation));
	lf->sharpness_level = 0; /*init to 0 */
	/*init limits for given sharpness*/
	vp9_update_sharpness(lfi, lf->sharpness_level);
	lf->last_sharpness_level = lf->sharpness_level;
	/*init hev threshold const vectors (actually no use)
	 *for (i = 0; i <= MAX_LOOP_FILTER; i++)
	 * lfi->lfthr[i].hev_thr = (uint8_t)(i >> 4);
	 */

	/*Write to register: pack two levels' (lim, mblim) per 32-bit word,
	 *covering all 64 levels in 32 writes.
	 */
	for (i = 0; i < 32; i++) {
		unsigned int thr;

		thr = ((lfi->lfthr[i * 2 + 1].lim & 0x3f)<<8) |
			(lfi->lfthr[i * 2 + 1].mblim & 0xff);
		thr = (thr<<16) | ((lfi->lfthr[i*2].lim & 0x3f)<<8) |
			(lfi->lfthr[i * 2].mblim & 0xff);
		WRITE_VREG(HEVC_DBLK_CFG9, thr);
	}

	/*video format is VP9*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
		data32 = (0x3 << 14) | // (dw fifo thres r and b)
			(0x3 << 12) | // (dw fifo thres r or b)
			(0x3 << 10) | // (dw fifo thres not r/b)
			(0x3 << 8) | // 1st/2nd write both enable
			(0x1 << 0); // vp9 video format
	} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
		data32 = (0x57 << 8) |  /*1st/2nd write both enable*/
			(0x1  << 0); /*vp9 video format*/
	} else
		data32 = 0x40400001;

	WRITE_VREG(HEVC_DBLK_CFGB, data32);
	if (debug & VP9_DEBUG_BUFMGR_MORE)
		pr_info("[DBLK DEBUG] CFGB : 0x%x\n", data32);
}
5709 /* perform this function per frame*/
/*
 * Per-frame loop filter setup: refresh the threshold tables if the
 * sharpness changed, compute the per-segment / per-reference / per-mode
 * filter levels, and write the packed level table to HEVC_DBLK_CFGA.
 *
 * @seg:              segmentation state for this frame
 * @lfi:              level/threshold tables to fill and program
 * @lf:               frame loop-filter parameters (deltas, sharpness)
 * @default_filt_lvl: base filter level from the frame header
 */
void vp9_loop_filter_frame_init(struct segmentation *seg,
	struct loop_filter_info_n *lfi, struct loopfilter *lf,
	int default_filt_lvl) {
	int i;
	int seg_id;
	/*n_shift is the multiplier for lf_deltas
	 *the multiplier is 1 for when filter_lvl is between 0 and 31;
	 *2 when filter_lvl is between 32 and 63
	 */
	const int scale = 1 << (default_filt_lvl >> 5);

	/*update limits if sharpness has changed*/
	if (lf->last_sharpness_level != lf->sharpness_level) {
		vp9_update_sharpness(lfi, lf->sharpness_level);
		lf->last_sharpness_level = lf->sharpness_level;

		/*Write to register: two levels' (lim, mblim) per word*/
		for (i = 0; i < 32; i++) {
			unsigned int thr;

			thr = ((lfi->lfthr[i * 2 + 1].lim & 0x3f) << 8)
				| (lfi->lfthr[i * 2 + 1].mblim & 0xff);
			thr = (thr << 16) | ((lfi->lfthr[i * 2].lim & 0x3f) << 8)
				| (lfi->lfthr[i * 2].mblim & 0xff);
			WRITE_VREG(HEVC_DBLK_CFG9, thr);
		}
	}

	for (seg_id = 0; seg_id < MAX_SEGMENTS; seg_id++) {/*MAX_SEGMENTS = 8*/
		int lvl_seg = default_filt_lvl;

		/* Apply the segment's alternate-LF feature, absolute or
		 * relative to the default level depending on abs_delta.
		 */
		if (segfeature_active(seg, seg_id, SEG_LVL_ALT_LF)) {
			const int data = get_segdata(seg, seg_id,
				SEG_LVL_ALT_LF);
			lvl_seg = vp9_clamp(seg->abs_delta == SEGMENT_ABSDATA ?
				data : default_filt_lvl + data,
				0, MAX_LOOP_FILTER);
#ifdef DBG_LF_PRINT
			pr_info("segfeature_active!!!seg_id=%d,lvl_seg=%d\n", seg_id, lvl_seg);
#endif
		}

		if (!lf->mode_ref_delta_enabled) {
			/*we could get rid of this if we assume that deltas are set to
			 *zero when not in use; encoder always uses deltas
			 */
			memset(lfi->lvl[seg_id], lvl_seg, sizeof(lfi->lvl[seg_id]));
		} else {
			int ref, mode;
			const int intra_lvl = lvl_seg + lf->ref_deltas[INTRA_FRAME]
				* scale;
#ifdef DBG_LF_PRINT
			pr_info("LF_PRINT:vp9_loop_filter_frame_init,seg_id=%d\n", seg_id);
			pr_info("ref_deltas[INTRA_FRAME]=%d\n", lf->ref_deltas[INTRA_FRAME]);
#endif
			/* Intra uses only the reference delta (mode 0). */
			lfi->lvl[seg_id][INTRA_FRAME][0] =
				vp9_clamp(intra_lvl, 0, MAX_LOOP_FILTER);

			for (ref = LAST_FRAME; ref < MAX_REF_FRAMES; ++ref) {
				/* LAST_FRAME = 1, MAX_REF_FRAMES = 4*/
				for (mode = 0; mode < MAX_MODE_LF_DELTAS; ++mode) {
					/*MAX_MODE_LF_DELTAS = 2*/
					const int inter_lvl =
						lvl_seg + lf->ref_deltas[ref] * scale
						+ lf->mode_deltas[mode] * scale;
#ifdef DBG_LF_PRINT
#endif
					lfi->lvl[seg_id][ref][mode] =
						vp9_clamp(inter_lvl, 0,
						MAX_LOOP_FILTER);
				}
			}
		}
	}

#ifdef DBG_LF_PRINT
	/*print out thr/lvl table per frame*/
	for (i = 0; i <= MAX_LOOP_FILTER; i++) {
		pr_info("LF_PRINT:(%d)thr=%d,blim=%d,lim=%d\n",
			i, lfi->lfthr[i].hev_thr, lfi->lfthr[i].mblim,
			lfi->lfthr[i].lim);
	}
	for (seg_id = 0; seg_id < MAX_SEGMENTS; seg_id++) {
		pr_info("LF_PRINT:lvl(seg_id=%d)(mode=0,%d,%d,%d,%d)\n",
			seg_id, lfi->lvl[seg_id][0][0],
			lfi->lvl[seg_id][1][0], lfi->lvl[seg_id][2][0],
			lfi->lvl[seg_id][3][0]);
		pr_info("i(mode=1,%d,%d,%d,%d)\n", lfi->lvl[seg_id][0][1],
			lfi->lvl[seg_id][1][1], lfi->lvl[seg_id][2][1],
			lfi->lvl[seg_id][3][1]);
	}
#endif

	/*Write to register: one word per (segment, mode) pair, four
	 *reference-frame levels packed per word; force 0 when the frame
	 *disables filtering (default_filt_lvl == 0).
	 */
	for (i = 0; i < 16; i++) {
		unsigned int level;

		level = ((lfi->lvl[i >> 1][3][i & 1] & 0x3f) << 24) |
			((lfi->lvl[i >> 1][2][i & 1] & 0x3f) << 16) |
			((lfi->lvl[i >> 1][1][i & 1] & 0x3f) << 8) |
			(lfi->lvl[i >> 1][0][i & 1] & 0x3f);
		if (!default_filt_lvl)
			level = 0;
		WRITE_VREG(HEVC_DBLK_CFGA, level);
	}
}
5816/* VP9_LPF_LVL_UPDATE */
5817#endif
5818
/*
 * Initialise the HEVC/VP9 decoder hardware blocks.
 *
 * @pbi:  decoder instance
 * @mask: HW_MASK_FRONT initialises the parser front end (interrupts,
 *        startcode/shift logic, CABAC, decode mode, parser microcode);
 *        HW_MASK_BACK initialises the back end (IQIT scale LUT, IPP
 *        reset, NV21 reference mode, cache performance counters).
 */
static void vp9_init_decoder_hw(struct VP9Decoder_s *pbi, u32 mask)
{
	unsigned int data32;
	int i;
	/* Parser microcode command sequence loaded below. */
	const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
		0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
		0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
		0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
		0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
		0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
		0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
		0x7C00
	};
#if 0
	if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
		/* Set MCR fetch priorities*/
		data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
			(24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
		WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
	}
#endif
	/*if (debug & VP9_DEBUG_BUFMGR_MORE)
		pr_info("%s\n", __func__);*/
	if (mask & HW_MASK_FRONT) {
		data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
#if 1
		/* set bit 31~29 to 3 if HEVC_STREAM_FIFO_CTL[29] is 1 */
		data32 &= ~(7 << 29);
		data32 |= (3 << 29);
#endif
		data32 = data32 |
			(1 << 24) |/*stream_buffer_empty_int_amrisc_enable*/
			(1 << 22) |/*stream_fifo_empty_int_amrisc_enable*/
			(1 << 7) |/*dec_done_int_cpu_enable*/
			(1 << 4) |/*startcode_found_int_cpu_enable*/
			(0 << 3) |/*startcode_found_int_amrisc_enable*/
			(1 << 0) /*parser_int_enable*/
			;
#ifdef SUPPORT_FB_DECODING
#ifndef FB_DECODING_TEST_SCHEDULE
		/*fed_fb_slice_done_int_cpu_enable*/
		if (pbi->used_stage_buf_num > 0)
			data32 |= (1 << 10);
#endif
#endif
		WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);

		data32 = READ_VREG(HEVC_SHIFT_STATUS);
		data32 = data32 |
			(0 << 1) |/*emulation_check_off VP9
				do not have emulation*/
			(1 << 0)/*startcode_check_on*/
			;
		WRITE_VREG(HEVC_SHIFT_STATUS, data32);
		WRITE_VREG(HEVC_SHIFT_CONTROL,
			(0 << 14) | /*disable_start_code_protect*/
			(1 << 10) | /*length_zero_startcode_en for VP9*/
			(1 << 9) | /*length_valid_startcode_en for VP9*/
			(3 << 6) | /*sft_valid_wr_position*/
			(2 << 4) | /*emulate_code_length_sub_1*/
			(3 << 1) | /*start_code_length_sub_1
				VP9 use 0x00000001 as startcode (4 Bytes)*/
			(1 << 0) /*stream_shift_enable*/
			);

		WRITE_VREG(HEVC_CABAC_CONTROL,
			(1 << 0)/*cabac_enable*/
			);

		WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
			(1 << 0)/* hevc_parser_core_clk_en*/
			);


		WRITE_VREG(HEVC_DEC_STATUS_REG, 0);

	}

	if (mask & HW_MASK_BACK) {
		/*Initial IQIT_SCALELUT memory
		-- just to avoid X in simulation*/

		WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0);/*cfg_p_addr*/
		for (i = 0; i < 1024; i++)
			WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
	}

	if (mask & HW_MASK_FRONT) {
		u32 decode_mode;
#ifdef ENABLE_SWAP_TEST
		WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
#else
		WRITE_VREG(HEVC_STREAM_SWAP_TEST, 0);
#endif
#ifdef MULTI_INSTANCE_SUPPORT
		/* Pick the firmware decode mode from instance/input type. */
		if (!pbi->m_ins_flag) {
			if (pbi->low_latency_flag)
				decode_mode = DECODE_MODE_SINGLE_LOW_LATENCY;
			else
				decode_mode = DECODE_MODE_SINGLE;
		} else if (vdec_frame_based(hw_to_vdec(pbi)))
			decode_mode = pbi->no_head ?
				DECODE_MODE_MULTI_FRAMEBASE_NOHEAD :
				DECODE_MODE_MULTI_FRAMEBASE;
		else
			decode_mode = DECODE_MODE_MULTI_STREAMBASE;
#ifdef SUPPORT_FB_DECODING
#ifndef FB_DECODING_TEST_SCHEDULE
		if (pbi->used_stage_buf_num > 0)
			decode_mode |= (0x01 << 24);
#endif
#endif
		WRITE_VREG(DECODE_MODE, decode_mode);
		WRITE_VREG(HEVC_DECODE_SIZE, 0);
		WRITE_VREG(HEVC_DECODE_COUNT, 0);
#else
		WRITE_VREG(DECODE_MODE, DECODE_MODE_SINGLE);
		WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);
		WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0x7fffffff); /*to remove*/
#endif
		/*Send parser_cmd*/
		WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
		for (i = 0; i < PARSER_CMD_NUMBER; i++)
			WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
		WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
		WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
		WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);


		WRITE_VREG(HEVC_PARSER_IF_CONTROL,
			/*  (1 << 8) |*/ /*sao_sw_pred_enable*/
			(1 << 5) | /*parser_sao_if_en*/
			(1 << 2) | /*parser_mpred_if_en*/
			(1 << 0) /*parser_scaler_if_en*/
			);
	}

	if (mask & HW_MASK_BACK) {
		/*Changed to Start MPRED in microcode*/
		/*
		pr_info("[test.c] Start MPRED\n");
		WRITE_VREG(HEVC_MPRED_INT_STATUS,
			(1<<31)
		);
		*/
		/* Pulse a software reset of IPP/MPP, then enable IPP. */
		WRITE_VREG(HEVCD_IPP_TOP_CNTL,
			(0 << 1) | /*enable ipp*/
			(1 << 0) /*software reset ipp and mpp*/
			);
		WRITE_VREG(HEVCD_IPP_TOP_CNTL,
			(1 << 1) | /*enable ipp*/
			(0 << 0) /*software reset ipp and mpp*/
			);
		if (get_double_write_mode(pbi) & 0x10) {
			/*Enable NV21 reference read mode for MC*/
			WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x1 << 31);
		}

		/*Initialize mcrcc and decomp perf counters*/
		if (mcrcc_cache_alg_flag &&
			pbi->init_flag == 0) {
			mcrcc_perfcount_reset();
			decomp_perfcount_reset();
		}
	}
	return;
}
5986
5987
5988#ifdef CONFIG_HEVC_CLK_FORCED_ON
/*
 * Debug helper: force the clocks of every decoder sub-block (IQIT,
 * DBLK, SAO, MPRED, parser, IPP, MCRCC) on by setting each block's
 * clock force-on bit.  Only built with CONFIG_HEVC_CLK_FORCED_ON.
 */
static void config_vp9_clk_forced_on(void)
{
	unsigned int rdata32;
	/*IQIT*/
	rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
	WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));

	/* DBLK*/
	rdata32 = READ_VREG(HEVC_DBLK_CFG0);
	WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));

	/* SAO*/
	rdata32 = READ_VREG(HEVC_SAO_CTRL1);
	WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));

	/*MPRED*/
	rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
	WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));

	/* PARSER*/
	rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
	WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
	rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
	WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
	rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
	WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
	rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
	WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
	rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
	WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
	rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
	WRITE_VREG(HEVC_PARSER_IF_CONTROL,
		rdata32 | (0x1 << 6) | (0x1 << 3) | (0x1 << 1));

	/*IPP*/
	rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
	WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);

	/* MCRCC*/
	rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
	WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
}
6031#endif
6032
6033
6034#ifdef MCRCC_ENABLE
6035static void dump_hit_rate(struct VP9Decoder_s *pbi)
6036{
6037 if (debug & VP9_DEBUG_CACHE_HIT_RATE) {
6038 mcrcc_get_hitrate(pbi->m_ins_flag);
6039 decomp_get_hitrate();
6040 decomp_get_comprate();
6041 }
6042}
6043
/*
 * Configure the motion-compensation reference cache (MCRCC) for the
 * current frame.  For intra frames the cache is left in reset (clock
 * off); for inter frames canvas ids for two references are programmed
 * into HEVCD_MCRCC_CTL2/CTL3 and progressive mode is enabled.
 */
static void config_mcrcc_axi_hw(struct VP9Decoder_s *pbi)
{
	unsigned int rdata32;
	unsigned short is_inter;
	/*pr_info("Entered config_mcrcc_axi_hw...\n");*/
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2);/* reset mcrcc*/
	is_inter = ((pbi->common.frame_type != KEY_FRAME) &&
			(!pbi->common.intra_only)) ? 1 : 0;
	if (!is_inter) { /* I-PIC*/
		/*remove reset -- disables clock*/
		WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
		return;
	}

	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
		mcrcc_get_hitrate(pbi->m_ins_flag);
		decomp_get_hitrate();
		decomp_get_comprate();
	}

	/* Select canvas index 0; each following data read appears to
	 * return the next canvas entry (see "Programme canvas1" below)
	 * -- NOTE(review): auto-advance assumed, confirm with datasheet.
	 */
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
		(0 << 8) | (1 << 1) | 0);
	rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
	rdata32 = rdata32 & 0xffff;
	rdata32 = rdata32 | (rdata32 << 16);
	WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
	/*Programme canvas1 */
	rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
	rdata32 = rdata32 & 0xffff;
	rdata32 = rdata32 | (rdata32 << 16);
	WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
	/*enable mcrcc progressive-mode*/
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
}
6078
/*
 * Newer MCRCC configuration: instead of always caching LAST/GOLDEN,
 * optionally (THODIYIL_MCRCC_CANVAS_ALGX) pick the two reference
 * frames whose decode index is closest to the current picture.
 *
 * Steps: reset the cache; bail out (clock off) for intra pictures;
 * read the three reference canvas ids; drop duplicate/invalid
 * references; sort the three candidates by |decode_idx delta| with a
 * 3-element sorting network; program the two best into CTL2/CTL3; then
 * enable progressive mode.
 */
static void config_mcrcc_axi_hw_new(struct VP9Decoder_s *pbi)
{
	u32 curr_picnum = -1;
	u32 lastref_picnum = -1;
	u32 goldenref_picnum = -1;
	u32 altref_picnum = -1;

	u32 lastref_delta_picnum;
	u32 goldenref_delta_picnum;
	u32 altref_delta_picnum;

	u32 rdata32;

	u32 lastcanvas;
	u32 goldencanvas;
	u32 altrefcanvas;

	u16 is_inter;
	u16 lastref_inref;
	u16 goldenref_inref;
	u16 altref_inref;

	u32 refcanvas_array[3], utmp;
	int deltapicnum_array[3], tmp;

	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *cur_pic_config
		= &cm->cur_frame->buf;
	curr_picnum = cur_pic_config->decode_idx;
	if (cm->frame_refs[0].buf)
		lastref_picnum = cm->frame_refs[0].buf->decode_idx;
	if (cm->frame_refs[1].buf)
		goldenref_picnum = cm->frame_refs[1].buf->decode_idx;
	if (cm->frame_refs[2].buf)
		altref_picnum = cm->frame_refs[2].buf->decode_idx;

	/* Absolute distance of each reference from the current picture. */
	lastref_delta_picnum = (lastref_picnum >= curr_picnum) ?
		(lastref_picnum - curr_picnum) : (curr_picnum - lastref_picnum);
	goldenref_delta_picnum = (goldenref_picnum >= curr_picnum) ?
		(goldenref_picnum - curr_picnum) :
		(curr_picnum - goldenref_picnum);
	altref_delta_picnum =
		(altref_picnum >= curr_picnum) ?
		(altref_picnum - curr_picnum) : (curr_picnum - altref_picnum);

	lastref_inref = (cm->frame_refs[0].idx != INVALID_IDX) ? 1 : 0;
	goldenref_inref = (cm->frame_refs[1].idx != INVALID_IDX) ? 1 : 0;
	altref_inref = (cm->frame_refs[2].idx != INVALID_IDX) ? 1 : 0;

	if (debug & VP9_DEBUG_CACHE)
		pr_info("%s--0--lastref_inref:%d goldenref_inref:%d altref_inref:%d\n",
			__func__, lastref_inref, goldenref_inref, altref_inref);

	WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */

	is_inter = ((pbi->common.frame_type != KEY_FRAME)
		&& (!pbi->common.intra_only)) ? 1 : 0;

	if (!is_inter) { /* I-PIC */
		/* remove reset -- disables clock */
		WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
		return;
	}

	if (!pbi->m_ins_flag)
		dump_hit_rate(pbi);

	/* Read the three reference canvas ids back-to-back. */
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR, (0 << 8) | (1<<1) | 0);
	lastcanvas = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
	goldencanvas = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
	altrefcanvas = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);

	if (debug & VP9_DEBUG_CACHE)
		pr_info("[test.c] lastref_canv:%x goldenref_canv:%x altref_canv:%x\n",
			lastcanvas, goldencanvas, altrefcanvas);

	/* Drop references that share a canvas with a higher-priority one
	 * (alt vs golden/last, golden vs last).
	 */
	altref_inref = ((altref_inref == 1) &&
		(altrefcanvas != (goldenref_inref
		? goldencanvas : 0xffffffff)) &&
		(altrefcanvas != (lastref_inref ?
		lastcanvas : 0xffffffff))) ? 1 : 0;
	goldenref_inref = ((goldenref_inref == 1) &&
		(goldencanvas != (lastref_inref ?
		lastcanvas : 0xffffffff))) ? 1 : 0;
	if (debug & VP9_DEBUG_CACHE)
		pr_info("[test.c]--1--lastref_inref:%d goldenref_inref:%d altref_inref:%d\n",
			lastref_inref, goldenref_inref, altref_inref);

	/* Invalid/dropped references sort last via a huge delta. */
	altref_delta_picnum = altref_inref ? altref_delta_picnum : 0x7fffffff;
	goldenref_delta_picnum = goldenref_inref ?
		goldenref_delta_picnum : 0x7fffffff;
	lastref_delta_picnum = lastref_inref ?
		lastref_delta_picnum : 0x7fffffff;
	if (debug & VP9_DEBUG_CACHE)
		pr_info("[test.c]--1--lastref_delta_picnum:%d goldenref_delta_picnum:%d altref_delta_picnum:%d\n",
			lastref_delta_picnum, goldenref_delta_picnum,
			altref_delta_picnum);
	/*ARRAY SORT HERE DELTA/CANVAS ARRAY SORT -- use DELTA*/

	refcanvas_array[0] = lastcanvas;
	refcanvas_array[1] = goldencanvas;
	refcanvas_array[2] = altrefcanvas;

	deltapicnum_array[0] = lastref_delta_picnum;
	deltapicnum_array[1] = goldenref_delta_picnum;
	deltapicnum_array[2] = altref_delta_picnum;

	/* sort0 : 2-to-1 */
	if (deltapicnum_array[2] < deltapicnum_array[1]) {
		utmp = refcanvas_array[2];
		refcanvas_array[2] = refcanvas_array[1];
		refcanvas_array[1] = utmp;
		tmp = deltapicnum_array[2];
		deltapicnum_array[2] = deltapicnum_array[1];
		deltapicnum_array[1] = tmp;
	}
	/* sort1 : 1-to-0 */
	if (deltapicnum_array[1] < deltapicnum_array[0]) {
		utmp = refcanvas_array[1];
		refcanvas_array[1] = refcanvas_array[0];
		refcanvas_array[0] = utmp;
		tmp = deltapicnum_array[1];
		deltapicnum_array[1] = deltapicnum_array[0];
		deltapicnum_array[0] = tmp;
	}
	/* sort2 : 2-to-1 */
	if (deltapicnum_array[2] < deltapicnum_array[1]) {
		utmp = refcanvas_array[2]; refcanvas_array[2] =
			refcanvas_array[1]; refcanvas_array[1] = utmp;
		tmp = deltapicnum_array[2]; deltapicnum_array[2] =
			deltapicnum_array[1]; deltapicnum_array[1] = tmp;
	}
	if (mcrcc_cache_alg_flag ==
		THODIYIL_MCRCC_CANVAS_ALGX) { /*09/15/2017*/
		/* lowest delta_picnum */
		rdata32 = refcanvas_array[0];
		rdata32 = rdata32 & 0xffff;
		rdata32 = rdata32 | (rdata32 << 16);
		WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);

		/* 2nd-lowest delta_picnum */
		rdata32 = refcanvas_array[1];
		rdata32 = rdata32 & 0xffff;
		rdata32 = rdata32 | (rdata32 << 16);
		WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
	} else {
		/* previous version -- LAST/GOLDEN ALWAYS -- before 09/13/2017*/
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
			(0 << 8) | (1<<1) | 0);
		rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
		rdata32 = rdata32 & 0xffff;
		rdata32 = rdata32 | (rdata32 << 16);
		WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);

		/* Programme canvas1 */
		rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
		rdata32 = rdata32 & 0xffff;
		rdata32 = rdata32 | (rdata32 << 16);
		WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
	}

	WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0); /* enable mcrcc progressive-mode */
	return;
}
6243
6244#endif
6245
6246
6247static void free_lf_buf(struct VP9Decoder_s *pbi)
6248{
6249 if (pbi->lfi)
6250 vfree(pbi->lfi);
6251 if (pbi->lf)
6252 vfree(pbi->lf);
6253 if (pbi->seg_4lf)
6254 vfree(pbi->seg_4lf);
6255 pbi->lfi = NULL;
6256 pbi->lf = NULL;
6257 pbi->seg_4lf = NULL;
6258}
6259
6260static int alloc_lf_buf(struct VP9Decoder_s *pbi)
6261{
6262 pbi->lfi = vmalloc(sizeof(struct loop_filter_info_n));
6263 pbi->lf = vmalloc(sizeof(struct loopfilter));
6264 pbi->seg_4lf = vmalloc(sizeof(struct segmentation));
6265 if (pbi->lfi == NULL || pbi->lf == NULL || pbi->seg_4lf == NULL) {
6266 free_lf_buf(pbi);
6267 pr_err("[test.c] vp9_loop_filter init malloc error!!!\n");
6268 return -1;
6269 }
6270 return 0;
6271}
6272
/*
 * Free everything vp9_local_init() allocated: the RPM, LMEM,
 * probability and count coherent DMA buffers, the per-frame MMU map
 * (when MMU mode is on), the optional stage buffers (FB decoding),
 * the loop-filter structures and the gvs stats block.
 *
 * Every pointer is NULLed after being freed, so calling this more than
 * once (or after a partially failed init) is safe.
 */
static void vp9_local_uninit(struct VP9Decoder_s *pbi)
{
	pbi->rpm_ptr = NULL;
	pbi->lmem_ptr = NULL;
	if (pbi->rpm_addr) {
		dma_free_coherent(amports_get_dma_device(),
					RPM_BUF_SIZE,
					pbi->rpm_addr,
					pbi->rpm_phy_addr);
		pbi->rpm_addr = NULL;
	}
	if (pbi->lmem_addr) {
		if (pbi->lmem_phy_addr)
			dma_free_coherent(amports_get_dma_device(),
				LMEM_BUF_SIZE, pbi->lmem_addr,
				pbi->lmem_phy_addr);
		pbi->lmem_addr = NULL;
	}
	if (pbi->prob_buffer_addr) {
		if (pbi->prob_buffer_phy_addr)
			dma_free_coherent(amports_get_dma_device(),
				PROB_BUF_SIZE, pbi->prob_buffer_addr,
				pbi->prob_buffer_phy_addr);

		pbi->prob_buffer_addr = NULL;
	}
	if (pbi->count_buffer_addr) {
		if (pbi->count_buffer_phy_addr)
			dma_free_coherent(amports_get_dma_device(),
				COUNT_BUF_SIZE, pbi->count_buffer_addr,
				pbi->count_buffer_phy_addr);

		pbi->count_buffer_addr = NULL;
	}
	if (pbi->mmu_enable) {
		/* size must match what vp9_local_init() asked for */
		u32 mmu_map_size = vvp9_frame_mmu_map_size(pbi);
		if (pbi->frame_mmu_map_addr) {
			if (pbi->frame_mmu_map_phy_addr)
				dma_free_coherent(amports_get_dma_device(),
					mmu_map_size,
					pbi->frame_mmu_map_addr,
					pbi->frame_mmu_map_phy_addr);
			pbi->frame_mmu_map_addr = NULL;
		}
	}
#ifdef SUPPORT_FB_DECODING
	if (pbi->stage_mmu_map_addr) {
		if (pbi->stage_mmu_map_phy_addr)
			dma_free_coherent(amports_get_dma_device(),
				STAGE_MMU_MAP_SIZE * STAGE_MAX_BUFFERS,
				pbi->stage_mmu_map_addr,
				pbi->stage_mmu_map_phy_addr);
		pbi->stage_mmu_map_addr = NULL;
	}

	uninit_stage_buf(pbi);
#endif

#ifdef VP9_LPF_LVL_UPDATE
	free_lf_buf(pbi);
#endif
	if (pbi->gvs)
		vfree(pbi->gvs);
	pbi->gvs = NULL;
}
6338
6339static int vp9_local_init(struct VP9Decoder_s *pbi)
6340{
6341 int ret = -1;
6342 /*int losless_comp_header_size, losless_comp_body_size;*/
6343
6344 struct BuffInfo_s *cur_buf_info = NULL;
6345
6346 memset(&pbi->param, 0, sizeof(union param_u));
6347 memset(&pbi->common, 0, sizeof(struct VP9_Common_s));
6348#ifdef MULTI_INSTANCE_SUPPORT
6349 cur_buf_info = &pbi->work_space_buf_store;
6350
6351 if (vdec_is_support_4k()) {
6352 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
6353 memcpy(cur_buf_info, &amvvp9_workbuff_spec[2], /* 8k */
6354 sizeof(struct BuffInfo_s));
6355 } else
6356 memcpy(cur_buf_info, &amvvp9_workbuff_spec[1], /* 4k */
6357 sizeof(struct BuffInfo_s));
6358 } else
6359 memcpy(cur_buf_info, &amvvp9_workbuff_spec[0],/* 1080p */
6360 sizeof(struct BuffInfo_s));
6361
6362 cur_buf_info->start_adr = pbi->buf_start;
6363 if (!pbi->mmu_enable)
6364 pbi->mc_buf_spec.buf_end = pbi->buf_start + pbi->buf_size;
6365
6366#else
6367/*! MULTI_INSTANCE_SUPPORT*/
6368 if (vdec_is_support_4k()) {
6369 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
6370 cur_buf_info = &amvvp9_workbuff_spec[2];/* 8k work space */
6371 else
6372 cur_buf_info = &amvvp9_workbuff_spec[1];/* 4k2k work space */
6373 } else
6374 cur_buf_info = &amvvp9_workbuff_spec[0];/* 1080p work space */
6375
6376#endif
6377
6378 init_buff_spec(pbi, cur_buf_info);
6379 vp9_bufmgr_init(pbi, cur_buf_info, NULL);
6380
6381 if (!vdec_is_support_4k()
6382 && (buf_alloc_width > 1920 && buf_alloc_height > 1088)) {
6383 buf_alloc_width = 1920;
6384 buf_alloc_height = 1088;
6385 if (pbi->max_pic_w > 1920 && pbi->max_pic_h > 1088) {
6386 pbi->max_pic_w = 1920;
6387 pbi->max_pic_h = 1088;
6388 }
6389 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
6390 buf_alloc_width = 8192;
6391 buf_alloc_height = 4608;
6392 }
6393 pbi->init_pic_w = pbi->max_pic_w ? pbi->max_pic_w :
6394 (buf_alloc_width ? buf_alloc_width :
6395 (pbi->vvp9_amstream_dec_info.width ?
6396 pbi->vvp9_amstream_dec_info.width :
6397 pbi->work_space_buf->max_width));
6398 pbi->init_pic_h = pbi->max_pic_h ? pbi->max_pic_h :
6399 (buf_alloc_height ? buf_alloc_height :
6400 (pbi->vvp9_amstream_dec_info.height ?
6401 pbi->vvp9_amstream_dec_info.height :
6402 pbi->work_space_buf->max_height));
6403
6404 pbi->mem_map_mode = mem_map_mode ? mem_map_mode : 0;
6405
6406 /* video is not support unaligned with 64 in tl1
6407 ** vdec canvas mode will be linear when dump yuv is set
6408 */
6409 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) &&
6410 (pbi->double_write_mode != 0) &&
6411 (((pbi->max_pic_w % 64) != 0) ||
6412 (pbi->vvp9_amstream_dec_info.width % 64) != 0)) {
6413 if (hw_to_vdec(pbi)->canvas_mode !=
6414 CANVAS_BLKMODE_LINEAR)
6415 pbi->mem_map_mode = 2;
6416 else {
6417 pbi->mem_map_mode = 0;
6418 pr_info("vdec blkmod linear, force mem_map_mode 0\n");
6419 }
6420 }
6421
6422#ifndef MV_USE_FIXED_BUF
6423 if (init_mv_buf_list(pbi) < 0) {
6424 pr_err("%s: init_mv_buf_list fail\n", __func__);
6425 return -1;
6426 }
6427#endif
6428 if (pbi->save_buffer_mode)
6429 pbi->used_buf_num = MAX_BUF_NUM_SAVE_BUF;
6430 else
6431 pbi->used_buf_num = max_buf_num;
6432
6433 if (pbi->used_buf_num > MAX_BUF_NUM)
6434 pbi->used_buf_num = MAX_BUF_NUM;
6435 if (pbi->used_buf_num > FRAME_BUFFERS)
6436 pbi->used_buf_num = FRAME_BUFFERS;
6437
6438 pbi->pts_unstable = ((unsigned long)(pbi->vvp9_amstream_dec_info.param)
6439 & 0x40) >> 6;
6440
6441 if ((debug & VP9_DEBUG_SEND_PARAM_WITH_REG) == 0) {
6442 pbi->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
6443 RPM_BUF_SIZE,
6444 &pbi->rpm_phy_addr, GFP_KERNEL);
6445 if (pbi->rpm_addr == NULL) {
6446 pr_err("%s: failed to alloc rpm buffer\n", __func__);
6447 return -1;
6448 }
6449
6450 pbi->rpm_ptr = pbi->rpm_addr;
6451 }
6452
6453 pbi->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
6454 LMEM_BUF_SIZE,
6455 &pbi->lmem_phy_addr, GFP_KERNEL);
6456 if (pbi->lmem_addr == NULL) {
6457 pr_err("%s: failed to alloc lmem buffer\n", __func__);
6458 return -1;
6459 }
6460 pbi->lmem_ptr = pbi->lmem_addr;
6461
6462 pbi->prob_buffer_addr = dma_alloc_coherent(amports_get_dma_device(),
6463 PROB_BUF_SIZE,
6464 &pbi->prob_buffer_phy_addr, GFP_KERNEL);
6465 if (pbi->prob_buffer_addr == NULL) {
6466 pr_err("%s: failed to alloc prob_buffer\n", __func__);
6467 return -1;
6468 }
6469 memset(pbi->prob_buffer_addr, 0, PROB_BUF_SIZE);
6470 pbi->count_buffer_addr = dma_alloc_coherent(amports_get_dma_device(),
6471 COUNT_BUF_SIZE,
6472 &pbi->count_buffer_phy_addr, GFP_KERNEL);
6473 if (pbi->count_buffer_addr == NULL) {
6474 pr_err("%s: failed to alloc count_buffer\n", __func__);
6475 return -1;
6476 }
6477 memset(pbi->count_buffer_addr, 0, COUNT_BUF_SIZE);
6478
6479 if (pbi->mmu_enable) {
6480 u32 mmu_map_size = vvp9_frame_mmu_map_size(pbi);
6481 pbi->frame_mmu_map_addr =
6482 dma_alloc_coherent(amports_get_dma_device(),
6483 mmu_map_size,
6484 &pbi->frame_mmu_map_phy_addr, GFP_KERNEL);
6485 if (pbi->frame_mmu_map_addr == NULL) {
6486 pr_err("%s: failed to alloc count_buffer\n", __func__);
6487 return -1;
6488 }
6489 memset(pbi->frame_mmu_map_addr, 0, COUNT_BUF_SIZE);
6490 }
6491#ifdef SUPPORT_FB_DECODING
6492 if (pbi->m_ins_flag && stage_buf_num > 0) {
6493 pbi->stage_mmu_map_addr =
6494 dma_alloc_coherent(amports_get_dma_device(),
6495 STAGE_MMU_MAP_SIZE * STAGE_MAX_BUFFERS,
6496 &pbi->stage_mmu_map_phy_addr, GFP_KERNEL);
6497 if (pbi->stage_mmu_map_addr == NULL) {
6498 pr_err("%s: failed to alloc count_buffer\n", __func__);
6499 return -1;
6500 }
6501 memset(pbi->stage_mmu_map_addr,
6502 0, STAGE_MMU_MAP_SIZE * STAGE_MAX_BUFFERS);
6503
6504 init_stage_buf(pbi);
6505 }
6506#endif
6507
6508 ret = 0;
6509 return ret;
6510}
6511
/********************************************
 * Mailbox command
 ********************************************/
/* command codes exchanged with the decoder firmware via the mailbox */
#define CMD_FINISHED 0
#define CMD_ALLOC_VIEW 1
#define CMD_FRAME_DISPLAY 3
#define CMD_DEBUG 10


#define DECODE_BUFFER_NUM_MAX 32
#define DISPLAY_BUFFER_NUM 6

/* strip bit 31 to map a bus address into the video address domain */
#define video_domain_addr(adr) (adr&0x7fffffff)
#define DECODER_WORK_SPACE_SIZE 0x800000

/* pack canvas indexes into the vframe canvasAddr word:
 * uv index in bits 23:16 and 15:8 (duplicated), y index in bits 7:0
 */
#define spec2canvas(x) \
	(((x)->uv_canvas_index << 16) | \
	((x)->uv_canvas_index << 8) | \
	((x)->y_canvas_index << 0))
6531
6532
/*
 * Configure the display canvases for a picture's double-write
 * (uncompressed) output plane.  No-op when double_write_mode == 0
 * (compressed-only output).
 *
 * The canvas size is the cropped picture size scaled down by the
 * double-write ratio, then aligned: width to 32 (mem_map_mode 0) or 64
 * (otherwise), height to 32.  Two canvases are programmed: Y and
 * interleaved UV.  With parallel decode the indexes are handed out by
 * the vdec core; otherwise fixed indexes 128 + 2*index(+1) are used.
 */
static void set_canvas(struct VP9Decoder_s *pbi,
	struct PIC_BUFFER_CONFIG_s *pic_config)
{
	struct vdec_s *vdec = hw_to_vdec(pbi);
	int canvas_w = ALIGN(pic_config->y_crop_width, 64)/4;
	int canvas_h = ALIGN(pic_config->y_crop_height, 32)/4;
	int blkmode = pbi->mem_map_mode;
	/*CANVAS_BLKMODE_64X32*/
	if (pic_config->double_write_mode) {
		canvas_w = pic_config->y_crop_width /
				get_double_write_ratio(pbi,
					pic_config->double_write_mode);
		canvas_h = pic_config->y_crop_height /
				get_double_write_ratio(pbi,
					pic_config->double_write_mode);

		if (pbi->mem_map_mode == 0)
			canvas_w = ALIGN(canvas_w, 32);
		else
			canvas_w = ALIGN(canvas_w, 64);
		canvas_h = ALIGN(canvas_h, 32);

		if (vdec->parallel_dec == 1) {
			/* lazily obtain canvas ids from the vdec core */
			if (pic_config->y_canvas_index == -1)
				pic_config->y_canvas_index =
					vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
			if (pic_config->uv_canvas_index == -1)
				pic_config->uv_canvas_index =
					vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
		} else {
			pic_config->y_canvas_index = 128 + pic_config->index * 2;
			pic_config->uv_canvas_index = 128 + pic_config->index * 2 + 1;
		}

		canvas_config_ex(pic_config->y_canvas_index,
			pic_config->dw_y_adr, canvas_w, canvas_h,
			CANVAS_ADDR_NOWRAP, blkmode, 0x7);
		canvas_config_ex(pic_config->uv_canvas_index,
			pic_config->dw_u_v_adr, canvas_w, canvas_h,
			CANVAS_ADDR_NOWRAP, blkmode, 0x7);

#ifdef MULTI_INSTANCE_SUPPORT
		/* mirror the canvas setup into the pic so the vframe
		 * receiver can reconstruct it (see prepare_display_buf)
		 */
		pic_config->canvas_config[0].phy_addr =
				pic_config->dw_y_adr;
		pic_config->canvas_config[0].width =
				canvas_w;
		pic_config->canvas_config[0].height =
				canvas_h;
		pic_config->canvas_config[0].block_mode =
				blkmode;
		pic_config->canvas_config[0].endian = 7;

		pic_config->canvas_config[1].phy_addr =
				pic_config->dw_u_v_adr;
		pic_config->canvas_config[1].width =
				canvas_w;
		pic_config->canvas_config[1].height =
				canvas_h;
		pic_config->canvas_config[1].block_mode =
				blkmode;
		pic_config->canvas_config[1].endian = 7;
#endif
	}
}
6597
6598
6599static void set_frame_info(struct VP9Decoder_s *pbi, struct vframe_s *vf)
6600{
6601 unsigned int ar;
6602 vf->duration = pbi->frame_dur;
6603 vf->duration_pulldown = 0;
6604 vf->flag = 0;
6605 vf->prop.master_display_colour = pbi->vf_dp;
6606 vf->signal_type = pbi->video_signal_type;
6607 if (vf->compWidth && vf->compHeight)
6608 pbi->frame_ar = vf->compHeight * 0x100 / vf->compWidth;
6609 ar = min_t(u32, pbi->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
6610 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
6611
6612}
6613
6614static int vvp9_vf_states(struct vframe_states *states, void *op_arg)
6615{
6616 struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)op_arg;
6617
6618 states->vf_pool_size = VF_POOL_SIZE;
6619 states->buf_free_num = kfifo_len(&pbi->newframe_q);
6620 states->buf_avail_num = kfifo_len(&pbi->display_q);
6621
6622 if (step == 2)
6623 states->buf_avail_num = 0;
6624 return 0;
6625}
6626
/*
 * vframe provider .vf_peek: return the head of display_q without
 * dequeuing it, or NULL when the queue is empty (or single-step
 * debugging has stalled delivery, step == 2).
 *
 * Two entries are peeked at once so next_vf_pts(_valid) of the head
 * frame can be primed from the frame behind it.
 */
static struct vframe_s *vvp9_vf_peek(void *op_arg)
{
	struct vframe_s *vf[2] = {0, 0};
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)op_arg;

	if (step == 2)
		return NULL;

	/* peek up to 2 pointers; vf[1] stays NULL if only one queued */
	if (kfifo_out_peek(&pbi->display_q, (void *)&vf, 2)) {
		if (vf[1]) {
			vf[0]->next_vf_pts_valid = true;
			vf[0]->next_vf_pts = vf[1]->pts;
		} else
			vf[0]->next_vf_pts_valid = false;
		return vf[0];
	}

	return NULL;
}
6646
/*
 * vframe provider .vf_get: pop the next decoded frame off display_q.
 *
 * Single-step debug aid: step == 1 means "deliver exactly one frame",
 * so it is advanced to 2 here; step == 2 blocks all delivery until the
 * value is changed externally.
 *
 * next_vf_pts(_valid) is refreshed by peeking at the following frame.
 * Returns NULL when nothing is available or the index sanity check
 * fails (note: a frame failing that check is dropped, not requeued).
 */
static struct vframe_s *vvp9_vf_get(void *op_arg)
{
	struct vframe_s *vf;
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)op_arg;

	if (step == 2)
		return NULL;
	else if (step == 1)
		step = 2;

	if (kfifo_get(&pbi->display_q, &vf)) {
		struct vframe_s *next_vf;
		/* low byte of vf->index is the frame-buffer index
		 * (0xff00 is OR-ed in by prepare_display_buf)
		 */
		uint8_t index = vf->index & 0xff;
		if (index < pbi->used_buf_num ||
			(vf->type & VIDTYPE_V4L_EOS)) {
			pbi->vf_get_count++;
			if (debug & VP9_DEBUG_BUFMGR)
				pr_info("%s type 0x%x w/h %d/%d, pts %d, %lld\n",
					__func__, vf->type,
					vf->width, vf->height,
					vf->pts,
					vf->pts_us64);

			if (kfifo_peek(&pbi->display_q, &next_vf)) {
				vf->next_vf_pts_valid = true;
				vf->next_vf_pts = next_vf->pts;
			} else
				vf->next_vf_pts_valid = false;

			return vf;
		}
	}
	return NULL;
}
6681
/*
 * vframe provider .vf_put: the receiver returns a displayed frame.
 *
 * The vframe is recycled onto newframe_q.  If it maps to a real frame
 * buffer (index < used_buf_num) the buffer's vf_ref is decremented
 * under the buffer-pool lock and, if the decoder was stalled waiting
 * for a free buffer, it is kicked via the mailbox IRQ register.
 */
static void vvp9_vf_put(struct vframe_s *vf, void *op_arg)
{
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)op_arg;
	uint8_t index = vf->index & 0xff;

	kfifo_put(&pbi->newframe_q, (const struct vframe_s *)vf);
	pbi->vf_put_count++;
	if (index < pbi->used_buf_num) {
		struct VP9_Common_s *cm = &pbi->common;
		struct BufferPool_s *pool = cm->buffer_pool;
		unsigned long flags;

		lock_buffer_pool(pool, flags);
		if (pool->frame_bufs[index].buf.vf_ref > 0)
			pool->frame_bufs[index].buf.vf_ref--;

		/* wake the decoder if it is waiting for a free buffer */
		if (pbi->wait_buf)
			WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
						0x1);
		pbi->last_put_idx = index;
		pbi->new_frame_displayed++;
		unlock_buffer_pool(pool, flags);
#ifdef SUPPORT_FB_DECODING
		/* a freed buffer may unblock the back-end stage */
		if (pbi->used_stage_buf_num > 0 &&
			pbi->back_not_run_ready)
			trigger_schedule(pbi);
#endif
	}

}
6712
6713static int vvp9_event_cb(int type, void *data, void *private_data)
6714{
6715 if (type & VFRAME_EVENT_RECEIVER_RESET) {
6716#if 0
6717 unsigned long flags;
6718
6719 amhevc_stop();
6720#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
6721 vf_light_unreg_provider(&vvp9_vf_prov);
6722#endif
6723 spin_lock_irqsave(&pbi->lock, flags);
6724 vvp9_local_init();
6725 vvp9_prot_init();
6726 spin_unlock_irqrestore(&pbi->lock, flags);
6727#ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
6728 vf_reg_provider(&vvp9_vf_prov);
6729#endif
6730 amhevc_start();
6731#endif
6732 }
6733
6734 return 0;
6735}
6736
6737void inc_vf_ref(struct VP9Decoder_s *pbi, int index)
6738{
6739 struct VP9_Common_s *cm = &pbi->common;
6740
6741 cm->buffer_pool->frame_bufs[index].buf.vf_ref++;
6742
6743 if (debug & VP9_DEBUG_BUFMGR_MORE)
6744 pr_info("%s index = %d new vf_ref = %d\r\n",
6745 __func__, index,
6746 cm->buffer_pool->frame_bufs[index].buf.vf_ref);
6747}
6748
/*
 * Estimate the real frame duration from incoming PTS values, used when
 * the stream's nominal duration is unreliable (pbi->pts_unstable).
 *
 * A window of frame_cnt_window frames is measured between two valid
 * PTSes (pts1 .. pts2) and
 *	pts_duration = (pts2 - pts1) * 16 / (frame_cnt_window * 15)
 * is derived.  The estimate replaces pbi->frame_dur when it is close
 * to previous values; duration_from_pts_done latches once it is
 * considered stable (within RATE_CORRECTION_THRESHOLD of the old one).
 *
 * Returns true once a PTS baseline exists (or the duration is already
 * known), false while still waiting for the first valid PTS.
 */
static int frame_duration_adapt(struct VP9Decoder_s *pbi, struct vframe_s *vf, u32 valid)
{
	u32 old_duration, pts_duration = 0;
	u32 pts = vf->pts;

	if (pbi->get_frame_dur == true)
		return true;

	pbi->frame_cnt_window++;
	if (!(pbi->vp9_first_pts_ready == 1)) {
		/* first valid PTS anchors the measurement window */
		if (valid) {
			pbi->pts1 = pts;
			pbi->frame_cnt_window = 0;
			pbi->duration_from_pts_done = 0;
			pbi->vp9_first_pts_ready = 1;
		} else {
			return false;
		}
	} else {
		/* PTS went backwards (loop/discontinuity): re-anchor */
		if (pts < pbi->pts1) {
			if (pbi->frame_cnt_window > FRAME_CNT_WINDOW_SIZE) {
				pbi->pts1 = pts;
				pbi->frame_cnt_window = 0;
			}
		}

		if (valid && (pbi->frame_cnt_window > FRAME_CNT_WINDOW_SIZE) &&
			(pts > pbi->pts1) && (pbi->duration_from_pts_done == 0)) {
			old_duration = pbi->frame_dur;
			pbi->pts2 = pts;
			pts_duration = (((pbi->pts2 - pbi->pts1) * 16) /
				(pbi->frame_cnt_window * 15));

			if (close_to(pts_duration, old_duration, 2000)) {
				pbi->frame_dur = pts_duration;
				if ((debug & VP9_DEBUG_OUT_PTS) != 0)
					pr_info("use calc duration %d\n", pts_duration);
			}

			if (pbi->duration_from_pts_done == 0) {
				if (close_to(pts_duration, old_duration, RATE_CORRECTION_THRESHOLD)) {
					pbi->duration_from_pts_done = 1;
				} else {
					/* estimate disagrees with the nominal
					 * duration but matches the previous
					 * estimate: trust the measurement
					 */
					if (!close_to(pts_duration,
						 old_duration, 1000) &&
						!close_to(pts_duration,
						pbi->frame_dur, 1000) &&
						close_to(pts_duration,
						pbi->last_duration, 200)) {
						/* frame_dur must
						 * wrong,recover it.
						 */
						pbi->frame_dur = pts_duration;
					}
					/* restart the window for another try */
					pbi->pts1 = pbi->pts2;
					pbi->frame_cnt_window = 0;
					pbi->duration_from_pts_done = 0;
				}
			}
			pbi->last_duration = pts_duration;
		}
	}
	return true;
}
6813
6814static void update_vf_memhandle(struct VP9Decoder_s *pbi,
6815 struct vframe_s *vf, struct PIC_BUFFER_CONFIG_s *pic)
6816{
6817 if (pic->index < 0) {
6818 vf->mem_handle = NULL;
6819 vf->mem_head_handle = NULL;
6820 } else if (vf->type & VIDTYPE_SCATTER) {
6821 vf->mem_handle =
6822 decoder_mmu_box_get_mem_handle(
6823 pbi->mmu_box, pic->index);
6824 vf->mem_head_handle =
6825 decoder_bmmu_box_get_mem_handle(
6826 pbi->bmmu_box,
6827 HEADER_BUFFER_IDX(pic->BUF_index));
6828 } else {
6829 vf->mem_handle =
6830 decoder_bmmu_box_get_mem_handle(
6831 pbi->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
6832 vf->mem_head_handle = NULL;
6833 /*vf->mem_head_handle =
6834 *decoder_bmmu_box_get_mem_handle(
6835 *hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));
6836 */
6837 }
6838}
6839
6840static int prepare_display_buf(struct VP9Decoder_s *pbi,
6841 struct PIC_BUFFER_CONFIG_s *pic_config)
6842{
6843 struct vframe_s *vf = NULL;
6844 int stream_offset = pic_config->stream_offset;
6845 unsigned short slice_type = pic_config->slice_type;
6846 u32 pts_valid = 0, pts_us64_valid = 0;
6847 u32 pts_save;
6848 u64 pts_us64_save;
6849 u32 frame_size;
6850
6851 if (debug & VP9_DEBUG_BUFMGR)
6852 pr_info("%s index = %d\r\n", __func__, pic_config->index);
6853 if (kfifo_get(&pbi->newframe_q, &vf) == 0) {
6854 pr_info("fatal error, no available buffer slot.");
6855 return -1;
6856 }
6857
6858 if (pic_config->double_write_mode)
6859 set_canvas(pbi, pic_config);
6860
6861 display_frame_count[pbi->index]++;
6862 if (vf) {
6863 if (pbi->is_used_v4l) {
6864 vf->v4l_mem_handle
6865 = pbi->m_BUF[pic_config->BUF_index].v4l_ref_buf_addr;
6866 if (pbi->mmu_enable) {
6867 if (vdec_v4l_binding_fd_and_vf(vf->v4l_mem_handle, vf) < 0) {
6868 vp9_print(pbi, PRINT_FLAG_V4L_DETAIL,
6869 "v4l: binding vf fail.\n");
6870 return -1;
6871 }
6872 }
6873 vp9_print(pbi, PRINT_FLAG_V4L_DETAIL,
6874 "[%d] %s(), v4l mem handle: 0x%lx\n",
6875 ((struct aml_vcodec_ctx *)(pbi->v4l2_ctx))->id,
6876 __func__, vf->v4l_mem_handle);
6877 }
6878
6879#ifdef MULTI_INSTANCE_SUPPORT
6880 if (vdec_frame_based(hw_to_vdec(pbi))) {
6881 vf->pts = pic_config->pts;
6882 vf->pts_us64 = pic_config->pts64;
6883 vf->timestamp = pic_config->timestamp;
6884 if (vf->pts != 0 || vf->pts_us64 != 0) {
6885 pts_valid = 1;
6886 pts_us64_valid = 1;
6887 } else {
6888 pts_valid = 0;
6889 pts_us64_valid = 0;
6890 }
6891 } else
6892#endif
6893 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
6894 * stream_offset, &vf->pts, 0) != 0) {
6895 */
6896 if (pts_lookup_offset_us64
6897 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
6898 &frame_size, 0,
6899 &vf->pts_us64) != 0) {
6900#ifdef DEBUG_PTS
6901 pbi->pts_missed++;
6902#endif
6903 vf->pts = 0;
6904 vf->pts_us64 = 0;
6905 pts_valid = 0;
6906 pts_us64_valid = 0;
6907 } else {
6908#ifdef DEBUG_PTS
6909 pbi->pts_hit++;
6910#endif
6911 pts_valid = 1;
6912 pts_us64_valid = 1;
6913 }
6914
6915 fill_frame_info(pbi, pic_config, frame_size, vf->pts);
6916
6917 pts_save = vf->pts;
6918 pts_us64_save = vf->pts_us64;
6919 if (pbi->pts_unstable) {
6920 frame_duration_adapt(pbi, vf, pts_valid);
6921 if (pbi->duration_from_pts_done) {
6922 pbi->pts_mode = PTS_NONE_REF_USE_DURATION;
6923 } else {
6924 if (pts_valid || pts_us64_valid)
6925 pbi->pts_mode = PTS_NORMAL;
6926 }
6927 }
6928
6929 if ((pbi->pts_mode == PTS_NORMAL) && (vf->pts != 0)
6930 && pbi->get_frame_dur) {
6931 int pts_diff = (int)vf->pts - pbi->last_lookup_pts;
6932
6933 if (pts_diff < 0) {
6934 pbi->pts_mode_switching_count++;
6935 pbi->pts_mode_recovery_count = 0;
6936
6937 if (pbi->pts_mode_switching_count >=
6938 PTS_MODE_SWITCHING_THRESHOLD) {
6939 pbi->pts_mode =
6940 PTS_NONE_REF_USE_DURATION;
6941 pr_info
6942 ("HEVC: switch to n_d mode.\n");
6943 }
6944
6945 } else {
6946 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
6947
6948 pbi->pts_mode_recovery_count++;
6949 if (pbi->pts_mode_recovery_count > p) {
6950 pbi->pts_mode_switching_count = 0;
6951 pbi->pts_mode_recovery_count = 0;
6952 }
6953 }
6954 }
6955
6956 if (vf->pts != 0)
6957 pbi->last_lookup_pts = vf->pts;
6958
6959 if ((pbi->pts_mode == PTS_NONE_REF_USE_DURATION)
6960 && (slice_type != KEY_FRAME))
6961 vf->pts = pbi->last_pts + DUR2PTS(pbi->frame_dur);
6962 pbi->last_pts = vf->pts;
6963
6964 if (vf->pts_us64 != 0)
6965 pbi->last_lookup_pts_us64 = vf->pts_us64;
6966
6967 if ((pbi->pts_mode == PTS_NONE_REF_USE_DURATION)
6968 && (slice_type != KEY_FRAME)) {
6969 vf->pts_us64 =
6970 pbi->last_pts_us64 +
6971 (DUR2PTS(pbi->frame_dur) * 100 / 9);
6972 }
6973 pbi->last_pts_us64 = vf->pts_us64;
6974 if ((debug & VP9_DEBUG_OUT_PTS) != 0) {
6975 pr_info
6976 ("VP9 dec out pts: pts_mode=%d,dur=%d,pts(%d,%lld)(%d,%lld)\n",
6977 pbi->pts_mode, pbi->frame_dur, vf->pts,
6978 vf->pts_us64, pts_save, pts_us64_save);
6979 }
6980
6981 if (pbi->pts_mode == PTS_NONE_REF_USE_DURATION) {
6982 vf->disp_pts = vf->pts;
6983 vf->disp_pts_us64 = vf->pts_us64;
6984 vf->pts = pts_save;
6985 vf->pts_us64 = pts_us64_save;
6986 } else {
6987 vf->disp_pts = 0;
6988 vf->disp_pts_us64 = 0;
6989 }
6990
6991 vf->index = 0xff00 | pic_config->index;
6992
6993 if (pic_config->double_write_mode & 0x10) {
6994 /* double write only */
6995 vf->compBodyAddr = 0;
6996 vf->compHeadAddr = 0;
6997 } else {
6998 if (pbi->mmu_enable) {
6999 vf->compBodyAddr = 0;
7000 vf->compHeadAddr = pic_config->header_adr;
7001 } else {
7002 /*vf->compBodyAddr = pic_config->mc_y_adr;
7003 *vf->compHeadAddr = pic_config->mc_y_adr +
7004 *pic_config->comp_body_size; */
7005 /*head adr*/
7006 }
7007 vf->canvas0Addr = vf->canvas1Addr = 0;
7008 }
7009 if (pic_config->double_write_mode) {
7010 vf->type = VIDTYPE_PROGRESSIVE |
7011 VIDTYPE_VIU_FIELD;
7012 vf->type |= VIDTYPE_VIU_NV21;
7013 if ((pic_config->double_write_mode == 3) &&
7014 (!IS_8K_SIZE(pic_config->y_crop_width,
7015 pic_config->y_crop_height))) {
7016 vf->type |= VIDTYPE_COMPRESS;
7017 if (pbi->mmu_enable)
7018 vf->type |= VIDTYPE_SCATTER;
7019 }
7020#ifdef MULTI_INSTANCE_SUPPORT
7021 if (pbi->m_ins_flag) {
7022 vf->canvas0Addr = vf->canvas1Addr = -1;
7023 vf->plane_num = 2;
7024 vf->canvas0_config[0] =
7025 pic_config->canvas_config[0];
7026 vf->canvas0_config[1] =
7027 pic_config->canvas_config[1];
7028 vf->canvas1_config[0] =
7029 pic_config->canvas_config[0];
7030 vf->canvas1_config[1] =
7031 pic_config->canvas_config[1];
7032
7033 } else
7034#endif
7035 vf->canvas0Addr = vf->canvas1Addr =
7036 spec2canvas(pic_config);
7037 } else {
7038 vf->canvas0Addr = vf->canvas1Addr = 0;
7039 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
7040 if (pbi->mmu_enable)
7041 vf->type |= VIDTYPE_SCATTER;
7042 }
7043
7044 switch (pic_config->bit_depth) {
7045 case VPX_BITS_8:
7046 vf->bitdepth = BITDEPTH_Y8 |
7047 BITDEPTH_U8 | BITDEPTH_V8;
7048 break;
7049 case VPX_BITS_10:
7050 case VPX_BITS_12:
7051 vf->bitdepth = BITDEPTH_Y10 |
7052 BITDEPTH_U10 | BITDEPTH_V10;
7053 break;
7054 default:
7055 vf->bitdepth = BITDEPTH_Y10 |
7056 BITDEPTH_U10 | BITDEPTH_V10;
7057 break;
7058 }
7059 if ((vf->type & VIDTYPE_COMPRESS) == 0)
7060 vf->bitdepth =
7061 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
7062 if (pic_config->bit_depth == VPX_BITS_8)
7063 vf->bitdepth |= BITDEPTH_SAVING_MODE;
7064
7065 /* if((vf->width!=pic_config->width)|
7066 * (vf->height!=pic_config->height))
7067 */
7068 /* pr_info("aaa: %d/%d, %d/%d\n",
7069 vf->width,vf->height, pic_config->width,
7070 pic_config->height); */
7071 vf->width = pic_config->y_crop_width /
7072 get_double_write_ratio(pbi,
7073 pic_config->double_write_mode);
7074 vf->height = pic_config->y_crop_height /
7075 get_double_write_ratio(pbi,
7076 pic_config->double_write_mode);
7077 if (force_w_h != 0) {
7078 vf->width = (force_w_h >> 16) & 0xffff;
7079 vf->height = force_w_h & 0xffff;
7080 }
7081 vf->compWidth = pic_config->y_crop_width;
7082 vf->compHeight = pic_config->y_crop_height;
7083 set_frame_info(pbi, vf);
7084 if (force_fps & 0x100) {
7085 u32 rate = force_fps & 0xff;
7086
7087 if (rate)
7088 vf->duration = 96000/rate;
7089 else
7090 vf->duration = 0;
7091 }
7092 update_vf_memhandle(pbi, vf, pic_config);
7093 if (!(pic_config->y_crop_width == 196
7094 && pic_config->y_crop_height == 196
7095 && (debug & VP9_DEBUG_NO_TRIGGER_FRAME) == 0
7096 && (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_TXLX))) {
7097 inc_vf_ref(pbi, pic_config->index);
7098 decoder_do_frame_check(hw_to_vdec(pbi), vf);
7099 kfifo_put(&pbi->display_q, (const struct vframe_s *)vf);
7100 ATRACE_COUNTER(MODULE_NAME, vf->pts);
7101 pbi->vf_pre_count++;
7102#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7103 /*count info*/
7104 gvs->frame_dur = pbi->frame_dur;
7105 vdec_count_info(gvs, 0, stream_offset);
7106#endif
7107 hw_to_vdec(pbi)->vdec_fps_detec(hw_to_vdec(pbi)->id);
7108 if (without_display_mode == 0) {
7109 vf_notify_receiver(pbi->provider_name,
7110 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
7111 } else
7112 vvp9_vf_put(vvp9_vf_get(pbi), pbi);
7113 } else {
7114 pbi->stat |= VP9_TRIGGER_FRAME_DONE;
7115 hevc_source_changed(VFORMAT_VP9, 196, 196, 30);
7116 pr_debug("[%s %d] drop trigger frame width %d height %d state 0x%x\n",
7117 __func__, __LINE__, vf->width,
7118 vf->height, pbi->stat);
7119 }
7120 }
7121
7122 return 0;
7123}
7124
/*
 * Queue an empty end-of-stream vframe to the v4l receiver so it can
 * flush out the last decoded frames.  Only acts when the instance runs
 * in v4l mode and EOS has been detected.
 *
 * Returns 0 on success (or when no EOS action is needed), -1 when no
 * spare vframe or no v4l capture buffer is available.
 */
static int notify_v4l_eos(struct vdec_s *vdec)
{
	struct VP9Decoder_s *hw = (struct VP9Decoder_s *)vdec->private;
	struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
	struct vframe_s *vf = NULL;
	struct vdec_v4l2_buffer *fb = NULL;

	if (hw->is_used_v4l && hw->eos) {
		if (kfifo_get(&hw->newframe_q, &vf) == 0 || vf == NULL) {
			vp9_print(hw, 0,
				"%s fatal error, no available buffer slot.\n",
				__func__);
			return -1;
		}

		if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb)) {
			pr_err("[%d] get fb fail.\n", ctx->id);
			return -1;
		}

		/* mark as EOS carrier: no pixel data, max timestamp */
		vf->type |= VIDTYPE_V4L_EOS;
		vf->timestamp = ULONG_MAX;
		vf->v4l_mem_handle = (unsigned long)fb;
		vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;

		kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
		vf_notify_receiver(vdec->vf_provider_name,
			VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);

		pr_info("[%d] VP9 EOS notify.\n", ctx->id);
	}

	return 0;
}
7159
/*
 * Read the 128 16-bit RPM parameter words from the decoder firmware.
 *
 * Each word is handshaken through RPM_CMD_REG: busy-wait until bit 16
 * flags valid data, keep the low 16 bits, then write 0 to acknowledge.
 * NOTE(review): the poll loop has no timeout; wedged firmware would
 * spin here indefinitely.
 */
static void get_rpm_param(union param_u *params)
{
	int i;
	unsigned int data32;

	if (debug & VP9_DEBUG_BUFMGR)
		pr_info("enter %s\r\n", __func__);
	for (i = 0; i < 128; i++) {
		do {
			data32 = READ_VREG(RPM_CMD_REG);
			/*pr_info("%x\n", data32);*/
		} while ((data32 & 0x10000) == 0);
		params->l.data[i] = data32&0xffff;
		/*pr_info("%x\n", data32);*/
		WRITE_VREG(RPM_CMD_REG, 0);
	}
	if (debug & VP9_DEBUG_BUFMGR)
		pr_info("leave %s\r\n", __func__);
}
/*
 * Debug dump of the RPM parameter block received from the firmware:
 * first the raw 16-bit words, then the decoded uncompressed-header
 * fields.  Gated on VP9_DEBUG_BUFMGR_MORE; the loop-filter related
 * fields are additionally gated on VP9_DEBUG_DBG_LF_PRINT.
 */
static void debug_buffer_mgr_more(struct VP9Decoder_s *pbi)
{
	int i;

	if (!(debug & VP9_DEBUG_BUFMGR_MORE))
		return;
	pr_info("vp9_param: (%d)\n", pbi->slice_idx);
	/* raw dump, 16 words per line */
	for (i = 0; i < (RPM_END-RPM_BEGIN); i++) {
		pr_info("%04x ", vp9_param.l.data[i]);
		if (((i + 1) & 0xf) == 0)
			pr_info("\n");
	}
	pr_info("=============param==========\r\n");
	pr_info("profile               %x\r\n", vp9_param.p.profile);
	pr_info("show_existing_frame   %x\r\n",
	vp9_param.p.show_existing_frame);
	pr_info("frame_to_show_idx     %x\r\n",
	vp9_param.p.frame_to_show_idx);
	pr_info("frame_type            %x\r\n", vp9_param.p.frame_type);
	pr_info("show_frame            %x\r\n", vp9_param.p.show_frame);
	pr_info("e.r.r.o.r_resilient_mode  %x\r\n",
	vp9_param.p.error_resilient_mode);
	pr_info("intra_only            %x\r\n", vp9_param.p.intra_only);
	pr_info("display_size_present  %x\r\n",
	vp9_param.p.display_size_present);
	pr_info("reset_frame_context   %x\r\n",
	vp9_param.p.reset_frame_context);
	pr_info("refresh_frame_flags   %x\r\n",
	vp9_param.p.refresh_frame_flags);
	pr_info("bit_depth             %x\r\n", vp9_param.p.bit_depth);
	pr_info("width                 %x\r\n", vp9_param.p.width);
	pr_info("height                %x\r\n", vp9_param.p.height);
	pr_info("display_width         %x\r\n", vp9_param.p.display_width);
	pr_info("display_height        %x\r\n", vp9_param.p.display_height);
	pr_info("ref_info              %x\r\n", vp9_param.p.ref_info);
	pr_info("same_frame_size       %x\r\n", vp9_param.p.same_frame_size);
	if (!(debug & VP9_DEBUG_DBG_LF_PRINT))
		return;
	/* loop-filter / segmentation parameters */
	pr_info("mode_ref_delta_enabled: 0x%x\r\n",
	vp9_param.p.mode_ref_delta_enabled);
	pr_info("sharpness_level: 0x%x\r\n",
	vp9_param.p.sharpness_level);
	pr_info("ref_deltas: 0x%x, 0x%x, 0x%x, 0x%x\r\n",
	vp9_param.p.ref_deltas[0], vp9_param.p.ref_deltas[1],
	vp9_param.p.ref_deltas[2], vp9_param.p.ref_deltas[3]);
	pr_info("mode_deltas: 0x%x, 0x%x\r\n", vp9_param.p.mode_deltas[0],
	vp9_param.p.mode_deltas[1]);
	pr_info("filter_level: 0x%x\r\n", vp9_param.p.filter_level);
	pr_info("seg_enabled: 0x%x\r\n", vp9_param.p.seg_enabled);
	pr_info("seg_abs_delta: 0x%x\r\n", vp9_param.p.seg_abs_delta);
	/* per-segment LF enable bit is bit 15 of each seg_lf_info word */
	pr_info("seg_lf_feature_enabled: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\r\n",
	(vp9_param.p.seg_lf_info[0]>>15 & 1),
	(vp9_param.p.seg_lf_info[1]>>15 & 1),
	(vp9_param.p.seg_lf_info[2]>>15 & 1),
	(vp9_param.p.seg_lf_info[3]>>15 & 1),
	(vp9_param.p.seg_lf_info[4]>>15 & 1),
	(vp9_param.p.seg_lf_info[5]>>15 & 1),
	(vp9_param.p.seg_lf_info[6]>>15 & 1),
	(vp9_param.p.seg_lf_info[7]>>15 & 1));
	pr_info("seg_lf_feature_data: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\r\n",
	(vp9_param.p.seg_lf_info[0] & 0x13f),
	(vp9_param.p.seg_lf_info[1] & 0x13f),
	(vp9_param.p.seg_lf_info[2] & 0x13f),
	(vp9_param.p.seg_lf_info[3] & 0x13f),
	(vp9_param.p.seg_lf_info[4] & 0x13f),
	(vp9_param.p.seg_lf_info[5] & 0x13f),
	(vp9_param.p.seg_lf_info[6] & 0x13f),
	(vp9_param.p.seg_lf_info[7] & 0x13f));

}
7249
7250
/*
 * After a frame finishes decoding, return the unused tail of its MMU
 * page map to the mmu box: only the first used_4k_num 4K pages are
 * actually referenced by the compressed frame.
 *
 * used_4k_num == -1 means "not read yet", in which case it is taken
 * from HEVC_SAO_MMU_STATUS bits 31:16 (NOTE(review): register field
 * semantics inferred from this usage -- confirm against the HEVC SAO
 * documentation).  No-op in pure double-write mode (0x10), where no
 * compressed buffer exists.
 */
static void vp9_recycle_mmu_buf_tail(struct VP9Decoder_s *pbi)
{
	struct VP9_Common_s *const cm = &pbi->common;
	if (pbi->double_write_mode & 0x10)
		return;
	if (cm->cur_fb_idx_mmu != INVALID_IDX) {
		if (pbi->used_4k_num == -1) {
			pbi->used_4k_num =
			(READ_VREG(HEVC_SAO_MMU_STATUS) >> 16);
			if (pbi->m_ins_flag)
				hevc_mmu_dma_check(hw_to_vdec(pbi));
		}
		decoder_mmu_box_free_idx_tail(pbi->mmu_box,
			cm->cur_fb_idx_mmu, pbi->used_4k_num);
		cm->cur_fb_idx_mmu = INVALID_IDX;
		pbi->used_4k_num = -1;
	}
}
7269
7270#ifdef MULTI_INSTANCE_SUPPORT
/*
 * Release the entire MMU page allocation of the current frame buffer
 * (used when a frame is abandoned, e.g. bufmgr error or decode-again),
 * as opposed to vp9_recycle_mmu_buf_tail() which frees only the unused
 * tail of a completed frame.  No-op in pure double-write mode (0x10).
 */
static void vp9_recycle_mmu_buf(struct VP9Decoder_s *pbi)
{
	struct VP9_Common_s *const cm = &pbi->common;
	if (pbi->double_write_mode & 0x10)
		return;
	if (cm->cur_fb_idx_mmu != INVALID_IDX) {
		decoder_mmu_box_free_idx(pbi->mmu_box,
			cm->cur_fb_idx_mmu);

		cm->cur_fb_idx_mmu = INVALID_IDX;
		pbi->used_4k_num = -1;
	}
}
7284#endif
7285
7286
/*
 * Put the instance into "try again later" state: stop the HEVC core,
 * flag DEC_RESULT_AGAIN and reschedule via the work queue.  If a slice
 * was mid-decode, switch to PROC_STATE_SENDAGAIN and (in MMU mode)
 * recycle the current frame's MMU pages so they can be re-allocated
 * when the slice is resubmitted.
 */
static void dec_again_process(struct VP9Decoder_s *pbi)
{
	amhevc_stop();
	pbi->dec_result = DEC_RESULT_AGAIN;
	if (pbi->process_state ==
		PROC_STATE_DECODESLICE) {
		pbi->process_state =
		PROC_STATE_SENDAGAIN;
		if (pbi->mmu_enable)
			vp9_recycle_mmu_buf(pbi);
	}
	reset_process_time(pbi);
	vdec_schedule_work(&pbi->work);
}
7301
/*
 * continue_decoding() - run frame-buffer management for the parsed frame
 * header and program the decode hardware for slice decoding.
 *
 * Returns the vp9_bufmgr_process() result: < 0 on failure (the NAL is
 * discarded), 0 when a frame buffer is set up and slice decoding is
 * started, > 0 when this frame is skipped and the next start code is
 * searched instead.
 */
int continue_decoding(struct VP9Decoder_s *pbi)
{
	int ret;
	int i;
	struct VP9_Common_s *const cm = &pbi->common;
	debug_buffer_mgr_more(pbi);

	bit_depth_luma = vp9_param.p.bit_depth;
	bit_depth_chroma = vp9_param.p.bit_depth;

	/* 10-bit (and above) streams cannot use double-write mode 0x10. */
	if ((vp9_param.p.bit_depth >= VPX_BITS_10) &&
		(get_double_write_mode(pbi) == 0x10)) {
		pbi->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
		pr_err("fatal err, bit_depth %d, unsupport dw 0x10\n",
			vp9_param.p.bit_depth);
		return -1;
	}

	if (pbi->process_state != PROC_STATE_SENDAGAIN) {
		/* Fresh header: run the full buffer-manager pass. */
		ret = vp9_bufmgr_process(pbi, &vp9_param);
		if (!pbi->m_ins_flag)
			pbi->slice_idx++;
	} else {
		/*
		 * Re-sent header (DEC_RESULT_AGAIN path): the bufmgr state
		 * is already set up, only the MMU pages for the current
		 * frame need to be re-allocated.
		 */
		union param_u *params = &vp9_param;
		if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) {
			ret = vp9_alloc_mmu(pbi,
				cm->new_fb_idx,
				params->p.width,
				params->p.height,
				params->p.bit_depth,
				pbi->frame_mmu_map_addr);
			if (ret >= 0)
				cm->cur_fb_idx_mmu = cm->new_fb_idx;
			else
				pr_err("can't alloc need mmu1,idx %d ret =%d\n",
					cm->new_fb_idx,
					ret);
		} else {
			ret = 0;
		}
		WRITE_VREG(HEVC_PARSER_PICTURE_SIZE,
			(params->p.height << 16) | params->p.width);
	}
	if (ret < 0) {
		/* Bufmgr failed: discard this NAL and finish the run. */
		pr_info("vp9_bufmgr_process=> %d, VP9_10B_DISCARD_NAL\r\n",
			ret);
		WRITE_VREG(HEVC_DEC_STATUS_REG, VP9_10B_DISCARD_NAL);
		cm->show_frame = 0;
		if (pbi->mmu_enable)
			vp9_recycle_mmu_buf(pbi);
#ifdef MULTI_INSTANCE_SUPPORT
		if (pbi->m_ins_flag) {
			pbi->dec_result = DEC_RESULT_DONE;
#ifdef SUPPORT_FB_DECODING
			if (pbi->used_stage_buf_num == 0)
#endif
				amhevc_stop();
			vdec_schedule_work(&pbi->work);
		}
#endif
		return ret;
	} else if (ret == 0) {
		/* A frame buffer was assigned: configure HW and decode. */
		struct PIC_BUFFER_CONFIG_s *cur_pic_config
			= &cm->cur_frame->buf;
		cur_pic_config->decode_idx = pbi->frame_count;

		if (pbi->process_state != PROC_STATE_SENDAGAIN) {
			if (!pbi->m_ins_flag) {
				pbi->frame_count++;
				decode_frame_count[pbi->index]
					= pbi->frame_count;
			}
#ifdef MULTI_INSTANCE_SUPPORT
			/* Propagate timestamps from the input chunk. */
			if (pbi->chunk) {
				cur_pic_config->pts = pbi->chunk->pts;
				cur_pic_config->pts64 = pbi->chunk->pts64;
				cur_pic_config->timestamp = pbi->chunk->timestamp;
			}
#endif
		}
		/*pr_info("Decode Frame Data %d\n", pbi->frame_count);*/
		config_pic_size(pbi, vp9_param.p.bit_depth);

		/* Inter frames need motion-comp buffers and mpred HW. */
		if ((pbi->common.frame_type != KEY_FRAME)
			&& (!pbi->common.intra_only)) {
			config_mc_buffer(pbi, vp9_param.p.bit_depth);
#ifdef SUPPORT_FB_DECODING
			if (pbi->used_stage_buf_num == 0)
#endif
				config_mpred_hw(pbi);
		} else {
#ifdef SUPPORT_FB_DECODING
			if (pbi->used_stage_buf_num == 0)
#endif
				clear_mpred_hw(pbi);
		}
#ifdef MCRCC_ENABLE
		if (mcrcc_cache_alg_flag)
			config_mcrcc_axi_hw_new(pbi);
		else
			config_mcrcc_axi_hw(pbi);
#endif
		config_sao_hw(pbi, &vp9_param);

#ifdef VP9_LPF_LVL_UPDATE
		/*
		 * Get loop filter related picture level parameters from Parser
		 */
		pbi->lf->mode_ref_delta_enabled = vp9_param.p.mode_ref_delta_enabled;
		pbi->lf->sharpness_level = vp9_param.p.sharpness_level;
		for (i = 0; i < 4; i++)
			pbi->lf->ref_deltas[i] = vp9_param.p.ref_deltas[i];
		for (i = 0; i < 2; i++)
			pbi->lf->mode_deltas[i] = vp9_param.p.mode_deltas[i];
		pbi->default_filt_lvl = vp9_param.p.filter_level;
		pbi->seg_4lf->enabled = vp9_param.p.seg_enabled;
		pbi->seg_4lf->abs_delta = vp9_param.p.seg_abs_delta;
		/* Bit 15 of seg_lf_info = "segment LF feature enabled". */
		for (i = 0; i < MAX_SEGMENTS; i++)
			pbi->seg_4lf->feature_mask[i] = (vp9_param.p.seg_lf_info[i] &
			0x8000) ? (1 << SEG_LVL_ALT_LF) : 0;
		/* Bit 8 = sign, bits [5:0] = magnitude of the LF delta. */
		for (i = 0; i < MAX_SEGMENTS; i++)
			pbi->seg_4lf->feature_data[i][SEG_LVL_ALT_LF]
				= (vp9_param.p.seg_lf_info[i]
				& 0x100) ? -(vp9_param.p.seg_lf_info[i]
				& 0x3f) : (vp9_param.p.seg_lf_info[i] & 0x3f);
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
			/*Set pipeline mode*/
			uint32_t lpf_data32 = READ_VREG(HEVC_DBLK_CFGB);
			/*dblk pipeline mode=1 for performance*/
			if (vp9_param.p.width >= 1280)
				lpf_data32 |= (0x1 << 4);
			else
				lpf_data32 &= ~(0x3 << 4);
			WRITE_VREG(HEVC_DBLK_CFGB, lpf_data32);
		}
		/*
		 * Update loop filter Thr/Lvl table for every frame
		 */
		/*pr_info
		("vp9_loop_filter (run before every frame decoding start)\n");*/
		vp9_loop_filter_frame_init(pbi->seg_4lf,
			pbi->lfi, pbi->lf, pbi->default_filt_lvl);
#endif
		/*pr_info("HEVC_DEC_STATUS_REG <= VP9_10B_DECODE_SLICE\n");*/
		WRITE_VREG(HEVC_DEC_STATUS_REG, VP9_10B_DECODE_SLICE);
	} else {
		/* ret > 0: frame not decoded, search next start code. */
		pr_info("Skip search next start code\n");
		cm->prev_fb_idx = INVALID_IDX;
		/*skip, search next start code*/
		WRITE_VREG(HEVC_DEC_STATUS_REG, VP9_10B_DECODE_SLICE);
	}
	pbi->process_state = PROC_STATE_DECODESLICE;
	if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) {
		if (pbi->last_put_idx < pbi->used_buf_num) {
			struct RefCntBuffer_s *frame_bufs =
				cm->buffer_pool->frame_bufs;
			int i = pbi->last_put_idx;
			/*free not used buffers.*/
			if ((frame_bufs[i].ref_count == 0) &&
				(frame_bufs[i].buf.vf_ref == 0) &&
				(frame_bufs[i].buf.index != -1)) {
				decoder_mmu_box_free_idx(pbi->mmu_box, i);
			}
			pbi->last_put_idx = -1;
		}
	}
	return ret;
}
7470
7471static void fill_frame_info(struct VP9Decoder_s *pbi,
7472 struct PIC_BUFFER_CONFIG_s *frame,
7473 unsigned int framesize,
7474 unsigned int pts)
7475{
7476 struct vframe_qos_s *vframe_qos = &pbi->vframe_qos;
7477
7478 if (frame->slice_type == KEY_FRAME)
7479 vframe_qos->type = 1;
7480 else if (frame->slice_type == INTER_FRAME)
7481 vframe_qos->type = 2;
7482/*
7483#define SHOW_QOS_INFO
7484*/
7485 vframe_qos->size = framesize;
7486 vframe_qos->pts = pts;
7487#ifdef SHOW_QOS_INFO
7488 vp9_print(pbi, 0, "slice:%d\n", frame->slice_type);
7489#endif
7490 vframe_qos->max_mv = frame->max_mv;
7491 vframe_qos->avg_mv = frame->avg_mv;
7492 vframe_qos->min_mv = frame->min_mv;
7493#ifdef SHOW_QOS_INFO
7494 vp9_print(pbi, 0, "mv: max:%d, avg:%d, min:%d\n",
7495 vframe_qos->max_mv,
7496 vframe_qos->avg_mv,
7497 vframe_qos->min_mv);
7498#endif
7499 vframe_qos->max_qp = frame->max_qp;
7500 vframe_qos->avg_qp = frame->avg_qp;
7501 vframe_qos->min_qp = frame->min_qp;
7502#ifdef SHOW_QOS_INFO
7503 vp9_print(pbi, 0, "qp: max:%d, avg:%d, min:%d\n",
7504 vframe_qos->max_qp,
7505 vframe_qos->avg_qp,
7506 vframe_qos->min_qp);
7507#endif
7508 vframe_qos->max_skip = frame->max_skip;
7509 vframe_qos->avg_skip = frame->avg_skip;
7510 vframe_qos->min_skip = frame->min_skip;
7511#ifdef SHOW_QOS_INFO
7512 vp9_print(pbi, 0, "skip: max:%d, avg:%d, min:%d\n",
7513 vframe_qos->max_skip,
7514 vframe_qos->avg_skip,
7515 vframe_qos->min_skip);
7516#endif
7517 vframe_qos->num++;
7518
7519 if (pbi->frameinfo_enable)
7520 vdec_fill_frame_info(vframe_qos, 1);
7521}
7522
/*
 * get_picture_qos_info() - collect per-picture QoS statistics (motion
 * vectors, QP, skip rate) from hardware registers into the current
 * picture buffer. Call only after one field/frame has been decoded.
 *
 * Pre-G12A SoCs expose packed min/mid/max triples in HEVC_MV_INFO /
 * HEVC_QP_INFO / HEVC_SKIP_INFO; G12A and later expose a sequential
 * read-out FIFO through HEVC_PIC_QUALITY_DATA whose read ORDER matters —
 * do not reorder the READ_VREG calls in the else-branch.
 */
static void get_picture_qos_info(struct VP9Decoder_s *pbi)
{
	struct PIC_BUFFER_CONFIG_s *frame = &pbi->cur_buf->buf;

	/*
	 * NOTE(review): frame is the address of a member embedded in
	 * *pbi->cur_buf, so this check can only fail if cur_buf itself is
	 * NULL (then the line above already dereferenced it) — confirm
	 * whether the intended check was !pbi->cur_buf.
	 */
	if (!frame)
		return;

	if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
		unsigned char a[3];
		unsigned char i, j, t;
		unsigned long data;

		/* Packed MV stats: bytes 0/1/2 of HEVC_MV_INFO. */
		data = READ_VREG(HEVC_MV_INFO);
		if (frame->slice_type == KEY_FRAME)
			data = 0;
		a[0] = data & 0xff;
		a[1] = (data >> 8) & 0xff;
		a[2] = (data >> 16) & 0xff;

		/*
		 * Sort the three samples ascending; equal values are
		 * deliberately perturbed (a[i]++) before swapping.
		 */
		for (i = 0; i < 3; i++) {
			for (j = i+1; j < 3; j++) {
				if (a[j] < a[i]) {
					t = a[j];
					a[j] = a[i];
					a[i] = t;
				} else if (a[j] == a[i]) {
					a[i]++;
					t = a[j];
					a[j] = a[i];
					a[i] = t;
				}
			}
		}
		frame->max_mv = a[2];
		frame->avg_mv = a[1];
		frame->min_mv = a[0];

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
			data, a[0], a[1], a[2]);

		/* Packed QP stats (different field widths per sample). */
		data = READ_VREG(HEVC_QP_INFO);
		a[0] = data & 0x1f;
		a[1] = (data >> 8) & 0x3f;
		a[2] = (data >> 16) & 0x7f;

		/* Same sort-with-perturbation as above. */
		for (i = 0; i < 3; i++) {
			for (j = i+1; j < 3; j++) {
				if (a[j] < a[i]) {
					t = a[j];
					a[j] = a[i];
					a[i] = t;
				} else if (a[j] == a[i]) {
					a[i]++;
					t = a[j];
					a[j] = a[i];
					a[i] = t;
				}
			}
		}
		frame->max_qp = a[2];
		frame->avg_qp = a[1];
		frame->min_qp = a[0];

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
			data, a[0], a[1], a[2]);

		/* Packed skip stats. */
		data = READ_VREG(HEVC_SKIP_INFO);
		a[0] = data & 0x1f;
		a[1] = (data >> 8) & 0x3f;
		a[2] = (data >> 16) & 0x7f;

		/* Same sort-with-perturbation as above. */
		for (i = 0; i < 3; i++) {
			for (j = i+1; j < 3; j++) {
				if (a[j] < a[i]) {
					t = a[j];
					a[j] = a[i];
					a[i] = t;
				} else if (a[j] == a[i]) {
					a[i]++;
					t = a[j];
					a[j] = a[i];
					a[i] = t;
				}
			}
		}
		frame->max_skip = a[2];
		frame->avg_skip = a[1];
		frame->min_skip = a[0];

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
			data, a[0], a[1], a[2]);
	} else {
		uint32_t blk88_y_count;
		uint32_t blk88_c_count;
		uint32_t blk22_mv_count;
		uint32_t rdata32;
		int32_t mv_hi;
		int32_t mv_lo;
		uint32_t rdata32_l;
		uint32_t mvx_L0_hi;
		uint32_t mvy_L0_hi;
		uint32_t mvx_L1_hi;
		uint32_t mvy_L1_hi;
		int64_t value;
		uint64_t temp_value;
		int pic_number = frame->decode_idx;

		frame->max_mv = 0;
		frame->avg_mv = 0;
		frame->min_mv = 0;

		frame->max_skip = 0;
		frame->avg_skip = 0;
		frame->min_skip = 0;

		frame->max_qp = 0;
		frame->avg_qp = 0;
		frame->min_qp = 0;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO, "slice_type:%d, poc:%d\n",
			frame->slice_type,
			pic_number);

		/* set rd_idx to 0 */
		WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);

		blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
		if (blk88_y_count == 0) {

			vp9_print(pbi, VP9_DEBUG_QOS_INFO,
				"[Picture %d Quality] NO Data yet.\n",
				pic_number);

			/* reset all counts */
			WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
			return;
		}
		/* qp_y_sum */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
			pic_number, rdata32/blk88_y_count,
			rdata32, blk88_y_count);

		frame->avg_qp = rdata32/blk88_y_count;
		/* intra_y_count */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] Y intra rate : %d%c (%d)\n",
			pic_number, rdata32*100/blk88_y_count,
			'%', rdata32);

		/* skipped_y_count */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
			pic_number, rdata32*100/blk88_y_count,
			'%', rdata32);

		frame->avg_skip = rdata32*100/blk88_y_count;
		/* coeff_non_zero_y_count */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
			pic_number, (100 - rdata32*100/(blk88_y_count*1)),
			'%', rdata32);

		/* blk66_c_count */
		blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
		if (blk88_c_count == 0) {
			vp9_print(pbi, VP9_DEBUG_QOS_INFO,
				"[Picture %d Quality] NO Data yet.\n",
				pic_number);
			/* reset all counts */
			WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
			return;
		}
		/* qp_c_sum */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
			pic_number, rdata32/blk88_c_count,
			rdata32, blk88_c_count);

		/* intra_c_count */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] C intra rate : %d%c (%d)\n",
			pic_number, rdata32*100/blk88_c_count,
			'%', rdata32);

		/* skipped_cu_c_count */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] C skipped rate : %d%c (%d)\n",
			pic_number, rdata32*100/blk88_c_count,
			'%', rdata32);

		/* coeff_non_zero_c_count */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
			pic_number, (100 - rdata32*100/(blk88_c_count*1)),
			'%', rdata32);

		/* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
		1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] Y QP min : %d\n",
			pic_number, (rdata32>>0)&0xff);

		frame->min_qp = (rdata32>>0)&0xff;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] Y QP max : %d\n",
			pic_number, (rdata32>>8)&0xff);

		frame->max_qp = (rdata32>>8)&0xff;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] C QP min : %d\n",
			pic_number, (rdata32>>16)&0xff);
		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] C QP max : %d\n",
			pic_number, (rdata32>>24)&0xff);

		/* blk22_mv_count */
		blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
		if (blk22_mv_count == 0) {
			vp9_print(pbi, VP9_DEBUG_QOS_INFO,
				"[Picture %d Quality] NO MV Data yet.\n",
				pic_number);
			/* reset all counts */
			WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
			return;
		}
		/* mvy_L1_count[39:32], mvx_L1_count[39:32],
		mvy_L0_count[39:32], mvx_L0_count[39:32] */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
		/* should all be 0x00 or 0xff */
		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MV AVG High Bits: 0x%X\n",
			pic_number, rdata32);

		mvx_L0_hi = ((rdata32>>0)&0xff);
		mvy_L0_hi = ((rdata32>>8)&0xff);
		mvx_L1_hi = ((rdata32>>16)&0xff);
		mvy_L1_hi = ((rdata32>>24)&0xff);

		/* mvx_L0_count[31:0] */
		rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
		temp_value = mvx_L0_hi;
		temp_value = (temp_value << 32) | rdata32_l;

		/*
		 * Sign-extend the 40-bit HW counter to 64 bits.
		 * NOTE(review): the mask 0xFFFFFFF000000000 also forces
		 * bits 39:36 of the value to 1; for a true 40-bit negative
		 * count the expected mask would be 0xFFFFFF0000000000 —
		 * verify against the HW spec (it may guarantee the high
		 * byte is only 0x00 or 0xff, per the comment above).
		 */
		if (mvx_L0_hi & 0x80)
			value = 0xFFFFFFF000000000 | temp_value;
		else
			value = temp_value;

		value = div_s64(value, blk22_mv_count);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
			pic_number, (int)value,
			value, blk22_mv_count);

		frame->avg_mv = value;

		/* mvy_L0_count[31:0] */
		rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
		temp_value = mvy_L0_hi;
		temp_value = (temp_value << 32) | rdata32_l;

		if (mvy_L0_hi & 0x80)
			value = 0xFFFFFFF000000000 | temp_value;
		else
			value = temp_value;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
			pic_number, rdata32_l/blk22_mv_count,
			value, blk22_mv_count);

		/* mvx_L1_count[31:0] */
		rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
		temp_value = mvx_L1_hi;
		temp_value = (temp_value << 32) | rdata32_l;
		if (mvx_L1_hi & 0x80)
			value = 0xFFFFFFF000000000 | temp_value;
		else
			value = temp_value;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
			pic_number, rdata32_l/blk22_mv_count,
			value, blk22_mv_count);

		/* mvy_L1_count[31:0] */
		rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
		temp_value = mvy_L1_hi;
		temp_value = (temp_value << 32) | rdata32_l;
		if (mvy_L1_hi & 0x80)
			value = 0xFFFFFFF000000000 | temp_value;
		else
			value = temp_value;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
			pic_number, rdata32_l/blk22_mv_count,
			value, blk22_mv_count);

		/* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
		mv_hi = (rdata32>>16)&0xffff;
		if (mv_hi & 0x8000)
			mv_hi = 0x8000 - mv_hi;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVX_L0 MAX : %d\n",
			pic_number, mv_hi);

		frame->max_mv = mv_hi;

		mv_lo = (rdata32>>0)&0xffff;
		if (mv_lo & 0x8000)
			mv_lo = 0x8000 - mv_lo;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVX_L0 MIN : %d\n",
			pic_number, mv_lo);

		frame->min_mv = mv_lo;

		/* {mvy_L0_max, mvy_L0_min} */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
		mv_hi = (rdata32>>16)&0xffff;
		if (mv_hi & 0x8000)
			mv_hi = 0x8000 - mv_hi;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVY_L0 MAX : %d\n",
			pic_number, mv_hi);

		mv_lo = (rdata32>>0)&0xffff;
		if (mv_lo & 0x8000)
			mv_lo = 0x8000 - mv_lo;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVY_L0 MIN : %d\n",
			pic_number, mv_lo);

		/* {mvx_L1_max, mvx_L1_min} */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
		mv_hi = (rdata32>>16)&0xffff;
		if (mv_hi & 0x8000)
			mv_hi = 0x8000 - mv_hi;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVX_L1 MAX : %d\n",
			pic_number, mv_hi);

		mv_lo = (rdata32>>0)&0xffff;
		if (mv_lo & 0x8000)
			mv_lo = 0x8000 - mv_lo;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVX_L1 MIN : %d\n",
			pic_number, mv_lo);

		/* {mvy_L1_max, mvy_L1_min} */
		rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
		mv_hi = (rdata32>>16)&0xffff;
		if (mv_hi & 0x8000)
			mv_hi = 0x8000 - mv_hi;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVY_L1 MAX : %d\n",
			pic_number, mv_hi);

		mv_lo = (rdata32>>0)&0xffff;
		if (mv_lo & 0x8000)
			mv_lo = 0x8000 - mv_lo;

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] MVY_L1 MIN : %d\n",
			pic_number, mv_lo);

		rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);

		vp9_print(pbi, VP9_DEBUG_QOS_INFO,
			"[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
			pic_number, rdata32);

		/* reset all counts */
		WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
	}
}
7935
/*
 * vvp9_isr_thread_fn() - threaded half of the VP9 decoder interrupt.
 *
 * Dispatches on pbi->dec_status (latched by the top-half vvp9_isr):
 * buffer-empty / NAL-done statuses trigger a retry or data request,
 * HEVC_DECPIC_DATA_DONE finishes the picture, VP9_EOS flushes, and
 * VP9_HEAD_PARSER_DONE copies the parsed header parameters from the RPM
 * area and kicks off continue_decoding().
 */
static irqreturn_t vvp9_isr_thread_fn(int irq, void *data)
{
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)data;
	unsigned int dec_status = pbi->dec_status;
	int i;

	/*if (pbi->wait_buf)
	 * pr_info("set wait_buf to 0\r\n");
	 */
	if (pbi->eos)
		return IRQ_HANDLED;
	pbi->wait_buf = 0;
#ifdef MULTI_INSTANCE_SUPPORT
#ifdef SUPPORT_FB_DECODING
#ifdef FB_DECODING_TEST_SCHEDULE
	/* Test hook: fake stage-1/stage-2 completion statuses. */
	if (pbi->s1_test_cmd == TEST_SET_PIC_DONE)
		dec_status = HEVC_DECPIC_DATA_DONE;
	else if (pbi->s1_test_cmd == TEST_SET_S2_DONE
		&& dec_status == HEVC_DECPIC_DATA_DONE)
		dec_status = HEVC_S2_DECODING_DONE;
	pbi->s1_test_cmd = TEST_SET_NONE;
#else
	/*if (irq != VDEC_IRQ_0)
	dec_status = HEVC_S2_DECODING_DONE;*/
#endif
	if (dec_status == HEVC_S2_DECODING_DONE) {
		pbi->dec_result = DEC_RESULT_DONE;
		vdec_schedule_work(&pbi->work);
#ifdef FB_DECODING_TEST_SCHEDULE
		amhevc_stop();
		pbi->dec_s1_result = DEC_S1_RESULT_DONE;
		vdec_schedule_work(&pbi->s1_work);
#endif
	} else
#endif
	/* Input exhausted or NAL finished: retry or request more data. */
	if ((dec_status == HEVC_NAL_DECODE_DONE) ||
			(dec_status == HEVC_SEARCH_BUFEMPTY) ||
			(dec_status == HEVC_DECODE_BUFEMPTY)
		) {
		if (pbi->m_ins_flag) {
			reset_process_time(pbi);
			if (!vdec_frame_based(hw_to_vdec(pbi)))
				dec_again_process(pbi);
			else {
				pbi->dec_result = DEC_RESULT_GET_DATA;
				vdec_schedule_work(&pbi->work);
			}
		}
		pbi->process_busy = 0;
		return IRQ_HANDLED;
	} else if (dec_status == HEVC_DECPIC_DATA_DONE) {
		/* A picture finished decoding. */
		if (pbi->m_ins_flag) {
			get_picture_qos_info(pbi);
#ifdef SUPPORT_FB_DECODING
			if (pbi->used_stage_buf_num > 0) {
				reset_process_time(pbi);
				inc_s1_pos(pbi);
				trigger_schedule(pbi);
#ifdef FB_DECODING_TEST_SCHEDULE
				pbi->s1_test_cmd = TEST_SET_S2_DONE;
#else
				amhevc_stop();
				pbi->dec_s1_result = DEC_S1_RESULT_DONE;
				vdec_schedule_work(&pbi->s1_work);
#endif
			} else
#endif
			{
				reset_process_time(pbi);
				if (pbi->vf_pre_count == 0 || pbi->low_latency_flag)
					vp9_bufmgr_postproc(pbi);

				pbi->dec_result = DEC_RESULT_DONE;
				amhevc_stop();
				if (mcrcc_cache_alg_flag)
					dump_hit_rate(pbi);
				vdec_schedule_work(&pbi->work);
			}
		} else {
			if (pbi->low_latency_flag) {
				vp9_bufmgr_postproc(pbi);
				WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
				vdec_profile(hw_to_vdec(pbi), VDEC_PROFILE_EVENT_CB);
				if (debug & PRINT_FLAG_VDEC_DETAIL)
					pr_info("%s VP9 frame done \n", __func__);
#endif
			}
		}

		pbi->process_busy = 0;
		return IRQ_HANDLED;
	}
#endif

	if (dec_status == VP9_EOS) {
		/* End of stream: flush any pending frames to the receiver. */
#ifdef MULTI_INSTANCE_SUPPORT
		if (pbi->m_ins_flag)
			reset_process_time(pbi);
#endif

		pr_info("VP9_EOS, flush buffer\r\n");

		vp9_bufmgr_postproc(pbi);

		pr_info("send VP9_10B_DISCARD_NAL\r\n");
		WRITE_VREG(HEVC_DEC_STATUS_REG, VP9_10B_DISCARD_NAL);
		pbi->process_busy = 0;
#ifdef MULTI_INSTANCE_SUPPORT
		if (pbi->m_ins_flag) {
			pbi->dec_result = DEC_RESULT_DONE;
			amhevc_stop();
			vdec_schedule_work(&pbi->work);
		}
#endif
		return IRQ_HANDLED;
	} else if (dec_status == HEVC_DECODE_OVER_SIZE) {
		/*
		 * Fatal oversize condition.
		 * NOTE(review): this path returns without clearing
		 * pbi->process_busy, so later interrupts will be ignored
		 * by the top half — confirm whether that is intentional.
		 */
		pr_info("vp9 decode oversize !!\n");
		debug |= (VP9_DEBUG_DIS_LOC_ERROR_PROC |
			VP9_DEBUG_DIS_SYS_ERROR_PROC);
		pbi->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
#ifdef MULTI_INSTANCE_SUPPORT
		if (pbi->m_ins_flag)
			reset_process_time(pbi);
#endif
		return IRQ_HANDLED;
	}

	/* Everything below handles only VP9_HEAD_PARSER_DONE. */
	if (dec_status != VP9_HEAD_PARSER_DONE) {
		pbi->process_busy = 0;
		return IRQ_HANDLED;
	}


#ifdef MULTI_INSTANCE_SUPPORT
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	if (pbi->m_ins_flag ==0 && pbi->low_latency_flag) {
		vdec_profile(hw_to_vdec(pbi), VDEC_PROFILE_EVENT_RUN);
		if (debug & PRINT_FLAG_VDEC_DETAIL)
			pr_info("%s VP9 frame header found \n", __func__);
	}
#endif
	if (pbi->m_ins_flag)
		reset_process_time(pbi);
#endif
	/* Recycle the previous frame's MMU tail before starting a new one. */
	if (pbi->process_state != PROC_STATE_SENDAGAIN
#ifdef SUPPORT_FB_DECODING
		&& pbi->used_stage_buf_num == 0
#endif
		) {
		if (pbi->mmu_enable)
			vp9_recycle_mmu_buf_tail(pbi);


		if (pbi->frame_count > 0)
			vp9_bufmgr_postproc(pbi);
	}

	/* Fetch the parsed header parameters into vp9_param. */
	if (debug & VP9_DEBUG_SEND_PARAM_WITH_REG) {
		get_rpm_param(&vp9_param);
	} else {
#ifdef SUPPORT_FB_DECODING
		if (pbi->used_stage_buf_num > 0) {
			reset_process_time(pbi);
			get_s1_buf(pbi);

			if (get_mv_buf(pbi,
				&pbi->s1_mv_buf_index,
				&pbi->s1_mpred_mv_wr_start_addr
				) < 0) {
				vp9_print(pbi, 0,
					"%s: Error get_mv_buf fail\n",
					__func__);
			}

			if (pbi->s1_buf == NULL) {
				vp9_print(pbi, 0,
					"%s: Error get_s1_buf fail\n",
					__func__);
				pbi->process_busy = 0;
				return IRQ_HANDLED;
			}

			/* RPM words arrive big-endian within groups of 4. */
			for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
				int ii;
				for (ii = 0; ii < 4; ii++) {
					pbi->s1_buf->rpm[i + 3 - ii] =
						pbi->rpm_ptr[i + 3 - ii];
					pbi->s1_param.l.data[i + ii] =
						pbi->rpm_ptr[i + 3 - ii];
				}
			}

			mpred_process(pbi);
#ifdef FB_DECODING_TEST_SCHEDULE
			pbi->dec_s1_result =
				DEC_S1_RESULT_TEST_TRIGGER_DONE;
			vdec_schedule_work(&pbi->s1_work);
#else
			WRITE_VREG(HEVC_ASSIST_FB_MMU_MAP_ADDR,
				pbi->stage_mmu_map_phy_addr +
				pbi->s1_buf->index * STAGE_MMU_MAP_SIZE);

			start_s1_decoding(pbi);
#endif
			start_process_time(pbi);
			pbi->process_busy = 0;
			return IRQ_HANDLED;
		} else
#endif
		{
			/* Byte-swap the RPM words into vp9_param. */
			for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
				int ii;
				for (ii = 0; ii < 4; ii++)
					vp9_param.l.data[i + ii] =
						pbi->rpm_ptr[i + 3 - ii];
			}
		}
	}

	/* v4l2 path: report picture info once the first header is parsed. */
	if (pbi->is_used_v4l) {
		struct aml_vcodec_ctx *ctx =
			(struct aml_vcodec_ctx *)(pbi->v4l2_ctx);

		pbi->frame_width = vp9_param.p.width;
		pbi->frame_height = vp9_param.p.height;
		if (ctx->param_sets_from_ucode && !pbi->v4l_params_parsed) {
			struct aml_vdec_pic_infos info;

			info.visible_width = pbi->frame_width;
			info.visible_height = pbi->frame_height;
			info.coded_width = ALIGN(pbi->frame_width, 32);
			info.coded_height = ALIGN(pbi->frame_height, 32);
			info.dpb_size = pbi->used_buf_num;
			pbi->v4l_params_parsed = true;
			vdec_v4l_set_pic_infos(ctx, &info);
		}
	}

	if (pbi->is_used_v4l) {
		/* Defer continue_decoding() to the work queue for v4l2. */
		pbi->dec_result = DEC_V4L2_CONTINUE_DECODING;
		vdec_schedule_work(&pbi->work);
	} else {
		continue_decoding(pbi);
		pbi->postproc_done = 0;
		pbi->process_busy = 0;
	}

#ifdef MULTI_INSTANCE_SUPPORT
	if (pbi->m_ins_flag)
		start_process_time(pbi);
#endif

	return IRQ_HANDLED;
}
8191
/*
 * vvp9_isr() - top-half VP9 decoder interrupt handler.
 *
 * Latches HEVC_DEC_STATUS_REG into pbi->dec_status, services ucode
 * debug taps and probability-adaptation requests inline, and returns
 * IRQ_WAKE_THREAD to hand the rest over to vvp9_isr_thread_fn().
 * pbi->process_busy is the re-entrancy guard between the two halves.
 */
static irqreturn_t vvp9_isr(int irq, void *data)
{
	int i;
	unsigned int dec_status;
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)data;
	unsigned int adapt_prob_status;
	struct VP9_Common_s *const cm = &pbi->common;
	uint debug_tag;

	/* Acknowledge the mailbox interrupt first. */
	WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);

	dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
	adapt_prob_status = READ_VREG(VP9_ADAPT_PROB_REG);
	if (!pbi)
		return IRQ_HANDLED;
	if (pbi->init_flag == 0)
		return IRQ_HANDLED;
	if (pbi->process_busy)/*on process.*/
		return IRQ_HANDLED;
	pbi->dec_status = dec_status;
	pbi->process_busy = 1;
	if (debug & VP9_DEBUG_BUFMGR)
		pr_info("vp9 isr (%d) dec status = 0x%x, lcu 0x%x shiftbyte 0x%x (%x %x lev %x, wr %x, rd %x)\n",
			irq,
			dec_status, READ_VREG(HEVC_PARSER_LCU_START),
			READ_VREG(HEVC_SHIFT_BYTE_COUNT),
			READ_VREG(HEVC_STREAM_START_ADDR),
			READ_VREG(HEVC_STREAM_END_ADDR),
			READ_VREG(HEVC_STREAM_LEVEL),
			READ_VREG(HEVC_STREAM_WR_PTR),
			READ_VREG(HEVC_STREAM_RD_PTR)
		);
#ifdef SUPPORT_FB_DECODING
	/*if (irq != VDEC_IRQ_0)
	return IRQ_WAKE_THREAD;*/
#endif

	/* Ucode debug tap: 0x10000 requests an LMEM dump. */
	debug_tag = READ_HREG(DEBUG_REG1);
	if (debug_tag & 0x10000) {
		pr_info("LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
		for (i = 0; i < 0x400; i += 4) {
			int ii;
			if ((i & 0xf) == 0)
				pr_info("%03x: ", i);
			for (ii = 0; ii < 4; ii++) {
				pr_info("%04x ",
					   pbi->lmem_ptr[i + 3 - ii]);
			}
			if (((i + ii) & 0xf) == 0)
				pr_info("\n");
		}

		/* Optionally pause the ucode at a requested position. */
		if ((udebug_pause_pos == (debug_tag & 0xffff)) &&
			(udebug_pause_decode_idx == 0 ||
			udebug_pause_decode_idx == pbi->slice_idx) &&
			(udebug_pause_val == 0 ||
			udebug_pause_val == READ_HREG(DEBUG_REG2)))
			pbi->ucode_pause_pos = udebug_pause_pos;
		else if (debug_tag & 0x20000)
			pbi->ucode_pause_pos = 0xffffffff;
		if (pbi->ucode_pause_pos)
			reset_process_time(pbi);
		else
			WRITE_HREG(DEBUG_REG1, 0);
	} else if (debug_tag != 0) {
		pr_info(
			"dbg%x: %x lcu %x\n", READ_HREG(DEBUG_REG1),
			   READ_HREG(DEBUG_REG2),
			   READ_VREG(HEVC_PARSER_LCU_START));
		if ((udebug_pause_pos == (debug_tag & 0xffff)) &&
			(udebug_pause_decode_idx == 0 ||
			udebug_pause_decode_idx == pbi->slice_idx) &&
			(udebug_pause_val == 0 ||
			udebug_pause_val == READ_HREG(DEBUG_REG2)))
			pbi->ucode_pause_pos = udebug_pause_pos;
		if (pbi->ucode_pause_pos)
			reset_process_time(pbi);
		else
			WRITE_HREG(DEBUG_REG1, 0);
		pbi->process_busy = 0;
		return IRQ_HANDLED;
	}

#ifdef MULTI_INSTANCE_SUPPORT
	if (!pbi->m_ins_flag) {
#endif
		/* Single-instance path: honor error state and buffer level. */
		if (pbi->error_flag == 1) {
			pbi->error_flag = 2;
			pbi->process_busy = 0;
			return IRQ_HANDLED;
		} else if (pbi->error_flag == 3) {
			pbi->process_busy = 0;
			return IRQ_HANDLED;
		}

		if (get_free_buf_count(pbi) <= 0) {
			/*
			if (pbi->wait_buf == 0)
				pr_info("set wait_buf to 1\r\n");
			*/
			pbi->wait_buf = 1;
			pbi->process_busy = 0;
			return IRQ_HANDLED;
		}
#ifdef MULTI_INSTANCE_SUPPORT
	}
#endif
	if ((adapt_prob_status & 0xff) == 0xfd) {
		/*VP9_REQ_ADAPT_PROB*/
		/* Ucode requests backward probability adaptation in SW. */
		int pre_fc = (cm->frame_type == KEY_FRAME) ? 1 : 0;
		uint8_t *prev_prob_b =
			((uint8_t *)pbi->prob_buffer_addr) +
			((adapt_prob_status >> 8) * 0x1000);
		uint8_t *cur_prob_b =
			((uint8_t *)pbi->prob_buffer_addr) + 0x4000;
		uint8_t *count_b = (uint8_t *)pbi->count_buffer_addr;
#ifdef MULTI_INSTANCE_SUPPORT
		if (pbi->m_ins_flag)
			reset_process_time(pbi);
#endif
		adapt_coef_probs(pbi->pic_count,
			(cm->last_frame_type == KEY_FRAME),
			pre_fc, (adapt_prob_status >> 8),
			(unsigned int *)prev_prob_b,
			(unsigned int *)cur_prob_b, (unsigned int *)count_b);

		memcpy(prev_prob_b, cur_prob_b, PROB_SIZE);
		/* Signal the ucode that adaptation is complete. */
		WRITE_VREG(VP9_ADAPT_PROB_REG, 0);
		pbi->pic_count += 1;
#ifdef MULTI_INSTANCE_SUPPORT
		if (pbi->m_ins_flag)
			start_process_time(pbi);
#endif

		/*return IRQ_HANDLED;*/
	}
	return IRQ_WAKE_THREAD;
}
8330
8331static void vp9_set_clk(struct work_struct *work)
8332{
8333 struct VP9Decoder_s *pbi = container_of(work,
8334 struct VP9Decoder_s, set_clk_work);
8335 int fps = 96000 / pbi->frame_dur;
8336
8337 if (hevc_source_changed(VFORMAT_VP9,
8338 frame_width, frame_height, fps) > 0)
8339 pbi->saved_resolution = frame_width *
8340 frame_height * fps;
8341}
8342
/*
 * Periodic housekeeping timer for the VP9 decoder (fires every PUT_INTERVAL
 * jiffies).  Handles: forced exit on vdec disconnect, stream-stall watchdog
 * (single-instance mode), decode timeout detection (multi-instance mode),
 * and a number of debug hooks controlled by the module parameters
 * (debug/radr/rval/pop_shorts/dbg_cmd/udebug_pause_pos).
 * Re-arms itself at the end unless the decoder is shutting down.
 */
static void vvp9_put_timer_func(unsigned long arg)
{
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)arg;
	struct timer_list *timer = &pbi->timer;
	uint8_t empty_flag;
	unsigned int buf_level;

	enum receviver_start_e state = RECEIVER_INACTIVE;

	if (pbi->m_ins_flag) {
		if (hw_to_vdec(pbi)->next_status
			== VDEC_STATUS_DISCONNECTED) {
#ifdef SUPPORT_FB_DECODING
			if (pbi->run2_busy)
				return;

			pbi->dec_s1_result = DEC_S1_RESULT_FORCE_EXIT;
			vdec_schedule_work(&pbi->s1_work);
#endif
			/* vdec core is tearing us down: force the work
			 * function to exit instead of continuing decode */
			pbi->dec_result = DEC_RESULT_FORCE_EXIT;
			vdec_schedule_work(&pbi->work);
			pr_debug(
				"vdec requested to be disconnected\n");
			return;
		}
	}
	if (pbi->init_flag == 0) {
		/* not initialized yet; just keep the timer alive */
		if (pbi->stat & STAT_TIMER_ARM) {
			timer->expires = jiffies + PUT_INTERVAL;
			add_timer(&pbi->timer);
		}
		return;
	}
	if (pbi->m_ins_flag == 0) {
		/* single-instance mode: query downstream receiver state */
		if (vf_get_receiver(pbi->provider_name)) {
			state =
			vf_notify_receiver(pbi->provider_name,
				VFRAME_EVENT_PROVIDER_QUREY_STATE,
				NULL);
			if ((state == RECEIVER_STATE_NULL)
				|| (state == RECEIVER_STATE_NONE))
				state = RECEIVER_INACTIVE;
		} else
			state = RECEIVER_INACTIVE;

		empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
		/* error watchdog */
		if (empty_flag == 0) {
			/* decoder has input */
			if ((debug & VP9_DEBUG_DIS_LOC_ERROR_PROC) == 0) {

				buf_level = READ_VREG(HEVC_STREAM_LEVEL);
				/* receiver has no buffer to recycle */
				if ((state == RECEIVER_INACTIVE) &&
					(kfifo_is_empty(&pbi->display_q) &&
					buf_level > 0x200)
					) {
					/* kick the ISR to break a stall */
						WRITE_VREG
						(HEVC_ASSIST_MBOX0_IRQ_REG,
						0x1);
				}
			}

			if ((debug & VP9_DEBUG_DIS_SYS_ERROR_PROC) == 0) {
				/* receiver has no buffer to recycle */
				/*if ((state == RECEIVER_INACTIVE) &&
				 *	(kfifo_is_empty(&pbi->display_q))) {
				 *pr_info("vp9 something error,need reset\n");
				 *}
				 */
			}
		}
	}
#ifdef MULTI_INSTANCE_SUPPORT
	else {
		/* multi-instance mode: detect a decode that has made no
		 * LCU progress for decode_timeout_val milliseconds */
		if (
			(decode_timeout_val > 0) &&
			(pbi->start_process_time > 0) &&
			((1000 * (jiffies - pbi->start_process_time) / HZ)
				> decode_timeout_val)
		) {
			int current_lcu_idx =
				READ_VREG(HEVC_PARSER_LCU_START)
				& 0xffffff;
			if (pbi->last_lcu_idx == current_lcu_idx) {
				if (pbi->decode_timeout_count > 0)
					pbi->decode_timeout_count--;
				if (pbi->decode_timeout_count == 0) {
					if (input_frame_based(
						hw_to_vdec(pbi)) ||
					(READ_VREG(HEVC_STREAM_LEVEL) > 0x200))
						timeout_process(pbi);
					else {
						vp9_print(pbi, 0,
							"timeout & empty, again\n");
						dec_again_process(pbi);
					}
				}
			} else {
				/* progress was made; restart the clock */
				start_process_time(pbi);
				pbi->last_lcu_idx = current_lcu_idx;
			}
		}
	}
#endif

	/* debug: release a ucode pause point when udebug_pause_pos moves */
	if ((pbi->ucode_pause_pos != 0) &&
		(pbi->ucode_pause_pos != 0xffffffff) &&
		udebug_pause_pos != pbi->ucode_pause_pos) {
		pbi->ucode_pause_pos = 0;
		WRITE_HREG(DEBUG_REG1, 0);
	}
#ifdef MULTI_INSTANCE_SUPPORT
	if (debug & VP9_DEBUG_FORCE_SEND_AGAIN) {
		pr_info(
		"Force Send Again\r\n");
		debug &= ~VP9_DEBUG_FORCE_SEND_AGAIN;
		reset_process_time(pbi);
		pbi->dec_result = DEC_RESULT_AGAIN;
		if (pbi->process_state ==
			PROC_STATE_DECODESLICE) {
			if (pbi->mmu_enable)
				vp9_recycle_mmu_buf(pbi);
			pbi->process_state =
			PROC_STATE_SENDAGAIN;
		}
		amhevc_stop();

		vdec_schedule_work(&pbi->work);
	}

	if (debug & VP9_DEBUG_DUMP_DATA) {
		debug &= ~VP9_DEBUG_DUMP_DATA;
		vp9_print(pbi, 0,
			"%s: chunk size 0x%x off 0x%x sum 0x%x\n",
			__func__,
			pbi->chunk->size,
			pbi->chunk->offset,
			get_data_check_sum(pbi, pbi->chunk->size)
			);
		dump_data(pbi, pbi->chunk->size);
	}
#endif
	if (debug & VP9_DEBUG_DUMP_PIC_LIST) {
		dump_pic_list(pbi);
		debug &= ~VP9_DEBUG_DUMP_PIC_LIST;
	}
	if (debug & VP9_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
		WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
		debug &= ~VP9_DEBUG_TRIG_SLICE_SEGMENT_PROC;
	}
	/*if (debug & VP9_DEBUG_HW_RESET) {
	}*/

	/* debug: one-shot register read/write via radr/rval parameters */
	if (radr != 0) {
		if (rval != 0) {
			WRITE_VREG(radr, rval);
			pr_info("WRITE_VREG(%x,%x)\n", radr, rval);
		} else
			pr_info("READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
		rval = 0;
		radr = 0;
	}
	/* debug: pop and dump 16-bit words from the stream shifter */
	if (pop_shorts != 0) {
		int i;
		u32 sum = 0;

		pr_info("pop stream 0x%x shorts\r\n", pop_shorts);
		for (i = 0; i < pop_shorts; i++) {
			u32 data =
			(READ_HREG(HEVC_SHIFTED_DATA) >> 16);
			WRITE_HREG(HEVC_SHIFT_COMMAND,
			(1<<7)|16);
			if ((i & 0xf) == 0)
				pr_info("%04x:", i);
			pr_info("%04x ", data);
			if (((i + 1) & 0xf) == 0)
				pr_info("\r\n");
			sum += data;
		}
		pr_info("\r\nsum = %x\r\n", sum);
		pop_shorts = 0;
	}
	/* debug: report the address of the currently displayed buffer */
	if (dbg_cmd != 0) {
		if (dbg_cmd == 1) {
			u32 disp_laddr;

			if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
				get_double_write_mode(pbi) == 0) {
				disp_laddr =
					READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
			} else {
				struct canvas_s cur_canvas;

				canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
					& 0xff), &cur_canvas);
				disp_laddr = cur_canvas.addr;
			}
			pr_info("current displayed buffer address %x\r\n",
				disp_laddr);
		}
		dbg_cmd = 0;
	}
	/*don't changed at start.*/
	/* reschedule clock adjustment once the resolution/rate stabilizes */
	if (pbi->get_frame_dur && pbi->show_frame_num > 60 &&
		pbi->frame_dur > 0 && pbi->saved_resolution !=
		frame_width * frame_height *
			(96000 / pbi->frame_dur))
		vdec_schedule_work(&pbi->set_clk_work);

	timer->expires = jiffies + PUT_INTERVAL;
	add_timer(timer);
}
8556
8557
8558int vvp9_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
8559{
8560 struct VP9Decoder_s *vp9 =
8561 (struct VP9Decoder_s *)vdec->private;
8562
8563 if (!vp9)
8564 return -1;
8565
8566 vstatus->frame_width = frame_width;
8567 vstatus->frame_height = frame_height;
8568 if (vp9->frame_dur != 0)
8569 vstatus->frame_rate = 96000 / vp9->frame_dur;
8570 else
8571 vstatus->frame_rate = -1;
8572 vstatus->error_count = 0;
8573 vstatus->status = vp9->stat | vp9->fatal_error;
8574 vstatus->frame_dur = vp9->frame_dur;
8575#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8576 vstatus->bit_rate = gvs->bit_rate;
8577 vstatus->frame_data = gvs->frame_data;
8578 vstatus->total_data = gvs->total_data;
8579 vstatus->frame_count = gvs->frame_count;
8580 vstatus->error_frame_count = gvs->error_frame_count;
8581 vstatus->drop_frame_count = gvs->drop_frame_count;
8582 vstatus->total_data = gvs->total_data;
8583 vstatus->samp_cnt = gvs->samp_cnt;
8584 vstatus->offset = gvs->offset;
8585 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
8586 "%s", DRIVER_NAME);
8587#endif
8588 return 0;
8589}
8590
/*
 * vdec_s::set_isreset callback: latch the reset flag so stop/unreg paths
 * know whether to send FR_END_HINT to the receiver.  Always returns 0.
 */
int vvp9_set_isreset(struct vdec_s *vdec, int isreset)
{
	is_reset = isreset;
	return 0;
}
8596
#if 0
/*
 * Legacy HEVC-core power-up/bring-up sequence.  Compiled out (replaced by
 * the common amhevc_* helpers) but kept for hardware reference.
 */
static void VP9_DECODE_INIT(void)
{
	/* enable vp9 clocks */
	WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
	/* *************************************************************** */
	/* Power ON HEVC */
	/* *************************************************************** */
	/* Powerup HEVC */
	WRITE_VREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_VREG(AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
	WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
	WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
	WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
	/* remove isolations */
	WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
			READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));

}
#endif
8617
/*
 * Program the AMRISC-side protocol/hardware state for a decode session.
 * @mask selects which pipeline halves to initialize (HW_MASK_FRONT:
 * stream fetch / parser side; HW_MASK_BACK: reconstruction side).
 * Front-end setup includes stream fetch enable, start/emulation code
 * registers, mailbox interrupt enable and the NAL search control, plus
 * the optional front/back FB-decoding handshake registers.
 */
static void vvp9_prot_init(struct VP9Decoder_s *pbi, u32 mask)
{
	unsigned int data32;
	/* VP9_DECODE_INIT(); */
	vp9_config_work_space_hw(pbi, mask);
	if (mask & HW_MASK_BACK)
		init_pic_list_hw(pbi);

	vp9_init_decoder_hw(pbi, mask);

#ifdef VP9_LPF_LVL_UPDATE
	if (mask & HW_MASK_BACK)
		vp9_loop_filter_init(pbi);
#endif

	if ((mask & HW_MASK_FRONT) == 0)
		return;
#if 1
	if (debug & VP9_DEBUG_BUFMGR_MORE)
		pr_info("%s\n", __func__);
	data32 = READ_VREG(HEVC_STREAM_CONTROL);
	data32 = data32 |
		(1 << 0)/*stream_fetch_enable*/
		;
	WRITE_VREG(HEVC_STREAM_CONTROL, data32);

	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
		if (debug & VP9_DEBUG_BUFMGR)
			pr_info("[test.c] Config STREAM_FIFO_CTL\n");
		data32 = READ_VREG(HEVC_STREAM_FIFO_CTL);
		data32 = data32 |
			(1 << 29) // stream_fifo_hole
			;
		WRITE_VREG(HEVC_STREAM_FIFO_CTL, data32);
	}
#if 0
	/* disabled sanity check of the shift start/emulation registers */
	data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
	if (data32 != 0x00000100) {
		pr_info("vp9 prot init error %d\n", __LINE__);
		return;
	}
	data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
	if (data32 != 0x00000300) {
		pr_info("vp9 prot init error %d\n", __LINE__);
		return;
	}
	WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
	WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
	data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
	if (data32 != 0x12345678) {
		pr_info("vp9 prot init error %d\n", __LINE__);
		return;
	}
	data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
	if (data32 != 0x9abcdef0) {
		pr_info("vp9 prot init error %d\n", __LINE__);
		return;
	}
#endif
	WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x000000001);
	WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
#endif



	WRITE_VREG(HEVC_WAIT_FLAG, 1);

	/* WRITE_VREG(HEVC_MPSR, 1); */

	/* clear mailbox interrupt */
	WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);

	/* enable mailbox interrupt */
	WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);

	/* disable PSCALE for hardware sharing */
	WRITE_VREG(HEVC_PSCALE_CTRL, 0);

	WRITE_VREG(DEBUG_REG1, 0x0);
	/*check vps/sps/pps/i-slice in ucode*/
	WRITE_VREG(NAL_SEARCH_CTL, 0x8);

	WRITE_VREG(DECODE_STOP_POS, udebug_flag);
#ifdef SUPPORT_FB_DECODING
#ifndef FB_DECODING_TEST_SCHEDULE
	if (pbi->used_stage_buf_num > 0) {
		if (mask & HW_MASK_FRONT) {
			data32 = READ_VREG(
				HEVC_ASSIST_HED_FB_W_CTL);
			data32 = data32 |
				(1 << 0) /*hed_fb_wr_en*/
				;
			WRITE_VREG(HEVC_ASSIST_HED_FB_W_CTL,
				data32);
		}
		if (mask & HW_MASK_BACK) {
			data32 = READ_VREG(
				HEVC_ASSIST_HED_FB_R_CTL);
			while (data32 & (1 << 7)) {
				/*wait finish*/
				data32 = READ_VREG(
					HEVC_ASSIST_HED_FB_R_CTL);
			}
			data32 &= (~(0x1 << 0));
			/*hed_fb_rd_addr_auto_rd*/
			data32 &= (~(0x1 << 1));
			/*rd_id = 0, hed_rd_map_auto_halt_num,
			after wr 2 ready, then start reading*/
			data32 |= (0x2 << 16);
			WRITE_VREG(HEVC_ASSIST_HED_FB_R_CTL,
				data32);

			data32 |= (0x1 << 11); /*hed_rd_map_auto_halt_en*/
			data32 |= (0x1 << 1); /*hed_fb_rd_addr_auto_rd*/
			data32 |= (0x1 << 0); /*hed_fb_rd_en*/
			WRITE_VREG(HEVC_ASSIST_HED_FB_R_CTL,
				data32);
		}

	}
#endif
#endif
}
8741
8742static int vvp9_local_init(struct VP9Decoder_s *pbi)
8743{
8744 int i;
8745 int ret;
8746 int width, height;
8747 if (alloc_lf_buf(pbi) < 0)
8748 return -1;
8749
8750 pbi->gvs = vzalloc(sizeof(struct vdec_info));
8751 if (NULL == pbi->gvs) {
8752 pr_info("the struct of vdec status malloc failed.\n");
8753 return -1;
8754 }
8755#ifdef DEBUG_PTS
8756 pbi->pts_missed = 0;
8757 pbi->pts_hit = 0;
8758#endif
8759 pbi->new_frame_displayed = 0;
8760 pbi->last_put_idx = -1;
8761 pbi->saved_resolution = 0;
8762 pbi->get_frame_dur = false;
8763 on_no_keyframe_skiped = 0;
8764 pbi->duration_from_pts_done = 0;
8765 pbi->vp9_first_pts_ready = 0;
8766 pbi->frame_cnt_window = 0;
8767 width = pbi->vvp9_amstream_dec_info.width;
8768 height = pbi->vvp9_amstream_dec_info.height;
8769 pbi->frame_dur =
8770 (pbi->vvp9_amstream_dec_info.rate ==
8771 0) ? 3200 : pbi->vvp9_amstream_dec_info.rate;
8772 if (width && height)
8773 pbi->frame_ar = height * 0x100 / width;
8774/*
8775 *TODO:FOR VERSION
8776 */
8777 pr_info("vp9: ver (%d,%d) decinfo: %dx%d rate=%d\n", vp9_version,
8778 0, width, height, pbi->frame_dur);
8779
8780 if (pbi->frame_dur == 0)
8781 pbi->frame_dur = 96000 / 24;
8782
8783 INIT_KFIFO(pbi->display_q);
8784 INIT_KFIFO(pbi->newframe_q);
8785
8786
8787 for (i = 0; i < VF_POOL_SIZE; i++) {
8788 const struct vframe_s *vf = &pbi->vfpool[i];
8789
8790 pbi->vfpool[i].index = -1;
8791 kfifo_put(&pbi->newframe_q, vf);
8792 }
8793
8794
8795 ret = vp9_local_init(pbi);
8796
8797 if (!pbi->pts_unstable) {
8798 pbi->pts_unstable =
8799 (pbi->vvp9_amstream_dec_info.rate == 0)?1:0;
8800 pr_info("set pts unstable\n");
8801 }
8802
8803 return ret;
8804}
8805
8806
#ifdef MULTI_INSTANCE_SUPPORT
/*
 * Full decoder bring-up.  In multi-instance mode only the software side is
 * set up here (work items, timer, deferred pic-list init) and the vdec core
 * schedules the hardware later; in single-instance mode the HEVC core is
 * enabled, firmware loaded, IRQ requested, vframe provider registered and
 * the hardware started immediately.
 * Returns 0 on success or a negative errno.
 */
static s32 vvp9_init(struct vdec_s *vdec)
{
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)vdec->private;
#else
static s32 vvp9_init(struct VP9Decoder_s *pbi)
{
#endif
	int ret;
	int fw_size = 0x1000 * 16;
	struct firmware_s *fw = NULL;

	pbi->stat |= STAT_TIMER_INIT;

	if (vvp9_local_init(pbi) < 0)
		return -EBUSY;

	fw = vmalloc(sizeof(struct firmware_s) + fw_size);
	if (IS_ERR_OR_NULL(fw))
		return -ENOMEM;

	if (get_firmware_data(VIDEO_DEC_VP9_MMU, fw->data) < 0) {
		pr_err("get firmware fail.\n");
		vfree(fw);
		return -1;
	}

	fw->len = fw_size;

	INIT_WORK(&pbi->set_clk_work, vp9_set_clk);
	init_timer(&pbi->timer);

#ifdef MULTI_INSTANCE_SUPPORT
	if (pbi->m_ins_flag) {
		pbi->timer.data = (ulong) pbi;
		pbi->timer.function = vvp9_put_timer_func;
		pbi->timer.expires = jiffies + PUT_INTERVAL;

		/*add_timer(&pbi->timer);

		pbi->stat |= STAT_TIMER_ARM;
		pbi->stat |= STAT_ISR_REG;*/

		INIT_WORK(&pbi->work, vp9_work);
#ifdef SUPPORT_FB_DECODING
		if (pbi->used_stage_buf_num > 0)
			INIT_WORK(&pbi->s1_work, vp9_s1_work);
#endif
		/* fw ownership moves to pbi; freed in vvp9_stop() */
		pbi->fw = fw;

		/* picture list init.*/
		pbi->dec_result = DEC_INIT_PICLIST;
		vdec_schedule_work(&pbi->work);

		return 0;
	}
#endif
	amhevc_enable();

	init_pic_list(pbi);

	ret = amhevc_loadmc_ex(VFORMAT_VP9, NULL, fw->data);
	if (ret < 0) {
		amhevc_disable();
		vfree(fw);
		pr_err("VP9: the %s fw loading failed, err: %x\n",
			tee_enabled() ? "TEE" : "local", ret);
		return -EBUSY;
	}

	/* firmware copied into the HEVC core; the buffer is no longer needed */
	vfree(fw);

	pbi->stat |= STAT_MC_LOAD;

	/* enable AMRISC side protocol */
	vvp9_prot_init(pbi, HW_MASK_FRONT | HW_MASK_BACK);

	if (vdec_request_threaded_irq(VDEC_IRQ_0,
				vvp9_isr,
				vvp9_isr_thread_fn,
				IRQF_ONESHOT,/*run thread on this irq disabled*/
				"vvp9-irq", (void *)pbi)) {
		pr_info("vvp9 irq register error.\n");
		amhevc_disable();
		return -ENOENT;
	}

	pbi->stat |= STAT_ISR_REG;

	pbi->provider_name = PROVIDER_NAME;
#ifdef MULTI_INSTANCE_SUPPORT
	vf_provider_init(&vvp9_vf_prov, PROVIDER_NAME,
				&vvp9_vf_provider, pbi);
	vf_reg_provider(&vvp9_vf_prov);
	vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
	if (pbi->frame_dur != 0) {
		if (!is_reset)
			vf_notify_receiver(pbi->provider_name,
					VFRAME_EVENT_PROVIDER_FR_HINT,
					(void *)
					((unsigned long)pbi->frame_dur));
	}
#else
	vf_provider_init(&vvp9_vf_prov, PROVIDER_NAME, &vvp9_vf_provider,
					 pbi);
	vf_reg_provider(&vvp9_vf_prov);
	vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
	if (!is_reset)
		vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_FR_HINT,
		(void *)((unsigned long)pbi->frame_dur));
#endif
	pbi->stat |= STAT_VF_HOOK;

	pbi->timer.data = (ulong)pbi;
	pbi->timer.function = vvp9_put_timer_func;
	pbi->timer.expires = jiffies + PUT_INTERVAL;

	pbi->stat |= STAT_VDEC_RUN;

	add_timer(&pbi->timer);

	pbi->stat |= STAT_TIMER_ARM;

	amhevc_start();

	pbi->init_flag = 1;
	pbi->process_busy = 0;
	pr_info("%d, vvp9_init, RP=0x%x\n",
		__LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
	return 0;
}
8938
/*
 * Multi-instance teardown path: stop the HEVC core, release the IRQ and
 * timer, unregister the vframe provider, then free software resources
 * (local buffers, pending work, MMU boxes, firmware copy).
 * Order matters: hardware is quiesced before buffers are released.
 * Always returns 0.
 */
static int vmvp9_stop(struct VP9Decoder_s *pbi)
{
	pbi->init_flag = 0;

	if (pbi->stat & STAT_VDEC_RUN) {
		amhevc_stop();
		pbi->stat &= ~STAT_VDEC_RUN;
	}
	if (pbi->stat & STAT_ISR_REG) {
		vdec_free_irq(VDEC_IRQ_0, (void *)pbi);
		pbi->stat &= ~STAT_ISR_REG;
	}
	if (pbi->stat & STAT_TIMER_ARM) {
		del_timer_sync(&pbi->timer);
		pbi->stat &= ~STAT_TIMER_ARM;
	}

	if (pbi->stat & STAT_VF_HOOK) {
		/* on a real stop (not a reset) tell the receiver the
		 * frame-rate hint no longer applies */
		if (!is_reset)
			vf_notify_receiver(pbi->provider_name,
					VFRAME_EVENT_PROVIDER_FR_END_HINT,
					NULL);

		vf_unreg_provider(&vvp9_vf_prov);
		pbi->stat &= ~STAT_VF_HOOK;
	}
	vp9_local_uninit(pbi);
	reset_process_time(pbi);
	cancel_work_sync(&pbi->work);
#ifdef SUPPORT_FB_DECODING
	if (pbi->used_stage_buf_num > 0)
		cancel_work_sync(&pbi->s1_work);
#endif
	cancel_work_sync(&pbi->set_clk_work);
	uninit_mmu_buffers(pbi);
	if (pbi->fw)
		vfree(pbi->fw);
	pbi->fw = NULL;
	return 0;
}
8979
/*
 * Main teardown path (used by amvdec_vp9_remove): quiesce the HEVC core,
 * mask/free the mailbox IRQ, stop the timer, unregister the vframe
 * provider, then release software resources.  In multi-instance mode
 * pending work items are flushed instead of disabling the core (the core
 * is shared); otherwise the core is disabled here.
 * Always returns 0.
 */
static int vvp9_stop(struct VP9Decoder_s *pbi)
{

	pbi->init_flag = 0;
	pbi->first_sc_checked = 0;
	if (pbi->stat & STAT_VDEC_RUN) {
		amhevc_stop();
		pbi->stat &= ~STAT_VDEC_RUN;
	}

	if (pbi->stat & STAT_ISR_REG) {
#ifdef MULTI_INSTANCE_SUPPORT
		if (!pbi->m_ins_flag)
#endif
			/* single-instance owns the mailbox; mask it off */
			WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
		vdec_free_irq(VDEC_IRQ_0, (void *)pbi);
		pbi->stat &= ~STAT_ISR_REG;
	}

	if (pbi->stat & STAT_TIMER_ARM) {
		del_timer_sync(&pbi->timer);
		pbi->stat &= ~STAT_TIMER_ARM;
	}

	if (pbi->stat & STAT_VF_HOOK) {
		if (!is_reset)
			vf_notify_receiver(pbi->provider_name,
					VFRAME_EVENT_PROVIDER_FR_END_HINT,
					NULL);

		vf_unreg_provider(&vvp9_vf_prov);
		pbi->stat &= ~STAT_VF_HOOK;
	}
	vp9_local_uninit(pbi);

	cancel_work_sync(&pbi->set_clk_work);
#ifdef MULTI_INSTANCE_SUPPORT
	if (pbi->m_ins_flag) {
#ifdef SUPPORT_FB_DECODING
		if (pbi->used_stage_buf_num > 0)
			cancel_work_sync(&pbi->s1_work);
#endif
		cancel_work_sync(&pbi->work);
	} else
		amhevc_disable();
#else
	amhevc_disable();
#endif
	uninit_mmu_buffers(pbi);

	/* vfree(NULL) is a no-op, so no guard is needed */
	vfree(pbi->fw);
	pbi->fw = NULL;
	return 0;
}
9034static int amvdec_vp9_mmu_init(struct VP9Decoder_s *pbi)
9035{
9036 int tvp_flag = vdec_secure(hw_to_vdec(pbi)) ?
9037 CODEC_MM_FLAGS_TVP : 0;
9038 int buf_size = 48;
9039
9040 if ((pbi->max_pic_w * pbi->max_pic_h > 1280*736) &&
9041 (pbi->max_pic_w * pbi->max_pic_h <= 1920*1088)) {
9042 buf_size = 12;
9043 } else if ((pbi->max_pic_w * pbi->max_pic_h > 0) &&
9044 (pbi->max_pic_w * pbi->max_pic_h <= 1280*736)) {
9045 buf_size = 4;
9046 }
9047 pbi->need_cache_size = buf_size * SZ_1M;
9048 pbi->sc_start_time = get_jiffies_64();
9049 if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) {
9050 pbi->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
9051 pbi->index, FRAME_BUFFERS,
9052 pbi->need_cache_size,
9053 tvp_flag
9054 );
9055 if (!pbi->mmu_box) {
9056 pr_err("vp9 alloc mmu box failed!!\n");
9057 return -1;
9058 }
9059 }
9060 pbi->bmmu_box = decoder_bmmu_box_alloc_box(
9061 DRIVER_NAME,
9062 pbi->index,
9063 MAX_BMMU_BUFFER_NUM,
9064 4 + PAGE_SHIFT,
9065 CODEC_MM_FLAGS_CMA_CLEAR |
9066 CODEC_MM_FLAGS_FOR_VDECODER |
9067 tvp_flag);
9068 if (!pbi->bmmu_box) {
9069 pr_err("vp9 alloc bmmu box failed!!\n");
9070 return -1;
9071 }
9072 return 0;
9073}
9074
9075static struct VP9Decoder_s *gHevc;
9076
/*
 * Platform-driver probe for the single-instance VP9 decoder: allocates the
 * decoder context, sets up MMU/BMMU boxes and the work buffer, copies the
 * stream info from the vdec core, runs vvp9_init() and raises the HEVC
 * clock for smooth playback.
 * Returns 0 on success or a negative errno.
 */
static int amvdec_vp9_probe(struct platform_device *pdev)
{
	struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
	struct BUF_s BUF[MAX_BUF_NUM];
	struct VP9Decoder_s *pbi;
	int ret;
#ifndef MULTI_INSTANCE_SUPPORT
	int i;
#endif
	pr_debug("%s\n", __func__);

	mutex_lock(&vvp9_mutex);
	pbi = vmalloc(sizeof(struct VP9Decoder_s));
	if (pbi == NULL) {
		pr_info("\namvdec_vp9 device data allocation failed\n");
		mutex_unlock(&vvp9_mutex);
		return -ENOMEM;
	}

	gHevc = pbi;
	/* NOTE(review): pbi is freshly vmalloc'd here, so m_BUF holds
	 * uninitialized data; this save/memset/restore preserves whatever
	 * was there across the memset.  Looks copied from a path where the
	 * context is reused — confirm intent. */
	memcpy(&BUF[0], &pbi->m_BUF[0], sizeof(struct BUF_s) * MAX_BUF_NUM);
	memset(pbi, 0, sizeof(struct VP9Decoder_s));
	memcpy(&pbi->m_BUF[0], &BUF[0], sizeof(struct BUF_s) * MAX_BUF_NUM);

	pbi->init_flag = 0;
	pbi->first_sc_checked= 0;
	/* SM1 and later support up to 8K decode */
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
		vp9_max_pic_w = 8192;
		vp9_max_pic_h = 4608;
	}
	pbi->max_pic_w = vp9_max_pic_w;
	pbi->max_pic_h = vp9_max_pic_h;

#ifdef MULTI_INSTANCE_SUPPORT
	pbi->eos = 0;
	pbi->start_process_time = 0;
	pbi->timeout_num = 0;
#endif
	pbi->fatal_error = 0;
	pbi->show_frame_num = 0;
	if (pdata == NULL) {
		pr_info("\namvdec_vp9 memory resource undefined.\n");
		vfree(pbi);
		mutex_unlock(&vvp9_mutex);
		return -EFAULT;
	}
	pbi->m_ins_flag = 0;
#ifdef MULTI_INSTANCE_SUPPORT
	pbi->platform_dev = pdev;
	platform_set_drvdata(pdev, pdata);
#endif
	pbi->double_write_mode = double_write_mode;
	pbi->mmu_enable = 1;
	if (amvdec_vp9_mmu_init(pbi) < 0) {
		vfree(pbi);
		mutex_unlock(&vvp9_mutex);
		pr_err("vp9 alloc bmmu box failed!!\n");
		return -1;
	}

	ret = decoder_bmmu_box_alloc_buf_phy(pbi->bmmu_box, WORK_SPACE_BUF_ID,
			work_buf_size, DRIVER_NAME, &pdata->mem_start);
	if (ret < 0) {
		uninit_mmu_buffers(pbi);
		vfree(pbi);
		mutex_unlock(&vvp9_mutex);
		return ret;
	}
	pbi->buf_size = work_buf_size;

#ifdef MULTI_INSTANCE_SUPPORT
	pbi->buf_start = pdata->mem_start;
#else
	if (!pbi->mmu_enable)
		pbi->mc_buf_spec.buf_end = pdata->mem_start + pbi->buf_size;

	for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
		amvvp9_workbuff_spec[i].start_adr = pdata->mem_start;
#endif


	if (debug) {
		pr_info("===VP9 decoder mem resource 0x%lx size 0x%x\n",
			   pdata->mem_start, pbi->buf_size);
	}

	if (pdata->sys_info)
		pbi->vvp9_amstream_dec_info = *pdata->sys_info;
	else {
		pbi->vvp9_amstream_dec_info.width = 0;
		pbi->vvp9_amstream_dec_info.height = 0;
		pbi->vvp9_amstream_dec_info.rate = 30;
	}
	pbi->no_head = no_head;
#ifdef MULTI_INSTANCE_SUPPORT
	pbi->cma_dev = pdata->cma_dev;
#else
	cma_dev = pdata->cma_dev;
#endif

#ifdef MULTI_INSTANCE_SUPPORT
	pdata->private = pbi;
	pdata->dec_status = vvp9_dec_status;
	pdata->set_isreset = vvp9_set_isreset;
	is_reset = 0;
	if (vvp9_init(pdata) < 0) {
#else
	if (vvp9_init(pbi) < 0) {
#endif
		pr_info("\namvdec_vp9 init failed.\n");
		vp9_local_uninit(pbi);
		uninit_mmu_buffers(pbi);
		vfree(pbi);
		pdata->dec_status = NULL;
		mutex_unlock(&vvp9_mutex);
		return -ENODEV;
	}
	/*set the max clk for smooth playing...*/
	hevc_source_changed(VFORMAT_VP9,
			4096, 2048, 60);
	mutex_unlock(&vvp9_mutex);

	return 0;
}
9201
/*
 * Platform-driver remove for the single-instance decoder: stops the
 * decoder, drops the HEVC clock request, releases per-frame canvases
 * (parallel-dec mode) and frees the context allocated in probe.
 * Always returns 0.
 */
static int amvdec_vp9_remove(struct platform_device *pdev)
{
	struct VP9Decoder_s *pbi = gHevc;
	struct vdec_s *vdec = hw_to_vdec(pbi);
	int i;

	if (debug)
		pr_info("amvdec_vp9_remove\n");

	mutex_lock(&vvp9_mutex);

	vvp9_stop(pbi);

	hevc_source_changed(VFORMAT_VP9, 0, 0, 0);

	if (vdec->parallel_dec == 1) {
		for (i = 0; i < FRAME_BUFFERS; i++) {
			vdec->free_canvas_ex(pbi->common.buffer_pool->
				frame_bufs[i].buf.y_canvas_index, vdec->id);
			vdec->free_canvas_ex(pbi->common.buffer_pool->
				frame_bufs[i].buf.uv_canvas_index, vdec->id);
		}
	}

#ifdef DEBUG_PTS
	pr_info("pts missed %ld, pts hit %ld, duration %d\n",
		   pbi->pts_missed, pbi->pts_hit, pbi->frame_dur);
#endif
	mem_map_mode = 0;

	vfree(pbi);
	mutex_unlock(&vvp9_mutex);

	return 0;
}
9237
9238/****************************************/
9239#ifdef CONFIG_PM
/* System-sleep suspend hook: delegate to the shared HEVC suspend helper. */
static int vp9_suspend(struct device *dev)
{
	amhevc_suspend(to_platform_device(dev), dev->power.power_state);
	return 0;
}
9245
/* System-sleep resume hook: delegate to the shared HEVC resume helper. */
static int vp9_resume(struct device *dev)
{
	amhevc_resume(to_platform_device(dev));
	return 0;
}
9251
/* Power-management operations wired into the platform driver below. */
static const struct dev_pm_ops vp9_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(vp9_suspend, vp9_resume)
};
9255#endif
9256
/* Platform driver for the single-instance (non-multi-dec) VP9 decoder. */
static struct platform_driver amvdec_vp9_driver = {
	.probe = amvdec_vp9_probe,
	.remove = amvdec_vp9_remove,
	.driver = {
		.name = DRIVER_NAME,
#ifdef CONFIG_PM
		.pm = &vp9_pm_ops,
#endif
	}
};
9267
/* Codec capability entries registered with the codec profile list;
 * .profile is filled in at module init based on chip capabilities. */
static struct codec_profile_t amvdec_vp9_profile = {
	.name = "vp9",
	.profile = ""
};

/* Variant registered for the multi-instance ("mvp9") decoder. */
static struct codec_profile_t amvdec_vp9_profile_mult;
9274
9275static unsigned char get_data_check_sum
9276 (struct VP9Decoder_s *pbi, int size)
9277{
9278 int jj;
9279 int sum = 0;
9280 u8 *data = NULL;
9281
9282 if (!pbi->chunk->block->is_mapped)
9283 data = codec_mm_vmap(pbi->chunk->block->start +
9284 pbi->chunk->offset, size);
9285 else
9286 data = ((u8 *)pbi->chunk->block->start_virt) +
9287 pbi->chunk->offset;
9288
9289 for (jj = 0; jj < size; jj++)
9290 sum += data[jj];
9291
9292 if (!pbi->chunk->block->is_mapped)
9293 codec_mm_unmap_phyaddr(data);
9294 return sum;
9295}
9296
9297static void dump_data(struct VP9Decoder_s *pbi, int size)
9298{
9299 int jj;
9300 u8 *data = NULL;
9301 int padding_size = pbi->chunk->offset &
9302 (VDEC_FIFO_ALIGN - 1);
9303
9304 if (!pbi->chunk->block->is_mapped)
9305 data = codec_mm_vmap(pbi->chunk->block->start +
9306 pbi->chunk->offset, size);
9307 else
9308 data = ((u8 *)pbi->chunk->block->start_virt) +
9309 pbi->chunk->offset;
9310
9311 vp9_print(pbi, 0, "padding: ");
9312 for (jj = padding_size; jj > 0; jj--)
9313 vp9_print_cont(pbi,
9314 0,
9315 "%02x ", *(data - jj));
9316 vp9_print_cont(pbi, 0, "data adr %p\n",
9317 data);
9318
9319 for (jj = 0; jj < size; jj++) {
9320 if ((jj & 0xf) == 0)
9321 vp9_print(pbi,
9322 0,
9323 "%06x:", jj);
9324 vp9_print_cont(pbi,
9325 0,
9326 "%02x ", data[jj]);
9327 if (((jj + 1) & 0xf) == 0)
9328 vp9_print(pbi,
9329 0,
9330 "\n");
9331 }
9332 vp9_print(pbi,
9333 0,
9334 "\n");
9335
9336 if (!pbi->chunk->block->is_mapped)
9337 codec_mm_unmap_phyaddr(data);
9338}
9339
/*
 * Deferred work handler driven by dec_result: dispatches the per-frame
 * state machine (pic-list init, v4l continue, get-data/retry, done, again,
 * EOS, force-exit), then releases hardware and input resources and tells
 * the vdec core this instance's run has finished.
 */
static void vp9_work(struct work_struct *work)
{
	struct VP9Decoder_s *pbi = container_of(work,
		struct VP9Decoder_s, work);
	struct vdec_s *vdec = hw_to_vdec(pbi);
	/* finished decoding one frame or error,
	 * notify vdec core to switch context
	 */
	vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL,
		"%s dec_result %d %x %x %x\n",
		__func__,
		pbi->dec_result,
		READ_VREG(HEVC_STREAM_LEVEL),
		READ_VREG(HEVC_STREAM_WR_PTR),
		READ_VREG(HEVC_STREAM_RD_PTR));

	if (pbi->dec_result == DEC_INIT_PICLIST) {
		init_pic_list(pbi);
		pbi->pic_list_init_done = true;
		return;
	}

	if (pbi->dec_result == DEC_V4L2_CONTINUE_DECODING) {
		struct aml_vcodec_ctx *ctx =
			(struct aml_vcodec_ctx *)(pbi->v4l2_ctx);

		if (ctx->param_sets_from_ucode) {
			reset_process_time(pbi);
			/* wait (up to 500 ms) for the v4l client to finish
			 * its format negotiation before resuming */
			if (wait_event_interruptible_timeout(ctx->wq,
				ctx->v4l_codec_ready,
				msecs_to_jiffies(500)) < 0)
				return;
		}

		continue_decoding(pbi);
		pbi->postproc_done = 0;
		pbi->process_busy = 0;

		return;
	}

	if (((pbi->dec_result == DEC_RESULT_GET_DATA) ||
		(pbi->dec_result == DEC_RESULT_GET_DATA_RETRY))
		&& (hw_to_vdec(pbi)->next_status !=
		VDEC_STATUS_DISCONNECTED)) {
		if (!vdec_has_more_input(vdec)) {
			pbi->dec_result = DEC_RESULT_EOS;
			vdec_schedule_work(&pbi->work);
			return;
		}

		if (pbi->dec_result == DEC_RESULT_GET_DATA) {
			vp9_print(pbi, PRINT_FLAG_VDEC_STATUS,
				"%s DEC_RESULT_GET_DATA %x %x %x\n",
				__func__,
				READ_VREG(HEVC_STREAM_LEVEL),
				READ_VREG(HEVC_STREAM_WR_PTR),
				READ_VREG(HEVC_STREAM_RD_PTR));
			vdec_vframe_dirty(vdec, pbi->chunk);
			vdec_clean_input(vdec);
		}

		if (get_free_buf_count(pbi) >=
			run_ready_min_buf_num) {
			int r;
			int decode_size;
			r = vdec_prepare_input(vdec, &pbi->chunk);
			if (r < 0) {
				pbi->dec_result = DEC_RESULT_GET_DATA_RETRY;

				/* NOTE(review): log tag says vh265; looks
				 * copied from the H.265 driver */
				vp9_print(pbi,
					PRINT_FLAG_VDEC_DETAIL,
					"amvdec_vh265: Insufficient data\n");

				vdec_schedule_work(&pbi->work);
				return;
			}
			pbi->dec_result = DEC_RESULT_NONE;
			vp9_print(pbi, PRINT_FLAG_VDEC_STATUS,
				"%s: chunk size 0x%x sum 0x%x\n",
				__func__, r,
				(debug & PRINT_FLAG_VDEC_STATUS) ?
				get_data_check_sum(pbi, r) : 0
				);

			if (debug & PRINT_FLAG_VDEC_DATA)
				dump_data(pbi, pbi->chunk->size);

			/* account for FIFO-alignment padding before the
			 * chunk payload */
			decode_size = pbi->chunk->size +
				(pbi->chunk->offset & (VDEC_FIFO_ALIGN - 1));

			WRITE_VREG(HEVC_DECODE_SIZE,
				READ_VREG(HEVC_DECODE_SIZE) + decode_size);

			vdec_enable_input(vdec);

			WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);

			start_process_time(pbi);

		} else{
			pbi->dec_result = DEC_RESULT_GET_DATA_RETRY;

			vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL,
				"amvdec_vh265: Insufficient data\n");

			vdec_schedule_work(&pbi->work);
		}
		return;
	} else if (pbi->dec_result == DEC_RESULT_DONE) {
#ifdef SUPPORT_FB_DECODING
		if (pbi->used_stage_buf_num > 0) {
#ifndef FB_DECODING_TEST_SCHEDULE
			if (!is_s2_decoding_finished(pbi)) {
				vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL,
					"s2 decoding not done, check again later\n");
				vdec_schedule_work(&pbi->work);
				/* NOTE(review): no return here — execution
				 * falls through to inc_s2_pos(); confirm
				 * this is intended */
			}
#endif
			inc_s2_pos(pbi);
			if (mcrcc_cache_alg_flag)
				dump_hit_rate(pbi);
		}
#endif
		/* if (!pbi->ctx_valid)
			pbi->ctx_valid = 1; */
		pbi->slice_idx++;
		pbi->frame_count++;
		pbi->process_state = PROC_STATE_INIT;
		decode_frame_count[pbi->index] = pbi->frame_count;

		if (pbi->mmu_enable)
			pbi->used_4k_num =
				(READ_VREG(HEVC_SAO_MMU_STATUS) >> 16);
		vp9_print(pbi, PRINT_FLAG_VDEC_STATUS,
			"%s (===> %d) dec_result %d %x %x %x shiftbytes 0x%x decbytes 0x%x\n",
			__func__,
			pbi->frame_count,
			pbi->dec_result,
			READ_VREG(HEVC_STREAM_LEVEL),
			READ_VREG(HEVC_STREAM_WR_PTR),
			READ_VREG(HEVC_STREAM_RD_PTR),
			READ_VREG(HEVC_SHIFT_BYTE_COUNT),
			READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
			pbi->start_shift_bytes
			);
		vdec_vframe_dirty(hw_to_vdec(pbi), pbi->chunk);
	} else if (pbi->dec_result == DEC_RESULT_AGAIN) {
		/*
			stream base: stream buf empty or timeout
			frame base: vdec_prepare_input fail
		*/
		if (!vdec_has_more_input(vdec)) {
			pbi->dec_result = DEC_RESULT_EOS;
			vdec_schedule_work(&pbi->work);
			return;
		}
	} else if (pbi->dec_result == DEC_RESULT_EOS) {
		vp9_print(pbi, PRINT_FLAG_VDEC_STATUS,
			"%s: end of stream\n",
			__func__);
		pbi->eos = 1;
		vp9_bufmgr_postproc(pbi);

		if (pbi->is_used_v4l)
			notify_v4l_eos(hw_to_vdec(pbi));

		vdec_vframe_dirty(hw_to_vdec(pbi), pbi->chunk);
	} else if (pbi->dec_result == DEC_RESULT_FORCE_EXIT) {
		vp9_print(pbi, PRINT_FLAG_VDEC_STATUS,
			"%s: force exit\n",
			__func__);
		if (pbi->stat & STAT_VDEC_RUN) {
			amhevc_stop();
			pbi->stat &= ~STAT_VDEC_RUN;
		}

		if (pbi->stat & STAT_ISR_REG) {
#ifdef MULTI_INSTANCE_SUPPORT
			if (!pbi->m_ins_flag)
#endif
				WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
			vdec_free_irq(VDEC_IRQ_0, (void *)pbi);
			pbi->stat &= ~STAT_ISR_REG;
		}
	}
	/* common epilogue: quiesce core and timer, then release the cores
	 * this run held so the vdec scheduler can move on */
	if (pbi->stat & STAT_VDEC_RUN) {
		amhevc_stop();
		pbi->stat &= ~STAT_VDEC_RUN;
	}

	if (pbi->stat & STAT_TIMER_ARM) {
		del_timer_sync(&pbi->timer);
		pbi->stat &= ~STAT_TIMER_ARM;
	}
	/* mark itself has all HW resource released and input released */
#ifdef SUPPORT_FB_DECODING
	if (pbi->used_stage_buf_num > 0)
		vdec_core_finish_run(hw_to_vdec(pbi), CORE_MASK_HEVC_BACK);
	else
		vdec_core_finish_run(hw_to_vdec(pbi), CORE_MASK_VDEC_1
					| CORE_MASK_HEVC
					| CORE_MASK_HEVC_FRONT
					| CORE_MASK_HEVC_BACK
					);
#else
	if (vdec->parallel_dec == 1)
		vdec_core_finish_run(vdec, CORE_MASK_HEVC);
	else
		vdec_core_finish_run(hw_to_vdec(pbi), CORE_MASK_VDEC_1
				| CORE_MASK_HEVC);
#endif
	trigger_schedule(pbi);
}
9554
/*
 * Restore hardware context before a scheduled run: re-runs the protocol
 * init for the pipeline halves this configuration uses (front only when
 * staged FB decoding is active, otherwise front and back).
 * Always returns 0.
 */
static int vp9_hw_ctx_restore(struct VP9Decoder_s *pbi)
{
	/* new to do ... */
#if (!defined SUPPORT_FB_DECODING)
	vvp9_prot_init(pbi, HW_MASK_FRONT | HW_MASK_BACK);
#elif (defined FB_DECODING_TEST_SCHEDULE)
	vvp9_prot_init(pbi, HW_MASK_FRONT | HW_MASK_BACK);
#else
	if (pbi->used_stage_buf_num > 0)
		vvp9_prot_init(pbi, HW_MASK_FRONT);
	else
		vvp9_prot_init(pbi, HW_MASK_FRONT | HW_MASK_BACK);
#endif
	return 0;
}
/*
 * Scheduler callback: report which decoder cores (as a CORE_MASK_* bitmap,
 * restricted to @mask) this instance is ready to run on right now.
 *
 * Returns 0 (not ready) until both picture-list init phases are done, or
 * after EOS.  Otherwise readiness is gated on free frame buffers (and, in
 * the SUPPORT_FB_DECODING build, free stage/MV buffers for the front core
 * and stage-2 buffers for the back core).  Also bumps/clears the global
 * not_run_ready counters used by vp9_dump_state() diagnostics.
 */
static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
{
	struct VP9Decoder_s *pbi =
		(struct VP9Decoder_s *)vdec->private;
	/* TVP flag for secure (protected) streams; affects MMU SC check. */
	int tvp = vdec_secure(hw_to_vdec(pbi)) ?
		CODEC_MM_FLAGS_TVP : 0;
	unsigned long ret = 0;

	if (!(pbi->pic_list_init_done && pbi->pic_list_init_done2) || pbi->eos)
		return ret;
	/* One-time scatter cache pre-check on the first readiness query. */
	if (!pbi->first_sc_checked && pbi->mmu_enable) {
		int size = decoder_mmu_box_sc_check(pbi->mmu_box, tvp);
		pbi->first_sc_checked = 1;
		vp9_print(pbi, 0, "vp9 cached=%d need_size=%d speed= %d ms\n",
			size, (pbi->need_cache_size >> PAGE_SHIFT),
			(int)(get_jiffies_64() - pbi->sc_start_time) * 1000/HZ);
	}

#ifdef SUPPORT_FB_DECODING
	if (pbi->used_stage_buf_num > 0) {
		/* Front (parse) core: needs a stage buffer and an MV buffer. */
		if (mask & CORE_MASK_HEVC_FRONT) {
			if (get_free_stage_buf_num(pbi) > 0
				&& mv_buf_available(pbi))
				ret |= CORE_MASK_HEVC_FRONT;
		}
		/* Back (reconstruct) core: needs parsed stage-2 data and
		 * enough free output frame buffers.
		 */
		if (mask & CORE_MASK_HEVC_BACK) {
			if (s2_buf_available(pbi) &&
				(get_free_buf_count(pbi) >=
				run_ready_min_buf_num)) {
				ret |= CORE_MASK_HEVC_BACK;
				pbi->back_not_run_ready = 0;
			} else
				pbi->back_not_run_ready = 1;
#if 0
			if (get_free_buf_count(pbi) <
				run_ready_min_buf_num)
				dump_pic_list(pbi);
#endif
		}
	} else if (get_free_buf_count(pbi) >=
		run_ready_min_buf_num)
		ret = CORE_MASK_VDEC_1 | CORE_MASK_HEVC
			| CORE_MASK_HEVC_FRONT
			| CORE_MASK_HEVC_BACK;

	if (ret & CORE_MASK_HEVC_FRONT)
		not_run_ready[pbi->index] = 0;
	else
		not_run_ready[pbi->index]++;

	if (ret & CORE_MASK_HEVC_BACK)
		not_run2_ready[pbi->index] = 0;
	else
		not_run2_ready[pbi->index]++;

	vp9_print(pbi,
		PRINT_FLAG_VDEC_DETAIL, "%s mask %lx=>%lx (%d %d %d %d)\r\n",
		__func__, mask, ret,
		get_free_stage_buf_num(pbi),
		mv_buf_available(pbi),
		s2_buf_available(pbi),
		get_free_buf_count(pbi)
		);

	return ret;

#else
	if (get_free_buf_count(pbi) >=
		run_ready_min_buf_num) {
		if (vdec->parallel_dec == 1)
			ret = CORE_MASK_HEVC;
		else
			ret = CORE_MASK_VDEC_1 | CORE_MASK_HEVC;
	}

	/* V4L2 path: hold off scheduling while the ucode-parsed parameter
	 * sets have been seen but the v4l codec side is not ready yet.
	 */
	if (pbi->is_used_v4l) {
		struct aml_vcodec_ctx *ctx =
			(struct aml_vcodec_ctx *)(pbi->v4l2_ctx);

		if (ctx->param_sets_from_ucode &&
			!ctx->v4l_codec_ready &&
			pbi->v4l_params_parsed)
			ret = 0; /*the params has parsed.*/
	}

	if (ret)
		not_run_ready[pbi->index] = 0;
	else
		not_run_ready[pbi->index]++;

	vp9_print(pbi,
		PRINT_FLAG_VDEC_DETAIL, "%s mask %lx=>%lx\r\n",
		__func__, mask, ret);
	return ret;
#endif
}
9666
9667static void run_front(struct vdec_s *vdec)
9668{
9669 struct VP9Decoder_s *pbi =
9670 (struct VP9Decoder_s *)vdec->private;
9671 int ret, size;
9672
9673 run_count[pbi->index]++;
9674 /* pbi->chunk = vdec_prepare_input(vdec); */
9675#if (!defined SUPPORT_FB_DECODING)
9676 hevc_reset_core(vdec);
9677#elif (defined FB_DECODING_TEST_SCHEDULE)
9678 hevc_reset_core(vdec);
9679#else
9680 if (pbi->used_stage_buf_num > 0)
9681 fb_reset_core(vdec, HW_MASK_FRONT);
9682 else
9683 hevc_reset_core(vdec);
9684#endif
9685
9686 size = vdec_prepare_input(vdec, &pbi->chunk);
9687 if (size < 0) {
9688 input_empty[pbi->index]++;
9689
9690 pbi->dec_result = DEC_RESULT_AGAIN;
9691
9692 vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL,
9693 "ammvdec_vh265: Insufficient data\n");
9694
9695 vdec_schedule_work(&pbi->work);
9696 return;
9697 }
9698 input_empty[pbi->index] = 0;
9699 pbi->dec_result = DEC_RESULT_NONE;
9700 pbi->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9701
9702 if (debug & PRINT_FLAG_VDEC_STATUS) {
9703 int ii;
9704 vp9_print(pbi, 0,
9705 "%s (%d): size 0x%x (0x%x 0x%x) sum 0x%x (%x %x %x %x %x) bytes 0x%x",
9706 __func__,
9707 pbi->frame_count, size,
9708 pbi->chunk ? pbi->chunk->size : 0,
9709 pbi->chunk ? pbi->chunk->offset : 0,
9710 pbi->chunk ? ((vdec_frame_based(vdec) &&
9711 (debug & PRINT_FLAG_VDEC_STATUS)) ?
9712 get_data_check_sum(pbi, size) : 0) : 0,
9713 READ_VREG(HEVC_STREAM_START_ADDR),
9714 READ_VREG(HEVC_STREAM_END_ADDR),
9715 READ_VREG(HEVC_STREAM_LEVEL),
9716 READ_VREG(HEVC_STREAM_WR_PTR),
9717 READ_VREG(HEVC_STREAM_RD_PTR),
9718 pbi->start_shift_bytes);
9719 if (vdec_frame_based(vdec) && pbi->chunk) {
9720 u8 *data = NULL;
9721
9722 if (!pbi->chunk->block->is_mapped)
9723 data = codec_mm_vmap(pbi->chunk->block->start +
9724 pbi->chunk->offset, 8);
9725 else
9726 data = ((u8 *)pbi->chunk->block->start_virt) +
9727 pbi->chunk->offset;
9728
9729 vp9_print_cont(pbi, 0, "data adr %p:",
9730 data);
9731 for (ii = 0; ii < 8; ii++)
9732 vp9_print_cont(pbi, 0, "%02x ",
9733 data[ii]);
9734
9735 if (!pbi->chunk->block->is_mapped)
9736 codec_mm_unmap_phyaddr(data);
9737 }
9738 vp9_print_cont(pbi, 0, "\r\n");
9739 }
9740 if (vdec->mc_loaded) {
9741 /*firmware have load before,
9742 and not changes to another.
9743 ignore reload.
9744 */
9745 } else {
9746 ret = amhevc_loadmc_ex(VFORMAT_VP9, NULL, pbi->fw->data);
9747 if (ret < 0) {
9748 amhevc_disable();
9749 vp9_print(pbi, PRINT_FLAG_ERROR,
9750 "VP9: the %s fw loading failed, err: %x\n",
9751 tee_enabled() ? "TEE" : "local", ret);
9752 pbi->dec_result = DEC_RESULT_FORCE_EXIT;
9753 vdec_schedule_work(&pbi->work);
9754 return;
9755 }
9756 vdec->mc_loaded = 1;
9757 vdec->mc_type = VFORMAT_VP9;
9758 }
9759
9760 if (vp9_hw_ctx_restore(pbi) < 0) {
9761 vdec_schedule_work(&pbi->work);
9762 return;
9763 }
9764
9765 vdec_enable_input(vdec);
9766
9767 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9768
9769 if (vdec_frame_based(vdec)) {
9770 if (debug & PRINT_FLAG_VDEC_DATA)
9771 dump_data(pbi, pbi->chunk->size);
9772
9773 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
9774 size = pbi->chunk->size +
9775 (pbi->chunk->offset & (VDEC_FIFO_ALIGN - 1));
9776 }
9777 WRITE_VREG(HEVC_DECODE_SIZE, size);
9778 WRITE_VREG(HEVC_DECODE_COUNT, pbi->slice_idx);
9779 pbi->init_flag = 1;
9780
9781 vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL,
9782 "%s: start hevc (%x %x %x)\n",
9783 __func__,
9784 READ_VREG(HEVC_DEC_STATUS_REG),
9785 READ_VREG(HEVC_MPC_E),
9786 READ_VREG(HEVC_MPSR));
9787
9788 start_process_time(pbi);
9789 mod_timer(&pbi->timer, jiffies);
9790 pbi->stat |= STAT_TIMER_ARM;
9791 pbi->stat |= STAT_ISR_REG;
9792 amhevc_start();
9793 pbi->stat |= STAT_VDEC_RUN;
9794}
9795
9796#ifdef SUPPORT_FB_DECODING
/*
 * Stage-1 motion-prediction setup (SUPPORT_FB_DECODING path).
 *
 * Decides whether the previous frame's motion vectors may be reused
 * (use_prev_frame_mvs), programs the HEVC_MPRED_* registers with the
 * current/previous MV buffer addresses for inter frames (or clears the
 * MPRED hardware for key/intra-only frames), and then rotates the
 * pre/pre-pre MV buffer indices unless this is a show_existing_frame,
 * in which case the freshly obtained MV buffer is returned immediately.
 */
static void mpred_process(struct VP9Decoder_s *pbi)
{
	union param_u *params = &pbi->s1_param;
	/* Previous-frame MVs are reusable only when the stream is not in
	 * error-resilient mode, the resolution is unchanged, and the
	 * previous frame was a shown, non-intra, non-key frame.
	 */
	unsigned char use_prev_frame_mvs =
		!params->p.error_resilient_mode &&
		params->p.width == pbi->s1_width &&
		params->p.height == pbi->s1_height &&
		!pbi->s1_intra_only &&
		pbi->s1_last_show_frame &&
		(pbi->s1_frame_type != KEY_FRAME);
	pbi->s1_width = params->p.width;
	pbi->s1_height = params->p.height;
	pbi->s1_frame_type = params->p.frame_type;
	pbi->s1_intra_only =
		(params->p.show_frame ||
		params->p.show_existing_frame)
		? 0 : params->p.intra_only;
	if ((pbi->s1_frame_type != KEY_FRAME)
		&& (!pbi->s1_intra_only)) {
		unsigned int data32;
		int mpred_mv_rd_end_addr;

		/* Read window ends after lcu_total MV units from the
		 * previous frame's MV write base.
		 */
		mpred_mv_rd_end_addr =
			pbi->s1_mpred_mv_wr_start_addr_pre
			+ (pbi->lcu_total * MV_MEM_UNIT);

		WRITE_VREG(HEVC_MPRED_CTRL3, 0x24122412);
		WRITE_VREG(HEVC_MPRED_ABV_START_ADDR,
			pbi->work_space_buf->
			mpred_above.buf_start);

		data32 = READ_VREG(HEVC_MPRED_CTRL4);

		/* Bit 6 enables previous-frame MV reuse. */
		data32 &= (~(1 << 6));
		data32 |= (use_prev_frame_mvs << 6);
		WRITE_VREG(HEVC_MPRED_CTRL4, data32);

		WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
			pbi->s1_mpred_mv_wr_start_addr);
		WRITE_VREG(HEVC_MPRED_MV_WPTR,
			pbi->s1_mpred_mv_wr_start_addr);

		WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR,
			pbi->s1_mpred_mv_wr_start_addr_pre);
		WRITE_VREG(HEVC_MPRED_MV_RPTR,
			pbi->s1_mpred_mv_wr_start_addr_pre);

		WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR,
			mpred_mv_rd_end_addr);

	} else
		clear_mpred_hw(pbi);

	if (!params->p.show_existing_frame) {
		/* Rotate MV buffer history: current becomes "pre", old
		 * "pre" becomes "pre_pre" (freed first if still held).
		 */
		pbi->s1_mpred_mv_wr_start_addr_pre =
			pbi->s1_mpred_mv_wr_start_addr;
		pbi->s1_last_show_frame =
			params->p.show_frame;
		if (pbi->s1_mv_buf_index_pre_pre != MV_BUFFER_NUM)
			put_mv_buf(pbi, &pbi->s1_mv_buf_index_pre_pre);
		pbi->s1_mv_buf_index_pre_pre =
			pbi->s1_mv_buf_index_pre;
		pbi->s1_mv_buf_index_pre = pbi->s1_mv_buf_index;
	} else
		put_mv_buf(pbi, &pbi->s1_mv_buf_index);
}
9863
/*
 * Deferred-work handler for stage-1 (front/parse) completion.
 *
 * In the FB test schedule, a TEST_TRIGGER_DONE result advances the test
 * state machine by raising the mailbox IRQ.  On normal DONE or
 * FORCE_EXIT results the front core is released back to the vdec core
 * scheduler and a new schedule pass is triggered.
 */
static void vp9_s1_work(struct work_struct *s1_work)
{
	struct VP9Decoder_s *pbi = container_of(s1_work,
		struct VP9Decoder_s, s1_work);
	vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL,
		"%s dec_s1_result %d\n",
		__func__,
		pbi->dec_s1_result);

#ifdef FB_DECODING_TEST_SCHEDULE
	if (pbi->dec_s1_result ==
		DEC_S1_RESULT_TEST_TRIGGER_DONE) {
		pbi->s1_test_cmd = TEST_SET_PIC_DONE;
		/* Kick the mailbox IRQ so the ISR runs the next test step. */
		WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
	}
#endif
	if (pbi->dec_s1_result == DEC_S1_RESULT_DONE ||
		pbi->dec_s1_result == DEC_S1_RESULT_FORCE_EXIT) {

		vdec_core_finish_run(hw_to_vdec(pbi),
			CORE_MASK_HEVC_FRONT);

		trigger_schedule(pbi);
		/*pbi->dec_s1_result = DEC_S1_RESULT_NONE;*/
	}

}
9891
/*
 * Run the back (reconstruction) path for one scheduled slot
 * (SUPPORT_FB_DECODING only).
 *
 * Resets and re-initializes the back half of the core, recycles any MMU
 * pages from the previous pass, runs buffer-manager post-processing, then
 * copies the stage-2 RPM parameter snapshot into vp9_param (with the
 * 4-word byte-swizzle the hardware layout requires) and continues decode.
 * run2_busy brackets the whole pass for the watchdog/diagnostics.
 */
static void run_back(struct vdec_s *vdec)
{
	struct VP9Decoder_s *pbi =
		(struct VP9Decoder_s *)vdec->private;
	int i;
	run2_count[pbi->index]++;
	if (debug & PRINT_FLAG_VDEC_STATUS) {
		vp9_print(pbi, 0,
			"%s", __func__);
	}
	pbi->run2_busy = 1;
#ifndef FB_DECODING_TEST_SCHEDULE
	fb_reset_core(vdec, HW_MASK_BACK);

	vvp9_prot_init(pbi, HW_MASK_BACK);
#endif
	vp9_recycle_mmu_buf_tail(pbi);

	if (pbi->frame_count > 0)
		vp9_bufmgr_postproc(pbi);

	if (get_s2_buf(pbi) >= 0) {
		/* Copy RPM words in groups of 4 with reversed order inside
		 * each group, matching the hardware's word layout.
		 */
		for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
			int ii;
			for (ii = 0; ii < 4; ii++)
				vp9_param.l.data[i + ii] =
					pbi->s2_buf->rpm[i + 3 - ii];
		}
#ifndef FB_DECODING_TEST_SCHEDULE
		WRITE_VREG(HEVC_ASSIST_FBD_MMU_MAP_ADDR,
			pbi->stage_mmu_map_phy_addr +
			pbi->s2_buf->index * STAGE_MMU_MAP_SIZE);
#endif
		continue_decoding(pbi);
	}
	pbi->run2_busy = 0;
}
9929#endif
9930
/*
 * Main scheduler entry point: dispatch the granted core mask to the
 * front and/or back run paths and remember the completion callback.
 *
 * NOTE(review): run_count is incremented here AND inside run_front(),
 * so a front run bumps it twice — looks intentional-by-accretion in the
 * original driver; confirm before relying on the counter's meaning.
 */
static void run(struct vdec_s *vdec, unsigned long mask,
	void (*callback)(struct vdec_s *, void *), void *arg)
{
	struct VP9Decoder_s *pbi =
		(struct VP9Decoder_s *)vdec->private;

	vp9_print(pbi,
		PRINT_FLAG_VDEC_DETAIL, "%s mask %lx\r\n",
		__func__, mask);

	run_count[pbi->index]++;
	pbi->vdec_cb_arg = arg;
	pbi->vdec_cb = callback;
#ifdef SUPPORT_FB_DECODING
	if ((mask & CORE_MASK_HEVC) ||
		(mask & CORE_MASK_HEVC_FRONT))
		run_front(vdec);

	if ((pbi->used_stage_buf_num > 0)
		&& (mask & CORE_MASK_HEVC_BACK))
		run_back(vdec);
#else
	run_front(vdec);
#endif
}
9956
9957static void init_frame_bufs(struct VP9Decoder_s *pbi)
9958{
9959 struct vdec_s *vdec = hw_to_vdec(pbi);
9960 struct VP9_Common_s *const cm = &pbi->common;
9961 struct RefCntBuffer_s *const frame_bufs = cm->buffer_pool->frame_bufs;
9962 int i;
9963
9964 for (i = 0; i < pbi->used_buf_num; ++i) {
9965 frame_bufs[i].ref_count = 0;
9966 frame_bufs[i].buf.vf_ref = 0;
9967 frame_bufs[i].buf.decode_idx = 0;
9968 }
9969
9970 if (vdec->parallel_dec == 1) {
9971 for (i = 0; i < FRAME_BUFFERS; i++) {
9972 vdec->free_canvas_ex
9973 (pbi->common.buffer_pool->frame_bufs[i].buf.y_canvas_index,
9974 vdec->id);
9975 vdec->free_canvas_ex
9976 (pbi->common.buffer_pool->frame_bufs[i].buf.uv_canvas_index,
9977 vdec->id);
9978 }
9979 }
9980}
9981
/*
 * vdec reset callback: stop the instance and rebuild its local state.
 *
 * Ordering matters: pending work is cancelled first, then the core is
 * stopped and the watchdog timer disarmed before the MV buffers and the
 * local decoder state are torn down and re-initialized.
 */
static void reset(struct vdec_s *vdec)
{

	struct VP9Decoder_s *pbi =
		(struct VP9Decoder_s *)vdec->private;

	cancel_work_sync(&pbi->work);
	if (pbi->stat & STAT_VDEC_RUN) {
		amhevc_stop();
		pbi->stat &= ~STAT_VDEC_RUN;
	}

	if (pbi->stat & STAT_TIMER_ARM) {
		del_timer_sync(&pbi->timer);
		pbi->stat &= ~STAT_TIMER_ARM;
	}
	pbi->dec_result = DEC_RESULT_NONE;
	reset_process_time(pbi);
	dealloc_mv_bufs(pbi);
	vp9_local_uninit(pbi);
	/* Re-init fails only on allocation problems; log and carry on. */
	if (vvp9_local_init(pbi) < 0)
		vp9_print(pbi, 0, "%s local_init failed \r\n", __func__);
	init_frame_bufs(pbi);

	pbi->eos = 0;

	vp9_print(pbi, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
}
10010
10011static irqreturn_t vp9_irq_cb(struct vdec_s *vdec, int irq)
10012{
10013 struct VP9Decoder_s *pbi =
10014 (struct VP9Decoder_s *)vdec->private;
10015 return vvp9_isr(0, pbi);
10016}
10017
10018static irqreturn_t vp9_threaded_irq_cb(struct vdec_s *vdec, int irq)
10019{
10020 struct VP9Decoder_s *pbi =
10021 (struct VP9Decoder_s *)vdec->private;
10022 return vvp9_isr_thread_fn(0, pbi);
10023}
10024
/*
 * Debugfs-style state dump for one decoder instance: decoder/bufmgr
 * counters, receiver state, vframe queues, MV buffers, key hardware
 * registers, and (optionally, with PRINT_FLAG_VDEC_DATA on a frame-based
 * input) a hex dump of the current input chunk.
 */
static void vp9_dump_state(struct vdec_s *vdec)
{
	struct VP9Decoder_s *pbi =
		(struct VP9Decoder_s *)vdec->private;
	struct VP9_Common_s *const cm = &pbi->common;
	int i;
	vp9_print(pbi, 0, "====== %s\n", __func__);

	vp9_print(pbi, 0,
		"width/height (%d/%d), used_buf_num %d\n",
		cm->width,
		cm->height,
		pbi->used_buf_num
		);

	vp9_print(pbi, 0,
		"is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d low_latency %d no_head %d \n",
		input_frame_based(vdec),
		pbi->eos,
		pbi->dec_result,
		decode_frame_count[pbi->index],
		display_frame_count[pbi->index],
		run_count[pbi->index],
		not_run_ready[pbi->index],
		input_empty[pbi->index],
		pbi->low_latency_flag,
		pbi->no_head
		);

	/* Downstream receiver state, if a receiver is attached. */
	if (vf_get_receiver(vdec->vf_provider_name)) {
		enum receviver_start_e state =
		vf_notify_receiver(vdec->vf_provider_name,
			VFRAME_EVENT_PROVIDER_QUREY_STATE,
			NULL);
		vp9_print(pbi, 0,
			"\nreceiver(%s) state %d\n",
			vdec->vf_provider_name,
			state);
	}

	vp9_print(pbi, 0,
	"%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), free_buf_count %d (min %d for run_ready)\n",
	__func__,
	kfifo_len(&pbi->newframe_q),
	VF_POOL_SIZE,
	kfifo_len(&pbi->display_q),
	VF_POOL_SIZE,
	pbi->vf_pre_count,
	pbi->vf_get_count,
	pbi->vf_put_count,
	get_free_buf_count(pbi),
	run_ready_min_buf_num
	);

	dump_pic_list(pbi);

	/* Motion-vector buffer pool usage. */
	for (i = 0; i < MAX_BUF_NUM; i++) {
		vp9_print(pbi, 0,
			"mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
			i,
			pbi->m_mv_BUF[i].start_adr,
			pbi->m_mv_BUF[i].size,
			pbi->m_mv_BUF[i].used_flag);
	}

	/* Snapshot of the HEVC core and stream-parser registers. */
	vp9_print(pbi, 0,
		"HEVC_DEC_STATUS_REG=0x%x\n",
		READ_VREG(HEVC_DEC_STATUS_REG));
	vp9_print(pbi, 0,
		"HEVC_MPC_E=0x%x\n",
		READ_VREG(HEVC_MPC_E));
	vp9_print(pbi, 0,
		"DECODE_MODE=0x%x\n",
		READ_VREG(DECODE_MODE));
	vp9_print(pbi, 0,
		"NAL_SEARCH_CTL=0x%x\n",
		READ_VREG(NAL_SEARCH_CTL));
	vp9_print(pbi, 0,
		"HEVC_PARSER_LCU_START=0x%x\n",
		READ_VREG(HEVC_PARSER_LCU_START));
	vp9_print(pbi, 0,
		"HEVC_DECODE_SIZE=0x%x\n",
		READ_VREG(HEVC_DECODE_SIZE));
	vp9_print(pbi, 0,
		"HEVC_SHIFT_BYTE_COUNT=0x%x\n",
		READ_VREG(HEVC_SHIFT_BYTE_COUNT));
	vp9_print(pbi, 0,
		"HEVC_STREAM_START_ADDR=0x%x\n",
		READ_VREG(HEVC_STREAM_START_ADDR));
	vp9_print(pbi, 0,
		"HEVC_STREAM_END_ADDR=0x%x\n",
		READ_VREG(HEVC_STREAM_END_ADDR));
	vp9_print(pbi, 0,
		"HEVC_STREAM_LEVEL=0x%x\n",
		READ_VREG(HEVC_STREAM_LEVEL));
	vp9_print(pbi, 0,
		"HEVC_STREAM_WR_PTR=0x%x\n",
		READ_VREG(HEVC_STREAM_WR_PTR));
	vp9_print(pbi, 0,
		"HEVC_STREAM_RD_PTR=0x%x\n",
		READ_VREG(HEVC_STREAM_RD_PTR));
	vp9_print(pbi, 0,
		"PARSER_VIDEO_RP=0x%x\n",
		READ_PARSER_REG(PARSER_VIDEO_RP));
	vp9_print(pbi, 0,
		"PARSER_VIDEO_WP=0x%x\n",
		READ_PARSER_REG(PARSER_VIDEO_WP));

	if (input_frame_based(vdec) &&
		(debug & PRINT_FLAG_VDEC_DATA)
		) {
		int jj;
		if (pbi->chunk && pbi->chunk->block &&
			pbi->chunk->size > 0) {
			u8 *data = NULL;

			/* vmap only if the block isn't permanently mapped. */
			if (!pbi->chunk->block->is_mapped)
				data = codec_mm_vmap(
					pbi->chunk->block->start +
					pbi->chunk->offset,
					pbi->chunk->size);
			else
				data = ((u8 *)pbi->chunk->block->start_virt)
					+ pbi->chunk->offset;
			vp9_print(pbi, 0,
				"frame data size 0x%x\n",
				pbi->chunk->size);
			for (jj = 0; jj < pbi->chunk->size; jj++) {
				if ((jj & 0xf) == 0)
					vp9_print(pbi, 0,
						"%06x:", jj);
				vp9_print_cont(pbi, 0,
					"%02x ", data[jj]);
				if (((jj + 1) & 0xf) == 0)
					vp9_print_cont(pbi, 0,
						"\n");
			}

			if (!pbi->chunk->block->is_mapped)
				codec_mm_unmap_phyaddr(data);
		}
	}

}
10169
10170static int ammvdec_vp9_probe(struct platform_device *pdev)
10171{
10172 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
10173 int ret;
10174 int config_val;
10175 struct vframe_content_light_level_s content_light_level;
10176 struct vframe_master_display_colour_s vf_dp;
10177
10178 struct BUF_s BUF[MAX_BUF_NUM];
10179 struct VP9Decoder_s *pbi = NULL;
10180 pr_debug("%s\n", __func__);
10181
10182 if (pdata == NULL) {
10183 pr_info("\nammvdec_vp9 memory resource undefined.\n");
10184 return -EFAULT;
10185 }
10186 /*pbi = (struct VP9Decoder_s *)devm_kzalloc(&pdev->dev,
10187 sizeof(struct VP9Decoder_s), GFP_KERNEL);*/
10188 memset(&vf_dp, 0, sizeof(struct vframe_master_display_colour_s));
10189 pbi = vmalloc(sizeof(struct VP9Decoder_s));
10190 if (pbi == NULL) {
10191 pr_info("\nammvdec_vp9 device data allocation failed\n");
10192 return -ENOMEM;
10193 }
10194 memset(pbi, 0, sizeof(struct VP9Decoder_s));
10195
10196 /* the ctx from v4l2 driver. */
10197 pbi->v4l2_ctx = pdata->private;
10198
10199 pdata->private = pbi;
10200 pdata->dec_status = vvp9_dec_status;
10201 /* pdata->set_trickmode = set_trickmode; */
10202 pdata->run_ready = run_ready;
10203 pdata->run = run;
10204 pdata->reset = reset;
10205 pdata->irq_handler = vp9_irq_cb;
10206 pdata->threaded_irq_handler = vp9_threaded_irq_cb;
10207 pdata->dump_state = vp9_dump_state;
10208
10209 memcpy(&BUF[0], &pbi->m_BUF[0], sizeof(struct BUF_s) * MAX_BUF_NUM);
10210 memcpy(&pbi->m_BUF[0], &BUF[0], sizeof(struct BUF_s) * MAX_BUF_NUM);
10211
10212 pbi->index = pdev->id;
10213
10214 if (pdata->use_vfm_path)
10215 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
10216 VFM_DEC_PROVIDER_NAME);
10217 else
10218 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
10219 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
10220
10221 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
10222 &vvp9_vf_provider, pbi);
10223
10224 pbi->provider_name = pdata->vf_provider_name;
10225 platform_set_drvdata(pdev, pdata);
10226
10227 pbi->platform_dev = pdev;
10228 pbi->video_signal_type = 0;
10229 pbi->m_ins_flag = 1;
10230 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_TXLX)
10231 pbi->stat |= VP9_TRIGGER_FRAME_ENABLE;
10232
10233 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
10234 pbi->max_pic_w = 8192;
10235 pbi->max_pic_h = 4608;
10236 } else {
10237 pbi->max_pic_w = 4096;
10238 pbi->max_pic_h = 2304;
10239 }
10240#if 1
10241 if ((debug & IGNORE_PARAM_FROM_CONFIG) == 0 &&
10242 pdata->config_len) {
10243#ifdef MULTI_INSTANCE_SUPPORT
10244 int vp9_buf_width = 0;
10245 int vp9_buf_height = 0;
10246 /*use ptr config for doubel_write_mode, etc*/
10247 vp9_print(pbi, 0, "pdata->config=%s\n", pdata->config);
10248 if (get_config_int(pdata->config, "vp9_double_write_mode",
10249 &config_val) == 0)
10250 pbi->double_write_mode = config_val;
10251 else
10252 pbi->double_write_mode = double_write_mode;
10253
10254 if (get_config_int(pdata->config, "save_buffer_mode",
10255 &config_val) == 0)
10256 pbi->save_buffer_mode = config_val;
10257 else
10258 pbi->save_buffer_mode = 0;
10259 if (get_config_int(pdata->config, "vp9_buf_width",
10260 &config_val) == 0) {
10261 vp9_buf_width = config_val;
10262 }
10263 if (get_config_int(pdata->config, "vp9_buf_height",
10264 &config_val) == 0) {
10265 vp9_buf_height = config_val;
10266 }
10267
10268 if (get_config_int(pdata->config, "no_head",
10269 &config_val) == 0)
10270 pbi->no_head = config_val;
10271 else
10272 pbi->no_head = no_head;
10273
10274 /*use ptr config for max_pic_w, etc*/
10275 if (get_config_int(pdata->config, "vp9_max_pic_w",
10276 &config_val) == 0) {
10277 pbi->max_pic_w = config_val;
10278 }
10279 if (get_config_int(pdata->config, "vp9_max_pic_h",
10280 &config_val) == 0) {
10281 pbi->max_pic_h = config_val;
10282 }
10283 if ((pbi->max_pic_w * pbi->max_pic_h)
10284 < (vp9_buf_width * vp9_buf_height)) {
10285 pbi->max_pic_w = vp9_buf_width;
10286 pbi->max_pic_h = vp9_buf_height;
10287 vp9_print(pbi, 0, "use buf resolution\n");
10288 }
10289#endif
10290 if (get_config_int(pdata->config, "HDRStaticInfo",
10291 &vf_dp.present_flag) == 0
10292 && vf_dp.present_flag == 1) {
10293 get_config_int(pdata->config, "mG.x",
10294 &vf_dp.primaries[0][0]);
10295 get_config_int(pdata->config, "mG.y",
10296 &vf_dp.primaries[0][1]);
10297 get_config_int(pdata->config, "mB.x",
10298 &vf_dp.primaries[1][0]);
10299 get_config_int(pdata->config, "mB.y",
10300 &vf_dp.primaries[1][1]);
10301 get_config_int(pdata->config, "mR.x",
10302 &vf_dp.primaries[2][0]);
10303 get_config_int(pdata->config, "mR.y",
10304 &vf_dp.primaries[2][1]);
10305 get_config_int(pdata->config, "mW.x",
10306 &vf_dp.white_point[0]);
10307 get_config_int(pdata->config, "mW.y",
10308 &vf_dp.white_point[1]);
10309 get_config_int(pdata->config, "mMaxDL",
10310 &vf_dp.luminance[0]);
10311 get_config_int(pdata->config, "mMinDL",
10312 &vf_dp.luminance[1]);
10313 vf_dp.content_light_level.present_flag = 1;
10314 get_config_int(pdata->config, "mMaxCLL",
10315 &content_light_level.max_content);
10316 get_config_int(pdata->config, "mMaxFALL",
10317 &content_light_level.max_pic_average);
10318 vf_dp.content_light_level = content_light_level;
10319 pbi->video_signal_type = (1 << 29)
10320 | (5 << 26) /* unspecified */
10321 | (0 << 25) /* limit */
10322 | (1 << 24) /* color available */
10323 | (9 << 16) /* 2020 */
10324 | (16 << 8) /* 2084 */
10325 | (9 << 0); /* 2020 */
10326 }
10327 pbi->vf_dp = vf_dp;
10328 } else
10329#endif
10330 {
10331 /*pbi->vvp9_amstream_dec_info.width = 0;
10332 pbi->vvp9_amstream_dec_info.height = 0;
10333 pbi->vvp9_amstream_dec_info.rate = 30;*/
10334 pbi->double_write_mode = double_write_mode;
10335 }
10336 if (is_oversize(pbi->max_pic_w, pbi->max_pic_h)) {
10337 pr_err("over size: %dx%d, probe failed\n",
10338 pbi->max_pic_w, pbi->max_pic_h);
10339 return -1;
10340 }
10341 pbi->mmu_enable = 1;
10342 video_signal_type = pbi->video_signal_type;
10343
10344 if (pdata->sys_info) {
10345 pbi->vvp9_amstream_dec_info = *pdata->sys_info;
10346 } else {
10347 pbi->vvp9_amstream_dec_info.width = 0;
10348 pbi->vvp9_amstream_dec_info.height = 0;
10349 pbi->vvp9_amstream_dec_info.rate = 30;
10350 }
10351 pbi->low_latency_flag = 1;
10352 pbi->is_used_v4l = (((unsigned long)
10353 pbi->vvp9_amstream_dec_info.param & 0x80) >> 7);
10354 if (pbi->is_used_v4l) {
10355 pbi->mmu_enable = (((unsigned long)
10356 pbi->vvp9_amstream_dec_info.param & 0x100) >> 8);
10357 if (!pbi->mmu_enable)
10358 pbi->double_write_mode = 0x10;
10359
10360 pbi->mmu_enable = 1;
10361 pbi->max_pic_w = 4096;
10362 pbi->max_pic_h = 2304;
10363 }
10364
10365 vp9_print(pbi, 0,
10366 "no_head %d low_latency %d\n",
10367 pbi->no_head, pbi->low_latency_flag);
10368#if 0
10369 pbi->buf_start = pdata->mem_start;
10370 pbi->buf_size = pdata->mem_end - pdata->mem_start + 1;
10371#else
10372 if (amvdec_vp9_mmu_init(pbi) < 0) {
10373 pr_err("vp9 alloc bmmu box failed!!\n");
10374 /* devm_kfree(&pdev->dev, (void *)pbi); */
10375 vfree((void *)pbi);
10376 pdata->dec_status = NULL;
10377 return -1;
10378 }
10379
10380 pbi->cma_alloc_count = PAGE_ALIGN(work_buf_size) / PAGE_SIZE;
10381 ret = decoder_bmmu_box_alloc_buf_phy(pbi->bmmu_box, WORK_SPACE_BUF_ID,
10382 pbi->cma_alloc_count * PAGE_SIZE, DRIVER_NAME,
10383 &pbi->cma_alloc_addr);
10384 if (ret < 0) {
10385 uninit_mmu_buffers(pbi);
10386 /* devm_kfree(&pdev->dev, (void *)pbi); */
10387 vfree((void *)pbi);
10388 pdata->dec_status = NULL;
10389 return ret;
10390 }
10391 pbi->buf_start = pbi->cma_alloc_addr;
10392 pbi->buf_size = work_buf_size;
10393#endif
10394
10395 pbi->init_flag = 0;
10396 pbi->first_sc_checked = 0;
10397 pbi->fatal_error = 0;
10398 pbi->show_frame_num = 0;
10399
10400 if (debug) {
10401 pr_info("===VP9 decoder mem resource 0x%lx size 0x%x\n",
10402 pbi->buf_start,
10403 pbi->buf_size);
10404 }
10405
10406 pbi->cma_dev = pdata->cma_dev;
10407 if (vvp9_init(pdata) < 0) {
10408 pr_info("\namvdec_vp9 init failed.\n");
10409 vp9_local_uninit(pbi);
10410 uninit_mmu_buffers(pbi);
10411 /* devm_kfree(&pdev->dev, (void *)pbi); */
10412 vfree((void *)pbi);
10413 pdata->dec_status = NULL;
10414 return -ENODEV;
10415 }
10416 vdec_set_prepare_level(pdata, start_decode_buf_level);
10417 hevc_source_changed(VFORMAT_VP9,
10418 4096, 2048, 60);
10419#ifdef SUPPORT_FB_DECODING
10420 if (pbi->used_stage_buf_num > 0)
10421 vdec_core_request(pdata,
10422 CORE_MASK_HEVC_FRONT | CORE_MASK_HEVC_BACK);
10423 else
10424 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
10425 | CORE_MASK_HEVC_FRONT | CORE_MASK_HEVC_BACK
10426 | CORE_MASK_COMBINE);
10427#else
10428 if (pdata->parallel_dec == 1)
10429 vdec_core_request(pdata, CORE_MASK_HEVC);
10430 else
10431 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
10432 | CORE_MASK_COMBINE);
10433#endif
10434 pbi->pic_list_init_done2 = true;
10435 return 0;
10436}
10437
/*
 * Remove one multi-instance VP9 decoder: stop the decoder, release the
 * cores back to the vdec scheduler, free the per-buffer canvases in
 * parallel mode, and free the instance structure.
 */
static int ammvdec_vp9_remove(struct platform_device *pdev)
{
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)
		(((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
	struct vdec_s *vdec = hw_to_vdec(pbi);
	int i;
	if (debug)
		pr_info("amvdec_vp9_remove\n");

	vmvp9_stop(pbi);

#ifdef SUPPORT_FB_DECODING
	vdec_core_release(hw_to_vdec(pbi), CORE_MASK_VDEC_1 | CORE_MASK_HEVC
		| CORE_MASK_HEVC_FRONT | CORE_MASK_HEVC_BACK
		);
#else
	if (vdec->parallel_dec == 1)
		vdec_core_release(hw_to_vdec(pbi), CORE_MASK_HEVC);
	else
		vdec_core_release(hw_to_vdec(pbi), CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
#endif
	vdec_set_status(hw_to_vdec(pbi), VDEC_STATUS_DISCONNECTED);

	/* Parallel mode: each instance owns its canvases; return them. */
	if (vdec->parallel_dec == 1) {
		for (i = 0; i < FRAME_BUFFERS; i++) {
			vdec->free_canvas_ex
				(pbi->common.buffer_pool->frame_bufs[i].buf.y_canvas_index,
				vdec->id);
			vdec->free_canvas_ex
				(pbi->common.buffer_pool->frame_bufs[i].buf.uv_canvas_index,
				vdec->id);
		}
	}


#ifdef DEBUG_PTS
	pr_info("pts missed %ld, pts hit %ld, duration %d\n",
		   pbi->pts_missed, pbi->pts_hit, pbi->frame_dur);
#endif
	mem_map_mode = 0;

	/* devm_kfree(&pdev->dev, (void *)pbi); */
	vfree((void *)pbi);
	return 0;
}
10483
/* Platform driver for the multi-instance ("mvp9") decoder path. */
static struct platform_driver ammvdec_vp9_driver = {
	.probe = ammvdec_vp9_probe,
	.remove = ammvdec_vp9_remove,
	.driver = {
		.name = MULTI_DRIVER_NAME,
#ifdef CONFIG_PM
		.pm = &vp9_pm_ops,
#endif
	}
};
10494#endif
/* Runtime-tunable parameters exported through the media config node
 * (registered under "media.decoder/vp9" in the module init below).
 */
static struct mconfig vp9_configs[] = {
	MC_PU32("bit_depth_luma", &bit_depth_luma),
	MC_PU32("bit_depth_chroma", &bit_depth_chroma),
	MC_PU32("frame_width", &frame_width),
	MC_PU32("frame_height", &frame_height),
	MC_PU32("debug", &debug),
	MC_PU32("radr", &radr),
	MC_PU32("rval", &rval),
	MC_PU32("pop_shorts", &pop_shorts),
	MC_PU32("dbg_cmd", &dbg_cmd),
	MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
	MC_PU32("endian", &endian),
	MC_PU32("step", &step),
	MC_PU32("udebug_flag", &udebug_flag),
	MC_PU32("decode_pic_begin", &decode_pic_begin),
	MC_PU32("slice_parse_begin", &slice_parse_begin),
	MC_PU32("i_only_flag", &i_only_flag),
	MC_PU32("error_handle_policy", &error_handle_policy),
	MC_PU32("buf_alloc_width", &buf_alloc_width),
	MC_PU32("buf_alloc_height", &buf_alloc_height),
	MC_PU32("buf_alloc_depth", &buf_alloc_depth),
	MC_PU32("buf_alloc_size", &buf_alloc_size),
	MC_PU32("buffer_mode", &buffer_mode),
	MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
	MC_PU32("max_buf_num", &max_buf_num),
	MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
	MC_PU32("mem_map_mode", &mem_map_mode),
	MC_PU32("double_write_mode", &double_write_mode),
	MC_PU32("enable_mem_saving", &enable_mem_saving),
	MC_PU32("force_w_h", &force_w_h),
	MC_PU32("force_fps", &force_fps),
	MC_PU32("max_decoding_time", &max_decoding_time),
	MC_PU32("on_no_keyframe_skiped", &on_no_keyframe_skiped),
	MC_PU32("start_decode_buf_level", &start_decode_buf_level),
	MC_PU32("decode_timeout_val", &decode_timeout_val),
	MC_PU32("vp9_max_pic_w", &vp9_max_pic_w),
	MC_PU32("vp9_max_pic_h", &vp9_max_pic_h),
};
/* Config-tree node the table above is registered under. */
static struct mconfig_node vp9_node;
10534
/*
 * Module init: compute the work-buffer size from the SoC-appropriate
 * buffer spec, reset debug knobs, register the single- and (when built)
 * multi-instance platform drivers, publish the capability profile
 * strings per SoC generation, and register the config node.
 */
static int __init amvdec_vp9_driver_init_module(void)
{

	struct BuffInfo_s *p_buf_info;

	/* Pick the work-buffer spec: 8K on SM1+, 4K, else HD-only. */
	if (vdec_is_support_4k()) {
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
			p_buf_info = &amvvp9_workbuff_spec[2];
		else
			p_buf_info = &amvvp9_workbuff_spec[1];
	} else
		p_buf_info = &amvvp9_workbuff_spec[0];

	init_buff_spec(NULL, p_buf_info);
	/* Round the total work-buffer size up to a 64KiB boundary. */
	work_buf_size =
		(p_buf_info->end_adr - p_buf_info->start_adr
		 + 0xffff) & (~0xffff);

	pr_debug("amvdec_vp9 module init\n");

	error_handle_policy = 0;

#ifdef ERROR_HANDLE_DEBUG
	dbg_nal_skip_flag = 0;
	dbg_nal_skip_count = 0;
#endif
	udebug_flag = 0;
	decode_pic_begin = 0;
	slice_parse_begin = 0;
	step = 0;
	buf_alloc_size = 0;
#ifdef MULTI_INSTANCE_SUPPORT
	/* Multi-instance registration failure is logged but non-fatal. */
	if (platform_driver_register(&ammvdec_vp9_driver))
		pr_err("failed to register ammvdec_vp9 driver\n");

#endif
	if (platform_driver_register(&amvdec_vp9_driver)) {
		pr_err("failed to register amvdec_vp9 driver\n");
		return -ENODEV;
	}

	/* Advertise decoder capabilities per SoC generation. */
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
		amvdec_vp9_profile.profile =
				"8k, 10bit, dwrite, compressed, no_head";
	} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL
		/*&& get_cpu_major_id() != MESON_CPU_MAJOR_ID_GXLX*/
		&& get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TXL) {
		if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_TXLX) {
			if (vdec_is_support_4k())
				amvdec_vp9_profile.profile =
					"4k, 10bit, dwrite, compressed";
			else
				amvdec_vp9_profile.profile =
					"10bit, dwrite, compressed";
		} else {
			if (vdec_is_support_4k())
				amvdec_vp9_profile.profile =
					"4k, 10bit, dwrite, compressed, no_head";
			else
				amvdec_vp9_profile.profile =
					"10bit, dwrite, compressed, no_head";
		}

	} else {
		amvdec_vp9_profile.name = "vp9_unsupport";
	}

	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		max_buf_num = MAX_BUF_NUM_LESS;

	vcodec_profile_register(&amvdec_vp9_profile);
	amvdec_vp9_profile_mult = amvdec_vp9_profile;
	amvdec_vp9_profile_mult.name = "mvp9";
	vcodec_profile_register(&amvdec_vp9_profile_mult);
	INIT_REG_NODE_CONFIGS("media.decoder", &vp9_node,
		"vp9", vp9_configs, CONFIG_FOR_RW);

	return 0;
}
10614
/* Module exit: unregister both platform drivers (multi-instance first). */
static void __exit amvdec_vp9_driver_remove_module(void)
{
	pr_debug("amvdec_vp9 module remove.\n");
#ifdef MULTI_INSTANCE_SUPPORT
	platform_driver_unregister(&ammvdec_vp9_driver);
#endif
	platform_driver_unregister(&amvdec_vp9_driver);
}
10623
10624/****************************************/
10625
10626module_param(bit_depth_luma, uint, 0664);
10627MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_vp9 bit_depth_luma\n");
10628
10629module_param(bit_depth_chroma, uint, 0664);
10630MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_vp9 bit_depth_chroma\n");
10631
10632module_param(frame_width, uint, 0664);
10633MODULE_PARM_DESC(frame_width, "\n amvdec_vp9 frame_width\n");
10634
10635module_param(frame_height, uint, 0664);
10636MODULE_PARM_DESC(frame_height, "\n amvdec_vp9 frame_height\n");
10637
10638module_param(debug, uint, 0664);
10639MODULE_PARM_DESC(debug, "\n amvdec_vp9 debug\n");
10640
10641module_param(radr, uint, 0664);
10642MODULE_PARM_DESC(radr, "\n radr\n");
10643
10644module_param(rval, uint, 0664);
10645MODULE_PARM_DESC(rval, "\n rval\n");
10646
10647module_param(pop_shorts, uint, 0664);
10648MODULE_PARM_DESC(pop_shorts, "\n rval\n");
10649
10650module_param(dbg_cmd, uint, 0664);
10651MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
10652
10653module_param(dbg_skip_decode_index, uint, 0664);
10654MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
10655
10656module_param(endian, uint, 0664);
10657MODULE_PARM_DESC(endian, "\n rval\n");
10658
10659module_param(step, uint, 0664);
10660MODULE_PARM_DESC(step, "\n amvdec_vp9 step\n");
10661
/* Decode-control and buffer-allocation tuning parameters. */
module_param(decode_pic_begin, uint, 0664);
MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_vp9 decode_pic_begin\n");

module_param(slice_parse_begin, uint, 0664);
MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_vp9 slice_parse_begin\n");

module_param(i_only_flag, uint, 0664);
MODULE_PARM_DESC(i_only_flag, "\n amvdec_vp9 i_only_flag\n");

module_param(low_latency_flag, uint, 0664);
MODULE_PARM_DESC(low_latency_flag, "\n amvdec_vp9 low_latency_flag\n");

module_param(no_head, uint, 0664);
MODULE_PARM_DESC(no_head, "\n amvdec_vp9 no_head\n");

module_param(error_handle_policy, uint, 0664);
MODULE_PARM_DESC(error_handle_policy, "\n amvdec_vp9 error_handle_policy\n");

module_param(buf_alloc_width, uint, 0664);
MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");

module_param(buf_alloc_height, uint, 0664);
MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");

module_param(buf_alloc_depth, uint, 0664);
MODULE_PARM_DESC(buf_alloc_depth, "\n buf_alloc_depth\n");

module_param(buf_alloc_size, uint, 0664);
MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");

module_param(buffer_mode, uint, 0664);
MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");

module_param(buffer_mode_dbg, uint, 0664);
MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");

/* USE_BUF_BLOCK: frame-buffer pool sizing knobs */
module_param(max_buf_num, uint, 0664);
MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");

module_param(dynamic_buf_num_margin, uint, 0664);
MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");

module_param(mv_buf_margin, uint, 0664);
MODULE_PARM_DESC(mv_buf_margin, "\n mv_buf_margin\n");

module_param(run_ready_min_buf_num, uint, 0664);
MODULE_PARM_DESC(run_ready_min_buf_num, "\n run_ready_min_buf_num\n");

/* end of USE_BUF_BLOCK group */
10710/**/
10711
/* Memory-mapping mode for the decoded-frame buffers. */
module_param(mem_map_mode, uint, 0664);
MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");

#ifdef SUPPORT_10BIT
/* 10-bit / double-write path knobs (only built when 10-bit is enabled). */
module_param(double_write_mode, uint, 0664);
MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");

module_param(enable_mem_saving, uint, 0664);
MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");

module_param(force_w_h, uint, 0664);
MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
#endif

module_param(force_fps, uint, 0664);
MODULE_PARM_DESC(force_fps, "\n force_fps\n");

module_param(max_decoding_time, uint, 0664);
MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");

module_param(on_no_keyframe_skiped, uint, 0664);
MODULE_PARM_DESC(on_no_keyframe_skiped, "\n on_no_keyframe_skiped\n");

module_param(mcrcc_cache_alg_flag, uint, 0664);
MODULE_PARM_DESC(mcrcc_cache_alg_flag, "\n mcrcc_cache_alg_flag\n");
10737
#ifdef MULTI_INSTANCE_SUPPORT
/*
 * Multi-instance parameters: per-instance statistics are exposed as
 * arrays indexed by decoder instance (max_decode_instance_num entries).
 */
module_param(start_decode_buf_level, int, 0664);
MODULE_PARM_DESC(start_decode_buf_level,
		"\n vp9 start_decode_buf_level\n");

module_param(decode_timeout_val, uint, 0664);
MODULE_PARM_DESC(decode_timeout_val,
	"\n vp9 decode_timeout_val\n");

module_param(vp9_max_pic_w, uint, 0664);
MODULE_PARM_DESC(vp9_max_pic_w, "\n vp9_max_pic_w\n");

module_param(vp9_max_pic_h, uint, 0664);
MODULE_PARM_DESC(vp9_max_pic_h, "\n vp9_max_pic_h\n");

module_param_array(decode_frame_count, uint,
	&max_decode_instance_num, 0664);

module_param_array(display_frame_count, uint,
	&max_decode_instance_num, 0664);

module_param_array(max_process_time, uint,
	&max_decode_instance_num, 0664);

module_param_array(run_count, uint,
	&max_decode_instance_num, 0664);

module_param_array(input_empty, uint,
	&max_decode_instance_num, 0664);

module_param_array(not_run_ready, uint,
	&max_decode_instance_num, 0664);
#endif

#ifdef SUPPORT_FB_DECODING
module_param_array(not_run2_ready, uint,
	&max_decode_instance_num, 0664);

module_param_array(run2_count, uint,
	&max_decode_instance_num, 0664);

/* fix copy-paste from vh265.c: this is the vp9 decoder */
module_param(stage_buf_num, uint, 0664);
MODULE_PARM_DESC(stage_buf_num, "\n amvdec_vp9 stage_buf_num\n");
#endif
10782
10783module_param(udebug_flag, uint, 0664);
10784MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
10785
10786module_param(udebug_pause_pos, uint, 0664);
10787MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
10788
10789module_param(udebug_pause_val, uint, 0664);
10790MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
10791
10792module_param(udebug_pause_decode_idx, uint, 0664);
10793MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
10794
10795module_param(without_display_mode, uint, 0664);
10796MODULE_PARM_DESC(without_display_mode, "\n without_display_mode\n");
10797
10798module_init(amvdec_vp9_driver_init_module);
10799module_exit(amvdec_vp9_driver_remove_module);
10800
10801MODULE_DESCRIPTION("AMLOGIC vp9 Video Decoder Driver");
10802MODULE_LICENSE("GPL");
10803
10804