summaryrefslogtreecommitdiff
path: root/drivers/frame_provider/decoder/utils/vdec.c (plain)
blob: 1a6f982737184d69648716371465140b11f921d7
1/*
2 * drivers/amlogic/media/frame_provider/decoder/utils/vdec.c
3 *
4 * Copyright (C) 2016 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/spinlock.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/delay.h>
23#include <linux/kthread.h>
24#include <linux/platform_device.h>
25#include <linux/uaccess.h>
26#include <linux/semaphore.h>
27#include <linux/sched/rt.h>
28#include <linux/interrupt.h>
29#include <linux/amlogic/media/utils/vformat.h>
30#include <linux/amlogic/iomap.h>
31#include <linux/amlogic/media/canvas/canvas.h>
32#include <linux/amlogic/media/vfm/vframe.h>
33#include <linux/amlogic/media/vfm/vframe_provider.h>
34#include <linux/amlogic/media/vfm/vframe_receiver.h>
35#include <linux/amlogic/media/video_sink/ionvideo_ext.h>
36#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
37#include <linux/amlogic/media/video_sink/v4lvideo_ext.h>
38#endif
39#include <linux/amlogic/media/vfm/vfm_ext.h>
40/*for VDEC_DEBUG_SUPPORT*/
41#include <linux/time.h>
42
43#include <linux/amlogic/media/utils/vdec_reg.h>
44#include "vdec.h"
45#include "vdec_trace.h"
46#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
47#include "vdec_profile.h"
48#endif
49#include <linux/of.h>
50#include <linux/of_fdt.h>
51#include <linux/libfdt_env.h>
52#include <linux/of_reserved_mem.h>
53#include <linux/dma-contiguous.h>
54#include <linux/cma.h>
55#include <linux/module.h>
56#include <linux/slab.h>
57#include <linux/dma-mapping.h>
58#include <linux/dma-contiguous.h>
59#include "../../../stream_input/amports/amports_priv.h"
60
61#include <linux/amlogic/media/utils/amports_config.h>
62#include "../utils/amvdec.h"
63#include "vdec_input.h"
64
65#include "../../../common/media_clock/clk/clk.h"
66#include <linux/reset.h>
67#include <linux/amlogic/cpu_version.h>
68#include <linux/amlogic/media/codec_mm/codec_mm.h>
69#include <linux/amlogic/media/video_sink/video_keeper.h>
70#include <linux/amlogic/media/codec_mm/configs.h>
71#include <linux/amlogic/media/frame_sync/ptsserv.h>
72#include "secprot.h"
73#include "../../../common/chips/decoder_cpu_ver_info.h"
74#include "frame_check.h"
75
76#ifdef CONFIG_AMLOGIC_POWER
77#include <linux/amlogic/power_ctrl.h>
78#endif
79
static DEFINE_MUTEX(vdec_mutex);

#define MC_SIZE (4096 * 4)
#define CMA_ALLOC_SIZE SZ_64M
#define MEM_NAME "vdec_prealloc"
static int inited_vcodec_num;
/* elapsed jiffies expressed in milliseconds */
#define jiffies_ms div64_u64(get_jiffies_64() * 1000, HZ)
static int poweron_clock_level;
static int keep_vdec_mem;
static unsigned int debug_trace_num = 16 * 20;
static int step_mode;
static unsigned int clk_config;
/*
 * Bit meanings of the module-level "debug" word below:
 * &1: sched_priority to MAX_RT_PRIO -1.
 * &2: always reload firmware.
 * &4: vdec canvas debug enable
 * &8: log scheduler wake-ups (see vdec_up())
 */
static unsigned int debug;

static int hevc_max_reset_count;

static int no_powerdown;
static int parallel_decode = 1;
static int fps_detection;
static int fps_clear;


static int force_nosecure_even_drm;
static int disable_switch_single_to_mult;

static DEFINE_SPINLOCK(vdec_spin_lock);

#define HEVC_TEST_LIMIT 100
#define GXBB_REV_A_MINOR 0xA

#define PRINT_FRAME_INFO 1
#define DISABLE_FRAME_INFO 2

static int frameinfo_flag = 0;
static int v4lvideo_add_di = 1;
static int max_di_instance = 1;

//static int path_debug = 0;

/* per-frame QoS info ring buffers and their write/read cursors */
static struct vframe_qos_s *frame_info_buf_in = NULL;
static struct vframe_qos_s *frame_info_buf_out = NULL;
static int frame_qos_wr = 0;
static int frame_qos_rd = 0;
int decode_underflow = 0;

/* number of managed canvas slots across both hardware index ranges */
#define CANVAS_MAX_SIZE (AMVDEC_CANVAS_MAX1 - AMVDEC_CANVAS_START_INDEX + 1 + AMVDEC_CANVAS_MAX2 + 1)
131
/* name/offset pair used for register description tables */
struct am_reg {
	char *name;
	int offset;
};

/* per-IRQ bookkeeping: registered handlers and the vdec the line serves */
struct vdec_isr_context_s {
	int index;
	int irq;
	irq_handler_t dev_isr;
	irq_handler_t dev_threaded_isr;
	void *dev_id;
	struct vdec_s *vdec;
};

/* fps measurement state of one decoder instance (timestamps in us) */
struct decode_fps_s {
	u32 frame_count;
	u64 start_timestamp;
	u64 last_timestamp;
	u32 fps;
};
152
/*
 * Global state of the decoder core: the list of connected instances,
 * the scheduler thread and its wake-up semaphore, per-core power
 * bookkeeping, and the input-buffer status bitmaps rebuilt by
 * vdec_update_buff_status().
 */
struct vdec_core_s {
	struct list_head connected_vdec_list;
	spinlock_t lock;
	spinlock_t canvas_lock;
	spinlock_t fps_lock;
	spinlock_t input_lock;
	struct ida ida;		/* allocator for instance ids */
	atomic_t vdec_nr;	/* number of live vdec instances */
	struct vdec_s *vfm_vdec;
	struct vdec_s *active_vdec;
	struct vdec_s *active_hevc;
	struct vdec_s *hint_fr_vdec;
	struct platform_device *vdec_core_platform_device;
	struct device *cma_dev;
	struct semaphore sem;	/* wakes the scheduler thread (vdec_up) */
	struct task_struct *thread;
	struct workqueue_struct *vdec_core_wq;

	unsigned long sched_mask;
	struct vdec_isr_context_s isr_context[VDEC_IRQ_MAX];
	int power_ref_count[VDEC_MAX];
	struct vdec_s *last_vdec;
	int parallel_dec;
	unsigned long power_ref_mask;
	int vdec_combine_flag;
	struct decode_fps_s decode_fps[MAX_INSTANCE_MUN];
	unsigned long buff_flag;	/* core bits with frames pending */
	unsigned long stream_buff_flag;	/* core bits on stream input */
};

/* reference-counted ownership record for one canvas slot */
struct canvas_status_s {
	int type;		/* 0 = free; otherwise owner's type tag */
	int canvas_used_flag;	/* reference count */
	int id;			/* bitmask of instance ids sharing it */
};
188
189
/* the single global decoder-core state (allocated elsewhere) */
static struct vdec_core_s *vdec_core;

static const char * const vdec_status_string[] = {
	"VDEC_STATUS_UNINITIALIZED",
	"VDEC_STATUS_DISCONNECTED",
	"VDEC_STATUS_CONNECTED",
	"VDEC_STATUS_ACTIVE"
};

static int debugflags;

/* shared canvas table covering both hardware canvas index ranges */
static struct canvas_status_s canvas_stat[AMVDEC_CANVAS_MAX1 - AMVDEC_CANVAS_START_INDEX + 1 + AMVDEC_CANVAS_MAX2 + 1];
202
203
/* Expose the module-level debugflags word to other decoder modules. */
int vdec_get_debug_flags(void)
{
	return debugflags;
}
EXPORT_SYMBOL(vdec_get_debug_flags);
209
210unsigned char is_mult_inc(unsigned int type)
211{
212 unsigned char ret = 0;
213 if (vdec_get_debug_flags() & 0xf000)
214 ret = (vdec_get_debug_flags() & 0x1000)
215 ? 1 : 0;
216 else if (type & PORT_TYPE_DECODER_SCHED)
217 ret = 1;
218 return ret;
219}
220EXPORT_SYMBOL(is_mult_inc);
221
/* per-core flag: whether the core has its own decoder input context */
static const bool cores_with_input[VDEC_MAX] = {
	true, /* VDEC_1 */
	false, /* VDEC_HCODEC */
	false, /* VDEC_2 */
	true, /* VDEC_HEVC / VDEC_HEVC_FRONT */
	false, /* VDEC_HEVC_BACK */
};

/* IRQ line used by each core; indexed like cores_with_input */
static const int cores_int[VDEC_MAX] = {
	VDEC_IRQ_1,
	VDEC_IRQ_2,
	VDEC_IRQ_0,
	VDEC_IRQ_0,
	VDEC_IRQ_HEVC_BACK
};
237
238unsigned long vdec_canvas_lock(struct vdec_core_s *core)
239{
240 unsigned long flags;
241 spin_lock_irqsave(&core->canvas_lock, flags);
242
243 return flags;
244}
245
246void vdec_canvas_unlock(struct vdec_core_s *core, unsigned long flags)
247{
248 spin_unlock_irqrestore(&core->canvas_lock, flags);
249}
250
251unsigned long vdec_fps_lock(struct vdec_core_s *core)
252{
253 unsigned long flags;
254 spin_lock_irqsave(&core->fps_lock, flags);
255
256 return flags;
257}
258
259void vdec_fps_unlock(struct vdec_core_s *core, unsigned long flags)
260{
261 spin_unlock_irqrestore(&core->fps_lock, flags);
262}
263
264unsigned long vdec_core_lock(struct vdec_core_s *core)
265{
266 unsigned long flags;
267
268 spin_lock_irqsave(&core->lock, flags);
269
270 return flags;
271}
272
273void vdec_core_unlock(struct vdec_core_s *core, unsigned long flags)
274{
275 spin_unlock_irqrestore(&core->lock, flags);
276}
277
278unsigned long vdec_inputbuff_lock(struct vdec_core_s *core)
279{
280 unsigned long flags;
281
282 spin_lock_irqsave(&core->input_lock, flags);
283
284 return flags;
285}
286
287void vdec_inputbuff_unlock(struct vdec_core_s *core, unsigned long flags)
288{
289 spin_unlock_irqrestore(&core->input_lock, flags);
290}
291
292
293static bool vdec_is_input_frame_empty(struct vdec_s *vdec) {
294 struct vdec_core_s *core = vdec_core;
295 bool ret;
296 unsigned long flags;
297
298 flags = vdec_inputbuff_lock(core);
299 ret = !(vdec->core_mask & core->buff_flag);
300 vdec_inputbuff_unlock(core, flags);
301
302 return ret;
303}
304
305static void vdec_up(struct vdec_s *vdec)
306{
307 struct vdec_core_s *core = vdec_core;
308
309 if (debug & 8)
310 pr_info("vdec_up, id:%d\n", vdec->id);
311 up(&core->sem);
312}
313
314
315static u64 vdec_get_us_time_system(void)
316{
317 struct timeval tv;
318
319 do_gettimeofday(&tv);
320
321 return div64_u64(timeval_to_ns(&tv), 1000);
322}
323
324static void vdec_fps_clear(int id)
325{
326 if (id >= MAX_INSTANCE_MUN)
327 return;
328
329 vdec_core->decode_fps[id].frame_count = 0;
330 vdec_core->decode_fps[id].start_timestamp = 0;
331 vdec_core->decode_fps[id].last_timestamp = 0;
332 vdec_core->decode_fps[id].fps = 0;
333}
334
335static void vdec_fps_clearall(void)
336{
337 int i;
338
339 for (i = 0; i < MAX_INSTANCE_MUN; i++) {
340 vdec_core->decode_fps[i].frame_count = 0;
341 vdec_core->decode_fps[i].start_timestamp = 0;
342 vdec_core->decode_fps[i].last_timestamp = 0;
343 vdec_core->decode_fps[i].fps = 0;
344 }
345}
346
347static void vdec_fps_detec(int id)
348{
349 unsigned long flags;
350
351 if (fps_detection == 0)
352 return;
353
354 if (id >= MAX_INSTANCE_MUN)
355 return;
356
357 flags = vdec_fps_lock(vdec_core);
358
359 if (fps_clear == 1) {
360 vdec_fps_clearall();
361 fps_clear = 0;
362 }
363
364 vdec_core->decode_fps[id].frame_count++;
365 if (vdec_core->decode_fps[id].frame_count == 1) {
366 vdec_core->decode_fps[id].start_timestamp =
367 vdec_get_us_time_system();
368 vdec_core->decode_fps[id].last_timestamp =
369 vdec_core->decode_fps[id].start_timestamp;
370 } else {
371 vdec_core->decode_fps[id].last_timestamp =
372 vdec_get_us_time_system();
373 vdec_core->decode_fps[id].fps =
374 (u32)div_u64(((u64)(vdec_core->decode_fps[id].frame_count) *
375 10000000000),
376 (vdec_core->decode_fps[id].last_timestamp -
377 vdec_core->decode_fps[id].start_timestamp));
378 }
379 vdec_fps_unlock(vdec_core, flags);
380}
381
382
383
384static int get_canvas(unsigned int index, unsigned int base)
385{
386 int start;
387 int canvas_index = index * base;
388 int ret;
389
390 if ((base > 4) || (base == 0))
391 return -1;
392
393 if ((AMVDEC_CANVAS_START_INDEX + canvas_index + base - 1)
394 <= AMVDEC_CANVAS_MAX1) {
395 start = AMVDEC_CANVAS_START_INDEX + base * index;
396 } else {
397 canvas_index -= (AMVDEC_CANVAS_MAX1 -
398 AMVDEC_CANVAS_START_INDEX + 1) / base * base;
399 if (canvas_index <= AMVDEC_CANVAS_MAX2)
400 start = canvas_index / base;
401 else
402 return -1;
403 }
404
405 if (base == 1) {
406 ret = start;
407 } else if (base == 2) {
408 ret = ((start + 1) << 16) | ((start + 1) << 8) | start;
409 } else if (base == 3) {
410 ret = ((start + 2) << 16) | ((start + 1) << 8) | start;
411 } else if (base == 4) {
412 ret = (((start + 3) << 24) | (start + 2) << 16) |
413 ((start + 1) << 8) | start;
414 }
415
416 return ret;
417}
418
419static int get_canvas_ex(int type, int id)
420{
421 int i;
422 unsigned long flags;
423
424 flags = vdec_canvas_lock(vdec_core);
425
426 for (i = 0; i < CANVAS_MAX_SIZE; i++) {
427 /*0x10-0x15 has been used by rdma*/
428 if ((i >= 0x10) && (i <= 0x15))
429 continue;
430 if ((canvas_stat[i].type == type) &&
431 (canvas_stat[i].id & (1 << id)) == 0) {
432 canvas_stat[i].canvas_used_flag++;
433 canvas_stat[i].id |= (1 << id);
434 if (debug & 4)
435 pr_debug("get used canvas %d\n", i);
436 vdec_canvas_unlock(vdec_core, flags);
437 if (i < AMVDEC_CANVAS_MAX2 + 1)
438 return i;
439 else
440 return (i + AMVDEC_CANVAS_START_INDEX - AMVDEC_CANVAS_MAX2 - 1);
441 }
442 }
443
444 for (i = 0; i < CANVAS_MAX_SIZE; i++) {
445 /*0x10-0x15 has been used by rdma*/
446 if ((i >= 0x10) && (i <= 0x15))
447 continue;
448 if (canvas_stat[i].type == 0) {
449 canvas_stat[i].type = type;
450 canvas_stat[i].canvas_used_flag = 1;
451 canvas_stat[i].id = (1 << id);
452 if (debug & 4) {
453 pr_debug("get canvas %d\n", i);
454 pr_debug("canvas_used_flag %d\n",
455 canvas_stat[i].canvas_used_flag);
456 pr_debug("canvas_stat[i].id %d\n",
457 canvas_stat[i].id);
458 }
459 vdec_canvas_unlock(vdec_core, flags);
460 if (i < AMVDEC_CANVAS_MAX2 + 1)
461 return i;
462 else
463 return (i + AMVDEC_CANVAS_START_INDEX - AMVDEC_CANVAS_MAX2 - 1);
464 }
465 }
466 vdec_canvas_unlock(vdec_core, flags);
467
468 pr_info("cannot get canvas\n");
469
470 return -1;
471}
472
473static void free_canvas_ex(int index, int id)
474{
475 unsigned long flags;
476 int offset;
477
478 flags = vdec_canvas_lock(vdec_core);
479 if (index >= 0 &&
480 index < AMVDEC_CANVAS_MAX2 + 1)
481 offset = index;
482 else if ((index >= AMVDEC_CANVAS_START_INDEX) &&
483 (index <= AMVDEC_CANVAS_MAX1))
484 offset = index + AMVDEC_CANVAS_MAX2 + 1 - AMVDEC_CANVAS_START_INDEX;
485 else {
486 vdec_canvas_unlock(vdec_core, flags);
487 return;
488 }
489
490 if ((canvas_stat[offset].canvas_used_flag > 0) &&
491 (canvas_stat[offset].id & (1 << id))) {
492 canvas_stat[offset].canvas_used_flag--;
493 canvas_stat[offset].id &= ~(1 << id);
494 if (canvas_stat[offset].canvas_used_flag == 0) {
495 canvas_stat[offset].type = 0;
496 canvas_stat[offset].id = 0;
497 }
498 if (debug & 4) {
499 pr_debug("free index %d used_flag %d, type = %d, id = %d\n",
500 offset,
501 canvas_stat[offset].canvas_used_flag,
502 canvas_stat[offset].type,
503 canvas_stat[offset].id);
504 }
505 }
506 vdec_canvas_unlock(vdec_core, flags);
507
508 return;
509
510}
511
512static void vdec_dmc_pipeline_reset(void)
513{
514 /*
515 * bit15: vdec_piple
516 * bit14: hevc_dmc_piple
517 * bit13: hevcf_dmc_pipl
518 * bit12: wave420_dmc_pipl
519 * bit11: hcodec_dmc_pipl
520 */
521
522 WRITE_RESET_REG(RESET7_REGISTER,
523 (1 << 15) | (1 << 14) | (1 << 13) |
524 (1 << 12) | (1 << 11));
525}
526
/*
 * Halt the decoder's internal RISC core and wait for its IMEM/LMEM DMA
 * to drain, so the caller can safely cut DMC access afterwards.
 * @hw: VDEC_INPUT_TARGET_VLD or VDEC_INPUT_TARGET_HEVC.
 * Every drain wait is bounded by a jiffies deadline; a timeout is
 * silently ignored.
 */
static void vdec_stop_armrisc(int hw)
{
	ulong timeout = jiffies + HZ;

	if (hw == VDEC_INPUT_TARGET_VLD) {
		/* clearing MPSR/CPSR stops the microcode processor */
		WRITE_VREG(MPSR, 0);
		WRITE_VREG(CPSR, 0);

		/* wait (up to ~1s) for IMEM DMA busy bit to clear */
		while (READ_VREG(IMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}

		timeout = jiffies + HZ;
		while (READ_VREG(LMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}
	} else if (hw == VDEC_INPUT_TARGET_HEVC) {
		WRITE_VREG(HEVC_MPSR, 0);
		WRITE_VREG(HEVC_CPSR, 0);

		/* first wait reuses the ~1s deadline set at entry */
		while (READ_VREG(HEVC_IMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}

		/* NOTE(review): LMEM wait here is HZ/10 while the VLD
		 * path uses a full HZ - looks intentional, unconfirmed */
		timeout = jiffies + HZ/10;
		while (READ_VREG(HEVC_LMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}
	}
}
561
/*
 * Detach the decoder from the DMC (memory controller): stop the
 * decoder microcode first, then clear its request bit(s) in
 * DMC_REQ_CTRL and wait for the channel status to report idle.
 * Counterpart of vdec_enable_DMC().
 */
static void vdec_disable_DMC(struct vdec_s *vdec)
{
	/*close first,then wait pedding end,timing suggestion from vlsi*/
	struct vdec_input_s *input = &vdec->input;
	unsigned long flags;
	unsigned int mask = 0;

	/* pick the DMC request bit(s) for this input target; the bit
	 * layout changed on G12A and newer parts */
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		mask = (1 << 13);
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
			mask = (1 << 21);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		mask = (1 << 4); /*hevc*/
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
			mask |= (1 << 8); /*hevcb */
	}

	/* need to stop armrisc. */
	if (!IS_ERR_OR_NULL(vdec->dev))
		vdec_stop_armrisc(input->target);

	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* NOTE(review): unbounded busy-wait - if the DMC channel never
	 * reports idle this loop spins forever; consider a timeout */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;

	pr_debug("%s input->target= 0x%x\n", __func__, input->target);
}
594
/*
 * Re-attach the decoder to the DMC by setting its request bit(s) in
 * DMC_REQ_CTRL.  On G12B the DMC pipeline is reset first.
 * Counterpart of vdec_disable_DMC().
 */
static void vdec_enable_DMC(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;
	unsigned long flags;
	unsigned int mask = 0;

	/* same per-target / per-SoC bit selection as vdec_disable_DMC() */
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		mask = (1 << 13);
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
			mask = (1 << 21);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		mask = (1 << 4); /*hevc*/
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
			mask |= (1 << 8); /*hevcb */
	}

	/*must to be reset the dmc pipeline if it's g12b.*/
	if (get_cpu_type() == AM_MESON_CPU_MAJOR_ID_G12B)
		vdec_dmc_pipeline_reset();

	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);
	pr_debug("%s input->target= 0x%x\n", __func__, input->target);
}
621
622
623
624static int vdec_get_hw_type(int value)
625{
626 int type;
627 switch (value) {
628 case VFORMAT_HEVC:
629 case VFORMAT_VP9:
630 case VFORMAT_AVS2:
631 type = CORE_MASK_HEVC;
632 break;
633
634 case VFORMAT_MPEG12:
635 case VFORMAT_MPEG4:
636 case VFORMAT_H264:
637 case VFORMAT_MJPEG:
638 case VFORMAT_REAL:
639 case VFORMAT_JPEG:
640 case VFORMAT_VC1:
641 case VFORMAT_AVS:
642 case VFORMAT_YUV:
643 case VFORMAT_H264MVC:
644 case VFORMAT_H264_4K2K:
645 case VFORMAT_H264_ENC:
646 case VFORMAT_JPEG_ENC:
647 type = CORE_MASK_VDEC_1;
648 break;
649
650 default:
651 type = -1;
652 }
653
654 return type;
655}
656
657
658static void vdec_save_active_hw(struct vdec_s *vdec)
659{
660 int type;
661
662 type = vdec_get_hw_type(vdec->port->vformat);
663
664 if (type == CORE_MASK_HEVC) {
665 vdec_core->active_hevc = vdec;
666 } else if (type == CORE_MASK_VDEC_1) {
667 vdec_core->active_vdec = vdec;
668 } else {
669 pr_info("save_active_fw wrong\n");
670 }
671}
672
/*
 * Rebuild the core's input-status bitmaps under the input-buffer lock:
 * a frame-based vdec with queued frames (or EOS) sets its core bits in
 * buff_flag; a stream-based vdec always sets its bits in
 * stream_buff_flag.
 */
static void vdec_update_buff_status(void)
{
	struct vdec_core_s *core = vdec_core;
	unsigned long flags;
	struct vdec_s *vdec;

	flags = vdec_inputbuff_lock(core);
	core->buff_flag = 0;
	core->stream_buff_flag = 0;
	list_for_each_entry(vdec, &core->connected_vdec_list, list) {
		struct vdec_input_s *input = &vdec->input;
		if (input_frame_based(input)) {
			if (input->have_frame_num || input->eos)
				core->buff_flag |= vdec->core_mask;
		} else if (input_stream_based(input)) {
			core->stream_buff_flag |= vdec->core_mask;
		}
	}
	vdec_inputbuff_unlock(core, flags);
}
693
694#if 0
695void vdec_update_streambuff_status(void)
696{
697 struct vdec_core_s *core = vdec_core;
698 struct vdec_s *vdec;
699
700 /* check streaming prepare level threshold if not EOS */
701 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
702 struct vdec_input_s *input = &vdec->input;
703 if (input && input_stream_based(input) && !input->eos &&
704 (vdec->need_more_data & VDEC_NEED_MORE_DATA)) {
705 u32 rp, wp, level;
706
707 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
708 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
709 if (wp < rp)
710 level = input->size + wp - rp;
711 else
712 level = wp - rp;
713 if ((level < input->prepare_level) &&
714 (pts_get_rec_num(PTS_TYPE_VIDEO,
715 vdec->input.total_rd_count) < 2)) {
716 break;
717 } else if (level > input->prepare_level) {
718 vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
719 if (debug & 8)
720 pr_info("vdec_flush_streambuff_status up\n");
721 vdec_up(vdec);
722 }
723 break;
724 }
725 }
726}
727EXPORT_SYMBOL(vdec_update_streambuff_status);
728#endif
729
730int vdec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
731{
732 if (vdec && vdec->dec_status &&
733 ((vdec->status == VDEC_STATUS_CONNECTED ||
734 vdec->status == VDEC_STATUS_ACTIVE)))
735 return vdec->dec_status(vdec, vstatus);
736
737 return 0;
738}
739EXPORT_SYMBOL(vdec_status);
740
741int vdec_set_trickmode(struct vdec_s *vdec, unsigned long trickmode)
742{
743 int r;
744
745 if (vdec->set_trickmode) {
746 r = vdec->set_trickmode(vdec, trickmode);
747
748 if ((r == 0) && (vdec->slave) && (vdec->slave->set_trickmode))
749 r = vdec->slave->set_trickmode(vdec->slave,
750 trickmode);
751 return r;
752 }
753
754 return -1;
755}
756EXPORT_SYMBOL(vdec_set_trickmode);
757
/* Record the reset flag on the vdec and forward it to the decoder's
 * optional set_isreset callback. */
int vdec_set_isreset(struct vdec_s *vdec, int isreset)
{
	vdec->is_reset = isreset;
	pr_info("is_reset=%d\n", isreset);
	if (vdec->set_isreset)
		return vdec->set_isreset(vdec, isreset);
	return 0;
}
EXPORT_SYMBOL(vdec_set_isreset);

/* Record the dolby_meta_with_el flag (Dolby metadata carried with the
 * EL stream, per the field name). */
int vdec_set_dv_metawithel(struct vdec_s *vdec, int isdvmetawithel)
{
	vdec->dolby_meta_with_el = isdvmetawithel;
	pr_info("isdvmetawithel=%d\n", isdvmetawithel);
	return 0;
}
EXPORT_SYMBOL(vdec_set_dv_metawithel);

/* Set the module-wide flag that suppresses decoder power-down. */
void vdec_set_no_powerdown(int flag)
{
	no_powerdown = flag;
	pr_info("no_powerdown=%d\n", no_powerdown);
	return;
}
EXPORT_SYMBOL(vdec_set_no_powerdown);
783
784void vdec_count_info(struct vdec_info *vs, unsigned int err,
785 unsigned int offset)
786{
787 if (err)
788 vs->error_frame_count++;
789 if (offset) {
790 if (0 == vs->frame_count) {
791 vs->offset = 0;
792 vs->samp_cnt = 0;
793 }
794 vs->frame_data = offset > vs->total_data ?
795 offset - vs->total_data : vs->total_data - offset;
796 vs->total_data = offset;
797 if (vs->samp_cnt < 96000 * 2) { /* 2s */
798 if (0 == vs->samp_cnt)
799 vs->offset = offset;
800 vs->samp_cnt += vs->frame_dur;
801 } else {
802 vs->bit_rate = (offset - vs->offset) / 2;
803 /*pr_info("bitrate : %u\n",vs->bit_rate);*/
804 vs->samp_cnt = 0;
805 }
806 vs->frame_count++;
807 }
808 /*pr_info("size : %u, offset : %u, dur : %u, cnt : %u\n",
809 vs->offset,offset,vs->frame_dur,vs->samp_cnt);*/
810 return;
811}
812EXPORT_SYMBOL(vdec_count_info);
/* 4K decode is supported on everything except the GXL 805X package. */
int vdec_is_support_4k(void)
{
	return is_meson_gxl_package_805X() ? 0 : 1;
}
EXPORT_SYMBOL(vdec_is_support_4k);
818
/*
 * clk_config values:
 * 0: default
 * 1: never use gp0_pll
 * 2: always use gp0_pll
 * >= 10: fixed clock of N MHz (e.g. 100 selects a fixed 100 MHz clock)
 */
/* Read back the current clk_config setting (see the table above). */
unsigned int get_vdec_clk_config_settings(void)
{
	return clk_config;
}
/* Update the module-wide clk_config setting. */
void update_vdec_clk_config_settings(unsigned int config)
{
	clk_config = config;
}
EXPORT_SYMBOL(update_vdec_clk_config_settings);
836
837static bool hevc_workaround_needed(void)
838{
839 return (get_cpu_major_id() == AM_MESON_CPU_MAJOR_ID_GXBB) &&
840 (get_meson_cpu_version(MESON_CPU_VERSION_LVL_MINOR)
841 == GXBB_REV_A_MINOR);
842}
843
/* Device used for codec CMA (contiguous memory) allocations. */
struct device *get_codec_cma_device(void)
{
	return vdec_core->cma_dev;
}
848
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
/*
 * Platform-device name table, indexed by vformat.  In the MULTI_DEC
 * build each format occupies a pair of slots: [format * 2] is the
 * legacy single-instance driver and [format * 2 + 1] the
 * multi-instance one (see get_dev_name()).  The encoder entries have
 * no multi-instance variant, so their name is simply repeated.
 */
static const char * const vdec_device_name[] = {
	"amvdec_mpeg12", "ammvdec_mpeg12",
	"amvdec_mpeg4", "ammvdec_mpeg4",
	"amvdec_h264", "ammvdec_h264",
	"amvdec_mjpeg", "ammvdec_mjpeg",
	"amvdec_real", "ammvdec_real",
	"amjpegdec", "ammjpegdec",
	"amvdec_vc1", "ammvdec_vc1",
	"amvdec_avs", "ammvdec_avs",
	"amvdec_yuv", "ammvdec_yuv",
	"amvdec_h264mvc", "ammvdec_h264mvc",
	"amvdec_h264_4k2k", "ammvdec_h264_4k2k",
	"amvdec_h265", "ammvdec_h265",
	"amvenc_avc", "amvenc_avc",
	"jpegenc", "jpegenc",
	"amvdec_vp9", "ammvdec_vp9",
	"amvdec_avs2", "ammvdec_avs2"
};


#else

/* Single-instance build: one device name per vformat. */
static const char * const vdec_device_name[] = {
	"amvdec_mpeg12",
	"amvdec_mpeg4",
	"amvdec_h264",
	"amvdec_mjpeg",
	"amvdec_real",
	"amjpegdec",
	"amvdec_vc1",
	"amvdec_avs",
	"amvdec_yuv",
	"amvdec_h264mvc",
	"amvdec_h264_4k2k",
	"amvdec_h265",
	"amvenc_avc",
	"jpegenc",
	"amvdec_vp9",
	"amvdec_avs2"
};

#endif
892
893/*
894 * Only support time sliced decoding for frame based input,
895 * so legacy decoder can exist with time sliced decoder.
896 */
897static const char *get_dev_name(bool use_legacy_vdec, int format)
898{
899#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
900 if (use_legacy_vdec)
901 return vdec_device_name[format * 2];
902 else
903 return vdec_device_name[format * 2 + 1];
904#else
905 return vdec_device_name[format];
906#endif
907}
908
909#ifdef VDEC_DEBUG_SUPPORT
910static u64 get_current_clk(void)
911{
912 /*struct timespec xtime = current_kernel_time();
913 u64 usec = xtime.tv_sec * 1000000;
914 usec += xtime.tv_nsec / 1000;
915 */
916 u64 usec = sched_clock();
917 return usec;
918}
919
920static void inc_profi_count(unsigned long mask, u32 *count)
921{
922 enum vdec_type_e type;
923
924 for (type = VDEC_1; type < VDEC_MAX; type++) {
925 if (mask & (1 << type))
926 count[type]++;
927 }
928}
929
930static void update_profi_clk_run(struct vdec_s *vdec,
931 unsigned long mask, u64 clk)
932{
933 enum vdec_type_e type;
934
935 for (type = VDEC_1; type < VDEC_MAX; type++) {
936 if (mask & (1 << type)) {
937 vdec->start_run_clk[type] = clk;
938 if (vdec->profile_start_clk[type] == 0)
939 vdec->profile_start_clk[type] = clk;
940 vdec->total_clk[type] = clk
941 - vdec->profile_start_clk[type];
942 /*pr_info("set start_run_clk %ld\n",
943 vdec->start_run_clk);*/
944
945 }
946 }
947}
948
949static void update_profi_clk_stop(struct vdec_s *vdec,
950 unsigned long mask, u64 clk)
951{
952 enum vdec_type_e type;
953
954 for (type = VDEC_1; type < VDEC_MAX; type++) {
955 if (mask & (1 << type)) {
956 if (vdec->start_run_clk[type] == 0)
957 pr_info("error, start_run_clk[%d] not set\n", type);
958
959 /*pr_info("update run_clk type %d, %ld, %ld, %ld\n",
960 type,
961 clk,
962 vdec->start_run_clk[type],
963 vdec->run_clk[type]);*/
964 vdec->run_clk[type] +=
965 (clk - vdec->start_run_clk[type]);
966 }
967 }
968}
969
970#endif
971
972int vdec_set_decinfo(struct vdec_s *vdec, struct dec_sysinfo *p)
973{
974 if (copy_from_user((void *)&vdec->sys_info_store, (void *)p,
975 sizeof(struct dec_sysinfo)))
976 return -EFAULT;
977
978 /* force switch to mult instance if supports this profile. */
979 if ((vdec->type == VDEC_TYPE_SINGLE) &&
980 !disable_switch_single_to_mult) {
981 const char *str = NULL;
982 char fmt[16] = {0};
983
984 str = strchr(get_dev_name(false, vdec->format), '_');
985 if (!str)
986 return -1;
987
988 sprintf(fmt, "m%s", ++str);
989 if (is_support_profile(fmt) &&
990 vdec->sys_info->format != VIDEO_DEC_FORMAT_H263)
991 vdec->type = VDEC_TYPE_STREAM_PARSER;
992 }
993
994 return 0;
995}
996EXPORT_SYMBOL(vdec_set_decinfo);
997
998/* construct vdec strcture */
999struct vdec_s *vdec_create(struct stream_port_s *port,
1000 struct vdec_s *master)
1001{
1002 struct vdec_s *vdec;
1003 int type = VDEC_TYPE_SINGLE;
1004 int id;
1005
1006 if (is_mult_inc(port->type))
1007 type = (port->type & PORT_TYPE_FRAME) ?
1008 VDEC_TYPE_FRAME_BLOCK :
1009 VDEC_TYPE_STREAM_PARSER;
1010
1011 id = ida_simple_get(&vdec_core->ida,
1012 0, MAX_INSTANCE_MUN, GFP_KERNEL);
1013 if (id < 0) {
1014 pr_info("vdec_create request id failed!ret =%d\n", id);
1015 return NULL;
1016 }
1017 vdec = vzalloc(sizeof(struct vdec_s));
1018
1019 /* TBD */
1020 if (vdec) {
1021 vdec->magic = 0x43454456;
1022 vdec->id = -1;
1023 vdec->type = type;
1024 vdec->port = port;
1025 vdec->sys_info = &vdec->sys_info_store;
1026
1027 INIT_LIST_HEAD(&vdec->list);
1028
1029 atomic_inc(&vdec_core->vdec_nr);
1030 vdec->id = id;
1031 vdec_input_init(&vdec->input, vdec);
1032 vdec->input.vdec_is_input_frame_empty = vdec_is_input_frame_empty;
1033 vdec->input.vdec_up = vdec_up;
1034 if (master) {
1035 vdec->master = master;
1036 master->slave = vdec;
1037 master->sched = 1;
1038 }
1039 }
1040
1041 pr_debug("vdec_create instance %p, total %d\n", vdec,
1042 atomic_read(&vdec_core->vdec_nr));
1043
1044 //trace_vdec_create(vdec); /*DEBUG_TMP*/
1045
1046 return vdec;
1047}
1048EXPORT_SYMBOL(vdec_create);
1049
/* Set the video format on the vdec (and its dual-decode slave). */
int vdec_set_format(struct vdec_s *vdec, int format)
{
	vdec->format = format;
	vdec->port_flag |= PORT_FLAG_VFORMAT;

	if (vdec->slave) {
		vdec->slave->format = format;
		vdec->slave->port_flag |= PORT_FLAG_VFORMAT;
	}
	//trace_vdec_set_format(vdec, format);/*DEBUG_TMP*/

	return 0;
}
EXPORT_SYMBOL(vdec_set_format);

/* Latch a pts; pts64 is derived as pts * 100 / 9 (the usual 90 kHz
 * ticks -> microseconds conversion, presumably - confirm with users). */
int vdec_set_pts(struct vdec_s *vdec, u32 pts)
{
	vdec->pts = pts;
	vdec->pts64 = div64_u64((u64)pts * 100, 9);
	vdec->pts_valid = true;
	//trace_vdec_set_pts(vdec, (u64)pts);/*DEBUG_TMP*/
	return 0;
}
EXPORT_SYMBOL(vdec_set_pts);

/* Latch an externally supplied timestamp. */
void vdec_set_timestamp(struct vdec_s *vdec, u64 timestamp)
{
	vdec->timestamp = timestamp;
	vdec->timestamp_valid = true;
}
EXPORT_SYMBOL(vdec_set_timestamp);

/* Latch a 64-bit pts; the 32-bit pts is derived as pts64 * 9 / 100
 * (inverse of the conversion in vdec_set_pts()). */
int vdec_set_pts64(struct vdec_s *vdec, u64 pts64)
{
	vdec->pts64 = pts64;
	vdec->pts = (u32)div64_u64(pts64 * 9, 100);
	vdec->pts_valid = true;

	//trace_vdec_set_pts64(vdec, pts64);/*DEBUG_TMP*/
	return 0;
}
EXPORT_SYMBOL(vdec_set_pts64);
1092
/* Current decoder state (VDEC_STATUS_*). */
int vdec_get_status(struct vdec_s *vdec)
{
	return vdec->status;
}
EXPORT_SYMBOL(vdec_get_status);

/* Number of frames currently queued on the vdec's input chain. */
int vdec_get_frame_num(struct vdec_s *vdec)
{
	return vdec->input.have_frame_num;
}
EXPORT_SYMBOL(vdec_get_frame_num);

/* Set the decoder's current state (VDEC_STATUS_*). */
void vdec_set_status(struct vdec_s *vdec, int status)
{
	//trace_vdec_set_status(vdec, status);/*DEBUG_TMP*/
	vdec->status = status;
}
EXPORT_SYMBOL(vdec_set_status);

/* Set the state the decoder should transition to next. */
void vdec_set_next_status(struct vdec_s *vdec, int status)
{
	//trace_vdec_set_next_status(vdec, status);/*DEBUG_TMP*/
	vdec->next_status = status;
}
EXPORT_SYMBOL(vdec_set_next_status);

/* Select the video path used for frame-based output. */
int vdec_set_video_path(struct vdec_s *vdec, int video_path)
{
	vdec->frame_base_video_path = video_path;
	return 0;
}
EXPORT_SYMBOL(vdec_set_video_path);

/* Remember the vframe receiver instance bound to this vdec. */
int vdec_set_receive_id(struct vdec_s *vdec, int receive_id)
{
	vdec->vf_receiver_inst = receive_id;
	return 0;
}
EXPORT_SYMBOL(vdec_set_receive_id);
1132
/* Add frame data to the input chain: append @count bytes from @buf as
 * one frame.  Returns the result of vdec_input_add_frame(). */
int vdec_write_vframe(struct vdec_s *vdec, const char *buf, size_t count)
{
	return vdec_input_add_frame(&vdec->input, buf, count);
}
EXPORT_SYMBOL(vdec_write_vframe);
1139
1140/* add a work queue thread for vdec*/
1141void vdec_schedule_work(struct work_struct *work)
1142{
1143 if (vdec_core->vdec_core_wq)
1144 queue_work(vdec_core->vdec_core_wq, work);
1145 else
1146 schedule_work(work);
1147}
1148EXPORT_SYMBOL(vdec_schedule_work);
1149
1150static struct vdec_s *vdec_get_associate(struct vdec_s *vdec)
1151{
1152 if (vdec->master)
1153 return vdec->master;
1154 else if (vdec->slave)
1155 return vdec->slave;
1156 return NULL;
1157}
1158
1159static void vdec_sync_input_read(struct vdec_s *vdec)
1160{
1161 if (!vdec_stream_based(vdec))
1162 return;
1163
1164 if (vdec_dual(vdec)) {
1165 u32 me, other;
1166 if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1167 me = READ_VREG(VLD_MEM_VIFIFO_WRAP_COUNT);
1168 other =
1169 vdec_get_associate(vdec)->input.stream_cookie;
1170 if (me > other)
1171 return;
1172 else if (me == other) {
1173 me = READ_VREG(VLD_MEM_VIFIFO_RP);
1174 other =
1175 vdec_get_associate(vdec)->input.swap_rp;
1176 if (me > other) {
1177 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1178 vdec_get_associate(vdec)->
1179 input.swap_rp);
1180 return;
1181 }
1182 }
1183 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1184 READ_VREG(VLD_MEM_VIFIFO_RP));
1185 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1186 me = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
1187 if (((me & 0x80000000) == 0) &&
1188 (vdec->input.streaming_rp & 0x80000000))
1189 me += 1ULL << 32;
1190 other = vdec_get_associate(vdec)->input.streaming_rp;
1191 if (me > other) {
1192 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1193 vdec_get_associate(vdec)->
1194 input.swap_rp);
1195 return;
1196 }
1197
1198 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1199 READ_VREG(HEVC_STREAM_RD_PTR));
1200 }
1201 } else if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1202 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1203 READ_VREG(VLD_MEM_VIFIFO_RP));
1204 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1205 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1206 READ_VREG(HEVC_STREAM_RD_PTR));
1207 }
1208}
1209
1210static void vdec_sync_input_write(struct vdec_s *vdec)
1211{
1212 if (!vdec_stream_based(vdec))
1213 return;
1214
1215 if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1216 WRITE_VREG(VLD_MEM_VIFIFO_WP,
1217 READ_PARSER_REG(PARSER_VIDEO_WP));
1218 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1219 WRITE_VREG(HEVC_STREAM_WR_PTR,
1220 READ_PARSER_REG(PARSER_VIDEO_WP));
1221 }
1222}
1223
1224/*
1225 *get next frame from input chain
1226 */
1227/*
1228 *THE VLD_FIFO is 512 bytes and Video buffer level
1229 * empty interrupt is set to 0x80 bytes threshold
1230 */
1231#define VLD_PADDING_SIZE 1024
1232#define HEVC_PADDING_SIZE (1024*16)
/*
 * Prepare the next unit of input for the HW decoder.
 *
 * Frame based: selects the next queued chunk, programs the FIFO/stream
 * registers to cover it (with VLD_PADDING_SIZE / HEVC_PADDING_SIZE of
 * slack past the chunk), returns the chunk through *p and its size as
 * the return value; -1 when no chunk is queued.
 *
 * Stream based: restores the saved swap context (or the counterpart's
 * for dual decoding), or initializes the FIFO from input->start on the
 * first run, then syncs pointers with the parser front end.  *p is set
 * to NULL and the return value is the number of bytes available to the
 * decoder (clamped to 0).
 */
int vdec_prepare_input(struct vdec_s *vdec, struct vframe_chunk_s **p)
{
	struct vdec_input_s *input = &vdec->input;
	struct vframe_chunk_s *chunk = NULL;
	struct vframe_block_list_s *block = NULL;
	int dummy;

	/* full reset to HW input */
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

		/* reset VLD fifo for all vdec */
		WRITE_VREG(DOS_SW_RESET0, (1<<5) | (1<<4) | (1<<3));
		WRITE_VREG(DOS_SW_RESET0, 0);

		/* dummy read flushes the reset write before re-enabling */
		dummy = READ_RESET_REG(RESET0_REGISTER);
		WRITE_VREG(POWER_CTL_VLD, 1 << 4);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
#if 0
		/*move to driver*/
		if (input_frame_based(input))
			WRITE_VREG(HEVC_STREAM_CONTROL, 0);

		/*
		 * 2: assist
		 * 3: parser
		 * 4: parser_state
		 * 8: dblk
		 * 11:mcpu
		 * 12:ccpu
		 * 13:ddr
		 * 14:iqit
		 * 15:ipp
		 * 17:qdct
		 * 18:mpred
		 * 19:sao
		 * 24:hevc_afifo
		 */
		WRITE_VREG(DOS_SW_RESET3,
			(1<<3)|(1<<4)|(1<<8)|(1<<11)|(1<<12)|(1<<14)|(1<<15)|
			(1<<17)|(1<<18)|(1<<19));
		WRITE_VREG(DOS_SW_RESET3, 0);
#endif
	}

	/*
	 *setup HW decoder input buffer (VLD context)
	 * based on input->type and input->target
	 */
	if (input_frame_based(input)) {
		chunk = vdec_input_next_chunk(&vdec->input);

		if (chunk == NULL) {
			*p = NULL;
			return -1;
		}

		block = chunk->block;

		if (input->target == VDEC_INPUT_TARGET_VLD) {
			WRITE_VREG(VLD_MEM_VIFIFO_START_PTR, block->start);
			WRITE_VREG(VLD_MEM_VIFIFO_END_PTR, block->start +
					block->size - 8);
			WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
					round_down(block->start + chunk->offset,
						VDEC_FIFO_ALIGN));

			/* pulse control bit 0 to latch the new pointers */
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

			/* set to manual mode */
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
			WRITE_VREG(VLD_MEM_VIFIFO_RP,
					round_down(block->start + chunk->offset,
						VDEC_FIFO_ALIGN));
			/* write pointer = end of chunk + padding (wrapped) */
			dummy = chunk->offset + chunk->size +
				VLD_PADDING_SIZE;
			if (dummy >= block->size)
				dummy -= block->size;
			WRITE_VREG(VLD_MEM_VIFIFO_WP,
				round_down(block->start + dummy,
					VDEC_FIFO_ALIGN));

			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 3);
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);

			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
				(0x11 << 16) | (1<<10) | (7<<3));

		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_START_ADDR, block->start);
			WRITE_VREG(HEVC_STREAM_END_ADDR, block->start +
					block->size);
			WRITE_VREG(HEVC_STREAM_RD_PTR, block->start +
					chunk->offset);
			/* write pointer = end of chunk + padding (wrapped) */
			dummy = chunk->offset + chunk->size +
				HEVC_PADDING_SIZE;
			if (dummy >= block->size)
				dummy -= block->size;
			WRITE_VREG(HEVC_STREAM_WR_PTR,
				round_down(block->start + dummy,
					VDEC_FIFO_ALIGN));

			/* set endian */
			SET_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
		}

		*p = chunk;
		return chunk->size;

	} else {
		/* stream based */
		u32 rp = 0, wp = 0, fifo_len = 0;
		int size;
		bool swap_valid = input->swap_valid;
		unsigned long swap_page_phys = input->swap_page_phys;

		if (vdec_dual(vdec) &&
			((vdec->flag & VDEC_FLAG_SELF_INPUT_CONTEXT) == 0)) {
			/* keep using previous input context */
			struct vdec_s *master = (vdec->slave) ?
				vdec : vdec->master;
			if (master->input.last_swap_slave) {
				swap_valid = master->slave->input.swap_valid;
				swap_page_phys =
					master->slave->input.swap_page_phys;
			} else {
				swap_valid = master->input.swap_valid;
				swap_page_phys = master->input.swap_page_phys;
			}
		}

		if (swap_valid) {
			if (input->target == VDEC_INPUT_TARGET_VLD) {
				if (vdec->format == VFORMAT_H264)
					SET_VREG_MASK(POWER_CTL_VLD,
						(1 << 9));

				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

				/* restore read side */
				WRITE_VREG(VLD_MEM_SWAP_ADDR,
					swap_page_phys);
				WRITE_VREG(VLD_MEM_SWAP_CTL, 1);

				/* busy-wait for the swap engine to finish */
				while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
					;
				WRITE_VREG(VLD_MEM_SWAP_CTL, 0);

				/* restore wrap count */
				WRITE_VREG(VLD_MEM_VIFIFO_WRAP_COUNT,
					input->stream_cookie);

				rp = READ_VREG(VLD_MEM_VIFIFO_RP);
				fifo_len = READ_VREG(VLD_MEM_VIFIFO_LEVEL);

				/* enable */
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
					(0x11 << 16) | (1<<10));

				/* sync with front end */
				vdec_sync_input_read(vdec);
				vdec_sync_input_write(vdec);

				wp = READ_VREG(VLD_MEM_VIFIFO_WP);
			} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
				SET_VREG_MASK(HEVC_STREAM_CONTROL, 1);

				/* restore read side */
				WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
					swap_page_phys);
				WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 1);

				/* busy-wait for the swap engine to finish */
				while (READ_VREG(HEVC_STREAM_SWAP_CTRL)
					& (1<<7))
					;
				WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);

				/* restore stream offset */
				WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
					input->stream_cookie);

				rp = READ_VREG(HEVC_STREAM_RD_PTR);
				fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
						>> 16) & 0x7f;


				/* enable */

				/* sync with front end */
				vdec_sync_input_read(vdec);
				vdec_sync_input_write(vdec);

				wp = READ_VREG(HEVC_STREAM_WR_PTR);

				/*pr_info("vdec: restore context\r\n");*/
			}

		} else {
			/* no saved context yet: start from buffer origin */
			if (input->target == VDEC_INPUT_TARGET_VLD) {
				WRITE_VREG(VLD_MEM_VIFIFO_START_PTR,
					input->start);
				WRITE_VREG(VLD_MEM_VIFIFO_END_PTR,
					input->start + input->size - 8);
				WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
					input->start);

				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

				/* set to manual mode */
				WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
				WRITE_VREG(VLD_MEM_VIFIFO_RP, input->start);
				WRITE_VREG(VLD_MEM_VIFIFO_WP,
					READ_PARSER_REG(PARSER_VIDEO_WP));

				rp = READ_VREG(VLD_MEM_VIFIFO_RP);

				/* enable */
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
					(0x11 << 16) | (1<<10));

				wp = READ_VREG(VLD_MEM_VIFIFO_WP);

			} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
				WRITE_VREG(HEVC_STREAM_START_ADDR,
					input->start);
				WRITE_VREG(HEVC_STREAM_END_ADDR,
					input->start + input->size);
				WRITE_VREG(HEVC_STREAM_RD_PTR,
					input->start);
				WRITE_VREG(HEVC_STREAM_WR_PTR,
					READ_PARSER_REG(PARSER_VIDEO_WP));

				rp = READ_VREG(HEVC_STREAM_RD_PTR);
				wp = READ_VREG(HEVC_STREAM_WR_PTR);
				fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
						>> 16) & 0x7f;

				/* enable */
			}
		}
		*p = NULL;
		/* available bytes = rp..wp distance (with wrap) + FIFO fill */
		if (wp >= rp)
			size = wp - rp + fifo_len;
		else
			size = wp + input->size - rp + fifo_len;
		if (size < 0) {
			pr_info("%s error: input->size %x wp %x rp %x fifo_len %x => size %x\r\n",
				__func__, input->size, wp, rp, fifo_len, size);
			size = 0;
		}
		return size;
	}
}
EXPORT_SYMBOL(vdec_prepare_input);
1489
/*
 * Turn on the HW input path for an active vdec.  For HEVC, the byte
 * swap bits (7 << 4) are cleared for stream based input and set for
 * frame based input; bit 29 of HEVC_STREAM_FIFO_CTL is also set.
 * No-op unless the vdec is currently VDEC_STATUS_ACTIVE.
 */
void vdec_enable_input(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

	/* only meaningful while the decoder is scheduled and running */
	if (vdec->status != VDEC_STATUS_ACTIVE)
		return;

	if (input->target == VDEC_INPUT_TARGET_VLD)
		SET_VREG_MASK(VLD_MEM_VIFIFO_CONTROL, (1<<2) | (1<<1));
	else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		SET_VREG_MASK(HEVC_STREAM_CONTROL, 1);
		if (vdec_stream_based(vdec))
			CLEAR_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
		else
			SET_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
		SET_VREG_MASK(HEVC_STREAM_FIFO_CTL, (1<<29));
	}
}
EXPORT_SYMBOL(vdec_enable_input);
1509
1510int vdec_set_input_buffer(struct vdec_s *vdec, u32 start, u32 size)
1511{
1512 int r = vdec_input_set_buffer(&vdec->input, start, size);
1513
1514 if (r)
1515 return r;
1516
1517 if (vdec->slave)
1518 r = vdec_input_set_buffer(&vdec->slave->input, start, size);
1519
1520 return r;
1521}
1522EXPORT_SYMBOL(vdec_set_input_buffer);
1523
1524/*
1525 * vdec_eos returns the possibility that there are
1526 * more input can be used by decoder through vdec_prepare_input
1527 * Note: this function should be called prior to vdec_vframe_dirty
1528 * by decoder driver to determine if EOS happens for stream based
1529 * decoding when there is no sufficient data for a frame
1530 */
1531bool vdec_has_more_input(struct vdec_s *vdec)
1532{
1533 struct vdec_input_s *input = &vdec->input;
1534
1535 if (!input->eos)
1536 return true;
1537
1538 if (input_frame_based(input))
1539 return vdec_input_next_input_chunk(input) != NULL;
1540 else {
1541 if (input->target == VDEC_INPUT_TARGET_VLD)
1542 return READ_VREG(VLD_MEM_VIFIFO_WP) !=
1543 READ_PARSER_REG(PARSER_VIDEO_WP);
1544 else {
1545 return (READ_VREG(HEVC_STREAM_WR_PTR) & ~0x3) !=
1546 (READ_PARSER_REG(PARSER_VIDEO_WP) & ~0x3);
1547 }
1548 }
1549}
1550EXPORT_SYMBOL(vdec_has_more_input);
1551
/*
 * Set the input prepare watermark.  NOTE(review): units/semantics of
 * `level` are defined by the vdec_input code — confirm there.
 */
void vdec_set_prepare_level(struct vdec_s *vdec, int level)
{
	vdec->input.prepare_level = level;
}
EXPORT_SYMBOL(vdec_set_prepare_level);
1557
/*
 * Replace (not OR) the vdec's flag word, e.g. with
 * VDEC_FLAG_SELF_INPUT_CONTEXT.
 */
void vdec_set_flag(struct vdec_s *vdec, u32 flag)
{
	vdec->flag = flag;
}
EXPORT_SYMBOL(vdec_set_flag);
1563
/*
 * Mark (or clear) end-of-stream on the vdec's input — and the slave's,
 * for dual decoding — then wake the core scheduler thread so the state
 * change is processed promptly.
 */
void vdec_set_eos(struct vdec_s *vdec, bool eos)
{
	struct vdec_core_s *core = vdec_core;
	vdec->input.eos = eos;

	if (vdec->slave)
		vdec->slave->input.eos = eos;
	/* kick the core thread */
	up(&core->sem);
}
EXPORT_SYMBOL(vdec_set_eos);
1574
#ifdef VDEC_DEBUG_SUPPORT
/* Enable single-step debugging: 0x1ff sets all step-mode stage bits. */
void vdec_set_step_mode(void)
{
	step_mode = 0x1ff;
}
EXPORT_SYMBOL(vdec_set_step_mode);
#endif
1582
1583void vdec_set_next_sched(struct vdec_s *vdec, struct vdec_s *next_vdec)
1584{
1585 if (vdec && next_vdec) {
1586 vdec->sched = 0;
1587 next_vdec->sched = 1;
1588 }
1589}
1590EXPORT_SYMBOL(vdec_set_next_sched);
1591
1592/*
1593 * Swap Context: S0 S1 S2 S3 S4
1594 * Sample sequence: M S M M S
1595 * Master Context: S0 S0 S2 S3 S3
1596 * Slave context: NA S1 S1 S2 S4
1597 * ^
1598 * ^
1599 * ^
1600 * the tricky part
1601 * If there are back to back decoding of master or slave
1602 * then the context of the counter part should be updated
1603 * with current decoder. In this example, S1 should be
1604 * updated to S2.
1605 * This is done by swap the swap_page and related info
1606 * between two layers.
1607 */
1608static void vdec_borrow_input_context(struct vdec_s *vdec)
1609{
1610 struct page *swap_page;
1611 unsigned long swap_page_phys;
1612 struct vdec_input_s *me;
1613 struct vdec_input_s *other;
1614
1615 if (!vdec_dual(vdec))
1616 return;
1617
1618 me = &vdec->input;
1619 other = &vdec_get_associate(vdec)->input;
1620
1621 /* swap the swap_context, borrow counter part's
1622 * swap context storage and update all related info.
1623 * After vdec_vframe_dirty, vdec_save_input_context
1624 * will be called to update current vdec's
1625 * swap context
1626 */
1627 swap_page = other->swap_page;
1628 other->swap_page = me->swap_page;
1629 me->swap_page = swap_page;
1630
1631 swap_page_phys = other->swap_page_phys;
1632 other->swap_page_phys = me->swap_page_phys;
1633 me->swap_page_phys = swap_page_phys;
1634
1635 other->swap_rp = me->swap_rp;
1636 other->streaming_rp = me->streaming_rp;
1637 other->stream_cookie = me->stream_cookie;
1638 other->swap_valid = me->swap_valid;
1639}
1640
/*
 * Mark the given chunk (if any) consumed.  For stream based input,
 * also flag that the HW swap context must be saved, handle the dual
 * back-to-back case by borrowing the counterpart's context, and resync
 * the HW read/write pointers with the parser front end.
 */
void vdec_vframe_dirty(struct vdec_s *vdec, struct vframe_chunk_s *chunk)
{
	if (chunk)
		chunk->flag |= VFRAME_CHUNK_FLAG_CONSUMED;

	if (vdec_stream_based(vdec)) {
		vdec->input.swap_needed = true;

		if (vdec_dual(vdec)) {
			/* two consecutive dirties on the same layer mean
			 * the counterpart's saved context is stale — borrow
			 * it (see comment above vdec_borrow_input_context)
			 */
			vdec_get_associate(vdec)->input.dirty_count = 0;
			vdec->input.dirty_count++;
			if (vdec->input.dirty_count > 1) {
				vdec->input.dirty_count = 1;
				vdec_borrow_input_context(vdec);
			}
		}

		/* for stream based mode, we update read and write pointer
		 * also in case decoder wants to keep working on decoding
		 * for more frames while input front end has more data
		 */
		vdec_sync_input_read(vdec);
		vdec_sync_input_write(vdec);

		vdec->need_more_data |= VDEC_NEED_MORE_DATA_DIRTY;
		vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
	}
}
EXPORT_SYMBOL(vdec_vframe_dirty);
1670
1671bool vdec_need_more_data(struct vdec_s *vdec)
1672{
1673 if (vdec_stream_based(vdec))
1674 return vdec->need_more_data & VDEC_NEED_MORE_DATA;
1675
1676 return false;
1677}
1678EXPORT_SYMBOL(vdec_need_more_data);
1679
1680
/*
 * Gate HEVC (and, on G12A and later, HEVCB) DDR access requests off in
 * the DMC, then busy-wait on the channel status bits before returning.
 * Called from vdec_save_input_context() so no DMA is in flight while
 * the context is saved.
 * NOTE(review): the wait loop has no timeout; exact DMC_CHAN_STS bit
 * semantics are defined by the vendor DMC — confirm against the SoC
 * datasheet.
 */
void hevc_wait_ddr(void)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 4; /* hevc */
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask |= (1 << 8); /* hevcb */

	/* clear the request-enable bits under the vdec spinlock */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* spin until the DMC reports the masked channels' status set */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;
}
1699
/*
 * Save the HW stream-input context (swap page, read pointer, wrap
 * count / shifted byte count) so it can be restored on the decoder's
 * next scheduling slot.  Only does real work for stream based input
 * with a pending swap (input->swap_needed).
 */
void vdec_save_input_context(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_SAVE_INPUT);
#endif

	if (input->target == VDEC_INPUT_TARGET_VLD)
		WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1<<15);

	if (input_stream_based(input) && (input->swap_needed)) {
		if (input->target == VDEC_INPUT_TARGET_VLD) {
			/* command 3 = save context to the swap page */
			WRITE_VREG(VLD_MEM_SWAP_ADDR,
				input->swap_page_phys);
			WRITE_VREG(VLD_MEM_SWAP_CTL, 3);
			/* busy-wait for the swap engine (bit 7 = busy) */
			while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
				;
			WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
			vdec->input.stream_cookie =
				READ_VREG(VLD_MEM_VIFIFO_WRAP_COUNT);
			vdec->input.swap_rp =
				READ_VREG(VLD_MEM_VIFIFO_RP);
			/* total consumed = wraps * size + rp - unread bytes */
			vdec->input.total_rd_count =
				(u64)vdec->input.stream_cookie *
				vdec->input.size + vdec->input.swap_rp -
				READ_VREG(VLD_MEM_VIFIFO_BYTES_AVAIL);
		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
				input->swap_page_phys);
			WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 3);

			while (READ_VREG(HEVC_STREAM_SWAP_CTRL) & (1<<7))
				;
			WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);

			vdec->input.stream_cookie =
				READ_VREG(HEVC_SHIFT_BYTE_COUNT);
			vdec->input.swap_rp =
				READ_VREG(HEVC_STREAM_RD_PTR);
			/* extend the 32-bit HW byte count into the 64-bit
			 * streaming_rp: carry into the high word when the
			 * low word wrapped past 2^31
			 */
			if (((vdec->input.stream_cookie & 0x80000000) == 0) &&
				(vdec->input.streaming_rp & 0x80000000))
				vdec->input.streaming_rp += 1ULL << 32;
			vdec->input.streaming_rp &= 0xffffffffULL << 32;
			vdec->input.streaming_rp |= vdec->input.stream_cookie;
			vdec->input.total_rd_count = vdec->input.streaming_rp;
		}

		input->swap_valid = true;
		input->swap_needed = false;
		/*pr_info("vdec: save context\r\n");*/

		vdec_sync_input_read(vdec);

		if (vdec_dual(vdec)) {
			/* record which layer saved last so the next run
			 * restores the freshest context
			 */
			struct vdec_s *master = (vdec->slave) ?
				vdec : vdec->master;
			master->input.last_swap_slave = (master->slave == vdec);
			/* pr_info("master->input.last_swap_slave = %d\n",
				master->input.last_swap_slave); */
		}

		hevc_wait_ddr();
	}
}
EXPORT_SYMBOL(vdec_save_input_context);
1766
1767void vdec_clean_input(struct vdec_s *vdec)
1768{
1769 struct vdec_input_s *input = &vdec->input;
1770
1771 while (!list_empty(&input->vframe_chunk_list)) {
1772 struct vframe_chunk_s *chunk =
1773 vdec_input_next_chunk(input);
1774 if (chunk && (chunk->flag & VFRAME_CHUNK_FLAG_CONSUMED))
1775 vdec_input_release_chunk(input, chunk);
1776 else
1777 break;
1778 }
1779 vdec_save_input_context(vdec);
1780}
1781EXPORT_SYMBOL(vdec_clean_input);
1782
1783
/*
 * Restore the HW stream read-side context before (re)starting a
 * decoder: replay the saved swap page if one is valid, otherwise
 * program the FIFO/stream registers to the start of the input buffer.
 * Stream based input only; always returns 0.
 */
static int vdec_input_read_restore(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

	if (!vdec_stream_based(vdec))
		return 0;

	if (!input->swap_valid) {
		/* no saved context yet: start from the buffer origin */
		if (input->target == VDEC_INPUT_TARGET_VLD) {
			WRITE_VREG(VLD_MEM_VIFIFO_START_PTR,
				input->start);
			WRITE_VREG(VLD_MEM_VIFIFO_END_PTR,
				input->start + input->size - 8);
			WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
				input->start);
			/* pulse control bit 0 to latch the pointers */
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

			/* set to manual mode */
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
			WRITE_VREG(VLD_MEM_VIFIFO_RP, input->start);
		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_START_ADDR,
				input->start);
			WRITE_VREG(HEVC_STREAM_END_ADDR,
				input->start + input->size);
			WRITE_VREG(HEVC_STREAM_RD_PTR,
				input->start);
		}
		return 0;
	}
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		/* restore read side */
		WRITE_VREG(VLD_MEM_SWAP_ADDR,
			input->swap_page_phys);

		/*swap active*/
		WRITE_VREG(VLD_MEM_SWAP_CTL, 1);

		/*wait swap busy*/
		while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
			;

		WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		/* restore read side */
		WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
			input->swap_page_phys);
		WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 1);

		/* wait for the swap engine to finish (bit 7 = busy) */
		while (READ_VREG(HEVC_STREAM_SWAP_CTRL)
			& (1<<7))
			;
		WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);
	}

	return 0;
}
1842
1843
1844int vdec_sync_input(struct vdec_s *vdec)
1845{
1846 struct vdec_input_s *input = &vdec->input;
1847 u32 rp = 0, wp = 0, fifo_len = 0;
1848 int size;
1849
1850 vdec_input_read_restore(vdec);
1851 vdec_sync_input_read(vdec);
1852 vdec_sync_input_write(vdec);
1853 if (input->target == VDEC_INPUT_TARGET_VLD) {
1854 rp = READ_VREG(VLD_MEM_VIFIFO_RP);
1855 wp = READ_VREG(VLD_MEM_VIFIFO_WP);
1856
1857 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1858 rp = READ_VREG(HEVC_STREAM_RD_PTR);
1859 wp = READ_VREG(HEVC_STREAM_WR_PTR);
1860 fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
1861 >> 16) & 0x7f;
1862 }
1863 if (wp >= rp)
1864 size = wp - rp + fifo_len;
1865 else
1866 size = wp + input->size - rp + fifo_len;
1867 if (size < 0) {
1868 pr_info("%s error: input->size %x wp %x rp %x fifo_len %x => size %x\r\n",
1869 __func__, input->size, wp, rp, fifo_len, size);
1870 size = 0;
1871 }
1872 return size;
1873
1874}
1875EXPORT_SYMBOL(vdec_sync_input);
1876
1877const char *vdec_status_str(struct vdec_s *vdec)
1878{
1879 return vdec->status < ARRAY_SIZE(vdec_status_string) ?
1880 vdec_status_string[vdec->status] : "INVALID";
1881}
1882
1883const char *vdec_type_str(struct vdec_s *vdec)
1884{
1885 switch (vdec->type) {
1886 case VDEC_TYPE_SINGLE:
1887 return "VDEC_TYPE_SINGLE";
1888 case VDEC_TYPE_STREAM_PARSER:
1889 return "VDEC_TYPE_STREAM_PARSER";
1890 case VDEC_TYPE_FRAME_BLOCK:
1891 return "VDEC_TYPE_FRAME_BLOCK";
1892 case VDEC_TYPE_FRAME_CIRCULAR:
1893 return "VDEC_TYPE_FRAME_CIRCULAR";
1894 default:
1895 return "VDEC_TYPE_INVALID";
1896 }
1897}
1898
/*
 * Device-name string for the vdec's format.  vdec_device_name[] holds
 * two entries per format and this returns index format*2+1.
 * NOTE(review): the table layout (which of the pair is at the odd
 * index) is defined elsewhere in this file — confirm before relying
 * on it.
 */
const char *vdec_device_name_str(struct vdec_s *vdec)
{
	return vdec_device_name[vdec->format * 2 + 1];
}
EXPORT_SYMBOL(vdec_device_name_str);
1904
1905void walk_vdec_core_list(char *s)
1906{
1907 struct vdec_s *vdec;
1908 struct vdec_core_s *core = vdec_core;
1909 unsigned long flags;
1910
1911 pr_info("%s --->\n", s);
1912
1913 flags = vdec_core_lock(vdec_core);
1914
1915 if (list_empty(&core->connected_vdec_list)) {
1916 pr_info("connected vdec list empty\n");
1917 } else {
1918 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
1919 pr_info("\tvdec (%p), status = %s\n", vdec,
1920 vdec_status_str(vdec));
1921 }
1922 }
1923
1924 vdec_core_unlock(vdec_core, flags);
1925}
1926EXPORT_SYMBOL(walk_vdec_core_list);
1927
1928/* insert vdec to vdec_core for scheduling,
1929 * for dual running decoders, connect/disconnect always runs in pairs
1930 */
/*
 * Insert the vdec (and its slave, for dual decoding) into the core's
 * connected list so the scheduler thread starts considering it.
 * Statuses are moved to CONNECTED and inactive_done completions armed
 * before the list insertion; the core semaphore is raised at the end
 * to kick the scheduler.  Returns 0 (also when already connected).
 */
int vdec_connect(struct vdec_s *vdec)
{
	unsigned long flags;

	//trace_vdec_connect(vdec);/*DEBUG_TMP*/

	/* only a DISCONNECTED vdec may be (re)connected */
	if (vdec->status != VDEC_STATUS_DISCONNECTED)
		return 0;

	vdec_set_status(vdec, VDEC_STATUS_CONNECTED);
	vdec_set_next_status(vdec, VDEC_STATUS_CONNECTED);

	/* completed by the core thread once the vdec is fully inactive */
	init_completion(&vdec->inactive_done);

	if (vdec->slave) {
		vdec_set_status(vdec->slave, VDEC_STATUS_CONNECTED);
		vdec_set_next_status(vdec->slave, VDEC_STATUS_CONNECTED);

		init_completion(&vdec->slave->inactive_done);
	}

	flags = vdec_core_lock(vdec_core);

	list_add_tail(&vdec->list, &vdec_core->connected_vdec_list);

	if (vdec->slave) {
		list_add_tail(&vdec->slave->list,
			&vdec_core->connected_vdec_list);
	}

	vdec_core_unlock(vdec_core, flags);

	/* wake the scheduler so the new vdec gets picked up */
	up(&vdec_core->sem);

	return 0;
}
EXPORT_SYMBOL(vdec_connect);
1968
1969/* remove vdec from vdec_core scheduling */
1970int vdec_disconnect(struct vdec_s *vdec)
1971{
1972#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1973 vdec_profile(vdec, VDEC_PROFILE_EVENT_DISCONNECT);
1974#endif
1975 //trace_vdec_disconnect(vdec);/*DEBUG_TMP*/
1976
1977 if ((vdec->status != VDEC_STATUS_CONNECTED) &&
1978 (vdec->status != VDEC_STATUS_ACTIVE)) {
1979 return 0;
1980 }
1981 mutex_lock(&vdec_mutex);
1982 /*
1983 *when a vdec is under the management of scheduler
1984 * the status change will only be from vdec_core_thread
1985 */
1986 vdec_set_next_status(vdec, VDEC_STATUS_DISCONNECTED);
1987
1988 if (vdec->slave)
1989 vdec_set_next_status(vdec->slave, VDEC_STATUS_DISCONNECTED);
1990 else if (vdec->master)
1991 vdec_set_next_status(vdec->master, VDEC_STATUS_DISCONNECTED);
1992 mutex_unlock(&vdec_mutex);
1993 up(&vdec_core->sem);
1994
1995 if(!wait_for_completion_timeout(&vdec->inactive_done,
1996 msecs_to_jiffies(2000)))
1997 goto discon_timeout;
1998
1999 if (vdec->slave) {
2000 if(!wait_for_completion_timeout(&vdec->slave->inactive_done,
2001 msecs_to_jiffies(2000)))
2002 goto discon_timeout;
2003 } else if (vdec->master) {
2004 if(!wait_for_completion_timeout(&vdec->master->inactive_done,
2005 msecs_to_jiffies(2000)))
2006 goto discon_timeout;
2007 }
2008
2009 return 0;
2010discon_timeout:
2011 pr_err("%s timeout!!! status: 0x%x\n", __func__, vdec->status);
2012 return 0;
2013}
2014EXPORT_SYMBOL(vdec_disconnect);
2015
2016/* release vdec structure */
2017int vdec_destroy(struct vdec_s *vdec)
2018{
2019 //trace_vdec_destroy(vdec);/*DEBUG_TMP*/
2020
2021 vdec_input_release(&vdec->input);
2022
2023#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2024 vdec_profile_flush(vdec);
2025#endif
2026 ida_simple_remove(&vdec_core->ida, vdec->id);
2027 vfree(vdec);
2028
2029 atomic_dec(&vdec_core->vdec_nr);
2030
2031 return 0;
2032}
2033EXPORT_SYMBOL(vdec_destroy);
2034
2035/*
2036 *register vdec_device
2037 * create output, vfm or create ionvideo output
2038 */
/*
 * Register a vdec device and build its output path.
 *
 * Resolves the platform-device name for the format, registers the
 * device, then either routes output through the default VFM path
 * (stream based) or creates a frame-based pipeline (ionvideo,
 * amlvideo, v4lvideo, tunnel, ...) selected by frame_base_video_path.
 * Registers the vframe provider and pushes the frame-rate hint when
 * known.  Returns 0 on success or a negative errno; on failure the
 * inited_vcodec_num counter is rolled back.
 * NOTE(review): the is_4k argument is unused in this function body.
 */
s32 vdec_init(struct vdec_s *vdec, int is_4k)
{
	int r = 0;
	struct vdec_s *p = vdec;
	const char *dev_name;
	int id = PLATFORM_DEVID_AUTO;/*if have used my self*/

	dev_name = get_dev_name(vdec_single(vdec), vdec->format);

	if (dev_name == NULL)
		return -ENODEV;

	pr_info("vdec_init, dev_name:%s, vdec_type=%s\n",
		dev_name, vdec_type_str(vdec));

	/*
	 *todo: VFM patch control should be configurable,
	 * for now all stream based input uses default VFM path.
	 */
	if (vdec_stream_based(vdec) && !vdec_dual(vdec)) {
		if (vdec_core->vfm_vdec == NULL) {
			pr_debug("vdec_init set vfm decoder %p\n", vdec);
			vdec_core->vfm_vdec = vdec;
		} else {
			pr_info("vdec_init vfm path busy.\n");
			return -EBUSY;
		}
	}

	mutex_lock(&vdec_mutex);
	inited_vcodec_num++;
	mutex_unlock(&vdec_mutex);

	/* HEVC/AVS2/VP9 use the HEVC stream engine; everything else VLD */
	vdec_input_set_type(&vdec->input, vdec->type,
			(vdec->format == VFORMAT_HEVC ||
			vdec->format == VFORMAT_AVS2 ||
			vdec->format == VFORMAT_VP9) ?
				VDEC_INPUT_TARGET_HEVC :
				VDEC_INPUT_TARGET_VLD);
	if (vdec_single(vdec))
		vdec_enable_DMC(vdec);
	/* wire core-provided helpers into the instance */
	p->cma_dev = vdec_core->cma_dev;
	p->get_canvas = get_canvas;
	p->get_canvas_ex = get_canvas_ex;
	p->free_canvas_ex = free_canvas_ex;
	p->vdec_fps_detec = vdec_fps_detec;
	atomic_set(&p->inirq_flag, 0);
	atomic_set(&p->inirq_thread_flag, 0);
	/* todo */
	if (!vdec_dual(vdec))
		p->use_vfm_path = vdec_stream_based(vdec);
	/* vdec_dev_reg.flag = 0; */
	if (vdec->id >= 0)
		id = vdec->id;
	p->parallel_dec = parallel_decode;
	vdec_core->parallel_dec = parallel_decode;
	vdec->canvas_mode = CANVAS_BLKMODE_32X32;
#ifdef FRAME_CHECK
	vdec_frame_check_init(vdec);
#endif
	/* the child driver receives &p (a struct vdec_s **) as pdata */
	p->dev = platform_device_register_data(
				&vdec_core->vdec_core_platform_device->dev,
				dev_name,
				id,
				&p, sizeof(struct vdec_s *));

	if (IS_ERR(p->dev)) {
		r = PTR_ERR(p->dev);
		pr_err("vdec: Decoder device %s register failed (%d)\n",
			dev_name, r);

		mutex_lock(&vdec_mutex);
		inited_vcodec_num--;
		mutex_unlock(&vdec_mutex);

		goto error;
	} else if (!p->dev->dev.driver) {
		pr_info("vdec: Decoder device %s driver probe failed.\n",
			dev_name);
		r = -ENODEV;

		goto error;
	}

	if ((p->type == VDEC_TYPE_FRAME_BLOCK) && (p->run == NULL)) {
		r = -ENODEV;
		pr_err("vdec: Decoder device not handled (%s)\n", dev_name);

		mutex_lock(&vdec_mutex);
		inited_vcodec_num--;
		mutex_unlock(&vdec_mutex);

		goto error;
	}

	if (p->use_vfm_path) {
		vdec->vf_receiver_inst = -1;
		vdec->vfm_map_id[0] = 0;
	} else if (!vdec_dual(vdec)) {
		/* create IONVIDEO instance and connect decoder's
		 * vf_provider interface to it
		 */
		if (p->type != VDEC_TYPE_FRAME_BLOCK) {
			r = -ENODEV;
			pr_err("vdec: Incorrect decoder type\n");

			mutex_lock(&vdec_mutex);
			inited_vcodec_num--;
			mutex_unlock(&vdec_mutex);

			goto error;
		}
		if (p->frame_base_video_path == FRAME_BASE_PATH_IONVIDEO) {
#if 1
			r = ionvideo_assign_map(&vdec->vf_receiver_name,
					&vdec->vf_receiver_inst);
#else
			/*
			 * temporarily just use decoder instance ID as iondriver ID
			 * to solve OMX iondriver instance number check time sequence
			 * only the limitation is we can NOT mix different video
			 * decoders since same ID will be used for different decoder
			 * formats.
			 */
			vdec->vf_receiver_inst = p->dev->id;
			r = ionvideo_assign_map(&vdec->vf_receiver_name,
					&vdec->vf_receiver_inst);
#endif
			if (r < 0) {
				pr_err("IonVideo frame receiver allocation failed.\n");

				mutex_lock(&vdec_mutex);
				inited_vcodec_num--;
				mutex_unlock(&vdec_mutex);

				goto error;
			}

			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				vdec->vf_receiver_name);
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path ==
				FRAME_BASE_PATH_AMLVIDEO_AMVIDEO) {
			/* secure streams skip ppmgr/deinterlace */
			if (vdec_secure(vdec)) {
				snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
					"%s %s", vdec->vf_provider_name,
					"amlvideo amvideo");
			} else {
				snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
					"%s %s", vdec->vf_provider_name,
					"amlvideo ppmgr deinterlace amvideo");
			}
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path ==
				FRAME_BASE_PATH_AMLVIDEO1_AMVIDEO2) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				"aml_video.1 videosync.0 videopip");
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path == FRAME_BASE_PATH_V4L_OSD) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				vdec->vf_receiver_name);
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path == FRAME_BASE_PATH_TUNNEL_MODE) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				"amvideo");
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path == FRAME_BASE_PATH_V4L_VIDEO) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s %s", vdec->vf_provider_name,
				vdec->vf_receiver_name, "amvideo");
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path ==
				FRAME_BASE_PATH_DI_V4LVIDEO) {
#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
			r = v4lvideo_assign_map(&vdec->vf_receiver_name,
					&vdec->vf_receiver_inst);
#else
			r = -1;
#endif
			if (r < 0) {
				pr_err("V4lVideo frame receiver allocation failed.\n");
				mutex_lock(&vdec_mutex);
				inited_vcodec_num--;
				mutex_unlock(&vdec_mutex);
				goto error;
			}
			/* choose the deinterlace element by instance index */
			if (!v4lvideo_add_di || vdec_secure(vdec))
				snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
					"%s %s", vdec->vf_provider_name,
					vdec->vf_receiver_name);
			else {
				if (vdec->vf_receiver_inst == 0)
					snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
						"%s %s %s", vdec->vf_provider_name,
						"deinterlace",
						vdec->vf_receiver_name);
				else if (vdec->vf_receiver_inst < max_di_instance)
					snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
						"%s %s%d %s", vdec->vf_provider_name,
						"dimulti.",
						vdec->vf_receiver_inst,
						vdec->vf_receiver_name);
				else
					snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
						"%s %s", vdec->vf_provider_name,
						vdec->vf_receiver_name);
			}
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		}

		if (vfm_map_add(vdec->vfm_map_id,
					vdec->vfm_map_chain) < 0) {
			r = -ENOMEM;
			pr_err("Decoder pipeline map creation failed %s.\n",
				vdec->vfm_map_id);
			vdec->vfm_map_id[0] = 0;

			mutex_lock(&vdec_mutex);
			inited_vcodec_num--;
			mutex_unlock(&vdec_mutex);

			goto error;
		}

		pr_debug("vfm map %s created\n", vdec->vfm_map_id);

		/*
		 *assume IONVIDEO driver already have a few vframe_receiver
		 * registered.
		 * 1. Call iondriver function to allocate a IONVIDEO path and
		 *    provide receiver's name and receiver op.
		 * 2. Get decoder driver's provider name from driver instance
		 * 3. vfm_map_add(name, "<decoder provider name>
		 *    <iondriver receiver name>"), e.g.
		 *    vfm_map_add("vdec_ion_map_0", "mpeg4_0 iondriver_1");
		 * 4. vf_reg_provider and vf_reg_receiver
		 * Note: the decoder provider's op uses vdec as op_arg
		 *       the iondriver receiver's op uses iondev device as
		 *       op_arg
		 */

	}

	if (!vdec_single(vdec)) {
		vf_reg_provider(&p->vframe_provider);

		vf_notify_receiver(p->vf_provider_name,
			VFRAME_EVENT_PROVIDER_START,
			vdec);

		/* first provider to register owns the frame-rate hint */
		if (vdec_core->hint_fr_vdec == NULL)
			vdec_core->hint_fr_vdec = vdec;

		if (vdec_core->hint_fr_vdec == vdec) {
			if (p->sys_info->rate != 0) {
				if (!vdec->is_reset) {
					vf_notify_receiver(p->vf_provider_name,
						VFRAME_EVENT_PROVIDER_FR_HINT,
						(void *)
						((unsigned long)
						p->sys_info->rate));
					vdec->fr_hint_state = VDEC_HINTED;
				}
			} else {
				vdec->fr_hint_state = VDEC_NEED_HINT;
			}
		}
	}

	p->dolby_meta_with_el = 0;
	pr_debug("vdec_init, vf_provider_name = %s\n", p->vf_provider_name);
	vdec_input_prepare_bufs(/*prepared buffer for fast playing.*/
		&vdec->input,
		vdec->sys_info->width,
		vdec->sys_info->height);
	/* vdec is now ready to be active */
	vdec_set_status(vdec, VDEC_STATUS_DISCONNECTED);
	if (p->use_vfm_path) {
		/* allocate QoS reporting ring buffers (best-effort) */
		frame_info_buf_in = (struct vframe_qos_s *)
			kmalloc(QOS_FRAME_NUM*sizeof(struct vframe_qos_s), GFP_KERNEL);
		if (!frame_info_buf_in)
			pr_err("kmalloc: frame_info_buf_in failed\n");
		else
			memset(frame_info_buf_in, 0,
				QOS_FRAME_NUM*sizeof(struct vframe_qos_s));

		frame_info_buf_out = (struct vframe_qos_s *)
			kmalloc(QOS_FRAME_NUM*sizeof(struct vframe_qos_s), GFP_KERNEL);
		if (!frame_info_buf_out)
			pr_err("kmalloc: frame_info_buf_out failed\n");
		else
			memset(frame_info_buf_out, 0,
				QOS_FRAME_NUM*sizeof(struct vframe_qos_s));
		frame_qos_wr = 0;
		frame_qos_rd = 0;
	}
	return 0;

error:
	return r;
}
EXPORT_SYMBOL(vdec_init);
2352
/* vdec_create/init/release/destroy apply to both decoders of a
 * dual-decoding (master/slave) pair.
 */
void vdec_release(struct vdec_s *vdec)
{
	//trace_vdec_release(vdec);/*DEBUG_TMP*/
#ifdef VDEC_DEBUG_SUPPORT
	/* debug aid: hold the release until step_mode is cleared elsewhere */
	if (step_mode) {
		pr_info("VDEC_DEBUG: in step_mode, wait release\n");
		while (step_mode)
			udelay(10);
		pr_info("VDEC_DEBUG: step_mode is clear\n");
	}
#endif
	/* detach from the scheduler before dismantling the vframe path */
	vdec_disconnect(vdec);

	if (vdec->vframe_provider.name) {
		if (!vdec_single(vdec)) {
			/* end the frame-rate hint if this instance issued it */
			if (vdec_core->hint_fr_vdec == vdec
			&& vdec->fr_hint_state == VDEC_HINTED)
				vf_notify_receiver(
					vdec->vf_provider_name,
					VFRAME_EVENT_PROVIDER_FR_END_HINT,
					NULL);
			vdec->fr_hint_state = VDEC_NO_NEED_HINT;
		}
		vf_unreg_provider(&vdec->vframe_provider);
	}

	/* drop core-level back references to this instance */
	if (vdec_core->vfm_vdec == vdec)
		vdec_core->vfm_vdec = NULL;

	if (vdec_core->hint_fr_vdec == vdec)
		vdec_core->hint_fr_vdec = NULL;

	if (vdec->vf_receiver_inst >= 0) {
		if (vdec->vfm_map_id[0]) {
			vfm_map_remove(vdec->vfm_map_id);
			vdec->vfm_map_id[0] = 0;
		}
	}

	/* wait for any in-flight ISR / threaded ISR on this vdec to finish */
	while ((atomic_read(&vdec->inirq_flag) > 0)
		|| (atomic_read(&vdec->inirq_thread_flag) > 0))
		schedule();

#ifdef FRAME_CHECK
	vdec_frame_check_exit(vdec);
#endif
	vdec_fps_clear(vdec->id);
	/* last remaining instance: disable DMC access for the decoder */
	if (atomic_read(&vdec_core->vdec_nr) == 1)
		vdec_disable_DMC(vdec);
	platform_device_unregister(vdec->dev);
	pr_debug("vdec_release instance %p, total %d\n", vdec,
		atomic_read(&vdec_core->vdec_nr));
	if (vdec->use_vfm_path) {
		/* kfree(NULL) is a no-op if vdec_init()'s allocation failed */
		kfree(frame_info_buf_in);
		frame_info_buf_in = NULL;
		kfree(frame_info_buf_out);
		frame_info_buf_out = NULL;
		frame_qos_wr = 0;
		frame_qos_rd = 0;
	}
	vdec_destroy(vdec);

	mutex_lock(&vdec_mutex);
	inited_vcodec_num--;
	mutex_unlock(&vdec_mutex);

}
2422EXPORT_SYMBOL(vdec_release);
2423
2424/* For dual running decoders, vdec_reset is only called with master vdec.
2425 */
2426int vdec_reset(struct vdec_s *vdec)
2427{
2428 //trace_vdec_reset(vdec); /*DEBUG_TMP*/
2429
2430 vdec_disconnect(vdec);
2431
2432 if (vdec->vframe_provider.name)
2433 vf_unreg_provider(&vdec->vframe_provider);
2434
2435 if ((vdec->slave) && (vdec->slave->vframe_provider.name))
2436 vf_unreg_provider(&vdec->slave->vframe_provider);
2437
2438 if (vdec->reset) {
2439 vdec->reset(vdec);
2440 if (vdec->slave)
2441 vdec->slave->reset(vdec->slave);
2442 }
2443 vdec->mc_loaded = 0;/*clear for reload firmware*/
2444 vdec_input_release(&vdec->input);
2445
2446 vdec_input_init(&vdec->input, vdec);
2447
2448 vdec_input_prepare_bufs(&vdec->input, vdec->sys_info->width,
2449 vdec->sys_info->height);
2450
2451 vf_reg_provider(&vdec->vframe_provider);
2452 vf_notify_receiver(vdec->vf_provider_name,
2453 VFRAME_EVENT_PROVIDER_START, vdec);
2454
2455 if (vdec->slave) {
2456 vf_reg_provider(&vdec->slave->vframe_provider);
2457 vf_notify_receiver(vdec->slave->vf_provider_name,
2458 VFRAME_EVENT_PROVIDER_START, vdec->slave);
2459 vdec->slave->mc_loaded = 0;/*clear for reload firmware*/
2460 }
2461
2462 vdec_connect(vdec);
2463
2464 return 0;
2465}
2466EXPORT_SYMBOL(vdec_reset);
2467
/*
 * Historically freed CMA buffers when no codec was in use; the guard on
 * inited_vcodec_num is commented out, so today this only takes and
 * releases vdec_mutex (serializing with init/release) with no effect.
 */
void vdec_free_cmabuf(void)
{
	mutex_lock(&vdec_mutex);

	/*if (inited_vcodec_num > 0) {
		mutex_unlock(&vdec_mutex);
		return;
	}*/
	mutex_unlock(&vdec_mutex);
}
2478
2479void vdec_core_request(struct vdec_s *vdec, unsigned long mask)
2480{
2481 vdec->core_mask |= mask;
2482
2483 if (vdec->slave)
2484 vdec->slave->core_mask |= mask;
2485 if (vdec_core->parallel_dec == 1) {
2486 if (mask & CORE_MASK_COMBINE)
2487 vdec_core->vdec_combine_flag++;
2488 }
2489
2490}
2491EXPORT_SYMBOL(vdec_core_request);
2492
2493int vdec_core_release(struct vdec_s *vdec, unsigned long mask)
2494{
2495 vdec->core_mask &= ~mask;
2496
2497 if (vdec->slave)
2498 vdec->slave->core_mask &= ~mask;
2499 if (vdec_core->parallel_dec == 1) {
2500 if (mask & CORE_MASK_COMBINE)
2501 vdec_core->vdec_combine_flag--;
2502 }
2503 return 0;
2504}
2505EXPORT_SYMBOL(vdec_core_release);
2506
2507bool vdec_core_with_input(unsigned long mask)
2508{
2509 enum vdec_type_e type;
2510
2511 for (type = VDEC_1; type < VDEC_MAX; type++) {
2512 if ((mask & (1 << type)) && cores_with_input[type])
2513 return true;
2514 }
2515
2516 return false;
2517}
2518
2519void vdec_core_finish_run(struct vdec_s *vdec, unsigned long mask)
2520{
2521 unsigned long i;
2522 unsigned long t = mask;
2523 mutex_lock(&vdec_mutex);
2524 while (t) {
2525 i = __ffs(t);
2526 clear_bit(i, &vdec->active_mask);
2527 t &= ~(1 << i);
2528 }
2529
2530 if (vdec->active_mask == 0)
2531 vdec_set_status(vdec, VDEC_STATUS_CONNECTED);
2532
2533 mutex_unlock(&vdec_mutex);
2534}
2535EXPORT_SYMBOL(vdec_core_finish_run);
2536/*
2537 * find what core resources are available for vdec
2538 */
2539static unsigned long vdec_schedule_mask(struct vdec_s *vdec,
2540 unsigned long active_mask)
2541{
2542 unsigned long mask = vdec->core_mask &
2543 ~CORE_MASK_COMBINE;
2544
2545 if (vdec->core_mask & CORE_MASK_COMBINE) {
2546 /* combined cores must be granted together */
2547 if ((mask & ~active_mask) == mask)
2548 return mask;
2549 else
2550 return 0;
2551 } else
2552 return mask & ~vdec->sched_mask & ~active_mask;
2553}
2554
2555/*
2556 *Decoder callback
2557 * Each decoder instance uses this callback to notify status change, e.g. when
2558 * decoder finished using HW resource.
2559 * a sample callback from decoder's driver is following:
2560 *
2561 * if (hw->vdec_cb) {
2562 * vdec_set_next_status(vdec, VDEC_STATUS_CONNECTED);
2563 * hw->vdec_cb(vdec, hw->vdec_cb_arg);
2564 * }
2565 */
2566static void vdec_callback(struct vdec_s *vdec, void *data)
2567{
2568 struct vdec_core_s *core = (struct vdec_core_s *)data;
2569
2570#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2571 vdec_profile(vdec, VDEC_PROFILE_EVENT_CB);
2572#endif
2573
2574 up(&core->sem);
2575}
2576
/*
 * Top-half interrupt handler shared by the decoder cores. Resolves
 * which vdec instance the irq belongs to, flags it as in-irq (so
 * vdec_release() can wait), then forwards to either the context's
 * device isr or the instance's irq_handler.
 */
static irqreturn_t vdec_isr(int irq, void *dev_id)
{
	struct vdec_isr_context_s *c =
		(struct vdec_isr_context_s *)dev_id;
	struct vdec_s *vdec = vdec_core->last_vdec;
	irqreturn_t ret = IRQ_HANDLED;

	if (vdec_core->parallel_dec == 1) {
		/* in parallel mode the irq line identifies the active core */
		if (irq == vdec_core->isr_context[VDEC_IRQ_0].irq)
			vdec = vdec_core->active_hevc;
		else if (irq == vdec_core->isr_context[VDEC_IRQ_1].irq)
			vdec = vdec_core->active_vdec;
		else
			vdec = NULL;
	}

	if (vdec) {
		/* mark in-irq; vdec_release() spins on this flag */
		atomic_set(&vdec->inirq_flag, 1);
		vdec->isr_ns = local_clock();
	}
	if (c->dev_isr) {
		/* a device-level isr overrides the instance handler */
		ret = c->dev_isr(irq, c->dev_id);
		goto isr_done;
	}

	if ((c != &vdec_core->isr_context[VDEC_IRQ_0]) &&
	    (c != &vdec_core->isr_context[VDEC_IRQ_1]) &&
	    (c != &vdec_core->isr_context[VDEC_IRQ_HEVC_BACK])) {
#if 0
		pr_warn("vdec interrupt w/o a valid receiver\n");
#endif
		goto isr_done;
	}

	if (!vdec) {
#if 0
		pr_warn("vdec interrupt w/o an active instance running. core = %p\n",
			core);
#endif
		goto isr_done;
	}

	if (!vdec->irq_handler) {
#if 0
		pr_warn("vdec instance has no irq handle.\n");
#endif
		goto isr_done;
	}

	ret = vdec->irq_handler(vdec, c->index);
isr_done:
	if (vdec)
		atomic_set(&vdec->inirq_flag, 0);
	return ret;
}
2632
/*
 * Threaded (bottom-half) interrupt handler shared by the decoder cores.
 * Mirrors vdec_isr()'s instance lookup, logs when the top-half to
 * thread-fn latency exceeds 10ms, then forwards to the device-level or
 * instance-level threaded handler.
 */
static irqreturn_t vdec_thread_isr(int irq, void *dev_id)
{
	struct vdec_isr_context_s *c =
		(struct vdec_isr_context_s *)dev_id;
	struct vdec_s *vdec = vdec_core->last_vdec;
	irqreturn_t ret = IRQ_HANDLED;

	if (vdec_core->parallel_dec == 1) {
		/* in parallel mode the irq line identifies the active core */
		if (irq == vdec_core->isr_context[VDEC_IRQ_0].irq)
			vdec = vdec_core->active_hevc;
		else if (irq == vdec_core->isr_context[VDEC_IRQ_1].irq)
			vdec = vdec_core->active_vdec;
		else
			vdec = NULL;
	}

	if (vdec) {
		u32 isr2tfn = 0;
		/* mark in-thread-irq; vdec_release() spins on this flag */
		atomic_set(&vdec->inirq_thread_flag, 1);
		vdec->tfn_ns = local_clock();
		/* latency from top-half entry (isr_ns) to this thread fn */
		isr2tfn = vdec->tfn_ns - vdec->isr_ns;
		if (isr2tfn > 10000000)	/* > 10ms */
			pr_err("!!!!!!! %s vdec_isr to %s took %uns !!!\n",
				vdec->vf_provider_name, __func__, isr2tfn);
	}
	if (c->dev_threaded_isr) {
		/* a device-level threaded isr overrides the instance one */
		ret = c->dev_threaded_isr(irq, c->dev_id);
		goto thread_isr_done;
	}
	if (!vdec)
		goto thread_isr_done;

	if (!vdec->threaded_irq_handler)
		goto thread_isr_done;
	ret = vdec->threaded_irq_handler(vdec, c->index);
thread_isr_done:
	if (vdec)
		atomic_set(&vdec->inirq_thread_flag, 0);
	return ret;
}
2673
/*
 * Decide whether @vdec can run right now on the cores in @mask.
 * Returns the subset of @mask the instance is ready to use, or 0.
 * Checks, in order: connection status, run_ready op presence, crc
 * error blocking, master/slave scheduling gate, input availability
 * (frame underrun / stream prepare level), debug step mode, and
 * finally the decoder driver's own run_ready() verdict.
 */
unsigned long vdec_ready_to_run(struct vdec_s *vdec, unsigned long mask)
{
	unsigned long ready_mask;
	struct vdec_input_s *input = &vdec->input;
	if ((vdec->status != VDEC_STATUS_CONNECTED) &&
		(vdec->status != VDEC_STATUS_ACTIVE))
		return false;

	if (!vdec->run_ready)
		return false;

	/* when crc32 error, block at error frame */
	if (vdec->vfc.err_crc_block)
		return false;

	/* dual decoders only run when their sched gate is open */
	if ((vdec->slave || vdec->master) &&
		(vdec->sched == 0))
		return false;
#ifdef VDEC_DEBUG_SUPPORT
	inc_profi_count(mask, vdec->check_count);
#endif
	if (vdec_core_with_input(mask)) {
		/* check frame based input underrun */
		if (input && !input->eos && input_frame_based(input)
			&& (!vdec_input_next_chunk(input))) {
#ifdef VDEC_DEBUG_SUPPORT
			inc_profi_count(mask, vdec->input_underrun_count);
#endif
			return false;
		}
		/* check streaming prepare level threshold if not EOS */
		if (input && input_stream_based(input) && !input->eos) {
			u32 rp, wp, level;

			rp = READ_PARSER_REG(PARSER_VIDEO_RP);
			wp = READ_PARSER_REG(PARSER_VIDEO_WP);
			/* ring buffer: account for write pointer wrap */
			if (wp < rp)
				level = input->size + wp - rp;
			else
				level = wp - rp;

			if ((level < input->prepare_level) &&
				(pts_get_rec_num(PTS_TYPE_VIDEO,
					vdec->input.total_rd_count) < 2)) {
				vdec->need_more_data |= VDEC_NEED_MORE_DATA;
#ifdef VDEC_DEBUG_SUPPORT
				inc_profi_count(mask, vdec->input_underrun_count);
				/* debug: step mode can force a run anyway */
				if (step_mode & 0x200) {
					if ((step_mode & 0xff) == vdec->id) {
						step_mode |= 0xff;
						return mask;
					}
				}
#endif
				return false;
			} else if (level > input->prepare_level)
				vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
		}
	}

	if (step_mode) {
		/* only the instance selected by step_mode may proceed */
		if ((step_mode & 0xff) != vdec->id)
			return 0;
		step_mode |= 0xff; /*VDEC_DEBUG_SUPPORT*/
	}

	/*step_mode &= ~0xff; not work for id of 0, removed*/

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_CHK_RUN_READY);
#endif

	/* final word belongs to the decoder driver itself */
	ready_mask = vdec->run_ready(vdec, mask) & mask;
#ifdef VDEC_DEBUG_SUPPORT
	if (ready_mask != mask)
		inc_profi_count(ready_mask ^ mask, vdec->not_run_ready_count);
#endif
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	if (ready_mask)
		vdec_profile(vdec, VDEC_PROFILE_EVENT_RUN_READY);
#endif

	return ready_mask;
}
2758
2759/* bridge on/off vdec's interrupt processing to vdec core */
2760static void vdec_route_interrupt(struct vdec_s *vdec, unsigned long mask,
2761 bool enable)
2762{
2763 enum vdec_type_e type;
2764
2765 for (type = VDEC_1; type < VDEC_MAX; type++) {
2766 if (mask & (1 << type)) {
2767 struct vdec_isr_context_s *c =
2768 &vdec_core->isr_context[cores_int[type]];
2769 if (enable)
2770 c->vdec = vdec;
2771 else if (c->vdec == vdec)
2772 c->vdec = NULL;
2773 }
2774 }
2775}
2776
2777/*
2778 * Set up secure protection for each decoder instance running.
2779 * Note: The operation from REE side only resets memory access
2780 * to a default policy and even a non_secure type will still be
2781 * changed to secure type automatically when secure source is
2782 * detected inside TEE.
2783 * Perform need_more_data checking and set flag is decoder
2784 * is not consuming data.
2785 */
void vdec_prepare_run(struct vdec_s *vdec, unsigned long mask)
{
	struct vdec_input_s *input = &vdec->input;
	/* default memory-access policy for this run; TEE may still force
	 * secure when secure source is detected (see comment above fn)
	 */
	int secure = (vdec_secure(vdec)) ? DMC_DEV_TYPE_SECURE :
			DMC_DEV_TYPE_NON_SECURE;

	vdec_route_interrupt(vdec, mask, true);

	if (!vdec_core_with_input(mask))
		return;

	if (secure && vdec_stream_based(vdec) && force_nosecure_even_drm)
	{
		/* Verimatrix ultra webclient (HLS) was played in drmmode
		 * and used hw demux. In drmmode VDEC only can access
		 * secure. Now HW demux parsed es data to no-secure buffer,
		 * so the VDEC input was no-secure and VDEC playback failed.
		 * Force nosecure for verimatrix webclient HLS. If in the
		 * future HW demux can parse es data to a secure buffer,
		 * make VDEC r/w secure.
		 */
		secure = 0;
		//pr_debug("allow VDEC can access nosecure even in drmmode\n");
	}
	if (input->target == VDEC_INPUT_TARGET_VLD)
		tee_config_device_secure(DMC_DEV_ID_VDEC, secure);
	else if (input->target == VDEC_INPUT_TARGET_HEVC)
		tee_config_device_secure(DMC_DEV_ID_HEVC, secure);

	/* stream input: if the previous run consumed nothing (RUN set,
	 * DIRTY never set), flag that more data is needed
	 */
	if (vdec_stream_based(vdec) &&
		((vdec->need_more_data & VDEC_NEED_MORE_DATA_RUN) &&
		(vdec->need_more_data & VDEC_NEED_MORE_DATA_DIRTY) == 0)) {
		vdec->need_more_data |= VDEC_NEED_MORE_DATA;
	}

	vdec->need_more_data |= VDEC_NEED_MORE_DATA_RUN;
	vdec->need_more_data &= ~VDEC_NEED_MORE_DATA_DIRTY;
}
2820
2821
2822/* struct vdec_core_shread manages all decoder instance in active list. When
2823 * a vdec is added into the active list, it can onlt be in two status:
2824 * VDEC_STATUS_CONNECTED(the decoder does not own HW resource and ready to run)
2825 * VDEC_STATUS_ACTIVE(the decoder owns HW resources and is running).
2826 * Removing a decoder from active list is only performed within core thread.
2827 * Adding a decoder into active list is performed from user thread.
2828 */
static int vdec_core_thread(void *data)
{
	struct vdec_core_s *core = (struct vdec_core_s *)data;
	struct sched_param param = {.sched_priority = MAX_RT_PRIO/2};
	unsigned long flags;
	int i;

	/* real-time priority: scheduling latency directly affects decode */
	sched_setscheduler(current, SCHED_FIFO, &param);

	allow_signal(SIGTERM);

	/* one loop iteration per core->sem wakeup (decoder callbacks,
	 * connect/disconnect, and self-kicks below all up() the sem)
	 */
	while (down_interruptible(&core->sem) == 0) {
		struct vdec_s *vdec, *tmp, *worker;
		unsigned long sched_mask = 0;
		LIST_HEAD(disconnecting_list);

		if (kthread_should_stop())
			break;
		mutex_lock(&vdec_mutex);

		if (core->parallel_dec == 1) {
			/* refresh bitmap of cores holding a power reference */
			for (i = VDEC_1; i < VDEC_MAX; i++) {
				core->power_ref_mask =
					core->power_ref_count[i] > 0 ?
					(core->power_ref_mask | (1 << i)) :
					(core->power_ref_mask & ~(1 << i));
			}
		}
		/* clean up previous active vdec's input */
		list_for_each_entry(vdec, &core->connected_vdec_list, list) {
			/* cores scheduled to this vdec but no longer active:
			 * the decoder finished with them since last pass
			 */
			unsigned long mask = vdec->sched_mask &
				(vdec->active_mask ^ vdec->sched_mask);

			vdec_route_interrupt(vdec, mask, false);

#ifdef VDEC_DEBUG_SUPPORT
			update_profi_clk_stop(vdec, mask, get_current_clk());
#endif
			/*
			 * If decoder released some core resources (mask), then
			 * check if these core resources are associated
			 * with any input side and do input clean up accordingly
			 */
			if (vdec_core_with_input(mask)) {
				struct vdec_input_s *input = &vdec->input;
				/* drop fully-consumed chunks from the head */
				while (!list_empty(
					&input->vframe_chunk_list)) {
					struct vframe_chunk_s *chunk =
						vdec_input_next_chunk(input);
					if (chunk && (chunk->flag &
						VFRAME_CHUNK_FLAG_CONSUMED))
						vdec_input_release_chunk(input,
							chunk);
					else
						break;
				}

				vdec_save_input_context(vdec);
			}

			/* return the cores to the free pool */
			vdec->sched_mask &= ~mask;
			core->sched_mask &= ~mask;
		}
		vdec_update_buff_status();
		/*
		 *todo:
		 * this is the case when the decoder is in active mode and
		 * the system side wants to stop it. Currently we rely on
		 * the decoder instance to go back to VDEC_STATUS_CONNECTED
		 * from VDEC_STATUS_ACTIVE by its own. However, if for some
		 * reason the decoder can not exist by itself (dead decoding
		 * or whatever), then we may have to add another vdec API
		 * to kill the vdec and release its HW resource and make it
		 * become inactive again.
		 * if ((core->active_vdec) &&
		 * (core->active_vdec->status == VDEC_STATUS_DISCONNECTED)) {
		 * }
		 */

		/* check disconnected decoders */
		flags = vdec_core_lock(vdec_core);
		list_for_each_entry_safe(vdec, tmp,
			&core->connected_vdec_list, list) {
			if ((vdec->status == VDEC_STATUS_CONNECTED) &&
			    (vdec->next_status == VDEC_STATUS_DISCONNECTED)) {
				/* drop all core-level references first */
				if (core->parallel_dec == 1) {
					if (vdec_core->active_hevc == vdec)
						vdec_core->active_hevc = NULL;
					if (vdec_core->active_vdec == vdec)
						vdec_core->active_vdec = NULL;
				}
				if (core->last_vdec == vdec)
					core->last_vdec = NULL;
				list_move(&vdec->list, &disconnecting_list);
			}
		}
		vdec_core_unlock(vdec_core, flags);
		mutex_unlock(&vdec_mutex);
		/* elect next vdec to be scheduled */
		vdec = core->last_vdec;
		if (vdec) {
			/* round-robin: resume after the last scheduled one */
			vdec = list_entry(vdec->list.next, struct vdec_s, list);
			list_for_each_entry_from(vdec,
				&core->connected_vdec_list, list) {
				sched_mask = vdec_schedule_mask(vdec,
					core->sched_mask);
				if (!sched_mask)
					continue;
				sched_mask = vdec_ready_to_run(vdec,
					sched_mask);
				if (sched_mask)
					break;
			}

			/* iterated off the end without a hit */
			if (&vdec->list == &core->connected_vdec_list)
				vdec = NULL;
		}

		if (!vdec) {
			/* search from beginning */
			list_for_each_entry(vdec,
				&core->connected_vdec_list, list) {
				sched_mask = vdec_schedule_mask(vdec,
					core->sched_mask);
				if (vdec == core->last_vdec) {
					/* wrapped all the way around: this is
					 * the last candidate to consider
					 */
					if (!sched_mask) {
						vdec = NULL;
						break;
					}

					sched_mask = vdec_ready_to_run(vdec,
						sched_mask);

					if (!sched_mask) {
						vdec = NULL;
						break;
					}
					break;
				}

				if (!sched_mask)
					continue;

				sched_mask = vdec_ready_to_run(vdec,
					sched_mask);
				if (sched_mask)
					break;
			}

			if (&vdec->list == &core->connected_vdec_list)
				vdec = NULL;
		}

		worker = vdec;

		if (vdec) {
			unsigned long mask = sched_mask;
			unsigned long i;

			/* setting active_mask should be atomic.
			 * it can be modified by decoder driver callbacks.
			 */
			while (sched_mask) {
				i = __ffs(sched_mask);
				set_bit(i, &vdec->active_mask);
				sched_mask &= ~(1 << i);
			}

			/* vdec's sched_mask is only set from core thread */
			vdec->sched_mask |= mask;
			if (core->last_vdec) {
				/* different instance with different ucode
				 * type: force a firmware reload
				 */
				if ((core->last_vdec != vdec) &&
					(core->last_vdec->mc_type != vdec->mc_type))
					vdec->mc_loaded = 0;/*clear for reload firmware*/
			} else
				vdec->mc_loaded = 0;
			core->last_vdec = vdec;
			if (debug & 2)
				vdec->mc_loaded = 0;/*alway reload firmware*/
			vdec_set_status(vdec, VDEC_STATUS_ACTIVE);

			core->sched_mask |= mask;
			if (core->parallel_dec == 1)
				vdec_save_active_hw(vdec);
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
			vdec_profile(vdec, VDEC_PROFILE_EVENT_RUN);
#endif
			vdec_prepare_run(vdec, mask);
#ifdef VDEC_DEBUG_SUPPORT
			inc_profi_count(mask, vdec->run_count);
			update_profi_clk_run(vdec, mask, get_current_clk());
#endif
			/* hand the granted cores to the decoder driver */
			vdec->run(vdec, mask, vdec_callback, core);


			/* we have some cores scheduled, keep working until
			 * all vdecs are checked with no cores to schedule
			 */
			if (core->parallel_dec == 1) {
				if (vdec_core->vdec_combine_flag == 0)
					up(&core->sem);
			} else
				up(&core->sem);
		}

		/* remove disconnected decoder from active list */
		list_for_each_entry_safe(vdec, tmp, &disconnecting_list, list) {
			list_del(&vdec->list);
			vdec_set_status(vdec, VDEC_STATUS_DISCONNECTED);
			/*core->last_vdec = NULL;*/
			complete(&vdec->inactive_done);
		}

		/* if there is no new work scheduled and nothing
		 * is running, sleep 20ms
		 */
		if (core->parallel_dec == 1) {
			if (vdec_core->vdec_combine_flag == 0) {
				if ((!worker) &&
					((core->sched_mask != core->power_ref_mask)) &&
					(atomic_read(&vdec_core->vdec_nr) > 0) &&
					((core->buff_flag | core->stream_buff_flag) &
					(core->sched_mask ^ core->power_ref_mask))) {
					usleep_range(1000, 2000);
					up(&core->sem);
				}
			} else {
				if ((!worker) && (!core->sched_mask) &&
					(atomic_read(&vdec_core->vdec_nr) > 0) &&
					(core->buff_flag | core->stream_buff_flag)) {
					usleep_range(1000, 2000);
					up(&core->sem);
				}
			}
		} else if ((!worker) && (!core->sched_mask) && (atomic_read(&vdec_core->vdec_nr) > 0)) {
			usleep_range(1000, 2000);
			up(&core->sem);
		}

	}

	return 0;
}
3072
3073#if 1 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8 */
/*
 * Post-power-up sanity test of the HEVC IPP pipeline (part of the GXBB
 * HW workaround): drives a minimal SW-IMP frame sequence against the
 * decomp buffer at @decomp_addr, waits @us_delay, and reads the IPP
 * debug status. Returns true when the status reads back as the
 * expected value (low two bits == 1) — presumably "core came up good";
 * callers retry the power sequence otherwise.
 */
static bool test_hevc(u32 decomp_addr, u32 us_delay)
{
	int i;

	/* SW_RESET IPP */
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 1);
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 0);

	/* initialize all canvas table */
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0);
	for (i = 0; i < 32; i++)
		WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
			0x1 | (i << 8) | decomp_addr);
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 1);
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR, (0 << 8) | (0<<1) | 1);
	for (i = 0; i < 32; i++)
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);

	/* Initialize mcrcc */
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2);
	WRITE_VREG(HEVCD_MCRCC_CTL2, 0x0);
	WRITE_VREG(HEVCD_MCRCC_CTL3, 0x0);
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);

	/* Decomp initialize */
	WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x0);
	WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);

	/* Frame level initialization */
	WRITE_VREG(HEVCD_IPP_TOP_FRMCONFIG, 0x100 | (0x100 << 16));
	WRITE_VREG(HEVCD_IPP_TOP_TILECONFIG3, 0x0);
	WRITE_VREG(HEVCD_IPP_TOP_LCUCONFIG, 0x1 << 5);
	WRITE_VREG(HEVCD_IPP_BITDEPTH_CONFIG, 0x2 | (0x2 << 2));

	WRITE_VREG(HEVCD_IPP_CONFIG, 0x0);
	WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, 0x0);

	/* Enable SWIMP mode */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_CONFIG, 0x1);

	/* Enable frame */
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 0x2);
	WRITE_VREG(HEVCD_IPP_TOP_FRMCTL, 0x1);

	/* Send SW-command CTB info */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_CTBINFO, 0x1 << 31);

	/* Send PU_command */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO0, (0x4 << 9) | (0x4 << 16));
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO1, 0x1 << 3);
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO2, 0x0);
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO3, 0x0);

	/* give the pipeline time to process the commands */
	udelay(us_delay);

	WRITE_VREG(HEVCD_IPP_DBG_SEL, 0x2 << 4);

	return (READ_VREG(HEVCD_IPP_DBG_DATA) & 3) == 1;
}
3133
/*
 * Force every decoder core (vdec1, and vdec2/hcodec/hevc where the SoC
 * has them) into the powered-off state. Per core the sequence is:
 * enable isolation -> power down the core's memories -> power off the
 * domain.
 */
void vdec_power_reset(void)
{
	/* enable vdec1 isolation */
	WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
		READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0xc0);
	/* power off vdec1 memories */
	WRITE_VREG(DOS_MEM_PD_VDEC, 0xffffffffUL);
	/* vdec1 power off */
	WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
		READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0xc);

	if (has_vdec2()) {
		/* enable vdec2 isolation */
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x300);
		/* power off vdec2 memories */
		WRITE_VREG(DOS_MEM_PD_VDEC2, 0xffffffffUL);
		/* vdec2 power off */
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0x30);
	}

	if (has_hdec()) {
		/* enable hcodec isolation */
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x30);
		/* power off hcodec memories */
		WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
		/* hcodec power off */
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 3);
	}

	if (has_hevc_vdec()) {
		/* enable hevc isolation */
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0xc00);
		/* power off hevc memories */
		WRITE_VREG(DOS_MEM_PD_HEVC, 0xffffffffUL);
		/* hevc power off */
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0xc0);
	}
}
3178EXPORT_SYMBOL(vdec_power_reset);
3179
3180void vdec_poweron(enum vdec_type_e core)
3181{
3182 void *decomp_addr = NULL;
3183 dma_addr_t decomp_dma_addr;
3184 u32 decomp_addr_aligned = 0;
3185 int hevc_loop = 0;
3186 int sleep_val, iso_val;
3187 bool is_power_ctrl_ver2 = false;
3188
3189 if (core >= VDEC_MAX)
3190 return;
3191
3192 mutex_lock(&vdec_mutex);
3193
3194 vdec_core->power_ref_count[core]++;
3195 if (vdec_core->power_ref_count[core] > 1) {
3196 mutex_unlock(&vdec_mutex);
3197 return;
3198 }
3199
3200 if (vdec_on(core)) {
3201 mutex_unlock(&vdec_mutex);
3202 return;
3203 }
3204
3205 is_power_ctrl_ver2 =
3206 ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3207 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) ? true : false;
3208
3209 if (hevc_workaround_needed() &&
3210 (core == VDEC_HEVC)) {
3211 decomp_addr = codec_mm_dma_alloc_coherent(MEM_NAME,
3212 SZ_64K + SZ_4K, &decomp_dma_addr, GFP_KERNEL, 0);
3213
3214 if (decomp_addr) {
3215 decomp_addr_aligned = ALIGN(decomp_dma_addr, SZ_64K);
3216 memset((u8 *)decomp_addr +
3217 (decomp_addr_aligned - decomp_dma_addr),
3218 0xff, SZ_4K);
3219 } else
3220 pr_err("vdec: alloc HEVC gxbb decomp buffer failed.\n");
3221 }
3222
3223 if (core == VDEC_1) {
3224 sleep_val = is_power_ctrl_ver2 ? 0x2 : 0xc;
3225 iso_val = is_power_ctrl_ver2 ? 0x2 : 0xc0;
3226
3227 /* vdec1 power on */
3228#ifdef CONFIG_AMLOGIC_POWER
3229 if (is_support_power_ctrl()) {
3230 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3231 mutex_unlock(&vdec_mutex);
3232 pr_err("vdec-1 power on ctrl sleep fail.\n");
3233 return;
3234 }
3235 } else {
3236 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3237 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3238 }
3239#else
3240 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3241 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3242#endif
3243 /* wait 10uS */
3244 udelay(10);
3245 /* vdec1 soft reset */
3246 WRITE_VREG(DOS_SW_RESET0, 0xfffffffc);
3247 WRITE_VREG(DOS_SW_RESET0, 0);
3248 /* enable vdec1 clock */
3249 /*
3250 *add power on vdec clock level setting,only for m8 chip,
3251 * m8baby and m8m2 can dynamic adjust vdec clock,
3252 * power on with default clock level
3253 */
3254 amports_switch_gate("clk_vdec_mux", 1);
3255 vdec_clock_hi_enable();
3256 /* power up vdec memories */
3257 WRITE_VREG(DOS_MEM_PD_VDEC, 0);
3258
3259 /* remove vdec1 isolation */
3260#ifdef CONFIG_AMLOGIC_POWER
3261 if (is_support_power_ctrl()) {
3262 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3263 mutex_unlock(&vdec_mutex);
3264 pr_err("vdec-1 power on ctrl iso fail.\n");
3265 return;
3266 }
3267 } else {
3268 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3269 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3270 }
3271#else
3272 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3273 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3274#endif
3275 /* reset DOS top registers */
3276 WRITE_VREG(DOS_VDEC_MCRCC_STALL_CTRL, 0);
3277 } else if (core == VDEC_2) {
3278 if (has_vdec2()) {
3279 /* vdec2 power on */
3280 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3281 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3282 ~0x30);
3283 /* wait 10uS */
3284 udelay(10);
3285 /* vdec2 soft reset */
3286 WRITE_VREG(DOS_SW_RESET2, 0xffffffff);
3287 WRITE_VREG(DOS_SW_RESET2, 0);
3288 /* enable vdec1 clock */
3289 vdec2_clock_hi_enable();
3290 /* power up vdec memories */
3291 WRITE_VREG(DOS_MEM_PD_VDEC2, 0);
3292 /* remove vdec2 isolation */
3293 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3294 READ_AOREG(AO_RTI_GEN_PWR_ISO0) &
3295 ~0x300);
3296 /* reset DOS top registers */
3297 WRITE_VREG(DOS_VDEC2_MCRCC_STALL_CTRL, 0);
3298 }
3299 } else if (core == VDEC_HCODEC) {
3300 if (has_hdec()) {
3301 sleep_val = is_power_ctrl_ver2 ? 0x1 : 0x3;
3302 iso_val = is_power_ctrl_ver2 ? 0x1 : 0x30;
3303
3304 /* hcodec power on */
3305#ifdef CONFIG_AMLOGIC_POWER
3306 if (is_support_power_ctrl()) {
3307 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3308 mutex_unlock(&vdec_mutex);
3309 pr_err("hcodec power on ctrl sleep fail.\n");
3310 return;
3311 }
3312 } else {
3313 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3314 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3315 }
3316#else
3317 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3318 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3319#endif
3320 /* wait 10uS */
3321 udelay(10);
3322 /* hcodec soft reset */
3323 WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
3324 WRITE_VREG(DOS_SW_RESET1, 0);
3325 /* enable hcodec clock */
3326 hcodec_clock_enable();
3327 /* power up hcodec memories */
3328 WRITE_VREG(DOS_MEM_PD_HCODEC, 0);
3329 /* remove hcodec isolation */
3330#ifdef CONFIG_AMLOGIC_POWER
3331 if (is_support_power_ctrl()) {
3332 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3333 mutex_unlock(&vdec_mutex);
3334 pr_err("hcodec power on ctrl iso fail.\n");
3335 return;
3336 }
3337 } else {
3338 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3339 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3340 }
3341#else
3342 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3343 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3344#endif
3345 }
3346 } else if (core == VDEC_HEVC) {
3347 if (has_hevc_vdec()) {
3348 bool hevc_fixed = false;
3349
3350 sleep_val = is_power_ctrl_ver2 ? 0x4 : 0xc0;
3351 iso_val = is_power_ctrl_ver2 ? 0x4 : 0xc00;
3352
3353 while (!hevc_fixed) {
3354 /* hevc power on */
3355#ifdef CONFIG_AMLOGIC_POWER
3356 if (is_support_power_ctrl()) {
3357 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3358 mutex_unlock(&vdec_mutex);
3359 pr_err("hevc power on ctrl sleep fail.\n");
3360 return;
3361 }
3362 } else {
3363 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3364 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3365 }
3366#else
3367 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3368 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3369#endif
3370 /* wait 10uS */
3371 udelay(10);
3372 /* hevc soft reset */
3373 WRITE_VREG(DOS_SW_RESET3, 0xffffffff);
3374 WRITE_VREG(DOS_SW_RESET3, 0);
3375 /* enable hevc clock */
3376 amports_switch_gate("clk_hevc_mux", 1);
3377 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
3378 amports_switch_gate("clk_hevcb_mux", 1);
3379 hevc_clock_hi_enable();
3380 hevc_back_clock_hi_enable();
3381 /* power up hevc memories */
3382 WRITE_VREG(DOS_MEM_PD_HEVC, 0);
3383 /* remove hevc isolation */
3384#ifdef CONFIG_AMLOGIC_POWER
3385 if (is_support_power_ctrl()) {
3386 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3387 mutex_unlock(&vdec_mutex);
3388 pr_err("hevc power on ctrl iso fail.\n");
3389 return;
3390 }
3391 } else {
3392 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3393 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3394 }
3395#else
3396 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3397 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3398#endif
3399 if (!hevc_workaround_needed())
3400 break;
3401
3402 if (decomp_addr)
3403 hevc_fixed = test_hevc(
3404 decomp_addr_aligned, 20);
3405
3406 if (!hevc_fixed) {
3407 hevc_loop++;
3408
3409 mutex_unlock(&vdec_mutex);
3410
3411 if (hevc_loop >= HEVC_TEST_LIMIT) {
3412 pr_warn("hevc power sequence over limit\n");
3413 pr_warn("=====================================================\n");
3414 pr_warn(" This chip is identified to have HW failure.\n");
3415 pr_warn(" Please contact sqa-platform to replace the platform.\n");
3416 pr_warn("=====================================================\n");
3417
3418 panic("Force panic for chip detection !!!\n");
3419
3420 break;
3421 }
3422
3423 vdec_poweroff(VDEC_HEVC);
3424
3425 mdelay(10);
3426
3427 mutex_lock(&vdec_mutex);
3428 }
3429 }
3430
3431 if (hevc_loop > hevc_max_reset_count)
3432 hevc_max_reset_count = hevc_loop;
3433
3434 WRITE_VREG(DOS_SW_RESET3, 0xffffffff);
3435 udelay(10);
3436 WRITE_VREG(DOS_SW_RESET3, 0);
3437 }
3438 }
3439
3440 if (decomp_addr)
3441 codec_mm_dma_free_coherent(MEM_NAME,
3442 SZ_64K + SZ_4K, decomp_addr, decomp_dma_addr, 0);
3443
3444 mutex_unlock(&vdec_mutex);
3445}
3446EXPORT_SYMBOL(vdec_poweron);
3447
/*
 * vdec_poweroff() - drop one power reference on a decoder core and, when
 * the count reaches zero, run the hardware power-down sequence for that
 * core: isolation on -> memory power-down -> clock off -> sleep on.
 * @core: decoder core (VDEC_1 / VDEC_2 / VDEC_HCODEC / VDEC_HEVC).
 *
 * Counterpart of vdec_poweron(); serialized by vdec_mutex.
 * NOTE(review): on a power_ctrl_*_mask() failure the function unlocks and
 * returns with the sequence only partially applied.
 */
void vdec_poweroff(enum vdec_type_e core)
{
	int sleep_val, iso_val;
	bool is_power_ctrl_ver2 = false;

	if (core >= VDEC_MAX)
		return;

	mutex_lock(&vdec_mutex);

	/* keep power up while any other user still holds a reference */
	vdec_core->power_ref_count[core]--;
	if (vdec_core->power_ref_count[core] > 0) {
		mutex_unlock(&vdec_mutex);
		return;
	}

	/* SM1 and later (except TL1) use the ver2 power-control bit layout */
	is_power_ctrl_ver2 =
		((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
		(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) ? true : false;

	if (core == VDEC_1) {
		sleep_val = is_power_ctrl_ver2 ? 0x2 : 0xc;
		iso_val = is_power_ctrl_ver2 ? 0x2 : 0xc0;

		/* enable vdec1 isolation */
#ifdef CONFIG_AMLOGIC_POWER
		if (is_support_power_ctrl()) {
			if (power_ctrl_iso_mask(false, iso_val, 0)) {
				mutex_unlock(&vdec_mutex);
				pr_err("vdec-1 power off ctrl iso fail.\n");
				return;
			}
		} else {
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
		}
#else
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
		/* power off vdec1 memories */
		WRITE_VREG(DOS_MEM_PD_VDEC, 0xffffffffUL);
		/* disable vdec1 clock */
		vdec_clock_off();
		/* vdec1 power off */
#ifdef CONFIG_AMLOGIC_POWER
		if (is_support_power_ctrl()) {
			if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
				mutex_unlock(&vdec_mutex);
				pr_err("vdec-1 power off ctrl sleep fail.\n");
				return;
			}
		} else {
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
		}
#else
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
	} else if (core == VDEC_2) {
		if (has_vdec2()) {
			/* enable vdec2 isolation */
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) |
				0x300);
			/* power off vdec2 memories */
			WRITE_VREG(DOS_MEM_PD_VDEC2, 0xffffffffUL);
			/* disable vdec2 clock */
			vdec2_clock_off();
			/* vdec2 power off */
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) |
				0x30);
		}
	} else if (core == VDEC_HCODEC) {
		if (has_hdec()) {
			sleep_val = is_power_ctrl_ver2 ? 0x1 : 0x3;
			iso_val = is_power_ctrl_ver2 ? 0x1 : 0x30;

			/* enable hcodec isolation */
#ifdef CONFIG_AMLOGIC_POWER
			if (is_support_power_ctrl()) {
				if (power_ctrl_iso_mask(false, iso_val, 0)) {
					mutex_unlock(&vdec_mutex);
					pr_err("hcodec power off ctrl iso fail.\n");
					return;
				}
			} else {
				WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
					READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
			}
#else
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
			/* power off hcodec memories */
			WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
			/* disable hcodec clock */
			hcodec_clock_off();
			/* hcodec power off */
#ifdef CONFIG_AMLOGIC_POWER
			if (is_support_power_ctrl()) {
				if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
					mutex_unlock(&vdec_mutex);
					pr_err("hcodec power off ctrl sleep fail.\n");
					return;
				}
			} else {
				WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
					READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
			}
#else
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
		}
	} else if (core == VDEC_HEVC) {
		if (has_hevc_vdec()) {
			sleep_val = is_power_ctrl_ver2 ? 0x4 : 0xc0;
			iso_val = is_power_ctrl_ver2 ? 0x4 : 0xc00;

			if (no_powerdown == 0) {
				/* enable hevc isolation */
#ifdef CONFIG_AMLOGIC_POWER
				if (is_support_power_ctrl()) {
					if (power_ctrl_iso_mask(false, iso_val, 0)) {
						mutex_unlock(&vdec_mutex);
						pr_err("hevc power off ctrl iso fail.\n");
						return;
					}
				} else {
					WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
						READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
				}
#else
				WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
					READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
				/* power off hevc memories */
				WRITE_VREG(DOS_MEM_PD_HEVC, 0xffffffffUL);

				/* disable hevc clock (G12A+ also has a back-end clock) */
				hevc_clock_off();
				if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
					hevc_back_clock_off();

				/* hevc power off */
#ifdef CONFIG_AMLOGIC_POWER
				if (is_support_power_ctrl()) {
					if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
						mutex_unlock(&vdec_mutex);
						pr_err("hevc power off ctrl sleep fail.\n");
						return;
					}
				} else {
					WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
						READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
				}
#else
				WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
					READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
			} else {
				/* one-shot skip of the power-down: reset the core
				 * instead, then clear the flag */
				pr_info("!!!!!!!!not power down\n");
				hevc_reset_core(NULL);
				no_powerdown = 0;
			}
		}
	}
	mutex_unlock(&vdec_mutex);
}
3620EXPORT_SYMBOL(vdec_poweroff);
3621
/*
 * vdec_on() - query whether a decoder core is currently powered and clocked.
 * @core: decoder core to check.
 *
 * Return: true only when the core's AO sleep bits are clear (not in
 * power-down) AND its HHI clock-gate bit is set. The sleep-bit mask
 * differs on SM1+ (except TL1), matching vdec_poweron()/vdec_poweroff().
 */
bool vdec_on(enum vdec_type_e core)
{
	bool ret = false;

	if (core == VDEC_1) {
		if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
			(((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
			(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
			? 0x2 : 0xc)) == 0) &&
			(READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x100))
			ret = true;
	} else if (core == VDEC_2) {
		if (has_vdec2()) {
			if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & 0x30) == 0) &&
				(READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x100))
				ret = true;
		}
	} else if (core == VDEC_HCODEC) {
		if (has_hdec()) {
			if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
				(((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
				(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
				? 0x1 : 0x3)) == 0) &&
				(READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x1000000))
				ret = true;
		}
	} else if (core == VDEC_HEVC) {
		if (has_hevc_vdec()) {
			if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
				(((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
				(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
				? 0x4 : 0xc0)) == 0) &&
				(READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x1000000))
				ret = true;
		}
	}

	return ret;
}
3661EXPORT_SYMBOL(vdec_on);
3662
3663#elif 0 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON6TVD */
/*
 * Legacy vdec_poweron() variant for older Meson SoCs. This whole branch
 * is compiled out (guarded by "#elif 0" above) and kept for reference.
 * Sequence per core: soft reset -> clock enable -> clear MCRCC stall.
 */
void vdec_poweron(enum vdec_type_e core)
{
	ulong flags;

	spin_lock_irqsave(&lock, flags);

	if (core == VDEC_1) {
		/* vdec1 soft reset */
		WRITE_VREG(DOS_SW_RESET0, 0xfffffffc);
		WRITE_VREG(DOS_SW_RESET0, 0);
		/* enable vdec1 clock */
		vdec_clock_enable();
		/* reset DOS top registers */
		WRITE_VREG(DOS_VDEC_MCRCC_STALL_CTRL, 0);
	} else if (core == VDEC_2) {
		/* vdec2 soft reset */
		WRITE_VREG(DOS_SW_RESET2, 0xffffffff);
		WRITE_VREG(DOS_SW_RESET2, 0);
		/* enable vdec2 clock */
		vdec2_clock_enable();
		/* reset DOS top registers */
		WRITE_VREG(DOS_VDEC2_MCRCC_STALL_CTRL, 0);
	} else if (core == VDEC_HCODEC) {
		/* hcodec soft reset */
		WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
		WRITE_VREG(DOS_SW_RESET1, 0);
		/* enable hcodec clock */
		hcodec_clock_enable();
	}

	spin_unlock_irqrestore(&lock, flags);
}
3696
/*
 * Legacy vdec_poweroff() variant for older Meson SoCs — compiled out
 * (inside the "#elif 0" branch). Only gates the per-core clock off.
 */
void vdec_poweroff(enum vdec_type_e core)
{
	ulong flags;

	spin_lock_irqsave(&lock, flags);

	if (core == VDEC_1) {
		/* disable vdec1 clock */
		vdec_clock_off();
	} else if (core == VDEC_2) {
		/* disable vdec2 clock */
		vdec2_clock_off();
	} else if (core == VDEC_HCODEC) {
		/* disable hcodec clock */
		hcodec_clock_off();
	}

	spin_unlock_irqrestore(&lock, flags);
}
3716
/*
 * Legacy vdec_on() variant — compiled out (inside the "#elif 0" branch).
 * Reports "on" from the HHI clock-gate bit alone (no AO power domains
 * on these SoCs).
 */
bool vdec_on(enum vdec_type_e core)
{
	bool ret = false;

	if (core == VDEC_1) {
		if (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x100)
			ret = true;
	} else if (core == VDEC_2) {
		if (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x100)
			ret = true;
	} else if (core == VDEC_HCODEC) {
		if (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x1000000)
			ret = true;
	}

	return ret;
}
3734#endif
3735
3736int vdec_source_changed(int format, int width, int height, int fps)
3737{
3738 /* todo: add level routines for clock adjustment per chips */
3739 int ret = -1;
3740 static int on_setting;
3741
3742 if (on_setting > 0)
3743 return ret;/*on changing clk,ignore this change*/
3744
3745 if (vdec_source_get(VDEC_1) == width * height * fps)
3746 return ret;
3747
3748
3749 on_setting = 1;
3750 ret = vdec_source_changed_for_clk_set(format, width, height, fps);
3751 pr_debug("vdec1 video changed to %d x %d %d fps clk->%dMHZ\n",
3752 width, height, fps, vdec_clk_get(VDEC_1));
3753 on_setting = 0;
3754 return ret;
3755
3756}
3757EXPORT_SYMBOL(vdec_source_changed);
3758
/*
 * vdec_reset_core() - hard-reset the VDEC_1 hardware pipeline.
 * @vdec: decoder instance (unused here; kept for API symmetry with
 *        hevc_reset_core()).
 *
 * Sequence: stall the decoder's DMC (DDR) request port, wait for the
 * channel to drain, pulse the DOS soft-reset bits, then re-enable the
 * DMC port. Statement order is hardware-mandated.
 */
void vdec_reset_core(struct vdec_s *vdec)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 13; /*bit13: DOS VDEC interface*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask = 1 << 21; /*bit21: DOS VDEC interface*/

	/* block new DMC requests from the VDEC interface */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* busy-wait until the channel reports idle
	 * NOTE(review): no timeout — hangs forever if the bit never sets */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;
	/*
	 * 2: assist
	 * 3: vld_reset
	 * 4: vld_part_reset
	 * 5: vfifo reset
	 * 6: iqidct
	 * 7: mc
	 * 8: dblk
	 * 9: pic_dc
	 * 10: psc
	 * 11: mcpu
	 * 12: ccpu
	 * 13: ddr
	 * 14: afifo
	 */
	/* SM1+ (except TL1) resets additional pipeline stages */
	if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
		(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) {
		WRITE_VREG(DOS_SW_RESET0, (1<<3)|(1<<4)|(1<<5)|(1<<6)|(1<<7)|(1<<8)|(1<<9));
	} else {
		WRITE_VREG(DOS_SW_RESET0,
			(1<<3)|(1<<4)|(1<<5));
	}
	WRITE_VREG(DOS_SW_RESET0, 0);

	/* restore the DMC request port */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);
}
3805EXPORT_SYMBOL(vdec_reset_core);
3806
3807void hevc_mmu_dma_check(struct vdec_s *vdec)
3808{
3809 ulong timeout;
3810 u32 data;
3811 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A)
3812 return;
3813 timeout = jiffies + HZ/100;
3814 while (1) {
3815 data = READ_VREG(HEVC_CM_CORE_STATUS);
3816 if ((data & 0x1) == 0)
3817 break;
3818 if (time_after(jiffies, timeout)) {
3819 if (debug & 0x10)
3820 pr_info(" %s sao mmu dma idle\n", __func__);
3821 break;
3822 }
3823 }
3824 /*disable sao mmu dma */
3825 CLEAR_VREG_MASK(HEVC_SAO_MMU_DMA_CTRL, 1 << 0);
3826 timeout = jiffies + HZ/100;
3827 while (1) {
3828 data = READ_VREG(HEVC_SAO_MMU_DMA_STATUS);
3829 if ((data & 0x1))
3830 break;
3831 if (time_after(jiffies, timeout)) {
3832 if (debug & 0x10)
3833 pr_err("%s sao mmu dma timeout, num_buf_used = 0x%x\n",
3834 __func__, (READ_VREG(HEVC_SAO_MMU_STATUS) >> 16));
3835 break;
3836 }
3837 }
3838}
3839EXPORT_SYMBOL(hevc_mmu_dma_check);
3840
/*
 * hevc_reset_core() - hard-reset the HEVC hardware pipeline.
 * @vdec: decoder instance, or NULL; when NULL or frame-based input the
 *        stream control register is cleared a second time after the
 *        DMC channel drains.
 *
 * Sequence: clear stream control, stall the HEVC DMC port(s), wait for
 * drain, pulse the DOS_SW_RESET3 bits, re-enable the DMC port(s).
 * Statement order is hardware-mandated.
 */
void hevc_reset_core(struct vdec_s *vdec)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 4; /*bit4: hevc*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask |= 1 << 8; /*bit8: hevcb*/

	WRITE_VREG(HEVC_STREAM_CONTROL, 0);
	/* block new DMC requests from the HEVC interface(s) */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* busy-wait until the channel reports idle
	 * NOTE(review): no timeout — hangs forever if the bit never sets */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;

	if (vdec == NULL || input_frame_based(vdec))
		WRITE_VREG(HEVC_STREAM_CONTROL, 0);

	/*
	 * 2: assist
	 * 3: parser
	 * 4: parser_state
	 * 8: dblk
	 * 11:mcpu
	 * 12:ccpu
	 * 13:ddr
	 * 14:iqit
	 * 15:ipp
	 * 17:qdct
	 * 18:mpred
	 * 19:sao
	 * 24:hevc_afifo
	 */
	WRITE_VREG(DOS_SW_RESET3,
		(1<<3)|(1<<4)|(1<<8)|(1<<11)|
		(1<<12)|(1<<13)|(1<<14)|(1<<15)|
		(1<<17)|(1<<18)|(1<<19)|(1<<24));

	WRITE_VREG(DOS_SW_RESET3, 0);


	/* restore the DMC request port(s) */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

}
3892EXPORT_SYMBOL(hevc_reset_core);
3893
3894int vdec2_source_changed(int format, int width, int height, int fps)
3895{
3896 int ret = -1;
3897 static int on_setting;
3898
3899 if (has_vdec2()) {
3900 /* todo: add level routines for clock adjustment per chips */
3901 if (on_setting != 0)
3902 return ret;/*on changing clk,ignore this change*/
3903
3904 if (vdec_source_get(VDEC_2) == width * height * fps)
3905 return ret;
3906
3907 on_setting = 1;
3908 ret = vdec_source_changed_for_clk_set(format,
3909 width, height, fps);
3910 pr_debug("vdec2 video changed to %d x %d %d fps clk->%dMHZ\n",
3911 width, height, fps, vdec_clk_get(VDEC_2));
3912 on_setting = 0;
3913 return ret;
3914 }
3915 return 0;
3916}
3917EXPORT_SYMBOL(vdec2_source_changed);
3918
3919int hevc_source_changed(int format, int width, int height, int fps)
3920{
3921 /* todo: add level routines for clock adjustment per chips */
3922 int ret = -1;
3923 static int on_setting;
3924
3925 if (on_setting != 0)
3926 return ret;/*on changing clk,ignore this change*/
3927
3928 if (vdec_source_get(VDEC_HEVC) == width * height * fps)
3929 return ret;
3930
3931 on_setting = 1;
3932 ret = vdec_source_changed_for_clk_set(format, width, height, fps);
3933 pr_debug("hevc video changed to %d x %d %d fps clk->%dMHZ\n",
3934 width, height, fps, vdec_clk_get(VDEC_HEVC));
3935 on_setting = 0;
3936
3937 return ret;
3938}
3939EXPORT_SYMBOL(hevc_source_changed);
3940
/*
 * AMRISC coprocessor register map (name -> DOS register offset) dumped
 * by amrisc_regs_show(): master/coproc CPU state (M*/C*) and the
 * AV_SCRATCH_* mailbox registers.
 */
static struct am_reg am_risc[] = {
	{"MSP", 0x300},
	{"MPSR", 0x301},
	{"MCPU_INT_BASE", 0x302},
	{"MCPU_INTR_GRP", 0x303},
	{"MCPU_INTR_MSK", 0x304},
	{"MCPU_INTR_REQ", 0x305},
	{"MPC-P", 0x306},
	{"MPC-D", 0x307},
	{"MPC_E", 0x308},
	{"MPC_W", 0x309},
	{"CSP", 0x320},
	{"CPSR", 0x321},
	{"CCPU_INT_BASE", 0x322},
	{"CCPU_INTR_GRP", 0x323},
	{"CCPU_INTR_MSK", 0x324},
	{"CCPU_INTR_REQ", 0x325},
	{"CPC-P", 0x326},
	{"CPC-D", 0x327},
	{"CPC_E", 0x328},
	{"CPC_W", 0x329},
	{"AV_SCRATCH_0", 0x09c0},
	{"AV_SCRATCH_1", 0x09c1},
	{"AV_SCRATCH_2", 0x09c2},
	{"AV_SCRATCH_3", 0x09c3},
	{"AV_SCRATCH_4", 0x09c4},
	{"AV_SCRATCH_5", 0x09c5},
	{"AV_SCRATCH_6", 0x09c6},
	{"AV_SCRATCH_7", 0x09c7},
	{"AV_SCRATCH_8", 0x09c8},
	{"AV_SCRATCH_9", 0x09c9},
	{"AV_SCRATCH_A", 0x09ca},
	{"AV_SCRATCH_B", 0x09cb},
	{"AV_SCRATCH_C", 0x09cc},
	{"AV_SCRATCH_D", 0x09cd},
	{"AV_SCRATCH_E", 0x09ce},
	{"AV_SCRATCH_F", 0x09cf},
	{"AV_SCRATCH_G", 0x09d0},
	{"AV_SCRATCH_H", 0x09d1},
	{"AV_SCRATCH_I", 0x09d2},
	{"AV_SCRATCH_J", 0x09d3},
	{"AV_SCRATCH_K", 0x09d4},
	{"AV_SCRATCH_L", 0x09d5},
	{"AV_SCRATCH_M", 0x09d6},
	{"AV_SCRATCH_N", 0x09d7},
};
3987
/*
 * Sysfs show: dump every register in the am_risc[] table.
 * Requires VDEC_1 powered on (M8+) or the "vdec" gate opened (M6+);
 * reports "amrisc is power off" otherwise. Returns bytes written.
 */
static ssize_t amrisc_regs_show(struct class *class,
		struct class_attribute *attr, char *buf)
{
	char *pbuf = buf;
	struct am_reg *regs = am_risc;
	int rsize = sizeof(am_risc) / sizeof(struct am_reg);
	int i;
	unsigned int val;
	ssize_t ret;

	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
		mutex_lock(&vdec_mutex);
		/* registers are unreadable with the core powered down */
		if (!vdec_on(VDEC_1)) {
			mutex_unlock(&vdec_mutex);
			pbuf += sprintf(pbuf, "amrisc is power off\n");
			ret = pbuf - buf;
			return ret;
		}
	} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
		/*TODO:M6 define */
		/*
		 * switch_mod_gate_by_type(MOD_VDEC, 1);
		 */
		amports_switch_gate("vdec", 1);
	}
	pbuf += sprintf(pbuf, "amrisc registers show:\n");
	for (i = 0; i < rsize; i++) {
		val = READ_VREG(regs[i].offset);
		pbuf += sprintf(pbuf, "%s(%#x)\t:%#x(%d)\n",
			regs[i].name, regs[i].offset, val, val);
	}
	/* undo whichever power/gate arrangement was taken above */
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
		mutex_unlock(&vdec_mutex);
	else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
		/*TODO:M6 define */
		/*
		 * switch_mod_gate_by_type(MOD_VDEC, 0);
		 */
		amports_switch_gate("vdec", 0);
	}
	ret = pbuf - buf;
	return ret;
}
4031
4032static ssize_t dump_trace_show(struct class *class,
4033 struct class_attribute *attr, char *buf)
4034{
4035 int i;
4036 char *pbuf = buf;
4037 ssize_t ret;
4038 u16 *trace_buf = kmalloc(debug_trace_num * 2, GFP_KERNEL);
4039
4040 if (!trace_buf) {
4041 pbuf += sprintf(pbuf, "No Memory bug\n");
4042 ret = pbuf - buf;
4043 return ret;
4044 }
4045 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
4046 mutex_lock(&vdec_mutex);
4047 if (!vdec_on(VDEC_1)) {
4048 mutex_unlock(&vdec_mutex);
4049 kfree(trace_buf);
4050 pbuf += sprintf(pbuf, "amrisc is power off\n");
4051 ret = pbuf - buf;
4052 return ret;
4053 }
4054 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4055 /*TODO:M6 define */
4056 /*
4057 * switch_mod_gate_by_type(MOD_VDEC, 1);
4058 */
4059 amports_switch_gate("vdec", 1);
4060 }
4061 pr_info("dump trace steps:%d start\n", debug_trace_num);
4062 i = 0;
4063 while (i <= debug_trace_num - 16) {
4064 trace_buf[i] = READ_VREG(MPC_E);
4065 trace_buf[i + 1] = READ_VREG(MPC_E);
4066 trace_buf[i + 2] = READ_VREG(MPC_E);
4067 trace_buf[i + 3] = READ_VREG(MPC_E);
4068 trace_buf[i + 4] = READ_VREG(MPC_E);
4069 trace_buf[i + 5] = READ_VREG(MPC_E);
4070 trace_buf[i + 6] = READ_VREG(MPC_E);
4071 trace_buf[i + 7] = READ_VREG(MPC_E);
4072 trace_buf[i + 8] = READ_VREG(MPC_E);
4073 trace_buf[i + 9] = READ_VREG(MPC_E);
4074 trace_buf[i + 10] = READ_VREG(MPC_E);
4075 trace_buf[i + 11] = READ_VREG(MPC_E);
4076 trace_buf[i + 12] = READ_VREG(MPC_E);
4077 trace_buf[i + 13] = READ_VREG(MPC_E);
4078 trace_buf[i + 14] = READ_VREG(MPC_E);
4079 trace_buf[i + 15] = READ_VREG(MPC_E);
4080 i += 16;
4081 };
4082 pr_info("dump trace steps:%d finished\n", debug_trace_num);
4083 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
4084 mutex_unlock(&vdec_mutex);
4085 else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4086 /*TODO:M6 define */
4087 /*
4088 * switch_mod_gate_by_type(MOD_VDEC, 0);
4089 */
4090 amports_switch_gate("vdec", 0);
4091 }
4092 for (i = 0; i < debug_trace_num; i++) {
4093 if (i % 4 == 0) {
4094 if (i % 16 == 0)
4095 pbuf += sprintf(pbuf, "\n");
4096 else if (i % 8 == 0)
4097 pbuf += sprintf(pbuf, " ");
4098 else /* 4 */
4099 pbuf += sprintf(pbuf, " ");
4100 }
4101 pbuf += sprintf(pbuf, "%04x:", trace_buf[i]);
4102 }
4103 while (i < debug_trace_num)
4104 ;
4105 kfree(trace_buf);
4106 pbuf += sprintf(pbuf, "\n");
4107 ret = pbuf - buf;
4108 return ret;
4109}
4110
4111static ssize_t clock_level_show(struct class *class,
4112 struct class_attribute *attr, char *buf)
4113{
4114 char *pbuf = buf;
4115 size_t ret;
4116
4117 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_1));
4118
4119 if (has_vdec2())
4120 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_2));
4121
4122 if (has_hevc_vdec())
4123 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_HEVC));
4124
4125 ret = pbuf - buf;
4126 return ret;
4127}
4128
4129static ssize_t store_poweron_clock_level(struct class *class,
4130 struct class_attribute *attr,
4131 const char *buf, size_t size)
4132{
4133 unsigned int val;
4134 ssize_t ret;
4135
4136 /*ret = sscanf(buf, "%d", &val);*/
4137 ret = kstrtoint(buf, 0, &val);
4138
4139 if (ret != 0)
4140 return -EINVAL;
4141 poweron_clock_level = val;
4142 return size;
4143}
4144
/* Sysfs show: report the current poweron_clock_level value. */
static ssize_t show_poweron_clock_level(struct class *class,
		struct class_attribute *attr, char *buf)
{
	return sprintf(buf, "%d\n", poweron_clock_level);
}
4150
4151/*
4152 *if keep_vdec_mem == 1
4153 *always don't release
4154 *vdec 64 memory for fast play.
4155 */
4156static ssize_t store_keep_vdec_mem(struct class *class,
4157 struct class_attribute *attr,
4158 const char *buf, size_t size)
4159{
4160 unsigned int val;
4161 ssize_t ret;
4162
4163 /*ret = sscanf(buf, "%d", &val);*/
4164 ret = kstrtoint(buf, 0, &val);
4165 if (ret != 0)
4166 return -EINVAL;
4167 keep_vdec_mem = val;
4168 return size;
4169}
4170
/* Sysfs show: report the current keep_vdec_mem value. */
static ssize_t show_keep_vdec_mem(struct class *class,
		struct class_attribute *attr, char *buf)
{
	return sprintf(buf, "%d\n", keep_vdec_mem);
}
4176
4177#ifdef VDEC_DEBUG_SUPPORT
4178static ssize_t store_debug(struct class *class,
4179 struct class_attribute *attr,
4180 const char *buf, size_t size)
4181{
4182 struct vdec_s *vdec;
4183 struct vdec_core_s *core = vdec_core;
4184 unsigned long flags;
4185
4186 unsigned id;
4187 unsigned val;
4188 ssize_t ret;
4189 char cbuf[32];
4190
4191 cbuf[0] = 0;
4192 ret = sscanf(buf, "%s %x %x", cbuf, &id, &val);
4193 /*pr_info(
4194 "%s(%s)=>ret %ld: %s, %x, %x\n",
4195 __func__, buf, ret, cbuf, id, val);*/
4196 if (strcmp(cbuf, "schedule") == 0) {
4197 pr_info("VDEC_DEBUG: force schedule\n");
4198 up(&core->sem);
4199 } else if (strcmp(cbuf, "power_off") == 0) {
4200 pr_info("VDEC_DEBUG: power off core %d\n", id);
4201 vdec_poweroff(id);
4202 } else if (strcmp(cbuf, "power_on") == 0) {
4203 pr_info("VDEC_DEBUG: power_on core %d\n", id);
4204 vdec_poweron(id);
4205 } else if (strcmp(cbuf, "wr") == 0) {
4206 pr_info("VDEC_DEBUG: WRITE_VREG(0x%x, 0x%x)\n",
4207 id, val);
4208 WRITE_VREG(id, val);
4209 } else if (strcmp(cbuf, "rd") == 0) {
4210 pr_info("VDEC_DEBUG: READ_VREG(0x%x) = 0x%x\n",
4211 id, READ_VREG(id));
4212 } else if (strcmp(cbuf, "read_hevc_clk_reg") == 0) {
4213 pr_info(
4214 "VDEC_DEBUG: HHI_VDEC4_CLK_CNTL = 0x%x, HHI_VDEC2_CLK_CNTL = 0x%x\n",
4215 READ_HHI_REG(HHI_VDEC4_CLK_CNTL),
4216 READ_HHI_REG(HHI_VDEC2_CLK_CNTL));
4217 }
4218
4219 flags = vdec_core_lock(vdec_core);
4220
4221 list_for_each_entry(vdec,
4222 &core->connected_vdec_list, list) {
4223 pr_info("vdec: status %d, id %d\n", vdec->status, vdec->id);
4224 if (((vdec->status == VDEC_STATUS_CONNECTED
4225 || vdec->status == VDEC_STATUS_ACTIVE)) &&
4226 (vdec->id == id)) {
4227 /*to add*/
4228 break;
4229 }
4230 }
4231 vdec_core_unlock(vdec_core, flags);
4232 return size;
4233}
4234
4235static ssize_t show_debug(struct class *class,
4236 struct class_attribute *attr, char *buf)
4237{
4238 char *pbuf = buf;
4239 struct vdec_s *vdec;
4240 struct vdec_core_s *core = vdec_core;
4241 unsigned long flags = vdec_core_lock(vdec_core);
4242 u64 tmp;
4243
4244 pbuf += sprintf(pbuf,
4245 "============== help:\n");
4246 pbuf += sprintf(pbuf,
4247 "'echo xxx > debug' usuage:\n");
4248 pbuf += sprintf(pbuf,
4249 "schedule - trigger schedule thread to run\n");
4250 pbuf += sprintf(pbuf,
4251 "power_off core_num - call vdec_poweroff(core_num)\n");
4252 pbuf += sprintf(pbuf,
4253 "power_on core_num - call vdec_poweron(core_num)\n");
4254 pbuf += sprintf(pbuf,
4255 "wr adr val - call WRITE_VREG(adr, val)\n");
4256 pbuf += sprintf(pbuf,
4257 "rd adr - call READ_VREG(adr)\n");
4258 pbuf += sprintf(pbuf,
4259 "read_hevc_clk_reg - read HHI register for hevc clk\n");
4260 pbuf += sprintf(pbuf,
4261 "===================\n");
4262
4263 pbuf += sprintf(pbuf,
4264 "name(core)\tschedule_count\trun_count\tinput_underrun\tdecbuf_not_ready\trun_time\n");
4265 list_for_each_entry(vdec,
4266 &core->connected_vdec_list, list) {
4267 enum vdec_type_e type;
4268 if ((vdec->status == VDEC_STATUS_CONNECTED
4269 || vdec->status == VDEC_STATUS_ACTIVE)) {
4270 for (type = VDEC_1; type < VDEC_MAX; type++) {
4271 if (vdec->core_mask & (1 << type)) {
4272 pbuf += sprintf(pbuf, "%s(%d):",
4273 vdec->vf_provider_name, type);
4274 pbuf += sprintf(pbuf, "\t%d",
4275 vdec->check_count[type]);
4276 pbuf += sprintf(pbuf, "\t%d",
4277 vdec->run_count[type]);
4278 pbuf += sprintf(pbuf, "\t%d",
4279 vdec->input_underrun_count[type]);
4280 pbuf += sprintf(pbuf, "\t%d",
4281 vdec->not_run_ready_count[type]);
4282 tmp = vdec->run_clk[type] * 100;
4283 do_div(tmp, vdec->total_clk[type]);
4284 pbuf += sprintf(pbuf,
4285 "\t%d%%\n",
4286 vdec->total_clk[type] == 0 ? 0 :
4287 (u32)tmp);
4288 }
4289 }
4290 }
4291 }
4292
4293 vdec_core_unlock(vdec_core, flags);
4294 return pbuf - buf;
4295
4296}
4297#endif
4298
4299/*irq num as same as .dts*/
4300/*
4301 * interrupts = <0 3 1
4302 * 0 23 1
4303 * 0 32 1
4304 * 0 43 1
4305 * 0 44 1
4306 * 0 45 1>;
4307 * interrupt-names = "vsync",
4308 * "demux",
4309 * "parser",
4310 * "mailbox_0",
4311 * "mailbox_1",
4312 * "mailbox_2";
4313 */
/*
 * vdec_request_threaded_irq() - register a decoder interrupt handler.
 * @num:      logical irq slot (index into isr_context[]; must be
 *            < VDEC_IRQ_MAX — slots map to the dts "interrupts" list
 *            documented above).
 * @handler:  hard-irq handler, may be NULL.
 * @thread_fn: threaded handler, may be NULL; when set the irq is
 *            requested with IRQF_ONESHOT instead of @irqflags.
 * @irqflags: flags used when @thread_fn is NULL.
 * @devname:  name shown in /proc/interrupts.
 * @dev:      opaque cookie passed back to the handlers.
 *
 * The platform irq is requested only once per slot (irq < 0 means
 * not yet requested); later calls just swap the dispatched handlers,
 * which always go through the shared vdec_isr/vdec_thread_isr
 * trampolines. Returns 0 on success or a negative errno.
 */
s32 vdec_request_threaded_irq(enum vdec_irq_num num,
			irq_handler_t handler,
			irq_handler_t thread_fn,
			unsigned long irqflags,
			const char *devname, void *dev)
{
	s32 res_irq;
	s32 ret = 0;

	if (num >= VDEC_IRQ_MAX) {
		pr_err("[%s] request irq error, irq num too big!", __func__);
		return -EINVAL;
	}

	if (vdec_core->isr_context[num].irq < 0) {
		/* first user of this slot: resolve and request the irq */
		res_irq = platform_get_irq(
			vdec_core->vdec_core_platform_device, num);
		if (res_irq < 0) {
			pr_err("[%s] get irq error!", __func__);
			return -EINVAL;
		}

		vdec_core->isr_context[num].irq = res_irq;
		vdec_core->isr_context[num].dev_isr = handler;
		vdec_core->isr_context[num].dev_threaded_isr = thread_fn;
		vdec_core->isr_context[num].dev_id = dev;

		ret = request_threaded_irq(res_irq,
			vdec_isr,
			vdec_thread_isr,
			(thread_fn) ? IRQF_ONESHOT : irqflags,
			devname,
			&vdec_core->isr_context[num]);

		if (ret) {
			/* roll the slot back to "not requested" on failure */
			vdec_core->isr_context[num].irq = -1;
			vdec_core->isr_context[num].dev_isr = NULL;
			vdec_core->isr_context[num].dev_threaded_isr = NULL;
			vdec_core->isr_context[num].dev_id = NULL;

			pr_err("vdec irq register error for %s.\n", devname);
			return -EIO;
		}
	} else {
		/* irq already requested: just retarget the handlers */
		vdec_core->isr_context[num].dev_isr = handler;
		vdec_core->isr_context[num].dev_threaded_isr = thread_fn;
		vdec_core->isr_context[num].dev_id = dev;
	}

	return ret;
}
4365EXPORT_SYMBOL(vdec_request_threaded_irq);
4366
4367s32 vdec_request_irq(enum vdec_irq_num num, irq_handler_t handler,
4368 const char *devname, void *dev)
4369{
4370 pr_debug("vdec_request_irq %p, %s\n", handler, devname);
4371
4372 return vdec_request_threaded_irq(num,
4373 handler,
4374 NULL,/*no thread_fn*/
4375 IRQF_SHARED,
4376 devname,
4377 dev);
4378}
4379EXPORT_SYMBOL(vdec_request_irq);
4380
4381void vdec_free_irq(enum vdec_irq_num num, void *dev)
4382{
4383 if (num >= VDEC_IRQ_MAX) {
4384 pr_err("[%s] request irq error, irq num too big!", __func__);
4385 return;
4386 }
4387 /*
4388 *assume amrisc is stopped already and there is no mailbox interrupt
4389 * when we reset pointers here.
4390 */
4391 vdec_core->isr_context[num].dev_isr = NULL;
4392 vdec_core->isr_context[num].dev_threaded_isr = NULL;
4393 vdec_core->isr_context[num].dev_id = NULL;
4394 synchronize_irq(vdec_core->isr_context[num].irq);
4395}
4396EXPORT_SYMBOL(vdec_free_irq);
4397
4398struct vdec_s *vdec_get_default_vdec_for_userdata(void)
4399{
4400 struct vdec_s *vdec;
4401 struct vdec_s *ret_vdec;
4402 struct vdec_core_s *core = vdec_core;
4403 unsigned long flags;
4404 int id;
4405
4406 flags = vdec_core_lock(vdec_core);
4407
4408 id = 0x10000000;
4409 ret_vdec = NULL;
4410 if (!list_empty(&core->connected_vdec_list)) {
4411 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4412 if (vdec->id < id) {
4413 id = vdec->id;
4414 ret_vdec = vdec;
4415 }
4416 }
4417 }
4418
4419 vdec_core_unlock(vdec_core, flags);
4420
4421 return ret_vdec;
4422}
4423EXPORT_SYMBOL(vdec_get_default_vdec_for_userdata);
4424
4425struct vdec_s *vdec_get_vdec_by_id(int vdec_id)
4426{
4427 struct vdec_s *vdec;
4428 struct vdec_s *ret_vdec;
4429 struct vdec_core_s *core = vdec_core;
4430 unsigned long flags;
4431
4432 flags = vdec_core_lock(vdec_core);
4433
4434 ret_vdec = NULL;
4435 if (!list_empty(&core->connected_vdec_list)) {
4436 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4437 if (vdec->id == vdec_id) {
4438 ret_vdec = vdec;
4439 break;
4440 }
4441 }
4442 }
4443
4444 vdec_core_unlock(vdec_core, flags);
4445
4446 return ret_vdec;
4447}
4448EXPORT_SYMBOL(vdec_get_vdec_by_id);
4449
4450int vdec_read_user_data(struct vdec_s *vdec,
4451 struct userdata_param_t *p_userdata_param)
4452{
4453 int ret = 0;
4454
4455 if (!vdec)
4456 vdec = vdec_get_default_vdec_for_userdata();
4457
4458 if (vdec) {
4459 if (vdec->user_data_read)
4460 ret = vdec->user_data_read(vdec, p_userdata_param);
4461 }
4462 return ret;
4463}
4464EXPORT_SYMBOL(vdec_read_user_data);
4465
4466int vdec_wakeup_userdata_poll(struct vdec_s *vdec)
4467{
4468 if (vdec) {
4469 if (vdec->wakeup_userdata_poll)
4470 vdec->wakeup_userdata_poll(vdec);
4471 }
4472
4473 return 0;
4474}
4475EXPORT_SYMBOL(vdec_wakeup_userdata_poll);
4476
4477void vdec_reset_userdata_fifo(struct vdec_s *vdec, int bInit)
4478{
4479 if (!vdec)
4480 vdec = vdec_get_default_vdec_for_userdata();
4481
4482 if (vdec) {
4483 if (vdec->reset_userdata_fifo)
4484 vdec->reset_userdata_fifo(vdec, bInit);
4485 }
4486}
4487EXPORT_SYMBOL(vdec_reset_userdata_fifo);
4488
4489static int dump_mode;
4490static ssize_t dump_risc_mem_store(struct class *class,
4491 struct class_attribute *attr,
4492 const char *buf, size_t size)/*set*/
4493{
4494 unsigned int val;
4495 ssize_t ret;
4496 char dump_mode_str[4] = "PRL";
4497
4498 /*ret = sscanf(buf, "%d", &val);*/
4499 ret = kstrtoint(buf, 0, &val);
4500
4501 if (ret != 0)
4502 return -EINVAL;
4503 dump_mode = val & 0x3;
4504 pr_info("set dump mode to %d,%c_mem\n",
4505 dump_mode, dump_mode_str[dump_mode]);
4506 return size;
4507}
/*
 * Read one word from AMRISC memory through the indirect access pair:
 * write the address to port 0x31b, read the data back from 0x31c.
 * NOTE(review): port numbers are magic — confirm against the AMRISC
 * register spec; which memory is addressed depends on the mode written
 * to 0x31d beforehand (see dump_pmem()/dump_lmem()).
 */
static u32 read_amrisc_reg(int reg)
{
	WRITE_VREG(0x31b, reg);
	return READ_VREG(0x31c);
}
4513
/*
 * Dump AMRISC program memory to the kernel log, one word per line in
 * .o-compatible format. Writes 0x8000 to 0x301 (MPSR) first —
 * presumably halting the AMRISC — then selects pmem via mode 0 on
 * port 0x31d. NOTE(review): the loop bound `i < 0xfff` stops one short
 * of 0xfff — confirm whether the last word is intentionally skipped.
 */
static void dump_pmem(void)
{
	int i;

	WRITE_VREG(0x301, 0x8000);
	WRITE_VREG(0x31d, 0);
	pr_info("start dump amrisc pmem of risc\n");
	for (i = 0; i < 0xfff; i++) {
		/*same as .o format*/
		pr_info("%08x // 0x%04x:\n", read_amrisc_reg(i), i);
	}
}
4526
/*
 * Dump AMRISC local memory to the kernel log, one word per line.
 * Writes 0x8000 to 0x301 (MPSR) first — presumably halting the
 * AMRISC — then selects lmem via mode 2 on port 0x31d.
 * NOTE(review): the loop bound `i < 0x3ff` stops one short of 0x3ff —
 * confirm whether the last word is intentionally skipped.
 */
static void dump_lmem(void)
{
	int i;

	WRITE_VREG(0x301, 0x8000);
	WRITE_VREG(0x31d, 2);
	pr_info("start dump amrisc lmem\n");
	for (i = 0; i < 0x3ff; i++) {
		/*same as */
		pr_info("[%04x] = 0x%08x:\n", i, read_amrisc_reg(i));
	}
}
4539
/*
 * Sysfs show: dump the AMRISC memory selected by dump_risc_mem_store()
 * (0 = pmem, 2 = lmem; other modes are ignored) to the kernel log.
 * Requires VDEC_1 powered on (M8+) or the "vdec" gate opened (M6+).
 * The sysfs buffer itself only ever receives "done\n" (or the power-off
 * notice); the actual dump goes to dmesg.
 */
static ssize_t dump_risc_mem_show(struct class *class,
		struct class_attribute *attr, char *buf)
{
	char *pbuf = buf;
	int ret;

	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
		mutex_lock(&vdec_mutex);
		/* the AMRISC ports are unreadable with the core off */
		if (!vdec_on(VDEC_1)) {
			mutex_unlock(&vdec_mutex);
			pbuf += sprintf(pbuf, "amrisc is power off\n");
			ret = pbuf - buf;
			return ret;
		}
	} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
		/*TODO:M6 define */
		/*
		 * switch_mod_gate_by_type(MOD_VDEC, 1);
		 */
		amports_switch_gate("vdec", 1);
	}
	/*start do**/
	switch (dump_mode) {
	case 0:
		dump_pmem();
		break;
	case 2:
		dump_lmem();
		break;
	default:
		break;
	}

	/*done*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
		mutex_unlock(&vdec_mutex);
	else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
		/*TODO:M6 define */
		/*
		 * switch_mod_gate_by_type(MOD_VDEC, 0);
		 */
		amports_switch_gate("vdec", 0);
	}
	return sprintf(buf, "done\n");
}
4585
4586static ssize_t core_show(struct class *class, struct class_attribute *attr,
4587 char *buf)
4588{
4589 struct vdec_core_s *core = vdec_core;
4590 char *pbuf = buf;
4591
4592 if (list_empty(&core->connected_vdec_list))
4593 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4594 else {
4595 struct vdec_s *vdec;
4596
4597 pbuf += sprintf(pbuf,
4598 " Core: last_sched %p, sched_mask %lx\n",
4599 core->last_vdec,
4600 core->sched_mask);
4601
4602 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4603 pbuf += sprintf(pbuf,
4604 "\tvdec.%d (%p (%s)), status = %s,\ttype = %s, \tactive_mask = %lx\n",
4605 vdec->id,
4606 vdec,
4607 vdec_device_name[vdec->format * 2],
4608 vdec_status_str(vdec),
4609 vdec_type_str(vdec),
4610 vdec->active_mask);
4611 }
4612 }
4613
4614 return pbuf - buf;
4615}
4616
/*
 * vdec_status_show() - sysfs "vdec_status" reader.
 *
 * Under the core lock, walks every CONNECTED/ACTIVE decoder instance,
 * queries it via vdec_status() and prints one statistics section per
 * instance into @buf.
 *
 * NOTE(review): output is built with unbounded sprintf into the
 * PAGE_SIZE sysfs buffer; with many instances this could overrun —
 * consider scnprintf.  Also "bit_rate / 1024 * 8" divides before
 * multiplying, losing up to 8 kbps of precision (presumably a
 * bytes/s -> kbps conversion — confirm units of vs.bit_rate).
 */
static ssize_t vdec_status_show(struct class *class,
			struct class_attribute *attr, char *buf)
{
	char *pbuf = buf;
	struct vdec_s *vdec;
	struct vdec_info vs;
	unsigned char vdec_num = 0;
	struct vdec_core_s *core = vdec_core;
	unsigned long flags = vdec_core_lock(vdec_core);

	if (list_empty(&core->connected_vdec_list)) {
		pbuf += sprintf(pbuf, "No vdec.\n");
		goto out;
	}

	list_for_each_entry(vdec, &core->connected_vdec_list, list) {
		/* report only instances that are scheduled or schedulable */
		if ((vdec->status == VDEC_STATUS_CONNECTED
			|| vdec->status == VDEC_STATUS_ACTIVE)) {
			memset(&vs, 0, sizeof(vs));
			if (vdec_status(vdec, &vs)) {
				pbuf += sprintf(pbuf, "err.\n");
				goto out;
			}
			pbuf += sprintf(pbuf,
				"vdec channel %u statistics:\n",
				vdec_num);
			pbuf += sprintf(pbuf,
				"%13s : %s\n", "device name",
				vs.vdec_name);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame width",
				vs.frame_width);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame height",
				vs.frame_height);
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "frame rate",
				vs.frame_rate, "fps");
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "bit rate",
				vs.bit_rate / 1024 * 8, "kbps");
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "status",
				vs.status);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame dur",
				vs.frame_dur);
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "frame data",
				vs.frame_data / 1024, "KB");
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame count",
				vs.frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "drop count",
				vs.drop_frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "fra err count",
				vs.error_frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "hw err count",
				vs.error_count);
			pbuf += sprintf(pbuf,
				"%13s : %llu %s\n", "total data",
				vs.total_data / 1024, "KB");
			pbuf += sprintf(pbuf,
				"%13s : %x\n\n", "ratio_control",
				vs.ratio_control);

			vdec_num++;
		}
	}
out:
	vdec_core_unlock(vdec_core, flags);
	return pbuf - buf;
}
4693
4694static ssize_t dump_vdec_blocks_show(struct class *class,
4695 struct class_attribute *attr, char *buf)
4696{
4697 struct vdec_core_s *core = vdec_core;
4698 char *pbuf = buf;
4699 unsigned long flags = vdec_core_lock(vdec_core);
4700
4701 if (list_empty(&core->connected_vdec_list))
4702 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4703 else {
4704 struct vdec_s *vdec;
4705 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4706 pbuf += vdec_input_dump_blocks(&vdec->input,
4707 pbuf, PAGE_SIZE - (pbuf - buf));
4708 }
4709 }
4710 vdec_core_unlock(vdec_core, flags);
4711
4712 return pbuf - buf;
4713}
4714static ssize_t dump_vdec_chunks_show(struct class *class,
4715 struct class_attribute *attr, char *buf)
4716{
4717 struct vdec_core_s *core = vdec_core;
4718 char *pbuf = buf;
4719 unsigned long flags = vdec_core_lock(vdec_core);
4720
4721 if (list_empty(&core->connected_vdec_list))
4722 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4723 else {
4724 struct vdec_s *vdec;
4725 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4726 pbuf += vdec_input_dump_chunks(vdec->id, &vdec->input,
4727 pbuf, PAGE_SIZE - (pbuf - buf));
4728 }
4729 }
4730 vdec_core_unlock(vdec_core, flags);
4731
4732 return pbuf - buf;
4733}
4734
4735static ssize_t dump_decoder_state_show(struct class *class,
4736 struct class_attribute *attr, char *buf)
4737{
4738 char *pbuf = buf;
4739 struct vdec_s *vdec;
4740 struct vdec_core_s *core = vdec_core;
4741 unsigned long flags = vdec_core_lock(vdec_core);
4742
4743 if (list_empty(&core->connected_vdec_list)) {
4744 pbuf += sprintf(pbuf, "No vdec.\n");
4745 } else {
4746 list_for_each_entry(vdec,
4747 &core->connected_vdec_list, list) {
4748 if ((vdec->status == VDEC_STATUS_CONNECTED
4749 || vdec->status == VDEC_STATUS_ACTIVE)
4750 && vdec->dump_state)
4751 vdec->dump_state(vdec);
4752 }
4753 }
4754 vdec_core_unlock(vdec_core, flags);
4755
4756 return pbuf - buf;
4757}
4758
4759static ssize_t dump_fps_show(struct class *class,
4760 struct class_attribute *attr, char *buf)
4761{
4762 char *pbuf = buf;
4763 struct vdec_core_s *core = vdec_core;
4764 int i;
4765
4766 unsigned long flags = vdec_fps_lock(vdec_core);
4767 for (i = 0; i < MAX_INSTANCE_MUN; i++)
4768 pbuf += sprintf(pbuf, "%d ", core->decode_fps[i].fps);
4769
4770 pbuf += sprintf(pbuf, "\n");
4771 vdec_fps_unlock(vdec_core, flags);
4772
4773 return pbuf - buf;
4774}
4775
4776
4777
/*
 * Attributes exposed under /sys/class/vdec/.  Read-write entries use
 * 0644-equivalent permissions (S_IRUGO | S_IWUSR | S_IWGRP); debug and
 * frame-check entries are compiled in conditionally.
 */
static struct class_attribute vdec_class_attrs[] = {
	__ATTR_RO(amrisc_regs),
	__ATTR_RO(dump_trace),
	__ATTR_RO(clock_level),
	__ATTR(poweron_clock_level, S_IRUGO | S_IWUSR | S_IWGRP,
	show_poweron_clock_level, store_poweron_clock_level),
	__ATTR(dump_risc_mem, S_IRUGO | S_IWUSR | S_IWGRP,
	dump_risc_mem_show, dump_risc_mem_store),
	__ATTR(keep_vdec_mem, S_IRUGO | S_IWUSR | S_IWGRP,
	show_keep_vdec_mem, store_keep_vdec_mem),
	__ATTR_RO(core),
	__ATTR_RO(vdec_status),
	__ATTR_RO(dump_vdec_blocks),
	__ATTR_RO(dump_vdec_chunks),
	__ATTR_RO(dump_decoder_state),
#ifdef VDEC_DEBUG_SUPPORT
	__ATTR(debug, S_IRUGO | S_IWUSR | S_IWGRP,
	show_debug, store_debug),
#endif
#ifdef FRAME_CHECK
	__ATTR(dump_yuv, S_IRUGO | S_IWUSR | S_IWGRP,
	dump_yuv_show, dump_yuv_store),
	__ATTR(frame_check, S_IRUGO | S_IWUSR | S_IWGRP,
	frame_check_show, frame_check_store),
#endif
	__ATTR_RO(dump_fps),
	__ATTR_NULL	/* terminator required by class_attrs */
};
4806
/* The "/sys/class/vdec" class carrying the attribute table above. */
static struct class vdec_class = {
		.name = "vdec",
		.class_attrs = vdec_class_attrs,
	};
4811
/*
 * get_vdec_device() - return the vdec core's struct device.
 *
 * Exposes the platform device registered in vdec_probe(); valid only
 * after a successful probe.
 */
struct device *get_vdec_device(void)
{
	return &vdec_core->vdec_core_platform_device->dev;
}
EXPORT_SYMBOL(get_vdec_device);
4817
4818static int vdec_probe(struct platform_device *pdev)
4819{
4820 s32 i, r;
4821
4822 vdec_core = (struct vdec_core_s *)devm_kzalloc(&pdev->dev,
4823 sizeof(struct vdec_core_s), GFP_KERNEL);
4824 if (vdec_core == NULL) {
4825 pr_err("vdec core allocation failed.\n");
4826 return -ENOMEM;
4827 }
4828
4829 atomic_set(&vdec_core->vdec_nr, 0);
4830 sema_init(&vdec_core->sem, 1);
4831
4832 r = class_register(&vdec_class);
4833 if (r) {
4834 pr_info("vdec class create fail.\n");
4835 return r;
4836 }
4837
4838 vdec_core->vdec_core_platform_device = pdev;
4839
4840 platform_set_drvdata(pdev, vdec_core);
4841
4842 for (i = 0; i < VDEC_IRQ_MAX; i++) {
4843 vdec_core->isr_context[i].index = i;
4844 vdec_core->isr_context[i].irq = -1;
4845 }
4846
4847 r = vdec_request_threaded_irq(VDEC_IRQ_0, NULL, NULL,
4848 IRQF_ONESHOT, "vdec-0", NULL);
4849 if (r < 0) {
4850 pr_err("vdec interrupt request failed\n");
4851 return r;
4852 }
4853
4854 r = vdec_request_threaded_irq(VDEC_IRQ_1, NULL, NULL,
4855 IRQF_ONESHOT, "vdec-1", NULL);
4856 if (r < 0) {
4857 pr_err("vdec interrupt request failed\n");
4858 return r;
4859 }
4860#if 0
4861 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4862 r = vdec_request_threaded_irq(VDEC_IRQ_HEVC_BACK, NULL, NULL,
4863 IRQF_ONESHOT, "vdec-hevc_back", NULL);
4864 if (r < 0) {
4865 pr_err("vdec interrupt request failed\n");
4866 return r;
4867 }
4868 }
4869#endif
4870 r = of_reserved_mem_device_init(&pdev->dev);
4871 if (r == 0)
4872 pr_info("vdec_probe done\n");
4873
4874 vdec_core->cma_dev = &pdev->dev;
4875
4876 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_M8) {
4877 /* default to 250MHz */
4878 vdec_clock_hi_enable();
4879 }
4880
4881 if (get_cpu_major_id() == AM_MESON_CPU_MAJOR_ID_GXBB) {
4882 /* set vdec dmc request to urgent */
4883 WRITE_DMCREG(DMC_AM5_CHAN_CTRL, 0x3f203cf);
4884 }
4885 INIT_LIST_HEAD(&vdec_core->connected_vdec_list);
4886 spin_lock_init(&vdec_core->lock);
4887 spin_lock_init(&vdec_core->canvas_lock);
4888 spin_lock_init(&vdec_core->fps_lock);
4889 spin_lock_init(&vdec_core->input_lock);
4890 ida_init(&vdec_core->ida);
4891 vdec_core->thread = kthread_run(vdec_core_thread, vdec_core,
4892 "vdec-core");
4893
4894 vdec_core->vdec_core_wq = alloc_ordered_workqueue("%s",__WQ_LEGACY |
4895 WQ_MEM_RECLAIM |WQ_HIGHPRI/*high priority*/, "vdec-work");
4896 /*work queue priority lower than vdec-core.*/
4897 return 0;
4898}
4899
4900static int vdec_remove(struct platform_device *pdev)
4901{
4902 int i;
4903
4904 for (i = 0; i < VDEC_IRQ_MAX; i++) {
4905 if (vdec_core->isr_context[i].irq >= 0) {
4906 free_irq(vdec_core->isr_context[i].irq,
4907 &vdec_core->isr_context[i]);
4908 vdec_core->isr_context[i].irq = -1;
4909 vdec_core->isr_context[i].dev_isr = NULL;
4910 vdec_core->isr_context[i].dev_threaded_isr = NULL;
4911 vdec_core->isr_context[i].dev_id = NULL;
4912 }
4913 }
4914
4915 kthread_stop(vdec_core->thread);
4916
4917 destroy_workqueue(vdec_core->vdec_core_wq);
4918 class_unregister(&vdec_class);
4919
4920 return 0;
4921}
4922
/*
 * Device-tree match table.  The compatible string contains a space
 * ("amlogic, vdec") — kept as-is for compatibility with existing
 * device trees.
 */
static const struct of_device_id amlogic_vdec_dt_match[] = {
	{
		.compatible = "amlogic, vdec",
	},
	{},
};
4929
/* Tunables published through the "media.decoder"/"vdec" config node. */
static struct mconfig vdec_configs[] = {
	MC_PU32("debug_trace_num", &debug_trace_num),
	MC_PI32("hevc_max_reset_count", &hevc_max_reset_count),
	MC_PU32("clk_config", &clk_config),
	MC_PI32("step_mode", &step_mode),
	MC_PI32("poweron_clock_level", &poweron_clock_level),
};
/* Node handle registered in vdec_module_init(). */
static struct mconfig_node vdec_node;
4938
/* Platform driver bound to the "amlogic, vdec" DT node. */
static struct platform_driver vdec_driver = {
	.probe = vdec_probe,
	.remove = vdec_remove,
	.driver = {
		.name = "vdec",
		.of_match_table = amlogic_vdec_dt_match,
	}
};
4947
/* Advertises DRM frame-mode support of the vdec input path. */
static struct codec_profile_t amvdec_input_profile = {
	.name = "vdec_input",
	.profile = "drm_framemode"
};
4952
4953int vdec_module_init(void)
4954{
4955 if (platform_driver_register(&vdec_driver)) {
4956 pr_info("failed to register vdec module\n");
4957 return -ENODEV;
4958 }
4959 INIT_REG_NODE_CONFIGS("media.decoder", &vdec_node,
4960 "vdec", vdec_configs, CONFIG_FOR_RW);
4961 vcodec_profile_register(&amvdec_input_profile);
4962 return 0;
4963}
4964EXPORT_SYMBOL(vdec_module_init);
4965
/* Module exit counterpart of vdec_module_init(). */
void vdec_module_exit(void)
{
	platform_driver_unregister(&vdec_driver);
}
EXPORT_SYMBOL(vdec_module_exit);
4971
#if 0
/*
 * Dead code: duplicates the exported vdec_module_init()/
 * vdec_module_exit() above (minus the profile registration).  Kept
 * under #if 0 upstream; candidate for deletion.
 */
static int __init vdec_module_init(void)
{
	if (platform_driver_register(&vdec_driver)) {
		pr_info("failed to register vdec module\n");
		return -ENODEV;
	}
	INIT_REG_NODE_CONFIGS("media.decoder", &vdec_node,
		"vdec", vdec_configs, CONFIG_FOR_RW);
	return 0;
}

static void __exit vdec_module_exit(void)
{
	platform_driver_unregister(&vdec_driver);
}
#endif
4989
/*
 * Reserved-memory device_init hook: record the device that owns the
 * vdec reserved region so later CMA allocations target it.
 */
static int vdec_mem_device_init(struct reserved_mem *rmem, struct device *dev)
{
	vdec_core->cma_dev = dev;

	return 0;
}
4996
/* Ops attached to the "amlogic, vdec-memory" reserved region. */
static const struct reserved_mem_ops rmem_vdec_ops = {
	.device_init = vdec_mem_device_init,
};
5000
/*
 * Early reserved-memory setup (see RESERVEDMEM_OF_DECLARE below):
 * attaches rmem_vdec_ops so device_init fires when a device claims
 * the region.
 */
static int __init vdec_mem_setup(struct reserved_mem *rmem)
{
	rmem->ops = &rmem_vdec_ops;
	pr_info("vdec: reserved mem setup\n");

	return 0;
}
5008
/*
 * vdec_fill_frame_info() - producer side of the per-frame QoS ring.
 *
 * Appends one vframe_qos_s record at frame_qos_wr (wrapping at
 * QOS_FRAME_NUM); the consumer is vdec_get_qos_info().  Optionally
 * prints the record when frameinfo_flag == PRINT_FRAME_INFO, and does
 * nothing when frameinfo_flag == DISABLE_FRAME_INFO.
 *
 * @vframe_qos: record to store (copied).
 * @debug: unused here — presumably a caller-side verbosity hint;
 *         TODO confirm against callers.
 *
 * NOTE(review): no locking; producer/consumer race on frame_qos_wr is
 * apparently tolerated (indices are clamped before use).
 */
void vdec_fill_frame_info(struct vframe_qos_s *vframe_qos, int debug)
{
	if (frame_info_buf_in == NULL) {
		pr_info("error,frame_info_buf_in is null\n");
		return;
	}
	if (frame_info_buf_out == NULL) {
		pr_info("error,frame_info_buf_out is null\n");
		return;
	}
	/* wrap the write index before use */
	if (frame_qos_wr >= QOS_FRAME_NUM)
		frame_qos_wr = 0;

	/* defensive range check (redundant after the clamp above,
	 * kept as a belt-and-braces guard)
	 */
	if (frame_qos_wr >= QOS_FRAME_NUM ||
		frame_qos_wr < 0) {
		pr_info("error,index :%d is error\n", frame_qos_wr);
		return;
	}
	if (frameinfo_flag == DISABLE_FRAME_INFO)
		return;

	if (frameinfo_flag == PRINT_FRAME_INFO) {
		pr_info("num %d size %d pts %d\n",
			vframe_qos->num,
			vframe_qos->size,
			vframe_qos->pts);
		pr_info("mv min_mv %d avg_mv %d max_mv %d\n",
			vframe_qos->min_mv,
			vframe_qos->avg_mv,
			vframe_qos->max_mv);
		pr_info("qp min_qp %d avg_qp %d max_qp %d\n",
			vframe_qos->min_qp,
			vframe_qos->avg_qp,
			vframe_qos->max_qp);
		pr_info("skip min_skip %d avg_skip %d max_skip %d\n",
			vframe_qos->min_skip,
			vframe_qos->avg_skip,
			vframe_qos->max_skip);
	}
	/* store and advance, wrapping for the next call */
	memcpy(&frame_info_buf_in[frame_qos_wr++],
			vframe_qos, sizeof(struct vframe_qos_s));
	if (frame_qos_wr >= QOS_FRAME_NUM)
		frame_qos_wr = 0;

	/*pr_info("frame_qos_wr:%d\n", frame_qos_wr);*/

}
EXPORT_SYMBOL(vdec_fill_frame_info);
5056EXPORT_SYMBOL(vdec_fill_frame_info);
5057
5058struct vframe_qos_s *vdec_get_qos_info(void)
5059{
5060 int write_count = 0;
5061 int qos_wr = frame_qos_wr;
5062
5063 if (frame_info_buf_in == NULL) {
5064 pr_info("error,frame_info_buf_in is null\n");
5065 return NULL;
5066 }
5067 if (frame_info_buf_out == NULL) {
5068 pr_info("error,frame_info_buf_out is null\n");
5069 return NULL;
5070 }
5071
5072
5073 memset(frame_info_buf_out, 0,
5074 QOS_FRAME_NUM*sizeof(struct vframe_qos_s));
5075 if (frame_qos_rd > qos_wr) {
5076 write_count = QOS_FRAME_NUM - frame_qos_rd;
5077 if (write_count > 0 && write_count <= QOS_FRAME_NUM) {
5078 memcpy(frame_info_buf_out, &frame_info_buf_in[0],
5079 write_count*sizeof(struct vframe_qos_s));
5080 if ((write_count + qos_wr) <= QOS_FRAME_NUM)
5081 memcpy(&frame_info_buf_out[write_count], frame_info_buf_in,
5082 qos_wr*sizeof(struct vframe_qos_s));
5083 else
5084 pr_info("get_qos_info:%d,out of range\n", __LINE__);
5085 } else
5086 pr_info("get_qos_info:%d,out of range\n", __LINE__);
5087 } else if (frame_qos_rd < qos_wr) {
5088 write_count = qos_wr - frame_qos_rd;
5089 if (write_count > 0 && write_count < QOS_FRAME_NUM)
5090 memcpy(frame_info_buf_out, &frame_info_buf_in[frame_qos_rd],
5091 (write_count)*sizeof(struct vframe_qos_s));
5092 else
5093 pr_info("get_qos_info:%d, out of range\n", __LINE__);
5094 }
5095 /*
5096 pr_info("cnt:%d,size:%d,num:%d,rd:%d,wr:%d\n",
5097 wirte_count,
5098 frame_info_buf_out[0].size,
5099 frame_info_buf_out[0].num,
5100 frame_qos_rd,qos_wr);
5101 */
5102 frame_qos_rd = qos_wr;
5103 return frame_info_buf_out;
5104}
5105EXPORT_SYMBOL(vdec_get_qos_info);
5106
5107
/* Bind the "amlogic, vdec-memory" reserved-memory node to vdec_mem_setup. */
RESERVEDMEM_OF_DECLARE(vdec, "amlogic, vdec-memory", vdec_mem_setup);
/*
uint force_hevc_clock_cntl;
EXPORT_SYMBOL(force_hevc_clock_cntl);

module_param(force_hevc_clock_cntl, uint, 0664);
*/
/* Debug/tuning knobs (0664: owner/group writable, world readable). */
module_param(debug, uint, 0664);
module_param(debug_trace_num, uint, 0664);
module_param(hevc_max_reset_count, int, 0664);
module_param(clk_config, uint, 0664);
module_param(step_mode, int, 0664);
module_param(debugflags, int, 0664);
module_param(parallel_decode, int, 0664);
module_param(fps_detection, int, 0664);
module_param(fps_clear, int, 0664);
module_param(force_nosecure_even_drm, int, 0664);
module_param(disable_switch_single_to_mult, int, 0664);

module_param(frameinfo_flag, int, 0664);
MODULE_PARM_DESC(frameinfo_flag,
				"\n frameinfo_flag\n");
module_param(v4lvideo_add_di, int, 0664);
MODULE_PARM_DESC(v4lvideo_add_di,
				"\n v4lvideo_add_di\n");

module_param(max_di_instance, int, 0664);
MODULE_PARM_DESC(max_di_instance,
				"\n max_di_instance\n");

/*
*module_init(vdec_module_init);
*module_exit(vdec_module_exit);
*/
/* Instantiate the tracepoints declared in vdec_trace.h; exactly one
 * translation unit may define CREATE_TRACE_POINTS before the include.
 */
#define CREATE_TRACE_POINTS
#include "vdec_trace.h"
MODULE_DESCRIPTION("AMLOGIC vdec driver");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Tim Yao <timyao@amlogic.com>");
5147