1/*
2 * drivers/amlogic/media/frame_provider/decoder/utils/vdec.c
3 *
4 * Copyright (C) 2016 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/spinlock.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/delay.h>
23#include <linux/kthread.h>
24#include <linux/platform_device.h>
25#include <linux/uaccess.h>
26#include <linux/semaphore.h>
27#include <linux/sched/rt.h>
28#include <linux/interrupt.h>
29#include <linux/amlogic/media/utils/vformat.h>
30#include <linux/amlogic/iomap.h>
31#include <linux/amlogic/media/canvas/canvas.h>
32#include <linux/amlogic/media/vfm/vframe.h>
33#include <linux/amlogic/media/vfm/vframe_provider.h>
34#include <linux/amlogic/media/vfm/vframe_receiver.h>
35#include <linux/amlogic/media/video_sink/ionvideo_ext.h>
36#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
37#include <linux/amlogic/media/video_sink/v4lvideo_ext.h>
38#endif
39#include <linux/amlogic/media/vfm/vfm_ext.h>
40/*for VDEC_DEBUG_SUPPORT*/
41#include <linux/time.h>
42
43#include <linux/amlogic/media/utils/vdec_reg.h>
44#include "vdec.h"
45#include "vdec_trace.h"
46#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
47#include "vdec_profile.h"
48#endif
49#include <linux/of.h>
50#include <linux/of_fdt.h>
51#include <linux/libfdt_env.h>
52#include <linux/of_reserved_mem.h>
53#include <linux/dma-contiguous.h>
54#include <linux/cma.h>
55#include <linux/module.h>
56#include <linux/slab.h>
57#include <linux/dma-mapping.h>
58#include <linux/dma-contiguous.h>
59#include "../../../stream_input/amports/amports_priv.h"
60
61#include <linux/amlogic/media/utils/amports_config.h>
62#include "../utils/amvdec.h"
63#include "vdec_input.h"
64
65#include "../../../common/media_clock/clk/clk.h"
66#include <linux/reset.h>
67#include <linux/amlogic/cpu_version.h>
68#include <linux/amlogic/media/codec_mm/codec_mm.h>
69#include <linux/amlogic/media/video_sink/video_keeper.h>
70#include <linux/amlogic/media/codec_mm/configs.h>
71#include <linux/amlogic/media/frame_sync/ptsserv.h>
72#include "secprot.h"
73#include "../../../common/chips/decoder_cpu_ver_info.h"
74#include "frame_check.h"
75
76#ifdef CONFIG_AMLOGIC_POWER
77#include <linux/amlogic/power_ctrl.h>
78#endif
79
80static DEFINE_MUTEX(vdec_mutex);
81
82#define MC_SIZE (4096 * 4)
83#define CMA_ALLOC_SIZE SZ_64M
84#define MEM_NAME "vdec_prealloc"
85static int inited_vcodec_num;
86#define jiffies_ms div64_u64(get_jiffies_64() * 1000, HZ)
87static int poweron_clock_level;
88static int keep_vdec_mem;
89static unsigned int debug_trace_num = 16 * 20;
90static int step_mode;
91static unsigned int clk_config;
92/*
93 &1: sched_priority to MAX_RT_PRIO -1.
94 &2: always reload firmware.
95 &4: vdec canvas debug enable
96 */
97static unsigned int debug;
98
99static int hevc_max_reset_count;
100
101static int no_powerdown;
102static int parallel_decode = 1;
103static int fps_detection;
104static int fps_clear;
105
106
107static int force_nosecure_even_drm;
108static int disable_switch_single_to_mult;
109
110static DEFINE_SPINLOCK(vdec_spin_lock);
111
112#define HEVC_TEST_LIMIT 100
113#define GXBB_REV_A_MINOR 0xA
114
115#define PRINT_FRAME_INFO 1
116#define DISABLE_FRAME_INFO 2
117
118static int frameinfo_flag = 0;
119static int v4lvideo_add_di = 1;
120static int max_di_instance = 2;
121
122//static int path_debug = 0;
123
124static int enable_mvdec_info = 1;
125
126int decode_underflow = 0;
127
128#define CANVAS_MAX_SIZE (AMVDEC_CANVAS_MAX1 - AMVDEC_CANVAS_START_INDEX + 1 + AMVDEC_CANVAS_MAX2 + 1)
129
130struct am_reg {
131 char *name;
132 int offset;
133};
134
135struct vdec_isr_context_s {
136 int index;
137 int irq;
138 irq_handler_t dev_isr;
139 irq_handler_t dev_threaded_isr;
140 void *dev_id;
141 struct vdec_s *vdec;
142};
143
144struct decode_fps_s {
145 u32 frame_count;
146 u64 start_timestamp;
147 u64 last_timestamp;
148 u32 fps;
149};
150
151struct vdec_core_s {
152 struct list_head connected_vdec_list;
153 spinlock_t lock;
154 spinlock_t canvas_lock;
155 spinlock_t fps_lock;
156 spinlock_t input_lock;
157 struct ida ida;
158 atomic_t vdec_nr;
159 struct vdec_s *vfm_vdec;
160 struct vdec_s *active_vdec;
161 struct vdec_s *active_hevc;
162 struct vdec_s *hint_fr_vdec;
163 struct platform_device *vdec_core_platform_device;
164 struct device *cma_dev;
165 struct semaphore sem;
166 struct task_struct *thread;
167 struct workqueue_struct *vdec_core_wq;
168
169 unsigned long sched_mask;
170 struct vdec_isr_context_s isr_context[VDEC_IRQ_MAX];
171 int power_ref_count[VDEC_MAX];
172 struct vdec_s *last_vdec;
173 int parallel_dec;
174 unsigned long power_ref_mask;
175 int vdec_combine_flag;
176 struct decode_fps_s decode_fps[MAX_INSTANCE_MUN];
177 unsigned long buff_flag;
178 unsigned long stream_buff_flag;
179};
180
181struct canvas_status_s {
182 int type;
183 int canvas_used_flag;
184 int id;
185};
186
187
188static struct vdec_core_s *vdec_core;
189
190static const char * const vdec_status_string[] = {
191 "VDEC_STATUS_UNINITIALIZED",
192 "VDEC_STATUS_DISCONNECTED",
193 "VDEC_STATUS_CONNECTED",
194 "VDEC_STATUS_ACTIVE"
195};
196/*
197bit [28] enable print
198bit [23:16] etc
199bit [15:12]
200 non-0 and not 0x1: force single
201 non-0 and 0x1: force multi
202bit [8]
203 1: force dual
204bit [3]
205 1: use mavs for single mode
206bit [2]
207 1: force vfm path for frame mode
208bit [1]
209 1: force esparser auto mode
210bit [0]
211 1: disable auto manual mode (?)
212*/
213
214static int debugflags;
215
216static char vfm_path[VDEC_MAP_NAME_SIZE] = {"disable"};
217static const char vfm_path_node[][VDEC_MAP_NAME_SIZE] =
218{
219 "video_render.0",
220 "video_render.1",
221 "amvideo",
222 "videopip",
223 "deinterlace",
224 "dimulti.1",
225 "amlvideo",
226 "aml_video.1",
227 "amlvideo2.0",
228 "amlvideo2.1",
229 "ppmgr",
230 "ionvideo",
231 "ionvideo.1",
232 "ionvideo.2",
233 "ionvideo.3",
234 "ionvideo.4",
235 "ionvideo.5",
236 "ionvideo.6",
237 "ionvideo.7",
238 "ionvideo.8",
239 "videosync.0",
240 "v4lvideo.0",
241 "v4lvideo.1",
242 "v4lvideo.2",
243 "v4lvideo.3",
244 "v4lvideo.4",
245 "v4lvideo.5",
246 "v4lvideo.6",
247 "v4lvideo.7",
248 "v4lvideo.8",
249 "disable",
250 "reserved",
251};
252
253static struct canvas_status_s canvas_stat[CANVAS_MAX_SIZE];
254
255
256int vdec_get_debug_flags(void)
257{
258 return debugflags;
259}
260EXPORT_SYMBOL(vdec_get_debug_flags);
261
262void VDEC_PRINT_FUN_LINENO(const char *fun, int line)
263{
264 if (debugflags & 0x10000000)
265 pr_info("%s, %d\n", fun, line);
266}
267EXPORT_SYMBOL(VDEC_PRINT_FUN_LINENO);
268
269unsigned char is_mult_inc(unsigned int type)
270{
271 unsigned char ret = 0;
272 if (vdec_get_debug_flags() & 0xf000)
273 ret = (vdec_get_debug_flags() & 0x1000)
274 ? 1 : 0;
275 else if (type & PORT_TYPE_DECODER_SCHED)
276 ret = 1;
277 return ret;
278}
279EXPORT_SYMBOL(is_mult_inc);
280
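/*
 * Illustrative sketch (not built): how a port open path can use
 * is_mult_inc() to choose the vdec type. This mirrors the selection
 * logic in vdec_create() further below; example_pick_vdec_type() is a
 * hypothetical helper, not an additional driver API.
 */
#if 0
static int example_pick_vdec_type(struct stream_port_s *port)
{
	if (is_mult_inc(port->type))
		return (port->type & PORT_TYPE_FRAME) ?
			VDEC_TYPE_FRAME_BLOCK : VDEC_TYPE_STREAM_PARSER;
	return VDEC_TYPE_SINGLE;
}
#endif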
281static const bool cores_with_input[VDEC_MAX] = {
282 true, /* VDEC_1 */
283 false, /* VDEC_HCODEC */
284 false, /* VDEC_2 */
285 true, /* VDEC_HEVC / VDEC_HEVC_FRONT */
286 false, /* VDEC_HEVC_BACK */
287};
288
289static const int cores_int[VDEC_MAX] = {
290 VDEC_IRQ_1,
291 VDEC_IRQ_2,
292 VDEC_IRQ_0,
293 VDEC_IRQ_0,
294 VDEC_IRQ_HEVC_BACK
295};
296
297unsigned long vdec_canvas_lock(struct vdec_core_s *core)
298{
299 unsigned long flags;
300 spin_lock_irqsave(&core->canvas_lock, flags);
301
302 return flags;
303}
304
305void vdec_canvas_unlock(struct vdec_core_s *core, unsigned long flags)
306{
307 spin_unlock_irqrestore(&core->canvas_lock, flags);
308}
309
310unsigned long vdec_fps_lock(struct vdec_core_s *core)
311{
312 unsigned long flags;
313 spin_lock_irqsave(&core->fps_lock, flags);
314
315 return flags;
316}
317
318void vdec_fps_unlock(struct vdec_core_s *core, unsigned long flags)
319{
320 spin_unlock_irqrestore(&core->fps_lock, flags);
321}
322
323unsigned long vdec_core_lock(struct vdec_core_s *core)
324{
325 unsigned long flags;
326
327 spin_lock_irqsave(&core->lock, flags);
328
329 return flags;
330}
331
332void vdec_core_unlock(struct vdec_core_s *core, unsigned long flags)
333{
334 spin_unlock_irqrestore(&core->lock, flags);
335}
336
337unsigned long vdec_inputbuff_lock(struct vdec_core_s *core)
338{
339 unsigned long flags;
340
341 spin_lock_irqsave(&core->input_lock, flags);
342
343 return flags;
344}
345
346void vdec_inputbuff_unlock(struct vdec_core_s *core, unsigned long flags)
347{
348 spin_unlock_irqrestore(&core->input_lock, flags);
349}
350
351
352static bool vdec_is_input_frame_empty(struct vdec_s *vdec) {
353 struct vdec_core_s *core = vdec_core;
354 bool ret;
355 unsigned long flags;
356
357 flags = vdec_inputbuff_lock(core);
358 ret = !(vdec->core_mask & core->buff_flag);
359 vdec_inputbuff_unlock(core, flags);
360
361 return ret;
362}
363
364static void vdec_up(struct vdec_s *vdec)
365{
366 struct vdec_core_s *core = vdec_core;
367
368 if (debug & 8)
369 pr_info("vdec_up, id:%d\n", vdec->id);
370 up(&core->sem);
371}
372
373
374static u64 vdec_get_us_time_system(void)
375{
376 struct timeval tv;
377
378 do_gettimeofday(&tv);
379
380 return div64_u64(timeval_to_ns(&tv), 1000);
381}
382
383static void vdec_fps_clear(int id)
384{
385 if (id >= MAX_INSTANCE_MUN)
386 return;
387
388 vdec_core->decode_fps[id].frame_count = 0;
389 vdec_core->decode_fps[id].start_timestamp = 0;
390 vdec_core->decode_fps[id].last_timestamp = 0;
391 vdec_core->decode_fps[id].fps = 0;
392}
393
394static void vdec_fps_clearall(void)
395{
396 int i;
397
398 for (i = 0; i < MAX_INSTANCE_MUN; i++) {
399 vdec_core->decode_fps[i].frame_count = 0;
400 vdec_core->decode_fps[i].start_timestamp = 0;
401 vdec_core->decode_fps[i].last_timestamp = 0;
402 vdec_core->decode_fps[i].fps = 0;
403 }
404}
405
406static void vdec_fps_detec(int id)
407{
408 unsigned long flags;
409
410 if (fps_detection == 0)
411 return;
412
413 if (id >= MAX_INSTANCE_MUN)
414 return;
415
416 flags = vdec_fps_lock(vdec_core);
417
418 if (fps_clear == 1) {
419 vdec_fps_clearall();
420 fps_clear = 0;
421 }
422
423 vdec_core->decode_fps[id].frame_count++;
424 if (vdec_core->decode_fps[id].frame_count == 1) {
425 vdec_core->decode_fps[id].start_timestamp =
426 vdec_get_us_time_system();
427 vdec_core->decode_fps[id].last_timestamp =
428 vdec_core->decode_fps[id].start_timestamp;
429 } else {
430 vdec_core->decode_fps[id].last_timestamp =
431 vdec_get_us_time_system();
432 vdec_core->decode_fps[id].fps =
433 (u32)div_u64(((u64)(vdec_core->decode_fps[id].frame_count) *
434 10000000000),
435 (vdec_core->decode_fps[id].last_timestamp -
436 vdec_core->decode_fps[id].start_timestamp));
437 }
438 vdec_fps_unlock(vdec_core, flags);
439}
440
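/*
 * Worked example (illustrative): the timestamps above are in
 * microseconds, so decode_fps[].fps = frame_count * 10^10 / elapsed_us
 * appears to hold frames-per-second scaled by 10^4. E.g. 300 frames
 * decoded over 10,000,000 us (10 s) gives 300 * 10^10 / 10^7 = 300000,
 * i.e. 30.0000 fps.
 */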
441
442
443static int get_canvas(unsigned int index, unsigned int base)
444{
445 int start;
446 int canvas_index = index * base;
447 int ret;
448
449 if ((base > 4) || (base == 0))
450 return -1;
451
452 if ((AMVDEC_CANVAS_START_INDEX + canvas_index + base - 1)
453 <= AMVDEC_CANVAS_MAX1) {
454 start = AMVDEC_CANVAS_START_INDEX + base * index;
455 } else {
456 canvas_index -= (AMVDEC_CANVAS_MAX1 -
457 AMVDEC_CANVAS_START_INDEX + 1) / base * base;
458 if (canvas_index <= AMVDEC_CANVAS_MAX2)
459 start = canvas_index / base;
460 else
461 return -1;
462 }
463
464 if (base == 1) {
465 ret = start;
466 } else if (base == 2) {
467 ret = ((start + 1) << 16) | ((start + 1) << 8) | start;
468 } else if (base == 3) {
469 ret = ((start + 2) << 16) | ((start + 1) << 8) | start;
470 } else if (base == 4) {
471 ret = (((start + 3) << 24) | (start + 2) << 16) |
472 ((start + 1) << 8) | start;
473 }
474
475 return ret;
476}
477
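/*
 * Worked example (illustrative): for base == 3, the first branch above
 * packs three consecutive canvas indices into one return value, one per
 * byte:
 *   start = AMVDEC_CANVAS_START_INDEX + 3 * index
 *   ret   = ((start + 2) << 16) | ((start + 1) << 8) | start
 * i.e. bits [7:0] hold the first canvas and bits [15:8]/[23:16] the
 * next two; base == 4 additionally places a fourth index in bits [31:24].
 */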
478static int get_canvas_ex(int type, int id)
479{
480 int i;
481 unsigned long flags;
482
483 flags = vdec_canvas_lock(vdec_core);
484
485 for (i = 0; i < CANVAS_MAX_SIZE; i++) {
486 /* canvases 0x10-0x15 are used by rdma */
487 if ((i >= 0x10) && (i <= 0x15))
488 continue;
489 if ((canvas_stat[i].type == type) &&
490 (canvas_stat[i].id & (1 << id)) == 0) {
491 canvas_stat[i].canvas_used_flag++;
492 canvas_stat[i].id |= (1 << id);
493 if (debug & 4)
494 pr_debug("get used canvas %d\n", i);
495 vdec_canvas_unlock(vdec_core, flags);
496 if (i < AMVDEC_CANVAS_MAX2 + 1)
497 return i;
498 else
499 return (i + AMVDEC_CANVAS_START_INDEX - AMVDEC_CANVAS_MAX2 - 1);
500 }
501 }
502
503 for (i = 0; i < CANVAS_MAX_SIZE; i++) {
504 /* canvases 0x10-0x15 are used by rdma */
505 if ((i >= 0x10) && (i <= 0x15))
506 continue;
507 if (canvas_stat[i].type == 0) {
508 canvas_stat[i].type = type;
509 canvas_stat[i].canvas_used_flag = 1;
510 canvas_stat[i].id = (1 << id);
511 if (debug & 4) {
512 pr_debug("get canvas %d\n", i);
513 pr_debug("canvas_used_flag %d\n",
514 canvas_stat[i].canvas_used_flag);
515 pr_debug("canvas_stat[i].id %d\n",
516 canvas_stat[i].id);
517 }
518 vdec_canvas_unlock(vdec_core, flags);
519 if (i < AMVDEC_CANVAS_MAX2 + 1)
520 return i;
521 else
522 return (i + AMVDEC_CANVAS_START_INDEX - AMVDEC_CANVAS_MAX2 - 1);
523 }
524 }
525 vdec_canvas_unlock(vdec_core, flags);
526
527 pr_info("cannot get canvas\n");
528
529 return -1;
530}
531
532static void free_canvas_ex(int index, int id)
533{
534 unsigned long flags;
535 int offset;
536
537 flags = vdec_canvas_lock(vdec_core);
538 if (index >= 0 &&
539 index < AMVDEC_CANVAS_MAX2 + 1)
540 offset = index;
541 else if ((index >= AMVDEC_CANVAS_START_INDEX) &&
542 (index <= AMVDEC_CANVAS_MAX1))
543 offset = index + AMVDEC_CANVAS_MAX2 + 1 - AMVDEC_CANVAS_START_INDEX;
544 else {
545 vdec_canvas_unlock(vdec_core, flags);
546 return;
547 }
548
549 if ((canvas_stat[offset].canvas_used_flag > 0) &&
550 (canvas_stat[offset].id & (1 << id))) {
551 canvas_stat[offset].canvas_used_flag--;
552 canvas_stat[offset].id &= ~(1 << id);
553 if (canvas_stat[offset].canvas_used_flag == 0) {
554 canvas_stat[offset].type = 0;
555 canvas_stat[offset].id = 0;
556 }
557 if (debug & 4) {
558 pr_debug("free index %d used_flag %d, type = %d, id = %d\n",
559 offset,
560 canvas_stat[offset].canvas_used_flag,
561 canvas_stat[offset].type,
562 canvas_stat[offset].id);
563 }
564 }
565 vdec_canvas_unlock(vdec_core, flags);
566
567 return;
568
569}
570
571static void vdec_dmc_pipeline_reset(void)
572{
573 /*
574 * bit15: vdec_dmc_pipeline
575 * bit14: hevc_dmc_pipeline
576 * bit13: hevcf_dmc_pipeline
577 * bit12: wave420_dmc_pipeline
578 * bit11: hcodec_dmc_pipeline
579 */
580
581 WRITE_RESET_REG(RESET7_REGISTER,
582 (1 << 15) | (1 << 14) | (1 << 13) |
583 (1 << 12) | (1 << 11));
584}
585
586static void vdec_stop_armrisc(int hw)
587{
588 ulong timeout = jiffies + HZ;
589
590 if (hw == VDEC_INPUT_TARGET_VLD) {
591 WRITE_VREG(MPSR, 0);
592 WRITE_VREG(CPSR, 0);
593
594 while (READ_VREG(IMEM_DMA_CTRL) & 0x8000) {
595 if (time_after(jiffies, timeout))
596 break;
597 }
598
599 timeout = jiffies + HZ;
600 while (READ_VREG(LMEM_DMA_CTRL) & 0x8000) {
601 if (time_after(jiffies, timeout))
602 break;
603 }
604 } else if (hw == VDEC_INPUT_TARGET_HEVC) {
605 WRITE_VREG(HEVC_MPSR, 0);
606 WRITE_VREG(HEVC_CPSR, 0);
607
608 while (READ_VREG(HEVC_IMEM_DMA_CTRL) & 0x8000) {
609 if (time_after(jiffies, timeout))
610 break;
611 }
612
613 timeout = jiffies + HZ/10;
614 while (READ_VREG(HEVC_LMEM_DMA_CTRL) & 0x8000) {
615 if (time_after(jiffies, timeout))
616 break;
617 }
618 }
619}
620
621static void vdec_disable_DMC(struct vdec_s *vdec)
622{
623 /* close first, then wait for pending requests to finish; timing suggested by VLSI */
624 struct vdec_input_s *input = &vdec->input;
625 unsigned long flags;
626 unsigned int mask = 0;
627
628 if (input->target == VDEC_INPUT_TARGET_VLD) {
629 mask = (1 << 13);
630 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
631 mask = (1 << 21);
632 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
633 mask = (1 << 4); /*hevc*/
634 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
635 mask |= (1 << 8); /*hevcb */
636 }
637
638 /* need to stop armrisc. */
639 if (!IS_ERR_OR_NULL(vdec->dev))
640 vdec_stop_armrisc(input->target);
641
642 spin_lock_irqsave(&vdec_spin_lock, flags);
643 codec_dmcbus_write(DMC_REQ_CTRL,
644 codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
645 spin_unlock_irqrestore(&vdec_spin_lock, flags);
646
647 if (is_cpu_tm2_revb()) {
648 while (!(codec_dmcbus_read(TM2_REVB_DMC_CHAN_STS)
649 & mask))
650 ;
651 } else {
652 while (!(codec_dmcbus_read(DMC_CHAN_STS)
653 & mask))
654 ;
655 }
656
657 pr_debug("%s input->target= 0x%x\n", __func__, input->target);
658}
659
660static void vdec_enable_DMC(struct vdec_s *vdec)
661{
662 struct vdec_input_s *input = &vdec->input;
663 unsigned long flags;
664 unsigned int mask = 0;
665
666 if (input->target == VDEC_INPUT_TARGET_VLD) {
667 mask = (1 << 13);
668 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
669 mask = (1 << 21);
670 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
671 mask = (1 << 4); /*hevc*/
672 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
673 mask |= (1 << 8); /*hevcb */
674 }
675
676 /* the DMC pipeline must be reset if the chip is G12B. */
677 if (get_cpu_type() == AM_MESON_CPU_MAJOR_ID_G12B)
678 vdec_dmc_pipeline_reset();
679
680 spin_lock_irqsave(&vdec_spin_lock, flags);
681 codec_dmcbus_write(DMC_REQ_CTRL,
682 codec_dmcbus_read(DMC_REQ_CTRL) | mask);
683 spin_unlock_irqrestore(&vdec_spin_lock, flags);
684 pr_debug("%s input->target= 0x%x\n", __func__, input->target);
685}
686
687
688
689static int vdec_get_hw_type(int value)
690{
691 int type;
692 switch (value) {
693 case VFORMAT_HEVC:
694 case VFORMAT_VP9:
695 case VFORMAT_AVS2:
696 case VFORMAT_AV1:
697 type = CORE_MASK_HEVC;
698 break;
699
700 case VFORMAT_MPEG12:
701 case VFORMAT_MPEG4:
702 case VFORMAT_H264:
703 case VFORMAT_MJPEG:
704 case VFORMAT_REAL:
705 case VFORMAT_JPEG:
706 case VFORMAT_VC1:
707 case VFORMAT_AVS:
708 case VFORMAT_YUV:
709 case VFORMAT_H264MVC:
710 case VFORMAT_H264_4K2K:
711 case VFORMAT_H264_ENC:
712 case VFORMAT_JPEG_ENC:
713 type = CORE_MASK_VDEC_1;
714 break;
715
716 default:
717 type = -1;
718 }
719
720 return type;
721}
722
723
724static void vdec_save_active_hw(struct vdec_s *vdec)
725{
726 int type;
727
728 type = vdec_get_hw_type(vdec->port->vformat);
729
730 if (type == CORE_MASK_HEVC) {
731 vdec_core->active_hevc = vdec;
732 } else if (type == CORE_MASK_VDEC_1) {
733 vdec_core->active_vdec = vdec;
734 } else {
735 pr_info("save_active_hw wrong\n");
736 }
737}
738
739static void vdec_update_buff_status(void)
740{
741 struct vdec_core_s *core = vdec_core;
742 unsigned long flags;
743 struct vdec_s *vdec;
744
745 flags = vdec_inputbuff_lock(core);
746 core->buff_flag = 0;
747 core->stream_buff_flag = 0;
748 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
749 struct vdec_input_s *input = &vdec->input;
750 if (input_frame_based(input)) {
751 if (input->have_frame_num || input->eos)
752 core->buff_flag |= vdec->core_mask;
753 } else if (input_stream_based(input)) {
754 core->stream_buff_flag |= vdec->core_mask;
755 }
756 }
757 vdec_inputbuff_unlock(core, flags);
758}
759
760#if 0
761void vdec_update_streambuff_status(void)
762{
763 struct vdec_core_s *core = vdec_core;
764 struct vdec_s *vdec;
765
766 /* check streaming prepare level threshold if not EOS */
767 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
768 struct vdec_input_s *input = &vdec->input;
769 if (input && input_stream_based(input) && !input->eos &&
770 (vdec->need_more_data & VDEC_NEED_MORE_DATA)) {
771 u32 rp, wp, level;
772
773 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
774 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
775 if (wp < rp)
776 level = input->size + wp - rp;
777 else
778 level = wp - rp;
779 if ((level < input->prepare_level) &&
780 (pts_get_rec_num(PTS_TYPE_VIDEO,
781 vdec->input.total_rd_count) < 2)) {
782 break;
783 } else if (level > input->prepare_level) {
784 vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
785 if (debug & 8)
786 pr_info("vdec_flush_streambuff_status up\n");
787 vdec_up(vdec);
788 }
789 break;
790 }
791 }
792}
793EXPORT_SYMBOL(vdec_update_streambuff_status);
794#endif
795
796int vdec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
797{
798 if (vdec && vdec->dec_status &&
799 ((vdec->status == VDEC_STATUS_CONNECTED ||
800 vdec->status == VDEC_STATUS_ACTIVE)))
801 return vdec->dec_status(vdec, vstatus);
802
803 return 0;
804}
805EXPORT_SYMBOL(vdec_status);
806
807int vdec_set_trickmode(struct vdec_s *vdec, unsigned long trickmode)
808{
809 int r;
810 if (vdec->set_trickmode) {
811 r = vdec->set_trickmode(vdec, trickmode);
812
813 if ((r == 0) && (vdec->slave) && (vdec->slave->set_trickmode))
814 r = vdec->slave->set_trickmode(vdec->slave,
815 trickmode);
816 return r;
817 }
818 return -1;
819}
820EXPORT_SYMBOL(vdec_set_trickmode);
821
822int vdec_set_isreset(struct vdec_s *vdec, int isreset)
823{
824 vdec->is_reset = isreset;
825 pr_info("is_reset=%d\n", isreset);
826 if (vdec->set_isreset)
827 return vdec->set_isreset(vdec, isreset);
828 return 0;
829}
830EXPORT_SYMBOL(vdec_set_isreset);
831
832int vdec_set_dv_metawithel(struct vdec_s *vdec, int isdvmetawithel)
833{
834 vdec->dolby_meta_with_el = isdvmetawithel;
835 pr_info("isdvmetawithel=%d\n", isdvmetawithel);
836 return 0;
837}
838EXPORT_SYMBOL(vdec_set_dv_metawithel);
839
840void vdec_set_no_powerdown(int flag)
841{
842 no_powerdown = flag;
843 pr_info("no_powerdown=%d\n", no_powerdown);
844 return;
845}
846EXPORT_SYMBOL(vdec_set_no_powerdown);
847
848void vdec_count_info(struct vdec_info *vs, unsigned int err,
849 unsigned int offset)
850{
851 if (err)
852 vs->error_frame_count++;
853 if (offset) {
854 if (0 == vs->frame_count) {
855 vs->offset = 0;
856 vs->samp_cnt = 0;
857 }
858 vs->frame_data = offset > vs->total_data ?
859 offset - vs->total_data : vs->total_data - offset;
860 vs->total_data = offset;
861 if (vs->samp_cnt < 96000 * 2) { /* 2s */
862 if (0 == vs->samp_cnt)
863 vs->offset = offset;
864 vs->samp_cnt += vs->frame_dur;
865 } else {
866 vs->bit_rate = (offset - vs->offset) / 2;
867 /*pr_info("bitrate : %u\n",vs->bit_rate);*/
868 vs->samp_cnt = 0;
869 }
870 vs->frame_count++;
871 }
872 /*pr_info("size : %u, offset : %u, dur : %u, cnt : %u\n",
873 vs->offset,offset,vs->frame_dur,vs->samp_cnt);*/
874 return;
875}
876EXPORT_SYMBOL(vdec_count_info);
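/*
 * Worked example (illustrative): samp_cnt accumulates frame_dur, which
 * the threshold above treats as 1/96000 s units (96000 * 2 ~= 2 s), so
 * vs->bit_rate = (offset - vs->offset) / 2 is the amount of stream
 * offset consumed per second over that ~2 s window; e.g. 1,000,000
 * offset units consumed in the window gives bit_rate = 500,000.
 */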
877int vdec_is_support_4k(void)
878{
879 return !is_meson_gxl_package_805X();
880}
881EXPORT_SYMBOL(vdec_is_support_4k);
882
883/*
884 * clk_config:
885 * 0: default
886 * 1: no gp0_pll
887 * 2: always use gp0_pll
888 * >=10: fixed N MHz clock
889 * e.g. 100: a fixed 100 MHz clock
890 */
891unsigned int get_vdec_clk_config_settings(void)
892{
893 return clk_config;
894}
895void update_vdec_clk_config_settings(unsigned int config)
896{
897 clk_config = config;
898}
899EXPORT_SYMBOL(update_vdec_clk_config_settings);
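/*
 * Usage sketch (illustrative, not built): requesting a fixed decoder
 * clock through the clk_config table documented above; the values are
 * examples only, and the actual policy is presumably applied by the
 * media clock setup code that consults get_vdec_clk_config_settings().
 */
#if 0
	update_vdec_clk_config_settings(100);	/* fixed 100 MHz clock */
	update_vdec_clk_config_settings(2);	/* always use gp0_pll */
	update_vdec_clk_config_settings(0);	/* back to the default policy */
#endif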
900
901static bool hevc_workaround_needed(void)
902{
903 return (get_cpu_major_id() == AM_MESON_CPU_MAJOR_ID_GXBB) &&
904 (get_meson_cpu_version(MESON_CPU_VERSION_LVL_MINOR)
905 == GXBB_REV_A_MINOR);
906}
907
908struct device *get_codec_cma_device(void)
909{
910 return vdec_core->cma_dev;
911}
912
913#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
914static const char * const vdec_device_name[] = {
915 "amvdec_mpeg12", "ammvdec_mpeg12",
916 "amvdec_mpeg4", "ammvdec_mpeg4",
917 "amvdec_h264", "ammvdec_h264",
918 "amvdec_mjpeg", "ammvdec_mjpeg",
919 "amvdec_real", "ammvdec_real",
920 "amjpegdec", "ammjpegdec",
921 "amvdec_vc1", "ammvdec_vc1",
922 "amvdec_avs", "ammvdec_avs",
923 "amvdec_yuv", "ammvdec_yuv",
924 "amvdec_h264mvc", "ammvdec_h264mvc",
925 "amvdec_h264_4k2k", "ammvdec_h264_4k2k",
926 "amvdec_h265", "ammvdec_h265",
927 "amvenc_avc", "amvenc_avc",
928 "jpegenc", "jpegenc",
929 "amvdec_vp9", "ammvdec_vp9",
930 "amvdec_avs2", "ammvdec_avs2",
931 "amvdec_av1", "ammvdec_av1",
932};
933
934
935#else
936
937static const char * const vdec_device_name[] = {
938 "amvdec_mpeg12",
939 "amvdec_mpeg4",
940 "amvdec_h264",
941 "amvdec_mjpeg",
942 "amvdec_real",
943 "amjpegdec",
944 "amvdec_vc1",
945 "amvdec_avs",
946 "amvdec_yuv",
947 "amvdec_h264mvc",
948 "amvdec_h264_4k2k",
949 "amvdec_h265",
950 "amvenc_avc",
951 "jpegenc",
952 "amvdec_vp9",
953 "amvdec_avs2",
954 "amvdec_av1"
955};
956
957#endif
958
959/*
960 * Time-sliced decoding is only supported for frame-based input,
961 * so the legacy decoder can coexist with the time-sliced decoder.
962 */
963static const char *get_dev_name(bool use_legacy_vdec, int format)
964{
965#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
966 if (use_legacy_vdec && (debugflags & 0x8) == 0)
967 return vdec_device_name[format * 2];
968 else
969 return vdec_device_name[format * 2 + 1];
970#else
971 return vdec_device_name[format];
972#endif
973}
974
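/*
 * Example (illustrative, multi-dec build): vdec_device_name[] stores
 * the legacy and multi-instance driver names in pairs, so for format
 * index N get_dev_name() returns vdec_device_name[2 * N] for a legacy
 * single-mode vdec (unless debugflags bit 3 overrides it) and
 * vdec_device_name[2 * N + 1] otherwise, e.g. "amvdec_h264" vs.
 * "ammvdec_h264" for H.264.
 */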
975#ifdef VDEC_DEBUG_SUPPORT
976static u64 get_current_clk(void)
977{
978 /*struct timespec xtime = current_kernel_time();
979 u64 usec = xtime.tv_sec * 1000000;
980 usec += xtime.tv_nsec / 1000;
981 */
982 u64 usec = sched_clock();
983 return usec;
984}
985
986static void inc_profi_count(unsigned long mask, u32 *count)
987{
988 enum vdec_type_e type;
989
990 for (type = VDEC_1; type < VDEC_MAX; type++) {
991 if (mask & (1 << type))
992 count[type]++;
993 }
994}
995
996static void update_profi_clk_run(struct vdec_s *vdec,
997 unsigned long mask, u64 clk)
998{
999 enum vdec_type_e type;
1000
1001 for (type = VDEC_1; type < VDEC_MAX; type++) {
1002 if (mask & (1 << type)) {
1003 vdec->start_run_clk[type] = clk;
1004 if (vdec->profile_start_clk[type] == 0)
1005 vdec->profile_start_clk[type] = clk;
1006 vdec->total_clk[type] = clk
1007 - vdec->profile_start_clk[type];
1008 /*pr_info("set start_run_clk %ld\n",
1009 vdec->start_run_clk);*/
1010
1011 }
1012 }
1013}
1014
1015static void update_profi_clk_stop(struct vdec_s *vdec,
1016 unsigned long mask, u64 clk)
1017{
1018 enum vdec_type_e type;
1019
1020 for (type = VDEC_1; type < VDEC_MAX; type++) {
1021 if (mask & (1 << type)) {
1022 if (vdec->start_run_clk[type] == 0)
1023 pr_info("error, start_run_clk[%d] not set\n", type);
1024
1025 /*pr_info("update run_clk type %d, %ld, %ld, %ld\n",
1026 type,
1027 clk,
1028 vdec->start_run_clk[type],
1029 vdec->run_clk[type]);*/
1030 vdec->run_clk[type] +=
1031 (clk - vdec->start_run_clk[type]);
1032 }
1033 }
1034}
1035
1036#endif
1037
1038int vdec_set_decinfo(struct vdec_s *vdec, struct dec_sysinfo *p)
1039{
1040 if (copy_from_user((void *)&vdec->sys_info_store, (void *)p,
1041 sizeof(struct dec_sysinfo)))
1042 return -EFAULT;
1043
1044 /* force switch to multi-instance mode if this profile is supported. */
1045 if ((vdec->type == VDEC_TYPE_SINGLE) &&
1046 !disable_switch_single_to_mult) {
1047 const char *str = NULL;
1048 char fmt[16] = {0};
1049
1050 str = strchr(get_dev_name(false, vdec->format), '_');
1051 if (!str)
1052 return -1;
1053
1054 sprintf(fmt, "m%s", ++str);
1055 if (is_support_profile(fmt) &&
1056 vdec->sys_info->format != VIDEO_DEC_FORMAT_H263 &&
1057 vdec->format != VFORMAT_AV1)
1058 vdec->type = VDEC_TYPE_STREAM_PARSER;
1059 }
1060
1061 return 0;
1062}
1063EXPORT_SYMBOL(vdec_set_decinfo);
1064
1065/* construct vdec structure */
1066struct vdec_s *vdec_create(struct stream_port_s *port,
1067 struct vdec_s *master)
1068{
1069 struct vdec_s *vdec;
1070 int type = VDEC_TYPE_SINGLE;
1071 int id;
1072
1073 if (is_mult_inc(port->type))
1074 type = (port->type & PORT_TYPE_FRAME) ?
1075 VDEC_TYPE_FRAME_BLOCK :
1076 VDEC_TYPE_STREAM_PARSER;
1077
1078 id = ida_simple_get(&vdec_core->ida,
1079 0, MAX_INSTANCE_MUN, GFP_KERNEL);
1080 if (id < 0) {
1081 pr_info("vdec_create request id failed!ret =%d\n", id);
1082 return NULL;
1083 }
1084 vdec = vzalloc(sizeof(struct vdec_s));
1085
1086 /* TBD */
1087 if (vdec) {
1088 vdec->magic = 0x43454456;
1089 vdec->id = -1;
1090 vdec->type = type;
1091 vdec->port = port;
1092 vdec->sys_info = &vdec->sys_info_store;
1093
1094 INIT_LIST_HEAD(&vdec->list);
1095
1096 atomic_inc(&vdec_core->vdec_nr);
1097#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
1098 v4lvideo_dec_count_increase();
1099#endif
1100 vdec->id = id;
1101 vdec_input_init(&vdec->input, vdec);
1102 vdec->input.vdec_is_input_frame_empty = vdec_is_input_frame_empty;
1103 vdec->input.vdec_up = vdec_up;
1104 if (master) {
1105 vdec->master = master;
1106 master->slave = vdec;
1107 master->sched = 1;
1108 }
1109 if (enable_mvdec_info) {
1110 vdec->mvfrm = (struct vdec_frames_s *)
1111 vzalloc(sizeof(struct vdec_frames_s));
1112 if (!vdec->mvfrm)
1113 pr_err("vzalloc: vdec_frames_s failed\n");
1114 }
1115 }
1116
1117 pr_debug("vdec_create instance %p, total %d\n", vdec,
1118 atomic_read(&vdec_core->vdec_nr));
1119
1120 //trace_vdec_create(vdec); /*DEBUG_TMP*/
1121
1122 return vdec;
1123}
1124EXPORT_SYMBOL(vdec_create);
1125
1126int vdec_set_format(struct vdec_s *vdec, int format)
1127{
1128 vdec->format = format;
1129 vdec->port_flag |= PORT_FLAG_VFORMAT;
1130
1131 if (vdec->slave) {
1132 vdec->slave->format = format;
1133 vdec->slave->port_flag |= PORT_FLAG_VFORMAT;
1134 }
1135 //trace_vdec_set_format(vdec, format);/*DEBUG_TMP*/
1136
1137 return 0;
1138}
1139EXPORT_SYMBOL(vdec_set_format);
1140
1141int vdec_set_pts(struct vdec_s *vdec, u32 pts)
1142{
1143 vdec->pts = pts;
1144 vdec->pts64 = div64_u64((u64)pts * 100, 9);
1145 vdec->pts_valid = true;
1146 //trace_vdec_set_pts(vdec, (u64)pts);/*DEBUG_TMP*/
1147 return 0;
1148}
1149EXPORT_SYMBOL(vdec_set_pts);
1150
1151void vdec_set_timestamp(struct vdec_s *vdec, u64 timestamp)
1152{
1153 vdec->timestamp = timestamp;
1154 vdec->timestamp_valid = true;
1155}
1156EXPORT_SYMBOL(vdec_set_timestamp);
1157
1158int vdec_set_pts64(struct vdec_s *vdec, u64 pts64)
1159{
1160 vdec->pts64 = pts64;
1161 vdec->pts = (u32)div64_u64(pts64 * 9, 100);
1162 vdec->pts_valid = true;
1163
1164 //trace_vdec_set_pts64(vdec, pts64);/*DEBUG_TMP*/
1165 return 0;
1166}
1167EXPORT_SYMBOL(vdec_set_pts64);
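/*
 * Worked example (illustrative): vdec_set_pts()/vdec_set_pts64()
 * convert between a 90 kHz PTS and microseconds:
 *   pts64 = pts * 100 / 9    (90,000 ticks -> 1,000,000 us)
 *   pts   = pts64 * 9 / 100  (1,000,000 us -> 90,000 ticks)
 * so one second of media is 90,000 in vdec->pts and 1,000,000 in
 * vdec->pts64.
 */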
1168
1169int vdec_get_status(struct vdec_s *vdec)
1170{
1171 return vdec->status;
1172}
1173EXPORT_SYMBOL(vdec_get_status);
1174
1175int vdec_get_frame_num(struct vdec_s *vdec)
1176{
1177 return vdec->input.have_frame_num;
1178}
1179EXPORT_SYMBOL(vdec_get_frame_num);
1180
1181void vdec_set_status(struct vdec_s *vdec, int status)
1182{
1183 //trace_vdec_set_status(vdec, status);/*DEBUG_TMP*/
1184 vdec->status = status;
1185}
1186EXPORT_SYMBOL(vdec_set_status);
1187
1188void vdec_set_next_status(struct vdec_s *vdec, int status)
1189{
1190 //trace_vdec_set_next_status(vdec, status);/*DEBUG_TMP*/
1191 vdec->next_status = status;
1192}
1193EXPORT_SYMBOL(vdec_set_next_status);
1194
1195int vdec_set_video_path(struct vdec_s *vdec, int video_path)
1196{
1197 vdec->frame_base_video_path = video_path;
1198 return 0;
1199}
1200EXPORT_SYMBOL(vdec_set_video_path);
1201
1202int vdec_set_receive_id(struct vdec_s *vdec, int receive_id)
1203{
1204 vdec->vf_receiver_inst = receive_id;
1205 return 0;
1206}
1207EXPORT_SYMBOL(vdec_set_receive_id);
1208
1209/* add frame data to input chain */
1210int vdec_write_vframe(struct vdec_s *vdec, const char *buf, size_t count)
1211{
1212 return vdec_input_add_frame(&vdec->input, buf, count);
1213}
1214EXPORT_SYMBOL(vdec_write_vframe);
1215
1216int vdec_write_vframe_with_dma(struct vdec_s *vdec,
1217 ulong addr, size_t count, u32 handle)
1218{
1219 return vdec_input_add_frame_with_dma(&vdec->input, addr, count, handle);
1220}
1221EXPORT_SYMBOL(vdec_write_vframe_with_dma);
1222
1223/* queue work on the vdec core workqueue (falls back to the system workqueue) */
1224void vdec_schedule_work(struct work_struct *work)
1225{
1226 if (vdec_core->vdec_core_wq)
1227 queue_work(vdec_core->vdec_core_wq, work);
1228 else
1229 schedule_work(work);
1230}
1231EXPORT_SYMBOL(vdec_schedule_work);
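/*
 * Usage sketch (illustrative, not built): a decoder driver typically
 * initializes a work item once and then hands it to
 * vdec_schedule_work() from IRQ context; the work name and handler
 * below are hypothetical.
 */
#if 0
static void example_error_work_fn(struct work_struct *work)
{
	/* runs on vdec_core_wq (or the system workqueue as a fallback) */
}

static DECLARE_WORK(example_error_work, example_error_work_fn);

static irqreturn_t example_isr(int irq, void *dev_id)
{
	vdec_schedule_work(&example_error_work);
	return IRQ_HANDLED;
}
#endif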
1232
1233static struct vdec_s *vdec_get_associate(struct vdec_s *vdec)
1234{
1235 if (vdec->master)
1236 return vdec->master;
1237 else if (vdec->slave)
1238 return vdec->slave;
1239 return NULL;
1240}
1241
1242static void vdec_sync_input_read(struct vdec_s *vdec)
1243{
1244 if (!vdec_stream_based(vdec))
1245 return;
1246
1247 if (vdec_dual(vdec)) {
1248 u32 me, other;
1249 if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1250 me = READ_VREG(VLD_MEM_VIFIFO_WRAP_COUNT);
1251 other =
1252 vdec_get_associate(vdec)->input.stream_cookie;
1253 if (me > other)
1254 return;
1255 else if (me == other) {
1256 me = READ_VREG(VLD_MEM_VIFIFO_RP);
1257 other =
1258 vdec_get_associate(vdec)->input.swap_rp;
1259 if (me > other) {
1260 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1261 vdec_get_associate(vdec)->
1262 input.swap_rp);
1263 return;
1264 }
1265 }
1266 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1267 READ_VREG(VLD_MEM_VIFIFO_RP));
1268 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1269 me = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
1270 if (((me & 0x80000000) == 0) &&
1271 (vdec->input.streaming_rp & 0x80000000))
1272 me += 1ULL << 32;
1273 other = vdec_get_associate(vdec)->input.streaming_rp;
1274 if (me > other) {
1275 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1276 vdec_get_associate(vdec)->
1277 input.swap_rp);
1278 return;
1279 }
1280
1281 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1282 READ_VREG(HEVC_STREAM_RD_PTR));
1283 }
1284 } else if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1285 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1286 READ_VREG(VLD_MEM_VIFIFO_RP));
1287 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1288 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1289 READ_VREG(HEVC_STREAM_RD_PTR));
1290 }
1291}
1292
1293static void vdec_sync_input_write(struct vdec_s *vdec)
1294{
1295 if (!vdec_stream_based(vdec))
1296 return;
1297
1298 if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1299 WRITE_VREG(VLD_MEM_VIFIFO_WP,
1300 READ_PARSER_REG(PARSER_VIDEO_WP));
1301 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1302 WRITE_VREG(HEVC_STREAM_WR_PTR,
1303 READ_PARSER_REG(PARSER_VIDEO_WP));
1304 }
1305}
1306
1307/*
1308 *get next frame from input chain
1309 */
1310/*
1311 * The VLD FIFO is 512 bytes and the video buffer level
1312 * empty interrupt threshold is set to 0x80 bytes
1313 */
1314#define VLD_PADDING_SIZE 1024
1315#define HEVC_PADDING_SIZE (1024*16)
1316int vdec_prepare_input(struct vdec_s *vdec, struct vframe_chunk_s **p)
1317{
1318 struct vdec_input_s *input = &vdec->input;
1319 struct vframe_chunk_s *chunk = NULL;
1320 struct vframe_block_list_s *block = NULL;
1321 int dummy;
1322
1323 /* full reset to HW input */
1324 if (input->target == VDEC_INPUT_TARGET_VLD) {
1325 WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);
1326
1327 /* reset VLD fifo for all vdec */
1328 WRITE_VREG(DOS_SW_RESET0, (1<<5) | (1<<4) | (1<<3));
1329 WRITE_VREG(DOS_SW_RESET0, 0);
1330
1331 dummy = READ_RESET_REG(RESET0_REGISTER);
1332 WRITE_VREG(POWER_CTL_VLD, 1 << 4);
1333 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1334#if 0
1335 /*move to driver*/
1336 if (input_frame_based(input))
1337 WRITE_VREG(HEVC_STREAM_CONTROL, 0);
1338
1339 /*
1340 * 2: assist
1341 * 3: parser
1342 * 4: parser_state
1343 * 8: dblk
1344 * 11:mcpu
1345 * 12:ccpu
1346 * 13:ddr
1347 * 14:iqit
1348 * 15:ipp
1349 * 17:qdct
1350 * 18:mpred
1351 * 19:sao
1352 * 24:hevc_afifo
1353 */
1354 WRITE_VREG(DOS_SW_RESET3,
1355 (1<<3)|(1<<4)|(1<<8)|(1<<11)|(1<<12)|(1<<14)|(1<<15)|
1356 (1<<17)|(1<<18)|(1<<19));
1357 WRITE_VREG(DOS_SW_RESET3, 0);
1358#endif
1359 }
1360
1361 /*
1362 *setup HW decoder input buffer (VLD context)
1363 * based on input->type and input->target
1364 */
1365 if (input_frame_based(input)) {
1366 chunk = vdec_input_next_chunk(&vdec->input);
1367
1368 if (chunk == NULL) {
1369 *p = NULL;
1370 return -1;
1371 }
1372
1373 block = chunk->block;
1374
1375 if (input->target == VDEC_INPUT_TARGET_VLD) {
1376 WRITE_VREG(VLD_MEM_VIFIFO_START_PTR, block->start);
1377 WRITE_VREG(VLD_MEM_VIFIFO_END_PTR, block->start +
1378 block->size - 8);
1379 WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
1380 round_down(block->start + chunk->offset,
1381 VDEC_FIFO_ALIGN));
1382
1383 WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
1384 WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);
1385
1386 /* set to manual mode */
1387 WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
1388 WRITE_VREG(VLD_MEM_VIFIFO_RP,
1389 round_down(block->start + chunk->offset,
1390 VDEC_FIFO_ALIGN));
1391 dummy = chunk->offset + chunk->size +
1392 VLD_PADDING_SIZE;
1393 if (dummy >= block->size)
1394 dummy -= block->size;
1395 WRITE_VREG(VLD_MEM_VIFIFO_WP,
1396 round_down(block->start + dummy,
1397 VDEC_FIFO_ALIGN));
1398
1399 WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 3);
1400 WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
1401
1402 WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
1403 (0x11 << 16) | (1<<10) | (7<<3));
1404
1405 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1406 WRITE_VREG(HEVC_STREAM_START_ADDR, block->start);
1407 WRITE_VREG(HEVC_STREAM_END_ADDR, block->start +
1408 block->size);
1409 WRITE_VREG(HEVC_STREAM_RD_PTR, block->start +
1410 chunk->offset);
1411 dummy = chunk->offset + chunk->size +
1412 HEVC_PADDING_SIZE;
1413 if (dummy >= block->size)
1414 dummy -= block->size;
1415 WRITE_VREG(HEVC_STREAM_WR_PTR,
1416 round_down(block->start + dummy,
1417 VDEC_FIFO_ALIGN));
1418
1419 /* set endian */
1420 SET_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
1421 }
1422
1423 *p = chunk;
1424 return chunk->size;
1425
1426 } else {
1427 /* stream based */
1428 u32 rp = 0, wp = 0, fifo_len = 0;
1429 int size;
1430 bool swap_valid = input->swap_valid;
1431 unsigned long swap_page_phys = input->swap_page_phys;
1432
1433 if (vdec_dual(vdec) &&
1434 ((vdec->flag & VDEC_FLAG_SELF_INPUT_CONTEXT) == 0)) {
1435 /* keep using previous input context */
1436 struct vdec_s *master = (vdec->slave) ?
1437 vdec : vdec->master;
1438 if (master->input.last_swap_slave) {
1439 swap_valid = master->slave->input.swap_valid;
1440 swap_page_phys =
1441 master->slave->input.swap_page_phys;
1442 } else {
1443 swap_valid = master->input.swap_valid;
1444 swap_page_phys = master->input.swap_page_phys;
1445 }
1446 }
1447
1448 if (swap_valid) {
1449 if (input->target == VDEC_INPUT_TARGET_VLD) {
1450 if (vdec->format == VFORMAT_H264)
1451 SET_VREG_MASK(POWER_CTL_VLD,
1452 (1 << 9));
1453
1454 WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);
1455
1456 /* restore read side */
1457 WRITE_VREG(VLD_MEM_SWAP_ADDR,
1458 swap_page_phys);
1459 WRITE_VREG(VLD_MEM_SWAP_CTL, 1);
1460
1461 while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
1462 ;
1463 WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
1464
1465 /* restore wrap count */
1466 WRITE_VREG(VLD_MEM_VIFIFO_WRAP_COUNT,
1467 input->stream_cookie);
1468
1469 rp = READ_VREG(VLD_MEM_VIFIFO_RP);
1470 fifo_len = READ_VREG(VLD_MEM_VIFIFO_LEVEL);
1471
1472 /* enable */
1473 WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
1474 (0x11 << 16) | (1<<10));
1475
1476 /* sync with front end */
1477 vdec_sync_input_read(vdec);
1478 vdec_sync_input_write(vdec);
1479
1480 wp = READ_VREG(VLD_MEM_VIFIFO_WP);
1481 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1482 SET_VREG_MASK(HEVC_STREAM_CONTROL, 1);
1483
1484 /* restore read side */
1485 WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
1486 swap_page_phys);
1487 WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 1);
1488
1489 while (READ_VREG(HEVC_STREAM_SWAP_CTRL)
1490 & (1<<7))
1491 ;
1492 WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);
1493
1494 /* restore stream offset */
1495 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
1496 input->stream_cookie);
1497
1498 rp = READ_VREG(HEVC_STREAM_RD_PTR);
1499 fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
1500 >> 16) & 0x7f;
1501
1502
1503 /* enable */
1504
1505 /* sync with front end */
1506 vdec_sync_input_read(vdec);
1507 vdec_sync_input_write(vdec);
1508
1509 wp = READ_VREG(HEVC_STREAM_WR_PTR);
1510
1511 /*pr_info("vdec: restore context\r\n");*/
1512 }
1513
1514 } else {
1515 if (input->target == VDEC_INPUT_TARGET_VLD) {
1516 WRITE_VREG(VLD_MEM_VIFIFO_START_PTR,
1517 input->start);
1518 WRITE_VREG(VLD_MEM_VIFIFO_END_PTR,
1519 input->start + input->size - 8);
1520 WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
1521 input->start);
1522
1523 WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
1524 WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);
1525
1526 /* set to manual mode */
1527 WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
1528 WRITE_VREG(VLD_MEM_VIFIFO_RP, input->start);
1529 WRITE_VREG(VLD_MEM_VIFIFO_WP,
1530 READ_PARSER_REG(PARSER_VIDEO_WP));
1531
1532 rp = READ_VREG(VLD_MEM_VIFIFO_RP);
1533
1534 /* enable */
1535 WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
1536 (0x11 << 16) | (1<<10));
1537
1538 wp = READ_VREG(VLD_MEM_VIFIFO_WP);
1539
1540 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1541 WRITE_VREG(HEVC_STREAM_START_ADDR,
1542 input->start);
1543 WRITE_VREG(HEVC_STREAM_END_ADDR,
1544 input->start + input->size);
1545 WRITE_VREG(HEVC_STREAM_RD_PTR,
1546 input->start);
1547 WRITE_VREG(HEVC_STREAM_WR_PTR,
1548 READ_PARSER_REG(PARSER_VIDEO_WP));
1549
1550 rp = READ_VREG(HEVC_STREAM_RD_PTR);
1551 wp = READ_VREG(HEVC_STREAM_WR_PTR);
1552 fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
1553 >> 16) & 0x7f;
1554
1555 /* enable */
1556 }
1557 }
1558 *p = NULL;
1559 if (wp >= rp)
1560 size = wp - rp + fifo_len;
1561 else
1562 size = wp + input->size - rp + fifo_len;
1563 if (size < 0) {
1564 pr_info("%s error: input->size %x wp %x rp %x fifo_len %x => size %x\r\n",
1565 __func__, input->size, wp, rp, fifo_len, size);
1566 size = 0;
1567 }
1568 return size;
1569 }
1570}
1571EXPORT_SYMBOL(vdec_prepare_input);
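/*
 * Worked example (illustrative) for the stream based size calculation
 * above: with input->size = 0x100000, rp = 0xF0000, wp = 0x10000 and
 * fifo_len = 0x40, the write pointer has wrapped, so
 *   size = wp + input->size - rp + fifo_len
 *        = 0x10000 + 0x100000 - 0xF0000 + 0x40 = 0x20040 bytes
 * are available to the decoder (buffer data plus bytes already pulled
 * into the HW FIFO).
 */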
1572
1573void vdec_enable_input(struct vdec_s *vdec)
1574{
1575 struct vdec_input_s *input = &vdec->input;
1576
1577 if (vdec->status != VDEC_STATUS_ACTIVE)
1578 return;
1579
1580 if (input->target == VDEC_INPUT_TARGET_VLD)
1581 SET_VREG_MASK(VLD_MEM_VIFIFO_CONTROL, (1<<2) | (1<<1));
1582 else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1583 SET_VREG_MASK(HEVC_STREAM_CONTROL, 1);
1584 if (vdec_stream_based(vdec))
1585 CLEAR_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
1586 else
1587 SET_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
1588 SET_VREG_MASK(HEVC_STREAM_FIFO_CTL, (1<<29));
1589 }
1590}
1591EXPORT_SYMBOL(vdec_enable_input);
1592
1593int vdec_set_input_buffer(struct vdec_s *vdec, u32 start, u32 size)
1594{
1595 int r = vdec_input_set_buffer(&vdec->input, start, size);
1596
1597 if (r)
1598 return r;
1599
1600 if (vdec->slave)
1601 r = vdec_input_set_buffer(&vdec->slave->input, start, size);
1602
1603 return r;
1604}
1605EXPORT_SYMBOL(vdec_set_input_buffer);
1606
1607/*
1608 * vdec_has_more_input returns whether there is more input that can
1609 * be consumed by the decoder through vdec_prepare_input.
1610 * Note: this function should be called prior to vdec_vframe_dirty
1611 * by the decoder driver to determine whether EOS has happened for
1612 * stream based decoding when there is not sufficient data for a frame.
1613 */
1614bool vdec_has_more_input(struct vdec_s *vdec)
1615{
1616 struct vdec_input_s *input = &vdec->input;
1617
1618 if (!input->eos)
1619 return true;
1620
1621 if (input_frame_based(input))
1622 return vdec_input_next_input_chunk(input) != NULL;
1623 else {
1624 if (input->target == VDEC_INPUT_TARGET_VLD)
1625 return READ_VREG(VLD_MEM_VIFIFO_WP) !=
1626 READ_PARSER_REG(PARSER_VIDEO_WP);
1627 else {
1628 return (READ_VREG(HEVC_STREAM_WR_PTR) & ~0x3) !=
1629 (READ_PARSER_REG(PARSER_VIDEO_WP) & ~0x3);
1630 }
1631 }
1632}
1633EXPORT_SYMBOL(vdec_has_more_input);
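/*
 * Usage sketch (illustrative, not built): how a stream based decoder
 * driver might consult vdec_has_more_input() when it cannot assemble a
 * full frame from the data currently available; example_handle_eos()
 * is a hypothetical helper, not part of this file.
 */
#if 0
	/* called before vdec_vframe_dirty(), as the comment above notes */
	if (!vdec_has_more_input(vdec)) {
		/* the input hit EOS and everything was consumed */
		example_handle_eos(vdec);
	} else {
		/* more data may still arrive; wait to be scheduled again */
	}
#endif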
1634
1635void vdec_set_prepare_level(struct vdec_s *vdec, int level)
1636{
1637 vdec->input.prepare_level = level;
1638}
1639EXPORT_SYMBOL(vdec_set_prepare_level);
1640
1641void vdec_set_flag(struct vdec_s *vdec, u32 flag)
1642{
1643 vdec->flag = flag;
1644}
1645EXPORT_SYMBOL(vdec_set_flag);
1646
1647void vdec_set_eos(struct vdec_s *vdec, bool eos)
1648{
1649 struct vdec_core_s *core = vdec_core;
1650
1651 vdec->input.eos = eos;
1652
1653 if (vdec->slave)
1654 vdec->slave->input.eos = eos;
1655 up(&core->sem);
1656}
1657EXPORT_SYMBOL(vdec_set_eos);
1658
1659#ifdef VDEC_DEBUG_SUPPORT
1660void vdec_set_step_mode(void)
1661{
1662 step_mode = 0x1ff;
1663}
1664EXPORT_SYMBOL(vdec_set_step_mode);
1665#endif
1666
1667void vdec_set_next_sched(struct vdec_s *vdec, struct vdec_s *next_vdec)
1668{
1669 if (vdec && next_vdec) {
1670 vdec->sched = 0;
1671 next_vdec->sched = 1;
1672 }
1673}
1674EXPORT_SYMBOL(vdec_set_next_sched);
1675
1676/*
1677 * Swap Context: S0 S1 S2 S3 S4
1678 * Sample sequence: M S M M S
1679 * Master Context: S0 S0 S2 S3 S3
1680 * Slave context: NA S1 S1 S2 S4
1681 * ^
1682 * ^
1683 * ^
1684 * the tricky part
1685 * If there are back-to-back decodings on the master or the slave,
1686 * then the context of the counterpart should be updated with the
1687 * current decoder's. In this example, S1 should be
1688 * updated to S2.
1689 * This is done by swapping the swap_page and related info
1690 * between the two layers.
1691 */
1692static void vdec_borrow_input_context(struct vdec_s *vdec)
1693{
1694 struct page *swap_page;
1695 unsigned long swap_page_phys;
1696 struct vdec_input_s *me;
1697 struct vdec_input_s *other;
1698
1699 if (!vdec_dual(vdec))
1700 return;
1701
1702 me = &vdec->input;
1703 other = &vdec_get_associate(vdec)->input;
1704
1705 /* swap the swap_context: borrow the counterpart's
1706 * swap context storage and update all related info.
1707 * After vdec_vframe_dirty, vdec_save_input_context
1708 * will be called to update current vdec's
1709 * swap context
1710 */
1711 swap_page = other->swap_page;
1712 other->swap_page = me->swap_page;
1713 me->swap_page = swap_page;
1714
1715 swap_page_phys = other->swap_page_phys;
1716 other->swap_page_phys = me->swap_page_phys;
1717 me->swap_page_phys = swap_page_phys;
1718
1719 other->swap_rp = me->swap_rp;
1720 other->streaming_rp = me->streaming_rp;
1721 other->stream_cookie = me->stream_cookie;
1722 other->swap_valid = me->swap_valid;
1723}
1724
1725void vdec_vframe_dirty(struct vdec_s *vdec, struct vframe_chunk_s *chunk)
1726{
1727 if (chunk)
1728 chunk->flag |= VFRAME_CHUNK_FLAG_CONSUMED;
1729
1730 if (vdec_stream_based(vdec)) {
1731 vdec->input.swap_needed = true;
1732
1733 if (vdec_dual(vdec)) {
1734 vdec_get_associate(vdec)->input.dirty_count = 0;
1735 vdec->input.dirty_count++;
1736 if (vdec->input.dirty_count > 1) {
1737 vdec->input.dirty_count = 1;
1738 vdec_borrow_input_context(vdec);
1739 }
1740 }
1741
1742 /* for stream based mode, we also update the read and write
1743 * pointers in case the decoder wants to keep decoding more
1744 * frames while the input front end has more data
1745 */
1746 vdec_sync_input_read(vdec);
1747 vdec_sync_input_write(vdec);
1748
1749 vdec->need_more_data |= VDEC_NEED_MORE_DATA_DIRTY;
1750 vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
1751 }
1752}
1753EXPORT_SYMBOL(vdec_vframe_dirty);
1754
1755bool vdec_need_more_data(struct vdec_s *vdec)
1756{
1757 if (vdec_stream_based(vdec))
1758 return vdec->need_more_data & VDEC_NEED_MORE_DATA;
1759
1760 return false;
1761}
1762EXPORT_SYMBOL(vdec_need_more_data);
1763
1764
1765void hevc_wait_ddr(void)
1766{
1767 unsigned long flags;
1768 unsigned int mask = 0;
1769
1770 mask = 1 << 4; /* hevc */
1771 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
1772 mask |= (1 << 8); /* hevcb */
1773
1774 spin_lock_irqsave(&vdec_spin_lock, flags);
1775 codec_dmcbus_write(DMC_REQ_CTRL,
1776 codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
1777 spin_unlock_irqrestore(&vdec_spin_lock, flags);
1778
1779 if (is_cpu_tm2_revb()) {
1780 while (!(codec_dmcbus_read(TM2_REVB_DMC_CHAN_STS)
1781 & mask))
1782 ;
1783 } else {
1784 while (!(codec_dmcbus_read(DMC_CHAN_STS)
1785 & mask))
1786 ;
1787 }
1788}
1789
1790void vdec_save_input_context(struct vdec_s *vdec)
1791{
1792 struct vdec_input_s *input = &vdec->input;
1793
1794#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1795 vdec_profile(vdec, VDEC_PROFILE_EVENT_SAVE_INPUT);
1796#endif
1797
1798 if (input->target == VDEC_INPUT_TARGET_VLD)
1799 WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1<<15);
1800
1801 if (input_stream_based(input) && (input->swap_needed)) {
1802 if (input->target == VDEC_INPUT_TARGET_VLD) {
1803 WRITE_VREG(VLD_MEM_SWAP_ADDR,
1804 input->swap_page_phys);
1805 WRITE_VREG(VLD_MEM_SWAP_CTL, 3);
1806 while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
1807 ;
1808 WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
1809 vdec->input.stream_cookie =
1810 READ_VREG(VLD_MEM_VIFIFO_WRAP_COUNT);
1811 vdec->input.swap_rp =
1812 READ_VREG(VLD_MEM_VIFIFO_RP);
1813 vdec->input.total_rd_count =
1814 (u64)vdec->input.stream_cookie *
1815 vdec->input.size + vdec->input.swap_rp -
1816 READ_VREG(VLD_MEM_VIFIFO_BYTES_AVAIL);
1817 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1818 WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
1819 input->swap_page_phys);
1820 WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 3);
1821
1822 while (READ_VREG(HEVC_STREAM_SWAP_CTRL) & (1<<7))
1823 ;
1824 WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);
1825
1826 vdec->input.stream_cookie =
1827 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
1828 vdec->input.swap_rp =
1829 READ_VREG(HEVC_STREAM_RD_PTR);
1830 if (((vdec->input.stream_cookie & 0x80000000) == 0) &&
1831 (vdec->input.streaming_rp & 0x80000000))
1832 vdec->input.streaming_rp += 1ULL << 32;
1833 vdec->input.streaming_rp &= 0xffffffffULL << 32;
1834 vdec->input.streaming_rp |= vdec->input.stream_cookie;
1835 vdec->input.total_rd_count = vdec->input.streaming_rp;
1836 hevc_wait_ddr();
1837 }
1838
1839 input->swap_valid = true;
1840 input->swap_needed = false;
1841 /*pr_info("vdec: save context\r\n");*/
1842
1843 vdec_sync_input_read(vdec);
1844
1845 if (vdec_dual(vdec)) {
1846 struct vdec_s *master = (vdec->slave) ?
1847 vdec : vdec->master;
1848 master->input.last_swap_slave = (master->slave == vdec);
1849 /* pr_info("master->input.last_swap_slave = %d\n",
1850 master->input.last_swap_slave); */
1851 }
1852 }
1853}
1854EXPORT_SYMBOL(vdec_save_input_context);
1855
1856void vdec_clean_input(struct vdec_s *vdec)
1857{
1858 struct vdec_input_s *input = &vdec->input;
1859
1860 while (!list_empty(&input->vframe_chunk_list)) {
1861 struct vframe_chunk_s *chunk =
1862 vdec_input_next_chunk(input);
1863 if (chunk && (chunk->flag & VFRAME_CHUNK_FLAG_CONSUMED))
1864 vdec_input_release_chunk(input, chunk);
1865 else
1866 break;
1867 }
1868 vdec_save_input_context(vdec);
1869}
1870EXPORT_SYMBOL(vdec_clean_input);
1871
1872
1873static int vdec_input_read_restore(struct vdec_s *vdec)
1874{
1875 struct vdec_input_s *input = &vdec->input;
1876
1877 if (!vdec_stream_based(vdec))
1878 return 0;
1879
1880 if (!input->swap_valid) {
1881 if (input->target == VDEC_INPUT_TARGET_VLD) {
1882 WRITE_VREG(VLD_MEM_VIFIFO_START_PTR,
1883 input->start);
1884 WRITE_VREG(VLD_MEM_VIFIFO_END_PTR,
1885 input->start + input->size - 8);
1886 WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
1887 input->start);
1888 WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
1889 WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);
1890
1891 /* set to manual mode */
1892 WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
1893 WRITE_VREG(VLD_MEM_VIFIFO_RP, input->start);
1894 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1895 WRITE_VREG(HEVC_STREAM_START_ADDR,
1896 input->start);
1897 WRITE_VREG(HEVC_STREAM_END_ADDR,
1898 input->start + input->size);
1899 WRITE_VREG(HEVC_STREAM_RD_PTR,
1900 input->start);
1901 }
1902 return 0;
1903 }
1904 if (input->target == VDEC_INPUT_TARGET_VLD) {
1905 /* restore read side */
1906 WRITE_VREG(VLD_MEM_SWAP_ADDR,
1907 input->swap_page_phys);
1908
1909 /*swap active*/
1910 WRITE_VREG(VLD_MEM_SWAP_CTL, 1);
1911
1912 /*wait swap busy*/
1913 while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
1914 ;
1915
1916 WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
1917 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1918 /* restore read side */
1919 WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
1920 input->swap_page_phys);
1921 WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 1);
1922
1923 while (READ_VREG(HEVC_STREAM_SWAP_CTRL)
1924 & (1<<7))
1925 ;
1926 WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);
1927 }
1928
1929 return 0;
1930}
1931
1932
1933int vdec_sync_input(struct vdec_s *vdec)
1934{
1935 struct vdec_input_s *input = &vdec->input;
1936 u32 rp = 0, wp = 0, fifo_len = 0;
1937 int size;
1938
1939 vdec_input_read_restore(vdec);
1940 vdec_sync_input_read(vdec);
1941 vdec_sync_input_write(vdec);
1942 if (input->target == VDEC_INPUT_TARGET_VLD) {
1943 rp = READ_VREG(VLD_MEM_VIFIFO_RP);
1944 wp = READ_VREG(VLD_MEM_VIFIFO_WP);
1945
1946 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1947 rp = READ_VREG(HEVC_STREAM_RD_PTR);
1948 wp = READ_VREG(HEVC_STREAM_WR_PTR);
1949 fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
1950 >> 16) & 0x7f;
1951 }
1952 if (wp >= rp)
1953 size = wp - rp + fifo_len;
1954 else
1955 size = wp + input->size - rp + fifo_len;
1956 if (size < 0) {
1957 pr_info("%s error: input->size %x wp %x rp %x fifo_len %x => size %x\r\n",
1958 __func__, input->size, wp, rp, fifo_len, size);
1959 size = 0;
1960 }
1961 return size;
1962
1963}
1964EXPORT_SYMBOL(vdec_sync_input);
1965
1966const char *vdec_status_str(struct vdec_s *vdec)
1967{
1968 if (vdec->status < 0)
1969 return "INVALID";
1970 return vdec->status < ARRAY_SIZE(vdec_status_string) ?
1971 vdec_status_string[vdec->status] : "INVALID";
1972}
1973
1974const char *vdec_type_str(struct vdec_s *vdec)
1975{
1976 switch (vdec->type) {
1977 case VDEC_TYPE_SINGLE:
1978 return "VDEC_TYPE_SINGLE";
1979 case VDEC_TYPE_STREAM_PARSER:
1980 return "VDEC_TYPE_STREAM_PARSER";
1981 case VDEC_TYPE_FRAME_BLOCK:
1982 return "VDEC_TYPE_FRAME_BLOCK";
1983 case VDEC_TYPE_FRAME_CIRCULAR:
1984 return "VDEC_TYPE_FRAME_CIRCULAR";
1985 default:
1986 return "VDEC_TYPE_INVALID";
1987 }
1988}
1989
1990const char *vdec_device_name_str(struct vdec_s *vdec)
1991{
1992 return vdec_device_name[vdec->format * 2 + 1];
1993}
1994EXPORT_SYMBOL(vdec_device_name_str);
1995
1996void walk_vdec_core_list(char *s)
1997{
1998 struct vdec_s *vdec;
1999 struct vdec_core_s *core = vdec_core;
2000 unsigned long flags;
2001
2002 pr_info("%s --->\n", s);
2003
2004 flags = vdec_core_lock(vdec_core);
2005
2006 if (list_empty(&core->connected_vdec_list)) {
2007 pr_info("connected vdec list empty\n");
2008 } else {
2009 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
2010 pr_info("\tvdec (%p), status = %s\n", vdec,
2011 vdec_status_str(vdec));
2012 }
2013 }
2014
2015 vdec_core_unlock(vdec_core, flags);
2016}
2017EXPORT_SYMBOL(walk_vdec_core_list);
2018
2019/* insert vdec into vdec_core for scheduling;
2020 * for dual-running decoders, connect/disconnect always run in pairs
2021 */
2022int vdec_connect(struct vdec_s *vdec)
2023{
2024 unsigned long flags;
2025
2026 //trace_vdec_connect(vdec);/*DEBUG_TMP*/
2027
2028 if (vdec->status != VDEC_STATUS_DISCONNECTED)
2029 return 0;
2030
2031 vdec_set_status(vdec, VDEC_STATUS_CONNECTED);
2032 vdec_set_next_status(vdec, VDEC_STATUS_CONNECTED);
2033
2034 init_completion(&vdec->inactive_done);
2035
2036 if (vdec->slave) {
2037 vdec_set_status(vdec->slave, VDEC_STATUS_CONNECTED);
2038 vdec_set_next_status(vdec->slave, VDEC_STATUS_CONNECTED);
2039
2040 init_completion(&vdec->slave->inactive_done);
2041 }
2042
2043 flags = vdec_core_lock(vdec_core);
2044
2045 list_add_tail(&vdec->list, &vdec_core->connected_vdec_list);
2046
2047 if (vdec->slave) {
2048 list_add_tail(&vdec->slave->list,
2049 &vdec_core->connected_vdec_list);
2050 }
2051
2052 vdec_core_unlock(vdec_core, flags);
2053
2054 up(&vdec_core->sem);
2055
2056 return 0;
2057}
2058EXPORT_SYMBOL(vdec_connect);
2059
2060/* remove vdec from vdec_core scheduling */
2061int vdec_disconnect(struct vdec_s *vdec)
2062{
2063#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2064 vdec_profile(vdec, VDEC_PROFILE_EVENT_DISCONNECT);
2065#endif
2066 //trace_vdec_disconnect(vdec);/*DEBUG_TMP*/
2067
2068 if ((vdec->status != VDEC_STATUS_CONNECTED) &&
2069 (vdec->status != VDEC_STATUS_ACTIVE)) {
2070 return 0;
2071 }
2072 mutex_lock(&vdec_mutex);
2073 /*
2074 *when a vdec is under the management of scheduler
2075 * the status change will only be from vdec_core_thread
2076 */
2077 vdec_set_next_status(vdec, VDEC_STATUS_DISCONNECTED);
2078
2079 if (vdec->slave)
2080 vdec_set_next_status(vdec->slave, VDEC_STATUS_DISCONNECTED);
2081 else if (vdec->master)
2082 vdec_set_next_status(vdec->master, VDEC_STATUS_DISCONNECTED);
2083 mutex_unlock(&vdec_mutex);
2084 up(&vdec_core->sem);
2085
2086 if(!wait_for_completion_timeout(&vdec->inactive_done,
2087 msecs_to_jiffies(2000)))
2088 goto discon_timeout;
2089
2090 if (vdec->slave) {
2091 if(!wait_for_completion_timeout(&vdec->slave->inactive_done,
2092 msecs_to_jiffies(2000)))
2093 goto discon_timeout;
2094 } else if (vdec->master) {
2095 if(!wait_for_completion_timeout(&vdec->master->inactive_done,
2096 msecs_to_jiffies(2000)))
2097 goto discon_timeout;
2098 }
2099
2100 return 0;
2101discon_timeout:
2102 pr_err("%s timeout!!! status: 0x%x\n", __func__, vdec->status);
2103 return 0;
2104}
2105EXPORT_SYMBOL(vdec_disconnect);
2106
2107/* release vdec structure */
2108int vdec_destroy(struct vdec_s *vdec)
2109{
2110 //trace_vdec_destroy(vdec);/*DEBUG_TMP*/
2111
2112 vdec_input_release(&vdec->input);
2113
2114#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2115 vdec_profile_flush(vdec);
2116#endif
2117 ida_simple_remove(&vdec_core->ida, vdec->id);
2118 if (vdec->mvfrm)
2119 vfree(vdec->mvfrm);
2120 vfree(vdec);
2121
2122#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
2123 v4lvideo_dec_count_decrease();
2124#endif
2125 atomic_dec(&vdec_core->vdec_nr);
2126
2127 return 0;
2128}
2129EXPORT_SYMBOL(vdec_destroy);
2130
2131/*
2132 * register the vdec device and
2133 * create its output: a vfm path or an ionvideo output
2134 */
2135s32 vdec_init(struct vdec_s *vdec, int is_4k)
2136{
2137 int r = 0;
2138 struct vdec_s *p = vdec;
2139 const char *dev_name;
2140	int id = PLATFORM_DEVID_AUTO;/*used unless the vdec brings its own id*/
2141
2142 //pr_err("%s [pid=%d,tgid=%d]\n", __func__, current->pid, current->tgid);
2143 dev_name = get_dev_name(vdec_single(vdec), vdec->format);
2144
2145 if (dev_name == NULL)
2146 return -ENODEV;
2147
2148 pr_info("vdec_init, dev_name:%s, vdec_type=%s\n",
2149 dev_name, vdec_type_str(vdec));
2150
2151	/*
2152	 * todo: VFM path control should be configurable;
2153	 * for now all stream based input uses the default VFM path.
2154	 */
2155 if (vdec_stream_based(vdec) && !vdec_dual(vdec)) {
2156 if (vdec_core->vfm_vdec == NULL) {
2157 pr_debug("vdec_init set vfm decoder %p\n", vdec);
2158 vdec_core->vfm_vdec = vdec;
2159 } else {
2160 pr_info("vdec_init vfm path busy.\n");
2161 return -EBUSY;
2162 }
2163 }
2164
2165 mutex_lock(&vdec_mutex);
2166 inited_vcodec_num++;
2167 mutex_unlock(&vdec_mutex);
2168
2169 vdec_input_set_type(&vdec->input, vdec->type,
2170 (vdec->format == VFORMAT_HEVC ||
2171 vdec->format == VFORMAT_AVS2 ||
2172 vdec->format == VFORMAT_VP9 ||
2173 vdec->format == VFORMAT_AV1
2174 ) ?
2175 VDEC_INPUT_TARGET_HEVC :
2176 VDEC_INPUT_TARGET_VLD);
2177 if (vdec_single(vdec) || (vdec_get_debug_flags() & 0x2))
2178 vdec_enable_DMC(vdec);
2179 p->cma_dev = vdec_core->cma_dev;
2180 p->get_canvas = get_canvas;
2181 p->get_canvas_ex = get_canvas_ex;
2182 p->free_canvas_ex = free_canvas_ex;
2183 p->vdec_fps_detec = vdec_fps_detec;
2184 atomic_set(&p->inrelease, 0);
2185 atomic_set(&p->inirq_flag, 0);
2186 atomic_set(&p->inirq_thread_flag, 0);
2187 /* todo */
2188 if (!vdec_dual(vdec))
2189 p->use_vfm_path = vdec_stream_based(vdec);
2190 if (debugflags & 0x4)
2191 p->use_vfm_path = 1;
2192 /* vdec_dev_reg.flag = 0; */
2193 if (vdec->id >= 0)
2194 id = vdec->id;
2195 p->parallel_dec = parallel_decode;
2196 vdec_core->parallel_dec = parallel_decode;
2197 vdec->canvas_mode = CANVAS_BLKMODE_32X32;
2198#ifdef FRAME_CHECK
2199 vdec_frame_check_init(vdec);
2200#endif
2201 p->dev = platform_device_register_data(
2202 &vdec_core->vdec_core_platform_device->dev,
2203 dev_name,
2204 id,
2205 &p, sizeof(struct vdec_s *));
2206
2207 if (IS_ERR(p->dev)) {
2208 r = PTR_ERR(p->dev);
2209 pr_err("vdec: Decoder device %s register failed (%d)\n",
2210 dev_name, r);
2211
2212 mutex_lock(&vdec_mutex);
2213 inited_vcodec_num--;
2214 mutex_unlock(&vdec_mutex);
2215
2216 goto error;
2217 } else if (!p->dev->dev.driver) {
2218 pr_info("vdec: Decoder device %s driver probe failed.\n",
2219 dev_name);
2220 r = -ENODEV;
2221
2222 goto error;
2223 }
2224
2225 if ((p->type == VDEC_TYPE_FRAME_BLOCK) && (p->run == NULL)) {
2226 r = -ENODEV;
2227 pr_err("vdec: Decoder device not handled (%s)\n", dev_name);
2228
2229 mutex_lock(&vdec_mutex);
2230 inited_vcodec_num--;
2231 mutex_unlock(&vdec_mutex);
2232
2233 goto error;
2234 }
2235
2236 if (p->use_vfm_path) {
2237 vdec->vf_receiver_inst = -1;
2238 vdec->vfm_map_id[0] = 0;
2239 } else if (!vdec_dual(vdec)) {
2240 /* create IONVIDEO instance and connect decoder's
2241 * vf_provider interface to it
2242 */
2243 if (p->type != VDEC_TYPE_FRAME_BLOCK) {
2244 r = -ENODEV;
2245 pr_err("vdec: Incorrect decoder type\n");
2246
2247 mutex_lock(&vdec_mutex);
2248 inited_vcodec_num--;
2249 mutex_unlock(&vdec_mutex);
2250
2251 goto error;
2252 }
2253
2254 if (strncmp("disable", vfm_path, strlen("disable"))) {
2255 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2256 "%s %s", vdec->vf_provider_name, vfm_path);
2257 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2258 "vdec-map-%d", vdec->id);
2259 } else if (p->frame_base_video_path == FRAME_BASE_PATH_IONVIDEO) {
2260#if 1
2261 r = ionvideo_assign_map(&vdec->vf_receiver_name,
2262 &vdec->vf_receiver_inst);
2263#else
2264			/*
2265			 * temporarily just use the decoder instance ID as the iondriver
2266			 * ID to work around the OMX iondriver instance-number check
2267			 * timing; the only limitation is that we can NOT mix different
2268			 * video decoders, since the same ID would be used for different
2269			 * decoder formats.
2270			 */
2271 vdec->vf_receiver_inst = p->dev->id;
2272 r = ionvideo_assign_map(&vdec->vf_receiver_name,
2273 &vdec->vf_receiver_inst);
2274#endif
2275 if (r < 0) {
2276 pr_err("IonVideo frame receiver allocation failed.\n");
2277
2278 mutex_lock(&vdec_mutex);
2279 inited_vcodec_num--;
2280 mutex_unlock(&vdec_mutex);
2281
2282 goto error;
2283 }
2284
2285 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2286 "%s %s", vdec->vf_provider_name,
2287 vdec->vf_receiver_name);
2288 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2289 "vdec-map-%d", vdec->id);
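			/*
			 * e.g. this might produce a chain such as
			 * "vdec.h265.0 ionvideo.0" registered under the id
			 * "vdec-map-0" (the provider/receiver names here are
			 * only illustrative)
			 */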
2290 } else if (p->frame_base_video_path ==
2291 FRAME_BASE_PATH_AMLVIDEO_AMVIDEO) {
2292 if (vdec_secure(vdec)) {
2293 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2294 "%s %s", vdec->vf_provider_name,
2295 "amlvideo amvideo");
2296 } else {
2297 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2298 "%s %s", vdec->vf_provider_name,
2299 "amlvideo ppmgr deinterlace amvideo");
2300 }
2301 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2302 "vdec-map-%d", vdec->id);
2303 } else if (p->frame_base_video_path ==
2304 FRAME_BASE_PATH_AMLVIDEO1_AMVIDEO2) {
2305 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2306 "%s %s", vdec->vf_provider_name,
2307 "aml_video.1 videosync.0 videopip");
2308 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2309 "vdec-map-%d", vdec->id);
2310 } else if (p->frame_base_video_path == FRAME_BASE_PATH_V4L_OSD) {
2311 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2312 "%s %s", vdec->vf_provider_name,
2313 vdec->vf_receiver_name);
2314 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2315 "vdec-map-%d", vdec->id);
2316 } else if (p->frame_base_video_path == FRAME_BASE_PATH_TUNNEL_MODE) {
2317 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2318 "%s %s", vdec->vf_provider_name,
2319 "amvideo");
2320 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2321 "vdec-map-%d", vdec->id);
2322 } else if (p->frame_base_video_path == FRAME_BASE_PATH_PIP_TUNNEL_MODE) {
2323 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2324 "%s %s", vdec->vf_provider_name,
2325 "videosync.0 videopip");
2326 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2327 "vdec-map-%d", vdec->id);
2328 } else if (p->frame_base_video_path == FRAME_BASE_PATH_V4L_VIDEO) {
2329 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2330 "%s %s %s", vdec->vf_provider_name,
2331 vdec->vf_receiver_name, "amvideo");
2332 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2333 "vdec-map-%d", vdec->id);
2334 } else if (p->frame_base_video_path ==
2335 FRAME_BASE_PATH_DI_V4LVIDEO) {
2336#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
2337 r = v4lvideo_assign_map(&vdec->vf_receiver_name,
2338 &vdec->vf_receiver_inst);
2339#else
2340 r = -1;
2341#endif
2342 if (r < 0) {
2343 pr_err("V4lVideo frame receiver allocation failed.\n");
2344 mutex_lock(&vdec_mutex);
2345 inited_vcodec_num--;
2346 mutex_unlock(&vdec_mutex);
2347 goto error;
2348 }
2349 if (!v4lvideo_add_di)
2350 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2351 "%s %s", vdec->vf_provider_name,
2352 vdec->vf_receiver_name);
2353 else {
2354 if (vdec->vf_receiver_inst == 0)
2355 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2356 "%s %s %s", vdec->vf_provider_name,
2357 "dimulti.1",
2358 vdec->vf_receiver_name);
2359 else if ((vdec->vf_receiver_inst <
2360 max_di_instance) &&
2361 (vdec->vf_receiver_inst == 1))
2362 snprintf(vdec->vfm_map_chain,
2363 VDEC_MAP_NAME_SIZE,
2364 "%s %s %s",
2365 vdec->vf_provider_name,
2366 "deinterlace",
2367 vdec->vf_receiver_name);
2368 else if (vdec->vf_receiver_inst <
2369 max_di_instance)
2370 snprintf(vdec->vfm_map_chain,
2371 VDEC_MAP_NAME_SIZE,
2372 "%s %s%d %s",
2373 vdec->vf_provider_name,
2374 "dimulti.",
2375 vdec->vf_receiver_inst,
2376 vdec->vf_receiver_name);
2377 else
2378 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2379 "%s %s", vdec->vf_provider_name,
2380 vdec->vf_receiver_name);
2381 }
2382 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2383 "vdec-map-%d", vdec->id);
2384 }
2385
2386 if (vfm_map_add(vdec->vfm_map_id,
2387 vdec->vfm_map_chain) < 0) {
2388 r = -ENOMEM;
2389 pr_err("Decoder pipeline map creation failed %s.\n",
2390 vdec->vfm_map_id);
2391 vdec->vfm_map_id[0] = 0;
2392
2393 mutex_lock(&vdec_mutex);
2394 inited_vcodec_num--;
2395 mutex_unlock(&vdec_mutex);
2396
2397 goto error;
2398 }
2399
2400 pr_debug("vfm map %s created\n", vdec->vfm_map_id);
2401
2402 /*
2403		 * assume the IONVIDEO driver already has a few vframe_receivers
2404		 * registered.
2405		 * 1. Call the iondriver function to allocate an IONVIDEO path and
2406 * provide receiver's name and receiver op.
2407 * 2. Get decoder driver's provider name from driver instance
2408 * 3. vfm_map_add(name, "<decoder provider name>
2409 * <iondriver receiver name>"), e.g.
2410 * vfm_map_add("vdec_ion_map_0", "mpeg4_0 iondriver_1");
2411 * 4. vf_reg_provider and vf_reg_receiver
2412 * Note: the decoder provider's op uses vdec as op_arg
2413 * the iondriver receiver's op uses iondev device as
2414 * op_arg
2415 */
2416
2417 }
2418
2419 if (!vdec_single(vdec)) {
2420 vf_reg_provider(&p->vframe_provider);
2421
2422 vf_notify_receiver(p->vf_provider_name,
2423 VFRAME_EVENT_PROVIDER_START,
2424 vdec);
2425
2426 if (vdec_core->hint_fr_vdec == NULL)
2427 vdec_core->hint_fr_vdec = vdec;
2428
2429 if (vdec_core->hint_fr_vdec == vdec) {
2430 if (p->sys_info->rate != 0) {
2431 if (!vdec->is_reset) {
2432 vf_notify_receiver(p->vf_provider_name,
2433 VFRAME_EVENT_PROVIDER_FR_HINT,
2434 (void *)
2435 ((unsigned long)
2436 p->sys_info->rate));
2437 vdec->fr_hint_state = VDEC_HINTED;
2438 }
2439 } else {
2440 vdec->fr_hint_state = VDEC_NEED_HINT;
2441 }
2442 }
2443 }
2444
2445 p->dolby_meta_with_el = 0;
2446	pr_debug("vdec_init, vf_provider_name = %s, is_cpu_tm2_revb %d\n",
2447		p->vf_provider_name, is_cpu_tm2_revb());
2448 vdec_input_prepare_bufs(/*prepared buffer for fast playing.*/
2449 &vdec->input,
2450 vdec->sys_info->width,
2451 vdec->sys_info->height);
2452 /* vdec is now ready to be active */
2453 vdec_set_status(vdec, VDEC_STATUS_DISCONNECTED);
2454 return 0;
2455
2456error:
2457 return r;
2458}
2459EXPORT_SYMBOL(vdec_init);
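/*
 * Rough usage sketch; vdec_create() and the sysinfo variable below are only
 * assumed/illustrative (defined elsewhere in the framework), while vdec_init,
 * vdec_connect and vdec_release are the entry points implemented here:
 *
 *	vdec = vdec_create(port, master);
 *	vdec->sys_info = &my_sysinfo;
 *	if (vdec_init(vdec, is_4k) == 0)
 *		vdec_connect(vdec);	(hands it to the core scheduler)
 *	...
 *	vdec_release(vdec);		(disconnect + destroy on teardown)
 */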
2460
2461/*
2462 * Remove the vdec after a timeout has happened in both vdec_disconnect
2463 * and platform_device_unregister; only then can we release the vdec.
2464 */
2465static void vdec_connect_list_force_clear(struct vdec_core_s *core, struct vdec_s *v_ref)
2466{
2467 struct vdec_s *vdec, *tmp;
2468 unsigned long flags;
2469
2470 flags = vdec_core_lock(core);
2471
2472 list_for_each_entry_safe(vdec, tmp,
2473 &core->connected_vdec_list, list) {
2474 if ((vdec->status == VDEC_STATUS_DISCONNECTED) &&
2475 (vdec == v_ref)) {
2476 pr_err("%s, vdec = %p, active vdec = %p\n",
2477 __func__, vdec, core->active_vdec);
2478 if (core->active_vdec == v_ref)
2479 core->active_vdec = NULL;
2480 if (core->last_vdec == v_ref)
2481 core->last_vdec = NULL;
2482 list_del(&vdec->list);
2483 }
2484 }
2485
2486 vdec_core_unlock(core, flags);
2487}
2488
2489/* vdec_create/init/release/destroy are applied to both decoders of a dual running pair
2490 */
2491void vdec_release(struct vdec_s *vdec)
2492{
2493 //trace_vdec_release(vdec);/*DEBUG_TMP*/
2494#ifdef VDEC_DEBUG_SUPPORT
2495 if (step_mode) {
2496 pr_info("VDEC_DEBUG: in step_mode, wait release\n");
2497 while (step_mode)
2498 udelay(10);
2499 pr_info("VDEC_DEBUG: step_mode is clear\n");
2500 }
2501#endif
2502 vdec_disconnect(vdec);
2503
2504 if (vdec->vframe_provider.name) {
2505 if (!vdec_single(vdec)) {
2506 if (vdec_core->hint_fr_vdec == vdec
2507 && vdec->fr_hint_state == VDEC_HINTED)
2508 vf_notify_receiver(
2509 vdec->vf_provider_name,
2510 VFRAME_EVENT_PROVIDER_FR_END_HINT,
2511 NULL);
2512 vdec->fr_hint_state = VDEC_NO_NEED_HINT;
2513 }
2514 vf_unreg_provider(&vdec->vframe_provider);
2515 }
2516
2517 if (vdec_core->vfm_vdec == vdec)
2518 vdec_core->vfm_vdec = NULL;
2519
2520 if (vdec_core->hint_fr_vdec == vdec)
2521 vdec_core->hint_fr_vdec = NULL;
2522
2523 if (vdec->vf_receiver_inst >= 0) {
2524 if (vdec->vfm_map_id[0]) {
2525 vfm_map_remove(vdec->vfm_map_id);
2526 vdec->vfm_map_id[0] = 0;
2527 }
2528 }
2529
2530 atomic_set(&vdec->inrelease, 1);
2531 while ((atomic_read(&vdec->inirq_flag) > 0)
2532 || (atomic_read(&vdec->inirq_thread_flag) > 0))
2533 schedule();
2534
2535#ifdef FRAME_CHECK
2536 vdec_frame_check_exit(vdec);
2537#endif
2538 vdec_fps_clear(vdec->id);
2539 if (atomic_read(&vdec_core->vdec_nr) == 1)
2540 vdec_disable_DMC(vdec);
2541 platform_device_unregister(vdec->dev);
2542	/* Check if the vdec is still in the connected list; if so, delete it */
2543 vdec_connect_list_force_clear(vdec_core, vdec);
2544 pr_debug("vdec_release instance %p, total %d\n", vdec,
2545 atomic_read(&vdec_core->vdec_nr));
2546 vdec_destroy(vdec);
2547
2548 mutex_lock(&vdec_mutex);
2549 inited_vcodec_num--;
2550 mutex_unlock(&vdec_mutex);
2551
2552}
2553EXPORT_SYMBOL(vdec_release);
2554
2555/* For dual running decoders, vdec_reset is only called on the master vdec.
2556 */
2557int vdec_reset(struct vdec_s *vdec)
2558{
2559 //trace_vdec_reset(vdec); /*DEBUG_TMP*/
2560
2561 vdec_disconnect(vdec);
2562
2563 if (vdec->vframe_provider.name)
2564 vf_unreg_provider(&vdec->vframe_provider);
2565
2566 if ((vdec->slave) && (vdec->slave->vframe_provider.name))
2567 vf_unreg_provider(&vdec->slave->vframe_provider);
2568
2569 if (vdec->reset) {
2570 vdec->reset(vdec);
2571 if (vdec->slave)
2572 vdec->slave->reset(vdec->slave);
2573 }
2574 vdec->mc_loaded = 0;/*clear for reload firmware*/
2575 vdec_input_release(&vdec->input);
2576
2577 vdec_input_init(&vdec->input, vdec);
2578
2579 vdec_input_prepare_bufs(&vdec->input, vdec->sys_info->width,
2580 vdec->sys_info->height);
2581
2582 vf_reg_provider(&vdec->vframe_provider);
2583 vf_notify_receiver(vdec->vf_provider_name,
2584 VFRAME_EVENT_PROVIDER_START, vdec);
2585
2586 if (vdec->slave) {
2587 vf_reg_provider(&vdec->slave->vframe_provider);
2588 vf_notify_receiver(vdec->slave->vf_provider_name,
2589 VFRAME_EVENT_PROVIDER_START, vdec->slave);
2590 vdec->slave->mc_loaded = 0;/*clear for reload firmware*/
2591 }
2592
2593 vdec_connect(vdec);
2594
2595 return 0;
2596}
2597EXPORT_SYMBOL(vdec_reset);
2598
2599int vdec_v4l2_reset(struct vdec_s *vdec, int flag)
2600{
2601 //trace_vdec_reset(vdec); /*DEBUG_TMP*/
2602 pr_debug("vdec_v4l2_reset %d\n", flag);
2603 vdec_disconnect(vdec);
2604 if (flag != 2) {
2605 if (vdec->vframe_provider.name)
2606 vf_unreg_provider(&vdec->vframe_provider);
2607
2608 if ((vdec->slave) && (vdec->slave->vframe_provider.name))
2609 vf_unreg_provider(&vdec->slave->vframe_provider);
2610
2611 if (vdec->reset) {
2612 vdec->reset(vdec);
2613 if (vdec->slave)
2614 vdec->slave->reset(vdec->slave);
2615 }
2616 vdec->mc_loaded = 0;/*clear for reload firmware*/
2617
2618 vdec_input_release(&vdec->input);
2619
2620 vdec_input_init(&vdec->input, vdec);
2621
2622 vdec_input_prepare_bufs(&vdec->input, vdec->sys_info->width,
2623 vdec->sys_info->height);
2624
2625 vf_reg_provider(&vdec->vframe_provider);
2626 vf_notify_receiver(vdec->vf_provider_name,
2627 VFRAME_EVENT_PROVIDER_START, vdec);
2628
2629 if (vdec->slave) {
2630 vf_reg_provider(&vdec->slave->vframe_provider);
2631 vf_notify_receiver(vdec->slave->vf_provider_name,
2632 VFRAME_EVENT_PROVIDER_START, vdec->slave);
2633 vdec->slave->mc_loaded = 0;/*clear for reload firmware*/
2634 }
2635 } else {
2636 if (vdec->reset) {
2637 vdec->reset(vdec);
2638 if (vdec->slave)
2639 vdec->slave->reset(vdec->slave);
2640 }
2641 }
2642
2643 vdec_connect(vdec);
2644
2645 vdec_frame_check_init(vdec);
2646
2647 return 0;
2648}
2649EXPORT_SYMBOL(vdec_v4l2_reset);
2650
2651
2652void vdec_free_cmabuf(void)
2653{
2654 mutex_lock(&vdec_mutex);
2655
2656 /*if (inited_vcodec_num > 0) {
2657 mutex_unlock(&vdec_mutex);
2658 return;
2659 }*/
2660 mutex_unlock(&vdec_mutex);
2661}
2662
2663void vdec_core_request(struct vdec_s *vdec, unsigned long mask)
2664{
2665 vdec->core_mask |= mask;
2666
2667 if (vdec->slave)
2668 vdec->slave->core_mask |= mask;
2669 if (vdec_core->parallel_dec == 1) {
2670 if (mask & CORE_MASK_COMBINE)
2671 vdec_core->vdec_combine_flag++;
2672 }
2673
2674}
2675EXPORT_SYMBOL(vdec_core_request);
2676
2677int vdec_core_release(struct vdec_s *vdec, unsigned long mask)
2678{
2679 vdec->core_mask &= ~mask;
2680
2681 if (vdec->slave)
2682 vdec->slave->core_mask &= ~mask;
2683 if (vdec_core->parallel_dec == 1) {
2684 if (mask & CORE_MASK_COMBINE)
2685 vdec_core->vdec_combine_flag--;
2686 }
2687 return 0;
2688}
2689EXPORT_SYMBOL(vdec_core_release);
2690
2691bool vdec_core_with_input(unsigned long mask)
2692{
2693 enum vdec_type_e type;
2694
2695 for (type = VDEC_1; type < VDEC_MAX; type++) {
2696 if ((mask & (1 << type)) && cores_with_input[type])
2697 return true;
2698 }
2699
2700 return false;
2701}
2702
2703void vdec_core_finish_run(struct vdec_s *vdec, unsigned long mask)
2704{
2705 unsigned long i;
2706 unsigned long t = mask;
2707 mutex_lock(&vdec_mutex);
2708 while (t) {
2709 i = __ffs(t);
2710 clear_bit(i, &vdec->active_mask);
2711 t &= ~(1 << i);
2712 }
2713
2714 if (vdec->active_mask == 0)
2715 vdec_set_status(vdec, VDEC_STATUS_CONNECTED);
2716
2717 mutex_unlock(&vdec_mutex);
2718}
2719EXPORT_SYMBOL(vdec_core_finish_run);
2720/*
2721 * find what core resources are available for vdec
2722 */
2723static unsigned long vdec_schedule_mask(struct vdec_s *vdec,
2724 unsigned long active_mask)
2725{
2726 unsigned long mask = vdec->core_mask &
2727 ~CORE_MASK_COMBINE;
2728
2729 if (vdec->core_mask & CORE_MASK_COMBINE) {
2730 /* combined cores must be granted together */
2731 if ((mask & ~active_mask) == mask)
2732 return mask;
2733 else
2734 return 0;
2735 } else
2736 return mask & ~vdec->sched_mask & ~active_mask;
2737}
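/*
 * Worked example (core mask names are illustrative): a decoder that requested
 * CORE_MASK_VDEC_1 | CORE_MASK_HEVC | CORE_MASK_COMBINE is only granted a
 * non-zero mask when *both* cores are free in active_mask; without
 * CORE_MASK_COMBINE each core can be granted independently as it frees up.
 */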
2738
2739/*
2740 *Decoder callback
2741 * Each decoder instance uses this callback to notify status change, e.g. when
2742 * a decoder has finished using its HW resources.
2743 * A sample callback from a decoder driver looks like the following:
2744 *
2745 * if (hw->vdec_cb) {
2746 * vdec_set_next_status(vdec, VDEC_STATUS_CONNECTED);
2747 * hw->vdec_cb(vdec, hw->vdec_cb_arg);
2748 * }
2749 */
2750static void vdec_callback(struct vdec_s *vdec, void *data)
2751{
2752 struct vdec_core_s *core = (struct vdec_core_s *)data;
2753
2754#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2755 vdec_profile(vdec, VDEC_PROFILE_EVENT_CB);
2756#endif
2757
2758 up(&core->sem);
2759}
2760
2761static irqreturn_t vdec_isr(int irq, void *dev_id)
2762{
2763 struct vdec_isr_context_s *c =
2764 (struct vdec_isr_context_s *)dev_id;
2765 struct vdec_s *vdec = vdec_core->last_vdec;
2766 irqreturn_t ret = IRQ_HANDLED;
2767
2768 if (vdec_core->parallel_dec == 1) {
2769 if (irq == vdec_core->isr_context[VDEC_IRQ_0].irq)
2770 vdec = vdec_core->active_hevc;
2771 else if (irq == vdec_core->isr_context[VDEC_IRQ_1].irq)
2772 vdec = vdec_core->active_vdec;
2773 else
2774 vdec = NULL;
2775 }
2776
2777 if (vdec) {
2778 if (atomic_read(&vdec->inrelease) > 0)
2779 return ret;
2780 atomic_set(&vdec->inirq_flag, 1);
2781 vdec->isr_ns = local_clock();
2782 }
2783 if (c->dev_isr) {
2784 ret = c->dev_isr(irq, c->dev_id);
2785 goto isr_done;
2786 }
2787
2788 if ((c != &vdec_core->isr_context[VDEC_IRQ_0]) &&
2789 (c != &vdec_core->isr_context[VDEC_IRQ_1]) &&
2790 (c != &vdec_core->isr_context[VDEC_IRQ_HEVC_BACK])) {
2791#if 0
2792 pr_warn("vdec interrupt w/o a valid receiver\n");
2793#endif
2794 goto isr_done;
2795 }
2796
2797 if (!vdec) {
2798#if 0
2799 pr_warn("vdec interrupt w/o an active instance running. core = %p\n",
2800 core);
2801#endif
2802 goto isr_done;
2803 }
2804
2805 if (!vdec->irq_handler) {
2806#if 0
2807 pr_warn("vdec instance has no irq handle.\n");
2808#endif
2809 goto isr_done;
2810 }
2811
2812 ret = vdec->irq_handler(vdec, c->index);
2813isr_done:
2814 if (vdec)
2815 atomic_set(&vdec->inirq_flag, 0);
2816 return ret;
2817}
2818
2819static irqreturn_t vdec_thread_isr(int irq, void *dev_id)
2820{
2821 struct vdec_isr_context_s *c =
2822 (struct vdec_isr_context_s *)dev_id;
2823 struct vdec_s *vdec = vdec_core->last_vdec;
2824 irqreturn_t ret = IRQ_HANDLED;
2825
2826 if (vdec_core->parallel_dec == 1) {
2827 if (irq == vdec_core->isr_context[VDEC_IRQ_0].irq)
2828 vdec = vdec_core->active_hevc;
2829 else if (irq == vdec_core->isr_context[VDEC_IRQ_1].irq)
2830 vdec = vdec_core->active_vdec;
2831 else
2832 vdec = NULL;
2833 }
2834
2835 if (vdec) {
2836 u32 isr2tfn = 0;
2837 if (atomic_read(&vdec->inrelease) > 0)
2838 return ret;
2839 atomic_set(&vdec->inirq_thread_flag, 1);
2840 vdec->tfn_ns = local_clock();
2841 isr2tfn = vdec->tfn_ns - vdec->isr_ns;
2842 if (isr2tfn > 10000000)
2843 pr_err("!!!!!!! %s vdec_isr to %s took %u ns !!!\n",
2844 vdec->vf_provider_name, __func__, isr2tfn);
2845 }
2846 if (c->dev_threaded_isr) {
2847 ret = c->dev_threaded_isr(irq, c->dev_id);
2848 goto thread_isr_done;
2849 }
2850 if (!vdec)
2851 goto thread_isr_done;
2852
2853 if (!vdec->threaded_irq_handler)
2854 goto thread_isr_done;
2855 ret = vdec->threaded_irq_handler(vdec, c->index);
2856thread_isr_done:
2857 if (vdec)
2858 atomic_set(&vdec->inirq_thread_flag, 0);
2859 return ret;
2860}
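/*
 * With parallel_dec enabled, both ISRs above resolve the owning instance from
 * the interrupt line: VDEC_IRQ_0 is matched against the active HEVC instance
 * and VDEC_IRQ_1 against the active VDEC instance; otherwise the last
 * scheduled vdec is assumed to own the interrupt.
 */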
2861
2862unsigned long vdec_ready_to_run(struct vdec_s *vdec, unsigned long mask)
2863{
2864 unsigned long ready_mask;
2865 struct vdec_input_s *input = &vdec->input;
2866 if ((vdec->status != VDEC_STATUS_CONNECTED) &&
2867 (vdec->status != VDEC_STATUS_ACTIVE))
2868 return false;
2869
2870 if (!vdec->run_ready)
2871 return false;
2872
2873	/* on a crc32 error, block at the error frame */
2874 if (vdec->vfc.err_crc_block)
2875 return false;
2876
2877 if ((vdec->slave || vdec->master) &&
2878 (vdec->sched == 0))
2879 return false;
2880#ifdef VDEC_DEBUG_SUPPORT
2881 inc_profi_count(mask, vdec->check_count);
2882#endif
2883 if (vdec_core_with_input(mask)) {
2884
2885 /* check frame based input underrun */
2886 if (input && !input->eos && input_frame_based(input)
2887 && (!vdec_input_next_chunk(input))) {
2888#ifdef VDEC_DEBUG_SUPPORT
2889 inc_profi_count(mask, vdec->input_underrun_count);
2890#endif
2891 return false;
2892 }
2893 /* check streaming prepare level threshold if not EOS */
2894 if (input && input_stream_based(input) && !input->eos) {
2895 u32 rp, wp, level;
2896
2897 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
2898 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
2899 if (wp < rp)
2900 level = input->size + wp - rp;
2901 else
2902 level = wp - rp;
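			/*
			 * e.g. size 0x100000, rp 0xF0000, wp 0x10000 (wrapped):
			 * level = 0x100000 + 0x10000 - 0xF0000 = 0x20000 bytes
			 * currently buffered
			 */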
2903
2904 if ((level < input->prepare_level) &&
2905 (pts_get_rec_num(PTS_TYPE_VIDEO,
2906 vdec->input.total_rd_count) < 2)) {
2907 vdec->need_more_data |= VDEC_NEED_MORE_DATA;
2908#ifdef VDEC_DEBUG_SUPPORT
2909 inc_profi_count(mask, vdec->input_underrun_count);
2910 if (step_mode & 0x200) {
2911 if ((step_mode & 0xff) == vdec->id) {
2912 step_mode |= 0xff;
2913 return mask;
2914 }
2915 }
2916#endif
2917 return false;
2918 } else if (level > input->prepare_level)
2919 vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
2920 }
2921 }
2922
2923 if (step_mode) {
2924 if ((step_mode & 0xff) != vdec->id)
2925 return 0;
2926 step_mode |= 0xff; /*VDEC_DEBUG_SUPPORT*/
2927 }
2928
2929	/*step_mode &= ~0xff; does not work for an id of 0, removed*/
2930
2931#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2932 vdec_profile(vdec, VDEC_PROFILE_EVENT_CHK_RUN_READY);
2933#endif
2934
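	/*
	 * ask the decoder's own run_ready() hook which of the offered cores it
	 * can actually use this cycle; only bits inside the offered mask are
	 * kept
	 */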
2935 ready_mask = vdec->run_ready(vdec, mask) & mask;
2936#ifdef VDEC_DEBUG_SUPPORT
2937 if (ready_mask != mask)
2938 inc_profi_count(ready_mask ^ mask, vdec->not_run_ready_count);
2939#endif
2940#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2941 if (ready_mask)
2942 vdec_profile(vdec, VDEC_PROFILE_EVENT_RUN_READY);
2943#endif
2944
2945 return ready_mask;
2946}
2947
2948/* bridge on/off vdec's interrupt processing to vdec core */
2949static void vdec_route_interrupt(struct vdec_s *vdec, unsigned long mask,
2950 bool enable)
2951{
2952 enum vdec_type_e type;
2953
2954 for (type = VDEC_1; type < VDEC_MAX; type++) {
2955 if (mask & (1 << type)) {
2956 struct vdec_isr_context_s *c =
2957 &vdec_core->isr_context[cores_int[type]];
2958 if (enable)
2959 c->vdec = vdec;
2960 else if (c->vdec == vdec)
2961 c->vdec = NULL;
2962 }
2963 }
2964}
2965
2966/*
2967 * Set up secure protection for each decoder instance running.
2968 * Note: The operation from REE side only resets memory access
2969 * to a default policy and even a non_secure type will still be
2970 * changed to secure type automatically when secure source is
2971 * detected inside TEE.
2972 * Also perform the need_more_data check and set the flag if the decoder
2973 * is not consuming data.
2974 */
2975void vdec_prepare_run(struct vdec_s *vdec, unsigned long mask)
2976{
2977 struct vdec_input_s *input = &vdec->input;
2978 int secure = (vdec_secure(vdec)) ? DMC_DEV_TYPE_SECURE :
2979 DMC_DEV_TYPE_NON_SECURE;
2980
2981 vdec_route_interrupt(vdec, mask, true);
2982
2983 if (!vdec_core_with_input(mask))
2984 return;
2985
2986	if (secure && vdec_stream_based(vdec) &&
2987			force_nosecure_even_drm) {
2988		secure = 0;
2989	}
2990 if (input->target == VDEC_INPUT_TARGET_VLD)
2991 tee_config_device_secure(DMC_DEV_ID_VDEC, secure);
2992 else if (input->target == VDEC_INPUT_TARGET_HEVC)
2993 tee_config_device_secure(DMC_DEV_ID_HEVC, secure);
2994
2995 if (vdec_stream_based(vdec) &&
2996 ((vdec->need_more_data & VDEC_NEED_MORE_DATA_RUN) &&
2997 (vdec->need_more_data & VDEC_NEED_MORE_DATA_DIRTY) == 0)) {
2998 vdec->need_more_data |= VDEC_NEED_MORE_DATA;
2999 }
3000
3001 vdec->need_more_data |= VDEC_NEED_MORE_DATA_RUN;
3002 vdec->need_more_data &= ~VDEC_NEED_MORE_DATA_DIRTY;
3003}
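/*
 * Note on the need_more_data handshake above (best-effort reading of the
 * flags): VDEC_NEED_MORE_DATA_RUN marks that a run was started; if another
 * run starts while VDEC_NEED_MORE_DATA_DIRTY (presumably set elsewhere when
 * fresh data arrives) is still clear, VDEC_NEED_MORE_DATA is raised to flag
 * input starvation.
 */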
3004
3005
3006/* vdec_core_thread manages all decoder instances in the active list. When
3007 * a vdec is added into the active list, it can only be in one of two states:
3008 * VDEC_STATUS_CONNECTED (the decoder does not own HW resources and is ready to run)
3009 * VDEC_STATUS_ACTIVE (the decoder owns HW resources and is running).
3010 * Removing a decoder from the active list is only performed within the core thread.
3011 * Adding a decoder into the active list is performed from a user thread.
3012 */
3013static int vdec_core_thread(void *data)
3014{
3015 struct vdec_core_s *core = (struct vdec_core_s *)data;
3016 struct sched_param param = {.sched_priority = MAX_RT_PRIO/2};
3017 unsigned long flags;
3018 int i;
3019
3020 sched_setscheduler(current, SCHED_FIFO, &param);
3021
3022 allow_signal(SIGTERM);
3023
3024 while (down_interruptible(&core->sem) == 0) {
3025 struct vdec_s *vdec, *tmp, *worker;
3026 unsigned long sched_mask = 0;
3027 LIST_HEAD(disconnecting_list);
3028
3029 if (kthread_should_stop())
3030 break;
3031 mutex_lock(&vdec_mutex);
3032
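		/*
		 * refresh power_ref_mask from the per-core power reference
		 * counts, e.g. power_ref_count[VDEC_HEVC] > 0 sets the
		 * VDEC_HEVC bit and a zero count clears it
		 */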
3033 if (core->parallel_dec == 1) {
3034 for (i = VDEC_1; i < VDEC_MAX; i++) {
3035 core->power_ref_mask =
3036 core->power_ref_count[i] > 0 ?
3037 (core->power_ref_mask | (1 << i)) :
3038 (core->power_ref_mask & ~(1 << i));
3039 }
3040 }
3041 /* clean up previous active vdec's input */
3042 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
3043 unsigned long mask = vdec->sched_mask &
3044 (vdec->active_mask ^ vdec->sched_mask);
3045
3046 vdec_route_interrupt(vdec, mask, false);
3047
3048#ifdef VDEC_DEBUG_SUPPORT
3049 update_profi_clk_stop(vdec, mask, get_current_clk());
3050#endif
3051 /*
3052 * If decoder released some core resources (mask), then
3053 * check if these core resources are associated
3054 * with any input side and do input clean up accordingly
3055 */
3056 if (vdec_core_with_input(mask)) {
3057 struct vdec_input_s *input = &vdec->input;
3058 while (!list_empty(
3059 &input->vframe_chunk_list)) {
3060 struct vframe_chunk_s *chunk =
3061 vdec_input_next_chunk(input);
3062 if (chunk && (chunk->flag &
3063 VFRAME_CHUNK_FLAG_CONSUMED))
3064 vdec_input_release_chunk(input,
3065 chunk);
3066 else
3067 break;
3068 }
3069
3070 vdec_save_input_context(vdec);
3071 }
3072
3073 vdec->sched_mask &= ~mask;
3074 core->sched_mask &= ~mask;
3075 }
3076 vdec_update_buff_status();
3077 /*
3078 *todo:
3079 * this is the case when the decoder is in active mode and
3080 * the system side wants to stop it. Currently we rely on
3081 * the decoder instance to go back to VDEC_STATUS_CONNECTED
3082		 * from VDEC_STATUS_ACTIVE on its own. However, if for some
3083		 * reason the decoder cannot exit by itself (dead decoding
3084		 * or whatever), then we may have to add another vdec API
3085		 * to kill the vdec, release its HW resources and make it
3086 * become inactive again.
3087 * if ((core->active_vdec) &&
3088 * (core->active_vdec->status == VDEC_STATUS_DISCONNECTED)) {
3089 * }
3090 */
3091
3092 /* check disconnected decoders */
3093 flags = vdec_core_lock(vdec_core);
3094 list_for_each_entry_safe(vdec, tmp,
3095 &core->connected_vdec_list, list) {
3096 if ((vdec->status == VDEC_STATUS_CONNECTED) &&
3097 (vdec->next_status == VDEC_STATUS_DISCONNECTED)) {
3098 if (core->parallel_dec == 1) {
3099 if (vdec_core->active_hevc == vdec)
3100 vdec_core->active_hevc = NULL;
3101 if (vdec_core->active_vdec == vdec)
3102 vdec_core->active_vdec = NULL;
3103 }
3104 if (core->last_vdec == vdec)
3105 core->last_vdec = NULL;
3106 list_move(&vdec->list, &disconnecting_list);
3107 }
3108 }
3109 vdec_core_unlock(vdec_core, flags);
3110 mutex_unlock(&vdec_mutex);
3111 /* elect next vdec to be scheduled */
3112 vdec = core->last_vdec;
3113 if (vdec) {
3114 vdec = list_entry(vdec->list.next, struct vdec_s, list);
3115 list_for_each_entry_from(vdec,
3116 &core->connected_vdec_list, list) {
3117 sched_mask = vdec_schedule_mask(vdec,
3118 core->sched_mask);
3119 if (!sched_mask)
3120 continue;
3121 sched_mask = vdec_ready_to_run(vdec,
3122 sched_mask);
3123 if (sched_mask)
3124 break;
3125 }
3126
3127 if (&vdec->list == &core->connected_vdec_list)
3128 vdec = NULL;
3129 }
3130
3131 if (!vdec) {
3132 /* search from beginning */
3133 list_for_each_entry(vdec,
3134 &core->connected_vdec_list, list) {
3135 sched_mask = vdec_schedule_mask(vdec,
3136 core->sched_mask);
3137 if (vdec == core->last_vdec) {
3138 if (!sched_mask) {
3139 vdec = NULL;
3140 break;
3141 }
3142
3143 sched_mask = vdec_ready_to_run(vdec,
3144 sched_mask);
3145
3146 if (!sched_mask) {
3147 vdec = NULL;
3148 break;
3149 }
3150 break;
3151 }
3152
3153 if (!sched_mask)
3154 continue;
3155
3156 sched_mask = vdec_ready_to_run(vdec,
3157 sched_mask);
3158 if (sched_mask)
3159 break;
3160 }
3161
3162 if (&vdec->list == &core->connected_vdec_list)
3163 vdec = NULL;
3164 }
3165
3166 worker = vdec;
3167
3168 if (vdec) {
3169 unsigned long mask = sched_mask;
3170 unsigned long i;
3171
3172 /* setting active_mask should be atomic.
3173 * it can be modified by decoder driver callbacks.
3174 */
3175 while (sched_mask) {
3176 i = __ffs(sched_mask);
3177 set_bit(i, &vdec->active_mask);
3178 sched_mask &= ~(1 << i);
3179 }
3180
3181 /* vdec's sched_mask is only set from core thread */
3182 vdec->sched_mask |= mask;
3183 if (core->last_vdec) {
3184 if ((core->last_vdec != vdec) &&
3185 (core->last_vdec->mc_type != vdec->mc_type))
3186 vdec->mc_loaded = 0;/*clear for reload firmware*/
3187 } else
3188 vdec->mc_loaded = 0;
3189 core->last_vdec = vdec;
3190 if (debug & 2)
3191				vdec->mc_loaded = 0;/*always reload firmware*/
3192 vdec_set_status(vdec, VDEC_STATUS_ACTIVE);
3193
3194 core->sched_mask |= mask;
3195 if (core->parallel_dec == 1)
3196 vdec_save_active_hw(vdec);
3197#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
3198 vdec_profile(vdec, VDEC_PROFILE_EVENT_RUN);
3199#endif
3200 vdec_prepare_run(vdec, mask);
3201#ifdef VDEC_DEBUG_SUPPORT
3202 inc_profi_count(mask, vdec->run_count);
3203 update_profi_clk_run(vdec, mask, get_current_clk());
3204#endif
3205 vdec->run(vdec, mask, vdec_callback, core);
3206
3207
3208 /* we have some cores scheduled, keep working until
3209 * all vdecs are checked with no cores to schedule
3210 */
3211 if (core->parallel_dec == 1) {
3212 if (vdec_core->vdec_combine_flag == 0)
3213 up(&core->sem);
3214 } else
3215 up(&core->sem);
3216 }
3217
3218 /* remove disconnected decoder from active list */
3219 list_for_each_entry_safe(vdec, tmp, &disconnecting_list, list) {
3220 list_del(&vdec->list);
3221 vdec_set_status(vdec, VDEC_STATUS_DISCONNECTED);
3222 /*core->last_vdec = NULL;*/
3223 complete(&vdec->inactive_done);
3224 }
3225
3226		/* if there is no new work scheduled and nothing
3227		 * is running, back off briefly (1~2ms) before re-checking
3228		 */
3229 if (core->parallel_dec == 1) {
3230 if (vdec_core->vdec_combine_flag == 0) {
3231 if ((!worker) &&
3232 ((core->sched_mask != core->power_ref_mask)) &&
3233 (atomic_read(&vdec_core->vdec_nr) > 0) &&
3234 ((core->buff_flag | core->stream_buff_flag) &
3235 (core->sched_mask ^ core->power_ref_mask))) {
3236 usleep_range(1000, 2000);
3237 up(&core->sem);
3238 }
3239 } else {
3240 if ((!worker) && (!core->sched_mask) &&
3241 (atomic_read(&vdec_core->vdec_nr) > 0) &&
3242 (core->buff_flag | core->stream_buff_flag)) {
3243 usleep_range(1000, 2000);
3244 up(&core->sem);
3245 }
3246 }
3247 } else if ((!worker) && (!core->sched_mask) && (atomic_read(&vdec_core->vdec_nr) > 0)) {
3248 usleep_range(1000, 2000);
3249 up(&core->sem);
3250 }
3251
3252 }
3253
3254 return 0;
3255}
3256
3257#if 1 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8 */
3258static bool test_hevc(u32 decomp_addr, u32 us_delay)
3259{
3260 int i;
3261
3262 /* SW_RESET IPP */
3263 WRITE_VREG(HEVCD_IPP_TOP_CNTL, 1);
3264 WRITE_VREG(HEVCD_IPP_TOP_CNTL, 0);
3265
3266 /* initialize all canvas table */
3267 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0);
3268 for (i = 0; i < 32; i++)
3269 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3270 0x1 | (i << 8) | decomp_addr);
3271 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 1);
3272 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR, (0 << 8) | (0<<1) | 1);
3273 for (i = 0; i < 32; i++)
3274 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3275
3276 /* Initialize mcrcc */
3277 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2);
3278 WRITE_VREG(HEVCD_MCRCC_CTL2, 0x0);
3279 WRITE_VREG(HEVCD_MCRCC_CTL3, 0x0);
3280 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
3281
3282 /* Decomp initialize */
3283 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x0);
3284 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3285
3286 /* Frame level initialization */
3287 WRITE_VREG(HEVCD_IPP_TOP_FRMCONFIG, 0x100 | (0x100 << 16));
3288 WRITE_VREG(HEVCD_IPP_TOP_TILECONFIG3, 0x0);
3289 WRITE_VREG(HEVCD_IPP_TOP_LCUCONFIG, 0x1 << 5);
3290 WRITE_VREG(HEVCD_IPP_BITDEPTH_CONFIG, 0x2 | (0x2 << 2));
3291
3292 WRITE_VREG(HEVCD_IPP_CONFIG, 0x0);
3293 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, 0x0);
3294
3295 /* Enable SWIMP mode */
3296 WRITE_VREG(HEVCD_IPP_SWMPREDIF_CONFIG, 0x1);
3297
3298 /* Enable frame */
3299 WRITE_VREG(HEVCD_IPP_TOP_CNTL, 0x2);
3300 WRITE_VREG(HEVCD_IPP_TOP_FRMCTL, 0x1);
3301
3302 /* Send SW-command CTB info */
3303 WRITE_VREG(HEVCD_IPP_SWMPREDIF_CTBINFO, 0x1 << 31);
3304
3305 /* Send PU_command */
3306 WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO0, (0x4 << 9) | (0x4 << 16));
3307 WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO1, 0x1 << 3);
3308 WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO2, 0x0);
3309 WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO3, 0x0);
3310
3311 udelay(us_delay);
3312
3313 WRITE_VREG(HEVCD_IPP_DBG_SEL, 0x2 << 4);
3314
3315 return (READ_VREG(HEVCD_IPP_DBG_DATA) & 3) == 1;
3316}
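/*
 * test_hevc() above drives a minimal SW-IMP prediction request through the
 * HEVC IPP block and checks its debug status; it is used by vdec_poweron()
 * below as a probe for the (apparently GXBB-era) power-up workaround gated
 * by hevc_workaround_needed().
 */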
3317
3318void vdec_power_reset(void)
3319{
3320 /* enable vdec1 isolation */
3321 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3322 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0xc0);
3323 /* power off vdec1 memories */
3324 WRITE_VREG(DOS_MEM_PD_VDEC, 0xffffffffUL);
3325 /* vdec1 power off */
3326 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3327 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0xc);
3328
3329 if (has_vdec2()) {
3330 /* enable vdec2 isolation */
3331 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3332 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x300);
3333 /* power off vdec2 memories */
3334 WRITE_VREG(DOS_MEM_PD_VDEC2, 0xffffffffUL);
3335 /* vdec2 power off */
3336 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3337 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0x30);
3338 }
3339
3340 if (has_hdec()) {
3341 /* enable hcodec isolation */
3342 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3343 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x30);
3344 /* power off hcodec memories */
3345 WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
3346 /* hcodec power off */
3347 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3348 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 3);
3349 }
3350
3351 if (has_hevc_vdec()) {
3352 /* enable hevc isolation */
3353 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3354 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0xc00);
3355 /* power off hevc memories */
3356 WRITE_VREG(DOS_MEM_PD_HEVC, 0xffffffffUL);
3357 /* hevc power off */
3358 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3359 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0xc0);
3360 }
3361}
3362EXPORT_SYMBOL(vdec_power_reset);
3363
3364void vdec_poweron(enum vdec_type_e core)
3365{
3366 void *decomp_addr = NULL;
3367 dma_addr_t decomp_dma_addr;
3368 u32 decomp_addr_aligned = 0;
3369 int hevc_loop = 0;
3370 int sleep_val, iso_val;
3371 bool is_power_ctrl_ver2 = false;
3372
3373 if (core >= VDEC_MAX)
3374 return;
3375
3376 mutex_lock(&vdec_mutex);
3377
3378 vdec_core->power_ref_count[core]++;
3379 if (vdec_core->power_ref_count[core] > 1) {
3380 mutex_unlock(&vdec_mutex);
3381 return;
3382 }
3383
3384 if (vdec_on(core)) {
3385 mutex_unlock(&vdec_mutex);
3386 return;
3387 }
3388
3389 is_power_ctrl_ver2 =
3390 ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3391 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) ? true : false;
3392
3393 if (hevc_workaround_needed() &&
3394 (core == VDEC_HEVC)) {
3395 decomp_addr = codec_mm_dma_alloc_coherent(MEM_NAME,
3396 SZ_64K + SZ_4K, &decomp_dma_addr, GFP_KERNEL, 0);
3397
3398 if (decomp_addr) {
3399 decomp_addr_aligned = ALIGN(decomp_dma_addr, SZ_64K);
3400 memset((u8 *)decomp_addr +
3401 (decomp_addr_aligned - decomp_dma_addr),
3402 0xff, SZ_4K);
3403 } else
3404 pr_err("vdec: alloc HEVC gxbb decomp buffer failed.\n");
3405 }
3406
3407 if (core == VDEC_1) {
3408 sleep_val = is_power_ctrl_ver2 ? 0x2 : 0xc;
3409 iso_val = is_power_ctrl_ver2 ? 0x2 : 0xc0;
3410
3411 /* vdec1 power on */
3412#ifdef CONFIG_AMLOGIC_POWER
3413 if (is_support_power_ctrl()) {
3414 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3415 mutex_unlock(&vdec_mutex);
3416 pr_err("vdec-1 power on ctrl sleep fail.\n");
3417 return;
3418 }
3419 } else {
3420 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3421 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3422 }
3423#else
3424 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3425 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3426#endif
3427 /* wait 10uS */
3428 udelay(10);
3429 /* vdec1 soft reset */
3430 WRITE_VREG(DOS_SW_RESET0, 0xfffffffc);
3431 WRITE_VREG(DOS_SW_RESET0, 0);
3432 /* enable vdec1 clock */
3433 /*
3434		 * add the power-on vdec clock level setting (only for the m8 chip);
3435		 * m8baby and m8m2 can dynamically adjust the vdec clock, so
3436		 * power on with the default clock level
3437 */
3438 amports_switch_gate("clk_vdec_mux", 1);
3439 vdec_clock_hi_enable();
3440 /* power up vdec memories */
3441 WRITE_VREG(DOS_MEM_PD_VDEC, 0);
3442
3443 /* remove vdec1 isolation */
3444#ifdef CONFIG_AMLOGIC_POWER
3445 if (is_support_power_ctrl()) {
3446 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3447 mutex_unlock(&vdec_mutex);
3448 pr_err("vdec-1 power on ctrl iso fail.\n");
3449 return;
3450 }
3451 } else {
3452 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3453 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3454 }
3455#else
3456 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3457 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3458#endif
3459 /* reset DOS top registers */
3460 WRITE_VREG(DOS_VDEC_MCRCC_STALL_CTRL, 0);
3461 } else if (core == VDEC_2) {
3462 if (has_vdec2()) {
3463 /* vdec2 power on */
3464 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3465 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3466 ~0x30);
3467 /* wait 10uS */
3468 udelay(10);
3469 /* vdec2 soft reset */
3470 WRITE_VREG(DOS_SW_RESET2, 0xffffffff);
3471 WRITE_VREG(DOS_SW_RESET2, 0);
3472			/* enable vdec2 clock */
3473 vdec2_clock_hi_enable();
3474 /* power up vdec memories */
3475 WRITE_VREG(DOS_MEM_PD_VDEC2, 0);
3476 /* remove vdec2 isolation */
3477 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3478 READ_AOREG(AO_RTI_GEN_PWR_ISO0) &
3479 ~0x300);
3480 /* reset DOS top registers */
3481 WRITE_VREG(DOS_VDEC2_MCRCC_STALL_CTRL, 0);
3482 }
3483 } else if (core == VDEC_HCODEC) {
3484 if (has_hdec()) {
3485 sleep_val = is_power_ctrl_ver2 ? 0x1 : 0x3;
3486 iso_val = is_power_ctrl_ver2 ? 0x1 : 0x30;
3487
3488 /* hcodec power on */
3489#ifdef CONFIG_AMLOGIC_POWER
3490 if (is_support_power_ctrl()) {
3491 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3492 mutex_unlock(&vdec_mutex);
3493 pr_err("hcodec power on ctrl sleep fail.\n");
3494 return;
3495 }
3496 } else {
3497 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3498 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3499 }
3500#else
3501 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3502 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3503#endif
3504 /* wait 10uS */
3505 udelay(10);
3506 /* hcodec soft reset */
3507 WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
3508 WRITE_VREG(DOS_SW_RESET1, 0);
3509 /* enable hcodec clock */
3510 hcodec_clock_enable();
3511 /* power up hcodec memories */
3512 WRITE_VREG(DOS_MEM_PD_HCODEC, 0);
3513 /* remove hcodec isolation */
3514#ifdef CONFIG_AMLOGIC_POWER
3515 if (is_support_power_ctrl()) {
3516 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3517 mutex_unlock(&vdec_mutex);
3518 pr_err("hcodec power on ctrl iso fail.\n");
3519 return;
3520 }
3521 } else {
3522 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3523 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3524 }
3525#else
3526 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3527 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3528#endif
3529 }
3530 } else if (core == VDEC_HEVC) {
3531 if (has_hevc_vdec()) {
3532 bool hevc_fixed = false;
3533
3534 sleep_val = is_power_ctrl_ver2 ? 0x4 : 0xc0;
3535 iso_val = is_power_ctrl_ver2 ? 0x4 : 0xc00;
3536
3537 while (!hevc_fixed) {
3538 /* hevc power on */
3539#ifdef CONFIG_AMLOGIC_POWER
3540 if (is_support_power_ctrl()) {
3541 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3542 mutex_unlock(&vdec_mutex);
3543 pr_err("hevc power on ctrl sleep fail.\n");
3544 return;
3545 }
3546 } else {
3547 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3548 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3549 }
3550#else
3551 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3552 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3553#endif
3554 /* wait 10uS */
3555 udelay(10);
3556 /* hevc soft reset */
3557 WRITE_VREG(DOS_SW_RESET3, 0xffffffff);
3558 WRITE_VREG(DOS_SW_RESET3, 0);
3559 /* enable hevc clock */
3560 amports_switch_gate("clk_hevc_mux", 1);
3561 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
3562 amports_switch_gate("clk_hevcb_mux", 1);
3563 hevc_clock_hi_enable();
3564 hevc_back_clock_hi_enable();
3565 /* power up hevc memories */
3566 WRITE_VREG(DOS_MEM_PD_HEVC, 0);
3567 /* remove hevc isolation */
3568#ifdef CONFIG_AMLOGIC_POWER
3569 if (is_support_power_ctrl()) {
3570 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3571 mutex_unlock(&vdec_mutex);
3572 pr_err("hevc power on ctrl iso fail.\n");
3573 return;
3574 }
3575 } else {
3576 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3577 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3578 }
3579#else
3580 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3581 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3582#endif
3583 if (!hevc_workaround_needed())
3584 break;
3585
3586 if (decomp_addr)
3587 hevc_fixed = test_hevc(
3588 decomp_addr_aligned, 20);
3589
3590 if (!hevc_fixed) {
3591 hevc_loop++;
3592
3593 mutex_unlock(&vdec_mutex);
3594
3595 if (hevc_loop >= HEVC_TEST_LIMIT) {
3596 pr_warn("hevc power sequence over limit\n");
3597 pr_warn("=====================================================\n");
3598 pr_warn(" This chip is identified to have HW failure.\n");
3599 pr_warn(" Please contact sqa-platform to replace the platform.\n");
3600 pr_warn("=====================================================\n");
3601
3602 panic("Force panic for chip detection !!!\n");
3603
3604 break;
3605 }
3606
3607 vdec_poweroff(VDEC_HEVC);
3608
3609 mdelay(10);
3610
3611 mutex_lock(&vdec_mutex);
3612 }
3613 }
3614
3615 if (hevc_loop > hevc_max_reset_count)
3616 hevc_max_reset_count = hevc_loop;
3617
3618 WRITE_VREG(DOS_SW_RESET3, 0xffffffff);
3619 udelay(10);
3620 WRITE_VREG(DOS_SW_RESET3, 0);
3621 }
3622 }
3623
3624 if (decomp_addr)
3625 codec_mm_dma_free_coherent(MEM_NAME,
3626 SZ_64K + SZ_4K, decomp_addr, decomp_dma_addr, 0);
3627
3628 mutex_unlock(&vdec_mutex);
3629}
3630EXPORT_SYMBOL(vdec_poweron);
3631
3632void vdec_poweroff(enum vdec_type_e core)
3633{
3634 int sleep_val, iso_val;
3635 bool is_power_ctrl_ver2 = false;
3636
3637 if (core >= VDEC_MAX)
3638 return;
3639
3640 mutex_lock(&vdec_mutex);
3641
3642 vdec_core->power_ref_count[core]--;
3643 if (vdec_core->power_ref_count[core] > 0) {
3644 mutex_unlock(&vdec_mutex);
3645 return;
3646 }
3647
3648 is_power_ctrl_ver2 =
3649 ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3650 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) ? true : false;
3651
3652 if (core == VDEC_1) {
3653 sleep_val = is_power_ctrl_ver2 ? 0x2 : 0xc;
3654 iso_val = is_power_ctrl_ver2 ? 0x2 : 0xc0;
3655
3656 /* enable vdec1 isolation */
3657#ifdef CONFIG_AMLOGIC_POWER
3658 if (is_support_power_ctrl()) {
3659 if (power_ctrl_iso_mask(false, iso_val, 0)) {
3660 mutex_unlock(&vdec_mutex);
3661 pr_err("vdec-1 power off ctrl iso fail.\n");
3662 return;
3663 }
3664 } else {
3665 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3666 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
3667 }
3668#else
3669 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3670 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
3671#endif
3672 /* power off vdec1 memories */
3673 WRITE_VREG(DOS_MEM_PD_VDEC, 0xffffffffUL);
3674 /* disable vdec1 clock */
3675 vdec_clock_off();
3676 /* vdec1 power off */
3677#ifdef CONFIG_AMLOGIC_POWER
3678 if (is_support_power_ctrl()) {
3679 if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
3680 mutex_unlock(&vdec_mutex);
3681 pr_err("vdec-1 power off ctrl sleep fail.\n");
3682 return;
3683 }
3684 } else {
3685 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3686 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
3687 }
3688#else
3689 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3690 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
3691#endif
3692 } else if (core == VDEC_2) {
3693 if (has_vdec2()) {
3694 /* enable vdec2 isolation */
3695 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3696 READ_AOREG(AO_RTI_GEN_PWR_ISO0) |
3697 0x300);
3698 /* power off vdec2 memories */
3699 WRITE_VREG(DOS_MEM_PD_VDEC2, 0xffffffffUL);
3700 /* disable vdec2 clock */
3701 vdec2_clock_off();
3702 /* vdec2 power off */
3703 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3704 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) |
3705 0x30);
3706 }
3707 } else if (core == VDEC_HCODEC) {
3708 if (has_hdec()) {
3709 sleep_val = is_power_ctrl_ver2 ? 0x1 : 0x3;
3710 iso_val = is_power_ctrl_ver2 ? 0x1 : 0x30;
3711
3712 /* enable hcodec isolation */
3713#ifdef CONFIG_AMLOGIC_POWER
3714 if (is_support_power_ctrl()) {
3715 if (power_ctrl_iso_mask(false, iso_val, 0)) {
3716 mutex_unlock(&vdec_mutex);
3717 pr_err("hcodec power off ctrl iso fail.\n");
3718 return;
3719 }
3720 } else {
3721 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3722 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
3723 }
3724#else
3725 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3726 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
3727#endif
3728 /* power off hcodec memories */
3729 WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
3730 /* disable hcodec clock */
3731 hcodec_clock_off();
3732 /* hcodec power off */
3733#ifdef CONFIG_AMLOGIC_POWER
3734 if (is_support_power_ctrl()) {
3735 if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
3736 mutex_unlock(&vdec_mutex);
3737 pr_err("hcodec power off ctrl sleep fail.\n");
3738 return;
3739 }
3740 } else {
3741 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3742 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
3743 }
3744#else
3745 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3746 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
3747#endif
3748 }
3749 } else if (core == VDEC_HEVC) {
3750 if (has_hevc_vdec()) {
3751 sleep_val = is_power_ctrl_ver2 ? 0x4 : 0xc0;
3752 iso_val = is_power_ctrl_ver2 ? 0x4 : 0xc00;
3753
3754 if (no_powerdown == 0) {
3755 /* enable hevc isolation */
3756#ifdef CONFIG_AMLOGIC_POWER
3757 if (is_support_power_ctrl()) {
3758 if (power_ctrl_iso_mask(false, iso_val, 0)) {
3759 mutex_unlock(&vdec_mutex);
3760 pr_err("hevc power off ctrl iso fail.\n");
3761 return;
3762 }
3763 } else {
3764 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3765 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
3766 }
3767#else
3768 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3769 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
3770#endif
3771 /* power off hevc memories */
3772 WRITE_VREG(DOS_MEM_PD_HEVC, 0xffffffffUL);
3773
3774 /* disable hevc clock */
3775 hevc_clock_off();
3776 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
3777 hevc_back_clock_off();
3778
3779 /* hevc power off */
3780#ifdef CONFIG_AMLOGIC_POWER
3781 if (is_support_power_ctrl()) {
3782 if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
3783 mutex_unlock(&vdec_mutex);
3784 pr_err("hevc power off ctrl sleep fail.\n");
3785 return;
3786 }
3787 } else {
3788 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3789 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
3790 }
3791#else
3792 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3793 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
3794#endif
3795 } else {
3796				pr_info("hevc not powered down (no_powerdown set)\n");
3797 hevc_reset_core(NULL);
3798 no_powerdown = 0;
3799 }
3800 }
3801 }
3802 mutex_unlock(&vdec_mutex);
3803}
3804EXPORT_SYMBOL(vdec_poweroff);
3805
3806bool vdec_on(enum vdec_type_e core)
3807{
3808 bool ret = false;
3809
3810 if (core == VDEC_1) {
3811 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3812 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3813 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3814 ? 0x2 : 0xc)) == 0) &&
3815 (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x100))
3816 ret = true;
3817 } else if (core == VDEC_2) {
3818 if (has_vdec2()) {
3819 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & 0x30) == 0) &&
3820 (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x100))
3821 ret = true;
3822 }
3823 } else if (core == VDEC_HCODEC) {
3824 if (has_hdec()) {
3825 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3826 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3827 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3828 ? 0x1 : 0x3)) == 0) &&
3829 (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x1000000))
3830 ret = true;
3831 }
3832 } else if (core == VDEC_HEVC) {
3833 if (has_hevc_vdec()) {
3834 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3835 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3836 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3837 ? 0x4 : 0xc0)) == 0) &&
3838 (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x1000000))
3839 ret = true;
3840 }
3841 }
3842
3843 return ret;
3844}
3845EXPORT_SYMBOL(vdec_on);
3846
3847#elif 0 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON6TVD */
3848void vdec_poweron(enum vdec_type_e core)
3849{
3850 ulong flags;
3851
3852 spin_lock_irqsave(&lock, flags);
3853
3854 if (core == VDEC_1) {
3855 /* vdec1 soft reset */
3856 WRITE_VREG(DOS_SW_RESET0, 0xfffffffc);
3857 WRITE_VREG(DOS_SW_RESET0, 0);
3858 /* enable vdec1 clock */
3859 vdec_clock_enable();
3860 /* reset DOS top registers */
3861 WRITE_VREG(DOS_VDEC_MCRCC_STALL_CTRL, 0);
3862 } else if (core == VDEC_2) {
3863 /* vdec2 soft reset */
3864 WRITE_VREG(DOS_SW_RESET2, 0xffffffff);
3865 WRITE_VREG(DOS_SW_RESET2, 0);
3866 /* enable vdec2 clock */
3867 vdec2_clock_enable();
3868 /* reset DOS top registers */
3869 WRITE_VREG(DOS_VDEC2_MCRCC_STALL_CTRL, 0);
3870 } else if (core == VDEC_HCODEC) {
3871 /* hcodec soft reset */
3872 WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
3873 WRITE_VREG(DOS_SW_RESET1, 0);
3874 /* enable hcodec clock */
3875 hcodec_clock_enable();
3876 }
3877
3878 spin_unlock_irqrestore(&lock, flags);
3879}
3880
3881void vdec_poweroff(enum vdec_type_e core)
3882{
3883 ulong flags;
3884
3885 spin_lock_irqsave(&lock, flags);
3886
3887 if (core == VDEC_1) {
3888 /* disable vdec1 clock */
3889 vdec_clock_off();
3890 } else if (core == VDEC_2) {
3891 /* disable vdec2 clock */
3892 vdec2_clock_off();
3893 } else if (core == VDEC_HCODEC) {
3894 /* disable hcodec clock */
3895 hcodec_clock_off();
3896 }
3897
3898 spin_unlock_irqrestore(&lock, flags);
3899}
3900
3901bool vdec_on(enum vdec_type_e core)
3902{
3903 bool ret = false;
3904
3905 if (core == VDEC_1) {
3906 if (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x100)
3907 ret = true;
3908 } else if (core == VDEC_2) {
3909 if (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x100)
3910 ret = true;
3911 } else if (core == VDEC_HCODEC) {
3912 if (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x1000000)
3913 ret = true;
3914 }
3915
3916 return ret;
3917}
3918#endif
3919
3920int vdec_source_changed(int format, int width, int height, int fps)
3921{
3922 /* todo: add level routines for clock adjustment per chips */
3923 int ret = -1;
3924 static int on_setting;
3925
3926 if (on_setting > 0)
3927		return ret;/*clk change in progress, ignore this change*/
3928
3929 if (vdec_source_get(VDEC_1) == width * height * fps)
3930 return ret;
3931
3932
3933 on_setting = 1;
3934 ret = vdec_source_changed_for_clk_set(format, width, height, fps);
3935 pr_debug("vdec1 video changed to %d x %d %d fps clk->%dMHZ\n",
3936 width, height, fps, vdec_clk_get(VDEC_1));
3937 on_setting = 0;
3938 return ret;
3939
3940}
3941EXPORT_SYMBOL(vdec_source_changed);
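/*
 * Hypothetical usage: a decoder that has just parsed a new sequence header
 * might call vdec_source_changed(VFORMAT_H264, 1920, 1080, 30) so the VDEC_1
 * clock can be re-evaluated for the new width * height * fps load.
 */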
3942
3943void vdec_reset_core(struct vdec_s *vdec)
3944{
3945 unsigned long flags;
3946 unsigned int mask = 0;
3947
3948 mask = 1 << 13; /*bit13: DOS VDEC interface*/
3949 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
3950 mask = 1 << 21; /*bit21: DOS VDEC interface*/
3951
3952 spin_lock_irqsave(&vdec_spin_lock, flags);
3953 codec_dmcbus_write(DMC_REQ_CTRL,
3954 codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
3955 spin_unlock_irqrestore(&vdec_spin_lock, flags);
3956
3957 if (is_cpu_tm2_revb()) {
3958 while (!(codec_dmcbus_read(TM2_REVB_DMC_CHAN_STS)
3959 & mask))
3960 ;
3961 } else {
3962 while (!(codec_dmcbus_read(DMC_CHAN_STS)
3963 & mask))
3964 ;
3965 }
3966 /*
3967 * 2: assist
3968 * 3: vld_reset
3969 * 4: vld_part_reset
3970 * 5: vfifo reset
3971 * 6: iqidct
3972 * 7: mc
3973 * 8: dblk
3974 * 9: pic_dc
3975 * 10: psc
3976 * 11: mcpu
3977 * 12: ccpu
3978 * 13: ddr
3979 * 14: afifo
3980 */
3981 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3982 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) {
3983 WRITE_VREG(DOS_SW_RESET0, (1<<3)|(1<<4)|(1<<5)|(1<<7)|(1<<8)|(1<<9));
3984 } else {
3985 WRITE_VREG(DOS_SW_RESET0,
3986 (1<<3)|(1<<4)|(1<<5));
3987 }
3988 WRITE_VREG(DOS_SW_RESET0, 0);
3989
3990 spin_lock_irqsave(&vdec_spin_lock, flags);
3991 codec_dmcbus_write(DMC_REQ_CTRL,
3992 codec_dmcbus_read(DMC_REQ_CTRL) | mask);
3993 spin_unlock_irqrestore(&vdec_spin_lock, flags);
3994}
3995EXPORT_SYMBOL(vdec_reset_core);
3996
3997void hevc_mmu_dma_check(struct vdec_s *vdec)
3998{
3999 ulong timeout;
4000 u32 data;
4001 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A)
4002 return;
4003 timeout = jiffies + HZ/100;
4004 while (1) {
4005 data = READ_VREG(HEVC_CM_CORE_STATUS);
4006 if ((data & 0x1) == 0)
4007 break;
4008 if (time_after(jiffies, timeout)) {
4009 if (debug & 0x10)
4010				pr_info(" %s timed out waiting for idle before disabling sao mmu dma\n", __func__);
4011 break;
4012 }
4013 }
4014 /*disable sao mmu dma */
4015 CLEAR_VREG_MASK(HEVC_SAO_MMU_DMA_CTRL, 1 << 0);
4016 timeout = jiffies + HZ/100;
4017 while (1) {
4018 data = READ_VREG(HEVC_SAO_MMU_DMA_STATUS);
4019 if ((data & 0x1))
4020 break;
4021 if (time_after(jiffies, timeout)) {
4022 if (debug & 0x10)
4023 pr_err("%s sao mmu dma timeout, num_buf_used = 0x%x\n",
4024 __func__, (READ_VREG(HEVC_SAO_MMU_STATUS) >> 16));
4025 break;
4026 }
4027 }
4028}
4029EXPORT_SYMBOL(hevc_mmu_dma_check);
4030
4031void hevc_reset_core(struct vdec_s *vdec)
4032{
4033 unsigned long flags;
4034 unsigned int mask = 0;
4035
4036 mask = 1 << 4; /*bit4: hevc*/
4037 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4038 mask |= 1 << 8; /*bit8: hevcb*/
4039
4040 WRITE_VREG(HEVC_STREAM_CONTROL, 0);
4041 spin_lock_irqsave(&vdec_spin_lock, flags);
4042 codec_dmcbus_write(DMC_REQ_CTRL,
4043 codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
4044 spin_unlock_irqrestore(&vdec_spin_lock, flags);
4045
4046 if (is_cpu_tm2_revb()) {
4047 while (!(codec_dmcbus_read(TM2_REVB_DMC_CHAN_STS)
4048 & mask))
4049 ;
4050 } else {
4051 while (!(codec_dmcbus_read(DMC_CHAN_STS)
4052 & mask))
4053 ;
4054 }
4055
4056 if (vdec == NULL || input_frame_based(vdec))
4057 WRITE_VREG(HEVC_STREAM_CONTROL, 0);
4058
4059 /*
4060 * 2: assist
4061 * 3: parser
4062 * 4: parser_state
4063 * 8: dblk
4064 * 11:mcpu
4065 * 12:ccpu
4066 * 13:ddr
4067 * 14:iqit
4068 * 15:ipp
4069 * 17:qdct
4070 * 18:mpred
4071 * 19:sao
4072 * 24:hevc_afifo
4073 */
4074 WRITE_VREG(DOS_SW_RESET3,
4075 (1<<3)|(1<<4)|(1<<8)|(1<<11)|
4076 (1<<12)|(1<<13)|(1<<14)|(1<<15)|
4077 (1<<17)|(1<<18)|(1<<19)|(1<<24));
4078
4079 WRITE_VREG(DOS_SW_RESET3, 0);
4080
4081
4082 spin_lock_irqsave(&vdec_spin_lock, flags);
4083 codec_dmcbus_write(DMC_REQ_CTRL,
4084 codec_dmcbus_read(DMC_REQ_CTRL) | mask);
4085 spin_unlock_irqrestore(&vdec_spin_lock, flags);
4086
4087}
4088EXPORT_SYMBOL(hevc_reset_core);
4089
4090int vdec2_source_changed(int format, int width, int height, int fps)
4091{
4092 int ret = -1;
4093 static int on_setting;
4094
4095 if (has_vdec2()) {
4096 /* todo: add level routines for clock adjustment per chips */
4097 if (on_setting != 0)
4098 return ret;/*on changing clk,ignore this change*/
4099
4100 if (vdec_source_get(VDEC_2) == width * height * fps)
4101 return ret;
4102
4103 on_setting = 1;
4104 ret = vdec_source_changed_for_clk_set(format,
4105 width, height, fps);
4106 pr_debug("vdec2 video changed to %d x %d %d fps clk->%dMHZ\n",
4107 width, height, fps, vdec_clk_get(VDEC_2));
4108 on_setting = 0;
4109 return ret;
4110 }
4111 return 0;
4112}
4113EXPORT_SYMBOL(vdec2_source_changed);
4114
4115int hevc_source_changed(int format, int width, int height, int fps)
4116{
4117 /* todo: add level routines for clock adjustment per chips */
4118 int ret = -1;
4119 static int on_setting;
4120
4121 if (on_setting != 0)
4122 return ret;/*on changing clk,ignore this change*/
4123
4124 if (vdec_source_get(VDEC_HEVC) == width * height * fps)
4125 return ret;
4126
4127 on_setting = 1;
4128 ret = vdec_source_changed_for_clk_set(format, width, height, fps);
4129 pr_debug("hevc video changed to %d x %d %d fps clk->%dMHZ\n",
4130 width, height, fps, vdec_clk_get(VDEC_HEVC));
4131 on_setting = 0;
4132
4133 return ret;
4134}
4135EXPORT_SYMBOL(hevc_source_changed);
4136
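/*
 * Name/offset table of the AMRISC MCPU/CCPU control registers and the
 * AV_SCRATCH_* scratch registers, used by the amrisc_regs sysfs node below.
 */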
4137static struct am_reg am_risc[] = {
4138 {"MSP", 0x300},
4139 {"MPSR", 0x301},
4140 {"MCPU_INT_BASE", 0x302},
4141 {"MCPU_INTR_GRP", 0x303},
4142 {"MCPU_INTR_MSK", 0x304},
4143 {"MCPU_INTR_REQ", 0x305},
4144 {"MPC-P", 0x306},
4145 {"MPC-D", 0x307},
4146 {"MPC_E", 0x308},
4147 {"MPC_W", 0x309},
4148 {"CSP", 0x320},
4149 {"CPSR", 0x321},
4150 {"CCPU_INT_BASE", 0x322},
4151 {"CCPU_INTR_GRP", 0x323},
4152 {"CCPU_INTR_MSK", 0x324},
4153 {"CCPU_INTR_REQ", 0x325},
4154 {"CPC-P", 0x326},
4155 {"CPC-D", 0x327},
4156 {"CPC_E", 0x328},
4157 {"CPC_W", 0x329},
4158 {"AV_SCRATCH_0", 0x09c0},
4159 {"AV_SCRATCH_1", 0x09c1},
4160 {"AV_SCRATCH_2", 0x09c2},
4161 {"AV_SCRATCH_3", 0x09c3},
4162 {"AV_SCRATCH_4", 0x09c4},
4163 {"AV_SCRATCH_5", 0x09c5},
4164 {"AV_SCRATCH_6", 0x09c6},
4165 {"AV_SCRATCH_7", 0x09c7},
4166 {"AV_SCRATCH_8", 0x09c8},
4167 {"AV_SCRATCH_9", 0x09c9},
4168 {"AV_SCRATCH_A", 0x09ca},
4169 {"AV_SCRATCH_B", 0x09cb},
4170 {"AV_SCRATCH_C", 0x09cc},
4171 {"AV_SCRATCH_D", 0x09cd},
4172 {"AV_SCRATCH_E", 0x09ce},
4173 {"AV_SCRATCH_F", 0x09cf},
4174 {"AV_SCRATCH_G", 0x09d0},
4175 {"AV_SCRATCH_H", 0x09d1},
4176 {"AV_SCRATCH_I", 0x09d2},
4177 {"AV_SCRATCH_J", 0x09d3},
4178 {"AV_SCRATCH_K", 0x09d4},
4179 {"AV_SCRATCH_L", 0x09d5},
4180 {"AV_SCRATCH_M", 0x09d6},
4181 {"AV_SCRATCH_N", 0x09d7},
4182};
4183
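/* sysfs: cat /sys/class/vdec/amrisc_regs - dump the am_risc[] registers */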
4184static ssize_t amrisc_regs_show(struct class *class,
4185 struct class_attribute *attr, char *buf)
4186{
4187 char *pbuf = buf;
4188 struct am_reg *regs = am_risc;
4189	int rsize = ARRAY_SIZE(am_risc);
4190 int i;
4191 unsigned int val;
4192 ssize_t ret;
4193
4194 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
4195 mutex_lock(&vdec_mutex);
4196 if (!vdec_on(VDEC_1)) {
4197 mutex_unlock(&vdec_mutex);
4198			pbuf += sprintf(pbuf, "amrisc is powered off\n");
4199 ret = pbuf - buf;
4200 return ret;
4201 }
4202 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4203 /*TODO:M6 define */
4204 /*
4205 * switch_mod_gate_by_type(MOD_VDEC, 1);
4206 */
4207 amports_switch_gate("vdec", 1);
4208 }
4209 pbuf += sprintf(pbuf, "amrisc registers show:\n");
4210 for (i = 0; i < rsize; i++) {
4211 val = READ_VREG(regs[i].offset);
4212 pbuf += sprintf(pbuf, "%s(%#x)\t:%#x(%d)\n",
4213 regs[i].name, regs[i].offset, val, val);
4214 }
4215 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
4216 mutex_unlock(&vdec_mutex);
4217 else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4218 /*TODO:M6 define */
4219 /*
4220 * switch_mod_gate_by_type(MOD_VDEC, 0);
4221 */
4222 amports_switch_gate("vdec", 0);
4223 }
4224 ret = pbuf - buf;
4225 return ret;
4226}
4227
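/*
 * sysfs: cat /sys/class/vdec/dump_trace - sample the MPC_E register
 * debug_trace_num times (16 reads per loop iteration) into a temporary
 * buffer and print the captured values.
 */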
4228static ssize_t dump_trace_show(struct class *class,
4229 struct class_attribute *attr, char *buf)
4230{
4231 int i;
4232 char *pbuf = buf;
4233 ssize_t ret;
4234	u16 *trace_buf = kmalloc(debug_trace_num * sizeof(u16), GFP_KERNEL);
4235
4236 if (!trace_buf) {
4237		pbuf += sprintf(pbuf, "no memory for trace buffer\n");
4238 ret = pbuf - buf;
4239 return ret;
4240 }
4241 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
4242 mutex_lock(&vdec_mutex);
4243 if (!vdec_on(VDEC_1)) {
4244 mutex_unlock(&vdec_mutex);
4245 kfree(trace_buf);
4246			pbuf += sprintf(pbuf, "amrisc is powered off\n");
4247 ret = pbuf - buf;
4248 return ret;
4249 }
4250 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4251 /*TODO:M6 define */
4252 /*
4253 * switch_mod_gate_by_type(MOD_VDEC, 1);
4254 */
4255 amports_switch_gate("vdec", 1);
4256 }
4257 pr_info("dump trace steps:%d start\n", debug_trace_num);
4258 i = 0;
4259	while (i + 16 <= debug_trace_num) {
4260 trace_buf[i] = READ_VREG(MPC_E);
4261 trace_buf[i + 1] = READ_VREG(MPC_E);
4262 trace_buf[i + 2] = READ_VREG(MPC_E);
4263 trace_buf[i + 3] = READ_VREG(MPC_E);
4264 trace_buf[i + 4] = READ_VREG(MPC_E);
4265 trace_buf[i + 5] = READ_VREG(MPC_E);
4266 trace_buf[i + 6] = READ_VREG(MPC_E);
4267 trace_buf[i + 7] = READ_VREG(MPC_E);
4268 trace_buf[i + 8] = READ_VREG(MPC_E);
4269 trace_buf[i + 9] = READ_VREG(MPC_E);
4270 trace_buf[i + 10] = READ_VREG(MPC_E);
4271 trace_buf[i + 11] = READ_VREG(MPC_E);
4272 trace_buf[i + 12] = READ_VREG(MPC_E);
4273 trace_buf[i + 13] = READ_VREG(MPC_E);
4274 trace_buf[i + 14] = READ_VREG(MPC_E);
4275 trace_buf[i + 15] = READ_VREG(MPC_E);
4276 i += 16;
4277	}
4278 pr_info("dump trace steps:%d finished\n", debug_trace_num);
4279 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
4280 mutex_unlock(&vdec_mutex);
4281 else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4282 /*TODO:M6 define */
4283 /*
4284 * switch_mod_gate_by_type(MOD_VDEC, 0);
4285 */
4286 amports_switch_gate("vdec", 0);
4287 }
4288 for (i = 0; i < debug_trace_num; i++) {
4289 if (i % 4 == 0) {
4290 if (i % 16 == 0)
4291 pbuf += sprintf(pbuf, "\n");
4292 else if (i % 8 == 0)
4293 pbuf += sprintf(pbuf, " ");
4294 else /* 4 */
4295 pbuf += sprintf(pbuf, " ");
4296 }
4297 pbuf += sprintf(pbuf, "%04x:", trace_buf[i]);
4298 }
4301 kfree(trace_buf);
4302 pbuf += sprintf(pbuf, "\n");
4303 ret = pbuf - buf;
4304 return ret;
4305}
4306
4307static ssize_t clock_level_show(struct class *class,
4308 struct class_attribute *attr, char *buf)
4309{
4310 char *pbuf = buf;
4311	ssize_t ret;
4312
4313 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_1));
4314
4315 if (has_vdec2())
4316 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_2));
4317
4318 if (has_hevc_vdec())
4319 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_HEVC));
4320
4321 ret = pbuf - buf;
4322 return ret;
4323}
4324
4325static ssize_t enable_mvdec_info_show(struct class *cla,
4326 struct class_attribute *attr, char *buf)
4327{
4328 return sprintf(buf, "%d\n", enable_mvdec_info);
4329}
4330
4331static ssize_t enable_mvdec_info_store(struct class *cla,
4332 struct class_attribute *attr,
4333 const char *buf, size_t count)
4334{
4335 int r;
4336 int val;
4337
4338 r = kstrtoint(buf, 0, &val);
4339 if (r < 0)
4340 return -EINVAL;
4341 enable_mvdec_info = val;
4342
4343 return count;
4344}
4345
4346
4347static ssize_t store_poweron_clock_level(struct class *class,
4348 struct class_attribute *attr,
4349 const char *buf, size_t size)
4350{
4351 unsigned int val;
4352 ssize_t ret;
4353
4354 /*ret = sscanf(buf, "%d", &val);*/
4355	ret = kstrtouint(buf, 0, &val);
4356
4357 if (ret != 0)
4358 return -EINVAL;
4359 poweron_clock_level = val;
4360 return size;
4361}
4362
4363static ssize_t show_poweron_clock_level(struct class *class,
4364 struct class_attribute *attr, char *buf)
4365{
4366 return sprintf(buf, "%d\n", poweron_clock_level);
4367}
4368
4369/*
4370 * If keep_vdec_mem == 1, never release the
4371 * vdec 64MB CMA memory, so that playback
4372 * can start faster next time.
4373 */
4374static ssize_t store_keep_vdec_mem(struct class *class,
4375 struct class_attribute *attr,
4376 const char *buf, size_t size)
4377{
4378 unsigned int val;
4379 ssize_t ret;
4380
4381 /*ret = sscanf(buf, "%d", &val);*/
4382	ret = kstrtouint(buf, 0, &val);
4383 if (ret != 0)
4384 return -EINVAL;
4385 keep_vdec_mem = val;
4386 return size;
4387}
4388
4389static ssize_t show_keep_vdec_mem(struct class *class,
4390 struct class_attribute *attr, char *buf)
4391{
4392 return sprintf(buf, "%d\n", keep_vdec_mem);
4393}
4394
4395
4396#ifdef VDEC_DEBUG_SUPPORT
4397static ssize_t store_debug(struct class *class,
4398 struct class_attribute *attr,
4399 const char *buf, size_t size)
4400{
4401 struct vdec_s *vdec;
4402 struct vdec_core_s *core = vdec_core;
4403 unsigned long flags;
4404
4405 unsigned id;
4406 unsigned val;
4407 ssize_t ret;
4408 char cbuf[32];
4409
4410 cbuf[0] = 0;
4411	ret = sscanf(buf, "%31s %x %x", cbuf, &id, &val);
4412 /*pr_info(
4413 "%s(%s)=>ret %ld: %s, %x, %x\n",
4414 __func__, buf, ret, cbuf, id, val);*/
4415 if (strcmp(cbuf, "schedule") == 0) {
4416 pr_info("VDEC_DEBUG: force schedule\n");
4417 up(&core->sem);
4418 } else if (strcmp(cbuf, "power_off") == 0) {
4419 pr_info("VDEC_DEBUG: power off core %d\n", id);
4420 vdec_poweroff(id);
4421 } else if (strcmp(cbuf, "power_on") == 0) {
4422 pr_info("VDEC_DEBUG: power_on core %d\n", id);
4423 vdec_poweron(id);
4424 } else if (strcmp(cbuf, "wr") == 0) {
4425 pr_info("VDEC_DEBUG: WRITE_VREG(0x%x, 0x%x)\n",
4426 id, val);
4427 WRITE_VREG(id, val);
4428 } else if (strcmp(cbuf, "rd") == 0) {
4429 pr_info("VDEC_DEBUG: READ_VREG(0x%x) = 0x%x\n",
4430 id, READ_VREG(id));
4431 } else if (strcmp(cbuf, "read_hevc_clk_reg") == 0) {
4432 pr_info(
4433 "VDEC_DEBUG: HHI_VDEC4_CLK_CNTL = 0x%x, HHI_VDEC2_CLK_CNTL = 0x%x\n",
4434 READ_HHI_REG(HHI_VDEC4_CLK_CNTL),
4435 READ_HHI_REG(HHI_VDEC2_CLK_CNTL));
4436 }
4437
4438 flags = vdec_core_lock(vdec_core);
4439
4440 list_for_each_entry(vdec,
4441 &core->connected_vdec_list, list) {
4442 pr_info("vdec: status %d, id %d\n", vdec->status, vdec->id);
4443 if (((vdec->status == VDEC_STATUS_CONNECTED
4444 || vdec->status == VDEC_STATUS_ACTIVE)) &&
4445 (vdec->id == id)) {
4446 /*to add*/
4447 break;
4448 }
4449 }
4450 vdec_core_unlock(vdec_core, flags);
4451 return size;
4452}
4453
4454static ssize_t show_debug(struct class *class,
4455 struct class_attribute *attr, char *buf)
4456{
4457 char *pbuf = buf;
4458 struct vdec_s *vdec;
4459 struct vdec_core_s *core = vdec_core;
4460 unsigned long flags = vdec_core_lock(vdec_core);
4461 u64 tmp;
4462
4463 pbuf += sprintf(pbuf,
4464 "============== help:\n");
4465 pbuf += sprintf(pbuf,
4466		"'echo xxx > debug' usage:\n");
4467 pbuf += sprintf(pbuf,
4468 "schedule - trigger schedule thread to run\n");
4469 pbuf += sprintf(pbuf,
4470 "power_off core_num - call vdec_poweroff(core_num)\n");
4471 pbuf += sprintf(pbuf,
4472 "power_on core_num - call vdec_poweron(core_num)\n");
4473 pbuf += sprintf(pbuf,
4474 "wr adr val - call WRITE_VREG(adr, val)\n");
4475 pbuf += sprintf(pbuf,
4476 "rd adr - call READ_VREG(adr)\n");
4477 pbuf += sprintf(pbuf,
4478 "read_hevc_clk_reg - read HHI register for hevc clk\n");
4479 pbuf += sprintf(pbuf,
4480 "===================\n");
4481
4482 pbuf += sprintf(pbuf,
4483 "name(core)\tschedule_count\trun_count\tinput_underrun\tdecbuf_not_ready\trun_time\n");
4484 list_for_each_entry(vdec,
4485 &core->connected_vdec_list, list) {
4486 enum vdec_type_e type;
4487 if ((vdec->status == VDEC_STATUS_CONNECTED
4488 || vdec->status == VDEC_STATUS_ACTIVE)) {
4489 for (type = VDEC_1; type < VDEC_MAX; type++) {
4490 if (vdec->core_mask & (1 << type)) {
4491 pbuf += sprintf(pbuf, "%s(%d):",
4492 vdec->vf_provider_name, type);
4493 pbuf += sprintf(pbuf, "\t%d",
4494 vdec->check_count[type]);
4495 pbuf += sprintf(pbuf, "\t%d",
4496 vdec->run_count[type]);
4497 pbuf += sprintf(pbuf, "\t%d",
4498 vdec->input_underrun_count[type]);
4499 pbuf += sprintf(pbuf, "\t%d",
4500 vdec->not_run_ready_count[type]);
4501 tmp = vdec->run_clk[type] * 100;
4502 do_div(tmp, vdec->total_clk[type]);
4503 pbuf += sprintf(pbuf,
4504 "\t%d%%\n",
4505 vdec->total_clk[type] == 0 ? 0 :
4506 (u32)tmp);
4507 }
4508 }
4509 }
4510 }
4511
4512 vdec_core_unlock(vdec_core, flags);
4513 return pbuf - buf;
4514
4515}
4516#endif
4517
4518static ssize_t store_vdec_vfm_path(struct class *class,
4519 struct class_attribute *attr,
4520 const char *buf, size_t count)
4521{
4522 char *buf_dup, *ps, *token;
4523 char str[VDEC_MAP_NAME_SIZE] = "\0";
4524 bool found = false;
4525 int i;
4526
4527 if (strlen(buf) >= VDEC_MAP_NAME_SIZE) {
4528		pr_info("parameter is too long\n");
4529		return -EINVAL;
4530 }
4531
4532	buf_dup = kstrdup(buf, GFP_KERNEL);
	if (!buf_dup)
		return -ENOMEM;
4533	ps = buf_dup;
4534 while (1) {
4535 token = strsep(&ps, "\n ");
4536 if (token == NULL)
4537 break;
4538 if (*token == '\0')
4539 continue;
4540
4541 for (i = 0; strcmp("reserved", vfm_path_node[i]) != 0; i++) {
4542 if (!strncmp (vfm_path_node[i], token, strlen(vfm_path_node[i]))) {
4543 break;
4544 }
4545 }
4546
4547 if (strcmp("reserved", vfm_path_node[i]) == 0 ||
4548 strncmp("help", buf, strlen("help")) == 0) {
4549 if (strncmp("help", buf, strlen("help")) != 0) {
4550				pr_info("warning: invalid input parameter, set failed!\n");
4551			}
4552			pr_info("\nusage for example:\n");
4553			pr_info("echo help > /sys/class/vdec/vfm_path\n");
4554			pr_info("echo disable > /sys/class/vdec/vfm_path\n");
4555			pr_info("echo amlvideo ppmgr amvideo > /sys/class/vdec/vfm_path\n");
4556 found = false;
4557
4558 break;
4559 } else {
4560 strcat(str, vfm_path_node[i]);
4561 strcat(str, " ");
4562 found = true;
4563 }
4564 }
4565
4566 if (found == true) {
4567		memset(vfm_path, 0, sizeof(vfm_path));
4568		strncpy(vfm_path, str, sizeof(vfm_path) - 1);
4569		vfm_path[VDEC_MAP_NAME_SIZE - 1] = '\0';
4570 pr_info("cfg path success: decoder %s\n", vfm_path);
4571 }
4572 kfree(buf_dup);
4573
4574 return count;
4575}
4576
4577static ssize_t show_vdec_vfm_path(struct class *class,
4578 struct class_attribute *attr, char *buf)
4579{
4580 int len = 0;
4581 int i;
4582 len += sprintf(buf + len, "cfg vfm path: decoder %s\n", vfm_path);
4583	len += sprintf(buf + len, "\nvfm path node list:\n");
4584	for (i = 0; strcmp("reserved", vfm_path_node[i]) != 0; i++) {
4585		len += sprintf(buf + len, "\t%s\n", vfm_path_node[i]);
4586 }
4587
4588 return len;
4589}
4590
4591/*irq num as same as .dts*/
4592/*
4593 * interrupts = <0 3 1
4594 * 0 23 1
4595 * 0 32 1
4596 * 0 43 1
4597 * 0 44 1
4598 * 0 45 1>;
4599 * interrupt-names = "vsync",
4600 * "demux",
4601 * "parser",
4602 * "mailbox_0",
4603 * "mailbox_1",
4604 * "mailbox_2";
4605 */
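/*
 * Request (or re-bind) the decoder interrupt identified by 'num'.  The
 * platform IRQ itself is only requested once; afterwards the per-user
 * handler and threaded handler are simply swapped in the isr_context,
 * and the common vdec_isr()/vdec_thread_isr() wrappers dispatch to them.
 */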
4606s32 vdec_request_threaded_irq(enum vdec_irq_num num,
4607 irq_handler_t handler,
4608 irq_handler_t thread_fn,
4609 unsigned long irqflags,
4610 const char *devname, void *dev)
4611{
4612 s32 res_irq;
4613 s32 ret = 0;
4614
4615 if (num >= VDEC_IRQ_MAX) {
4616		pr_err("[%s] request irq error, irq num too big!\n", __func__);
4617 return -EINVAL;
4618 }
4619
4620 if (vdec_core->isr_context[num].irq < 0) {
4621 res_irq = platform_get_irq(
4622 vdec_core->vdec_core_platform_device, num);
4623 if (res_irq < 0) {
4624			pr_err("[%s] get irq error!\n", __func__);
4625 return -EINVAL;
4626 }
4627
4628 vdec_core->isr_context[num].irq = res_irq;
4629 vdec_core->isr_context[num].dev_isr = handler;
4630 vdec_core->isr_context[num].dev_threaded_isr = thread_fn;
4631 vdec_core->isr_context[num].dev_id = dev;
4632
4633 ret = request_threaded_irq(res_irq,
4634 vdec_isr,
4635 vdec_thread_isr,
4636 (thread_fn) ? IRQF_ONESHOT : irqflags,
4637 devname,
4638 &vdec_core->isr_context[num]);
4639
4640 if (ret) {
4641 vdec_core->isr_context[num].irq = -1;
4642 vdec_core->isr_context[num].dev_isr = NULL;
4643 vdec_core->isr_context[num].dev_threaded_isr = NULL;
4644 vdec_core->isr_context[num].dev_id = NULL;
4645
4646 pr_err("vdec irq register error for %s.\n", devname);
4647 return -EIO;
4648 }
4649 } else {
4650 vdec_core->isr_context[num].dev_isr = handler;
4651 vdec_core->isr_context[num].dev_threaded_isr = thread_fn;
4652 vdec_core->isr_context[num].dev_id = dev;
4653 }
4654
4655 return ret;
4656}
4657EXPORT_SYMBOL(vdec_request_threaded_irq);
4658
4659s32 vdec_request_irq(enum vdec_irq_num num, irq_handler_t handler,
4660 const char *devname, void *dev)
4661{
4662 pr_debug("vdec_request_irq %p, %s\n", handler, devname);
4663
4664 return vdec_request_threaded_irq(num,
4665 handler,
4666 NULL,/*no thread_fn*/
4667 IRQF_SHARED,
4668 devname,
4669 dev);
4670}
4671EXPORT_SYMBOL(vdec_request_irq);
4672
4673void vdec_free_irq(enum vdec_irq_num num, void *dev)
4674{
4675 if (num >= VDEC_IRQ_MAX) {
4676		pr_err("[%s] free irq error, irq num too big!\n", __func__);
4677 return;
4678 }
4679 /*
4680 *assume amrisc is stopped already and there is no mailbox interrupt
4681 * when we reset pointers here.
4682 */
4683 vdec_core->isr_context[num].dev_isr = NULL;
4684 vdec_core->isr_context[num].dev_threaded_isr = NULL;
4685 vdec_core->isr_context[num].dev_id = NULL;
4686 synchronize_irq(vdec_core->isr_context[num].irq);
4687}
4688EXPORT_SYMBOL(vdec_free_irq);
4689
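/*
 * Return the connected vdec instance with the lowest id (the default
 * target for userdata requests), or NULL if nothing is connected.
 */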
4690struct vdec_s *vdec_get_default_vdec_for_userdata(void)
4691{
4692 struct vdec_s *vdec;
4693 struct vdec_s *ret_vdec;
4694 struct vdec_core_s *core = vdec_core;
4695 unsigned long flags;
4696 int id;
4697
4698 flags = vdec_core_lock(vdec_core);
4699
4700 id = 0x10000000;
4701 ret_vdec = NULL;
4702 if (!list_empty(&core->connected_vdec_list)) {
4703 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4704 if (vdec->id < id) {
4705 id = vdec->id;
4706 ret_vdec = vdec;
4707 }
4708 }
4709 }
4710
4711 vdec_core_unlock(vdec_core, flags);
4712
4713 return ret_vdec;
4714}
4715EXPORT_SYMBOL(vdec_get_default_vdec_for_userdata);
4716
4717struct vdec_s *vdec_get_vdec_by_id(int vdec_id)
4718{
4719 struct vdec_s *vdec;
4720 struct vdec_s *ret_vdec;
4721 struct vdec_core_s *core = vdec_core;
4722 unsigned long flags;
4723
4724 flags = vdec_core_lock(vdec_core);
4725
4726 ret_vdec = NULL;
4727 if (!list_empty(&core->connected_vdec_list)) {
4728 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4729 if (vdec->id == vdec_id) {
4730 ret_vdec = vdec;
4731 break;
4732 }
4733 }
4734 }
4735
4736 vdec_core_unlock(vdec_core, flags);
4737
4738 return ret_vdec;
4739}
4740EXPORT_SYMBOL(vdec_get_vdec_by_id);
4741
4742int vdec_read_user_data(struct vdec_s *vdec,
4743 struct userdata_param_t *p_userdata_param)
4744{
4745 int ret = 0;
4746
4747 if (!vdec)
4748 vdec = vdec_get_default_vdec_for_userdata();
4749
4750 if (vdec) {
4751 if (vdec->user_data_read)
4752 ret = vdec->user_data_read(vdec, p_userdata_param);
4753 }
4754 return ret;
4755}
4756EXPORT_SYMBOL(vdec_read_user_data);
4757
4758int vdec_wakeup_userdata_poll(struct vdec_s *vdec)
4759{
4760 if (vdec) {
4761 if (vdec->wakeup_userdata_poll)
4762 vdec->wakeup_userdata_poll(vdec);
4763 }
4764
4765 return 0;
4766}
4767EXPORT_SYMBOL(vdec_wakeup_userdata_poll);
4768
4769void vdec_reset_userdata_fifo(struct vdec_s *vdec, int bInit)
4770{
4771 if (!vdec)
4772 vdec = vdec_get_default_vdec_for_userdata();
4773
4774 if (vdec) {
4775 if (vdec->reset_userdata_fifo)
4776 vdec->reset_userdata_fifo(vdec, bInit);
4777 }
4778}
4779EXPORT_SYMBOL(vdec_reset_userdata_fifo);
4780
4781static int dump_mode;
4782static ssize_t dump_risc_mem_store(struct class *class,
4783 struct class_attribute *attr,
4784 const char *buf, size_t size)/*set*/
4785{
4786 unsigned int val;
4787 ssize_t ret;
4788 char dump_mode_str[4] = "PRL";
4789
4790 /*ret = sscanf(buf, "%d", &val);*/
4791	ret = kstrtouint(buf, 0, &val);
4792
4793 if (ret != 0)
4794 return -EINVAL;
4795 dump_mode = val & 0x3;
4796 pr_info("set dump mode to %d,%c_mem\n",
4797 dump_mode, dump_mode_str[dump_mode]);
4798 return size;
4799}
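/*
 * dump_pmem()/dump_lmem() read the AMRISC program and local memories
 * back through an indirect index/data register pair (0x31b/0x31c);
 * MPSR (0x301) is written first and 0x31d selects which memory is
 * dumped (0 = pmem, 2 = lmem).
 */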
4800static u32 read_amrisc_reg(int reg)
4801{
4802 WRITE_VREG(0x31b, reg);
4803 return READ_VREG(0x31c);
4804}
4805
4806static void dump_pmem(void)
4807{
4808 int i;
4809
4810 WRITE_VREG(0x301, 0x8000);
4811 WRITE_VREG(0x31d, 0);
4812 pr_info("start dump amrisc pmem of risc\n");
4813 for (i = 0; i < 0xfff; i++) {
4814 /*same as .o format*/
4815 pr_info("%08x // 0x%04x:\n", read_amrisc_reg(i), i);
4816 }
4817}
4818
4819static void dump_lmem(void)
4820{
4821 int i;
4822
4823 WRITE_VREG(0x301, 0x8000);
4824 WRITE_VREG(0x31d, 2);
4825 pr_info("start dump amrisc lmem\n");
4826 for (i = 0; i < 0x3ff; i++) {
4827 /*same as */
4828 pr_info("[%04x] = 0x%08x:\n", i, read_amrisc_reg(i));
4829 }
4830}
4831
4832static ssize_t dump_risc_mem_show(struct class *class,
4833 struct class_attribute *attr, char *buf)
4834{
4835 char *pbuf = buf;
4836 int ret;
4837
4838 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
4839 mutex_lock(&vdec_mutex);
4840 if (!vdec_on(VDEC_1)) {
4841 mutex_unlock(&vdec_mutex);
4842			pbuf += sprintf(pbuf, "amrisc is powered off\n");
4843 ret = pbuf - buf;
4844 return ret;
4845 }
4846 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4847 /*TODO:M6 define */
4848 /*
4849 * switch_mod_gate_by_type(MOD_VDEC, 1);
4850 */
4851 amports_switch_gate("vdec", 1);
4852 }
4853 /*start do**/
4854 switch (dump_mode) {
4855 case 0:
4856 dump_pmem();
4857 break;
4858 case 2:
4859 dump_lmem();
4860 break;
4861 default:
4862 break;
4863 }
4864
4865 /*done*/
4866 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
4867 mutex_unlock(&vdec_mutex);
4868 else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4869 /*TODO:M6 define */
4870 /*
4871 * switch_mod_gate_by_type(MOD_VDEC, 0);
4872 */
4873 amports_switch_gate("vdec", 0);
4874 }
4875 return sprintf(buf, "done\n");
4876}
4877
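/* sysfs: cat /sys/class/vdec/core - list connected vdec instances with their status, type and active core mask */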
4878static ssize_t core_show(struct class *class, struct class_attribute *attr,
4879 char *buf)
4880{
4881 struct vdec_core_s *core = vdec_core;
4882 char *pbuf = buf;
4883
4884 if (list_empty(&core->connected_vdec_list))
4885 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4886 else {
4887 struct vdec_s *vdec;
4888
4889 pbuf += sprintf(pbuf,
4890 " Core: last_sched %p, sched_mask %lx\n",
4891 core->last_vdec,
4892 core->sched_mask);
4893
4894 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4895 pbuf += sprintf(pbuf,
4896 "\tvdec.%d (%p (%s)), status = %s,\ttype = %s, \tactive_mask = %lx\n",
4897 vdec->id,
4898 vdec,
4899 vdec_device_name[vdec->format * 2],
4900 vdec_status_str(vdec),
4901 vdec_type_str(vdec),
4902 vdec->active_mask);
4903 }
4904 }
4905
4906 return pbuf - buf;
4907}
4908
4909static ssize_t vdec_status_show(struct class *class,
4910 struct class_attribute *attr, char *buf)
4911{
4912 char *pbuf = buf;
4913 struct vdec_s *vdec;
4914 struct vdec_info vs;
4915 unsigned char vdec_num = 0;
4916 struct vdec_core_s *core = vdec_core;
4917 unsigned long flags = vdec_core_lock(vdec_core);
4918
4919 if (list_empty(&core->connected_vdec_list)) {
4920 pbuf += sprintf(pbuf, "No vdec.\n");
4921 goto out;
4922 }
4923
4924 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4925 if ((vdec->status == VDEC_STATUS_CONNECTED
4926 || vdec->status == VDEC_STATUS_ACTIVE)) {
4927 memset(&vs, 0, sizeof(vs));
4928 if (vdec_status(vdec, &vs)) {
4929 pbuf += sprintf(pbuf, "err.\n");
4930 goto out;
4931 }
4932 pbuf += sprintf(pbuf,
4933 "vdec channel %u statistics:\n",
4934 vdec_num);
4935 pbuf += sprintf(pbuf,
4936 "%13s : %s\n", "device name",
4937 vs.vdec_name);
4938 pbuf += sprintf(pbuf,
4939 "%13s : %u\n", "frame width",
4940 vs.frame_width);
4941 pbuf += sprintf(pbuf,
4942 "%13s : %u\n", "frame height",
4943 vs.frame_height);
4944 pbuf += sprintf(pbuf,
4945 "%13s : %u %s\n", "frame rate",
4946 vs.frame_rate, "fps");
4947 pbuf += sprintf(pbuf,
4948 "%13s : %u %s\n", "bit rate",
4949 vs.bit_rate / 1024 * 8, "kbps");
4950 pbuf += sprintf(pbuf,
4951 "%13s : %u\n", "status",
4952 vs.status);
4953 pbuf += sprintf(pbuf,
4954 "%13s : %u\n", "frame dur",
4955 vs.frame_dur);
4956 pbuf += sprintf(pbuf,
4957 "%13s : %u %s\n", "frame data",
4958 vs.frame_data / 1024, "KB");
4959 pbuf += sprintf(pbuf,
4960 "%13s : %u\n", "frame count",
4961 vs.frame_count);
4962 pbuf += sprintf(pbuf,
4963 "%13s : %u\n", "drop count",
4964 vs.drop_frame_count);
4965 pbuf += sprintf(pbuf,
4966 "%13s : %u\n", "fra err count",
4967 vs.error_frame_count);
4968 pbuf += sprintf(pbuf,
4969 "%13s : %u\n", "hw err count",
4970 vs.error_count);
4971 pbuf += sprintf(pbuf,
4972 "%13s : %llu %s\n", "total data",
4973 vs.total_data / 1024, "KB");
4974 pbuf += sprintf(pbuf,
4975 "%13s : %x\n\n", "ratio_control",
4976 vs.ratio_control);
4977
4978 vdec_num++;
4979 }
4980 }
4981out:
4982 vdec_core_unlock(vdec_core, flags);
4983 return pbuf - buf;
4984}
4985
4986static ssize_t dump_vdec_blocks_show(struct class *class,
4987 struct class_attribute *attr, char *buf)
4988{
4989 struct vdec_core_s *core = vdec_core;
4990 char *pbuf = buf;
4991 unsigned long flags = vdec_core_lock(vdec_core);
4992
4993 if (list_empty(&core->connected_vdec_list))
4994 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4995 else {
4996 struct vdec_s *vdec;
4997 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4998 pbuf += vdec_input_dump_blocks(&vdec->input,
4999 pbuf, PAGE_SIZE - (pbuf - buf));
5000 }
5001 }
5002 vdec_core_unlock(vdec_core, flags);
5003
5004 return pbuf - buf;
5005}
5006static ssize_t dump_vdec_chunks_show(struct class *class,
5007 struct class_attribute *attr, char *buf)
5008{
5009 struct vdec_core_s *core = vdec_core;
5010 char *pbuf = buf;
5011 unsigned long flags = vdec_core_lock(vdec_core);
5012
5013 if (list_empty(&core->connected_vdec_list))
5014 pbuf += sprintf(pbuf, "connected vdec list empty\n");
5015 else {
5016 struct vdec_s *vdec;
5017 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
5018 pbuf += vdec_input_dump_chunks(vdec->id, &vdec->input,
5019 pbuf, PAGE_SIZE - (pbuf - buf));
5020 }
5021 }
5022 vdec_core_unlock(vdec_core, flags);
5023
5024 return pbuf - buf;
5025}
5026
5027static ssize_t dump_decoder_state_show(struct class *class,
5028 struct class_attribute *attr, char *buf)
5029{
5030 char *pbuf = buf;
5031 struct vdec_s *vdec;
5032 struct vdec_core_s *core = vdec_core;
5033 unsigned long flags = vdec_core_lock(vdec_core);
5034
5035 if (list_empty(&core->connected_vdec_list)) {
5036 pbuf += sprintf(pbuf, "No vdec.\n");
5037 } else {
5038 list_for_each_entry(vdec,
5039 &core->connected_vdec_list, list) {
5040 if ((vdec->status == VDEC_STATUS_CONNECTED
5041 || vdec->status == VDEC_STATUS_ACTIVE)
5042 && vdec->dump_state)
5043 vdec->dump_state(vdec);
5044 }
5045 }
5046 vdec_core_unlock(vdec_core, flags);
5047
5048 return pbuf - buf;
5049}
5050
5051static ssize_t dump_fps_show(struct class *class,
5052 struct class_attribute *attr, char *buf)
5053{
5054 char *pbuf = buf;
5055 struct vdec_core_s *core = vdec_core;
5056 int i;
5057
5058 unsigned long flags = vdec_fps_lock(vdec_core);
5059 for (i = 0; i < MAX_INSTANCE_MUN; i++)
5060 pbuf += sprintf(pbuf, "%d ", core->decode_fps[i].fps);
5061
5062 pbuf += sprintf(pbuf, "\n");
5063 vdec_fps_unlock(vdec_core, flags);
5064
5065 return pbuf - buf;
5066}
5067
5068
5069
5070static struct class_attribute vdec_class_attrs[] = {
5071 __ATTR_RO(amrisc_regs),
5072 __ATTR_RO(dump_trace),
5073 __ATTR_RO(clock_level),
5074 __ATTR(enable_mvdec_info, S_IRUGO | S_IWUSR | S_IWGRP,
5075 enable_mvdec_info_show, enable_mvdec_info_store),
5076 __ATTR(poweron_clock_level, S_IRUGO | S_IWUSR | S_IWGRP,
5077 show_poweron_clock_level, store_poweron_clock_level),
5078 __ATTR(dump_risc_mem, S_IRUGO | S_IWUSR | S_IWGRP,
5079 dump_risc_mem_show, dump_risc_mem_store),
5080 __ATTR(keep_vdec_mem, S_IRUGO | S_IWUSR | S_IWGRP,
5081 show_keep_vdec_mem, store_keep_vdec_mem),
5082 __ATTR_RO(core),
5083 __ATTR_RO(vdec_status),
5084 __ATTR_RO(dump_vdec_blocks),
5085 __ATTR_RO(dump_vdec_chunks),
5086 __ATTR_RO(dump_decoder_state),
5087#ifdef VDEC_DEBUG_SUPPORT
5088 __ATTR(debug, S_IRUGO | S_IWUSR | S_IWGRP,
5089 show_debug, store_debug),
5090#endif
5091#ifdef FRAME_CHECK
5092 __ATTR(dump_yuv, S_IRUGO | S_IWUSR | S_IWGRP,
5093 dump_yuv_show, dump_yuv_store),
5094 __ATTR(frame_check, S_IRUGO | S_IWUSR | S_IWGRP,
5095 frame_check_show, frame_check_store),
5096#endif
5097 __ATTR_RO(dump_fps),
5098 __ATTR(vfm_path, S_IRUGO | S_IWUSR | S_IWGRP,
5099 show_vdec_vfm_path, store_vdec_vfm_path),
5100 __ATTR_NULL
5101};
5102
5103static struct class vdec_class = {
5104 .name = "vdec",
5105 .class_attrs = vdec_class_attrs,
5106 };
5107
5108struct device *get_vdec_device(void)
5109{
5110 return &vdec_core->vdec_core_platform_device->dev;
5111}
5112EXPORT_SYMBOL(get_vdec_device);
5113
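/*
 * Probe: allocate the global vdec_core, register the /sys/class/vdec
 * class, pre-request the VDEC_IRQ_0/1 interrupts, hook up the reserved
 * (CMA) memory device, then start the "vdec-core" scheduler thread and
 * the ordered "vdec-work" workqueue.
 */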
5114static int vdec_probe(struct platform_device *pdev)
5115{
5116 s32 i, r;
5117
5118 vdec_core = (struct vdec_core_s *)devm_kzalloc(&pdev->dev,
5119 sizeof(struct vdec_core_s), GFP_KERNEL);
5120 if (vdec_core == NULL) {
5121 pr_err("vdec core allocation failed.\n");
5122 return -ENOMEM;
5123 }
5124
5125 atomic_set(&vdec_core->vdec_nr, 0);
5126 sema_init(&vdec_core->sem, 1);
5127
5128 r = class_register(&vdec_class);
5129 if (r) {
5130 pr_info("vdec class create fail.\n");
5131 return r;
5132 }
5133
5134 vdec_core->vdec_core_platform_device = pdev;
5135
5136 platform_set_drvdata(pdev, vdec_core);
5137
5138 for (i = 0; i < VDEC_IRQ_MAX; i++) {
5139 vdec_core->isr_context[i].index = i;
5140 vdec_core->isr_context[i].irq = -1;
5141 }
5142
5143 r = vdec_request_threaded_irq(VDEC_IRQ_0, NULL, NULL,
5144 IRQF_ONESHOT, "vdec-0", NULL);
5145 if (r < 0) {
5146 pr_err("vdec interrupt request failed\n");
5147 return r;
5148 }
5149
5150 r = vdec_request_threaded_irq(VDEC_IRQ_1, NULL, NULL,
5151 IRQF_ONESHOT, "vdec-1", NULL);
5152 if (r < 0) {
5153 pr_err("vdec interrupt request failed\n");
5154 return r;
5155 }
5156#if 0
5157 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
5158 r = vdec_request_threaded_irq(VDEC_IRQ_HEVC_BACK, NULL, NULL,
5159 IRQF_ONESHOT, "vdec-hevc_back", NULL);
5160 if (r < 0) {
5161 pr_err("vdec interrupt request failed\n");
5162 return r;
5163 }
5164 }
5165#endif
5166 r = of_reserved_mem_device_init(&pdev->dev);
5167 if (r == 0)
5168 pr_info("vdec_probe done\n");
5169
5170 vdec_core->cma_dev = &pdev->dev;
5171
5172 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_M8) {
5173 /* default to 250MHz */
5174 vdec_clock_hi_enable();
5175 }
5176
5177 if (get_cpu_major_id() == AM_MESON_CPU_MAJOR_ID_GXBB) {
5178 /* set vdec dmc request to urgent */
5179 WRITE_DMCREG(DMC_AM5_CHAN_CTRL, 0x3f203cf);
5180 }
5181 INIT_LIST_HEAD(&vdec_core->connected_vdec_list);
5182 spin_lock_init(&vdec_core->lock);
5183 spin_lock_init(&vdec_core->canvas_lock);
5184 spin_lock_init(&vdec_core->fps_lock);
5185 spin_lock_init(&vdec_core->input_lock);
5186 ida_init(&vdec_core->ida);
5187 vdec_core->thread = kthread_run(vdec_core_thread, vdec_core,
5188 "vdec-core");
5189
5190	vdec_core->vdec_core_wq = alloc_ordered_workqueue("%s", __WQ_LEGACY |
5191		WQ_MEM_RECLAIM | WQ_HIGHPRI/*high priority*/, "vdec-work");
5192 /*work queue priority lower than vdec-core.*/
5193 return 0;
5194}
5195
5196static int vdec_remove(struct platform_device *pdev)
5197{
5198 int i;
5199
5200 for (i = 0; i < VDEC_IRQ_MAX; i++) {
5201 if (vdec_core->isr_context[i].irq >= 0) {
5202 free_irq(vdec_core->isr_context[i].irq,
5203 &vdec_core->isr_context[i]);
5204 vdec_core->isr_context[i].irq = -1;
5205 vdec_core->isr_context[i].dev_isr = NULL;
5206 vdec_core->isr_context[i].dev_threaded_isr = NULL;
5207 vdec_core->isr_context[i].dev_id = NULL;
5208 }
5209 }
5210
5211 kthread_stop(vdec_core->thread);
5212
5213 destroy_workqueue(vdec_core->vdec_core_wq);
5214 class_unregister(&vdec_class);
5215
5216 return 0;
5217}
5218
5219static const struct of_device_id amlogic_vdec_dt_match[] = {
5220 {
5221 .compatible = "amlogic, vdec",
5222 },
5223 {},
5224};
5225
5226static struct mconfig vdec_configs[] = {
5227 MC_PU32("debug_trace_num", &debug_trace_num),
5228 MC_PI32("hevc_max_reset_count", &hevc_max_reset_count),
5229 MC_PU32("clk_config", &clk_config),
5230 MC_PI32("step_mode", &step_mode),
5231 MC_PI32("poweron_clock_level", &poweron_clock_level),
5232};
5233static struct mconfig_node vdec_node;
5234
5235static struct platform_driver vdec_driver = {
5236 .probe = vdec_probe,
5237 .remove = vdec_remove,
5238 .driver = {
5239 .name = "vdec",
5240 .of_match_table = amlogic_vdec_dt_match,
5241 }
5242};
5243
5244static struct codec_profile_t amvdec_input_profile = {
5245 .name = "vdec_input",
5246 .profile = "drm_framemode"
5247};
5248
5249int vdec_module_init(void)
5250{
5251 if (platform_driver_register(&vdec_driver)) {
5252 pr_info("failed to register vdec module\n");
5253 return -ENODEV;
5254 }
5255 INIT_REG_NODE_CONFIGS("media.decoder", &vdec_node,
5256 "vdec", vdec_configs, CONFIG_FOR_RW);
5257 vcodec_profile_register(&amvdec_input_profile);
5258 return 0;
5259}
5260EXPORT_SYMBOL(vdec_module_init);
5261
5262void vdec_module_exit(void)
5263{
5264 platform_driver_unregister(&vdec_driver);
5265}
5266EXPORT_SYMBOL(vdec_module_exit);
5267
5268#if 0
5269static int __init vdec_module_init(void)
5270{
5271 if (platform_driver_register(&vdec_driver)) {
5272 pr_info("failed to register vdec module\n");
5273 return -ENODEV;
5274 }
5275 INIT_REG_NODE_CONFIGS("media.decoder", &vdec_node,
5276 "vdec", vdec_configs, CONFIG_FOR_RW);
5277 return 0;
5278}
5279
5280static void __exit vdec_module_exit(void)
5281{
5282 platform_driver_unregister(&vdec_driver);
5283}
5284#endif
5285
5286static int vdec_mem_device_init(struct reserved_mem *rmem, struct device *dev)
5287{
5288 vdec_core->cma_dev = dev;
5289
5290 return 0;
5291}
5292
5293static const struct reserved_mem_ops rmem_vdec_ops = {
5294 .device_init = vdec_mem_device_init,
5295};
5296
5297static int __init vdec_mem_setup(struct reserved_mem *rmem)
5298{
5299 rmem->ops = &rmem_vdec_ops;
5300 pr_info("vdec: reserved mem setup\n");
5301
5302 return 0;
5303}
5304
5305
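/*
 * Fill the common header of the per-instance frame counter area
 * (vdec id, name and type) so later frame-info consumers can identify
 * the instance.
 */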
5306void vdec_set_vframe_comm(struct vdec_s *vdec, char *n)
5307{
5308 struct vdec_frames_s *mvfrm = vdec->mvfrm;
5309
5310 if (!mvfrm)
5311 return;
5312
5313 mvfrm->comm.vdec_id = vdec->id;
5314
5315 snprintf(mvfrm->comm.vdec_name, sizeof(mvfrm->comm.vdec_name)-1,
5316 "%s", n);
5317 mvfrm->comm.vdec_type = vdec->type;
5318}
5319EXPORT_SYMBOL(vdec_set_vframe_comm);
5320
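/*
 * Record one decoded frame's statistics (decode time, QoS data, the
 * vdec_info fields and basic vframe properties) into the next slot of
 * the per-instance fifo_buf ring; vdec_get_frame_vdec() drains it.
 */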
5321void vdec_fill_vdec_frame(struct vdec_s *vdec, struct vframe_qos_s *vframe_qos,
5322 struct vdec_info *vinfo,struct vframe_s *vf,
5323 u32 hw_dec_time)
5324{
5325 u32 i;
5326 struct vframe_counter_s *fifo_buf;
5327 struct vdec_frames_s *mvfrm = vdec->mvfrm;
5328
5329 if (!mvfrm)
5330 return;
5331 fifo_buf = mvfrm->fifo_buf;
5332
5333	/* Assuming 60fps, the 32-bit mvfrm->wr counter only wraps after
5334	 * roughly 828 days of continuous decoding, which is enough for us. */
5335 i = mvfrm->wr & (NUM_FRAME_VDEC-1); //find the slot num in fifo_buf
5336 mvfrm->fifo_buf[i].decode_time_cost = hw_dec_time;
5337 if (vframe_qos)
5338 memcpy(&fifo_buf[i].qos, vframe_qos, sizeof(struct vframe_qos_s));
5339 if (vinfo) {
5340 memcpy(&fifo_buf[i].frame_width, &vinfo->frame_width,
5341 ((char*)&vinfo->reserved[0] - (char*)&vinfo->frame_width));
5342 }
5343 if (vf) {
5344 fifo_buf[i].vf_type = vf->type;
5345 fifo_buf[i].signal_type = vf->signal_type;
5346 fifo_buf[i].pts = vf->pts;
5347 fifo_buf[i].pts_us64 = vf->pts_us64;
5348 }
5349 mvfrm->wr++;
5350}
5351EXPORT_SYMBOL(vdec_fill_vdec_frame);
5352
5353/* copy_to_user() may sleep, which could block vdec_fill_vdec_frame() and
5354 * is not acceptable here.  The caller therefore passes a temporary kernel
5355 * buffer and we copy the frame counters into that instead. */
5356u32 vdec_get_frame_vdec(struct vdec_s *vdec, struct vframe_counter_s *tmpbuf)
5357{
5358 u32 toread = 0;
5359 u32 slot_rd;
5360 struct vframe_counter_s *fifo_buf = NULL;
5361 struct vdec_frames_s *mvfrm = NULL;
5362
5363 /*
5364 switch (version) {
5365 case version_1:
5366 f1();
5367 case version_2:
5368 f2();
5369 default:
5370 break;
5371 }
5372 */
5373
5374 if (!vdec)
5375 return 0;
5376 mvfrm = vdec->mvfrm;
5377 if (!mvfrm)
5378 return 0;
5379
5380 fifo_buf = &mvfrm->fifo_buf[0];
5381
5382 toread = mvfrm->wr - mvfrm->rd;
5383 if (toread) {
5384		if (toread >= NUM_FRAME_VDEC - QOS_FRAME_NUM) {
5385			/* the unread backlog nearly fills the ring: drop the oldest entries, keep QOS_FRAME_NUM of them */
5386			mvfrm->rd = mvfrm->wr - (NUM_FRAME_VDEC - QOS_FRAME_NUM);
5387		}
5388
5389		if (toread >= QOS_FRAME_NUM) {
5390			toread = QOS_FRAME_NUM; /* read at most QOS_FRAME_NUM frames per call */
5391		}
5392
5393		slot_rd = mvfrm->rd & (NUM_FRAME_VDEC - 1); /* power-of-two mask, i.e. rd % NUM_FRAME_VDEC */
5394		if (slot_rd + toread <= NUM_FRAME_VDEC) {
5395			memcpy(tmpbuf, &fifo_buf[slot_rd], toread * sizeof(struct vframe_counter_s));
5396		} else {
5397			u32 exceed;
5398			exceed = slot_rd + toread - NUM_FRAME_VDEC;
5399			memcpy(tmpbuf, &fifo_buf[slot_rd], (NUM_FRAME_VDEC - slot_rd) * sizeof(struct vframe_counter_s));
5400			memcpy(&tmpbuf[NUM_FRAME_VDEC - slot_rd], &fifo_buf[0], exceed * sizeof(struct vframe_counter_s));
5401		}
5402
5403 mvfrm->rd += toread;
5404 }
5405 return toread;
5406}
5407EXPORT_SYMBOL(vdec_get_frame_vdec);
5408
5409RESERVEDMEM_OF_DECLARE(vdec, "amlogic, vdec-memory", vdec_mem_setup);
5410/*
5411uint force_hevc_clock_cntl;
5412EXPORT_SYMBOL(force_hevc_clock_cntl);
5413
5414module_param(force_hevc_clock_cntl, uint, 0664);
5415*/
5416module_param(debug, uint, 0664);
5417module_param(debug_trace_num, uint, 0664);
5418module_param(hevc_max_reset_count, int, 0664);
5419module_param(clk_config, uint, 0664);
5420module_param(step_mode, int, 0664);
5421module_param(debugflags, int, 0664);
5422module_param(parallel_decode, int, 0664);
5423module_param(fps_detection, int, 0664);
5424module_param(fps_clear, int, 0664);
5425module_param(force_nosecure_even_drm, int, 0664);
5426module_param(disable_switch_single_to_mult, int, 0664);
5427
5428module_param(frameinfo_flag, int, 0664);
5429MODULE_PARM_DESC(frameinfo_flag,
5430 "\n frameinfo_flag\n");
5431module_param(v4lvideo_add_di, int, 0664);
5432MODULE_PARM_DESC(v4lvideo_add_di,
5433 "\n v4lvideo_add_di\n");
5434
5435module_param(max_di_instance, int, 0664);
5436MODULE_PARM_DESC(max_di_instance,
5437 "\n max_di_instance\n");
5438
5439/*
5440*module_init(vdec_module_init);
5441*module_exit(vdec_module_exit);
5442*/
5443#define CREATE_TRACE_POINTS
5444#include "vdec_trace.h"
5445MODULE_DESCRIPTION("AMLOGIC vdec driver");
5446MODULE_LICENSE("GPL");
5447MODULE_AUTHOR("Tim Yao <timyao@amlogic.com>");
5448