summaryrefslogtreecommitdiff
path: root/drivers/frame_provider/decoder/utils/vdec.c (plain)
blob: 41e84ba04b26ea12114f9ea501dddaf7b69710c8
1/*
2 * drivers/amlogic/media/frame_provider/decoder/utils/vdec.c
3 *
4 * Copyright (C) 2016 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/spinlock.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/delay.h>
23#include <linux/kthread.h>
24#include <linux/platform_device.h>
25#include <linux/uaccess.h>
26#include <linux/semaphore.h>
27#include <linux/sched/rt.h>
28#include <linux/interrupt.h>
29#include <linux/amlogic/media/utils/vformat.h>
30#include <linux/amlogic/iomap.h>
31#include <linux/amlogic/media/canvas/canvas.h>
32#include <linux/amlogic/media/vfm/vframe.h>
33#include <linux/amlogic/media/vfm/vframe_provider.h>
34#include <linux/amlogic/media/vfm/vframe_receiver.h>
35#include <linux/amlogic/media/video_sink/ionvideo_ext.h>
36#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
37#include <linux/amlogic/media/video_sink/v4lvideo_ext.h>
38#endif
39#include <linux/amlogic/media/vfm/vfm_ext.h>
40/*for VDEC_DEBUG_SUPPORT*/
41#include <linux/time.h>
42
43#include <linux/amlogic/media/utils/vdec_reg.h>
44#include "vdec.h"
45#include "vdec_trace.h"
46#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
47#include "vdec_profile.h"
48#endif
49#include <linux/of.h>
50#include <linux/of_fdt.h>
51#include <linux/libfdt_env.h>
52#include <linux/of_reserved_mem.h>
53#include <linux/dma-contiguous.h>
54#include <linux/cma.h>
55#include <linux/module.h>
56#include <linux/slab.h>
57#include <linux/dma-mapping.h>
58#include <linux/dma-contiguous.h>
59#include "../../../stream_input/amports/amports_priv.h"
60
61#include <linux/amlogic/media/utils/amports_config.h>
62#include "../utils/amvdec.h"
63#include "vdec_input.h"
64
65#include "../../../common/media_clock/clk/clk.h"
66#include <linux/reset.h>
67#include <linux/amlogic/cpu_version.h>
68#include <linux/amlogic/media/codec_mm/codec_mm.h>
69#include <linux/amlogic/media/video_sink/video_keeper.h>
70#include <linux/amlogic/media/codec_mm/configs.h>
71#include <linux/amlogic/media/frame_sync/ptsserv.h>
72#include "secprot.h"
73#include "../../../common/chips/decoder_cpu_ver_info.h"
74#include "frame_check.h"
75
76#ifdef CONFIG_AMLOGIC_POWER
77#include <linux/amlogic/power_ctrl.h>
78#endif
79
static DEFINE_MUTEX(vdec_mutex);

#define MC_SIZE (4096 * 4)
#define CMA_ALLOC_SIZE SZ_64M
#define MEM_NAME "vdec_prealloc"
static int inited_vcodec_num;
/* wall-clock milliseconds derived from 64-bit jiffies */
#define jiffies_ms div64_u64(get_jiffies_64() * 1000, HZ)
static int poweron_clock_level;
static int keep_vdec_mem;
static unsigned int debug_trace_num = 16 * 20;
static int step_mode;
static unsigned int clk_config;
/*
 * debug bit meanings (as used in this file):
 *  &1: sched_priority to MAX_RT_PRIO -1.
 *  &2: always reload firmware.
 *  &4: vdec canvas debug enable
 *  &8: log vdec_up()/stream-buffer wakeups
 */
static unsigned int debug;

static int hevc_max_reset_count;

static int no_powerdown;
static int parallel_decode = 1;
static int fps_detection;	/* enables vdec_fps_detec() sampling */
static int fps_clear;		/* one-shot request: clear all fps samples */


static int force_nosecure_even_drm;
static int disable_switch_single_to_mult;

static DEFINE_SPINLOCK(vdec_spin_lock);

#define HEVC_TEST_LIMIT 100
#define GXBB_REV_A_MINOR 0xA

#define PRINT_FRAME_INFO 1
#define DISABLE_FRAME_INFO 2

static int frameinfo_flag = 0;
static int v4lvideo_add_di = 1;
static int max_di_instance = 2;

//static int path_debug = 0;

static struct vframe_qos_s *frame_info_buf_in = NULL;
static struct vframe_qos_s *frame_info_buf_out = NULL;
static int frame_qos_wr = 0;
static int frame_qos_rd = 0;
int decode_underflow = 0;

/* total canvas-table slots: low range 0..AMVDEC_CANVAS_MAX2 plus the
 * AMVDEC_CANVAS_START_INDEX..AMVDEC_CANVAS_MAX1 range (see get_canvas_ex())
 */
#define CANVAS_MAX_SIZE (AMVDEC_CANVAS_MAX1 - AMVDEC_CANVAS_START_INDEX + 1 + AMVDEC_CANVAS_MAX2 + 1)
131
/* Name/offset pair; presumably used for register dump tables — confirm
 * against the users of this type elsewhere in the file.
 */
struct am_reg {
	char *name;
	int offset;
};

/*
 * Per-IRQ bookkeeping: the requested irq number, the decoder-supplied
 * top-half and threaded handlers with their cookie, and the vdec
 * instance the interrupt is currently routed to.
 */
struct vdec_isr_context_s {
	int index;
	int irq;
	irq_handler_t dev_isr;
	irq_handler_t dev_threaded_isr;
	void *dev_id;
	struct vdec_s *vdec;
};

/* Rolling decode-rate sample for one instance (see vdec_fps_detec()). */
struct decode_fps_s {
	u32 frame_count;
	u64 start_timestamp;	/* us, taken at the first counted frame */
	u64 last_timestamp;	/* us, taken at the most recent frame */
	u32 fps;		/* scaled rate; see formula in vdec_fps_detec() */
};
152
/*
 * Global decoder-core state; a single instance lives behind the
 * vdec_core pointer below.  In the code visible here the canvas_lock,
 * fps_lock and input_lock spinlocks each guard one concern and are not
 * nested within each other.
 */
struct vdec_core_s {
	struct list_head connected_vdec_list;	/* all connected instances */
	spinlock_t lock;			/* core/scheduler state */
	spinlock_t canvas_lock;			/* canvas_stat[] table */
	spinlock_t fps_lock;			/* decode_fps[] table */
	spinlock_t input_lock;			/* buff_flag / stream_buff_flag */
	struct ida ida;				/* instance id allocator */
	atomic_t vdec_nr;			/* live instance count */
	struct vdec_s *vfm_vdec;
	struct vdec_s *active_vdec;		/* last owner of the VDEC_1 core */
	struct vdec_s *active_hevc;		/* last owner of the HEVC core */
	struct vdec_s *hint_fr_vdec;
	struct platform_device *vdec_core_platform_device;
	struct device *cma_dev;			/* device used for CMA allocations */
	struct semaphore sem;			/* wakes the scheduler thread */
	struct task_struct *thread;
	struct workqueue_struct *vdec_core_wq;	/* see vdec_schedule_work() */

	unsigned long sched_mask;
	struct vdec_isr_context_s isr_context[VDEC_IRQ_MAX];
	int power_ref_count[VDEC_MAX];
	struct vdec_s *last_vdec;
	int parallel_dec;
	unsigned long power_ref_mask;
	int vdec_combine_flag;
	struct decode_fps_s decode_fps[MAX_INSTANCE_MUN];
	unsigned long buff_flag;		/* core_mask bits: frame data queued */
	unsigned long stream_buff_flag;		/* core_mask bits: stream-based input */
};
182
/*
 * One entry of the shared canvas accounting table: the canvas type it
 * currently serves, a share/reference count, and a bitmask of the
 * instance ids holding a reference (see get_canvas_ex()/free_canvas_ex()).
 */
struct canvas_status_s {
	int type;
	int canvas_used_flag;
	int id;
};
188
189
/* singleton core state; presumably allocated at driver probe (not
 * visible in this chunk)
 */
static struct vdec_core_s *vdec_core;

/* printable names, indexed by the VDEC_STATUS_* values */
static const char * const vdec_status_string[] = {
	"VDEC_STATUS_UNINITIALIZED",
	"VDEC_STATUS_DISCONNECTED",
	"VDEC_STATUS_CONNECTED",
	"VDEC_STATUS_ACTIVE"
};

static int debugflags;

/* shared canvas accounting table, guarded by vdec_core->canvas_lock */
static struct canvas_status_s canvas_stat[AMVDEC_CANVAS_MAX1 - AMVDEC_CANVAS_START_INDEX + 1 + AMVDEC_CANVAS_MAX2 + 1];
202
203
/* Expose the module-level debugflags word to other decoder modules. */
int vdec_get_debug_flags(void)
{
	return debugflags;
}
EXPORT_SYMBOL(vdec_get_debug_flags);
209
210unsigned char is_mult_inc(unsigned int type)
211{
212 unsigned char ret = 0;
213 if (vdec_get_debug_flags() & 0xf000)
214 ret = (vdec_get_debug_flags() & 0x1000)
215 ? 1 : 0;
216 else if (type & PORT_TYPE_DECODER_SCHED)
217 ret = 1;
218 return ret;
219}
220EXPORT_SYMBOL(is_mult_inc);
221
/* Per-core: does this HW core consume an input (stream/frame) feed? */
static const bool cores_with_input[VDEC_MAX] = {
	true, /* VDEC_1 */
	false, /* VDEC_HCODEC */
	false, /* VDEC_2 */
	true, /* VDEC_HEVC / VDEC_HEVC_FRONT */
	false, /* VDEC_HEVC_BACK */
};

/* Per-core interrupt line, indexed like cores_with_input[] above. */
static const int cores_int[VDEC_MAX] = {
	VDEC_IRQ_1,
	VDEC_IRQ_2,
	VDEC_IRQ_0,
	VDEC_IRQ_0,
	VDEC_IRQ_HEVC_BACK
};
237
/* Take core->canvas_lock (IRQ-safe); returns the saved flags for the
 * matching vdec_canvas_unlock() call.
 */
unsigned long vdec_canvas_lock(struct vdec_core_s *core)
{
	unsigned long flags;
	spin_lock_irqsave(&core->canvas_lock, flags);

	return flags;
}

/* Release core->canvas_lock with the flags vdec_canvas_lock() returned. */
void vdec_canvas_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->canvas_lock, flags);
}

/* Take core->fps_lock (IRQ-safe); pair with vdec_fps_unlock(). */
unsigned long vdec_fps_lock(struct vdec_core_s *core)
{
	unsigned long flags;
	spin_lock_irqsave(&core->fps_lock, flags);

	return flags;
}

/* Release core->fps_lock with the flags vdec_fps_lock() returned. */
void vdec_fps_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->fps_lock, flags);
}

/* Take the main core->lock (IRQ-safe); pair with vdec_core_unlock(). */
unsigned long vdec_core_lock(struct vdec_core_s *core)
{
	unsigned long flags;

	spin_lock_irqsave(&core->lock, flags);

	return flags;
}

/* Release the main core->lock with the flags vdec_core_lock() returned. */
void vdec_core_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->lock, flags);
}

/* Take core->input_lock (IRQ-safe); pair with vdec_inputbuff_unlock(). */
unsigned long vdec_inputbuff_lock(struct vdec_core_s *core)
{
	unsigned long flags;

	spin_lock_irqsave(&core->input_lock, flags);

	return flags;
}

/* Release core->input_lock with the flags vdec_inputbuff_lock() returned. */
void vdec_inputbuff_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->input_lock, flags);
}
291
292
293static bool vdec_is_input_frame_empty(struct vdec_s *vdec) {
294 struct vdec_core_s *core = vdec_core;
295 bool ret;
296 unsigned long flags;
297
298 flags = vdec_inputbuff_lock(core);
299 ret = !(vdec->core_mask & core->buff_flag);
300 vdec_inputbuff_unlock(core, flags);
301
302 return ret;
303}
304
305static void vdec_up(struct vdec_s *vdec)
306{
307 struct vdec_core_s *core = vdec_core;
308
309 if (debug & 8)
310 pr_info("vdec_up, id:%d\n", vdec->id);
311 up(&core->sem);
312}
313
314
/*
 * Current system time in microseconds.
 * NOTE(review): do_gettimeofday()/timeval are deprecated in newer
 * kernels (y2038); ktime_get_real_ts64() would replace this on an
 * API uplift.
 */
static u64 vdec_get_us_time_system(void)
{
	struct timeval tv;

	do_gettimeofday(&tv);

	return div64_u64(timeval_to_ns(&tv), 1000);
}
323
324static void vdec_fps_clear(int id)
325{
326 if (id >= MAX_INSTANCE_MUN)
327 return;
328
329 vdec_core->decode_fps[id].frame_count = 0;
330 vdec_core->decode_fps[id].start_timestamp = 0;
331 vdec_core->decode_fps[id].last_timestamp = 0;
332 vdec_core->decode_fps[id].fps = 0;
333}
334
335static void vdec_fps_clearall(void)
336{
337 int i;
338
339 for (i = 0; i < MAX_INSTANCE_MUN; i++) {
340 vdec_core->decode_fps[i].frame_count = 0;
341 vdec_core->decode_fps[i].start_timestamp = 0;
342 vdec_core->decode_fps[i].last_timestamp = 0;
343 vdec_core->decode_fps[i].fps = 0;
344 }
345}
346
/*
 * Account one decoded frame for instance @id and refresh its rate
 * estimate.  Gated by the fps_detection module knob; a pending
 * fps_clear request flushes all samples first.
 *
 * The stored value is frame_count * 10^10 / elapsed_us, i.e. the
 * frame rate scaled by 10^4.
 */
static void vdec_fps_detec(int id)
{
	unsigned long flags;

	if (fps_detection == 0)
		return;

	if (id >= MAX_INSTANCE_MUN)
		return;

	flags = vdec_fps_lock(vdec_core);

	if (fps_clear == 1) {
		vdec_fps_clearall();
		fps_clear = 0;
	}

	vdec_core->decode_fps[id].frame_count++;
	if (vdec_core->decode_fps[id].frame_count == 1) {
		/* first frame: open the measurement window */
		vdec_core->decode_fps[id].start_timestamp =
			vdec_get_us_time_system();
		vdec_core->decode_fps[id].last_timestamp =
			vdec_core->decode_fps[id].start_timestamp;
	} else {
		vdec_core->decode_fps[id].last_timestamp =
			vdec_get_us_time_system();
		vdec_core->decode_fps[id].fps =
			(u32)div_u64(((u64)(vdec_core->decode_fps[id].frame_count) *
				10000000000),
				(vdec_core->decode_fps[id].last_timestamp -
				vdec_core->decode_fps[id].start_timestamp));
	}
	vdec_fps_unlock(vdec_core, flags);
}
381
382
383
384static int get_canvas(unsigned int index, unsigned int base)
385{
386 int start;
387 int canvas_index = index * base;
388 int ret;
389
390 if ((base > 4) || (base == 0))
391 return -1;
392
393 if ((AMVDEC_CANVAS_START_INDEX + canvas_index + base - 1)
394 <= AMVDEC_CANVAS_MAX1) {
395 start = AMVDEC_CANVAS_START_INDEX + base * index;
396 } else {
397 canvas_index -= (AMVDEC_CANVAS_MAX1 -
398 AMVDEC_CANVAS_START_INDEX + 1) / base * base;
399 if (canvas_index <= AMVDEC_CANVAS_MAX2)
400 start = canvas_index / base;
401 else
402 return -1;
403 }
404
405 if (base == 1) {
406 ret = start;
407 } else if (base == 2) {
408 ret = ((start + 1) << 16) | ((start + 1) << 8) | start;
409 } else if (base == 3) {
410 ret = ((start + 2) << 16) | ((start + 1) << 8) | start;
411 } else if (base == 4) {
412 ret = (((start + 3) << 24) | (start + 2) << 16) |
413 ((start + 1) << 8) | start;
414 }
415
416 return ret;
417}
418
/*
 * Acquire a canvas slot of @type for instance @id.
 *
 * Pass 1: share an already-typed slot that this id does not yet
 * reference (reference-counted via canvas_used_flag, per-id bit in
 * .id).  Pass 2: claim a completely free slot.  Indices 0x10-0x15 are
 * always skipped (reserved by rdma).
 *
 * Table indices above AMVDEC_CANVAS_MAX2 are remapped into the
 * AMVDEC_CANVAS_START_INDEX..AMVDEC_CANVAS_MAX1 canvas-id range on
 * return.  Returns -1 when no slot is available.
 */
static int get_canvas_ex(int type, int id)
{
	int i;
	unsigned long flags;

	flags = vdec_canvas_lock(vdec_core);

	for (i = 0; i < CANVAS_MAX_SIZE; i++) {
		/*0x10-0x15 has been used by rdma*/
		if ((i >= 0x10) && (i <= 0x15))
			continue;
		if ((canvas_stat[i].type == type) &&
			(canvas_stat[i].id & (1 << id)) == 0) {
			canvas_stat[i].canvas_used_flag++;
			canvas_stat[i].id |= (1 << id);
			if (debug & 4)
				pr_debug("get used canvas %d\n", i);
			vdec_canvas_unlock(vdec_core, flags);
			if (i < AMVDEC_CANVAS_MAX2 + 1)
				return i;
			else
				return (i + AMVDEC_CANVAS_START_INDEX - AMVDEC_CANVAS_MAX2 - 1);
		}
	}

	for (i = 0; i < CANVAS_MAX_SIZE; i++) {
		/*0x10-0x15 has been used by rdma*/
		if ((i >= 0x10) && (i <= 0x15))
			continue;
		if (canvas_stat[i].type == 0) {
			/* free slot: claim it for this type/id */
			canvas_stat[i].type = type;
			canvas_stat[i].canvas_used_flag = 1;
			canvas_stat[i].id = (1 << id);
			if (debug & 4) {
				pr_debug("get canvas %d\n", i);
				pr_debug("canvas_used_flag %d\n",
					canvas_stat[i].canvas_used_flag);
				pr_debug("canvas_stat[i].id %d\n",
					canvas_stat[i].id);
			}
			vdec_canvas_unlock(vdec_core, flags);
			if (i < AMVDEC_CANVAS_MAX2 + 1)
				return i;
			else
				return (i + AMVDEC_CANVAS_START_INDEX - AMVDEC_CANVAS_MAX2 - 1);
		}
	}
	vdec_canvas_unlock(vdec_core, flags);

	pr_info("cannot get canvas\n");

	return -1;
}
472
/*
 * Drop instance @id's reference on canvas id @index (the inverse of
 * the remapping done by get_canvas_ex()).  When the reference count
 * hits zero the slot is returned to the free pool.  Out-of-range
 * indices are ignored.
 */
static void free_canvas_ex(int index, int id)
{
	unsigned long flags;
	int offset;

	flags = vdec_canvas_lock(vdec_core);
	/* map the canvas id back to its canvas_stat[] table offset */
	if (index >= 0 &&
		index < AMVDEC_CANVAS_MAX2 + 1)
		offset = index;
	else if ((index >= AMVDEC_CANVAS_START_INDEX) &&
		(index <= AMVDEC_CANVAS_MAX1))
		offset = index + AMVDEC_CANVAS_MAX2 + 1 - AMVDEC_CANVAS_START_INDEX;
	else {
		vdec_canvas_unlock(vdec_core, flags);
		return;
	}

	/* only release if this id actually holds a reference */
	if ((canvas_stat[offset].canvas_used_flag > 0) &&
		(canvas_stat[offset].id & (1 << id))) {
		canvas_stat[offset].canvas_used_flag--;
		canvas_stat[offset].id &= ~(1 << id);
		if (canvas_stat[offset].canvas_used_flag == 0) {
			canvas_stat[offset].type = 0;
			canvas_stat[offset].id = 0;
		}
		if (debug & 4) {
			pr_debug("free index %d used_flag %d, type = %d, id = %d\n",
				offset,
				canvas_stat[offset].canvas_used_flag,
				canvas_stat[offset].type,
				canvas_stat[offset].id);
		}
	}
	vdec_canvas_unlock(vdec_core, flags);

	return;

}
511
512static void vdec_dmc_pipeline_reset(void)
513{
514 /*
515 * bit15: vdec_piple
516 * bit14: hevc_dmc_piple
517 * bit13: hevcf_dmc_pipl
518 * bit12: wave420_dmc_pipl
519 * bit11: hcodec_dmc_pipl
520 */
521
522 WRITE_RESET_REG(RESET7_REGISTER,
523 (1 << 15) | (1 << 14) | (1 << 13) |
524 (1 << 12) | (1 << 11));
525}
526
/*
 * Halt the decoder's embedded RISC core (@hw selects VLD or HEVC) and
 * wait for in-flight IMEM/LMEM DMA to drain, each wait bounded by a
 * jiffies timeout so a wedged engine cannot hang the caller.
 *
 * NOTE(review): the HEVC LMEM wait uses HZ/10 while the VLD path uses
 * HZ for the same step -- looks intentional but is undocumented;
 * confirm before unifying.
 */
static void vdec_stop_armrisc(int hw)
{
	ulong timeout = jiffies + HZ;

	if (hw == VDEC_INPUT_TARGET_VLD) {
		/* stop the micro and co processors */
		WRITE_VREG(MPSR, 0);
		WRITE_VREG(CPSR, 0);

		/* wait for instruction-memory DMA to go idle */
		while (READ_VREG(IMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}

		/* wait for local-memory DMA to go idle */
		timeout = jiffies + HZ;
		while (READ_VREG(LMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}
	} else if (hw == VDEC_INPUT_TARGET_HEVC) {
		WRITE_VREG(HEVC_MPSR, 0);
		WRITE_VREG(HEVC_CPSR, 0);

		while (READ_VREG(HEVC_IMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}

		timeout = jiffies + HZ/10;
		while (READ_VREG(HEVC_LMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}
	}
}
561
/*
 * Gate off DMC (memory controller) requests for the decoder selected
 * by the vdec's input target: stop the RISC core first, clear the
 * request-enable bits, then poll DMC_CHAN_STS until the channel
 * reports idle.  "close first, then wait pending end" -- ordering per
 * vlsi guidance (original comment).
 *
 * NOTE(review): the DMC_CHAN_STS poll is unbounded; a stuck channel
 * would spin here forever.  Confirm whether a timeout is wanted.
 */
static void vdec_disable_DMC(struct vdec_s *vdec)
{
	/*close first,then wait pedding end,timing suggestion from vlsi*/
	struct vdec_input_s *input = &vdec->input;
	unsigned long flags;
	unsigned int mask = 0;

	/* request-enable bit layout differs from G12A onward */
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		mask = (1 << 13);
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
			mask = (1 << 21);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		mask = (1 << 4); /*hevc*/
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
			mask |= (1 << 8); /*hevcb */
	}

	/* need to stop armrisc. */
	if (!IS_ERR_OR_NULL(vdec->dev))
		vdec_stop_armrisc(input->target);

	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* wait for the channel(s) to drain */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;

	pr_debug("%s input->target= 0x%x\n", __func__, input->target);
}
594
595static void vdec_enable_DMC(struct vdec_s *vdec)
596{
597 struct vdec_input_s *input = &vdec->input;
598 unsigned long flags;
599 unsigned int mask = 0;
600
601 if (input->target == VDEC_INPUT_TARGET_VLD) {
602 mask = (1 << 13);
603 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
604 mask = (1 << 21);
605 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
606 mask = (1 << 4); /*hevc*/
607 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
608 mask |= (1 << 8); /*hevcb */
609 }
610
611 /*must to be reset the dmc pipeline if it's g12b.*/
612 if (get_cpu_type() == AM_MESON_CPU_MAJOR_ID_G12B)
613 vdec_dmc_pipeline_reset();
614
615 spin_lock_irqsave(&vdec_spin_lock, flags);
616 codec_dmcbus_write(DMC_REQ_CTRL,
617 codec_dmcbus_read(DMC_REQ_CTRL) | mask);
618 spin_unlock_irqrestore(&vdec_spin_lock, flags);
619 pr_debug("%s input->target= 0x%x\n", __func__, input->target);
620}
621
622
623
624static int vdec_get_hw_type(int value)
625{
626 int type;
627 switch (value) {
628 case VFORMAT_HEVC:
629 case VFORMAT_VP9:
630 case VFORMAT_AVS2:
631 type = CORE_MASK_HEVC;
632 break;
633
634 case VFORMAT_MPEG12:
635 case VFORMAT_MPEG4:
636 case VFORMAT_H264:
637 case VFORMAT_MJPEG:
638 case VFORMAT_REAL:
639 case VFORMAT_JPEG:
640 case VFORMAT_VC1:
641 case VFORMAT_AVS:
642 case VFORMAT_YUV:
643 case VFORMAT_H264MVC:
644 case VFORMAT_H264_4K2K:
645 case VFORMAT_H264_ENC:
646 case VFORMAT_JPEG_ENC:
647 type = CORE_MASK_VDEC_1;
648 break;
649
650 default:
651 type = -1;
652 }
653
654 return type;
655}
656
657
658static void vdec_save_active_hw(struct vdec_s *vdec)
659{
660 int type;
661
662 type = vdec_get_hw_type(vdec->port->vformat);
663
664 if (type == CORE_MASK_HEVC) {
665 vdec_core->active_hevc = vdec;
666 } else if (type == CORE_MASK_VDEC_1) {
667 vdec_core->active_vdec = vdec;
668 } else {
669 pr_info("save_active_fw wrong\n");
670 }
671}
672
/*
 * Rebuild the core-wide input-status bitmaps under input_lock:
 * buff_flag collects the core_mask bits of frame-based inputs that
 * have queued frames (or EOS); stream_buff_flag collects the
 * core_mask bits of all stream-based inputs.  buff_flag is consumed
 * by vdec_is_input_frame_empty().
 */
static void vdec_update_buff_status(void)
{
	struct vdec_core_s *core = vdec_core;
	unsigned long flags;
	struct vdec_s *vdec;

	flags = vdec_inputbuff_lock(core);
	core->buff_flag = 0;
	core->stream_buff_flag = 0;
	list_for_each_entry(vdec, &core->connected_vdec_list, list) {
		struct vdec_input_s *input = &vdec->input;
		if (input_frame_based(input)) {
			if (input->have_frame_num || input->eos)
				core->buff_flag |= vdec->core_mask;
		} else if (input_stream_based(input)) {
			core->stream_buff_flag |= vdec->core_mask;
		}
	}
	vdec_inputbuff_unlock(core, flags);
}
693
694#if 0
695void vdec_update_streambuff_status(void)
696{
697 struct vdec_core_s *core = vdec_core;
698 struct vdec_s *vdec;
699
700 /* check streaming prepare level threshold if not EOS */
701 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
702 struct vdec_input_s *input = &vdec->input;
703 if (input && input_stream_based(input) && !input->eos &&
704 (vdec->need_more_data & VDEC_NEED_MORE_DATA)) {
705 u32 rp, wp, level;
706
707 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
708 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
709 if (wp < rp)
710 level = input->size + wp - rp;
711 else
712 level = wp - rp;
713 if ((level < input->prepare_level) &&
714 (pts_get_rec_num(PTS_TYPE_VIDEO,
715 vdec->input.total_rd_count) < 2)) {
716 break;
717 } else if (level > input->prepare_level) {
718 vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
719 if (debug & 8)
720 pr_info("vdec_flush_streambuff_status up\n");
721 vdec_up(vdec);
722 }
723 break;
724 }
725 }
726}
727EXPORT_SYMBOL(vdec_update_streambuff_status);
728#endif
729
730int vdec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
731{
732 if (vdec && vdec->dec_status &&
733 ((vdec->status == VDEC_STATUS_CONNECTED ||
734 vdec->status == VDEC_STATUS_ACTIVE)))
735 return vdec->dec_status(vdec, vstatus);
736
737 return 0;
738}
739EXPORT_SYMBOL(vdec_status);
740
741int vdec_set_trickmode(struct vdec_s *vdec, unsigned long trickmode)
742{
743 int r;
744
745 if (vdec->set_trickmode) {
746 r = vdec->set_trickmode(vdec, trickmode);
747
748 if ((r == 0) && (vdec->slave) && (vdec->slave->set_trickmode))
749 r = vdec->slave->set_trickmode(vdec->slave,
750 trickmode);
751 return r;
752 }
753
754 return -1;
755}
756EXPORT_SYMBOL(vdec_set_trickmode);
757
/*
 * Record the reset flag and forward it to the instance's optional
 * set_isreset callback.  Returns the callback result, or 0.
 */
int vdec_set_isreset(struct vdec_s *vdec, int isreset)
{
	vdec->is_reset = isreset;
	pr_info("is_reset=%d\n", isreset);
	if (vdec->set_isreset)
		return vdec->set_isreset(vdec, isreset);
	return 0;
}
EXPORT_SYMBOL(vdec_set_isreset);

/* Record whether Dolby Vision metadata travels with the EL stream. */
int vdec_set_dv_metawithel(struct vdec_s *vdec, int isdvmetawithel)
{
	vdec->dolby_meta_with_el = isdvmetawithel;
	pr_info("isdvmetawithel=%d\n", isdvmetawithel);
	return 0;
}
EXPORT_SYMBOL(vdec_set_dv_metawithel);

/* Set the module-wide flag that suppresses decoder power-down. */
void vdec_set_no_powerdown(int flag)
{
	no_powerdown = flag;
	pr_info("no_powerdown=%d\n", no_powerdown);
	return;
}
EXPORT_SYMBOL(vdec_set_no_powerdown);
783
/*
 * Accumulate per-stream statistics into @vs.
 *
 * @err:    non-zero when the frame decoded with errors.
 * @offset: current stream byte offset; 0 is treated as "no data" and
 *          skips the frame/bitrate accounting.
 *
 * Bitrate is sampled over ~2s windows: samp_cnt accumulates frame_dur
 * (96000*2 per the "2s" comment implies 90 kHz units -- presumably;
 * confirm frame_dur's unit) and bit_rate = bytes-in-window / 2.
 */
void vdec_count_info(struct vdec_info *vs, unsigned int err,
	unsigned int offset)
{
	if (err)
		vs->error_frame_count++;
	if (offset) {
		if (0 == vs->frame_count) {
			vs->offset = 0;
			vs->samp_cnt = 0;
		}
		/* bytes consumed since the previous call */
		vs->frame_data = offset > vs->total_data ?
			offset - vs->total_data : vs->total_data - offset;
		vs->total_data = offset;
		if (vs->samp_cnt < 96000 * 2) { /* 2s */
			if (0 == vs->samp_cnt)
				vs->offset = offset;
			vs->samp_cnt += vs->frame_dur;
		} else {
			vs->bit_rate = (offset - vs->offset) / 2;
			/*pr_info("bitrate : %u\n",vs->bit_rate);*/
			vs->samp_cnt = 0;
		}
		vs->frame_count++;
	}
	/*pr_info("size : %u, offset : %u, dur : %u, cnt : %u\n",
		vs->offset,offset,vs->frame_dur,vs->samp_cnt);*/
	return;
}
EXPORT_SYMBOL(vdec_count_info);
/* 4K decode is supported on every chip except the GXL 805X package. */
int vdec_is_support_4k(void)
{
	return is_meson_gxl_package_805X() ? 0 : 1;
}
EXPORT_SYMBOL(vdec_is_support_4k);
818
819/*
820 * clk_config:
821 *0:default
822 *1:no gp0_pll;
823 *2:always used gp0_pll;
824 *>=10:fixed n M clk;
825 *== 100 , 100M clks;
826 */
827unsigned int get_vdec_clk_config_settings(void)
828{
829 return clk_config;
830}
831void update_vdec_clk_config_settings(unsigned int config)
832{
833 clk_config = config;
834}
835EXPORT_SYMBOL(update_vdec_clk_config_settings);
836
/* True on GXBB revision A silicon, which needs an HEVC workaround
 * (applied by callers of this predicate).
 */
static bool hevc_workaround_needed(void)
{
	return (get_cpu_major_id() == AM_MESON_CPU_MAJOR_ID_GXBB) &&
		(get_meson_cpu_version(MESON_CPU_VERSION_LVL_MINOR)
		== GXBB_REV_A_MINOR);
}

/* Device used for codec CMA allocations (set on the core at probe). */
struct device *get_codec_cma_device(void)
{
	return vdec_core->cma_dev;
}
848
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
/*
 * Platform-device names indexed by vformat.  With MULTI_DEC each
 * format stores a { legacy, multi-instance } pair, so the lookup is
 * format*2 (+1 for multi) -- see get_dev_name().
 */
static const char * const vdec_device_name[] = {
	"amvdec_mpeg12", "ammvdec_mpeg12",
	"amvdec_mpeg4", "ammvdec_mpeg4",
	"amvdec_h264", "ammvdec_h264",
	"amvdec_mjpeg", "ammvdec_mjpeg",
	"amvdec_real", "ammvdec_real",
	"amjpegdec", "ammjpegdec",
	"amvdec_vc1", "ammvdec_vc1",
	"amvdec_avs", "ammvdec_avs",
	"amvdec_yuv", "ammvdec_yuv",
	"amvdec_h264mvc", "ammvdec_h264mvc",
	"amvdec_h264_4k2k", "ammvdec_h264_4k2k",
	"amvdec_h265", "ammvdec_h265",
	"amvenc_avc", "amvenc_avc",
	"jpegenc", "jpegenc",
	"amvdec_vp9", "ammvdec_vp9",
	"amvdec_avs2", "ammvdec_avs2"
};


#else

/* Platform-device names indexed directly by vformat (no multi pairs). */
static const char * const vdec_device_name[] = {
	"amvdec_mpeg12",
	"amvdec_mpeg4",
	"amvdec_h264",
	"amvdec_mjpeg",
	"amvdec_real",
	"amjpegdec",
	"amvdec_vc1",
	"amvdec_avs",
	"amvdec_yuv",
	"amvdec_h264mvc",
	"amvdec_h264_4k2k",
	"amvdec_h265",
	"amvenc_avc",
	"jpegenc",
	"amvdec_vp9",
	"amvdec_avs2"
};

#endif
892
893/*
894 * Only support time sliced decoding for frame based input,
895 * so legacy decoder can exist with time sliced decoder.
896 */
897static const char *get_dev_name(bool use_legacy_vdec, int format)
898{
899#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
900 if (use_legacy_vdec)
901 return vdec_device_name[format * 2];
902 else
903 return vdec_device_name[format * 2 + 1];
904#else
905 return vdec_device_name[format];
906#endif
907}
908
#ifdef VDEC_DEBUG_SUPPORT
/* Monotonic profiling timestamp in ns (sched_clock()), used by the
 * update_profi_clk_* helpers below.
 */
static u64 get_current_clk(void)
{
	/*struct timespec xtime = current_kernel_time();
	u64 usec = xtime.tv_sec * 1000000;
	usec += xtime.tv_nsec / 1000;
	*/
	u64 usec = sched_clock();
	return usec;
}
919
920static void inc_profi_count(unsigned long mask, u32 *count)
921{
922 enum vdec_type_e type;
923
924 for (type = VDEC_1; type < VDEC_MAX; type++) {
925 if (mask & (1 << type))
926 count[type]++;
927 }
928}
929
/*
 * Mark a run start at time @clk for every core in @mask: record the
 * run start, latch the profile epoch on first use, and refresh the
 * total elapsed time since that epoch.
 */
static void update_profi_clk_run(struct vdec_s *vdec,
	unsigned long mask, u64 clk)
{
	enum vdec_type_e type;

	for (type = VDEC_1; type < VDEC_MAX; type++) {
		if (mask & (1 << type)) {
			vdec->start_run_clk[type] = clk;
			/* first run on this core: start the profiling epoch */
			if (vdec->profile_start_clk[type] == 0)
				vdec->profile_start_clk[type] = clk;
			vdec->total_clk[type] = clk
				- vdec->profile_start_clk[type];
			/*pr_info("set start_run_clk %ld\n",
			vdec->start_run_clk);*/

		}
	}
}
948
/*
 * Mark a run stop at time @clk for every core in @mask, accumulating
 * the elapsed run time since the matching update_profi_clk_run().
 * Logs (but does not skip) cores whose start was never recorded.
 */
static void update_profi_clk_stop(struct vdec_s *vdec,
	unsigned long mask, u64 clk)
{
	enum vdec_type_e type;

	for (type = VDEC_1; type < VDEC_MAX; type++) {
		if (mask & (1 << type)) {
			if (vdec->start_run_clk[type] == 0)
				pr_info("error, start_run_clk[%d] not set\n", type);

			/*pr_info("update run_clk type %d, %ld, %ld, %ld\n",
			type,
			clk,
			vdec->start_run_clk[type],
			vdec->run_clk[type]);*/
			vdec->run_clk[type] +=
				(clk - vdec->start_run_clk[type]);
		}
	}
}
969
970#endif
971
972int vdec_set_decinfo(struct vdec_s *vdec, struct dec_sysinfo *p)
973{
974 if (copy_from_user((void *)&vdec->sys_info_store, (void *)p,
975 sizeof(struct dec_sysinfo)))
976 return -EFAULT;
977
978 /* force switch to mult instance if supports this profile. */
979 if ((vdec->type == VDEC_TYPE_SINGLE) &&
980 !disable_switch_single_to_mult) {
981 const char *str = NULL;
982 char fmt[16] = {0};
983
984 str = strchr(get_dev_name(false, vdec->format), '_');
985 if (!str)
986 return -1;
987
988 sprintf(fmt, "m%s", ++str);
989 if (is_support_profile(fmt) &&
990 vdec->sys_info->format != VIDEO_DEC_FORMAT_H263)
991 vdec->type = VDEC_TYPE_STREAM_PARSER;
992 }
993
994 return 0;
995}
996EXPORT_SYMBOL(vdec_set_decinfo);
997
998/* construct vdec strcture */
999struct vdec_s *vdec_create(struct stream_port_s *port,
1000 struct vdec_s *master)
1001{
1002 struct vdec_s *vdec;
1003 int type = VDEC_TYPE_SINGLE;
1004 int id;
1005
1006 if (is_mult_inc(port->type))
1007 type = (port->type & PORT_TYPE_FRAME) ?
1008 VDEC_TYPE_FRAME_BLOCK :
1009 VDEC_TYPE_STREAM_PARSER;
1010
1011 id = ida_simple_get(&vdec_core->ida,
1012 0, MAX_INSTANCE_MUN, GFP_KERNEL);
1013 if (id < 0) {
1014 pr_info("vdec_create request id failed!ret =%d\n", id);
1015 return NULL;
1016 }
1017 vdec = vzalloc(sizeof(struct vdec_s));
1018
1019 /* TBD */
1020 if (vdec) {
1021 vdec->magic = 0x43454456;
1022 vdec->id = -1;
1023 vdec->type = type;
1024 vdec->port = port;
1025 vdec->sys_info = &vdec->sys_info_store;
1026
1027 INIT_LIST_HEAD(&vdec->list);
1028
1029 atomic_inc(&vdec_core->vdec_nr);
1030 vdec->id = id;
1031 vdec_input_init(&vdec->input, vdec);
1032 vdec->input.vdec_is_input_frame_empty = vdec_is_input_frame_empty;
1033 vdec->input.vdec_up = vdec_up;
1034 if (master) {
1035 vdec->master = master;
1036 master->slave = vdec;
1037 master->sched = 1;
1038 }
1039 }
1040
1041 pr_debug("vdec_create instance %p, total %d\n", vdec,
1042 atomic_read(&vdec_core->vdec_nr));
1043
1044 //trace_vdec_create(vdec); /*DEBUG_TMP*/
1045
1046 return vdec;
1047}
1048EXPORT_SYMBOL(vdec_create);
1049
1050int vdec_set_format(struct vdec_s *vdec, int format)
1051{
1052 vdec->format = format;
1053 vdec->port_flag |= PORT_FLAG_VFORMAT;
1054
1055 if (vdec->slave) {
1056 vdec->slave->format = format;
1057 vdec->slave->port_flag |= PORT_FLAG_VFORMAT;
1058 }
1059 //trace_vdec_set_format(vdec, format);/*DEBUG_TMP*/
1060
1061 return 0;
1062}
1063EXPORT_SYMBOL(vdec_set_format);
1064
/*
 * Record a 32-bit pts and derive the 64-bit value as pts * 100 / 9
 * (i.e. microseconds, assuming pts is in 90 kHz ticks -- TODO confirm
 * against callers).
 */
int vdec_set_pts(struct vdec_s *vdec, u32 pts)
{
	vdec->pts = pts;
	vdec->pts64 = div64_u64((u64)pts * 100, 9);
	vdec->pts_valid = true;
	//trace_vdec_set_pts(vdec, (u64)pts);/*DEBUG_TMP*/
	return 0;
}
EXPORT_SYMBOL(vdec_set_pts);

/* Record a caller-supplied timestamp and mark it valid. */
void vdec_set_timestamp(struct vdec_s *vdec, u64 timestamp)
{
	vdec->timestamp = timestamp;
	vdec->timestamp_valid = true;
}
EXPORT_SYMBOL(vdec_set_timestamp);
1081
/*
 * Record a 64-bit pts and derive the 32-bit value as pts64 * 9 / 100
 * (the inverse of vdec_set_pts()'s conversion; assumes pts64 is in
 * microseconds -- TODO confirm against callers).
 */
int vdec_set_pts64(struct vdec_s *vdec, u64 pts64)
{
	vdec->pts64 = pts64;
	vdec->pts = (u32)div64_u64(pts64 * 9, 100);
	vdec->pts_valid = true;

	//trace_vdec_set_pts64(vdec, pts64);/*DEBUG_TMP*/
	return 0;
}
EXPORT_SYMBOL(vdec_set_pts64);
1092
/* Current VDEC_STATUS_* value of the instance. */
int vdec_get_status(struct vdec_s *vdec)
{
	return vdec->status;
}
EXPORT_SYMBOL(vdec_get_status);

/* Number of frames currently queued on the instance's input chain. */
int vdec_get_frame_num(struct vdec_s *vdec)
{
	return vdec->input.have_frame_num;
}
EXPORT_SYMBOL(vdec_get_frame_num);

/* Set the instance's current VDEC_STATUS_* value. */
void vdec_set_status(struct vdec_s *vdec, int status)
{
	//trace_vdec_set_status(vdec, status);/*DEBUG_TMP*/
	vdec->status = status;
}
EXPORT_SYMBOL(vdec_set_status);

/* Set the status the instance should transition to next. */
void vdec_set_next_status(struct vdec_s *vdec, int status)
{
	//trace_vdec_set_next_status(vdec, status);/*DEBUG_TMP*/
	vdec->next_status = status;
}
EXPORT_SYMBOL(vdec_set_next_status);

/* Select the frame-based video output path for this instance. */
int vdec_set_video_path(struct vdec_s *vdec, int video_path)
{
	vdec->frame_base_video_path = video_path;
	return 0;
}
EXPORT_SYMBOL(vdec_set_video_path);

/* Record the vframe receiver instance id this decoder feeds. */
int vdec_set_receive_id(struct vdec_s *vdec, int receive_id)
{
	vdec->vf_receiver_inst = receive_id;
	return 0;
}
EXPORT_SYMBOL(vdec_set_receive_id);
1132
/* Add frame data to the instance's input chain; returns the
 * vdec_input_add_frame() result (bytes accepted or an error code --
 * see vdec_input.c for the exact contract).
 */
int vdec_write_vframe(struct vdec_s *vdec, const char *buf, size_t count)
{
	return vdec_input_add_frame(&vdec->input, buf, count);
}
EXPORT_SYMBOL(vdec_write_vframe);
1139
1140/* add a work queue thread for vdec*/
1141void vdec_schedule_work(struct work_struct *work)
1142{
1143 if (vdec_core->vdec_core_wq)
1144 queue_work(vdec_core->vdec_core_wq, work);
1145 else
1146 schedule_work(work);
1147}
1148EXPORT_SYMBOL(vdec_schedule_work);
1149
1150static struct vdec_s *vdec_get_associate(struct vdec_s *vdec)
1151{
1152 if (vdec->master)
1153 return vdec->master;
1154 else if (vdec->slave)
1155 return vdec->slave;
1156 return NULL;
1157}
1158
1159static void vdec_sync_input_read(struct vdec_s *vdec)
1160{
1161 if (!vdec_stream_based(vdec))
1162 return;
1163
1164 if (vdec_dual(vdec)) {
1165 u32 me, other;
1166 if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1167 me = READ_VREG(VLD_MEM_VIFIFO_WRAP_COUNT);
1168 other =
1169 vdec_get_associate(vdec)->input.stream_cookie;
1170 if (me > other)
1171 return;
1172 else if (me == other) {
1173 me = READ_VREG(VLD_MEM_VIFIFO_RP);
1174 other =
1175 vdec_get_associate(vdec)->input.swap_rp;
1176 if (me > other) {
1177 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1178 vdec_get_associate(vdec)->
1179 input.swap_rp);
1180 return;
1181 }
1182 }
1183 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1184 READ_VREG(VLD_MEM_VIFIFO_RP));
1185 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1186 me = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
1187 if (((me & 0x80000000) == 0) &&
1188 (vdec->input.streaming_rp & 0x80000000))
1189 me += 1ULL << 32;
1190 other = vdec_get_associate(vdec)->input.streaming_rp;
1191 if (me > other) {
1192 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1193 vdec_get_associate(vdec)->
1194 input.swap_rp);
1195 return;
1196 }
1197
1198 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1199 READ_VREG(HEVC_STREAM_RD_PTR));
1200 }
1201 } else if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1202 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1203 READ_VREG(VLD_MEM_VIFIFO_RP));
1204 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1205 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1206 READ_VREG(HEVC_STREAM_RD_PTR));
1207 }
1208}
1209
1210static void vdec_sync_input_write(struct vdec_s *vdec)
1211{
1212 if (!vdec_stream_based(vdec))
1213 return;
1214
1215 if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1216 WRITE_VREG(VLD_MEM_VIFIFO_WP,
1217 READ_PARSER_REG(PARSER_VIDEO_WP));
1218 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1219 WRITE_VREG(HEVC_STREAM_WR_PTR,
1220 READ_PARSER_REG(PARSER_VIDEO_WP));
1221 }
1222}
1223
1224/*
1225 *get next frame from input chain
1226 */
/*
 * The VLD FIFO is 512 bytes, and the video-buffer-level-empty
 * interrupt threshold is set to 0x80 bytes.
 */
1231#define VLD_PADDING_SIZE 1024
1232#define HEVC_PADDING_SIZE (1024*16)
/*
 * vdec_prepare_input() - program the HW stream buffer for the next run
 * @vdec: decoder instance
 * @p:    out - next chunk to decode (frame-based input) or NULL (stream)
 *
 * Returns the number of bytes available for decoding, or -1 when a
 * frame-based input has no chunk ready.
 */
int vdec_prepare_input(struct vdec_s *vdec, struct vframe_chunk_s **p)
{
	struct vdec_input_s *input = &vdec->input;
	struct vframe_chunk_s *chunk = NULL;
	struct vframe_block_list_s *block = NULL;
	int dummy;

	/* full reset to HW input */
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

		/* reset VLD fifo for all vdec */
		WRITE_VREG(DOS_SW_RESET0, (1<<5) | (1<<4) | (1<<3));
		WRITE_VREG(DOS_SW_RESET0, 0);

		/* read back to make sure the reset writes have landed */
		dummy = READ_RESET_REG(RESET0_REGISTER);
		WRITE_VREG(POWER_CTL_VLD, 1 << 4);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
#if 0
		/*move to driver*/
		if (input_frame_based(input))
			WRITE_VREG(HEVC_STREAM_CONTROL, 0);

		/*
		 * 2: assist
		 * 3: parser
		 * 4: parser_state
		 * 8: dblk
		 * 11:mcpu
		 * 12:ccpu
		 * 13:ddr
		 * 14:iqit
		 * 15:ipp
		 * 17:qdct
		 * 18:mpred
		 * 19:sao
		 * 24:hevc_afifo
		 */
		WRITE_VREG(DOS_SW_RESET3,
			(1<<3)|(1<<4)|(1<<8)|(1<<11)|(1<<12)|(1<<14)|(1<<15)|
			(1<<17)|(1<<18)|(1<<19));
		WRITE_VREG(DOS_SW_RESET3, 0);
#endif
	}

	/*
	 *setup HW decoder input buffer (VLD context)
	 * based on input->type and input->target
	 */
	if (input_frame_based(input)) {
		chunk = vdec_input_next_chunk(&vdec->input);

		if (chunk == NULL) {
			*p = NULL;
			return -1;
		}

		block = chunk->block;

		if (input->target == VDEC_INPUT_TARGET_VLD) {
			WRITE_VREG(VLD_MEM_VIFIFO_START_PTR, block->start);
			WRITE_VREG(VLD_MEM_VIFIFO_END_PTR, block->start +
				block->size - 8);
			WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
				round_down(block->start + chunk->offset,
					VDEC_FIFO_ALIGN));

			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

			/* set to manual mode */
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
			WRITE_VREG(VLD_MEM_VIFIFO_RP,
				round_down(block->start + chunk->offset,
					VDEC_FIFO_ALIGN));
			/* write pointer = end of chunk plus padding,
			 * wrapped within the block
			 */
			dummy = chunk->offset + chunk->size +
				VLD_PADDING_SIZE;
			if (dummy >= block->size)
				dummy -= block->size;
			WRITE_VREG(VLD_MEM_VIFIFO_WP,
				round_down(block->start + dummy,
					VDEC_FIFO_ALIGN));

			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 3);
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);

			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
				(0x11 << 16) | (1<<10) | (7<<3));

		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_START_ADDR, block->start);
			WRITE_VREG(HEVC_STREAM_END_ADDR, block->start +
				block->size);
			WRITE_VREG(HEVC_STREAM_RD_PTR, block->start +
				chunk->offset);
			dummy = chunk->offset + chunk->size +
				HEVC_PADDING_SIZE;
			if (dummy >= block->size)
				dummy -= block->size;
			WRITE_VREG(HEVC_STREAM_WR_PTR,
				round_down(block->start + dummy,
					VDEC_FIFO_ALIGN));

			/* set endian */
			SET_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
		}

		*p = chunk;
		return chunk->size;

	} else {
		/* stream based */
		u32 rp = 0, wp = 0, fifo_len = 0;
		int size;
		bool swap_valid = input->swap_valid;
		unsigned long swap_page_phys = input->swap_page_phys;

		if (vdec_dual(vdec) &&
			((vdec->flag & VDEC_FLAG_SELF_INPUT_CONTEXT) == 0)) {
			/* keep using previous input context */
			struct vdec_s *master = (vdec->slave) ?
				vdec : vdec->master;
			if (master->input.last_swap_slave) {
				swap_valid = master->slave->input.swap_valid;
				swap_page_phys =
					master->slave->input.swap_page_phys;
			} else {
				swap_valid = master->input.swap_valid;
				swap_page_phys = master->input.swap_page_phys;
			}
		}

		if (swap_valid) {
			if (input->target == VDEC_INPUT_TARGET_VLD) {
				if (vdec->format == VFORMAT_H264)
					SET_VREG_MASK(POWER_CTL_VLD,
						(1 << 9));

				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

				/* restore read side */
				WRITE_VREG(VLD_MEM_SWAP_ADDR,
					swap_page_phys);
				WRITE_VREG(VLD_MEM_SWAP_CTL, 1);

				/* busy-wait for the swap engine */
				while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
					;
				WRITE_VREG(VLD_MEM_SWAP_CTL, 0);

				/* restore wrap count */
				WRITE_VREG(VLD_MEM_VIFIFO_WRAP_COUNT,
					input->stream_cookie);

				rp = READ_VREG(VLD_MEM_VIFIFO_RP);
				fifo_len = READ_VREG(VLD_MEM_VIFIFO_LEVEL);

				/* enable */
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
					(0x11 << 16) | (1<<10));

				/* sync with front end */
				vdec_sync_input_read(vdec);
				vdec_sync_input_write(vdec);

				wp = READ_VREG(VLD_MEM_VIFIFO_WP);
			} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
				SET_VREG_MASK(HEVC_STREAM_CONTROL, 1);

				/* restore read side */
				WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
					swap_page_phys);
				WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 1);

				while (READ_VREG(HEVC_STREAM_SWAP_CTRL)
					& (1<<7))
					;
				WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);

				/* restore stream offset */
				WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
					input->stream_cookie);

				rp = READ_VREG(HEVC_STREAM_RD_PTR);
				fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
						>> 16) & 0x7f;


				/* enable */

				/* sync with front end */
				vdec_sync_input_read(vdec);
				vdec_sync_input_write(vdec);

				wp = READ_VREG(HEVC_STREAM_WR_PTR);

				/*pr_info("vdec: restore context\r\n");*/
			}

		} else {
			/* no saved context yet: program pointers from the
			 * input buffer bounds
			 */
			if (input->target == VDEC_INPUT_TARGET_VLD) {
				WRITE_VREG(VLD_MEM_VIFIFO_START_PTR,
					input->start);
				WRITE_VREG(VLD_MEM_VIFIFO_END_PTR,
					input->start + input->size - 8);
				WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
					input->start);

				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

				/* set to manual mode */
				WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
				WRITE_VREG(VLD_MEM_VIFIFO_RP, input->start);
				WRITE_VREG(VLD_MEM_VIFIFO_WP,
					READ_PARSER_REG(PARSER_VIDEO_WP));

				rp = READ_VREG(VLD_MEM_VIFIFO_RP);

				/* enable */
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
					(0x11 << 16) | (1<<10));

				wp = READ_VREG(VLD_MEM_VIFIFO_WP);

			} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
				WRITE_VREG(HEVC_STREAM_START_ADDR,
					input->start);
				WRITE_VREG(HEVC_STREAM_END_ADDR,
					input->start + input->size);
				WRITE_VREG(HEVC_STREAM_RD_PTR,
					input->start);
				WRITE_VREG(HEVC_STREAM_WR_PTR,
					READ_PARSER_REG(PARSER_VIDEO_WP));

				rp = READ_VREG(HEVC_STREAM_RD_PTR);
				wp = READ_VREG(HEVC_STREAM_WR_PTR);
				fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
						>> 16) & 0x7f;

				/* enable */
			}
		}
		*p = NULL;
		/* available bytes = ring distance rp->wp plus FIFO content */
		if (wp >= rp)
			size = wp - rp + fifo_len;
		else
			size = wp + input->size - rp + fifo_len;
		if (size < 0) {
			pr_info("%s error: input->size %x wp %x rp %x fifo_len %x => size %x\r\n",
				__func__, input->size, wp, rp, fifo_len, size);
			size = 0;
		}
		return size;
	}
}
EXPORT_SYMBOL(vdec_prepare_input);
1489
/*
 * vdec_enable_input() - let the HW stream FIFO start fetching data
 *
 * Only valid while the vdec is ACTIVE; no-op otherwise.
 */
void vdec_enable_input(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

	if (vdec->status != VDEC_STATUS_ACTIVE)
		return;

	if (input->target == VDEC_INPUT_TARGET_VLD)
		SET_VREG_MASK(VLD_MEM_VIFIFO_CONTROL, (1<<2) | (1<<1));
	else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		SET_VREG_MASK(HEVC_STREAM_CONTROL, 1);
		/* 7 << 4 are the stream endian bits (see vdec_prepare_input):
		 * cleared for stream input, set for frame-based input
		 */
		if (vdec_stream_based(vdec))
			CLEAR_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
		else
			SET_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
		SET_VREG_MASK(HEVC_STREAM_FIFO_CTL, (1<<29));
	}
}
EXPORT_SYMBOL(vdec_enable_input);
1509
1510int vdec_set_input_buffer(struct vdec_s *vdec, u32 start, u32 size)
1511{
1512 int r = vdec_input_set_buffer(&vdec->input, start, size);
1513
1514 if (r)
1515 return r;
1516
1517 if (vdec->slave)
1518 r = vdec_input_set_buffer(&vdec->slave->input, start, size);
1519
1520 return r;
1521}
1522EXPORT_SYMBOL(vdec_set_input_buffer);
1523
1524/*
1525 * vdec_eos returns the possibility that there are
1526 * more input can be used by decoder through vdec_prepare_input
1527 * Note: this function should be called prior to vdec_vframe_dirty
1528 * by decoder driver to determine if EOS happens for stream based
1529 * decoding when there is no sufficient data for a frame
1530 */
1531bool vdec_has_more_input(struct vdec_s *vdec)
1532{
1533 struct vdec_input_s *input = &vdec->input;
1534
1535 if (!input->eos)
1536 return true;
1537
1538 if (input_frame_based(input))
1539 return vdec_input_next_input_chunk(input) != NULL;
1540 else {
1541 if (input->target == VDEC_INPUT_TARGET_VLD)
1542 return READ_VREG(VLD_MEM_VIFIFO_WP) !=
1543 READ_PARSER_REG(PARSER_VIDEO_WP);
1544 else {
1545 return (READ_VREG(HEVC_STREAM_WR_PTR) & ~0x3) !=
1546 (READ_PARSER_REG(PARSER_VIDEO_WP) & ~0x3);
1547 }
1548 }
1549}
1550EXPORT_SYMBOL(vdec_has_more_input);
1551
1552void vdec_set_prepare_level(struct vdec_s *vdec, int level)
1553{
1554 vdec->input.prepare_level = level;
1555}
1556EXPORT_SYMBOL(vdec_set_prepare_level);
1557
/* Replace (not OR into) the decoder's flag word. */
void vdec_set_flag(struct vdec_s *vdec, u32 flag)
{
	vdec->flag = flag;
}
EXPORT_SYMBOL(vdec_set_flag);
1563
1564void vdec_set_eos(struct vdec_s *vdec, bool eos)
1565{
1566 struct vdec_core_s *core = vdec_core;
1567
1568 vdec->input.eos = eos;
1569
1570 if (vdec->slave)
1571 vdec->slave->input.eos = eos;
1572 up(&core->sem);
1573}
1574EXPORT_SYMBOL(vdec_set_eos);
1575
#ifdef VDEC_DEBUG_SUPPORT
/* Enable step-mode debugging.  0x1ff sets all nine step bits at once --
 * presumably one per scheduler stage; TODO confirm against step_mode users.
 */
void vdec_set_step_mode(void)
{
	step_mode = 0x1ff;
}
EXPORT_SYMBOL(vdec_set_step_mode);
#endif
1583
1584void vdec_set_next_sched(struct vdec_s *vdec, struct vdec_s *next_vdec)
1585{
1586 if (vdec && next_vdec) {
1587 vdec->sched = 0;
1588 next_vdec->sched = 1;
1589 }
1590}
1591EXPORT_SYMBOL(vdec_set_next_sched);
1592
1593/*
1594 * Swap Context: S0 S1 S2 S3 S4
1595 * Sample sequence: M S M M S
1596 * Master Context: S0 S0 S2 S3 S3
1597 * Slave context: NA S1 S1 S2 S4
1598 * ^
1599 * ^
1600 * ^
1601 * the tricky part
1602 * If there are back to back decoding of master or slave
1603 * then the context of the counter part should be updated
1604 * with current decoder. In this example, S1 should be
1605 * updated to S2.
1606 * This is done by swap the swap_page and related info
1607 * between two layers.
1608 */
1609static void vdec_borrow_input_context(struct vdec_s *vdec)
1610{
1611 struct page *swap_page;
1612 unsigned long swap_page_phys;
1613 struct vdec_input_s *me;
1614 struct vdec_input_s *other;
1615
1616 if (!vdec_dual(vdec))
1617 return;
1618
1619 me = &vdec->input;
1620 other = &vdec_get_associate(vdec)->input;
1621
1622 /* swap the swap_context, borrow counter part's
1623 * swap context storage and update all related info.
1624 * After vdec_vframe_dirty, vdec_save_input_context
1625 * will be called to update current vdec's
1626 * swap context
1627 */
1628 swap_page = other->swap_page;
1629 other->swap_page = me->swap_page;
1630 me->swap_page = swap_page;
1631
1632 swap_page_phys = other->swap_page_phys;
1633 other->swap_page_phys = me->swap_page_phys;
1634 me->swap_page_phys = swap_page_phys;
1635
1636 other->swap_rp = me->swap_rp;
1637 other->streaming_rp = me->streaming_rp;
1638 other->stream_cookie = me->stream_cookie;
1639 other->swap_valid = me->swap_valid;
1640}
1641
/*
 * vdec_vframe_dirty() - mark input data as consumed after a decode run
 * @chunk: consumed chunk for frame-based input, NULL for stream input
 */
void vdec_vframe_dirty(struct vdec_s *vdec, struct vframe_chunk_s *chunk)
{
	if (chunk)
		chunk->flag |= VFRAME_CHUNK_FLAG_CONSUMED;

	if (vdec_stream_based(vdec)) {
		vdec->input.swap_needed = true;

		if (vdec_dual(vdec)) {
			/* two back-to-back runs on the same layer leave the
			 * counterpart's context stale -- hand our context
			 * over (see comment above vdec_borrow_input_context)
			 */
			vdec_get_associate(vdec)->input.dirty_count = 0;
			vdec->input.dirty_count++;
			if (vdec->input.dirty_count > 1) {
				vdec->input.dirty_count = 1;
				vdec_borrow_input_context(vdec);
			}
		}

		/* for stream based mode, we update read and write pointer
		 * also in case decoder wants to keep working on decoding
		 * for more frames while input front end has more data
		 */
		vdec_sync_input_read(vdec);
		vdec_sync_input_write(vdec);

		vdec->need_more_data |= VDEC_NEED_MORE_DATA_DIRTY;
		vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
	}
}
EXPORT_SYMBOL(vdec_vframe_dirty);
1671
1672bool vdec_need_more_data(struct vdec_s *vdec)
1673{
1674 if (vdec_stream_based(vdec))
1675 return vdec->need_more_data & VDEC_NEED_MORE_DATA;
1676
1677 return false;
1678}
1679EXPORT_SYMBOL(vdec_need_more_data);
1680
1681
/*
 * hevc_wait_ddr() - quiesce the HEVC DMC (DDR controller) channel(s)
 *
 * Clears the HEVC (and, on G12A and later, HEVCB) request-enable bits in
 * DMC_REQ_CTRL under the vdec spinlock, then polls DMC_CHAN_STS until the
 * masked bit(s) read back set.
 * NOTE(review): the poll is unbounded; a wedged DMC would hang this
 * thread -- consider adding a timeout.
 */
void hevc_wait_ddr(void)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 4; /* hevc */
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask |= (1 << 8); /* hevcb */

	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;
}
1700
/*
 * vdec_save_input_context() - swap the HW stream read context out to memory
 *
 * After a decode run the VLD/HEVC FIFO state is saved into the input's
 * swap page so vdec_prepare_input() can restore it on the next run
 * (possibly after another decoder instance has used the hardware).
 */
void vdec_save_input_context(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_SAVE_INPUT);
#endif

	if (input->target == VDEC_INPUT_TARGET_VLD)
		WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1<<15);

	if (input_stream_based(input) && (input->swap_needed)) {
		if (input->target == VDEC_INPUT_TARGET_VLD) {
			/* swap out VLD FIFO state, then record wrap count
			 * and read pointer for later restore/compare
			 */
			WRITE_VREG(VLD_MEM_SWAP_ADDR,
				input->swap_page_phys);
			WRITE_VREG(VLD_MEM_SWAP_CTL, 3);
			while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
				;
			WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
			vdec->input.stream_cookie =
				READ_VREG(VLD_MEM_VIFIFO_WRAP_COUNT);
			vdec->input.swap_rp =
				READ_VREG(VLD_MEM_VIFIFO_RP);
			/* total consumed = full wraps + offset - unread */
			vdec->input.total_rd_count =
				(u64)vdec->input.stream_cookie *
				vdec->input.size + vdec->input.swap_rp -
				READ_VREG(VLD_MEM_VIFIFO_BYTES_AVAIL);
		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
				input->swap_page_phys);
			WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 3);

			while (READ_VREG(HEVC_STREAM_SWAP_CTRL) & (1<<7))
				;
			WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);

			vdec->input.stream_cookie =
				READ_VREG(HEVC_SHIFT_BYTE_COUNT);
			vdec->input.swap_rp =
				READ_VREG(HEVC_STREAM_RD_PTR);
			/* streaming_rp: high 32 bits count 2^32-byte wraps
			 * of HEVC_SHIFT_BYTE_COUNT, low 32 bits mirror it;
			 * a wrap is detected via the sign-bit transition
			 */
			if (((vdec->input.stream_cookie & 0x80000000) == 0) &&
				(vdec->input.streaming_rp & 0x80000000))
				vdec->input.streaming_rp += 1ULL << 32;
			vdec->input.streaming_rp &= 0xffffffffULL << 32;
			vdec->input.streaming_rp |= vdec->input.stream_cookie;
			vdec->input.total_rd_count = vdec->input.streaming_rp;
		}

		input->swap_valid = true;
		input->swap_needed = false;
		/*pr_info("vdec: save context\r\n");*/

		vdec_sync_input_read(vdec);

		if (vdec_dual(vdec)) {
			/* remember which half saved last so the next run
			 * restores the freshest context
			 */
			struct vdec_s *master = (vdec->slave) ?
				vdec : vdec->master;
			master->input.last_swap_slave = (master->slave == vdec);
			/* pr_info("master->input.last_swap_slave = %d\n",
				master->input.last_swap_slave); */
		}

		hevc_wait_ddr();
	}
}
EXPORT_SYMBOL(vdec_save_input_context);
1767
1768void vdec_clean_input(struct vdec_s *vdec)
1769{
1770 struct vdec_input_s *input = &vdec->input;
1771
1772 while (!list_empty(&input->vframe_chunk_list)) {
1773 struct vframe_chunk_s *chunk =
1774 vdec_input_next_chunk(input);
1775 if (chunk && (chunk->flag & VFRAME_CHUNK_FLAG_CONSUMED))
1776 vdec_input_release_chunk(input, chunk);
1777 else
1778 break;
1779 }
1780 vdec_save_input_context(vdec);
1781}
1782EXPORT_SYMBOL(vdec_clean_input);
1783
1784
/*
 * vdec_input_read_restore() - restore HW read-side state for stream input
 *
 * When no swap context has been saved yet, program the FIFO pointers from
 * the input buffer bounds; otherwise swap the saved context back in.
 * Always returns 0.
 */
static int vdec_input_read_restore(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

	if (!vdec_stream_based(vdec))
		return 0;

	if (!input->swap_valid) {
		/* first run: initialize pointers from the buffer bounds */
		if (input->target == VDEC_INPUT_TARGET_VLD) {
			WRITE_VREG(VLD_MEM_VIFIFO_START_PTR,
				input->start);
			WRITE_VREG(VLD_MEM_VIFIFO_END_PTR,
				input->start + input->size - 8);
			WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
				input->start);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

			/* set to manual mode */
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
			WRITE_VREG(VLD_MEM_VIFIFO_RP, input->start);
		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_START_ADDR,
				input->start);
			WRITE_VREG(HEVC_STREAM_END_ADDR,
				input->start + input->size);
			WRITE_VREG(HEVC_STREAM_RD_PTR,
				input->start);
		}
		return 0;
	}
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		/* restore read side */
		WRITE_VREG(VLD_MEM_SWAP_ADDR,
			input->swap_page_phys);

		/*swap active*/
		WRITE_VREG(VLD_MEM_SWAP_CTL, 1);

		/*wait swap busy*/
		while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
			;

		WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		/* restore read side */
		WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
			input->swap_page_phys);
		WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 1);

		/* wait for the swap engine to finish */
		while (READ_VREG(HEVC_STREAM_SWAP_CTRL)
			& (1<<7))
			;
		WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);
	}

	return 0;
}
1843
1844
/*
 * vdec_sync_input() - resync HW pointers with the parser and return the
 * number of stream bytes currently available to the decoder.
 */
int vdec_sync_input(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;
	u32 rp = 0, wp = 0, fifo_len = 0;
	int size;

	vdec_input_read_restore(vdec);
	vdec_sync_input_read(vdec);
	vdec_sync_input_write(vdec);
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		rp = READ_VREG(VLD_MEM_VIFIFO_RP);
		wp = READ_VREG(VLD_MEM_VIFIFO_WP);

	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		rp = READ_VREG(HEVC_STREAM_RD_PTR);
		wp = READ_VREG(HEVC_STREAM_WR_PTR);
		fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
				>> 16) & 0x7f;
	}
	/* available bytes = ring distance rp->wp plus FIFO content */
	if (wp >= rp)
		size = wp - rp + fifo_len;
	else
		size = wp + input->size - rp + fifo_len;
	if (size < 0) {
		pr_info("%s error: input->size %x wp %x rp %x fifo_len %x => size %x\r\n",
			__func__, input->size, wp, rp, fifo_len, size);
		size = 0;
	}
	return size;

}
EXPORT_SYMBOL(vdec_sync_input);
1877
1878const char *vdec_status_str(struct vdec_s *vdec)
1879{
1880 return vdec->status < ARRAY_SIZE(vdec_status_string) ?
1881 vdec_status_string[vdec->status] : "INVALID";
1882}
1883
1884const char *vdec_type_str(struct vdec_s *vdec)
1885{
1886 switch (vdec->type) {
1887 case VDEC_TYPE_SINGLE:
1888 return "VDEC_TYPE_SINGLE";
1889 case VDEC_TYPE_STREAM_PARSER:
1890 return "VDEC_TYPE_STREAM_PARSER";
1891 case VDEC_TYPE_FRAME_BLOCK:
1892 return "VDEC_TYPE_FRAME_BLOCK";
1893 case VDEC_TYPE_FRAME_CIRCULAR:
1894 return "VDEC_TYPE_FRAME_CIRCULAR";
1895 default:
1896 return "VDEC_TYPE_INVALID";
1897 }
1898}
1899
const char *vdec_device_name_str(struct vdec_s *vdec)
{
	/* vdec_device_name[] appears to hold two entries per format;
	 * the odd (format * 2 + 1) entry is returned here -- presumably
	 * the display-name variant; TODO confirm the table layout.
	 */
	return vdec_device_name[vdec->format * 2 + 1];
}
EXPORT_SYMBOL(vdec_device_name_str);
1905
1906void walk_vdec_core_list(char *s)
1907{
1908 struct vdec_s *vdec;
1909 struct vdec_core_s *core = vdec_core;
1910 unsigned long flags;
1911
1912 pr_info("%s --->\n", s);
1913
1914 flags = vdec_core_lock(vdec_core);
1915
1916 if (list_empty(&core->connected_vdec_list)) {
1917 pr_info("connected vdec list empty\n");
1918 } else {
1919 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
1920 pr_info("\tvdec (%p), status = %s\n", vdec,
1921 vdec_status_str(vdec));
1922 }
1923 }
1924
1925 vdec_core_unlock(vdec_core, flags);
1926}
1927EXPORT_SYMBOL(walk_vdec_core_list);
1928
/* insert vdec to vdec_core for scheduling,
 * for dual running decoders, connect/disconnect always runs in pairs
 */
int vdec_connect(struct vdec_s *vdec)
{
	unsigned long flags;

	//trace_vdec_connect(vdec);/*DEBUG_TMP*/

	if (vdec->status != VDEC_STATUS_DISCONNECTED)
		return 0;

	/* mark connected before publishing on the scheduler list */
	vdec_set_status(vdec, VDEC_STATUS_CONNECTED);
	vdec_set_next_status(vdec, VDEC_STATUS_CONNECTED);

	init_completion(&vdec->inactive_done);

	if (vdec->slave) {
		vdec_set_status(vdec->slave, VDEC_STATUS_CONNECTED);
		vdec_set_next_status(vdec->slave, VDEC_STATUS_CONNECTED);

		init_completion(&vdec->slave->inactive_done);
	}

	flags = vdec_core_lock(vdec_core);

	list_add_tail(&vdec->list, &vdec_core->connected_vdec_list);

	if (vdec->slave) {
		list_add_tail(&vdec->slave->list,
			&vdec_core->connected_vdec_list);
	}

	vdec_core_unlock(vdec_core, flags);

	/* wake the scheduler thread to pick up the new decoder */
	up(&vdec_core->sem);

	return 0;
}
EXPORT_SYMBOL(vdec_connect);
1969
/* remove vdec from vdec_core scheduling */
int vdec_disconnect(struct vdec_s *vdec)
{
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_DISCONNECT);
#endif
	//trace_vdec_disconnect(vdec);/*DEBUG_TMP*/

	if ((vdec->status != VDEC_STATUS_CONNECTED) &&
		(vdec->status != VDEC_STATUS_ACTIVE)) {
		return 0;
	}
	mutex_lock(&vdec_mutex);
	/*
	 *when a vdec is under the management of scheduler
	 * the status change will only be from vdec_core_thread
	 */
	vdec_set_next_status(vdec, VDEC_STATUS_DISCONNECTED);

	if (vdec->slave)
		vdec_set_next_status(vdec->slave, VDEC_STATUS_DISCONNECTED);
	else if (vdec->master)
		vdec_set_next_status(vdec->master, VDEC_STATUS_DISCONNECTED);
	mutex_unlock(&vdec_mutex);
	/* kick the core thread so it processes the status change */
	up(&vdec_core->sem);

	/* bounded wait for the core thread to deactivate the vdec(s) */
	if(!wait_for_completion_timeout(&vdec->inactive_done,
		msecs_to_jiffies(2000)))
		goto discon_timeout;

	if (vdec->slave) {
		if(!wait_for_completion_timeout(&vdec->slave->inactive_done,
			msecs_to_jiffies(2000)))
			goto discon_timeout;
	} else if (vdec->master) {
		if(!wait_for_completion_timeout(&vdec->master->inactive_done,
			msecs_to_jiffies(2000)))
			goto discon_timeout;
	}

	return 0;
discon_timeout:
	pr_err("%s timeout!!! status: 0x%x\n", __func__, vdec->status);
	return 0;
}
EXPORT_SYMBOL(vdec_disconnect);
2016
2017/* release vdec structure */
int vdec_destroy(struct vdec_s *vdec)
{
	//trace_vdec_destroy(vdec);/*DEBUG_TMP*/

	/* free the input chunks/blocks owned by this instance */
	vdec_input_release(&vdec->input);

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile_flush(vdec);
#endif
	/* return the instance id to the allocator before freeing */
	ida_simple_remove(&vdec_core->ida, vdec->id);
	vfree(vdec);

	atomic_dec(&vdec_core->vdec_nr);

	return 0;
}
EXPORT_SYMBOL(vdec_destroy);
2035
2036/*
2037 *register vdec_device
2038 * create output, vfm or create ionvideo output
2039 */
/*
 * vdec_init() - register the decoder platform device and wire its output
 * @vdec:  decoder instance to bring up
 * @is_4k: 4K-capability hint (currently unused in this function body)
 *
 * Registers the per-format platform device, then connects the decoder's
 * vframe provider either to the default VFM path (stream based) or to a
 * receiver chain chosen by frame_base_video_path.  Returns 0 on success
 * or a negative errno.
 */
s32 vdec_init(struct vdec_s *vdec, int is_4k)
{
	int r = 0;
	struct vdec_s *p = vdec;
	const char *dev_name;
	int id = PLATFORM_DEVID_AUTO;/* overridden below when vdec->id is set */

	dev_name = get_dev_name(vdec_single(vdec), vdec->format);

	if (dev_name == NULL)
		return -ENODEV;

	pr_info("vdec_init, dev_name:%s, vdec_type=%s\n",
		dev_name, vdec_type_str(vdec));

	/*
	 *todo: VFM patch control should be configurable,
	 * for now all stream based input uses default VFM path.
	 */
	if (vdec_stream_based(vdec) && !vdec_dual(vdec)) {
		if (vdec_core->vfm_vdec == NULL) {
			pr_debug("vdec_init set vfm decoder %p\n", vdec);
			vdec_core->vfm_vdec = vdec;
		} else {
			pr_info("vdec_init vfm path busy.\n");
			return -EBUSY;
		}
	}

	mutex_lock(&vdec_mutex);
	inited_vcodec_num++;
	mutex_unlock(&vdec_mutex);

	/* HEVC-family formats feed the HEVC stream engine, all others VLD */
	vdec_input_set_type(&vdec->input, vdec->type,
			(vdec->format == VFORMAT_HEVC ||
			vdec->format == VFORMAT_AVS2 ||
			vdec->format == VFORMAT_VP9) ?
				VDEC_INPUT_TARGET_HEVC :
				VDEC_INPUT_TARGET_VLD);
	if (vdec_single(vdec))
		vdec_enable_DMC(vdec);
	p->cma_dev = vdec_core->cma_dev;
	p->get_canvas = get_canvas;
	p->get_canvas_ex = get_canvas_ex;
	p->free_canvas_ex = free_canvas_ex;
	p->vdec_fps_detec = vdec_fps_detec;
	atomic_set(&p->inirq_flag, 0);
	atomic_set(&p->inirq_thread_flag, 0);
	/* todo */
	if (!vdec_dual(vdec))
		p->use_vfm_path = vdec_stream_based(vdec);
	/* vdec_dev_reg.flag = 0; */
	if (vdec->id >= 0)
		id = vdec->id;
	p->parallel_dec = parallel_decode;
	vdec_core->parallel_dec = parallel_decode;
	vdec->canvas_mode = CANVAS_BLKMODE_32X32;
#ifdef FRAME_CHECK
	vdec_frame_check_init(vdec);
#endif
	/* &p (pointer to pointer) is copied as platform data so the probe
	 * callback can recover the vdec instance
	 */
	p->dev = platform_device_register_data(
				&vdec_core->vdec_core_platform_device->dev,
				dev_name,
				id,
				&p, sizeof(struct vdec_s *));

	if (IS_ERR(p->dev)) {
		r = PTR_ERR(p->dev);
		pr_err("vdec: Decoder device %s register failed (%d)\n",
			dev_name, r);

		mutex_lock(&vdec_mutex);
		inited_vcodec_num--;
		mutex_unlock(&vdec_mutex);

		goto error;
	} else if (!p->dev->dev.driver) {
		pr_info("vdec: Decoder device %s driver probe failed.\n",
			dev_name);
		r = -ENODEV;

		goto error;
	}

	if ((p->type == VDEC_TYPE_FRAME_BLOCK) && (p->run == NULL)) {
		r = -ENODEV;
		pr_err("vdec: Decoder device not handled (%s)\n", dev_name);

		mutex_lock(&vdec_mutex);
		inited_vcodec_num--;
		mutex_unlock(&vdec_mutex);

		goto error;
	}

	if (p->use_vfm_path) {
		vdec->vf_receiver_inst = -1;
		vdec->vfm_map_id[0] = 0;
	} else if (!vdec_dual(vdec)) {
		/* create IONVIDEO instance and connect decoder's
		 * vf_provider interface to it
		 */
		if (p->type != VDEC_TYPE_FRAME_BLOCK) {
			r = -ENODEV;
			pr_err("vdec: Incorrect decoder type\n");

			mutex_lock(&vdec_mutex);
			inited_vcodec_num--;
			mutex_unlock(&vdec_mutex);

			goto error;
		}
		if (p->frame_base_video_path == FRAME_BASE_PATH_IONVIDEO) {
#if 1
			r = ionvideo_assign_map(&vdec->vf_receiver_name,
					&vdec->vf_receiver_inst);
#else
			/*
			 * temporarily just use decoder instance ID as iondriver ID
			 * to solve OMX iondriver instance number check time sequence
			 * only the limitation is we can NOT mix different video
			 * decoders since same ID will be used for different decoder
			 * formats.
			 */
			vdec->vf_receiver_inst = p->dev->id;
			r = ionvideo_assign_map(&vdec->vf_receiver_name,
					&vdec->vf_receiver_inst);
#endif
			if (r < 0) {
				pr_err("IonVideo frame receiver allocation failed.\n");

				mutex_lock(&vdec_mutex);
				inited_vcodec_num--;
				mutex_unlock(&vdec_mutex);

				goto error;
			}

			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				vdec->vf_receiver_name);
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path ==
				FRAME_BASE_PATH_AMLVIDEO_AMVIDEO) {
			/* secure streams skip ppmgr/deinterlace */
			if (vdec_secure(vdec)) {
				snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
					"%s %s", vdec->vf_provider_name,
					"amlvideo amvideo");
			} else {
				snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
					"%s %s", vdec->vf_provider_name,
					"amlvideo ppmgr deinterlace amvideo");
			}
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path ==
				FRAME_BASE_PATH_AMLVIDEO1_AMVIDEO2) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				"aml_video.1 videosync.0 videopip");
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path == FRAME_BASE_PATH_V4L_OSD) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				vdec->vf_receiver_name);
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path == FRAME_BASE_PATH_TUNNEL_MODE) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				"amvideo");
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path == FRAME_BASE_PATH_V4L_VIDEO) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s %s", vdec->vf_provider_name,
				vdec->vf_receiver_name, "amvideo");
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path ==
				FRAME_BASE_PATH_DI_V4LVIDEO) {
#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
			r = v4lvideo_assign_map(&vdec->vf_receiver_name,
					&vdec->vf_receiver_inst);
#else
			r = -1;
#endif
			if (r < 0) {
				pr_err("V4lVideo frame receiver allocation failed.\n");
				mutex_lock(&vdec_mutex);
				inited_vcodec_num--;
				mutex_unlock(&vdec_mutex);
				goto error;
			}
			if (!v4lvideo_add_di || vdec_secure(vdec))
				snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
					"%s %s", vdec->vf_provider_name,
					vdec->vf_receiver_name);
			else {
				/* choose a deinterlace stage based on the
				 * receiver instance vs. max_di_instance
				 */
				if (vdec->vf_receiver_inst == 0)
					snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
						"%s %s %s", vdec->vf_provider_name,
						"dimulti.1",
						vdec->vf_receiver_name);
				else if ((vdec->vf_receiver_inst <
					max_di_instance) &&
					(vdec->vf_receiver_inst == 1))
					snprintf(vdec->vfm_map_chain,
						VDEC_MAP_NAME_SIZE,
						"%s %s %s",
						vdec->vf_provider_name,
						"deinterlace",
						vdec->vf_receiver_name);
				else if (vdec->vf_receiver_inst <
					max_di_instance)
					snprintf(vdec->vfm_map_chain,
						VDEC_MAP_NAME_SIZE,
						"%s %s%d %s",
						vdec->vf_provider_name,
						"dimulti.",
						vdec->vf_receiver_inst,
						vdec->vf_receiver_name);
				else
					snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
						"%s %s", vdec->vf_provider_name,
						vdec->vf_receiver_name);
			}
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		}

		if (vfm_map_add(vdec->vfm_map_id,
			vdec->vfm_map_chain) < 0) {
			r = -ENOMEM;
			pr_err("Decoder pipeline map creation failed %s.\n",
				vdec->vfm_map_id);
			vdec->vfm_map_id[0] = 0;

			mutex_lock(&vdec_mutex);
			inited_vcodec_num--;
			mutex_unlock(&vdec_mutex);

			goto error;
		}

		pr_debug("vfm map %s created\n", vdec->vfm_map_id);

		/*
		 *assume IONVIDEO driver already have a few vframe_receiver
		 * registered.
		 * 1. Call iondriver function to allocate a IONVIDEO path and
		 *    provide receiver's name and receiver op.
		 * 2. Get decoder driver's provider name from driver instance
		 * 3. vfm_map_add(name, "<decoder provider name>
		 *    <iondriver receiver name>"), e.g.
		 *    vfm_map_add("vdec_ion_map_0", "mpeg4_0 iondriver_1");
		 * 4. vf_reg_provider and vf_reg_receiver
		 * Note: the decoder provider's op uses vdec as op_arg
		 *       the iondriver receiver's op uses iondev device as
		 *       op_arg
		 */

	}

	if (!vdec_single(vdec)) {
		vf_reg_provider(&p->vframe_provider);

		vf_notify_receiver(p->vf_provider_name,
			VFRAME_EVENT_PROVIDER_START,
			vdec);

		/* the first non-single vdec becomes the frame-rate hinter */
		if (vdec_core->hint_fr_vdec == NULL)
			vdec_core->hint_fr_vdec = vdec;

		if (vdec_core->hint_fr_vdec == vdec) {
			if (p->sys_info->rate != 0) {
				if (!vdec->is_reset) {
					vf_notify_receiver(p->vf_provider_name,
						VFRAME_EVENT_PROVIDER_FR_HINT,
						(void *)
						((unsigned long)
						p->sys_info->rate));
					vdec->fr_hint_state = VDEC_HINTED;
				}
			} else {
				vdec->fr_hint_state = VDEC_NEED_HINT;
			}
		}
	}

	p->dolby_meta_with_el = 0;
	pr_debug("vdec_init, vf_provider_name = %s\n", p->vf_provider_name);
	vdec_input_prepare_bufs(/*prepared buffer for fast playing.*/
		&vdec->input,
		vdec->sys_info->width,
		vdec->sys_info->height);
	/* vdec is now ready to be active */
	vdec_set_status(vdec, VDEC_STATUS_DISCONNECTED);
	if (p->use_vfm_path) {
		/* QoS frame-info ring buffers used by the VFM path */
		frame_info_buf_in = (struct vframe_qos_s *)
			kmalloc(QOS_FRAME_NUM*sizeof(struct vframe_qos_s), GFP_KERNEL);
		if (!frame_info_buf_in)
			pr_err("kmalloc: frame_info_buf_in failed\n");
		else
			memset(frame_info_buf_in, 0,
				QOS_FRAME_NUM*sizeof(struct vframe_qos_s));

		frame_info_buf_out = (struct vframe_qos_s *)
			kmalloc(QOS_FRAME_NUM*sizeof(struct vframe_qos_s), GFP_KERNEL);
		if (!frame_info_buf_out)
			pr_err("kmalloc: frame_info_buf_out failed\n");
		else
			memset(frame_info_buf_out, 0,
				QOS_FRAME_NUM*sizeof(struct vframe_qos_s));
		frame_qos_wr = 0;
		frame_qos_rd = 0;
	}
	return 0;

error:
	return r;
}
EXPORT_SYMBOL(vdec_init);
2365
2366/* vdec_create/init/release/destroy are applied to both dual running decoders
2367 */
/*
 * Tear down a decoder instance: detach it from the core scheduler,
 * unregister its vframe provider/map, wait for in-flight IRQ work to
 * drain, then free per-instance resources.  The ordering below is
 * deliberate: disconnect first so the core thread stops scheduling the
 * instance before any of its resources are released.
 */
void vdec_release(struct vdec_s *vdec)
{
	//trace_vdec_release(vdec);/*DEBUG_TMP*/
#ifdef VDEC_DEBUG_SUPPORT
	/* Debug single-step mode: block release until step_mode is cleared
	 * by the debugger, so state can be inspected before teardown.
	 */
	if (step_mode) {
		pr_info("VDEC_DEBUG: in step_mode, wait release\n");
		while (step_mode)
			udelay(10);
		pr_info("VDEC_DEBUG: step_mode is clear\n");
	}
#endif
	/* Remove the instance from the core's connected list (blocks until
	 * the core thread acknowledges the disconnect).
	 */
	vdec_disconnect(vdec);

	if (vdec->vframe_provider.name) {
		if (!vdec_single(vdec)) {
			/* Only the instance that issued the frame-rate hint
			 * sends the matching END hint to the receiver.
			 */
			if (vdec_core->hint_fr_vdec == vdec
			&& vdec->fr_hint_state == VDEC_HINTED)
				vf_notify_receiver(
					vdec->vf_provider_name,
					VFRAME_EVENT_PROVIDER_FR_END_HINT,
					NULL);
			vdec->fr_hint_state = VDEC_NO_NEED_HINT;
		}
		vf_unreg_provider(&vdec->vframe_provider);
	}

	/* Clear core-level back-references to this instance. */
	if (vdec_core->vfm_vdec == vdec)
		vdec_core->vfm_vdec = NULL;

	if (vdec_core->hint_fr_vdec == vdec)
		vdec_core->hint_fr_vdec = NULL;

	/* Remove the vfm map created in vdec_init (id cleared so a
	 * second release is harmless).
	 */
	if (vdec->vf_receiver_inst >= 0) {
		if (vdec->vfm_map_id[0]) {
			vfm_map_remove(vdec->vfm_map_id);
			vdec->vfm_map_id[0] = 0;
		}
	}

	/* Busy-wait (yielding) until both the hard-IRQ and threaded-IRQ
	 * handlers have left this instance, so nothing touches it after
	 * the frees below.
	 */
	while ((atomic_read(&vdec->inirq_flag) > 0)
		|| (atomic_read(&vdec->inirq_thread_flag) > 0))
		schedule();

#ifdef FRAME_CHECK
	vdec_frame_check_exit(vdec);
#endif
	vdec_fps_clear(vdec->id);
	/* Last remaining instance: disable DMC access for the decoder HW. */
	if (atomic_read(&vdec_core->vdec_nr) == 1)
		vdec_disable_DMC(vdec);
	platform_device_unregister(vdec->dev);
	pr_debug("vdec_release instance %p, total %d\n", vdec,
		atomic_read(&vdec_core->vdec_nr));
	/* Free the QoS buffers allocated in vdec_init for the vfm path.
	 * NOTE(review): these are file-scope singletons, so this assumes at
	 * most one vfm-path instance at a time — confirm against callers.
	 */
	if (vdec->use_vfm_path) {
		kfree(frame_info_buf_in);
		frame_info_buf_in = NULL;
		kfree(frame_info_buf_out);
		frame_info_buf_out = NULL;
		frame_qos_wr = 0;
		frame_qos_rd = 0;
	}
	vdec_destroy(vdec);

	/* Balance the inited_vcodec_num increment done at init time. */
	mutex_lock(&vdec_mutex);
	inited_vcodec_num--;
	mutex_unlock(&vdec_mutex);

}
EXPORT_SYMBOL(vdec_release);
2436
2437/* For dual running decoders, vdec_reset is only called with master vdec.
2438 */
/*
 * Fully reset a (master) decoder instance in place: detach it from the
 * scheduler, drop and re-create its vframe provider(s) and input
 * context, then reconnect it.  The slave instance (dual-decoder mode)
 * is reset alongside the master.  Always returns 0.
 */
int vdec_reset(struct vdec_s *vdec)
{
	//trace_vdec_reset(vdec); /*DEBUG_TMP*/

	vdec_disconnect(vdec);

	if (vdec->vframe_provider.name)
		vf_unreg_provider(&vdec->vframe_provider);

	if ((vdec->slave) && (vdec->slave->vframe_provider.name))
		vf_unreg_provider(&vdec->slave->vframe_provider);

	/* Per-codec reset hook, if the decoder driver installed one.
	 * NOTE(review): slave->reset is called unconditionally whenever
	 * vdec->reset is set — assumes master and slave always install the
	 * hook together; confirm against the dual-decoder drivers.
	 */
	if (vdec->reset) {
		vdec->reset(vdec);
		if (vdec->slave)
			vdec->slave->reset(vdec->slave);
	}
	vdec->mc_loaded = 0;/*clear for reload firmware*/
	/* Rebuild the input context from scratch. */
	vdec_input_release(&vdec->input);

	vdec_input_init(&vdec->input, vdec);

	vdec_input_prepare_bufs(&vdec->input, vdec->sys_info->width,
		vdec->sys_info->height);

	/* Re-register providers and tell receivers we are live again. */
	vf_reg_provider(&vdec->vframe_provider);
	vf_notify_receiver(vdec->vf_provider_name,
		VFRAME_EVENT_PROVIDER_START, vdec);

	if (vdec->slave) {
		vf_reg_provider(&vdec->slave->vframe_provider);
		vf_notify_receiver(vdec->slave->vf_provider_name,
			VFRAME_EVENT_PROVIDER_START, vdec->slave);
		vdec->slave->mc_loaded = 0;/*clear for reload firmware*/
	}

	vdec_connect(vdec);

	return 0;
}
EXPORT_SYMBOL(vdec_reset);
2480
/*
 * V4L2 variant of vdec_reset.
 * flag != 2: full reset — same provider/input rebuild as vdec_reset.
 * flag == 2: light reset — only the per-codec reset hook runs; the
 *            providers and input context are left intact.
 * In both cases the instance is reconnected and frame checking is
 * re-initialized.  Always returns 0.
 */
int vdec_v4l2_reset(struct vdec_s *vdec, int flag)
{
	//trace_vdec_reset(vdec); /*DEBUG_TMP*/
	pr_debug("vdec_v4l2_reset %d\n", flag);
	vdec_disconnect(vdec);
	if (flag != 2) {
		/* Full reset path: tear down providers first. */
		if (vdec->vframe_provider.name)
			vf_unreg_provider(&vdec->vframe_provider);

		if ((vdec->slave) && (vdec->slave->vframe_provider.name))
			vf_unreg_provider(&vdec->slave->vframe_provider);

		if (vdec->reset) {
			vdec->reset(vdec);
			if (vdec->slave)
				vdec->slave->reset(vdec->slave);
		}
		vdec->mc_loaded = 0;/*clear for reload firmware*/

		/* Rebuild the input context. */
		vdec_input_release(&vdec->input);

		vdec_input_init(&vdec->input, vdec);

		vdec_input_prepare_bufs(&vdec->input, vdec->sys_info->width,
			vdec->sys_info->height);

		/* Re-register providers and notify receivers. */
		vf_reg_provider(&vdec->vframe_provider);
		vf_notify_receiver(vdec->vf_provider_name,
			VFRAME_EVENT_PROVIDER_START, vdec);

		if (vdec->slave) {
			vf_reg_provider(&vdec->slave->vframe_provider);
			vf_notify_receiver(vdec->slave->vf_provider_name,
				VFRAME_EVENT_PROVIDER_START, vdec->slave);
			vdec->slave->mc_loaded = 0;/*clear for reload firmware*/
		}
	} else {
		/* Light reset: codec hook only, keep providers/input. */
		if (vdec->reset) {
			vdec->reset(vdec);
			if (vdec->slave)
				vdec->slave->reset(vdec->slave);
		}
	}

	vdec_connect(vdec);

	vdec_frame_check_init(vdec);

	return 0;
}
EXPORT_SYMBOL(vdec_v4l2_reset);
2532
2533
2534void vdec_free_cmabuf(void)
2535{
2536 mutex_lock(&vdec_mutex);
2537
2538 /*if (inited_vcodec_num > 0) {
2539 mutex_unlock(&vdec_mutex);
2540 return;
2541 }*/
2542 mutex_unlock(&vdec_mutex);
2543}
2544
2545void vdec_core_request(struct vdec_s *vdec, unsigned long mask)
2546{
2547 vdec->core_mask |= mask;
2548
2549 if (vdec->slave)
2550 vdec->slave->core_mask |= mask;
2551 if (vdec_core->parallel_dec == 1) {
2552 if (mask & CORE_MASK_COMBINE)
2553 vdec_core->vdec_combine_flag++;
2554 }
2555
2556}
2557EXPORT_SYMBOL(vdec_core_request);
2558
2559int vdec_core_release(struct vdec_s *vdec, unsigned long mask)
2560{
2561 vdec->core_mask &= ~mask;
2562
2563 if (vdec->slave)
2564 vdec->slave->core_mask &= ~mask;
2565 if (vdec_core->parallel_dec == 1) {
2566 if (mask & CORE_MASK_COMBINE)
2567 vdec_core->vdec_combine_flag--;
2568 }
2569 return 0;
2570}
2571EXPORT_SYMBOL(vdec_core_release);
2572
2573bool vdec_core_with_input(unsigned long mask)
2574{
2575 enum vdec_type_e type;
2576
2577 for (type = VDEC_1; type < VDEC_MAX; type++) {
2578 if ((mask & (1 << type)) && cores_with_input[type])
2579 return true;
2580 }
2581
2582 return false;
2583}
2584
2585void vdec_core_finish_run(struct vdec_s *vdec, unsigned long mask)
2586{
2587 unsigned long i;
2588 unsigned long t = mask;
2589 mutex_lock(&vdec_mutex);
2590 while (t) {
2591 i = __ffs(t);
2592 clear_bit(i, &vdec->active_mask);
2593 t &= ~(1 << i);
2594 }
2595
2596 if (vdec->active_mask == 0)
2597 vdec_set_status(vdec, VDEC_STATUS_CONNECTED);
2598
2599 mutex_unlock(&vdec_mutex);
2600}
2601EXPORT_SYMBOL(vdec_core_finish_run);
2602/*
2603 * find what core resources are available for vdec
2604 */
2605static unsigned long vdec_schedule_mask(struct vdec_s *vdec,
2606 unsigned long active_mask)
2607{
2608 unsigned long mask = vdec->core_mask &
2609 ~CORE_MASK_COMBINE;
2610
2611 if (vdec->core_mask & CORE_MASK_COMBINE) {
2612 /* combined cores must be granted together */
2613 if ((mask & ~active_mask) == mask)
2614 return mask;
2615 else
2616 return 0;
2617 } else
2618 return mask & ~vdec->sched_mask & ~active_mask;
2619}
2620
2621/*
2622 *Decoder callback
2623 * Each decoder instance uses this callback to notify status change, e.g. when
2624 * decoder finished using HW resource.
2625 * a sample callback from decoder's driver is following:
2626 *
2627 * if (hw->vdec_cb) {
2628 * vdec_set_next_status(vdec, VDEC_STATUS_CONNECTED);
2629 * hw->vdec_cb(vdec, hw->vdec_cb_arg);
2630 * }
2631 */
2632static void vdec_callback(struct vdec_s *vdec, void *data)
2633{
2634 struct vdec_core_s *core = (struct vdec_core_s *)data;
2635
2636#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2637 vdec_profile(vdec, VDEC_PROFILE_EVENT_CB);
2638#endif
2639
2640 up(&core->sem);
2641}
2642
/*
 * Hard-IRQ entry for decoder interrupts.  Resolves which instance the
 * interrupt belongs to (per-core active instance in parallel mode,
 * otherwise the last scheduled one), marks it "in IRQ" so release can
 * drain safely, then dispatches to either the context's device ISR or
 * the instance's own irq_handler.
 */
static irqreturn_t vdec_isr(int irq, void *dev_id)
{
	struct vdec_isr_context_s *c =
		(struct vdec_isr_context_s *)dev_id;
	struct vdec_s *vdec = vdec_core->last_vdec;
	irqreturn_t ret = IRQ_HANDLED;

	/* Parallel mode: map the IRQ line to the core's active instance. */
	if (vdec_core->parallel_dec == 1) {
		if (irq == vdec_core->isr_context[VDEC_IRQ_0].irq)
			vdec = vdec_core->active_hevc;
		else if (irq == vdec_core->isr_context[VDEC_IRQ_1].irq)
			vdec = vdec_core->active_vdec;
		else
			vdec = NULL;
	}

	if (vdec) {
		/* Flag set/cleared around dispatch so vdec_release can wait
		 * for in-flight IRQ work; timestamp feeds the latency check
		 * in vdec_thread_isr.
		 */
		atomic_set(&vdec->inirq_flag, 1);
		vdec->isr_ns = local_clock();
	}
	/* A context-level device ISR overrides the instance handler. */
	if (c->dev_isr) {
		ret = c->dev_isr(irq, c->dev_id);
		goto isr_done;
	}

	/* Ignore interrupts that arrive on a context we don't route. */
	if ((c != &vdec_core->isr_context[VDEC_IRQ_0]) &&
	(c != &vdec_core->isr_context[VDEC_IRQ_1]) &&
	(c != &vdec_core->isr_context[VDEC_IRQ_HEVC_BACK])) {
#if 0
		pr_warn("vdec interrupt w/o a valid receiver\n");
#endif
		goto isr_done;
	}

	if (!vdec) {
#if 0
		pr_warn("vdec interrupt w/o an active instance running. core = %p\n",
			core);
#endif
		goto isr_done;
	}

	if (!vdec->irq_handler) {
#if 0
		pr_warn("vdec instance has no irq handle.\n");
#endif
		goto isr_done;
	}

	ret = vdec->irq_handler(vdec, c->index);
isr_done:
	if (vdec)
		atomic_set(&vdec->inirq_flag, 0);
	return ret;
}
2698
/*
 * Threaded-IRQ counterpart of vdec_isr: resolves the target instance
 * the same way, logs excessive hard-IRQ-to-thread latency, and
 * dispatches to the device's threaded ISR or the instance's
 * threaded_irq_handler.
 */
static irqreturn_t vdec_thread_isr(int irq, void *dev_id)
{
	struct vdec_isr_context_s *c =
		(struct vdec_isr_context_s *)dev_id;
	struct vdec_s *vdec = vdec_core->last_vdec;
	irqreturn_t ret = IRQ_HANDLED;

	if (vdec_core->parallel_dec == 1) {
		if (irq == vdec_core->isr_context[VDEC_IRQ_0].irq)
			vdec = vdec_core->active_hevc;
		else if (irq == vdec_core->isr_context[VDEC_IRQ_1].irq)
			vdec = vdec_core->active_vdec;
		else
			vdec = NULL;
	}

	if (vdec) {
		u32 isr2tfn = 0;
		atomic_set(&vdec->inirq_thread_flag, 1);
		vdec->tfn_ns = local_clock();
		/* NOTE(review): tfn_ns/isr_ns look like u64 nanosecond
		 * timestamps; storing the difference in a u32 truncates
		 * deltas beyond ~4.29s — confirm this is acceptable.
		 */
		isr2tfn = vdec->tfn_ns - vdec->isr_ns;
		/* Warn when the thread ran >10ms after the hard IRQ. */
		if (isr2tfn > 10000000)
			pr_err("!!!!!!! %s vdec_isr to %s took %uns !!!\n",
				vdec->vf_provider_name, __func__, isr2tfn);
	}
	/* Context-level threaded ISR takes precedence. */
	if (c->dev_threaded_isr) {
		ret = c->dev_threaded_isr(irq, c->dev_id);
		goto thread_isr_done;
	}
	if (!vdec)
		goto thread_isr_done;

	if (!vdec->threaded_irq_handler)
		goto thread_isr_done;
	ret = vdec->threaded_irq_handler(vdec, c->index);
thread_isr_done:
	if (vdec)
		atomic_set(&vdec->inirq_thread_flag, 0);
	return ret;
}
2739
/*
 * Decide whether @vdec can be scheduled on the cores in @mask.
 * Returns the subset of @mask the instance is ready to run on
 * (0 / false when it cannot run).  Also updates the instance's
 * need_more_data flags as a side effect of the stream-level check.
 */
unsigned long vdec_ready_to_run(struct vdec_s *vdec, unsigned long mask)
{
	unsigned long ready_mask;
	struct vdec_input_s *input = &vdec->input;
	/* Only connected/active instances are schedulable. */
	if ((vdec->status != VDEC_STATUS_CONNECTED) &&
	    (vdec->status != VDEC_STATUS_ACTIVE))
		return false;

	if (!vdec->run_ready)
		return false;

	/* when crc32 error, block at error frame */
	if (vdec->vfc.err_crc_block)
		return false;

	/* Dual-decoder pair members wait until sched is granted. */
	if ((vdec->slave || vdec->master) &&
		(vdec->sched == 0))
		return false;
#ifdef VDEC_DEBUG_SUPPORT
	inc_profi_count(mask, vdec->check_count);
#endif
	if (vdec_core_with_input(mask)) {

		/* check frame based input underrun */
		if (input && !input->eos && input_frame_based(input)
			&& (!vdec_input_next_chunk(input))) {
#ifdef VDEC_DEBUG_SUPPORT
			inc_profi_count(mask, vdec->input_underrun_count);
#endif
			return false;
		}
		/* check streaming prepare level threshold if not EOS */
		if (input && input_stream_based(input) && !input->eos) {
			u32 rp, wp, level;

			/* Compute buffered bytes in the (circular) stream
			 * buffer from parser read/write pointers.
			 */
			rp = READ_PARSER_REG(PARSER_VIDEO_RP);
			wp = READ_PARSER_REG(PARSER_VIDEO_WP);
			if (wp < rp)
				level = input->size + wp - rp;
			else
				level = wp - rp;

			/* Below the prepare threshold with fewer than 2 PTS
			 * records pending: ask for more data and don't run.
			 */
			if ((level < input->prepare_level) &&
				(pts_get_rec_num(PTS_TYPE_VIDEO,
					vdec->input.total_rd_count) < 2)) {
				vdec->need_more_data |= VDEC_NEED_MORE_DATA;
#ifdef VDEC_DEBUG_SUPPORT
				inc_profi_count(mask, vdec->input_underrun_count);
				/* step-mode override: force-run this id. */
				if (step_mode & 0x200) {
					if ((step_mode & 0xff) == vdec->id) {
						step_mode |= 0xff;
						return mask;
					}
				}
#endif
				return false;
			} else if (level > input->prepare_level)
				vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
		}
	}

	/* Debug single-step: only the selected instance id may run once. */
	if (step_mode) {
		if ((step_mode & 0xff) != vdec->id)
			return 0;
		step_mode |= 0xff; /*VDEC_DEBUG_SUPPORT*/
	}

	/*step_mode &= ~0xff; not work for id of 0, removed*/

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_CHK_RUN_READY);
#endif

	/* Final say belongs to the codec driver's run_ready hook. */
	ready_mask = vdec->run_ready(vdec, mask) & mask;
#ifdef VDEC_DEBUG_SUPPORT
	if (ready_mask != mask)
		inc_profi_count(ready_mask ^ mask, vdec->not_run_ready_count);
#endif
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	if (ready_mask)
		vdec_profile(vdec, VDEC_PROFILE_EVENT_RUN_READY);
#endif

	return ready_mask;
}
2825
2826/* bridge on/off vdec's interrupt processing to vdec core */
2827static void vdec_route_interrupt(struct vdec_s *vdec, unsigned long mask,
2828 bool enable)
2829{
2830 enum vdec_type_e type;
2831
2832 for (type = VDEC_1; type < VDEC_MAX; type++) {
2833 if (mask & (1 << type)) {
2834 struct vdec_isr_context_s *c =
2835 &vdec_core->isr_context[cores_int[type]];
2836 if (enable)
2837 c->vdec = vdec;
2838 else if (c->vdec == vdec)
2839 c->vdec = NULL;
2840 }
2841 }
2842}
2843
2844/*
2845 * Set up secure protection for each decoder instance running.
2846 * Note: The operation from REE side only resets memory access
2847 * to a default policy and even a non_secure type will still be
2848 * changed to secure type automatically when secure source is
2849 * detected inside TEE.
 * Perform need_more_data checking and set the flag if the decoder
 * is not consuming data.
2852 */
/*
 * Prepare @vdec to run on the cores in @mask: route the relevant IRQ
 * contexts to this instance, program the TEE-side secure/non-secure
 * memory policy for the input target, and maintain the need_more_data
 * bookkeeping used to detect a decoder that runs without consuming.
 */
void vdec_prepare_run(struct vdec_s *vdec, unsigned long mask)
{
	struct vdec_input_s *input = &vdec->input;
	int secure = (vdec_secure(vdec)) ? DMC_DEV_TYPE_SECURE :
			DMC_DEV_TYPE_NON_SECURE;

	vdec_route_interrupt(vdec, mask, true);

	/* Cores without an input side need no secure/data handling. */
	if (!vdec_core_with_input(mask))
		return;

	if (secure && vdec_stream_based(vdec) && force_nosecure_even_drm)
	{
		/* Verimatrix ultra webclient (HLS) was played in drmmode and used hw demux. In drmmode VDEC only can access secure.
		Now HW demux parsed es data to no-secure buffer. So the VDEC input was no-secure, VDEC playback failed. Forcing
		use nosecure for verimatrix webclient HLS. If in the future HW demux can parse es data to secure buffer, make
		VDEC r/w secure.*/
		secure = 0;
		//pr_debug("allow VDEC can access nosecure even in drmmode\n");
	}
	/* Apply the policy to the DMC device matching the input target. */
	if (input->target == VDEC_INPUT_TARGET_VLD)
		tee_config_device_secure(DMC_DEV_ID_VDEC, secure);
	else if (input->target == VDEC_INPUT_TARGET_HEVC)
		tee_config_device_secure(DMC_DEV_ID_HEVC, secure);

	/* If the previous run consumed nothing (RUN set but DIRTY never
	 * set by the input side), flag that more data is needed.
	 */
	if (vdec_stream_based(vdec) &&
		((vdec->need_more_data & VDEC_NEED_MORE_DATA_RUN) &&
		(vdec->need_more_data & VDEC_NEED_MORE_DATA_DIRTY) == 0)) {
		vdec->need_more_data |= VDEC_NEED_MORE_DATA;
	}

	/* Mark this run; DIRTY will be re-set when new data arrives. */
	vdec->need_more_data |= VDEC_NEED_MORE_DATA_RUN;
	vdec->need_more_data &= ~VDEC_NEED_MORE_DATA_DIRTY;
}
2887
2888
2889/* struct vdec_core_shread manages all decoder instance in active list. When
2890 * a vdec is added into the active list, it can onlt be in two status:
2891 * VDEC_STATUS_CONNECTED(the decoder does not own HW resource and ready to run)
2892 * VDEC_STATUS_ACTIVE(the decoder owns HW resources and is running).
2893 * Removing a decoder from active list is only performed within core thread.
2894 * Adding a decoder into active list is performed from user thread.
2895 */
/*
 * Core scheduler thread.  Runs as SCHED_FIFO and loops on the core
 * semaphore: each wakeup (1) reclaims cores released by decoders and
 * cleans up their consumed input chunks, (2) moves disconnecting
 * instances off the connected list, (3) elects the next runnable
 * instance (round-robin starting after last_vdec) and launches it via
 * its run() hook, and (4) re-arms itself when work may still be
 * pending.  Returns 0 on kthread stop.
 */
static int vdec_core_thread(void *data)
{
	struct vdec_core_s *core = (struct vdec_core_s *)data;
	struct sched_param param = {.sched_priority = MAX_RT_PRIO/2};
	unsigned long flags;
	int i;

	sched_setscheduler(current, SCHED_FIFO, &param);

	allow_signal(SIGTERM);

	while (down_interruptible(&core->sem) == 0) {
		struct vdec_s *vdec, *tmp, *worker;
		unsigned long sched_mask = 0;
		LIST_HEAD(disconnecting_list);

		if (kthread_should_stop())
			break;
		mutex_lock(&vdec_mutex);

		/* Refresh the mask of powered cores from the ref counts. */
		if (core->parallel_dec == 1) {
			for (i = VDEC_1; i < VDEC_MAX; i++) {
				core->power_ref_mask =
					core->power_ref_count[i] > 0 ?
					(core->power_ref_mask | (1 << i)) :
					(core->power_ref_mask & ~(1 << i));
			}
		}
		/* clean up previous active vdec's input */
		list_for_each_entry(vdec, &core->connected_vdec_list, list) {
			/* Cores scheduled to this vdec but no longer active
			 * were released by the decoder since last pass.
			 */
			unsigned long mask = vdec->sched_mask &
				(vdec->active_mask ^ vdec->sched_mask);

			vdec_route_interrupt(vdec, mask, false);

#ifdef VDEC_DEBUG_SUPPORT
			update_profi_clk_stop(vdec, mask, get_current_clk());
#endif
			/*
			 * If decoder released some core resources (mask), then
			 * check if these core resources are associated
			 * with any input side and do input clean up accordingly
			 */
			if (vdec_core_with_input(mask)) {
				struct vdec_input_s *input = &vdec->input;
				/* Drop all leading fully-consumed chunks. */
				while (!list_empty(
					&input->vframe_chunk_list)) {
					struct vframe_chunk_s *chunk =
						vdec_input_next_chunk(input);
					if (chunk && (chunk->flag &
						VFRAME_CHUNK_FLAG_CONSUMED))
						vdec_input_release_chunk(input,
							chunk);
					else
						break;
				}

				vdec_save_input_context(vdec);
			}

			vdec->sched_mask &= ~mask;
			core->sched_mask &= ~mask;
		}
		vdec_update_buff_status();
		/*
		 *todo:
		 * this is the case when the decoder is in active mode and
		 * the system side wants to stop it. Currently we rely on
		 * the decoder instance to go back to VDEC_STATUS_CONNECTED
		 * from VDEC_STATUS_ACTIVE by its own. However, if for some
		 * reason the decoder can not exist by itself (dead decoding
		 * or whatever), then we may have to add another vdec API
		 * to kill the vdec and release its HW resource and make it
		 * become inactive again.
		 * if ((core->active_vdec) &&
		 * (core->active_vdec->status == VDEC_STATUS_DISCONNECTED)) {
		 * }
		 */

		/* check disconnected decoders */
		flags = vdec_core_lock(vdec_core);
		list_for_each_entry_safe(vdec, tmp,
			&core->connected_vdec_list, list) {
			if ((vdec->status == VDEC_STATUS_CONNECTED) &&
			    (vdec->next_status == VDEC_STATUS_DISCONNECTED)) {
				/* Drop any core-level references before the
				 * instance leaves the connected list.
				 */
				if (core->parallel_dec == 1) {
					if (vdec_core->active_hevc == vdec)
						vdec_core->active_hevc = NULL;
					if (vdec_core->active_vdec == vdec)
						vdec_core->active_vdec = NULL;
				}
				if (core->last_vdec == vdec)
					core->last_vdec = NULL;
				list_move(&vdec->list, &disconnecting_list);
			}
		}
		vdec_core_unlock(vdec_core, flags);
		mutex_unlock(&vdec_mutex);
		/* elect next vdec to be scheduled */
		vdec = core->last_vdec;
		if (vdec) {
			/* Round-robin: start from the entry after the last
			 * scheduled instance.
			 */
			vdec = list_entry(vdec->list.next, struct vdec_s, list);
			list_for_each_entry_from(vdec,
				&core->connected_vdec_list, list) {
				sched_mask = vdec_schedule_mask(vdec,
					core->sched_mask);
				if (!sched_mask)
					continue;
				sched_mask = vdec_ready_to_run(vdec,
					sched_mask);
				if (sched_mask)
					break;
			}

			/* Reached list head: nothing runnable after last. */
			if (&vdec->list == &core->connected_vdec_list)
				vdec = NULL;
		}

		if (!vdec) {
			/* search from beginning */
			list_for_each_entry(vdec,
				&core->connected_vdec_list, list) {
				sched_mask = vdec_schedule_mask(vdec,
					core->sched_mask);
				/* Wrapped back to last_vdec: give it one
				 * final chance, then stop searching.
				 */
				if (vdec == core->last_vdec) {
					if (!sched_mask) {
						vdec = NULL;
						break;
					}

					sched_mask = vdec_ready_to_run(vdec,
						sched_mask);

					if (!sched_mask) {
						vdec = NULL;
						break;
					}
					break;
				}

				if (!sched_mask)
					continue;

				sched_mask = vdec_ready_to_run(vdec,
					sched_mask);
				if (sched_mask)
					break;
			}

			if (&vdec->list == &core->connected_vdec_list)
				vdec = NULL;
		}

		worker = vdec;

		if (vdec) {
			unsigned long mask = sched_mask;
			unsigned long i;

			/* setting active_mask should be atomic.
			 * it can be modified by decoder driver callbacks.
			 */
			while (sched_mask) {
				i = __ffs(sched_mask);
				set_bit(i, &vdec->active_mask);
				sched_mask &= ~(1 << i);
			}

			/* vdec's sched_mask is only set from core thread */
			vdec->sched_mask |= mask;
			/* Force firmware reload when the core last ran a
			 * different codec type.
			 */
			if (core->last_vdec) {
				if ((core->last_vdec != vdec) &&
					(core->last_vdec->mc_type != vdec->mc_type))
					vdec->mc_loaded = 0;/*clear for reload firmware*/
			} else
				vdec->mc_loaded = 0;
			core->last_vdec = vdec;
			if (debug & 2)
				vdec->mc_loaded = 0;/*alway reload firmware*/
			vdec_set_status(vdec, VDEC_STATUS_ACTIVE);

			core->sched_mask |= mask;
			if (core->parallel_dec == 1)
				vdec_save_active_hw(vdec);
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
			vdec_profile(vdec, VDEC_PROFILE_EVENT_RUN);
#endif
			vdec_prepare_run(vdec, mask);
#ifdef VDEC_DEBUG_SUPPORT
			inc_profi_count(mask, vdec->run_count);
			update_profi_clk_run(vdec, mask, get_current_clk());
#endif
			/* Hand the granted cores to the decoder; completion
			 * is signalled via vdec_callback -> up(&core->sem).
			 */
			vdec->run(vdec, mask, vdec_callback, core);


			/* we have some cores scheduled, keep working until
			 * all vdecs are checked with no cores to schedule
			 */
			if (core->parallel_dec == 1) {
				if (vdec_core->vdec_combine_flag == 0)
					up(&core->sem);
			} else
				up(&core->sem);
		}

		/* remove disconnected decoder from active list */
		list_for_each_entry_safe(vdec, tmp, &disconnecting_list, list) {
			list_del(&vdec->list);
			vdec_set_status(vdec, VDEC_STATUS_DISCONNECTED);
			/*core->last_vdec = NULL;*/
			complete(&vdec->inactive_done);
		}

		/* if there is no new work scheduled and nothing
		 * is running, sleep 20ms
		 */
		if (core->parallel_dec == 1) {
			if (vdec_core->vdec_combine_flag == 0) {
				if ((!worker) &&
					((core->sched_mask != core->power_ref_mask)) &&
					(atomic_read(&vdec_core->vdec_nr) > 0) &&
					((core->buff_flag | core->stream_buff_flag) &
					(core->sched_mask ^ core->power_ref_mask))) {
					usleep_range(1000, 2000);
					up(&core->sem);
				}
			} else {
				if ((!worker) && (!core->sched_mask) &&
					(atomic_read(&vdec_core->vdec_nr) > 0) &&
					(core->buff_flag | core->stream_buff_flag)) {
					usleep_range(1000, 2000);
					up(&core->sem);
				}
			}
		} else if ((!worker) && (!core->sched_mask) && (atomic_read(&vdec_core->vdec_nr) > 0)) {
			usleep_range(1000, 2000);
			up(&core->sem);
		}

	}

	return 0;
}
3139
3140#if 1 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8 */
/*
 * HEVC power-up sanity test (GXBB workaround): drives the HEVC IPP
 * block through a minimal SW-command decode against a dummy decompress
 * buffer at @decomp_addr, waits @us_delay microseconds, and reads a
 * debug status to decide whether the core came up healthy.  Returns
 * true when the debug status reads back as expected.  The register
 * write sequence is order-critical — do not reorder.
 */
static bool test_hevc(u32 decomp_addr, u32 us_delay)
{
	int i;

	/* SW_RESET IPP */
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 1);
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 0);

	/* initialize all canvas table */
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0);
	for (i = 0; i < 32; i++)
		WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
			0x1 | (i << 8) | decomp_addr);
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 1);
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR, (0 << 8) | (0<<1) | 1);
	for (i = 0; i < 32; i++)
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);

	/* Initialize mcrcc */
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2);
	WRITE_VREG(HEVCD_MCRCC_CTL2, 0x0);
	WRITE_VREG(HEVCD_MCRCC_CTL3, 0x0);
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);

	/* Decomp initialize */
	WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x0);
	WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);

	/* Frame level initialization */
	WRITE_VREG(HEVCD_IPP_TOP_FRMCONFIG, 0x100 | (0x100 << 16));
	WRITE_VREG(HEVCD_IPP_TOP_TILECONFIG3, 0x0);
	WRITE_VREG(HEVCD_IPP_TOP_LCUCONFIG, 0x1 << 5);
	WRITE_VREG(HEVCD_IPP_BITDEPTH_CONFIG, 0x2 | (0x2 << 2));

	WRITE_VREG(HEVCD_IPP_CONFIG, 0x0);
	WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, 0x0);

	/* Enable SWIMP mode */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_CONFIG, 0x1);

	/* Enable frame */
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 0x2);
	WRITE_VREG(HEVCD_IPP_TOP_FRMCTL, 0x1);

	/* Send SW-command CTB info */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_CTBINFO, 0x1 << 31);

	/* Send PU_command */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO0, (0x4 << 9) | (0x4 << 16));
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO1, 0x1 << 3);
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO2, 0x0);
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO3, 0x0);

	/* Give the HW time to execute the command before sampling. */
	udelay(us_delay);

	WRITE_VREG(HEVCD_IPP_DBG_SEL, 0x2 << 4);

	/* Healthy when the selected debug status reads back as 1. */
	return (READ_VREG(HEVCD_IPP_DBG_DATA) & 3) == 1;
}
3200
/*
 * Force every present decoder core (VDEC1, and VDEC2/HCODEC/HEVC when
 * the SoC has them) into the powered-off state: raise isolation,
 * power down the core's memories, then assert the sleep bits.  The
 * per-core order (isolation -> memory PD -> sleep) is the required
 * power-down sequence — do not reorder.
 */
void vdec_power_reset(void)
{
	/* enable vdec1 isolation */
	WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
		READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0xc0);
	/* power off vdec1 memories */
	WRITE_VREG(DOS_MEM_PD_VDEC, 0xffffffffUL);
	/* vdec1 power off */
	WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
		READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0xc);

	if (has_vdec2()) {
		/* enable vdec2 isolation */
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x300);
		/* power off vdec2 memories */
		WRITE_VREG(DOS_MEM_PD_VDEC2, 0xffffffffUL);
		/* vdec2 power off */
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0x30);
	}

	if (has_hdec()) {
		/* enable hcodec isolation */
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x30);
		/* power off hcodec memories */
		WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
		/* hcodec power off */
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 3);
	}

	if (has_hevc_vdec()) {
		/* enable hevc isolation */
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0xc00);
		/* power off hevc memories */
		WRITE_VREG(DOS_MEM_PD_HEVC, 0xffffffffUL);
		/* hevc power off */
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0xc0);
	}
}
EXPORT_SYMBOL(vdec_power_reset);
3246
3247void vdec_poweron(enum vdec_type_e core)
3248{
3249 void *decomp_addr = NULL;
3250 dma_addr_t decomp_dma_addr;
3251 u32 decomp_addr_aligned = 0;
3252 int hevc_loop = 0;
3253 int sleep_val, iso_val;
3254 bool is_power_ctrl_ver2 = false;
3255
3256 if (core >= VDEC_MAX)
3257 return;
3258
3259 mutex_lock(&vdec_mutex);
3260
3261 vdec_core->power_ref_count[core]++;
3262 if (vdec_core->power_ref_count[core] > 1) {
3263 mutex_unlock(&vdec_mutex);
3264 return;
3265 }
3266
3267 if (vdec_on(core)) {
3268 mutex_unlock(&vdec_mutex);
3269 return;
3270 }
3271
3272 is_power_ctrl_ver2 =
3273 ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3274 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) ? true : false;
3275
3276 if (hevc_workaround_needed() &&
3277 (core == VDEC_HEVC)) {
3278 decomp_addr = codec_mm_dma_alloc_coherent(MEM_NAME,
3279 SZ_64K + SZ_4K, &decomp_dma_addr, GFP_KERNEL, 0);
3280
3281 if (decomp_addr) {
3282 decomp_addr_aligned = ALIGN(decomp_dma_addr, SZ_64K);
3283 memset((u8 *)decomp_addr +
3284 (decomp_addr_aligned - decomp_dma_addr),
3285 0xff, SZ_4K);
3286 } else
3287 pr_err("vdec: alloc HEVC gxbb decomp buffer failed.\n");
3288 }
3289
3290 if (core == VDEC_1) {
3291 sleep_val = is_power_ctrl_ver2 ? 0x2 : 0xc;
3292 iso_val = is_power_ctrl_ver2 ? 0x2 : 0xc0;
3293
3294 /* vdec1 power on */
3295#ifdef CONFIG_AMLOGIC_POWER
3296 if (is_support_power_ctrl()) {
3297 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3298 mutex_unlock(&vdec_mutex);
3299 pr_err("vdec-1 power on ctrl sleep fail.\n");
3300 return;
3301 }
3302 } else {
3303 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3304 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3305 }
3306#else
3307 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3308 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3309#endif
3310 /* wait 10uS */
3311 udelay(10);
3312 /* vdec1 soft reset */
3313 WRITE_VREG(DOS_SW_RESET0, 0xfffffffc);
3314 WRITE_VREG(DOS_SW_RESET0, 0);
3315 /* enable vdec1 clock */
3316 /*
3317 *add power on vdec clock level setting,only for m8 chip,
3318 * m8baby and m8m2 can dynamic adjust vdec clock,
3319 * power on with default clock level
3320 */
3321 amports_switch_gate("clk_vdec_mux", 1);
3322 vdec_clock_hi_enable();
3323 /* power up vdec memories */
3324 WRITE_VREG(DOS_MEM_PD_VDEC, 0);
3325
3326 /* remove vdec1 isolation */
3327#ifdef CONFIG_AMLOGIC_POWER
3328 if (is_support_power_ctrl()) {
3329 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3330 mutex_unlock(&vdec_mutex);
3331 pr_err("vdec-1 power on ctrl iso fail.\n");
3332 return;
3333 }
3334 } else {
3335 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3336 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3337 }
3338#else
3339 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3340 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3341#endif
3342 /* reset DOS top registers */
3343 WRITE_VREG(DOS_VDEC_MCRCC_STALL_CTRL, 0);
3344 } else if (core == VDEC_2) {
3345 if (has_vdec2()) {
3346 /* vdec2 power on */
3347 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3348 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3349 ~0x30);
3350 /* wait 10uS */
3351 udelay(10);
3352 /* vdec2 soft reset */
3353 WRITE_VREG(DOS_SW_RESET2, 0xffffffff);
3354 WRITE_VREG(DOS_SW_RESET2, 0);
3355 /* enable vdec1 clock */
3356 vdec2_clock_hi_enable();
3357 /* power up vdec memories */
3358 WRITE_VREG(DOS_MEM_PD_VDEC2, 0);
3359 /* remove vdec2 isolation */
3360 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3361 READ_AOREG(AO_RTI_GEN_PWR_ISO0) &
3362 ~0x300);
3363 /* reset DOS top registers */
3364 WRITE_VREG(DOS_VDEC2_MCRCC_STALL_CTRL, 0);
3365 }
3366 } else if (core == VDEC_HCODEC) {
3367 if (has_hdec()) {
3368 sleep_val = is_power_ctrl_ver2 ? 0x1 : 0x3;
3369 iso_val = is_power_ctrl_ver2 ? 0x1 : 0x30;
3370
3371 /* hcodec power on */
3372#ifdef CONFIG_AMLOGIC_POWER
3373 if (is_support_power_ctrl()) {
3374 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3375 mutex_unlock(&vdec_mutex);
3376 pr_err("hcodec power on ctrl sleep fail.\n");
3377 return;
3378 }
3379 } else {
3380 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3381 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3382 }
3383#else
3384 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3385 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3386#endif
3387 /* wait 10uS */
3388 udelay(10);
3389 /* hcodec soft reset */
3390 WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
3391 WRITE_VREG(DOS_SW_RESET1, 0);
3392 /* enable hcodec clock */
3393 hcodec_clock_enable();
3394 /* power up hcodec memories */
3395 WRITE_VREG(DOS_MEM_PD_HCODEC, 0);
3396 /* remove hcodec isolation */
3397#ifdef CONFIG_AMLOGIC_POWER
3398 if (is_support_power_ctrl()) {
3399 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3400 mutex_unlock(&vdec_mutex);
3401 pr_err("hcodec power on ctrl iso fail.\n");
3402 return;
3403 }
3404 } else {
3405 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3406 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3407 }
3408#else
3409 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3410 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3411#endif
3412 }
3413 } else if (core == VDEC_HEVC) {
3414 if (has_hevc_vdec()) {
3415 bool hevc_fixed = false;
3416
3417 sleep_val = is_power_ctrl_ver2 ? 0x4 : 0xc0;
3418 iso_val = is_power_ctrl_ver2 ? 0x4 : 0xc00;
3419
3420 while (!hevc_fixed) {
3421 /* hevc power on */
3422#ifdef CONFIG_AMLOGIC_POWER
3423 if (is_support_power_ctrl()) {
3424 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3425 mutex_unlock(&vdec_mutex);
3426 pr_err("hevc power on ctrl sleep fail.\n");
3427 return;
3428 }
3429 } else {
3430 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3431 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3432 }
3433#else
3434 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3435 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3436#endif
3437 /* wait 10uS */
3438 udelay(10);
3439 /* hevc soft reset */
3440 WRITE_VREG(DOS_SW_RESET3, 0xffffffff);
3441 WRITE_VREG(DOS_SW_RESET3, 0);
3442 /* enable hevc clock */
3443 amports_switch_gate("clk_hevc_mux", 1);
3444 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
3445 amports_switch_gate("clk_hevcb_mux", 1);
3446 hevc_clock_hi_enable();
3447 hevc_back_clock_hi_enable();
3448 /* power up hevc memories */
3449 WRITE_VREG(DOS_MEM_PD_HEVC, 0);
3450 /* remove hevc isolation */
3451#ifdef CONFIG_AMLOGIC_POWER
3452 if (is_support_power_ctrl()) {
3453 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3454 mutex_unlock(&vdec_mutex);
3455 pr_err("hevc power on ctrl iso fail.\n");
3456 return;
3457 }
3458 } else {
3459 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3460 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3461 }
3462#else
3463 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3464 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3465#endif
3466 if (!hevc_workaround_needed())
3467 break;
3468
3469 if (decomp_addr)
3470 hevc_fixed = test_hevc(
3471 decomp_addr_aligned, 20);
3472
3473 if (!hevc_fixed) {
3474 hevc_loop++;
3475
3476 mutex_unlock(&vdec_mutex);
3477
3478 if (hevc_loop >= HEVC_TEST_LIMIT) {
3479 pr_warn("hevc power sequence over limit\n");
3480 pr_warn("=====================================================\n");
3481 pr_warn(" This chip is identified to have HW failure.\n");
3482 pr_warn(" Please contact sqa-platform to replace the platform.\n");
3483 pr_warn("=====================================================\n");
3484
3485 panic("Force panic for chip detection !!!\n");
3486
3487 break;
3488 }
3489
3490 vdec_poweroff(VDEC_HEVC);
3491
3492 mdelay(10);
3493
3494 mutex_lock(&vdec_mutex);
3495 }
3496 }
3497
3498 if (hevc_loop > hevc_max_reset_count)
3499 hevc_max_reset_count = hevc_loop;
3500
3501 WRITE_VREG(DOS_SW_RESET3, 0xffffffff);
3502 udelay(10);
3503 WRITE_VREG(DOS_SW_RESET3, 0);
3504 }
3505 }
3506
3507 if (decomp_addr)
3508 codec_mm_dma_free_coherent(MEM_NAME,
3509 SZ_64K + SZ_4K, decomp_addr, decomp_dma_addr, 0);
3510
3511 mutex_unlock(&vdec_mutex);
3512}
3513EXPORT_SYMBOL(vdec_poweron);
3514
/*
 * vdec_poweroff() - drop one power reference on @core and, when the count
 * reaches zero, power the hardware block down.
 * @core: which decoder core (VDEC_1 / VDEC_2 / VDEC_HCODEC / VDEC_HEVC).
 *
 * Power-down order is the reverse of vdec_poweron(): raise isolation,
 * power down the core memories, gate the clock, then assert the sleep
 * bits.  On SoCs with CONFIG_AMLOGIC_POWER the sequence goes through the
 * power-control driver instead of raw AO register writes.
 *
 * NOTE(review): the reference count is decremented unconditionally; an
 * unbalanced call would drive it negative and force a power-down -- TODO
 * confirm callers always pair poweron/poweroff.
 */
void vdec_poweroff(enum vdec_type_e core)
{
	int sleep_val, iso_val;
	bool is_power_ctrl_ver2 = false;

	if (core >= VDEC_MAX)
		return;

	mutex_lock(&vdec_mutex);

	/* last reference releases the hardware; earlier ones just count down */
	vdec_core->power_ref_count[core]--;
	if (vdec_core->power_ref_count[core] > 0) {
		mutex_unlock(&vdec_mutex);
		return;
	}

	/* SM1 and later (except TL1) use the v2 sleep/iso bit layout */
	is_power_ctrl_ver2 =
		((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
		(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) ? true : false;

	if (core == VDEC_1) {
		sleep_val = is_power_ctrl_ver2 ? 0x2 : 0xc;
		iso_val = is_power_ctrl_ver2 ? 0x2 : 0xc0;

		/* enable vdec1 isolation */
#ifdef CONFIG_AMLOGIC_POWER
		if (is_support_power_ctrl()) {
			if (power_ctrl_iso_mask(false, iso_val, 0)) {
				mutex_unlock(&vdec_mutex);
				pr_err("vdec-1 power off ctrl iso fail.\n");
				return;
			}
		} else {
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
		}
#else
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
		/* power off vdec1 memories */
		WRITE_VREG(DOS_MEM_PD_VDEC, 0xffffffffUL);
		/* disable vdec1 clock */
		vdec_clock_off();
		/* vdec1 power off */
#ifdef CONFIG_AMLOGIC_POWER
		if (is_support_power_ctrl()) {
			if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
				mutex_unlock(&vdec_mutex);
				pr_err("vdec-1 power off ctrl sleep fail.\n");
				return;
			}
		} else {
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
		}
#else
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
	} else if (core == VDEC_2) {
		if (has_vdec2()) {
			/* enable vdec2 isolation */
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) |
				0x300);
			/* power off vdec2 memories */
			WRITE_VREG(DOS_MEM_PD_VDEC2, 0xffffffffUL);
			/* disable vdec2 clock */
			vdec2_clock_off();
			/* vdec2 power off */
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) |
				0x30);
		}
	} else if (core == VDEC_HCODEC) {
		if (has_hdec()) {
			sleep_val = is_power_ctrl_ver2 ? 0x1 : 0x3;
			iso_val = is_power_ctrl_ver2 ? 0x1 : 0x30;

			/* enable hcodec isolation */
#ifdef CONFIG_AMLOGIC_POWER
			if (is_support_power_ctrl()) {
				if (power_ctrl_iso_mask(false, iso_val, 0)) {
					mutex_unlock(&vdec_mutex);
					pr_err("hcodec power off ctrl iso fail.\n");
					return;
				}
			} else {
				WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
					READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
			}
#else
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
			/* power off hcodec memories */
			WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
			/* disable hcodec clock */
			hcodec_clock_off();
			/* hcodec power off */
#ifdef CONFIG_AMLOGIC_POWER
			if (is_support_power_ctrl()) {
				if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
					mutex_unlock(&vdec_mutex);
					pr_err("hcodec power off ctrl sleep fail.\n");
					return;
				}
			} else {
				WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
					READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
			}
#else
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
		}
	} else if (core == VDEC_HEVC) {
		if (has_hevc_vdec()) {
			sleep_val = is_power_ctrl_ver2 ? 0x4 : 0xc0;
			iso_val = is_power_ctrl_ver2 ? 0x4 : 0xc00;

			if (no_powerdown == 0) {
				/* enable hevc isolation */
#ifdef CONFIG_AMLOGIC_POWER
				if (is_support_power_ctrl()) {
					if (power_ctrl_iso_mask(false, iso_val, 0)) {
						mutex_unlock(&vdec_mutex);
						pr_err("hevc power off ctrl iso fail.\n");
						return;
					}
				} else {
					WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
						READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
				}
#else
				WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
					READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
				/* power off hevc memories */
				WRITE_VREG(DOS_MEM_PD_HEVC, 0xffffffffUL);

				/* disable hevc clock */
				hevc_clock_off();
				if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
					hevc_back_clock_off();

				/* hevc power off */
#ifdef CONFIG_AMLOGIC_POWER
				if (is_support_power_ctrl()) {
					if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
						mutex_unlock(&vdec_mutex);
						pr_err("hevc power off ctrl sleep fail.\n");
						return;
					}
				} else {
					WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
						READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
				}
#else
				WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
					READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
			} else {
				/* one-shot skip: reset instead of power-down */
				pr_info("!!!!!!!!not power down\n");
				hevc_reset_core(NULL);
				no_powerdown = 0;
			}
		}
	}
	mutex_unlock(&vdec_mutex);
}
EXPORT_SYMBOL(vdec_poweroff);
3688
3689bool vdec_on(enum vdec_type_e core)
3690{
3691 bool ret = false;
3692
3693 if (core == VDEC_1) {
3694 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3695 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3696 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3697 ? 0x2 : 0xc)) == 0) &&
3698 (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x100))
3699 ret = true;
3700 } else if (core == VDEC_2) {
3701 if (has_vdec2()) {
3702 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & 0x30) == 0) &&
3703 (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x100))
3704 ret = true;
3705 }
3706 } else if (core == VDEC_HCODEC) {
3707 if (has_hdec()) {
3708 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3709 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3710 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3711 ? 0x1 : 0x3)) == 0) &&
3712 (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x1000000))
3713 ret = true;
3714 }
3715 } else if (core == VDEC_HEVC) {
3716 if (has_hevc_vdec()) {
3717 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3718 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3719 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3720 ? 0x4 : 0xc0)) == 0) &&
3721 (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x1000000))
3722 ret = true;
3723 }
3724 }
3725
3726 return ret;
3727}
3728EXPORT_SYMBOL(vdec_on);
3729
3730#elif 0 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON6TVD */
/*
 * vdec_poweron() - legacy (MESON6TVD-era) power-on: reset the core, then
 * enable its clock.  No AO power-domain handling on these SoCs.
 *
 * NOTE: this whole definition sits inside an "#elif 0" branch and is
 * currently compiled out; kept for reference only.
 */
void vdec_poweron(enum vdec_type_e core)
{
	ulong flags;

	spin_lock_irqsave(&lock, flags);

	if (core == VDEC_1) {
		/* vdec1 soft reset */
		WRITE_VREG(DOS_SW_RESET0, 0xfffffffc);
		WRITE_VREG(DOS_SW_RESET0, 0);
		/* enable vdec1 clock */
		vdec_clock_enable();
		/* reset DOS top registers */
		WRITE_VREG(DOS_VDEC_MCRCC_STALL_CTRL, 0);
	} else if (core == VDEC_2) {
		/* vdec2 soft reset */
		WRITE_VREG(DOS_SW_RESET2, 0xffffffff);
		WRITE_VREG(DOS_SW_RESET2, 0);
		/* enable vdec2 clock */
		vdec2_clock_enable();
		/* reset DOS top registers */
		WRITE_VREG(DOS_VDEC2_MCRCC_STALL_CTRL, 0);
	} else if (core == VDEC_HCODEC) {
		/* hcodec soft reset */
		WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
		WRITE_VREG(DOS_SW_RESET1, 0);
		/* enable hcodec clock */
		hcodec_clock_enable();
	}

	spin_unlock_irqrestore(&lock, flags);
}
3763
/*
 * vdec_poweroff() - legacy (MESON6TVD-era) power-off: just gate the
 * core's clock; these SoCs have no per-core power domain to drop.
 *
 * NOTE: compiled out ("#elif 0"); kept for reference only.
 */
void vdec_poweroff(enum vdec_type_e core)
{
	ulong flags;

	spin_lock_irqsave(&lock, flags);

	if (core == VDEC_1) {
		/* disable vdec1 clock */
		vdec_clock_off();
	} else if (core == VDEC_2) {
		/* disable vdec2 clock */
		vdec2_clock_off();
	} else if (core == VDEC_HCODEC) {
		/* disable hcodec clock */
		hcodec_clock_off();
	}

	spin_unlock_irqrestore(&lock, flags);
}
3783
/*
 * vdec_on() - legacy (MESON6TVD-era) check: a core is "on" when its HHI
 * clock gate bit is set; no power-domain bits exist on these SoCs.
 *
 * NOTE: compiled out ("#elif 0"); kept for reference only.
 */
bool vdec_on(enum vdec_type_e core)
{
	bool ret = false;

	if (core == VDEC_1) {
		if (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x100)
			ret = true;
	} else if (core == VDEC_2) {
		if (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x100)
			ret = true;
	} else if (core == VDEC_HCODEC) {
		if (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x1000000)
			ret = true;
	}

	return ret;
}
3801#endif
3802
3803int vdec_source_changed(int format, int width, int height, int fps)
3804{
3805 /* todo: add level routines for clock adjustment per chips */
3806 int ret = -1;
3807 static int on_setting;
3808
3809 if (on_setting > 0)
3810 return ret;/*on changing clk,ignore this change*/
3811
3812 if (vdec_source_get(VDEC_1) == width * height * fps)
3813 return ret;
3814
3815
3816 on_setting = 1;
3817 ret = vdec_source_changed_for_clk_set(format, width, height, fps);
3818 pr_debug("vdec1 video changed to %d x %d %d fps clk->%dMHZ\n",
3819 width, height, fps, vdec_clk_get(VDEC_1));
3820 on_setting = 0;
3821 return ret;
3822
3823}
3824EXPORT_SYMBOL(vdec_source_changed);
3825
/*
 * vdec_reset_core() - hard-reset the VDEC core logic.
 * @vdec: decoder instance (unused here; the reset is global to the core).
 *
 * Sequence: block new DMC requests from the DOS VDEC interface, spin
 * until the DMC channel reports idle, pulse the per-unit reset bits in
 * DOS_SW_RESET0, then re-enable DMC access.  Caller must ensure the core
 * is powered and clocked.
 */
void vdec_reset_core(struct vdec_s *vdec)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 13; /*bit13: DOS VDEC interface*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask = 1 << 21; /*bit21: DOS VDEC interface*/

	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/*
	 * NOTE(review): unbounded busy-wait -- if the DMC never reports the
	 * channel idle this spins forever; consider adding a timeout.
	 */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;
	/*
	 * DOS_SW_RESET0 bit map:
	 * 2: assist
	 * 3: vld_reset
	 * 4: vld_part_reset
	 * 5: vfifo reset
	 * 6: iqidct
	 * 7: mc
	 * 8: dblk
	 * 9: pic_dc
	 * 10: psc
	 * 11: mcpu
	 * 12: ccpu
	 * 13: ddr
	 * 14: afifo
	 */
	if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
		(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) {
		/* newer SoCs also reset iqidct-adjacent units (7/8/9) */
		WRITE_VREG(DOS_SW_RESET0, (1<<3)|(1<<4)|(1<<5)|(1<<7)|(1<<8)|(1<<9));
	} else {
		WRITE_VREG(DOS_SW_RESET0,
			(1<<3)|(1<<4)|(1<<5));
	}
	WRITE_VREG(DOS_SW_RESET0, 0);

	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);
}
EXPORT_SYMBOL(vdec_reset_core);
3873
/*
 * hevc_mmu_dma_check() - quiesce the HEVC SAO MMU DMA engine.
 * @vdec: decoder instance (unused; the registers are global).
 *
 * G12A-and-later only.  Waits (up to ~10 ms) for the CM core to go idle,
 * disables the SAO MMU DMA, then waits (up to ~10 ms) for the DMA status
 * to confirm it stopped; timeouts are only logged, never fatal.
 */
void hevc_mmu_dma_check(struct vdec_s *vdec)
{
	ulong timeout;
	u32 data;
	if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A)
		return;
	timeout = jiffies + HZ/100;
	/* wait for CM core busy bit (bit0) to clear */
	while (1) {
		data = READ_VREG(HEVC_CM_CORE_STATUS);
		if ((data & 0x1) == 0)
			break;
		if (time_after(jiffies, timeout)) {
			/*
			 * NOTE(review): message reads "idle" but this path is
			 * the timeout (still busy) case -- looks inverted;
			 * confirm intent before changing the string.
			 */
			if (debug & 0x10)
				pr_info(" %s sao mmu dma idle\n", __func__);
			break;
		}
	}
	/*disable sao mmu dma */
	CLEAR_VREG_MASK(HEVC_SAO_MMU_DMA_CTRL, 1 << 0);
	timeout = jiffies + HZ/100;
	/* wait for DMA-stopped status bit (bit0) to set */
	while (1) {
		data = READ_VREG(HEVC_SAO_MMU_DMA_STATUS);
		if ((data & 0x1))
			break;
		if (time_after(jiffies, timeout)) {
			if (debug & 0x10)
				pr_err("%s sao mmu dma timeout, num_buf_used = 0x%x\n",
					__func__, (READ_VREG(HEVC_SAO_MMU_STATUS) >> 16));
			break;
		}
	}
}
EXPORT_SYMBOL(hevc_mmu_dma_check);
3907
/*
 * hevc_reset_core() - hard-reset the HEVC decoder core.
 * @vdec: decoder instance, or NULL.  For NULL or frame-based input the
 *        stream controller is stopped again after the DMC drains.
 *
 * Same pattern as vdec_reset_core(): fence off the DMC (hevc, plus hevcb
 * on G12A+), wait for the channel to drain, pulse DOS_SW_RESET3, then
 * restore DMC access.
 */
void hevc_reset_core(struct vdec_s *vdec)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 4; /*bit4: hevc*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask |= 1 << 8; /*bit8: hevcb*/

	/* stop the stream fetch engine before fencing the bus */
	WRITE_VREG(HEVC_STREAM_CONTROL, 0);
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* NOTE(review): unbounded busy-wait, as in vdec_reset_core() */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;

	if (vdec == NULL || input_frame_based(vdec))
		WRITE_VREG(HEVC_STREAM_CONTROL, 0);

	/*
	 * DOS_SW_RESET3 bit map:
	 * 2: assist
	 * 3: parser
	 * 4: parser_state
	 * 8: dblk
	 * 11:mcpu
	 * 12:ccpu
	 * 13:ddr
	 * 14:iqit
	 * 15:ipp
	 * 17:qdct
	 * 18:mpred
	 * 19:sao
	 * 24:hevc_afifo
	 */
	WRITE_VREG(DOS_SW_RESET3,
		(1<<3)|(1<<4)|(1<<8)|(1<<11)|
		(1<<12)|(1<<13)|(1<<14)|(1<<15)|
		(1<<17)|(1<<18)|(1<<19)|(1<<24));

	WRITE_VREG(DOS_SW_RESET3, 0);


	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

}
EXPORT_SYMBOL(hevc_reset_core);
3960
3961int vdec2_source_changed(int format, int width, int height, int fps)
3962{
3963 int ret = -1;
3964 static int on_setting;
3965
3966 if (has_vdec2()) {
3967 /* todo: add level routines for clock adjustment per chips */
3968 if (on_setting != 0)
3969 return ret;/*on changing clk,ignore this change*/
3970
3971 if (vdec_source_get(VDEC_2) == width * height * fps)
3972 return ret;
3973
3974 on_setting = 1;
3975 ret = vdec_source_changed_for_clk_set(format,
3976 width, height, fps);
3977 pr_debug("vdec2 video changed to %d x %d %d fps clk->%dMHZ\n",
3978 width, height, fps, vdec_clk_get(VDEC_2));
3979 on_setting = 0;
3980 return ret;
3981 }
3982 return 0;
3983}
3984EXPORT_SYMBOL(vdec2_source_changed);
3985
3986int hevc_source_changed(int format, int width, int height, int fps)
3987{
3988 /* todo: add level routines for clock adjustment per chips */
3989 int ret = -1;
3990 static int on_setting;
3991
3992 if (on_setting != 0)
3993 return ret;/*on changing clk,ignore this change*/
3994
3995 if (vdec_source_get(VDEC_HEVC) == width * height * fps)
3996 return ret;
3997
3998 on_setting = 1;
3999 ret = vdec_source_changed_for_clk_set(format, width, height, fps);
4000 pr_debug("hevc video changed to %d x %d %d fps clk->%dMHZ\n",
4001 width, height, fps, vdec_clk_get(VDEC_HEVC));
4002 on_setting = 0;
4003
4004 return ret;
4005}
4006EXPORT_SYMBOL(hevc_source_changed);
4007
/*
 * AMRISC register name/offset table dumped by amrisc_regs_show():
 * master-CPU (M*) and coprocessor-CPU (C*) control registers, followed
 * by the AV_SCRATCH mailbox registers.
 */
static struct am_reg am_risc[] = {
	{"MSP", 0x300},
	{"MPSR", 0x301},
	{"MCPU_INT_BASE", 0x302},
	{"MCPU_INTR_GRP", 0x303},
	{"MCPU_INTR_MSK", 0x304},
	{"MCPU_INTR_REQ", 0x305},
	{"MPC-P", 0x306},
	{"MPC-D", 0x307},
	{"MPC_E", 0x308},
	{"MPC_W", 0x309},
	{"CSP", 0x320},
	{"CPSR", 0x321},
	{"CCPU_INT_BASE", 0x322},
	{"CCPU_INTR_GRP", 0x323},
	{"CCPU_INTR_MSK", 0x324},
	{"CCPU_INTR_REQ", 0x325},
	{"CPC-P", 0x326},
	{"CPC-D", 0x327},
	{"CPC_E", 0x328},
	{"CPC_W", 0x329},
	{"AV_SCRATCH_0", 0x09c0},
	{"AV_SCRATCH_1", 0x09c1},
	{"AV_SCRATCH_2", 0x09c2},
	{"AV_SCRATCH_3", 0x09c3},
	{"AV_SCRATCH_4", 0x09c4},
	{"AV_SCRATCH_5", 0x09c5},
	{"AV_SCRATCH_6", 0x09c6},
	{"AV_SCRATCH_7", 0x09c7},
	{"AV_SCRATCH_8", 0x09c8},
	{"AV_SCRATCH_9", 0x09c9},
	{"AV_SCRATCH_A", 0x09ca},
	{"AV_SCRATCH_B", 0x09cb},
	{"AV_SCRATCH_C", 0x09cc},
	{"AV_SCRATCH_D", 0x09cd},
	{"AV_SCRATCH_E", 0x09ce},
	{"AV_SCRATCH_F", 0x09cf},
	{"AV_SCRATCH_G", 0x09d0},
	{"AV_SCRATCH_H", 0x09d1},
	{"AV_SCRATCH_I", 0x09d2},
	{"AV_SCRATCH_J", 0x09d3},
	{"AV_SCRATCH_K", 0x09d4},
	{"AV_SCRATCH_L", 0x09d5},
	{"AV_SCRATCH_M", 0x09d6},
	{"AV_SCRATCH_N", 0x09d7},
};
4054
4055static ssize_t amrisc_regs_show(struct class *class,
4056 struct class_attribute *attr, char *buf)
4057{
4058 char *pbuf = buf;
4059 struct am_reg *regs = am_risc;
4060 int rsize = sizeof(am_risc) / sizeof(struct am_reg);
4061 int i;
4062 unsigned int val;
4063 ssize_t ret;
4064
4065 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
4066 mutex_lock(&vdec_mutex);
4067 if (!vdec_on(VDEC_1)) {
4068 mutex_unlock(&vdec_mutex);
4069 pbuf += sprintf(pbuf, "amrisc is power off\n");
4070 ret = pbuf - buf;
4071 return ret;
4072 }
4073 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4074 /*TODO:M6 define */
4075 /*
4076 * switch_mod_gate_by_type(MOD_VDEC, 1);
4077 */
4078 amports_switch_gate("vdec", 1);
4079 }
4080 pbuf += sprintf(pbuf, "amrisc registers show:\n");
4081 for (i = 0; i < rsize; i++) {
4082 val = READ_VREG(regs[i].offset);
4083 pbuf += sprintf(pbuf, "%s(%#x)\t:%#x(%d)\n",
4084 regs[i].name, regs[i].offset, val, val);
4085 }
4086 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
4087 mutex_unlock(&vdec_mutex);
4088 else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4089 /*TODO:M6 define */
4090 /*
4091 * switch_mod_gate_by_type(MOD_VDEC, 0);
4092 */
4093 amports_switch_gate("vdec", 0);
4094 }
4095 ret = pbuf - buf;
4096 return ret;
4097}
4098
4099static ssize_t dump_trace_show(struct class *class,
4100 struct class_attribute *attr, char *buf)
4101{
4102 int i;
4103 char *pbuf = buf;
4104 ssize_t ret;
4105 u16 *trace_buf = kmalloc(debug_trace_num * 2, GFP_KERNEL);
4106
4107 if (!trace_buf) {
4108 pbuf += sprintf(pbuf, "No Memory bug\n");
4109 ret = pbuf - buf;
4110 return ret;
4111 }
4112 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
4113 mutex_lock(&vdec_mutex);
4114 if (!vdec_on(VDEC_1)) {
4115 mutex_unlock(&vdec_mutex);
4116 kfree(trace_buf);
4117 pbuf += sprintf(pbuf, "amrisc is power off\n");
4118 ret = pbuf - buf;
4119 return ret;
4120 }
4121 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4122 /*TODO:M6 define */
4123 /*
4124 * switch_mod_gate_by_type(MOD_VDEC, 1);
4125 */
4126 amports_switch_gate("vdec", 1);
4127 }
4128 pr_info("dump trace steps:%d start\n", debug_trace_num);
4129 i = 0;
4130 while (i <= debug_trace_num - 16) {
4131 trace_buf[i] = READ_VREG(MPC_E);
4132 trace_buf[i + 1] = READ_VREG(MPC_E);
4133 trace_buf[i + 2] = READ_VREG(MPC_E);
4134 trace_buf[i + 3] = READ_VREG(MPC_E);
4135 trace_buf[i + 4] = READ_VREG(MPC_E);
4136 trace_buf[i + 5] = READ_VREG(MPC_E);
4137 trace_buf[i + 6] = READ_VREG(MPC_E);
4138 trace_buf[i + 7] = READ_VREG(MPC_E);
4139 trace_buf[i + 8] = READ_VREG(MPC_E);
4140 trace_buf[i + 9] = READ_VREG(MPC_E);
4141 trace_buf[i + 10] = READ_VREG(MPC_E);
4142 trace_buf[i + 11] = READ_VREG(MPC_E);
4143 trace_buf[i + 12] = READ_VREG(MPC_E);
4144 trace_buf[i + 13] = READ_VREG(MPC_E);
4145 trace_buf[i + 14] = READ_VREG(MPC_E);
4146 trace_buf[i + 15] = READ_VREG(MPC_E);
4147 i += 16;
4148 };
4149 pr_info("dump trace steps:%d finished\n", debug_trace_num);
4150 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
4151 mutex_unlock(&vdec_mutex);
4152 else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4153 /*TODO:M6 define */
4154 /*
4155 * switch_mod_gate_by_type(MOD_VDEC, 0);
4156 */
4157 amports_switch_gate("vdec", 0);
4158 }
4159 for (i = 0; i < debug_trace_num; i++) {
4160 if (i % 4 == 0) {
4161 if (i % 16 == 0)
4162 pbuf += sprintf(pbuf, "\n");
4163 else if (i % 8 == 0)
4164 pbuf += sprintf(pbuf, " ");
4165 else /* 4 */
4166 pbuf += sprintf(pbuf, " ");
4167 }
4168 pbuf += sprintf(pbuf, "%04x:", trace_buf[i]);
4169 }
4170 while (i < debug_trace_num)
4171 ;
4172 kfree(trace_buf);
4173 pbuf += sprintf(pbuf, "\n");
4174 ret = pbuf - buf;
4175 return ret;
4176}
4177
4178static ssize_t clock_level_show(struct class *class,
4179 struct class_attribute *attr, char *buf)
4180{
4181 char *pbuf = buf;
4182 size_t ret;
4183
4184 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_1));
4185
4186 if (has_vdec2())
4187 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_2));
4188
4189 if (has_hevc_vdec())
4190 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_HEVC));
4191
4192 ret = pbuf - buf;
4193 return ret;
4194}
4195
4196static ssize_t store_poweron_clock_level(struct class *class,
4197 struct class_attribute *attr,
4198 const char *buf, size_t size)
4199{
4200 unsigned int val;
4201 ssize_t ret;
4202
4203 /*ret = sscanf(buf, "%d", &val);*/
4204 ret = kstrtoint(buf, 0, &val);
4205
4206 if (ret != 0)
4207 return -EINVAL;
4208 poweron_clock_level = val;
4209 return size;
4210}
4211
4212static ssize_t show_poweron_clock_level(struct class *class,
4213 struct class_attribute *attr, char *buf)
4214{
4215 return sprintf(buf, "%d\n", poweron_clock_level);
4216}
4217
/*
 * When keep_vdec_mem == 1, the vdec "64" memory region is never
 * released, so that subsequent playback can start faster.
 */
4223static ssize_t store_keep_vdec_mem(struct class *class,
4224 struct class_attribute *attr,
4225 const char *buf, size_t size)
4226{
4227 unsigned int val;
4228 ssize_t ret;
4229
4230 /*ret = sscanf(buf, "%d", &val);*/
4231 ret = kstrtoint(buf, 0, &val);
4232 if (ret != 0)
4233 return -EINVAL;
4234 keep_vdec_mem = val;
4235 return size;
4236}
4237
4238static ssize_t show_keep_vdec_mem(struct class *class,
4239 struct class_attribute *attr, char *buf)
4240{
4241 return sprintf(buf, "%d\n", keep_vdec_mem);
4242}
4243
4244#ifdef VDEC_DEBUG_SUPPORT
4245static ssize_t store_debug(struct class *class,
4246 struct class_attribute *attr,
4247 const char *buf, size_t size)
4248{
4249 struct vdec_s *vdec;
4250 struct vdec_core_s *core = vdec_core;
4251 unsigned long flags;
4252
4253 unsigned id;
4254 unsigned val;
4255 ssize_t ret;
4256 char cbuf[32];
4257
4258 cbuf[0] = 0;
4259 ret = sscanf(buf, "%s %x %x", cbuf, &id, &val);
4260 /*pr_info(
4261 "%s(%s)=>ret %ld: %s, %x, %x\n",
4262 __func__, buf, ret, cbuf, id, val);*/
4263 if (strcmp(cbuf, "schedule") == 0) {
4264 pr_info("VDEC_DEBUG: force schedule\n");
4265 up(&core->sem);
4266 } else if (strcmp(cbuf, "power_off") == 0) {
4267 pr_info("VDEC_DEBUG: power off core %d\n", id);
4268 vdec_poweroff(id);
4269 } else if (strcmp(cbuf, "power_on") == 0) {
4270 pr_info("VDEC_DEBUG: power_on core %d\n", id);
4271 vdec_poweron(id);
4272 } else if (strcmp(cbuf, "wr") == 0) {
4273 pr_info("VDEC_DEBUG: WRITE_VREG(0x%x, 0x%x)\n",
4274 id, val);
4275 WRITE_VREG(id, val);
4276 } else if (strcmp(cbuf, "rd") == 0) {
4277 pr_info("VDEC_DEBUG: READ_VREG(0x%x) = 0x%x\n",
4278 id, READ_VREG(id));
4279 } else if (strcmp(cbuf, "read_hevc_clk_reg") == 0) {
4280 pr_info(
4281 "VDEC_DEBUG: HHI_VDEC4_CLK_CNTL = 0x%x, HHI_VDEC2_CLK_CNTL = 0x%x\n",
4282 READ_HHI_REG(HHI_VDEC4_CLK_CNTL),
4283 READ_HHI_REG(HHI_VDEC2_CLK_CNTL));
4284 }
4285
4286 flags = vdec_core_lock(vdec_core);
4287
4288 list_for_each_entry(vdec,
4289 &core->connected_vdec_list, list) {
4290 pr_info("vdec: status %d, id %d\n", vdec->status, vdec->id);
4291 if (((vdec->status == VDEC_STATUS_CONNECTED
4292 || vdec->status == VDEC_STATUS_ACTIVE)) &&
4293 (vdec->id == id)) {
4294 /*to add*/
4295 break;
4296 }
4297 }
4298 vdec_core_unlock(vdec_core, flags);
4299 return size;
4300}
4301
4302static ssize_t show_debug(struct class *class,
4303 struct class_attribute *attr, char *buf)
4304{
4305 char *pbuf = buf;
4306 struct vdec_s *vdec;
4307 struct vdec_core_s *core = vdec_core;
4308 unsigned long flags = vdec_core_lock(vdec_core);
4309 u64 tmp;
4310
4311 pbuf += sprintf(pbuf,
4312 "============== help:\n");
4313 pbuf += sprintf(pbuf,
4314 "'echo xxx > debug' usuage:\n");
4315 pbuf += sprintf(pbuf,
4316 "schedule - trigger schedule thread to run\n");
4317 pbuf += sprintf(pbuf,
4318 "power_off core_num - call vdec_poweroff(core_num)\n");
4319 pbuf += sprintf(pbuf,
4320 "power_on core_num - call vdec_poweron(core_num)\n");
4321 pbuf += sprintf(pbuf,
4322 "wr adr val - call WRITE_VREG(adr, val)\n");
4323 pbuf += sprintf(pbuf,
4324 "rd adr - call READ_VREG(adr)\n");
4325 pbuf += sprintf(pbuf,
4326 "read_hevc_clk_reg - read HHI register for hevc clk\n");
4327 pbuf += sprintf(pbuf,
4328 "===================\n");
4329
4330 pbuf += sprintf(pbuf,
4331 "name(core)\tschedule_count\trun_count\tinput_underrun\tdecbuf_not_ready\trun_time\n");
4332 list_for_each_entry(vdec,
4333 &core->connected_vdec_list, list) {
4334 enum vdec_type_e type;
4335 if ((vdec->status == VDEC_STATUS_CONNECTED
4336 || vdec->status == VDEC_STATUS_ACTIVE)) {
4337 for (type = VDEC_1; type < VDEC_MAX; type++) {
4338 if (vdec->core_mask & (1 << type)) {
4339 pbuf += sprintf(pbuf, "%s(%d):",
4340 vdec->vf_provider_name, type);
4341 pbuf += sprintf(pbuf, "\t%d",
4342 vdec->check_count[type]);
4343 pbuf += sprintf(pbuf, "\t%d",
4344 vdec->run_count[type]);
4345 pbuf += sprintf(pbuf, "\t%d",
4346 vdec->input_underrun_count[type]);
4347 pbuf += sprintf(pbuf, "\t%d",
4348 vdec->not_run_ready_count[type]);
4349 tmp = vdec->run_clk[type] * 100;
4350 do_div(tmp, vdec->total_clk[type]);
4351 pbuf += sprintf(pbuf,
4352 "\t%d%%\n",
4353 vdec->total_clk[type] == 0 ? 0 :
4354 (u32)tmp);
4355 }
4356 }
4357 }
4358 }
4359
4360 vdec_core_unlock(vdec_core, flags);
4361 return pbuf - buf;
4362
4363}
4364#endif
4365
4366/*irq num as same as .dts*/
4367/*
4368 * interrupts = <0 3 1
4369 * 0 23 1
4370 * 0 32 1
4371 * 0 43 1
4372 * 0 44 1
4373 * 0 45 1>;
4374 * interrupt-names = "vsync",
4375 * "demux",
4376 * "parser",
4377 * "mailbox_0",
4378 * "mailbox_1",
4379 * "mailbox_2";
4380 */
/*
 * vdec_request_threaded_irq() - attach a (threaded) handler to one of the
 * driver-owned decoder interrupt lines.
 * @num:      logical line index (must be < VDEC_IRQ_MAX); maps to the
 *            platform IRQ resources listed in the dts comment above.
 * @handler:  hard-IRQ handler stored in the per-line isr_context and
 *            dispatched from the driver's own vdec_isr trampoline.
 * @thread_fn: optional threaded handler; when set, the line is requested
 *            with IRQF_ONESHOT and @irqflags is ignored.
 * @irqflags: flags for request_threaded_irq() when @thread_fn is NULL.
 * @devname:  name shown in /proc/interrupts.
 * @dev:      opaque cookie passed back to the handlers.
 *
 * The real kernel IRQ is requested only once per line (first caller);
 * later callers simply swap in their handler pointers, so the line is
 * shared over the lifetime of the driver.
 *
 * Returns 0 on success, -EINVAL for a bad @num or missing platform IRQ,
 * -EIO if request_threaded_irq() fails.
 */
s32 vdec_request_threaded_irq(enum vdec_irq_num num,
			irq_handler_t handler,
			irq_handler_t thread_fn,
			unsigned long irqflags,
			const char *devname, void *dev)
{
	s32 res_irq;
	s32 ret = 0;

	if (num >= VDEC_IRQ_MAX) {
		pr_err("[%s] request irq error, irq num too big!", __func__);
		return -EINVAL;
	}

	if (vdec_core->isr_context[num].irq < 0) {
		/* first user of this line: resolve and request the real IRQ */
		res_irq = platform_get_irq(
			vdec_core->vdec_core_platform_device, num);
		if (res_irq < 0) {
			pr_err("[%s] get irq error!", __func__);
			return -EINVAL;
		}

		vdec_core->isr_context[num].irq = res_irq;
		vdec_core->isr_context[num].dev_isr = handler;
		vdec_core->isr_context[num].dev_threaded_isr = thread_fn;
		vdec_core->isr_context[num].dev_id = dev;

		ret = request_threaded_irq(res_irq,
			vdec_isr,
			vdec_thread_isr,
			(thread_fn) ? IRQF_ONESHOT : irqflags,
			devname,
			&vdec_core->isr_context[num]);

		if (ret) {
			/* roll back the context so a retry can re-request */
			vdec_core->isr_context[num].irq = -1;
			vdec_core->isr_context[num].dev_isr = NULL;
			vdec_core->isr_context[num].dev_threaded_isr = NULL;
			vdec_core->isr_context[num].dev_id = NULL;

			pr_err("vdec irq register error for %s.\n", devname);
			return -EIO;
		}
	} else {
		/* line already requested: just retarget the handlers */
		vdec_core->isr_context[num].dev_isr = handler;
		vdec_core->isr_context[num].dev_threaded_isr = thread_fn;
		vdec_core->isr_context[num].dev_id = dev;
	}

	return ret;
}
EXPORT_SYMBOL(vdec_request_threaded_irq);
4433
4434s32 vdec_request_irq(enum vdec_irq_num num, irq_handler_t handler,
4435 const char *devname, void *dev)
4436{
4437 pr_debug("vdec_request_irq %p, %s\n", handler, devname);
4438
4439 return vdec_request_threaded_irq(num,
4440 handler,
4441 NULL,/*no thread_fn*/
4442 IRQF_SHARED,
4443 devname,
4444 dev);
4445}
4446EXPORT_SYMBOL(vdec_request_irq);
4447
/*
 * vdec_free_irq() - detach the current handler from a decoder interrupt
 * line.  The kernel IRQ itself stays requested (it is shared for the
 * driver's lifetime); only the dispatch pointers are cleared, then
 * synchronize_irq() waits out any in-flight handler before returning.
 * @num: logical line index (must be < VDEC_IRQ_MAX).
 * @dev: cookie previously passed to vdec_request_*irq() (unused here).
 */
void vdec_free_irq(enum vdec_irq_num num, void *dev)
{
	if (num >= VDEC_IRQ_MAX) {
		pr_err("[%s] request irq error, irq num too big!", __func__);
		return;
	}
	/*
	 *assume amrisc is stopped already and there is no mailbox interrupt
	 * when we reset pointers here.
	 */
	vdec_core->isr_context[num].dev_isr = NULL;
	vdec_core->isr_context[num].dev_threaded_isr = NULL;
	vdec_core->isr_context[num].dev_id = NULL;
	synchronize_irq(vdec_core->isr_context[num].irq);
}
EXPORT_SYMBOL(vdec_free_irq);
4464
4465struct vdec_s *vdec_get_default_vdec_for_userdata(void)
4466{
4467 struct vdec_s *vdec;
4468 struct vdec_s *ret_vdec;
4469 struct vdec_core_s *core = vdec_core;
4470 unsigned long flags;
4471 int id;
4472
4473 flags = vdec_core_lock(vdec_core);
4474
4475 id = 0x10000000;
4476 ret_vdec = NULL;
4477 if (!list_empty(&core->connected_vdec_list)) {
4478 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4479 if (vdec->id < id) {
4480 id = vdec->id;
4481 ret_vdec = vdec;
4482 }
4483 }
4484 }
4485
4486 vdec_core_unlock(vdec_core, flags);
4487
4488 return ret_vdec;
4489}
4490EXPORT_SYMBOL(vdec_get_default_vdec_for_userdata);
4491
4492struct vdec_s *vdec_get_vdec_by_id(int vdec_id)
4493{
4494 struct vdec_s *vdec;
4495 struct vdec_s *ret_vdec;
4496 struct vdec_core_s *core = vdec_core;
4497 unsigned long flags;
4498
4499 flags = vdec_core_lock(vdec_core);
4500
4501 ret_vdec = NULL;
4502 if (!list_empty(&core->connected_vdec_list)) {
4503 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4504 if (vdec->id == vdec_id) {
4505 ret_vdec = vdec;
4506 break;
4507 }
4508 }
4509 }
4510
4511 vdec_core_unlock(vdec_core, flags);
4512
4513 return ret_vdec;
4514}
4515EXPORT_SYMBOL(vdec_get_vdec_by_id);
4516
4517int vdec_read_user_data(struct vdec_s *vdec,
4518 struct userdata_param_t *p_userdata_param)
4519{
4520 int ret = 0;
4521
4522 if (!vdec)
4523 vdec = vdec_get_default_vdec_for_userdata();
4524
4525 if (vdec) {
4526 if (vdec->user_data_read)
4527 ret = vdec->user_data_read(vdec, p_userdata_param);
4528 }
4529 return ret;
4530}
4531EXPORT_SYMBOL(vdec_read_user_data);
4532
4533int vdec_wakeup_userdata_poll(struct vdec_s *vdec)
4534{
4535 if (vdec) {
4536 if (vdec->wakeup_userdata_poll)
4537 vdec->wakeup_userdata_poll(vdec);
4538 }
4539
4540 return 0;
4541}
4542EXPORT_SYMBOL(vdec_wakeup_userdata_poll);
4543
4544void vdec_reset_userdata_fifo(struct vdec_s *vdec, int bInit)
4545{
4546 if (!vdec)
4547 vdec = vdec_get_default_vdec_for_userdata();
4548
4549 if (vdec) {
4550 if (vdec->reset_userdata_fifo)
4551 vdec->reset_userdata_fifo(vdec, bInit);
4552 }
4553}
4554EXPORT_SYMBOL(vdec_reset_userdata_fifo);
4555
4556static int dump_mode;
4557static ssize_t dump_risc_mem_store(struct class *class,
4558 struct class_attribute *attr,
4559 const char *buf, size_t size)/*set*/
4560{
4561 unsigned int val;
4562 ssize_t ret;
4563 char dump_mode_str[4] = "PRL";
4564
4565 /*ret = sscanf(buf, "%d", &val);*/
4566 ret = kstrtoint(buf, 0, &val);
4567
4568 if (ret != 0)
4569 return -EINVAL;
4570 dump_mode = val & 0x3;
4571 pr_info("set dump mode to %d,%c_mem\n",
4572 dump_mode, dump_mode_str[dump_mode]);
4573 return size;
4574}
4575static u32 read_amrisc_reg(int reg)
4576{
4577 WRITE_VREG(0x31b, reg);
4578 return READ_VREG(0x31c);
4579}
4580
/*
 * dump_pmem() - dump amrisc program memory to the kernel log.
 *
 * Configures the indirect-access window for pmem (select 0), then
 * prints each word in the same layout as a .o listing.
 */
static void dump_pmem(void)
{
	int addr;

	WRITE_VREG(0x301, 0x8000);
	WRITE_VREG(0x31d, 0);
	pr_info("start dump amrisc pmem of risc\n");
	for (addr = 0; addr < 0xfff; addr++)
		pr_info("%08x // 0x%04x:\n", read_amrisc_reg(addr), addr);
}
4593
/*
 * dump_lmem() - dump amrisc local memory to the kernel log.
 *
 * Configures the indirect-access window for lmem (select 2), then
 * prints each word with its address.
 */
static void dump_lmem(void)
{
	int addr;

	WRITE_VREG(0x301, 0x8000);
	WRITE_VREG(0x31d, 2);
	pr_info("start dump amrisc lmem\n");
	for (addr = 0; addr < 0x3ff; addr++)
		pr_info("[%04x] = 0x%08x:\n", addr, read_amrisc_reg(addr));
}
4606
/*
 * dump_risc_mem_show() - sysfs trigger for dumping amrisc memory.
 *
 * Dumps the memory region selected by dump_risc_mem_store (0: pmem,
 * 2: lmem) to the kernel log.  The sysfs read itself only returns
 * "done" (or "amrisc is power off" when VDEC is not powered).
 */
static ssize_t dump_risc_mem_show(struct class *class,
			struct class_attribute *attr, char *buf)
{
	char *pbuf = buf;
	int ret;

	/*
	 * The VDEC power/clock domain must be up before touching amrisc
	 * memory.  On M8+ parts vdec_mutex guards the power state and we
	 * bail out early if VDEC_1 is off; on M6 parts the clock gate is
	 * toggled around the dump instead.
	 */
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
		mutex_lock(&vdec_mutex);
		if (!vdec_on(VDEC_1)) {
			mutex_unlock(&vdec_mutex);
			pbuf += sprintf(pbuf, "amrisc is power off\n");
			ret = pbuf - buf;
			return ret;
		}
	} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
		/*TODO:M6 define */
		/*
		 * switch_mod_gate_by_type(MOD_VDEC, 1);
		 */
		amports_switch_gate("vdec", 1);
	}
	/* Dump the region selected by dump_risc_mem_store. */
	switch (dump_mode) {
	case 0:
		dump_pmem();
		break;
	case 2:
		dump_lmem();
		break;
	default:
		break;
	}

	/* Undo the power/clock handling taken above. */
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
		mutex_unlock(&vdec_mutex);
	else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
		/*TODO:M6 define */
		/*
		 * switch_mod_gate_by_type(MOD_VDEC, 0);
		 */
		amports_switch_gate("vdec", 0);
	}
	return sprintf(buf, "done\n");
}
4652
4653static ssize_t core_show(struct class *class, struct class_attribute *attr,
4654 char *buf)
4655{
4656 struct vdec_core_s *core = vdec_core;
4657 char *pbuf = buf;
4658
4659 if (list_empty(&core->connected_vdec_list))
4660 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4661 else {
4662 struct vdec_s *vdec;
4663
4664 pbuf += sprintf(pbuf,
4665 " Core: last_sched %p, sched_mask %lx\n",
4666 core->last_vdec,
4667 core->sched_mask);
4668
4669 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4670 pbuf += sprintf(pbuf,
4671 "\tvdec.%d (%p (%s)), status = %s,\ttype = %s, \tactive_mask = %lx\n",
4672 vdec->id,
4673 vdec,
4674 vdec_device_name[vdec->format * 2],
4675 vdec_status_str(vdec),
4676 vdec_type_str(vdec),
4677 vdec->active_mask);
4678 }
4679 }
4680
4681 return pbuf - buf;
4682}
4683
/*
 * vdec_status_show() - sysfs dump of per-channel decoder statistics.
 *
 * For every connected or active instance, queries vdec_status() and
 * prints one labelled statistics section.  The whole walk happens
 * under the core lock so instances cannot disappear mid-dump.
 * NOTE(review): the output format is userspace-visible; do not
 * change the field labels or ordering without auditing consumers.
 */
static ssize_t vdec_status_show(struct class *class,
			struct class_attribute *attr, char *buf)
{
	char *pbuf = buf;
	struct vdec_s *vdec;
	struct vdec_info vs;
	unsigned char vdec_num = 0;
	struct vdec_core_s *core = vdec_core;
	unsigned long flags = vdec_core_lock(vdec_core);

	if (list_empty(&core->connected_vdec_list)) {
		pbuf += sprintf(pbuf, "No vdec.\n");
		goto out;
	}

	list_for_each_entry(vdec, &core->connected_vdec_list, list) {
		/* Only instances actually running have meaningful stats. */
		if ((vdec->status == VDEC_STATUS_CONNECTED
			|| vdec->status == VDEC_STATUS_ACTIVE)) {
			memset(&vs, 0, sizeof(vs));
			if (vdec_status(vdec, &vs)) {
				pbuf += sprintf(pbuf, "err.\n");
				goto out;
			}
			pbuf += sprintf(pbuf,
				"vdec channel %u statistics:\n",
				vdec_num);
			pbuf += sprintf(pbuf,
				"%13s : %s\n", "device name",
				vs.vdec_name);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame width",
				vs.frame_width);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame height",
				vs.frame_height);
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "frame rate",
				vs.frame_rate, "fps");
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "bit rate",
				vs.bit_rate / 1024 * 8, "kbps");
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "status",
				vs.status);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame dur",
				vs.frame_dur);
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "frame data",
				vs.frame_data / 1024, "KB");
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame count",
				vs.frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "drop count",
				vs.drop_frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "fra err count",
				vs.error_frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "hw err count",
				vs.error_count);
			pbuf += sprintf(pbuf,
				"%13s : %llu %s\n", "total data",
				vs.total_data / 1024, "KB");
			pbuf += sprintf(pbuf,
				"%13s : %x\n\n", "ratio_control",
				vs.ratio_control);

			vdec_num++;
		}
	}
out:
	vdec_core_unlock(vdec_core, flags);
	return pbuf - buf;
}
4760
4761static ssize_t dump_vdec_blocks_show(struct class *class,
4762 struct class_attribute *attr, char *buf)
4763{
4764 struct vdec_core_s *core = vdec_core;
4765 char *pbuf = buf;
4766 unsigned long flags = vdec_core_lock(vdec_core);
4767
4768 if (list_empty(&core->connected_vdec_list))
4769 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4770 else {
4771 struct vdec_s *vdec;
4772 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4773 pbuf += vdec_input_dump_blocks(&vdec->input,
4774 pbuf, PAGE_SIZE - (pbuf - buf));
4775 }
4776 }
4777 vdec_core_unlock(vdec_core, flags);
4778
4779 return pbuf - buf;
4780}
4781static ssize_t dump_vdec_chunks_show(struct class *class,
4782 struct class_attribute *attr, char *buf)
4783{
4784 struct vdec_core_s *core = vdec_core;
4785 char *pbuf = buf;
4786 unsigned long flags = vdec_core_lock(vdec_core);
4787
4788 if (list_empty(&core->connected_vdec_list))
4789 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4790 else {
4791 struct vdec_s *vdec;
4792 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4793 pbuf += vdec_input_dump_chunks(vdec->id, &vdec->input,
4794 pbuf, PAGE_SIZE - (pbuf - buf));
4795 }
4796 }
4797 vdec_core_unlock(vdec_core, flags);
4798
4799 return pbuf - buf;
4800}
4801
4802static ssize_t dump_decoder_state_show(struct class *class,
4803 struct class_attribute *attr, char *buf)
4804{
4805 char *pbuf = buf;
4806 struct vdec_s *vdec;
4807 struct vdec_core_s *core = vdec_core;
4808 unsigned long flags = vdec_core_lock(vdec_core);
4809
4810 if (list_empty(&core->connected_vdec_list)) {
4811 pbuf += sprintf(pbuf, "No vdec.\n");
4812 } else {
4813 list_for_each_entry(vdec,
4814 &core->connected_vdec_list, list) {
4815 if ((vdec->status == VDEC_STATUS_CONNECTED
4816 || vdec->status == VDEC_STATUS_ACTIVE)
4817 && vdec->dump_state)
4818 vdec->dump_state(vdec);
4819 }
4820 }
4821 vdec_core_unlock(vdec_core, flags);
4822
4823 return pbuf - buf;
4824}
4825
4826static ssize_t dump_fps_show(struct class *class,
4827 struct class_attribute *attr, char *buf)
4828{
4829 char *pbuf = buf;
4830 struct vdec_core_s *core = vdec_core;
4831 int i;
4832
4833 unsigned long flags = vdec_fps_lock(vdec_core);
4834 for (i = 0; i < MAX_INSTANCE_MUN; i++)
4835 pbuf += sprintf(pbuf, "%d ", core->decode_fps[i].fps);
4836
4837 pbuf += sprintf(pbuf, "\n");
4838 vdec_fps_unlock(vdec_core, flags);
4839
4840 return pbuf - buf;
4841}
4842
4843
4844
/*
 * sysfs attributes exposed under /sys/class/vdec/.  Read-only entries
 * use __ATTR_RO (bound to <name>_show); read-write entries list their
 * show/store handlers explicitly.  Terminated by __ATTR_NULL.
 */
static struct class_attribute vdec_class_attrs[] = {
	__ATTR_RO(amrisc_regs),
	__ATTR_RO(dump_trace),
	__ATTR_RO(clock_level),
	__ATTR(poweron_clock_level, S_IRUGO | S_IWUSR | S_IWGRP,
	show_poweron_clock_level, store_poweron_clock_level),
	__ATTR(dump_risc_mem, S_IRUGO | S_IWUSR | S_IWGRP,
	dump_risc_mem_show, dump_risc_mem_store),
	__ATTR(keep_vdec_mem, S_IRUGO | S_IWUSR | S_IWGRP,
	show_keep_vdec_mem, store_keep_vdec_mem),
	__ATTR_RO(core),
	__ATTR_RO(vdec_status),
	__ATTR_RO(dump_vdec_blocks),
	__ATTR_RO(dump_vdec_chunks),
	__ATTR_RO(dump_decoder_state),
#ifdef VDEC_DEBUG_SUPPORT
	__ATTR(debug, S_IRUGO | S_IWUSR | S_IWGRP,
	show_debug, store_debug),
#endif
#ifdef FRAME_CHECK
	__ATTR(dump_yuv, S_IRUGO | S_IWUSR | S_IWGRP,
	dump_yuv_show, dump_yuv_store),
	__ATTR(frame_check, S_IRUGO | S_IWUSR | S_IWGRP,
	frame_check_show, frame_check_store),
#endif
	__ATTR_RO(dump_fps),
	__ATTR_NULL
};
4873
/* The "vdec" device class; creates /sys/class/vdec/ with the attrs above. */
static struct class vdec_class = {
		.name = "vdec",
		.class_attrs = vdec_class_attrs,
	};
4878
4879struct device *get_vdec_device(void)
4880{
4881 return &vdec_core->vdec_core_platform_device->dev;
4882}
4883EXPORT_SYMBOL(get_vdec_device);
4884
4885static int vdec_probe(struct platform_device *pdev)
4886{
4887 s32 i, r;
4888
4889 vdec_core = (struct vdec_core_s *)devm_kzalloc(&pdev->dev,
4890 sizeof(struct vdec_core_s), GFP_KERNEL);
4891 if (vdec_core == NULL) {
4892 pr_err("vdec core allocation failed.\n");
4893 return -ENOMEM;
4894 }
4895
4896 atomic_set(&vdec_core->vdec_nr, 0);
4897 sema_init(&vdec_core->sem, 1);
4898
4899 r = class_register(&vdec_class);
4900 if (r) {
4901 pr_info("vdec class create fail.\n");
4902 return r;
4903 }
4904
4905 vdec_core->vdec_core_platform_device = pdev;
4906
4907 platform_set_drvdata(pdev, vdec_core);
4908
4909 for (i = 0; i < VDEC_IRQ_MAX; i++) {
4910 vdec_core->isr_context[i].index = i;
4911 vdec_core->isr_context[i].irq = -1;
4912 }
4913
4914 r = vdec_request_threaded_irq(VDEC_IRQ_0, NULL, NULL,
4915 IRQF_ONESHOT, "vdec-0", NULL);
4916 if (r < 0) {
4917 pr_err("vdec interrupt request failed\n");
4918 return r;
4919 }
4920
4921 r = vdec_request_threaded_irq(VDEC_IRQ_1, NULL, NULL,
4922 IRQF_ONESHOT, "vdec-1", NULL);
4923 if (r < 0) {
4924 pr_err("vdec interrupt request failed\n");
4925 return r;
4926 }
4927#if 0
4928 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4929 r = vdec_request_threaded_irq(VDEC_IRQ_HEVC_BACK, NULL, NULL,
4930 IRQF_ONESHOT, "vdec-hevc_back", NULL);
4931 if (r < 0) {
4932 pr_err("vdec interrupt request failed\n");
4933 return r;
4934 }
4935 }
4936#endif
4937 r = of_reserved_mem_device_init(&pdev->dev);
4938 if (r == 0)
4939 pr_info("vdec_probe done\n");
4940
4941 vdec_core->cma_dev = &pdev->dev;
4942
4943 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_M8) {
4944 /* default to 250MHz */
4945 vdec_clock_hi_enable();
4946 }
4947
4948 if (get_cpu_major_id() == AM_MESON_CPU_MAJOR_ID_GXBB) {
4949 /* set vdec dmc request to urgent */
4950 WRITE_DMCREG(DMC_AM5_CHAN_CTRL, 0x3f203cf);
4951 }
4952 INIT_LIST_HEAD(&vdec_core->connected_vdec_list);
4953 spin_lock_init(&vdec_core->lock);
4954 spin_lock_init(&vdec_core->canvas_lock);
4955 spin_lock_init(&vdec_core->fps_lock);
4956 spin_lock_init(&vdec_core->input_lock);
4957 ida_init(&vdec_core->ida);
4958 vdec_core->thread = kthread_run(vdec_core_thread, vdec_core,
4959 "vdec-core");
4960
4961 vdec_core->vdec_core_wq = alloc_ordered_workqueue("%s",__WQ_LEGACY |
4962 WQ_MEM_RECLAIM |WQ_HIGHPRI/*high priority*/, "vdec-work");
4963 /*work queue priority lower than vdec-core.*/
4964 return 0;
4965}
4966
4967static int vdec_remove(struct platform_device *pdev)
4968{
4969 int i;
4970
4971 for (i = 0; i < VDEC_IRQ_MAX; i++) {
4972 if (vdec_core->isr_context[i].irq >= 0) {
4973 free_irq(vdec_core->isr_context[i].irq,
4974 &vdec_core->isr_context[i]);
4975 vdec_core->isr_context[i].irq = -1;
4976 vdec_core->isr_context[i].dev_isr = NULL;
4977 vdec_core->isr_context[i].dev_threaded_isr = NULL;
4978 vdec_core->isr_context[i].dev_id = NULL;
4979 }
4980 }
4981
4982 kthread_stop(vdec_core->thread);
4983
4984 destroy_workqueue(vdec_core->vdec_core_wq);
4985 class_unregister(&vdec_class);
4986
4987 return 0;
4988}
4989
/* Device-tree match table; binds this driver to "amlogic, vdec" nodes. */
static const struct of_device_id amlogic_vdec_dt_match[] = {
	{
		.compatible = "amlogic, vdec",
	},
	{},
};
4996
/* Tunables exported through the media config ("mconfig") framework;
 * registered under "media.decoder.vdec" in vdec_module_init().
 */
static struct mconfig vdec_configs[] = {
	MC_PU32("debug_trace_num", &debug_trace_num),
	MC_PI32("hevc_max_reset_count", &hevc_max_reset_count),
	MC_PU32("clk_config", &clk_config),
	MC_PI32("step_mode", &step_mode),
	MC_PI32("poweron_clock_level", &poweron_clock_level),
};
static struct mconfig_node vdec_node;
5005
/* Platform driver glue; matched against the device tree table above. */
static struct platform_driver vdec_driver = {
	.probe = vdec_probe,
	.remove = vdec_remove,
	.driver = {
		.name = "vdec",
		.of_match_table = amlogic_vdec_dt_match,
	}
};
5014
/* Codec-profile entry advertising drm_framemode support for vdec input. */
static struct codec_profile_t amvdec_input_profile = {
	.name = "vdec_input",
	.profile = "drm_framemode"
};
5019
5020int vdec_module_init(void)
5021{
5022 if (platform_driver_register(&vdec_driver)) {
5023 pr_info("failed to register vdec module\n");
5024 return -ENODEV;
5025 }
5026 INIT_REG_NODE_CONFIGS("media.decoder", &vdec_node,
5027 "vdec", vdec_configs, CONFIG_FOR_RW);
5028 vcodec_profile_register(&amvdec_input_profile);
5029 return 0;
5030}
5031EXPORT_SYMBOL(vdec_module_init);
5032
/* vdec_module_exit() - unregister the vdec platform driver. */
void vdec_module_exit(void)
{
	platform_driver_unregister(&vdec_driver);
}
EXPORT_SYMBOL(vdec_module_exit);
5038
5039#if 0
5040static int __init vdec_module_init(void)
5041{
5042 if (platform_driver_register(&vdec_driver)) {
5043 pr_info("failed to register vdec module\n");
5044 return -ENODEV;
5045 }
5046 INIT_REG_NODE_CONFIGS("media.decoder", &vdec_node,
5047 "vdec", vdec_configs, CONFIG_FOR_RW);
5048 return 0;
5049}
5050
5051static void __exit vdec_module_exit(void)
5052{
5053 platform_driver_unregister(&vdec_driver);
5054}
5055#endif
5056
/*
 * Reserved-memory device_init hook: record the device that owns the
 * vdec reserved region so later CMA allocations use it.
 */
static int vdec_mem_device_init(struct reserved_mem *rmem, struct device *dev)
{
	vdec_core->cma_dev = dev;

	return 0;
}
5063
/* Ops bound to the "amlogic, vdec-memory" reserved region below. */
static const struct reserved_mem_ops rmem_vdec_ops = {
	.device_init = vdec_mem_device_init,
};
5067
/*
 * Early-boot setup for the vdec reserved-memory region; installs the
 * ops so device_init runs when the region is attached to a device.
 */
static int __init vdec_mem_setup(struct reserved_mem *rmem)
{
	rmem->ops = &rmem_vdec_ops;
	pr_info("vdec: reserved mem setup\n");

	return 0;
}
5075
5076void vdec_fill_frame_info(struct vframe_qos_s *vframe_qos, int debug)
5077{
5078 if (frame_info_buf_in == NULL) {
5079 pr_info("error,frame_info_buf_in is null\n");
5080 return;
5081 }
5082 if (frame_info_buf_out == NULL) {
5083 pr_info("error,frame_info_buf_out is null\n");
5084 return;
5085 }
5086 if (frame_qos_wr >= QOS_FRAME_NUM)
5087 frame_qos_wr = 0;
5088
5089 if (frame_qos_wr >= QOS_FRAME_NUM ||
5090 frame_qos_wr < 0) {
5091 pr_info("error,index :%d is error\n", frame_qos_wr);
5092 return;
5093 }
5094 if (frameinfo_flag == DISABLE_FRAME_INFO)
5095 return;
5096
5097 if (frameinfo_flag == PRINT_FRAME_INFO) {
5098 pr_info("num %d size %d pts %d\n",
5099 vframe_qos->num,
5100 vframe_qos->size,
5101 vframe_qos->pts);
5102 pr_info("mv min_mv %d avg_mv %d max_mv %d\n",
5103 vframe_qos->min_mv,
5104 vframe_qos->avg_mv,
5105 vframe_qos->max_mv);
5106 pr_info("qp min_qp %d avg_qp %d max_qp %d\n",
5107 vframe_qos->min_qp,
5108 vframe_qos->avg_qp,
5109 vframe_qos->max_qp);
5110 pr_info("skip min_skip %d avg_skip %d max_skip %d\n",
5111 vframe_qos->min_skip,
5112 vframe_qos->avg_skip,
5113 vframe_qos->max_skip);
5114 }
5115 memcpy(&frame_info_buf_in[frame_qos_wr++],
5116 vframe_qos, sizeof(struct vframe_qos_s));
5117 if (frame_qos_wr >= QOS_FRAME_NUM)
5118 frame_qos_wr = 0;
5119
5120 /*pr_info("frame_qos_wr:%d\n", frame_qos_wr);*/
5121
5122}
5123EXPORT_SYMBOL(vdec_fill_frame_info);
5124
/*
 * vdec_get_qos_info() - drain the frame-info ring into the out buffer.
 *
 * Snapshots the write index, then copies the records between the read
 * index and that snapshot (in at most two segments when the window
 * wraps) into frame_info_buf_out, advancing the read index to the
 * snapshot.  Returns the out buffer, or NULL when either ring buffer
 * has not been allocated.
 *
 * NOTE(review): there is no locking against concurrent
 * vdec_fill_frame_info() writers; the wr snapshot only bounds the
 * copy, it does not make the drain atomic.
 */
struct vframe_qos_s *vdec_get_qos_info(void)
{
	int write_count = 0;
	int qos_wr = frame_qos_wr;

	if (frame_info_buf_in == NULL) {
		pr_info("error,frame_info_buf_in is null\n");
		return NULL;
	}
	if (frame_info_buf_out == NULL) {
		pr_info("error,frame_info_buf_out is null\n");
		return NULL;
	}


	memset(frame_info_buf_out, 0,
		QOS_FRAME_NUM*sizeof(struct vframe_qos_s));
	/* rd > wr: window wraps — copy [rd, end) then [0, wr). */
	if (frame_qos_rd > qos_wr) {
		write_count = QOS_FRAME_NUM - frame_qos_rd;
		if (write_count > 0 && write_count <= QOS_FRAME_NUM) {
			memcpy(frame_info_buf_out, &frame_info_buf_in[0],
				write_count*sizeof(struct vframe_qos_s));
			if ((write_count + qos_wr) <= QOS_FRAME_NUM)
				memcpy(&frame_info_buf_out[write_count], frame_info_buf_in,
					qos_wr*sizeof(struct vframe_qos_s));
			else
				pr_info("get_qos_info:%d,out of range\n", __LINE__);
		} else
			pr_info("get_qos_info:%d,out of range\n", __LINE__);
	} else if (frame_qos_rd < qos_wr) {
		/* rd < wr: single contiguous segment [rd, wr). */
		write_count = qos_wr - frame_qos_rd;
		if (write_count > 0 && write_count < QOS_FRAME_NUM)
			memcpy(frame_info_buf_out, &frame_info_buf_in[frame_qos_rd],
				(write_count)*sizeof(struct vframe_qos_s));
		else
			pr_info("get_qos_info:%d, out of range\n", __LINE__);
	}
	/*
	pr_info("cnt:%d,size:%d,num:%d,rd:%d,wr:%d\n",
		write_count,
		frame_info_buf_out[0].size,
		frame_info_buf_out[0].num,
		frame_qos_rd,qos_wr);
	*/
	frame_qos_rd = qos_wr;
	return frame_info_buf_out;
}
EXPORT_SYMBOL(vdec_get_qos_info);
5173
5174
5175RESERVEDMEM_OF_DECLARE(vdec, "amlogic, vdec-memory", vdec_mem_setup);
5176/*
5177uint force_hevc_clock_cntl;
5178EXPORT_SYMBOL(force_hevc_clock_cntl);
5179
5180module_param(force_hevc_clock_cntl, uint, 0664);
5181*/
5182module_param(debug, uint, 0664);
5183module_param(debug_trace_num, uint, 0664);
5184module_param(hevc_max_reset_count, int, 0664);
5185module_param(clk_config, uint, 0664);
5186module_param(step_mode, int, 0664);
5187module_param(debugflags, int, 0664);
5188module_param(parallel_decode, int, 0664);
5189module_param(fps_detection, int, 0664);
5190module_param(fps_clear, int, 0664);
5191module_param(force_nosecure_even_drm, int, 0664);
5192module_param(disable_switch_single_to_mult, int, 0664);
5193
5194module_param(frameinfo_flag, int, 0664);
5195MODULE_PARM_DESC(frameinfo_flag,
5196 "\n frameinfo_flag\n");
5197module_param(v4lvideo_add_di, int, 0664);
5198MODULE_PARM_DESC(v4lvideo_add_di,
5199 "\n v4lvideo_add_di\n");
5200
5201module_param(max_di_instance, int, 0664);
5202MODULE_PARM_DESC(max_di_instance,
5203 "\n max_di_instance\n");
5204
5205/*
5206*module_init(vdec_module_init);
5207*module_exit(vdec_module_exit);
5208*/
5209#define CREATE_TRACE_POINTS
5210#include "vdec_trace.h"
5211MODULE_DESCRIPTION("AMLOGIC vdec driver");
5212MODULE_LICENSE("GPL");
5213MODULE_AUTHOR("Tim Yao <timyao@amlogic.com>");
5214