summaryrefslogtreecommitdiff
path: root/drivers/frame_provider/decoder/utils/vdec.c (plain)
blob: 5c9e1d59da66c08bb1fd8d06db9c6823f3bd71d6
1/*
2 * drivers/amlogic/media/frame_provider/decoder/utils/vdec.c
3 *
4 * Copyright (C) 2016 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/spinlock.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/delay.h>
23#include <linux/kthread.h>
24#include <linux/platform_device.h>
25#include <linux/uaccess.h>
26#include <linux/semaphore.h>
27#include <linux/sched/rt.h>
28#include <linux/interrupt.h>
29#include <linux/amlogic/media/utils/vformat.h>
30#include <linux/amlogic/iomap.h>
31#include <linux/amlogic/media/canvas/canvas.h>
32#include <linux/amlogic/media/vfm/vframe.h>
33#include <linux/amlogic/media/vfm/vframe_provider.h>
34#include <linux/amlogic/media/vfm/vframe_receiver.h>
35#include <linux/amlogic/media/video_sink/ionvideo_ext.h>
36#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
37#include <linux/amlogic/media/video_sink/v4lvideo_ext.h>
38#endif
39#include <linux/amlogic/media/vfm/vfm_ext.h>
40/*for VDEC_DEBUG_SUPPORT*/
41#include <linux/time.h>
42
43#include <linux/amlogic/media/utils/vdec_reg.h>
44#include "vdec.h"
45#include "vdec_trace.h"
46#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
47#include "vdec_profile.h"
48#endif
49#include <linux/of.h>
50#include <linux/of_fdt.h>
51#include <linux/libfdt_env.h>
52#include <linux/of_reserved_mem.h>
53#include <linux/dma-contiguous.h>
54#include <linux/cma.h>
55#include <linux/module.h>
56#include <linux/slab.h>
57#include <linux/dma-mapping.h>
58#include <linux/dma-contiguous.h>
59#include "../../../stream_input/amports/amports_priv.h"
60
61#include <linux/amlogic/media/utils/amports_config.h>
62#include "../utils/amvdec.h"
63#include "vdec_input.h"
64
65#include "../../../common/media_clock/clk/clk.h"
66#include <linux/reset.h>
67#include <linux/amlogic/cpu_version.h>
68#include <linux/amlogic/media/codec_mm/codec_mm.h>
69#include <linux/amlogic/media/video_sink/video_keeper.h>
70#include <linux/amlogic/media/codec_mm/configs.h>
71#include <linux/amlogic/media/frame_sync/ptsserv.h>
72#include "secprot.h"
73#include "../../../common/chips/decoder_cpu_ver_info.h"
74#include "frame_check.h"
75
76#ifdef CONFIG_AMLOGIC_POWER
77#include <linux/amlogic/power_ctrl.h>
78#endif
79
static DEFINE_MUTEX(vdec_mutex);	/* serializes global decoder state */

#define MC_SIZE (4096 * 4)	/* microcode/firmware buffer size, bytes */
#define CMA_ALLOC_SIZE SZ_64M
#define MEM_NAME "vdec_prealloc"	/* tag for preallocated codec memory */
static int inited_vcodec_num;	/* count of initialized codec instances */
/* current jiffies expressed in milliseconds */
#define jiffies_ms div64_u64(get_jiffies_64() * 1000, HZ)
static int poweron_clock_level;
static int keep_vdec_mem;
static unsigned int debug_trace_num = 16 * 20;
static int step_mode;
static unsigned int clk_config;	/* policy value, see get_vdec_clk_config_settings() */
/*
 &1: sched_priority to MAX_RT_PRIO -1.
 &2: always reload firmware.
 &4: vdec canvas debug enable
 */
static unsigned int debug;

static int hevc_max_reset_count;

static int no_powerdown;	/* when set, skip decoder power-down (vdec_set_no_powerdown) */
static int parallel_decode = 1;
static int fps_detection;	/* enables vdec_fps_detec() sampling */
static int fps_clear;		/* one-shot: clear all fps stats on next sample */


static int force_nosecure_even_drm;
static int disable_switch_single_to_mult;	/* keep single mode in vdec_set_decinfo() */

static DEFINE_SPINLOCK(vdec_spin_lock);	/* guards DMC_REQ_CTRL read-modify-write */

#define HEVC_TEST_LIMIT 100
#define GXBB_REV_A_MINOR 0xA	/* GXBB rev.A minor id, see hevc_workaround_needed() */

#define PRINT_FRAME_INFO 1
#define DISABLE_FRAME_INFO 2

static int frameinfo_flag = 0;
static int v4lvideo_add_di = 1;
static int max_di_instance = 1;

//static int path_debug = 0;

/* per-frame QoS record ring (frame_qos_wr/rd are its indices) */
static struct vframe_qos_s *frame_info_buf_in = NULL;
static struct vframe_qos_s *frame_info_buf_out = NULL;
static int frame_qos_wr = 0;	/* QoS ring write index */
static int frame_qos_rd = 0;	/* QoS ring read index */
int decode_underflow = 0;	/* exported underflow indicator */

/* tracked canvas slots: low range 0..AMVDEC_CANVAS_MAX2 plus the
 * AMVDEC_CANVAS_START_INDEX..AMVDEC_CANVAS_MAX1 range */
#define CANVAS_MAX_SIZE (AMVDEC_CANVAS_MAX1 - AMVDEC_CANVAS_START_INDEX + 1 + AMVDEC_CANVAS_MAX2 + 1)
131
/* Named register entry (used by debug register tables). */
struct am_reg {
	char *name;
	int offset;
};

/* Book-keeping for one decoder interrupt line. */
struct vdec_isr_context_s {
	int index;
	int irq;
	irq_handler_t dev_isr;		/* client top-half handler */
	irq_handler_t dev_threaded_isr;	/* client threaded handler */
	void *dev_id;
	struct vdec_s *vdec;		/* instance this irq is routed to */
};

/* Per-instance decode-rate statistics (see vdec_fps_detec()). */
struct decode_fps_s {
	u32 frame_count;
	u64 start_timestamp;	/* us, taken at first counted frame */
	u64 last_timestamp;	/* us, most recent counted frame */
	u32 fps;		/* scaled x10^4: count * 10^10 / elapsed_us */
};

/* Global decoder-core state; one instance lives in vdec_core. */
struct vdec_core_s {
	struct list_head connected_vdec_list;	/* all connected instances */
	spinlock_t lock;		/* taken via vdec_core_lock() */
	spinlock_t canvas_lock;	/* protects canvas_stat[] */
	spinlock_t fps_lock;		/* protects decode_fps[] */
	spinlock_t input_lock;	/* protects buff_flag/stream_buff_flag */
	struct ida ida;		/* instance id allocator (vdec_create) */
	atomic_t vdec_nr;		/* number of live instances */
	struct vdec_s *vfm_vdec;
	struct vdec_s *active_vdec;	/* last VDEC_1-class instance (vdec_save_active_hw) */
	struct vdec_s *active_hevc;	/* last HEVC-class instance */
	struct vdec_s *hint_fr_vdec;
	struct platform_device *vdec_core_platform_device;
	struct device *cma_dev;	/* device handed out by get_codec_cma_device() */
	struct semaphore sem;		/* posted by vdec_up() to wake the core thread */
	struct task_struct *thread;
	struct workqueue_struct *vdec_core_wq;	/* used by vdec_schedule_work() */

	unsigned long sched_mask;
	struct vdec_isr_context_s isr_context[VDEC_IRQ_MAX];
	int power_ref_count[VDEC_MAX];
	struct vdec_s *last_vdec;
	int parallel_dec;
	unsigned long power_ref_mask;
	int vdec_combine_flag;
	struct decode_fps_s decode_fps[MAX_INSTANCE_MUN];
	unsigned long buff_flag;	/* core bits with frame data pending (vdec_update_buff_status) */
	unsigned long stream_buff_flag;	/* core bits fed by stream-based input */
};

/* One slot of the shared canvas allocation table. */
struct canvas_status_s {
	int type;		/* owner type; 0 = free */
	int canvas_used_flag;	/* reference count */
	int id;		/* bitmask of user ids holding this canvas */
};
188
189
/* The single global core instance. */
static struct vdec_core_s *vdec_core;

/* Printable names for the decoder status values. */
static const char * const vdec_status_string[] = {
	"VDEC_STATUS_UNINITIALIZED",
	"VDEC_STATUS_DISCONNECTED",
	"VDEC_STATUS_CONNECTED",
	"VDEC_STATUS_ACTIVE"
};

static int debugflags;	/* global debug bits, read via vdec_get_debug_flags() */

/* Shared canvas table, guarded by vdec_core->canvas_lock. */
static struct canvas_status_s canvas_stat[AMVDEC_CANVAS_MAX1 - AMVDEC_CANVAS_START_INDEX + 1 + AMVDEC_CANVAS_MAX2 + 1];
202
203
/* Return the global vdec debug flag word (debugflags). */
int vdec_get_debug_flags(void)
{
	return debugflags;
}
EXPORT_SYMBOL(vdec_get_debug_flags);
209
210unsigned char is_mult_inc(unsigned int type)
211{
212 unsigned char ret = 0;
213 if (vdec_get_debug_flags() & 0xf000)
214 ret = (vdec_get_debug_flags() & 0x1000)
215 ? 1 : 0;
216 else if (type & PORT_TYPE_DECODER_SCHED)
217 ret = 1;
218 return ret;
219}
220EXPORT_SYMBOL(is_mult_inc);
221
/* Whether each core consumes a bitstream input, indexed by core id. */
static const bool cores_with_input[VDEC_MAX] = {
	true, /* VDEC_1 */
	false, /* VDEC_HCODEC */
	false, /* VDEC_2 */
	true, /* VDEC_HEVC / VDEC_HEVC_FRONT */
	false, /* VDEC_HEVC_BACK */
};

/* Interrupt line used by each core, indexed like cores_with_input. */
static const int cores_int[VDEC_MAX] = {
	VDEC_IRQ_1,
	VDEC_IRQ_2,
	VDEC_IRQ_0,
	VDEC_IRQ_0,
	VDEC_IRQ_HEVC_BACK
};
237
/*
 * Spinlock helpers: each *_lock() disables local interrupts and
 * returns the saved flags, which must be passed back to the matching
 * *_unlock().
 */

/* Protects the canvas_stat[] allocation table. */
unsigned long vdec_canvas_lock(struct vdec_core_s *core)
{
	unsigned long flags;
	spin_lock_irqsave(&core->canvas_lock, flags);

	return flags;
}

void vdec_canvas_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->canvas_lock, flags);
}

/* Protects the decode_fps[] statistics. */
unsigned long vdec_fps_lock(struct vdec_core_s *core)
{
	unsigned long flags;
	spin_lock_irqsave(&core->fps_lock, flags);

	return flags;
}

void vdec_fps_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->fps_lock, flags);
}

/* Protects core scheduler state. */
unsigned long vdec_core_lock(struct vdec_core_s *core)
{
	unsigned long flags;

	spin_lock_irqsave(&core->lock, flags);

	return flags;
}

void vdec_core_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->lock, flags);
}

/* Protects buff_flag / stream_buff_flag. */
unsigned long vdec_inputbuff_lock(struct vdec_core_s *core)
{
	unsigned long flags;

	spin_lock_irqsave(&core->input_lock, flags);

	return flags;
}

void vdec_inputbuff_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->input_lock, flags);
}
291
292
293static bool vdec_is_input_frame_empty(struct vdec_s *vdec) {
294 struct vdec_core_s *core = vdec_core;
295 bool ret;
296 unsigned long flags;
297
298 flags = vdec_inputbuff_lock(core);
299 ret = !(vdec->core_mask & core->buff_flag);
300 vdec_inputbuff_unlock(core, flags);
301
302 return ret;
303}
304
/* Wake the core scheduler thread by posting the core semaphore. */
static void vdec_up(struct vdec_s *vdec)
{
	struct vdec_core_s *core = vdec_core;

	if (debug & 8)
		pr_info("vdec_up, id:%d\n", vdec->id);
	up(&core->sem);
}
313
314
315static u64 vdec_get_us_time_system(void)
316{
317 struct timeval tv;
318
319 do_gettimeofday(&tv);
320
321 return div64_u64(timeval_to_ns(&tv), 1000);
322}
323
324static void vdec_fps_clear(int id)
325{
326 if (id >= MAX_INSTANCE_MUN)
327 return;
328
329 vdec_core->decode_fps[id].frame_count = 0;
330 vdec_core->decode_fps[id].start_timestamp = 0;
331 vdec_core->decode_fps[id].last_timestamp = 0;
332 vdec_core->decode_fps[id].fps = 0;
333}
334
335static void vdec_fps_clearall(void)
336{
337 int i;
338
339 for (i = 0; i < MAX_INSTANCE_MUN; i++) {
340 vdec_core->decode_fps[i].frame_count = 0;
341 vdec_core->decode_fps[i].start_timestamp = 0;
342 vdec_core->decode_fps[i].last_timestamp = 0;
343 vdec_core->decode_fps[i].fps = 0;
344 }
345}
346
347static void vdec_fps_detec(int id)
348{
349 unsigned long flags;
350
351 if (fps_detection == 0)
352 return;
353
354 if (id >= MAX_INSTANCE_MUN)
355 return;
356
357 flags = vdec_fps_lock(vdec_core);
358
359 if (fps_clear == 1) {
360 vdec_fps_clearall();
361 fps_clear = 0;
362 }
363
364 vdec_core->decode_fps[id].frame_count++;
365 if (vdec_core->decode_fps[id].frame_count == 1) {
366 vdec_core->decode_fps[id].start_timestamp =
367 vdec_get_us_time_system();
368 vdec_core->decode_fps[id].last_timestamp =
369 vdec_core->decode_fps[id].start_timestamp;
370 } else {
371 vdec_core->decode_fps[id].last_timestamp =
372 vdec_get_us_time_system();
373 vdec_core->decode_fps[id].fps =
374 (u32)div_u64(((u64)(vdec_core->decode_fps[id].frame_count) *
375 10000000000),
376 (vdec_core->decode_fps[id].last_timestamp -
377 vdec_core->decode_fps[id].start_timestamp));
378 }
379 vdec_fps_unlock(vdec_core, flags);
380}
381
382
383
384static int get_canvas(unsigned int index, unsigned int base)
385{
386 int start;
387 int canvas_index = index * base;
388 int ret;
389
390 if ((base > 4) || (base == 0))
391 return -1;
392
393 if ((AMVDEC_CANVAS_START_INDEX + canvas_index + base - 1)
394 <= AMVDEC_CANVAS_MAX1) {
395 start = AMVDEC_CANVAS_START_INDEX + base * index;
396 } else {
397 canvas_index -= (AMVDEC_CANVAS_MAX1 -
398 AMVDEC_CANVAS_START_INDEX + 1) / base * base;
399 if (canvas_index <= AMVDEC_CANVAS_MAX2)
400 start = canvas_index / base;
401 else
402 return -1;
403 }
404
405 if (base == 1) {
406 ret = start;
407 } else if (base == 2) {
408 ret = ((start + 1) << 16) | ((start + 1) << 8) | start;
409 } else if (base == 3) {
410 ret = ((start + 2) << 16) | ((start + 1) << 8) | start;
411 } else if (base == 4) {
412 ret = (((start + 3) << 24) | (start + 2) << 16) |
413 ((start + 1) << 8) | start;
414 }
415
416 return ret;
417}
418
/*
 * Allocate (or share) one canvas of @type for user @id.
 *
 * Pass 1 reuses an in-use slot of the same type not yet referenced by
 * this id (bumping the refcount and id bitmask); pass 2 claims a free
 * slot. Table indices 0x10-0x15 are reserved by rdma and skipped.
 * Slots 0..AMVDEC_CANVAS_MAX2 map 1:1 to canvas indices; higher slots
 * map into the AMVDEC_CANVAS_START_INDEX range. Returns the hardware
 * canvas index, or -1 when the table is full.
 */
static int get_canvas_ex(int type, int id)
{
	int i;
	unsigned long flags;

	flags = vdec_canvas_lock(vdec_core);

	for (i = 0; i < CANVAS_MAX_SIZE; i++) {
		/*0x10-0x15 has been used by rdma*/
		if ((i >= 0x10) && (i <= 0x15))
			continue;
		if ((canvas_stat[i].type == type) &&
			(canvas_stat[i].id & (1 << id)) == 0) {
			canvas_stat[i].canvas_used_flag++;
			canvas_stat[i].id |= (1 << id);
			if (debug & 4)
				pr_debug("get used canvas %d\n", i);
			vdec_canvas_unlock(vdec_core, flags);
			/* translate table slot to hardware canvas index */
			if (i < AMVDEC_CANVAS_MAX2 + 1)
				return i;
			else
				return (i + AMVDEC_CANVAS_START_INDEX - AMVDEC_CANVAS_MAX2 - 1);
		}
	}

	for (i = 0; i < CANVAS_MAX_SIZE; i++) {
		/*0x10-0x15 has been used by rdma*/
		if ((i >= 0x10) && (i <= 0x15))
			continue;
		if (canvas_stat[i].type == 0) {
			canvas_stat[i].type = type;
			canvas_stat[i].canvas_used_flag = 1;
			canvas_stat[i].id = (1 << id);
			if (debug & 4) {
				pr_debug("get canvas %d\n", i);
				pr_debug("canvas_used_flag %d\n",
					canvas_stat[i].canvas_used_flag);
				pr_debug("canvas_stat[i].id %d\n",
					canvas_stat[i].id);
			}
			vdec_canvas_unlock(vdec_core, flags);
			/* translate table slot to hardware canvas index */
			if (i < AMVDEC_CANVAS_MAX2 + 1)
				return i;
			else
				return (i + AMVDEC_CANVAS_START_INDEX - AMVDEC_CANVAS_MAX2 - 1);
		}
	}
	vdec_canvas_unlock(vdec_core, flags);

	pr_info("cannot get canvas\n");

	return -1;
}
472
473static void free_canvas_ex(int index, int id)
474{
475 unsigned long flags;
476 int offset;
477
478 flags = vdec_canvas_lock(vdec_core);
479 if (index >= 0 &&
480 index < AMVDEC_CANVAS_MAX2 + 1)
481 offset = index;
482 else if ((index >= AMVDEC_CANVAS_START_INDEX) &&
483 (index <= AMVDEC_CANVAS_MAX1))
484 offset = index + AMVDEC_CANVAS_MAX2 + 1 - AMVDEC_CANVAS_START_INDEX;
485 else {
486 vdec_canvas_unlock(vdec_core, flags);
487 return;
488 }
489
490 if ((canvas_stat[offset].canvas_used_flag > 0) &&
491 (canvas_stat[offset].id & (1 << id))) {
492 canvas_stat[offset].canvas_used_flag--;
493 canvas_stat[offset].id &= ~(1 << id);
494 if (canvas_stat[offset].canvas_used_flag == 0) {
495 canvas_stat[offset].type = 0;
496 canvas_stat[offset].id = 0;
497 }
498 if (debug & 4) {
499 pr_debug("free index %d used_flag %d, type = %d, id = %d\n",
500 offset,
501 canvas_stat[offset].canvas_used_flag,
502 canvas_stat[offset].type,
503 canvas_stat[offset].id);
504 }
505 }
506 vdec_canvas_unlock(vdec_core, flags);
507
508 return;
509
510}
511
/* Pulse the DMC pipeline reset bits for all decoder engines. */
static void vdec_dmc_pipeline_reset(void)
{
	/*
	 * bit15: vdec_piple
	 * bit14: hevc_dmc_piple
	 * bit13: hevcf_dmc_pipl
	 * bit12: wave420_dmc_pipl
	 * bit11: hcodec_dmc_pipl
	 */

	WRITE_RESET_REG(RESET7_REGISTER,
		(1 << 15) | (1 << 14) | (1 << 13) |
		(1 << 12) | (1 << 11));
}
526
/*
 * Halt the decoder's AMRISC core and wait for its IMEM/LMEM DMA
 * engines to go idle.
 *
 * @hw: VDEC_INPUT_TARGET_VLD for the legacy VDEC core,
 *      VDEC_INPUT_TARGET_HEVC for the HEVC core.
 *
 * Each busy-wait is bounded (1s; the HEVC LMEM wait uses 100ms) so a
 * stuck DMA engine cannot hang the caller.
 */
static void vdec_stop_armrisc(int hw)
{
	ulong timeout = jiffies + HZ;

	if (hw == VDEC_INPUT_TARGET_VLD) {
		/* clear the micro/co-processor run state */
		WRITE_VREG(MPSR, 0);
		WRITE_VREG(CPSR, 0);

		while (READ_VREG(IMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}

		timeout = jiffies + HZ;
		while (READ_VREG(LMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}
	} else if (hw == VDEC_INPUT_TARGET_HEVC) {
		WRITE_VREG(HEVC_MPSR, 0);
		WRITE_VREG(HEVC_CPSR, 0);

		while (READ_VREG(HEVC_IMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}

		timeout = jiffies + HZ/10;
		while (READ_VREG(HEVC_LMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}
	}
}
561
/*
 * Gate this decoder's DDR (DMC) access: stop the AMRISC first, then
 * clear the core's request bits in DMC_REQ_CTRL and wait for the
 * channel status to report idle.
 *
 * Mask selection: VLD uses bit13 (bit21 from G12A on); HEVC uses
 * bit4, plus bit8 (hevcb) from G12A on.
 *
 * NOTE(review): the DMC_CHAN_STS poll has no timeout — if the channel
 * never reports @mask this loops forever; confirm that is acceptable.
 */
static void vdec_disable_DMC(struct vdec_s *vdec)
{
	/*close first,then wait pedding end,timing suggestion from vlsi*/
	struct vdec_input_s *input = &vdec->input;
	unsigned long flags;
	unsigned int mask = 0;

	if (input->target == VDEC_INPUT_TARGET_VLD) {
		mask = (1 << 13);
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
			mask = (1 << 21);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		mask = (1 << 4); /*hevc*/
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
			mask |= (1 << 8); /*hevcb */
	}

	/* need to stop armrisc. */
	if (!IS_ERR_OR_NULL(vdec->dev))
		vdec_stop_armrisc(input->target);

	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;

	pr_debug("%s input->target= 0x%x\n", __func__, input->target);
}
594
/*
 * Re-enable this decoder's DDR (DMC) access by setting its request
 * bits in DMC_REQ_CTRL; mask selection mirrors vdec_disable_DMC().
 * On G12B the DMC pipelines must be reset before re-enabling.
 *
 * NOTE(review): the G12B check uses get_cpu_type() while the mask
 * selection uses get_cpu_major_id() — presumably equivalent; verify.
 */
static void vdec_enable_DMC(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;
	unsigned long flags;
	unsigned int mask = 0;

	if (input->target == VDEC_INPUT_TARGET_VLD) {
		mask = (1 << 13);
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
			mask = (1 << 21);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		mask = (1 << 4); /*hevc*/
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
			mask |= (1 << 8); /*hevcb */
	}

	/*must to be reset the dmc pipeline if it's g12b.*/
	if (get_cpu_type() == AM_MESON_CPU_MAJOR_ID_G12B)
		vdec_dmc_pipeline_reset();

	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);
	pr_debug("%s input->target= 0x%x\n", __func__, input->target);
}
621
622
623
624static int vdec_get_hw_type(int value)
625{
626 int type;
627 switch (value) {
628 case VFORMAT_HEVC:
629 case VFORMAT_VP9:
630 case VFORMAT_AVS2:
631 type = CORE_MASK_HEVC;
632 break;
633
634 case VFORMAT_MPEG12:
635 case VFORMAT_MPEG4:
636 case VFORMAT_H264:
637 case VFORMAT_MJPEG:
638 case VFORMAT_REAL:
639 case VFORMAT_JPEG:
640 case VFORMAT_VC1:
641 case VFORMAT_AVS:
642 case VFORMAT_YUV:
643 case VFORMAT_H264MVC:
644 case VFORMAT_H264_4K2K:
645 case VFORMAT_H264_ENC:
646 case VFORMAT_JPEG_ENC:
647 type = CORE_MASK_VDEC_1;
648 break;
649
650 default:
651 type = -1;
652 }
653
654 return type;
655}
656
657
658static void vdec_save_active_hw(struct vdec_s *vdec)
659{
660 int type;
661
662 type = vdec_get_hw_type(vdec->port->vformat);
663
664 if (type == CORE_MASK_HEVC) {
665 vdec_core->active_hevc = vdec;
666 } else if (type == CORE_MASK_VDEC_1) {
667 vdec_core->active_vdec = vdec;
668 } else {
669 pr_info("save_active_fw wrong\n");
670 }
671}
672
/*
 * Recompute core->buff_flag / core->stream_buff_flag from every
 * connected instance: frame-based inputs with queued frames (or EOS)
 * set their core bits in buff_flag; stream-based inputs always set
 * their bits in stream_buff_flag. Runs under the input-buffer lock.
 */
static void vdec_update_buff_status(void)
{
	struct vdec_core_s *core = vdec_core;
	unsigned long flags;
	struct vdec_s *vdec;

	flags = vdec_inputbuff_lock(core);
	core->buff_flag = 0;
	core->stream_buff_flag = 0;
	list_for_each_entry(vdec, &core->connected_vdec_list, list) {
		struct vdec_input_s *input = &vdec->input;
		if (input_frame_based(input)) {
			if (input->have_frame_num || input->eos)
				core->buff_flag |= vdec->core_mask;
		} else if (input_stream_based(input)) {
			core->stream_buff_flag |= vdec->core_mask;
		}
	}
	vdec_inputbuff_unlock(core, flags);
}
693
694#if 0
695void vdec_update_streambuff_status(void)
696{
697 struct vdec_core_s *core = vdec_core;
698 struct vdec_s *vdec;
699
700 /* check streaming prepare level threshold if not EOS */
701 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
702 struct vdec_input_s *input = &vdec->input;
703 if (input && input_stream_based(input) && !input->eos &&
704 (vdec->need_more_data & VDEC_NEED_MORE_DATA)) {
705 u32 rp, wp, level;
706
707 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
708 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
709 if (wp < rp)
710 level = input->size + wp - rp;
711 else
712 level = wp - rp;
713 if ((level < input->prepare_level) &&
714 (pts_get_rec_num(PTS_TYPE_VIDEO,
715 vdec->input.total_rd_count) < 2)) {
716 break;
717 } else if (level > input->prepare_level) {
718 vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
719 if (debug & 8)
720 pr_info("vdec_flush_streambuff_status up\n");
721 vdec_up(vdec);
722 }
723 break;
724 }
725 }
726}
727EXPORT_SYMBOL(vdec_update_streambuff_status);
728#endif
729
730int vdec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
731{
732 if (vdec && vdec->dec_status &&
733 ((vdec->status == VDEC_STATUS_CONNECTED ||
734 vdec->status == VDEC_STATUS_ACTIVE)))
735 return vdec->dec_status(vdec, vstatus);
736
737 return 0;
738}
739EXPORT_SYMBOL(vdec_status);
740
741int vdec_set_trickmode(struct vdec_s *vdec, unsigned long trickmode)
742{
743 int r;
744
745 if (vdec->set_trickmode) {
746 r = vdec->set_trickmode(vdec, trickmode);
747
748 if ((r == 0) && (vdec->slave) && (vdec->slave->set_trickmode))
749 r = vdec->slave->set_trickmode(vdec->slave,
750 trickmode);
751 return r;
752 }
753
754 return -1;
755}
756EXPORT_SYMBOL(vdec_set_trickmode);
757
758int vdec_set_isreset(struct vdec_s *vdec, int isreset)
759{
760 vdec->is_reset = isreset;
761 pr_info("is_reset=%d\n", isreset);
762 if (vdec->set_isreset)
763 return vdec->set_isreset(vdec, isreset);
764 return 0;
765}
766EXPORT_SYMBOL(vdec_set_isreset);
767
768int vdec_set_dv_metawithel(struct vdec_s *vdec, int isdvmetawithel)
769{
770 vdec->dolby_meta_with_el = isdvmetawithel;
771 pr_info("isdvmetawithel=%d\n", isdvmetawithel);
772 return 0;
773}
774EXPORT_SYMBOL(vdec_set_dv_metawithel);
775
776void vdec_set_no_powerdown(int flag)
777{
778 no_powerdown = flag;
779 pr_info("no_powerdown=%d\n", no_powerdown);
780 return;
781}
782EXPORT_SYMBOL(vdec_set_no_powerdown);
783
784void vdec_count_info(struct vdec_info *vs, unsigned int err,
785 unsigned int offset)
786{
787 if (err)
788 vs->error_frame_count++;
789 if (offset) {
790 if (0 == vs->frame_count) {
791 vs->offset = 0;
792 vs->samp_cnt = 0;
793 }
794 vs->frame_data = offset > vs->total_data ?
795 offset - vs->total_data : vs->total_data - offset;
796 vs->total_data = offset;
797 if (vs->samp_cnt < 96000 * 2) { /* 2s */
798 if (0 == vs->samp_cnt)
799 vs->offset = offset;
800 vs->samp_cnt += vs->frame_dur;
801 } else {
802 vs->bit_rate = (offset - vs->offset) / 2;
803 /*pr_info("bitrate : %u\n",vs->bit_rate);*/
804 vs->samp_cnt = 0;
805 }
806 vs->frame_count++;
807 }
808 /*pr_info("size : %u, offset : %u, dur : %u, cnt : %u\n",
809 vs->offset,offset,vs->frame_dur,vs->samp_cnt);*/
810 return;
811}
812EXPORT_SYMBOL(vdec_count_info);
/* 4K decode is supported on all chips except the GXL 805X package. */
int vdec_is_support_4k(void)
{
	return !is_meson_gxl_package_805X();
}
EXPORT_SYMBOL(vdec_is_support_4k);
818
819/*
820 * clk_config:
821 *0:default
822 *1:no gp0_pll;
823 *2:always used gp0_pll;
824 *>=10:fixed n M clk;
825 *== 100 , 100M clks;
826 */
/* Read the clock configuration policy (legend in the comment above). */
unsigned int get_vdec_clk_config_settings(void)
{
	return clk_config;
}
/* Update the clock configuration policy. */
void update_vdec_clk_config_settings(unsigned int config)
{
	clk_config = config;
}
EXPORT_SYMBOL(update_vdec_clk_config_settings);
836
837static bool hevc_workaround_needed(void)
838{
839 return (get_cpu_major_id() == AM_MESON_CPU_MAJOR_ID_GXBB) &&
840 (get_meson_cpu_version(MESON_CPU_VERSION_LVL_MINOR)
841 == GXBB_REV_A_MINOR);
842}
843
/* Device to use for codec CMA allocations. */
struct device *get_codec_cma_device(void)
{
	return vdec_core->cma_dev;
}
848
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
/*
 * Driver names per format, in pairs:
 * [format * 2]     = legacy single-instance driver,
 * [format * 2 + 1] = multi-instance driver (see get_dev_name()).
 */
static const char * const vdec_device_name[] = {
	"amvdec_mpeg12", "ammvdec_mpeg12",
	"amvdec_mpeg4", "ammvdec_mpeg4",
	"amvdec_h264", "ammvdec_h264",
	"amvdec_mjpeg", "ammvdec_mjpeg",
	"amvdec_real", "ammvdec_real",
	"amjpegdec", "ammjpegdec",
	"amvdec_vc1", "ammvdec_vc1",
	"amvdec_avs", "ammvdec_avs",
	"amvdec_yuv", "ammvdec_yuv",
	"amvdec_h264mvc", "ammvdec_h264mvc",
	"amvdec_h264_4k2k", "ammvdec_h264_4k2k",
	"amvdec_h265", "ammvdec_h265",
	"amvenc_avc", "amvenc_avc",
	"jpegenc", "jpegenc",
	"amvdec_vp9", "ammvdec_vp9",
	"amvdec_avs2", "ammvdec_avs2"
};


#else

/* Driver name per format (single-instance-only builds). */
static const char * const vdec_device_name[] = {
	"amvdec_mpeg12",
	"amvdec_mpeg4",
	"amvdec_h264",
	"amvdec_mjpeg",
	"amvdec_real",
	"amjpegdec",
	"amvdec_vc1",
	"amvdec_avs",
	"amvdec_yuv",
	"amvdec_h264mvc",
	"amvdec_h264_4k2k",
	"amvdec_h265",
	"amvenc_avc",
	"jpegenc",
	"amvdec_vp9",
	"amvdec_avs2"
};

#endif
892
893/*
894 * Only support time sliced decoding for frame based input,
895 * so legacy decoder can exist with time sliced decoder.
896 */
897static const char *get_dev_name(bool use_legacy_vdec, int format)
898{
899#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
900 if (use_legacy_vdec)
901 return vdec_device_name[format * 2];
902 else
903 return vdec_device_name[format * 2 + 1];
904#else
905 return vdec_device_name[format];
906#endif
907}
908
909#ifdef VDEC_DEBUG_SUPPORT
910static u64 get_current_clk(void)
911{
912 /*struct timespec xtime = current_kernel_time();
913 u64 usec = xtime.tv_sec * 1000000;
914 usec += xtime.tv_nsec / 1000;
915 */
916 u64 usec = sched_clock();
917 return usec;
918}
919
920static void inc_profi_count(unsigned long mask, u32 *count)
921{
922 enum vdec_type_e type;
923
924 for (type = VDEC_1; type < VDEC_MAX; type++) {
925 if (mask & (1 << type))
926 count[type]++;
927 }
928}
929
/*
 * Record the run-start clock for every core in @mask and refresh each
 * core's total profiling window (measured from its first-ever start).
 */
static void update_profi_clk_run(struct vdec_s *vdec,
	unsigned long mask, u64 clk)
{
	enum vdec_type_e type;

	for (type = VDEC_1; type < VDEC_MAX; type++) {
		if (mask & (1 << type)) {
			vdec->start_run_clk[type] = clk;
			/* first run anchors the profiling window */
			if (vdec->profile_start_clk[type] == 0)
				vdec->profile_start_clk[type] = clk;
			vdec->total_clk[type] = clk
				- vdec->profile_start_clk[type];
		}
	}
}
948
/*
 * Accumulate per-core run time: add (clk - start_run_clk) for every
 * core in @mask. Logs when a stop arrives without a matching start.
 */
static void update_profi_clk_stop(struct vdec_s *vdec,
	unsigned long mask, u64 clk)
{
	enum vdec_type_e type;

	for (type = VDEC_1; type < VDEC_MAX; type++) {
		if (mask & (1 << type)) {
			if (vdec->start_run_clk[type] == 0)
				pr_info("error, start_run_clk[%d] not set\n", type);

			vdec->run_clk[type] +=
				(clk - vdec->start_run_clk[type]);
		}
	}
}
969
970#endif
971
972int vdec_set_decinfo(struct vdec_s *vdec, struct dec_sysinfo *p)
973{
974 if (copy_from_user((void *)&vdec->sys_info_store, (void *)p,
975 sizeof(struct dec_sysinfo)))
976 return -EFAULT;
977
978 /* force switch to mult instance if supports this profile. */
979 if ((vdec->type == VDEC_TYPE_SINGLE) &&
980 !disable_switch_single_to_mult) {
981 const char *str = NULL;
982 char fmt[16] = {0};
983
984 str = strchr(get_dev_name(false, vdec->format), '_');
985 if (!str)
986 return -1;
987
988 sprintf(fmt, "m%s", ++str);
989 if (is_support_profile(fmt) &&
990 vdec->sys_info->format != VIDEO_DEC_FORMAT_H263)
991 vdec->type = VDEC_TYPE_STREAM_PARSER;
992 }
993
994 return 0;
995}
996EXPORT_SYMBOL(vdec_set_decinfo);
997
998/* construct vdec strcture */
999struct vdec_s *vdec_create(struct stream_port_s *port,
1000 struct vdec_s *master)
1001{
1002 struct vdec_s *vdec;
1003 int type = VDEC_TYPE_SINGLE;
1004 int id;
1005
1006 if (is_mult_inc(port->type))
1007 type = (port->type & PORT_TYPE_FRAME) ?
1008 VDEC_TYPE_FRAME_BLOCK :
1009 VDEC_TYPE_STREAM_PARSER;
1010
1011 id = ida_simple_get(&vdec_core->ida,
1012 0, MAX_INSTANCE_MUN, GFP_KERNEL);
1013 if (id < 0) {
1014 pr_info("vdec_create request id failed!ret =%d\n", id);
1015 return NULL;
1016 }
1017 vdec = vzalloc(sizeof(struct vdec_s));
1018
1019 /* TBD */
1020 if (vdec) {
1021 vdec->magic = 0x43454456;
1022 vdec->id = -1;
1023 vdec->type = type;
1024 vdec->port = port;
1025 vdec->sys_info = &vdec->sys_info_store;
1026
1027 INIT_LIST_HEAD(&vdec->list);
1028
1029 atomic_inc(&vdec_core->vdec_nr);
1030 vdec->id = id;
1031 vdec_input_init(&vdec->input, vdec);
1032 vdec->input.vdec_is_input_frame_empty = vdec_is_input_frame_empty;
1033 vdec->input.vdec_up = vdec_up;
1034 if (master) {
1035 vdec->master = master;
1036 master->slave = vdec;
1037 master->sched = 1;
1038 }
1039 }
1040
1041 pr_debug("vdec_create instance %p, total %d\n", vdec,
1042 atomic_read(&vdec_core->vdec_nr));
1043
1044 //trace_vdec_create(vdec); /*DEBUG_TMP*/
1045
1046 return vdec;
1047}
1048EXPORT_SYMBOL(vdec_create);
1049
1050int vdec_set_format(struct vdec_s *vdec, int format)
1051{
1052 vdec->format = format;
1053 vdec->port_flag |= PORT_FLAG_VFORMAT;
1054
1055 if (vdec->slave) {
1056 vdec->slave->format = format;
1057 vdec->slave->port_flag |= PORT_FLAG_VFORMAT;
1058 }
1059 //trace_vdec_set_format(vdec, format);/*DEBUG_TMP*/
1060
1061 return 0;
1062}
1063EXPORT_SYMBOL(vdec_set_format);
1064
1065int vdec_set_pts(struct vdec_s *vdec, u32 pts)
1066{
1067 vdec->pts = pts;
1068 vdec->pts64 = div64_u64((u64)pts * 100, 9);
1069 vdec->pts_valid = true;
1070 //trace_vdec_set_pts(vdec, (u64)pts);/*DEBUG_TMP*/
1071 return 0;
1072}
1073EXPORT_SYMBOL(vdec_set_pts);
1074
/* Record a 64-bit timestamp for the next frame and mark it valid. */
void vdec_set_timestamp(struct vdec_s *vdec, u64 timestamp)
{
	vdec->timestamp = timestamp;
	vdec->timestamp_valid = true;
}
EXPORT_SYMBOL(vdec_set_timestamp);
1081
1082int vdec_set_pts64(struct vdec_s *vdec, u64 pts64)
1083{
1084 vdec->pts64 = pts64;
1085 vdec->pts = (u32)div64_u64(pts64 * 9, 100);
1086 vdec->pts_valid = true;
1087
1088 //trace_vdec_set_pts64(vdec, pts64);/*DEBUG_TMP*/
1089 return 0;
1090}
1091EXPORT_SYMBOL(vdec_set_pts64);
1092
/* Current decoder status (VDEC_STATUS_*). */
int vdec_get_status(struct vdec_s *vdec)
{
	return vdec->status;
}
EXPORT_SYMBOL(vdec_get_status);

/* Number of frames currently queued in the input chain. */
int vdec_get_frame_num(struct vdec_s *vdec)
{
	return vdec->input.have_frame_num;
}
EXPORT_SYMBOL(vdec_get_frame_num);

/* Set the current decoder status. */
void vdec_set_status(struct vdec_s *vdec, int status)
{
	//trace_vdec_set_status(vdec, status);/*DEBUG_TMP*/
	vdec->status = status;
}
EXPORT_SYMBOL(vdec_set_status);

/* Set the status the decoder should transition to next. */
void vdec_set_next_status(struct vdec_s *vdec, int status)
{
	//trace_vdec_set_next_status(vdec, status);/*DEBUG_TMP*/
	vdec->next_status = status;
}
EXPORT_SYMBOL(vdec_set_next_status);

/* Select the frame-based video output path. */
int vdec_set_video_path(struct vdec_s *vdec, int video_path)
{
	vdec->frame_base_video_path = video_path;
	return 0;
}
EXPORT_SYMBOL(vdec_set_video_path);

/* Record the vframe receiver instance id for this decoder. */
int vdec_set_receive_id(struct vdec_s *vdec, int receive_id)
{
	vdec->vf_receiver_inst = receive_id;
	return 0;
}
EXPORT_SYMBOL(vdec_set_receive_id);
1132
1133/* add frame data to input chain */
/* Append @count bytes of frame data to this vdec's input chain. */
int vdec_write_vframe(struct vdec_s *vdec, const char *buf, size_t count)
{
	return vdec_input_add_frame(&vdec->input, buf, count);
}
EXPORT_SYMBOL(vdec_write_vframe);
1139
1140/* add a work queue thread for vdec*/
1141void vdec_schedule_work(struct work_struct *work)
1142{
1143 if (vdec_core->vdec_core_wq)
1144 queue_work(vdec_core->vdec_core_wq, work);
1145 else
1146 schedule_work(work);
1147}
1148EXPORT_SYMBOL(vdec_schedule_work);
1149
1150static struct vdec_s *vdec_get_associate(struct vdec_s *vdec)
1151{
1152 if (vdec->master)
1153 return vdec->master;
1154 else if (vdec->slave)
1155 return vdec->slave;
1156 return NULL;
1157}
1158
1159static void vdec_sync_input_read(struct vdec_s *vdec)
1160{
1161 if (!vdec_stream_based(vdec))
1162 return;
1163
1164 if (vdec_dual(vdec)) {
1165 u32 me, other;
1166 if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1167 me = READ_VREG(VLD_MEM_VIFIFO_WRAP_COUNT);
1168 other =
1169 vdec_get_associate(vdec)->input.stream_cookie;
1170 if (me > other)
1171 return;
1172 else if (me == other) {
1173 me = READ_VREG(VLD_MEM_VIFIFO_RP);
1174 other =
1175 vdec_get_associate(vdec)->input.swap_rp;
1176 if (me > other) {
1177 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1178 vdec_get_associate(vdec)->
1179 input.swap_rp);
1180 return;
1181 }
1182 }
1183 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1184 READ_VREG(VLD_MEM_VIFIFO_RP));
1185 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1186 me = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
1187 if (((me & 0x80000000) == 0) &&
1188 (vdec->input.streaming_rp & 0x80000000))
1189 me += 1ULL << 32;
1190 other = vdec_get_associate(vdec)->input.streaming_rp;
1191 if (me > other) {
1192 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1193 vdec_get_associate(vdec)->
1194 input.swap_rp);
1195 return;
1196 }
1197
1198 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1199 READ_VREG(HEVC_STREAM_RD_PTR));
1200 }
1201 } else if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1202 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1203 READ_VREG(VLD_MEM_VIFIFO_RP));
1204 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1205 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1206 READ_VREG(HEVC_STREAM_RD_PTR));
1207 }
1208}
1209
/*
 * For stream-based input, propagate the parser's write pointer into
 * the hardware input FIFO (VLD or HEVC stream write register).
 */
static void vdec_sync_input_write(struct vdec_s *vdec)
{
	if (!vdec_stream_based(vdec))
		return;

	if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
		WRITE_VREG(VLD_MEM_VIFIFO_WP,
			READ_PARSER_REG(PARSER_VIDEO_WP));
	} else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
		WRITE_VREG(HEVC_STREAM_WR_PTR,
			READ_PARSER_REG(PARSER_VIDEO_WP));
	}
}
1223
1224/*
1225 *get next frame from input chain
1226 */
1227/*
1228 *THE VLD_FIFO is 512 bytes and Video buffer level
1229 * empty interrupt is set to 0x80 bytes threshold
1230 */
1231#define VLD_PADDING_SIZE 1024
1232#define HEVC_PADDING_SIZE (1024*16)
/*
 * Prepare the HW input (VLD or HEVC stream FIFO) for the next decode run.
 *
 * Frame-based input: programs the FIFO window around the next chunk and
 * returns the chunk through *p together with its size; returns -1 and
 * sets *p = NULL when no chunk is queued.
 *
 * Stream-based input: restores the saved HW swap context (or programs a
 * fresh FIFO window when no context was saved yet), resyncs read/write
 * pointers with the parser, sets *p = NULL and returns the number of
 * bytes currently available to the decoder (clamped to >= 0).
 */
int vdec_prepare_input(struct vdec_s *vdec, struct vframe_chunk_s **p)
{
	struct vdec_input_s *input = &vdec->input;
	struct vframe_chunk_s *chunk = NULL;
	struct vframe_block_list_s *block = NULL;
	int dummy;

	/* full reset to HW input */
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

		/* reset VLD fifo for all vdec */
		WRITE_VREG(DOS_SW_RESET0, (1<<5) | (1<<4) | (1<<3));
		WRITE_VREG(DOS_SW_RESET0, 0);

		/* read back to make sure the reset write landed before
		 * re-enabling power (ordering barrier via register read)
		 */
		dummy = READ_RESET_REG(RESET0_REGISTER);
		WRITE_VREG(POWER_CTL_VLD, 1 << 4);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
#if 0
		/*move to driver*/
		if (input_frame_based(input))
			WRITE_VREG(HEVC_STREAM_CONTROL, 0);

		/*
		 * 2: assist
		 * 3: parser
		 * 4: parser_state
		 * 8: dblk
		 * 11:mcpu
		 * 12:ccpu
		 * 13:ddr
		 * 14:iqit
		 * 15:ipp
		 * 17:qdct
		 * 18:mpred
		 * 19:sao
		 * 24:hevc_afifo
		 */
		WRITE_VREG(DOS_SW_RESET3,
			(1<<3)|(1<<4)|(1<<8)|(1<<11)|(1<<12)|(1<<14)|(1<<15)|
			(1<<17)|(1<<18)|(1<<19));
		WRITE_VREG(DOS_SW_RESET3, 0);
#endif
	}

	/*
	 *setup HW decoder input buffer (VLD context)
	 * based on input->type and input->target
	 */
	if (input_frame_based(input)) {
		chunk = vdec_input_next_chunk(&vdec->input);

		if (chunk == NULL) {
			*p = NULL;
			return -1;
		}

		block = chunk->block;

		if (input->target == VDEC_INPUT_TARGET_VLD) {
			WRITE_VREG(VLD_MEM_VIFIFO_START_PTR, block->start);
			WRITE_VREG(VLD_MEM_VIFIFO_END_PTR, block->start +
					block->size - 8);
			WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
					round_down(block->start + chunk->offset,
						VDEC_FIFO_ALIGN));

			/* toggle control to latch the new pointers */
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

			/* set to manual mode */
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
			WRITE_VREG(VLD_MEM_VIFIFO_RP,
					round_down(block->start + chunk->offset,
						VDEC_FIFO_ALIGN));
			/* WP is placed VLD_PADDING_SIZE past the chunk end
			 * (wrapping inside the block) so the FIFO does not
			 * starve at the end of the frame
			 */
			dummy = chunk->offset + chunk->size +
				VLD_PADDING_SIZE;
			if (dummy >= block->size)
				dummy -= block->size;
			WRITE_VREG(VLD_MEM_VIFIFO_WP,
				round_down(block->start + dummy,
					VDEC_FIFO_ALIGN));

			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 3);
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);

			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
				(0x11 << 16) | (1<<10) | (7<<3));

		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_START_ADDR, block->start);
			WRITE_VREG(HEVC_STREAM_END_ADDR, block->start +
					block->size);
			WRITE_VREG(HEVC_STREAM_RD_PTR, block->start +
					chunk->offset);
			/* same padding trick as VLD, with a larger margin */
			dummy = chunk->offset + chunk->size +
				HEVC_PADDING_SIZE;
			if (dummy >= block->size)
				dummy -= block->size;
			WRITE_VREG(HEVC_STREAM_WR_PTR,
				round_down(block->start + dummy,
					VDEC_FIFO_ALIGN));

			/* set endian */
			SET_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
		}

		*p = chunk;
		return chunk->size;

	} else {
		/* stream based */
		u32 rp = 0, wp = 0, fifo_len = 0;
		int size;
		bool swap_valid = input->swap_valid;
		unsigned long swap_page_phys = input->swap_page_phys;

		if (vdec_dual(vdec) &&
			((vdec->flag & VDEC_FLAG_SELF_INPUT_CONTEXT) == 0)) {
			/* keep using previous input context */
			struct vdec_s *master = (vdec->slave) ?
				vdec : vdec->master;
			/* pick whichever side (master/slave) swapped last */
			if (master->input.last_swap_slave) {
				swap_valid = master->slave->input.swap_valid;
				swap_page_phys =
					master->slave->input.swap_page_phys;
			} else {
				swap_valid = master->input.swap_valid;
				swap_page_phys = master->input.swap_page_phys;
			}
		}

		if (swap_valid) {
			if (input->target == VDEC_INPUT_TARGET_VLD) {
				if (vdec->format == VFORMAT_H264)
					SET_VREG_MASK(POWER_CTL_VLD,
						(1 << 9));

				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

				/* restore read side */
				WRITE_VREG(VLD_MEM_SWAP_ADDR,
					swap_page_phys);
				WRITE_VREG(VLD_MEM_SWAP_CTL, 1);

				/* busy-wait for the HW swap-in to finish */
				while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
					;
				WRITE_VREG(VLD_MEM_SWAP_CTL, 0);

				/* restore wrap count */
				WRITE_VREG(VLD_MEM_VIFIFO_WRAP_COUNT,
					input->stream_cookie);

				rp = READ_VREG(VLD_MEM_VIFIFO_RP);
				fifo_len = READ_VREG(VLD_MEM_VIFIFO_LEVEL);

				/* enable */
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
					(0x11 << 16) | (1<<10));

				/* sync with front end */
				vdec_sync_input_read(vdec);
				vdec_sync_input_write(vdec);

				wp = READ_VREG(VLD_MEM_VIFIFO_WP);
			} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
				SET_VREG_MASK(HEVC_STREAM_CONTROL, 1);

				/* restore read side */
				WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
					swap_page_phys);
				WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 1);

				/* busy-wait for the HW swap-in to finish */
				while (READ_VREG(HEVC_STREAM_SWAP_CTRL)
					& (1<<7))
					;
				WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);

				/* restore stream offset */
				WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
					input->stream_cookie);

				rp = READ_VREG(HEVC_STREAM_RD_PTR);
				fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
						>> 16) & 0x7f;


				/* enable */

				/* sync with front end */
				vdec_sync_input_read(vdec);
				vdec_sync_input_write(vdec);

				wp = READ_VREG(HEVC_STREAM_WR_PTR);

				/*pr_info("vdec: restore context\r\n");*/
			}

		} else {
			/* no saved context yet: program the FIFO window
			 * from scratch over the whole input buffer
			 */
			if (input->target == VDEC_INPUT_TARGET_VLD) {
				WRITE_VREG(VLD_MEM_VIFIFO_START_PTR,
					input->start);
				WRITE_VREG(VLD_MEM_VIFIFO_END_PTR,
					input->start + input->size - 8);
				WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
					input->start);

				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

				/* set to manual mode */
				WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
				WRITE_VREG(VLD_MEM_VIFIFO_RP, input->start);
				WRITE_VREG(VLD_MEM_VIFIFO_WP,
					READ_PARSER_REG(PARSER_VIDEO_WP));

				rp = READ_VREG(VLD_MEM_VIFIFO_RP);

				/* enable */
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
					(0x11 << 16) | (1<<10));

				wp = READ_VREG(VLD_MEM_VIFIFO_WP);

			} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
				WRITE_VREG(HEVC_STREAM_START_ADDR,
					input->start);
				WRITE_VREG(HEVC_STREAM_END_ADDR,
					input->start + input->size);
				WRITE_VREG(HEVC_STREAM_RD_PTR,
					input->start);
				WRITE_VREG(HEVC_STREAM_WR_PTR,
					READ_PARSER_REG(PARSER_VIDEO_WP));

				rp = READ_VREG(HEVC_STREAM_RD_PTR);
				wp = READ_VREG(HEVC_STREAM_WR_PTR);
				fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
						>> 16) & 0x7f;

				/* enable */
			}
		}
		*p = NULL;
		/* available bytes = distance wp..rp in the ring plus what
		 * is already inside the HW FIFO
		 */
		if (wp >= rp)
			size = wp - rp + fifo_len;
		else
			size = wp + input->size - rp + fifo_len;
		if (size < 0) {
			pr_info("%s error: input->size %x wp %x rp %x fifo_len %x => size %x\r\n",
				__func__, input->size, wp, rp, fifo_len, size);
			size = 0;
		}
		return size;
	}
}
EXPORT_SYMBOL(vdec_prepare_input);
1489
1490void vdec_enable_input(struct vdec_s *vdec)
1491{
1492 struct vdec_input_s *input = &vdec->input;
1493
1494 if (vdec->status != VDEC_STATUS_ACTIVE)
1495 return;
1496
1497 if (input->target == VDEC_INPUT_TARGET_VLD)
1498 SET_VREG_MASK(VLD_MEM_VIFIFO_CONTROL, (1<<2) | (1<<1));
1499 else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1500 SET_VREG_MASK(HEVC_STREAM_CONTROL, 1);
1501 if (vdec_stream_based(vdec))
1502 CLEAR_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
1503 else
1504 SET_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
1505 SET_VREG_MASK(HEVC_STREAM_FIFO_CTL, (1<<29));
1506 }
1507}
1508EXPORT_SYMBOL(vdec_enable_input);
1509
1510int vdec_set_input_buffer(struct vdec_s *vdec, u32 start, u32 size)
1511{
1512 int r = vdec_input_set_buffer(&vdec->input, start, size);
1513
1514 if (r)
1515 return r;
1516
1517 if (vdec->slave)
1518 r = vdec_input_set_buffer(&vdec->slave->input, start, size);
1519
1520 return r;
1521}
1522EXPORT_SYMBOL(vdec_set_input_buffer);
1523
1524/*
1525 * vdec_eos returns the possibility that there are
1526 * more input can be used by decoder through vdec_prepare_input
1527 * Note: this function should be called prior to vdec_vframe_dirty
1528 * by decoder driver to determine if EOS happens for stream based
1529 * decoding when there is no sufficient data for a frame
1530 */
1531bool vdec_has_more_input(struct vdec_s *vdec)
1532{
1533 struct vdec_input_s *input = &vdec->input;
1534
1535 if (!input->eos)
1536 return true;
1537
1538 if (input_frame_based(input))
1539 return vdec_input_next_input_chunk(input) != NULL;
1540 else {
1541 if (input->target == VDEC_INPUT_TARGET_VLD)
1542 return READ_VREG(VLD_MEM_VIFIFO_WP) !=
1543 READ_PARSER_REG(PARSER_VIDEO_WP);
1544 else {
1545 return (READ_VREG(HEVC_STREAM_WR_PTR) & ~0x3) !=
1546 (READ_PARSER_REG(PARSER_VIDEO_WP) & ~0x3);
1547 }
1548 }
1549}
1550EXPORT_SYMBOL(vdec_has_more_input);
1551
1552void vdec_set_prepare_level(struct vdec_s *vdec, int level)
1553{
1554 vdec->input.prepare_level = level;
1555}
1556EXPORT_SYMBOL(vdec_set_prepare_level);
1557
1558void vdec_set_flag(struct vdec_s *vdec, u32 flag)
1559{
1560 vdec->flag = flag;
1561}
1562EXPORT_SYMBOL(vdec_set_flag);
1563
1564void vdec_set_eos(struct vdec_s *vdec, bool eos)
1565{
1566 struct vdec_core_s *core = vdec_core;
1567
1568 vdec->input.eos = eos;
1569
1570 if (vdec->slave)
1571 vdec->slave->input.eos = eos;
1572 up(&core->sem);
1573}
1574EXPORT_SYMBOL(vdec_set_eos);
1575
1576#ifdef VDEC_DEBUG_SUPPORT
/* Enable debug step mode by setting all step-mode bits (0x1ff).
 * NOTE(review): the meaning of the individual bits is defined where
 * step_mode is consumed — not visible here.
 */
void vdec_set_step_mode(void)
{
	step_mode = 0x1ff;
}
EXPORT_SYMBOL(vdec_set_step_mode);
1582#endif
1583
1584void vdec_set_next_sched(struct vdec_s *vdec, struct vdec_s *next_vdec)
1585{
1586 if (vdec && next_vdec) {
1587 vdec->sched = 0;
1588 next_vdec->sched = 1;
1589 }
1590}
1591EXPORT_SYMBOL(vdec_set_next_sched);
1592
1593/*
1594 * Swap Context: S0 S1 S2 S3 S4
1595 * Sample sequence: M S M M S
1596 * Master Context: S0 S0 S2 S3 S3
1597 * Slave context: NA S1 S1 S2 S4
1598 * ^
1599 * ^
1600 * ^
1601 * the tricky part
1602 * If there are back to back decoding of master or slave
1603 * then the context of the counter part should be updated
1604 * with current decoder. In this example, S1 should be
1605 * updated to S2.
1606 * This is done by swap the swap_page and related info
1607 * between two layers.
1608 */
1609static void vdec_borrow_input_context(struct vdec_s *vdec)
1610{
1611 struct page *swap_page;
1612 unsigned long swap_page_phys;
1613 struct vdec_input_s *me;
1614 struct vdec_input_s *other;
1615
1616 if (!vdec_dual(vdec))
1617 return;
1618
1619 me = &vdec->input;
1620 other = &vdec_get_associate(vdec)->input;
1621
1622 /* swap the swap_context, borrow counter part's
1623 * swap context storage and update all related info.
1624 * After vdec_vframe_dirty, vdec_save_input_context
1625 * will be called to update current vdec's
1626 * swap context
1627 */
1628 swap_page = other->swap_page;
1629 other->swap_page = me->swap_page;
1630 me->swap_page = swap_page;
1631
1632 swap_page_phys = other->swap_page_phys;
1633 other->swap_page_phys = me->swap_page_phys;
1634 me->swap_page_phys = swap_page_phys;
1635
1636 other->swap_rp = me->swap_rp;
1637 other->streaming_rp = me->streaming_rp;
1638 other->stream_cookie = me->stream_cookie;
1639 other->swap_valid = me->swap_valid;
1640}
1641
1642void vdec_vframe_dirty(struct vdec_s *vdec, struct vframe_chunk_s *chunk)
1643{
1644 if (chunk)
1645 chunk->flag |= VFRAME_CHUNK_FLAG_CONSUMED;
1646
1647 if (vdec_stream_based(vdec)) {
1648 vdec->input.swap_needed = true;
1649
1650 if (vdec_dual(vdec)) {
1651 vdec_get_associate(vdec)->input.dirty_count = 0;
1652 vdec->input.dirty_count++;
1653 if (vdec->input.dirty_count > 1) {
1654 vdec->input.dirty_count = 1;
1655 vdec_borrow_input_context(vdec);
1656 }
1657 }
1658
1659 /* for stream based mode, we update read and write pointer
1660 * also in case decoder wants to keep working on decoding
1661 * for more frames while input front end has more data
1662 */
1663 vdec_sync_input_read(vdec);
1664 vdec_sync_input_write(vdec);
1665
1666 vdec->need_more_data |= VDEC_NEED_MORE_DATA_DIRTY;
1667 vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
1668 }
1669}
1670EXPORT_SYMBOL(vdec_vframe_dirty);
1671
1672bool vdec_need_more_data(struct vdec_s *vdec)
1673{
1674 if (vdec_stream_based(vdec))
1675 return vdec->need_more_data & VDEC_NEED_MORE_DATA;
1676
1677 return false;
1678}
1679EXPORT_SYMBOL(vdec_need_more_data);
1680
1681
1682void hevc_wait_ddr(void)
1683{
1684 unsigned long flags;
1685 unsigned int mask = 0;
1686
1687 mask = 1 << 4; /* hevc */
1688 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
1689 mask |= (1 << 8); /* hevcb */
1690
1691 spin_lock_irqsave(&vdec_spin_lock, flags);
1692 codec_dmcbus_write(DMC_REQ_CTRL,
1693 codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
1694 spin_unlock_irqrestore(&vdec_spin_lock, flags);
1695
1696 while (!(codec_dmcbus_read(DMC_CHAN_STS)
1697 & mask))
1698 ;
1699}
1700
/*
 * Save the decoder's HW input context to the per-input swap page so a
 * later vdec_prepare_input can restore it.  Only acts for stream-based
 * input with swap_needed set; afterwards marks the swap valid, resyncs
 * the parser read pointer, records which dual layer swapped last, and
 * waits for the HEVC DDR channels to drain.
 */
void vdec_save_input_context(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_SAVE_INPUT);
#endif

	if (input->target == VDEC_INPUT_TARGET_VLD)
		WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1<<15);

	if (input_stream_based(input) && (input->swap_needed)) {
		if (input->target == VDEC_INPUT_TARGET_VLD) {
			/* trigger HW swap-out to the swap page and
			 * busy-wait for completion (bit 7 = busy)
			 */
			WRITE_VREG(VLD_MEM_SWAP_ADDR,
				input->swap_page_phys);
			WRITE_VREG(VLD_MEM_SWAP_CTL, 3);
			while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
				;
			WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
			/* cookie = ring wrap count for VLD */
			vdec->input.stream_cookie =
				READ_VREG(VLD_MEM_VIFIFO_WRAP_COUNT);
			vdec->input.swap_rp =
				READ_VREG(VLD_MEM_VIFIFO_RP);
			/* total consumed = full wraps * ring size + offset
			 * minus bytes still queued in the FIFO
			 */
			vdec->input.total_rd_count =
				(u64)vdec->input.stream_cookie *
				vdec->input.size + vdec->input.swap_rp -
				READ_VREG(VLD_MEM_VIFIFO_BYTES_AVAIL);
		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
				input->swap_page_phys);
			WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 3);

			while (READ_VREG(HEVC_STREAM_SWAP_CTRL) & (1<<7))
				;
			WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);

			/* cookie = low 32 bits of the HEVC byte count */
			vdec->input.stream_cookie =
				READ_VREG(HEVC_SHIFT_BYTE_COUNT);
			vdec->input.swap_rp =
				READ_VREG(HEVC_STREAM_RD_PTR);
			/* extend streaming_rp to >32 bits: bump the high
			 * word when the 32-bit byte count wrapped, then
			 * splice the new low word in
			 */
			if (((vdec->input.stream_cookie & 0x80000000) == 0) &&
				(vdec->input.streaming_rp & 0x80000000))
				vdec->input.streaming_rp += 1ULL << 32;
			vdec->input.streaming_rp &= 0xffffffffULL << 32;
			vdec->input.streaming_rp |= vdec->input.stream_cookie;
			vdec->input.total_rd_count = vdec->input.streaming_rp;
		}

		input->swap_valid = true;
		input->swap_needed = false;
		/*pr_info("vdec: save context\r\n");*/

		vdec_sync_input_read(vdec);

		if (vdec_dual(vdec)) {
			/* remember which layer saved last so the next
			 * prepare can pick the freshest context
			 */
			struct vdec_s *master = (vdec->slave) ?
				vdec : vdec->master;
			master->input.last_swap_slave = (master->slave == vdec);
			/* pr_info("master->input.last_swap_slave = %d\n",
				master->input.last_swap_slave); */
		}

		hevc_wait_ddr();
	}
}
EXPORT_SYMBOL(vdec_save_input_context);
1767
1768void vdec_clean_input(struct vdec_s *vdec)
1769{
1770 struct vdec_input_s *input = &vdec->input;
1771
1772 while (!list_empty(&input->vframe_chunk_list)) {
1773 struct vframe_chunk_s *chunk =
1774 vdec_input_next_chunk(input);
1775 if (chunk && (chunk->flag & VFRAME_CHUNK_FLAG_CONSUMED))
1776 vdec_input_release_chunk(input, chunk);
1777 else
1778 break;
1779 }
1780 vdec_save_input_context(vdec);
1781}
1782EXPORT_SYMBOL(vdec_clean_input);
1783
1784
/*
 * Restore the HW read-side context for a stream-based decoder.
 * If no swap context was ever saved, program the FIFO window registers
 * from scratch over the whole input buffer; otherwise swap the saved
 * context back in from the swap page.  Always returns 0.
 */
static int vdec_input_read_restore(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

	if (!vdec_stream_based(vdec))
		return 0;

	if (!input->swap_valid) {
		/* fresh setup: no saved context to restore */
		if (input->target == VDEC_INPUT_TARGET_VLD) {
			WRITE_VREG(VLD_MEM_VIFIFO_START_PTR,
				input->start);
			WRITE_VREG(VLD_MEM_VIFIFO_END_PTR,
				input->start + input->size - 8);
			WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
				input->start);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

			/* set to manual mode */
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
			WRITE_VREG(VLD_MEM_VIFIFO_RP, input->start);
		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_START_ADDR,
				input->start);
			WRITE_VREG(HEVC_STREAM_END_ADDR,
				input->start + input->size);
			WRITE_VREG(HEVC_STREAM_RD_PTR,
				input->start);
		}
		return 0;
	}
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		/* restore read side */
		WRITE_VREG(VLD_MEM_SWAP_ADDR,
			input->swap_page_phys);

		/*swap active*/
		WRITE_VREG(VLD_MEM_SWAP_CTL, 1);

		/*wait swap busy*/
		while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
			;

		WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		/* restore read side */
		WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
			input->swap_page_phys);
		WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 1);

		/* busy-wait for the HW swap-in to finish (bit 7 = busy) */
		while (READ_VREG(HEVC_STREAM_SWAP_CTRL)
			& (1<<7))
			;
		WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);
	}

	return 0;
}
1843
1844
1845int vdec_sync_input(struct vdec_s *vdec)
1846{
1847 struct vdec_input_s *input = &vdec->input;
1848 u32 rp = 0, wp = 0, fifo_len = 0;
1849 int size;
1850
1851 vdec_input_read_restore(vdec);
1852 vdec_sync_input_read(vdec);
1853 vdec_sync_input_write(vdec);
1854 if (input->target == VDEC_INPUT_TARGET_VLD) {
1855 rp = READ_VREG(VLD_MEM_VIFIFO_RP);
1856 wp = READ_VREG(VLD_MEM_VIFIFO_WP);
1857
1858 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1859 rp = READ_VREG(HEVC_STREAM_RD_PTR);
1860 wp = READ_VREG(HEVC_STREAM_WR_PTR);
1861 fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
1862 >> 16) & 0x7f;
1863 }
1864 if (wp >= rp)
1865 size = wp - rp + fifo_len;
1866 else
1867 size = wp + input->size - rp + fifo_len;
1868 if (size < 0) {
1869 pr_info("%s error: input->size %x wp %x rp %x fifo_len %x => size %x\r\n",
1870 __func__, input->size, wp, rp, fifo_len, size);
1871 size = 0;
1872 }
1873 return size;
1874
1875}
1876EXPORT_SYMBOL(vdec_sync_input);
1877
1878const char *vdec_status_str(struct vdec_s *vdec)
1879{
1880 return vdec->status < ARRAY_SIZE(vdec_status_string) ?
1881 vdec_status_string[vdec->status] : "INVALID";
1882}
1883
1884const char *vdec_type_str(struct vdec_s *vdec)
1885{
1886 switch (vdec->type) {
1887 case VDEC_TYPE_SINGLE:
1888 return "VDEC_TYPE_SINGLE";
1889 case VDEC_TYPE_STREAM_PARSER:
1890 return "VDEC_TYPE_STREAM_PARSER";
1891 case VDEC_TYPE_FRAME_BLOCK:
1892 return "VDEC_TYPE_FRAME_BLOCK";
1893 case VDEC_TYPE_FRAME_CIRCULAR:
1894 return "VDEC_TYPE_FRAME_CIRCULAR";
1895 default:
1896 return "VDEC_TYPE_INVALID";
1897 }
1898}
1899
1900const char *vdec_device_name_str(struct vdec_s *vdec)
1901{
1902 return vdec_device_name[vdec->format * 2 + 1];
1903}
1904EXPORT_SYMBOL(vdec_device_name_str);
1905
1906void walk_vdec_core_list(char *s)
1907{
1908 struct vdec_s *vdec;
1909 struct vdec_core_s *core = vdec_core;
1910 unsigned long flags;
1911
1912 pr_info("%s --->\n", s);
1913
1914 flags = vdec_core_lock(vdec_core);
1915
1916 if (list_empty(&core->connected_vdec_list)) {
1917 pr_info("connected vdec list empty\n");
1918 } else {
1919 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
1920 pr_info("\tvdec (%p), status = %s\n", vdec,
1921 vdec_status_str(vdec));
1922 }
1923 }
1924
1925 vdec_core_unlock(vdec_core, flags);
1926}
1927EXPORT_SYMBOL(walk_vdec_core_list);
1928
1929/* insert vdec to vdec_core for scheduling,
1930 * for dual running decoders, connect/disconnect always runs in pairs
1931 */
1932int vdec_connect(struct vdec_s *vdec)
1933{
1934 unsigned long flags;
1935
1936 //trace_vdec_connect(vdec);/*DEBUG_TMP*/
1937
1938 if (vdec->status != VDEC_STATUS_DISCONNECTED)
1939 return 0;
1940
1941 vdec_set_status(vdec, VDEC_STATUS_CONNECTED);
1942 vdec_set_next_status(vdec, VDEC_STATUS_CONNECTED);
1943
1944 init_completion(&vdec->inactive_done);
1945
1946 if (vdec->slave) {
1947 vdec_set_status(vdec->slave, VDEC_STATUS_CONNECTED);
1948 vdec_set_next_status(vdec->slave, VDEC_STATUS_CONNECTED);
1949
1950 init_completion(&vdec->slave->inactive_done);
1951 }
1952
1953 flags = vdec_core_lock(vdec_core);
1954
1955 list_add_tail(&vdec->list, &vdec_core->connected_vdec_list);
1956
1957 if (vdec->slave) {
1958 list_add_tail(&vdec->slave->list,
1959 &vdec_core->connected_vdec_list);
1960 }
1961
1962 vdec_core_unlock(vdec_core, flags);
1963
1964 up(&vdec_core->sem);
1965
1966 return 0;
1967}
1968EXPORT_SYMBOL(vdec_connect);
1969
1970/* remove vdec from vdec_core scheduling */
/*
 * Remove @vdec from core scheduling: request DISCONNECTED as the next
 * status (the transition itself is performed by the core thread) and
 * wait — up to 2 s each — for this vdec and its dual counterpart to
 * report inactive.  Always returns 0; a timeout is only logged.
 */
int vdec_disconnect(struct vdec_s *vdec)
{
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_DISCONNECT);
#endif
	//trace_vdec_disconnect(vdec);/*DEBUG_TMP*/

	if ((vdec->status != VDEC_STATUS_CONNECTED) &&
		(vdec->status != VDEC_STATUS_ACTIVE)) {
		return 0;
	}
	mutex_lock(&vdec_mutex);
	/*
	 *when a vdec is under the management of scheduler
	 * the status change will only be from vdec_core_thread
	 */
	vdec_set_next_status(vdec, VDEC_STATUS_DISCONNECTED);

	if (vdec->slave)
		vdec_set_next_status(vdec->slave, VDEC_STATUS_DISCONNECTED);
	else if (vdec->master)
		vdec_set_next_status(vdec->master, VDEC_STATUS_DISCONNECTED);
	mutex_unlock(&vdec_mutex);
	/* wake the core thread so it processes the status change */
	up(&vdec_core->sem);

	if(!wait_for_completion_timeout(&vdec->inactive_done,
		msecs_to_jiffies(2000)))
		goto discon_timeout;

	/* also wait for the dual counterpart, if any */
	if (vdec->slave) {
		if(!wait_for_completion_timeout(&vdec->slave->inactive_done,
			msecs_to_jiffies(2000)))
			goto discon_timeout;
	} else if (vdec->master) {
		if(!wait_for_completion_timeout(&vdec->master->inactive_done,
			msecs_to_jiffies(2000)))
			goto discon_timeout;
	}

	return 0;
discon_timeout:
	pr_err("%s timeout!!! status: 0x%x\n", __func__, vdec->status);
	return 0;
}
EXPORT_SYMBOL(vdec_disconnect);
2016
2017/* release vdec structure */
2018int vdec_destroy(struct vdec_s *vdec)
2019{
2020 //trace_vdec_destroy(vdec);/*DEBUG_TMP*/
2021
2022 vdec_input_release(&vdec->input);
2023
2024#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2025 vdec_profile_flush(vdec);
2026#endif
2027 ida_simple_remove(&vdec_core->ida, vdec->id);
2028 vfree(vdec);
2029
2030 atomic_dec(&vdec_core->vdec_nr);
2031
2032 return 0;
2033}
2034EXPORT_SYMBOL(vdec_destroy);
2035
2036/*
2037 *register vdec_device
2038 * create output, vfm or create ionvideo output
2039 */
/*
 * Register and initialize a decoder instance:
 *  - resolve the platform-device name for the format,
 *  - claim the single VFM path for plain stream-based decoders,
 *  - select the input target (HEVC core for HEVC/AVS2/VP9, else VLD),
 *  - register the platform device so the per-format driver probes,
 *  - build the output pipeline (VFM path, or an ionvideo/v4lvideo/
 *    amvideo map per frame_base_video_path),
 *  - register the vframe provider and send the frame-rate hint,
 *  - allocate QoS bookkeeping buffers for the VFM path.
 * Returns 0 on success or a negative errno.
 * NOTE(review): @is_4k is unused in this function body.
 */
s32 vdec_init(struct vdec_s *vdec, int is_4k)
{
	int r = 0;
	struct vdec_s *p = vdec;
	const char *dev_name;
	int id = PLATFORM_DEVID_AUTO;/*if have used my self*/

	dev_name = get_dev_name(vdec_single(vdec), vdec->format);

	if (dev_name == NULL)
		return -ENODEV;

	pr_info("vdec_init, dev_name:%s, vdec_type=%s\n",
		dev_name, vdec_type_str(vdec));

	/*
	 *todo: VFM patch control should be configurable,
	 * for now all stream based input uses default VFM path.
	 */
	if (vdec_stream_based(vdec) && !vdec_dual(vdec)) {
		/* only one decoder may own the VFM path at a time */
		if (vdec_core->vfm_vdec == NULL) {
			pr_debug("vdec_init set vfm decoder %p\n", vdec);
			vdec_core->vfm_vdec = vdec;
		} else {
			pr_info("vdec_init vfm path busy.\n");
			return -EBUSY;
		}
	}

	mutex_lock(&vdec_mutex);
	inited_vcodec_num++;
	mutex_unlock(&vdec_mutex);

	/* HEVC/AVS2/VP9 run on the HEVC core; everything else on VLD */
	vdec_input_set_type(&vdec->input, vdec->type,
		(vdec->format == VFORMAT_HEVC ||
		vdec->format == VFORMAT_AVS2 ||
		vdec->format == VFORMAT_VP9) ?
			VDEC_INPUT_TARGET_HEVC :
			VDEC_INPUT_TARGET_VLD);
	if (vdec_single(vdec))
		vdec_enable_DMC(vdec);
	/* publish core-provided callbacks on the instance */
	p->cma_dev = vdec_core->cma_dev;
	p->get_canvas = get_canvas;
	p->get_canvas_ex = get_canvas_ex;
	p->free_canvas_ex = free_canvas_ex;
	p->vdec_fps_detec = vdec_fps_detec;
	atomic_set(&p->inirq_flag, 0);
	atomic_set(&p->inirq_thread_flag, 0);
	/* todo */
	if (!vdec_dual(vdec))
		p->use_vfm_path = vdec_stream_based(vdec);
	/* vdec_dev_reg.flag = 0; */
	if (vdec->id >= 0)
		id = vdec->id;
	p->parallel_dec = parallel_decode;
	vdec_core->parallel_dec = parallel_decode;
	vdec->canvas_mode = CANVAS_BLKMODE_32X32;
#ifdef FRAME_CHECK
	vdec_frame_check_init(vdec);
#endif
	/* register the per-format platform device; its driver probe
	 * does the format-specific setup
	 */
	p->dev = platform_device_register_data(
		&vdec_core->vdec_core_platform_device->dev,
		dev_name,
		id,
		&p, sizeof(struct vdec_s *));

	if (IS_ERR(p->dev)) {
		r = PTR_ERR(p->dev);
		pr_err("vdec: Decoder device %s register failed (%d)\n",
			dev_name, r);

		mutex_lock(&vdec_mutex);
		inited_vcodec_num--;
		mutex_unlock(&vdec_mutex);

		goto error;
	} else if (!p->dev->dev.driver) {
		pr_info("vdec: Decoder device %s driver probe failed.\n",
			dev_name);
		r = -ENODEV;

		goto error;
	}

	if ((p->type == VDEC_TYPE_FRAME_BLOCK) && (p->run == NULL)) {
		r = -ENODEV;
		pr_err("vdec: Decoder device not handled (%s)\n", dev_name);

		mutex_lock(&vdec_mutex);
		inited_vcodec_num--;
		mutex_unlock(&vdec_mutex);

		goto error;
	}

	if (p->use_vfm_path) {
		vdec->vf_receiver_inst = -1;
		vdec->vfm_map_id[0] = 0;
	} else if (!vdec_dual(vdec)) {
		/* create IONVIDEO instance and connect decoder's
		 * vf_provider interface to it
		 */
		if (p->type != VDEC_TYPE_FRAME_BLOCK) {
			r = -ENODEV;
			pr_err("vdec: Incorrect decoder type\n");

			mutex_lock(&vdec_mutex);
			inited_vcodec_num--;
			mutex_unlock(&vdec_mutex);

			goto error;
		}
		if (p->frame_base_video_path == FRAME_BASE_PATH_IONVIDEO) {
#if 1
			r = ionvideo_assign_map(&vdec->vf_receiver_name,
					&vdec->vf_receiver_inst);
#else
			/*
			 * temporarily just use decoder instance ID as iondriver ID
			 * to solve OMX iondriver instance number check time sequence
			 * only the limitation is we can NOT mix different video
			 * decoders since same ID will be used for different decoder
			 * formats.
			 */
			vdec->vf_receiver_inst = p->dev->id;
			r = ionvideo_assign_map(&vdec->vf_receiver_name,
					&vdec->vf_receiver_inst);
#endif
			if (r < 0) {
				pr_err("IonVideo frame receiver allocation failed.\n");

				mutex_lock(&vdec_mutex);
				inited_vcodec_num--;
				mutex_unlock(&vdec_mutex);

				goto error;
			}

			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				vdec->vf_receiver_name);
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path ==
				FRAME_BASE_PATH_AMLVIDEO_AMVIDEO) {
			/* secure streams skip ppmgr/deinterlace */
			if (vdec_secure(vdec)) {
				snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
					"%s %s", vdec->vf_provider_name,
					"amlvideo amvideo");
			} else {
				snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
					"%s %s", vdec->vf_provider_name,
					"amlvideo ppmgr deinterlace amvideo");
			}
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path ==
				FRAME_BASE_PATH_AMLVIDEO1_AMVIDEO2) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				"aml_video.1 videosync.0 videopip");
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path == FRAME_BASE_PATH_V4L_OSD) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				vdec->vf_receiver_name);
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path == FRAME_BASE_PATH_TUNNEL_MODE) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				"amvideo");
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path == FRAME_BASE_PATH_V4L_VIDEO) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s %s", vdec->vf_provider_name,
				vdec->vf_receiver_name, "amvideo");
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path ==
				FRAME_BASE_PATH_DI_V4LVIDEO) {
#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
			r = v4lvideo_assign_map(&vdec->vf_receiver_name,
					&vdec->vf_receiver_inst);
#else
			r = -1;
#endif
			if (r < 0) {
				pr_err("V4lVideo frame receiver allocation failed.\n");
				mutex_lock(&vdec_mutex);
				inited_vcodec_num--;
				mutex_unlock(&vdec_mutex);
				goto error;
			}
			/* choose which deinterlace instance (if any) sits
			 * between decoder and v4lvideo receiver
			 */
			if (!v4lvideo_add_di || vdec_secure(vdec))
				snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
					"%s %s", vdec->vf_provider_name,
					vdec->vf_receiver_name);
			else {
				if (vdec->vf_receiver_inst == 0)
					snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
						"%s %s %s", vdec->vf_provider_name,
						"deinterlace",
						vdec->vf_receiver_name);
				else if (vdec->vf_receiver_inst < max_di_instance)
					snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
						"%s %s%d %s", vdec->vf_provider_name,
						"dimulti.",
						vdec->vf_receiver_inst,
						vdec->vf_receiver_name);
				else
					snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
						"%s %s", vdec->vf_provider_name,
						vdec->vf_receiver_name);
			}
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		}

		if (vfm_map_add(vdec->vfm_map_id,
			vdec->vfm_map_chain) < 0) {
			r = -ENOMEM;
			pr_err("Decoder pipeline map creation failed %s.\n",
				vdec->vfm_map_id);
			vdec->vfm_map_id[0] = 0;

			mutex_lock(&vdec_mutex);
			inited_vcodec_num--;
			mutex_unlock(&vdec_mutex);

			goto error;
		}

		pr_debug("vfm map %s created\n", vdec->vfm_map_id);

		/*
		 *assume IONVIDEO driver already have a few vframe_receiver
		 * registered.
		 * 1. Call iondriver function to allocate a IONVIDEO path and
		 *    provide receiver's name and receiver op.
		 * 2. Get decoder driver's provider name from driver instance
		 * 3. vfm_map_add(name, "<decoder provider name>
		 *    <iondriver receiver name>"), e.g.
		 *    vfm_map_add("vdec_ion_map_0", "mpeg4_0 iondriver_1");
		 * 4. vf_reg_provider and vf_reg_receiver
		 * Note: the decoder provider's op uses vdec as op_arg
		 *       the iondriver receiver's op uses iondev device as
		 *       op_arg
		 */

	}

	if (!vdec_single(vdec)) {
		vf_reg_provider(&p->vframe_provider);

		vf_notify_receiver(p->vf_provider_name,
			VFRAME_EVENT_PROVIDER_START,
			vdec);

		/* only one vdec provides the frame-rate hint */
		if (vdec_core->hint_fr_vdec == NULL)
			vdec_core->hint_fr_vdec = vdec;

		if (vdec_core->hint_fr_vdec == vdec) {
			if (p->sys_info->rate != 0) {
				if (!vdec->is_reset) {
					vf_notify_receiver(p->vf_provider_name,
						VFRAME_EVENT_PROVIDER_FR_HINT,
						(void *)
						((unsigned long)
						p->sys_info->rate));
					vdec->fr_hint_state = VDEC_HINTED;
				}
			} else {
				vdec->fr_hint_state = VDEC_NEED_HINT;
			}
		}
	}

	p->dolby_meta_with_el = 0;
	pr_debug("vdec_init, vf_provider_name = %s\n", p->vf_provider_name);
	vdec_input_prepare_bufs(/*prepared buffer for fast playing.*/
		&vdec->input,
		vdec->sys_info->width,
		vdec->sys_info->height);
	/* vdec is now ready to be active */
	vdec_set_status(vdec, VDEC_STATUS_DISCONNECTED);
	if (p->use_vfm_path) {
		/* QoS ring buffers; allocation failure is logged only */
		frame_info_buf_in = (struct vframe_qos_s *)
			kmalloc(QOS_FRAME_NUM*sizeof(struct vframe_qos_s), GFP_KERNEL);
		if (!frame_info_buf_in)
			pr_err("kmalloc: frame_info_buf_in failed\n");
		else
			memset(frame_info_buf_in, 0,
				QOS_FRAME_NUM*sizeof(struct vframe_qos_s));

		frame_info_buf_out = (struct vframe_qos_s *)
			kmalloc(QOS_FRAME_NUM*sizeof(struct vframe_qos_s), GFP_KERNEL);
		if (!frame_info_buf_out)
			pr_err("kmalloc: frame_info_buf_out failed\n");
		else
			memset(frame_info_buf_out, 0,
				QOS_FRAME_NUM*sizeof(struct vframe_qos_s));
		frame_qos_wr = 0;
		frame_qos_rd = 0;
	}
	return 0;

error:
	return r;
}
EXPORT_SYMBOL(vdec_init);
2353
2354/* vdec_create/init/release/destroy are applied to both dual running decoders
2355 */
/*
 * vdec_release() - tear down a decoder instance and free its resources.
 * @vdec: instance previously created/initialized; applies to both decoders
 *        of a dual-running pair.
 *
 * Disconnects the instance from the core scheduler, unregisters its vframe
 * provider (sending FR_END_HINT first if this instance issued the frame-rate
 * hint), removes the vfm map, waits for any in-flight ISR/threaded-ISR to
 * drain, then destroys the instance and drops the global codec count.
 */
void vdec_release(struct vdec_s *vdec)
{
	//trace_vdec_release(vdec);/*DEBUG_TMP*/
#ifdef VDEC_DEBUG_SUPPORT
	/* debug hook: stall the release until the operator clears step_mode */
	if (step_mode) {
		pr_info("VDEC_DEBUG: in step_mode, wait release\n");
		while (step_mode)
			udelay(10);
		pr_info("VDEC_DEBUG: step_mode is clear\n");
	}
#endif
	/* remove from the core's connected list before touching providers */
	vdec_disconnect(vdec);

	if (vdec->vframe_provider.name) {
		if (!vdec_single(vdec)) {
			/* only the instance that issued the FR hint sends
			 * the matching end-hint to the receiver chain */
			if (vdec_core->hint_fr_vdec == vdec
			&& vdec->fr_hint_state == VDEC_HINTED)
				vf_notify_receiver(
					vdec->vf_provider_name,
					VFRAME_EVENT_PROVIDER_FR_END_HINT,
					NULL);
			vdec->fr_hint_state = VDEC_NO_NEED_HINT;
		}
		vf_unreg_provider(&vdec->vframe_provider);
	}

	/* clear core-level back references to this instance */
	if (vdec_core->vfm_vdec == vdec)
		vdec_core->vfm_vdec = NULL;

	if (vdec_core->hint_fr_vdec == vdec)
		vdec_core->hint_fr_vdec = NULL;

	if (vdec->vf_receiver_inst >= 0) {
		if (vdec->vfm_map_id[0]) {
			vfm_map_remove(vdec->vfm_map_id);
			vdec->vfm_map_id[0] = 0;
		}
	}

	/* busy-wait until both the hard-IRQ and threaded-IRQ handlers for
	 * this instance have fully exited (flags set/cleared in vdec_isr /
	 * vdec_thread_isr) */
	while ((atomic_read(&vdec->inirq_flag) > 0)
		|| (atomic_read(&vdec->inirq_thread_flag) > 0))
		schedule();

#ifdef FRAME_CHECK
	vdec_frame_check_exit(vdec);
#endif
	vdec_fps_clear(vdec->id);
	/* last remaining instance: also disable the DMC access path */
	if (atomic_read(&vdec_core->vdec_nr) == 1)
		vdec_disable_DMC(vdec);
	platform_device_unregister(vdec->dev);
	pr_debug("vdec_release instance %p, total %d\n", vdec,
		atomic_read(&vdec_core->vdec_nr));
	if (vdec->use_vfm_path) {
		/* free the QoS ring buffers allocated in vdec_init */
		kfree(frame_info_buf_in);
		frame_info_buf_in = NULL;
		kfree(frame_info_buf_out);
		frame_info_buf_out = NULL;
		frame_qos_wr = 0;
		frame_qos_rd = 0;
	}
	vdec_destroy(vdec);

	mutex_lock(&vdec_mutex);
	inited_vcodec_num--;
	mutex_unlock(&vdec_mutex);

}
EXPORT_SYMBOL(vdec_release);
2424
2425/* For dual running decoders, vdec_reset is only called with master vdec.
2426 */
2427int vdec_reset(struct vdec_s *vdec)
2428{
2429 //trace_vdec_reset(vdec); /*DEBUG_TMP*/
2430
2431 vdec_disconnect(vdec);
2432
2433 if (vdec->vframe_provider.name)
2434 vf_unreg_provider(&vdec->vframe_provider);
2435
2436 if ((vdec->slave) && (vdec->slave->vframe_provider.name))
2437 vf_unreg_provider(&vdec->slave->vframe_provider);
2438
2439 if (vdec->reset) {
2440 vdec->reset(vdec);
2441 if (vdec->slave)
2442 vdec->slave->reset(vdec->slave);
2443 }
2444 vdec->mc_loaded = 0;/*clear for reload firmware*/
2445 vdec_input_release(&vdec->input);
2446
2447 vdec_input_init(&vdec->input, vdec);
2448
2449 vdec_input_prepare_bufs(&vdec->input, vdec->sys_info->width,
2450 vdec->sys_info->height);
2451
2452 vf_reg_provider(&vdec->vframe_provider);
2453 vf_notify_receiver(vdec->vf_provider_name,
2454 VFRAME_EVENT_PROVIDER_START, vdec);
2455
2456 if (vdec->slave) {
2457 vf_reg_provider(&vdec->slave->vframe_provider);
2458 vf_notify_receiver(vdec->slave->vf_provider_name,
2459 VFRAME_EVENT_PROVIDER_START, vdec->slave);
2460 vdec->slave->mc_loaded = 0;/*clear for reload firmware*/
2461 }
2462
2463 vdec_connect(vdec);
2464
2465 return 0;
2466}
2467EXPORT_SYMBOL(vdec_reset);
2468
2469int vdec_v4l2_reset(struct vdec_s *vdec, int flag)
2470{
2471 //trace_vdec_reset(vdec); /*DEBUG_TMP*/
2472 pr_debug("vdec_v4l2_reset %d\n", flag);
2473 vdec_disconnect(vdec);
2474 if (flag != 2) {
2475 if (vdec->vframe_provider.name)
2476 vf_unreg_provider(&vdec->vframe_provider);
2477
2478 if ((vdec->slave) && (vdec->slave->vframe_provider.name))
2479 vf_unreg_provider(&vdec->slave->vframe_provider);
2480
2481 if (vdec->reset) {
2482 vdec->reset(vdec);
2483 if (vdec->slave)
2484 vdec->slave->reset(vdec->slave);
2485 }
2486 vdec->mc_loaded = 0;/*clear for reload firmware*/
2487
2488 vdec_input_release(&vdec->input);
2489
2490 vdec_input_init(&vdec->input, vdec);
2491
2492 vdec_input_prepare_bufs(&vdec->input, vdec->sys_info->width,
2493 vdec->sys_info->height);
2494
2495 vf_reg_provider(&vdec->vframe_provider);
2496 vf_notify_receiver(vdec->vf_provider_name,
2497 VFRAME_EVENT_PROVIDER_START, vdec);
2498
2499 if (vdec->slave) {
2500 vf_reg_provider(&vdec->slave->vframe_provider);
2501 vf_notify_receiver(vdec->slave->vf_provider_name,
2502 VFRAME_EVENT_PROVIDER_START, vdec->slave);
2503 vdec->slave->mc_loaded = 0;/*clear for reload firmware*/
2504 }
2505 } else {
2506 if (vdec->reset) {
2507 vdec->reset(vdec);
2508 if (vdec->slave)
2509 vdec->slave->reset(vdec->slave);
2510 }
2511 }
2512
2513 vdec_connect(vdec);
2514
2515 return 0;
2516}
2517EXPORT_SYMBOL(vdec_v4l2_reset);
2518
2519
2520void vdec_free_cmabuf(void)
2521{
2522 mutex_lock(&vdec_mutex);
2523
2524 /*if (inited_vcodec_num > 0) {
2525 mutex_unlock(&vdec_mutex);
2526 return;
2527 }*/
2528 mutex_unlock(&vdec_mutex);
2529}
2530
2531void vdec_core_request(struct vdec_s *vdec, unsigned long mask)
2532{
2533 vdec->core_mask |= mask;
2534
2535 if (vdec->slave)
2536 vdec->slave->core_mask |= mask;
2537 if (vdec_core->parallel_dec == 1) {
2538 if (mask & CORE_MASK_COMBINE)
2539 vdec_core->vdec_combine_flag++;
2540 }
2541
2542}
2543EXPORT_SYMBOL(vdec_core_request);
2544
2545int vdec_core_release(struct vdec_s *vdec, unsigned long mask)
2546{
2547 vdec->core_mask &= ~mask;
2548
2549 if (vdec->slave)
2550 vdec->slave->core_mask &= ~mask;
2551 if (vdec_core->parallel_dec == 1) {
2552 if (mask & CORE_MASK_COMBINE)
2553 vdec_core->vdec_combine_flag--;
2554 }
2555 return 0;
2556}
2557EXPORT_SYMBOL(vdec_core_release);
2558
2559bool vdec_core_with_input(unsigned long mask)
2560{
2561 enum vdec_type_e type;
2562
2563 for (type = VDEC_1; type < VDEC_MAX; type++) {
2564 if ((mask & (1 << type)) && cores_with_input[type])
2565 return true;
2566 }
2567
2568 return false;
2569}
2570
2571void vdec_core_finish_run(struct vdec_s *vdec, unsigned long mask)
2572{
2573 unsigned long i;
2574 unsigned long t = mask;
2575 mutex_lock(&vdec_mutex);
2576 while (t) {
2577 i = __ffs(t);
2578 clear_bit(i, &vdec->active_mask);
2579 t &= ~(1 << i);
2580 }
2581
2582 if (vdec->active_mask == 0)
2583 vdec_set_status(vdec, VDEC_STATUS_CONNECTED);
2584
2585 mutex_unlock(&vdec_mutex);
2586}
2587EXPORT_SYMBOL(vdec_core_finish_run);
2588/*
2589 * find what core resources are available for vdec
2590 */
2591static unsigned long vdec_schedule_mask(struct vdec_s *vdec,
2592 unsigned long active_mask)
2593{
2594 unsigned long mask = vdec->core_mask &
2595 ~CORE_MASK_COMBINE;
2596
2597 if (vdec->core_mask & CORE_MASK_COMBINE) {
2598 /* combined cores must be granted together */
2599 if ((mask & ~active_mask) == mask)
2600 return mask;
2601 else
2602 return 0;
2603 } else
2604 return mask & ~vdec->sched_mask & ~active_mask;
2605}
2606
2607/*
2608 *Decoder callback
2609 * Each decoder instance uses this callback to notify status change, e.g. when
2610 * decoder finished using HW resource.
2611 * a sample callback from decoder's driver is following:
2612 *
2613 * if (hw->vdec_cb) {
2614 * vdec_set_next_status(vdec, VDEC_STATUS_CONNECTED);
2615 * hw->vdec_cb(vdec, hw->vdec_cb_arg);
2616 * }
2617 */
2618static void vdec_callback(struct vdec_s *vdec, void *data)
2619{
2620 struct vdec_core_s *core = (struct vdec_core_s *)data;
2621
2622#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2623 vdec_profile(vdec, VDEC_PROFILE_EVENT_CB);
2624#endif
2625
2626 up(&core->sem);
2627}
2628
/*
 * vdec_isr() - hard-IRQ top half shared by the decoder interrupt contexts.
 * @irq:    interrupt line that fired.
 * @dev_id: the registered struct vdec_isr_context_s.
 *
 * Resolves the owning vdec instance, records entry time, and dispatches to
 * either the context's device-level override (dev_isr) or the instance's
 * irq_handler. inirq_flag brackets the handler so vdec_release() can wait
 * for in-flight interrupts to drain.
 */
static irqreturn_t vdec_isr(int irq, void *dev_id)
{
	struct vdec_isr_context_s *c =
		(struct vdec_isr_context_s *)dev_id;
	struct vdec_s *vdec = vdec_core->last_vdec;
	irqreturn_t ret = IRQ_HANDLED;

	/* in parallel-decode mode the irq line identifies which active
	 * instance (HEVC vs VDEC core) this interrupt belongs to */
	if (vdec_core->parallel_dec == 1) {
		if (irq == vdec_core->isr_context[VDEC_IRQ_0].irq)
			vdec = vdec_core->active_hevc;
		else if (irq == vdec_core->isr_context[VDEC_IRQ_1].irq)
			vdec = vdec_core->active_vdec;
		else
			vdec = NULL;
	}

	if (vdec) {
		/* mark ISR in progress (paired with clear at isr_done) */
		atomic_set(&vdec->inirq_flag, 1);
		/* timestamp used by vdec_thread_isr to measure latency */
		vdec->isr_ns = local_clock();
	}
	if (c->dev_isr) {
		/* device-level handler overrides per-instance dispatch */
		ret = c->dev_isr(irq, c->dev_id);
		goto isr_done;
	}

	/* only these three contexts route to per-instance handlers */
	if ((c != &vdec_core->isr_context[VDEC_IRQ_0]) &&
	    (c != &vdec_core->isr_context[VDEC_IRQ_1]) &&
	    (c != &vdec_core->isr_context[VDEC_IRQ_HEVC_BACK])) {
#if 0
		pr_warn("vdec interrupt w/o a valid receiver\n");
#endif
		goto isr_done;
	}

	if (!vdec) {
#if 0
		pr_warn("vdec interrupt w/o an active instance running. core = %p\n",
			core);
#endif
		goto isr_done;
	}

	if (!vdec->irq_handler) {
#if 0
		pr_warn("vdec instance has no irq handle.\n");
#endif
		goto isr_done;
	}

	ret = vdec->irq_handler(vdec, c->index);
isr_done:
	if (vdec)
		atomic_set(&vdec->inirq_flag, 0);
	return ret;
}
2684
/*
 * vdec_thread_isr() - threaded (bottom-half) IRQ handler, companion to
 * vdec_isr().
 *
 * Resolves the owning instance the same way as the top half, measures the
 * hard-IRQ-to-thread latency, and dispatches to either the context's
 * dev_threaded_isr override or the instance's threaded_irq_handler.
 * inirq_thread_flag brackets the handler so vdec_release() can wait for
 * it to drain.
 */
static irqreturn_t vdec_thread_isr(int irq, void *dev_id)
{
	struct vdec_isr_context_s *c =
		(struct vdec_isr_context_s *)dev_id;
	struct vdec_s *vdec = vdec_core->last_vdec;
	irqreturn_t ret = IRQ_HANDLED;

	if (vdec_core->parallel_dec == 1) {
		if (irq == vdec_core->isr_context[VDEC_IRQ_0].irq)
			vdec = vdec_core->active_hevc;
		else if (irq == vdec_core->isr_context[VDEC_IRQ_1].irq)
			vdec = vdec_core->active_vdec;
		else
			vdec = NULL;
	}

	if (vdec) {
		/* NOTE(review): the ns delta is truncated into a u32; fine
		 * for the 10ms warning threshold below, but very long stalls
		 * could wrap — confirm if that matters here. */
		u32 isr2tfn = 0;
		atomic_set(&vdec->inirq_thread_flag, 1);
		vdec->tfn_ns = local_clock();
		isr2tfn = vdec->tfn_ns - vdec->isr_ns;
		/* warn when thread wake-up lags the hard IRQ by > 10ms */
		if (isr2tfn > 10000000)
			pr_err("!!!!!!! %s vdec_isr to %s took %uns !!!\n",
				vdec->vf_provider_name, __func__, isr2tfn);
	}
	if (c->dev_threaded_isr) {
		ret = c->dev_threaded_isr(irq, c->dev_id);
		goto thread_isr_done;
	}
	if (!vdec)
		goto thread_isr_done;

	if (!vdec->threaded_irq_handler)
		goto thread_isr_done;
	ret = vdec->threaded_irq_handler(vdec, c->index);
thread_isr_done:
	if (vdec)
		atomic_set(&vdec->inirq_thread_flag, 0);
	return ret;
}
2725
/*
 * vdec_ready_to_run() - scheduling predicate: which cores in @mask can
 * @vdec actually run on right now?
 *
 * Checks instance status, input availability (frame- or stream-based),
 * slave/master scheduling gates and the debug step_mode, then asks the
 * decoder's own run_ready op. Returns the subset of @mask that is ready
 * (0 / false when nothing is runnable).
 */
unsigned long vdec_ready_to_run(struct vdec_s *vdec, unsigned long mask)
{
	unsigned long ready_mask;
	struct vdec_input_s *input = &vdec->input;

	/* only connected/active instances may be scheduled */
	if ((vdec->status != VDEC_STATUS_CONNECTED) &&
	    (vdec->status != VDEC_STATUS_ACTIVE))
		return false;

	if (!vdec->run_ready)
		return false;

	/* when crc32 error, block at error frame */
	if (vdec->vfc.err_crc_block)
		return false;

	/* dual decoders wait until the pair is marked schedulable */
	if ((vdec->slave || vdec->master) &&
		(vdec->sched == 0))
		return false;
#ifdef VDEC_DEBUG_SUPPORT
	inc_profi_count(mask, vdec->check_count);
#endif
	if (vdec_core_with_input(mask)) {

		/* check frame based input underrun */
		if (input && !input->eos && input_frame_based(input)
			&& (!vdec_input_next_chunk(input))) {
#ifdef VDEC_DEBUG_SUPPORT
			inc_profi_count(mask, vdec->input_underrun_count);
#endif
			return false;
		}
		/* check streaming prepare level threshold if not EOS */
		if (input && input_stream_based(input) && !input->eos) {
			u32 rp, wp, level;

			/* compute stream buffer fill level from parser
			 * read/write pointers (wrap-aware) */
			rp = READ_PARSER_REG(PARSER_VIDEO_RP);
			wp = READ_PARSER_REG(PARSER_VIDEO_WP);
			if (wp < rp)
				level = input->size + wp - rp;
			else
				level = wp - rp;

			/* below threshold AND fewer than 2 PTS records
			 * pending: ask the producer for more data */
			if ((level < input->prepare_level) &&
				(pts_get_rec_num(PTS_TYPE_VIDEO,
					vdec->input.total_rd_count) < 2)) {
				vdec->need_more_data |= VDEC_NEED_MORE_DATA;
#ifdef VDEC_DEBUG_SUPPORT
				inc_profi_count(mask, vdec->input_underrun_count);
				/* step-mode debug override: force-run the
				 * selected instance despite underrun */
				if (step_mode & 0x200) {
					if ((step_mode & 0xff) == vdec->id) {
						step_mode |= 0xff;
						return mask;
					}
				}
#endif
				return false;
			} else if (level > input->prepare_level)
				vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
		}
	}

	/* step mode: only the instance whose id matches may proceed */
	if (step_mode) {
		if ((step_mode & 0xff) != vdec->id)
			return 0;
		step_mode |= 0xff; /*VDEC_DEBUG_SUPPORT*/
	}

	/*step_mode &= ~0xff; not work for id of 0, removed*/

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_CHK_RUN_READY);
#endif

	/* final say belongs to the decoder driver itself */
	ready_mask = vdec->run_ready(vdec, mask) & mask;
#ifdef VDEC_DEBUG_SUPPORT
	if (ready_mask != mask)
		inc_profi_count(ready_mask ^ mask, vdec->not_run_ready_count);
#endif
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	if (ready_mask)
		vdec_profile(vdec, VDEC_PROFILE_EVENT_RUN_READY);
#endif

	return ready_mask;
}
2811
2812/* bridge on/off vdec's interrupt processing to vdec core */
2813static void vdec_route_interrupt(struct vdec_s *vdec, unsigned long mask,
2814 bool enable)
2815{
2816 enum vdec_type_e type;
2817
2818 for (type = VDEC_1; type < VDEC_MAX; type++) {
2819 if (mask & (1 << type)) {
2820 struct vdec_isr_context_s *c =
2821 &vdec_core->isr_context[cores_int[type]];
2822 if (enable)
2823 c->vdec = vdec;
2824 else if (c->vdec == vdec)
2825 c->vdec = NULL;
2826 }
2827 }
2828}
2829
2830/*
2831 * Set up secure protection for each decoder instance running.
2832 * Note: The operation from REE side only resets memory access
2833 * to a default policy and even a non_secure type will still be
2834 * changed to secure type automatically when secure source is
2835 * detected inside TEE.
2836 * Perform need_more_data checking and set flag is decoder
2837 * is not consuming data.
2838 */
/*
 * vdec_prepare_run() - per-run setup before handing cores in @mask to @vdec.
 *
 * Routes the relevant interrupt contexts to this instance, configures the
 * TEE secure/non-secure memory policy for the input target (VLD or HEVC),
 * and updates the need_more_data bookkeeping for stream-based input.
 */
void vdec_prepare_run(struct vdec_s *vdec, unsigned long mask)
{
	struct vdec_input_s *input = &vdec->input;
	int secure = (vdec_secure(vdec)) ? DMC_DEV_TYPE_SECURE :
			DMC_DEV_TYPE_NON_SECURE;

	vdec_route_interrupt(vdec, mask, true);

	/* cores without an input side need no secure/data setup */
	if (!vdec_core_with_input(mask))
		return;

	if (secure && vdec_stream_based(vdec) && force_nosecure_even_drm)
	{
		/* Verimatrix ultra webclient (HLS) was played in drmmode and used hw demux. In drmmode VDEC only can access secure.
		Now HW demux parsed es data to no-secure buffer. So the VDEC input was no-secure, VDEC playback failed. Forcing
		use nosecure for verimatrix webclient HLS. If in the future HW demux can parse es data to secure buffer, make
		VDEC r/w secure.*/
		secure = 0;
		//pr_debug("allow VDEC can access nosecure even in drmmode\n");
	}
	/* apply the policy to the DMC device matching the input target */
	if (input->target == VDEC_INPUT_TARGET_VLD)
		tee_config_device_secure(DMC_DEV_ID_VDEC, secure);
	else if (input->target == VDEC_INPUT_TARGET_HEVC)
		tee_config_device_secure(DMC_DEV_ID_HEVC, secure);

	/* stream input: if the previous run consumed nothing (RUN set but
	 * DIRTY never set), flag that more data is needed */
	if (vdec_stream_based(vdec) &&
		((vdec->need_more_data & VDEC_NEED_MORE_DATA_RUN) &&
		(vdec->need_more_data & VDEC_NEED_MORE_DATA_DIRTY) == 0)) {
		vdec->need_more_data |= VDEC_NEED_MORE_DATA;
	}

	/* open a fresh RUN/DIRTY window for this run */
	vdec->need_more_data |= VDEC_NEED_MORE_DATA_RUN;
	vdec->need_more_data &= ~VDEC_NEED_MORE_DATA_DIRTY;
}
2873
2874
/* The vdec core thread manages all decoder instances in the active list.
 * When a vdec is added into the active list, it can only be in one of two
 * states:
 * VDEC_STATUS_CONNECTED(the decoder does not own HW resource and ready to run)
 * VDEC_STATUS_ACTIVE(the decoder owns HW resources and is running).
 * Removing a decoder from the active list is only performed within the core
 * thread. Adding a decoder into the active list is performed from a user
 * thread.
 */
/*
 * vdec_core_thread() - the central scheduler loop.
 *
 * Woken via core->sem (from vdec_callback, ISRs and self-kicks), each pass:
 *  1. refreshes power_ref_mask (parallel mode),
 *  2. reclaims cores released by decoders and cleans their input side,
 *  3. moves decoders pending disconnect onto a local list,
 *  4. elects the next runnable decoder (round-robin starting after
 *     last_vdec, then a second scan from the list head),
 *  5. grants it the cores and calls its run op,
 *  6. completes pending disconnects,
 *  7. self-kicks with a short sleep when work may still be pending.
 */
static int vdec_core_thread(void *data)
{
	struct vdec_core_s *core = (struct vdec_core_s *)data;
	struct sched_param param = {.sched_priority = MAX_RT_PRIO/2};
	unsigned long flags;
	int i;

	/* decode scheduling is latency sensitive: run RT FIFO */
	sched_setscheduler(current, SCHED_FIFO, &param);

	allow_signal(SIGTERM);

	while (down_interruptible(&core->sem) == 0) {
		struct vdec_s *vdec, *tmp, *worker;
		unsigned long sched_mask = 0;
		LIST_HEAD(disconnecting_list);

		if (kthread_should_stop())
			break;
		mutex_lock(&vdec_mutex);

		/* rebuild the powered-core bitmap from the ref counts */
		if (core->parallel_dec == 1) {
			for (i = VDEC_1; i < VDEC_MAX; i++) {
				core->power_ref_mask =
					core->power_ref_count[i] > 0 ?
					(core->power_ref_mask | (1 << i)) :
					(core->power_ref_mask & ~(1 << i));
			}
		}
		/* clean up previous active vdec's input */
		list_for_each_entry(vdec, &core->connected_vdec_list, list) {
			/* cores that were scheduled but are no longer
			 * active, i.e. released by the decoder */
			unsigned long mask = vdec->sched_mask &
				(vdec->active_mask ^ vdec->sched_mask);

			vdec_route_interrupt(vdec, mask, false);

#ifdef VDEC_DEBUG_SUPPORT
			update_profi_clk_stop(vdec, mask, get_current_clk());
#endif
			/*
			 * If decoder released some core resources (mask), then
			 * check if these core resources are associated
			 * with any input side and do input clean up accordingly
			 */
			if (vdec_core_with_input(mask)) {
				struct vdec_input_s *input = &vdec->input;

				/* drop leading chunks already consumed */
				while (!list_empty(
					&input->vframe_chunk_list)) {
					struct vframe_chunk_s *chunk =
						vdec_input_next_chunk(input);
					if (chunk && (chunk->flag &
						VFRAME_CHUNK_FLAG_CONSUMED))
						vdec_input_release_chunk(input,
							chunk);
					else
						break;
				}

				vdec_save_input_context(vdec);
			}

			vdec->sched_mask &= ~mask;
			core->sched_mask &= ~mask;
		}
		vdec_update_buff_status();
		/*
		 *todo:
		 * this is the case when the decoder is in active mode and
		 * the system side wants to stop it. Currently we rely on
		 * the decoder instance to go back to VDEC_STATUS_CONNECTED
		 * from VDEC_STATUS_ACTIVE by its own. However, if for some
		 * reason the decoder can not exist by itself (dead decoding
		 * or whatever), then we may have to add another vdec API
		 * to kill the vdec and release its HW resource and make it
		 * become inactive again.
		 * if ((core->active_vdec) &&
		 * (core->active_vdec->status == VDEC_STATUS_DISCONNECTED)) {
		 * }
		 */

		/* check disconnected decoders */
		flags = vdec_core_lock(vdec_core);
		list_for_each_entry_safe(vdec, tmp,
			&core->connected_vdec_list, list) {
			if ((vdec->status == VDEC_STATUS_CONNECTED) &&
			    (vdec->next_status == VDEC_STATUS_DISCONNECTED)) {
				/* drop any core-level references before
				 * moving the instance off the list */
				if (core->parallel_dec == 1) {
					if (vdec_core->active_hevc == vdec)
						vdec_core->active_hevc = NULL;
					if (vdec_core->active_vdec == vdec)
						vdec_core->active_vdec = NULL;
				}
				if (core->last_vdec == vdec)
					core->last_vdec = NULL;
				list_move(&vdec->list, &disconnecting_list);
			}
		}
		vdec_core_unlock(vdec_core, flags);
		mutex_unlock(&vdec_mutex);
		/* elect next vdec to be scheduled */
		vdec = core->last_vdec;
		if (vdec) {
			/* round-robin: start from the entry after the one
			 * that ran last */
			vdec = list_entry(vdec->list.next, struct vdec_s, list);
			list_for_each_entry_from(vdec,
				&core->connected_vdec_list, list) {
				sched_mask = vdec_schedule_mask(vdec,
					core->sched_mask);
				if (!sched_mask)
					continue;
				sched_mask = vdec_ready_to_run(vdec,
					sched_mask);
				if (sched_mask)
					break;
			}

			/* walked off the end without finding a candidate */
			if (&vdec->list == &core->connected_vdec_list)
				vdec = NULL;
		}

		if (!vdec) {
			/* search from beginning */
			list_for_each_entry(vdec,
				&core->connected_vdec_list, list) {
				sched_mask = vdec_schedule_mask(vdec,
					core->sched_mask);
				/* reaching last_vdec again means the full
				 * ring has been scanned; it gets one final
				 * chance, then the search stops */
				if (vdec == core->last_vdec) {
					if (!sched_mask) {
						vdec = NULL;
						break;
					}

					sched_mask = vdec_ready_to_run(vdec,
						sched_mask);

					if (!sched_mask) {
						vdec = NULL;
						break;
					}
					break;
				}

				if (!sched_mask)
					continue;

				sched_mask = vdec_ready_to_run(vdec,
					sched_mask);
				if (sched_mask)
					break;
			}

			if (&vdec->list == &core->connected_vdec_list)
				vdec = NULL;
		}

		worker = vdec;

		if (vdec) {
			unsigned long mask = sched_mask;
			unsigned long i;

			/* setting active_mask should be atomic.
			 * it can be modified by decoder driver callbacks.
			 */
			while (sched_mask) {
				i = __ffs(sched_mask);
				set_bit(i, &vdec->active_mask);
				/* NOTE(review): "1 << i" is an int shift;
				 * would be UB for bit >= 32 — confirm mask
				 * bits stay below 32. */
				sched_mask &= ~(1 << i);
			}

			/* vdec's sched_mask is only set from core thread */
			vdec->sched_mask |= mask;
			/* firmware reload is needed when the core switches
			 * between decoders of different microcode types */
			if (core->last_vdec) {
				if ((core->last_vdec != vdec) &&
					(core->last_vdec->mc_type != vdec->mc_type))
					vdec->mc_loaded = 0;/*clear for reload firmware*/
			} else
				vdec->mc_loaded = 0;
			core->last_vdec = vdec;
			if (debug & 2)
				vdec->mc_loaded = 0;/*alway reload firmware*/
			vdec_set_status(vdec, VDEC_STATUS_ACTIVE);

			core->sched_mask |= mask;
			if (core->parallel_dec == 1)
				vdec_save_active_hw(vdec);
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
			vdec_profile(vdec, VDEC_PROFILE_EVENT_RUN);
#endif
			vdec_prepare_run(vdec, mask);
#ifdef VDEC_DEBUG_SUPPORT
			inc_profi_count(mask, vdec->run_count);
			update_profi_clk_run(vdec, mask, get_current_clk());
#endif
			vdec->run(vdec, mask, vdec_callback, core);


			/* we have some cores scheduled, keep working until
			 * all vdecs are checked with no cores to schedule
			 */
			if (core->parallel_dec == 1) {
				if (vdec_core->vdec_combine_flag == 0)
					up(&core->sem);
			} else
				up(&core->sem);
		}

		/* remove disconnected decoder from active list */
		list_for_each_entry_safe(vdec, tmp, &disconnecting_list, list) {
			list_del(&vdec->list);
			vdec_set_status(vdec, VDEC_STATUS_DISCONNECTED);
			/*core->last_vdec = NULL;*/
			complete(&vdec->inactive_done);
		}

		/* if there is no new work scheduled and nothing
		 * is running, sleep 20ms
		 */
		if (core->parallel_dec == 1) {
			if (vdec_core->vdec_combine_flag == 0) {
				if ((!worker) &&
					((core->sched_mask != core->power_ref_mask)) &&
					(atomic_read(&vdec_core->vdec_nr) > 0) &&
					((core->buff_flag | core->stream_buff_flag) &
					(core->sched_mask ^ core->power_ref_mask))) {
					usleep_range(1000, 2000);
					up(&core->sem);
				}
			} else {
				if ((!worker) && (!core->sched_mask) &&
					(atomic_read(&vdec_core->vdec_nr) > 0) &&
					(core->buff_flag | core->stream_buff_flag)) {
					usleep_range(1000, 2000);
					up(&core->sem);
				}
			}
		} else if ((!worker) && (!core->sched_mask) && (atomic_read(&vdec_core->vdec_nr) > 0)) {
			usleep_range(1000, 2000);
			up(&core->sem);
		}

	}

	return 0;
}
3125
3126#if 1 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8 */
/*
 * test_hevc() - probe whether the HEVC core came up healthy after power-on.
 * @decomp_addr: 64K-aligned DMA address of a scratch decompression buffer.
 * @us_delay:    microseconds to wait before sampling the debug status.
 *
 * Drives a minimal software-injected (SWIMP) prediction sequence through
 * the HEVC IPP pipeline and samples HEVCD_IPP_DBG_DATA; used by the
 * power-on workaround loop to detect a failed bring-up. The register
 * write order below is a hardware init sequence — do not reorder.
 *
 * Returns true when the sampled debug status reads back as expected
 * (low 2 bits == 1); presumably "pipeline executed the command" — the
 * exact bit semantics come from the HW datasheet.
 */
static bool test_hevc(u32 decomp_addr, u32 us_delay)
{
	int i;

	/* SW_RESET IPP */
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 1);
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 0);

	/* initialize all canvas table */
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0);
	for (i = 0; i < 32; i++)
		WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
			0x1 | (i << 8) | decomp_addr);
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 1);
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR, (0 << 8) | (0<<1) | 1);
	for (i = 0; i < 32; i++)
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);

	/* Initialize mcrcc */
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2);
	WRITE_VREG(HEVCD_MCRCC_CTL2, 0x0);
	WRITE_VREG(HEVCD_MCRCC_CTL3, 0x0);
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);

	/* Decomp initialize */
	WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x0);
	WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);

	/* Frame level initialization */
	WRITE_VREG(HEVCD_IPP_TOP_FRMCONFIG, 0x100 | (0x100 << 16));
	WRITE_VREG(HEVCD_IPP_TOP_TILECONFIG3, 0x0);
	WRITE_VREG(HEVCD_IPP_TOP_LCUCONFIG, 0x1 << 5);
	WRITE_VREG(HEVCD_IPP_BITDEPTH_CONFIG, 0x2 | (0x2 << 2));

	WRITE_VREG(HEVCD_IPP_CONFIG, 0x0);
	WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, 0x0);

	/* Enable SWIMP mode */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_CONFIG, 0x1);

	/* Enable frame */
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 0x2);
	WRITE_VREG(HEVCD_IPP_TOP_FRMCTL, 0x1);

	/* Send SW-command CTB info */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_CTBINFO, 0x1 << 31);

	/* Send PU_command */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO0, (0x4 << 9) | (0x4 << 16));
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO1, 0x1 << 3);
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO2, 0x0);
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO3, 0x0);

	/* give the pipeline time to execute before sampling */
	udelay(us_delay);

	WRITE_VREG(HEVCD_IPP_DBG_SEL, 0x2 << 4);

	return (READ_VREG(HEVCD_IPP_DBG_DATA) & 3) == 1;
}
3186
/*
 * vdec_power_reset() - force every decoder power domain to the OFF state.
 *
 * For VDEC1 and, when present, VDEC2 / HCODEC / HEVC, performs the
 * power-down sequence: enable bus isolation, power down the core's
 * memories (all-ones in the DOS_MEM_PD_* register), then assert the
 * sleep bits in AO_RTI_GEN_PWR_SLEEP0. The isolation -> memory -> sleep
 * order matters for a clean power-down.
 */
void vdec_power_reset(void)
{
	/* enable vdec1 isolation */
	WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
		READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0xc0);
	/* power off vdec1 memories */
	WRITE_VREG(DOS_MEM_PD_VDEC, 0xffffffffUL);
	/* vdec1 power off */
	WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
		READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0xc);

	if (has_vdec2()) {
		/* enable vdec2 isolation */
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x300);
		/* power off vdec2 memories */
		WRITE_VREG(DOS_MEM_PD_VDEC2, 0xffffffffUL);
		/* vdec2 power off */
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0x30);
	}

	if (has_hdec()) {
		/* enable hcodec isolation */
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x30);
		/* power off hcodec memories */
		WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
		/* hcodec power off */
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 3);
	}

	if (has_hevc_vdec()) {
		/* enable hevc isolation */
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0xc00);
		/* power off hevc memories */
		WRITE_VREG(DOS_MEM_PD_HEVC, 0xffffffffUL);
		/* hevc power off */
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0xc0);
	}
}
EXPORT_SYMBOL(vdec_power_reset);
3232
3233void vdec_poweron(enum vdec_type_e core)
3234{
3235 void *decomp_addr = NULL;
3236 dma_addr_t decomp_dma_addr;
3237 u32 decomp_addr_aligned = 0;
3238 int hevc_loop = 0;
3239 int sleep_val, iso_val;
3240 bool is_power_ctrl_ver2 = false;
3241
3242 if (core >= VDEC_MAX)
3243 return;
3244
3245 mutex_lock(&vdec_mutex);
3246
3247 vdec_core->power_ref_count[core]++;
3248 if (vdec_core->power_ref_count[core] > 1) {
3249 mutex_unlock(&vdec_mutex);
3250 return;
3251 }
3252
3253 if (vdec_on(core)) {
3254 mutex_unlock(&vdec_mutex);
3255 return;
3256 }
3257
3258 is_power_ctrl_ver2 =
3259 ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3260 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) ? true : false;
3261
3262 if (hevc_workaround_needed() &&
3263 (core == VDEC_HEVC)) {
3264 decomp_addr = codec_mm_dma_alloc_coherent(MEM_NAME,
3265 SZ_64K + SZ_4K, &decomp_dma_addr, GFP_KERNEL, 0);
3266
3267 if (decomp_addr) {
3268 decomp_addr_aligned = ALIGN(decomp_dma_addr, SZ_64K);
3269 memset((u8 *)decomp_addr +
3270 (decomp_addr_aligned - decomp_dma_addr),
3271 0xff, SZ_4K);
3272 } else
3273 pr_err("vdec: alloc HEVC gxbb decomp buffer failed.\n");
3274 }
3275
3276 if (core == VDEC_1) {
3277 sleep_val = is_power_ctrl_ver2 ? 0x2 : 0xc;
3278 iso_val = is_power_ctrl_ver2 ? 0x2 : 0xc0;
3279
3280 /* vdec1 power on */
3281#ifdef CONFIG_AMLOGIC_POWER
3282 if (is_support_power_ctrl()) {
3283 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3284 mutex_unlock(&vdec_mutex);
3285 pr_err("vdec-1 power on ctrl sleep fail.\n");
3286 return;
3287 }
3288 } else {
3289 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3290 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3291 }
3292#else
3293 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3294 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3295#endif
3296 /* wait 10uS */
3297 udelay(10);
3298 /* vdec1 soft reset */
3299 WRITE_VREG(DOS_SW_RESET0, 0xfffffffc);
3300 WRITE_VREG(DOS_SW_RESET0, 0);
3301 /* enable vdec1 clock */
3302 /*
3303 *add power on vdec clock level setting,only for m8 chip,
3304 * m8baby and m8m2 can dynamic adjust vdec clock,
3305 * power on with default clock level
3306 */
3307 amports_switch_gate("clk_vdec_mux", 1);
3308 vdec_clock_hi_enable();
3309 /* power up vdec memories */
3310 WRITE_VREG(DOS_MEM_PD_VDEC, 0);
3311
3312 /* remove vdec1 isolation */
3313#ifdef CONFIG_AMLOGIC_POWER
3314 if (is_support_power_ctrl()) {
3315 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3316 mutex_unlock(&vdec_mutex);
3317 pr_err("vdec-1 power on ctrl iso fail.\n");
3318 return;
3319 }
3320 } else {
3321 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3322 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3323 }
3324#else
3325 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3326 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3327#endif
3328 /* reset DOS top registers */
3329 WRITE_VREG(DOS_VDEC_MCRCC_STALL_CTRL, 0);
3330 } else if (core == VDEC_2) {
3331 if (has_vdec2()) {
3332 /* vdec2 power on */
3333 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3334 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3335 ~0x30);
3336 /* wait 10uS */
3337 udelay(10);
3338 /* vdec2 soft reset */
3339 WRITE_VREG(DOS_SW_RESET2, 0xffffffff);
3340 WRITE_VREG(DOS_SW_RESET2, 0);
3341 /* enable vdec1 clock */
3342 vdec2_clock_hi_enable();
3343 /* power up vdec memories */
3344 WRITE_VREG(DOS_MEM_PD_VDEC2, 0);
3345 /* remove vdec2 isolation */
3346 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3347 READ_AOREG(AO_RTI_GEN_PWR_ISO0) &
3348 ~0x300);
3349 /* reset DOS top registers */
3350 WRITE_VREG(DOS_VDEC2_MCRCC_STALL_CTRL, 0);
3351 }
3352 } else if (core == VDEC_HCODEC) {
3353 if (has_hdec()) {
3354 sleep_val = is_power_ctrl_ver2 ? 0x1 : 0x3;
3355 iso_val = is_power_ctrl_ver2 ? 0x1 : 0x30;
3356
3357 /* hcodec power on */
3358#ifdef CONFIG_AMLOGIC_POWER
3359 if (is_support_power_ctrl()) {
3360 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3361 mutex_unlock(&vdec_mutex);
3362 pr_err("hcodec power on ctrl sleep fail.\n");
3363 return;
3364 }
3365 } else {
3366 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3367 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3368 }
3369#else
3370 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3371 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3372#endif
3373 /* wait 10uS */
3374 udelay(10);
3375 /* hcodec soft reset */
3376 WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
3377 WRITE_VREG(DOS_SW_RESET1, 0);
3378 /* enable hcodec clock */
3379 hcodec_clock_enable();
3380 /* power up hcodec memories */
3381 WRITE_VREG(DOS_MEM_PD_HCODEC, 0);
3382 /* remove hcodec isolation */
3383#ifdef CONFIG_AMLOGIC_POWER
3384 if (is_support_power_ctrl()) {
3385 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3386 mutex_unlock(&vdec_mutex);
3387 pr_err("hcodec power on ctrl iso fail.\n");
3388 return;
3389 }
3390 } else {
3391 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3392 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3393 }
3394#else
3395 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3396 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3397#endif
3398 }
3399 } else if (core == VDEC_HEVC) {
3400 if (has_hevc_vdec()) {
3401 bool hevc_fixed = false;
3402
3403 sleep_val = is_power_ctrl_ver2 ? 0x4 : 0xc0;
3404 iso_val = is_power_ctrl_ver2 ? 0x4 : 0xc00;
3405
3406 while (!hevc_fixed) {
3407 /* hevc power on */
3408#ifdef CONFIG_AMLOGIC_POWER
3409 if (is_support_power_ctrl()) {
3410 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3411 mutex_unlock(&vdec_mutex);
3412 pr_err("hevc power on ctrl sleep fail.\n");
3413 return;
3414 }
3415 } else {
3416 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3417 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3418 }
3419#else
3420 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3421 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3422#endif
3423 /* wait 10uS */
3424 udelay(10);
3425 /* hevc soft reset */
3426 WRITE_VREG(DOS_SW_RESET3, 0xffffffff);
3427 WRITE_VREG(DOS_SW_RESET3, 0);
3428 /* enable hevc clock */
3429 amports_switch_gate("clk_hevc_mux", 1);
3430 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
3431 amports_switch_gate("clk_hevcb_mux", 1);
3432 hevc_clock_hi_enable();
3433 hevc_back_clock_hi_enable();
3434 /* power up hevc memories */
3435 WRITE_VREG(DOS_MEM_PD_HEVC, 0);
3436 /* remove hevc isolation */
3437#ifdef CONFIG_AMLOGIC_POWER
3438 if (is_support_power_ctrl()) {
3439 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3440 mutex_unlock(&vdec_mutex);
3441 pr_err("hevc power on ctrl iso fail.\n");
3442 return;
3443 }
3444 } else {
3445 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3446 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3447 }
3448#else
3449 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3450 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3451#endif
3452 if (!hevc_workaround_needed())
3453 break;
3454
3455 if (decomp_addr)
3456 hevc_fixed = test_hevc(
3457 decomp_addr_aligned, 20);
3458
3459 if (!hevc_fixed) {
3460 hevc_loop++;
3461
3462 mutex_unlock(&vdec_mutex);
3463
3464 if (hevc_loop >= HEVC_TEST_LIMIT) {
3465 pr_warn("hevc power sequence over limit\n");
3466 pr_warn("=====================================================\n");
3467 pr_warn(" This chip is identified to have HW failure.\n");
3468 pr_warn(" Please contact sqa-platform to replace the platform.\n");
3469 pr_warn("=====================================================\n");
3470
3471 panic("Force panic for chip detection !!!\n");
3472
3473 break;
3474 }
3475
3476 vdec_poweroff(VDEC_HEVC);
3477
3478 mdelay(10);
3479
3480 mutex_lock(&vdec_mutex);
3481 }
3482 }
3483
3484 if (hevc_loop > hevc_max_reset_count)
3485 hevc_max_reset_count = hevc_loop;
3486
3487 WRITE_VREG(DOS_SW_RESET3, 0xffffffff);
3488 udelay(10);
3489 WRITE_VREG(DOS_SW_RESET3, 0);
3490 }
3491 }
3492
3493 if (decomp_addr)
3494 codec_mm_dma_free_coherent(MEM_NAME,
3495 SZ_64K + SZ_4K, decomp_addr, decomp_dma_addr, 0);
3496
3497 mutex_unlock(&vdec_mutex);
3498}
3499EXPORT_SYMBOL(vdec_poweron);
3500
/*
 * vdec_poweroff() - drop one power reference on @core and, when the last
 * reference goes away, run the hardware power-down sequence for that core
 * (raise isolation -> power down memories -> gate clocks -> power gate).
 *
 * Counterpart of vdec_poweron(); serialized by vdec_mutex.
 *
 * NOTE(review): when a power_ctrl_* call fails, this returns after the
 * refcount was already decremented, leaving the core partially powered —
 * confirm this is the intended error policy.
 */
void vdec_poweroff(enum vdec_type_e core)
{
	int sleep_val, iso_val;
	bool is_power_ctrl_ver2 = false;

	if (core >= VDEC_MAX)
		return;

	mutex_lock(&vdec_mutex);

	vdec_core->power_ref_count[core]--;
	if (vdec_core->power_ref_count[core] > 0) {
		/* other users still hold this core powered */
		mutex_unlock(&vdec_mutex);
		return;
	}

	/* SM1 and newer (except TL1) use the v2 power-control bit layout */
	is_power_ctrl_ver2 =
		((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
		(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) ? true : false;

	if (core == VDEC_1) {
		sleep_val = is_power_ctrl_ver2 ? 0x2 : 0xc;
		iso_val = is_power_ctrl_ver2 ? 0x2 : 0xc0;

		/* enable vdec1 isolation */
#ifdef CONFIG_AMLOGIC_POWER
		if (is_support_power_ctrl()) {
			if (power_ctrl_iso_mask(false, iso_val, 0)) {
				mutex_unlock(&vdec_mutex);
				pr_err("vdec-1 power off ctrl iso fail.\n");
				return;
			}
		} else {
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
		}
#else
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
		/* power off vdec1 memories */
		WRITE_VREG(DOS_MEM_PD_VDEC, 0xffffffffUL);
		/* disable vdec1 clock */
		vdec_clock_off();
		/* vdec1 power off */
#ifdef CONFIG_AMLOGIC_POWER
		if (is_support_power_ctrl()) {
			if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
				mutex_unlock(&vdec_mutex);
				pr_err("vdec-1 power off ctrl sleep fail.\n");
				return;
			}
		} else {
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
		}
#else
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
	} else if (core == VDEC_2) {
		if (has_vdec2()) {
			/* enable vdec2 isolation */
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) |
				0x300);
			/* power off vdec2 memories */
			WRITE_VREG(DOS_MEM_PD_VDEC2, 0xffffffffUL);
			/* disable vdec2 clock */
			vdec2_clock_off();
			/* vdec2 power off */
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) |
				0x30);
		}
	} else if (core == VDEC_HCODEC) {
		if (has_hdec()) {
			sleep_val = is_power_ctrl_ver2 ? 0x1 : 0x3;
			iso_val = is_power_ctrl_ver2 ? 0x1 : 0x30;

			/* enable hcodec isolation */
#ifdef CONFIG_AMLOGIC_POWER
			if (is_support_power_ctrl()) {
				if (power_ctrl_iso_mask(false, iso_val, 0)) {
					mutex_unlock(&vdec_mutex);
					pr_err("hcodec power off ctrl iso fail.\n");
					return;
				}
			} else {
				WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
					READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
			}
#else
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
			/* power off hcodec memories */
			WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
			/* disable hcodec clock */
			hcodec_clock_off();
			/* hcodec power off */
#ifdef CONFIG_AMLOGIC_POWER
			if (is_support_power_ctrl()) {
				if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
					mutex_unlock(&vdec_mutex);
					pr_err("hcodec power off ctrl sleep fail.\n");
					return;
				}
			} else {
				WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
					READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
			}
#else
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
		}
	} else if (core == VDEC_HEVC) {
		if (has_hevc_vdec()) {
			sleep_val = is_power_ctrl_ver2 ? 0x4 : 0xc0;
			iso_val = is_power_ctrl_ver2 ? 0x4 : 0xc00;

			if (no_powerdown == 0) {
				/* enable hevc isolation */
#ifdef CONFIG_AMLOGIC_POWER
				if (is_support_power_ctrl()) {
					if (power_ctrl_iso_mask(false, iso_val, 0)) {
						mutex_unlock(&vdec_mutex);
						pr_err("hevc power off ctrl iso fail.\n");
						return;
					}
				} else {
					WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
						READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
				}
#else
				WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
					READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
				/* power off hevc memories */
				WRITE_VREG(DOS_MEM_PD_HEVC, 0xffffffffUL);

				/* disable hevc clock */
				hevc_clock_off();
				if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
					hevc_back_clock_off();

				/* hevc power off */
#ifdef CONFIG_AMLOGIC_POWER
				if (is_support_power_ctrl()) {
					if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
						mutex_unlock(&vdec_mutex);
						pr_err("hevc power off ctrl sleep fail.\n");
						return;
					}
				} else {
					WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
						READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
				}
#else
				WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
					READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
			} else {
				/* one-shot skip of the power gate: just reset
				 * the core and clear the flag */
				pr_info("!!!!!!!!not power down\n");
				hevc_reset_core(NULL);
				no_powerdown = 0;
			}
		}
	}
	mutex_unlock(&vdec_mutex);
}
EXPORT_SYMBOL(vdec_poweroff);
3674
3675bool vdec_on(enum vdec_type_e core)
3676{
3677 bool ret = false;
3678
3679 if (core == VDEC_1) {
3680 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3681 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3682 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3683 ? 0x2 : 0xc)) == 0) &&
3684 (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x100))
3685 ret = true;
3686 } else if (core == VDEC_2) {
3687 if (has_vdec2()) {
3688 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & 0x30) == 0) &&
3689 (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x100))
3690 ret = true;
3691 }
3692 } else if (core == VDEC_HCODEC) {
3693 if (has_hdec()) {
3694 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3695 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3696 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3697 ? 0x1 : 0x3)) == 0) &&
3698 (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x1000000))
3699 ret = true;
3700 }
3701 } else if (core == VDEC_HEVC) {
3702 if (has_hevc_vdec()) {
3703 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3704 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3705 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3706 ? 0x4 : 0xc0)) == 0) &&
3707 (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x1000000))
3708 ret = true;
3709 }
3710 }
3711
3712 return ret;
3713}
3714EXPORT_SYMBOL(vdec_on);
3715
3716#elif 0 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON6TVD */
3717void vdec_poweron(enum vdec_type_e core)
3718{
3719 ulong flags;
3720
3721 spin_lock_irqsave(&lock, flags);
3722
3723 if (core == VDEC_1) {
3724 /* vdec1 soft reset */
3725 WRITE_VREG(DOS_SW_RESET0, 0xfffffffc);
3726 WRITE_VREG(DOS_SW_RESET0, 0);
3727 /* enable vdec1 clock */
3728 vdec_clock_enable();
3729 /* reset DOS top registers */
3730 WRITE_VREG(DOS_VDEC_MCRCC_STALL_CTRL, 0);
3731 } else if (core == VDEC_2) {
3732 /* vdec2 soft reset */
3733 WRITE_VREG(DOS_SW_RESET2, 0xffffffff);
3734 WRITE_VREG(DOS_SW_RESET2, 0);
3735 /* enable vdec2 clock */
3736 vdec2_clock_enable();
3737 /* reset DOS top registers */
3738 WRITE_VREG(DOS_VDEC2_MCRCC_STALL_CTRL, 0);
3739 } else if (core == VDEC_HCODEC) {
3740 /* hcodec soft reset */
3741 WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
3742 WRITE_VREG(DOS_SW_RESET1, 0);
3743 /* enable hcodec clock */
3744 hcodec_clock_enable();
3745 }
3746
3747 spin_unlock_irqrestore(&lock, flags);
3748}
3749
3750void vdec_poweroff(enum vdec_type_e core)
3751{
3752 ulong flags;
3753
3754 spin_lock_irqsave(&lock, flags);
3755
3756 if (core == VDEC_1) {
3757 /* disable vdec1 clock */
3758 vdec_clock_off();
3759 } else if (core == VDEC_2) {
3760 /* disable vdec2 clock */
3761 vdec2_clock_off();
3762 } else if (core == VDEC_HCODEC) {
3763 /* disable hcodec clock */
3764 hcodec_clock_off();
3765 }
3766
3767 spin_unlock_irqrestore(&lock, flags);
3768}
3769
3770bool vdec_on(enum vdec_type_e core)
3771{
3772 bool ret = false;
3773
3774 if (core == VDEC_1) {
3775 if (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x100)
3776 ret = true;
3777 } else if (core == VDEC_2) {
3778 if (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x100)
3779 ret = true;
3780 } else if (core == VDEC_HCODEC) {
3781 if (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x1000000)
3782 ret = true;
3783 }
3784
3785 return ret;
3786}
3787#endif
3788
3789int vdec_source_changed(int format, int width, int height, int fps)
3790{
3791 /* todo: add level routines for clock adjustment per chips */
3792 int ret = -1;
3793 static int on_setting;
3794
3795 if (on_setting > 0)
3796 return ret;/*on changing clk,ignore this change*/
3797
3798 if (vdec_source_get(VDEC_1) == width * height * fps)
3799 return ret;
3800
3801
3802 on_setting = 1;
3803 ret = vdec_source_changed_for_clk_set(format, width, height, fps);
3804 pr_debug("vdec1 video changed to %d x %d %d fps clk->%dMHZ\n",
3805 width, height, fps, vdec_clk_get(VDEC_1));
3806 on_setting = 0;
3807 return ret;
3808
3809}
3810EXPORT_SYMBOL(vdec_source_changed);
3811
/*
 * vdec_reset_core() - hard-reset the VDEC hardware core.
 *
 * Sequence: block new DMC (memory controller) requests from the DOS
 * VDEC interface, wait for in-flight transactions to drain, pulse the
 * DOS_SW_RESET0 bits, then re-enable DMC requests.  @vdec is unused.
 *
 * NOTE(review): the drain loop below has no timeout — it spins forever
 * if the DMC channel never reports idle.
 */
void vdec_reset_core(struct vdec_s *vdec)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 13; /*bit13: DOS VDEC interface*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask = 1 << 21; /*bit21: DOS VDEC interface*/

	/* stop accepting new vdec requests on the DMC bus */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* wait until the DMC channel reports idle for this interface */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;
	/*
	 * DOS_SW_RESET0 bit map:
	 * 2: assist
	 * 3: vld_reset
	 * 4: vld_part_reset
	 * 5: vfifo reset
	 * 6: iqidct
	 * 7: mc
	 * 8: dblk
	 * 9: pic_dc
	 * 10: psc
	 * 11: mcpu
	 * 12: ccpu
	 * 13: ddr
	 * 14: afifo
	 */
	if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
		(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) {
		WRITE_VREG(DOS_SW_RESET0, (1<<3)|(1<<4)|(1<<5)|(1<<6)|(1<<7)|(1<<8)|(1<<9));
	} else {
		WRITE_VREG(DOS_SW_RESET0,
			(1<<3)|(1<<4)|(1<<5));
	}
	WRITE_VREG(DOS_SW_RESET0, 0);

	/* re-enable DMC requests for the vdec interface */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);
}
EXPORT_SYMBOL(vdec_reset_core);
3859
3860void hevc_mmu_dma_check(struct vdec_s *vdec)
3861{
3862 ulong timeout;
3863 u32 data;
3864 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A)
3865 return;
3866 timeout = jiffies + HZ/100;
3867 while (1) {
3868 data = READ_VREG(HEVC_CM_CORE_STATUS);
3869 if ((data & 0x1) == 0)
3870 break;
3871 if (time_after(jiffies, timeout)) {
3872 if (debug & 0x10)
3873 pr_info(" %s sao mmu dma idle\n", __func__);
3874 break;
3875 }
3876 }
3877 /*disable sao mmu dma */
3878 CLEAR_VREG_MASK(HEVC_SAO_MMU_DMA_CTRL, 1 << 0);
3879 timeout = jiffies + HZ/100;
3880 while (1) {
3881 data = READ_VREG(HEVC_SAO_MMU_DMA_STATUS);
3882 if ((data & 0x1))
3883 break;
3884 if (time_after(jiffies, timeout)) {
3885 if (debug & 0x10)
3886 pr_err("%s sao mmu dma timeout, num_buf_used = 0x%x\n",
3887 __func__, (READ_VREG(HEVC_SAO_MMU_STATUS) >> 16));
3888 break;
3889 }
3890 }
3891}
3892EXPORT_SYMBOL(hevc_mmu_dma_check);
3893
/*
 * hevc_reset_core() - hard-reset the HEVC (and, on G12A+, HEVCB)
 * hardware core.
 *
 * Mirrors vdec_reset_core(): gate DMC requests from the hevc
 * interface(s), drain in-flight transactions, pulse DOS_SW_RESET3,
 * then re-enable DMC requests.  For a NULL or frame-based @vdec the
 * stream control register is cleared again after the drain.
 *
 * NOTE(review): the drain loop has no timeout, and with two mask bits
 * set it proceeds as soon as EITHER channel reports idle — confirm
 * that matches the hardware's intent.
 */
void hevc_reset_core(struct vdec_s *vdec)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 4; /*bit4: hevc*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask |= 1 << 8; /*bit8: hevcb*/

	WRITE_VREG(HEVC_STREAM_CONTROL, 0);
	/* block new DMC requests from the hevc interface(s) */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* wait for the DMC channel status to report idle */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;

	if (vdec == NULL || input_frame_based(vdec))
		WRITE_VREG(HEVC_STREAM_CONTROL, 0);

	/*
	 * DOS_SW_RESET3 bit map:
	 * 2: assist
	 * 3: parser
	 * 4: parser_state
	 * 8: dblk
	 * 11:mcpu
	 * 12:ccpu
	 * 13:ddr
	 * 14:iqit
	 * 15:ipp
	 * 17:qdct
	 * 18:mpred
	 * 19:sao
	 * 24:hevc_afifo
	 */
	WRITE_VREG(DOS_SW_RESET3,
		(1<<3)|(1<<4)|(1<<8)|(1<<11)|
		(1<<12)|(1<<13)|(1<<14)|(1<<15)|
		(1<<17)|(1<<18)|(1<<19)|(1<<24));

	WRITE_VREG(DOS_SW_RESET3, 0);


	/* restore DMC request enables */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

}
EXPORT_SYMBOL(hevc_reset_core);
3946
3947int vdec2_source_changed(int format, int width, int height, int fps)
3948{
3949 int ret = -1;
3950 static int on_setting;
3951
3952 if (has_vdec2()) {
3953 /* todo: add level routines for clock adjustment per chips */
3954 if (on_setting != 0)
3955 return ret;/*on changing clk,ignore this change*/
3956
3957 if (vdec_source_get(VDEC_2) == width * height * fps)
3958 return ret;
3959
3960 on_setting = 1;
3961 ret = vdec_source_changed_for_clk_set(format,
3962 width, height, fps);
3963 pr_debug("vdec2 video changed to %d x %d %d fps clk->%dMHZ\n",
3964 width, height, fps, vdec_clk_get(VDEC_2));
3965 on_setting = 0;
3966 return ret;
3967 }
3968 return 0;
3969}
3970EXPORT_SYMBOL(vdec2_source_changed);
3971
3972int hevc_source_changed(int format, int width, int height, int fps)
3973{
3974 /* todo: add level routines for clock adjustment per chips */
3975 int ret = -1;
3976 static int on_setting;
3977
3978 if (on_setting != 0)
3979 return ret;/*on changing clk,ignore this change*/
3980
3981 if (vdec_source_get(VDEC_HEVC) == width * height * fps)
3982 return ret;
3983
3984 on_setting = 1;
3985 ret = vdec_source_changed_for_clk_set(format, width, height, fps);
3986 pr_debug("hevc video changed to %d x %d %d fps clk->%dMHZ\n",
3987 width, height, fps, vdec_clk_get(VDEC_HEVC));
3988 on_setting = 0;
3989
3990 return ret;
3991}
3992EXPORT_SYMBOL(hevc_source_changed);
3993
/*
 * Register name/offset table dumped by amrisc_regs_show: the amrisc
 * MCPU/CCPU control registers plus the AV_SCRATCH mailbox registers.
 */
static struct am_reg am_risc[] = {
	{"MSP", 0x300},
	{"MPSR", 0x301},
	{"MCPU_INT_BASE", 0x302},
	{"MCPU_INTR_GRP", 0x303},
	{"MCPU_INTR_MSK", 0x304},
	{"MCPU_INTR_REQ", 0x305},
	{"MPC-P", 0x306},
	{"MPC-D", 0x307},
	{"MPC_E", 0x308},
	{"MPC_W", 0x309},
	{"CSP", 0x320},
	{"CPSR", 0x321},
	{"CCPU_INT_BASE", 0x322},
	{"CCPU_INTR_GRP", 0x323},
	{"CCPU_INTR_MSK", 0x324},
	{"CCPU_INTR_REQ", 0x325},
	{"CPC-P", 0x326},
	{"CPC-D", 0x327},
	{"CPC_E", 0x328},
	{"CPC_W", 0x329},
	{"AV_SCRATCH_0", 0x09c0},
	{"AV_SCRATCH_1", 0x09c1},
	{"AV_SCRATCH_2", 0x09c2},
	{"AV_SCRATCH_3", 0x09c3},
	{"AV_SCRATCH_4", 0x09c4},
	{"AV_SCRATCH_5", 0x09c5},
	{"AV_SCRATCH_6", 0x09c6},
	{"AV_SCRATCH_7", 0x09c7},
	{"AV_SCRATCH_8", 0x09c8},
	{"AV_SCRATCH_9", 0x09c9},
	{"AV_SCRATCH_A", 0x09ca},
	{"AV_SCRATCH_B", 0x09cb},
	{"AV_SCRATCH_C", 0x09cc},
	{"AV_SCRATCH_D", 0x09cd},
	{"AV_SCRATCH_E", 0x09ce},
	{"AV_SCRATCH_F", 0x09cf},
	{"AV_SCRATCH_G", 0x09d0},
	{"AV_SCRATCH_H", 0x09d1},
	{"AV_SCRATCH_I", 0x09d2},
	{"AV_SCRATCH_J", 0x09d3},
	{"AV_SCRATCH_K", 0x09d4},
	{"AV_SCRATCH_L", 0x09d5},
	{"AV_SCRATCH_M", 0x09d6},
	{"AV_SCRATCH_N", 0x09d7},
};
4040
4041static ssize_t amrisc_regs_show(struct class *class,
4042 struct class_attribute *attr, char *buf)
4043{
4044 char *pbuf = buf;
4045 struct am_reg *regs = am_risc;
4046 int rsize = sizeof(am_risc) / sizeof(struct am_reg);
4047 int i;
4048 unsigned int val;
4049 ssize_t ret;
4050
4051 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
4052 mutex_lock(&vdec_mutex);
4053 if (!vdec_on(VDEC_1)) {
4054 mutex_unlock(&vdec_mutex);
4055 pbuf += sprintf(pbuf, "amrisc is power off\n");
4056 ret = pbuf - buf;
4057 return ret;
4058 }
4059 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4060 /*TODO:M6 define */
4061 /*
4062 * switch_mod_gate_by_type(MOD_VDEC, 1);
4063 */
4064 amports_switch_gate("vdec", 1);
4065 }
4066 pbuf += sprintf(pbuf, "amrisc registers show:\n");
4067 for (i = 0; i < rsize; i++) {
4068 val = READ_VREG(regs[i].offset);
4069 pbuf += sprintf(pbuf, "%s(%#x)\t:%#x(%d)\n",
4070 regs[i].name, regs[i].offset, val, val);
4071 }
4072 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
4073 mutex_unlock(&vdec_mutex);
4074 else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4075 /*TODO:M6 define */
4076 /*
4077 * switch_mod_gate_by_type(MOD_VDEC, 0);
4078 */
4079 amports_switch_gate("vdec", 0);
4080 }
4081 ret = pbuf - buf;
4082 return ret;
4083}
4084
4085static ssize_t dump_trace_show(struct class *class,
4086 struct class_attribute *attr, char *buf)
4087{
4088 int i;
4089 char *pbuf = buf;
4090 ssize_t ret;
4091 u16 *trace_buf = kmalloc(debug_trace_num * 2, GFP_KERNEL);
4092
4093 if (!trace_buf) {
4094 pbuf += sprintf(pbuf, "No Memory bug\n");
4095 ret = pbuf - buf;
4096 return ret;
4097 }
4098 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
4099 mutex_lock(&vdec_mutex);
4100 if (!vdec_on(VDEC_1)) {
4101 mutex_unlock(&vdec_mutex);
4102 kfree(trace_buf);
4103 pbuf += sprintf(pbuf, "amrisc is power off\n");
4104 ret = pbuf - buf;
4105 return ret;
4106 }
4107 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4108 /*TODO:M6 define */
4109 /*
4110 * switch_mod_gate_by_type(MOD_VDEC, 1);
4111 */
4112 amports_switch_gate("vdec", 1);
4113 }
4114 pr_info("dump trace steps:%d start\n", debug_trace_num);
4115 i = 0;
4116 while (i <= debug_trace_num - 16) {
4117 trace_buf[i] = READ_VREG(MPC_E);
4118 trace_buf[i + 1] = READ_VREG(MPC_E);
4119 trace_buf[i + 2] = READ_VREG(MPC_E);
4120 trace_buf[i + 3] = READ_VREG(MPC_E);
4121 trace_buf[i + 4] = READ_VREG(MPC_E);
4122 trace_buf[i + 5] = READ_VREG(MPC_E);
4123 trace_buf[i + 6] = READ_VREG(MPC_E);
4124 trace_buf[i + 7] = READ_VREG(MPC_E);
4125 trace_buf[i + 8] = READ_VREG(MPC_E);
4126 trace_buf[i + 9] = READ_VREG(MPC_E);
4127 trace_buf[i + 10] = READ_VREG(MPC_E);
4128 trace_buf[i + 11] = READ_VREG(MPC_E);
4129 trace_buf[i + 12] = READ_VREG(MPC_E);
4130 trace_buf[i + 13] = READ_VREG(MPC_E);
4131 trace_buf[i + 14] = READ_VREG(MPC_E);
4132 trace_buf[i + 15] = READ_VREG(MPC_E);
4133 i += 16;
4134 };
4135 pr_info("dump trace steps:%d finished\n", debug_trace_num);
4136 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
4137 mutex_unlock(&vdec_mutex);
4138 else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4139 /*TODO:M6 define */
4140 /*
4141 * switch_mod_gate_by_type(MOD_VDEC, 0);
4142 */
4143 amports_switch_gate("vdec", 0);
4144 }
4145 for (i = 0; i < debug_trace_num; i++) {
4146 if (i % 4 == 0) {
4147 if (i % 16 == 0)
4148 pbuf += sprintf(pbuf, "\n");
4149 else if (i % 8 == 0)
4150 pbuf += sprintf(pbuf, " ");
4151 else /* 4 */
4152 pbuf += sprintf(pbuf, " ");
4153 }
4154 pbuf += sprintf(pbuf, "%04x:", trace_buf[i]);
4155 }
4156 while (i < debug_trace_num)
4157 ;
4158 kfree(trace_buf);
4159 pbuf += sprintf(pbuf, "\n");
4160 ret = pbuf - buf;
4161 return ret;
4162}
4163
4164static ssize_t clock_level_show(struct class *class,
4165 struct class_attribute *attr, char *buf)
4166{
4167 char *pbuf = buf;
4168 size_t ret;
4169
4170 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_1));
4171
4172 if (has_vdec2())
4173 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_2));
4174
4175 if (has_hevc_vdec())
4176 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_HEVC));
4177
4178 ret = pbuf - buf;
4179 return ret;
4180}
4181
4182static ssize_t store_poweron_clock_level(struct class *class,
4183 struct class_attribute *attr,
4184 const char *buf, size_t size)
4185{
4186 unsigned int val;
4187 ssize_t ret;
4188
4189 /*ret = sscanf(buf, "%d", &val);*/
4190 ret = kstrtoint(buf, 0, &val);
4191
4192 if (ret != 0)
4193 return -EINVAL;
4194 poweron_clock_level = val;
4195 return size;
4196}
4197
4198static ssize_t show_poweron_clock_level(struct class *class,
4199 struct class_attribute *attr, char *buf)
4200{
4201 return sprintf(buf, "%d\n", poweron_clock_level);
4202}
4203
4204/*
4205 *if keep_vdec_mem == 1
4206 *always don't release
4207 *vdec 64 memory for fast play.
4208 */
4209static ssize_t store_keep_vdec_mem(struct class *class,
4210 struct class_attribute *attr,
4211 const char *buf, size_t size)
4212{
4213 unsigned int val;
4214 ssize_t ret;
4215
4216 /*ret = sscanf(buf, "%d", &val);*/
4217 ret = kstrtoint(buf, 0, &val);
4218 if (ret != 0)
4219 return -EINVAL;
4220 keep_vdec_mem = val;
4221 return size;
4222}
4223
4224static ssize_t show_keep_vdec_mem(struct class *class,
4225 struct class_attribute *attr, char *buf)
4226{
4227 return sprintf(buf, "%d\n", keep_vdec_mem);
4228}
4229
4230#ifdef VDEC_DEBUG_SUPPORT
4231static ssize_t store_debug(struct class *class,
4232 struct class_attribute *attr,
4233 const char *buf, size_t size)
4234{
4235 struct vdec_s *vdec;
4236 struct vdec_core_s *core = vdec_core;
4237 unsigned long flags;
4238
4239 unsigned id;
4240 unsigned val;
4241 ssize_t ret;
4242 char cbuf[32];
4243
4244 cbuf[0] = 0;
4245 ret = sscanf(buf, "%s %x %x", cbuf, &id, &val);
4246 /*pr_info(
4247 "%s(%s)=>ret %ld: %s, %x, %x\n",
4248 __func__, buf, ret, cbuf, id, val);*/
4249 if (strcmp(cbuf, "schedule") == 0) {
4250 pr_info("VDEC_DEBUG: force schedule\n");
4251 up(&core->sem);
4252 } else if (strcmp(cbuf, "power_off") == 0) {
4253 pr_info("VDEC_DEBUG: power off core %d\n", id);
4254 vdec_poweroff(id);
4255 } else if (strcmp(cbuf, "power_on") == 0) {
4256 pr_info("VDEC_DEBUG: power_on core %d\n", id);
4257 vdec_poweron(id);
4258 } else if (strcmp(cbuf, "wr") == 0) {
4259 pr_info("VDEC_DEBUG: WRITE_VREG(0x%x, 0x%x)\n",
4260 id, val);
4261 WRITE_VREG(id, val);
4262 } else if (strcmp(cbuf, "rd") == 0) {
4263 pr_info("VDEC_DEBUG: READ_VREG(0x%x) = 0x%x\n",
4264 id, READ_VREG(id));
4265 } else if (strcmp(cbuf, "read_hevc_clk_reg") == 0) {
4266 pr_info(
4267 "VDEC_DEBUG: HHI_VDEC4_CLK_CNTL = 0x%x, HHI_VDEC2_CLK_CNTL = 0x%x\n",
4268 READ_HHI_REG(HHI_VDEC4_CLK_CNTL),
4269 READ_HHI_REG(HHI_VDEC2_CLK_CNTL));
4270 }
4271
4272 flags = vdec_core_lock(vdec_core);
4273
4274 list_for_each_entry(vdec,
4275 &core->connected_vdec_list, list) {
4276 pr_info("vdec: status %d, id %d\n", vdec->status, vdec->id);
4277 if (((vdec->status == VDEC_STATUS_CONNECTED
4278 || vdec->status == VDEC_STATUS_ACTIVE)) &&
4279 (vdec->id == id)) {
4280 /*to add*/
4281 break;
4282 }
4283 }
4284 vdec_core_unlock(vdec_core, flags);
4285 return size;
4286}
4287
4288static ssize_t show_debug(struct class *class,
4289 struct class_attribute *attr, char *buf)
4290{
4291 char *pbuf = buf;
4292 struct vdec_s *vdec;
4293 struct vdec_core_s *core = vdec_core;
4294 unsigned long flags = vdec_core_lock(vdec_core);
4295 u64 tmp;
4296
4297 pbuf += sprintf(pbuf,
4298 "============== help:\n");
4299 pbuf += sprintf(pbuf,
4300 "'echo xxx > debug' usuage:\n");
4301 pbuf += sprintf(pbuf,
4302 "schedule - trigger schedule thread to run\n");
4303 pbuf += sprintf(pbuf,
4304 "power_off core_num - call vdec_poweroff(core_num)\n");
4305 pbuf += sprintf(pbuf,
4306 "power_on core_num - call vdec_poweron(core_num)\n");
4307 pbuf += sprintf(pbuf,
4308 "wr adr val - call WRITE_VREG(adr, val)\n");
4309 pbuf += sprintf(pbuf,
4310 "rd adr - call READ_VREG(adr)\n");
4311 pbuf += sprintf(pbuf,
4312 "read_hevc_clk_reg - read HHI register for hevc clk\n");
4313 pbuf += sprintf(pbuf,
4314 "===================\n");
4315
4316 pbuf += sprintf(pbuf,
4317 "name(core)\tschedule_count\trun_count\tinput_underrun\tdecbuf_not_ready\trun_time\n");
4318 list_for_each_entry(vdec,
4319 &core->connected_vdec_list, list) {
4320 enum vdec_type_e type;
4321 if ((vdec->status == VDEC_STATUS_CONNECTED
4322 || vdec->status == VDEC_STATUS_ACTIVE)) {
4323 for (type = VDEC_1; type < VDEC_MAX; type++) {
4324 if (vdec->core_mask & (1 << type)) {
4325 pbuf += sprintf(pbuf, "%s(%d):",
4326 vdec->vf_provider_name, type);
4327 pbuf += sprintf(pbuf, "\t%d",
4328 vdec->check_count[type]);
4329 pbuf += sprintf(pbuf, "\t%d",
4330 vdec->run_count[type]);
4331 pbuf += sprintf(pbuf, "\t%d",
4332 vdec->input_underrun_count[type]);
4333 pbuf += sprintf(pbuf, "\t%d",
4334 vdec->not_run_ready_count[type]);
4335 tmp = vdec->run_clk[type] * 100;
4336 do_div(tmp, vdec->total_clk[type]);
4337 pbuf += sprintf(pbuf,
4338 "\t%d%%\n",
4339 vdec->total_clk[type] == 0 ? 0 :
4340 (u32)tmp);
4341 }
4342 }
4343 }
4344 }
4345
4346 vdec_core_unlock(vdec_core, flags);
4347 return pbuf - buf;
4348
4349}
4350#endif
4351
4352/*irq num as same as .dts*/
4353/*
4354 * interrupts = <0 3 1
4355 * 0 23 1
4356 * 0 32 1
4357 * 0 43 1
4358 * 0 44 1
4359 * 0 45 1>;
4360 * interrupt-names = "vsync",
4361 * "demux",
4362 * "parser",
4363 * "mailbox_0",
4364 * "mailbox_1",
4365 * "mailbox_2";
4366 */
/*
 * vdec_request_threaded_irq() - lazily request (or re-bind) one of the
 * vdec platform interrupt slots.
 *
 * @num:       logical slot, an index into the platform irq resources
 *             (must be < VDEC_IRQ_MAX)
 * @handler:   hard-irq handler, invoked through the vdec_isr trampoline
 * @thread_fn: optional threaded handler (vdec_thread_isr trampoline);
 *             when non-NULL the line is requested with IRQF_ONESHOT
 * @irqflags:  flags used when no thread_fn is supplied
 * @devname:   name shown in /proc/interrupts
 * @dev:       cookie stored in the slot and passed back to handlers
 *
 * The irq line itself is requested only the first time a slot is used
 * (slot irq still < 0); later calls just swap the handler pointers.
 * Returns 0 on success or a negative errno.
 */
s32 vdec_request_threaded_irq(enum vdec_irq_num num,
			irq_handler_t handler,
			irq_handler_t thread_fn,
			unsigned long irqflags,
			const char *devname, void *dev)
{
	s32 res_irq;
	s32 ret = 0;

	if (num >= VDEC_IRQ_MAX) {
		pr_err("[%s] request irq error, irq num too big!", __func__);
		return -EINVAL;
	}

	if (vdec_core->isr_context[num].irq < 0) {
		/* first use of this slot: resolve and request the line */
		res_irq = platform_get_irq(
			vdec_core->vdec_core_platform_device, num);
		if (res_irq < 0) {
			pr_err("[%s] get irq error!", __func__);
			return -EINVAL;
		}

		vdec_core->isr_context[num].irq = res_irq;
		vdec_core->isr_context[num].dev_isr = handler;
		vdec_core->isr_context[num].dev_threaded_isr = thread_fn;
		vdec_core->isr_context[num].dev_id = dev;

		ret = request_threaded_irq(res_irq,
			vdec_isr,
			vdec_thread_isr,
			(thread_fn) ? IRQF_ONESHOT : irqflags,
			devname,
			&vdec_core->isr_context[num]);

		if (ret) {
			/* roll the slot back to "unused" on failure */
			vdec_core->isr_context[num].irq = -1;
			vdec_core->isr_context[num].dev_isr = NULL;
			vdec_core->isr_context[num].dev_threaded_isr = NULL;
			vdec_core->isr_context[num].dev_id = NULL;

			pr_err("vdec irq register error for %s.\n", devname);
			return -EIO;
		}
	} else {
		/* line already requested: just re-bind the handlers */
		vdec_core->isr_context[num].dev_isr = handler;
		vdec_core->isr_context[num].dev_threaded_isr = thread_fn;
		vdec_core->isr_context[num].dev_id = dev;
	}

	return ret;
}
EXPORT_SYMBOL(vdec_request_threaded_irq);
4419
4420s32 vdec_request_irq(enum vdec_irq_num num, irq_handler_t handler,
4421 const char *devname, void *dev)
4422{
4423 pr_debug("vdec_request_irq %p, %s\n", handler, devname);
4424
4425 return vdec_request_threaded_irq(num,
4426 handler,
4427 NULL,/*no thread_fn*/
4428 IRQF_SHARED,
4429 devname,
4430 dev);
4431}
4432EXPORT_SYMBOL(vdec_request_irq);
4433
4434void vdec_free_irq(enum vdec_irq_num num, void *dev)
4435{
4436 if (num >= VDEC_IRQ_MAX) {
4437 pr_err("[%s] request irq error, irq num too big!", __func__);
4438 return;
4439 }
4440 /*
4441 *assume amrisc is stopped already and there is no mailbox interrupt
4442 * when we reset pointers here.
4443 */
4444 vdec_core->isr_context[num].dev_isr = NULL;
4445 vdec_core->isr_context[num].dev_threaded_isr = NULL;
4446 vdec_core->isr_context[num].dev_id = NULL;
4447 synchronize_irq(vdec_core->isr_context[num].irq);
4448}
4449EXPORT_SYMBOL(vdec_free_irq);
4450
4451struct vdec_s *vdec_get_default_vdec_for_userdata(void)
4452{
4453 struct vdec_s *vdec;
4454 struct vdec_s *ret_vdec;
4455 struct vdec_core_s *core = vdec_core;
4456 unsigned long flags;
4457 int id;
4458
4459 flags = vdec_core_lock(vdec_core);
4460
4461 id = 0x10000000;
4462 ret_vdec = NULL;
4463 if (!list_empty(&core->connected_vdec_list)) {
4464 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4465 if (vdec->id < id) {
4466 id = vdec->id;
4467 ret_vdec = vdec;
4468 }
4469 }
4470 }
4471
4472 vdec_core_unlock(vdec_core, flags);
4473
4474 return ret_vdec;
4475}
4476EXPORT_SYMBOL(vdec_get_default_vdec_for_userdata);
4477
4478struct vdec_s *vdec_get_vdec_by_id(int vdec_id)
4479{
4480 struct vdec_s *vdec;
4481 struct vdec_s *ret_vdec;
4482 struct vdec_core_s *core = vdec_core;
4483 unsigned long flags;
4484
4485 flags = vdec_core_lock(vdec_core);
4486
4487 ret_vdec = NULL;
4488 if (!list_empty(&core->connected_vdec_list)) {
4489 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4490 if (vdec->id == vdec_id) {
4491 ret_vdec = vdec;
4492 break;
4493 }
4494 }
4495 }
4496
4497 vdec_core_unlock(vdec_core, flags);
4498
4499 return ret_vdec;
4500}
4501EXPORT_SYMBOL(vdec_get_vdec_by_id);
4502
4503int vdec_read_user_data(struct vdec_s *vdec,
4504 struct userdata_param_t *p_userdata_param)
4505{
4506 int ret = 0;
4507
4508 if (!vdec)
4509 vdec = vdec_get_default_vdec_for_userdata();
4510
4511 if (vdec) {
4512 if (vdec->user_data_read)
4513 ret = vdec->user_data_read(vdec, p_userdata_param);
4514 }
4515 return ret;
4516}
4517EXPORT_SYMBOL(vdec_read_user_data);
4518
4519int vdec_wakeup_userdata_poll(struct vdec_s *vdec)
4520{
4521 if (vdec) {
4522 if (vdec->wakeup_userdata_poll)
4523 vdec->wakeup_userdata_poll(vdec);
4524 }
4525
4526 return 0;
4527}
4528EXPORT_SYMBOL(vdec_wakeup_userdata_poll);
4529
4530void vdec_reset_userdata_fifo(struct vdec_s *vdec, int bInit)
4531{
4532 if (!vdec)
4533 vdec = vdec_get_default_vdec_for_userdata();
4534
4535 if (vdec) {
4536 if (vdec->reset_userdata_fifo)
4537 vdec->reset_userdata_fifo(vdec, bInit);
4538 }
4539}
4540EXPORT_SYMBOL(vdec_reset_userdata_fifo);
4541
4542static int dump_mode;
4543static ssize_t dump_risc_mem_store(struct class *class,
4544 struct class_attribute *attr,
4545 const char *buf, size_t size)/*set*/
4546{
4547 unsigned int val;
4548 ssize_t ret;
4549 char dump_mode_str[4] = "PRL";
4550
4551 /*ret = sscanf(buf, "%d", &val);*/
4552 ret = kstrtoint(buf, 0, &val);
4553
4554 if (ret != 0)
4555 return -EINVAL;
4556 dump_mode = val & 0x3;
4557 pr_info("set dump mode to %d,%c_mem\n",
4558 dump_mode, dump_mode_str[dump_mode]);
4559 return size;
4560}
/*
 * read_amrisc_reg() - indirect read of one amrisc memory word.
 *
 * Writes the word index to VREG 0x31b and reads the data back from
 * VREG 0x31c. NOTE(review): presumably the memory bank addressed here
 * is selected by the 0x31d write done in the dump_* callers — confirm
 * against the amrisc register documentation.
 */
static u32 read_amrisc_reg(int reg)
{
	WRITE_VREG(0x31b, reg);
	return READ_VREG(0x31c);
}
4566
/*
 * dump_pmem() - print the amrisc program memory to the kernel log.
 *
 * Sets up the indirect-access window (0x301/0x31d) and reads words via
 * read_amrisc_reg(). NOTE(review): the loop bound `i < 0xfff` dumps
 * indices 0..0xffe; if pmem is 0x1000 words the last word is skipped —
 * confirm intended size.
 */
static void dump_pmem(void)
{
	int i;

	WRITE_VREG(0x301, 0x8000);
	WRITE_VREG(0x31d, 0);
	pr_info("start dump amrisc pmem of risc\n");
	for (i = 0; i < 0xfff; i++) {
		/*same as .o format*/
		pr_info("%08x // 0x%04x:\n", read_amrisc_reg(i), i);
	}
}
4579
/*
 * dump_lmem() - print the amrisc local memory to the kernel log.
 *
 * Same indirect-access scheme as dump_pmem(), with 0x31d = 2 selecting
 * lmem. NOTE(review): `i < 0x3ff` dumps indices 0..0x3fe; if lmem is
 * 0x400 words the last one is skipped — confirm intended size.
 */
static void dump_lmem(void)
{
	int i;

	WRITE_VREG(0x301, 0x8000);
	WRITE_VREG(0x31d, 2);
	pr_info("start dump amrisc lmem\n");
	for (i = 0; i < 0x3ff; i++) {
		/*same as */
		pr_info("[%04x] = 0x%08x:\n", i, read_amrisc_reg(i));
	}
}
4592
/*
 * dump_risc_mem_show() - sysfs show: dump the selected amrisc memory.
 *
 * Brackets the dump with the platform-appropriate power/clock handling:
 * on M8+ it holds vdec_mutex and bails out early (without dumping) when
 * VDEC_1 is powered off; on M6..M8 it toggles the "vdec" clock gate
 * around the dump instead. dump_mode (set via dump_risc_mem_store)
 * selects pmem (0) or lmem (2); modes 1 and 3 are no-ops.
 *
 * The dump itself goes to the kernel log; the sysfs buffer only ever
 * receives "done\n" or the power-off message.
 */
static ssize_t dump_risc_mem_show(struct class *class,
			struct class_attribute *attr, char *buf)
{
	char *pbuf = buf;
	int ret;

	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
		mutex_lock(&vdec_mutex);
		if (!vdec_on(VDEC_1)) {
			/* amrisc unpowered: report and return without dumping */
			mutex_unlock(&vdec_mutex);
			pbuf += sprintf(pbuf, "amrisc is power off\n");
			ret = pbuf - buf;
			return ret;
		}
	} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
		/*TODO:M6 define */
		/*
		 * switch_mod_gate_by_type(MOD_VDEC, 1);
		 */
		amports_switch_gate("vdec", 1);
	}
	/*start do**/
	switch (dump_mode) {
	case 0:
		dump_pmem();
		break;
	case 2:
		dump_lmem();
		break;
	default:
		break;
	}

	/*done*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
		mutex_unlock(&vdec_mutex);
	else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
		/*TODO:M6 define */
		/*
		 * switch_mod_gate_by_type(MOD_VDEC, 0);
		 */
		amports_switch_gate("vdec", 0);
	}
	return sprintf(buf, "done\n");
}
4638
4639static ssize_t core_show(struct class *class, struct class_attribute *attr,
4640 char *buf)
4641{
4642 struct vdec_core_s *core = vdec_core;
4643 char *pbuf = buf;
4644
4645 if (list_empty(&core->connected_vdec_list))
4646 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4647 else {
4648 struct vdec_s *vdec;
4649
4650 pbuf += sprintf(pbuf,
4651 " Core: last_sched %p, sched_mask %lx\n",
4652 core->last_vdec,
4653 core->sched_mask);
4654
4655 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4656 pbuf += sprintf(pbuf,
4657 "\tvdec.%d (%p (%s)), status = %s,\ttype = %s, \tactive_mask = %lx\n",
4658 vdec->id,
4659 vdec,
4660 vdec_device_name[vdec->format * 2],
4661 vdec_status_str(vdec),
4662 vdec_type_str(vdec),
4663 vdec->active_mask);
4664 }
4665 }
4666
4667 return pbuf - buf;
4668}
4669
/*
 * vdec_status_show() - sysfs show: per-channel decoder statistics.
 *
 * Walks connected_vdec_list under the core lock and, for each instance
 * that is CONNECTED or ACTIVE, queries vdec_status() into a local
 * vdec_info and formats one statistics section into @buf. Bails out
 * with "err.\n" on the first vdec_status() failure.
 *
 * NOTE(review): all sprintf formatting (and vdec_status()) runs while
 * the core spinlock is held — confirm vdec_status() never sleeps.
 */
static ssize_t vdec_status_show(struct class *class,
			struct class_attribute *attr, char *buf)
{
	char *pbuf = buf;
	struct vdec_s *vdec;
	struct vdec_info vs;
	unsigned char vdec_num = 0;
	struct vdec_core_s *core = vdec_core;
	unsigned long flags = vdec_core_lock(vdec_core);

	if (list_empty(&core->connected_vdec_list)) {
		pbuf += sprintf(pbuf, "No vdec.\n");
		goto out;
	}

	list_for_each_entry(vdec, &core->connected_vdec_list, list) {
		/* only report instances that are actually running */
		if ((vdec->status == VDEC_STATUS_CONNECTED
			|| vdec->status == VDEC_STATUS_ACTIVE)) {
			memset(&vs, 0, sizeof(vs));
			if (vdec_status(vdec, &vs)) {
				pbuf += sprintf(pbuf, "err.\n");
				goto out;
			}
			pbuf += sprintf(pbuf,
				"vdec channel %u statistics:\n",
				vdec_num);
			pbuf += sprintf(pbuf,
				"%13s : %s\n", "device name",
				vs.vdec_name);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame width",
				vs.frame_width);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame height",
				vs.frame_height);
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "frame rate",
				vs.frame_rate, "fps");
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "bit rate",
				vs.bit_rate / 1024 * 8, "kbps");
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "status",
				vs.status);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame dur",
				vs.frame_dur);
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "frame data",
				vs.frame_data / 1024, "KB");
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame count",
				vs.frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "drop count",
				vs.drop_frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "fra err count",
				vs.error_frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "hw err count",
				vs.error_count);
			pbuf += sprintf(pbuf,
				"%13s : %llu %s\n", "total data",
				vs.total_data / 1024, "KB");
			pbuf += sprintf(pbuf,
				"%13s : %x\n\n", "ratio_control",
				vs.ratio_control);

			vdec_num++;
		}
	}
out:
	vdec_core_unlock(vdec_core, flags);
	return pbuf - buf;
}
4746
4747static ssize_t dump_vdec_blocks_show(struct class *class,
4748 struct class_attribute *attr, char *buf)
4749{
4750 struct vdec_core_s *core = vdec_core;
4751 char *pbuf = buf;
4752 unsigned long flags = vdec_core_lock(vdec_core);
4753
4754 if (list_empty(&core->connected_vdec_list))
4755 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4756 else {
4757 struct vdec_s *vdec;
4758 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4759 pbuf += vdec_input_dump_blocks(&vdec->input,
4760 pbuf, PAGE_SIZE - (pbuf - buf));
4761 }
4762 }
4763 vdec_core_unlock(vdec_core, flags);
4764
4765 return pbuf - buf;
4766}
4767static ssize_t dump_vdec_chunks_show(struct class *class,
4768 struct class_attribute *attr, char *buf)
4769{
4770 struct vdec_core_s *core = vdec_core;
4771 char *pbuf = buf;
4772 unsigned long flags = vdec_core_lock(vdec_core);
4773
4774 if (list_empty(&core->connected_vdec_list))
4775 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4776 else {
4777 struct vdec_s *vdec;
4778 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4779 pbuf += vdec_input_dump_chunks(vdec->id, &vdec->input,
4780 pbuf, PAGE_SIZE - (pbuf - buf));
4781 }
4782 }
4783 vdec_core_unlock(vdec_core, flags);
4784
4785 return pbuf - buf;
4786}
4787
4788static ssize_t dump_decoder_state_show(struct class *class,
4789 struct class_attribute *attr, char *buf)
4790{
4791 char *pbuf = buf;
4792 struct vdec_s *vdec;
4793 struct vdec_core_s *core = vdec_core;
4794 unsigned long flags = vdec_core_lock(vdec_core);
4795
4796 if (list_empty(&core->connected_vdec_list)) {
4797 pbuf += sprintf(pbuf, "No vdec.\n");
4798 } else {
4799 list_for_each_entry(vdec,
4800 &core->connected_vdec_list, list) {
4801 if ((vdec->status == VDEC_STATUS_CONNECTED
4802 || vdec->status == VDEC_STATUS_ACTIVE)
4803 && vdec->dump_state)
4804 vdec->dump_state(vdec);
4805 }
4806 }
4807 vdec_core_unlock(vdec_core, flags);
4808
4809 return pbuf - buf;
4810}
4811
4812static ssize_t dump_fps_show(struct class *class,
4813 struct class_attribute *attr, char *buf)
4814{
4815 char *pbuf = buf;
4816 struct vdec_core_s *core = vdec_core;
4817 int i;
4818
4819 unsigned long flags = vdec_fps_lock(vdec_core);
4820 for (i = 0; i < MAX_INSTANCE_MUN; i++)
4821 pbuf += sprintf(pbuf, "%d ", core->decode_fps[i].fps);
4822
4823 pbuf += sprintf(pbuf, "\n");
4824 vdec_fps_unlock(vdec_core, flags);
4825
4826 return pbuf - buf;
4827}
4828
4829
4830
/*
 * Sysfs attributes exposed under /sys/class/vdec/. Read-only entries use
 * the *_show handlers above; read-write ones pair a show with a store.
 * Debug and frame-check entries are compiled in only when the matching
 * feature macro is defined.
 */
static struct class_attribute vdec_class_attrs[] = {
	__ATTR_RO(amrisc_regs),
	__ATTR_RO(dump_trace),
	__ATTR_RO(clock_level),
	__ATTR(poweron_clock_level, S_IRUGO | S_IWUSR | S_IWGRP,
	show_poweron_clock_level, store_poweron_clock_level),
	__ATTR(dump_risc_mem, S_IRUGO | S_IWUSR | S_IWGRP,
	dump_risc_mem_show, dump_risc_mem_store),
	__ATTR(keep_vdec_mem, S_IRUGO | S_IWUSR | S_IWGRP,
	show_keep_vdec_mem, store_keep_vdec_mem),
	__ATTR_RO(core),
	__ATTR_RO(vdec_status),
	__ATTR_RO(dump_vdec_blocks),
	__ATTR_RO(dump_vdec_chunks),
	__ATTR_RO(dump_decoder_state),
#ifdef VDEC_DEBUG_SUPPORT
	__ATTR(debug, S_IRUGO | S_IWUSR | S_IWGRP,
	show_debug, store_debug),
#endif
#ifdef FRAME_CHECK
	__ATTR(dump_yuv, S_IRUGO | S_IWUSR | S_IWGRP,
	dump_yuv_show, dump_yuv_store),
	__ATTR(frame_check, S_IRUGO | S_IWUSR | S_IWGRP,
	frame_check_show, frame_check_store),
#endif
	__ATTR_RO(dump_fps),
	__ATTR_NULL
};
4859
/* device class backing /sys/class/vdec, registered in vdec_probe() */
static struct class vdec_class = {
		.name = "vdec",
		.class_attrs = vdec_class_attrs,
	};
4864
4865struct device *get_vdec_device(void)
4866{
4867 return &vdec_core->vdec_core_platform_device->dev;
4868}
4869EXPORT_SYMBOL(get_vdec_device);
4870
4871static int vdec_probe(struct platform_device *pdev)
4872{
4873 s32 i, r;
4874
4875 vdec_core = (struct vdec_core_s *)devm_kzalloc(&pdev->dev,
4876 sizeof(struct vdec_core_s), GFP_KERNEL);
4877 if (vdec_core == NULL) {
4878 pr_err("vdec core allocation failed.\n");
4879 return -ENOMEM;
4880 }
4881
4882 atomic_set(&vdec_core->vdec_nr, 0);
4883 sema_init(&vdec_core->sem, 1);
4884
4885 r = class_register(&vdec_class);
4886 if (r) {
4887 pr_info("vdec class create fail.\n");
4888 return r;
4889 }
4890
4891 vdec_core->vdec_core_platform_device = pdev;
4892
4893 platform_set_drvdata(pdev, vdec_core);
4894
4895 for (i = 0; i < VDEC_IRQ_MAX; i++) {
4896 vdec_core->isr_context[i].index = i;
4897 vdec_core->isr_context[i].irq = -1;
4898 }
4899
4900 r = vdec_request_threaded_irq(VDEC_IRQ_0, NULL, NULL,
4901 IRQF_ONESHOT, "vdec-0", NULL);
4902 if (r < 0) {
4903 pr_err("vdec interrupt request failed\n");
4904 return r;
4905 }
4906
4907 r = vdec_request_threaded_irq(VDEC_IRQ_1, NULL, NULL,
4908 IRQF_ONESHOT, "vdec-1", NULL);
4909 if (r < 0) {
4910 pr_err("vdec interrupt request failed\n");
4911 return r;
4912 }
4913#if 0
4914 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4915 r = vdec_request_threaded_irq(VDEC_IRQ_HEVC_BACK, NULL, NULL,
4916 IRQF_ONESHOT, "vdec-hevc_back", NULL);
4917 if (r < 0) {
4918 pr_err("vdec interrupt request failed\n");
4919 return r;
4920 }
4921 }
4922#endif
4923 r = of_reserved_mem_device_init(&pdev->dev);
4924 if (r == 0)
4925 pr_info("vdec_probe done\n");
4926
4927 vdec_core->cma_dev = &pdev->dev;
4928
4929 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_M8) {
4930 /* default to 250MHz */
4931 vdec_clock_hi_enable();
4932 }
4933
4934 if (get_cpu_major_id() == AM_MESON_CPU_MAJOR_ID_GXBB) {
4935 /* set vdec dmc request to urgent */
4936 WRITE_DMCREG(DMC_AM5_CHAN_CTRL, 0x3f203cf);
4937 }
4938 INIT_LIST_HEAD(&vdec_core->connected_vdec_list);
4939 spin_lock_init(&vdec_core->lock);
4940 spin_lock_init(&vdec_core->canvas_lock);
4941 spin_lock_init(&vdec_core->fps_lock);
4942 spin_lock_init(&vdec_core->input_lock);
4943 ida_init(&vdec_core->ida);
4944 vdec_core->thread = kthread_run(vdec_core_thread, vdec_core,
4945 "vdec-core");
4946
4947 vdec_core->vdec_core_wq = alloc_ordered_workqueue("%s",__WQ_LEGACY |
4948 WQ_MEM_RECLAIM |WQ_HIGHPRI/*high priority*/, "vdec-work");
4949 /*work queue priority lower than vdec-core.*/
4950 return 0;
4951}
4952
4953static int vdec_remove(struct platform_device *pdev)
4954{
4955 int i;
4956
4957 for (i = 0; i < VDEC_IRQ_MAX; i++) {
4958 if (vdec_core->isr_context[i].irq >= 0) {
4959 free_irq(vdec_core->isr_context[i].irq,
4960 &vdec_core->isr_context[i]);
4961 vdec_core->isr_context[i].irq = -1;
4962 vdec_core->isr_context[i].dev_isr = NULL;
4963 vdec_core->isr_context[i].dev_threaded_isr = NULL;
4964 vdec_core->isr_context[i].dev_id = NULL;
4965 }
4966 }
4967
4968 kthread_stop(vdec_core->thread);
4969
4970 destroy_workqueue(vdec_core->vdec_core_wq);
4971 class_unregister(&vdec_class);
4972
4973 return 0;
4974}
4975
/* devicetree match table; note the historical space in "amlogic, vdec" */
static const struct of_device_id amlogic_vdec_dt_match[] = {
	{
		.compatible = "amlogic, vdec",
	},
	{},
};
4982
/* mconfig-exposed tunables, registered under "media.decoder" at init */
static struct mconfig vdec_configs[] = {
	MC_PU32("debug_trace_num", &debug_trace_num),
	MC_PI32("hevc_max_reset_count", &hevc_max_reset_count),
	MC_PU32("clk_config", &clk_config),
	MC_PI32("step_mode", &step_mode),
	MC_PI32("poweron_clock_level", &poweron_clock_level),
};
static struct mconfig_node vdec_node;
4991
/* platform driver glue: binds vdec_probe/vdec_remove to the DT match */
static struct platform_driver vdec_driver = {
	.probe = vdec_probe,
	.remove = vdec_remove,
	.driver = {
		.name = "vdec",
		.of_match_table = amlogic_vdec_dt_match,
	}
};

/* advertises drm frame-mode input capability to the codec profile list */
static struct codec_profile_t amvdec_input_profile = {
	.name = "vdec_input",
	.profile = "drm_framemode"
};
5005
5006int vdec_module_init(void)
5007{
5008 if (platform_driver_register(&vdec_driver)) {
5009 pr_info("failed to register vdec module\n");
5010 return -ENODEV;
5011 }
5012 INIT_REG_NODE_CONFIGS("media.decoder", &vdec_node,
5013 "vdec", vdec_configs, CONFIG_FOR_RW);
5014 vcodec_profile_register(&amvdec_input_profile);
5015 return 0;
5016}
5017EXPORT_SYMBOL(vdec_module_init);
5018
/*
 * vdec_module_exit() - unregister the vdec platform driver.
 * Counterpart of vdec_module_init(); the mconfig node and codec profile
 * registered there are not torn down here.
 */
void vdec_module_exit(void)
{
	platform_driver_unregister(&vdec_driver);
}
EXPORT_SYMBOL(vdec_module_exit);
5024
/*
 * Dead code: legacy built-in __init/__exit entry points, superseded by
 * the exported vdec_module_init()/vdec_module_exit() above. Kept under
 * #if 0; candidate for deletion.
 */
#if 0
static int __init vdec_module_init(void)
{
	if (platform_driver_register(&vdec_driver)) {
		pr_info("failed to register vdec module\n");
		return -ENODEV;
	}
	INIT_REG_NODE_CONFIGS("media.decoder", &vdec_node,
		"vdec", vdec_configs, CONFIG_FOR_RW);
	return 0;
}

static void __exit vdec_module_exit(void)
{
	platform_driver_unregister(&vdec_driver);
}
#endif
5042
/*
 * vdec_mem_device_init() - reserved_mem_ops hook: record the device that
 * owns the "vdec-memory" reserved region as the CMA allocation device.
 */
static int vdec_mem_device_init(struct reserved_mem *rmem, struct device *dev)
{
	vdec_core->cma_dev = dev;

	return 0;
}
5049
static const struct reserved_mem_ops rmem_vdec_ops = {
	.device_init = vdec_mem_device_init,
};

/*
 * vdec_mem_setup() - early reserved-memory setup callback (see the
 * RESERVEDMEM_OF_DECLARE below); just installs rmem_vdec_ops.
 */
static int __init vdec_mem_setup(struct reserved_mem *rmem)
{
	rmem->ops = &rmem_vdec_ops;
	pr_info("vdec: reserved mem setup\n");

	return 0;
}
5061
/*
 * vdec_fill_frame_info() - push one per-frame QoS record into the
 * frame_info_buf_in ring (QOS_FRAME_NUM entries, frame_qos_wr is the
 * write index, wrapping to 0).
 *
 * @vframe_qos: record to copy in.
 * @debug: unused here.
 *
 * When frameinfo_flag is PRINT_FRAME_INFO the record is also logged;
 * DISABLE_FRAME_INFO suppresses the store entirely.
 *
 * NOTE(review): writer index and buffer are module globals with no
 * locking against vdec_get_qos_info() — presumably single-producer /
 * single-consumer by design; confirm callers.
 */
void vdec_fill_frame_info(struct vframe_qos_s *vframe_qos, int debug)
{
	if (frame_info_buf_in == NULL) {
		pr_info("error,frame_info_buf_in is null\n");
		return;
	}
	if (frame_info_buf_out == NULL) {
		pr_info("error,frame_info_buf_out is null\n");
		return;
	}
	/* wrap the write index before use */
	if (frame_qos_wr >= QOS_FRAME_NUM)
		frame_qos_wr = 0;

	/* defensive re-check; only triggers if the index was corrupted */
	if (frame_qos_wr >= QOS_FRAME_NUM ||
		frame_qos_wr < 0) {
		pr_info("error,index :%d is error\n", frame_qos_wr);
		return;
	}
	if (frameinfo_flag == DISABLE_FRAME_INFO)
		return;

	if (frameinfo_flag == PRINT_FRAME_INFO) {
		pr_info("num %d size %d pts %d\n",
			vframe_qos->num,
			vframe_qos->size,
			vframe_qos->pts);
		pr_info("mv min_mv %d avg_mv %d max_mv %d\n",
			vframe_qos->min_mv,
			vframe_qos->avg_mv,
			vframe_qos->max_mv);
		pr_info("qp min_qp %d avg_qp %d max_qp %d\n",
			vframe_qos->min_qp,
			vframe_qos->avg_qp,
			vframe_qos->max_qp);
		pr_info("skip min_skip %d avg_skip %d max_skip %d\n",
			vframe_qos->min_skip,
			vframe_qos->avg_skip,
			vframe_qos->max_skip);
	}
	/* store and advance; wrap again so the index stays in range */
	memcpy(&frame_info_buf_in[frame_qos_wr++],
			vframe_qos, sizeof(struct vframe_qos_s));
	if (frame_qos_wr >= QOS_FRAME_NUM)
		frame_qos_wr = 0;

	/*pr_info("frame_qos_wr:%d\n", frame_qos_wr);*/

}
EXPORT_SYMBOL(vdec_fill_frame_info);
5110
5111struct vframe_qos_s *vdec_get_qos_info(void)
5112{
5113 int write_count = 0;
5114 int qos_wr = frame_qos_wr;
5115
5116 if (frame_info_buf_in == NULL) {
5117 pr_info("error,frame_info_buf_in is null\n");
5118 return NULL;
5119 }
5120 if (frame_info_buf_out == NULL) {
5121 pr_info("error,frame_info_buf_out is null\n");
5122 return NULL;
5123 }
5124
5125
5126 memset(frame_info_buf_out, 0,
5127 QOS_FRAME_NUM*sizeof(struct vframe_qos_s));
5128 if (frame_qos_rd > qos_wr) {
5129 write_count = QOS_FRAME_NUM - frame_qos_rd;
5130 if (write_count > 0 && write_count <= QOS_FRAME_NUM) {
5131 memcpy(frame_info_buf_out, &frame_info_buf_in[0],
5132 write_count*sizeof(struct vframe_qos_s));
5133 if ((write_count + qos_wr) <= QOS_FRAME_NUM)
5134 memcpy(&frame_info_buf_out[write_count], frame_info_buf_in,
5135 qos_wr*sizeof(struct vframe_qos_s));
5136 else
5137 pr_info("get_qos_info:%d,out of range\n", __LINE__);
5138 } else
5139 pr_info("get_qos_info:%d,out of range\n", __LINE__);
5140 } else if (frame_qos_rd < qos_wr) {
5141 write_count = qos_wr - frame_qos_rd;
5142 if (write_count > 0 && write_count < QOS_FRAME_NUM)
5143 memcpy(frame_info_buf_out, &frame_info_buf_in[frame_qos_rd],
5144 (write_count)*sizeof(struct vframe_qos_s));
5145 else
5146 pr_info("get_qos_info:%d, out of range\n", __LINE__);
5147 }
5148 /*
5149 pr_info("cnt:%d,size:%d,num:%d,rd:%d,wr:%d\n",
5150 wirte_count,
5151 frame_info_buf_out[0].size,
5152 frame_info_buf_out[0].num,
5153 frame_qos_rd,qos_wr);
5154 */
5155 frame_qos_rd = qos_wr;
5156 return frame_info_buf_out;
5157}
5158EXPORT_SYMBOL(vdec_get_qos_info);
5159
5160
/* hook vdec_mem_setup() to the "vdec-memory" reserved-memory DT node */
RESERVEDMEM_OF_DECLARE(vdec, "amlogic, vdec-memory", vdec_mem_setup);
/*
uint force_hevc_clock_cntl;
EXPORT_SYMBOL(force_hevc_clock_cntl);

module_param(force_hevc_clock_cntl, uint, 0664);
*/
/* runtime tunables exposed under /sys/module/.../parameters */
module_param(debug, uint, 0664);
module_param(debug_trace_num, uint, 0664);
module_param(hevc_max_reset_count, int, 0664);
module_param(clk_config, uint, 0664);
module_param(step_mode, int, 0664);
module_param(debugflags, int, 0664);
module_param(parallel_decode, int, 0664);
module_param(fps_detection, int, 0664);
module_param(fps_clear, int, 0664);
module_param(force_nosecure_even_drm, int, 0664);
module_param(disable_switch_single_to_mult, int, 0664);

module_param(frameinfo_flag, int, 0664);
MODULE_PARM_DESC(frameinfo_flag,
	"\n frameinfo_flag\n");
module_param(v4lvideo_add_di, int, 0664);
MODULE_PARM_DESC(v4lvideo_add_di,
	"\n v4lvideo_add_di\n");

module_param(max_di_instance, int, 0664);
MODULE_PARM_DESC(max_di_instance,
	"\n max_di_instance\n");

/*
*module_init(vdec_module_init);
*module_exit(vdec_module_exit);
*/
/* CREATE_TRACE_POINTS must be defined before including the trace header
 * so the tracepoint definitions are emitted in this translation unit
 */
#define CREATE_TRACE_POINTS
#include "vdec_trace.h"
MODULE_DESCRIPTION("AMLOGIC vdec driver");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Tim Yao <timyao@amlogic.com>");