summaryrefslogtreecommitdiff
path: root/drivers/frame_provider/decoder/utils/vdec.c (plain)
blob: f5acd6aa3af1be631165185f11313145a918ab25
1/*
2 * drivers/amlogic/media/frame_provider/decoder/utils/vdec.c
3 *
4 * Copyright (C) 2016 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/spinlock.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/delay.h>
23#include <linux/kthread.h>
24#include <linux/platform_device.h>
25#include <linux/uaccess.h>
26#include <linux/semaphore.h>
27#include <linux/sched/rt.h>
28#include <linux/interrupt.h>
29#include <linux/amlogic/media/utils/vformat.h>
30#include <linux/amlogic/iomap.h>
31#include <linux/amlogic/media/canvas/canvas.h>
32#include <linux/amlogic/media/vfm/vframe.h>
33#include <linux/amlogic/media/vfm/vframe_provider.h>
34#include <linux/amlogic/media/vfm/vframe_receiver.h>
35#include <linux/amlogic/media/video_sink/ionvideo_ext.h>
36#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
37#include <linux/amlogic/media/video_sink/v4lvideo_ext.h>
38#endif
39#include <linux/amlogic/media/vfm/vfm_ext.h>
40/*for VDEC_DEBUG_SUPPORT*/
41#include <linux/time.h>
42
43#include <linux/amlogic/media/utils/vdec_reg.h>
44#include "vdec.h"
45#include "vdec_trace.h"
46#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
47#include "vdec_profile.h"
48#endif
49#include <linux/of.h>
50#include <linux/of_fdt.h>
51#include <linux/libfdt_env.h>
52#include <linux/of_reserved_mem.h>
53#include <linux/dma-contiguous.h>
54#include <linux/cma.h>
55#include <linux/module.h>
56#include <linux/slab.h>
57#include <linux/dma-mapping.h>
58#include <linux/dma-contiguous.h>
59#include "../../../stream_input/amports/amports_priv.h"
60
61#include <linux/amlogic/media/utils/amports_config.h>
62#include "../utils/amvdec.h"
63#include "vdec_input.h"
64
65#include "../../../common/media_clock/clk/clk.h"
66#include <linux/reset.h>
67#include <linux/amlogic/cpu_version.h>
68#include <linux/amlogic/media/codec_mm/codec_mm.h>
69#include <linux/amlogic/media/video_sink/video_keeper.h>
70#include <linux/amlogic/media/codec_mm/configs.h>
71#include <linux/amlogic/media/frame_sync/ptsserv.h>
72#include "secprot.h"
73#include "../../../common/chips/decoder_cpu_ver_info.h"
74#include "frame_check.h"
75
76#ifdef CONFIG_AMLOGIC_POWER
77#include <linux/amlogic/power_ctrl.h>
78#endif
79
/* Serializes global decoder bring-up/teardown and power sequencing. */
static DEFINE_MUTEX(vdec_mutex);

#define MC_SIZE (4096 * 4)
#define CMA_ALLOC_SIZE SZ_64M
#define MEM_NAME "vdec_prealloc"
static int inited_vcodec_num;
/* Current uptime in milliseconds (64-bit safe division). */
#define jiffies_ms div64_u64(get_jiffies_64() * 1000, HZ)
static int poweron_clock_level;
static int keep_vdec_mem;
static unsigned int debug_trace_num = 16 * 20;
static int step_mode;
static unsigned int clk_config;	/* see get_vdec_clk_config_settings() below */
/*
 &1: sched_priority to MAX_RT_PRIO -1.
 &2: always reload firmware.
 &4: vdec canvas debug enable
 (&8 additionally enables vdec_up() wakeup logging -- see vdec_up())
 */
static unsigned int debug;

static int hevc_max_reset_count;

static int no_powerdown;	/* set via vdec_set_no_powerdown() */
static int parallel_decode = 1;
static int fps_detection;	/* non-zero enables vdec_fps_detec() accounting */
static int fps_clear;		/* one-shot: clears all fps stats on next sample */


static int force_nosecure_even_drm;
static int disable_switch_single_to_mult; /* checked in vdec_set_decinfo() */

/* Guards read-modify-write sequences on the DMC_REQ_CTRL bus register. */
static DEFINE_SPINLOCK(vdec_spin_lock);

#define HEVC_TEST_LIMIT 100
#define GXBB_REV_A_MINOR 0xA	/* see hevc_workaround_needed() */

#define PRINT_FRAME_INFO 1
#define DISABLE_FRAME_INFO 2

static int frameinfo_flag = 0;
static int v4lvideo_add_di = 1;
static int max_di_instance = 2;

//static int path_debug = 0;

/* Per-frame QoS record ring buffers with producer/consumer indices. */
static struct vframe_qos_s *frame_info_buf_in = NULL;
static struct vframe_qos_s *frame_info_buf_out = NULL;
static int frame_qos_wr = 0;
static int frame_qos_rd = 0;
int decode_underflow = 0;

/* Total tracked canvas slots: both hardware canvas index ranges combined. */
#define CANVAS_MAX_SIZE (AMVDEC_CANVAS_MAX1 - AMVDEC_CANVAS_START_INDEX + 1 + AMVDEC_CANVAS_MAX2 + 1)
131
/* Name/offset pair for a named hardware register (debug/dump use). */
struct am_reg {
	char *name;	/* human-readable register name */
	int offset;	/* register offset */
};
136
/* Per-IRQ bookkeeping: slot index, IRQ number, registered handlers. */
struct vdec_isr_context_s {
	int index;			/* slot in vdec_core_s.isr_context[] */
	int irq;			/* Linux IRQ number */
	irq_handler_t dev_isr;		/* top-half handler */
	irq_handler_t dev_threaded_isr;	/* threaded bottom-half handler */
	void *dev_id;			/* cookie passed to the handlers */
	struct vdec_s *vdec;		/* owning decoder instance */
};
145
/* Per-instance decode-rate statistics, updated by vdec_fps_detec(). */
struct decode_fps_s {
	u32 frame_count;	/* frames counted since last clear */
	u64 start_timestamp;	/* us timestamp of first counted frame */
	u64 last_timestamp;	/* us timestamp of most recent frame */
	u32 fps;		/* computed rate, scaled (see vdec_fps_detec) */
};
152
/*
 * Global decoder-core state singleton (see vdec_core below).
 * Locking: 'lock' guards scheduling state, 'canvas_lock' the canvas
 * allocator, 'fps_lock' the fps stats, 'input_lock' the buff flags.
 */
struct vdec_core_s {
	struct list_head connected_vdec_list;	/* all connected instances */
	spinlock_t lock;
	spinlock_t canvas_lock;
	spinlock_t fps_lock;
	spinlock_t input_lock;
	struct ida ida;				/* instance id allocator */
	atomic_t vdec_nr;			/* live instance count */
	struct vdec_s *vfm_vdec;
	struct vdec_s *active_vdec;		/* see vdec_save_active_hw() */
	struct vdec_s *active_hevc;		/* see vdec_save_active_hw() */
	struct vdec_s *hint_fr_vdec;
	struct platform_device *vdec_core_platform_device;
	struct device *cma_dev;			/* device for CMA allocations */
	struct semaphore sem;			/* scheduler wakeup (vdec_up) */
	struct task_struct *thread;		/* core scheduler thread */
	struct workqueue_struct *vdec_core_wq;	/* see vdec_schedule_work() */

	unsigned long sched_mask;
	struct vdec_isr_context_s isr_context[VDEC_IRQ_MAX];
	int power_ref_count[VDEC_MAX];
	struct vdec_s *last_vdec;
	int parallel_dec;
	unsigned long power_ref_mask;
	int vdec_combine_flag;
	struct decode_fps_s decode_fps[MAX_INSTANCE_MUN];
	/* per-core-mask "frame data available" bits, under input_lock */
	unsigned long buff_flag;
	/* per-core-mask "stream-based input" bits, under input_lock */
	unsigned long stream_buff_flag;
};
182
/* One canvas allocator slot; see get_canvas_ex()/free_canvas_ex(). */
struct canvas_status_s {
	int type;		/* canvas type key; 0 == slot free */
	int canvas_used_flag;	/* reference count of users */
	int id;			/* bitmask of owner ids (1 << id) */
};
188
189
/* The single global decoder-core instance. */
static struct vdec_core_s *vdec_core;

/* Printable names indexed by vdec status value. */
static const char * const vdec_status_string[] = {
	"VDEC_STATUS_UNINITIALIZED",
	"VDEC_STATUS_DISCONNECTED",
	"VDEC_STATUS_CONNECTED",
	"VDEC_STATUS_ACTIVE"
};

/* Module-wide debug flag word, exposed via vdec_get_debug_flags(). */
static int debugflags;

/* Canvas allocator table; guarded by vdec_core->canvas_lock. */
static struct canvas_status_s canvas_stat[AMVDEC_CANVAS_MAX1 - AMVDEC_CANVAS_START_INDEX + 1 + AMVDEC_CANVAS_MAX2 + 1];
202
203
204int vdec_get_debug_flags(void)
205{
206 return debugflags;
207}
208EXPORT_SYMBOL(vdec_get_debug_flags);
209
210unsigned char is_mult_inc(unsigned int type)
211{
212 unsigned char ret = 0;
213 if (vdec_get_debug_flags() & 0xf000)
214 ret = (vdec_get_debug_flags() & 0x1000)
215 ? 1 : 0;
216 else if (type & PORT_TYPE_DECODER_SCHED)
217 ret = 1;
218 return ret;
219}
220EXPORT_SYMBOL(is_mult_inc);
221
/* True for cores that own an input (stream/frame) context. */
static const bool cores_with_input[VDEC_MAX] = {
	true,   /* VDEC_1 */
	false,  /* VDEC_HCODEC */
	false,  /* VDEC_2 */
	true,   /* VDEC_HEVC / VDEC_HEVC_FRONT */
	false,  /* VDEC_HEVC_BACK */
};

/* IRQ line assigned to each core, indexed by enum vdec_type_e. */
static const int cores_int[VDEC_MAX] = {
	VDEC_IRQ_1,
	VDEC_IRQ_2,
	VDEC_IRQ_0,
	VDEC_IRQ_0,
	VDEC_IRQ_HEVC_BACK
};
237
/*
 * IRQ-safe lock helpers for the core's four spinlocks. Each *_lock()
 * returns the saved IRQ flags, which the caller must pass back to the
 * matching *_unlock(). Kept as thin wrappers so callers cannot mismatch
 * the flags-save/restore pairing.
 */

/* Acquire the canvas allocator lock; returns saved IRQ flags. */
unsigned long vdec_canvas_lock(struct vdec_core_s *core)
{
	unsigned long flags;
	spin_lock_irqsave(&core->canvas_lock, flags);

	return flags;
}

/* Release the canvas allocator lock with the flags from vdec_canvas_lock(). */
void vdec_canvas_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->canvas_lock, flags);
}

/* Acquire the fps-statistics lock; returns saved IRQ flags. */
unsigned long vdec_fps_lock(struct vdec_core_s *core)
{
	unsigned long flags;
	spin_lock_irqsave(&core->fps_lock, flags);

	return flags;
}

/* Release the fps-statistics lock. */
void vdec_fps_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->fps_lock, flags);
}

/* Acquire the core scheduling lock; returns saved IRQ flags. */
unsigned long vdec_core_lock(struct vdec_core_s *core)
{
	unsigned long flags;

	spin_lock_irqsave(&core->lock, flags);

	return flags;
}

/* Release the core scheduling lock. */
void vdec_core_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->lock, flags);
}

/* Acquire the input buffer-status lock; returns saved IRQ flags. */
unsigned long vdec_inputbuff_lock(struct vdec_core_s *core)
{
	unsigned long flags;

	spin_lock_irqsave(&core->input_lock, flags);

	return flags;
}

/* Release the input buffer-status lock. */
void vdec_inputbuff_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->input_lock, flags);
}
291
292
293static bool vdec_is_input_frame_empty(struct vdec_s *vdec) {
294 struct vdec_core_s *core = vdec_core;
295 bool ret;
296 unsigned long flags;
297
298 flags = vdec_inputbuff_lock(core);
299 ret = !(vdec->core_mask & core->buff_flag);
300 vdec_inputbuff_unlock(core, flags);
301
302 return ret;
303}
304
305static void vdec_up(struct vdec_s *vdec)
306{
307 struct vdec_core_s *core = vdec_core;
308
309 if (debug & 8)
310 pr_info("vdec_up, id:%d\n", vdec->id);
311 up(&core->sem);
312}
313
314
315static u64 vdec_get_us_time_system(void)
316{
317 struct timeval tv;
318
319 do_gettimeofday(&tv);
320
321 return div64_u64(timeval_to_ns(&tv), 1000);
322}
323
324static void vdec_fps_clear(int id)
325{
326 if (id >= MAX_INSTANCE_MUN)
327 return;
328
329 vdec_core->decode_fps[id].frame_count = 0;
330 vdec_core->decode_fps[id].start_timestamp = 0;
331 vdec_core->decode_fps[id].last_timestamp = 0;
332 vdec_core->decode_fps[id].fps = 0;
333}
334
335static void vdec_fps_clearall(void)
336{
337 int i;
338
339 for (i = 0; i < MAX_INSTANCE_MUN; i++) {
340 vdec_core->decode_fps[i].frame_count = 0;
341 vdec_core->decode_fps[i].start_timestamp = 0;
342 vdec_core->decode_fps[i].last_timestamp = 0;
343 vdec_core->decode_fps[i].fps = 0;
344 }
345}
346
347static void vdec_fps_detec(int id)
348{
349 unsigned long flags;
350
351 if (fps_detection == 0)
352 return;
353
354 if (id >= MAX_INSTANCE_MUN)
355 return;
356
357 flags = vdec_fps_lock(vdec_core);
358
359 if (fps_clear == 1) {
360 vdec_fps_clearall();
361 fps_clear = 0;
362 }
363
364 vdec_core->decode_fps[id].frame_count++;
365 if (vdec_core->decode_fps[id].frame_count == 1) {
366 vdec_core->decode_fps[id].start_timestamp =
367 vdec_get_us_time_system();
368 vdec_core->decode_fps[id].last_timestamp =
369 vdec_core->decode_fps[id].start_timestamp;
370 } else {
371 vdec_core->decode_fps[id].last_timestamp =
372 vdec_get_us_time_system();
373 vdec_core->decode_fps[id].fps =
374 (u32)div_u64(((u64)(vdec_core->decode_fps[id].frame_count) *
375 10000000000),
376 (vdec_core->decode_fps[id].last_timestamp -
377 vdec_core->decode_fps[id].start_timestamp));
378 }
379 vdec_fps_unlock(vdec_core, flags);
380}
381
382
383
384static int get_canvas(unsigned int index, unsigned int base)
385{
386 int start;
387 int canvas_index = index * base;
388 int ret;
389
390 if ((base > 4) || (base == 0))
391 return -1;
392
393 if ((AMVDEC_CANVAS_START_INDEX + canvas_index + base - 1)
394 <= AMVDEC_CANVAS_MAX1) {
395 start = AMVDEC_CANVAS_START_INDEX + base * index;
396 } else {
397 canvas_index -= (AMVDEC_CANVAS_MAX1 -
398 AMVDEC_CANVAS_START_INDEX + 1) / base * base;
399 if (canvas_index <= AMVDEC_CANVAS_MAX2)
400 start = canvas_index / base;
401 else
402 return -1;
403 }
404
405 if (base == 1) {
406 ret = start;
407 } else if (base == 2) {
408 ret = ((start + 1) << 16) | ((start + 1) << 8) | start;
409 } else if (base == 3) {
410 ret = ((start + 2) << 16) | ((start + 1) << 8) | start;
411 } else if (base == 4) {
412 ret = (((start + 3) << 24) | (start + 2) << 16) |
413 ((start + 1) << 8) | start;
414 }
415
416 return ret;
417}
418
419static int get_canvas_ex(int type, int id)
420{
421 int i;
422 unsigned long flags;
423
424 flags = vdec_canvas_lock(vdec_core);
425
426 for (i = 0; i < CANVAS_MAX_SIZE; i++) {
427 /*0x10-0x15 has been used by rdma*/
428 if ((i >= 0x10) && (i <= 0x15))
429 continue;
430 if ((canvas_stat[i].type == type) &&
431 (canvas_stat[i].id & (1 << id)) == 0) {
432 canvas_stat[i].canvas_used_flag++;
433 canvas_stat[i].id |= (1 << id);
434 if (debug & 4)
435 pr_debug("get used canvas %d\n", i);
436 vdec_canvas_unlock(vdec_core, flags);
437 if (i < AMVDEC_CANVAS_MAX2 + 1)
438 return i;
439 else
440 return (i + AMVDEC_CANVAS_START_INDEX - AMVDEC_CANVAS_MAX2 - 1);
441 }
442 }
443
444 for (i = 0; i < CANVAS_MAX_SIZE; i++) {
445 /*0x10-0x15 has been used by rdma*/
446 if ((i >= 0x10) && (i <= 0x15))
447 continue;
448 if (canvas_stat[i].type == 0) {
449 canvas_stat[i].type = type;
450 canvas_stat[i].canvas_used_flag = 1;
451 canvas_stat[i].id = (1 << id);
452 if (debug & 4) {
453 pr_debug("get canvas %d\n", i);
454 pr_debug("canvas_used_flag %d\n",
455 canvas_stat[i].canvas_used_flag);
456 pr_debug("canvas_stat[i].id %d\n",
457 canvas_stat[i].id);
458 }
459 vdec_canvas_unlock(vdec_core, flags);
460 if (i < AMVDEC_CANVAS_MAX2 + 1)
461 return i;
462 else
463 return (i + AMVDEC_CANVAS_START_INDEX - AMVDEC_CANVAS_MAX2 - 1);
464 }
465 }
466 vdec_canvas_unlock(vdec_core, flags);
467
468 pr_info("cannot get canvas\n");
469
470 return -1;
471}
472
473static void free_canvas_ex(int index, int id)
474{
475 unsigned long flags;
476 int offset;
477
478 flags = vdec_canvas_lock(vdec_core);
479 if (index >= 0 &&
480 index < AMVDEC_CANVAS_MAX2 + 1)
481 offset = index;
482 else if ((index >= AMVDEC_CANVAS_START_INDEX) &&
483 (index <= AMVDEC_CANVAS_MAX1))
484 offset = index + AMVDEC_CANVAS_MAX2 + 1 - AMVDEC_CANVAS_START_INDEX;
485 else {
486 vdec_canvas_unlock(vdec_core, flags);
487 return;
488 }
489
490 if ((canvas_stat[offset].canvas_used_flag > 0) &&
491 (canvas_stat[offset].id & (1 << id))) {
492 canvas_stat[offset].canvas_used_flag--;
493 canvas_stat[offset].id &= ~(1 << id);
494 if (canvas_stat[offset].canvas_used_flag == 0) {
495 canvas_stat[offset].type = 0;
496 canvas_stat[offset].id = 0;
497 }
498 if (debug & 4) {
499 pr_debug("free index %d used_flag %d, type = %d, id = %d\n",
500 offset,
501 canvas_stat[offset].canvas_used_flag,
502 canvas_stat[offset].type,
503 canvas_stat[offset].id);
504 }
505 }
506 vdec_canvas_unlock(vdec_core, flags);
507
508 return;
509
510}
511
512static void vdec_dmc_pipeline_reset(void)
513{
514 /*
515 * bit15: vdec_piple
516 * bit14: hevc_dmc_piple
517 * bit13: hevcf_dmc_pipl
518 * bit12: wave420_dmc_pipl
519 * bit11: hcodec_dmc_pipl
520 */
521
522 WRITE_RESET_REG(RESET7_REGISTER,
523 (1 << 15) | (1 << 14) | (1 << 13) |
524 (1 << 12) | (1 << 11));
525}
526
527static void vdec_stop_armrisc(int hw)
528{
529 ulong timeout = jiffies + HZ;
530
531 if (hw == VDEC_INPUT_TARGET_VLD) {
532 WRITE_VREG(MPSR, 0);
533 WRITE_VREG(CPSR, 0);
534
535 while (READ_VREG(IMEM_DMA_CTRL) & 0x8000) {
536 if (time_after(jiffies, timeout))
537 break;
538 }
539
540 timeout = jiffies + HZ;
541 while (READ_VREG(LMEM_DMA_CTRL) & 0x8000) {
542 if (time_after(jiffies, timeout))
543 break;
544 }
545 } else if (hw == VDEC_INPUT_TARGET_HEVC) {
546 WRITE_VREG(HEVC_MPSR, 0);
547 WRITE_VREG(HEVC_CPSR, 0);
548
549 while (READ_VREG(HEVC_IMEM_DMA_CTRL) & 0x8000) {
550 if (time_after(jiffies, timeout))
551 break;
552 }
553
554 timeout = jiffies + HZ/10;
555 while (READ_VREG(HEVC_LMEM_DMA_CTRL) & 0x8000) {
556 if (time_after(jiffies, timeout))
557 break;
558 }
559 }
560}
561
562static void vdec_disable_DMC(struct vdec_s *vdec)
563{
564 /*close first,then wait pedding end,timing suggestion from vlsi*/
565 struct vdec_input_s *input = &vdec->input;
566 unsigned long flags;
567 unsigned int mask = 0;
568
569 if (input->target == VDEC_INPUT_TARGET_VLD) {
570 mask = (1 << 13);
571 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
572 mask = (1 << 21);
573 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
574 mask = (1 << 4); /*hevc*/
575 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
576 mask |= (1 << 8); /*hevcb */
577 }
578
579 /* need to stop armrisc. */
580 if (!IS_ERR_OR_NULL(vdec->dev))
581 vdec_stop_armrisc(input->target);
582
583 spin_lock_irqsave(&vdec_spin_lock, flags);
584 codec_dmcbus_write(DMC_REQ_CTRL,
585 codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
586 spin_unlock_irqrestore(&vdec_spin_lock, flags);
587
588 while (!(codec_dmcbus_read(DMC_CHAN_STS)
589 & mask))
590 ;
591
592 pr_debug("%s input->target= 0x%x\n", __func__, input->target);
593}
594
595static void vdec_enable_DMC(struct vdec_s *vdec)
596{
597 struct vdec_input_s *input = &vdec->input;
598 unsigned long flags;
599 unsigned int mask = 0;
600
601 if (input->target == VDEC_INPUT_TARGET_VLD) {
602 mask = (1 << 13);
603 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
604 mask = (1 << 21);
605 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
606 mask = (1 << 4); /*hevc*/
607 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
608 mask |= (1 << 8); /*hevcb */
609 }
610
611 /*must to be reset the dmc pipeline if it's g12b.*/
612 if (get_cpu_type() == AM_MESON_CPU_MAJOR_ID_G12B)
613 vdec_dmc_pipeline_reset();
614
615 spin_lock_irqsave(&vdec_spin_lock, flags);
616 codec_dmcbus_write(DMC_REQ_CTRL,
617 codec_dmcbus_read(DMC_REQ_CTRL) | mask);
618 spin_unlock_irqrestore(&vdec_spin_lock, flags);
619 pr_debug("%s input->target= 0x%x\n", __func__, input->target);
620}
621
622
623
624static int vdec_get_hw_type(int value)
625{
626 int type;
627 switch (value) {
628 case VFORMAT_HEVC:
629 case VFORMAT_VP9:
630 case VFORMAT_AVS2:
631 type = CORE_MASK_HEVC;
632 break;
633
634 case VFORMAT_MPEG12:
635 case VFORMAT_MPEG4:
636 case VFORMAT_H264:
637 case VFORMAT_MJPEG:
638 case VFORMAT_REAL:
639 case VFORMAT_JPEG:
640 case VFORMAT_VC1:
641 case VFORMAT_AVS:
642 case VFORMAT_YUV:
643 case VFORMAT_H264MVC:
644 case VFORMAT_H264_4K2K:
645 case VFORMAT_H264_ENC:
646 case VFORMAT_JPEG_ENC:
647 type = CORE_MASK_VDEC_1;
648 break;
649
650 default:
651 type = -1;
652 }
653
654 return type;
655}
656
657
658static void vdec_save_active_hw(struct vdec_s *vdec)
659{
660 int type;
661
662 type = vdec_get_hw_type(vdec->port->vformat);
663
664 if (type == CORE_MASK_HEVC) {
665 vdec_core->active_hevc = vdec;
666 } else if (type == CORE_MASK_VDEC_1) {
667 vdec_core->active_vdec = vdec;
668 } else {
669 pr_info("save_active_fw wrong\n");
670 }
671}
672
673static void vdec_update_buff_status(void)
674{
675 struct vdec_core_s *core = vdec_core;
676 unsigned long flags;
677 struct vdec_s *vdec;
678
679 flags = vdec_inputbuff_lock(core);
680 core->buff_flag = 0;
681 core->stream_buff_flag = 0;
682 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
683 struct vdec_input_s *input = &vdec->input;
684 if (input_frame_based(input)) {
685 if (input->have_frame_num || input->eos)
686 core->buff_flag |= vdec->core_mask;
687 } else if (input_stream_based(input)) {
688 core->stream_buff_flag |= vdec->core_mask;
689 }
690 }
691 vdec_inputbuff_unlock(core, flags);
692}
693
/*
 * Disabled legacy helper kept for reference: woke a stream-based vdec
 * once the parser buffer level crossed its prepare threshold. Left
 * compiled out via #if 0; remove once confirmed dead.
 */
#if 0
void vdec_update_streambuff_status(void)
{
	struct vdec_core_s *core = vdec_core;
	struct vdec_s *vdec;

	/* check streaming prepare level threshold if not EOS */
	list_for_each_entry(vdec, &core->connected_vdec_list, list) {
		struct vdec_input_s *input = &vdec->input;
		if (input && input_stream_based(input) && !input->eos &&
			(vdec->need_more_data & VDEC_NEED_MORE_DATA)) {
			u32 rp, wp, level;

			rp = READ_PARSER_REG(PARSER_VIDEO_RP);
			wp = READ_PARSER_REG(PARSER_VIDEO_WP);
			if (wp < rp)
				level = input->size + wp - rp;
			else
				level = wp - rp;
			if ((level < input->prepare_level) &&
				(pts_get_rec_num(PTS_TYPE_VIDEO,
					vdec->input.total_rd_count) < 2)) {
				break;
			} else if (level > input->prepare_level) {
				vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
				if (debug & 8)
					pr_info("vdec_flush_streambuff_status up\n");
				vdec_up(vdec);
			}
			break;
		}
	}
}
EXPORT_SYMBOL(vdec_update_streambuff_status);
#endif
729
730int vdec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
731{
732 if (vdec && vdec->dec_status &&
733 ((vdec->status == VDEC_STATUS_CONNECTED ||
734 vdec->status == VDEC_STATUS_ACTIVE)))
735 return vdec->dec_status(vdec, vstatus);
736
737 return 0;
738}
739EXPORT_SYMBOL(vdec_status);
740
741int vdec_set_trickmode(struct vdec_s *vdec, unsigned long trickmode)
742{
743 int r;
744
745 if (vdec->set_trickmode) {
746 r = vdec->set_trickmode(vdec, trickmode);
747
748 if ((r == 0) && (vdec->slave) && (vdec->slave->set_trickmode))
749 r = vdec->slave->set_trickmode(vdec->slave,
750 trickmode);
751 return r;
752 }
753
754 return -1;
755}
756EXPORT_SYMBOL(vdec_set_trickmode);
757
758int vdec_set_isreset(struct vdec_s *vdec, int isreset)
759{
760 vdec->is_reset = isreset;
761 pr_info("is_reset=%d\n", isreset);
762 if (vdec->set_isreset)
763 return vdec->set_isreset(vdec, isreset);
764 return 0;
765}
766EXPORT_SYMBOL(vdec_set_isreset);
767
/* Record whether Dolby Vision metadata arrives with the enhancement layer. */
int vdec_set_dv_metawithel(struct vdec_s *vdec, int isdvmetawithel)
{
	vdec->dolby_meta_with_el = isdvmetawithel;
	pr_info("isdvmetawithel=%d\n", isdvmetawithel);
	return 0;
}
EXPORT_SYMBOL(vdec_set_dv_metawithel);
775
776void vdec_set_no_powerdown(int flag)
777{
778 no_powerdown = flag;
779 pr_info("no_powerdown=%d\n", no_powerdown);
780 return;
781}
782EXPORT_SYMBOL(vdec_set_no_powerdown);
783
784void vdec_count_info(struct vdec_info *vs, unsigned int err,
785 unsigned int offset)
786{
787 if (err)
788 vs->error_frame_count++;
789 if (offset) {
790 if (0 == vs->frame_count) {
791 vs->offset = 0;
792 vs->samp_cnt = 0;
793 }
794 vs->frame_data = offset > vs->total_data ?
795 offset - vs->total_data : vs->total_data - offset;
796 vs->total_data = offset;
797 if (vs->samp_cnt < 96000 * 2) { /* 2s */
798 if (0 == vs->samp_cnt)
799 vs->offset = offset;
800 vs->samp_cnt += vs->frame_dur;
801 } else {
802 vs->bit_rate = (offset - vs->offset) / 2;
803 /*pr_info("bitrate : %u\n",vs->bit_rate);*/
804 vs->samp_cnt = 0;
805 }
806 vs->frame_count++;
807 }
808 /*pr_info("size : %u, offset : %u, dur : %u, cnt : %u\n",
809 vs->offset,offset,vs->frame_dur,vs->samp_cnt);*/
810 return;
811}
812EXPORT_SYMBOL(vdec_count_info);
/* 4K decode is supported on everything except the GXL 805X package. */
int vdec_is_support_4k(void)
{
	if (is_meson_gxl_package_805X())
		return 0;

	return 1;
}
EXPORT_SYMBOL(vdec_is_support_4k);
818
819/*
820 * clk_config:
821 *0:default
822 *1:no gp0_pll;
823 *2:always used gp0_pll;
824 *>=10:fixed n M clk;
825 *== 100 , 100M clks;
826 */
/* Read the current clk_config value (semantics in the comment above). */
unsigned int get_vdec_clk_config_settings(void)
{
	return clk_config;
}
/* Overwrite the clk_config value.
 * NOTE(review): the getter has no EXPORT_SYMBOL while the setter does --
 * looks asymmetric; confirm whether external modules need the getter. */
void update_vdec_clk_config_settings(unsigned int config)
{
	clk_config = config;
}
EXPORT_SYMBOL(update_vdec_clk_config_settings);
836
837static bool hevc_workaround_needed(void)
838{
839 return (get_cpu_major_id() == AM_MESON_CPU_MAJOR_ID_GXBB) &&
840 (get_meson_cpu_version(MESON_CPU_VERSION_LVL_MINOR)
841 == GXBB_REV_A_MINOR);
842}
843
/* Return the device used for codec CMA allocations. */
struct device *get_codec_cma_device(void)
{
	return vdec_core->cma_dev;
}
848
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
/*
 * Device names indexed by format. With MULTI_DEC each format has a
 * legacy/multi-instance pair at [format*2] / [format*2 + 1]; see
 * get_dev_name(). The encoder entries repeat the same name for both.
 */
static const char * const vdec_device_name[] = {
	"amvdec_mpeg12", "ammvdec_mpeg12",
	"amvdec_mpeg4", "ammvdec_mpeg4",
	"amvdec_h264", "ammvdec_h264",
	"amvdec_mjpeg", "ammvdec_mjpeg",
	"amvdec_real", "ammvdec_real",
	"amjpegdec", "ammjpegdec",
	"amvdec_vc1", "ammvdec_vc1",
	"amvdec_avs", "ammvdec_avs",
	"amvdec_yuv", "ammvdec_yuv",
	"amvdec_h264mvc", "ammvdec_h264mvc",
	"amvdec_h264_4k2k", "ammvdec_h264_4k2k",
	"amvdec_h265", "ammvdec_h265",
	"amvenc_avc", "amvenc_avc",
	"jpegenc", "jpegenc",
	"amvdec_vp9", "ammvdec_vp9",
	"amvdec_avs2", "ammvdec_avs2"
};


#else

/* Device names indexed directly by format (single-instance build). */
static const char * const vdec_device_name[] = {
	"amvdec_mpeg12",
	"amvdec_mpeg4",
	"amvdec_h264",
	"amvdec_mjpeg",
	"amvdec_real",
	"amjpegdec",
	"amvdec_vc1",
	"amvdec_avs",
	"amvdec_yuv",
	"amvdec_h264mvc",
	"amvdec_h264_4k2k",
	"amvdec_h265",
	"amvenc_avc",
	"jpegenc",
	"amvdec_vp9",
	"amvdec_avs2"
};

#endif
892
893/*
894 * Only support time sliced decoding for frame based input,
895 * so legacy decoder can exist with time sliced decoder.
896 */
897static const char *get_dev_name(bool use_legacy_vdec, int format)
898{
899#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
900 if (use_legacy_vdec)
901 return vdec_device_name[format * 2];
902 else
903 return vdec_device_name[format * 2 + 1];
904#else
905 return vdec_device_name[format];
906#endif
907}
908
909#ifdef VDEC_DEBUG_SUPPORT
910static u64 get_current_clk(void)
911{
912 /*struct timespec xtime = current_kernel_time();
913 u64 usec = xtime.tv_sec * 1000000;
914 usec += xtime.tv_nsec / 1000;
915 */
916 u64 usec = sched_clock();
917 return usec;
918}
919
920static void inc_profi_count(unsigned long mask, u32 *count)
921{
922 enum vdec_type_e type;
923
924 for (type = VDEC_1; type < VDEC_MAX; type++) {
925 if (mask & (1 << type))
926 count[type]++;
927 }
928}
929
930static void update_profi_clk_run(struct vdec_s *vdec,
931 unsigned long mask, u64 clk)
932{
933 enum vdec_type_e type;
934
935 for (type = VDEC_1; type < VDEC_MAX; type++) {
936 if (mask & (1 << type)) {
937 vdec->start_run_clk[type] = clk;
938 if (vdec->profile_start_clk[type] == 0)
939 vdec->profile_start_clk[type] = clk;
940 vdec->total_clk[type] = clk
941 - vdec->profile_start_clk[type];
942 /*pr_info("set start_run_clk %ld\n",
943 vdec->start_run_clk);*/
944
945 }
946 }
947}
948
949static void update_profi_clk_stop(struct vdec_s *vdec,
950 unsigned long mask, u64 clk)
951{
952 enum vdec_type_e type;
953
954 for (type = VDEC_1; type < VDEC_MAX; type++) {
955 if (mask & (1 << type)) {
956 if (vdec->start_run_clk[type] == 0)
957 pr_info("error, start_run_clk[%d] not set\n", type);
958
959 /*pr_info("update run_clk type %d, %ld, %ld, %ld\n",
960 type,
961 clk,
962 vdec->start_run_clk[type],
963 vdec->run_clk[type]);*/
964 vdec->run_clk[type] +=
965 (clk - vdec->start_run_clk[type]);
966 }
967 }
968}
969
970#endif
971
972int vdec_set_decinfo(struct vdec_s *vdec, struct dec_sysinfo *p)
973{
974 if (copy_from_user((void *)&vdec->sys_info_store, (void *)p,
975 sizeof(struct dec_sysinfo)))
976 return -EFAULT;
977
978 /* force switch to mult instance if supports this profile. */
979 if ((vdec->type == VDEC_TYPE_SINGLE) &&
980 !disable_switch_single_to_mult) {
981 const char *str = NULL;
982 char fmt[16] = {0};
983
984 str = strchr(get_dev_name(false, vdec->format), '_');
985 if (!str)
986 return -1;
987
988 sprintf(fmt, "m%s", ++str);
989 if (is_support_profile(fmt) &&
990 vdec->sys_info->format != VIDEO_DEC_FORMAT_H263)
991 vdec->type = VDEC_TYPE_STREAM_PARSER;
992 }
993
994 return 0;
995}
996EXPORT_SYMBOL(vdec_set_decinfo);
997
998/* construct vdec strcture */
999struct vdec_s *vdec_create(struct stream_port_s *port,
1000 struct vdec_s *master)
1001{
1002 struct vdec_s *vdec;
1003 int type = VDEC_TYPE_SINGLE;
1004 int id;
1005
1006 if (is_mult_inc(port->type))
1007 type = (port->type & PORT_TYPE_FRAME) ?
1008 VDEC_TYPE_FRAME_BLOCK :
1009 VDEC_TYPE_STREAM_PARSER;
1010
1011 id = ida_simple_get(&vdec_core->ida,
1012 0, MAX_INSTANCE_MUN, GFP_KERNEL);
1013 if (id < 0) {
1014 pr_info("vdec_create request id failed!ret =%d\n", id);
1015 return NULL;
1016 }
1017 vdec = vzalloc(sizeof(struct vdec_s));
1018
1019 /* TBD */
1020 if (vdec) {
1021 vdec->magic = 0x43454456;
1022 vdec->id = -1;
1023 vdec->type = type;
1024 vdec->port = port;
1025 vdec->sys_info = &vdec->sys_info_store;
1026
1027 INIT_LIST_HEAD(&vdec->list);
1028
1029 atomic_inc(&vdec_core->vdec_nr);
1030 vdec->id = id;
1031 vdec_input_init(&vdec->input, vdec);
1032 vdec->input.vdec_is_input_frame_empty = vdec_is_input_frame_empty;
1033 vdec->input.vdec_up = vdec_up;
1034 if (master) {
1035 vdec->master = master;
1036 master->slave = vdec;
1037 master->sched = 1;
1038 }
1039 }
1040
1041 pr_debug("vdec_create instance %p, total %d\n", vdec,
1042 atomic_read(&vdec_core->vdec_nr));
1043
1044 //trace_vdec_create(vdec); /*DEBUG_TMP*/
1045
1046 return vdec;
1047}
1048EXPORT_SYMBOL(vdec_create);
1049
1050int vdec_set_format(struct vdec_s *vdec, int format)
1051{
1052 vdec->format = format;
1053 vdec->port_flag |= PORT_FLAG_VFORMAT;
1054
1055 if (vdec->slave) {
1056 vdec->slave->format = format;
1057 vdec->slave->port_flag |= PORT_FLAG_VFORMAT;
1058 }
1059 //trace_vdec_set_format(vdec, format);/*DEBUG_TMP*/
1060
1061 return 0;
1062}
1063EXPORT_SYMBOL(vdec_set_format);
1064
1065int vdec_set_pts(struct vdec_s *vdec, u32 pts)
1066{
1067 vdec->pts = pts;
1068 vdec->pts64 = div64_u64((u64)pts * 100, 9);
1069 vdec->pts_valid = true;
1070 //trace_vdec_set_pts(vdec, (u64)pts);/*DEBUG_TMP*/
1071 return 0;
1072}
1073EXPORT_SYMBOL(vdec_set_pts);
1074
/* Record an externally supplied timestamp and mark it valid. */
void vdec_set_timestamp(struct vdec_s *vdec, u64 timestamp)
{
	vdec->timestamp = timestamp;
	vdec->timestamp_valid = true;
}
EXPORT_SYMBOL(vdec_set_timestamp);
1081
1082int vdec_set_pts64(struct vdec_s *vdec, u64 pts64)
1083{
1084 vdec->pts64 = pts64;
1085 vdec->pts = (u32)div64_u64(pts64 * 9, 100);
1086 vdec->pts_valid = true;
1087
1088 //trace_vdec_set_pts64(vdec, pts64);/*DEBUG_TMP*/
1089 return 0;
1090}
1091EXPORT_SYMBOL(vdec_set_pts64);
1092
/* Current instance status (one of the VDEC_STATUS_* values). */
int vdec_get_status(struct vdec_s *vdec)
{
	return vdec->status;
}
EXPORT_SYMBOL(vdec_get_status);
1098
/* Number of frames currently queued in the instance's input chain. */
int vdec_get_frame_num(struct vdec_s *vdec)
{
	return vdec->input.have_frame_num;
}
EXPORT_SYMBOL(vdec_get_frame_num);
1104
/* Set the instance's current status. */
void vdec_set_status(struct vdec_s *vdec, int status)
{
	//trace_vdec_set_status(vdec, status);/*DEBUG_TMP*/
	vdec->status = status;
}
EXPORT_SYMBOL(vdec_set_status);
1111
/* Set the status the instance should transition to next. */
void vdec_set_next_status(struct vdec_s *vdec, int status)
{
	//trace_vdec_set_next_status(vdec, status);/*DEBUG_TMP*/
	vdec->next_status = status;
}
EXPORT_SYMBOL(vdec_set_next_status);
1118
/* Select the frame-based video output path for this instance. */
int vdec_set_video_path(struct vdec_s *vdec, int video_path)
{
	vdec->frame_base_video_path = video_path;
	return 0;
}
EXPORT_SYMBOL(vdec_set_video_path);
1125
/* Record the vframe receiver instance id for this decoder. */
int vdec_set_receive_id(struct vdec_s *vdec, int receive_id)
{
	vdec->vf_receiver_inst = receive_id;
	return 0;
}
EXPORT_SYMBOL(vdec_set_receive_id);
1132
1133/* add frame data to input chain */
/* add frame data to input chain */
int vdec_write_vframe(struct vdec_s *vdec, const char *buf, size_t count)
{
	return vdec_input_add_frame(&vdec->input, buf, count);
}
EXPORT_SYMBOL(vdec_write_vframe);
1139
1140/* add a work queue thread for vdec*/
1141void vdec_schedule_work(struct work_struct *work)
1142{
1143 if (vdec_core->vdec_core_wq)
1144 queue_work(vdec_core->vdec_core_wq, work);
1145 else
1146 schedule_work(work);
1147}
1148EXPORT_SYMBOL(vdec_schedule_work);
1149
1150static struct vdec_s *vdec_get_associate(struct vdec_s *vdec)
1151{
1152 if (vdec->master)
1153 return vdec->master;
1154 else if (vdec->slave)
1155 return vdec->slave;
1156 return NULL;
1157}
1158
1159static void vdec_sync_input_read(struct vdec_s *vdec)
1160{
1161 if (!vdec_stream_based(vdec))
1162 return;
1163
1164 if (vdec_dual(vdec)) {
1165 u32 me, other;
1166 if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1167 me = READ_VREG(VLD_MEM_VIFIFO_WRAP_COUNT);
1168 other =
1169 vdec_get_associate(vdec)->input.stream_cookie;
1170 if (me > other)
1171 return;
1172 else if (me == other) {
1173 me = READ_VREG(VLD_MEM_VIFIFO_RP);
1174 other =
1175 vdec_get_associate(vdec)->input.swap_rp;
1176 if (me > other) {
1177 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1178 vdec_get_associate(vdec)->
1179 input.swap_rp);
1180 return;
1181 }
1182 }
1183 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1184 READ_VREG(VLD_MEM_VIFIFO_RP));
1185 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1186 me = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
1187 if (((me & 0x80000000) == 0) &&
1188 (vdec->input.streaming_rp & 0x80000000))
1189 me += 1ULL << 32;
1190 other = vdec_get_associate(vdec)->input.streaming_rp;
1191 if (me > other) {
1192 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1193 vdec_get_associate(vdec)->
1194 input.swap_rp);
1195 return;
1196 }
1197
1198 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1199 READ_VREG(HEVC_STREAM_RD_PTR));
1200 }
1201 } else if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1202 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1203 READ_VREG(VLD_MEM_VIFIFO_RP));
1204 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1205 WRITE_PARSER_REG(PARSER_VIDEO_RP,
1206 READ_VREG(HEVC_STREAM_RD_PTR));
1207 }
1208}
1209
1210static void vdec_sync_input_write(struct vdec_s *vdec)
1211{
1212 if (!vdec_stream_based(vdec))
1213 return;
1214
1215 if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
1216 WRITE_VREG(VLD_MEM_VIFIFO_WP,
1217 READ_PARSER_REG(PARSER_VIDEO_WP));
1218 } else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
1219 WRITE_VREG(HEVC_STREAM_WR_PTR,
1220 READ_PARSER_REG(PARSER_VIDEO_WP));
1221 }
1222}
1223
1224/*
1225 *get next frame from input chain
1226 */
1227/*
1228 *THE VLD_FIFO is 512 bytes and Video buffer level
1229 * empty interrupt is set to 0x80 bytes threshold
1230 */
1231#define VLD_PADDING_SIZE 1024
1232#define HEVC_PADDING_SIZE (1024*16)
/*
 * Prepare the HW input (VLD or HEVC stream FIFO) for the next decoding
 * session and report the amount of data available.
 *
 * Frame based: points the FIFO at the next chunk of the input chain,
 * returns it through *p and its size as return value, or -1 with
 * *p == NULL when no chunk is pending.
 *
 * Stream based: restores the previously swapped-out FIFO context (or
 * programs a fresh window), resyncs pointers with the parser front
 * end, sets *p = NULL and returns the bytes available between read
 * and write pointers plus data already inside the HW FIFO.
 */
int vdec_prepare_input(struct vdec_s *vdec, struct vframe_chunk_s **p)
{
	struct vdec_input_s *input = &vdec->input;
	struct vframe_chunk_s *chunk = NULL;
	struct vframe_block_list_s *block = NULL;
	int dummy;

	/* full reset to HW input */
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

		/* reset VLD fifo for all vdec */
		WRITE_VREG(DOS_SW_RESET0, (1<<5) | (1<<4) | (1<<3));
		WRITE_VREG(DOS_SW_RESET0, 0);

		/* dummy read presumably flushes the posted reset write --
		 * TODO confirm against SoC documentation
		 */
		dummy = READ_RESET_REG(RESET0_REGISTER);
		WRITE_VREG(POWER_CTL_VLD, 1 << 4);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
#if 0
		/*move to driver*/
		if (input_frame_based(input))
			WRITE_VREG(HEVC_STREAM_CONTROL, 0);

		/*
		 * 2: assist
		 * 3: parser
		 * 4: parser_state
		 * 8: dblk
		 * 11:mcpu
		 * 12:ccpu
		 * 13:ddr
		 * 14:iqit
		 * 15:ipp
		 * 17:qdct
		 * 18:mpred
		 * 19:sao
		 * 24:hevc_afifo
		 */
		WRITE_VREG(DOS_SW_RESET3,
			(1<<3)|(1<<4)|(1<<8)|(1<<11)|(1<<12)|(1<<14)|(1<<15)|
			(1<<17)|(1<<18)|(1<<19));
		WRITE_VREG(DOS_SW_RESET3, 0);
#endif
	}

	/*
	 *setup HW decoder input buffer (VLD context)
	 * based on input->type and input->target
	 */
	if (input_frame_based(input)) {
		chunk = vdec_input_next_chunk(&vdec->input);

		if (chunk == NULL) {
			*p = NULL;
			return -1;
		}

		block = chunk->block;

		if (input->target == VDEC_INPUT_TARGET_VLD) {
			WRITE_VREG(VLD_MEM_VIFIFO_START_PTR, block->start);
			WRITE_VREG(VLD_MEM_VIFIFO_END_PTR, block->start +
					block->size - 8);
			WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
					round_down(block->start + chunk->offset,
						VDEC_FIFO_ALIGN));

			/* toggle CONTROL to latch the new pointers */
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

			/* set to manual mode */
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
			WRITE_VREG(VLD_MEM_VIFIFO_RP,
					round_down(block->start + chunk->offset,
						VDEC_FIFO_ALIGN));
			/* pad the write pointer past the chunk so the HW
			 * empty-threshold interrupt does not starve the
			 * decoder near the end of the chunk; wrap inside
			 * the block when the padding runs off its end
			 */
			dummy = chunk->offset + chunk->size +
				VLD_PADDING_SIZE;
			if (dummy >= block->size)
				dummy -= block->size;
			WRITE_VREG(VLD_MEM_VIFIFO_WP,
				round_down(block->start + dummy,
					VDEC_FIFO_ALIGN));

			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 3);
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);

			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
				(0x11 << 16) | (1<<10) | (7<<3));

		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_START_ADDR, block->start);
			WRITE_VREG(HEVC_STREAM_END_ADDR, block->start +
					block->size);
			WRITE_VREG(HEVC_STREAM_RD_PTR, block->start +
					chunk->offset);
			/* same write-pointer padding trick as the VLD path */
			dummy = chunk->offset + chunk->size +
				HEVC_PADDING_SIZE;
			if (dummy >= block->size)
				dummy -= block->size;
			WRITE_VREG(HEVC_STREAM_WR_PTR,
				round_down(block->start + dummy,
					VDEC_FIFO_ALIGN));

			/* set endian */
			SET_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
		}

		*p = chunk;
		return chunk->size;

	} else {
		/* stream based */
		u32 rp = 0, wp = 0, fifo_len = 0;
		int size;
		bool swap_valid = input->swap_valid;
		unsigned long swap_page_phys = input->swap_page_phys;

		if (vdec_dual(vdec) &&
			((vdec->flag & VDEC_FLAG_SELF_INPUT_CONTEXT) == 0)) {
			/* keep using previous input context */
			struct vdec_s *master = (vdec->slave) ?
				vdec : vdec->master;
			if (master->input.last_swap_slave) {
				swap_valid = master->slave->input.swap_valid;
				swap_page_phys =
					master->slave->input.swap_page_phys;
			} else {
				swap_valid = master->input.swap_valid;
				swap_page_phys = master->input.swap_page_phys;
			}
		}

		if (swap_valid) {
			if (input->target == VDEC_INPUT_TARGET_VLD) {
				if (vdec->format == VFORMAT_H264)
					SET_VREG_MASK(POWER_CTL_VLD,
						(1 << 9));

				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

				/* restore read side */
				WRITE_VREG(VLD_MEM_SWAP_ADDR,
					swap_page_phys);
				WRITE_VREG(VLD_MEM_SWAP_CTL, 1);

				/* busy-wait for HW swap engine (bit7 busy) */
				while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
					;
				WRITE_VREG(VLD_MEM_SWAP_CTL, 0);

				/* restore wrap count */
				WRITE_VREG(VLD_MEM_VIFIFO_WRAP_COUNT,
					input->stream_cookie);

				rp = READ_VREG(VLD_MEM_VIFIFO_RP);
				fifo_len = READ_VREG(VLD_MEM_VIFIFO_LEVEL);

				/* enable */
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
					(0x11 << 16) | (1<<10));

				/* sync with front end */
				vdec_sync_input_read(vdec);
				vdec_sync_input_write(vdec);

				wp = READ_VREG(VLD_MEM_VIFIFO_WP);
			} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
				SET_VREG_MASK(HEVC_STREAM_CONTROL, 1);

				/* restore read side */
				WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
					swap_page_phys);
				WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 1);

				/* wait for HW swap engine (bit7 busy) */
				while (READ_VREG(HEVC_STREAM_SWAP_CTRL)
					& (1<<7))
					;
				WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);

				/* restore stream offset */
				WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
					input->stream_cookie);

				rp = READ_VREG(HEVC_STREAM_RD_PTR);
				/* bits 22..16 hold the internal fifo level */
				fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
						>> 16) & 0x7f;


				/* enable */

				/* sync with front end */
				vdec_sync_input_read(vdec);
				vdec_sync_input_write(vdec);

				wp = READ_VREG(HEVC_STREAM_WR_PTR);

				/*pr_info("vdec: restore context\r\n");*/
			}

		} else {
			/* no saved context yet: program the FIFO window
			 * from scratch starting at input->start
			 */
			if (input->target == VDEC_INPUT_TARGET_VLD) {
				WRITE_VREG(VLD_MEM_VIFIFO_START_PTR,
					input->start);
				WRITE_VREG(VLD_MEM_VIFIFO_END_PTR,
					input->start + input->size - 8);
				WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
					input->start);

				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

				/* set to manual mode */
				WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
				WRITE_VREG(VLD_MEM_VIFIFO_RP, input->start);
				WRITE_VREG(VLD_MEM_VIFIFO_WP,
					READ_PARSER_REG(PARSER_VIDEO_WP));

				rp = READ_VREG(VLD_MEM_VIFIFO_RP);

				/* enable */
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
					(0x11 << 16) | (1<<10));

				wp = READ_VREG(VLD_MEM_VIFIFO_WP);

			} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
				WRITE_VREG(HEVC_STREAM_START_ADDR,
					input->start);
				WRITE_VREG(HEVC_STREAM_END_ADDR,
					input->start + input->size);
				WRITE_VREG(HEVC_STREAM_RD_PTR,
					input->start);
				WRITE_VREG(HEVC_STREAM_WR_PTR,
					READ_PARSER_REG(PARSER_VIDEO_WP));

				rp = READ_VREG(HEVC_STREAM_RD_PTR);
				wp = READ_VREG(HEVC_STREAM_WR_PTR);
				fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
						>> 16) & 0x7f;

				/* enable */
			}
		}
		*p = NULL;
		/* wrap-aware available-byte count, including data already
		 * buffered inside the HW FIFO itself
		 */
		if (wp >= rp)
			size = wp - rp + fifo_len;
		else
			size = wp + input->size - rp + fifo_len;
		if (size < 0) {
			pr_info("%s error: input->size %x wp %x rp %x fifo_len %x => size %x\r\n",
				__func__, input->size, wp, rp, fifo_len, size);
			size = 0;
		}
		return size;
	}
}
EXPORT_SYMBOL(vdec_prepare_input);
1489
1490void vdec_enable_input(struct vdec_s *vdec)
1491{
1492 struct vdec_input_s *input = &vdec->input;
1493
1494 if (vdec->status != VDEC_STATUS_ACTIVE)
1495 return;
1496
1497 if (input->target == VDEC_INPUT_TARGET_VLD)
1498 SET_VREG_MASK(VLD_MEM_VIFIFO_CONTROL, (1<<2) | (1<<1));
1499 else if (input->target == VDEC_INPUT_TARGET_HEVC) {
1500 SET_VREG_MASK(HEVC_STREAM_CONTROL, 1);
1501 if (vdec_stream_based(vdec))
1502 CLEAR_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
1503 else
1504 SET_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
1505 SET_VREG_MASK(HEVC_STREAM_FIFO_CTL, (1<<29));
1506 }
1507}
1508EXPORT_SYMBOL(vdec_enable_input);
1509
1510int vdec_set_input_buffer(struct vdec_s *vdec, u32 start, u32 size)
1511{
1512 int r = vdec_input_set_buffer(&vdec->input, start, size);
1513
1514 if (r)
1515 return r;
1516
1517 if (vdec->slave)
1518 r = vdec_input_set_buffer(&vdec->slave->input, start, size);
1519
1520 return r;
1521}
1522EXPORT_SYMBOL(vdec_set_input_buffer);
1523
1524/*
1525 * vdec_eos returns the possibility that there are
1526 * more input can be used by decoder through vdec_prepare_input
1527 * Note: this function should be called prior to vdec_vframe_dirty
1528 * by decoder driver to determine if EOS happens for stream based
1529 * decoding when there is no sufficient data for a frame
1530 */
1531bool vdec_has_more_input(struct vdec_s *vdec)
1532{
1533 struct vdec_input_s *input = &vdec->input;
1534
1535 if (!input->eos)
1536 return true;
1537
1538 if (input_frame_based(input))
1539 return vdec_input_next_input_chunk(input) != NULL;
1540 else {
1541 if (input->target == VDEC_INPUT_TARGET_VLD)
1542 return READ_VREG(VLD_MEM_VIFIFO_WP) !=
1543 READ_PARSER_REG(PARSER_VIDEO_WP);
1544 else {
1545 return (READ_VREG(HEVC_STREAM_WR_PTR) & ~0x3) !=
1546 (READ_PARSER_REG(PARSER_VIDEO_WP) & ~0x3);
1547 }
1548 }
1549}
1550EXPORT_SYMBOL(vdec_has_more_input);
1551
/* Set the input prepare level (buffering threshold used by the input
 * chain; consumers are elsewhere in vdec_input).
 */
void vdec_set_prepare_level(struct vdec_s *vdec, int level)
{
	vdec->input.prepare_level = level;
}
EXPORT_SYMBOL(vdec_set_prepare_level);
1557
/* Replace (not OR-in) the vdec flag word, e.g.
 * VDEC_FLAG_SELF_INPUT_CONTEXT.  Note this overwrites any previously
 * set flags.
 */
void vdec_set_flag(struct vdec_s *vdec, u32 flag)
{
	vdec->flag = flag;
}
EXPORT_SYMBOL(vdec_set_flag);
1563
/*
 * Mark end-of-stream on this vdec's input (and on the slave for dual
 * decoding), then kick the core scheduler semaphore so the state
 * change is acted upon.
 */
void vdec_set_eos(struct vdec_s *vdec, bool eos)
{
	struct vdec_core_s *core = vdec_core;

	vdec->input.eos = eos;

	if (vdec->slave)
		vdec->slave->input.eos = eos;
	/* wake the vdec core thread */
	up(&core->sem);
}
EXPORT_SYMBOL(vdec_set_eos);
1575
1576#ifdef VDEC_DEBUG_SUPPORT
/* Debug helper: turn on step mode by setting all nine step bits.
 * NOTE(review): individual bit meanings depend on the step_mode
 * consumers elsewhere in this file -- confirm before relying on them.
 */
void vdec_set_step_mode(void)
{
	step_mode = 0x1ff;
}
EXPORT_SYMBOL(vdec_set_step_mode);
1582#endif
1583
1584void vdec_set_next_sched(struct vdec_s *vdec, struct vdec_s *next_vdec)
1585{
1586 if (vdec && next_vdec) {
1587 vdec->sched = 0;
1588 next_vdec->sched = 1;
1589 }
1590}
1591EXPORT_SYMBOL(vdec_set_next_sched);
1592
1593/*
1594 * Swap Context: S0 S1 S2 S3 S4
1595 * Sample sequence: M S M M S
1596 * Master Context: S0 S0 S2 S3 S3
1597 * Slave context: NA S1 S1 S2 S4
1598 * ^
1599 * ^
1600 * ^
1601 * the tricky part
1602 * If there are back to back decoding of master or slave
1603 * then the context of the counter part should be updated
1604 * with current decoder. In this example, S1 should be
1605 * updated to S2.
1606 * This is done by swap the swap_page and related info
1607 * between two layers.
1608 */
1609static void vdec_borrow_input_context(struct vdec_s *vdec)
1610{
1611 struct page *swap_page;
1612 unsigned long swap_page_phys;
1613 struct vdec_input_s *me;
1614 struct vdec_input_s *other;
1615
1616 if (!vdec_dual(vdec))
1617 return;
1618
1619 me = &vdec->input;
1620 other = &vdec_get_associate(vdec)->input;
1621
1622 /* swap the swap_context, borrow counter part's
1623 * swap context storage and update all related info.
1624 * After vdec_vframe_dirty, vdec_save_input_context
1625 * will be called to update current vdec's
1626 * swap context
1627 */
1628 swap_page = other->swap_page;
1629 other->swap_page = me->swap_page;
1630 me->swap_page = swap_page;
1631
1632 swap_page_phys = other->swap_page_phys;
1633 other->swap_page_phys = me->swap_page_phys;
1634 me->swap_page_phys = swap_page_phys;
1635
1636 other->swap_rp = me->swap_rp;
1637 other->streaming_rp = me->streaming_rp;
1638 other->stream_cookie = me->stream_cookie;
1639 other->swap_valid = me->swap_valid;
1640}
1641
/*
 * Mark @chunk (if any) as consumed by the decoder.  For stream-based
 * inputs this also publishes decode progress back to the parser front
 * end and flags that the HW context must be swapped out next time.
 */
void vdec_vframe_dirty(struct vdec_s *vdec, struct vframe_chunk_s *chunk)
{
	if (chunk)
		chunk->flag |= VFRAME_CHUNK_FLAG_CONSUMED;

	if (vdec_stream_based(vdec)) {
		vdec->input.swap_needed = true;

		if (vdec_dual(vdec)) {
			/* dirty_count > 1 means this layer ran twice in a
			 * row without its counterpart running, so the
			 * counterpart's context must be refreshed (see the
			 * vdec_borrow_input_context block comment)
			 */
			vdec_get_associate(vdec)->input.dirty_count = 0;
			vdec->input.dirty_count++;
			if (vdec->input.dirty_count > 1) {
				vdec->input.dirty_count = 1;
				vdec_borrow_input_context(vdec);
			}
		}

		/* for stream based mode, we update read and write pointer
		 * also in case decoder wants to keep working on decoding
		 * for more frames while input front end has more data
		 */
		vdec_sync_input_read(vdec);
		vdec_sync_input_write(vdec);

		vdec->need_more_data |= VDEC_NEED_MORE_DATA_DIRTY;
		vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
	}
}
EXPORT_SYMBOL(vdec_vframe_dirty);
1671
1672bool vdec_need_more_data(struct vdec_s *vdec)
1673{
1674 if (vdec_stream_based(vdec))
1675 return vdec->need_more_data & VDEC_NEED_MORE_DATA;
1676
1677 return false;
1678}
1679EXPORT_SYMBOL(vdec_need_more_data);
1680
1681
/*
 * Fence off HEVC (and HEVCB on G12A and later) DDR traffic via the
 * DMC: clear the request-enable bits, then busy-wait until the DMC
 * channel status reports the channels quiesced.  Called after saving
 * the input context so the swap page write has fully landed.
 */
void hevc_wait_ddr(void)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 4; /* hevc */
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask |= (1 << 8); /* hevcb */

	/* serialize the DMC read-modify-write against other users */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* NOTE(review): assumes DMC_CHAN_STS bits rise when the channel
	 * is idle; this busy-wait has no timeout -- confirm HW guarantees
	 */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;
}
1700
/*
 * Save the HW stream-input state (read pointer, wrap/byte count, and
 * the FIFO swap context page) into this vdec's input so it can be
 * restored the next time this instance is scheduled onto the core.
 * Only stream-based inputs with swap_needed set are saved.
 */
void vdec_save_input_context(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_SAVE_INPUT);
#endif

	/* NOTE(review): bit15 presumably flushes/halts the VIFIFO before
	 * the swap-out below -- confirm against the register manual
	 */
	if (input->target == VDEC_INPUT_TARGET_VLD)
		WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1<<15);

	if (input_stream_based(input) && (input->swap_needed)) {
		if (input->target == VDEC_INPUT_TARGET_VLD) {
			/* swap the VLD FIFO context out to swap_page and
			 * busy-wait for the HW swap engine (bit7 busy)
			 */
			WRITE_VREG(VLD_MEM_SWAP_ADDR,
				input->swap_page_phys);
			WRITE_VREG(VLD_MEM_SWAP_CTL, 3);
			while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
				;
			WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
			/* for VLD inputs the cookie is the wrap count */
			vdec->input.stream_cookie =
				READ_VREG(VLD_MEM_VIFIFO_WRAP_COUNT);
			vdec->input.swap_rp =
				READ_VREG(VLD_MEM_VIFIFO_RP);
			/* total consumed = wraps * buffer size + rp minus
			 * bytes still sitting unread in the FIFO
			 */
			vdec->input.total_rd_count =
				(u64)vdec->input.stream_cookie *
				vdec->input.size + vdec->input.swap_rp -
				READ_VREG(VLD_MEM_VIFIFO_BYTES_AVAIL);
		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			/* swap the HEVC stream context out, then wait for
			 * the HW swap engine to finish (bit7 busy)
			 */
			WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
				input->swap_page_phys);
			WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 3);

			while (READ_VREG(HEVC_STREAM_SWAP_CTRL) & (1<<7))
				;
			WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);

			/* for HEVC inputs the cookie is the 32-bit HW
			 * shifted-byte count
			 */
			vdec->input.stream_cookie =
				READ_VREG(HEVC_SHIFT_BYTE_COUNT);
			vdec->input.swap_rp =
				READ_VREG(HEVC_STREAM_RD_PTR);
			/* extend the 32-bit count into the 64-bit
			 * streaming_rp: bump the high word when the low
			 * word wrapped, then splice in the new low word
			 */
			if (((vdec->input.stream_cookie & 0x80000000) == 0) &&
				(vdec->input.streaming_rp & 0x80000000))
				vdec->input.streaming_rp += 1ULL << 32;
			vdec->input.streaming_rp &= 0xffffffffULL << 32;
			vdec->input.streaming_rp |= vdec->input.stream_cookie;
			vdec->input.total_rd_count = vdec->input.streaming_rp;
		}

		input->swap_valid = true;
		input->swap_needed = false;
		/*pr_info("vdec: save context\r\n");*/

		vdec_sync_input_read(vdec);

		if (vdec_dual(vdec)) {
			/* remember which layer saved last so the next
			 * vdec_prepare_input restores the right context
			 */
			struct vdec_s *master = (vdec->slave) ?
				vdec : vdec->master;
			master->input.last_swap_slave = (master->slave == vdec);
			/* pr_info("master->input.last_swap_slave = %d\n",
				master->input.last_swap_slave); */
		}

		hevc_wait_ddr();
	}
}
EXPORT_SYMBOL(vdec_save_input_context);
1767
1768void vdec_clean_input(struct vdec_s *vdec)
1769{
1770 struct vdec_input_s *input = &vdec->input;
1771
1772 while (!list_empty(&input->vframe_chunk_list)) {
1773 struct vframe_chunk_s *chunk =
1774 vdec_input_next_chunk(input);
1775 if (chunk && (chunk->flag & VFRAME_CHUNK_FLAG_CONSUMED))
1776 vdec_input_release_chunk(input, chunk);
1777 else
1778 break;
1779 }
1780 vdec_save_input_context(vdec);
1781}
1782EXPORT_SYMBOL(vdec_clean_input);
1783
1784
/*
 * Restore only the read side of the HW stream FIFO for this vdec.
 * With a valid swap context, swap it back in from swap_page;
 * otherwise program the FIFO window from scratch at input->start.
 * Stream-based inputs only; always returns 0.
 */
static int vdec_input_read_restore(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

	if (!vdec_stream_based(vdec))
		return 0;

	if (!input->swap_valid) {
		/* no saved context: program a fresh FIFO window */
		if (input->target == VDEC_INPUT_TARGET_VLD) {
			WRITE_VREG(VLD_MEM_VIFIFO_START_PTR,
				input->start);
			WRITE_VREG(VLD_MEM_VIFIFO_END_PTR,
				input->start + input->size - 8);
			WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
				input->start);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

			/* set to manual mode */
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
			WRITE_VREG(VLD_MEM_VIFIFO_RP, input->start);
		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_START_ADDR,
				input->start);
			WRITE_VREG(HEVC_STREAM_END_ADDR,
				input->start + input->size);
			WRITE_VREG(HEVC_STREAM_RD_PTR,
				input->start);
		}
		return 0;
	}
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		/* restore read side */
		WRITE_VREG(VLD_MEM_SWAP_ADDR,
			input->swap_page_phys);

		/*swap active*/
		WRITE_VREG(VLD_MEM_SWAP_CTL, 1);

		/*wait swap busy*/
		while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
			;

		WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		/* restore read side */
		WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
			input->swap_page_phys);
		WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 1);

		/* wait for the HW swap engine to finish (bit7 busy) */
		while (READ_VREG(HEVC_STREAM_SWAP_CTRL)
			& (1<<7))
			;
		WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);
	}

	return 0;
}
1843
1844
/*
 * Restore the read context (if any), re-sync both FIFO pointers with
 * the parser front end, and return the number of stream bytes
 * available to the decoder (wrap-aware, including data already inside
 * the HW FIFO).
 */
int vdec_sync_input(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;
	u32 rp = 0, wp = 0, fifo_len = 0;
	int size;

	vdec_input_read_restore(vdec);
	vdec_sync_input_read(vdec);
	vdec_sync_input_write(vdec);
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		rp = READ_VREG(VLD_MEM_VIFIFO_RP);
		wp = READ_VREG(VLD_MEM_VIFIFO_WP);

	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		rp = READ_VREG(HEVC_STREAM_RD_PTR);
		wp = READ_VREG(HEVC_STREAM_WR_PTR);
		/* bits 22..16 of FIFO_CTL hold the internal fifo level */
		fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
				>> 16) & 0x7f;
	}
	if (wp >= rp)
		size = wp - rp + fifo_len;
	else
		size = wp + input->size - rp + fifo_len;
	if (size < 0) {
		pr_info("%s error: input->size %x wp %x rp %x fifo_len %x => size %x\r\n",
			__func__, input->size, wp, rp, fifo_len, size);
		size = 0;
	}
	return size;

}
EXPORT_SYMBOL(vdec_sync_input);
1877
1878const char *vdec_status_str(struct vdec_s *vdec)
1879{
1880 return vdec->status < ARRAY_SIZE(vdec_status_string) ?
1881 vdec_status_string[vdec->status] : "INVALID";
1882}
1883
1884const char *vdec_type_str(struct vdec_s *vdec)
1885{
1886 switch (vdec->type) {
1887 case VDEC_TYPE_SINGLE:
1888 return "VDEC_TYPE_SINGLE";
1889 case VDEC_TYPE_STREAM_PARSER:
1890 return "VDEC_TYPE_STREAM_PARSER";
1891 case VDEC_TYPE_FRAME_BLOCK:
1892 return "VDEC_TYPE_FRAME_BLOCK";
1893 case VDEC_TYPE_FRAME_CIRCULAR:
1894 return "VDEC_TYPE_FRAME_CIRCULAR";
1895 default:
1896 return "VDEC_TYPE_INVALID";
1897 }
1898}
1899
/*
 * Return the device-name string for this vdec's format.
 * vdec_device_name[] apparently stores two entries per format and
 * "* 2 + 1" selects the second of the pair -- TODO confirm the table
 * layout (declared elsewhere in this file).
 */
const char *vdec_device_name_str(struct vdec_s *vdec)
{
	return vdec_device_name[vdec->format * 2 + 1];
}
EXPORT_SYMBOL(vdec_device_name_str);
1905
1906void walk_vdec_core_list(char *s)
1907{
1908 struct vdec_s *vdec;
1909 struct vdec_core_s *core = vdec_core;
1910 unsigned long flags;
1911
1912 pr_info("%s --->\n", s);
1913
1914 flags = vdec_core_lock(vdec_core);
1915
1916 if (list_empty(&core->connected_vdec_list)) {
1917 pr_info("connected vdec list empty\n");
1918 } else {
1919 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
1920 pr_info("\tvdec (%p), status = %s\n", vdec,
1921 vdec_status_str(vdec));
1922 }
1923 }
1924
1925 vdec_core_unlock(vdec_core, flags);
1926}
1927EXPORT_SYMBOL(walk_vdec_core_list);
1928
1929/* insert vdec to vdec_core for scheduling,
1930 * for dual running decoders, connect/disconnect always runs in pairs
1931 */
/*
 * Add @vdec (and its slave, for dual decoding) to the core's
 * connected list so the scheduler thread can run it.  Idempotent:
 * returns 0 immediately unless the vdec is DISCONNECTED.
 */
int vdec_connect(struct vdec_s *vdec)
{
	unsigned long flags;

	//trace_vdec_connect(vdec);/*DEBUG_TMP*/

	if (vdec->status != VDEC_STATUS_DISCONNECTED)
		return 0;

	/* status transitions happen before taking the core lock; the
	 * vdec is not yet visible to the scheduler
	 */
	vdec_set_status(vdec, VDEC_STATUS_CONNECTED);
	vdec_set_next_status(vdec, VDEC_STATUS_CONNECTED);

	/* completed by the core thread when the vdec goes inactive;
	 * vdec_disconnect() waits on it
	 */
	init_completion(&vdec->inactive_done);

	if (vdec->slave) {
		vdec_set_status(vdec->slave, VDEC_STATUS_CONNECTED);
		vdec_set_next_status(vdec->slave, VDEC_STATUS_CONNECTED);

		init_completion(&vdec->slave->inactive_done);
	}

	flags = vdec_core_lock(vdec_core);

	list_add_tail(&vdec->list, &vdec_core->connected_vdec_list);

	if (vdec->slave) {
		list_add_tail(&vdec->slave->list,
			&vdec_core->connected_vdec_list);
	}

	vdec_core_unlock(vdec_core, flags);

	/* kick the scheduler so the new vdec gets considered */
	up(&vdec_core->sem);

	return 0;
}
EXPORT_SYMBOL(vdec_connect);
1969
1970/* remove vdec from vdec_core scheduling */
1971int vdec_disconnect(struct vdec_s *vdec)
1972{
1973#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
1974 vdec_profile(vdec, VDEC_PROFILE_EVENT_DISCONNECT);
1975#endif
1976 //trace_vdec_disconnect(vdec);/*DEBUG_TMP*/
1977
1978 if ((vdec->status != VDEC_STATUS_CONNECTED) &&
1979 (vdec->status != VDEC_STATUS_ACTIVE)) {
1980 return 0;
1981 }
1982 mutex_lock(&vdec_mutex);
1983 /*
1984 *when a vdec is under the management of scheduler
1985 * the status change will only be from vdec_core_thread
1986 */
1987 vdec_set_next_status(vdec, VDEC_STATUS_DISCONNECTED);
1988
1989 if (vdec->slave)
1990 vdec_set_next_status(vdec->slave, VDEC_STATUS_DISCONNECTED);
1991 else if (vdec->master)
1992 vdec_set_next_status(vdec->master, VDEC_STATUS_DISCONNECTED);
1993 mutex_unlock(&vdec_mutex);
1994 up(&vdec_core->sem);
1995
1996 if(!wait_for_completion_timeout(&vdec->inactive_done,
1997 msecs_to_jiffies(2000)))
1998 goto discon_timeout;
1999
2000 if (vdec->slave) {
2001 if(!wait_for_completion_timeout(&vdec->slave->inactive_done,
2002 msecs_to_jiffies(2000)))
2003 goto discon_timeout;
2004 } else if (vdec->master) {
2005 if(!wait_for_completion_timeout(&vdec->master->inactive_done,
2006 msecs_to_jiffies(2000)))
2007 goto discon_timeout;
2008 }
2009
2010 return 0;
2011discon_timeout:
2012 pr_err("%s timeout!!! status: 0x%x\n", __func__, vdec->status);
2013 return 0;
2014}
2015EXPORT_SYMBOL(vdec_disconnect);
2016
2017/* release vdec structure */
/*
 * Free a vdec instance: release its input chain, flush profiling
 * data, return the instance id to the allocator and free the
 * structure.  The caller is expected to have disconnected the vdec
 * from the scheduler first.  Always returns 0.
 */
int vdec_destroy(struct vdec_s *vdec)
{
	//trace_vdec_destroy(vdec);/*DEBUG_TMP*/

	vdec_input_release(&vdec->input);

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile_flush(vdec);
#endif
	/* give the instance id back to the ida allocator */
	ida_simple_remove(&vdec_core->ida, vdec->id);
	vfree(vdec);

	/* one fewer live vdec instance tracked by the core */
	atomic_dec(&vdec_core->vdec_nr);

	return 0;
}
EXPORT_SYMBOL(vdec_destroy);
2035
2036/*
2037 *register vdec_device
2038 * create output, vfm or create ionvideo output
2039 */
/*
 * Register and initialize a decoder instance:
 *  - resolve the platform device name for the format,
 *  - claim the (single) VFM path for plain stream-based decoders,
 *  - configure the input chain target (HEVC core vs VLD core),
 *  - register the per-format platform device (probes the driver),
 *  - build the vframe output pipeline (VFM chain, IONVIDEO,
 *    v4lvideo, tunnel, ... according to frame_base_video_path),
 *  - register the vframe provider and emit the frame-rate hint.
 *
 * @is_4k is accepted for API compatibility; it is not referenced in
 * this function body.
 * Returns 0 on success or a negative errno; on failure the
 * inited_vcodec_num counter is rolled back on the paths that had
 * incremented it.
 */
s32 vdec_init(struct vdec_s *vdec, int is_4k)
{
	int r = 0;
	struct vdec_s *p = vdec;
	const char *dev_name;
	int id = PLATFORM_DEVID_AUTO;/*if the caller supplied an id, use it*/

	dev_name = get_dev_name(vdec_single(vdec), vdec->format);

	if (dev_name == NULL)
		return -ENODEV;

	pr_info("vdec_init, dev_name:%s, vdec_type=%s\n",
		dev_name, vdec_type_str(vdec));

	/*
	 *todo: VFM patch control should be configurable,
	 * for now all stream based input uses default VFM path.
	 */
	if (vdec_stream_based(vdec) && !vdec_dual(vdec)) {
		if (vdec_core->vfm_vdec == NULL) {
			pr_debug("vdec_init set vfm decoder %p\n", vdec);
			vdec_core->vfm_vdec = vdec;
		} else {
			/* only one stream-based decoder may own the
			 * default VFM path at a time
			 */
			pr_info("vdec_init vfm path busy.\n");
			return -EBUSY;
		}
	}

	mutex_lock(&vdec_mutex);
	inited_vcodec_num++;
	mutex_unlock(&vdec_mutex);

	/* HEVC/AVS2/VP9 run on the HEVC core; everything else on VLD */
	vdec_input_set_type(&vdec->input, vdec->type,
			(vdec->format == VFORMAT_HEVC ||
			vdec->format == VFORMAT_AVS2 ||
			vdec->format == VFORMAT_VP9) ?
				VDEC_INPUT_TARGET_HEVC :
				VDEC_INPUT_TARGET_VLD);
	if (vdec_single(vdec))
		vdec_enable_DMC(vdec);
	p->cma_dev = vdec_core->cma_dev;
	p->get_canvas = get_canvas;
	p->get_canvas_ex = get_canvas_ex;
	p->free_canvas_ex = free_canvas_ex;
	p->vdec_fps_detec = vdec_fps_detec;
	atomic_set(&p->inirq_flag, 0);
	atomic_set(&p->inirq_thread_flag, 0);
	/* todo */
	if (!vdec_dual(vdec))
		p->use_vfm_path = vdec_stream_based(vdec);
	/* vdec_dev_reg.flag = 0; */
	if (vdec->id >= 0)
		id = vdec->id;
	p->parallel_dec = parallel_decode;
	vdec_core->parallel_dec = parallel_decode;
	vdec->canvas_mode = CANVAS_BLKMODE_32X32;
#ifdef FRAME_CHECK
	vdec_frame_check_init(vdec);
#endif
	/* the platform data is the vdec pointer itself (copied by
	 * value); the per-format driver probe picks it up
	 */
	p->dev = platform_device_register_data(
				&vdec_core->vdec_core_platform_device->dev,
				dev_name,
				id,
				&p, sizeof(struct vdec_s *));

	if (IS_ERR(p->dev)) {
		r = PTR_ERR(p->dev);
		pr_err("vdec: Decoder device %s register failed (%d)\n",
			dev_name, r);

		mutex_lock(&vdec_mutex);
		inited_vcodec_num--;
		mutex_unlock(&vdec_mutex);

		goto error;
	} else if (!p->dev->dev.driver) {
		pr_info("vdec: Decoder device %s driver probe failed.\n",
			dev_name);
		r = -ENODEV;

		goto error;
	}

	if ((p->type == VDEC_TYPE_FRAME_BLOCK) && (p->run == NULL)) {
		r = -ENODEV;
		pr_err("vdec: Decoder device not handled (%s)\n", dev_name);

		mutex_lock(&vdec_mutex);
		inited_vcodec_num--;
		mutex_unlock(&vdec_mutex);

		goto error;
	}

	if (p->use_vfm_path) {
		vdec->vf_receiver_inst = -1;
		vdec->vfm_map_id[0] = 0;
	} else if (!vdec_dual(vdec)) {
		/* create IONVIDEO instance and connect decoder's
		 * vf_provider interface to it
		 */
		if (p->type != VDEC_TYPE_FRAME_BLOCK) {
			r = -ENODEV;
			pr_err("vdec: Incorrect decoder type\n");

			mutex_lock(&vdec_mutex);
			inited_vcodec_num--;
			mutex_unlock(&vdec_mutex);

			goto error;
		}
		if (p->frame_base_video_path == FRAME_BASE_PATH_IONVIDEO) {
#if 1
			r = ionvideo_assign_map(&vdec->vf_receiver_name,
					&vdec->vf_receiver_inst);
#else
			/*
			 * temporarily just use decoder instance ID as iondriver ID
			 * to solve OMX iondriver instance number check time sequence
			 * only the limitation is we can NOT mix different video
			 * decoders since same ID will be used for different decoder
			 * formats.
			 */
			vdec->vf_receiver_inst = p->dev->id;
			r = ionvideo_assign_map(&vdec->vf_receiver_name,
					&vdec->vf_receiver_inst);
#endif
			if (r < 0) {
				pr_err("IonVideo frame receiver allocation failed.\n");

				mutex_lock(&vdec_mutex);
				inited_vcodec_num--;
				mutex_unlock(&vdec_mutex);

				goto error;
			}

			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				vdec->vf_receiver_name);
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path ==
				FRAME_BASE_PATH_AMLVIDEO_AMVIDEO) {
			/* secure streams bypass ppmgr/deinterlace */
			if (vdec_secure(vdec)) {
				snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
					"%s %s", vdec->vf_provider_name,
					"amlvideo amvideo");
			} else {
				snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
					"%s %s", vdec->vf_provider_name,
					"amlvideo ppmgr deinterlace amvideo");
			}
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path ==
				FRAME_BASE_PATH_AMLVIDEO1_AMVIDEO2) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				"aml_video.1 videosync.0 videopip");
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path == FRAME_BASE_PATH_V4L_OSD) {
			/* NOTE(review): vf_receiver_name is used here
			 * without an assign_map call on this path --
			 * presumably set by the caller; confirm
			 */
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				vdec->vf_receiver_name);
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path == FRAME_BASE_PATH_TUNNEL_MODE) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s", vdec->vf_provider_name,
				"amvideo");
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path == FRAME_BASE_PATH_V4L_VIDEO) {
			snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
				"%s %s %s", vdec->vf_provider_name,
				vdec->vf_receiver_name, "amvideo");
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		} else if (p->frame_base_video_path ==
				FRAME_BASE_PATH_DI_V4LVIDEO) {
#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
			r = v4lvideo_assign_map(&vdec->vf_receiver_name,
					&vdec->vf_receiver_inst);
#else
			r = -1;
#endif
			if (r < 0) {
				pr_err("V4lVideo frame receiver allocation failed.\n");
				mutex_lock(&vdec_mutex);
				inited_vcodec_num--;
				mutex_unlock(&vdec_mutex);
				goto error;
			}
			/* deinterlace placement depends on the receiver
			 * instance number and the v4lvideo_add_di knob;
			 * secure streams never go through DI
			 */
			if (!v4lvideo_add_di || vdec_secure(vdec))
				snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
					"%s %s", vdec->vf_provider_name,
					vdec->vf_receiver_name);
			else {
				if (vdec->vf_receiver_inst == 0)
					snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
						"%s %s %s", vdec->vf_provider_name,
						"deinterlace",
						vdec->vf_receiver_name);
				else if (vdec->vf_receiver_inst < max_di_instance)
					snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
						"%s %s%d %s", vdec->vf_provider_name,
						"dimulti.",
						vdec->vf_receiver_inst,
						vdec->vf_receiver_name);
				else
					snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
						"%s %s", vdec->vf_provider_name,
						vdec->vf_receiver_name);
			}
			snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
				"vdec-map-%d", vdec->id);
		}

		if (vfm_map_add(vdec->vfm_map_id,
			vdec->vfm_map_chain) < 0) {
			r = -ENOMEM;
			pr_err("Decoder pipeline map creation failed %s.\n",
				vdec->vfm_map_id);
			vdec->vfm_map_id[0] = 0;

			mutex_lock(&vdec_mutex);
			inited_vcodec_num--;
			mutex_unlock(&vdec_mutex);

			goto error;
		}

		pr_debug("vfm map %s created\n", vdec->vfm_map_id);

		/*
		 *assume IONVIDEO driver already have a few vframe_receiver
		 * registered.
		 * 1. Call iondriver function to allocate a IONVIDEO path and
		 *    provide receiver's name and receiver op.
		 * 2. Get decoder driver's provider name from driver instance
		 * 3. vfm_map_add(name, "<decoder provider name>
		 *    <iondriver receiver name>"), e.g.
		 *    vfm_map_add("vdec_ion_map_0", "mpeg4_0 iondriver_1");
		 * 4. vf_reg_provider and vf_reg_receiver
		 * Note: the decoder provider's op uses vdec as op_arg
		 *     the iondriver receiver's op uses iondev device as
		 *     op_arg
		 */

	}

	if (!vdec_single(vdec)) {
		vf_reg_provider(&p->vframe_provider);

		vf_notify_receiver(p->vf_provider_name,
			VFRAME_EVENT_PROVIDER_START,
			vdec);

		/* first multi-instance vdec becomes the frame-rate
		 * hint owner for the receiver chain
		 */
		if (vdec_core->hint_fr_vdec == NULL)
			vdec_core->hint_fr_vdec = vdec;

		if (vdec_core->hint_fr_vdec == vdec) {
			if (p->sys_info->rate != 0) {
				if (!vdec->is_reset) {
					vf_notify_receiver(p->vf_provider_name,
						VFRAME_EVENT_PROVIDER_FR_HINT,
						(void *)
						((unsigned long)
						p->sys_info->rate));
					vdec->fr_hint_state = VDEC_HINTED;
				}
			} else {
				/* rate unknown yet: hint later, once the
				 * decoder reports it
				 */
				vdec->fr_hint_state = VDEC_NEED_HINT;
			}
		}
	}

	p->dolby_meta_with_el = 0;
	pr_debug("vdec_init, vf_provider_name = %s\n", p->vf_provider_name);
	vdec_input_prepare_bufs(/*prepared buffer for fast playing.*/
		&vdec->input,
		vdec->sys_info->width,
		vdec->sys_info->height);
	/* vdec is now ready to be active */
	vdec_set_status(vdec, VDEC_STATUS_DISCONNECTED);
	if (p->use_vfm_path) {
		/* QOS ring buffers used by the frame-info reporting
		 * path; allocation failure is tolerated (only logged)
		 */
		frame_info_buf_in = (struct vframe_qos_s *)
			kmalloc(QOS_FRAME_NUM*sizeof(struct vframe_qos_s), GFP_KERNEL);
		if (!frame_info_buf_in)
			pr_err("kmalloc: frame_info_buf_in failed\n");
		else
			memset(frame_info_buf_in, 0,
					QOS_FRAME_NUM*sizeof(struct vframe_qos_s));

		frame_info_buf_out = (struct vframe_qos_s *)
			kmalloc(QOS_FRAME_NUM*sizeof(struct vframe_qos_s), GFP_KERNEL);
		if (!frame_info_buf_out)
			pr_err("kmalloc: frame_info_buf_out failed\n");
		else
			memset(frame_info_buf_out, 0,
					QOS_FRAME_NUM*sizeof(struct vframe_qos_s));
		frame_qos_wr = 0;
		frame_qos_rd = 0;
	}
	return 0;

error:
	return r;
}
EXPORT_SYMBOL(vdec_init);
2353
2354/* vdec_create/init/release/destroy are applied to both dual running decoders
2355 */
/*
 * vdec_release() - tear down a decoder instance created by vdec_create/init.
 *
 * Order matters here:
 *  1. optionally wait for debug step_mode to clear,
 *  2. disconnect the instance from the core scheduler,
 *  3. send FR_END_HINT (if this instance issued the FR hint) and
 *     unregister the vframe provider(s),
 *  4. drop core-level references (vfm_vdec / hint_fr_vdec),
 *  5. remove the vfm map if one was created for this instance,
 *  6. wait for any in-flight ISR / threaded-ISR to drain,
 *  7. release frame-check/fps state, DMC, platform device and
 *     per-instance buffers, then destroy the instance.
 */
void vdec_release(struct vdec_s *vdec)
{
	//trace_vdec_release(vdec);/*DEBUG_TMP*/
#ifdef VDEC_DEBUG_SUPPORT
	/* debug hook: hold release until the step-mode session finishes */
	if (step_mode) {
		pr_info("VDEC_DEBUG: in step_mode, wait release\n");
		while (step_mode)
			udelay(10);
		pr_info("VDEC_DEBUG: step_mode is clear\n");
	}
#endif
	vdec_disconnect(vdec);

	if (vdec->vframe_provider.name) {
		if (!vdec_single(vdec)) {
			/* only the instance that issued the FR hint ends it */
			if (vdec_core->hint_fr_vdec == vdec
			&& vdec->fr_hint_state == VDEC_HINTED)
				vf_notify_receiver(
					vdec->vf_provider_name,
					VFRAME_EVENT_PROVIDER_FR_END_HINT,
					NULL);
			vdec->fr_hint_state = VDEC_NO_NEED_HINT;
		}
		vf_unreg_provider(&vdec->vframe_provider);
	}

	if (vdec_core->vfm_vdec == vdec)
		vdec_core->vfm_vdec = NULL;

	if (vdec_core->hint_fr_vdec == vdec)
		vdec_core->hint_fr_vdec = NULL;

	/* remove the per-instance vfm map created during vdec_init */
	if (vdec->vf_receiver_inst >= 0) {
		if (vdec->vfm_map_id[0]) {
			vfm_map_remove(vdec->vfm_map_id);
			vdec->vfm_map_id[0] = 0;
		}
	}

	/* busy-wait (yielding) until no ISR/threaded-ISR is running on us */
	while ((atomic_read(&vdec->inirq_flag) > 0)
		|| (atomic_read(&vdec->inirq_thread_flag) > 0))
		schedule();

#ifdef FRAME_CHECK
	vdec_frame_check_exit(vdec);
#endif
	vdec_fps_clear(vdec->id);
	/* last instance going away: disable DMC access for the decoder */
	if (atomic_read(&vdec_core->vdec_nr) == 1)
		vdec_disable_DMC(vdec);
	platform_device_unregister(vdec->dev);
	pr_debug("vdec_release instance %p, total %d\n", vdec,
		atomic_read(&vdec_core->vdec_nr));
	/*
	 * frame_info_buf_in/out are file-scope singletons allocated in
	 * vdec_init for the vfm path; freed here for the same path.
	 * NOTE(review): with multiple concurrent vfm-path instances this
	 * shared state looks racy — confirm single-instance assumption.
	 */
	if (vdec->use_vfm_path) {
		kfree(frame_info_buf_in);
		frame_info_buf_in = NULL;
		kfree(frame_info_buf_out);
		frame_info_buf_out = NULL;
		frame_qos_wr = 0;
		frame_qos_rd = 0;
	}
	vdec_destroy(vdec);

	mutex_lock(&vdec_mutex);
	inited_vcodec_num--;
	mutex_unlock(&vdec_mutex);

}
EXPORT_SYMBOL(vdec_release);
2424
2425/* For dual running decoders, vdec_reset is only called with master vdec.
2426 */
/*
 * vdec_reset() - soft-reset a connected decoder instance.
 *
 * Disconnects the instance from the scheduler, unregisters the vframe
 * provider(s), invokes the decoder's own reset callback, rebuilds the
 * input context (releasing and re-initializing buffers), re-registers
 * the provider(s) and reconnects. Firmware is forced to reload via
 * mc_loaded = 0. Always returns 0.
 */
int vdec_reset(struct vdec_s *vdec)
{
	//trace_vdec_reset(vdec); /*DEBUG_TMP*/

	vdec_disconnect(vdec);

	if (vdec->vframe_provider.name)
		vf_unreg_provider(&vdec->vframe_provider);

	if ((vdec->slave) && (vdec->slave->vframe_provider.name))
		vf_unreg_provider(&vdec->slave->vframe_provider);

	if (vdec->reset) {
		vdec->reset(vdec);
		/*
		 * NOTE(review): slave->reset is called without a NULL check,
		 * unlike the master's — confirm slaves always set .reset.
		 */
		if (vdec->slave)
			vdec->slave->reset(vdec->slave);
	}
	vdec->mc_loaded = 0;/*clear for reload firmware*/
	vdec_input_release(&vdec->input);

	vdec_input_init(&vdec->input, vdec);

	/* pre-allocate input buffers sized for the current stream */
	vdec_input_prepare_bufs(&vdec->input, vdec->sys_info->width,
		vdec->sys_info->height);

	vf_reg_provider(&vdec->vframe_provider);
	vf_notify_receiver(vdec->vf_provider_name,
		VFRAME_EVENT_PROVIDER_START, vdec);

	if (vdec->slave) {
		vf_reg_provider(&vdec->slave->vframe_provider);
		vf_notify_receiver(vdec->slave->vf_provider_name,
			VFRAME_EVENT_PROVIDER_START, vdec->slave);
		vdec->slave->mc_loaded = 0;/*clear for reload firmware*/
	}

	vdec_connect(vdec);

	return 0;
}
EXPORT_SYMBOL(vdec_reset);
2468
/*
 * vdec_v4l2_reset() - reset a decoder instance on the v4l2 path.
 *
 * @flag != 2: full reset — identical sequence to vdec_reset():
 *             unregister providers, decoder reset callback, rebuild
 *             the input context and re-register providers.
 * @flag == 2: light reset — only invoke the decoder reset callback(s),
 *             keeping providers and input context intact.
 *
 * In both cases the instance is reconnected and frame checking is
 * re-initialized. Always returns 0.
 */
int vdec_v4l2_reset(struct vdec_s *vdec, int flag)
{
	//trace_vdec_reset(vdec); /*DEBUG_TMP*/
	pr_debug("vdec_v4l2_reset %d\n", flag);
	vdec_disconnect(vdec);
	if (flag != 2) {
		if (vdec->vframe_provider.name)
			vf_unreg_provider(&vdec->vframe_provider);

		if ((vdec->slave) && (vdec->slave->vframe_provider.name))
			vf_unreg_provider(&vdec->slave->vframe_provider);

		if (vdec->reset) {
			vdec->reset(vdec);
			if (vdec->slave)
				vdec->slave->reset(vdec->slave);
		}
		vdec->mc_loaded = 0;/*clear for reload firmware*/

		vdec_input_release(&vdec->input);

		vdec_input_init(&vdec->input, vdec);

		vdec_input_prepare_bufs(&vdec->input, vdec->sys_info->width,
			vdec->sys_info->height);

		vf_reg_provider(&vdec->vframe_provider);
		vf_notify_receiver(vdec->vf_provider_name,
			VFRAME_EVENT_PROVIDER_START, vdec);

		if (vdec->slave) {
			vf_reg_provider(&vdec->slave->vframe_provider);
			vf_notify_receiver(vdec->slave->vf_provider_name,
				VFRAME_EVENT_PROVIDER_START, vdec->slave);
			vdec->slave->mc_loaded = 0;/*clear for reload firmware*/
		}
	} else {
		/* flag == 2: decoder-core reset only */
		if (vdec->reset) {
			vdec->reset(vdec);
			if (vdec->slave)
				vdec->slave->reset(vdec->slave);
		}
	}

	vdec_connect(vdec);

	vdec_frame_check_init(vdec);

	return 0;
}
EXPORT_SYMBOL(vdec_v4l2_reset);
2520
2521
2522void vdec_free_cmabuf(void)
2523{
2524 mutex_lock(&vdec_mutex);
2525
2526 /*if (inited_vcodec_num > 0) {
2527 mutex_unlock(&vdec_mutex);
2528 return;
2529 }*/
2530 mutex_unlock(&vdec_mutex);
2531}
2532
2533void vdec_core_request(struct vdec_s *vdec, unsigned long mask)
2534{
2535 vdec->core_mask |= mask;
2536
2537 if (vdec->slave)
2538 vdec->slave->core_mask |= mask;
2539 if (vdec_core->parallel_dec == 1) {
2540 if (mask & CORE_MASK_COMBINE)
2541 vdec_core->vdec_combine_flag++;
2542 }
2543
2544}
2545EXPORT_SYMBOL(vdec_core_request);
2546
2547int vdec_core_release(struct vdec_s *vdec, unsigned long mask)
2548{
2549 vdec->core_mask &= ~mask;
2550
2551 if (vdec->slave)
2552 vdec->slave->core_mask &= ~mask;
2553 if (vdec_core->parallel_dec == 1) {
2554 if (mask & CORE_MASK_COMBINE)
2555 vdec_core->vdec_combine_flag--;
2556 }
2557 return 0;
2558}
2559EXPORT_SYMBOL(vdec_core_release);
2560
2561bool vdec_core_with_input(unsigned long mask)
2562{
2563 enum vdec_type_e type;
2564
2565 for (type = VDEC_1; type < VDEC_MAX; type++) {
2566 if ((mask & (1 << type)) && cores_with_input[type])
2567 return true;
2568 }
2569
2570 return false;
2571}
2572
2573void vdec_core_finish_run(struct vdec_s *vdec, unsigned long mask)
2574{
2575 unsigned long i;
2576 unsigned long t = mask;
2577 mutex_lock(&vdec_mutex);
2578 while (t) {
2579 i = __ffs(t);
2580 clear_bit(i, &vdec->active_mask);
2581 t &= ~(1 << i);
2582 }
2583
2584 if (vdec->active_mask == 0)
2585 vdec_set_status(vdec, VDEC_STATUS_CONNECTED);
2586
2587 mutex_unlock(&vdec_mutex);
2588}
2589EXPORT_SYMBOL(vdec_core_finish_run);
2590/*
2591 * find what core resources are available for vdec
2592 */
2593static unsigned long vdec_schedule_mask(struct vdec_s *vdec,
2594 unsigned long active_mask)
2595{
2596 unsigned long mask = vdec->core_mask &
2597 ~CORE_MASK_COMBINE;
2598
2599 if (vdec->core_mask & CORE_MASK_COMBINE) {
2600 /* combined cores must be granted together */
2601 if ((mask & ~active_mask) == mask)
2602 return mask;
2603 else
2604 return 0;
2605 } else
2606 return mask & ~vdec->sched_mask & ~active_mask;
2607}
2608
2609/*
2610 *Decoder callback
2611 * Each decoder instance uses this callback to notify status change, e.g. when
2612 * decoder finished using HW resource.
2613 * a sample callback from decoder's driver is following:
2614 *
2615 * if (hw->vdec_cb) {
2616 * vdec_set_next_status(vdec, VDEC_STATUS_CONNECTED);
2617 * hw->vdec_cb(vdec, hw->vdec_cb_arg);
2618 * }
2619 */
/*
 * vdec_callback() - completion callback passed to vdec->run().
 * Records a profile event and kicks the core thread's semaphore so
 * the scheduler loop re-evaluates what to run next.
 */
static void vdec_callback(struct vdec_s *vdec, void *data)
{
	struct vdec_core_s *core = (struct vdec_core_s *)data;

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_CB);
#endif

	up(&core->sem);
}
2630
/*
 * vdec_isr() - top-half interrupt handler shared by the decoder IRQ lines.
 *
 * Resolves the owning vdec instance: in parallel-decoder mode each IRQ
 * line maps to a fixed core (HEVC / VDEC); otherwise the last scheduled
 * vdec is assumed. A context-level dev_isr, if installed, overrides
 * per-instance dispatch. inirq_flag brackets the handler so
 * vdec_release() can wait for in-flight interrupts to drain.
 */
static irqreturn_t vdec_isr(int irq, void *dev_id)
{
	struct vdec_isr_context_s *c =
		(struct vdec_isr_context_s *)dev_id;
	struct vdec_s *vdec = vdec_core->last_vdec;
	irqreturn_t ret = IRQ_HANDLED;

	if (vdec_core->parallel_dec == 1) {
		if (irq == vdec_core->isr_context[VDEC_IRQ_0].irq)
			vdec = vdec_core->active_hevc;
		else if (irq == vdec_core->isr_context[VDEC_IRQ_1].irq)
			vdec = vdec_core->active_vdec;
		else
			vdec = NULL;
	}

	if (vdec) {
		atomic_set(&vdec->inirq_flag, 1);
		/* timestamp for the isr->threaded-isr latency check */
		vdec->isr_ns = local_clock();
	}
	if (c->dev_isr) {
		/* device-level ISR override bypasses instance dispatch */
		ret = c->dev_isr(irq, c->dev_id);
		goto isr_done;
	}

	/* only the three known contexts may dispatch to an instance */
	if ((c != &vdec_core->isr_context[VDEC_IRQ_0]) &&
	    (c != &vdec_core->isr_context[VDEC_IRQ_1]) &&
	    (c != &vdec_core->isr_context[VDEC_IRQ_HEVC_BACK])) {
#if 0
		pr_warn("vdec interrupt w/o a valid receiver\n");
#endif
		goto isr_done;
	}

	if (!vdec) {
#if 0
		pr_warn("vdec interrupt w/o an active instance running. core = %p\n",
			core);
#endif
		goto isr_done;
	}

	if (!vdec->irq_handler) {
#if 0
		pr_warn("vdec instance has no irq handle.\n");
#endif
		goto isr_done;
	}

	ret = vdec->irq_handler(vdec, c->index);
isr_done:
	if (vdec)
		atomic_set(&vdec->inirq_flag, 0);
	return ret;
}
2686
/*
 * vdec_thread_isr() - threaded (bottom-half) interrupt handler.
 *
 * Mirrors vdec_isr()'s instance resolution, measures the latency from
 * the hard IRQ timestamp (isr_ns) to this handler and warns if it
 * exceeds 10ms, then dispatches to the context's dev_threaded_isr
 * override or the instance's threaded_irq_handler. inirq_thread_flag
 * brackets the handler so vdec_release() can wait for it to drain.
 */
static irqreturn_t vdec_thread_isr(int irq, void *dev_id)
{
	struct vdec_isr_context_s *c =
		(struct vdec_isr_context_s *)dev_id;
	struct vdec_s *vdec = vdec_core->last_vdec;
	irqreturn_t ret = IRQ_HANDLED;

	if (vdec_core->parallel_dec == 1) {
		if (irq == vdec_core->isr_context[VDEC_IRQ_0].irq)
			vdec = vdec_core->active_hevc;
		else if (irq == vdec_core->isr_context[VDEC_IRQ_1].irq)
			vdec = vdec_core->active_vdec;
		else
			vdec = NULL;
	}

	if (vdec) {
		u32 isr2tfn = 0;
		atomic_set(&vdec->inirq_thread_flag, 1);
		vdec->tfn_ns = local_clock();
		/* elapsed ns since vdec_isr(); truncated to 32 bits */
		isr2tfn = vdec->tfn_ns - vdec->isr_ns;
		if (isr2tfn > 10000000)
			pr_err("!!!!!!! %s vdec_isr to %s took %uns !!!\n",
				vdec->vf_provider_name, __func__, isr2tfn);
	}
	if (c->dev_threaded_isr) {
		ret = c->dev_threaded_isr(irq, c->dev_id);
		goto thread_isr_done;
	}
	if (!vdec)
		goto thread_isr_done;

	if (!vdec->threaded_irq_handler)
		goto thread_isr_done;
	ret = vdec->threaded_irq_handler(vdec, c->index);
thread_isr_done:
	if (vdec)
		atomic_set(&vdec->inirq_thread_flag, 0);
	return ret;
}
2727
/*
 * vdec_ready_to_run() - decide which cores in @mask this vdec can run on.
 *
 * Returns the subset of @mask the instance is ready to use, or 0/false
 * when it must not be scheduled: wrong status, no run_ready callback,
 * CRC-blocked, an unscheduled dual-decoder pair, frame-based input
 * underrun, or insufficient stream-buffer level. The final word comes
 * from the decoder's own run_ready() callback, masked by @mask.
 */
unsigned long vdec_ready_to_run(struct vdec_s *vdec, unsigned long mask)
{
	unsigned long ready_mask;
	struct vdec_input_s *input = &vdec->input;
	if ((vdec->status != VDEC_STATUS_CONNECTED) &&
	    (vdec->status != VDEC_STATUS_ACTIVE))
		return false;

	if (!vdec->run_ready)
		return false;

	/* when crc32 error, block at error frame */
	if (vdec->vfc.err_crc_block)
		return false;

	/* dual decoders (master/slave) must be explicitly scheduled */
	if ((vdec->slave || vdec->master) &&
		(vdec->sched == 0))
		return false;
#ifdef VDEC_DEBUG_SUPPORT
	inc_profi_count(mask, vdec->check_count);
#endif
	if (vdec_core_with_input(mask)) {

		/* check frame based input underrun */
		if (input && !input->eos && input_frame_based(input)
			&& (!vdec_input_next_chunk(input))) {
#ifdef VDEC_DEBUG_SUPPORT
			inc_profi_count(mask, vdec->input_underrun_count);
#endif
			return false;
		}
		/* check streaming prepare level threshold if not EOS */
		if (input && input_stream_based(input) && !input->eos) {
			u32 rp, wp, level;

			rp = READ_PARSER_REG(PARSER_VIDEO_RP);
			wp = READ_PARSER_REG(PARSER_VIDEO_WP);
			/* ring buffer: account for write pointer wrap */
			if (wp < rp)
				level = input->size + wp - rp;
			else
				level = wp - rp;

			if ((level < input->prepare_level) &&
				(pts_get_rec_num(PTS_TYPE_VIDEO,
					vdec->input.total_rd_count) < 2)) {
				vdec->need_more_data |= VDEC_NEED_MORE_DATA;
#ifdef VDEC_DEBUG_SUPPORT
				inc_profi_count(mask, vdec->input_underrun_count);
				/* step-mode debug: force-run the stepped id */
				if (step_mode & 0x200) {
					if ((step_mode & 0xff) == vdec->id) {
						step_mode |= 0xff;
						return mask;
					}
				}
#endif
				return false;
			} else if (level > input->prepare_level)
				vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
		}
	}

	/* step-mode debug: only the selected instance may proceed */
	if (step_mode) {
		if ((step_mode & 0xff) != vdec->id)
			return 0;
		step_mode |= 0xff; /*VDEC_DEBUG_SUPPORT*/
	}

	/*step_mode &= ~0xff; not work for id of 0, removed*/

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_CHK_RUN_READY);
#endif

	ready_mask = vdec->run_ready(vdec, mask) & mask;
#ifdef VDEC_DEBUG_SUPPORT
	if (ready_mask != mask)
		inc_profi_count(ready_mask ^ mask, vdec->not_run_ready_count);
#endif
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	if (ready_mask)
		vdec_profile(vdec, VDEC_PROFILE_EVENT_RUN_READY);
#endif

	return ready_mask;
}
2813
2814/* bridge on/off vdec's interrupt processing to vdec core */
2815static void vdec_route_interrupt(struct vdec_s *vdec, unsigned long mask,
2816 bool enable)
2817{
2818 enum vdec_type_e type;
2819
2820 for (type = VDEC_1; type < VDEC_MAX; type++) {
2821 if (mask & (1 << type)) {
2822 struct vdec_isr_context_s *c =
2823 &vdec_core->isr_context[cores_int[type]];
2824 if (enable)
2825 c->vdec = vdec;
2826 else if (c->vdec == vdec)
2827 c->vdec = NULL;
2828 }
2829 }
2830}
2831
2832/*
2833 * Set up secure protection for each decoder instance running.
2834 * Note: The operation from REE side only resets memory access
2835 * to a default policy and even a non_secure type will still be
2836 * changed to secure type automatically when secure source is
2837 * detected inside TEE.
2838 * Perform need_more_data checking and set flag is decoder
2839 * is not consuming data.
2840 */
/*
 * vdec_prepare_run() - per-run setup before vdec->run() is invoked.
 *
 * Routes the cores' interrupts to this instance, configures TEE/DMC
 * secure access for the input target (VLD or HEVC), and updates the
 * need_more_data state machine: a run that starts with RUN set but
 * DIRTY clear (i.e. no new data arrived since the last run) flags
 * NEED_MORE_DATA for stream-based input.
 */
void vdec_prepare_run(struct vdec_s *vdec, unsigned long mask)
{
	struct vdec_input_s *input = &vdec->input;
	int secure = (vdec_secure(vdec)) ? DMC_DEV_TYPE_SECURE :
			DMC_DEV_TYPE_NON_SECURE;

	vdec_route_interrupt(vdec, mask, true);

	/* cores without an input side need no secure/data setup */
	if (!vdec_core_with_input(mask))
		return;

	if (secure && vdec_stream_based(vdec) && force_nosecure_even_drm)
	{
		/* Verimatrix ultra webclient (HLS) was played in drmmode and used hw demux. In drmmode VDEC only can access secure.
		Now HW demux parsed es data to no-secure buffer. So the VDEC input was no-secure, VDEC playback failed. Forcing
		use nosecure for verimatrix webclient HLS. If in the future HW demux can parse es data to secure buffer, make
		VDEC r/w secure.*/
		secure = 0;
		//pr_debug("allow VDEC can access nosecure even in drmmode\n");
	}
	if (input->target == VDEC_INPUT_TARGET_VLD)
		tee_config_device_secure(DMC_DEV_ID_VDEC, secure);
	else if (input->target == VDEC_INPUT_TARGET_HEVC)
		tee_config_device_secure(DMC_DEV_ID_HEVC, secure);

	if (vdec_stream_based(vdec) &&
		((vdec->need_more_data & VDEC_NEED_MORE_DATA_RUN) &&
		(vdec->need_more_data & VDEC_NEED_MORE_DATA_DIRTY) == 0)) {
		vdec->need_more_data |= VDEC_NEED_MORE_DATA;
	}

	vdec->need_more_data |= VDEC_NEED_MORE_DATA_RUN;
	vdec->need_more_data &= ~VDEC_NEED_MORE_DATA_DIRTY;
}
2875
2876
2877/* struct vdec_core_shread manages all decoder instance in active list. When
2878 * a vdec is added into the active list, it can onlt be in two status:
2879 * VDEC_STATUS_CONNECTED(the decoder does not own HW resource and ready to run)
2880 * VDEC_STATUS_ACTIVE(the decoder owns HW resources and is running).
2881 * Removing a decoder from active list is only performed within core thread.
2882 * Adding a decoder into active list is performed from user thread.
2883 */
/*
 * vdec_core_thread() - the core scheduler loop (SCHED_FIFO kthread).
 *
 * Woken via core->sem by decoder callbacks, connect/disconnect and
 * interrupt paths. Each iteration:
 *  1. refreshes power_ref_mask (parallel mode),
 *  2. reclaims cores a decoder finished with (sched_mask vs active_mask
 *     delta), releasing consumed input chunks and saving input context,
 *  3. moves decoders pending disconnect onto a local list,
 *  4. elects the next runnable vdec (round-robin starting after
 *     last_vdec, falling back to a scan from the list head),
 *  5. grants it the cores, prepares and starts the run,
 *  6. completes disconnects and, when idle but work may pend, re-arms
 *     itself after a short sleep.
 */
static int vdec_core_thread(void *data)
{
	struct vdec_core_s *core = (struct vdec_core_s *)data;
	struct sched_param param = {.sched_priority = MAX_RT_PRIO/2};
	unsigned long flags;
	int i;

	sched_setscheduler(current, SCHED_FIFO, &param);

	allow_signal(SIGTERM);

	while (down_interruptible(&core->sem) == 0) {
		struct vdec_s *vdec, *tmp, *worker;
		unsigned long sched_mask = 0;
		LIST_HEAD(disconnecting_list);

		if (kthread_should_stop())
			break;
		mutex_lock(&vdec_mutex);

		/* rebuild the powered-core bitmap from per-core refcounts */
		if (core->parallel_dec == 1) {
			for (i = VDEC_1; i < VDEC_MAX; i++) {
				core->power_ref_mask =
					core->power_ref_count[i] > 0 ?
					(core->power_ref_mask | (1 << i)) :
					(core->power_ref_mask & ~(1 << i));
			}
		}
		/* clean up previous active vdec's input */
		list_for_each_entry(vdec, &core->connected_vdec_list, list) {
			/* cores scheduled to this vdec but no longer active */
			unsigned long mask = vdec->sched_mask &
				(vdec->active_mask ^ vdec->sched_mask);

			vdec_route_interrupt(vdec, mask, false);

#ifdef VDEC_DEBUG_SUPPORT
			update_profi_clk_stop(vdec, mask, get_current_clk());
#endif
			/*
			 * If decoder released some core resources (mask), then
			 * check if these core resources are associated
			 * with any input side and do input clean up accordingly
			 */
			if (vdec_core_with_input(mask)) {
				struct vdec_input_s *input = &vdec->input;
				/* drop chunks the decoder marked consumed */
				while (!list_empty(
					&input->vframe_chunk_list)) {
					struct vframe_chunk_s *chunk =
						vdec_input_next_chunk(input);
					if (chunk && (chunk->flag &
						VFRAME_CHUNK_FLAG_CONSUMED))
						vdec_input_release_chunk(input,
							chunk);
					else
						break;
				}

				vdec_save_input_context(vdec);
			}

			vdec->sched_mask &= ~mask;
			core->sched_mask &= ~mask;
		}
		vdec_update_buff_status();
		/*
		 *todo:
		 * this is the case when the decoder is in active mode and
		 * the system side wants to stop it. Currently we rely on
		 * the decoder instance to go back to VDEC_STATUS_CONNECTED
		 * from VDEC_STATUS_ACTIVE by its own. However, if for some
		 * reason the decoder can not exist by itself (dead decoding
		 * or whatever), then we may have to add another vdec API
		 * to kill the vdec and release its HW resource and make it
		 * become inactive again.
		 * if ((core->active_vdec) &&
		 * (core->active_vdec->status == VDEC_STATUS_DISCONNECTED)) {
		 * }
		 */

		/* check disconnected decoders */
		flags = vdec_core_lock(vdec_core);
		list_for_each_entry_safe(vdec, tmp,
			&core->connected_vdec_list, list) {
			if ((vdec->status == VDEC_STATUS_CONNECTED) &&
			    (vdec->next_status == VDEC_STATUS_DISCONNECTED)) {
				/* drop any active/last references to it */
				if (core->parallel_dec == 1) {
					if (vdec_core->active_hevc == vdec)
						vdec_core->active_hevc = NULL;
					if (vdec_core->active_vdec == vdec)
						vdec_core->active_vdec = NULL;
				}
				if (core->last_vdec == vdec)
					core->last_vdec = NULL;
				list_move(&vdec->list, &disconnecting_list);
			}
		}
		vdec_core_unlock(vdec_core, flags);
		mutex_unlock(&vdec_mutex);
		/* elect next vdec to be scheduled */
		vdec = core->last_vdec;
		if (vdec) {
			/* round-robin: start from the entry after last_vdec */
			vdec = list_entry(vdec->list.next, struct vdec_s, list);
			list_for_each_entry_from(vdec,
				&core->connected_vdec_list, list) {
				sched_mask = vdec_schedule_mask(vdec,
					core->sched_mask);
				if (!sched_mask)
					continue;
				sched_mask = vdec_ready_to_run(vdec,
					sched_mask);
				if (sched_mask)
					break;
			}

			/* reached the list head sentinel: nothing found */
			if (&vdec->list == &core->connected_vdec_list)
				vdec = NULL;
		}

		if (!vdec) {
			/* search from beginning */
			list_for_each_entry(vdec,
				&core->connected_vdec_list, list) {
				sched_mask = vdec_schedule_mask(vdec,
					core->sched_mask);
				/* last_vdec is the wrap-around stop point */
				if (vdec == core->last_vdec) {
					if (!sched_mask) {
						vdec = NULL;
						break;
					}

					sched_mask = vdec_ready_to_run(vdec,
						sched_mask);

					if (!sched_mask) {
						vdec = NULL;
						break;
					}
					break;
				}

				if (!sched_mask)
					continue;

				sched_mask = vdec_ready_to_run(vdec,
					sched_mask);
				if (sched_mask)
					break;
			}

			if (&vdec->list == &core->connected_vdec_list)
				vdec = NULL;
		}

		worker = vdec;

		if (vdec) {
			unsigned long mask = sched_mask;
			unsigned long i;

			/* setting active_mask should be atomic.
			 * it can be modified by decoder driver callbacks.
			 */
			while (sched_mask) {
				i = __ffs(sched_mask);
				set_bit(i, &vdec->active_mask);
				sched_mask &= ~(1 << i);
			}

			/* vdec's sched_mask is only set from core thread */
			vdec->sched_mask |= mask;
			if (core->last_vdec) {
				if ((core->last_vdec != vdec) &&
					(core->last_vdec->mc_type != vdec->mc_type))
					vdec->mc_loaded = 0;/*clear for reload firmware*/
			} else
				vdec->mc_loaded = 0;
			core->last_vdec = vdec;
			if (debug & 2)
				vdec->mc_loaded = 0;/*alway reload firmware*/
			vdec_set_status(vdec, VDEC_STATUS_ACTIVE);

			core->sched_mask |= mask;
			if (core->parallel_dec == 1)
				vdec_save_active_hw(vdec);
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
			vdec_profile(vdec, VDEC_PROFILE_EVENT_RUN);
#endif
			vdec_prepare_run(vdec, mask);
#ifdef VDEC_DEBUG_SUPPORT
			inc_profi_count(mask, vdec->run_count);
			update_profi_clk_run(vdec, mask, get_current_clk());
#endif
			vdec->run(vdec, mask, vdec_callback, core);


			/* we have some cores scheduled, keep working until
			 * all vdecs are checked with no cores to schedule
			 */
			if (core->parallel_dec == 1) {
				if (vdec_core->vdec_combine_flag == 0)
					up(&core->sem);
			} else
				up(&core->sem);
		}

		/* remove disconnected decoder from active list */
		list_for_each_entry_safe(vdec, tmp, &disconnecting_list, list) {
			list_del(&vdec->list);
			vdec_set_status(vdec, VDEC_STATUS_DISCONNECTED);
			/*core->last_vdec = NULL;*/
			complete(&vdec->inactive_done);
		}

		/* if there is no new work scheduled and nothing
		 * is running, sleep 20ms
		 */
		if (core->parallel_dec == 1) {
			if (vdec_core->vdec_combine_flag == 0) {
				if ((!worker) &&
					((core->sched_mask != core->power_ref_mask)) &&
					(atomic_read(&vdec_core->vdec_nr) > 0) &&
					((core->buff_flag | core->stream_buff_flag) &
					(core->sched_mask ^ core->power_ref_mask))) {
					usleep_range(1000, 2000);
					up(&core->sem);
				}
			} else {
				if ((!worker) && (!core->sched_mask) &&
					(atomic_read(&vdec_core->vdec_nr) > 0) &&
					(core->buff_flag | core->stream_buff_flag)) {
					usleep_range(1000, 2000);
					up(&core->sem);
				}
			}
		} else if ((!worker) && (!core->sched_mask) && (atomic_read(&vdec_core->vdec_nr) > 0)) {
			usleep_range(1000, 2000);
			up(&core->sem);
		}

	}

	return 0;
}
3127
3128#if 1 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8 */
/*
 * test_hevc() - probe whether the HEVC block came up healthy after
 * power-on (workaround for a known GXBB HW issue).
 *
 * Resets the IPP, points all 32 canvas entries at @decomp_addr,
 * programs a minimal frame configuration, pushes one SW-IMP CTB/PU
 * command sequence, waits @us_delay microseconds, then reads back a
 * debug status. Returns true when the status indicates the command
 * completed (debug data low bits == 1). Callers retry the power
 * sequence if this returns false.
 */
static bool test_hevc(u32 decomp_addr, u32 us_delay)
{
	int i;

	/* SW_RESET IPP */
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 1);
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 0);

	/* initialize all canvas table */
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0);
	for (i = 0; i < 32; i++)
		WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
			0x1 | (i << 8) | decomp_addr);
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 1);
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR, (0 << 8) | (0<<1) | 1);
	for (i = 0; i < 32; i++)
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);

	/* Initialize mcrcc */
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2);
	WRITE_VREG(HEVCD_MCRCC_CTL2, 0x0);
	WRITE_VREG(HEVCD_MCRCC_CTL3, 0x0);
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);

	/* Decomp initialize */
	WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x0);
	WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);

	/* Frame level initialization */
	WRITE_VREG(HEVCD_IPP_TOP_FRMCONFIG, 0x100 | (0x100 << 16));
	WRITE_VREG(HEVCD_IPP_TOP_TILECONFIG3, 0x0);
	WRITE_VREG(HEVCD_IPP_TOP_LCUCONFIG, 0x1 << 5);
	WRITE_VREG(HEVCD_IPP_BITDEPTH_CONFIG, 0x2 | (0x2 << 2));

	WRITE_VREG(HEVCD_IPP_CONFIG, 0x0);
	WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, 0x0);

	/* Enable SWIMP mode */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_CONFIG, 0x1);

	/* Enable frame */
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 0x2);
	WRITE_VREG(HEVCD_IPP_TOP_FRMCTL, 0x1);

	/* Send SW-command CTB info */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_CTBINFO, 0x1 << 31);

	/* Send PU_command */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO0, (0x4 << 9) | (0x4 << 16));
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO1, 0x1 << 3);
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO2, 0x0);
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO3, 0x0);

	/* give the block time to execute the command */
	udelay(us_delay);

	WRITE_VREG(HEVCD_IPP_DBG_SEL, 0x2 << 4);

	return (READ_VREG(HEVCD_IPP_DBG_DATA) & 3) == 1;
}
3188
/*
 * vdec_power_reset() - force all decoder blocks into the powered-off
 * state (isolation on, memories powered down, domain asleep).
 *
 * Applies the same three-step sequence to VDEC1 and, when present on
 * this SoC, VDEC2, HCODEC and HEVC: enable isolation, power down the
 * block's memories, then put the power domain to sleep. Used to reach
 * a known-off baseline before/after power sequencing.
 */
void vdec_power_reset(void)
{
	/* enable vdec1 isolation */
	WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
		READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0xc0);
	/* power off vdec1 memories */
	WRITE_VREG(DOS_MEM_PD_VDEC, 0xffffffffUL);
	/* vdec1 power off */
	WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
		READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0xc);

	if (has_vdec2()) {
		/* enable vdec2 isolation */
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x300);
		/* power off vdec2 memories */
		WRITE_VREG(DOS_MEM_PD_VDEC2, 0xffffffffUL);
		/* vdec2 power off */
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0x30);
	}

	if (has_hdec()) {
		/* enable hcodec isolation */
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x30);
		/* power off hcodec memories */
		WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
		/* hcodec power off */
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 3);
	}

	if (has_hevc_vdec()) {
		/* enable hevc isolation */
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0xc00);
		/* power off hevc memories */
		WRITE_VREG(DOS_MEM_PD_HEVC, 0xffffffffUL);
		/* hevc power off */
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0xc0);
	}
}
EXPORT_SYMBOL(vdec_power_reset);
3234
3235void vdec_poweron(enum vdec_type_e core)
3236{
3237 void *decomp_addr = NULL;
3238 dma_addr_t decomp_dma_addr;
3239 u32 decomp_addr_aligned = 0;
3240 int hevc_loop = 0;
3241 int sleep_val, iso_val;
3242 bool is_power_ctrl_ver2 = false;
3243
3244 if (core >= VDEC_MAX)
3245 return;
3246
3247 mutex_lock(&vdec_mutex);
3248
3249 vdec_core->power_ref_count[core]++;
3250 if (vdec_core->power_ref_count[core] > 1) {
3251 mutex_unlock(&vdec_mutex);
3252 return;
3253 }
3254
3255 if (vdec_on(core)) {
3256 mutex_unlock(&vdec_mutex);
3257 return;
3258 }
3259
3260 is_power_ctrl_ver2 =
3261 ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3262 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) ? true : false;
3263
3264 if (hevc_workaround_needed() &&
3265 (core == VDEC_HEVC)) {
3266 decomp_addr = codec_mm_dma_alloc_coherent(MEM_NAME,
3267 SZ_64K + SZ_4K, &decomp_dma_addr, GFP_KERNEL, 0);
3268
3269 if (decomp_addr) {
3270 decomp_addr_aligned = ALIGN(decomp_dma_addr, SZ_64K);
3271 memset((u8 *)decomp_addr +
3272 (decomp_addr_aligned - decomp_dma_addr),
3273 0xff, SZ_4K);
3274 } else
3275 pr_err("vdec: alloc HEVC gxbb decomp buffer failed.\n");
3276 }
3277
3278 if (core == VDEC_1) {
3279 sleep_val = is_power_ctrl_ver2 ? 0x2 : 0xc;
3280 iso_val = is_power_ctrl_ver2 ? 0x2 : 0xc0;
3281
3282 /* vdec1 power on */
3283#ifdef CONFIG_AMLOGIC_POWER
3284 if (is_support_power_ctrl()) {
3285 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3286 mutex_unlock(&vdec_mutex);
3287 pr_err("vdec-1 power on ctrl sleep fail.\n");
3288 return;
3289 }
3290 } else {
3291 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3292 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3293 }
3294#else
3295 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3296 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3297#endif
3298 /* wait 10uS */
3299 udelay(10);
3300 /* vdec1 soft reset */
3301 WRITE_VREG(DOS_SW_RESET0, 0xfffffffc);
3302 WRITE_VREG(DOS_SW_RESET0, 0);
3303 /* enable vdec1 clock */
3304 /*
3305 *add power on vdec clock level setting,only for m8 chip,
3306 * m8baby and m8m2 can dynamic adjust vdec clock,
3307 * power on with default clock level
3308 */
3309 amports_switch_gate("clk_vdec_mux", 1);
3310 vdec_clock_hi_enable();
3311 /* power up vdec memories */
3312 WRITE_VREG(DOS_MEM_PD_VDEC, 0);
3313
3314 /* remove vdec1 isolation */
3315#ifdef CONFIG_AMLOGIC_POWER
3316 if (is_support_power_ctrl()) {
3317 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3318 mutex_unlock(&vdec_mutex);
3319 pr_err("vdec-1 power on ctrl iso fail.\n");
3320 return;
3321 }
3322 } else {
3323 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3324 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3325 }
3326#else
3327 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3328 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3329#endif
3330 /* reset DOS top registers */
3331 WRITE_VREG(DOS_VDEC_MCRCC_STALL_CTRL, 0);
3332 } else if (core == VDEC_2) {
3333 if (has_vdec2()) {
3334 /* vdec2 power on */
3335 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3336 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3337 ~0x30);
3338 /* wait 10uS */
3339 udelay(10);
3340 /* vdec2 soft reset */
3341 WRITE_VREG(DOS_SW_RESET2, 0xffffffff);
3342 WRITE_VREG(DOS_SW_RESET2, 0);
3343 /* enable vdec1 clock */
3344 vdec2_clock_hi_enable();
3345 /* power up vdec memories */
3346 WRITE_VREG(DOS_MEM_PD_VDEC2, 0);
3347 /* remove vdec2 isolation */
3348 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3349 READ_AOREG(AO_RTI_GEN_PWR_ISO0) &
3350 ~0x300);
3351 /* reset DOS top registers */
3352 WRITE_VREG(DOS_VDEC2_MCRCC_STALL_CTRL, 0);
3353 }
3354 } else if (core == VDEC_HCODEC) {
3355 if (has_hdec()) {
3356 sleep_val = is_power_ctrl_ver2 ? 0x1 : 0x3;
3357 iso_val = is_power_ctrl_ver2 ? 0x1 : 0x30;
3358
3359 /* hcodec power on */
3360#ifdef CONFIG_AMLOGIC_POWER
3361 if (is_support_power_ctrl()) {
3362 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3363 mutex_unlock(&vdec_mutex);
3364 pr_err("hcodec power on ctrl sleep fail.\n");
3365 return;
3366 }
3367 } else {
3368 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3369 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3370 }
3371#else
3372 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3373 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3374#endif
3375 /* wait 10uS */
3376 udelay(10);
3377 /* hcodec soft reset */
3378 WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
3379 WRITE_VREG(DOS_SW_RESET1, 0);
3380 /* enable hcodec clock */
3381 hcodec_clock_enable();
3382 /* power up hcodec memories */
3383 WRITE_VREG(DOS_MEM_PD_HCODEC, 0);
3384 /* remove hcodec isolation */
3385#ifdef CONFIG_AMLOGIC_POWER
3386 if (is_support_power_ctrl()) {
3387 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3388 mutex_unlock(&vdec_mutex);
3389 pr_err("hcodec power on ctrl iso fail.\n");
3390 return;
3391 }
3392 } else {
3393 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3394 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3395 }
3396#else
3397 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3398 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3399#endif
3400 }
3401 } else if (core == VDEC_HEVC) {
3402 if (has_hevc_vdec()) {
3403 bool hevc_fixed = false;
3404
3405 sleep_val = is_power_ctrl_ver2 ? 0x4 : 0xc0;
3406 iso_val = is_power_ctrl_ver2 ? 0x4 : 0xc00;
3407
3408 while (!hevc_fixed) {
3409 /* hevc power on */
3410#ifdef CONFIG_AMLOGIC_POWER
3411 if (is_support_power_ctrl()) {
3412 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3413 mutex_unlock(&vdec_mutex);
3414 pr_err("hevc power on ctrl sleep fail.\n");
3415 return;
3416 }
3417 } else {
3418 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3419 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3420 }
3421#else
3422 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3423 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3424#endif
3425 /* wait 10uS */
3426 udelay(10);
3427 /* hevc soft reset */
3428 WRITE_VREG(DOS_SW_RESET3, 0xffffffff);
3429 WRITE_VREG(DOS_SW_RESET3, 0);
3430 /* enable hevc clock */
3431 amports_switch_gate("clk_hevc_mux", 1);
3432 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
3433 amports_switch_gate("clk_hevcb_mux", 1);
3434 hevc_clock_hi_enable();
3435 hevc_back_clock_hi_enable();
3436 /* power up hevc memories */
3437 WRITE_VREG(DOS_MEM_PD_HEVC, 0);
3438 /* remove hevc isolation */
3439#ifdef CONFIG_AMLOGIC_POWER
3440 if (is_support_power_ctrl()) {
3441 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3442 mutex_unlock(&vdec_mutex);
3443 pr_err("hevc power on ctrl iso fail.\n");
3444 return;
3445 }
3446 } else {
3447 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3448 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3449 }
3450#else
3451 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3452 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3453#endif
3454 if (!hevc_workaround_needed())
3455 break;
3456
3457 if (decomp_addr)
3458 hevc_fixed = test_hevc(
3459 decomp_addr_aligned, 20);
3460
3461 if (!hevc_fixed) {
3462 hevc_loop++;
3463
3464 mutex_unlock(&vdec_mutex);
3465
3466 if (hevc_loop >= HEVC_TEST_LIMIT) {
3467 pr_warn("hevc power sequence over limit\n");
3468 pr_warn("=====================================================\n");
3469 pr_warn(" This chip is identified to have HW failure.\n");
3470 pr_warn(" Please contact sqa-platform to replace the platform.\n");
3471 pr_warn("=====================================================\n");
3472
3473 panic("Force panic for chip detection !!!\n");
3474
3475 break;
3476 }
3477
3478 vdec_poweroff(VDEC_HEVC);
3479
3480 mdelay(10);
3481
3482 mutex_lock(&vdec_mutex);
3483 }
3484 }
3485
3486 if (hevc_loop > hevc_max_reset_count)
3487 hevc_max_reset_count = hevc_loop;
3488
3489 WRITE_VREG(DOS_SW_RESET3, 0xffffffff);
3490 udelay(10);
3491 WRITE_VREG(DOS_SW_RESET3, 0);
3492 }
3493 }
3494
3495 if (decomp_addr)
3496 codec_mm_dma_free_coherent(MEM_NAME,
3497 SZ_64K + SZ_4K, decomp_addr, decomp_dma_addr, 0);
3498
3499 mutex_unlock(&vdec_mutex);
3500}
3501EXPORT_SYMBOL(vdec_poweron);
3502
/*
 * Power down one decoder core (reference counted).
 * Drops power_ref_count[core]; the actual power-down sequence runs only
 * when the count reaches zero.  Sequence per core: raise isolation ->
 * power down memories -> gate clocks -> assert power-domain sleep.
 * NOTE(review): on the power_ctrl_* error paths the ref count has
 * already been decremented but the core is left partially powered —
 * presumably acceptable since the next poweron redoes the full
 * sequence; confirm against callers.
 */
void vdec_poweroff(enum vdec_type_e core)
{
	int sleep_val, iso_val;
	bool is_power_ctrl_ver2 = false;

	if (core >= VDEC_MAX)
		return;

	mutex_lock(&vdec_mutex);

	vdec_core->power_ref_count[core]--;
	if (vdec_core->power_ref_count[core] > 0) {
		/* other users still hold this core powered */
		mutex_unlock(&vdec_mutex);
		return;
	}

	/* SM1+ (except TL1) uses the single-bit power-control layout */
	is_power_ctrl_ver2 =
		((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
		(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) ? true : false;

	if (core == VDEC_1) {
		sleep_val = is_power_ctrl_ver2 ? 0x2 : 0xc;
		iso_val = is_power_ctrl_ver2 ? 0x2 : 0xc0;

		/* enable vdec1 isolation */
#ifdef CONFIG_AMLOGIC_POWER
		if (is_support_power_ctrl()) {
			if (power_ctrl_iso_mask(false, iso_val, 0)) {
				mutex_unlock(&vdec_mutex);
				pr_err("vdec-1 power off ctrl iso fail.\n");
				return;
			}
		} else {
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
		}
#else
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
		/* power off vdec1 memories */
		WRITE_VREG(DOS_MEM_PD_VDEC, 0xffffffffUL);
		/* disable vdec1 clock */
		vdec_clock_off();
		/* vdec1 power off */
#ifdef CONFIG_AMLOGIC_POWER
		if (is_support_power_ctrl()) {
			if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
				mutex_unlock(&vdec_mutex);
				pr_err("vdec-1 power off ctrl sleep fail.\n");
				return;
			}
		} else {
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
		}
#else
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
	} else if (core == VDEC_2) {
		if (has_vdec2()) {
			/* enable vdec2 isolation */
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) |
				0x300);
			/* power off vdec2 memories */
			WRITE_VREG(DOS_MEM_PD_VDEC2, 0xffffffffUL);
			/* disable vdec2 clock */
			vdec2_clock_off();
			/* vdec2 power off */
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) |
				0x30);
		}
	} else if (core == VDEC_HCODEC) {
		if (has_hdec()) {
			sleep_val = is_power_ctrl_ver2 ? 0x1 : 0x3;
			iso_val = is_power_ctrl_ver2 ? 0x1 : 0x30;

			/* enable hcodec isolation */
#ifdef CONFIG_AMLOGIC_POWER
			if (is_support_power_ctrl()) {
				if (power_ctrl_iso_mask(false, iso_val, 0)) {
					mutex_unlock(&vdec_mutex);
					pr_err("hcodec power off ctrl iso fail.\n");
					return;
				}
			} else {
				WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
					READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
			}
#else
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
			/* power off hcodec memories */
			WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
			/* disable hcodec clock */
			hcodec_clock_off();
			/* hcodec power off */
#ifdef CONFIG_AMLOGIC_POWER
			if (is_support_power_ctrl()) {
				if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
					mutex_unlock(&vdec_mutex);
					pr_err("hcodec power off ctrl sleep fail.\n");
					return;
				}
			} else {
				WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
					READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
			}
#else
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
		}
	} else if (core == VDEC_HEVC) {
		if (has_hevc_vdec()) {
			sleep_val = is_power_ctrl_ver2 ? 0x4 : 0xc0;
			iso_val = is_power_ctrl_ver2 ? 0x4 : 0xc00;

			if (no_powerdown == 0) {
				/* enable hevc isolation */
#ifdef CONFIG_AMLOGIC_POWER
				if (is_support_power_ctrl()) {
					if (power_ctrl_iso_mask(false, iso_val, 0)) {
						mutex_unlock(&vdec_mutex);
						pr_err("hevc power off ctrl iso fail.\n");
						return;
					}
				} else {
					WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
						READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
				}
#else
				WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
					READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
				/* power off hevc memories */
				WRITE_VREG(DOS_MEM_PD_HEVC, 0xffffffffUL);

				/* disable hevc clock */
				hevc_clock_off();
				/* G12A+ has a separate HEVC back-end clock */
				if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
					hevc_back_clock_off();

				/* hevc power off */
#ifdef CONFIG_AMLOGIC_POWER
				if (is_support_power_ctrl()) {
					if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
						mutex_unlock(&vdec_mutex);
						pr_err("hevc power off ctrl sleep fail.\n");
						return;
					}
				} else {
					WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
						READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
				}
#else
				WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
					READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
			} else {
				/* keep-powered request: just soft-reset hevc once */
				pr_info("!!!!!!!!not power down\n");
				hevc_reset_core(NULL);
				no_powerdown = 0;
			}
		}
	}
	mutex_unlock(&vdec_mutex);
}
EXPORT_SYMBOL(vdec_poweroff);
3676
3677bool vdec_on(enum vdec_type_e core)
3678{
3679 bool ret = false;
3680
3681 if (core == VDEC_1) {
3682 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3683 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3684 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3685 ? 0x2 : 0xc)) == 0) &&
3686 (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x100))
3687 ret = true;
3688 } else if (core == VDEC_2) {
3689 if (has_vdec2()) {
3690 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & 0x30) == 0) &&
3691 (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x100))
3692 ret = true;
3693 }
3694 } else if (core == VDEC_HCODEC) {
3695 if (has_hdec()) {
3696 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3697 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3698 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3699 ? 0x1 : 0x3)) == 0) &&
3700 (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x1000000))
3701 ret = true;
3702 }
3703 } else if (core == VDEC_HEVC) {
3704 if (has_hevc_vdec()) {
3705 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3706 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3707 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3708 ? 0x4 : 0xc0)) == 0) &&
3709 (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x1000000))
3710 ret = true;
3711 }
3712 }
3713
3714 return ret;
3715}
3716EXPORT_SYMBOL(vdec_on);
3717
3718#elif 0 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON6TVD */
/*
 * Legacy (compiled-out) poweron path: no power domains to manage,
 * just soft-reset the core, enable its clock, and clear the DOS
 * stall-control register where the core has one.
 */
void vdec_poweron(enum vdec_type_e core)
{
	ulong flags;

	spin_lock_irqsave(&lock, flags);

	if (core == VDEC_1) {
		/* vdec1 soft reset */
		WRITE_VREG(DOS_SW_RESET0, 0xfffffffc);
		WRITE_VREG(DOS_SW_RESET0, 0);
		/* enable vdec1 clock */
		vdec_clock_enable();
		/* reset DOS top registers */
		WRITE_VREG(DOS_VDEC_MCRCC_STALL_CTRL, 0);
	} else if (core == VDEC_2) {
		/* vdec2 soft reset */
		WRITE_VREG(DOS_SW_RESET2, 0xffffffff);
		WRITE_VREG(DOS_SW_RESET2, 0);
		/* enable vdec2 clock */
		vdec2_clock_enable();
		/* reset DOS top registers */
		WRITE_VREG(DOS_VDEC2_MCRCC_STALL_CTRL, 0);
	} else if (core == VDEC_HCODEC) {
		/* hcodec soft reset */
		WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
		WRITE_VREG(DOS_SW_RESET1, 0);
		/* enable hcodec clock */
		hcodec_clock_enable();
	}

	spin_unlock_irqrestore(&lock, flags);
}
3751
3752void vdec_poweroff(enum vdec_type_e core)
3753{
3754 ulong flags;
3755
3756 spin_lock_irqsave(&lock, flags);
3757
3758 if (core == VDEC_1) {
3759 /* disable vdec1 clock */
3760 vdec_clock_off();
3761 } else if (core == VDEC_2) {
3762 /* disable vdec2 clock */
3763 vdec2_clock_off();
3764 } else if (core == VDEC_HCODEC) {
3765 /* disable hcodec clock */
3766 hcodec_clock_off();
3767 }
3768
3769 spin_unlock_irqrestore(&lock, flags);
3770}
3771
3772bool vdec_on(enum vdec_type_e core)
3773{
3774 bool ret = false;
3775
3776 if (core == VDEC_1) {
3777 if (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x100)
3778 ret = true;
3779 } else if (core == VDEC_2) {
3780 if (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x100)
3781 ret = true;
3782 } else if (core == VDEC_HCODEC) {
3783 if (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x1000000)
3784 ret = true;
3785 }
3786
3787 return ret;
3788}
3789#endif
3790
3791int vdec_source_changed(int format, int width, int height, int fps)
3792{
3793 /* todo: add level routines for clock adjustment per chips */
3794 int ret = -1;
3795 static int on_setting;
3796
3797 if (on_setting > 0)
3798 return ret;/*on changing clk,ignore this change*/
3799
3800 if (vdec_source_get(VDEC_1) == width * height * fps)
3801 return ret;
3802
3803
3804 on_setting = 1;
3805 ret = vdec_source_changed_for_clk_set(format, width, height, fps);
3806 pr_debug("vdec1 video changed to %d x %d %d fps clk->%dMHZ\n",
3807 width, height, fps, vdec_clk_get(VDEC_1));
3808 on_setting = 0;
3809 return ret;
3810
3811}
3812EXPORT_SYMBOL(vdec_source_changed);
3813
/*
 * Soft-reset the VDEC core: stop its DMC (DDR) request channel, wait for
 * the channel to drain, pulse the DOS_SW_RESET0 bits, then re-enable the
 * DMC channel.  Order matters — resetting while DDR traffic is in flight
 * is what the DMC handshake avoids.
 * NOTE(review): the drain wait is an unbounded busy loop with no timeout.
 */
void vdec_reset_core(struct vdec_s *vdec)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 13; /*bit13: DOS VDEC interface*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask = 1 << 21; /*bit21: DOS VDEC interface*/

	/* gate off VDEC's DMC request channel */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* busy-wait until the channel reports idle */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;
	/*
	 * 2: assist
	 * 3: vld_reset
	 * 4: vld_part_reset
	 * 5: vfifo reset
	 * 6: iqidct
	 * 7: mc
	 * 8: dblk
	 * 9: pic_dc
	 * 10: psc
	 * 11: mcpu
	 * 12: ccpu
	 * 13: ddr
	 * 14: afifo
	 */
	/* SM1+ (except TL1) resets more sub-blocks in one shot */
	if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
		(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) {
		WRITE_VREG(DOS_SW_RESET0, (1<<3)|(1<<4)|(1<<5)|(1<<6)|(1<<7)|(1<<8)|(1<<9));
	} else {
		WRITE_VREG(DOS_SW_RESET0,
			(1<<3)|(1<<4)|(1<<5));
	}
	WRITE_VREG(DOS_SW_RESET0, 0);

	/* restore the DMC request channel */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);
}
EXPORT_SYMBOL(vdec_reset_core);
3861
3862void hevc_mmu_dma_check(struct vdec_s *vdec)
3863{
3864 ulong timeout;
3865 u32 data;
3866 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A)
3867 return;
3868 timeout = jiffies + HZ/100;
3869 while (1) {
3870 data = READ_VREG(HEVC_CM_CORE_STATUS);
3871 if ((data & 0x1) == 0)
3872 break;
3873 if (time_after(jiffies, timeout)) {
3874 if (debug & 0x10)
3875 pr_info(" %s sao mmu dma idle\n", __func__);
3876 break;
3877 }
3878 }
3879 /*disable sao mmu dma */
3880 CLEAR_VREG_MASK(HEVC_SAO_MMU_DMA_CTRL, 1 << 0);
3881 timeout = jiffies + HZ/100;
3882 while (1) {
3883 data = READ_VREG(HEVC_SAO_MMU_DMA_STATUS);
3884 if ((data & 0x1))
3885 break;
3886 if (time_after(jiffies, timeout)) {
3887 if (debug & 0x10)
3888 pr_err("%s sao mmu dma timeout, num_buf_used = 0x%x\n",
3889 __func__, (READ_VREG(HEVC_SAO_MMU_STATUS) >> 16));
3890 break;
3891 }
3892 }
3893}
3894EXPORT_SYMBOL(hevc_mmu_dma_check);
3895
/*
 * Soft-reset the HEVC core(s): stop stream input, gate off the HEVC
 * (and, on G12A+, HEVCB) DMC request channels, wait for them to drain,
 * pulse the DOS_SW_RESET3 bits, then re-enable the DMC channels.
 * @vdec: decoder instance, or NULL (treated like frame-based input).
 * NOTE(review): the drain wait is an unbounded busy loop with no timeout.
 */
void hevc_reset_core(struct vdec_s *vdec)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 4; /*bit4: hevc*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask |= 1 << 8; /*bit8: hevcb*/

	/* stop the stream fetcher before touching the DMC */
	WRITE_VREG(HEVC_STREAM_CONTROL, 0);
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* busy-wait until the channel(s) report idle */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;

	if (vdec == NULL || input_frame_based(vdec))
		WRITE_VREG(HEVC_STREAM_CONTROL, 0);

	/*
	 * 2: assist
	 * 3: parser
	 * 4: parser_state
	 * 8: dblk
	 * 11:mcpu
	 * 12:ccpu
	 * 13:ddr
	 * 14:iqit
	 * 15:ipp
	 * 17:qdct
	 * 18:mpred
	 * 19:sao
	 * 24:hevc_afifo
	 */
	WRITE_VREG(DOS_SW_RESET3,
		(1<<3)|(1<<4)|(1<<8)|(1<<11)|
		(1<<12)|(1<<13)|(1<<14)|(1<<15)|
		(1<<17)|(1<<18)|(1<<19)|(1<<24));

	WRITE_VREG(DOS_SW_RESET3, 0);


	/* restore the DMC request channel(s) */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

}
EXPORT_SYMBOL(hevc_reset_core);
3948
3949int vdec2_source_changed(int format, int width, int height, int fps)
3950{
3951 int ret = -1;
3952 static int on_setting;
3953
3954 if (has_vdec2()) {
3955 /* todo: add level routines for clock adjustment per chips */
3956 if (on_setting != 0)
3957 return ret;/*on changing clk,ignore this change*/
3958
3959 if (vdec_source_get(VDEC_2) == width * height * fps)
3960 return ret;
3961
3962 on_setting = 1;
3963 ret = vdec_source_changed_for_clk_set(format,
3964 width, height, fps);
3965 pr_debug("vdec2 video changed to %d x %d %d fps clk->%dMHZ\n",
3966 width, height, fps, vdec_clk_get(VDEC_2));
3967 on_setting = 0;
3968 return ret;
3969 }
3970 return 0;
3971}
3972EXPORT_SYMBOL(vdec2_source_changed);
3973
3974int hevc_source_changed(int format, int width, int height, int fps)
3975{
3976 /* todo: add level routines for clock adjustment per chips */
3977 int ret = -1;
3978 static int on_setting;
3979
3980 if (on_setting != 0)
3981 return ret;/*on changing clk,ignore this change*/
3982
3983 if (vdec_source_get(VDEC_HEVC) == width * height * fps)
3984 return ret;
3985
3986 on_setting = 1;
3987 ret = vdec_source_changed_for_clk_set(format, width, height, fps);
3988 pr_debug("hevc video changed to %d x %d %d fps clk->%dMHZ\n",
3989 width, height, fps, vdec_clk_get(VDEC_HEVC));
3990 on_setting = 0;
3991
3992 return ret;
3993}
3994EXPORT_SYMBOL(hevc_source_changed);
3995
/*
 * Name/offset table of amrisc registers dumped by amrisc_regs_show():
 * master (M*) and coprocessor (C*) CPU state plus the AV_SCRATCH bank.
 */
static struct am_reg am_risc[] = {
	{"MSP", 0x300},
	{"MPSR", 0x301},
	{"MCPU_INT_BASE", 0x302},
	{"MCPU_INTR_GRP", 0x303},
	{"MCPU_INTR_MSK", 0x304},
	{"MCPU_INTR_REQ", 0x305},
	{"MPC-P", 0x306},
	{"MPC-D", 0x307},
	{"MPC_E", 0x308},
	{"MPC_W", 0x309},
	{"CSP", 0x320},
	{"CPSR", 0x321},
	{"CCPU_INT_BASE", 0x322},
	{"CCPU_INTR_GRP", 0x323},
	{"CCPU_INTR_MSK", 0x324},
	{"CCPU_INTR_REQ", 0x325},
	{"CPC-P", 0x326},
	{"CPC-D", 0x327},
	{"CPC_E", 0x328},
	{"CPC_W", 0x329},
	{"AV_SCRATCH_0", 0x09c0},
	{"AV_SCRATCH_1", 0x09c1},
	{"AV_SCRATCH_2", 0x09c2},
	{"AV_SCRATCH_3", 0x09c3},
	{"AV_SCRATCH_4", 0x09c4},
	{"AV_SCRATCH_5", 0x09c5},
	{"AV_SCRATCH_6", 0x09c6},
	{"AV_SCRATCH_7", 0x09c7},
	{"AV_SCRATCH_8", 0x09c8},
	{"AV_SCRATCH_9", 0x09c9},
	{"AV_SCRATCH_A", 0x09ca},
	{"AV_SCRATCH_B", 0x09cb},
	{"AV_SCRATCH_C", 0x09cc},
	{"AV_SCRATCH_D", 0x09cd},
	{"AV_SCRATCH_E", 0x09ce},
	{"AV_SCRATCH_F", 0x09cf},
	{"AV_SCRATCH_G", 0x09d0},
	{"AV_SCRATCH_H", 0x09d1},
	{"AV_SCRATCH_I", 0x09d2},
	{"AV_SCRATCH_J", 0x09d3},
	{"AV_SCRATCH_K", 0x09d4},
	{"AV_SCRATCH_L", 0x09d5},
	{"AV_SCRATCH_M", 0x09d6},
	{"AV_SCRATCH_N", 0x09d7},
};
4042
4043static ssize_t amrisc_regs_show(struct class *class,
4044 struct class_attribute *attr, char *buf)
4045{
4046 char *pbuf = buf;
4047 struct am_reg *regs = am_risc;
4048 int rsize = sizeof(am_risc) / sizeof(struct am_reg);
4049 int i;
4050 unsigned int val;
4051 ssize_t ret;
4052
4053 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
4054 mutex_lock(&vdec_mutex);
4055 if (!vdec_on(VDEC_1)) {
4056 mutex_unlock(&vdec_mutex);
4057 pbuf += sprintf(pbuf, "amrisc is power off\n");
4058 ret = pbuf - buf;
4059 return ret;
4060 }
4061 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4062 /*TODO:M6 define */
4063 /*
4064 * switch_mod_gate_by_type(MOD_VDEC, 1);
4065 */
4066 amports_switch_gate("vdec", 1);
4067 }
4068 pbuf += sprintf(pbuf, "amrisc registers show:\n");
4069 for (i = 0; i < rsize; i++) {
4070 val = READ_VREG(regs[i].offset);
4071 pbuf += sprintf(pbuf, "%s(%#x)\t:%#x(%d)\n",
4072 regs[i].name, regs[i].offset, val, val);
4073 }
4074 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
4075 mutex_unlock(&vdec_mutex);
4076 else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4077 /*TODO:M6 define */
4078 /*
4079 * switch_mod_gate_by_type(MOD_VDEC, 0);
4080 */
4081 amports_switch_gate("vdec", 0);
4082 }
4083 ret = pbuf - buf;
4084 return ret;
4085}
4086
4087static ssize_t dump_trace_show(struct class *class,
4088 struct class_attribute *attr, char *buf)
4089{
4090 int i;
4091 char *pbuf = buf;
4092 ssize_t ret;
4093 u16 *trace_buf = kmalloc(debug_trace_num * 2, GFP_KERNEL);
4094
4095 if (!trace_buf) {
4096 pbuf += sprintf(pbuf, "No Memory bug\n");
4097 ret = pbuf - buf;
4098 return ret;
4099 }
4100 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
4101 mutex_lock(&vdec_mutex);
4102 if (!vdec_on(VDEC_1)) {
4103 mutex_unlock(&vdec_mutex);
4104 kfree(trace_buf);
4105 pbuf += sprintf(pbuf, "amrisc is power off\n");
4106 ret = pbuf - buf;
4107 return ret;
4108 }
4109 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4110 /*TODO:M6 define */
4111 /*
4112 * switch_mod_gate_by_type(MOD_VDEC, 1);
4113 */
4114 amports_switch_gate("vdec", 1);
4115 }
4116 pr_info("dump trace steps:%d start\n", debug_trace_num);
4117 i = 0;
4118 while (i <= debug_trace_num - 16) {
4119 trace_buf[i] = READ_VREG(MPC_E);
4120 trace_buf[i + 1] = READ_VREG(MPC_E);
4121 trace_buf[i + 2] = READ_VREG(MPC_E);
4122 trace_buf[i + 3] = READ_VREG(MPC_E);
4123 trace_buf[i + 4] = READ_VREG(MPC_E);
4124 trace_buf[i + 5] = READ_VREG(MPC_E);
4125 trace_buf[i + 6] = READ_VREG(MPC_E);
4126 trace_buf[i + 7] = READ_VREG(MPC_E);
4127 trace_buf[i + 8] = READ_VREG(MPC_E);
4128 trace_buf[i + 9] = READ_VREG(MPC_E);
4129 trace_buf[i + 10] = READ_VREG(MPC_E);
4130 trace_buf[i + 11] = READ_VREG(MPC_E);
4131 trace_buf[i + 12] = READ_VREG(MPC_E);
4132 trace_buf[i + 13] = READ_VREG(MPC_E);
4133 trace_buf[i + 14] = READ_VREG(MPC_E);
4134 trace_buf[i + 15] = READ_VREG(MPC_E);
4135 i += 16;
4136 };
4137 pr_info("dump trace steps:%d finished\n", debug_trace_num);
4138 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
4139 mutex_unlock(&vdec_mutex);
4140 else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4141 /*TODO:M6 define */
4142 /*
4143 * switch_mod_gate_by_type(MOD_VDEC, 0);
4144 */
4145 amports_switch_gate("vdec", 0);
4146 }
4147 for (i = 0; i < debug_trace_num; i++) {
4148 if (i % 4 == 0) {
4149 if (i % 16 == 0)
4150 pbuf += sprintf(pbuf, "\n");
4151 else if (i % 8 == 0)
4152 pbuf += sprintf(pbuf, " ");
4153 else /* 4 */
4154 pbuf += sprintf(pbuf, " ");
4155 }
4156 pbuf += sprintf(pbuf, "%04x:", trace_buf[i]);
4157 }
4158 while (i < debug_trace_num)
4159 ;
4160 kfree(trace_buf);
4161 pbuf += sprintf(pbuf, "\n");
4162 ret = pbuf - buf;
4163 return ret;
4164}
4165
4166static ssize_t clock_level_show(struct class *class,
4167 struct class_attribute *attr, char *buf)
4168{
4169 char *pbuf = buf;
4170 size_t ret;
4171
4172 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_1));
4173
4174 if (has_vdec2())
4175 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_2));
4176
4177 if (has_hevc_vdec())
4178 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_HEVC));
4179
4180 ret = pbuf - buf;
4181 return ret;
4182}
4183
4184static ssize_t store_poweron_clock_level(struct class *class,
4185 struct class_attribute *attr,
4186 const char *buf, size_t size)
4187{
4188 unsigned int val;
4189 ssize_t ret;
4190
4191 /*ret = sscanf(buf, "%d", &val);*/
4192 ret = kstrtoint(buf, 0, &val);
4193
4194 if (ret != 0)
4195 return -EINVAL;
4196 poweron_clock_level = val;
4197 return size;
4198}
4199
4200static ssize_t show_poweron_clock_level(struct class *class,
4201 struct class_attribute *attr, char *buf)
4202{
4203 return sprintf(buf, "%d\n", poweron_clock_level);
4204}
4205
4206/*
4207 *if keep_vdec_mem == 1
4208 *always don't release
4209 *vdec 64 memory for fast play.
4210 */
4211static ssize_t store_keep_vdec_mem(struct class *class,
4212 struct class_attribute *attr,
4213 const char *buf, size_t size)
4214{
4215 unsigned int val;
4216 ssize_t ret;
4217
4218 /*ret = sscanf(buf, "%d", &val);*/
4219 ret = kstrtoint(buf, 0, &val);
4220 if (ret != 0)
4221 return -EINVAL;
4222 keep_vdec_mem = val;
4223 return size;
4224}
4225
4226static ssize_t show_keep_vdec_mem(struct class *class,
4227 struct class_attribute *attr, char *buf)
4228{
4229 return sprintf(buf, "%d\n", keep_vdec_mem);
4230}
4231
4232#ifdef VDEC_DEBUG_SUPPORT
4233static ssize_t store_debug(struct class *class,
4234 struct class_attribute *attr,
4235 const char *buf, size_t size)
4236{
4237 struct vdec_s *vdec;
4238 struct vdec_core_s *core = vdec_core;
4239 unsigned long flags;
4240
4241 unsigned id;
4242 unsigned val;
4243 ssize_t ret;
4244 char cbuf[32];
4245
4246 cbuf[0] = 0;
4247 ret = sscanf(buf, "%s %x %x", cbuf, &id, &val);
4248 /*pr_info(
4249 "%s(%s)=>ret %ld: %s, %x, %x\n",
4250 __func__, buf, ret, cbuf, id, val);*/
4251 if (strcmp(cbuf, "schedule") == 0) {
4252 pr_info("VDEC_DEBUG: force schedule\n");
4253 up(&core->sem);
4254 } else if (strcmp(cbuf, "power_off") == 0) {
4255 pr_info("VDEC_DEBUG: power off core %d\n", id);
4256 vdec_poweroff(id);
4257 } else if (strcmp(cbuf, "power_on") == 0) {
4258 pr_info("VDEC_DEBUG: power_on core %d\n", id);
4259 vdec_poweron(id);
4260 } else if (strcmp(cbuf, "wr") == 0) {
4261 pr_info("VDEC_DEBUG: WRITE_VREG(0x%x, 0x%x)\n",
4262 id, val);
4263 WRITE_VREG(id, val);
4264 } else if (strcmp(cbuf, "rd") == 0) {
4265 pr_info("VDEC_DEBUG: READ_VREG(0x%x) = 0x%x\n",
4266 id, READ_VREG(id));
4267 } else if (strcmp(cbuf, "read_hevc_clk_reg") == 0) {
4268 pr_info(
4269 "VDEC_DEBUG: HHI_VDEC4_CLK_CNTL = 0x%x, HHI_VDEC2_CLK_CNTL = 0x%x\n",
4270 READ_HHI_REG(HHI_VDEC4_CLK_CNTL),
4271 READ_HHI_REG(HHI_VDEC2_CLK_CNTL));
4272 }
4273
4274 flags = vdec_core_lock(vdec_core);
4275
4276 list_for_each_entry(vdec,
4277 &core->connected_vdec_list, list) {
4278 pr_info("vdec: status %d, id %d\n", vdec->status, vdec->id);
4279 if (((vdec->status == VDEC_STATUS_CONNECTED
4280 || vdec->status == VDEC_STATUS_ACTIVE)) &&
4281 (vdec->id == id)) {
4282 /*to add*/
4283 break;
4284 }
4285 }
4286 vdec_core_unlock(vdec_core, flags);
4287 return size;
4288}
4289
4290static ssize_t show_debug(struct class *class,
4291 struct class_attribute *attr, char *buf)
4292{
4293 char *pbuf = buf;
4294 struct vdec_s *vdec;
4295 struct vdec_core_s *core = vdec_core;
4296 unsigned long flags = vdec_core_lock(vdec_core);
4297 u64 tmp;
4298
4299 pbuf += sprintf(pbuf,
4300 "============== help:\n");
4301 pbuf += sprintf(pbuf,
4302 "'echo xxx > debug' usuage:\n");
4303 pbuf += sprintf(pbuf,
4304 "schedule - trigger schedule thread to run\n");
4305 pbuf += sprintf(pbuf,
4306 "power_off core_num - call vdec_poweroff(core_num)\n");
4307 pbuf += sprintf(pbuf,
4308 "power_on core_num - call vdec_poweron(core_num)\n");
4309 pbuf += sprintf(pbuf,
4310 "wr adr val - call WRITE_VREG(adr, val)\n");
4311 pbuf += sprintf(pbuf,
4312 "rd adr - call READ_VREG(adr)\n");
4313 pbuf += sprintf(pbuf,
4314 "read_hevc_clk_reg - read HHI register for hevc clk\n");
4315 pbuf += sprintf(pbuf,
4316 "===================\n");
4317
4318 pbuf += sprintf(pbuf,
4319 "name(core)\tschedule_count\trun_count\tinput_underrun\tdecbuf_not_ready\trun_time\n");
4320 list_for_each_entry(vdec,
4321 &core->connected_vdec_list, list) {
4322 enum vdec_type_e type;
4323 if ((vdec->status == VDEC_STATUS_CONNECTED
4324 || vdec->status == VDEC_STATUS_ACTIVE)) {
4325 for (type = VDEC_1; type < VDEC_MAX; type++) {
4326 if (vdec->core_mask & (1 << type)) {
4327 pbuf += sprintf(pbuf, "%s(%d):",
4328 vdec->vf_provider_name, type);
4329 pbuf += sprintf(pbuf, "\t%d",
4330 vdec->check_count[type]);
4331 pbuf += sprintf(pbuf, "\t%d",
4332 vdec->run_count[type]);
4333 pbuf += sprintf(pbuf, "\t%d",
4334 vdec->input_underrun_count[type]);
4335 pbuf += sprintf(pbuf, "\t%d",
4336 vdec->not_run_ready_count[type]);
4337 tmp = vdec->run_clk[type] * 100;
4338 do_div(tmp, vdec->total_clk[type]);
4339 pbuf += sprintf(pbuf,
4340 "\t%d%%\n",
4341 vdec->total_clk[type] == 0 ? 0 :
4342 (u32)tmp);
4343 }
4344 }
4345 }
4346 }
4347
4348 vdec_core_unlock(vdec_core, flags);
4349 return pbuf - buf;
4350
4351}
4352#endif
4353
4354/*irq num as same as .dts*/
4355/*
4356 * interrupts = <0 3 1
4357 * 0 23 1
4358 * 0 32 1
4359 * 0 43 1
4360 * 0 44 1
4361 * 0 45 1>;
4362 * interrupt-names = "vsync",
4363 * "demux",
4364 * "parser",
4365 * "mailbox_0",
4366 * "mailbox_1",
4367 * "mailbox_2";
4368 */
/*
 * Register (or re-point) a decoder interrupt.
 * @num: slot in the .dts interrupt table (see comment above).
 * @handler/@thread_fn: driver-level top/bottom halves, stored in the
 *	per-slot isr_context and dispatched by vdec_isr/vdec_thread_isr.
 * @dev: opaque cookie handed back to the driver handlers.
 *
 * The kernel-level IRQ is requested only once per slot (irq < 0 means
 * "not yet requested"); later calls just swap the driver handlers.
 * Returns 0 on success, -EINVAL on bad slot/missing irq, -EIO on
 * request_threaded_irq failure.
 */
s32 vdec_request_threaded_irq(enum vdec_irq_num num,
			irq_handler_t handler,
			irq_handler_t thread_fn,
			unsigned long irqflags,
			const char *devname, void *dev)
{
	s32 res_irq;
	s32 ret = 0;

	if (num >= VDEC_IRQ_MAX) {
		pr_err("[%s] request irq error, irq num too big!", __func__);
		return -EINVAL;
	}

	if (vdec_core->isr_context[num].irq < 0) {
		/* first use of this slot: resolve and request the IRQ */
		res_irq = platform_get_irq(
			vdec_core->vdec_core_platform_device, num);
		if (res_irq < 0) {
			pr_err("[%s] get irq error!", __func__);
			return -EINVAL;
		}

		vdec_core->isr_context[num].irq = res_irq;
		vdec_core->isr_context[num].dev_isr = handler;
		vdec_core->isr_context[num].dev_threaded_isr = thread_fn;
		vdec_core->isr_context[num].dev_id = dev;

		/* with a threaded handler the hard irq must be ONESHOT */
		ret = request_threaded_irq(res_irq,
			vdec_isr,
			vdec_thread_isr,
			(thread_fn) ? IRQF_ONESHOT : irqflags,
			devname,
			&vdec_core->isr_context[num]);

		if (ret) {
			/* roll back the slot so a retry starts clean */
			vdec_core->isr_context[num].irq = -1;
			vdec_core->isr_context[num].dev_isr = NULL;
			vdec_core->isr_context[num].dev_threaded_isr = NULL;
			vdec_core->isr_context[num].dev_id = NULL;

			pr_err("vdec irq register error for %s.\n", devname);
			return -EIO;
		}
	} else {
		/* slot already wired up: just replace the driver handlers */
		vdec_core->isr_context[num].dev_isr = handler;
		vdec_core->isr_context[num].dev_threaded_isr = thread_fn;
		vdec_core->isr_context[num].dev_id = dev;
	}

	return ret;
}
EXPORT_SYMBOL(vdec_request_threaded_irq);
4421
4422s32 vdec_request_irq(enum vdec_irq_num num, irq_handler_t handler,
4423 const char *devname, void *dev)
4424{
4425 pr_debug("vdec_request_irq %p, %s\n", handler, devname);
4426
4427 return vdec_request_threaded_irq(num,
4428 handler,
4429 NULL,/*no thread_fn*/
4430 IRQF_SHARED,
4431 devname,
4432 dev);
4433}
4434EXPORT_SYMBOL(vdec_request_irq);
4435
/*
 * Detach the driver handlers from an interrupt slot.  The kernel IRQ
 * itself stays requested (vdec_request_threaded_irq reuses it); the
 * handler pointers are cleared first, then synchronize_irq() waits for
 * any in-flight handler to finish — order matters here.
 */
void vdec_free_irq(enum vdec_irq_num num, void *dev)
{
	if (num >= VDEC_IRQ_MAX) {
		pr_err("[%s] request irq error, irq num too big!", __func__);
		return;
	}
	/*
	 *assume amrisc is stopped already and there is no mailbox interrupt
	 * when we reset pointers here.
	 */
	vdec_core->isr_context[num].dev_isr = NULL;
	vdec_core->isr_context[num].dev_threaded_isr = NULL;
	vdec_core->isr_context[num].dev_id = NULL;
	synchronize_irq(vdec_core->isr_context[num].irq);
}
EXPORT_SYMBOL(vdec_free_irq);
4452
4453struct vdec_s *vdec_get_default_vdec_for_userdata(void)
4454{
4455 struct vdec_s *vdec;
4456 struct vdec_s *ret_vdec;
4457 struct vdec_core_s *core = vdec_core;
4458 unsigned long flags;
4459 int id;
4460
4461 flags = vdec_core_lock(vdec_core);
4462
4463 id = 0x10000000;
4464 ret_vdec = NULL;
4465 if (!list_empty(&core->connected_vdec_list)) {
4466 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4467 if (vdec->id < id) {
4468 id = vdec->id;
4469 ret_vdec = vdec;
4470 }
4471 }
4472 }
4473
4474 vdec_core_unlock(vdec_core, flags);
4475
4476 return ret_vdec;
4477}
4478EXPORT_SYMBOL(vdec_get_default_vdec_for_userdata);
4479
4480struct vdec_s *vdec_get_vdec_by_id(int vdec_id)
4481{
4482 struct vdec_s *vdec;
4483 struct vdec_s *ret_vdec;
4484 struct vdec_core_s *core = vdec_core;
4485 unsigned long flags;
4486
4487 flags = vdec_core_lock(vdec_core);
4488
4489 ret_vdec = NULL;
4490 if (!list_empty(&core->connected_vdec_list)) {
4491 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4492 if (vdec->id == vdec_id) {
4493 ret_vdec = vdec;
4494 break;
4495 }
4496 }
4497 }
4498
4499 vdec_core_unlock(vdec_core, flags);
4500
4501 return ret_vdec;
4502}
4503EXPORT_SYMBOL(vdec_get_vdec_by_id);
4504
4505int vdec_read_user_data(struct vdec_s *vdec,
4506 struct userdata_param_t *p_userdata_param)
4507{
4508 int ret = 0;
4509
4510 if (!vdec)
4511 vdec = vdec_get_default_vdec_for_userdata();
4512
4513 if (vdec) {
4514 if (vdec->user_data_read)
4515 ret = vdec->user_data_read(vdec, p_userdata_param);
4516 }
4517 return ret;
4518}
4519EXPORT_SYMBOL(vdec_read_user_data);
4520
4521int vdec_wakeup_userdata_poll(struct vdec_s *vdec)
4522{
4523 if (vdec) {
4524 if (vdec->wakeup_userdata_poll)
4525 vdec->wakeup_userdata_poll(vdec);
4526 }
4527
4528 return 0;
4529}
4530EXPORT_SYMBOL(vdec_wakeup_userdata_poll);
4531
4532void vdec_reset_userdata_fifo(struct vdec_s *vdec, int bInit)
4533{
4534 if (!vdec)
4535 vdec = vdec_get_default_vdec_for_userdata();
4536
4537 if (vdec) {
4538 if (vdec->reset_userdata_fifo)
4539 vdec->reset_userdata_fifo(vdec, bInit);
4540 }
4541}
4542EXPORT_SYMBOL(vdec_reset_userdata_fifo);
4543
4544static int dump_mode;
4545static ssize_t dump_risc_mem_store(struct class *class,
4546 struct class_attribute *attr,
4547 const char *buf, size_t size)/*set*/
4548{
4549 unsigned int val;
4550 ssize_t ret;
4551 char dump_mode_str[4] = "PRL";
4552
4553 /*ret = sscanf(buf, "%d", &val);*/
4554 ret = kstrtoint(buf, 0, &val);
4555
4556 if (ret != 0)
4557 return -EINVAL;
4558 dump_mode = val & 0x3;
4559 pr_info("set dump mode to %d,%c_mem\n",
4560 dump_mode, dump_mode_str[dump_mode]);
4561 return size;
4562}
/*
 * Read one word of amrisc memory via the indirect access register
 * pair: 0x31b latches the address, 0x31c returns the data.
 * NOTE(review): offsets are magic constants; assumes the caller has
 * already configured the dump window via regs 0x301/0x31d — confirm.
 */
static u32 read_amrisc_reg(int reg)
{
	WRITE_VREG(0x31b, reg);
	return READ_VREG(0x31c);
}
4568
/*
 * Dump the amrisc program memory (pmem) to the kernel log, one word
 * per line, formatted like the firmware .o listing.
 * Reg 0x301 = 0x8000 opens the dump window; 0x31d = 0 selects pmem.
 * NOTE(review): the loop stops at index 0xffe, so word 0xfff is never
 * printed — confirm whether `i <= 0xfff` was intended.
 */
static void dump_pmem(void)
{
	int i;

	WRITE_VREG(0x301, 0x8000);
	WRITE_VREG(0x31d, 0);
	pr_info("start dump amrisc pmem of risc\n");
	for (i = 0; i < 0xfff; i++) {
		/*same as .o format*/
		pr_info("%08x // 0x%04x:\n", read_amrisc_reg(i), i);
	}
}
4581
/*
 * Dump the amrisc local memory (lmem) to the kernel log, one word
 * per line.  Reg 0x301 = 0x8000 opens the dump window; 0x31d = 2
 * selects lmem.
 * NOTE(review): loop stops at index 0x3fe; the last word (0x3ff) is
 * never printed — confirm whether `i <= 0x3ff` was intended.
 */
static void dump_lmem(void)
{
	int i;

	WRITE_VREG(0x301, 0x8000);
	WRITE_VREG(0x31d, 2);
	pr_info("start dump amrisc lmem\n");
	for (i = 0; i < 0x3ff; i++) {
		/*same as */
		pr_info("[%04x] = 0x%08x:\n", i, read_amrisc_reg(i));
	}
}
4594
/*
 * dump_risc_mem_show() - sysfs read handler; dumps the amrisc memory
 * selected by dump_risc_mem_store() (0 = pmem, 2 = lmem) to the kernel
 * log after making sure the VDEC power/clock domain is up.
 * Returns "done\n" (or "amrisc is power off\n" when VDEC_1 is down).
 */
static ssize_t dump_risc_mem_show(struct class *class,
			struct class_attribute *attr, char *buf)
{
	char *pbuf = buf;
	int ret;

	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
		/* M8+: hold vdec_mutex across the dump; bail out if the
		 * VDEC_1 power domain is off (registers unreadable).
		 */
		mutex_lock(&vdec_mutex);
		if (!vdec_on(VDEC_1)) {
			mutex_unlock(&vdec_mutex);
			pbuf += sprintf(pbuf, "amrisc is power off\n");
			ret = pbuf - buf;
			return ret;
		}
	} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
		/*TODO:M6 define */
		/*
		 * switch_mod_gate_by_type(MOD_VDEC, 1);
		 */
		amports_switch_gate("vdec", 1);
	}
	/*start do**/
	switch (dump_mode) {
	case 0:
		dump_pmem();
		break;
	case 2:
		dump_lmem();
		break;
	default:
		/* modes 1 and 3 are reserved: nothing to dump */
		break;
	}

	/*done*/
	/* mirror the lock/gate taken above, in the same cpu-id order */
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
		mutex_unlock(&vdec_mutex);
	else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
		/*TODO:M6 define */
		/*
		 * switch_mod_gate_by_type(MOD_VDEC, 0);
		 */
		amports_switch_gate("vdec", 0);
	}
	return sprintf(buf, "done\n");
}
4640
4641static ssize_t core_show(struct class *class, struct class_attribute *attr,
4642 char *buf)
4643{
4644 struct vdec_core_s *core = vdec_core;
4645 char *pbuf = buf;
4646
4647 if (list_empty(&core->connected_vdec_list))
4648 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4649 else {
4650 struct vdec_s *vdec;
4651
4652 pbuf += sprintf(pbuf,
4653 " Core: last_sched %p, sched_mask %lx\n",
4654 core->last_vdec,
4655 core->sched_mask);
4656
4657 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4658 pbuf += sprintf(pbuf,
4659 "\tvdec.%d (%p (%s)), status = %s,\ttype = %s, \tactive_mask = %lx\n",
4660 vdec->id,
4661 vdec,
4662 vdec_device_name[vdec->format * 2],
4663 vdec_status_str(vdec),
4664 vdec_type_str(vdec),
4665 vdec->active_mask);
4666 }
4667 }
4668
4669 return pbuf - buf;
4670}
4671
/*
 * vdec_status_show() - sysfs read handler dumping per-channel decoder
 * statistics (resolution, rates, frame/error counters) for every
 * connected or active vdec instance.  Holds the core lock for the
 * whole walk so the list cannot change underneath.
 * NOTE(review): output uses unbounded sprintf(); with many instances
 * this could exceed the PAGE_SIZE sysfs buffer — confirm.
 */
static ssize_t vdec_status_show(struct class *class,
			struct class_attribute *attr, char *buf)
{
	char *pbuf = buf;
	struct vdec_s *vdec;
	struct vdec_info vs;
	unsigned char vdec_num = 0;
	struct vdec_core_s *core = vdec_core;
	unsigned long flags = vdec_core_lock(vdec_core);

	if (list_empty(&core->connected_vdec_list)) {
		pbuf += sprintf(pbuf, "No vdec.\n");
		goto out;
	}

	list_for_each_entry(vdec, &core->connected_vdec_list, list) {
		/* only report instances that are actually decoding */
		if ((vdec->status == VDEC_STATUS_CONNECTED
			|| vdec->status == VDEC_STATUS_ACTIVE)) {
			memset(&vs, 0, sizeof(vs));
			/* non-zero means the decoder could not fill vs */
			if (vdec_status(vdec, &vs)) {
				pbuf += sprintf(pbuf, "err.\n");
				goto out;
			}
			pbuf += sprintf(pbuf,
				"vdec channel %u statistics:\n",
				vdec_num);
			pbuf += sprintf(pbuf,
				"%13s : %s\n", "device name",
				vs.vdec_name);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame width",
				vs.frame_width);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame height",
				vs.frame_height);
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "frame rate",
				vs.frame_rate, "fps");
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "bit rate",
				vs.bit_rate / 1024 * 8, "kbps");
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "status",
				vs.status);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame dur",
				vs.frame_dur);
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "frame data",
				vs.frame_data / 1024, "KB");
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame count",
				vs.frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "drop count",
				vs.drop_frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "fra err count",
				vs.error_frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "hw err count",
				vs.error_count);
			pbuf += sprintf(pbuf,
				"%13s : %llu %s\n", "total data",
				vs.total_data / 1024, "KB");
			pbuf += sprintf(pbuf,
				"%13s : %x\n\n", "ratio_control",
				vs.ratio_control);

			vdec_num++;
		}
	}
out:
	vdec_core_unlock(vdec_core, flags);
	return pbuf - buf;
}
4748
4749static ssize_t dump_vdec_blocks_show(struct class *class,
4750 struct class_attribute *attr, char *buf)
4751{
4752 struct vdec_core_s *core = vdec_core;
4753 char *pbuf = buf;
4754 unsigned long flags = vdec_core_lock(vdec_core);
4755
4756 if (list_empty(&core->connected_vdec_list))
4757 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4758 else {
4759 struct vdec_s *vdec;
4760 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4761 pbuf += vdec_input_dump_blocks(&vdec->input,
4762 pbuf, PAGE_SIZE - (pbuf - buf));
4763 }
4764 }
4765 vdec_core_unlock(vdec_core, flags);
4766
4767 return pbuf - buf;
4768}
4769static ssize_t dump_vdec_chunks_show(struct class *class,
4770 struct class_attribute *attr, char *buf)
4771{
4772 struct vdec_core_s *core = vdec_core;
4773 char *pbuf = buf;
4774 unsigned long flags = vdec_core_lock(vdec_core);
4775
4776 if (list_empty(&core->connected_vdec_list))
4777 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4778 else {
4779 struct vdec_s *vdec;
4780 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4781 pbuf += vdec_input_dump_chunks(vdec->id, &vdec->input,
4782 pbuf, PAGE_SIZE - (pbuf - buf));
4783 }
4784 }
4785 vdec_core_unlock(vdec_core, flags);
4786
4787 return pbuf - buf;
4788}
4789
4790static ssize_t dump_decoder_state_show(struct class *class,
4791 struct class_attribute *attr, char *buf)
4792{
4793 char *pbuf = buf;
4794 struct vdec_s *vdec;
4795 struct vdec_core_s *core = vdec_core;
4796 unsigned long flags = vdec_core_lock(vdec_core);
4797
4798 if (list_empty(&core->connected_vdec_list)) {
4799 pbuf += sprintf(pbuf, "No vdec.\n");
4800 } else {
4801 list_for_each_entry(vdec,
4802 &core->connected_vdec_list, list) {
4803 if ((vdec->status == VDEC_STATUS_CONNECTED
4804 || vdec->status == VDEC_STATUS_ACTIVE)
4805 && vdec->dump_state)
4806 vdec->dump_state(vdec);
4807 }
4808 }
4809 vdec_core_unlock(vdec_core, flags);
4810
4811 return pbuf - buf;
4812}
4813
4814static ssize_t dump_fps_show(struct class *class,
4815 struct class_attribute *attr, char *buf)
4816{
4817 char *pbuf = buf;
4818 struct vdec_core_s *core = vdec_core;
4819 int i;
4820
4821 unsigned long flags = vdec_fps_lock(vdec_core);
4822 for (i = 0; i < MAX_INSTANCE_MUN; i++)
4823 pbuf += sprintf(pbuf, "%d ", core->decode_fps[i].fps);
4824
4825 pbuf += sprintf(pbuf, "\n");
4826 vdec_fps_unlock(vdec_core, flags);
4827
4828 return pbuf - buf;
4829}
4830
4831
4832
/*
 * Attribute table for /sys/class/vdec/.  Read-only debug dumps plus a
 * few read/write tunables; handlers are defined earlier in this file
 * (or elsewhere for the entries not visible here).
 */
static struct class_attribute vdec_class_attrs[] = {
	__ATTR_RO(amrisc_regs),
	__ATTR_RO(dump_trace),
	__ATTR_RO(clock_level),
	__ATTR(poweron_clock_level, S_IRUGO | S_IWUSR | S_IWGRP,
	show_poweron_clock_level, store_poweron_clock_level),
	__ATTR(dump_risc_mem, S_IRUGO | S_IWUSR | S_IWGRP,
	dump_risc_mem_show, dump_risc_mem_store),
	__ATTR(keep_vdec_mem, S_IRUGO | S_IWUSR | S_IWGRP,
	show_keep_vdec_mem, store_keep_vdec_mem),
	__ATTR_RO(core),
	__ATTR_RO(vdec_status),
	__ATTR_RO(dump_vdec_blocks),
	__ATTR_RO(dump_vdec_chunks),
	__ATTR_RO(dump_decoder_state),
#ifdef VDEC_DEBUG_SUPPORT
	__ATTR(debug, S_IRUGO | S_IWUSR | S_IWGRP,
	show_debug, store_debug),
#endif
#ifdef FRAME_CHECK
	__ATTR(dump_yuv, S_IRUGO | S_IWUSR | S_IWGRP,
	dump_yuv_show, dump_yuv_store),
	__ATTR(frame_check, S_IRUGO | S_IWUSR | S_IWGRP,
	frame_check_show, frame_check_store),
#endif
	__ATTR_RO(dump_fps),
	__ATTR_NULL	/* sentinel: terminates the attribute table */
};
4861
/* The "vdec" device class; carries the attribute table above. */
static struct class vdec_class = {
	.name = "vdec",
	.class_attrs = vdec_class_attrs,
	};
4866
/* Return the struct device of the vdec core platform device
 * (valid only after vdec_probe() has run).
 */
struct device *get_vdec_device(void)
{
	return &vdec_core->vdec_core_platform_device->dev;
}
EXPORT_SYMBOL(get_vdec_device);
4872
4873static int vdec_probe(struct platform_device *pdev)
4874{
4875 s32 i, r;
4876
4877 vdec_core = (struct vdec_core_s *)devm_kzalloc(&pdev->dev,
4878 sizeof(struct vdec_core_s), GFP_KERNEL);
4879 if (vdec_core == NULL) {
4880 pr_err("vdec core allocation failed.\n");
4881 return -ENOMEM;
4882 }
4883
4884 atomic_set(&vdec_core->vdec_nr, 0);
4885 sema_init(&vdec_core->sem, 1);
4886
4887 r = class_register(&vdec_class);
4888 if (r) {
4889 pr_info("vdec class create fail.\n");
4890 return r;
4891 }
4892
4893 vdec_core->vdec_core_platform_device = pdev;
4894
4895 platform_set_drvdata(pdev, vdec_core);
4896
4897 for (i = 0; i < VDEC_IRQ_MAX; i++) {
4898 vdec_core->isr_context[i].index = i;
4899 vdec_core->isr_context[i].irq = -1;
4900 }
4901
4902 r = vdec_request_threaded_irq(VDEC_IRQ_0, NULL, NULL,
4903 IRQF_ONESHOT, "vdec-0", NULL);
4904 if (r < 0) {
4905 pr_err("vdec interrupt request failed\n");
4906 return r;
4907 }
4908
4909 r = vdec_request_threaded_irq(VDEC_IRQ_1, NULL, NULL,
4910 IRQF_ONESHOT, "vdec-1", NULL);
4911 if (r < 0) {
4912 pr_err("vdec interrupt request failed\n");
4913 return r;
4914 }
4915#if 0
4916 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4917 r = vdec_request_threaded_irq(VDEC_IRQ_HEVC_BACK, NULL, NULL,
4918 IRQF_ONESHOT, "vdec-hevc_back", NULL);
4919 if (r < 0) {
4920 pr_err("vdec interrupt request failed\n");
4921 return r;
4922 }
4923 }
4924#endif
4925 r = of_reserved_mem_device_init(&pdev->dev);
4926 if (r == 0)
4927 pr_info("vdec_probe done\n");
4928
4929 vdec_core->cma_dev = &pdev->dev;
4930
4931 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_M8) {
4932 /* default to 250MHz */
4933 vdec_clock_hi_enable();
4934 }
4935
4936 if (get_cpu_major_id() == AM_MESON_CPU_MAJOR_ID_GXBB) {
4937 /* set vdec dmc request to urgent */
4938 WRITE_DMCREG(DMC_AM5_CHAN_CTRL, 0x3f203cf);
4939 }
4940 INIT_LIST_HEAD(&vdec_core->connected_vdec_list);
4941 spin_lock_init(&vdec_core->lock);
4942 spin_lock_init(&vdec_core->canvas_lock);
4943 spin_lock_init(&vdec_core->fps_lock);
4944 spin_lock_init(&vdec_core->input_lock);
4945 ida_init(&vdec_core->ida);
4946 vdec_core->thread = kthread_run(vdec_core_thread, vdec_core,
4947 "vdec-core");
4948
4949 vdec_core->vdec_core_wq = alloc_ordered_workqueue("%s",__WQ_LEGACY |
4950 WQ_MEM_RECLAIM |WQ_HIGHPRI/*high priority*/, "vdec-work");
4951 /*work queue priority lower than vdec-core.*/
4952 return 0;
4953}
4954
4955static int vdec_remove(struct platform_device *pdev)
4956{
4957 int i;
4958
4959 for (i = 0; i < VDEC_IRQ_MAX; i++) {
4960 if (vdec_core->isr_context[i].irq >= 0) {
4961 free_irq(vdec_core->isr_context[i].irq,
4962 &vdec_core->isr_context[i]);
4963 vdec_core->isr_context[i].irq = -1;
4964 vdec_core->isr_context[i].dev_isr = NULL;
4965 vdec_core->isr_context[i].dev_threaded_isr = NULL;
4966 vdec_core->isr_context[i].dev_id = NULL;
4967 }
4968 }
4969
4970 kthread_stop(vdec_core->thread);
4971
4972 destroy_workqueue(vdec_core->vdec_core_wq);
4973 class_unregister(&vdec_class);
4974
4975 return 0;
4976}
4977
/* Device-tree match table (note the historical space in the string). */
static const struct of_device_id amlogic_vdec_dt_match[] = {
	{
		.compatible = "amlogic, vdec",
	},
	{},
};
4984
/* Tunables exported through the media.decoder config node (see
 * INIT_REG_NODE_CONFIGS in vdec_module_init()).
 */
static struct mconfig vdec_configs[] = {
	MC_PU32("debug_trace_num", &debug_trace_num),
	MC_PI32("hevc_max_reset_count", &hevc_max_reset_count),
	MC_PU32("clk_config", &clk_config),
	MC_PI32("step_mode", &step_mode),
	MC_PI32("poweron_clock_level", &poweron_clock_level),
};
static struct mconfig_node vdec_node;
4993
/* Platform driver glue; registered from vdec_module_init(). */
static struct platform_driver vdec_driver = {
	.probe = vdec_probe,
	.remove = vdec_remove,
	.driver = {
		.name = "vdec",
		.of_match_table = amlogic_vdec_dt_match,
	}
};
5002
/* Advertises drm_framemode capability of the vdec input path. */
static struct codec_profile_t amvdec_input_profile = {
	.name = "vdec_input",
	.profile = "drm_framemode"
};
5007
5008int vdec_module_init(void)
5009{
5010 if (platform_driver_register(&vdec_driver)) {
5011 pr_info("failed to register vdec module\n");
5012 return -ENODEV;
5013 }
5014 INIT_REG_NODE_CONFIGS("media.decoder", &vdec_node,
5015 "vdec", vdec_configs, CONFIG_FOR_RW);
5016 vcodec_profile_register(&amvdec_input_profile);
5017 return 0;
5018}
5019EXPORT_SYMBOL(vdec_module_init);
5020
/* Module exit: unregister the platform driver (triggers vdec_remove). */
void vdec_module_exit(void)
{
	platform_driver_unregister(&vdec_driver);
}
EXPORT_SYMBOL(vdec_module_exit);
5026
/*
 * NOTE(review): dead code — superseded by the exported
 * vdec_module_init()/vdec_module_exit() above; candidate for removal.
 */
#if 0
static int __init vdec_module_init(void)
{
	if (platform_driver_register(&vdec_driver)) {
		pr_info("failed to register vdec module\n");
		return -ENODEV;
	}
	INIT_REG_NODE_CONFIGS("media.decoder", &vdec_node,
		"vdec", vdec_configs, CONFIG_FOR_RW);
	return 0;
}

static void __exit vdec_module_exit(void)
{
	platform_driver_unregister(&vdec_driver);
}
#endif
5044
/* reserved_mem device_init hook: record the CMA device for later
 * decoder buffer allocations.  Always succeeds.
 */
static int vdec_mem_device_init(struct reserved_mem *rmem, struct device *dev)
{
	vdec_core->cma_dev = dev;

	return 0;
}
5051
static const struct reserved_mem_ops rmem_vdec_ops = {
	.device_init = vdec_mem_device_init,
};

/* Early reserved-memory setup for the "amlogic, vdec-memory" region
 * (see RESERVEDMEM_OF_DECLARE below); installs the ops above.
 */
static int __init vdec_mem_setup(struct reserved_mem *rmem)
{
	rmem->ops = &rmem_vdec_ops;
	pr_info("vdec: reserved mem setup\n");

	return 0;
}
5063
5064void vdec_fill_frame_info(struct vframe_qos_s *vframe_qos, int debug)
5065{
5066 if (frame_info_buf_in == NULL) {
5067 pr_info("error,frame_info_buf_in is null\n");
5068 return;
5069 }
5070 if (frame_info_buf_out == NULL) {
5071 pr_info("error,frame_info_buf_out is null\n");
5072 return;
5073 }
5074 if (frame_qos_wr >= QOS_FRAME_NUM)
5075 frame_qos_wr = 0;
5076
5077 if (frame_qos_wr >= QOS_FRAME_NUM ||
5078 frame_qos_wr < 0) {
5079 pr_info("error,index :%d is error\n", frame_qos_wr);
5080 return;
5081 }
5082 if (frameinfo_flag == DISABLE_FRAME_INFO)
5083 return;
5084
5085 if (frameinfo_flag == PRINT_FRAME_INFO) {
5086 pr_info("num %d size %d pts %d\n",
5087 vframe_qos->num,
5088 vframe_qos->size,
5089 vframe_qos->pts);
5090 pr_info("mv min_mv %d avg_mv %d max_mv %d\n",
5091 vframe_qos->min_mv,
5092 vframe_qos->avg_mv,
5093 vframe_qos->max_mv);
5094 pr_info("qp min_qp %d avg_qp %d max_qp %d\n",
5095 vframe_qos->min_qp,
5096 vframe_qos->avg_qp,
5097 vframe_qos->max_qp);
5098 pr_info("skip min_skip %d avg_skip %d max_skip %d\n",
5099 vframe_qos->min_skip,
5100 vframe_qos->avg_skip,
5101 vframe_qos->max_skip);
5102 }
5103 memcpy(&frame_info_buf_in[frame_qos_wr++],
5104 vframe_qos, sizeof(struct vframe_qos_s));
5105 if (frame_qos_wr >= QOS_FRAME_NUM)
5106 frame_qos_wr = 0;
5107
5108 /*pr_info("frame_qos_wr:%d\n", frame_qos_wr);*/
5109
5110}
5111EXPORT_SYMBOL(vdec_fill_frame_info);
5112
5113struct vframe_qos_s *vdec_get_qos_info(void)
5114{
5115 int write_count = 0;
5116 int qos_wr = frame_qos_wr;
5117
5118 if (frame_info_buf_in == NULL) {
5119 pr_info("error,frame_info_buf_in is null\n");
5120 return NULL;
5121 }
5122 if (frame_info_buf_out == NULL) {
5123 pr_info("error,frame_info_buf_out is null\n");
5124 return NULL;
5125 }
5126
5127
5128 memset(frame_info_buf_out, 0,
5129 QOS_FRAME_NUM*sizeof(struct vframe_qos_s));
5130 if (frame_qos_rd > qos_wr) {
5131 write_count = QOS_FRAME_NUM - frame_qos_rd;
5132 if (write_count > 0 && write_count <= QOS_FRAME_NUM) {
5133 memcpy(frame_info_buf_out, &frame_info_buf_in[0],
5134 write_count*sizeof(struct vframe_qos_s));
5135 if ((write_count + qos_wr) <= QOS_FRAME_NUM)
5136 memcpy(&frame_info_buf_out[write_count], frame_info_buf_in,
5137 qos_wr*sizeof(struct vframe_qos_s));
5138 else
5139 pr_info("get_qos_info:%d,out of range\n", __LINE__);
5140 } else
5141 pr_info("get_qos_info:%d,out of range\n", __LINE__);
5142 } else if (frame_qos_rd < qos_wr) {
5143 write_count = qos_wr - frame_qos_rd;
5144 if (write_count > 0 && write_count < QOS_FRAME_NUM)
5145 memcpy(frame_info_buf_out, &frame_info_buf_in[frame_qos_rd],
5146 (write_count)*sizeof(struct vframe_qos_s));
5147 else
5148 pr_info("get_qos_info:%d, out of range\n", __LINE__);
5149 }
5150 /*
5151 pr_info("cnt:%d,size:%d,num:%d,rd:%d,wr:%d\n",
5152 wirte_count,
5153 frame_info_buf_out[0].size,
5154 frame_info_buf_out[0].num,
5155 frame_qos_rd,qos_wr);
5156 */
5157 frame_qos_rd = qos_wr;
5158 return frame_info_buf_out;
5159}
5160EXPORT_SYMBOL(vdec_get_qos_info);
5161
5162
/* Hook the "amlogic, vdec-memory" DT region to vdec_mem_setup(). */
RESERVEDMEM_OF_DECLARE(vdec, "amlogic, vdec-memory", vdec_mem_setup);
/*
uint force_hevc_clock_cntl;
EXPORT_SYMBOL(force_hevc_clock_cntl);

module_param(force_hevc_clock_cntl, uint, 0664);
*/
/* Runtime tunables (variables defined earlier in this file). */
module_param(debug, uint, 0664);
module_param(debug_trace_num, uint, 0664);
module_param(hevc_max_reset_count, int, 0664);
module_param(clk_config, uint, 0664);
module_param(step_mode, int, 0664);
module_param(debugflags, int, 0664);
module_param(parallel_decode, int, 0664);
module_param(fps_detection, int, 0664);
module_param(fps_clear, int, 0664);
module_param(force_nosecure_even_drm, int, 0664);
module_param(disable_switch_single_to_mult, int, 0664);

module_param(frameinfo_flag, int, 0664);
MODULE_PARM_DESC(frameinfo_flag,
				"\n frameinfo_flag\n");
module_param(v4lvideo_add_di, int, 0664);
MODULE_PARM_DESC(v4lvideo_add_di,
				"\n v4lvideo_add_di\n");

module_param(max_di_instance, int, 0664);
MODULE_PARM_DESC(max_di_instance,
				"\n max_di_instance\n");

/*
*module_init(vdec_module_init);
*module_exit(vdec_module_exit);
*/
/* Instantiate the tracepoints declared in vdec_trace.h (standard
 * CREATE_TRACE_POINTS idiom: the header is re-included on purpose).
 */
#define CREATE_TRACE_POINTS
#include "vdec_trace.h"
MODULE_DESCRIPTION("AMLOGIC vdec driver");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Tim Yao <timyao@amlogic.com>");
5202