summaryrefslogtreecommitdiff
path: root/drivers/frame_provider/decoder/utils/vdec.c (plain)
blob: a8a4b74809d6d2deccf31cef27b2e9107d58d5c5
1/*
2 * drivers/amlogic/media/frame_provider/decoder/utils/vdec.c
3 *
4 * Copyright (C) 2016 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17#define DEBUG
18#include <linux/kernel.h>
19#include <linux/spinlock.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/delay.h>
23#include <linux/kthread.h>
24#include <linux/platform_device.h>
25#include <linux/uaccess.h>
26#include <linux/semaphore.h>
27#include <linux/sched/rt.h>
28#include <linux/interrupt.h>
29#include <linux/amlogic/media/utils/vformat.h>
30#include <linux/amlogic/iomap.h>
31#include <linux/amlogic/media/canvas/canvas.h>
32#include <linux/amlogic/media/vfm/vframe.h>
33#include <linux/amlogic/media/vfm/vframe_provider.h>
34#include <linux/amlogic/media/vfm/vframe_receiver.h>
35#include <linux/amlogic/media/video_sink/ionvideo_ext.h>
36#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
37#include <linux/amlogic/media/video_sink/v4lvideo_ext.h>
38#endif
39#include <linux/amlogic/media/vfm/vfm_ext.h>
40#include <linux/sched/clock.h>
41#include <uapi/linux/sched/types.h>
42#include <linux/signal.h>
43/*for VDEC_DEBUG_SUPPORT*/
44#include <linux/time.h>
45#include <linux/amlogic/media/utils/vdec_reg.h>
46#include "vdec.h"
47#include "vdec_trace.h"
48#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
49#include "vdec_profile.h"
50#endif
51#include <linux/sched/clock.h>
52#include <linux/of.h>
53#include <linux/of_fdt.h>
54#include <linux/libfdt_env.h>
55#include <linux/of_reserved_mem.h>
56#include <linux/dma-contiguous.h>
57#include <linux/cma.h>
58#include <linux/module.h>
59#include <linux/slab.h>
60#include <linux/dma-mapping.h>
61#include <linux/dma-contiguous.h>
62#include "../../../stream_input/amports/amports_priv.h"
63
64#include <linux/amlogic/media/utils/amports_config.h>
65#include "../utils/amvdec.h"
66#include "vdec_input.h"
67
68#include "../../../common/media_clock/clk/clk.h"
69#include <linux/reset.h>
70#include <linux/amlogic/cpu_version.h>
71#include <linux/amlogic/media/codec_mm/codec_mm.h>
72#include <linux/amlogic/media/video_sink/video_keeper.h>
73#include <linux/amlogic/media/codec_mm/configs.h>
74#include <linux/amlogic/media/frame_sync/ptsserv.h>
75#include "secprot.h"
76#include "../../../common/chips/decoder_cpu_ver_info.h"
77#include "frame_check.h"
78
79#ifdef CONFIG_AMLOGIC_POWER
80#include <linux/amlogic/power_ctrl.h>
81#endif
82
83#ifdef CONFIG_AMLOGIC_IONVIDEO
84#include <linux/amlogic/media/video_sink/ionvideo_ext.h>
85#endif
86#include <linux/pm.h>
87#include <linux/pm_domain.h>
88#include <linux/pm_runtime.h>
89
90/* wait other module to support this function */
91#define is_support_power_ctrl() 0
92
93static DEFINE_MUTEX(vdec_mutex);
94
95#define MC_SIZE (4096 * 4)
96#define CMA_ALLOC_SIZE SZ_64M
97#define MEM_NAME "vdec_prealloc"
98static int inited_vcodec_num;
99#define jiffies_ms div64_u64(get_jiffies_64() * 1000, HZ)
100static int poweron_clock_level;
101static int keep_vdec_mem;
102static unsigned int debug_trace_num = 16 * 20;
103static int step_mode;
104static unsigned int clk_config;
105/*
106 &1: sched_priority to MAX_RT_PRIO -1.
107 &2: always reload firmware.
108 &4: vdec canvas debug enable
109 */
110static unsigned int debug;
111
112static int hevc_max_reset_count;
113
114static int no_powerdown;
115static int parallel_decode = 1;
116static int fps_detection;
117static int fps_clear;
118
119static int force_nosecure_even_drm;
120static int disable_switch_single_to_mult;
121
122static DEFINE_SPINLOCK(vdec_spin_lock);
123
124#define HEVC_TEST_LIMIT 100
125#define GXBB_REV_A_MINOR 0xA
126
127#define PRINT_FRAME_INFO 1
128#define DISABLE_FRAME_INFO 2
129
130static int frameinfo_flag = 0;
131static int v4lvideo_add_di = 1;
132static int max_di_instance = 2;
133//static int path_debug = 0;
134
135static int enable_mvdec_info = 1;
136
137int decode_underflow = 0;
138
139static bool disable_power_domain;
140
141#define CANVAS_MAX_SIZE (AMVDEC_CANVAS_MAX1 - AMVDEC_CANVAS_START_INDEX + 1 + AMVDEC_CANVAS_MAX2 + 1)
142
143struct am_reg {
144 char *name;
145 int offset;
146};
147
148struct vdec_isr_context_s {
149 int index;
150 int irq;
151 irq_handler_t dev_isr;
152 irq_handler_t dev_threaded_isr;
153 void *dev_id;
154 struct vdec_s *vdec;
155};
156
157struct decode_fps_s {
158 u32 frame_count;
159 u64 start_timestamp;
160 u64 last_timestamp;
161 u32 fps;
162};
163
164enum vdec_pd_e {
165 PD_VDEC,
166 PD_HCODEC,
167 PD_HEVC,
168 PD_WAVE,
169 PD_MAX
170};
171
172struct vdec_pwrc_s {
173 u8 *name;
174 struct device *dev;
175 struct device_link *link;
176};
177
178static struct vdec_pwrc_s vdec_pd[] = {
179 { .name = "pwrc-vdec", },
180 { .name = "pwrc-hcodec",},
181 { .name = "pwrc-hevc", },
182 { .name = "pwrc-wave", },
183};
184
185struct vdec_core_s {
186 struct list_head connected_vdec_list;
187 spinlock_t lock;
188 spinlock_t canvas_lock;
189 spinlock_t fps_lock;
190 spinlock_t input_lock;
191 struct ida ida;
192 atomic_t vdec_nr;
193 struct vdec_s *vfm_vdec;
194 struct vdec_s *active_vdec;
195 struct vdec_s *active_hevc;
196 struct vdec_s *hint_fr_vdec;
197 struct platform_device *vdec_core_platform_device;
198 struct device *cma_dev;
199 struct semaphore sem;
200 struct task_struct *thread;
201 struct workqueue_struct *vdec_core_wq;
202
203 unsigned long sched_mask;
204 struct vdec_isr_context_s isr_context[VDEC_IRQ_MAX];
205 int power_ref_count[VDEC_MAX];
206 struct vdec_s *last_vdec;
207 int parallel_dec;
208 unsigned long power_ref_mask;
209 int vdec_combine_flag;
210 struct decode_fps_s decode_fps[MAX_INSTANCE_MUN];
211 unsigned long buff_flag;
212 unsigned long stream_buff_flag;
213 struct vdec_pwrc_s *pd;
214};
215
216struct canvas_status_s {
217 int type;
218 int canvas_used_flag;
219 int id;
220};
221
222
223static struct vdec_core_s *vdec_core;
224
225static const char * const vdec_status_string[] = {
226 "VDEC_STATUS_UNINITIALIZED",
227 "VDEC_STATUS_DISCONNECTED",
228 "VDEC_STATUS_CONNECTED",
229 "VDEC_STATUS_ACTIVE"
230};
231
232static int debugflags;
233
234static struct canvas_status_s canvas_stat[AMVDEC_CANVAS_MAX1 - AMVDEC_CANVAS_START_INDEX + 1 + AMVDEC_CANVAS_MAX2 + 1];
235
236
237int vdec_get_debug_flags(void)
238{
239 return debugflags;
240}
241EXPORT_SYMBOL(vdec_get_debug_flags);
242
243void VDEC_PRINT_FUN_LINENO(const char *fun, int line)
244{
245 if (debugflags & 0x10000000)
246 pr_info("%s, %d\n", fun, line);
247}
248EXPORT_SYMBOL(VDEC_PRINT_FUN_LINENO);
249
250unsigned char is_mult_inc(unsigned int type)
251{
252 unsigned char ret = 0;
253 if (vdec_get_debug_flags() & 0xf000)
254 ret = (vdec_get_debug_flags() & 0x1000)
255 ? 1 : 0;
256 else if (type & PORT_TYPE_DECODER_SCHED)
257 ret = 1;
258 return ret;
259}
260EXPORT_SYMBOL(is_mult_inc);
261
262static const bool cores_with_input[VDEC_MAX] = {
263 true, /* VDEC_1 */
264 false, /* VDEC_HCODEC */
265 false, /* VDEC_2 */
266 true, /* VDEC_HEVC / VDEC_HEVC_FRONT */
267 false, /* VDEC_HEVC_BACK */
268};
269
270static const int cores_int[VDEC_MAX] = {
271 VDEC_IRQ_1,
272 VDEC_IRQ_2,
273 VDEC_IRQ_0,
274 VDEC_IRQ_0,
275 VDEC_IRQ_HEVC_BACK
276};
277
/* Acquire the canvas-table spinlock; returns the saved IRQ flags. */
unsigned long vdec_canvas_lock(struct vdec_core_s *core)
{
	unsigned long flags;
	spin_lock_irqsave(&core->canvas_lock, flags);

	return flags;
}

/* Release the canvas-table spinlock, restoring @flags from the lock call. */
void vdec_canvas_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->canvas_lock, flags);
}

/* Acquire the fps-statistics spinlock; returns the saved IRQ flags. */
unsigned long vdec_fps_lock(struct vdec_core_s *core)
{
	unsigned long flags;
	spin_lock_irqsave(&core->fps_lock, flags);

	return flags;
}

/* Release the fps-statistics spinlock, restoring @flags. */
void vdec_fps_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->fps_lock, flags);
}

/* Acquire the core scheduler spinlock; returns the saved IRQ flags. */
unsigned long vdec_core_lock(struct vdec_core_s *core)
{
	unsigned long flags;

	spin_lock_irqsave(&core->lock, flags);

	return flags;
}

/* Release the core scheduler spinlock, restoring @flags. */
void vdec_core_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->lock, flags);
}

/* Acquire the input-buffer state spinlock; returns the saved IRQ flags. */
unsigned long vdec_inputbuff_lock(struct vdec_core_s *core)
{
	unsigned long flags;

	spin_lock_irqsave(&core->input_lock, flags);

	return flags;
}

/* Release the input-buffer state spinlock, restoring @flags. */
void vdec_inputbuff_unlock(struct vdec_core_s *core, unsigned long flags)
{
	spin_unlock_irqrestore(&core->input_lock, flags);
}
331
332
333static bool vdec_is_input_frame_empty(struct vdec_s *vdec) {
334 struct vdec_core_s *core = vdec_core;
335 bool ret;
336 unsigned long flags;
337
338 flags = vdec_inputbuff_lock(core);
339 ret = !(vdec->core_mask & core->buff_flag);
340 vdec_inputbuff_unlock(core, flags);
341
342 return ret;
343}
344
345static void vdec_up(struct vdec_s *vdec)
346{
347 struct vdec_core_s *core = vdec_core;
348
349 if (debug & 8)
350 pr_info("vdec_up, id:%d\n", vdec->id);
351 up(&core->sem);
352}
353
354static u64 vdec_get_us_time_system(void)
355{
356 return div64_u64(local_clock(), 1000);
357}
358
359static void vdec_fps_clear(int id)
360{
361 if (id >= MAX_INSTANCE_MUN)
362 return;
363
364 vdec_core->decode_fps[id].frame_count = 0;
365 vdec_core->decode_fps[id].start_timestamp = 0;
366 vdec_core->decode_fps[id].last_timestamp = 0;
367 vdec_core->decode_fps[id].fps = 0;
368}
369
370static void vdec_fps_clearall(void)
371{
372 int i;
373
374 for (i = 0; i < MAX_INSTANCE_MUN; i++) {
375 vdec_core->decode_fps[i].frame_count = 0;
376 vdec_core->decode_fps[i].start_timestamp = 0;
377 vdec_core->decode_fps[i].last_timestamp = 0;
378 vdec_core->decode_fps[i].fps = 0;
379 }
380}
381
382static void vdec_fps_detec(int id)
383{
384 unsigned long flags;
385
386 if (fps_detection == 0)
387 return;
388
389 if (id >= MAX_INSTANCE_MUN)
390 return;
391
392 flags = vdec_fps_lock(vdec_core);
393
394 if (fps_clear == 1) {
395 vdec_fps_clearall();
396 fps_clear = 0;
397 }
398
399 vdec_core->decode_fps[id].frame_count++;
400 if (vdec_core->decode_fps[id].frame_count == 1) {
401 vdec_core->decode_fps[id].start_timestamp =
402 vdec_get_us_time_system();
403 vdec_core->decode_fps[id].last_timestamp =
404 vdec_core->decode_fps[id].start_timestamp;
405 } else {
406 vdec_core->decode_fps[id].last_timestamp =
407 vdec_get_us_time_system();
408 vdec_core->decode_fps[id].fps =
409 (u32)div_u64(((u64)(vdec_core->decode_fps[id].frame_count) *
410 10000000000),
411 (vdec_core->decode_fps[id].last_timestamp -
412 vdec_core->decode_fps[id].start_timestamp));
413 }
414 vdec_fps_unlock(vdec_core, flags);
415}
416
417
418
419static int get_canvas(unsigned int index, unsigned int base)
420{
421 int start;
422 int canvas_index = index * base;
423 int ret;
424
425 if ((base > 4) || (base == 0))
426 return -1;
427
428 if ((AMVDEC_CANVAS_START_INDEX + canvas_index + base - 1)
429 <= AMVDEC_CANVAS_MAX1) {
430 start = AMVDEC_CANVAS_START_INDEX + base * index;
431 } else {
432 canvas_index -= (AMVDEC_CANVAS_MAX1 -
433 AMVDEC_CANVAS_START_INDEX + 1) / base * base;
434 if (canvas_index <= AMVDEC_CANVAS_MAX2)
435 start = canvas_index / base;
436 else
437 return -1;
438 }
439
440 if (base == 1) {
441 ret = start;
442 } else if (base == 2) {
443 ret = ((start + 1) << 16) | ((start + 1) << 8) | start;
444 } else if (base == 3) {
445 ret = ((start + 2) << 16) | ((start + 1) << 8) | start;
446 } else if (base == 4) {
447 ret = (((start + 3) << 24) | (start + 2) << 16) |
448 ((start + 1) << 8) | start;
449 }
450
451 return ret;
452}
453
/*
 * get_canvas_ex() - allocate or share a canvas table entry.
 * @type: caller-defined usage type; entries of the same type are shared
 *        between instances.
 * @id:   decoder instance id, tracked as a bit in canvas_stat[].id.
 *
 * First pass: reuse an entry of the same @type that this instance does
 * not already hold.  Second pass: claim a completely free entry
 * (type == 0).  Table slots 0x10-0x15 are always skipped (reserved for
 * rdma).  Returns the hardware canvas index, or -1 when the table is
 * exhausted.  Serialized by the canvas spinlock.
 */
static int get_canvas_ex(int type, int id)
{
	int i;
	unsigned long flags;

	flags = vdec_canvas_lock(vdec_core);

	for (i = 0; i < CANVAS_MAX_SIZE; i++) {
		/*0x10-0x15 has been used by rdma*/
		if ((i >= 0x10) && (i <= 0x15))
			continue;
		if ((canvas_stat[i].type == type) &&
			(canvas_stat[i].id & (1 << id)) == 0) {
			canvas_stat[i].canvas_used_flag++;
			canvas_stat[i].id |= (1 << id);
			if (debug & 4)
				pr_debug("get used canvas %d\n", i);
			vdec_canvas_unlock(vdec_core, flags);
			/* translate table slot -> hw canvas index:
			 * low slots map 1:1, the rest map into the
			 * AMVDEC_CANVAS_START_INDEX.. range */
			if (i < AMVDEC_CANVAS_MAX2 + 1)
				return i;
			else
				return (i + AMVDEC_CANVAS_START_INDEX - AMVDEC_CANVAS_MAX2 - 1);
		}
	}

	for (i = 0; i < CANVAS_MAX_SIZE; i++) {
		/*0x10-0x15 has been used by rdma*/
		if ((i >= 0x10) && (i <= 0x15))
			continue;
		if (canvas_stat[i].type == 0) {
			canvas_stat[i].type = type;
			canvas_stat[i].canvas_used_flag = 1;
			canvas_stat[i].id = (1 << id);
			if (debug & 4) {
				pr_debug("get canvas %d\n", i);
				pr_debug("canvas_used_flag %d\n",
					canvas_stat[i].canvas_used_flag);
				pr_debug("canvas_stat[i].id %d\n",
					canvas_stat[i].id);
			}
			vdec_canvas_unlock(vdec_core, flags);
			if (i < AMVDEC_CANVAS_MAX2 + 1)
				return i;
			else
				return (i + AMVDEC_CANVAS_START_INDEX - AMVDEC_CANVAS_MAX2 - 1);
		}
	}
	vdec_canvas_unlock(vdec_core, flags);

	pr_info("cannot get canvas\n");

	return -1;
}
507
/*
 * free_canvas_ex() - drop instance @id's reference on a canvas.
 * @index: hardware canvas index as returned by get_canvas_ex().
 * @id:    decoder instance id whose bit is cleared from the entry.
 *
 * Translates the hw index back to the canvas_stat[] slot, clears this
 * instance's id bit and decrements the share count; the slot's type is
 * reset to free once no instance holds it.  Out-of-range indices are
 * silently ignored.  Serialized by the canvas spinlock.
 */
static void free_canvas_ex(int index, int id)
{
	unsigned long flags;
	int offset;

	flags = vdec_canvas_lock(vdec_core);
	/* inverse of the index mapping in get_canvas_ex() */
	if (index >= 0 &&
		index < AMVDEC_CANVAS_MAX2 + 1)
		offset = index;
	else if ((index >= AMVDEC_CANVAS_START_INDEX) &&
		(index <= AMVDEC_CANVAS_MAX1))
		offset = index + AMVDEC_CANVAS_MAX2 + 1 - AMVDEC_CANVAS_START_INDEX;
	else {
		vdec_canvas_unlock(vdec_core, flags);
		return;
	}

	if ((canvas_stat[offset].canvas_used_flag > 0) &&
		(canvas_stat[offset].id & (1 << id))) {
		canvas_stat[offset].canvas_used_flag--;
		canvas_stat[offset].id &= ~(1 << id);
		if (canvas_stat[offset].canvas_used_flag == 0) {
			canvas_stat[offset].type = 0;
			canvas_stat[offset].id = 0;
		}
		if (debug & 4) {
			pr_debug("free index %d used_flag %d, type = %d, id = %d\n",
				offset,
				canvas_stat[offset].canvas_used_flag,
				canvas_stat[offset].type,
				canvas_stat[offset].id);
		}
	}
	vdec_canvas_unlock(vdec_core, flags);

	return;

}
546
/*
 * Reset the decoder-side DMC pipeline by writing bits 11-15 of
 * RESET7_REGISTER.  Called from vdec_enable_DMC() — required on G12B
 * before re-enabling DMC requests, per the comment at that call site.
 */
static void vdec_dmc_pipeline_reset(void)
{

	WRITE_RESET_REG(RESET7_REGISTER,
		(1 << 15) | (1 << 14) | (1 << 13) |
		(1 << 12) | (1 << 11));
}
554
/*
 * vdec_stop_armrisc() - halt the decoder's internal MCU.
 * @hw: VDEC_INPUT_TARGET_VLD or VDEC_INPUT_TARGET_HEVC.
 *
 * Clears MPSR/CPSR (or the HEVC equivalents) to stop the microcode,
 * then busy-waits — bounded by jiffies timeouts — for the IMEM and
 * LMEM DMA engines (bit 15 of *_DMA_CTRL) to go idle.
 * NOTE(review): the HEVC LMEM wait uses HZ/10 while the other waits
 * use HZ; looks intentional but is undocumented — confirm before
 * changing.
 */
static void vdec_stop_armrisc(int hw)
{
	ulong timeout = jiffies + HZ;

	if (hw == VDEC_INPUT_TARGET_VLD) {
		WRITE_VREG(MPSR, 0);
		WRITE_VREG(CPSR, 0);

		while (READ_VREG(IMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}

		timeout = jiffies + HZ;
		while (READ_VREG(LMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}
	} else if (hw == VDEC_INPUT_TARGET_HEVC) {
		WRITE_VREG(HEVC_MPSR, 0);
		WRITE_VREG(HEVC_CPSR, 0);

		while (READ_VREG(HEVC_IMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}

		timeout = jiffies + HZ/10;
		while (READ_VREG(HEVC_LMEM_DMA_CTRL) & 0x8000) {
			if (time_after(jiffies, timeout))
				break;
		}
	}
}
589
590static void vdec_disable_DMC(struct vdec_s *vdec)
591{
592 /*close first,then wait pedding end,timing suggestion from vlsi*/
593 struct vdec_input_s *input = &vdec->input;
594 unsigned long flags;
595 unsigned int mask = 0;
596
597 if (input->target == VDEC_INPUT_TARGET_VLD) {
598 mask = (1 << 13);
599 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
600 mask = (1 << 21);
601 } else if (input->target == VDEC_INPUT_TARGET_HEVC) {
602 mask = (1 << 4); /*hevc*/
603 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
604 mask |= (1 << 8); /*hevcb */
605 }
606
607 /* need to stop armrisc. */
608 if (!IS_ERR_OR_NULL(vdec->dev))
609 vdec_stop_armrisc(input->target);
610
611 spin_lock_irqsave(&vdec_spin_lock, flags);
612 codec_dmcbus_write(DMC_REQ_CTRL,
613 codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
614 spin_unlock_irqrestore(&vdec_spin_lock, flags);
615
616 while (!(codec_dmcbus_read(DMC_CHAN_STS)
617 & mask))
618 ;
619
620 pr_debug("%s input->target= 0x%x\n", __func__, input->target);
621}
622
/*
 * vdec_enable_DMC() - re-attach the decoder to the DMC bus.
 * @vdec: decoder whose input target selects the request-mask bits
 *        (same encoding as vdec_disable_DMC()).
 *
 * On G12B the DMC pipeline must be reset before requests are
 * re-enabled.  The request-control read-modify-write runs under
 * vdec_spin_lock.
 */
static void vdec_enable_DMC(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;
	unsigned long flags;
	unsigned int mask = 0;

	if (input->target == VDEC_INPUT_TARGET_VLD) {
		mask = (1 << 13);
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
			mask = (1 << 21);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		mask = (1 << 4); /*hevc*/
		if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
			mask |= (1 << 8); /*hevcb */
	}

	/*must to be reset the dmc pipeline if it's g12b.*/
	if (get_cpu_type() == AM_MESON_CPU_MAJOR_ID_G12B)
		vdec_dmc_pipeline_reset();

	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);
	pr_debug("%s input->target= 0x%x\n", __func__, input->target);
}
649
650
651
652static int vdec_get_hw_type(int value)
653{
654 int type;
655 switch (value) {
656 case VFORMAT_HEVC:
657 case VFORMAT_VP9:
658 case VFORMAT_AVS2:
659 type = CORE_MASK_HEVC;
660 break;
661
662 case VFORMAT_MPEG12:
663 case VFORMAT_MPEG4:
664 case VFORMAT_H264:
665 case VFORMAT_MJPEG:
666 case VFORMAT_REAL:
667 case VFORMAT_JPEG:
668 case VFORMAT_VC1:
669 case VFORMAT_AVS:
670 case VFORMAT_YUV:
671 case VFORMAT_H264MVC:
672 case VFORMAT_H264_4K2K:
673 case VFORMAT_H264_ENC:
674 case VFORMAT_JPEG_ENC:
675 type = CORE_MASK_VDEC_1;
676 break;
677
678 default:
679 type = -1;
680 }
681
682 return type;
683}
684
685
686static void vdec_save_active_hw(struct vdec_s *vdec)
687{
688 int type;
689
690 type = vdec_get_hw_type(vdec->port->vformat);
691
692 if (type == CORE_MASK_HEVC) {
693 vdec_core->active_hevc = vdec;
694 } else if (type == CORE_MASK_VDEC_1) {
695 vdec_core->active_vdec = vdec;
696 } else {
697 pr_info("save_active_fw wrong\n");
698 }
699}
700
701static void vdec_update_buff_status(void)
702{
703 struct vdec_core_s *core = vdec_core;
704 unsigned long flags;
705 struct vdec_s *vdec;
706
707 flags = vdec_inputbuff_lock(core);
708 core->buff_flag = 0;
709 core->stream_buff_flag = 0;
710 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
711 struct vdec_input_s *input = &vdec->input;
712 if (input_frame_based(input)) {
713 if (input->have_frame_num || input->eos)
714 core->buff_flag |= vdec->core_mask;
715 } else if (input_stream_based(input)) {
716 core->stream_buff_flag |= vdec->core_mask;
717 }
718 }
719 vdec_inputbuff_unlock(core, flags);
720}
721
722#if 0
723void vdec_update_streambuff_status(void)
724{
725 struct vdec_core_s *core = vdec_core;
726 struct vdec_s *vdec;
727
728 /* check streaming prepare level threshold if not EOS */
729 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
730 struct vdec_input_s *input = &vdec->input;
731 if (input && input_stream_based(input) && !input->eos &&
732 (vdec->need_more_data & VDEC_NEED_MORE_DATA)) {
733 u32 rp, wp, level;
734
735 rp = READ_PARSER_REG(PARSER_VIDEO_RP);
736 wp = READ_PARSER_REG(PARSER_VIDEO_WP);
737 if (wp < rp)
738 level = input->size + wp - rp;
739 else
740 level = wp - rp;
741 if ((level < input->prepare_level) &&
742 (pts_get_rec_num(PTS_TYPE_VIDEO,
743 vdec->input.total_rd_count) < 2)) {
744 break;
745 } else if (level > input->prepare_level) {
746 vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
747 if (debug & 8)
748 pr_info("vdec_flush_streambuff_status up\n");
749 vdec_up(vdec);
750 }
751 break;
752 }
753 }
754}
755EXPORT_SYMBOL(vdec_update_streambuff_status);
756#endif
757
758int vdec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
759{
760 if (vdec && vdec->dec_status &&
761 ((vdec->status == VDEC_STATUS_CONNECTED ||
762 vdec->status == VDEC_STATUS_ACTIVE)))
763 return vdec->dec_status(vdec, vstatus);
764
765 return 0;
766}
767EXPORT_SYMBOL(vdec_status);
768
769int vdec_set_trickmode(struct vdec_s *vdec, unsigned long trickmode)
770{
771 int r;
772
773 if (vdec->set_trickmode) {
774 r = vdec->set_trickmode(vdec, trickmode);
775
776 if ((r == 0) && (vdec->slave) && (vdec->slave->set_trickmode))
777 r = vdec->slave->set_trickmode(vdec->slave,
778 trickmode);
779 return r;
780 }
781
782 return -1;
783}
784EXPORT_SYMBOL(vdec_set_trickmode);
785
786int vdec_set_isreset(struct vdec_s *vdec, int isreset)
787{
788 vdec->is_reset = isreset;
789 pr_info("is_reset=%d\n", isreset);
790 if (vdec->set_isreset)
791 return vdec->set_isreset(vdec, isreset);
792 return 0;
793}
794EXPORT_SYMBOL(vdec_set_isreset);
795
796int vdec_set_dv_metawithel(struct vdec_s *vdec, int isdvmetawithel)
797{
798 vdec->dolby_meta_with_el = isdvmetawithel;
799 pr_info("isdvmetawithel=%d\n", isdvmetawithel);
800 return 0;
801}
802EXPORT_SYMBOL(vdec_set_dv_metawithel);
803
804void vdec_set_no_powerdown(int flag)
805{
806 no_powerdown = flag;
807 pr_info("no_powerdown=%d\n", no_powerdown);
808 return;
809}
810EXPORT_SYMBOL(vdec_set_no_powerdown);
811
/*
 * vdec_count_info() - update per-stream decode statistics.
 * @vs:     statistics block to update.
 * @err:    non-zero when the just-decoded frame had errors.
 * @offset: current stream byte offset; 0 skips frame/bitrate
 *          accounting entirely.
 *
 * frame_data records the absolute offset delta since the previous
 * call; bit_rate is re-estimated each time samp_cnt (accumulated
 * frame_dur) crosses 96000*2 — presumably a ~2s window in 90kHz
 * ticks, making bit_rate bytes-per-second; TODO confirm units
 * against callers.
 */
void vdec_count_info(struct vdec_info *vs, unsigned int err,
	unsigned int offset)
{
	if (err)
		vs->error_frame_count++;
	if (offset) {
		if (0 == vs->frame_count) {
			vs->offset = 0;
			vs->samp_cnt = 0;
		}
		vs->frame_data = offset > vs->total_data ?
			offset - vs->total_data : vs->total_data - offset;
		vs->total_data = offset;
		if (vs->samp_cnt < 96000 * 2) { /* 2s */
			if (0 == vs->samp_cnt)
				vs->offset = offset;
			vs->samp_cnt += vs->frame_dur;
		} else {
			vs->bit_rate = (offset - vs->offset) / 2;
			/*pr_info("bitrate : %u\n",vs->bit_rate);*/
			vs->samp_cnt = 0;
		}
		vs->frame_count++;
	}
	/*pr_info("size : %u, offset : %u, dur : %u, cnt : %u\n",
	vs->offset,offset,vs->frame_dur,vs->samp_cnt);*/
	return;
}
EXPORT_SYMBOL(vdec_count_info);
/* 4K decode is supported on every chip except the GXL 805X package. */
int vdec_is_support_4k(void)
{
	if (is_meson_gxl_package_805X())
		return 0;

	return 1;
}
EXPORT_SYMBOL(vdec_is_support_4k);
846
847/*
848 * clk_config:
849 *0:default
850 *1:no gp0_pll;
851 *2:always used gp0_pll;
852 *>=10:fixed n M clk;
853 *== 100 , 100M clks;
854 */
/* Return the current vdec clock configuration (see the table above). */
unsigned int get_vdec_clk_config_settings(void)
{
	return clk_config;
}
/* Override the vdec clock configuration (see the table above). */
void update_vdec_clk_config_settings(unsigned int config)
{
	clk_config = config;
}
EXPORT_SYMBOL(update_vdec_clk_config_settings);
864
865static bool hevc_workaround_needed(void)
866{
867 return (get_cpu_major_id() == AM_MESON_CPU_MAJOR_ID_GXBB) &&
868 (get_meson_cpu_version(MESON_CPU_VERSION_LVL_MINOR)
869 == GXBB_REV_A_MINOR);
870}
871
872struct device *get_codec_cma_device(void)
873{
874 return vdec_core->cma_dev;
875}
876
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
/*
 * Platform driver names indexed by vformat.  Each format occupies a
 * pair: [2 * format] is the legacy single-mode driver and
 * [2 * format + 1] the multi-instance driver (see get_dev_name()).
 * The encoder entries repeat the same name in both slots.
 */
static const char * const vdec_device_name[] = {
	"amvdec_mpeg12", "ammvdec_mpeg12",
	"amvdec_mpeg4", "ammvdec_mpeg4",
	"amvdec_h264", "ammvdec_h264",
	"amvdec_mjpeg", "ammvdec_mjpeg",
	"amvdec_real", "ammvdec_real",
	"amjpegdec", "ammjpegdec",
	"amvdec_vc1", "ammvdec_vc1",
	"amvdec_avs", "ammvdec_avs",
	"amvdec_yuv", "ammvdec_yuv",
	"amvdec_h264mvc", "ammvdec_h264mvc",
	"amvdec_h264_4k2k", "ammvdec_h264_4k2k",
	"amvdec_h265", "ammvdec_h265",
	"amvenc_avc", "amvenc_avc",
	"jpegenc", "jpegenc",
	"amvdec_vp9", "ammvdec_vp9",
	"amvdec_avs2", "ammvdec_avs2"
};


#else

/* Platform driver names indexed by vformat (single-instance builds). */
static const char * const vdec_device_name[] = {
	"amvdec_mpeg12",
	"amvdec_mpeg4",
	"amvdec_h264",
	"amvdec_mjpeg",
	"amvdec_real",
	"amjpegdec",
	"amvdec_vc1",
	"amvdec_avs",
	"amvdec_yuv",
	"amvdec_h264mvc",
	"amvdec_h264_4k2k",
	"amvdec_h265",
	"amvenc_avc",
	"jpegenc",
	"amvdec_vp9",
	"amvdec_avs2"
};

#endif
920
921/*
922 * Only support time sliced decoding for frame based input,
923 * so legacy decoder can exist with time sliced decoder.
924 */
/*
 * Look up the platform driver name for @format.
 * @use_legacy_vdec: request the single-mode driver when available.
 */
static const char *get_dev_name(bool use_legacy_vdec, int format)
{
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	/* names are stored in {single, mult} pairs; debugflags bit 3
	 * forces the multi-instance name even for legacy requests */
	if (use_legacy_vdec && (debugflags & 0x8) == 0)
		return vdec_device_name[format * 2];
	else
		return vdec_device_name[format * 2 + 1];
#else
	return vdec_device_name[format];
#endif
}
936
#ifdef VDEC_DEBUG_SUPPORT
/* Profiling clock source: sched_clock(), in nanoseconds. */
static u64 get_current_clk(void)
{
	/*struct timespec xtime = current_kernel_time();
	u64 usec = xtime.tv_sec * 1000000;
	usec += xtime.tv_nsec / 1000;
	*/
	u64 usec = sched_clock();
	return usec;
}

/* Bump the per-core counter for every core selected in @mask. */
static void inc_profi_count(unsigned long mask, u32 *count)
{
	enum vdec_type_e type;

	for (type = VDEC_1; type < VDEC_MAX; type++) {
		if (mask & (1 << type))
			count[type]++;
	}
}

/*
 * Mark a run start at @clk for each core in @mask.  The first call
 * also establishes the profiling epoch (profile_start_clk); total_clk
 * tracks elapsed time since that epoch.
 */
static void update_profi_clk_run(struct vdec_s *vdec,
	unsigned long mask, u64 clk)
{
	enum vdec_type_e type;

	for (type = VDEC_1; type < VDEC_MAX; type++) {
		if (mask & (1 << type)) {
			vdec->start_run_clk[type] = clk;
			if (vdec->profile_start_clk[type] == 0)
				vdec->profile_start_clk[type] = clk;
			vdec->total_clk[type] = clk
				- vdec->profile_start_clk[type];
			/*pr_info("set start_run_clk %ld\n",
			vdec->start_run_clk);*/

		}
	}
}

/*
 * Accumulate the run time ended at @clk for each core in @mask.
 * Logs an error when a stop arrives without a matching run start.
 */
static void update_profi_clk_stop(struct vdec_s *vdec,
	unsigned long mask, u64 clk)
{
	enum vdec_type_e type;

	for (type = VDEC_1; type < VDEC_MAX; type++) {
		if (mask & (1 << type)) {
			if (vdec->start_run_clk[type] == 0)
				pr_info("error, start_run_clk[%d] not set\n", type);

			/*pr_info("update run_clk type %d, %ld, %ld, %ld\n",
			type,
			clk,
			vdec->start_run_clk[type],
			vdec->run_clk[type]);*/
			vdec->run_clk[type] +=
				(clk - vdec->start_run_clk[type]);
		}
	}
}

#endif
999
1000int vdec_set_decinfo(struct vdec_s *vdec, struct dec_sysinfo *p)
1001{
1002 if (copy_from_user((void *)&vdec->sys_info_store, (void *)p,
1003 sizeof(struct dec_sysinfo)))
1004 return -EFAULT;
1005
1006 /* force switch to mult instance if supports this profile. */
1007 if ((vdec->type == VDEC_TYPE_SINGLE) &&
1008 !disable_switch_single_to_mult) {
1009 const char *str = NULL;
1010 char fmt[16] = {0};
1011
1012 str = strchr(get_dev_name(false, vdec->format), '_');
1013 if (!str)
1014 return -1;
1015
1016 sprintf(fmt, "m%s", ++str);
1017 if (is_support_profile(fmt) &&
1018 vdec->sys_info->format != VIDEO_DEC_FORMAT_H263)
1019 vdec->type = VDEC_TYPE_STREAM_PARSER;
1020 }
1021
1022 return 0;
1023}
1024EXPORT_SYMBOL(vdec_set_decinfo);
1025
1026/* construct vdec strcture */
1027struct vdec_s *vdec_create(struct stream_port_s *port,
1028 struct vdec_s *master)
1029{
1030 struct vdec_s *vdec;
1031 int type = VDEC_TYPE_SINGLE;
1032 int id;
1033 if (is_mult_inc(port->type))
1034 type = (port->type & PORT_TYPE_FRAME) ?
1035 VDEC_TYPE_FRAME_BLOCK :
1036 VDEC_TYPE_STREAM_PARSER;
1037
1038 id = ida_simple_get(&vdec_core->ida,
1039 0, MAX_INSTANCE_MUN, GFP_KERNEL);
1040 if (id < 0) {
1041 pr_info("vdec_create request id failed!ret =%d\n", id);
1042 return NULL;
1043 }
1044 vdec = vzalloc(sizeof(struct vdec_s));
1045
1046 /* TBD */
1047 if (vdec) {
1048 vdec->magic = 0x43454456;
1049 vdec->id = -1;
1050 vdec->type = type;
1051 vdec->port = port;
1052 vdec->sys_info = &vdec->sys_info_store;
1053
1054 INIT_LIST_HEAD(&vdec->list);
1055
1056 atomic_inc(&vdec_core->vdec_nr);
1057 vdec->id = id;
1058 vdec_input_init(&vdec->input, vdec);
1059 vdec->input.vdec_is_input_frame_empty = vdec_is_input_frame_empty;
1060 vdec->input.vdec_up = vdec_up;
1061 if (master) {
1062 vdec->master = master;
1063 master->slave = vdec;
1064 master->sched = 1;
1065 }
1066 if (enable_mvdec_info) {
1067 vdec->mvfrm = (struct vdec_frames_s *)
1068 vzalloc(sizeof(struct vdec_frames_s));
1069 if (!vdec->mvfrm)
1070 pr_err("vzalloc: vdec_frames_s failed\n");
1071 }
1072 }
1073
1074 pr_debug("vdec_create instance %p, total %d\n", vdec,
1075 atomic_read(&vdec_core->vdec_nr));
1076
1077 //trace_vdec_create(vdec); /*DEBUG_TMP*/
1078
1079 return vdec;
1080}
1081EXPORT_SYMBOL(vdec_create);
1082
1083int vdec_set_format(struct vdec_s *vdec, int format)
1084{
1085 vdec->format = format;
1086 vdec->port_flag |= PORT_FLAG_VFORMAT;
1087
1088 if (vdec->slave) {
1089 vdec->slave->format = format;
1090 vdec->slave->port_flag |= PORT_FLAG_VFORMAT;
1091 }
1092
1093 //trace_vdec_set_format(vdec, format);/*DEBUG_TMP*/
1094
1095 return 0;
1096}
1097EXPORT_SYMBOL(vdec_set_format);
1098
1099int vdec_set_pts(struct vdec_s *vdec, u32 pts)
1100{
1101 vdec->pts = pts;
1102 vdec->pts64 = div64_u64((u64)pts * 100, 9);
1103 vdec->pts_valid = true;
1104 //trace_vdec_set_pts(vdec, (u64)pts);/*DEBUG_TMP*/
1105 return 0;
1106}
1107EXPORT_SYMBOL(vdec_set_pts);
1108
1109void vdec_set_timestamp(struct vdec_s *vdec, u64 timestamp)
1110{
1111 vdec->timestamp = timestamp;
1112 vdec->timestamp_valid = true;
1113}
1114EXPORT_SYMBOL(vdec_set_timestamp);
1115
1116int vdec_set_pts64(struct vdec_s *vdec, u64 pts64)
1117{
1118 vdec->pts64 = pts64;
1119 vdec->pts = (u32)div64_u64(pts64 * 9, 100);
1120 vdec->pts_valid = true;
1121
1122 //trace_vdec_set_pts64(vdec, pts64);/*DEBUG_TMP*/
1123 return 0;
1124}
1125EXPORT_SYMBOL(vdec_set_pts64);
1126
/* Return the decoder's current scheduler state (VDEC_STATUS_*). */
int vdec_get_status(struct vdec_s *vdec)
{
	return vdec->status;
}
EXPORT_SYMBOL(vdec_get_status);

/* Return the number of input frames currently queued on this vdec. */
int vdec_get_frame_num(struct vdec_s *vdec)
{
	return vdec->input.have_frame_num;
}
EXPORT_SYMBOL(vdec_get_frame_num);
1138
/* Record the decoder's current scheduler state (VDEC_STATUS_*). */
void vdec_set_status(struct vdec_s *vdec, int status)
{
	//trace_vdec_set_status(vdec, status);/*DEBUG_TMP*/
	vdec->status = status;
}
EXPORT_SYMBOL(vdec_set_status);

/* Record the scheduler state the decoder should transition to next. */
void vdec_set_next_status(struct vdec_s *vdec, int status)
{
	//trace_vdec_set_next_status(vdec, status);/*DEBUG_TMP*/
	vdec->next_status = status;
}
EXPORT_SYMBOL(vdec_set_next_status);
1152
/* Select the video output path used for frame-based decoding. */
int vdec_set_video_path(struct vdec_s *vdec, int video_path)
{
	vdec->frame_base_video_path = video_path;
	return 0;
}
EXPORT_SYMBOL(vdec_set_video_path);

/* Bind the vframe receiver instance this decoder feeds. */
int vdec_set_receive_id(struct vdec_s *vdec, int receive_id)
{
	vdec->vf_receiver_inst = receive_id;
	return 0;
}
EXPORT_SYMBOL(vdec_set_receive_id);
1166
/* add frame data to input chain */
int vdec_write_vframe(struct vdec_s *vdec, const char *buf, size_t count)
{
	return vdec_input_add_frame(&vdec->input, buf, count);
}
EXPORT_SYMBOL(vdec_write_vframe);

/* Add a DMA-backed frame (physical address + buffer handle) to the
 * input chain — presumably avoids the CPU copy done by
 * vdec_write_vframe(); confirm in vdec_input. */
int vdec_write_vframe_with_dma(struct vdec_s *vdec,
	ulong addr, size_t count, u32 handle)
{
	return vdec_input_add_frame_with_dma(&vdec->input, addr, count, handle);
}
EXPORT_SYMBOL(vdec_write_vframe_with_dma);
1180
1181/* add a work queue thread for vdec*/
1182void vdec_schedule_work(struct work_struct *work)
1183{
1184 if (vdec_core->vdec_core_wq)
1185 queue_work(vdec_core->vdec_core_wq, work);
1186 else
1187 schedule_work(work);
1188}
1189EXPORT_SYMBOL(vdec_schedule_work);
1190
1191static struct vdec_s *vdec_get_associate(struct vdec_s *vdec)
1192{
1193 if (vdec->master)
1194 return vdec->master;
1195 else if (vdec->slave)
1196 return vdec->slave;
1197 return NULL;
1198}
1199
/*
 * vdec_sync_input_read() - publish the decoder's read pointer back to
 * the stream parser (stream-based inputs only).
 *
 * For dual (master/slave, dual-layer) decoding the parser RP may only
 * advance to the slower of the two readers: wrap counts
 * (stream_cookie) are compared first, then the raw read pointers, and
 * the associate's swapped-out RP is restored whenever this side is
 * ahead.  NOTE(review): for HEVC the comparison carries an extra bit
 * via 0x80000000 of streaming_rp — assumes this matches the input
 * layer's wrap bookkeeping; confirm against vdec_input.
 */
static void vdec_sync_input_read(struct vdec_s *vdec)
{
	if (!vdec_stream_based(vdec))
		return;

	if (vdec_dual(vdec)) {
		u32 me, other;
		if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
			/* compare wrap counts before raw pointers */
			me = READ_VREG(VLD_MEM_VIFIFO_WRAP_COUNT);
			other =
				vdec_get_associate(vdec)->input.stream_cookie;
			if (me > other)
				return;
			else if (me == other) {
				me = READ_VREG(VLD_MEM_VIFIFO_RP);
				other =
					vdec_get_associate(vdec)->input.swap_rp;
				if (me > other) {
					WRITE_PARSER_REG(PARSER_VIDEO_RP,
						vdec_get_associate(vdec)->
						input.swap_rp);
					return;
				}
			}
			WRITE_PARSER_REG(PARSER_VIDEO_RP,
				READ_VREG(VLD_MEM_VIFIFO_RP));
		} else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
			me = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
			if (((me & 0x80000000) == 0) &&
				(vdec->input.streaming_rp & 0x80000000))
				me += 1ULL << 32;
			other = vdec_get_associate(vdec)->input.streaming_rp;
			if (me > other) {
				WRITE_PARSER_REG(PARSER_VIDEO_RP,
					vdec_get_associate(vdec)->
					input.swap_rp);
				return;
			}

			WRITE_PARSER_REG(PARSER_VIDEO_RP,
				READ_VREG(HEVC_STREAM_RD_PTR));
		}
	} else if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
		WRITE_PARSER_REG(PARSER_VIDEO_RP,
			READ_VREG(VLD_MEM_VIFIFO_RP));
	} else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
		WRITE_PARSER_REG(PARSER_VIDEO_RP,
			READ_VREG(HEVC_STREAM_RD_PTR));
	}
}
1250
/*
 * Pull the parser-side write pointer (PARSER_VIDEO_WP) into the HW
 * stream FIFO write pointer so the decoder sees newly parsed data.
 * Stream-based input only.
 */
static void vdec_sync_input_write(struct vdec_s *vdec)
{
	if (!vdec_stream_based(vdec))
		return;

	if (vdec->input.target == VDEC_INPUT_TARGET_VLD) {
		WRITE_VREG(VLD_MEM_VIFIFO_WP,
			READ_PARSER_REG(PARSER_VIDEO_WP));
	} else if (vdec->input.target == VDEC_INPUT_TARGET_HEVC) {
		WRITE_VREG(HEVC_STREAM_WR_PTR,
			READ_PARSER_REG(PARSER_VIDEO_WP));
	}
}
1264
1265/*
1266 *get next frame from input chain
1267 */
1268/*
1269 *THE VLD_FIFO is 512 bytes and Video buffer level
1270 * empty interrupt is set to 0x80 bytes threshold
1271 */
1272#define VLD_PADDING_SIZE 1024
1273#define HEVC_PADDING_SIZE (1024*16)
/*
 * Program the HW stream/FIFO registers for the next unit of input.
 *
 * Frame-based input: *p receives the next pending chunk and its size
 * is returned (-1 when no chunk is queued). Stream-based input: *p is
 * set to NULL and the number of bytes currently available is returned;
 * the read-side context is restored from the swap page when one was
 * previously saved by vdec_save_input_context().
 */
int vdec_prepare_input(struct vdec_s *vdec, struct vframe_chunk_s **p)
{
	struct vdec_input_s *input = &vdec->input;
	struct vframe_chunk_s *chunk = NULL;
	struct vframe_block_list_s *block = NULL;
	int dummy;

	/* full reset to HW input */
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

		/* reset VLD fifo for all vdec */
		WRITE_VREG(DOS_SW_RESET0, (1<<5) | (1<<4) | (1<<3));
		WRITE_VREG(DOS_SW_RESET0, 0);

		/* dummy read to let the reset settle; value unused */
		dummy = READ_RESET_REG(RESET0_REGISTER);
		WRITE_VREG(POWER_CTL_VLD, 1 << 4);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
#if 0
		/*move to driver*/
		if (input_frame_based(input))
			WRITE_VREG(HEVC_STREAM_CONTROL, 0);

		/*
		 * 2: assist
		 * 3: parser
		 * 4: parser_state
		 * 8: dblk
		 * 11:mcpu
		 * 12:ccpu
		 * 13:ddr
		 * 14:iqit
		 * 15:ipp
		 * 17:qdct
		 * 18:mpred
		 * 19:sao
		 * 24:hevc_afifo
		 */
		WRITE_VREG(DOS_SW_RESET3,
			(1<<3)|(1<<4)|(1<<8)|(1<<11)|(1<<12)|(1<<14)|(1<<15)|
			(1<<17)|(1<<18)|(1<<19));
		WRITE_VREG(DOS_SW_RESET3, 0);
#endif
	}

	/*
	 *setup HW decoder input buffer (VLD context)
	 * based on input->type and input->target
	 */
	if (input_frame_based(input)) {
		chunk = vdec_input_next_chunk(&vdec->input);

		if (chunk == NULL) {
			*p = NULL;
			return -1;
		}

		block = chunk->block;

		if (input->target == VDEC_INPUT_TARGET_VLD) {
			WRITE_VREG(VLD_MEM_VIFIFO_START_PTR, block->start);
			WRITE_VREG(VLD_MEM_VIFIFO_END_PTR, block->start +
				block->size - 8);
			WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
				round_down(block->start + chunk->offset,
					VDEC_FIFO_ALIGN));

			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

			/* set to manual mode */
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
			WRITE_VREG(VLD_MEM_VIFIFO_RP,
				round_down(block->start + chunk->offset,
					VDEC_FIFO_ALIGN));
			/* pad WP past the chunk so the buffer-level-empty
			 * interrupt does not fire early; wrap within block
			 */
			dummy = chunk->offset + chunk->size +
				VLD_PADDING_SIZE;
			if (dummy >= block->size)
				dummy -= block->size;
			WRITE_VREG(VLD_MEM_VIFIFO_WP,
				round_down(block->start + dummy,
					VDEC_FIFO_ALIGN));

			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 3);
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);

			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
				(0x11 << 16) | (1<<10) | (7<<3));

		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_START_ADDR, block->start);
			WRITE_VREG(HEVC_STREAM_END_ADDR, block->start +
				block->size);
			WRITE_VREG(HEVC_STREAM_RD_PTR, block->start +
				chunk->offset);
			dummy = chunk->offset + chunk->size +
				HEVC_PADDING_SIZE;
			if (dummy >= block->size)
				dummy -= block->size;
			WRITE_VREG(HEVC_STREAM_WR_PTR,
				round_down(block->start + dummy,
					VDEC_FIFO_ALIGN));

			/* set endian */
			SET_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
		}

		*p = chunk;
		return chunk->size;

	} else {
		/* stream based */
		u32 rp = 0, wp = 0, fifo_len = 0;
		int size;
		bool swap_valid = input->swap_valid;
		unsigned long swap_page_phys = input->swap_page_phys;

		if (vdec_dual(vdec) &&
			((vdec->flag & VDEC_FLAG_SELF_INPUT_CONTEXT) == 0)) {
			/* keep using previous input context */
			struct vdec_s *master = (vdec->slave) ?
				vdec : vdec->master;
			if (master->input.last_swap_slave) {
				swap_valid = master->slave->input.swap_valid;
				swap_page_phys =
					master->slave->input.swap_page_phys;
			} else {
				swap_valid = master->input.swap_valid;
				swap_page_phys = master->input.swap_page_phys;
			}
		}

		if (swap_valid) {
			if (input->target == VDEC_INPUT_TARGET_VLD) {
				if (vdec->format == VFORMAT_H264)
					SET_VREG_MASK(POWER_CTL_VLD,
						(1 << 9));

				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

				/* restore read side */
				WRITE_VREG(VLD_MEM_SWAP_ADDR,
					swap_page_phys);
				WRITE_VREG(VLD_MEM_SWAP_CTL, 1);

				/* busy-wait for swap engine done (bit 7) */
				while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
					;
				WRITE_VREG(VLD_MEM_SWAP_CTL, 0);

				/* restore wrap count */
				WRITE_VREG(VLD_MEM_VIFIFO_WRAP_COUNT,
					input->stream_cookie);

				rp = READ_VREG(VLD_MEM_VIFIFO_RP);
				fifo_len = READ_VREG(VLD_MEM_VIFIFO_LEVEL);

				/* enable */
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
					(0x11 << 16) | (1<<10));

				/* sync with front end */
				vdec_sync_input_read(vdec);
				vdec_sync_input_write(vdec);

				wp = READ_VREG(VLD_MEM_VIFIFO_WP);
			} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
				SET_VREG_MASK(HEVC_STREAM_CONTROL, 1);

				/* restore read side */
				WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
					swap_page_phys);
				WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 1);

				while (READ_VREG(HEVC_STREAM_SWAP_CTRL)
					& (1<<7))
					;
				WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);

				/* restore stream offset */
				WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
					input->stream_cookie);

				rp = READ_VREG(HEVC_STREAM_RD_PTR);
				fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
						>> 16) & 0x7f;


				/* enable */

				/* sync with front end */
				vdec_sync_input_read(vdec);
				vdec_sync_input_write(vdec);

				wp = READ_VREG(HEVC_STREAM_WR_PTR);

				/*pr_info("vdec: restore context\r\n");*/
			}

		} else {
			/* no saved context: program the FIFO from scratch */
			if (input->target == VDEC_INPUT_TARGET_VLD) {
				WRITE_VREG(VLD_MEM_VIFIFO_START_PTR,
					input->start);
				WRITE_VREG(VLD_MEM_VIFIFO_END_PTR,
					input->start + input->size - 8);
				WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
					input->start);

				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

				/* set to manual mode */
				WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
				WRITE_VREG(VLD_MEM_VIFIFO_RP, input->start);
				WRITE_VREG(VLD_MEM_VIFIFO_WP,
					READ_PARSER_REG(PARSER_VIDEO_WP));

				rp = READ_VREG(VLD_MEM_VIFIFO_RP);

				/* enable */
				WRITE_VREG(VLD_MEM_VIFIFO_CONTROL,
					(0x11 << 16) | (1<<10));

				wp = READ_VREG(VLD_MEM_VIFIFO_WP);

			} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
				WRITE_VREG(HEVC_STREAM_START_ADDR,
					input->start);
				WRITE_VREG(HEVC_STREAM_END_ADDR,
					input->start + input->size);
				WRITE_VREG(HEVC_STREAM_RD_PTR,
					input->start);
				WRITE_VREG(HEVC_STREAM_WR_PTR,
					READ_PARSER_REG(PARSER_VIDEO_WP));

				rp = READ_VREG(HEVC_STREAM_RD_PTR);
				wp = READ_VREG(HEVC_STREAM_WR_PTR);
				fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
						>> 16) & 0x7f;

				/* enable */
			}
		}
		*p = NULL;
		/* available bytes = occupancy between rp and wp (with
		 * wrap) plus data already prefetched into the HW FIFO
		 */
		if (wp >= rp)
			size = wp - rp + fifo_len;
		else
			size = wp + input->size - rp + fifo_len;
		if (size < 0) {
			pr_info("%s error: input->size %x wp %x rp %x fifo_len %x => size %x\r\n",
				__func__, input->size, wp, rp, fifo_len, size);
			size = 0;
		}
		return size;
	}
}
EXPORT_SYMBOL(vdec_prepare_input);
1530
/*
 * Re-arm the HW input FIFO after vdec_prepare_input() so the decoder
 * core starts (or resumes) consuming data. Only acts while the vdec is
 * in ACTIVE state.
 */
void vdec_enable_input(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

	if (vdec->status != VDEC_STATUS_ACTIVE)
		return;

	if (input->target == VDEC_INPUT_TARGET_VLD)
		SET_VREG_MASK(VLD_MEM_VIFIFO_CONTROL, (1<<2) | (1<<1));
	else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		SET_VREG_MASK(HEVC_STREAM_CONTROL, 1);
		/* endian-swap bits (6:4): cleared for stream input,
		 * set for frame-based input (matches vdec_prepare_input)
		 */
		if (vdec_stream_based(vdec))
			CLEAR_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
		else
			SET_VREG_MASK(HEVC_STREAM_CONTROL, 7 << 4);
		SET_VREG_MASK(HEVC_STREAM_FIFO_CTL, (1<<29));
	}
}
EXPORT_SYMBOL(vdec_enable_input);
1550
1551int vdec_set_input_buffer(struct vdec_s *vdec, u32 start, u32 size)
1552{
1553 int r = vdec_input_set_buffer(&vdec->input, start, size);
1554
1555 if (r)
1556 return r;
1557
1558 if (vdec->slave)
1559 r = vdec_input_set_buffer(&vdec->slave->input, start, size);
1560
1561 return r;
1562}
1563EXPORT_SYMBOL(vdec_set_input_buffer);
1564
1565/*
1566 * vdec_eos returns the possibility that there are
1567 * more input can be used by decoder through vdec_prepare_input
1568 * Note: this function should be called prior to vdec_vframe_dirty
1569 * by decoder driver to determine if EOS happens for stream based
1570 * decoding when there is no sufficient data for a frame
1571 */
1572bool vdec_has_more_input(struct vdec_s *vdec)
1573{
1574 struct vdec_input_s *input = &vdec->input;
1575
1576 if (!input->eos)
1577 return true;
1578
1579 if (input_frame_based(input))
1580 return vdec_input_next_input_chunk(input) != NULL;
1581 else {
1582 if (input->target == VDEC_INPUT_TARGET_VLD)
1583 return READ_VREG(VLD_MEM_VIFIFO_WP) !=
1584 READ_PARSER_REG(PARSER_VIDEO_WP);
1585 else {
1586 return (READ_VREG(HEVC_STREAM_WR_PTR) & ~0x3) !=
1587 (READ_PARSER_REG(PARSER_VIDEO_WP) & ~0x3);
1588 }
1589 }
1590}
1591EXPORT_SYMBOL(vdec_has_more_input);
1592
1593void vdec_set_prepare_level(struct vdec_s *vdec, int level)
1594{
1595 vdec->input.prepare_level = level;
1596}
1597EXPORT_SYMBOL(vdec_set_prepare_level);
1598
1599void vdec_set_flag(struct vdec_s *vdec, u32 flag)
1600{
1601 vdec->flag = flag;
1602}
1603EXPORT_SYMBOL(vdec_set_flag);
1604
1605void vdec_set_eos(struct vdec_s *vdec, bool eos)
1606{
1607 struct vdec_core_s *core = vdec_core;
1608
1609 vdec->input.eos = eos;
1610
1611 if (vdec->slave)
1612 vdec->slave->input.eos = eos;
1613 up(&core->sem);
1614}
1615EXPORT_SYMBOL(vdec_set_eos);
1616
#ifdef VDEC_DEBUG_SUPPORT
/* Debug helper: set the global step_mode bit mask to 0x1ff. */
void vdec_set_step_mode(void)
{
	step_mode = 0x1ff;
}
EXPORT_SYMBOL(vdec_set_step_mode);
#endif
1624
1625void vdec_set_next_sched(struct vdec_s *vdec, struct vdec_s *next_vdec)
1626{
1627 if (vdec && next_vdec) {
1628 vdec->sched = 0;
1629 next_vdec->sched = 1;
1630 }
1631}
1632EXPORT_SYMBOL(vdec_set_next_sched);
1633
1634/*
1635 * Swap Context: S0 S1 S2 S3 S4
1636 * Sample sequence: M S M M S
1637 * Master Context: S0 S0 S2 S3 S3
1638 * Slave context: NA S1 S1 S2 S4
1639 * ^
1640 * ^
1641 * ^
1642 * the tricky part
1643 * If there are back to back decoding of master or slave
1644 * then the context of the counter part should be updated
1645 * with current decoder. In this example, S1 should be
1646 * updated to S2.
1647 * This is done by swap the swap_page and related info
1648 * between two layers.
1649 */
1650static void vdec_borrow_input_context(struct vdec_s *vdec)
1651{
1652 struct page *swap_page;
1653 unsigned long swap_page_phys;
1654 struct vdec_input_s *me;
1655 struct vdec_input_s *other;
1656
1657 if (!vdec_dual(vdec))
1658 return;
1659
1660 me = &vdec->input;
1661 other = &vdec_get_associate(vdec)->input;
1662
1663 /* swap the swap_context, borrow counter part's
1664 * swap context storage and update all related info.
1665 * After vdec_vframe_dirty, vdec_save_input_context
1666 * will be called to update current vdec's
1667 * swap context
1668 */
1669 swap_page = other->swap_page;
1670 other->swap_page = me->swap_page;
1671 me->swap_page = swap_page;
1672
1673 swap_page_phys = other->swap_page_phys;
1674 other->swap_page_phys = me->swap_page_phys;
1675 me->swap_page_phys = swap_page_phys;
1676
1677 other->swap_rp = me->swap_rp;
1678 other->streaming_rp = me->streaming_rp;
1679 other->stream_cookie = me->stream_cookie;
1680 other->swap_valid = me->swap_valid;
1681}
1682
/*
 * Called by a decoder driver when it has finished consuming a frame's
 * data: mark the chunk consumed and, for stream input, flag the swap
 * context for saving and sync HW pointers with the parser front end.
 */
void vdec_vframe_dirty(struct vdec_s *vdec, struct vframe_chunk_s *chunk)
{
	if (chunk)
		chunk->flag |= VFRAME_CHUNK_FLAG_CONSUMED;

	if (vdec_stream_based(vdec)) {
		vdec->input.swap_needed = true;

		if (vdec_dual(vdec)) {
			/* two dirties in a row on the same layer means
			 * back-to-back decoding: borrow the counterpart's
			 * swap context (see vdec_borrow_input_context)
			 */
			vdec_get_associate(vdec)->input.dirty_count = 0;
			vdec->input.dirty_count++;
			if (vdec->input.dirty_count > 1) {
				vdec->input.dirty_count = 1;
				vdec_borrow_input_context(vdec);
			}
		}

		/* for stream based mode, we update read and write pointer
		 * also in case decoder wants to keep working on decoding
		 * for more frames while input front end has more data
		 */
		vdec_sync_input_read(vdec);
		vdec_sync_input_write(vdec);

		vdec->need_more_data |= VDEC_NEED_MORE_DATA_DIRTY;
		vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
	}
}
EXPORT_SYMBOL(vdec_vframe_dirty);
1712
1713bool vdec_need_more_data(struct vdec_s *vdec)
1714{
1715 if (vdec_stream_based(vdec))
1716 return vdec->need_more_data & VDEC_NEED_MORE_DATA;
1717
1718 return false;
1719}
1720EXPORT_SYMBOL(vdec_need_more_data);
1721
1722
/*
 * Gate HEVC (and HEVCB on G12A and later) DDR requests via the DMC,
 * then busy-wait until the channel status reports the mask, ensuring
 * no DDR traffic is in flight. Called before saving the stream swap
 * context in vdec_save_input_context().
 */
void hevc_wait_ddr(void)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 4; /* hevc */
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask |= (1 << 8); /* hevcb */

	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* NOTE(review): unbounded busy-wait — assumes the DMC always
	 * quiesces; confirm no timeout is needed here
	 */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;
}
1741
/*
 * Save the HW stream input state (swap page contents, wrap count /
 * byte count, read pointer) into this vdec's input context so a later
 * vdec_prepare_input() can restore it. Only does the save for stream
 * based input that flagged swap_needed in vdec_vframe_dirty().
 */
void vdec_save_input_context(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_SAVE_INPUT);
#endif

	if (input->target == VDEC_INPUT_TARGET_VLD)
		WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1<<15);

	if (input_stream_based(input) && (input->swap_needed)) {
		if (input->target == VDEC_INPUT_TARGET_VLD) {
			/* dump the VLD context to the swap page and
			 * busy-wait for completion (bit 7)
			 */
			WRITE_VREG(VLD_MEM_SWAP_ADDR,
				input->swap_page_phys);
			WRITE_VREG(VLD_MEM_SWAP_CTL, 3);
			while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
				;
			WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
			vdec->input.stream_cookie =
				READ_VREG(VLD_MEM_VIFIFO_WRAP_COUNT);
			vdec->input.swap_rp =
				READ_VREG(VLD_MEM_VIFIFO_RP);
			/* bytes consumed = wraps * size + rp - unread */
			vdec->input.total_rd_count =
				(u64)vdec->input.stream_cookie *
				vdec->input.size + vdec->input.swap_rp -
				READ_VREG(VLD_MEM_VIFIFO_BYTES_AVAIL);
		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
				input->swap_page_phys);
			WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 3);

			while (READ_VREG(HEVC_STREAM_SWAP_CTRL) & (1<<7))
				;
			WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);

			vdec->input.stream_cookie =
				READ_VREG(HEVC_SHIFT_BYTE_COUNT);
			vdec->input.swap_rp =
				READ_VREG(HEVC_STREAM_RD_PTR);
			/* maintain 64-bit streaming_rp: bump the high
			 * word when the 32-bit byte count wrapped
			 */
			if (((vdec->input.stream_cookie & 0x80000000) == 0) &&
				(vdec->input.streaming_rp & 0x80000000))
				vdec->input.streaming_rp += 1ULL << 32;
			vdec->input.streaming_rp &= 0xffffffffULL << 32;
			vdec->input.streaming_rp |= vdec->input.stream_cookie;
			vdec->input.total_rd_count = vdec->input.streaming_rp;
		}

		input->swap_valid = true;
		input->swap_needed = false;
		/*pr_info("vdec: save context\r\n");*/

		vdec_sync_input_read(vdec);

		if (vdec_dual(vdec)) {
			/* remember which layer saved last so the next
			 * restore picks the correct swap context
			 */
			struct vdec_s *master = (vdec->slave) ?
				vdec : vdec->master;
			master->input.last_swap_slave = (master->slave == vdec);
			/* pr_info("master->input.last_swap_slave = %d\n",
				master->input.last_swap_slave); */
		}

		hevc_wait_ddr();
	}
}
EXPORT_SYMBOL(vdec_save_input_context);
1808
1809void vdec_clean_input(struct vdec_s *vdec)
1810{
1811 struct vdec_input_s *input = &vdec->input;
1812
1813 while (!list_empty(&input->vframe_chunk_list)) {
1814 struct vframe_chunk_s *chunk =
1815 vdec_input_next_chunk(input);
1816 if (chunk && (chunk->flag & VFRAME_CHUNK_FLAG_CONSUMED))
1817 vdec_input_release_chunk(input, chunk);
1818 else
1819 break;
1820 }
1821 vdec_save_input_context(vdec);
1822}
1823EXPORT_SYMBOL(vdec_clean_input);
1824
1825
/*
 * Restore the HW stream read side for this vdec: either program the
 * FIFO from the buffer start (no saved context yet) or reload the
 * previously saved swap page. Stream-based input only; returns 0.
 */
static int vdec_input_read_restore(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;

	if (!vdec_stream_based(vdec))
		return 0;

	if (!input->swap_valid) {
		/* first run: initialize FIFO pointers to buffer start */
		if (input->target == VDEC_INPUT_TARGET_VLD) {
			WRITE_VREG(VLD_MEM_VIFIFO_START_PTR,
				input->start);
			WRITE_VREG(VLD_MEM_VIFIFO_END_PTR,
				input->start + input->size - 8);
			WRITE_VREG(VLD_MEM_VIFIFO_CURR_PTR,
				input->start);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 1);
			WRITE_VREG(VLD_MEM_VIFIFO_CONTROL, 0);

			/* set to manual mode */
			WRITE_VREG(VLD_MEM_VIFIFO_BUF_CNTL, 2);
			WRITE_VREG(VLD_MEM_VIFIFO_RP, input->start);
		} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
			WRITE_VREG(HEVC_STREAM_START_ADDR,
				input->start);
			WRITE_VREG(HEVC_STREAM_END_ADDR,
				input->start + input->size);
			WRITE_VREG(HEVC_STREAM_RD_PTR,
				input->start);
		}
		return 0;
	}
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		/* restore read side */
		WRITE_VREG(VLD_MEM_SWAP_ADDR,
			input->swap_page_phys);

		/*swap active*/
		WRITE_VREG(VLD_MEM_SWAP_CTL, 1);

		/*wait swap busy*/
		while (READ_VREG(VLD_MEM_SWAP_CTL) & (1<<7))
			;

		WRITE_VREG(VLD_MEM_SWAP_CTL, 0);
	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		/* restore read side */
		WRITE_VREG(HEVC_STREAM_SWAP_ADDR,
			input->swap_page_phys);
		WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 1);

		/* busy-wait for the swap engine (bit 7) */
		while (READ_VREG(HEVC_STREAM_SWAP_CTRL)
			& (1<<7))
			;
		WRITE_VREG(HEVC_STREAM_SWAP_CTRL, 0);
	}

	return 0;
}
1884
1885
/*
 * Restore the HW input context, sync read/write pointers with the
 * parser front end, and return the number of bytes available to the
 * decoder (buffer occupancy plus bytes already in the HW FIFO).
 */
int vdec_sync_input(struct vdec_s *vdec)
{
	struct vdec_input_s *input = &vdec->input;
	u32 rp = 0, wp = 0, fifo_len = 0;
	int size;

	vdec_input_read_restore(vdec);
	vdec_sync_input_read(vdec);
	vdec_sync_input_write(vdec);
	if (input->target == VDEC_INPUT_TARGET_VLD) {
		rp = READ_VREG(VLD_MEM_VIFIFO_RP);
		wp = READ_VREG(VLD_MEM_VIFIFO_WP);

	} else if (input->target == VDEC_INPUT_TARGET_HEVC) {
		rp = READ_VREG(HEVC_STREAM_RD_PTR);
		wp = READ_VREG(HEVC_STREAM_WR_PTR);
		fifo_len = (READ_VREG(HEVC_STREAM_FIFO_CTL)
				>> 16) & 0x7f;
	}
	/* occupancy between rp and wp, accounting for buffer wrap */
	if (wp >= rp)
		size = wp - rp + fifo_len;
	else
		size = wp + input->size - rp + fifo_len;
	if (size < 0) {
		pr_info("%s error: input->size %x wp %x rp %x fifo_len %x => size %x\r\n",
			__func__, input->size, wp, rp, fifo_len, size);
		size = 0;
	}
	return size;

}
EXPORT_SYMBOL(vdec_sync_input);
1918
1919const char *vdec_status_str(struct vdec_s *vdec)
1920{
1921 if (vdec->status < 0)
1922 return "INVALID";
1923 return vdec->status < ARRAY_SIZE(vdec_status_string) ?
1924 vdec_status_string[vdec->status] : "INVALID";
1925}
1926
1927const char *vdec_type_str(struct vdec_s *vdec)
1928{
1929 switch (vdec->type) {
1930 case VDEC_TYPE_SINGLE:
1931 return "VDEC_TYPE_SINGLE";
1932 case VDEC_TYPE_STREAM_PARSER:
1933 return "VDEC_TYPE_STREAM_PARSER";
1934 case VDEC_TYPE_FRAME_BLOCK:
1935 return "VDEC_TYPE_FRAME_BLOCK";
1936 case VDEC_TYPE_FRAME_CIRCULAR:
1937 return "VDEC_TYPE_FRAME_CIRCULAR";
1938 default:
1939 return "VDEC_TYPE_INVALID";
1940 }
1941}
1942
1943const char *vdec_device_name_str(struct vdec_s *vdec)
1944{
1945 return vdec_device_name[vdec->format * 2 + 1];
1946}
1947EXPORT_SYMBOL(vdec_device_name_str);
1948
1949void walk_vdec_core_list(char *s)
1950{
1951 struct vdec_s *vdec;
1952 struct vdec_core_s *core = vdec_core;
1953 unsigned long flags;
1954
1955 pr_info("%s --->\n", s);
1956
1957 flags = vdec_core_lock(vdec_core);
1958
1959 if (list_empty(&core->connected_vdec_list)) {
1960 pr_info("connected vdec list empty\n");
1961 } else {
1962 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
1963 pr_info("\tvdec (%p), status = %s\n", vdec,
1964 vdec_status_str(vdec));
1965 }
1966 }
1967
1968 vdec_core_unlock(vdec_core, flags);
1969}
1970EXPORT_SYMBOL(walk_vdec_core_list);
1971
/* insert vdec to vdec_core for scheduling,
 * for dual running decoders, connect/disconnect always runs in pairs
 */
int vdec_connect(struct vdec_s *vdec)
{
	unsigned long flags;

	//trace_vdec_connect(vdec);/*DEBUG_TMP*/

	/* already connected (or active): nothing to do */
	if (vdec->status != VDEC_STATUS_DISCONNECTED)
		return 0;

	vdec_set_status(vdec, VDEC_STATUS_CONNECTED);
	vdec_set_next_status(vdec, VDEC_STATUS_CONNECTED);

	/* re-armed each connect; completed by the core thread on
	 * deactivation (waited on in vdec_disconnect)
	 */
	init_completion(&vdec->inactive_done);

	if (vdec->slave) {
		vdec_set_status(vdec->slave, VDEC_STATUS_CONNECTED);
		vdec_set_next_status(vdec->slave, VDEC_STATUS_CONNECTED);

		init_completion(&vdec->slave->inactive_done);
	}

	flags = vdec_core_lock(vdec_core);

	list_add_tail(&vdec->list, &vdec_core->connected_vdec_list);

	if (vdec->slave) {
		list_add_tail(&vdec->slave->list,
			&vdec_core->connected_vdec_list);
	}

	vdec_core_unlock(vdec_core, flags);

	/* wake the scheduler so it can pick up the new vdec */
	up(&vdec_core->sem);

	return 0;
}
EXPORT_SYMBOL(vdec_connect);
2012
/* remove vdec from vdec_core scheduling */
int vdec_disconnect(struct vdec_s *vdec)
{
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_DISCONNECT);
#endif
	//trace_vdec_disconnect(vdec);/*DEBUG_TMP*/

	/* only CONNECTED/ACTIVE vdecs are under scheduler control */
	if ((vdec->status != VDEC_STATUS_CONNECTED) &&
		(vdec->status != VDEC_STATUS_ACTIVE)) {
		return 0;
	}
	mutex_lock(&vdec_mutex);
	/*
	 *when a vdec is under the management of scheduler
	 * the status change will only be from vdec_core_thread
	 */
	vdec_set_next_status(vdec, VDEC_STATUS_DISCONNECTED);

	if (vdec->slave)
		vdec_set_next_status(vdec->slave, VDEC_STATUS_DISCONNECTED);
	else if (vdec->master)
		vdec_set_next_status(vdec->master, VDEC_STATUS_DISCONNECTED);
	mutex_unlock(&vdec_mutex);
	/* kick the core thread so it acts on the new next_status */
	up(&vdec_core->sem);

	/* wait (2s timeout each) for the core thread to deactivate this
	 * vdec and, in dual mode, its counterpart
	 */
	if(!wait_for_completion_timeout(&vdec->inactive_done,
		msecs_to_jiffies(2000)))
		goto discon_timeout;

	if (vdec->slave) {
		if(!wait_for_completion_timeout(&vdec->slave->inactive_done,
			msecs_to_jiffies(2000)))
			goto discon_timeout;
	} else if (vdec->master) {
		if(!wait_for_completion_timeout(&vdec->master->inactive_done,
			msecs_to_jiffies(2000)))
			goto discon_timeout;
	}

	return 0;
discon_timeout:
	pr_err("%s timeout!!! status: 0x%x\n", __func__, vdec->status);
	return 0;
}
EXPORT_SYMBOL(vdec_disconnect);
2059
2060/* release vdec structure */
2061int vdec_destroy(struct vdec_s *vdec)
2062{
2063 //trace_vdec_destroy(vdec);/*DEBUG_TMP*/
2064
2065 vdec_input_release(&vdec->input);
2066
2067#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2068 vdec_profile_flush(vdec);
2069#endif
2070 ida_simple_remove(&vdec_core->ida, vdec->id);
2071 if (vdec->mvfrm)
2072 vfree(vdec->mvfrm);
2073 vfree(vdec);
2074
2075 atomic_dec(&vdec_core->vdec_nr);
2076
2077 return 0;
2078}
2079EXPORT_SYMBOL(vdec_destroy);
2080
2081/*
2082 *register vdec_device
2083 * create output, vfm or create ionvideo output
2084 */
2085s32 vdec_init(struct vdec_s *vdec, int is_4k)
2086{
2087 int r = 0;
2088 struct vdec_s *p = vdec;
2089 const char *dev_name;
2090 int id = PLATFORM_DEVID_AUTO;/*if have used my self*/
2091
2092 dev_name = get_dev_name(vdec_single(vdec), vdec->format);
2093
2094 if (dev_name == NULL)
2095 return -ENODEV;
2096
2097 pr_info("vdec_init, dev_name:%s, vdec_type=%s\n",
2098 dev_name, vdec_type_str(vdec));
2099
2100 /*
2101 *todo: VFM patch control should be configurable,
2102 * for now all stream based input uses default VFM path.
2103 */
2104 if (vdec_stream_based(vdec) && !vdec_dual(vdec)) {
2105 if (vdec_core->vfm_vdec == NULL) {
2106 pr_debug("vdec_init set vfm decoder %p\n", vdec);
2107 vdec_core->vfm_vdec = vdec;
2108 } else {
2109 pr_info("vdec_init vfm path busy.\n");
2110 return -EBUSY;
2111 }
2112 }
2113
2114 mutex_lock(&vdec_mutex);
2115 inited_vcodec_num++;
2116 mutex_unlock(&vdec_mutex);
2117
2118 vdec_input_set_type(&vdec->input, vdec->type,
2119 (vdec->format == VFORMAT_HEVC ||
2120 vdec->format == VFORMAT_AVS2 ||
2121 vdec->format == VFORMAT_VP9) ?
2122 VDEC_INPUT_TARGET_HEVC :
2123 VDEC_INPUT_TARGET_VLD);
2124 if (vdec_single(vdec) || (vdec_get_debug_flags() & 0x2))
2125 vdec_enable_DMC(vdec);
2126 p->cma_dev = vdec_core->cma_dev;
2127 p->get_canvas = get_canvas;
2128 p->get_canvas_ex = get_canvas_ex;
2129 p->free_canvas_ex = free_canvas_ex;
2130 p->vdec_fps_detec = vdec_fps_detec;
2131 atomic_set(&p->inrelease, 0);
2132 atomic_set(&p->inirq_flag, 0);
2133 atomic_set(&p->inirq_thread_flag, 0);
2134 /* todo */
2135 if (!vdec_dual(vdec))
2136 p->use_vfm_path = vdec_stream_based(vdec);
2137 if (debugflags & 0x4)
2138 p->use_vfm_path = 1;
2139 /* vdec_dev_reg.flag = 0; */
2140 if (vdec->id >= 0)
2141 id = vdec->id;
2142 p->parallel_dec = parallel_decode;
2143 vdec_core->parallel_dec = parallel_decode;
2144 vdec->canvas_mode = CANVAS_BLKMODE_32X32;
2145#ifdef FRAME_CHECK
2146 vdec_frame_check_init(vdec);
2147#endif
2148 p->dev = platform_device_register_data(
2149 &vdec_core->vdec_core_platform_device->dev,
2150 dev_name,
2151 id,
2152 &p, sizeof(struct vdec_s *));
2153
2154 if (IS_ERR(p->dev)) {
2155 r = PTR_ERR(p->dev);
2156 pr_err("vdec: Decoder device %s register failed (%d)\n",
2157 dev_name, r);
2158
2159 mutex_lock(&vdec_mutex);
2160 inited_vcodec_num--;
2161 mutex_unlock(&vdec_mutex);
2162
2163 goto error;
2164 } else if (!p->dev->dev.driver) {
2165 pr_info("vdec: Decoder device %s driver probe failed.\n",
2166 dev_name);
2167 r = -ENODEV;
2168
2169 goto error;
2170 }
2171
2172 if ((p->type == VDEC_TYPE_FRAME_BLOCK) && (p->run == NULL)) {
2173 r = -ENODEV;
2174 pr_err("vdec: Decoder device not handled (%s)\n", dev_name);
2175
2176 mutex_lock(&vdec_mutex);
2177 inited_vcodec_num--;
2178 mutex_unlock(&vdec_mutex);
2179
2180 goto error;
2181 }
2182
2183 if (p->use_vfm_path) {
2184 vdec->vf_receiver_inst = -1;
2185 vdec->vfm_map_id[0] = 0;
2186 } else if (!vdec_dual(vdec)) {
2187 /* create IONVIDEO instance and connect decoder's
2188 * vf_provider interface to it
2189 */
2190 if (p->type != VDEC_TYPE_FRAME_BLOCK) {
2191 r = -ENODEV;
2192 pr_err("vdec: Incorrect decoder type\n");
2193
2194 mutex_lock(&vdec_mutex);
2195 inited_vcodec_num--;
2196 mutex_unlock(&vdec_mutex);
2197
2198 goto error;
2199 }
2200 if (p->frame_base_video_path == FRAME_BASE_PATH_IONVIDEO) {
2201#if 1
2202 r = ionvideo_assign_map(&vdec->vf_receiver_name,
2203 &vdec->vf_receiver_inst);
2204#else
2205 /*
2206 * temporarily just use decoder instance ID as iondriver ID
2207 * to solve OMX iondriver instance number check time sequence
2208 * only the limitation is we can NOT mix different video
2209 * decoders since same ID will be used for different decoder
2210 * formats.
2211 */
2212 vdec->vf_receiver_inst = p->dev->id;
2213 r = ionvideo_assign_map(&vdec->vf_receiver_name,
2214 &vdec->vf_receiver_inst);
2215#endif
2216 if (r < 0) {
2217 pr_err("IonVideo frame receiver allocation failed.\n");
2218
2219 mutex_lock(&vdec_mutex);
2220 inited_vcodec_num--;
2221 mutex_unlock(&vdec_mutex);
2222
2223 goto error;
2224 }
2225
2226 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2227 "%s %s", vdec->vf_provider_name,
2228 vdec->vf_receiver_name);
2229 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2230 "vdec-map-%d", vdec->id);
2231 } else if (p->frame_base_video_path ==
2232 FRAME_BASE_PATH_AMLVIDEO_AMVIDEO) {
2233 if (vdec_secure(vdec)) {
2234 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2235 "%s %s", vdec->vf_provider_name,
2236 "amlvideo amvideo");
2237 } else {
2238 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2239 "%s %s", vdec->vf_provider_name,
2240 "amlvideo ppmgr deinterlace amvideo");
2241 }
2242 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2243 "vdec-map-%d", vdec->id);
2244 } else if (p->frame_base_video_path ==
2245 FRAME_BASE_PATH_AMLVIDEO1_AMVIDEO2) {
2246 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2247 "%s %s", vdec->vf_provider_name,
2248 "aml_video.1 videosync.0 videopip");
2249 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2250 "vdec-map-%d", vdec->id);
2251 } else if (p->frame_base_video_path == FRAME_BASE_PATH_V4L_OSD) {
2252 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2253 "%s %s", vdec->vf_provider_name,
2254 vdec->vf_receiver_name);
2255 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2256 "vdec-map-%d", vdec->id);
2257 } else if (p->frame_base_video_path == FRAME_BASE_PATH_TUNNEL_MODE) {
2258 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2259 "%s %s", vdec->vf_provider_name,
2260 "amvideo");
2261 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2262 "vdec-map-%d", vdec->id);
2263 } else if (p->frame_base_video_path == FRAME_BASE_PATH_V4L_VIDEO) {
2264 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2265 "%s %s %s", vdec->vf_provider_name,
2266 vdec->vf_receiver_name, "amvideo");
2267 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2268 "vdec-map-%d", vdec->id);
2269 } else if (p->frame_base_video_path ==
2270 FRAME_BASE_PATH_DI_V4LVIDEO) {
2271#ifdef CONFIG_AMLOGIC_V4L_VIDEO3
2272 r = v4lvideo_assign_map(&vdec->vf_receiver_name,
2273 &vdec->vf_receiver_inst);
2274#else
2275 r = -1;
2276#endif
2277 if (r < 0) {
2278 pr_err("V4lVideo frame receiver allocation failed.\n");
2279 mutex_lock(&vdec_mutex);
2280 inited_vcodec_num--;
2281 mutex_unlock(&vdec_mutex);
2282 goto error;
2283 }
2284 if (!v4lvideo_add_di || vdec_secure(vdec))
2285 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2286 "%s %s", vdec->vf_provider_name,
2287 vdec->vf_receiver_name);
2288 else {
2289 if (vdec->vf_receiver_inst == 0)
2290 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2291 "%s %s %s", vdec->vf_provider_name,
2292 "dimulti.1",
2293 vdec->vf_receiver_name);
2294 else if ((vdec->vf_receiver_inst <
2295 max_di_instance) &&
2296 (vdec->vf_receiver_inst == 1))
2297 snprintf(vdec->vfm_map_chain,
2298 VDEC_MAP_NAME_SIZE,
2299 "%s %s %s",
2300 vdec->vf_provider_name,
2301 "deinterlace",
2302 vdec->vf_receiver_name);
2303 else if (vdec->vf_receiver_inst <
2304 max_di_instance)
2305 snprintf(vdec->vfm_map_chain,
2306 VDEC_MAP_NAME_SIZE,
2307 "%s %s%d %s",
2308 vdec->vf_provider_name,
2309 "dimulti.",
2310 vdec->vf_receiver_inst,
2311 vdec->vf_receiver_name);
2312 else
2313 snprintf(vdec->vfm_map_chain, VDEC_MAP_NAME_SIZE,
2314 "%s %s", vdec->vf_provider_name,
2315 vdec->vf_receiver_name);
2316 }
2317 snprintf(vdec->vfm_map_id, VDEC_MAP_NAME_SIZE,
2318 "vdec-map-%d", vdec->id);
2319 }
2320
2321 if (vfm_map_add(vdec->vfm_map_id,
2322 vdec->vfm_map_chain) < 0) {
2323 r = -ENOMEM;
2324 pr_err("Decoder pipeline map creation failed %s.\n",
2325 vdec->vfm_map_id);
2326 vdec->vfm_map_id[0] = 0;
2327
2328 mutex_lock(&vdec_mutex);
2329 inited_vcodec_num--;
2330 mutex_unlock(&vdec_mutex);
2331
2332 goto error;
2333 }
2334
2335 pr_debug("vfm map %s created\n", vdec->vfm_map_id);
2336
2337 /*
2338 *assume IONVIDEO driver already have a few vframe_receiver
2339 * registered.
2340 * 1. Call iondriver function to allocate a IONVIDEO path and
2341 * provide receiver's name and receiver op.
2342 * 2. Get decoder driver's provider name from driver instance
2343 * 3. vfm_map_add(name, "<decoder provider name>
2344 * <iondriver receiver name>"), e.g.
2345 * vfm_map_add("vdec_ion_map_0", "mpeg4_0 iondriver_1");
2346 * 4. vf_reg_provider and vf_reg_receiver
2347 * Note: the decoder provider's op uses vdec as op_arg
2348 * the iondriver receiver's op uses iondev device as
2349 * op_arg
2350 */
2351
2352 }
2353
2354 if (!vdec_single(vdec)) {
2355 vf_reg_provider(&p->vframe_provider);
2356
2357 vf_notify_receiver(p->vf_provider_name,
2358 VFRAME_EVENT_PROVIDER_START,
2359 vdec);
2360
2361 if (vdec_core->hint_fr_vdec == NULL)
2362 vdec_core->hint_fr_vdec = vdec;
2363
2364 if (vdec_core->hint_fr_vdec == vdec) {
2365 if (p->sys_info->rate != 0) {
2366 if (!vdec->is_reset) {
2367 vf_notify_receiver(p->vf_provider_name,
2368 VFRAME_EVENT_PROVIDER_FR_HINT,
2369 (void *)
2370 ((unsigned long)
2371 p->sys_info->rate));
2372 vdec->fr_hint_state = VDEC_HINTED;
2373 }
2374 } else {
2375 vdec->fr_hint_state = VDEC_NEED_HINT;
2376 }
2377 }
2378 }
2379
2380 p->dolby_meta_with_el = 0;
2381 pr_debug("vdec_init, vf_provider_name = %s\n", p->vf_provider_name);
2382 vdec_input_prepare_bufs(/*prepared buffer for fast playing.*/
2383 &vdec->input,
2384 vdec->sys_info->width,
2385 vdec->sys_info->height);
2386 /* vdec is now ready to be active */
2387 vdec_set_status(vdec, VDEC_STATUS_DISCONNECTED);
2388 return 0;
2389
2390error:
2391 return r;
2392}
2393EXPORT_SYMBOL(vdec_init);
2394
2395/* vdec_create/init/release/destroy are applied to both dual running decoders
2396 */
/*
 * Tear down a decoder instance: detach it from the core scheduler,
 * unregister its vframe provider, remove its vfm path, drain in-flight
 * ISRs, then destroy the instance.  Ordering matters: disconnect comes
 * first so no new run can be scheduled, and ISR draining happens before
 * the platform device is unregistered.
 */
void vdec_release(struct vdec_s *vdec)
{
	//trace_vdec_release(vdec);/*DEBUG_TMP*/
#ifdef VDEC_DEBUG_SUPPORT
	/* Debug aid: hold release while single-step mode is engaged. */
	if (step_mode) {
		pr_info("VDEC_DEBUG: in step_mode, wait release\n");
		while (step_mode)
			udelay(10);
		pr_info("VDEC_DEBUG: step_mode is clear\n");
	}
#endif
	vdec_disconnect(vdec);

	if (vdec->vframe_provider.name) {
		if (!vdec_single(vdec)) {
			/* If this instance issued the frame-rate hint, tell
			 * the receiver the hint no longer applies.
			 */
			if (vdec_core->hint_fr_vdec == vdec
			&& vdec->fr_hint_state == VDEC_HINTED)
				vf_notify_receiver(
					vdec->vf_provider_name,
					VFRAME_EVENT_PROVIDER_FR_END_HINT,
					NULL);
			vdec->fr_hint_state = VDEC_NO_NEED_HINT;
		}
		vf_unreg_provider(&vdec->vframe_provider);
	}

	if (vdec_core->vfm_vdec == vdec)
		vdec_core->vfm_vdec = NULL;

	if (vdec_core->hint_fr_vdec == vdec)
		vdec_core->hint_fr_vdec = NULL;

	/* Remove the vfm map this instance created in vdec_init(). */
	if (vdec->vf_receiver_inst >= 0) {
		if (vdec->vfm_map_id[0]) {
			vfm_map_remove(vdec->vfm_map_id);
			vdec->vfm_map_id[0] = 0;
		}
	}

	/* Block new ISR entry (vdec_isr/vdec_thread_isr check inrelease)
	 * and busy-wait until any ISR currently executing for this
	 * instance has finished.
	 */
	atomic_set(&vdec->inrelease, 1);
	while ((atomic_read(&vdec->inirq_flag) > 0)
		|| (atomic_read(&vdec->inirq_thread_flag) > 0))
		schedule();

#ifdef FRAME_CHECK
	vdec_frame_check_exit(vdec);
#endif
	vdec_fps_clear(vdec->id);
	/* Last remaining instance: drop decoder DMC access. */
	if (atomic_read(&vdec_core->vdec_nr) == 1)
		vdec_disable_DMC(vdec);
	platform_device_unregister(vdec->dev);
	pr_debug("vdec_release instance %p, total %d\n", vdec,
		atomic_read(&vdec_core->vdec_nr));
	vdec_destroy(vdec);

	mutex_lock(&vdec_mutex);
	inited_vcodec_num--;
	mutex_unlock(&vdec_mutex);

}
EXPORT_SYMBOL(vdec_release);
2458
2459/* For dual running decoders, vdec_reset is only called with master vdec.
2460 */
2461int vdec_reset(struct vdec_s *vdec)
2462{
2463 //trace_vdec_reset(vdec); /*DEBUG_TMP*/
2464
2465 vdec_disconnect(vdec);
2466
2467 if (vdec->vframe_provider.name)
2468 vf_unreg_provider(&vdec->vframe_provider);
2469
2470 if ((vdec->slave) && (vdec->slave->vframe_provider.name))
2471 vf_unreg_provider(&vdec->slave->vframe_provider);
2472
2473 if (vdec->reset) {
2474 vdec->reset(vdec);
2475 if (vdec->slave)
2476 vdec->slave->reset(vdec->slave);
2477 }
2478 vdec->mc_loaded = 0;/*clear for reload firmware*/
2479 vdec_input_release(&vdec->input);
2480
2481 vdec_input_init(&vdec->input, vdec);
2482
2483 vdec_input_prepare_bufs(&vdec->input, vdec->sys_info->width,
2484 vdec->sys_info->height);
2485
2486 vf_reg_provider(&vdec->vframe_provider);
2487 vf_notify_receiver(vdec->vf_provider_name,
2488 VFRAME_EVENT_PROVIDER_START, vdec);
2489
2490 if (vdec->slave) {
2491 vf_reg_provider(&vdec->slave->vframe_provider);
2492 vf_notify_receiver(vdec->slave->vf_provider_name,
2493 VFRAME_EVENT_PROVIDER_START, vdec->slave);
2494 vdec->slave->mc_loaded = 0;/*clear for reload firmware*/
2495 }
2496
2497 vdec_connect(vdec);
2498
2499 return 0;
2500}
2501EXPORT_SYMBOL(vdec_reset);
2502
2503int vdec_v4l2_reset(struct vdec_s *vdec, int flag)
2504{
2505 //trace_vdec_reset(vdec); /*DEBUG_TMP*/
2506 pr_debug("vdec_v4l2_reset %d\n", flag);
2507 vdec_disconnect(vdec);
2508 if (flag != 2) {
2509 if (vdec->vframe_provider.name)
2510 vf_unreg_provider(&vdec->vframe_provider);
2511
2512 if ((vdec->slave) && (vdec->slave->vframe_provider.name))
2513 vf_unreg_provider(&vdec->slave->vframe_provider);
2514
2515 if (vdec->reset) {
2516 vdec->reset(vdec);
2517 if (vdec->slave)
2518 vdec->slave->reset(vdec->slave);
2519 }
2520 vdec->mc_loaded = 0;/*clear for reload firmware*/
2521
2522 vdec_input_release(&vdec->input);
2523
2524 vdec_input_init(&vdec->input, vdec);
2525
2526 vdec_input_prepare_bufs(&vdec->input, vdec->sys_info->width,
2527 vdec->sys_info->height);
2528
2529 vf_reg_provider(&vdec->vframe_provider);
2530 vf_notify_receiver(vdec->vf_provider_name,
2531 VFRAME_EVENT_PROVIDER_START, vdec);
2532
2533 if (vdec->slave) {
2534 vf_reg_provider(&vdec->slave->vframe_provider);
2535 vf_notify_receiver(vdec->slave->vf_provider_name,
2536 VFRAME_EVENT_PROVIDER_START, vdec->slave);
2537 vdec->slave->mc_loaded = 0;/*clear for reload firmware*/
2538 }
2539 } else {
2540 if (vdec->reset) {
2541 vdec->reset(vdec);
2542 if (vdec->slave)
2543 vdec->slave->reset(vdec->slave);
2544 }
2545 }
2546
2547 vdec_connect(vdec);
2548
2549 vdec_frame_check_init(vdec);
2550
2551 return 0;
2552}
2553EXPORT_SYMBOL(vdec_v4l2_reset);
2554
2555void vdec_free_cmabuf(void)
2556{
2557 mutex_lock(&vdec_mutex);
2558
2559 /*if (inited_vcodec_num > 0) {
2560 mutex_unlock(&vdec_mutex);
2561 return;
2562 }*/
2563 mutex_unlock(&vdec_mutex);
2564}
2565
2566void vdec_core_request(struct vdec_s *vdec, unsigned long mask)
2567{
2568 vdec->core_mask |= mask;
2569
2570 if (vdec->slave)
2571 vdec->slave->core_mask |= mask;
2572 if (vdec_core->parallel_dec == 1) {
2573 if (mask & CORE_MASK_COMBINE)
2574 vdec_core->vdec_combine_flag++;
2575 }
2576
2577}
2578EXPORT_SYMBOL(vdec_core_request);
2579
2580int vdec_core_release(struct vdec_s *vdec, unsigned long mask)
2581{
2582 vdec->core_mask &= ~mask;
2583
2584 if (vdec->slave)
2585 vdec->slave->core_mask &= ~mask;
2586 if (vdec_core->parallel_dec == 1) {
2587 if (mask & CORE_MASK_COMBINE)
2588 vdec_core->vdec_combine_flag--;
2589 }
2590 return 0;
2591}
2592EXPORT_SYMBOL(vdec_core_release);
2593
2594bool vdec_core_with_input(unsigned long mask)
2595{
2596 enum vdec_type_e type;
2597
2598 for (type = VDEC_1; type < VDEC_MAX; type++) {
2599 if ((mask & (1 << type)) && cores_with_input[type])
2600 return true;
2601 }
2602
2603 return false;
2604}
2605
2606void vdec_core_finish_run(struct vdec_s *vdec, unsigned long mask)
2607{
2608 unsigned long i;
2609 unsigned long t = mask;
2610 mutex_lock(&vdec_mutex);
2611 while (t) {
2612 i = __ffs(t);
2613 clear_bit(i, &vdec->active_mask);
2614 t &= ~(1 << i);
2615 }
2616
2617 if (vdec->active_mask == 0)
2618 vdec_set_status(vdec, VDEC_STATUS_CONNECTED);
2619
2620 mutex_unlock(&vdec_mutex);
2621}
2622EXPORT_SYMBOL(vdec_core_finish_run);
2623/*
2624 * find what core resources are available for vdec
2625 */
2626static unsigned long vdec_schedule_mask(struct vdec_s *vdec,
2627 unsigned long active_mask)
2628{
2629 unsigned long mask = vdec->core_mask &
2630 ~CORE_MASK_COMBINE;
2631
2632 if (vdec->core_mask & CORE_MASK_COMBINE) {
2633 /* combined cores must be granted together */
2634 if ((mask & ~active_mask) == mask)
2635 return mask;
2636 else
2637 return 0;
2638 } else
2639 return mask & ~vdec->sched_mask & ~active_mask;
2640}
2641
2642/*
2643 *Decoder callback
2644 * Each decoder instance uses this callback to notify status change, e.g. when
2645 * decoder finished using HW resource.
2646 * a sample callback from decoder's driver is following:
2647 *
2648 * if (hw->vdec_cb) {
2649 * vdec_set_next_status(vdec, VDEC_STATUS_CONNECTED);
2650 * hw->vdec_cb(vdec, hw->vdec_cb_arg);
2651 * }
2652 */
static void vdec_callback(struct vdec_s *vdec, void *data)
{
	struct vdec_core_s *core = (struct vdec_core_s *)data;

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_CB);
#endif

	/* Kick the core scheduler thread to re-evaluate runnable vdecs. */
	up(&core->sem);
}
2663
/*
 * Hard IRQ handler shared by the decoder interrupt lines.  Resolves
 * which vdec instance the interrupt belongs to, then forwards to
 * either the context's device ISR (if installed) or the instance's
 * own irq_handler.
 */
static irqreturn_t vdec_isr(int irq, void *dev_id)
{
	struct vdec_isr_context_s *c =
		(struct vdec_isr_context_s *)dev_id;
	struct vdec_s *vdec = vdec_core->last_vdec;
	irqreturn_t ret = IRQ_HANDLED;

	/* In parallel mode the active instance is tracked per HW core,
	 * keyed by which interrupt line fired.
	 */
	if (vdec_core->parallel_dec == 1) {
		if (irq == vdec_core->isr_context[VDEC_IRQ_0].irq)
			vdec = vdec_core->active_hevc;
		else if (irq == vdec_core->isr_context[VDEC_IRQ_1].irq)
			vdec = vdec_core->active_vdec;
		else
			vdec = NULL;
	}

	if (vdec) {
		/* Instance is mid-release: do not touch it. */
		if (atomic_read(&vdec->inrelease) > 0)
			return ret;
		/* Mark ISR in-flight so vdec_release() can drain us. */
		atomic_set(&vdec->inirq_flag, 1);
		vdec->isr_ns = local_clock();
	}
	/* A device-level ISR installed on this context takes precedence. */
	if (c->dev_isr) {
		ret = c->dev_isr(irq, c->dev_id);
		goto isr_done;
	}

	if ((c != &vdec_core->isr_context[VDEC_IRQ_0]) &&
		(c != &vdec_core->isr_context[VDEC_IRQ_1]) &&
		(c != &vdec_core->isr_context[VDEC_IRQ_HEVC_BACK])) {
#if 0
		pr_warn("vdec interrupt w/o a valid receiver\n");
#endif
		goto isr_done;
	}

	if (!vdec) {
#if 0
		pr_warn("vdec interrupt w/o an active instance running. core = %p\n",
				core);
#endif
		goto isr_done;
	}

	if (!vdec->irq_handler) {
#if 0
		pr_warn("vdec instance has no irq handle.\n");
#endif
		goto isr_done;
	}

	ret = vdec->irq_handler(vdec, c->index);
isr_done:
	if (vdec)
		atomic_set(&vdec->inirq_flag, 0);
	return ret;
}
2721
/*
 * Threaded (bottom-half) IRQ handler.  Mirrors vdec_isr()'s instance
 * resolution, logs excessive hard-IRQ-to-thread latency, then
 * dispatches to the device's threaded ISR or the instance's
 * threaded_irq_handler.
 */
static irqreturn_t vdec_thread_isr(int irq, void *dev_id)
{
	struct vdec_isr_context_s *c =
		(struct vdec_isr_context_s *)dev_id;
	struct vdec_s *vdec = vdec_core->last_vdec;
	irqreturn_t ret = IRQ_HANDLED;

	if (vdec_core->parallel_dec == 1) {
		if (irq == vdec_core->isr_context[VDEC_IRQ_0].irq)
			vdec = vdec_core->active_hevc;
		else if (irq == vdec_core->isr_context[VDEC_IRQ_1].irq)
			vdec = vdec_core->active_vdec;
		else
			vdec = NULL;
	}

	if (vdec) {
		u32 isr2tfn = 0;
		/* Instance is mid-release: do not touch it. */
		if (atomic_read(&vdec->inrelease) > 0)
			return ret;
		atomic_set(&vdec->inirq_thread_flag, 1);
		vdec->tfn_ns = local_clock();
		/* Warn when hard-IRQ to threaded-ISR latency exceeds 10ms
		 * (value truncated to u32 nanoseconds).
		 */
		isr2tfn = vdec->tfn_ns - vdec->isr_ns;
		if (isr2tfn > 10000000)
			pr_err("!!!!!!! %s vdec_isr to %s took %u ns !!!\n",
				vdec->vf_provider_name, __func__, isr2tfn);
	}
	if (c->dev_threaded_isr) {
		ret = c->dev_threaded_isr(irq, c->dev_id);
		goto thread_isr_done;
	}
	if (!vdec)
		goto thread_isr_done;

	if (!vdec->threaded_irq_handler)
		goto thread_isr_done;
	ret = vdec->threaded_irq_handler(vdec, c->index);
thread_isr_done:
	if (vdec)
		atomic_set(&vdec->inirq_thread_flag, 0);
	return ret;
}
2764
/*
 * Decide whether @vdec can run on the cores in @mask right now.
 * Returns the subset of @mask the decoder reports ready (via its
 * run_ready op), or 0/false when gated off by status, missing ops,
 * CRC blocking, dual-decoder scheduling, or input underrun.
 */
unsigned long vdec_ready_to_run(struct vdec_s *vdec, unsigned long mask)
{
	unsigned long ready_mask;
	struct vdec_input_s *input = &vdec->input;
	if ((vdec->status != VDEC_STATUS_CONNECTED) &&
	    (vdec->status != VDEC_STATUS_ACTIVE))
		return false;

	if (!vdec->run_ready)
		return false;

	/* when crc32 error, block at error frame */
	if (vdec->vfc.err_crc_block)
		return false;

	/* Dual (master/slave) decoders only run when scheduling is on. */
	if ((vdec->slave || vdec->master) &&
		(vdec->sched == 0))
		return false;
#ifdef VDEC_DEBUG_SUPPORT
	inc_profi_count(mask, vdec->check_count);
#endif
	if (vdec_core_with_input(mask)) {
		/* check frame based input underrun */
		if (input && !input->eos && input_frame_based(input)
			&& (!vdec_input_next_chunk(input))) {
#ifdef VDEC_DEBUG_SUPPORT
			inc_profi_count(mask, vdec->input_underrun_count);
#endif
			return false;
		}
		/* check streaming prepare level threshold if not EOS */
		if (input && input_stream_based(input) && !input->eos) {
			u32 rp, wp, level;

			/* Compute buffered bytes from the parser ring
			 * read/write pointers (handles wrap-around).
			 */
			rp = READ_PARSER_REG(PARSER_VIDEO_RP);
			wp = READ_PARSER_REG(PARSER_VIDEO_WP);
			if (wp < rp)
				level = input->size + wp - rp;
			else
				level = wp - rp;

			if ((level < input->prepare_level) &&
				(pts_get_rec_num(PTS_TYPE_VIDEO,
					vdec->input.total_rd_count) < 2)) {
				vdec->need_more_data |= VDEC_NEED_MORE_DATA;
#ifdef VDEC_DEBUG_SUPPORT
				inc_profi_count(mask, vdec->input_underrun_count);
				if (step_mode & 0x200) {
					if ((step_mode & 0xff) == vdec->id) {
						step_mode |= 0xff;
						return mask;
					}
				}
#endif
				return false;
			} else if (level > input->prepare_level)
				vdec->need_more_data &= ~VDEC_NEED_MORE_DATA;
		}
	}

	/* Single-step debug gate: only the selected instance may run. */
	if (step_mode) {
		if ((step_mode & 0xff) != vdec->id)
			return 0;
		step_mode |= 0xff; /*VDEC_DEBUG_SUPPORT*/
	}

	/*step_mode &= ~0xff; not work for id of 0, removed*/

#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	vdec_profile(vdec, VDEC_PROFILE_EVENT_CHK_RUN_READY);
#endif

	/* Ask the decoder itself, restricted to the offered cores. */
	ready_mask = vdec->run_ready(vdec, mask) & mask;
#ifdef VDEC_DEBUG_SUPPORT
	if (ready_mask != mask)
		inc_profi_count(ready_mask ^ mask, vdec->not_run_ready_count);
#endif
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
	if (ready_mask)
		vdec_profile(vdec, VDEC_PROFILE_EVENT_RUN_READY);
#endif

	return ready_mask;
}
2849
2850/* bridge on/off vdec's interrupt processing to vdec core */
2851static void vdec_route_interrupt(struct vdec_s *vdec, unsigned long mask,
2852 bool enable)
2853{
2854 enum vdec_type_e type;
2855
2856 for (type = VDEC_1; type < VDEC_MAX; type++) {
2857 if (mask & (1 << type)) {
2858 struct vdec_isr_context_s *c =
2859 &vdec_core->isr_context[cores_int[type]];
2860 if (enable)
2861 c->vdec = vdec;
2862 else if (c->vdec == vdec)
2863 c->vdec = NULL;
2864 }
2865 }
2866}
2867
2868/*
2869 * Set up secure protection for each decoder instance running.
2870 * Note: The operation from REE side only resets memory access
2871 * to a default policy and even a non_secure type will still be
2872 * changed to secure type automatically when secure source is
2873 * detected inside TEE.
 * Perform need_more_data checking and set the flag if the decoder
 * is not consuming data.
2876 */
void vdec_prepare_run(struct vdec_s *vdec, unsigned long mask)
{
	struct vdec_input_s *input = &vdec->input;
	/* Default memory-access policy for this run: secure iff the
	 * instance itself is secure (TEE may still escalate later).
	 */
	int secure = (vdec_secure(vdec)) ? DMC_DEV_TYPE_SECURE :
			DMC_DEV_TYPE_NON_SECURE;

	vdec_route_interrupt(vdec, mask, true);

	if (!vdec_core_with_input(mask))
		return;

	if (secure && vdec_stream_based(vdec) && force_nosecure_even_drm)
	{
		/* The Verimatrix ultra webclient (HLS) plays in drmmode and
		 * uses the HW demux.  In drmmode VDEC can only access secure
		 * memory, but the HW demux parses ES data into a non-secure
		 * buffer, so VDEC's input is non-secure and playback fails.
		 * Force non-secure access for this case.  If the HW demux can
		 * one day parse ES data into a secure buffer, make VDEC r/w
		 * secure again.
		 */
		secure = 0;
		//pr_debug("allow VDEC can access nosecure even in drmmode\n");
	}
	if (input->target == VDEC_INPUT_TARGET_VLD)
		tee_config_device_secure(DMC_DEV_ID_VDEC, secure);
	else if (input->target == VDEC_INPUT_TARGET_HEVC)
		tee_config_device_secure(DMC_DEV_ID_HEVC, secure);

	/* If the previous run started (RUN set) but the input side never
	 * marked new data (DIRTY unset), flag that more data is needed.
	 */
	if (vdec_stream_based(vdec) &&
		((vdec->need_more_data & VDEC_NEED_MORE_DATA_RUN) &&
		(vdec->need_more_data & VDEC_NEED_MORE_DATA_DIRTY) == 0)) {
		vdec->need_more_data |= VDEC_NEED_MORE_DATA;
	}

	vdec->need_more_data |= VDEC_NEED_MORE_DATA_RUN;
	vdec->need_more_data &= ~VDEC_NEED_MORE_DATA_DIRTY;
}
2911
/* The vdec core thread manages all decoder instances in the active list.
 * When a vdec is added into the active list, it can only be in two statuses:
2914 * VDEC_STATUS_CONNECTED(the decoder does not own HW resource and ready to run)
2915 * VDEC_STATUS_ACTIVE(the decoder owns HW resources and is running).
2916 * Removing a decoder from active list is only performed within core thread.
2917 * Adding a decoder into active list is performed from user thread.
2918 */
/*
 * Core scheduler thread.  Runs as SCHED_FIFO and wakes on core->sem
 * (kicked by vdec_callback(), connect/disconnect paths, and by itself
 * while work may remain).  Each iteration:
 *   1. reclaims cores released by previously-active vdecs and cleans
 *      their input side,
 *   2. moves decoders requesting disconnect onto a local list,
 *   3. elects the next runnable vdec (round-robin from last_vdec),
 *   4. grants it cores and calls its run() op,
 *   5. completes the disconnect requests collected in step 2.
 */
static int vdec_core_thread(void *data)
{
	struct vdec_core_s *core = (struct vdec_core_s *)data;
	struct sched_param param = {.sched_priority = MAX_RT_PRIO/2};
	unsigned long flags;
	int i;

	sched_setscheduler(current, SCHED_FIFO, &param);

	allow_signal(SIGTERM);

	while (down_interruptible(&core->sem) == 0) {
		struct vdec_s *vdec, *tmp, *worker;
		unsigned long sched_mask = 0;
		LIST_HEAD(disconnecting_list);

		if (kthread_should_stop())
			break;
		mutex_lock(&vdec_mutex);

		/* Refresh the bitmap of cores that hold a power reference. */
		if (core->parallel_dec == 1) {
			for (i = VDEC_1; i < VDEC_MAX; i++) {
				core->power_ref_mask =
					core->power_ref_count[i] > 0 ?
					(core->power_ref_mask | (1 << i)) :
					(core->power_ref_mask & ~(1 << i));
			}
		}
		/* clean up previous active vdec's input */
		list_for_each_entry(vdec, &core->connected_vdec_list, list) {
			/* cores that were granted (sched) but are no longer
			 * active, i.e. released since the last pass
			 */
			unsigned long mask = vdec->sched_mask &
				(vdec->active_mask ^ vdec->sched_mask);

			vdec_route_interrupt(vdec, mask, false);

#ifdef VDEC_DEBUG_SUPPORT
			update_profi_clk_stop(vdec, mask, get_current_clk());
#endif
			/*
			 * If decoder released some core resources (mask), then
			 * check if these core resources are associated
			 * with any input side and do input clean up accordingly
			 */
			if (vdec_core_with_input(mask)) {
				struct vdec_input_s *input = &vdec->input;
				while (!list_empty(
					&input->vframe_chunk_list)) {
					struct vframe_chunk_s *chunk =
						vdec_input_next_chunk(input);
					if (chunk && (chunk->flag &
						VFRAME_CHUNK_FLAG_CONSUMED))
						vdec_input_release_chunk(input,
							chunk);
					else
						break;
				}

				vdec_save_input_context(vdec);
			}

			vdec->sched_mask &= ~mask;
			core->sched_mask &= ~mask;
		}
		vdec_update_buff_status();
		/*
		 *todo:
		 * this is the case when the decoder is in active mode and
		 * the system side wants to stop it. Currently we rely on
		 * the decoder instance to go back to VDEC_STATUS_CONNECTED
		 * from VDEC_STATUS_ACTIVE by its own. However, if for some
		 * reason the decoder can not exist by itself (dead decoding
		 * or whatever), then we may have to add another vdec API
		 * to kill the vdec and release its HW resource and make it
		 * become inactive again.
		 * if ((core->active_vdec) &&
		 * (core->active_vdec->status == VDEC_STATUS_DISCONNECTED)) {
		 * }
		 */

		/* check disconnected decoders */
		flags = vdec_core_lock(vdec_core);
		list_for_each_entry_safe(vdec, tmp,
			&core->connected_vdec_list, list) {
			if ((vdec->status == VDEC_STATUS_CONNECTED) &&
			    (vdec->next_status == VDEC_STATUS_DISCONNECTED)) {
				if (core->parallel_dec == 1) {
					if (vdec_core->active_hevc == vdec)
						vdec_core->active_hevc = NULL;
					if (vdec_core->active_vdec == vdec)
						vdec_core->active_vdec = NULL;
				}
				if (core->last_vdec == vdec)
					core->last_vdec = NULL;
				list_move(&vdec->list, &disconnecting_list);
			}
		}
		vdec_core_unlock(vdec_core, flags);
		mutex_unlock(&vdec_mutex);
		/* elect next vdec to be scheduled */
		vdec = core->last_vdec;
		if (vdec) {
			/* start the round-robin scan just after the decoder
			 * that ran last
			 */
			vdec = list_entry(vdec->list.next, struct vdec_s, list);
			list_for_each_entry_from(vdec,
				&core->connected_vdec_list, list) {
				sched_mask = vdec_schedule_mask(vdec,
					core->sched_mask);
				if (!sched_mask)
					continue;
				sched_mask = vdec_ready_to_run(vdec,
					sched_mask);
				if (sched_mask)
					break;
			}

			if (&vdec->list == &core->connected_vdec_list)
				vdec = NULL;
		}

		if (!vdec) {
			/* search from beginning */
			list_for_each_entry(vdec,
				&core->connected_vdec_list, list) {
				sched_mask = vdec_schedule_mask(vdec,
					core->sched_mask);
				if (vdec == core->last_vdec) {
					/* wrapped back to the start point:
					 * give it one last chance, then stop
					 */
					if (!sched_mask) {
						vdec = NULL;
						break;
					}

					sched_mask = vdec_ready_to_run(vdec,
						sched_mask);

					if (!sched_mask) {
						vdec = NULL;
						break;
					}
					break;
				}

				if (!sched_mask)
					continue;

				sched_mask = vdec_ready_to_run(vdec,
					sched_mask);
				if (sched_mask)
					break;
			}

			if (&vdec->list == &core->connected_vdec_list)
				vdec = NULL;
		}

		worker = vdec;

		if (vdec) {
			unsigned long mask = sched_mask;
			unsigned long i;

			/* setting active_mask should be atomic.
			 * it can be modified by decoder driver callbacks.
			 */
			while (sched_mask) {
				i = __ffs(sched_mask);
				set_bit(i, &vdec->active_mask);
				sched_mask &= ~(1 << i);
			}

			/* vdec's sched_mask is only set from core thread */
			vdec->sched_mask |= mask;
			if (core->last_vdec) {
				if ((core->last_vdec != vdec) &&
					(core->last_vdec->mc_type != vdec->mc_type))
					vdec->mc_loaded = 0;/*clear for reload firmware*/
			} else
				vdec->mc_loaded = 0;
			core->last_vdec = vdec;
			if (debug & 2)
				vdec->mc_loaded = 0;/*alway reload firmware*/
			vdec_set_status(vdec, VDEC_STATUS_ACTIVE);

			core->sched_mask |= mask;
			if (core->parallel_dec == 1)
				vdec_save_active_hw(vdec);
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
			vdec_profile(vdec, VDEC_PROFILE_EVENT_RUN);
#endif
			vdec_prepare_run(vdec, mask);
#ifdef VDEC_DEBUG_SUPPORT
			inc_profi_count(mask, vdec->run_count);
			update_profi_clk_run(vdec, mask, get_current_clk());
#endif
			vdec->run(vdec, mask, vdec_callback, core);


			/* we have some cores scheduled, keep working until
			 * all vdecs are checked with no cores to schedule
			 */
			if (core->parallel_dec == 1) {
				if (vdec_core->vdec_combine_flag == 0)
					up(&core->sem);
			} else
				up(&core->sem);
		}

		/* remove disconnected decoder from active list */
		list_for_each_entry_safe(vdec, tmp, &disconnecting_list, list) {
			list_del(&vdec->list);
			vdec_set_status(vdec, VDEC_STATUS_DISCONNECTED);
			/*core->last_vdec = NULL;*/
			complete(&vdec->inactive_done);
		}

		/* if there is no new work scheduled and nothing
		 * is running, sleep 20ms
		 */
		if (core->parallel_dec == 1) {
			if (vdec_core->vdec_combine_flag == 0) {
				if ((!worker) &&
					((core->sched_mask != core->power_ref_mask)) &&
					(atomic_read(&vdec_core->vdec_nr) > 0) &&
					((core->buff_flag | core->stream_buff_flag) &
					(core->sched_mask ^ core->power_ref_mask))) {
					usleep_range(1000, 2000);
					up(&core->sem);
				}
			} else {
				if ((!worker) && (!core->sched_mask) &&
					(atomic_read(&vdec_core->vdec_nr) > 0) &&
					(core->buff_flag | core->stream_buff_flag)) {
					usleep_range(1000, 2000);
					up(&core->sem);
				}
			}
		} else if ((!worker) && (!core->sched_mask) && (atomic_read(&vdec_core->vdec_nr) > 0)) {
			usleep_range(1000, 2000);
			up(&core->sem);
		}

	}

	return 0;
}
3162
3163#if 1 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8 */
/*
 * Probe the HEVC decompress (IPP) pipeline: reset it, point all canvas
 * entries at @decomp_addr, run a minimal SW-IMP frame/PU command
 * sequence, wait @us_delay microseconds, and read back the debug
 * status.  Returns true when the pipeline reports the expected state.
 * NOTE(review): presumably used by the hevc_workaround_needed()
 * power-up path (a decomp buffer is allocated there) -- confirm with
 * the callers outside this chunk.
 */
static bool test_hevc(u32 decomp_addr, u32 us_delay)
{
	int i;

	/* SW_RESET IPP */
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 1);
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 0);

	/* initialize all canvas table */
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0);
	for (i = 0; i < 32; i++)
		WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
			0x1 | (i << 8) | decomp_addr);
	WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 1);
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR, (0 << 8) | (0<<1) | 1);
	for (i = 0; i < 32; i++)
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);

	/* Initialize mcrcc */
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2);
	WRITE_VREG(HEVCD_MCRCC_CTL2, 0x0);
	WRITE_VREG(HEVCD_MCRCC_CTL3, 0x0);
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);

	/* Decomp initialize */
	WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, 0x0);
	WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);

	/* Frame level initialization */
	WRITE_VREG(HEVCD_IPP_TOP_FRMCONFIG, 0x100 | (0x100 << 16));
	WRITE_VREG(HEVCD_IPP_TOP_TILECONFIG3, 0x0);
	WRITE_VREG(HEVCD_IPP_TOP_LCUCONFIG, 0x1 << 5);
	WRITE_VREG(HEVCD_IPP_BITDEPTH_CONFIG, 0x2 | (0x2 << 2));

	WRITE_VREG(HEVCD_IPP_CONFIG, 0x0);
	WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, 0x0);

	/* Enable SWIMP mode */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_CONFIG, 0x1);

	/* Enable frame */
	WRITE_VREG(HEVCD_IPP_TOP_CNTL, 0x2);
	WRITE_VREG(HEVCD_IPP_TOP_FRMCTL, 0x1);

	/* Send SW-command CTB info */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_CTBINFO, 0x1 << 31);

	/* Send PU_command */
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO0, (0x4 << 9) | (0x4 << 16));
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO1, 0x1 << 3);
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO2, 0x0);
	WRITE_VREG(HEVCD_IPP_SWMPREDIF_PUINFO3, 0x0);

	/* give the pipeline time to process the commands */
	udelay(us_delay);

	WRITE_VREG(HEVCD_IPP_DBG_SEL, 0x2 << 4);

	return (READ_VREG(HEVCD_IPP_DBG_DATA) & 3) == 1;
}
3223
3224void vdec_power_reset(void)
3225{
3226 /* enable vdec1 isolation */
3227 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3228 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0xc0);
3229 /* power off vdec1 memories */
3230 WRITE_VREG(DOS_MEM_PD_VDEC, 0xffffffffUL);
3231 /* vdec1 power off */
3232 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3233 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0xc);
3234
3235 if (has_vdec2()) {
3236 /* enable vdec2 isolation */
3237 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3238 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x300);
3239 /* power off vdec2 memories */
3240 WRITE_VREG(DOS_MEM_PD_VDEC2, 0xffffffffUL);
3241 /* vdec2 power off */
3242 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3243 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0x30);
3244 }
3245
3246 if (has_hdec()) {
3247 /* enable hcodec isolation */
3248 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3249 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x30);
3250 /* power off hcodec memories */
3251 WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
3252 /* hcodec power off */
3253 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3254 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 3);
3255 }
3256
3257 if (has_hevc_vdec()) {
3258 /* enable hevc isolation */
3259 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3260 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0xc00);
3261 /* power off hevc memories */
3262 WRITE_VREG(DOS_MEM_PD_HEVC, 0xffffffffUL);
3263 /* hevc power off */
3264 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3265 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0xc0);
3266 }
3267}
3268EXPORT_SYMBOL(vdec_power_reset);
3269
3270static void vdec_power_switch(struct vdec_pwrc_s *pd, int id, bool on);
3271
3272void vdec_poweron(enum vdec_type_e core)
3273{
3274 void *decomp_addr = NULL;
3275 dma_addr_t decomp_dma_addr;
3276 u32 decomp_addr_aligned = 0;
3277 int hevc_loop = 0;
3278 int sleep_val, iso_val;
3279 bool is_power_ctrl_ver2 = false;
3280
3281 if (core >= VDEC_MAX)
3282 return;
3283
3284 mutex_lock(&vdec_mutex);
3285
3286 vdec_core->power_ref_count[core]++;
3287 if (vdec_core->power_ref_count[core] > 1) {
3288 mutex_unlock(&vdec_mutex);
3289 return;
3290 }
3291
3292 if (vdec_on(core)) {
3293 mutex_unlock(&vdec_mutex);
3294 return;
3295 }
3296
3297 /* power domain check. */
3298 if (!disable_power_domain && vdec_core->pd) {
3299 int pd_id = 0;
3300
3301 if (core == VDEC_1) {
3302 amports_switch_gate("clk_vdec_mux", 1);
3303 vdec_clock_hi_enable();
3304 pd_id = PD_VDEC;
3305 } else if (core == VDEC_HCODEC) {
3306 hcodec_clock_enable();
3307 pd_id = PD_HCODEC;
3308 } else if (core == VDEC_HEVC) {
3309 /* enable hevc clock */
3310 amports_switch_gate("clk_hevc_mux", 1);
3311 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
3312 amports_switch_gate("clk_hevcb_mux", 1);
3313 hevc_clock_hi_enable();
3314 hevc_back_clock_hi_enable();
3315 pd_id = PD_HEVC;
3316 }
3317
3318 vdec_power_switch(vdec_core->pd, pd_id, true);
3319 mutex_unlock(&vdec_mutex);
3320
3321 return;
3322 }
3323
3324 is_power_ctrl_ver2 =
3325 ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3326 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) ? true : false;
3327
3328 if (hevc_workaround_needed() &&
3329 (core == VDEC_HEVC)) {
3330 decomp_addr = codec_mm_dma_alloc_coherent(MEM_NAME,
3331 SZ_64K + SZ_4K, &decomp_dma_addr, GFP_KERNEL, 0);
3332
3333 if (decomp_addr) {
3334 decomp_addr_aligned = ALIGN(decomp_dma_addr, SZ_64K);
3335 memset((u8 *)decomp_addr +
3336 (decomp_addr_aligned - decomp_dma_addr),
3337 0xff, SZ_4K);
3338 } else
3339 pr_err("vdec: alloc HEVC gxbb decomp buffer failed.\n");
3340 }
3341
3342 if (core == VDEC_1) {
3343 sleep_val = is_power_ctrl_ver2 ? 0x2 : 0xc;
3344 iso_val = is_power_ctrl_ver2 ? 0x2 : 0xc0;
3345
3346 /* vdec1 power on */
3347#ifdef CONFIG_AMLOGIC_POWER
3348 if (is_support_power_ctrl()) {
3349 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3350 mutex_unlock(&vdec_mutex);
3351 pr_err("vdec-1 power on ctrl sleep fail.\n");
3352 return;
3353 }
3354 } else {
3355 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3356 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3357 }
3358#else
3359 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3360 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3361#endif
3362 /* wait 10uS */
3363 udelay(10);
3364 /* vdec1 soft reset */
3365 WRITE_VREG(DOS_SW_RESET0, 0xfffffffc);
3366 WRITE_VREG(DOS_SW_RESET0, 0);
3367 /* enable vdec1 clock */
3368 /*
3369 *add power on vdec clock level setting,only for m8 chip,
3370 * m8baby and m8m2 can dynamic adjust vdec clock,
3371 * power on with default clock level
3372 */
3373 amports_switch_gate("clk_vdec_mux", 1);
3374 vdec_clock_hi_enable();
3375 /* power up vdec memories */
3376 WRITE_VREG(DOS_MEM_PD_VDEC, 0);
3377
3378 /* remove vdec1 isolation */
3379#ifdef CONFIG_AMLOGIC_POWER
3380 if (is_support_power_ctrl()) {
3381 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3382 mutex_unlock(&vdec_mutex);
3383 pr_err("vdec-1 power on ctrl iso fail.\n");
3384 return;
3385 }
3386 } else {
3387 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3388 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3389 }
3390#else
3391 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3392 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3393#endif
3394 /* reset DOS top registers */
3395 WRITE_VREG(DOS_VDEC_MCRCC_STALL_CTRL, 0);
3396 } else if (core == VDEC_2) {
3397 if (has_vdec2()) {
3398 /* vdec2 power on */
3399 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3400 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3401 ~0x30);
3402 /* wait 10uS */
3403 udelay(10);
3404 /* vdec2 soft reset */
3405 WRITE_VREG(DOS_SW_RESET2, 0xffffffff);
3406 WRITE_VREG(DOS_SW_RESET2, 0);
3407 /* enable vdec1 clock */
3408 vdec2_clock_hi_enable();
3409 /* power up vdec memories */
3410 WRITE_VREG(DOS_MEM_PD_VDEC2, 0);
3411 /* remove vdec2 isolation */
3412 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3413 READ_AOREG(AO_RTI_GEN_PWR_ISO0) &
3414 ~0x300);
3415 /* reset DOS top registers */
3416 WRITE_VREG(DOS_VDEC2_MCRCC_STALL_CTRL, 0);
3417 }
3418 } else if (core == VDEC_HCODEC) {
3419 if (has_hdec()) {
3420 sleep_val = is_power_ctrl_ver2 ? 0x1 : 0x3;
3421 iso_val = is_power_ctrl_ver2 ? 0x1 : 0x30;
3422 /* hcodec power on */
3423#ifdef CONFIG_AMLOGIC_POWER
3424 if (is_support_power_ctrl()) {
3425 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3426 mutex_unlock(&vdec_mutex);
3427 pr_err("hcodec power on ctrl sleep fail.\n");
3428 return;
3429 }
3430 } else {
3431 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3432 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3433 }
3434#else
3435 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3436 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3437#endif
3438 /* wait 10uS */
3439 udelay(10);
3440 /* hcodec soft reset */
3441 WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
3442 WRITE_VREG(DOS_SW_RESET1, 0);
3443 /* enable hcodec clock */
3444 hcodec_clock_enable();
3445 /* power up hcodec memories */
3446 WRITE_VREG(DOS_MEM_PD_HCODEC, 0);
3447 /* remove hcodec isolation */
3448#ifdef CONFIG_AMLOGIC_POWER
3449 if (is_support_power_ctrl()) {
3450 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3451 mutex_unlock(&vdec_mutex);
3452 pr_err("hcodec power on ctrl iso fail.\n");
3453 return;
3454 }
3455 } else {
3456 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3457 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3458 }
3459#else
3460 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3461 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3462#endif
3463 }
3464 } else if (core == VDEC_HEVC) {
3465 if (has_hevc_vdec()) {
3466 bool hevc_fixed = false;
3467
3468 sleep_val = is_power_ctrl_ver2 ? 0x4 : 0xc0;
3469 iso_val = is_power_ctrl_ver2 ? 0x4 : 0xc00;
3470
3471 while (!hevc_fixed) {
3472 /* hevc power on */
3473#ifdef CONFIG_AMLOGIC_POWER
3474 if (is_support_power_ctrl()) {
3475 if (power_ctrl_sleep_mask(true, sleep_val, 0)) {
3476 mutex_unlock(&vdec_mutex);
3477 pr_err("hevc power on ctrl sleep fail.\n");
3478 return;
3479 }
3480 } else {
3481 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3482 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3483 }
3484#else
3485 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
3486 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & ~sleep_val);
3487#endif
3488 /* wait 10uS */
3489 udelay(10);
3490 /* hevc soft reset */
3491 WRITE_VREG(DOS_SW_RESET3, 0xffffffff);
3492 WRITE_VREG(DOS_SW_RESET3, 0);
3493 /* enable hevc clock */
3494 amports_switch_gate("clk_hevc_mux", 1);
3495 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
3496 amports_switch_gate("clk_hevcb_mux", 1);
3497 hevc_clock_hi_enable();
3498 hevc_back_clock_hi_enable();
3499 /* power up hevc memories */
3500 WRITE_VREG(DOS_MEM_PD_HEVC, 0);
3501 /* remove hevc isolation */
3502#ifdef CONFIG_AMLOGIC_POWER
3503 if (is_support_power_ctrl()) {
3504 if (power_ctrl_iso_mask(true, iso_val, 0)) {
3505 mutex_unlock(&vdec_mutex);
3506 pr_err("hevc power on ctrl iso fail.\n");
3507 return;
3508 }
3509 } else {
3510 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3511 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3512 }
3513#else
3514 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
3515 READ_AOREG(AO_RTI_GEN_PWR_ISO0) & ~iso_val);
3516#endif
3517 if (!hevc_workaround_needed())
3518 break;
3519
3520 if (decomp_addr)
3521 hevc_fixed = test_hevc(
3522 decomp_addr_aligned, 20);
3523
3524 if (!hevc_fixed) {
3525 hevc_loop++;
3526
3527 mutex_unlock(&vdec_mutex);
3528
3529 if (hevc_loop >= HEVC_TEST_LIMIT) {
3530 pr_warn("hevc power sequence over limit\n");
3531 pr_warn("=====================================================\n");
3532 pr_warn(" This chip is identified to have HW failure.\n");
3533 pr_warn(" Please contact sqa-platform to replace the platform.\n");
3534 pr_warn("=====================================================\n");
3535
3536 panic("Force panic for chip detection !!!\n");
3537
3538 break;
3539 }
3540
3541 vdec_poweroff(VDEC_HEVC);
3542
3543 mdelay(10);
3544
3545 mutex_lock(&vdec_mutex);
3546 }
3547 }
3548
3549 if (hevc_loop > hevc_max_reset_count)
3550 hevc_max_reset_count = hevc_loop;
3551
3552 WRITE_VREG(DOS_SW_RESET3, 0xffffffff);
3553 udelay(10);
3554 WRITE_VREG(DOS_SW_RESET3, 0);
3555 }
3556 }
3557
3558 if (decomp_addr)
3559 codec_mm_dma_free_coherent(MEM_NAME,
3560 SZ_64K + SZ_4K, decomp_addr, decomp_dma_addr, 0);
3561
3562 mutex_unlock(&vdec_mutex);
3563}
3564EXPORT_SYMBOL(vdec_poweron);
3565
/*
 * vdec_poweroff() - drop one power reference on @core and, on the last
 * reference, run the hardware power-down sequence for that core.
 *
 * Sequence per core (reverse of power-on): enable isolation -> power off
 * memories -> gate clocks -> assert power-down (sleep) bits.  The whole
 * function runs under vdec_mutex.  On SoCs with a generic power domain
 * (vdec_core->pd) the register sequence is skipped and the PD framework
 * is used instead.
 *
 * NOTE(review): the error paths that return after power_ctrl_*_mask()
 * failures leave the core in a partially powered-down state; presumably
 * acceptable since the failure itself is fatal for the core — confirm.
 */
void vdec_poweroff(enum vdec_type_e core)
{
	int sleep_val, iso_val;
	bool is_power_ctrl_ver2 = false;
	if (core >= VDEC_MAX)
		return;

	mutex_lock(&vdec_mutex);

	/* refcounted: only the last user actually powers the core down */
	vdec_core->power_ref_count[core]--;
	if (vdec_core->power_ref_count[core] > 0) {
		mutex_unlock(&vdec_mutex);
		return;
	}

	/* power domain check. */
	if (!disable_power_domain && vdec_core->pd) {
		int pd_id = 0;

		if (core == VDEC_1) {
			vdec_clock_off();
			pd_id = PD_VDEC;
		} else if (core == VDEC_HCODEC) {
			hcodec_clock_off();
			pd_id = PD_HCODEC;
		} else if (core == VDEC_HEVC) {
			/* disable hevc clock */
			hevc_clock_off();
			if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
				hevc_back_clock_off();
			pd_id = PD_HEVC;
		}

		vdec_power_switch(vdec_core->pd, pd_id, false);
		mutex_unlock(&vdec_mutex);

		return;
	}

	/* SM1 and newer (except TL1) use the v2 single-bit power masks */
	is_power_ctrl_ver2 =
		((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
		(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) ? true : false;

	if (core == VDEC_1) {
		sleep_val = is_power_ctrl_ver2 ? 0x2 : 0xc;
		iso_val = is_power_ctrl_ver2 ? 0x2 : 0xc0;

		/* disable VDEC_1 DMC REQ*/
#ifdef CONFIG_AMLOGIC_POWER
		if (is_support_power_ctrl()) {
			if (power_ctrl_iso_mask(false, iso_val, 0)) {
				mutex_unlock(&vdec_mutex);
				pr_err("vdec-1 power off ctrl iso fail.\n");
				return;
			}
		} else {
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
		}
#else
		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
			READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
		/* power off vdec1 memories */
		WRITE_VREG(DOS_MEM_PD_VDEC, 0xffffffffUL);
		/* disable vdec1 clock */
		vdec_clock_off();
		/* vdec1 power off */
#ifdef CONFIG_AMLOGIC_POWER
		if (is_support_power_ctrl()) {
			if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
				mutex_unlock(&vdec_mutex);
				pr_err("vdec-1 power off ctrl sleep fail.\n");
				return;
			}
		} else {
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
		}
#else
		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
	} else if (core == VDEC_2) {
		if (has_vdec2()) {
			/* enable vdec2 isolation */
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) |
				0x300);
			/* power off vdec2 memories */
			WRITE_VREG(DOS_MEM_PD_VDEC2, 0xffffffffUL);
			/* disable vdec2 clock */
			vdec2_clock_off();
			/* vdec2 power off */
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) |
				0x30);
		}
	} else if (core == VDEC_HCODEC) {
		if (has_hdec()) {
			sleep_val = is_power_ctrl_ver2 ? 0x1 : 0x3;
			iso_val = is_power_ctrl_ver2 ? 0x1 : 0x30;

			/* enable hcodec isolation */
#ifdef CONFIG_AMLOGIC_POWER
			if (is_support_power_ctrl()) {
				if (power_ctrl_iso_mask(false, iso_val, 0)) {
					mutex_unlock(&vdec_mutex);
					pr_err("hcodec power off ctrl iso fail.\n");
					return;
				}
			} else {
				WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
					READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
			}
#else
			WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
				READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
			/* power off hcodec memories */
			WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
			/* disable hcodec clock */
			hcodec_clock_off();
			/* hcodec power off */
#ifdef CONFIG_AMLOGIC_POWER
			if (is_support_power_ctrl()) {
				if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
					mutex_unlock(&vdec_mutex);
					pr_err("hcodec power off ctrl sleep fail.\n");
					return;
				}
			} else {
				WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
					READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
			}
#else
			WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
				READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
		}
	} else if (core == VDEC_HEVC) {
		if (has_hevc_vdec()) {
			sleep_val = is_power_ctrl_ver2 ? 0x4 : 0xc0;
			iso_val = is_power_ctrl_ver2 ? 0x4 : 0xc00;

			/* no_powerdown: keep HEVC powered, just reset it */
			if (no_powerdown == 0) {
				/* enable hevc isolation */
#ifdef CONFIG_AMLOGIC_POWER
				if (is_support_power_ctrl()) {
					if (power_ctrl_iso_mask(false, iso_val, 0)) {
						mutex_unlock(&vdec_mutex);
						pr_err("hevc power off ctrl iso fail.\n");
						return;
					}
				} else {
					WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
						READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
				}
#else
				WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
					READ_AOREG(AO_RTI_GEN_PWR_ISO0) | iso_val);
#endif
				/* power off hevc memories */
				WRITE_VREG(DOS_MEM_PD_HEVC, 0xffffffffUL);

				/* disable hevc clock */
				hevc_clock_off();
				if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
					hevc_back_clock_off();

				/* hevc power off */
#ifdef CONFIG_AMLOGIC_POWER
				if (is_support_power_ctrl()) {
					if (power_ctrl_sleep_mask(false, sleep_val, 0)) {
						mutex_unlock(&vdec_mutex);
						pr_err("hevc power off ctrl sleep fail.\n");
						return;
					}
				} else {
					WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
						READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
				}
#else
				WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
					READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | sleep_val);
#endif
			} else {
				pr_info("!!!!!!!!not power down\n");
				hevc_reset_core(NULL);
				no_powerdown = 0;
			}
		}
	}
	mutex_unlock(&vdec_mutex);
}
3761EXPORT_SYMBOL(vdec_poweroff);
3762
3763bool vdec_on(enum vdec_type_e core)
3764{
3765 bool ret = false;
3766
3767 if (core == VDEC_1) {
3768 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3769 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3770 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3771 ? 0x2 : 0xc)) == 0) &&
3772 (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x100))
3773 ret = true;
3774 } else if (core == VDEC_2) {
3775 if (has_vdec2()) {
3776 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & 0x30) == 0) &&
3777 (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x100))
3778 ret = true;
3779 }
3780 } else if (core == VDEC_HCODEC) {
3781 if (has_hdec()) {
3782 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3783 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3784 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3785 ? 0x1 : 0x3)) == 0) &&
3786 (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x1000000))
3787 ret = true;
3788 }
3789 } else if (core == VDEC_HEVC) {
3790 if (has_hevc_vdec()) {
3791 if (((READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
3792 (((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3793 (get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1))
3794 ? 0x4 : 0xc0)) == 0) &&
3795 (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x1000000))
3796 ret = true;
3797 }
3798 }
3799
3800 return ret;
3801}
3802EXPORT_SYMBOL(vdec_on);
3803
3804#elif 0 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON6TVD */
/*
 * Legacy MESON6TVD power-on path: soft-reset the core, ungate its
 * clock, and clear the DOS stall-control register.  Compiled out
 * (guarded by "#elif 0" above); kept for reference only.
 */
void vdec_poweron(enum vdec_type_e core)
{
	ulong flags;

	spin_lock_irqsave(&lock, flags);

	if (core == VDEC_1) {
		/* vdec1 soft reset */
		WRITE_VREG(DOS_SW_RESET0, 0xfffffffc);
		WRITE_VREG(DOS_SW_RESET0, 0);
		/* enable vdec1 clock */
		vdec_clock_enable();
		/* reset DOS top registers */
		WRITE_VREG(DOS_VDEC_MCRCC_STALL_CTRL, 0);
	} else if (core == VDEC_2) {
		/* vdec2 soft reset */
		WRITE_VREG(DOS_SW_RESET2, 0xffffffff);
		WRITE_VREG(DOS_SW_RESET2, 0);
		/* enable vdec2 clock */
		vdec2_clock_enable();
		/* reset DOS top registers */
		WRITE_VREG(DOS_VDEC2_MCRCC_STALL_CTRL, 0);
	} else if (core == VDEC_HCODEC) {
		/* hcodec soft reset */
		WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
		WRITE_VREG(DOS_SW_RESET1, 0);
		/* enable hcodec clock */
		hcodec_clock_enable();
	}

	spin_unlock_irqrestore(&lock, flags);
}
3837
/*
 * Legacy MESON6TVD power-off path: simply gates the per-core clock.
 * Compiled out (guarded by "#elif 0" above); kept for reference only.
 */
void vdec_poweroff(enum vdec_type_e core)
{
	ulong flags;

	spin_lock_irqsave(&lock, flags);

	if (core == VDEC_1) {
		/* disable vdec1 clock */
		vdec_clock_off();
	} else if (core == VDEC_2) {
		/* disable vdec2 clock */
		vdec2_clock_off();
	} else if (core == VDEC_HCODEC) {
		/* disable hcodec clock */
		hcodec_clock_off();
	}

	spin_unlock_irqrestore(&lock, flags);
}
3857
/*
 * Legacy MESON6TVD "is powered" check: only the clock-enable bit is
 * consulted.  Compiled out (guarded by "#elif 0" above).
 */
bool vdec_on(enum vdec_type_e core)
{
	bool ret = false;

	if (core == VDEC_1) {
		if (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x100)
			ret = true;
	} else if (core == VDEC_2) {
		if (READ_HHI_REG(HHI_VDEC2_CLK_CNTL) & 0x100)
			ret = true;
	} else if (core == VDEC_HCODEC) {
		if (READ_HHI_REG(HHI_VDEC_CLK_CNTL) & 0x1000000)
			ret = true;
	}

	return ret;
}
3875#endif
3876
3877int vdec_source_changed(int format, int width, int height, int fps)
3878{
3879 /* todo: add level routines for clock adjustment per chips */
3880 int ret = -1;
3881 static int on_setting;
3882
3883 if (on_setting > 0)
3884 return ret;/*on changing clk,ignore this change*/
3885
3886 if (vdec_source_get(VDEC_1) == width * height * fps)
3887 return ret;
3888
3889
3890 on_setting = 1;
3891 ret = vdec_source_changed_for_clk_set(format, width, height, fps);
3892 pr_debug("vdec1 video changed to %d x %d %d fps clk->%dMHZ\n",
3893 width, height, fps, vdec_clk_get(VDEC_1));
3894 on_setting = 0;
3895 return ret;
3896
3897}
3898EXPORT_SYMBOL(vdec_source_changed);
3899
/*
 * vdec_reset_core() - soft-reset the VDEC_1 hardware pipeline.
 *
 * Sequence: mask the DOS VDEC DMC request bit, busy-wait until the DMC
 * channel status reports the interface idle, pulse the selected
 * DOS_SW_RESET0 sub-blocks, then re-enable the DMC request bit.
 * @vdec is currently unused.
 *
 * NOTE(review): the DMC idle poll has no timeout — it busy-waits
 * forever if the hardware never reports idle.
 */
void vdec_reset_core(struct vdec_s *vdec)
{
	unsigned long flags;
	unsigned int mask = 0;

	mask = 1 << 13; /*bit13: DOS VDEC interface*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask = 1 << 21; /*bit21: DOS VDEC interface*/

	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* wait for the DMC channel to drain before asserting reset */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;
	/*
	 * 2: assist
	 * 3: vld_reset
	 * 4: vld_part_reset
	 * 5: vfifo reset
	 * 6: iqidct
	 * 7: mc
	 * 8: dblk
	 * 9: pic_dc
	 * 10: psc
	 * 11: mcpu
	 * 12: ccpu
	 * 13: ddr
	 * 14: afifo
	 */
	if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
		(get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TL1)) {
		WRITE_VREG(DOS_SW_RESET0, (1<<3)|(1<<4)|(1<<5)|(1<<7)|(1<<8)|(1<<9));
	} else {
		WRITE_VREG(DOS_SW_RESET0,
			(1<<3)|(1<<4)|(1<<5));
	}
	WRITE_VREG(DOS_SW_RESET0, 0);

	/* restore the DMC request bit now that reset is complete */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);
}
3946EXPORT_SYMBOL(vdec_reset_core);
3947
/*
 * hevc_mmu_dma_check() - quiesce the HEVC SAO MMU DMA engine (G12A+).
 *
 * Polls HEVC_CM_CORE_STATUS until bit0 clears (or ~10ms elapses), then
 * disables the SAO MMU DMA via HEVC_SAO_MMU_DMA_CTRL bit0 and polls
 * HEVC_SAO_MMU_DMA_STATUS until bit0 is set (or another ~10ms passes).
 * Both polls give up on timeout and only log, so the function always
 * returns.  No-op on pre-G12A chips.  @vdec is unused.
 */
void hevc_mmu_dma_check(struct vdec_s *vdec)
{
	ulong timeout;
	u32 data;
	if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A)
		return;
	/* wait up to ~10ms for the CM core to go idle */
	timeout = jiffies + HZ/100;
	while (1) {
		data = READ_VREG(HEVC_CM_CORE_STATUS);
		if ((data & 0x1) == 0)
			break;
		if (time_after(jiffies, timeout)) {
			if (debug & 0x10)
				pr_info(" %s sao mmu dma idle\n", __func__);
			break;
		}
	}
	/*disable sao mmu dma */
	CLEAR_VREG_MASK(HEVC_SAO_MMU_DMA_CTRL, 1 << 0);
	/* wait up to ~10ms for the DMA engine to acknowledge the stop */
	timeout = jiffies + HZ/100;
	while (1) {
		data = READ_VREG(HEVC_SAO_MMU_DMA_STATUS);
		if ((data & 0x1))
			break;
		if (time_after(jiffies, timeout)) {
			if (debug & 0x10)
				pr_err("%s sao mmu dma timeout, num_buf_used = 0x%x\n",
					__func__, (READ_VREG(HEVC_SAO_MMU_STATUS) >> 16));
			break;
		}
	}
}
3980EXPORT_SYMBOL(hevc_mmu_dma_check);
/*
 * hevc_reset_core() - soft-reset the HEVC (and, on G12A+, HEVCB)
 * hardware pipeline.
 *
 * Sequence: stop the stream interface, mask the HEVC DMC request
 * bit(s), busy-wait for the DMC channel to report idle, pulse the
 * DOS_SW_RESET3 sub-block bits, then restore the DMC request bit(s).
 * Called with @vdec == NULL from the power-off path.
 *
 * NOTE(review): like vdec_reset_core(), the DMC idle poll has no
 * timeout.
 */
void hevc_reset_core(struct vdec_s *vdec)
{
	unsigned long flags;
	unsigned int mask = 0;
	mask = 1 << 4; /*bit4: hevc*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
		mask |= 1 << 8; /*bit8: hevcb*/
	WRITE_VREG(HEVC_STREAM_CONTROL, 0);
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) & ~mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

	/* wait for the DMC channel to drain before asserting reset */
	while (!(codec_dmcbus_read(DMC_CHAN_STS)
		& mask))
		;

	if (vdec == NULL || input_frame_based(vdec))
		WRITE_VREG(HEVC_STREAM_CONTROL, 0);

	/*
	 * 2: assist
	 * 3: parser
	 * 4: parser_state
	 * 8: dblk
	 * 11:mcpu
	 * 12:ccpu
	 * 13:ddr
	 * 14:iqit
	 * 15:ipp
	 * 17:qdct
	 * 18:mpred
	 * 19:sao
	 * 24:hevc_afifo
	 */
	WRITE_VREG(DOS_SW_RESET3,
		(1<<3)|(1<<4)|(1<<8)|(1<<11)|
		(1<<12)|(1<<13)|(1<<14)|(1<<15)|
		(1<<17)|(1<<18)|(1<<19)|(1<<24));

	WRITE_VREG(DOS_SW_RESET3, 0);


	/* restore the DMC request bit(s) now that reset is complete */
	spin_lock_irqsave(&vdec_spin_lock, flags);
	codec_dmcbus_write(DMC_REQ_CTRL,
		codec_dmcbus_read(DMC_REQ_CTRL) | mask);
	spin_unlock_irqrestore(&vdec_spin_lock, flags);

}
4030EXPORT_SYMBOL(hevc_reset_core);
4031
4032int vdec2_source_changed(int format, int width, int height, int fps)
4033{
4034 int ret = -1;
4035 static int on_setting;
4036
4037 if (has_vdec2()) {
4038 /* todo: add level routines for clock adjustment per chips */
4039 if (on_setting != 0)
4040 return ret;/*on changing clk,ignore this change*/
4041
4042 if (vdec_source_get(VDEC_2) == width * height * fps)
4043 return ret;
4044
4045 on_setting = 1;
4046 ret = vdec_source_changed_for_clk_set(format,
4047 width, height, fps);
4048 pr_debug("vdec2 video changed to %d x %d %d fps clk->%dMHZ\n",
4049 width, height, fps, vdec_clk_get(VDEC_2));
4050 on_setting = 0;
4051 return ret;
4052 }
4053 return 0;
4054}
4055EXPORT_SYMBOL(vdec2_source_changed);
4056
4057int hevc_source_changed(int format, int width, int height, int fps)
4058{
4059 /* todo: add level routines for clock adjustment per chips */
4060 int ret = -1;
4061 static int on_setting;
4062
4063 if (on_setting != 0)
4064 return ret;/*on changing clk,ignore this change*/
4065
4066 if (vdec_source_get(VDEC_HEVC) == width * height * fps)
4067 return ret;
4068
4069 on_setting = 1;
4070 ret = vdec_source_changed_for_clk_set(format, width, height, fps);
4071 pr_debug("hevc video changed to %d x %d %d fps clk->%dMHZ\n",
4072 width, height, fps, vdec_clk_get(VDEC_HEVC));
4073 on_setting = 0;
4074
4075 return ret;
4076}
4077EXPORT_SYMBOL(hevc_source_changed);
4078
/*
 * Name/offset table of AMRISC MCPU/CCPU control registers and the
 * AV_SCRATCH mailbox registers, dumped by amrisc_regs_show() below.
 */
static struct am_reg am_risc[] = {
	{"MSP", 0x300},
	{"MPSR", 0x301},
	{"MCPU_INT_BASE", 0x302},
	{"MCPU_INTR_GRP", 0x303},
	{"MCPU_INTR_MSK", 0x304},
	{"MCPU_INTR_REQ", 0x305},
	{"MPC-P", 0x306},
	{"MPC-D", 0x307},
	{"MPC_E", 0x308},
	{"MPC_W", 0x309},
	{"CSP", 0x320},
	{"CPSR", 0x321},
	{"CCPU_INT_BASE", 0x322},
	{"CCPU_INTR_GRP", 0x323},
	{"CCPU_INTR_MSK", 0x324},
	{"CCPU_INTR_REQ", 0x325},
	{"CPC-P", 0x326},
	{"CPC-D", 0x327},
	{"CPC_E", 0x328},
	{"CPC_W", 0x329},
	{"AV_SCRATCH_0", 0x09c0},
	{"AV_SCRATCH_1", 0x09c1},
	{"AV_SCRATCH_2", 0x09c2},
	{"AV_SCRATCH_3", 0x09c3},
	{"AV_SCRATCH_4", 0x09c4},
	{"AV_SCRATCH_5", 0x09c5},
	{"AV_SCRATCH_6", 0x09c6},
	{"AV_SCRATCH_7", 0x09c7},
	{"AV_SCRATCH_8", 0x09c8},
	{"AV_SCRATCH_9", 0x09c9},
	{"AV_SCRATCH_A", 0x09ca},
	{"AV_SCRATCH_B", 0x09cb},
	{"AV_SCRATCH_C", 0x09cc},
	{"AV_SCRATCH_D", 0x09cd},
	{"AV_SCRATCH_E", 0x09ce},
	{"AV_SCRATCH_F", 0x09cf},
	{"AV_SCRATCH_G", 0x09d0},
	{"AV_SCRATCH_H", 0x09d1},
	{"AV_SCRATCH_I", 0x09d2},
	{"AV_SCRATCH_J", 0x09d3},
	{"AV_SCRATCH_K", 0x09d4},
	{"AV_SCRATCH_L", 0x09d5},
	{"AV_SCRATCH_M", 0x09d6},
	{"AV_SCRATCH_N", 0x09d7},
};
4125
4126static ssize_t amrisc_regs_show(struct class *class,
4127 struct class_attribute *attr, char *buf)
4128{
4129 char *pbuf = buf;
4130 struct am_reg *regs = am_risc;
4131 int rsize = sizeof(am_risc) / sizeof(struct am_reg);
4132 int i;
4133 unsigned int val;
4134 ssize_t ret;
4135
4136 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
4137 mutex_lock(&vdec_mutex);
4138 if (!vdec_on(VDEC_1)) {
4139 mutex_unlock(&vdec_mutex);
4140 pbuf += sprintf(pbuf, "amrisc is power off\n");
4141 ret = pbuf - buf;
4142 return ret;
4143 }
4144 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4145 /*TODO:M6 define */
4146 /*
4147 * switch_mod_gate_by_type(MOD_VDEC, 1);
4148 */
4149 amports_switch_gate("vdec", 1);
4150 }
4151 pbuf += sprintf(pbuf, "amrisc registers show:\n");
4152 for (i = 0; i < rsize; i++) {
4153 val = READ_VREG(regs[i].offset);
4154 pbuf += sprintf(pbuf, "%s(%#x)\t:%#x(%d)\n",
4155 regs[i].name, regs[i].offset, val, val);
4156 }
4157 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
4158 mutex_unlock(&vdec_mutex);
4159 else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4160 /*TODO:M6 define */
4161 /*
4162 * switch_mod_gate_by_type(MOD_VDEC, 0);
4163 */
4164 amports_switch_gate("vdec", 0);
4165 }
4166 ret = pbuf - buf;
4167 return ret;
4168}
4169
4170static ssize_t dump_trace_show(struct class *class,
4171 struct class_attribute *attr, char *buf)
4172{
4173 int i;
4174 char *pbuf = buf;
4175 ssize_t ret;
4176 u16 *trace_buf = kmalloc(debug_trace_num * 2, GFP_KERNEL);
4177
4178 if (!trace_buf) {
4179 pbuf += sprintf(pbuf, "No Memory bug\n");
4180 ret = pbuf - buf;
4181 return ret;
4182 }
4183 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
4184 mutex_lock(&vdec_mutex);
4185 if (!vdec_on(VDEC_1)) {
4186 mutex_unlock(&vdec_mutex);
4187 kfree(trace_buf);
4188 pbuf += sprintf(pbuf, "amrisc is power off\n");
4189 ret = pbuf - buf;
4190 return ret;
4191 }
4192 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4193 /*TODO:M6 define */
4194 /*
4195 * switch_mod_gate_by_type(MOD_VDEC, 1);
4196 */
4197 amports_switch_gate("vdec", 1);
4198 }
4199 pr_info("dump trace steps:%d start\n", debug_trace_num);
4200 i = 0;
4201 while (i <= debug_trace_num - 16) {
4202 trace_buf[i] = READ_VREG(MPC_E);
4203 trace_buf[i + 1] = READ_VREG(MPC_E);
4204 trace_buf[i + 2] = READ_VREG(MPC_E);
4205 trace_buf[i + 3] = READ_VREG(MPC_E);
4206 trace_buf[i + 4] = READ_VREG(MPC_E);
4207 trace_buf[i + 5] = READ_VREG(MPC_E);
4208 trace_buf[i + 6] = READ_VREG(MPC_E);
4209 trace_buf[i + 7] = READ_VREG(MPC_E);
4210 trace_buf[i + 8] = READ_VREG(MPC_E);
4211 trace_buf[i + 9] = READ_VREG(MPC_E);
4212 trace_buf[i + 10] = READ_VREG(MPC_E);
4213 trace_buf[i + 11] = READ_VREG(MPC_E);
4214 trace_buf[i + 12] = READ_VREG(MPC_E);
4215 trace_buf[i + 13] = READ_VREG(MPC_E);
4216 trace_buf[i + 14] = READ_VREG(MPC_E);
4217 trace_buf[i + 15] = READ_VREG(MPC_E);
4218 i += 16;
4219 };
4220 pr_info("dump trace steps:%d finished\n", debug_trace_num);
4221 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
4222 mutex_unlock(&vdec_mutex);
4223 else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
4224 /*TODO:M6 define */
4225 /*
4226 * switch_mod_gate_by_type(MOD_VDEC, 0);
4227 */
4228 amports_switch_gate("vdec", 0);
4229 }
4230 for (i = 0; i < debug_trace_num; i++) {
4231 if (i % 4 == 0) {
4232 if (i % 16 == 0)
4233 pbuf += sprintf(pbuf, "\n");
4234 else if (i % 8 == 0)
4235 pbuf += sprintf(pbuf, " ");
4236 else /* 4 */
4237 pbuf += sprintf(pbuf, " ");
4238 }
4239 pbuf += sprintf(pbuf, "%04x:", trace_buf[i]);
4240 }
4241 while (i < debug_trace_num)
4242 ;
4243 kfree(trace_buf);
4244 pbuf += sprintf(pbuf, "\n");
4245 ret = pbuf - buf;
4246 return ret;
4247}
4248
4249static ssize_t clock_level_show(struct class *class,
4250 struct class_attribute *attr, char *buf)
4251{
4252 char *pbuf = buf;
4253 size_t ret;
4254
4255 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_1));
4256
4257 if (has_vdec2())
4258 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_2));
4259
4260 if (has_hevc_vdec())
4261 pbuf += sprintf(pbuf, "%dMHZ\n", vdec_clk_get(VDEC_HEVC));
4262
4263 ret = pbuf - buf;
4264 return ret;
4265}
4266
4267static ssize_t enable_mvdec_info_show(struct class *cla,
4268 struct class_attribute *attr, char *buf)
4269{
4270 return sprintf(buf, "%d\n", enable_mvdec_info);
4271}
4272
4273static ssize_t enable_mvdec_info_store(struct class *cla,
4274 struct class_attribute *attr,
4275 const char *buf, size_t count)
4276{
4277 int r;
4278 int val;
4279
4280 r = kstrtoint(buf, 0, &val);
4281 if (r < 0)
4282 return -EINVAL;
4283 enable_mvdec_info = val;
4284
4285 return count;
4286}
4287static ssize_t poweron_clock_level_store(struct class *class,
4288 struct class_attribute *attr,
4289 const char *buf, size_t size)
4290{
4291 unsigned int val;
4292 ssize_t ret;
4293
4294 /*ret = sscanf(buf, "%d", &val);*/
4295 ret = kstrtoint(buf, 0, &val);
4296
4297 if (ret != 0)
4298 return -EINVAL;
4299 poweron_clock_level = val;
4300 return size;
4301}
4302
4303static ssize_t poweron_clock_level_show(struct class *class,
4304 struct class_attribute *attr, char *buf)
4305{
4306 return sprintf(buf, "%d\n", poweron_clock_level);
4307}
4308
4309/*
4310 *if keep_vdec_mem == 1
4311 *always don't release
4312 *vdec 64 memory for fast play.
4313 */
4314static ssize_t keep_vdec_mem_store(struct class *class,
4315 struct class_attribute *attr,
4316 const char *buf, size_t size)
4317{
4318 unsigned int val;
4319 ssize_t ret;
4320
4321 /*ret = sscanf(buf, "%d", &val);*/
4322 ret = kstrtoint(buf, 0, &val);
4323 if (ret != 0)
4324 return -EINVAL;
4325 keep_vdec_mem = val;
4326 return size;
4327}
4328
4329static ssize_t keep_vdec_mem_show(struct class *class,
4330 struct class_attribute *attr, char *buf)
4331{
4332 return sprintf(buf, "%d\n", keep_vdec_mem);
4333}
4334
4335#ifdef VDEC_DEBUG_SUPPORT
4336static ssize_t debug_store(struct class *class,
4337 struct class_attribute *attr,
4338 const char *buf, size_t size)
4339{
4340 struct vdec_s *vdec;
4341 struct vdec_core_s *core = vdec_core;
4342 unsigned long flags;
4343
4344 unsigned id;
4345 unsigned val;
4346 ssize_t ret;
4347 char cbuf[32];
4348
4349 cbuf[0] = 0;
4350 ret = sscanf(buf, "%s %x %x", cbuf, &id, &val);
4351 /*pr_info(
4352 "%s(%s)=>ret %ld: %s, %x, %x\n",
4353 __func__, buf, ret, cbuf, id, val);*/
4354 if (strcmp(cbuf, "schedule") == 0) {
4355 pr_info("VDEC_DEBUG: force schedule\n");
4356 up(&core->sem);
4357 } else if (strcmp(cbuf, "power_off") == 0) {
4358 pr_info("VDEC_DEBUG: power off core %d\n", id);
4359 vdec_poweroff(id);
4360 } else if (strcmp(cbuf, "power_on") == 0) {
4361 pr_info("VDEC_DEBUG: power_on core %d\n", id);
4362 vdec_poweron(id);
4363 } else if (strcmp(cbuf, "wr") == 0) {
4364 pr_info("VDEC_DEBUG: WRITE_VREG(0x%x, 0x%x)\n",
4365 id, val);
4366 WRITE_VREG(id, val);
4367 } else if (strcmp(cbuf, "rd") == 0) {
4368 pr_info("VDEC_DEBUG: READ_VREG(0x%x) = 0x%x\n",
4369 id, READ_VREG(id));
4370 } else if (strcmp(cbuf, "read_hevc_clk_reg") == 0) {
4371 pr_info(
4372 "VDEC_DEBUG: HHI_VDEC4_CLK_CNTL = 0x%x, HHI_VDEC2_CLK_CNTL = 0x%x\n",
4373 READ_HHI_REG(HHI_VDEC4_CLK_CNTL),
4374 READ_HHI_REG(HHI_VDEC2_CLK_CNTL));
4375 }
4376
4377 flags = vdec_core_lock(vdec_core);
4378
4379 list_for_each_entry(vdec,
4380 &core->connected_vdec_list, list) {
4381 pr_info("vdec: status %d, id %d\n", vdec->status, vdec->id);
4382 if (((vdec->status == VDEC_STATUS_CONNECTED
4383 || vdec->status == VDEC_STATUS_ACTIVE)) &&
4384 (vdec->id == id)) {
4385 /*to add*/
4386 break;
4387 }
4388 }
4389 vdec_core_unlock(vdec_core, flags);
4390 return size;
4391}
4392
4393static ssize_t debug_show(struct class *class,
4394 struct class_attribute *attr, char *buf)
4395{
4396 char *pbuf = buf;
4397 struct vdec_s *vdec;
4398 struct vdec_core_s *core = vdec_core;
4399 unsigned long flags = vdec_core_lock(vdec_core);
4400 u64 tmp;
4401
4402 pbuf += sprintf(pbuf,
4403 "============== help:\n");
4404 pbuf += sprintf(pbuf,
4405 "'echo xxx > debug' usuage:\n");
4406 pbuf += sprintf(pbuf,
4407 "schedule - trigger schedule thread to run\n");
4408 pbuf += sprintf(pbuf,
4409 "power_off core_num - call vdec_poweroff(core_num)\n");
4410 pbuf += sprintf(pbuf,
4411 "power_on core_num - call vdec_poweron(core_num)\n");
4412 pbuf += sprintf(pbuf,
4413 "wr adr val - call WRITE_VREG(adr, val)\n");
4414 pbuf += sprintf(pbuf,
4415 "rd adr - call READ_VREG(adr)\n");
4416 pbuf += sprintf(pbuf,
4417 "read_hevc_clk_reg - read HHI register for hevc clk\n");
4418 pbuf += sprintf(pbuf,
4419 "===================\n");
4420
4421 pbuf += sprintf(pbuf,
4422 "name(core)\tschedule_count\trun_count\tinput_underrun\tdecbuf_not_ready\trun_time\n");
4423 list_for_each_entry(vdec,
4424 &core->connected_vdec_list, list) {
4425 enum vdec_type_e type;
4426 if ((vdec->status == VDEC_STATUS_CONNECTED
4427 || vdec->status == VDEC_STATUS_ACTIVE)) {
4428 for (type = VDEC_1; type < VDEC_MAX; type++) {
4429 if (vdec->core_mask & (1 << type)) {
4430 pbuf += sprintf(pbuf, "%s(%d):",
4431 vdec->vf_provider_name, type);
4432 pbuf += sprintf(pbuf, "\t%d",
4433 vdec->check_count[type]);
4434 pbuf += sprintf(pbuf, "\t%d",
4435 vdec->run_count[type]);
4436 pbuf += sprintf(pbuf, "\t%d",
4437 vdec->input_underrun_count[type]);
4438 pbuf += sprintf(pbuf, "\t%d",
4439 vdec->not_run_ready_count[type]);
4440 tmp = vdec->run_clk[type] * 100;
4441 do_div(tmp, vdec->total_clk[type]);
4442 pbuf += sprintf(pbuf,
4443 "\t%d%%\n",
4444 vdec->total_clk[type] == 0 ? 0 :
4445 (u32)tmp);
4446 }
4447 }
4448 }
4449 }
4450
4451 vdec_core_unlock(vdec_core, flags);
4452 return pbuf - buf;
4453
4454}
4455#endif
4456
4457/*irq num as same as .dts*/
4458/*
4459 * interrupts = <0 3 1
4460 * 0 23 1
4461 * 0 32 1
4462 * 0 43 1
4463 * 0 44 1
4464 * 0 45 1>;
4465 * interrupt-names = "vsync",
4466 * "demux",
4467 * "parser",
4468 * "mailbox_0",
4469 * "mailbox_1",
4470 * "mailbox_2";
4471 */
/*
 * vdec_request_threaded_irq() - register (or re-bind) a decoder ISR.
 *
 * @num: logical irq slot (index into the platform device's interrupt
 *       list; must be < VDEC_IRQ_MAX).
 * @handler/@thread_fn: decoder-supplied hard-irq / threaded handlers,
 *       stored in the slot and dispatched by the core's vdec_isr /
 *       vdec_thread_isr trampolines.
 * @dev: opaque cookie stored as dev_id for the slot.
 *
 * On first use of a slot the real Linux irq is looked up and requested
 * once; subsequent calls for the same slot only swap the stored
 * handlers, so different decoders can share one physical irq without
 * re-requesting it.
 *
 * Returns 0 on success, -EINVAL for a bad slot, -EIO when
 * request_threaded_irq() fails.
 */
s32 vdec_request_threaded_irq(enum vdec_irq_num num,
			irq_handler_t handler,
			irq_handler_t thread_fn,
			unsigned long irqflags,
			const char *devname, void *dev)
{
	s32 res_irq;
	s32 ret = 0;

	if (num >= VDEC_IRQ_MAX) {
		pr_err("[%s] request irq error, irq num too big!", __func__);
		return -EINVAL;
	}

	/* irq < 0 means this slot has never been requested */
	if (vdec_core->isr_context[num].irq < 0) {
		res_irq = platform_get_irq(
			vdec_core->vdec_core_platform_device, num);
		if (res_irq < 0) {
			pr_err("[%s] get irq error!", __func__);
			return -EINVAL;
		}

		vdec_core->isr_context[num].irq = res_irq;
		vdec_core->isr_context[num].dev_isr = handler;
		vdec_core->isr_context[num].dev_threaded_isr = thread_fn;
		vdec_core->isr_context[num].dev_id = dev;

		/* IRQF_ONESHOT is required when a threaded handler is used */
		ret = request_threaded_irq(res_irq,
			vdec_isr,
			vdec_thread_isr,
			(thread_fn) ? IRQF_ONESHOT : irqflags,
			devname,
			&vdec_core->isr_context[num]);

		if (ret) {
			/* roll back the slot so a later call can retry */
			vdec_core->isr_context[num].irq = -1;
			vdec_core->isr_context[num].dev_isr = NULL;
			vdec_core->isr_context[num].dev_threaded_isr = NULL;
			vdec_core->isr_context[num].dev_id = NULL;

			pr_err("vdec irq register error for %s.\n", devname);
			return -EIO;
		}
	} else {
		/* irq already requested: just swap in the new handlers */
		vdec_core->isr_context[num].dev_isr = handler;
		vdec_core->isr_context[num].dev_threaded_isr = thread_fn;
		vdec_core->isr_context[num].dev_id = dev;
	}

	return ret;
}
4523EXPORT_SYMBOL(vdec_request_threaded_irq);
4524
4525s32 vdec_request_irq(enum vdec_irq_num num, irq_handler_t handler,
4526 const char *devname, void *dev)
4527{
4528 pr_debug("vdec_request_irq %p, %s\n", handler, devname);
4529
4530 return vdec_request_threaded_irq(num,
4531 handler,
4532 NULL,/*no thread_fn*/
4533 IRQF_SHARED,
4534 devname,
4535 dev);
4536}
4537EXPORT_SYMBOL(vdec_request_irq);
4538
/*
 * vdec_free_irq() - detach the decoder handlers from irq slot @num.
 *
 * The Linux irq itself stays requested (see vdec_request_threaded_irq);
 * only the dispatch pointers are cleared, then synchronize_irq() waits
 * for any in-flight handler to finish.  @dev is unused.
 */
void vdec_free_irq(enum vdec_irq_num num, void *dev)
{
	if (num >= VDEC_IRQ_MAX) {
		pr_err("[%s] request irq error, irq num too big!", __func__);
		return;
	}
	/*
	 *assume amrisc is stopped already and there is no mailbox interrupt
	 * when we reset pointers here.
	 */
	vdec_core->isr_context[num].dev_isr = NULL;
	vdec_core->isr_context[num].dev_threaded_isr = NULL;
	vdec_core->isr_context[num].dev_id = NULL;
	synchronize_irq(vdec_core->isr_context[num].irq);
}
4554EXPORT_SYMBOL(vdec_free_irq);
4555
4556struct vdec_s *vdec_get_default_vdec_for_userdata(void)
4557{
4558 struct vdec_s *vdec;
4559 struct vdec_s *ret_vdec;
4560 struct vdec_core_s *core = vdec_core;
4561 unsigned long flags;
4562 int id;
4563
4564 flags = vdec_core_lock(vdec_core);
4565
4566 id = 0x10000000;
4567 ret_vdec = NULL;
4568 if (!list_empty(&core->connected_vdec_list)) {
4569 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4570 if (vdec->id < id) {
4571 id = vdec->id;
4572 ret_vdec = vdec;
4573 }
4574 }
4575 }
4576
4577 vdec_core_unlock(vdec_core, flags);
4578
4579 return ret_vdec;
4580}
4581EXPORT_SYMBOL(vdec_get_default_vdec_for_userdata);
4582
4583struct vdec_s *vdec_get_vdec_by_id(int vdec_id)
4584{
4585 struct vdec_s *vdec;
4586 struct vdec_s *ret_vdec;
4587 struct vdec_core_s *core = vdec_core;
4588 unsigned long flags;
4589
4590 flags = vdec_core_lock(vdec_core);
4591
4592 ret_vdec = NULL;
4593 if (!list_empty(&core->connected_vdec_list)) {
4594 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4595 if (vdec->id == vdec_id) {
4596 ret_vdec = vdec;
4597 break;
4598 }
4599 }
4600 }
4601
4602 vdec_core_unlock(vdec_core, flags);
4603
4604 return ret_vdec;
4605}
4606EXPORT_SYMBOL(vdec_get_vdec_by_id);
4607
4608int vdec_read_user_data(struct vdec_s *vdec,
4609 struct userdata_param_t *p_userdata_param)
4610{
4611 int ret = 0;
4612
4613 if (!vdec)
4614 vdec = vdec_get_default_vdec_for_userdata();
4615
4616 if (vdec) {
4617 if (vdec->user_data_read)
4618 ret = vdec->user_data_read(vdec, p_userdata_param);
4619 }
4620 return ret;
4621}
4622EXPORT_SYMBOL(vdec_read_user_data);
4623
4624int vdec_wakeup_userdata_poll(struct vdec_s *vdec)
4625{
4626 if (vdec) {
4627 if (vdec->wakeup_userdata_poll)
4628 vdec->wakeup_userdata_poll(vdec);
4629 }
4630
4631 return 0;
4632}
4633EXPORT_SYMBOL(vdec_wakeup_userdata_poll);
4634
4635void vdec_reset_userdata_fifo(struct vdec_s *vdec, int bInit)
4636{
4637 if (!vdec)
4638 vdec = vdec_get_default_vdec_for_userdata();
4639
4640 if (vdec) {
4641 if (vdec->reset_userdata_fifo)
4642 vdec->reset_userdata_fifo(vdec, bInit);
4643 }
4644}
4645EXPORT_SYMBOL(vdec_reset_userdata_fifo);
4646
4647static int dump_mode;
4648static ssize_t dump_risc_mem_store(struct class *class,
4649 struct class_attribute *attr,
4650 const char *buf, size_t size)/*set*/
4651{
4652 unsigned int val;
4653 ssize_t ret;
4654 char dump_mode_str[4] = "PRL";
4655
4656 /*ret = sscanf(buf, "%d", &val);*/
4657 ret = kstrtoint(buf, 0, &val);
4658
4659 if (ret != 0)
4660 return -EINVAL;
4661 dump_mode = val & 0x3;
4662 pr_info("set dump mode to %d,%c_mem\n",
4663 dump_mode, dump_mode_str[dump_mode]);
4664 return size;
4665}
/*
 * Read one word of amrisc memory via the indirect access pair:
 * write the address to VREG 0x31b, then read the data from 0x31c.
 * NOTE(review): register numbers are magic constants; presumably the
 * amrisc debug access ports - confirm against the amrisc register map.
 */
static u32 read_amrisc_reg(int reg)
{
	WRITE_VREG(0x31b, reg);
	return READ_VREG(0x31c);
}
4671
/*
 * Dump amrisc program memory (pmem) to the kernel log, one word per
 * line in ".o"-compatible "%08x // 0x%04x:" form.
 * VREG 0x301/0x31d appear to arm the indirect dump window before the
 * word-by-word reads - register meanings not visible here; confirm
 * against the amrisc spec. NOTE(review): the loop stops at 0xffe
 * (i < 0xfff), so the last word of a 4K pmem would be skipped -
 * verify whether 0x1000 was intended.
 */
static void dump_pmem(void)
{
	int i;

	WRITE_VREG(0x301, 0x8000);
	WRITE_VREG(0x31d, 0);
	pr_info("start dump amrisc pmem of risc\n");
	for (i = 0; i < 0xfff; i++) {
		/*same as .o format*/
		pr_info("%08x // 0x%04x:\n", read_amrisc_reg(i), i);
	}
}
4684
/*
 * Dump amrisc local memory (lmem) to the kernel log, one word per
 * line. Mirrors dump_pmem() but selects lmem via VREG 0x31d = 2.
 * NOTE(review): loop bound 0x3ff skips the final word of a 1K lmem -
 * verify whether 0x400 was intended.
 */
static void dump_lmem(void)
{
	int i;

	WRITE_VREG(0x301, 0x8000);
	WRITE_VREG(0x31d, 2);
	pr_info("start dump amrisc lmem\n");
	for (i = 0; i < 0x3ff; i++) {
		/*same as */
		pr_info("[%04x] = 0x%08x:\n", i, read_amrisc_reg(i));
	}
}
4697
/*
 * dump_risc_mem sysfs show: dump the memory selected via the store
 * handler (dump_mode) to the kernel log, then report "done".
 *
 * Power handling is asymmetric on purpose: on M8+ the VDEC_1 power
 * state is checked under vdec_mutex and the dump is refused while the
 * core is off; on pre-M8 the vdec gate is switched on around the dump
 * instead. NOTE(review): dump_mode == 1 ("R"/register mem per the
 * store handler) has no case here and silently does nothing - confirm
 * whether that is intentional.
 */
static ssize_t dump_risc_mem_show(struct class *class,
			struct class_attribute *attr, char *buf)
{
	char *pbuf = buf;
	int ret;

	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8) {
		mutex_lock(&vdec_mutex);
		if (!vdec_on(VDEC_1)) {
			/* amrisc reads would hang with the core off */
			mutex_unlock(&vdec_mutex);
			pbuf += sprintf(pbuf, "amrisc is power off\n");
			ret = pbuf - buf;
			return ret;
		}
	} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
		/*TODO:M6 define */
		/*
		 * switch_mod_gate_by_type(MOD_VDEC, 1);
		 */
		amports_switch_gate("vdec", 1);
	}
	/*start do**/
	switch (dump_mode) {
	case 0:
		dump_pmem();
		break;
	case 2:
		dump_lmem();
		break;
	default:
		break;
	}

	/*done*/
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M8)
		mutex_unlock(&vdec_mutex);
	else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_M6) {
		/*TODO:M6 define */
		/*
		 * switch_mod_gate_by_type(MOD_VDEC, 0);
		 */
		amports_switch_gate("vdec", 0);
	}
	return sprintf(buf, "done\n");
}
4743
4744static ssize_t core_show(struct class *class, struct class_attribute *attr,
4745 char *buf)
4746{
4747 struct vdec_core_s *core = vdec_core;
4748 char *pbuf = buf;
4749
4750 if (list_empty(&core->connected_vdec_list))
4751 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4752 else {
4753 struct vdec_s *vdec;
4754
4755 pbuf += sprintf(pbuf,
4756 " Core: last_sched %p, sched_mask %lx\n",
4757 core->last_vdec,
4758 core->sched_mask);
4759
4760 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4761 pbuf += sprintf(pbuf,
4762 "\tvdec.%d (%p (%s)), status = %s,\ttype = %s, \tactive_mask = %lx\n",
4763 vdec->id,
4764 vdec,
4765 vdec_device_name[vdec->format * 2],
4766 vdec_status_str(vdec),
4767 vdec_type_str(vdec),
4768 vdec->active_mask);
4769 }
4770 }
4771
4772 return pbuf - buf;
4773}
4774
/*
 * vdec_status sysfs show: print a per-channel statistics report for
 * every connected or active vdec instance.
 *
 * The connected list is walked under vdec_core_lock; for each live
 * instance, vdec_status() fills a vdec_info snapshot that is then
 * formatted field by field. On a vdec_status() failure the report is
 * cut short with "err.".
 * NOTE(review): the "bit rate" line assumes vs.bit_rate is in
 * bytes/sec (divide by 1024, multiply by 8 for kbps) - confirm the
 * unit against the producers of vdec_info.
 */
static ssize_t vdec_status_show(struct class *class,
			struct class_attribute *attr, char *buf)
{
	char *pbuf = buf;
	struct vdec_s *vdec;
	struct vdec_info vs;
	unsigned char vdec_num = 0;	/* report channel counter */
	struct vdec_core_s *core = vdec_core;
	unsigned long flags = vdec_core_lock(vdec_core);

	if (list_empty(&core->connected_vdec_list)) {
		pbuf += sprintf(pbuf, "No vdec.\n");
		goto out;
	}

	list_for_each_entry(vdec, &core->connected_vdec_list, list) {
		/* only report instances that are actually running */
		if ((vdec->status == VDEC_STATUS_CONNECTED
			|| vdec->status == VDEC_STATUS_ACTIVE)) {
			memset(&vs, 0, sizeof(vs));
			if (vdec_status(vdec, &vs)) {
				pbuf += sprintf(pbuf, "err.\n");
				goto out;
			}
			pbuf += sprintf(pbuf,
				"vdec channel %u statistics:\n",
				vdec_num);
			pbuf += sprintf(pbuf,
				"%13s : %s\n", "device name",
				vs.vdec_name);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame width",
				vs.frame_width);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame height",
				vs.frame_height);
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "frame rate",
				vs.frame_rate, "fps");
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "bit rate",
				vs.bit_rate / 1024 * 8, "kbps");
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "status",
				vs.status);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame dur",
				vs.frame_dur);
			pbuf += sprintf(pbuf,
				"%13s : %u %s\n", "frame data",
				vs.frame_data / 1024, "KB");
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "frame count",
				vs.frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "drop count",
				vs.drop_frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "fra err count",
				vs.error_frame_count);
			pbuf += sprintf(pbuf,
				"%13s : %u\n", "hw err count",
				vs.error_count);
			pbuf += sprintf(pbuf,
				"%13s : %llu %s\n", "total data",
				vs.total_data / 1024, "KB");
			pbuf += sprintf(pbuf,
				"%13s : %x\n\n", "ratio_control",
				vs.ratio_control);

			vdec_num++;
		}
	}
out:
	vdec_core_unlock(vdec_core, flags);
	return pbuf - buf;
}
4851
4852static ssize_t dump_vdec_blocks_show(struct class *class,
4853 struct class_attribute *attr, char *buf)
4854{
4855 struct vdec_core_s *core = vdec_core;
4856 char *pbuf = buf;
4857 unsigned long flags = vdec_core_lock(vdec_core);
4858
4859 if (list_empty(&core->connected_vdec_list))
4860 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4861 else {
4862 struct vdec_s *vdec;
4863 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4864 pbuf += vdec_input_dump_blocks(&vdec->input,
4865 pbuf, PAGE_SIZE - (pbuf - buf));
4866 }
4867 }
4868 vdec_core_unlock(vdec_core, flags);
4869
4870 return pbuf - buf;
4871}
4872static ssize_t dump_vdec_chunks_show(struct class *class,
4873 struct class_attribute *attr, char *buf)
4874{
4875 struct vdec_core_s *core = vdec_core;
4876 char *pbuf = buf;
4877 unsigned long flags = vdec_core_lock(vdec_core);
4878
4879 if (list_empty(&core->connected_vdec_list))
4880 pbuf += sprintf(pbuf, "connected vdec list empty\n");
4881 else {
4882 struct vdec_s *vdec;
4883 list_for_each_entry(vdec, &core->connected_vdec_list, list) {
4884 pbuf += vdec_input_dump_chunks(vdec->id, &vdec->input,
4885 pbuf, PAGE_SIZE - (pbuf - buf));
4886 }
4887 }
4888 vdec_core_unlock(vdec_core, flags);
4889
4890 return pbuf - buf;
4891}
4892
4893static ssize_t dump_decoder_state_show(struct class *class,
4894 struct class_attribute *attr, char *buf)
4895{
4896 char *pbuf = buf;
4897 struct vdec_s *vdec;
4898 struct vdec_core_s *core = vdec_core;
4899 unsigned long flags = vdec_core_lock(vdec_core);
4900
4901 if (list_empty(&core->connected_vdec_list)) {
4902 pbuf += sprintf(pbuf, "No vdec.\n");
4903 } else {
4904 list_for_each_entry(vdec,
4905 &core->connected_vdec_list, list) {
4906 if ((vdec->status == VDEC_STATUS_CONNECTED
4907 || vdec->status == VDEC_STATUS_ACTIVE)
4908 && vdec->dump_state)
4909 vdec->dump_state(vdec);
4910 }
4911 }
4912 vdec_core_unlock(vdec_core, flags);
4913
4914 return pbuf - buf;
4915}
4916
4917static ssize_t dump_fps_show(struct class *class,
4918 struct class_attribute *attr, char *buf)
4919{
4920 char *pbuf = buf;
4921 struct vdec_core_s *core = vdec_core;
4922 int i;
4923
4924 unsigned long flags = vdec_fps_lock(vdec_core);
4925 for (i = 0; i < MAX_INSTANCE_MUN; i++)
4926 pbuf += sprintf(pbuf, "%d ", core->decode_fps[i].fps);
4927
4928 pbuf += sprintf(pbuf, "\n");
4929 vdec_fps_unlock(vdec_core, flags);
4930
4931 return pbuf - buf;
4932}
4933
/*
 * sysfs attribute declarations for /sys/class/vdec. Each CLASS_ATTR_*
 * macro binds an attribute name to the matching *_show/*_store
 * handlers defined above (or elsewhere in this file).
 */
static CLASS_ATTR_RO(amrisc_regs);
static CLASS_ATTR_RO(dump_trace);
static CLASS_ATTR_RO(clock_level);
static CLASS_ATTR_RW(poweron_clock_level);
static CLASS_ATTR_RW(dump_risc_mem);
static CLASS_ATTR_RW(keep_vdec_mem);
static CLASS_ATTR_RW(enable_mvdec_info);
static CLASS_ATTR_RO(core);
static CLASS_ATTR_RO(vdec_status);
static CLASS_ATTR_RO(dump_vdec_blocks);
static CLASS_ATTR_RO(dump_vdec_chunks);
static CLASS_ATTR_RO(dump_decoder_state);
#ifdef VDEC_DEBUG_SUPPORT
static CLASS_ATTR_RW(debug);
#endif
#ifdef FRAME_CHECK
static CLASS_ATTR_RW(dump_yuv);
static CLASS_ATTR_RW(frame_check);
#endif
static CLASS_ATTR_RO(dump_fps);

/* NULL-terminated attribute table consumed via ATTRIBUTE_GROUPS. */
static struct attribute *vdec_class_attrs[] = {
	&class_attr_amrisc_regs.attr,
	&class_attr_dump_trace.attr,
	&class_attr_clock_level.attr,
	&class_attr_poweron_clock_level.attr,
	&class_attr_dump_risc_mem.attr,
	&class_attr_keep_vdec_mem.attr,
	&class_attr_enable_mvdec_info.attr,
	&class_attr_core.attr,
	&class_attr_vdec_status.attr,
	&class_attr_dump_vdec_blocks.attr,
	&class_attr_dump_vdec_chunks.attr,
	&class_attr_dump_decoder_state.attr,
#ifdef VDEC_DEBUG_SUPPORT
	&class_attr_debug.attr,
#endif
#ifdef FRAME_CHECK
	&class_attr_dump_yuv.attr,
	&class_attr_frame_check.attr,
#endif
	&class_attr_dump_fps.attr,
	NULL
};

ATTRIBUTE_GROUPS(vdec_class);

/* The class object registered in vdec_probe(); owns the sysfs tree. */
static struct class vdec_class = {
	.name = "vdec",
	.class_groups = vdec_class_groups,
};
4985
/*
 * get_vdec_device() - return the struct device of the vdec platform
 * device (valid only after vdec_probe has run).
 */
struct device *get_vdec_device(void)
{
	return &vdec_core->vdec_core_platform_device->dev;
}
EXPORT_SYMBOL(get_vdec_device);
4991
/*
 * vdec_power_switch() - power a decoder power-domain on or off through
 * runtime PM reference counting.
 * @pd: power-domain table (from vdec_power_domain_init).
 * @id: index into @pd.
 * @on: true takes a runtime-PM reference, false drops one.
 *
 * NOTE(review): pd[id].dev may hold an ERR_PTR/NULL if attach failed
 * in vdec_power_domain_init - callers are presumed to pass only ids
 * that attached successfully; confirm at the call sites.
 */
static void vdec_power_switch(struct vdec_pwrc_s *pd, int id, bool on)
{
	struct device *dev = pd[id].dev;

	if (on)
		pm_runtime_get_sync(dev);
	else
		pm_runtime_put_sync(dev);

	pr_debug("the %-15s power %s\n",
		pd[id].name, on ? "on" : "off");
}
5004
5005static int vdec_power_domain_init(struct device *dev,
5006 struct vdec_pwrc_s **pd_out)
5007{
5008 int i, err;
5009 struct vdec_pwrc_s *pd = vdec_pd;
5010
5011 for (i = 0; i < ARRAY_SIZE(vdec_pd); i++) {
5012 pd[i].dev = dev_pm_domain_attach_by_name(dev, pd[i].name);
5013 if (IS_ERR_OR_NULL(pd[i].dev)) {
5014 err = PTR_ERR(pd[i].dev);
5015 dev_err(dev, "Get %s failed, pm-domain: %d\n",
5016 pd[i].name, err);
5017 continue;
5018 }
5019
5020 pd[i].link = device_link_add(dev, pd[i].dev,
5021 DL_FLAG_PM_RUNTIME |
5022 DL_FLAG_STATELESS);
5023 if (IS_ERR_OR_NULL(pd[i].link)) {
5024 dev_err(dev, "Adding %s device link failed!\n",
5025 pd[i].name);
5026 return -ENODEV;
5027 }
5028
5029 pr_debug("power domain: name: %s, dev: %px, link: %px\n",
5030 pd[i].name, pd[i].dev, pd[i].link);
5031 }
5032
5033 *pd_out = pd;
5034
5035 return 0;
5036}
5037
5038static void vdec_power_domain_remove(struct vdec_pwrc_s *pd)
5039{
5040 int i;
5041
5042 for (i = 0; i < ARRAY_SIZE(vdec_pd); i++) {
5043 if (!IS_ERR_OR_NULL(pd[i].link))
5044 device_link_del(pd[i].link);
5045
5046 if (!IS_ERR_OR_NULL(pd[i].dev))
5047 dev_pm_domain_detach(pd[i].dev, true);
5048 }
5049}
5050
5051static int vdec_probe(struct platform_device *pdev)
5052{
5053 s32 i, r;
5054
5055 vdec_core = (struct vdec_core_s *)devm_kzalloc(&pdev->dev,
5056 sizeof(struct vdec_core_s), GFP_KERNEL);
5057 if (vdec_core == NULL) {
5058 pr_err("vdec core allocation failed.\n");
5059 return -ENOMEM;
5060 }
5061
5062 atomic_set(&vdec_core->vdec_nr, 0);
5063 sema_init(&vdec_core->sem, 1);
5064
5065 r = class_register(&vdec_class);
5066 if (r) {
5067 pr_info("vdec class create fail.\n");
5068 return r;
5069 }
5070
5071 vdec_core->vdec_core_platform_device = pdev;
5072
5073 platform_set_drvdata(pdev, vdec_core);
5074
5075 for (i = 0; i < VDEC_IRQ_MAX; i++) {
5076 vdec_core->isr_context[i].index = i;
5077 vdec_core->isr_context[i].irq = -1;
5078 }
5079
5080 r = vdec_request_threaded_irq(VDEC_IRQ_0, NULL, NULL,
5081 IRQF_ONESHOT, "vdec-0", NULL);
5082 if (r < 0) {
5083 pr_err("vdec interrupt request failed\n");
5084 return r;
5085 }
5086
5087 r = vdec_request_threaded_irq(VDEC_IRQ_1, NULL, NULL,
5088 IRQF_ONESHOT, "vdec-1", NULL);
5089 if (r < 0) {
5090 pr_err("vdec interrupt request failed\n");
5091 return r;
5092 }
5093#if 0
5094 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
5095 r = vdec_request_threaded_irq(VDEC_IRQ_HEVC_BACK, NULL, NULL,
5096 IRQF_ONESHOT, "vdec-hevc_back", NULL);
5097 if (r < 0) {
5098 pr_err("vdec interrupt request failed\n");
5099 return r;
5100 }
5101 }
5102#endif
5103 r = of_reserved_mem_device_init(&pdev->dev);
5104 if (r == 0)
5105 pr_info("vdec_probe done\n");
5106
5107 vdec_core->cma_dev = &pdev->dev;
5108
5109 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_M8) {
5110 /* default to 250MHz */
5111 vdec_clock_hi_enable();
5112 }
5113
5114 if (get_cpu_major_id() == AM_MESON_CPU_MAJOR_ID_GXBB) {
5115 /* set vdec dmc request to urgent */
5116 WRITE_DMCREG(DMC_AM5_CHAN_CTRL, 0x3f203cf);
5117 }
5118 INIT_LIST_HEAD(&vdec_core->connected_vdec_list);
5119 spin_lock_init(&vdec_core->lock);
5120 spin_lock_init(&vdec_core->canvas_lock);
5121 spin_lock_init(&vdec_core->fps_lock);
5122 spin_lock_init(&vdec_core->input_lock);
5123 ida_init(&vdec_core->ida);
5124 vdec_core->thread = kthread_run(vdec_core_thread, vdec_core,
5125 "vdec-core");
5126
5127 vdec_core->vdec_core_wq = alloc_ordered_workqueue("%s",__WQ_LEGACY |
5128 WQ_MEM_RECLAIM |WQ_HIGHPRI/*high priority*/, "vdec-work");
5129 /*work queue priority lower than vdec-core.*/
5130
5131 /* init power domains. */
5132 if (of_property_read_bool(pdev->dev.of_node, "power-domains")) {
5133 r = vdec_power_domain_init(&pdev->dev, &vdec_core->pd);
5134 if (r) {
5135 vdec_power_domain_remove(vdec_core->pd);
5136 pr_err("vdec power domain init failed\n");
5137 return r;
5138 }
5139 }
5140
5141 return 0;
5142}
5143
5144static int vdec_remove(struct platform_device *pdev)
5145{
5146 int i;
5147
5148 for (i = 0; i < VDEC_IRQ_MAX; i++) {
5149 if (vdec_core->isr_context[i].irq >= 0) {
5150 free_irq(vdec_core->isr_context[i].irq,
5151 &vdec_core->isr_context[i]);
5152 vdec_core->isr_context[i].irq = -1;
5153 vdec_core->isr_context[i].dev_isr = NULL;
5154 vdec_core->isr_context[i].dev_threaded_isr = NULL;
5155 vdec_core->isr_context[i].dev_id = NULL;
5156 }
5157 }
5158
5159 kthread_stop(vdec_core->thread);
5160
5161 destroy_workqueue(vdec_core->vdec_core_wq);
5162
5163 if (vdec_core->pd)
5164 vdec_power_domain_remove(vdec_core->pd);
5165
5166 class_unregister(&vdec_class);
5167
5168 return 0;
5169}
5170
/*
 * Device-tree match table. NOTE(review): the space in "amlogic, vdec"
 * is unusual but must match the DTS compatible string exactly - do
 * not "fix" it without changing the device trees.
 */
static const struct of_device_id amlogic_vdec_dt_match[] = {
	{
		.compatible = "amlogic, vdec",
	},
	{},
};

/* Runtime-tunable parameters exposed via the media config framework. */
static struct mconfig vdec_configs[] = {
	MC_PU32("debug_trace_num", &debug_trace_num),
	MC_PI32("hevc_max_reset_count", &hevc_max_reset_count),
	MC_PU32("clk_config", &clk_config),
	MC_PI32("step_mode", &step_mode),
	MC_PI32("poweron_clock_level", &poweron_clock_level),
};
static struct mconfig_node vdec_node;

static struct platform_driver vdec_driver = {
	.probe = vdec_probe,
	.remove = vdec_remove,
	.driver = {
		.name = "vdec",
		.of_match_table = amlogic_vdec_dt_match,
	}
};

/* Advertises DRM frame-mode input capability to the codec profile list. */
static struct codec_profile_t amvdec_input_profile = {
	.name = "vdec_input",
	.profile = "drm_framemode"
};
5200
5201int vdec_module_init(void)
5202{
5203 if (platform_driver_register(&vdec_driver)) {
5204 pr_info("failed to register vdec module\n");
5205 return -ENODEV;
5206 }
5207 INIT_REG_NODE_CONFIGS("media.decoder", &vdec_node,
5208 "vdec", vdec_configs, CONFIG_FOR_RW);
5209 vcodec_profile_register(&amvdec_input_profile);
5210 return 0;
5211}
5212EXPORT_SYMBOL(vdec_module_init);
5213
/*
 * vdec_module_exit() - counterpart of vdec_module_init(); unregisters
 * the platform driver (probe/remove handle the rest of the teardown).
 */
void vdec_module_exit(void)
{
	platform_driver_unregister(&vdec_driver);
}
EXPORT_SYMBOL(vdec_module_exit);
5219
/*
 * Dead duplicate of vdec_module_init/exit kept from before the module
 * split; superseded by the exported versions above. Candidate for
 * removal.
 */
#if 0
static int __init vdec_module_init(void)
{
	if (platform_driver_register(&vdec_driver)) {
		pr_info("failed to register vdec module\n");
		return -ENODEV;
	}
	INIT_REG_NODE_CONFIGS("media.decoder", &vdec_node,
		"vdec", vdec_configs, CONFIG_FOR_RW);
	return 0;
}

static void __exit vdec_module_exit(void)
{
	platform_driver_unregister(&vdec_driver);
}
#endif
5237
/*
 * Reserved-memory device_init callback: record the device that owns
 * the vdec reserved region for later CMA allocations.
 */
static int vdec_mem_device_init(struct reserved_mem *rmem, struct device *dev)
{
	vdec_core->cma_dev = dev;

	return 0;
}
5244
/* Only device_init is needed; there is no per-device teardown. */
static const struct reserved_mem_ops rmem_vdec_ops = {
	.device_init = vdec_mem_device_init,
};

/*
 * Early reserved-memory hook (see RESERVEDMEM_OF_DECLARE below):
 * installs rmem_vdec_ops for the "amlogic, vdec-memory" region.
 */
static int __init vdec_mem_setup(struct reserved_mem *rmem)
{
	rmem->ops = &rmem_vdec_ops;
	pr_info("vdec: reserved mem setup\n");

	return 0;
}
5256
5257
5258void vdec_set_vframe_comm(struct vdec_s *vdec, char *n)
5259{
5260 struct vdec_frames_s *mvfrm = vdec->mvfrm;
5261
5262 if (!mvfrm)
5263 return;
5264
5265 mvfrm->comm.vdec_id = vdec->id;
5266
5267 snprintf(mvfrm->comm.vdec_name, sizeof(mvfrm->comm.vdec_name)-1,
5268 "%s", n);
5269 mvfrm->comm.vdec_type = vdec->type;
5270}
5271EXPORT_SYMBOL(vdec_set_vframe_comm);
5272
/*
 * vdec_fill_vdec_frame() - append one per-frame statistics record to
 * the instance's fifo_buf ring.
 * @vdec: decoder instance (no-op if it has no mvfrm ring).
 * @vframe_qos: optional QoS record copied into the slot.
 * @vinfo: optional vdec_info; the fields between frame_width and
 *         reserved[0] are copied as one contiguous range.
 * @vf: optional vframe; type/signal_type/pts/pts_us64 are captured.
 * @hw_dec_time: hardware decode time cost for this frame.
 *
 * Writer side of the ring consumed by vdec_get_frame_vdec(); slots
 * are addressed by wr modulo NUM_FRAME_VDEC (power of two).
 */
void vdec_fill_vdec_frame(struct vdec_s *vdec, struct vframe_qos_s *vframe_qos,
				struct vdec_info *vinfo,struct vframe_s *vf,
				u32 hw_dec_time)
{
	u32 i;
	struct vframe_counter_s *fifo_buf;
	struct vdec_frames_s *mvfrm = vdec->mvfrm;

	if (!mvfrm)
		return;
	fifo_buf = mvfrm->fifo_buf;

	/* assume fps==60,mv->wr max value can support system running 828 days,
	 this is enough for us */
	i = mvfrm->wr & (NUM_FRAME_VDEC-1); //find the slot num in fifo_buf
	mvfrm->fifo_buf[i].decode_time_cost = hw_dec_time;
	if (vframe_qos)
		memcpy(&fifo_buf[i].qos, vframe_qos, sizeof(struct vframe_qos_s));
	if (vinfo) {
		/*
		 * Bulk-copy the stats fields by address range; this
		 * depends on vdec_info's field layout (frame_width up
		 * to, but not including, reserved[0]) staying fixed.
		 */
		memcpy(&fifo_buf[i].frame_width, &vinfo->frame_width,
			((char*)&vinfo->reserved[0] - (char*)&vinfo->frame_width));
	}
	if (vf) {
		fifo_buf[i].vf_type = vf->type;
		fifo_buf[i].signal_type = vf->signal_type;
		fifo_buf[i].pts = vf->pts;
		fifo_buf[i].pts_us64 = vf->pts_us64;
	}
	mvfrm->wr++;
}
EXPORT_SYMBOL(vdec_fill_vdec_frame);
5304
5305/* In this function,if we use copy_to_user, we may encounter sleep,
5306which may block the vdec_fill_vdec_frame,this is not acceptable.
5307So, we should use a tmp buffer(passed by caller) to get the content */
5308u32 vdec_get_frame_vdec(struct vdec_s *vdec, struct vframe_counter_s *tmpbuf)
5309{
5310 u32 toread = 0;
5311 u32 slot_rd;
5312 struct vframe_counter_s *fifo_buf = NULL;
5313 struct vdec_frames_s *mvfrm = NULL;
5314
5315 /*
5316 switch (version) {
5317 case version_1:
5318 f1();
5319 case version_2:
5320 f2();
5321 default:
5322 break;
5323 }
5324 */
5325
5326 if (!vdec)
5327 return 0;
5328 mvfrm = vdec->mvfrm;
5329 if (!mvfrm)
5330 return 0;
5331
5332 fifo_buf = &mvfrm->fifo_buf[0];
5333
5334 toread = mvfrm->wr - mvfrm->rd;
5335 if (toread) {
5336 if (toread >= NUM_FRAME_VDEC - QOS_FRAME_NUM) {
5337 /* round the fifo_buf length happens, give QOS_FRAME_NUM for buffer */
5338 mvfrm->rd = mvfrm->wr - (NUM_FRAME_VDEC - QOS_FRAME_NUM);
5339 }
5340
5341 if (toread >= QOS_FRAME_NUM) {
5342 toread = QOS_FRAME_NUM; //by default, we use this num
5343 }
5344
5345 slot_rd = mvfrm->rd &( NUM_FRAME_VDEC-1); //In this case it equals to x%y
5346 if (slot_rd + toread <= NUM_FRAME_VDEC) {
5347 memcpy(tmpbuf, &fifo_buf[slot_rd], toread*sizeof(struct vframe_counter_s));
5348 } else {
5349 u32 exeed;
5350 exeed = slot_rd + toread - NUM_FRAME_VDEC;
5351 memcpy(tmpbuf, &fifo_buf[slot_rd], (NUM_FRAME_VDEC - slot_rd)*sizeof(struct vframe_counter_s));
5352 memcpy(&tmpbuf[NUM_FRAME_VDEC-slot_rd], &fifo_buf[0], exeed*sizeof(struct vframe_counter_s));
5353 }
5354
5355 mvfrm->rd += toread;
5356 }
5357 return toread;
5358}
5359EXPORT_SYMBOL(vdec_get_frame_vdec);
5360
5361
/* Binds vdec_mem_setup() to the "amlogic, vdec-memory" DT region. */
RESERVEDMEM_OF_DECLARE(vdec, "amlogic, vdec-memory", vdec_mem_setup);
/*
uint force_hevc_clock_cntl;
EXPORT_SYMBOL(force_hevc_clock_cntl);

module_param(force_hevc_clock_cntl, uint, 0664);
*/
/* Module parameters mirroring the file-scope tunables defined above. */
module_param(debug, uint, 0664);
module_param(debug_trace_num, uint, 0664);
module_param(hevc_max_reset_count, int, 0664);
module_param(clk_config, uint, 0664);
module_param(step_mode, int, 0664);
module_param(debugflags, int, 0664);
module_param(parallel_decode, int, 0664);
module_param(fps_detection, int, 0664);
module_param(fps_clear, int, 0664);
module_param(force_nosecure_even_drm, int, 0664);
module_param(disable_switch_single_to_mult, int, 0664);
module_param(disable_power_domain, bool, 0664);

module_param(frameinfo_flag, int, 0664);
MODULE_PARM_DESC(frameinfo_flag,
	"\n frameinfo_flag\n");
module_param(v4lvideo_add_di, int, 0664);
MODULE_PARM_DESC(v4lvideo_add_di,
	"\n v4lvideo_add_di\n");

module_param(max_di_instance, int, 0664);
MODULE_PARM_DESC(max_di_instance,
	"\n max_di_instance\n");

/*
*module_init(vdec_module_init);
*module_exit(vdec_module_exit);
*/
/* Instantiate the tracepoints declared in vdec_trace.h (must be the
 * only translation unit defining CREATE_TRACE_POINTS for them). */
#define CREATE_TRACE_POINTS
#include "vdec_trace.h"
MODULE_DESCRIPTION("AMLOGIC vdec driver");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Tim Yao <timyao@amlogic.com>");
5402