path: root/drivers/frame_sink/encoder/h264/encoder.c (plain)
blob: 66a3f6c8552ae6264e4f0f9c4c32479e84beef9c
1/*
2 * drivers/frame_sink/encoder/h264/encoder.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16*/
17
18#define LOG_LINE() pr_err("[%s:%d]\n", __FUNCTION__, __LINE__);
19#include <linux/kernel.h>
20#include <linux/types.h>
21#include <linux/errno.h>
22#include <linux/interrupt.h>
23#include <linux/timer.h>
24#include <linux/clk.h>
25#include <linux/fs.h>
26#include <linux/sched.h>
27#include <linux/slab.h>
28#include <linux/reset.h>
29#include <linux/dma-mapping.h>
30#include <linux/platform_device.h>
31#include <linux/spinlock.h>
32#include <linux/ctype.h>
33#include <linux/amlogic/media/frame_sync/ptsserv.h>
34#include <linux/amlogic/media/utils/amstream.h>
35#include <linux/amlogic/media/canvas/canvas.h>
36#include <linux/amlogic/media/canvas/canvas_mgr.h>
37#include <linux/amlogic/media/codec_mm/codec_mm.h>
38
39#include <linux/amlogic/media/utils/vdec_reg.h>
40#include "../../../frame_provider/decoder/utils/vdec.h"
41#include <linux/delay.h>
42#include <linux/poll.h>
43#include <linux/of.h>
44#include <linux/of_fdt.h>
45#include <linux/dma-contiguous.h>
46#include <linux/kthread.h>
47#include <linux/sched/rt.h>
48#include <linux/amlogic/media/utils/amports_config.h>
49#include "encoder.h"
50#include "../../../frame_provider/decoder/utils/amvdec.h"
51#include "../../../frame_provider/decoder/utils/vdec_power_ctrl.h"
52#include <linux/amlogic/media/utils/vdec_reg.h>
53#include <linux/amlogic/power_ctrl.h>
54#include <dt-bindings/power/sc2-pd.h>
55#include <linux/amlogic/pwr_ctrl.h>
56
57#include <linux/amlogic/media/utils/amlog.h>
58#include "../../../stream_input/amports/amports_priv.h"
59#include "../../../frame_provider/decoder/utils/firmware.h"
60#include <linux/of_reserved_mem.h>
61
62
63#ifdef CONFIG_AM_JPEG_ENCODER
64#include "jpegenc.h"
65#endif
66
67#define MHz (1000000)
68
69#define CHECK_RET(_ret) if (_ret) {enc_pr(LOG_ERROR, \
70 "%s:%d:function call failed with result: %d\n",\
71 __FUNCTION__, __LINE__, _ret);}
72
73#define ENCODE_NAME "encoder"
74#define AMVENC_CANVAS_INDEX 0xE4
75#define AMVENC_CANVAS_MAX_INDEX 0xEF
76
77#define MIN_SIZE amvenc_buffspec[0].min_buffsize
78#define DUMP_INFO_BYTES_PER_MB 80
79
80#define ADJUSTED_QP_FLAG 64
81
82static s32 avc_device_major;
83static struct device *amvenc_avc_dev;
84#define DRIVER_NAME "amvenc_avc"
85#define CLASS_NAME "amvenc_avc"
86#define DEVICE_NAME "amvenc_avc"
87
88static struct encode_manager_s encode_manager;
89
90#define MULTI_SLICE_MC
91#define H264_ENC_CBR
92/* #define MORE_MODULE_PARAM */
93
94#define ENC_CANVAS_OFFSET AMVENC_CANVAS_INDEX
95
96#define UCODE_MODE_FULL 0
97
98/* #define ENABLE_IGNORE_FUNCTION */
99
100static u32 ie_me_mb_type;
101static u32 ie_me_mode;
102static u32 ie_pippeline_block = 3;
103static u32 ie_cur_ref_sel;
104/* static u32 avc_endian = 6; */
105static u32 clock_level = 5;
106
107static u32 encode_print_level = LOG_DEBUG;
108static u32 no_timeout;
109static int nr_mode = -1;
110static u32 qp_table_debug;
111static u32 use_reset_control;
112static u32 use_ge2d;
113
114#ifdef H264_ENC_SVC
115static u32 svc_enable = 0; /* Enable SVC feature or not */
116static u32 svc_ref_conf = 0; /* Number of consecutive non-reference frames */
117#endif
118
119struct hcodec_clks {
120 struct clk *hcodec_aclk;
121 //struct clk *hcodec_bclk;
122 //struct clk *hcodec_cclk;
123};
124
125static struct hcodec_clks s_hcodec_clks;
126struct reset_control *hcodec_rst;
127
128static u32 me_mv_merge_ctl =
129 (0x1 << 31) | /* [31] me_merge_mv_en_16 */
130 (0x1 << 30) | /* [30] me_merge_small_mv_en_16 */
131 (0x1 << 29) | /* [29] me_merge_flex_en_16 */
132 (0x1 << 28) | /* [28] me_merge_sad_en_16 */
133 (0x1 << 27) | /* [27] me_merge_mv_en_8 */
134 (0x1 << 26) | /* [26] me_merge_small_mv_en_8 */
135 (0x1 << 25) | /* [25] me_merge_flex_en_8 */
136 (0x1 << 24) | /* [24] me_merge_sad_en_8 */
137 /* [23:18] me_merge_mv_diff_16 - MV diff <= n pixel can be merged */
138 (0x12 << 18) |
139 /* [17:12] me_merge_mv_diff_8 - MV diff <= n pixel can be merged */
140 (0x2b << 12) |
141 /* [11:0] me_merge_min_sad - SAD >= 0x180 can be merged with other MV */
142 (0x80 << 0);
143 /* ( 0x4 << 18) |
144 * // [23:18] me_merge_mv_diff_16 - MV diff <= n pixel can be merged
145 */
146 /* ( 0x3f << 12) |
147 * // [17:12] me_merge_mv_diff_8 - MV diff <= n pixel can be merged
148 */
149 /* ( 0xc0 << 0);
150 * // [11:0] me_merge_min_sad - SAD >= 0x180 can be merged with other MV
151 */
152
153static u32 me_mv_weight_01 = (0x40 << 24) | (0x30 << 16) | (0x20 << 8) | 0x30;
154static u32 me_mv_weight_23 = (0x40 << 8) | 0x30;
155static u32 me_sad_range_inc = 0x03030303;
156static u32 me_step0_close_mv = 0x003ffc21;
157static u32 me_f_skip_sad;
158static u32 me_f_skip_weight;
159static u32 me_sad_enough_01;/* 0x00018010; */
160static u32 me_sad_enough_23;/* 0x00000020; */
161
162/* [31:16] NUM_ROWS_PER_SLICE_P */
163/* [15:0] NUM_ROWS_PER_SLICE_I */
164static u32 fixed_slice_cfg;
165
166/* y tnr */
167static unsigned int y_tnr_mc_en = 1;
168static unsigned int y_tnr_txt_mode;
169static unsigned int y_tnr_mot_sad_margin = 1;
170static unsigned int y_tnr_mot_cortxt_rate = 1;
171static unsigned int y_tnr_mot_distxt_ofst = 5;
172static unsigned int y_tnr_mot_distxt_rate = 4;
173static unsigned int y_tnr_mot_dismot_ofst = 4;
174static unsigned int y_tnr_mot_frcsad_lock = 8;
175static unsigned int y_tnr_mot2alp_frc_gain = 10;
176static unsigned int y_tnr_mot2alp_nrm_gain = 216;
177static unsigned int y_tnr_mot2alp_dis_gain = 128;
178static unsigned int y_tnr_mot2alp_dis_ofst = 32;
179static unsigned int y_tnr_alpha_min = 32;
180static unsigned int y_tnr_alpha_max = 63;
181static unsigned int y_tnr_deghost_os;
182/* c tnr */
183static unsigned int c_tnr_mc_en = 1;
184static unsigned int c_tnr_txt_mode;
185static unsigned int c_tnr_mot_sad_margin = 1;
186static unsigned int c_tnr_mot_cortxt_rate = 1;
187static unsigned int c_tnr_mot_distxt_ofst = 5;
188static unsigned int c_tnr_mot_distxt_rate = 4;
189static unsigned int c_tnr_mot_dismot_ofst = 4;
190static unsigned int c_tnr_mot_frcsad_lock = 8;
191static unsigned int c_tnr_mot2alp_frc_gain = 10;
192static unsigned int c_tnr_mot2alp_nrm_gain = 216;
193static unsigned int c_tnr_mot2alp_dis_gain = 128;
194static unsigned int c_tnr_mot2alp_dis_ofst = 32;
195static unsigned int c_tnr_alpha_min = 32;
196static unsigned int c_tnr_alpha_max = 63;
197static unsigned int c_tnr_deghost_os;
198/* y snr */
199static unsigned int y_snr_err_norm = 1;
200static unsigned int y_snr_gau_bld_core = 1;
201static int y_snr_gau_bld_ofst = -1;
202static unsigned int y_snr_gau_bld_rate = 48;
203static unsigned int y_snr_gau_alp0_min;
204static unsigned int y_snr_gau_alp0_max = 63;
205static unsigned int y_bld_beta2alp_rate = 16;
206static unsigned int y_bld_beta_min;
207static unsigned int y_bld_beta_max = 63;
208/* c snr */
209static unsigned int c_snr_err_norm = 1;
210static unsigned int c_snr_gau_bld_core = 1;
211static int c_snr_gau_bld_ofst = -1;
212static unsigned int c_snr_gau_bld_rate = 48;
213static unsigned int c_snr_gau_alp0_min;
214static unsigned int c_snr_gau_alp0_max = 63;
215static unsigned int c_bld_beta2alp_rate = 16;
216static unsigned int c_bld_beta_min;
217static unsigned int c_bld_beta_max = 63;
218static unsigned int qp_mode;
219
220static DEFINE_SPINLOCK(lock);
221
222#define ADV_MV_LARGE_16x8 1
223#define ADV_MV_LARGE_8x16 1
224#define ADV_MV_LARGE_16x16 1
225
226/* me weight offset should not be very small; it is used by the v1 me module. */
227/* the min real sad for me is 16 in hardware. */
228#define ME_WEIGHT_OFFSET 0x520
229#define I4MB_WEIGHT_OFFSET 0x655
230#define I16MB_WEIGHT_OFFSET 0x560
231
232#define ADV_MV_16x16_WEIGHT 0x080
233#define ADV_MV_16_8_WEIGHT 0x0e0
234#define ADV_MV_8x8_WEIGHT 0x240
235#define ADV_MV_4x4x4_WEIGHT 0x3000
236
237#define IE_SAD_SHIFT_I16 0x001
238#define IE_SAD_SHIFT_I4 0x001
239#define ME_SAD_SHIFT_INTER 0x001
240
241#define STEP_2_SKIP_SAD 0
242#define STEP_1_SKIP_SAD 0
243#define STEP_0_SKIP_SAD 0
244#define STEP_2_SKIP_WEIGHT 0
245#define STEP_1_SKIP_WEIGHT 0
246#define STEP_0_SKIP_WEIGHT 0
247
248#define ME_SAD_RANGE_0 0x1 /* 0x0 */
249#define ME_SAD_RANGE_1 0x0
250#define ME_SAD_RANGE_2 0x0
251#define ME_SAD_RANGE_3 0x0
252
253/* use 0 for v3, 0x18 for v2 */
254#define ME_MV_PRE_WEIGHT_0 0x18
255/* use 0 for v3, 0x18 for v2 */
256#define ME_MV_PRE_WEIGHT_1 0x18
257#define ME_MV_PRE_WEIGHT_2 0x0
258#define ME_MV_PRE_WEIGHT_3 0x0
259
260/* use 0 for v3, 0x18 for v2 */
261#define ME_MV_STEP_WEIGHT_0 0x18
262/* use 0 for v3, 0x18 for v2 */
263#define ME_MV_STEP_WEIGHT_1 0x18
264#define ME_MV_STEP_WEIGHT_2 0x0
265#define ME_MV_STEP_WEIGHT_3 0x0
266
267#define ME_SAD_ENOUGH_0_DATA 0x00
268#define ME_SAD_ENOUGH_1_DATA 0x04
269#define ME_SAD_ENOUGH_2_DATA 0x11
270#define ADV_MV_8x8_ENOUGH_DATA 0x20
271
272/* V4_COLOR_BLOCK_FIX */
273#define V3_FORCE_SKIP_SAD_0 0x10
274/* 4 Blocks */
275#define V3_FORCE_SKIP_SAD_1 0x60
276/* 16 Blocks + V3_SKIP_WEIGHT_2 */
277#define V3_FORCE_SKIP_SAD_2 0x250
278/* almost disable it -- using the t_lac_coeff_2 output for F_ZERO is better */
279#define V3_ME_F_ZERO_SAD (ME_WEIGHT_OFFSET + 0x10)
280
281#define V3_IE_F_ZERO_SAD_I16 (I16MB_WEIGHT_OFFSET + 0x10)
282#define V3_IE_F_ZERO_SAD_I4 (I4MB_WEIGHT_OFFSET + 0x20)
283
284#define V3_SKIP_WEIGHT_0 0x10
285/* 4 Blocks 8 separate search sad can be very low */
286#define V3_SKIP_WEIGHT_1 0x8 /* (4 * ME_MV_STEP_WEIGHT_1 + 0x100) */
287#define V3_SKIP_WEIGHT_2 0x3
288
289#define V3_LEVEL_1_F_SKIP_MAX_SAD 0x0
290#define V3_LEVEL_1_SKIP_MAX_SAD 0x6
291
292#define I4_ipred_weight_most 0x18
293#define I4_ipred_weight_else 0x28
294
295#define C_ipred_weight_V 0x04
296#define C_ipred_weight_H 0x08
297#define C_ipred_weight_DC 0x0c
298
299#define I16_ipred_weight_V 0x04
300#define I16_ipred_weight_H 0x08
301#define I16_ipred_weight_DC 0x0c
302
303/* 0x00 same as disable */
304#define v3_left_small_max_ie_sad 0x00
305#define v3_left_small_max_me_sad 0x40
306
307#define v5_use_small_diff_cnt 0
308#define v5_simple_mb_inter_all_en 1
309#define v5_simple_mb_inter_8x8_en 1
310#define v5_simple_mb_inter_16_8_en 1
311#define v5_simple_mb_inter_16x16_en 1
312#define v5_simple_mb_intra_en 1
313#define v5_simple_mb_C_en 0
314#define v5_simple_mb_Y_en 1
315#define v5_small_diff_Y 0x10
316#define v5_small_diff_C 0x18
317/* shift 8-bits, 2, 1, 0, -1, -2, -3, -4 */
318#define v5_simple_dq_setting 0x43210fed
319#define v5_simple_me_weight_setting 0
320
321#ifdef H264_ENC_CBR
322#define CBR_TABLE_SIZE 0x800
323#define CBR_SHORT_SHIFT 12 /* same as disable */
324#define CBR_LONG_MB_NUM 2
325#define START_TABLE_ID 8
326#define CBR_LONG_THRESH 4
327#endif
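
/*
 * Rough layout of the cbr_info buffer as used in avc_prot_init()
 * below: the first CBR_TABLE_SIZE bytes hold the table handed to the
 * firmware (H264_ENC_CBR_TABLE_ADDR) and the per-macroblock size
 * records start right after it (H264_ENC_CBR_MB_SIZE_ADDR).
 * START_TABLE_ID, CBR_SHORT_SHIFT, CBR_LONG_MB_NUM and CBR_LONG_THRESH
 * look like the defaults for the matching wq->cbr_info fields that get
 * packed into H264_ENC_CBR_CTL.
 */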
328
329static u32 v3_mv_sad[64] = {
330 /* For step0 */
331 0x00000004,
332 0x00010008,
333 0x00020010,
334 0x00030018,
335 0x00040020,
336 0x00050028,
337 0x00060038,
338 0x00070048,
339 0x00080058,
340 0x00090068,
341 0x000a0080,
342 0x000b0098,
343 0x000c00b0,
344 0x000d00c8,
345 0x000e00e8,
346 0x000f0110,
347 /* For step1 */
348 0x00100002,
349 0x00110004,
350 0x00120008,
351 0x0013000c,
352 0x00140010,
353 0x00150014,
354 0x0016001c,
355 0x00170024,
356 0x0018002c,
357 0x00190034,
358 0x001a0044,
359 0x001b0054,
360 0x001c0064,
361 0x001d0074,
362 0x001e0094,
363 0x001f00b4,
364 /* For step2 */
365 0x00200006,
366 0x0021000c,
367 0x0022000c,
368 0x00230018,
369 0x00240018,
370 0x00250018,
371 0x00260018,
372 0x00270030,
373 0x00280030,
374 0x00290030,
375 0x002a0030,
376 0x002b0030,
377 0x002c0030,
378 0x002d0030,
379 0x002e0030,
380 0x002f0050,
381 /* For step2 4x4-8x8 */
382 0x00300001,
383 0x00310002,
384 0x00320002,
385 0x00330004,
386 0x00340004,
387 0x00350004,
388 0x00360004,
389 0x00370006,
390 0x00380006,
391 0x00390006,
392 0x003a0006,
393 0x003b0006,
394 0x003c0006,
395 0x003d0006,
396 0x003e0006,
397 0x003f0006
398};
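
/*
 * Each v3_mv_sad entry appears to pack a table index in the upper
 * 16 bits and a SAD threshold in the lower 16 bits (e.g. 0x000a0080
 * is entry 0x0a with threshold 0x80); the four groups of sixteen
 * cover the search steps noted in the comments above.
 */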
399
400static struct BuffInfo_s amvenc_buffspec[] = {
401 {
402 .lev_id = 0,
403 .max_width = 1920,
404 .max_height = 1088,
405 .min_buffsize = 0x1400000,
406 .dct = {
407 .buf_start = 0,
408 .buf_size = 0x800000, /* 1920x1088x4 */
409 },
410 .dec0_y = {
411 .buf_start = 0x800000,
412 .buf_size = 0x300000,
413 },
414 .dec1_y = {
415 .buf_start = 0xb00000,
416 .buf_size = 0x300000,
417 },
418 .assit = {
419 .buf_start = 0xe10000,
420 .buf_size = 0xc0000,
421 },
422 .bitstream = {
423 .buf_start = 0xf00000,
424 .buf_size = 0x100000,
425 },
426 .scale_buff = {
427 .buf_start = 0x1000000,
428 .buf_size = 0x300000,
429 },
430 .dump_info = {
431 .buf_start = 0x1300000,
432 .buf_size = 0xa0000, /* (1920x1088/256)x80 */
433 },
434 .cbr_info = {
435 .buf_start = 0x13b0000,
436 .buf_size = 0x2000,
437 }
438 }
439};
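
/*
 * The single min_buffsize of 0x1400000 (20 MiB) is enough to cover
 * every sub-buffer above for streams up to 1920x1088; the buf_start
 * fields are offsets that avc_buffspec_init() adds to
 * wq->mem.buf_start.
 */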
440
441enum ucode_type_e {
442 UCODE_GXL,
443 UCODE_TXL,
444 UCODE_G12A,
445 UCODE_MAX
446};
447
448const char *ucode_name[] = {
449 "gxl_h264_enc",
450 "txl_h264_enc_cavlc",
451 "ga_h264_enc_cabac",
452};
453
454static void dma_flush(u32 buf_start, u32 buf_size);
455static void cache_flush(u32 buf_start, u32 buf_size);
456static int enc_dma_buf_get_phys(struct enc_dma_cfg *cfg, unsigned long *addr);
457static void enc_dma_buf_unmap(struct enc_dma_cfg *cfg);
458
459s32 hcodec_hw_reset(void)
460{
461 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_SC2 && use_reset_control) {
462 reset_control_reset(hcodec_rst);
463 enc_pr(LOG_DEBUG, "request hcodec reset from application.\n");
464 }
465 return 0;
466}
467
468s32 hcodec_clk_prepare(struct device *dev, struct hcodec_clks *clks)
469{
470 int ret;
471
472 clks->hcodec_aclk = devm_clk_get(dev, "cts_hcodec_aclk");
473
474 if (IS_ERR_OR_NULL(clks->hcodec_aclk)) {
475 enc_pr(LOG_ERROR, "failed to get hcodec aclk\n");
476 return -1;
477 }
478
479 ret = clk_set_rate(clks->hcodec_aclk, 667 * MHz);
480 CHECK_RET(ret);
481
482 ret = clk_prepare(clks->hcodec_aclk);
483 CHECK_RET(ret);
484
485 enc_pr(LOG_ERROR, "hcodec_clk_a: %lu MHz\n", clk_get_rate(clks->hcodec_aclk) / 1000000);
486
487 return 0;
488}
489
490void hcodec_clk_unprepare(struct device *dev, struct hcodec_clks *clks)
491{
492 clk_unprepare(clks->hcodec_aclk);
493 devm_clk_put(dev, clks->hcodec_aclk);
494
495 //clk_unprepare(clks->wave_bclk);
496 //devm_clk_put(dev, clks->wave_bclk);
497
498 //clk_unprepare(clks->wave_aclk);
499 //devm_clk_put(dev, clks->wave_aclk);
500}
501
502s32 hcodec_clk_config(u32 enable)
503{
504 if (enable) {
505 clk_enable(s_hcodec_clks.hcodec_aclk);
506 //clk_enable(s_hcodec_clks.wave_bclk);
507 //clk_enable(s_hcodec_clks.wave_cclk);
508 } else {
509 clk_disable(s_hcodec_clks.hcodec_aclk);
510 //clk_disable(s_hcodec_clks.wave_bclk);
511 //clk_disable(s_hcodec_clks.wave_aclk);
512 }
513
514 return 0;
515}
516
517static const char *select_ucode(u32 ucode_index)
518{
519 enum ucode_type_e ucode = UCODE_GXL;
520
521 switch (ucode_index) {
522 case UCODE_MODE_FULL:
523 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_G12A)
524 ucode = UCODE_G12A;
525 else if (get_cpu_type() >= MESON_CPU_MAJOR_ID_TXL)
526 ucode = UCODE_TXL;
527 else /* (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXL) */
528 ucode = UCODE_GXL;
529		break;
531 default:
532 break;
533 }
534 return (const char *)ucode_name[ucode];
535}
536
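/*
 * Program the three 8-entry quantisation tables (I4x4, I16x16 and
 * inter/ME) into the hcodec quant RAM: each group is selected by
 * writing its offset (0, 8 or 16) into quant_table_addr with
 * quant_table_addr_update set, then streaming the eight words
 * through HCODEC_QUANT_TABLE_DATA.
 */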
537static void hcodec_prog_qtbl(struct encode_wq_s *wq)
538{
539 WRITE_HREG(HCODEC_Q_QUANT_CONTROL,
540 (0 << 23) | /* quant_table_addr */
541 (1 << 22)); /* quant_table_addr_update */
542
543 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
544 wq->quant_tbl_i4[0]);
545 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
546 wq->quant_tbl_i4[1]);
547 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
548 wq->quant_tbl_i4[2]);
549 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
550 wq->quant_tbl_i4[3]);
551 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
552 wq->quant_tbl_i4[4]);
553 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
554 wq->quant_tbl_i4[5]);
555 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
556 wq->quant_tbl_i4[6]);
557 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
558 wq->quant_tbl_i4[7]);
559
560 WRITE_HREG(HCODEC_Q_QUANT_CONTROL,
561 (8 << 23) | /* quant_table_addr */
562 (1 << 22)); /* quant_table_addr_update */
563
564 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
565 wq->quant_tbl_i16[0]);
566 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
567 wq->quant_tbl_i16[1]);
568 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
569 wq->quant_tbl_i16[2]);
570 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
571 wq->quant_tbl_i16[3]);
572 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
573 wq->quant_tbl_i16[4]);
574 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
575 wq->quant_tbl_i16[5]);
576 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
577 wq->quant_tbl_i16[6]);
578 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
579 wq->quant_tbl_i16[7]);
580
581 WRITE_HREG(HCODEC_Q_QUANT_CONTROL,
582 (16 << 23) | /* quant_table_addr */
583 (1 << 22)); /* quant_table_addr_update */
584
585 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
586 wq->quant_tbl_me[0]);
587 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
588 wq->quant_tbl_me[1]);
589 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
590 wq->quant_tbl_me[2]);
591 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
592 wq->quant_tbl_me[3]);
593 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
594 wq->quant_tbl_me[4]);
595 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
596 wq->quant_tbl_me[5]);
597 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
598 wq->quant_tbl_me[6]);
599 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
600 wq->quant_tbl_me[7]);
601}
602
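/*
 * Pack the ME_* weight/threshold defines above into the register-format
 * words used by the encoder.  Note that on GXTVBB and newer parts the
 * skip-SAD, skip-weight and MV-weight words are cleared again below, so
 * only the merge control, SAD range and "sad enough" words differ from
 * zero there.
 */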
603static void InitEncodeWeight(void)
604{
605 me_mv_merge_ctl =
606 (0x1 << 31) | /* [31] me_merge_mv_en_16 */
607 (0x1 << 30) | /* [30] me_merge_small_mv_en_16 */
608 (0x1 << 29) | /* [29] me_merge_flex_en_16 */
609 (0x1 << 28) | /* [28] me_merge_sad_en_16 */
610 (0x1 << 27) | /* [27] me_merge_mv_en_8 */
611 (0x1 << 26) | /* [26] me_merge_small_mv_en_8 */
612 (0x1 << 25) | /* [25] me_merge_flex_en_8 */
613 (0x1 << 24) | /* [24] me_merge_sad_en_8 */
614 (0x12 << 18) |
615 /* [23:18] me_merge_mv_diff_16 - MV diff
616 * <= n pixel can be merged
617 */
618 (0x2b << 12) |
619 /* [17:12] me_merge_mv_diff_8 - MV diff
620 * <= n pixel can be merged
621 */
622 (0x80 << 0);
623 /* [11:0] me_merge_min_sad - SAD
624 * >= 0x180 can be merged with other MV
625 */
626
627 me_mv_weight_01 = (ME_MV_STEP_WEIGHT_1 << 24) |
628 (ME_MV_PRE_WEIGHT_1 << 16) |
629 (ME_MV_STEP_WEIGHT_0 << 8) |
630 (ME_MV_PRE_WEIGHT_0 << 0);
631
632 me_mv_weight_23 = (ME_MV_STEP_WEIGHT_3 << 24) |
633 (ME_MV_PRE_WEIGHT_3 << 16) |
634 (ME_MV_STEP_WEIGHT_2 << 8) |
635 (ME_MV_PRE_WEIGHT_2 << 0);
636
637 me_sad_range_inc = (ME_SAD_RANGE_3 << 24) |
638 (ME_SAD_RANGE_2 << 16) |
639 (ME_SAD_RANGE_1 << 8) |
640 (ME_SAD_RANGE_0 << 0);
641
642 me_step0_close_mv = (0x100 << 10) |
643	/* me_step0_big_sad -- two MV sad
644	 * diff bigger than this will use 1
645 */
646 (2 << 5) | /* me_step0_close_mv_y */
647 (2 << 0); /* me_step0_close_mv_x */
648
649 me_f_skip_sad = (0x00 << 24) | /* force_skip_sad_3 */
650 (STEP_2_SKIP_SAD << 16) | /* force_skip_sad_2 */
651 (STEP_1_SKIP_SAD << 8) | /* force_skip_sad_1 */
652 (STEP_0_SKIP_SAD << 0); /* force_skip_sad_0 */
653
654 me_f_skip_weight = (0x00 << 24) | /* force_skip_weight_3 */
655 /* force_skip_weight_2 */
656 (STEP_2_SKIP_WEIGHT << 16) |
657 /* force_skip_weight_1 */
658 (STEP_1_SKIP_WEIGHT << 8) |
659 /* force_skip_weight_0 */
660 (STEP_0_SKIP_WEIGHT << 0);
661
662 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB) {
663 me_f_skip_sad = 0;
664 me_f_skip_weight = 0;
665 me_mv_weight_01 = 0;
666 me_mv_weight_23 = 0;
667 }
668
669 me_sad_enough_01 = (ME_SAD_ENOUGH_1_DATA << 12) |
670 /* me_sad_enough_1 */
671 (ME_SAD_ENOUGH_0_DATA << 0) |
672 /* me_sad_enough_0 */
673 (0 << 12) | /* me_sad_enough_1 */
674 (0 << 0); /* me_sad_enough_0 */
675
676 me_sad_enough_23 = (ADV_MV_8x8_ENOUGH_DATA << 12) |
677 /* adv_mv_8x8_enough */
678 (ME_SAD_ENOUGH_2_DATA << 0) |
679 /* me_sad_enough_2 */
680 (0 << 12) | /* me_sad_enough_3 */
681 (0 << 0); /* me_sad_enough_2 */
682}
683
684/*output stream buffer setting*/
685static void avc_init_output_buffer(struct encode_wq_s *wq)
686{
687 WRITE_HREG(HCODEC_VLC_VB_MEM_CTL,
688 ((1 << 31) | (0x3f << 24) |
689 (0x20 << 16) | (2 << 0)));
690 WRITE_HREG(HCODEC_VLC_VB_START_PTR,
691 wq->mem.BitstreamStart);
692 WRITE_HREG(HCODEC_VLC_VB_WR_PTR,
693 wq->mem.BitstreamStart);
694 WRITE_HREG(HCODEC_VLC_VB_SW_RD_PTR,
695 wq->mem.BitstreamStart);
696 WRITE_HREG(HCODEC_VLC_VB_END_PTR,
697 wq->mem.BitstreamEnd);
698 WRITE_HREG(HCODEC_VLC_VB_CONTROL, 1);
699 WRITE_HREG(HCODEC_VLC_VB_CONTROL,
700 ((0 << 14) | (7 << 3) |
701 (1 << 1) | (0 << 0)));
702}
703
704/*input dct buffer setting*/
705static void avc_init_input_buffer(struct encode_wq_s *wq)
706{
707 WRITE_HREG(HCODEC_QDCT_MB_START_PTR,
708 wq->mem.dct_buff_start_addr);
709 WRITE_HREG(HCODEC_QDCT_MB_END_PTR,
710 wq->mem.dct_buff_end_addr);
711 WRITE_HREG(HCODEC_QDCT_MB_WR_PTR,
712 wq->mem.dct_buff_start_addr);
713 WRITE_HREG(HCODEC_QDCT_MB_RD_PTR,
714 wq->mem.dct_buff_start_addr);
715 WRITE_HREG(HCODEC_QDCT_MB_BUFF, 0);
716}
717
718/*input reference buffer setting*/
719static void avc_init_reference_buffer(s32 canvas)
720{
721 WRITE_HREG(HCODEC_ANC0_CANVAS_ADDR, canvas);
722 WRITE_HREG(HCODEC_VLC_HCMD_CONFIG, 0);
723}
724
725static void avc_init_assit_buffer(struct encode_wq_s *wq)
726{
727 WRITE_HREG(MEM_OFFSET_REG, wq->mem.assit_buffer_offset);
728}
729
730/*deblock buffer setting, same as INI_CANVAS*/
731static void avc_init_dblk_buffer(s32 canvas)
732{
733 WRITE_HREG(HCODEC_REC_CANVAS_ADDR, canvas);
734 WRITE_HREG(HCODEC_DBKR_CANVAS_ADDR, canvas);
735 WRITE_HREG(HCODEC_DBKW_CANVAS_ADDR, canvas);
736}
737
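/*
 * Per-picture header state: on an IDR both frame_num and
 * pic_order_cnt_lsb restart from zero, otherwise the running values
 * kept in wq->pic are used.
 */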
738static void avc_init_encoder(struct encode_wq_s *wq, bool idr)
739{
740 WRITE_HREG(HCODEC_VLC_TOTAL_BYTES, 0);
741 WRITE_HREG(HCODEC_VLC_CONFIG, 0x07);
742 WRITE_HREG(HCODEC_VLC_INT_CONTROL, 0);
743
744 WRITE_HREG(HCODEC_ASSIST_AMR1_INT0, 0x15);
745 WRITE_HREG(HCODEC_ASSIST_AMR1_INT1, 0x8);
746 WRITE_HREG(HCODEC_ASSIST_AMR1_INT3, 0x14);
747
748 WRITE_HREG(IDR_PIC_ID, wq->pic.idr_pic_id);
749 WRITE_HREG(FRAME_NUMBER,
750 (idr == true) ? 0 : wq->pic.frame_number);
751 WRITE_HREG(PIC_ORDER_CNT_LSB,
752 (idr == true) ? 0 : wq->pic.pic_order_cnt_lsb);
753
754 WRITE_HREG(LOG2_MAX_PIC_ORDER_CNT_LSB,
755 wq->pic.log2_max_pic_order_cnt_lsb);
756 WRITE_HREG(LOG2_MAX_FRAME_NUM,
757 wq->pic.log2_max_frame_num);
758 WRITE_HREG(ANC0_BUFFER_ID, 0);
759 WRITE_HREG(QPPICTURE, wq->pic.init_qppicture);
760}
761
762static void avc_canvas_init(struct encode_wq_s *wq)
763{
764 u32 canvas_width, canvas_height;
765 u32 start_addr = wq->mem.buf_start;
766
767 canvas_width = ((wq->pic.encoder_width + 31) >> 5) << 5;
768 canvas_height = ((wq->pic.encoder_height + 15) >> 4) << 4;
769
770 canvas_config(ENC_CANVAS_OFFSET,
771 start_addr + wq->mem.bufspec.dec0_y.buf_start,
772 canvas_width, canvas_height,
773 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
774 canvas_config(1 + ENC_CANVAS_OFFSET,
775 start_addr + wq->mem.bufspec.dec0_uv.buf_start,
776 canvas_width, canvas_height / 2,
777 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
778	/* here the third plane uses the same address as the second plane */
779 canvas_config(2 + ENC_CANVAS_OFFSET,
780 start_addr + wq->mem.bufspec.dec0_uv.buf_start,
781 canvas_width, canvas_height / 2,
782 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
783
784 canvas_config(3 + ENC_CANVAS_OFFSET,
785 start_addr + wq->mem.bufspec.dec1_y.buf_start,
786 canvas_width, canvas_height,
787 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
788 canvas_config(4 + ENC_CANVAS_OFFSET,
789 start_addr + wq->mem.bufspec.dec1_uv.buf_start,
790 canvas_width, canvas_height / 2,
791 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
792	/* here the third plane uses the same address as the second plane */
793 canvas_config(5 + ENC_CANVAS_OFFSET,
794 start_addr + wq->mem.bufspec.dec1_uv.buf_start,
795 canvas_width, canvas_height / 2,
796 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
797}
798
799static void avc_buffspec_init(struct encode_wq_s *wq)
800{
801 u32 canvas_width, canvas_height;
802 u32 start_addr = wq->mem.buf_start;
803 u32 mb_w = (wq->pic.encoder_width + 15) >> 4;
804 u32 mb_h = (wq->pic.encoder_height + 15) >> 4;
805 u32 mbs = mb_w * mb_h;
806
807 canvas_width = ((wq->pic.encoder_width + 31) >> 5) << 5;
808 canvas_height = ((wq->pic.encoder_height + 15) >> 4) << 4;
809
810 wq->mem.dct_buff_start_addr = start_addr +
811 wq->mem.bufspec.dct.buf_start;
812 wq->mem.dct_buff_end_addr =
813 wq->mem.dct_buff_start_addr +
814 wq->mem.bufspec.dct.buf_size - 1;
815 enc_pr(LOG_INFO, "dct_buff_start_addr is 0x%x, wq:%p.\n",
816 wq->mem.dct_buff_start_addr, (void *)wq);
817
818 wq->mem.bufspec.dec0_uv.buf_start =
819 wq->mem.bufspec.dec0_y.buf_start +
820 canvas_width * canvas_height;
821 wq->mem.bufspec.dec0_uv.buf_size = canvas_width * canvas_height / 2;
822 wq->mem.bufspec.dec1_uv.buf_start =
823 wq->mem.bufspec.dec1_y.buf_start +
824 canvas_width * canvas_height;
825 wq->mem.bufspec.dec1_uv.buf_size = canvas_width * canvas_height / 2;
826 wq->mem.assit_buffer_offset = start_addr +
827 wq->mem.bufspec.assit.buf_start;
828 enc_pr(LOG_INFO, "assit_buffer_offset is 0x%x, wq: %p.\n",
829 wq->mem.assit_buffer_offset, (void *)wq);
830 /*output stream buffer config*/
831 wq->mem.BitstreamStart = start_addr +
832 wq->mem.bufspec.bitstream.buf_start;
833 wq->mem.BitstreamEnd =
834 wq->mem.BitstreamStart +
835 wq->mem.bufspec.bitstream.buf_size - 1;
836 enc_pr(LOG_INFO, "BitstreamStart is 0x%x, wq: %p.\n",
837 wq->mem.BitstreamStart, (void *)wq);
838
839 wq->mem.scaler_buff_start_addr =
840 wq->mem.buf_start + wq->mem.bufspec.scale_buff.buf_start;
841 wq->mem.dump_info_ddr_start_addr =
842 wq->mem.buf_start + wq->mem.bufspec.dump_info.buf_start;
843 enc_pr(LOG_INFO,
844 "CBR: dump_info_ddr_start_addr:%x.\n",
845 wq->mem.dump_info_ddr_start_addr);
846 enc_pr(LOG_INFO, "CBR: buf_start :%d.\n",
847 wq->mem.buf_start);
848 enc_pr(LOG_INFO, "CBR: dump_info.buf_start :%d.\n",
849 wq->mem.bufspec.dump_info.buf_start);
850 wq->mem.dump_info_ddr_size =
851 DUMP_INFO_BYTES_PER_MB * mbs;
852 wq->mem.dump_info_ddr_size =
853 (wq->mem.dump_info_ddr_size + PAGE_SIZE - 1)
854 & ~(PAGE_SIZE - 1);
855 wq->mem.cbr_info_ddr_start_addr =
856 wq->mem.buf_start + wq->mem.bufspec.cbr_info.buf_start;
857 wq->mem.cbr_info_ddr_size =
858 wq->mem.bufspec.cbr_info.buf_size;
859 wq->mem.cbr_info_ddr_virt_addr =
860 codec_mm_vmap(wq->mem.cbr_info_ddr_start_addr,
861 wq->mem.bufspec.cbr_info.buf_size);
862
863 wq->mem.dblk_buf_canvas =
864 ((ENC_CANVAS_OFFSET + 2) << 16) |
865 ((ENC_CANVAS_OFFSET + 1) << 8) |
866 (ENC_CANVAS_OFFSET);
867 wq->mem.ref_buf_canvas =
868 ((ENC_CANVAS_OFFSET + 5) << 16) |
869 ((ENC_CANVAS_OFFSET + 4) << 8) |
870 (ENC_CANVAS_OFFSET + 3);
871}
872
873static void avc_init_ie_me_parameter(struct encode_wq_s *wq, u32 quant)
874{
875 ie_cur_ref_sel = 0;
876 ie_pippeline_block = 12;
877 /* currently disable half and sub pixel */
878 ie_me_mode =
879 (ie_pippeline_block & IE_PIPPELINE_BLOCK_MASK) <<
880 IE_PIPPELINE_BLOCK_SHIFT;
881
882 WRITE_HREG(IE_ME_MODE, ie_me_mode);
883 WRITE_HREG(IE_REF_SEL, ie_cur_ref_sel);
884 WRITE_HREG(IE_ME_MB_TYPE, ie_me_mb_type);
885#ifdef MULTI_SLICE_MC
886 if (fixed_slice_cfg)
887 WRITE_HREG(FIXED_SLICE_CFG, fixed_slice_cfg);
888 else if (wq->pic.rows_per_slice !=
889 (wq->pic.encoder_height + 15) >> 4) {
890 u32 mb_per_slice = (wq->pic.encoder_height + 15) >> 4;
891
892 mb_per_slice = mb_per_slice * wq->pic.rows_per_slice;
893 WRITE_HREG(FIXED_SLICE_CFG, mb_per_slice);
894 } else
895 WRITE_HREG(FIXED_SLICE_CFG, 0);
896#else
897 WRITE_HREG(FIXED_SLICE_CFG, 0);
898#endif
899}
900
901/* for temp */
902#define HCODEC_MFDIN_REGC_MBLP (HCODEC_MFDIN_REGB_AMPC + 0x1)
903#define HCODEC_MFDIN_REG0D (HCODEC_MFDIN_REGB_AMPC + 0x2)
904#define HCODEC_MFDIN_REG0E (HCODEC_MFDIN_REGB_AMPC + 0x3)
905#define HCODEC_MFDIN_REG0F (HCODEC_MFDIN_REGB_AMPC + 0x4)
906#define HCODEC_MFDIN_REG10 (HCODEC_MFDIN_REGB_AMPC + 0x5)
907#define HCODEC_MFDIN_REG11 (HCODEC_MFDIN_REGB_AMPC + 0x6)
908#define HCODEC_MFDIN_REG12 (HCODEC_MFDIN_REGB_AMPC + 0x7)
909#define HCODEC_MFDIN_REG13 (HCODEC_MFDIN_REGB_AMPC + 0x8)
910#define HCODEC_MFDIN_REG14 (HCODEC_MFDIN_REGB_AMPC + 0x9)
911#define HCODEC_MFDIN_REG15 (HCODEC_MFDIN_REGB_AMPC + 0xa)
912#define HCODEC_MFDIN_REG16 (HCODEC_MFDIN_REGB_AMPC + 0xb)
913
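/*
 * mfdin_basic() programs the MFDIN front end.  From its callers in
 * set_input_format(): iformat 0/1 are packed 4:2:2/4:4:4 (or RGB888),
 * 2/3 are NV21/NV12, 4 is three-plane YUV420, 5 is planar 4:4:4 or
 * planar RGB, 7 selects the 10/12-bit modes refined by ifmt_extra,
 * and values >= 8 switch to linear (non-canvas) addressing, with 12
 * being RGBA8888.
 */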
914static void mfdin_basic(u32 input, u8 iformat,
915 u8 oformat, u32 picsize_x, u32 picsize_y,
916 u8 r2y_en, u8 nr, u8 ifmt_extra)
917{
918 u8 dsample_en; /* Downsample Enable */
919 u8 interp_en; /* Interpolation Enable */
920 u8 y_size; /* 0:16 Pixels for y direction pickup; 1:8 pixels */
921 u8 r2y_mode; /* RGB2YUV Mode, range(0~3) */
922 /* mfdin_reg3_canv[25:24];
923 * // bytes per pixel in x direction for index0, 0:half 1:1 2:2 3:3
924 */
925 u8 canv_idx0_bppx;
926 /* mfdin_reg3_canv[27:26];
927 * // bytes per pixel in x direction for index1-2, 0:half 1:1 2:2 3:3
928 */
929 u8 canv_idx1_bppx;
930 /* mfdin_reg3_canv[29:28];
931 * // bytes per pixel in y direction for index0, 0:half 1:1 2:2 3:3
932 */
933 u8 canv_idx0_bppy;
934 /* mfdin_reg3_canv[31:30];
935 * // bytes per pixel in y direction for index1-2, 0:half 1:1 2:2 3:3
936 */
937 u8 canv_idx1_bppy;
938 u8 ifmt444, ifmt422, ifmt420, linear_bytes4p;
939 u8 nr_enable;
940 u8 cfg_y_snr_en;
941 u8 cfg_y_tnr_en;
942 u8 cfg_c_snr_en;
943 u8 cfg_c_tnr_en;
944 u32 linear_bytesperline;
945 s32 reg_offset;
946 bool linear_enable = false;
947 bool format_err = false;
948
949 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_TXL) {
950 if ((iformat == 7) && (ifmt_extra > 2))
951 format_err = true;
952 } else if (iformat == 7)
953 format_err = true;
954
955 if (format_err) {
956 enc_pr(LOG_ERROR,
957 "mfdin format err, iformat:%d, ifmt_extra:%d\n",
958 iformat, ifmt_extra);
959 return;
960 }
961 if (iformat != 7)
962 ifmt_extra = 0;
963
964 ifmt444 = ((iformat == 1) || (iformat == 5) || (iformat == 8) ||
965 (iformat == 9) || (iformat == 12)) ? 1 : 0;
966 if (iformat == 7 && ifmt_extra == 1)
967 ifmt444 = 1;
968 ifmt422 = ((iformat == 0) || (iformat == 10)) ? 1 : 0;
969 if (iformat == 7 && ifmt_extra != 1)
970 ifmt422 = 1;
971 ifmt420 = ((iformat == 2) || (iformat == 3) || (iformat == 4) ||
972 (iformat == 11)) ? 1 : 0;
973 dsample_en = ((ifmt444 && (oformat != 2)) ||
974 (ifmt422 && (oformat == 0))) ? 1 : 0;
975 interp_en = ((ifmt422 && (oformat == 2)) ||
976 (ifmt420 && (oformat != 0))) ? 1 : 0;
977 y_size = (oformat != 0) ? 1 : 0;
978 if (iformat == 12)
979 y_size = 0;
980 r2y_mode = (r2y_en == 1) ? 1 : 0; /* Fixed to 1 (TODO) */
981 canv_idx0_bppx = (iformat == 1) ? 3 : (iformat == 0) ? 2 : 1;
982 canv_idx1_bppx = (iformat == 4) ? 0 : 1;
983 canv_idx0_bppy = 1;
984 canv_idx1_bppy = (iformat == 5) ? 1 : 0;
985
986 if ((iformat == 8) || (iformat == 9) || (iformat == 12))
987 linear_bytes4p = 3;
988 else if (iformat == 10)
989 linear_bytes4p = 2;
990 else if (iformat == 11)
991 linear_bytes4p = 1;
992 else
993 linear_bytes4p = 0;
994 if (iformat == 12)
995 linear_bytesperline = picsize_x * 4;
996 else
997 linear_bytesperline = picsize_x * linear_bytes4p;
998
999 if (iformat < 8)
1000 linear_enable = false;
1001 else
1002 linear_enable = true;
1003
1004 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) {
1005 reg_offset = -8;
1006 /* nr_mode: 0:Disabled 1:SNR Only 2:TNR Only 3:3DNR */
1007 nr_enable = (nr) ? 1 : 0;
1008 cfg_y_snr_en = ((nr == 1) || (nr == 3)) ? 1 : 0;
1009 cfg_y_tnr_en = ((nr == 2) || (nr == 3)) ? 1 : 0;
1010 cfg_c_snr_en = cfg_y_snr_en;
1011 /* cfg_c_tnr_en = cfg_y_tnr_en; */
1012 cfg_c_tnr_en = 0;
1013
1014 /* NR For Y */
1015 WRITE_HREG((HCODEC_MFDIN_REG0D + reg_offset),
1016 ((cfg_y_snr_en << 0) |
1017 (y_snr_err_norm << 1) |
1018 (y_snr_gau_bld_core << 2) |
1019 (((y_snr_gau_bld_ofst) & 0xff) << 6) |
1020 (y_snr_gau_bld_rate << 14) |
1021 (y_snr_gau_alp0_min << 20) |
1022 (y_snr_gau_alp0_max << 26)));
1023 WRITE_HREG((HCODEC_MFDIN_REG0E + reg_offset),
1024 ((cfg_y_tnr_en << 0) |
1025 (y_tnr_mc_en << 1) |
1026 (y_tnr_txt_mode << 2) |
1027 (y_tnr_mot_sad_margin << 3) |
1028 (y_tnr_alpha_min << 7) |
1029 (y_tnr_alpha_max << 13) |
1030 (y_tnr_deghost_os << 19)));
1031 WRITE_HREG((HCODEC_MFDIN_REG0F + reg_offset),
1032 ((y_tnr_mot_cortxt_rate << 0) |
1033 (y_tnr_mot_distxt_ofst << 8) |
1034 (y_tnr_mot_distxt_rate << 4) |
1035 (y_tnr_mot_dismot_ofst << 16) |
1036 (y_tnr_mot_frcsad_lock << 24)));
1037 WRITE_HREG((HCODEC_MFDIN_REG10 + reg_offset),
1038 ((y_tnr_mot2alp_frc_gain << 0) |
1039 (y_tnr_mot2alp_nrm_gain << 8) |
1040 (y_tnr_mot2alp_dis_gain << 16) |
1041 (y_tnr_mot2alp_dis_ofst << 24)));
1042 WRITE_HREG((HCODEC_MFDIN_REG11 + reg_offset),
1043 ((y_bld_beta2alp_rate << 0) |
1044 (y_bld_beta_min << 8) |
1045 (y_bld_beta_max << 14)));
1046
1047 /* NR For C */
1048 WRITE_HREG((HCODEC_MFDIN_REG12 + reg_offset),
1049			((cfg_c_snr_en << 0) |
1050 (c_snr_err_norm << 1) |
1051 (c_snr_gau_bld_core << 2) |
1052 (((c_snr_gau_bld_ofst) & 0xff) << 6) |
1053 (c_snr_gau_bld_rate << 14) |
1054 (c_snr_gau_alp0_min << 20) |
1055 (c_snr_gau_alp0_max << 26)));
1056
1057 WRITE_HREG((HCODEC_MFDIN_REG13 + reg_offset),
1058 ((cfg_c_tnr_en << 0) |
1059 (c_tnr_mc_en << 1) |
1060 (c_tnr_txt_mode << 2) |
1061 (c_tnr_mot_sad_margin << 3) |
1062 (c_tnr_alpha_min << 7) |
1063 (c_tnr_alpha_max << 13) |
1064 (c_tnr_deghost_os << 19)));
1065 WRITE_HREG((HCODEC_MFDIN_REG14 + reg_offset),
1066 ((c_tnr_mot_cortxt_rate << 0) |
1067 (c_tnr_mot_distxt_ofst << 8) |
1068 (c_tnr_mot_distxt_rate << 4) |
1069 (c_tnr_mot_dismot_ofst << 16) |
1070 (c_tnr_mot_frcsad_lock << 24)));
1071 WRITE_HREG((HCODEC_MFDIN_REG15 + reg_offset),
1072 ((c_tnr_mot2alp_frc_gain << 0) |
1073 (c_tnr_mot2alp_nrm_gain << 8) |
1074 (c_tnr_mot2alp_dis_gain << 16) |
1075 (c_tnr_mot2alp_dis_ofst << 24)));
1076
1077 WRITE_HREG((HCODEC_MFDIN_REG16 + reg_offset),
1078 ((c_bld_beta2alp_rate << 0) |
1079 (c_bld_beta_min << 8) |
1080 (c_bld_beta_max << 14)));
1081
1082 WRITE_HREG((HCODEC_MFDIN_REG1_CTRL + reg_offset),
1083 (iformat << 0) | (oformat << 4) |
1084 (dsample_en << 6) | (y_size << 8) |
1085 (interp_en << 9) | (r2y_en << 12) |
1086 (r2y_mode << 13) | (ifmt_extra << 16) |
1087 (nr_enable << 19));
1088
1089 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_SC2) {
1090 WRITE_HREG((HCODEC_MFDIN_REG8_DMBL + reg_offset),
1091 (picsize_x << 16) | (picsize_y << 0));
1092 } else {
1093 WRITE_HREG((HCODEC_MFDIN_REG8_DMBL + reg_offset),
1094 (picsize_x << 14) | (picsize_y << 0));
1095 }
1096 } else {
1097 reg_offset = 0;
1098 WRITE_HREG((HCODEC_MFDIN_REG1_CTRL + reg_offset),
1099 (iformat << 0) | (oformat << 4) |
1100 (dsample_en << 6) | (y_size << 8) |
1101 (interp_en << 9) | (r2y_en << 12) |
1102 (r2y_mode << 13));
1103
1104 WRITE_HREG((HCODEC_MFDIN_REG8_DMBL + reg_offset),
1105 (picsize_x << 12) | (picsize_y << 0));
1106 }
1107
1108 if (linear_enable == false) {
1109 WRITE_HREG((HCODEC_MFDIN_REG3_CANV + reg_offset),
1110 (input & 0xffffff) |
1111 (canv_idx1_bppy << 30) |
1112 (canv_idx0_bppy << 28) |
1113 (canv_idx1_bppx << 26) |
1114 (canv_idx0_bppx << 24));
1115 WRITE_HREG((HCODEC_MFDIN_REG4_LNR0 + reg_offset),
1116 (0 << 16) | (0 << 0));
1117 WRITE_HREG((HCODEC_MFDIN_REG5_LNR1 + reg_offset), 0);
1118 } else {
1119 WRITE_HREG((HCODEC_MFDIN_REG3_CANV + reg_offset),
1120 (canv_idx1_bppy << 30) |
1121 (canv_idx0_bppy << 28) |
1122 (canv_idx1_bppx << 26) |
1123 (canv_idx0_bppx << 24));
1124 WRITE_HREG((HCODEC_MFDIN_REG4_LNR0 + reg_offset),
1125 (linear_bytes4p << 16) | (linear_bytesperline << 0));
1126 WRITE_HREG((HCODEC_MFDIN_REG5_LNR1 + reg_offset), input);
1127 }
1128
1129 if (iformat == 12)
1130 WRITE_HREG((HCODEC_MFDIN_REG9_ENDN + reg_offset),
1131 (2 << 0) | (1 << 3) | (0 << 6) |
1132 (3 << 9) | (6 << 12) | (5 << 15) |
1133 (4 << 18) | (7 << 21));
1134 else
1135 WRITE_HREG((HCODEC_MFDIN_REG9_ENDN + reg_offset),
1136 (7 << 0) | (6 << 3) | (5 << 6) |
1137 (4 << 9) | (3 << 12) | (2 << 15) |
1138 (1 << 18) | (0 << 21));
1139}
1140
1141#ifdef CONFIG_AMLOGIC_MEDIA_GE2D
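/*
 * scale_frame() crops the source (canvas- or address-based) to the
 * requested rectangle and uses GE2D to convert and scale it into an
 * NV21 frame of the 16-aligned encoder size, written to the scaler
 * buffer behind canvases +6/+7; the return value is the size in
 * bytes of that scaled NV21 frame.
 */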
1142static int scale_frame(struct encode_wq_s *wq,
1143 struct encode_request_s *request,
1144 struct config_para_ex_s *ge2d_config,
1145 u32 src_addr, bool canvas)
1146{
1147 struct ge2d_context_s *context = encode_manager.context;
1148 int src_top, src_left, src_width, src_height;
1149 struct canvas_s cs0, cs1, cs2, cd;
1150 u32 src_canvas, dst_canvas;
1151 u32 src_canvas_w, dst_canvas_w;
1152 u32 src_h = request->src_h;
1153 u32 dst_w = ((wq->pic.encoder_width + 15) >> 4) << 4;
1154 u32 dst_h = ((wq->pic.encoder_height + 15) >> 4) << 4;
1155 int input_format = GE2D_FORMAT_M24_NV21;
1156
1157 src_top = request->crop_top;
1158 src_left = request->crop_left;
1159 src_width = request->src_w - src_left - request->crop_right;
1160 src_height = request->src_h - src_top - request->crop_bottom;
1161 pr_err("request->fmt=%d, %d %d, canvas=%d\n", request->fmt, FMT_NV21, FMT_BGR888, canvas);
1162
1163 if (canvas) {
1164 if ((request->fmt == FMT_NV21)
1165 || (request->fmt == FMT_NV12)) {
1166 src_canvas = src_addr & 0xffff;
1167 input_format = GE2D_FORMAT_M24_NV21;
1168 } else if (request->fmt == FMT_BGR888) {
1169 src_canvas = src_addr & 0xffffff;
1170			input_format = GE2D_FORMAT_S24_RGB; // color order comes out swapped after ge2d
1171 } else if (request->fmt == FMT_RGBA8888) {
1172 src_canvas = src_addr & 0xffffff;
1173 input_format = GE2D_FORMAT_S32_ABGR;
1174 } else {
1175 src_canvas = src_addr & 0xffffff;
1176 input_format = GE2D_FORMAT_M24_YUV420;
1177 }
1178 } else {
1179 if ((request->fmt == FMT_NV21)
1180 || (request->fmt == FMT_NV12)) {
1181 src_canvas_w =
1182 ((request->src_w + 31) >> 5) << 5;
1183 canvas_config(ENC_CANVAS_OFFSET + 9,
1184 src_addr,
1185 src_canvas_w, src_h,
1186 CANVAS_ADDR_NOWRAP,
1187 CANVAS_BLKMODE_LINEAR);
1188 canvas_config(ENC_CANVAS_OFFSET + 10,
1189 src_addr + src_canvas_w * src_h,
1190 src_canvas_w, src_h / 2,
1191 CANVAS_ADDR_NOWRAP,
1192 CANVAS_BLKMODE_LINEAR);
1193 src_canvas =
1194 ((ENC_CANVAS_OFFSET + 10) << 8)
1195 | (ENC_CANVAS_OFFSET + 9);
1196 input_format = GE2D_FORMAT_M24_NV21;
1197 } else if (request->fmt == FMT_BGR888) {
1198 src_canvas_w =
1199 ((request->src_w + 31) >> 5) << 5;
1200
1201 canvas_config(ENC_CANVAS_OFFSET + 9,
1202 src_addr,
1203 src_canvas_w * 3, src_h,
1204 CANVAS_ADDR_NOWRAP,
1205 CANVAS_BLKMODE_LINEAR);
1206 src_canvas = ENC_CANVAS_OFFSET + 9;
1207			input_format = GE2D_FORMAT_S24_RGB; // color order comes out swapped after ge2d
1208 } else if (request->fmt == FMT_RGBA8888) {
1209 src_canvas_w =
1210 ((request->src_w + 31) >> 5) << 5;
1211 canvas_config(
1212 ENC_CANVAS_OFFSET + 9,
1213 src_addr,
1214 src_canvas_w * 4,
1215 src_h,
1216 CANVAS_ADDR_NOWRAP,
1217 CANVAS_BLKMODE_LINEAR);
1218 src_canvas = ENC_CANVAS_OFFSET + 9;
1219			input_format = GE2D_FORMAT_S32_ABGR; // color order comes out swapped after ge2d
1220 } else {
1221 src_canvas_w =
1222 ((request->src_w + 63) >> 6) << 6;
1223 canvas_config(ENC_CANVAS_OFFSET + 9,
1224 src_addr,
1225 src_canvas_w, src_h,
1226 CANVAS_ADDR_NOWRAP,
1227 CANVAS_BLKMODE_LINEAR);
1228 canvas_config(ENC_CANVAS_OFFSET + 10,
1229 src_addr + src_canvas_w * src_h,
1230 src_canvas_w / 2, src_h / 2,
1231 CANVAS_ADDR_NOWRAP,
1232 CANVAS_BLKMODE_LINEAR);
1233 canvas_config(ENC_CANVAS_OFFSET + 11,
1234 src_addr + src_canvas_w * src_h * 5 / 4,
1235 src_canvas_w / 2, src_h / 2,
1236 CANVAS_ADDR_NOWRAP,
1237 CANVAS_BLKMODE_LINEAR);
1238 src_canvas =
1239 ((ENC_CANVAS_OFFSET + 11) << 16) |
1240 ((ENC_CANVAS_OFFSET + 10) << 8) |
1241 (ENC_CANVAS_OFFSET + 9);
1242 input_format = GE2D_FORMAT_M24_YUV420;
1243 }
1244 }
1245
1246 dst_canvas_w = ((dst_w + 31) >> 5) << 5;
1247
1248 canvas_config(ENC_CANVAS_OFFSET + 6,
1249 wq->mem.scaler_buff_start_addr,
1250 dst_canvas_w, dst_h,
1251 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
1252
1253 canvas_config(ENC_CANVAS_OFFSET + 7,
1254 wq->mem.scaler_buff_start_addr + dst_canvas_w * dst_h,
1255 dst_canvas_w, dst_h / 2,
1256 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
1257
1258 dst_canvas = ((ENC_CANVAS_OFFSET + 7) << 8) |
1259 (ENC_CANVAS_OFFSET + 6);
1260
1261 ge2d_config->alu_const_color = 0;
1262 ge2d_config->bitmask_en = 0;
1263 ge2d_config->src1_gb_alpha = 0;
1264 ge2d_config->dst_xy_swap = 0;
1265 canvas_read(src_canvas & 0xff, &cs0);
1266 canvas_read((src_canvas >> 8) & 0xff, &cs1);
1267 canvas_read((src_canvas >> 16) & 0xff, &cs2);
1268 ge2d_config->src_planes[0].addr = cs0.addr;
1269 ge2d_config->src_planes[0].w = dst_w * 4;//cs0.width;
1270 ge2d_config->src_planes[0].h = dst_h;//cs0.height;
1271 ge2d_config->src_planes[1].addr = cs1.addr;
1272 ge2d_config->src_planes[1].w = cs1.width;
1273 ge2d_config->src_planes[1].h = cs1.height;
1274 ge2d_config->src_planes[2].addr = cs2.addr;
1275 ge2d_config->src_planes[2].w = cs2.width;
1276 ge2d_config->src_planes[2].h = cs2.height;
1277
1278 canvas_read(dst_canvas & 0xff, &cd);
1279
1280 ge2d_config->dst_planes[0].addr = cd.addr;
1281 ge2d_config->dst_planes[0].w = dst_w * 4;//cd.width;
1282 ge2d_config->dst_planes[0].h = dst_h;//cd.height;
1283 ge2d_config->src_key.key_enable = 0;
1284 ge2d_config->src_key.key_mask = 0;
1285 ge2d_config->src_key.key_mode = 0;
1286 ge2d_config->src_para.canvas_index = src_canvas;
1287 ge2d_config->src_para.mem_type = CANVAS_TYPE_INVALID;
1288 ge2d_config->src_para.format = input_format | GE2D_LITTLE_ENDIAN;
1289 ge2d_config->src_para.fill_color_en = 0;
1290 ge2d_config->src_para.fill_mode = 0;
1291 ge2d_config->src_para.x_rev = 0;
1292 ge2d_config->src_para.y_rev = 0;
1293 ge2d_config->src_para.color = 0xffffffff;
1294 ge2d_config->src_para.top = 0;
1295 ge2d_config->src_para.left = 0;
1296 ge2d_config->src_para.width = dst_w;//request->src_w;
1297 ge2d_config->src_para.height = dst_h;//request->src_h;
1298 ge2d_config->src2_para.mem_type = CANVAS_TYPE_INVALID;
1299 ge2d_config->dst_para.canvas_index = dst_canvas;
1300 ge2d_config->dst_para.mem_type = CANVAS_TYPE_INVALID;
1301 ge2d_config->dst_para.format =
1302 GE2D_FORMAT_M24_NV21 | GE2D_LITTLE_ENDIAN;
1303
1304 if (wq->pic.encoder_width >= 1280 && wq->pic.encoder_height >= 720) {
1305 ge2d_config->dst_para.format |= wq->pic.color_space;
1306 }
1307
1308 ge2d_config->dst_para.fill_color_en = 0;
1309 ge2d_config->dst_para.fill_mode = 0;
1310 ge2d_config->dst_para.x_rev = 0;
1311 ge2d_config->dst_para.y_rev = 0;
1312 ge2d_config->dst_para.color = 0;
1313 ge2d_config->dst_para.top = 0;
1314 ge2d_config->dst_para.left = 0;
1315 ge2d_config->dst_para.width = dst_w;
1316 ge2d_config->dst_para.height = dst_h;
1317 ge2d_config->dst_para.x_rev = 0;
1318 ge2d_config->dst_para.y_rev = 0;
1319
1320
1321 if (ge2d_context_config_ex(context, ge2d_config) < 0) {
1322		pr_err("++ge2d config error.\n");
1323 return -1;
1324 }
1325 stretchblt_noalpha(context, src_left, src_top, src_width, src_height,
1326 0, 0, wq->pic.encoder_width, wq->pic.encoder_height);
1327 return dst_canvas_w*dst_h * 3 / 2;
1328}
1329#endif
1330
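/*
 * set_input_format() resolves the request's buffer type (local,
 * physical/DMA or canvas) and pixel format into canvas configurations
 * plus the iformat/ifmt_extra/r2y parameters passed to mfdin_basic().
 * When request->scale_enable is set, the frame is first run through
 * scale_frame() and MFDIN is then fed the NV21 scaler output.
 */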
1331static s32 set_input_format(struct encode_wq_s *wq,
1332 struct encode_request_s *request)
1333{
1334 s32 ret = 0;
1335 u8 iformat = MAX_FRAME_FMT, oformat = MAX_FRAME_FMT, r2y_en = 0;
1336 u32 picsize_x, picsize_y, src_addr;
1337 u32 canvas_w = 0;
1338 u32 input = request->src;
1339 u32 input_y = 0;
1340 u32 input_u = 0;
1341 u32 input_v = 0;
1342 u8 ifmt_extra = 0;
1343
1344 if ((request->fmt == FMT_RGB565) || (request->fmt >= MAX_FRAME_FMT))
1345 return -1;
1346
1347 picsize_x = ((wq->pic.encoder_width + 15) >> 4) << 4;
1348 picsize_y = ((wq->pic.encoder_height + 15) >> 4) << 4;
1349 oformat = 0;
1350
1351 if ((request->type == LOCAL_BUFF)
1352 || (request->type == PHYSICAL_BUFF)
1353 || (request->type == DMA_BUFF)) {
1354 if ((request->type == LOCAL_BUFF) &&
1355 (request->flush_flag & AMVENC_FLUSH_FLAG_INPUT))
1356 dma_flush(wq->mem.dct_buff_start_addr,
1357 request->framesize);
1358 if (request->type == LOCAL_BUFF) {
1359 input = wq->mem.dct_buff_start_addr;
1360 src_addr =
1361 wq->mem.dct_buff_start_addr;
1362 } else if (request->type == DMA_BUFF) {
1363 if (request->plane_num == 3) {
1364 input_y = (unsigned long)request->dma_cfg[0].paddr;
1365 input_u = (unsigned long)request->dma_cfg[1].paddr;
1366 input_v = (unsigned long)request->dma_cfg[2].paddr;
1367 } else if (request->plane_num == 2) {
1368 input_y = (unsigned long)request->dma_cfg[0].paddr;
1369 input_u = (unsigned long)request->dma_cfg[1].paddr;
1370 input_v = input_u;
1371 } else if (request->plane_num == 1) {
1372 input_y = (unsigned long)request->dma_cfg[0].paddr;
1373 if (request->fmt == FMT_NV21
1374 || request->fmt == FMT_NV12) {
1375 input_u = input_y + picsize_x * picsize_y;
1376 input_v = input_u;
1377 }
1378 if (request->fmt == FMT_YUV420) {
1379 input_u = input_y + picsize_x * picsize_y;
1380 input_v = input_u + picsize_x * picsize_y / 4;
1381 }
1382 }
1383 src_addr = input_y;
1384 picsize_y = wq->pic.encoder_height;
1385 enc_pr(LOG_INFO, "dma addr[0x%lx 0x%lx 0x%lx 0x%lx 0x%lx 0x%lx]\n",
1386 (unsigned long)request->dma_cfg[0].vaddr,
1387 (unsigned long)request->dma_cfg[0].paddr,
1388 (unsigned long)request->dma_cfg[1].vaddr,
1389 (unsigned long)request->dma_cfg[1].paddr,
1390 (unsigned long)request->dma_cfg[2].vaddr,
1391 (unsigned long)request->dma_cfg[2].paddr);
1392 } else {
1393 src_addr = input;
1394 picsize_y = wq->pic.encoder_height;
1395 }
1396
1397 if (request->scale_enable) {
1398#ifdef CONFIG_AMLOGIC_MEDIA_GE2D
1399 struct config_para_ex_s ge2d_config;
1400
1401 memset(&ge2d_config, 0,
1402 sizeof(struct config_para_ex_s));
1403 scale_frame(
1404 wq, request,
1405 &ge2d_config,
1406 src_addr,
1407 false);
1408 iformat = 2;
1409 r2y_en = 0;
1410 input = ((ENC_CANVAS_OFFSET + 7) << 8) |
1411 (ENC_CANVAS_OFFSET + 6);
1412 ret = 0;
1413 goto MFDIN;
1414#else
1415 enc_pr(LOG_ERROR,
1416			"Warning: need to enable ge2d for frame scaling!\n");
1417 return -1;
1418#endif
1419 }
1420 if ((request->fmt <= FMT_YUV444_PLANE) ||
1421 (request->fmt >= FMT_YUV422_12BIT))
1422 r2y_en = 0;
1423 else
1424 r2y_en = 1;
1425
1426 if (request->fmt >= FMT_YUV422_12BIT) {
1427 iformat = 7;
1428 ifmt_extra = request->fmt - FMT_YUV422_12BIT;
1429 if (request->fmt == FMT_YUV422_12BIT)
1430 canvas_w = picsize_x * 24 / 8;
1431 else if (request->fmt == FMT_YUV444_10BIT)
1432 canvas_w = picsize_x * 32 / 8;
1433 else
1434 canvas_w = (picsize_x * 20 + 7) / 8;
1435 canvas_w = ((canvas_w + 31) >> 5) << 5;
1436 canvas_config(ENC_CANVAS_OFFSET + 6,
1437 input,
1438 canvas_w, picsize_y,
1439 CANVAS_ADDR_NOWRAP,
1440 CANVAS_BLKMODE_LINEAR);
1441 input = ENC_CANVAS_OFFSET + 6;
1442 input = input & 0xff;
1443 } else if (request->fmt == FMT_YUV422_SINGLE)
1444 iformat = 10;
1445 else if ((request->fmt == FMT_YUV444_SINGLE)
1446 || (request->fmt == FMT_RGB888)) {
1447 iformat = 1;
1448 if (request->fmt == FMT_RGB888)
1449 r2y_en = 1;
1450 canvas_w = picsize_x * 3;
1451 canvas_w = ((canvas_w + 31) >> 5) << 5;
1452 canvas_config(ENC_CANVAS_OFFSET + 6,
1453 input,
1454 canvas_w, picsize_y,
1455 CANVAS_ADDR_NOWRAP,
1456 CANVAS_BLKMODE_LINEAR);
1457 input = ENC_CANVAS_OFFSET + 6;
1458 } else if ((request->fmt == FMT_NV21)
1459 || (request->fmt == FMT_NV12)) {
1460 canvas_w = ((wq->pic.encoder_width + 31) >> 5) << 5;
1461 iformat = (request->fmt == FMT_NV21) ? 2 : 3;
1462 if (request->type == DMA_BUFF) {
1463 canvas_config(ENC_CANVAS_OFFSET + 6,
1464 input_y,
1465 canvas_w, picsize_y,
1466 CANVAS_ADDR_NOWRAP,
1467 CANVAS_BLKMODE_LINEAR);
1468 canvas_config(ENC_CANVAS_OFFSET + 7,
1469 input_u,
1470 canvas_w, picsize_y / 2,
1471 CANVAS_ADDR_NOWRAP,
1472 CANVAS_BLKMODE_LINEAR);
1473 } else {
1474 canvas_config(ENC_CANVAS_OFFSET + 6,
1475 input,
1476 canvas_w, picsize_y,
1477 CANVAS_ADDR_NOWRAP,
1478 CANVAS_BLKMODE_LINEAR);
1479 canvas_config(ENC_CANVAS_OFFSET + 7,
1480 input + canvas_w * picsize_y,
1481 canvas_w, picsize_y / 2,
1482 CANVAS_ADDR_NOWRAP,
1483 CANVAS_BLKMODE_LINEAR);
1484 }
1485 input = ((ENC_CANVAS_OFFSET + 7) << 8) |
1486 (ENC_CANVAS_OFFSET + 6);
1487 } else if (request->fmt == FMT_YUV420) {
1488 iformat = 4;
1489 canvas_w = ((wq->pic.encoder_width + 63) >> 6) << 6;
1490 if (request->type == DMA_BUFF) {
1491 canvas_config(ENC_CANVAS_OFFSET + 6,
1492 input_y,
1493 canvas_w, picsize_y,
1494 CANVAS_ADDR_NOWRAP,
1495 CANVAS_BLKMODE_LINEAR);
1496 canvas_config(ENC_CANVAS_OFFSET + 7,
1497 input_u,
1498 canvas_w / 2, picsize_y / 2,
1499 CANVAS_ADDR_NOWRAP,
1500 CANVAS_BLKMODE_LINEAR);
1501 canvas_config(ENC_CANVAS_OFFSET + 8,
1502 input_v,
1503 canvas_w / 2, picsize_y / 2,
1504 CANVAS_ADDR_NOWRAP,
1505 CANVAS_BLKMODE_LINEAR);
1506 } else {
1507 canvas_config(ENC_CANVAS_OFFSET + 6,
1508 input,
1509 canvas_w, picsize_y,
1510 CANVAS_ADDR_NOWRAP,
1511 CANVAS_BLKMODE_LINEAR);
1512 canvas_config(ENC_CANVAS_OFFSET + 7,
1513 input + canvas_w * picsize_y,
1514 canvas_w / 2, picsize_y / 2,
1515 CANVAS_ADDR_NOWRAP,
1516 CANVAS_BLKMODE_LINEAR);
1517 canvas_config(ENC_CANVAS_OFFSET + 8,
1518 input + canvas_w * picsize_y * 5 / 4,
1519 canvas_w / 2, picsize_y / 2,
1520 CANVAS_ADDR_NOWRAP,
1521 CANVAS_BLKMODE_LINEAR);
1522
1523 }
1524 input = ((ENC_CANVAS_OFFSET + 8) << 16) |
1525 ((ENC_CANVAS_OFFSET + 7) << 8) |
1526 (ENC_CANVAS_OFFSET + 6);
1527 } else if ((request->fmt == FMT_YUV444_PLANE)
1528 || (request->fmt == FMT_RGB888_PLANE)) {
1529 if (request->fmt == FMT_RGB888_PLANE)
1530 r2y_en = 1;
1531 iformat = 5;
1532 canvas_w = ((wq->pic.encoder_width + 31) >> 5) << 5;
1533 canvas_config(ENC_CANVAS_OFFSET + 6,
1534 input,
1535 canvas_w, picsize_y,
1536 CANVAS_ADDR_NOWRAP,
1537 CANVAS_BLKMODE_LINEAR);
1538 canvas_config(ENC_CANVAS_OFFSET + 7,
1539 input + canvas_w * picsize_y,
1540 canvas_w, picsize_y,
1541 CANVAS_ADDR_NOWRAP,
1542 CANVAS_BLKMODE_LINEAR);
1543 canvas_config(ENC_CANVAS_OFFSET + 8,
1544 input + canvas_w * picsize_y * 2,
1545 canvas_w, picsize_y,
1546 CANVAS_ADDR_NOWRAP,
1547 CANVAS_BLKMODE_LINEAR);
1548 input = ((ENC_CANVAS_OFFSET + 8) << 16) |
1549 ((ENC_CANVAS_OFFSET + 7) << 8) |
1550 (ENC_CANVAS_OFFSET + 6);
1551 } else if (request->fmt == FMT_RGBA8888) {
1552 r2y_en = 1;
1553 iformat = 12;
1554 }
1555 ret = 0;
1556 } else if (request->type == CANVAS_BUFF) {
1557 r2y_en = 0;
1558 if (request->scale_enable) {
1559#ifdef CONFIG_AMLOGIC_MEDIA_GE2D
1560 struct config_para_ex_s ge2d_config;
1561 memset(&ge2d_config, 0,
1562 sizeof(struct config_para_ex_s));
1563 scale_frame(
1564 wq, request,
1565 &ge2d_config,
1566 input, true);
1567 iformat = 2;
1568 r2y_en = 0;
1569 input = ((ENC_CANVAS_OFFSET + 7) << 8) |
1570 (ENC_CANVAS_OFFSET + 6);
1571 ret = 0;
1572 goto MFDIN;
1573#else
1574 enc_pr(LOG_ERROR,
1575			"Warning: need to enable ge2d for frame scaling!\n");
1576 return -1;
1577#endif
1578 }
1579 if (request->fmt == FMT_YUV422_SINGLE) {
1580 iformat = 0;
1581 input = input & 0xff;
1582 } else if (request->fmt == FMT_YUV444_SINGLE) {
1583 iformat = 1;
1584 input = input & 0xff;
1585 } else if ((request->fmt == FMT_NV21)
1586 || (request->fmt == FMT_NV12)) {
1587 iformat = (request->fmt == FMT_NV21) ? 2 : 3;
1588 input = input & 0xffff;
1589 } else if (request->fmt == FMT_YUV420) {
1590 iformat = 4;
1591 input = input & 0xffffff;
1592 } else if ((request->fmt == FMT_YUV444_PLANE)
1593 || (request->fmt == FMT_RGB888_PLANE)) {
1594 if (request->fmt == FMT_RGB888_PLANE)
1595 r2y_en = 1;
1596 iformat = 5;
1597 input = input & 0xffffff;
1598 } else if ((request->fmt == FMT_YUV422_12BIT)
1599 || (request->fmt == FMT_YUV444_10BIT)
1600 || (request->fmt == FMT_YUV422_10BIT)) {
1601 iformat = 7;
1602 ifmt_extra = request->fmt - FMT_YUV422_12BIT;
1603 input = input & 0xff;
1604 } else
1605 ret = -1;
1606 }
1607#ifdef CONFIG_AMLOGIC_MEDIA_GE2D
1608MFDIN:
1609#endif
1610 if (ret == 0)
1611 mfdin_basic(input, iformat, oformat,
1612 picsize_x, picsize_y, r2y_en,
1613 request->nr_mode, ifmt_extra);
1614 return ret;
1615}
1616
1617#ifdef H264_ENC_CBR
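/*
 * Byte-order helper for the CBR table: within every group of four
 * 16-bit words the order is reversed in place, i.e. {w0, w1, w2, w3}
 * becomes {w3, w2, w1, w0}, presumably matching the endianness the
 * encoder firmware expects.
 */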
1618static void ConvertTable2Risc(void *table, u32 len)
1619{
1620 u32 i, j;
1621 u16 temp;
1622 u16 *tbl = (u16 *)table;
1623
1624 if ((len < 8) || (len % 8) || (!table)) {
1625 enc_pr(LOG_ERROR, "ConvertTable2Risc tbl %p, len %d error\n",
1626 table, len);
1627 return;
1628 }
1629 for (i = 0; i < len / 8; i++) {
1630 j = i << 2;
1631 temp = tbl[j];
1632 tbl[j] = tbl[j + 3];
1633 tbl[j + 3] = temp;
1634
1635 temp = tbl[j + 1];
1636 tbl[j + 1] = tbl[j + 2];
1637 tbl[j + 2] = temp;
1638 }
1639
1640}
1641#endif
1642
1643static void avc_prot_init(struct encode_wq_s *wq,
1644 struct encode_request_s *request, u32 quant, bool IDR)
1645{
1646 u32 data32;
1647 u32 pic_width, pic_height;
1648 u32 pic_mb_nr;
1649 u32 pic_mbx, pic_mby;
1650 u32 i_pic_qp, p_pic_qp;
1651 u32 i_pic_qp_c, p_pic_qp_c;
1652 u32 pic_width_in_mb;
1653 u32 slice_qp;
1654
1655 pic_width = wq->pic.encoder_width;
1656 pic_height = wq->pic.encoder_height;
1657 pic_mb_nr = 0;
1658 pic_mbx = 0;
1659 pic_mby = 0;
1660 i_pic_qp = quant;
1661 p_pic_qp = quant;
1662
1663 pic_width_in_mb = (pic_width + 15) / 16;
1664 WRITE_HREG(HCODEC_HDEC_MC_OMEM_AUTO,
1665 (1 << 31) | /* use_omem_mb_xy */
1666 ((pic_width_in_mb - 1) << 16)); /* omem_max_mb_x */
1667
1668 WRITE_HREG(HCODEC_VLC_ADV_CONFIG,
1669 /* early_mix_mc_hcmd -- will enable in P Picture */
1670 (0 << 10) |
1671 (1 << 9) | /* update_top_left_mix */
1672 (1 << 8) | /* p_top_left_mix */
1673 /* mv_cal_mixed_type -- will enable in P Picture */
1674 (0 << 7) |
1675 /* mc_hcmd_mixed_type -- will enable in P Picture */
1676 (0 << 6) |
1677 (1 << 5) | /* use_separate_int_control */
1678 (1 << 4) | /* hcmd_intra_use_q_info */
1679 (1 << 3) | /* hcmd_left_use_prev_info */
1680 (1 << 2) | /* hcmd_use_q_info */
1681 (1 << 1) | /* use_q_delta_quant */
1682 /* detect_I16_from_I4 use qdct detected mb_type */
1683 (0 << 0));
1684
1685 WRITE_HREG(HCODEC_QDCT_ADV_CONFIG,
1686 (1 << 29) | /* mb_info_latch_no_I16_pred_mode */
1687 (1 << 28) | /* ie_dma_mbxy_use_i_pred */
1688 (1 << 27) | /* ie_dma_read_write_use_ip_idx */
1689 (1 << 26) | /* ie_start_use_top_dma_count */
1690 (1 << 25) | /* i_pred_top_dma_rd_mbbot */
1691 (1 << 24) | /* i_pred_top_dma_wr_disable */
1692 /* i_pred_mix -- will enable in P Picture */
1693 (0 << 23) |
1694 (1 << 22) | /* me_ab_rd_when_intra_in_p */
1695 (1 << 21) | /* force_mb_skip_run_when_intra */
1696 /* mc_out_mixed_type -- will enable in P Picture */
1697 (0 << 20) |
1698 (1 << 19) | /* ie_start_when_quant_not_full */
1699 (1 << 18) | /* mb_info_state_mix */
1700 /* mb_type_use_mix_result -- will enable in P Picture */
1701 (0 << 17) |
1702 /* me_cb_ie_read_enable -- will enable in P Picture */
1703 (0 << 16) |
1704 /* ie_cur_data_from_me -- will enable in P Picture */
1705 (0 << 15) |
1706 (1 << 14) | /* rem_per_use_table */
1707 (0 << 13) | /* q_latch_int_enable */
1708 (1 << 12) | /* q_use_table */
1709 (0 << 11) | /* q_start_wait */
1710 (1 << 10) | /* LUMA_16_LEFT_use_cur */
1711 (1 << 9) | /* DC_16_LEFT_SUM_use_cur */
1712 (1 << 8) | /* c_ref_ie_sel_cur */
1713 (0 << 7) | /* c_ipred_perfect_mode */
1714 (1 << 6) | /* ref_ie_ul_sel */
1715 (1 << 5) | /* mb_type_use_ie_result */
1716 (1 << 4) | /* detect_I16_from_I4 */
1717 (1 << 3) | /* ie_not_wait_ref_busy */
1718 (1 << 2) | /* ie_I16_enable */
1719 (3 << 0)); /* ie_done_sel // fastest when waiting */
1720
1721 if (request != NULL) {
1722 WRITE_HREG(HCODEC_IE_WEIGHT,
1723 (request->i16_weight << 16) |
1724 (request->i4_weight << 0));
1725 WRITE_HREG(HCODEC_ME_WEIGHT,
1726 (request->me_weight << 0));
1727 WRITE_HREG(HCODEC_SAD_CONTROL_0,
1728 /* ie_sad_offset_I16 */
1729 (request->i16_weight << 16) |
1730 /* ie_sad_offset_I4 */
1731 (request->i4_weight << 0));
1732 WRITE_HREG(HCODEC_SAD_CONTROL_1,
1733 /* ie_sad_shift_I16 */
1734 (IE_SAD_SHIFT_I16 << 24) |
1735 /* ie_sad_shift_I4 */
1736 (IE_SAD_SHIFT_I4 << 20) |
1737 /* me_sad_shift_INTER */
1738 (ME_SAD_SHIFT_INTER << 16) |
1739 /* me_sad_offset_INTER */
1740 (request->me_weight << 0));
1741 wq->me_weight = request->me_weight;
1742 wq->i4_weight = request->i4_weight;
1743 wq->i16_weight = request->i16_weight;
1744 } else {
1745 WRITE_HREG(HCODEC_IE_WEIGHT,
1746 (I16MB_WEIGHT_OFFSET << 16) |
1747 (I4MB_WEIGHT_OFFSET << 0));
1748 WRITE_HREG(HCODEC_ME_WEIGHT,
1749 (ME_WEIGHT_OFFSET << 0));
1750 WRITE_HREG(HCODEC_SAD_CONTROL_0,
1751 /* ie_sad_offset_I16 */
1752 (I16MB_WEIGHT_OFFSET << 16) |
1753 /* ie_sad_offset_I4 */
1754 (I4MB_WEIGHT_OFFSET << 0));
1755 WRITE_HREG(HCODEC_SAD_CONTROL_1,
1756 /* ie_sad_shift_I16 */
1757 (IE_SAD_SHIFT_I16 << 24) |
1758 /* ie_sad_shift_I4 */
1759 (IE_SAD_SHIFT_I4 << 20) |
1760 /* me_sad_shift_INTER */
1761 (ME_SAD_SHIFT_INTER << 16) |
1762 /* me_sad_offset_INTER */
1763 (ME_WEIGHT_OFFSET << 0));
1764 }
1765
1766 WRITE_HREG(HCODEC_ADV_MV_CTL0,
1767 (ADV_MV_LARGE_16x8 << 31) |
1768 (ADV_MV_LARGE_8x16 << 30) |
1769 (ADV_MV_8x8_WEIGHT << 16) | /* adv_mv_8x8_weight */
1770 /* adv_mv_4x4x4_weight should be set bigger */
1771 (ADV_MV_4x4x4_WEIGHT << 0));
1772 WRITE_HREG(HCODEC_ADV_MV_CTL1,
1773 /* adv_mv_16x16_weight */
1774 (ADV_MV_16x16_WEIGHT << 16) |
1775 (ADV_MV_LARGE_16x16 << 15) |
1776 (ADV_MV_16_8_WEIGHT << 0)); /* adv_mv_16_8_weight */
1777
1778 hcodec_prog_qtbl(wq);
1779 if (IDR) {
1780 i_pic_qp =
1781 wq->quant_tbl_i4[0] & 0xff;
1782 i_pic_qp +=
1783 wq->quant_tbl_i16[0] & 0xff;
1784 i_pic_qp /= 2;
1785 p_pic_qp = i_pic_qp;
1786 } else {
1787 i_pic_qp =
1788 wq->quant_tbl_i4[0] & 0xff;
1789 i_pic_qp +=
1790 wq->quant_tbl_i16[0] & 0xff;
1791 p_pic_qp = wq->quant_tbl_me[0] & 0xff;
1792 slice_qp = (i_pic_qp + p_pic_qp) / 3;
1793 i_pic_qp = slice_qp;
1794 p_pic_qp = i_pic_qp;
1795 }
1796#ifdef H264_ENC_CBR
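	/*
	 * CBR rate control: hand the firmware the DDR-resident CBR tables
	 * (the quant-table area, then the per-macroblock size area that
	 * follows it at CBR_TABLE_SIZE) and program the table selection,
	 * thresholds and the block geometry used for per-region bit
	 * accounting.  The ie/me SAD shifts programmed above are cleared
	 * from HCODEC_SAD_CONTROL_1 first.
	 */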
1797 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB) {
1798 data32 = READ_HREG(HCODEC_SAD_CONTROL_1);
1799 data32 = data32 & 0xffff; /* remove sad shift */
1800 WRITE_HREG(HCODEC_SAD_CONTROL_1, data32);
1801 WRITE_HREG(H264_ENC_CBR_TABLE_ADDR,
1802 wq->mem.cbr_info_ddr_start_addr);
1803 WRITE_HREG(H264_ENC_CBR_MB_SIZE_ADDR,
1804 wq->mem.cbr_info_ddr_start_addr
1805 + CBR_TABLE_SIZE);
1806 WRITE_HREG(H264_ENC_CBR_CTL,
1807 (wq->cbr_info.start_tbl_id << 28) |
1808 (wq->cbr_info.short_shift << 24) |
1809 (wq->cbr_info.long_mb_num << 16) |
1810 (wq->cbr_info.long_th << 0));
1811 WRITE_HREG(H264_ENC_CBR_REGION_SIZE,
1812 (wq->cbr_info.block_w << 16) |
1813 (wq->cbr_info.block_h << 0));
1814 }
1815#endif
1816 WRITE_HREG(HCODEC_QDCT_VLC_QUANT_CTL_0,
1817 (0 << 19) | /* vlc_delta_quant_1 */
1818 (i_pic_qp << 13) | /* vlc_quant_1 */
1819 (0 << 6) | /* vlc_delta_quant_0 */
1820 (i_pic_qp << 0)); /* vlc_quant_0 */
1821 WRITE_HREG(HCODEC_QDCT_VLC_QUANT_CTL_1,
1822 (14 << 6) | /* vlc_max_delta_q_neg */
1823 (13 << 0)); /* vlc_max_delta_q_pos */
1824 WRITE_HREG(HCODEC_VLC_PIC_SIZE,
1825 pic_width | (pic_height << 16));
1826 WRITE_HREG(HCODEC_VLC_PIC_POSITION,
1827 (pic_mb_nr << 16) |
1828 (pic_mby << 8) |
1829 (pic_mbx << 0));
1830
1831	/*
1832	 * Map the luma QP to the chroma QP (H.264 Table 8-15, with
1833	 * chroma_qp_index_offset assumed to be zero): identity up to 29,
1834	 * then the standard compressed tail.  QP values above 51 clamp to
1835	 * 39, matching the default of the old per-value switch.
1836	 */
1837	{
1838		static const u8 luma_to_chroma_qp[] = {
1839			 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
1840			10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
1841			20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
1842			29, 30, 31, 32, 32, 33, 34, 34, 35, 35,
1843			36, 36, 37, 37, 37, 38, 38, 38, 39, 39,
1844			39, 39
1845		};
1846
1847		i_pic_qp_c = (i_pic_qp < ARRAY_SIZE(luma_to_chroma_qp)) ?
1848			luma_to_chroma_qp[i_pic_qp] : 39;
1849		p_pic_qp_c = (p_pic_qp < ARRAY_SIZE(luma_to_chroma_qp)) ?
1850			luma_to_chroma_qp[p_pic_qp] : 39;
1851	}
2150 WRITE_HREG(HCODEC_QDCT_Q_QUANT_I,
2151 (i_pic_qp_c << 22) |
2152 (i_pic_qp << 16) |
2153 ((i_pic_qp_c % 6) << 12) |
2154 ((i_pic_qp_c / 6) << 8) |
2155 ((i_pic_qp % 6) << 4) |
2156 ((i_pic_qp / 6) << 0));
2157
2158 WRITE_HREG(HCODEC_QDCT_Q_QUANT_P,
2159 (p_pic_qp_c << 22) |
2160 (p_pic_qp << 16) |
2161 ((p_pic_qp_c % 6) << 12) |
2162 ((p_pic_qp_c / 6) << 8) |
2163 ((p_pic_qp % 6) << 4) |
2164 ((p_pic_qp / 6) << 0));
2165
2166#ifdef ENABLE_IGNORE_FUNCTION
2167 WRITE_HREG(HCODEC_IGNORE_CONFIG,
2168 (1 << 31) | /* ignore_lac_coeff_en */
2169 (1 << 26) | /* ignore_lac_coeff_else (<1) */
2170 (1 << 21) | /* ignore_lac_coeff_2 (<1) */
2171 (2 << 16) | /* ignore_lac_coeff_1 (<2) */
2172 (1 << 15) | /* ignore_cac_coeff_en */
2173 (1 << 10) | /* ignore_cac_coeff_else (<1) */
2174 (1 << 5) | /* ignore_cac_coeff_2 (<1) */
2175 (3 << 0)); /* ignore_cac_coeff_1 (<2) */
2176
2177 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB)
2178 WRITE_HREG(HCODEC_IGNORE_CONFIG_2,
2179 (1 << 31) | /* ignore_t_lac_coeff_en */
2180 (1 << 26) | /* ignore_t_lac_coeff_else (<1) */
2181 (2 << 21) | /* ignore_t_lac_coeff_2 (<2) */
2182 (6 << 16) | /* ignore_t_lac_coeff_1 (<6) */
2183 (1<<15) | /* ignore_cdc_coeff_en */
2184 (0<<14) | /* ignore_t_lac_coeff_else_le_3 */
2185 (1<<13) | /* ignore_t_lac_coeff_else_le_4 */
2186 (1<<12) | /* ignore_cdc_only_when_empty_cac_inter */
2187 (1<<11) | /* ignore_cdc_only_when_one_empty_inter */
2188 /* ignore_cdc_range_max_inter 0-0, 1-1, 2-2, 3-3 */
2189 (2<<9) |
2190 /* ignore_cdc_abs_max_inter 0-1, 1-2, 2-3, 3-4 */
2191 (0<<7) |
2192 /* ignore_cdc_only_when_empty_cac_intra */
2193 (1<<5) |
2194 /* ignore_cdc_only_when_one_empty_intra */
2195 (1<<4) |
2196 /* ignore_cdc_range_max_intra 0-0, 1-1, 2-2, 3-3 */
2197 (1<<2) |
2198 /* ignore_cdc_abs_max_intra 0-1, 1-2, 2-3, 3-4 */
2199 (0<<0));
2200 else
2201 WRITE_HREG(HCODEC_IGNORE_CONFIG_2,
2202 (1 << 31) | /* ignore_t_lac_coeff_en */
2203 (1 << 26) | /* ignore_t_lac_coeff_else (<1) */
2204 (1 << 21) | /* ignore_t_lac_coeff_2 (<1) */
2205 (5 << 16) | /* ignore_t_lac_coeff_1 (<5) */
2206 (0 << 0));
2207#else
2208 WRITE_HREG(HCODEC_IGNORE_CONFIG, 0);
2209 WRITE_HREG(HCODEC_IGNORE_CONFIG_2, 0);
2210#endif
2211
2212 WRITE_HREG(HCODEC_QDCT_MB_CONTROL,
2213 (1 << 9) | /* mb_info_soft_reset */
2214 (1 << 0)); /* mb read buffer soft reset */
2215
2216	WRITE_HREG(HCODEC_QDCT_MB_CONTROL,
2217		(1 << 29) | /* ie_start_int_enable */
2218		(1 << 28) | /* ignore_t_p8x8 */
2219		(0 << 27) | /* zero_mc_out_null_non_skipped_mb */
2220		(0 << 26) | /* no_mc_out_null_non_skipped_mb */
2221		(0 << 25) | /* mc_out_even_skipped_mb */
2222		(0 << 24) | /* mc_out_wait_cbp_ready */
2223		(0 << 23) | /* mc_out_wait_mb_type_ready */
2224		(1 << 20) | /* ie_sub_enable */
2225		(1 << 19) | /* i_pred_enable */
2226		(1 << 18) | /* iq_enable */
2227		(1 << 17) | /* idct_enable */
2228		(1 << 14) | /* mb_pause_enable */
2229		(1 << 13) | /* q_enable */
2230		(1 << 12) | /* dct_enable */
2231		(1 << 10) | /* mb_info_en */
2232		(0 << 3) | /* endian */
2233		(0 << 1) | /* mb_read_en */
2234		(0 << 0)); /* soft reset */
2235
2236 WRITE_HREG(HCODEC_SAD_CONTROL,
2237 (0 << 3) | /* ie_result_buff_enable */
2238 (1 << 2) | /* ie_result_buff_soft_reset */
2239 (0 << 1) | /* sad_enable */
2240 (1 << 0)); /* sad soft reset */
2241 WRITE_HREG(HCODEC_IE_RESULT_BUFFER, 0);
2242
2243 WRITE_HREG(HCODEC_SAD_CONTROL,
2244 (1 << 3) | /* ie_result_buff_enable */
2245 (0 << 2) | /* ie_result_buff_soft_reset */
2246 (1 << 1) | /* sad_enable */
2247 (0 << 0)); /* sad soft reset */
2248
2249 WRITE_HREG(HCODEC_IE_CONTROL,
2250 (1 << 30) | /* active_ul_block */
2251 (0 << 1) | /* ie_enable */
2252 (1 << 0)); /* ie soft reset */
2253
2254 WRITE_HREG(HCODEC_IE_CONTROL,
2255 (1 << 30) | /* active_ul_block */
2256 (0 << 1) | /* ie_enable */
2257 (0 << 0)); /* ie soft reset */
2258
2259 WRITE_HREG(HCODEC_ME_SKIP_LINE,
2260 (8 << 24) | /* step_3_skip_line */
2261 (8 << 18) | /* step_2_skip_line */
2262 (2 << 12) | /* step_1_skip_line */
2263 (0 << 6) | /* step_0_skip_line */
2264 (0 << 0));
2265
2266 WRITE_HREG(HCODEC_ME_MV_MERGE_CTL, me_mv_merge_ctl);
2267 WRITE_HREG(HCODEC_ME_STEP0_CLOSE_MV, me_step0_close_mv);
2268 WRITE_HREG(HCODEC_ME_SAD_ENOUGH_01, me_sad_enough_01);
2269 WRITE_HREG(HCODEC_ME_SAD_ENOUGH_23, me_sad_enough_23);
2270 WRITE_HREG(HCODEC_ME_F_SKIP_SAD, me_f_skip_sad);
2271 WRITE_HREG(HCODEC_ME_F_SKIP_WEIGHT, me_f_skip_weight);
2272 WRITE_HREG(HCODEC_ME_MV_WEIGHT_01, me_mv_weight_01);
2273 WRITE_HREG(HCODEC_ME_MV_WEIGHT_23, me_mv_weight_23);
2274 WRITE_HREG(HCODEC_ME_SAD_RANGE_INC, me_sad_range_inc);
2275
2276 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_TXL) {
2277 WRITE_HREG(HCODEC_V5_SIMPLE_MB_CTL, 0);
2278 WRITE_HREG(HCODEC_V5_SIMPLE_MB_CTL,
2279 (v5_use_small_diff_cnt << 7) |
2280 (v5_simple_mb_inter_all_en << 6) |
2281 (v5_simple_mb_inter_8x8_en << 5) |
2282 (v5_simple_mb_inter_16_8_en << 4) |
2283 (v5_simple_mb_inter_16x16_en << 3) |
2284 (v5_simple_mb_intra_en << 2) |
2285 (v5_simple_mb_C_en << 1) |
2286 (v5_simple_mb_Y_en << 0));
2287 WRITE_HREG(HCODEC_V5_MB_DIFF_SUM, 0);
2288 WRITE_HREG(HCODEC_V5_SMALL_DIFF_CNT,
2289 (v5_small_diff_C<<16) |
2290 (v5_small_diff_Y<<0));
2291 if (qp_mode == 1) {
2292 WRITE_HREG(HCODEC_V5_SIMPLE_MB_DQUANT,
2293 0);
2294 } else {
2295 WRITE_HREG(HCODEC_V5_SIMPLE_MB_DQUANT,
2296 v5_simple_dq_setting);
2297 }
2298 WRITE_HREG(HCODEC_V5_SIMPLE_MB_ME_WEIGHT,
2299 v5_simple_me_weight_setting);
2300 /* txlx can remove it */
2301 WRITE_HREG(HCODEC_QDCT_CONFIG, 1 << 0);
2302 }
2303
2304 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXL) {
2305 WRITE_HREG(HCODEC_V4_FORCE_SKIP_CFG,
2306 (i_pic_qp << 26) | /* v4_force_q_r_intra */
2307 (i_pic_qp << 20) | /* v4_force_q_r_inter */
2308 (0 << 19) | /* v4_force_q_y_enable */
2309 (5 << 16) | /* v4_force_qr_y */
2310 (6 << 12) | /* v4_force_qp_y */
2311 (0 << 0)); /* v4_force_skip_sad */
2312
2313 /* V3 Force skip */
2314 WRITE_HREG(HCODEC_V3_SKIP_CONTROL,
2315 (1 << 31) | /* v3_skip_enable */
2316 (0 << 30) | /* v3_step_1_weight_enable */
2317 (1 << 28) | /* v3_mv_sad_weight_enable */
2318 (1 << 27) | /* v3_ipred_type_enable */
2319 (V3_FORCE_SKIP_SAD_1 << 12) |
2320 (V3_FORCE_SKIP_SAD_0 << 0));
2321 WRITE_HREG(HCODEC_V3_SKIP_WEIGHT,
2322 (V3_SKIP_WEIGHT_1 << 16) |
2323 (V3_SKIP_WEIGHT_0 << 0));
2324 WRITE_HREG(HCODEC_V3_L1_SKIP_MAX_SAD,
2325 (V3_LEVEL_1_F_SKIP_MAX_SAD << 16) |
2326 (V3_LEVEL_1_SKIP_MAX_SAD << 0));
2327 WRITE_HREG(HCODEC_V3_L2_SKIP_WEIGHT,
2328 (V3_FORCE_SKIP_SAD_2 << 16) |
2329 (V3_SKIP_WEIGHT_2 << 0));
2330 if (request != NULL) {
2331 unsigned int off1, off2;
2332
2333 off1 = V3_IE_F_ZERO_SAD_I4 - I4MB_WEIGHT_OFFSET;
2334 off2 = V3_IE_F_ZERO_SAD_I16
2335 - I16MB_WEIGHT_OFFSET;
2336 WRITE_HREG(HCODEC_V3_F_ZERO_CTL_0,
2337 ((request->i16_weight + off2) << 16) |
2338 ((request->i4_weight + off1) << 0));
2339 off1 = V3_ME_F_ZERO_SAD - ME_WEIGHT_OFFSET;
2340 WRITE_HREG(HCODEC_V3_F_ZERO_CTL_1,
2341 (0 << 25) |
2342 /* v3_no_ver_when_top_zero_en */
2343 (0 << 24) |
2344 /* v3_no_hor_when_left_zero_en */
2345 (3 << 16) | /* type_hor break */
2346 ((request->me_weight + off1) << 0));
2347 } else {
2348 WRITE_HREG(HCODEC_V3_F_ZERO_CTL_0,
2349 (V3_IE_F_ZERO_SAD_I16 << 16) |
2350 (V3_IE_F_ZERO_SAD_I4 << 0));
2351 WRITE_HREG(HCODEC_V3_F_ZERO_CTL_1,
2352 (0 << 25) |
2353 /* v3_no_ver_when_top_zero_en */
2354 (0 << 24) |
2355 /* v3_no_hor_when_left_zero_en */
2356 (3 << 16) | /* type_hor break */
2357 (V3_ME_F_ZERO_SAD << 0));
2358 }
2359 } else if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB) {
2360 /* V3 Force skip */
2361 WRITE_HREG(HCODEC_V3_SKIP_CONTROL,
2362 (1 << 31) | /* v3_skip_enable */
2363 (0 << 30) | /* v3_step_1_weight_enable */
2364 (1 << 28) | /* v3_mv_sad_weight_enable */
2365 (1 << 27) | /* v3_ipred_type_enable */
2366 (0 << 12) | /* V3_FORCE_SKIP_SAD_1 */
2367 (0 << 0)); /* V3_FORCE_SKIP_SAD_0 */
2368 WRITE_HREG(HCODEC_V3_SKIP_WEIGHT,
2369 (V3_SKIP_WEIGHT_1 << 16) |
2370 (V3_SKIP_WEIGHT_0 << 0));
2371 WRITE_HREG(HCODEC_V3_L1_SKIP_MAX_SAD,
2372 (V3_LEVEL_1_F_SKIP_MAX_SAD << 16) |
2373 (V3_LEVEL_1_SKIP_MAX_SAD << 0));
2374 WRITE_HREG(HCODEC_V3_L2_SKIP_WEIGHT,
2375 (0 << 16) | /* V3_FORCE_SKIP_SAD_2 */
2376 (V3_SKIP_WEIGHT_2 << 0));
2377 WRITE_HREG(HCODEC_V3_F_ZERO_CTL_0,
2378 (0 << 16) | /* V3_IE_F_ZERO_SAD_I16 */
2379 (0 << 0)); /* V3_IE_F_ZERO_SAD_I4 */
2380 WRITE_HREG(HCODEC_V3_F_ZERO_CTL_1,
2381 (0 << 25) | /* v3_no_ver_when_top_zero_en */
2382 (0 << 24) | /* v3_no_hor_when_left_zero_en */
2383 (3 << 16) | /* type_hor break */
2384 (0 << 0)); /* V3_ME_F_ZERO_SAD */
2385 }
2386 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB) {
2387 int i;
2388 /* MV SAD Table */
2389 for (i = 0; i < 64; i++)
2390 WRITE_HREG(HCODEC_V3_MV_SAD_TABLE,
2391 v3_mv_sad[i]);
2392
2393 /* IE PRED SAD Table*/
2394 WRITE_HREG(HCODEC_V3_IPRED_TYPE_WEIGHT_0,
2395 (C_ipred_weight_H << 24) |
2396 (C_ipred_weight_V << 16) |
2397 (I4_ipred_weight_else << 8) |
2398 (I4_ipred_weight_most << 0));
2399 WRITE_HREG(HCODEC_V3_IPRED_TYPE_WEIGHT_1,
2400 (I16_ipred_weight_DC << 24) |
2401 (I16_ipred_weight_H << 16) |
2402 (I16_ipred_weight_V << 8) |
2403 (C_ipred_weight_DC << 0));
2404 WRITE_HREG(HCODEC_V3_LEFT_SMALL_MAX_SAD,
2405 (v3_left_small_max_me_sad << 16) |
2406 (v3_left_small_max_ie_sad << 0));
2407 }
2408 WRITE_HREG(HCODEC_IE_DATA_FEED_BUFF_INFO, 0);
2409 WRITE_HREG(HCODEC_CURR_CANVAS_CTRL, 0);
2410 data32 = READ_HREG(HCODEC_VLC_CONFIG);
2411 data32 = data32 | (1 << 0); /* set pop_coeff_even_all_zero */
2412 WRITE_HREG(HCODEC_VLC_CONFIG, data32);
2413
2414 WRITE_HREG(INFO_DUMP_START_ADDR,
2415 wq->mem.dump_info_ddr_start_addr);
2416
2417 /* clear mailbox interrupt */
2418 WRITE_HREG(HCODEC_IRQ_MBOX_CLR, 1);
2419
2420 /* enable mailbox interrupt */
2421 WRITE_HREG(HCODEC_IRQ_MBOX_MASK, 1);
2422}
2423
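/*
 * Reset the HCODEC core.  On SC2 and newer (when use_reset_control is set)
 * this goes through the SoC reset controller; on older chips the relevant
 * DOS_SW_RESET1 bits are pulsed by hand.  The repeated dummy reads of
 * DOS_SW_RESET1 around the writes appear to serve only as a short settling
 * delay for the reset pulse.
 */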
2424void amvenc_reset(void)
2425{
2426 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_SC2 &&
2427 use_reset_control) {
2428 hcodec_hw_reset();
2429 } else {
2430 READ_VREG(DOS_SW_RESET1);
2431 READ_VREG(DOS_SW_RESET1);
2432 READ_VREG(DOS_SW_RESET1);
2433 WRITE_VREG(DOS_SW_RESET1,
2434 (1 << 2) | (1 << 6) |
2435 (1 << 7) | (1 << 8) |
2436 (1 << 14) | (1 << 16) |
2437 (1 << 17));
2438 WRITE_VREG(DOS_SW_RESET1, 0);
2439 READ_VREG(DOS_SW_RESET1);
2440 READ_VREG(DOS_SW_RESET1);
2441 READ_VREG(DOS_SW_RESET1);
2442 }
2443}
2444
2445void amvenc_start(void)
2446{
2447 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_SC2 &&
2448 use_reset_control) {
2449 hcodec_hw_reset();
2450 } else {
2451 READ_VREG(DOS_SW_RESET1);
2452 READ_VREG(DOS_SW_RESET1);
2453 READ_VREG(DOS_SW_RESET1);
2454
2455 WRITE_VREG(DOS_SW_RESET1,
2456 (1 << 12) | (1 << 11));
2457 WRITE_VREG(DOS_SW_RESET1, 0);
2458
2459 READ_VREG(DOS_SW_RESET1);
2460 READ_VREG(DOS_SW_RESET1);
2461 READ_VREG(DOS_SW_RESET1);
2462 }
2463
2464 WRITE_HREG(HCODEC_MPSR, 0x0001);
2465}
2466
2467void amvenc_stop(void)
2468{
2469 ulong timeout = jiffies + HZ;
2470
2471 WRITE_HREG(HCODEC_MPSR, 0);
2472 WRITE_HREG(HCODEC_CPSR, 0);
2473
2474 while (READ_HREG(HCODEC_IMEM_DMA_CTRL) & 0x8000) {
2475 if (time_after(jiffies, timeout))
2476 break;
2477 }
2478
2479 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_SC2 &&
2480 use_reset_control) {
2481 hcodec_hw_reset();
2482 } else {
2483 READ_VREG(DOS_SW_RESET1);
2484 READ_VREG(DOS_SW_RESET1);
2485 READ_VREG(DOS_SW_RESET1);
2486
2487 WRITE_VREG(DOS_SW_RESET1,
2488 (1 << 12) | (1 << 11) |
2489 (1 << 2) | (1 << 6) |
2490 (1 << 7) | (1 << 8) |
2491 (1 << 14) | (1 << 16) |
2492 (1 << 17));
2493
2494 WRITE_VREG(DOS_SW_RESET1, 0);
2495
2496 READ_VREG(DOS_SW_RESET1);
2497 READ_VREG(DOS_SW_RESET1);
2498 READ_VREG(DOS_SW_RESET1);
2499 }
2500
2501}
2502
2503static void __iomem *mc_addr;
2504static u32 mc_addr_map;
2505#define MC_SIZE (4096 * 8)
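/*
 * Load the AVC encoder microcode: copy the firmware image named by @p into
 * a kernel buffer, map it for DMA and let the HCODEC IMEM DMA engine pull
 * it in, polling HCODEC_IMEM_DMA_CTRL bit 15 for completion with a 1 s
 * timeout.
 */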
2506s32 amvenc_loadmc(const char *p, struct encode_wq_s *wq)
2507{
2508 ulong timeout;
2509 s32 ret = 0;
2510
2511	/* use static memory */
2512 if (mc_addr == NULL) {
2513 mc_addr = kmalloc(MC_SIZE, GFP_KERNEL);
2514 if (!mc_addr) {
2515			enc_pr(LOG_ERROR, "avc loadmc: failed to allocate mc buffer.\n");
2516 return -ENOMEM;
2517 }
2518 }
2519
2520 enc_pr(LOG_ALL, "avc encode ucode name is %s\n", p);
2521 ret = get_data_from_name(p, (u8 *)mc_addr);
2522 if (ret < 0) {
2523 enc_pr(LOG_ERROR,
2524 "avc microcode fail ret=%d, name: %s, wq:%p.\n",
2525 ret, p, (void *)wq);
2526 }
2527
2528 mc_addr_map = dma_map_single(
2529 &encode_manager.this_pdev->dev,
2530 mc_addr, MC_SIZE, DMA_TO_DEVICE);
2531
2532 /* mc_addr_map = wq->mem.assit_buffer_offset; */
2533 /* mc_addr = ioremap_wc(mc_addr_map, MC_SIZE); */
2534 /* memcpy(mc_addr, p, MC_SIZE); */
2535 enc_pr(LOG_ALL, "address 0 is 0x%x\n", *((u32 *)mc_addr));
2536 enc_pr(LOG_ALL, "address 1 is 0x%x\n", *((u32 *)mc_addr + 1));
2537 enc_pr(LOG_ALL, "address 2 is 0x%x\n", *((u32 *)mc_addr + 2));
2538 enc_pr(LOG_ALL, "address 3 is 0x%x\n", *((u32 *)mc_addr + 3));
2539 WRITE_HREG(HCODEC_MPSR, 0);
2540 WRITE_HREG(HCODEC_CPSR, 0);
2541
2542 /* Read CBUS register for timing */
2543 timeout = READ_HREG(HCODEC_MPSR);
2544 timeout = READ_HREG(HCODEC_MPSR);
2545
2546 timeout = jiffies + HZ;
2547
2548 WRITE_HREG(HCODEC_IMEM_DMA_ADR, mc_addr_map);
2549 WRITE_HREG(HCODEC_IMEM_DMA_COUNT, 0x1000);
2550 WRITE_HREG(HCODEC_IMEM_DMA_CTRL, (0x8000 | (7 << 16)));
2551
2552 while (READ_HREG(HCODEC_IMEM_DMA_CTRL) & 0x8000) {
2553 if (time_before(jiffies, timeout))
2554 schedule();
2555 else {
2556 enc_pr(LOG_ERROR, "hcodec load mc error\n");
2557 ret = -EBUSY;
2558 break;
2559 }
2560 }
2561 dma_unmap_single(
2562 &encode_manager.this_pdev->dev,
2563 mc_addr_map, MC_SIZE, DMA_TO_DEVICE);
2564 return ret;
2565}
2566
2567const u32 fix_mc[] __aligned(8) = {
2568 0x0809c05a, 0x06696000, 0x0c780000, 0x00000000
2569};
2570
2571
2572/*
2573 * DOS top-level register access fix.
2574 * When hcodec is running, the protocol register HCODEC_CCPU_INTR_MSK
2575 * is set so that hcodec performs a single CBUS access outside the DOS
2576 * domain, working around a HW bug in the 4k2k dual-decoder
2577 * implementation.  If hcodec is not running, a small ucode is loaded
2578 * and executed instead.
2579 */
2580/*void amvenc_dos_top_reg_fix(void)
2581{
2582 bool hcodec_on;
2583 ulong flags;
2584
2585 spin_lock_irqsave(&lock, flags);
2586
2587 hcodec_on = vdec_on(VDEC_HCODEC);
2588
2589 if ((hcodec_on) && (READ_VREG(HCODEC_MPSR) & 1)) {
2590 WRITE_HREG(HCODEC_CCPU_INTR_MSK, 1);
2591 spin_unlock_irqrestore(&lock, flags);
2592 return;
2593 }
2594
2595 if (!hcodec_on)
2596 vdec_poweron(VDEC_HCODEC);
2597
2598 amhcodec_loadmc(fix_mc);
2599
2600 amhcodec_start();
2601
2602 udelay(1000);
2603
2604 amhcodec_stop();
2605
2606 if (!hcodec_on)
2607 vdec_poweroff(VDEC_HCODEC);
2608
2609 spin_unlock_irqrestore(&lock, flags);
2610}
2611
2612bool amvenc_avc_on(void)
2613{
2614 bool hcodec_on;
2615 ulong flags;
2616
2617 spin_lock_irqsave(&lock, flags);
2618
2619 hcodec_on = vdec_on(VDEC_HCODEC);
2620 hcodec_on &= (encode_manager.wq_count > 0);
2621
2622 spin_unlock_irqrestore(&lock, flags);
2623 return hcodec_on;
2624}
2625*/
2626
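/*
 * Power the HCODEC block up: enable the vdec clock gate, then either use
 * the SC2+ secure power-domain interface (pwr_ctrl_psci_smc) or the legacy
 * AO_RTI_GEN_PWR_* sleep/iso bits, reset the core, enable the requested
 * clock level and power up the HCODEC memories.
 */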
2627static s32 avc_poweron(u32 clock)
2628{
2629 ulong flags;
2630 u32 data32;
2631
2632 data32 = 0;
2633
2634 amports_switch_gate("vdec", 1);
2635
2636 spin_lock_irqsave(&lock, flags);
2637
2638 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_SC2) {
2639 hcodec_clk_config(1);
2640 udelay(20);
2641
2642 pwr_ctrl_psci_smc(PDID_DOS_HCODEC, PWR_ON);
2643 udelay(20);
2644 pr_err("hcodec powered on, hcodec clk rate:%ld, pwr_state:%d\n",
2645 clk_get_rate(s_hcodec_clks.hcodec_aclk),
2646 !pwr_ctrl_status_psci_smc(PDID_DOS_HCODEC));
2647 } else {
2648 WRITE_AOREG(AO_RTI_PWR_CNTL_REG0,
2649 (READ_AOREG(AO_RTI_PWR_CNTL_REG0) & (~0x18)));
2650 udelay(10);
2651 /* Powerup HCODEC */
2652 /* [1:0] HCODEC */
2653 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
2654 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) &
2655 ((get_cpu_type() == MESON_CPU_MAJOR_ID_SM1 ||
2656 get_cpu_type() >= MESON_CPU_MAJOR_ID_TM2)
2657 ? ~0x1 : ~0x3));
2658
2659 udelay(10);
2660 }
2661
2662 WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
2663 WRITE_VREG(DOS_SW_RESET1, 0);
2664
2665 /* Enable Dos internal clock gating */
2666 hvdec_clock_enable(clock);
2667
2668 /* Powerup HCODEC memories */
2669 WRITE_VREG(DOS_MEM_PD_HCODEC, 0x0);
2670
2671	if (get_cpu_type() < MESON_CPU_MAJOR_ID_SC2) {
2672		/* Remove HCODEC ISO */
2673		WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
2674			READ_AOREG(AO_RTI_GEN_PWR_ISO0) &
2675			((get_cpu_type() == MESON_CPU_MAJOR_ID_SM1 ||
2676			get_cpu_type() >= MESON_CPU_MAJOR_ID_TM2)
2677			? ~0x1 : ~0x30));
2678	}
2681
2682 udelay(10);
2683 /* Disable auto-clock gate */
2684 WRITE_VREG(DOS_GEN_CTRL0,
2685 (READ_VREG(DOS_GEN_CTRL0) | 0x1));
2686 WRITE_VREG(DOS_GEN_CTRL0,
2687 (READ_VREG(DOS_GEN_CTRL0) & 0xFFFFFFFE));
2688
2689 spin_unlock_irqrestore(&lock, flags);
2690
2691 mdelay(10);
2692 return 0;
2693}
2694
2695static s32 avc_poweroff(void)
2696{
2697 ulong flags;
2698
2699 spin_lock_irqsave(&lock, flags);
2700
2701 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_SC2) {
2702 hcodec_clk_config(0);
2703 udelay(20);
2704 pwr_ctrl_psci_smc(PDID_DOS_HCODEC, PWR_OFF);
2705 udelay(20);
2706 } else {
2707 /* enable HCODEC isolation */
2708 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
2709 READ_AOREG(AO_RTI_GEN_PWR_ISO0) |
2710 ((get_cpu_type() == MESON_CPU_MAJOR_ID_SM1 ||
2711 get_cpu_type() >= MESON_CPU_MAJOR_ID_TM2)
2712 ? 0x1 : 0x30));
2713 }
2714 /* power off HCODEC memories */
2715 WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
2716
2717 /* disable HCODEC clock */
2718 hvdec_clock_disable();
2719
2720	if (get_cpu_type() < MESON_CPU_MAJOR_ID_SC2) {
2721		/* HCODEC power off */
2722		WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
2723			READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) |
2724			((get_cpu_type() == MESON_CPU_MAJOR_ID_SM1 ||
2725			get_cpu_type() >= MESON_CPU_MAJOR_ID_TM2)
2726			? 0x1 : 0x3));
2727	}
2730
2731 spin_unlock_irqrestore(&lock, flags);
2732
2733 /* release DOS clk81 clock gating */
2734 amports_switch_gate("vdec", 0);
2735 return 0;
2736}
2737
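/*
 * Stop the core, reset it and reload the currently selected microcode;
 * used when an incoming request switches ucode_index.
 */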
2738static s32 reload_mc(struct encode_wq_s *wq)
2739{
2740 const char *p = select_ucode(encode_manager.ucode_index);
2741
2742 amvenc_stop();
2743
2744 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_SC2 && use_reset_control) {
2745 hcodec_hw_reset();
2746 } else {
2747 WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
2748 WRITE_VREG(DOS_SW_RESET1, 0);
2749 }
2750
2751 udelay(10);
2752
2753 WRITE_HREG(HCODEC_ASSIST_MMC_CTRL1, 0x32);
2754 enc_pr(LOG_INFO, "reload microcode\n");
2755
2756 if (amvenc_loadmc(p, wq) < 0)
2757 return -EBUSY;
2758 return 0;
2759}
2760
2761static void encode_isr_tasklet(ulong data)
2762{
2763 struct encode_manager_s *manager = (struct encode_manager_s *)data;
2764
2765 enc_pr(LOG_INFO, "encoder is done %d\n", manager->encode_hw_status);
2766 if (((manager->encode_hw_status == ENCODER_IDR_DONE)
2767 || (manager->encode_hw_status == ENCODER_NON_IDR_DONE)
2768 || (manager->encode_hw_status == ENCODER_SEQUENCE_DONE)
2769 || (manager->encode_hw_status == ENCODER_PICTURE_DONE))
2770 && (manager->process_irq)) {
2771 wake_up_interruptible(&manager->event.hw_complete);
2772 }
2773}
2774
2775/* irq function */
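/*
 * The mailbox ISR only acknowledges the interrupt and samples the firmware
 * status register; waking the waiting worker is deferred to the tasklet
 * above, so nothing heavier than tasklet_schedule() runs in interrupt
 * context.
 */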
2776static irqreturn_t enc_isr(s32 irq_number, void *para)
2777{
2778 struct encode_manager_s *manager = (struct encode_manager_s *)para;
2779
2780 enc_pr(LOG_INFO, "*****ENC_ISR*****\n");
2781 WRITE_HREG(HCODEC_IRQ_MBOX_CLR, 1);
2782
2783 manager->encode_hw_status = READ_HREG(ENCODER_STATUS);
2784 if ((manager->encode_hw_status == ENCODER_IDR_DONE)
2785 || (manager->encode_hw_status == ENCODER_NON_IDR_DONE)
2786 || (manager->encode_hw_status == ENCODER_SEQUENCE_DONE)
2787 || (manager->encode_hw_status == ENCODER_PICTURE_DONE)) {
2788 enc_pr(LOG_ALL, "encoder stage is %d\n",
2789 manager->encode_hw_status);
2790 }
2791
2792 if (((manager->encode_hw_status == ENCODER_IDR_DONE)
2793 || (manager->encode_hw_status == ENCODER_NON_IDR_DONE)
2794 || (manager->encode_hw_status == ENCODER_SEQUENCE_DONE)
2795 || (manager->encode_hw_status == ENCODER_PICTURE_DONE))
2796 && (!manager->process_irq)) {
2797 manager->process_irq = true;
2798 if (manager->encode_hw_status != ENCODER_SEQUENCE_DONE)
2799 manager->need_reset = true;
2800 tasklet_schedule(&manager->encode_tasklet);
2801 }
2802 return IRQ_HANDLED;
2803}
2804
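/*
 * Unpack a userspace command buffer (cmd_info) into wq->request.
 * Word 0 is the command; for ENCODER_IDR/ENCODER_NON_IDR words 1..16 carry
 * format, crop, scaling and nr_mode parameters, followed by the optional
 * per-block QP tables (when quant == ADJUSTED_QP_FLAG), the CBR block
 * parameters and, for DMA_BUFF input, the per-plane dma-buf fds.
 */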
2805static s32 convert_request(struct encode_wq_s *wq, u32 *cmd_info)
2806{
2807 int i = 0;
2808 u8 *ptr;
2809 u32 data_offset;
2810 u32 cmd = cmd_info[0];
2811 unsigned long paddr = 0;
2812 struct enc_dma_cfg *cfg = NULL;
2813 s32 ret = 0;
2814 struct platform_device *pdev;
2815
2816 if (!wq)
2817 return -1;
2818 memset(&wq->request, 0, sizeof(struct encode_request_s));
2819 wq->request.me_weight = ME_WEIGHT_OFFSET;
2820 wq->request.i4_weight = I4MB_WEIGHT_OFFSET;
2821 wq->request.i16_weight = I16MB_WEIGHT_OFFSET;
2822
2823 if (cmd == ENCODER_SEQUENCE) {
2824 wq->request.cmd = cmd;
2825 wq->request.ucode_mode = cmd_info[1];
2826 wq->request.quant = cmd_info[2];
2827 wq->request.flush_flag = cmd_info[3];
2828 wq->request.timeout = cmd_info[4];
2829		wq->request.timeout = 5000; /* force 5000 ms, overriding the value from cmd_info[4] */
2830 } else if ((cmd == ENCODER_IDR) || (cmd == ENCODER_NON_IDR)) {
2831 wq->request.cmd = cmd;
2832 wq->request.ucode_mode = cmd_info[1];
2833 wq->request.type = cmd_info[2];
2834 wq->request.fmt = cmd_info[3];
2835 wq->request.src = cmd_info[4];
2836 wq->request.framesize = cmd_info[5];
2837 wq->request.quant = cmd_info[6];
2838 wq->request.flush_flag = cmd_info[7];
2839 wq->request.timeout = cmd_info[8];
2840 wq->request.crop_top = cmd_info[9];
2841 wq->request.crop_bottom = cmd_info[10];
2842 wq->request.crop_left = cmd_info[11];
2843 wq->request.crop_right = cmd_info[12];
2844 wq->request.src_w = cmd_info[13];
2845 wq->request.src_h = cmd_info[14];
2846 wq->request.scale_enable = cmd_info[15];
2847
2848 enc_pr(LOG_INFO, "hwenc: wq->pic.encoder_width %d, ",
2849 wq->pic.encoder_width);
2850 enc_pr(LOG_INFO, "wq->pic.encoder_height:%d, request fmt=%d\n",
2851 wq->pic.encoder_height, wq->request.fmt);
2852
2853 if (wq->pic.encoder_width >= 1280 && wq->pic.encoder_height >= 720
2854 && wq->request.fmt == FMT_RGBA8888 && wq->pic.color_space != GE2D_FORMAT_BT601) {
2855 wq->request.scale_enable = 1;
2856 wq->request.src_w = wq->pic.encoder_width;
2857 wq->request.src_h = wq->pic.encoder_height;
2858 enc_pr(LOG_INFO, "hwenc: force wq->request.scale_enable=%d\n", wq->request.scale_enable);
2859 }
2860
2861 wq->request.nr_mode =
2862 (nr_mode > 0) ? nr_mode : cmd_info[16];
2863 if (cmd == ENCODER_IDR)
2864 wq->request.nr_mode = 0;
2865
2866 data_offset = 17 +
2867 (sizeof(wq->quant_tbl_i4)
2868 + sizeof(wq->quant_tbl_i16)
2869 + sizeof(wq->quant_tbl_me)) / 4;
2870
2871 if (wq->request.quant == ADJUSTED_QP_FLAG) {
2872 ptr = (u8 *) &cmd_info[17];
2873 memcpy(wq->quant_tbl_i4, ptr,
2874 sizeof(wq->quant_tbl_i4));
2875 ptr += sizeof(wq->quant_tbl_i4);
2876 memcpy(wq->quant_tbl_i16, ptr,
2877 sizeof(wq->quant_tbl_i16));
2878 ptr += sizeof(wq->quant_tbl_i16);
2879 memcpy(wq->quant_tbl_me, ptr,
2880 sizeof(wq->quant_tbl_me));
2881 wq->request.i4_weight -=
2882 cmd_info[data_offset++];
2883 wq->request.i16_weight -=
2884 cmd_info[data_offset++];
2885 wq->request.me_weight -=
2886 cmd_info[data_offset++];
2887 if (qp_table_debug) {
2888 u8 *qp_tb = (u8 *)(&wq->quant_tbl_i4[0]);
2889
2890 for (i = 0; i < 32; i++) {
2891 enc_pr(LOG_INFO, "%d ", *qp_tb);
2892 qp_tb++;
2893 }
2894 enc_pr(LOG_INFO, "\n");
2895
2896 qp_tb = (u8 *)(&wq->quant_tbl_i16[0]);
2897 for (i = 0; i < 32; i++) {
2898 enc_pr(LOG_INFO, "%d ", *qp_tb);
2899 qp_tb++;
2900 }
2901 enc_pr(LOG_INFO, "\n");
2902
2903 qp_tb = (u8 *)(&wq->quant_tbl_me[0]);
2904 for (i = 0; i < 32; i++) {
2905 enc_pr(LOG_INFO, "%d ", *qp_tb);
2906 qp_tb++;
2907 }
2908 enc_pr(LOG_INFO, "\n");
2909 }
2910 } else {
2911 memset(wq->quant_tbl_me, wq->request.quant,
2912 sizeof(wq->quant_tbl_me));
2913 memset(wq->quant_tbl_i4, wq->request.quant,
2914 sizeof(wq->quant_tbl_i4));
2915 memset(wq->quant_tbl_i16, wq->request.quant,
2916 sizeof(wq->quant_tbl_i16));
2917 data_offset += 3;
2918 }
2919#ifdef H264_ENC_CBR
2920 wq->cbr_info.block_w = cmd_info[data_offset++];
2921 wq->cbr_info.block_h = cmd_info[data_offset++];
2922 wq->cbr_info.long_th = cmd_info[data_offset++];
2923 wq->cbr_info.start_tbl_id = cmd_info[data_offset++];
2924 wq->cbr_info.short_shift = CBR_SHORT_SHIFT;
2925 wq->cbr_info.long_mb_num = CBR_LONG_MB_NUM;
2926#endif
2927 data_offset = 17 +
2928 (sizeof(wq->quant_tbl_i4)
2929 + sizeof(wq->quant_tbl_i16)
2930 + sizeof(wq->quant_tbl_me)) / 4 + 7;
2931
2932 if (wq->request.type == DMA_BUFF) {
2933 wq->request.plane_num = cmd_info[data_offset++];
2934 enc_pr(LOG_INFO, "wq->request.plane_num %d\n",
2935 wq->request.plane_num);
2936 if (wq->request.fmt == FMT_NV12 ||
2937 wq->request.fmt == FMT_NV21 ||
2938 wq->request.fmt == FMT_YUV420) {
2939 for (i = 0; i < wq->request.plane_num; i++) {
2940 cfg = &wq->request.dma_cfg[i];
2941 cfg->dir = DMA_TO_DEVICE;
2942 cfg->fd = cmd_info[data_offset++];
2943 pdev = encode_manager.this_pdev;
2944 cfg->dev = &(pdev->dev);
2945
2946 ret = enc_dma_buf_get_phys(cfg, &paddr);
2947 if (ret < 0) {
2948 enc_pr(LOG_ERROR,
2949 "import fd %d failed\n",
2950 cfg->fd);
2951 cfg->paddr = NULL;
2952 cfg->vaddr = NULL;
2953 return -1;
2954 }
2955 cfg->paddr = (void *)paddr;
2956 enc_pr(LOG_INFO, "vaddr %p\n",
2957 cfg->vaddr);
2958 }
2959 } else {
2960 enc_pr(LOG_ERROR, "error fmt = %d\n",
2961 wq->request.fmt);
2962 }
2963 }
2964
2965 } else {
2966 enc_pr(LOG_ERROR, "error cmd = %d, wq: %p.\n",
2967 cmd, (void *)wq);
2968 return -1;
2969 }
2970 wq->request.parent = wq;
2971 return 0;
2972}
2973
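/*
 * Program the hardware for one request.  Reloads the microcode if the
 * ucode index changed, performs a full re-init when a reset is pending,
 * selects the reference/deblocking canvases and the IE/ME macroblock type
 * for the frame, then writes the command into ENCODER_STATUS and, if a
 * reload happened, kicks the core.
 */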
2974void amvenc_avc_start_cmd(struct encode_wq_s *wq,
2975 struct encode_request_s *request)
2976{
2977 u32 reload_flag = 0;
2978
2979 if (request->ucode_mode != encode_manager.ucode_index) {
2980 encode_manager.ucode_index = request->ucode_mode;
2981 if (reload_mc(wq)) {
2982 enc_pr(LOG_ERROR,
2983 "reload mc fail, wq:%p\n", (void *)wq);
2984 return;
2985 }
2986 reload_flag = 1;
2987 encode_manager.need_reset = true;
2988 }
2989
2990 wq->hw_status = 0;
2991 wq->output_size = 0;
2992 wq->ucode_index = encode_manager.ucode_index;
2993
2994 ie_me_mode = (0 & ME_PIXEL_MODE_MASK) << ME_PIXEL_MODE_SHIFT;
2995
2996 if (encode_manager.need_reset) {
2997 amvenc_stop();
2998 reload_flag = 1;
2999 encode_manager.need_reset = false;
3000 encode_manager.encode_hw_status = ENCODER_IDLE;
3001 amvenc_reset();
3002 avc_canvas_init(wq);
3003 avc_init_encoder(wq, (request->cmd == ENCODER_IDR) ? true : false);
3004 avc_init_input_buffer(wq);
3005 avc_init_output_buffer(wq);
3006
3007 avc_prot_init(
3008 wq, request, request->quant,
3009 (request->cmd == ENCODER_IDR) ? true : false);
3010
3011 avc_init_assit_buffer(wq);
3012
3013 enc_pr(LOG_INFO,
3014 "begin to new frame, request->cmd: %d, ucode mode: %d, wq:%p\n",
3015 request->cmd, request->ucode_mode, (void *)wq);
3016 }
3017
3018 if ((request->cmd == ENCODER_IDR) ||
3019 (request->cmd == ENCODER_NON_IDR)) {
3020#ifdef H264_ENC_SVC
3021 /* encode non reference frame or not */
3022 if (request->cmd == ENCODER_IDR)
3023 wq->pic.non_ref_cnt = 0; //IDR reset counter
3024
3025 if (wq->pic.enable_svc && wq->pic.non_ref_cnt) {
3026 enc_pr(LOG_INFO,
3027 "PIC is NON REF cmd %d cnt %d value 0x%x\n",
3028 request->cmd, wq->pic.non_ref_cnt,
3029 ENC_SLC_NON_REF);
3030 WRITE_HREG(H264_ENC_SVC_PIC_TYPE, ENC_SLC_NON_REF);
3031 } else {
3032 enc_pr(LOG_INFO,
3033 "PIC is REF cmd %d cnt %d val 0x%x\n",
3034 request->cmd, wq->pic.non_ref_cnt,
3035 ENC_SLC_REF);
3036 WRITE_HREG(H264_ENC_SVC_PIC_TYPE, ENC_SLC_REF);
3037 }
3038#else
3039		/* SVC defined in FW but not enabled in this driver build: always mark the slice as a reference */
3040 WRITE_HREG(H264_ENC_SVC_PIC_TYPE, ENC_SLC_REF);
3041#endif
3042 avc_init_dblk_buffer(wq->mem.dblk_buf_canvas);
3043 avc_init_reference_buffer(wq->mem.ref_buf_canvas);
3044 }
3045 if ((request->cmd == ENCODER_IDR) ||
3046 (request->cmd == ENCODER_NON_IDR))
3047 set_input_format(wq, request);
3048
3049 if (request->cmd == ENCODER_IDR)
3050 ie_me_mb_type = HENC_MB_Type_I4MB;
3051 else if (request->cmd == ENCODER_NON_IDR)
3052 ie_me_mb_type =
3053 (HENC_SKIP_RUN_AUTO << 16) |
3054 (HENC_MB_Type_AUTO << 4) |
3055 (HENC_MB_Type_AUTO << 0);
3056 else
3057 ie_me_mb_type = 0;
3058 avc_init_ie_me_parameter(wq, request->quant);
3059
3060#ifdef MULTI_SLICE_MC
3061 if (fixed_slice_cfg)
3062 WRITE_HREG(FIXED_SLICE_CFG, fixed_slice_cfg);
3063 else if (wq->pic.rows_per_slice !=
3064 (wq->pic.encoder_height + 15) >> 4) {
3065 u32 mb_per_slice = (wq->pic.encoder_height + 15) >> 4;
3066
3067 mb_per_slice = mb_per_slice * wq->pic.rows_per_slice;
3068 WRITE_HREG(FIXED_SLICE_CFG, mb_per_slice);
3069 } else
3070 WRITE_HREG(FIXED_SLICE_CFG, 0);
3071#else
3072 WRITE_HREG(FIXED_SLICE_CFG, 0);
3073#endif
3074
3075 encode_manager.encode_hw_status = request->cmd;
3076 wq->hw_status = request->cmd;
3077 WRITE_HREG(ENCODER_STATUS, request->cmd);
3078 if ((request->cmd == ENCODER_IDR)
3079 || (request->cmd == ENCODER_NON_IDR)
3080 || (request->cmd == ENCODER_SEQUENCE)
3081 || (request->cmd == ENCODER_PICTURE))
3082 encode_manager.process_irq = false;
3083
3084 if (reload_flag)
3085 amvenc_start();
3086	enc_pr(LOG_ALL, "amvenc_avc_start cmd out, request:%p.\n", (void *)request);
3087}
3088
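/*
 * Cache maintenance helpers: dma_flush() cleans CPU caches before the
 * hardware reads a buffer (DMA_TO_DEVICE); cache_flush() invalidates them
 * before the CPU reads back hardware output (DMA_FROM_DEVICE).
 */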
3089static void dma_flush(u32 buf_start, u32 buf_size)
3090{
3091 if ((buf_start == 0) || (buf_size == 0))
3092 return;
3093 dma_sync_single_for_device(
3094 &encode_manager.this_pdev->dev, buf_start,
3095 buf_size, DMA_TO_DEVICE);
3096}
3097
3098static void cache_flush(u32 buf_start, u32 buf_size)
3099{
3100 if ((buf_start == 0) || (buf_size == 0))
3101 return;
3102 dma_sync_single_for_cpu(
3103 &encode_manager.this_pdev->dev, buf_start,
3104 buf_size, DMA_FROM_DEVICE);
3105}
3106
3107static u32 getbuffer(struct encode_wq_s *wq, u32 type)
3108{
3109 u32 ret = 0;
3110
3111 switch (type) {
3112 case ENCODER_BUFFER_INPUT:
3113 ret = wq->mem.dct_buff_start_addr;
3114 break;
3115 case ENCODER_BUFFER_REF0:
3116 ret = wq->mem.dct_buff_start_addr +
3117 wq->mem.bufspec.dec0_y.buf_start;
3118 break;
3119 case ENCODER_BUFFER_REF1:
3120 ret = wq->mem.dct_buff_start_addr +
3121 wq->mem.bufspec.dec1_y.buf_start;
3122 break;
3123 case ENCODER_BUFFER_OUTPUT:
3124 ret = wq->mem.BitstreamStart;
3125 break;
3126 case ENCODER_BUFFER_DUMP:
3127 ret = wq->mem.dump_info_ddr_start_addr;
3128 break;
3129 case ENCODER_BUFFER_CBR:
3130 ret = wq->mem.cbr_info_ddr_start_addr;
3131 break;
3132 default:
3133 break;
3134 }
3135 return ret;
3136}
3137
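/*
 * One-time bring-up of an encode session: power on at the requested clock
 * level, load the microcode, register the shared IRQ and program all the
 * per-session state.  Per-frame reprogramming is done later by
 * amvenc_avc_start_cmd().
 */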
3138s32 amvenc_avc_start(struct encode_wq_s *wq, u32 clock)
3139{
3140 const char *p = select_ucode(encode_manager.ucode_index);
3141
3142 avc_poweron(clock);
3143 avc_canvas_init(wq);
3144
3145 WRITE_HREG(HCODEC_ASSIST_MMC_CTRL1, 0x32);
3146
3147 if (amvenc_loadmc(p, wq) < 0)
3148 return -EBUSY;
3149
3150 encode_manager.need_reset = true;
3151 encode_manager.process_irq = false;
3152 encode_manager.encode_hw_status = ENCODER_IDLE;
3153 amvenc_reset();
3154 avc_init_encoder(wq, true);
3155 avc_init_input_buffer(wq); /* dct buffer setting */
3156 avc_init_output_buffer(wq); /* output stream buffer */
3157
3158 ie_me_mode = (0 & ME_PIXEL_MODE_MASK) << ME_PIXEL_MODE_SHIFT;
3159 avc_prot_init(wq, NULL, wq->pic.init_qppicture, true);
3160
3161 if (request_irq(encode_manager.irq_num, enc_isr, IRQF_SHARED,
3162 "enc-irq", (void *)&encode_manager) == 0)
3163 encode_manager.irq_requested = true;
3164 else
3165 encode_manager.irq_requested = false;
3166
3167	/* deblocking (reconstruction) buffer, must be set before each frame starts */
3168	avc_init_dblk_buffer(wq->mem.dblk_buf_canvas);
3169	/* reference buffer, must be set before each frame starts */
3170	avc_init_reference_buffer(wq->mem.ref_buf_canvas);
3171	avc_init_assit_buffer(wq); /* assist buffer for the microcode */
3172 ie_me_mb_type = 0;
3173 avc_init_ie_me_parameter(wq, wq->pic.init_qppicture);
3174 WRITE_HREG(ENCODER_STATUS, ENCODER_IDLE);
3175
3176#ifdef MULTI_SLICE_MC
3177 if (fixed_slice_cfg)
3178 WRITE_HREG(FIXED_SLICE_CFG, fixed_slice_cfg);
3179 else if (wq->pic.rows_per_slice !=
3180 (wq->pic.encoder_height + 15) >> 4) {
3181 u32 mb_per_slice = (wq->pic.encoder_height + 15) >> 4;
3182
3183 mb_per_slice = mb_per_slice * wq->pic.rows_per_slice;
3184 WRITE_HREG(FIXED_SLICE_CFG, mb_per_slice);
3185 } else
3186 WRITE_HREG(FIXED_SLICE_CFG, 0);
3187#else
3188 WRITE_HREG(FIXED_SLICE_CFG, 0);
3189#endif
3190 amvenc_start();
3191 return 0;
3192}
3193
3194void amvenc_avc_stop(void)
3195{
3196 if ((encode_manager.irq_num >= 0) &&
3197 (encode_manager.irq_requested == true)) {
3198 free_irq(encode_manager.irq_num, &encode_manager);
3199 encode_manager.irq_requested = false;
3200 }
3201 amvenc_stop();
3202 avc_poweroff();
3203}
3204
3205static s32 avc_init(struct encode_wq_s *wq)
3206{
3207 s32 r = 0;
3208
3209 encode_manager.ucode_index = wq->ucode_index;
3210 r = amvenc_avc_start(wq, clock_level);
3211
3212 enc_pr(LOG_DEBUG,
3213 "init avc encode. microcode %d, ret=%d, wq:%px\n",
3214 encode_manager.ucode_index, r, (void *)wq);
3215 return 0;
3216}
3217
3218static s32 amvenc_avc_light_reset(struct encode_wq_s *wq, u32 value)
3219{
3220 s32 r = 0;
3221
3222 amvenc_avc_stop();
3223
3224 mdelay(value);
3225
3226 encode_manager.ucode_index = UCODE_MODE_FULL;
3227 r = amvenc_avc_start(wq, clock_level);
3228
3229 enc_pr(LOG_DEBUG,
3230 "amvenc_avc_light_reset finish, wq:%px, ret=%d\n",
3231 (void *)wq, r);
3232 return r;
3233}
3234
3235#ifdef CONFIG_CMA
3236static u32 checkCMA(void)
3237{
3238 u32 ret;
3239
3240 if (encode_manager.cma_pool_size > 0) {
3241 ret = encode_manager.cma_pool_size;
3242 ret = ret / MIN_SIZE;
3243 } else
3244 ret = 0;
3245 return ret;
3246}
3247#endif
3248
3249/* file operation */
3250static s32 amvenc_avc_open(struct inode *inode, struct file *file)
3251{
3252 s32 r = 0;
3253 struct encode_wq_s *wq = NULL;
3254
3255 file->private_data = NULL;
3256 enc_pr(LOG_DEBUG, "avc open\n");
3257#ifdef CONFIG_AM_JPEG_ENCODER
3258 if (jpegenc_on() == true) {
3259 enc_pr(LOG_ERROR,
3260 "hcodec in use for JPEG Encode now.\n");
3261 return -EBUSY;
3262 }
3263#endif
3264
3265#ifdef CONFIG_CMA
3266 if ((encode_manager.use_reserve == false) &&
3267 (encode_manager.check_cma == false)) {
3268 encode_manager.max_instance = checkCMA();
3269 if (encode_manager.max_instance > 0) {
3270 enc_pr(LOG_DEBUG,
3271 "amvenc_avc check CMA pool success, max instance: %d.\n",
3272 encode_manager.max_instance);
3273 } else {
3274 enc_pr(LOG_ERROR,
3275 "amvenc_avc CMA pool too small.\n");
3276 }
3277 encode_manager.check_cma = true;
3278 }
3279#endif
3280
3281 wq = create_encode_work_queue();
3282 if (wq == NULL) {
3283 enc_pr(LOG_ERROR, "amvenc_avc create instance fail.\n");
3284 return -EBUSY;
3285 }
3286
3287#ifdef CONFIG_CMA
3288 if (encode_manager.use_reserve == false) {
3289 wq->mem.buf_start = codec_mm_alloc_for_dma(ENCODE_NAME,
3290 MIN_SIZE >> PAGE_SHIFT, 0,
3291 CODEC_MM_FLAGS_CPU);
3292 if (wq->mem.buf_start) {
3293 wq->mem.buf_size = MIN_SIZE;
3294 enc_pr(LOG_DEBUG,
3295 "allocating phys 0x%x, size %dk, wq:%p.\n",
3296 wq->mem.buf_start,
3297 wq->mem.buf_size >> 10, (void *)wq);
3298 } else {
3299 enc_pr(LOG_ERROR,
3300 "CMA failed to allocate dma buffer for %s, wq:%p.\n",
3301 encode_manager.this_pdev->name,
3302 (void *)wq);
3303 destroy_encode_work_queue(wq);
3304 return -ENOMEM;
3305 }
3306 }
3307#endif
3308
3309 if (wq->mem.buf_start == 0 ||
3310 wq->mem.buf_size < MIN_SIZE) {
3311 enc_pr(LOG_ERROR,
3312 "alloc mem failed, start: 0x%x, size:0x%x, wq:%p.\n",
3313 wq->mem.buf_start,
3314 wq->mem.buf_size, (void *)wq);
3315 destroy_encode_work_queue(wq);
3316 return -ENOMEM;
3317 }
3318
3319 memcpy(&wq->mem.bufspec, &amvenc_buffspec[0],
3320 sizeof(struct BuffInfo_s));
3321
3322 enc_pr(LOG_DEBUG,
3323 "amvenc_avc memory config success, buff start:0x%x, size is 0x%x, wq:%p.\n",
3324 wq->mem.buf_start, wq->mem.buf_size, (void *)wq);
3325
3326 file->private_data = (void *) wq;
3327 return r;
3328}
3329
3330static s32 amvenc_avc_release(struct inode *inode, struct file *file)
3331{
3332 struct encode_wq_s *wq = (struct encode_wq_s *)file->private_data;
3333
3334 if (wq) {
3335 enc_pr(LOG_DEBUG, "avc release, wq:%p\n", (void *)wq);
3336 destroy_encode_work_queue(wq);
3337 }
3338 return 0;
3339}
3340
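/*
 * Main ioctl entry: buffer/address queries, cache flush requests, session
 * configuration (AMVENC_AVC_IOC_CONFIG_INIT), frame submission
 * (AMVENC_AVC_IOC_NEW_CMD) and the post-encode bookkeeping done by
 * AMVENC_AVC_IOC_SUBMIT (frame_num/POC update and dblk/ref canvas swap).
 */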
3341static long amvenc_avc_ioctl(struct file *file, u32 cmd, ulong arg)
3342{
3343 long r = 0;
3344 u32 amrisc_cmd = 0;
3345 struct encode_wq_s *wq = (struct encode_wq_s *)file->private_data;
3346#define MAX_ADDR_INFO_SIZE 52
3347 u32 addr_info[MAX_ADDR_INFO_SIZE + 4];
3348 ulong argV;
3349 u32 buf_start;
3350 s32 canvas = -1;
3351 struct canvas_s dst;
3352
3353 switch (cmd) {
3354 case AMVENC_AVC_IOC_GET_ADDR:
3355 if ((wq->mem.ref_buf_canvas & 0xff) == (ENC_CANVAS_OFFSET))
3356 put_user(1, (u32 *)arg);
3357 else
3358 put_user(2, (u32 *)arg);
3359 break;
3360 case AMVENC_AVC_IOC_INPUT_UPDATE:
3361 break;
3362 case AMVENC_AVC_IOC_NEW_CMD:
3363 if (copy_from_user(addr_info, (void *)arg,
3364 MAX_ADDR_INFO_SIZE * sizeof(u32))) {
3365 enc_pr(LOG_ERROR,
3366 "avc get new cmd error, wq:%p.\n", (void *)wq);
3367 return -1;
3368 }
3369 r = convert_request(wq, addr_info);
3370 if (r == 0)
3371 r = encode_wq_add_request(wq);
3372 if (r) {
3373 enc_pr(LOG_ERROR,
3374 "avc add new request error, wq:%p.\n",
3375 (void *)wq);
3376 }
3377 break;
3378 case AMVENC_AVC_IOC_GET_STAGE:
3379 put_user(wq->hw_status, (u32 *)arg);
3380 break;
3381 case AMVENC_AVC_IOC_GET_OUTPUT_SIZE:
3382 addr_info[0] = wq->output_size;
3383 addr_info[1] = wq->me_weight;
3384 addr_info[2] = wq->i4_weight;
3385 addr_info[3] = wq->i16_weight;
3386 r = copy_to_user((u32 *)arg,
3387 addr_info, 4 * sizeof(u32));
3388 break;
3389 case AMVENC_AVC_IOC_CONFIG_INIT:
3390 if (copy_from_user(addr_info, (void *)arg,
3391 MAX_ADDR_INFO_SIZE * sizeof(u32))) {
3392 enc_pr(LOG_ERROR,
3393 "avc config init error, wq:%p.\n", (void *)wq);
3394 return -1;
3395 }
3396 wq->ucode_index = UCODE_MODE_FULL;
3397#ifdef MULTI_SLICE_MC
3398 wq->pic.rows_per_slice = addr_info[1];
3399 enc_pr(LOG_DEBUG,
3400 "avc init -- rows_per_slice: %d, wq: %p.\n",
3401 wq->pic.rows_per_slice, (void *)wq);
3402#endif
3403 enc_pr(LOG_DEBUG,
3404 "avc init as mode %d, wq: %px.\n",
3405 wq->ucode_index, (void *)wq);
3406
3407 if (addr_info[2] > wq->mem.bufspec.max_width ||
3408 addr_info[3] > wq->mem.bufspec.max_height) {
3409 enc_pr(LOG_ERROR,
3410 "avc config init- encode size %dx%d is larger than supported (%dx%d). wq:%p.\n",
3411 addr_info[2], addr_info[3],
3412 wq->mem.bufspec.max_width,
3413 wq->mem.bufspec.max_height, (void *)wq);
3414 return -1;
3415 }
3416		wq->pic.encoder_width = addr_info[2];
3417		wq->pic.encoder_height = addr_info[3];
3418		pr_err("hwenc: AMVENC_AVC_IOC_CONFIG_INIT: w:%d, h:%d\n", wq->pic.encoder_width, wq->pic.encoder_height);
3419
3420 wq->pic.color_space = addr_info[4];
3421 pr_err("hwenc: AMVENC_AVC_IOC_CONFIG_INIT, wq->pic.color_space=%#x\n", wq->pic.color_space);
3422 if (wq->pic.encoder_width *
3423 wq->pic.encoder_height >= 1280 * 720)
3424 clock_level = 6;
3425 else
3426 clock_level = 5;
3427 avc_buffspec_init(wq);
3428 complete(&encode_manager.event.request_in_com);
3429 addr_info[1] = wq->mem.bufspec.dct.buf_start;
3430 addr_info[2] = wq->mem.bufspec.dct.buf_size;
3431 addr_info[3] = wq->mem.bufspec.bitstream.buf_start;
3432 addr_info[4] = wq->mem.bufspec.bitstream.buf_size;
3433 addr_info[5] = wq->mem.bufspec.scale_buff.buf_start;
3434 addr_info[6] = wq->mem.bufspec.scale_buff.buf_size;
3435 addr_info[7] = wq->mem.bufspec.dump_info.buf_start;
3436 addr_info[8] = wq->mem.bufspec.dump_info.buf_size;
3437 addr_info[9] = wq->mem.bufspec.cbr_info.buf_start;
3438 addr_info[10] = wq->mem.bufspec.cbr_info.buf_size;
3439 r = copy_to_user((u32 *)arg, addr_info, 11*sizeof(u32));
3440 break;
3441 case AMVENC_AVC_IOC_FLUSH_CACHE:
3442 if (copy_from_user(addr_info, (void *)arg,
3443 MAX_ADDR_INFO_SIZE * sizeof(u32))) {
3444 enc_pr(LOG_ERROR,
3445 "avc flush cache error, wq: %p.\n", (void *)wq);
3446 return -1;
3447 }
3448 buf_start = getbuffer(wq, addr_info[0]);
3449 dma_flush(buf_start + addr_info[1],
3450 addr_info[2] - addr_info[1]);
3451 break;
3452 case AMVENC_AVC_IOC_FLUSH_DMA:
3453 if (copy_from_user(addr_info, (void *)arg,
3454 MAX_ADDR_INFO_SIZE * sizeof(u32))) {
3455 enc_pr(LOG_ERROR,
3456 "avc flush dma error, wq:%p.\n", (void *)wq);
3457 return -1;
3458 }
3459 buf_start = getbuffer(wq, addr_info[0]);
3460 cache_flush(buf_start + addr_info[1],
3461 addr_info[2] - addr_info[1]);
3462 break;
3463 case AMVENC_AVC_IOC_GET_BUFFINFO:
3464 put_user(wq->mem.buf_size, (u32 *)arg);
3465 break;
3466 case AMVENC_AVC_IOC_GET_DEVINFO:
3467 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXL) {
3468 /* send the same id as GXTVBB to upper*/
3469 r = copy_to_user((s8 *)arg, AMVENC_DEVINFO_GXTVBB,
3470 strlen(AMVENC_DEVINFO_GXTVBB));
3471 } else if (get_cpu_type() == MESON_CPU_MAJOR_ID_GXTVBB) {
3472 r = copy_to_user((s8 *)arg, AMVENC_DEVINFO_GXTVBB,
3473 strlen(AMVENC_DEVINFO_GXTVBB));
3474 } else if (get_cpu_type() == MESON_CPU_MAJOR_ID_GXBB) {
3475 r = copy_to_user((s8 *)arg, AMVENC_DEVINFO_GXBB,
3476 strlen(AMVENC_DEVINFO_GXBB));
3477 } else if (get_cpu_type() == MESON_CPU_MAJOR_ID_MG9TV) {
3478 r = copy_to_user((s8 *)arg, AMVENC_DEVINFO_G9,
3479 strlen(AMVENC_DEVINFO_G9));
3480 } else {
3481 r = copy_to_user((s8 *)arg, AMVENC_DEVINFO_M8,
3482 strlen(AMVENC_DEVINFO_M8));
3483 }
3484 break;
3485 case AMVENC_AVC_IOC_SUBMIT:
3486 get_user(amrisc_cmd, ((u32 *)arg));
3487 if (amrisc_cmd == ENCODER_IDR) {
3488 wq->pic.idr_pic_id++;
3489 if (wq->pic.idr_pic_id > 65535)
3490 wq->pic.idr_pic_id = 0;
3491 wq->pic.pic_order_cnt_lsb = 2;
3492 wq->pic.frame_number = 1;
3493 } else if (amrisc_cmd == ENCODER_NON_IDR) {
3494#ifdef H264_ENC_SVC
3495 /* only update when there is reference frame */
3496 if (wq->pic.enable_svc == 0 || wq->pic.non_ref_cnt == 0) {
3497 wq->pic.frame_number++;
3498 enc_pr(LOG_INFO, "Increase frame_num to %d\n",
3499 wq->pic.frame_number);
3500 }
3501#else
3502 wq->pic.frame_number++;
3503#endif
3504
3505 wq->pic.pic_order_cnt_lsb += 2;
3506 if (wq->pic.frame_number > 65535)
3507 wq->pic.frame_number = 0;
3508 }
3509#ifdef H264_ENC_SVC
3510 /* only update when there is reference frame */
3511 if (wq->pic.enable_svc == 0 || wq->pic.non_ref_cnt == 0) {
3512 amrisc_cmd = wq->mem.dblk_buf_canvas;
3513 wq->mem.dblk_buf_canvas = wq->mem.ref_buf_canvas;
3514 /* current dblk buffer as next reference buffer */
3515 wq->mem.ref_buf_canvas = amrisc_cmd;
3516 enc_pr(LOG_INFO,
3517 "switch buffer enable %d cnt %d\n",
3518 wq->pic.enable_svc, wq->pic.non_ref_cnt);
3519 }
3520 if (wq->pic.enable_svc) {
3521			wq->pic.non_ref_cnt++;
3522 if (wq->pic.non_ref_cnt > wq->pic.non_ref_limit) {
3523 enc_pr(LOG_INFO, "Svc clear cnt %d conf %d\n",
3524 wq->pic.non_ref_cnt,
3525 wq->pic.non_ref_limit);
3526 wq->pic.non_ref_cnt = 0;
3527 } else
3528				enc_pr(LOG_INFO, "Svc increase non ref counter to %d\n",
3529					wq->pic.non_ref_cnt);
3530 }
3531#else
3532 amrisc_cmd = wq->mem.dblk_buf_canvas;
3533 wq->mem.dblk_buf_canvas = wq->mem.ref_buf_canvas;
3534 /* current dblk buffer as next reference buffer */
3535 wq->mem.ref_buf_canvas = amrisc_cmd;
3536#endif
3537 break;
3538 case AMVENC_AVC_IOC_READ_CANVAS:
3539 get_user(argV, ((u32 *)arg));
3540 canvas = argV;
3541		if (canvas & 0xff) {
3542			canvas_read(canvas & 0xff, &dst);
3543			addr_info[0] = dst.addr;
3544			if ((canvas & 0xff00) >> 8)
3545				canvas_read((canvas & 0xff00) >> 8, &dst);
3546			if ((canvas & 0xff0000) >> 16)
3547				canvas_read((canvas & 0xff0000) >> 16, &dst);
3548			addr_info[1] = dst.addr - addr_info[0] +
3549				dst.width * dst.height;
3550			/* only flush when a valid canvas was actually read */
3551			dma_flush(dst.addr, dst.width * dst.height * 3 / 2);
3552		} else {
3553			addr_info[0] = 0;
3554			addr_info[1] = 0;
3555		}
3555 r = copy_to_user((u32 *)arg, addr_info, 2 * sizeof(u32));
3556 break;
3557 case AMVENC_AVC_IOC_MAX_INSTANCE:
3558 put_user(encode_manager.max_instance, (u32 *)arg);
3559 break;
3560 case AMVENC_AVC_IOC_QP_MODE:
3561 get_user(qp_mode, ((u32 *)arg));
3562 pr_info("qp_mode %d\n", qp_mode);
3563 break;
3564 default:
3565 r = -1;
3566 break;
3567 }
3568 return r;
3569}
3570
3571#ifdef CONFIG_COMPAT
3572static long amvenc_avc_compat_ioctl(struct file *filp,
3573 unsigned int cmd, unsigned long args)
3574{
3575 unsigned long ret;
3576
3577 args = (unsigned long)compat_ptr(args);
3578 ret = amvenc_avc_ioctl(filp, cmd, args);
3579 return ret;
3580}
3581#endif
3582
3583static s32 avc_mmap(struct file *filp, struct vm_area_struct *vma)
3584{
3585 struct encode_wq_s *wq = (struct encode_wq_s *)filp->private_data;
3586 ulong off = vma->vm_pgoff << PAGE_SHIFT;
3587 ulong vma_size = vma->vm_end - vma->vm_start;
3588
3589 if (vma_size == 0) {
3590 enc_pr(LOG_ERROR, "vma_size is 0, wq:%p.\n", (void *)wq);
3591 return -EAGAIN;
3592 }
3593 if (!off)
3594 off += wq->mem.buf_start;
3595 enc_pr(LOG_ALL,
3596 "vma_size is %ld , off is %ld, wq:%p.\n",
3597 vma_size, off, (void *)wq);
3598 vma->vm_flags |= VM_DONTEXPAND | VM_DONTDUMP | VM_IO;
3599 /* vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot); */
3600 if (remap_pfn_range(vma, vma->vm_start, off >> PAGE_SHIFT,
3601 vma->vm_end - vma->vm_start, vma->vm_page_prot)) {
3602 enc_pr(LOG_ERROR,
3603 "set_cached: failed remap_pfn_range, wq:%p.\n",
3604 (void *)wq);
3605 return -EAGAIN;
3606 }
3607 return 0;
3608}
3609
3610static u32 amvenc_avc_poll(struct file *file, poll_table *wait_table)
3611{
3612 struct encode_wq_s *wq = (struct encode_wq_s *)file->private_data;
3613
3614 poll_wait(file, &wq->request_complete, wait_table);
3615
3616 if (atomic_read(&wq->request_ready)) {
3617 atomic_dec(&wq->request_ready);
3618 return POLLIN | POLLRDNORM;
3619 }
3620
3621 return 0;
3622}
3623
3624static const struct file_operations amvenc_avc_fops = {
3625 .owner = THIS_MODULE,
3626 .open = amvenc_avc_open,
3627 .mmap = avc_mmap,
3628 .release = amvenc_avc_release,
3629 .unlocked_ioctl = amvenc_avc_ioctl,
3630#ifdef CONFIG_COMPAT
3631 .compat_ioctl = amvenc_avc_compat_ioctl,
3632#endif
3633 .poll = amvenc_avc_poll,
3634};
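/*
 * Rough userspace flow against this char device (a sketch only -- the
 * device node name is assumed from DEVICE_NAME, and the exact ioctl
 * argument layouts are defined by the userspace encoder library, not here):
 *
 *	fd = open("/dev/amvenc_avc", O_RDWR);
 *	ioctl(fd, AMVENC_AVC_IOC_CONFIG_INIT, cfg);     // size/slice cfg -> buffer offsets
 *	buf = mmap(NULL, buf_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
 *	ioctl(fd, AMVENC_AVC_IOC_NEW_CMD, cmd_info);    // queue SEQUENCE/IDR/NON_IDR
 *	poll(&pfd, 1, timeout);                         // wait for completion
 *	ioctl(fd, AMVENC_AVC_IOC_GET_STAGE, &stage);
 *	ioctl(fd, AMVENC_AVC_IOC_GET_OUTPUT_SIZE, out); // bitstream length in buf
 */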
3635
3636/* work queue function */
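/*
 * Worker-side handling of one queued request: start the hardware, sleep
 * until the ISR/tasklet reports completion (optionally without a timeout),
 * turn an ENCODER_SEQUENCE pass straight into an ENCODER_PICTURE pass so
 * SPS and PPS are produced back to back, flush the caches the caller asked
 * for, and fall back to a light reset if the hardware never completed.
 */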
3637static s32 encode_process_request(struct encode_manager_s *manager,
3638 struct encode_queue_item_s *pitem)
3639{
3640 s32 ret = 0;
3641 struct encode_wq_s *wq = pitem->request.parent;
3642 struct encode_request_s *request = &pitem->request;
3643
3644 u32 timeout = (request->timeout == 0) ?
3645 1 : msecs_to_jiffies(request->timeout);
3646
3647 u32 buf_start = 0;
3648 u32 size = 0;
3649 u32 flush_size = ((wq->pic.encoder_width + 31) >> 5 << 5) *
3650 ((wq->pic.encoder_height + 15) >> 4 << 4) * 3 / 2;
3651
3652 struct enc_dma_cfg *cfg = NULL;
3653 int i = 0;
3654
3655#ifdef H264_ENC_CBR
3656 if (request->cmd == ENCODER_IDR || request->cmd == ENCODER_NON_IDR) {
3657 if (request->flush_flag & AMVENC_FLUSH_FLAG_CBR
3658 && get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB) {
3659 void *vaddr = wq->mem.cbr_info_ddr_virt_addr;
3660 ConvertTable2Risc(vaddr, 0xa00);
3661 buf_start = getbuffer(wq, ENCODER_BUFFER_CBR);
3662 codec_mm_dma_flush(vaddr, wq->mem.cbr_info_ddr_size, DMA_TO_DEVICE);
3663 }
3664 }
3665#endif
3666
3667Again:
3668 amvenc_avc_start_cmd(wq, request);
3669
3670 if (no_timeout) {
3671 wait_event_interruptible(manager->event.hw_complete,
3672 (manager->encode_hw_status == ENCODER_IDR_DONE
3673 || manager->encode_hw_status == ENCODER_NON_IDR_DONE
3674 || manager->encode_hw_status == ENCODER_SEQUENCE_DONE
3675 || manager->encode_hw_status == ENCODER_PICTURE_DONE));
3676 } else {
3677 wait_event_interruptible_timeout(manager->event.hw_complete,
3678 ((manager->encode_hw_status == ENCODER_IDR_DONE)
3679 || (manager->encode_hw_status == ENCODER_NON_IDR_DONE)
3680 || (manager->encode_hw_status == ENCODER_SEQUENCE_DONE)
3681 || (manager->encode_hw_status == ENCODER_PICTURE_DONE)),
3682 timeout);
3683 }
3684
3685 if ((request->cmd == ENCODER_SEQUENCE) &&
3686 (manager->encode_hw_status == ENCODER_SEQUENCE_DONE)) {
3687 wq->sps_size = READ_HREG(HCODEC_VLC_TOTAL_BYTES);
3688 wq->hw_status = manager->encode_hw_status;
3689 request->cmd = ENCODER_PICTURE;
3690 goto Again;
3691 } else if ((request->cmd == ENCODER_PICTURE) &&
3692 (manager->encode_hw_status == ENCODER_PICTURE_DONE)) {
3693 wq->pps_size =
3694 READ_HREG(HCODEC_VLC_TOTAL_BYTES) - wq->sps_size;
3695 wq->hw_status = manager->encode_hw_status;
3696 if (request->flush_flag & AMVENC_FLUSH_FLAG_OUTPUT) {
3697 buf_start = getbuffer(wq, ENCODER_BUFFER_OUTPUT);
3698 cache_flush(buf_start,
3699 wq->sps_size + wq->pps_size);
3700 }
3701 wq->output_size = (wq->sps_size << 16) | wq->pps_size;
3702 } else {
3703 wq->hw_status = manager->encode_hw_status;
3704
3705 if ((manager->encode_hw_status == ENCODER_IDR_DONE) ||
3706 (manager->encode_hw_status == ENCODER_NON_IDR_DONE)) {
3707 wq->output_size = READ_HREG(HCODEC_VLC_TOTAL_BYTES);
3708
3709 if (request->flush_flag & AMVENC_FLUSH_FLAG_OUTPUT) {
3710 buf_start = getbuffer(wq, ENCODER_BUFFER_OUTPUT);
3711 cache_flush(buf_start, wq->output_size);
3712 }
3713
3714 if (request->flush_flag & AMVENC_FLUSH_FLAG_DUMP) {
3715 buf_start = getbuffer(wq, ENCODER_BUFFER_DUMP);
3716 size = wq->mem.dump_info_ddr_size;
3717 cache_flush(buf_start, size);
3718 //enc_pr(LOG_DEBUG, "CBR flush dump_info done");
3719 }
3720
3721 if (request->flush_flag & AMVENC_FLUSH_FLAG_REFERENCE) {
3722 u32 ref_id = ENCODER_BUFFER_REF0;
3723
3724 if ((wq->mem.ref_buf_canvas & 0xff) == (ENC_CANVAS_OFFSET))
3725 ref_id = ENCODER_BUFFER_REF0;
3726 else
3727 ref_id = ENCODER_BUFFER_REF1;
3728
3729 buf_start = getbuffer(wq, ref_id);
3730 cache_flush(buf_start, flush_size);
3731 }
3732 } else {
3733 manager->encode_hw_status = ENCODER_ERROR;
3734 enc_pr(LOG_DEBUG, "avc encode light reset --- ");
3735 enc_pr(LOG_DEBUG,
3736 "frame type: %s, size: %dx%d, wq: %px\n",
3737 (request->cmd == ENCODER_IDR) ? "IDR" : "P",
3738 wq->pic.encoder_width,
3739 wq->pic.encoder_height, (void *)wq);
3740 enc_pr(LOG_DEBUG,
3741 "mb info: 0x%x, encode status: 0x%x, dct status: 0x%x ",
3742 READ_HREG(HCODEC_VLC_MB_INFO),
3743 READ_HREG(ENCODER_STATUS),
3744 READ_HREG(HCODEC_QDCT_STATUS_CTRL));
3745 enc_pr(LOG_DEBUG,
3746 "vlc status: 0x%x, me status: 0x%x, risc pc:0x%x, debug:0x%x\n",
3747 READ_HREG(HCODEC_VLC_STATUS_CTRL),
3748 READ_HREG(HCODEC_ME_STATUS),
3749 READ_HREG(HCODEC_MPC_E),
3750 READ_HREG(DEBUG_REG));
3751 amvenc_avc_light_reset(wq, 30);
3752 }
3753
3754 for (i = 0; i < request->plane_num; i++) {
3755 cfg = &request->dma_cfg[i];
3756 enc_pr(LOG_INFO, "request vaddr %p, paddr %p\n",
3757 cfg->vaddr, cfg->paddr);
3758 if (cfg->fd >= 0 && cfg->vaddr != NULL)
3759 enc_dma_buf_unmap(cfg);
3760 }
3761 }
3762 atomic_inc(&wq->request_ready);
3763 wake_up_interruptible(&wq->request_complete);
3764 return ret;
3765}
3766
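/*
 * encode_wq_add_request() - queue the request currently stored in @wq.
 *
 * Copies wq->request into a free queue item, resets wq->request for the
 * next command, moves the item onto the process queue and wakes the
 * monitor thread via request_in_com. Returns 0 on success, -1 if the wq
 * is not registered or no free queue item is available.
 */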
3767s32 encode_wq_add_request(struct encode_wq_s *wq)
3768{
3769 struct encode_queue_item_s *pitem = NULL;
3770 struct list_head *head = NULL;
3771 struct encode_wq_s *tmp = NULL;
3772 bool find = false;
3773
3774 spin_lock(&encode_manager.event.sem_lock);
3775
3776 head = &encode_manager.wq;
3777 list_for_each_entry(tmp, head, list) {
3778 if ((wq == tmp) && (wq != NULL)) {
3779 find = true;
3780 break;
3781 }
3782 }
3783
3784 if (find == false) {
3785 enc_pr(LOG_ERROR, "current wq (%p) is not registered.\n",
3786 (void *)wq);
3787 goto error;
3788 }
3789
3790 if (list_empty(&encode_manager.free_queue)) {
3791 enc_pr(LOG_ERROR, "no free space in work queue, wq: %p.\n",
3792 (void *)wq);
3793 goto error;
3794 }
3795
3796 pitem = list_entry(encode_manager.free_queue.next,
3797 struct encode_queue_item_s, list);
3798
3799 if (IS_ERR(pitem))
3800 goto error;
3801
3802 memcpy(&pitem->request, &wq->request, sizeof(struct encode_request_s));
3803
3804 enc_pr(LOG_INFO, "new work request %p, vaddr %p, paddr %p\n", &pitem->request,
3805 pitem->request.dma_cfg[0].vaddr, pitem->request.dma_cfg[0].paddr);
3806
3807 memset(&wq->request, 0, sizeof(struct encode_request_s));
3808 wq->request.dma_cfg[0].fd = -1;
3809 wq->request.dma_cfg[1].fd = -1;
3810 wq->request.dma_cfg[2].fd = -1;
3811 wq->hw_status = 0;
3812 wq->output_size = 0;
3813 pitem->request.parent = wq;
3814 list_move_tail(&pitem->list, &encode_manager.process_queue);
3815 spin_unlock(&encode_manager.event.sem_lock);
3816
3817 enc_pr(LOG_INFO,
3818 "add new work ok, cmd:%d, ucode mode: %d, wq:%p.\n",
3819 pitem->request.cmd, pitem->request.ucode_mode,
3820 (void *)wq);
3821 complete(&encode_manager.event.request_in_com);/* new cmd come in */
3822 return 0;
3823error:
3824 spin_unlock(&encode_manager.event.sem_lock);
3825 return -1;
3826}
3827
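/*
 * create_encode_work_queue() - allocate and register a new encode work queue.
 *
 * Initializes default picture and CBR parameters, links the wq into
 * encode_manager.wq and, when reserved memory is in use, binds a free
 * reserve buffer to it. Returns the new wq, or NULL when allocation fails
 * or the instance limit is reached.
 */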
3828struct encode_wq_s *create_encode_work_queue(void)
3829{
3830 struct encode_wq_s *encode_work_queue = NULL;
3831 bool done = false;
3832 u32 i, max_instance;
3833 struct Buff_s *reserve_buff;
3834
3835 encode_work_queue = kzalloc(sizeof(struct encode_wq_s), GFP_KERNEL);
3836 if (!encode_work_queue) {
3837 enc_pr(LOG_ERROR, "can't create work queue\n");
3838 return NULL;
3839 }
3840 max_instance = encode_manager.max_instance;
3841 encode_work_queue->pic.init_qppicture = 26;
3842 encode_work_queue->pic.log2_max_frame_num = 4;
3843 encode_work_queue->pic.log2_max_pic_order_cnt_lsb = 4;
3844 encode_work_queue->pic.idr_pic_id = 0;
3845 encode_work_queue->pic.frame_number = 0;
3846 encode_work_queue->pic.pic_order_cnt_lsb = 0;
3847#ifdef H264_ENC_SVC
3848 /* Get settings from the global*/
3849 encode_work_queue->pic.enable_svc = svc_enable;
3850 encode_work_queue->pic.non_ref_limit = svc_ref_conf;
3851 encode_work_queue->pic.non_ref_cnt = 0;
3852 enc_pr(LOG_INFO, "svc conf enable %d, duration %d\n",
3853 encode_work_queue->pic.enable_svc,
3854 encode_work_queue->pic.non_ref_limit);
3855#endif
3856 encode_work_queue->ucode_index = UCODE_MODE_FULL;
3857
3858#ifdef H264_ENC_CBR
3859 encode_work_queue->cbr_info.block_w = 16;
3860 encode_work_queue->cbr_info.block_h = 9;
3861 encode_work_queue->cbr_info.long_th = CBR_LONG_THRESH;
3862 encode_work_queue->cbr_info.start_tbl_id = START_TABLE_ID;
3863 encode_work_queue->cbr_info.short_shift = CBR_SHORT_SHIFT;
3864 encode_work_queue->cbr_info.long_mb_num = CBR_LONG_MB_NUM;
3865#endif
3866 init_waitqueue_head(&encode_work_queue->request_complete);
3867 atomic_set(&encode_work_queue->request_ready, 0);
3868 spin_lock(&encode_manager.event.sem_lock);
3869 if (encode_manager.wq_count < encode_manager.max_instance) {
3870 list_add_tail(&encode_work_queue->list, &encode_manager.wq);
3871 encode_manager.wq_count++;
3872 if (encode_manager.use_reserve == true) {
3873 for (i = 0; i < max_instance; i++) {
3874 reserve_buff = &encode_manager.reserve_buff[i];
3875 if (reserve_buff->used == false) {
3876 encode_work_queue->mem.buf_start =
3877 reserve_buff->buf_start;
3878 encode_work_queue->mem.buf_size =
3879 reserve_buff->buf_size;
3880 reserve_buff->used = true;
3881 done = true;
3882 break;
3883 }
3884 }
3885 } else
3886 done = true;
3887 }
3888 spin_unlock(&encode_manager.event.sem_lock);
3889 if (done == false) {
3890 kfree(encode_work_queue);
3891 encode_work_queue = NULL;
3892 enc_pr(LOG_ERROR, "too many work queues!\n");
3893 }
3894 return encode_work_queue; /* find it */
3895}
3896
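/*
 * _destroy_encode_work_queue() - unlink @encode_work_queue from the manager.
 *
 * Must be called with encode_manager.event.sem_lock held. Releases the
 * reserve buffer bound to the wq (if any) and reports through *find
 * whether the wq was actually on the list.
 */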
3897static void _destroy_encode_work_queue(struct encode_manager_s *manager,
3898 struct encode_wq_s **wq,
3899 struct encode_wq_s *encode_work_queue,
3900 bool *find)
3901{
3902 struct list_head *head;
3903 struct encode_wq_s *wp_tmp = NULL;
3904 u32 i, max_instance;
3905 struct Buff_s *reserve_buff;
3906 u32 buf_start = encode_work_queue->mem.buf_start;
3907
3908 max_instance = manager->max_instance;
3909 head = &manager->wq;
3910 list_for_each_entry_safe((*wq), wp_tmp, head, list) {
3911 if ((*wq) && (*wq == encode_work_queue)) {
3912 list_del(&(*wq)->list);
3913 if (manager->use_reserve == true) {
3914 for (i = 0; i < max_instance; i++) {
3915 reserve_buff =
3916 &manager->reserve_buff[i];
3917 if (reserve_buff->used == true &&
3918 buf_start ==
3919 reserve_buff->buf_start) {
3920 reserve_buff->used = false;
3921 break;
3922 }
3923 }
3924 }
3925 *find = true;
3926 manager->wq_count--;
3927 enc_pr(LOG_DEBUG,
3928 "remove encode_work_queue %p success, %s line %d.\n",
3929 (void *)encode_work_queue,
3930 __func__, __LINE__);
3931 break;
3932 }
3933 }
3934}
3935
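/*
 * destroy_encode_work_queue() - tear down a work queue created by
 * create_encode_work_queue(). Waits for a running request to complete,
 * returns pending queue items to the free queue, unlinks the wq and
 * releases its CMA buffer.
 */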
3936s32 destroy_encode_work_queue(struct encode_wq_s *encode_work_queue)
3937{
3938 struct encode_queue_item_s *pitem, *tmp;
3939 struct encode_wq_s *wq = NULL;
3940 bool find = false;
3941
3942 struct list_head *head;
3943
3944 if (encode_work_queue) {
3945 spin_lock(&encode_manager.event.sem_lock);
3946 if (encode_manager.current_wq == encode_work_queue) {
3947 encode_manager.remove_flag = true;
3948 spin_unlock(&encode_manager.event.sem_lock);
3949 enc_pr(LOG_DEBUG,
3950 "warning--Destroy the running queue, should not be here.\n");
3951 wait_for_completion(
3952 &encode_manager.event.process_complete);
3953 spin_lock(&encode_manager.event.sem_lock);
3954 } /* else we can delete it safely. */
3955
3956 head = &encode_manager.process_queue;
3957 list_for_each_entry_safe(pitem, tmp, head, list) {
3958 if (pitem && pitem->request.parent ==
3959 encode_work_queue) {
3960 pitem->request.parent = NULL;
3961 enc_pr(LOG_DEBUG,
3962 "warning--remove not process request, should not be here.\n");
3963 list_move_tail(&pitem->list,
3964 &encode_manager.free_queue);
3965 }
3966 }
3967
3968 _destroy_encode_work_queue(&encode_manager, &wq,
3969 encode_work_queue, &find);
3970 spin_unlock(&encode_manager.event.sem_lock);
3971#ifdef CONFIG_CMA
3972 if (encode_work_queue->mem.buf_start) {
3973 if (encode_work_queue->mem.cbr_info_ddr_virt_addr != NULL) {
3974 codec_mm_unmap_phyaddr(encode_work_queue->mem.cbr_info_ddr_virt_addr);
3975 encode_work_queue->mem.cbr_info_ddr_virt_addr = NULL;
3976 }
3977 codec_mm_free_for_dma(
3978 ENCODE_NAME,
3979 encode_work_queue->mem.buf_start);
3980 encode_work_queue->mem.buf_start = 0;
3981
3982 }
3983#endif
3984 kfree(encode_work_queue);
3985 complete(&encode_manager.event.request_in_com);
3986 }
3987 return 0;
3988}
3989
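/*
 * encode_monitor_thread() - main worker loop of the encoder.
 *
 * Runs as a SCHED_FIFO kthread. Waits on request_in_com, performs the
 * one-time hardware init for the first registered wq, dispatches queued
 * items through encode_process_request() and powers the encoder down
 * when the last wq is removed.
 */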
3990static s32 encode_monitor_thread(void *data)
3991{
3992 struct encode_manager_s *manager = (struct encode_manager_s *)data;
3993 struct encode_queue_item_s *pitem = NULL;
3994 struct sched_param param = {.sched_priority = MAX_RT_PRIO - 1 };
3995 s32 ret = 0;
3996
3997 enc_pr(LOG_DEBUG, "encode workqueue monitor start.\n");
3998 sched_setscheduler(current, SCHED_FIFO, &param);
3999 allow_signal(SIGTERM);
4000
4001 /* setup current_wq here. */
4002 while (manager->process_queue_state != ENCODE_PROCESS_QUEUE_STOP) {
4003 if (kthread_should_stop())
4004 break;
4005
4006 ret = wait_for_completion_interruptible(
4007 &manager->event.request_in_com);
4008
4009 if (ret == -ERESTARTSYS)
4010 break;
4011
4012 if (kthread_should_stop())
4013 break;
4014
4015 if (manager->inited == false) {
4016 spin_lock(&manager->event.sem_lock);
4017
4018 if (!list_empty(&manager->wq)) {
4019 struct encode_wq_s *first_wq =
4020 list_entry(manager->wq.next,
4021 struct encode_wq_s, list);
4022 manager->current_wq = first_wq;
4023 spin_unlock(&manager->event.sem_lock);
4024
4025 if (first_wq) {
4026#ifdef CONFIG_AMLOGIC_MEDIA_GE2D
4027 if (!manager->context)
4028 manager->context =
4029 create_ge2d_work_queue();
4030#endif
4031 avc_init(first_wq);
4032 manager->inited = true;
4033 }
4034 spin_lock(&manager->event.sem_lock);
4035 manager->current_wq = NULL;
4036 spin_unlock(&manager->event.sem_lock);
4037 if (manager->remove_flag) {
4038 complete(
4039 &manager
4040 ->event.process_complete);
4041 manager->remove_flag = false;
4042 }
4043 } else
4044 spin_unlock(&manager->event.sem_lock);
4045 continue;
4046 }
4047
4048 spin_lock(&manager->event.sem_lock);
4049 pitem = NULL;
4050
4051 if (list_empty(&manager->wq)) {
4052 spin_unlock(&manager->event.sem_lock);
4053 manager->inited = false;
4054 amvenc_avc_stop();
4055
4056#ifdef CONFIG_AMLOGIC_MEDIA_GE2D
4057 if (manager->context) {
4058 destroy_ge2d_work_queue(manager->context);
4059 manager->context = NULL;
4060 }
4061#endif
4062
4063 enc_pr(LOG_DEBUG, "power off encode.\n");
4064 continue;
4065 } else if (!list_empty(&manager->process_queue)) {
4066 pitem = list_entry(manager->process_queue.next,
4067 struct encode_queue_item_s, list);
4068 list_del(&pitem->list);
4069 manager->current_item = pitem;
4070 manager->current_wq = pitem->request.parent;
4071 }
4072
4073 spin_unlock(&manager->event.sem_lock);
4074
4075 if (pitem) {
4076 encode_process_request(manager, pitem);
4077 spin_lock(&manager->event.sem_lock);
4078 list_add_tail(&pitem->list, &manager->free_queue);
4079 manager->current_item = NULL;
4080 manager->last_wq = manager->current_wq;
4081 manager->current_wq = NULL;
4082 spin_unlock(&manager->event.sem_lock);
4083 }
4084
4085 if (manager->remove_flag) {
4086 complete(&manager->event.process_complete);
4087 manager->remove_flag = false;
4088 }
4089 }
4090 while (!kthread_should_stop())
4091 msleep(20);
4092
4093 enc_pr(LOG_DEBUG, "exit encode_monitor_thread.\n");
4094 return 0;
4095}
4096
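/* Pick TNR gains for the SoC generation and start the monitor kthread. */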
4097static s32 encode_start_monitor(void)
4098{
4099 s32 ret = 0;
4100
4101 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB) {
4102 y_tnr_mot2alp_nrm_gain = 216;
4103 y_tnr_mot2alp_dis_gain = 144;
4104 c_tnr_mot2alp_nrm_gain = 216;
4105 c_tnr_mot2alp_dis_gain = 144;
4106 } else {
4107 /* more tnr */
4108 y_tnr_mot2alp_nrm_gain = 144;
4109 y_tnr_mot2alp_dis_gain = 96;
4110 c_tnr_mot2alp_nrm_gain = 144;
4111 c_tnr_mot2alp_dis_gain = 96;
4112 }
4113
4114 enc_pr(LOG_DEBUG, "encode start monitor.\n");
4115 encode_manager.process_queue_state = ENCODE_PROCESS_QUEUE_START;
4116 encode_manager.encode_thread = kthread_run(encode_monitor_thread,
4117 &encode_manager, "encode_monitor");
4118 if (IS_ERR(encode_manager.encode_thread)) {
4119 ret = PTR_ERR(encode_manager.encode_thread);
4120 encode_manager.process_queue_state = ENCODE_PROCESS_QUEUE_STOP;
4121 enc_pr(LOG_ERROR,
4122 "encode monitor : failed to start kthread (%d)\n", ret);
4123 }
4124 return ret;
4125}
4126
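/*
 * encode_stop_monitor() - stop the monitor kthread.
 * Fails with -1 if any work queue is still registered.
 */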
4127static s32 encode_stop_monitor(void)
4128{
4129 enc_pr(LOG_DEBUG, "stop encode monitor thread\n");
4130 if (encode_manager.encode_thread) {
4131 spin_lock(&encode_manager.event.sem_lock);
4132 if (!list_empty(&encode_manager.wq)) {
4133 u32 count = encode_manager.wq_count;
4134
4135 spin_unlock(&encode_manager.event.sem_lock);
4136 enc_pr(LOG_ERROR,
4137 "stop encode monitor thread error, active wq (%d) is not 0.\n",
4138 count);
4139 return -1;
4140 }
4141 spin_unlock(&encode_manager.event.sem_lock);
4142 encode_manager.process_queue_state = ENCODE_PROCESS_QUEUE_STOP;
4143 send_sig(SIGTERM, encode_manager.encode_thread, 1);
4144 complete(&encode_manager.event.request_in_com);
4145 kthread_stop(encode_manager.encode_thread);
4146 encode_manager.encode_thread = NULL;
4147 kfree(mc_addr);
4148 mc_addr = NULL;
4149 }
4150 return 0;
4151}
4152
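/*
 * encode_wq_init() - one-time init of the encode manager: locks, wait
 * queues, the ISR tasklet, the free item pool and the monitor thread.
 */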
4153static s32 encode_wq_init(void)
4154{
4155 u32 i = 0;
4156 struct encode_queue_item_s *pitem = NULL;
4157
4158 enc_pr(LOG_DEBUG, "encode_wq_init.\n");
4159 encode_manager.irq_requested = false;
4160
4161 spin_lock_init(&encode_manager.event.sem_lock);
4162 init_completion(&encode_manager.event.request_in_com);
4163 init_waitqueue_head(&encode_manager.event.hw_complete);
4164 init_completion(&encode_manager.event.process_complete);
4165 INIT_LIST_HEAD(&encode_manager.process_queue);
4166 INIT_LIST_HEAD(&encode_manager.free_queue);
4167 INIT_LIST_HEAD(&encode_manager.wq);
4168
4169 tasklet_init(&encode_manager.encode_tasklet,
4170 encode_isr_tasklet,
4171 (ulong)&encode_manager);
4172
4173 for (i = 0; i < MAX_ENCODE_REQUEST; i++) {
4174 pitem = kcalloc(1,
4175 sizeof(struct encode_queue_item_s),
4176 GFP_KERNEL);
4177 if (!pitem) {
4178 enc_pr(LOG_ERROR, "can't request queue item memory.\n");
4179 return -1;
4180 }
4181 pitem->request.parent = NULL;
4182 list_add_tail(&pitem->list, &encode_manager.free_queue);
4183 }
4184 encode_manager.current_wq = NULL;
4185 encode_manager.last_wq = NULL;
4186 encode_manager.encode_thread = NULL;
4187 encode_manager.current_item = NULL;
4188 encode_manager.wq_count = 0;
4189 encode_manager.remove_flag = false;
4190 InitEncodeWeight();
4191 if (encode_start_monitor()) {
4192 enc_pr(LOG_ERROR, "encode create thread error.\n");
4193 return -1;
4194 }
4195 return 0;
4196}
4197
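/*
 * encode_wq_uninit() - undo encode_wq_init(): stop the monitor thread,
 * release the IRQ and free all queue items. Returns 0 only when every
 * allocated item was recovered.
 */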
4198static s32 encode_wq_uninit(void)
4199{
4200 struct encode_queue_item_s *pitem, *tmp;
4201 struct list_head *head;
4202 u32 count = 0;
4203 s32 r = -1;
4204
4205 enc_pr(LOG_DEBUG, "uninit encode wq.\n");
4206 if (encode_stop_monitor() == 0) {
4207 if ((encode_manager.irq_num >= 0) &&
4208 (encode_manager.irq_requested == true)) {
4209 free_irq(encode_manager.irq_num, &encode_manager);
4210 encode_manager.irq_requested = false;
4211 }
4212 spin_lock(&encode_manager.event.sem_lock);
4213 head = &encode_manager.process_queue;
4214 list_for_each_entry_safe(pitem, tmp, head, list) {
4215 if (pitem) {
4216 list_del(&pitem->list);
4217 kfree(pitem);
4218 count++;
4219 }
4220 }
4221 head = &encode_manager.free_queue;
4222 list_for_each_entry_safe(pitem, tmp, head, list) {
4223 if (pitem) {
4224 list_del(&pitem->list);
4225 kfree(pitem);
4226 count++;
4227 }
4228 }
4229 spin_unlock(&encode_manager.event.sem_lock);
4230 if (count == MAX_ENCODE_REQUEST)
4231 r = 0;
4232 else {
4233 enc_pr(LOG_ERROR, "lost some request item %d.\n",
4234 MAX_ENCODE_REQUEST - count);
4235 }
4236 }
4237 return r;
4238}
4239
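/* sysfs "encode_status" read handler: dump manager state to the kernel log. */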
4240static ssize_t encode_status_show(struct class *cla,
4241 struct class_attribute *attr, char *buf)
4242{
4243 u32 process_count = 0;
4244 u32 free_count = 0;
4245 struct encode_queue_item_s *pitem = NULL;
4246 struct encode_wq_s *current_wq = NULL;
4247 struct encode_wq_s *last_wq = NULL;
4248 struct list_head *head = NULL;
4249 s32 irq_num = 0;
4250 u32 hw_status = 0;
4251 u32 process_queue_state = 0;
4252 u32 wq_count = 0;
4253 u32 ucode_index;
4254 bool need_reset;
4255 bool process_irq;
4256 bool inited;
4257 bool use_reserve;
4258 struct Buff_s reserve_mem;
4259 u32 max_instance;
4260#ifdef CONFIG_CMA
4261 bool check_cma = false;
4262#endif
4263
4264 spin_lock(&encode_manager.event.sem_lock);
4265 head = &encode_manager.free_queue;
4266 list_for_each_entry(pitem, head, list) {
4267 free_count++;
4268 if (free_count > MAX_ENCODE_REQUEST)
4269 break;
4270 }
4271
4272 head = &encode_manager.process_queue;
4273 list_for_each_entry(pitem, head, list) {
4274 process_count++;
4275 if (process_count > MAX_ENCODE_REQUEST)
4276 break;
4277 }
4278
4279 current_wq = encode_manager.current_wq;
4280 last_wq = encode_manager.last_wq;
4281 pitem = encode_manager.current_item;
4282 irq_num = encode_manager.irq_num;
4283 hw_status = encode_manager.encode_hw_status;
4284 process_queue_state = encode_manager.process_queue_state;
4285 wq_count = encode_manager.wq_count;
4286 ucode_index = encode_manager.ucode_index;
4287 need_reset = encode_manager.need_reset;
4288 process_irq = encode_manager.process_irq;
4289 inited = encode_manager.inited;
4290 use_reserve = encode_manager.use_reserve;
4291 reserve_mem.buf_start = encode_manager.reserve_mem.buf_start;
4292 reserve_mem.buf_size = encode_manager.reserve_mem.buf_size;
4293
4294 max_instance = encode_manager.max_instance;
4295#ifdef CONFIG_CMA
4296 check_cma = encode_manager.check_cma;
4297#endif
4298
4299 spin_unlock(&encode_manager.event.sem_lock);
4300
4301 enc_pr(LOG_DEBUG,
4302 "encode process queue count: %d, free queue count: %d.\n",
4303 process_count, free_count);
4304 enc_pr(LOG_DEBUG,
4305 "encode curent wq: %p, last wq: %p, wq count: %d, max_instance: %d.\n",
4306 current_wq, last_wq, wq_count, max_instance);
4307 if (current_wq)
4308 enc_pr(LOG_DEBUG,
4309 "encode curent wq -- encode width: %d, encode height: %d.\n",
4310 current_wq->pic.encoder_width,
4311 current_wq->pic.encoder_height);
4312 enc_pr(LOG_DEBUG,
4313 "encode curent pitem: %p, ucode_index: %d, hw_status: %d, need_reset: %s, process_irq: %s.\n",
4314 pitem, ucode_index, hw_status, need_reset ? "true" : "false",
4315 process_irq ? "true" : "false");
4316 enc_pr(LOG_DEBUG,
4317 "encode irq num: %d, inited: %s, process_queue_state: %d.\n",
4318 irq_num, inited ? "true" : "false", process_queue_state);
4319 if (use_reserve) {
4320 enc_pr(LOG_DEBUG,
4321 "encode use reserve memory, buffer start: 0x%x, size: %d MB.\n",
4322 reserve_mem.buf_start,
4323 reserve_mem.buf_size / SZ_1M);
4324 } else {
4325#ifdef CONFIG_CMA
4326 enc_pr(LOG_DEBUG, "encode check cma: %s.\n",
4327 check_cma ? "true" : "false");
4328#endif
4329 }
4330 return snprintf(buf, 40, "encode max instance: %d\n", max_instance);
4331}
4332
4333static struct class_attribute amvenc_class_attrs[] = {
4334 __ATTR(encode_status,
4335 S_IRUGO,
4336 encode_status_show,
4337 NULL),
4338 __ATTR_NULL
4339};
4340
4341static struct class amvenc_avc_class = {
4342 .name = CLASS_NAME,
4343 .class_attrs = amvenc_class_attrs,
4344};
4345
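/* Register the amvenc_avc character device, class and device node. */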
4346s32 init_avc_device(void)
4347{
4348 s32 r = 0;
4349
4350 r = register_chrdev(0, DEVICE_NAME, &amvenc_avc_fops);
4351 if (r <= 0) {
4352 enc_pr(LOG_ERROR, "register amvenc_avc device error.\n");
4353 return r;
4354 }
4355 avc_device_major = r;
4356
4357 r = class_register(&amvenc_avc_class);
4358 if (r < 0) {
4359 enc_pr(LOG_ERROR, "error create amvenc_avc class.\n");
4360 return r;
4361 }
4362
4363 amvenc_avc_dev = device_create(&amvenc_avc_class, NULL,
4364 MKDEV(avc_device_major, 0), NULL,
4365 DEVICE_NAME);
4366
4367 if (IS_ERR(amvenc_avc_dev)) {
4368 enc_pr(LOG_ERROR, "create amvenc_avc device error.\n");
4369 class_unregister(&amvenc_avc_class);
4370 return -1;
4371 }
4372 return r;
4373}
4374
4375s32 uninit_avc_device(void)
4376{
4377 if (amvenc_avc_dev)
4378 device_destroy(&amvenc_avc_class, MKDEV(avc_device_major, 0));
4379
4380 class_destroy(&amvenc_avc_class);
4381
4382 unregister_chrdev(avc_device_major, DEVICE_NAME);
4383 return 0;
4384}
4385
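/*
 * avc_mem_device_init() - reserved-memory hook.
 *
 * Splits the reserved region into per-instance buffers of
 * amvenc_buffspec[0].min_buffsize each and enables reserve-memory mode.
 */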
4386static s32 avc_mem_device_init(struct reserved_mem *rmem, struct device *dev)
4387{
4388 s32 r;
4389 struct resource res;
4390
4391 if (!rmem) {
4392 enc_pr(LOG_ERROR,
4393 "Can not obtain I/O memory, and will allocate avc buffer!\n");
4394 r = -EFAULT;
4395 return r;
4396 }
4397 res.start = (phys_addr_t)rmem->base;
4398 res.end = res.start + (phys_addr_t)rmem->size - 1;
4399 encode_manager.reserve_mem.buf_start = res.start;
4400 encode_manager.reserve_mem.buf_size = res.end - res.start + 1;
4401
4402 if (encode_manager.reserve_mem.buf_size >=
4403 amvenc_buffspec[0].min_buffsize) {
4404 encode_manager.max_instance =
4405 encode_manager.reserve_mem.buf_size /
4406 amvenc_buffspec[0].min_buffsize;
4407 if (encode_manager.max_instance > MAX_ENCODE_INSTANCE)
4408 encode_manager.max_instance = MAX_ENCODE_INSTANCE;
4409 encode_manager.reserve_buff = kzalloc(
4410 encode_manager.max_instance *
4411 sizeof(struct Buff_s), GFP_KERNEL);
4412 if (encode_manager.reserve_buff) {
4413 u32 i;
4414 struct Buff_s *reserve_buff;
4415 u32 max_instance = encode_manager.max_instance;
4416
4417 for (i = 0; i < max_instance; i++) {
4418 reserve_buff = &encode_manager.reserve_buff[i];
4419 reserve_buff->buf_start =
4420 i *
4421 amvenc_buffspec[0]
4422 .min_buffsize +
4423 encode_manager.reserve_mem.buf_start;
4424 reserve_buff->buf_size =
4425 amvenc_buffspec[0].min_buffsize;
4426 reserve_buff->used = false;
4427 }
4428 encode_manager.use_reserve = true;
4429 r = 0;
4430 enc_pr(LOG_DEBUG,
4431 "amvenc_avc use reserve memory, buff start: 0x%x, size: 0x%x, max instance is %d\n",
4432 encode_manager.reserve_mem.buf_start,
4433 encode_manager.reserve_mem.buf_size,
4434 encode_manager.max_instance);
4435 } else {
4436 enc_pr(LOG_ERROR,
4437 "amvenc_avc alloc reserve buffer pointer fail. max instance is %d.\n",
4438 encode_manager.max_instance);
4439 encode_manager.max_instance = 0;
4440 encode_manager.reserve_mem.buf_start = 0;
4441 encode_manager.reserve_mem.buf_size = 0;
4442 r = -ENOMEM;
4443 }
4444 } else {
4445 enc_pr(LOG_ERROR,
4446 "amvenc_avc memory resource too small, size is 0x%x. Need 0x%x bytes at least.\n",
4447 encode_manager.reserve_mem.buf_size,
4448 amvenc_buffspec[0]
4449 .min_buffsize);
4450 encode_manager.reserve_mem.buf_start = 0;
4451 encode_manager.reserve_mem.buf_size = 0;
4452 r = -ENOMEM;
4453 }
4454 return r;
4455}
4456
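/*
 * amvenc_avc_probe() - platform probe: set up reserved/CMA memory,
 * clocks, optional reset control, the encode IRQ, the work-queue
 * manager and the character device.
 */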
4457static s32 amvenc_avc_probe(struct platform_device *pdev)
4458{
4459 /* struct resource mem; */
4460 s32 res_irq;
4461 s32 idx;
4462 s32 r;
4463
4464 enc_pr(LOG_ERROR, "amvenc_avc probe start.\n");
4465
4466 encode_manager.this_pdev = pdev;
4467#ifdef CONFIG_CMA
4468 encode_manager.check_cma = false;
4469#endif
4470 encode_manager.reserve_mem.buf_start = 0;
4471 encode_manager.reserve_mem.buf_size = 0;
4472 encode_manager.use_reserve = false;
4473 encode_manager.max_instance = 0;
4474 encode_manager.reserve_buff = NULL;
4475
4476 idx = of_reserved_mem_device_init(&pdev->dev);
4477
4478 if (idx != 0) {
4479 enc_pr(LOG_DEBUG,
4480 "amvenc_avc_probe -- reserved memory config fail.\n");
4481 }
4482
4483
4484 if (encode_manager.use_reserve == false) {
4485#ifndef CONFIG_CMA
4486 enc_pr(LOG_ERROR,
4487 "amvenc_avc memory is invaild, probe fail!\n");
4488 return -EFAULT;
4489#else
4490 encode_manager.cma_pool_size =
4491 (codec_mm_get_total_size() > (MIN_SIZE * 3)) ?
4492 (MIN_SIZE * 3) : codec_mm_get_total_size();
4493 enc_pr(LOG_DEBUG,
4494 "amvenc_avc - cma memory pool size: %d MB\n",
4495 (u32)encode_manager.cma_pool_size / SZ_1M);
4496#endif
4497 }
4498
4499 if (hcodec_clk_prepare(&pdev->dev, &s_hcodec_clks)) {
4500 //err = -ENOENT;
4501 enc_pr(LOG_ERROR, "[%s:%d] probe hcodec enc failed\n", __FUNCTION__, __LINE__);
4502 //goto ERROR_PROBE_DEVICE;
4503 return -EINVAL;
4504 }
4505
4506 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_SC2) {
4507 hcodec_rst = devm_reset_control_get(&pdev->dev, "hcodec_rst");
4508 if (IS_ERR(hcodec_rst))
4509 pr_err("amvenc probe, hcodec get reset failed: %ld\n", PTR_ERR(hcodec_rst));
4510 }
4511
4512 res_irq = platform_get_irq(pdev, 0);
4513 if (res_irq < 0) {
4514 enc_pr(LOG_ERROR, "[%s] get irq error!", __func__);
4515 return -EINVAL;
4516 }
4517
4518 encode_manager.irq_num = res_irq;
4519 if (encode_wq_init()) {
4520 kfree(encode_manager.reserve_buff);
4521 encode_manager.reserve_buff = NULL;
4522 enc_pr(LOG_ERROR, "encode work queue init error.\n");
4523 return -EFAULT;
4524 }
4525
4526 r = init_avc_device();
4527 enc_pr(LOG_INFO, "amvenc_avc probe end.\n");
4528 return r;
4529}
4530
4531static s32 amvenc_avc_remove(struct platform_device *pdev)
4532{
4533 kfree(encode_manager.reserve_buff);
4534 encode_manager.reserve_buff = NULL;
4535 if (encode_wq_uninit())
4536 enc_pr(LOG_ERROR, "encode work queue uninit error.\n");
4537 uninit_avc_device();
4538 hcodec_clk_unprepare(&pdev->dev, &s_hcodec_clks);
4539 enc_pr(LOG_INFO, "amvenc_avc remove.\n");
4540 return 0;
4541}
4542
4543static const struct of_device_id amlogic_avcenc_dt_match[] = {
4544 {
4545 .compatible = "amlogic, amvenc_avc",
4546 },
4547 {},
4548};
4549
4550static struct platform_driver amvenc_avc_driver = {
4551 .probe = amvenc_avc_probe,
4552 .remove = amvenc_avc_remove,
4553 .driver = {
4554 .name = DRIVER_NAME,
4555 .of_match_table = amlogic_avcenc_dt_match,
4556 }
4557};
4558
4559static struct codec_profile_t amvenc_avc_profile = {
4560 .name = "avc",
4561 .profile = ""
4562};
4563
4564static s32 __init amvenc_avc_driver_init_module(void)
4565{
4566 enc_pr(LOG_INFO, "amvenc_avc module init\n");
4567
4568 if (platform_driver_register(&amvenc_avc_driver)) {
4569 enc_pr(LOG_ERROR,
4570 "failed to register amvenc_avc driver\n");
4571 return -ENODEV;
4572 }
4573 vcodec_profile_register(&amvenc_avc_profile);
4574 return 0;
4575}
4576
4577static void __exit amvenc_avc_driver_remove_module(void)
4578{
4579 enc_pr(LOG_INFO, "amvenc_avc module remove.\n");
4580
4581 platform_driver_unregister(&amvenc_avc_driver);
4582}
4583
4584static const struct reserved_mem_ops rmem_avc_ops = {
4585 .device_init = avc_mem_device_init,
4586};
4587
4588static s32 __init avc_mem_setup(struct reserved_mem *rmem)
4589{
4590 rmem->ops = &rmem_avc_ops;
4591 enc_pr(LOG_DEBUG, "amvenc_avc reserved mem setup.\n");
4592 return 0;
4593}
4594
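/*
 * enc_dma_buf_map() - import the dma-buf described by @cfg: attach it to
 * the encoder device, map its sg table for DMA and vmap it for CPU
 * access. On success the handles are stored back into @cfg.
 */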
4595static int enc_dma_buf_map(struct enc_dma_cfg *cfg)
4596{
4597 long ret = -1;
4598 int fd = -1;
4599 struct dma_buf *dbuf = NULL;
4600 struct dma_buf_attachment *d_att = NULL;
4601 struct sg_table *sg = NULL;
4602 void *vaddr = NULL;
4603 struct device *dev = NULL;
4604 enum dma_data_direction dir;
4605
4606 if (cfg == NULL || (cfg->fd < 0) || cfg->dev == NULL) {
4607 enc_pr(LOG_ERROR, "error input param\n");
4608 return -EINVAL;
4609 }
4610 enc_pr(LOG_INFO, "enc_dma_buf_map, fd %d\n", cfg->fd);
4611
4612 fd = cfg->fd;
4613 dev = cfg->dev;
4614 dir = cfg->dir;
4615 enc_pr(LOG_INFO, "enc_dma_buffer_map fd %d\n", fd);
4616
4617 dbuf = dma_buf_get(fd);
4618 if (IS_ERR_OR_NULL(dbuf)) {
4619 enc_pr(LOG_ERROR, "failed to get dma buffer, fd %d\n", fd);
4620 return -EINVAL;
4621 }
4622
4623 d_att = dma_buf_attach(dbuf, dev);
4624 if (IS_ERR_OR_NULL(d_att)) {
4625 enc_pr(LOG_ERROR, "failed to set dma attach\n");
4626 goto attach_err;
4627 }
4628
4629 sg = dma_buf_map_attachment(d_att, dir);
4630 if (IS_ERR_OR_NULL(sg)) {
4631 enc_pr(LOG_ERROR, "failed to get dma sg\n");
4632 goto map_attach_err;
4633 }
4634
4635 ret = dma_buf_begin_cpu_access(dbuf, dir);
4636 if (ret != 0) {
4637 enc_pr(LOG_ERROR, "failed to access dma buff\n");
4638 goto access_err;
4639 }
4640
4641 vaddr = dma_buf_vmap(dbuf);
4642 if (vaddr == NULL) {
4643 enc_pr(LOG_ERROR, "failed to vmap dma buf\n");
4644 goto vmap_err;
4645 }
4646 cfg->dbuf = dbuf;
4647 cfg->attach = d_att;
4648 cfg->vaddr = vaddr;
4649 cfg->sg = sg;
4650
4651 return ret;
4652
4653vmap_err:
4654 dma_buf_end_cpu_access(dbuf, dir);
4655
4656access_err:
4657 dma_buf_unmap_attachment(d_att, sg, dir);
4658
4659map_attach_err:
4660 dma_buf_detach(dbuf, d_att);
4661
4662attach_err:
4663 dma_buf_put(dbuf);
4664
4665 return ret;
4666}
4667
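/* Map the dma-buf in @cfg and return the physical address of its first page. */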
4668static int enc_dma_buf_get_phys(struct enc_dma_cfg *cfg, unsigned long *addr)
4669{
4670 struct sg_table *sg_table;
4671 struct page *page;
4672 int ret;
4673 enc_pr(LOG_INFO, "enc_dma_buf_get_phys in\n");
4674
4675 ret = enc_dma_buf_map(cfg);
4676 if (ret < 0) {
4677 enc_pr(LOG_ERROR, "gdc_dma_buf_map failed\n");
4678 return ret;
4679 }
4680 if (cfg->sg) {
4681 sg_table = cfg->sg;
4682 page = sg_page(sg_table->sgl);
4683 *addr = PFN_PHYS(page_to_pfn(page));
4684 ret = 0;
4685 }
4686 enc_pr(LOG_INFO, "enc_dma_buf_get_phys 0x%lx\n", *addr);
4687 return ret;
4688}
4689
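/* Release all mappings and references taken by enc_dma_buf_map(). */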
4690static void enc_dma_buf_unmap(struct enc_dma_cfg *cfg)
4691{
4692 int fd = -1;
4693 struct dma_buf *dbuf = NULL;
4694 struct dma_buf_attachment *d_att = NULL;
4695 struct sg_table *sg = NULL;
4696 void *vaddr = NULL;
4697 struct device *dev = NULL;
4698 enum dma_data_direction dir;
4699
4700 if (cfg == NULL || (cfg->fd < 0) || cfg->dev == NULL
4701 || cfg->dbuf == NULL || cfg->vaddr == NULL
4702 || cfg->attach == NULL || cfg->sg == NULL) {
4703 enc_pr(LOG_ERROR, "Error input param\n");
4704 return;
4705 }
4706
4707 fd = cfg->fd;
4708 dev = cfg->dev;
4709 dir = cfg->dir;
4710 dbuf = cfg->dbuf;
4711 vaddr = cfg->vaddr;
4712 d_att = cfg->attach;
4713 sg = cfg->sg;
4714
4715 dma_buf_vunmap(dbuf, vaddr);
4716
4717 dma_buf_end_cpu_access(dbuf, dir);
4718
4719 dma_buf_unmap_attachment(d_att, sg, dir);
4720
4721 dma_buf_detach(dbuf, d_att);
4722
4723 dma_buf_put(dbuf);
4724 enc_pr(LOG_DEBUG, "enc_dma_buffer_unmap vaddr %p\n",(unsigned *)vaddr);
4725}
4726
4727
4728module_param(fixed_slice_cfg, uint, 0664);
4729MODULE_PARM_DESC(fixed_slice_cfg, "\n fixed_slice_cfg\n");
4730
4731module_param(clock_level, uint, 0664);
4732MODULE_PARM_DESC(clock_level, "\n clock_level\n");
4733
4734module_param(encode_print_level, uint, 0664);
4735MODULE_PARM_DESC(encode_print_level, "\n encode_print_level\n");
4736
4737module_param(no_timeout, uint, 0664);
4738MODULE_PARM_DESC(no_timeout, "\n no_timeout flag for process request\n");
4739
4740module_param(nr_mode, int, 0664);
4741MODULE_PARM_DESC(nr_mode, "\n nr_mode option\n");
4742
4743module_param(qp_table_debug, uint, 0664);
4744MODULE_PARM_DESC(qp_table_debug, "\n print qp table\n");
4745
4746module_param(use_reset_control, uint, 0664);
4747MODULE_PARM_DESC(use_reset_control, "\n use_reset_control\n");
4748
4749module_param(use_ge2d, uint, 0664);
4750MODULE_PARM_DESC(use_ge2d, "\n use_ge2d\n");
4751
4752#ifdef H264_ENC_SVC
4753module_param(svc_enable, uint, 0664);
4754MODULE_PARM_DESC(svc_enable, "\n svc enable\n");
4755module_param(svc_ref_conf, uint, 0664);
4756MODULE_PARM_DESC(svc_ref_conf, "\n svc reference duration config\n");
4757#endif
4758
4759#ifdef MORE_MODULE_PARAM
4760module_param(me_mv_merge_ctl, uint, 0664);
4761MODULE_PARM_DESC(me_mv_merge_ctl, "\n me_mv_merge_ctl\n");
4762
4763module_param(me_step0_close_mv, uint, 0664);
4764MODULE_PARM_DESC(me_step0_close_mv, "\n me_step0_close_mv\n");
4765
4766module_param(me_f_skip_sad, uint, 0664);
4767MODULE_PARM_DESC(me_f_skip_sad, "\n me_f_skip_sad\n");
4768
4769module_param(me_f_skip_weight, uint, 0664);
4770MODULE_PARM_DESC(me_f_skip_weight, "\n me_f_skip_weight\n");
4771
4772module_param(me_mv_weight_01, uint, 0664);
4773MODULE_PARM_DESC(me_mv_weight_01, "\n me_mv_weight_01\n");
4774
4775module_param(me_mv_weight_23, uint, 0664);
4776MODULE_PARM_DESC(me_mv_weight_23, "\n me_mv_weight_23\n");
4777
4778module_param(me_sad_range_inc, uint, 0664);
4779MODULE_PARM_DESC(me_sad_range_inc, "\n me_sad_range_inc\n");
4780
4781module_param(me_sad_enough_01, uint, 0664);
4782MODULE_PARM_DESC(me_sad_enough_01, "\n me_sad_enough_01\n");
4783
4784module_param(me_sad_enough_23, uint, 0664);
4785MODULE_PARM_DESC(me_sad_enough_23, "\n me_sad_enough_23\n");
4786
4787module_param(y_tnr_mc_en, uint, 0664);
4788MODULE_PARM_DESC(y_tnr_mc_en, "\n y_tnr_mc_en option\n");
4789module_param(y_tnr_txt_mode, uint, 0664);
4790MODULE_PARM_DESC(y_tnr_txt_mode, "\n y_tnr_txt_mode option\n");
4791module_param(y_tnr_mot_sad_margin, uint, 0664);
4792MODULE_PARM_DESC(y_tnr_mot_sad_margin, "\n y_tnr_mot_sad_margin option\n");
4793module_param(y_tnr_mot_cortxt_rate, uint, 0664);
4794MODULE_PARM_DESC(y_tnr_mot_cortxt_rate, "\n y_tnr_mot_cortxt_rate option\n");
4795module_param(y_tnr_mot_distxt_ofst, uint, 0664);
4796MODULE_PARM_DESC(y_tnr_mot_distxt_ofst, "\n y_tnr_mot_distxt_ofst option\n");
4797module_param(y_tnr_mot_distxt_rate, uint, 0664);
4798MODULE_PARM_DESC(y_tnr_mot_distxt_rate, "\n y_tnr_mot_distxt_rate option\n");
4799module_param(y_tnr_mot_dismot_ofst, uint, 0664);
4800MODULE_PARM_DESC(y_tnr_mot_dismot_ofst, "\n y_tnr_mot_dismot_ofst option\n");
4801module_param(y_tnr_mot_frcsad_lock, uint, 0664);
4802MODULE_PARM_DESC(y_tnr_mot_frcsad_lock, "\n y_tnr_mot_frcsad_lock option\n");
4803module_param(y_tnr_mot2alp_frc_gain, uint, 0664);
4804MODULE_PARM_DESC(y_tnr_mot2alp_frc_gain, "\n y_tnr_mot2alp_frc_gain option\n");
4805module_param(y_tnr_mot2alp_nrm_gain, uint, 0664);
4806MODULE_PARM_DESC(y_tnr_mot2alp_nrm_gain, "\n y_tnr_mot2alp_nrm_gain option\n");
4807module_param(y_tnr_mot2alp_dis_gain, uint, 0664);
4808MODULE_PARM_DESC(y_tnr_mot2alp_dis_gain, "\n y_tnr_mot2alp_dis_gain option\n");
4809module_param(y_tnr_mot2alp_dis_ofst, uint, 0664);
4810MODULE_PARM_DESC(y_tnr_mot2alp_dis_ofst, "\n y_tnr_mot2alp_dis_ofst option\n");
4811module_param(y_tnr_alpha_min, uint, 0664);
4812MODULE_PARM_DESC(y_tnr_alpha_min, "\n y_tnr_alpha_min option\n");
4813module_param(y_tnr_alpha_max, uint, 0664);
4814MODULE_PARM_DESC(y_tnr_alpha_max, "\n y_tnr_alpha_max option\n");
4815module_param(y_tnr_deghost_os, uint, 0664);
4816MODULE_PARM_DESC(y_tnr_deghost_os, "\n y_tnr_deghost_os option\n");
4817
4818module_param(c_tnr_mc_en, uint, 0664);
4819MODULE_PARM_DESC(c_tnr_mc_en, "\n c_tnr_mc_en option\n");
4820module_param(c_tnr_txt_mode, uint, 0664);
4821MODULE_PARM_DESC(c_tnr_txt_mode, "\n c_tnr_txt_mode option\n");
4822module_param(c_tnr_mot_sad_margin, uint, 0664);
4823MODULE_PARM_DESC(c_tnr_mot_sad_margin, "\n c_tnr_mot_sad_margin option\n");
4824module_param(c_tnr_mot_cortxt_rate, uint, 0664);
4825MODULE_PARM_DESC(c_tnr_mot_cortxt_rate, "\n c_tnr_mot_cortxt_rate option\n");
4826module_param(c_tnr_mot_distxt_ofst, uint, 0664);
4827MODULE_PARM_DESC(c_tnr_mot_distxt_ofst, "\n c_tnr_mot_distxt_ofst option\n");
4828module_param(c_tnr_mot_distxt_rate, uint, 0664);
4829MODULE_PARM_DESC(c_tnr_mot_distxt_rate, "\n c_tnr_mot_distxt_rate option\n");
4830module_param(c_tnr_mot_dismot_ofst, uint, 0664);
4831MODULE_PARM_DESC(c_tnr_mot_dismot_ofst, "\n c_tnr_mot_dismot_ofst option\n");
4832module_param(c_tnr_mot_frcsad_lock, uint, 0664);
4833MODULE_PARM_DESC(c_tnr_mot_frcsad_lock, "\n c_tnr_mot_frcsad_lock option\n");
4834module_param(c_tnr_mot2alp_frc_gain, uint, 0664);
4835MODULE_PARM_DESC(c_tnr_mot2alp_frc_gain, "\n c_tnr_mot2alp_frc_gain option\n");
4836module_param(c_tnr_mot2alp_nrm_gain, uint, 0664);
4837MODULE_PARM_DESC(c_tnr_mot2alp_nrm_gain, "\n c_tnr_mot2alp_nrm_gain option\n");
4838module_param(c_tnr_mot2alp_dis_gain, uint, 0664);
4839MODULE_PARM_DESC(c_tnr_mot2alp_dis_gain, "\n c_tnr_mot2alp_dis_gain option\n");
4840module_param(c_tnr_mot2alp_dis_ofst, uint, 0664);
4841MODULE_PARM_DESC(c_tnr_mot2alp_dis_ofst, "\n c_tnr_mot2alp_dis_ofst option\n");
4842module_param(c_tnr_alpha_min, uint, 0664);
4843MODULE_PARM_DESC(c_tnr_alpha_min, "\n c_tnr_alpha_min option\n");
4844module_param(c_tnr_alpha_max, uint, 0664);
4845MODULE_PARM_DESC(c_tnr_alpha_max, "\n c_tnr_alpha_max option\n");
4846module_param(c_tnr_deghost_os, uint, 0664);
4847MODULE_PARM_DESC(c_tnr_deghost_os, "\n c_tnr_deghost_os option\n");
4848
4849module_param(y_snr_err_norm, uint, 0664);
4850MODULE_PARM_DESC(y_snr_err_norm, "\n y_snr_err_norm option\n");
4851module_param(y_snr_gau_bld_core, uint, 0664);
4852MODULE_PARM_DESC(y_snr_gau_bld_core, "\n y_snr_gau_bld_core option\n");
4853module_param(y_snr_gau_bld_ofst, int, 0664);
4854MODULE_PARM_DESC(y_snr_gau_bld_ofst, "\n y_snr_gau_bld_ofst option\n");
4855module_param(y_snr_gau_bld_rate, uint, 0664);
4856MODULE_PARM_DESC(y_snr_gau_bld_rate, "\n y_snr_gau_bld_rate option\n");
4857module_param(y_snr_gau_alp0_min, uint, 0664);
4858MODULE_PARM_DESC(y_snr_gau_alp0_min, "\n y_snr_gau_alp0_min option\n");
4859module_param(y_snr_gau_alp0_max, uint, 0664);
4860MODULE_PARM_DESC(y_snr_gau_alp0_max, "\n y_snr_gau_alp0_max option\n");
4861module_param(y_bld_beta2alp_rate, uint, 0664);
4862MODULE_PARM_DESC(y_bld_beta2alp_rate, "\n y_bld_beta2alp_rate option\n");
4863module_param(y_bld_beta_min, uint, 0664);
4864MODULE_PARM_DESC(y_bld_beta_min, "\n y_bld_beta_min option\n");
4865module_param(y_bld_beta_max, uint, 0664);
4866MODULE_PARM_DESC(y_bld_beta_max, "\n y_bld_beta_max option\n");
4867
4868module_param(c_snr_err_norm, uint, 0664);
4869MODULE_PARM_DESC(c_snr_err_norm, "\n c_snr_err_norm option\n");
4870module_param(c_snr_gau_bld_core, uint, 0664);
4871MODULE_PARM_DESC(c_snr_gau_bld_core, "\n c_snr_gau_bld_core option\n");
4872module_param(c_snr_gau_bld_ofst, int, 0664);
4873MODULE_PARM_DESC(c_snr_gau_bld_ofst, "\n c_snr_gau_bld_ofst option\n");
4874module_param(c_snr_gau_bld_rate, uint, 0664);
4875MODULE_PARM_DESC(c_snr_gau_bld_rate, "\n c_snr_gau_bld_rate option\n");
4876module_param(c_snr_gau_alp0_min, uint, 0664);
4877MODULE_PARM_DESC(c_snr_gau_alp0_min, "\n c_snr_gau_alp0_min option\n");
4878module_param(c_snr_gau_alp0_max, uint, 0664);
4879MODULE_PARM_DESC(c_snr_gau_alp0_max, "\n c_snr_gau_alp0_max option\n");
4880module_param(c_bld_beta2alp_rate, uint, 0664);
4881MODULE_PARM_DESC(c_bld_beta2alp_rate, "\n c_bld_beta2alp_rate option\n");
4882module_param(c_bld_beta_min, uint, 0664);
4883MODULE_PARM_DESC(c_bld_beta_min, "\n c_bld_beta_min option\n");
4884module_param(c_bld_beta_max, uint, 0664);
4885MODULE_PARM_DESC(c_bld_beta_max, "\n c_bld_beta_max option\n");
4886#endif
4887
4888module_init(amvenc_avc_driver_init_module);
4889module_exit(amvenc_avc_driver_remove_module);
4890RESERVEDMEM_OF_DECLARE(amvenc_avc, "amlogic, amvenc-memory", avc_mem_setup);
4891
4892MODULE_DESCRIPTION("AMLOGIC AVC Video Encoder Driver");
4893MODULE_LICENSE("GPL");
4894MODULE_AUTHOR("simon.zheng <simon.zheng@amlogic.com>");
4895