1/*
2 * drivers/amlogic/amports/encoder.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16*/
17
18#include <linux/kernel.h>
19#include <linux/types.h>
20#include <linux/errno.h>
21#include <linux/interrupt.h>
22#include <linux/timer.h>
23#include <linux/fs.h>
24#include <linux/sched.h>
25#include <linux/slab.h>
26#include <linux/dma-mapping.h>
27#include <linux/platform_device.h>
28#include <linux/spinlock.h>
29#include <linux/ctype.h>
30#include <linux/amlogic/media/frame_sync/ptsserv.h>
31#include <linux/amlogic/media/utils/amstream.h>
32#include <linux/amlogic/media/canvas/canvas.h>
33#include <linux/amlogic/media/canvas/canvas_mgr.h>
34#include <linux/amlogic/media/codec_mm/codec_mm.h>
35
36#include "linux/amlogic/media/utils/vdec_reg.h"
37#include "../../../frame_provider/decoder/utils/vdec.h"
38#include <linux/delay.h>
39#include <linux/poll.h>
40#include <linux/of.h>
41#include <linux/of_fdt.h>
42#include <linux/dma-contiguous.h>
43#include <linux/kthread.h>
44#include <linux/sched/rt.h>
45#include "linux/amlogic/media/utils/amports_config.h"
46#include "encoder.h"
47#include "../../../frame_provider/decoder/utils/amvdec.h"
48#include <linux/amlogic/media/utils/amlog.h>
49#include "../../../stream_input/amports/amports_priv.h"
50#include <linux/of_reserved_mem.h>
51#ifdef CONFIG_AM_JPEG_ENCODER
52#include "jpegenc.h"
53#endif
54
55#define ENCODE_NAME "encoder"
56#define AMVENC_CANVAS_INDEX 0xE4
57#define AMVENC_CANVAS_MAX_INDEX 0xEF
58
59#define MIN_SIZE 20
60#define DUMP_INFO_BYTES_PER_MB 80
61/* #define USE_OLD_DUMP_MC */
62
63#define ADJUSTED_QP_FLAG 64
64
65static s32 avc_device_major;
66static struct device *amvenc_avc_dev;
67#define DRIVER_NAME "amvenc_avc"
68#define CLASS_NAME "amvenc_avc"
69#define DEVICE_NAME "amvenc_avc"
70
71static struct encode_manager_s encode_manager;
72
73#define MULTI_SLICE_MC
74/*same as INIT_ENCODER*/
75#define INTRA_IN_P_TOP
76
77#define ENC_CANVAS_OFFSET AMVENC_CANVAS_INDEX
78
79#define UCODE_MODE_FULL 0
80#define UCODE_MODE_SW_MIX 1
81
82#ifdef USE_VDEC2
83#define STREAM_WR_PTR DOS_SCRATCH20
84#define DECODABLE_MB_Y DOS_SCRATCH21
85#define DECODED_MB_Y DOS_SCRATCH22
86#endif
87
88/* #define ENABLE_IGNORE_FUNCTION */
89
90static u32 anc0_buffer_id;
91static u32 ie_me_mb_type;
92static u32 ie_me_mode;
93static u32 ie_pippeline_block = 3;
94static u32 ie_cur_ref_sel;
95static u32 avc_endian = 6;
96static u32 clock_level = 1;
97static u32 enable_dblk = 1; /* 0: disable, 1: hdec, 2: vdec2 (see select_ucode) */
98
99static u32 encode_print_level = LOG_DEBUG;
100static u32 no_timeout;
101static int nr_mode = -1;
102
103static u32 me_mv_merge_ctl =
104 (0x1 << 31) | /* [31] me_merge_mv_en_16 */
105 (0x1 << 30) | /* [30] me_merge_small_mv_en_16 */
106 (0x1 << 29) | /* [29] me_merge_flex_en_16 */
107 (0x1 << 28) | /* [28] me_merge_sad_en_16 */
108 (0x1 << 27) | /* [27] me_merge_mv_en_8 */
109 (0x1 << 26) | /* [26] me_merge_small_mv_en_8 */
110 (0x1 << 25) | /* [25] me_merge_flex_en_8 */
111 (0x1 << 24) | /* [24] me_merge_sad_en_8 */
112 /* [23:18] me_merge_mv_diff_16 - MV diff <= n pixel can be merged */
113 (0x12 << 18) |
114 /* [17:12] me_merge_mv_diff_8 - MV diff <= n pixel can be merged */
115 (0x2b << 12) |
116 /* [11:0] me_merge_min_sad - SAD >= 0x180 can be merged with other MV */
117 (0x80 << 0);
118 /* ( 0x4 << 18) |
119 // [23:18] me_merge_mv_diff_16 - MV diff <= n pixel can be merged */
120 /* ( 0x3f << 12) |
121 // [17:12] me_merge_mv_diff_8 - MV diff <= n pixel can be merged */
122 /* ( 0xc0 << 0);
123 // [11:0] me_merge_min_sad - SAD >= 0x180 can be merged with other MV */
124
125static u32 me_mv_weight_01 = (0x40 << 24) | (0x30 << 16) | (0x20 << 8) | 0x30;
126static u32 me_mv_weight_23 = (0x40 << 8) | 0x30;
127static u32 me_sad_range_inc = 0x03030303;
128static u32 me_step0_close_mv = 0x003ffc21;
129static u32 me_f_skip_sad;
130static u32 me_f_skip_weight;
131static u32 me_sad_enough_01;/* 0x00018010; */
132static u32 me_sad_enough_23;/* 0x00000020; */
133
134static u32 p_intra_config = (30 << 16) | (0xffff << 0);
135
136/* [31:16] TARGET_BITS_PER_MB */
137/* [15:8] MIN_QUANT */
138/* [7:0] MAX_QUANT */
139static u32 p_mb_quant_config = (20 << 16) | (24 << 8) | (24 << 0);
140
141/* [31:24] INC_4_BITS */
142/* [23:16] INC_3_BITS */
143/* [15:8] INC_2_BITS */
144/* [7:0] INC_1_BITS */
145static u32 p_mb_quant_inc_cfg = (20 << 24) | (15 << 16) | (10 << 8) | (5 << 0);
146
147/* [31:24] DEC_4_BITS */
148/* [23:16] DEC_3_BITS */
149/* [15:8] DEC_2_BITS */
150/* [7:0] DEC_1_BITS */
151static u32 p_mb_quant_dec_cfg = (60 << 24) | (40 << 16) | (30 << 8) | (20 << 0);
152
153/* [31:16] NUM_ROWS_PER_SLICE_P */
154/* [15:0] NUM_ROWS_PER_SLICE_I */
155static u32 fixed_slice_cfg;
156
157/* y tnr */
158static unsigned int y_tnr_mc_en = 1;
159static unsigned int y_tnr_txt_mode;
160static unsigned int y_tnr_mot_sad_margin = 1;
161static unsigned int y_tnr_mot_cortxt_rate = 1;
162static unsigned int y_tnr_mot_distxt_ofst = 5;
163static unsigned int y_tnr_mot_distxt_rate = 4;
164static unsigned int y_tnr_mot_dismot_ofst = 4;
165static unsigned int y_tnr_mot_frcsad_lock = 8;
166static unsigned int y_tnr_mot2alp_frc_gain = 10;
167static unsigned int y_tnr_mot2alp_nrm_gain = 216;
168static unsigned int y_tnr_mot2alp_dis_gain = 128;
169static unsigned int y_tnr_mot2alp_dis_ofst = 32;
170static unsigned int y_tnr_alpha_min = 32;
171static unsigned int y_tnr_alpha_max = 63;
172static unsigned int y_tnr_deghost_os;
173/* c tnr */
174static unsigned int c_tnr_mc_en = 1;
175static unsigned int c_tnr_txt_mode;
176static unsigned int c_tnr_mot_sad_margin = 1;
177static unsigned int c_tnr_mot_cortxt_rate = 1;
178static unsigned int c_tnr_mot_distxt_ofst = 5;
179static unsigned int c_tnr_mot_distxt_rate = 4;
180static unsigned int c_tnr_mot_dismot_ofst = 4;
181static unsigned int c_tnr_mot_frcsad_lock = 8;
182static unsigned int c_tnr_mot2alp_frc_gain = 10;
183static unsigned int c_tnr_mot2alp_nrm_gain = 216;
184static unsigned int c_tnr_mot2alp_dis_gain = 128;
185static unsigned int c_tnr_mot2alp_dis_ofst = 32;
186static unsigned int c_tnr_alpha_min = 32;
187static unsigned int c_tnr_alpha_max = 63;
188static unsigned int c_tnr_deghost_os;
189/* y snr */
190static unsigned int y_snr_err_norm = 1;
191static unsigned int y_snr_gau_bld_core = 1;
192static int y_snr_gau_bld_ofst = -1;
193static unsigned int y_snr_gau_bld_rate = 48;
194static unsigned int y_snr_gau_alp0_min;
195static unsigned int y_snr_gau_alp0_max = 63;
196static unsigned int y_bld_beta2alp_rate = 16;
197static unsigned int y_bld_beta_min;
198static unsigned int y_bld_beta_max = 63;
199/* c snr */
200static unsigned int c_snr_err_norm = 1;
201static unsigned int c_snr_gau_bld_core = 1;
202static int c_snr_gau_bld_ofst = -1;
203static unsigned int c_snr_gau_bld_rate = 48;
204static unsigned int c_snr_gau_alp0_min;
205static unsigned int c_snr_gau_alp0_max = 63;
206static unsigned int c_bld_beta2alp_rate = 16;
207static unsigned int c_bld_beta_min;
208static unsigned int c_bld_beta_max = 63;
209
210static DEFINE_SPINLOCK(lock);
211
212#define ADV_MV_LARGE_16x8 1
213#define ADV_MV_LARGE_8x16 1
214#define ADV_MV_LARGE_16x16 1
215
216/* me weight offset should not be very small; it is used by the v1 me module. */
217/* the min real sad for me is 16 by hardware. */
218#define ME_WEIGHT_OFFSET 0x520
219#define I4MB_WEIGHT_OFFSET 0x655
220#define I16MB_WEIGHT_OFFSET 0x560
221
222#define ADV_MV_16x16_WEIGHT 0x080
223#define ADV_MV_16_8_WEIGHT 0x0e0
224#define ADV_MV_8x8_WEIGHT 0x240
225#define ADV_MV_4x4x4_WEIGHT 0x3000
226
227#define IE_SAD_SHIFT_I16 0x001
228#define IE_SAD_SHIFT_I4 0x001
229#define ME_SAD_SHIFT_INTER 0x001
230
231#define STEP_2_SKIP_SAD 0
232#define STEP_1_SKIP_SAD 0
233#define STEP_0_SKIP_SAD 0
234#define STEP_2_SKIP_WEIGHT 0
235#define STEP_1_SKIP_WEIGHT 0
236#define STEP_0_SKIP_WEIGHT 0
237
238#define ME_SAD_RANGE_0 0x1 /* 0x0 */
239#define ME_SAD_RANGE_1 0x0
240#define ME_SAD_RANGE_2 0x0
241#define ME_SAD_RANGE_3 0x0
242
243/* use 0 for v3, 0x18 for v2 */
244#define ME_MV_PRE_WEIGHT_0 0x18
245/* use 0 for v3, 0x18 for v2 */
246#define ME_MV_PRE_WEIGHT_1 0x18
247#define ME_MV_PRE_WEIGHT_2 0x0
248#define ME_MV_PRE_WEIGHT_3 0x0
249
250/* use 0 for v3, 0x18 for v2 */
251#define ME_MV_STEP_WEIGHT_0 0x18
252/* use 0 for v3, 0x18 for v2 */
253#define ME_MV_STEP_WEIGHT_1 0x18
254#define ME_MV_STEP_WEIGHT_2 0x0
255#define ME_MV_STEP_WEIGHT_3 0x0
256
257#define ME_SAD_ENOUGH_0_DATA 0x00
258#define ME_SAD_ENOUGH_1_DATA 0x04
259#define ME_SAD_ENOUGH_2_DATA 0x11
260#define ADV_MV_8x8_ENOUGH_DATA 0x20
261
262/* V4_COLOR_BLOCK_FIX */
263#define V3_FORCE_SKIP_SAD_0 0x10
264/* 4 Blocks */
265#define V3_FORCE_SKIP_SAD_1 0x60
266/* 16 Blocks + V3_SKIP_WEIGHT_2 */
267#define V3_FORCE_SKIP_SAD_2 0x250
268/* almost disable it -- using the t_lac_coeff_2 output for F_ZERO is better */
269#define V3_ME_F_ZERO_SAD (ME_WEIGHT_OFFSET + 0x10)
270
271#define V3_IE_F_ZERO_SAD_I16 (I16MB_WEIGHT_OFFSET + 0x10)
272#define V3_IE_F_ZERO_SAD_I4 (I4MB_WEIGHT_OFFSET + 0x20)
273
274#define V3_SKIP_WEIGHT_0 0x10
275/* 4 Blocks 8 separate search sad can be very low */
276#define V3_SKIP_WEIGHT_1 0x8 /* (4 * ME_MV_STEP_WEIGHT_1 + 0x100) */
277#define V3_SKIP_WEIGHT_2 0x3
278
279#define V3_LEVEL_1_F_SKIP_MAX_SAD 0x0
280#define V3_LEVEL_1_SKIP_MAX_SAD 0x6
281
282#define I4_ipred_weight_most 0x18
283#define I4_ipred_weight_else 0x28
284
285#define C_ipred_weight_V 0x04
286#define C_ipred_weight_H 0x08
287#define C_ipred_weight_DC 0x0c
288
289#define I16_ipred_weight_V 0x04
290#define I16_ipred_weight_H 0x08
291#define I16_ipred_weight_DC 0x0c
292
293/* 0x00 same as disable */
294#define v3_left_small_max_ie_sad 0x00
295#define v3_left_small_max_me_sad 0x40
296
297#define v5_use_small_diff_cnt 0
298#define v5_simple_mb_inter_all_en 1
299#define v5_simple_mb_inter_8x8_en 1
300#define v5_simple_mb_inter_16_8_en 1
301#define v5_simple_mb_inter_16x16_en 1
302#define v5_simple_mb_intra_en 1
303#define v5_simple_mb_C_en 0
304#define v5_simple_mb_Y_en 1
305#define v5_small_diff_Y 0x10
306#define v5_small_diff_C 0x18
307/* shift 8-bits, 2, 1, 0, -1, -2, -3, -4 */
308#define v5_simple_dq_setting 0x43210fed
309#define v5_simple_me_weight_setting 0
310
311#ifndef USE_OLD_DUMP_MC
312static u32 qp_table_pr;
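/*
 * The 64 entries below appear to pack (table index << 16) | SAD threshold,
 * in four groups of 16 covering ME step0, step1, step2 and the step2
 * 4x4-8x8 case; the table is presumably handed to the encoder microcode
 * rather than decoded by the driver itself.
 */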
313static u32 v3_mv_sad[64] = {
314 /* For step0 */
315 0x00000004,
316 0x00010008,
317 0x00020010,
318 0x00030018,
319 0x00040020,
320 0x00050028,
321 0x00060038,
322 0x00070048,
323 0x00080058,
324 0x00090068,
325 0x000a0080,
326 0x000b0098,
327 0x000c00b0,
328 0x000d00c8,
329 0x000e00e8,
330 0x000f0110,
331 /* For step1 */
332 0x00100002,
333 0x00110004,
334 0x00120008,
335 0x0013000c,
336 0x00140010,
337 0x00150014,
338 0x0016001c,
339 0x00170024,
340 0x0018002c,
341 0x00190034,
342 0x001a0044,
343 0x001b0054,
344 0x001c0064,
345 0x001d0074,
346 0x001e0094,
347 0x001f00b4,
348 /* For step2 */
349 0x00200006,
350 0x0021000c,
351 0x0022000c,
352 0x00230018,
353 0x00240018,
354 0x00250018,
355 0x00260018,
356 0x00270030,
357 0x00280030,
358 0x00290030,
359 0x002a0030,
360 0x002b0030,
361 0x002c0030,
362 0x002d0030,
363 0x002e0030,
364 0x002f0050,
365 /* For step2 4x4-8x8 */
366 0x00300001,
367 0x00310002,
368 0x00320002,
369 0x00330004,
370 0x00340004,
371 0x00350004,
372 0x00360004,
373 0x00370006,
374 0x00380006,
375 0x00390006,
376 0x003a0006,
377 0x003b0006,
378 0x003c0006,
379 0x003d0006,
380 0x003e0006,
381 0x003f0006
382};
383#endif
384
385static struct BuffInfo_s amvenc_buffspec[] = {
386 {
387 .lev_id = AMVENC_BUFFER_LEVEL_480P,
388 .max_width = 640,
389 .max_height = 480,
390 .min_buffsize = 0x580000,
391 .dct = {
392 .buf_start = 0,
393 .buf_size = 0xfe000,
394 },
395 .dec0_y = {
396 .buf_start = 0x100000,
397 .buf_size = 0x80000,
398 },
399 .dec1_y = {
400 .buf_start = 0x180000,
401 .buf_size = 0x80000,
402 },
403 .assit = {
404 .buf_start = 0x240000,
405 .buf_size = 0xc0000,
406 },
407 .bitstream = {
408 .buf_start = 0x300000,
409 .buf_size = 0x100000,
410 },
411 .inter_bits_info = {
412 .buf_start = 0x400000,
413 .buf_size = 0x2000,
414 },
415 .inter_mv_info = {
416 .buf_start = 0x402000,
417 .buf_size = 0x13000,
418 },
419 .intra_bits_info = {
420 .buf_start = 0x420000,
421 .buf_size = 0x2000,
422 },
423 .intra_pred_info = {
424 .buf_start = 0x422000,
425 .buf_size = 0x13000,
426 },
427 .qp_info = {
428 .buf_start = 0x438000,
429 .buf_size = 0x8000,
430 },
431 .scale_buff = {
432 .buf_start = 0,
433 .buf_size = 0,
434 }
435#ifdef USE_VDEC2
436 ,
437 .vdec2_info = {
438 .buf_start = 0x440000,
439 .buf_size = 0x13e000,
440 }
441#endif
442 }, {
443 .lev_id = AMVENC_BUFFER_LEVEL_720P,
444 .max_width = 1280,
445 .max_height = 720,
446 .min_buffsize = 0x9e0000,
447 .dct = {
448 .buf_start = 0,
449 .buf_size = 0x2f8000,
450 },
451 .dec0_y = {
452 .buf_start = 0x300000,
453 .buf_size = 0x180000,
454 },
455 .dec1_y = {
456 .buf_start = 0x480000,
457 .buf_size = 0x180000,
458 },
459 .assit = {
460 .buf_start = 0x640000,
461 .buf_size = 0xc0000,
462 },
463 .bitstream = {
464 .buf_start = 0x700000,
465 .buf_size = 0x100000,
466 },
467 .inter_bits_info = {
468 .buf_start = 0x800000,
469 .buf_size = 0x4000,
470 },
471 .inter_mv_info = {
472 .buf_start = 0x804000,
473 .buf_size = 0x40000,
474 },
475 .intra_bits_info = {
476 .buf_start = 0x848000,
477 .buf_size = 0x4000,
478 },
479 .intra_pred_info = {
480 .buf_start = 0x84c000,
481 .buf_size = 0x40000,
482 },
483 .qp_info = {
484 .buf_start = 0x890000,
485 .buf_size = 0x8000,
486 },
487 .scale_buff = {
488 .buf_start = 0,
489 .buf_size = 0,
490 }
491#ifdef USE_VDEC2
492 ,
493 .vdec2_info = {
494 .buf_start = 0x8a0000,
495 .buf_size = 0x13e000,
496 }
497#endif
498 }, {
499 .lev_id = AMVENC_BUFFER_LEVEL_1080P,
500 .max_width = 1920,
501 .max_height = 1088,
502 .min_buffsize = 0x1370000,
503 .dct = {
504 .buf_start = 0,
505 .buf_size = 0x6ba000,
506 },
507 .dec0_y = {
508 .buf_start = 0x6d0000,
509 .buf_size = 0x300000,
510 },
511 .dec1_y = {
512 .buf_start = 0x9d0000,
513 .buf_size = 0x300000,
514 },
515 .assit = {
516 .buf_start = 0xd10000,
517 .buf_size = 0xc0000,
518 },
519 .bitstream = {
520 .buf_start = 0xe00000,
521 .buf_size = 0x100000,
522 },
523 .scale_buff = {
524 .buf_start = 0xf00000,
525 .buf_size = 0x300000,
526 },
527 .inter_bits_info = {
528 .buf_start = 0x1200000,
529 .buf_size = 0x8000,
530 },
531 .inter_mv_info = {
532 .buf_start = 0x1208000,
533 .buf_size = 0x80000,
534 },
535 .intra_bits_info = {
536 .buf_start = 0x1288000,
537 .buf_size = 0x8000,
538 },
539 .intra_pred_info = {
540 .buf_start = 0x1290000,
541 .buf_size = 0x80000,
542 },
543 .qp_info = {
544 .buf_start = 0x1310000,
545 .buf_size = 0x8000,
546 }
547#ifdef USE_VDEC2
548 ,
549 .vdec2_info = {
550 .buf_start = 0x12b0000,
551 .buf_size = 0x13e000,
552 }
553#endif
554 }
555};
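/*
 * Every buf_start above is an offset from wq->mem.buf_start (see
 * avc_buffspec_init() below), and min_buffsize is evidently the smallest
 * total allocation for that level; e.g. for the 1080P entry the last
 * region, qp_info, ends at 0x1310000 + 0x8000 = 0x1318000, which fits
 * inside min_buffsize 0x1370000.
 */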
556
557enum ucode_type_e {
558 UCODE_DUMP = 0,
559 UCODE_DUMP_DBLK,
560 UCODE_DUMP_M2_DBLK,
561 UCODE_DUMP_GX_DBLK,
562 UCODE_SW,
563 UCODE_SW_HDEC_DBLK,
564 UCODE_SW_VDEC2_DBLK,
565 UCODE_SW_HDEC_M2_DBLK,
566 UCODE_SW_HDEC_GX_DBLK,
567 UCODE_VDEC2,
568 UCODE_GX,
569 UCODE_GXTV,
570 UCODE_TXL,
571 UCODE_MAX
572};
573
574const char *ucode_name[] = {
575 "mix_dump_mc",
576 "mix_dump_mc_dblk",
577 "mix_dump_mc_m2_dblk",
578 "mix_dump_mc_gx_dblk",
579 "mix_sw_mc",
580 "mix_sw_mc_hdec_dblk",
581 "mix_sw_mc_vdec2_dblk",
582 "mix_sw_mc_hdec_m2_dblk",
583 "mix_sw_mc_hdec_gx_dblk",
584 "vdec2_encoder_mc",
585 "gx_h264_enc",
586 "h264_enc_mc_gxtv",
587 "h264_enc_mc_txl",
588};
589
590static void dma_flush(u32 buf_start, u32 buf_size);
591static void cache_flush(u32 buf_start, u32 buf_size);
592
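/*
 * select_ucode() maps the ucode mode plus the SoC generation to a firmware
 * name from ucode_name[]: with the new dump MC enabled, UCODE_MODE_FULL
 * picks "h264_enc_mc_txl" on TXL, "gx_h264_enc" on GXL/GXBB,
 * "h264_enc_mc_gxtv" on GXTVBB, and falls back to the older mix_dump
 * microcodes on pre-GX chips (honoring enable_dblk). It also sets
 * encode_manager.dblk_fix_flag when running SW_MIX mode on MG9TV or newer.
 */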
593static const char *select_ucode(u32 ucode_index)
594{
595 enum ucode_type_e ucode = UCODE_DUMP;
596 switch (ucode_index) {
597 case UCODE_MODE_FULL:
598 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_TXL) {
599#ifndef USE_OLD_DUMP_MC
600 ucode = UCODE_TXL;
601#else
602 ucode = UCODE_DUMP_GX_DBLK;
603#endif
604 } else if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXL) {
605#ifndef USE_OLD_DUMP_MC
606 ucode = UCODE_GX;
607#else
608 ucode = UCODE_DUMP_GX_DBLK;
609#endif
610 } else if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB) {
611#ifndef USE_OLD_DUMP_MC
612 ucode = UCODE_GXTV;
613#else
614 ucode = UCODE_DUMP_GX_DBLK;
615#endif
616 } else if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) {
617#ifndef USE_OLD_DUMP_MC
618 ucode = UCODE_GX;
619#else
620 ucode = UCODE_DUMP_GX_DBLK;
621#endif
622 } else if (get_cpu_type() >= MESON_CPU_MAJOR_ID_MG9TV) {
623 ucode = UCODE_DUMP_M2_DBLK;
624 } else {
625 if (enable_dblk)
626 ucode = UCODE_DUMP_DBLK;
627 else
628 ucode = UCODE_DUMP;
629 }
630 break;
631 case UCODE_MODE_SW_MIX:
632 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) {
633 ucode = UCODE_SW_HDEC_GX_DBLK;
634 } else if (get_cpu_type() >= MESON_CPU_MAJOR_ID_MG9TV) {
635 ucode = UCODE_SW_HDEC_M2_DBLK;
636 } else if (get_cpu_type() == MESON_CPU_MAJOR_ID_M8B) {
637 if (enable_dblk)
638 ucode = UCODE_SW_HDEC_DBLK;
639 else
640 ucode = UCODE_SW;
641 } else {
642 if (enable_dblk == 1)
643 ucode = UCODE_SW_HDEC_DBLK;
644 else if (enable_dblk == 2)
645 ucode = UCODE_SW_VDEC2_DBLK;
646 else
647 ucode = UCODE_SW;
648 }
649 break;
650 default:
651 break;
652 }
653
654 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_MG9TV)
655 encode_manager.dblk_fix_flag =
656 (ucode_index == UCODE_MODE_SW_MIX);
657 else
658 encode_manager.dblk_fix_flag = false;
659 return (const char *)ucode_name[ucode];
660}
661
662#ifndef USE_OLD_DUMP_MC
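/*
 * hcodec_prog_qtbl() loads three 8-word quant tables (I4x4, I16x16 and
 * inter/ME) into the HCODEC quant RAM at word addresses 0, 8 and 16:
 * HCODEC_Q_QUANT_CONTROL selects the base address (with the update bit
 * set) and the following writes to HCODEC_QUANT_TABLE_DATA fill the
 * table. Judging by the "& 0xff" extraction in avc_prot_init(), each
 * 32-bit word appears to pack four 8-bit QP values.
 */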
663static void hcodec_prog_qtbl(struct encode_wq_s *wq)
664{
665 WRITE_HREG(HCODEC_Q_QUANT_CONTROL,
666 (0 << 23) | /* quant_table_addr */
667 (1 << 22)); /* quant_table_addr_update */
668
669 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
670 wq->quant_tbl_i4[wq->qp_table_id][0]);
671 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
672 wq->quant_tbl_i4[wq->qp_table_id][1]);
673 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
674 wq->quant_tbl_i4[wq->qp_table_id][2]);
675 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
676 wq->quant_tbl_i4[wq->qp_table_id][3]);
677 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
678 wq->quant_tbl_i4[wq->qp_table_id][4]);
679 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
680 wq->quant_tbl_i4[wq->qp_table_id][5]);
681 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
682 wq->quant_tbl_i4[wq->qp_table_id][6]);
683 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
684 wq->quant_tbl_i4[wq->qp_table_id][7]);
685
686 WRITE_HREG(HCODEC_Q_QUANT_CONTROL,
687 (8 << 23) | /* quant_table_addr */
688 (1 << 22)); /* quant_table_addr_update */
689
690 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
691 wq->quant_tbl_i16[wq->qp_table_id][0]);
692 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
693 wq->quant_tbl_i16[wq->qp_table_id][1]);
694 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
695 wq->quant_tbl_i16[wq->qp_table_id][2]);
696 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
697 wq->quant_tbl_i16[wq->qp_table_id][3]);
698 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
699 wq->quant_tbl_i16[wq->qp_table_id][4]);
700 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
701 wq->quant_tbl_i16[wq->qp_table_id][5]);
702 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
703 wq->quant_tbl_i16[wq->qp_table_id][6]);
704 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
705 wq->quant_tbl_i16[wq->qp_table_id][7]);
706
707 WRITE_HREG(HCODEC_Q_QUANT_CONTROL,
708 (16 << 23) | /* quant_table_addr */
709 (1 << 22)); /* quant_table_addr_update */
710
711 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
712 wq->quant_tbl_me[wq->qp_table_id][0]);
713 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
714 wq->quant_tbl_me[wq->qp_table_id][1]);
715 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
716 wq->quant_tbl_me[wq->qp_table_id][2]);
717 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
718 wq->quant_tbl_me[wq->qp_table_id][3]);
719 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
720 wq->quant_tbl_me[wq->qp_table_id][4]);
721 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
722 wq->quant_tbl_me[wq->qp_table_id][5]);
723 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
724 wq->quant_tbl_me[wq->qp_table_id][6]);
725 WRITE_HREG(HCODEC_QUANT_TABLE_DATA,
726 wq->quant_tbl_me[wq->qp_table_id][7]);
727 return;
728}
729#endif
730
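/*
 * InitEncodeWeight() selects the motion-estimation merge/weight/SAD
 * defaults per SoC generation: GXBB and newer use the ME_*_WEIGHT and
 * ME_SAD_* defines above (with several of them zeroed again for GXTVBB+
 * in the new-ucode build), while older chips fall back to fixed legacy
 * constants.
 */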
731static void InitEncodeWeight(void)
732{
733 me_mv_merge_ctl =
734 (0x1 << 31) | /* [31] me_merge_mv_en_16 */
735 (0x1 << 30) | /* [30] me_merge_small_mv_en_16 */
736 (0x1 << 29) | /* [29] me_merge_flex_en_16 */
737 (0x1 << 28) | /* [28] me_merge_sad_en_16 */
738 (0x1 << 27) | /* [27] me_merge_mv_en_8 */
739 (0x1 << 26) | /* [26] me_merge_small_mv_en_8 */
740 (0x1 << 25) | /* [25] me_merge_flex_en_8 */
741 (0x1 << 24) | /* [24] me_merge_sad_en_8 */
742 (0x12 << 18) |
743 /* [23:18] me_merge_mv_diff_16 - MV diff
744 <= n pixel can be merged */
745 (0x2b << 12) |
746 /* [17:12] me_merge_mv_diff_8 - MV diff
747 <= n pixel can be merged */
748 (0x80 << 0);
749 /* [11:0] me_merge_min_sad - SAD
750 >= 0x180 can be merged with other MV */
751
752 /* need to add a condition for ucode mode */
753 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) {
754 me_mv_weight_01 = (ME_MV_STEP_WEIGHT_1 << 24) |
755 (ME_MV_PRE_WEIGHT_1 << 16) |
756 (ME_MV_STEP_WEIGHT_0 << 8) |
757 (ME_MV_PRE_WEIGHT_0 << 0);
758
759 me_mv_weight_23 = (ME_MV_STEP_WEIGHT_3 << 24) |
760 (ME_MV_PRE_WEIGHT_3 << 16) |
761 (ME_MV_STEP_WEIGHT_2 << 8) |
762 (ME_MV_PRE_WEIGHT_2 << 0);
763
764 me_sad_range_inc = (ME_SAD_RANGE_3 << 24) |
765 (ME_SAD_RANGE_2 << 16) |
766 (ME_SAD_RANGE_1 << 8) |
767 (ME_SAD_RANGE_0 << 0);
768
769 me_step0_close_mv = (0x100 << 10) |
770 /* me_step0_big_sad -- two MV sad
771 diff bigger will use 1 */
772 (2 << 5) | /* me_step0_close_mv_y */
773 (2 << 0); /* me_step0_close_mv_x */
774
775 me_f_skip_sad = (0x00 << 24) | /* force_skip_sad_3 */
776 (STEP_2_SKIP_SAD << 16) | /* force_skip_sad_2 */
777 (STEP_1_SKIP_SAD << 8) | /* force_skip_sad_1 */
778 (STEP_0_SKIP_SAD << 0); /* force_skip_sad_0 */
779
780 me_f_skip_weight = (0x00 << 24) | /* force_skip_weight_3 */
781 /* force_skip_weight_2 */
782 (STEP_2_SKIP_WEIGHT << 16) |
783 /* force_skip_weight_1 */
784 (STEP_1_SKIP_WEIGHT << 8) |
785 /* force_skip_weight_0 */
786 (STEP_0_SKIP_WEIGHT << 0);
787
788#ifndef USE_OLD_DUMP_MC
789 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB) {
790 me_f_skip_sad = 0;
791 me_f_skip_weight = 0;
792 me_mv_weight_01 = 0;
793 me_mv_weight_23 = 0;
794 }
795#endif
796 me_sad_enough_01 = (ME_SAD_ENOUGH_1_DATA << 12) |
797 /* me_sad_enough_1 */
798 (ME_SAD_ENOUGH_0_DATA << 0) |
799 /* me_sad_enough_0 */
800 (0 << 12) | /* me_sad_enough_1 */
801 (0 << 0); /* me_sad_enough_0 */
802
803 me_sad_enough_23 = (ADV_MV_8x8_ENOUGH_DATA << 12) |
804 /* adv_mv_8x8_enough */
805 (ME_SAD_ENOUGH_2_DATA << 0) |
806 /* me_sad_enough_2 */
807 (0 << 12) | /* me_sad_enough_3 */
808 (0 << 0); /* me_sad_enough_2 */
809 } else {
810 me_mv_weight_01 = (0x40 << 24) | (0x30 << 16) |
811 (0x20 << 8) | 0x30;
812 me_mv_weight_23 = (0x40 << 8) | 0x30;
813 me_sad_range_inc = 0x01010101; /* 0x03030303; */
814 me_step0_close_mv = 0x0000ac21; /* 0x003ffc21; */
815 me_f_skip_sad = 0x18181818;
816 me_f_skip_weight = 0x12121212;
817 me_sad_enough_01 = 0;
818 me_sad_enough_23 = 0;
819 }
820}
821
822/*output stream buffer setting*/
823static void avc_init_output_buffer(struct encode_wq_s *wq)
824{
825 WRITE_HREG(HCODEC_VLC_VB_MEM_CTL,
826 ((1 << 31) | (0x3f << 24) |
827 (0x20 << 16) | (2 << 0)));
828 WRITE_HREG(HCODEC_VLC_VB_START_PTR,
829 wq->mem.BitstreamStart);
830 WRITE_HREG(HCODEC_VLC_VB_WR_PTR,
831 wq->mem.BitstreamStart);
832 WRITE_HREG(HCODEC_VLC_VB_SW_RD_PTR,
833 wq->mem.BitstreamStart);
834 WRITE_HREG(HCODEC_VLC_VB_END_PTR,
835 wq->mem.BitstreamEnd);
836 WRITE_HREG(HCODEC_VLC_VB_CONTROL, 1);
837 WRITE_HREG(HCODEC_VLC_VB_CONTROL,
838 ((0 << 14) | (7 << 3) |
839 (1 << 1) | (0 << 0)));
840}
841
842/*input dct buffer setting*/
843static void avc_init_input_buffer(struct encode_wq_s *wq)
844{
845 WRITE_HREG(HCODEC_QDCT_MB_START_PTR,
846 wq->mem.dct_buff_start_addr);
847 WRITE_HREG(HCODEC_QDCT_MB_END_PTR,
848 wq->mem.dct_buff_end_addr);
849 WRITE_HREG(HCODEC_QDCT_MB_WR_PTR,
850 wq->mem.dct_buff_start_addr);
851 WRITE_HREG(HCODEC_QDCT_MB_RD_PTR,
852 wq->mem.dct_buff_start_addr);
853 WRITE_HREG(HCODEC_QDCT_MB_BUFF, 0);
854}
855
856/*input reference buffer setting*/
857static void avc_init_reference_buffer(s32 canvas)
858{
859 WRITE_HREG(HCODEC_ANC0_CANVAS_ADDR, canvas);
860 WRITE_HREG(HCODEC_VLC_HCMD_CONFIG, 0);
861}
862
863static void avc_init_assit_buffer(struct encode_wq_s *wq)
864{
865 WRITE_HREG(MEM_OFFSET_REG, wq->mem.assit_buffer_offset);
866}
867
868/*deblock buffer setting, same as INI_CANVAS*/
869static void avc_init_dblk_buffer(s32 canvas)
870{
871 WRITE_HREG(HCODEC_REC_CANVAS_ADDR, canvas);
872 WRITE_HREG(HCODEC_DBKR_CANVAS_ADDR, canvas);
873 WRITE_HREG(HCODEC_DBKW_CANVAS_ADDR, canvas);
874}
875
876static void avc_init_encoder(struct encode_wq_s *wq, bool idr)
877{
878 WRITE_HREG(HCODEC_VLC_TOTAL_BYTES, 0);
879 WRITE_HREG(HCODEC_VLC_CONFIG, 0x07);
880 WRITE_HREG(HCODEC_VLC_INT_CONTROL, 0);
881#ifndef USE_OLD_DUMP_MC
882 if ((get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) &&
883 (encode_manager.ucode_index == UCODE_MODE_FULL)) {
884 WRITE_HREG(HCODEC_ASSIST_AMR1_INT0, 0x15);
885 WRITE_HREG(HCODEC_ASSIST_AMR1_INT1, 0x8);
886 WRITE_HREG(HCODEC_ASSIST_AMR1_INT3, 0x14);
887 } else
888#endif
889 {
890 WRITE_HREG(HCODEC_ASSIST_AMR1_INT0, 0x15);
891#ifdef MULTI_SLICE_MC
892 if (encode_manager.dblk_fix_flag) {
893 WRITE_HREG(HCODEC_ASSIST_AMR1_INT2, 0x19);
894 WRITE_HREG(HCODEC_ASSIST_AMR1_INT1, 0x8);
895 } else {
896 WRITE_HREG(HCODEC_ASSIST_AMR1_INT1, 0x19);
897 WRITE_HREG(HCODEC_ASSIST_AMR1_INT2, 0x8);
898 }
899#else
900 if (encode_manager.dblk_fix_flag)
901 WRITE_HREG(HCODEC_ASSIST_AMR1_INT1, 0x8);
902 else
903 WRITE_HREG(HCODEC_ASSIST_AMR1_INT2, 0x8);
904#endif
905 WRITE_HREG(HCODEC_ASSIST_AMR1_INT3, 0x14);
906#ifdef INTRA_IN_P_TOP
907 WRITE_HREG(HCODEC_ASSIST_DMA_INT_MSK, 0xfd);
908 WRITE_HREG(HCODEC_ASSIST_DMA_INT_MSK2, 0xff);
909 WRITE_HREG(HCODEC_ASSIST_AMR1_INT4, 0x18);
910 /* mtspi 0xfd, HCODEC_ASSIST_DMA_INT_MSK
911 enable lmem_mpeg_dma_int */
912 /* mtspi 0xff, HCODEC_ASSIST_DMA_INT_MSK2
913 disable cpu19_int */
914 /* mtspi 0x18, HCODEC_ASSIST_AMR1_INT4 // lmem_dma_isr */
915#else
916 WRITE_HREG(HCODEC_ASSIST_DMA_INT_MSK, 0xff);
917 WRITE_HREG(HCODEC_ASSIST_DMA_INT_MSK2, 0xff);
918#endif
919 }
920 WRITE_HREG(IDR_PIC_ID, wq->pic.idr_pic_id);
921 WRITE_HREG(FRAME_NUMBER, (idr == true) ? 0 : wq->pic.frame_number);
922 WRITE_HREG(PIC_ORDER_CNT_LSB,
923 (idr == true) ? 0 : wq->pic.pic_order_cnt_lsb);
924
925 WRITE_HREG(LOG2_MAX_PIC_ORDER_CNT_LSB,
926 wq->pic.log2_max_pic_order_cnt_lsb);
927 WRITE_HREG(LOG2_MAX_FRAME_NUM, wq->pic.log2_max_frame_num);
928 WRITE_HREG(ANC0_BUFFER_ID, anc0_buffer_id);
929 WRITE_HREG(QPPICTURE, wq->pic.init_qppicture);
930}
931
932static void avc_canvas_init(struct encode_wq_s *wq)
933{
934 u32 canvas_width, canvas_height;
935 u32 start_addr = wq->mem.buf_start;
936
937 canvas_width = ((wq->pic.encoder_width + 31) >> 5) << 5;
938 canvas_height = ((wq->pic.encoder_height + 15) >> 4) << 4;
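	/*
	 * ((w + 31) >> 5) << 5 rounds the width up to a multiple of 32 and
	 * ((h + 15) >> 4) << 4 rounds the height up to a multiple of 16;
	 * e.g. a 1920x1080 picture gets a 1920x1088 canvas.
	 */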
939
940 canvas_config(ENC_CANVAS_OFFSET,
941 start_addr + wq->mem.bufspec.dec0_y.buf_start,
942 canvas_width, canvas_height,
943 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
944 canvas_config(1 + ENC_CANVAS_OFFSET,
945 start_addr + wq->mem.bufspec.dec0_uv.buf_start,
946 canvas_width, canvas_height / 2,
947 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
948 /* here the third plane uses the same address as the second plane */
949 canvas_config(2 + ENC_CANVAS_OFFSET,
950 start_addr + wq->mem.bufspec.dec0_uv.buf_start,
951 canvas_width, canvas_height / 2,
952 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
953
954 canvas_config(3 + ENC_CANVAS_OFFSET,
955 start_addr + wq->mem.bufspec.dec1_y.buf_start,
956 canvas_width, canvas_height,
957 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
958 canvas_config(4 + ENC_CANVAS_OFFSET,
959 start_addr + wq->mem.bufspec.dec1_uv.buf_start,
960 canvas_width, canvas_height / 2,
961 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
962 /* here the third plane uses the same address as the second plane */
963 canvas_config(5 + ENC_CANVAS_OFFSET,
964 start_addr + wq->mem.bufspec.dec1_uv.buf_start,
965 canvas_width, canvas_height / 2,
966 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
967}
968
969static void avc_buffspec_init(struct encode_wq_s *wq)
970{
971 u32 canvas_width, canvas_height;
972 u32 start_addr = wq->mem.buf_start;
973
974 canvas_width = ((wq->pic.encoder_width + 31) >> 5) << 5;
975 canvas_height = ((wq->pic.encoder_height + 15) >> 4) << 4;
976
977 /*input dct buffer config */
978 /* (w>>4)*(h>>4)*864 */
979 wq->mem.dct_buff_start_addr = start_addr +
980 wq->mem.bufspec.dct.buf_start;
981 wq->mem.dct_buff_end_addr =
982 wq->mem.dct_buff_start_addr +
983 wq->mem.bufspec.dct.buf_size - 1;
984 enc_pr(LOG_INFO, "dct_buff_start_addr is 0x%x, wq:%p.\n",
985 wq->mem.dct_buff_start_addr, (void *)wq);
986
987 wq->mem.bufspec.dec0_uv.buf_start =
988 wq->mem.bufspec.dec0_y.buf_start +
989 canvas_width * canvas_height;
990 wq->mem.bufspec.dec0_uv.buf_size = canvas_width * canvas_height / 2;
991 wq->mem.bufspec.dec1_uv.buf_start =
992 wq->mem.bufspec.dec1_y.buf_start +
993 canvas_width * canvas_height;
994 wq->mem.bufspec.dec1_uv.buf_size = canvas_width * canvas_height / 2;
995 wq->mem.assit_buffer_offset = start_addr +
996 wq->mem.bufspec.assit.buf_start;
997 enc_pr(LOG_INFO, "assit_buffer_offset is 0x%x, wq: %p.\n",
998 wq->mem.assit_buffer_offset, (void *)wq);
999 /*output stream buffer config*/
1000 wq->mem.BitstreamStart = start_addr +
1001 wq->mem.bufspec.bitstream.buf_start;
1002 wq->mem.BitstreamEnd =
1003 wq->mem.BitstreamStart +
1004 wq->mem.bufspec.bitstream.buf_size - 1;
1005
1006#ifndef USE_OLD_DUMP_MC
1007 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) {
1008 u32 mb_w = (wq->pic.encoder_width + 15) >> 4;
1009 u32 mb_h = (wq->pic.encoder_height + 15) >> 4;
1010 u32 mbs = mb_w * mb_h;
1011 wq->mem.dump_info_ddr_size =
1012 DUMP_INFO_BYTES_PER_MB * mbs;
1013 wq->mem.dump_info_ddr_size =
1014 (wq->mem.dump_info_ddr_size + PAGE_SIZE - 1)
1015 & ~(PAGE_SIZE - 1);
1016 } else {
1017 wq->mem.dump_info_ddr_start_addr = 0;
1018 wq->mem.dump_info_ddr_size = 0;
1019 }
1020#else
1021 wq->mem.dump_info_ddr_start_addr = 0;
1022 wq->mem.dump_info_ddr_size = 0;
1023#endif
1024
1025 enc_pr(LOG_INFO, "BitstreamStart is 0x%x, wq: %p.\n",
1026 wq->mem.BitstreamStart, (void *)wq);
1027
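	/*
	 * dblk_buf_canvas and ref_buf_canvas pack three canvas indices, one
	 * byte each ((plane2 << 16) | (plane1 << 8) | plane0); with
	 * ENC_CANVAS_OFFSET = 0xE4 this gives 0xE6E5E4 for the dec0
	 * (deblock/reconstruction) set and 0xE9E8E7 for the dec1 (reference)
	 * set configured in avc_canvas_init().
	 */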
1028 wq->mem.dblk_buf_canvas = ((ENC_CANVAS_OFFSET + 2) << 16) |
1029 ((ENC_CANVAS_OFFSET + 1) << 8) |
1030 (ENC_CANVAS_OFFSET);
1031 wq->mem.ref_buf_canvas = ((ENC_CANVAS_OFFSET + 5) << 16) |
1032 ((ENC_CANVAS_OFFSET + 4) << 8) |
1033 (ENC_CANVAS_OFFSET + 3);
1034}
1035
1036#ifdef USE_VDEC2
1037static s32 abort_vdec2_flag;
1038void AbortEncodeWithVdec2(s32 abort)
1039{
1040 abort_vdec2_flag = abort;
1041}
1042#endif
1043
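/*
 * avc_init_ie_me_parameter() programs the intra/inter estimation pipeline
 * (IE_ME_MODE, IE_REF_SEL, IE_ME_MB_TYPE) and the slice layout; on the
 * legacy path FIXED_SLICE_CFG packs rows-per-slice for P pictures in
 * [31:16] and for I pictures in [15:0], matching the fixed_slice_cfg
 * comment above.
 */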
1044static void avc_init_ie_me_parameter(struct encode_wq_s *wq, u32 quant)
1045{
1046#ifndef USE_OLD_DUMP_MC
1047 if ((get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) &&
1048 (encode_manager.ucode_index == UCODE_MODE_FULL)) {
1049 ie_cur_ref_sel = 0;
1050 ie_pippeline_block = 12;
1051 /* currently disable half and sub pixel */
1052 ie_me_mode |= (ie_pippeline_block & IE_PIPPELINE_BLOCK_MASK) <<
1053 IE_PIPPELINE_BLOCK_SHIFT;
1054 } else
1055#endif
1056 {
1057 ie_pippeline_block = 3;
1058 if (ie_pippeline_block == 3)
1059 ie_cur_ref_sel = ((1 << 13) | (1 << 12) |
1060 (1 << 9) | (1 << 8));
1061 else if (ie_pippeline_block == 0)
1062 ie_cur_ref_sel = 0xffffffff;
1063 else {
1064 enc_pr(LOG_ERROR,
1065 "Error : Please calculate IE_CUR_REF_SEL for IE_PIPPELINE_BLOCK. wq:%p\n",
1066 (void *)wq);
1067 }
1068 /* currently disable half and sub pixel */
1069 ie_me_mode |= (ie_pippeline_block & IE_PIPPELINE_BLOCK_MASK) <<
1070 IE_PIPPELINE_BLOCK_SHIFT;
1071 }
1072
1073 WRITE_HREG(IE_ME_MODE, ie_me_mode);
1074 WRITE_HREG(IE_REF_SEL, ie_cur_ref_sel);
1075 WRITE_HREG(IE_ME_MB_TYPE, ie_me_mb_type);
1076
1077#ifndef USE_OLD_DUMP_MC
1078 if ((get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) &&
1079 (encode_manager.ucode_index == UCODE_MODE_FULL)) {
1080 if (fixed_slice_cfg)
1081 WRITE_HREG(FIXED_SLICE_CFG, fixed_slice_cfg);
1082 else if (wq->pic.rows_per_slice !=
1083 (wq->pic.encoder_height + 15) >> 4) {
1084 u32 mb_per_slice = (wq->pic.encoder_height + 15) >> 4;
1085 mb_per_slice = mb_per_slice * wq->pic.rows_per_slice;
1086 WRITE_HREG(FIXED_SLICE_CFG, mb_per_slice);
1087 } else
1088 WRITE_HREG(FIXED_SLICE_CFG, 0);
1089 } else
1090#endif
1091 {
1092 if (fixed_slice_cfg)
1093 WRITE_HREG(FIXED_SLICE_CFG, fixed_slice_cfg);
1094 else if (wq->pic.rows_per_slice !=
1095 (wq->pic.encoder_height + 15) >> 4)
1096 WRITE_HREG(FIXED_SLICE_CFG,
1097 (wq->pic.rows_per_slice << 16) |
1098 wq->pic.rows_per_slice);
1099 else
1100 WRITE_HREG(FIXED_SLICE_CFG, 0);
1101 WRITE_HREG(P_INTRA_CONFIG, p_intra_config);
1102 WRITE_HREG(P_MB_QUANT_CONFIG, p_mb_quant_config);
1103 if (encode_manager.ucode_index != UCODE_MODE_SW_MIX) {
1104 p_mb_quant_config = (20 << 16) |
1105 (quant << 8) |
1106 (quant << 0);
1107 WRITE_HREG(P_MB_QUANT_CONFIG, p_mb_quant_config);
1108 }
1109 WRITE_HREG(P_MB_QUANT_INC_CFG, p_mb_quant_inc_cfg);
1110 WRITE_HREG(P_MB_QUANT_DEC_CFG, p_mb_quant_dec_cfg);
1111 }
1112#ifndef MULTI_SLICE_MC
1113 WRITE_HREG(FIXED_SLICE_CFG, 0);
1114#endif
1115}
1116
1117/* for temp */
1118#define HCODEC_MFDIN_REGC_MBLP (HCODEC_MFDIN_REGB_AMPC + 0x1)
1119#define HCODEC_MFDIN_REG0D (HCODEC_MFDIN_REGB_AMPC + 0x2)
1120#define HCODEC_MFDIN_REG0E (HCODEC_MFDIN_REGB_AMPC + 0x3)
1121#define HCODEC_MFDIN_REG0F (HCODEC_MFDIN_REGB_AMPC + 0x4)
1122#define HCODEC_MFDIN_REG10 (HCODEC_MFDIN_REGB_AMPC + 0x5)
1123#define HCODEC_MFDIN_REG11 (HCODEC_MFDIN_REGB_AMPC + 0x6)
1124#define HCODEC_MFDIN_REG12 (HCODEC_MFDIN_REGB_AMPC + 0x7)
1125#define HCODEC_MFDIN_REG13 (HCODEC_MFDIN_REGB_AMPC + 0x8)
1126#define HCODEC_MFDIN_REG14 (HCODEC_MFDIN_REGB_AMPC + 0x9)
1127#define HCODEC_MFDIN_REG15 (HCODEC_MFDIN_REGB_AMPC + 0xa)
1128#define HCODEC_MFDIN_REG16 (HCODEC_MFDIN_REGB_AMPC + 0xb)
1129
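/*
 * mfdin_basic() programs the MFDIN video front end: iformat/ifmt_extra
 * select the input layout (4:4:4/RGB, 4:2:2, 4:2:0 and, from iformat 7,
 * the extended-bit-depth variants; iformats 8 and above are read as
 * linear DDR buffers rather than canvases -- see set_input_format()
 * below). Down-sampling or interpolation is then enabled as needed to
 * reach oformat, where 0 appears to be the 4:2:0 output the encoder
 * consumes, and the GXBB+ path additionally programs the SNR/TNR noise
 * reduction registers from the y_*/c_* module parameters above.
 */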
1130static void mfdin_basic(u32 input, u8 iformat,
1131 u8 oformat, u32 picsize_x, u32 picsize_y,
1132 u8 r2y_en, u8 nr, u8 ifmt_extra)
1133{
1134 u8 dsample_en; /* Downsample Enable */
1135 u8 interp_en; /* Interpolation Enable */
1136 u8 y_size; /* 0:16 Pixels for y direction pickup; 1:8 pixels */
1137 u8 r2y_mode; /* RGB2YUV Mode, range(0~3) */
1138 /* mfdin_reg3_canv[25:24];
1139 // bytes per pixel in x direction for index0, 0:half 1:1 2:2 3:3 */
1140 u8 canv_idx0_bppx;
1141 /* mfdin_reg3_canv[27:26];
1142 // bytes per pixel in x direction for index1-2, 0:half 1:1 2:2 3:3 */
1143 u8 canv_idx1_bppx;
1144 /* mfdin_reg3_canv[29:28];
1145 // bytes per pixel in y direction for index0, 0:half 1:1 2:2 3:3 */
1146 u8 canv_idx0_bppy;
1147 /* mfdin_reg3_canv[31:30];
1148 // bytes per pixel in y direction for index1-2, 0:half 1:1 2:2 3:3 */
1149 u8 canv_idx1_bppy;
1150 u8 ifmt444, ifmt422, ifmt420, linear_bytes4p;
1151 u8 nr_enable;
1152 u8 cfg_y_snr_en;
1153 u8 cfg_y_tnr_en;
1154 u8 cfg_c_snr_en;
1155 u8 cfg_c_tnr_en;
1156 u32 linear_bytesperline;
1157 s32 reg_offset;
1158 bool linear_enable = false;
1159 bool format_err = false;
1160
1161 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_TXL) {
1162 if ((iformat == 7) && (ifmt_extra > 2))
1163 format_err = true;
1164 } else if (iformat == 7)
1165 format_err = true;
1166
1167 if (format_err) {
1168 enc_pr(LOG_ERROR,
1169 "mfdin format err, iformat:%d, ifmt_extra:%d\n",
1170 iformat, ifmt_extra);
1171 return;
1172 }
1173 if (iformat != 7)
1174 ifmt_extra = 0;
1175
1176 ifmt444 = ((iformat == 1) || (iformat == 5) || (iformat == 8) ||
1177 (iformat == 9) || (iformat == 12)) ? 1 : 0;
1178 if (iformat == 7 && ifmt_extra == 1)
1179 ifmt444 = 1;
1180 ifmt422 = ((iformat == 0) || (iformat == 10)) ? 1 : 0;
1181 if (iformat == 7 && ifmt_extra != 1)
1182 ifmt422 = 1;
1183 ifmt420 = ((iformat == 2) || (iformat == 3) || (iformat == 4) ||
1184 (iformat == 11)) ? 1 : 0;
1185 dsample_en = ((ifmt444 && (oformat != 2)) ||
1186 (ifmt422 && (oformat == 0))) ? 1 : 0;
1187 interp_en = ((ifmt422 && (oformat == 2)) ||
1188 (ifmt420 && (oformat != 0))) ? 1 : 0;
1189 y_size = (oformat != 0) ? 1 : 0;
1190 r2y_mode = (r2y_en == 1) ? 1 : 0; /* Fixed to 1 (TODO) */
1191 canv_idx0_bppx = (iformat == 1) ? 3 : (iformat == 0) ? 2 : 1;
1192 canv_idx1_bppx = (iformat == 4) ? 0 : 1;
1193 canv_idx0_bppy = 1;
1194 canv_idx1_bppy = (iformat == 5) ? 1 : 0;
1195
1196 if ((iformat == 8) || (iformat == 9) || (iformat == 12))
1197 linear_bytes4p = 3;
1198 else if (iformat == 10)
1199 linear_bytes4p = 2;
1200 else if (iformat == 11)
1201 linear_bytes4p = 1;
1202 else
1203 linear_bytes4p = 0;
1204 linear_bytesperline = picsize_x * linear_bytes4p;
1205
1206 if (iformat < 8)
1207 linear_enable = false;
1208 else
1209 linear_enable = true;
1210
1211 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) {
1212 reg_offset = -8;
1213 /* nr_mode: 0:Disabled 1:SNR Only 2:TNR Only 3:3DNR */
1214 nr_enable = (nr) ? 1 : 0;
1215 cfg_y_snr_en = ((nr == 1) || (nr == 3)) ? 1 : 0;
1216 cfg_y_tnr_en = ((nr == 2) || (nr == 3)) ? 1 : 0;
1217 cfg_c_snr_en = cfg_y_snr_en;
1218 /* cfg_c_tnr_en = cfg_y_tnr_en; */
1219 cfg_c_tnr_en = 0;
1220
1221 /* NR For Y */
1222 WRITE_HREG((HCODEC_MFDIN_REG0D + reg_offset),
1223 ((cfg_y_snr_en << 0) |
1224 (y_snr_err_norm << 1) |
1225 (y_snr_gau_bld_core << 2) |
1226 (((y_snr_gau_bld_ofst) & 0xff) << 6) |
1227 (y_snr_gau_bld_rate << 14) |
1228 (y_snr_gau_alp0_min << 20) |
1229 (y_snr_gau_alp0_max << 26)));
1230 WRITE_HREG((HCODEC_MFDIN_REG0E + reg_offset),
1231 ((cfg_y_tnr_en << 0) |
1232 (y_tnr_mc_en << 1) |
1233 (y_tnr_txt_mode << 2) |
1234 (y_tnr_mot_sad_margin << 3) |
1235 (y_tnr_alpha_min << 7) |
1236 (y_tnr_alpha_max << 13) |
1237 (y_tnr_deghost_os << 19)));
1238 WRITE_HREG((HCODEC_MFDIN_REG0F + reg_offset),
1239 ((y_tnr_mot_cortxt_rate << 0) |
1240 (y_tnr_mot_distxt_ofst << 8) |
1241 (y_tnr_mot_distxt_rate << 4) |
1242 (y_tnr_mot_dismot_ofst << 16) |
1243 (y_tnr_mot_frcsad_lock << 24)));
1244 WRITE_HREG((HCODEC_MFDIN_REG10 + reg_offset),
1245 ((y_tnr_mot2alp_frc_gain << 0) |
1246 (y_tnr_mot2alp_nrm_gain << 8) |
1247 (y_tnr_mot2alp_dis_gain << 16) |
1248 (y_tnr_mot2alp_dis_ofst << 24)));
1249 WRITE_HREG((HCODEC_MFDIN_REG11 + reg_offset),
1250 ((y_bld_beta2alp_rate << 0) |
1251 (y_bld_beta_min << 8) |
1252 (y_bld_beta_max << 14)));
1253
1254 /* NR For C */
1255 WRITE_HREG((HCODEC_MFDIN_REG12 + reg_offset),
1256 ((cfg_y_snr_en << 0) |
1257 (c_snr_err_norm << 1) |
1258 (c_snr_gau_bld_core << 2) |
1259 (((c_snr_gau_bld_ofst) & 0xff) << 6) |
1260 (c_snr_gau_bld_rate << 14) |
1261 (c_snr_gau_alp0_min << 20) |
1262 (c_snr_gau_alp0_max << 26)));
1263
1264 WRITE_HREG((HCODEC_MFDIN_REG13 + reg_offset),
1265 ((cfg_c_tnr_en << 0) |
1266 (c_tnr_mc_en << 1) |
1267 (c_tnr_txt_mode << 2) |
1268 (c_tnr_mot_sad_margin << 3) |
1269 (c_tnr_alpha_min << 7) |
1270 (c_tnr_alpha_max << 13) |
1271 (c_tnr_deghost_os << 19)));
1272 WRITE_HREG((HCODEC_MFDIN_REG14 + reg_offset),
1273 ((c_tnr_mot_cortxt_rate << 0) |
1274 (c_tnr_mot_distxt_ofst << 8) |
1275 (c_tnr_mot_distxt_rate << 4) |
1276 (c_tnr_mot_dismot_ofst << 16) |
1277 (c_tnr_mot_frcsad_lock << 24)));
1278 WRITE_HREG((HCODEC_MFDIN_REG15 + reg_offset),
1279 ((c_tnr_mot2alp_frc_gain << 0) |
1280 (c_tnr_mot2alp_nrm_gain << 8) |
1281 (c_tnr_mot2alp_dis_gain << 16) |
1282 (c_tnr_mot2alp_dis_ofst << 24)));
1283
1284 WRITE_HREG((HCODEC_MFDIN_REG16 + reg_offset),
1285 ((c_bld_beta2alp_rate << 0) |
1286 (c_bld_beta_min << 8) |
1287 (c_bld_beta_max << 14)));
1288
1289 WRITE_HREG((HCODEC_MFDIN_REG1_CTRL + reg_offset),
1290 (iformat << 0) | (oformat << 4) |
1291 (dsample_en << 6) | (y_size << 8) |
1292 (interp_en << 9) | (r2y_en << 12) |
1293 (r2y_mode << 13) | (ifmt_extra << 16) |
1294 (nr_enable << 19));
1295 WRITE_HREG((HCODEC_MFDIN_REG8_DMBL + reg_offset),
1296 (picsize_x << 14) | (picsize_y << 0));
1297 } else {
1298 reg_offset = 0;
1299 WRITE_HREG((HCODEC_MFDIN_REG1_CTRL + reg_offset),
1300 (iformat << 0) | (oformat << 4) |
1301 (dsample_en << 6) | (y_size << 8) |
1302 (interp_en << 9) | (r2y_en << 12) |
1303 (r2y_mode << 13));
1304 WRITE_HREG((HCODEC_MFDIN_REG8_DMBL + reg_offset),
1305 (picsize_x << 12) | (picsize_y << 0));
1306 }
1307
1308 if (linear_enable == false) {
1309 WRITE_HREG((HCODEC_MFDIN_REG3_CANV + reg_offset),
1310 (input & 0xffffff) |
1311 (canv_idx1_bppy << 30) |
1312 (canv_idx0_bppy << 28) |
1313 (canv_idx1_bppx << 26) |
1314 (canv_idx0_bppx << 24));
1315 WRITE_HREG((HCODEC_MFDIN_REG4_LNR0 + reg_offset),
1316 (0 << 16) | (0 << 0));
1317 WRITE_HREG((HCODEC_MFDIN_REG5_LNR1 + reg_offset), 0);
1318 } else {
1319 WRITE_HREG((HCODEC_MFDIN_REG3_CANV + reg_offset),
1320 (canv_idx1_bppy << 30) |
1321 (canv_idx0_bppy << 28) |
1322 (canv_idx1_bppx << 26) |
1323 (canv_idx0_bppx << 24));
1324 WRITE_HREG((HCODEC_MFDIN_REG4_LNR0 + reg_offset),
1325 (linear_bytes4p << 16) | (linear_bytesperline << 0));
1326 WRITE_HREG((HCODEC_MFDIN_REG5_LNR1 + reg_offset), input);
1327 }
1328
1329 WRITE_HREG((HCODEC_MFDIN_REG9_ENDN + reg_offset),
1330 (7 << 0) | (6 << 3) | (5 << 6) |
1331 (4 << 9) | (3 << 12) | (2 << 15) |
1332 (1 << 18) | (0 << 21));
1333}
1334
1335#ifdef CONFIG_AM_GE2D
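/*
 * scale_frame() uses the GE2D blitter to crop (request->crop_*) and scale
 * the source frame into an NV21 image in the scaler buffer (canvases
 * ENC_CANVAS_OFFSET + 6/7), sized to the 16-aligned encoder dimensions;
 * it returns the output size (dst_canvas_w * dst_h * 3 / 2), or -1 if the
 * GE2D context configuration fails.
 */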
1336static int scale_frame(struct encode_wq_s *wq,
1337 struct encode_request_s *request,
1338 struct config_para_ex_s *ge2d_config,
1339 u32 src_addr, bool canvas)
1340{
1341 struct ge2d_context_s *context = encode_manager.context;
1342 int src_top, src_left, src_width, src_height;
1343 struct canvas_s cs0, cs1, cs2, cd;
1344 u32 src_canvas, dst_canvas;
1345 u32 src_canvas_w, dst_canvas_w;
1346 u32 src_h = request->src_h;
1347 u32 dst_w = ((wq->pic.encoder_width + 15) >> 4) << 4;
1348 u32 dst_h = ((wq->pic.encoder_height + 15) >> 4) << 4;
1349 int input_format = GE2D_FORMAT_M24_NV21;
1350 src_top = request->crop_top;
1351 src_left = request->crop_left;
1352 src_width = request->src_w - src_left - request->crop_right;
1353 src_height = request->src_h - src_top - request->crop_bottom;
1354 if (canvas) {
1355 if ((request->fmt == FMT_NV21)
1356 || (request->fmt == FMT_NV12)) {
1357 src_canvas = src_addr & 0xffff;
1358 input_format = GE2D_FORMAT_M24_NV21;
1359 } else {
1360 src_canvas = src_addr & 0xffffff;
1361 input_format = GE2D_FORMAT_M24_YUV420;
1362 }
1363 } else {
1364 if ((request->fmt == FMT_NV21)
1365 || (request->fmt == FMT_NV12)) {
1366 src_canvas_w =
1367 ((request->src_w + 31) >> 5) << 5;
1368 canvas_config(ENC_CANVAS_OFFSET + 9,
1369 src_addr,
1370 src_canvas_w, src_h,
1371 CANVAS_ADDR_NOWRAP,
1372 CANVAS_BLKMODE_LINEAR);
1373 canvas_config(ENC_CANVAS_OFFSET + 10,
1374 src_addr + src_canvas_w * src_h,
1375 src_canvas_w, src_h / 2,
1376 CANVAS_ADDR_NOWRAP,
1377 CANVAS_BLKMODE_LINEAR);
1378 src_canvas =
1379 ((ENC_CANVAS_OFFSET + 10) << 8)
1380 | (ENC_CANVAS_OFFSET + 9);
1381 input_format = GE2D_FORMAT_M24_NV21;
1382 } else {
1383 src_canvas_w =
1384 ((request->src_w + 63) >> 6) << 6;
1385 canvas_config(ENC_CANVAS_OFFSET + 9,
1386 src_addr,
1387 src_canvas_w, src_h,
1388 CANVAS_ADDR_NOWRAP,
1389 CANVAS_BLKMODE_LINEAR);
1390 canvas_config(ENC_CANVAS_OFFSET + 10,
1391 src_addr + src_canvas_w * src_h,
1392 src_canvas_w / 2, src_h / 2,
1393 CANVAS_ADDR_NOWRAP,
1394 CANVAS_BLKMODE_LINEAR);
1395 canvas_config(ENC_CANVAS_OFFSET + 11,
1396 src_addr + src_canvas_w * src_h * 5 / 4,
1397 src_canvas_w / 2, src_h / 2,
1398 CANVAS_ADDR_NOWRAP,
1399 CANVAS_BLKMODE_LINEAR);
1400 src_canvas =
1401 ((ENC_CANVAS_OFFSET + 11) << 16) |
1402 ((ENC_CANVAS_OFFSET + 10) << 8) |
1403 (ENC_CANVAS_OFFSET + 9);
1404 input_format = GE2D_FORMAT_M24_YUV420;
1405 }
1406 }
1407 dst_canvas_w = ((dst_w + 31) >> 5) << 5;
1408 canvas_config(ENC_CANVAS_OFFSET + 6,
1409 wq->mem.scaler_buff_start_addr,
1410 dst_canvas_w, dst_h,
1411 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
1412 canvas_config(ENC_CANVAS_OFFSET + 7,
1413 wq->mem.scaler_buff_start_addr + dst_canvas_w * dst_h,
1414 dst_canvas_w, dst_h / 2,
1415 CANVAS_ADDR_NOWRAP, CANVAS_BLKMODE_LINEAR);
1416 dst_canvas = ((ENC_CANVAS_OFFSET + 7) << 8) |
1417 (ENC_CANVAS_OFFSET + 6);
1418 ge2d_config->alu_const_color = 0;
1419 ge2d_config->bitmask_en = 0;
1420 ge2d_config->src1_gb_alpha = 0;
1421 ge2d_config->dst_xy_swap = 0;
1422 canvas_read(src_canvas & 0xff, &cs0);
1423 canvas_read((src_canvas >> 8) & 0xff, &cs1);
1424 canvas_read((src_canvas >> 16) & 0xff, &cs2);
1425 ge2d_config->src_planes[0].addr = cs0.addr;
1426 ge2d_config->src_planes[0].w = cs0.width;
1427 ge2d_config->src_planes[0].h = cs0.height;
1428 ge2d_config->src_planes[1].addr = cs1.addr;
1429 ge2d_config->src_planes[1].w = cs1.width;
1430 ge2d_config->src_planes[1].h = cs1.height;
1431 ge2d_config->src_planes[2].addr = cs2.addr;
1432 ge2d_config->src_planes[2].w = cs2.width;
1433 ge2d_config->src_planes[2].h = cs2.height;
1434 canvas_read(dst_canvas & 0xff, &cd);
1435 ge2d_config->dst_planes[0].addr = cd.addr;
1436 ge2d_config->dst_planes[0].w = cd.width;
1437 ge2d_config->dst_planes[0].h = cd.height;
1438 ge2d_config->src_key.key_enable = 0;
1439 ge2d_config->src_key.key_mask = 0;
1440 ge2d_config->src_key.key_mode = 0;
1441 ge2d_config->src_para.canvas_index = src_canvas;
1442 ge2d_config->src_para.mem_type = CANVAS_TYPE_INVALID;
1443 ge2d_config->src_para.format = input_format | GE2D_LITTLE_ENDIAN;
1444 ge2d_config->src_para.fill_color_en = 0;
1445 ge2d_config->src_para.fill_mode = 0;
1446 ge2d_config->src_para.x_rev = 0;
1447 ge2d_config->src_para.y_rev = 0;
1448 ge2d_config->src_para.color = 0xffffffff;
1449 ge2d_config->src_para.top = 0;
1450 ge2d_config->src_para.left = 0;
1451 ge2d_config->src_para.width = request->src_w;
1452 ge2d_config->src_para.height = request->src_h;
1453 ge2d_config->src2_para.mem_type = CANVAS_TYPE_INVALID;
1454 ge2d_config->dst_para.canvas_index = dst_canvas;
1455 ge2d_config->dst_para.mem_type = CANVAS_TYPE_INVALID;
1456 ge2d_config->dst_para.format =
1457 GE2D_FORMAT_M24_NV21 | GE2D_LITTLE_ENDIAN;
1458 ge2d_config->dst_para.fill_color_en = 0;
1459 ge2d_config->dst_para.fill_mode = 0;
1460 ge2d_config->dst_para.x_rev = 0;
1461 ge2d_config->dst_para.y_rev = 0;
1462 ge2d_config->dst_para.color = 0;
1463 ge2d_config->dst_para.top = 0;
1464 ge2d_config->dst_para.left = 0;
1465 ge2d_config->dst_para.width = dst_w;
1466 ge2d_config->dst_para.height = dst_h;
1467 ge2d_config->dst_para.x_rev = 0;
1468 ge2d_config->dst_para.y_rev = 0;
1469 if (ge2d_context_config_ex(context, ge2d_config) < 0) {
1470 pr_err("++ge2d configing error.\n");
1471 return -1;
1472 }
1473 stretchblt_noalpha(context, src_left, src_top, src_width, src_height,
1474 0, 0, wq->pic.encoder_width, wq->pic.encoder_height);
1475 return dst_canvas_w*dst_h * 3 / 2;
1476}
1477#endif
1478
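/*
 * set_input_format() translates an encode_request_s into MFDIN settings:
 * it derives iformat/ifmt_extra from request->fmt, enables RGB-to-YUV
 * conversion for RGB inputs, binds linear buffers to scratch canvases
 * ENC_CANVAS_OFFSET + 6..8 (or masks the caller-supplied canvas indices
 * for CANVAS_BUFF requests), optionally routes the frame through
 * scale_frame() when request->scale_enable is set, and finally calls
 * mfdin_basic().
 */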
1479static s32 set_input_format(struct encode_wq_s *wq,
1480 struct encode_request_s *request)
1481{
1482 s32 ret = 0;
1483 u8 iformat = MAX_FRAME_FMT, oformat = MAX_FRAME_FMT, r2y_en = 0;
1484 u32 picsize_x, picsize_y;
1485 u32 canvas_w = 0;
1486 u32 input = request->src;
1487 u8 ifmt_extra = 0;
1488
1489 if ((request->fmt == FMT_RGB565) || (request->fmt >= MAX_FRAME_FMT))
1490 return -1;
1491
1492 picsize_x = ((wq->pic.encoder_width + 15) >> 4) << 4;
1493 picsize_y = ((wq->pic.encoder_height + 15) >> 4) << 4;
1494 oformat = 0;
1495 if ((request->type == LOCAL_BUFF)
1496 || (request->type == PHYSICAL_BUFF)) {
1497 if (request->type == LOCAL_BUFF) {
1498 if (request->flush_flag & AMVENC_FLUSH_FLAG_INPUT)
1499 dma_flush(wq->mem.dct_buff_start_addr,
1500 request->framesize);
1501 if (request->scale_enable) {
1502#ifdef CONFIG_AM_GE2D
1503 struct config_para_ex_s ge2d_config;
1504 u32 src_addr =
1505 wq->mem.dct_buff_start_addr;
1506 memset(&ge2d_config, 0,
1507 sizeof(struct config_para_ex_s));
1508 if (request->flush_flag &
1509 AMVENC_FLUSH_FLAG_INPUT) {
1510 int scale_size =
1511 scale_frame(
1512 wq, request,
1513 &ge2d_config,
1514 src_addr,
1515 false);
1516 if (scale_size > 0)
1517 cache_flush(
1518 wq->mem.scaler_buff_start_addr,
1519 scale_size);
1520 }
1521#else
1522 enc_pr(LOG_ERROR,
1523 "Warning: need enable ge2d for scale frame!\n");
1524 return -1;
1525#endif
1526 iformat = 2;
1527 r2y_en = 0;
1528 input = ((ENC_CANVAS_OFFSET + 7) << 8) |
1529 (ENC_CANVAS_OFFSET + 6);
1530 ret = 0;
1531 goto MFDIN;
1532 } else {
1533 input = wq->mem.dct_buff_start_addr;
1534 }
1535 } else {
1536 picsize_y = wq->pic.encoder_height;
1537 if (request->scale_enable) {
1538#ifdef CONFIG_AM_GE2D
1539 struct config_para_ex_s ge2d_config;
1540 memset(&ge2d_config, 0,
1541 sizeof(struct config_para_ex_s));
1542 scale_frame(
1543 wq, request,
1544 &ge2d_config,
1545 input, false);
1546 iformat = 2;
1547 r2y_en = 0;
1548 input = ((ENC_CANVAS_OFFSET + 7) << 8) |
1549 (ENC_CANVAS_OFFSET + 6);
1550 ret = 0;
1551 goto MFDIN;
1552#else
1553 enc_pr(LOG_ERROR,
1554 "Warning: need enable ge2d for scale frame!\n");
1555 return -1;
1556#endif
1557 }
1558 }
1559 if ((request->fmt <= FMT_YUV444_PLANE) ||
1560 (request->fmt >= FMT_YUV422_12BIT))
1561 r2y_en = 0;
1562 else
1563 r2y_en = 1;
1564
1565 if (request->fmt >= FMT_YUV422_12BIT) {
1566 iformat = 7;
1567 ifmt_extra = request->fmt - FMT_YUV422_12BIT;
1568 if (request->fmt == FMT_YUV422_12BIT)
1569 canvas_w = picsize_x * 24 / 8;
1570 else if (request->fmt == FMT_YUV444_10BIT)
1571 canvas_w = picsize_x * 32 / 8;
1572 else
1573 canvas_w = (picsize_x * 20 + 7) / 8;
1574 canvas_w = ((canvas_w + 31) >> 5) << 5;
1575 canvas_config(ENC_CANVAS_OFFSET + 6,
1576 input,
1577 canvas_w, picsize_y,
1578 CANVAS_ADDR_NOWRAP,
1579 CANVAS_BLKMODE_LINEAR);
1580 input = ENC_CANVAS_OFFSET + 6;
1581 input = input & 0xff;
1582 } else if (request->fmt == FMT_YUV422_SINGLE)
1583 iformat = 10;
1584 else if ((request->fmt == FMT_YUV444_SINGLE)
1585 || (request->fmt == FMT_RGB888)) {
1586 iformat = 1;
1587 if (request->fmt == FMT_RGB888)
1588 r2y_en = 1;
1589 canvas_w = picsize_x * 3;
1590 canvas_w = ((canvas_w + 31) >> 5) << 5;
1591 canvas_config(ENC_CANVAS_OFFSET + 6,
1592 input,
1593 canvas_w, picsize_y,
1594 CANVAS_ADDR_NOWRAP,
1595 CANVAS_BLKMODE_LINEAR);
1596 input = ENC_CANVAS_OFFSET + 6;
1597 } else if ((request->fmt == FMT_NV21)
1598 || (request->fmt == FMT_NV12)) {
1599 canvas_w = ((wq->pic.encoder_width + 31) >> 5) << 5;
1600 iformat = (request->fmt == FMT_NV21) ? 2 : 3;
1601 canvas_config(ENC_CANVAS_OFFSET + 6,
1602 input,
1603 canvas_w, picsize_y,
1604 CANVAS_ADDR_NOWRAP,
1605 CANVAS_BLKMODE_LINEAR);
1606 canvas_config(ENC_CANVAS_OFFSET + 7,
1607 input + canvas_w * picsize_y,
1608 canvas_w, picsize_y / 2,
1609 CANVAS_ADDR_NOWRAP,
1610 CANVAS_BLKMODE_LINEAR);
1611 input = ((ENC_CANVAS_OFFSET + 7) << 8) |
1612 (ENC_CANVAS_OFFSET + 6);
1613 } else if (request->fmt == FMT_YUV420) {
1614 iformat = 4;
1615 canvas_w = ((wq->pic.encoder_width + 63) >> 6) << 6;
1616 canvas_config(ENC_CANVAS_OFFSET + 6,
1617 input,
1618 canvas_w, picsize_y,
1619 CANVAS_ADDR_NOWRAP,
1620 CANVAS_BLKMODE_LINEAR);
1621 canvas_config(ENC_CANVAS_OFFSET + 7,
1622 input + canvas_w * picsize_y,
1623 canvas_w / 2, picsize_y / 2,
1624 CANVAS_ADDR_NOWRAP,
1625 CANVAS_BLKMODE_LINEAR);
1626 canvas_config(ENC_CANVAS_OFFSET + 8,
1627 input + canvas_w * picsize_y * 5 / 4,
1628 canvas_w / 2, picsize_y / 2,
1629 CANVAS_ADDR_NOWRAP,
1630 CANVAS_BLKMODE_LINEAR);
1631 input = ((ENC_CANVAS_OFFSET + 8) << 16) |
1632 ((ENC_CANVAS_OFFSET + 7) << 8) |
1633 (ENC_CANVAS_OFFSET + 6);
1634 } else if ((request->fmt == FMT_YUV444_PLANE)
1635 || (request->fmt == FMT_RGB888_PLANE)) {
1636 if (request->fmt == FMT_RGB888_PLANE)
1637 r2y_en = 1;
1638 iformat = 5;
1639 canvas_w = ((wq->pic.encoder_width + 31) >> 5) << 5;
1640 canvas_config(ENC_CANVAS_OFFSET + 6,
1641 input,
1642 canvas_w, picsize_y,
1643 CANVAS_ADDR_NOWRAP,
1644 CANVAS_BLKMODE_LINEAR);
1645 canvas_config(ENC_CANVAS_OFFSET + 7,
1646 input + canvas_w * picsize_y,
1647 canvas_w, picsize_y,
1648 CANVAS_ADDR_NOWRAP,
1649 CANVAS_BLKMODE_LINEAR);
1650 canvas_config(ENC_CANVAS_OFFSET + 8,
1651 input + canvas_w * picsize_y * 2,
1652 canvas_w, picsize_y,
1653 CANVAS_ADDR_NOWRAP,
1654 CANVAS_BLKMODE_LINEAR);
1655 input = ((ENC_CANVAS_OFFSET + 8) << 16) |
1656 ((ENC_CANVAS_OFFSET + 7) << 8) |
1657 (ENC_CANVAS_OFFSET + 6);
1658 } else if (request->fmt == FMT_RGBA8888) {
1659 r2y_en = 1;
1660 iformat = 12;
1661 }
1662 ret = 0;
1663 } else if (request->type == CANVAS_BUFF) {
1664 r2y_en = 0;
1665 if (request->scale_enable) {
1666#ifdef CONFIG_AM_GE2D
1667 struct config_para_ex_s ge2d_config;
1668 memset(&ge2d_config, 0,
1669 sizeof(struct config_para_ex_s));
1670 scale_frame(
1671 wq, request,
1672 &ge2d_config,
1673 input, true);
1674 iformat = 2;
1675 r2y_en = 0;
1676 input = ((ENC_CANVAS_OFFSET + 7) << 8) |
1677 (ENC_CANVAS_OFFSET + 6);
1678 ret = 0;
1679 goto MFDIN;
1680#else
1681 enc_pr(LOG_ERROR,
1682 "Warning: need enable ge2d for scale frame!\n");
1683 return -1;
1684#endif
1685 }
1686 if (request->fmt == FMT_YUV422_SINGLE) {
1687 iformat = 0;
1688 input = input & 0xff;
1689 } else if (request->fmt == FMT_YUV444_SINGLE) {
1690 iformat = 1;
1691 input = input & 0xff;
1692 } else if ((request->fmt == FMT_NV21)
1693 || (request->fmt == FMT_NV12)) {
1694 iformat = (request->fmt == FMT_NV21) ? 2 : 3;
1695 input = input & 0xffff;
1696 } else if (request->fmt == FMT_YUV420) {
1697 iformat = 4;
1698 input = input & 0xffffff;
1699 } else if ((request->fmt == FMT_YUV444_PLANE)
1700 || (request->fmt == FMT_RGB888_PLANE)) {
1701 if (request->fmt == FMT_RGB888_PLANE)
1702 r2y_en = 1;
1703 iformat = 5;
1704 input = input & 0xffffff;
1705 } else if ((request->fmt == FMT_YUV422_12BIT)
1706 || (request->fmt == FMT_YUV444_10BIT)
1707 || (request->fmt == FMT_YUV422_10BIT)) {
1708 iformat = 7;
1709 ifmt_extra = request->fmt - FMT_YUV422_12BIT;
1710 input = input & 0xff;
1711 } else
1712 ret = -1;
1713 }
1714MFDIN:
1715 if (ret == 0)
1716 mfdin_basic(input, iformat, oformat,
1717 picsize_x, picsize_y, r2y_en,
1718 (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) ?
1719 request->nr_mode : 0, ifmt_extra);
1720 wq->control.finish = true;
1721 return ret;
1722}
1723
1724static void avc_prot_init(struct encode_wq_s *wq,
1725 struct encode_request_s *request, u32 quant, bool IDR)
1726{
1727 u32 data32;
1728 u32 pic_width, pic_height;
1729 u32 pic_mb_nr;
1730 u32 pic_mbx, pic_mby;
1731 u32 i_pic_qp, p_pic_qp;
1732 u32 i_pic_qp_c, p_pic_qp_c;
1733 pic_width = wq->pic.encoder_width;
1734 pic_height = wq->pic.encoder_height;
1735 pic_mb_nr = 0;
1736 pic_mbx = 0;
1737 pic_mby = 0;
1738 i_pic_qp = quant;
1739 p_pic_qp = quant;
1740
1741#ifndef USE_OLD_DUMP_MC
1742 if ((get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) &&
1743 (encode_manager.ucode_index == UCODE_MODE_FULL)) {
1744 u32 pic_width_in_mb;
1745 u32 slice_qp;
1746 pic_width_in_mb = (pic_width + 15) / 16;
1747 WRITE_HREG(HCODEC_HDEC_MC_OMEM_AUTO,
1748 (1 << 31) | /* use_omem_mb_xy */
1749 ((pic_width_in_mb - 1) << 16)); /* omem_max_mb_x */
1750
1751 WRITE_HREG(HCODEC_VLC_ADV_CONFIG,
1752 /* early_mix_mc_hcmd -- will enable in P Picture */
1753 (0 << 10) |
1754 (1 << 9) | /* update_top_left_mix */
1755 (1 << 8) | /* p_top_left_mix */
1756 /* mv_cal_mixed_type -- will enable in P Picture */
1757 (0 << 7) |
1758 /* mc_hcmd_mixed_type -- will enable in P Picture */
1759 (0 << 6) |
1760 (1 << 5) | /* use_seperate_int_control */
1761 (1 << 4) | /* hcmd_intra_use_q_info */
1762 (1 << 3) | /* hcmd_left_use_prev_info */
1763 (1 << 2) | /* hcmd_use_q_info */
1764 (1 << 1) | /* use_q_delta_quant */
1765 /* detect_I16_from_I4 use qdct detected mb_type */
1766 (0 << 0));
1767
1768 WRITE_HREG(HCODEC_QDCT_ADV_CONFIG,
1769 (1 << 29) | /* mb_info_latch_no_I16_pred_mode */
1770 (1 << 28) | /* ie_dma_mbxy_use_i_pred */
1771 (1 << 27) | /* ie_dma_read_write_use_ip_idx */
1772 (1 << 26) | /* ie_start_use_top_dma_count */
1773 (1 << 25) | /* i_pred_top_dma_rd_mbbot */
1774 (1 << 24) | /* i_pred_top_dma_wr_disable */
1775 /* i_pred_mix -- will enable in P Picture */
1776 (0 << 23) |
1777 (1 << 22) | /* me_ab_rd_when_intra_in_p */
1778 (1 << 21) | /* force_mb_skip_run_when_intra */
1779 /* mc_out_mixed_type -- will enable in P Picture */
1780 (0 << 20) |
1781 (1 << 19) | /* ie_start_when_quant_not_full */
1782 (1 << 18) | /* mb_info_state_mix */
1783 /* mb_type_use_mix_result -- will enable in P Picture */
1784 (0 << 17) |
1785 /* me_cb_ie_read_enable -- will enable in P Picture */
1786 (0 << 16) |
1787 /* ie_cur_data_from_me -- will enable in P Picture */
1788 (0 << 15) |
1789 (1 << 14) | /* rem_per_use_table */
1790 (0 << 13) | /* q_latch_int_enable */
1791 (1 << 12) | /* q_use_table */
1792 (0 << 11) | /* q_start_wait */
1793 (1 << 10) | /* LUMA_16_LEFT_use_cur */
1794 (1 << 9) | /* DC_16_LEFT_SUM_use_cur */
1795 (1 << 8) | /* c_ref_ie_sel_cur */
1796 (0 << 7) | /* c_ipred_perfect_mode */
1797 (1 << 6) | /* ref_ie_ul_sel */
1798 (1 << 5) | /* mb_type_use_ie_result */
1799 (1 << 4) | /* detect_I16_from_I4 */
1800 (1 << 3) | /* ie_not_wait_ref_busy */
1801 (1 << 2) | /* ie_I16_enable */
1802 (3 << 0)); /* ie_done_sel // fastest when waiting */
1803
1804 if (request != NULL) {
1805 WRITE_HREG(HCODEC_IE_WEIGHT,
1806 (request->i16_weight << 16) |
1807 (request->i4_weight << 0));
1808
1809 WRITE_HREG(HCODEC_ME_WEIGHT, (request->me_weight << 0));
1810
1811 WRITE_HREG(HCODEC_SAD_CONTROL_0,
1812 /* ie_sad_offset_I16 */
1813 (request->i16_weight << 16) |
1814 /* ie_sad_offset_I4 */
1815 (request->i4_weight << 0));
1816
1817 WRITE_HREG(HCODEC_SAD_CONTROL_1,
1818 /* ie_sad_shift_I16 */
1819 (IE_SAD_SHIFT_I16 << 24) |
1820 /* ie_sad_shift_I4 */
1821 (IE_SAD_SHIFT_I4 << 20) |
1822 /* me_sad_shift_INTER */
1823 (ME_SAD_SHIFT_INTER << 16) |
1824 /* me_sad_offset_INTER */
1825 (request->me_weight << 0));
1826 } else {
1827 WRITE_HREG(HCODEC_IE_WEIGHT,
1828 (I16MB_WEIGHT_OFFSET << 16) |
1829 (I4MB_WEIGHT_OFFSET << 0));
1830
1831 WRITE_HREG(HCODEC_ME_WEIGHT, (ME_WEIGHT_OFFSET << 0));
1832
1833 WRITE_HREG(HCODEC_SAD_CONTROL_0,
1834 /* ie_sad_offset_I16 */
1835 (I16MB_WEIGHT_OFFSET << 16) |
1836 /* ie_sad_offset_I4 */
1837 (I4MB_WEIGHT_OFFSET << 0));
1838
1839 WRITE_HREG(HCODEC_SAD_CONTROL_1,
1840 /* ie_sad_shift_I16 */
1841 (IE_SAD_SHIFT_I16 << 24) |
1842 /* ie_sad_shift_I4 */
1843 (IE_SAD_SHIFT_I4 << 20) |
1844 /* me_sad_shift_INTER */
1845 (ME_SAD_SHIFT_INTER << 16) |
1846 /* me_sad_offset_INTER */
1847 (ME_WEIGHT_OFFSET << 0));
1848 }
1849
1850 WRITE_HREG(HCODEC_ADV_MV_CTL0,
1851 (ADV_MV_LARGE_16x8 << 31) |
1852 (ADV_MV_LARGE_8x16 << 30) |
1853 (ADV_MV_8x8_WEIGHT << 16) | /* adv_mv_8x8_weight */
1854 /* adv_mv_4x4x4_weight should be set bigger */
1855 (ADV_MV_4x4x4_WEIGHT << 0));
1856
1857 WRITE_HREG(HCODEC_ADV_MV_CTL1,
1858 /* adv_mv_16x16_weight */
1859 (ADV_MV_16x16_WEIGHT << 16) |
1860 (ADV_MV_LARGE_16x16 << 15) |
1861 (ADV_MV_16_8_WEIGHT << 0)); /* adv_mv_16_8_weight */
1862
1863 hcodec_prog_qtbl(wq);
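		/*
		 * Derive the starting slice QP from the first entries of
		 * the freshly programmed quant tables: average of I4/I16
		 * for IDR frames, average of I4/I16/ME for non-IDR frames.
		 */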
1864 if (IDR) {
1865 i_pic_qp =
1866 wq->quant_tbl_i4[wq->qp_table_id][0] & 0xff;
1867 i_pic_qp +=
1868 wq->quant_tbl_i16[wq->qp_table_id][0] & 0xff;
1869 i_pic_qp /= 2;
1870 p_pic_qp = i_pic_qp;
1871 } else {
1872 i_pic_qp =
1873 wq->quant_tbl_i4[wq->qp_table_id][0] & 0xff;
1874 i_pic_qp +=
1875 wq->quant_tbl_i16[wq->qp_table_id][0] & 0xff;
1876 p_pic_qp = wq->quant_tbl_me[wq->qp_table_id][0] & 0xff;
1877 slice_qp = (i_pic_qp + p_pic_qp) / 3;
1878 i_pic_qp = slice_qp;
1879 p_pic_qp = i_pic_qp;
1880 }
1881
1882 WRITE_HREG(HCODEC_QDCT_VLC_QUANT_CTL_0,
1883 (0 << 19) | /* vlc_delta_quant_1 */
1884 (i_pic_qp << 13) | /* vlc_quant_1 */
1885 (0 << 6) | /* vlc_delta_quant_0 */
1886 (i_pic_qp << 0)); /* vlc_quant_0 */
1887
1888 WRITE_HREG(HCODEC_QDCT_VLC_QUANT_CTL_1,
1889 (26 << 6) | /* vlc_max_delta_q_neg */
1890 (25 << 0)); /* vlc_max_delta_q_pos */
1891
1892 if (request != NULL) {
1893 wq->me_weight = request->me_weight;
1894 wq->i4_weight = request->i4_weight;
1895 wq->i16_weight = request->i16_weight;
1896 }
1897 }
1898#endif
1899
1900 WRITE_HREG(HCODEC_VLC_PIC_SIZE, pic_width | (pic_height << 16));
1901 WRITE_HREG(HCODEC_VLC_PIC_POSITION,
1902 (pic_mb_nr << 16) | (pic_mby << 8) | (pic_mbx << 0));
1903
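	/*
	 * Standard H.264 luma-to-chroma QP mapping: identity up to QP 29,
	 * then 29,30,31,32,32,33,34,34,35,35 for QP 30-39 and
	 * 36,36,37,37,37,38,38,38,39,39,39 for QP 40-50, saturating at 39.
	 */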
1904 switch (i_pic_qp) { /* synopsys parallel_case full_case */
1905 case 0:
1906 i_pic_qp_c = 0;
1907 break;
1908 case 1:
1909 i_pic_qp_c = 1;
1910 break;
1911 case 2:
1912 i_pic_qp_c = 2;
1913 break;
1914 case 3:
1915 i_pic_qp_c = 3;
1916 break;
1917 case 4:
1918 i_pic_qp_c = 4;
1919 break;
1920 case 5:
1921 i_pic_qp_c = 5;
1922 break;
1923 case 6:
1924 i_pic_qp_c = 6;
1925 break;
1926 case 7:
1927 i_pic_qp_c = 7;
1928 break;
1929 case 8:
1930 i_pic_qp_c = 8;
1931 break;
1932 case 9:
1933 i_pic_qp_c = 9;
1934 break;
1935 case 10:
1936 i_pic_qp_c = 10;
1937 break;
1938 case 11:
1939 i_pic_qp_c = 11;
1940 break;
1941 case 12:
1942 i_pic_qp_c = 12;
1943 break;
1944 case 13:
1945 i_pic_qp_c = 13;
1946 break;
1947 case 14:
1948 i_pic_qp_c = 14;
1949 break;
1950 case 15:
1951 i_pic_qp_c = 15;
1952 break;
1953 case 16:
1954 i_pic_qp_c = 16;
1955 break;
1956 case 17:
1957 i_pic_qp_c = 17;
1958 break;
1959 case 18:
1960 i_pic_qp_c = 18;
1961 break;
1962 case 19:
1963 i_pic_qp_c = 19;
1964 break;
1965 case 20:
1966 i_pic_qp_c = 20;
1967 break;
1968 case 21:
1969 i_pic_qp_c = 21;
1970 break;
1971 case 22:
1972 i_pic_qp_c = 22;
1973 break;
1974 case 23:
1975 i_pic_qp_c = 23;
1976 break;
1977 case 24:
1978 i_pic_qp_c = 24;
1979 break;
1980 case 25:
1981 i_pic_qp_c = 25;
1982 break;
1983 case 26:
1984 i_pic_qp_c = 26;
1985 break;
1986 case 27:
1987 i_pic_qp_c = 27;
1988 break;
1989 case 28:
1990 i_pic_qp_c = 28;
1991 break;
1992 case 29:
1993 i_pic_qp_c = 29;
1994 break;
1995 case 30:
1996 i_pic_qp_c = 29;
1997 break;
1998 case 31:
1999 i_pic_qp_c = 30;
2000 break;
2001 case 32:
2002 i_pic_qp_c = 31;
2003 break;
2004 case 33:
2005 i_pic_qp_c = 32;
2006 break;
2007 case 34:
2008 i_pic_qp_c = 32;
2009 break;
2010 case 35:
2011 i_pic_qp_c = 33;
2012 break;
2013 case 36:
2014 i_pic_qp_c = 34;
2015 break;
2016 case 37:
2017 i_pic_qp_c = 34;
2018 break;
2019 case 38:
2020 i_pic_qp_c = 35;
2021 break;
2022 case 39:
2023 i_pic_qp_c = 35;
2024 break;
2025 case 40:
2026 i_pic_qp_c = 36;
2027 break;
2028 case 41:
2029 i_pic_qp_c = 36;
2030 break;
2031 case 42:
2032 i_pic_qp_c = 37;
2033 break;
2034 case 43:
2035 i_pic_qp_c = 37;
2036 break;
2037 case 44:
2038 i_pic_qp_c = 37;
2039 break;
2040 case 45:
2041 i_pic_qp_c = 38;
2042 break;
2043 case 46:
2044 i_pic_qp_c = 38;
2045 break;
2046 case 47:
2047 i_pic_qp_c = 38;
2048 break;
2049 case 48:
2050 i_pic_qp_c = 39;
2051 break;
2052 case 49:
2053 i_pic_qp_c = 39;
2054 break;
2055 case 50:
2056 i_pic_qp_c = 39;
2057 break;
2058 default:
2059 i_pic_qp_c = 39;
2060 break;
2061 }
2062
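	/* same luma-to-chroma QP mapping, applied to the P-picture QP */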
2063 switch (p_pic_qp) { /* synopsys parallel_case full_case */
2064 case 0:
2065 p_pic_qp_c = 0;
2066 break;
2067 case 1:
2068 p_pic_qp_c = 1;
2069 break;
2070 case 2:
2071 p_pic_qp_c = 2;
2072 break;
2073 case 3:
2074 p_pic_qp_c = 3;
2075 break;
2076 case 4:
2077 p_pic_qp_c = 4;
2078 break;
2079 case 5:
2080 p_pic_qp_c = 5;
2081 break;
2082 case 6:
2083 p_pic_qp_c = 6;
2084 break;
2085 case 7:
2086 p_pic_qp_c = 7;
2087 break;
2088 case 8:
2089 p_pic_qp_c = 8;
2090 break;
2091 case 9:
2092 p_pic_qp_c = 9;
2093 break;
2094 case 10:
2095 p_pic_qp_c = 10;
2096 break;
2097 case 11:
2098 p_pic_qp_c = 11;
2099 break;
2100 case 12:
2101 p_pic_qp_c = 12;
2102 break;
2103 case 13:
2104 p_pic_qp_c = 13;
2105 break;
2106 case 14:
2107 p_pic_qp_c = 14;
2108 break;
2109 case 15:
2110 p_pic_qp_c = 15;
2111 break;
2112 case 16:
2113 p_pic_qp_c = 16;
2114 break;
2115 case 17:
2116 p_pic_qp_c = 17;
2117 break;
2118 case 18:
2119 p_pic_qp_c = 18;
2120 break;
2121 case 19:
2122 p_pic_qp_c = 19;
2123 break;
2124 case 20:
2125 p_pic_qp_c = 20;
2126 break;
2127 case 21:
2128 p_pic_qp_c = 21;
2129 break;
2130 case 22:
2131 p_pic_qp_c = 22;
2132 break;
2133 case 23:
2134 p_pic_qp_c = 23;
2135 break;
2136 case 24:
2137 p_pic_qp_c = 24;
2138 break;
2139 case 25:
2140 p_pic_qp_c = 25;
2141 break;
2142 case 26:
2143 p_pic_qp_c = 26;
2144 break;
2145 case 27:
2146 p_pic_qp_c = 27;
2147 break;
2148 case 28:
2149 p_pic_qp_c = 28;
2150 break;
2151 case 29:
2152 p_pic_qp_c = 29;
2153 break;
2154 case 30:
2155 p_pic_qp_c = 29;
2156 break;
2157 case 31:
2158 p_pic_qp_c = 30;
2159 break;
2160 case 32:
2161 p_pic_qp_c = 31;
2162 break;
2163 case 33:
2164 p_pic_qp_c = 32;
2165 break;
2166 case 34:
2167 p_pic_qp_c = 32;
2168 break;
2169 case 35:
2170 p_pic_qp_c = 33;
2171 break;
2172 case 36:
2173 p_pic_qp_c = 34;
2174 break;
2175 case 37:
2176 p_pic_qp_c = 34;
2177 break;
2178 case 38:
2179 p_pic_qp_c = 35;
2180 break;
2181 case 39:
2182 p_pic_qp_c = 35;
2183 break;
2184 case 40:
2185 p_pic_qp_c = 36;
2186 break;
2187 case 41:
2188 p_pic_qp_c = 36;
2189 break;
2190 case 42:
2191 p_pic_qp_c = 37;
2192 break;
2193 case 43:
2194 p_pic_qp_c = 37;
2195 break;
2196 case 44:
2197 p_pic_qp_c = 37;
2198 break;
2199 case 45:
2200 p_pic_qp_c = 38;
2201 break;
2202 case 46:
2203 p_pic_qp_c = 38;
2204 break;
2205 case 47:
2206 p_pic_qp_c = 38;
2207 break;
2208 case 48:
2209 p_pic_qp_c = 39;
2210 break;
2211 case 49:
2212 p_pic_qp_c = 39;
2213 break;
2214 case 50:
2215 p_pic_qp_c = 39;
2216 break;
2217 default:
2218 p_pic_qp_c = 39;
2219 break;
2220 }
2221 WRITE_HREG(HCODEC_QDCT_Q_QUANT_I,
2222 (i_pic_qp_c << 22) |
2223 (i_pic_qp << 16) |
2224 ((i_pic_qp_c % 6) << 12) |
2225 ((i_pic_qp_c / 6) << 8) |
2226 ((i_pic_qp % 6) << 4) |
2227 ((i_pic_qp / 6) << 0));
2228
2229 WRITE_HREG(HCODEC_QDCT_Q_QUANT_P,
2230 (p_pic_qp_c << 22) |
2231 (p_pic_qp << 16) |
2232 ((p_pic_qp_c % 6) << 12) |
2233 ((p_pic_qp_c / 6) << 8) |
2234 ((p_pic_qp % 6) << 4) |
2235 ((p_pic_qp / 6) << 0));
2236
2237#ifdef ENABLE_IGNORE_FUNCTION
2238 WRITE_HREG(HCODEC_IGNORE_CONFIG,
2239 (1 << 31) | /* ignore_lac_coeff_en */
2240 (1 << 26) | /* ignore_lac_coeff_else (<1) */
2241 (1 << 21) | /* ignore_lac_coeff_2 (<1) */
2242 (2 << 16) | /* ignore_lac_coeff_1 (<2) */
2243 (1 << 15) | /* ignore_cac_coeff_en */
2244 (1 << 10) | /* ignore_cac_coeff_else (<1) */
2245 (1 << 5) | /* ignore_cac_coeff_2 (<1) */
2246 (3 << 0)); /* ignore_cac_coeff_1 (<2) */
2247
2248#ifndef USE_OLD_DUMP_MC
2249 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB)
2250 WRITE_HREG(HCODEC_IGNORE_CONFIG_2,
2251 (1 << 31) | /* ignore_t_lac_coeff_en */
2252 (1 << 26) | /* ignore_t_lac_coeff_else (<1) */
2253 (2 << 21) | /* ignore_t_lac_coeff_2 (<2) */
2254 (6 << 16) | /* ignore_t_lac_coeff_1 (<6) */
2255 (1<<15) | /* ignore_cdc_coeff_en */
2256 (0<<14) | /* ignore_t_lac_coeff_else_le_3 */
2257 (1<<13) | /* ignore_t_lac_coeff_else_le_4 */
2258 (1<<12) | /* ignore_cdc_only_when_empty_cac_inter */
2259 (1<<11) | /* ignore_cdc_only_when_one_empty_inter */
2260 /* ignore_cdc_range_max_inter 0-0, 1-1, 2-2, 3-3 */
2261 (2<<9) |
2262 /* ignore_cdc_abs_max_inter 0-1, 1-2, 2-3, 3-4 */
2263 (0<<7) |
2264 /* ignore_cdc_only_when_empty_cac_intra */
2265 (1<<5) |
2266 /* ignore_cdc_only_when_one_empty_intra */
2267 (1<<4) |
2268 /* ignore_cdc_range_max_intra 0-0, 1-1, 2-2, 3-3 */
2269 (1<<2) |
2270 /* ignore_cdc_abs_max_intra 0-1, 1-2, 2-3, 3-4 */
2271 (0<<0));
2272 else
2273#endif
2274 WRITE_HREG(HCODEC_IGNORE_CONFIG_2,
2275 (1 << 31) | /* ignore_t_lac_coeff_en */
2276 (1 << 26) | /* ignore_t_lac_coeff_else (<1) */
2277 (1 << 21) | /* ignore_t_lac_coeff_2 (<1) */
2278 (5 << 16) | /* ignore_t_lac_coeff_1 (<5) */
2279 (0 << 0));
2280#else
2281 WRITE_HREG(HCODEC_IGNORE_CONFIG, 0);
2282 WRITE_HREG(HCODEC_IGNORE_CONFIG_2, 0);
2283#endif
2284
2285 WRITE_HREG(HCODEC_QDCT_MB_CONTROL,
2286 (1 << 9) | /* mb_info_soft_reset */
2287 (1 << 0)); /* mb read buffer soft reset */
2288
2289 if (encode_manager.ucode_index != UCODE_MODE_SW_MIX) {
2290 u32 me_mode = (ie_me_mode >> ME_PIXEL_MODE_SHIFT) &
2291 ME_PIXEL_MODE_MASK;
2292 WRITE_HREG(HCODEC_QDCT_MB_CONTROL,
2293 (1 << 28) | /* ignore_t_p8x8 */
2294 (0 << 27) | /* zero_mc_out_null_non_skipped_mb */
2295 (0 << 26) | /* no_mc_out_null_non_skipped_mb */
2296 (0 << 25) | /* mc_out_even_skipped_mb */
2297 (0 << 24) | /* mc_out_wait_cbp_ready */
2298 (0 << 23) | /* mc_out_wait_mb_type_ready */
2299 (1 << 29) | /* ie_start_int_enable */
2300 (1 << 19) | /* i_pred_enable */
2301 (1 << 20) | /* ie_sub_enable */
2302 (1 << 18) | /* iq_enable */
2303 (1 << 17) | /* idct_enable */
2304 (1 << 14) | /* mb_pause_enable */
2305 (1 << 13) | /* q_enable */
2306 (1 << 12) | /* dct_enable */
2307 (1 << 10) | /* mb_info_en */
2308 (0 << 3) | /* endian */
2309 (0 << 1) | /* mb_read_en */
2310 (0 << 0)); /* soft reset */
2311
2312 WRITE_HREG(HCODEC_SAD_CONTROL,
2313 (0 << 3) | /* ie_result_buff_enable */
2314 (1 << 2) | /* ie_result_buff_soft_reset */
2315 (0 << 1) | /* sad_enable */
2316 (1 << 0)); /* sad soft reset */
2317
2318 WRITE_HREG(HCODEC_IE_RESULT_BUFFER, 0);
2319
2320 WRITE_HREG(HCODEC_SAD_CONTROL,
2321 (1 << 3) | /* ie_result_buff_enable */
2322 (0 << 2) | /* ie_result_buff_soft_reset */
2323 (1 << 1) | /* sad_enable */
2324 (0 << 0)); /* sad soft reset */
2325
2326#ifndef USE_OLD_DUMP_MC
2327 if ((get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) &&
2328 (encode_manager.ucode_index == UCODE_MODE_FULL)) {
2329 WRITE_HREG(HCODEC_IE_CONTROL,
2330 (1 << 30) | /* active_ul_block */
2331 (0 << 1) | /* ie_enable */
2332 (1 << 0)); /* ie soft reset */
2333
2334 WRITE_HREG(HCODEC_IE_CONTROL,
2335 (1 << 30) | /* active_ul_block */
2336 (0 << 1) | /* ie_enable */
2337 (0 << 0)); /* ie soft reset */
2338
2339 WRITE_HREG(HCODEC_ME_SKIP_LINE,
2340 (8 << 24) | /* step_3_skip_line */
2341 (8 << 18) | /* step_2_skip_line */
2342 (2 << 12) | /* step_1_skip_line */
2343 (0 << 6) | /* step_0_skip_line */
2344 (0 << 0));
2345
2346 } else
2347#endif
2348 {
2349 WRITE_HREG(HCODEC_IE_CONTROL,
2350 (0 << 1) | /* ie_enable */
2351 (1 << 0)); /* ie soft reset */
2352
2353 WRITE_HREG(HCODEC_IE_CONTROL,
2354 (0 << 1) | /* ie_enable */
2355 (0 << 0)); /* ie soft reset */
2356 if (me_mode == 3) {
2357 WRITE_HREG(HCODEC_ME_SKIP_LINE,
2358 (8 << 24) | /* step_3_skip_line */
2359 (8 << 18) | /* step_2_skip_line */
2360 (2 << 12) | /* step_1_skip_line */
2361 (0 << 6) | /* step_0_skip_line */
2362 (0 << 0));
2363 } else {
2364 WRITE_HREG(HCODEC_ME_SKIP_LINE,
2365 (4 << 24) | /* step_3_skip_line */
2366 (4 << 18) | /* step_2_skip_line */
2367 (2 << 12) | /* step_1_skip_line */
2368 (0 << 6) | /* step_0_skip_line */
2369 (0 << 0));
2370 }
2371 }
2372
2373 WRITE_HREG(HCODEC_ME_MV_MERGE_CTL, me_mv_merge_ctl);
2374 WRITE_HREG(HCODEC_ME_STEP0_CLOSE_MV, me_step0_close_mv);
2375
2376 WRITE_HREG(HCODEC_ME_SAD_ENOUGH_01, me_sad_enough_01);
2377 /* (0x18<<12) | // me_sad_enough_1 */
2378 /* (0x10<<0)); // me_sad_enough_0 */
2379
2380 WRITE_HREG(HCODEC_ME_SAD_ENOUGH_23, me_sad_enough_23);
2381 /* (0x20<<0) | // me_sad_enough_2 */
2382 /* (0<<12)); // me_sad_enough_3 */
2383
2384 WRITE_HREG(HCODEC_ME_F_SKIP_SAD, me_f_skip_sad);
2385 WRITE_HREG(HCODEC_ME_F_SKIP_WEIGHT, me_f_skip_weight);
2386 WRITE_HREG(HCODEC_ME_MV_WEIGHT_01, me_mv_weight_01);
2387 WRITE_HREG(HCODEC_ME_MV_WEIGHT_23, me_mv_weight_23);
2388 WRITE_HREG(HCODEC_ME_SAD_RANGE_INC, me_sad_range_inc);
2389
2390#ifndef USE_OLD_DUMP_MC
2391 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_TXL) {
2392 WRITE_HREG(HCODEC_V5_SIMPLE_MB_CTL, 0);
2393 WRITE_HREG(HCODEC_V5_SIMPLE_MB_CTL,
2394 (v5_use_small_diff_cnt << 7) |
2395 (v5_simple_mb_inter_all_en << 6) |
2396 (v5_simple_mb_inter_8x8_en << 5) |
2397 (v5_simple_mb_inter_16_8_en << 4) |
2398 (v5_simple_mb_inter_16x16_en << 3) |
2399 (v5_simple_mb_intra_en << 2) |
2400 (v5_simple_mb_C_en << 1) |
2401 (v5_simple_mb_Y_en << 0));
2402 WRITE_HREG(HCODEC_V5_MB_DIFF_SUM, 0);
2403 WRITE_HREG(HCODEC_V5_SMALL_DIFF_CNT,
2404 (v5_small_diff_C<<16) |
2405 (v5_small_diff_Y<<0));
2406 WRITE_HREG(HCODEC_V5_SIMPLE_MB_DQUANT,
2407 v5_simple_dq_setting);
2408 WRITE_HREG(HCODEC_V5_SIMPLE_MB_ME_WEIGHT,
2409 v5_simple_me_weight_setting);
2410 WRITE_HREG(HCODEC_QDCT_CONFIG, 1 << 0);
2411 }
2412
2413 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXL) {
2414 WRITE_HREG(HCODEC_V4_FORCE_SKIP_CFG,
2415 (i_pic_qp << 26) | /* v4_force_q_r_intra */
2416 (i_pic_qp << 20) | /* v4_force_q_r_inter */
2417 (0 << 19) | /* v4_force_q_y_enable */
2418 (5 << 16) | /* v4_force_qr_y */
2419 (6 << 12) | /* v4_force_qp_y */
2420 (0 << 0)); /* v4_force_skip_sad */
2421
2422 /* V3 Force skip */
2423 WRITE_HREG(HCODEC_V3_SKIP_CONTROL,
2424 (1 << 31) | /* v3_skip_enable */
2425 (0 << 30) | /* v3_step_1_weight_enable */
2426 (1 << 28) | /* v3_mv_sad_weight_enable */
2427 (1 << 27) | /* v3_ipred_type_enable */
2428 (V3_FORCE_SKIP_SAD_1 << 12) |
2429 (V3_FORCE_SKIP_SAD_0 << 0));
2430 WRITE_HREG(HCODEC_V3_SKIP_WEIGHT,
2431 (V3_SKIP_WEIGHT_1 << 16) |
2432 (V3_SKIP_WEIGHT_0 << 0));
2433 WRITE_HREG(HCODEC_V3_L1_SKIP_MAX_SAD,
2434 (V3_LEVEL_1_F_SKIP_MAX_SAD << 16) |
2435 (V3_LEVEL_1_SKIP_MAX_SAD << 0));
2436 WRITE_HREG(HCODEC_V3_L2_SKIP_WEIGHT,
2437 (V3_FORCE_SKIP_SAD_2 << 16) |
2438 (V3_SKIP_WEIGHT_2 << 0));
2439 if (request != NULL) {
2440 unsigned int off1, off2;
2441 off1 = V3_IE_F_ZERO_SAD_I4 - I4MB_WEIGHT_OFFSET;
2442 off2 = V3_IE_F_ZERO_SAD_I16
2443 - I16MB_WEIGHT_OFFSET;
2444 WRITE_HREG(HCODEC_V3_F_ZERO_CTL_0,
2445 ((request->i16_weight + off2) << 16) |
2446 ((request->i4_weight + off1) << 0));
2447 off1 = V3_ME_F_ZERO_SAD - ME_WEIGHT_OFFSET;
2448 WRITE_HREG(HCODEC_V3_F_ZERO_CTL_1,
2449 (0 << 25) |
2450 /* v3_no_ver_when_top_zero_en */
2451 (0 << 24) |
2452 /* v3_no_hor_when_left_zero_en */
2453 (3 << 16) | /* type_hor break */
2454 ((request->me_weight + off1) << 0));
2455 } else {
2456 WRITE_HREG(HCODEC_V3_F_ZERO_CTL_0,
2457 (V3_IE_F_ZERO_SAD_I16 << 16) |
2458 (V3_IE_F_ZERO_SAD_I4 << 0));
2459 WRITE_HREG(HCODEC_V3_F_ZERO_CTL_1,
2460 (0 << 25) |
2461 /* v3_no_ver_when_top_zero_en */
2462 (0 << 24) |
2463 /* v3_no_hor_when_left_zero_en */
2464 (3 << 16) | /* type_hor break */
2465 (V3_ME_F_ZERO_SAD << 0));
2466 }
2467 } else if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB) {
2468 /* V3 Force skip */
2469 WRITE_HREG(HCODEC_V3_SKIP_CONTROL,
2470 (1 << 31) | /* v3_skip_enable */
2471 (0 << 30) | /* v3_step_1_weight_enable */
2472 (1 << 28) | /* v3_mv_sad_weight_enable */
2473 (1 << 27) | /* v3_ipred_type_enable */
2474 (0 << 12) | /* V3_FORCE_SKIP_SAD_1 */
2475 (0 << 0)); /* V3_FORCE_SKIP_SAD_0 */
2476 WRITE_HREG(HCODEC_V3_SKIP_WEIGHT,
2477 (V3_SKIP_WEIGHT_1 << 16) |
2478 (V3_SKIP_WEIGHT_0 << 0));
2479 WRITE_HREG(HCODEC_V3_L1_SKIP_MAX_SAD,
2480 (V3_LEVEL_1_F_SKIP_MAX_SAD << 16) |
2481 (V3_LEVEL_1_SKIP_MAX_SAD << 0));
2482 WRITE_HREG(HCODEC_V3_L2_SKIP_WEIGHT,
2483 (0 << 16) | /* V3_FORCE_SKIP_SAD_2 */
2484 (V3_SKIP_WEIGHT_2 << 0));
2485 WRITE_HREG(HCODEC_V3_F_ZERO_CTL_0,
2486 (0 << 16) | /* V3_IE_F_ZERO_SAD_I16 */
2487 (0 << 0)); /* V3_IE_F_ZERO_SAD_I4 */
2488 WRITE_HREG(HCODEC_V3_F_ZERO_CTL_1,
2489 (0 << 25) | /* v3_no_ver_when_top_zero_en */
2490 (0 << 24) | /* v3_no_hor_when_left_zero_en */
2491 (3 << 16) | /* type_hor break */
2492 (0 << 0)); /* V3_ME_F_ZERO_SAD */
2493 }
2494 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB) {
2495 int i;
2496 /* MV SAD Table */
2497 for (i = 0; i < 64; i++)
2498 WRITE_HREG(HCODEC_V3_MV_SAD_TABLE,
2499 v3_mv_sad[i]);
2500
2501 /* IE PRED SAD Table*/
2502 WRITE_HREG(HCODEC_V3_IPRED_TYPE_WEIGHT_0,
2503 (C_ipred_weight_H << 24) |
2504 (C_ipred_weight_V << 16) |
2505 (I4_ipred_weight_else << 8) |
2506 (I4_ipred_weight_most << 0));
2507 WRITE_HREG(HCODEC_V3_IPRED_TYPE_WEIGHT_1,
2508 (I16_ipred_weight_DC << 24) |
2509 (I16_ipred_weight_H << 16) |
2510 (I16_ipred_weight_V << 8) |
2511 (C_ipred_weight_DC << 0));
2512 WRITE_HREG(HCODEC_V3_LEFT_SMALL_MAX_SAD,
2513 (v3_left_small_max_me_sad << 16) |
2514 (v3_left_small_max_ie_sad << 0));
2515 }
2516#endif
2517 WRITE_HREG(HCODEC_IE_DATA_FEED_BUFF_INFO, 0);
2518 } else {
2519 WRITE_HREG(HCODEC_QDCT_MB_CONTROL,
2520 (0 << 28) | /* ignore_t_p8x8 */
2521 (0 << 27) | /* zero_mc_out_null_non_skipped_mb */
2522 (0 << 26) | /* no_mc_out_null_non_skipped_mb */
2523 (0 << 25) | /* mc_out_even_skipped_mb */
2524 (0 << 24) | /* mc_out_wait_cbp_ready */
2525 (0 << 23) | /* mc_out_wait_mb_type_ready */
2526 (1 << 22) | /* i_pred_int_enable */
2527 (1 << 19) | /* i_pred_enable */
2528 (1 << 20) | /* ie_sub_enable */
2529 (1 << 18) | /* iq_enable */
2530 (1 << 17) | /* idct_enable */
2531 (1 << 14) | /* mb_pause_enable */
2532 (1 << 13) | /* q_enable */
2533 (1 << 12) | /* dct_enable */
2534 (1 << 10) | /* mb_info_en */
2535 (avc_endian << 3) | /* endian */
2536 (1 << 1) | /* mb_read_en */
2537 (0 << 0)); /* soft reset */
2538 }
2539 WRITE_HREG(HCODEC_CURR_CANVAS_CTRL, 0);
2540 data32 = READ_HREG(HCODEC_VLC_CONFIG);
2541 data32 = data32 | (1 << 0); /* set pop_coeff_even_all_zero */
2542 WRITE_HREG(HCODEC_VLC_CONFIG, data32);
2543
2544 if (encode_manager.ucode_index == UCODE_MODE_SW_MIX)
2545 WRITE_HREG(SW_CTL_INFO_DDR_START,
2546 wq->mem.sw_ctl_info_start_addr);
2547#ifndef USE_OLD_DUMP_MC
2548 else if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB)
2549 WRITE_HREG(INFO_DUMP_START_ADDR,
2550 wq->mem.dump_info_ddr_start_addr);
2551#endif
2552 else {
2553 if (IDR) {
2554 WRITE_HREG(BITS_INFO_DDR_START,
2555 wq->mem.intra_bits_info_ddr_start_addr);
2556 WRITE_HREG(MV_INFO_DDR_START,
2557 wq->mem.intra_pred_info_ddr_start_addr);
2558 } else {
2559 WRITE_HREG(BITS_INFO_DDR_START,
2560 wq->mem.inter_bits_info_ddr_start_addr);
2561 WRITE_HREG(MV_INFO_DDR_START,
2562 wq->mem.inter_mv_info_ddr_start_addr);
2563 }
2564 }
2565
2566 /* clear mailbox interrupt */
2567 WRITE_HREG(HCODEC_IRQ_MBOX_CLR, 1);
2568
2569 /* enable mailbox interrupt */
2570 WRITE_HREG(HCODEC_IRQ_MBOX_MASK, 1);
2571}
2572
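/*
 * HCODEC reset/start/stop helpers. The repeated READ_VREG(DOS_SW_RESET1)
 * calls are dummy register reads, used here (as in amvenc_loadmc below,
 * "Read CBUS register for timing") to let the reset pulse settle before
 * the next access.
 */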
2573void amvenc_reset(void)
2574{
2575 READ_VREG(DOS_SW_RESET1);
2576 READ_VREG(DOS_SW_RESET1);
2577 READ_VREG(DOS_SW_RESET1);
2578 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB)
2579 WRITE_VREG(DOS_SW_RESET1,
2580 (1 << 2) | (1 << 6) |
2581 (1 << 7) | (1 << 8) |
2582 (1 << 14) | (1 << 16) |
2583 (1 << 17));
2584 else
2585 WRITE_VREG(DOS_SW_RESET1,
2586 (1 << 2) | (1 << 6) | (1 << 7) |
2587 (1 << 8) | (1 << 16) | (1 << 17));
2588 WRITE_VREG(DOS_SW_RESET1, 0);
2589 READ_VREG(DOS_SW_RESET1);
2590 READ_VREG(DOS_SW_RESET1);
2591 READ_VREG(DOS_SW_RESET1);
2592}
2593
2594void amvenc_start(void)
2595{
2596 READ_VREG(DOS_SW_RESET1);
2597 READ_VREG(DOS_SW_RESET1);
2598 READ_VREG(DOS_SW_RESET1);
2599 WRITE_VREG(DOS_SW_RESET1, (1 << 12) | (1 << 11));
2600 WRITE_VREG(DOS_SW_RESET1, 0);
2601
2602 READ_VREG(DOS_SW_RESET1);
2603 READ_VREG(DOS_SW_RESET1);
2604 READ_VREG(DOS_SW_RESET1);
2605
2606 WRITE_HREG(HCODEC_MPSR, 0x0001);
2607}
2608
2609void amvenc_stop(void)
2610{
2611 ulong timeout = jiffies + HZ;
2612
2613 WRITE_HREG(HCODEC_MPSR, 0);
2614 WRITE_HREG(HCODEC_CPSR, 0);
2615 while (READ_HREG(HCODEC_IMEM_DMA_CTRL) & 0x8000) {
2616 if (time_after(jiffies, timeout))
2617 break;
2618 }
2619 READ_VREG(DOS_SW_RESET1);
2620 READ_VREG(DOS_SW_RESET1);
2621 READ_VREG(DOS_SW_RESET1);
2622
2623 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB)
2624 WRITE_VREG(DOS_SW_RESET1,
2625 (1 << 12) | (1 << 11) |
2626 (1 << 2) | (1 << 6) |
2627 (1 << 7) | (1 << 8) |
2628 (1 << 14) | (1 << 16) |
2629 (1 << 17));
2630 else
2631 WRITE_VREG(DOS_SW_RESET1,
2632 (1 << 12) | (1 << 11) |
2633 (1 << 2) | (1 << 6) |
2634 (1 << 7) | (1 << 8) |
2635 (1 << 16) | (1 << 17));
2636
2637
2638 WRITE_VREG(DOS_SW_RESET1, 0);
2639
2640 READ_VREG(DOS_SW_RESET1);
2641 READ_VREG(DOS_SW_RESET1);
2642 READ_VREG(DOS_SW_RESET1);
2643}
2644
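/*
 * amvenc_loadmc: fetch the encoder microcode via
 * get_decoder_firmware_data(), DMA-map the buffer, and let the HCODEC
 * IMEM DMA engine copy it into instruction memory, polling bit 15 of
 * HCODEC_IMEM_DMA_CTRL for completion with a one-second timeout.
 */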
2645static void __iomem *mc_addr;
2646static u32 mc_addr_map;
2647#define MC_SIZE (4096 * 16)
2648s32 amvenc_loadmc(const char *p, struct encode_wq_s *wq)
2649{
2650 ulong timeout;
2651 s32 ret = 0;
2652
2653	/* use static memory */
2654 if (mc_addr == NULL) {
2655 mc_addr = kmalloc(MC_SIZE, GFP_KERNEL);
2656 if (!mc_addr) {
2657			enc_pr(LOG_ERROR, "avc loadmc: mc buffer alloc failed.\n");
2658 return -ENOMEM;
2659 }
2660 }
2661
2662 enc_pr(LOG_ALL, "avc encode ucode name is %s\n", p);
2663 ret = get_decoder_firmware_data(VFORMAT_H264_ENC, p,
2664 (u8 *)mc_addr, MC_SIZE);
2665 if (ret < 0) {
2666 enc_pr(LOG_ERROR,
2667 "avc microcode fail ret=%d, name: %s, wq:%p.\n",
2668 ret, p, (void *)wq);
2669 }
2670
2671 mc_addr_map = dma_map_single(
2672 &encode_manager.this_pdev->dev,
2673 mc_addr, MC_SIZE, DMA_TO_DEVICE);
2674
2675 /* mc_addr_map = wq->mem.assit_buffer_offset; */
2676 /* mc_addr = ioremap_wc(mc_addr_map, MC_SIZE); */
2677 /* memcpy(mc_addr, p, MC_SIZE); */
2678 enc_pr(LOG_ALL, "address 0 is 0x%x\n", *((u32 *)mc_addr));
2679 enc_pr(LOG_ALL, "address 1 is 0x%x\n", *((u32 *)mc_addr + 1));
2680 enc_pr(LOG_ALL, "address 2 is 0x%x\n", *((u32 *)mc_addr + 2));
2681 enc_pr(LOG_ALL, "address 3 is 0x%x\n", *((u32 *)mc_addr + 3));
2682 WRITE_HREG(HCODEC_MPSR, 0);
2683 WRITE_HREG(HCODEC_CPSR, 0);
2684
2685 /* Read CBUS register for timing */
2686 timeout = READ_HREG(HCODEC_MPSR);
2687 timeout = READ_HREG(HCODEC_MPSR);
2688
2689 timeout = jiffies + HZ;
2690
2691 WRITE_HREG(HCODEC_IMEM_DMA_ADR, mc_addr_map);
2692 WRITE_HREG(HCODEC_IMEM_DMA_COUNT, 0x1000);
2693 WRITE_HREG(HCODEC_IMEM_DMA_CTRL, (0x8000 | (7 << 16)));
2694
2695 while (READ_HREG(HCODEC_IMEM_DMA_CTRL) & 0x8000) {
2696 if (time_before(jiffies, timeout))
2697 schedule();
2698 else {
2699 enc_pr(LOG_ERROR, "hcodec load mc error\n");
2700 ret = -EBUSY;
2701 break;
2702 }
2703 }
2704 dma_unmap_single(
2705 &encode_manager.this_pdev->dev,
2706 mc_addr_map, MC_SIZE, DMA_TO_DEVICE);
2707 return ret;
2708}
2709
2710const u32 fix_mc[] __aligned(8) = {
2711 0x0809c05a, 0x06696000, 0x0c780000, 0x00000000
2712};
2713
2714
2715/*
2716 * DOS top level register access fix.
2717 * When hcodec is running, the protocol register HCODEC_CCPU_INTR_MSK
2718 * is set so that hcodec performs one CBUS access outside the DOS
2719 * domain, working around a HW bug in the 4k2k dual-decoder setup.
2720 * If hcodec is not running, the small fix_mc ucode above is loaded
2721 * and executed instead.
2722 */
2723void amvenc_dos_top_reg_fix(void)
2724{
2725 bool hcodec_on;
2726 ulong flags;
2727
2728 spin_lock_irqsave(&lock, flags);
2729
2730 hcodec_on = vdec_on(VDEC_HCODEC);
2731
2732 if ((hcodec_on) && (READ_VREG(HCODEC_MPSR) & 1)) {
2733 WRITE_HREG(HCODEC_CCPU_INTR_MSK, 1);
2734 spin_unlock_irqrestore(&lock, flags);
2735 return;
2736 }
2737
2738 if (!hcodec_on)
2739 vdec_poweron(VDEC_HCODEC);
2740
2741 amhcodec_loadmc(fix_mc);
2742
2743 amhcodec_start();
2744
2745 udelay(1000);
2746
2747 amhcodec_stop();
2748
2749 if (!hcodec_on)
2750 vdec_poweroff(VDEC_HCODEC);
2751
2752 spin_unlock_irqrestore(&lock, flags);
2753}
2754
2755bool amvenc_avc_on(void)
2756{
2757 bool hcodec_on;
2758 ulong flags;
2759
2760 spin_lock_irqsave(&lock, flags);
2761
2762 hcodec_on = vdec_on(VDEC_HCODEC);
2763 hcodec_on &= (encode_manager.wq_count > 0);
2764
2765 spin_unlock_irqrestore(&lock, flags);
2766 return hcodec_on;
2767}
2768
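/*
 * avc_poweron/avc_poweroff: HCODEC power sequencing. Power-up enables
 * the vdec gate and the AO power domain, soft-resets the DOS block,
 * enables the HCODEC clock, powers up the HCODEC memories and removes
 * isolation; power-down reverses the sequence.
 */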
2769static s32 avc_poweron(u32 clock)
2770{
2771 ulong flags;
2772 u32 data32;
2773
2774 data32 = 0;
2775
2776 amports_switch_gate("vdec", 1);
2777
2778 spin_lock_irqsave(&lock, flags);
2779
2780 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_M8) {
2781 WRITE_AOREG(AO_RTI_PWR_CNTL_REG0,
2782 (READ_AOREG(AO_RTI_PWR_CNTL_REG0) & (~0x18)));
2783 udelay(10);
2784 /* Powerup HCODEC */
2785 /* [1:0] HCODEC */
2786 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
2787 (READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) & (~0x3)));
2788 udelay(10);
2789 }
2790
2791 WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
2792 WRITE_VREG(DOS_SW_RESET1, 0);
2793
2794 /* Enable Dos internal clock gating */
2795 hvdec_clock_enable(clock);
2796
2797 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_M8) {
2798 /* Powerup HCODEC memories */
2799 WRITE_VREG(DOS_MEM_PD_HCODEC, 0x0);
2800
2801 /* Remove HCODEC ISO */
2802 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
2803 (READ_AOREG(AO_RTI_GEN_PWR_ISO0) & (~0x30)));
2804 udelay(10);
2805 }
2806 /* Disable auto-clock gate */
2807 WRITE_VREG(DOS_GEN_CTRL0, (READ_VREG(DOS_GEN_CTRL0) | 0x1));
2808 WRITE_VREG(DOS_GEN_CTRL0, (READ_VREG(DOS_GEN_CTRL0) & 0xFFFFFFFE));
2809
2810#ifdef USE_VDEC2
2811 if (get_cpu_type() == MESON_CPU_MAJOR_ID_M8) {
2812 if (!vdec_on(VDEC_2) && get_vdec2_usage() == USAGE_NONE) {
2813 set_vdec2_usage(USAGE_ENCODE);
2814 vdec_poweron(VDEC_2);
2815 }
2816 }
2817#endif
2818
2819 spin_unlock_irqrestore(&lock, flags);
2820
2821 mdelay(10);
2822 return 0;
2823}
2824
2825static s32 avc_poweroff(void)
2826{
2827 ulong flags;
2828
2829 spin_lock_irqsave(&lock, flags);
2830
2831 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_M8) {
2832 /* enable HCODEC isolation */
2833 WRITE_AOREG(AO_RTI_GEN_PWR_ISO0,
2834 READ_AOREG(AO_RTI_GEN_PWR_ISO0) | 0x30);
2835 /* power off HCODEC memories */
2836 WRITE_VREG(DOS_MEM_PD_HCODEC, 0xffffffffUL);
2837 }
2838 /* disable HCODEC clock */
2839 hvdec_clock_disable();
2840
2841 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_M8) {
2842 /* HCODEC power off */
2843 WRITE_AOREG(AO_RTI_GEN_PWR_SLEEP0,
2844 READ_AOREG(AO_RTI_GEN_PWR_SLEEP0) | 0x3);
2845 }
2846#ifdef USE_VDEC2
2847 if (get_cpu_type() == MESON_CPU_MAJOR_ID_M8) {
2848 if (vdec_on(VDEC_2) && get_vdec2_usage() != USAGE_DEC_4K2K) {
2849 vdec_poweroff(VDEC_2);
2850 set_vdec2_usage(USAGE_NONE);
2851 }
2852 }
2853#endif
2854
2855 spin_unlock_irqrestore(&lock, flags);
2856
2857 /* release DOS clk81 clock gating */
2858 amports_switch_gate("vdec", 0);
2859 return 0;
2860}
2861
2862static s32 reload_mc(struct encode_wq_s *wq)
2863{
2864 const char *p = select_ucode(encode_manager.ucode_index);
2865
2866 amvenc_stop();
2867
2868 WRITE_VREG(DOS_SW_RESET1, 0xffffffff);
2869 WRITE_VREG(DOS_SW_RESET1, 0);
2870
2871 udelay(10);
2872
2873 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_MG9TV)
2874 WRITE_HREG(HCODEC_ASSIST_MMC_CTRL1, 0x32);
2875 else
2876 WRITE_HREG(HCODEC_ASSIST_MMC_CTRL1, 0x2);
2877
2878 enc_pr(LOG_INFO, "reload microcode\n");
2879
2880 if (amvenc_loadmc(p, wq) < 0)
2881 return -EBUSY;
2882 return 0;
2883}
2884
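/*
 * Interrupt handling: enc_isr (below) acknowledges the mailbox IRQ and
 * latches the ucode state from ENCODER_STATUS; this tasklet then wakes
 * the encode thread once a SEQUENCE/PICTURE/IDR/NON-IDR stage completes.
 */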
2885static void encode_isr_tasklet(ulong data)
2886{
2887 struct encode_manager_s *manager = (struct encode_manager_s *)data;
2888 enc_pr(LOG_INFO, "encoder is done %d\n", manager->encode_hw_status);
2889 if (((manager->encode_hw_status == ENCODER_IDR_DONE)
2890 || (manager->encode_hw_status == ENCODER_NON_IDR_DONE)
2891 || (manager->encode_hw_status == ENCODER_SEQUENCE_DONE)
2892 || (manager->encode_hw_status == ENCODER_PICTURE_DONE))
2893 && (manager->process_irq)) {
2894#ifdef USE_VDEC2
2895 if (get_cpu_type() == MESON_CPU_MAJOR_ID_M8) {
2896 if ((abort_vdec2_flag) &&
2897 (get_vdec2_usage() == USAGE_ENCODE))
2898 set_vdec2_usage(USAGE_NONE);
2899 }
2900#endif
2901 wake_up_interruptible(&manager->event.hw_complete);
2902 }
2903}
2904
2905/* irq function */
2906static irqreturn_t enc_isr(s32 irq_number, void *para)
2907{
2908 struct encode_manager_s *manager = (struct encode_manager_s *)para;
2909 WRITE_HREG(HCODEC_IRQ_MBOX_CLR, 1);
2910
2911#ifdef DEBUG_UCODE
2912 /* rain */
2913 if (READ_HREG(DEBUG_REG) != 0) {
2914 enc_pr(LOG_DEBUG, "dbg%x: %x\n", READ_HREG(DEBUG_REG),
2915 READ_HREG(HCODEC_HENC_SCRATCH_1));
2916 WRITE_HREG(DEBUG_REG, 0);
2917 return IRQ_HANDLED;
2918 }
2919#endif
2920
2921 manager->encode_hw_status = READ_HREG(ENCODER_STATUS);
2922 if ((manager->encode_hw_status == ENCODER_IDR_DONE)
2923 || (manager->encode_hw_status == ENCODER_NON_IDR_DONE)
2924 || (manager->encode_hw_status == ENCODER_SEQUENCE_DONE)
2925 || (manager->encode_hw_status == ENCODER_PICTURE_DONE)) {
2926 enc_pr(LOG_ALL, "encoder stage is %d\n",
2927 manager->encode_hw_status);
2928 }
2929
2930 if (((manager->encode_hw_status == ENCODER_IDR_DONE)
2931 || (manager->encode_hw_status == ENCODER_NON_IDR_DONE)
2932 || (manager->encode_hw_status == ENCODER_SEQUENCE_DONE)
2933 || (manager->encode_hw_status == ENCODER_PICTURE_DONE))
2934 && (!manager->process_irq)) {
2935 manager->process_irq = true;
2936 if (manager->encode_hw_status != ENCODER_SEQUENCE_DONE)
2937 manager->need_reset = true;
2938 tasklet_schedule(&manager->encode_tasklet);
2939 }
2940 return IRQ_HANDLED;
2941}
2942
2943static struct class *avc_enc_class;
2944
2945static ssize_t dg_manual_store(struct class *cls, struct class_attribute *attr,
2946 const char *buf, size_t len)
2947{
2948 int ret = 0;
2949 ret = sscanf(buf, "%x", (unsigned int *)&qp_table_pr);
2950	if (ret != 1)
2951 enc_pr(LOG_INFO, "set encoder table failed\n");
2952 else
2953 enc_pr(LOG_INFO, "set encoder table print = %d\n", qp_table_pr);
2954
2955 return len;
2956}
2957
2958static ssize_t dg_manual_show(struct class *cls, struct class_attribute *attr,
2959 char *buf)
2960{
2961	enc_pr(LOG_INFO, "encoder table print = %d\n", qp_table_pr);
2962	/* report the value through sysfs as well, not only the kernel log */
2963	return snprintf(buf, PAGE_SIZE, "encoder table print = %d\n", qp_table_pr);
2964}
2965
2966static CLASS_ATTR(encode_tbl_debug, 0664, dg_manual_show, dg_manual_store);
2967
2968
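/*
 * convert_request: unpack the ioctl command words into wq->request.
 * cmd_info[0] is the command; for ENCODER_IDR/NON_IDR in full-ucode
 * mode, words 1..16 carry ucode mode, type, format, source address,
 * frame size, quant, flush flag, timeout, crop, scaler and
 * noise-reduction settings, optionally followed by per-request QP
 * tables and weight offsets when quant == ADJUSTED_QP_FLAG.
 */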
2969static s32 convert_request(struct encode_wq_s *wq, u32 *cmd_info)
2970{
2971 int i = 0;
2972 u8 *qp_tb;
2973 u8 *ptr;
2974 u32 weight_offset;
2975 u32 cmd = cmd_info[0];
2976 if (!wq)
2977 return -1;
2978 memset(&wq->request, 0, sizeof(struct encode_request_s));
2979 wq->request.me_weight = ME_WEIGHT_OFFSET;
2980 wq->request.i4_weight = I4MB_WEIGHT_OFFSET;
2981 wq->request.i16_weight = I16MB_WEIGHT_OFFSET;
2982
2983 if (cmd == ENCODER_SEQUENCE) {
2984 wq->request.cmd = cmd;
2985 wq->request.ucode_mode = cmd_info[1];
2986 wq->request.quant = cmd_info[2];
2987 wq->request.flush_flag = cmd_info[3];
2988 wq->request.timeout = cmd_info[4];
2989		wq->request.timeout = 5000; /* override user timeout with a fixed 5000 ms */
2990 } else if ((cmd == ENCODER_IDR) || (cmd == ENCODER_NON_IDR)) {
2991 wq->request.cmd = cmd;
2992 wq->request.ucode_mode = cmd_info[1];
2993 if (wq->request.ucode_mode == UCODE_MODE_FULL) {
2994 wq->request.type = cmd_info[2];
2995 wq->request.fmt = cmd_info[3];
2996 wq->request.src = cmd_info[4];
2997 wq->request.framesize = cmd_info[5];
2998 wq->request.quant = cmd_info[6];
2999 wq->request.flush_flag = cmd_info[7];
3000 wq->request.timeout = cmd_info[8];
3001 wq->request.crop_top = cmd_info[9];
3002 wq->request.crop_bottom = cmd_info[10];
3003 wq->request.crop_left = cmd_info[11];
3004 wq->request.crop_right = cmd_info[12];
3005 wq->request.src_w = cmd_info[13];
3006 wq->request.src_h = cmd_info[14];
3007 wq->request.scale_enable = cmd_info[15];
3008
3009 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) {
3010 wq->request.nr_mode =
3011 (nr_mode > 0) ? nr_mode : cmd_info[16];
3012 if (cmd == ENCODER_IDR)
3013 wq->request.nr_mode = 0;
3014 }
3015
3016 if (wq->request.quant == ADJUSTED_QP_FLAG) {
3017 ptr = (u8 *) &cmd_info[17];
3018 memcpy(wq->quant_tbl_i4[1], ptr,
3019 sizeof(wq->quant_tbl_i4[1]));
3020 ptr += sizeof(wq->quant_tbl_i4[1]);
3021 memcpy(wq->quant_tbl_i16[1], ptr,
3022 sizeof(wq->quant_tbl_i16[1]));
3023 ptr += sizeof(wq->quant_tbl_i16[1]);
3024 memcpy(wq->quant_tbl_me[1], ptr,
3025 sizeof(wq->quant_tbl_me[1]));
3026
3027 weight_offset = 17 +
3028 (sizeof(wq->quant_tbl_i4[1])
3029 + sizeof(wq->quant_tbl_i16[1])
3030 + sizeof(wq->quant_tbl_me[1])) / 4;
3031
3032 wq->request.i4_weight -=
3033 cmd_info[weight_offset];
3034 wq->request.i16_weight -=
3035 cmd_info[weight_offset + 1];
3036 wq->request.me_weight -=
3037 cmd_info[weight_offset + 2];
3038
3039				/* switch to qp table 1 */
3040 wq->qp_table_id = 1;
3041 if (qp_table_pr != 0) {
3042 qp_tb = (u8 *)(&wq->quant_tbl_i4[1][0]);
3043 for (i = 0; i < 32; i++) {
3044 enc_pr(LOG_INFO, "%d ", *qp_tb);
3045 qp_tb++;
3046 }
3047 enc_pr(LOG_INFO, "\n");
3048
3049 qp_tb = (u8 *)
3050 (&wq->quant_tbl_i16[1][0]);
3051 for (i = 0; i < 32; i++) {
3052 enc_pr(LOG_INFO, "%d ", *qp_tb);
3053 qp_tb++;
3054 }
3055 enc_pr(LOG_INFO, "\n");
3056
3057 qp_tb = (u8 *)(&wq->quant_tbl_me[1][0]);
3058 for (i = 0; i < 32; i++) {
3059 enc_pr(LOG_INFO, "%d ", *qp_tb);
3060 qp_tb++;
3061 }
3062 enc_pr(LOG_INFO, "\n");
3063 }
3064 } else {
3065 wq->qp_table_id = 0;
3066 memset(wq->quant_tbl_me[0], wq->request.quant,
3067 sizeof(wq->quant_tbl_me[0]));
3068 memset(wq->quant_tbl_i4[0], wq->request.quant,
3069 sizeof(wq->quant_tbl_i4[0]));
3070 memset(wq->quant_tbl_i16[0], wq->request.quant,
3071 sizeof(wq->quant_tbl_i16[0]));
3072 }
3073 } else {
3074 wq->request.quant = cmd_info[2];
3075 wq->request.qp_info_size = cmd_info[3];
3076 wq->request.flush_flag = cmd_info[4];
3077 wq->request.timeout = cmd_info[5];
3078 }
3079 } else {
3080 enc_pr(LOG_ERROR, "error cmd = %d, wq: %p.\n",
3081 cmd, (void *)wq);
3082 return -1;
3083 }
3084 wq->request.parent = wq;
3085 return 0;
3086}
3087
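/*
 * amvenc_avc_start_cmd: kick one request on the hardware. Reloads the
 * microcode when the ucode mode (or, in SW_MIX mode, the owning work
 * queue) changes, re-runs the init sequence if a reset is pending,
 * programs the input format, reference/deblocking canvases and the
 * slice configuration, then writes the command to ENCODER_STATUS.
 */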
3088void amvenc_avc_start_cmd(struct encode_wq_s *wq,
3089 struct encode_request_s *request)
3090{
3091 u32 reload_flag = 0;
3092#ifdef USE_VDEC2
3093 if (get_cpu_type() == MESON_CPU_MAJOR_ID_M8) {
3094 if ((request->ucode_mode == UCODE_MODE_SW_MIX) &&
3095 (enable_dblk > 0)) {
3096 if ((get_vdec2_usage() == USAGE_DEC_4K2K)
3097 || (abort_vdec2_flag)) {
3098 enable_dblk = 2;
3099 if ((abort_vdec2_flag) &&
3100 (get_vdec2_usage() == USAGE_ENCODE)) {
3101 enc_pr(LOG_DEBUG,
3102 "switch encode ucode, wq:%p\n",
3103 (void *)wq);
3104 set_vdec2_usage(USAGE_NONE);
3105 }
3106 } else {
3107 if (get_vdec2_usage() == USAGE_NONE)
3108 set_vdec2_usage(USAGE_ENCODE);
3109 if (!vdec_on(VDEC_2)) {
3110 vdec_poweron(VDEC_2);
3111 mdelay(10);
3112 }
3113 enable_dblk = 1;
3114 }
3115 }
3116 }
3117#endif
3118 if (request->ucode_mode != encode_manager.ucode_index) {
3119 encode_manager.ucode_index = request->ucode_mode;
3120 if (reload_mc(wq)) {
3121 enc_pr(LOG_ERROR,
3122 "reload mc fail, wq:%p\n", (void *)wq);
3123 return;
3124 }
3125 reload_flag = 1;
3126 encode_manager.need_reset = true;
3127 } else if ((request->parent != encode_manager.last_wq)
3128 && (request->ucode_mode == UCODE_MODE_SW_MIX)) {
3129		/* workaround to reset the armrisc */
3130 if (reload_mc(wq)) {
3131 enc_pr(LOG_ERROR,
3132 "reload mc fail, wq:%p\n", (void *)wq);
3133 return;
3134 }
3135 reload_flag = 1;
3136 encode_manager.need_reset = true;
3137 }
3138
3139 wq->hw_status = 0;
3140 wq->output_size = 0;
3141 wq->ucode_index = encode_manager.ucode_index;
3142 if ((request->cmd == ENCODER_SEQUENCE) ||
3143 (request->cmd == ENCODER_PICTURE))
3144 wq->control.finish = true;
3145#if 0
3146 if (encode_manager.ucode_index == UCODE_MODE_SW_MIX)
3147 ie_me_mode = (0 & ME_PIXEL_MODE_MASK) << ME_PIXEL_MODE_SHIFT;
3148 else if ((get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) &&
3149 (encode_manager.ucode_index == UCODE_MODE_FULL)) {
3150 ie_me_mode = (0 & ME_PIXEL_MODE_MASK) << ME_PIXEL_MODE_SHIFT;
3151 } else
3152 ie_me_mode = (3 & ME_PIXEL_MODE_MASK) << ME_PIXEL_MODE_SHIFT;
3153#else
3154 ie_me_mode = (0 & ME_PIXEL_MODE_MASK) << ME_PIXEL_MODE_SHIFT;
3155#endif
3156 if (encode_manager.need_reset) {
3157 encode_manager.need_reset = false;
3158 encode_manager.encode_hw_status = ENCODER_IDLE;
3159 amvenc_reset();
3160 avc_canvas_init(wq);
3161 avc_init_encoder(wq,
3162 (request->cmd == ENCODER_IDR) ? true : false);
3163 avc_init_input_buffer(wq);
3164 avc_init_output_buffer(wq);
3165 avc_prot_init(wq, request, request->quant,
3166 (request->cmd == ENCODER_IDR) ? true : false);
3167 avc_init_assit_buffer(wq);
3168 enc_pr(LOG_INFO,
3169 "begin to new frame, request->cmd: %d, ucode mode: %d, wq:%p.\n",
3170 request->cmd, request->ucode_mode, (void *)wq);
3171 }
3172 if ((request->cmd == ENCODER_IDR) ||
3173 (request->cmd == ENCODER_NON_IDR)) {
3174 avc_init_dblk_buffer(wq->mem.dblk_buf_canvas);
3175 avc_init_reference_buffer(wq->mem.ref_buf_canvas);
3176 }
3177 if (encode_manager.ucode_index != UCODE_MODE_SW_MIX) {
3178 if ((request->cmd == ENCODER_IDR) ||
3179 (request->cmd == ENCODER_NON_IDR))
3180 set_input_format(wq, request);
3181 if (request->cmd == ENCODER_IDR)
3182 ie_me_mb_type = HENC_MB_Type_I4MB;
3183 else if (request->cmd == ENCODER_NON_IDR)
3184 ie_me_mb_type = (HENC_SKIP_RUN_AUTO << 16) |
3185 (HENC_MB_Type_AUTO << 4) |
3186 (HENC_MB_Type_AUTO << 0);
3187 else
3188 ie_me_mb_type = 0;
3189 avc_init_ie_me_parameter(wq, request->quant);
3190 } else if (get_cpu_type() >= MESON_CPU_MAJOR_ID_M8) {
3191 if ((wq->mem.dblk_buf_canvas & 0xff) == ENC_CANVAS_OFFSET) {
3192 WRITE_HREG(CURRENT_Y_CANVAS_START,
3193 wq->mem.buf_start +
3194 wq->mem.bufspec.dec0_y.buf_start);
3195 WRITE_HREG(CURRENT_C_CANVAS_START,
3196 wq->mem.buf_start +
3197 wq->mem.bufspec.dec0_uv.buf_start);
3198 } else {
3199 WRITE_HREG(CURRENT_Y_CANVAS_START,
3200 wq->mem.buf_start +
3201 wq->mem.bufspec.dec1_y.buf_start);
3202 WRITE_HREG(CURRENT_C_CANVAS_START,
3203 wq->mem.buf_start +
3204 wq->mem.bufspec.dec1_uv.buf_start);
3205 }
3206 WRITE_HREG(CANVAS_ROW_SIZE,
3207 (((wq->pic.encoder_width + 31) >> 5) << 5));
3208
3209#ifdef USE_VDEC2
3210 if ((enable_dblk == 1) &&
3211 ((get_cpu_type() == MESON_CPU_MAJOR_ID_M8))) {
3212 amvdec2_stop();
3213 WRITE_VREG(VDEC2_AV_SCRATCH_2, 0xffff);
3214 /* set vdec2 input, clone hcodec input
3215 buffer and set to manual mode */
3216 WRITE_VREG(VDEC2_VLD_MEM_VIFIFO_CONTROL, 0);
3217
3218 WRITE_VREG(DOS_SW_RESET2, (1 << 4));
3219 WRITE_VREG(DOS_SW_RESET2, 0);
3220 (void)READ_VREG(DOS_SW_RESET2);
3221 (void)READ_VREG(DOS_SW_RESET2);
3222 WRITE_VREG(VDEC2_POWER_CTL_VLD,
3223 (1 << 4) | (1 << 6) | (1 << 9));
3224
3225 WRITE_VREG(VDEC2_VLD_MEM_VIFIFO_START_PTR,
3226 wq->mem.BitstreamStart);
3227 WRITE_VREG(VDEC2_VLD_MEM_VIFIFO_END_PTR,
3228 wq->mem.BitstreamEnd);
3229 WRITE_VREG(VDEC2_VLD_MEM_VIFIFO_CURR_PTR,
3230 wq->mem.BitstreamStart);
3231
3232 SET_VREG_MASK(VDEC2_VLD_MEM_VIFIFO_CONTROL, 1);
3233 CLEAR_VREG_MASK(VDEC2_VLD_MEM_VIFIFO_CONTROL, 1);
3234
3235 WRITE_VREG(VDEC2_VLD_MEM_VIFIFO_BUF_CNTL, 2);
3236 WRITE_VREG(VDEC2_VLD_MEM_VIFIFO_WP,
3237 wq->mem.BitstreamStart);
3238
3239 SET_VREG_MASK(VDEC2_VLD_MEM_VIFIFO_BUF_CNTL, 1);
3240 CLEAR_VREG_MASK(VDEC2_VLD_MEM_VIFIFO_BUF_CNTL, 1);
3241
3242 WRITE_VREG(VDEC2_VLD_MEM_VIFIFO_CONTROL,
3243 (0x11 << 16) | (1 << 10) |
3244 (7 << 3) | (1 << 2) | (1 << 1));
3245
3246 amvdec2_loadmc_ex(ucode_name[UCODE_VDEC2]);
3247
3248 WRITE_VREG(VDEC2_AV_SCRATCH_1,
3249 wq->mem.vdec2_start_addr -
3250 VDEC2_DEF_BUF_START_ADDR);
3251 WRITE_VREG(VDEC2_AV_SCRATCH_8,
3252 wq->pic.log2_max_pic_order_cnt_lsb);
3253 WRITE_VREG(VDEC2_AV_SCRATCH_9,
3254 wq->pic.log2_max_frame_num);
3255 WRITE_VREG(VDEC2_AV_SCRATCH_B, wq->pic.init_qppicture);
3256 WRITE_VREG(VDEC2_AV_SCRATCH_A,
3257 (((wq->pic.encoder_height + 15) / 16) << 16) |
3258 ((wq->pic.encoder_width + 15) / 16));
3259
3260 /* Input/Output canvas */
3261 WRITE_VREG(VDEC2_ANC0_CANVAS_ADDR,
3262 wq->mem.ref_buf_canvas);
3263 WRITE_VREG(VDEC2_ANC1_CANVAS_ADDR,
3264 wq->mem.dblk_buf_canvas);
3265
3266 WRITE_VREG(DECODED_MB_Y, 0);
3267 /* MBY limit */
3268 WRITE_VREG(DECODABLE_MB_Y, 0);
3269 /* VB WP */
3270 WRITE_VREG(STREAM_WR_PTR, wq->mem.BitstreamStart);
3271 /* NV21 */
3272 SET_VREG_MASK(VDEC2_MDEC_PIC_DC_CTRL, 1 << 17);
3273
3274 WRITE_VREG(VDEC2_M4_CONTROL_REG, 1 << 13);
3275 WRITE_VREG(VDEC2_MDEC_PIC_DC_THRESH, 0x404038aa);
3276
3277 amvdec2_start();
3278 }
3279#endif
3280 }
3281
3282#ifdef MULTI_SLICE_MC
3283#ifndef USE_OLD_DUMP_MC
3284 if ((get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) &&
3285 (encode_manager.ucode_index == UCODE_MODE_FULL)) {
3286 if (fixed_slice_cfg)
3287 WRITE_HREG(FIXED_SLICE_CFG, fixed_slice_cfg);
3288 else if (wq->pic.rows_per_slice !=
3289 (wq->pic.encoder_height + 15) >> 4) {
3290 u32 mb_per_slice = (wq->pic.encoder_height + 15) >> 4;
3291 mb_per_slice = mb_per_slice * wq->pic.rows_per_slice;
3292 WRITE_HREG(FIXED_SLICE_CFG, mb_per_slice);
3293 } else
3294 WRITE_HREG(FIXED_SLICE_CFG, 0);
3295 } else
3296#endif
3297 {
3298 if (fixed_slice_cfg)
3299 WRITE_HREG(FIXED_SLICE_CFG, fixed_slice_cfg);
3300 else if (wq->pic.rows_per_slice !=
3301 (wq->pic.encoder_height + 15) >> 4)
3302 WRITE_HREG(FIXED_SLICE_CFG,
3303 (wq->pic.rows_per_slice << 16) |
3304 wq->pic.rows_per_slice);
3305 else
3306 WRITE_HREG(FIXED_SLICE_CFG, 0);
3307 }
3308#else
3309 WRITE_HREG(FIXED_SLICE_CFG, 0);
3310#endif
3311
3312 encode_manager.encode_hw_status = request->cmd;
3313 wq->hw_status = request->cmd;
3314	WRITE_HREG(ENCODER_STATUS, request->cmd);
3315 if ((request->cmd == ENCODER_IDR) || (request->cmd == ENCODER_NON_IDR)
3316 || (request->cmd == ENCODER_SEQUENCE)
3317 || (request->cmd == ENCODER_PICTURE))
3318 encode_manager.process_irq = false;
3319
3320 if (reload_flag)
3321 amvenc_start();
3322 if ((encode_manager.ucode_index == UCODE_MODE_SW_MIX) &&
3323 ((request->cmd == ENCODER_IDR) ||
3324 (request->cmd == ENCODER_NON_IDR)))
3325 wq->control.can_update = true;
3326 enc_pr(LOG_ALL, "amvenc_avc_start cmd, wq:%p.\n", (void *)wq);
3327}
3328
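/*
 * Cache maintenance helpers: dma_flush cleans CPU caches before the
 * hardware reads a buffer (DMA_TO_DEVICE); cache_flush invalidates
 * them before the CPU reads back hardware output (DMA_FROM_DEVICE).
 */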
3329static void dma_flush(u32 buf_start, u32 buf_size)
3330{
3331 dma_sync_single_for_device(
3332 &encode_manager.this_pdev->dev, buf_start,
3333 buf_size, DMA_TO_DEVICE);
3334}
3335
3336static void cache_flush(u32 buf_start, u32 buf_size)
3337{
3338 dma_sync_single_for_cpu(
3339 &encode_manager.this_pdev->dev, buf_start,
3340 buf_size, DMA_FROM_DEVICE);
3341}
3342
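/*
 * getbuffer: translate an ENCODER_BUFFER_* id into the physical start
 * address of the corresponding region of the work queue's buffer.
 */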
3343static u32 getbuffer(struct encode_wq_s *wq, u32 type)
3344{
3345 u32 ret = 0;
3346
3347 switch (type) {
3348 case ENCODER_BUFFER_INPUT:
3349 ret = wq->mem.dct_buff_start_addr;
3350 break;
3351 case ENCODER_BUFFER_REF0:
3352 ret = wq->mem.dct_buff_start_addr +
3353 wq->mem.bufspec.dec0_y.buf_start;
3354 break;
3355 case ENCODER_BUFFER_REF1:
3356 ret = wq->mem.dct_buff_start_addr +
3357 wq->mem.bufspec.dec1_y.buf_start;
3358 break;
3359 case ENCODER_BUFFER_OUTPUT:
3360 ret = wq->mem.BitstreamStart;
3361 break;
3362 case ENCODER_BUFFER_INTER_INFO:
3363#ifndef USE_OLD_DUMP_MC
3364 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB)
3365 ret = wq->mem.dump_info_ddr_start_addr;
3366 else
3367 ret = wq->mem.inter_bits_info_ddr_start_addr;
3368#else
3369 ret = wq->mem.inter_bits_info_ddr_start_addr;
3370#endif
3371 break;
3372 case ENCODER_BUFFER_INTRA_INFO:
3373#ifndef USE_OLD_DUMP_MC
3374 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB)
3375 ret = wq->mem.dump_info_ddr_start_addr;
3376 else
3377 ret = wq->mem.intra_bits_info_ddr_start_addr;
3378#else
3379 ret = wq->mem.intra_bits_info_ddr_start_addr;
3380#endif
3381 break;
3382 case ENCODER_BUFFER_QP:
3383 ret = wq->mem.sw_ctl_info_start_addr;
3384 break;
3385 default:
3386 break;
3387 }
3388 return ret;
3389}
3390
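/*
 * amvenc_avc_start: one-time bring-up of a work queue -- power on the
 * HCODEC, load the microcode, reset the core, initialize input/output/
 * reference buffers and the protocol registers, request the IRQ and
 * leave the ucode in the IDLE state.
 */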
3391s32 amvenc_avc_start(struct encode_wq_s *wq, u32 clock)
3392{
3393 const char *p = select_ucode(encode_manager.ucode_index);
3394
3395 avc_poweron(clock);
3396 avc_canvas_init(wq);
3397
3398 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_MG9TV)
3399 WRITE_HREG(HCODEC_ASSIST_MMC_CTRL1, 0x32);
3400 else
3401 WRITE_HREG(HCODEC_ASSIST_MMC_CTRL1, 0x2);
3402
3403 if (amvenc_loadmc(p, wq) < 0)
3404 return -EBUSY;
3405
3406 encode_manager.need_reset = true;
3407 encode_manager.process_irq = false;
3408 encode_manager.encode_hw_status = ENCODER_IDLE;
3409 amvenc_reset();
3410 avc_init_encoder(wq, true);
3411 avc_init_input_buffer(wq); /* dct buffer setting */
3412 avc_init_output_buffer(wq); /* output stream buffer */
3413
3414#if 0
3415 if (encode_manager.ucode_index == UCODE_MODE_SW_MIX)
3416 ie_me_mode = (0 & ME_PIXEL_MODE_MASK) << ME_PIXEL_MODE_SHIFT;
3417 else if ((get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) &&
3418 (encode_manager.ucode_index == UCODE_MODE_FULL)) {
3419 ie_me_mode = (0 & ME_PIXEL_MODE_MASK) << ME_PIXEL_MODE_SHIFT;
3420 } else
3421 ie_me_mode = (3 & ME_PIXEL_MODE_MASK) << ME_PIXEL_MODE_SHIFT;
3422#else
3423 ie_me_mode = (0 & ME_PIXEL_MODE_MASK) << ME_PIXEL_MODE_SHIFT;
3424#endif
3425 avc_prot_init(wq, NULL, wq->pic.init_qppicture, true);
3426 if (request_irq(encode_manager.irq_num, enc_isr, IRQF_SHARED,
3427 "enc-irq", (void *)&encode_manager) == 0)
3428 encode_manager.irq_requested = true;
3429 else
3430 encode_manager.irq_requested = false;
3431
3432	/* decoder (deblocking) buffer, must be set before each frame start */
3433 avc_init_dblk_buffer(wq->mem.dblk_buf_canvas);
3434	/* reference buffer, must be set before each frame start */
3435 avc_init_reference_buffer(wq->mem.ref_buf_canvas);
3436	avc_init_assit_buffer(wq); /* assistant buffer for the microcode */
3437 if (encode_manager.ucode_index != UCODE_MODE_SW_MIX) {
3438 ie_me_mb_type = 0;
3439 avc_init_ie_me_parameter(wq, wq->pic.init_qppicture);
3440 } else if (get_cpu_type() >= MESON_CPU_MAJOR_ID_M8) {
3441 if ((wq->mem.dblk_buf_canvas & 0xff) == ENC_CANVAS_OFFSET) {
3442 WRITE_HREG(CURRENT_Y_CANVAS_START,
3443 wq->mem.buf_start +
3444 wq->mem.bufspec.dec0_y.buf_start);
3445 WRITE_HREG(CURRENT_C_CANVAS_START,
3446 wq->mem.buf_start +
3447 wq->mem.bufspec.dec0_uv.buf_start);
3448 } else {
3449 WRITE_HREG(CURRENT_Y_CANVAS_START,
3450 wq->mem.buf_start +
3451 wq->mem.bufspec.dec1_y.buf_start);
3452 WRITE_HREG(CURRENT_C_CANVAS_START,
3453 wq->mem.buf_start +
3454 wq->mem.bufspec.dec1_uv.buf_start);
3455 }
3456 WRITE_HREG(CANVAS_ROW_SIZE,
3457 (((wq->pic.encoder_width + 31) >> 5) << 5));
3458 }
3459	WRITE_HREG(ENCODER_STATUS, ENCODER_IDLE);
3460
3461#ifdef MULTI_SLICE_MC
3462#ifndef USE_OLD_DUMP_MC
3463 if ((get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) &&
3464 (encode_manager.ucode_index == UCODE_MODE_FULL)) {
3465 if (fixed_slice_cfg)
3466 WRITE_HREG(FIXED_SLICE_CFG, fixed_slice_cfg);
3467 else if (wq->pic.rows_per_slice !=
3468 (wq->pic.encoder_height + 15) >> 4) {
3469 u32 mb_per_slice = (wq->pic.encoder_height + 15) >> 4;
3470 mb_per_slice = mb_per_slice * wq->pic.rows_per_slice;
3471 WRITE_HREG(FIXED_SLICE_CFG, mb_per_slice);
3472 } else
3473 WRITE_HREG(FIXED_SLICE_CFG, 0);
3474 } else
3475#endif
3476 {
3477 if (fixed_slice_cfg)
3478 WRITE_HREG(FIXED_SLICE_CFG, fixed_slice_cfg);
3479 else if (wq->pic.rows_per_slice !=
3480 (wq->pic.encoder_height + 15) >> 4)
3481 WRITE_HREG(FIXED_SLICE_CFG,
3482 (wq->pic.rows_per_slice << 16) |
3483 wq->pic.rows_per_slice);
3484 else
3485 WRITE_HREG(FIXED_SLICE_CFG, 0);
3486 }
3487#else
3488 WRITE_HREG(FIXED_SLICE_CFG, 0);
3489#endif
3490 amvenc_start();
3491 return 0;
3492}
3493
3494void amvenc_avc_stop(void)
3495{
3496 if ((encode_manager.irq_num >= 0) &&
3497 (encode_manager.irq_requested == true)) {
3498 free_irq(encode_manager.irq_num, &encode_manager);
3499 encode_manager.irq_requested = false;
3500 }
3501#ifdef USE_VDEC2
3502 if (get_cpu_type() == MESON_CPU_MAJOR_ID_M8) {
3503 if ((get_vdec2_usage() != USAGE_DEC_4K2K) && (vdec_on(VDEC_2)))
3504 amvdec2_stop();
3505 }
3506#endif
3507 amvenc_stop();
3508 avc_poweroff();
3509}
3510
3511static s32 avc_init(struct encode_wq_s *wq)
3512{
3513 s32 r = 0;
3514
3515 encode_manager.ucode_index = wq->ucode_index;
3516 r = amvenc_avc_start(wq, clock_level);
3517
3518 enc_pr(LOG_DEBUG,
3519 "init avc encode. microcode %d, ret=%d, wq:%p.\n",
3520 encode_manager.ucode_index, r, (void *)wq);
3521	return r;
3522}
3523
3524static s32 amvenc_avc_light_reset(struct encode_wq_s *wq, u32 value)
3525{
3526 s32 r = 0;
3527
3528 amvenc_avc_stop();
3529
3530 mdelay(value);
3531
3532 encode_manager.ucode_index = UCODE_MODE_FULL;
3533 r = amvenc_avc_start(wq, clock_level);
3534
3535 enc_pr(LOG_DEBUG,
3536 "amvenc_avc_light_reset finish, wq:%p. ret=%d\n",
3537 (void *)wq, r);
3538 return r;
3539}
3540
3541#ifdef CONFIG_CMA
3542static u32 checkCMA(void)
3543{
3544 u32 ret;
3545 if (encode_manager.cma_pool_size > 0) {
3546 ret = encode_manager.cma_pool_size / SZ_1M;
3547 ret = ret / MIN_SIZE;
3548 } else
3549 ret = 0;
3550 return ret;
3551}
3552#endif
3553
3554/* file operation */
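/*
 * amvenc_avc_open: allocate a work queue, reserve the CMA (or reserved)
 * DMA buffer and precompute the per-region DDR addresses used later by
 * the ucode and the ioctl interface.
 */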
3555static s32 amvenc_avc_open(struct inode *inode, struct file *file)
3556{
3557 s32 r = 0;
3558 struct encode_wq_s *wq = NULL;
3559 u8 cur_lev;
3560 file->private_data = NULL;
3561 enc_pr(LOG_DEBUG, "avc open\n");
3562#ifdef CONFIG_AM_JPEG_ENCODER
3563 if (jpegenc_on() == true) {
3564 enc_pr(LOG_ERROR,
3565 "hcodec in use for JPEG Encode now.\n");
3566 return -EBUSY;
3567 }
3568#endif
3569
3570#ifdef CONFIG_CMA
3571 if ((encode_manager.use_reserve == false) &&
3572 (encode_manager.check_cma == false)) {
3573 encode_manager.max_instance = checkCMA();
3574 if (encode_manager.max_instance > 0) {
3575 enc_pr(LOG_DEBUG,
3576				"amvenc_avc check CMA pool success, max instance: %d.\n",
3577 encode_manager.max_instance);
3578 } else {
3579 enc_pr(LOG_ERROR,
3580 "amvenc_avc CMA pool too small.\n");
3581 }
3582 encode_manager.check_cma = true;
3583 }
3584#endif
3585
3586 wq = create_encode_work_queue();
3587 if (wq == NULL) {
3588 enc_pr(LOG_ERROR, "amvenc_avc create instance fail.\n");
3589 return -EBUSY;
3590 }
3591
3592#ifdef CONFIG_CMA
3593 if (encode_manager.use_reserve == false) {
3594 wq->mem.buf_start = codec_mm_alloc_for_dma(ENCODE_NAME,
3595 (MIN_SIZE * SZ_1M) >> PAGE_SHIFT, 0,
3596 CODEC_MM_FLAGS_CPU);
3597 if (wq->mem.buf_start) {
3598 wq->mem.buf_size = MIN_SIZE * SZ_1M;
3599 enc_pr(LOG_DEBUG,
3600 "allocating phys 0x%x, size %dk, wq:%p.\n",
3601 wq->mem.buf_start,
3602 wq->mem.buf_size >> 10, (void *)wq);
3603 } else {
3604 enc_pr(LOG_ERROR,
3605 "CMA failed to allocate dma buffer for %s, wq:%p.\n",
3606 encode_manager.this_pdev->name,
3607 (void *)wq);
3608 destroy_encode_work_queue(wq);
3609 return -ENOMEM;
3610 }
3611 }
3612#endif
3613
3614 cur_lev = wq->mem.cur_buf_lev;
3615 if ((wq->mem.buf_start == 0) ||
3616 (wq->mem.buf_size < amvenc_buffspec[cur_lev].min_buffsize)) {
3617 enc_pr(LOG_ERROR,
3618 "alloc mem failed, start: 0x%x, size:0x%x, wq:%p.\n",
3619 wq->mem.buf_start,
3620 wq->mem.buf_size, (void *)wq);
3621 destroy_encode_work_queue(wq);
3622 return -ENOMEM;
3623 }
3624
3625 wq->mem.cur_buf_lev = AMVENC_BUFFER_LEVEL_1080P;
3626 memcpy(&wq->mem.bufspec, &amvenc_buffspec[wq->mem.cur_buf_lev],
3627 sizeof(struct BuffInfo_s));
3628 wq->mem.inter_bits_info_ddr_start_addr =
3629 wq->mem.buf_start + wq->mem.bufspec.inter_bits_info.buf_start;
3630 wq->mem.inter_mv_info_ddr_start_addr =
3631 wq->mem.buf_start + wq->mem.bufspec.inter_mv_info.buf_start;
3632 wq->mem.intra_bits_info_ddr_start_addr =
3633 wq->mem.buf_start + wq->mem.bufspec.intra_bits_info.buf_start;
3634 wq->mem.intra_pred_info_ddr_start_addr =
3635 wq->mem.buf_start + wq->mem.bufspec.intra_pred_info.buf_start;
3636 wq->mem.sw_ctl_info_start_addr =
3637 wq->mem.buf_start + wq->mem.bufspec.qp_info.buf_start;
3638 wq->mem.scaler_buff_start_addr =
3639 wq->mem.buf_start + wq->mem.bufspec.scale_buff.buf_start;
3640#ifdef USE_VDEC2
3641 wq->mem.vdec2_start_addr =
3642 wq->mem.buf_start + wq->mem.bufspec.vdec2_info.buf_start;
3643#endif
3644
3645#ifndef USE_OLD_DUMP_MC
3646 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) {
3647 wq->mem.dump_info_ddr_start_addr =
3648 wq->mem.inter_bits_info_ddr_start_addr;
3649 } else {
3650 wq->mem.dump_info_ddr_start_addr = 0;
3651 wq->mem.dump_info_ddr_size = 0;
3652 }
3653#else
3654 wq->mem.dump_info_ddr_start_addr = 0;
3655 wq->mem.dump_info_ddr_size = 0;
3656#endif
3657 enc_pr(LOG_DEBUG,
3658		"amvenc_avc memory config success, buff start:0x%x, size is 0x%x, wq:%p.\n",
3659 wq->mem.buf_start, wq->mem.buf_size, (void *)wq);
3660
3661 file->private_data = (void *) wq;
3662 return r;
3663}
3664
3665static s32 amvenc_avc_release(struct inode *inode, struct file *file)
3666{
3667 struct encode_wq_s *wq = (struct encode_wq_s *)file->private_data;
3668 if (wq) {
3669 enc_pr(LOG_DEBUG, "avc release, wq:%p\n", (void *)wq);
3670 destroy_encode_work_queue(wq);
3671 }
3672 return 0;
3673}
3674
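/*
 * amvenc_avc_ioctl: userspace control path -- queue new encode
 * commands, update the DCT input write pointer, query the stage and
 * output size, perform explicit cache flushes and report the buffer
 * layout.
 */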
3675static long amvenc_avc_ioctl(struct file *file, u32 cmd, ulong arg)
3676{
3677 long r = 0;
3678 u32 amrisc_cmd = 0;
3679 struct encode_wq_s *wq = (struct encode_wq_s *)file->private_data;
3680#define MAX_ADDR_INFO_SIZE 50
3681 u32 addr_info[MAX_ADDR_INFO_SIZE + 4];
3682 ulong argV;
3683 u32 buf_start;
3684 s32 canvas = -1;
3685 struct canvas_s dst;
3686 switch (cmd) {
3687 case AMVENC_AVC_IOC_GET_ADDR:
3688 if ((wq->mem.ref_buf_canvas & 0xff) == (ENC_CANVAS_OFFSET))
3689 put_user(1, (u32 *)arg);
3690 else
3691 put_user(2, (u32 *)arg);
3692 break;
3693 case AMVENC_AVC_IOC_INPUT_UPDATE:
3694 if (copy_from_user(addr_info, (void *)arg,
3695 MAX_ADDR_INFO_SIZE * sizeof(u32))) {
3696 enc_pr(LOG_ERROR,
3697 "avc update input ptr error, wq: %p.\n",
3698 (void *)wq);
3699 return -1;
3700 }
3701
3702 wq->control.dct_buffer_write_ptr = addr_info[2];
3703 if ((encode_manager.current_wq == wq) &&
3704 (wq->control.can_update == true)) {
3705 buf_start = getbuffer(wq, addr_info[0]);
3706 if (buf_start)
3707 dma_flush(buf_start +
3708 wq->control.dct_flush_start,
3709 wq->control.dct_buffer_write_ptr -
3710 wq->control.dct_flush_start);
3711 WRITE_HREG(HCODEC_QDCT_MB_WR_PTR,
3712 (wq->mem.dct_buff_start_addr +
3713 wq->control.dct_buffer_write_ptr));
3714 wq->control.dct_flush_start =
3715 wq->control.dct_buffer_write_ptr;
3716 }
3717 wq->control.finish = (addr_info[3] == 1) ? true : false;
3718 break;
3719 case AMVENC_AVC_IOC_NEW_CMD:
3720 if (copy_from_user(addr_info, (void *)arg,
3721 MAX_ADDR_INFO_SIZE * sizeof(u32))) {
3722 enc_pr(LOG_ERROR,
3723 "avc get new cmd error, wq:%p.\n", (void *)wq);
3724 return -1;
3725 }
3726 r = convert_request(wq, addr_info);
3727 if (r == 0)
3728 r = encode_wq_add_request(wq);
3729 if (r) {
3730 enc_pr(LOG_ERROR,
3731 "avc add new request error, wq:%p.\n",
3732 (void *)wq);
3733 }
3734 break;
3735 case AMVENC_AVC_IOC_GET_STAGE:
3736 put_user(wq->hw_status, (u32 *)arg);
3737 break;
3738 case AMVENC_AVC_IOC_GET_OUTPUT_SIZE:
3739 if ((get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB)
3740 && (wq->ucode_index == UCODE_MODE_FULL)) {
3741#ifndef USE_OLD_DUMP_MC
3742 addr_info[0] = wq->output_size;
3743 addr_info[1] = wq->me_weight;
3744 addr_info[2] = wq->i4_weight;
3745 addr_info[3] = wq->i16_weight;
3746 r = copy_to_user((u32 *)arg,
3747				addr_info, 4 * sizeof(u32));
3748#else
3749 put_user(wq->output_size, (u32 *)arg);
3750#endif
3751 } else {
3752 put_user(wq->output_size, (u32 *)arg);
3753 }
3754 break;
3755 case AMVENC_AVC_IOC_CONFIG_INIT:
3756 if (copy_from_user(addr_info, (void *)arg,
3757 MAX_ADDR_INFO_SIZE * sizeof(u32))) {
3758 enc_pr(LOG_ERROR,
3759 "avc config init error, wq:%p.\n", (void *)wq);
3760 return -1;
3761 }
3762 if (addr_info[0] <= UCODE_MODE_SW_MIX)
3763 wq->ucode_index = addr_info[0];
3764 else
3765 wq->ucode_index = UCODE_MODE_FULL;
3766
3767 if (get_cpu_type() < MESON_CPU_MAJOR_ID_M8)
3768 wq->ucode_index = UCODE_MODE_FULL;
3769
3770#ifdef MULTI_SLICE_MC
3771 wq->pic.rows_per_slice = addr_info[1];
3772 enc_pr(LOG_DEBUG,
3773 "avc init -- rows_per_slice: %d, wq: %p.\n",
3774 wq->pic.rows_per_slice, (void *)wq);
3775#endif
3776 enc_pr(LOG_DEBUG,
3777 "avc init as mode %d, wq: %p.\n",
3778 wq->ucode_index, (void *)wq);
3779
3780 if ((addr_info[2] > wq->mem.bufspec.max_width) ||
3781 (addr_info[3] > wq->mem.bufspec.max_height)) {
3782 enc_pr(LOG_ERROR,
3783 "avc config init - encode size %dx%d is larger than the supported maximum (%dx%d), wq:%p.\n",
3784 addr_info[2], addr_info[3],
3785 wq->mem.bufspec.max_width,
3786 wq->mem.bufspec.max_height, (void *)wq);
3787 return -1;
3788 }
3789 wq->pic.encoder_width = addr_info[2];
3790 wq->pic.encoder_height = addr_info[3];
3791
3792 avc_buffspec_init(wq);
3793 complete(&encode_manager.event.request_in_com);
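 /* return the buffer layout (start/size pairs) of this work queue's memory to user space */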
3794 addr_info[1] = wq->mem.bufspec.dct.buf_start;
3795 addr_info[2] = wq->mem.bufspec.dct.buf_size;
3796 addr_info[3] = wq->mem.bufspec.dec0_y.buf_start;
3797 addr_info[4] = wq->mem.bufspec.dec0_y.buf_size;
3798 addr_info[5] = wq->mem.bufspec.dec0_uv.buf_start;
3799 addr_info[6] = wq->mem.bufspec.dec0_uv.buf_size;
3800 addr_info[7] = wq->mem.bufspec.dec1_y.buf_start;
3801 addr_info[8] = wq->mem.bufspec.dec1_y.buf_size;
3802 addr_info[9] = wq->mem.bufspec.dec1_uv.buf_start;
3803 addr_info[10] = wq->mem.bufspec.dec1_uv.buf_size;
3804 addr_info[11] = wq->mem.bufspec.bitstream.buf_start;
3805 addr_info[12] = wq->mem.bufspec.bitstream.buf_size;
3806#ifndef USE_OLD_DUMP_MC
3807 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB) {
3808 /* new dump info addr same as inter_bits_info address */
3809 addr_info[13] =
3810 wq->mem.bufspec.inter_bits_info.buf_start;
3811 addr_info[14] =
3812 wq->mem.dump_info_ddr_size;
3813 } else {
3814 addr_info[13] =
3815 wq->mem.bufspec.inter_bits_info.buf_start;
3816 addr_info[14] =
3817 wq->mem.bufspec.inter_bits_info.buf_size;
3818 }
3819#else
3820 addr_info[13] = wq->mem.bufspec.inter_bits_info.buf_start;
3821 addr_info[14] = wq->mem.bufspec.inter_bits_info.buf_size;
3822#endif
3823 addr_info[15] = wq->mem.bufspec.inter_mv_info.buf_start;
3824 addr_info[16] = wq->mem.bufspec.inter_mv_info.buf_size;
3825 addr_info[17] = wq->mem.bufspec.intra_bits_info.buf_start;
3826 addr_info[18] = wq->mem.bufspec.intra_bits_info.buf_size;
3827 addr_info[19] = wq->mem.bufspec.intra_pred_info.buf_start;
3828 addr_info[20] = wq->mem.bufspec.intra_pred_info.buf_size;
3829 addr_info[21] = wq->mem.bufspec.qp_info.buf_start;
3830 addr_info[22] = wq->mem.bufspec.qp_info.buf_size;
3831 addr_info[23] = wq->mem.bufspec.scale_buff.buf_start;
3832 addr_info[24] = wq->mem.bufspec.scale_buff.buf_size;
3833 r = copy_to_user((u32 *)arg, addr_info, 25 * sizeof(u32));
3834 break;
3835 case AMVENC_AVC_IOC_FLUSH_CACHE:
3836 if (copy_from_user(addr_info, (void *)arg,
3837 MAX_ADDR_INFO_SIZE * sizeof(u32))) {
3838 enc_pr(LOG_ERROR,
3839 "avc flush cache error, wq: %p.\n", (void *)wq);
3840 return -1;
3841 }
3842 buf_start = getbuffer(wq, addr_info[0]);
3843 if (buf_start)
3844 dma_flush(buf_start + addr_info[1],
3845 addr_info[2] - addr_info[1]);
3846 break;
3847 case AMVENC_AVC_IOC_FLUSH_DMA:
3848 if (copy_from_user(addr_info, (void *)arg,
3849 MAX_ADDR_INFO_SIZE * sizeof(u32))) {
3850 enc_pr(LOG_ERROR,
3851 "avc flush dma error, wq:%p.\n", (void *)wq);
3852 return -1;
3853 }
3854 buf_start = getbuffer(wq, addr_info[0]);
3855 if (buf_start)
3856 cache_flush(buf_start + addr_info[1],
3857 addr_info[2] - addr_info[1]);
3858 break;
3859 case AMVENC_AVC_IOC_GET_BUFFINFO:
3860 put_user(wq->mem.buf_size, (u32 *)arg);
3861 break;
3862 case AMVENC_AVC_IOC_GET_DEVINFO:
3863 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXL) {
3864 /* send the same id as GXTVBB to upper layers */
3865 r = copy_to_user((s8 *)arg, AMVENC_DEVINFO_GXTVBB,
3866 strlen(AMVENC_DEVINFO_GXTVBB));
3867 } else if (get_cpu_type() == MESON_CPU_MAJOR_ID_GXTVBB) {
3868 r = copy_to_user((s8 *)arg, AMVENC_DEVINFO_GXTVBB,
3869 strlen(AMVENC_DEVINFO_GXTVBB));
3870 } else if (get_cpu_type() == MESON_CPU_MAJOR_ID_GXBB) {
3871 r = copy_to_user((s8 *)arg, AMVENC_DEVINFO_GXBB,
3872 strlen(AMVENC_DEVINFO_GXBB));
3873 } else if (get_cpu_type() == MESON_CPU_MAJOR_ID_MG9TV) {
3874 r = copy_to_user((s8 *)arg, AMVENC_DEVINFO_G9,
3875 strlen(AMVENC_DEVINFO_G9));
3876 } else {
3877 r = copy_to_user((s8 *)arg, AMVENC_DEVINFO_M8,
3878 strlen(AMVENC_DEVINFO_M8));
3879 }
3880 break;
3881 case AMVENC_AVC_IOC_SUBMIT:
3882 get_user(amrisc_cmd, ((u32 *)arg));
3883 if (amrisc_cmd == ENCODER_IDR) {
3884 wq->pic.idr_pic_id++;
3885 if (wq->pic.idr_pic_id > 65535)
3886 wq->pic.idr_pic_id = 0;
3887 wq->pic.pic_order_cnt_lsb = 2;
3888 wq->pic.frame_number = 1;
3889 } else if (amrisc_cmd == ENCODER_NON_IDR) {
3890 wq->pic.frame_number++;
3891 wq->pic.pic_order_cnt_lsb += 2;
3892 if (wq->pic.frame_number > 65535)
3893 wq->pic.frame_number = 0;
3894 }
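 /* swap the deblock and reference canvases, reusing amrisc_cmd as a temporary */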
3895 amrisc_cmd = wq->mem.dblk_buf_canvas;
3896 wq->mem.dblk_buf_canvas = wq->mem.ref_buf_canvas;
3897 /* current dblk buffer as next reference buffer */
3898 wq->mem.ref_buf_canvas = amrisc_cmd;
3899 break;
3900 case AMVENC_AVC_IOC_READ_CANVAS:
3901 get_user(argV, ((u32 *)arg));
3902 canvas = argV;
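 /* the argument packs up to three 8-bit canvas indices; report the first
  * plane's base address and the span through the end of the last plane */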
3903 if (canvas & 0xff) {
3904 canvas_read(canvas & 0xff, &dst);
3905 addr_info[0] = dst.addr;
3906 if ((canvas & 0xff00) >> 8)
3907 canvas_read((canvas & 0xff00) >> 8, &dst);
3908 if ((canvas & 0xff0000) >> 16)
3909 canvas_read((canvas & 0xff0000) >> 16, &dst);
3910 addr_info[1] = dst.addr - addr_info[0] +
3911 dst.width * dst.height;
3912 dma_flush(dst.addr, dst.width * dst.height * 3 / 2);
3913 } else {
3914 addr_info[0] = 0;
3915 addr_info[1] = 0;
3916 }
3917 r = copy_to_user((u32 *)arg, addr_info, 2 * sizeof(u32));
3918 break;
3919 case AMVENC_AVC_IOC_MAX_INSTANCE:
3920 put_user(encode_manager.max_instance, (u32 *)arg);
3921 break;
3922 default:
3923 r = -1;
3924 break;
3925 }
3926 return r;
3927}
3928
3929#ifdef CONFIG_COMPAT
3930static long amvenc_avc_compat_ioctl(struct file *filp,
3931 unsigned int cmd, unsigned long args)
3932{
3933 long ret;
3934
3935 args = (unsigned long)compat_ptr(args);
3936 ret = amvenc_avc_ioctl(filp, cmd, args);
3937 return ret;
3938}
3939#endif
3940
3941static s32 avc_mmap(struct file *filp, struct vm_area_struct *vma)
3942{
3943 struct encode_wq_s *wq = (struct encode_wq_s *)filp->private_data;
3944 ulong off = vma->vm_pgoff << PAGE_SHIFT;
3945 ulong vma_size = vma->vm_end - vma->vm_start;
3946
3947 if (vma_size == 0) {
3948 enc_pr(LOG_ERROR, "vma_size is 0, wq:%p.\n", (void *)wq);
3949 return -EAGAIN;
3950 }
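 /* a zero page offset maps from the start of this work queue's buffer */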
3951 if (!off)
3952 off += wq->mem.buf_start;
3953 enc_pr(LOG_ALL,
3954 "vma_size is %lu, off is %lu, wq:%p.\n",
3955 vma_size, off, (void *)wq);
3956 vma->vm_flags |= VM_DONTEXPAND | VM_DONTDUMP | VM_IO;
3957 /* vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot); */
3958 if (remap_pfn_range(vma, vma->vm_start, off >> PAGE_SHIFT,
3959 vma->vm_end - vma->vm_start, vma->vm_page_prot)) {
3960 enc_pr(LOG_ERROR,
3961 "set_cached: failed remap_pfn_range, wq:%p.\n",
3962 (void *)wq);
3963 return -EAGAIN;
3964 }
3965 return 0;
3966}
3967
3968static u32 amvenc_avc_poll(struct file *file, poll_table *wait_table)
3969{
3970 struct encode_wq_s *wq = (struct encode_wq_s *)file->private_data;
3971 poll_wait(file, &wq->request_complete, wait_table);
3972
3973 if (atomic_read(&wq->request_ready)) {
3974 atomic_dec(&wq->request_ready);
3975 return POLLIN | POLLRDNORM;
3976 }
3977 return 0;
3978}
3979
3980static const struct file_operations amvenc_avc_fops = {
3981 .owner = THIS_MODULE,
3982 .open = amvenc_avc_open,
3983 .mmap = avc_mmap,
3984 .release = amvenc_avc_release,
3985 .unlocked_ioctl = amvenc_avc_ioctl,
3986#ifdef CONFIG_COMPAT
3987 .compat_ioctl = amvenc_avc_compat_ioctl,
3988#endif
3989 .poll = amvenc_avc_poll,
3990};
3991
3992/* work queue function */
3993static s32 encode_process_request(struct encode_manager_s *manager,
3994 struct encode_queue_item_s *pitem)
3995{
3996 s32 ret = 0;
3997 struct encode_wq_s *wq = pitem->request.parent;
3998 struct encode_request_s *request = &pitem->request;
3999 u32 timeout = (request->timeout == 0) ?
4000 1 : msecs_to_jiffies(request->timeout);
4001 u32 buf_start = 0;
4002 u32 size = 0;
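 /* one reference frame: width rounded up to 32, height to 16, 3/2 bytes per pixel (YUV 4:2:0) */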
4003 u32 flush_size = ((wq->pic.encoder_width + 31) >> 5 << 5) *
4004 ((wq->pic.encoder_height + 15) >> 4 << 4) * 3 / 2;
4005
4006 if (((request->cmd == ENCODER_IDR) || (request->cmd == ENCODER_NON_IDR))
4007 && (request->ucode_mode == UCODE_MODE_SW_MIX)) {
4008 if (request->flush_flag & AMVENC_FLUSH_FLAG_QP) {
4009 buf_start = getbuffer(wq, ENCODER_BUFFER_QP);
4010 if ((buf_start) && (request->qp_info_size > 0))
4011 dma_flush(buf_start, request->qp_info_size);
4012 }
4013 }
4014
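 /* issue the command to the hardware; sequence header completion
  * loops back here to issue the picture header */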
4015Again:
4016 amvenc_avc_start_cmd(wq, request);
4017
4018 while (wq->control.finish == 0)
4019 wait_event_interruptible_timeout(manager->event.hw_complete,
4020 (wq->control.finish == true), msecs_to_jiffies(1));
4021
4022 if ((wq->control.finish == true) &&
4023 (wq->control.dct_flush_start < wq->control.dct_buffer_write_ptr)) {
4024 buf_start = getbuffer(wq, ENCODER_BUFFER_INPUT);
4025 if (buf_start)
4026 dma_flush(buf_start + wq->control.dct_flush_start,
4027 wq->control.dct_buffer_write_ptr -
4028 wq->control.dct_flush_start);
4029 WRITE_HREG(HCODEC_QDCT_MB_WR_PTR,
4030 (wq->mem.dct_buff_start_addr +
4031 wq->control.dct_buffer_write_ptr));
4032 wq->control.dct_flush_start = wq->control.dct_buffer_write_ptr;
4033 }
4034
4035 if (no_timeout) {
4036 wait_event_interruptible(manager->event.hw_complete,
4037 (manager->encode_hw_status == ENCODER_IDR_DONE
4038 || manager->encode_hw_status == ENCODER_NON_IDR_DONE
4039 || manager->encode_hw_status == ENCODER_SEQUENCE_DONE
4040 || manager->encode_hw_status == ENCODER_PICTURE_DONE));
4041 } else {
4042 wait_event_interruptible_timeout(manager->event.hw_complete,
4043 ((manager->encode_hw_status == ENCODER_IDR_DONE)
4044 || (manager->encode_hw_status == ENCODER_NON_IDR_DONE)
4045 || (manager->encode_hw_status == ENCODER_SEQUENCE_DONE)
4046 || (manager->encode_hw_status == ENCODER_PICTURE_DONE)),
4047 timeout);
4048 }
4049
4050 if ((request->cmd == ENCODER_SEQUENCE) &&
4051 (manager->encode_hw_status == ENCODER_SEQUENCE_DONE)) {
4052 wq->sps_size = READ_HREG(HCODEC_VLC_TOTAL_BYTES);
4053 wq->hw_status = manager->encode_hw_status;
4054 request->cmd = ENCODER_PICTURE;
4055 goto Again;
4056 } else if ((request->cmd == ENCODER_PICTURE) &&
4057 (manager->encode_hw_status == ENCODER_PICTURE_DONE)) {
4058 wq->pps_size =
4059 READ_HREG(HCODEC_VLC_TOTAL_BYTES) - wq->sps_size;
4060 wq->hw_status = manager->encode_hw_status;
4061 if (request->flush_flag & AMVENC_FLUSH_FLAG_OUTPUT) {
4062 buf_start = getbuffer(wq, ENCODER_BUFFER_OUTPUT);
4063 if (buf_start)
4064 cache_flush(buf_start,
4065 wq->sps_size + wq->pps_size);
4066 }
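 /* pack the SPS size into the high 16 bits and the PPS size into the low 16 bits */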
4067 wq->output_size = (wq->sps_size << 16) | wq->pps_size;
4068 } else {
4069 wq->hw_status = manager->encode_hw_status;
4070 if ((manager->encode_hw_status == ENCODER_IDR_DONE) ||
4071 (manager->encode_hw_status == ENCODER_NON_IDR_DONE)) {
4072 wq->output_size = READ_HREG(HCODEC_VLC_TOTAL_BYTES);
4073 if (request->flush_flag & AMVENC_FLUSH_FLAG_OUTPUT) {
4074 buf_start = getbuffer(wq,
4075 ENCODER_BUFFER_OUTPUT);
4076 if (buf_start)
4077 cache_flush(buf_start, wq->output_size);
4078 }
4079 if (request->flush_flag &
4080 AMVENC_FLUSH_FLAG_INTER_INFO) {
4081 buf_start = getbuffer(wq,
4082 ENCODER_BUFFER_INTER_INFO);
4083 size = wq->mem.inter_mv_info_ddr_start_addr -
4084 wq->mem.inter_bits_info_ddr_start_addr +
4085 wq->mem.bufspec.inter_mv_info.buf_size;
4086#ifndef USE_OLD_DUMP_MC
4087 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB)
4088 size = wq->mem.dump_info_ddr_size;
4089#endif
4090 if (buf_start)
4091 cache_flush(buf_start, size);
4092 }
4093 if (request->flush_flag &
4094 AMVENC_FLUSH_FLAG_INTRA_INFO) {
4095 buf_start = getbuffer(wq,
4096 ENCODER_BUFFER_INTRA_INFO);
4097 size = wq->mem.intra_pred_info_ddr_start_addr -
4098 wq->mem.intra_bits_info_ddr_start_addr +
4099 wq->mem.bufspec.intra_pred_info.buf_size;
4100#ifndef USE_OLD_DUMP_MC
4101 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB)
4102 size = wq->mem.dump_info_ddr_size;
4103#endif
4104 if (buf_start)
4105 cache_flush(buf_start, size);
4106 }
4107 if (request->flush_flag &
4108 AMVENC_FLUSH_FLAG_REFERENCE) {
4109 u32 ref_id = ENCODER_BUFFER_REF0;
4110 if ((wq->mem.ref_buf_canvas & 0xff) ==
4111 (ENC_CANVAS_OFFSET))
4112 ref_id = ENCODER_BUFFER_REF0;
4113 else
4114 ref_id = ENCODER_BUFFER_REF1;
4115 buf_start = getbuffer(wq, ref_id);
4116 if (buf_start)
4117 cache_flush(buf_start, flush_size);
4118 }
4119 } else {
4120 manager->encode_hw_status = ENCODER_ERROR;
4121 enc_pr(LOG_DEBUG, "avc encode light reset --- ");
4122 enc_pr(LOG_DEBUG,
4123 "frame type: %s, size: %dx%d, wq: %p\n",
4124 (request->cmd == ENCODER_IDR) ? "IDR" : "P",
4125 wq->pic.encoder_width,
4126 wq->pic.encoder_height, (void *)wq);
4127 enc_pr(LOG_DEBUG,
4128 "mb info: 0x%x, encode status: 0x%x, dct status: 0x%x ",
4129 READ_HREG(HCODEC_VLC_MB_INFO),
4130 READ_HREG(ENCODER_STATUS),
4131 READ_HREG(HCODEC_QDCT_STATUS_CTRL));
4132 enc_pr(LOG_DEBUG,
4133 "vlc status: 0x%x, me status: 0x%x, risc pc:0x%x\n",
4134 READ_HREG(HCODEC_VLC_STATUS_CTRL),
4135 READ_HREG(HCODEC_ME_STATUS),
4136 READ_HREG(HCODEC_MPC_E));
4137 amvenc_avc_light_reset(wq, 30);
4138 }
4139 }
4140
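 /* reset per-frame control state and wake any poll() waiter on this work queue */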
4141 wq->control.can_update = false;
4142 wq->control.dct_buffer_write_ptr = 0;
4143 wq->control.dct_flush_start = 0;
4144 wq->control.finish = false;
4145 atomic_inc(&wq->request_ready);
4146 wake_up_interruptible(&wq->request_complete);
4147 return ret;
4148}
4149
4150s32 encode_wq_add_request(struct encode_wq_s *wq)
4151{
4152 struct encode_queue_item_s *pitem = NULL;
4153 struct list_head *head = NULL;
4154 struct encode_wq_s *tmp = NULL;
4155 bool find = false;
4156
4157 spin_lock(&encode_manager.event.sem_lock);
4158
4159 head = &encode_manager.wq;
4160 list_for_each_entry(tmp, head, list) {
4161 if ((wq == tmp) && (wq != NULL)) {
4162 find = true;
4163 break;
4164 }
4165 }
4166
4167 if (find == false) {
4168 enc_pr(LOG_ERROR, "current wq (%p) is not registered.\n",
4169 (void *)wq);
4170 goto error;
4171 }
4172
4173 if (list_empty(&encode_manager.free_queue)) {
4174 enc_pr(LOG_ERROR, "no free space in work queue, wq:%p.\n",
4175 (void *)wq);
4176 goto error;
4177 }
4178
4179 pitem = list_entry(encode_manager.free_queue.next,
4180 struct encode_queue_item_s, list);
4181 if (IS_ERR(pitem))
4182 goto error;
4183
4184 memcpy(&pitem->request, &wq->request, sizeof(struct encode_request_s));
4185 memset(&wq->request, 0, sizeof(struct encode_request_s));
4186 wq->hw_status = 0;
4187 wq->output_size = 0;
4188 wq->control.dct_buffer_write_ptr = 0;
4189 wq->control.dct_flush_start = 0;
4190 wq->control.finish = false;
4191 wq->control.can_update = false;
4192 pitem->request.parent = wq;
4193 list_move_tail(&pitem->list, &encode_manager.process_queue);
4194 spin_unlock(&encode_manager.event.sem_lock);
4195
4196 enc_pr(LOG_INFO,
4197 "add new work ok, cmd:%d, ucode mode: %d, wq:%p.\n",
4198 pitem->request.cmd, pitem->request.ucode_mode,
4199 (void *)wq);
4200 complete(&encode_manager.event.request_in_com);/* new cmd come in */
4201 return 0;
4202error:
4203 spin_unlock(&encode_manager.event.sem_lock);
4204 return -1;
4205}
4206
4207struct encode_wq_s *create_encode_work_queue(void)
4208{
4209 struct encode_wq_s *encode_work_queue = NULL;
4210 bool done = false;
4211 u32 i, max_instance;
4212 struct Buff_s *reserve_buff;
4213
4214 encode_work_queue = kzalloc(sizeof(struct encode_wq_s), GFP_KERNEL);
4215 if (!encode_work_queue) {
4216 enc_pr(LOG_ERROR, "can't create work queue\n");
4217 return NULL;
4218 }
4219 max_instance = encode_manager.max_instance;
4220 encode_work_queue->pic.init_qppicture = 26;
4221 encode_work_queue->pic.log2_max_frame_num = 4;
4222 encode_work_queue->pic.log2_max_pic_order_cnt_lsb = 4;
4223 encode_work_queue->pic.idr_pic_id = 0;
4224 encode_work_queue->pic.frame_number = 0;
4225 encode_work_queue->pic.pic_order_cnt_lsb = 0;
4226 encode_work_queue->ucode_index = UCODE_MODE_FULL;
4227 init_waitqueue_head(&encode_work_queue->request_complete);
4228 atomic_set(&encode_work_queue->request_ready, 0);
4229 spin_lock(&encode_manager.event.sem_lock);
4230 if (encode_manager.wq_count < encode_manager.max_instance) {
4231 list_add_tail(&encode_work_queue->list, &encode_manager.wq);
4232 encode_manager.wq_count++;
4233 if (encode_manager.use_reserve == true) {
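 /* claim the first unused per-instance slot carved from the reserved region */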
4234 for (i = 0; i < max_instance; i++) {
4235 reserve_buff = &encode_manager.reserve_buff[i];
4236 if (reserve_buff->used == false) {
4237 encode_work_queue->mem.buf_start =
4238 reserve_buff->buf_start;
4239 encode_work_queue->mem.buf_size =
4240 reserve_buff->buf_size;
4241 reserve_buff->used = true;
4242 done = true;
4243 break;
4244 }
4245 }
4246 } else
4247 done = true;
4248 }
4249 spin_unlock(&encode_manager.event.sem_lock);
4250 if (done == false) {
4251 kfree(encode_work_queue);
4252 encode_work_queue = NULL;
4253 enc_pr(LOG_ERROR, "too many work queues!\n");
4254 }
4255 return encode_work_queue; /* find it */
4256}
4257
4258static void _destroy_encode_work_queue(struct encode_manager_s *manager,
4259 struct encode_wq_s **wq,
4260 struct encode_wq_s *encode_work_queue,
4261 bool *find)
4262{
4263 struct list_head *head;
4264 struct encode_wq_s *wp_tmp = NULL;
4265 u32 i, max_instance;
4266 struct Buff_s *reserve_buff;
4267 u32 buf_start = encode_work_queue->mem.buf_start;
4268
4269 max_instance = manager->max_instance;
4270 head = &manager->wq;
4271 list_for_each_entry_safe((*wq), wp_tmp, head, list) {
4272 if ((*wq) && (*wq == encode_work_queue)) {
4273 list_del(&(*wq)->list);
4274 if (manager->use_reserve == true) {
4275 for (i = 0; i < max_instance; i++) {
4276 reserve_buff =
4277 &manager->reserve_buff[i];
4278 if (reserve_buff->used == true &&
4279 buf_start ==
4280 reserve_buff->buf_start) {
4281 reserve_buff->used = false;
4282 break;
4283 }
4284 }
4285 }
4286 *find = true;
4287 manager->wq_count--;
4288 enc_pr(LOG_DEBUG,
4289 "removed encode_work_queue %p successfully, %s line %d.\n",
4290 (void *)encode_work_queue,
4291 __func__, __LINE__);
4292 break;
4293 }
4294 }
4295}
4296
4297s32 destroy_encode_work_queue(struct encode_wq_s *encode_work_queue)
4298{
4299 struct encode_queue_item_s *pitem, *tmp;
4300 struct encode_wq_s *wq = NULL;
4301 bool find = false;
4302
4303 struct list_head *head;
4304 if (encode_work_queue) {
4305 spin_lock(&encode_manager.event.sem_lock);
4306 if (encode_manager.current_wq == encode_work_queue) {
4307 encode_manager.remove_flag = true;
4308 spin_unlock(&encode_manager.event.sem_lock);
4309 enc_pr(LOG_DEBUG,
4310 "warning -- destroying the running queue, should not be here.\n");
4311 wait_for_completion(
4312 &encode_manager.event.process_complete);
4313 spin_lock(&encode_manager.event.sem_lock);
4314 } /* else we can delete it safely. */
4315
4316 head = &encode_manager.process_queue;
4317 list_for_each_entry_safe(pitem, tmp, head, list) {
4318 if (pitem) {
4319 if (pitem->request.parent ==
4320 encode_work_queue) {
4321 pitem->request.parent = NULL;
4322 enc_pr(LOG_DEBUG,
4323 "warning -- removing an unprocessed request, should not be here.\n");
4324 list_move_tail(&pitem->list,
4325 &encode_manager.free_queue);
4326 }
4327 }
4328 }
4329
4330 _destroy_encode_work_queue(&encode_manager, &wq,
4331 encode_work_queue, &find);
4332 spin_unlock(&encode_manager.event.sem_lock);
4333#ifdef CONFIG_CMA
4334 if (encode_work_queue->mem.buf_start) {
4335 codec_mm_free_for_dma(
4336 ENCODE_NAME,
4337 encode_work_queue->mem.buf_start);
4338 encode_work_queue->mem.buf_start = 0;
4339
4340 }
4341#endif
4342 kfree(encode_work_queue);
4343 complete(&encode_manager.event.request_in_com);
4344 }
4345 return 0;
4346}
4347
4348static s32 encode_monitor_thread(void *data)
4349{
4350 struct encode_manager_s *manager = (struct encode_manager_s *)data;
4351 struct encode_queue_item_s *pitem = NULL;
4352 struct sched_param param = {.sched_priority = MAX_RT_PRIO - 1 };
4353 s32 ret = 0;
4354 enc_pr(LOG_DEBUG, "encode workqueue monitor start.\n");
4355 sched_setscheduler(current, SCHED_FIFO, &param);
4356 allow_signal(SIGTERM);
4357 /* setup current_wq here. */
4358 while (manager->process_queue_state != ENCODE_PROCESS_QUEUE_STOP) {
4359 if (kthread_should_stop())
4360 break;
4361
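 /* sleep until a new request is queued or the thread is interrupted */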
4362 ret = wait_for_completion_interruptible(
4363 &manager->event.request_in_com);
4364
4365 if (ret == -ERESTARTSYS)
4366 break;
4367
4368 if (kthread_should_stop())
4369 break;
4370 if (manager->inited == false) {
4371 spin_lock(&manager->event.sem_lock);
4372 if (!list_empty(&manager->wq)) {
4373 struct encode_wq_s *first_wq =
4374 list_entry(manager->wq.next,
4375 struct encode_wq_s, list);
4376 manager->current_wq = first_wq;
4377 spin_unlock(&manager->event.sem_lock);
4378 if (first_wq) {
4379#ifdef CONFIG_AM_GE2D
4380 if (!manager->context)
4381 manager->context =
4382 create_ge2d_work_queue();
4383#endif
4384 avc_init(first_wq);
4385 manager->inited = true;
4386 }
4387 spin_lock(&manager->event.sem_lock);
4388 manager->current_wq = NULL;
4389 spin_unlock(&manager->event.sem_lock);
4390 if (manager->remove_flag) {
4391 complete(
4392 &manager
4393 ->event.process_complete);
4394 manager->remove_flag = false;
4395 }
4396 } else
4397 spin_unlock(&manager->event.sem_lock);
4398 continue;
4399 }
4400
4401 spin_lock(&manager->event.sem_lock);
4402 pitem = NULL;
4403 if (list_empty(&manager->wq)) {
4404 spin_unlock(&manager->event.sem_lock);
4405 manager->inited = false;
4406 amvenc_avc_stop();
4407#ifdef CONFIG_AM_GE2D
4408 if (manager->context) {
4409 destroy_ge2d_work_queue(manager->context);
4410 manager->context = NULL;
4411 }
4412#endif
4413 enc_pr(LOG_DEBUG, "power off encode.\n");
4414 continue;
4415 } else if (!list_empty(&manager->process_queue)) {
4416 pitem = list_entry(manager->process_queue.next,
4417 struct encode_queue_item_s, list);
4418 list_del(&pitem->list);
4419 manager->current_item = pitem;
4420 manager->current_wq = pitem->request.parent;
4421 }
4422 spin_unlock(&manager->event.sem_lock);
4423
4424 if (pitem) {
4425 encode_process_request(manager, pitem);
4426 spin_lock(&manager->event.sem_lock);
4427 list_add_tail(&pitem->list, &manager->free_queue);
4428 manager->current_item = NULL;
4429 manager->last_wq = manager->current_wq;
4430 manager->current_wq = NULL;
4431 spin_unlock(&manager->event.sem_lock);
4432 }
4433 if (manager->remove_flag) {
4434 complete(&manager->event.process_complete);
4435 manager->remove_flag = false;
4436 }
4437 }
4438 while (!kthread_should_stop())
4439 msleep(20);
4440
4441 enc_pr(LOG_DEBUG, "exit encode_monitor_thread.\n");
4442 return 0;
4443}
4444
4445static s32 encode_start_monitor(void)
4446{
4447 s32 ret = 0;
4448
4449 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXBB)
4450 clock_level = 5;
4451 else if (get_cpu_type() >= MESON_CPU_MAJOR_ID_M8M2)
4452 clock_level = 3;
4453 else
4454 clock_level = 1;
4455
4456 if (get_cpu_type() >= MESON_CPU_MAJOR_ID_GXTVBB) {
4457 y_tnr_mot2alp_nrm_gain = 216;
4458 y_tnr_mot2alp_dis_gain = 144;
4459 c_tnr_mot2alp_nrm_gain = 216;
4460 c_tnr_mot2alp_dis_gain = 144;
4461 } else {
4462 /* more tnr */
4463 y_tnr_mot2alp_nrm_gain = 144;
4464 y_tnr_mot2alp_dis_gain = 96;
4465 c_tnr_mot2alp_nrm_gain = 144;
4466 c_tnr_mot2alp_dis_gain = 96;
4467 }
4468
4469 enc_pr(LOG_DEBUG, "encode start monitor.\n");
4470 encode_manager.process_queue_state = ENCODE_PROCESS_QUEUE_START;
4471 encode_manager.encode_thread = kthread_run(encode_monitor_thread,
4472 &encode_manager, "encode_monitor");
4473 if (IS_ERR(encode_manager.encode_thread)) {
4474 ret = PTR_ERR(encode_manager.encode_thread);
4475 encode_manager.process_queue_state = ENCODE_PROCESS_QUEUE_STOP;
4476 enc_pr(LOG_ERROR,
4477 "encode monitor : failed to start kthread (%d)\n", ret);
4478 }
4479 return ret;
4480}
4481
4482static s32 encode_stop_monitor(void)
4483{
4484 enc_pr(LOG_DEBUG, "stop encode monitor thread\n");
4485 if (encode_manager.encode_thread) {
4486 spin_lock(&encode_manager.event.sem_lock);
4487 if (!list_empty(&encode_manager.wq)) {
4488 u32 count = encode_manager.wq_count;
4489 spin_unlock(&encode_manager.event.sem_lock);
4490 enc_pr(LOG_ERROR,
4491 "stop encode monitor thread error, active wq count (%d) is not 0.\n",
4492 count);
4493 return -1;
4494 }
4495 spin_unlock(&encode_manager.event.sem_lock);
4496 encode_manager.process_queue_state = ENCODE_PROCESS_QUEUE_STOP;
4497 send_sig(SIGTERM, encode_manager.encode_thread, 1);
4498 complete(&encode_manager.event.request_in_com);
4499 kthread_stop(encode_manager.encode_thread);
4500 encode_manager.encode_thread = NULL;
4501 kfree(mc_addr);
4502 mc_addr = NULL;
4503 }
4504 return 0;
4505}
4506
4507static s32 encode_wq_init(void)
4508{
4509 u32 i = 0;
4510 struct encode_queue_item_s *pitem = NULL;
4511
4512 enc_pr(LOG_DEBUG, "encode_wq_init.\n");
4513 encode_manager.irq_requested = false;
4514
4515 spin_lock_init(&encode_manager.event.sem_lock);
4516 init_completion(&encode_manager.event.request_in_com);
4517 init_waitqueue_head(&encode_manager.event.hw_complete);
4518 init_completion(&encode_manager.event.process_complete);
4519 INIT_LIST_HEAD(&encode_manager.process_queue);
4520 INIT_LIST_HEAD(&encode_manager.free_queue);
4521 INIT_LIST_HEAD(&encode_manager.wq);
4522
4523 tasklet_init(&encode_manager.encode_tasklet,
4524 encode_isr_tasklet,
4525 (ulong)&encode_manager);
4526
4527 for (i = 0; i < MAX_ENCODE_REQUEST; i++) {
4528 pitem = kcalloc(1,
4529 sizeof(struct encode_queue_item_s),
4530 GFP_KERNEL);
4531 if (!pitem) {
4532 enc_pr(LOG_ERROR, "can't allocate queue item memory.\n");
4533 return -1;
4534 }
4535 pitem->request.parent = NULL;
4536 list_add_tail(&pitem->list, &encode_manager.free_queue);
4537 }
4538 encode_manager.current_wq = NULL;
4539 encode_manager.last_wq = NULL;
4540 encode_manager.encode_thread = NULL;
4541 encode_manager.current_item = NULL;
4542 encode_manager.wq_count = 0;
4543 encode_manager.remove_flag = false;
4544 InitEncodeWeight();
4545 if (encode_start_monitor()) {
4546 enc_pr(LOG_ERROR, "encode create thread error.\n");
4547 return -1;
4548 }
4549 return 0;
4550}
4551
4552static s32 encode_wq_uninit(void)
4553{
4554 struct encode_queue_item_s *pitem, *tmp;
4555 struct list_head *head;
4556 u32 count = 0;
4557 s32 r = -1;
4558 enc_pr(LOG_DEBUG, "uninit encode wq.\n");
4559 if (encode_stop_monitor() == 0) {
4560 if ((encode_manager.irq_num >= 0) &&
4561 (encode_manager.irq_requested == true)) {
4562 free_irq(encode_manager.irq_num, &encode_manager);
4563 encode_manager.irq_requested = false;
4564 }
4565 spin_lock(&encode_manager.event.sem_lock);
4566 head = &encode_manager.process_queue;
4567 list_for_each_entry_safe(pitem, tmp, head, list) {
4568 if (pitem) {
4569 list_del(&pitem->list);
4570 kfree(pitem);
4571 count++;
4572 }
4573 }
4574 head = &encode_manager.free_queue;
4575 list_for_each_entry_safe(pitem, tmp, head, list) {
4576 if (pitem) {
4577 list_del(&pitem->list);
4578 kfree(pitem);
4579 count++;
4580 }
4581 }
4582 spin_unlock(&encode_manager.event.sem_lock);
4583 if (count == MAX_ENCODE_REQUEST)
4584 r = 0;
4585 else {
4586 enc_pr(LOG_ERROR, "lost %d request items.\n",
4587 MAX_ENCODE_REQUEST - count);
4588 }
4589 }
4590 return r;
4591}
4592
4593static ssize_t encode_status_show(struct class *cla,
4594 struct class_attribute *attr, char *buf)
4595{
4596 u32 process_count = 0;
4597 u32 free_count = 0;
4598 struct encode_queue_item_s *pitem = NULL;
4599 struct encode_wq_s *current_wq = NULL;
4600 struct encode_wq_s *last_wq = NULL;
4601 struct list_head *head = NULL;
4602 s32 irq_num = 0;
4603 u32 hw_status = 0;
4604 u32 process_queue_state = 0;
4605 u32 wq_count = 0;
4606 u32 ucode_index;
4607 bool need_reset;
4608 bool process_irq;
4609 bool inited;
4610 bool use_reserve;
4611 struct Buff_s reserve_mem;
4612 u32 max_instance;
4613#ifdef CONFIG_CMA
4614 bool check_cma = false;
4615#endif
4616
4617 spin_lock(&encode_manager.event.sem_lock);
4618 head = &encode_manager.free_queue;
4619 list_for_each_entry(pitem, head, list) {
4620 free_count++;
4621 if (free_count > MAX_ENCODE_REQUEST)
4622 break;
4623 }
4624
4625 head = &encode_manager.process_queue;
4626 list_for_each_entry(pitem, head, list) {
4627 process_count++;
4628 if (process_count > MAX_ENCODE_REQUEST)
4629 break;
4630 }
4631
4632 current_wq = encode_manager.current_wq;
4633 last_wq = encode_manager.last_wq;
4634 pitem = encode_manager.current_item;
4635 irq_num = encode_manager.irq_num;
4636 hw_status = encode_manager.encode_hw_status;
4637 process_queue_state = encode_manager.process_queue_state;
4638 wq_count = encode_manager.wq_count;
4639 ucode_index = encode_manager.ucode_index;
4640 need_reset = encode_manager.need_reset;
4641 process_irq = encode_manager.process_irq;
4642 inited = encode_manager.inited;
4643 use_reserve = encode_manager.use_reserve;
4644 reserve_mem.buf_start = encode_manager.reserve_mem.buf_start;
4645 reserve_mem.buf_size = encode_manager.reserve_mem.buf_size;
4646
4647 max_instance = encode_manager.max_instance;
4648#ifdef CONFIG_CMA
4649 check_cma = encode_manager.check_cma;
4650#endif
4651
4652 spin_unlock(&encode_manager.event.sem_lock);
4653
4654 enc_pr(LOG_DEBUG,
4655 "encode process queue count: %d, free queue count: %d.\n",
4656 process_count, free_count);
4657 enc_pr(LOG_DEBUG,
4658 "encode current wq: %p, last wq: %p, wq count: %d, max_instance: %d.\n",
4659 current_wq, last_wq, wq_count, max_instance);
4660 if (current_wq)
4661 enc_pr(LOG_DEBUG,
4662 "encode current wq -- encode width: %d, encode height: %d.\n",
4663 current_wq->pic.encoder_width,
4664 current_wq->pic.encoder_height);
4665 enc_pr(LOG_DEBUG,
4666 "encode current pitem: %p, ucode_index: %d, hw_status: %d, need_reset: %s, process_irq: %s.\n",
4667 pitem, ucode_index, hw_status, need_reset ? "true" : "false",
4668 process_irq ? "true" : "false");
4669 enc_pr(LOG_DEBUG,
4670 "encode irq num: %d, inited: %s, process_queue_state: %d.\n",
4671 irq_num, inited ? "true" : "false", process_queue_state);
4672 if (use_reserve) {
4673 enc_pr(LOG_DEBUG,
4674 "encode uses reserved memory, buffer start: 0x%x, size: %d MB.\n",
4675 reserve_mem.buf_start,
4676 reserve_mem.buf_size / SZ_1M);
4677 } else {
4678#ifdef CONFIG_CMA
4679 enc_pr(LOG_DEBUG, "encode check cma: %s.\n",
4680 check_cma ? "true" : "false");
4681#endif
4682 }
4683 return snprintf(buf, 40, "encode max instance: %d\n", max_instance);
4684}
4685
4686static struct class_attribute amvenc_class_attrs[] = {
4687 __ATTR(encode_status,
4688 S_IRUGO | S_IWUSR,
4689 encode_status_show,
4690 NULL),
4691 __ATTR_NULL
4692};
4693
4694static struct class amvenc_avc_class = {
4695 .name = CLASS_NAME,
4696 .class_attrs = amvenc_class_attrs,
4697};
4698
4699s32 init_avc_device(void)
4700{
4701 s32 r = 0;
4702 r = register_chrdev(0, DEVICE_NAME, &amvenc_avc_fops);
4703 if (r <= 0) {
4704 enc_pr(LOG_ERROR, "failed to register amvenc_avc device.\n");
4705 return r;
4706 }
4707 avc_device_major = r;
4708
4709 r = class_register(&amvenc_avc_class);
4710 if (r < 0) {
4711 enc_pr(LOG_ERROR, "failed to create amvenc_avc class.\n");
4712 return r;
4713 }
4714
4715 amvenc_avc_dev = device_create(&amvenc_avc_class, NULL,
4716 MKDEV(avc_device_major, 0), NULL,
4717 DEVICE_NAME);
4718
4719 if (IS_ERR(amvenc_avc_dev)) {
4720 enc_pr(LOG_ERROR, "failed to create amvenc_avc device.\n");
4721 class_unregister(&amvenc_avc_class);
4722 return -1;
4723 }
4724 return r;
4725}
4726
4727s32 uninit_avc_device(void)
4728{
4729 if (amvenc_avc_dev)
4730 device_destroy(&amvenc_avc_class, MKDEV(avc_device_major, 0));
4731
4732 class_destroy(&amvenc_avc_class);
4733
4734 unregister_chrdev(avc_device_major, DEVICE_NAME);
4735 return 0;
4736}
4737
4738static s32 avc_mem_device_init(struct reserved_mem *rmem, struct device *dev)
4739{
4740 s32 r;
4741 struct resource res;
4742 if (!rmem) {
4743 enc_pr(LOG_ERROR,
4744 "Cannot obtain I/O memory; will allocate avc buffer instead!\n");
4745 r = -EFAULT;
4746 return r;
4747 }
4748 res.start = (phys_addr_t)rmem->base;
4749 res.end = res.start + (phys_addr_t)rmem->size - 1;
4750 encode_manager.reserve_mem.buf_start = res.start;
4751 encode_manager.reserve_mem.buf_size = res.end - res.start + 1;
4752
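 /* carve the reserved region into per-instance buffers of min_buffsize bytes each */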
4753 if (encode_manager.reserve_mem.buf_size >=
4754 amvenc_buffspec[AMVENC_BUFFER_LEVEL_1080P].min_buffsize) {
4755 encode_manager.max_instance =
4756 encode_manager.reserve_mem.buf_size /
4757 amvenc_buffspec[AMVENC_BUFFER_LEVEL_1080P].min_buffsize;
4758 if (encode_manager.max_instance > MAX_ENCODE_INSTANCE)
4759 encode_manager.max_instance = MAX_ENCODE_INSTANCE;
4760 encode_manager.reserve_buff = kzalloc(
4761 encode_manager.max_instance *
4762 sizeof(struct Buff_s), GFP_KERNEL);
4763 if (encode_manager.reserve_buff) {
4764 u32 i;
4765 struct Buff_s *reserve_buff;
4766 u32 max_instance = encode_manager.max_instance;
4767 for (i = 0; i < max_instance; i++) {
4768 reserve_buff = &encode_manager.reserve_buff[i];
4769 reserve_buff->buf_start =
4770 i *
4771 amvenc_buffspec
4772 [AMVENC_BUFFER_LEVEL_1080P]
4773 .min_buffsize +
4774 encode_manager.reserve_mem.buf_start;
4775 reserve_buff->buf_size = amvenc_buffspec
4776 [AMVENC_BUFFER_LEVEL_1080P].min_buffsize;
4777 reserve_buff->used = false;
4778 }
4779 encode_manager.use_reserve = true;
4780 r = 0;
4781 enc_pr(LOG_DEBUG,
4782 "amvenc_avc uses reserved memory, buf start: 0x%x, size: 0x%x, max instance is %d\n",
4783 encode_manager.reserve_mem.buf_start,
4784 encode_manager.reserve_mem.buf_size,
4785 encode_manager.max_instance);
4786 } else {
4787 enc_pr(LOG_ERROR,
4788 "amvenc_avc failed to allocate reserve buffer array, max instance is %d.\n",
4789 encode_manager.max_instance);
4790 encode_manager.max_instance = 0;
4791 encode_manager.reserve_mem.buf_start = 0;
4792 encode_manager.reserve_mem.buf_size = 0;
4793 r = -ENOMEM;
4794 }
4795 } else {
4796 enc_pr(LOG_ERROR,
4797 "amvenc_avc memory resource too small, size is 0x%x; need at least 0x%x bytes.\n",
4798 encode_manager.reserve_mem.buf_size,
4799 amvenc_buffspec[AMVENC_BUFFER_LEVEL_1080P]
4800 .min_buffsize);
4801 encode_manager.reserve_mem.buf_start = 0;
4802 encode_manager.reserve_mem.buf_size = 0;
4803 r = -ENOMEM;
4804 }
4805 return r;
4806}
4807
4808static s32 amvenc_avc_probe(struct platform_device *pdev)
4809{
4810 /* struct resource mem; */
4811 s32 res_irq;
4812 s32 idx;
4813 s32 r;
4814
4815 enc_pr(LOG_INFO, "amvenc_avc probe start.\n");
4816
4817 encode_manager.this_pdev = pdev;
4818#ifdef CONFIG_CMA
4819 encode_manager.check_cma = false;
4820#endif
4821 encode_manager.reserve_mem.buf_start = 0;
4822 encode_manager.reserve_mem.buf_size = 0;
4823 encode_manager.use_reserve = false;
4824 encode_manager.max_instance = 0;
4825 encode_manager.reserve_buff = NULL;
4826
4827 idx = of_reserved_mem_device_init(&pdev->dev);
4828 if (idx != 0) {
4829 enc_pr(LOG_DEBUG,
4830 "amvenc_avc_probe -- reserved memory config failed.\n");
4831 }
4832
4833 if (encode_manager.use_reserve == false) {
4834#ifndef CONFIG_CMA
4835 enc_pr(LOG_ERROR,
4836 "amvenc_avc memory is invalid, probe failed!\n");
4837 return -EFAULT;
4838#else
4839 encode_manager.cma_pool_size =
4840 (codec_mm_get_total_size() > (40 * SZ_1M)) ?
4841 (40 * SZ_1M) : codec_mm_get_total_size();
4842 enc_pr(LOG_DEBUG,
4843 "amvenc_avc - cma memory pool size: %d MB\n",
4844 (u32)encode_manager.cma_pool_size / SZ_1M);
4845#endif
4846 }
4847
4848 res_irq = platform_get_irq(pdev, 0);
4849 if (res_irq < 0) {
4850 enc_pr(LOG_ERROR, "[%s] failed to get irq!\n", __func__);
4851 return -EINVAL;
4852 }
4853
4854 encode_manager.irq_num = res_irq;
4855 if (encode_wq_init()) {
4856 kfree(encode_manager.reserve_buff);
4857 encode_manager.reserve_buff = NULL;
4858 enc_pr(LOG_ERROR, "encode work queue init error.\n");
4859 return -EFAULT;
4860 }
4861
4862 r = init_avc_device();
4863 enc_pr(LOG_INFO, "amvenc_avc probe end.\n");
4864
4865 avc_enc_class = class_create(THIS_MODULE, "avc_enc_debug");
4866 if (IS_ERR(avc_enc_class))
4867 return PTR_ERR(avc_enc_class);
4868 r = class_create_file(avc_enc_class, &class_attr_encode_tbl_debug);
4869
4870 return r;
4871}
4872
4873static s32 amvenc_avc_remove(struct platform_device *pdev)
4874{
4875 kfree(encode_manager.reserve_buff);
4876 encode_manager.reserve_buff = NULL;
4877 if (encode_wq_uninit()) {
4878 enc_pr(LOG_ERROR, "encode work queue uninit error.\n");
4879 }
4880 uninit_avc_device();
4881 enc_pr(LOG_INFO, "amvenc_avc remove.\n");
4882
4883 class_remove_file(avc_enc_class, &class_attr_encode_tbl_debug);
4884 class_destroy(avc_enc_class);
4885
4886 return 0;
4887}
4888
4889static const struct of_device_id amlogic_avcenc_dt_match[] = {
4890 {
4891 .compatible = "amlogic, amvenc_avc",
4892 },
4893 {},
4894};
4895
4896static struct platform_driver amvenc_avc_driver = {
4897 .probe = amvenc_avc_probe,
4898 .remove = amvenc_avc_remove,
4899 .driver = {
4900 .name = DRIVER_NAME,
4901 .of_match_table = amlogic_avcenc_dt_match,
4902 }
4903};
4904
4905static struct codec_profile_t amvenc_avc_profile = {
4906 .name = "avc",
4907 .profile = ""
4908};
4909
4910static s32 __init amvenc_avc_driver_init_module(void)
4911{
4912 enc_pr(LOG_INFO, "amvenc_avc module init\n");
4913
4914 if (platform_driver_register(&amvenc_avc_driver)) {
4915 enc_pr(LOG_ERROR,
4916 "failed to register amvenc_avc driver\n");
4917 return -ENODEV;
4918 }
4919 vcodec_profile_register(&amvenc_avc_profile);
4920 return 0;
4921}
4922
4923static void __exit amvenc_avc_driver_remove_module(void)
4924{
4925 enc_pr(LOG_INFO, "amvenc_avc module remove.\n");
4926
4927 platform_driver_unregister(&amvenc_avc_driver);
4928}
4929
4930static const struct reserved_mem_ops rmem_avc_ops = {
4931 .device_init = avc_mem_device_init,
4932};
4933
4934static s32 __init avc_mem_setup(struct reserved_mem *rmem)
4935{
4936 rmem->ops = &rmem_avc_ops;
4937 enc_pr(LOG_DEBUG, "amvenc_avc reserved mem setup.\n");
4938 return 0;
4939}
4940
4941module_param(me_mv_merge_ctl, uint, 0664);
4942MODULE_PARM_DESC(me_mv_merge_ctl, "\n me_mv_merge_ctl\n");
4943
4944module_param(me_step0_close_mv, uint, 0664);
4945MODULE_PARM_DESC(me_step0_close_mv, "\n me_step0_close_mv\n");
4946
4947module_param(me_f_skip_sad, uint, 0664);
4948MODULE_PARM_DESC(me_f_skip_sad, "\n me_f_skip_sad\n");
4949
4950module_param(me_f_skip_weight, uint, 0664);
4951MODULE_PARM_DESC(me_f_skip_weight, "\n me_f_skip_weight\n");
4952
4953module_param(me_mv_weight_01, uint, 0664);
4954MODULE_PARM_DESC(me_mv_weight_01, "\n me_mv_weight_01\n");
4955
4956module_param(me_mv_weight_23, uint, 0664);
4957MODULE_PARM_DESC(me_mv_weight_23, "\n me_mv_weight_23\n");
4958
4959module_param(me_sad_range_inc, uint, 0664);
4960MODULE_PARM_DESC(me_sad_range_inc, "\n me_sad_range_inc\n");
4961
4962module_param(me_sad_enough_01, uint, 0664);
4963MODULE_PARM_DESC(me_sad_enough_01, "\n me_sad_enough_01\n");
4964
4965module_param(me_sad_enough_23, uint, 0664);
4966MODULE_PARM_DESC(me_sad_enough_23, "\n me_sad_enough_23\n");
4967
4968module_param(fixed_slice_cfg, uint, 0664);
4969MODULE_PARM_DESC(fixed_slice_cfg, "\n fixed_slice_cfg\n");
4970
4971module_param(enable_dblk, uint, 0664);
4972MODULE_PARM_DESC(enable_dblk, "\n enable_dblk\n");
4973
4974module_param(clock_level, uint, 0664);
4975MODULE_PARM_DESC(clock_level, "\n clock_level\n");
4976
4977module_param(encode_print_level, uint, 0664);
4978MODULE_PARM_DESC(encode_print_level, "\n encode_print_level\n");
4979
4980module_param(no_timeout, uint, 0664);
4981MODULE_PARM_DESC(no_timeout, "\n no_timeout flag for process request\n");
4982
4983module_param(nr_mode, int, 0664);
4984MODULE_PARM_DESC(nr_mode, "\n nr_mode option\n");
4985
4986module_param(y_tnr_mc_en, uint, 0664);
4987MODULE_PARM_DESC(y_tnr_mc_en, "\n y_tnr_mc_en option\n");
4988module_param(y_tnr_txt_mode, uint, 0664);
4989MODULE_PARM_DESC(y_tnr_txt_mode, "\n y_tnr_txt_mode option\n");
4990module_param(y_tnr_mot_sad_margin, uint, 0664);
4991MODULE_PARM_DESC(y_tnr_mot_sad_margin, "\n y_tnr_mot_sad_margin option\n");
4992module_param(y_tnr_mot_cortxt_rate, uint, 0664);
4993MODULE_PARM_DESC(y_tnr_mot_cortxt_rate, "\n y_tnr_mot_cortxt_rate option\n");
4994module_param(y_tnr_mot_distxt_ofst, uint, 0664);
4995MODULE_PARM_DESC(y_tnr_mot_distxt_ofst, "\n y_tnr_mot_distxt_ofst option\n");
4996module_param(y_tnr_mot_distxt_rate, uint, 0664);
4997MODULE_PARM_DESC(y_tnr_mot_distxt_rate, "\n y_tnr_mot_distxt_rate option\n");
4998module_param(y_tnr_mot_dismot_ofst, uint, 0664);
4999MODULE_PARM_DESC(y_tnr_mot_dismot_ofst, "\n y_tnr_mot_dismot_ofst option\n");
5000module_param(y_tnr_mot_frcsad_lock, uint, 0664);
5001MODULE_PARM_DESC(y_tnr_mot_frcsad_lock, "\n y_tnr_mot_frcsad_lock option\n");
5002module_param(y_tnr_mot2alp_frc_gain, uint, 0664);
5003MODULE_PARM_DESC(y_tnr_mot2alp_frc_gain, "\n y_tnr_mot2alp_frc_gain option\n");
5004module_param(y_tnr_mot2alp_nrm_gain, uint, 0664);
5005MODULE_PARM_DESC(y_tnr_mot2alp_nrm_gain, "\n y_tnr_mot2alp_nrm_gain option\n");
5006module_param(y_tnr_mot2alp_dis_gain, uint, 0664);
5007MODULE_PARM_DESC(y_tnr_mot2alp_dis_gain, "\n y_tnr_mot2alp_dis_gain option\n");
5008module_param(y_tnr_mot2alp_dis_ofst, uint, 0664);
5009MODULE_PARM_DESC(y_tnr_mot2alp_dis_ofst, "\n y_tnr_mot2alp_dis_ofst option\n");
5010module_param(y_tnr_alpha_min, uint, 0664);
5011MODULE_PARM_DESC(y_tnr_alpha_min, "\n y_tnr_alpha_min option\n");
5012module_param(y_tnr_alpha_max, uint, 0664);
5013MODULE_PARM_DESC(y_tnr_alpha_max, "\n y_tnr_alpha_max option\n");
5014module_param(y_tnr_deghost_os, uint, 0664);
5015MODULE_PARM_DESC(y_tnr_deghost_os, "\n y_tnr_deghost_os option\n");
5016
5017module_param(c_tnr_mc_en, uint, 0664);
5018MODULE_PARM_DESC(c_tnr_mc_en, "\n c_tnr_mc_en option\n");
5019module_param(c_tnr_txt_mode, uint, 0664);
5020MODULE_PARM_DESC(c_tnr_txt_mode, "\n c_tnr_txt_mode option\n");
5021module_param(c_tnr_mot_sad_margin, uint, 0664);
5022MODULE_PARM_DESC(c_tnr_mot_sad_margin, "\n c_tnr_mot_sad_margin option\n");
5023module_param(c_tnr_mot_cortxt_rate, uint, 0664);
5024MODULE_PARM_DESC(c_tnr_mot_cortxt_rate, "\n c_tnr_mot_cortxt_rate option\n");
5025module_param(c_tnr_mot_distxt_ofst, uint, 0664);
5026MODULE_PARM_DESC(c_tnr_mot_distxt_ofst, "\n c_tnr_mot_distxt_ofst option\n");
5027module_param(c_tnr_mot_distxt_rate, uint, 0664);
5028MODULE_PARM_DESC(c_tnr_mot_distxt_rate, "\n c_tnr_mot_distxt_rate option\n");
5029module_param(c_tnr_mot_dismot_ofst, uint, 0664);
5030MODULE_PARM_DESC(c_tnr_mot_dismot_ofst, "\n c_tnr_mot_dismot_ofst option\n");
5031module_param(c_tnr_mot_frcsad_lock, uint, 0664);
5032MODULE_PARM_DESC(c_tnr_mot_frcsad_lock, "\n c_tnr_mot_frcsad_lock option\n");
5033module_param(c_tnr_mot2alp_frc_gain, uint, 0664);
5034MODULE_PARM_DESC(c_tnr_mot2alp_frc_gain, "\n c_tnr_mot2alp_frc_gain option\n");
5035module_param(c_tnr_mot2alp_nrm_gain, uint, 0664);
5036MODULE_PARM_DESC(c_tnr_mot2alp_nrm_gain, "\n c_tnr_mot2alp_nrm_gain option\n");
5037module_param(c_tnr_mot2alp_dis_gain, uint, 0664);
5038MODULE_PARM_DESC(c_tnr_mot2alp_dis_gain, "\n c_tnr_mot2alp_dis_gain option\n");
5039module_param(c_tnr_mot2alp_dis_ofst, uint, 0664);
5040MODULE_PARM_DESC(c_tnr_mot2alp_dis_ofst, "\n c_tnr_mot2alp_dis_ofst option\n");
5041module_param(c_tnr_alpha_min, uint, 0664);
5042MODULE_PARM_DESC(c_tnr_alpha_min, "\n c_tnr_alpha_min option\n");
5043module_param(c_tnr_alpha_max, uint, 0664);
5044MODULE_PARM_DESC(c_tnr_alpha_max, "\n c_tnr_alpha_max option\n");
5045module_param(c_tnr_deghost_os, uint, 0664);
5046MODULE_PARM_DESC(c_tnr_deghost_os, "\n c_tnr_deghost_os option\n");
5047
5048module_param(y_snr_err_norm, uint, 0664);
5049MODULE_PARM_DESC(y_snr_err_norm, "\n y_snr_err_norm option\n");
5050module_param(y_snr_gau_bld_core, uint, 0664);
5051MODULE_PARM_DESC(y_snr_gau_bld_core, "\n y_snr_gau_bld_core option\n");
5052module_param(y_snr_gau_bld_ofst, int, 0664);
5053MODULE_PARM_DESC(y_snr_gau_bld_ofst, "\n y_snr_gau_bld_ofst option\n");
5054module_param(y_snr_gau_bld_rate, uint, 0664);
5055MODULE_PARM_DESC(y_snr_gau_bld_rate, "\n y_snr_gau_bld_rate option\n");
5056module_param(y_snr_gau_alp0_min, uint, 0664);
5057MODULE_PARM_DESC(y_snr_gau_alp0_min, "\n y_snr_gau_alp0_min option\n");
5058module_param(y_snr_gau_alp0_max, uint, 0664);
5059MODULE_PARM_DESC(y_snr_gau_alp0_max, "\n y_snr_gau_alp0_max option\n");
5060module_param(y_bld_beta2alp_rate, uint, 0664);
5061MODULE_PARM_DESC(y_bld_beta2alp_rate, "\n y_bld_beta2alp_rate option\n");
5062module_param(y_bld_beta_min, uint, 0664);
5063MODULE_PARM_DESC(y_bld_beta_min, "\n y_bld_beta_min option\n");
5064module_param(y_bld_beta_max, uint, 0664);
5065MODULE_PARM_DESC(y_bld_beta_max, "\n y_bld_beta_max option\n");
5066
5067module_param(c_snr_err_norm, uint, 0664);
5068MODULE_PARM_DESC(c_snr_err_norm, "\n c_snr_err_norm option\n");
5069module_param(c_snr_gau_bld_core, uint, 0664);
5070MODULE_PARM_DESC(c_snr_gau_bld_core, "\n c_snr_gau_bld_core option\n");
5071module_param(c_snr_gau_bld_ofst, int, 0664);
5072MODULE_PARM_DESC(c_snr_gau_bld_ofst, "\n c_snr_gau_bld_ofst option\n");
5073module_param(c_snr_gau_bld_rate, uint, 0664);
5074MODULE_PARM_DESC(c_snr_gau_bld_rate, "\n c_snr_gau_bld_rate option\n");
5075module_param(c_snr_gau_alp0_min, uint, 0664);
5076MODULE_PARM_DESC(c_snr_gau_alp0_min, "\n c_snr_gau_alp0_min option\n");
5077module_param(c_snr_gau_alp0_max, uint, 0664);
5078MODULE_PARM_DESC(c_snr_gau_alp0_max, "\n c_snr_gau_alp0_max option\n");
5079module_param(c_bld_beta2alp_rate, uint, 0664);
5080MODULE_PARM_DESC(c_bld_beta2alp_rate, "\n c_bld_beta2alp_rate option\n");
5081module_param(c_bld_beta_min, uint, 0664);
5082MODULE_PARM_DESC(c_bld_beta_min, "\n c_bld_beta_min option\n");
5083module_param(c_bld_beta_max, uint, 0664);
5084MODULE_PARM_DESC(c_bld_beta_max, "\n c_bld_beta_max option\n");
5085
5086module_init(amvenc_avc_driver_init_module);
5087module_exit(amvenc_avc_driver_remove_module);
5088RESERVEDMEM_OF_DECLARE(amvenc_avc, "amlogic, amvenc-memory", avc_mem_setup);
5089
5090MODULE_DESCRIPTION("AMLOGIC AVC Video Encoder Driver");
5091MODULE_LICENSE("GPL");
5092MODULE_AUTHOR("simon.zheng <simon.zheng@amlogic.com>");
5093