blob: 2d82dfbf6976d032e4338aaff2647cfa91d945a9
1 | #include <linux/kernel.h> |
2 | #include <linux/types.h> |
3 | #include <linux/vmalloc.h> |
4 | #include <linux/mm.h> |
5 | #include <linux/string.h> |
6 | |
7 | #include "aml_vp9_parser.h" |
8 | #include "../utils/get_bits.h" |
9 | #include "../utils/put_bits.h" |
10 | #include "../utils/golomb.h" |
11 | #include "../utils/common.h" |
12 | #include "utils.h" |
13 | |
14 | #define VP9_SYNCCODE 0x498342 |
15 | |
/*
 * Parse the colorspace / bit-depth portion of a VP9 uncompressed header.
 *
 * Reads, in strict bitstream order: one extra bit-depth bit (profiles 2/3
 * only), a 3-bit colorspace index, and — for YUV colorspaces — the
 * color-range bit plus, for odd profiles, two explicit subsampling bits
 * and a reserved bit.
 *
 * Side effects: fills in s->bpp_index, s->s.h.bpp, s->bytesperpixel,
 * s->ss_h, s->ss_v and s->pix_fmt.
 *
 * Returns 0 on success, -1 on unsupported/invalid combinations
 * (RGB in even profiles, 4:2:0 in odd profiles, reserved bits set).
 */
static int read_colorspace_details(struct VP9Context *s, int profile)
{
	static const enum AVColorSpace colorspaces[8] = {
		AVCOL_SPC_UNSPECIFIED, AVCOL_SPC_BT470BG, AVCOL_SPC_BT709, AVCOL_SPC_SMPTE170M,
		AVCOL_SPC_SMPTE240M, AVCOL_SPC_BT2020_NCL, AVCOL_SPC_RESERVED, AVCOL_SPC_RGB,
	};

	enum AVColorSpace colorspace;
	int color_range;
	/* profiles 0/1 are fixed 8-bit; profiles 2/3 read one bit: 0 -> 10-bit, 1 -> 12-bit */
	int bits = profile <= 1 ? 0 : 1 + get_bits1(&s->gb); // 0:8, 1:10, 2:12

	s->bpp_index = bits;
	s->s.h.bpp = 8 + bits * 2;
	s->bytesperpixel = (7 + s->s.h.bpp) >> 3;
	colorspace = colorspaces[get_bits(&s->gb, 3)];
	if (colorspace == AVCOL_SPC_RGB) { // RGB = profile 1
		if (profile & 1) {
			/* RGB is only legal in odd profiles; a reserved bit follows */
			if (get_bits1(&s->gb)) {
				pr_err("Reserved bit set in RGB\n");
				return -1;
			}
		} else {
			pr_err("RGB not supported in profile %d\n", profile);
			return -1;
		}
	} else {
		/* pix_fmt lookup indexed as [bit-depth index][ss_v][ss_h] */
		static const enum AVPixelFormat pix_fmt_for_ss[3][2 /* v */][2 /* h */] = {
			{ { AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV422P },
			  { AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV420P } },
			{ { AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV422P10 },
			  { AV_PIX_FMT_YUV440P10, AV_PIX_FMT_YUV420P10 } },
			{ { AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12 },
			  { AV_PIX_FMT_YUV440P12, AV_PIX_FMT_YUV420P12 } }};
		/* NOTE(review): color_range is read to keep bitstream position
		 * correct but is never stored anywhere — looks like a dropped
		 * assignment (upstream stores it in the header); confirm. */
		color_range = get_bits1(&s->gb) ? 2 : 1;
		if (profile & 1) {
			/* odd profiles signal chroma subsampling explicitly */
			s->ss_h = get_bits1(&s->gb);
			s->ss_v = get_bits1(&s->gb);
			s->pix_fmt = pix_fmt_for_ss[bits][s->ss_v][s->ss_h];
			if (s->pix_fmt == AV_PIX_FMT_YUV420P) {
				pr_err("YUV 4:2:0 not supported in profile %d\n", profile);
				return -1;
			} else if (get_bits1(&s->gb)) {
				pr_err("Profile %d color details reserved bit set\n", profile);
				return -1;
			}
		} else {
			/* even profiles are implicitly 4:2:0 */
			s->ss_h = s->ss_v = 1;
			s->pix_fmt = pix_fmt_for_ss[bits][1][1];
		}
	}

	return 0;
}
69 | |
/*
 * Parse the start of a VP9 uncompressed frame header from @data/@size.
 *
 * Handles the frame marker, profile bits, show_existing_frame, frame type,
 * sync code, colorspace details and frame/render dimensions for keyframes
 * and intra-only frames.
 *
 * Returns:
 *   0  - header parsed; if the frame was a show_existing_frame, *ref is
 *        set to the reference slot to display (otherwise *ref is untouched);
 *   <0 - on parse error, or for inter frames, whose header parsing is not
 *        implemented here (see the commented-out TODO list below).
 */
int decode_frame_header(const u8 *data, int size, struct VP9Context *s, int *ref)
{
	int ret, last_invisible, profile;

	/* general header */
	if ((ret = init_get_bits8(&s->gb, data, size)) < 0) {
		pr_err("Failed to initialize bitstream reader\n");
		return ret;
	}

	if (get_bits(&s->gb, 2) != 0x2) { // frame marker
		pr_err("Invalid frame marker\n");
		return -1;
	}

	/* profile is two low bits plus, when both are set, one extra bit */
	profile = get_bits1(&s->gb);
	profile |= get_bits1(&s->gb) << 1;
	if (profile == 3)
		profile += get_bits1(&s->gb);

	if (profile > 3) {
		pr_err("Profile %d is not yet supported\n", profile);
		return -1;
	}

	s->s.h.profile = profile;
	/* show_existing_frame: no header follows, just the ref slot to show */
	if (get_bits1(&s->gb)) {
		*ref = get_bits(&s->gb, 3);
		return 0;
	}

	s->last_keyframe = s->s.h.keyframe;
	s->s.h.keyframe = !get_bits1(&s->gb);

	last_invisible = s->s.h.invisible;
	s->s.h.invisible = !get_bits1(&s->gb);
	s->s.h.errorres = get_bits1(&s->gb);
	/* previous-frame MVs are only usable without error-resilient mode
	 * and when the previous frame was shown */
	s->s.h.use_last_frame_mvs = !s->s.h.errorres && !last_invisible;

	if (s->s.h.keyframe) {
		if (get_bits_long(&s->gb, 24) != VP9_SYNCCODE) { // synccode
			pr_err("Invalid sync code\n");
			return -1;
		}
		if ((ret = read_colorspace_details(s,profile)) < 0)
			return ret;
		// for profile 1, here follows the subsampling bits
		s->s.h.refreshrefmask = 0xff;
		s->width = get_bits(&s->gb, 16) + 1;
		s->height = get_bits(&s->gb, 16) + 1;
		if (get_bits1(&s->gb)) { // has scaling
			s->render_width = get_bits(&s->gb, 16) + 1;
			s->render_height = get_bits(&s->gb, 16) + 1;
		} else {
			s->render_width = s->width;
			s->render_height = s->height;
		}
		/*pr_info("keyframe res: (%d x %d), render size: (%d x %d)\n",
			s->width, s->height, s->render_width, s->render_height);*/
	} else {
		s->s.h.intraonly = s->s.h.invisible ? get_bits1(&s->gb) : 0;
		s->s.h.resetctx = s->s.h.errorres ? 0 : get_bits(&s->gb, 2);
		if (s->s.h.intraonly) {
			if (get_bits_long(&s->gb, 24) != VP9_SYNCCODE) { // synccode
				pr_err("Invalid sync code\n");
				return -1;
			}
			if (profile >= 1) {
				if ((ret = read_colorspace_details(s, profile)) < 0)
					return ret;
			} else {
				/* profile 0 intra-only frames are implicitly 8-bit 4:2:0 */
				s->ss_h = s->ss_v = 1;
				s->s.h.bpp = 8;
				s->bpp_index = 0;
				s->bytesperpixel = 1;
				s->pix_fmt = AV_PIX_FMT_YUV420P;
			}
			s->s.h.refreshrefmask = get_bits(&s->gb, 8);
			s->width = get_bits(&s->gb, 16) + 1;
			s->height = get_bits(&s->gb, 16) + 1;
			if (get_bits1(&s->gb)) { // has scaling
				s->render_width = get_bits(&s->gb, 16) + 1;
				s->render_height = get_bits(&s->gb, 16) + 1;
			} else {
				s->render_width = s->width;
				s->render_height = s->height;
			}
			pr_info("intra res: (%d x %d), render size: (%d x %d)\n",
				s->width, s->height, s->render_width, s->render_height);
		} else {
			/* inter frame: read refs/sign biases to keep bit position
			 * honest, then bail out — the remainder of the inter-frame
			 * header is not implemented (see TODO list below). */
			s->s.h.refreshrefmask = get_bits(&s->gb, 8);
			s->s.h.refidx[0] = get_bits(&s->gb, 3);
			s->s.h.signbias[0] = get_bits1(&s->gb) && !s->s.h.errorres;
			s->s.h.refidx[1] = get_bits(&s->gb, 3);
			s->s.h.signbias[1] = get_bits1(&s->gb) && !s->s.h.errorres;
			s->s.h.refidx[2] = get_bits(&s->gb, 3);
			s->s.h.signbias[2] = get_bits1(&s->gb) && !s->s.h.errorres;

			/*refresh_frame_flags;
			for (i = 0; i < REFS_PER_FRAME; ++i) {
				frame_refs[i];
				ref_frame_sign_biases[i];
			}
			frame_size_from_refs();
			high_precision_mv;
			interp_filter();*/

			return -1;
		}
	}

	return 0;
}
183 | |
184 | int vp9_superframe_split_filter(struct vp9_superframe_split *s) |
185 | { |
186 | int i, j, ret, marker; |
187 | bool is_superframe = false; |
188 | int *prefix = (int *)s->data; |
189 | |
190 | if (!s->data) |
191 | return -1; |
192 | |
193 | #define AML_PREFIX ('V' << 24 | 'L' << 16 | 'M' << 8 | 'A') |
194 | if (prefix[3] == AML_PREFIX) { |
195 | s->prefix_size = 16; |
196 | /*pr_info("the frame data has beed added header\n");*/ |
197 | } |
198 | |
199 | marker = s->data[s->data_size - 1]; |
200 | if ((marker & 0xe0) == 0xc0) { |
201 | int length_size = 1 + ((marker >> 3) & 0x3); |
202 | int nb_frames = 1 + (marker & 0x7); |
203 | int idx_size = 2 + nb_frames * length_size; |
204 | |
205 | if (s->data_size >= idx_size && |
206 | s->data[s->data_size - idx_size] == marker) { |
207 | s64 total_size = 0; |
208 | int idx = s->data_size + 1 - idx_size; |
209 | |
210 | for (i = 0; i < nb_frames; i++) { |
211 | int frame_size = 0; |
212 | for (j = 0; j < length_size; j++) |
213 | frame_size |= s->data[idx++] << (j * 8); |
214 | |
215 | total_size += frame_size; |
216 | if (frame_size < 0 || |
217 | total_size > s->data_size - idx_size) { |
218 | pr_err( "Invalid frame size in a sframe: %d\n", |
219 | frame_size); |
220 | ret = -EINVAL; |
221 | goto fail; |
222 | } |
223 | s->sizes[i] = frame_size; |
224 | } |
225 | |
226 | s->nb_frames = nb_frames; |
227 | s->size = total_size; |
228 | s->next_frame = 0; |
229 | s->next_frame_offset = 0; |
230 | is_superframe = true; |
231 | } |
232 | }else { |
233 | s->nb_frames = 1; |
234 | s->sizes[0] = s->data_size; |
235 | s->size = s->data_size; |
236 | } |
237 | |
238 | /*pr_info("sframe: %d, frames: %d, IN: %x, OUT: %x\n", |
239 | is_superframe, s->nb_frames, |
240 | s->data_size, s->size);*/ |
241 | |
242 | /* parse uncompressed header. */ |
243 | if (is_superframe) { |
244 | /* bitstream profile. */ |
245 | /* frame type. (intra or inter) */ |
246 | /* colorspace descriptor */ |
247 | /* ... */ |
248 | |
249 | pr_info("the frame is a superframe.\n"); |
250 | } |
251 | |
252 | /*pr_err("in: %x, %d, out: %x, sizes %d,%d,%d,%d,%d,%d,%d,%d\n", |
253 | s->data_size, |
254 | s->nb_frames, |
255 | s->size, |
256 | s->sizes[0], |
257 | s->sizes[1], |
258 | s->sizes[2], |
259 | s->sizes[3], |
260 | s->sizes[4], |
261 | s->sizes[5], |
262 | s->sizes[6], |
263 | s->sizes[7]);*/ |
264 | |
265 | return 0; |
266 | fail: |
267 | return ret; |
268 | } |
269 | |
270 | int vp9_decode_extradata_ps(u8 *data, int size, struct vp9_param_sets *ps) |
271 | { |
272 | int i, ref = -1, ret = 0; |
273 | struct vp9_superframe_split s = {0}; |
274 | |
275 | /*parse superframe.*/ |
276 | s.data = data; |
277 | s.data_size = size; |
278 | ret = vp9_superframe_split_filter(&s); |
279 | if (ret) { |
280 | pr_err("parse frames failed.\n"); |
281 | return ret; |
282 | } |
283 | |
284 | for (i = 0; i < s.nb_frames; i++) { |
285 | u32 len = s.sizes[i] - s.prefix_size; |
286 | u8 *buf = s.data + s.next_frame_offset + s.prefix_size; |
287 | |
288 | ret = decode_frame_header(buf, len, &ps->ctx, &ref); |
289 | if (!ret) { |
290 | ps->head_parsed = ref < 0 ? true : false; |
291 | return 0; |
292 | } |
293 | |
294 | s.next_frame_offset = len + s.prefix_size; |
295 | } |
296 | |
297 | return ret; |
298 | } |
299 | |
300 |