/*
 * Source: drivers/frame_provider/decoder/vav1/av1_bufmgr.c
 * (gitweb blob 0e983c9b1d53443283af6b04aaf75a5d110f1096)
 */
1#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2#include <stdio.h>
3#include <stdlib.h>
4#include <string.h>
5#else
6#include <linux/kernel.h>
7#include <linux/module.h>
8#include <linux/types.h>
9#include <linux/errno.h>
10#include <linux/spinlock.h>
11#include <linux/slab.h>
12#include <linux/vmalloc.h>
13#include <linux/amlogic/media/canvas/canvas.h>
14
15#undef pr_info
16#define pr_info printk
17
18#define __COMPARE(context, p1, p2) comp(p1, p2)
19#define __SHORTSORT(lo, hi, width, comp, context) \
20 shortsort(lo, hi, width, comp)
21#define CUTOFF 8 /* testing shows that this is good value */
22#define STKSIZ (8*sizeof(void *) - 2)
23
24#undef swap
/* Exchange the 'width'-byte objects at 'a' and 'b'.
 * The swap is done one char at a time so the routine carries no
 * alignment requirements on either pointer.
 */
static void swap(char *a, char *b, size_t width)
{
	size_t n;

	if (a == b)
		return;

	for (n = width; n != 0; n--) {
		char t = *a;

		*a = *b;
		*b = t;
		a++;
		b++;
	}
}
39
/* Selection sort for small sub-arrays: on each pass, move the largest
 * remaining element to the end of the [lo, hi] range, then shrink the
 * range from the right.  lo/hi point at the first and last element
 * (inclusive); width is the element size in bytes.
 */
static void shortsort(char *lo, char *hi, size_t width,
		int (*comp)(const void *, const void *))
{
	while (lo < hi) {
		char *biggest = lo;
		char *cur;

		/* Find the largest element in [lo, hi]. */
		for (cur = lo + width; cur <= hi; cur += width) {
			if (comp(cur, biggest) > 0)
				biggest = cur;
		}

		/* Park it at the end; everything at/after hi is sorted. */
		swap(biggest, hi, width);
		hi -= width;
	}
}
70
/* Iterative quicksort, derived from the MSVC CRT implementation.
 *
 * base:  start of the array to sort
 * num:   number of elements
 * width: size of one element in bytes
 * comp:  qsort-style comparison callback
 *
 * Recursion is simulated with an explicit stack (lostk/histk) so the
 * routine is safe on a small kernel stack; the smaller partition is
 * always processed first, which bounds the stack depth to O(log n)
 * (STKSIZ entries is more than enough for any address-space-sized
 * array).  Sub-arrays of CUTOFF elements or fewer are handed to
 * shortsort() instead.
 */
static void qsort(void *base, size_t num, size_t width,
	int (*comp)(const void *, const void *))
{
	char *lo, *hi; /* ends of sub-array currently sorting */
	char *mid; /* points to middle of subarray */
	char *loguy, *higuy; /* traveling pointers for partition step */
	size_t size; /* size of the sub-array */
	char *lostk[STKSIZ], *histk[STKSIZ];
	int stkptr;

/* stack for saving sub-array to be
 * processed
 */
#if 0
	/* validation section */
	_VALIDATE_RETURN_VOID(base != NULL || num == 0, EINVAL);
	_VALIDATE_RETURN_VOID(width > 0, EINVAL);
	_VALIDATE_RETURN_VOID(comp != NULL, EINVAL);
#endif
	if (num < 2)
		return; /* nothing to do */

	stkptr = 0; /* initialize stack */
	lo = (char *)base;
	hi = (char *)base + width * (num - 1); /* initialize limits */

	/* this entry point is for pseudo-recursion calling: setting
	 * lo and hi and jumping to here is like recursion, but stkptr is
	 * preserved, locals aren't, so we preserve stuff on the stack
	 */
recurse:

	size = (hi - lo) / width + 1; /* number of el's to sort */

	/* below a certain size, it is faster to use a O(n^2) sorting method */
	if (size <= CUTOFF) {
		__SHORTSORT(lo, hi, width, comp, context);
	} else {
		/* First we pick a partitioning element. The efficiency of
		 * the algorithm demands that we find one that is approximately
		 * the median of the values, but also that we select one fast.
		 * We choose the median of the first, middle, and last
		 * elements, to avoid bad performance in the face of already
		 * sorted data, or data that is made up of multiple sorted
		 * runs appended together. Testing shows that a
		 * median-of-three algorithm provides better performance than
		 * simply picking the middle element for the latter case.
		 */

		mid = lo + (size / 2) * width; /* find middle element */

		/* Sort the first, middle, last elements into order */
		if (__COMPARE(context, lo, mid) > 0)
			swap(lo, mid, width);
		if (__COMPARE(context, lo, hi) > 0)
			swap(lo, hi, width);
		if (__COMPARE(context, mid, hi) > 0)
			swap(mid, hi, width);

		/* We now wish to partition the array into three pieces, one
		 * consisting of elements <= partition element, one of elements
		 * equal to the partition element, and one of elements > than
		 * it. This is done below; comments indicate conditions
		 * established at every step.
		 */

		loguy = lo;
		higuy = hi;

		/* Note that higuy decreases and loguy increases on every
		 * iteration, so loop must terminate.
		 */
		for (;;) {
			/* lo <= loguy < hi, lo < higuy <= hi,
			 * A[i] <= A[mid] for lo <= i <= loguy,
			 * A[i] > A[mid] for higuy <= i < hi,
			 * A[hi] >= A[mid]
			 */

			/* The doubled loop is to avoid calling comp(mid,mid),
			 * since some existing comparison funcs don't work
			 * when passed the same value for both pointers.
			 */

			if (mid > loguy) {
				do {
					loguy += width;
				} while (loguy < mid &&
					__COMPARE(context, loguy, mid) <= 0);
			}
			if (mid <= loguy) {
				do {
					loguy += width;
				} while (loguy <= hi &&
					__COMPARE(context, loguy, mid) <= 0);
			}

			/* lo < loguy <= hi+1, A[i] <= A[mid] for
			 * lo <= i < loguy,
			 * either loguy > hi or A[loguy] > A[mid]
			 */

			do {
				higuy -= width;
			} while (higuy > mid &&
				__COMPARE(context, higuy, mid) > 0);

			/* lo <= higuy < hi, A[i] > A[mid] for higuy < i < hi,
			 * either higuy == lo or A[higuy] <= A[mid]
			 */

			if (higuy < loguy)
				break;

			/* if loguy > hi or higuy == lo, then we would have
			 * exited, so A[loguy] > A[mid], A[higuy] <= A[mid],
			 * loguy <= hi, higuy > lo
			 */

			swap(loguy, higuy, width);

			/* If the partition element was moved, follow it.
			 * Only need to check for mid == higuy, since before
			 * the swap, A[loguy] > A[mid] implies loguy != mid.
			 */

			if (mid == higuy)
				mid = loguy;

			/* A[loguy] <= A[mid], A[higuy] > A[mid]; so condition
			 * at top of loop is re-established
			 */
		}

		/* A[i] <= A[mid] for lo <= i < loguy,
		 * A[i] > A[mid] for higuy < i < hi,
		 * A[hi] >= A[mid]
		 * higuy < loguy
		 * implying:
		 * higuy == loguy-1
		 * or higuy == hi - 1, loguy == hi + 1, A[hi] == A[mid]
		 */

		/* Find adjacent elements equal to the partition element. The
		 * doubled loop is to avoid calling comp(mid,mid), since some
		 * existing comparison funcs don't work when passed the same
		 * value for both pointers.
		 */

		higuy += width;
		if (mid < higuy) {
			do {
				higuy -= width;
			} while (higuy > mid &&
				__COMPARE(context, higuy, mid) == 0);
		}
		if (mid >= higuy) {
			do {
				higuy -= width;
			} while (higuy > lo &&
				__COMPARE(context, higuy, mid) == 0);
		}

		/* OK, now we have the following:
		 * higuy < loguy
		 * lo <= higuy <= hi
		 * A[i] <= A[mid] for lo <= i <= higuy
		 * A[i] == A[mid] for higuy < i < loguy
		 * A[i] > A[mid] for loguy <= i < hi
		 * A[hi] >= A[mid]
		 */

		/* We've finished the partition, now we want to sort the
		 * subarrays [lo, higuy] and [loguy, hi].
		 * We do the smaller one first to minimize stack usage.
		 * We only sort arrays of length 2 or more.
		 */

		if (higuy - lo >= hi - loguy) {
			if (lo < higuy) {
				lostk[stkptr] = lo;
				histk[stkptr] = higuy;
				++stkptr;
			} /* save big recursion for later */

			if (loguy < hi) {
				lo = loguy;
				goto recurse; /* do small recursion */
			}
		} else {
			if (loguy < hi) {
				lostk[stkptr] = loguy;
				histk[stkptr] = hi;
				++stkptr; /* save big recursion for later */
			}

			if (lo < higuy) {
				hi = higuy;
				goto recurse; /* do small recursion */
			}
		}
	}

	/* We have sorted the array, except for any pending sorts on the stack.
	 * Check if there are any, and do them.
	 */

	--stkptr;
	if (stkptr >= 0) {
		lo = lostk[stkptr];
		hi = histk[stkptr];
		goto recurse; /* pop subarray from stack */
	} else
		return; /* all subarrays done */
}
286
287#endif
288
289#include "av1_global.h"
/* Implemented elsewhere in the vav1 driver. */
int aom_realloc_frame_buffer(AV1_COMMON *cm, PIC_BUFFER_CONFIG *pic,
	int width, int height, unsigned int order_hint);
void dump_params(AV1Decoder *pbi, union param_u *params);

/* Compile out libaom's assert()/IMPLIES() sanity checks in this build;
 * the call sites below are kept as documentation of the invariants.
 */
#define assert(a)
#define IMPLIES(a)

/* Debug counter; not read anywhere in this file. */
int new_compressed_data_count = 0;
298
// A reference frame is usable only if each of its dimensions is between
// 1/2x and 16x the corresponding dimension of the current frame (the
// reference-scaling constraint from the AV1 specification).
static int valid_ref_frame_size(int ref_width, int ref_height,
                                int this_width, int this_height) {
  const int width_ok =
      2 * this_width >= ref_width && this_width <= 16 * ref_width;
  const int height_ok =
      2 * this_height >= ref_height && this_height <= 16 * ref_height;

  return width_ok && height_ok;
}
304
305#ifdef SUPPORT_SCALE_FACTOR
// Note: Expect val to be in q4 precision
// Scale an x coordinate by sf->x_scale_fp (REF_SCALE_SHIFT-bit fixed point),
// returning a value with SCALE_EXTRA_BITS of extra fractional precision.
// The 'off' term adjusts for the subpel center offset (matches libaom's
// av1/common/scale.c).
static inline int scaled_x(int val, const struct scale_factors *sf) {
  const int off =
      (sf->x_scale_fp - (1 << REF_SCALE_SHIFT)) * (1 << (SUBPEL_BITS - 1));
  // 64-bit intermediate: val * x_scale_fp can overflow 32 bits for large
  // coordinates.
  const int64_t tval = (int64_t)val * sf->x_scale_fp + off;
  return (int)ROUND_POWER_OF_TWO_SIGNED_64(tval,
      REF_SCALE_SHIFT - SCALE_EXTRA_BITS);
}
314
// Note: Expect val to be in q4 precision
// Vertical counterpart of scaled_x(): scale a y coordinate by
// sf->y_scale_fp with SCALE_EXTRA_BITS of extra fractional precision.
static inline int scaled_y(int val, const struct scale_factors *sf) {
  const int off =
      (sf->y_scale_fp - (1 << REF_SCALE_SHIFT)) * (1 << (SUBPEL_BITS - 1));
  // 64-bit intermediate guards against 32-bit overflow.
  const int64_t tval = (int64_t)val * sf->y_scale_fp + off;
  return (int)ROUND_POWER_OF_TWO_SIGNED_64(tval,
      REF_SCALE_SHIFT - SCALE_EXTRA_BITS);
}
323
// Note: Expect val to be in q4 precision
// Identity mapping used when the reference is not scaled: only widen the
// coordinate to the SCALE_EXTRA_BITS precision that scaled_x/scaled_y
// produce, so both paths return values in the same fixed-point format.
static int unscaled_value(int val, const struct scale_factors *sf) {
  (void)sf;
  return val << SCALE_EXTRA_BITS;
}
329
// Compute the ratio other_size/this_size as a REF_SCALE_SHIFT-bit
// fixed-point value, rounded to nearest.
// NOTE(review): divides by this_size — callers must guarantee it is
// nonzero (confirm for the 0x0-frame case).
static int get_fixed_point_scale_factor(int other_size, int this_size) {
  // Calculate scaling factor once for each reference frame
  // and use fixed point scaling factors in decoding and encoding routines.
  // Hardware implementations can calculate scale factor in device driver
  // and use multiplication and shifting on hardware instead of division.
  return ((other_size << REF_SCALE_SHIFT) + this_size / 2) / this_size;
}
337
// Given the fixed point scale, calculate coarse point scale.
// Reduces a REF_SCALE_SHIFT-bit fixed-point factor to the coarser
// SCALE_SUBPEL_BITS precision used for the per-pixel step (x/y_step_q4).
static int fixed_point_scale_to_coarse_point_scale(int scale_fp) {
  return ROUND_POWER_OF_TWO(scale_fp, REF_SCALE_SHIFT - SCALE_SUBPEL_BITS);
}
342
343
/*
 * Fill in 'sf' with the fixed-point scale factors mapping a reference
 * frame of other_w x other_h onto the current frame of this_w x this_h.
 *
 * If the sizes violate the AV1 reference-scaling constraints the factors
 * are set to REF_INVALID_SCALE and nothing else is touched.  Otherwise
 * the per-axis fixed-point factors, the coarse per-pixel steps, and the
 * coordinate-mapping callbacks (scaled vs. pass-through) are installed.
 * The libaom convolve-function table setup is compiled out (ORI_CODE):
 * this driver does motion compensation in hardware.
 */
void av1_setup_scale_factors_for_frame(struct scale_factors *sf, int other_w,
                                       int other_h, int this_w, int this_h) {
  if (!valid_ref_frame_size(other_w, other_h, this_w, this_h)) {
    sf->x_scale_fp = REF_INVALID_SCALE;
    sf->y_scale_fp = REF_INVALID_SCALE;
    return;
  }

  sf->x_scale_fp = get_fixed_point_scale_factor(other_w, this_w);
  sf->y_scale_fp = get_fixed_point_scale_factor(other_h, this_h);

  sf->x_step_q4 = fixed_point_scale_to_coarse_point_scale(sf->x_scale_fp);
  sf->y_step_q4 = fixed_point_scale_to_coarse_point_scale(sf->y_scale_fp);

  // Pick coordinate mappers once so per-pixel code need not re-test
  // whether scaling is in effect.
  if (av1_is_scaled(sf)) {
    sf->scale_value_x = scaled_x;
    sf->scale_value_y = scaled_y;
  } else {
    sf->scale_value_x = unscaled_value;
    sf->scale_value_y = unscaled_value;
  }
#ifdef ORI_CODE
  // AV1 convolve functions
  // Special case convolve functions should produce the same result as
  // av1_convolve_2d.
  // subpel_x_qn == 0 && subpel_y_qn == 0
  sf->convolve[0][0][0] = av1_convolve_2d_copy_sr;
  // subpel_x_qn == 0
  sf->convolve[0][1][0] = av1_convolve_y_sr;
  // subpel_y_qn == 0
  sf->convolve[1][0][0] = av1_convolve_x_sr;
  // subpel_x_qn != 0 && subpel_y_qn != 0
  sf->convolve[1][1][0] = av1_convolve_2d_sr;
  // subpel_x_qn == 0 && subpel_y_qn == 0
  sf->convolve[0][0][1] = av1_dist_wtd_convolve_2d_copy;
  // subpel_x_qn == 0
  sf->convolve[0][1][1] = av1_dist_wtd_convolve_y;
  // subpel_y_qn == 0
  sf->convolve[1][0][1] = av1_dist_wtd_convolve_x;
  // subpel_x_qn != 0 && subpel_y_qn != 0
  sf->convolve[1][1][1] = av1_dist_wtd_convolve_2d;
  // AV1 High BD convolve functions
  // Special case convolve functions should produce the same result as
  // av1_highbd_convolve_2d.
  // subpel_x_qn == 0 && subpel_y_qn == 0
  sf->highbd_convolve[0][0][0] = av1_highbd_convolve_2d_copy_sr;
  // subpel_x_qn == 0
  sf->highbd_convolve[0][1][0] = av1_highbd_convolve_y_sr;
  // subpel_y_qn == 0
  sf->highbd_convolve[1][0][0] = av1_highbd_convolve_x_sr;
  // subpel_x_qn != 0 && subpel_y_qn != 0
  sf->highbd_convolve[1][1][0] = av1_highbd_convolve_2d_sr;
  // subpel_x_qn == 0 && subpel_y_qn == 0
  sf->highbd_convolve[0][0][1] = av1_highbd_dist_wtd_convolve_2d_copy;
  // subpel_x_qn == 0
  sf->highbd_convolve[0][1][1] = av1_highbd_dist_wtd_convolve_y;
  // subpel_y_qn == 0
  sf->highbd_convolve[1][0][1] = av1_highbd_dist_wtd_convolve_x;
  // subpel_x_qn != 0 && subpel_y_qn != 0
  sf->highbd_convolve[1][1][1] = av1_highbd_dist_wtd_convolve_2d;
#endif
}
406#endif
407
408
/*
 * Claim an unused slot from the buffer pool.
 *
 * Under the pool lock, scan for a slot with ref_count == 0 (and, in the
 * multi-instance build, vf_ref == 0 as well — presumably the display
 * path's own reference count; confirm).  The claimed slot's ref_count is
 * set to 1.
 *
 * Returns the slot index, or INVALID_IDX if every slot is still
 * referenced (which would indicate a reference leak).
 */
static int get_free_fb(AV1_COMMON *cm) {
  RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
  int i;
  unsigned long flags;
  lock_buffer_pool(cm->buffer_pool, flags);
  for (i = 0; i < FRAME_BUFFERS; ++i)
    if (frame_bufs[i].ref_count == 0
#ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
        && frame_bufs[i].buf.vf_ref == 0
#endif
        )
      break;

  if (i != FRAME_BUFFERS) {
    if (frame_bufs[i].buf.use_external_reference_buffers) {
      // If this frame buffer's y_buffer, u_buffer, and v_buffer point to the
      // external reference buffers. Restore the buffer pointers to point to the
      // internally allocated memory.
      PIC_BUFFER_CONFIG *ybf = &frame_bufs[i].buf;
      ybf->y_buffer = ybf->store_buf_adr[0];
      ybf->u_buffer = ybf->store_buf_adr[1];
      ybf->v_buffer = ybf->store_buf_adr[2];
      ybf->use_external_reference_buffers = 0;
    }

    frame_bufs[i].ref_count = 1;
  } else {
    // We should never run out of free buffers. If this assertion fails, there
    // is a reference leak.
    assert(0 && "Ran out of free frame buffers. Likely a reference leak.");
    // Reset i to be INVALID_IDX to indicate no free buffer found.
    i = INVALID_IDX;
  }

  unlock_buffer_pool(cm->buffer_pool, flags);
  return i;
}
446
/*
 * Drop the reference held via cm->cur_frame (if any) and attach a freshly
 * claimed pool buffer in its place.
 *
 * Returns the new cm->cur_frame, or NULL when get_free_fb() found no free
 * slot (cm->cur_frame is left NULL in that case).
 */
static RefCntBuffer *assign_cur_frame_new_fb(AV1_COMMON *const cm) {
  // Release the previously-used frame-buffer
  int new_fb_idx;
  if (cm->cur_frame != NULL) {
    --cm->cur_frame->ref_count;
    cm->cur_frame = NULL;
  }

  // Assign a new framebuffer
  new_fb_idx = get_free_fb(cm);
  if (new_fb_idx == INVALID_IDX) return NULL;

  cm->cur_frame = &cm->buffer_pool->frame_bufs[new_fb_idx];
  cm->cur_frame->buf.buf_8bit_valid = 0;
#ifdef AML
  // Remember the pool slot so the driver can map this buf back to its index.
  cm->cur_frame->buf.index = new_fb_idx;
#endif
#ifdef ORI_CODE
  av1_zero(cm->cur_frame->interp_filter_selected);
#endif
  return cm->cur_frame;
}
469
470// Modify 'lhs_ptr' to reference the buffer at 'rhs_ptr', and update the ref
471// counts accordingly.
472static void assign_frame_buffer_p(RefCntBuffer **lhs_ptr,
473 RefCntBuffer *rhs_ptr) {
474 RefCntBuffer *const old_ptr = *lhs_ptr;
475 if (old_ptr != NULL) {
476 assert(old_ptr->ref_count > 0);
477 // One less reference to the buffer at 'old_ptr', so decrease ref count.
478 --old_ptr->ref_count;
479 }
480
481 *lhs_ptr = rhs_ptr;
482 // One more reference to the buffer at 'rhs_ptr', so increase ref count.
483 ++rhs_ptr->ref_count;
484}
485
/*
 * Allocate and initialize a decoder instance bound to 'pool'.
 *
 * Returns the zero-initialized AV1Decoder, or NULL on allocation failure.
 * The caller owns the returned object (teardown is outside this file
 * chunk — presumably a matching vfree()/free(); verify).
 */
AV1Decoder *av1_decoder_create(BufferPool *const pool) {
  int i;
  AV1_COMMON *cm;

#ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
  AV1Decoder *pbi = (AV1Decoder *)malloc(sizeof(*pbi));
#else
  AV1Decoder *pbi = (AV1Decoder *)vmalloc(sizeof(AV1Decoder));
#endif
  if (!pbi) return NULL;
  memset(pbi, 0, sizeof(*pbi));

  cm = &pbi->common;

  // The jmp_buf is valid only for the duration of the function that calls
  // setjmp(). Therefore, this function must reset the 'setjmp' field to 0
  // before it returns.

  cm->error.setjmp = 1;

#ifdef ORI_CODE
  memset(cm->fc, 0, sizeof(*cm->fc));
  memset(cm->default_frame_context, 0, sizeof(*cm->default_frame_context));
#endif
  pbi->need_resync = 1;

  // Initialize the references to not point to any frame buffers.
  for (i = 0; i < REF_FRAMES; i++) {
    cm->ref_frame_map[i] = NULL;
    cm->next_ref_frame_map[i] = NULL;
#ifdef AML
    cm->next_used_ref_frame_map[i] = NULL;
#endif
  }

  cm->current_frame.frame_number = 0;
  pbi->decoding_first_frame = 1;
  pbi->common.buffer_pool = pool;

  // Default until the sequence header says otherwise.
  cm->seq_params.bit_depth = AOM_BITS_8;

#ifdef ORI_CODE
  cm->alloc_mi = dec_alloc_mi;
  cm->free_mi = dec_free_mi;
  cm->setup_mi = dec_setup_mi;

  av1_loop_filter_init(cm);

  av1_qm_init(cm);
  av1_loop_restoration_precal();
#if CONFIG_ACCOUNTING
  pbi->acct_enabled = 1;
  aom_accounting_init(&pbi->accounting);
#endif
#endif
  cm->error.setjmp = 0;

#ifdef ORI_CODE
  aom_get_worker_interface()->init(&pbi->lf_worker);
  pbi->lf_worker.thread_name = "aom lf worker";
#endif

  return pbi;
}
550
/*
 * Frame-buffer release callback.  The libaom bookkeeping is compiled out
 * (#if 0); in this driver the callback is a no-op that always reports
 * success (0).
 */
int release_fb_cb(void *cb_priv, aom_codec_frame_buffer_t *fb) {
#if 0
  InternalFrameBuffer *const int_fb = (InternalFrameBuffer *)fb->priv;
  (void)cb_priv;
  if (int_fb) int_fb->in_use = 0;
#endif
  return 0;
}
559
/*
 * Drop one reference to 'buf' (NULL is tolerated and ignored).  When the
 * count reaches zero, the driver-side buffer is released (AML build) and
 * the raw frame buffer is handed back via release_fb_cb().
 *
 * NOTE(review): most callers hold the buffer pool lock around this (see
 * swap_frame_buffers / reset_frame_buffers), but reset_ref_frame_map()
 * can be reached without it from show_existing_frame_reset() — confirm
 * that path is safe.
 */
static void decrease_ref_count(AV1Decoder *pbi, RefCntBuffer *const buf,
                               BufferPool *const pool) {
  if (buf != NULL) {
    --buf->ref_count;
    // Reference counts should never become negative. If this assertion fails,
    // there is a bug in our reference count management.
    assert(buf->ref_count >= 0);
    // A worker may only get a free framebuffer index when calling get_free_fb.
    // But the raw frame buffer is not set up until we finish decoding header.
    // So if any error happens during decoding header, frame_bufs[idx] will not
    // have a valid raw frame buffer.
    if (buf->ref_count == 0
#ifdef ORI_CODE
        && buf->raw_frame_buffer.data
#endif
        ) {
#ifdef AML
      av1_release_buf(pbi, buf);
#endif
      release_fb_cb(pool->cb_priv, &buf->raw_frame_buffer);
      buf->raw_frame_buffer.data = NULL;
      buf->raw_frame_buffer.size = 0;
      buf->raw_frame_buffer.priv = NULL;
    }
  }
}
586
/*
 * End-of-frame buffer bookkeeping.
 *
 * On a successfully decoded frame: commit cm->next_ref_frame_map into
 * cm->ref_frame_map (releasing the old references), then either queue
 * cm->cur_frame for output or drop it if the frame is not shown.  On a
 * failed frame: just drop cm->cur_frame.  In both cases cm->cur_frame is
 * cleared and, unless the (ext-tile) camera frame header is pinned, the
 * remapped reference indices are invalidated for the next frame.
 */
static void swap_frame_buffers(AV1Decoder *pbi, int frame_decoded) {
  int ref_index = 0, mask;
  AV1_COMMON *const cm = &pbi->common;
  BufferPool *const pool = cm->buffer_pool;
  unsigned long flags;

  if (frame_decoded) {
    int check_on_show_existing_frame;
    lock_buffer_pool(pool, flags);

    // In ext-tile decoding, the camera frame header is only decoded once. So,
    // we don't release the references here.
    if (!pbi->camera_frame_header_ready) {
      // If we are not holding reference buffers in cm->next_ref_frame_map,
      // assert that the following two for loops are no-ops.
      assert(IMPLIES(!pbi->hold_ref_buf,
                     cm->current_frame.refresh_frame_flags == 0));
      assert(IMPLIES(!pbi->hold_ref_buf,
                     cm->show_existing_frame && !pbi->reset_decoder_state));

      // The following two for loops need to release the reference stored in
      // cm->ref_frame_map[ref_index] before transferring the reference stored
      // in cm->next_ref_frame_map[ref_index] to cm->ref_frame_map[ref_index].
      for (mask = cm->current_frame.refresh_frame_flags; mask; mask >>= 1) {
        decrease_ref_count(pbi, cm->ref_frame_map[ref_index], pool);
        cm->ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
        cm->next_ref_frame_map[ref_index] = NULL;
        ++ref_index;
      }

      check_on_show_existing_frame =
          !cm->show_existing_frame || pbi->reset_decoder_state;
      for (; ref_index < REF_FRAMES && check_on_show_existing_frame;
           ++ref_index) {
        decrease_ref_count(pbi, cm->ref_frame_map[ref_index], pool);
        cm->ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
        cm->next_ref_frame_map[ref_index] = NULL;
      }
    }

    if (cm->show_existing_frame || cm->show_frame) {
      if (pbi->output_all_layers) {
        // Append this frame to the output queue
        if (pbi->num_output_frames >= MAX_NUM_SPATIAL_LAYERS) {
          // We can't store the new frame anywhere, so drop it and return an
          // error
          cm->cur_frame->buf.corrupted = 1;
          decrease_ref_count(pbi, cm->cur_frame, pool);
          cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
        } else {
          pbi->output_frames[pbi->num_output_frames] = cm->cur_frame;
          pbi->num_output_frames++;
        }
      } else {
        // Replace any existing output frame
        assert(pbi->num_output_frames == 0 || pbi->num_output_frames == 1);
        if (pbi->num_output_frames > 0) {
          decrease_ref_count(pbi, pbi->output_frames[0], pool);
        }
        pbi->output_frames[0] = cm->cur_frame;
        pbi->num_output_frames = 1;
      }
    } else {
      // Not shown: the only remaining reference is dropped here (unless a
      // ref_frame_map slot took one above).
      decrease_ref_count(pbi, cm->cur_frame, pool);
    }

    unlock_buffer_pool(pool, flags);
  } else {
    // The code here assumes we are not holding reference buffers in
    // cm->next_ref_frame_map. If this assertion fails, we are leaking the
    // frame buffer references in cm->next_ref_frame_map.
    assert(IMPLIES(!pbi->camera_frame_header_ready, !pbi->hold_ref_buf));
    // Nothing was decoded, so just drop this frame buffer
    lock_buffer_pool(pool, flags);
    decrease_ref_count(pbi, cm->cur_frame, pool);
    unlock_buffer_pool(pool, flags);
  }
  cm->cur_frame = NULL;

  if (!pbi->camera_frame_header_ready) {
    pbi->hold_ref_buf = 0;

    // Invalidate these references until the next frame starts.
    for (ref_index = 0; ref_index < INTER_REFS_PER_FRAME; ref_index++) {
      cm->remapped_ref_idx[ref_index] = INVALID_IDX;
    }
  }
}
675
676void aom_internal_error(struct aom_internal_error_info *info,
677 aom_codec_err_t error, const char *fmt, ...) {
678 va_list ap;
679
680 info->error_code = error;
681 info->has_detail = 0;
682
683 if (fmt) {
684 size_t sz = sizeof(info->detail);
685
686 info->has_detail = 1;
687 va_start(ap, fmt);
688 vsnprintf(info->detail, sz - 1, fmt, ap);
689 va_end(ap);
690 info->detail[sz - 1] = '\0';
691 }
692#ifdef ORI_CODE
693 if (info->setjmp) longjmp(info->jmp, info->error_code);
694#endif
695}
696
697#ifdef ORI_CODE
/* Zero the contents of every allocated-but-idle internal frame buffer.
 * (Reference code only: this whole function sits inside #ifdef ORI_CODE
 * and is compiled out in the driver build.)
 */
void av1_zero_unused_internal_frame_buffers(InternalFrameBufferList *list) {
  int i;

  assert(list != NULL);

  for (i = 0; i < list->num_internal_frame_buffers; ++i) {
    if (list->int_fb[i].data && !list->int_fb[i].in_use)
      memset(list->int_fb[i].data, 0, list->int_fb[i].size);
  }
}
708#endif
709
// Release the references to the frame buffers in cm->ref_frame_map and reset
// all elements of cm->ref_frame_map to NULL.
// Takes no lock itself; reset_frame_buffers() calls it under the pool lock,
// but show_existing_frame_reset() does not — NOTE(review): confirm locking
// on that path.
static void reset_ref_frame_map(AV1Decoder *const pbi) {
  AV1_COMMON *const cm = &pbi->common;
  BufferPool *const pool = cm->buffer_pool;
  int i;

  for (i = 0; i < REF_FRAMES; i++) {
    decrease_ref_count(pbi, cm->ref_frame_map[i], pool);
    cm->ref_frame_map[i] = NULL;
#ifdef AML
    cm->next_used_ref_frame_map[i] = NULL;
#endif
  }
}
725
// Generate next_ref_frame_map.
// Slots selected by refresh_frame_flags get cm->cur_frame; the rest keep
// their current cm->ref_frame_map entry.  A reference is taken on every
// stored buffer, and pbi->hold_ref_buf records that these references now
// exist (swap_frame_buffers() later transfers or releases them).
static void generate_next_ref_frame_map(AV1Decoder *const pbi) {
  AV1_COMMON *const cm = &pbi->common;
  BufferPool *const pool = cm->buffer_pool;
  unsigned long flags;
  int ref_index = 0;
  int mask;

  lock_buffer_pool(pool, flags);
  // cm->next_ref_frame_map holds references to frame buffers. After storing a
  // frame buffer index in cm->next_ref_frame_map, we need to increase the
  // frame buffer's ref_count.
  for (mask = cm->current_frame.refresh_frame_flags; mask; mask >>= 1) {
    if (mask & 1) {
      cm->next_ref_frame_map[ref_index] = cm->cur_frame;
    } else {
      cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
    }
    if (cm->next_ref_frame_map[ref_index] != NULL)
      ++cm->next_ref_frame_map[ref_index]->ref_count;
    ++ref_index;
  }

  // Slots above the highest refresh bit simply carry over.
  for (; ref_index < REF_FRAMES; ++ref_index) {
    cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
    if (cm->next_ref_frame_map[ref_index] != NULL)
      ++cm->next_ref_frame_map[ref_index]->ref_count;
  }
  unlock_buffer_pool(pool, flags);
  pbi->hold_ref_buf = 1;
}
757
758// If the refresh_frame_flags bitmask is set, update reference frame id values
759// and mark frames as valid for reference.
760static void update_ref_frame_id(AV1_COMMON *const cm, int frame_id) {
761 int i;
762 int refresh_frame_flags = cm->current_frame.refresh_frame_flags;
763 assert(cm->seq_params.frame_id_numbers_present_flag);
764 for (i = 0; i < REF_FRAMES; i++) {
765 if ((refresh_frame_flags >> i) & 1) {
766 cm->ref_frame_id[i] = frame_id;
767 cm->valid_for_referencing[i] = 1;
768 }
769 }
770}
771
/*
 * Handle a show_existing_frame that also resets decoder state (the
 * key-frame-with-show_existing case): the displayed key frame is copied
 * into every reference slot, the remapped indices are invalidated, and
 * the frame-id bookkeeping is refreshed from the displayed frame.
 */
static void show_existing_frame_reset(AV1Decoder *const pbi,
                                      int existing_frame_idx) {
  AV1_COMMON *const cm = &pbi->common;
  int i;
  assert(cm->show_existing_frame);

  cm->current_frame.frame_type = KEY_FRAME;

  // Refresh every reference slot with the displayed frame.
  cm->current_frame.refresh_frame_flags = (1 << REF_FRAMES) - 1;

  for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
    cm->remapped_ref_idx[i] = INVALID_IDX;
  }

  if (pbi->need_resync) {
    reset_ref_frame_map(pbi);
    pbi->need_resync = 0;
  }

  // Note that the displayed frame must be valid for referencing in order to
  // have been selected.
  if (cm->seq_params.frame_id_numbers_present_flag) {
    cm->current_frame_id = cm->ref_frame_id[existing_frame_idx];
    update_ref_frame_id(cm, cm->current_frame_id);
  }

  cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_DISABLED;

  generate_next_ref_frame_map(pbi);

#ifdef ORI_CODE
  // Reload the adapted CDFs from when we originally coded this keyframe
  *cm->fc = cm->next_ref_frame_map[existing_frame_idx]->frame_context;
#endif
}
807
/*
 * Drop all reference-map references and clear the order-hint state of
 * every frame buffer that is unreferenced (plus cm->cur_frame itself,
 * whose sole reference we own).  Used when decoder state must be rebuilt
 * from scratch.
 */
static void reset_frame_buffers(AV1Decoder *const pbi) {
  AV1_COMMON *const cm = &pbi->common;
  RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
  int i;
  unsigned long flags;

  // We have not stored any references to frame buffers in
  // cm->next_ref_frame_map, so we can directly reset it to all NULL.
  for (i = 0; i < REF_FRAMES; ++i) {
    cm->next_ref_frame_map[i] = NULL;
  }

  lock_buffer_pool(cm->buffer_pool, flags);
  reset_ref_frame_map(pbi);
  assert(cm->cur_frame->ref_count == 1);
  for (i = 0; i < FRAME_BUFFERS; ++i) {
    // Reset all unreferenced frame buffers. We can also reset cm->cur_frame
    // because we are the sole owner of cm->cur_frame.
    if (frame_bufs[i].ref_count > 0 && &frame_bufs[i] != cm->cur_frame) {
      continue;
    }
    frame_bufs[i].order_hint = 0;
    av1_zero(frame_bufs[i].ref_order_hints);
  }
#ifdef ORI_CODE
  av1_zero_unused_internal_frame_buffers(&cm->buffer_pool->int_frame_buffers);
#endif
  unlock_buffer_pool(cm->buffer_pool, flags);
}
837
838static int frame_is_intra_only(const AV1_COMMON *const cm) {
839 return cm->current_frame.frame_type == KEY_FRAME ||
840 cm->current_frame.frame_type == INTRA_ONLY_FRAME;
841}
842
843static int frame_is_sframe(const AV1_COMMON *cm) {
844 return cm->current_frame.frame_type == S_FRAME;
845}
846
847// These functions take a reference frame label between LAST_FRAME and
848// EXTREF_FRAME inclusive. Note that this is different to the indexing
849// previously used by the frame_refs[] array.
850static int get_ref_frame_map_idx(const AV1_COMMON *const cm,
851 const MV_REFERENCE_FRAME ref_frame) {
852 return (ref_frame >= LAST_FRAME && ref_frame <= EXTREF_FRAME)
853 ? cm->remapped_ref_idx[ref_frame - LAST_FRAME]
854 : INVALID_IDX;
855}
856
857static RefCntBuffer *get_ref_frame_buf(
858 const AV1_COMMON *const cm, const MV_REFERENCE_FRAME ref_frame) {
859 const int map_idx = get_ref_frame_map_idx(cm, ref_frame);
860 return (map_idx != INVALID_IDX) ? cm->ref_frame_map[map_idx] : NULL;
861}
862#ifdef SUPPORT_SCALE_FACTOR
863static struct scale_factors *get_ref_scale_factors(
864 AV1_COMMON *const cm, const MV_REFERENCE_FRAME ref_frame) {
865 const int map_idx = get_ref_frame_map_idx(cm, ref_frame);
866 return (map_idx != INVALID_IDX) ? &cm->ref_scale_factors[map_idx] : NULL;
867}
868#endif
869static RefCntBuffer *get_primary_ref_frame_buf(
870 const AV1_COMMON *const cm) {
871 int map_idx;
872 if (cm->primary_ref_frame == PRIMARY_REF_NONE) return NULL;
873 map_idx = get_ref_frame_map_idx(cm, cm->primary_ref_frame + 1);
874 return (map_idx != INVALID_IDX) ? cm->ref_frame_map[map_idx] : NULL;
875}
876
877static int get_relative_dist(const OrderHintInfo *oh, int a, int b) {
878 int bits;
879 int m;
880 int diff;
881 if (!oh->enable_order_hint) return 0;
882
883 bits = oh->order_hint_bits_minus_1 + 1;
884
885 assert(bits >= 1);
886 assert(a >= 0 && a < (1 << bits));
887 assert(b >= 0 && b < (1 << bits));
888
889 diff = a - b;
890 m = 1 << (bits - 1);
891 diff = (diff & (m - 1)) - (diff & m);
892 return diff;
893}
894
895
/*
 * Extract the frame size from the parsed parameter block.
 * num_bits_width/num_bits_height are unused here: the sizes arrive
 * pre-decoded from the ucode rather than being read bit-by-bit from the
 * stream (see the retained aom_rb_read_literal comment).
 */
void av1_read_frame_size(union param_u *params, int num_bits_width,
	int num_bits_height, int *width, int *height, int* dec_width) {
  *width = params->p.frame_width;
  *height = params->p.frame_height;//aom_rb_read_literal(rb, num_bits_height) + 1;
#ifdef AML
  // Width the hardware actually decodes at; may differ from *width under
  // superres (see setup_superres()).
  *dec_width = params->p.dec_frame_width;
#endif
}
904
905static REFERENCE_MODE read_frame_reference_mode(
906 const AV1_COMMON *cm, union param_u *params) {
907 if (frame_is_intra_only(cm)) {
908 return SINGLE_REFERENCE;
909 } else {
910 return params->p.reference_mode ? REFERENCE_MODE_SELECT : SINGLE_REFERENCE;
911 }
912}
913
// Round a mode-info-unit length up to a whole number of superblocks.
static inline int calc_mi_size(int len) {
  // len is in mi units. Align to a multiple of SBs.
  return ALIGN_POWER_OF_TWO(len, MAX_MIB_SIZE_LOG2);
}
918
/*
 * Derive the mode-info grid (units of 1 << MI_SIZE_LOG2 pixels) and the
 * macroblock grid (4 mi units per macroblock) from the decoded frame size
 * and store them in cm.  Dimensions are first aligned up to multiples of
 * 8 luma pixels.
 */
void av1_set_mb_mi(AV1_COMMON *cm, int width, int height) {
  // Ensure that the decoded width and height are both multiples of
  // 8 luma pixels (note: this may only be a multiple of 4 chroma pixels if
  // subsampling is used).
  // This simplifies the implementation of various experiments,
  // eg. cdef, which operates on units of 8x8 luma pixels.
  const int aligned_width = ALIGN_POWER_OF_TWO(width, 3);
  const int aligned_height = ALIGN_POWER_OF_TWO(height, 3);
  av1_print2(AV1_DEBUG_BUFMGR_DETAIL, " [PICTURE] av1_set_mb_mi (%d X %d)\n", width, height);

  cm->mi_cols = aligned_width >> MI_SIZE_LOG2;
  cm->mi_rows = aligned_height >> MI_SIZE_LOG2;
  // Row stride is superblock-aligned so rows start on SB boundaries.
  cm->mi_stride = calc_mi_size(cm->mi_cols);

  cm->mb_cols = (cm->mi_cols + 2) >> 2;
  cm->mb_rows = (cm->mi_rows + 2) >> 2;
  cm->MBs = cm->mb_rows * cm->mb_cols;

#if CONFIG_LPF_MASK
  alloc_loop_filter_mask(cm);
#endif
}
941
/*
 * Size the mi/mb grids for the given frame dimensions.  Returns 0 on
 * success, 1 on failure.  With ORI_CODE undefined (this build) the mi
 * allocation is handled elsewhere, so the failure path after 'return 0'
 * is unreachable and the function always succeeds.
 */
int av1_alloc_context_buffers(AV1_COMMON *cm, int width, int height) {
#ifdef ORI_CODE
  int new_mi_size;
#endif
  av1_set_mb_mi(cm, width, height);
#ifdef ORI_CODE
  new_mi_size = cm->mi_stride * calc_mi_size(cm->mi_rows);
  if (cm->mi_alloc_size < new_mi_size) {
    cm->free_mi(cm);
    if (cm->alloc_mi(cm, new_mi_size)) goto fail;
  }
#endif
  return 0;

#ifdef ORI_CODE
fail:
#endif
  // clear the mi_* values to force a realloc on resync
  av1_set_mb_mi(cm, 0, 0);
#ifdef ORI_CODE
  av1_free_context_buffers(cm);
#endif
  return 1;
}
966
967#ifndef USE_SCALED_WIDTH_FROM_UCODE
968static void calculate_scaled_size_helper(int *dim, int denom) {
969 if (denom != SCALE_NUMERATOR) {
970 // We need to ensure the constraint in "Appendix A" of the spec:
971 // * FrameWidth is greater than or equal to 16
972 // * FrameHeight is greater than or equal to 16
973 // For this, we clamp the downscaled dimension to at least 16. One
974 // exception: if original dimension itself was < 16, then we keep the
975 // downscaled dimension to be same as the original, to ensure that resizing
976 // is valid.
977 const int min_dim = AOMMIN(16, *dim);
978 // Use this version if we need *dim to be even
979 // *width = (*width * SCALE_NUMERATOR + denom) / (2 * denom);
980 // *width <<= 1;
981 *dim = (*dim * SCALE_NUMERATOR + denom / 2) / (denom);
982 *dim = AOMMAX(*dim, min_dim);
983 }
984}
985#ifdef ORI_CODE
// Apply the resize denominator to both dimensions (reference code only;
// this function is inside #ifdef ORI_CODE and compiled out).
void av1_calculate_scaled_size(int *width, int *height, int resize_denom) {
  calculate_scaled_size_helper(width, resize_denom);
  calculate_scaled_size_helper(height, resize_denom);
}
990#endif
/*
 * Apply superres scaling to a frame size.  Superres only scales the
 * horizontal dimension; 'height' is accepted for signature symmetry
 * and deliberately ignored.
 */
void av1_calculate_scaled_superres_size(int *width, int *height,
                                        int superres_denom) {
  calculate_scaled_size_helper(width, superres_denom);
  (void)height;
}
996#endif
997
/*
 * Determine the coded (pre-upscale) frame size and record the superres
 * upscaled size in 'cm'.
 *
 * USE_SCALED_WIDTH_FROM_UCODE builds: the ucode already parsed the
 * superres parameters, so the upscaled and decoded sizes are taken
 * directly from 'params'.
 *
 * Other builds: replicate the upstream bitstream logic - *width/*height
 * arrive as the upscaled size and are reduced in place when a superres
 * denominator other than SCALE_NUMERATOR is signalled.
 */
static void setup_superres(AV1_COMMON *const cm, union param_u *params,
                           int *width, int *height) {
#ifdef USE_SCALED_WIDTH_FROM_UCODE
  cm->superres_upscaled_width = params->p.frame_width_scaled;
  cm->superres_upscaled_height = params->p.frame_height;

  *width = params->p.dec_frame_width;
  *height = params->p.frame_height;
  av1_print2(AV1_DEBUG_BUFMGR_DETAIL, " [PICTURE] set decoding size to (%d X %d) scaled size to (%d X %d)\n",
             *width, *height,
             cm->superres_upscaled_width,
             cm->superres_upscaled_height);
#else
  cm->superres_upscaled_width = *width;
  cm->superres_upscaled_height = *height;

  const SequenceHeader *const seq_params = &cm->seq_params;
  if (!seq_params->enable_superres) return;

  //if (aom_rb_read_bit(-1, defmark, rb)) {
  if (params->p.superres_scale_denominator != SCALE_NUMERATOR) {
#ifdef ORI_CODE
    cm->superres_scale_denominator =
        (uint8_t)aom_rb_read_literal(-1, defmark, rb, SUPERRES_SCALE_BITS);
    cm->superres_scale_denominator += SUPERRES_SCALE_DENOMINATOR_MIN;
#else
    cm->superres_scale_denominator = params->p.superres_scale_denominator;
#endif
    // Don't edit cm->width or cm->height directly, or the buffers won't get
    // resized correctly
    av1_calculate_scaled_superres_size(width, height,
                                       cm->superres_scale_denominator);
  } else {
    // 1:1 scaling - ie. no scaling, scale not provided
    cm->superres_scale_denominator = SCALE_NUMERATOR;
  }
/*!USE_SCALED_WIDTH_FROM_UCODE*/
#endif
}
1038
/*
 * Resize the per-frame context (mi grid) to width x height.
 *
 * Buffers are only (re)allocated when the new mi grid grows in either
 * dimension; a shrink just re-derives the mi geometry in place.  On
 * allocation failure cm->width/cm->height are zeroed so the next frame
 * forces a fresh allocation.  The final size is mirrored into
 * cm->cur_frame.
 */
static void resize_context_buffers(AV1_COMMON *cm, int width, int height) {
#if CONFIG_SIZE_LIMIT
  if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Dimensions of %dx%d beyond allowed size of %dx%d.",
                       width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT);
#endif
  if (cm->width != width || cm->height != height) {
    const int new_mi_rows =
        ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
    const int new_mi_cols =
        ALIGN_POWER_OF_TWO(width, MI_SIZE_LOG2) >> MI_SIZE_LOG2;

    // Allocations in av1_alloc_context_buffers() depend on individual
    // dimensions as well as the overall size.
    if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
      if (av1_alloc_context_buffers(cm, width, height)) {
        // The cm->mi_* values have been cleared and any existing context
        // buffers have been freed. Clear cm->width and cm->height to be
        // consistent and to force a realloc next time.
        cm->width = 0;
        cm->height = 0;
        aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                           "Failed to allocate context buffers");
      }
    } else {
      // Grid shrank or stayed equal: just re-derive the mi geometry.
      av1_set_mb_mi(cm, width, height);
    }
#ifdef ORI_CODE
    av1_init_context_buffers(cm);
#endif
    cm->width = width;
    cm->height = height;
  }

#ifdef ORI_CODE
  ensure_mv_buffer(cm->cur_frame, cm);
#endif
  cm->cur_frame->width = cm->width;
  cm->cur_frame->height = cm->height;
}
1080
/*
 * (Re)allocate the current frame's pixel buffer under the buffer-pool
 * lock, then stamp it with bit depth / colorimetry metadata from the
 * sequence header and the current render size.
 *
 * NOTE(review): if aom_internal_error() returns to the caller in this
 * port (i.e. does not longjmp/abort), the failure path would unlock the
 * pool twice (once in the if-body, once after it) - verify the error
 * helper's behavior before relying on this path.
 */
static void setup_buffer_pool(AV1_COMMON *cm) {
  BufferPool *const pool = cm->buffer_pool;
  const SequenceHeader *const seq_params = &cm->seq_params;
  unsigned long flags;

  lock_buffer_pool(pool, flags);
  if (aom_realloc_frame_buffer(cm, &cm->cur_frame->buf,
                               cm->width, cm->height, cm->cur_frame->order_hint)) {
    unlock_buffer_pool(pool, flags);
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffer");
  }
  unlock_buffer_pool(pool, flags);

  // Propagate sequence-level signal metadata onto the frame buffer.
  cm->cur_frame->buf.bit_depth = (unsigned int)seq_params->bit_depth;
  cm->cur_frame->buf.color_primaries = seq_params->color_primaries;
  cm->cur_frame->buf.transfer_characteristics =
      seq_params->transfer_characteristics;
  cm->cur_frame->buf.matrix_coefficients = seq_params->matrix_coefficients;
  cm->cur_frame->buf.monochrome = seq_params->monochrome;
  cm->cur_frame->buf.chroma_sample_position =
      seq_params->chroma_sample_position;
  cm->cur_frame->buf.color_range = seq_params->color_range;
  cm->cur_frame->buf.render_width = cm->render_width;
  cm->cur_frame->buf.render_height = cm->render_height;
}
1107
/*
 * Derive the frame size when it is not inherited from a reference.
 *
 * With frame_size_override_flag the size comes from the bitstream (as
 * pre-parsed into 'params'), bounded by the sequence maxima; otherwise
 * the sequence maximum size is used.  Superres scaling is then applied,
 * context buffers are resized, and the output buffer is set up.
 */
static void setup_frame_size(AV1_COMMON *cm, int frame_size_override_flag, union param_u *params) {
  const SequenceHeader *const seq_params = &cm->seq_params;
  int width, height, dec_width;  /* dec_width is only consumed on AML builds */

  if (frame_size_override_flag) {
    int num_bits_width = seq_params->num_bits_width;
    int num_bits_height = seq_params->num_bits_height;
    av1_read_frame_size(params, num_bits_width, num_bits_height, &width, &height, &dec_width);
#ifdef AML
    cm->dec_width = dec_width;
#endif
    if (width > seq_params->max_frame_width ||
        height > seq_params->max_frame_height) {
      aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                         "Frame dimensions are larger than the maximum values");
    }
  } else {
    width = seq_params->max_frame_width;
    height = seq_params->max_frame_height;
#ifdef AML
    cm->dec_width = dec_width = params->p.dec_frame_width;
#endif
  }
  setup_superres(cm, params, &width, &height);
  resize_context_buffers(cm, width, height);
#ifdef ORI_CODE
  setup_render_size(cm, params);
#endif
  setup_buffer_pool(cm);
}
1138
1139static int valid_ref_frame_img_fmt(aom_bit_depth_t ref_bit_depth,
1140 int ref_xss, int ref_yss,
1141 aom_bit_depth_t this_bit_depth,
1142 int this_xss, int this_yss) {
1143 return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
1144 ref_yss == this_yss;
1145}
1146
/*
 * Frame size derivation when found_ref signalling is used: inherit the
 * size (and render size) of the first valid reference among
 * LAST..ALTREF, falling back to an explicit size from 'params'.
 * Afterwards validate that at least one reference is close enough in
 * size for scaled prediction and that every reference matches the
 * current color format, then set up the output buffer.
 */
static void setup_frame_size_with_refs(AV1_COMMON *cm, union param_u *params) {
  int width, height, dec_width;
  int found = 0;
  int has_valid_ref_frame = 0;
  int i;
  SequenceHeader *seq_params;
  for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
    /*if (aom_rb_read_bit(rb)) {*/
    if (params->p.valid_ref_frame_bits & (1<<i)) {
      const RefCntBuffer *const ref_buf = get_ref_frame_buf(cm, i);
      // This will never be NULL in a normal stream, as streams are required to
      // have a shown keyframe before any inter frames, which would refresh all
      // the reference buffers. However, it might be null if we're starting in
      // the middle of a stream, and static analysis will error if we don't do
      // a null check here.
      if (ref_buf == NULL) {
        aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                           "Invalid condition: invalid reference buffer");
      } else {
        // Inherit cropped size and render size from the reference.
        const PIC_BUFFER_CONFIG *const buf = &ref_buf->buf;
        width = buf->y_crop_width;
        height = buf->y_crop_height;
        cm->render_width = buf->render_width;
        cm->render_height = buf->render_height;
        setup_superres(cm, params, &width, &height);
        resize_context_buffers(cm, width, height);
        found = 1;
        break;
      }
    }
  }

  seq_params = &cm->seq_params;
  if (!found) {
    // No usable reference: read an explicit frame size instead.
    int num_bits_width = seq_params->num_bits_width;
    int num_bits_height = seq_params->num_bits_height;

    av1_read_frame_size(params, num_bits_width, num_bits_height, &width, &height, &dec_width);
#ifdef AML
    cm->dec_width = dec_width;
#endif
    setup_superres(cm, params, &width, &height);
    resize_context_buffers(cm, width, height);
#ifdef ORI_CODE
    setup_render_size(cm, rb);
#endif
  }

  if (width <= 0 || height <= 0)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Invalid frame size");

  // Check to make sure at least one of frames that this frame references
  // has valid dimensions.
  for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
    const RefCntBuffer *const ref_frame = get_ref_frame_buf(cm, i);
    if (ref_frame != NULL) {
      has_valid_ref_frame |=
          valid_ref_frame_size(ref_frame->buf.y_crop_width,
                               ref_frame->buf.y_crop_height, width, height);
    }
  }
  if (!has_valid_ref_frame)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Referenced frame has invalid size");
  // Every present reference must also match the current color format.
  for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
    const RefCntBuffer *const ref_frame = get_ref_frame_buf(cm, i);
    if (ref_frame != NULL) {
      if (!valid_ref_frame_img_fmt(
              ref_frame->buf.bit_depth, ref_frame->buf.subsampling_x,
              ref_frame->buf.subsampling_y, seq_params->bit_depth,
              seq_params->subsampling_x, seq_params->subsampling_y))
        aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                           "Referenced frame has incompatible color format");
    }
  }
  setup_buffer_pool(cm);
}
1225
1226typedef struct {
1227 int map_idx; // frame map index
1228 RefCntBuffer *buf; // frame buffer
1229 int sort_idx; // index based on the offset to be used for sorting
1230} REF_FRAME_INFO;
1231
1232// Compares the sort_idx fields. If they are equal, then compares the map_idx
1233// fields to break the tie. This ensures a stable sort.
1234static int compare_ref_frame_info(const void *arg_a, const void *arg_b) {
1235 const REF_FRAME_INFO *info_a = (REF_FRAME_INFO *)arg_a;
1236 const REF_FRAME_INFO *info_b = (REF_FRAME_INFO *)arg_b;
1237
1238 const int sort_idx_diff = info_a->sort_idx - info_b->sort_idx;
1239 if (sort_idx_diff != 0) return sort_idx_diff;
1240 return info_a->map_idx - info_b->map_idx;
1241}
1242
1243
1244/*
1245for av1_setup_motion_field()
1246*/
/*
 * Record motion-field-projection parameters for 'start_frame' so its
 * motion vectors can be projected onto the current frame (called from
 * av1_setup_motion_field()).
 *
 * The AML port does not walk the MV grid itself - that upstream loop is
 * compiled out under ORI_CODE.  Instead it records, per projection slot
 * (cm->mv_ref_id_index): the reference id, the order-hint offsets of
 * that reference's own references, and whether the slot holds a usable
 * projection (mv_cal_tpl_mvs).  Note the slot is claimed and zeroed
 * even when the reference is then rejected and 0 is returned.
 *
 * dir == 2 negates the start->current offset (projection from a past
 * frame, matching upstream).  Returns 1 when the reference is usable.
 */
static int motion_field_projection(AV1_COMMON *cm,
                                   MV_REFERENCE_FRAME start_frame, int dir) {
#ifdef ORI_CODE
  TPL_MV_REF *tpl_mvs_base = cm->tpl_mvs;
  int ref_offset[REF_FRAMES] = { 0 };
#endif
  MV_REFERENCE_FRAME rf;
  const RefCntBuffer *const start_frame_buf =
      get_ref_frame_buf(cm, start_frame);
  int start_frame_order_hint;
  unsigned int const *ref_order_hints;
  int cur_order_hint;
  int start_to_current_frame_offset;

#ifdef AML
  int i;
  //av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "$$$$$$$$$$$%s:cm->mv_ref_id_index = %d, start_frame=%d\n", __func__, cm->mv_ref_id_index, start_frame);
  // Claim the next projection slot and reset it before any validation.
  cm->mv_ref_id[cm->mv_ref_id_index] = start_frame;
  for (i = 0; i < REF_FRAMES; i++) {
    cm->mv_ref_offset[cm->mv_ref_id_index][i]=0;
  }
  cm->mv_cal_tpl_mvs[cm->mv_ref_id_index]=0;
  cm->mv_ref_id_index++;
#endif
  if (start_frame_buf == NULL) return 0;

  // Intra-coded references carry no motion vectors to project.
  if (start_frame_buf->frame_type == KEY_FRAME ||
      start_frame_buf->frame_type == INTRA_ONLY_FRAME)
    return 0;

  // Projection requires matching mi geometry between the two frames.
  if (start_frame_buf->mi_rows != cm->mi_rows ||
      start_frame_buf->mi_cols != cm->mi_cols)
    return 0;

  start_frame_order_hint = start_frame_buf->order_hint;
  ref_order_hints =
      &start_frame_buf->ref_order_hints[0];
  cur_order_hint = cm->cur_frame->order_hint;
  start_to_current_frame_offset = get_relative_dist(
      &cm->seq_params.order_hint_info, start_frame_order_hint, cur_order_hint);

  // Offsets from the start frame to each of its own references
  // (written into the slot claimed above).
  for (rf = LAST_FRAME; rf <= INTER_REFS_PER_FRAME; ++rf) {
    cm->mv_ref_offset[cm->mv_ref_id_index-1][rf] = get_relative_dist(&cm->seq_params.order_hint_info,
                                        start_frame_order_hint,
                                        ref_order_hints[rf - LAST_FRAME]);
  }
#ifdef AML
  cm->mv_cal_tpl_mvs[cm->mv_ref_id_index-1]=1;
#endif
  if (dir == 2) start_to_current_frame_offset = -start_to_current_frame_offset;
#ifdef ORI_CODE
  MV_REF *mv_ref_base = start_frame_buf->mvs;
  const int mvs_rows = (cm->mi_rows + 1) >> 1;
  const int mvs_cols = (cm->mi_cols + 1) >> 1;

  for (int blk_row = 0; blk_row < mvs_rows; ++blk_row) {
    for (int blk_col = 0; blk_col < mvs_cols; ++blk_col) {
      MV_REF *mv_ref = &mv_ref_base[blk_row * mvs_cols + blk_col];
      MV fwd_mv = mv_ref->mv.as_mv;

      if (mv_ref->ref_frame > INTRA_FRAME) {
        int_mv this_mv;
        int mi_r, mi_c;
        const int ref_frame_offset = ref_offset[mv_ref->ref_frame];

        int pos_valid =
            abs(ref_frame_offset) <= MAX_FRAME_DISTANCE &&
            ref_frame_offset > 0 &&
            abs(start_to_current_frame_offset) <= MAX_FRAME_DISTANCE;

        if (pos_valid) {
          get_mv_projection(&this_mv.as_mv, fwd_mv,
                            start_to_current_frame_offset, ref_frame_offset);
          pos_valid = get_block_position(cm, &mi_r, &mi_c, blk_row, blk_col,
                                         this_mv.as_mv, dir >> 1);
        }

        if (pos_valid) {
          const int mi_offset = mi_r * (cm->mi_stride >> 1) + mi_c;

          tpl_mvs_base[mi_offset].mfmv0.as_mv.row = fwd_mv.row;
          tpl_mvs_base[mi_offset].mfmv0.as_mv.col = fwd_mv.col;
          tpl_mvs_base[mi_offset].ref_frame_offset = ref_frame_offset;
        }
      }
    }
  }
#endif
  return 1;
}
1337
#ifdef AML
/* Debug-only counter of av1_setup_motion_field() calls (used in traces). */
static int setup_motion_field_debug_count = 0;
#endif
/*
 * Build the per-frame motion-field projection state.
 *
 * Classifies each reference as past/future of the current frame via
 * order hints (cm->ref_frame_side: 1 = future, -1 = same hint), then
 * selects up to MFMV_STACK_SIZE projection sources in priority order:
 * LAST (projected backwards), BWDREF, ALTREF2, ALTREF, then LAST2.
 * The upstream tpl_mvs grid initialisation is compiled out under
 * ORI_CODE; on AML the hardware consumes the per-slot data recorded by
 * motion_field_projection() instead ('size' is only used by that
 * compiled-out path).
 */
void av1_setup_motion_field(AV1_COMMON *cm) {
  const OrderHintInfo *const order_hint_info = &cm->seq_params.order_hint_info;
  int ref_frame;
  int size;
  int cur_order_hint;
  const RefCntBuffer *ref_buf[INTER_REFS_PER_FRAME];
  int ref_order_hint[INTER_REFS_PER_FRAME];
  int ref_stamp;
  memset(cm->ref_frame_side, 0, sizeof(cm->ref_frame_side));
  if (!order_hint_info->enable_order_hint) return;
#ifdef ORI_CODE
  TPL_MV_REF *tpl_mvs_base = cm->tpl_mvs;
#endif
  size = ((cm->mi_rows + MAX_MIB_SIZE) >> 1) * (cm->mi_stride >> 1);
#ifdef ORI_CODE
  for (int idx = 0; idx < size; ++idx) {
    tpl_mvs_base[idx].mfmv0.as_int = INVALID_MV;
    tpl_mvs_base[idx].ref_frame_offset = 0;
  }
#endif
  cur_order_hint = cm->cur_frame->order_hint;

  for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
    const int ref_idx = ref_frame - LAST_FRAME;
    const RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
    int order_hint = 0;

    if (buf != NULL) order_hint = buf->order_hint;

    ref_buf[ref_idx] = buf;
    ref_order_hint[ref_idx] = order_hint;

    if (get_relative_dist(order_hint_info, order_hint, cur_order_hint) > 0)
      cm->ref_frame_side[ref_frame] = 1;
    else if (order_hint == cur_order_hint)
      cm->ref_frame_side[ref_frame] = -1;
  }
  ref_stamp = MFMV_STACK_SIZE - 1;
#ifdef AML
  cm->mv_ref_id_index = 0;
  av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%s(%d) mi_cols %d mi_rows %d\n",
             __func__, setup_motion_field_debug_count++,
             cm->mi_cols,
             cm->mi_rows
             );
#endif
  if (ref_buf[LAST_FRAME - LAST_FRAME] != NULL) {
    const int alt_of_lst_order_hint =
        ref_buf[LAST_FRAME - LAST_FRAME]
            ->ref_order_hints[ALTREF_FRAME - LAST_FRAME];

    // Skip LAST when it is an overlay of GOLDEN's ALTREF (same hint).
    const int is_lst_overlay =
        (alt_of_lst_order_hint == ref_order_hint[GOLDEN_FRAME - LAST_FRAME]);
    if (!is_lst_overlay) motion_field_projection(cm, LAST_FRAME, 2);
    --ref_stamp;
  }

  if (get_relative_dist(order_hint_info,
                        ref_order_hint[BWDREF_FRAME - LAST_FRAME],
                        cur_order_hint) > 0) {
    if (motion_field_projection(cm, BWDREF_FRAME, 0)) --ref_stamp;
  }

  if (get_relative_dist(order_hint_info,
                        ref_order_hint[ALTREF2_FRAME - LAST_FRAME],
                        cur_order_hint) > 0) {
    if (motion_field_projection(cm, ALTREF2_FRAME, 0)) --ref_stamp;
  }

  if (get_relative_dist(order_hint_info,
                        ref_order_hint[ALTREF_FRAME - LAST_FRAME],
                        cur_order_hint) > 0 &&
      ref_stamp >= 0)
    if (motion_field_projection(cm, ALTREF_FRAME, 0)) --ref_stamp;

  if (ref_stamp >= 0) motion_field_projection(cm, LAST2_FRAME, 2);
}
1418
1419
1420static void set_ref_frame_info(int *remapped_ref_idx, int frame_idx,
1421 REF_FRAME_INFO *ref_info) {
1422 assert(frame_idx >= 0 && frame_idx < INTER_REFS_PER_FRAME);
1423
1424 remapped_ref_idx[frame_idx] = ref_info->map_idx;
1425 av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "+++++++++++++%s:remapped_ref_idx[%d]=0x%x\n", __func__, frame_idx, ref_info->map_idx);
1426}
1427
1428
/*
 * Compute the logical -> physical reference remapping for a frame that
 * uses frame_refs_short_signaling: only the LAST and GOLDEN map indices
 * come from the bitstream, and the other five references are derived
 * from order hints (AV1 spec "Set frame refs process").
 *
 * Every map slot is scored with a sort index centred on the current
 * frame's order hint and stable-sorted.  Slots ordered after the
 * current frame become backward references (ALTREF = most distant,
 * BWDREF = nearest, ALTREF2 = next nearest); the remaining forward
 * slots fill LAST2/LAST3/BWDREF/ALTREF2/ALTREF anti-chronologically,
 * with leftovers defaulting to the earliest usable frame.
 */
void av1_set_frame_refs(AV1_COMMON *const cm, int *remapped_ref_idx,
                        int lst_map_idx, int gld_map_idx) {
  int lst_frame_sort_idx = -1;
  int gld_frame_sort_idx = -1;
  int i;
  //assert(cm->seq_params.order_hint_info.enable_order_hint);
  //assert(cm->seq_params.order_hint_info.order_hint_bits_minus_1 >= 0);
  const int cur_order_hint = (int)cm->current_frame.order_hint;
  // Mid-point of the sort-index range; forward refs sort below this.
  const int cur_frame_sort_idx =
      1 << cm->seq_params.order_hint_info.order_hint_bits_minus_1;

  REF_FRAME_INFO ref_frame_info[REF_FRAMES];
  int ref_flag_list[INTER_REFS_PER_FRAME] = { 0, 0, 0, 0, 0, 0, 0 };
  int bwd_start_idx;
  int bwd_end_idx;
  int fwd_start_idx, fwd_end_idx;
  int ref_idx;
  static const MV_REFERENCE_FRAME ref_frame_list[INTER_REFS_PER_FRAME - 2] = {
    LAST2_FRAME, LAST3_FRAME, BWDREF_FRAME, ALTREF2_FRAME, ALTREF_FRAME
  };

  for (i = 0; i < REF_FRAMES; ++i) {
    const int map_idx = i;
    RefCntBuffer *buf;
    int offset;

    ref_frame_info[i].map_idx = map_idx;
    ref_frame_info[i].sort_idx = -1;

    buf = cm->ref_frame_map[map_idx];
    ref_frame_info[i].buf = buf;

    if (buf == NULL) continue;
    // If this assertion fails, there is a reference leak.
    assert(buf->ref_count > 0);

    offset = (int)buf->order_hint;
    ref_frame_info[i].sort_idx =
        (offset == -1) ? -1
                       : cur_frame_sort_idx +
                             get_relative_dist(&cm->seq_params.order_hint_info,
                                               offset, cur_order_hint);
    assert(ref_frame_info[i].sort_idx >= -1);

    if (map_idx == lst_map_idx) lst_frame_sort_idx = ref_frame_info[i].sort_idx;
    if (map_idx == gld_map_idx) gld_frame_sort_idx = ref_frame_info[i].sort_idx;
  }

  // Confirm both LAST_FRAME and GOLDEN_FRAME are valid forward reference
  // frames.
  if (lst_frame_sort_idx == -1 || lst_frame_sort_idx >= cur_frame_sort_idx) {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Inter frame requests a look-ahead frame as LAST");
  }
  if (gld_frame_sort_idx == -1 || gld_frame_sort_idx >= cur_frame_sort_idx) {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Inter frame requests a look-ahead frame as GOLDEN");
  }

  // Sort ref frames based on their frame_offset values.
  qsort(ref_frame_info, REF_FRAMES, sizeof(REF_FRAME_INFO),
        compare_ref_frame_info);

  // Identify forward and backward reference frames.
  // Forward reference: offset < order_hint
  // Backward reference: offset >= order_hint
  fwd_start_idx = 0;
  fwd_end_idx = REF_FRAMES - 1;

  for (i = 0; i < REF_FRAMES; i++) {
    if (ref_frame_info[i].sort_idx == -1) {
      // Empty slots sort first; skip them.
      fwd_start_idx++;
      continue;
    }

    if (ref_frame_info[i].sort_idx >= cur_frame_sort_idx) {
      fwd_end_idx = i - 1;
      break;
    }
  }

  bwd_start_idx = fwd_end_idx + 1;
  bwd_end_idx = REF_FRAMES - 1;

  // === Backward Reference Frames ===

  // == ALTREF_FRAME ==
  if (bwd_start_idx <= bwd_end_idx) {
    set_ref_frame_info(remapped_ref_idx, ALTREF_FRAME - LAST_FRAME,
                       &ref_frame_info[bwd_end_idx]);
    ref_flag_list[ALTREF_FRAME - LAST_FRAME] = 1;
    bwd_end_idx--;
  }

  // == BWDREF_FRAME ==
  if (bwd_start_idx <= bwd_end_idx) {
    set_ref_frame_info(remapped_ref_idx, BWDREF_FRAME - LAST_FRAME,
                       &ref_frame_info[bwd_start_idx]);
    ref_flag_list[BWDREF_FRAME - LAST_FRAME] = 1;
    bwd_start_idx++;
  }

  // == ALTREF2_FRAME ==
  if (bwd_start_idx <= bwd_end_idx) {
    set_ref_frame_info(remapped_ref_idx, ALTREF2_FRAME - LAST_FRAME,
                       &ref_frame_info[bwd_start_idx]);
    ref_flag_list[ALTREF2_FRAME - LAST_FRAME] = 1;
  }

  // === Forward Reference Frames ===

  for (i = fwd_start_idx; i <= fwd_end_idx; ++i) {
    // == LAST_FRAME ==
    if (ref_frame_info[i].map_idx == lst_map_idx) {
      set_ref_frame_info(remapped_ref_idx, LAST_FRAME - LAST_FRAME,
                         &ref_frame_info[i]);
      ref_flag_list[LAST_FRAME - LAST_FRAME] = 1;
    }

    // == GOLDEN_FRAME ==
    if (ref_frame_info[i].map_idx == gld_map_idx) {
      set_ref_frame_info(remapped_ref_idx, GOLDEN_FRAME - LAST_FRAME,
                         &ref_frame_info[i]);
      ref_flag_list[GOLDEN_FRAME - LAST_FRAME] = 1;
    }
  }

  assert(ref_flag_list[LAST_FRAME - LAST_FRAME] == 1 &&
         ref_flag_list[GOLDEN_FRAME - LAST_FRAME] == 1);

  // == LAST2_FRAME ==
  // == LAST3_FRAME ==
  // == BWDREF_FRAME ==
  // == ALTREF2_FRAME ==
  // == ALTREF_FRAME ==

  // Set up the reference frames in the anti-chronological order.
  for (ref_idx = 0; ref_idx < (INTER_REFS_PER_FRAME - 2); ref_idx++) {
    const MV_REFERENCE_FRAME ref_frame = ref_frame_list[ref_idx];

    if (ref_flag_list[ref_frame - LAST_FRAME] == 1) continue;

    // Skip slots already claimed by LAST/GOLDEN.
    while (fwd_start_idx <= fwd_end_idx &&
           (ref_frame_info[fwd_end_idx].map_idx == lst_map_idx ||
            ref_frame_info[fwd_end_idx].map_idx == gld_map_idx)) {
      fwd_end_idx--;
    }
    if (fwd_start_idx > fwd_end_idx) break;

    set_ref_frame_info(remapped_ref_idx, ref_frame - LAST_FRAME,
                       &ref_frame_info[fwd_end_idx]);
    ref_flag_list[ref_frame - LAST_FRAME] = 1;

    fwd_end_idx--;
  }

  // Assign all the remaining frame(s), if any, to the earliest reference frame.
  for (; ref_idx < (INTER_REFS_PER_FRAME - 2); ref_idx++) {
    const MV_REFERENCE_FRAME ref_frame = ref_frame_list[ref_idx];
    if (ref_flag_list[ref_frame - LAST_FRAME] == 1) continue;
    set_ref_frame_info(remapped_ref_idx, ref_frame - LAST_FRAME,
                       &ref_frame_info[fwd_start_idx]);
    ref_flag_list[ref_frame - LAST_FRAME] = 1;
  }

  for (i = 0; i < INTER_REFS_PER_FRAME; i++) {
    assert(ref_flag_list[i] == 1);
  }
}
1598
1599void av1_setup_frame_buf_refs(AV1_COMMON *cm) {
1600 MV_REFERENCE_FRAME ref_frame;
1601 cm->cur_frame->order_hint = cm->current_frame.order_hint;
1602
1603 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
1604 const RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
1605 if (buf != NULL)
1606 cm->cur_frame->ref_order_hints[ref_frame - LAST_FRAME] = buf->order_hint;
1607 }
1608}
1609
1610void av1_setup_frame_sign_bias(AV1_COMMON *cm) {
1611 MV_REFERENCE_FRAME ref_frame;
1612 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
1613 const RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
1614 if (cm->seq_params.order_hint_info.enable_order_hint && buf != NULL) {
1615 const int ref_order_hint = buf->order_hint;
1616 cm->ref_frame_sign_bias[ref_frame] =
1617 (get_relative_dist(&cm->seq_params.order_hint_info, ref_order_hint,
1618 (int)cm->current_frame.order_hint) <= 0)
1619 ? 0
1620 : 1;
1621 } else {
1622 cm->ref_frame_sign_bias[ref_frame] = 0;
1623 }
1624 }
1625}
1626
1627
/*
 * Decide whether skip_mode is available for this frame and which two
 * references it pairs (AV1 spec "skip mode params").
 *
 * Requires order hints, an inter frame, and a compound reference mode.
 * Picks the nearest forward and nearest backward references; when no
 * backward reference exists, falls back to the two closest forward
 * references.  Results are written to cm->current_frame.skip_mode_info.
 */
void av1_setup_skip_mode_allowed(AV1_COMMON *cm)
{
  const OrderHintInfo *const order_hint_info = &cm->seq_params.order_hint_info;
  SkipModeInfo *const skip_mode_info = &cm->current_frame.skip_mode_info;
  int i;
  int cur_order_hint;
  // [0] = best forward hint (-1 = none), [1] = best backward (INT_MAX = none).
  int ref_order_hints[2] = { -1, INT_MAX };
  int ref_idx[2] = { INVALID_IDX, INVALID_IDX };

  skip_mode_info->skip_mode_allowed = 0;
  skip_mode_info->ref_frame_idx_0 = INVALID_IDX;
  skip_mode_info->ref_frame_idx_1 = INVALID_IDX;
  av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "av1_setup_skip_mode_allowed %d %d %d\n", order_hint_info->enable_order_hint,
             frame_is_intra_only(cm),
             cm->current_frame.reference_mode);
  if (!order_hint_info->enable_order_hint || frame_is_intra_only(cm) ||
      cm->current_frame.reference_mode == SINGLE_REFERENCE)
    return;

  cur_order_hint = cm->current_frame.order_hint;

  // Identify the nearest forward and backward references.
  for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
    const RefCntBuffer *const buf = get_ref_frame_buf(cm, LAST_FRAME + i);
    int ref_order_hint;
    if (buf == NULL) continue;

    ref_order_hint = buf->order_hint;
    if (get_relative_dist(order_hint_info, ref_order_hint, cur_order_hint) < 0) {
      // Forward reference
      if (ref_order_hints[0] == -1 ||
          get_relative_dist(order_hint_info, ref_order_hint,
                            ref_order_hints[0]) > 0) {
        ref_order_hints[0] = ref_order_hint;
        ref_idx[0] = i;
      }
    } else if (get_relative_dist(order_hint_info, ref_order_hint,
                                 cur_order_hint) > 0) {
      // Backward reference
      if (ref_order_hints[1] == INT_MAX ||
          get_relative_dist(order_hint_info, ref_order_hint,
                            ref_order_hints[1]) < 0) {
        ref_order_hints[1] = ref_order_hint;
        ref_idx[1] = i;
      }
    }
  }

  if (ref_idx[0] != INVALID_IDX && ref_idx[1] != INVALID_IDX) {
    // == Bi-directional prediction ==
    skip_mode_info->skip_mode_allowed = 1;
    skip_mode_info->ref_frame_idx_0 = AOMMIN(ref_idx[0], ref_idx[1]);
    skip_mode_info->ref_frame_idx_1 = AOMMAX(ref_idx[0], ref_idx[1]);
  } else if (ref_idx[0] != INVALID_IDX && ref_idx[1] == INVALID_IDX) {
    // == Forward prediction only ==
    // Identify the second nearest forward reference.
    ref_order_hints[1] = -1;
    for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
      const RefCntBuffer *const buf = get_ref_frame_buf(cm, LAST_FRAME + i);
      int ref_order_hint;
      if (buf == NULL) continue;

      ref_order_hint = buf->order_hint;
      if ((ref_order_hints[0] != -1 &&
           get_relative_dist(order_hint_info, ref_order_hint, ref_order_hints[0]) < 0) &&
          (ref_order_hints[1] == -1 ||
           get_relative_dist(order_hint_info, ref_order_hint, ref_order_hints[1]) > 0)) {
        // Second closest forward reference
        ref_order_hints[1] = ref_order_hint;
        ref_idx[1] = i;
      }
    }
    if (ref_order_hints[1] != -1) {
      skip_mode_info->skip_mode_allowed = 1;
      skip_mode_info->ref_frame_idx_0 = AOMMIN(ref_idx[0], ref_idx[1]);
      skip_mode_info->ref_frame_idx_1 = AOMMAX(ref_idx[0], ref_idx[1]);
    }
  }
  av1_print2(AV1_DEBUG_BUFMGR_DETAIL,
             "skip_mode_info: skip_mode_allowed 0x%x 0x%x 0x%x\n",
             cm->current_frame.skip_mode_info.skip_mode_allowed,
             cm->current_frame.skip_mode_info.ref_frame_idx_0,
             cm->current_frame.skip_mode_info.ref_frame_idx_1);
}
1712
1713static inline int frame_might_allow_ref_frame_mvs(const AV1_COMMON *cm) {
1714 return !cm->error_resilient_mode &&
1715 cm->seq_params.order_hint_info.enable_ref_frame_mvs &&
1716 cm->seq_params.order_hint_info.enable_order_hint &&
1717 !frame_is_intra_only(cm);
1718}
1719
1720#ifdef ORI_CODE
1721/*
1722* segmentation
1723*/
// Per-feature signedness of segment feature data (upstream reference
// code, compiled out): the first five entries - whose limits below are
// MAXQ / MAX_LOOP_FILTER deltas - take signed values.
static const int seg_feature_data_signed[SEG_LVL_MAX] = {
  1, 1, 1, 1, 1, 0, 0, 0
};

// Per-feature magnitude limit for segment feature data.
static const int seg_feature_data_max[SEG_LVL_MAX] = { MAXQ,
                                                       MAX_LOOP_FILTER,
                                                       MAX_LOOP_FILTER,
                                                       MAX_LOOP_FILTER,
                                                       MAX_LOOP_FILTER,
                                                       7,
                                                       0,
                                                       0 };
1736
1737
/* Deep-copy all per-segment feature masks and data, plus the derived
 * segid_preskip / last_active_segid fields, from 'src' to 'dst'. */
static inline void segfeatures_copy(struct segmentation *dst,
                                    const struct segmentation *src) {
  int i, j;
  for (i = 0; i < MAX_SEGMENTS; i++) {
    dst->feature_mask[i] = src->feature_mask[i];
    for (j = 0; j < SEG_LVL_MAX; j++) {
      dst->feature_data[i][j] = src->feature_data[i][j];
    }
  }
  dst->segid_preskip = src->segid_preskip;
  dst->last_active_segid = src->last_active_segid;
}
1750
/* Reset every segment feature mask and feature data value to zero. */
static void av1_clearall_segfeatures(struct segmentation *seg) {
  av1_zero(seg->feature_data);
  av1_zero(seg->feature_mask);
}
1755
/* Mark 'feature_id' as active for 'segment_id' in the feature mask. */
static void av1_enable_segfeature(struct segmentation *seg, int segment_id,
                                  int feature_id) {
  seg->feature_mask[segment_id] |= 1 << feature_id;
}
1760
/* Recompute derived segmentation state: segid_preskip (set when any
 * active feature is at or above SEG_LVL_REF_FRAME, i.e. needs the
 * segment id before the skip flag) and the highest active segment id. */
void av1_calculate_segdata(struct segmentation *seg) {
  seg->segid_preskip = 0;
  seg->last_active_segid = 0;
  for (int i = 0; i < MAX_SEGMENTS; i++) {
    for (int j = 0; j < SEG_LVL_MAX; j++) {
      if (seg->feature_mask[i] & (1 << j)) {
        seg->segid_preskip |= (j >= SEG_LVL_REF_FRAME);
        seg->last_active_segid = i;
      }
    }
  }
}
1773
/* Magnitude limit for a given segment feature's data. */
static int av1_seg_feature_data_max(int feature_id) {
  return seg_feature_data_max[feature_id];
}

/* Whether a given segment feature's data is a signed delta. */
static int av1_is_segfeature_signed(int feature_id) {
  return seg_feature_data_signed[feature_id];
}
1781
/* Store segment feature data after asserting it is within the
 * per-feature range (negative values allowed only for signed features). */
static void av1_set_segdata(struct segmentation *seg, int segment_id,
                            int feature_id, int seg_data) {
  if (seg_data < 0) {
    assert(seg_feature_data_signed[feature_id]);
    assert(-seg_data <= seg_feature_data_max[feature_id]);
  } else {
    assert(seg_data <= seg_feature_data_max[feature_id]);
  }

  seg->feature_data[segment_id][feature_id] = seg_data;
}
1793
/* Restrict 'value' to the inclusive range [low, high]. */
static inline int clamp(int value, int low, int high) {
  if (value < low) return low;
  if (value > high) return high;
  return value;
}
1797
/*
 * Parse and apply segmentation parameters for the current frame
 * (upstream reference implementation; compiled out under ORI_CODE).
 *
 * Disabled segmentation clears the map and the struct.  Otherwise the
 * update flags are either forced on (PRIMARY_REF_NONE frames cannot
 * inherit) or taken from 'params'; with update_data set, feature data
 * is re-read and clamped, else it is copied from the previous frame.
 * The final state is mirrored into cm->cur_frame->seg.
 *
 * NOTE(review): in the update_data loop, feature_enabled and data are
 * read from the same params fields for every (segment, feature) pair -
 * presumably the parser refills params per iteration upstream; verify
 * before re-enabling this path.
 */
static void setup_segmentation(AV1_COMMON *const cm,
                               union param_u *params) {
  struct segmentation *const seg = &cm->seg;

  seg->update_map = 0;
  seg->update_data = 0;
  seg->temporal_update = 0;

  seg->enabled = params->p.seg_enabled; //aom_rb_read_bit(-1, defmark, rb);
  if (!seg->enabled) {
    if (cm->cur_frame->seg_map)
      memset(cm->cur_frame->seg_map, 0, (cm->mi_rows * cm->mi_cols));

    memset(seg, 0, sizeof(*seg));
    segfeatures_copy(&cm->cur_frame->seg, seg);
    return;
  }
  // Previous frame's seg map is reusable only with identical mi geometry.
  if (cm->seg.enabled && cm->prev_frame &&
      (cm->mi_rows == cm->prev_frame->mi_rows) &&
      (cm->mi_cols == cm->prev_frame->mi_cols)) {
    cm->last_frame_seg_map = cm->prev_frame->seg_map;
  } else {
    cm->last_frame_seg_map = NULL;
  }
  // Read update flags
  if (cm->primary_ref_frame == PRIMARY_REF_NONE) {
    // These frames can't use previous frames, so must signal map + features
    seg->update_map = 1;
    seg->temporal_update = 0;
    seg->update_data = 1;
  } else {
    seg->update_map = params->p.seg_update_map; // aom_rb_read_bit(-1, defmark, rb);
    if (seg->update_map) {
      seg->temporal_update = params->p.seg_temporal_update; //aom_rb_read_bit(-1, defmark, rb);
    } else {
      seg->temporal_update = 0;
    }
    seg->update_data = params->p.seg_update_data; //aom_rb_read_bit(-1, defmark, rb);
  }

  // Segmentation data update
  if (seg->update_data) {
    av1_clearall_segfeatures(seg);

    for (int i = 0; i < MAX_SEGMENTS; i++) {
      for (int j = 0; j < SEG_LVL_MAX; j++) {
        int data = 0;
        const int feature_enabled = params->p.seg_feature_enabled ;//aom_rb_read_bit(-1, defmark, rb);
        if (feature_enabled) {
          av1_enable_segfeature(seg, i, j);

          const int data_max = av1_seg_feature_data_max(j);
          const int data_min = -data_max;
          /*
          const int ubits = get_unsigned_bits(data_max);

          if (av1_is_segfeature_signed(j)) {
            data = aom_rb_read_inv_signed_literal(-1, defmark, rb, ubits);
          } else {
            data = aom_rb_read_literal(-1, defmark, rb, ubits);
          }*/
          data = params->p.seg_data;
          data = clamp(data, data_min, data_max);
        }
        av1_set_segdata(seg, i, j, data);
      }
    }
    av1_calculate_segdata(seg);
  } else if (cm->prev_frame) {
    segfeatures_copy(seg, &cm->prev_frame->seg);
  }
  segfeatures_copy(&cm->cur_frame->seg, seg);
}
1871#endif
1872
1873/**/
1874
1875
/*
 * av1_decode_frame_headers_and_setup() - apply the frame (uncompressed)
 * header fields pre-parsed by the ucode (delivered in @params) to the
 * decoder state, and set up buffers/references for decoding this frame.
 *
 * This mirrors libaom's read_uncompressed_header(); every
 * "params->p.xxx; //aom_rb_read_...()" pair records which bitstream read
 * the field replaces. Code under ORI_CODE is the original libaom logic
 * kept for reference and compiled out.
 *
 * @pbi:    decoder instance (holds AV1_COMMON state and buffer pool)
 * @trailing_bits_present: NOTE(review): currently unused in this body —
 *          presumably kept for interface parity with libaom; confirm.
 * @params: union of header fields filled in by the hardware/ucode parser
 *
 * Returns 0 (both for show_existing_frame short-circuit and full setup).
 * Errors are reported through aom_internal_error() on cm->error.
 */
int av1_decode_frame_headers_and_setup(AV1Decoder *pbi, int trailing_bits_present, union param_u *params)
{
    AV1_COMMON *const cm = &pbi->common;
    /*
    read_uncompressed_header()
    */
    const SequenceHeader *const seq_params = &cm->seq_params;
    CurrentFrame *const current_frame = &cm->current_frame;
    //MACROBLOCKD *const xd = &pbi->mb;
    BufferPool *const pool = cm->buffer_pool;
    RefCntBuffer *const frame_bufs = pool->frame_bufs;
    int i;
    int frame_size_override_flag;
    /*
     * NOTE(review): 'flags' is handed to lock_buffer_pool()/unlock_buffer_pool()
     * without initialization — presumably those are spin_lock_irqsave-style
     * macros that write 'flags' by name; confirm against their definitions.
     */
    unsigned long flags;

    if (!pbi->sequence_header_ready) {
        aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
            "No sequence header");
    }
    cm->last_frame_type = current_frame->frame_type;

    if (seq_params->reduced_still_picture_hdr) {
        /* Reduced header: implicitly a shown keyframe, error resilient. */
        cm->show_existing_frame = 0;
        cm->show_frame = 1;
        current_frame->frame_type = KEY_FRAME;
        if (pbi->sequence_header_changed) {
            // This is the start of a new coded video sequence.
            pbi->sequence_header_changed = 0;
            pbi->decoding_first_frame = 1;
            reset_frame_buffers(pbi);
        }
        cm->error_resilient_mode = 1;
    } else {
        cm->show_existing_frame = params->p.show_existing_frame;
        pbi->reset_decoder_state = 0;
        if (cm->show_existing_frame) {
            /* Re-display a previously decoded frame; no new decode occurs. */
            int existing_frame_idx;
            RefCntBuffer *frame_to_show;
            if (pbi->sequence_header_changed) {
                aom_internal_error(
                    &cm->error, AOM_CODEC_CORRUPT_FRAME,
                    "New sequence header starts with a show_existing_frame.");
            }
            // Show an existing frame directly.
            existing_frame_idx = params->p.existing_frame_idx; //aom_rb_read_literal(rb, 3);
            frame_to_show = cm->ref_frame_map[existing_frame_idx];
            if (frame_to_show == NULL) {
                aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                    "Buffer does not contain a decoded frame");
            }
            if (seq_params->decoder_model_info_present_flag &&
                cm->timing_info.equal_picture_interval == 0) {
                cm->frame_presentation_time = params->p.frame_presentation_time;
                //read_temporal_point_info(cm);
            }
            if (seq_params->frame_id_numbers_present_flag) {
                //int frame_id_length = seq_params->frame_id_length;
                int display_frame_id = params->p.display_frame_id; //aom_rb_read_literal(rb, frame_id_length);
                /* Compare display_frame_id with ref_frame_id and check valid for
                 * referencing */
                if (display_frame_id != cm->ref_frame_id[existing_frame_idx] ||
                    cm->valid_for_referencing[existing_frame_idx] == 0)
                    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                        "Reference buffer frame ID mismatch");
            }
            lock_buffer_pool(pool, flags);
            assert(frame_to_show->ref_count > 0);
            // cm->cur_frame should be the buffer referenced by the return value
            // of the get_free_fb() call in av1_receive_compressed_data(), and
            // generate_next_ref_frame_map() has not been called, so ref_count
            // should still be 1.
            assert(cm->cur_frame->ref_count == 1);
            // assign_frame_buffer_p() decrements ref_count directly rather than
            // call decrease_ref_count(). If cm->cur_frame->raw_frame_buffer has
            // already been allocated, it will not be released by
            // assign_frame_buffer_p()!
            assert(!cm->cur_frame->raw_frame_buffer.data);
            assign_frame_buffer_p(&cm->cur_frame, frame_to_show);
            pbi->reset_decoder_state = frame_to_show->frame_type == KEY_FRAME;
            unlock_buffer_pool(pool, flags);

#ifdef ORI_CODE
            cm->lf.filter_level[0] = 0;
            cm->lf.filter_level[1] = 0;
#endif
            cm->show_frame = 1;

            // Section 6.8.2: It is a requirement of bitstream conformance that when
            // show_existing_frame is used to show a previous frame, that the value
            // of showable_frame for the previous frame was equal to 1.
            if (!frame_to_show->showable_frame) {
                aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                    "Buffer does not contain a showable frame");
            }
            // Section 6.8.2: It is a requirement of bitstream conformance that when
            // show_existing_frame is used to show a previous frame with
            // RefFrameType[ frame_to_show_map_idx ] equal to KEY_FRAME, that the
            // frame is output via the show_existing_frame mechanism at most once.
            if (pbi->reset_decoder_state) frame_to_show->showable_frame = 0;

#ifdef ORI_CODE
            cm->film_grain_params = frame_to_show->film_grain_params;
#endif
            if (pbi->reset_decoder_state) {
                show_existing_frame_reset(pbi, existing_frame_idx);
            } else {
                current_frame->refresh_frame_flags = 0;
            }

            /* Nothing further to decode for a show_existing_frame. */
            return 0;
        }

        current_frame->frame_type = (FRAME_TYPE)params->p.frame_type; //aom_rb_read_literal(rb, 2);
        if (pbi->sequence_header_changed) {
            if (current_frame->frame_type == KEY_FRAME) {
                // This is the start of a new coded video sequence.
                pbi->sequence_header_changed = 0;
                pbi->decoding_first_frame = 1;
                reset_frame_buffers(pbi);
            } else {
                aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                    "Sequence header has changed without a keyframe.");
            }
        }
        cm->show_frame = params->p.show_frame; //aom_rb_read_bit(rb);
        if (seq_params->still_picture &&
            (current_frame->frame_type != KEY_FRAME || !cm->show_frame)) {
            aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                "Still pictures must be coded as shown keyframes");
        }
        cm->showable_frame = current_frame->frame_type != KEY_FRAME;
        if (cm->show_frame) {
            if (seq_params->decoder_model_info_present_flag &&
                cm->timing_info.equal_picture_interval == 0)
                cm->frame_presentation_time = params->p.frame_presentation_time;
                //read_temporal_point_info(cm);
        } else {
            // See if this frame can be used as show_existing_frame in future
            cm->showable_frame = params->p.showable_frame;//aom_rb_read_bit(rb);
        }
        cm->cur_frame->showable_frame = cm->showable_frame;
        /* S-frames and shown keyframes are always error resilient. */
        cm->error_resilient_mode =
            frame_is_sframe(cm) ||
            (current_frame->frame_type == KEY_FRAME && cm->show_frame)
            ? 1
            : params->p.error_resilient_mode; //aom_rb_read_bit(rb);
    }

#ifdef ORI_CODE
    cm->disable_cdf_update = aom_rb_read_bit(rb);
    if (seq_params->force_screen_content_tools == 2) {
        cm->allow_screen_content_tools = aom_rb_read_bit(rb);
    } else {
        cm->allow_screen_content_tools = seq_params->force_screen_content_tools;
    }

    if (cm->allow_screen_content_tools) {
        if (seq_params->force_integer_mv == 2) {
            cm->cur_frame_force_integer_mv = aom_rb_read_bit(rb);
        } else {
            cm->cur_frame_force_integer_mv = seq_params->force_integer_mv;
        }
    } else {
        cm->cur_frame_force_integer_mv = 0;
    }
#endif

    frame_size_override_flag = 0;
    cm->allow_intrabc = 0;
    cm->primary_ref_frame = PRIMARY_REF_NONE;

    if (!seq_params->reduced_still_picture_hdr) {
        if (seq_params->frame_id_numbers_present_flag) {
            /* Frame-id conformance checks (AV1 spec: current_frame_id). */
            int frame_id_length = seq_params->frame_id_length;
            int diff_len = seq_params->delta_frame_id_length;
            int prev_frame_id = 0;
            int have_prev_frame_id =
                !pbi->decoding_first_frame &&
                !(current_frame->frame_type == KEY_FRAME && cm->show_frame);
            if (have_prev_frame_id) {
                prev_frame_id = cm->current_frame_id;
            }
            cm->current_frame_id = params->p.current_frame_id; //aom_rb_read_literal(rb, frame_id_length);

            if (have_prev_frame_id) {
                int diff_frame_id;
                if (cm->current_frame_id > prev_frame_id) {
                    diff_frame_id = cm->current_frame_id - prev_frame_id;
                } else {
                    /* Wrap-around: ids live modulo 2^frame_id_length. */
                    diff_frame_id =
                        (1 << frame_id_length) + cm->current_frame_id - prev_frame_id;
                }
                /* Check current_frame_id for conformance */
                if (prev_frame_id == cm->current_frame_id ||
                    diff_frame_id >= (1 << (frame_id_length - 1))) {
                    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                        "Invalid value of current_frame_id");
                }
            }
            /* Check if some frames need to be marked as not valid for referencing */
            for (i = 0; i < REF_FRAMES; i++) {
                if (current_frame->frame_type == KEY_FRAME && cm->show_frame) {
                    cm->valid_for_referencing[i] = 0;
                } else if (cm->current_frame_id - (1 << diff_len) > 0) {
                    if (cm->ref_frame_id[i] > cm->current_frame_id ||
                        cm->ref_frame_id[i] < cm->current_frame_id - (1 << diff_len))
                        cm->valid_for_referencing[i] = 0;
                } else {
                    if (cm->ref_frame_id[i] > cm->current_frame_id &&
                        cm->ref_frame_id[i] < (1 << frame_id_length) +
                        cm->current_frame_id - (1 << diff_len))
                        cm->valid_for_referencing[i] = 0;
                }
            }
        }

        frame_size_override_flag = frame_is_sframe(cm) ? 1 : params->p.frame_size_override_flag; //aom_rb_read_bit(rb);

        current_frame->order_hint = params->p.order_hint; /*aom_rb_read_literal(
            rb, seq_params->order_hint_info.order_hint_bits_minus_1 + 1);*/
        current_frame->frame_number = current_frame->order_hint;

        if (!cm->error_resilient_mode && !frame_is_intra_only(cm)) {
            cm->primary_ref_frame = params->p.primary_ref_frame;//aom_rb_read_literal(rb, PRIMARY_REF_BITS);
        }
    }

    if (seq_params->decoder_model_info_present_flag) {
        cm->buffer_removal_time_present = params->p.buffer_removal_time_present; //aom_rb_read_bit(rb);
        if (cm->buffer_removal_time_present) {
            int op_num;
            for (op_num = 0;
                op_num < seq_params->operating_points_cnt_minus_1 + 1; op_num++) {
                if (cm->op_params[op_num].decoder_model_param_present_flag) {
                    /* Only operating points covering this frame's temporal
                     * (bit temporal_layer_id) and spatial (bit
                     * spatial_layer_id + 8) layers carry a removal time. */
                    if ((((seq_params->operating_point_idc[op_num] >>
                        cm->temporal_layer_id) &
                        0x1) &&
                        ((seq_params->operating_point_idc[op_num] >>
                        (cm->spatial_layer_id + 8)) &
                        0x1)) ||
                        seq_params->operating_point_idc[op_num] == 0) {
                        cm->op_frame_timing[op_num].buffer_removal_time =
                            params->p.op_frame_timing[op_num];
                        /*aom_rb_read_unsigned_literal(
                            rb, cm->buffer_model.buffer_removal_time_length);*/
                    } else {
                        cm->op_frame_timing[op_num].buffer_removal_time = 0;
                    }
                } else {
                    cm->op_frame_timing[op_num].buffer_removal_time = 0;
                }
            }
        }
    }
    if (current_frame->frame_type == KEY_FRAME) {
        if (!cm->show_frame) { // unshown keyframe (forward keyframe)
            current_frame->refresh_frame_flags = params->p.refresh_frame_flags; //aom_rb_read_literal(rb, REF_FRAMES);
        } else { // shown keyframe
            current_frame->refresh_frame_flags = (1 << REF_FRAMES) - 1;
        }

        for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
            cm->remapped_ref_idx[i] = INVALID_IDX;
        }
        if (pbi->need_resync) {
            reset_ref_frame_map(pbi);
            pbi->need_resync = 0;
        }
    } else {
        if (current_frame->frame_type == INTRA_ONLY_FRAME) {
            current_frame->refresh_frame_flags = params->p.refresh_frame_flags; //aom_rb_read_literal(rb, REF_FRAMES);
            if (current_frame->refresh_frame_flags == 0xFF) {
                aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                    "Intra only frames cannot have refresh flags 0xFF");
            }
            if (pbi->need_resync) {
                reset_ref_frame_map(pbi);
                pbi->need_resync = 0;
            }
        } else if (pbi->need_resync != 1) { /* Skip if need resync */
            current_frame->refresh_frame_flags =
                frame_is_sframe(cm) ? 0xFF : params->p.refresh_frame_flags; //aom_rb_read_literal(rb, REF_FRAMES);
        }
    }

    if (!frame_is_intra_only(cm) || current_frame->refresh_frame_flags != 0xFF) {
        // Read all ref frame order hints if error_resilient_mode == 1
        if (cm->error_resilient_mode &&
            seq_params->order_hint_info.enable_order_hint) {
            int ref_idx;
            for (ref_idx = 0; ref_idx < REF_FRAMES; ref_idx++) {
                // Read order hint from bit stream
                unsigned int order_hint = params->p.ref_order_hint[ref_idx];/*aom_rb_read_literal(
                    rb, seq_params->order_hint_info.order_hint_bits_minus_1 + 1);*/
                // Get buffer
                RefCntBuffer *buf = cm->ref_frame_map[ref_idx];
                int buf_idx;
                if (buf == NULL || order_hint != buf->order_hint) {
                    /* Hint mismatch: drop the stale reference (if any)... */
                    if (buf != NULL) {
                        lock_buffer_pool(pool, flags);
                        decrease_ref_count(pbi, buf, pool);
                        unlock_buffer_pool(pool, flags);
                    }
                    // If no corresponding buffer exists, allocate a new buffer with all
                    // pixels set to neutral grey.
                    buf_idx = get_free_fb(cm);
                    if (buf_idx == INVALID_IDX) {
                        aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                            "Unable to find free frame buffer");
                    }
                    buf = &frame_bufs[buf_idx];
                    lock_buffer_pool(pool, flags);
                    if (aom_realloc_frame_buffer(cm, &buf->buf, seq_params->max_frame_width,
                        seq_params->max_frame_height, buf->order_hint)) {
                        decrease_ref_count(pbi, buf, pool);
                        unlock_buffer_pool(pool, flags);
                        aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                            "Failed to allocate frame buffer");
                    }
                    unlock_buffer_pool(pool, flags);
#ifdef ORI_CODE
                    set_planes_to_neutral_grey(seq_params, &buf->buf, 0);
#endif
                    cm->ref_frame_map[ref_idx] = buf;
                    buf->order_hint = order_hint;
                }
            }
        }
    }

    if (current_frame->frame_type == KEY_FRAME) {
        setup_frame_size(cm, frame_size_override_flag, params);
#ifdef ORI_CODE
        if (cm->allow_screen_content_tools && !av1_superres_scaled(cm))
            cm->allow_intrabc = aom_rb_read_bit(rb);
#endif
        cm->allow_ref_frame_mvs = 0;
        cm->prev_frame = NULL;
    } else {
        cm->allow_ref_frame_mvs = 0;

        if (current_frame->frame_type == INTRA_ONLY_FRAME) {
#ifdef ORI_CODE
            cm->cur_frame->film_grain_params_present =
                seq_params->film_grain_params_present;
#endif
            setup_frame_size(cm, frame_size_override_flag, params);
#ifdef ORI_CODE
            if (cm->allow_screen_content_tools && !av1_superres_scaled(cm))
                cm->allow_intrabc = aom_rb_read_bit(rb);
#endif
        } else if (pbi->need_resync != 1) { /* Skip if need resync */
            /* Inter frame: resolve the seven reference slots. */
            int frame_refs_short_signaling = 0;
            // Frame refs short signaling is off when error resilient mode is on.
            if (seq_params->order_hint_info.enable_order_hint)
                frame_refs_short_signaling = params->p.frame_refs_short_signaling;//aom_rb_read_bit(rb);

            if (frame_refs_short_signaling) {
                // == LAST_FRAME ==
                const int lst_ref = params->p.lst_ref; //aom_rb_read_literal(rb, REF_FRAMES_LOG2);
                const RefCntBuffer *const lst_buf = cm->ref_frame_map[lst_ref];

                // == GOLDEN_FRAME ==
                const int gld_ref = params->p.gld_ref; //aom_rb_read_literal(rb, REF_FRAMES_LOG2);
                const RefCntBuffer *const gld_buf = cm->ref_frame_map[gld_ref];

                // Most of the time, streams start with a keyframe. In that case,
                // ref_frame_map will have been filled in at that point and will not
                // contain any NULLs. However, streams are explicitly allowed to start
                // with an intra-only frame, so long as they don't then signal a
                // reference to a slot that hasn't been set yet. That's what we are
                // checking here.
                if (lst_buf == NULL)
                    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                        "Inter frame requests nonexistent reference");
                if (gld_buf == NULL)
                    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                        "Inter frame requests nonexistent reference");

                av1_set_frame_refs(cm, cm->remapped_ref_idx, lst_ref, gld_ref);
            }

            for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
                int ref = 0;
                if (!frame_refs_short_signaling) {
                    ref = params->p.remapped_ref_idx[i];//aom_rb_read_literal(rb, REF_FRAMES_LOG2);

                    // Most of the time, streams start with a keyframe. In that case,
                    // ref_frame_map will have been filled in at that point and will not
                    // contain any NULLs. However, streams are explicitly allowed to start
                    // with an intra-only frame, so long as they don't then signal a
                    // reference to a slot that hasn't been set yet. That's what we are
                    // checking here.
                    if (cm->ref_frame_map[ref] == NULL)
                        aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                            "Inter frame requests nonexistent reference");
                    cm->remapped_ref_idx[i] = ref;
                } else {
                    ref = cm->remapped_ref_idx[i];
                }

                cm->ref_frame_sign_bias[LAST_FRAME + i] = 0;

                if (seq_params->frame_id_numbers_present_flag) {
                    int frame_id_length = seq_params->frame_id_length;
                    //int diff_len = seq_params->delta_frame_id_length;
                    int delta_frame_id_minus_1 = params->p.delta_frame_id_minus_1[i];//aom_rb_read_literal(rb, diff_len);
                    /* Modular reconstruction of the reference's frame id. */
                    int ref_frame_id =
                        ((cm->current_frame_id - (delta_frame_id_minus_1 + 1) +
                        (1 << frame_id_length)) %
                        (1 << frame_id_length));
                    // Compare values derived from delta_frame_id_minus_1 and
                    // refresh_frame_flags. Also, check valid for referencing
                    if (ref_frame_id != cm->ref_frame_id[ref] ||
                        cm->valid_for_referencing[ref] == 0)
                        aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                            "Reference buffer frame ID mismatch");
                }
            }

            if (!cm->error_resilient_mode && frame_size_override_flag) {
                setup_frame_size_with_refs(cm, params);
            } else {
                setup_frame_size(cm, frame_size_override_flag, params);
            }
#ifdef ORI_CODE
            if (cm->cur_frame_force_integer_mv) {
                cm->allow_high_precision_mv = 0;
            } else {
                cm->allow_high_precision_mv = aom_rb_read_bit(rb);
            }
            cm->interp_filter = read_frame_interp_filter(rb);
            cm->switchable_motion_mode = aom_rb_read_bit(rb);
#endif
        }

        cm->prev_frame = get_primary_ref_frame_buf(cm);
        if (cm->primary_ref_frame != PRIMARY_REF_NONE &&
            get_primary_ref_frame_buf(cm) == NULL) {
            aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                "Reference frame containing this frame's initial "
                "frame context is unavailable.");
        }
#if 0
        av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%d,%d,%d,%d\n",cm->error_resilient_mode,
        cm->seq_params.order_hint_info.enable_ref_frame_mvs,
        cm->seq_params.order_hint_info.enable_order_hint,frame_is_intra_only(cm));

        printf("frame_might_allow_ref_frame_mvs()=>%d, current_frame->frame_type=%d, pbi->need_resync=%d, params->p.allow_ref_frame_mvs=%d\n",
        frame_might_allow_ref_frame_mvs(cm), current_frame->frame_type, pbi->need_resync,
        params->p.allow_ref_frame_mvs);
#endif
        if (!(current_frame->frame_type == INTRA_ONLY_FRAME) &&
            pbi->need_resync != 1) {
            if (frame_might_allow_ref_frame_mvs(cm))
                cm->allow_ref_frame_mvs = params->p.allow_ref_frame_mvs; //aom_rb_read_bit(-1, "<allow_ref_frame_mvs>", rb);
            else
                cm->allow_ref_frame_mvs = 0;

#ifdef SUPPORT_SCALE_FACTOR
            /* Prepare per-reference scale factors (reference size vs.
             * current frame size). Under AML the hardware decode width
             * (cm->dec_width) is used instead of cm->width. */
            for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
                const RefCntBuffer *const ref_buf = get_ref_frame_buf(cm, i);
                struct scale_factors *const ref_scale_factors =
                    get_ref_scale_factors(cm, i);
                if (ref_buf != NULL) {
#ifdef AML
                    av1_setup_scale_factors_for_frame(
                        ref_scale_factors, ref_buf->buf.y_crop_width,
                        ref_buf->buf.y_crop_height, cm->dec_width, cm->height);
#else
                    av1_setup_scale_factors_for_frame(
                        ref_scale_factors, ref_buf->buf.y_crop_width,
                        ref_buf->buf.y_crop_height, cm->width, cm->height);
#endif
                }
                if (ref_scale_factors) {
                    if ((!av1_is_valid_scale(ref_scale_factors)))
                        aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                            "Reference frame has invalid dimensions");
                }
            }
#endif
        }
    }

    av1_setup_frame_buf_refs(cm);

    av1_setup_frame_sign_bias(cm);

    cm->cur_frame->frame_type = current_frame->frame_type;

    if (seq_params->frame_id_numbers_present_flag) {
        update_ref_frame_id(cm, cm->current_frame_id);
    }
#ifdef ORI_CODE
    const int might_bwd_adapt =
        !(seq_params->reduced_still_picture_hdr) && !(cm->disable_cdf_update);
    if (might_bwd_adapt) {
        cm->refresh_frame_context = aom_rb_read_bit(rb)
            ? REFRESH_FRAME_CONTEXT_DISABLED
            : REFRESH_FRAME_CONTEXT_BACKWARD;
    } else {
        cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_DISABLED;
    }
#endif

    /* Propagate sequence-level color/format metadata to the output buffer. */
    cm->cur_frame->buf.bit_depth = seq_params->bit_depth;
    cm->cur_frame->buf.color_primaries = seq_params->color_primaries;
    cm->cur_frame->buf.transfer_characteristics =
        seq_params->transfer_characteristics;
    cm->cur_frame->buf.matrix_coefficients = seq_params->matrix_coefficients;
    cm->cur_frame->buf.monochrome = seq_params->monochrome;
    cm->cur_frame->buf.chroma_sample_position =
        seq_params->chroma_sample_position;
    cm->cur_frame->buf.color_range = seq_params->color_range;
    cm->cur_frame->buf.render_width = cm->render_width;
    cm->cur_frame->buf.render_height = cm->render_height;

    if (pbi->need_resync) {
        aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
            "Keyframe / intra-only frame required to reset decoder"
            " state");
    }

    generate_next_ref_frame_map(pbi);

#ifdef ORI_CODE
    if (cm->allow_intrabc) {
        // Set parameters corresponding to no filtering.
        struct loopfilter *lf = &cm->lf;
        lf->filter_level[0] = 0;
        lf->filter_level[1] = 0;
        cm->cdef_info.cdef_bits = 0;
        cm->cdef_info.cdef_strengths[0] = 0;
        cm->cdef_info.nb_cdef_strengths = 1;
        cm->cdef_info.cdef_uv_strengths[0] = 0;
        cm->rst_info[0].frame_restoration_type = RESTORE_NONE;
        cm->rst_info[1].frame_restoration_type = RESTORE_NONE;
        cm->rst_info[2].frame_restoration_type = RESTORE_NONE;
    }

    read_tile_info(pbi, rb);
    if (!av1_is_min_tile_width_satisfied(cm)) {
        aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
            "Minimum tile width requirement not satisfied");
    }

    setup_quantization(cm, rb);
    xd->bd = (int)seq_params->bit_depth;

    if (cm->num_allocated_above_context_planes < av1_num_planes(cm) ||
        cm->num_allocated_above_context_mi_col < cm->mi_cols ||
        cm->num_allocated_above_contexts < cm->tile_rows) {
        av1_free_above_context_buffers(cm, cm->num_allocated_above_contexts);
        if (av1_alloc_above_context_buffers(cm, cm->tile_rows))
            aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                "Failed to allocate context buffers");
    }

    if (cm->primary_ref_frame == PRIMARY_REF_NONE) {
        av1_setup_past_independence(cm);
    }

    setup_segmentation(cm, params);

    cm->delta_q_info.delta_q_res = 1;
    cm->delta_q_info.delta_lf_res = 1;
    cm->delta_q_info.delta_lf_present_flag = 0;
    cm->delta_q_info.delta_lf_multi = 0;
    cm->delta_q_info.delta_q_present_flag =
        cm->base_qindex > 0 ? aom_rb_read_bit(-1, defmark, rb) : 0;
    if (cm->delta_q_info.delta_q_present_flag) {
        xd->current_qindex = cm->base_qindex;
        cm->delta_q_info.delta_q_res = 1 << aom_rb_read_literal(-1, defmark, rb, 2);
        if (!cm->allow_intrabc)
            cm->delta_q_info.delta_lf_present_flag = aom_rb_read_bit(-1, defmark, rb);
        if (cm->delta_q_info.delta_lf_present_flag) {
            cm->delta_q_info.delta_lf_res = 1 << aom_rb_read_literal(-1, defmark, rb, 2);
            cm->delta_q_info.delta_lf_multi = aom_rb_read_bit(-1, defmark, rb);
            av1_reset_loop_filter_delta(xd, av1_num_planes(cm));
        }
    }

    xd->cur_frame_force_integer_mv = cm->cur_frame_force_integer_mv;

    for (int i = 0; i < MAX_SEGMENTS; ++i) {
        const int qindex = av1_get_qindex(&cm->seg, i, cm->base_qindex);
        xd->lossless[i] = qindex == 0 && cm->y_dc_delta_q == 0 &&
            cm->u_dc_delta_q == 0 && cm->u_ac_delta_q == 0 &&
            cm->v_dc_delta_q == 0 && cm->v_ac_delta_q == 0;
        xd->qindex[i] = qindex;
    }
    cm->coded_lossless = is_coded_lossless(cm, xd);
    cm->all_lossless = cm->coded_lossless && !av1_superres_scaled(cm);
    setup_segmentation_dequant(cm, xd);
    if (cm->coded_lossless) {
        cm->lf.filter_level[0] = 0;
        cm->lf.filter_level[1] = 0;
    }
    if (cm->coded_lossless || !seq_params->enable_cdef) {
        cm->cdef_info.cdef_bits = 0;
        cm->cdef_info.cdef_strengths[0] = 0;
        cm->cdef_info.cdef_uv_strengths[0] = 0;
    }
    if (cm->all_lossless || !seq_params->enable_restoration) {
        cm->rst_info[0].frame_restoration_type = RESTORE_NONE;
        cm->rst_info[1].frame_restoration_type = RESTORE_NONE;
        cm->rst_info[2].frame_restoration_type = RESTORE_NONE;
    }
    setup_loopfilter(cm, rb);

    if (!cm->coded_lossless && seq_params->enable_cdef) {
        setup_cdef(cm, rb);
    }
    if (!cm->all_lossless && seq_params->enable_restoration) {
        decode_restoration_mode(cm, rb);
    }

    cm->tx_mode = read_tx_mode(cm, rb);
#endif

    current_frame->reference_mode = read_frame_reference_mode(cm, params);

#ifdef ORI_CODE
    if (current_frame->reference_mode != SINGLE_REFERENCE)
        setup_compound_reference_mode(cm);


#endif

    av1_setup_skip_mode_allowed(cm);

    /*
    the point that ucode send send_bufmgr_info
    and wait bufmgr code to return is_skip_mode_allowed
    */

    /*
    read_uncompressed_header() end
    */

    av1_setup_motion_field(cm);
#ifdef AML
    cm->cur_frame->mi_cols = cm->mi_cols;
    cm->cur_frame->mi_rows = cm->mi_rows;
    cm->cur_frame->dec_width = cm->dec_width;

    /*
    superres_post_decode(AV1Decoder *pbi) =>
    av1_superres_upscale(cm, pool); =>
    aom_realloc_frame_buffer(
    frame_to_show, cm->superres_upscaled_width,
    cm->superres_upscaled_height, seq_params->subsampling_x,
    seq_params->subsampling_y, seq_params->use_highbitdepth,
    AOM_BORDER_IN_PIXELS, cm->byte_alignment, fb, cb, cb_priv)
    */
    aom_realloc_frame_buffer(cm, &cm->cur_frame->buf,
        cm->superres_upscaled_width, cm->superres_upscaled_height,
        cm->cur_frame->order_hint);
#endif
    return 0;
}
2538
2539static int are_seq_headers_consistent(const SequenceHeader *seq_params_old,
2540 const SequenceHeader *seq_params_new) {
2541 return !memcmp(seq_params_old, seq_params_new, sizeof(SequenceHeader));
2542}
2543
2544aom_codec_err_t aom_get_num_layers_from_operating_point_idc(
2545 int operating_point_idc, unsigned int *number_spatial_layers,
2546 unsigned int *number_temporal_layers) {
2547 // derive number of spatial/temporal layers from operating_point_idc
2548
2549 if (!number_spatial_layers || !number_temporal_layers)
2550 return AOM_CODEC_INVALID_PARAM;
2551
2552 if (operating_point_idc == 0) {
2553 *number_temporal_layers = 1;
2554 *number_spatial_layers = 1;
2555 } else {
2556 int j;
2557 *number_spatial_layers = 0;
2558 *number_temporal_layers = 0;
2559 for (j = 0; j < MAX_NUM_SPATIAL_LAYERS; j++) {
2560 *number_spatial_layers +=
2561 (operating_point_idc >> (j + MAX_NUM_TEMPORAL_LAYERS)) & 0x1;
2562 }
2563 for (j = 0; j < MAX_NUM_TEMPORAL_LAYERS; j++) {
2564 *number_temporal_layers += (operating_point_idc >> j) & 0x1;
2565 }
2566 }
2567
2568 return AOM_CODEC_OK;
2569}
2570
/*
 * av1_read_sequence_header() - populate @seq_params from the ucode-parsed
 * sequence header fields in @params.
 *
 * Mirrors the sequence-header portion of libaom's OBU parsing; bitstream
 * reads are replaced by params->p.xxx lookups (original reads kept in
 * comments / under ORI_CODE). Fields not supplied by the ucode keep their
 * defaults; reduced_still_picture_hdr forces the still-picture defaults.
 *
 * @cm:         common decoder state (used only for error reporting here)
 * @params:     ucode-provided header fields
 * @seq_params: destination sequence header (updated in place)
 */
void av1_read_sequence_header(AV1_COMMON *cm, union param_u *params,
    SequenceHeader *seq_params) {
#ifdef ORI_CODE
    const int num_bits_width = aom_rb_read_literal(-1, "<num_bits_width>", rb, 4) + 1;
    const int num_bits_height = aom_rb_read_literal(-1, "<num_bits_height>", rb, 4) + 1;
    const int max_frame_width = aom_rb_read_literal(-1, "<max_frame_width>", rb, num_bits_width) + 1;
    const int max_frame_height = aom_rb_read_literal(-1, "<max_frame_height>", rb, num_bits_height) + 1;

    seq_params->num_bits_width = num_bits_width;
    seq_params->num_bits_height = num_bits_height;
#endif
    seq_params->max_frame_width = params->p.max_frame_width; //max_frame_width;
    seq_params->max_frame_height = params->p.max_frame_height; //max_frame_height;

    if (seq_params->reduced_still_picture_hdr) {
        seq_params->frame_id_numbers_present_flag = 0;
    } else {
        seq_params->frame_id_numbers_present_flag = params->p.frame_id_numbers_present_flag; //aom_rb_read_bit(-1, "<frame_id_numbers_present_flag>", rb);
    }
    if (seq_params->frame_id_numbers_present_flag) {
        // We must always have delta_frame_id_length < frame_id_length,
        // in order for a frame to be referenced with a unique delta.
        // Avoid wasting bits by using a coding that enforces this restriction.
#ifdef ORI_CODE
        seq_params->delta_frame_id_length = aom_rb_read_literal(-1, "<delta_frame_id_length>", rb, 4) + 2;
        seq_params->frame_id_length = params->p.frame_id_length + aom_rb_read_literal(-1, "<frame_id_length>", rb, 3) + seq_params->delta_frame_id_length + 1;
#else
        seq_params->delta_frame_id_length = params->p.delta_frame_id_length;
        seq_params->frame_id_length = params->p.frame_id_length + seq_params->delta_frame_id_length + 1;
#endif
        /* Spec limit: frame ids are at most 16 bits. */
        if (seq_params->frame_id_length > 16)
            aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                "Invalid frame_id_length");
    }
#ifdef ORI_CODE
    setup_sb_size(seq_params, rb);
    seq_params->enable_filter_intra = aom_rb_read_bit(-1, "<enable_filter_intra>", rb);
    seq_params->enable_intra_edge_filter = aom_rb_read_bit(-1, "<enable_intra_edge_filter>", rb);
#endif

    if (seq_params->reduced_still_picture_hdr) {
        /* Still-picture defaults: all inter tools off, no order hints. */
        seq_params->enable_interintra_compound = 0;
        seq_params->enable_masked_compound = 0;
        seq_params->enable_warped_motion = 0;
        seq_params->enable_dual_filter = 0;
        seq_params->order_hint_info.enable_order_hint = 0;
        seq_params->order_hint_info.enable_dist_wtd_comp = 0;
        seq_params->order_hint_info.enable_ref_frame_mvs = 0;
        seq_params->force_screen_content_tools = 2; // SELECT_SCREEN_CONTENT_TOOLS
        seq_params->force_integer_mv = 2; // SELECT_INTEGER_MV
        seq_params->order_hint_info.order_hint_bits_minus_1 = -1;
    } else {
#ifdef ORI_CODE
        seq_params->enable_interintra_compound = aom_rb_read_bit(-1, "<enable_interintra_compound>", rb);
        seq_params->enable_masked_compound = aom_rb_read_bit(-1, "<enable_masked_compound>", rb);
        seq_params->enable_warped_motion = aom_rb_read_bit(-1, "<enable_warped_motion>", rb);
        seq_params->enable_dual_filter = aom_rb_read_bit(-1, "<enable_dual_filter>", rb);
#endif
        /* dist_wtd_comp and ref_frame_mvs are only meaningful when order
         * hints are enabled. */
        seq_params->order_hint_info.enable_order_hint = params->p.enable_order_hint; //aom_rb_read_bit(-1, "<order_hint_info.enable_order_hint>", rb);
        seq_params->order_hint_info.enable_dist_wtd_comp =
            seq_params->order_hint_info.enable_order_hint ? params->p.enable_dist_wtd_comp : 0; //aom_rb_read_bit(-1, "<order_hint_info.enable_dist_wtd_comp>", rb) : 0;
        seq_params->order_hint_info.enable_ref_frame_mvs =
            seq_params->order_hint_info.enable_order_hint ? params->p.enable_ref_frame_mvs : 0; //aom_rb_read_bit(-1, "<order_hint_info.enable_ref_frame_mvs>", rb) : 0;

#ifdef ORI_CODE
        if (aom_rb_read_bit(-1, defmark, rb)) {
            seq_params->force_screen_content_tools =
                2; // SELECT_SCREEN_CONTENT_TOOLS
        } else {
            seq_params->force_screen_content_tools = aom_rb_read_bit(-1, defmark, rb);
        }

        if (seq_params->force_screen_content_tools > 0) {
            if (aom_rb_read_bit(-1, defmark, rb)) {
                seq_params->force_integer_mv = 2; // SELECT_INTEGER_MV
            } else {
                seq_params->force_integer_mv = aom_rb_read_bit(-1, defmark, rb);
            }
        } else {
            seq_params->force_integer_mv = 2; // SELECT_INTEGER_MV
        }
#endif
        seq_params->order_hint_info.order_hint_bits_minus_1 =
            seq_params->order_hint_info.enable_order_hint
            ? params->p.order_hint_bits_minus_1 /*aom_rb_read_literal(-1, "<order_hint_info.order_hint_bits_minus_1>", rb, 3)*/
            : -1;
    }
    seq_params->enable_superres = params->p.enable_superres; //aom_rb_read_bit(-1, defmark, rb);

#ifdef ORI_CODE
    seq_params->enable_cdef = aom_rb_read_bit(-1, defmark, rb);
    seq_params->enable_restoration = aom_rb_read_bit(-1, defmark, rb);
#endif
}
2665
#ifdef ORI_CODE
/*
 * av1_read_op_parameters_info() - original libaom routine (compiled out):
 * read the decoder-model buffer delays and low-delay flag for operating
 * point @op_num from the bit reader. Kept only as reference for the
 * ucode-driven path used by this driver.
 */
void av1_read_op_parameters_info(AV1_COMMON *const cm,
    struct aom_read_bit_buffer *rb, int op_num) {
    // The cm->op_params array has MAX_NUM_OPERATING_POINTS + 1 elements.
    if (op_num > MAX_NUM_OPERATING_POINTS) {
        aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
            "AV1 does not support %d decoder model operating points",
            op_num + 1);
    }

    cm->op_params[op_num].decoder_buffer_delay = aom_rb_read_unsigned_literal(-1, defmark,
        rb, cm->buffer_model.encoder_decoder_buffer_delay_length);

    cm->op_params[op_num].encoder_buffer_delay = aom_rb_read_unsigned_literal(-1, defmark,
        rb, cm->buffer_model.encoder_decoder_buffer_delay_length);

    cm->op_params[op_num].low_delay_mode_flag = aom_rb_read_bit(-1, defmark, rb);
}
#endif
2685
2686static int is_valid_seq_level_idx(AV1_LEVEL seq_level_idx) {
2687 return seq_level_idx < SEQ_LEVELS || seq_level_idx == SEQ_LEVEL_MAX;
2688}
2689
/*
 * Parse a sequence header OBU from the pre-parsed parameter block.
 *
 * Driver port of libaom's read_sequence_header_obu(): instead of reading
 * bits from the stream, values come from params->p.*, which the hardware
 * parser filled in (the original aom_rb_read_* calls are kept as trailing
 * comments).  Reads with no params equivalent remain under #ifdef ORI_CODE
 * and are compiled out.
 *
 * On success cm->seq_params is updated and pbi->sequence_header_ready is
 * set; if the new header differs from one seen before,
 * pbi->sequence_header_changed is raised.  Errors are reported only via
 * cm->error.error_code — the return value is always 0 in this port (the
 * upstream version returned the decoded payload size), so callers must
 * check cm->error.error_code.
 */
static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
                                         union param_u *params) {
  AV1_COMMON *const cm = &pbi->common;
  int i;
  int operating_point;
  // Verify rb has been configured to report errors.
  //assert(rb->error_handler);

  // Use a local variable to store the information as we decode. At the end,
  // if no errors have occurred, cm->seq_params is updated.
  SequenceHeader sh = cm->seq_params;
  SequenceHeader *const seq_params = &sh;

  seq_params->profile = params->p.profile; //av1_read_profile(rb);
  if (seq_params->profile > CONFIG_MAX_DECODE_PROFILE) {
    cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
    return 0;
  }

  // Still picture or not
  seq_params->still_picture = params->p.still_picture; //aom_rb_read_bit(-1, "<still_picture>", rb);
  seq_params->reduced_still_picture_hdr = params->p.reduced_still_picture_hdr; //aom_rb_read_bit(-1, "<reduced_still_picture_hdr>", rb);
  // Video must have reduced_still_picture_hdr = 0
  if (!seq_params->still_picture && seq_params->reduced_still_picture_hdr) {
    cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
    return 0;
  }

  if (seq_params->reduced_still_picture_hdr) {
    // Reduced header: exactly one operating point with implied defaults.
    cm->timing_info_present = 0;
    seq_params->decoder_model_info_present_flag = 0;
    seq_params->display_model_info_present_flag = 0;
    seq_params->operating_points_cnt_minus_1 = 0;
    seq_params->operating_point_idc[0] = 0;
    //if (!read_bitstream_level(0, "<seq_level_idx>", &seq_params->seq_level_idx[0], rb)) {
    if (!is_valid_seq_level_idx(params->p.seq_level_idx[0])) {
      cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
      return 0;
    }
    seq_params->tier[0] = 0;
    cm->op_params[0].decoder_model_param_present_flag = 0;
    cm->op_params[0].display_model_param_present_flag = 0;
  } else {
    cm->timing_info_present = params->p.timing_info_present; //aom_rb_read_bit(-1, "<timing_info_present>", rb); // timing_info_present_flag
    if (cm->timing_info_present) {
#ifdef ORI_CODE
      av1_read_timing_info_header(cm, rb);
#endif
      seq_params->decoder_model_info_present_flag = params->p.decoder_model_info_present_flag; //aom_rb_read_bit(-1, "<decoder_model_info_present_flag>", rb);
#ifdef ORI_CODE
      if (seq_params->decoder_model_info_present_flag)
        av1_read_decoder_model_info(cm, rb);
#endif
    } else {
      seq_params->decoder_model_info_present_flag = 0;
    }
#ifdef ORI_CODE
    seq_params->display_model_info_present_flag = aom_rb_read_bit(-1, "<display_model_info_present_flag>", rb);
#endif
    seq_params->operating_points_cnt_minus_1 = params->p.operating_points_cnt_minus_1;
    //aom_rb_read_literal(-1, "<operating_points_cnt_minus_1>", rb, OP_POINTS_CNT_MINUS_1_BITS);
    // Per-operating-point parameters.
    for (i = 0; i < seq_params->operating_points_cnt_minus_1 + 1; i++) {
      seq_params->operating_point_idc[i] = params->p.operating_point_idc[i];
      //aom_rb_read_literal(i, "<operating_point_idc>", rb, OP_POINTS_IDC_BITS);
      //if (!read_bitstream_level(i, "<seq_level_idx>", &seq_params->seq_level_idx[i], rb)) {
      if (!is_valid_seq_level_idx(params->p.seq_level_idx[i])) {
        cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
        return 0;
      }
      // This is the seq_level_idx[i] > 7 check in the spec. seq_level_idx 7
      // is equivalent to level 3.3.
#ifdef ORI_CODE
      if (seq_params->seq_level_idx[i] >= SEQ_LEVEL_4_0)
        seq_params->tier[i] = aom_rb_read_bit(i, "<tier>", rb);
      else
        seq_params->tier[i] = 0;
#endif
      if (seq_params->decoder_model_info_present_flag) {
        cm->op_params[i].decoder_model_param_present_flag = params->p.decoder_model_param_present_flag[i]; //aom_rb_read_bit(-1, defmark, rb);
#ifdef ORI_CODE
        if (cm->op_params[i].decoder_model_param_present_flag)
          av1_read_op_parameters_info(cm, rb, i);
#endif
      } else {
        cm->op_params[i].decoder_model_param_present_flag = 0;
      }
#ifdef ORI_CODE
      if (cm->timing_info_present &&
          (cm->timing_info.equal_picture_interval ||
           cm->op_params[i].decoder_model_param_present_flag)) {
        cm->op_params[i].bitrate = av1_max_level_bitrate(
            seq_params->profile, seq_params->seq_level_idx[i],
            seq_params->tier[i]);
        // Level with seq_level_idx = 31 returns a high "dummy" bitrate to pass
        // the check
        if (cm->op_params[i].bitrate == 0)
          aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                             "AV1 does not support this combination of "
                             "profile, level, and tier.");
        // Buffer size in bits/s is bitrate in bits/s * 1 s
        cm->op_params[i].buffer_size = cm->op_params[i].bitrate;
      }
#endif
      if (cm->timing_info_present && cm->timing_info.equal_picture_interval &&
          !cm->op_params[i].decoder_model_param_present_flag) {
        // When the decoder_model_parameters are not sent for this op, set
        // the default ones that can be used with the resource availability mode
        cm->op_params[i].decoder_buffer_delay = 70000;
        cm->op_params[i].encoder_buffer_delay = 20000;
        cm->op_params[i].low_delay_mode_flag = 0;
      }

#ifdef ORI_CODE
      if (seq_params->display_model_info_present_flag) {
        cm->op_params[i].display_model_param_present_flag = aom_rb_read_bit(-1, defmark, rb);
        if (cm->op_params[i].display_model_param_present_flag) {
          cm->op_params[i].initial_display_delay =
              aom_rb_read_literal(-1, defmark, rb, 4) + 1;
          if (cm->op_params[i].initial_display_delay > 10)
            aom_internal_error(
                &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                "AV1 does not support more than 10 decoded frames delay");
        } else {
          cm->op_params[i].initial_display_delay = 10;
        }
      } else {
        cm->op_params[i].display_model_param_present_flag = 0;
        cm->op_params[i].initial_display_delay = 10;
      }
#endif
    }
  }
  // This decoder supports all levels. Choose operating point provided by
  // external means
  operating_point = pbi->operating_point;
  // Out-of-range external choice falls back to operating point 0.
  if (operating_point < 0 ||
      operating_point > seq_params->operating_points_cnt_minus_1)
    operating_point = 0;
  pbi->current_operating_point =
      seq_params->operating_point_idc[operating_point];
  if (aom_get_num_layers_from_operating_point_idc(
          pbi->current_operating_point, &cm->number_spatial_layers,
          &cm->number_temporal_layers) != AOM_CODEC_OK) {
    cm->error.error_code = AOM_CODEC_ERROR;
    return 0;
  }

  // Remaining header fields (frame size, order hints, superres, ...).
  av1_read_sequence_header(cm, params, seq_params);
#ifdef ORI_CODE
  av1_read_color_config(rb, pbi->allow_lowbitdepth, seq_params, &cm->error);
  if (!(seq_params->subsampling_x == 0 && seq_params->subsampling_y == 0) &&
      !(seq_params->subsampling_x == 1 && seq_params->subsampling_y == 1) &&
      !(seq_params->subsampling_x == 1 && seq_params->subsampling_y == 0)) {
    aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                       "Only 4:4:4, 4:2:2 and 4:2:0 are currently supported, "
                       "%d %d subsampling is not supported.\n",
                       seq_params->subsampling_x, seq_params->subsampling_y);
  }
  seq_params->film_grain_params_present = aom_rb_read_bit(-1, "<film_grain_params_present>", rb);

  if (av1_check_trailing_bits(pbi, rb) != 0) {
    // cm->error.error_code is already set.
    return 0;
  }
#endif

  // If a sequence header has been decoded before, we check if the new
  // one is consistent with the old one.
  if (pbi->sequence_header_ready) {
    if (!are_seq_headers_consistent(&cm->seq_params, seq_params))
      pbi->sequence_header_changed = 1;
  }

  // Commit the locally accumulated header only after all checks passed.
  cm->seq_params = *seq_params;
  pbi->sequence_header_ready = 1;
  return 0;

}
2868
/*
 * Dispatch one OBU (already split out by the hardware parser) to the
 * appropriate header decoding path.
 *
 * obu_type selects the case; the payload itself is not available here —
 * fields come from params.  Returns 1 when frame decoding is finished,
 * 0 when more data is needed, and -1 on error (with cm->error.error_code
 * set).
 */
int aom_decode_frame_from_obus(AV1Decoder *pbi, union param_u *params, int obu_type)
{
  AV1_COMMON *const cm = &pbi->common;
  ObuHeader obu_header;
  int frame_decoding_finished = 0;
  uint32_t frame_header_size = 0;

  //struct aom_read_bit_buffer rb;
  size_t payload_size = 0;
  size_t decoded_payload_size = 0;
  size_t obu_payload_offset = 0;
  //size_t bytes_read = 0;

  memset(&obu_header, 0, sizeof(obu_header));
#ifdef ORI_CODE
  pbi->seen_frame_header = 0;
#else
  /* set in the test.c*/
#endif

  obu_header.type = obu_type;
  pbi->cur_obu_type = obu_header.type;
  if (av1_is_debug(AOM_DEBUG_PRINT_LIST_INFO))
    dump_params(pbi, params);
  switch (obu_header.type) {
    case OBU_SEQUENCE_HEADER:
      // read_sequence_header_obu() always returns 0 in this port; the
      // error code is the real status.
      decoded_payload_size = read_sequence_header_obu(pbi, params);
      if (cm->error.error_code != AOM_CODEC_OK) return -1;
      break;

    case OBU_FRAME_HEADER:
    case OBU_REDUNDANT_FRAME_HEADER:
    case OBU_FRAME:
      // A redundant header is only legal after a real one; a real one is
      // only legal before any has been seen.
      if (obu_header.type == OBU_REDUNDANT_FRAME_HEADER) {
        if (!pbi->seen_frame_header) {
          cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
          return -1;
        }
      } else {
        // OBU_FRAME_HEADER or OBU_FRAME.
        if (pbi->seen_frame_header) {
          cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
          return -1;
        }
      }
      // Only decode first frame header received
      if (!pbi->seen_frame_header ||
          (cm->large_scale_tile && !pbi->camera_frame_header_ready)) {
        frame_header_size = av1_decode_frame_headers_and_setup(
            pbi, /*&rb, data, p_data_end,*/obu_header.type != OBU_FRAME, params);
        pbi->seen_frame_header = 1;
        if (!pbi->ext_tile_debug && cm->large_scale_tile)
          pbi->camera_frame_header_ready = 1;
      } else {
        // TODO(wtc): Verify that the frame_header_obu is identical to the
        // original frame_header_obu. For now just skip frame_header_size
        // bytes in the bit buffer.
        if (frame_header_size > payload_size) {
          cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
          return -1;
        }
        // NOTE(review): `rb` is commented out above; this assert can only
        // compile when assert() is disabled/compiled out in this build —
        // verify the build configuration.
        assert(rb.bit_offset == 0);
#ifdef ORI_CODE
        rb.bit_offset = 8 * frame_header_size;
#endif
      }

      decoded_payload_size = frame_header_size;
      pbi->frame_header_size = frame_header_size;

      if (cm->show_existing_frame) {
        // show_existing_frame is illegal inside an OBU_FRAME (which must
        // carry tile data for a new frame).
        if (obu_header.type == OBU_FRAME) {
          cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
          return -1;
        }
        frame_decoding_finished = 1;
        pbi->seen_frame_header = 0;
        break;
      }

      // In large scale tile coding, decode the common camera frame header
      // before any tile list OBU.
      if (!pbi->ext_tile_debug && pbi->camera_frame_header_ready) {
        frame_decoding_finished = 1;
        // Skip the rest of the frame data.
        decoded_payload_size = payload_size;
        // Update data_end.
#ifdef ORI_CODE
        *p_data_end = data_end;
#endif
        break;
      }
#if 0 //def AML
      frame_decoding_finished = 1;
#endif
      if (obu_header.type != OBU_FRAME) break;
      obu_payload_offset = frame_header_size;
      // Byte align the reader before reading the tile group.
      // byte_alignment() has set cm->error.error_code if it returns -1.
#ifdef ORI_CODE
      if (byte_alignment(cm, &rb)) return -1;
      AOM_FALLTHROUGH_INTENDED;  // fall through to read tile group.
#endif
      /* fallthrough: OBU_FRAME drops into default (tile-group reading is
       * handled by hardware, so default just breaks). */
    default:
      break;
  }
  return frame_decoding_finished;
}
2977
2978int get_buffer_index(AV1Decoder *pbi, RefCntBuffer *buffer)
2979{
2980 AV1_COMMON *const cm = &pbi->common;
2981 int i = -1;
2982
2983 if (buffer) {
2984 for (i = 0; i < FRAME_BUFFERS; i++) {
2985 RefCntBuffer *buf =
2986 &cm->buffer_pool->frame_bufs[i];
2987 if (buf == buffer) {
2988 break;
2989 }
2990 }
2991 }
2992 return i;
2993}
2994
2995void dump_buffer(RefCntBuffer *buf)
2996{
2997 int i;
2998 pr_info("ref_count %d, vf_ref %d, order_hint %d, w/h(%d,%d) showable_frame %d frame_type %d canvas(%d,%d) w/h(%d,%d) mi_c/r(%d,%d) header 0x%x ref_deltas(",
2999 buf->ref_count, buf->buf.vf_ref, buf->order_hint, buf->width, buf->height, buf->showable_frame, buf->frame_type,
3000 buf->buf.mc_canvas_y, buf->buf.mc_canvas_u_v,
3001 buf->buf.y_crop_width, buf->buf.y_crop_height,
3002 buf->mi_cols, buf->mi_rows,
3003 buf->buf.header_adr);
3004 for (i = 0; i < REF_FRAMES; i++)
3005 pr_info("%d,", buf->ref_deltas[i]);
3006 pr_info("), ref_order_hints(");
3007
3008 for (i = 0; i < INTER_REFS_PER_FRAME; i++)
3009 pr_info("%d ", buf->ref_order_hints[i]);
3010 pr_info(")");
3011}
3012
3013void dump_ref_buffer_info(AV1Decoder *pbi, int i)
3014{
3015 AV1_COMMON *const cm = &pbi->common;
3016 pr_info("remapped_ref_idx %d, ref_frame_sign_bias %d, ref_frame_id %d, valid_for_referencing %d ref_frame_side %d ref_frame_map idx %d, next_ref_frame_map idx %d",
3017 cm->remapped_ref_idx[i],
3018 cm->ref_frame_sign_bias[i],
3019 cm->ref_frame_id[i],
3020 cm->valid_for_referencing[i],
3021 cm->ref_frame_side[i],
3022 get_buffer_index(pbi, cm->ref_frame_map[i]),
3023 get_buffer_index(pbi, cm->next_ref_frame_map[i]));
3024}
3025
3026void dump_mv_refs(AV1Decoder *pbi)
3027{
3028 int i, j;
3029 AV1_COMMON *const cm = &pbi->common;
3030 for (i = 0; i < cm->mv_ref_id_index; i++) {
3031 pr_info("%d: ref_id %d cal_tpl_mvs %d mv_ref_offset: ",
3032 i, cm->mv_ref_id[i], cm->mv_cal_tpl_mvs[i]);
3033 for (j = 0; j < REF_FRAMES; j++)
3034 pr_info("%d ", cm->mv_ref_offset[i][j]);
3035 pr_info("\n");
3036 }
3037}
3038
/*
 * Log, for each of the INTER_REFS_PER_FRAME reference slots
 * (LAST_FRAME + 0 .. LAST_FRAME + 6), the spec buffer's pool index, order
 * hint, header addresses, canvas ids, mv write start address and lcu
 * count.  Slots without a buffer are skipped silently.
 */
void dump_ref_spec_bufs(AV1Decoder *pbi)
{
	int i;
	AV1_COMMON *const cm = &pbi->common;
	for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
		PIC_BUFFER_CONFIG *pic_config = av1_get_ref_frame_spec_buf(cm, LAST_FRAME + i);
		/* no buffer mapped into this reference slot */
		if (pic_config == NULL) continue;
		pr_info("%d: index %d order_hint %d header 0x%x dw_header 0x%x canvas(%d,%d) mv_wr_start 0x%x lcu_total %d\n",
			i, pic_config->index,
			pic_config->order_hint,
			pic_config->header_adr,
#ifdef AOM_AV1_MMU_DW
			/* double-write MMU header only exists in this config */
			pic_config->header_dw_adr,
#else
			0,
#endif
			pic_config->mc_canvas_y,
			pic_config->mc_canvas_u_v,
			pic_config->mpred_mv_wr_start_addr,
			pic_config->lcu_total
			);
	}
}
3062
3063#ifdef SUPPORT_SCALE_FACTOR
3064void dump_scale_factors(AV1Decoder *pbi)
3065{
3066 int i;
3067 AV1_COMMON *const cm = &pbi->common;
3068 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
3069 struct scale_factors *const sf =
3070 get_ref_scale_factors(cm, i);
3071 if (sf)
3072 pr_info("%d: is_scaled %d x_scale_fp %d, y_scale_fp %d\n",
3073 i, av1_is_scaled(sf),
3074 sf->x_scale_fp, sf->y_scale_fp);
3075 else
3076 pr_info("%d: sf null\n", i);
3077 }
3078}
3079
3080#endif
3081
/*
 * Dump the full buffer-manager state under the pool lock: every pool slot,
 * prev/cur frame, the REF_FRAMES bookkeeping, spec buffers, mv refs and
 * (when built in) scale factors.  Debug aid only.
 */
void dump_buffer_status(AV1Decoder *pbi)
{
	int i;
	AV1_COMMON *const cm = &pbi->common;
	BufferPool *const pool = cm->buffer_pool;
	unsigned long flags;

	/* lock/unlock take `flags` by name (irqsave-style macro pair). */
	lock_buffer_pool(pool, flags);

	pr_info("%s: pbi %p cm %p cur_frame %p\n", __func__, pbi, cm, cm->cur_frame);

	pr_info("Buffer Pool:\n");
	for (i = 0; i < FRAME_BUFFERS; i++) {
		RefCntBuffer *buf =
			&cm->buffer_pool->frame_bufs[i];
		pr_info("%d: ", i);
		/* buf is &frame_bufs[i], so it is never NULL; the check is
		 * defensive and always true here. */
		if (buf)
			dump_buffer(buf);
		pr_info("\n");
	}

	if (cm->prev_frame) {
		pr_info("prev_frame (%d): ",
			get_buffer_index(pbi, cm->prev_frame));
		dump_buffer(cm->prev_frame);
		pr_info("\n");
	}
	if (cm->cur_frame) {
		pr_info("cur_frame (%d): ",
			get_buffer_index(pbi, cm->cur_frame));
		dump_buffer(cm->cur_frame);
		pr_info("\n");
	}
	pr_info("REF_FRAMES Info(ref buf is ref_frame_map[remapped_ref_idx[i-1]], i=1~7):\n");
	for (i = 0; i < REF_FRAMES; i++) {
		pr_info("%d: ", i);
		dump_ref_buffer_info(pbi, i);
		pr_info("\n");
	}
	pr_info("Ref Spec Buffers:\n");
	dump_ref_spec_bufs(pbi);

	pr_info("MV refs:\n");
	dump_mv_refs(pbi);

#ifdef SUPPORT_SCALE_FACTOR
	pr_info("Scale factors:\n");
	dump_scale_factors(pbi);
#endif
	unlock_buffer_pool(pool, flags);
}
3133
3134
/*
 * Table-driven description of the union param_u fields that dump_params()
 * prints.  Each entry gives:
 *   size    - number of consecutive 16-bit words the field occupies
 *   name    - label used in the log output
 *   adr_off - byte offset of the field inside union param_u, computed with
 *             the classic null-pointer cast (a manual offsetof)
 * NOTE(review): adr_off is unsigned int but the initializers cast to
 * unsigned long — harmless for these small offsets, and offsetof() would
 * be cleaner, but stddef.h availability differs between the userspace and
 * kernel build branches.  The "e.r.r.o.r_resilient_mode" label is
 * presumably dotted to keep the word "error" out of log greps — confirm
 * before normalizing it.
 */
struct param_dump_item_s {
	unsigned int size;
	char* name;
	unsigned int adr_off;
} param_dump_items[] = {
	{1, "profile", (unsigned long)&(((union param_u *)0)->p.profile )},
	{1, "still_picture", (unsigned long)&(((union param_u *)0)->p.still_picture )},
	{1, "reduced_still_picture_hdr", (unsigned long)&(((union param_u *)0)->p.reduced_still_picture_hdr )},
	{1, "decoder_model_info_present_flag", (unsigned long)&(((union param_u *)0)->p.decoder_model_info_present_flag)},
	{1, "max_frame_width", (unsigned long)&(((union param_u *)0)->p.max_frame_width )},
	{1, "max_frame_height", (unsigned long)&(((union param_u *)0)->p.max_frame_height )},
	{1, "frame_id_numbers_present_flag", (unsigned long)&(((union param_u *)0)->p.frame_id_numbers_present_flag )},
	{1, "delta_frame_id_length", (unsigned long)&(((union param_u *)0)->p.delta_frame_id_length )},
	{1, "frame_id_length", (unsigned long)&(((union param_u *)0)->p.frame_id_length )},
	{1, "order_hint_bits_minus_1", (unsigned long)&(((union param_u *)0)->p.order_hint_bits_minus_1 )},
	{1, "enable_order_hint", (unsigned long)&(((union param_u *)0)->p.enable_order_hint )},
	{1, "enable_dist_wtd_comp", (unsigned long)&(((union param_u *)0)->p.enable_dist_wtd_comp )},
	{1, "enable_ref_frame_mvs", (unsigned long)&(((union param_u *)0)->p.enable_ref_frame_mvs )},
	{1, "enable_superres", (unsigned long)&(((union param_u *)0)->p.enable_superres )},
	{1, "superres_scale_denominator", (unsigned long)&(((union param_u *)0)->p.superres_scale_denominator )},
	{1, "show_existing_frame", (unsigned long)&(((union param_u *)0)->p.show_existing_frame )},
	{1, "frame_type", (unsigned long)&(((union param_u *)0)->p.frame_type )},
	{1, "show_frame", (unsigned long)&(((union param_u *)0)->p.show_frame )},
	{1, "e.r.r.o.r_resilient_mode", (unsigned long)&(((union param_u *)0)->p.error_resilient_mode )},
	{1, "refresh_frame_flags", (unsigned long)&(((union param_u *)0)->p.refresh_frame_flags )},
	{1, "showable_frame", (unsigned long)&(((union param_u *)0)->p.showable_frame )},
	{1, "current_frame_id", (unsigned long)&(((union param_u *)0)->p.current_frame_id )},
	{1, "frame_size_override_flag", (unsigned long)&(((union param_u *)0)->p.frame_size_override_flag )},
	{1, "order_hint", (unsigned long)&(((union param_u *)0)->p.order_hint )},
	{1, "primary_ref_frame", (unsigned long)&(((union param_u *)0)->p.primary_ref_frame )},
	{1, "frame_refs_short_signaling", (unsigned long)&(((union param_u *)0)->p.frame_refs_short_signaling )},
	{1, "frame_width", (unsigned long)&(((union param_u *)0)->p.frame_width )},
	{1, "dec_frame_width", (unsigned long)&(((union param_u *)0)->p.dec_frame_width )},
	{1, "frame_width_scaled", (unsigned long)&(((union param_u *)0)->p.frame_width_scaled )},
	{1, "frame_height", (unsigned long)&(((union param_u *)0)->p.frame_height )},
	{1, "reference_mode", (unsigned long)&(((union param_u *)0)->p.reference_mode )},
	{1, "update_parameters", (unsigned long)&(((union param_u *)0)->p.update_parameters )},
	{1, "film_grain_params_ref_idx", (unsigned long)&(((union param_u *)0)->p.film_grain_params_ref_idx )},
	{1, "allow_ref_frame_mvs", (unsigned long)&(((union param_u *)0)->p.allow_ref_frame_mvs )},
	{1, "lst_ref", (unsigned long)&(((union param_u *)0)->p.lst_ref )},
	{1, "gld_ref", (unsigned long)&(((union param_u *)0)->p.gld_ref )},
	{INTER_REFS_PER_FRAME, "remapped_ref_idx", (unsigned long)&(((union param_u *)0)->p.remapped_ref_idx[0] )},
	{INTER_REFS_PER_FRAME, "delta_frame_id_minus_1", (unsigned long)&(((union param_u *)0)->p.delta_frame_id_minus_1[0] )},
	{REF_FRAMES, "ref_order_hint", (unsigned long)&(((union param_u *)0)->p.ref_order_hint[0] )},
};
3180
3181void dump_params(AV1Decoder *pbi, union param_u *params)
3182{
3183 int i, j;
3184 unsigned char *start_adr = (unsigned char*)params;
3185
3186 pr_info("============ params:\n");
3187 for (i = 0; i < sizeof(param_dump_items) / sizeof(param_dump_items[0]); i++) {
3188 for (j = 0; j < param_dump_items[i].size; j++) {
3189 if (param_dump_items[i].size > 1)
3190 pr_info("%s(%d): 0x%x\n",
3191 param_dump_items[i].name, j,
3192 *((unsigned short*)(start_adr + param_dump_items[i].adr_off + j * 2)));
3193 else
3194 pr_info("%s: 0x%x\n", param_dump_items[i].name,
3195 *((unsigned short*)(start_adr + param_dump_items[i].adr_off + j * 2)));
3196 }
3197 }
3198}
3199
3200/*static void raw_write_image(AV1Decoder *pbi, PIC_BUFFER_CONFIG *sd)
3201{
3202 printf("$$$$$$$ output image\n");
3203}*/
3204
/*
 * Return values of av1_bufmgr_process():
 *    0 - more decoding data is needed
 *    1 - decoding done
 *   -1 - decoding error
 */
3211int av1_bufmgr_process(AV1Decoder *pbi, union param_u *params,
3212 unsigned char new_compressed_data, int obu_type)
3213{
3214 AV1_COMMON *const cm = &pbi->common;
3215 int j;
3216 // Release any pending output frames from the previous decoder_decode call.
3217 // We need to do this even if the decoder is being flushed or the input
3218 // arguments are invalid.
3219 BufferPool *const pool = cm->buffer_pool;
3220 int frame_decoded;
3221 av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%s: pbi %p cm %p cur_frame %p\n", __func__, pbi, cm, cm->cur_frame);
3222 av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%s: new_compressed_data= %d\n", __func__, new_compressed_data);
3223 for (j = 0; j < pbi->num_output_frames; j++) {
3224 decrease_ref_count(pbi, pbi->output_frames[j], pool);
3225 }
3226 pbi->num_output_frames = 0;
3227 //
3228 if (new_compressed_data) {
3229 if (assign_cur_frame_new_fb(cm) == NULL) {
3230 cm->error.error_code = AOM_CODEC_MEM_ERROR;
3231 return -1;
3232 }
3233 pbi->seen_frame_header = 0;
3234 av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "New_compressed_data (%d)\n", new_compressed_data_count++);
3235
3236 }
3237
3238 frame_decoded =
3239 aom_decode_frame_from_obus(pbi, params, obu_type);
3240
3241 if (pbi->cur_obu_type == OBU_FRAME_HEADER ||
3242 pbi->cur_obu_type == OBU_REDUNDANT_FRAME_HEADER ||
3243 pbi->cur_obu_type == OBU_FRAME) {
3244 if (av1_is_debug(AOM_DEBUG_PRINT_LIST_INFO)) {
3245 pr_info("after bufmgr (frame_decoded %d seen_frame_header %d): ",
3246 frame_decoded, pbi->seen_frame_header);
3247 dump_buffer_status(pbi);
3248 }
3249 }
3250 av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%s: pbi %p cm %p cur_frame %p\n", __func__, pbi, cm, cm->cur_frame);
3251
3252 return frame_decoded;
3253
3254}
3255
3256int av1_get_raw_frame(AV1Decoder *pbi, size_t index, PIC_BUFFER_CONFIG **sd) {
3257 if (index >= pbi->num_output_frames) return -1;
3258 *sd = &pbi->output_frames[index]->buf;
3259 //*grain_params = &pbi->output_frames[index]->film_grain_params;
3260 //aom_clear_system_state();
3261 return 0;
3262}
3263
3264int av1_bufmgr_postproc(AV1Decoder *pbi, unsigned char frame_decoded)
3265{
3266 PIC_BUFFER_CONFIG *sd;
3267 int index;
3268#if 0
3269 if (frame_decoded) {
3270 printf("before swap_frame_buffers: ");
3271 dump_buffer_status(pbi);
3272 }
3273#endif
3274 swap_frame_buffers(pbi, frame_decoded);
3275 if (frame_decoded) {
3276 if (av1_is_debug(AOM_DEBUG_PRINT_LIST_INFO)) {
3277 pr_info("after swap_frame_buffers: ");
3278 dump_buffer_status(pbi);
3279 }
3280 }
3281 if (frame_decoded) {
3282 pbi->decoding_first_frame = 0;
3283 }
3284
3285
3286 for (index = 0;;index++) {
3287 if (av1_get_raw_frame(pbi, index, &sd) < 0)
3288 break;
3289 av1_raw_write_image(pbi, sd);
3290 }
3291 return 0;
3292}
3293
3294int aom_realloc_frame_buffer(AV1_COMMON *cm, PIC_BUFFER_CONFIG *pic,
3295 int width, int height, unsigned int order_hint)
3296{
3297 av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%s, index 0x%x, width 0x%x, height 0x%x order_hint 0x%x\n",
3298 __func__, pic->index, width, height, order_hint);
3299 pic->y_crop_width = width;
3300 pic->y_crop_height = height;
3301 pic->order_hint = order_hint;
3302 return 0;
3303}
3304
3305
3306unsigned char av1_frame_is_inter(const AV1_COMMON *const cm) {
3307 unsigned char is_inter = cm->cur_frame && (cm->cur_frame->frame_type != KEY_FRAME)
3308 && (cm->current_frame.frame_type != INTRA_ONLY_FRAME);
3309 return is_inter;
3310}
3311
3312PIC_BUFFER_CONFIG *av1_get_ref_frame_spec_buf(
3313 const AV1_COMMON *const cm, const MV_REFERENCE_FRAME ref_frame) {
3314 RefCntBuffer *buf = get_ref_frame_buf(cm, ref_frame);
3315 if (buf) {
3316 buf->buf.order_hint = buf->order_hint;
3317 return &(buf->buf);
3318 }
3319 return NULL;
3320}
3321
3322struct scale_factors *av1_get_ref_scale_factors(
3323 AV1_COMMON *const cm, const MV_REFERENCE_FRAME ref_frame)
3324{
3325 return get_ref_scale_factors(cm, ref_frame);
3326}
3327
3328void av1_set_next_ref_frame_map(AV1Decoder *pbi) {
3329 int ref_index = 0;
3330 int mask;
3331 AV1_COMMON *const cm = &pbi->common;
3332 int check_on_show_existing_frame;
3333 av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%s, %d, mask 0x%x, show_existing_frame %d, reset_decoder_state %d\n",
3334 __func__, pbi->camera_frame_header_ready,
3335 cm->current_frame.refresh_frame_flags,
3336 cm->show_existing_frame,
3337 pbi->reset_decoder_state
3338 );
3339 if (!pbi->camera_frame_header_ready) {
3340 for (mask = cm->current_frame.refresh_frame_flags; mask; mask >>= 1) {
3341 cm->next_used_ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
3342 ++ref_index;
3343 }
3344
3345 check_on_show_existing_frame =
3346 !cm->show_existing_frame || pbi->reset_decoder_state;
3347 for (; ref_index < REF_FRAMES && check_on_show_existing_frame;
3348 ++ref_index) {
3349 cm->next_used_ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
3350 }
3351 }
3352}
3353
3354unsigned int av1_get_next_used_ref_info(
3355 const AV1_COMMON *const cm, int i) {
3356 /*
3357 i = 0~1 orde_hint map
3358 i = 2~10 size map[i-2]
3359 */
3360 unsigned int info = 0;
3361 int j;
3362 if (i < 2) {
3363 /*next_used_ref_frame_map has 8 items*/
3364 for (j = 0; j < 4; j++) {
3365 RefCntBuffer *buf =
3366 cm->next_used_ref_frame_map[(i * 4) + j];
3367 if (buf)
3368 info |= ((buf->buf.order_hint & 0xff)
3369 << (j * 8));
3370 }
3371 } else if (i < 10) {
3372 RefCntBuffer *buf =
3373 cm->next_used_ref_frame_map[i-2];
3374 if (buf)
3375 info = (buf->buf.y_crop_width << 16) | (buf->buf.y_crop_height & 0xffff);
3376 } else {
3377 for (j = 0; j < 4; j++) {
3378 RefCntBuffer *buf =
3379 cm->next_used_ref_frame_map[((i - 10) * 4) + j];
3380 if (buf)
3381 info |= ((buf->buf.index & 0xff)
3382 << (j * 8));
3383 }
3384 }
3385 return info;
3386}
3387
3388RefCntBuffer *av1_get_primary_ref_frame_buf(
3389 const AV1_COMMON *const cm)
3390{
3391 return get_primary_ref_frame_buf(cm);
3392}
3393