1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Hantro VP9 codec driver
4  *
5  * Copyright (C) 2021 Collabora Ltd.
6  */
7 #include <media/videobuf2-core.h>
8 #include <media/videobuf2-dma-contig.h>
9 #include <media/videobuf2-v4l2.h>
10 #include <linux/kernel.h>
11 #include <linux/vmalloc.h>
12 #include <media/v4l2-mem2mem.h>
13 #include <media/v4l2-vp9.h>
14 
15 #include "hantro.h"
16 #include "hantro_vp9.h"
17 #include "hantro_g2_regs.h"
18 
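/*
 * Byte alignment applied to the motion-vector area that follows the
 * luma/chroma planes in the capture buffer (see mv_offset() below).
 */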
19 #define G2_ALIGN 16
20 
21 enum hantro_ref_frames {
22 	INTRA_FRAME = 0,
23 	LAST_FRAME = 1,
24 	GOLDEN_FRAME = 2,
25 	ALTREF_FRAME = 3,
26 	MAX_REF_FRAMES = 4
27 };
28 
29 static int start_prepare_run(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame **dec_params)
30 {
31 	const struct v4l2_ctrl_vp9_compressed_hdr *prob_updates;
32 	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
33 	struct v4l2_ctrl *ctrl;
34 	unsigned int fctx_idx;
35 
36 	/* V4L2-specific setup */
37 	hantro_start_prepare_run(ctx);
38 
39 	ctrl = v4l2_ctrl_find(&ctx->ctrl_handler, V4L2_CID_STATELESS_VP9_FRAME);
40 	if (WARN_ON(!ctrl))
41 		return -EINVAL;
42 	*dec_params = ctrl->p_cur.p;
43 
44 	ctrl = v4l2_ctrl_find(&ctx->ctrl_handler, V4L2_CID_STATELESS_VP9_COMPRESSED_HDR);
45 	if (WARN_ON(!ctrl))
46 		return -EINVAL;
47 	prob_updates = ctrl->p_cur.p;
48 	vp9_ctx->cur.tx_mode = prob_updates->tx_mode;
49 
50 	/*
51 	 * VP9-specific setup
52 	 *
53 	 * By this point userspace has performed all parts of 6.2 uncompressed_header()
54 	 * except this fragment:
55 	 * if ( FrameIsIntra || error_resilient_mode ) {
56 	 *	setup_past_independence ( )
57 	 *	if ( frame_type == KEY_FRAME || error_resilient_mode == 1 ||
58 	 *	     reset_frame_context == 3 ) {
59 	 *		for ( i = 0; i < 4; i ++ ) {
60 	 *			save_probs( i )
61 	 *		}
62 	 *	} else if ( reset_frame_context == 2 ) {
63 	 *		save_probs( frame_context_idx )
64 	 *	}
65 	 *	frame_context_idx = 0
66 	 * }
67 	 */
68 	fctx_idx = v4l2_vp9_reset_frame_ctx(*dec_params, vp9_ctx->frame_context);
69 	vp9_ctx->cur.frame_context_idx = fctx_idx;
70 
71 	/* 6.1 frame(sz): load_probs() and load_probs2() */
72 	vp9_ctx->probability_tables = vp9_ctx->frame_context[fctx_idx];
73 
74 	/*
75 	 * Userspace has also performed 6.3 compressed_header(), but it handles the
76 	 * probs in a special way. All probs which need updating, except MV-related,
77 	 * have been read from the bitstream and translated through inv_map_table[],
78 	 * but no 6.3.6 inv_recenter_nonneg(v, m) has been performed. The values passed
79 	 * by userspace are either translated values (there are no 0 values in
80 	 * inv_map_table[]) or zero to indicate no update. All MV-related probs which need
81 	 * updating have been read from the bitstream and (mv_prob << 1) | 1 has been
82 	 * applied. The values passed by userspace are either new values
83 	 * to replace old ones (the above-mentioned shift and bitwise OR never result in
84 	 * zero) or zero to indicate no update.
85 	 * fw_update_probs() performs the actual probs updates or leaves probs as-is
86 	 * for values for which a zero was passed from userspace.
87 	 */
88 	v4l2_vp9_fw_update_probs(&vp9_ctx->probability_tables, prob_updates, *dec_params);
89 
90 	return 0;
91 }
92 
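/*
 * Layout of a decoded (capture) buffer as assumed by the helpers below:
 * the luma plane, immediately followed by the 4:2:0 chroma data (half the
 * luma size), followed by a G2_ALIGN-aligned area holding the frame's
 * motion vectors for temporal MV prediction.
 */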
93 static size_t chroma_offset(const struct hantro_ctx *ctx,
94 			    const struct v4l2_ctrl_vp9_frame *dec_params)
95 {
96 	int bytes_per_pixel = dec_params->bit_depth == 8 ? 1 : 2;
97 
98 	return ctx->src_fmt.width * ctx->src_fmt.height * bytes_per_pixel;
99 }
100 
101 static size_t mv_offset(const struct hantro_ctx *ctx,
102 			const struct v4l2_ctrl_vp9_frame *dec_params)
103 {
104 	size_t cr_offset = chroma_offset(ctx, dec_params);
105 
106 	return ALIGN((cr_offset * 3) / 2, G2_ALIGN);
107 }
108 
109 static struct hantro_decoded_buffer *
110 get_ref_buf(struct hantro_ctx *ctx, struct vb2_v4l2_buffer *dst, u64 timestamp)
111 {
112 	struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;
113 	struct vb2_queue *cap_q = &m2m_ctx->cap_q_ctx.q;
114 	struct vb2_buffer *buf;
115 
116 	/*
117 	 * If a ref is unused or invalid, the address of the current
118 	 * destination buffer is returned.
119 	 */
120 	buf = vb2_find_buffer(cap_q, timestamp);
121 	if (!buf)
122 		buf = &dst->vb2_buf;
123 
124 	return vb2_to_hantro_decoded_buf(buf);
125 }
126 
127 static void update_dec_buf_info(struct hantro_decoded_buffer *buf,
128 				const struct v4l2_ctrl_vp9_frame *dec_params)
129 {
130 	buf->vp9.width = dec_params->frame_width_minus_1 + 1;
131 	buf->vp9.height = dec_params->frame_height_minus_1 + 1;
132 	buf->vp9.bit_depth = dec_params->bit_depth;
133 }
134 
135 static void update_ctx_cur_info(struct hantro_vp9_dec_hw_ctx *vp9_ctx,
136 				struct hantro_decoded_buffer *buf,
137 				const struct v4l2_ctrl_vp9_frame *dec_params)
138 {
139 	vp9_ctx->cur.valid = true;
140 	vp9_ctx->cur.reference_mode = dec_params->reference_mode;
141 	vp9_ctx->cur.interpolation_filter = dec_params->interpolation_filter;
142 	vp9_ctx->cur.flags = dec_params->flags;
143 	vp9_ctx->cur.timestamp = buf->base.vb.vb2_buf.timestamp;
144 }
145 
146 static void config_output(struct hantro_ctx *ctx,
147 			  struct hantro_decoded_buffer *dst,
148 			  const struct v4l2_ctrl_vp9_frame *dec_params)
149 {
150 	dma_addr_t luma_addr, chroma_addr, mv_addr;
151 
152 	hantro_reg_write(ctx->dev, &g2_out_dis, 0);
153 	if (!ctx->dev->variant->legacy_regs)
154 		hantro_reg_write(ctx->dev, &g2_output_format, 0);
155 
156 	luma_addr = hantro_get_dec_buf_addr(ctx, &dst->base.vb.vb2_buf);
157 	hantro_write_addr(ctx->dev, G2_OUT_LUMA_ADDR, luma_addr);
158 
159 	chroma_addr = luma_addr + chroma_offset(ctx, dec_params);
160 	hantro_write_addr(ctx->dev, G2_OUT_CHROMA_ADDR, chroma_addr);
161 
162 	mv_addr = luma_addr + mv_offset(ctx, dec_params);
163 	hantro_write_addr(ctx->dev, G2_OUT_MV_ADDR, mv_addr);
164 }
165 
166 struct hantro_vp9_ref_reg {
167 	const struct hantro_reg width;
168 	const struct hantro_reg height;
169 	const struct hantro_reg hor_scale;
170 	const struct hantro_reg ver_scale;
171 	u32 y_base;
172 	u32 c_base;
173 };
174 
175 static void config_ref(struct hantro_ctx *ctx,
176 		       struct hantro_decoded_buffer *dst,
177 		       const struct hantro_vp9_ref_reg *ref_reg,
178 		       const struct v4l2_ctrl_vp9_frame *dec_params,
179 		       u64 ref_ts)
180 {
181 	struct hantro_decoded_buffer *buf;
182 	dma_addr_t luma_addr, chroma_addr;
183 	u32 refw, refh;
184 
185 	buf = get_ref_buf(ctx, &dst->base.vb, ref_ts);
186 	refw = buf->vp9.width;
187 	refh = buf->vp9.height;
188 
189 	hantro_reg_write(ctx->dev, &ref_reg->width, refw);
190 	hantro_reg_write(ctx->dev, &ref_reg->height, refh);
191 
192 	hantro_reg_write(ctx->dev, &ref_reg->hor_scale, (refw << 14) / dst->vp9.width);
193 	hantro_reg_write(ctx->dev, &ref_reg->ver_scale, (refh << 14) / dst->vp9.height);
194 
195 	luma_addr = hantro_get_dec_buf_addr(ctx, &buf->base.vb.vb2_buf);
196 	hantro_write_addr(ctx->dev, ref_reg->y_base, luma_addr);
197 
198 	chroma_addr = luma_addr + chroma_offset(ctx, dec_params);
199 	hantro_write_addr(ctx->dev, ref_reg->c_base, chroma_addr);
200 }
201 
202 static void config_ref_registers(struct hantro_ctx *ctx,
203 				 const struct v4l2_ctrl_vp9_frame *dec_params,
204 				 struct hantro_decoded_buffer *dst,
205 				 struct hantro_decoded_buffer *mv_ref)
206 {
207 	static const struct hantro_vp9_ref_reg ref_regs[] = {
208 		{
209 			/* Last */
210 			.width = vp9_lref_width,
211 			.height = vp9_lref_height,
212 			.hor_scale = vp9_lref_hor_scale,
213 			.ver_scale = vp9_lref_ver_scale,
214 			.y_base = G2_REF_LUMA_ADDR(0),
215 			.c_base = G2_REF_CHROMA_ADDR(0),
216 		}, {
217 			/* Golden */
218 			.width = vp9_gref_width,
219 			.height = vp9_gref_height,
220 			.hor_scale = vp9_gref_hor_scale,
221 			.ver_scale = vp9_gref_ver_scale,
222 			.y_base = G2_REF_LUMA_ADDR(4),
223 			.c_base = G2_REF_CHROMA_ADDR(4),
224 		}, {
225 			/* Altref */
226 			.width = vp9_aref_width,
227 			.height = vp9_aref_height,
228 			.hor_scale = vp9_aref_hor_scale,
229 			.ver_scale = vp9_aref_ver_scale,
230 			.y_base = G2_REF_LUMA_ADDR(5),
231 			.c_base = G2_REF_CHROMA_ADDR(5),
232 		},
233 	};
234 	dma_addr_t mv_addr;
235 
236 	config_ref(ctx, dst, &ref_regs[0], dec_params, dec_params->last_frame_ts);
237 	config_ref(ctx, dst, &ref_regs[1], dec_params, dec_params->golden_frame_ts);
238 	config_ref(ctx, dst, &ref_regs[2], dec_params, dec_params->alt_frame_ts);
239 
240 	mv_addr = hantro_get_dec_buf_addr(ctx, &mv_ref->base.vb.vb2_buf) +
241 		  mv_offset(ctx, dec_params);
242 	hantro_write_addr(ctx->dev, G2_REF_MV_ADDR(0), mv_addr);
243 
244 	hantro_reg_write(ctx->dev, &vp9_last_sign_bias,
245 			 dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_LAST ? 1 : 0);
246 
247 	hantro_reg_write(ctx->dev, &vp9_gref_sign_bias,
248 			 dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_GOLDEN ? 1 : 0);
249 
250 	hantro_reg_write(ctx->dev, &vp9_aref_sign_bias,
251 			 dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_ALT ? 1 : 0);
252 }
253 
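/*
 * Split 'sbs' superblocks into 'tiles' parts the same way the VP9 spec
 * does: tile i covers superblocks [i * sbs / tiles, (i + 1) * sbs / tiles).
 * Each entry of tile_info receives the size of one tile, in superblocks.
 */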
254 static void recompute_tile_info(unsigned short *tile_info, unsigned int tiles, unsigned int sbs)
255 {
256 	int i;
257 	unsigned int accumulated = 0;
258 	unsigned int next_accumulated;
259 
260 	for (i = 1; i <= tiles; ++i) {
261 		next_accumulated = i * sbs / tiles;
262 		*tile_info++ = next_accumulated - accumulated;
263 		accumulated = next_accumulated;
264 	}
265 }
266 
267 static void
268 recompute_tile_rc_info(struct hantro_ctx *ctx,
269 		       unsigned int tile_r, unsigned int tile_c,
270 		       unsigned int sbs_r, unsigned int sbs_c)
271 {
272 	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
273 
274 	recompute_tile_info(vp9_ctx->tile_r_info, tile_r, sbs_r);
275 	recompute_tile_info(vp9_ctx->tile_c_info, tile_c, sbs_c);
276 
277 	vp9_ctx->last_tile_r = tile_r;
278 	vp9_ctx->last_tile_c = tile_c;
279 	vp9_ctx->last_sbs_r = sbs_r;
280 	vp9_ctx->last_sbs_c = sbs_c;
281 }
282 
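/*
 * Small frames may have more tile rows than superblock rows (tile_r equal
 * to sbs_r + 1 or sbs_r + 2), leaving some tile rows empty.  fill_tile_info()
 * starts at the row returned here and folds the skipped rows into the first
 * row it emits, presumably so that no empty tile rows reach the hardware.
 */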
283 static inline unsigned int first_tile_row(unsigned int tile_r, unsigned int sbs_r)
284 {
285 	if (tile_r == sbs_r + 1)
286 		return 1;
287 
288 	if (tile_r == sbs_r + 2)
289 		return 2;
290 
291 	return 0;
292 }
293 
294 static void
295 fill_tile_info(struct hantro_ctx *ctx,
296 	       unsigned int tile_r, unsigned int tile_c,
297 	       unsigned int sbs_r, unsigned int sbs_c,
298 	       unsigned short *tile_mem)
299 {
300 	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
301 	unsigned int i, j;
302 	bool first = true;
303 
304 	for (i = first_tile_row(tile_r, sbs_r); i < tile_r; ++i) {
305 		unsigned short r_info = vp9_ctx->tile_r_info[i];
306 
307 		if (first) {
308 			if (i > 0)
309 				r_info += vp9_ctx->tile_r_info[0];
310 			if (i == 2)
311 				r_info += vp9_ctx->tile_r_info[1];
312 			first = false;
313 		}
314 		for (j = 0; j < tile_c; ++j) {
315 			*tile_mem++ = vp9_ctx->tile_c_info[j];
316 			*tile_mem++ = r_info;
317 		}
318 	}
319 }
320 
321 static void
322 config_tiles(struct hantro_ctx *ctx,
323 	     const struct v4l2_ctrl_vp9_frame *dec_params,
324 	     struct hantro_decoded_buffer *dst)
325 {
326 	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
327 	struct hantro_aux_buf *misc = &vp9_ctx->misc;
328 	struct hantro_aux_buf *tile_edge = &vp9_ctx->tile_edge;
329 	dma_addr_t addr;
330 	unsigned short *tile_mem;
331 	unsigned int rows, cols;
332 
333 	addr = misc->dma + vp9_ctx->tile_info_offset;
334 	hantro_write_addr(ctx->dev, G2_TILE_SIZES_ADDR, addr);
335 
336 	tile_mem = misc->cpu + vp9_ctx->tile_info_offset;
337 	if (dec_params->tile_cols_log2 || dec_params->tile_rows_log2) {
338 		unsigned int tile_r = (1 << dec_params->tile_rows_log2);
339 		unsigned int tile_c = (1 << dec_params->tile_cols_log2);
340 		unsigned int sbs_r = hantro_vp9_num_sbs(dst->vp9.height);
341 		unsigned int sbs_c = hantro_vp9_num_sbs(dst->vp9.width);
342 
343 		if (tile_r != vp9_ctx->last_tile_r || tile_c != vp9_ctx->last_tile_c ||
344 		    sbs_r != vp9_ctx->last_sbs_r || sbs_c != vp9_ctx->last_sbs_c)
345 			recompute_tile_rc_info(ctx, tile_r, tile_c, sbs_r, sbs_c);
346 
347 		fill_tile_info(ctx, tile_r, tile_c, sbs_r, sbs_c, tile_mem);
348 
349 		cols = tile_c;
350 		rows = tile_r;
351 		hantro_reg_write(ctx->dev, &g2_tile_e, 1);
352 	} else {
353 		tile_mem[0] = hantro_vp9_num_sbs(dst->vp9.width);
354 		tile_mem[1] = hantro_vp9_num_sbs(dst->vp9.height);
355 
356 		cols = 1;
357 		rows = 1;
358 		hantro_reg_write(ctx->dev, &g2_tile_e, 0);
359 	}
360 
361 	if (ctx->dev->variant->legacy_regs) {
362 		hantro_reg_write(ctx->dev, &g2_num_tile_cols_old, cols);
363 		hantro_reg_write(ctx->dev, &g2_num_tile_rows_old, rows);
364 	} else {
365 		hantro_reg_write(ctx->dev, &g2_num_tile_cols, cols);
366 		hantro_reg_write(ctx->dev, &g2_num_tile_rows, rows);
367 	}
368 
369 	/* provide aux buffers even if no tiles are used */
370 	addr = tile_edge->dma;
371 	hantro_write_addr(ctx->dev, G2_TILE_FILTER_ADDR, addr);
372 
373 	addr = tile_edge->dma + vp9_ctx->bsd_ctrl_offset;
374 	hantro_write_addr(ctx->dev, G2_TILE_BSD_ADDR, addr);
375 }
376 
377 static void
378 update_feat_and_flag(struct hantro_vp9_dec_hw_ctx *vp9_ctx,
379 		     const struct v4l2_vp9_segmentation *seg,
380 		     unsigned int feature,
381 		     unsigned int segid)
382 {
383 	u8 mask = V4L2_VP9_SEGMENT_FEATURE_ENABLED(feature);
384 
385 	vp9_ctx->feature_data[segid][feature] = seg->feature_data[segid][feature];
386 	vp9_ctx->feature_enabled[segid] &= ~mask;
387 	vp9_ctx->feature_enabled[segid] |= (seg->feature_enabled[segid] & mask);
388 }
389 
390 static inline s16 clip3(s16 x, s16 y, s16 z)
391 {
392 	return (z < x) ? x : (z > y) ? y : z;
393 }
394 
395 static s16 feat_val_clip3(s16 feat_val, s16 feature_data, bool absolute, u8 clip)
396 {
397 	if (absolute)
398 		return feature_data;
399 
400 	return clip3(0, clip, feat_val + feature_data);
401 }
402 
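/*
 * Program the per-segment quantizer, loop filter level, reference frame and
 * skip features.  The driver keeps its own copy of the feature data in
 * vp9_ctx->feature_data/feature_enabled, since the bitstream carries updates
 * only when V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA is set.
 */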
403 static void config_segment(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
404 {
405 	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
406 	const struct v4l2_vp9_segmentation *seg;
407 	s16 feat_val;
408 	unsigned char feat_id;
409 	unsigned int segid;
410 	bool segment_enabled, absolute, update_data;
411 
412 	static const struct hantro_reg seg_regs[8][V4L2_VP9_SEG_LVL_MAX] = {
413 		{ vp9_quant_seg0, vp9_filt_level_seg0, vp9_refpic_seg0, vp9_skip_seg0 },
414 		{ vp9_quant_seg1, vp9_filt_level_seg1, vp9_refpic_seg1, vp9_skip_seg1 },
415 		{ vp9_quant_seg2, vp9_filt_level_seg2, vp9_refpic_seg2, vp9_skip_seg2 },
416 		{ vp9_quant_seg3, vp9_filt_level_seg3, vp9_refpic_seg3, vp9_skip_seg3 },
417 		{ vp9_quant_seg4, vp9_filt_level_seg4, vp9_refpic_seg4, vp9_skip_seg4 },
418 		{ vp9_quant_seg5, vp9_filt_level_seg5, vp9_refpic_seg5, vp9_skip_seg5 },
419 		{ vp9_quant_seg6, vp9_filt_level_seg6, vp9_refpic_seg6, vp9_skip_seg6 },
420 		{ vp9_quant_seg7, vp9_filt_level_seg7, vp9_refpic_seg7, vp9_skip_seg7 },
421 	};
422 
423 	segment_enabled = !!(dec_params->seg.flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED);
424 	hantro_reg_write(ctx->dev, &vp9_segment_e, segment_enabled);
425 	hantro_reg_write(ctx->dev, &vp9_segment_upd_e,
426 			 !!(dec_params->seg.flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP));
427 	hantro_reg_write(ctx->dev, &vp9_segment_temp_upd_e,
428 			 !!(dec_params->seg.flags & V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE));
429 
430 	seg = &dec_params->seg;
431 	absolute = !!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE);
432 	update_data = !!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA);
433 
434 	for (segid = 0; segid < 8; ++segid) {
435 		/* Quantizer segment feature */
436 		feat_id = V4L2_VP9_SEG_LVL_ALT_Q;
437 		feat_val = dec_params->quant.base_q_idx;
438 		if (segment_enabled) {
439 			if (update_data)
440 				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
441 			if (v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled, feat_id, segid))
442 				feat_val = feat_val_clip3(feat_val,
443 							  vp9_ctx->feature_data[segid][feat_id],
444 							  absolute, 255);
445 		}
446 		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);
447 
448 		/* Loop filter segment feature */
449 		feat_id = V4L2_VP9_SEG_LVL_ALT_L;
450 		feat_val = dec_params->lf.level;
451 		if (segment_enabled) {
452 			if (update_data)
453 				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
454 			if (v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled, feat_id, segid))
455 				feat_val = feat_val_clip3(feat_val,
456 							  vp9_ctx->feature_data[segid][feat_id],
457 							  absolute, 63);
458 		}
459 		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);
460 
461 		/* Reference frame segment feature */
462 		feat_id = V4L2_VP9_SEG_LVL_REF_FRAME;
463 		feat_val = 0;
464 		if (segment_enabled) {
465 			if (update_data)
466 				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
467 			if (!(dec_params->flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME) &&
468 			    v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled, feat_id, segid))
469 				feat_val = vp9_ctx->feature_data[segid][feat_id] + 1;
470 		}
471 		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);
472 
473 		/* Skip segment feature */
474 		feat_id = V4L2_VP9_SEG_LVL_SKIP;
475 		feat_val = 0;
476 		if (segment_enabled) {
477 			if (update_data)
478 				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
479 			feat_val = v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled,
480 							     feat_id, segid) ? 1 : 0;
481 		}
482 		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);
483 	}
484 }
485 
486 static void config_loop_filter(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
487 {
488 	bool d = dec_params->lf.flags & V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED;
489 
490 	hantro_reg_write(ctx->dev, &vp9_filt_level, dec_params->lf.level);
491 	hantro_reg_write(ctx->dev, &g2_out_filtering_dis, dec_params->lf.level == 0);
492 	hantro_reg_write(ctx->dev, &vp9_filt_sharpness, dec_params->lf.sharpness);
493 
494 	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_0, d ? dec_params->lf.ref_deltas[0] : 0);
495 	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_1, d ? dec_params->lf.ref_deltas[1] : 0);
496 	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_2, d ? dec_params->lf.ref_deltas[2] : 0);
497 	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_3, d ? dec_params->lf.ref_deltas[3] : 0);
498 	hantro_reg_write(ctx->dev, &vp9_filt_mb_adj_0, d ? dec_params->lf.mode_deltas[0] : 0);
499 	hantro_reg_write(ctx->dev, &vp9_filt_mb_adj_1, d ? dec_params->lf.mode_deltas[1] : 0);
500 }
501 
502 static void config_picture_dimensions(struct hantro_ctx *ctx, struct hantro_decoded_buffer *dst)
503 {
504 	u32 pic_w_4x4, pic_h_4x4;
505 
506 	hantro_reg_write(ctx->dev, &g2_pic_width_in_cbs, (dst->vp9.width + 7) / 8);
507 	hantro_reg_write(ctx->dev, &g2_pic_height_in_cbs, (dst->vp9.height + 7) / 8);
508 	pic_w_4x4 = roundup(dst->vp9.width, 8) >> 2;
509 	pic_h_4x4 = roundup(dst->vp9.height, 8) >> 2;
510 	hantro_reg_write(ctx->dev, &g2_pic_width_4x4, pic_w_4x4);
511 	hantro_reg_write(ctx->dev, &g2_pic_height_4x4, pic_h_4x4);
512 }
513 
514 static void
515 config_bit_depth(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
516 {
517 	if (ctx->dev->variant->legacy_regs) {
518 		hantro_reg_write(ctx->dev, &g2_bit_depth_y, dec_params->bit_depth);
519 		hantro_reg_write(ctx->dev, &g2_bit_depth_c, dec_params->bit_depth);
520 		hantro_reg_write(ctx->dev, &g2_pix_shift, 0);
521 	} else {
522 		hantro_reg_write(ctx->dev, &g2_bit_depth_y_minus8, dec_params->bit_depth - 8);
523 		hantro_reg_write(ctx->dev, &g2_bit_depth_c_minus8, dec_params->bit_depth - 8);
524 	}
525 }
526 
527 static inline bool is_lossless(const struct v4l2_vp9_quantization *quant)
528 {
529 	return quant->base_q_idx == 0 && quant->delta_q_uv_ac == 0 &&
530 	       quant->delta_q_uv_dc == 0 && quant->delta_q_y_dc == 0;
531 }
532 
533 static void
534 config_quant(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
535 {
536 	hantro_reg_write(ctx->dev, &vp9_qp_delta_y_dc, dec_params->quant.delta_q_y_dc);
537 	hantro_reg_write(ctx->dev, &vp9_qp_delta_ch_dc, dec_params->quant.delta_q_uv_dc);
538 	hantro_reg_write(ctx->dev, &vp9_qp_delta_ch_ac, dec_params->quant.delta_q_uv_ac);
539 	hantro_reg_write(ctx->dev, &vp9_lossless_e, is_lossless(&dec_params->quant));
540 }
541 
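/*
 * Translate the V4L2 interpolation filter value into the encoding used by
 * the G2 mcomp_filt_type field, which orders the filters differently than
 * the V4L2 enum does.
 */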
542 static u32
543 hantro_interp_filter_from_v4l2(unsigned int interpolation_filter)
544 {
545 	switch (interpolation_filter) {
546 	case V4L2_VP9_INTERP_FILTER_EIGHTTAP:
547 		return 0x1;
548 	case V4L2_VP9_INTERP_FILTER_EIGHTTAP_SMOOTH:
549 		return 0;
550 	case V4L2_VP9_INTERP_FILTER_EIGHTTAP_SHARP:
551 		return 0x2;
552 	case V4L2_VP9_INTERP_FILTER_BILINEAR:
553 		return 0x3;
554 	case V4L2_VP9_INTERP_FILTER_SWITCHABLE:
555 		return 0x4;
556 	}
557 
558 	return 0;
559 }
560 
561 static void
562 config_others(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params,
563 	      bool intra_only, bool resolution_change)
564 {
565 	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
566 
567 	hantro_reg_write(ctx->dev, &g2_idr_pic_e, intra_only);
568 
569 	hantro_reg_write(ctx->dev, &vp9_transform_mode, vp9_ctx->cur.tx_mode);
570 
571 	hantro_reg_write(ctx->dev, &vp9_mcomp_filt_type, intra_only ?
572 		0 : hantro_interp_filter_from_v4l2(dec_params->interpolation_filter));
573 
574 	hantro_reg_write(ctx->dev, &vp9_high_prec_mv_e,
575 			 !!(dec_params->flags & V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV));
576 
577 	hantro_reg_write(ctx->dev, &vp9_comp_pred_mode, dec_params->reference_mode);
578 
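	/*
	 * Temporal MV prediction is only usable when the previous frame's
	 * motion vectors are valid: no error-resilient mode, neither this
	 * frame nor the previous one is a key frame, this frame is not
	 * intra-only, the resolution did not change and the previous frame
	 * was shown.
	 */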
579 	hantro_reg_write(ctx->dev, &g2_tempor_mvp_e,
580 			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
581 			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME) &&
582 			 !(vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME) &&
583 			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_INTRA_ONLY) &&
584 			 !resolution_change &&
585 			 vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_SHOW_FRAME
586 	);
587 
588 	hantro_reg_write(ctx->dev, &g2_write_mvs_e,
589 			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME));
590 }
591 
592 static void
593 config_compound_reference(struct hantro_ctx *ctx,
594 			  const struct v4l2_ctrl_vp9_frame *dec_params)
595 {
596 	u32 comp_fixed_ref, comp_var_ref[2];
597 	bool last_ref_frame_sign_bias;
598 	bool golden_ref_frame_sign_bias;
599 	bool alt_ref_frame_sign_bias;
600 	bool comp_ref_allowed = false;
601 
602 	comp_fixed_ref = 0;
603 	comp_var_ref[0] = 0;
604 	comp_var_ref[1] = 0;
605 
606 	last_ref_frame_sign_bias = dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_LAST;
607 	golden_ref_frame_sign_bias = dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_GOLDEN;
608 	alt_ref_frame_sign_bias = dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_ALT;
609 
610 	/* 6.3.12 Frame reference mode syntax */
611 	comp_ref_allowed |= golden_ref_frame_sign_bias != last_ref_frame_sign_bias;
612 	comp_ref_allowed |= alt_ref_frame_sign_bias != last_ref_frame_sign_bias;
613 
614 	if (comp_ref_allowed) {
615 		if (last_ref_frame_sign_bias ==
616 		    golden_ref_frame_sign_bias) {
617 			comp_fixed_ref = ALTREF_FRAME;
618 			comp_var_ref[0] = LAST_FRAME;
619 			comp_var_ref[1] = GOLDEN_FRAME;
620 		} else if (last_ref_frame_sign_bias ==
621 			   alt_ref_frame_sign_bias) {
622 			comp_fixed_ref = GOLDEN_FRAME;
623 			comp_var_ref[0] = LAST_FRAME;
624 			comp_var_ref[1] = ALTREF_FRAME;
625 		} else {
626 			comp_fixed_ref = LAST_FRAME;
627 			comp_var_ref[0] = GOLDEN_FRAME;
628 			comp_var_ref[1] = ALTREF_FRAME;
629 		}
630 	}
631 
632 	hantro_reg_write(ctx->dev, &vp9_comp_pred_fixed_ref, comp_fixed_ref);
633 	hantro_reg_write(ctx->dev, &vp9_comp_pred_var_ref0, comp_var_ref[0]);
634 	hantro_reg_write(ctx->dev, &vp9_comp_pred_var_ref1, comp_var_ref[1]);
635 }
636 
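/*
 * struct hantro_g2_probs stores each three-probability coefficient node
 * padded to four bytes; INNER_LOOP copies one node and zeroes the padding
 * byte.
 */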
637 #define INNER_LOOP \
638 do {									\
639 	for (m = 0; m < ARRAY_SIZE(adaptive->coef[0][0][0][0]); ++m) {	\
640 		memcpy(adaptive->coef[i][j][k][l][m],			\
641 		       probs->coef[i][j][k][l][m],			\
642 		       sizeof(probs->coef[i][j][k][l][m]));		\
643 									\
644 		adaptive->coef[i][j][k][l][m][3] = 0;			\
645 	}								\
646 } while (0)
647 
648 static void config_probs(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
649 {
650 	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
651 	struct hantro_aux_buf *misc = &vp9_ctx->misc;
652 	struct hantro_g2_all_probs *all_probs = misc->cpu;
653 	struct hantro_g2_probs *adaptive;
654 	struct hantro_g2_mv_probs *mv;
655 	const struct v4l2_vp9_segmentation *seg = &dec_params->seg;
656 	const struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
657 	int i, j, k, l, m;
658 
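	/*
	 * The hardware mode probability rows hold eight entries; the ninth
	 * probability of each row goes into the separate *_tail arrays.
	 */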
659 	for (i = 0; i < ARRAY_SIZE(all_probs->kf_y_mode_prob); ++i)
660 		for (j = 0; j < ARRAY_SIZE(all_probs->kf_y_mode_prob[0]); ++j) {
661 			memcpy(all_probs->kf_y_mode_prob[i][j],
662 			       v4l2_vp9_kf_y_mode_prob[i][j],
663 			       ARRAY_SIZE(all_probs->kf_y_mode_prob[i][j]));
664 
665 			all_probs->kf_y_mode_prob_tail[i][j][0] =
666 				v4l2_vp9_kf_y_mode_prob[i][j][8];
667 		}
668 
669 	memcpy(all_probs->mb_segment_tree_probs, seg->tree_probs,
670 	       sizeof(all_probs->mb_segment_tree_probs));
671 
672 	memcpy(all_probs->segment_pred_probs, seg->pred_probs,
673 	       sizeof(all_probs->segment_pred_probs));
674 
675 	for (i = 0; i < ARRAY_SIZE(all_probs->kf_uv_mode_prob); ++i) {
676 		memcpy(all_probs->kf_uv_mode_prob[i], v4l2_vp9_kf_uv_mode_prob[i],
677 		       ARRAY_SIZE(all_probs->kf_uv_mode_prob[i]));
678 
679 		all_probs->kf_uv_mode_prob_tail[i][0] = v4l2_vp9_kf_uv_mode_prob[i][8];
680 	}
681 
682 	adaptive = &all_probs->probs;
683 
684 	for (i = 0; i < ARRAY_SIZE(adaptive->inter_mode); ++i) {
685 		memcpy(adaptive->inter_mode[i], probs->inter_mode[i],
686 		       ARRAY_SIZE(probs->inter_mode[i]));
687 
688 		adaptive->inter_mode[i][3] = 0;
689 	}
690 
691 	memcpy(adaptive->is_inter, probs->is_inter, sizeof(adaptive->is_inter));
692 
693 	for (i = 0; i < ARRAY_SIZE(adaptive->uv_mode); ++i) {
694 		memcpy(adaptive->uv_mode[i], probs->uv_mode[i],
695 		       sizeof(adaptive->uv_mode[i]));
696 		adaptive->uv_mode_tail[i][0] = probs->uv_mode[i][8];
697 	}
698 
699 	memcpy(adaptive->tx8, probs->tx8, sizeof(adaptive->tx8));
700 	memcpy(adaptive->tx16, probs->tx16, sizeof(adaptive->tx16));
701 	memcpy(adaptive->tx32, probs->tx32, sizeof(adaptive->tx32));
702 
703 	for (i = 0; i < ARRAY_SIZE(adaptive->y_mode); ++i) {
704 		memcpy(adaptive->y_mode[i], probs->y_mode[i],
705 		       ARRAY_SIZE(adaptive->y_mode[i]));
706 
707 		adaptive->y_mode_tail[i][0] = probs->y_mode[i][8];
708 	}
709 
710 	for (i = 0; i < ARRAY_SIZE(adaptive->partition[0]); ++i) {
711 		memcpy(adaptive->partition[0][i], v4l2_vp9_kf_partition_probs[i],
712 		       sizeof(v4l2_vp9_kf_partition_probs[i]));
713 
714 		adaptive->partition[0][i][3] = 0;
715 	}
716 
717 	for (i = 0; i < ARRAY_SIZE(adaptive->partition[1]); ++i) {
718 		memcpy(adaptive->partition[1][i], probs->partition[i],
719 		       sizeof(probs->partition[i]));
720 
721 		adaptive->partition[1][i][3] = 0;
722 	}
723 
724 	memcpy(adaptive->interp_filter, probs->interp_filter,
725 	       sizeof(adaptive->interp_filter));
726 
727 	memcpy(adaptive->comp_mode, probs->comp_mode, sizeof(adaptive->comp_mode));
728 
729 	memcpy(adaptive->skip, probs->skip, sizeof(adaptive->skip));
730 
731 	mv = &adaptive->mv;
732 
733 	memcpy(mv->joint, probs->mv.joint, sizeof(mv->joint));
734 	memcpy(mv->sign, probs->mv.sign, sizeof(mv->sign));
735 	memcpy(mv->class0_bit, probs->mv.class0_bit, sizeof(mv->class0_bit));
736 	memcpy(mv->fr, probs->mv.fr, sizeof(mv->fr));
737 	memcpy(mv->class0_hp, probs->mv.class0_hp, sizeof(mv->class0_hp));
738 	memcpy(mv->hp, probs->mv.hp, sizeof(mv->hp));
739 	memcpy(mv->classes, probs->mv.classes, sizeof(mv->classes));
740 	memcpy(mv->class0_fr, probs->mv.class0_fr, sizeof(mv->class0_fr));
741 	memcpy(mv->bits, probs->mv.bits, sizeof(mv->bits));
742 
743 	memcpy(adaptive->single_ref, probs->single_ref, sizeof(adaptive->single_ref));
744 
745 	memcpy(adaptive->comp_ref, probs->comp_ref, sizeof(adaptive->comp_ref));
746 
747 	for (i = 0; i < ARRAY_SIZE(adaptive->coef); ++i)
748 		for (j = 0; j < ARRAY_SIZE(adaptive->coef[0]); ++j)
749 			for (k = 0; k < ARRAY_SIZE(adaptive->coef[0][0]); ++k)
750 				for (l = 0; l < ARRAY_SIZE(adaptive->coef[0][0][0]); ++l)
751 					INNER_LOOP;
752 
753 	hantro_write_addr(ctx->dev, G2_VP9_PROBS_ADDR, misc->dma);
754 }
755 
756 static void config_counts(struct hantro_ctx *ctx)
757 {
758 	struct hantro_vp9_dec_hw_ctx *vp9_dec = &ctx->vp9_dec;
759 	struct hantro_aux_buf *misc = &vp9_dec->misc;
760 	dma_addr_t addr = misc->dma + vp9_dec->ctx_counters_offset;
761 
762 	hantro_write_addr(ctx->dev, G2_VP9_CTX_COUNT_ADDR, addr);
763 }
764 
765 static void config_seg_map(struct hantro_ctx *ctx,
766 			   const struct v4l2_ctrl_vp9_frame *dec_params,
767 			   bool intra_only, bool update_map)
768 {
769 	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
770 	struct hantro_aux_buf *segment_map = &vp9_ctx->segment_map;
771 	dma_addr_t addr;
772 
773 	if (intra_only ||
774 	    (dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT)) {
775 		memset(segment_map->cpu, 0, segment_map->size);
776 		memset(vp9_ctx->feature_data, 0, sizeof(vp9_ctx->feature_data));
777 		memset(vp9_ctx->feature_enabled, 0, sizeof(vp9_ctx->feature_enabled));
778 	}
779 
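	/*
	 * The segment map is double-buffered: the decoder reads the map
	 * produced for the previous frame and writes the updated map to the
	 * other half.  The halves are swapped only when this frame actually
	 * updates the map.
	 */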
780 	addr = segment_map->dma + vp9_ctx->active_segment * vp9_ctx->segment_map_size;
781 	hantro_write_addr(ctx->dev, G2_VP9_SEGMENT_READ_ADDR, addr);
782 
783 	addr = segment_map->dma + (1 - vp9_ctx->active_segment) * vp9_ctx->segment_map_size;
784 	hantro_write_addr(ctx->dev, G2_VP9_SEGMENT_WRITE_ADDR, addr);
785 
786 	if (update_map)
787 		vp9_ctx->active_segment = 1 - vp9_ctx->active_segment;
788 }
789 
790 static void
791 config_source(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params,
792 	      struct vb2_v4l2_buffer *vb2_src)
793 {
794 	dma_addr_t stream_base, tmp_addr;
795 	unsigned int headres_size;
796 	u32 src_len, start_bit, src_buf_len;
797 
798 	headres_size = dec_params->uncompressed_header_size
799 		     + dec_params->compressed_header_size;
800 
801 	stream_base = vb2_dma_contig_plane_dma_addr(&vb2_src->vb2_buf, 0);
802 
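	/*
	 * The compressed frame data follows the uncompressed and compressed
	 * headers already parsed by userspace.  Legacy cores take a 16-byte
	 * aligned stream address plus a bit offset, newer cores take the
	 * buffer base address plus a byte offset to the aligned start.
	 */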
803 	tmp_addr = stream_base + headres_size;
804 	if (ctx->dev->variant->legacy_regs)
805 		hantro_write_addr(ctx->dev, G2_STREAM_ADDR, (tmp_addr & ~0xf));
806 	else
807 		hantro_write_addr(ctx->dev, G2_STREAM_ADDR, stream_base);
808 
809 	start_bit = (tmp_addr & 0xf) * 8;
810 	hantro_reg_write(ctx->dev, &g2_start_bit, start_bit);
811 
812 	src_len = vb2_get_plane_payload(&vb2_src->vb2_buf, 0);
813 	src_len += start_bit / 8 - headres_size;
814 	hantro_reg_write(ctx->dev, &g2_stream_len, src_len);
815 
816 	if (!ctx->dev->variant->legacy_regs) {
817 		tmp_addr &= ~0xf;
818 		hantro_reg_write(ctx->dev, &g2_strm_start_offset, tmp_addr - stream_base);
819 		src_buf_len = vb2_plane_size(&vb2_src->vb2_buf, 0);
820 		hantro_reg_write(ctx->dev, &g2_strm_buffer_len, src_buf_len);
821 	}
822 }
823 
824 static void
825 config_registers(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params,
826 		 struct vb2_v4l2_buffer *vb2_src, struct vb2_v4l2_buffer *vb2_dst)
827 {
828 	struct hantro_decoded_buffer *dst, *last, *mv_ref;
829 	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
830 	const struct v4l2_vp9_segmentation *seg;
831 	bool intra_only, resolution_change;
832 
833 	/* VP9-specific setup */
834 	dst = vb2_to_hantro_decoded_buf(&vb2_dst->vb2_buf);
835 
836 	if (vp9_ctx->last.valid)
837 		last = get_ref_buf(ctx, &dst->base.vb, vp9_ctx->last.timestamp);
838 	else
839 		last = dst;
840 
841 	update_dec_buf_info(dst, dec_params);
842 	update_ctx_cur_info(vp9_ctx, dst, dec_params);
843 	seg = &dec_params->seg;
844 
845 	intra_only = !!(dec_params->flags &
846 			(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
847 			V4L2_VP9_FRAME_FLAG_INTRA_ONLY));
848 
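	/*
	 * Use the previous frame's motion vectors for temporal MV prediction
	 * only when they are usable; otherwise point the hardware at the
	 * current frame's own MV area.
	 */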
849 	if (!intra_only &&
850 	    !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
851 	    vp9_ctx->last.valid)
852 		mv_ref = last;
853 	else
854 		mv_ref = dst;
855 
856 	resolution_change = dst->vp9.width != last->vp9.width ||
857 			    dst->vp9.height != last->vp9.height;
858 
859 	/* configure basic registers */
860 	hantro_reg_write(ctx->dev, &g2_mode, VP9_DEC_MODE);
861 	if (!ctx->dev->variant->legacy_regs) {
862 		hantro_reg_write(ctx->dev, &g2_strm_swap, 0xf);
863 		hantro_reg_write(ctx->dev, &g2_dirmv_swap, 0xf);
864 		hantro_reg_write(ctx->dev, &g2_compress_swap, 0xf);
865 		hantro_reg_write(ctx->dev, &g2_ref_compress_bypass, 1);
866 	} else {
867 		hantro_reg_write(ctx->dev, &g2_strm_swap_old, 0x1f);
868 		hantro_reg_write(ctx->dev, &g2_pic_swap, 0x10);
869 		hantro_reg_write(ctx->dev, &g2_dirmv_swap_old, 0x10);
870 		hantro_reg_write(ctx->dev, &g2_tab0_swap_old, 0x10);
871 		hantro_reg_write(ctx->dev, &g2_tab1_swap_old, 0x10);
872 		hantro_reg_write(ctx->dev, &g2_tab2_swap_old, 0x10);
873 		hantro_reg_write(ctx->dev, &g2_tab3_swap_old, 0x10);
874 		hantro_reg_write(ctx->dev, &g2_rscan_swap, 0x10);
875 	}
876 	hantro_reg_write(ctx->dev, &g2_buswidth, BUS_WIDTH_128);
877 	hantro_reg_write(ctx->dev, &g2_max_burst, 16);
878 	hantro_reg_write(ctx->dev, &g2_apf_threshold, 8);
879 	hantro_reg_write(ctx->dev, &g2_clk_gate_e, 1);
880 	hantro_reg_write(ctx->dev, &g2_max_cb_size, 6);
881 	hantro_reg_write(ctx->dev, &g2_min_cb_size, 3);
882 	if (ctx->dev->variant->double_buffer)
883 		hantro_reg_write(ctx->dev, &g2_double_buffer_e, 1);
884 
885 	config_output(ctx, dst, dec_params);
886 
887 	if (!intra_only)
888 		config_ref_registers(ctx, dec_params, dst, mv_ref);
889 
890 	config_tiles(ctx, dec_params, dst);
891 	config_segment(ctx, dec_params);
892 	config_loop_filter(ctx, dec_params);
893 	config_picture_dimensions(ctx, dst);
894 	config_bit_depth(ctx, dec_params);
895 	config_quant(ctx, dec_params);
896 	config_others(ctx, dec_params, intra_only, resolution_change);
897 	config_compound_reference(ctx, dec_params);
898 	config_probs(ctx, dec_params);
899 	config_counts(ctx);
900 	config_seg_map(ctx, dec_params, intra_only,
901 		       seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP);
902 	config_source(ctx, dec_params, vb2_src);
903 }
904 
905 int hantro_g2_vp9_dec_run(struct hantro_ctx *ctx)
906 {
907 	const struct v4l2_ctrl_vp9_frame *decode_params;
908 	struct vb2_v4l2_buffer *src;
909 	struct vb2_v4l2_buffer *dst;
910 	int ret;
911 
912 	hantro_g2_check_idle(ctx->dev);
913 
914 	ret = start_prepare_run(ctx, &decode_params);
915 	if (ret) {
916 		hantro_end_prepare_run(ctx);
917 		return ret;
918 	}
919 
920 	src = hantro_get_src_buf(ctx);
921 	dst = hantro_get_dst_buf(ctx);
922 
923 	config_registers(ctx, decode_params, src, dst);
924 
925 	hantro_end_prepare_run(ctx);
926 
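	/* Kick the hardware: set the decode enable bit in the interrupt register. */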
927 	vdpu_write(ctx->dev, G2_REG_INTERRUPT_DEC_E, G2_REG_INTERRUPT);
928 
929 	return 0;
930 }
931 
932 #define copy_tx_and_skip(p1, p2)				\
933 do {								\
934 	memcpy((p1)->tx8, (p2)->tx8, sizeof((p1)->tx8));	\
935 	memcpy((p1)->tx16, (p2)->tx16, sizeof((p1)->tx16));	\
936 	memcpy((p1)->tx32, (p2)->tx32, sizeof((p1)->tx32));	\
937 	memcpy((p1)->skip, (p2)->skip, sizeof((p1)->skip));	\
938 } while (0)
939 
940 void hantro_g2_vp9_dec_done(struct hantro_ctx *ctx)
941 {
942 	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
943 	unsigned int fctx_idx;
944 
945 	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX))
946 		goto out_update_last;
947 
948 	fctx_idx = vp9_ctx->cur.frame_context_idx;
949 
950 	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE)) {
951 		/* error_resilient_mode == 0 && frame_parallel_decoding_mode == 0 */
952 		struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
953 		bool frame_is_intra = vp9_ctx->cur.flags &
954 		    (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY);
955 		struct tx_and_skip {
956 			u8 tx8[2][1];
957 			u8 tx16[2][2];
958 			u8 tx32[2][3];
959 			u8 skip[3];
960 		} _tx_skip, *tx_skip = &_tx_skip;
961 		struct v4l2_vp9_frame_symbol_counts *counts;
962 		struct symbol_counts *hantro_cnts;
963 		u32 tx16p[2][4];
964 		int i;
965 
966 		/* buffer the forward-updated TX and skip probs */
967 		if (frame_is_intra)
968 			copy_tx_and_skip(tx_skip, probs);
969 
970 		/* 6.1.2 refresh_probs(): load_probs() and load_probs2() */
971 		*probs = vp9_ctx->frame_context[fctx_idx];
972 
973 		/* if FrameIsIntra then undo the effect of load_probs2() */
974 		if (frame_is_intra)
975 			copy_tx_and_skip(probs, tx_skip);
976 
977 		counts = &vp9_ctx->cnts;
978 		hantro_cnts = vp9_ctx->misc.cpu + vp9_ctx->ctx_counters_offset;
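		/*
		 * The v4l2-vp9 adaptation helpers expect four counters per
		 * TX 16x16 context; copy what the hardware provides and zero
		 * the last entry.
		 */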
979 		for (i = 0; i < ARRAY_SIZE(tx16p); ++i) {
980 			memcpy(tx16p[i],
981 			       hantro_cnts->tx16x16_count[i],
982 			       sizeof(hantro_cnts->tx16x16_count[0]));
983 			tx16p[i][3] = 0;
984 		}
985 		counts->tx16p = &tx16p;
986 
987 		v4l2_vp9_adapt_coef_probs(probs, counts,
988 					  !vp9_ctx->last.valid ||
989 					  vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME,
990 					  frame_is_intra);
991 
992 		if (!frame_is_intra) {
993 			/* load_probs2() already done */
994 			u32 mv_mode[7][4];
995 
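			/*
			 * Repack the hardware inter-mode counters into the
			 * layout expected by v4l2_vp9_adapt_noncoef_probs().
			 */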
996 			for (i = 0; i < ARRAY_SIZE(mv_mode); ++i) {
997 				mv_mode[i][0] = hantro_cnts->inter_mode_counts[i][1][0];
998 				mv_mode[i][1] = hantro_cnts->inter_mode_counts[i][2][0];
999 				mv_mode[i][2] = hantro_cnts->inter_mode_counts[i][0][0];
1000 				mv_mode[i][3] = hantro_cnts->inter_mode_counts[i][2][1];
1001 			}
1002 			counts->mv_mode = &mv_mode;
1003 			v4l2_vp9_adapt_noncoef_probs(&vp9_ctx->probability_tables, counts,
1004 						     vp9_ctx->cur.reference_mode,
1005 						     vp9_ctx->cur.interpolation_filter,
1006 						     vp9_ctx->cur.tx_mode, vp9_ctx->cur.flags);
1007 		}
1008 	}
1009 
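	/* 6.1.2 refresh_probs(): save_probs(fctx_idx) */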
1010 	vp9_ctx->frame_context[fctx_idx] = vp9_ctx->probability_tables;
1011 
1012 out_update_last:
1013 	vp9_ctx->last = vp9_ctx->cur;
1014 }
1015