FFmpeg: libavfilter/vf_chromakey.c
/*
 * Copyright (c) 2015 Timo Rothenpieler <timo@rothenpieler.org>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/opt.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"
#include "filters.h"

typedef struct ChromakeyContext {
    const AVClass *class;

    uint8_t chromakey_rgba[4];
    uint16_t chromakey_uv[2];

    float similarity;
    float blend;

    int is_yuv;
    int depth;
    int mid;
    int max;

    int hsub_log2;
    int vsub_log2;

    int (*do_slice)(AVFilterContext *ctx, void *arg,
                    int jobnr, int nb_jobs);
} ChromakeyContext;

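/*
 * Compute the key alpha for one output pixel from a 3x3 neighbourhood of
 * chroma samples: average the Euclidean UV distance to the key colour,
 * normalised to [0,1], then map it through similarity/blend.  Without
 * blend the result is a hard 0/255 (or 0/max) mask; with blend it ramps
 * linearly from transparent (close to the key colour) to opaque.
 */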
static uint8_t do_chromakey_pixel(ChromakeyContext *ctx, uint8_t u[9], uint8_t v[9])
{
    double diff = 0.0;
    int du, dv, i;

    for (i = 0; i < 9; ++i) {
        du = (int)u[i] - ctx->chromakey_uv[0];
        dv = (int)v[i] - ctx->chromakey_uv[1];

        diff += sqrt((du * du + dv * dv) / (255.0 * 255.0 * 2));
    }

    diff /= 9.0;

    if (ctx->blend > 0.0001) {
        return av_clipd((diff - ctx->similarity) / ctx->blend, 0.0, 1.0) * 255.0;
    } else {
        return (diff > ctx->similarity) ? 255 : 0;
    }
}

static uint16_t do_chromakey_pixel16(ChromakeyContext *ctx, uint16_t u[9], uint16_t v[9])
{
    double max = ctx->max;
    double diff = 0.0;
    int du, dv, i;

    for (i = 0; i < 9; ++i) {
        du = (int)u[i] - ctx->chromakey_uv[0];
        dv = (int)v[i] - ctx->chromakey_uv[1];

        diff += sqrt((du * du + dv * dv) / (max * max * 2));
    }

    diff /= 9.0;

    if (ctx->blend > 0.0001) {
        return av_clipd((diff - ctx->similarity) / ctx->blend, 0.0, 1.0) * max;
    } else {
        return (diff > ctx->similarity) ? max : 0;
    }
}

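/*
 * Fetch the chroma sample pair for luma position (x, y), honouring the
 * chroma subsampling of the format.  Out-of-frame positions leave *u and
 * *v untouched, so callers pre-fill the 3x3 neighbourhood with the key
 * colour and frame borders do not skew the average.
 */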
static av_always_inline void get_pixel_uv(AVFrame *frame, int hsub_log2, int vsub_log2, int x, int y, uint8_t *u, uint8_t *v)
{
    if (x < 0 || x >= frame->width || y < 0 || y >= frame->height)
        return;

    x >>= hsub_log2;
    y >>= vsub_log2;

    *u = frame->data[1][frame->linesize[1] * y + x];
    *v = frame->data[2][frame->linesize[2] * y + x];
}

static av_always_inline void get_pixel16_uv(AVFrame *frame, int hsub_log2, int vsub_log2, int x, int y, uint16_t *u, uint16_t *v)
{
    if (x < 0 || x >= frame->width || y < 0 || y >= frame->height)
        return;

    x >>= hsub_log2;
    y >>= vsub_log2;

    *u = AV_RN16(&frame->data[1][frame->linesize[1] * y + 2 * x]);
    *v = AV_RN16(&frame->data[2][frame->linesize[2] * y + 2 * x]);
}

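/*
 * Slice workers for the chromakey filter: each job handles a band of rows
 * and writes the computed alpha into plane 3 at full luma resolution,
 * sampling U/V through the subsampling-aware helpers above.  The 16-bit
 * variant differs only in sample width.
 */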
static int do_chromakey_slice(AVFilterContext *avctx, void *arg, int jobnr, int nb_jobs)
{
    AVFrame *frame = arg;

    const int slice_start = (frame->height * jobnr) / nb_jobs;
    const int slice_end = (frame->height * (jobnr + 1)) / nb_jobs;

    ChromakeyContext *ctx = avctx->priv;

    int x, y, xo, yo;
    uint8_t u[9], v[9];

    memset(u, ctx->chromakey_uv[0], sizeof(u));
    memset(v, ctx->chromakey_uv[1], sizeof(v));

    for (y = slice_start; y < slice_end; ++y) {
        for (x = 0; x < frame->width; ++x) {
            for (yo = 0; yo < 3; ++yo) {
                for (xo = 0; xo < 3; ++xo) {
                    get_pixel_uv(frame, ctx->hsub_log2, ctx->vsub_log2, x + xo - 1, y + yo - 1, &u[yo * 3 + xo], &v[yo * 3 + xo]);
                }
            }

            frame->data[3][frame->linesize[3] * y + x] = do_chromakey_pixel(ctx, u, v);
        }
    }

    return 0;
}

static int do_chromakey16_slice(AVFilterContext *avctx, void *arg, int jobnr, int nb_jobs)
{
    AVFrame *frame = arg;

    const int slice_start = (frame->height * jobnr) / nb_jobs;
    const int slice_end = (frame->height * (jobnr + 1)) / nb_jobs;

    ChromakeyContext *ctx = avctx->priv;

    int x, y, xo, yo;
    uint16_t u[9], v[9];

    for (int i = 0; i < 9; i++) {
        u[i] = ctx->chromakey_uv[0];
        v[i] = ctx->chromakey_uv[1];
    }

    for (y = slice_start; y < slice_end; ++y) {
        for (x = 0; x < frame->width; ++x) {
            uint16_t *dst = (uint16_t *)(frame->data[3] + frame->linesize[3] * y);

            for (yo = 0; yo < 3; ++yo) {
                for (xo = 0; xo < 3; ++xo) {
                    get_pixel16_uv(frame, ctx->hsub_log2, ctx->vsub_log2, x + xo - 1, y + yo - 1, &u[yo * 3 + xo], &v[yo * 3 + xo]);
                }
            }

            dst[x] = do_chromakey_pixel16(ctx, u, v);
        }
    }

    return 0;
}

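/*
 * Slice workers for the chromahold filter: no alpha is produced; instead
 * the chroma planes are rewritten in place.  Pixels whose UV distance from
 * the key colour exceeds the similarity threshold are pulled to neutral
 * chroma (grey); with blend > 0 the chroma is scaled towards neutral
 * gradually instead of switched hard.
 */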
static int do_chromahold_slice(AVFilterContext *avctx, void *arg, int jobnr, int nb_jobs)
{
    ChromakeyContext *ctx = avctx->priv;
    AVFrame *frame = arg;
    const int slice_start = ((frame->height >> ctx->vsub_log2) * jobnr) / nb_jobs;
    const int slice_end = ((frame->height >> ctx->vsub_log2) * (jobnr + 1)) / nb_jobs;

    int x, y, alpha;

    for (y = slice_start; y < slice_end; ++y) {
        for (x = 0; x < frame->width >> ctx->hsub_log2; ++x) {
            int u = frame->data[1][frame->linesize[1] * y + x];
            int v = frame->data[2][frame->linesize[2] * y + x];
            double diff;
            int du, dv;

            du = u - ctx->chromakey_uv[0];
            dv = v - ctx->chromakey_uv[1];

            diff = sqrt((du * du + dv * dv) / (255.0 * 255.0 * 2.0));

            alpha = diff > ctx->similarity;
            if (ctx->blend > 0.0001) {
                double f = 1. - av_clipd((diff - ctx->similarity) / ctx->blend, 0.0, 1.0);

                frame->data[1][frame->linesize[1] * y + x] = 128 + (u - 128) * f;
                frame->data[2][frame->linesize[2] * y + x] = 128 + (v - 128) * f;
            } else if (alpha) {
                frame->data[1][frame->linesize[1] * y + x] = 128;
                frame->data[2][frame->linesize[2] * y + x] = 128;
            }
        }
    }

    return 0;
}

static int do_chromahold16_slice(AVFilterContext *avctx, void *arg, int jobnr, int nb_jobs)
{
    ChromakeyContext *ctx = avctx->priv;
    AVFrame *frame = arg;
    const int slice_start = ((frame->height >> ctx->vsub_log2) * jobnr) / nb_jobs;
    const int slice_end = ((frame->height >> ctx->vsub_log2) * (jobnr + 1)) / nb_jobs;
    const int mid = ctx->mid;
    double max = ctx->max;

    int x, y, alpha;

    for (y = slice_start; y < slice_end; ++y) {
        for (x = 0; x < frame->width >> ctx->hsub_log2; ++x) {
            int u = AV_RN16(&frame->data[1][frame->linesize[1] * y + 2 * x]);
            int v = AV_RN16(&frame->data[2][frame->linesize[2] * y + 2 * x]);
            double diff;
            int du, dv;

            du = u - ctx->chromakey_uv[0];
            dv = v - ctx->chromakey_uv[1];

            diff = sqrt((du * du + dv * dv) / (max * max * 2.0));

            alpha = diff > ctx->similarity;
            if (ctx->blend > 0.0001) {
                double f = 1. - av_clipd((diff - ctx->similarity) / ctx->blend, 0.0, 1.0);

                AV_WN16(&frame->data[1][frame->linesize[1] * y + 2 * x], mid + (u - mid) * f);
                AV_WN16(&frame->data[2][frame->linesize[2] * y + 2 * x], mid + (v - mid) * f);
            } else if (alpha) {
                AV_WN16(&frame->data[1][frame->linesize[1] * y + 2 * x], mid);
                AV_WN16(&frame->data[2][frame->linesize[2] * y + 2 * x], mid);
            }
        }
    }

    return 0;
}

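/*
 * Per-frame entry point shared by both filters: run the selected slice
 * worker across the available threads and pass the in-place modified frame
 * downstream.  For chromakey the frame is additionally tagged with the
 * output link's alpha mode (straight alpha, see config_output()).
 */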
static int filter_frame(AVFilterLink *link, AVFrame *frame)
{
    AVFilterContext *avctx = link->dst;
    ChromakeyContext *ctx = avctx->priv;
    int res;

    if (res = ff_filter_execute(avctx, ctx->do_slice, frame, NULL,
                                FFMIN(frame->height, ff_filter_get_nb_threads(avctx))))
        return res;

    if (!strcmp(avctx->filter->name, "chromakey"))
        frame->alpha_mode = avctx->outputs[0]->alpha_mode;
    return ff_filter_frame(avctx->outputs[0], frame);
}

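/*
 * Fixed-point BT.601 RGB -> U/V conversion (10 fractional bits, biased
 * before the shift), used to turn the user-supplied RGB key colour into
 * the chroma pair that the distance tests work on.  For example, pure
 * green (0, 255, 0) maps to roughly U = 44, V = 21 at 8-bit depth.
 */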
#define FIXNUM(x) lrint((x) * (1 << 10))
#define RGB_TO_U(rgb) (((- FIXNUM(0.16874) * rgb[0] - FIXNUM(0.33126) * rgb[1] + FIXNUM(0.50000) * rgb[2] + (1 << 9) - 1) >> 10) + 128)
#define RGB_TO_V(rgb) ((( FIXNUM(0.50000) * rgb[0] - FIXNUM(0.41869) * rgb[1] - FIXNUM(0.08131) * rgb[2] + (1 << 9) - 1) >> 10) + 128)

static av_cold int config_output(AVFilterLink *outlink)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format);
    AVFilterContext *avctx = outlink->src;
    ChromakeyContext *ctx = avctx->priv;
    int factor;

    ctx->depth = desc->comp[0].depth;
    ctx->mid = 1 << (ctx->depth - 1);
    ctx->max = (1 << ctx->depth) - 1;

    factor = 1 << (ctx->depth - 8);

    if (ctx->is_yuv) {
        ctx->chromakey_uv[0] = ctx->chromakey_rgba[1] * factor;
        ctx->chromakey_uv[1] = ctx->chromakey_rgba[2] * factor;
    } else {
        ctx->chromakey_uv[0] = RGB_TO_U(ctx->chromakey_rgba) * factor;
        ctx->chromakey_uv[1] = RGB_TO_V(ctx->chromakey_rgba) * factor;
    }

    if (!strcmp(avctx->filter->name, "chromakey")) {
        outlink->alpha_mode = AVALPHA_MODE_STRAIGHT;
        ctx->do_slice = ctx->depth <= 8 ? do_chromakey_slice : do_chromakey16_slice;
    } else {
        ctx->do_slice = ctx->depth <= 8 ? do_chromahold_slice : do_chromahold16_slice;
    }

    return 0;
}

static av_cold int config_input(AVFilterLink *inlink)
{
    AVFilterContext *avctx = inlink->dst;
    ChromakeyContext *ctx = avctx->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);

    ctx->hsub_log2 = desc->log2_chroma_w;
    ctx->vsub_log2 = desc->log2_chroma_h;

    return 0;
}

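/*
 * Runtime option updates: apply them through the generic command handler,
 * then rerun config_output() so the derived key UV pair and the slice
 * callback stay in sync with the new values.
 */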
static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
                           char *res, int res_len, int flags)
{
    int ret;

    ret = ff_filter_process_command(ctx, cmd, args, res, res_len, flags);
    if (ret < 0)
        return ret;

    return config_output(ctx->outputs[0]);
}

static const AVFilterPad inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .flags        = AVFILTERPAD_FLAG_NEEDS_WRITABLE,
        .filter_frame = filter_frame,
        .config_props = config_input,
    },
};

static const AVFilterPad outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
};

#define OFFSET(x) offsetof(ChromakeyContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM

static const AVOption chromakey_options[] = {
    { "color", "set the chromakey key color", OFFSET(chromakey_rgba), AV_OPT_TYPE_COLOR, { .str = "black" }, 0, 0, FLAGS },
    { "similarity", "set the chromakey similarity value", OFFSET(similarity), AV_OPT_TYPE_FLOAT, { .dbl = 0.01 }, 0.00001, 1.0, FLAGS },
    { "blend", "set the chromakey key blend value", OFFSET(blend), AV_OPT_TYPE_FLOAT, { .dbl = 0.0 }, 0.0, 1.0, FLAGS },
    { "yuv", "color parameter is in yuv instead of rgb", OFFSET(is_yuv), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
    { NULL }
};

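/*
 * Illustrative command-line use (not part of the original source); the
 * similarity/blend values below are placeholders that depend on the
 * footage:
 *
 *   ffmpeg -i bg.mp4 -i fg.mp4 -filter_complex \
 *     "[1:v]chromakey=color=green:similarity=0.1:blend=0.05[keyed];[0:v][keyed]overlay" \
 *     out.mp4
 */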
static const enum AVPixelFormat chromakey_fmts[] = {
    AV_PIX_FMT_YUVA420P,
    AV_PIX_FMT_YUVA422P,
    AV_PIX_FMT_YUVA444P,
    AV_PIX_FMT_YUVA420P9,  AV_PIX_FMT_YUVA422P9,  AV_PIX_FMT_YUVA444P9,
    AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10,
    AV_PIX_FMT_YUVA422P12, AV_PIX_FMT_YUVA444P12,
    AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16,
    AV_PIX_FMT_NONE
};

AVFILTER_DEFINE_CLASS(chromakey);

const FFFilter ff_vf_chromakey = {
    .p.name        = "chromakey",
    .p.description = NULL_IF_CONFIG_SMALL("Turns a certain color into transparency. Operates on YUV colors."),
    .p.priv_class  = &chromakey_class,
    .p.flags       = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,
    .priv_size     = sizeof(ChromakeyContext),
    FILTER_INPUTS(inputs),
    FILTER_OUTPUTS(outputs),
    FILTER_PIXFMTS_ARRAY(chromakey_fmts),
    .process_command = process_command,
};

static const AVOption chromahold_options[] = {
    { "color", "set the chromahold key color", OFFSET(chromakey_rgba), AV_OPT_TYPE_COLOR, { .str = "black" }, 0, 0, FLAGS },
    { "similarity", "set the chromahold similarity value", OFFSET(similarity), AV_OPT_TYPE_FLOAT, { .dbl = 0.01 }, 0.00001, 1.0, FLAGS },
    { "blend", "set the chromahold blend value", OFFSET(blend), AV_OPT_TYPE_FLOAT, { .dbl = 0.0 }, 0.0, 1.0, FLAGS },
    { "yuv", "color parameter is in yuv instead of rgb", OFFSET(is_yuv), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
    { NULL }
};

static const enum AVPixelFormat hold_pixel_fmts[] = {
    AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_YUV422P,
    AV_PIX_FMT_YUV444P,
    AV_PIX_FMT_YUVA420P,
    AV_PIX_FMT_YUVA422P,
    AV_PIX_FMT_YUVA444P,
    AV_PIX_FMT_YUV420P9,   AV_PIX_FMT_YUV422P9,   AV_PIX_FMT_YUV444P9,
    AV_PIX_FMT_YUV420P10,  AV_PIX_FMT_YUV422P10,  AV_PIX_FMT_YUV444P10,
    AV_PIX_FMT_YUV444P12,  AV_PIX_FMT_YUV422P12,  AV_PIX_FMT_YUV420P12,
    AV_PIX_FMT_YUV444P14,  AV_PIX_FMT_YUV422P14,  AV_PIX_FMT_YUV420P14,
    AV_PIX_FMT_YUV420P16,  AV_PIX_FMT_YUV422P16,  AV_PIX_FMT_YUV444P16,
    AV_PIX_FMT_YUVA420P9,  AV_PIX_FMT_YUVA422P9,  AV_PIX_FMT_YUVA444P9,
    AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10,
    AV_PIX_FMT_YUVA422P12, AV_PIX_FMT_YUVA444P12,
    AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16,
    AV_PIX_FMT_NONE
};

AVFILTER_DEFINE_CLASS(chromahold);

const FFFilter ff_vf_chromahold = {
    .p.name        = "chromahold",
    .p.description = NULL_IF_CONFIG_SMALL("Turns a certain color range into gray."),
    .p.priv_class  = &chromahold_class,
    .p.flags       = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,
    .priv_size     = sizeof(ChromakeyContext),
    FILTER_INPUTS(inputs),
    FILTER_OUTPUTS(outputs),
    FILTER_PIXFMTS_ARRAY(hold_pixel_fmts),
    .process_command = process_command,
};